repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
jgeewax/gcloud-python | runtimeconfig/google/cloud/runtimeconfig/connection.py | 3 | 1748 | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create / interact with Google Cloud RuntimeConfig connections."""
from google.cloud import _http
class Connection(_http.JSONConnection):
    """A connection to Google Cloud RuntimeConfig via the JSON REST API.

    :type credentials: :class:`oauth2client.client.OAuth2Credentials`
    :param credentials: (Optional) The OAuth2 Credentials to use for this
                        connection.

    :type http: :class:`httplib2.Http` or class that defines ``request()``.
    :param http: (Optional) HTTP object to make requests.

    :type api_base_url: str
    :param api_base_url: The base of the API call URL. Defaults to the value
                         :attr:`Connection.API_BASE_URL`.
    """

    # These class attributes are consumed by _http.JSONConnection when it
    # builds request URLs from API_URL_TEMPLATE.
    API_BASE_URL = 'https://runtimeconfig.googleapis.com'
    """The base of the API call URL."""

    API_VERSION = 'v1beta1'
    """The version of the API, used in building the API call's URL."""

    API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'
    """A template for the URL of a particular API call."""

    # Trailing comma is significant: SCOPE must be a tuple of scope strings.
    SCOPE = ('https://www.googleapis.com/auth/cloudruntimeconfig',)
    """The scopes required for authenticating as a RuntimeConfig consumer."""
| apache-2.0 |
Intel-tensorflow/tensorflow | tensorflow/python/modules_with_exports.py | 6 | 4041 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Imports modules that should be scanned during API generation.
This file should eventually contain everything we need to scan looking for
tf_export decorators.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import,g-bad-import-order,g-import-not-at-top
# pylint: disable=unused-import,g-importing-member
# Protocol buffers
from tensorflow.core.framework.graph_pb2 import *
from tensorflow.core.framework.node_def_pb2 import *
from tensorflow.core.framework.summary_pb2 import *
from tensorflow.core.framework.attr_value_pb2 import *
from tensorflow.core.protobuf.meta_graph_pb2 import TensorInfo
from tensorflow.core.protobuf.meta_graph_pb2 import MetaGraphDef
from tensorflow.core.protobuf.config_pb2 import *
from tensorflow.core.util.event_pb2 import *
# Framework
from tensorflow.python.framework.framework_lib import * # pylint: disable=redefined-builtin
from tensorflow.python.framework.versions import *
from tensorflow.python.framework import config
from tensorflow.python.framework import errors
from tensorflow.python.framework import graph_util
# Session
from tensorflow.python.client.client_lib import *
# Ops
from tensorflow.python.ops.standard_ops import * # pylint: disable=redefined-builtin
# Namespaces
from tensorflow.python.ops import initializers_ns as initializers
from tensorflow.python.util.tf_export import tf_export
# _internal APIs
from tensorflow.python.distribute.combinations import generate
from tensorflow.python.distribute.multi_process_runner import *
from tensorflow.python.distribute.multi_worker_test_base import *
from tensorflow.python.distribute.strategy_combinations import *
from tensorflow.python.framework.combinations import *
from tensorflow.python.framework.composite_tensor import *
from tensorflow.python.framework.test_combinations import *
from tensorflow.python.util.tf_decorator import make_decorator
from tensorflow.python.util.tf_decorator import unwrap
from tensorflow.python.distribute.parameter_server_strategy_v2 import *
from tensorflow.python.distribute.coordinator.cluster_coordinator import *
# Re-export the decorator helpers under the v2-only `tf.__internal__`
# namespace (v1=[] means no v1 alias is created).
tf_export('__internal__.decorator.make_decorator', v1=[])(make_decorator)
tf_export('__internal__.decorator.unwrap', v1=[])(unwrap)

# Export protos
# The proto message classes below come from the wildcard imports above, so
# pylint cannot see their definitions; they are re-exported as v1-only API
# symbols here.
# pylint: disable=undefined-variable
tf_export(v1=['AttrValue'])(AttrValue)
tf_export(v1=['ConfigProto'])(ConfigProto)
tf_export(v1=['Event', 'summary.Event'])(Event)
tf_export(v1=['GPUOptions'])(GPUOptions)
tf_export(v1=['GraphDef'])(GraphDef)
tf_export(v1=['GraphOptions'])(GraphOptions)
tf_export(v1=['HistogramProto'])(HistogramProto)
tf_export(v1=['LogMessage'])(LogMessage)
tf_export(v1=['MetaGraphDef'])(MetaGraphDef)
tf_export(v1=['NameAttrList'])(NameAttrList)
tf_export(v1=['NodeDef'])(NodeDef)
tf_export(v1=['OptimizerOptions'])(OptimizerOptions)
tf_export(v1=['RunMetadata'])(RunMetadata)
tf_export(v1=['RunOptions'])(RunOptions)
tf_export(v1=['SessionLog', 'summary.SessionLog'])(SessionLog)
tf_export(v1=['Summary', 'summary.Summary'])(Summary)
tf_export(v1=['summary.SummaryDescription'])(SummaryDescription)
tf_export(v1=['SummaryMetadata'])(SummaryMetadata)
tf_export(v1=['summary.TaggedRunMetadata'])(TaggedRunMetadata)
tf_export(v1=['TensorInfo'])(TensorInfo)
# pylint: enable=undefined-variable
| apache-2.0 |
Vagab0nd/SiCKRAGE | lib3/future/backports/test/pystone.py | 80 | 7427 | #!/usr/bin/env python3
"""
"PYSTONE" Benchmark Program
Version: Python/1.1 (corresponds to C/1.1 plus 2 Pystone fixes)
Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013.
Translated from ADA to C by Rick Richardson.
Every method to preserve ADA-likeness has been used,
at the expense of C-ness.
Translated from C to Python by Guido van Rossum.
Version History:
Version 1.1 corrects two bugs in version 1.0:
First, it leaked memory: in Proc1(), NextRecord ends
up having a pointer to itself. I have corrected this
by zapping NextRecord.PtrComp at the end of Proc1().
Second, Proc3() used the operator != to compare a
record to None. This is rather inefficient and not
true to the intention of the original benchmark (where
a pointer comparison to None is intended; the !=
operator attempts to find a method __cmp__ to do value
comparison of the record). Version 1.1 runs 5-10
percent faster than version 1.0, so benchmark figures
of different versions can't be compared directly.
"""
from __future__ import print_function

# time.clock() was removed in Python 3.8.  Fall back to perf_counter, which
# measures the same kind of short elapsed interval, so this backport keeps
# working on both old and new interpreters.
try:
    from time import clock
except ImportError:
    from time import perf_counter as clock

# Number of benchmark iterations per run (overridable via main()/pystones()).
LOOPS = 50000

__version__ = "1.1"

# Enumeration constants from the original ADA benchmark: Ident1..Ident5 = 1..5.
[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6)
class Record(object):
    """Mutable record mirroring the dhrystone RECORD type.

    Field names deliberately match the original ADA/C benchmark.
    """

    def __init__(self, PtrComp=None, Discr=0, EnumComp=0,
                 IntComp=0, StringComp=0):
        self.PtrComp = PtrComp
        self.Discr = Discr
        self.EnumComp = EnumComp
        self.IntComp = IntComp
        self.StringComp = StringComp

    def copy(self):
        """Return a new Record carrying this record's field values.

        Shallow: the PtrComp reference itself is shared, not duplicated.
        """
        # Attribute names match the constructor's keyword parameters, so the
        # instance dict can be splatted straight back into the constructor.
        return Record(**vars(self))
# C-style boolean constants, kept for fidelity with the original benchmark
# translation (predates widespread use of Python's True/False).
TRUE = 1
FALSE = 0
def main(loops=LOOPS):
    """Run the benchmark and print the elapsed time and pystones/second."""
    benchtime, stones = pystones(loops)
    report = "Pystone(%s) time for %d passes = %g" % (__version__, loops, benchtime)
    print(report)
    print("This machine benchmarks at %g pystones/second" % stones)
def pystones(loops=LOOPS):
    """Run the benchmark and return (benchtime, pystones_per_second)."""
    return Proc0(loops)
# Global benchmark state, mutated by the Proc*/Func* routines below
# (faithful to the original C benchmark's globals).
IntGlob = 0
BoolGlob = FALSE
Char1Glob = '\0'
Char2Glob = '\0'
Array1Glob = [0]*51
# 51x51 matrix: slicing each row gives 51 independent lists (no aliasing).
Array2Glob = [x[:] for x in [Array1Glob]*51]
PtrGlb = None
PtrGlbNext = None
def Proc0(loops=LOOPS):
    """Run the dhrystone main loop and return (benchtime, loops_per_second).

    An empty loop of the same length is timed first and subtracted as
    `nulltime` so that only the benchmark body is measured.
    """
    global IntGlob
    global BoolGlob
    global Char1Glob
    global Char2Glob
    global Array1Glob
    global Array2Glob
    global PtrGlb
    global PtrGlbNext

    # Calibration pass: measure bare loop overhead.
    starttime = clock()
    for i in range(loops):
        pass
    nulltime = clock() - starttime

    # Fixed initial state, as prescribed by the benchmark.
    PtrGlbNext = Record()
    PtrGlb = Record()
    PtrGlb.PtrComp = PtrGlbNext
    PtrGlb.Discr = Ident1
    PtrGlb.EnumComp = Ident3
    PtrGlb.IntComp = 40
    PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING"
    String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING"
    Array2Glob[8][7] = 10

    starttime = clock()
    for i in range(loops):
        Proc5()
        Proc4()
        IntLoc1 = 2
        IntLoc2 = 3
        String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING"
        EnumLoc = Ident2
        BoolGlob = not Func2(String1Loc, String2Loc)
        while IntLoc1 < IntLoc2:
            IntLoc3 = 5 * IntLoc1 - IntLoc2
            IntLoc3 = Proc7(IntLoc1, IntLoc2)
            IntLoc1 = IntLoc1 + 1
        Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3)
        PtrGlb = Proc1(PtrGlb)
        CharIndex = 'A'
        while CharIndex <= Char2Glob:
            if EnumLoc == Func1(CharIndex, 'C'):
                EnumLoc = Proc6(Ident1)
            CharIndex = chr(ord(CharIndex)+1)
        # NOTE(review): on Python 3, '/' is true division, so IntLoc2 becomes
        # a float here; the original C (and Python 2) performed integer
        # division. Kept as-is for benchmark fidelity.
        IntLoc3 = IntLoc2 * IntLoc1
        IntLoc2 = IntLoc3 / IntLoc1
        IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1
        IntLoc1 = Proc2(IntLoc1)
    benchtime = clock() - starttime - nulltime
    # Guard against a zero measured time (very fast machines / coarse clocks).
    if benchtime == 0.0:
        loopsPerBenchtime = 0.0
    else:
        loopsPerBenchtime = (loops / benchtime)
    return benchtime, loopsPerBenchtime
def Proc1(PtrParIn):
    """Exercise record copying and mutation (dhrystone Proc1)."""
    PtrParIn.PtrComp = NextRecord = PtrGlb.copy()
    PtrParIn.IntComp = 5
    NextRecord.IntComp = PtrParIn.IntComp
    NextRecord.PtrComp = PtrParIn.PtrComp
    NextRecord.PtrComp = Proc3(NextRecord.PtrComp)
    if NextRecord.Discr == Ident1:
        NextRecord.IntComp = 6
        NextRecord.EnumComp = Proc6(PtrParIn.EnumComp)
        NextRecord.PtrComp = PtrGlb.PtrComp
        NextRecord.IntComp = Proc7(NextRecord.IntComp, 10)
    else:
        PtrParIn = NextRecord.copy()
    # Version 1.1 fix: zap the pointer so NextRecord does not end up
    # referencing itself (this leaked memory in version 1.0).
    NextRecord.PtrComp = None
    return PtrParIn
def Proc2(IntParIO):
    """Derive a new value for IntParIO from IntGlob (dhrystone Proc2).

    Relies on Proc5 having set Char1Glob to 'A' beforehand; otherwise
    EnumLoc would be read before assignment.
    """
    IntLoc = IntParIO + 10
    while True:
        if Char1Glob == 'A':
            IntLoc -= 1
            IntParIO = IntLoc - IntGlob
            EnumLoc = Ident1
        if EnumLoc == Ident1:
            break
    return IntParIO
def Proc3(PtrParOut):
    """Return PtrGlb's pointer component (dhrystone Proc3).

    Side effect: updates PtrGlb.IntComp via Proc7. Note that if PtrGlb were
    None, the final assignment would raise AttributeError; the benchmark
    always initializes PtrGlb before calling this.
    """
    global IntGlob
    if PtrGlb is not None:
        PtrParOut = PtrGlb.PtrComp
    else:
        IntGlob = 100
    PtrGlb.IntComp = Proc7(10, IntGlob)
    return PtrParOut
def Proc4():
    """Set Char2Glob to 'B' (dhrystone Proc4).

    The boolean computed here is deliberately discarded, as in the original
    benchmark.
    """
    global Char2Glob
    BoolLoc = (Char1Glob == 'A') or BoolGlob
    Char2Glob = 'B'
def Proc5():
    """Reset Char1Glob to 'A' and BoolGlob to FALSE (dhrystone Proc5)."""
    global Char1Glob, BoolGlob
    Char1Glob = 'A'
    BoolGlob = FALSE
def Proc6(EnumParIn):
    """Map one enumeration value onto another (dhrystone Proc6)."""
    # Default: keep the input unless Func3 rejects it (i.e. it isn't Ident3).
    EnumParOut = EnumParIn if Func3(EnumParIn) else Ident4
    if EnumParIn == Ident1:
        EnumParOut = Ident1
    elif EnumParIn == Ident2:
        EnumParOut = Ident1 if IntGlob > 100 else Ident4
    elif EnumParIn == Ident3:
        EnumParOut = Ident2
    elif EnumParIn == Ident5:
        EnumParOut = Ident3
    # Ident4 (and any unknown value) keeps the default computed above.
    return EnumParOut
def Proc7(IntParI1, IntParI2):
    """Return IntParI1 + IntParI2 + 2 (dhrystone Proc7)."""
    return IntParI2 + IntParI1 + 2
def Proc8(Array1Par, Array2Par, IntParI1, IntParI2):
    """Mutate the benchmark arrays in place (dhrystone Proc8).

    Writes IntParI2 and derived indices into Array1Par and Array2Par, and
    sets the global IntGlob to 5.
    """
    global IntGlob
    base = IntParI1 + 5
    Array1Par[base] = IntParI2
    Array1Par[base + 1] = Array1Par[base]
    Array1Par[base + 30] = base
    # The original loop runs over exactly two columns: base and base + 1.
    for col in (base, base + 1):
        Array2Par[base][col] = base
    Array2Par[base][base - 1] += 1
    Array2Par[base + 20][base] = Array1Par[base]
    IntGlob = 5
def Func1(CharPar1, CharPar2):
    """Return Ident2 when the two characters match, Ident1 otherwise."""
    if CharPar1 == CharPar2:
        return Ident2
    return Ident1
def Func2(StrParI1, StrParI2):
    """Compare two benchmark strings; return TRUE or FALSE (dhrystone Func2).

    NOTE(review): if the Func1 call never returns Ident1, CharLoc below is
    read before assignment. This is faithful to the original translation,
    which relies on the benchmark's fixed input strings differing at the
    inspected positions.
    """
    IntLoc = 1
    while IntLoc <= 1:
        if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
            CharLoc = 'A'
            IntLoc = IntLoc + 1
    if CharLoc >= 'W' and CharLoc <= 'Z':
        IntLoc = 7
    if CharLoc == 'X':
        return TRUE
    else:
        # Lexicographic comparison of the full strings decides the result.
        if StrParI1 > StrParI2:
            IntLoc = IntLoc + 7
            return TRUE
        else:
            return FALSE
def Func3(EnumParIn):
    """Return TRUE iff EnumParIn equals Ident3 (dhrystone Func3)."""
    return TRUE if EnumParIn == Ident3 else FALSE
# Command-line entry point: optional single argument overrides LOOPS.
if __name__ == '__main__':
    import sys

    def error(msg):
        # Print the error plus usage to stderr and exit with status 100.
        print(msg, end=' ', file=sys.stderr)
        print("usage: %s [number_of_loops]" % sys.argv[0], file=sys.stderr)
        sys.exit(100)

    nargs = len(sys.argv) - 1
    if nargs > 1:
        error("%d arguments are too many;" % nargs)
    elif nargs == 1:
        try: loops = int(sys.argv[1])
        except ValueError:
            error("Invalid argument %r;" % sys.argv[1])
    else:
        loops = LOOPS
    main(loops)
| gpl-3.0 |
TeslaProject/external_chromium_org | chrome/common/extensions/docs/server2/manifest_features_test.py | 122 | 1163 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from manifest_features import ConvertDottedKeysToNested
class ManifestFeaturesTest(unittest.TestCase):
    """Tests for manifest_features.ConvertDottedKeysToNested."""

    def testConvertDottedKeysToNested(self):
        """Dotted keys become nested 'children' dicts keyed by each suffix."""
        flat_docs = {
            'doc1.sub2': {'name': 'doc1.sub2'},
            'doc1': {'name': 'doc1'},
            'doc2': {'name': 'doc2'},
            'doc1.sub1.subsub1': {'name': 'doc1.sub1.subsub1'},
            'doc1.sub1': {'name': 'doc1.sub1'},
        }
        nested_docs = {
            'doc1': {
                'name': 'doc1',
                'children': {
                    'sub1': {
                        'name': 'sub1',
                        'children': {
                            'subsub1': {'name': 'subsub1'},
                        },
                    },
                    'sub2': {'name': 'sub2'},
                },
            },
            'doc2': {'name': 'doc2'},
        }
        self.assertEqual(nested_docs, ConvertDottedKeysToNested(flat_docs))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
cherusk/ansible | lib/ansible/modules/network/avi/avi_poolgroup.py | 28 | 5112 | #!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 16.3.8
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_poolgroup
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of PoolGroup Avi RESTful Object
description:
- This module is used to configure PoolGroup object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
cloud_config_cksum:
description:
- Checksum of cloud configuration for poolgroup.
- Internally set by cloud connector.
cloud_ref:
description:
- It is a reference to an object of type cloud.
created_by:
description:
- Name of the user who created the object.
deployment_policy_ref:
description:
- When setup autoscale manager will automatically promote new pools into production when deployment goals are met.
- It is a reference to an object of type poolgroupdeploymentpolicy.
description:
description:
- Description of pool group.
fail_action:
description:
- Enable an action - close connection, http redirect, or local http response - when a pool group failure happens.
- By default, a connection will be closed, in case the pool group experiences a failure.
members:
description:
- List of pool group members object of type poolgroupmember.
min_servers:
description:
- The minimum number of servers to distribute traffic to.
- Default value when not specified in API or module is interpreted by Avi Controller as 0.
name:
description:
- The name of the pool group.
required: true
priority_labels_ref:
description:
- Uuid of the priority labels.
- If not provided, pool group member priority label will be interpreted as a number with a larger number considered higher priority.
- It is a reference to an object of type prioritylabels.
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Uuid of the pool group.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create PoolGroup object
avi_poolgroup:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_poolgroup
"""
RETURN = '''
obj:
description: PoolGroup (api/poolgroup) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
    """Ansible entry point for the Avi PoolGroup module.

    Builds the argument spec, merges in the common Avi options, and hands
    off to the shared avi_ansible_api driver.
    """
    spec = dict(
        state=dict(default='present', choices=['absent', 'present']),
        cloud_config_cksum=dict(type='str',),
        cloud_ref=dict(type='str',),
        created_by=dict(type='str',),
        deployment_policy_ref=dict(type='str',),
        description=dict(type='str',),
        fail_action=dict(type='dict',),
        members=dict(type='list',),
        min_servers=dict(type='int',),
        name=dict(type='str', required=True),
        priority_labels_ref=dict(type='str',),
        tenant_ref=dict(type='str',),
        url=dict(type='str',),
        uuid=dict(type='str',),
    )
    # Common options (controller, username, password, ...) are merged last,
    # matching the original precedence.
    spec.update(avi_common_argument_spec())
    module = AnsibleModule(argument_spec=spec, supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=16.3.5.post1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    return avi_ansible_api(module, 'poolgroup', set([]))


if __name__ == '__main__':
    main()
| gpl-3.0 |
kosz85/django | tests/admin_views/tests.py | 12 | 271383 | import datetime
import os
import re
import unittest
from urllib.parse import parse_qsl, urljoin, urlparse
import pytz
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import (
SimpleTestCase, TestCase, ignore_warnings, modify_settings,
override_settings, skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix, patch_logger
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, translation
from django.utils.cache import get_max_age
from django.utils.deprecation import RemovedInDjango21Warning
from django.utils.encoding import force_bytes, force_text, iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Answer, Answer2, Article, BarAccount, Book,
Bookmark, Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child,
Choice, City, Collector, Color, ComplexSortedPerson, CoverLetter,
CustomArticle, CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel,
Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount,
FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link,
MainPrepopulated, Media, ModelWithStringPrimaryKey, OtherStory, Paper,
Parent, ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona,
Picture, Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post,
PrePopulatedPost, Promo, Question, ReadablePizza, Recommendation,
Recommender, RelatedPrepopulated, RelatedWithUUIDPKModel, Report,
Restaurant, RowLevelChangePermissionModel, SecretHideout, Section,
ShortMessage, Simple, State, Story, SuperSecretHideout, SuperVillain,
Telegram, TitleTranslation, Topping, UnchangeableObject, UndeletableObject,
UnorderedObject, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour,
)
# Admin login failure message; tests below compare response content against
# this exact text (the backslash continuation is part of the string value).
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
class AdminFieldExtractionMixin:
    """
    Helper methods for extracting data from AdminForm.
    """
    def get_admin_form_fields(self, response):
        """
        Return a flat list of AdminFields for the AdminForm in the response,
        in fieldset/line order.
        """
        fields = []
        for fieldset in response.context['adminform']:
            for field_line in fieldset:
                fields.extend(field_line)
        return fields

    def get_admin_readonly_fields(self, response):
        """
        Return the readonly fields for the response's AdminForm.
        """
        readonly = []
        for field in self.get_admin_form_fields(response):
            if field.is_readonly:
                readonly.append(field)
        return readonly

    def get_admin_readonly_field(self, response, field_name):
        """
        Return the readonly field for the given field_name (None if absent).
        """
        for field in self.get_admin_readonly_fields(response):
            if field.field['name'] == field_name:
                return field
@override_settings(ROOT_URLCONF='admin_views.urls', USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en')
class AdminViewBasicTestCase(TestCase):
    """Shared fixture data and helper assertions for the admin view tests."""

    @classmethod
    def setUpTestData(cls):
        # One superuser plus a small object graph (sections, articles,
        # colors, fabrics, books with chapters) exercised by the subclasses.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.color1 = Color.objects.create(value='Red', warm=True)
        cls.color2 = Color.objects.create(value='Orange', warm=True)
        cls.color3 = Color.objects.create(value='Blue', warm=False)
        cls.color4 = Color.objects.create(value='Green', warm=False)
        cls.fab1 = Fabric.objects.create(surface='x')
        cls.fab2 = Fabric.objects.create(surface='y')
        cls.fab3 = Fabric.objects.create(surface='plain')
        cls.b1 = Book.objects.create(name='Book 1')
        cls.b2 = Book.objects.create(name='Book 2')
        cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1)
        cls.pro1 = Promo.objects.create(name='Promo 2', book=cls.b2)
        cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1)
        cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2)
        cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2)
        cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1')
        cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2')

        # Post data for edit inline
        # Formset payload matching the three articles above plus three empty
        # extra forms; reused (and tweaked) by many POST tests.
        cls.inline_post_data = {
            "name": "Test section",
            # inline data
            "article_set-TOTAL_FORMS": "6",
            "article_set-INITIAL_FORMS": "3",
            "article_set-MAX_NUM_FORMS": "0",
            "article_set-0-id": cls.a1.pk,
            # there is no title in database, give one here or formset will fail.
            "article_set-0-title": "Norske bostaver æøå skaper problemer",
            "article_set-0-content": "<p>Middle content</p>",
            "article_set-0-date_0": "2008-03-18",
            "article_set-0-date_1": "11:54:58",
            "article_set-0-section": cls.s1.pk,
            "article_set-1-id": cls.a2.pk,
            "article_set-1-title": "Need a title.",
            "article_set-1-content": "<p>Oldest content</p>",
            "article_set-1-date_0": "2000-03-18",
            "article_set-1-date_1": "11:54:58",
            "article_set-2-id": cls.a3.pk,
            "article_set-2-title": "Need a title.",
            "article_set-2-content": "<p>Newest content</p>",
            "article_set-2-date_0": "2009-03-18",
            "article_set-2-date_1": "11:54:58",
            "article_set-3-id": "",
            "article_set-3-title": "",
            "article_set-3-content": "",
            "article_set-3-date_0": "",
            "article_set-3-date_1": "",
            "article_set-4-id": "",
            "article_set-4-title": "",
            "article_set-4-content": "",
            "article_set-4-date_0": "",
            "article_set-4-date_1": "",
            "article_set-5-id": "",
            "article_set-5-title": "",
            "article_set-5-content": "",
            "article_set-5-date_0": "",
            "article_set-5-date_1": "",
        }

    def setUp(self):
        # Every test runs as the superuser created in setUpTestData.
        self.client.force_login(self.superuser)

    def assertContentBefore(self, response, text1, text2, failing_msg=None):
        """
        Testing utility asserting that text1 appears before text2 in response
        content.
        """
        self.assertEqual(response.status_code, 200)
        self.assertLess(
            response.content.index(force_bytes(text1)),
            response.content.index(force_bytes(text2)),
            (failing_msg or '') + '\nResponse:\n' + response.content.decode(response.charset)
        )
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
If you leave off the trailing slash, app should redirect and add it.
"""
add_url = reverse('admin:admin_views_article_add')
response = self.client.get(add_url[:-1])
self.assertRedirects(response, add_url, status_code=301)
def test_admin_static_template_tag(self):
"""
admin_static.static points to the collectstatic version
(as django.contrib.collectstatic is in INSTALLED_APPS).
"""
old_url = staticfiles_storage.base_url
staticfiles_storage.base_url = '/test/'
try:
self.assertEqual(static('path'), '/test/path')
finally:
staticfiles_storage.base_url = old_url
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'})
self.assertContains(
response, 'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response"
)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
    def test_basic_edit_GET_string_PK(self):
        """
        GET on the change_view (when passing a string as the PK argument for a
        model with an integer PK field) redirects to the index page with a
        message saying the object doesn't exist.
        """
        # quote() escapes the slash/angle brackets so the bogus PK survives
        # URL routing and reaches the admin view.
        response = self.client.get(reverse('admin:admin_views_section_change', args=(quote("abc/<b>"),)), follow=True)
        self.assertRedirects(response, reverse('admin:index'))
        self.assertEqual(
            [m.message for m in response.context['messages']],
            ["""section with ID "abc/<b>" doesn't exist. Perhaps it was deleted?"""]
        )
    def test_basic_edit_GET_old_url_redirect(self):
        """
        The change URL changed in Django 1.9, but the old one still redirects.
        """
        # The pre-1.9 URL lacked the trailing 'change/' segment.
        response = self.client.get(
            reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '')
        )
        self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
    def test_basic_inheritance_GET_string_PK(self):
        """
        GET on the change_view (for inherited models) redirects to the index
        page with a message saying the object doesn't exist.
        """
        response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)), follow=True)
        self.assertRedirects(response, reverse('admin:index'))
        self.assertEqual(
            [m.message for m in response.context['messages']],
            ["""super villain with ID "abc" doesn't exist. Perhaps it was deleted?"""]
        )
    def test_basic_add_POST(self):
        """
        A smoke test to ensure POST on add_view works.
        """
        post_data = {
            "name": "Another Section",
            # inline data
            "article_set-TOTAL_FORMS": "3",
            "article_set-INITIAL_FORMS": "0",
            "article_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post(reverse('admin:admin_views_section_add'), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
    def test_popup_add_POST(self):
        """
        Ensure http response from a popup is properly escaped.
        """
        post_data = {
            '_popup': '1',
            'title': 'title with a new\nline',
            'content': 'some content',
            'date_0': '2010-09-10',
            'date_1': '14:55:39',
        }
        response = self.client.post(reverse('admin:admin_views_article_add'), post_data)
        # The newline must appear escaped ('\\n') in the popup's JS payload.
        self.assertContains(response, 'title with a new\\nline')
    def test_basic_edit_POST(self):
        """
        A smoke test to ensure POST on edit_view works.
        """
        url = reverse('admin:admin_views_section_change', args=(self.s1.pk,))
        response = self.client.post(url, self.inline_post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
    def test_edit_save_as(self):
        """
        Test "save as": posting with _saveasnew creates a new object instead
        of updating the existing one.
        """
        post_data = self.inline_post_data.copy()
        post_data.update({
            '_saveasnew': 'Save+as+new',
            "article_set-1-section": "1",
            "article_set-2-section": "1",
            "article_set-3-section": "1",
            "article_set-4-section": "1",
            "article_set-5-section": "1",
        })
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
    def test_edit_save_as_delete_inline(self):
        """
        Should be able to "Save as new" while also deleting an inline.
        """
        post_data = self.inline_post_data.copy()
        post_data.update({
            '_saveasnew': 'Save+as+new',
            "article_set-1-section": "1",
            "article_set-2-section": "1",
            "article_set-2-DELETE": "1",
            "article_set-3-section": "1",
        })
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)
        # started with 3 articles, one was deleted.
        self.assertEqual(Section.objects.latest('id').article_set.count(), 2)
    def test_change_list_column_field_classes(self):
        """Changelist columns get CSS classes derived from list_display."""
        response = self.client.get(reverse('admin:admin_views_article_changelist'))
        # callables display the callable name.
        self.assertContains(response, 'column-callable_year')
        self.assertContains(response, 'field-callable_year')
        # lambdas display as "lambda" + index that they appear in list_display.
        self.assertContains(response, 'column-lambda8')
        self.assertContains(response, 'field-lambda8')
    def test_change_list_sorting_callable(self):
        """
        Ensure we can sort on a list_display field that is a callable
        (column 2 is callable_year in ArticleAdmin)
        """
        response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2})
        # Ascending by year: oldest first.
        self.assertContentBefore(
            response, 'Oldest content', 'Middle content',
            "Results of sorting on callable are out of order."
        )
        self.assertContentBefore(
            response, 'Middle content', 'Newest content',
            "Results of sorting on callable are out of order."
        )
    def test_change_list_sorting_model(self):
        """
        Ensure we can sort on a list_display field that is a Model method
        (column 3 is 'model_year' in ArticleAdmin)
        """
        # '-3' requests descending order on column 3.
        response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'})
        self.assertContentBefore(
            response, 'Newest content', 'Middle content',
            "Results of sorting on Model method are out of order."
        )
        self.assertContentBefore(
            response, 'Middle content', 'Oldest content',
            "Results of sorting on Model method are out of order."
        )
    def test_change_list_sorting_model_admin(self):
        """
        Ensure we can sort on a list_display field that is a ModelAdmin method
        (column 4 is 'modeladmin_year' in ArticleAdmin)
        """
        response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'})
        self.assertContentBefore(
            response, 'Oldest content', 'Middle content',
            "Results of sorting on ModelAdmin method are out of order."
        )
        self.assertContentBefore(
            response, 'Middle content', 'Newest content',
            "Results of sorting on ModelAdmin method are out of order."
        )
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'})
self.assertContentBefore(
response, '2009', '2008',
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, '2008', '2000',
"Results of sorting on ModelAdmin method are out of order."
)
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'})
self.assertContentBefore(
response, '2000', '2008',
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, '2008', '2009',
"Results of sorting on ModelAdmin method are out of order."
)
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
# Sort by name, gender
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'})
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'})
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
changed, even if it is defined in `Modeladmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
response = self.client.get(reverse('admin:admin_views_person_changelist'), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso='ur', name='Urdu')
l2 = Language.objects.create(iso='ar', name='Arabic')
link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),))
link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),))
response = self.client.get(reverse('admin:admin_views_language_changelist'), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'})
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,))
response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
self.assertContentBefore(response, link1, link2)
    def test_multiple_sort_same_field(self):
        # The changelist displays the correct columns if two columns correspond
        # to the same ordering field.
        # Scenario 1: Podcast — default admin ordering keeps A before B.
        dt = datetime.datetime.now()
        p1 = Podcast.objects.create(name="A", release_date=dt)
        p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
        link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),))
        link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),))
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
        self.assertContentBefore(response, link1, link2)
        # Scenario 2: ComplexSortedPerson — two list_display columns share
        # one underlying ordering field; both columns must still render.
        p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
        p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
        link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,))
        link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,))
        response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {})
        # Should have 5 columns (including action checkbox col)
        self.assertContains(response, '<th scope="col"', count=5)
        self.assertContains(response, 'Name')
        self.assertContains(response, 'Colored name')
        # Check order
        self.assertContentBefore(response, 'Name', 'Colored name')
        # Check sorting - should be by name
        self.assertContentBefore(response, link2, link1)
    def test_sort_indicators_admin_order(self):
        """
        The admin shows default sort indicators for all kinds of 'ordering'
        fields: field names, method on the model admin and model itself, and
        other callables. See #17252.
        """
        models = [(AdminOrderedField, 'adminorderedfield'),
                  (AdminOrderedModelMethod, 'adminorderedmodelmethod'),
                  (AdminOrderedAdminMethod, 'adminorderedadminmethod'),
                  (AdminOrderedCallable, 'adminorderedcallable')]
        for model, url in models:
            # Create the rows out of order to prove sorting is applied.
            model.objects.create(stuff='The Last Item', order=3)
            model.objects.create(stuff='The First Item', order=1)
            model.objects.create(stuff='The Middle Item', order=2)
            response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {})
            self.assertEqual(response.status_code, 200)
            # Should have 3 columns including action checkbox col.
            self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
            # Check if the correct column was selected. 2 is the index of the
            # 'order' column in the model admin's 'list_display' with 0 being
            # the implicit 'action_checkbox' and 1 being the column 'stuff'.
            self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
            # Check order of records.
            self.assertContentBefore(response, 'The First Item', 'The Middle Item')
            self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
    def test_has_related_field_in_list_display_fk(self):
        """Joins shouldn't be performed for <FK>_id fields in list display."""
        state = State.objects.create(name='Karnataka')
        City.objects.create(state=state, name='Bangalore')
        response = self.client.get(reverse('admin:admin_views_city_changelist'), {})
        # The ChangeList instance is mutated in place to probe
        # has_related_field_in_list_display() with different configurations.
        response.context['cl'].list_display = ['id', 'name', 'state']
        self.assertIs(response.context['cl'].has_related_field_in_list_display(), True)
        # Using the raw '<FK>_id' column must not be treated as a relation.
        response.context['cl'].list_display = ['id', 'name', 'state_id']
        self.assertIs(response.context['cl'].has_related_field_in_list_display(), False)
def test_has_related_field_in_list_display_o2o(self):
"""Joins shouldn't be performed for <O2O>_id fields in list display."""
media = Media.objects.create(name='Foo')
Vodcast.objects.create(media=media)
response = self.client.get(reverse('admin:admin_views_vodcast_changelist'), {})
response.context['cl'].list_display = ['media']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), True)
response.context['cl'].list_display = ['media_id']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), False)
    def test_limited_filter(self):
        """Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
        This also tests relation-spanning filters (e.g. 'color__value').
        """
        response = self.client.get(reverse('admin:admin_views_thing_changelist'))
        self.assertContains(
            response, '<div id="changelist-filter">',
            msg_prefix="Expected filter not found in changelist view"
        )
        # 'Blue' is excluded by limit_choices_to, so no filter link for it.
        self.assertNotContains(
            response, '<a href="?color__id__exact=3">Blue</a>',
            msg_prefix="Changelist filter not correctly limited by limit_choices_to"
        )
    def test_relation_spanning_filters(self):
        """
        Changelist filters may span relations (forward FKs, reverse
        relations, and mixes of both); each generated filter link must
        appear on the page and actually narrow the queryset.
        """
        changelist_url = reverse('admin:admin_views_chapterxtra1_changelist')
        response = self.client.get(changelist_url)
        self.assertContains(response, '<div id="changelist-filter">')
        # Maps each filter lookup path to the candidate values to try and a
        # predicate verifying that a returned object really matches the value.
        filters = {
            'chap__id__exact': {
                'values': [c.id for c in Chapter.objects.all()],
                'test': lambda obj, value: obj.chap.id == value,
            },
            'chap__title': {
                'values': [c.title for c in Chapter.objects.all()],
                'test': lambda obj, value: obj.chap.title == value,
            },
            'chap__book__id__exact': {
                'values': [b.id for b in Book.objects.all()],
                'test': lambda obj, value: obj.chap.book.id == value,
            },
            'chap__book__name': {
                'values': [b.name for b in Book.objects.all()],
                'test': lambda obj, value: obj.chap.book.name == value,
            },
            'chap__book__promo__id__exact': {
                'values': [p.id for p in Promo.objects.all()],
                'test': lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists(),
            },
            'chap__book__promo__name': {
                'values': [p.name for p in Promo.objects.all()],
                'test': lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists(),
            },
            # A forward relation (book) after a reverse relation (promo).
            'guest_author__promo__book__id__exact': {
                'values': [p.id for p in Book.objects.all()],
                'test': lambda obj, value: obj.guest_author.promo_set.filter(book=value).exists(),
            },
        }
        for filter_path, params in filters.items():
            for value in params['values']:
                query_string = urlencode({filter_path: value})
                # ensure filter link exists
                self.assertContains(response, '<a href="?%s"' % query_string)
                # ensure link works
                filtered_response = self.client.get('%s?%s' % (changelist_url, query_string))
                self.assertEqual(filtered_response.status_code, 200)
                # ensure changelist contains only valid objects
                for obj in filtered_response.context['cl'].queryset.all():
                    self.assertTrue(params['test'](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse('admin:admin_views_thing_changelist')
response = self.client.get(changelist_url, {'notarealfield': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {'notarealfield__whatever': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {'pub_date__gte': 'foo'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
changelist_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(changelist_url)
self.assertContains(response, '4 articles')
response = self.client.get(changelist_url, {'section__isnull': 'false'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': '0'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': 'true'})
self.assertContains(response, '1 article')
response = self.client.get(changelist_url, {'section__isnull': '1'})
self.assertContains(response, '1 article')
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse('admin:admin_views_article_changelist'))
self.assertContains(response, '<a href="%s">' % reverse('admin:logout'))
self.assertContains(response, '<a href="%s">' % reverse('admin:password_change'))
    def test_named_group_field_choices_change_list(self):
        """
        Ensures the admin changelist shows correct values in the relevant column
        for rows corresponding to instances of a model in which a named group
        has been used in the choices option of a field.
        """
        link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,))
        link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,))
        response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
        fail_msg = (
            "Changelist table isn't showing the right human-readable values "
            "set by a model field 'choices' option named group."
        )
        # Human-readable labels ('Horizontal'/'Vertical'), not the stored
        # codes, must appear as the row link text.
        self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True)
        self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True)
    def test_named_group_field_choices_filter(self):
        """
        Ensures the filter UI shows correctly when at least one named group has
        been used in the choices option of a model field.
        """
        response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
        fail_msg = (
            "Changelist filter isn't showing options contained inside a model "
            "field 'choices' option named group."
        )
        self.assertContains(response, '<div id="changelist-filter">')
        # Filter links use the stored codes ('x'/'y') but display the
        # human-readable labels from the named choice group.
        self.assertContains(
            response, '<a href="?surface__exact=x" title="Horizontal">Horizontal</a>',
            msg_prefix=fail_msg, html=True
        )
        self.assertContains(
            response, '<a href="?surface__exact=y" title="Vertical">Vertical</a>',
            msg_prefix=fail_msg, html=True
        )
    def test_change_list_null_boolean_display(self):
        """A None value in a nullable boolean column renders the 'unknown' icon."""
        Post.objects.create(public=None)
        response = self.client.get(reverse('admin:admin_views_post_changelist'))
        self.assertContains(response, 'icon-unknown.svg')
    def test_i18n_language_non_english_default(self):
        """
        Check if the JavaScript i18n view returns an empty language catalog
        if the default language is non-English but the selected language
        is English. See #13388 and #3594 for more details.
        """
        with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'):
            response = self.client.get(reverse('admin:jsi18n'))
            # No French strings should leak into the English catalog.
            self.assertNotContains(response, 'Choisir une heure')
    def test_i18n_language_non_english_fallback(self):
        """
        Makes sure that the fallback language is still working properly
        in cases where the selected language cannot be found.
        """
        with self.settings(LANGUAGE_CODE='fr'), translation.override('none'):
            response = self.client.get(reverse('admin:jsi18n'))
            # 'none' is not a real language, so the catalog falls back to
            # LANGUAGE_CODE ('fr') and contains French strings.
            self.assertContains(response, 'Choisir une heure')
    def test_jsi18n_with_context(self):
        """The jsi18n view works on a site configured with extra context."""
        response = self.client.get(reverse('admin-extra-context:jsi18n'))
        self.assertEqual(response.status_code, 200)
    def test_L10N_deactivated(self):
        """
        Check if L10N is deactivated, the JavaScript i18n view doesn't
        return localized date/time formats. Refs #14824.
        """
        with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'):
            response = self.client.get(reverse('admin:jsi18n'))
            # The Russian (localized) format must not appear...
            self.assertNotContains(response, '%d.%m.%Y %H:%M:%S')
            # ...only the unlocalized ISO-style default.
            self.assertContains(response, '%Y-%m-%d %H:%M:%S')
    def test_disallowed_filtering(self):
        """
        Lookups not sanctioned by list_filter/date_hierarchy are rejected
        with a 400 and logged as DisallowedModelAdminLookup, while local
        fields and explicitly-listed filters are allowed.
        """
        with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as calls:
            response = self.client.get(
                "%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist')
            )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
        # Filters are allowed if explicitly included in list_filter
        response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist'))
        self.assertEqual(response.status_code, 200)
        response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist'))
        self.assertEqual(response.status_code, 200)
        # Filters should be allowed if they involve a local field without the
        # need to whitelist them in list_filter or date_hierarchy.
        response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist'))
        self.assertEqual(response.status_code, 200)
        # Filtering on a parent-link (multi-table inheritance pointer) is
        # also considered local and therefore allowed.
        e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
        e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
        WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
        WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
        response = self.client.get(reverse('admin:admin_views_workhour_changelist'))
        self.assertContains(response, 'employee__person_ptr__exact')
        response = self.client.get("%s?employee__person_ptr__exact=%d" % (
            reverse('admin:admin_views_workhour_changelist'), e1.pk)
        )
        self.assertEqual(response.status_code, 200)
    def test_disallowed_to_field(self):
        """
        TO_FIELD_VAR values are only honored when the target field is
        actually referenced by another registered model; anything else is
        rejected with a 400 and logged as DisallowedModelAdminToField.
        """
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            url = reverse('admin:admin_views_section_changelist')
            response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        # Specifying a field that is not referred by any other model registered
        # to this admin site should raise an exception.
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        # #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
        response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'})
        self.assertEqual(response.status_code, 200)
        # #23915 - Specifying a field referenced by another model though a m2m should be allowed.
        response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'})
        self.assertEqual(response.status_code, 200)
        # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed.
        response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'})
        self.assertEqual(response.status_code, 200)
        # #23329 - Specifying a field that is not referred by any other model directly registered
        # to this admin site but registered through inheritance should be allowed.
        response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 200)
        # #23431 - Specifying a field that is only referred to by a inline of a registered
        # model should be allowed.
        response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 200)
        # #25622 - Specifying a field of a model only referred by a generic
        # relation should raise DisallowedModelAdminToField.
        url = reverse('admin:admin_views_referencedbygenrel_changelist')
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.get(url, {TO_FIELD_VAR: 'object_id'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        # We also want to prevent the add, change, and delete views from
        # leaking a disallowed field value.
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        section = Section.objects.create()
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            url = reverse('admin:admin_views_section_change', args=(section.pk,))
            response = self.client.post(url, {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            url = reverse('admin:admin_views_section_delete', args=(section.pk,))
            response = self.client.post(url, {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
    def test_allowed_filtering_15103(self):
        """
        Regressions test for ticket 15103 - filtering on fields defined in a
        ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
        can break.
        """
        # Filters should be allowed if they are defined on a ForeignKey pointing to this model
        url = "%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
    def test_popup_dismiss_related(self):
        """
        Regression test for ticket 20664 - ensure the pk is properly quoted.
        """
        actor = Actor.objects.create(name="Palin", age=27)
        # IS_POPUP_VAR renders the changelist in popup mode, where each row
        # carries a data-popup-opener attribute holding the quoted pk.
        response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR))
        self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
    def test_hide_change_password(self):
        """
        Tests if the "change password" link in the admin is hidden if the User
        does not have a usable password set.
        (against 9bea85795705d015cdadc82c68b99196a8554f5c)
        """
        user = User.objects.get(username='super')
        user.set_unusable_password()
        user.save()
        # Re-login so the session reflects the unusable password.
        self.client.force_login(user)
        response = self.client.get(reverse('admin:index'))
        self.assertNotContains(
            response, reverse('admin:password_change'),
            msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.'
        )
    def test_change_view_with_show_delete_extra_context(self):
        """
        The 'show_delete' context variable in the admin's change view controls
        the display of the delete button.
        """
        instance = UndeletableObject.objects.create(name='foo')
        response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,)))
        # UndeletableObject's admin sets show_delete=False, so no delete link.
        self.assertNotContains(response, 'deletelink')
    def test_change_view_logs_m2m_field_changes(self):
        """Changes to ManyToManyFields are included in the object's history."""
        pizza = ReadablePizza.objects.create(name='Cheese')
        cheese = Topping.objects.create(name='cheese')
        post_data = {'name': pizza.name, 'toppings': [cheese.pk]}
        response = self.client.post(reverse('admin:admin_views_readablepizza_change', args=(pizza.pk,)), post_data)
        self.assertRedirects(response, reverse('admin:admin_views_readablepizza_changelist'))
        # for_concrete_model=False: the LogEntry is recorded against the
        # proxy model's content type, not its concrete parent's.
        pizza_ctype = ContentType.objects.get_for_model(ReadablePizza, for_concrete_model=False)
        log = LogEntry.objects.filter(content_type=pizza_ctype, object_id=pizza.pk).first()
        self.assertEqual(log.get_change_message(), 'Changed toppings.')
    def test_allows_attributeerror_to_bubble_up(self):
        """
        AttributeErrors are allowed to bubble when raised inside a change list
        view. Requires a model to be created so there's something to display.
        Refs: #16655, #18593, and #18747
        """
        Simple.objects.create()
        with self.assertRaises(AttributeError):
            self.client.get(reverse('admin:admin_views_simple_changelist'))
    def test_changelist_with_no_change_url(self):
        """
        ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
        for change_view is removed from get_urls (#20934).
        """
        o = UnchangeableObject.objects.create()
        response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist'))
        self.assertEqual(response.status_code, 200)
        # Check the format of the shown object -- shouldn't contain a change link
        self.assertContains(response, '<th class="field-__str__">%s</th>' % o, html=True)
    def test_invalid_appindex_url(self):
        """
        #21056 -- URL reversing shouldn't work for nonexistent apps.
        """
        good_url = '/test_admin/admin/admin_views/'
        confirm_good_url = reverse('admin:app_list',
                                   kwargs={'app_label': 'admin_views'})
        self.assertEqual(good_url, confirm_good_url)
        # Unknown app labels must fail to reverse, whether passed as
        # keyword or positional arguments.
        with self.assertRaises(NoReverseMatch):
            reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'})
        with self.assertRaises(NoReverseMatch):
            reverse('admin:app_list', args=('admin_views2',))
    def test_resolve_admin_views(self):
        """Resolved admin views expose their AdminSite and ModelAdmin."""
        index_match = resolve('/test_admin/admin4/')
        list_match = resolve('/test_admin/admin4/auth/user/')
        self.assertIs(index_match.func.admin_site, customadmin.simple_site)
        self.assertIsInstance(list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin)
    def test_adminsite_display_site_url(self):
        """
        #13749 - Admin should display link to front-end site 'View site'
        """
        url = reverse('admin:index')
        response = self.client.get(url)
        # The site_url configured on the AdminSite is both in the context
        # and rendered as the 'View site' link.
        self.assertEqual(response.context['site_url'], '/my-site-url/')
        self.assertContains(response, '<a href="/my-site-url/">View site</a>')
    @override_settings(TIME_ZONE='America/Sao_Paulo', USE_TZ=True)
    def test_date_hierarchy_timezone_dst(self):
        """
        The date hierarchy on a related field renders correctly even when
        the stored datetime falls in a DST transition for the active
        timezone.
        """
        # This datetime doesn't exist in this timezone due to DST.
        date = pytz.timezone('America/Sao_Paulo').localize(datetime.datetime(2016, 10, 16, 15), is_dst=None)
        q = Question.objects.create(question='Why?', expires=date)
        Answer2.objects.create(question=q, answer='Because.')
        response = self.client.get(reverse('admin:admin_views_answer2_changelist'))
        self.assertEqual(response.status_code, 200)
        # The drill-down links carry the correct local date components.
        self.assertContains(response, 'question__expires__day=16')
        self.assertContains(response, 'question__expires__month=10')
        self.assertContains(response, 'question__expires__year=2016')
@override_settings(TEMPLATES=[{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    # Put this app's and the shared tests templates dirs in DIRS to take precedence
    # over the admin's templates dir.
    'DIRS': [
        os.path.join(os.path.dirname(__file__), 'templates'),
        os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates'),
    ],
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.debug',
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
    },
}])
class AdminCustomTemplateTests(AdminViewBasicTestCase):
    """Tests for per-ModelAdmin and site-wide admin template overrides."""

    def test_custom_model_admin_templates(self):
        """Every admin view of CustomArticle uses its custom template."""
        # Test custom change list template with custom extra context
        response = self.client.get(reverse('admin:admin_views_customarticle_changelist'))
        self.assertContains(response, "var hello = 'Hello!';")
        self.assertTemplateUsed(response, 'custom_admin/change_list.html')

        # Test custom add form template
        response = self.client.get(reverse('admin:admin_views_customarticle_add'))
        self.assertTemplateUsed(response, 'custom_admin/add_form.html')

        # Add an article so we can test delete, change, and history views
        post = self.client.post(reverse('admin:admin_views_customarticle_add'), {
            'content': '<p>great article</p>',
            'date_0': '2008-03-18',
            'date_1': '10:54:39'
        })
        self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist'))
        self.assertEqual(CustomArticle.objects.all().count(), 1)
        article_pk = CustomArticle.objects.all()[0].pk

        # Test custom delete, change, and object history templates
        # Test custom change form template
        response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/change_form.html')
        response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
        # Bugfix: select the article by its actual pk instead of the
        # hardcoded '1', which breaks whenever the pk sequence doesn't
        # start at 1 (e.g. non-reset sequences between tests).
        response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={
            'index': 0,
            'action': ['delete_selected'],
            '_selected_action': [article_pk],
        })
        self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
        response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/object_history.html')

        # A custom popup response template may be specified by
        # ModelAdmin.popup_response_template.
        response = self.client.post(reverse('admin:admin_views_customarticle_add') + '?%s=1' % IS_POPUP_VAR, {
            'content': '<p>great article</p>',
            'date_0': '2008-03-18',
            'date_1': '10:54:39',
            IS_POPUP_VAR: '1'
        })
        self.assertEqual(response.template_name, 'custom_admin/popup_response.html')

    def test_extended_bodyclass_template_change_form(self):
        """
        The admin/change_form.html template uses block.super in the
        bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_change_password_template(self):
        """The change-password page extends bodyclass and embeds the username."""
        user = User.objects.get(username='super')
        response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,)))
        # The auth/user/change_password.html template uses super in the
        # bodyclass block.
        self.assertContains(response, 'bodyclass_consistency_check ')
        # When a site has multiple passwords in the browser's password manager,
        # a browser pop up asks which user the new password is for. To prevent
        # this, the username is added to the change password form.
        self.assertContains(response, '<input type="text" name="username" value="super" style="display: none" />')

    def test_extended_bodyclass_template_index(self):
        """
        The admin/index.html template uses block.super in the bodyclass block.
        """
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_change_list(self):
        """
        The admin/change_list.html' template uses block.super
        in the bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_article_changelist'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_login(self):
        """
        The admin/login.html template uses block.super in the
        bodyclass block.
        """
        self.client.logout()
        response = self.client.get(reverse('admin:login'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_delete_confirmation(self):
        """
        The admin/delete_confirmation.html template uses
        block.super in the bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,)))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_delete_selected_confirmation(self):
        """
        The admin/delete_selected_confirmation.html template uses
        block.super in bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        post_data = {
            'action': 'delete_selected',
            'selected_across': '0',
            'index': '0',
            '_selected_action': group.id
        }
        response = self.client.post(reverse('admin:auth_group_changelist'), post_data)
        self.assertEqual(response.context['site_header'], 'Django administration')
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_filter_with_custom_template(self):
        """
        A custom template can be used to render an admin filter.
        """
        response = self.client.get(reverse('admin:admin_views_color2_changelist'))
        self.assertTemplateUsed(response, 'custom_filter_template.html')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewFormUrlTest(TestCase):
    """Tests for form_url handling and initial-data overrides on the 'admin3' site."""
    # All requests are reversed against this AdminSite instance name.
    current_app = "admin3"
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    def setUp(self):
        # Authenticate every test as the superuser created above.
        self.client.force_login(self.superuser)
    def test_change_form_URL_has_correct_value(self):
        """
        change_view has form_url in response.context
        """
        response = self.client.get(
            reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app)
        )
        self.assertIn('form_url', response.context, msg='form_url not present in response.context')
        self.assertEqual(response.context['form_url'], 'pony')
    def test_initial_data_can_be_overridden(self):
        """
        The behavior for setting initial form data can be overridden in the
        ModelAdmin class. Usually, the initial value is set via the GET params.
        """
        response = self.client.get(
            reverse('admin:admin_views_restaurant_add', current_app=self.current_app),
            {'name': 'test_value'}
        )
        # this would be the usual behaviour
        self.assertNotContains(response, 'value="test_value"')
        # this is the overridden behaviour
        self.assertContains(response, 'value="overridden_value"')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminJavaScriptTest(TestCase):
    """The admin serves minified JS assets only when DEBUG is off."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_js_minified_only_if_debug_is_false(self):
        """
        The minified versions of the JS files are only used when DEBUG is
        False.
        """
        # Each entry yields a `<name>.js` / `<name>.min.js` assertion pair.
        script_names = [
            'vendor/jquery/jquery',
            'prepopulate',
            'actions',
            'collapse',
            'inlines',
        ]
        add_url = reverse('admin:admin_views_section_add')
        with override_settings(DEBUG=False):
            response = self.client.get(add_url)
            for name in script_names:
                self.assertNotContains(response, '%s.js' % name)
                self.assertContains(response, '%s.min.js' % name)
        with override_settings(DEBUG=True):
            response = self.client.get(add_url)
            for name in script_names:
                self.assertContains(response, '%s.js' % name)
                self.assertNotContains(response, '%s.min.js' % name)
@override_settings(ROOT_URLCONF='admin_views.urls')
class SaveAsTests(TestCase):
    """Tests for the admin change form's "Save as new" feature."""

    @classmethod
    def setUpTestData(cls):
        # A superuser to drive the admin, and one Person to duplicate.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_save_as_duplication(self):
        """'save as' creates a new person"""
        post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
        response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data)
        # A new Person exists and the original object is untouched.
        self.assertEqual(len(Person.objects.filter(name='John M')), 1)
        self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
        new_person = Person.objects.latest('id')
        # Default behavior: redirect to the change view of the new object.
        self.assertRedirects(response, reverse('admin:admin_views_person_change', args=(new_person.pk,)))

    def test_save_as_continue_false(self):
        """
        Saving a new object using "Save as new" redirects to the changelist
        instead of the change view when ModelAdmin.save_as_continue=False.
        """
        post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
        url = reverse('admin:admin_views_person_change', args=(self.per1.pk,), current_app=site2.name)
        response = self.client.post(url, post_data)
        self.assertEqual(len(Person.objects.filter(name='John M')), 1)
        self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
        self.assertRedirects(response, reverse('admin:admin_views_person_changelist', current_app=site2.name))

    def test_save_as_new_with_validation_errors(self):
        """
        When "Save as new" fails validation, only the "Save as new" button
        is shown on the re-rendered form, not the other save buttons.
        """
        response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), {
            '_saveasnew': '',
            'gender': 'invalid',
            '_addanother': 'fail',
        })
        self.assertContains(response, 'Please correct the errors below.')
        self.assertFalse(response.context['show_save_and_add_another'])
        self.assertFalse(response.context['show_save_and_continue'])
        self.assertTrue(response.context['show_save_as_new'])

    def test_save_as_new_with_validation_errors_with_inlines(self):
        # Parent form invalid, inline valid: the button-visibility rules
        # above must also hold when inline formsets are present.
        parent = Parent.objects.create(name='Father')
        child = Child.objects.create(parent=parent, name='Child')
        response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
            '_saveasnew': 'Save as new',
            'child_set-0-parent': parent.pk,
            'child_set-0-id': child.pk,
            'child_set-0-name': 'Child',
            'child_set-INITIAL_FORMS': 1,
            'child_set-MAX_NUM_FORMS': 1000,
            'child_set-MIN_NUM_FORMS': 0,
            'child_set-TOTAL_FORMS': 4,
            'name': '_invalid',
        })
        self.assertContains(response, 'Please correct the error below.')
        self.assertFalse(response.context['show_save_and_add_another'])
        self.assertFalse(response.context['show_save_and_continue'])
        self.assertTrue(response.context['show_save_as_new'])

    def test_save_as_new_with_inlines_with_validation_errors(self):
        # Mirror of the previous test: parent valid, inline invalid.
        parent = Parent.objects.create(name='Father')
        child = Child.objects.create(parent=parent, name='Child')
        response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
            '_saveasnew': 'Save as new',
            'child_set-0-parent': parent.pk,
            'child_set-0-id': child.pk,
            'child_set-0-name': '_invalid',
            'child_set-INITIAL_FORMS': 1,
            'child_set-MAX_NUM_FORMS': 1000,
            'child_set-MIN_NUM_FORMS': 0,
            'child_set-TOTAL_FORMS': 4,
            'name': 'Father',
        })
        self.assertContains(response, 'Please correct the error below.')
        self.assertFalse(response.context['show_save_and_add_another'])
        self.assertFalse(response.context['show_save_and_continue'])
        self.assertTrue(response.context['show_save_as_new'])
@override_settings(ROOT_URLCONF='admin_views.urls')
class CustomModelAdminTest(AdminViewBasicTestCase):
    """A custom AdminSite ('admin2', plus 'admin4' below) can override the
    default templates and login form and register extra views."""

    def test_custom_admin_site_login_form(self):
        self.client.logout()
        response = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
        login = self.client.post(reverse('admin2:login'), {
            REDIRECT_FIELD_NAME: reverse('admin2:index'),
            'username': 'customform',
            'password': 'secret',
        }, follow=True)
        self.assertIsInstance(login, TemplateResponse)
        self.assertEqual(login.status_code, 200)
        # The custom login form rejects this login and re-renders with its
        # own error message and form media.
        self.assertContains(login, 'custom form error')
        self.assertContains(login, 'path/to/media.css')

    def test_custom_admin_site_login_template(self):
        self.client.logout()
        response = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/login.html')
        self.assertContains(response, 'Hello from a custom login template')

    def test_custom_admin_site_logout_template(self):
        response = self.client.get(reverse('admin2:logout'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/logout.html')
        self.assertContains(response, 'Hello from a custom logout template')

    def test_custom_admin_site_index_view_and_template(self):
        response = self.client.get(reverse('admin2:index'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/index.html')
        self.assertContains(response, 'Hello from a custom index template *bar*')

    def test_custom_admin_site_app_index_view_and_template(self):
        response = self.client.get(reverse('admin2:app_list', args=('admin_views',)))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/app_index.html')
        self.assertContains(response, 'Hello from a custom app_index template')

    def test_custom_admin_site_password_change_template(self):
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'Hello from a custom password change form template')

    def test_custom_admin_site_password_change_with_extra_context(self):
        # NOTE(review): 'eggs' presumably comes from extra_context supplied
        # by the custom site's password_change view — confirm in its urls.
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'eggs')

    def test_custom_admin_site_password_change_done_template(self):
        response = self.client.get(reverse('admin2:password_change_done'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_done.html')
        self.assertContains(response, 'Hello from a custom password change done template')

    def test_custom_admin_site_view(self):
        # An extra view registered on the custom site.
        self.client.force_login(self.superuser)
        response = self.client.get(reverse('admin2:my_view'))
        self.assertEqual(response.content, b"Django is a magical pony!")

    def test_pwd_change_custom_template(self):
        # The 'admin4' site serves the user password-change view.
        self.client.force_login(self.superuser)
        su = User.objects.get(username='super')
        response = self.client.get(reverse('admin4:auth_user_password_change', args=(su.pk,)))
        self.assertEqual(response.status_code, 200)
def get_perm(Model, perm):
    """Return the Permission object with codename *perm* for the given model."""
    content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(content_type=content_type, codename=perm)
@override_settings(
ROOT_URLCONF='admin_views.urls',
# Test with the admin's documented list of required context processors.
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}],
)
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
    @classmethod
    def setUpTestData(cls):
        # One staff user per permission level, plus two non-staff accounts.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
        cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
        cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
        cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
        cls.nostaffuser = User.objects.create_user(username='nostaff', password='secret')
        # Three articles in one section, distinguished by content/date.
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1,
            another_section=cls.s1,
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

        # Setup permissions, for our users who can add, change, and delete.
        opts = Article._meta

        # User who can add Articles
        cls.adduser.user_permissions.add(get_perm(Article, get_permission_codename('add', opts)))
        # User who can change Articles (the non-staff user gets it too, for
        # the has_permission_admin site tests).
        cls.changeuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
        cls.nostaffuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))

        # User who can delete Articles (and Sections)
        cls.deleteuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts)))
        cls.deleteuser.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta)))

        # login POST dicts; each carries the post-login redirect target.
        cls.index_url = reverse('admin:index')
        cls.super_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'super',
            'password': 'secret',
        }
        cls.super_email_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'super@example.com',
            'password': 'secret',
        }
        # Same account identified by email, wrong password.
        cls.super_email_bad_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'super@example.com',
            'password': 'notsecret',
        }
        cls.adduser_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'adduser',
            'password': 'secret',
        }
        cls.changeuser_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'changeuser',
            'password': 'secret',
        }
        cls.deleteuser_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'deleteuser',
            'password': 'secret',
        }
        # The non-staff user logs into the has_permission_admin site instead.
        cls.nostaff_login = {
            REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
            'username': 'nostaff',
            'password': 'secret',
        }
        cls.joepublic_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'joepublic',
            'password': 'secret',
        }
        # Login attempt with no username at all.
        cls.no_username_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'password': 'secret',
        }
    def test_login(self):
        """
        Make sure only staff members can log in.

        Successful posts to the login page will redirect to the original url.
        Unsuccessful attempts will continue to render the login page with
        a 200 status code.
        """
        login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
        # Super User
        response = self.client.get(self.index_url)
        self.assertRedirects(response, login_url)
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))

        # Test if user enters email address: the admin login form rejects it.
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.super_email_login)
        self.assertContains(login, ERROR_MESSAGE)
        # only correct passwords get a username hint
        login = self.client.post(login_url, self.super_email_bad_login)
        self.assertContains(login, ERROR_MESSAGE)
        new_user = User(username='jondoe', password='secret', email='super@example.com')
        new_user.save()
        # check to ensure if there are multiple email addresses a user doesn't get a 500
        login = self.client.post(login_url, self.super_email_login)
        self.assertContains(login, ERROR_MESSAGE)

        # Add User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.adduser_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))

        # Change User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.changeuser_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))

        # Delete User
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.deleteuser_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))

        # Regular User should not be able to login.
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, ERROR_MESSAGE)

        # Requests without username should not return 500 errors.
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        login = self.client.post(login_url, self.no_username_login)
        self.assertEqual(login.status_code, 200)
        self.assertFormError(login, 'form', 'username', ['This field is required.'])
def test_login_redirect_for_direct_get(self):
"""
Login redirect should be to the admin index page when going directly to
/admin/login/.
"""
response = self.client.get(reverse('admin:login'))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse('admin:index'))
    def test_login_has_permission(self):
        """
        Logins against the has_permission_admin site: a plain user is
        rejected with 'permission denied', while the 'nostaff' user (granted
        change permission in setUpTestData) and a staff user are admitted.
        """
        # Regular User should not be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, 'permission denied')

        # User with permissions should be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login)
        self.assertRedirects(login, reverse('has_permission_admin:index'))
        self.assertFalse(login.context)
        self.client.get(reverse('has_permission_admin:logout'))

        # Staff should be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), {
            REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
            'username': 'deleteuser',
            'password': 'secret',
        })
        self.assertRedirects(login, reverse('has_permission_admin:index'))
        self.assertFalse(login.context)
        self.client.get(reverse('has_permission_admin:logout'))
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = 'the-answer=42'
redirect_url = '%s?%s' % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
'%s?%s' % (reverse('admin:login'), urlencode(new_next)),
post_data)
self.assertRedirects(login, redirect_url)
    def test_double_login_is_not_allowed(self):
        """
        Regression test for #19327: logging in again over an existing admin
        session (with either bad or good credentials) must behave sanely.
        """
        login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))

        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)

        # Establish a valid admin session
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)

        # Logging in with non-admin user fails
        login = self.client.post(login_url, self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, ERROR_MESSAGE)

        # Establish a valid admin session
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)

        # Logging in with admin user while already logged in
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))
def test_login_page_notice_for_non_staff_users(self):
"""
A logged-in non-staff user trying to access the admin index should be
presented with the login page and a hint indicating that the current
user doesn't have access to it.
"""
hint_template = 'You are authenticated as {}'
# Anonymous user should not be shown the hint
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'login-form')
self.assertNotContains(response, hint_template.format(''), status_code=200)
# Non-staff user should be shown the hint
self.client.force_login(self.nostaffuser)
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'login-form')
self.assertContains(response, hint_template.format(self.nostaffuser.username), status_code=200)
    def test_add_view(self):
        """Test add view restricts access and actually adds items."""
        add_dict = {'title': 'Døm ikke',
                    'content': '<p>great article</p>',
                    'date_0': '2008-03-18', 'date_1': '10:54:39',
                    'section': self.s1.pk}

        # Change User should not have access to add articles
        self.client.force_login(self.changeuser)
        # make sure the view removes test cookie
        self.assertIs(self.client.session.test_cookie_worked(), False)
        response = self.client.get(reverse('admin:admin_views_article_add'))
        self.assertEqual(response.status_code, 403)
        # Try POST just to make sure
        post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
        self.assertEqual(post.status_code, 403)
        self.assertEqual(Article.objects.count(), 3)
        self.client.get(reverse('admin:logout'))

        # Add user may login and POST to add view, then redirect to admin root
        self.client.force_login(self.adduser)
        addpage = self.client.get(reverse('admin:admin_views_article_add'))
        change_list_link = '› <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist')
        self.assertNotContains(
            addpage, change_list_link,
            msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.'
        )
        post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
        self.assertRedirects(post, self.index_url)
        self.assertEqual(Article.objects.count(), 4)
        # NOTE(review): saving an Article appears to send one notification
        # email — confirm against the Article model/admin definition.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
        self.client.get(reverse('admin:logout'))

        # The addition was logged correctly
        addition_log = LogEntry.objects.all()[0]
        new_article = Article.objects.last()
        article_ct = ContentType.objects.get_for_model(Article)
        self.assertEqual(addition_log.user_id, self.adduser.pk)
        self.assertEqual(addition_log.content_type_id, article_ct.pk)
        self.assertEqual(addition_log.object_id, str(new_article.pk))
        self.assertEqual(addition_log.object_repr, "Døm ikke")
        self.assertEqual(addition_log.action_flag, ADDITION)
        self.assertEqual(addition_log.get_change_message(), "Added.")

        # Super can add too, but is redirected to the change list view
        self.client.force_login(self.superuser)
        addpage = self.client.get(reverse('admin:admin_views_article_add'))
        self.assertContains(
            addpage, change_list_link,
            msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.'
        )
        post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
        self.assertRedirects(post, reverse('admin:admin_views_article_changelist'))
        self.assertEqual(Article.objects.count(), 5)
        self.client.get(reverse('admin:logout'))

        # 8509 - if a normal user is already logged in, it is possible
        # to change user into the superuser without error
        self.client.force_login(self.joepublicuser)
        # Check and make sure that if user expires, data still persists
        self.client.force_login(self.superuser)
        # make sure the view removes test cookie
        self.assertIs(self.client.session.test_cookie_worked(), False)
    def test_change_view(self):
        """Change view should restrict access and allow users to edit items."""
        change_dict = {'title': 'Ikke fordømt',
                       'content': '<p>edited article</p>',
                       'date_0': '2008-03-18', 'date_1': '10:54:39',
                       'section': self.s1.pk}
        article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
        article_changelist_url = reverse('admin:admin_views_article_changelist')

        # add user should not be able to view the list of article or change any of them
        self.client.force_login(self.adduser)
        response = self.client.get(article_changelist_url)
        self.assertEqual(response.status_code, 403)
        response = self.client.get(article_change_url)
        self.assertEqual(response.status_code, 403)
        post = self.client.post(article_change_url, change_dict)
        self.assertEqual(post.status_code, 403)
        self.client.get(reverse('admin:logout'))

        # change user can view all items and edit them
        self.client.force_login(self.changeuser)
        response = self.client.get(article_changelist_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.get(article_change_url)
        self.assertEqual(response.status_code, 200)
        post = self.client.post(article_change_url, change_dict)
        self.assertRedirects(post, article_changelist_url)
        self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>')

        # one error in form should produce singular error message, multiple errors plural
        change_dict['title'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(
            post, 'Please correct the error below.',
            msg_prefix='Singular error message not found in response to post with one error'
        )
        change_dict['content'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(
            post, 'Please correct the errors below.',
            msg_prefix='Plural error message not found in response to post with multiple errors'
        )
        self.client.get(reverse('admin:logout'))

        # Test redirection when using row-level change permissions. Refs #11513.
        r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
        r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
        change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,))
        change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,))
        # Staff users: the odd-id object is never editable (403), the
        # even-id one is editable and redirects to the index on save.
        for login_user in [self.superuser, self.adduser, self.changeuser, self.deleteuser]:
            self.client.force_login(login_user)
            response = self.client.get(change_url_1)
            self.assertEqual(response.status_code, 403)
            response = self.client.post(change_url_1, {'name': 'changed'})
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
            self.assertEqual(response.status_code, 403)
            response = self.client.get(change_url_2)
            self.assertEqual(response.status_code, 200)
            response = self.client.post(change_url_2, {'name': 'changed'})
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
            self.assertRedirects(response, self.index_url)
            self.client.get(reverse('admin:logout'))
        # Non-staff users are bounced to the login form in all cases, and
        # never modify the objects.
        for login_user in [self.joepublicuser, self.nostaffuser]:
            self.client.force_login(login_user)
            response = self.client.get(change_url_1, follow=True)
            self.assertContains(response, 'login-form')
            response = self.client.post(change_url_1, {'name': 'changed'}, follow=True)
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
            self.assertContains(response, 'login-form')
            response = self.client.get(change_url_2, follow=True)
            self.assertContains(response, 'login-form')
            response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True)
            self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
            self.assertContains(response, 'login-form')
            self.client.get(reverse('admin:logout'))
    def test_change_view_save_as_new(self):
        """
        'Save as new' should raise PermissionDenied for users without the 'add'
        permission.
        """
        change_dict_save_as_new = {
            '_saveasnew': 'Save as new',
            'title': 'Ikke fordømt',
            'content': '<p>edited article</p>',
            'date_0': '2008-03-18', 'date_1': '10:54:39',
            'section': self.s1.pk,
        }
        article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))

        # Add user can perform "Save as new".
        article_count = Article.objects.count()
        self.client.force_login(self.adduser)
        post = self.client.post(article_change_url, change_dict_save_as_new)
        self.assertRedirects(post, self.index_url)
        self.assertEqual(Article.objects.count(), article_count + 1)
        self.client.logout()

        # Change user cannot perform "Save as new" (no 'add' permission).
        article_count = Article.objects.count()
        self.client.force_login(self.changeuser)
        post = self.client.post(article_change_url, change_dict_save_as_new)
        self.assertEqual(post.status_code, 403)
        self.assertEqual(Article.objects.count(), article_count)

        # User with both add and change permissions should be redirected to the
        # change page for the newly created object.
        article_count = Article.objects.count()
        self.client.force_login(self.superuser)
        post = self.client.post(article_change_url, change_dict_save_as_new)
        self.assertEqual(Article.objects.count(), article_count + 1)
        new_article = Article.objects.latest('id')
        self.assertRedirects(post, reverse('admin:admin_views_article_change', args=(new_article.pk,)))
    def test_delete_view(self):
        """Delete view should restrict access and actually delete items."""
        delete_dict = {'post': 'yes'}
        delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,))

        # add user should not be able to delete articles
        self.client.force_login(self.adduser)
        response = self.client.get(delete_url)
        self.assertEqual(response.status_code, 403)
        post = self.client.post(delete_url, delete_dict)
        self.assertEqual(post.status_code, 403)
        self.assertEqual(Article.objects.count(), 3)
        self.client.logout()

        # Delete user can delete
        self.client.force_login(self.deleteuser)
        response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
        self.assertContains(response, "<h2>Summary</h2>")
        # The section's confirmation page summarizes all three related articles.
        self.assertContains(response, "<li>Articles: 3</li>")
        # test response contains link to related Article
        self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)

        response = self.client.get(delete_url)
        self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
        self.assertContains(response, "<h2>Summary</h2>")
        self.assertContains(response, "<li>Articles: 1</li>")
        self.assertEqual(response.status_code, 200)
        post = self.client.post(delete_url, delete_dict)
        self.assertRedirects(post, self.index_url)
        self.assertEqual(Article.objects.count(), 2)
        # NOTE(review): deleting an Article appears to send one notification
        # email — confirm against the Article model/admin definition.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
        # The deletion was recorded in the admin log.
        article_ct = ContentType.objects.get_for_model(Article)
        logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
        self.assertEqual(logged.object_id, str(self.a1.pk))
def test_delete_view_nonexistent_obj(self):
self.client.force_login(self.deleteuser)
url = reverse('admin:admin_views_article_delete', args=('nonexistent',))
response = self.client.get(url, follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
["""article with ID "nonexistent" doesn't exist. Perhaps it was deleted?"""]
)
    def test_history_view(self):
        """History view should restrict access."""
        # add user should not be able to view the list of article or change any of them
        self.client.force_login(self.adduser)
        response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
        self.assertEqual(response.status_code, 403)
        self.client.get(reverse('admin:logout'))

        # change user can view all items and edit them
        self.client.force_login(self.changeuser)
        response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
        self.assertEqual(response.status_code, 200)

        # Test redirection when using row-level change permissions. Refs #11513.
        rl1 = RowLevelChangePermissionModel.objects.create(name="odd id")
        rl2 = RowLevelChangePermissionModel.objects.create(name="even id")
        # Staff users: history of the odd-id object is forbidden, the
        # even-id one is viewable.
        for login_user in [self.superuser, self.adduser, self.changeuser, self.deleteuser]:
            self.client.force_login(login_user)
            url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
            response = self.client.get(url)
            self.assertEqual(response.status_code, 403)
            url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
            response = self.client.get(url)
            self.assertEqual(response.status_code, 200)
            self.client.get(reverse('admin:logout'))
        # Non-staff users are bounced to the login form for both objects.
        for login_user in [self.joepublicuser, self.nostaffuser]:
            self.client.force_login(login_user)
            url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
            response = self.client.get(url, follow=True)
            self.assertContains(response, 'login-form')
            url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
            response = self.client.get(url, follow=True)
            self.assertContains(response, 'login-form')
            self.client.get(reverse('admin:logout'))
def test_history_view_bad_url(self):
self.client.force_login(self.changeuser)
response = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)), follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
["""article with ID "foo" doesn't exist. Perhaps it was deleted?"""]
)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
self.client.force_login(self.adduser)
# The user can't add sections yet, so they shouldn't see the "add section" link.
url = reverse('admin:admin_views_article_add')
add_link_text = 'add_id_section'
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('add', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_conditionally_show_change_section_link(self):
    """
    The foreign key widget should only show the "change related" button if
    the user has permission to change that related item.
    """
    def get_change_related(response):
        # Inspect the widget flag on the rendered form directly, in addition
        # to the HTML assertion below.
        return response.context['adminform'].form.fields['section'].widget.can_change_related
    self.client.force_login(self.adduser)
    # The user can't change sections yet, so they shouldn't see the "change section" link.
    url = reverse('admin:admin_views_article_add')
    change_link_text = 'change_id_section'
    response = self.client.get(url)
    self.assertFalse(get_change_related(response))
    self.assertNotContains(response, change_link_text)
    # Allow the user to change sections too. Now they can see the "change section" link.
    user = User.objects.get(username='adduser')
    perm = get_perm(Section, get_permission_codename('change', Section._meta))
    user.user_permissions.add(perm)
    response = self.client.get(url)
    self.assertTrue(get_change_related(response))
    self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
    """
    The foreign key widget should only show the "delete related" button if
    the user has permission to delete that related item.
    """
    def get_delete_related(response):
        # Inspect the widget flag on the rendered form directly, in addition
        # to the HTML assertion below.
        return response.context['adminform'].form.fields['sub_section'].widget.can_delete_related
    self.client.force_login(self.adduser)
    # The user can't delete sections yet, so they shouldn't see the "delete section" link.
    url = reverse('admin:admin_views_article_add')
    delete_link_text = 'delete_id_sub_section'
    response = self.client.get(url)
    self.assertFalse(get_delete_related(response))
    self.assertNotContains(response, delete_link_text)
    # Allow the user to delete sections too. Now they can see the "delete section" link.
    user = User.objects.get(username='adduser')
    perm = get_perm(Section, get_permission_codename('delete', Section._meta))
    user.user_permissions.add(perm)
    response = self.client.get(url)
    self.assertTrue(get_delete_related(response))
    self.assertContains(response, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
    """Deactivating an already-authenticated user sends them back to the
    login form on both the admin index and a staff-protected view."""
    self.client.force_login(self.superuser)
    # Deactivate the account after the session has been established.
    account = User.objects.get(username='super')
    account.is_active = False
    account.save()
    index_response = self.client.get(self.index_url, follow=True)
    self.assertContains(index_response, 'id="login-form"')
    self.assertNotContains(index_response, 'Log out')
    secure_response = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(secure_response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
    """
    Removing is_staff from an already-authenticated user locks them out of
    the admin index and of a staff-protected view.
    """
    self.client.force_login(self.superuser)
    # Revoke staff status after the session has been established.
    superuser = User.objects.get(username='super')
    superuser.is_staff = False
    superuser.save()
    response = self.client.get(self.index_url, follow=True)
    self.assertContains(response, 'id="login-form"')
    self.assertNotContains(response, 'Log out')
    response = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(response, 'id="login-form"')
def test_app_list_permissions(self):
    """
    If a user has no module perms, the app list returns a 404.
    """
    opts = Article._meta
    change_user = User.objects.get(username='changeuser')
    permission = get_perm(Article, get_permission_codename('change', opts))
    self.client.force_login(self.changeuser)
    # the user has no module permissions (removed after login, so the
    # session itself remains valid)
    change_user.user_permissions.remove(permission)
    response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
    self.assertEqual(response.status_code, 404)
    # the user now has module permissions
    change_user.user_permissions.add(permission)
    response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
    self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
    """
    Only admin users should be able to use the admin shortcut view.
    """
    model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
    obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo')
    shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk))
    # Not logged in: we should see the login page.
    response = self.client.get(shortcut_url, follow=True)
    self.assertTemplateUsed(response, 'admin/login.html')
    # Logged in? Redirect.
    self.client.force_login(self.superuser)
    response = self.client.get(shortcut_url, follow=False)
    # Can't use self.assertRedirects() because User.get_absolute_url() is silly.
    self.assertEqual(response.status_code, 302)
    # Domain may depend on contrib.sites tests also run
    self.assertRegex(response.url, 'http://(testserver|example.com)/dummy/foo/')
def test_has_module_permission(self):
    """
    has_module_permission() returns True for all users who
    have any permission for that module (add, change, or delete), so that
    the module is displayed on the admin index page.
    """
    accounts = [self.superuser, self.adduser, self.changeuser, self.deleteuser]
    for position, account in enumerate(accounts):
        # Match the original call sequence exactly: no logout before the
        # first login, none after the last set of assertions.
        if position:
            self.client.logout()
        self.client.force_login(account)
        response = self.client.get(self.index_url)
        self.assertContains(response, 'admin_views')
        self.assertContains(response, 'Articles')
def test_overriding_has_module_permission(self):
    """
    If has_module_permission() always returns False, the module shouldn't
    be displayed on the admin index page for any users.
    """
    articles = Article._meta.verbose_name_plural.title()
    sections = Section._meta.verbose_name_plural.title()
    # NOTE(review): the 'admin7' site presumably registers Article with
    # has_module_permission() returning False — confirm against the
    # custom_site/admin definitions.
    index_url = reverse('admin7:index')
    self.client.force_login(self.superuser)
    response = self.client.get(index_url)
    # The superuser still sees Sections, but never Articles.
    self.assertContains(response, sections)
    self.assertNotContains(response, articles)
    self.client.logout()
    self.client.force_login(self.adduser)
    response = self.client.get(index_url)
    self.assertNotContains(response, 'admin_views')
    self.assertNotContains(response, articles)
    self.client.logout()
    self.client.force_login(self.changeuser)
    response = self.client.get(index_url)
    self.assertNotContains(response, 'admin_views')
    self.assertNotContains(response, articles)
    self.client.logout()
    self.client.force_login(self.deleteuser)
    response = self.client.get(index_url)
    self.assertNotContains(response, articles)
    # The app list displays Sections but not Articles as the latter has
    # ModelAdmin.has_module_permission() = False.
    self.client.force_login(self.superuser)
    response = self.client.get(reverse('admin7:app_list', args=('admin_views',)))
    self.assertContains(response, sections)
    self.assertNotContains(response, articles)
def test_post_save_message_no_forbidden_links_visible(self):
    """
    Post-save message shouldn't contain a link to the change form if the
    user doesn't have the change permission.
    """
    self.client.force_login(self.adduser)
    # Emulate Article creation for user with add-only permission.
    post_data = {
        "title": "Fun & games",
        "content": "Some content",
        "date_0": "2015-10-31",
        "date_1": "16:35:00",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_article_add'), post_data, follow=True)
    # The success message is plain text — no <a> wrapping the object name.
    self.assertContains(
        response,
        '<li class="success">The article "Fun & games" was added successfully.</li>',
        html=True
    )
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewsNoUrlTest(TestCase):
    """Regression test for #17333"""

    @classmethod
    def setUpTestData(cls):
        # User who can change Reports
        cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
        cls.changeuser.user_permissions.add(get_perm(Report, get_permission_codename('change', Report._meta)))

    def test_no_standard_modeladmin_urls(self):
        """Admin index views don't break when user's ModelAdmin removes standard urls"""
        self.client.force_login(self.changeuser)
        r = self.client.get(reverse('admin:index'))
        # we shouldn't get a 500 error caused by a NoReverseMatch
        self.assertEqual(r.status_code, 200)
        self.client.get(reverse('admin:logout'))
@skipUnlessDBFeature('can_defer_constraint_checks')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewDeletedObjectsTest(TestCase):
    """
    Tests for the "objects to be deleted" summary rendered by the admin's
    delete confirmation views: nesting of related objects, cyclic and
    duplicate relationships, protected objects, and permission messages.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.v1 = Villain.objects.create(name='Adam')
        cls.v2 = Villain.objects.create(name='Sue')
        cls.sv1 = SuperVillain.objects.create(name='Bob')
        cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2)
        cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2)
        cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
        cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1)
        cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1)
        cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1)
        cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1)
        # The two cyclic objects point at each other via raw *_id values,
        # which is why can_defer_constraint_checks is required above.
        cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1)
        cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1)

    def setUp(self):
        # Each test runs as the superuser unless it logs in someone else.
        self.client.force_login(self.superuser)

    def test_nesting(self):
        """
        Objects should be nested to display the relationships that
        cause them to be scheduled for deletion.
        """
        # response.content is bytes, so the pattern is compiled as bytes too.
        pattern = re.compile(
            force_bytes(
                r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
                r'<li>Plot details: <a href="%s">almost finished</a>' % (
                    reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)),
                    reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)),
                )
            )
        )
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
        self.assertRegex(response.content, pattern)

    def test_cyclic(self):
        """
        Cyclic relationships should still cause each object to only be
        listed once.
        """
        one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
            reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)),
        )
        two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
            reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)),
        )
        response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,)))
        # count=1: each object appears exactly once despite the cycle.
        self.assertContains(response, one, 1)
        self.assertContains(response, two, 1)

    def test_perms_needed(self):
        # Switch to a user who may delete Plots but not PlotDetails.
        self.client.logout()
        delete_user = User.objects.get(username='deleteuser')
        delete_user.user_permissions.add(get_perm(Plot, get_permission_codename('delete', Plot._meta)))
        self.client.force_login(self.deleteuser)
        response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,)))
        self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
        self.assertContains(response, "<li>plot details</li>")

    def test_protected(self):
        # Answers reference the Question via a protected FK, so the page must
        # refuse the deletion and list the blockers.
        q = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q, answer="Because.")
        a2 = Answer.objects.create(question=q, answer="Yes.")
        response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,)))
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,))
        )
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,))
        )

    def test_post_delete_protected(self):
        """
        A POST request to delete protected objects should display the page
        which says the deletion is prohibited.
        """
        q = Question.objects.create(question='Why?')
        Answer.objects.create(question=q, answer='Because.')
        response = self.client.post(reverse('admin:admin_views_question_delete', args=(q.pk,)), {'post': 'yes'})
        # The Question must still exist after the refused POST.
        self.assertEqual(Question.objects.count(), 1)
        self.assertContains(response, "would require deleting the following protected related objects")

    def test_not_registered(self):
        # SecretHideout is presumably not registered with the admin site, so
        # it is listed as plain text without a change link — TODO confirm
        # against the admin registrations.
        should_contain = """<li>Secret hideout: underground bunker"""
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
        self.assertContains(response, should_contain, 1)

    def test_multiple_fkeys_to_same_model(self):
        """
        If a deleted object has two relationships from another model,
        both of those should be followed in looking for related
        objects to delete.
        """
        # pl1 references v1 as team_leader and v2 as contact; deleting either
        # villain must list the plot.
        should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
            'admin:admin_views_plot_change', args=(self.pl1.pk,)
        )
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
        self.assertContains(response, should_contain)
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
        self.assertContains(response, should_contain)

    def test_multiple_fkeys_to_same_instance(self):
        """
        If a deleted object has two relationships pointing to it from
        another object, the other object should still only be listed
        once.
        """
        # pl2 references v2 as both team_leader and contact.
        should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
            'admin:admin_views_plot_change', args=(self.pl2.pk,)
        )
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
        self.assertContains(response, should_contain, 1)

    def test_inheritance(self):
        """
        In the case of an inherited model, if either the child or
        parent-model instance is deleted, both instances are listed
        for deletion, as well as any relationships they have.
        """
        should_contain = [
            '<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)),
            '<li>Super villain: <a href="%s">Bob</a>' % reverse(
                'admin:admin_views_supervillain_change', args=(self.sv1.pk,)
            ),
            '<li>Secret hideout: floating castle',
            '<li>Super secret hideout: super floating castle!',
        ]
        # Same expectations whether deletion starts from the parent or the
        # child model's delete view.
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,)))
        for should in should_contain:
            self.assertContains(response, should, 1)
        response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,)))
        for should in should_contain:
            self.assertContains(response, should, 1)

    def test_generic_relations(self):
        """
        If a deleted object has GenericForeignKeys pointing to it,
        those objects should be listed for deletion.
        """
        plot = self.pl3
        tag = FunkyTag.objects.create(content_object=plot, name='hott')
        should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
            'admin:admin_views_funkytag_change', args=(tag.id,))
        response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,)))
        self.assertContains(response, should_contain)

    def test_generic_relations_with_related_query_name(self):
        """
        If a deleted object has GenericForeignKey with
        GenericRelation(related_query_name='...') pointing to it, those objects
        should be listed for deletion.
        """
        bookmark = Bookmark.objects.create(name='djangoproject')
        tag = FunkyTag.objects.create(content_object=bookmark, name='django')
        tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,))
        should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
        response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,)))
        self.assertContains(response, should_contain)
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestGenericRelations(TestCase):
    """Changelist rendering of models with a GenericForeignKey."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.v1 = Villain.objects.create(name='Adam')
        cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_generic_content_object_in_list_display(self):
        """The generic content_object renders in list_display cells."""
        FunkyTag.objects.create(content_object=self.pl3, name='hott')
        response = self.client.get(reverse('admin:admin_views_funkytag_changelist'))
        # The related Plot's string representation ends the table cell.
        self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewStringPrimaryKeyTest(TestCase):
    """
    Admin views must quote/escape string primary keys correctly, including
    keys containing URL-special characters or reserved words like "add",
    "delete", and "history".
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # Deliberately hostile primary key: spaces plus every URL/HTML
        # special character that must survive quoting round-trips.
        cls.pk = (
            "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
            r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
        )
        cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
        content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
        user_pk = cls.superuser.pk
        # Action flag 2 is CHANGE; gives the history view an entry to show.
        LogEntry.objects.log_action(user_pk, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_get_history_view(self):
        """
        Retrieving the history for an object using urlencoded form of primary
        key should work.
        Refs #12349, #18550.
        """
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertContains(response, 'Changed something')
        self.assertEqual(response.status_code, 200)

    def test_get_change_view(self):
        "Retrieving the object using urlencoded form of primary key should work"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertEqual(response.status_code, 200)

    def test_changelist_to_changeform_link(self):
        "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist'))
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        pk_final_url = escape(iri_to_uri(quote(self.pk)))
        # Reverse with a placeholder, then substitute the quoted pk, since
        # the raw pk would not reverse cleanly.
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', pk_final_url)
        should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)

    def test_recentactions_link(self):
        "The link from the recent actions list referring to the changeform of the object should be quoted"
        response = self.client.get(reverse('admin:index'))
        link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
        self.assertContains(response, should_contain)

    def test_deleteconfirmation_link(self):
        "The link from the delete confirmation page referring back to the changeform of the object should be quoted"
        url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),))
        response = self.client.get(url)
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', escape(iri_to_uri(quote(self.pk))))
        should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)

    def test_url_conflicts_with_add(self):
        "A model with a primary key that ends with add or is `add` should be visible"
        add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add")
        add_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
        # A pk that is literally "add" must not collide with the add view URL.
        add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
        add_url = reverse('admin:admin_views_modelwithstringprimarykey_add')
        change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),))
        self.assertNotEqual(add_url, change_url)

    def test_url_conflicts_with_delete(self):
        "A model with a primary key that ends with delete should be visible"
        delete_model = ModelWithStringPrimaryKey(pk="delete")
        delete_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)

    def test_url_conflicts_with_history(self):
        "A model with a primary key that ends with history should be visible"
        history_model = ModelWithStringPrimaryKey(pk="history")
        history_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)

    def test_shortcut_view_with_escaping(self):
        "'View on site' should work properly with char fields"
        model = ModelWithStringPrimaryKey(pk='abc_123')
        model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
        )
        should_contain = '/%s/" class="viewsitelink">' % model.pk
        self.assertContains(response, should_contain)

    def test_change_view_history_link(self):
        """Object history button link should work and contain the pk value quoted."""
        url = reverse(
            'admin:%s_modelwithstringprimarykey_change' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        expected_link = reverse(
            'admin:%s_modelwithstringprimarykey_history' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link))

    def test_redirect_on_add_view_continue_button(self):
        """As soon as an object is added using "Save and continue editing"
        button, the user should be redirected to the object's change_view.
        In case primary key is a string containing some special characters
        like slash or underscore, these characters must be escaped (see #22266)
        """
        response = self.client.post(
            reverse('admin:admin_views_modelwithstringprimarykey_add'),
            {
                'string_pk': '123/history',
                "_continue": "1",  # Save and continue editing
            }
        )
        self.assertEqual(response.status_code, 302)  # temporary redirect
        self.assertIn('/123_2Fhistory/', response['location'])  # PK is quoted
@override_settings(ROOT_URLCONF='admin_views.urls')
class SecureViewTests(TestCase):
    """
    Test behavior of a view protected by the staff_member_required decorator.
    """

    def test_secure_view_shows_login_if_not_logged_in(self):
        """Anonymous access redirects to the admin login with ?next=."""
        secure_url = reverse('secure_view')
        response = self.client.get(secure_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url))
        # Following the redirect lands on the admin login template, with the
        # original URL preserved in the redirect field.
        response = self.client.get(secure_url, follow=True)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)

    def test_staff_member_required_decorator_works_with_argument(self):
        """
        Staff_member_required decorator works with an argument
        (redirect_field_name).
        """
        # Hard-coded path: presumably wired to staff_member_required(
        # redirect_field_name='myfield') in the test urlconf — TODO confirm.
        secure_url = '/test_admin/admin/secure-view2/'
        response = self.client.get(secure_url)
        self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewUnicodeTest(TestCase):
    """Admin change/delete views must handle non-ASCII data correctly."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        # Fixture data uses Norwegian characters (æ, ø, å) on purpose.
        cls.b1 = Book.objects.create(name='Lærdommer')
        cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1)
        cls.chap1 = Chapter.objects.create(
            title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>',
            book=cls.b1
        )
        cls.chap2 = Chapter.objects.create(
            title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1)
        cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>')
        cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>')
        cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>')
        cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>')
        cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>')
        cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_unicode_edit(self):
        """
        A test to ensure that POST on edit_view handles non-ASCII characters.
        """
        post_data = {
            "name": "Test lærdommer",
            # inline data
            "chapter_set-TOTAL_FORMS": "6",
            "chapter_set-INITIAL_FORMS": "3",
            "chapter_set-MAX_NUM_FORMS": "0",
            "chapter_set-0-id": self.chap1.pk,
            "chapter_set-0-title": "Norske bostaver æøå skaper problemer",
            "chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
            "chapter_set-1-id": self.chap2.id,
            "chapter_set-1-title": "Kjærlighet.",
            "chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
            "chapter_set-2-id": self.chap3.id,
            "chapter_set-2-title": "Need a title.",
            "chapter_set-2-content": "<p>Newest content</p>",
            # Three blank extra inline forms (TOTAL_FORMS=6, INITIAL_FORMS=3).
            "chapter_set-3-id": "",
            "chapter_set-3-title": "",
            "chapter_set-3-content": "",
            "chapter_set-4-id": "",
            "chapter_set-4-title": "",
            "chapter_set-4-content": "",
            "chapter_set-5-id": "",
            "chapter_set-5-title": "",
            "chapter_set-5-content": "",
        }
        response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere

    def test_unicode_delete(self):
        """
        The delete_view handles non-ASCII characters
        """
        delete_dict = {'post': 'yes'}
        delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,))
        response = self.client.get(delete_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(delete_url, delete_dict)
        self.assertRedirects(response, reverse('admin:admin_views_book_changelist'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewListEditable(TestCase):
@classmethod
def setUpTestData(cls):
    # Shared fixtures: a superuser, a section with three articles, one
    # pre-populated post, and three Person rows for list_editable tests.
    cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    cls.s1 = Section.objects.create(name='Test section')
    cls.a1 = Article.objects.create(
        content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.a2 = Article.objects.create(
        content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.a3 = Article.objects.create(
        content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
    cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
    cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
def setUp(self):
    # Every test in this class runs as the superuser from setUpTestData().
    self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(name="This Week in Django", release_date=datetime.date.today())
response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
    """The changelist of a second inherited model renders without error."""
    Vodcast.objects.create(name="This Week in Django", released=True)
    response = self.client.get(reverse('admin:admin_views_vodcast_changelist'))
    self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
    """The changelist works for a model with a non-integer primary key."""
    Language.objects.create(iso='en', name='English', english_name='English')
    response = self.client.get(reverse('admin:admin_views_language_changelist'))
    self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
    """Count the form controls rendered on a list_editable changelist."""
    response = self.client.get(reverse('admin:admin_views_person_changelist'))
    # 2 inputs per object(the field and the hidden id field) = 6
    # 4 management hidden fields = 4
    # 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
    # main form submit button = 1
    # search field and search submit button = 2
    # CSRF field = 1
    # field to track 'select all' across paginated views = 1
    # 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
    self.assertContains(response, "<input", count=19)
    # 1 select per object = 3 selects, plus the action-chooser <select> = 4
    self.assertContains(response, "<select", count=4)
def test_post_messages(self):
    # Ticket 12707: Saving inline editable should not show admin
    # action warnings
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "%s" % self.per1.pk,
        "form-1-gender": "2",
        "form-1-id": "%s" % self.per2.pk,
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "%s" % self.per3.pk,
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_person_changelist'),
                                data, follow=True)
    # Exactly one success message; no "no action selected" warning.
    self.assertEqual(len(response.context['messages']), 1)
def test_post_submission(self):
    """Posting the list_editable formset updates rows, including on
    filtered and searched changelist pages."""
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "%s" % self.per1.pk,
        "form-1-gender": "2",
        "form-1-id": "%s" % self.per2.pk,
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "%s" % self.per3.pk,
        "_save": "Save",
    }
    self.client.post(reverse('admin:admin_views_person_changelist'), data)
    # per1's 'alive' checkbox was omitted from the POST, so it's now False.
    self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
    # test a filtered page
    data = {
        "form-TOTAL_FORMS": "2",
        "form-INITIAL_FORMS": "2",
        "form-MAX_NUM_FORMS": "0",
        "form-0-id": "%s" % self.per1.pk,
        "form-0-gender": "1",
        "form-0-alive": "checked",
        "form-1-id": "%s" % self.per3.pk,
        "form-1-gender": "1",
        "form-1-alive": "checked",
        "_save": "Save",
    }
    self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data)
    self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
    # test a searched page
    data = {
        "form-TOTAL_FORMS": "1",
        "form-INITIAL_FORMS": "1",
        "form-MAX_NUM_FORMS": "0",
        "form-0-id": "%s" % self.per1.pk,
        "form-0-gender": "1",
        "_save": "Save",
    }
    self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data)
    self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
    def test_non_field_errors(self):
        """
        Non-field errors are displayed for each of the forms in the
        changelist's formset.
        """
        fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
        fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
        fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": str(fd1.id),
            "form-0-reference": "123",
            "form-0-driver": "bill",
            "form-0-restaurant": "thai",
            # Same data as above: Forbidden because of unique_together!
            "form-1-id": str(fd2.id),
            "form-1-reference": "456",
            "form-1-driver": "bill",
            "form-1-restaurant": "thai",
            "form-2-id": str(fd3.id),
            "form-2-reference": "789",
            "form-2-driver": "bill",
            "form-2-restaurant": "pizza",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
        # One form violates (driver, restaurant) uniqueness -> one error row.
        self.assertContains(
            response,
            '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
            'with this Driver and Restaurant already exists.</li></ul></td></tr>',
            1,
            html=True
        )
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": str(fd1.id),
            "form-0-reference": "123",
            "form-0-driver": "bill",
            "form-0-restaurant": "thai",
            # Same data as above: Forbidden because of unique_together!
            "form-1-id": str(fd2.id),
            "form-1-reference": "456",
            "form-1-driver": "bill",
            "form-1-restaurant": "thai",
            # Same data also.
            "form-2-id": str(fd3.id),
            "form-2-reference": "789",
            "form-2-driver": "bill",
            "form-2-restaurant": "thai",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
        # Two forms violate uniqueness -> the error row is rendered twice.
        self.assertContains(
            response,
            '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
            'with this Driver and Restaurant already exists.</li></ul></td></tr>',
            2,
            html=True
        )
    def test_non_form_errors(self):
        """
        Formset-level (non-form) errors raised by the formset's clean() are
        rendered on the changelist (refs #12716).
        """
        # test if non-form errors are handled; ticket #12716
        data = {
            "form-TOTAL_FORMS": "1",
            "form-INITIAL_FORMS": "1",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": "%s" % self.per2.pk,
            "form-0-alive": "1",
            "form-0-gender": "2",
            # The form processing understands this as a list_editable "Save"
            # and not an action "Go".
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
        self.assertContains(response, "Grace is not a Zombie")
    def test_non_form_errors_is_errorlist(self):
        """
        Formset non-form errors are exposed as an ErrorList instance
        (refs #12878).
        """
        # test if non-form errors are correctly handled; ticket #12878
        data = {
            "form-TOTAL_FORMS": "1",
            "form-INITIAL_FORMS": "1",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": "%s" % self.per2.pk,
            "form-0-alive": "1",
            "form-0-gender": "2",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
        non_form_errors = response.context['cl'].formset.non_form_errors()
        # Both the type and the rendered string form must match ErrorList.
        self.assertIsInstance(non_form_errors, ErrorList)
        self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"])))
    def test_list_editable_ordering(self):
        """
        Edited "order" values are applied to the correct objects even though
        the changelist reorders rows by that same field.
        """
        collector = Collector.objects.create(id=1, name="Frederick Clegg")
        Category.objects.create(id=1, order=1, collector=collector)
        Category.objects.create(id=2, order=2, collector=collector)
        Category.objects.create(id=3, order=0, collector=collector)
        Category.objects.create(id=4, order=0, collector=collector)
        # NB: The order values must be changed so that the items are reordered.
        data = {
            "form-TOTAL_FORMS": "4",
            "form-INITIAL_FORMS": "4",
            "form-MAX_NUM_FORMS": "0",
            "form-0-order": "14",
            "form-0-id": "1",
            "form-0-collector": "1",
            "form-1-order": "13",
            "form-1-id": "2",
            "form-1-collector": "1",
            "form-2-order": "1",
            "form-2-id": "3",
            "form-2-collector": "1",
            "form-3-order": "0",
            "form-3-id": "4",
            "form-3-collector": "1",
            # The form processing understands this as a list_editable "Save"
            # and not an action "Go".
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_category_changelist'), data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)
        # The order values have been applied to the right objects
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Pagination works for list_editable items.
"""
UnorderedObject.objects.create(id=1, name='Unordered object #1')
UnorderedObject.objects.create(id=2, name='Unordered object #2')
UnorderedObject.objects.create(id=3, name='Unordered object #3')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist'))
self.assertContains(response, 'Unordered object #3')
self.assertContains(response, 'Unordered object #2')
self.assertNotContains(response, 'Unordered object #1')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=1')
self.assertNotContains(response, 'Unordered object #3')
self.assertNotContains(response, 'Unordered object #2')
self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ['3'],
"action": ['', 'delete_selected'],
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
    def test_list_editable_action_choices(self):
        """
        List-editable changes ARE applied when "_save" submits the form,
        even if action-selection fields are also present in the POST.
        """
        # List editable changes should be executed if the "Save" button is
        # used to submit the form - any action choices should be ignored.
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-gender": "1",
            "form-0-id": "%s" % self.per1.pk,
            "form-1-gender": "2",
            "form-1-id": "%s" % self.per2.pk,
            "form-2-alive": "checked",
            "form-2-gender": "1",
            "form-2-id": "%s" % self.per3.pk,
            # "_save" wins over the action fields below.
            "_save": "Save",
            "_selected_action": ['1'],
            "action": ['', 'delete_selected'],
        }
        self.client.post(reverse('admin:admin_views_person_changelist'), data)
        self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
        self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse('admin:admin_views_person_changelist'))
self.assertNotEqual(response.context['cl'].list_editable, ())
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?%s' % IS_POPUP_VAR)
self.assertEqual(response.context['cl'].list_editable, ())
    def test_pk_hidden_fields(self):
        """
        hidden pk fields aren't displayed in the table body and their
        corresponding human-readable value is displayed instead. The hidden pk
        fields are displayed but separately (not in the table) and only once.
        """
        story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
        story2 = Story.objects.create(
            title='Crouching Tiger, Hidden Python',
            content='The Python was sneaking into...',
        )
        response = self.client.get(reverse('admin:admin_views_story_changelist'))
        # Only one hidden field, in a separate place than the table.
        self.assertContains(response, 'id="id_form-0-id"', 1)
        self.assertContains(response, 'id="id_form-1-id"', 1)
        # NOTE(review): story2's id appears first in the hiddenfields div —
        # presumably the changelist orders newest first; confirm against the
        # Story ModelAdmin's ordering.
        self.assertContains(
            response,
            '<div class="hiddenfields">\n'
            '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" />'
            '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>'
            % (story2.id, story1.id),
            html=True
        )
        # The human-readable pk value is rendered once per row in the table.
        self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
        self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
    def test_pk_hidden_fields_with_list_display_links(self):
        """ Similarly as test_pk_hidden_fields, but when the hidden pk fields are
        referenced in list_display_links.
        Refs #12475.
        """
        story1 = OtherStory.objects.create(
            title='The adventures of Guido',
            content='Once upon a time in Djangoland...',
        )
        story2 = OtherStory.objects.create(
            title='Crouching Tiger, Hidden Python',
            content='The Python was sneaking into...',
        )
        link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,))
        link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,))
        response = self.client.get(reverse('admin:admin_views_otherstory_changelist'))
        # Only one hidden field, in a separate place than the table.
        self.assertContains(response, 'id="id_form-0-id"', 1)
        self.assertContains(response, 'id="id_form-1-id"', 1)
        self.assertContains(
            response,
            '<div class="hiddenfields">\n'
            '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" />'
            '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>'
            % (story2.id, story1.id),
            html=True
        )
        # The pk column is rendered as a change-link header cell, once per row.
        self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1)
        self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminSearchTest(TestCase):
    """Tests for the changelist search box (search_fields behavior)."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
        # Recommender/Recommendation/TitleTranslation exercise searches that
        # traverse relations to "sibling" models.
        cls.t1 = Recommender.objects.create()
        cls.t2 = Recommendation.objects.create(the_recommender=cls.t1)
        cls.t3 = Recommender.objects.create()
        cls.t4 = Recommendation.objects.create(the_recommender=cls.t3)
        cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar')
        cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo')
        cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few')
        cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas')
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_search_on_sibling_models(self):
        "A search that mentions sibling models"
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned 1 object
        self.assertContains(response, "\n1 recommendation\n")
    def test_with_fk_to_field(self):
        """
        The to_field GET parameter is preserved when a search is performed.
        Refs #10918.
        """
        response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR)
        self.assertContains(response, "\n1 user\n")
        self.assertContains(response, '<input type="hidden" name="%s" value="id"/>' % TO_FIELD_VAR, html=True)
    def test_exact_matches(self):
        """An '=field' search matches exact values only, not substrings."""
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned one object
        self.assertContains(response, "\n1 recommendation\n")
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 recommendations\n")
    def test_beginning_matches(self):
        """A '^field' search matches prefixes only, not mid-string hits."""
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        # confirm the search returned one object
        self.assertContains(response, "\n1 person\n")
        self.assertContains(response, "Guido")
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 persons\n")
        self.assertNotContains(response, "Guido")
    def test_pluggable_search(self):
        """A ModelAdmin overriding get_search_results() is honored."""
        PluggableSearchPerson.objects.create(name="Bob", age=10)
        PluggableSearchPerson.objects.create(name="Amy", age=20)
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Bob")
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Amy")
    def test_reset_link(self):
        """
        Test presence of reset link in search bar ("1 result (_x total_)").
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        # + 1 for total count
        with self.assertNumQueries(5):
            response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        self.assertContains(
            response,
            """<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""",
            html=True
        )
    def test_no_total_count(self):
        """
        #8408 -- "Show all" should be displayed instead of the total count if
        ModelAdmin.show_full_result_count is False.
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        with self.assertNumQueries(4):
            response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        self.assertContains(
            response,
            """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
            html=True
        )
        self.assertTrue(response.context['cl'].show_admin_actions)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInheritedInlinesTest(TestCase):
    """Inlines for models that share a concrete parent (multi-table
    inheritance) must get distinct formset prefixes and save correctly."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_inline(self):
        """
        Inline models which inherit from a common parent are correctly handled.
        """
        foo_user = "foo username"
        bar_user = "bar username"
        # Extracts every name="..." attribute from the rendered form.
        name_re = re.compile(b'name="(.*?)"')
        # test the add case
        response = self.client.get(reverse('admin:admin_views_persona_add'))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))
        # post the add form
        post_data = {
            "name": "Test Name",
            # inline data
            "accounts-TOTAL_FORMS": "1",
            "accounts-INITIAL_FORMS": "0",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": foo_user,
            # "accounts-2" is the second inline's prefix (the BarAccount set).
            "accounts-2-TOTAL_FORMS": "1",
            "accounts-2-INITIAL_FORMS": "0",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": bar_user,
        }
        response = self.client.post(reverse('admin:admin_views_persona_add'), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
        persona_id = Persona.objects.all()[0].id
        foo_id = FooAccount.objects.all()[0].id
        bar_id = BarAccount.objects.all()[0].id
        # test the edit case
        response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,)))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))
        post_data = {
            "name": "Test Name",
            "accounts-TOTAL_FORMS": "2",
            "accounts-INITIAL_FORMS": "1",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": "%s-1" % foo_user,
            # account_ptr is the MTI parent link / primary key of the child.
            "accounts-0-account_ptr": str(foo_id),
            "accounts-0-persona": str(persona_id),
            "accounts-2-TOTAL_FORMS": "2",
            "accounts-2-INITIAL_FORMS": "1",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": "%s-1" % bar_user,
            "accounts-2-0-account_ptr": str(bar_id),
            "accounts-2-0-persona": str(persona_id),
        }
        response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data)
        self.assertEqual(response.status_code, 302)
        # The edit must update the existing rows, not create new ones.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestCustomChangeList(TestCase):
    """Tests for ModelAdmins that plug in a custom ChangeList class."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_custom_changelist(self):
        """
        Validate that a custom ChangeList class can be used (#9749)
        """
        changelist_url = reverse('admin:admin_views_gadget_changelist')
        # Create a gadget through the admin add view.
        add_response = self.client.post(reverse('admin:admin_views_gadget_add'), {"name": "First Gadget"})
        self.assertEqual(add_response.status_code, 302)  # redirect somewhere
        # The first hit drains the success message from the message queue.
        self.client.get(changelist_url)
        # The custom ChangeList filters everything out, so the gadget is
        # still not visible on a fresh request.
        changelist_response = self.client.get(changelist_url)
        self.assertEqual(changelist_response.status_code, 200)
        self.assertNotContains(changelist_response, 'First Gadget')
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestInlineNotEditable(TestCase):
    """Smoke test: the add view of a parent with inlines must render."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_GET_parent_add(self):
        """
        InlineModelAdmin broken?
        """
        add_url = reverse('admin:admin_views_parent_add')
        self.assertEqual(self.client.get(add_url).status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomQuerysetTest(TestCase):
    """Tests for ModelAdmins with customized get_queryset() — filtered,
    defer()ed, or only()ed querysets (refs #14529)."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    def setUp(self):
        self.client.force_login(self.superuser)
        self.pks = [EmptyModel.objects.create().id for i in range(3)]
        # Credentials for the explicit login POST in test_history_view_custom_qs.
        self.super_login = {
            REDIRECT_FIELD_NAME: reverse('admin:index'),
            'username': 'super',
            'password': 'secret',
        }
    def test_changelist_view(self):
        """The changelist only shows objects in the ModelAdmin's queryset."""
        response = self.client.get(reverse('admin:admin_views_emptymodel_changelist'))
        for i in self.pks:
            # NOTE(review): the EmptyModel admin apparently filters out pk 1 —
            # confirm against the ModelAdmin's get_queryset().
            if i > 1:
                self.assertContains(response, 'Primary key = %s' % i)
            else:
                self.assertNotContains(response, 'Primary key = %s' % i)
    def test_changelist_view_count_queries(self):
        """The changelist issues a fixed number of queries regardless of
        filtering or searching."""
        # create 2 Person objects
        Person.objects.create(name='person1', gender=1)
        Person.objects.create(name='person2', gender=2)
        changelist_url = reverse('admin:admin_views_person_changelist')
        # 5 queries are expected: 1 for the session, 1 for the user,
        # 2 for the counts and 1 for the objects on the page
        with self.assertNumQueries(5):
            resp = self.client.get(changelist_url)
            self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
        with self.assertNumQueries(5):
            extra = {'q': 'not_in_name'}
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], '0 of 0 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 0 selected')
        with self.assertNumQueries(5):
            extra = {'q': 'person'}
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
        with self.assertNumQueries(5):
            extra = {'gender__exact': '1'}
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], '0 of 1 selected')
            self.assertEqual(resp.context['selection_note_all'], '1 selected')
    def test_change_view(self):
        """Objects outside the custom queryset 404 on the change view and
        redirect to the index with an explanatory message."""
        for i in self.pks:
            url = reverse('admin:admin_views_emptymodel_change', args=(i,))
            response = self.client.get(url, follow=True)
            if i > 1:
                self.assertEqual(response.status_code, 200)
            else:
                self.assertRedirects(response, reverse('admin:index'))
                self.assertEqual(
                    [m.message for m in response.context['messages']],
                    ["""empty model with ID "1" doesn't exist. Perhaps it was deleted?"""]
                )
    def test_add_model_modeladmin_defer_qs(self):
        """Adding via an admin whose queryset uses defer() still renders a
        readable success message."""
        # Test for #14529. defer() is used in ModelAdmin.get_queryset()
        # model has __str__ method
        self.assertEqual(CoverLetter.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "author": "Candidate, Best",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_coverletter_add'), post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(CoverLetter.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        pk = CoverLetter.objects.all()[0].pk
        self.assertContains(
            response,
            '<li class="success">The cover letter "<a href="%s">'
            'Candidate, Best</a>" was added successfully.</li>' %
            reverse('admin:admin_views_coverletter_change', args=(pk,)), html=True
        )
        # model has no __str__ method
        self.assertEqual(ShortMessage.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "content": "What's this SMS thing?",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_shortmessage_add'), post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ShortMessage.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        sm = ShortMessage.objects.all()[0]
        self.assertContains(
            response,
            '<li class="success">The short message "<a href="%s">'
            '%s</a>" was added successfully.</li>' %
            (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True
        )
    def test_add_model_modeladmin_only_qs(self):
        """Adding via an admin whose queryset uses only() still renders a
        readable success message."""
        # Test for #14529. only() is used in ModelAdmin.get_queryset()
        # model has __str__ method
        self.assertEqual(Telegram.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "title": "Urgent telegram",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_telegram_add'), post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Telegram.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        pk = Telegram.objects.all()[0].pk
        self.assertContains(
            response,
            '<li class="success">The telegram "<a href="%s">'
            'Urgent telegram</a>" was added successfully.</li>' %
            reverse('admin:admin_views_telegram_change', args=(pk,)), html=True
        )
        # model has no __str__ method
        self.assertEqual(Paper.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "title": "My Modified Paper Title",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_paper_add'), post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Paper.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        p = Paper.objects.all()[0]
        self.assertContains(
            response,
            '<li class="success">The paper "<a href="%s">'
            '%s</a>" was added successfully.</li>' %
            (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True
        )
    def test_edit_model_modeladmin_defer_qs(self):
        """Editing via an admin whose queryset uses defer() still renders a
        readable success message."""
        # Test for #14529. defer() is used in ModelAdmin.get_queryset()
        # model has __str__ method
        cl = CoverLetter.objects.create(author="John Doe")
        self.assertEqual(CoverLetter.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "author": "John Doe II",
            "_save": "Save",
        }
        url = reverse('admin:admin_views_coverletter_change', args=(cl.pk,))
        response = self.client.post(url, post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(CoverLetter.objects.count(), 1)
        # Message should contain non-ugly model verbose name. Instance
        # representation is set by model's __str__()
        self.assertContains(
            response,
            '<li class="success">The cover letter "<a href="%s">'
            'John Doe II</a>" was changed successfully.</li>' %
            reverse('admin:admin_views_coverletter_change', args=(cl.pk,)), html=True
        )
        # model has no __str__ method
        sm = ShortMessage.objects.create(content="This is expensive")
        self.assertEqual(ShortMessage.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "content": "Too expensive",
            "_save": "Save",
        }
        url = reverse('admin:admin_views_shortmessage_change', args=(sm.pk,))
        response = self.client.post(url, post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ShortMessage.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The ugly(!)
        # instance representation is set by __str__().
        self.assertContains(
            response,
            '<li class="success">The short message "<a href="%s">'
            '%s</a>" was changed successfully.</li>' %
            (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True
        )
    def test_edit_model_modeladmin_only_qs(self):
        """Editing via an admin whose queryset uses only() still renders a
        readable success message."""
        # Test for #14529. only() is used in ModelAdmin.get_queryset()
        # model has __str__ method
        t = Telegram.objects.create(title="Frist Telegram")
        self.assertEqual(Telegram.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "title": "Telegram without typo",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)), post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Telegram.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The instance
        # representation is set by model's __str__()
        self.assertContains(
            response,
            '<li class="success">The telegram "<a href="%s">'
            'Telegram without typo</a>" was changed successfully.</li>' %
            reverse('admin:admin_views_telegram_change', args=(t.pk,)), html=True
        )
        # model has no __str__ method
        p = Paper.objects.create(title="My Paper Title")
        self.assertEqual(Paper.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "title": "My Modified Paper Title",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)), post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Paper.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The ugly(!)
        # instance representation is set by __str__().
        self.assertContains(
            response,
            '<li class="success">The paper "<a href="%s">'
            '%s</a>" was changed successfully.</li>' %
            (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True
        )
    def test_history_view_custom_qs(self):
        """
        Custom querysets are considered for the admin history view.
        """
        self.client.post(reverse('admin:login'), self.super_login)
        FilteredManager.objects.create(pk=1)
        FilteredManager.objects.create(pk=2)
        response = self.client.get(reverse('admin:admin_views_filteredmanager_changelist'))
        self.assertContains(response, "PK=1")
        self.assertContains(response, "PK=2")
        # The history view must be reachable for every object the custom
        # queryset exposes.
        self.assertEqual(
            self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(1,))).status_code, 200
        )
        self.assertEqual(
            self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(2,))).status_code, 200
        )
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineFileUploadTest(TestCase):
    """Tests for FileField handling in inline formsets."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    def setUp(self):
        self.client.force_login(self.superuser)
        # Set up test Picture and Gallery.
        # These must be set up here instead of in fixtures in order to allow Picture
        # to use a NamedTemporaryFile.
        # NOTE(review): NamedTemporaryFile deletes the file on close(), so only
        # the file *name* survives on the Picture — presumably the test only
        # needs the stored path for display, not the contents; confirm.
        file1 = tempfile.NamedTemporaryFile(suffix=".file1")
        file1.write(b'a' * (2 ** 21))
        filename = file1.name
        file1.close()
        self.gallery = Gallery(name="Test Gallery")
        self.gallery.save()
        self.picture = Picture(name="Test Picture", image=filename, gallery=self.gallery)
        self.picture.save()
    def test_inline_file_upload_edit_validation_error_post(self):
        """
        Inline file uploads correctly display prior data (#10002).
        """
        post_data = {
            "name": "Test Gallery",
            "pictures-TOTAL_FORMS": "2",
            "pictures-INITIAL_FORMS": "1",
            "pictures-MAX_NUM_FORMS": "0",
            "pictures-0-id": str(self.picture.id),
            "pictures-0-gallery": str(self.gallery.id),
            "pictures-0-name": "Test Picture",
            "pictures-0-image": "",
            # Second form is missing its required image -> validation error.
            "pictures-1-id": "",
            "pictures-1-gallery": str(self.gallery.id),
            "pictures-1-name": "Test Picture 2",
            "pictures-1-image": "",
        }
        response = self.client.post(
            reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data
        )
        # The re-rendered form still shows the existing file ("Currently: ...").
        self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        # Shared superuser; setUp() logs the test client in as this user.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    def setUp(self):
        # Baseline POST payload with empty formsets for every inline on the
        # Collector admin; individual tests fill in the fields they exercise.
        self.post_data = {
            "name": "Test Name",
            # Widget inline.
            "widget_set-TOTAL_FORMS": "3",
            "widget_set-INITIAL_FORMS": "0",
            "widget_set-MAX_NUM_FORMS": "0",
            "widget_set-0-id": "",
            "widget_set-0-owner": "1",
            "widget_set-0-name": "",
            "widget_set-1-id": "",
            "widget_set-1-owner": "1",
            "widget_set-1-name": "",
            "widget_set-2-id": "",
            "widget_set-2-owner": "1",
            "widget_set-2-name": "",
            # DooHickey inline ("code" primary key).
            "doohickey_set-TOTAL_FORMS": "3",
            "doohickey_set-INITIAL_FORMS": "0",
            "doohickey_set-MAX_NUM_FORMS": "0",
            "doohickey_set-0-owner": "1",
            "doohickey_set-0-code": "",
            "doohickey_set-0-name": "",
            "doohickey_set-1-owner": "1",
            "doohickey_set-1-code": "",
            "doohickey_set-1-name": "",
            "doohickey_set-2-owner": "1",
            "doohickey_set-2-code": "",
            "doohickey_set-2-name": "",
            # Grommet inline (explicit AutoField "code").
            "grommet_set-TOTAL_FORMS": "3",
            "grommet_set-INITIAL_FORMS": "0",
            "grommet_set-MAX_NUM_FORMS": "0",
            "grommet_set-0-code": "",
            "grommet_set-0-owner": "1",
            "grommet_set-0-name": "",
            "grommet_set-1-code": "",
            "grommet_set-1-owner": "1",
            "grommet_set-1-name": "",
            "grommet_set-2-code": "",
            "grommet_set-2-owner": "1",
            "grommet_set-2-name": "",
            # Whatsit inline ("index" primary key).
            "whatsit_set-TOTAL_FORMS": "3",
            "whatsit_set-INITIAL_FORMS": "0",
            "whatsit_set-MAX_NUM_FORMS": "0",
            "whatsit_set-0-owner": "1",
            "whatsit_set-0-index": "",
            "whatsit_set-0-name": "",
            "whatsit_set-1-owner": "1",
            "whatsit_set-1-index": "",
            "whatsit_set-1-name": "",
            "whatsit_set-2-owner": "1",
            "whatsit_set-2-index": "",
            "whatsit_set-2-name": "",
            # FancyDoodad inline (inherited model; parent-link pk).
            "fancydoodad_set-TOTAL_FORMS": "3",
            "fancydoodad_set-INITIAL_FORMS": "0",
            "fancydoodad_set-MAX_NUM_FORMS": "0",
            "fancydoodad_set-0-doodad_ptr": "",
            "fancydoodad_set-0-owner": "1",
            "fancydoodad_set-0-name": "",
            "fancydoodad_set-0-expensive": "on",
            "fancydoodad_set-1-doodad_ptr": "",
            "fancydoodad_set-1-owner": "1",
            "fancydoodad_set-1-name": "",
            "fancydoodad_set-1-expensive": "on",
            "fancydoodad_set-2-doodad_ptr": "",
            "fancydoodad_set-2-owner": "1",
            "fancydoodad_set-2-name": "",
            "fancydoodad_set-2-expensive": "on",
            # Category inline (tabular, orderable).
            "category_set-TOTAL_FORMS": "3",
            "category_set-INITIAL_FORMS": "0",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "",
            "category_set-0-id": "",
            "category_set-0-collector": "1",
            "category_set-1-order": "",
            "category_set-1-id": "",
            "category_set-1-collector": "1",
            "category_set-2-order": "",
            "category_set-2-id": "",
            "category_set-2-collector": "1",
        }
        self.client.force_login(self.superuser)
        # Parent object every inline payload above points at (owner/collector "1").
        self.collector = Collector(pk=1, name='John Fowles')
        self.collector.save()
def test_simple_inline(self):
"A simple model can be saved as inlines"
# First add a new inline
self.post_data['widget_set-0-name'] = "Widget 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
widget_id = Widget.objects.all()[0].id
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="widget_set-0-id"')
# Now resave that inline
self.post_data['widget_set-INITIAL_FORMS'] = "1"
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = "Widget 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
# Now modify that inline
self.post_data['widget_set-INITIAL_FORMS'] = "1"
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = "Widget 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")
def test_explicit_autofield_inline(self):
"A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
# First add a new inline
self.post_data['grommet_set-0-name'] = "Grommet 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="grommet_set-0-code"')
# Now resave that inline
self.post_data['grommet_set-INITIAL_FORMS'] = "1"
self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
self.post_data['grommet_set-0-name'] = "Grommet 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Now modify that inline
self.post_data['grommet_set-INITIAL_FORMS'] = "1"
self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")
def test_char_pk_inline(self):
"A model with a character PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="doohickey_set-0-code"')
# Now resave that inline
self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Now modify that inline
self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")
def test_integer_pk_inline(self):
"A model with an integer PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="whatsit_set-0-index"')
# Now resave that inline
self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Now modify that inline
self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")
def test_inherited_inline(self):
"An inherited model can be saved as inlines. Regression for #11042"
# First add a new inline
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
doodad_pk = FancyDoodad.objects.all()[0].pk
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
# Now resave that inline
self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
# Now modify that inline
self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")
    def test_ordered_inline(self):
        """
        An inline with an editable ordering fields is updated correctly.
        """
        # Create some objects with an initial ordering
        Category.objects.create(id=1, order=1, collector=self.collector)
        Category.objects.create(id=2, order=2, collector=self.collector)
        Category.objects.create(id=3, order=0, collector=self.collector)
        Category.objects.create(id=4, order=0, collector=self.collector)
        # NB: The order values must be changed so that the items are reordered.
        self.post_data.update({
            "name": "Frederick Clegg",
            # 4 existing inline forms plus 3 empty extras = 7 total.
            "category_set-TOTAL_FORMS": "7",
            "category_set-INITIAL_FORMS": "4",
            "category_set-MAX_NUM_FORMS": "0",
            # Forms 0-3 update existing Categories with new order values.
            "category_set-0-order": "14",
            "category_set-0-id": "1",
            "category_set-0-collector": "1",
            "category_set-1-order": "13",
            "category_set-1-id": "2",
            "category_set-1-collector": "1",
            "category_set-2-order": "1",
            "category_set-2-id": "3",
            "category_set-2-collector": "1",
            "category_set-3-order": "0",
            "category_set-3-id": "4",
            "category_set-3-collector": "1",
            # Forms 4-6 are left empty and must be ignored on save.
            "category_set-4-order": "",
            "category_set-4-id": "",
            "category_set-4-collector": "1",
            "category_set-5-order": "",
            "category_set-5-id": "",
            "category_set-5-collector": "1",
            "category_set-6-order": "",
            "category_set-6-id": "",
            "category_set-6-collector": "1",
        })
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)
        # The order values have been applied to the right objects
        self.assertEqual(self.collector.category_set.count(), 4)
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(ROOT_URLCONF='admin_views.urls')
class NeverCacheTests(TestCase):
    """Admin views must be served with caching disabled (or no max-age at all)."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')

    def setUp(self):
        self.client.force_login(self.superuser)

    def _get_max_age(self, url):
        # Fetch the page and return the max-age value parsed from its
        # Cache-Control header (None when the header is absent).
        return get_max_age(self.client.get(url))

    def test_admin_index(self):
        "The main index is served uncacheable."
        self.assertEqual(self._get_max_age(reverse('admin:index')), 0)

    def test_app_index(self):
        "An application index is served uncacheable."
        self.assertEqual(self._get_max_age(reverse('admin:app_list', args=('admin_views',))), 0)

    def test_model_index(self):
        "A model changelist is served uncacheable."
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_fabric_changelist')), 0)

    def test_model_add(self):
        "A model add page is served uncacheable."
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_fabric_add')), 0)

    def test_model_view(self):
        "A model edit page is served uncacheable."
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_section_change', args=(self.s1.pk,))), 0)

    def test_model_history(self):
        "A model history page is served uncacheable."
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_section_history', args=(self.s1.pk,))), 0)

    def test_model_delete(self):
        "A model delete page is served uncacheable."
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))), 0)

    def test_login(self):
        "The login view is served uncacheable."
        self.client.logout()
        self.assertEqual(self._get_max_age(reverse('admin:index')), 0)

    def test_logout(self):
        "The logout view is served uncacheable."
        self.assertEqual(self._get_max_age(reverse('admin:logout')), 0)

    def test_password_change(self):
        "The password change view sets no max-age."
        self.client.logout()
        self.assertIsNone(self._get_max_age(reverse('admin:password_change')))

    def test_password_change_done(self):
        "The password change done view sets no max-age."
        self.assertIsNone(self._get_max_age(reverse('admin:password_change_done')))

    def test_JS_i18n(self):
        "The JavaScript i18n view sets no max-age."
        self.assertIsNone(self._get_max_age(reverse('admin:jsi18n')))
@override_settings(ROOT_URLCONF='admin_views.urls')
class PrePopulatedTest(TestCase):
    """Rendering of the prepopulated_fields JavaScript options on admin forms."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_prepopulated_on(self):
        """
        The add form renders the prepopulated-fields JSON options referencing
        the slug widget and its title dependency, both on the main form and
        on the inline.
        """
        response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add'))
        # The expected strings are HTML-entity-escaped (&quot;) because the
        # JSON options are rendered through templates with autoescaping on.
        # (The previous literal-quote form was not valid Python syntax.)
        self.assertContains(response, "&quot;id&quot;: &quot;#id_slug&quot;")
        self.assertContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]")
        self.assertContains(response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;")

    def test_prepopulated_off(self):
        """
        On the change form of an existing object the slug is no longer
        prepopulated, so none of the JavaScript options appear.
        """
        response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,)))
        self.assertContains(response, "A Long Title")
        self.assertNotContains(response, "&quot;id&quot;: &quot;#id_slug&quot;")
        self.assertNotContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]")
        self.assertNotContains(
            response,
            "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;"
        )

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_prepopulated_maxlength_localized(self):
        """
        Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
        that maxLength (in the JavaScript) is rendered without separators.
        """
        response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add'))
        self.assertContains(response, "&quot;maxLength&quot;: 1000")  # instead of 1,000
@override_settings(ROOT_URLCONF='admin_views.urls')
class SeleniumTests(AdminSeleniumTestCase):
    """
    Browser-driven (Selenium) tests for the admin's JavaScript behavior:
    prepopulated fields, collapsible fieldsets, auto-focus, delete
    confirmation navigation, and related-object popups.
    """
    available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps
    def setUp(self):
        self.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    def test_prepopulated_fields(self):
        """
        The JavaScript-automated prepopulated fields work with the main form
        and with stacked and tabular inlines.
        Refs #13068, #9264, #9983, #9784.
        """
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_mainprepopulated_add'))
        # Main form ----------------------------------------------------------
        self.selenium.find_element_by_id('id_pubdate').send_keys('2012-02-18')
        self.get_select_option('#id_status', 'option two').click()
        self.selenium.find_element_by_id('id_name').send_keys(' this is the mAin nÀMë and it\'s awεšomeııı')
        slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
        slug3 = self.selenium.find_element_by_id('id_slug3').get_attribute('value')
        # slug1/slug2 are ASCII-slugified; slug3 keeps the unicode characters.
        self.assertEqual(slug1, 'main-name-and-its-awesomeiii-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-and-its-awesomeiii')
        self.assertEqual(slug3, 'main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131')
        # Stacked inlines ----------------------------------------------------
        # Initial inline
        self.selenium.find_element_by_id('id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
        self.get_select_option('#id_relatedprepopulated_set-0-status', 'option one').click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-0-name').send_keys(
            ' here is a sŤāÇkeð inline !  '
        )
        slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug2').get_attribute('value')
        self.assertEqual(slug1, 'here-stacked-inline-2011-12-17')
        self.assertEqual(slug2, 'option-one-here-stacked-inline')
        # Add an inline
        self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
        self.get_select_option('#id_relatedprepopulated_set-1-status', 'option two').click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-1-name').send_keys(
            ' now you haVe anöther sŤāÇkeð  inline with a very ... '
            'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... '
        )
        slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug2').get_attribute('value')
        # 50 characters maximum for slug1 field
        self.assertEqual(slug1, 'now-you-have-another-stacked-inline-very-loooooooo')
        # 60 characters maximum for slug2 field
        self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-very-looooooo')
        # Tabular inlines ----------------------------------------------------
        # Initial inline
        self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
        self.get_select_option('#id_relatedprepopulated_set-2-0-status', 'option two').click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-name').send_keys(
            'And now, with a tÃbűlaŘ inline !!!'
        )
        slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug2').get_attribute('value')
        self.assertEqual(slug1, 'and-now-tabular-inline-1234-12-07')
        self.assertEqual(slug2, 'option-two-and-now-tabular-inline')
        # Add an inline
        self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1].click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
        self.get_select_option('#id_relatedprepopulated_set-2-1-status', 'option one').click()
        self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-name').send_keys(
            r'a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters'
        )
        slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug2').get_attribute('value')
        self.assertEqual(slug1, 'tabular-inline-ignored-characters-1981-08-22')
        self.assertEqual(slug2, 'option-one-tabular-inline-ignored-characters')
        # Save and check that everything is properly stored in the database
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.assertEqual(MainPrepopulated.objects.all().count(), 1)
        # get() raises if the saved row doesn't match all expected fields.
        MainPrepopulated.objects.get(
            name=' this is the mAin nÀMë and it\'s awεšomeııı',
            pubdate='2012-02-18',
            status='option two',
            slug1='main-name-and-its-awesomeiii-2012-02-18',
            slug2='option-two-main-name-and-its-awesomeiii',
        )
        self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
        RelatedPrepopulated.objects.get(
            name=' here is a sŤāÇkeð inline !  ',
            pubdate='2011-12-17',
            status='option one',
            slug1='here-stacked-inline-2011-12-17',
            slug2='option-one-here-stacked-inline',
        )
        RelatedPrepopulated.objects.get(
            # 75 characters in name field
            name=' now you haVe anöther sŤāÇkeð  inline with a very ... loooooooooooooooooo',
            pubdate='1999-01-25',
            status='option two',
            slug1='now-you-have-another-stacked-inline-very-loooooooo',
            slug2='option-two-now-you-have-another-stacked-inline-very-looooooo',
        )
        RelatedPrepopulated.objects.get(
            name='And now, with a tÃbűlaŘ inline !!!',
            pubdate='1234-12-07',
            status='option two',
            slug1='and-now-tabular-inline-1234-12-07',
            slug2='option-two-and-now-tabular-inline',
        )
        RelatedPrepopulated.objects.get(
            name=r'a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
            pubdate='1981-08-22',
            status='option one',
            slug1='tabular-inline-ignored-characters-1981-08-22',
            slug2='option-one-tabular-inline-ignored-characters',
        )
    def test_populate_existing_object(self):
        """
        The prepopulation works for existing objects too, as long as
        the original field is empty (#19082).
        """
        # Slugs are empty to start with.
        item = MainPrepopulated.objects.create(
            name=' this is the mAin nÀMë',
            pubdate='2012-02-18',
            status='option two',
            slug1='',
            slug2='',
        )
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        object_url = self.live_server_url + reverse('admin:admin_views_mainprepopulated_change', args=(item.id,))
        self.selenium.get(object_url)
        self.selenium.find_element_by_id('id_name').send_keys(' the best')
        # The slugs got prepopulated since they were originally empty
        slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'main-name-best-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-best')
        # Save the object
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.selenium.get(object_url)
        self.selenium.find_element_by_id('id_name').send_keys(' hello')
        # The slugs got prepopulated didn't change since they were originally not empty
        slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'main-name-best-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-best')
    def test_collapsible_fieldset(self):
        """
        The 'collapse' class in fieldsets definition allows to
        show/hide the appropriate field section.
        """
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_add'))
        # Collapsed by default; the "Show" link reveals the hidden section.
        self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
        self.selenium.find_elements_by_link_text('Show')[0].click()
        self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
        self.assertEqual(self.selenium.find_element_by_id('fieldsetcollapser0').text, "Hide")
    def test_first_field_focus(self):
        """JavaScript-assisted auto-focus on first usable form field."""
        # First form field has a single widget
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_picture_add'))
        self.assertEqual(
            self.selenium.switch_to.active_element,
            self.selenium.find_element_by_id('id_name')
        )
        # First form field has a MultiWidget
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_reservation_add'))
        self.assertEqual(
            self.selenium.switch_to.active_element,
            self.selenium.find_element_by_id('id_start_date_0')
        )
    def test_cancel_delete_confirmation(self):
        "Cancelling the deletion of an object takes the user back one page."
        pizza = Pizza.objects.create(name="Double Cheese")
        url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
        full_url = self.live_server_url + url
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(full_url)
        self.selenium.find_element_by_class_name('deletelink').click()
        # Click 'cancel' on the delete page.
        self.selenium.find_element_by_class_name('cancel-link').click()
        # Wait until we're back on the change page.
        self.wait_for_text('#content h1', 'Change pizza')
        self.assertEqual(self.selenium.current_url, full_url)
        self.assertEqual(Pizza.objects.count(), 1)
    def test_cancel_delete_related_confirmation(self):
        """
        Cancelling the deletion of an object with relations takes the user back
        one page.
        """
        pizza = Pizza.objects.create(name="Double Cheese")
        topping1 = Topping.objects.create(name="Cheddar")
        topping2 = Topping.objects.create(name="Mozzarella")
        pizza.toppings.add(topping1, topping2)
        url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
        full_url = self.live_server_url + url
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(full_url)
        self.selenium.find_element_by_class_name('deletelink').click()
        # Click 'cancel' on the delete page.
        self.selenium.find_element_by_class_name('cancel-link').click()
        # Wait until we're back on the change page.
        self.wait_for_text('#content h1', 'Change pizza')
        self.assertEqual(self.selenium.current_url, full_url)
        # Nothing was deleted: the pizza and both toppings survive.
        self.assertEqual(Pizza.objects.count(), 1)
        self.assertEqual(Topping.objects.count(), 2)
    def test_list_editable_popups(self):
        """
        list_editable foreign keys have add/change popups.
        """
        from selenium.webdriver.support.ui import Select
        s1 = Section.objects.create(name='Test section')
        Article.objects.create(
            title='foo',
            content='<p>Middle content</p>',
            date=datetime.datetime(2008, 3, 18, 11, 54, 58),
            section=s1,
        )
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist'))
        # Change popup
        self.selenium.find_element_by_id('change_id_form-0-section').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.wait_for_text('#content h1', 'Change section')
        name_input = self.selenium.find_element_by_id('id_name')
        name_input.clear()
        name_input.send_keys('<i>edited section</i>')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The edited name is reflected (unescaped) in the changelist select.
        select = Select(self.selenium.find_element_by_id('id_form-0-section'))
        self.assertEqual(select.first_selected_option.text, '<i>edited section</i>')
        # Add popup
        self.selenium.find_element_by_id('add_id_form-0-section').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.wait_for_text('#content h1', 'Add section')
        self.selenium.find_element_by_id('id_name').send_keys('new section')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        select = Select(self.selenium.find_element_by_id('id_form-0-section'))
        self.assertEqual(select.first_selected_option.text, 'new section')
    def test_inline_uuid_pk_edit_with_popup(self):
        """Editing a UUID-PK related object via the change popup updates the select."""
        from selenium.webdriver.support.ui import Select
        parent = ParentWithUUIDPK.objects.create(title='test')
        related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
        self.selenium.get(self.live_server_url + change_url)
        self.selenium.find_element_by_id('change_id_parent').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        select = Select(self.selenium.find_element_by_id('id_parent'))
        self.assertEqual(select.first_selected_option.text, str(parent.id))
        self.assertEqual(select.first_selected_option.get_attribute('value'), str(parent.id))
    def test_inline_uuid_pk_add_with_popup(self):
        """Adding a UUID-PK related object via the add popup selects the new row."""
        from selenium.webdriver.support.ui import Select
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_relatedwithuuidpkmodel_add'))
        self.selenium.find_element_by_id('add_id_parent').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.selenium.find_element_by_id('id_title').send_keys('test')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        select = Select(self.selenium.find_element_by_id('id_parent'))
        uuid_id = str(ParentWithUUIDPK.objects.first().id)
        self.assertEqual(select.first_selected_option.text, uuid_id)
        self.assertEqual(select.first_selected_option.get_attribute('value'), uuid_id)
    def test_inline_uuid_pk_delete_with_popup(self):
        """Deleting a UUID-PK related object via the delete popup empties the select."""
        from selenium.webdriver.support.ui import Select
        parent = ParentWithUUIDPK.objects.create(title='test')
        related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
        self.selenium.get(self.live_server_url + change_url)
        self.selenium.find_element_by_id('delete_id_parent').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        self.selenium.find_element_by_xpath('//input[@value="Yes, I\'m sure"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        select = Select(self.selenium.find_element_by_id('id_parent'))
        self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
        self.assertEqual(select.first_selected_option.text, '---------')
        self.assertEqual(select.first_selected_option.get_attribute('value'), '')
    def test_list_editable_raw_id_fields(self):
        """Raw-id lookup popups work for list_editable fields (uses site2's admin)."""
        parent = ParentWithUUIDPK.objects.create(title='test')
        parent2 = ParentWithUUIDPK.objects.create(title='test2')
        RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name)
        self.selenium.get(self.live_server_url + change_url)
        self.selenium.find_element_by_id('lookup_id_form-0-parent').click()
        self.wait_for_popup()
        self.selenium.switch_to.window(self.selenium.window_handles[-1])
        # Select "parent2" in the popup.
        self.selenium.find_element_by_link_text(str(parent2.pk)).click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The newly selected pk should appear in the raw id input.
        value = self.selenium.find_element_by_id('id_form-0-parent').get_attribute('value')
        self.assertEqual(value, str(parent2.pk))
@override_settings(ROOT_URLCONF='admin_views.urls')
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
def setUp(self):
self.client.force_login(self.superuser)
def test_readonly_get(self):
response = self.client.get(reverse('admin:admin_views_post_add'))
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'name="posted"')
# 3 fields + 2 submit buttons + 5 inline management form fields, + 2
# hidden fields for inlines + 1 field for the inline + 2 empty form
self.assertContains(response, "<input", count=15)
self.assertContains(response, formats.localize(datetime.date.today()))
self.assertContains(response, "<label>Awesomeness level:</label>")
self.assertContains(response, "Very awesome.")
self.assertContains(response, "Unknown coolness.")
self.assertContains(response, "foo")
# Multiline text in a readonly field gets <br /> tags
self.assertContains(response, 'Multiline<br />test<br />string')
self.assertContains(response, '<div class="readonly">Multiline<br />html<br />content</div>', html=True)
self.assertContains(response, 'InlineMultiline<br />test<br />string')
self.assertContains(response, formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
self.assertContains(response, '<div class="form-row field-coolness">')
self.assertContains(response, '<div class="form-row field-awesomeness_level">')
self.assertContains(response, '<div class="form-row field-posted">')
self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">')
self.assertContains(response, '<div class="help">', 3)
self.assertContains(
response,
'<div class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</div>',
html=True
)
self.assertContains(
response,
'<div class="help">Some help text for the content (with unicode ŠĐĆŽćžšđ)</div>',
html=True
)
self.assertContains(
response,
'<div class="help">Some help text for the date (with unicode ŠĐĆŽćžšđ)</div>',
html=True
)
p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
self.assertContains(response, "%d amount of cool" % p.pk)
def test_readonly_text_field(self):
p = Post.objects.create(
title="Readonly test", content="test",
readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest',
)
Link.objects.create(
url="http://www.djangoproject.com", post=p,
readonly_link_content="test\r\nlink",
)
response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
# Checking readonly field.
self.assertContains(response, 'test<br /><br />test<br /><br />test<br /><br />test')
# Checking readonly field in inline.
self.assertContains(response, 'test<br />link')
def test_readonly_post(self):
data = {
"title": "Django Got Readonly Fields",
"content": "This is an incredible development.",
"link_set-TOTAL_FORMS": "1",
"link_set-INITIAL_FORMS": "0",
"link_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:admin_views_post_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 1)
p = Post.objects.get()
self.assertEqual(p.posted, datetime.date.today())
data["posted"] = "10-8-1990" # some date that's not today
response = self.client.post(reverse('admin:admin_views_post_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 2)
p = Post.objects.order_by('-id')[0]
self.assertEqual(p.posted, datetime.date.today())
def test_readonly_manytomany(self):
"Regression test for #13004"
response = self.client.get(reverse('admin:admin_views_pizza_add'))
self.assertEqual(response.status_code, 200)
def test_user_password_change_limited_queryset(self):
su = User.objects.filter(is_superuser=True)[0]
response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,)))
self.assertEqual(response.status_code, 404)
def test_change_form_renders_correct_null_choice_value(self):
"""
Regression test for #17911.
"""
choice = Choice.objects.create(choice=None)
response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,)))
self.assertContains(response, '<div class="readonly">No opinion</div>', html=True)
def test_readonly_manytomany_backwards_ref(self):
"""
Regression test for #16433 - backwards references for related objects
broke if the related field is read-only due to the help_text attribute
"""
topping = Topping.objects.create(name='Salami')
pizza = Pizza.objects.create(name='Americano')
pizza.toppings.add(topping)
response = self.client.get(reverse('admin:admin_views_topping_add'))
self.assertEqual(response.status_code, 200)
def test_readonly_manytomany_forwards_ref(self):
topping = Topping.objects.create(name='Salami')
pizza = Pizza.objects.create(name='Americano')
pizza.toppings.add(topping)
response = self.client.get(reverse('admin:admin_views_pizza_change', args=(pizza.pk,)))
self.assertContains(response, '<label>Toppings:</label>', html=True)
self.assertContains(response, '<div class="readonly">Salami</div>', html=True)
def test_readonly_onetoone_backwards_ref(self):
"""
Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
"""
v1 = Villain.objects.create(name='Adam')
pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1)
pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl)
response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
field = self.get_admin_readonly_field(response, 'plotdetails')
self.assertEqual(field.contents(), 'Brand New Plot')
# The reverse relation also works if the OneToOneField is null.
pd.plot = None
pd.save()
response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
field = self.get_admin_readonly_field(response, 'plotdetails')
self.assertEqual(field.contents(), '-') # default empty value
def test_readonly_field_overrides(self):
"""
Regression test for #22087 - ModelForm Meta overrides are ignored by
AdminReadonlyField
"""
p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,)))
self.assertContains(response, '<div class="help">Overridden help text for the date</div>')
self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True)
self.assertNotContains(response, "Some help text for the date (with unicode ŠĐĆŽćžšđ)")
def test_correct_autoescaping(self):
"""
Make sure that non-field readonly elements are properly autoescaped (#24461)
"""
section = Section.objects.create(name='<a>evil</a>')
response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,)))
self.assertNotContains(response, "<a>evil</a>", status_code=200)
self.assertContains(response, "<a>evil</a>", status_code=200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class LimitChoicesToInAdminTest(TestCase):
    """limit_choices_to given as a callable is honored on add forms (#2445)."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_limit_choices_to_as_callable(self):
        """Test for ticket 2445 changes to admin."""
        allowed = Character.objects.create(
            username='threepwood',
            last_action=datetime.datetime.today() + datetime.timedelta(days=1),
        )
        excluded = Character.objects.create(
            username='marley',
            last_action=datetime.datetime.today() - datetime.timedelta(days=1),
        )
        response = self.client.get(reverse('admin:admin_views_stumpjoke_add'))
        # The permitted character shows up twice; the filtered-out one never.
        self.assertContains(response, allowed.username, count=2)
        self.assertNotContains(response, excluded.username)
@override_settings(ROOT_URLCONF='admin_views.urls')
class RawIdFieldsTest(TestCase):
    """limit_choices_to is applied to raw_id_fields lookup popups."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_limit_choices_to(self):
        """Regression test for 14880"""
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True,
                                   leader=actor,
                                   country="England")
        Inquisition.objects.create(expected=False,
                                   leader=actor,
                                   country="Spain")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content)
        self.assertTrue(m)  # Got a match
        # The href is HTML-escaped in the page source; unescape the "&amp;"
        # separators so the URL can be requested through the test client.
        # (The previous .replace("&", "&") was a no-op.)
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step also tests integers, strings and booleans in the
        # lookup query string; in model we define inquisition field to have a
        # limit_choices_to option that includes a filter on a string field
        # (inquisition__actor__name), a filter on an integer field
        # (inquisition__actor__age), and a filter on a boolean field
        # (inquisition__expected).
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Spain")
        self.assertNotContains(response2, "England")

    def test_limit_choices_to_isnull_false(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content)
        self.assertTrue(m)  # Got a match
        # Unescape "&amp;" before reusing the URL (see test_limit_choices_to).
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=0 gets parsed correctly from the
        # lookup query string; in model we define defendant0 field to have a
        # limit_choices_to option that includes "actor__title__isnull=False".
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Kilbraken")
        self.assertNotContains(response2, "Palin")

    def test_limit_choices_to_isnull_true(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content)
        self.assertTrue(m)  # Got a match
        # Unescape "&amp;" before reusing the URL (see test_limit_choices_to).
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=1 gets parsed correctly from the
        # lookup query string; in model we define defendant1 field to have a
        # limit_choices_to option that includes "actor__title__isnull=True".
        response2 = self.client.get(popup_url)
        self.assertNotContains(response2, "Kilbraken")
        self.assertContains(response2, "Palin")

    def test_list_display_method_same_name_as_reverse_accessor(self):
        """
        Should be able to use a ModelAdmin method in list_display that has the
        same name as a reverse model field ("sketch" in this case).
        """
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True, leader=actor, country="England")
        response = self.client.get(reverse('admin:admin_views_inquisition_changelist'))
        self.assertContains(response, 'list-display-sketch')
@override_settings(ROOT_URLCONF='admin_views.urls')
class UserAdminTest(TestCase):
    """
    Tests user CRUD functionality.
    """
    @classmethod
    def setUpTestData(cls):
        # Users exercised directly by the tests below.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
        cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
        # Additional fixtures; not all of them are referenced directly here.
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_save_button(self):
        # A plain save creates the user and redirects to its change page.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())
    def test_save_continue_editing_button(self):
        # "_continue" also redirects to the new user's change page.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_continue': '1',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())
    def test_password_mismatch(self):
        # Mismatched passwords re-render the form with the error attached to
        # password2, not password.
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'mismatch',
        })
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'adminform', 'password', [])
        self.assertFormError(response, 'adminform', 'password2', ["The two password fields didn't match."])
    def test_user_fk_add_popup(self):
        """User addition through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertContains(response, reverse('admin:auth_user_add'))
        self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"')
        # In popup mode the navigation submit buttons are hidden.
        response = self.client.get(reverse('admin:auth_user_add') + '?_popup=1')
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(reverse('admin:auth_user_add') + '?_popup=1', data, follow=True)
        # The popup response embeds the created object for the opener window.
        self.assertContains(response, '"obj": "newuser"')
    def test_user_fk_change_popup(self):
        """User change through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_change', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertContains(response, '"obj": "newuser"')
        self.assertContains(response, '"action": "change"')
    def test_user_fk_delete_popup(self):
        """User deletion through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {
            'post': 'yes',
            '_popup': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertContains(response, '"action": "delete"')
    def test_save_add_another_button(self):
        # "_addanother" redirects back to an empty add form.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_addanother': '1',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_user_add'))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())
    def test_user_permission_performance(self):
        u = User.objects.all()[0]
        # Don't depend on a warm cache, see #17377.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(10):
            response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,)))
            self.assertEqual(response.status_code, 200)
    def test_form_url_present_in_context(self):
        # The 'admin3' site supplies a custom form_url ('pony'); it must be
        # passed through to the template context.
        u = User.objects.all()[0]
        response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['form_url'], 'pony')
@override_settings(ROOT_URLCONF='admin_views.urls')
class GroupAdminTest(TestCase):
    """
    Tests group CRUD functionality.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_save_button(self):
        """Adding a group via Save redirects to the group changelist."""
        existing = Group.objects.count()
        response = self.client.post(reverse('admin:auth_group_add'), {
            'name': 'newgroup',
        })
        Group.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_group_changelist'))
        self.assertEqual(Group.objects.count(), existing + 1)

    def test_group_permission_performance(self):
        """Rendering a group's change page takes a bounded number of queries."""
        group = Group.objects.create(name="test_group")
        # Start from a cold ContentType cache so the query count is stable.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(8):
            response = self.client.get(reverse('admin:auth_group_change', args=(group.pk,)))
            self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class CSSTest(TestCase):
    # Checks the CSS class hooks the admin templates expose for styling:
    # per-field form rows, app/model markers on <body>, changelist cells.
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_field_prefix_css_classes(self):
        """
        Fields have a CSS class name with a 'field-' prefix.
        """
        response = self.client.get(reverse('admin:admin_views_post_add'))
        # The main form
        self.assertContains(response, 'class="form-row field-title"')
        self.assertContains(response, 'class="form-row field-content"')
        self.assertContains(response, 'class="form-row field-public"')
        self.assertContains(response, 'class="form-row field-awesomeness_level"')
        self.assertContains(response, 'class="form-row field-coolness"')
        self.assertContains(response, 'class="form-row field-value"')
        self.assertContains(response, 'class="form-row"')  # The lambda function
        # The tabular inline
        self.assertContains(response, '<td class="field-url">')
        self.assertContains(response, '<td class="field-posted">')
    def test_index_css_classes(self):
        """
        CSS class names are used for each app and model on the admin index
        pages (#17050).
        """
        # General index page
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
        # App index page
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
    def test_app_model_in_form_body_class(self):
        """
        Ensure app and model tag are correctly read by change_form template
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertContains(response, '<body class=" app-admin_views model-section ')
    def test_app_model_in_list_body_class(self):
        """
        Ensure app and model tag are correctly read by change_list template
        """
        response = self.client.get(reverse('admin:admin_views_section_changelist'))
        self.assertContains(response, '<body class=" app-admin_views model-section ')
    def test_app_model_in_delete_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by delete_confirmation
        template
        """
        response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
        self.assertContains(response, '<body class=" app-admin_views model-section ')
    def test_app_model_in_app_index_body_class(self):
        """
        Ensure app and model tag are correctly read by app_index template
        """
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertContains(response, '<body class=" dashboard app-admin_views')
    def test_app_model_in_delete_selected_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by
        delete_selected_confirmation template
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [self.s1.pk],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_section_changelist'), action_data)
        self.assertContains(response, '<body class=" app-admin_views model-section ')
    def test_changelist_field_classes(self):
        """
        Cells of the change list table should contain the field name in their class attribute
        Refs #11195.
        """
        Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
        self.assertContains(response, '<th class="field-name">')
        self.assertContains(response, '<td class="field-release_date nowrap">')
        self.assertContains(response, '<td class="action-checkbox">')
# docutils is an optional dependency; when it's missing the admindocs tests
# below are skipped via @unittest.skipUnless(docutils, ...).
try:
    import docutils
except ImportError:
    docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF='admin_views.urls')
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']})
class AdminDocsTest(TestCase):
    # Smoke tests for the admindocs template tag/filter reference pages.
    # Each entry appears twice on a page: once in the index list and once in
    # the detail section (hence count=2 on the group headings).
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_tags(self):
        response = self.client.get(reverse('django-admindocs-tags'))
        # The builtin tag group exists
        self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
        # A builtin tag exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
        # An app tag exists in both the index and detail
        self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
        self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
        # The admin list tag group exists
        self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
        # An admin list tag exists in both the index and detail
        self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
        self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)
    def test_filters(self):
        response = self.client.get(reverse('django-admindocs-filters'))
        # The builtin filter group exists
        self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
        # A builtin filter exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(
    ROOT_URLCONF='admin_views.urls',
    TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    }],
    USE_I18N=False,
)
class ValidXHTMLTests(TestCase):
    """With i18n disabled, no empty language attributes leak into the markup."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_lang_name_present(self):
        app_index_url = reverse('admin:app_list', args=('admin_views',))
        response = self.client.get(app_index_url)
        # Neither the HTML nor the XML language attribute may render empty.
        self.assertNotContains(response, ' lang=""')
        self.assertNotContains(response, ' xml:lang=""')
@override_settings(ROOT_URLCONF='admin_views.urls', USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
class DateHierarchyTests(TestCase):
    """Drill-down links produced by ModelAdmin.date_hierarchy."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def assert_non_localized_year(self, response, year):
        """
        The year is not localized with USE_THOUSAND_SEPARATOR (#15234).
        """
        self.assertNotContains(response, formats.number_format(year))

    def assert_contains_year_link(self, response, date):
        self.assertContains(response, '?release_date__year=%d"' % (date.year,))

    def assert_contains_month_link(self, response, date):
        # Querystring separators in rendered hrefs are HTML-escaped, so the
        # expected markup uses "&amp;" (the unescaped form had been checked
        # before, which cannot appear in the template output).
        self.assertContains(
            response, '?release_date__month=%d&amp;release_date__year=%d"' % (
                date.month, date.year))

    def assert_contains_day_link(self, response, date):
        # Same "&amp;" escaping note as assert_contains_month_link.
        self.assertContains(
            response, '?release_date__day=%d&amp;'
            'release_date__month=%d&amp;release_date__year=%d"' % (
                date.day, date.month, date.year))

    def test_empty(self):
        """
        No date hierarchy links display with empty changelist.
        """
        response = self.client.get(
            reverse('admin:admin_views_podcast_changelist'))
        self.assertNotContains(response, 'release_date__year=')
        self.assertNotContains(response, 'release_date__month=')
        self.assertNotContains(response, 'release_date__day=')

    def test_single(self):
        """
        Single day-level date hierarchy appears for single object.
        """
        DATE = datetime.date(2000, 6, 30)
        Podcast.objects.create(release_date=DATE)
        url = reverse('admin:admin_views_podcast_changelist')
        response = self.client.get(url)
        self.assert_contains_day_link(response, DATE)
        self.assert_non_localized_year(response, 2000)

    def test_within_month(self):
        """
        day-level links appear for changelist within single month.
        """
        DATES = (datetime.date(2000, 6, 30),
                 datetime.date(2000, 6, 15),
                 datetime.date(2000, 6, 3))
        for date in DATES:
            Podcast.objects.create(release_date=date)
        url = reverse('admin:admin_views_podcast_changelist')
        response = self.client.get(url)
        for date in DATES:
            self.assert_contains_day_link(response, date)
        self.assert_non_localized_year(response, 2000)

    def test_within_year(self):
        """
        month-level links appear for changelist within single year.
        """
        DATES = (datetime.date(2000, 1, 30),
                 datetime.date(2000, 3, 15),
                 datetime.date(2000, 5, 3))
        for date in DATES:
            Podcast.objects.create(release_date=date)
        url = reverse('admin:admin_views_podcast_changelist')
        response = self.client.get(url)
        # no day-level links
        self.assertNotContains(response, 'release_date__day=')
        for date in DATES:
            self.assert_contains_month_link(response, date)
        self.assert_non_localized_year(response, 2000)

    def test_multiple_years(self):
        """
        year-level links appear for year-spanning changelist.
        """
        DATES = (datetime.date(2001, 1, 30),
                 datetime.date(2003, 3, 15),
                 datetime.date(2005, 5, 3))
        for date in DATES:
            Podcast.objects.create(release_date=date)
        response = self.client.get(
            reverse('admin:admin_views_podcast_changelist'))
        # no day/month-level links
        self.assertNotContains(response, 'release_date__day=')
        self.assertNotContains(response, 'release_date__month=')
        for date in DATES:
            self.assert_contains_year_link(response, date)
        # and make sure GET parameters still behave correctly
        # (these are real query strings sent to the server, so they use a
        # plain "&", unlike the escaped hrefs asserted above)
        for date in DATES:
            url = '%s?release_date__year=%d' % (
                reverse('admin:admin_views_podcast_changelist'),
                date.year)
            response = self.client.get(url)
            self.assert_contains_month_link(response, date)
            self.assert_non_localized_year(response, 2000)
            self.assert_non_localized_year(response, 2003)
            self.assert_non_localized_year(response, 2005)
            url = '%s?release_date__year=%d&release_date__month=%d' % (
                reverse('admin:admin_views_podcast_changelist'),
                date.year, date.month)
            response = self.client.get(url)
            self.assert_contains_day_link(response, date)
            self.assert_non_localized_year(response, 2000)
            self.assert_non_localized_year(response, 2003)
            self.assert_non_localized_year(response, 2005)

    def test_related_field(self):
        # Only years with at least one Answer produce a drill-down link on
        # the Answer changelist (date_hierarchy spans the FK to Question).
        questions_data = (
            # (posted data, number of answers),
            (datetime.date(2001, 1, 30), 0),
            (datetime.date(2003, 3, 15), 1),
            (datetime.date(2005, 5, 3), 2),
        )
        for date, answer_count in questions_data:
            question = Question.objects.create(posted=date)
            for i in range(answer_count):
                question.answer_set.create()
        response = self.client.get(reverse('admin:admin_views_answer_changelist'))
        for date, answer_count in questions_data:
            link = '?question__posted__year=%d"' % (date.year,)
            if answer_count > 0:
                self.assertContains(response, link)
            else:
                self.assertNotContains(response, link)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomSaveRelatedTests(TestCase):
    """
    One can easily customize the way related objects are saved.
    Refs #16115.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_should_be_able_to_edit_related_objects_on_add_view(self):
        form_data = {
            'child_set-TOTAL_FORMS': '3',
            'child_set-INITIAL_FORMS': '0',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-1-name': 'Catherine',
        }
        self.client.post(reverse('admin:admin_views_parent_add'), form_data)
        self.assertEqual(1, Parent.objects.count())
        self.assertEqual(2, Child.objects.count())
        # The customized save logic appends the parent's surname to each child.
        saved_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], saved_names)

    def test_should_be_able_to_edit_related_objects_on_change_view(self):
        parent = Parent.objects.create(name='Josh Stone')
        paul = Child.objects.create(parent=parent, name='Paul')
        catherine = Child.objects.create(parent=parent, name='Catherine')
        form_data = {
            'child_set-TOTAL_FORMS': '5',
            'child_set-INITIAL_FORMS': '2',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-0-id': paul.id,
            'child_set-1-name': 'Catherine',
            'child_set-1-id': catherine.id,
        }
        self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), form_data)
        saved_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], saved_names)

    def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
        parent = Parent.objects.create(name='Josh Rock')
        Child.objects.create(parent=parent, name='Paul')
        Child.objects.create(parent=parent, name='Catherine')
        form_data = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': parent.id,
            'form-0-name': 'Josh Stone',
            '_save': 'Save'
        }
        self.client.post(reverse('admin:admin_views_parent_changelist'), form_data)
        saved_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], saved_names)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewLogoutTests(TestCase):
    """Behaviour of the admin logout view for logged-in and anonymous users."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def test_logout(self):
        # A logged-in user gets the "logged out" page directly.
        self.client.force_login(self.superuser)
        logout_url = reverse('admin:logout')
        response = self.client.get(logout_url)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'registration/logged_out.html')
        self.assertEqual(response.request['PATH_INFO'], logout_url)
        self.assertFalse(response.context['has_permission'])
        # The user-tools block must not be rendered on the logged-out page.
        self.assertNotContains(response, 'user-tools')

    def test_client_logout_url_can_be_used_to_login(self):
        logout_url = reverse('admin:logout')
        # An anonymous request to the logout URL redirects to the login page.
        response = self.client.get(logout_url)
        self.assertEqual(response.status_code, 302)
        # Following the redirect lands on the admin login form, whose hidden
        # "next" field points back at the admin index.
        response = self.client.get(logout_url, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.request['PATH_INFO'], reverse('admin:login'))
        self.assertContains(response, '<input type="hidden" name="next" value="%s" />' % reverse('admin:index'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminUserMessageTest(TestCase):
    """Messages emitted by admin actions render with the expected level/tags."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def send_message(self, level):
        """
        Trigger the dummy 'message_<level>' action and check that a message
        of that level appears in the rendered response.
        """
        post_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_%s' % level,
            'index': 0,
        }
        changelist_url = reverse('admin:admin_views_usermessenger_changelist')
        response = self.client.post(changelist_url, post_data, follow=True)
        expected = '<li class="%s">Test %s</li>' % (level, level)
        self.assertContains(response, expected, html=True)

    @override_settings(MESSAGE_LEVEL=10)  # Set to DEBUG for this request
    def test_message_debug(self):
        self.send_message('debug')

    def test_message_info(self):
        self.send_message('info')

    def test_message_success(self):
        self.send_message('success')

    def test_message_warning(self):
        self.send_message('warning')

    def test_message_error(self):
        self.send_message('error')

    def test_message_extra_tags(self):
        post_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_extra_tags',
            'index': 0,
        }
        changelist_url = reverse('admin:admin_views_usermessenger_changelist')
        response = self.client.post(changelist_url, post_data, follow=True)
        # Extra tags are rendered alongside the level class.
        self.assertContains(response, '<li class="extra_tag info">Test tags</li>', html=True)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminKeepChangeListFiltersTests(TestCase):
    """
    Changelist filters are preserved (via the _changelist_filters querystring
    parameter) across the add/change/delete/history views and their redirects.
    """
    admin_site = site

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')

    def setUp(self):
        self.client.force_login(self.superuser)

    def assertURLEqual(self, url1, url2):
        """
        Assert that two URLs are equal despite the ordering
        of their querystring. Refs #22360.
        """
        parsed_url1 = urlparse(url1)
        path1 = parsed_url1.path
        parsed_qs1 = dict(parse_qsl(parsed_url1.query))
        parsed_url2 = urlparse(url2)
        path2 = parsed_url2.path
        parsed_qs2 = dict(parse_qsl(parsed_url2.query))
        # _changelist_filters is itself an encoded querystring; decode it so
        # its inner parameter order doesn't affect equality either.
        for parsed_qs in [parsed_qs1, parsed_qs2]:
            if '_changelist_filters' in parsed_qs:
                changelist_filters = parsed_qs['_changelist_filters']
                parsed_filters = dict(parse_qsl(changelist_filters))
                parsed_qs['_changelist_filters'] = parsed_filters
        self.assertEqual(path1, path2)
        self.assertEqual(parsed_qs1, parsed_qs2)

    def test_assert_url_equal(self):
        # Self-test of the assertURLEqual helper above.
        # Test equality.
        change_user_url = reverse('admin:auth_user_change', args=(self.joepublicuser.pk,))
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                change_user_url
            ),
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                change_user_url
            )
        )
        # Test inequality.
        with self.assertRaises(AssertionError):
            self.assertURLEqual(
                'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                    change_user_url
                ),
                'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format(
                    change_user_url
                )
            )
        # Ignore scheme and host.
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                change_user_url
            ),
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
        )
        # Ignore ordering of querystring.
        self.assertURLEqual(
            '{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')),
            '{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist'))
        )
        # Ignore ordering of _changelist_filters.
        self.assertURLEqual(
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
            '{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url)
        )

    # --- URL-building helpers shared by the view tests below. ---

    def get_changelist_filters(self):
        # The filter set applied on the changelist in every scenario.
        return {
            'is_superuser__exact': 0,
            'is_staff__exact': 0,
        }

    def get_changelist_filters_querystring(self):
        return urlencode(self.get_changelist_filters())

    def get_preserved_filters_querystring(self):
        # How the filters are carried on non-changelist views.
        return urlencode({
            '_changelist_filters': self.get_changelist_filters_querystring()
        })

    def get_sample_user_id(self):
        return self.joepublicuser.pk

    def get_changelist_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_changelist',
                    current_app=self.admin_site.name),
            self.get_changelist_filters_querystring(),
        )

    def get_add_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_add',
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_change_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_change', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_history_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_history', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_delete_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_delete', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def test_changelist_view(self):
        response = self.client.get(self.get_changelist_url())
        self.assertEqual(response.status_code, 200)
        # Check the `change_view` link has the correct querystring.
        detail_link = re.search(
            '<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
            force_text(response.content)
        )
        self.assertURLEqual(detail_link.group(1), self.get_change_url())

    def test_change_view(self):
        # Get the `change_view`.
        response = self.client.get(self.get_change_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        # Check the history link.
        history_link = re.search(
            '<a href="(.*?)" class="historylink">History</a>',
            force_text(response.content)
        )
        self.assertURLEqual(history_link.group(1), self.get_history_url())
        # Check the delete link.
        delete_link = re.search(
            '<a href="(.*?)" class="deletelink">Delete</a>',
            force_text(response.content)
        )
        self.assertURLEqual(delete_link.group(1), self.get_delete_url())
        # Test redirect on "Save".
        post_data = {
            'username': 'joepublic',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
        }
        post_data['_save'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['_continue'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url()
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['_addanother'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_add_view(self):
        # Get the `add_view`.
        response = self.client.get(self.get_add_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        post_data = {
            'username': 'dummy',
            'password1': 'test',
            'password2': 'test',
        }
        # Test redirect on "Save".
        post_data['_save'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.get(username='dummy').pk)
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['username'] = 'dummy2'
        post_data['_continue'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.get(username='dummy2').pk)
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['username'] = 'dummy3'
        post_data['_addanother'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_delete_view(self):
        # Test redirect on "Delete".
        response = self.client.post(self.get_delete_url(), {'post': 'yes'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )

    def test_url_prefix(self):
        # add_preserved_filters() must respect a non-root script prefix.
        context = {
            'preserved_filters': self.get_preserved_filters_querystring(),
            'opts': User._meta,
        }
        url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
        self.assertURLEqual(
            self.get_changelist_url(),
            add_preserved_filters(context, url),
        )
        with override_script_prefix('/prefix/'):
            url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
            self.assertURLEqual(
                self.get_changelist_url(),
                add_preserved_filters(context, url),
            )
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
    """Re-run the filter-preservation suite against a namespaced admin site."""
    admin_site = site2
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestLabelVisibility(TestCase):
    """ #11277 -Labels of hidden fields in admin were not hidden. """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_all_fields_visible(self):
        response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add'))
        self.assert_fieldline_visible(response)
        self.assert_field_visible(response, 'first')
        self.assert_field_visible(response, 'second')

    def test_all_fields_hidden(self):
        response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add'))
        self.assert_fieldline_hidden(response)
        self.assert_field_hidden(response, 'first')
        self.assert_field_hidden(response, 'second')

    def test_mixin(self):
        # One hidden and one visible field: the row stays visible.
        response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add'))
        self.assert_fieldline_visible(response)
        self.assert_field_hidden(response, 'first')
        self.assert_field_visible(response, 'second')

    # Assertion helpers keyed on the CSS classes the admin emits per field.

    def assert_field_visible(self, response, field_name):
        self.assertContains(response, '<div class="field-box field-%s">' % field_name)

    def assert_field_hidden(self, response, field_name):
        self.assertContains(response, '<div class="field-box field-%s hidden">' % field_name)

    def assert_fieldline_visible(self, response):
        self.assertContains(response, '<div class="form-row field-first field-second">')

    def assert_fieldline_hidden(self, response):
        self.assertContains(response, '<div class="form-row hidden')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewOnSiteTests(TestCase):
    """
    Tests for ModelAdmin.view_on_site (boolean / callable) and for
    inline+parent form validation interplay (refs #20522).
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = State.objects.create(name='New York')
        cls.s2 = State.objects.create(name='Illinois')
        cls.s3 = State.objects.create(name='California')
        cls.c1 = City.objects.create(state=cls.s1, name='New York')
        cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
        cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
        cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
        cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
        cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
        cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
        cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
        cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
        cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
        cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
        cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_add_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data.
        Also, assertFormError() and assertFormsetError() is usable for admin
        forms and formsets.
        """
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test1",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": "",
                     "dependentchild_set-0-family_name": "Test2"}
        response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'),
                                    post_data)
        self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.'])
        msg = "The form 'adminform' in context 0 does not contain the non-field error 'Error'"
        with self.assertRaisesMessage(AssertionError, msg):
            self.assertFormError(response, 'adminform', None, ['Error'])
        self.assertFormsetError(
            response, 'inline_admin_formset', 0, None,
            ['Children must share a family name with their parents in this contrived test case']
        )
        msg = "The formset 'inline_admin_formset' in context 10 does not contain any non-form errors."
        with self.assertRaisesMessage(AssertionError, msg):
            self.assertFormsetError(response, 'inline_admin_formset', None, None, ['Error'])

    def test_change_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data
        """
        pwdc = ParentWithDependentChildren.objects.create(some_required_info=6,
                                                          family_name="Test1")
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test2",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": str(pwdc.id),
                     "dependentchild_set-0-family_name": "Test1"}
        response = self.client.post(
            reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data
        )
        self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.'])
        self.assertFormsetError(
            response, 'inline_admin_formset', 0, None,
            ['Children must share a family name with their parents in this contrived test case']
        )

    def test_check(self):
        "The view_on_site value is either a boolean or a callable"
        try:
            admin = CityAdmin(City, AdminSite())
            CityAdmin.view_on_site = True
            self.assertEqual(admin.check(), [])
            CityAdmin.view_on_site = False
            self.assertEqual(admin.check(), [])
            CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
            self.assertEqual(admin.check(), [])
            # Any non-bool, non-callable value must raise check E025.
            CityAdmin.view_on_site = []
            self.assertEqual(admin.check(), [
                Error(
                    "The value of 'view_on_site' must be a callable or a boolean value.",
                    obj=CityAdmin,
                    id='admin.E025',
                ),
            ])
        finally:
            # Restore the original values for the benefit of other tests.
            CityAdmin.view_on_site = True

    def test_false(self):
        "The 'View on site' button is not displayed if view_on_site is False"
        response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
        self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))

    def test_true(self):
        "The default behavior is followed if view_on_site is True"
        response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(City).pk
        self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))

    def test_callable(self):
        "The right link is displayed if view_on_site is a callable"
        response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,)))
        self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name))

    def test_missing_get_absolute_url(self):
        "None is returned if model doesn't have get_absolute_url"
        model_admin = ModelAdmin(Worker, None)
        self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
@override_settings(ROOT_URLCONF='admin_views.urls')
class InlineAdminViewOnSiteTest(TestCase):
    """Same view_on_site checks as above, but for inline admins."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = State.objects.create(name='New York')
        cls.s2 = State.objects.create(name='Illinois')
        cls.s3 = State.objects.create(name='California')
        cls.c1 = City.objects.create(state=cls.s1, name='New York')
        cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
        cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
        cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
        cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
        cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
        cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
        cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
        cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
        cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
        cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
        cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_false(self):
        "The 'View on site' button is not displayed if view_on_site is False"
        response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(City).pk
        self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))

    def test_true(self):
        "The 'View on site' button is displayed if view_on_site is True"
        response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
        self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk)))

    def test_callable(self):
        "The right link is displayed if view_on_site is a callable"
        response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
        self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name))
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestETagWithAdminView(SimpleTestCase):
    # The admin is compatible with ETags (#16003).

    def test_admin(self):
        # Anonymous request: the admin index redirects to login either way;
        # the ETag header must track the USE_ETAGS setting.
        with self.settings(USE_ETAGS=False):
            response = self.client.get(reverse('admin:index'))
            self.assertEqual(response.status_code, 302)
            self.assertFalse(response.has_header('ETag'))
        # USE_ETAGS is deprecated, hence the ignore_warnings.
        with self.settings(USE_ETAGS=True), ignore_warnings(category=RemovedInDjango21Warning):
            response = self.client.get(reverse('admin:index'))
            self.assertEqual(response.status_code, 302)
            self.assertTrue(response.has_header('ETag'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class GetFormsetsWithInlinesArgumentTest(TestCase):
    """
    #23934 - When adding a new model instance in the admin, the 'obj' argument
    of get_formsets_with_inlines() should be None. When changing, it should be
    equal to the existing model instance.
    The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
    if obj is not None during add_view or obj is None during change_view.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_explicitly_provided_pk(self):
        # A 302 (not a 500) means the checking admin raised no exception.
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)
        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)

    def test_implicitly_generated_pk(self):
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)
        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)
| bsd-3-clause |
kaiyou/docker-py | docker/transport/npipesocket.py | 13 | 5857 | import functools
import io
import six
import win32file
import win32pipe
# Win32 constants not exposed by pywin32.
cERROR_PIPE_BUSY = 0xe7  # ERROR_PIPE_BUSY (231): all pipe instances are busy.
cSECURITY_SQOS_PRESENT = 0x100000  # SECURITY_SQOS_PRESENT CreateFile flag.
cSECURITY_ANONYMOUS = 0  # Impersonation level: server may not identify client.

RETRY_WAIT_TIMEOUT = 10000  # ms to wait for a free pipe instance before retrying.
def check_closed(f):
    """Decorate a socket method so it refuses to run once the socket is closed.

    The wrapped method raises ``RuntimeError`` when ``self._closed`` is set;
    otherwise it delegates to the original method unchanged.
    """
    @functools.wraps(f)
    def guarded(self, *args, **kwargs):
        if not self._closed:
            return f(self, *args, **kwargs)
        raise RuntimeError(
            'Can not reuse socket after connection was closed.'
        )
    return guarded
class NpipeSocket(object):
    """ Partial implementation of the socket API over windows named pipes.
    This implementation is only designed to be used as a client socket,
    and server-specific methods (bind, listen, accept...) are not
    implemented.
    """

    def __init__(self, handle=None):
        # Timeout in the units expected by WaitNamedPipe (milliseconds or
        # one of the NMPWAIT_* sentinels); see settimeout() below.
        self._timeout = win32pipe.NMPWAIT_USE_DEFAULT_WAIT
        self._handle = handle
        self._closed = False

    def accept(self):
        raise NotImplementedError()

    def bind(self, address):
        raise NotImplementedError()

    def close(self):
        self._handle.Close()
        self._closed = True

    @check_closed
    def connect(self, address):
        # Block until a pipe instance is available (up to self._timeout),
        # then open it for duplex I/O.
        win32pipe.WaitNamedPipe(address, self._timeout)
        try:
            handle = win32file.CreateFile(
                address,
                win32file.GENERIC_READ | win32file.GENERIC_WRITE,
                0,
                None,
                win32file.OPEN_EXISTING,
                cSECURITY_ANONYMOUS | cSECURITY_SQOS_PRESENT,
                0
            )
        except win32pipe.error as e:
            # See Remarks:
            # https://msdn.microsoft.com/en-us/library/aa365800.aspx
            if e.winerror == cERROR_PIPE_BUSY:
                # Another program or thread has grabbed our pipe instance
                # before we got to it. Wait for availability and attempt to
                # connect again.
                win32pipe.WaitNamedPipe(address, RETRY_WAIT_TIMEOUT)
                return self.connect(address)
            raise e
        self.flags = win32pipe.GetNamedPipeInfo(handle)[0]
        self._handle = handle
        self._address = address

    @check_closed
    def connect_ex(self, address):
        # Unlike socket.connect_ex, errors propagate as exceptions rather
        # than being returned as error codes.
        return self.connect(address)

    @check_closed
    def detach(self):
        # Mark closed without closing the handle; caller owns it afterwards.
        self._closed = True
        return self._handle

    @check_closed
    def dup(self):
        # NOTE(review): this shares the underlying handle rather than
        # duplicating it, so closing either socket closes both — confirm
        # this is intentional.
        return NpipeSocket(self._handle)

    @check_closed
    def fileno(self):
        return int(self._handle)

    def getpeername(self):
        return self._address

    def getsockname(self):
        return self._address

    def getsockopt(self, level, optname, buflen=None):
        raise NotImplementedError()

    def ioctl(self, control, option):
        raise NotImplementedError()

    def listen(self, backlog):
        raise NotImplementedError()

    def makefile(self, mode=None, bufsize=None):
        # Only binary read mode is supported ('r', 'rb', ...).
        if mode.strip('b') != 'r':
            raise NotImplementedError()
        rawio = NpipeFileIOBase(self)
        if bufsize is None or bufsize <= 0:
            bufsize = io.DEFAULT_BUFFER_SIZE
        return io.BufferedReader(rawio, buffer_size=bufsize)

    @check_closed
    def recv(self, bufsize, flags=0):
        # flags is accepted for socket API compatibility but ignored.
        err, data = win32file.ReadFile(self._handle, bufsize)
        return data

    @check_closed
    def recvfrom(self, bufsize, flags=0):
        data = self.recv(bufsize, flags)
        return (data, self._address)

    @check_closed
    def recvfrom_into(self, buf, nbytes=0, flags=0):
        return self.recv_into(buf, nbytes, flags), self._address

    @check_closed
    def recv_into(self, buf, nbytes=0):
        if six.PY2:
            return self._recv_into_py2(buf, nbytes)
        readbuf = buf
        if not isinstance(buf, memoryview):
            readbuf = memoryview(buf)
        # ReadFile writes directly into the (sliced) memoryview.
        err, data = win32file.ReadFile(
            self._handle,
            readbuf[:nbytes] if nbytes else readbuf
        )
        return len(data)

    def _recv_into_py2(self, buf, nbytes):
        # Python 2 fallback: read into a fresh buffer, then copy, because
        # memoryview-based in-place reads aren't usable there.
        err, data = win32file.ReadFile(self._handle, nbytes or len(buf))
        n = len(data)
        buf[:n] = data
        return n

    @check_closed
    def send(self, string, flags=0):
        err, nbytes = win32file.WriteFile(self._handle, string)
        return nbytes

    @check_closed
    def sendall(self, string, flags=0):
        # NOTE(review): assumes WriteFile on a pipe writes the full buffer;
        # no loop over partial writes here — confirm against pipe semantics.
        return self.send(string, flags)

    @check_closed
    def sendto(self, string, address):
        self.connect(address)
        return self.send(string)

    def setblocking(self, flag):
        if flag:
            return self.settimeout(None)
        return self.settimeout(0)

    def settimeout(self, value):
        if value is None:
            # Blocking mode
            self._timeout = win32pipe.NMPWAIT_WAIT_FOREVER
        elif not isinstance(value, (float, int)) or value < 0:
            raise ValueError('Timeout value out of range')
        elif value == 0:
            # Non-blocking mode
            self._timeout = win32pipe.NMPWAIT_NO_WAIT
        else:
            # Timeout mode - Value converted to milliseconds
            self._timeout = value * 1000

    def gettimeout(self):
        return self._timeout

    def setsockopt(self, level, optname, value):
        raise NotImplementedError()

    @check_closed
    def shutdown(self, how):
        # Named pipes have no half-close; any shutdown closes the socket.
        return self.close()
class NpipeFileIOBase(io.RawIOBase):
    """Read-only raw IO adapter over an :class:`NpipeSocket`.

    Lets the pipe socket be wrapped in :class:`io.BufferedReader` (see
    ``NpipeSocket.makefile``). Only reading is supported; the stream is
    neither seekable nor writable.
    """

    def __init__(self, npipe_socket):
        # The underlying pipe socket; dropped on close().
        self.sock = npipe_socket

    def readable(self):
        """Always readable while open."""
        return True

    def writable(self):
        """Writing is not supported through this adapter."""
        return False

    def seekable(self):
        """Pipes have no notion of position."""
        return False

    def isatty(self):
        """A named pipe is never a terminal."""
        return False

    def readinto(self, buf):
        """Fill *buf* from the pipe; returns the number of bytes read."""
        return self.sock.recv_into(buf)

    def fileno(self):
        """Expose the pipe handle as an integer descriptor."""
        return self.sock.fileno()

    def close(self):
        """Close the raw stream and release the socket reference."""
        super(NpipeFileIOBase, self).close()
        self.sock = None
| apache-2.0 |
zhjunlang/kbengine | kbe/res/scripts/common/Lib/idlelib/ReplaceDialog.py | 76 | 6642 | from tkinter import *
from idlelib import SearchEngine
from idlelib.SearchDialogBase import SearchDialogBase
import re
def replace(text):
    """Open the shared ReplaceDialog for the Tk root owning *text*.

    A single dialog instance is cached on the root's SearchEngine the
    first time it is needed, then reused for subsequent invocations.
    """
    root = text._root()
    engine = SearchEngine.get(root)
    if not hasattr(engine, "_replacedialog"):
        engine._replacedialog = ReplaceDialog(root, engine)
    engine._replacedialog.open(text)
class ReplaceDialog(SearchDialogBase):
    """Search-and-replace dialog for a Tk Text widget.

    Extends SearchDialogBase with a "Replace with" entry and Find /
    Replace / Replace+Find / Replace All command buttons.
    """

    title = "Replace Dialog"
    icon = "Replace"

    def __init__(self, root, engine):
        SearchDialogBase.__init__(self, root, engine)
        # Holds the replacement text typed by the user.
        self.replvar = StringVar(root)

    def open(self, text):
        """Display the dialog, highlighting the current selection (if any)."""
        SearchDialogBase.open(self, text)
        try:
            first = text.index("sel.first")
        except TclError:
            first = None
        try:
            last = text.index("sel.last")
        except TclError:
            last = None
        first = first or text.index("insert")
        last = last or first
        self.show_hit(first, last)
        self.ok = 1

    def create_entries(self):
        """Add the "Replace with" entry below the base-class search entry."""
        SearchDialogBase.create_entries(self)
        self.replent = self.make_entry("Replace with:", self.replvar)[0]

    def create_command_buttons(self):
        SearchDialogBase.create_command_buttons(self)
        self.make_button("Find", self.find_it)
        self.make_button("Replace", self.replace_it)
        self.make_button("Replace+Find", self.default_command, 1)
        self.make_button("Replace All", self.replace_all)

    def find_it(self, event=None):
        self.do_find(0)

    def replace_it(self, event=None):
        if self.do_find(self.ok):
            self.do_replace()

    def default_command(self, event=None):
        """Replace the current hit, then find the next one."""
        if self.do_find(self.ok):
            if self.do_replace():  # Only find next match if replace succeeded.
                # A bad re can cause a it to fail.
                self.do_find(0)

    def _replace_expand(self, m, repl):
        """ Helper function for expanding a regular expression
        in the replace field, if needed. """
        if self.engine.isre():
            try:
                new = m.expand(repl)
            except re.error:
                self.engine.report_error(repl, 'Invalid Replace Expression')
                new = None
        else:
            new = repl
        return new

    def replace_all(self, event=None):
        """Replace every match in the text, top to bottom."""
        prog = self.engine.getprog()
        if not prog:
            return
        repl = self.replvar.get()
        text = self.text
        res = self.engine.search_text(text, prog)
        if not res:
            text.bell()
            return
        text.tag_remove("sel", "1.0", "end")
        text.tag_remove("hit", "1.0", "end")
        line = res[0]
        col = res[1].start()
        if self.engine.iswrap():
            line = 1
            col = 0
        ok = 1
        first = last = None
        # XXX ought to replace circular instead of top-to-bottom when wrapping
        text.undo_block_start()
        while 1:
            res = self.engine.search_forward(text, prog, line, col, 0, ok)
            if not res:
                break
            line, m = res
            orig = m.group()
            new = self._replace_expand(m, repl)
            if new is None:
                break
            i, j = m.span()
            first = "%d.%d" % (line, i)
            last = "%d.%d" % (line, j)
            if new == orig:
                # No textual change; just move past the match.
                text.mark_set("insert", last)
            else:
                text.mark_set("insert", first)
                if first != last:
                    text.delete(first, last)
                if new:
                    text.insert(first, new)
            col = i + len(new)
            ok = 0
        text.undo_block_stop()
        if first and last:
            self.show_hit(first, last)
        self.close()

    def do_find(self, ok=0):
        """Find the next match and highlight it. Return True on success."""
        if not self.engine.getprog():
            return False
        text = self.text
        res = self.engine.search_text(text, None, ok)
        if not res:
            text.bell()
            return False
        line, m = res
        i, j = m.span()
        first = "%d.%d" % (line, i)
        last = "%d.%d" % (line, j)
        self.show_hit(first, last)
        self.ok = 1
        return True

    def do_replace(self):
        """Replace the currently highlighted match. Return True on success."""
        prog = self.engine.getprog()
        if not prog:
            return False
        text = self.text
        try:
            first = pos = text.index("sel.first")
            last = text.index("sel.last")
        except TclError:
            pos = None
        if not pos:
            first = last = pos = text.index("insert")
        line, col = SearchEngine.get_line_col(pos)
        chars = text.get("%d.0" % line, "%d.0" % (line+1))
        m = prog.match(chars, col)
        # Bug fix: this previously re-tested `prog` (always truthy here)
        # instead of the match, so a failed match crashed with
        # AttributeError further down.
        if not m:
            return False
        new = self._replace_expand(m, self.replvar.get())
        if new is None:
            return False
        text.mark_set("insert", first)
        text.undo_block_start()
        if m.group():
            text.delete(first, last)
        if new:
            text.insert(first, new)
        text.undo_block_stop()
        self.show_hit(first, text.index("insert"))
        self.ok = 0
        return True

    def show_hit(self, first, last):
        """Highlight the span [first, last) with the "hit" tag and select it."""
        text = self.text
        text.mark_set("insert", first)
        text.tag_remove("sel", "1.0", "end")
        text.tag_add("sel", first, last)
        text.tag_remove("hit", "1.0", "end")
        if first == last:
            text.tag_add("hit", first)
        else:
            text.tag_add("hit", first, last)
        text.see("insert")
        text.update_idletasks()

    def close(self, event=None):
        SearchDialogBase.close(self, event)
        self.text.tag_remove("hit", "1.0", "end")
def _replace_dialog(parent):
    # Human-verified test (htest): build a small window with sample text and
    # a button that opens the replace dialog over it.
    root = Tk()
    root.title("Test ReplaceDialog")
    width, height, x, y = list(map(int, re.split('[x+]', parent.geometry())))
    root.geometry("+%d+%d"%(x, y + 150))

    # mock undo delegator methods
    def undo_block_start():
        pass

    def undo_block_stop():
        pass

    text = Text(root)
    text.undo_block_start = undo_block_start
    text.undo_block_stop = undo_block_stop
    text.pack()
    text.insert("insert","This is a sample string.\n"*10)

    def show_replace():
        # Select everything so the dialog opens with a highlighted range.
        text.tag_add(SEL, "1.0", END)
        replace(text)
        text.tag_remove(SEL, "1.0", END)

    button = Button(root, text="Replace", command=show_replace)
    button.pack()
if __name__ == '__main__':
    # Run the manual test dialog under IDLE's human-verified test harness.
    from idlelib.idle_test.htest import run
    run(_replace_dialog)
| lgpl-3.0 |
apixandru/intellij-community | python/lib/Lib/encodings/ptcp154.py | 647 | 8950 | """ Python Character Mapping Codec generated from 'PTCP154.txt' with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless charmap codec backed by the module-level encoding/decoding maps.

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_map)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is context-free, so each chunk encodes independently.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # One byte maps to one character, so chunks can be decoded independently.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_map)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits encode() from Codec; no stream-specific state needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits decode() from Codec; no stream-specific state needed.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record used to register the ptcp154 codec."""
    codec = Codec()
    return codecs.CodecInfo(
        name='ptcp154',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x0496, # CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER
0x0081: 0x0492, # CYRILLIC CAPITAL LETTER GHE WITH STROKE
0x0082: 0x04ee, # CYRILLIC CAPITAL LETTER U WITH MACRON
0x0083: 0x0493, # CYRILLIC SMALL LETTER GHE WITH STROKE
0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
0x0085: 0x2026, # HORIZONTAL ELLIPSIS
0x0086: 0x04b6, # CYRILLIC CAPITAL LETTER CHE WITH DESCENDER
0x0087: 0x04ae, # CYRILLIC CAPITAL LETTER STRAIGHT U
0x0088: 0x04b2, # CYRILLIC CAPITAL LETTER HA WITH DESCENDER
0x0089: 0x04af, # CYRILLIC SMALL LETTER STRAIGHT U
0x008a: 0x04a0, # CYRILLIC CAPITAL LETTER BASHKIR KA
0x008b: 0x04e2, # CYRILLIC CAPITAL LETTER I WITH MACRON
0x008c: 0x04a2, # CYRILLIC CAPITAL LETTER EN WITH DESCENDER
0x008d: 0x049a, # CYRILLIC CAPITAL LETTER KA WITH DESCENDER
0x008e: 0x04ba, # CYRILLIC CAPITAL LETTER SHHA
0x008f: 0x04b8, # CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE
0x0090: 0x0497, # CYRILLIC SMALL LETTER ZHE WITH DESCENDER
0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
0x0095: 0x2022, # BULLET
0x0096: 0x2013, # EN DASH
0x0097: 0x2014, # EM DASH
0x0098: 0x04b3, # CYRILLIC SMALL LETTER HA WITH DESCENDER
0x0099: 0x04b7, # CYRILLIC SMALL LETTER CHE WITH DESCENDER
0x009a: 0x04a1, # CYRILLIC SMALL LETTER BASHKIR KA
0x009b: 0x04e3, # CYRILLIC SMALL LETTER I WITH MACRON
0x009c: 0x04a3, # CYRILLIC SMALL LETTER EN WITH DESCENDER
0x009d: 0x049b, # CYRILLIC SMALL LETTER KA WITH DESCENDER
0x009e: 0x04bb, # CYRILLIC SMALL LETTER SHHA
0x009f: 0x04b9, # CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE
0x00a1: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U (Byelorussian)
0x00a2: 0x045e, # CYRILLIC SMALL LETTER SHORT U (Byelorussian)
0x00a3: 0x0408, # CYRILLIC CAPITAL LETTER JE
0x00a4: 0x04e8, # CYRILLIC CAPITAL LETTER BARRED O
0x00a5: 0x0498, # CYRILLIC CAPITAL LETTER ZE WITH DESCENDER
0x00a6: 0x04b0, # CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE
0x00a8: 0x0401, # CYRILLIC CAPITAL LETTER IO
0x00aa: 0x04d8, # CYRILLIC CAPITAL LETTER SCHWA
0x00ad: 0x04ef, # CYRILLIC SMALL LETTER U WITH MACRON
0x00af: 0x049c, # CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE
0x00b1: 0x04b1, # CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE
0x00b2: 0x0406, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
0x00b3: 0x0456, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
0x00b4: 0x0499, # CYRILLIC SMALL LETTER ZE WITH DESCENDER
0x00b5: 0x04e9, # CYRILLIC SMALL LETTER BARRED O
0x00b8: 0x0451, # CYRILLIC SMALL LETTER IO
0x00b9: 0x2116, # NUMERO SIGN
0x00ba: 0x04d9, # CYRILLIC SMALL LETTER SCHWA
0x00bc: 0x0458, # CYRILLIC SMALL LETTER JE
0x00bd: 0x04aa, # CYRILLIC CAPITAL LETTER ES WITH DESCENDER
0x00be: 0x04ab, # CYRILLIC SMALL LETTER ES WITH DESCENDER
0x00bf: 0x049d, # CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE
0x00c0: 0x0410, # CYRILLIC CAPITAL LETTER A
0x00c1: 0x0411, # CYRILLIC CAPITAL LETTER BE
0x00c2: 0x0412, # CYRILLIC CAPITAL LETTER VE
0x00c3: 0x0413, # CYRILLIC CAPITAL LETTER GHE
0x00c4: 0x0414, # CYRILLIC CAPITAL LETTER DE
0x00c5: 0x0415, # CYRILLIC CAPITAL LETTER IE
0x00c6: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
0x00c7: 0x0417, # CYRILLIC CAPITAL LETTER ZE
0x00c8: 0x0418, # CYRILLIC CAPITAL LETTER I
0x00c9: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
0x00ca: 0x041a, # CYRILLIC CAPITAL LETTER KA
0x00cb: 0x041b, # CYRILLIC CAPITAL LETTER EL
0x00cc: 0x041c, # CYRILLIC CAPITAL LETTER EM
0x00cd: 0x041d, # CYRILLIC CAPITAL LETTER EN
0x00ce: 0x041e, # CYRILLIC CAPITAL LETTER O
0x00cf: 0x041f, # CYRILLIC CAPITAL LETTER PE
0x00d0: 0x0420, # CYRILLIC CAPITAL LETTER ER
0x00d1: 0x0421, # CYRILLIC CAPITAL LETTER ES
0x00d2: 0x0422, # CYRILLIC CAPITAL LETTER TE
0x00d3: 0x0423, # CYRILLIC CAPITAL LETTER U
0x00d4: 0x0424, # CYRILLIC CAPITAL LETTER EF
0x00d5: 0x0425, # CYRILLIC CAPITAL LETTER HA
0x00d6: 0x0426, # CYRILLIC CAPITAL LETTER TSE
0x00d7: 0x0427, # CYRILLIC CAPITAL LETTER CHE
0x00d8: 0x0428, # CYRILLIC CAPITAL LETTER SHA
0x00d9: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
0x00da: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
0x00db: 0x042b, # CYRILLIC CAPITAL LETTER YERU
0x00dc: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
0x00dd: 0x042d, # CYRILLIC CAPITAL LETTER E
0x00de: 0x042e, # CYRILLIC CAPITAL LETTER YU
0x00df: 0x042f, # CYRILLIC CAPITAL LETTER YA
0x00e0: 0x0430, # CYRILLIC SMALL LETTER A
0x00e1: 0x0431, # CYRILLIC SMALL LETTER BE
0x00e2: 0x0432, # CYRILLIC SMALL LETTER VE
0x00e3: 0x0433, # CYRILLIC SMALL LETTER GHE
0x00e4: 0x0434, # CYRILLIC SMALL LETTER DE
0x00e5: 0x0435, # CYRILLIC SMALL LETTER IE
0x00e6: 0x0436, # CYRILLIC SMALL LETTER ZHE
0x00e7: 0x0437, # CYRILLIC SMALL LETTER ZE
0x00e8: 0x0438, # CYRILLIC SMALL LETTER I
0x00e9: 0x0439, # CYRILLIC SMALL LETTER SHORT I
0x00ea: 0x043a, # CYRILLIC SMALL LETTER KA
0x00eb: 0x043b, # CYRILLIC SMALL LETTER EL
0x00ec: 0x043c, # CYRILLIC SMALL LETTER EM
0x00ed: 0x043d, # CYRILLIC SMALL LETTER EN
0x00ee: 0x043e, # CYRILLIC SMALL LETTER O
0x00ef: 0x043f, # CYRILLIC SMALL LETTER PE
0x00f0: 0x0440, # CYRILLIC SMALL LETTER ER
0x00f1: 0x0441, # CYRILLIC SMALL LETTER ES
0x00f2: 0x0442, # CYRILLIC SMALL LETTER TE
0x00f3: 0x0443, # CYRILLIC SMALL LETTER U
0x00f4: 0x0444, # CYRILLIC SMALL LETTER EF
0x00f5: 0x0445, # CYRILLIC SMALL LETTER HA
0x00f6: 0x0446, # CYRILLIC SMALL LETTER TSE
0x00f7: 0x0447, # CYRILLIC SMALL LETTER CHE
0x00f8: 0x0448, # CYRILLIC SMALL LETTER SHA
0x00f9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
0x00fa: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
0x00fb: 0x044b, # CYRILLIC SMALL LETTER YERU
0x00fc: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
0x00fd: 0x044d, # CYRILLIC SMALL LETTER E
0x00fe: 0x044e, # CYRILLIC SMALL LETTER YU
0x00ff: 0x044f, # CYRILLIC SMALL LETTER YA
})
### Encoding Map
encoding_map = codecs.make_encoding_map(decoding_map)
| apache-2.0 |
htwenhe/DJOA | xadmin/plugins/topnav.py | 21 | 2601 |
from django.template import loader
from django.utils.text import capfirst
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.translation import ugettext as _
from xadmin.sites import site
from xadmin.filters import SEARCH_VAR
from xadmin.views import BaseAdminPlugin, CommAdminView
class TopNavPlugin(BaseAdminPlugin):
    """Admin plugin that renders the top navigation bar: a global search
    box and a quick "Add <model>" menu, filtered by user permissions."""

    # Models offered in the global search box; ``None`` means every model
    # registered on the admin site (further filtered to those that declare
    # ``search_fields``).
    global_search_models = None
    # Models offered in the quick-add menu; ``None`` means every registered model.
    global_add_models = None

    def get_context(self, context):
        # This plugin adds nothing to the view context; it only renders blocks.
        return context

    # Block Views
    def block_top_navbar(self, context, nodes):
        """Append the rendered global-search navbar block to ``nodes``."""
        search_models = []
        site_name = self.admin_site.name
        # FIX: compare to None with ``is`` (PEP 8); ``==`` can be hijacked
        # by a custom __eq__ and is never the right identity test.
        if self.global_search_models is None:
            models = self.admin_site._registry.keys()
        else:
            models = self.global_search_models

        for model in models:
            app_label = model._meta.app_label

            # Only offer search on models the user may view and whose
            # admin class declares search_fields.
            if self.has_model_perm(model, "view"):
                info = (app_label, model._meta.model_name)
                if getattr(self.admin_site._registry[model], 'search_fields', None):
                    try:
                        search_models.append({
                            'title': _('Search %s') % capfirst(model._meta.verbose_name_plural),
                            'url': reverse('xadmin:%s_%s_changelist' % info, current_app=site_name),
                            'model': model
                        })
                    except NoReverseMatch:
                        # Model has no changelist URL registered; skip it.
                        pass

        # list.append returns None; the return value is not used by callers.
        return nodes.append(loader.render_to_string('xadmin/blocks/comm.top.topnav.html', {'search_models': search_models, 'search_name': SEARCH_VAR}))

    def block_top_navmenu(self, context, nodes):
        """Append the rendered quick-add menu block to ``nodes``."""
        add_models = []
        site_name = self.admin_site.name
        # FIX: identity comparison with None (was ``== None``).
        if self.global_add_models is None:
            models = self.admin_site._registry.keys()
        else:
            models = self.global_add_models
        for model in models:
            app_label = model._meta.app_label
            if self.has_model_perm(model, "add"):
                info = (app_label, model._meta.model_name)
                try:
                    add_models.append({
                        'title': _('Add %s') % capfirst(model._meta.verbose_name),
                        'url': reverse('xadmin:%s_%s_add' % info, current_app=site_name),
                        'model': model
                    })
                except NoReverseMatch:
                    # Model has no add URL registered; skip it.
                    pass

        nodes.append(
            loader.render_to_string('xadmin/blocks/comm.top.topnav.html', {'add_models': add_models}))
site.register_plugin(TopNavPlugin, CommAdminView)
| mit |
ubic135/odoo-design | addons/report_intrastat/__init__.py | 377 | 1079 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import report_intrastat
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jamiefolsom/edx-platform | common/lib/xmodule/xmodule/tests/test_delay_between_attempts.py | 115 | 12476 | """
Tests the logic of problems with a delay between attempt submissions.
Note that this test file is based off of test_capa_module.py and as
such, uses the same CapaFactory problem setup to test the functionality
of the check_problem method of a capa module when the "delay between quiz
submissions" setting is set to different values
"""
import unittest
import textwrap
import datetime
from mock import Mock
import xmodule
from xmodule.capa_module import CapaModule
from opaque_keys.edx.locations import Location
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from . import get_test_system
from pytz import UTC
class CapaFactoryWithDelay(object):
    """
    Create problem modules class, specialized for delay_between_attempts
    test cases. This factory seems different enough from the one in
    test_capa_module that unifying them is unattractive.
    Removed the unused optional arguments.
    """
    sample_problem_xml = textwrap.dedent("""\
        <?xml version="1.0"?>
        <problem>
            <text>
                <p>What is pi, to two decimal places?</p>
            </text>
        <numericalresponse answer="3.14">
        <textline math="1" size="30"/>
        </numericalresponse>
        </problem>
    """)

    # Monotonically increasing counter so each created problem gets a
    # unique location name (SampleProblem1, SampleProblem2, ...).
    num = 0

    @classmethod
    def next_num(cls):
        """
        Return the next cls number
        """
        cls.num += 1
        return cls.num

    @classmethod
    def input_key(cls, input_num=2):
        """
        Return the input key to use when passing GET parameters
        """
        return "input_" + cls.answer_key(input_num)

    @classmethod
    def answer_key(cls, input_num=2):
        """
        Return the key stored in the capa problem answer dict
        """
        # Key format mirrors the capa module's internal answer-id scheme:
        # "<dashed location>_<input number>_1".
        return (
            "%s_%d_1" % (
                "-".join(['i4x', 'edX', 'capa_test', 'problem', 'SampleProblem%d' % cls.num]),
                input_num,
            )
        )

    @classmethod
    def create(
        cls,
        max_attempts=None,
        attempts=None,
        correct=False,
        last_submission_time=None,
        submission_wait_seconds=None
    ):
        """
        Optional parameters here are cut down to what we actually use vs. the regular CapaFactory.
        """
        location = Location("edX", "capa_test", "run", "problem", "SampleProblem{0}".format(cls.next_num()))
        field_data = {'data': cls.sample_problem_xml}
        # Only set fields that were explicitly requested so the module
        # otherwise keeps its defaults.
        if max_attempts is not None:
            field_data['max_attempts'] = max_attempts
        if last_submission_time is not None:
            field_data['last_submission_time'] = last_submission_time
        if submission_wait_seconds is not None:
            field_data['submission_wait_seconds'] = submission_wait_seconds

        # The descriptor is mocked; only ``weight`` is read by the module.
        descriptor = Mock(weight="1")
        if attempts is not None:
            # converting to int here because I keep putting "0" and "1" in the tests
            # since everything else is a string.
            field_data['attempts'] = int(attempts)

        system = get_test_system()
        system.render_template = Mock(return_value="<div>Test Template HTML</div>")
        module = CapaModule(
            descriptor,
            system,
            DictFieldData(field_data),
            ScopeIds(None, None, location, location),
        )

        if correct:
            # Could set the internal state formally, but here we just jam in the score.
            module.get_score = lambda: {'score': 1, 'total': 1}
        else:
            module.get_score = lambda: {'score': 0, 'total': 1}

        return module
class XModuleQuizAttemptsDelayTest(unittest.TestCase):
    """
    Class to test delay between quiz attempts.
    """

    def create_and_check(self,
                         num_attempts=None,
                         last_submission_time=None,
                         submission_wait_seconds=None,
                         considered_now=None,
                         skip_check_problem=False):
        """Unified create and check code for the tests here."""
        module = CapaFactoryWithDelay.create(
            attempts=num_attempts,
            max_attempts=99,
            last_submission_time=last_submission_time,
            submission_wait_seconds=submission_wait_seconds
        )
        module.done = False
        get_request_dict = {CapaFactoryWithDelay.input_key(): "3.14"}
        if skip_check_problem:
            # Caller only wants the module, without submitting an answer.
            return (module, None)
        if considered_now is not None:
            # Pin "now" so the wait-time arithmetic is deterministic.
            result = module.check_problem(get_request_dict, considered_now)
        else:
            result = module.check_problem(get_request_dict)
        return (module, result)

    def test_first_submission(self):
        # Not attempted yet
        num_attempts = 0

        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=None
        )
        # Successfully submitted and answered
        # Also, the number of attempts should increment by 1
        self.assertEqual(result['success'], 'correct')
        self.assertEqual(module.attempts, num_attempts + 1)

    def test_no_wait_time(self):
        # A zero-second delay must never block a submission.
        num_attempts = 1
        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime.now(UTC),
            submission_wait_seconds=0
        )
        # Successfully submitted and answered
        # Also, the number of attempts should increment by 1
        self.assertEqual(result['success'], 'correct')
        self.assertEqual(module.attempts, num_attempts + 1)

    def test_submit_quiz_in_rapid_succession(self):
        # Already attempted once (just now) and thus has a submitted time
        num_attempts = 1
        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime.now(UTC),
            submission_wait_seconds=123
        )
        # You should get a dialog that tells you to wait
        # Also, the number of attempts should not be incremented
        self.assertRegexpMatches(result['success'], r"You must wait at least.*")
        self.assertEqual(module.attempts, num_attempts)

    def test_submit_quiz_too_soon(self):
        # Already attempted once (just now)
        num_attempts = 1
        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC),
            submission_wait_seconds=180,
            considered_now=datetime.datetime(2013, 12, 6, 0, 18, 36, tzinfo=UTC)
        )
        # You should get a dialog that tells you to wait 2 minutes
        # Also, the number of attempts should not be incremented
        self.assertRegexpMatches(result['success'], r"You must wait at least 3 minutes between submissions. 2 minutes remaining\..*")
        self.assertEqual(module.attempts, num_attempts)

    def test_submit_quiz_1_second_too_soon(self):
        # Already attempted once (just now)
        num_attempts = 1
        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC),
            submission_wait_seconds=180,
            considered_now=datetime.datetime(2013, 12, 6, 0, 20, 35, tzinfo=UTC)
        )
        # You should get a dialog that tells you to wait 1 more second
        # Also, the number of attempts should not be incremented
        self.assertRegexpMatches(result['success'], r"You must wait at least 3 minutes between submissions. 1 second remaining\..*")
        self.assertEqual(module.attempts, num_attempts)

    def test_submit_quiz_as_soon_as_allowed(self):
        # Already attempted once (just now)
        num_attempts = 1
        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC),
            submission_wait_seconds=180,
            considered_now=datetime.datetime(2013, 12, 6, 0, 20, 36, tzinfo=UTC)
        )
        # Successfully submitted and answered
        # Also, the number of attempts should increment by 1
        self.assertEqual(result['success'], 'correct')
        self.assertEqual(module.attempts, num_attempts + 1)

    def test_submit_quiz_after_delay_expired(self):
        # Already attempted once (just now)
        num_attempts = 1
        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC),
            submission_wait_seconds=180,
            considered_now=datetime.datetime(2013, 12, 6, 0, 24, 0, tzinfo=UTC)
        )
        # Successfully submitted and answered
        # Also, the number of attempts should increment by 1
        self.assertEqual(result['success'], 'correct')
        self.assertEqual(module.attempts, num_attempts + 1)

    def test_still_cannot_submit_after_max_attempts(self):
        # Already attempted once (just now) and thus has a submitted time
        num_attempts = 99
        # Regular create_and_check should fail
        with self.assertRaises(xmodule.exceptions.NotFoundError):
            (module, unused_result) = self.create_and_check(
                num_attempts=num_attempts,
                last_submission_time=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC),
                submission_wait_seconds=180,
                considered_now=datetime.datetime(2013, 12, 6, 0, 24, 0, tzinfo=UTC)
            )

        # Now try it without the check_problem
        (module, unused_result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC),
            submission_wait_seconds=180,
            considered_now=datetime.datetime(2013, 12, 6, 0, 24, 0, tzinfo=UTC),
            skip_check_problem=True
        )
        # Expect that number of attempts NOT incremented
        self.assertEqual(module.attempts, num_attempts)

    def test_submit_quiz_with_long_delay(self):
        # Already attempted once (just now)
        num_attempts = 1
        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC),
            submission_wait_seconds=60 * 60 * 2,
            considered_now=datetime.datetime(2013, 12, 6, 2, 15, 35, tzinfo=UTC)
        )
        # You should get a dialog saying 2 minutes 1 second of the 2-hour delay remain
        # Also, the number of attempts should not be incremented
        self.assertRegexpMatches(result['success'], r"You must wait at least 2 hours between submissions. 2 minutes 1 second remaining\..*")
        self.assertEqual(module.attempts, num_attempts)

    def test_submit_quiz_with_involved_pretty_print(self):
        # Already attempted once (just now)
        num_attempts = 1
        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC),
            submission_wait_seconds=60 * 60 * 2 + 63,
            considered_now=datetime.datetime(2013, 12, 6, 1, 15, 40, tzinfo=UTC)
        )
        # You should get a dialog with the fully spelled-out wait and remaining times
        # Also, the number of attempts should not be incremented
        self.assertRegexpMatches(result['success'], r"You must wait at least 2 hours 1 minute 3 seconds between submissions. 1 hour 2 minutes 59 seconds remaining\..*")
        self.assertEqual(module.attempts, num_attempts)

    def test_submit_quiz_with_nonplural_pretty_print(self):
        # Already attempted once (just now)
        num_attempts = 1
        (module, result) = self.create_and_check(
            num_attempts=num_attempts,
            last_submission_time=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC),
            submission_wait_seconds=60,
            considered_now=datetime.datetime(2013, 12, 6, 0, 17, 36, tzinfo=UTC)
        )
        # You should get a dialog using the singular "1 minute", not "1 minutes"
        # Also, the number of attempts should not be incremented
        self.assertRegexpMatches(result['success'], r"You must wait at least 1 minute between submissions. 1 minute remaining\..*")
        self.assertEqual(module.attempts, num_attempts)
| agpl-3.0 |
colinligertwood/odoo | addons/mrp/tests/test_multicompany.py | 374 | 2660 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tests import common
class TestMrpMulticompany(common.TransactionCase):
    """Regression test: stock location defaults must be readable by a
    stock manager belonging to a second company."""

    def setUp(self):
        super(TestMrpMulticompany, self).setUp()
        cr, uid = self.cr, self.uid

        # Usefull models
        self.ir_model_data = self.registry('ir.model.data')
        self.res_users = self.registry('res.users')
        self.stock_location = self.registry('stock.location')

        # Resolve the groups and the second company by XML id.
        group_user_id = self.registry('ir.model.data').xmlid_to_res_id(cr, uid, 'base.group_user')
        group_stock_manager_id = self.registry('ir.model.data').xmlid_to_res_id(cr, uid, 'stock.group_stock_manager')
        company_2_id = self.registry('ir.model.data').xmlid_to_res_id(cr, uid, 'stock.res_company_1')
        # Create a stock-manager user whose main company is company 2.
        self.multicompany_user_id = self.res_users.create(cr, uid,
            {'name': 'multicomp', 'login': 'multicomp',
             'groups_id': [(6, 0, [group_user_id, group_stock_manager_id])],
             'company_id': company_2_id, 'company_ids': [(6,0,[company_2_id])]})

    def test_00_multicompany_user(self):
        """check no error on getting default mrp.production values in multicompany setting"""
        cr, uid, context = self.cr, self.multicompany_user_id, {}
        fields = ['location_src_id', 'location_dest_id']
        # NOTE(review): default_get is queried for 'location_id' while the
        # checked fields list starts with 'location_src_id' — presumably
        # intentional (upstream code), but worth confirming.
        defaults = self.stock_location.default_get(cr, uid, ['location_id', 'location_dest_id', 'type'], context)
        for field in fields:
            if defaults.get(field):
                try:
                    # The multicompany user must pass the record rules on
                    # whatever default location was returned.
                    self.stock_location.check_access_rule(cr, uid, [defaults[field]], 'read', context)
                except Exception, exc:
                    assert False, "unreadable location %s: %s" % (field, exc)
| agpl-3.0 |
ttglennhall/DjangoGirlsTutorial | myvenv/lib/python3.4/site-packages/pip/vcs/__init__.py | 536 | 8748 | """Handles all VCS (version control) support"""
import os
import shutil
from pip.backwardcompat import urlparse, urllib
from pip.log import logger
from pip.util import (display_path, backup_dir, find_command,
rmtree, ask_path_exists)
__all__ = ['vcs', 'get_src_requirement']
class VcsSupport(object):
    """Registry of the version-control backends pip knows about."""

    # Class-level, so every VcsSupport instance shares one registry.
    _registry = {}
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # Register more schemes with urlparse for various version control systems
        urlparse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urlparse, 'uses_fragment', None):
            urlparse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        # Iterating the registry yields backend names.
        return self._registry.__iter__()

    @property
    def backends(self):
        # Registered backend classes.
        return list(self._registry.values())

    @property
    def dirnames(self):
        # Control directory names, e.g. ['.git', '.hg', ...].
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self):
        # Flattened list of every scheme supported by any backend.
        schemes = []
        for backend in self.backends:
            schemes.extend(backend.schemes)
        return schemes

    def register(self, cls):
        """Register a backend class; it must define a ``name`` attribute."""
        if not hasattr(cls, 'name'):
            logger.warn('Cannot register VCS %s' % cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls

    def unregister(self, cls=None, name=None):
        """Remove a backend, identified either by class or by name."""
        if name in self._registry:
            del self._registry[name]
        elif cls in self._registry.values():
            del self._registry[cls.name]
        else:
            logger.warn('Cannot unregister because no class or name given')

    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        # A checkout is recognised by the presence of its control dir.
        for vc_type in self._registry.values():
            path = os.path.join(location, vc_type.dirname)
            if os.path.exists(path):
                return vc_type.name
        return None

    def get_backend(self, name):
        """Return the backend class for ``name`` (case-insensitive), or None."""
        name = name.lower()
        if name in self._registry:
            return self._registry[name]

    def get_backend_from_location(self, location):
        """Return the backend class whose checkout lives at ``location``, or None."""
        vc_type = self.get_backend_name(location)
        if vc_type:
            return self.get_backend(vc_type)
        return None
vcs = VcsSupport()
class VersionControl(object):
    """Base class for a concrete VCS backend (subclassed per VCS)."""

    name = ''     # short backend name, e.g. 'git'
    dirname = ''  # control directory marking a checkout, e.g. '.git'

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        self._cmd = None  # lazily-resolved path to the VCS executable
        super(VersionControl, self).__init__(*args, **kwargs)

    def _filter(self, line):
        # Default output filter: log every line at INFO level.
        return (logger.INFO, line)

    def _is_local_repository(self, repo):
        """
        posix absolute paths start with os.path.sep,
        win32 ones ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        return repo.startswith(os.path.sep) or drive

    @property
    def cmd(self):
        """Absolute path of the VCS executable, resolved once and cached."""
        if self._cmd is not None:
            return self._cmd
        command = find_command(self.name)
        logger.info('Found command %r at %r' % (self.name, command))
        self._cmd = command
        return command

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>, "
            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp")
        assert '+' in self.url, error_message % self.url
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urlparse.urlsplit(url)
        rev = None
        # A trailing "@rev" in the path selects a specific revision.
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any trailing slash.
        """
        return urllib.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def parse_vcs_bundle_file(self, content):
        """
        Takes the contents of the bundled text file that explains how to revert
        the stripped off version control data of the given package and returns
        the URL and revision of it.
        """
        raise NotImplementedError

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.
        """
        # BUG FIX: was ``raise NotImplemented``. NotImplemented is a special
        # constant for rich-comparison methods, not an exception; raising it
        # fails with a TypeError instead of signalling "override me".
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                # Existing checkout of the same VCS: update in place if the
                # URL matches, otherwise ask the user what to do.
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.info('%s in %s exists, and has correct URL (%s)' %
                                (self.repo_name.title(), display_path(dest),
                                 url))
                    logger.notify('Updating %s %s%s' %
                                  (display_path(dest), self.repo_name,
                                   rev_display))
                    self.update(dest, rev_options)
                else:
                    logger.warn('%s %s in %s exists with URL %s' %
                                (self.name, self.repo_name,
                                 display_path(dest), existing_url))
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                # Destination exists but is not a checkout of this VCS.
                logger.warn('Directory %s already exists, '
                            'and is not a %s %s.' %
                            (dest, self.name, self.repo_name))
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warn('The plan is to install the %s repository %s' %
                        (self.name, url))
            response = ask_path_exists('What to do? %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.notify('Switching %s %s to %s%s' %
                              (self.repo_name, display_path(dest), url,
                               rev_display))
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warn('Deleting %s' % display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warn('Backing up %s to %s'
                            % (display_path(dest), dest_dir))
                shutil.move(dest, dest_dir)
                checkout = True
        return checkout

    def unpack(self, location):
        """Wipe ``location`` (if present) and obtain a fresh checkout there."""
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    def get_src_requirement(self, dist, location, find_tags=False):
        # Subclasses build an editable requirement string from the checkout.
        raise NotImplementedError
def get_src_requirement(dist, location, find_tags):
    """Return an editable-source requirement string for ``dist``.

    Delegates to the VCS backend that owns the checkout at ``location``;
    falls back to ``dist.as_requirement()`` when no backend matches.
    """
    backend = vcs.get_backend_from_location(location)
    if backend is not None:
        return backend().get_src_requirement(dist, location, find_tags)
    logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
    return dist.as_requirement()
| mit |
zstackio/zstack-woodpecker | integrationtest/vm/multihosts/volumes/paths/path30.py | 1 | 2878 | import zstackwoodpecker.test_state as ts_header
TestAction = ts_header.TestAction
def path():
    """Build the test path: create/attach 8 SCSI volumes, detach them all,
    then run the migrate/snapshot/clone sequence. Produces exactly the same
    action table as the original hand-written literal."""
    vols = ["volume%d" % i for i in range(1, 9)]

    steps = []
    # Create and immediately attach each data volume to vm1.
    for vol in vols:
        steps.append([TestAction.create_volume, vol, "=scsi"])
        steps.append([TestAction.attach_volume, "vm1", vol])
    # Detach them all again, in the same order.
    for vol in vols:
        steps.append([TestAction.detach_volume, vol])

    # Migration / snapshot / clone scenario.
    steps.extend([
        [TestAction.stop_vm, "vm1"],
        [TestAction.ps_migrate_volume, "vm1-root"],
        [TestAction.ps_migrate_volume, "volume1"],
        [TestAction.attach_volume, "vm1", "volume1"],
        [TestAction.clone_vm, "vm1", "vm2", "=full"],
        [TestAction.create_volume_snapshot, "volume2", "snapshot1"],
        [TestAction.create_volume_snapshot, "volume3", "snapshot2"],
        [TestAction.delete_volume_snapshot, "snapshot1"],
        [TestAction.ps_migrate_volume, "volume2"],
        [TestAction.attach_volume, "vm1", "volume2"],
        [TestAction.create_image_from_volume, "vm1", "image1"],
        [TestAction.ps_migrate_volume, "volume3"],
        [TestAction.delete_volume_snapshot, "snapshot2"],
        [TestAction.reboot_vm, "vm1"],
    ])

    return dict(initial_formation="template2", path_list=steps)
| apache-2.0 |
kenorb/BitTorrent | khashmir/knet.py | 2 | 2506 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# knet.py
# create a network of khashmir nodes
# usage: knet.py <num_nodes> <start_port> <ip_address>
from khashmir import Khashmir
from random import randrange
import sys, os
class Network:
    """Spin up a local network of Khashmir DHT nodes for testing.

    Usage (see module header): knet.py <num_nodes> <start_port> <ip_address>.
    NOTE(review): ``reactor`` is used but never imported in this module —
    presumably ``twisted.internet.reactor`` supplied elsewhere; confirm.
    """

    def __init__(self, size=0, startport=5555, localip='127.0.0.1'):
        self.num = size
        self.startport = startport
        self.localip = localip

    def _done(self, val):
        # Completion callback handed to findCloseNodes(); setUp() polls
        # self.done to know when a lookup has finished.
        self.done = 1

    def setUp(self):
        """Create the nodes, wire random contacts, and settle the network."""
        self.kfiles()
        self.l = []
        for i in range(self.num):
            self.l.append(Khashmir('', self.startport + i, '/tmp/kh%s.db' % (self.startport + i)))
        reactor.iterate()
        reactor.iterate()

        # Introduce each node to three random peers (possibly itself).
        for i in self.l:
            i.addContact(self.localip, self.l[randrange(0,self.num)].port)
            i.addContact(self.localip, self.l[randrange(0,self.num)].port)
            i.addContact(self.localip, self.l[randrange(0,self.num)].port)
            reactor.iterate()
            reactor.iterate()
            reactor.iterate()

        # Two rounds of findCloseNodes let routing tables converge.
        for i in self.l:
            self.done = 0
            i.findCloseNodes(self._done)
            while not self.done:
                reactor.iterate()
        for i in self.l:
            self.done = 0
            i.findCloseNodes(self._done)
            while not self.done:
                reactor.iterate()

    def tearDown(self):
        """Stop all listeners and delete the node databases."""
        for i in self.l:
            i.listenport.stopListening()
        self.kfiles()

    def kfiles(self):
        """Remove the temporary node database files for this port range."""
        for i in range(self.startport, self.startport+self.num):
            try:
                os.unlink('/tmp/kh%s.db' % i)
            # FIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit; only a missing/unremovable
            # file (OSError) is expected and safe to ignore here.
            except OSError:
                pass

        reactor.iterate()
if __name__ == "__main__":
    # Usage: knet.py <num_nodes> <start_port> <ip_address>
    # Builds the test network, then hands control to the event loop;
    # ``reactor`` is presumably the twisted reactor -- it is not imported in
    # this module, so confirm the import before running stand-alone.
    n = Network(int(sys.argv[1]), int(sys.argv[2]), sys.argv[3])
    n.setUp()
    try:
        reactor.run()
    finally:
        # Always stop listeners and delete the /tmp/kh*.db files on exit.
        n.tearDown()
| gpl-3.0 |
Enlik/entropy | lib/entropy/transceivers/uri_handlers/plugins/interfaces/ssh_plugin.py | 6 | 16156 | # -*- coding: utf-8 -*-
"""
@author: Fabio Erculiani <lxnay@sabayon.org>
@contact: lxnay@sabayon.org
@copyright: Fabio Erculiani
@license: GPL-2
B{EntropyTransceiver SSH URI Handler module}.
"""
import re
import os
import errno
import time
import shutil
import codecs
from entropy.const import const_isnumber, const_debug_write, \
const_mkdtemp, const_mkstemp, etpConst
from entropy.output import brown, darkgreen, teal
from entropy.i18n import _
from entropy.transceivers.exceptions import TransceiverConnectionError
from entropy.transceivers.uri_handlers.skel import EntropyUriHandler
class EntropySshUriHandler(EntropyUriHandler):

    """
    EntropyUriHandler based SSH (with pubkey) transceiver plugin.
    """

    PLUGIN_API_VERSION = 4

    # Defaults and external tool paths used to drive scp/ssh subprocesses.
    _DEFAULT_TIMEOUT = 60
    _DEFAULT_PORT = 22
    _TXC_CMD = "/usr/bin/scp"
    _SSH_CMD = "/usr/bin/ssh"

    @staticmethod
    def approve_uri(uri):
        # This plugin handles only ssh:// URIs.
        if uri.startswith("ssh://"):
            return True
        return False

    @staticmethod
    def get_uri_name(uri):
        # Extract the bare host name from "ssh://user@host:dir:port".
        myuri = uri.split("/")[2:][0].split(":")[0]
        myuri = myuri.split("@")[-1]
        return myuri

    @staticmethod
    def hide_sensible_data(uri):
        # Pubkey auth: the URI carries no password, nothing to hide.
        return uri

    def __init__(self, uri):
        EntropyUriHandler.__init__(self, uri)

        self._timeout = EntropySshUriHandler._DEFAULT_TIMEOUT
        import socket, subprocess, pty
        self._socket, self._subprocess = socket, subprocess
        self._pty = pty
        self.__host = EntropySshUriHandler.get_uri_name(self._uri)
        self.__user, self.__port, self.__dir = self.__extract_scp_data(
            self._uri)

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def __extract_scp_data(self, uri):
        # Parse "ssh://[user@]host:dir:port" into (user, port, dir).
        no_ssh_split = uri.split("ssh://")[-1]
        user = ''
        if "@" in no_ssh_split:
            user = no_ssh_split.split("@")[0]

        port = uri.split(":")[-1]
        try:
            port = int(port)
        except ValueError:
            # No (or non-numeric) trailing port component: use the default.
            port = EntropySshUriHandler._DEFAULT_PORT

        sdir = '~/'
        proposed_sdir = no_ssh_split.split(":", 1)[-1].split(":")[0]
        if proposed_sdir:
            sdir = proposed_sdir

        return user, port, sdir

    def _parse_progress_line(self, line):
        # Interpret one scp progress line: "<file> <pct> <size> <speed> <eta>".
        line_data = line.strip().split()
        if len(line_data) < 5:
            const_debug_write(__name__,
                "_parse_progress_line: cannot parse: %s" % (line_data,))
            # mmh... not possible to properly parse data
            self.output(line.strip(), back = True)
            return

        const_debug_write(__name__,
            "_parse_progress_line: parsing: %s" % (line_data,))

        file_name = line_data[0]
        percent = line_data[1]
        tx_speed = line_data[3]
        tx_size = line_data[2]
        eta = line_data[4]

        # create text
        mytxt = _("Transfer status")
        current_txt = "<-> (%s) %s: " % (teal(file_name), brown(mytxt),) + \
            darkgreen(tx_size) + " " + \
            brown("[") + str(percent) + brown("]") + \
            " " + eta + " " + tx_speed
        self.output(current_txt, back = True, header = "    ")

    def _update_progress(self, std_r):
        # Read the scp pty output char by char, emitting one progress update
        # per carriage-return-terminated chunk.
        if self._silent:
            # stfu !
            return
        read_buf = ""
        try:
            char = std_r.read(1)
            while char:
                if (char == "\r") and read_buf:
                    self._parse_progress_line(read_buf)
                    read_buf = ""
                elif (char != "\r"):
                    read_buf += char
                char = std_r.read(1)
        except IOError:
            # pty went away (child exited); progress updates simply stop.
            return

    def _fork_cmd(self, args):
        # Run args under a pseudo-terminal so scp emits progress output;
        # returns the child's exit status.
        pid, fd = self._pty.fork()

        if pid == 0:
            proc = self._subprocess.Popen(args)
            os._exit(proc.wait())
        elif pid == -1:
            # NOTE(review): pty.fork() raises OSError on failure rather than
            # returning -1, so this branch looks unreachable -- confirm.
            raise TransceiverConnectionError("cannot forkpty()")
        else:
            dead = False
            return_code = 1
            std_r = os.fdopen(fd, "r")
            while not dead:

                try:
                    dead, return_code = os.waitpid(pid, os.WNOHANG)
                except OSError as e:
                    if e.errno != errno.ECHILD:
                        raise
                    dead = True

                # wait a bit
                time.sleep(0.5)
                self._update_progress(std_r)
            std_r.close()
            return return_code

    def _exec_cmd(self, args):
        # Run args to completion, capturing stdout/stderr via temp files.
        # Returns (exit_code, stdout_text, stderr_text).
        fd, tmp_path = const_mkstemp(prefix="entropy.transceivers.ssh_plug")
        fd_err, tmp_path_err = const_mkstemp(
            prefix="entropy.transceivers.ssh_plug")
        try:
            with os.fdopen(fd, "wb") as std_f:
                with os.fdopen(fd_err, "wb") as std_f_err:
                    proc = self._subprocess.Popen(args, stdout = std_f,
                        stderr = std_f_err)
                    exec_rc = proc.wait()

            enc = etpConst['conf_encoding']
            with codecs.open(tmp_path, "r", encoding=enc) as std_f:
                output = std_f.read()
            with codecs.open(tmp_path_err, "r", encoding=enc) as std_f:
                error = std_f.read()
        finally:
            os.remove(tmp_path)
            os.remove(tmp_path_err)
        return exec_rc, output, error

    def _setup_common_args(self, remote_path):
        # Build the shared scp option list plus the "[user@]host:path" string.
        args = []
        if const_isnumber(self._timeout):
            args += ["-o", "ConnectTimeout=%s" % (self._timeout,),
                "-o", "ServerAliveCountMax=4", # hardcoded
                "-o", "ServerAliveInterval=15"] # hardcoded
        if self._speed_limit:
            args += ["-l", str(self._speed_limit*8)] # scp wants kbits/sec
        remote_ptr = os.path.join(self.__dir, remote_path)
        remote_str = ""
        if self.__user:
            remote_str += self.__user + "@"
        remote_str += self.__host + ":" + remote_ptr
        return args, remote_str

    def download(self, remote_path, save_path):
        """Fetch remote_path into save_path via scp (atomic local rename)."""
        args = [EntropySshUriHandler._TXC_CMD]
        c_args, remote_str = self._setup_common_args(remote_path)
        # Download to a temp name first so a partial transfer never leaves a
        # truncated file at save_path.
        tmp_save_path = save_path + EntropyUriHandler.TMP_TXC_FILE_EXT
        args.extend(c_args)
        args += ["-B", "-P", str(self.__port), remote_str, tmp_save_path]
        down_sts = self._fork_cmd(args) == os.EX_OK
        if not down_sts:
            try:
                os.remove(tmp_save_path)
            except OSError:
                return False
            return False
        os.rename(tmp_save_path, save_path)
        return True

    def download_many(self, remote_paths, save_dir):
        """Fetch several remote files into save_dir with one scp invocation."""
        if not remote_paths: # nothing to download
            return True

        def do_rmdir(path):
            try:
                shutil.rmtree(path, True)
            except (shutil.Error, OSError, IOError,):
                pass

        # Stage into a temp dir, then move each file into place.
        tmp_dir = const_mkdtemp(prefix="ssh_plugin.download_many")
        args = [EntropySshUriHandler._TXC_CMD]
        c_args, remote_str = self._setup_common_args(remote_paths.pop())
        args += c_args
        args += ["-B", "-P", str(self.__port)]
        args += [remote_str] + [self._setup_common_args(x)[1] for x in \
            remote_paths] + [tmp_dir]
        down_sts = self._fork_cmd(args) == os.EX_OK
        if not down_sts:
            do_rmdir(tmp_dir)
            return False

        # now move
        for tmp_file in os.listdir(tmp_dir):
            tmp_path = os.path.join(tmp_dir, tmp_file)
            save_path = os.path.join(save_dir, tmp_file)
            try:
                os.rename(tmp_path, save_path)
            except OSError:
                # cross-device rename fails; fall back to a copy+delete move
                shutil.move(tmp_path, save_path)

        do_rmdir(tmp_dir)
        return True

    def upload(self, load_path, remote_path):
        """Push load_path to remote_path via scp (atomic remote rename)."""
        args = [EntropySshUriHandler._TXC_CMD]
        # Upload under a temp name so readers never observe a partial file.
        tmp_remote_path = remote_path + EntropyUriHandler.TMP_TXC_FILE_EXT
        c_args, remote_str = self._setup_common_args(tmp_remote_path)
        args.extend(c_args)
        args += ["-B", "-P", str(self.__port), load_path, remote_str]
        upload_sts = self._fork_cmd(args) == os.EX_OK
        if not upload_sts:
            self.delete(tmp_remote_path)
            return False
        # atomic rename
        return self.rename(tmp_remote_path, remote_path)

    # Whitelist for paths that are spliced into the remote shell command in
    # lock() below.
    # NOTE(review): non-raw string; "\." and "\-" are invalid escape
    # sequences in py3 (DeprecationWarning) -- a raw string is intended.
    valid_lock_path = re.compile("^([A-Za-z0-9/\.:\-_~]+)$")

    def lock(self, remote_path):
        """Atomically create remote_path as a lock marker; True on success."""
        # we trust dir but not remote_path, because we do
        # shell code below.
        reg = EntropySshUriHandler.valid_lock_path
        if not reg.match(remote_path):
            raise ValueError("illegal lock path")

        remote_ptr = os.path.join(self.__dir, remote_path)
        remote_ptr_lock = os.path.join(
            self.__dir, os.path.dirname(remote_path),
            "." + os.path.basename(remote_path))
        remote_ptr_lock += ".lock"
        const_debug_write(__name__,
            "lock(): remote_ptr: %s, lock: %s" % (
                remote_ptr, remote_ptr_lock,))

        args, remote_str = self._setup_fs_args()
        # Remote shell: hold an flock on a hidden .lock file while testing
        # for and creating remote_ptr, so concurrent lockers serialize.
        # Prints "OK" on success, "FAIL" if already locked or flock failed.
        lock_cmd = '( flock -x -n 9; if [ "${?}" != "0" ]; ' + \
            'then echo -n "FAIL"; else if [ -f ' + remote_ptr + ' ]; then ' + \
            'echo -n "FAIL"; else touch ' + remote_ptr + ' && ' + \
            'rm ' + remote_ptr_lock + ' && echo -n "OK"; fi; fi ) 9> ' \
            + remote_ptr_lock
        args += [remote_str, lock_cmd]
        exec_rc, output, error = self._exec_cmd(args)
        const_debug_write(__name__,
            "lock(), outcome: lock: %s, rc: %s, out: %s, err: %s" % (
                remote_ptr_lock, exec_rc, output, error,))
        return output == "OK"

    def upload_many(self, load_path_list, remote_dir):
        """Push several files to remote_dir, renaming each into place."""

        def do_rm(path):
            try:
                os.remove(path)
            except OSError:
                pass

        # first of all, copy files renaming them
        tmp_file_map = {}
        try:
            for load_path in load_path_list:
                tmp_fd, tmp_path = const_mkstemp(
                    suffix = EntropyUriHandler.TMP_TXC_FILE_EXT,
                    prefix = "._%s" % (os.path.basename(load_path),))
                os.close(tmp_fd)
                shutil.copy2(load_path, tmp_path)
                tmp_file_map[tmp_path] = load_path

            args = [EntropySshUriHandler._TXC_CMD]
            c_args, remote_str = self._setup_common_args(remote_dir)
            args += c_args
            args += ["-B", "-P", str(self.__port)]
            args += sorted(tmp_file_map.keys())
            args += [remote_str]

            upload_sts = self._fork_cmd(args) == os.EX_OK
            if not upload_sts:
                return False

            # atomic rename
            rename_fine = True
            for tmp_path, orig_path in tmp_file_map.items():
                tmp_file = os.path.basename(tmp_path)
                orig_file = os.path.basename(orig_path)
                tmp_remote_path = os.path.join(remote_dir, tmp_file)
                remote_path = os.path.join(remote_dir, orig_file)
                self.output(
                    "<-> %s %s %s" % (
                        brown(tmp_file),
                        teal("=>"),
                        darkgreen(orig_file),
                    ),
                    header = "    ",
                    back = True
                )
                rc = self.rename(tmp_remote_path, remote_path)
                if not rc:
                    rename_fine = False
        finally:
            # Always drop the local temp copies.
            for path in tmp_file_map.keys():
                do_rm(path)

        return rename_fine

    def _setup_fs_args(self):
        # Build the ssh invocation prefix plus the "[user@]host" target used
        # by all remote filesystem operations below.
        args = [EntropySshUriHandler._SSH_CMD, "-p", str(self.__port)]
        remote_str = ""
        if self.__user:
            remote_str += self.__user + "@"
        remote_str += self.__host
        return args, remote_str

    def rename(self, remote_path_old, remote_path_new):
        """Move a remote file; True on success."""
        args, remote_str = self._setup_fs_args()
        remote_ptr_old = os.path.join(self.__dir, remote_path_old)
        remote_ptr_new = os.path.join(self.__dir, remote_path_new)
        args += [remote_str, "mv", remote_ptr_old, remote_ptr_new]
        return self._exec_cmd(args)[0] == os.EX_OK

    def copy(self, remote_path_old, remote_path_new):
        """Copy a remote file (metadata preserved), atomically renamed."""
        args, remote_str = self._setup_fs_args()
        tmp_remote_path_new = remote_path_new + \
            EntropyUriHandler.TMP_TXC_FILE_EXT
        remote_ptr_old = os.path.join(self.__dir, remote_path_old)
        remote_ptr_new = os.path.join(self.__dir, tmp_remote_path_new)
        args += [remote_str, "cp", "-p", remote_ptr_old, remote_ptr_new]
        if self._exec_cmd(args)[0] != 0:
            self.delete(tmp_remote_path_new)
            return False
        # atomic rename
        done = self.rename(tmp_remote_path_new, remote_path_new)
        if not done:
            self.delete(tmp_remote_path_new)
        return done

    def delete(self, remote_path):
        """Remove a remote file; True on success."""
        args, remote_str = self._setup_fs_args()
        remote_ptr = os.path.join(self.__dir, remote_path)
        args += [remote_str, "rm", remote_ptr]
        return self._exec_cmd(args)[0] == os.EX_OK

    def delete_many(self, remote_paths):
        """Remove several remote files with a single rm; True on success."""
        remote_ptrs = []
        args, remote_str = self._setup_fs_args()
        for remote_path in remote_paths:
            remote_ptr = os.path.join(self.__dir, remote_path)
            remote_ptrs.append(remote_ptr)
        args += [remote_str, "rm"] + remote_ptrs
        return self._exec_cmd(args)[0] == os.EX_OK

    def get_md5(self, remote_path):
        """Return the md5 hex digest of a remote file, or None on error."""
        args, remote_str = self._setup_fs_args()
        remote_ptr = os.path.join(self.__dir, remote_path)
        args += [remote_str, "md5sum", remote_ptr]
        exec_rc, output, error = self._exec_cmd(args)
        if exec_rc:
            return None
        # md5sum prints "<digest>  <path>"; keep only the digest.
        return output.strip().split()[0]

    def list_content(self, remote_path):
        """Return the entry names inside a remote directory ([] on error)."""
        args, remote_str = self._setup_fs_args()
        remote_ptr = os.path.join(self.__dir, remote_path)
        args += [remote_str, "ls", "-1", remote_ptr]
        exec_rc, output, error = self._exec_cmd(args)
        if exec_rc:
            return []
        return [x for x in output.split("\n") if x]

    def list_content_metadata(self, remote_path):
        """Return (name, size, owner, group, perms) tuples from ls -1lA."""
        args, remote_str = self._setup_fs_args()
        remote_ptr = os.path.join(self.__dir, remote_path)
        args += [remote_str, "ls", "-1lA", remote_ptr]
        exec_rc, output, error = self._exec_cmd(args)
        if exec_rc:
            return []

        data = []
        for item in output.split("\n"):
            item = item.strip().split()
            if len(item) < 5:
                # skip the "total N" header and blank lines
                continue
            perms, owner, group, size, name = item[0], item[2], item[3], \
                item[4], item[-1]
            data.append((name, size, owner, group, perms,))
        return data

    def is_dir(self, remote_path):
        """True if the remote path exists and is a directory."""
        args, remote_str = self._setup_fs_args()
        remote_ptr = os.path.join(self.__dir, remote_path)
        args += [remote_str, "test", "-d", remote_ptr]
        exec_rc, output, error = self._exec_cmd(args)
        return exec_rc == os.EX_OK

    def is_file(self, remote_path):
        """True if the remote path exists and is a regular file."""
        args, remote_str = self._setup_fs_args()
        remote_ptr = os.path.join(self.__dir, remote_path)
        args += [remote_str, "test", "-f", remote_ptr]
        exec_rc, output, error = self._exec_cmd(args)
        return exec_rc == os.EX_OK

    def is_path_available(self, remote_path):
        """True if the remote path exists at all (stat succeeds)."""
        args, remote_str = self._setup_fs_args()
        remote_ptr = os.path.join(self.__dir, remote_path)
        args += [remote_str, "stat", remote_ptr]
        exec_rc, output, error = self._exec_cmd(args)
        return exec_rc == os.EX_OK

    def makedirs(self, remote_path):
        """Create a remote directory tree (mkdir -p); True on success."""
        args, remote_str = self._setup_fs_args()
        remote_ptr = os.path.join(self.__dir, remote_path)
        args += [remote_str, "mkdir", "-p", remote_ptr]
        exec_rc, output, error = self._exec_cmd(args)
        return exec_rc == os.EX_OK

    def keep_alive(self):
        # Each operation spawns its own ssh/scp process: no persistent
        # connection to keep alive.
        return

    def close(self):
        # Nothing persistent to tear down (see keep_alive).
        return
| gpl-2.0 |
HyperloopTeam/FullOpenMDAO | lib/python2.7/site-packages/traits-4.3.0-py2.7-macosx-10.10-x86_64.egg/traits/util/deprecated.py | 1 | 1167 | """ A decorator for marking methods/functions as deprecated. """
# Standard library imports.
import functools
import logging
# Logging.
logger = logging.getLogger(__name__)
# We only warn about each function or method once!
_cache = {}


def deprecated(message):
    """A factory for decorators for marking methods/functions as deprecated.

    ``message`` is appended to the single DEPRECATED warning that is logged
    the first time each decorated callable is invoked.
    """

    def decorator(fn):
        """A decorator for marking methods/functions as deprecated."""

        # functools.wraps preserves __name__, __doc__, __module__, etc.,
        # replacing the manual attribute copying the original code did.
        @functools.wraps(fn)
        def wrapper(*args, **kw):
            """The method/function wrapper."""
            module_name = fn.__module__
            function_name = fn.__name__

            if (module_name, function_name) not in _cache:
                # logging.warn is a deprecated alias for logging.warning;
                # lazy %-args avoid formatting when the record is filtered.
                # (NOTE(review): the module also defines a ``logger`` that was
                # never used here -- root-logger behavior is kept intact.)
                logging.warning(
                    'DEPRECATED: %s.%s, %s', module_name, function_name,
                    message
                )
                _cache[(module_name, function_name)] = True

            return fn(*args, **kw)

        return wrapper

    return decorator
#### EOF ######################################################################
| gpl-2.0 |
cparawhore/ProyectoSubastas | site-packages/django/contrib/admindocs/views.py | 10 | 15480 | from importlib import import_module
import inspect
import os
import re
import warnings
from django import template
from django.apps import apps
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.views.decorators import staff_member_required
from django.db import models
from django.core.exceptions import ViewDoesNotExist
from django.http import Http404
from django.core import urlresolvers
from django.contrib.admindocs import utils
from django.utils.decorators import method_decorator
from django.utils.deprecation import RemovedInDjango18Warning
from django.utils._os import upath
from django.utils import six
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView
# Exclude methods starting with these strings from documentation
MODEL_METHODS_EXCLUDE = ('_', 'add_', 'delete', 'save', 'set_')

# ADMIN_FOR was removed from Django; warn users who still have it in their
# settings module so they can drop it.
if getattr(settings, 'ADMIN_FOR', None):
    warnings.warn('The ADMIN_FOR setting has been removed, you can remove '
        'this setting from your configuration.', RemovedInDjango18Warning,
        stacklevel=2)
class BaseAdminDocsView(TemplateView):
    """
    Common behaviour for all admindocs views: staff-only access, a docutils
    availability check, and admin-site context injection.
    """
    @method_decorator(staff_member_required)
    def dispatch(self, *args, **kwargs):
        if utils.docutils_is_available:
            return super(BaseAdminDocsView, self).dispatch(*args, **kwargs)
        # docutils is not installed: swap in the explanatory template.
        self.template_name = 'admin_doc/missing_docutils.html'
        return self.render_to_response(admin.site.each_context())

    def get_context_data(self, **kwargs):
        kwargs['root_path'] = urlresolvers.reverse('admin:index')
        kwargs.update(admin.site.each_context())
        return super(BaseAdminDocsView, self).get_context_data(**kwargs)
class BookmarkletsView(BaseAdminDocsView):
    template_name = 'admin_doc/bookmarklets.html'

    def get_context_data(self, **kwargs):
        context = super(BookmarkletsView, self).get_context_data(**kwargs)
        request = self.request
        # Absolute admin URL so the bookmarklets work from any page.
        context['admin_url'] = "%s://%s%s" % (
            request.scheme, request.get_host(), context['root_path'])
        return context
class TemplateTagIndexView(BaseAdminDocsView):
    template_name = 'admin_doc/template_tag_index.html'

    def get_context_data(self, **kwargs):
        load_all_installed_template_libraries()

        tags = []
        # Builtin libraries first (no module name), then per-app libraries.
        libraries = [(None, lib) for lib in template.builtins]
        libraries += list(six.iteritems(template.libraries))
        for module_name, library in libraries:
            for tag_name, tag_func in library.tags.items():
                title, body, metadata = utils.parse_docstring(tag_func.__doc__)
                tag_anchor = _('tag:') + tag_name
                if title:
                    title = utils.parse_rst(title, 'tag', tag_anchor)
                if body:
                    body = utils.parse_rst(body, 'tag', tag_anchor)
                for key in metadata:
                    metadata[key] = utils.parse_rst(metadata[key], 'tag', tag_anchor)
                if library in template.builtins:
                    tag_library = ''
                else:
                    tag_library = module_name.split('.')[-1]
                tags.append({
                    'name': tag_name,
                    'title': title,
                    'body': body,
                    'meta': metadata,
                    'library': tag_library,
                })
        kwargs['tags'] = tags
        return super(TemplateTagIndexView, self).get_context_data(**kwargs)
class TemplateFilterIndexView(BaseAdminDocsView):
    template_name = 'admin_doc/template_filter_index.html'

    def get_context_data(self, **kwargs):
        load_all_installed_template_libraries()

        filters = []
        # Builtin libraries first (no module name), then per-app libraries.
        libraries = [(None, lib) for lib in template.builtins]
        libraries += list(six.iteritems(template.libraries))
        for module_name, library in libraries:
            for filter_name, filter_func in library.filters.items():
                title, body, metadata = utils.parse_docstring(filter_func.__doc__)
                filter_anchor = _('filter:') + filter_name
                if title:
                    title = utils.parse_rst(title, 'filter', filter_anchor)
                if body:
                    body = utils.parse_rst(body, 'filter', filter_anchor)
                for key in metadata:
                    metadata[key] = utils.parse_rst(metadata[key], 'filter', filter_anchor)
                if library in template.builtins:
                    tag_library = ''
                else:
                    tag_library = module_name.split('.')[-1]
                filters.append({
                    'name': filter_name,
                    'title': title,
                    'body': body,
                    'meta': metadata,
                    'library': tag_library,
                })
        kwargs['filters'] = filters
        return super(TemplateFilterIndexView, self).get_context_data(**kwargs)
class ViewIndexView(BaseAdminDocsView):
    template_name = 'admin_doc/view_index.html'

    def get_context_data(self, **kwargs):
        urlconf = import_module(settings.ROOT_URLCONF)
        views = []
        for func, regex, namespace, name in \
                extract_views_from_urlpatterns(urlconf.urlpatterns):
            # Class-based views have no __name__; fall back to the class name.
            func_name = getattr(func, '__name__', func.__class__.__name__)
            views.append({
                'full_name': '%s.%s' % (func.__module__, func_name),
                'url': simplify_regex(regex),
                'url_name': ':'.join((namespace or []) + (name and [name] or [])),
                'namespace': ':'.join((namespace or [])),
                'name': name,
            })
        kwargs['views'] = views
        return super(ViewIndexView, self).get_context_data(**kwargs)
class ViewDetailView(BaseAdminDocsView):
    template_name = 'admin_doc/view_detail.html'

    def get_context_data(self, **kwargs):
        view = self.kwargs['view']
        mod, func = urlresolvers.get_mod_func(view)
        try:
            view_func = getattr(import_module(mod), func)
        except (ImportError, AttributeError):
            raise Http404
        view_anchor = _('view:') + view
        title, body, metadata = utils.parse_docstring(view_func.__doc__)
        if title:
            title = utils.parse_rst(title, 'view', view_anchor)
        if body:
            body = utils.parse_rst(body, 'view', view_anchor)
        for key in metadata:
            # NOTE(review): 'model' (not 'view') matches the original code;
            # presumably intentional (or a long-standing quirk) -- preserved.
            metadata[key] = utils.parse_rst(metadata[key], 'model', view_anchor)
        kwargs.update({
            'name': view,
            'summary': title,
            'body': body,
            'meta': metadata,
        })
        return super(ViewDetailView, self).get_context_data(**kwargs)
class ModelIndexView(BaseAdminDocsView):
    template_name = 'admin_doc/model_index.html'

    def get_context_data(self, **kwargs):
        # The template only needs each model's _meta options object.
        kwargs['models'] = [model._meta for model in apps.get_models()]
        return super(ModelIndexView, self).get_context_data(**kwargs)
class ModelDetailView(BaseAdminDocsView):
    template_name = 'admin_doc/model_detail.html'

    def get_context_data(self, **kwargs):
        """Build the field/method/relation listing for a single model."""
        # Get the model class.
        try:
            app_config = apps.get_app_config(self.kwargs['app_label'])
        except LookupError:
            raise Http404(_("App %(app_label)r not found") % self.kwargs)
        try:
            model = app_config.get_model(self.kwargs['model_name'])
        except LookupError:
            raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % self.kwargs)

        opts = model._meta

        # Gather fields/field descriptions.
        fields = []
        for field in opts.fields:
            # ForeignKey is a special case since the field will actually be a
            # descriptor that returns the other object
            if isinstance(field, models.ForeignKey):
                data_type = field.rel.to.__name__
                app_label = field.rel.to._meta.app_label
                verbose = utils.parse_rst(
                    (_("the related `%(app_label)s.%(data_type)s` object") % {
                        'app_label': app_label, 'data_type': data_type,
                    }),
                    'model',
                    _('model:') + data_type,
                )
            else:
                data_type = get_readable_field_data_type(field)
                verbose = field.verbose_name
            fields.append({
                'name': field.name,
                'data_type': data_type,
                'verbose': verbose,
                'help_text': field.help_text,
            })

        # Gather many-to-many fields; each gets an ".all" and ".count" entry.
        for field in opts.many_to_many:
            data_type = field.rel.to.__name__
            app_label = field.rel.to._meta.app_label
            verbose = _("related `%(app_label)s.%(object_name)s` objects") % {'app_label': app_label, 'object_name': data_type}
            fields.append({
                'name': "%s.all" % field.name,
                "data_type": 'List',
                'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
            })
            fields.append({
                'name': "%s.count" % field.name,
                'data_type': 'Integer',
                'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
            })

        # Gather model methods: zero-argument instance methods only
        # (getargspec()[0] == ['self']).
        for func_name, func in model.__dict__.items():
            if (inspect.isfunction(func) and len(inspect.getargspec(func)[0]) == 1):
                # StopIteration is (ab)used here as a labelled "continue": it
                # skips methods matching any MODEL_METHODS_EXCLUDE prefix.
                try:
                    for exclude in MODEL_METHODS_EXCLUDE:
                        if func_name.startswith(exclude):
                            raise StopIteration
                except StopIteration:
                    continue
                verbose = func.__doc__
                if verbose:
                    verbose = utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.model_name)
                fields.append({
                    'name': func_name,
                    'data_type': get_return_data_type(func_name),
                    'verbose': verbose,
                })

        # Gather related objects (reverse FK and reverse M2M accessors).
        for rel in opts.get_all_related_objects() + opts.get_all_related_many_to_many_objects():
            verbose = _("related `%(app_label)s.%(object_name)s` objects") % {'app_label': rel.opts.app_label, 'object_name': rel.opts.object_name}
            accessor = rel.get_accessor_name()
            fields.append({
                'name': "%s.all" % accessor,
                'data_type': 'List',
                'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
            })
            fields.append({
                'name': "%s.count" % accessor,
                'data_type': 'Integer',
                'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
            })
        kwargs.update({
            'name': '%s.%s' % (opts.app_label, opts.object_name),
            # Translators: %s is an object type name
            'summary': _("Attributes on %s objects") % opts.object_name,
            'description': model.__doc__,
            'fields': fields,
        })
        return super(ModelDetailView, self).get_context_data(**kwargs)
class TemplateDetailView(BaseAdminDocsView):
    template_name = 'admin_doc/template_detail.html'

    def get_context_data(self, **kwargs):
        """List, for each TEMPLATE_DIRS entry, whether ``template`` exists
        there, with a lazy callable to fetch its contents."""
        template = self.kwargs['template']
        templates = []
        for template_dir in settings.TEMPLATE_DIRS:
            template_file = os.path.join(template_dir, template)
            templates.append({
                'file': template_file,
                'exists': os.path.exists(template_file),
                # Bind template_file as a default argument: a plain closure
                # would late-bind the loop variable, making every 'contents'
                # callable read the file from the *last* directory.
                'contents': lambda path=template_file: (
                    open(path).read() if os.path.exists(path) else ''),
                'order': list(settings.TEMPLATE_DIRS).index(template_dir),
            })
        kwargs.update({
            'name': template,
            'templates': templates,
        })
        return super(TemplateDetailView, self).get_context_data(**kwargs)
####################
# Helper functions #
####################
def load_all_installed_template_libraries():
    """Load/register all template tag libraries from installed apps."""
    for module_name in template.get_templatetags_modules():
        mod = import_module(module_name)
        libraries = []
        try:
            templatetags_dir = os.path.dirname(upath(mod.__file__))
            for entry in os.listdir(templatetags_dir):
                # Only importable python modules whose name starts with a
                # letter (skips __init__.py, hidden files, etc.).
                if entry.endswith('.py') and entry[0].isalpha():
                    libraries.append(os.path.splitext(entry)[0])
        except OSError:
            pass
        for library_name in libraries:
            try:
                template.get_library(library_name)
            except template.InvalidTemplateLibrary:
                pass
def get_return_data_type(func_name):
    """Return a somewhat-helpful data type guessed from a function name."""
    if not func_name.startswith('get_'):
        return ''
    if func_name.endswith('_list'):
        return 'List'
    if func_name.endswith('_count'):
        return 'Integer'
    return ''
def get_readable_field_data_type(field):
    """Return the description for a given field type, if it exists.

    Fields' descriptions can contain format strings, which are interpolated
    against the values of field.__dict__ before being output.
    """
    description_template = field.description
    return description_template % vars(field)
def extract_views_from_urlpatterns(urlpatterns, base='', namespace=None):
    """
    Return a list of views found in ``urlpatterns``.

    Each entry in the returned list is a four-tuple:
    (view_func, regex, namespace, name).
    """
    views = []
    for pattern in urlpatterns:
        if hasattr(pattern, 'url_patterns'):
            # URL resolver: recurse into the included urlconf.
            try:
                sub_patterns = pattern.url_patterns
            except ImportError:
                continue
            sub_namespace = (namespace or []) + \
                (pattern.namespace and [pattern.namespace] or [])
            views.extend(extract_views_from_urlpatterns(
                sub_patterns,
                base + pattern.regex.pattern,
                sub_namespace,
            ))
        elif hasattr(pattern, 'callback'):
            try:
                views.append((pattern.callback, base + pattern.regex.pattern,
                              namespace, pattern.name))
            except ViewDoesNotExist:
                continue
        else:
            raise TypeError(_("%s does not appear to be a urlpattern object") % pattern)
    return views
# Matches "(?P<name>...)" capturing the "<name>" placeholder, and any other
# "(...)" group respectively; used by simplify_regex() below.
named_group_matcher = re.compile(r'\(\?P(<\w+>).+?\)')
non_named_group_matcher = re.compile(r'\(.*?\)')


def simplify_regex(pattern):
    r"""
    Clean up urlpattern regexes into something somewhat readable by Mere
    Humans: turns something like
    "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
    into "<sport_slug>/athletes/<athlete_slug>/".
    """
    # Named groups collapse to their "<name>" placeholder; anonymous groups
    # become the generic "<var>".
    pattern = named_group_matcher.sub(r'\1', pattern)
    pattern = non_named_group_matcher.sub("<var>", pattern)

    # Strip the remaining regex-y characters (order preserved on purpose).
    for token, replacement in (
            ('^', ''), ('$', ''), ('?', ''), ('//', '/'), ('\\', '')):
        pattern = pattern.replace(token, replacement)

    if not pattern.startswith('/'):
        pattern = '/' + pattern
    return pattern
| mit |
syci/account-financial-tools | currency_rate_date_check/company.py | 38 | 1695 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Currency rate date check module for Odoo
# Copyright (C) 2012-2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class ResCompany(models.Model):
_inherit = 'res.company'
currency_rate_max_delta = fields.Integer(
string='Max Time Delta in Days for Currency Rates', default=7,
help="This is the maximum interval in days between "
"the date associated with the amount to convert and the date "
"of the nearest currency rate available in Odoo.")
_sql_constraints = [
('currency_rate_max_delta_positive',
'CHECK (currency_rate_max_delta >= 0)',
"The value of the field 'Max Time Delta in Days for Currency Rates' "
"must be positive or 0."),
]
| agpl-3.0 |
mosesfistos1/beetbox | beetsplug/ftintitle.py | 10 | 6188 | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Verrus, <github.com/Verrus/beets-plugin-featInTitle>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Moves "featured" artists to the title from the artist field.
"""
from __future__ import division, absolute_import, print_function
import re
from beets import plugins
from beets import ui
from beets.util import displayable_path
def split_on_feat(artist):
"""Given an artist string, split the "main" artist from any artist
on the right-hand side of a string like "feat". Return the main
artist, which is always a string, and the featuring artist, which
may be a string or None if none is present.
"""
# split on the first "feat".
regex = re.compile(plugins.feat_tokens(), re.IGNORECASE)
parts = [s.strip() for s in regex.split(artist, 1)]
if len(parts) == 1:
return parts[0], None
else:
return tuple(parts)
def contains_feat(title):
"""Determine whether the title contains a "featured" marker.
"""
return bool(re.search(plugins.feat_tokens(), title, flags=re.IGNORECASE))
def find_feat_part(artist, albumartist):
"""Attempt to find featured artists in the item's artist fields and
return the results. Returns None if no featured artist found.
"""
feat_part = None
# Look for the album artist in the artist field. If it's not
# present, give up.
albumartist_split = artist.split(albumartist, 1)
if len(albumartist_split) <= 1:
return feat_part
# If the last element of the split (the right-hand side of the
# album artist) is nonempty, then it probably contains the
# featured artist.
elif albumartist_split[-1] != '':
# Extract the featured artist from the right-hand side.
_, feat_part = split_on_feat(albumartist_split[-1])
# Otherwise, if there's nothing on the right-hand side, look for a
# featuring artist on the left-hand side.
else:
lhs, rhs = split_on_feat(albumartist_split[0])
if lhs:
feat_part = lhs
return feat_part
class FtInTitlePlugin(plugins.BeetsPlugin):
    """Move featured artists out of the artist field and into the title.

    For example, artist "A feat. B" with album artist "A" and title
    "Song" becomes artist "A" and title "Song feat. B".
    """

    def __init__(self):
        super(FtInTitlePlugin, self).__init__()
        # auto: run during import; drop: discard the featured artist
        # instead of appending it to the title; format: template used
        # when appending ({0} is replaced by the featured artist).
        self.config.add({
            'auto': True,
            'drop': False,
            'format': u'feat. {0}',
        })
        self._command = ui.Subcommand(
            'ftintitle',
            help=u'move featured artists to the title field')
        self._command.parser.add_option(
            u'-d', u'--drop', dest='drop',
            action='store_true', default=False,
            help=u'drop featuring from artists and ignore title update')
        # Only hook into the import pipeline when 'auto' is enabled.
        if self.config['auto']:
            self.import_stages = [self.imported]

    def commands(self):
        """Expose the `beet ftintitle` command."""
        def func(lib, opts, args):
            # Merge command-line flags into the plugin config.
            self.config.set_args(opts)
            drop_feat = self.config['drop'].get(bool)
            write = ui.should_write()
            for item in lib.items(ui.decargs(args)):
                self.ft_in_title(item, drop_feat)
                item.store()
                if write:
                    item.try_write()
        self._command.func = func
        return [self._command]

    def imported(self, session, task):
        """Import hook for moving featuring artist automatically.
        """
        drop_feat = self.config['drop'].get(bool)
        for item in task.imported_items():
            self.ft_in_title(item, drop_feat)
            item.store()

    def update_metadata(self, item, feat_part, drop_feat):
        """Choose how to add new artists to the title and set the new
        metadata. Also, print out messages about any changes that are made.
        If `drop_feat` is set, then do not add the artist to the title; just
        remove it from the artist field.
        """
        # In all cases, update the artist fields.
        self._log.info(u'artist: {0} -> {1}', item.artist, item.albumartist)
        item.artist = item.albumartist
        if item.artist_sort:
            # Just strip the featured artist from the sort name.
            item.artist_sort, _ = split_on_feat(item.artist_sort)
        # Only update the title if it does not already contain a featured
        # artist and if we do not drop featuring information.
        if not drop_feat and not contains_feat(item.title):
            feat_format = self.config['format'].as_str()
            new_format = feat_format.format(feat_part)
            new_title = u"{0} {1}".format(item.title, new_format)
            self._log.info(u'title: {0} -> {1}', item.title, new_title)
            item.title = new_title

    def ft_in_title(self, item, drop_feat):
        """Look for featured artists in the item's artist fields and move
        them to the title.
        """
        artist = item.artist.strip()
        albumartist = item.albumartist.strip()
        # Check whether there is a featured artist on this track and the
        # artist field does not exactly match the album artist field. In
        # that case, we attempt to move the featured artist to the title.
        _, featured = split_on_feat(artist)
        if featured and albumartist != artist and albumartist:
            self._log.info('{}', displayable_path(item.path))
            feat_part = None
            # Attempt to find the featured artist.
            feat_part = find_feat_part(artist, albumartist)
            # If we have a featuring artist, move it to the title.
            if feat_part:
                self.update_metadata(item, feat_part, drop_feat)
            else:
                self._log.info(u'no featuring artists found')
| mit |
msingh172/youtube-dl | youtube_dl/extractor/rbmaradio.py | 145 | 1887 | # encoding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
)
class RBMARadioIE(InfoExtractor):
    """Extractor for RBMA Radio show pages."""
    _VALID_URL = r'https?://(?:www\.)?rbmaradio\.com/shows/(?P<videoID>[^/]+)$'
    _TEST = {
        'url': 'http://www.rbmaradio.com/shows/ford-lopatin-live-at-primavera-sound-2011',
        'md5': '6bc6f9bcb18994b4c983bc3bf4384d95',
        'info_dict': {
            'id': 'ford-lopatin-live-at-primavera-sound-2011',
            'ext': 'mp3',
            "uploader_id": "ford-lopatin",
            "location": "Spain",
            "description": "Joel Ford and Daniel ’Oneohtrix Point Never’ Lopatin fly their midified pop extravaganza to Spain. Live at Primavera Sound 2011.",
            "uploader": "Ford & Lopatin",
            "title": "Live at Primavera Sound 2011",
        },
    }

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('videoID')

        webpage = self._download_webpage(url, video_id)
        # The show metadata is embedded in the page as a JS assignment.
        raw_json = self._search_regex(
            r'window\.gon.*?gon\.show=(.+?);$',
            webpage, 'json data', flags=re.MULTILINE)
        try:
            show = json.loads(raw_json)
        except ValueError as e:
            raise ExtractorError('Invalid JSON: ' + str(e))

        host = show.get('host', {})
        image = show.get('image', {})
        return {
            'id': video_id,
            'url': show['akamai_url'] + '&cbr=256',
            'title': show['title'],
            'description': show.get('teaser_text'),
            'location': show.get('country_of_origin'),
            'uploader': host.get('name'),
            'uploader_id': host.get('slug'),
            'thumbnail': image.get('large_url_2x'),
            'duration': show.get('duration'),
        }
| unlicense |
Julian/TomsBestFriend | tomsbestfriend.py | 1 | 4221 | import datetime
from parsley import makeGrammar as make_grammar, _GrammarWrapper
__version__ = "0.1.0"
class Duplicated(Exception):
    """Raised when a key group is duplicated or contains a duplicate value."""

    @classmethod
    def in_group(cls, key, key_group=None):
        """Build an instance describing *key* appearing twice in *key_group*.

        ``key_group=None`` means the duplicate is at the document top level.
        """
        if key_group is None:
            where = "the document"
        else:
            where = repr(key_group)
        return cls("%r already appears in %s." % (key, where))
toml_grammar = r"""
document = key_group*:groups -> document(groups)
key_group = (header_line:header value_line*:values | (-> []):header value_line+:values) ignore -> header, values
header_line = ignore '[' key_name:name ']' line_end -> name
key_name = key_segment:first ('.' key_segment)*:rest -> [first] + rest
key_segment = <(~('[' | ']' | '.') anything)+>
value_line = ~header_line ignore name:k ws '=' ws value:v line_end -> (k, v)
name = <(~(space | '=' | nl) anything)+>
value = string | datetime | float | integer | boolean | array
array = '[' ignore elements:members ignore ']' -> self.array(members)
elements = (value:first (ignore ',' ignore value)*:rest ','? -> [first] + rest) | -> []
string = '"' (escape_char | ~('"' | '\\') anything)*:c '"' -> ''.join(c).decode("utf-8")
escape_char = '\\' (('0' -> '\0')
|('b' -> '\b')
|('t' -> '\t')
|('n' -> '\n')
|('f' -> '\f')
|('r' -> '\r')
|('"' -> '"')
|('\\' -> '\\')
|('/' -> '/')
|escape_unichar)
escape_unichar = 'u' <hexdigit{4}>:hs -> unichr(int(hs, 16)).encode("utf-8")
integer = ('-' | -> ''):sign digit1_9:first <digit*>:rest -> int(sign + first + rest)
float = integer:whole '.' <digit+>:frac -> float(str(whole) + "." + frac)
boolean = ('true' -> True) | ('false' -> False)
datetime = (digit1_9:first digit{3}:rest -> "".join([first] + rest)):year '-'
digit{2}:month '-'
digit{2}:day 'T'
digit{2}:hour ':'
digit{2}:minute ':'
digit{2}:second
(('.' digit+) | -> 0):microsecond
'Z' -> datetime(
year=int("".join(year)),
month=int("".join(month)),
day=int("".join(day)),
hour=int("".join(hour)),
minute=int("".join(minute)),
second=int("".join(second)),
)
line_end = ws comment? nl
ignore = (comment | space | nl)*
comment = '#' (~'\n' anything)*
ws = space*
space = ' ' | '\t'
nl = '\r\n' | '\r' | '\n'
digit1_9 = :x ?(x in '123456789') -> x
hexdigit = :x ?(x in '0123456789abcdefABCDEF') -> x
"""
def document(groups):
    """Assemble parsed ``(header, pairs)`` groups into a nested dict.

    Groups are processed in sorted order so that parent tables are seen
    before their children.  Raises :class:`Duplicated` when a table or a
    key within a table appears twice.
    """
    result = {}
    for header, pairs in sorted(groups):
        table = result
        last_segment = None
        if header:
            # Walk (creating as needed) every segment but the last...
            for last_segment in header[:-1]:
                table = table.setdefault(last_segment, {})
            # ...then create the final table, refusing to clobber one.
            leaf = header[-1]
            if leaf in table:
                raise Duplicated.in_group(leaf, last_segment)
            new_table = {}
            table[leaf] = new_table
            table = new_table
        for name, value in pairs:
            if name in table:
                raise Duplicated.in_group(name, last_segment)
            table[name] = value
    return result
# Build the base parser class from the grammar.  The ``document`` function
# and ``datetime.datetime`` are exposed to the grammar's semantic actions;
# ``unwrap=True`` yields the raw class so it can be subclassed below.
_TOMLParser = make_grammar(
    toml_grammar,
    bindings={"document" : document, "datetime" : datetime.datetime},
    name="TOMLParser",
    unwrap=True,
)
class TOMLParser(_TOMLParser):
    """A TOML parser, optionally enforcing array homogeneity."""

    def __init__(self, toml, homogeneous_arrays=True):
        """Initialize me.

        :argument str toml: some TOML
        :argument bool homogeneous_arrays: enfore homogeneity of array members
        """
        super(TOMLParser, self).__init__(toml)
        self.homogeneous_arrays = homogeneous_arrays

    def array(self, members):
        """Semantic action for arrays; rejects mixed-type members if enabled."""
        if self.homogeneous_arrays:
            member_types = set(type(member) for member in members)
            if len(member_types) > 1:
                raise TypeError("%r is not homogeneous." % (members,))
        return members
def loads(toml, **kwargs):
    """Load some ``TOML`` from a string.

    :argument kwargs: passed along to :class:`TOMLParser`
    """
    parser = TOMLParser(toml, **kwargs)
    return _GrammarWrapper(parser, toml).document()
| mit |
TheMOOCAgency/edx-platform | openedx/core/lib/block_structure/transformer_registry.py | 35 | 1706 | """
Block Structure Transformer Registry implemented using the platform's
PluginManager.
"""
from openedx.core.lib.api.plugins import PluginManager
class TransformerRegistry(PluginManager):
    """
    Registry for all of the block structure transformers that have been
    made available.

    All block structure transformers should implement
    `BlockStructureTransformer`.
    """
    NAMESPACE = 'openedx.block_structure_transformer'
    USE_PLUGIN_MANAGER = True

    @classmethod
    def get_registered_transformers(cls):
        """
        Returns a set of all registered transformers.

        Returns:
            {BlockStructureTransformer} - All transformers that are
                registered with the platform's PluginManager.
        """
        if not cls.USE_PLUGIN_MANAGER:
            return set()
        return set(cls.get_available_plugins().itervalues())

    @classmethod
    def find_unregistered(cls, transformers):
        """
        Find and returns the names of all the transformers from the
        given list that aren't registered with the platform's
        PluginManager.

        Arguments:
            transformers ([BlockStructureTransformer] - List of
                transformers to check in the registry.

        Returns:
            set([string]) - Set of names of a subset of the given
                transformers that weren't found in the registry.
        """
        registered_names = {reg.name() for reg in cls.get_registered_transformers()}
        requested_names = {transformer.name() for transformer in transformers}
        return requested_names - registered_names
| agpl-3.0 |
helldorado/ansible | lib/ansible/module_utils/facts/network/hpux.py | 232 | 3015 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.network.base import Network, NetworkCollector
class HPUXNetwork(Network):
    """
    HP-UX-specific subclass of Network. Defines networking facts:
    - default_interface
    - interfaces (a list of interface names)
    - interface_<name> dictionary of ipv4 address information.
    """
    platform = 'HP-UX'

    def populate(self, collected_facts=None):
        """Collect and return all network facts; returns an empty dict
        when netstat is not available on the target host."""
        network_facts = {}
        netstat_path = self.module.get_bin_path('netstat')
        if netstat_path is None:
            return network_facts
        default_interfaces_facts = self.get_default_interfaces()
        network_facts.update(default_interfaces_facts)
        interfaces = self.get_interfaces_info()
        # NOTE(review): on Python 3 this stores a dict view, not a list —
        # confirm downstream consumers tolerate that.
        network_facts['interfaces'] = interfaces.keys()
        for iface in interfaces:
            network_facts[iface] = interfaces[iface]
        return network_facts

    def get_default_interfaces(self):
        """Parse ``netstat -nr`` for the 'default' route; returns a dict
        with 'default_interface'/'default_gateway' (empty if no default)."""
        default_interfaces = {}
        rc, out, err = self.module.run_command("/usr/bin/netstat -nr")
        lines = out.splitlines()
        for line in lines:
            words = line.split()
            if len(words) > 1:
                # Routing-table row; assumes columns
                # "default <gateway> <flags> <refs> <interface> ..." —
                # TODO confirm against HP-UX netstat output.
                if words[0] == 'default':
                    default_interfaces['default_interface'] = words[4]
                    default_interfaces['default_gateway'] = words[1]
        return default_interfaces

    def get_interfaces_info(self):
        """Parse ``netstat -ni``; returns a dict keyed by interface name.
        Only devices whose name starts with 'lan' are considered."""
        interfaces = {}
        rc, out, err = self.module.run_command("/usr/bin/netstat -ni")
        lines = out.splitlines()
        for line in lines:
            words = line.split()
            for i in range(len(words) - 1):
                if words[i][:3] == 'lan':
                    device = words[i]
                    interfaces[device] = {'device': device}
                    # Assumes columns "<device> <mtu> <network> <address> ..."
                    # relative to the device name — TODO confirm.
                    address = words[i + 3]
                    interfaces[device]['ipv4'] = {'address': address}
                    network = words[i + 2]
                    # This replaces the dict assigned two lines above; only
                    # this final mapping survives.
                    interfaces[device]['ipv4'] = {'network': network,
                                                  'interface': device,
                                                  'address': address}
        return interfaces
class HPUXNetworkCollector(NetworkCollector):
    """Fact collector wiring :class:`HPUXNetwork` to the HP-UX platform."""
    _fact_class = HPUXNetwork
    _platform = 'HP-UX'
| gpl-3.0 |
JiminHong/mine | node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py | 1509 | 17165 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Handle version information related to Visual Stuio."""
import errno
import os
import re
import subprocess
import sys
import gyp
import glob
class VisualStudioVersion(object):
  """Information regarding a version of Visual Studio."""

  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln, uses_vcxproj,
               path, sdk_based, default_toolset=None):
    """Args:
      short_name: e.g. '2010e' (an 'e' suffix marks an Express edition).
      description: human-readable name of the release.
      solution_version: version number written into .sln files.
      project_version: version number written into project files.
      flat_sln: whether the solution uses a flat layout.
      uses_vcxproj: True for MSBuild-style .vcxproj projects (VS2010+).
      path: install root of Visual Studio.
      sdk_based: True when only the Windows SDK (no full VS) is installed.
      default_toolset: msbuild toolset version (e.g. 'v120'), if any.
    """
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    self.flat_sln = flat_sln
    self.uses_vcxproj = uses_vcxproj
    self.path = path
    self.sdk_based = sdk_based
    self.default_toolset = default_toolset

  def ShortName(self):
    """Get the short name of the version, e.g. '2013e'."""
    return self.short_name

  def Description(self):
    """Get the full description of the version."""
    return self.description

  def SolutionVersion(self):
    """Get the version number of the sln files."""
    return self.solution_version

  def ProjectVersion(self):
    """Get the version number of the vcproj or vcxproj files."""
    return self.project_version

  def FlatSolution(self):
    """Whether the solution uses a flat layout."""
    return self.flat_sln

  def UsesVcxproj(self):
    """Returns true if this version uses a vcxproj file."""
    return self.uses_vcxproj

  def ProjectExtension(self):
    """Returns the file extension for the project."""
    return self.uses_vcxproj and '.vcxproj' or '.vcproj'

  def Path(self):
    """Returns the path to Visual Studio installation."""
    return self.path

  def ToolPath(self, tool):
    """Returns the path to a given compiler tool. """
    return os.path.normpath(os.path.join(self.path, "VC/bin", tool))

  def DefaultToolset(self):
    """Returns the msbuild toolset version that will be used in the absence
    of a user override."""
    return self.default_toolset

  def SetupScript(self, target_arch):
    """Returns a command (with arguments) to be used to set up the
    environment."""
    # Check if we are running in the SDK command line environment and use
    # the setup script from the SDK if so. |target_arch| should be either
    # 'x86' or 'x64'.
    assert target_arch in ('x86', 'x64')
    sdk_dir = os.environ.get('WindowsSDKDir')
    if self.sdk_based and sdk_dir:
      return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
              '/' + target_arch]
    else:
      # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
      # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
      # isn't always.
      if target_arch == 'x86':
        if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
            os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
          # VS2013 and later, non-Express have a x64-x86 cross that we want
          # to prefer.
          return [os.path.normpath(
             os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
        # Otherwise, the standard x86 compiler.
        return [os.path.normpath(
          os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
      else:
        assert target_arch == 'x64'
        arg = 'x86_amd64'
        # Use the 64-on-64 compiler if we're not using an express
        # edition and we're running on a 64bit OS.
        if self.short_name[-1] != 'e' and (
            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
            os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
          arg = 'amd64'
        return [os.path.normpath(
            os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
While ideally we might use the win32 module, we would like gyp to be
python neutral, so for instance cygwin python lacks this module.
Arguments:
sysdir: The system subdirectory to attempt to launch reg.exe from.
key: The registry key to read from.
value: The particular value to read.
Return:
stdout from reg.exe, or None for failure.
"""
# Skip if not on Windows or Python Win32 setup issue
if sys.platform not in ('win32', 'cygwin'):
return None
# Setup params to pass to and attempt to launch reg.exe
cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
'query', key]
if value:
cmd.extend(['/v', value])
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
# Note that the error text may be in [1] in some cases
text = p.communicate()[0]
# Check return code from reg.exe; officially 0==success and 1==error
if p.returncode:
return None
return text
def _RegistryQuery(key, value=None):
  r"""Use reg.exe to read a particular key through _RegistryQueryBase.

  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
  that fails, it falls back to System32. Sysnative is available on Vista and
  up and available on Windows Server 2003 and XP through KB patch 942589. Note
  that Sysnative will always fail if using 64-bit python due to it being a
  virtual directory and System32 will work correctly in the first place.
  KB 942589 - http://support.microsoft.com/kb/942589/en-us.

  Arguments:
    key: The registry key.
    value: The particular registry value to read (optional).
  Return:
    stdout from reg.exe, or None for failure.
  """
  text = None
  try:
    text = _RegistryQueryBase('Sysnative', key, value)
  except OSError, e:  # NOTE: Python 2 except syntax; this file targets py2.
    if e.errno == errno.ENOENT:
      # Sysnative doesn't exist (32-bit OS or pre-Vista): retry in System32.
      text = _RegistryQueryBase('System32', key, value)
    else:
      raise
  return text
def _RegistryGetValueUsingWinReg(key, value):
"""Use the _winreg module to obtain the value of a registry key.
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure. Throws
ImportError if _winreg is unavailable.
"""
import _winreg
try:
root, subkey = key.split('\\', 1)
assert root == 'HKLM' # Only need HKLM for now.
with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
return _winreg.QueryValueEx(hkey, value)[0]
except WindowsError:
return None
def _RegistryGetValue(key, value):
  """Use _winreg or reg.exe to obtain the value of a registry key.

  Using _winreg is preferable because it solves an issue on some corporate
  environments where access to reg.exe is locked down. However, we still need
  to fallback to reg.exe for the case where the _winreg module is not available
  (for example in cygwin python).

  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.
  """
  try:
    return _RegistryGetValueUsingWinReg(key, value)
  except ImportError:
    # _winreg unavailable (e.g. cygwin python): shell out to reg.exe.
    pass

  text = _RegistryQuery(key, value)
  if not text:
    return None
  # Pull the value out of reg.exe's textual output.
  match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
  return match.group(1) if match else None
def _CreateVersion(name, path, sdk_based=False):
  """Sets up MSVS project generation.

  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
  passed in that doesn't match a value in versions python will throw a error.
  """
  if path:
    path = os.path.normpath(path)
  # Static table of every supported VS release; an 'e' suffix denotes an
  # Express edition (flat solution layout).
  versions = {
      '2015': VisualStudioVersion('2015',
                                  'Visual Studio 2015',
                                  solution_version='12.00',
                                  project_version='14.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v140'),
      '2013': VisualStudioVersion('2013',
                                  'Visual Studio 2013',
                                  solution_version='13.00',
                                  project_version='12.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v120'),
      '2013e': VisualStudioVersion('2013e',
                                   'Visual Studio 2013',
                                   solution_version='13.00',
                                   project_version='12.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v120'),
      '2012': VisualStudioVersion('2012',
                                  'Visual Studio 2012',
                                  solution_version='12.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v110'),
      '2012e': VisualStudioVersion('2012e',
                                   'Visual Studio 2012',
                                   solution_version='12.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v110'),
      '2010': VisualStudioVersion('2010',
                                  'Visual Studio 2010',
                                  solution_version='11.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based),
      '2010e': VisualStudioVersion('2010e',
                                   'Visual C++ Express 2010',
                                   solution_version='11.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based),
      '2008': VisualStudioVersion('2008',
                                  'Visual Studio 2008',
                                  solution_version='10.00',
                                  project_version='9.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2008e': VisualStudioVersion('2008e',
                                   'Visual Studio 2008',
                                   solution_version='10.00',
                                   project_version='9.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
      '2005': VisualStudioVersion('2005',
                                  'Visual Studio 2005',
                                  solution_version='9.00',
                                  project_version='8.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2005e': VisualStudioVersion('2005e',
                                   'Visual Studio 2005',
                                   solution_version='9.00',
                                   project_version='8.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
  }
  return versions[str(name)]
def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def _DetectVisualStudioVersions(versions_to_check, force_express):
  """Collect the list of installed visual studio versions.

  Returns:
    A list of visual studio versions installed in descending order of
    usage preference.
    Base this on the registry and a quick check if devenv.exe exists.
    Only versions 8-10 are considered.
    Possibilities are:
      2005(e) - Visual Studio 2005 (8)
      2008(e) - Visual Studio 2008 (9)
      2010(e) - Visual Studio 2010 (10)
      2012(e) - Visual Studio 2012 (11)
      2013(e) - Visual Studio 2013 (12)
      2015    - Visual Studio 2015 (14)
    Where (e) is e for express editions of MSVS and blank otherwise.
  """
  version_to_year = {
      '8.0': '2005',
      '9.0': '2008',
      '10.0': '2010',
      '11.0': '2012',
      '12.0': '2013',
      '14.0': '2015',
  }
  versions = []
  for version in versions_to_check:
    # Old method of searching for which VS version is installed
    # We don't use the 2010-encouraged-way because we also want to get the
    # path to the binaries, which it doesn't offer.
    # Both native and Wow6432Node registry views are probed, for full VS
    # and for VC Express.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Microsoft\VCExpress\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], 'InstallDir')
      if not path:
        continue
      path = _ConvertToCygpath(path)
      # Check for full.
      full_path = os.path.join(path, 'devenv.exe')
      express_path = os.path.join(path, '*express.exe')
      if not force_express and os.path.exists(full_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version],
            os.path.join(path, '..', '..')))
      # Check for express.
      elif glob.glob(express_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
            os.path.join(path, '..', '..')))
    # The old method above does not work when only SDK is installed.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], version)
      if not path:
        continue
      path = _ConvertToCygpath(path)
      if version != '14.0':  # There is no Express edition for 2015.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
            os.path.join(path, '..'), sdk_based=True))
  return versions
def SelectVisualStudioVersion(version='auto', allow_fallback=True):
  """Select which version of Visual Studio projects to generate.

  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
  Returns:
    An object representing a visual studio project format version.
  """
  # In auto mode, check environment variable for override.
  if version == 'auto':
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  # Maps each requested version name to the internal version number(s) to
  # probe for, in order of preference.
  version_map = {
    'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
    '2005': ('8.0',),
    '2005e': ('8.0',),
    '2008': ('9.0',),
    '2008e': ('9.0',),
    '2010': ('10.0',),
    '2010e': ('10.0',),
    '2012': ('11.0',),
    '2012e': ('11.0',),
    '2013': ('12.0',),
    '2013e': ('12.0',),
    '2015': ('14.0',),
  }
  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
  if override_path:
    # An explicit install path requires an explicit version as well.
    msvs_version = os.environ.get('GYP_MSVS_VERSION')
    if not msvs_version:
      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
                       'set to a particular version (e.g. 2010e).')
    return _CreateVersion(msvs_version, override_path, sdk_based=True)
  version = str(version)
  versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
  if not versions:
    if not allow_fallback:
      raise ValueError('Could not locate Visual Studio installation.')
    if version == 'auto':
      # Default to 2005 if we couldn't find anything
      return _CreateVersion('2005', None)
    else:
      return _CreateVersion(version, None)
  return versions[0]
| mit |
blooparksystems/odoo | addons/website_sale/controllers/website_mail.py | 3 | 2273 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import urlparse
from openerp import SUPERUSER_ID
from openerp import http
from openerp.addons.web.http import request
from openerp.addons.website_mail.controllers.main import WebsiteMail
class WebsiteMailController(WebsiteMail):
    """Chatter controllers extended to attach a product rating to posts."""

    @http.route(['/website_mail/post/json'], type='json', auth='public', website=True)
    def chatter_json(self, res_model='', res_id=None, message='', **kw):
        """Post a chatter message; for product templates, also create a
        rating record from the optional ``rating`` keyword argument and
        echo it back in the returned message data."""
        params = kw.copy()
        # Strip the rating before delegating; the parent does not know it.
        params.pop('rating', False)
        message_data = super(WebsiteMailController, self).chatter_json(res_model=res_model, res_id=res_id, message=message, **params)
        if message_data and kw.get('rating') and res_model == 'product.template':  # restrict rating only for product template
            rating = request.env['rating.rating'].create({
                'rating': float(kw.get('rating')),
                'res_model': res_model,
                'res_id': res_id,
                'message_id': message_data['id'],
            })
            message_data.update({
                'rating_default_value': rating.rating,
                'rating_disabled': True,
            })
        return message_data

    @http.route(['/website_mail/post/post'], type='http', method=['POST'], auth='public', website=True)
    def chatter_post(self, res_model='', res_id=None, message='', redirect=None, **kw):
        """HTTP variant of :meth:`chatter_json`: the message id is parsed
        out of the redirect fragment ('#message-<id>') to attach the
        rating."""
        params = kw.copy()
        # Bug fix: use a default so posting without a rating does not raise
        # KeyError (mirrors chatter_json above).
        params.pop('rating', False)
        response = super(WebsiteMailController, self).chatter_post(res_model=res_model, res_id=res_id, message=message, redirect=redirect, **params)
        if kw.get('rating') and res_model == 'product.template':  # restrict rating only for product template
            try:
                fragment = urlparse.urlparse(response.location).fragment
                message_id = int(fragment.replace('message-', ''))
                request.env['rating.rating'].create({
                    'rating': float(kw.get('rating')),
                    'res_model': res_model,
                    'res_id': res_id,
                    'message_id': message_id,
                })
            except Exception:
                # Best effort: a malformed redirect fragment must not break
                # the post itself.
                pass
        return response
| gpl-3.0 |
KyoungRan/Django_React_ex | Django_React_Workshop-mbrochh/django/myvenv/lib/python3.4/site-packages/django/contrib/flatpages/templatetags/flatpages.py | 117 | 3630 | from django import template
from django.conf import settings
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.shortcuts import get_current_site
register = template.Library()
class FlatpageNode(template.Node):
    """Template node that stores the visible flatpages in the context."""

    def __init__(self, context_name, starts_with=None, user=None):
        self.context_name = context_name
        self.starts_with = template.Variable(starts_with) if starts_with else None
        self.user = template.Variable(user) if user else None

    def render(self, context):
        # Resolve the current site from the request when available,
        # otherwise fall back to the configured SITE_ID.
        if 'request' in context:
            site_pk = get_current_site(context['request']).pk
        else:
            site_pk = settings.SITE_ID
        flatpages = FlatPage.objects.filter(sites__id=site_pk)

        # Restrict to a URL prefix when one was given.
        if self.starts_with:
            flatpages = flatpages.filter(
                url__startswith=self.starts_with.resolve(context))

        # Anonymous users -- or an unspecified user -- only see flatpages
        # that do not require registration.
        if self.user:
            user = self.user.resolve(context)
            if not user.is_authenticated:
                flatpages = flatpages.filter(registration_required=False)
        else:
            flatpages = flatpages.filter(registration_required=False)

        context[self.context_name] = flatpages
        return ''
@register.tag
def get_flatpages(parser, token):
    """
    Retrieves all flatpage objects available for the current site and
    visible to the specific user (or visible to all users if no user is
    specified). Populates the template context with them in a variable
    whose name is defined by the ``as`` clause.

    An optional ``for`` clause can be used to control the user whose
    permissions are to be used in determining which flatpages are visible.

    An optional argument, ``starts_with``, can be applied to limit the
    returned flatpages to those beginning with a particular base URL.
    This argument can be passed as a variable or a string, as it resolves
    from the template context.

    Syntax::

        {% get_flatpages ['url_starts_with'] [for user] as context_name %}

    Example usage::

        {% get_flatpages as flatpages %}
        {% get_flatpages for someuser as flatpages %}
        {% get_flatpages '/about/' as about_pages %}
        {% get_flatpages prefix as about_pages %}
        {% get_flatpages '/about/' for someuser as about_pages %}
    """
    bits = token.split_contents()
    syntax_message = ("%(tag_name)s expects a syntax of %(tag_name)s "
                      "['url_starts_with'] [for user] as context_name" %
                      dict(tag_name=bits[0]))
    # Valid invocations have between 3 and 6 bits (tag name included).
    if not 3 <= len(bits) <= 6:
        raise template.TemplateSyntaxError(syntax_message)

    # An even bit count means the optional prefix argument was supplied.
    prefix = bits[1] if len(bits) % 2 == 0 else None

    # The invocation must end with "as <context_name>".
    if bits[-2] != 'as':
        raise template.TemplateSyntaxError(syntax_message)
    context_name = bits[-1]

    # With 5 or 6 bits, a "for <user>" clause precedes the "as" clause.
    user = None
    if len(bits) >= 5:
        if bits[-4] != 'for':
            raise template.TemplateSyntaxError(syntax_message)
        user = bits[-3]

    return FlatpageNode(context_name, starts_with=prefix, user=user)
| mit |
gyfora/flink | flink-python/pyflink/dataset/execution_environment.py | 4 | 8178 | ################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pyflink.common.execution_config import ExecutionConfig
from pyflink.common.restart_strategy import RestartStrategies
from pyflink.java_gateway import get_gateway
from pyflink.util.utils import load_java_class
class ExecutionEnvironment(object):
    """
    The ExecutionEnvironment is the context in which a program is executed.
    The environment provides methods to control the job execution (such as setting the parallelism)
    and to interact with the outside world (data access).

    This class is a thin wrapper: every call is delegated through the Py4J
    gateway to the wrapped Java ``ExecutionEnvironment`` object.
    """

    def __init__(self, j_execution_environment):
        # Py4J proxy for the Java-side ExecutionEnvironment; all methods
        # below delegate to it.
        self._j_execution_environment = j_execution_environment

    def get_parallelism(self):
        """
        Gets the parallelism with which operations are executed by default.

        :return: The parallelism.
        """
        return self._j_execution_environment.getParallelism()

    def set_parallelism(self, parallelism):
        """
        Sets the parallelism for operations executed through this environment.
        Setting a parallelism of x here will cause all operators to run with
        x parallel instances.

        :param parallelism: The parallelism.
        """
        self._j_execution_environment.setParallelism(parallelism)

    def get_default_local_parallelism(self):
        """
        Gets the default parallelism that will be used for the local execution environment.

        :return: The parallelism.
        """
        return self._j_execution_environment.getDefaultLocalParallelism()

    def set_default_local_parallelism(self, parallelism):
        """
        Sets the default parallelism that will be used for the local execution environment.

        :param parallelism: The parallelism.
        """
        self._j_execution_environment.setDefaultLocalParallelism(parallelism)

    def get_config(self):
        """
        Gets the config object that defines execution parameters.

        :return: An :class:`ExecutionConfig` object, the environment's execution configuration.
        """
        # Wrap the Java ExecutionConfig in its Python counterpart.
        return ExecutionConfig(self._j_execution_environment.getConfig())

    def set_restart_strategy(self, restart_strategy_configuration):
        """
        Sets the restart strategy configuration. The configuration specifies which restart strategy
        will be used for the execution graph in case of a restart.

        Example:
        ::
            >>> env.set_restart_strategy(RestartStrategies.no_restart())

        :param restart_strategy_configuration: Restart strategy configuration to be set.
        """
        # Unwrap the Python configuration to its underlying Java object.
        self._j_execution_environment.setRestartStrategy(
            restart_strategy_configuration._j_restart_strategy_configuration)

    def get_restart_strategy(self):
        """
        Returns the specified restart strategy configuration.

        :return: The restart strategy configuration to be used.
        """
        return RestartStrategies._from_j_restart_strategy(
            self._j_execution_environment.getRestartStrategy())

    def add_default_kryo_serializer(self, type_class_name, serializer_class_name):
        """
        Adds a new Kryo default serializer to the Runtime.

        Example:
        ::
            >>> env.add_default_kryo_serializer("com.aaa.bbb.TypeClass", "com.aaa.bbb.Serializer")

        :param type_class_name: The full-qualified java class name of the types serialized with the
                                given serializer.
        :param serializer_class_name: The full-qualified java class name of the serializer to use.
        """
        # Resolve both class names to Java Class objects via the gateway.
        type_clz = load_java_class(type_class_name)
        j_serializer_clz = load_java_class(serializer_class_name)
        self._j_execution_environment.addDefaultKryoSerializer(type_clz, j_serializer_clz)

    def register_type_with_kryo_serializer(self, type_class_name, serializer_class_name):
        """
        Registers the given Serializer via its class as a serializer for the given type at the
        KryoSerializer.

        Example:
        ::
            >>> env.register_type_with_kryo_serializer("com.aaa.bbb.TypeClass",
            ...                                        "com.aaa.bbb.Serializer")

        :param type_class_name: The full-qualified java class name of the types serialized with
                                the given serializer.
        :param serializer_class_name: The full-qualified java class name of the serializer to use.
        """
        type_clz = load_java_class(type_class_name)
        j_serializer_clz = load_java_class(serializer_class_name)
        self._j_execution_environment.registerTypeWithKryoSerializer(type_clz, j_serializer_clz)

    def register_type(self, type_class_name):
        """
        Registers the given type with the serialization stack. If the type is eventually
        serialized as a POJO, then the type is registered with the POJO serializer. If the
        type ends up being serialized with Kryo, then it will be registered at Kryo to make
        sure that only tags are written.

        Example:
        ::
            >>> env.register_type("com.aaa.bbb.TypeClass")

        :param type_class_name: The full-qualified java class name of the type to register.
        """
        type_clz = load_java_class(type_class_name)
        self._j_execution_environment.registerType(type_clz)

    def execute(self, job_name=None):
        """
        Triggers the program execution. The environment will execute all parts of the program that
        have resulted in a "sink" operation.

        The program execution will be logged and displayed with the given job name.

        :param job_name: Desired name of the job, optional.
        """
        # The Java API overloads execute(); pick the overload matching the
        # presence of a job name.
        if job_name is None:
            self._j_execution_environment.execute()
        else:
            self._j_execution_environment.execute(job_name)

    def get_execution_plan(self):
        """
        Creates the plan with which the system will execute the program, and returns it as
        a String using a JSON representation of the execution data flow graph.

        Note that this needs to be called, before the plan is executed.

        If the compiler could not be instantiated, or the master could not
        be contacted to retrieve information relevant to the execution planning,
        an exception will be thrown.

        :return: The execution plan of the program, as a JSON String.
        """
        return self._j_execution_environment.getExecutionPlan()

    @staticmethod
    def get_execution_environment():
        """
        Creates an execution environment that represents the context in which the program is
        currently executed. If the program is invoked standalone, this method returns a local
        execution environment. If the program is invoked from within the command line client to be
        submitted to a cluster, this method returns the execution environment of this cluster.

        :return: The :class:`ExecutionEnvironment` of the context in which the program is executed.
        """
        gateway = get_gateway()
        j_execution_environment = gateway.jvm.org.apache.flink.api.java.ExecutionEnvironment\
            .getExecutionEnvironment()
        return ExecutionEnvironment(j_execution_environment)
| apache-2.0 |
bilgili/nest-simulator | pynest/nest/tests/test_connect_fixed_outdegree.py | 2 | 5394 | # -*- coding: utf-8 -*-
#
# test_connect_fixed_outdegree.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import unittest
import scipy.stats
import nest
from . import test_connect_helpers as hf
from .test_connect_parameters import TestParams
class TestFixedOutDegree(TestParams):
    """Connectivity and statistical tests for the 'fixed_outdegree' rule."""

    # specify connection pattern and specific params
    rule = 'fixed_outdegree'
    conn_dict = {'rule': rule}
    # sizes of source-, target-population and outdegree for connection test
    N1 = 50
    N2 = 70
    Nout = 10
    conn_dict['outdegree'] = Nout
    # sizes of source-, target-population and outdegree for statistical test
    N_s = 10
    N_t = 10
    C = 10
    # Critical values and number of iterations of two level test
    stat_dict = {'alpha2': 0.05, 'n_runs': 100}

    # tested on each mpi process separately
    def testErrorMessages(self):
        """Requesting outdegree > target-population size without multapses
        must raise an error."""
        conn_params = self.conn_dict.copy()
        conn_params['autapses'] = True
        conn_params['multapses'] = False
        conn_params['outdegree'] = self.N2 + 1
        # Fix: the original used a bare ``except:`` around setUpNetwork,
        # which also swallowed SystemExit/KeyboardInterrupt.  assertRaises
        # checks the same condition while catching only real errors.
        with self.assertRaises(Exception):
            self.setUpNetwork(conn_params)

    def testOutDegree(self):
        """Every source node must have exactly Nout outgoing connections."""
        conn_params = self.conn_dict.copy()
        conn_params['autapses'] = False
        conn_params['multapses'] = False
        self.setUpNetwork(conn_params)
        # make sure the outdegree is right
        M = hf.get_connectivity_matrix(self.pop1, self.pop2)
        outds = np.sum(M, axis=0)
        self.assertTrue(hf.mpi_assert(outds, self.Nout * np.ones(self.N1)))
        # make sure no connections were drawn from the target to the
        # source population
        M = hf.get_connectivity_matrix(self.pop2, self.pop1)
        M_none = np.zeros((len(self.pop1), len(self.pop2)))
        self.assertTrue(hf.mpi_assert(M, M_none))

    def testStatistics(self):
        """Two-level test: per-run chi-squared on the degree distribution,
        then a KS test that the resulting p-values are uniform."""
        conn_params = self.conn_dict.copy()
        conn_params['autapses'] = True
        conn_params['multapses'] = True
        conn_params['outdegree'] = self.C
        expected = hf.get_expected_degrees_fixedDegrees(
            self.C, 'out', self.N_s, self.N_t)
        pvalues = []
        for i in range(self.stat_dict['n_runs']):
            hf.reset_seed(i, self.nr_threads)
            self.setUpNetwork(conn_dict=conn_params, N1=self.N_s, N2=self.N_t)
            degrees = hf.get_degrees('in', self.pop1, self.pop2)
            degrees = hf.gather_data(degrees)
            # gather_data returns None on all but the data-collecting rank.
            if degrees is not None:
                chi, p = hf.chi_squared_check(degrees, expected)
                pvalues.append(p)
            hf.mpi_barrier()
        if degrees is not None:
            ks, p = scipy.stats.kstest(pvalues, 'uniform')
            self.assertTrue(p > self.stat_dict['alpha2'])

    def testAutapses(self):
        """Self-connections appear when allowed and are absent otherwise."""
        conn_params = self.conn_dict.copy()
        N = 10
        conn_params['multapses'] = False
        # test that autapses exist
        conn_params['outdegree'] = N
        conn_params['autapses'] = True
        pop = nest.Create('iaf_neuron', N)
        nest.Connect(pop, pop, conn_params)
        # make sure all connections do exist
        M = hf.get_connectivity_matrix(pop, pop)
        self.assertTrue(hf.mpi_assert(M, np.ones(N), 'diagonal'))
        nest.ResetKernel()
        # test that autapses were excluded
        conn_params['outdegree'] = N - 1
        conn_params['autapses'] = False
        pop = nest.Create('iaf_neuron', N)
        nest.Connect(pop, pop, conn_params)
        # make sure the diagonal is empty
        M = hf.get_connectivity_matrix(pop, pop)
        self.assertTrue(hf.mpi_assert(M, np.zeros(N), 'diagonal'))

    def testMultapses(self):
        """Duplicate connections appear when allowed and are absent otherwise."""
        conn_params = self.conn_dict.copy()
        N = 3
        conn_params['autapses'] = True
        # test that multapses were drawn
        conn_params['outdegree'] = N + 1
        conn_params['multapses'] = True
        pop = nest.Create('iaf_neuron', N)
        nest.Connect(pop, pop, conn_params)
        nr_conns = len(nest.GetConnections(pop, pop))
        self.assertTrue(hf.mpi_assert(nr_conns, conn_params['outdegree'] * N))
        nest.ResetKernel()
        # test that no multapses exist
        conn_params['outdegree'] = N
        conn_params['multapses'] = False
        pop = nest.Create('iaf_neuron', N)
        nest.Connect(pop, pop, conn_params)
        M = hf.get_connectivity_matrix(pop, pop)
        M = hf.gather_data(M)
        if M is not None:
            # Bug fix: the original called
            # ``self.assertTrue(M.flatten, np.ones(N * N))`` which passed the
            # (always truthy) bound method as the condition and the expected
            # array as the *message*, so the check could never fail.  With
            # outdegree == N, autapses on and multapses off, the matrix must
            # be all ones.
            self.assertTrue(np.array_equal(M.flatten(), np.ones(N * N)))
def suite():
    """Build a TestSuite holding every TestFixedOutDegree case."""
    # Avoid shadowing this function's own name with the local variable.
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(TestFixedOutDegree)
def run():
    """Execute the suite with a verbose text runner."""
    unittest.TextTestRunner(verbosity=2).run(suite())


if __name__ == '__main__':
    run()
| gpl-2.0 |
hoangt/tpzsimul.gem5 | util/batch/job.py | 77 | 7365 | #!/usr/bin/env python
# Copyright (c) 2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Kevin Lim
import os, os.path, shutil, signal, socket, sys
from os import environ as env
from os.path import join as joinpath, expanduser
def date():
    """Return the current local time formatted like the Unix ``date`` command."""
    import time
    now = time.localtime()
    return time.strftime('%a %b %e %H:%M:%S %Z %Y', now)
def cleandir(dir):
    """Delete everything inside *dir*, leaving *dir* itself in place."""
    # Walk bottom-up so each directory is already empty when it is removed.
    for root, subdirs, filenames in os.walk(dir, topdown=False):
        for entry in filenames:
            os.remove(joinpath(root, entry))
        for entry in subdirs:
            os.rmdir(joinpath(root, entry))
class rsync:
    """Thin wrapper that shells out to rsync with a configurable flag set."""

    def __init__(self):
        # Defaults correspond to a plain "rsync -a" invocation.
        self.sudo = False
        self.rsync = 'rsync'
        self.compress = False
        self.archive = True
        self.delete = False
        self.options = ''

    def do(self, src, dst):
        """Run rsync copying *src* to *dst*; return the child's exit status."""
        cmd = []
        if self.sudo:
            cmd.append('sudo')
        cmd.append(self.rsync)
        if self.archive:
            cmd.append('-a')
        if self.compress:
            cmd.append('-z')
        if self.delete:
            cmd.append('--delete')
        if self.options:
            cmd.append(self.options)
        cmd.append(src)
        cmd.append(dst)
        # Block until the child finishes and report its status.
        return os.spawnvp(os.P_WAIT, cmd[0], cmd)
class JobDir(object):
    """Bookkeeping for a batch job's output directory.

    Job state is tracked through marker files (.queued/.running/.success/
    .failure) and an append-only '.status' log inside the directory.
    Note: this module is Python 2 code (``file()``, ``print >>``,
    ``except E, e`` syntax).
    """
    def __init__(self, dir):
        self.dir = dir
    def file(self, filename):
        """Return *filename* joined onto the job directory."""
        return joinpath(self.dir, filename)
    def create(self):
        """Create the directory; abort if the path exists as a non-directory."""
        if os.path.exists(self.dir):
            if not os.path.isdir(self.dir):
                sys.exit('%s is not a directory.  Cannot build job' % self.dir)
        else:
            os.mkdir(self.dir)
    def exists(self):
        return os.path.isdir(self.dir)
    def clean(self):
        # Remove the directory's contents but keep the directory itself.
        cleandir(self.dir)
    def hasfile(self, filename):
        return os.path.isfile(self.file(filename))
    def echofile(self, filename, string):
        """Overwrite *filename* in the job dir with a single line of text."""
        filename = self.file(filename)
        try:
            f = file(filename, 'w')
            print >>f, string
            f.flush()
            f.close()
        except IOError,e:
            # Treat an unwritable job directory as fatal.
            sys.exit(e)
    def rmfile(self, filename):
        """Remove *filename* from the job dir if it exists (no error if not)."""
        filename = self.file(filename)
        if os.path.isfile(filename):
            os.unlink(filename)
    def readval(self, filename):
        """Return the first line of *filename*, stripped of whitespace."""
        filename = self.file(filename)
        f = file(filename, 'r')
        value = f.readline().strip()
        f.close()
        return value
    def setstatus(self, string):
        """Append a line to the '.status' log."""
        filename = self.file('.status')
        try:
            f = file(filename, 'a')
            print >>f, string
            f.flush()
            f.close()
        except IOError,e:
            sys.exit(e)
    def getstatus(self):
        """Return the first word of the last '.status' line, or 'none'.

        NOTE(review): if '.status' exists but is empty, ``line`` is never
        bound and this raises NameError -- confirm that an empty status
        file cannot occur in practice.
        """
        filename = self.file('.status')
        try:
            f = file(filename, 'r')
        except IOError, e:
            return 'none'
        # fast forward to the end
        for line in f: pass
        # the first word on the last line is the status
        return line.split(' ')[0]
    def __str__(self):
        return self.dir
if __name__ == '__main__':
    import platform
    # Map the build host's architecture to the matching m5 binary name.
    binaries = { 'i686' : 'm5.i386',
                 'x86_64' : 'm5.amd64' }
    binary = binaries[platform.machine()]
    cwd = os.getcwd()
    # Job layout: ROOTDIR/Base holds the simulator + run script; the job's
    # own directory (cwd) collects status and output files.
    rootdir = env.setdefault('ROOTDIR', os.path.dirname(cwd))
    oar_jobid = int(env['OAR_JOBID'])
    oar_jobname = os.path.basename(cwd)
    #pbs_jobname = env['PBS_JOBNAME']
    basedir = joinpath(rootdir, 'Base')
    jobname = env.setdefault('JOBNAME', oar_jobname)
    jobfile = env.setdefault('JOBFILE', joinpath(rootdir, 'Test.py'))
    outdir = env.setdefault('OUTPUT_DIR', cwd)
    env['POOLJOB'] = 'True'
    # Prefer node-local /work scratch space when available.
    if os.path.isdir("/work"):
        workbase = "/work"
    else:
        workbase = "/tmp/"
    workdir = joinpath(workbase, '%s.%s' % (env['USER'], oar_jobid))
    host = socket.gethostname()
    os.umask(0022)
    # Mark the job as running in the output directory's bookkeeping files.
    jobdir = JobDir(outdir)
    started = date()
    jobdir.echofile('.running', started)
    jobdir.rmfile('.queued')
    jobdir.echofile('.host', host)
    jobdir.setstatus('running on %s on %s' % (host, started))
    # Start from an empty scratch directory.
    if os.path.isdir(workdir):
        cleandir(workdir)
    else:
        os.mkdir(workdir)
    # NOTE(review): deliberately disabled ('if False'); kept for reference.
    if False and os.path.isdir('/z/dist'):
        sync = rsync()
        sync.delete = True
        sync.sudo = True
        sync.do('poolfs::dist/m5/', '/z/dist/m5/')
    try:
        os.chdir(workdir)
    except OSError,e:
        sys.exit(e)
    # Convenience link so the live output can be tailed from the scratch dir.
    os.symlink(jobdir.file('output'), 'status.out')
    args = [ joinpath(basedir, binary), joinpath(basedir, 'run.py') ]
    if not len(args):
        sys.exit("no arguments")
    print 'starting job... %s' % started
    print ' '.join(args)
    print
    sys.stdout.flush()
    childpid = os.fork()
    if not childpid:
        # Child: redirect stdout/stderr into the job's output file, then
        # replace this process image with the simulator.
        sys.stdin.close()
        fd = os.open(jobdir.file("output"),
                     os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
        os.dup2(fd, sys.stdout.fileno())
        os.dup2(fd, sys.stderr.fileno())
        os.execvp(args[0], args)
    # Parent: forward job-control signals to the child.
    def handler(signum, frame):
        if childpid != 0:
            os.kill(childpid, signum)
    signal.signal(signal.SIGHUP, handler)
    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGQUIT, handler)
    signal.signal(signal.SIGTERM, handler)
    signal.signal(signal.SIGCONT, handler)
    signal.signal(signal.SIGUSR1, handler)
    signal.signal(signal.SIGUSR2, handler)
    # Wait for the child, retrying if waitpid is interrupted by a signal.
    done = 0
    while not done:
        try:
            thepid,ec = os.waitpid(childpid, 0)
            if ec:
                print 'Exit code ', ec
                status = 'failure'
            else:
                status = 'success'
            done = 1
        except OSError:
            pass
    # Record the final outcome in the job directory.
    complete = date()
    print '\njob complete... %s' % complete
    jobdir.echofile('.%s' % status, complete)
    jobdir.rmfile('.running')
    jobdir.setstatus('%s on %s' % (status, complete))
| bsd-3-clause |
tareqalayan/ansible | lib/ansible/modules/network/f5/bigip_device_ntp.py | 8 | 7943 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_device_ntp
short_description: Manage NTP servers on a BIG-IP
description:
- Manage NTP servers on a BIG-IP.
version_added: 2.2
options:
ntp_servers:
description:
- A list of NTP servers to set on the device. At least one of C(ntp_servers)
or C(timezone) is required.
state:
description:
- The state of the NTP servers on the system. When C(present), guarantees
that the NTP servers are set on the system. When C(absent), removes the
specified NTP servers from the device configuration.
default: present
choices:
- absent
- present
timezone:
description:
- The timezone to set for NTP lookups. At least one of C(ntp_servers) or
C(timezone) is required.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Set NTP server
bigip_device_ntp:
ntp_servers:
- 192.0.2.23
password: secret
server: lb.mydomain.com
user: admin
validate_certs: no
delegate_to: localhost
- name: Set timezone
bigip_device_ntp:
password: secret
server: lb.mydomain.com
timezone: America/Los_Angeles
user: admin
validate_certs: no
delegate_to: localhost
'''
RETURN = r'''
ntp_servers:
description: The NTP servers that were set on the device
returned: changed
type: list
sample: ["192.0.2.23", "192.0.2.42"]
timezone:
description: The timezone that was set on the device
returned: changed
type: string
sample: true
'''
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
    """Maps Ansible module parameters onto the BIG-IP NTP REST resource."""

    # REST attribute name -> module parameter name
    api_map = {'servers': 'ntp_servers'}

    # Attributes sent to the device API.
    api_attributes = ['servers', 'timezone']

    # Keys diffed for state=present updates.
    updatables = ['ntp_servers', 'timezone']

    # Keys reported back to the caller.
    returnables = ['ntp_servers', 'timezone']

    # Keys set-compared for state=absent.
    absentables = ['ntp_servers']

    def to_return(self):
        """Collect the returnable attributes, filtered of empty parameters."""
        raw = {key: getattr(self, key) for key in self.returnables}
        return self._filter_params(raw)
class ModuleManager(object):
    """Reads device state, diffs it against the desired parameters and
    pushes the resulting changes to the BIG-IP."""
    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.have = None  # current device state, read lazily
        self.want = Parameters(params=self.module.params)
        self.changes = Parameters()  # accumulated diff to apply/report
    def _update_changed_options(self):
        """Diff want vs. have over the updatable keys.

        Stores the differing values in ``self.changes`` and returns True
        when at least one key differs.
        """
        changed = {}
        for key in Parameters.updatables:
            if getattr(self.want, key) is not None:
                attr1 = getattr(self.want, key)
                attr2 = getattr(self.have, key)
                if attr1 != attr2:
                    changed[key] = attr1
        if changed:
            self.changes = Parameters(params=changed)
            return True
        return False
    def _absent_changed_options(self):
        """Set-compare want vs. have over the absentable keys.

        NOTE(review): on a difference this stores ``list(set_want)`` -- the
        *requested* servers rather than ``have - want`` -- and
        absent_on_device() later writes that list to the device.  Confirm
        against the F5 REST semantics that this actually removes servers.
        """
        changed = {}
        for key in Parameters.absentables:
            if getattr(self.want, key) is not None:
                set_want = set(getattr(self.want, key))
                set_have = set(getattr(self.have, key))
                if set_want != set_have:
                    changed[key] = list(set_want)
        if changed:
            self.changes = Parameters(params=changed)
            return True
        return False
    def exec_module(self):
        """Entry point: dispatch on state and return the result dict."""
        changed = False
        result = dict()
        state = self.want.state
        try:
            if state == "present":
                changed = self.update()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            # Surface REST errors as module failures.
            raise F5ModuleError(str(e))
        changes = self.changes.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        return result
    def update(self):
        """Apply want onto the device; honors check mode."""
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            # Report "would change" without touching the device.
            return True
        self.update_on_device()
        return True
    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False
    def should_absent(self):
        result = self._absent_changed_options()
        if result:
            return True
        return False
    def absent(self):
        """Remove the requested settings from the device; honors check mode."""
        self.have = self.read_current_from_device()
        if not self.should_absent():
            return False
        if self.module.check_mode:
            return True
        self.absent_on_device()
        return True
    def update_on_device(self):
        # Push the desired parameters onto the device's NTP resource.
        params = self.want.api_params()
        resource = self.client.api.tm.sys.ntp.load()
        resource.update(**params)
    def read_current_from_device(self):
        """Load the device's current NTP settings into a Parameters object."""
        resource = self.client.api.tm.sys.ntp.load()
        result = resource.attrs
        return Parameters(params=result)
    def absent_on_device(self):
        # See the NOTE in _absent_changed_options about what is written here.
        params = self.changes.api_params()
        resource = self.client.api.tm.sys.ntp.load()
        resource.update(**params)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument specification for this module."""

    def __init__(self):
        self.supports_check_mode = True
        module_args = dict(
            ntp_servers=dict(type='list'),
            timezone=dict(),
            state=dict(default='present', choices=['present', 'absent']),
        )
        # Start from the shared F5 spec and layer the module-specific
        # arguments on top of it.
        spec = dict(f5_argument_spec)
        spec.update(module_args)
        self.argument_spec = spec
        self.required_one_of = [
            ['ntp_servers', 'timezone']
        ]
def main():
    """Module entry point: parse arguments, run the manager, report results."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        required_one_of=spec.required_one_of
    )
    if not HAS_F5SDK:
        module.fail_json(msg="The python f5-sdk module is required")

    # Bug fix: if F5Client(**module.params) itself raised, the original
    # except block referenced the never-assigned local ``client`` and died
    # with a NameError, masking the real connection error.
    client = None
    try:
        client = F5Client(**module.params)
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        module.exit_json(**results)
    except F5ModuleError as ex:
        if client is not None:
            cleanup_tokens(client)
        module.fail_json(msg=str(ex))
main()
| gpl-3.0 |
mverzett/rootpy | rootpy/extern/lockfile/sqlitelockfile.py | 487 | 5540 | from __future__ import absolute_import, division
import time
import os
try:
unicode
except NameError:
unicode = str
from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked
class SQLiteLockFile(LockBase):
    """Demonstrate SQL-based locking.

    All instances in a process share one on-disk SQLite database, created
    lazily on first instantiation and unlinked at interpreter exit.  A held
    lock is a row in its ``locks`` table.
    """

    testdb = None  # path of the shared SQLite database, set on first use

    def __init__(self, path, threaded=True, timeout=None):
        """
        >>> lock = SQLiteLockFile('somefile')
        >>> lock = SQLiteLockFile('somefile', threaded=False)
        """
        LockBase.__init__(self, path, threaded, timeout)
        self.lock_file = unicode(self.lock_file)
        self.unique_name = unicode(self.unique_name)

        if SQLiteLockFile.testdb is None:
            # Reserve a unique filename, then let sqlite create the actual db.
            import tempfile
            _fd, testdb = tempfile.mkstemp()
            os.close(_fd)
            os.unlink(testdb)
            del _fd, tempfile
            SQLiteLockFile.testdb = testdb

        import sqlite3
        self.connection = sqlite3.connect(SQLiteLockFile.testdb)

        c = self.connection.cursor()
        try:
            c.execute("create table locks"
                      "("
                      " lock_file varchar(32),"
                      " unique_name varchar(32)"
                      ")")
        except sqlite3.OperationalError:
            # Table already exists -- another instance created it first.
            pass
        else:
            self.connection.commit()
            # First creator arranges for the shared db to be removed at exit.
            import atexit
            atexit.register(os.unlink, SQLiteLockFile.testdb)

    def acquire(self, timeout=None):
        """Acquire the lock, polling until it is free or the timeout expires.

        :param timeout: ``None`` falls back to the instance default; a value
            <= 0 means "raise AlreadyLocked immediately if the lock is held".
        :raises LockTimeout: positive timeout elapsed without acquiring.
        :raises AlreadyLocked: non-positive timeout and the lock is held.
        """
        # Bug fix: the old expression
        #   timeout = timeout is not None and timeout or self.timeout
        # silently replaced an explicit timeout of 0 with the instance
        # default, making a non-blocking acquire() impossible.
        if timeout is None:
            timeout = self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        if timeout is None:
            wait = 0.1
        elif timeout <= 0:
            wait = 0
        else:
            wait = timeout / 10

        cursor = self.connection.cursor()

        while True:
            if not self.is_locked():
                # Not locked.  Try to lock it.
                cursor.execute("insert into locks"
                               " (lock_file, unique_name)"
                               " values"
                               " (?, ?)",
                               (self.lock_file, self.unique_name))
                self.connection.commit()

                # Check to see if we are the only lock holder.
                cursor.execute("select * from locks"
                               " where unique_name = ?",
                               (self.unique_name,))
                rows = cursor.fetchall()
                if len(rows) > 1:
                    # Nope.  Someone else got there.  Remove our lock.
                    cursor.execute("delete from locks"
                                   " where unique_name = ?",
                                   (self.unique_name,))
                    self.connection.commit()
                else:
                    # Yup.  We're done, so go home.
                    return
            else:
                # Check to see if we are the only lock holder.
                cursor.execute("select * from locks"
                               " where unique_name = ?",
                               (self.unique_name,))
                rows = cursor.fetchall()
                if len(rows) == 1:
                    # We're the locker, so go home.
                    return

            # Maybe we should wait a bit longer.
            if timeout is not None and time.time() > end_time:
                if timeout > 0:
                    # No more waiting.
                    raise LockTimeout("Timeout waiting to acquire"
                                      " lock for %s" %
                                      self.path)
                else:
                    # Someone else has the lock and we are impatient.
                    raise AlreadyLocked("%s is already locked" % self.path)

            # Well, okay.  We'll give it a bit longer.
            time.sleep(wait)

    def release(self):
        """Release the lock.

        :raises NotLocked: the file is not locked at all.
        :raises NotMyLock: the file is locked, but not by this instance.
        """
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        if not self.i_am_locking():
            raise NotMyLock("%s is locked, but not by me (by %s)" %
                            (self.unique_name, self._who_is_locking()))
        cursor = self.connection.cursor()
        cursor.execute("delete from locks"
                       " where unique_name = ?",
                       (self.unique_name,))
        self.connection.commit()

    def _who_is_locking(self):
        """Return the unique_name of the current holder of this lock file."""
        cursor = self.connection.cursor()
        cursor.execute("select unique_name from locks"
                       " where lock_file = ?",
                       (self.lock_file,))
        return cursor.fetchone()[0]

    def is_locked(self):
        """Return True when any holder has a row for this lock file."""
        cursor = self.connection.cursor()
        cursor.execute("select * from locks"
                       " where lock_file = ?",
                       (self.lock_file,))
        rows = cursor.fetchall()
        return bool(rows)

    def i_am_locking(self):
        """Return True when this instance holds the lock."""
        cursor = self.connection.cursor()
        cursor.execute("select * from locks"
                       " where lock_file = ?"
                       "  and unique_name = ?",
                       (self.lock_file, self.unique_name))
        return bool(cursor.fetchall())

    def break_lock(self):
        """Forcibly clear the lock regardless of who holds it."""
        cursor = self.connection.cursor()
        cursor.execute("delete from locks"
                       "  where lock_file = ?",
                       (self.lock_file,))
        self.connection.commit()
| gpl-3.0 |
SNoiraud/gramps | gramps/gen/filters/rules/source/_hastag.py | 5 | 1716 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Rule that checks for a source with a particular tag.
"""
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .._hastagbase import HasTagBase
#-------------------------------------------------------------------------
#
# HasTag
#
#-------------------------------------------------------------------------
class HasTag(HasTagBase):
    """
    Rule that checks for a source with a particular tag.

    The matching logic lives in HasTagBase; this subclass only supplies the
    source-specific labels shown in the filter editor.
    """
    labels = [ _('Tag:') ]  # one entry per rule parameter
    name = _('Sources with the <tag>')
    description = _("Matches sources with the particular tag")
| gpl-2.0 |
divio/djangocms-table | djangocms_table/cms_plugins.py | 1 | 1442 | import json
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.conf import settings
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from .forms import TableForm
from .models import Table
from .utils import static_url
class TablePlugin(CMSPluginBase):
    """django CMS plugin that renders a user-editable HTML table."""
    model = Table
    form = TableForm
    name = _("Table")
    render_template = "cms/plugins/table.html"
    text_enabled = True

    fieldsets = (
        (None, {
            'fields': ('name',)
        }),
        (_('Headers'), {
            'fields': (('headers_top', 'headers_left', 'headers_bottom'),)
        }),
        (None, {
            'fields': ('table_data', 'csv_upload')
        })
    )

    def render(self, context, instance, placeholder):
        """Deserialize the stored JSON; expose "error" on malformed data."""
        try:
            data = json.loads(instance.table_data)
        except (TypeError, ValueError):
            # Narrowed from a bare ``except:``: json.loads raises ValueError
            # on malformed JSON and TypeError on non-string input; anything
            # else (e.g. KeyboardInterrupt) should propagate.
            data = "error"
        context.update({
            'name': instance.name,
            'data': data,
            'instance': instance,
        })
        return context

    def icon_src(self, instance):
        """Icon shown for the plugin inside text editors."""
        return static_url("img/table.png")

    def response_change(self, request, obj):
        """After a CSV upload, keep the change form open for review."""
        response = super(TablePlugin, self).response_change(request, obj)
        if 'csv_upload' in request.FILES:
            self.object_successfully_changed = False
        return response


plugin_pool.register_plugin(TablePlugin)
| bsd-3-clause |
TsinghuaX/edx-platform | lms/djangoapps/certificates/models.py | 2 | 5529 | from django.contrib.auth.models import User
from django.db import models
from datetime import datetime
"""
Certificates are created for a student and an offering of a course.
When a certificate is generated, a unique ID is generated so that
the certificate can be verified later. The ID is a UUID4, so that
it can't be easily guessed and so that it is unique.
Certificates are generated in batches by a cron job, when a
certificate is available for download the GeneratedCertificate
table is updated with information that will be displayed
on the course overview page.
State diagram:
[deleted,error,unavailable] [error,downloadable]
+ + +
| | |
| | |
add_cert regen_cert del_cert
| | |
v v v
[generating] [regenerating] [deleting]
+ + +
| | |
certificate certificate certificate
created removed,created deleted
+----------------+-------------+------->[error]
| | |
| | |
v v v
[downloadable] [downloadable] [deleted]
Eligibility:
Students are eligible for a certificate if they pass the course
with the following exceptions:
If the student has allow_certificate set to False in the student profile
he will never be issued a certificate.
If the user and course is present in the certificate whitelist table
then the student will be issued a certificate regardless of his grade,
unless he has allow_certificate set to False.
"""
class CertificateStatuses(object):
    """String constants for every state a certificate can be in (see the
    state diagram in the module notes above)."""
    deleted = 'deleted'            # certificate has been removed
    deleting = 'deleting'          # deletion requested, not yet done
    downloadable = 'downloadable'  # available for download
    error = 'error'                # generation/regeneration/deletion failed
    generating = 'generating'      # generation requested, not yet done
    notpassing = 'notpassing'      # graded, but did not pass
    regenerating = 'regenerating'  # regeneration requested, not yet done
    restricted = 'restricted'      # embargoed; must not be issued
    unavailable = 'unavailable'    # no certificate exists yet
class CertificateWhitelist(models.Model):
    """
    Tracks students who are whitelisted, all users
    in this table will always qualify for a certificate
    regardless of their grade unless they are on the
    embargoed country restriction list
    (allow_certificate set to False in userprofile).
    """
    user = models.ForeignKey(User)
    course_id = models.CharField(max_length=255, blank=True, default='')
    # NOTE(review): integer 0 works as a falsy default, but default=False
    # would be the conventional spelling for a BooleanField.
    whitelist = models.BooleanField(default=0)
class GeneratedCertificate(models.Model):
    """One certificate record per (user, course) pair; see the module notes
    above for the status lifecycle."""
    user = models.ForeignKey(User)
    course_id = models.CharField(max_length=255, blank=True, default='')
    # UUIDs used in public verification/download URLs.
    verify_uuid = models.CharField(max_length=32, blank=True, default='')
    download_uuid = models.CharField(max_length=32, blank=True, default='')
    download_url = models.CharField(max_length=128, blank=True, default='')
    grade = models.CharField(max_length=5, blank=True, default='')
    key = models.CharField(max_length=32, blank=True, default='')
    distinction = models.BooleanField(default=False)
    # One of the CertificateStatuses values.
    status = models.CharField(max_length=32, default='unavailable')
    # Name as it should appear on the certificate.
    name = models.CharField(blank=True, max_length=255)
    # NOTE(review): both auto_now_add/auto_now and default are given for the
    # timestamps; confirm which one the installed Django version honors.
    created_date = models.DateTimeField(
        auto_now_add=True, default=datetime.now)
    modified_date = models.DateTimeField(
        auto_now=True, default=datetime.now)
    error_reason = models.CharField(max_length=512, blank=True, default='')

    class Meta:
        # At most one certificate per student per course.
        unique_together = (('user', 'course_id'),)
def certificate_status_for_student(student, course_id):
    """Return a dict describing the certificate state for one student/course.

    The returned dict always has a 'status' key holding one of the
    CertificateStatuses values:

    unavailable  - No entry for this student--if they are actually in the
                   course, they probably have not been graded for
                   certificate generation yet.
    generating   - A request has been made to generate a certificate, but
                   it has not been generated yet.
    regenerating - A request has been made to regenerate a certificate,
                   but it has not been generated yet.
    deleting     - A request has been made to delete a certificate.
    deleted      - The certificate has been deleted.
    downloadable - The certificate is available for download.
    notpassing   - The student was graded but is not passing.
    restricted   - The student is on the restricted embargo list and should
                   not be issued a certificate (allow_certificate is False
                   in the userprofile table).

    If the status is 'downloadable' the dict also contains 'download_url',
    and if the student has been graded it contains their course grade under
    'grade'.
    """
    try:
        cert = GeneratedCertificate.objects.get(user=student,
                                                course_id=course_id)
    except GeneratedCertificate.DoesNotExist:
        # No record at all: the student simply has no certificate yet.
        return {'status': CertificateStatuses.unavailable}
    status_dict = {'status': cert.status}
    if cert.grade:
        status_dict['grade'] = cert.grade
    if cert.status == CertificateStatuses.downloadable:
        status_dict['download_url'] = cert.download_url
    return status_dict
| agpl-3.0 |
stackforge/monasca-ui | monitoring/enabled/_50_admin_add_monitoring_panel.py | 2 | 1689 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monascaclient import exc
# Slug of the Horizon dashboard this enabled-file registers.
DASHBOARD = "monitoring"
# A list of applications to be added to INSTALLED_APPS.
ADD_INSTALLED_APPS = ['monitoring']
# A list of angular modules to be added as dependencies to horizon app.
ADD_ANGULAR_MODULES = ['monitoringApp']
# A list of javascript files to be included for all pages
ADD_JS_FILES = ['monitoring/js/app.js',
                'monitoring/js/filters.js',
                'monitoring/js/controllers.js',
                'monitoring/js/directives.js',
                'monitoring/js/services.js',
                'monitoring/js/ng-tags-input.js']
# SCSS stylesheets compiled in alongside the JS files above.
ADD_SCSS_FILES = [
    'monitoring/css/alarm-create.scss']
# A dictionary of exception classes to be added to HORIZON['exceptions'].
# monascaclient exceptions grouped under the category keys Horizon expects
# ('recoverable', 'not_found', 'unauthorized').
_RECOVERABLE_ERRORS = (exc.UnprocessableEntity, exc.Conflict,
                       exc.BadRequest, exc.ConnectionError,
                       exc.Forbidden, exc.InternalServerError)
_NOT_FOUND_ERRORS = (exc.NotFound,)
_UNAUTHORIZED_ERRORS = (exc.Unauthorized,)
ADD_EXCEPTIONS = {
    'recoverable': _RECOVERABLE_ERRORS,
    'not_found': _NOT_FOUND_ERRORS,
    'unauthorized': _UNAUTHORIZED_ERRORS,
}
| apache-2.0 |
determinedcheetahs/cheetah_juniper | hadoop/src/contrib/hod/testing/main.py | 182 | 2928 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import unittest, os, sys, re
# Locate the HOD source root: this script lives in <root>/testing/, so
# stripping the trailing "/testing/..." from our own path yields the root.
myPath = os.path.realpath(sys.argv[0])
rootDirectory = re.sub("/testing/.*", "", myPath)
testingDir = os.path.join(rootDirectory, "testing")
sys.path.append(rootDirectory)
from testing.lib import printSeparator, printLine
moduleList = []
allList = []
# Test-module basenames to skip (empty by default).
excludes = [
]
# Build a module list by scanning through all files in testingDir
for file in os.listdir(testingDir):
  if(re.search(r".py$", file) and re.search(r"^test", file)):
    # All .py files with names starting in 'test'
    module = re.sub(r"^test","",file)
    module = re.sub(r".py$","",module)
    allList.append(module)
    if module not in excludes:
      moduleList.append(module)
printLine("All testcases - %s" % allList)
printLine("Excluding the testcases - %s" % excludes)
printLine("Executing the testcases - %s" % moduleList)
# Running total of failed suites / import errors; doubles as exit status.
testsResult = 0
# Now import each of these modules and start calling the corresponding
#testSuite methods
for moduleBaseName in moduleList:
  try:
    # Each testFoo.py module is expected to expose a RunFooTests() entry
    # point that returns the number of failures/errors in its suite.
    module = "testing.test" + moduleBaseName
    suiteCaller = "Run" + moduleBaseName + "Tests"
    printSeparator()
    printLine("Running %s" % suiteCaller)
    # Import the corresponding test cases module
    imported_module = __import__(module , fromlist=[suiteCaller] )
    # Call the corresponding suite method now
    testRes = getattr(imported_module, suiteCaller)()
    testsResult = testsResult + testRes
    printLine("Finished %s. TestSuite Result : %s\n" % \
              (suiteCaller, testRes))
  except ImportError, i:
    # Failed to import a test module
    printLine(i)
    testsResult = testsResult + 1
    pass
  except AttributeError, n:
    # Failed to get suiteCaller from a test module
    printLine(n)
    testsResult = testsResult + 1
    pass
  except Exception, e:
    # Test module suiteCaller threw some exception
    printLine("%s failed. \nReason : %s" % (suiteCaller, e))
    printLine("Skipping %s" % suiteCaller)
    testsResult = testsResult + 1
    pass
if testsResult != 0:
  printSeparator()
  printLine("Total testcases with failure or error : %s" % testsResult)
sys.exit(testsResult)
| apache-2.0 |
chugunovyar/factoryForBuild | env/lib/python2.7/site-packages/scipy/optimize/_differentialevolution.py | 23 | 32558 | """
differential_evolution: The differential evolution global optimization algorithm
Added by Andrew Nelson 2014
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from scipy.optimize import OptimizeResult, minimize
from scipy.optimize.optimize import _status_message
import numbers
__all__ = ['differential_evolution']
_MACHEPS = np.finfo(np.float64).eps
def differential_evolution(func, bounds, args=(), strategy='best1bin',
                           maxiter=1000, popsize=15, tol=0.01,
                           mutation=(0.5, 1), recombination=0.7, seed=None,
                           callback=None, disp=False, polish=True,
                           init='latinhypercube'):
    """Find the global minimum of a multivariate function.

    Differential evolution [1]_ is a stochastic, population based
    optimizer: it uses no gradient information and can search large
    regions of candidate space, at the price of more function
    evaluations than conventional gradient based techniques.

    Parameters
    ----------
    func : callable
        Objective of the form ``f(x, *args)`` where ``x`` is a 1-D array.
    bounds : sequence
        ``(min, max)`` pair for each element of ``x``; ``len(bounds)``
        determines the number of parameters.
    args : tuple, optional
        Extra fixed arguments forwarded to `func`.
    strategy : str, optional
        Mutation/crossover strategy; one of 'best1bin' (default),
        'best1exp', 'rand1exp', 'randtobest1exp', 'best2exp', 'rand2exp',
        'randtobest1bin', 'best2bin', 'rand2bin', 'rand1bin'.
    maxiter : int, optional
        Maximum number of generations. Without polishing at most
        ``(maxiter + 1) * popsize * len(x)`` function evaluations occur.
    popsize : int, optional
        Population-size multiplier; the population holds
        ``popsize * len(x)`` individuals.
    tol : float, optional
        Convergence tolerance; the solve stops once
        ``mean(pop) * tol / stdev(pop) > 1``.
    mutation : float or tuple(float, float), optional
        Mutation constant F in [0, 2]. A ``(min, max)`` pair enables
        dithering: F is redrawn from U[min, max) each generation, which
        can speed convergence significantly.
    recombination : float, optional
        Crossover probability CR in [0, 1].
    seed : int or `np.random.RandomState`, optional
        Source of randomness; specify for repeatable minimizations.
    callback : callable, ``callback(xk, convergence=val)``, optional
        Progress hook; returning True halts the minimization (polishing
        is still carried out).
    disp : bool, optional
        Display status messages.
    polish : bool, optional
        If True (default) the best member is polished with
        `scipy.optimize.minimize` using L-BFGS-B.
    init : string, optional
        Population initialization, 'latinhypercube' (default) or 'random'.

    Returns
    -------
    res : OptimizeResult
        Result object whose important attributes are ``x``, ``fun``,
        ``success`` and ``message``; when polishing lowered the minimum
        it also carries ``jac``.

    References
    ----------
    .. [1] Storn, R and Price, K, Differential Evolution - a Simple and
           Efficient Heuristic for Global Optimization over Continuous
           Spaces, Journal of Global Optimization, 1997, 11, 341 - 359.
    """
    # All of the heavy lifting lives in DifferentialEvolutionSolver; this
    # wrapper just forwards the configuration and runs it to completion.
    solver_kwargs = {
        'args': args,
        'strategy': strategy,
        'maxiter': maxiter,
        'popsize': popsize,
        'tol': tol,
        'mutation': mutation,
        'recombination': recombination,
        'seed': seed,
        'polish': polish,
        'callback': callback,
        'disp': disp,
        'init': init,
    }
    return DifferentialEvolutionSolver(func, bounds, **solver_kwargs).solve()
class DifferentialEvolutionSolver(object):
    """This class implements the differential evolution solver
    Parameters
    ----------
    func : callable
        The objective function to be minimized. Must be in the form
        ``f(x, *args)``, where ``x`` is the argument in the form of a 1-D array
        and ``args`` is a tuple of any additional fixed parameters needed to
        completely specify the function.
    bounds : sequence
        Bounds for variables. ``(min, max)`` pairs for each element in ``x``,
        defining the lower and upper bounds for the optimizing argument of
        `func`. It is required to have ``len(bounds) == len(x)``.
        ``len(bounds)`` is used to determine the number of parameters in ``x``.
    args : tuple, optional
        Any additional fixed parameters needed to
        completely specify the objective function.
    strategy : str, optional
        The differential evolution strategy to use. Should be one of:
            - 'best1bin'
            - 'best1exp'
            - 'rand1exp'
            - 'randtobest1exp'
            - 'best2exp'
            - 'rand2exp'
            - 'randtobest1bin'
            - 'best2bin'
            - 'rand2bin'
            - 'rand1bin'
        The default is 'best1bin'
    maxiter : int, optional
        The maximum number of generations over which the entire population is
        evolved. The maximum number of function evaluations (with no polishing)
        is: ``(maxiter + 1) * popsize * len(x)``
    popsize : int, optional
        A multiplier for setting the total population size. The population has
        ``popsize * len(x)`` individuals.
    tol : float, optional
        When the mean of the population energies, multiplied by tol,
        divided by the standard deviation of the population energies
        is greater than 1 the solving process terminates:
        ``convergence = mean(pop) * tol / stdev(pop) > 1``
    mutation : float or tuple(float, float), optional
        The mutation constant. In the literature this is also known as
        differential weight, being denoted by F.
        If specified as a float it should be in the range [0, 2].
        If specified as a tuple ``(min, max)`` dithering is employed. Dithering
        randomly changes the mutation constant on a generation by generation
        basis. The mutation constant for that generation is taken from
        U[min, max). Dithering can help speed convergence significantly.
        Increasing the mutation constant increases the search radius, but will
        slow down convergence.
    recombination : float, optional
        The recombination constant, should be in the range [0, 1]. In the
        literature this is also known as the crossover probability, being
        denoted by CR. Increasing this value allows a larger number of mutants
        to progress into the next generation, but at the risk of population
        stability.
    seed : int or `np.random.RandomState`, optional
        If `seed` is not specified the `np.random.RandomState` singleton is
        used.
        If `seed` is an int, a new `np.random.RandomState` instance is used,
        seeded with `seed`.
        If `seed` is already a `np.random.RandomState` instance, then that
        `np.random.RandomState` instance is used.
        Specify `seed` for repeatable minimizations.
    disp : bool, optional
        Display status messages
    callback : callable, `callback(xk, convergence=val)`, optional
        A function to follow the progress of the minimization. ``xk`` is
        the current value of ``x0``. ``val`` represents the fractional
        value of the population convergence. When ``val`` is greater than one
        the function halts. If callback returns `True`, then the minimization
        is halted (any polishing is still carried out).
    polish : bool, optional
        If True, then `scipy.optimize.minimize` with the `L-BFGS-B` method
        is used to polish the best population member at the end. This requires
        a few more function evaluations.
    maxfun : int, optional
        Set the maximum number of function evaluations. However, it probably
        makes more sense to set `maxiter` instead.
    init : string, optional
        Specify which type of population initialization is performed. Should be
        one of:
            - 'latinhypercube'
            - 'random'
    """
    # Dispatch of mutation strategy method (binomial or exponential).
    _binomial = {'best1bin': '_best1',
                 'randtobest1bin': '_randtobest1',
                 'best2bin': '_best2',
                 'rand2bin': '_rand2',
                 'rand1bin': '_rand1'}
    _exponential = {'best1exp': '_best1',
                    'rand1exp': '_rand1',
                    'randtobest1exp': '_randtobest1',
                    'best2exp': '_best2',
                    'rand2exp': '_rand2'}
    def __init__(self, func, bounds, args=(),
                 strategy='best1bin', maxiter=1000, popsize=15,
                 tol=0.01, mutation=(0.5, 1), recombination=0.7, seed=None,
                 maxfun=np.inf, callback=None, disp=False, polish=True,
                 init='latinhypercube'):
        # Resolve the strategy name to the bound mutation method.
        if strategy in self._binomial:
            self.mutation_func = getattr(self, self._binomial[strategy])
        elif strategy in self._exponential:
            self.mutation_func = getattr(self, self._exponential[strategy])
        else:
            raise ValueError("Please select a valid mutation strategy")
        self.strategy = strategy
        self.callback = callback
        self.polish = polish
        self.tol = tol
        # Mutation constant should be in [0, 2). If specified as a sequence
        # then dithering is performed.
        self.scale = mutation
        if (not np.all(np.isfinite(mutation)) or
                np.any(np.array(mutation) >= 2) or
                np.any(np.array(mutation) < 0)):
            raise ValueError('The mutation constant must be a float in '
                             'U[0, 2), or specified as a tuple(min, max)'
                             ' where min < max and min, max are in U[0, 2).')
        self.dither = None
        if hasattr(mutation, '__iter__') and len(mutation) > 1:
            self.dither = [mutation[0], mutation[1]]
            self.dither.sort()
        self.cross_over_probability = recombination
        self.func = func
        self.args = args
        # convert tuple of lower and upper bounds to limits
        # [(low_0, high_0), ..., (low_n, high_n]
        #     -> [[low_0, ..., low_n], [high_0, ..., high_n]]
        self.limits = np.array(bounds, dtype='float').T
        if (np.size(self.limits, 0) != 2 or not
                np.all(np.isfinite(self.limits))):
            raise ValueError('bounds should be a sequence containing '
                             'real valued (min, max) pairs for each value'
                             ' in x')
        if maxiter is None:  # the default used to be None
            maxiter = 1000
        self.maxiter = maxiter
        if maxfun is None:  # the default used to be None
            maxfun = np.inf
        self.maxfun = maxfun
        # population is scaled to between [0, 1].
        # We have to scale between parameter <-> population
        # save these arguments for _scale_parameter and
        # _unscale_parameter. This is an optimization
        self.__scale_arg1 = 0.5 * (self.limits[0] + self.limits[1])
        self.__scale_arg2 = np.fabs(self.limits[0] - self.limits[1])
        self.parameter_count = np.size(self.limits, 1)
        self.random_number_generator = _make_random_gen(seed)
        # default population initialization is a latin hypercube design, but
        # there are other population initializations possible.
        self.num_population_members = popsize * self.parameter_count
        self.population_shape = (self.num_population_members,
                                 self.parameter_count)
        self._nfev = 0
        if init == 'latinhypercube':
            self.init_population_lhs()
        elif init == 'random':
            self.init_population_random()
        else:
            raise ValueError("The population initialization method must be one"
                             "of 'latinhypercube' or 'random'")
        self.disp = disp
    def init_population_lhs(self):
        """
        Initializes the population with Latin Hypercube Sampling.
        Latin Hypercube Sampling ensures that each parameter is uniformly
        sampled over its range.
        """
        rng = self.random_number_generator
        # Each parameter range needs to be sampled uniformly. The scaled
        # parameter range ([0, 1)) needs to be split into
        # `self.num_population_members` segments, each of which has the following
        # size:
        segsize = 1.0 / self.num_population_members
        # Within each segment we sample from a uniform random distribution.
        # We need to do this sampling for each parameter.
        samples = (segsize * rng.random_sample(self.population_shape)
        # Offset each segment to cover the entire parameter range [0, 1)
                   + np.linspace(0., 1., self.num_population_members,
                                 endpoint=False)[:, np.newaxis])
        # Create an array for population of candidate solutions.
        self.population = np.zeros_like(samples)
        # Initialize population of candidate solutions by permutation of the
        # random samples.
        for j in range(self.parameter_count):
            order = rng.permutation(range(self.num_population_members))
            self.population[:, j] = samples[order, j]
        # reset population energies
        self.population_energies = (np.ones(self.num_population_members) *
                                    np.inf)
        # reset number of function evaluations counter
        self._nfev = 0
    def init_population_random(self):
        """
        Initialises the population at random.  This type of initialization
        can possess clustering, Latin Hypercube sampling is generally better.
        """
        rng = self.random_number_generator
        self.population = rng.random_sample(self.population_shape)
        # reset population energies
        self.population_energies = (np.ones(self.num_population_members) *
                                    np.inf)
        # reset number of function evaluations counter
        self._nfev = 0
    @property
    def x(self):
        """
        The best solution from the solver
        Returns
        -------
        x : ndarray
            The best solution from the solver.
        """
        # The best member is always kept at index 0 of the population.
        return self._scale_parameters(self.population[0])
    @property
    def convergence(self):
        """
        The standard deviation of the population energies divided by their
        mean.
        """
        # _MACHEPS guards this denominator against a zero mean, but note the
        # value itself can still be 0 when all energies are identical.
        return (np.std(self.population_energies) /
                np.abs(np.mean(self.population_energies) + _MACHEPS))
    def solve(self):
        """
        Runs the DifferentialEvolutionSolver.
        Returns
        -------
        res : OptimizeResult
            The optimization result represented as a ``OptimizeResult`` object.
            Important attributes are: ``x`` the solution array, ``success`` a
            Boolean flag indicating if the optimizer exited successfully and
            ``message`` which describes the cause of the termination. See
            `OptimizeResult` for a description of other attributes. If `polish`
            was employed, and a lower minimum was obtained by the polishing,
            then OptimizeResult also contains the ``jac`` attribute.
        """
        nit, warning_flag = 0, False
        status_message = _status_message['success']
        # The population may have just been initialized (all entries are
        # np.inf). If it has you have to calculate the initial energies.
        # Although this is also done in the evolve generator it's possible
        # that someone can set maxiter=0, at which point we still want the
        # initial energies to be calculated (the following loop isn't run).
        if np.all(np.isinf(self.population_energies)):
            self._calculate_population_energies()
        # do the optimisation.
        for nit in range(1, self.maxiter + 1):
            # evolve the population by a generation
            try:
                next(self)
            except StopIteration:
                warning_flag = True
                status_message = _status_message['maxfev']
                break
            if self.disp:
                print("differential_evolution step %d: f(x)= %g"
                      % (nit,
                         self.population_energies[0]))
            # stop when the fractional s.d. of the population is less than tol
            # of the mean energy
            convergence = self.convergence
            # NOTE(review): if the population has fully converged the energy
            # spread is 0, making `convergence` 0 and `self.tol / convergence`
            # below a ZeroDivisionError -- confirm whether callers with a
            # callback can hit this in practice.
            if (self.callback and
                    self.callback(self._scale_parameters(self.population[0]),
                                  convergence=self.tol / convergence) is True):
                warning_flag = True
                status_message = ('callback function requested stop early '
                                  'by returning True')
                break
            if convergence < self.tol or warning_flag:
                break
        else:
            status_message = _status_message['maxiter']
            warning_flag = True
        DE_result = OptimizeResult(
            x=self.x,
            fun=self.population_energies[0],
            nfev=self._nfev,
            nit=nit,
            message=status_message,
            success=(warning_flag is not True))
        if self.polish:
            # Refine the best member with a local gradient-based search.
            result = minimize(self.func,
                              np.copy(DE_result.x),
                              method='L-BFGS-B',
                              bounds=self.limits.T,
                              args=self.args)
            self._nfev += result.nfev
            DE_result.nfev = self._nfev
            if result.fun < DE_result.fun:
                DE_result.fun = result.fun
                DE_result.x = result.x
                DE_result.jac = result.jac
                # to keep internal state consistent
                self.population_energies[0] = result.fun
                self.population[0] = self._unscale_parameters(result.x)
        return DE_result
    def _calculate_population_energies(self):
        """
        Calculate the energies of all the population members at the same time.
        Puts the best member in first place. Useful if the population has just
        been initialised.
        """
        for index, candidate in enumerate(self.population):
            if self._nfev > self.maxfun:
                break
            parameters = self._scale_parameters(candidate)
            self.population_energies[index] = self.func(parameters,
                                                        *self.args)
            self._nfev += 1
        minval = np.argmin(self.population_energies)
        # put the lowest energy into the best solution position.
        lowest_energy = self.population_energies[minval]
        self.population_energies[minval] = self.population_energies[0]
        self.population_energies[0] = lowest_energy
        self.population[[0, minval], :] = self.population[[minval, 0], :]
    def __iter__(self):
        return self
    def __next__(self):
        """
        Evolve the population by a single generation
        Returns
        -------
        x : ndarray
            The best solution from the solver.
        fun : float
            Value of objective function obtained from the best solution.
        """
        # the population may have just been initialized (all entries are
        # np.inf). If it has you have to calculate the initial energies
        if np.all(np.isinf(self.population_energies)):
            self._calculate_population_energies()
        if self.dither is not None:
            # Dithering: redraw the mutation constant for this generation.
            self.scale = (self.random_number_generator.rand()
                          * (self.dither[1] - self.dither[0]) + self.dither[0])
        for candidate in range(self.num_population_members):
            if self._nfev > self.maxfun:
                raise StopIteration
            # create a trial solution
            trial = self._mutate(candidate)
            # ensuring that it's in the range [0, 1)
            self._ensure_constraint(trial)
            # scale from [0, 1) to the actual parameter value
            parameters = self._scale_parameters(trial)
            # determine the energy of the objective function
            energy = self.func(parameters, *self.args)
            self._nfev += 1
            # if the energy of the trial candidate is lower than the
            # original population member then replace it
            if energy < self.population_energies[candidate]:
                self.population[candidate] = trial
                self.population_energies[candidate] = energy
                # if the trial candidate also has a lower energy than the
                # best solution then replace that as well
                if energy < self.population_energies[0]:
                    self.population_energies[0] = energy
                    self.population[0] = trial
        return self.x, self.population_energies[0]
    def next(self):
        """
        Evolve the population by a single generation
        Returns
        -------
        x : ndarray
            The best solution from the solver.
        fun : float
            Value of objective function obtained from the best solution.
        """
        # next() is required for compatibility with Python2.7.
        return self.__next__()
    def _scale_parameters(self, trial):
        """
        scale from a number between 0 and 1 to parameters.
        """
        # __scale_arg1/__scale_arg2 are the midpoint and width of each
        # parameter's (min, max) range, precomputed in __init__.
        return self.__scale_arg1 + (trial - 0.5) * self.__scale_arg2
    def _unscale_parameters(self, parameters):
        """
        scale from parameters to a number between 0 and 1.
        """
        return (parameters - self.__scale_arg1) / self.__scale_arg2 + 0.5
    def _ensure_constraint(self, trial):
        """
        make sure the parameters lie between the limits
        """
        # Out-of-bounds entries are resampled uniformly rather than clipped.
        for index, param in enumerate(trial):
            if param > 1 or param < 0:
                trial[index] = self.random_number_generator.rand()
    def _mutate(self, candidate):
        """
        create a trial vector based on a mutation strategy
        """
        trial = np.copy(self.population[candidate])
        rng = self.random_number_generator
        # fill_point guarantees at least one parameter is always taken from
        # the mutant vector bprime.
        fill_point = rng.randint(0, self.parameter_count)
        if (self.strategy == 'randtobest1exp' or
                self.strategy == 'randtobest1bin'):
            bprime = self.mutation_func(candidate,
                                        self._select_samples(candidate, 5))
        else:
            bprime = self.mutation_func(self._select_samples(candidate, 5))
        if self.strategy in self._binomial:
            crossovers = rng.rand(self.parameter_count)
            crossovers = crossovers < self.cross_over_probability
            # the last one is always from the bprime vector for binomial
            # If you fill in modulo with a loop you have to set the last one to
            # true. If you don't use a loop then you can have any random entry
            # be True.
            crossovers[fill_point] = True
            trial = np.where(crossovers, bprime, trial)
            return trial
        elif self.strategy in self._exponential:
            i = 0
            while (i < self.parameter_count and
                   rng.rand() < self.cross_over_probability):
                trial[fill_point] = bprime[fill_point]
                fill_point = (fill_point + 1) % self.parameter_count
                i += 1
            return trial
    def _best1(self, samples):
        """
        best1bin, best1exp
        """
        # b' = b_0 + F * (x_r0 - x_r1)
        r0, r1 = samples[:2]
        return (self.population[0] + self.scale *
                (self.population[r0] - self.population[r1]))
    def _rand1(self, samples):
        """
        rand1bin, rand1exp
        """
        # b' = x_r0 + F * (x_r1 - x_r2)
        r0, r1, r2 = samples[:3]
        return (self.population[r0] + self.scale *
                (self.population[r1] - self.population[r2]))
    def _randtobest1(self, candidate, samples):
        """
        randtobest1bin, randtobest1exp
        """
        # b' = x_i + F * (b_0 - x_i) + F * (x_r0 - x_r1)
        r0, r1 = samples[:2]
        bprime = np.copy(self.population[candidate])
        bprime += self.scale * (self.population[0] - bprime)
        bprime += self.scale * (self.population[r0] -
                                self.population[r1])
        return bprime
    def _best2(self, samples):
        """
        best2bin, best2exp
        """
        # b' = b_0 + F * (x_r0 + x_r1 - x_r2 - x_r3)
        r0, r1, r2, r3 = samples[:4]
        bprime = (self.population[0] + self.scale *
                  (self.population[r0] + self.population[r1] -
                   self.population[r2] - self.population[r3]))
        return bprime
    def _rand2(self, samples):
        """
        rand2bin, rand2exp
        """
        # b' = x_r0 + F * (x_r1 + x_r2 - x_r3 - x_r4)
        r0, r1, r2, r3, r4 = samples
        bprime = (self.population[r0] + self.scale *
                  (self.population[r1] + self.population[r2] -
                   self.population[r3] - self.population[r4]))
        return bprime
    def _select_samples(self, candidate, number_samples):
        """
        obtain random integers from range(self.num_population_members),
        without replacement.  You can't have the original candidate either.
        """
        idxs = list(range(self.num_population_members))
        idxs.remove(candidate)
        self.random_number_generator.shuffle(idxs)
        idxs = idxs[:number_samples]
        return idxs
def _make_random_gen(seed):
"""Turn seed into a np.random.RandomState instance
If seed is None, return the RandomState singleton used by np.random.
If seed is an int, return a new RandomState instance seeded with seed.
If seed is already a RandomState instance, return it.
Otherwise raise ValueError.
"""
if seed is None or seed is np.random:
return np.random.mtrand._rand
if isinstance(seed, (numbers.Integral, np.integer)):
return np.random.RandomState(seed)
if isinstance(seed, np.random.RandomState):
return seed
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
' instance' % seed)
| gpl-3.0 |
0jpq0/kbengine | kbe/res/scripts/common/Lib/test/test_pep247.py | 84 | 2242 | """
Test suite to check compilance with PEP 247, the standard API
for hashing algorithms
"""
import hmac
import unittest
from hashlib import md5, sha1, sha224, sha256, sha384, sha512
from test import support
class Pep247Test(unittest.TestCase):
    """Checks hashing modules/objects against the PEP 247 standard API."""

    def check_module(self, module, key=None):
        # A conforming module exposes digest_size (None or positive) and a
        # new() factory that itself conforms to the object interface.
        self.assertTrue(hasattr(module, 'digest_size'))
        self.assertTrue(module.digest_size is None or module.digest_size > 0)
        self.check_object(module.new, module.digest_size, key)

    def check_object(self, cls, digest_size, key, digestmod=None):
        # Construct the object three ways (empty, with data, incremental)
        # and verify they agree.
        if key is None:
            obj1 = cls()
            obj2 = cls(b'string')
            h1 = cls(b'string').digest()
            obj3 = cls()
            obj3.update(b'string')
            h2 = obj3.digest()
        else:
            if digestmod is None:
                digestmod = md5
            obj1 = cls(key, digestmod=digestmod)
            obj2 = cls(key, b'string', digestmod=digestmod)
            h1 = cls(key, b'string', digestmod=digestmod).digest()
            obj3 = cls(key, digestmod=digestmod)
            obj3.update(b'string')
            h2 = obj3.digest()
        self.assertEqual(h1, h2)
        self.assertTrue(hasattr(obj1, 'digest_size'))
        if digest_size is not None:
            self.assertEqual(obj1.digest_size, digest_size)
        self.assertEqual(obj1.digest_size, len(h1))
        obj1.update(b'string')
        obj_copy = obj1.copy()
        self.assertEqual(obj1.digest(), obj_copy.digest())
        self.assertEqual(obj1.hexdigest(), obj_copy.hexdigest())
        digest, hexdigest = obj1.digest(), obj1.hexdigest()
        # hexdigest() must be the lowercase hex rendering of digest().
        self.assertEqual(''.join('%02x' % byte for byte in digest), hexdigest)

    def test_md5(self):
        self.check_object(md5, None, None)

    def test_sha(self):
        for sha_cls in (sha1, sha224, sha256, sha384, sha512):
            self.check_object(sha_cls, None, None)

    def test_hmac(self):
        self.check_module(hmac, key=b'abc')
def test_main():
    # Entry point used by CPython's regrtest machinery; delegates running
    # the whole Pep247Test case to test.support.
    support.run_unittest(Pep247Test)
# Allow running this test file directly (outside the regrtest harness).
if __name__ == '__main__':
    test_main()
| lgpl-3.0 |
google/makani | lib/python/pack2/tests/parser_test_specialized.py | 1 | 4667 | # Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import textwrap
import unittest
from makani.lib.python.pack2 import parser
class TestParserSpecialized(unittest.TestCase):
  """Tests for 'specialize(...)' param blocks in the pack2 parser."""

  def _assert_parse_error(self, text):
    """Parse *text* with a fresh Parser and expect parser.ParseError.

    Factored out of the error-path tests below, which previously repeated
    identical parse-and-expect-failure boilerplate.
    """
    p = parser.Parser()
    with self.assertRaises(parser.ParseError):
      p.Parse(text)

  def testSpecialized(self):
    # A valid specialization inherits the parent's fields and narrows the
    # overridden field's type to the named enum.
    text = textwrap.dedent("""\
        enum32 FooRevision {
          kFooRevisionA = 0,
          kFooRevisionB = 1,
        }

        param BaseConfigParams {
          string[32] name;
          int32 revision;
        }

        specialize(BaseConfigParams) FooConfigParams {
          FooRevision revision;
        }
        """)
    expected = textwrap.dedent("""\
        param BaseConfigParams {
          string[32] name; // offset: 0
          int32 revision; // offset: 32
        }

        param FooConfigParams {
          string[32] name; // offset: 0
          enum32 FooRevision {
            kFooRevisionA = 0,
            kFooRevisionB = 1,
          } revision; // offset: 32
        }
        """)
    p = parser.Parser()
    metadata = p.Parse(text)
    self.assertEqual(str(metadata), expected)

  def testParentNotDefined(self):
    # Specializing an unknown parent param is an error.
    self._assert_parse_error(textwrap.dedent("""\
        enum32 FooRevision {
          kFooRevisionA = 0,
          kFooRevisionB = 1,
        }

        specialize(BaseConfigParams) FooConfigParams {
          FooRevision revision;
        }
        """))

  def testEnumNotDefined(self):
    # The overriding field's enum type must exist.
    self._assert_parse_error(textwrap.dedent("""\
        param BaseConfigParams {
          string[32] name;
          int32 revision;
        }

        specialize(BaseConfigParams) FooConfigParams {
          FooRevision revision;
        }
        """))

  def testMisnamedField(self):
    # The overriding field must name a field present in the parent.
    self._assert_parse_error(textwrap.dedent("""\
        param BaseConfigParams {
          string[32] name;
          int32 revision;
        }

        enum32 FooRevision {
          kFooRevisionA = 0,
          kFooRevisionB = 1,
        }

        specialize(BaseConfigParams) FooConfigParams {
          FooRevision bad_revision;
        }
        """))

  def testNotSignedInt(self):
    # Only signed-int parent fields may be specialized to an enum.
    self._assert_parse_error(textwrap.dedent("""\
        param BaseConfigParams {
          string[32] name;
          uint32 revision;
        }

        enum32 FooRevision {
          kFooRevisionA = 0,
          kFooRevisionB = 1,
        }

        specialize(BaseConfigParams) FooConfigParams {
          FooRevision revision;
        }
        """))

  def testMismatchedWidth(self):
    # Enum width must match the parent field's width (enum16 vs int32).
    self._assert_parse_error(textwrap.dedent("""\
        param BaseConfigParams {
          string[32] name;
          int32 revision;
        }

        enum16 FooRevision {
          kFooRevisionA = 0,
          kFooRevisionB = 1,
        }

        specialize(BaseConfigParams) FooConfigParams {
          FooRevision revision;
        }
        """))

  def testBadParentExtent(self):
    # A parent array field cannot be specialized by a scalar enum field.
    self._assert_parse_error(textwrap.dedent("""\
        param BaseConfigParams {
          string[32] name;
          int32 revision[2];
        }

        enum32 FooRevision {
          kFooRevisionA = 0,
          kFooRevisionB = 1,
        }

        specialize(BaseConfigParams) FooConfigParams {
          FooRevision revision;
        }
        """))

  def testBadFieldExtent(self):
    # A scalar parent field cannot be specialized by an array enum field.
    self._assert_parse_error(textwrap.dedent("""\
        param BaseConfigParams {
          string[32] name;
          int32 revision;
        }

        enum32 FooRevision {
          kFooRevisionA = 0,
          kFooRevisionB = 1,
        }

        specialize(BaseConfigParams) FooConfigParams {
          FooRevision revision[2];
        }
        """))
# Run the tests when executed directly.
if __name__ == '__main__':
  unittest.main()
| apache-2.0 |
williballenthin/idawilli | scripts/go/go_fixup_fptrs.py | 1 | 2062 | """
when IDA's auto-discovery of functions in 64-bit Windows Go executables fails,
scan for global (.rdata) pointers into the code section (.text) and assume these are function pointers.
"""
import idc
import ida_name
import ida_auto
import ida_bytes
import idautils
def enum_segments():
    """Yield (start_ea, end_ea, name) for every segment in the database."""
    for seg_ea in idautils.Segments():
        yield seg_ea, idc.get_segm_end(seg_ea), idc.get_segm_name(seg_ea)
def find_pointers(start, end):
    """Byte-scan [start, end-8) yielding (src, target, size) candidates.

    Every address is tried as both a qword and a dword, so a single
    address can yield twice when both interpretations land inside a
    mapped segment.
    """
    for src in range(start, end - 0x8):
        qword = ida_bytes.get_qword(src)
        if idc.get_segm_start(qword) != idc.BADADDR:
            yield src, qword, 8
        dword = ida_bytes.get_dword(src)
        if idc.get_segm_start(dword) != idc.BADADDR:
            yield src, dword, 4
def is_head(va):
    """True if *va* is the first byte of a defined item."""
    flags = idc.get_full_flags(va)
    return ida_bytes.is_head(flags)
def get_head(va):
    """Return *va* itself if it starts an item, else the covering head."""
    return va if is_head(va) else idc.prev_head(va)
def is_code(va):
    """True if the item containing *va* is defined as code."""
    if not is_head(va):
        # Resolve to the covering head first, then re-test.
        return is_code(get_head(va))
    return ida_bytes.is_code(idc.get_full_flags(va))
def is_unknown(va):
    """True if *va* lies in bytes IDA has not yet defined as anything."""
    flags = idc.get_full_flags(va)
    return ida_bytes.is_unknown(flags)
def main():
    # Walk data segments looking for values that point into code segments;
    # any such value whose target is not yet defined as code is assumed to
    # be a function pointer that auto-analysis missed.
    for segstart, segend, segname in enum_segments():
        # Only scan read-only data (or the UPX-packed equivalent).
        if segname not in ('.rdata', 'UPX1'):
            continue
        print(segname)
        for src, dst, size in find_pointers(segstart, segend):
            # Target must land in the code section.
            if idc.get_segm_name(dst) not in (".text", "UPX0"):
                continue
            # Already code: nothing to fix.
            if is_code(dst):
                continue
            print("new function pointer: 0x%x -> 0x%x" % (src, dst))
            # Ask auto-analysis to define code and a function at the target.
            ida_auto.auto_make_code(dst)
            ida_auto.auto_make_proc(dst)
            # Re-type the source bytes as a pointer-sized data item.
            ida_bytes.del_items(src, size)
            ida_bytes.create_data(src, idc.FF_QWORD if size == 8 else idc.FF_DWORD, size, idc.BADADDR)
            # this doesn't seem to always work :-(
            idc.op_plain_offset(src, -1, 0)
            ida_name.set_name(src, "j_%s_%x" % (src, dst))
# Script entry point when run inside IDA.
if __name__ == '__main__':
    main()
| apache-2.0 |
miumok98/weblate | weblate/trans/management/commands/updatechecks.py | 11 | 1050 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from weblate.trans.management.commands import WeblateLangCommand
class Command(WeblateLangCommand):
    """Management command that re-runs quality checks on translation units."""

    help = 'updates checks for units'

    def handle(self, *args, **options):
        # Re-run every check on each unit selected by the CLI filters.
        for translation_unit in self.iterate_units(*args, **options):
            translation_unit.run_checks()
| gpl-3.0 |
plotly/plotly.py | packages/python/plotly/plotly/graph_objs/scatterpolargl/_line.py | 2 | 7248 | from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Line(_BaseTraceHierarchyType):
    """Line style container for the "scatterpolargl" trace type.

    Auto-generated plotly.py hierarchy node: each property below maps
    one-to-one onto the ``scatterpolargl.line`` attribute of the
    plotly.js figure schema, with validation delegated to the base
    class's ``__setitem__``.
    """

    # class properties
    # --------------------
    _parent_path_str = "scatterpolargl"
    _path_str = "scatterpolargl.line"
    _valid_props = {"color", "dash", "shape", "width"}

    # color
    # -----
    @property
    def color(self):
        """
        Sets the line color.

        The 'color' property is a color and may be specified as:
          - A hex string (e.g. '#ff0000')
          - An rgb/rgba string (e.g. 'rgb(255,0,0)')
          - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
          - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
          - A named CSS color (any CSS3/X11 color name is accepted,
            e.g. 'red', 'steelblue', 'darkslategray')

        Returns
        -------
        str
        """
        return self["color"]

    @color.setter
    def color(self, val):
        self["color"] = val

    # dash
    # ----
    @property
    def dash(self):
        """
        Sets the style of the lines.

        The 'dash' property is an enumeration that may be specified as:
          - One of the following enumeration values:
                ['solid', 'dot', 'dash', 'longdash', 'dashdot',
                'longdashdot']

        Returns
        -------
        Any
        """
        return self["dash"]

    @dash.setter
    def dash(self, val):
        self["dash"] = val

    # shape
    # -----
    @property
    def shape(self):
        """
        Determines the line shape. The values correspond to step-wise
        line shapes.

        The 'shape' property is an enumeration that may be specified as:
          - One of the following enumeration values:
                ['linear', 'hv', 'vh', 'hvh', 'vhv']

        Returns
        -------
        Any
        """
        return self["shape"]

    @shape.setter
    def shape(self, val):
        self["shape"] = val

    # width
    # -----
    @property
    def width(self):
        """
        Sets the line width (in px).

        The 'width' property is a number and may be specified as:
          - An int or float in the interval [0, inf]

        Returns
        -------
        int|float
        """
        return self["width"]

    @width.setter
    def width(self, val):
        self["width"] = val

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        color
            Sets the line color.
        dash
            Sets the style of the lines.
        shape
            Determines the line shape. The values correspond to
            step-wise line shapes.
        width
            Sets the line width (in px).
        """

    def __init__(
        self, arg=None, color=None, dash=None, shape=None, width=None, **kwargs
    ):
        """
        Construct a new Line object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of
            :class:`plotly.graph_objs.scatterpolargl.Line`
        color
            Sets the line color.
        dash
            Sets the style of the lines.
        shape
            Determines the line shape. The values correspond to
            step-wise line shapes.
        width
            Sets the line width (in px).

        Returns
        -------
        Line
        """
        super(Line, self).__init__("line")

        # Fast path: when created internally as a child of an existing
        # parent object, skip validation and population entirely.
        if "_parent" in kwargs:
            self._parent = kwargs["_parent"]
            return

        # Validate arg
        # ------------
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            # Copy so popping keys below does not mutate the caller's dict.
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.scatterpolargl.Line
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scatterpolargl.Line`"""
            )

        # Handle skip_invalid
        # -------------------
        self._skip_invalid = kwargs.pop("skip_invalid", False)
        self._validate = kwargs.pop("_validate", True)

        # Populate data dict with properties
        # ----------------------------------
        # Explicit keyword arguments take precedence over values in `arg`.
        _v = arg.pop("color", None)
        _v = color if color is not None else _v
        if _v is not None:
            self["color"] = _v
        _v = arg.pop("dash", None)
        _v = dash if dash is not None else _v
        if _v is not None:
            self["dash"] = _v
        _v = arg.pop("shape", None)
        _v = shape if shape is not None else _v
        if _v is not None:
            self["shape"] = _v
        _v = arg.pop("width", None)
        _v = width if width is not None else _v
        if _v is not None:
            self["width"] = _v

        # Process unknown kwargs
        # ----------------------
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset skip_invalid
        # ------------------
        self._skip_invalid = False
| mit |
tuenti/Diamond | src/collectors/slabinfo/slabinfo.py | 54 | 2532 | # coding=utf-8
"""
The SlabInfoCollector collects metrics on process stats from
/proc/slabinfo
#### Dependencies
* /proc/slabinfo
"""
import platform
import os
import diamond.collector
# Counters roll over at the native machine word size; choose the matching
# maximum so that wrapped counters are not reported as incorrect or
# negative deltas against MAX_VALUES.
counter = (2 ** (64 if platform.architecture()[0] == '64bit' else 32)) - 1
class SlabInfoCollector(diamond.collector.Collector):
    """Publishes per-slab statistics parsed from /proc/slabinfo."""

    PROC = '/proc/slabinfo'

    # Column groups published under '<slab>.<col>', '<slab>.tunables.<col>'
    # and '<slab>.slabdata.<col>' respectively.
    STAT_KEYS = ('<active_objs>', '<num_objs>', '<objsize>',
                 '<objperslab>', '<pagesperslab>')
    TUNABLE_KEYS = ('<limit>', '<batchcount>', '<sharedfactor>')
    SLABDATA_KEYS = ('<active_slabs>', '<num_slabs>', '<sharedavail>')

    def get_default_config_help(self):
        """Return help text for config options (no collector-specific ones)."""
        config_help = super(SlabInfoCollector, self).get_default_config_help()
        config_help.update({
        })
        return config_help

    def get_default_config(self):
        """
        Returns the default collector settings
        """
        config = super(SlabInfoCollector, self).get_default_config()
        config.update({
            'path': 'slabinfo'
        })
        return config

    def collect(self):
        """
        Collect slab statistics from /proc/slabinfo.

        Returns False when the proc file is unreadable; otherwise publishes
        one metric per interesting column of every slab row.
        """
        if not os.access(self.PROC, os.R_OK):
            return False

        keys = None
        # 'with' guarantees the handle is closed even if parsing or
        # publishing raises (the previous version leaked it on error).
        with open(self.PROC, 'r') as proc_file:
            for line in proc_file:
                if line.startswith('slabinfo'):
                    # Version banner, e.g. "slabinfo - version: 2.1".
                    continue
                if line.startswith('#'):
                    # Header row naming the columns; drop the leading '#'.
                    keys = line.split()[1:]
                    continue
                if keys is None:
                    # Malformed file: data row before any header row.
                    # (Previously this raised a NameError.)
                    continue
                data = line.split()
                slab = data[0]
                self._publish_group(slab, '', self.STAT_KEYS, keys, data)
                self._publish_group(slab, '.tunables', self.TUNABLE_KEYS,
                                    keys, data)
                self._publish_group(slab, '.slabdata', self.SLABDATA_KEYS,
                                    keys, data)

    def _publish_group(self, slab, infix, wanted, keys, data):
        """Publish one group of columns for a single slab row.

        `infix` is '' for the plain stats group, or '.tunables' /
        '.slabdata' for the grouped columns.
        """
        for key in wanted:
            i = keys.index(key)
            metric_name = slab + infix + '.' + key.replace(
                '<', '').replace('>', '')
            self.publish(metric_name, int(data[i]))
| mit |
ananthonline/grpc | src/python/grpcio/grpc/framework/interfaces/base/base.py | 21 | 12606 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The base interface of RPC Framework.
Implementations of this interface support the conduct of "operations":
exchanges between two distinct ends of an arbitrary number of data payloads
and metadata such as a name for the operation, initial and terminal metadata
in each direction, and flow control. These operations may be used for transfers
of data, remote procedure calls, status indication, or anything else
applications choose.
"""
# threading is referenced from specification in this module.
import abc
import enum
import threading # pylint: disable=unused-import
import six
# abandonment is referenced from specification in this module.
from grpc.framework.foundation import abandonment # pylint: disable=unused-import
class NoSuchMethodError(Exception):
  """Raised when an operation names a group/method the servicer lacks.

  Attributes:
    code: Application-specific code value to communicate to the other side
      of the operation along with indication of operation termination. May
      be None.
    details: Application-specific details value to communicate to the other
      side of the operation along with indication of operation termination.
      May be None.
  """

  def __init__(self, code, details):
    """Stores the optional code and details describing the failure.

    Args:
      code: A code value to communicate to the other side of the operation
        along with indication of operation termination. May be None.
      details: A details value to communicate to the other side of the
        operation along with indication of operation termination. May be
        None.
    """
    # NOTE: deliberately no super().__init__() call here — BaseException's
    # __new__ already captured args, matching the original behavior.
    self.details = details
    self.code = code
class Outcome(object):
  """Describes how an operation ended.

  Attributes:
    kind: A Kind value coarsely identifying how the operation terminated.
    code: An application-specific code value, or None if none was provided.
    details: An application-specific details value, or None if none was
      provided.
  """

  @enum.unique
  class Kind(enum.Enum):
    """The coarse ways in which an operation can terminate."""

    COMPLETED = 'completed'
    CANCELLED = 'cancelled'
    EXPIRED = 'expired'
    LOCAL_SHUTDOWN = 'local shutdown'
    REMOTE_SHUTDOWN = 'remote shutdown'
    RECEPTION_FAILURE = 'reception failure'
    TRANSMISSION_FAILURE = 'transmission failure'
    LOCAL_FAILURE = 'local failure'
    REMOTE_FAILURE = 'remote failure'
class Completion(six.with_metaclass(abc.ABCMeta)):
  """An aggregate of the values exchanged upon operation completion.

  This ABC only documents the expected attributes; implementations are
  plain value holders supplied by the framework.

  Attributes:
    terminal_metadata: A terminal metadata value for the operation.
    code: A code value for the operation.
    message: A message value for the operation.
  """
class OperationContext(six.with_metaclass(abc.ABCMeta)):
  """Provides operation-related information and action.

  Abstract interface; concrete instances are supplied to customer code by
  the framework implementation.
  """

  @abc.abstractmethod
  def outcome(self):
    """Indicates the operation's outcome (or that the operation is ongoing).

    Returns:
      None if the operation is still active or the Outcome value for the
        operation if it has terminated.
    """
    raise NotImplementedError()

  @abc.abstractmethod
  def add_termination_callback(self, callback):
    """Adds a function to be called upon operation termination.

    Args:
      callback: A callable to be passed an Outcome value on operation
        termination.

    Returns:
      None if the operation has not yet terminated and the passed callback will
        later be called when it does terminate, or if the operation has already
        terminated an Outcome value describing the operation termination and the
        passed callback will not be called as a result of this method call.
    """
    raise NotImplementedError()

  @abc.abstractmethod
  def time_remaining(self):
    """Describes the length of allowed time remaining for the operation.

    Returns:
      A nonnegative float indicating the length of allowed time in seconds
      remaining for the operation to complete before it is considered to have
      timed out. Zero is returned if the operation has terminated.
    """
    raise NotImplementedError()

  @abc.abstractmethod
  def cancel(self):
    """Cancels the operation if the operation has not yet terminated."""
    raise NotImplementedError()

  @abc.abstractmethod
  def fail(self, exception):
    """Indicates that the operation has failed.

    Args:
      exception: An exception germane to the operation failure. May be None.
    """
    raise NotImplementedError()
class Operator(six.with_metaclass(abc.ABCMeta)):
  """An interface through which to participate in an operation.

  Each call to advance() passes one or more of the values that make up an
  operation's data flow in one direction.
  """

  @abc.abstractmethod
  def advance(
      self, initial_metadata=None, payload=None, completion=None,
      allowance=None):
    """Progresses the operation.

    Args:
      initial_metadata: An initial metadata value. Only one may ever be
        communicated in each direction for an operation, and they must be
        communicated no later than either the first payload or the completion.
      payload: A payload value.
      completion: A Completion value. May only ever be non-None once in either
        direction, and no payloads may be passed after it has been communicated.
      allowance: A positive integer communicating the number of additional
        payloads allowed to be passed by the remote side of the operation.
    """
    raise NotImplementedError()
class ProtocolReceiver(six.with_metaclass(abc.ABCMeta)):
  """A means of receiving protocol values during an operation."""

  @abc.abstractmethod
  def context(self, protocol_context):
    """Accepts the protocol context object for the operation.

    Args:
      protocol_context: The protocol context object for the operation.
    """
    raise NotImplementedError()
class Subscription(six.with_metaclass(abc.ABCMeta)):
  """Describes customer code's interest in values from the other side.

  Attributes:
    kind: A Kind value describing the overall kind of this value.
    termination_callback: A callable to be passed the Outcome associated with
      the operation after it has terminated. Must be non-None if kind is
      Kind.TERMINATION_ONLY. Must be None otherwise.
    allowance: A callable behavior that accepts positive integers representing
      the number of additional payloads allowed to be passed to the other side
      of the operation. Must be None if kind is Kind.FULL. Must not be None
      otherwise.
    operator: An Operator to be passed values from the other side of the
      operation. Must be non-None if kind is Kind.FULL. Must be None otherwise.
    protocol_receiver: A ProtocolReceiver to be passed protocol objects as they
      become available during the operation. Must be non-None if kind is
      Kind.FULL.
  """

  @enum.unique
  class Kind(enum.Enum):
    """How much of the other side's traffic the subscriber wants."""

    NONE = 'none'
    TERMINATION_ONLY = 'termination only'
    FULL = 'full'
class Servicer(six.with_metaclass(abc.ABCMeta)):
  """Interface for service implementations.

  Application code implements this to handle incoming operations.
  """

  @abc.abstractmethod
  def service(self, group, method, context, output_operator):
    """Services an operation.

    Args:
      group: The group identifier of the operation to be serviced.
      method: The method identifier of the operation to be serviced.
      context: An OperationContext object affording contextual information and
        actions.
      output_operator: An Operator that will accept output values of the
        operation.

    Returns:
      A Subscription via which this object may or may not accept more values of
        the operation.

    Raises:
      NoSuchMethodError: If this Servicer does not handle operations with the
        given group and method.
      abandonment.Abandoned: If the operation has been aborted and there no
        longer is any reason to service the operation.
    """
    raise NotImplementedError()
class End(six.with_metaclass(abc.ABCMeta)):
  """Common type for entry-point objects on both sides of an operation.

  An End both initiates operations (operate) and manages the lifecycle of
  its side of all operations (start/stop).
  """

  @abc.abstractmethod
  def start(self):
    """Starts this object's service of operations."""
    raise NotImplementedError()

  @abc.abstractmethod
  def stop(self, grace):
    """Stops this object's service of operations.

    This object will refuse service of new operations as soon as this method is
    called but operations under way at the time of the call may be given a
    grace period during which they are allowed to finish.

    Args:
      grace: A duration of time in seconds to allow ongoing operations to
        terminate before being forcefully terminated by the stopping of this
        End. May be zero to terminate all ongoing operations and immediately
        stop.

    Returns:
      A threading.Event that will be set to indicate all operations having
        terminated and this End having completely stopped. The returned event
        may not be set until after the full grace period (if some ongoing
        operation continues for the full length of the period) or it may be set
        much sooner (if for example this End had no operations in progress at
        the time its stop method was called).
    """
    raise NotImplementedError()

  @abc.abstractmethod
  def operate(
      self, group, method, subscription, timeout, initial_metadata=None,
      payload=None, completion=None, protocol_options=None):
    """Commences an operation.

    Args:
      group: The group identifier of the invoked operation.
      method: The method identifier of the invoked operation.
      subscription: A Subscription to which the results of the operation will be
        passed.
      timeout: A length of time in seconds to allow for the operation.
      initial_metadata: An initial metadata value to be sent to the other side
        of the operation. May be None if the initial metadata will be later
        passed via the returned operator or if there will be no initial metadata
        passed at all.
      payload: An initial payload for the operation.
      completion: A Completion value indicating the end of transmission to the
        other side of the operation.
      protocol_options: A value specified by the provider of a Base interface
        implementation affording custom state and behavior.

    Returns:
      A pair of objects affording information about the operation and action
        continuing the operation. The first element of the returned pair is an
        OperationContext for the operation and the second element of the
        returned pair is an Operator to which operation values not passed in
        this call should later be passed.
    """
    raise NotImplementedError()

  @abc.abstractmethod
  def operation_stats(self):
    """Reports the number of terminated operations broken down by outcome.

    Returns:
      A dictionary from Outcome.Kind value to an integer identifying the number
        of operations that terminated with that outcome kind.
    """
    raise NotImplementedError()

  @abc.abstractmethod
  def add_idle_action(self, action):
    """Adds an action to be called when this End has no ongoing operations.

    Args:
      action: A callable that accepts no arguments.
    """
    raise NotImplementedError()
heysion/clone-cliapp | cliapp/settings_tests.py | 1 | 17612 | # Copyright (C) 2009-2012 Lars Wirzenius
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import StringIO
import unittest
import cliapp
class SettingsTests(unittest.TestCase):
    def setUp(self):
        # Fresh Settings object for every test.
        self.settings = cliapp.Settings('appname', '1.0')

    def test_has_progname(self):
        self.assertEqual(self.settings.progname, 'appname')

    def test_sets_progname(self):
        self.settings.progname = 'foo'
        self.assertEqual(self.settings.progname, 'foo')

    def test_has_version(self):
        self.assertEqual(self.settings.version, '1.0')

    def test_sets_usage_from_func(self):
        # A callable usage value is invoked when the parser is built.
        s = cliapp.Settings('appname', '1.0', usage=lambda: 'xyzzy')
        p = s.build_parser()
        self.assert_('xyzzy' in p.usage)
    def test_adds_default_options_and_settings(self):
        # Every Settings instance comes with these built-in settings.
        self.assert_('output' in self.settings)
        self.assert_('log' in self.settings)
        self.assert_('log-level' in self.settings)

    def test_iterates_over_canonical_settings_names(self):
        # Iteration yields only canonical setting names.
        known = ['output', 'log', 'log-level']
        self.assertEqual(sorted(x for x in self.settings if x in known),
                         sorted(known))

    def test_keys_returns_canonical_names(self):
        known = ['output', 'log', 'log-level']
        self.assertEqual(sorted(x for x in self.settings.keys() if x in known),
                         sorted(known))
    def test_parses_options(self):
        self.settings.string(['foo'], 'foo help', group='foo')
        self.settings.boolean(['bar'], 'bar help')
        self.settings.parse_args(['--foo=foovalue', '--bar'])
        self.assertEqual(self.settings['foo'], 'foovalue')
        self.assertEqual(self.settings['bar'], True)

    def test_parses_boolean_negation_option(self):
        # The later --no-bar overrides the earlier --bar.
        self.settings.boolean(['bar'], 'bar help')
        self.settings.parse_args(['--bar', '--no-bar'])
        self.assertEqual(self.settings['bar'], False)

    def test_parses_boolean_negation_option_in_group(self):
        self.settings.boolean(['bar'], 'bar help', group='bar')
        self.settings.parse_args(['--bar', '--no-bar'])
        self.assertEqual(self.settings['bar'], False)

    def test_does_not_have_foo_setting_by_default(self):
        self.assertFalse('foo' in self.settings)

    def test_raises_keyerror_for_getting_unknown_setting(self):
        self.assertRaises(KeyError, self.settings.__getitem__, 'foo')

    def test_raises_keyerror_for_setting_unknown_setting(self):
        self.assertRaises(KeyError, self.settings.__setitem__, 'foo', 'bar')
    def test_adds_string_setting(self):
        self.settings.string(['foo'], 'foo help')
        self.assert_('foo' in self.settings)

    def test_adds_string_list_setting(self):
        self.settings.string_list(['foo'], 'foo help')
        self.assert_('foo' in self.settings)

    def test_string_list_is_empty_list_by_default(self):
        self.settings.string_list(['foo'], '')
        self.settings.parse_args([])
        self.assertEqual(self.settings['foo'], [])

    def test_string_list_parses_one_item(self):
        self.settings.string_list(['foo'], '')
        self.settings.parse_args(['--foo=foo'])
        self.assertEqual(self.settings['foo'], ['foo'])

    def test_string_list_parses_two_items(self):
        # Repeated uses of the option accumulate into the list.
        self.settings.string_list(['foo'], '')
        self.settings.parse_args(['--foo=foo', '--foo', 'bar'])
        self.assertEqual(self.settings['foo'], ['foo', 'bar'])

    def test_string_list_uses_nonempty_default_if_given(self):
        self.settings.string_list(['foo'], '', default=['bar'])
        self.settings.parse_args([])
        self.assertEqual(self.settings['foo'], ['bar'])

    def test_string_list_uses_ignores_default_if_user_provides_values(self):
        # User-supplied values replace (not extend) the default list.
        self.settings.string_list(['foo'], '', default=['bar'])
        self.settings.parse_args(['--foo=pink', '--foo=punk'])
        self.assertEqual(self.settings['foo'], ['pink', 'punk'])
    def test_adds_choice_setting(self):
        self.settings.choice(['foo'], ['foo', 'bar'], 'foo help')
        self.assert_('foo' in self.settings)

    def test_choice_defaults_to_first_one(self):
        # The first listed choice is the implicit default.
        self.settings.choice(['foo'], ['foo', 'bar'], 'foo help')
        self.settings.parse_args([])
        self.assertEqual(self.settings['foo'], 'foo')

    def test_choice_accepts_any_valid_value(self):
        self.settings.choice(['foo'], ['foo', 'bar'], 'foo help')
        self.settings.parse_args(['--foo=foo'])
        self.assertEqual(self.settings['foo'], 'foo')
        self.settings.parse_args(['--foo=bar'])
        self.assertEqual(self.settings['foo'], 'bar')

    def test_choice_raises_error_for_unacceptable_value(self):
        # An invalid choice makes the parser exit; suppress_errors keeps
        # the error message out of the test output.
        self.settings.choice(['foo'], ['foo', 'bar'], 'foo help')
        self.assertRaises(SystemExit,
                          self.settings.parse_args, ['--foo=xyzzy'],
                          suppress_errors=True)
def test_adds_boolean_setting(self):
self.settings.boolean(['foo'], 'foo help')
self.assert_('foo' in self.settings)
def test_boolean_setting_is_false_by_default(self):
self.settings.boolean(['foo'], 'foo help')
self.assertFalse(self.settings['foo'])
def test_sets_boolean_setting_to_true_for_many_true_values(self):
self.settings.boolean(['foo'], 'foo help')
self.settings['foo'] = True
self.assert_(self.settings['foo'])
self.settings['foo'] = 1
self.assert_(self.settings['foo'])
def test_sets_boolean_setting_to_false_for_many_false_values(self):
self.settings.boolean(['foo'], 'foo help')
self.settings['foo'] = False
self.assertFalse(self.settings['foo'])
self.settings['foo'] = 0
self.assertFalse(self.settings['foo'])
self.settings['foo'] = ()
self.assertFalse(self.settings['foo'])
self.settings['foo'] = []
self.assertFalse(self.settings['foo'])
self.settings['foo'] = ''
self.assertFalse(self.settings['foo'])
def test_sets_boolean_to_true_from_config_file(self):
def fake_open(filename):
return StringIO.StringIO('[config]\nfoo = yes\n')
self.settings.boolean(['foo'], 'foo help')
self.settings.load_configs(open_file=fake_open)
self.assertEqual(self.settings['foo'], True)
def test_sets_boolean_to_false_from_config_file(self):
def fake_open(filename):
return StringIO.StringIO('[config]\nfoo = False\n')
self.settings.boolean(['foo'], 'foo help')
self.settings.load_configs(open_file=fake_open)
self.assertEqual(self.settings['foo'], False)
    # --- bytesize / integer settings and config-file discovery -------------

    def test_adds_bytesize_setting(self):
        self.settings.bytesize(['foo'], 'foo help')
        self.assert_('foo' in self.settings)

    def test_parses_bytesize_option(self):
        self.settings.bytesize(['foo'], 'foo help')

        # unparseable values fall back to 0
        self.settings.parse_args(args=['--foo=xyzzy'])
        self.assertEqual(self.settings['foo'], 0)

        self.settings.parse_args(args=['--foo=123'])
        self.assertEqual(self.settings['foo'], 123)

        # SI suffixes (k/m/g/t) are powers of 1000 ...
        self.settings.parse_args(args=['--foo=123k'])
        self.assertEqual(self.settings['foo'], 123 * 1000)
        self.settings.parse_args(args=['--foo=123m'])
        self.assertEqual(self.settings['foo'], 123 * 1000 ** 2)
        self.settings.parse_args(args=['--foo=123g'])
        self.assertEqual(self.settings['foo'], 123 * 1000 ** 3)
        self.settings.parse_args(args=['--foo=123t'])
        self.assertEqual(self.settings['foo'], 123 * 1000 ** 4)

        # ... IEC suffixes (kib/mib/gib/tib) are powers of 1024
        self.settings.parse_args(args=['--foo=123kib'])
        self.assertEqual(self.settings['foo'], 123 * 1024)
        self.settings.parse_args(args=['--foo=123mib'])
        self.assertEqual(self.settings['foo'], 123 * 1024 ** 2)
        self.settings.parse_args(args=['--foo=123gib'])
        self.assertEqual(self.settings['foo'], 123 * 1024 ** 3)
        self.settings.parse_args(args=['--foo=123tib'])
        self.assertEqual(self.settings['foo'], 123 * 1024 ** 4)

    def test_adds_integer_setting(self):
        self.settings.integer(['foo'], 'foo help')
        self.assert_('foo' in self.settings)

    def test_parses_integer_option(self):
        self.settings.integer(['foo'], 'foo help', default=123)
        self.settings.parse_args(args=[])
        self.assertEqual(self.settings['foo'], 123)
        self.settings.parse_args(args=['--foo=123'])
        self.assertEqual(self.settings['foo'], 123)

    def test_has_list_of_default_config_files(self):
        defaults = self.settings.default_config_files
        self.assert_(isinstance(defaults, list))
        self.assert_(len(defaults) > 0)

    def test_listconfs_returns_empty_list_for_nonexistent_directory(self):
        self.assertEqual(self.settings.listconfs('notexist'), [])

    def test_listconfs_lists_config_files_only(self):
        # only *.conf entries are picked up
        def mock_listdir(dirname):
            return ['foo.conf', 'foo.notconf']
        names = self.settings.listconfs('.', listdir=mock_listdir)
        self.assertEqual(names, ['./foo.conf'])

    def test_listconfs_sorts_names_in_C_locale(self):
        def mock_listdir(dirname):
            return ['foo.conf', 'bar.conf']
        names = self.settings.listconfs('.', listdir=mock_listdir)
        self.assertEqual(names, ['./bar.conf', './foo.conf'])
    # --- config file handling: loading, defaults, overrides ----------------

    def test_has_config_files_attribute(self):
        self.assertEqual(self.settings.config_files,
                         self.settings.default_config_files)

    def test_has_config_files_list_can_be_changed(self):
        self.settings.config_files += ['./foo']
        self.assertEqual(self.settings.config_files,
                         self.settings.default_config_files + ['./foo'])

    def test_loads_config_files(self):
        def mock_open(filename, mode=None):
            return StringIO.StringIO('''\
[config]
foo = yeehaa
''')
        self.settings.string(['foo'], 'foo help')
        self.settings.config_files = ['whatever.conf']
        self.settings.load_configs(open_file=mock_open)
        self.assertEqual(self.settings['foo'], 'yeehaa')

    def test_loads_string_list_from_config_files(self):
        def mock_open(filename, mode=None):
            # string_list values are comma-separated in config files
            return StringIO.StringIO('''\
[config]
foo = yeehaa
bar = ping, pong
''')
        self.settings.string_list(['foo'], 'foo help')
        self.settings.string_list(['bar'], 'bar help')
        self.settings.config_files = ['whatever.conf']
        self.settings.load_configs(open_file=mock_open)
        self.assertEqual(self.settings['foo'], ['yeehaa'])
        self.assertEqual(self.settings['bar'], ['ping', 'pong'])

    def test_handles_defaults_with_config_files(self):
        def mock_open(filename, mode=None):
            # empty [config] section: registered defaults must survive
            return StringIO.StringIO('''\
[config]
''')
        self.settings.string(['foo'], 'foo help', default='foo')
        self.settings.string_list(['bar'], 'bar help', default=['bar'])
        self.settings.config_files = ['whatever.conf']
        self.settings.load_configs(open_file=mock_open)
        self.assertEqual(self.settings['foo'], 'foo')
        self.assertEqual(self.settings['bar'], ['bar'])

    def test_handles_overridden_defaults_with_config_files(self):
        def mock_open(filename, mode=None):
            return StringIO.StringIO('''\
[config]
foo = yeehaa
bar = ping, pong
''')
        self.settings.string(['foo'], 'foo help', default='foo')
        self.settings.string_list(['bar'], 'bar help', default=['bar'])
        self.settings.config_files = ['whatever.conf']
        self.settings.load_configs(open_file=mock_open)
        self.assertEqual(self.settings['foo'], 'yeehaa')
        self.assertEqual(self.settings['bar'], ['ping', 'pong'])

    def test_handles_values_from_config_files_overridden_on_command_line(self):
        def mock_open(filename, mode=None):
            return StringIO.StringIO('''\
[config]
foo = yeehaa
bar = ping, pong
''')
        self.settings.string(['foo'], 'foo help', default='foo')
        self.settings.string_list(['bar'], 'bar help', default=['bar'])
        self.settings.config_files = ['whatever.conf']
        self.settings.load_configs(open_file=mock_open)
        # precedence: command line > config file > registered default
        self.settings.parse_args(['--foo=red', '--bar=blue', '--bar=white'])
        self.assertEqual(self.settings['foo'], 'red')
        self.assertEqual(self.settings['bar'], ['blue', 'white'])

    def test_load_configs_raises_error_for_unknown_variable(self):
        def mock_open(filename, mode=None):
            return StringIO.StringIO('''\
[config]
unknown = variable
''')
        self.assertRaises(
            cliapp.UnknownConfigVariable,
            self.settings.load_configs,
            open_file=mock_open)

    def test_load_configs_ignore_errors_opening_a_file(self):
        def mock_open(filename, mode=None):
            raise IOError()
        # unreadable config files are silently skipped
        self.assertEqual(self.settings.load_configs(open_file=mock_open), None)

    def test_adds_config_file_with_dash_dash_config(self):
        self.settings.parse_args(['--config=foo.conf'])
        self.assertEqual(self.settings.config_files,
                         self.settings.default_config_files + ['foo.conf'])

    def test_ignores_default_configs(self):
        self.settings.parse_args(['--no-default-configs'])
        self.assertEqual(self.settings.config_files, [])

    def test_ignores_then_adds_configs_works(self):
        # options are processed in order: reset the list first, then add
        self.settings.parse_args(['--no-default-configs', '--config=foo.conf'])
        self.assertEqual(self.settings.config_files, ['foo.conf'])
    # --- require() and ConfigParser export ---------------------------------

    def test_require_raises_error_if_string_unset(self):
        self.settings.string(['foo'], 'foo help', default=None)
        self.assertRaises(cliapp.AppException, self.settings.require,
                          'foo')

    def test_require_is_ok_with_set_string(self):
        self.settings.string(['foo'], 'foo help', default=None)
        self.settings['foo'] = 'bar'
        self.assertEqual(self.settings.require('foo'), None)

    def test_require_is_ok_with_default_string(self):
        self.settings.string(['foo'], 'foo help', default='foo default')
        self.assertEqual(self.settings.require('foo'), None)

    def test_require_raises_error_if_string_list_unset(self):
        self.settings.string_list(['foo'], 'foo help')
        self.assertRaises(cliapp.AppException, self.settings.require, 'foo')

    def test_require_is_ok_with_set_string_list(self):
        self.settings.string(['foo'], 'foo help')
        self.settings['foo'] = ['foo', 'bar']
        self.assertEqual(self.settings.require('foo'), None)

    def test_require_is_ok_with_default_string_list(self):
        self.settings.string(['foo'], 'foo help', default=['foo'])
        self.assertEqual(self.settings.require('foo'), None)

    def test_require_is_ok_with_unset_choice(self):
        # choice/boolean/bytesize/integer settings always have a usable
        # value, so require() never fails for them
        self.settings.choice(['foo'], ['foo', 'bar'], 'foo help')
        self.assertEqual(self.settings.require('foo'), None)

    def test_require_is_ok_with_unset_boolean(self):
        self.settings.boolean(['foo'], 'foo help')
        self.assertEqual(self.settings.require('foo'), None)

    def test_require_is_ok_with_unset_bytesize(self):
        self.settings.bytesize(['foo'], 'foo help')
        self.assertEqual(self.settings.require('foo'), None)

    def test_require_is_ok_with_unset_integer(self):
        self.settings.integer(['foo'], 'foo help')
        self.assertEqual(self.settings.require('foo'), None)

    def test_require_raises_error_when_one_value_of_several_is_unset(self):
        self.settings.string(['foo'], 'foo help')
        self.settings.string(['bar'], 'bar help', default=None)
        args = ['foo', 'bar']
        self.assertRaises(cliapp.AppException, self.settings.require, *args)

    def test_require_is_ok_with_multiple_values(self):
        self.settings.string(['foo'], 'foo help')
        self.settings.string(['bar'], 'bar help')
        args = ['foo', 'bar']
        self.assertEqual(self.settings.require(*args), None)

    def test_exports_configparser_with_settings(self):
        self.settings.integer(['foo'], 'foo help', default=1)
        self.settings.string(['bar'], 'bar help', default='yo')
        cp = self.settings.as_cp()
        # values are exported as strings under the [config] section
        self.assertEqual(cp.get('config', 'foo'), '1')
        self.assertEqual(cp.get('config', 'bar'), 'yo')

    def test_exports_all_config_sections_via_as_cp(self):
        def mock_open(filename, mode=None):
            # sections other than [config] must round-trip through as_cp()
            return StringIO.StringIO('''\
[config]
foo = yeehaa
[other]
bar = dodo
''')
        self.settings.string(['foo'], 'foo help', default='foo')
        self.settings.config_files = ['whatever.conf']
        self.settings.load_configs(open_file=mock_open)
        cp = self.settings.as_cp()
        self.assertEqual(sorted(cp.sections()), ['config', 'other'])
        self.assertEqual(cp.get('config', 'foo'), 'yeehaa')
        self.assertEqual(cp.options('other'), ['bar'])
        self.assertEqual(cp.get('other', 'bar'), 'dodo')
| gpl-2.0 |
brokenjacobs/ansible | test/units/plugins/cache/test_cache.py | 91 | 4078 | # (c) 2012-2015, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest, mock
from ansible.errors import AnsibleError
from ansible.plugins.cache import FactCache
from ansible.plugins.cache.base import BaseCacheModule
from ansible.plugins.cache.memory import CacheModule as MemoryCache
HAVE_MEMCACHED = True
try:
import memcache
except ImportError:
HAVE_MEMCACHED = False
else:
# Use an else so that the only reason we skip this is for lack of
# memcached, not errors importing the plugin
from ansible.plugins.cache.memcached import CacheModule as MemcachedCache
HAVE_REDIS = True
try:
import redis
except ImportError:
HAVE_REDIS = False
else:
from ansible.plugins.cache.redis import CacheModule as RedisCache
class TestFactCache(unittest.TestCase):
    """Tests for the FactCache dict-like wrapper around cache plugins."""

    def setUp(self):
        # Force the in-memory cache plugin so no external service is needed.
        with mock.patch('ansible.constants.CACHE_PLUGIN', 'memory'):
            self.cache = FactCache()

    def test_copy(self):
        # copy() must return a plain dict snapshot, not a FactCache.
        self.cache['avocado'] = 'fruit'
        self.cache['daisy'] = 'flower'
        a_copy = self.cache.copy()
        self.assertEqual(type(a_copy), dict)
        self.assertEqual(a_copy, dict(avocado='fruit', daisy='flower'))

    def test_plugin_load_failure(self):
        # See https://github.com/ansible/ansible/issues/18751
        # Note no fact_connection config set, so this will fail
        with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'):
            self.assertRaisesRegexp(AnsibleError,
                                    "Unable to load the facts cache plugin.*json.*",
                                    FactCache)
class TestAbstractClass(unittest.TestCase):
    """Verify the BaseCacheModule ABC contract and the concrete plugins."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_subclass_error(self):
        # Subclasses that leave any abstract method unimplemented must not
        # be instantiable (abc raises TypeError).
        class CacheModule1(BaseCacheModule):
            pass
        with self.assertRaises(TypeError):
            CacheModule1()

        class CacheModule2(BaseCacheModule):
            def get(self, key):
                super(CacheModule2, self).get(key)
        with self.assertRaises(TypeError):
            CacheModule2()

    def test_subclass_success(self):
        # Implementing the full abstract interface makes the class concrete.
        class CacheModule3(BaseCacheModule):
            def get(self, key):
                super(CacheModule3, self).get(key)

            def set(self, key, value):
                super(CacheModule3, self).set(key, value)

            def keys(self):
                super(CacheModule3, self).keys()

            def contains(self, key):
                super(CacheModule3, self).contains(key)

            def delete(self, key):
                super(CacheModule3, self).delete(key)

            def flush(self):
                super(CacheModule3, self).flush()

            def copy(self):
                super(CacheModule3, self).copy()
        self.assertIsInstance(CacheModule3(), CacheModule3)

    @unittest.skipUnless(HAVE_MEMCACHED, 'python-memcached module not installed')
    def test_memcached_cachemodule(self):
        self.assertIsInstance(MemcachedCache(), MemcachedCache)

    def test_memory_cachemodule(self):
        self.assertIsInstance(MemoryCache(), MemoryCache)

    @unittest.skipUnless(HAVE_REDIS, 'Redis python module not installed')
    def test_redis_cachemodule(self):
        self.assertIsInstance(RedisCache(), RedisCache)
| gpl-3.0 |
jtimon/bitcoin | test/functional/p2p_mempool.py | 45 | 1157 | #!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test p2p mempool message.
Test that nodes are disconnected if they send mempool messages when bloom
filters are not enabled.
"""
from test_framework.messages import msg_mempool
from test_framework.mininode import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class P2PMempoolTests(BitcoinTestFramework):
    """Check that a peer sending `mempool` is disconnected when bloom
    filters are disabled (-peerbloomfilters=0)."""

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1
        # mempool message is only allowed when bloom filters are enabled
        self.extra_args = [["-peerbloomfilters=0"]]

    def run_test(self):
        # Add a p2p connection
        self.nodes[0].add_p2p_connection(P2PInterface())

        # request mempool
        self.nodes[0].p2p.send_message(msg_mempool())
        self.nodes[0].p2p.wait_for_disconnect()

        # mininode must be disconnected at this point
        assert_equal(len(self.nodes[0].getpeerinfo()), 0)


if __name__ == '__main__':
    P2PMempoolTests().main()
| mit |
odoomrp/odoomrp-wip | stock_purchase_unit/models/purchase_order.py | 29 | 1545 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, api
class PurchaseOrder(models.Model):
    # Extend purchase.order so that stock moves created from a PO line also
    # carry the line's purchase unit (UoP) and purchase-unit quantity.
    _inherit = 'purchase.order'

    @api.model
    def _prepare_order_line_move(self, order, order_line, picking_id,
                                 group_id):
        """Add UoP fields to every stock.move dict the core implementation
        prepares; falls back to the line's standard UoM/qty when no UoP is
        set on the line."""
        res = super(PurchaseOrder, self)._prepare_order_line_move(
            order, order_line, picking_id, group_id)
        for vals in res:
            vals.update({'product_uop': (order_line.product_uop.id or
                                         order_line.product_uom.id),
                         'product_uop_qty': (order_line.product_uop_qty or
                                             order_line.product_qty)})
        return res
| agpl-3.0 |
indictranstech/tele-frappe | frappe/model/db_schema.py | 4 | 13509 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Syncs a database table to the `DocType` (metadata)
.. note:: This module is only used internally
"""
import re
import os
import frappe
from frappe import _
from frappe.utils import cstr, cint
class InvalidColumnName(frappe.ValidationError): pass

# DocField fieldtype -> (mysql column type, length/precision) used to build
# column definitions; an empty second element means "no length suffix".
type_map = {
    'Currency':     ('decimal', '18,6')
    ,'Int':         ('int', '11')
    ,'Float':       ('decimal', '18,6')
    ,'Percent':     ('decimal', '18,6')
    ,'Check':       ('int', '1')
    ,'Small Text':  ('text', '')
    ,'Long Text':   ('longtext', '')
    ,'Code':        ('longtext', '')
    ,'Text Editor': ('longtext', '')
    ,'Date':        ('date', '')
    ,'Datetime':    ('datetime', '6')
    ,'Time':        ('time', '6')
    ,'Text':        ('text', '')
    ,'Data':        ('varchar', '255')
    ,'Link':        ('varchar', '255')
    ,'Dynamic Link':('varchar', '255')
    ,'Password':    ('varchar', '255')
    ,'Select':      ('varchar', '255')
    ,'Read Only':   ('varchar', '255')
    ,'Attach':      ('varchar', '255')
    ,'Attach Image':('varchar', '255')
}

# Standard columns created on every DocType table (see DbTable.create);
# they are never emitted again as custom column definitions.
default_columns = ['name', 'creation', 'modified', 'modified_by', 'owner',
    'docstatus', 'parent', 'parentfield', 'parenttype', 'idx']

# "Magic" default values resolved at runtime rather than stored as literal
# SQL column defaults.
default_shortcuts = ['_Login', '__user', '_Full Name', 'Today', '__today', "now", "Now"]
def updatedb(dt):
    """
    Syncs a `DocType` to the table
    * creates if required
    * updates columns
    * updates indices

    :param dt: DocType name.  Raises a plain Exception if it doesn't exist.
    """
    res = frappe.db.sql("select ifnull(issingle, 0) from tabDocType where name=%s", (dt,))
    if not res:
        raise Exception, 'Wrong doctype "%s" in updatedb' % dt

    if not res[0][0]:
        # only non-single DocTypes are backed by a real database table;
        # commit first because DDL implicitly ends the transaction anyway
        frappe.db.commit()
        tab = DbTable(dt, 'tab')
        tab.sync()
        frappe.db.begin()
class DbTable:
    """In-memory model of one DocType's table: compares the wanted schema
    (from DocField/Custom Field metadata) against the live mysql schema and
    creates or alters the table to match."""

    def __init__(self, doctype, prefix = 'tab'):
        self.doctype = doctype
        self.name = prefix + doctype
        self.columns = {}           # fieldname -> DbColumn (wanted schema)
        self.current_columns = {}   # fieldname -> dict (live schema, from DESC)

        # lists for change, filled by DbColumn.build_for_alter_table()
        self.add_column = []
        self.change_type = []
        self.add_index = []
        self.drop_index = []
        self.set_default = []

        # load
        self.get_columns_from_docfields()

    def sync(self):
        # create the table if missing, otherwise reconcile columns/indexes
        if not self.name in DbManager(frappe.db).get_tables_list(frappe.db.cur_db_name):
            self.create()
        else:
            self.alter()

    def create(self):
        """CREATE TABLE with the standard columns plus all custom column
        and index definitions."""
        add_text = ''

        # columns
        column_defs = self.get_column_definitions()
        if column_defs: add_text += ',\n'.join(column_defs) + ',\n'

        # index
        index_defs = self.get_index_definitions()
        if index_defs: add_text += ',\n'.join(index_defs) + ',\n'

        # create table
        frappe.db.sql("""create table `%s` (
            name varchar(255) not null primary key,
            creation datetime(6),
            modified datetime(6),
            modified_by varchar(255),
            owner varchar(255),
            docstatus int(1) default '0',
            parent varchar(255),
            parentfield varchar(255),
            parenttype varchar(255),
            idx int(8),
            %sindex parent(parent))
            ENGINE=InnoDB
            ROW_FORMAT=COMPRESSED
            CHARACTER SET=utf8mb4
            COLLATE=utf8mb4_unicode_ci""" % (self.name, add_text))

    def get_column_definitions(self):
        # definitions for non-standard columns only (default_columns are
        # hard-coded in create())
        column_list = [] + default_columns
        ret = []
        for k in self.columns.keys():
            if k not in column_list:
                d = self.columns[k].get_definition()
                if d:
                    ret.append('`'+ k+ '` ' + d)
                    column_list.append(k)
        return ret

    def get_index_definitions(self):
        # index only fieldtypes whose mysql type is indexable without a
        # prefix length (i.e. not text/longtext)
        ret = []
        for key, col in self.columns.items():
            if col.set_index and col.fieldtype in type_map and \
                    type_map.get(col.fieldtype)[0] not in ('text', 'longtext'):
                ret.append('index `' + key + '`(`' + key + '`)')
        return ret

    def get_columns_from_docfields(self):
        """
        get columns from docfields and custom fields
        """
        fl = frappe.db.sql("SELECT * FROM tabDocField WHERE parent = %s", self.doctype, as_dict = 1)
        precisions = {}

        if not frappe.flags.in_install:
            custom_fl = frappe.db.sql("""\
                SELECT * FROM `tabCustom Field`
                WHERE dt = %s AND docstatus < 2""", (self.doctype,), as_dict=1)
            if custom_fl: fl += custom_fl

            # get precision from property setters
            for ps in frappe.get_all("Property Setter", fields=["field_name", "value"],
                filters={"doc_type": self.doctype, "doctype_or_field": "DocField", "property": "precision"}):
                precisions[ps.field_name] = ps.value

        for f in fl:
            self.columns[f['fieldname']] = DbColumn(self, f['fieldname'],
                f['fieldtype'], f.get('length'), f.get('default'), f.get('search_index'),
                f.get('options'), f.get('unique'), precisions.get(f['fieldname']) or f.get('precision'))

    def get_columns_from_db(self):
        # DESC columns: (Field, Type, Null, Key, Default, Extra)
        self.show_columns = frappe.db.sql("desc `%s`" % self.name)
        for c in self.show_columns:
            self.current_columns[c[0]] = {'name': c[0],
                'type': c[1], 'index': c[3]=="MUL", 'default': c[4], "unique": c[3]=="UNI"}

    # GET foreign keys
    def get_foreign_keys(self):
        # parse SHOW CREATE TABLE output for CONSTRAINT ... FOREIGN KEY
        # lines; returns a list of (column name, constraint name) tuples
        fk_list = []
        txt = frappe.db.sql("show create table `%s`" % self.name)[0][1]
        for line in txt.split('\n'):
            if line.strip().startswith('CONSTRAINT') and line.find('FOREIGN')!=-1:
                try:
                    fk_list.append((line.split('`')[3], line.split('`')[1]))
                except IndexError:
                    pass
        return fk_list

    # Drop foreign keys
    def drop_foreign_keys(self):
        # NOTE(review): self.drop_foreign_key is populated outside this
        # chunk; if it is never set, the attribute access would raise.
        if not self.drop_foreign_key:
            return

        fk_list = self.get_foreign_keys()

        # make dictionary of constraint names
        fk_dict = {}
        for f in fk_list:
            fk_dict[f[0]] = f[1]

        # drop
        for col in self.drop_foreign_key:
            frappe.db.sql("set foreign_key_checks=0")
            frappe.db.sql("alter table `%s` drop foreign key `%s`" % (self.name, fk_dict[col.fieldname]))
            frappe.db.sql("set foreign_key_checks=1")

    def alter(self):
        """Diff wanted vs. live columns and issue one combined ALTER TABLE
        for all adds, type changes, index changes and default changes."""
        self.get_columns_from_db()
        for col in self.columns.values():
            col.build_for_alter_table(self.current_columns.get(col.fieldname, None))

        query = []

        for col in self.add_column:
            query.append("add column `{}` {}".format(col.fieldname, col.get_definition()))

        for col in self.change_type:
            query.append("change `{}` `{}` {}".format(col.fieldname, col.fieldname, col.get_definition()))

        for col in self.add_index:
            # if index key not exists
            if not frappe.db.sql("show index from `%s` where key_name = %s" %
                    (self.name, '%s'), col.fieldname):
                query.append("add index `{}`(`{}`)".format(col.fieldname, col.fieldname))

        for col in self.drop_index:
            if col.fieldname != 'name': # primary key
                # if index key exists
                if frappe.db.sql("""show index from `{0}`
                    where key_name=%s
                    and Non_unique=%s""".format(self.name), (col.fieldname, 1 if col.unique else 0)):
                    query.append("drop index `{}`".format(col.fieldname))

        for col in self.set_default:
            if col.fieldname=="name":
                continue

            if not col.default:
                col_default = "null"
            else:
                col_default = '"{}"'.format(col.default.replace('"', '\\"'))

            query.append('alter column `{}` set default {}'.format(col.fieldname, col_default))

        if query:
            frappe.db.sql("alter table `{}` {}".format(self.name, ", ".join(query)))
class DbColumn:
    """One wanted column of a DbTable.  get_definition() renders the mysql
    column definition; build_for_alter_table() diffs the column against the
    live schema and queues the needed changes on the parent table."""

    def __init__(self, table, fieldname, fieldtype, length, default,
        set_index, options, unique, precision):
        self.table = table
        self.fieldname = fieldname
        self.fieldtype = fieldtype
        self.length = length
        self.set_index = set_index
        self.default = default
        self.options = options
        self.unique = unique
        self.precision = precision

    def get_definition(self, with_default=1):
        """Return the column definition string, e.g.
        'varchar(255) default "x" unique', or None for fieldtypes with no
        database representation."""
        column_def = get_definition(self.fieldtype, self.precision)

        if not column_def:
            return column_def

        if self.default and (self.default not in default_shortcuts) \
            and not self.default.startswith(":") and column_def not in ('text', 'longtext'):
            # BUGFIX: the previous code used self.default.replace('"', '\"'),
            # but '\"' is the same string as '"', so the replace was a no-op
            # and a default containing a double quote produced broken SQL.
            # Use a real backslash escape, matching DbTable.alter().
            column_def += ' default "' + self.default.replace('"', '\\"') + '"'

        if self.unique and (column_def not in ('text', 'longtext')):
            column_def += ' unique'

        return column_def

    def build_for_alter_table(self, current_def):
        """Compare this column against *current_def* (a dict from
        DbTable.get_columns_from_db, or None if the column is missing) and
        append it to the appropriate change list on self.table."""
        # BUGFIX: pass the precision here as well, the same way
        # get_definition() does above; otherwise a precision>6 column
        # ('decimal(18,9)') never matches the freshly rendered
        # 'decimal(18,6)' and is flagged as changed on every sync.
        column_def = get_definition(self.fieldtype, self.precision)

        # no columns
        if not column_def:
            return

        # to add?
        if not current_def:
            self.fieldname = validate_column_name(self.fieldname)
            self.table.add_column.append(self)
            return

        # type
        if (current_def['type'] != column_def) or (self.unique and not current_def['unique'] \
            and column_def in ('text', 'longtext')):
            self.table.change_type.append(self)

        else:
            # index
            if (current_def['index'] and not self.set_index):
                self.table.drop_index.append(self)

            if (current_def['unique'] and not self.unique) and not (column_def in ('text', 'longtext')):
                self.table.drop_index.append(self)

            if (not current_def['index'] and self.set_index) and not (column_def in ('text', 'longtext')):
                self.table.add_index.append(self)

        # default
        if (self.default_changed(current_def) \
            and (self.default not in default_shortcuts) \
            and not cstr(self.default).startswith(":") \
            and not (column_def in ['text', 'longtext'])):
            self.table.set_default.append(self)

    def default_changed(self, current_def):
        # decimal defaults need numeric comparison ('0.0' == '0.000000')
        if "decimal" in current_def['type']:
            return self.default_changed_for_decimal(current_def)
        else:
            return current_def['default'] != self.default

    def default_changed_for_decimal(self, current_def):
        try:
            if current_def['default'] in ("", None) and self.default in ("", None):
                # both none, empty
                return False

            elif current_def['default'] in ("", None):
                try:
                    # check if new default value is valid
                    float(self.default)
                    return True
                except ValueError:
                    return False

            elif self.default in ("", None):
                # new default value is empty
                return True

            else:
                # NOTE float() raise ValueError when "" or None is passed
                return float(current_def['default'])!=float(self.default)

        except TypeError:
            return True
class DbManager:
    """
    Basically, a wrapper for oft-used mysql commands. like show tables,databases, variables etc...

    #TODO:
    0. Simplify / create settings for the restore database source folder
    0a. Merge restore database and extract_sql(from frappe_server_tools).
    1. Setter and getter for different mysql variables.
    2. Setter and getter for mysql variables at global level??
    """
    def __init__(self,db):
        """
        Pass root_conn here for access to all databases.
        """
        if db:
            self.db = db

    def get_variables(self,regex):
        """
        Get variables that match the passed pattern regex
        """
        return list(self.db.sql("SHOW VARIABLES LIKE '%s'"%regex))

    def get_table_schema(self,table):
        """
        Just returns the output of Desc tables.
        """
        return list(self.db.sql("DESC `%s`"%table))

    def get_tables_list(self,target=None):
        """get list of tables"""
        if target:
            self.db.use(target)
        return [t[0] for t in self.db.sql("SHOW TABLES")]

    def create_user(self, user, password, host):
        # Create user if it doesn't exist.
        # NOTE(review): the user name is truncated to mysql's 16-char limit.
        try:
            if password:
                self.db.sql("CREATE USER '%s'@'%s' IDENTIFIED BY '%s';" % (user[:16], host, password))
            else:
                self.db.sql("CREATE USER '%s'@'%s';" % (user[:16], host))
        except Exception:
            raise

    def delete_user(self, target, host):
        # delete user if exists; mysql error 1396 means the user was absent
        try:
            self.db.sql("DROP USER '%s'@'%s';" % (target, host))
        except Exception, e:
            if e.args[0]==1396:
                pass
            else:
                raise

    def create_database(self,target):
        # drop first so the database always starts empty
        if target in self.get_database_list():
            self.drop_database(target)
        self.db.sql("CREATE DATABASE IF NOT EXISTS `%s` ;" % target)

    def drop_database(self,target):
        self.db.sql("DROP DATABASE IF EXISTS `%s`;"%target)

    def grant_all_privileges(self, target, user, host):
        self.db.sql("GRANT ALL PRIVILEGES ON `%s`.* TO '%s'@'%s';" % (target, user, host))

    def grant_select_privilges(self, db, table, user, host):
        # grant on one table, or on the whole database when table is falsy
        if table:
            self.db.sql("GRANT SELECT ON %s.%s to '%s'@'%s';" % (db, table, user, host))
        else:
            self.db.sql("GRANT SELECT ON %s.* to '%s'@'%s';" % (db, user, host))

    def flush_privileges(self):
        self.db.sql("FLUSH PRIVILEGES")

    def get_database_list(self):
        """get list of databases"""
        return [d[0] for d in self.db.sql("SHOW DATABASES")]

    def restore_database(self,target,source,user,password):
        # NOTE(review): shells out to the mysql client; arguments are
        # escaped with make_esc, but the password is still visible on the
        # command line (process list) while the restore runs.
        from frappe.utils import make_esc
        esc = make_esc('$ ')
        os.system("mysql -u %s -p%s -h%s %s < %s" % \
            (esc(user), esc(password), esc(frappe.db.host), esc(target), source))

    def drop_table(self,table_name):
        """drop table if exists"""
        if not table_name in self.get_tables_list():
            return
        self.db.sql("DROP TABLE IF EXISTS %s "%(table_name))
def validate_column_name(n):
    """Normalize a fieldname for use as a mysql column name: lower-cased,
    spaces replaced by underscores.  Throws InvalidColumnName (via
    frappe.throw) if any other non-word character remains."""
    # BUGFIX: strip() must run before the space->underscore replacement.
    # Previously n.replace(' ', '_').strip() ran the strip after leading and
    # trailing spaces had already become underscores, so "  foo " was
    # normalized to "__foo_" instead of "foo".
    n = n.strip().replace(' ', '_').lower()
    special_characters = re.findall("[\W]", n, re.UNICODE)
    if special_characters:
        special_characters = ", ".join('"{0}"'.format(c) for c in special_characters)
        frappe.throw(_("Fieldname {0} cannot have special characters like {1}").format(cstr(n), special_characters), InvalidColumnName)
    return n
def remove_all_foreign_keys():
    """Drop every foreign key constraint on every non-single DocType table.
    Tables that don't exist yet (mysql error 1146) are skipped."""
    frappe.db.sql("set foreign_key_checks = 0")
    frappe.db.commit()
    for t in frappe.db.sql("select name from tabDocType where ifnull(issingle,0)=0"):
        dbtab = DbTable(t[0])
        try:
            fklist = dbtab.get_foreign_keys()
        except Exception, e:
            if e.args[0]==1146:
                # table doesn't exist; nothing to drop
                fklist = []
            else:
                raise

        for f in fklist:
            frappe.db.sql("alter table `tab%s` drop foreign key `%s`" % (t[0], f[1]))
def get_definition(fieldtype, precision=None):
    """Return the mysql type definition for a DocField fieldtype, e.g.
    'varchar(255)' or 'text', or None when the fieldtype has no database
    representation (not in type_map)."""
    mapped = type_map.get(fieldtype)
    if not mapped:
        return

    column_type, length = mapped
    if not length:
        return column_type

    # float-like fields asked for more than the default 6 decimal places
    # get the wider decimal(18,9) column
    if fieldtype in ("Float", "Currency", "Percent") and cint(precision) > 6:
        length = '18,9'

    return column_type + '(' + length + ')'
def add_column(doctype, column_name, fieldtype, precision=None):
    """Add a single column to an existing DocType table; commits first
    because ALTER TABLE implicitly ends the transaction."""
    frappe.db.commit()
    frappe.db.sql("alter table `tab%s` add column %s %s" % (doctype,
        column_name, get_definition(fieldtype, precision)))
| mit |
dennis-sheil/commandergenius | project/jni/python/src/Mac/scripts/bgenall.py | 39 | 1495 | # bgenall - Generate all bgen-generated modules
#
import sys
import os
import string
def bgenone(dirname, shortname):
    """Run one bgen scanner module located in *dirname*.

    Returns 1 on success, 0 on failure; errors are printed rather than
    propagated so the caller can continue with other modules."""
    os.chdir(dirname)
    print '%s:'%shortname
    # Sigh, we don't want to lose CVS history, so two
    # modules have funny names:
    if shortname == 'carbonevt':
        modulename = 'CarbonEvtscan'
    elif shortname == 'ibcarbon':
        modulename = 'IBCarbonscan'
    else:
        modulename = shortname + 'scan'
    try:
        m = __import__(modulename)
    except:
        print "Error:", shortname, sys.exc_info()[1]
        return 0
    try:
        m.main()
    except:
        print "Error:", shortname, sys.exc_info()[1]
        return 0
    return 1
def main():
    """Run every bgen scanner under the Mac/Modules source tree (or the
    directory given as argv[1]) and report successes/failures.

    Returns 1 when all scanners pass, 0 otherwise."""
    success = []
    failure = []
    # allow the scanner modules to be imported from their own directory
    sys.path.insert(0, os.curdir)
    if len(sys.argv) > 1:
        srcdir = sys.argv[1]
    else:
        srcdir = os.path.join(os.path.join(sys.prefix, 'Mac'), 'Modules')
    srcdir = os.path.abspath(srcdir)
    contents = os.listdir(srcdir)
    for name in contents:
        moduledir = os.path.join(srcdir, name)
        scanmodule = os.path.join(moduledir, name +'scan.py')
        # only directories that ship a <name>scan.py are bgen modules
        if os.path.exists(scanmodule):
            if bgenone(moduledir, name):
                success.append(name)
            else:
                failure.append(name)
    print 'Done:', string.join(success, ' ')
    if failure:
        print 'Failed:', string.join(failure, ' ')
        return 0
    return 1

if __name__ == '__main__':
    rv = main()
    # exit status 0 on success, 1 on any failure
    sys.exit(not rv)
| lgpl-2.1 |
sssllliang/edx-analytics-pipeline | edx/analytics/tasks/load_internal_reporting_user_activity.py | 2 | 8980 | """
Loads the user_activity table into the warehouse through the pipeline via Hive.
On the roadmap is to write a task that runs validation queries on the aggregated Hive data pre-load.
"""
import logging
import luigi
from edx.analytics.tasks.url import ExternalURL
from edx.analytics.tasks.user_activity import UserActivityTableTask
from edx.analytics.tasks.vertica_load import VerticaCopyTask, VerticaCopyTaskMixin, CredentialFileVerticaTarget
from edx.analytics.tasks.database_imports import ImportAuthUserTask
from edx.analytics.tasks.util.hive import HiveTableFromQueryTask, WarehouseMixin, HivePartition
log = logging.getLogger(__name__)
class AggregateInternalReportingUserActivityTableHive(HiveTableFromQueryTask):
    """Aggregate the user activity table in Hive."""

    # interval over which to aggregate; n_reduce_tasks sizes the Hadoop job
    interval = luigi.DateIntervalParameter()
    n_reduce_tasks = luigi.Parameter()

    def requires(self):
        """
        This task reads from auth_user and user_activity_daily, so require that they be
        loaded into Hive (via MySQL loads into Hive or via the pipeline as needed).
        """
        return [ImportAuthUserTask(overwrite=False, destination=self.warehouse_path),
                UserActivityTableTask(interval=self.interval, warehouse_path=self.warehouse_path,
                                      n_reduce_tasks=self.n_reduce_tasks)]

    @property
    def table(self):
        return 'internal_reporting_user_activity'

    @property
    def columns(self):
        # Hive schema of the aggregated table.
        return [
            ('user_id', 'INT'),
            ('course_id', 'STRING'),
            ('date', 'STRING'),
            ('activity_type', 'STRING'),
            ('number_of_activities', 'INT'),
        ]

    @property
    def partition(self):
        # single partition keyed on the interval's end date
        return HivePartition('dt', self.interval.date_b.isoformat())  # pylint: disable=no-member

    @property
    def insert_query(self):
        # join daily activity to auth_user to translate username -> user_id
        return """
            SELECT
                au.id
                , uad.course_id
                , uad.date
                , uad.category
                , uad.count
            FROM auth_user au
            JOIN user_activity_daily uad ON au.username = uad.username
            """
class LoadInternalReportingUserActivityToWarehouse(WarehouseMixin, VerticaCopyTask):
    """
    Loads the user activity table from Hive into the Vertica data warehouse.
    Parameters:
        interval: a date_interval object containing the interval over which to pull data for user location. Should
            usually be from the beginning of edX to the present day (i.e. through the previous day).
        n_reduce_tasks: number of reduce tasks
        overwrite: whether or not to overwrite existing outputs; set to False by default for now
    """
    interval = luigi.DateIntervalParameter()
    n_reduce_tasks = luigi.Parameter()
    @property
    def partition(self):
        """The table is partitioned by date."""
        return HivePartition('dt', self.interval.date_b.isoformat())  # pylint: disable=no-member
    @property
    def insert_source_task(self):
        """The Hive aggregation task whose output this task copies into Vertica."""
        return (
            # Get the location of the Hive table, so it can be opened and read.
            AggregateInternalReportingUserActivityTableHive(
                n_reduce_tasks=self.n_reduce_tasks,
                interval=self.interval,
                warehouse_path=self.warehouse_path,
                overwrite=self.overwrite,
            )
        )
    @property
    def table(self):
        """Target Vertica table name."""
        return 'f_user_activity'
    @property
    def default_columns(self):
        """List of tuples defining name and definition of automatically-filled columns."""
        return None
    @property
    def auto_primary_key(self):
        """The warehouse schema defines an auto primary key called row_number for this table."""
        return ('row_number', 'AUTO_INCREMENT')
    @property
    def foreign_key_mapping(self):
        """Foreign keys are specified in the warehouse schema."""
        return {}
    @property
    def columns(self):
        # Vertica column definitions; order matches the Hive table's columns.
        return [
            ('user_id', 'INTEGER'),
            ('course_id', 'VARCHAR(256)'),
            ('date', 'DATE'),
            ('activity_type', 'VARCHAR(200)'),
            ('number_of_activities', 'INTEGER')
        ]
class BuildInternalReportingUserActivityCombinedView(VerticaCopyTaskMixin, WarehouseMixin, luigi.Task):
    """luigi task to build the combined view on top of the history and production tables for user activity."""
    interval = luigi.DateIntervalParameter()
    n_reduce_tasks = luigi.Parameter()
    # Schema holding the historical f_user_activity table unioned into the view.
    history_schema = luigi.Parameter(default='history')
    def requires(self):
        """Require the warehouse load (so the production table exists) plus the DB credentials file."""
        return {'insert_source': LoadInternalReportingUserActivityToWarehouse(
            n_reduce_tasks=self.n_reduce_tasks,
            interval=self.interval,
            warehouse_path=self.warehouse_path,
            overwrite=self.overwrite,
            schema=self.schema,
            credentials=self.credentials),
            'credentials': ExternalURL(self.credentials)}
    @property
    def view(self):
        """The "table name" is the name of the view we build over the table we insert here and the history table."""
        return "f_user_activity_combined"
    def update_id(self):
        """All that matters is whether we've built the view before, and the parameter information doesn't matter."""
        return "user_activity_view_built"
    def run(self):
        """Construct the view on top of the historical and new user activity tables."""
        connection = self.output().connect()
        try:
            cursor = connection.cursor()
            # We mark this task as complete first, since the view creation does an implicit commit.
            self.output().touch(connection)
            # Creating the view commits the transaction as well.
            build_view_query = """CREATE VIEW {schema}.{view} AS SELECT * FROM (
                SELECT * FROM {schema}.f_user_activity
                UNION
                SELECT * FROM {history}.f_user_activity
            ) AS u""".format(schema=self.schema, view=self.view, history=self.history_schema)
            log.debug(build_view_query)
            cursor.execute(build_view_query)
            log.debug("Committed transaction.")
        except Exception as exc:
            # Any failure rolls back the marker written by touch() above.
            log.debug("Rolled back the transaction; exception raised: %s", str(exc))
            connection.rollback()
            raise
        finally:
            connection.close()
    def output(self):
        """
        Returns a Vertica target noting that the update occurred.
        """
        return CredentialFileVerticaTarget(
            credentials_target=self.input()['credentials'],
            table=self.view,
            schema=self.schema,
            update_id=self.update_id()
        )
    def complete(self):
        """
        OverwriteOutputMixin redefines the complete method so that tasks are re-run, which is great for the Vertica
        loading tasks where we would delete and then re-start, but detrimental here, as the existence of the view does
        not depend on the data inside the table, only on the table's existence. We override this method again to
        revert to the standard luigi complete() method, because we can't meaningfully re-run this task given that
        CREATE VIEW IF NOT EXISTS and DROP VIEW IF EXISTS are not supported in Vertica.
        """
        return self.output().exists()
class InternalReportingUserActivityWorkflow(VerticaCopyTaskMixin, WarehouseMixin, luigi.WrapperTask):
    """Single entry point driving both the user-activity warehouse load and the combined-view build."""
    interval = luigi.DateIntervalParameter()
    n_reduce_tasks = luigi.Parameter()
    history_schema = luigi.Parameter(default='history')
    def requires(self):
        """
        Require both the view build and the warehouse load.
        The view-building task's existence check merely selects from the view (it says nothing about the
        freshness of the underlying table), so the warehouse load is required explicitly as well, even
        when the view already exists.
        """
        shared_kwargs = dict(
            n_reduce_tasks=self.n_reduce_tasks,
            interval=self.interval,
            warehouse_path=self.warehouse_path,
            overwrite=self.overwrite,
            schema=self.schema,
            credentials=self.credentials,
        )
        return [
            LoadInternalReportingUserActivityToWarehouse(**shared_kwargs),
            BuildInternalReportingUserActivityCombinedView(
                history_schema=self.history_schema,
                **shared_kwargs
            ),
        ]
| agpl-3.0 |
ar7z1/ansible | lib/ansible/modules/network/a10/a10_server_axapi3.py | 50 | 8970 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Mischa Peters <mpeters@a10networks.com>
# (c) 2016, Eric Chou <ericc@a10networks.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

# NOTE: the `operation` choices documented below must match the argument_spec
# in main(); the code accepts 'create', 'update' and 'delete' (not 'remove').
DOCUMENTATION = '''
---
module: a10_server_axapi3
version_added: 2.3
short_description: Manage A10 Networks AX/SoftAX/Thunder/vThunder devices
description:
    - Manage SLB (Server Load Balancer) server objects on A10 Networks devices via aXAPIv3.
author: "Eric Chou (@ericchou) based on previous work by Mischa Peters (@mischapeters)"
extends_documentation_fragment:
  - a10
  - url
options:
  server_name:
    description:
      - The SLB (Server Load Balancer) server name.
    required: true
    aliases: ['server']
  server_ip:
    description:
      - The SLB (Server Load Balancer) server IPv4 address.
    required: true
    aliases: ['ip', 'address']
  server_status:
    description:
      - The SLB (Server Load Balancer) virtual server status.
    default: enable
    aliases: ['action']
    choices: ['enable', 'disable']
  server_ports:
    description:
      - A list of ports to create for the server. Each list item should be a dictionary which specifies the C(port:)
        and C(protocol:).
    aliases: ['port']
  operation:
    description:
      - Create, Update or Delete SLB server. For create and update operation, we use the IP address and server
        name specified in the POST message. For delete operation, we use the server name in the request URI.
    default: create
    choices: ['create', 'update', 'delete']
  validate_certs:
    description:
      - If C(no), SSL certificates will not be validated. This should only be used
        on personally controlled devices using self-signed certificates.
    type: bool
    default: 'yes'

'''

RETURN = '''
#
'''

EXAMPLES = '''
# Create a new server
- a10_server:
    host: a10.mydomain.com
    username: myadmin
    password: mypassword
    server: test
    server_ip: 1.1.1.100
    validate_certs: false
    server_status: enable
    write_config: yes
    operation: create
    server_ports:
      - port-number: 8080
        protocol: tcp
        action: enable
      - port-number: 8443
        protocol: TCP
'''
import json
from ansible.module_utils.network.a10.a10 import axapi_call_v3, a10_argument_spec, axapi_authenticate_v3, axapi_failure
from ansible.module_utils.network.a10.a10 import AXAPI_PORT_PROTOCOLS
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import url_argument_spec
# Keys accepted in each server-port dictionary.
VALID_PORT_FIELDS = ['port-number', 'protocol', 'action']

def validate_ports(module, ports):
    """Validate and normalize the port definitions in place.

    Each item must contain an integer-convertible 'port-number' and a non-empty
    'protocol'; 'action' defaults to 'enable' when absent. Any violation is
    reported through module.fail_json (which terminates the module run).

    :param module: AnsibleModule used only to report failures.
    :param ports: list of dicts, mutated in place ('port-number' coerced to int,
        missing 'action' filled in).
    """
    for item in ports:
        for key in item:
            if key not in VALID_PORT_FIELDS:
                module.fail_json(msg="invalid port field (%s), must be one of: %s" % (key, ','.join(VALID_PORT_FIELDS)))

        # validate the port number is present and an integer
        if 'port-number' in item:
            try:
                item['port-number'] = int(item['port-number'])
            except (TypeError, ValueError):
                # Narrowed from a bare except: only conversion failures mean a bad
                # port number; anything else (e.g. KeyboardInterrupt) must propagate.
                module.fail_json(msg="port-number entries in the port definitions must be integers")
        else:
            module.fail_json(msg="port definitions must define the port-number field")

        # validate the port protocol is present, no need to convert to the internal API integer value in v3
        if 'protocol' in item:
            protocol = item['protocol']
            if not protocol:
                module.fail_json(msg="invalid port protocol, must be one of: %s" % ','.join(AXAPI_PORT_PROTOCOLS))
            else:
                item['protocol'] = protocol
        else:
            module.fail_json(msg="port definitions must define the port protocol (%s)" % ','.join(AXAPI_PORT_PROTOCOLS))

        # 'status' is 'action' in AXAPIv3
        # no need to convert the status, a.k.a action, to the internal API integer value in v3
        # action is either enabled or disabled
        if 'action' in item:
            action = item['action']
            if action not in ['enable', 'disable']:
                module.fail_json(msg="server action must be enable or disable")
        else:
            item['action'] = 'enable'
def main():
    """Entry point: create, update or delete an SLB server on an A10 device via aXAPIv3."""
    # Base spec comes from the shared a10/url helpers (supplies host/username/password,
    # write_config, validate_certs, etc. -- read from module.params below).
    argument_spec = a10_argument_spec()
    argument_spec.update(url_argument_spec())
    argument_spec.update(
        dict(
            operation=dict(type='str', default='create', choices=['create', 'update', 'delete']),
            server_name=dict(type='str', aliases=['server'], required=True),
            server_ip=dict(type='str', aliases=['ip', 'address'], required=True),
            server_status=dict(type='str', default='enable', aliases=['action'], choices=['enable', 'disable']),
            server_ports=dict(type='list', aliases=['port'], default=[]),
        )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=False
    )
    host = module.params['host']
    username = module.params['username']
    password = module.params['password']
    operation = module.params['operation']
    write_config = module.params['write_config']
    slb_server = module.params['server_name']
    slb_server_ip = module.params['server_ip']
    slb_server_status = module.params['server_status']
    slb_server_ports = module.params['server_ports']
    axapi_base_url = 'https://{}/axapi/v3/'.format(host)
    axapi_auth_url = axapi_base_url + 'auth/'
    # Log in once; the returned signature authenticates every later call.
    signature = axapi_authenticate_v3(module, axapi_auth_url, username, password)
    # validate the ports data structure
    validate_ports(module, slb_server_ports)
    # Request body shared by the create and update operations.
    json_post = {
        "server-list": [
            {
                "name": slb_server,
                "host": slb_server_ip
            }
        ]
    }
    # add optional module parameters
    if slb_server_ports:
        json_post['server-list'][0]['port-list'] = slb_server_ports
    if slb_server_status:
        json_post['server-list'][0]['action'] = slb_server_status
    # Fetch the current server list to decide whether the target already exists.
    slb_server_data = axapi_call_v3(module, axapi_base_url + 'slb/server/', method='GET', body='', signature=signature)
    # for empty slb server list
    if axapi_failure(slb_server_data):
        slb_server_exists = False
    else:
        slb_server_list = [server['name'] for server in slb_server_data['server-list']]
        if slb_server in slb_server_list:
            slb_server_exists = True
        else:
            slb_server_exists = False
    changed = False
    if operation == 'create':
        if slb_server_exists is False:
            result = axapi_call_v3(module, axapi_base_url + 'slb/server/', method='POST', body=json.dumps(json_post), signature=signature)
            if axapi_failure(result):
                module.fail_json(msg="failed to create the server: %s" % result['response']['err']['msg'])
            changed = True
        else:
            # NOTE(review): message says state='update' but the parameter is named
            # 'operation' -- confirm before changing user-facing text.
            module.fail_json(msg="server already exists, use state='update' instead")
            changed = False
        # if we changed things, get the full info regarding result
        if changed:
            result = axapi_call_v3(module, axapi_base_url + 'slb/server/' + slb_server, method='GET', body='', signature=signature)
        else:
            result = slb_server_data
    elif operation == 'delete':
        if slb_server_exists:
            # Delete addresses the server by name in the URI, not in the body.
            result = axapi_call_v3(module, axapi_base_url + 'slb/server/' + slb_server, method='DELETE', body='', signature=signature)
            if axapi_failure(result):
                module.fail_json(msg="failed to delete server: %s" % result['response']['err']['msg'])
            changed = True
        else:
            result = dict(msg="the server was not present")
    elif operation == 'update':
        if slb_server_exists:
            result = axapi_call_v3(module, axapi_base_url + 'slb/server/', method='PUT', body=json.dumps(json_post), signature=signature)
            if axapi_failure(result):
                module.fail_json(msg="failed to update server: %s" % result['response']['err']['msg'])
            changed = True
        else:
            result = dict(msg="the server was not present")
    # if the config has changed, save the config unless otherwise requested
    if changed and write_config:
        write_result = axapi_call_v3(module, axapi_base_url + 'write/memory/', method='POST', body='', signature=signature)
        if axapi_failure(write_result):
            module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg'])
    # log out gracefully and exit
    axapi_call_v3(module, axapi_base_url + 'logoff/', method='POST', body='', signature=signature)
    module.exit_json(changed=changed, content=result)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
nichit93/Implementation-of-TRED-in-ns-3 | src/bridge/bindings/modulegen__gcc_ILP32.py | 38 | 201909 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    # Error handler that reports wrapper-generation failures as warnings
    # instead of aborting the whole binding generation.
    def handle_error(self, wrapper, exception, traceback_):
        """Warn about the failed wrapper; returning True tells pybindgen the error is handled."""
        warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
        return True
# Install the permissive handler globally for this generator run.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create and return the root pybindgen Module for the ns.bridge bindings."""
    return Module('ns.bridge', cpp_namespace='::ns3')
def register_types(module):
    """Register every C++ type exposed by the ns-3 bridge module with pybindgen.

    NOTE(review): this file appears auto-generated by ns-3's modulegen tooling;
    hand edits are normally overwritten on regeneration -- confirm before editing.
    """
    root_module = module.get_root()
    ## address.h (module 'network'): ns3::Address [class]
    module.add_class('Address', import_from_module='ns.network')
    ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
    module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
    module.add_class('AttributeConstructionList', import_from_module='ns.core')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
    module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
    ## bridge-helper.h (module 'bridge'): ns3::BridgeHelper [class]
    module.add_class('BridgeHelper')
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase', import_from_module='ns.core')
    ## hash.h (module 'core'): ns3::Hasher [class]
    module.add_class('Hasher', import_from_module='ns.core')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    module.add_class('Ipv4Address', import_from_module='ns.network')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
    module.add_class('Ipv4Mask', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    module.add_class('Ipv6Address', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
    module.add_class('Ipv6Prefix', import_from_module='ns.network')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    module.add_class('Mac48Address', import_from_module='ns.network')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
    module.add_class('NetDeviceContainer', import_from_module='ns.network')
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    ## object.h (module 'core'): ns3::ObjectDeleter [struct]
    module.add_class('ObjectDeleter', import_from_module='ns.core')
    ## object-factory.h (module 'core'): ns3::ObjectFactory [class]
    module.add_class('ObjectFactory', import_from_module='ns.core')
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
    module.add_class('TagBuffer', import_from_module='ns.network')
    ## nstime.h (module 'core'): ns3::TimeWithUnit [class]
    module.add_class('TimeWithUnit', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]
    module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
    module.add_class('int64x64_t', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
    module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
    ## object.h (module 'core'): ns3::Object [class]
    module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    ## object.h (module 'core'): ns3::Object::AggregateIterator [class]
    module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NetDeviceQueue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NetDeviceQueue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::QueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## nstime.h (module 'core'): ns3::Time [class]
    module.add_class('Time', import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
    module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time [class]
    root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## channel.h (module 'network'): ns3::Channel [class]
    module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class]
    module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker [class]
    module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
    module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
    module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
    module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
    module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
    module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
    module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
    module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
    module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
    module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
    module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## net-device.h (module 'network'): ns3::NetDevice [class]
    module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
    module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
    ## net-device.h (module 'network'): ns3::NetDeviceQueue [class]
    module.add_class('NetDeviceQueue', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >'])
    ## net-device.h (module 'network'): ns3::NetDeviceQueueInterface [class]
    module.add_class('NetDeviceQueueInterface', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
    module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
    module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## net-device.h (module 'network'): ns3::QueueItem [class]
    module.add_class('QueueItem', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
    ## net-device.h (module 'network'): ns3::QueueItem::Uint8Values [enumeration]
    module.add_enum('Uint8Values', ['IP_DSFIELD'], outer_class=root_module['ns3::QueueItem'], import_from_module='ns.network')
    ## nstime.h (module 'core'): ns3::TimeValue [class]
    module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## address.h (module 'network'): ns3::AddressChecker [class]
    module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## address.h (module 'network'): ns3::AddressValue [class]
    module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## bridge-channel.h (module 'bridge'): ns3::BridgeChannel [class]
    module.add_class('BridgeChannel', parent=root_module['ns3::Channel'])
    ## bridge-net-device.h (module 'bridge'): ns3::BridgeNetDevice [class]
    module.add_class('BridgeNetDevice', parent=root_module['ns3::NetDevice'])
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    ## Register a nested module for the namespace Hash
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)
    ## Register a nested module for the namespace TracedValueCallback
    nested_module = module.add_cpp_namespace('TracedValueCallback')
    register_types_ns3_TracedValueCallback(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register types for the ns3::FatalImpl namespace (none currently exported)."""
    root_module = module.get_root()
def register_types_ns3_Hash(module):
    """Register types in the ns3::Hash namespace.

    Adds the Hash::Implementation base class, the 32/64-bit hash function
    pointer type aliases, and the nested ns3::Hash::Function namespace.
    """
    root_module = module.get_root()
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    # Each pointer signature gets three aliases: the alias itself, a pointer
    # to it, and a reference to it (matching pybindgen's usual triple).
    for width in ('32', '64'):
        signature = u'uint%s_t ( * ) ( char const *, size_t ) *' % width
        alias = u'ns3::Hash::Hash%sFunction_ptr' % width
        typehandlers.add_type_alias(signature, alias)
        typehandlers.add_type_alias(signature + u'*', alias + u'*')
        typehandlers.add_type_alias(signature + u'&', alias + u'&')
    ## Register a nested module for the namespace Function
    function_ns = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(function_ns)
def register_types_ns3_Hash_Function(module):
    """Register the concrete hashers in the ns3::Hash::Function namespace."""
    root_module = module.get_root()
    # All four hash implementations (hash-fnv.h, hash-function.h,
    # hash-murmur3.h in module 'core') derive from ns3::Hash::Implementation.
    implementation = root_module['ns3::Hash::Implementation']
    for hasher in ('Fnv1a', 'Hash32', 'Hash64', 'Murmur3'):
        module.add_class(hasher, import_from_module='ns.core', parent=implementation)
def register_types_ns3_TracedValueCallback(module):
    """Register type aliases in the ns3::TracedValueCallback namespace."""
    root_module = module.get_root()
    # The Time traced-value callback signature, plus pointer and reference
    # variants of the alias.
    signature = u'void ( * ) ( ns3::Time, ns3::Time ) *'
    alias = u'ns3::TracedValueCallback::Time'
    typehandlers.add_type_alias(signature, alias)
    typehandlers.add_type_alias(signature + u'*', alias + u'*')
    typehandlers.add_type_alias(signature + u'&', alias + u'&')
def register_methods(root_module):
    """Register the Python bindings for the methods of every wrapped class.

    Dispatches to one register_Ns3*_methods helper per class; each helper
    receives the root module plus the class wrapper that was created earlier
    during type registration.  This function is auto-generated — do not edit
    entries by hand.
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3BridgeHelper_methods(root_module, root_module['ns3::BridgeHelper'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3NetDeviceQueue_Ns3Empty_Ns3DefaultDeleter__lt__ns3NetDeviceQueue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >'])
    register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3Channel_methods(root_module, root_module['ns3::Channel'])
    register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
    register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NetDeviceQueue_methods(root_module, root_module['ns3::NetDeviceQueue'])
    register_Ns3NetDeviceQueueInterface_methods(root_module, root_module['ns3::NetDeviceQueueInterface'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3QueueItem_methods(root_module, root_module['ns3::QueueItem'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3BridgeChannel_methods(root_module, root_module['ns3::BridgeChannel'])
    register_Ns3BridgeNetDevice_methods(root_module, root_module['ns3::BridgeNetDevice'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Bind the methods and operators of ns3::Address (address.h, module 'network')."""
    # Comparison operators and ostream<< are wrapped as Python rich
    # comparisons and __str__.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## address.h (module 'network'): ns3::Address::Address() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
    cls.add_constructor([param('ns3::Address const &', 'address')])
    ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
    cls.add_method('CheckCompatible',
                   'bool',
                   [param('uint8_t', 'type'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyAllFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
    cls.add_method('CopyAllTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
    cls.add_method('CopyTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer')],
                   is_const=True)
    ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')])
    ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
    cls.add_method('GetLength',
                   'uint8_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
    cls.add_method('IsInvalid',
                   'bool',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('uint8_t', 'type')],
                   is_const=True)
    ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
    cls.add_method('Register',
                   'uint8_t',
                   [],
                   is_static=True)
    ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Bind ns3::AttributeConstructionList (attribute-construction-list.h, module 'core')."""
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add',
                   'void',
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('Find',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Bind ns3::AttributeConstructionList::Item (attribute-construction-list.h, module 'core')."""
    ## Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    ## Public data members of the Item struct, exposed as instance attributes.
    item_fields = (
        ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
        ('name', 'std::string'),
        ('value', 'ns3::Ptr< ns3::AttributeValue >'),
    )
    for field_name, cpp_type in item_fields:
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3BridgeHelper_methods(root_module, cls):
    """Bind ns3::BridgeHelper (bridge-helper.h, module 'bridge')."""
    ## bridge-helper.h (module 'bridge'): ns3::BridgeHelper::BridgeHelper(ns3::BridgeHelper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::BridgeHelper const &', 'arg0')])
    ## bridge-helper.h (module 'bridge'): ns3::BridgeHelper::BridgeHelper() [constructor]
    cls.add_constructor([])
    ## bridge-helper.h (module 'bridge'): ns3::NetDeviceContainer ns3::BridgeHelper::Install(ns3::Ptr<ns3::Node> node, ns3::NetDeviceContainer c) [member function]
    cls.add_method('Install',
                   'ns3::NetDeviceContainer',
                   [param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::NetDeviceContainer', 'c')])
    ## bridge-helper.h (module 'bridge'): ns3::NetDeviceContainer ns3::BridgeHelper::Install(std::string nodeName, ns3::NetDeviceContainer c) [member function]
    cls.add_method('Install',
                   'ns3::NetDeviceContainer',
                   [param('std::string', 'nodeName'), param('ns3::NetDeviceContainer', 'c')])
    ## bridge-helper.h (module 'bridge'): void ns3::BridgeHelper::SetDeviceAttribute(std::string n1, ns3::AttributeValue const & v1) [member function]
    cls.add_method('SetDeviceAttribute',
                   'void',
                   [param('std::string', 'n1'), param('ns3::AttributeValue const &', 'v1')])
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Bind ns3::CallbackBase (callback.h, module 'core')."""
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
    cls.add_method('GetImpl',
                   'ns3::Ptr< ns3::CallbackImplBase >',
                   [],
                   is_const=True)
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
    # This constructor is protected in C++, hence the visibility marker.
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Bind ns3::Hasher (hash.h, module 'core')."""
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    ## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
    cls.add_constructor([])
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
    cls.add_method('clear',
                   'ns3::Hasher &',
                   [])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Bind the methods and operators of ns3::Ipv4Address (ipv4-address.h, module 'network')."""
    # Comparison operators and ostream<< are wrapped as Python rich
    # comparisons and __str__.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
    cls.add_constructor([param('uint32_t', 'address')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
    cls.add_constructor([param('char const *', 'address')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('CombineMask',
                   'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
    cls.add_method('ConvertFrom',
                   'ns3::Ipv4Address',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
    cls.add_method('Deserialize',
                   'ns3::Ipv4Address',
                   [param('uint8_t const *', 'buf')],
                   is_static=True)
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
    cls.add_method('Get',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
    cls.add_method('GetAny',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
    cls.add_method('GetLoopback',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('GetSubnetDirectedBroadcast',
                   'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
    cls.add_method('GetZero',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsAny() const [member function]
    cls.add_method('IsAny',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ipv4Address const &', 'other')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
    cls.add_method('IsLocalMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalhost() const [member function]
    cls.add_method('IsLocalhost',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('IsSubnetDirectedBroadcast',
                   'bool',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('uint8_t *', 'buf')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
    cls.add_method('Set',
                   'void',
                   [param('uint32_t', 'address')])
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
    cls.add_method('Set',
                   'void',
                   [param('char const *', 'address')])
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Bind the methods and operators of ns3::Ipv4Mask (ipv4-address.h, module 'network')."""
    # Note: no '<' operator is wrapped for masks, unlike addresses.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
    cls.add_constructor([param('uint32_t', 'mask')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
    cls.add_constructor([param('char const *', 'mask')])
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
    cls.add_method('Get',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
    cls.add_method('GetInverse',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
    cls.add_method('GetLoopback',
                   'ns3::Ipv4Mask',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
    cls.add_method('GetOnes',
                   'ns3::Ipv4Mask',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
    cls.add_method('GetPrefixLength',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
    cls.add_method('GetZero',
                   'ns3::Ipv4Mask',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ipv4Mask', 'other')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
    cls.add_method('IsMatch',
                   'bool',
                   [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
    cls.add_method('Set',
                   'void',
                   [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Bind the methods and operators of ns3::Ipv6Address (ipv6-address.h, module 'network')."""
    # Comparison operators and ostream<< are wrapped as Python rich
    # comparisons and __str__.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
    cls.add_constructor([param('char const *', 'address')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
    cls.add_constructor([param('uint8_t *', 'address')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
    cls.add_method('CombinePrefix',
                   'ns3::Ipv6Address',
                   [param('ns3::Ipv6Prefix const &', 'prefix')])
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
    cls.add_method('ConvertFrom',
                   'ns3::Ipv6Address',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
    cls.add_method('Deserialize',
                   'ns3::Ipv6Address',
                   [param('uint8_t const *', 'buf')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
    cls.add_method('GetAllHostsMulticast',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
    cls.add_method('GetAllNodesMulticast',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
    cls.add_method('GetAllRoutersMulticast',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
    cls.add_method('GetAny',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
    cls.add_method('GetBytes',
                   'void',
                   [param('uint8_t *', 'buf')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
    cls.add_method('GetIpv4MappedAddress',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
    cls.add_method('GetLoopback',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
    cls.add_method('GetOnes',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
    cls.add_method('GetZero',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
    # Marked deprecated in the scanned headers; kept for API compatibility.
    cls.add_method('IsAllHostsMulticast',
                   'bool',
                   [],
                   deprecated=True, is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
    cls.add_method('IsAllNodesMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
    cls.add_method('IsAllRoutersMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
    cls.add_method('IsAny',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsDocumentation() const [member function]
    cls.add_method('IsDocumentation',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ipv6Address const &', 'other')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() const [member function]
    cls.add_method('IsIpv4MappedAddress',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
    cls.add_method('IsLinkLocal',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
    cls.add_method('IsLinkLocalMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
    cls.add_method('IsLocalhost',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
    cls.add_method('IsSolicitedMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac16Address addr, ns3::Ipv6Address prefix) [member function]
    # MakeAutoconfiguredAddress is overloaded for Mac16/Mac48/Mac64 sources.
    cls.add_method('MakeAutoconfiguredAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
    cls.add_method('MakeAutoconfiguredAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac64Address addr, ns3::Ipv6Address prefix) [member function]
    cls.add_method('MakeAutoconfiguredAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac16Address mac) [member function]
    cls.add_method('MakeAutoconfiguredLinkLocalAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac16Address', 'mac')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
    cls.add_method('MakeAutoconfiguredLinkLocalAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'mac')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac64Address mac) [member function]
    cls.add_method('MakeAutoconfiguredLinkLocalAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac64Address', 'mac')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function]
    cls.add_method('MakeIpv4MappedAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Ipv4Address', 'addr')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
    cls.add_method('MakeSolicitedAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('uint8_t *', 'buf')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
    cls.add_method('Set',
                   'void',
                   [param('char const *', 'address')])
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
    cls.add_method('Set',
                   'void',
                   [param('uint8_t *', 'address')])
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
def register_Ns3Mac48Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(ns3::Mac48Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(char const * str) [constructor]
cls.add_constructor([param('char const *', 'str')])
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::Allocate() [member function]
cls.add_method('Allocate',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Mac48Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyFrom(uint8_t const * buffer) [member function]
cls.add_method('CopyFrom',
'void',
[param('uint8_t const *', 'buffer')])
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'void',
[param('uint8_t *', 'buffer')],
is_const=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv4Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv4Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv6Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv6Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast6Prefix() [member function]
cls.add_method('GetMulticast6Prefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticastPrefix() [member function]
cls.add_method('GetMulticastPrefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsGroup() const [member function]
cls.add_method('IsGroup',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): static bool ns3::Mac48Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
return
def register_Ns3NetDeviceContainer_methods(root_module, cls):
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'arg0')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer() [constructor]
cls.add_constructor([])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::Ptr<ns3::NetDevice> dev) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(std::string devName) [constructor]
cls.add_constructor([param('std::string', 'devName')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & a, ns3::NetDeviceContainer const & b) [constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'a'), param('ns3::NetDeviceContainer const &', 'b')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::NetDeviceContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NetDeviceContainer', 'other')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(std::string deviceName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'deviceName')])
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::NetDeviceContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True)
## net-device-container.h (module 'network'): uint32_t ns3::NetDeviceContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3ObjectFactory_methods(root_module, cls):
cls.add_output_stream_operator()
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
cls.add_constructor([param('std::string', 'typeId')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Object >',
[],
is_const=True)
## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('ns3::TypeId', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('char const *', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('std::string', 'tid')])
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
def register_Ns3TimeWithUnit_methods(root_module, cls):
cls.add_output_stream_operator()
## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor]
cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')])
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')],
deprecated=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
cls.add_method('GetHash',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
cls.add_method('GetSize',
'std::size_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
cls.add_method('LookupByHash',
'ns3::TypeId',
[param('uint32_t', 'hash')],
is_static=True)
## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
cls.add_method('LookupByHashFailSafe',
'bool',
[param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
is_static=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name, ns3::TypeId::TraceSourceInformation * info) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
cls.add_method('SetSize',
'ns3::TypeId',
[param('std::size_t', 'size')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t uid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'uid')])
return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportLevel [variable]
cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportMsg [variable]
cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
cls.add_instance_attribute('callback', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportLevel [variable]
cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportMsg [variable]
cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register the ns3::int64x64_t fixed-point type on *cls*: arithmetic
    and comparison operator overloads, the numeric/string constructors,
    and its accessor/utility member functions.

    # NOTE(review): the ## comments cite int64x64-double.h, i.e. the
    # double-backed implementation of int64x64_t -- presumably the build
    # that produced these bindings was configured that way; confirm.
    """
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
    cls.add_constructor([])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long double v) [constructor]
    cls.add_constructor([param('long double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
    cls.add_method('GetHigh',
                   'int64_t',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
    cls.add_method('GetLow',
                   'uint64_t',
                   [],
                   is_const=True)
    ## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
    cls.add_method('Invert',
                   'ns3::int64x64_t',
                   [param('uint64_t', 'v')],
                   is_static=True)
    ## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
    cls.add_method('MulByInvert',
                   'void',
                   [param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::implementation [variable]
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
    return
def register_Ns3Object_methods(root_module, cls):
    """Register the ns3::Object base class on *cls*: public lifecycle
    API (AggregateObject/Dispose/Initialize, TypeId queries) plus the
    protected copy constructor and protected virtual hooks
    (DoDispose/DoInitialize/NotifyNewAggregate) exposed for subclassing
    from Python."""
    ## object.h (module 'core'): ns3::Object::Object() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
    cls.add_method('AggregateObject',
                   'void',
                   [param('ns3::Ptr< ns3::Object >', 'other')])
    ## object.h (module 'core'): void ns3::Object::Dispose() [member function]
    cls.add_method('Dispose',
                   'void',
                   [])
    ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
    cls.add_method('GetAggregateIterator',
                   'ns3::Object::AggregateIterator',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## object.h (module 'core'): void ns3::Object::Initialize() [member function]
    cls.add_method('Initialize',
                   'void',
                   [])
    ## object.h (module 'core'): bool ns3::Object::IsInitialized() const [member function]
    cls.add_method('IsInitialized',
                   'bool',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
    cls.add_constructor([param('ns3::Object const &', 'o')],
                        visibility='protected')
    ## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register the nested ns3::Object::AggregateIterator on *cls*:
    constructors plus the HasNext/Next iteration pair over an Object's
    aggregates."""
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
    cls.add_method('HasNext',
                   'bool',
                   [],
                   is_const=True)
    ## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
    cls.add_method('Next',
                   'ns3::Ptr< ns3::Object const >',
                   [])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register the SimpleRefCount<AttributeAccessor, ...> template
    instantiation on *cls*: constructors plus the static Cleanup().
    The seven sibling functions below are the same pattern for other
    SimpleRefCount instantiations."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register SimpleRefCount<AttributeChecker, ...> on *cls*."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register SimpleRefCount<AttributeValue, ...> on *cls*."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register SimpleRefCount<CallbackImplBase, ...> on *cls*."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register SimpleRefCount<Hash::Implementation, ...> on *cls*."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3NetDeviceQueue_Ns3Empty_Ns3DefaultDeleter__lt__ns3NetDeviceQueue__gt___methods(root_module, cls):
    """Register SimpleRefCount<NetDeviceQueue, ...> on *cls*."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter< ns3::NetDeviceQueue > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, cls):
    """Register SimpleRefCount<QueueItem, ...> on *cls*."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >::SimpleRefCount(ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter< ns3::QueueItem > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register SimpleRefCount<TraceSourceAccessor, ...> on *cls*."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup',
                   'void',
                   [],
                   is_static=True)
    return
def register_Ns3Time_methods(root_module, cls):
    """Register the ns3::Time simulation-time type on *cls*: arithmetic
    and comparison operators (scalar multiply/divide by int64_t),
    numeric/string constructors, unit conversions (Get*/To*/From*), sign
    predicates, and the static resolution/Min/Max helpers."""
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## nstime.h (module 'core'): ns3::Time::Time() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
    cls.add_constructor([param('ns3::Time const &', 'o')])
    ## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & v) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
    cls.add_constructor([param('std::string const &', 's')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit ns3::Time::As(ns3::Time::Unit const unit) const [member function]
    cls.add_method('As',
                   'ns3::TimeWithUnit',
                   [param('ns3::Time::Unit const', 'unit')],
                   is_const=True)
    ## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
    cls.add_method('Compare',
                   'int',
                   [param('ns3::Time const &', 'o')],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
    cls.add_method('From',
                   'ns3::Time',
                   [param('ns3::int64x64_t const &', 'value')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value, ns3::Time::Unit unit) [member function]
    cls.add_method('From',
                   'ns3::Time',
                   [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromDouble',
                   'ns3::Time',
                   [param('double', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromInteger',
                   'ns3::Time',
                   [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')],
                   is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function]
    cls.add_method('GetDays',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
    cls.add_method('GetDouble',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
    cls.add_method('GetFemtoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function]
    cls.add_method('GetHours',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
    cls.add_method('GetInteger',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
    cls.add_method('GetMicroSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
    cls.add_method('GetMilliSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function]
    cls.add_method('GetMinutes',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
    cls.add_method('GetNanoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
    cls.add_method('GetPicoSeconds',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
    cls.add_method('GetResolution',
                   'ns3::Time::Unit',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
    cls.add_method('GetSeconds',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
    cls.add_method('GetTimeStep',
                   'int64_t',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function]
    cls.add_method('GetYears',
                   'double',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
    cls.add_method('IsNegative',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
    cls.add_method('IsPositive',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
    cls.add_method('IsStrictlyNegative',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
    cls.add_method('IsStrictlyPositive',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
    cls.add_method('IsZero',
                   'bool',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function]
    cls.add_method('Max',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
    cls.add_method('Min',
                   'ns3::Time',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
    cls.add_method('SetResolution',
                   'void',
                   [param('ns3::Time::Unit', 'resolution')],
                   is_static=True)
    ## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function]
    cls.add_method('StaticInit',
                   'bool',
                   [],
                   is_static=True)
    ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function]
    cls.add_method('To',
                   'ns3::int64x64_t',
                   [param('ns3::Time::Unit', 'unit')],
                   is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToDouble',
                   'double',
                   [param('ns3::Time::Unit', 'unit')],
                   is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToInteger',
                   'int64_t',
                   [param('ns3::Time::Unit', 'unit')],
                   is_const=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register the abstract ns3::TraceSourceAccessor on *cls*:
    constructors and the four pure-virtual connect/disconnect operations
    (with and without a context string)."""
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register the abstract ns3::AttributeAccessor on *cls*:
    constructors plus the pure-virtual Get/Set/HasGetter/HasSetter
    contract."""
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register the abstract ns3::AttributeChecker on *cls*:
    constructors, the pure-virtual Check/Copy/Create and type-name
    queries, plus the concrete CreateValidValue helper."""
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register the abstract ns3::AttributeValue on *cls*: constructors
    plus the pure-virtual Copy/serialization contract."""
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register ns3::CallbackChecker on *cls* (default and copy
    constructors only; behavior is inherited from AttributeChecker)."""
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register ns3::CallbackImplBase on *cls*: constructors, the
    pure-virtual GetTypeid/IsEqual contract, and the protected static
    Demangle helper."""
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle',
                   'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register ns3::CallbackValue on *cls*: constructors (including
    from CallbackBase), the virtual Copy/serialization overrides, and
    the Set mutator."""
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3Channel_methods(root_module, cls):
    """Bind ns3::Channel (channel.h, module 'network')."""
    # Constructors: copy, then default (registration order preserved).
    cls.add_constructor([param('ns3::Channel const &', 'arg0')])
    cls.add_constructor([])
    # GetDevice / GetNDevices are pure virtual; GetId is concrete.
    cls.add_method('GetDevice', 'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetId', 'uint32_t', [], is_const=True)
    cls.add_method('GetNDevices', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
def register_Ns3EmptyAttributeAccessor_methods(root_module, cls):
    """Bind ns3::EmptyAttributeAccessor (attribute.h, module 'core')."""
    # Constructors: copy first, then default (registration order preserved).
    cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Get', 'bool',
                   [param('ns3::ObjectBase const *', 'object'),
                    param('ns3::AttributeValue &', 'attribute')],
                   is_const=True, is_virtual=True)
    cls.add_method('HasGetter', 'bool', [], is_const=True, is_virtual=True)
    cls.add_method('HasSetter', 'bool', [], is_const=True, is_virtual=True)
    cls.add_method('Set', 'bool',
                   [param('ns3::ObjectBase *', 'object'),
                    param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
def register_Ns3EmptyAttributeChecker_methods(root_module, cls):
    """Bind ns3::EmptyAttributeChecker (attribute.h, module 'core')."""
    # Constructors: copy first, then default (registration order preserved).
    cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Check', 'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
    cls.add_method('Copy', 'bool',
                   [param('ns3::AttributeValue const &', 'source'),
                    param('ns3::AttributeValue &', 'destination')],
                   is_const=True, is_virtual=True)
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # Remaining const virtual accessors take no arguments; register them
    # in the same order as the individual calls they replace.
    for method_name, return_type in (
            ('GetUnderlyingTypeInformation', 'std::string'),
            ('GetValueTypeName', 'std::string'),
            ('HasUnderlyingTypeInformation', 'bool')):
        cls.add_method(method_name, return_type, [],
                       is_const=True, is_virtual=True)
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Bind ns3::EmptyAttributeValue; its virtual hooks are private in C++."""
    # Constructors: copy first, then default (registration order preserved).
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, visibility='private', is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Bind ns3::Ipv4AddressChecker: default and copy constructors only."""
    # Default constructor first, then the copy constructor (order preserved).
    for ctor_args in ([], [param('ns3::Ipv4AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Bind ns3::Ipv4AddressValue (ipv4-address.h, module 'network')."""
    # Constructors: default, copy, then conversion from Ipv4Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    # AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Address const &', 'value')])
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Bind ns3::Ipv4MaskChecker: default and copy constructors only."""
    # Default constructor first, then the copy constructor (order preserved).
    for ctor_args in ([], [param('ns3::Ipv4MaskChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Bind ns3::Ipv4MaskValue (ipv4-address.h, module 'network')."""
    # Constructors: default, copy, then conversion from Ipv4Mask.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    # AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv4Mask', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')])
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Bind ns3::Ipv6AddressChecker: default and copy constructors only."""
    # Default constructor first, then the copy constructor (order preserved).
    for ctor_args in ([], [param('ns3::Ipv6AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Bind ns3::Ipv6AddressValue (ipv6-address.h, module 'network')."""
    # Constructors: default, copy, then conversion from Ipv6Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    # AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Address const &', 'value')])
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Bind ns3::Ipv6PrefixChecker: default and copy constructors only."""
    # Default constructor first, then the copy constructor (order preserved).
    for ctor_args in ([], [param('ns3::Ipv6PrefixChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Bind ns3::Ipv6PrefixValue (ipv6-address.h, module 'network')."""
    # Constructors: default, copy, then conversion from Ipv6Prefix.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    # AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Ipv6Prefix', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')])
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
    """Bind ns3::Mac48AddressChecker: default and copy constructors only."""
    # Default constructor first, then the copy constructor (order preserved).
    for ctor_args in ([], [param('ns3::Mac48AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
def register_Ns3Mac48AddressValue_methods(root_module, cls):
    """Bind ns3::Mac48AddressValue (mac48-address.h, module 'network')."""
    # Constructors: default, copy, then conversion from Mac48Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
    # AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Mac48Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Mac48Address const &', 'value')])
def register_Ns3NetDevice_methods(root_module, cls):
    """Bind the abstract base class ns3::NetDevice (net-device.h, module 'network').

    Almost every method is pure virtual; concrete devices override them.
    All registrations keep the original call order.
    """
    # Constructors: default, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    cls.add_method('AddLinkChangeCallback', 'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    # Address / topology getters, all pure virtual and const.
    cls.add_method('GetAddress', 'ns3::Address', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetBroadcast', 'ns3::Address', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetChannel', 'ns3::Ptr< ns3::Channel >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetIfIndex', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetMtu', 'uint16_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # GetMulticast is overloaded for IPv4 and IPv6 group addresses.
    cls.add_method('GetMulticast', 'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetMulticast', 'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetNode', 'ns3::Ptr< ns3::Node >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Consecutive boolean capability predicates share one signature shape.
    for predicate in ('IsBridge', 'IsBroadcast', 'IsLinkUp', 'IsMulticast',
                      'IsPointToPoint', 'NeedsArp'):
        cls.add_method(predicate, 'bool', [],
                       is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Send', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'),
                    param('ns3::Address const &', 'dest'),
                    param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SendFrom', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'),
                    param('ns3::Address const &', 'source'),
                    param('ns3::Address const &', 'dest'),
                    param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetAddress', 'void', [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetIfIndex', 'void', [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetMtu', 'bool', [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetPromiscReceiveCallback', 'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetReceiveCallback', 'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SupportsSendFrom', 'bool', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
def register_Ns3NetDeviceQueue_methods(root_module, cls):
    """Bind ns3::NetDeviceQueue (net-device.h, module 'network')."""
    # Constructors: copy first, then default (registration order preserved).
    cls.add_constructor([param('ns3::NetDeviceQueue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('GetQueueLimits', 'ns3::Ptr< ns3::QueueLimits >', [])
    cls.add_method('IsStopped', 'bool', [], is_const=True)
    # Byte-count bookkeeping hooks for queue limits.
    cls.add_method('NotifyQueuedBytes', 'void', [param('uint32_t', 'bytes')])
    cls.add_method('NotifyTransmittedBytes', 'void',
                   [param('uint32_t', 'bytes')])
    cls.add_method('ResetQueueLimits', 'void', [])
    cls.add_method('SetQueueLimits', 'void',
                   [param('ns3::Ptr< ns3::QueueLimits >', 'ql')])
    cls.add_method('SetWakeCallback', 'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    # Flow-control transitions, all virtual and parameterless.
    for transition in ('Start', 'Stop', 'Wake'):
        cls.add_method(transition, 'void', [], is_virtual=True)
def register_Ns3NetDeviceQueueInterface_methods(root_module, cls):
    """Bind ns3::NetDeviceQueueInterface (net-device.h, module 'network')."""
    # Shared C++ type string for the queue-selection callback.
    select_cb_type = ('ns3::Callback< unsigned char, ns3::Ptr< ns3::QueueItem >, '
                      'ns3::empty, ns3::empty, ns3::empty, ns3::empty, '
                      'ns3::empty, ns3::empty, ns3::empty, ns3::empty >')
    # Constructors: copy first, then default (registration order preserved).
    cls.add_constructor([param('ns3::NetDeviceQueueInterface const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('CreateTxQueues', 'void', [])
    cls.add_method('GetNTxQueues', 'uint8_t', [], is_const=True)
    cls.add_method('GetSelectQueueCallback', select_cb_type, [], is_const=True)
    cls.add_method('GetTxQueue', 'ns3::Ptr< ns3::NetDeviceQueue >',
                   [param('uint8_t', 'i')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SetSelectQueueCallback', 'void',
                   [param(select_cb_type, 'cb')])
    cls.add_method('SetTxQueuesN', 'void', [param('uint8_t', 'numTxQueues')])
    # DoDispose is protected in the C++ class.
    cls.add_method('DoDispose', 'void', [],
                   visibility='protected', is_virtual=True)
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    """Bind ns3::ObjectFactoryChecker: default and copy constructors only."""
    # Default constructor first, then the copy constructor (order preserved).
    for ctor_args in ([], [param('ns3::ObjectFactoryChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    """Bind ns3::ObjectFactoryValue (object-factory.h, module 'core')."""
    # Constructors: default, copy, then conversion from ObjectFactory.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    # AttributeValue virtual interface plus the typed Get/Set accessors.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::ObjectFactory', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void',
                   [param('ns3::ObjectFactory const &', 'value')])
def register_Ns3QueueItem_methods(root_module, cls):
    """Bind ns3::QueueItem (net-device.h, module 'network')."""
    # Expose the C++ operator<< so the wrapper is printable.
    cls.add_output_stream_operator()
    cls.add_constructor([param('ns3::Ptr< ns3::Packet >', 'p')])
    cls.add_method('GetPacket', 'ns3::Ptr< ns3::Packet >', [], is_const=True)
    cls.add_method('GetPacketSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetUint8Value', 'bool',
                   [param('ns3::QueueItem::Uint8Values', 'field'),
                    param('uint8_t &', 'value')],
                   is_const=True, is_virtual=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
def register_Ns3TimeValue_methods(root_module, cls):
    """Register bindings for ns3::TimeValue (nstime.h, module 'core')."""
    # Default, copy, and value-wrapping constructors, in declaration order.
    for ctor_args in ([],
                      [param('ns3::TimeValue const &', 'arg0')],
                      [param('ns3::Time const &', 'value')]):
        cls.add_constructor(ctor_args)
    # AttributeValue interface plus the typed Get/Set accessors.
    for name, retval, params, flags in [
        ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
         dict(is_const=True, is_virtual=True)),
        ('DeserializeFromString', 'bool',
         [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         dict(is_virtual=True)),
        ('Get', 'ns3::Time', [],
         dict(is_const=True)),
        ('SerializeToString', 'std::string',
         [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         dict(is_const=True, is_virtual=True)),
        ('Set', 'void',
         [param('ns3::Time const &', 'value')],
         dict()),
    ]:
        cls.add_method(name, retval, params, **flags)
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register bindings for ns3::TypeIdChecker (type-id.h, module 'core').

    Only the default and copy constructors are exposed.
    """
    for ctor_args in ([], [param('ns3::TypeIdChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register bindings for ns3::TypeIdValue (type-id.h, module 'core')."""
    # Default, copy, and value-wrapping constructors, in declaration order.
    for ctor_args in ([],
                      [param('ns3::TypeIdValue const &', 'arg0')],
                      [param('ns3::TypeId const &', 'value')]):
        cls.add_constructor(ctor_args)
    # AttributeValue interface plus the typed Get/Set accessors.
    for name, retval, params, flags in [
        ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
         dict(is_const=True, is_virtual=True)),
        ('DeserializeFromString', 'bool',
         [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         dict(is_virtual=True)),
        ('Get', 'ns3::TypeId', [],
         dict(is_const=True)),
        ('SerializeToString', 'std::string',
         [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         dict(is_const=True, is_virtual=True)),
        ('Set', 'void',
         [param('ns3::TypeId const &', 'value')],
         dict()),
    ]:
        cls.add_method(name, retval, params, **flags)
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register bindings for ns3::AddressChecker (address.h, module 'network').

    Only the default and copy constructors are exposed.
    """
    for ctor_args in ([], [param('ns3::AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
def register_Ns3AddressValue_methods(root_module, cls):
    """Register bindings for ns3::AddressValue (address.h, module 'network')."""
    # Default, copy, and value-wrapping constructors, in declaration order.
    for ctor_args in ([],
                      [param('ns3::AddressValue const &', 'arg0')],
                      [param('ns3::Address const &', 'value')]):
        cls.add_constructor(ctor_args)
    # AttributeValue interface plus the typed Get/Set accessors.
    for name, retval, params, flags in [
        ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
         dict(is_const=True, is_virtual=True)),
        ('DeserializeFromString', 'bool',
         [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         dict(is_virtual=True)),
        ('Get', 'ns3::Address', [],
         dict(is_const=True)),
        ('SerializeToString', 'std::string',
         [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         dict(is_const=True, is_virtual=True)),
        ('Set', 'void',
         [param('ns3::Address const &', 'value')],
         dict()),
    ]:
        cls.add_method(name, retval, params, **flags)
def register_Ns3BridgeChannel_methods(root_module, cls):
    """Register bindings for ns3::BridgeChannel (bridge-channel.h, module 'bridge')."""
    # static ns3::TypeId BridgeChannel::GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # BridgeChannel()
    cls.add_constructor([])
    # void AddChannel(Ptr<Channel> bridgedChannel)
    cls.add_method('AddChannel', 'void',
                   [param('ns3::Ptr< ns3::Channel >', 'bridgedChannel')])
    # ns3::Channel interface overrides (const + virtual).
    cls.add_method('GetNDevices', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetDevice', 'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
def register_Ns3BridgeNetDevice_methods(root_module, cls):
    """Register bindings for ns3::BridgeNetDevice (bridge-net-device.h).

    Binds the constructor, the full public ns3::NetDevice interface as
    implemented by the learning bridge, and the protected
    forwarding/learning helpers (registered with visibility='protected').
    """
    ## bridge-net-device.h (module 'bridge'): ns3::BridgeNetDevice::BridgeNetDevice() [constructor]
    cls.add_constructor([])
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::AddBridgePort(ns3::Ptr<ns3::NetDevice> bridgePort) [member function]
    cls.add_method('AddBridgePort',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'bridgePort')])
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::NetDevice> ns3::BridgeNetDevice::GetBridgePort(uint32_t n) const [member function]
    cls.add_method('GetBridgePort',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'n')],
                   is_const=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::Channel> ns3::BridgeNetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): uint32_t ns3::BridgeNetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): uint16_t ns3::BridgeNetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): uint32_t ns3::BridgeNetDevice::GetNBridgePorts() const [member function]
    cls.add_method('GetNBridgePorts',
                   'uint32_t',
                   [],
                   is_const=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::Node> ns3::BridgeNetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): static ns3::TypeId ns3::BridgeNetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    # Protected members below: dispose hook plus the bridge's internal
    # forwarding/learning machinery.
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::ForwardBroadcast(ns3::Ptr<ns3::NetDevice> incomingPort, ns3::Ptr<const ns3::Packet> packet, uint16_t protocol, ns3::Mac48Address src, ns3::Mac48Address dst) [member function]
    cls.add_method('ForwardBroadcast',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'incomingPort'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Mac48Address', 'src'), param('ns3::Mac48Address', 'dst')],
                   visibility='protected')
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::ForwardUnicast(ns3::Ptr<ns3::NetDevice> incomingPort, ns3::Ptr<const ns3::Packet> packet, uint16_t protocol, ns3::Mac48Address src, ns3::Mac48Address dst) [member function]
    cls.add_method('ForwardUnicast',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'incomingPort'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Mac48Address', 'src'), param('ns3::Mac48Address', 'dst')],
                   visibility='protected')
    ## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::NetDevice> ns3::BridgeNetDevice::GetLearnedState(ns3::Mac48Address source) [member function]
    cls.add_method('GetLearnedState',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('ns3::Mac48Address', 'source')],
                   visibility='protected')
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::Learn(ns3::Mac48Address source, ns3::Ptr<ns3::NetDevice> port) [member function]
    cls.add_method('Learn',
                   'void',
                   [param('ns3::Mac48Address', 'source'), param('ns3::Ptr< ns3::NetDevice >', 'port')],
                   visibility='protected')
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::ReceiveFromDevice(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<const ns3::Packet> packet, uint16_t protocol, ns3::Address const & source, ns3::Address const & destination, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('ReceiveFromDevice',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'destination'), param('ns3::NetDevice::PacketType', 'packetType')],
                   visibility='protected')
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register bindings for the abstract ns3::Hash::Implementation base class."""
    # Copy constructor first, then the default constructor (declaration order).
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    cls.add_constructor([])
    # GetHash32 and clear are pure virtual; GetHash64 is overridable but
    # has a default implementation.
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [],
                   is_pure_virtual=True, is_virtual=True)
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Fnv1a (hash-fnv.h)."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    cls.add_constructor([])
    # Both hash widths share the same (buffer, size) signature.
    for name, retval in (('GetHash32', 'uint32_t'), ('GetHash64', 'uint64_t')):
        cls.add_method(name, retval,
                       [param('char const *', 'buffer'), param('size_t const', 'size')],
                       is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash32 (hash-function.h)."""
    # Copy constructor, then the wrapping constructor taking a hash callback.
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    # Hash32 only provides the 32-bit hash (no GetHash64 override).
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash64 (hash-function.h)."""
    # Copy constructor, then the wrapping constructor taking a hash callback.
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    # Both hash widths share the same (buffer, size) signature.
    for name, retval in (('GetHash32', 'uint32_t'), ('GetHash64', 'uint64_t')):
        cls.add_method(name, retval,
                       [param('char const *', 'buffer'), param('size_t const', 'size')],
                       is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Murmur3 (hash-murmur3.h)."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    cls.add_constructor([])
    # Both hash widths share the same (buffer, size) signature.
    for name, retval in (('GetHash32', 'uint32_t'), ('GetHash64', 'uint64_t')):
        cls.add_method(name, retval,
                       [param('char const *', 'buffer'), param('size_t const', 'size')],
                       is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
def register_functions(root_module):
    """Register free-function bindings for each nested ns3 namespace."""
    for submodule_name, registrar in (
            ('FatalImpl', register_functions_ns3_FatalImpl),
            ('Hash', register_functions_ns3_Hash),
            ('TracedValueCallback', register_functions_ns3_TracedValueCallback),
    ):
        registrar(root_module.get_submodule(submodule_name), root_module)
def register_functions_ns3_FatalImpl(module, root_module):
    """Register free functions of the ns3::FatalImpl namespace (none to bind)."""
    return
def register_functions_ns3_Hash(module, root_module):
    """Register free functions of ns3::Hash, recursing into ns3::Hash::Function."""
    register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
    return
def register_functions_ns3_Hash_Function(module, root_module):
    """Register free functions of ns3::Hash::Function (none to bind)."""
    return
def register_functions_ns3_TracedValueCallback(module, root_module):
    """Register free functions of ns3::TracedValueCallback (none to bind)."""
    return
def main():
    """Build the module description and write the generated binding code
    to standard output, in the fixed order types -> methods -> functions."""
    sink = FileCodeSink(sys.stdout)
    root = module_init()
    register_types(root)
    register_methods(root)
    register_functions(root)
    root.generate(sink)
# Emit the bindings when this module is executed directly.
if __name__ == '__main__':
    main()
| gpl-2.0 |
kevinmel2000/sl4a | python/src/Mac/Modules/ah/ahscan.py | 34 | 1360 | # Scan an Apple header file, generating a Python file of generator calls.
import sys
from bgenlocations import TOOLBOXDIR, BGENDIR
sys.path.append(BGENDIR)
from scantools import Scanner_OSX
# Base names for the header being scanned and the files being generated.
LONG = "AppleHelp"   # header base name: scans AppleHelp.h
SHORT = "ah"         # prefix for the generated module (ahgen.py / ahsupport)
OBJECT = "NOTUSED"   # object-type marker; this module exposes no object methods
def main():
    # NOTE: Python 2 code (print statements, execfile, exec-as-statement).
    # Scan the AppleHelp header, writing the generator-call file and the
    # derived Python constant definitions, then sanity-check both outputs.
    input = LONG + ".h"
    output = SHORT + "gen.py"
    defsoutput = TOOLBOXDIR + LONG + ".py"
    scanner = MyScanner(input, output, defsoutput)
    scanner.scan()
    scanner.close()
    print "=== Testing definitions output code ==="
    # Execute the generated definitions in a scratch namespace to catch
    # syntax/runtime errors in the emitted constants.
    execfile(defsoutput, {}, {})
    print "=== Done scanning and generating, now importing the generated code... ==="
    # Import the hand-written support module that consumes the generated file.
    exec "import " + SHORT + "support"
    print "=== Done. It's up to you to compile it now! ==="
class MyScanner(Scanner_OSX):
    """Header scanner specialised for AppleHelp.h.

    All entries are generated as plain Functions today, and no names,
    types, or argument lists are blacklisted or repaired.
    """

    def destination(self, type, name, arglist):
        # Choose the generator class ("Function" vs "Method") and the
        # output list an entry is appended to, based on its first argument.
        classname = "Function"
        listname = "functions"
        if arglist:
            t, n, m = arglist[0]
            # This is non-functional today
            if t == OBJECT and m == "InMode":
                classname = "Method"
                listname = "methods"
        return classname, listname

    def makeblacklistnames(self):
        # Function names to skip entirely; none for this module.
        return [
        ]

    def makeblacklisttypes(self):
        # Argument/return types that cause an entry to be skipped; none here.
        return [
        ]

    def makerepairinstructions(self):
        # Manual argument-list fixups; none needed for this module.
        return [
        ]
# Run the scan when invoked as a script.
if __name__ == "__main__":
    main()
| apache-2.0 |
broxtronix/distributed | distributed/scheduler.py | 1 | 115021 | from __future__ import print_function, division, absolute_import
from collections import defaultdict, deque, OrderedDict
from datetime import datetime, timedelta
import logging
import math
from math import log
import os
import pickle
import random
import resource
import socket
from time import time
from timeit import default_timer
try:
from cytoolz import frequencies, topk
except ImportError:
from toolz import frequencies, topk
from toolz import memoize, valmap, first, second, keymap, unique, concat, merge
from tornado import gen
from tornado.gen import Return
from tornado.queues import Queue
from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.iostream import StreamClosedError, IOStream
from dask.compatibility import PY3, unicode
from dask.core import reverse_dict
from dask.order import order
from .batched import BatchedSend
from .utils_comm import (scatter_to_workers, gather_from_workers)
from .core import (rpc, connect, read, write, MAX_BUFFER_SIZE,
Server, send_recv, coerce_to_address, error_message)
from .utils import (All, ignoring, clear_queue, get_ip, ignore_exceptions,
ensure_ip, log_errors, key_split, mean, divide_n_among_bins)
from .config import config
logger = logging.getLogger(__name__)

# Scheduler tunables, overridable via the distributed config, with defaults.
BANDWIDTH = config.get('bandwidth', 100e6)  # assumed network bandwidth (presumably bytes/s -- confirm)
ALLOWED_FAILURES = config.get('allowed-failures', 3)  # worker-failure involvements tolerated per task (see suspicious_tasks)
LOG_PDB = config.get('pdb-on-err') or os.environ.get('DASK_ERROR_PDB', False)  # per the key names: drop into pdb on errors
class Scheduler(Server):
""" Dynamic distributed task scheduler
The scheduler tracks the current state of workers, data, and computations.
The scheduler listens for events and responds by controlling workers
appropriately. It continuously tries to use the workers to execute an ever
growing dask graph.
All events are handled quickly, in linear time with respect to their input
(which is often of constant size) and generally within a millisecond. To
accomplish this the scheduler tracks a lot of state. Every operation
maintains the consistency of this state.
The scheduler communicates with the outside world through Tornado IOStreams
It maintains a consistent and valid view of the world even when listening
to several clients at once.
A Scheduler is typically started either with the ``dask-scheduler``
executable::
$ dask-scheduler
Scheduler started at 127.0.0.1:8786
Or within a LocalCluster a Client starts up without connection
information::
>>> c = Client() # doctest: +SKIP
>>> c.cluster.scheduler # doctest: +SKIP
Scheduler(...)
Users typically do not interact with the scheduler directly but rather with
the client object ``Client``.
**State**
The scheduler contains the following state variables. Each variable is
listed along with what it stores and a brief description.
* **tasks:** ``{key: task}``:
Dictionary mapping key to a serialized task like the following:
``{'function': b'...', 'args': b'...'}`` or ``{'task': b'...'}``
* **dependencies:** ``{key: {keys}}``:
Dictionary showing which keys depend on which others
* **dependents:** ``{key: {keys}}``:
Dictionary showing which keys are dependent on which others
* **task_state:** ``{key: string}``:
Dictionary listing the current state of every task among the following:
released, waiting, stacks, queue, no-worker, processing, memory, erred
* **priority:** ``{key: tuple}``:
A score per key that determines its priority
* **waiting:** ``{key: {key}}``:
Dictionary like dependencies but excludes keys already computed
* **waiting_data:** ``{key: {key}}``:
Dictionary like dependents but excludes keys already computed
* **ready:** ``deque(key)``
Keys that are ready to run, but not yet assigned to a worker
* **stacks:** ``{worker: [keys]}``:
List of keys waiting to be sent to each worker
* **processing:** ``{worker: {key: cost}}``:
Set of keys currently in execution on each worker and their expected
duration
* **stack_durations:** ``{worker: [ints]}``:
Expected durations of stacked tasks
* **stacks_duration:** ``{worker: int}``:
Total duration of all tasks in each workers stack
* **rprocessing:** ``{key: {worker}}``:
Set of workers currently executing a particular task
* **who_has:** ``{key: {worker}}``:
Where each key lives. The current state of distributed memory.
* **has_what:** ``{worker: {key}}``:
What worker has what keys. The transpose of who_has.
* **released:** ``{keys}``
Set of keys that are known, but released from memory
* **unrunnable:** ``{key}``
Keys that we are unable to run
* **restrictions:** ``{key: {hostnames}}``:
A set of hostnames per key of where that key can be run. Usually this
is empty unless a key has been specifically restricted to only run on
certain hosts. These restrictions don't include a worker port. Any
worker on that hostname is deemed valid.
* **loose_restrictions:** ``{key}``:
Set of keys for which we are allowed to violate restrictions (see above)
if no valid workers are present.
* **exceptions:** ``{key: Exception}``:
A dict mapping keys to remote exceptions
* **tracebacks:** ``{key: list}``:
A dict mapping keys to remote tracebacks stored as a list of strings
* **exceptions_blame:** ``{key: key}``:
A dict mapping a key to another key on which it depends that has failed
* **suspicious_tasks:** ``{key: int}``
Number of times a task has been involved in a worker failure
* **deleted_keys:** ``{key: {workers}}``
Locations of workers that have keys that should be deleted
* **wants_what:** ``{client: {key}}``:
What keys are wanted by each client. The transpose of who_wants.
* **who_wants:** ``{key: {client}}``:
Which clients want each key. The active targets of computation.
* **nbytes:** ``{key: int}``:
Number of bytes for a key as reported by workers holding that key.
* **stealable:** ``[[key]]``
A list of stacks of stealable keys, ordered by stealability
* **ncores:** ``{worker: int}``:
Number of cores owned by each worker
* **idle:** ``{worker}``:
Set of workers that are not fully utilized
* **worker_info:** ``{worker: {str: data}}``:
Information about each worker
* **host_info:** ``{hostname: dict}``:
Information about each worker host
* **worker_bytes:** ``{worker: int}``:
Number of bytes in memory on each worker
* **occupancy:** ``{worker: time}``
Expected runtime for all tasks currently processing on a worker
* **services:** ``{str: port}``:
Other services running on this scheduler, like HTTP
* **loop:** ``IOLoop``:
The running Tornado IOLoop
* **streams:** ``[IOStreams]``:
A list of Tornado IOStreams from which we both accept stimuli and
report results
* **task_duration:** ``{key-prefix: time}``
Time we expect certain functions to take, e.g. ``{'sum': 0.25}``
* **coroutines:** ``[Futures]``:
A list of active futures that control operation
* **scheduler_queues:** ``[Queues]``:
A list of Tornado Queues from which we accept stimuli
* **report_queues:** ``[Queues]``:
A list of Tornado Queues on which we report results
"""
default_port = 8786
    def __init__(self, center=None, loop=None,
            max_buffer_size=MAX_BUFFER_SIZE, delete_interval=500,
            synchronize_worker_interval=60000,
            ip=None, services=None, allowed_failures=ALLOWED_FAILURES,
            validate=False, steal=True, **kwargs):
        """Initialize all scheduler state, message handlers, and services.

        ``delete_interval`` and ``synchronize_worker_interval`` are in
        milliseconds.  ``allowed_failures`` bounds how many times a task may
        be implicated in a worker failure before it is marked erred.
        ``validate`` enables expensive internal consistency checks.
        NOTE(review): ``center`` appears unused in this constructor — confirm
        whether it is consumed by the base class via ``**kwargs``.
        """
        # Attributes
        self.ip = ip or get_ip()
        self.allowed_failures = allowed_failures
        self.validate = validate
        self.status = None
        self.delete_interval = delete_interval
        self.synchronize_worker_interval = synchronize_worker_interval
        self.steal = steal
        # Communication state
        self.loop = loop or IOLoop.current()
        self.scheduler_queues = [Queue()]
        self.report_queues = []
        self.worker_streams = dict()
        self.streams = dict()
        self.coroutines = []
        self._worker_coroutines = []
        self._ipython_kernel = None
        # Task state
        self.tasks = dict()
        self.task_state = dict()
        self.dependencies = dict()
        self.dependents = dict()
        self.generation = 0
        self.released = set()
        self.priority = dict()
        self.nbytes = dict()
        self.worker_bytes = dict()
        self.processing = dict()
        self.rprocessing = defaultdict(set)
        # Seed known-fast task prefixes with a tiny duration estimate so
        # scheduling logic treats them as cheap from the start.
        self.task_duration = {prefix: 0.00001 for prefix in fast_tasks}
        self.restrictions = dict()
        self.loose_restrictions = set()
        self.suspicious_tasks = defaultdict(lambda: 0)
        self.stacks = dict()
        self.stack_durations = dict()
        self.stack_duration = dict()
        self.waiting = dict()
        self.waiting_data = dict()
        self.ready = deque()
        self.unrunnable = set()
        self.idle = set()
        self.maybe_idle = set()
        self.who_has = dict()
        self.has_what = dict()
        self.who_wants = defaultdict(set)
        self.wants_what = defaultdict(set)
        self.deleted_keys = defaultdict(set)
        self.exceptions = dict()
        self.tracebacks = dict()
        self.exceptions_blame = dict()
        self.datasets = dict()
        # Keys bucketed by stealability level, most stealable first.
        self.stealable = [set() for i in range(12)]
        self.key_stealable = dict()
        self.stealable_unknown_durations = defaultdict(set)
        # Worker state
        self.ncores = dict()
        self.worker_info = dict()
        self.host_info = defaultdict(dict)
        self.aliases = dict()
        self.saturated = set()
        self.occupancy = dict()
        self.plugins = []
        self.transition_log = deque(maxlen=config.get('transition-log-length',
                                                      100000))
        # Messages from clients that drive computation.
        self.compute_handlers = {'update-graph': self.update_graph,
                                 'update-data': self.update_data,
                                 'missing-data': self.stimulus_missing_data,
                                 'client-releases-keys': self.client_releases_keys,
                                 'restart': self.restart}
        # Request/response RPC handlers exposed by the server.
        self.handlers = {'register-client': self.add_client,
                         'scatter': self.scatter,
                         'register': self.add_worker,
                         'unregister': self.remove_worker,
                         'gather': self.gather,
                         'cancel': self.stimulus_cancel,
                         'feed': self.feed,
                         'terminate': self.close,
                         'broadcast': self.broadcast,
                         'ncores': self.get_ncores,
                         'has_what': self.get_has_what,
                         'who_has': self.get_who_has,
                         'stacks': self.get_stacks,
                         'processing': self.get_processing,
                         'nbytes': self.get_nbytes,
                         'add_keys': self.add_keys,
                         'rebalance': self.rebalance,
                         'replicate': self.replicate,
                         'start_ipython': self.start_ipython,
                         'list_datasets': self.list_datasets,
                         'get_dataset': self.get_dataset,
                         'publish_dataset': self.publish_dataset,
                         'unpublish_dataset': self.unpublish_dataset,
                         'update_data': self.update_data,
                         'change_worker_cores': self.change_worker_cores}
        # Start any auxiliary services (e.g. HTTP) on this scheduler's loop;
        # a failure to launch one service is logged but not fatal.
        self.services = {}
        for k, v in (services or {}).items():
            if isinstance(k, tuple):
                k, port = k
            else:
                port = 0
            try:
                service = v(self, io_loop=self.loop)
                service.listen(port)
                self.services[k] = service
            except Exception as e:
                logger.info("Could not launch service: %s-%d", k, port,
                            exc_info=True)
        # Table of legal task state transitions and their implementations.
        self._transitions = {
                ('released', 'waiting'): self.transition_released_waiting,
                ('waiting', 'ready'): self.transition_waiting_ready,
                ('waiting', 'released'): self.transition_waiting_released,
                ('queue', 'processing'): self.transition_ready_processing,
                ('stacks', 'processing'): self.transition_ready_processing,
                ('processing', 'released'): self.transition_processing_released,
                ('queue', 'released'): self.transition_ready_released,
                ('stacks', 'released'): self.transition_ready_released,
                ('no-worker', 'released'): self.transition_ready_released,
                ('processing', 'memory'): self.transition_processing_memory,
                ('processing', 'erred'): self.transition_processing_erred,
                ('released', 'forgotten'): self.transition_released_forgotten,
                ('memory', 'forgotten'): self.transition_memory_forgotten,
                ('erred', 'forgotten'): self.transition_released_forgotten,
                ('memory', 'released'): self.transition_memory_released,
                ('released', 'erred'): self.transition_released_erred
        }
        # Leave half the file-descriptor budget for non-connection use.
        connection_limit = resource.getrlimit(resource.RLIMIT_NOFILE)[0] / 2
        super(Scheduler, self).__init__(handlers=self.handlers,
                max_buffer_size=max_buffer_size, io_loop=self.loop,
                connection_limit=connection_limit, **kwargs)
##################
# Administration #
##################
def __str__(self):
return '<Scheduler: "%s:%d" processes: %d cores: %d>' % (
self.ip, self.port, len(self.ncores), sum(self.ncores.values()))
__repr__ = __str__
    def __del__(self):
        # Best-effort cleanup on garbage collection: close remaining streams.
        self.close_streams()
@property
def address(self):
return '%s:%d' % (self.ip, self.port)
@property
def address_tuple(self):
return (self.ip, self.port)
def identity(self, stream):
""" Basic information about ourselves and our cluster """
d = {'type': type(self).__name__,
'id': str(self.id),
'workers': list(self.ncores),
'services': {key: v.port for (key, v) in self.services.items()},
'workers': dict(self.worker_info)}
return d
    def start(self, port=8786, start_queues=True):
        """ Clear out old state and restart all running coroutines """
        # Wipe every task-tracking collection so we begin from a clean slate.
        collections = [self.tasks, self.dependencies, self.dependents,
                self.waiting, self.waiting_data, self.released, self.priority,
                self.nbytes, self.restrictions, self.loose_restrictions,
                self.ready, self.who_wants, self.wants_what]
        for collection in collections:
            collection.clear()
        # Cancel leftover worker coroutines from a previous run, if any.
        with ignoring(AttributeError):
            for c in self._worker_coroutines:
                c.cancel()
        # Periodically flush pending delete requests out to workers.
        self._delete_periodic_callback = \
                PeriodicCallback(callback=self.clear_data_from_workers,
                                 callback_time=self.delete_interval,
                                 io_loop=self.loop)
        self._delete_periodic_callback.start()
        # Periodically reconcile our view of worker data with the workers.
        self._synchronize_data_periodic_callback = \
                PeriodicCallback(callback=self.synchronize_worker_data,
                                 callback_time=self.synchronize_worker_interval,
                                 io_loop=self.loop)
        self._synchronize_data_periodic_callback.start()
        if start_queues:
            # Wire up the primary in-process control queue.
            self.loop.add_callback(self.handle_queues, self.scheduler_queues[0], None)
        # Surface any exception from an already-finished coroutine.
        for cor in self.coroutines:
            if cor.done():
                exc = cor.exception()
                if exc:
                    raise exc
        if self.status != 'running':
            self.listen(port)
        self.status = 'running'
        logger.info("Scheduler at: %20s:%s", self.ip, self.port)
        for k, v in self.services.items():
            logger.info("%9s at: %20s:%s", k, self.ip, v.port)
        return self.finished()
    @gen.coroutine
    def finished(self):
        """ Wait until all coroutines have ceased """
        # Loop because self.coroutines may grow while we are waiting.
        while any(not c.done() for c in self.coroutines):
            yield All(self.coroutines)
    def close_streams(self):
        """ Close all active IOStreams """
        # Close outgoing RPC connections first; _rpcs may not exist yet,
        # hence the AttributeError guard.
        with ignoring(AttributeError):
            for r in self._rpcs.values():
                r.close_rpc()
        # Close every client's batched stream's underlying IOStream.
        for stream in self.streams.values():
            stream.stream.close()
    @gen.coroutine
    def close(self, stream=None, fast=False):
        """ Send cleanup signal to all coroutines then wait until finished
        See Also
        --------
        Scheduler.cleanup
        """
        # Stop periodic maintenance and auxiliary services before teardown.
        self._delete_periodic_callback.stop()
        self._synchronize_data_periodic_callback.stop()
        for service in self.services.values():
            service.stop()
        yield self.cleanup()
        if not fast:
            # Wait for all coroutines to drain unless asked for a fast exit.
            yield self.finished()
        self.close_streams()
        self.status = 'closed'
        self.stop()
    @gen.coroutine
    def cleanup(self):
        """ Clean up queues and coroutines, prepare to stop """
        # Re-entrancy guard: run the shutdown sequence at most once.
        if self.status == 'closing':
            raise gen.Return()
        self.status = 'closing'
        logger.debug("Cleaning up coroutines")
        # Flush and close each worker's batched stream.
        for w, bstream in list(self.worker_streams.items()):
            with ignoring(AttributeError):
                yield bstream.close(ignore_closed=True)
        # Tell secondary control queues and all report queues to shut down.
        for s in self.scheduler_queues[1:]:
            s.put_nowait({'op': 'close-stream'})
        for q in self.report_queues:
            q.put_nowait({'op': 'close'})
###########
# Stimuli #
###########
    def add_worker(self, stream=None, address=None, keys=(), ncores=None,
                   name=None, coerce_address=True, nbytes=None, now=None,
                   host_info=None, **info):
        """ Add a new worker to the cluster

        Also doubles as the heartbeat handler: a call for an address that is
        already registered only refreshes its info and returns 'OK'.
        """
        with log_errors():
            local_now = time()
            now = now or time()
            info = info or {}
            host_info = host_info or {}
            if coerce_address:
                address = self.coerce_address(address)
                host, port = address.split(':')
                self.host_info[host]['last-seen'] = local_now
            if address not in self.worker_info:
                self.worker_info[address] = dict()
            if info:
                self.worker_info[address].update(info)
            if host_info:
                self.host_info[host].update(host_info)
            # Track clock skew between worker and scheduler; used later by
            # correct_time_delay to align message timestamps.
            delay = time() - now
            self.worker_info[address]['time-delay'] = delay
            self.worker_info[address]['last-seen'] = time()
            if address in self.ncores:
                # Already registered; treat this call as a heartbeat.
                return 'OK'
            name = name or address
            if name in self.aliases:
                return 'name taken, %s' % name
            if coerce_address:
                if 'ports' not in self.host_info[host]:
                    self.host_info[host].update({'ports': set(), 'cores': 0})
                self.host_info[host]['ports'].add(port)
                self.host_info[host]['cores'] += ncores
            self.ncores[address] = ncores
            self.aliases[name] = address
            self.worker_info[address]['name'] = name
            if address not in self.processing:
                # Initialize all per-worker bookkeeping structures.
                self.has_what[address] = set()
                self.worker_bytes[address] = 0
                self.processing[address] = dict()
                self.occupancy[address] = 0
                self.stacks[address] = deque()
                self.stack_durations[address] = deque()
                self.stack_duration[address] = 0
            if nbytes:
                self.nbytes.update(nbytes)
            # for key in keys: # TODO
            # self.mark_key_in_memory(key, [address])
            self.worker_streams[address] = BatchedSend(interval=2, loop=self.loop)
            self._worker_coroutines.append(self.worker_stream(address))
            if self.ncores[address] > len(self.processing[address]):
                self.idle.add(address)
            # The new worker may satisfy restrictions of previously
            # unrunnable tasks; release them so they get rescheduled.
            for key in list(self.unrunnable):
                r = self.restrictions.get(key, [])
                if address in r or host in r or name in r:
                    self.transitions({key: 'released'})
            self.maybe_idle.add(address)
            self.ensure_occupied()
            logger.info("Register %s", str(address))
            return 'OK'
    def update_graph(self, client=None, tasks=None, keys=None,
                     dependencies=None, restrictions=None, priority=None,
                     loose_restrictions=None):
        """
        Add new computations to the internal dask graph
        This happens whenever the Client calls submit, map, get, or compute.
        """
        # Drop trivial self-aliases and tasks we already know about.
        for k in list(tasks):
            if tasks[k] is k:
                del tasks[k]
            if k in self.tasks:
                del tasks[k]
        original_keys = keys
        keys = set(keys)
        for k in keys:
            self.who_wants[k].add(client)
            self.wants_what[client].add(k)
        # Iterate to a fixed point: removing a task with a lost dependency
        # can invalidate other tasks that depended on it.
        n = 0
        while len(tasks) != n:  # walk through new tasks, cancel any bad deps
            n = len(tasks)
            for k, deps in list(dependencies.items()):
                if any(dep not in self.dependencies and dep not in tasks
                        for dep in deps):  # bad key
                    logger.info('User asked for computation on lost data, %s', k)
                    del tasks[k]
                    del dependencies[k]
                    if k in keys:
                        keys.remove(k)
                    self.report({'op': 'cancelled-key', 'key': k})
                    self.client_releases_keys(keys=[k], client=client)
        # Depth-first walk from the requested keys to register new tasks.
        stack = list(keys)
        touched = set()
        while stack:
            k = stack.pop()
            if k in self.dependencies:
                continue
            touched.add(k)
            if k not in self.tasks and k in tasks:
                self.tasks[k] = tasks[k]
                self.dependencies[k] = set(dependencies.get(k, ()))
                self.released.add(k)
                self.task_state[k] = 'released'
                for dep in self.dependencies[k]:
                    if dep not in self.dependents:
                        self.dependents[dep] = set()
                    self.dependents[dep].add(k)
                if k not in self.dependents:
                    self.dependents[k] = set()
            stack.extend(self.dependencies[k])
        recommendations = OrderedDict()
        new_priority = priority or order(tasks)  # TODO: define order wrt old graph
        self.generation += 1  # older graph generations take precedence
        for key in set(new_priority) & touched:
            if key not in self.priority:
                self.priority[key] = (self.generation, new_priority[key])  # prefer old
        if restrictions:
            # Normalize restriction targets to canonical addresses.
            restrictions = {k: set(map(self.coerce_address, v))
                            for k, v in restrictions.items()}
            self.restrictions.update(restrictions)
        if loose_restrictions:
            self.loose_restrictions |= set(loose_restrictions)
        # Move newly registered tasks into 'waiting', best priority first.
        for key in sorted(touched | keys, key=self.priority.get):
            if self.task_state[key] == 'released':
                recommendations[key] = 'waiting'
        # Propagate pre-existing failures onto dependents immediately.
        for key in touched | keys:
            for dep in self.dependencies[key]:
                if dep in self.exceptions_blame:
                    self.exceptions_blame[key] = self.exceptions_blame[dep]
                    recommendations[key] = 'erred'
                    break
        self.transitions(recommendations)
        # Notify plugins; iterate a copy so plugins may unregister themselves.
        for plugin in self.plugins[:]:
            try:
                plugin.update_graph(self, client=client, tasks=tasks,
                        keys=keys, restrictions=restrictions or {},
                        dependencies=dependencies,
                        loose_restrictions=loose_restrictions)
            except Exception as e:
                logger.exception(e)
        # Keys already finished (or failed) are reported back right away.
        for key in keys:
            if self.task_state[key] in ('memory', 'erred'):
                self.report_on_key(key)
        self.ensure_occupied()
    def stimulus_task_finished(self, key=None, worker=None, **kwargs):
        """ Mark that a task has finished execution on a particular worker """
        # logger.debug("Stimulus task finished %s, %s", key, worker)
        self.maybe_idle.add(worker)
        if key not in self.task_state:
            # Task was forgotten/cancelled while the worker was running it.
            return {}
        if self.task_state[key] == 'processing':
            recommendations = self.transition(key, 'memory', worker=worker,
                                              **kwargs)
        else:
            recommendations = {}
        if self.task_state[key] == 'memory':
            # Record the (possibly additional) replica on this worker.
            self.who_has[key].add(worker)
            if key not in self.has_what[worker]:
                # 1000 is the fallback size estimate when nbytes is unknown.
                self.worker_bytes[worker] += self.nbytes.get(key, 1000)
            self.has_what[worker].add(key)
        return recommendations
    def stimulus_task_erred(self, key=None, worker=None,
                    exception=None, traceback=None, **kwargs):
        """ Mark that a task has erred on a particular worker """
        logger.debug("Stimulus task erred %s, %s", key, worker)
        self.maybe_idle.add(worker)
        if key not in self.task_state:
            # Task was forgotten/cancelled while the worker was running it.
            return {}
        if self.task_state[key] == 'processing':
            recommendations = self.transition(key, 'erred', cause=key,
                    exception=exception, traceback=traceback)
        else:
            recommendations = {}
        return recommendations
    def stimulus_missing_data(self, keys=None, key=None, worker=None,
                              ensure=True, **kwargs):
        """ Mark that certain keys have gone missing.  Recover. """
        logger.debug("Stimulus missing data %s, %s", key, worker)
        if worker:
            self.maybe_idle.add(worker)
        recommendations = OrderedDict()
        for k in set(keys):
            if self.task_state.get(k) == 'memory':
                # Drop every replica record for the lost key and adjust the
                # per-worker byte accounting.
                for w in set(self.who_has[k]):
                    self.has_what[w].remove(k)
                    self.who_has[k].remove(w)
                    self.worker_bytes[w] -= self.nbytes.get(k, 1000)
            recommendations[k] = 'released'
        if key:
            recommendations[key] = 'released'
        self.transitions(recommendations)
        if ensure:
            self.ensure_occupied()
        return {}
    def remove_worker(self, stream=None, address=None, safe=False):
        """
        Remove worker from cluster
        We do this when a worker reports that it plans to leave or when it
        appears to be unresponsive.  This may send its tasks back to a released
        state.

        ``safe=True`` means a clean departure: tasks it was running are not
        counted as suspicious failures.
        """
        with log_errors(pdb=LOG_PDB):
            address = self.coerce_address(address)
            logger.info("Remove worker %s", address)
            if address not in self.processing:
                return 'already-removed'
            with ignoring(AttributeError):
                stream = self.worker_streams[address].stream
                if not stream.closed():
                    stream.close()
            host, port = address.split(':')
            # Unwind per-host bookkeeping; drop the host entirely when its
            # last worker is gone.
            self.host_info[host]['cores'] -= self.ncores[address]
            self.host_info[host]['ports'].remove(port)
            if not self.host_info[host]['ports']:
                del self.host_info[host]
            del self.worker_streams[address]
            del self.ncores[address]
            del self.aliases[self.worker_info[address]['name']]
            del self.worker_info[address]
            if address in self.maybe_idle:
                self.maybe_idle.remove(address)
            if address in self.idle:
                self.idle.remove(address)
            if address in self.saturated:
                self.saturated.remove(address)
            recommendations = OrderedDict()
            # Tasks this worker was executing: count failures; reschedule or,
            # after too many failures, mark the task erred (KilledWorker).
            in_flight = set(self.processing.pop(address))
            for k in list(in_flight):
                self.rprocessing[k].remove(address)
                if not safe:
                    self.suspicious_tasks[k] += 1
                if not safe and self.suspicious_tasks[k] > self.allowed_failures:
                    e = pickle.dumps(KilledWorker(k, address))
                    r = self.transition(k, 'erred', exception=e, cause=k)
                    recommendations.update(r)
                    in_flight.remove(k)
                elif not self.rprocessing[k]:
                    recommendations[k] = 'released'
            # Tasks queued on this worker's stack go back to waiting.
            for k in self.stacks.pop(address):
                if k in self.tasks:
                    recommendations[k] = 'waiting'
            del self.stack_durations[address]
            del self.stack_duration[address]
            del self.occupancy[address]
            del self.worker_bytes[address]
            # Data whose only replica lived on this worker is now lost.
            for key in self.has_what.pop(address):
                self.who_has[key].remove(address)
                if not self.who_has[key]:
                    if key in self.tasks:
                        recommendations[key] = 'released'
                    else:
                        recommendations[key] = 'forgotten'
            self.transitions(recommendations)
            if not self.stacks:
                logger.info("Lost all workers")
            self.ensure_occupied()
            return 'OK'
    def stimulus_cancel(self, stream, keys=None, client=None):
        """ Stop execution on a list of keys """
        logger.info("Client %s requests to cancel %d keys", client, len(keys))
        for key in keys:
            self.cancel_key(key, client)
    def cancel_key(self, key, client, retries=5):
        """ Cancel a particular key and all dependents """
        # TODO: this should be converted to use the transition mechanism
        if key not in self.who_wants:  # no key yet; retry in 200ms, up to `retries` times
            if retries:
                self.loop.add_future(gen.sleep(0.2),
                        lambda _: self.cancel_key(key, client, retries - 1))
            return
        if self.who_wants[key] == {client}:  # no one else wants this key
            # Cancel dependents first so nothing still needs this key.
            for dep in list(self.dependents[key]):
                self.cancel_key(dep, client)
        logger.debug("Scheduler cancels key %s", key)
        self.report({'op': 'cancelled-key', 'key': key})
        self.client_releases_keys(keys=[key], client=client)
    def client_releases_keys(self, keys=None, client=None):
        """ Remove keys from client desired list """
        for key in list(keys):
            if key in self.wants_what[client]:
                self.wants_what[client].remove(key)
                s = self.who_wants[key]
                s.remove(client)
                if not s:
                    # No client wants this key any more: release it, and
                    # forget it entirely if nothing depends on it.
                    del self.who_wants[key]
                    if key in self.waiting_data and not self.waiting_data[key]:
                        r = self.transition(key, 'released')
                        self.transitions(r)
                    if key in self.dependents and not self.dependents[key]:
                        r = self.transition(key, 'forgotten')
                        self.transitions(r)
def client_wants_keys(self, keys=None, client=None):
for k in keys:
self.who_wants[k].add(client)
self.wants_what[client].add(k)
######################################
# Task Validation (currently unused) #
######################################
    def validate_released(self, key):
        # Invariants for 'released': known to the graph but holding no data
        # and not queued, waiting, or processing anywhere.
        assert key in self.dependencies
        assert self.task_state[key] == 'released'
        assert key not in self.waiting_data
        assert key not in self.who_has
        assert key not in self.rprocessing
        # assert key not in self.ready
        assert key not in self.waiting
        assert not any(key in self.waiting_data.get(dep, ())
                for dep in self.dependencies[key])
        assert key in self.released
    def validate_waiting(self, key):
        # Invariants for 'waiting': each dependency is either already in
        # memory or still being waited on -- exactly one of the two.
        assert key in self.waiting
        assert key in self.waiting_data
        assert key not in self.who_has
        assert key not in self.rprocessing
        assert key not in self.released
        for dep in self.dependencies[key]:
            assert (dep in self.who_has) + (dep in self.waiting[key]) == 1
    def validate_processing(self, key):
        # Invariants for 'processing': every dependency is in memory and
        # each worker recorded in rprocessing agrees it runs the task.
        assert key not in self.waiting
        assert key in self.waiting_data
        assert key in self.rprocessing
        for w in self.rprocessing[key]:
            assert key in self.processing[w]
        assert key not in self.who_has
        for dep in self.dependencies[key]:
            assert dep in self.who_has
    def validate_memory(self, key):
        # Invariants for 'memory': the data exists somewhere and each
        # dependent is either finished or still waiting on it -- not both.
        assert key in self.who_has
        assert key not in self.rprocessing
        assert key not in self.waiting
        assert key not in self.released
        for dep in self.dependents[key]:
            assert (dep in self.who_has) + (dep in self.waiting_data[key]) == 1
    def validate_queue(self, key):
        # Invariants for 'queue': runnable (all deps in memory) but not yet
        # assigned to any worker.
        # assert key in self.ready
        assert key not in self.released
        assert key not in self.rprocessing
        assert key not in self.who_has
        assert key not in self.waiting
        for dep in self.dependencies[key]:
            assert dep in self.who_has
    def validate_stacks(self, key):
        # Invariants for 'stacks': runnable and parked on some worker's
        # stack, awaiting execution.
        # assert any(key in stack for stack in self.stacks.values())
        assert key not in self.released
        assert key not in self.rprocessing
        assert key not in self.who_has
        assert key not in self.waiting
        for dep in self.dependencies[key]:
            assert dep in self.who_has
    def validate_key(self, key):
        """Validate a single key by dispatching on its current state."""
        try:
            try:
                # Look up validate_<state> for the key's current state.
                func = getattr(self, 'validate_' + self.task_state[key])
            except KeyError:
                # Key is not tracked at all; nothing to validate.
                logger.debug("Key lost: %s", key)
            except AttributeError:
                # No validator written for this state yet.
                logger.info("self.validate_%s not found", self.task_state[key])
            else:
                func(key)
        except Exception as e:
            logger.exception(e)
            if LOG_PDB:
                import pdb; pdb.set_trace()
            raise
    def validate_state(self, allow_overlap=False, allow_bad_stacks=True):
        """Check scheduler-wide invariants; raise on any inconsistency."""
        # NOTE: this method shadows the module-level ``validate_state``
        # helper; the call below resolves to that function, not recursion.
        validate_state(self.dependencies, self.dependents, self.waiting,
                self.waiting_data, self.ready, self.who_has, self.stacks,
                self.processing, None, self.released, self.who_wants,
                self.wants_what, tasks=self.tasks, erred=self.exceptions_blame,
                allow_overlap=allow_overlap, allow_bad_stacks=allow_bad_stacks)
        # Every per-worker collection must track exactly the same workers.
        if not (set(self.ncores) == \
                set(self.has_what) == \
                set(self.stacks) == \
                set(self.processing) == \
                set(self.worker_info) == \
                set(self.worker_streams)):
            raise ValueError("Workers not the same in all collections")
        # Byte accounting must agree with the actual per-worker key sets.
        assert self.worker_bytes == {w: sum(self.nbytes[k] for k in keys)
                                     for w, keys in self.has_what.items()}
        for w in self.stacks:
            # stack_duration must remain the running sum of stack_durations.
            assert abs(sum(self.stack_durations[w]) - self.stack_duration[w]) < 1e-8
            assert len(self.stack_durations[w]) == len(self.stacks[w])
###################
# Manage Messages #
###################
    def report(self, msg):
        """
        Publish updates to all listening Queues and Streams
        If the message contains a key then we only send the message to those
        streams that care about the key.
        """
        for q in self.report_queues:
            q.put_nowait(msg)
        if 'key' in msg:
            # Route key-specific messages only to the interested clients.
            streams = [self.streams[c]
                       for c in self.who_wants.get(msg['key'], ())
                       if c in self.streams]
        else:
            streams = self.streams.values()
        for s in streams:
            try:
                s.send(msg)
                # logger.debug("Scheduler sends message to client %s", msg)
            except StreamClosedError:
                logger.critical("Tried writing to closed stream: %s", msg)
    @gen.coroutine
    def add_client(self, stream, client=None):
        """ Add client to network
        We listen to all future messages from this IOStream.
        """
        logger.info("Receive client connection: %s", client)
        # Batch outgoing messages to this client on a 2ms interval.
        bstream = BatchedSend(interval=2, loop=self.loop)
        bstream.start(stream)
        self.streams[client] = bstream
        try:
            # Blocks until the client disconnects or sends a close message.
            yield self.handle_messages(stream, bstream, client=client)
        finally:
            if not stream.closed():
                bstream.send({'op': 'stream-closed'})
                yield bstream.close(ignore_closed=True)
            del self.streams[client]
            logger.info("Close client connection: %s", client)
    def remove_client(self, client=None):
        """ Remove client from network """
        logger.info("Remove client %s", client)
        # Releasing everything the client wanted may free tasks and data.
        self.client_releases_keys(self.wants_what.get(client, ()), client)
        with ignoring(KeyError):
            del self.wants_what[client]
@gen.coroutine
def handle_messages(self, in_queue, report, client=None):
"""
The master client coroutine. Handles all inbound messages from clients.
This runs once per Client IOStream or Queue.
See Also
--------
Scheduler.worker_stream: The equivalent function for workers
"""
with log_errors(pdb=LOG_PDB):
if isinstance(in_queue, Queue):
next_message = in_queue.get
elif isinstance(in_queue, IOStream):
next_message = lambda: read(in_queue)
else:
raise NotImplementedError()
if isinstance(report, Queue):
put = report.put_nowait
elif isinstance(report, IOStream):
put = lambda msg: write(report, msg)
elif isinstance(report, BatchedSend):
put = report.send
else:
put = lambda msg: None
put({'op': 'stream-start'})
breakout = False
while True:
try:
msgs = yield next_message()
except (StreamClosedError, AssertionError, GeneratorExit):
break
except Exception as e:
logger.exception(e)
put(error_message(e, status='scheduler-error'))
continue
if not isinstance(msgs, list):
msgs = [msgs]
for msg in msgs:
# logger.debug("scheduler receives message %s", msg)
try:
op = msg.pop('op')
except Exception as e:
logger.exception(e)
put(error_message(e, status='scheduler-error'))
if op == 'close-stream':
breakout = True
break
elif op == 'close':
breakout = True
self.close()
break
elif op in self.compute_handlers:
try:
result = self.compute_handlers[op](**msg)
if isinstance(result, gen.Future):
yield result
except Exception as e:
logger.exception(e)
raise
else:
logger.warn("Bad message: op=%s, %s", op, msg, exc_info=True)
if op == 'close':
breakout = True
break
if breakout:
break
self.remove_client(client=client)
logger.debug('Finished handle_messages coroutine')
    def handle_queues(self, scheduler_queue, report_queue):
        """
        Register new control and report queues to the Scheduler
        Queues are not in common use.  This may be deprecated in the future.
        """
        self.scheduler_queues.append(scheduler_queue)
        if report_queue:
            self.report_queues.append(report_queue)
        # Start a message loop for this queue pair and track its future.
        future = self.handle_messages(scheduler_queue, report_queue)
        self.coroutines.append(future)
        return future
    def send_task_to_worker(self, worker, key):
        """ Send a single computational task to a worker """
        msg = {'op': 'compute-task',
               'key': key}
        deps = self.dependencies[key]
        if deps:
            # Tell the worker where it can fetch each dependency from.
            msg['who_has'] = {dep: tuple(self.who_has.get(dep, ()))
                              for dep in deps}
        task = self.tasks[key]
        if type(task) is dict:
            # Pre-structured task: splice its fields directly into the message.
            msg.update(task)
        else:
            msg['task'] = task
        self.worker_streams[worker].send(msg)
    @gen.coroutine
    def worker_stream(self, worker):
        """
        Listen to responses from a single worker
        This is the main loop for scheduler-worker interaction
        See Also
        --------
        Scheduler.handle_messages: Equivalent coroutine for clients
        """
        yield gen.sleep(0)
        ip, port = coerce_to_address(worker, out=tuple)
        stream = yield connect(ip, port)
        yield write(stream, {'op': 'compute-stream'})
        self.worker_streams[worker].start(stream)
        logger.info("Starting worker compute stream, %s", worker)
        try:
            while True:
                msgs = yield read(stream)
                if not isinstance(msgs, list):
                    msgs = [msgs]
                if worker in self.worker_info:
                    recommendations = OrderedDict()
                    for msg in msgs:
                        # logger.debug("Compute response from worker %s, %s",
                        # worker, msg)
                        if msg == 'OK':  # from close
                            break
                        # Align worker-reported timestamps with our clock.
                        self.correct_time_delay(worker, msg)
                        key = msg['key']
                        if msg['status'] == 'OK':
                            r = self.stimulus_task_finished(worker=worker, **msg)
                            recommendations.update(r)
                        elif msg['status'] == 'error':
                            r = self.stimulus_task_erred(worker=worker, **msg)
                            recommendations.update(r)
                        elif msg['status'] == 'missing-data':
                            r = self.stimulus_missing_data(worker=worker,
                                    ensure=False, **msg)
                            recommendations.update(r)
                        else:
                            logger.warn("Unknown message type, %s, %s",
                                    msg['status'], msg)
                    self.transitions(recommendations)
                # NOTE(review): if the worker was removed mid-loop,
                # `recommendations` may be unbound here under validate —
                # confirm this path cannot be hit in practice.
                if self.validate:
                    logger.info("Messages: %s\nRecommendations: %s",
                            msgs, recommendations)
                self.ensure_occupied()
        except (StreamClosedError, IOError, OSError):
            logger.info("Worker failed from closed stream: %s", worker)
        except Exception as e:
            logger.exception(e)
            if LOG_PDB:
                import pdb; pdb.set_trace()
            raise
        finally:
            if not stream.closed():
                stream.close()
            # Whatever the exit reason, deregister the worker.
            self.remove_worker(address=worker)
def correct_time_delay(self, worker, msg):
"""
Apply offset time delay in message times.
Clocks on different workers differ. We keep track of a relative "now"
through periodic heartbeats. We use this known delay to align message
times to Scheduler local time. In particular this helps with
diagnostics.
Operates in place
"""
if 'time-delay' in self.worker_info[worker]:
delay = self.worker_info[worker]['time-delay']
for key in ['transfer_start', 'transfer_stop', 'time',
'compute_start', 'compute_stop', 'disk_load_start',
'disk_load_stop']:
if key in msg:
msg[key] += delay
    @gen.coroutine
    def clear_data_from_workers(self):
        """ Send delete signals to clear unused data from workers
        This watches the ``.deleted_keys`` attribute, which stores a set of
        keys to be deleted from each worker.  This function is run periodically
        by the ``._delete_periodic_callback`` to actually remove the data.
        This runs every ``self.delete_interval`` milliseconds.
        """
        if self.deleted_keys:
            # Snapshot and reset so new deletions accumulate for next round.
            d = self.deleted_keys.copy()
            self.deleted_keys.clear()
            # Skip keys the worker is (again) supposed to hold.
            coroutines = [self.rpc(addr=worker).delete_data(
                                keys=list(keys - self.has_what.get(worker,
                                                                   set())),
                                report=False)
                          for worker, keys in d.items()
                          if keys]
            for worker, keys in d.items():
                logger.debug("Remove %d keys from worker %s", len(keys), worker)
            # Dead workers are expected here; ignore connection errors.
            yield ignore_exceptions(coroutines, socket.error, StreamClosedError)
        raise Return('OK')
    def add_plugin(self, plugin):
        """
        Add external plugin to scheduler
        See http://distributed.readthedocs.io/en/latest/plugins.html
        """
        # Plugins are notified on graph updates and restarts.
        self.plugins.append(plugin)
    def remove_plugin(self, plugin):
        """ Remove external plugin from scheduler """
        # Raises ValueError if the plugin was never registered.
        self.plugins.remove(plugin)
############################
# Less common interactions #
############################
@gen.coroutine
def scatter(self, stream=None, data=None, workers=None, client=None,
broadcast=False, timeout=2):
""" Send data out to workers
See also
--------
Scheduler.broadcast:
"""
start = time()
while not self.ncores:
yield gen.sleep(0.2)
if time() > start + timeout:
raise gen.TimeoutError("No workers found")
if workers is not None:
workers = [self.coerce_address(w) for w in workers]
ncores = workers if workers is not None else self.ncores
keys, who_has, nbytes = yield scatter_to_workers(ncores, data,
report=False,
serialize=False)
self.update_data(who_has=who_has, nbytes=nbytes, client=client)
if broadcast:
if broadcast == True:
n = len(ncores)
else:
n = broadcast
yield self.replicate(keys=keys, workers=workers, n=n)
raise gen.Return(keys)
    @gen.coroutine
    def gather(self, stream=None, keys=None):
        """ Collect data in from workers """
        keys = list(keys)
        # Map each requested key to the workers currently holding it.
        who_has = {key: self.who_has.get(key, ()) for key in keys}
        try:
            data = yield gather_from_workers(who_has, deserialize=False,
                    rpc=self.rpc, close=False)
            result = {'status': 'OK', 'data': data}
        except KeyError as e:
            # Some keys had no reachable holders; report them back.
            logger.debug("Couldn't gather keys %s", e)
            result = {'status': 'error', 'keys': e.args}
        raise gen.Return(result)
    @gen.coroutine
    def restart(self, environment=None):
        """ Restart all workers.  Reset local state. """
        # NOTE(review): `n` appears unused below — confirm before removing.
        n = len(self.ncores)
        with log_errors():
            logger.debug("Send shutdown signal to workers")
            for q in self.scheduler_queues + self.report_queues:
                clear_queue(q)
            # Remember each worker's nanny port before removing the workers.
            nannies = {addr: d['services']['nanny']
                       for addr, d in self.worker_info.items()}
            for addr in nannies:
                self.remove_worker(address=addr)
            for client, keys in self.wants_what.items():
                self.client_releases_keys(keys=keys, client=client)
            logger.debug("Send kill signal to nannies: %s", nannies)
            nannies = [rpc(ip=worker_address.split(':')[0], port=n_port)
                       for worker_address, n_port in nannies.items()]
            yield All([nanny.kill() for nanny in nannies])
            logger.debug("Received done signal from nannies")
            # Wait for the kill to propagate through worker removal.
            while self.ncores:
                yield gen.sleep(0.01)
            logger.debug("Workers all removed. Sending startup signal")
            # All quiet
            resps = yield All([nanny.instantiate(close=True,
                    environment=environment) for nanny in nannies])
            assert all(resp == 'OK' for resp in resps)
            self.start()
            logger.debug("All workers reporting in")
            self.report({'op': 'restart'})
            # Notify plugins; iterate a copy so they may unregister.
            for plugin in self.plugins[:]:
                try:
                    plugin.restart(self)
                except Exception as e:
                    logger.exception(e)
    @gen.coroutine
    def broadcast(self, stream=None, msg=None, workers=None, hosts=None,
            nanny=False):
        """ Broadcast message to workers, return all results """
        # Default to every known worker when neither workers nor hosts given.
        if workers is None:
            if hosts is None:
                workers = list(self.ncores)
            else:
                workers = []
        if hosts is not None:
            # Expand each host into all of its workers' addresses.
            for host in hosts:
                if host in self.host_info:
                    workers.extend([host + ':' + port
                                    for port in self.host_info[host]['ports']])
        # TODO replace with worker_list
        if nanny:
            # Redirect to each worker's nanny service port instead.
            addresses = []
            for addr in workers:
                ip = addr.split(':')[0]
                port = self.worker_info[addr]['services']['nanny']
                addresses.append('%s:%d' % (ip, port))
        else:
            addresses = workers
        results = yield All([send_recv(arg=address, close=True, **msg)
                             for address in addresses])
        # Results are keyed by worker address even when sent via the nanny.
        raise Return(dict(zip(workers, results)))
@gen.coroutine
def rebalance(self, stream=None, keys=None, workers=None):
    """ Rebalance keys so that each worker stores roughly equal bytes

    **Policy**

    This orders the workers by what fraction of bytes of the existing keys
    they have.  It walks down this list from most-to-least.  At each worker
    it sends the largest results it can find and sends them to the least
    occupied worker until either the sender or the recipient are at the
    average expected load.

    Returns ``{'status': 'OK'}`` on success, or a ``missing-data``
    status dict when requested keys are unknown or a transfer failed.
    """
    with log_errors():
        # Default to every known key and every known worker.
        keys = set(keys or self.who_has)
        workers = set(workers or self.ncores)
        if not keys.issubset(self.who_has):
            raise Return({'status': 'missing-data',
                          'keys': list(keys - set(self.who_has))})

        # Ownership restricted to the participating workers.
        workers_by_key = {k: self.who_has.get(k, set()) & workers for k in keys}
        keys_by_worker = {w: set() for w in workers}
        for k, v in workers_by_key.items():
            for vv in v:
                keys_by_worker[vv].add(k)

        # Bytes held per worker; unknown sizes default to 1000 bytes.
        worker_bytes = {w: sum(self.nbytes.get(k, 1000) for k in v)
                        for w, v in keys_by_worker.items()}
        avg = sum(worker_bytes.values()) / len(worker_bytes)

        sorted_workers = list(map(first, sorted(worker_bytes.items(),
                                                key=second, reverse=True)))

        recipients = iter(reversed(sorted_workers))
        recipient = next(recipients)
        msgs = []  # (sender, recipient, key)
        # Walk the most-loaded half, planning moves of the largest keys
        # onto the least-loaded workers until both sides reach ~avg.
        for sender in sorted_workers[:len(workers) // 2]:
            sender_keys = {k: self.nbytes.get(k, 1000)
                           for k in keys_by_worker[sender]}
            sender_keys = iter(sorted(sender_keys.items(),
                                      key=second, reverse=True))

            try:
                while worker_bytes[sender] > avg:
                    while (worker_bytes[recipient] < avg and
                           worker_bytes[sender] > avg):
                        k, nb = next(sender_keys)
                        if k not in keys_by_worker[recipient]:
                            keys_by_worker[recipient].add(k)
                            # keys_by_worker[sender].remove(k)
                            msgs.append((sender, recipient, k))
                            worker_bytes[sender] -= nb
                            worker_bytes[recipient] += nb
                    if worker_bytes[sender] > avg:
                        recipient = next(recipients)
            except StopIteration:
                break

        # Group planned moves by recipient (gather) and sender (delete).
        to_recipients = defaultdict(lambda: defaultdict(list))
        to_senders = defaultdict(list)
        for sender, recipient, key in msgs:
            to_recipients[recipient][key].append(sender)
            to_senders[sender].append(key)

        result = yield {r: self.rpc(addr=r).gather(who_has=v)
                        for r, v in to_recipients.items()}

        if not all(r['status'] == 'OK' for r in result.values()):
            raise Return({'status': 'missing-data',
                          'keys': sum([r['keys'] for r in result
                                       if 'keys' in r], [])})

        # Record the new replicas before the originals are deleted.
        for sender, recipient, key in msgs:
            self.who_has[key].add(recipient)
            self.has_what[recipient].add(key)
            self.worker_bytes[recipient] += self.nbytes.get(key, 1000)

        result = yield {r: self.rpc(addr=r).delete_data(keys=v, report=False)
                        for r, v in to_senders.items()}

        for sender, recipient, key in msgs:
            self.who_has[key].remove(sender)
            self.has_what[sender].remove(key)
            self.worker_bytes[sender] -= self.nbytes.get(key, 1000)

        raise Return({'status': 'OK'})
@gen.coroutine
def replicate(self, stream=None, keys=None, n=None, workers=None,
              branching_factor=2, delete=True):
    """ Replicate data throughout cluster

    This performs a tree copy of the data throughout the network
    individually on each piece of data.

    Parameters
    ----------
    keys: Iterable
        list of keys to replicate
    n: int
        Number of replications we expect to see within the cluster
    branching_factor: int, optional
        The number of workers that can copy data in each generation
    delete: bool, optional
        When True (default), drop replicas beyond *n* before copying.

    See also
    --------
    Scheduler.rebalance
    """
    workers = set(self.workers_list(workers))
    if n is None:
        n = len(workers)
    # Cannot replicate to more workers than exist.
    n = min(n, len(workers))
    keys = set(keys)

    if n == 0:
        raise ValueError("Can not use replicate to delete data")

    if not keys.issubset(self.who_has):
        raise Return({'status': 'missing-data',
                      'keys': list(keys - set(self.who_has))})

    # Delete extraneous data
    if delete:
        # For over-replicated keys, pick random surplus replicas to drop.
        del_keys = {k: random.sample(self.who_has[k] & workers,
                                     len(self.who_has[k] & workers) - n)
                    for k in keys
                    if len(self.who_has[k] & workers) > n}
        del_workers = {k: v for k, v in reverse_dict(del_keys).items() if v}
        yield [self.rpc(addr=worker).delete_data(keys=list(keys),
                                                 report=False)
               for worker, keys in del_workers.items()]

        for worker, keys in del_workers.items():
            self.has_what[worker] -= keys
            for key in keys:
                self.who_has[key].remove(worker)
                self.worker_bytes[worker] -= self.nbytes.get(key, 1000)

    keys = {k for k in keys if len(self.who_has[k] & workers) < n}
    # Copy not-yet-filled data
    while keys:
        gathers = defaultdict(dict)
        for k in list(keys):
            missing = workers - self.who_has[k]
            # Each round at most branching_factor copies per replica.
            count = min(max(n - len(self.who_has[k] & workers), 0),
                        branching_factor * len(self.who_has[k]))
            if not count:
                keys.remove(k)
            else:
                sample = random.sample(missing, count)
                for w in sample:
                    gathers[w][k] = list(self.who_has[k])

        results = yield {w: self.rpc(addr=w).gather(who_has=who_has)
                         for w, who_has in gathers.items()}
        # Only record replicas that transferred successfully; failed
        # workers are retried on the next loop iteration.
        for w, v in results.items():
            if v['status'] == 'OK':
                self.add_keys(address=w, keys=list(gathers[w]))
def workers_to_close(self, memory_ratio=2):
    """ Suggest idle workers whose data fits comfortably elsewhere.

    Returns a list of idle worker addresses that could be shut down
    while still leaving at least ``memory_ratio`` times the currently
    stored bytes of memory-limit headroom on the remaining workers.
    Returns ``[]`` when no worker is idle or tasks are still queued.
    """
    # Nothing can be closed while work remains or no worker idles.
    if not self.idle or self.ready:
        return []

    mem_limits = {addr: self.worker_info[addr]['memory_limit']
                  for addr in self.worker_info}
    stored = self.worker_bytes

    remaining_limit = sum(mem_limits.values())
    total_stored = sum(stored.values())

    # Most-loaded first; we pop candidates off the tail, so the least
    # loaded idle workers are considered for closing first.
    candidates = sorted(self.idle, key=stored.get, reverse=True)

    chosen = []
    while candidates:
        victim = candidates.pop()
        remaining_limit -= mem_limits[victim]
        if remaining_limit < memory_ratio * total_stored:
            break  # closing this worker would leave too little headroom
        chosen.append(victim)

    return chosen
@gen.coroutine
def retire_workers(self, stream=None, workers=None, remove=True):
    """ Gracefully retire workers, moving their unique data elsewhere first.

    With ``workers=None`` the candidates come from
    ``workers_to_close()`` and the call retries until it succeeds.
    Returns the list of retired worker addresses (empty if the data
    could not be moved because no other workers exist).
    """
    if workers is None:
        while True:
            try:
                workers = self.workers_to_close()
                if workers:
                    yield self.retire_workers(workers=workers, remove=remove)
                raise gen.Return(list(workers))
            except KeyError:  # keys left during replicate
                pass
    workers = set(workers)
    # NOTE(review): set.union(*[]) raises TypeError when workers is
    # empty — presumably callers never pass an empty list; verify.
    keys = set.union(*[self.has_what[w] for w in workers])
    # Only keys held *exclusively* by the retiring workers need copying.
    keys = {k for k in keys if self.who_has[k].issubset(workers)}

    other_workers = set(self.worker_info) - workers
    if keys:
        if other_workers:
            yield self.replicate(keys=keys, workers=other_workers, n=1,
                                 delete=False)
        else:
            # Nowhere to move the data; abort the retirement.
            raise gen.Return([])
    if remove:
        for w in workers:
            self.remove_worker(address=w, safe=True)
    raise gen.Return(list(workers))
@gen.coroutine
def synchronize_worker_data(self, stream=None, worker=None):
    """ Reconcile scheduler metadata with the keys actually on a worker.

    With ``worker=None``, checks all workers and returns a dict of
    discrepancies (or None).  For a single worker, reports keys the
    scheduler expects but the worker lacks (``missing``) and deletes
    keys the worker holds that the scheduler does not know (``extra``).
    """
    if worker is None:
        result = yield {w: self.synchronize_worker_data(worker=w)
                        for w in self.worker_info}
        result = {k: v for k, v in result.items() if any(v.values())}
        if result:
            logger.info("Excess keys found on workers: %s", result)
        raise Return(result or None)
    else:
        keys = yield self.rpc(addr=worker).keys()
        keys = set(keys)

        missing = self.has_what[worker] - keys
        if missing:
            logger.info("Expected data missing from worker: %s, %s",
                        worker, missing)

        # Keys on the worker that the scheduler neither tracks nor has
        # already scheduled for deletion.
        extra = keys - self.has_what[worker] - self.deleted_keys[worker]
        if extra:
            # Re-check after a delay: the key may be an in-flight
            # transfer that the scheduler learns about shortly.
            yield gen.sleep(self.synchronize_worker_interval / 1000)  # delay
            keys = yield self.rpc(addr=worker).keys()  # check again
            extra &= set(keys)  # make sure the keys are still on worker
            extra -= self.has_what[worker]  # and still unknown to scheduler
            if extra:  # still around?  delete them
                yield self.rpc(addr=worker).delete_data(keys=list(extra),
                                                        report=False)
        raise Return({'extra': list(extra), 'missing': list(missing)})
def add_keys(self, stream=None, address=None, keys=()):
    """ Learn that a worker has certain keys

    This should not be used in practice and is mostly here for legacy
    reasons.
    """
    address = coerce_to_address(address)
    if address not in self.worker_info:
        return 'not found'
    for key in keys:
        if key not in self.who_has:
            # else:
            # TODO: delete key from worker
            continue
        # Only count bytes the first time we learn this worker has it.
        if key not in self.has_what[address]:
            self.worker_bytes[address] += self.nbytes.get(key, 1000)
        self.has_what[address].add(key)
        self.who_has[key].add(address)
    return 'OK'
def update_data(self, stream=None, who_has=None, nbytes=None, client=None):
    """
    Learn that new data has entered the network from an external source

    Registers each key in ``who_has`` as being in memory on the given
    workers, records its byte size from ``nbytes``, and optionally pins
    the keys to ``client``.

    See Also
    --------
    Scheduler.mark_key_in_memory
    """
    with log_errors():
        who_has = {k: [self.coerce_address(vv) for vv in v]
                   for k, v in who_has.items()}
        logger.debug("Update data %s", who_has)
        if client:
            self.client_wants_keys(keys=list(who_has), client=client)

        # for key, workers in who_has.items():  # TODO
        #     self.mark_key_in_memory(key, workers)

        self.nbytes.update(nbytes)

        for key, workers in who_has.items():
            # These keys arrive with no task graph, so give them empty
            # dependency structures.
            if key not in self.dependents:
                self.dependents[key] = set()
            if key not in self.dependencies:
                self.dependencies[key] = set()
            self.task_state[key] = 'memory'
            self.who_has[key] = set(workers)
            for w in workers:
                if key not in self.has_what[w]:
                    self.worker_bytes[w] += self.nbytes.get(key, 1000)
                self.has_what[w].add(key)
            self.waiting_data[key] = set()
            self.report({'op': 'key-in-memory',
                         'key': key,
                         'workers': list(workers)})
def report_on_key(self, key):
    """ Broadcast the current status of *key* to listening clients.

    Emits ``cancelled-key`` when the key is unknown, ``key-in-memory``
    when it is in memory, and ``task-erred`` (with the blamed task's
    exception and traceback) when it has erred.  Other states emit
    nothing.
    """
    try:
        state = self.task_state[key]
    except KeyError:
        self.report({'op': 'cancelled-key',
                     'key': key})
        return
    if state == 'memory':
        self.report({'op': 'key-in-memory',
                     'key': key})
    elif state == 'erred':
        culprit = self.exceptions_blame[key]
        self.report({'op': 'task-erred',
                     'key': key,
                     'exception': self.exceptions[culprit],
                     'traceback': self.tracebacks.get(culprit, None)})
@gen.coroutine
def feed(self, stream, function=None, setup=None, teardown=None, interval=1, **kwargs):
    """
    Provides a data stream to external requester

    Caution: this runs arbitrary Python code on the scheduler.  This should
    eventually be phased out.  It is mostly used by diagnostics.

    Repeatedly calls ``function(self[, state])`` every ``interval``
    seconds and writes the result to ``stream`` until the stream closes.
    """
    import pickle
    # SECURITY NOTE(review): pickle.loads on bytes received from the
    # client executes arbitrary code on the scheduler; only trusted
    # clients should be able to reach this handler.
    if function:
        function = pickle.loads(function)
    if setup:
        setup = pickle.loads(setup)
    if teardown:
        teardown = pickle.loads(teardown)
    state = setup(self) if setup else None
    if isinstance(state, gen.Future):
        state = yield state
    try:
        while True:
            if state is None:
                response = function(self)
            else:
                response = function(self, state)
            yield write(stream, response)
            yield gen.sleep(interval)
    except (OSError, IOError, StreamClosedError):
        # Client went away; run the optional cleanup hook.
        if teardown:
            teardown(self, state)
def get_stacks(self, stream=None, workers=None):
    """ Pending per-worker task stacks, as lists keyed by address.

    When *workers* is given, only those (coerced) addresses are
    reported; otherwise all stacks are returned.
    """
    if workers is None:
        return valmap(list, self.stacks)
    addresses = set(map(self.coerce_address, workers))
    return {addr: list(self.stacks[addr]) for addr in addresses}
def get_processing(self, stream=None, workers=None):
    """ Keys currently processing on each worker, as lists by address.

    When *workers* is given, only those (coerced) addresses are
    reported; otherwise all workers are returned.
    """
    if workers is None:
        return valmap(list, self.processing)
    addresses = set(map(self.coerce_address, workers))
    return {addr: list(self.processing[addr]) for addr in addresses}
def get_who_has(self, stream=None, keys=None):
    """ Workers holding each key, as lists.

    Unknown keys map to an empty list; with ``keys=None`` the full
    who_has mapping is returned.
    """
    if keys is None:
        return valmap(list, self.who_has)
    return {key: list(self.who_has.get(key, [])) for key in keys}
def get_has_what(self, stream=None, workers=None):
    """ Keys held by each worker, as lists keyed by address.

    Unknown workers map to an empty list; with ``workers=None`` the
    full has_what mapping is returned.
    """
    if workers is None:
        return valmap(list, self.has_what)
    return {addr: list(self.has_what.get(addr, ()))
            for addr in map(self.coerce_address, workers)}
def get_ncores(self, stream=None, workers=None):
    """ Number of cores per worker.

    Unknown workers map to None; with ``workers=None`` the full ncores
    mapping is returned.
    """
    if workers is None:
        return self.ncores
    return {addr: self.ncores.get(addr, None)
            for addr in map(self.coerce_address, workers)}
def get_nbytes(self, stream=None, keys=None, summary=True):
    """ Byte sizes of keys.

    With ``summary=True`` (the default) sizes are aggregated by key
    prefix (``key_split``); otherwise per-key sizes are returned for
    the requested ``keys`` (or all keys when None).
    """
    with log_errors():
        if keys is None:
            sizes = self.nbytes
        else:
            sizes = {key: self.nbytes[key] for key in keys}

        if summary:
            grouped = defaultdict(int)
            for key, nb in sizes.items():
                grouped[key_split(key)] += nb
            sizes = grouped

        return sizes
def publish_dataset(self, stream=None, keys=None, data=None, name=None,
                    client=None):
    """ Store a named dataset of keys/data on the scheduler.

    Pins *keys* under a synthetic 'published-<name>' client so they are
    not released.  Raises KeyError when *name* is already taken;
    returns ``{'status': 'OK', 'name': name}`` on success.
    """
    if name in self.datasets:
        raise KeyError("Dataset %s already exists" % name)
    self.client_wants_keys(keys, 'published-%s' % name)
    record = {'data': data, 'keys': keys}
    self.datasets[name] = record
    return {'status': 'OK', 'name': name}
def unpublish_dataset(self, stream=None, name=None):
    """ Remove a published dataset and release the keys it pinned.

    A no-op (beyond releasing zero keys) when *name* is unknown.
    """
    entry = self.datasets.pop(name, {'keys': []})
    self.client_releases_keys(entry['keys'], 'published-%s' % name)
def list_datasets(self, *args):
    """ Names of all published datasets, sorted alphabetically. """
    return sorted(self.datasets)
def get_dataset(self, stream, name=None, client=None):
    """ Look up a published dataset by name.

    Raises KeyError with an explanatory message when unknown.
    """
    if name not in self.datasets:
        raise KeyError("Dataset '%s' not found" % name)
    return self.datasets[name]
def change_worker_cores(self, stream=None, worker=None, diff=0):
    """ Add or remove cores from a worker

    This is used when a worker wants to spin off a long-running task
    """
    # Mark the worker as possibly idle and adjust its core count, then
    # let the scheduler hand out work against the new capacity.
    self.maybe_idle.add(worker)
    self.ncores[worker] += diff
    self.ensure_occupied()
#####################
# State Transitions #
#####################
def transition_released_waiting(self, key):
    """ released -> waiting: begin tracking unmet dependencies of *key*.

    Returns a dict of recommended follow-up transitions (e.g. 'ready'
    when all dependencies are already in memory, 'erred' when a
    dependency failed, 'forgotten' when a dependency is unknown).
    """
    try:
        if self.validate:
            assert key in self.tasks
            assert key in self.dependencies
            assert key in self.dependents
            assert key not in self.waiting
            # assert key not in self.readyset
            # assert key not in self.rstacks
            assert key not in self.who_has
            assert key not in self.rprocessing
            # assert all(dep in self.task_state
            #            for dep in self.dependencies[key])

        # If a dependency has been forgotten this task cannot run.
        if not all(dep in self.task_state for dep in
                   self.dependencies[key]):
            return {key: 'forgotten'}

        self.waiting[key] = set()

        recommendations = OrderedDict()

        # Inherit failure from any erred dependency.
        for dep in self.dependencies[key]:
            if dep in self.exceptions_blame:
                self.exceptions_blame[key] = self.exceptions_blame[dep]
                recommendations[key] = 'erred'
                return recommendations

        for dep in self.dependencies[key]:
            if dep not in self.who_has:
                self.waiting[key].add(dep)
            if dep in self.released:
                recommendations[dep] = 'waiting'
            else:
                self.waiting_data[dep].add(key)

        if not self.waiting[key]:
            # Every dependency is already in memory.
            recommendations[key] = 'ready'

        self.waiting_data[key] = {dep for dep in self.dependents[key]
                                  if dep not in self.who_has
                                  and dep not in self.released
                                  and dep not in self.exceptions_blame}

        self.task_state[key] = 'waiting'
        self.released.remove(key)

        if self.validate:
            assert key in self.waiting
            assert key in self.waiting_data

        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_waiting_ready(self, key):
    """ waiting -> ready: place *key* on a worker stack or global queue.

    Tasks with dependencies or restrictions are assigned a worker via
    ``decide_worker`` (state 'stacks', or 'no-worker' when none fits);
    unconstrained tasks go on the shared ready deque (state 'queue').
    Always returns an empty recommendation dict.
    """
    try:
        if self.validate:
            assert key in self.waiting
            assert not self.waiting[key]
            assert key not in self.who_has
            assert key not in self.exceptions_blame
            assert key not in self.rprocessing
            # assert key not in self.readyset
            assert key not in self.unrunnable
            assert all(dep in self.who_has
                       for dep in self.dependencies[key])

        del self.waiting[key]

        if self.dependencies.get(key, None) or key in self.restrictions:
            new_worker = decide_worker(self.dependencies, self.stacks,
                    self.stack_duration, self.processing, self.who_has,
                    self.has_what, self.restrictions,
                    self.loose_restrictions, self.nbytes, self.ncores, key)
            if not new_worker:
                # Restrictions cannot currently be satisfied.
                self.unrunnable.add(key)
                self.task_state[key] = 'no-worker'
            else:
                self.stacks[new_worker].append(key)
                # Track expected duration so stack load can be compared.
                duration = self.task_duration.get(key_split(key), 0.5)
                self.stack_durations[new_worker].append(duration)
                self.stack_duration[new_worker] += duration
                self.maybe_idle.add(new_worker)
                self.put_key_in_stealable(key)
                self.task_state[key] = 'stacks'
        else:
            self.ready.appendleft(key)
            self.task_state[key] = 'queue'

        return {}
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_ready_processing(self, key, worker=None, latency=5e-3):
    """ queue/stacks -> processing: send *key* to *worker* for execution.

    Records the expected duration in the worker's processing map and
    occupancy, then ships the task.  Removes the worker if its stream
    is already closed.  Returns an empty recommendation dict.
    """
    try:
        if self.validate:
            assert key not in self.waiting
            assert key not in self.who_has
            assert key not in self.exceptions_blame
            assert self.task_state[key] in ('queue', 'stacks')
            if self.task_state[key] == 'no-worker':
                raise ValueError()
            assert worker

        # Estimate runtime from past tasks with the same key prefix.
        duration = self.task_duration.get(key_split(key), latency*100)
        self.processing[worker][key] = duration
        self.rprocessing[key].add(worker)
        self.occupancy[worker] += duration
        self.task_state[key] = 'processing'
        self.remove_key_from_stealable(key)

        # logger.debug("Send job to worker: %s, %s", worker, key)

        try:
            self.send_task_to_worker(worker, key)
        except StreamClosedError:
            self.remove_worker(worker)

        return {}
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_processing_memory(self, key, nbytes=None, type=None,
        worker=None, compute_start=None, compute_stop=None,
        transfer_start=None, transfer_stop=None, **kwargs):
    """ processing -> memory: record a completed task's result location.

    Updates duration statistics for the worker, records *key* as in
    memory on *worker*, wakes dependents whose last dependency this
    was, and recommends releasing data that is no longer needed.
    """
    try:
        if self.validate:
            assert key in self.rprocessing
            assert all(key in self.processing[w] for w in self.rprocessing[key])
            assert key not in self.waiting
            assert key not in self.who_has
            assert key not in self.exceptions_blame
            # assert all(dep in self.waiting_data[key ] for dep in
            #         self.dependents[key] if self.task_state[dep] in
            #         ['waiting', 'queue', 'stacks'])
            # assert key not in self.nbytes

            assert self.task_state[key] == 'processing'

        # Worker may have been removed while the task was running.
        if worker not in self.processing:
            return {key: 'released'}

        #############################
        # Update Timing Information #
        #############################
        if compute_start:
            # Update average task duration for worker

            info = self.worker_info[worker]
            ks = key_split(key)

            # A long gap since the previous task suggests the timing is
            # unreliable (e.g. the worker was busy elsewhere).
            gap = (transfer_start or compute_start) - info.get('last-task', 0)
            old_duration = self.task_duration.get(ks, 0)
            new_duration = compute_stop - compute_start
            if (not old_duration or
                gap > max(10e-3, info.get('latency', 0), old_duration)):
                avg_duration = new_duration
            else:
                # Exponential moving average of observed durations.
                avg_duration = (0.5 * old_duration
                              + 0.5 * new_duration)

            self.task_duration[ks] = avg_duration

            # Durations are now known; reconsider stealability of
            # stacked tasks with the same prefix.
            if ks in self.stealable_unknown_durations:
                for k in self.stealable_unknown_durations.pop(ks, ()):
                    if self.task_state.get(k) == 'stacks':
                        self.put_key_in_stealable(k)

            info['last-task'] = compute_stop

        ############################
        # Update State Information #
        ############################
        if nbytes:
            self.nbytes[key] = nbytes

        self.who_has[key] = set()

        if worker:
            self.who_has[key].add(worker)
            self.has_what[worker].add(key)
            self.worker_bytes[worker] += self.nbytes.get(key, 1000)

        # NOTE(review): this repeats the nbytes assignment above.
        if nbytes:
            self.nbytes[key] = nbytes

        workers = self.rprocessing.pop(key)
        for worker in workers:
            self.occupancy[worker] -= self.processing[worker].pop(key)

        recommendations = OrderedDict()

        deps = self.dependents.get(key, [])
        if len(deps) > 1:
            deps = sorted(deps, key=self.priority.get, reverse=True)

        for dep in deps:
            if dep in self.waiting:
                s = self.waiting[dep]
                s.remove(key)
                if not s:  # new task ready to run
                    recommendations[dep] = 'ready'

        # Dependencies of this task may no longer be needed by anyone.
        for dep in self.dependencies.get(key, []):
            if dep in self.waiting_data:
                s = self.waiting_data[dep]
                s.remove(key)
                if (not s and dep and
                    dep not in self.who_wants and
                    not self.waiting_data.get(dep)):
                    recommendations[dep] = 'released'

        if (not self.waiting_data.get(key) and
            key not in self.who_wants):
            recommendations[key] = 'released'
        else:
            msg = {'op': 'key-in-memory',
                   'key': key}
            if type is not None:
                msg['type'] = type
            self.report(msg)

        self.task_state[key] = 'memory'

        if self.validate:
            assert key not in self.rprocessing
        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_memory_released(self, key, safe=False):
    """ memory -> released: forget where *key*'s data lives.

    Schedules deletion of the data from every holding worker, notifies
    clients of the loss, and recommends re-running (waiting) or
    forgetting the key depending on who still wants it.
    """
    try:
        if self.validate:
            assert key in self.who_has
            assert key not in self.released
            # assert key not in self.readyset
            assert key not in self.waiting
            assert key not in self.rprocessing
            if safe:
                assert not self.waiting_data.get(key)
            # assert key not in self.who_wants

        recommendations = OrderedDict()

        for dep in self.waiting_data.get(key, ()):  # lost dependency
            if self.task_state[dep] == 'waiting':
                self.waiting[dep].add(key)
            else:
                recommendations[dep] = 'waiting'

        workers = self.who_has.pop(key)
        for w in workers:
            if w in self.worker_info:  # in case worker has died
                self.has_what[w].remove(key)
                self.worker_bytes[w] -= self.nbytes.get(key, 1000)
                # Queue the key for deletion on that worker.
                self.deleted_keys[w].add(key)

        self.released.add(key)

        self.task_state[key] = 'released'
        self.report({'op': 'lost-data', 'key': key})

        if key not in self.tasks:  # pure data
            recommendations[key] = 'forgotten'
        elif not all(dep in self.task_state
                     for dep in self.dependencies[key]):
            recommendations[key] = 'forgotten'
        elif key in self.who_wants or self.waiting_data.get(key):
            # Someone still needs this result; recompute it.
            recommendations[key] = 'waiting'

        if key in self.waiting_data:
            del self.waiting_data[key]

        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_released_erred(self, key):
    """ released -> erred: mark *key* failed due to a blamed dependency.

    Propagates the blame to dependents (recommending 'erred' for those
    without data) and reports the failing task's exception to clients.
    """
    try:
        if self.validate:
            with log_errors(pdb=LOG_PDB):
                assert key in self.exceptions_blame
                assert key not in self.who_has
                assert key not in self.waiting
                assert key not in self.waiting_data

        recommendations = {}

        failing_key = self.exceptions_blame[key]

        for dep in self.dependents[key]:
            self.exceptions_blame[dep] = failing_key
            if dep not in self.who_has:
                recommendations[dep] = 'erred'

        self.report({'op': 'task-erred',
                     'key': key,
                     'exception': self.exceptions[failing_key],
                     'traceback': self.tracebacks.get(failing_key, None)})

        self.task_state[key] = 'erred'
        self.released.remove(key)

        # TODO: waiting data?
        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_waiting_released(self, key):
    """ waiting -> released: stop waiting on *key*'s dependencies.

    Detaches *key* from its dependencies' waiting_data sets (possibly
    recommending those be released too) and recommends 'forgotten' or
    'waiting' for *key* itself depending on remaining interest.
    """
    try:
        if self.validate:
            assert key in self.waiting
            assert key in self.waiting_data
            assert key not in self.who_has
            assert key not in self.rprocessing

        recommendations = {}

        del self.waiting[key]

        for dep in self.dependencies[key]:
            if dep in self.waiting_data:
                if key in self.waiting_data[dep]:
                    self.waiting_data[dep].remove(key)
                if not self.waiting_data[dep] and dep not in self.who_wants:
                    recommendations[dep] = 'released'
                assert self.task_state[dep] != 'erred'

        self.task_state[key] = 'released'
        self.released.add(key)

        if self.validate:
            assert not any(key in self.waiting_data.get(dep, ())
                           for dep in self.dependencies[key])

        if any(dep not in self.task_state for dep in
               self.dependencies[key]):
            recommendations[key] = 'forgotten'

        elif (key not in self.exceptions_blame and
              (key in self.who_wants or self.waiting_data.get(key))):
            recommendations[key] = 'waiting'

        del self.waiting_data[key]

        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_processing_released(self, key):
    """ processing -> released: abandon a task that is mid-execution.

    Removes *key* from every worker's processing map (restoring their
    occupancy) and recommends 'waiting', 'forgotten', or releasing
    now-unneeded dependencies.
    """
    try:
        if self.validate:
            assert key in self.rprocessing
            assert key not in self.who_has
            assert self.task_state[key] == 'processing'

        for w in self.rprocessing.pop(key):
            self.occupancy[w] -= self.processing[w].pop(key)

        self.released.add(key)
        self.task_state[key] = 'released'

        recommendations = OrderedDict()

        if any(dep not in self.task_state
               for dep in self.dependencies[key]):
            recommendations[key] = 'forgotten'
        elif self.waiting_data[key] or key in self.who_wants:
            recommendations[key] = 'waiting'
        else:
            # Nobody wants this result; unhook it from its dependencies.
            for dep in self.dependencies[key]:
                if dep not in self.released:
                    assert key in self.waiting_data[dep]
                    self.waiting_data[dep].remove(key)
                    if not self.waiting_data[dep] and dep not in self.who_wants:
                        recommendations[dep] = 'released'
            del self.waiting_data[key]

        if self.validate:
            assert key not in self.rprocessing

        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_ready_released(self, key):
    """ stacks/queue/no-worker -> released: withdraw a scheduled task.

    Removes *key* from the worker stack (and its duration bookkeeping)
    or the unrunnable set, detaches it from dependency tracking, and
    recommends 'waiting' if someone still wants the result.
    """
    try:
        if self.validate:
            assert key not in self.who_has
            assert self.task_state[key] in ('stacks', 'queue', 'no-worker')

        if self.task_state[key] == 'no-worker':
            self.unrunnable.remove(key)
        if self.task_state[key] == 'stacks':  # TODO: non-linear
            # Linear scan over all stacks to find and remove the key
            # together with its matching duration entry.
            for w in self.stacks:
                if key in self.stacks[w]:
                    for i, k in enumerate(self.stacks[w]):
                        if k == key:
                            del self.stacks[w][i]
                            duration = self.stack_durations[w][i]
                            del self.stack_durations[w][i]
                            self.stack_duration[w] -= duration
                            break
        self.released.add(key)
        self.task_state[key] = 'released'

        for dep in self.dependencies[key]:
            try:
                self.waiting_data[dep].remove(key)
            except KeyError:  # dep may also be released
                pass
            # TODO: maybe release dep if not about to wait?

        if self.waiting_data[key] or key in self.who_wants:
            recommendations = {key: 'waiting'}
        else:
            recommendations = {}

        del self.waiting_data[key]

        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_processing_erred(self, key, cause=None, exception=None,
        traceback=None):
    """ processing -> erred: record a task failure reported by a worker.

    Stores the exception/traceback, blames *cause* (or the existing
    blame), marks dependents erred, releases now-unneeded
    dependencies, and reports the error to clients.
    """
    try:
        if self.validate:
            assert cause or key in self.exceptions_blame
            assert key in self.rprocessing
            assert key not in self.who_has
            assert key not in self.waiting
            # assert key not in self.rstacks
            # assert key not in self.readyset

        if exception:
            self.exceptions[key] = exception
        if traceback:
            self.tracebacks[key] = traceback
        if cause:
            self.exceptions_blame[key] = cause

        failing_key = self.exceptions_blame[key]

        recommendations = {}

        # NOTE(review): dependents are blamed on `key` here, while
        # transition_released_erred blames `failing_key` — confirm the
        # asymmetry is intentional.
        for dep in self.dependents[key]:
            self.exceptions_blame[dep] = key
            recommendations[dep] = 'erred'

        for dep in self.dependencies.get(key, []):
            if dep in self.waiting_data:
                s = self.waiting_data[dep]
                if key in s:
                    s.remove(key)
                if (not s and dep and
                    dep not in self.who_wants and
                    not self.waiting_data.get(dep)):
                    recommendations[dep] = 'released'

        for w in self.rprocessing.pop(key):
            self.occupancy[w] -= self.processing[w].pop(key)

        del self.waiting_data[key]  # do anything with this?

        self.task_state[key] = 'erred'

        self.report({'op': 'task-erred',
                     'key': key,
                     'exception': self.exceptions[failing_key],
                     'traceback': self.tracebacks.get(failing_key)})

        if self.validate:
            assert key not in self.rprocessing

        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def remove_key(self, key):
    """ Purge *key* from every scheduler-side bookkeeping structure.

    The key must currently exist in ``task_state``; every other
    structure tolerates its absence.
    """
    self.tasks.pop(key, None)
    # task_state is the authoritative record; a missing key is a bug,
    # so keep the strict delete.
    del self.task_state[key]
    # dependents is only populated alongside dependencies.
    if key in self.dependencies:
        del self.dependencies[key]
        del self.dependents[key]
    self.restrictions.pop(key, None)
    self.loose_restrictions.discard(key)
    self.priority.pop(key, None)
    self.exceptions.pop(key, None)
    self.exceptions_blame.pop(key, None)
    self.released.discard(key)
    self.waiting_data.pop(key, None)
    self.suspicious_tasks.pop(key, None)
    self.nbytes.pop(key, None)
def transition_memory_forgotten(self, key):
    """ memory -> forgotten: erase *key* entirely, deleting its data.

    Recommends forgetting dependents that still reference it and
    dependencies left with no remaining dependents, schedules deletion
    of the data on holding workers, then removes all bookkeeping.
    """
    try:
        if self.validate:
            assert key in self.dependents
            assert self.task_state[key] == 'memory'
            assert key in self.waiting_data
            assert key in self.who_has
            assert key not in self.rprocessing
            # assert key not in self.ready
            assert key not in self.waiting

        recommendations = {}
        # Anything still waiting on this data must be forgotten too.
        for dep in self.waiting_data[key]:
            recommendations[dep] = 'forgotten'

        for dep in self.dependents[key]:
            if self.task_state[dep] == 'released':
                recommendations[dep] = 'forgotten'

        # Unhook from dependencies; those with no remaining dependents
        # and no client interest can be forgotten as well.
        for dep in self.dependencies.get(key, ()):
            try:
                s = self.dependents[dep]
                s.remove(key)
                if not s and dep not in self.who_wants:
                    assert dep is not key
                    recommendations[dep] = 'forgotten'
            except KeyError:
                pass

        workers = self.who_has.pop(key)
        for w in workers:
            if w in self.worker_info:  # in case worker has died
                self.has_what[w].remove(key)
                self.worker_bytes[w] -= self.nbytes.get(key, 1000)
                self.deleted_keys[w].add(key)

        if self.validate:
            assert all(key not in self.dependents[dep]
                       for dep in self.dependencies[key]
                       if dep in self.task_state)
            assert all(key not in self.waiting_data.get(dep, ())
                       for dep in self.dependencies[key]
                       if dep in self.task_state)

        self.remove_key(key)

        self.report_on_key(key)

        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition_released_forgotten(self, key):
    """ released/erred -> forgotten: erase a dataless task entirely.

    Unhooks *key* from dependency/dependent tracking, recommends
    forgetting released dependents and orphaned dependencies, then
    removes all bookkeeping for *key*.
    """
    try:
        if self.validate:
            assert key in self.dependencies
            assert self.task_state[key] in ('released', 'erred')
            # assert not self.waiting_data[key]
            if key in self.tasks and self.dependencies[key].issubset(self.task_state):
                assert key not in self.who_wants
                assert not self.dependents[key]
                assert not any(key in self.waiting_data.get(dep, ())
                               for dep in self.dependencies[key])
            assert key not in self.who_has
            assert key not in self.rprocessing
            # assert key not in self.ready
            assert key not in self.waiting

        recommendations = {}
        # Dependencies with no other dependents and no client interest
        # can be forgotten as well.
        for dep in self.dependencies[key]:
            try:
                s = self.dependents[dep]
                s.remove(key)
                if not s and dep not in self.who_wants:
                    assert dep is not key
                    recommendations[dep] = 'forgotten'
            except KeyError:
                pass

        # NOTE(review): 'error' below does not match the 'erred' state
        # string used elsewhere — presumably the first check is meant
        # to spare memory/erred dependents; verify.
        for dep in self.dependents[key]:
            if self.task_state[dep] not in ('memory', 'error'):
                recommendations[dep] = 'forgotten'

        for dep in self.dependents[key]:
            if self.task_state[dep] == 'released':
                recommendations[dep] = 'forgotten'

        for dep in self.dependencies[key]:
            try:
                self.waiting_data[dep].remove(key)
            except KeyError:
                pass

        if self.validate:
            assert all(key not in self.dependents[dep]
                       for dep in self.dependencies[key]
                       if dep in self.task_state)
            assert all(key not in self.waiting_data.get(dep, ())
                       for dep in self.dependencies[key]
                       if dep in self.task_state)

        self.remove_key(key)

        self.report_on_key(key)

        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transition(self, key, finish, *args, **kwargs):
    """ Transition a key from its current state to the finish state

    Examples
    --------
    >>> self.transition('x', 'waiting')
    {'x': 'ready'}

    Returns
    -------
    Dictionary of recommendations for future transitions

    See Also
    --------
    Scheduler.transitions: transitive version of this function
    """
    try:
        try:
            start = self.task_state[key]
        except KeyError:
            # Unknown key: nothing to do.
            return {}
        if start == finish:
            return {}
        if (start, finish) in self._transitions:
            func = self._transitions[start, finish]
            recommendations = func(key, *args, **kwargs)
        else:
            # No direct edge in the transition table: route through
            # 'released' as an intermediate state.
            func = self._transitions['released', finish]
            assert not args and not kwargs
            a = self.transition(key, 'released')
            if key in a:
                # Going released redirected us; follow that instead.
                func = self._transitions['released', a[key]]
            b = func(key)
            a = a.copy()
            a.update(b)
            recommendations = a
            start = 'released'
        # The handler may have landed somewhere other than `finish`.
        finish2 = self.task_state.get(key, 'forgotten')
        self.transition_log.append((key, start, finish2, recommendations))
        if self.validate:
            logger.info("Transition %s->%s: %s New: %s",
                        start, finish2, key, recommendations)
        for plugin in self.plugins:
            try:
                plugin.transition(key, start, finish2, *args, **kwargs)
            except Exception:
                logger.info("Plugin failed with exception", exc_info=True)

        return recommendations
    except Exception as e:
        logger.exception(e)
        if LOG_PDB:
            import pdb; pdb.set_trace()
        raise
def transitions(self, recommendations):
    """ Process transitions until none are left

    This includes feedback from previous transitions and continues until we
    reach a steady state
    """
    pending = recommendations.copy()
    touched = set()
    while pending:
        key, finish = pending.popitem()
        touched.add(key)
        pending.update(self.transition(key, finish))

    if self.validate:
        for key in touched:
            self.validate_key(key)
def transition_story(self, *keys):
    """ Get all transitions that touch one of the input keys """
    interesting = set(keys)
    return [entry for entry in self.transition_log
            if entry[0] in interesting or interesting & set(entry[3])]
##############################
# Assigning Tasks to Workers #
##############################
def ensure_occupied(self):
    """ Run ready tasks on idle workers

    **Work stealing policy**

    If some workers are idle but not others, if there are no globally ready
    tasks, and if there are tasks in worker stacks, then we start to pull
    preferred tasks from overburdened workers and deploy them back into the
    global pool in the following manner.

    We determine the number of tasks to reclaim as the number of all tasks
    in all stacks times the fraction of idle workers to all workers.
    We sort the stacks by size and walk through them, reclaiming half of
    each stack until we have enough task to fill the global pool.
    We are careful not to reclaim tasks that are restricted to run on
    certain workers.

    See also
    --------
    Scheduler.ensure_occupied_queue
    Scheduler.ensure_occupied_stacks
    Scheduler.work_steal
    """
    with log_errors(pdb=LOG_PDB):
        # First drain stacks of workers flagged as possibly idle.
        for worker in self.maybe_idle:
            self.ensure_occupied_stacks(worker)
        self.maybe_idle.clear()

        if self.idle and self.ready:
            if len(self.ready) < len(self.idle):
                # Fewer tasks than idle workers: give one task each to
                # the emptiest workers.
                def keyfunc(w):
                    return (-len(self.stacks[w]) - len(self.processing[w]),
                            -len(self.has_what.get(w, ())))
                for worker in topk(len(self.ready), self.idle, key=keyfunc):
                    self.ensure_occupied_queue(worker, count=1)
            else:
                # Fill up empty cores
                workers = list(self.idle)
                free_cores = [self.ncores[w] - len(self.processing[w])
                              for w in workers]
                workers2 = []  # Clean out workers that *are* actually full
                free_cores2 = []
                for w, fs in zip(workers, free_cores):
                    if fs > 0:
                        workers2.append(w)
                        free_cores2.append(fs)
                if workers2:
                    n = min(sum(free_cores2), len(self.ready))
                    # Distribute n tasks proportionally to free cores.
                    counts = divide_n_among_bins(n, free_cores2)
                    for worker, count in zip(workers2, counts):
                        self.ensure_occupied_queue(worker, count=count)

                # Fill up unsaturated cores by time
                workers = list(self.idle)
                latency = 5e-3
                free_time = [latency * self.ncores[w] - self.occupancy[w]
                             for w in workers]
                workers2 = []  # Clean out workers that *are* actually full
                free_time2 = []
                for w, fs in zip(workers, free_time):
                    if fs > 0:
                        workers2.append(w)
                        free_time2.append(fs)
                total_free_time = sum(free_time2)
                if workers2 and total_free_time > 0:
                    # Count how many queued tasks fit in the free time,
                    # skipping entries no longer in the 'queue' state.
                    tasks = []
                    while self.ready and total_free_time > 0:
                        task = self.ready.pop()
                        if self.task_state.get(task) != 'queue':
                            continue
                        total_free_time -= self.task_duration.get(key_split(task), 1)
                        tasks.append(task)
                    # Put them back; ensure_occupied_queue pops them.
                    self.ready.extend(tasks[::-1])

                    counts = divide_n_among_bins(len(tasks), free_time2)
                    for worker, count in zip(workers2, counts):
                        self.ensure_occupied_queue(worker, count=count)

        if self.idle and any(self.stealable):
            thieves = self.work_steal()
            for worker in thieves:
                self.ensure_occupied_stacks(worker)
def ensure_occupied_stacks(self, worker):
    """ Send tasks to worker while it has tasks and free cores
    These tasks may come from the worker's own stacks or from the global
    ready deque.
    We update the idle workers set appropriately.
    See Also
    --------
    Scheduler.ensure_occupied
    Scheduler.ensure_occupied_queue
    """
    stack = self.stacks[worker]
    latency = 5e-3  # target window (seconds) worth of work to keep queued per core
    # Dispatch while the worker has stacked tasks and either a free core or
    # less than one latency-window of expected work queued up.
    while (stack and
           (self.ncores[worker] > len(self.processing[worker]) or
            self.occupancy[worker] < latency * self.ncores[worker])):
        key = stack.pop()
        # stack_durations parallels stack element-for-element; pop both so
        # the running per-worker duration estimate stays consistent.
        duration = self.stack_durations[worker].pop()
        self.stack_duration[worker] -= duration
        if self.task_state.get(key) == 'stacks':
            r = self.transition(key, 'processing',
                                worker=worker, latency=latency)
    if stack:
        # Still has queued work: the worker is saturated and cannot be idle.
        self.saturated.add(worker)
        if worker in self.idle:
            self.idle.remove(worker)
    else:
        if worker in self.saturated:
            self.saturated.remove(worker)
        self._check_idle(worker)
def put_key_in_stealable(self, key):
    """Register *key* as a work-stealing candidate in its time-ratio bin.
    Keys for which no ratio can be computed (hard restrictions, unknown
    durations) are not registered.
    """
    time_ratio, bin_index = self.steal_time_ratio(key)
    if time_ratio is None:
        return
    self.stealable[bin_index].add(key)
    self.key_stealable[key] = bin_index
def remove_key_from_stealable(self, key):
    """Forget *key* as a work-stealing candidate.
    Removes the key from its recorded ``stealable`` bin, if any.  Unknown
    keys and already-emptied bins are ignored, so this is always safe to
    call.
    """
    loc = self.key_stealable.pop(key, None)
    if loc is not None:
        try:
            self.stealable[loc].remove(key)
        # Was a bare ``except:`` which also swallowed KeyboardInterrupt /
        # SystemExit; only the expected failures are absorbed now.
        except (KeyError, IndexError):
            # Key already gone (e.g. stolen) or the bin no longer exists.
            pass
def ensure_occupied_queue(self, worker, count):
    """
    Send at most count tasks from the ready queue to the specified worker
    See also
    --------
    Scheduler.ensure_occupied
    Scheduler.ensure_occupied_stacks
    """
    for i in range(count):
        try:
            key = self.ready.pop()
            # Skip stale entries whose state moved on after queueing.
            while self.task_state.get(key) != 'queue':
                key = self.ready.pop()
        except (IndexError, KeyError):
            # Ready queue exhausted.
            break
        if self.task_state[key] == 'queue':
            r = self.transition(key, 'processing', worker=worker)
    # The worker may now have enough work to leave the idle set.
    self._check_idle(worker)
def work_steal(self):
    """ Steal tasks from saturated workers to idle workers
    This moves tasks from the bottom of the stacks of over-occupied workers
    to the stacks of idling workers.
    Returns the collection of workers that received stolen tasks (an empty
    list when stealing is disabled).
    See also
    --------
    Scheduler.ensure_occupied
    """
    if not self.steal:
        return []
    with log_errors():
        thieves = set()
        # Bins are ordered by compute/communication ratio (see
        # steal_time_ratio); the last bin is never stolen from.
        for level, stealable in enumerate(self.stealable[:-1]):
            if not stealable:
                continue
            if len(self.idle) == len(self.ncores):  # no stacks
                stealable.clear()
                continue
            # Enough idleness to continue?
            ratio = 2 ** (level - 3)
            n_saturated = len(self.ncores) - len(self.idle)
            duration_if_hold = len(stealable) / n_saturated
            duration_if_steal = ratio
            if level > 1 and duration_if_hold < duration_if_steal:
                break
            while stealable and self.idle:
                for w in list(self.idle):
                    try:
                        key = stealable.pop()
                    # Was a bare ``except:``; set.pop on an empty set
                    # raises KeyError, which is all we expect here.
                    except KeyError:
                        break
                    else:
                        # Only steal tasks still waiting on a stack.  The
                        # previous check, ``self.task_state.get(key, 'stacks')``,
                        # was truthy for *any* recorded state (e.g.
                        # 'processing'), so tasks that had already moved on
                        # could be re-queued; compare against 'stacks'
                        # explicitly (cf. ensure_occupied_stacks).
                        if self.task_state.get(key, 'stacks') == 'stacks':
                            self.stacks[w].append(key)
                            duration = self.task_duration.get(key_split(key), 0.5)
                            self.stack_durations[w].append(duration)
                            self.stack_duration[w] += duration
                            thieves.add(w)
                            if (self.ncores[w] <=
                                len(self.processing[w]) + len(self.stacks[w])):
                                self.idle.remove(w)
                if stealable:
                    break
        logger.debug('Stolen tasks for %d workers', len(thieves))
        return thieves
def steal_time_ratio(self, key, bandwidth=BANDWIDTH):
    """ The compute to communication time ratio of a key
    Returns
    -------
    ratio: The compute/communication time ratio of the task
    loc: The self.stealable bin into which this key should go
    Both are ``None`` when the key must not be stolen: hard worker
    restrictions, known-fast task families, or no duration estimate yet.
    """
    if key in self.restrictions and key not in self.loose_restrictions:
        return None, None  # don't steal
    # Estimated cost of moving this task's inputs to another worker.
    nbytes = sum(self.nbytes.get(k, 1000) for k in self.dependencies[key])
    transfer_time = nbytes / bandwidth
    split = key_split(key)
    if split in fast_tasks:
        return None, None
    try:
        compute_time = self.task_duration[split]
    except KeyError:
        # No duration estimate for this task family yet; remember the key
        # so it can be reconsidered once a sibling has run.
        self.stealable_unknown_durations[split].add(key)
        return None, None
    else:
        try:
            ratio = compute_time / transfer_time
        except ZeroDivisionError:
            # Nothing to transfer: stealing is essentially free.
            ratio = 10000
        # Map the ratio logarithmically (base 2) into bins; bin 0 holds the
        # most profitable steals, the last bin the least.
        if ratio > 8:
            loc = 0
        elif ratio < 2**-8:
            loc = -1
        else:
            loc = int(-round(log(ratio) / log(2), 0) + 3)
        return ratio, loc
def issaturated(self, worker, latency=5e-3):
    """
    Determine if a worker has enough work to avoid being idle
    A worker is saturated if the following criteria are met
    1. It is working on at least as many tasks as it has cores
    2. The expected time it will take to complete all of its currently
       assigned tasks is at least a full round-trip time.  This is
       relevant when it has many small tasks
    """
    cores = self.ncores[worker]
    assigned = len(self.stacks[worker]) + len(self.processing[worker])
    if assigned <= cores:
        return False
    return self.occupancy[worker] > latency * cores
def _check_idle(self, worker, latency=5e-3):
    """Add *worker* to, or drop it from, the idle set based on saturation."""
    if self.issaturated(worker, latency=latency):
        # discard == remove-if-present, matching the original's
        # membership-guarded remove.
        self.idle.discard(worker)
    else:
        self.idle.add(worker)
#####################
# Utility functions #
#####################
def coerce_address(self, addr):
    """
    Coerce possible input addresses to canonical form
    Handles lists, strings, bytes, tuples, or aliases
    Returns an ``'ip:port'`` string when a port is present, otherwise a
    bare resolved hostname/IP string.
    """
    if isinstance(addr, list):
        addr = tuple(addr)
    if addr in self.aliases:
        addr = self.aliases[addr]
    if isinstance(addr, bytes):
        addr = addr.decode()
    # Re-check aliases: decoding bytes may have produced a known alias.
    if addr in self.aliases:
        addr = self.aliases[addr]
    if isinstance(addr, unicode):  # py2 text type; file predates py3-only support
        if ':' in addr:
            # Split on the *last* ':' so only the final field becomes the port.
            addr = tuple(addr.rsplit(':', 1))
        else:
            addr = ensure_ip(addr)
    if isinstance(addr, tuple):
        ip, port = addr
        if PY3 and isinstance(ip, bytes):
            ip = ip.decode()
        ip = ensure_ip(ip)
        port = int(port)
        addr = '%s:%d' % (ip, port)
    return addr
def workers_list(self, workers):
    """
    List of qualifying workers
    Takes a list of worker addresses or hostnames.
    Returns a list of all worker addresses that match.  ``None`` selects
    every known worker.
    """
    if workers is None:
        return list(self.ncores)
    matched = set()
    for pattern in workers:
        if ':' in pattern:
            # Fully-qualified 'host:port' address: take it verbatim.
            matched.add(pattern)
            continue
        # Bare hostname: substring-match against every known address.
        matched.update(addr for addr in self.ncores if pattern in addr)  # TODO: quadratic
    return list(matched)
def start_ipython(self, stream=None):
    """Start an IPython kernel
    Returns Jupyter connection info dictionary.
    The kernel is created lazily on the first call and reused afterwards.
    """
    from ._ipython_utils import start_ipython
    if self._ipython_kernel is None:
        self._ipython_kernel = start_ipython(
            ip=self.ip,
            ns={'scheduler': self},  # expose the scheduler object inside the kernel
            log=logger,
        )
    return self._ipython_kernel.get_connection_info()
def decide_worker(dependencies, stacks, stack_duration, processing, who_has,
                  has_what, restrictions, loose_restrictions, nbytes, ncores, key):
    """ Decide which worker should take task
    Returns the chosen worker's address, or ``None`` when no worker
    satisfies the task's (non-loose) restrictions or no workers are known.
    >>> dependencies = {'c': {'b'}, 'b': {'a'}}
    >>> stacks = {'alice:8000': ['z'], 'bob:8000': []}
    >>> stack_duration = {'alice:8000': 0.5, 'bob:8000': 0}
    >>> processing = {'alice:8000': set(), 'bob:8000': set()}
    >>> who_has = {'a': {'alice:8000'}}
    >>> has_what = {'alice:8000': {'a'}}
    >>> nbytes = {'a': 100}
    >>> ncores = {'alice:8000': 1, 'bob:8000': 1}
    >>> restrictions = {}
    >>> loose_restrictions = set()
    We choose the worker that has the data on which 'b' depends (alice has 'a')
    >>> decide_worker(dependencies, stacks, stack_duration, processing,
    ...               who_has, has_what, restrictions, loose_restrictions,
    ...               nbytes, ncores, 'b')
    'alice:8000'
    If both Alice and Bob have dependencies then we choose the less-busy worker
    >>> who_has = {'a': {'alice:8000', 'bob:8000'}}
    >>> has_what = {'alice:8000': {'a'}, 'bob:8000': {'a'}}
    >>> decide_worker(dependencies, stacks, stack_duration, processing,
    ...               who_has, has_what, restrictions, loose_restrictions,
    ...               nbytes, ncores, 'b')
    'bob:8000'
    Optionally provide restrictions of where jobs are allowed to occur
    >>> restrictions = {'b': {'alice', 'charlie'}}
    >>> decide_worker(dependencies, stacks, stack_duration, processing,
    ...               who_has, has_what, restrictions, loose_restrictions,
    ...               nbytes, ncores, 'b')
    'alice:8000'
    If the task requires data communication, then we choose to minimize the
    number of bytes sent between workers. This takes precedence over worker
    occupancy.
    >>> dependencies = {'c': {'a', 'b'}}
    >>> who_has = {'a': {'alice:8000'}, 'b': {'bob:8000'}}
    >>> has_what = {'alice:8000': {'a'}, 'bob:8000': {'b'}}
    >>> nbytes = {'a': 1, 'b': 1000}
    >>> stacks = {'alice:8000': [], 'bob:8000': []}
    >>> decide_worker(dependencies, stacks, stack_duration, processing,
    ...               who_has, has_what, {}, set(), nbytes, ncores, 'c')
    'bob:8000'
    """
    # NOTE: only the docstring changed relative to the previous version --
    # its examples omitted the ``stack_duration`` argument and so no longer
    # matched the signature.
    deps = dependencies[key]
    assert all(d in who_has for d in deps)
    # Candidate workers already holding at least one dependency.
    workers = frequencies([w for dep in deps
                           for w in who_has[dep]])
    if not workers:
        workers = stacks
    if key in restrictions:
        r = restrictions[key]
        workers = {w for w in workers if w in r or w.split(':')[0] in r}  # TODO: nonlinear
        if not workers:
            # Fall back to any allowed worker, even without local data.
            workers = {w for w in stacks if w in r or w.split(':')[0] in r}
            if not workers:
                if key in loose_restrictions:
                    # Loose restrictions: retry ignoring them entirely.
                    return decide_worker(dependencies, stacks, stack_duration,
                                         processing, who_has, has_what, {}, set(), nbytes,
                                         ncores, key)
                else:
                    return None
    if not workers or not stacks:
        return None
    if len(workers) == 1:
        return first(workers)
    # Select worker that will finish task first
    def objective(w):
        # Bytes that must be moved to w plus its expected queue drain time.
        comm_bytes = sum([nbytes.get(k, 1000) for k in dependencies[key]
                          if w not in who_has[k]])
        stack_time = stack_duration[w] / ncores[w]
        start_time = comm_bytes / BANDWIDTH + stack_time
        return start_time
    return min(workers, key=objective)
def validate_state(dependencies, dependents, waiting, waiting_data, ready,
                   who_has, stacks, processing, finished_results, released,
                   who_wants, wants_what, tasks=None, allow_overlap=False, allow_bad_stacks=False,
                   erred=None, **kwargs):
    """
    Validate a current runtime state
    This performs a sequence of checks on the entire graph, running in about
    linear time. This raises assert errors if anything doesn't check out.
    """
    if erred is None:
        # Previously ``key in erred`` below raised TypeError whenever the
        # caller omitted this argument; treat the default as "no errors".
        erred = set()
    in_stacks = {k for v in stacks.values() for k in v}
    in_processing = {k for v in processing.values() for k in v}
    keys = {key for key in dependents if not dependents[key]}
    ready_set = set(ready)

    assert set(waiting).issubset(dependencies), "waiting not subset of deps"
    assert set(waiting_data).issubset(dependents), "waiting_data not subset"

    if tasks is not None:
        assert ready_set.issubset(tasks), "All ready tasks are tasks"
        assert set(dependents).issubset(set(tasks) | set(who_has)), "all dependents tasks"
        assert set(dependencies).issubset(set(tasks) | set(who_has)), "all dependencies tasks"

    for k, v in waiting.items():
        assert v, "waiting on empty set"
        assert v.issubset(dependencies[k]), "waiting set not dependencies"
        for vv in v:
            assert vv not in who_has, ("waiting dependency in memory", k, vv)
            assert vv not in released, ("dependency released", k, vv)
        for dep in dependencies[k]:
            assert dep in v or who_has.get(dep), ("dep missing", k, dep)

    for k, v in waiting_data.items():
        for vv in v:
            if vv in released:
                raise ValueError('dependent not in play', k, vv)
            if not (vv in ready_set or
                    vv in waiting or
                    vv in in_stacks or
                    vv in in_processing):
                raise ValueError('dependent not in play2', k, vv)

    for v in concat(processing.values()):
        assert v in dependencies, "all processing keys in dependencies"

    for key in who_has:
        assert key in waiting_data or key in who_wants

    @memoize
    def check_key(key):
        """ Validate a single key, recurse downwards """
        vals = ([key in waiting,
                 key in ready,
                 key in in_stacks,
                 key in in_processing,
                 not not who_has.get(key),
                 key in released,
                 key in erred])
        if ((allow_overlap and sum(vals) < 1) or
            (not allow_overlap and sum(vals) != 1)):
            if not (in_stacks and waiting):  # known ok state
                raise ValueError("Key exists in wrong number of places", key, vals)

        for dep in dependencies[key]:
            if dep in dependents:
                check_key(dep)  # Recursive case

        if who_has.get(key):
            assert not any(key in waiting.get(dep, ())
                           for dep in dependents.get(key, ()))
            assert not waiting.get(key)

        if not allow_bad_stacks and (key in in_stacks or key in in_processing):
            if not all(who_has.get(dep) for dep in dependencies[key]):
                raise ValueError("Key in stacks/processing without all deps",
                                 key)
            assert not waiting.get(key)
            assert key not in ready

        if finished_results is not None:
            if key in finished_results:
                assert who_has.get(key)
                assert key in keys
            if key in keys and who_has.get(key):
                assert key in finished_results

        # NOTE(review): this loop rebinds ``key``, shadowing the function
        # argument for the two checks below -- looks unintentional; confirm
        # before relying on those checks.
        for key, s in who_wants.items():
            assert s, "empty who_wants"
            for client in s:
                assert key in wants_what[client]

        if key in waiting:
            assert waiting[key], 'waiting empty'
        if key in ready:
            assert key not in waiting

        return True

    assert all(map(check_key, keys))
# NOTE(review): apparently a mutable round-robin counter (one-element list so
# it can be updated in place); its users are outside this chunk -- confirm.
_round_robin = [0]

# Task-name prefixes that steal_time_ratio treats as too fast to be worth
# work-stealing.
fast_tasks = {'rechunk-split', 'shuffle-split'}


class KilledWorker(Exception):
    """Scheduler exception; raised elsewhere in this module (its exact
    semantics are not visible in this chunk)."""
    pass
| bsd-3-clause |
sklnet/openhdf-enigma2 | lib/python/Plugins/SystemPlugins/AnimationSetup/plugin.py | 4 | 8962 | from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.MenuList import MenuList
from Components.Sources.StaticText import StaticText
from Components.config import config, ConfigNumber, ConfigSelectionNumber, getConfigListEntry
from Plugins.Plugin import PluginDescriptor
from enigma import setAnimation_current, setAnimation_speed
# default = slide to left
g_default = {
    "current": 6,   # index 6 == "Slide left to right" in animationSetupItems
    "speed" : 20,
}
g_max_speed = 30  # upper bound of the speed config selector

# True while a screen whose skin declares animationPaused="1"/"on" is shown.
g_animation_paused = False

# Originals of Screen.show / Screen.doClose, saved before monkey-patching
# in sessionAnimationSetup.
g_orig_show = None
g_orig_doClose = None

config.misc.window_animation_default = ConfigNumber(default=g_default["current"])
config.misc.window_animation_speed = ConfigSelectionNumber(1, g_max_speed, 1, default=g_default["speed"])
class AnimationSetupConfig(ConfigListScreen, Screen):
    """Config sub-screen for adjusting the animation speed setting."""
    skin= """
        <screen position="center,center" size="600,140" title="Animation Settings">
            <widget name="config" position="0,0" size="600,100" scrollbarMode="showOnDemand" />
            <ePixmap pixmap="skin_default/buttons/red.png" position="0,100" size="140,40" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/green.png" position="140,100" size="140,40" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/yellow.png" position="280,100" size="140,40" alphatest="on" />
            <widget source="key_red" render="Label" position="0,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#9f1313" transparent="1" />
            <widget source="key_green" render="Label" position="140,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#1f771f" transparent="1" />
            <widget source="key_yellow" render="Label" position="280,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#a08500" transparent="1" />
        </screen>
        """

    def __init__(self, session):
        self.session = session
        self.entrylist = []

        Screen.__init__(self, session)
        ConfigListScreen.__init__(self, self.entrylist)

        # Green/OK saves, red/cancel discards, yellow restores the default.
        self["actions"] = ActionMap(["OkCancelActions", "ColorActions",], {
            "ok" : self.keyGreen,
            "green" : self.keyGreen,
            "yellow" : self.keyYellow,
            "red" : self.keyRed,
            "cancel" : self.keyRed,
        }, -2)

        self["key_red"] = StaticText(_("Cancel"))
        self["key_green"] = StaticText(_("Save"))
        self["key_yellow"] = StaticText(_("Default"))

        self.makeConfigList()
        self.onLayoutFinish.append(self.layoutFinished)

    def layoutFinished(self):
        self.setTitle(_('Animation Setup'))

    def keyGreen(self):
        # Persist and immediately apply the chosen speed.
        config.misc.window_animation_speed.save()
        setAnimation_speed(int(config.misc.window_animation_speed.value))
        self.close()

    def keyRed(self):
        # Discard any unsaved change.
        config.misc.window_animation_speed.cancel()
        self.close()

    def keyYellow(self):
        # Restore the factory default speed (takes effect on save).
        global g_default
        config.misc.window_animation_speed.value = g_default["speed"]
        self.makeConfigList()

    def keyLeft(self):
        ConfigListScreen.keyLeft(self)

    def keyRight(self):
        ConfigListScreen.keyRight(self)

    def makeConfigList(self):
        # Rebuild the (single-entry) config list.
        self.entrylist = []
        entrySpeed = getConfigListEntry(_("Animation Speed"), config.misc.window_animation_speed)
        self.entrylist.append(entrySpeed)
        self["config"].list = self.entrylist
        self["config"].l.setList(self.entrylist)
class AnimationSetupScreen(Screen):
    """Main screen: choose one of the available window animation styles."""
    # idx is the value stored in config.misc.window_animation_default.
    animationSetupItems = [
        {"idx":0, "name":_("Disable Animations")},
        {"idx":1, "name":_("Simple fade")},
        {"idx":2, "name":_("Grow drop")},
        {"idx":3, "name":_("Grow from left")},
        {"idx":4, "name":_("Popup")},
        {"idx":5, "name":_("Slide drop")},
        {"idx":6, "name":_("Slide left to right")},
        {"idx":7, "name":_("Slide top to bottom")},
        {"idx":8, "name":_("Stripes")},
    ]

    skin = """
        <screen name="AnimationSetup" position="center,center" size="580,400" title="Animation Setup">
            <ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" zPosition="1" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" zPosition="1" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" zPosition="1" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" zPosition="1" alphatest="on" />
            <widget source="key_red" render="Label" position="0,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#9f1313" transparent="1" />
            <widget source="key_green" render="Label" position="140,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#1f771f" transparent="1" />
            <widget source="key_yellow" render="Label" position="280,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#a08500" transparent="1" />
            <widget source="key_blue" render="Label" position="420,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#18188b" transparent="1" />
            <widget name="list" position="10,60" size="560,364" scrollbarMode="showOnDemand" />
            <widget source="introduction" render="Label" position="0,370" size="560,40" zPosition="10" font="Regular;20" valign="center" backgroundColor="#25062748" transparent="1" />
        </screen>"""

    def __init__(self, session):
        self.skin = AnimationSetupScreen.skin
        Screen.__init__(self, session)

        self.animationList = []

        self["introduction"] = StaticText(_("* current animation"))
        self["key_red"] = StaticText(_("Cancel"))
        self["key_green"] = StaticText(_("Save"))
        self["key_yellow"] = StaticText(_("Setting"))
        self["key_blue"] = StaticText(_("Preview"))

        self["actions"] = ActionMap(["SetupActions", "ColorActions"],
        {
            "cancel": self.keyclose,
            "save": self.ok,
            "ok" : self.ok,
            "yellow": self.config,
            "blue": self.preview
        }, -3)

        self["list"] = MenuList(self.animationList)
        self.onLayoutFinish.append(self.layoutFinished)

    def layoutFinished(self):
        # Build (name, idx) entries; mark the currently-active one with '*'.
        l = []
        for x in self.animationSetupItems:
            key = x.get("idx", 0)
            name = x.get("name", "??")
            if key == config.misc.window_animation_default.value:
                name = "* %s" % (name)
            l.append( (name, key) )
        self["list"].setList(l)

    def ok(self):
        # Save the highlighted animation and apply it immediately.
        current = self["list"].getCurrent()
        if current:
            key = current[1]
            config.misc.window_animation_default.value = key
            config.misc.window_animation_default.save()
            setAnimation_current(key)
        self.close()

    def keyclose(self):
        # Revert to the saved animation/speed (a preview may have changed them).
        setAnimation_current(config.misc.window_animation_default.value)
        setAnimation_speed(int(config.misc.window_animation_speed.value))
        self.close()

    def config(self):
        self.session.open(AnimationSetupConfig)

    def preview(self):
        # Temporarily un-pause animations and show the selected style for 3s.
        current = self["list"].getCurrent()
        if current:
            global g_animation_paused
            tmp = g_animation_paused
            g_animation_paused = False
            setAnimation_current(current[1])
            self.session.open(MessageBox, current[0], MessageBox.TYPE_INFO, timeout=3)
            g_animation_paused = tmp
def checkAttrib(self, paused):
    """Return True when the global pause flag equals *paused* and the
    screen's skin declares animationPaused="1" or "on"."""
    if g_animation_paused is not paused:
        return False
    attributes = self.skinAttributes
    if attributes is None:
        return False
    return any(attr == "animationPaused" and value in ("1", "on")
               for (attr, value) in attributes)
def screen_show(self):
    """Replacement for Screen.show: disables animation while paused and
    raises the pause flag when an animation-pausing screen is shown."""
    global g_animation_paused
    if g_animation_paused:
        setAnimation_current(0)  # index 0 == "Disable Animations"
    g_orig_show(self)
    if checkAttrib(self, False):
        g_animation_paused = True
def screen_doClose(self):
    """Replacement for Screen.doClose: restores the configured animation
    when the screen that paused animations goes away."""
    global g_animation_paused
    if checkAttrib(self, True):
        g_animation_paused = False
        setAnimation_current(config.misc.window_animation_default.value)
    g_orig_doClose(self)
def animationSetupMain(session, **kwargs):
    """Menu callback: open the animation selection screen."""
    session.open(AnimationSetupScreen)
def startAnimationSetup(menuid):
    """Return the 'Animations' menu entry, but only for the system menu."""
    if menuid == "system":
        return [( _("Animations"), animationSetupMain, "animation_setup", None)]
    return []
def sessionAnimationSetup(session, reason, **kwargs):
    """Session-start hook: apply the saved animation settings and
    monkey-patch Screen.show/doClose so screens can pause animations via
    the ``animationPaused`` skin attribute."""
    setAnimation_current(config.misc.window_animation_default.value)
    setAnimation_speed(int(config.misc.window_animation_speed.value))

    global g_orig_show, g_orig_doClose
    # Save the originals only once, even across session restarts, so we
    # never capture our own replacements.
    if g_orig_show is None:
        g_orig_show = Screen.show
    if g_orig_doClose is None:
        g_orig_doClose = Screen.doClose
    Screen.show = screen_show
    Screen.doClose = screen_doClose
def Plugins(**kwargs):
    """Enigma2 plugin registration hook: one menu entry plus one
    session-start hook."""
    menu_entry = PluginDescriptor(
        name = "Animations",
        description = "Setup UI animations",
        where = PluginDescriptor.WHERE_MENU,
        needsRestart = False,
        fnc = startAnimationSetup)
    session_hook = PluginDescriptor(
        where = PluginDescriptor.WHERE_SESSIONSTART,
        needsRestart = False,
        fnc = sessionAnimationSetup)
    return [menu_entry, session_hook]
| gpl-2.0 |
Azure/azure-linux-extensions | VMEncryption/main/oscrypto/rhel_68/RHEL68EncryptionStateMachine.py | 3 | 7277 | #!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
import inspect
import os
import sys
import traceback
from time import sleep
scriptdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
maindir = os.path.abspath(os.path.join(scriptdir, '../../'))
sys.path.append(maindir)
transitionsdir = os.path.abspath(os.path.join(scriptdir, '../../transitions'))
sys.path.append(transitionsdir)
from oscrypto import *
from encryptstates import *
from Common import *
from CommandExecutor import *
from DiskUtil import *
from transitions import *
class RHEL68EncryptionStateMachine(OSEncryptionStateMachine):
    """State machine driving in-place OS disk encryption on RHEL 6.8.
    States advance linearly:
    prereq -> selinux -> stripdown -> unmount_oldroot ->
    encrypt_block_device -> patch_boot_system -> completed,
    with unmount_oldroot retried (up to 10 attempts) and a reboot at the end.
    """
    states = [
        State(name='uninitialized'),
        State(name='prereq', on_enter='on_enter_state'),
        State(name='selinux', on_enter='on_enter_state'),
        State(name='stripdown', on_enter='on_enter_state'),
        State(name='unmount_oldroot', on_enter='on_enter_state'),
        State(name='encrypt_block_device', on_enter='on_enter_state'),
        State(name='patch_boot_system', on_enter='on_enter_state'),
        State(name='completed'),
    ]

    transitions = [
        {
            'trigger': 'skip_encryption',
            'source': 'uninitialized',
            'dest': 'completed'
        },
        {
            'trigger': 'enter_prereq',
            'source': 'uninitialized',
            'dest': 'prereq'
        },
        {
            'trigger': 'enter_selinux',
            'source': 'prereq',
            'dest': 'selinux',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'enter_stripdown',
            'source': 'selinux',
            'dest': 'stripdown',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'enter_unmount_oldroot',
            'source': 'stripdown',
            'dest': 'unmount_oldroot',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            # Self-transition used for unmount retries; no exit condition.
            'trigger': 'retry_unmount_oldroot',
            'source': 'unmount_oldroot',
            'dest': 'unmount_oldroot',
            'before': 'on_enter_state'
        },
        {
            'trigger': 'enter_encrypt_block_device',
            'source': 'unmount_oldroot',
            'dest': 'encrypt_block_device',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'enter_patch_boot_system',
            'source': 'encrypt_block_device',
            'dest': 'patch_boot_system',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'stop_machine',
            'source': 'patch_boot_system',
            'dest': 'completed',
            'conditions': 'should_exit_previous_state'
        },
    ]

    def on_enter_state(self):
        # Delegate state-entry handling to the base machine.
        super(RHEL68EncryptionStateMachine, self).on_enter_state()

    def should_exit_previous_state(self):
        # when this is called, self.state is still the "source" state in the transition
        return super(RHEL68EncryptionStateMachine, self).should_exit_previous_state()

    def __init__(self, hutil, distro_patcher, logger, encryption_environment):
        super(RHEL68EncryptionStateMachine, self).__init__(hutil, distro_patcher, logger, encryption_environment)

        # One handler object per non-terminal state.
        self.state_objs = {
            'prereq': PrereqState(self.context),
            'selinux': SelinuxState(self.context),
            'stripdown': StripdownState(self.context),
            'unmount_oldroot': UnmountOldrootState(self.context),
            'encrypt_block_device': EncryptBlockDeviceState(self.context),
            'patch_boot_system': PatchBootSystemState(self.context),
        }

        self.state_machine = Machine(model=self,
                                     states=RHEL68EncryptionStateMachine.states,
                                     transitions=RHEL68EncryptionStateMachine.transitions,
                                     initial='uninitialized')

    def start_encryption(self):
        """Run the full encryption sequence and reboot the VM at the end."""
        # If the osencrypt mapper device is already mounted, the OS volume
        # was encrypted on a previous run; skip straight to completed.
        proc_comm = ProcessCommunicator()
        self.command_executor.Execute(command_to_execute="mount",
                                      raise_exception_on_failure=True,
                                      communicator=proc_comm)
        if '/dev/mapper/osencrypt' in proc_comm.stdout:
            self.logger.log("OS volume is already encrypted")
            self.skip_encryption()
            self.log_machine_state()
            return

        self.log_machine_state()
        self.enter_prereq()
        self.log_machine_state()
        self.enter_selinux()
        self.log_machine_state()
        self.enter_stripdown()
        self.log_machine_state()

        # /oldroot may still be busy; retry the unmount up to 10 times,
        # reporting every failure through the handler utility and sleeping
        # between attempts.
        oldroot_unmounted_successfully = False
        attempt = 1
        while not oldroot_unmounted_successfully:
            self.logger.log("Attempt #{0} to unmount /oldroot".format(attempt))
            try:
                if attempt == 1:
                    self.enter_unmount_oldroot()
                elif attempt > 10:
                    raise Exception("Could not unmount /oldroot in 10 attempts")
                else:
                    self.retry_unmount_oldroot()
                self.log_machine_state()
            except Exception as e:
                message = "Attempt #{0} to unmount /oldroot failed with error: {1}, stack trace: {2}".format(attempt,
                                                                                                             e,
                                                                                                             traceback.format_exc())
                self.logger.log(msg=message)
                self.hutil.do_status_report(operation='EnableEncryptionOSVolume',
                                            status=CommonVariables.extension_error_status,
                                            status_code=str(CommonVariables.unmount_oldroot_error),
                                            message=message)
                sleep(10)
                if attempt > 10:
                    raise Exception(message)
            else:
                oldroot_unmounted_successfully = True
            finally:
                attempt += 1

        self.enter_encrypt_block_device()
        self.log_machine_state()
        self.enter_patch_boot_system()
        self.log_machine_state()
        self.stop_machine()
        self.log_machine_state()

        self._reboot()
| apache-2.0 |
punker76/SmartAlarmClock | SmartAlarmClock/www/temboo/Library/Google/Calendar/GetAllEvents.py | 2 | 4254 | # -*- coding: utf-8 -*-
###############################################################################
#
# GetAllEvents
# Retrieve data for all events in a specified calendar.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetAllEvents(Choreography):
    """Temboo Choreo wrapper: retrieves data for all events in a specified
    Google calendar."""
    def __init__(self, temboo_session):
        """
        Create a new instance of the GetAllEvents Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        Choreography.__init__(self, temboo_session, '/Library/Google/Calendar/GetAllEvents')

    def new_input_set(self):
        # Factory for this Choreo's input container.
        return GetAllEventsInputSet()

    def _make_result_set(self, result, path):
        # Internal: wrap a raw execution result.
        return GetAllEventsResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Internal: wrap an in-flight execution handle.
        return GetAllEventsChoreographyExecution(session, exec_id, path)
class GetAllEventsInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the GetAllEvents
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    # Generated-style setters: each simply forwards to InputSet._set_input.
    def set_AccessToken(self, value):
        """
        Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
        """
        InputSet._set_input(self, 'AccessToken', value)

    def set_CalendarID(self, value):
        """
        Set the value of the CalendarID input for this Choreo. ((required, string) The unique ID for the calendar with the events to retrieve.)
        """
        InputSet._set_input(self, 'CalendarID', value)

    def set_ClientID(self, value):
        """
        Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.)
        """
        InputSet._set_input(self, 'ClientID', value)

    def set_ClientSecret(self, value):
        """
        Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.)
        """
        InputSet._set_input(self, 'ClientSecret', value)

    def set_RefreshToken(self, value):
        """
        Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth Refresh Token used to generate a new access token when the original token is expired. Required unless providing a valid AccessToken.)
        """
        InputSet._set_input(self, 'RefreshToken', value)

    def set_ResponseFormat(self, value):
        """
        Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that response should be in. Can be set to xml or json. Defaults to json.)
        """
        InputSet._set_input(self, 'ResponseFormat', value)
class GetAllEventsResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the GetAllEvents Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """
    def getJSONFromString(self, str):
        # NOTE(review): the parameter name shadows the builtin ``str``;
        # kept as-is for backward compatibility with existing callers.
        return json.loads(str)

    def get_NewAccessToken(self):
        """
        Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
        """
        return self._output.get('NewAccessToken', None)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. (The response from Google. Corresponds to the ResponseFormat input. Defaults to JSON.)
        """
        return self._output.get('Response', None)
class GetAllEventsChoreographyExecution(ChoreographyExecution):
    """Execution handle for GetAllEvents; produces typed result sets."""
    def _make_result_set(self, response, path):
        return GetAllEventsResultSet(response, path)
| mit |
IptvBrasilGroup/Cleitonleonelcreton.repository | plugin.video.iptvbrondemand.PC/requestsX/compat.py | 101 | 2600 | # -*- coding: utf-8 -*-
"""
pythoncompat
"""
from .packages import chardet
import sys

# -------
# Pythons
# -------

# Syntax sugar.
_ver = sys.version_info

#: Python 2.x?
is_py2 = (_ver[0] == 2)

#: Python 3.x?
is_py3 = (_ver[0] == 3)

#: Python 3.0.x
is_py30 = (is_py3 and _ver[1] == 0)

#: Python 3.1.x
is_py31 = (is_py3 and _ver[1] == 1)

#: Python 3.2.x
is_py32 = (is_py3 and _ver[1] == 2)

#: Python 3.3.x
is_py33 = (is_py3 and _ver[1] == 3)

#: Python 3.4.x
is_py34 = (is_py3 and _ver[1] == 4)

#: Python 2.7.x
is_py27 = (is_py2 and _ver[1] == 7)

#: Python 2.6.x
is_py26 = (is_py2 and _ver[1] == 6)

#: Python 2.5.x
is_py25 = (is_py2 and _ver[1] == 5)

#: Python 2.4.x
is_py24 = (is_py2 and _ver[1] == 4)  # I'm assuming this is not by choice.

# ---------
# Platforms
# ---------

# Syntax sugar.
_ver = sys.version.lower()

is_pypy = ('pypy' in _ver)
is_jython = ('jython' in _ver)
is_ironpython = ('iron' in _ver)

# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))

# Windows-based system.
is_windows = 'win32' in str(sys.platform).lower()

# Standard Linux 2+ system.
is_linux = ('linux' in str(sys.platform).lower())
is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hpux' in str(sys.platform).lower())  # Complete guess.
# sys.platform is 'sunos5' on Solaris.  The previous check looked for the
# impossible substring 'solar==' and therefore could never be True.
is_solaris = ('sunos' in str(sys.platform).lower())
# Prefer simplejson (faster on py2 era interpreters) when available,
# falling back to the stdlib json module.
try:
    import simplejson as json
except (ImportError, SyntaxError):
    # simplejson does not support Python 3.2, it throws a SyntaxError
    # because of u'...' Unicode literals.
    import json
# ---------
# Specifics
# ---------

if is_py2:
    # Python 2: pull names from the split-out urllib/urlparse/cookie modules
    # and normalize the text/binary names to py3-style semantics
    # (str == text, bytes == binary).
    from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
    from urllib2 import parse_http_list
    import cookielib
    from Cookie import Morsel
    from StringIO import StringIO
    from .packages.urllib3.packages.ordered_dict import OrderedDict

    builtin_str = str
    bytes = str
    str = unicode
    basestring = basestring
    numeric_types = (int, long, float)

elif is_py3:
    # Python 3: everything lives in the stdlib under reorganized names;
    # the builtins already have the desired semantics.
    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
    from urllib.request import parse_http_list, getproxies, proxy_bypass
    from http import cookiejar as cookielib
    from http.cookies import Morsel
    from io import StringIO
    from collections import OrderedDict

    builtin_str = str
    str = str
    bytes = bytes
    basestring = (str, bytes)
    numeric_types = (int, float)
wanghq/goots | doc/aliyun-tablestore-python-sdk-2.0.9/examples/get_row.py | 4 | 1851 | # -*- coding: utf8 -*-
from example_config import *
from ots2 import *
import time
table_name = 'GetRowExample'
def create_table(ots_client):
    """Create the example table with integer primary keys (gid, uid).

    Reserved throughput is set to (0, 0) capacity units.
    """
    schema_of_primary_key = [('gid', 'INTEGER'), ('uid', 'INTEGER')]
    table_meta = TableMeta(table_name, schema_of_primary_key)
    reserved_throughput = ReservedThroughput(CapacityUnit(0, 0))
    ots_client.create_table(table_meta, reserved_throughput)
    print 'Table has been created.'
def delete_table(ots_client):
    """Delete the example table."""
    ots_client.delete_table(table_name)
    print 'Table \'%s\' has been deleted.' % table_name
def put_row(ots_client):
    """Insert one example row; fails if the row already exists."""
    primary_key = {'gid':1, 'uid':101}
    attribute_columns = {'name':'John', 'mobile':15100000000, 'address':'China', 'age':20}
    condition = Condition('EXPECT_NOT_EXIST') # Expect not exist: put it into table only when this row is not exist.
    consumed = ots_client.put_row(table_name, condition, primary_key, attribute_columns)
    print u'Write succeed, consume %s write cu.' % consumed.write
def get_row(ots_client):
    """Read back the row written by put_row and print selected attributes."""
    primary_key = {'gid':1, 'uid':101}
    columns_to_get = ['name', 'address', 'age'] # given a list of columns to get, or empty list if you want to get entire row.
    consumed, primary_key_columns, attribute_columns = ots_client.get_row(table_name, primary_key, columns_to_get)
    print u'Read succeed, consume %s read cu.' % consumed.read
    print u'Value of attribute \'name\': %s' % attribute_columns.get('name')
    print u'Value of attribute \'address\': %s' % attribute_columns.get('address')
    print u'Value of attribute \'age\': %s' % attribute_columns.get('age')
if __name__ == '__main__':
    # End-to-end demo: create the table, write one row, read it back, clean up.
    ots_client = OTSClient(OTS_ENDPOINT, OTS_ID, OTS_SECRET, OTS_INSTANCE)
    create_table(ots_client)
    time.sleep(3) # wait for table ready
    put_row(ots_client)
    get_row(ots_client)
    delete_table(ots_client)
| mit |
wavefrontHQ/python-client | wavefront_api_client/models/response_container_dashboard.py | 1 | 4347 | # coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ResponseContainerDashboard(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Pairs a ``Dashboard`` payload (``response``) with the request's
    ``ResponseStatus`` (``status``).

    Do not edit the class manually.
    """

    # Attribute name -> swagger type name.
    swagger_types = {
        'response': 'Dashboard',
        'status': 'ResponseStatus'
    }

    # Attribute name -> JSON key on the wire.
    attribute_map = {
        'response': 'response',
        'status': 'status'
    }

    def __init__(self, response=None, status=None):  # noqa: E501
        """ResponseContainerDashboard - a model defined in Swagger"""  # noqa: E501
        self._response = None
        self._status = None
        self.discriminator = None
        if response is not None:
            self.response = response
        # ``status`` is required; the setter rejects None.
        self.status = status

    @property
    def response(self):
        """Gets the response of this ResponseContainerDashboard.  # noqa: E501

        :return: The response of this ResponseContainerDashboard.  # noqa: E501
        :rtype: Dashboard
        """
        return self._response

    @response.setter
    def response(self, response):
        """Sets the response of this ResponseContainerDashboard.

        :param response: The response of this ResponseContainerDashboard.  # noqa: E501
        :type: Dashboard
        """
        self._response = response

    @property
    def status(self):
        """Gets the status of this ResponseContainerDashboard.  # noqa: E501

        :return: The status of this ResponseContainerDashboard.  # noqa: E501
        :rtype: ResponseStatus
        """
        return self._status

    @status.setter
    def status(self, status):
        """Sets the status of this ResponseContainerDashboard.

        :param status: The status of this ResponseContainerDashboard.  # noqa: E501
        :type: ResponseStatus
        """
        if status is None:
            raise ValueError("Invalid value for `status`, must not be `None`")  # noqa: E501
        self._status = status

    def to_dict(self):
        """Returns the model properties as a dict"""
        out = {}
        for name in self.swagger_types:
            val = getattr(self, name)
            if isinstance(val, list):
                out[name] = [
                    elem.to_dict() if hasattr(elem, "to_dict") else elem
                    for elem in val
                ]
            elif hasattr(val, "to_dict"):
                out[name] = val.to_dict()
            elif isinstance(val, dict):
                out[name] = {
                    k: (v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in val.items()
                }
            else:
                out[name] = val
        if issubclass(ResponseContainerDashboard, dict):
            for key, value in self.items():
                out[key] = value
        return out

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return (isinstance(other, ResponseContainerDashboard)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| apache-2.0 |
mitar/django | django/contrib/localflavor/hk/forms.py | 10 | 2434 | """
Hong Kong specific Form helpers
"""
from __future__ import absolute_import
import re
from django.core.validators import EMPTY_VALUES
from django.forms import CharField
from django.forms import ValidationError
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _
# Digit pattern: optional '852' country code, then 4+4 digits optionally
# separated by '-' or '.'.
hk_phone_digits_re = re.compile(r'^(?:852-?)?(\d{4})[-\.]?(\d{4})$')
# Emergency/special prefixes a subscriber number must not start with.
hk_special_numbers = ('999', '992', '112')
# Allowed leading digits for subscriber numbers.
hk_phone_prefixes = ('2', '3', '5', '6', '8', '9')
# Accepted input formats (used in the error message).
hk_formats = ['XXXX-XXXX', '852-XXXX-XXXX', '(+852) XXXX-XXXX',
              'XXXX XXXX', 'XXXXXXXX']
class HKPhoneNumberField(CharField):
    """
    Validate Hong Kong phone number.

    The input format can be either one of the followings:
    'XXXX-XXXX', '852-XXXX-XXXX', '(+852) XXXX-XXXX',
    'XXXX XXXX', or 'XXXXXXXX'.
    The output format is 'XXXX-XXXX'.

    Note: The phone number shall not start with 999, 992, or 112.
    And, it should start with either 2, 3, 5, 6, 8, or 9.

    Ref - http://en.wikipedia.org/wiki/Telephone_numbers_in_Hong_Kong
    """
    default_error_messages = {
        'disguise': _('Phone number should not start with ' \
                      'one of the followings: %s.' % \
                      ', '.join(hk_special_numbers)),
        'invalid': _('Phone number must be in one of the following formats: '
                     '%s.' % ', '.join(hk_formats)),
        'prefix': _('Phone number should start with ' \
                    'one of the followings: %s.' % \
                    ', '.join(hk_phone_prefixes)),
    }
    # NOTE: the no-op __init__ that only called super() has been removed;
    # CharField's constructor is inherited unchanged.

    def clean(self, value):
        """Normalize ``value`` to u'XXXX-XXXX' or raise ValidationError.

        Empty values are returned as u''.
        """
        super(HKPhoneNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        # Strip parentheses, whitespace and '+' before matching the digits.
        # BUG FIX: the pattern is now a raw string; the old non-raw literal
        # relied on invalid escape sequences ('\(', '\s', ...).
        value = re.sub(r'(\(|\)|\s+|\+)', '', smart_unicode(value))
        m = hk_phone_digits_re.search(value)
        if not m:
            raise ValidationError(self.error_messages['invalid'])
        value = u'%s-%s' % (m.group(1), m.group(2))
        # str.startswith accepts a tuple of prefixes, which replaces both the
        # explicit loop and the map()/any() combination of the original.
        if value.startswith(hk_special_numbers):
            raise ValidationError(self.error_messages['disguise'])
        if not value.startswith(hk_phone_prefixes):
            raise ValidationError(self.error_messages['prefix'])
        return value
| bsd-3-clause |
NoUsername/PrivateNotesExperimental | lib/django_openid_auth/tests/urls.py | 4 | 1649 | # django-openid-auth - OpenID integration for django.contrib.auth
#
# Copyright (C) 2009 Canonical Ltd.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from django.http import HttpResponse
from django.conf.urls.defaults import *
def get_user(request):
    """Return the authenticated user's username as a plain-text response."""
    return HttpResponse(request.user.username)

# Test URLconf: expose the helper view plus the django_openid_auth URLs.
urlpatterns = patterns('',
    (r'^getuser/$', get_user),
    (r'^openid/', include('django_openid_auth.urls')),
)
| agpl-3.0 |
geodrinx/gearthview | ext-libs/twisted/internet/_ssl.py | 53 | 1199 | # -*- test-case-name: twisted.test.test_ssl -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module implements helpers for switching to TLS on an existing transport.
@since: 11.1
"""
class _TLSDelayed(object):
"""
State tracking record for TLS startup parameters. Used to remember how
TLS should be started when starting it is delayed to wait for the output
buffer to be flushed.
@ivar bufferedData: A C{list} which contains all the data which was
written to the transport after an attempt to start TLS was made but
before the buffers outstanding at that time could be flushed and TLS
could really be started. This is appended to by the transport's
write and writeSequence methods until it is possible to actually
start TLS, then it is written to the TLS-enabled transport.
@ivar context: An SSL context factory object to use to start TLS.
@ivar extra: An extra argument to pass to the transport's C{startTLS}
method.
"""
def __init__(self, bufferedData, context, extra):
self.bufferedData = bufferedData
self.context = context
self.extra = extra
| gpl-3.0 |
code4futuredotorg/reeborg_tw | src/libraries/brython/Lib/genericpath.py | 727 | 3093 | """
Path operations common to more than one OS
Do not use directly. The OS specific modules import the appropriate
functions from this module themselves.
"""
import os
import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdir', 'isfile']
# Does a path exist?
# This is false for dangling symbolic links on systems that support them.
def exists(path):
    """Test whether a path exists.  Returns False for broken symbolic links"""
    try:
        os.stat(path)
        return True
    except os.error:
        return False
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path on systems that support symlinks
def isfile(path):
    """Test whether a path is a regular file"""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        return False
    return stat.S_ISREG(mode)
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir()
# can be true for the same path on systems that support symlinks
def isdir(s):
    """Return true if the pathname refers to an existing directory."""
    try:
        mode = os.stat(s).st_mode
    except os.error:
        return False
    return stat.S_ISDIR(mode)
def getsize(filename):
    """Return the size of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_size
def getmtime(filename):
    """Return the last modification time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_mtime
def getatime(filename):
    """Return the last access time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_atime
def getctime(filename):
    """Return the metadata change time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_ctime
# Return the longest prefix of all list elements.
def commonprefix(m):
    "Given a list of pathnames, returns the longest common leading component"
    if not m:
        return ''
    # The lexicographically smallest and largest entries bound all others,
    # so only those two need a character-by-character comparison.
    lo = min(m)
    hi = max(m)
    idx = 0
    for a, b in zip(lo, hi):
        if a != b:
            return lo[:idx]
        idx += 1
    return lo
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
# Generic implementation of splitext, to be parametrized with
# the separators
def _splitext(p, sep, altsep, extsep):
"""Split the extension from a pathname.
Extension is everything from the last dot to the end, ignoring
leading dots. Returns "(root, ext)"; ext may be empty."""
# NOTE: This code must work for text and bytes strings.
sepIndex = p.rfind(sep)
if altsep:
altsepIndex = p.rfind(altsep)
sepIndex = max(sepIndex, altsepIndex)
dotIndex = p.rfind(extsep)
if dotIndex > sepIndex:
# skip all leading dots
filenameIndex = sepIndex + 1
while filenameIndex < dotIndex:
if p[filenameIndex:filenameIndex+1] != extsep:
return p[:dotIndex], p[dotIndex:]
filenameIndex += 1
return p, p[:0]
| agpl-3.0 |
freifeld/cpabDiffeo | cpab/gpu/expm/main_cublas.py | 1 | 5368 | #!/usr/bin/env python
"""
Created on Wed Jul 15 13:46:42 2015
Author: Oren Freifeld
Email: freifeld@csail.mit.edu
"""
import numpy as np
from of.utils import *
from pycuda.compiler import SourceModule
from pycuda.driver import Context
from of.gpu import *
from scipy.linalg import expm
# CUDA C source for the batched 2x2 matrix-exponential kernel ``f`` below.
# The string (including the C comments inside it) is data compiled by pycuda
# at import time and is left byte-for-byte untouched.
# NOTE(review): the source hard-codes an absolute include path
# ("/home/freifeld/gpu_expm/tmp.h") -- presumably where the r8mat_* helpers
# live; this will only build on that machine.  Verify before deploying.
krnl="""
# include <stdlib.h>
# include <stdio.h>
# include <math.h>
# include <complex.h>
# include <time.h>
# include <string.h>
#include "/home/freifeld/gpu_expm/tmp.h"
/******************************************************************************/
__device__ double *r8mat_expm1 ( int n, double a[] )
/******************************************************************************/
/*
Purpose:
R8MAT_EXPM1 is essentially MATLAB's built-in matrix exponential algorithm.
Licensing:
This code is distributed under the GNU LGPL license.
Modified:
01 December 2011
Author:
Cleve Moler, Charles Van Loan
Reference:
Cleve Moler, Charles VanLoan,
Nineteen Dubious Ways to Compute the Exponential of a Matrix,
Twenty-Five Years Later,
SIAM Review,
Volume 45, Number 1, March 2003, pages 3-49.
Parameters:
Input, int N, the dimension of the matrix.
Input, double A[N*N], the matrix.
Output, double R8MAT_EXPM1[N*N], the estimate for exp(A).
*/
{
double *a2;
double a_norm;
double c;
double *d;
double *e;
int ee;
int k;
const double one = 1.0;
int p;
const int q = 6;
int s;
double t;
double *x;
a2 = r8mat_copy_new ( n, n, a );
a_norm = r8mat_norm_li ( n, n, a2 );
ee = ( int ) ( r8_log_2 ( a_norm ) ) + 1;
s = i4_max ( 0, ee + 1 );
t = 1.0 / pow ( 2.0, s );
r8mat_scale ( n, n, t, a2 );
x = r8mat_copy_new ( n, n, a2 );
c = 0.5;
e = r8mat_identity_new ( n );
r8mat_add ( n, n, one, e, c, a2, e );
d = r8mat_identity_new ( n );
r8mat_add ( n, n, one, d, -c, a2, d );
p = 1;
for ( k = 2; k <= q; k++ )
{
c = c * ( double ) ( q - k + 1 ) / ( double ) ( k * ( 2 * q - k + 1 ) );
r8mat_mm ( n, n, n, a2, x, x );
r8mat_add ( n, n, c, x, one, e, e );
if ( p )
{
r8mat_add ( n, n, c, x, one, d, d );
}
else
{
r8mat_add ( n, n, -c, x, one, d, d );
}
p = !p;
}
/*
E -> inverse(D) * E
*/
r8mat_minvm ( n, n, d, e, e );
/*
E -> E^(2*S)
*/
for ( k = 1; k <= s; k++ )
{
r8mat_mm ( n, n, n, e, e, e );
}
free ( a2 );
free ( d );
free ( x );
return e;
}
extern "C"{
__global__ void f(double* As,double* Ts,int N,int n)
{
int idx = threadIdx.x + blockIdx.x*blockDim.x;
if (idx >= N)
return;
double a,b,c,d;
double delta_tmp;
double delta;
double cosh_delta,sinh_delta,sinh_delta_over_delta;
double cos_delta,sin_delta,sin_delta_over_delta;
double exp_of_ave_of_a_and_d;
a=As[idx*2*2 ];
b=As[idx*2*2+1];
c=As[idx*2*2+2];
d=As[idx*2*2+3];
r8mat_expm1 (2, As );
delta_tmp = (a-d)*(a-d) + 4*b*c;
exp_of_ave_of_a_and_d = exp((a+d)/2);
if (delta_tmp == 0){
Ts[idx*2*2] = (1 + (a-d)/2) * exp_of_ave_of_a_and_d;
Ts[idx*2*2+1] = b * exp_of_ave_of_a_and_d;
Ts[idx*2*2+2] = c * exp_of_ave_of_a_and_d;
Ts[idx*2*2+3] = (1 - (a-d)/2) * exp_of_ave_of_a_and_d;
}
else if (delta_tmp >0){
delta = sqrt(delta_tmp) / 2;
cosh_delta = cosh(delta);
sinh_delta = sinh(delta);
sinh_delta_over_delta = sinh_delta / delta;
Ts[idx*2*2] = (cosh_delta + (a-d)/2 * sinh_delta_over_delta) * exp_of_ave_of_a_and_d;
Ts[idx*2*2+1] = b * sinh_delta_over_delta * exp_of_ave_of_a_and_d;
Ts[idx*2*2+2] = c * sinh_delta_over_delta * exp_of_ave_of_a_and_d;
Ts[idx*2*2+3] = (cosh_delta - (a-d)/2 * sinh_delta_over_delta) * exp_of_ave_of_a_and_d ;
}
else{
delta = sqrt(-delta_tmp) / 2 ;
cos_delta = cos(delta);
sin_delta = sin(delta);
sin_delta_over_delta = sin_delta / delta;
Ts[idx*2*2] = (cos_delta + (a-d)/2 * sin_delta_over_delta) * exp_of_ave_of_a_and_d;
Ts[idx*2*2+1] = b * sin_delta_over_delta * exp_of_ave_of_a_and_d;
Ts[idx*2*2+2] = c * sin_delta_over_delta * exp_of_ave_of_a_and_d;
Ts[idx*2*2+3] = (cos_delta - (a-d)/2 * sin_delta_over_delta) * exp_of_ave_of_a_and_d;
}
}
}
"""
# Ensure a CUDA context exists: if the caller has not created one already,
# importing pycuda.autoinit initializes a default device/context.
try:
    Context.get_device()
except:
    import pycuda.autoinit
# Compile the embedded CUDA source and fetch the batched-expm entry point.
# no_extern_c=True because the source wraps the kernel in extern "C" itself.
mod = SourceModule(krnl,no_extern_c=True)
f = mod.get_function("f")
# Threads per CUDA block (1024/2/2 == 256).
threadsPerBlock=1024/2/2
if __name__ == "__main__":
N = 500
n = 2
As = CpuGpuArray.zeros((N,n,n))
Ts = CpuGpuArray.zeros_like(As)
As.cpu[:] = np.random.standard_normal(As.shape)
As.cpu2gpu()
# print As.gpu
nBlocks = int(np.ceil(float(N) / float(threadsPerBlock)))
tic = time.clock()
f(As.gpu,Ts.gpu,np.int32(N),np.int32(n),grid=(nBlocks,1,1),block=(threadsPerBlock,1,1))
Ts.gpu2cpu()
Ts.cpu*=1
toc = time.clock()
print 'time (gpu)',toc-tic
print '---------------'
# print Ts.cpu
tic = time.clock()
Ts_scipy = map(expm,As.cpu)
toc = time.clock()
print 'time (gpu)',toc-tic
print "np.allclose(Ts_scipy,Ts.cpu) = ", np.allclose(Ts_scipy,Ts.cpu)
| mit |
dustin/wokkel | wokkel/test/test_pubsub.py | 2 | 99856 | # Copyright (c) 2003-2009 Ralph Meijer
# See LICENSE for details.
"""
Tests for L{wokkel.pubsub}
"""
from zope.interface import verify
from twisted.trial import unittest
from twisted.internet import defer
from twisted.words.xish import domish
from twisted.words.protocols.jabber import error
from twisted.words.protocols.jabber.jid import JID
from twisted.words.protocols.jabber.xmlstream import toResponse
from wokkel import data_form, disco, iwokkel, pubsub, shim
from wokkel.generic import parseXml
from wokkel.test.helpers import TestableRequestHandlerMixin, XmlStreamStub
NS_PUBSUB = 'http://jabber.org/protocol/pubsub'
NS_PUBSUB_CONFIG = 'http://jabber.org/protocol/pubsub#node_config'
NS_PUBSUB_ERRORS = 'http://jabber.org/protocol/pubsub#errors'
NS_PUBSUB_EVENT = 'http://jabber.org/protocol/pubsub#event'
NS_PUBSUB_OWNER = 'http://jabber.org/protocol/pubsub#owner'
NS_PUBSUB_META_DATA = 'http://jabber.org/protocol/pubsub#meta-data'
def calledAsync(fn):
    """
    Function wrapper that fires a deferred upon calling the given function.

    Returns a (deferred, wrapper) pair: invoking the wrapper calls C{fn} and
    fires the deferred with its result, or errbacks with the raised failure.
    """
    deferred = defer.Deferred()

    def wrapper(*args, **kwargs):
        try:
            outcome = fn(*args, **kwargs)
        except:
            deferred.errback()
        else:
            deferred.callback(outcome)

    return deferred, wrapper
class PubSubClientTest(unittest.TestCase):
timeout = 2
def setUp(self):
self.stub = XmlStreamStub()
self.protocol = pubsub.PubSubClient()
self.protocol.xmlstream = self.stub.xmlstream
self.protocol.connectionInitialized()
def test_interface(self):
"""
Do instances of L{pubsub.PubSubClient} provide L{iwokkel.IPubSubClient}?
"""
verify.verifyObject(iwokkel.IPubSubClient, self.protocol)
def test_eventItems(self):
"""
Test receiving an items event resulting in a call to itemsReceived.
"""
message = domish.Element((None, 'message'))
message['from'] = 'pubsub.example.org'
message['to'] = 'user@example.org/home'
event = message.addElement((NS_PUBSUB_EVENT, 'event'))
items = event.addElement('items')
items['node'] = 'test'
item1 = items.addElement('item')
item1['id'] = 'item1'
item2 = items.addElement('retract')
item2['id'] = 'item2'
item3 = items.addElement('item')
item3['id'] = 'item3'
def itemsReceived(event):
self.assertEquals(JID('user@example.org/home'), event.recipient)
self.assertEquals(JID('pubsub.example.org'), event.sender)
self.assertEquals('test', event.nodeIdentifier)
self.assertEquals([item1, item2, item3], event.items)
d, self.protocol.itemsReceived = calledAsync(itemsReceived)
self.stub.send(message)
return d
def test_eventItemsCollection(self):
"""
Test receiving an items event resulting in a call to itemsReceived.
"""
message = domish.Element((None, 'message'))
message['from'] = 'pubsub.example.org'
message['to'] = 'user@example.org/home'
event = message.addElement((NS_PUBSUB_EVENT, 'event'))
items = event.addElement('items')
items['node'] = 'test'
headers = shim.Headers([('Collection', 'collection')])
message.addChild(headers)
def itemsReceived(event):
self.assertEquals(JID('user@example.org/home'), event.recipient)
self.assertEquals(JID('pubsub.example.org'), event.sender)
self.assertEquals('test', event.nodeIdentifier)
self.assertEquals({'Collection': ['collection']}, event.headers)
d, self.protocol.itemsReceived = calledAsync(itemsReceived)
self.stub.send(message)
return d
def test_eventDelete(self):
"""
Test receiving a delete event resulting in a call to deleteReceived.
"""
message = domish.Element((None, 'message'))
message['from'] = 'pubsub.example.org'
message['to'] = 'user@example.org/home'
event = message.addElement((NS_PUBSUB_EVENT, 'event'))
delete = event.addElement('delete')
delete['node'] = 'test'
def deleteReceived(event):
self.assertEquals(JID('user@example.org/home'), event.recipient)
self.assertEquals(JID('pubsub.example.org'), event.sender)
self.assertEquals('test', event.nodeIdentifier)
d, self.protocol.deleteReceived = calledAsync(deleteReceived)
self.stub.send(message)
return d
def test_eventDeleteRedirect(self):
"""
Test receiving a delete event with a redirect URI.
"""
message = domish.Element((None, 'message'))
message['from'] = 'pubsub.example.org'
message['to'] = 'user@example.org/home'
event = message.addElement((NS_PUBSUB_EVENT, 'event'))
delete = event.addElement('delete')
delete['node'] = 'test'
uri = 'xmpp:pubsub.example.org?;node=test2'
delete.addElement('redirect')['uri'] = uri
def deleteReceived(event):
self.assertEquals(JID('user@example.org/home'), event.recipient)
self.assertEquals(JID('pubsub.example.org'), event.sender)
self.assertEquals('test', event.nodeIdentifier)
self.assertEquals(uri, event.redirectURI)
d, self.protocol.deleteReceived = calledAsync(deleteReceived)
self.stub.send(message)
return d
def test_event_purge(self):
"""
Test receiving a purge event resulting in a call to purgeReceived.
"""
message = domish.Element((None, 'message'))
message['from'] = 'pubsub.example.org'
message['to'] = 'user@example.org/home'
event = message.addElement((NS_PUBSUB_EVENT, 'event'))
items = event.addElement('purge')
items['node'] = 'test'
def purgeReceived(event):
self.assertEquals(JID('user@example.org/home'), event.recipient)
self.assertEquals(JID('pubsub.example.org'), event.sender)
self.assertEquals('test', event.nodeIdentifier)
d, self.protocol.purgeReceived = calledAsync(purgeReceived)
self.stub.send(message)
return d
def test_createNode(self):
"""
Test sending create request.
"""
def cb(nodeIdentifier):
self.assertEquals('test', nodeIdentifier)
d = self.protocol.createNode(JID('pubsub.example.org'), 'test')
d.addCallback(cb)
iq = self.stub.output[-1]
self.assertEquals('pubsub.example.org', iq.getAttribute('to'))
self.assertEquals('set', iq.getAttribute('type'))
self.assertEquals('pubsub', iq.pubsub.name)
self.assertEquals(NS_PUBSUB, iq.pubsub.uri)
children = list(domish.generateElementsQNamed(iq.pubsub.children,
'create', NS_PUBSUB))
self.assertEquals(1, len(children))
child = children[0]
self.assertEquals('test', child['node'])
response = toResponse(iq, 'result')
self.stub.send(response)
return d
def test_createNodeInstant(self):
"""
Test sending create request resulting in an instant node.
"""
def cb(nodeIdentifier):
self.assertEquals('test', nodeIdentifier)
d = self.protocol.createNode(JID('pubsub.example.org'))
d.addCallback(cb)
iq = self.stub.output[-1]
children = list(domish.generateElementsQNamed(iq.pubsub.children,
'create', NS_PUBSUB))
child = children[0]
self.assertFalse(child.hasAttribute('node'))
response = toResponse(iq, 'result')
command = response.addElement((NS_PUBSUB, 'pubsub'))
create = command.addElement('create')
create['node'] = 'test'
self.stub.send(response)
return d
def test_createNodeRenamed(self):
"""
Test sending create request resulting in renamed node.
"""
def cb(nodeIdentifier):
self.assertEquals('test2', nodeIdentifier)
d = self.protocol.createNode(JID('pubsub.example.org'), 'test')
d.addCallback(cb)
iq = self.stub.output[-1]
children = list(domish.generateElementsQNamed(iq.pubsub.children,
'create', NS_PUBSUB))
child = children[0]
self.assertEquals('test', child['node'])
response = toResponse(iq, 'result')
command = response.addElement((NS_PUBSUB, 'pubsub'))
create = command.addElement('create')
create['node'] = 'test2'
self.stub.send(response)
return d
def test_createNodeWithSender(self):
"""
Test sending create request from a specific JID.
"""
d = self.protocol.createNode(JID('pubsub.example.org'), 'test',
sender=JID('user@example.org'))
iq = self.stub.output[-1]
self.assertEquals('user@example.org', iq['from'])
response = toResponse(iq, 'result')
self.stub.send(response)
return d
def test_deleteNode(self):
"""
Test sending delete request.
"""
d = self.protocol.deleteNode(JID('pubsub.example.org'), 'test')
iq = self.stub.output[-1]
self.assertEquals('pubsub.example.org', iq.getAttribute('to'))
self.assertEquals('set', iq.getAttribute('type'))
self.assertEquals('pubsub', iq.pubsub.name)
self.assertEquals(NS_PUBSUB_OWNER, iq.pubsub.uri)
children = list(domish.generateElementsQNamed(iq.pubsub.children,
'delete', NS_PUBSUB_OWNER))
self.assertEquals(1, len(children))
child = children[0]
self.assertEquals('test', child['node'])
response = toResponse(iq, 'result')
self.stub.send(response)
return d
def test_deleteNodeWithSender(self):
"""
Test sending delete request.
"""
d = self.protocol.deleteNode(JID('pubsub.example.org'), 'test',
sender=JID('user@example.org'))
iq = self.stub.output[-1]
self.assertEquals('user@example.org', iq['from'])
response = toResponse(iq, 'result')
self.stub.send(response)
return d
def test_publish(self):
"""
Test sending publish request.
"""
item = pubsub.Item()
d = self.protocol.publish(JID('pubsub.example.org'), 'test', [item])
iq = self.stub.output[-1]
self.assertEquals('pubsub.example.org', iq.getAttribute('to'))
self.assertEquals('set', iq.getAttribute('type'))
self.assertEquals('pubsub', iq.pubsub.name)
self.assertEquals(NS_PUBSUB, iq.pubsub.uri)
children = list(domish.generateElementsQNamed(iq.pubsub.children,
'publish', NS_PUBSUB))
self.assertEquals(1, len(children))
child = children[0]
self.assertEquals('test', child['node'])
items = list(domish.generateElementsQNamed(child.children,
'item', NS_PUBSUB))
self.assertEquals(1, len(items))
self.assertIdentical(item, items[0])
response = toResponse(iq, 'result')
self.stub.send(response)
return d
def test_publishNoItems(self):
"""
Test sending publish request without items.
"""
d = self.protocol.publish(JID('pubsub.example.org'), 'test')
iq = self.stub.output[-1]
self.assertEquals('pubsub.example.org', iq.getAttribute('to'))
self.assertEquals('set', iq.getAttribute('type'))
self.assertEquals('pubsub', iq.pubsub.name)
self.assertEquals(NS_PUBSUB, iq.pubsub.uri)
children = list(domish.generateElementsQNamed(iq.pubsub.children,
'publish', NS_PUBSUB))
self.assertEquals(1, len(children))
child = children[0]
self.assertEquals('test', child['node'])
response = toResponse(iq, 'result')
self.stub.send(response)
return d
def test_publishWithSender(self):
"""
Test sending publish request from a specific JID.
"""
item = pubsub.Item()
d = self.protocol.publish(JID('pubsub.example.org'), 'test', [item],
JID('user@example.org'))
iq = self.stub.output[-1]
self.assertEquals('user@example.org', iq['from'])
response = toResponse(iq, 'result')
self.stub.send(response)
return d
def test_subscribe(self):
"""
Test sending subscription request.
"""
d = self.protocol.subscribe(JID('pubsub.example.org'), 'test',
JID('user@example.org'))
iq = self.stub.output[-1]
self.assertEquals('pubsub.example.org', iq.getAttribute('to'))
self.assertEquals('set', iq.getAttribute('type'))
self.assertEquals('pubsub', iq.pubsub.name)
self.assertEquals(NS_PUBSUB, iq.pubsub.uri)
children = list(domish.generateElementsQNamed(iq.pubsub.children,
'subscribe', NS_PUBSUB))
self.assertEquals(1, len(children))
child = children[0]
self.assertEquals('test', child['node'])
self.assertEquals('user@example.org', child['jid'])
response = toResponse(iq, 'result')
pubsub = response.addElement((NS_PUBSUB, 'pubsub'))
subscription = pubsub.addElement('subscription')
subscription['node'] = 'test'
subscription['jid'] = 'user@example.org'
subscription['subscription'] = 'subscribed'
self.stub.send(response)
return d
def test_subscribePending(self):
"""
Test sending subscription request that results in a pending
subscription.
"""
d = self.protocol.subscribe(JID('pubsub.example.org'), 'test',
JID('user@example.org'))
iq = self.stub.output[-1]
response = toResponse(iq, 'result')
command = response.addElement((NS_PUBSUB, 'pubsub'))
subscription = command.addElement('subscription')
subscription['node'] = 'test'
subscription['jid'] = 'user@example.org'
subscription['subscription'] = 'pending'
self.stub.send(response)
self.assertFailure(d, pubsub.SubscriptionPending)
return d
def test_subscribeUnconfigured(self):
"""
Test sending subscription request that results in an unconfigured
subscription.
"""
d = self.protocol.subscribe(JID('pubsub.example.org'), 'test',
JID('user@example.org'))
iq = self.stub.output[-1]
response = toResponse(iq, 'result')
command = response.addElement((NS_PUBSUB, 'pubsub'))
subscription = command.addElement('subscription')
subscription['node'] = 'test'
subscription['jid'] = 'user@example.org'
subscription['subscription'] = 'unconfigured'
self.stub.send(response)
self.assertFailure(d, pubsub.SubscriptionUnconfigured)
return d
def test_subscribeWithSender(self):
"""
Test sending subscription request from a specific JID.
"""
d = self.protocol.subscribe(JID('pubsub.example.org'), 'test',
JID('user@example.org'),
sender=JID('user@example.org'))
iq = self.stub.output[-1]
self.assertEquals('user@example.org', iq['from'])
response = toResponse(iq, 'result')
pubsub = response.addElement((NS_PUBSUB, 'pubsub'))
subscription = pubsub.addElement('subscription')
subscription['node'] = 'test'
subscription['jid'] = 'user@example.org'
subscription['subscription'] = 'subscribed'
self.stub.send(response)
return d
def test_unsubscribe(self):
"""
Test sending unsubscription request.
"""
d = self.protocol.unsubscribe(JID('pubsub.example.org'), 'test',
JID('user@example.org'))
iq = self.stub.output[-1]
self.assertEquals('pubsub.example.org', iq.getAttribute('to'))
self.assertEquals('set', iq.getAttribute('type'))
self.assertEquals('pubsub', iq.pubsub.name)
self.assertEquals(NS_PUBSUB, iq.pubsub.uri)
children = list(domish.generateElementsQNamed(iq.pubsub.children,
'unsubscribe', NS_PUBSUB))
self.assertEquals(1, len(children))
child = children[0]
self.assertEquals('test', child['node'])
self.assertEquals('user@example.org', child['jid'])
self.stub.send(toResponse(iq, 'result'))
return d
def test_unsubscribeWithSender(self):
"""
Test sending unsubscription request from a specific JID.
"""
d = self.protocol.unsubscribe(JID('pubsub.example.org'), 'test',
JID('user@example.org'),
sender=JID('user@example.org'))
iq = self.stub.output[-1]
self.assertEquals('user@example.org', iq['from'])
self.stub.send(toResponse(iq, 'result'))
return d
    def test_items(self):
        """
        Test sending items request.
        """
        def cb(items):
            # An empty <items/> response yields an empty list.
            self.assertEquals([], items)
        d = self.protocol.items(JID('pubsub.example.org'), 'test')
        d.addCallback(cb)
        # Verify the request stanza that went out.
        iq = self.stub.output[-1]
        self.assertEquals('pubsub.example.org', iq.getAttribute('to'))
        self.assertEquals('get', iq.getAttribute('type'))
        self.assertEquals('pubsub', iq.pubsub.name)
        self.assertEquals(NS_PUBSUB, iq.pubsub.uri)
        children = list(domish.generateElementsQNamed(iq.pubsub.children,
                                                      'items', NS_PUBSUB))
        self.assertEquals(1, len(children))
        child = children[0]
        self.assertEquals('test', child['node'])
        # Respond with an items element holding no items.
        response = toResponse(iq, 'result')
        items = response.addElement((NS_PUBSUB, 'pubsub')).addElement('items')
        items['node'] = 'test'
        self.stub.send(response)
        return d
    def test_itemsMaxItems(self):
        """
        Test sending items request, with limit on the number of items.
        """
        def cb(items):
            # Both returned item elements are handed back in order.
            self.assertEquals(2, len(items))
            self.assertEquals([item1, item2], items)
        d = self.protocol.items(JID('pubsub.example.org'), 'test', maxItems=2)
        d.addCallback(cb)
        iq = self.stub.output[-1]
        self.assertEquals('pubsub.example.org', iq.getAttribute('to'))
        self.assertEquals('get', iq.getAttribute('type'))
        self.assertEquals('pubsub', iq.pubsub.name)
        self.assertEquals(NS_PUBSUB, iq.pubsub.uri)
        children = list(domish.generateElementsQNamed(iq.pubsub.children,
                                                      'items', NS_PUBSUB))
        self.assertEquals(1, len(children))
        child = children[0]
        self.assertEquals('test', child['node'])
        # The maxItems argument maps to the 'max_items' attribute (XEP-0060).
        self.assertEquals('2', child['max_items'])
        # Respond with two items; cb above checks they are returned.
        response = toResponse(iq, 'result')
        items = response.addElement((NS_PUBSUB, 'pubsub')).addElement('items')
        items['node'] = 'test'
        item1 = items.addElement('item')
        item1['id'] = 'item1'
        item2 = items.addElement('item')
        item2['id'] = 'item2'
        self.stub.send(response)
        return d
def test_itemsWithSender(self):
"""
Test sending items request from a specific JID.
"""
d = self.protocol.items(JID('pubsub.example.org'), 'test',
sender=JID('user@example.org'))
iq = self.stub.output[-1]
self.assertEquals('user@example.org', iq['from'])
response = toResponse(iq, 'result')
items = response.addElement((NS_PUBSUB, 'pubsub')).addElement('items')
items['node'] = 'test'
self.stub.send(response)
return d
class PubSubRequestTest(unittest.TestCase):
    """
    Tests for parsing pubsub stanzas with L{pubsub.PubSubRequest.fromElement}.
    """
    def test_fromElementPublish(self):
        """
        Test parsing a publish request.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <publish node='test'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('publish', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
        # No <item/> children means an empty items list.
        self.assertEqual([], request.items)
    def test_fromElementPublishItems(self):
        """
        Test parsing a publish request with items.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <publish node='test'>
              <item id="item1"/>
              <item id="item2"/>
            </publish>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual(2, len(request.items))
        self.assertEqual(u'item1', request.items[0]["id"])
        self.assertEqual(u'item2', request.items[1]["id"])
    def test_fromElementPublishNoNode(self):
        """
        A publish request to the root node should raise an exception.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <publish/>
          </pubsub>
        </iq>
        """
        err = self.assertRaises(error.StanzaError,
                                pubsub.PubSubRequest.fromElement,
                                parseXml(xml))
        # bad-request with the pubsub-specific nodeid-required condition.
        self.assertEqual('bad-request', err.condition)
        self.assertEqual(NS_PUBSUB_ERRORS, err.appCondition.uri)
        self.assertEqual('nodeid-required', err.appCondition.name)
    def test_fromElementSubscribe(self):
        """
        Test parsing a subscription request.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <subscribe node='test' jid='user@example.org/Home'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('subscribe', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
        self.assertEqual(JID('user@example.org/Home'), request.subscriber)
    def test_fromElementSubscribeEmptyNode(self):
        """
        Test parsing a subscription request to the root node.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <subscribe jid='user@example.org/Home'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        # Missing node attribute maps to the empty (root) node identifier.
        self.assertEqual('', request.nodeIdentifier)
    def test_fromElementSubscribeNoJID(self):
        """
        Subscribe requests without a JID should raise a bad-request exception.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <subscribe node='test'/>
          </pubsub>
        </iq>
        """
        err = self.assertRaises(error.StanzaError,
                                pubsub.PubSubRequest.fromElement,
                                parseXml(xml))
        self.assertEqual('bad-request', err.condition)
        self.assertEqual(NS_PUBSUB_ERRORS, err.appCondition.uri)
        self.assertEqual('jid-required', err.appCondition.name)
    def test_fromElementUnsubscribe(self):
        """
        Test parsing an unsubscription request.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <unsubscribe node='test' jid='user@example.org/Home'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('unsubscribe', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
        self.assertEqual(JID('user@example.org/Home'), request.subscriber)
    def test_fromElementUnsubscribeNoJID(self):
        """
        Unsubscribe requests without a JID should raise a bad-request exception.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <unsubscribe node='test'/>
          </pubsub>
        </iq>
        """
        err = self.assertRaises(error.StanzaError,
                                pubsub.PubSubRequest.fromElement,
                                parseXml(xml))
        self.assertEqual('bad-request', err.condition)
        self.assertEqual(NS_PUBSUB_ERRORS, err.appCondition.uri)
        self.assertEqual('jid-required', err.appCondition.name)
    def test_fromElementOptionsGet(self):
        """
        Test parsing a request for getting subscription options.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <options node='test' jid='user@example.org/Home'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('optionsGet', request.verb)
    def test_fromElementOptionsSet(self):
        """
        Test parsing a request for setting subscription options.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <options node='test' jid='user@example.org/Home'>
              <x xmlns='jabber:x:data' type='submit'>
                <field var='FORM_TYPE' type='hidden'>
                  <value>http://jabber.org/protocol/pubsub#subscribe_options</value>
                </field>
                <field var='pubsub#deliver'><value>1</value></field>
              </x>
            </options>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('optionsSet', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
        self.assertEqual(JID('user@example.org/Home'), request.subscriber)
        # The submitted data form fields are exposed as a plain dict.
        self.assertEqual({'pubsub#deliver': '1'}, request.options)
    def test_fromElementOptionsSetCancel(self):
        """
        Test parsing a request for cancelling setting subscription options.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <options node='test' jid='user@example.org/Home'>
              <x xmlns='jabber:x:data' type='cancel'/>
            </options>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        # A cancelled form yields an empty options dict.
        self.assertEqual({}, request.options)
    def test_fromElementOptionsSetBadFormType(self):
        """
        An options set request with a form that is not of type 'submit' or
        'cancel' should be rejected with a bad-request error.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <options node='test' jid='user@example.org/Home'>
              <x xmlns='jabber:x:data' type='result'>
                <field var='FORM_TYPE' type='hidden'>
                  <value>http://jabber.org/protocol/pubsub#node_config</value>
                </field>
                <field var='pubsub#deliver'><value>1</value></field>
              </x>
            </options>
          </pubsub>
        </iq>
        """
        err = self.assertRaises(error.StanzaError,
                                pubsub.PubSubRequest.fromElement,
                                parseXml(xml))
        self.assertEqual('bad-request', err.condition)
        self.assertEqual(None, err.appCondition)
    def test_fromElementOptionsSetNoForm(self):
        """
        On a options set request a form is required.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <options node='test' jid='user@example.org/Home'/>
          </pubsub>
        </iq>
        """
        err = self.assertRaises(error.StanzaError,
                                pubsub.PubSubRequest.fromElement,
                                parseXml(xml))
        self.assertEqual('bad-request', err.condition)
        self.assertEqual(None, err.appCondition)
    def test_fromElementSubscriptions(self):
        """
        Test parsing a request for all subscriptions.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <subscriptions/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('subscriptions', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
    def test_fromElementAffiliations(self):
        """
        Test parsing a request for all affiliations.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <affiliations/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('affiliations', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
    def test_fromElementCreate(self):
        """
        Test parsing a request to create a node.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <create node='mynode'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('create', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('mynode', request.nodeIdentifier)
    def test_fromElementCreateInstant(self):
        """
        Test parsing a request to create an instant node.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <create/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        # Instant node creation requests carry no node identifier at all.
        self.assertIdentical(None, request.nodeIdentifier)
    def test_fromElementDefault(self):
        """
        Test parsing a request for the default node configuration.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <default/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('default', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        # Without a form, the node type defaults to 'leaf'.
        self.assertEqual('leaf', request.nodeType)
    def test_fromElementDefaultCollection(self):
        """
        Parsing a request for the default configuration extracts the node type.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <default>
              <x xmlns='jabber:x:data' type='submit'>
                <field var='FORM_TYPE' type='hidden'>
                  <value>http://jabber.org/protocol/pubsub#node_config</value>
                </field>
                <field var='pubsub#node_type'>
                  <value>collection</value>
                </field>
              </x>
            </default>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('collection', request.nodeType)
    def test_fromElementConfigureGet(self):
        """
        Test parsing a node configuration get request.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <configure node='test'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('configureGet', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
    def test_fromElementConfigureSet(self):
        """
        On a node configuration set request the Data Form is parsed.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <configure node='test'>
              <x xmlns='jabber:x:data' type='submit'>
                <field var='FORM_TYPE' type='hidden'>
                  <value>http://jabber.org/protocol/pubsub#node_config</value>
                </field>
                <field var='pubsub#deliver_payloads'><value>0</value></field>
                <field var='pubsub#persist_items'><value>1</value></field>
              </x>
            </configure>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('configureSet', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
        self.assertEqual({'pubsub#deliver_payloads': '0',
                          'pubsub#persist_items': '1'}, request.options)
    def test_fromElementConfigureSetCancel(self):
        """
        The node configuration is cancelled, so no options.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <configure node='test'>
              <x xmlns='jabber:x:data' type='cancel'/>
            </configure>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual({}, request.options)
    def test_fromElementConfigureSetBadFormType(self):
        """
        A node configuration set request with a form that is not of type
        'submit' or 'cancel' should be rejected with a bad-request error.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <configure node='test'>
              <x xmlns='jabber:x:data' type='result'>
                <field var='FORM_TYPE' type='hidden'>
                  <value>http://jabber.org/protocol/pubsub#node_config</value>
                </field>
                <field var='pubsub#deliver_payloads'><value>0</value></field>
                <field var='x-myfield'><value>1</value></field>
              </x>
            </configure>
          </pubsub>
        </iq>
        """
        err = self.assertRaises(error.StanzaError,
                                pubsub.PubSubRequest.fromElement,
                                parseXml(xml))
        self.assertEqual('bad-request', err.condition)
        self.assertEqual(None, err.appCondition)
    def test_fromElementConfigureSetNoForm(self):
        """
        On a node configuration set request a form is required.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <configure node='test'/>
          </pubsub>
        </iq>
        """
        err = self.assertRaises(error.StanzaError,
                                pubsub.PubSubRequest.fromElement,
                                parseXml(xml))
        self.assertEqual('bad-request', err.condition)
        self.assertEqual(None, err.appCondition)
    def test_fromElementItems(self):
        """
        Test parsing an items request.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <items node='test'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('items', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
        # No max_items attribute and no item children in the request.
        self.assertIdentical(None, request.maxItems)
        self.assertEqual([], request.itemIdentifiers)
    def test_fromElementRetract(self):
        """
        Test parsing a retract request.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <retract node='test'>
              <item id='item1'/>
              <item id='item2'/>
            </retract>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('retract', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
        self.assertEqual(['item1', 'item2'], request.itemIdentifiers)
    def test_fromElementPurge(self):
        """
        Test parsing a purge request.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <purge node='test'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('purge', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
    def test_fromElementDelete(self):
        """
        Test parsing a delete request.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <delete node='test'/>
          </pubsub>
        </iq>
        """
        request = pubsub.PubSubRequest.fromElement(parseXml(xml))
        self.assertEqual('delete', request.verb)
        self.assertEqual(JID('user@example.org'), request.sender)
        self.assertEqual(JID('pubsub.example.org'), request.recipient)
        self.assertEqual('test', request.nodeIdentifier)
class PubSubServiceTest(unittest.TestCase, TestableRequestHandlerMixin):
"""
Tests for L{pubsub.PubSubService}.
"""
    def setUp(self):
        """
        Set up a service with a stubbed xmlstream and a plain resource.
        """
        self.stub = XmlStreamStub()
        self.resource = pubsub.PubSubResource()
        self.service = pubsub.PubSubService(self.resource)
        # Route outgoing stanzas into the stub for inspection.
        self.service.send = self.stub.xmlstream.send
    def test_interface(self):
        """
        Do instances of L{pubsub.PubSubService} provide L{iwokkel.IPubSubService}?
        """
        verify.verifyObject(iwokkel.IPubSubService, self.service)
    def test_connectionMade(self):
        """
        Verify setup of observers in L{pubsub.connectionMade}.
        """
        requests = []
        def handleRequest(iq):
            requests.append(iq)
        self.service.xmlstream = self.stub.xmlstream
        self.service.handleRequest = handleRequest
        self.service.connectionMade()
        # Both namespaces times both iq types must each reach handleRequest.
        for namespace in (NS_PUBSUB, NS_PUBSUB_OWNER):
            for stanzaType in ('get', 'set'):
                iq = domish.Element((None, 'iq'))
                iq['type'] = stanzaType
                iq.addElement((namespace, 'pubsub'))
                self.stub.xmlstream.dispatch(iq)
        self.assertEqual(4, len(requests))
    def test_getDiscoInfo(self):
        """
        Test getDiscoInfo calls getNodeInfo and returns some minimal info.
        """
        def cb(info):
            discoInfo = disco.DiscoInfo()
            for item in info:
                discoInfo.append(item)
            self.assertIn(('pubsub', 'service'), discoInfo.identities)
            self.assertIn(disco.NS_DISCO_ITEMS, discoInfo.features)
        d = self.service.getDiscoInfo(JID('user@example.org/home'),
                                      JID('pubsub.example.org'), '')
        d.addCallback(cb)
        return d
    def test_getDiscoInfoNodeType(self):
        """
        Test getDiscoInfo with node type.
        """
        def cb(info):
            discoInfo = disco.DiscoInfo()
            for item in info:
                discoInfo.append(item)
            # The type returned by the resource becomes the disco identity.
            self.assertIn(('pubsub', 'collection'), discoInfo.identities)
        def getInfo(requestor, target, nodeIdentifier):
            return defer.succeed({'type': 'collection',
                                  'meta-data': {}})
        self.resource.getInfo = getInfo
        d = self.service.getDiscoInfo(JID('user@example.org/home'),
                                      JID('pubsub.example.org'), '')
        d.addCallback(cb)
        return d
    def test_getDiscoInfoMetaData(self):
        """
        Test getDiscoInfo with returned meta data.
        """
        def cb(info):
            discoInfo = disco.DiscoInfo()
            for item in info:
                discoInfo.append(item)
            self.assertIn(('pubsub', 'leaf'), discoInfo.identities)
            # Meta data is rendered as a disco extension form that also
            # carries the node type.
            self.assertIn(NS_PUBSUB_META_DATA, discoInfo.extensions)
            form = discoInfo.extensions[NS_PUBSUB_META_DATA]
            self.assertIn('pubsub#node_type', form.fields)
        def getInfo(requestor, target, nodeIdentifier):
            metaData = [{'var': 'pubsub#persist_items',
                         'label': 'Persist items to storage',
                         'value': True}]
            return defer.succeed({'type': 'leaf', 'meta-data': metaData})
        self.resource.getInfo = getInfo
        d = self.service.getDiscoInfo(JID('user@example.org/home'),
                                      JID('pubsub.example.org'), '')
        d.addCallback(cb)
        return d
    def test_getDiscoInfoResourceFeatures(self):
        """
        Test getDiscoInfo with the resource features.
        """
        def cb(info):
            discoInfo = disco.DiscoInfo()
            for item in info:
                discoInfo.append(item)
            # Short feature names are expanded to full pubsub feature URIs.
            self.assertIn('http://jabber.org/protocol/pubsub#publish',
                          discoInfo.features)
        self.resource.features = ['publish']
        d = self.service.getDiscoInfo(JID('user@example.org/home'),
                                      JID('pubsub.example.org'), '')
        d.addCallback(cb)
        return d
    def test_getDiscoItemsRoot(self):
        """
        Test getDiscoItems on the root node.
        """
        def getNodes(requestor, service, nodeIdentifier):
            return defer.succeed(['node1', 'node2'])
        def cb(items):
            # Each node identifier becomes a disco item on the service JID.
            self.assertEqual(2, len(items))
            item1, item2 = items
            self.assertEqual(JID('pubsub.example.org'), item1.entity)
            self.assertEqual('node1', item1.nodeIdentifier)
            self.assertEqual(JID('pubsub.example.org'), item2.entity)
            self.assertEqual('node2', item2.nodeIdentifier)
        self.resource.getNodes = getNodes
        d = self.service.getDiscoItems(JID('user@example.org/home'),
                                       JID('pubsub.example.org'),
                                       '')
        d.addCallback(cb)
        return d
    def test_getDiscoItemsRootHideNodes(self):
        """
        With hideNodes set, getDiscoItems on the root node returns no items
        and does not consult the resource.
        """
        def getNodes(requestor, service, nodeIdentifier):
            raise Exception("Unexpected call to getNodes")
        def cb(items):
            self.assertEqual([], items)
        self.service.hideNodes = True
        self.resource.getNodes = getNodes
        d = self.service.getDiscoItems(JID('user@example.org/home'),
                                       JID('pubsub.example.org'),
                                       '')
        d.addCallback(cb)
        return d
    def test_getDiscoItemsNonRoot(self):
        """
        Test getDiscoItems on a non-root node.
        """
        def getNodes(requestor, service, nodeIdentifier):
            return defer.succeed(['node1', 'node2'])
        def cb(items):
            self.assertEqual(2, len(items))
        self.resource.getNodes = getNodes
        d = self.service.getDiscoItems(JID('user@example.org/home'),
                                       JID('pubsub.example.org'),
                                       'test')
        d.addCallback(cb)
        return d
    def test_on_publish(self):
        """
        A publish request should result in L{PubSubService.publish} being
        called.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <publish node='test'/>
          </pubsub>
        </iq>
        """
        def publish(request):
            return defer.succeed(None)
        self.resource.publish = publish
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        return self.handleRequest(xml)
    def test_on_subscribe(self):
        """
        A successful subscription should return the current subscription.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <subscribe node='test' jid='user@example.org/Home'/>
          </pubsub>
        </iq>
        """
        def subscribe(request):
            return defer.succeed(pubsub.Subscription(request.nodeIdentifier,
                                                     request.subscriber,
                                                     'subscribed'))
        def cb(element):
            # The response payload mirrors the subscription state.
            self.assertEqual('pubsub', element.name)
            self.assertEqual(NS_PUBSUB, element.uri)
            subscription = element.subscription
            self.assertEqual(NS_PUBSUB, subscription.uri)
            self.assertEqual('test', subscription['node'])
            self.assertEqual('user@example.org/Home', subscription['jid'])
            self.assertEqual('subscribed', subscription['subscription'])
        self.resource.subscribe = subscribe
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        d = self.handleRequest(xml)
        d.addCallback(cb)
        return d
    def test_on_subscribeEmptyNode(self):
        """
        A successful subscription on root node should return no node attribute.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <subscribe jid='user@example.org/Home'/>
          </pubsub>
        </iq>
        """
        def subscribe(request):
            return defer.succeed(pubsub.Subscription(request.nodeIdentifier,
                                                     request.subscriber,
                                                     'subscribed'))
        def cb(element):
            self.assertFalse(element.subscription.hasAttribute('node'))
        self.resource.subscribe = subscribe
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        d = self.handleRequest(xml)
        d.addCallback(cb)
        return d
    def test_on_unsubscribe(self):
        """
        A successful unsubscription should return an empty response.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <unsubscribe node='test' jid='user@example.org/Home'/>
          </pubsub>
        </iq>
        """
        def unsubscribe(request):
            return defer.succeed(None)
        def cb(element):
            # No payload element at all in the result.
            self.assertIdentical(None, element)
        self.resource.unsubscribe = unsubscribe
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        d = self.handleRequest(xml)
        d.addCallback(cb)
        return d
    def test_on_optionsGet(self):
        """
        Getting subscription options is not supported.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <options node='test' jid='user@example.org/Home'/>
          </pubsub>
        </iq>
        """
        def cb(result):
            # feature-not-implemented with the pubsub 'unsupported' condition.
            self.assertEquals('feature-not-implemented', result.condition)
            self.assertEquals('unsupported', result.appCondition.name)
            self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
        d = self.handleRequest(xml)
        self.assertFailure(d, error.StanzaError)
        d.addCallback(cb)
        return d
    def test_on_optionsSet(self):
        """
        Setting subscription options is not supported.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <options node='test' jid='user@example.org/Home'>
              <x xmlns='jabber:x:data' type='submit'>
                <field var='FORM_TYPE' type='hidden'>
                  <value>http://jabber.org/protocol/pubsub#subscribe_options</value>
                </field>
                <field var='pubsub#deliver'><value>1</value></field>
              </x>
            </options>
          </pubsub>
        </iq>
        """
        def cb(result):
            self.assertEquals('feature-not-implemented', result.condition)
            self.assertEquals('unsupported', result.appCondition.name)
            self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
        d = self.handleRequest(xml)
        self.assertFailure(d, error.StanzaError)
        d.addCallback(cb)
        return d
    def test_on_subscriptions(self):
        """
        A subscriptions request should result in
        L{PubSubService.subscriptions} being called and the result prepared
        for the response.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <subscriptions/>
          </pubsub>
        </iq>
        """
        def subscriptions(request):
            subscription = pubsub.Subscription('test', JID('user@example.org'),
                                               'subscribed')
            return defer.succeed([subscription])
        def cb(element):
            self.assertEqual('pubsub', element.name)
            self.assertEqual(NS_PUBSUB, element.uri)
            self.assertEqual(NS_PUBSUB, element.subscriptions.uri)
            # One <subscription/> per returned Subscription object.
            children = list(element.subscriptions.elements())
            self.assertEqual(1, len(children))
            subscription = children[0]
            self.assertEqual('subscription', subscription.name)
            self.assertEqual(NS_PUBSUB, subscription.uri)
            self.assertEqual('user@example.org', subscription['jid'])
            self.assertEqual('test', subscription['node'])
            self.assertEqual('subscribed', subscription['subscription'])
        self.resource.subscriptions = subscriptions
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        d = self.handleRequest(xml)
        d.addCallback(cb)
        return d
    def test_on_affiliations(self):
        """
        An affiliations request should result in
        L{PubSubService.affiliations} being called and the result prepared
        for the response.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <affiliations/>
          </pubsub>
        </iq>
        """
        def affiliations(request):
            # Affiliations are (nodeIdentifier, affiliation) tuples.
            affiliation = ('test', 'owner')
            return defer.succeed([affiliation])
        def cb(element):
            self.assertEqual('pubsub', element.name)
            self.assertEqual(NS_PUBSUB, element.uri)
            self.assertEqual(NS_PUBSUB, element.affiliations.uri)
            children = list(element.affiliations.elements())
            self.assertEqual(1, len(children))
            affiliation = children[0]
            self.assertEqual('affiliation', affiliation.name)
            self.assertEqual(NS_PUBSUB, affiliation.uri)
            self.assertEqual('test', affiliation['node'])
            self.assertEqual('owner', affiliation['affiliation'])
        self.resource.affiliations = affiliations
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        d = self.handleRequest(xml)
        d.addCallback(cb)
        return d
    def test_on_create(self):
        """
        Replies to create node requests don't return the created node.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <create node='mynode'/>
          </pubsub>
        </iq>
        """
        def create(request):
            # Node identifier unchanged, so the reply carries no payload.
            return defer.succeed(request.nodeIdentifier)
        def cb(element):
            self.assertIdentical(None, element)
        self.resource.create = create
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        d = self.handleRequest(xml)
        d.addCallback(cb)
        return d
    def test_on_createChanged(self):
        """
        Replies to create node requests return the created node if changed.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <create node='mynode'/>
          </pubsub>
        </iq>
        """
        def create(request):
            # The service renamed the node, so the reply must report it.
            return defer.succeed(u'myrenamednode')
        def cb(element):
            self.assertEqual('pubsub', element.name)
            self.assertEqual(NS_PUBSUB, element.uri)
            self.assertEqual(NS_PUBSUB, element.create.uri)
            self.assertEqual(u'myrenamednode',
                             element.create.getAttribute('node'))
        self.resource.create = create
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        d = self.handleRequest(xml)
        d.addCallback(cb)
        return d
    def test_on_createInstant(self):
        """
        Replies to create instant node requests return the created node.
        """
        xml = """
        <iq type='set' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub'>
            <create/>
          </pubsub>
        </iq>
        """
        def create(request):
            # Instant node: the service picks the identifier.
            return defer.succeed(u'random')
        def cb(element):
            self.assertEqual('pubsub', element.name)
            self.assertEqual(NS_PUBSUB, element.uri)
            self.assertEqual(NS_PUBSUB, element.create.uri)
            self.assertEqual(u'random', element.create.getAttribute('node'))
        self.resource.create = create
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        d = self.handleRequest(xml)
        d.addCallback(cb)
        return d
    def test_on_default(self):
        """
        A default request should result in
        L{PubSubService.getDefaultConfiguration} being called.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <default/>
          </pubsub>
        </iq>
        """
        def getConfigurationOptions():
            return {
                "pubsub#persist_items":
                    {"type": "boolean",
                     "label": "Persist items to storage"},
                "pubsub#deliver_payloads":
                    {"type": "boolean",
                     "label": "Deliver payloads with event notifications"}
                }
        def default(request):
            return defer.succeed({})
        def cb(element):
            # The reply wraps a node_config data form in the owner namespace.
            self.assertEqual('pubsub', element.name)
            self.assertEqual(NS_PUBSUB_OWNER, element.uri)
            self.assertEqual(NS_PUBSUB_OWNER, element.default.uri)
            form = data_form.Form.fromElement(element.default.x)
            self.assertEqual(NS_PUBSUB_CONFIG, form.formNamespace)
        self.resource.getConfigurationOptions = getConfigurationOptions
        self.resource.default = default
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        d = self.handleRequest(xml)
        d.addCallback(cb)
        return d
    def test_on_defaultCollection(self):
        """
        Responses to default requests should depend on passed node type.
        """
        xml = """
        <iq type='get' to='pubsub.example.org'
                       from='user@example.org'>
          <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
            <default>
              <x xmlns='jabber:x:data' type='submit'>
                <field var='FORM_TYPE' type='hidden'>
                  <value>http://jabber.org/protocol/pubsub#node_config</value>
                </field>
                <field var='pubsub#node_type'>
                  <value>collection</value>
                </field>
              </x>
            </default>
          </pubsub>
        </iq>
        """
        def getConfigurationOptions():
            return {
                "pubsub#deliver_payloads":
                    {"type": "boolean",
                     "label": "Deliver payloads with event notifications"}
                }
        def default(request):
            return defer.succeed({})
        self.resource.getConfigurationOptions = getConfigurationOptions
        self.resource.default = default
        verify.verifyObject(iwokkel.IPubSubResource, self.resource)
        # Success of the request itself is the assertion here.
        return self.handleRequest(xml)
def test_on_defaultUnknownNodeType(self):
"""
A default request should result in
L{PubSubResource.default} being called.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<default>
<x xmlns='jabber:x:data' type='submit'>
<field var='FORM_TYPE' type='hidden'>
<value>http://jabber.org/protocol/pubsub#node_config</value>
</field>
<field var='pubsub#node_type'>
<value>unknown</value>
</field>
</x>
</default>
</pubsub>
</iq>
"""
def default(request):
self.fail("Unexpected call to getConfiguration")
def cb(result):
self.assertEquals('not-acceptable', result.condition)
self.resource.default = default
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_on_configureGet(self):
"""
On a node configuration get
requestL{PubSubResource.configureGet} is called and results in a
data form with the configuration.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<configure node='test'/>
</pubsub>
</iq>
"""
def getConfigurationOptions():
return {
"pubsub#persist_items":
{"type": "boolean",
"label": "Persist items to storage"},
"pubsub#deliver_payloads":
{"type": "boolean",
"label": "Deliver payloads with event notifications"},
"pubsub#owner":
{"type": "jid-single",
"label": "Owner of the node"}
}
def configureGet(request):
return defer.succeed({'pubsub#deliver_payloads': '0',
'pubsub#persist_items': '1',
'pubsub#owner': JID('user@example.org'),
'x-myfield': ['a', 'b']})
def cb(element):
self.assertEqual('pubsub', element.name)
self.assertEqual(NS_PUBSUB_OWNER, element.uri)
self.assertEqual(NS_PUBSUB_OWNER, element.configure.uri)
form = data_form.Form.fromElement(element.configure.x)
self.assertEqual(NS_PUBSUB_CONFIG, form.formNamespace)
fields = form.fields
self.assertIn('pubsub#deliver_payloads', fields)
field = fields['pubsub#deliver_payloads']
self.assertEqual('boolean', field.fieldType)
field.typeCheck()
self.assertEqual(False, field.value)
self.assertIn('pubsub#persist_items', fields)
field = fields['pubsub#persist_items']
self.assertEqual('boolean', field.fieldType)
field.typeCheck()
self.assertEqual(True, field.value)
self.assertIn('pubsub#owner', fields)
field = fields['pubsub#owner']
self.assertEqual('jid-single', field.fieldType)
field.typeCheck()
self.assertEqual(JID('user@example.org'), field.value)
self.assertNotIn('x-myfield', fields)
self.resource.getConfigurationOptions = getConfigurationOptions
self.resource.configureGet = configureGet
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
d = self.handleRequest(xml)
d.addCallback(cb)
return d
def test_on_configureSet(self):
"""
On a node configuration set request the Data Form is parsed and
L{PubSubResource.configureSet} is called with the passed options.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<configure node='test'>
<x xmlns='jabber:x:data' type='submit'>
<field var='FORM_TYPE' type='hidden'>
<value>http://jabber.org/protocol/pubsub#node_config</value>
</field>
<field var='pubsub#deliver_payloads'><value>0</value></field>
<field var='pubsub#persist_items'><value>1</value></field>
</x>
</configure>
</pubsub>
</iq>
"""
def getConfigurationOptions():
return {
"pubsub#persist_items":
{"type": "boolean",
"label": "Persist items to storage"},
"pubsub#deliver_payloads":
{"type": "boolean",
"label": "Deliver payloads with event notifications"}
}
def configureSet(request):
self.assertEqual({'pubsub#deliver_payloads': False,
'pubsub#persist_items': True}, request.options)
return defer.succeed(None)
self.resource.getConfigurationOptions = getConfigurationOptions
self.resource.configureSet = configureSet
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
return self.handleRequest(xml)
def test_on_configureSetCancel(self):
"""
The node configuration is cancelled,
L{PubSubResource.configureSet} not called.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<configure node='test'>
<x xmlns='jabber:x:data' type='cancel'>
<field var='FORM_TYPE' type='hidden'>
<value>http://jabber.org/protocol/pubsub#node_config</value>
</field>
</x>
</configure>
</pubsub>
</iq>
"""
def configureSet(request):
self.fail("Unexpected call to setConfiguration")
self.resource.configureSet = configureSet
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
return self.handleRequest(xml)
def test_on_configureSetIgnoreUnknown(self):
"""
On a node configuration set request unknown fields should be ignored.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<configure node='test'>
<x xmlns='jabber:x:data' type='submit'>
<field var='FORM_TYPE' type='hidden'>
<value>http://jabber.org/protocol/pubsub#node_config</value>
</field>
<field var='pubsub#deliver_payloads'><value>0</value></field>
<field var='x-myfield'><value>1</value></field>
</x>
</configure>
</pubsub>
</iq>
"""
def getConfigurationOptions():
return {
"pubsub#persist_items":
{"type": "boolean",
"label": "Persist items to storage"},
"pubsub#deliver_payloads":
{"type": "boolean",
"label": "Deliver payloads with event notifications"}
}
def configureSet(request):
self.assertEquals(['pubsub#deliver_payloads'],
request.options.keys())
self.resource.getConfigurationOptions = getConfigurationOptions
self.resource.configureSet = configureSet
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
return self.handleRequest(xml)
def test_on_configureSetBadFormType(self):
"""
On a node configuration set request unknown fields should be ignored.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<configure node='test'>
<x xmlns='jabber:x:data' type='result'>
<field var='FORM_TYPE' type='hidden'>
<value>http://jabber.org/protocol/pubsub#node_config</value>
</field>
<field var='pubsub#deliver_payloads'><value>0</value></field>
<field var='x-myfield'><value>1</value></field>
</x>
</configure>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('bad-request', result.condition)
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_on_items(self):
"""
On a items request, return all items for the given node.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<items node='test'/>
</pubsub>
</iq>
"""
def items(request):
return defer.succeed([pubsub.Item('current')])
def cb(element):
self.assertEqual(NS_PUBSUB, element.uri)
self.assertEqual(NS_PUBSUB, element.items.uri)
self.assertEqual(1, len(element.items.children))
item = element.items.children[-1]
self.assertTrue(domish.IElement.providedBy(item))
self.assertEqual('item', item.name)
self.assertEqual(NS_PUBSUB, item.uri)
self.assertEqual('current', item['id'])
self.resource.items = items
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
d = self.handleRequest(xml)
d.addCallback(cb)
return d
def test_on_retract(self):
"""
A retract request should result in L{PubSubResource.retract}
being called.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<retract node='test'>
<item id='item1'/>
<item id='item2'/>
</retract>
</pubsub>
</iq>
"""
def retract(request):
return defer.succeed(None)
self.resource.retract = retract
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
return self.handleRequest(xml)
def test_on_purge(self):
"""
A purge request should result in L{PubSubResource.purge} being
called.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<purge node='test'/>
</pubsub>
</iq>
"""
def purge(request):
return defer.succeed(None)
self.resource.purge = purge
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
return self.handleRequest(xml)
def test_on_delete(self):
"""
A delete request should result in L{PubSubResource.delete} being
called.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<delete node='test'/>
</pubsub>
</iq>
"""
def delete(request):
return defer.succeed(None)
self.resource.delete = delete
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
return self.handleRequest(xml)
def test_notifyDelete(self):
"""
Subscribers should be sent a delete notification.
"""
subscriptions = [JID('user@example.org')]
self.service.notifyDelete(JID('pubsub.example.org'), 'test',
subscriptions)
message = self.stub.output[-1]
self.assertEquals('message', message.name)
self.assertIdentical(None, message.uri)
self.assertEquals('user@example.org', message['to'])
self.assertEquals('pubsub.example.org', message['from'])
self.assertTrue(message.event)
self.assertEqual(NS_PUBSUB_EVENT, message.event.uri)
self.assertTrue(message.event.delete)
self.assertEqual(NS_PUBSUB_EVENT, message.event.delete.uri)
self.assertTrue(message.event.delete.hasAttribute('node'))
self.assertEqual('test', message.event.delete['node'])
def test_notifyDeleteRedirect(self):
"""
Subscribers should be sent a delete notification with redirect.
"""
redirectURI = 'xmpp:pubsub.example.org?;node=test2'
subscriptions = [JID('user@example.org')]
self.service.notifyDelete(JID('pubsub.example.org'), 'test',
subscriptions, redirectURI)
message = self.stub.output[-1]
self.assertEquals('message', message.name)
self.assertIdentical(None, message.uri)
self.assertEquals('user@example.org', message['to'])
self.assertEquals('pubsub.example.org', message['from'])
self.assertTrue(message.event)
self.assertEqual(NS_PUBSUB_EVENT, message.event.uri)
self.assertTrue(message.event.delete)
self.assertEqual(NS_PUBSUB_EVENT, message.event.delete.uri)
self.assertTrue(message.event.delete.hasAttribute('node'))
self.assertEqual('test', message.event.delete['node'])
self.assertTrue(message.event.delete.redirect)
self.assertEqual(NS_PUBSUB_EVENT, message.event.delete.redirect.uri)
self.assertTrue(message.event.delete.redirect.hasAttribute('uri'))
self.assertEqual(redirectURI, message.event.delete.redirect['uri'])
def test_on_subscriptionsGet(self):
"""
Getting subscription options is not supported.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<subscriptions/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('manage-subscriptions',
result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_on_subscriptionsSet(self):
"""
Setting subscription options is not supported.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<subscriptions/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('manage-subscriptions',
result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_on_affiliationsGet(self):
"""
Getting subscription options is not supported.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<affiliations/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('modify-affiliations',
result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_on_affiliationsSet(self):
"""
Setting subscription options is not supported.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<affiliations/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('modify-affiliations',
result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
class PubSubServiceWithoutResourceTest(unittest.TestCase, TestableRequestHandlerMixin):
def setUp(self):
self.stub = XmlStreamStub()
self.service = pubsub.PubSubService()
self.service.send = self.stub.xmlstream.send
def test_publish(self):
"""
Non-overridden L{PubSubService.publish} yields unsupported error.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<publish node='mynode'/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('publish', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_subscribe(self):
"""
Non-overridden L{PubSubService.subscribe} yields unsupported error.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<subscribe node='test' jid='user@example.org/Home'/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('subscribe', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_unsubscribe(self):
"""
Non-overridden L{PubSubService.unsubscribe} yields unsupported error.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<unsubscribe node='test' jid='user@example.org/Home'/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('subscribe', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_subscriptions(self):
"""
Non-overridden L{PubSubService.subscriptions} yields unsupported error.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<subscriptions/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retrieve-subscriptions',
result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_affiliations(self):
"""
Non-overridden L{PubSubService.affiliations} yields unsupported error.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<affiliations/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retrieve-affiliations',
result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_create(self):
"""
Non-overridden L{PubSubService.create} yields unsupported error.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<create node='mynode'/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('create-nodes', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_getDefaultConfiguration(self):
"""
Non-overridden L{PubSubService.getDefaultConfiguration} yields
unsupported error.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<default/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retrieve-default', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_getConfiguration(self):
"""
Non-overridden L{PubSubService.getConfiguration} yields unsupported
error.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<configure/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('config-node', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_setConfiguration(self):
"""
Non-overridden L{PubSubService.setConfiguration} yields unsupported
error.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<configure node='test'>
<x xmlns='jabber:x:data' type='submit'>
<field var='FORM_TYPE' type='hidden'>
<value>http://jabber.org/protocol/pubsub#node_config</value>
</field>
<field var='pubsub#deliver_payloads'><value>0</value></field>
<field var='pubsub#persist_items'><value>1</value></field>
</x>
</configure>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('config-node', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_items(self):
"""
Non-overridden L{PubSubService.items} yields unsupported error.
"""
xml = """
<iq type='get' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<items node='test'/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retrieve-items', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_retract(self):
"""
Non-overridden L{PubSubService.retract} yields unsupported error.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub'>
<retract node='test'>
<item id='item1'/>
<item id='item2'/>
</retract>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retract-items', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_purge(self):
"""
Non-overridden L{PubSubService.purge} yields unsupported error.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<purge node='test'/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('purge-nodes', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_delete(self):
"""
Non-overridden L{PubSubService.delete} yields unsupported error.
"""
xml = """
<iq type='set' to='pubsub.example.org'
from='user@example.org'>
<pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
<delete node='test'/>
</pubsub>
</iq>
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('delete-nodes', result.appCondition['feature'])
d = self.handleRequest(xml)
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
class PubSubResourceTest(unittest.TestCase):
def setUp(self):
self.resource = pubsub.PubSubResource()
def test_interface(self):
"""
Do instances of L{pubsub.PubSubResource} provide L{iwokkel.IPubSubResource}?
"""
verify.verifyObject(iwokkel.IPubSubResource, self.resource)
def test_getNodes(self):
"""
Default getNodes returns an empty list.
"""
def cb(nodes):
self.assertEquals([], nodes)
d = self.resource.getNodes(JID('user@example.org/home'),
JID('pubsub.example.org'),
'')
d.addCallback(cb)
return d
def test_publish(self):
"""
Non-overridden L{PubSubResource.publish} yields unsupported
error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('publish', result.appCondition['feature'])
d = self.resource.publish(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_subscribe(self):
"""
Non-overridden subscriptions yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('subscribe', result.appCondition['feature'])
d = self.resource.subscribe(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_unsubscribe(self):
"""
Non-overridden unsubscribe yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('subscribe', result.appCondition['feature'])
d = self.resource.unsubscribe(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_subscriptions(self):
"""
Non-overridden subscriptions yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retrieve-subscriptions',
result.appCondition['feature'])
d = self.resource.subscriptions(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_affiliations(self):
"""
Non-overridden affiliations yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retrieve-affiliations',
result.appCondition['feature'])
d = self.resource.affiliations(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_create(self):
"""
Non-overridden create yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('create-nodes', result.appCondition['feature'])
d = self.resource.create(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_default(self):
"""
Non-overridden default yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retrieve-default',
result.appCondition['feature'])
d = self.resource.default(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_configureGet(self):
"""
Non-overridden configureGet yields unsupported
error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('config-node', result.appCondition['feature'])
d = self.resource.configureGet(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_configureSet(self):
"""
Non-overridden configureSet yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('config-node', result.appCondition['feature'])
d = self.resource.configureSet(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_items(self):
"""
Non-overridden items yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retrieve-items', result.appCondition['feature'])
d = self.resource.items(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_retract(self):
"""
Non-overridden retract yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('retract-items', result.appCondition['feature'])
d = self.resource.retract(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_purge(self):
"""
Non-overridden purge yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('purge-nodes', result.appCondition['feature'])
d = self.resource.purge(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
def test_delete(self):
"""
Non-overridden delete yields unsupported error.
"""
def cb(result):
self.assertEquals('feature-not-implemented', result.condition)
self.assertEquals('unsupported', result.appCondition.name)
self.assertEquals(NS_PUBSUB_ERRORS, result.appCondition.uri)
self.assertEquals('delete-nodes', result.appCondition['feature'])
d = self.resource.delete(pubsub.PubSubRequest())
self.assertFailure(d, error.StanzaError)
d.addCallback(cb)
return d
| mit |
renesugar/arrow | python/pyarrow/tests/test_compute.py | 1 | 4441 | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import pytest
import pyarrow as pa
all_array_types = [
('bool', [True, False, False, True, True]),
('uint8', np.arange(5)),
('int8', np.arange(5)),
('uint16', np.arange(5)),
('int16', np.arange(5)),
('uint32', np.arange(5)),
('int32', np.arange(5)),
('uint64', np.arange(5, 10)),
('int64', np.arange(5, 10)),
('float', np.arange(0, 0.5, 0.1)),
('double', np.arange(0, 0.5, 0.1)),
('string', ['a', 'b', None, 'ddd', 'ee']),
('binary', [b'a', b'b', b'c', b'ddd', b'ee']),
(pa.binary(3), [b'abc', b'bcd', b'cde', b'def', b'efg']),
(pa.list_(pa.int8()), [[1, 2], [3, 4], [5, 6], None, [9, 16]]),
(pa.large_list(pa.int16()), [[1], [2, 3, 4], [5, 6], None, [9, 16]]),
(pa.struct([('a', pa.int8()), ('b', pa.int8())]), [
{'a': 1, 'b': 2}, None, {'a': 3, 'b': 4}, None, {'a': 5, 'b': 6}]),
]
@pytest.mark.parametrize('arrow_type', [
pa.int8(),
pa.int16(),
pa.int64(),
pa.uint8(),
pa.uint16(),
pa.uint64(),
pa.float32(),
pa.float64()
])
def test_sum(arrow_type):
arr = pa.array([1, 2, 3, 4], type=arrow_type)
assert arr.sum() == 10
@pytest.mark.parametrize(('ty', 'values'), all_array_types)
def test_take(ty, values):
arr = pa.array(values, type=ty)
for indices_type in [pa.uint8(), pa.int64()]:
indices = pa.array([0, 4, 2, None], type=indices_type)
result = arr.take(indices)
result.validate()
expected = pa.array([values[0], values[4], values[2], None], type=ty)
assert result.equals(expected)
# empty indices
indices = pa.array([], type=indices_type)
result = arr.take(indices)
result.validate()
expected = pa.array([], type=ty)
assert result.equals(expected)
indices = pa.array([2, 5])
with pytest.raises(IndexError):
arr.take(indices)
indices = pa.array([2, -1])
with pytest.raises(IndexError):
arr.take(indices)
def test_take_indices_types():
arr = pa.array(range(5))
for indices_type in ['uint8', 'int8', 'uint16', 'int16',
'uint32', 'int32', 'uint64', 'int64']:
indices = pa.array([0, 4, 2, None], type=indices_type)
result = arr.take(indices)
result.validate()
expected = pa.array([0, 4, 2, None])
assert result.equals(expected)
for indices_type in [pa.float32(), pa.float64()]:
indices = pa.array([0, 4, 2], type=indices_type)
with pytest.raises(TypeError):
arr.take(indices)
@pytest.mark.parametrize('ordered', [False, True])
def test_take_dictionary(ordered):
    """take() on a dictionary array keeps the dictionary and orderedness."""
    index_values, dictionary = [0, 1, 2, 0, 1, 2], ['a', 'b', 'c']
    dict_arr = pa.DictionaryArray.from_arrays(index_values, dictionary,
                                              ordered=ordered)
    taken = dict_arr.take(pa.array([0, 1, 3]))
    taken.validate()
    assert taken.to_pylist() == ['a', 'b', 'a']
    assert taken.dictionary.to_pylist() == dictionary
    assert taken.type.ordered is ordered
@pytest.mark.parametrize(('ty', 'values'), all_array_types)
def test_filter(ty, values):
    """filter() keeps the elements whose boolean mask entry is true."""
    arr = pa.array(values, type=ty)
    # A null mask entry propagates as a null output element.
    mask = pa.array([True, False, False, True, None])
    result = arr.filter(mask)
    result.validate()
    expected = pa.array([values[0], values[3], None], type=ty)
    assert result.equals(expected)
    # non-boolean dtype is rejected
    mask = pa.array([0, 1, 0, 1, 0])
    with pytest.raises(TypeError, match="got int64"):
        arr.filter(mask)
    # wrong length (mask must match the array length)
    mask = pa.array([True, False, True])
    with pytest.raises(ValueError, match="must have identical lengths"):
        arr.filter(mask)
| apache-2.0 |
magul/pywikibot-core | scripts/coordinate_import.py | 1 | 4411 | #!/usr/bin/python
# -*- coding: utf-8 -*-
r"""
Coordinate importing script.
Usage:
python pwb.py coordinate_import -lang:en -family:wikipedia \
-cat:Category:Coordinates_not_on_Wikidata
This will work on all pages in the category "coordinates not on Wikidata" and
will import the coordinates on these pages to Wikidata.
The data from the "GeoData" extension
(https://www.mediawiki.org/wiki/Extension:GeoData)
is used so that extension has to be setup properly. You can look at the
[[Special:Nearby]] page on your local Wiki to see if it's populated.
You can use any typical pagegenerator to provide with a list of pages:
python pwb.py coordinate_import -lang:it -family:wikipedia \
-namespace:0 -transcludes:Infobox_stazione_ferroviaria
The following command line parameters are supported:
-create Create items for pages without one.
¶ms;
"""
#
# (C) Multichill, 2014
# (C) Pywikibot team, 2013-2017
#
# Distributed under the terms of MIT License.
#
from __future__ import absolute_import, unicode_literals
import pywikibot
from pywikibot import pagegenerators, WikidataBot
from pywikibot.exceptions import CoordinateGlobeUnknownException
docuReplacements = {'¶ms;': pagegenerators.parameterHelp}
class CoordImportRobot(WikidataBot):
    """A bot to import coordinates to Wikidata."""
    def __init__(self, generator, **kwargs):
        """
        Constructor.
        @param generator: A generator that yields Page objects.
        """
        # Register the custom '-create' option (default off) before the
        # base class parses **kwargs, so it is recognized as valid.
        self.availableOptions['create'] = False
        super(CoordImportRobot, self).__init__(**kwargs)
        self.generator = generator
        self.cacheSources()
        # P625 is the Wikidata property for "coordinate location".
        self.prop = 'P625'
        self.create_missing_item = self.getOption('create')
    def has_coord_qualifier(self, claims):
        """
        Check if self.prop is used as property for a qualifier.
        @param claims: the Wikibase claims to check in
        @type claims: dict
        @return: the first property for which self.prop
            is used as qualifier, or None if none
        @rtype: unicode or None
        """
        for prop in claims:
            for claim in claims[prop]:
                if self.prop in claim.qualifiers:
                    return prop
        # Falls through to an implicit None when no qualifier matches.
    def treat_page_and_item(self, page, item):
        """Treat page/item."""
        # Primary coordinate from the GeoData extension; None when absent.
        coordinate = page.coordinates(primary_only=True)
        if not coordinate:
            return
        claims = item.get().get('claims')
        # Skip items that already carry a direct coordinate claim.
        if self.prop in claims:
            pywikibot.output(u'Item %s already contains coordinates (%s)'
                             % (item.title(), self.prop))
            return
        # Also skip when a coordinate appears as a qualifier elsewhere.
        prop = self.has_coord_qualifier(claims)
        if prop:
            pywikibot.output(u'Item %s already contains coordinates'
                             u' (%s) as qualifier for %s'
                             % (item.title(), self.prop, prop))
            return
        newclaim = pywikibot.Claim(self.repo, self.prop)
        newclaim.setTarget(coordinate)
        pywikibot.output(u'Adding %s, %s to %s' % (coordinate.lat,
                                                   coordinate.lon,
                                                   item.title()))
        try:
            item.addClaim(newclaim)
            # Attach an "imported from" source when one is configured
            # for the originating wiki.
            source = self.getSource(page.site)
            if source:
                newclaim.addSource(source, bot=True)
        except CoordinateGlobeUnknownException as e:
            pywikibot.output(u'Skipping unsupported globe: %s' % e.args)
def main(*args):
    """
    Process command line arguments and invoke bot.
    If args is an empty list, sys.argv is used.
    @param args: command line arguments
    @type args: list of unicode
    """
    # Strip the global pywikibot options, then feed the rest to the
    # page-generator factory; anything it does not consume may be '-create'.
    local_args = pywikibot.handle_args(args)
    gen_factory = pagegenerators.GeneratorFactory()
    create_new = False
    for arg in local_args:
        if not gen_factory.handleArg(arg) and arg == '-create':
            create_new = True
    generator = gen_factory.getCombinedGenerator(preload=True)
    if not generator:
        pywikibot.bot.suggest_help(missing_generator=True)
        return False
    CoordImportRobot(generator, create=create_new).run()
    return True
# Run the bot when this file is executed as a script.
if __name__ == "__main__":
    main()
| mit |
chhao91/QGIS | python/plugins/db_manager/db_plugins/html_elems.py | 12 | 5176 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS
Date : May 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : brush.tyler@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
class HtmlContent:
    """Wraps arbitrary data (string, renderable object, or a list/tuple
    of such items) and renders it as HTML."""
    def __init__(self, data):
        # Unwrap one level of nesting so HtmlContent(HtmlContent(x))
        # stores the same payload as HtmlContent(x).
        if isinstance(data, HtmlContent):
            data = data.data
        self.data = data
    def toHtml(self):
        if isinstance(self.data, (list, tuple)):
            # Render and concatenate every item of the sequence.
            return u''.join(HtmlContent(part).toHtml() for part in self.data)
        if hasattr(self.data, 'toHtml'):
            return self.data.toHtml()
        # Plain value: stringify and turn newlines into <br> tags.
        return unicode(self.data).replace("\n", "<br>")
    def hasContents(self):
        if isinstance(self.data, (list, tuple)):
            # Non-empty as soon as any item has contents.
            return any(part.hasContents() for part in self.data)
        if hasattr(self.data, 'hasContents'):
            return self.data.hasContents()
        return len(self.data) > 0
class HtmlElem:
    """Generic HTML element: a tag name, wrapped content and attributes."""
    def __init__(self, tag, data, attrs=None):
        self.tag = tag
        if not isinstance(data, HtmlContent):
            data = HtmlContent(data)
        self.data = data
        self.attrs = dict() if attrs is None else attrs
        # A reserved 'tag' attribute overrides the element's tag name
        # and is removed from the rendered attributes.
        if 'tag' in self.attrs:
            self.setTag(self.attrs.pop('tag'))
    def setTag(self, tag):
        self.tag = tag
    def getOriginalData(self):
        return self.data.data
    def setAttr(self, name, value):
        self.attrs[name] = value
    def getAttrsHtml(self):
        # Each attribute renders as ' key="value"' (leading space included).
        return u''.join(u' %s="%s"' % (k, v)
                        for k, v in self.attrs.iteritems())
    def openTagHtml(self):
        return u"<%s%s>" % (self.tag, self.getAttrsHtml())
    def closeTagHtml(self):
        return u"</%s>" % self.tag
    def toHtml(self):
        return self.openTagHtml() + self.data.toHtml() + self.closeTagHtml()
    def hasContents(self):
        return self.data.toHtml() != ""
class HtmlParagraph(HtmlElem):
    """A <p> (paragraph) element."""
    def __init__(self, data, attrs=None):
        HtmlElem.__init__(self, 'p', data, attrs)
class HtmlListItem(HtmlElem):
    """A <li> (list item) element."""
    def __init__(self, data, attrs=None):
        HtmlElem.__init__(self, 'li', data, attrs)
class HtmlList(HtmlElem):
    """A <ul> element; every entry is coerced to an HtmlListItem."""
    def __init__(self, items, attrs=None):
        wrapped = [item if isinstance(item, HtmlListItem) else HtmlListItem(item)
                   for item in items]
        HtmlElem.__init__(self, 'ul', wrapped, attrs)
class HtmlTableCol(HtmlElem):
    """A <td> table cell."""
    def __init__(self, data, attrs=None):
        HtmlElem.__init__(self, 'td', data, attrs)
    def closeTagHtml(self):
        # FIX INVALID BEHAVIOR: an empty cell as last table's cell break margins
        return u"&nbsp;%s" % HtmlElem.closeTagHtml(self)
class HtmlTableRow(HtmlElem):
    """A <tr> element; every entry is coerced to an HtmlTableCol."""
    def __init__(self, cols, attrs=None):
        wrapped = [col if isinstance(col, HtmlTableCol) else HtmlTableCol(col)
                   for col in cols]
        HtmlElem.__init__(self, 'tr', wrapped, attrs)
class HtmlTableHeader(HtmlTableRow):
    """A header row: same as HtmlTableRow but rendered with <th> cells."""
    def __init__(self, cols, attrs=None):
        HtmlTableRow.__init__(self, cols, attrs)
        # Retag every (already wrapped) cell from <td> to <th>.
        for c in self.getOriginalData():
            c.setTag('th')
class HtmlTable(HtmlElem):
    """A <table> element; every entry is coerced to an HtmlTableRow."""
    def __init__(self, rows, attrs=None):
        wrapped = [row if isinstance(row, HtmlTableRow) else HtmlTableRow(row)
                   for row in rows]
        HtmlElem.__init__(self, 'table', wrapped, attrs)
class HtmlWarning(HtmlContent):
    """Content prefixed with a warning icon (Qt resource path)."""
    def __init__(self, data):
        data = ['<img src=":/icons/warning-20px.png">&nbsp;&nbsp; ', data]
        HtmlContent.__init__(self, data)
class HtmlSection(HtmlContent):
    """A titled <div class="section"> block with optional body content."""
    def __init__(self, title, content=None):
        data = ['<div class="section"><h2>', title, '</h2>']
        if content is not None:
            data.extend(['<div>', content, '</div>'])
        data.append('</div>')
        HtmlContent.__init__(self, data)
| gpl-2.0 |
cdrooom/odoo | addons/l10n_be_coda/wizard/__init__.py | 8 | 1031 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_coda_import
| agpl-3.0 |
msdogan/HydropowerProject | Pumped_Storage/Pumped_Storage.py | 1 | 12823 | # -*- coding: utf-8 -*-
"""
Created on Wed Oct 12 10:50:15 2016
@author: msdogan
"""
# This code optimizes pump-storage hydropower facility operations.
# Mustafa Dogan
### 02/22/2017
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
import scipy as sp
from scipy.optimize import differential_evolution
import pandas as pd
import seaborn as sns
sns.set_style('whitegrid')
# This part is all about data (hourly marginal price (wholesale) $/MWh)
##*****************************************************************************
# this function creates price-duration curves
def dur_curve(load, duration, time_period):
    """Build a price-duration (exceedance) curve for one time window.

    Filters ``load`` (15-min CAISO LMP records) down to the requested
    window, writes both the chronologically ordered prices and the
    price-duration table to CSV files in the working directory, and
    returns them.

    :param load: DataFrame with columns INTERVALSTARTTIME_GMT,
        INTERVALENDTIME_GMT, OPR_DT ('YYYY-MM-DD'), OPR_HR and Price.
    :param duration: one of 'Monthly', 'Annual' or 'Daily'.
    :param time_period: month abbreviation (e.g. 'Aug') for 'Monthly',
        year string (e.g. '2016') for 'Annual', 'YYYY-MM-DD' for 'Daily'.
    :return: tuple (price_duration, prc_ordered) of DataFrames.
    :raises ValueError: if ``duration`` is not recognized.
    """
    # Local lookup tables so the function does not depend on the
    # module-level ``months``/``columns`` globals.
    month_abbrs = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                   'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    frame_columns = ['INTERVALSTARTTIME_GMT', 'INTERVALENDTIME_GMT',
                     'OPR_DT', 'OPR_HR', 'Price']
    data_raw, starts, ends, opr_dates, opr_hours = [], [], [], [], []

    def _keep(i):
        # Record row i of ``load`` in the output accumulators.
        data_raw.append(load.Price[i])
        starts.append(load.INTERVALSTARTTIME_GMT[i])
        ends.append(load.INTERVALENDTIME_GMT[i])
        opr_dates.append(load.OPR_DT[i])
        opr_hours.append(load.OPR_HR[i])

    if duration == 'Monthly':
        c_month = month_abbrs.index(time_period) + 1  # 1-based month number
        for i in range(len(load)):
            # BUG FIX: OPR_DT holds a zero-padded month string ('08');
            # the original compared it to the int month number, which
            # never matched and always produced an empty curve.
            if int(load.OPR_DT[i].split('-')[1]) == c_month:
                _keep(i)
    elif duration == 'Annual':
        for i in range(len(load)):
            if load.OPR_DT[i].split('-')[0] == str(time_period):
                _keep(i)
    elif duration == 'Daily':
        y, m, d = time_period.split('-')  # year, month, day
        for i in range(len(load)):
            if load.OPR_DT[i].split('-') == [y, m, d]:
                _keep(i)
    else:
        raise ValueError('please define correct duration and/or time period')

    # Chronologically ordered prices for the selected window.
    prc_data = [starts, ends, opr_dates, opr_hours, data_raw]
    prc_ordered = pd.DataFrame(np.array(prc_data).T, columns=frame_columns)
    prc_ordered = prc_ordered.sort_values(['INTERVALSTARTTIME_GMT'])
    prc_ordered.to_csv('price_ordered_' + str(time_period) + '.csv',
                       index=False, header=True)

    # Price-duration curve: sort prices numerically (the explicit float
    # conversion guards against lexicographic sorting of string prices)
    # and attach the exceedance frequency in percent (Weibull plotting
    # position: 100 * rank / (n + 1)).
    data = np.sort(np.asarray(data_raw, dtype=float))
    rank = sp.stats.rankdata(data, method='average')
    rank = rank[::-1]  # lowest price -> highest exceedance frequency
    prob = [100 * (rank[i] / (len(data) + 1)) for i in range(len(data))]
    price_duration = pd.DataFrame(np.array([data, prob]).T,
                                  columns=['Price', 'Frequency'],
                                  dtype='float')
    price_duration.to_csv('price_duration_' + str(time_period) + '.csv')
    return price_duration, prc_ordered
# Load Price data from OASIS (CAISO) http://oasis.caiso.com/mrioasis/logon.do
name = 'PRC_HASP_LMP.csv'
df = pd.read_csv(name, parse_dates=True) # read data and sort by time (gmt)
months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep','Oct', 'Nov', 'Dec']
P = [[],[],[],[],[]] # empty list to store required data
columns = ['INTERVALSTARTTIME_GMT', 'INTERVALENDTIME_GMT', 'OPR_DT', 'OPR_HR', 'Price'] # headers for data frame
# We are only interested in , start time, end time and LMP
for i in range(len(df)):
    if df.LMP_TYPE[i] == "LMP": # Unit is $/MWh
        P[0].append(df.INTERVALSTARTTIME_GMT[i]) # GMT start
        P[1].append(df.INTERVALENDTIME_GMT[i]) # GMT end
        P[2].append(df.OPR_DT[i]) # OPR Date
        P[3].append(df.OPR_HR[i]) # OPR hour
        P[4].append(df.MW[i]) # price $/MWh
# NOTE(review): np.array(P) coerces all five columns to a single common
# dtype (strings), so 'price' holds string values; downstream code must
# re-parse the Price column as float before numeric use.
P = np.array(P) # convert list to numpy array
price = pd.DataFrame(P.T, columns = columns) # convert list to data frame
# Examples of 'dur_curve' function use
# Annual Duration and Time
#duration = 'Annual'
#time = '2016'
# Monthly Duration and Time
# duration = 'Monthly'
# time = 'Aug'
# Daily Duration and Time
duration = 'Daily'
time = '2016-09-01'
price_duration, prc_ordered = dur_curve(price, duration, time)
##*****************************************************************************
# Equations
# power_hydro (Watt) = e * g (m/s2) * rho (kg/m3) * Q (m3/s) * head (m)
# power_pump (Watt) = 1/e * g (m/s2) * rho (kg/m3) * Q (m3/s) * head (m)
# generation (Wh) = power (Watt) * hour (h) = 1/(10**6) (MWh)
# revenue ($) = generation (MWh) * price ($/MWh)
# parameters
# Script-wide constants; passed explicitly into the objective functions.
e_g = 0.90 # generation efficiency
e_p = 0.85 # pumping efficiency
g = 9.81 # m/s2 - acceleration of gravity
rho = 1000 # kg/m3 - density of water
Q_g = 100 # m3/s - water flow for turbine
Q_p = 100 # m3/s - water flow for pumping
head_g = 100 # m - generating head
head_p = 100 # m - pumping head
# objective function to maximize - continuous function
def obj_func_cont(xx, e_g, e_p, g, rho, Q_g, Q_p, head_g, head_p, optimizing = True):
    """Net hourly profit of generating for the xx% highest-price share
    of time and pumping for the xx% lowest-price share.
    Integrates the fitted price-duration polynomial ``f`` (module
    global, x in percent exceedance). Returns -profit when
    ``optimizing`` is True so a minimizer maximizes profit.
    """
    H_T = int(price_duration.Frequency.max()) # total duration (100%)
    # Generation window: frequencies [0, xx) correspond to the highest prices.
    x1 = np.arange(0,xx)
    y1 = f(x1)
    # Pumping window: frequencies [H_T-xx, H_T) correspond to the lowest prices.
    x2 = np.arange(H_T-xx,H_T)
    y2 = f(x2)
    # $/h; the 10**6 divisor converts W to MW so price ($/MWh) applies.
    Power_Revenue = np.trapz(y1, x1, dx=0.1, axis = -1)*e_g*rho*g*Q_g*head_g/(10**6)
    Pumping_Cost = np.trapz(y2, x2, dx=0.1, axis = -1)/e_p*rho*g*Q_p*head_p/(10**6)
    z = Power_Revenue - Pumping_Cost # profit
    return -z if optimizing else z
# objective function to maximize - discrete
def obj_func_disc(xx, e_g, e_p, g, rho, Q_g, Q_p, head_g, head_p, optimizing = True):
    """Same profit objective as obj_func_cont, but approximating the
    integrals with a rectangle-rule sum over steps of dH percent.
    Returns -profit when ``optimizing`` is True (minimizer convention).
    """
    dH = 0.1 # discretization level
    H_T = int(price_duration.Frequency.max()) # total duration (100%)
    Power_Revenue = 0
    for gen_H in np.arange(0,xx,dH):
        Power_Revenue += f(gen_H)*e_g*rho*g*Q_g*head_g*dH/(10**6)
    Pumping_Cost = 0
    for pump_H in np.arange(H_T-xx,H_T,dH):
        Pumping_Cost += f(pump_H)/e_p*rho*g*Q_p*head_p*dH/(10**6)
    z = Power_Revenue - Pumping_Cost # profit
    return -z if optimizing else z
## objective function to maximize - discrete, no curve fitting
def obj_func_disc_nofit(xx, e_g, e_p, g, rho, Q_g, Q_p, head_g, head_p, optimizing = True):
    """Profit objective evaluated directly on the empirical
    price-duration points (no polynomial fit). See the sign-convention
    note before the return statement before changing anything here.
    """
    H_T = int(price_duration.Frequency.max()) # total duration (100%)
    prc_g, prc_p, freq_g, freq_p = [],[],[],[]
    # price_duration rows are sorted by ascending price, so Frequency
    # runs from ~100 down toward 0 across this iteration.
    for i,x in enumerate(price_duration.Frequency):
        if x < xx: # Power Generation price and duration
            prc_g.append(price_duration.Price[i]), freq_g.append(x)
        if H_T - xx < x < H_T: # Pumping price and duration
            prc_p.append(price_duration.Price[i]), freq_p.append(x)
    prc_g = np.array(prc_g) # generation price
    prc_p = np.array(prc_p) # pumping price
    freq_g = np.array(freq_g) # generation duration
    freq_p = np.array(freq_p) # pumping duration
    # Use numerical integration to integrate (Trapezoidal rule)
    # NOTE(review): freq_g/freq_p are in DESCENDING order, so both trapz
    # results are the NEGATIVE of the intended integrals.
    Power_Revenue = np.trapz(prc_g, freq_g, dx=0.1, axis = -1)*e_g*rho*g*Q_g*head_g/(10**6)
    Pumping_Cost = np.trapz(prc_p, freq_p, dx=0.1, axis = -1)/e_p*rho*g*Q_p*head_p/(10**6)
    # Consequently z below equals (cost - revenue) = -profit, and
    # returning z (not -z, unlike the sibling functions) when
    # optimizing=True makes the MINIMIZING differential_evolution
    # maximize profit. The two sign flips cancel; do not "fix" either
    # one in isolation.
    z = Power_Revenue - Pumping_Cost # -profit here; see note above
    return z if optimizing else -z
# fit a curve
# 'z' (coefficients) and 'f' (callable polynomial) become module globals
# consumed by the objective functions above.
z = np.polyfit(price_duration.Frequency, price_duration.Price, 9)
f = np.poly1d(z)
x_new = np.linspace(0, price_duration.Frequency.max(), 50)
y_new = f(x_new)
# normal distribution (cumulative, exceedance)
y_norm = np.linspace(0, price_duration.Price.max(), 50)
x_norm = sp.stats.norm(price_duration.Price.mean(), price_duration.Price.std()).sf(y_norm)*100 # survival function
# Reduced Analytical solution without integration: e_g * e_p = P(1-H_G)/P(H_G)
#for i,item in enumerate(price_duration.Frequency):
#    if (item + (price_duration.Frequency.max()-item)) <= 100: # total proability cannot exceed 1 (100%)
#        if round(f(price_duration.Frequency.max()-item)/f(item),2) == round(e_g * e_p,2):
#            H_G = item
#            print(H_G)
# differential evolution
# differential_evolution MINIMIZES; obj_func_disc_nofit's return value is
# arranged so that minimizing it maximizes profit.
result = differential_evolution(obj_func_disc_nofit, bounds=[(0,100)], args = (e_g, e_p, g, rho, Q_g, Q_p, head_g, head_p), maxiter=1000, seed = 1)
# NOTE(review): result.x is a 1-element ndarray, not a scalar.
H_G = result.x
# print price-duration data and curve fitting
# Figure 1: empirical price-duration scatter, normal-distribution and
# polynomial fits, plus vertical lines at the optimal generate/pump splits.
plt.scatter(price_duration.Frequency, price_duration.Price)
plt.xlim([0,price_duration.Frequency.max()])
plt.ylim([0,price_duration.Price.max()])
plt.plot(x_norm, y_norm, 'cyan', label = 'Normal Dist.', linewidth=2) # normal dist. plot
plt.plot(x_new, y_new, 'r', label = 'Curve fit') # curve fit plot
plt.ylabel('15 min price $/MWh', fontsize = 14)
plt.xlabel('duration %', fontsize = 14)
plt.title('Optimal Generating and Pumping Hours for ' + str(time), fontsize = 16)
plt.grid(False)
plt.axvline(x=H_G, linewidth=2, color='k', label = 'Generate Power', linestyle = 'dashed')
plt.axvline(x=price_duration.Frequency.max()-H_G, linewidth=2, color='b', label = 'Pump', linestyle = 'dashed')
plt.legend(fontsize = 12, loc=9)
plt.text(H_G-3,price_duration.Price.min()+(price_duration.Price.max()+price_duration.Price.min())/4, 'Generating Hours, >= ' + str(round(f(H_G),2)) + ' $/MWh', color = 'k', rotation = 'vertical')
plt.text(price_duration.Frequency.max()-H_G+1,price_duration.Price.min()+(price_duration.Price.max()+price_duration.Price.min())/4, 'Pumping Hours, <= ' + str(round(f(price_duration.Frequency.max()-H_G),2)) + ' $/MWh', color = 'b', rotation = 'vertical')
plt.text(5,5,'Generate', fontsize = 15, color = 'k')
plt.text(45,5,'Stop', fontsize = 15, color = 'r')
plt.text(83,5,'Pump', fontsize = 15, color = 'b')
plt.savefig('figure_pd_'+str(time)+'.pdf', transparent=True)
plt.show()
# enumeration
# Figure 2: brute-force sweep of the duration split to cross-check the
# differential-evolution optimum (uses the continuous objective, profit sign).
enum_h = np.arange(price_duration.Frequency.min(), price_duration.Frequency.max(), 1)
simulation =np.zeros(len(enum_h))
for i,item in enumerate(enum_h):
    simulation[i] = obj_func_cont(item, e_g, e_p, g, rho, Q_g, Q_p, head_g, head_p, optimizing = False)
index = np.where(simulation == simulation.max())[0]
plt.plot(enum_h, simulation, label = 'Net Profit (Gen-Pump)')
plt.axhline(y=0, linewidth=0.5, color='k')
plt.annotate('max', xy=(enum_h[index],simulation.max()), xytext=(enum_h[index],simulation.max()), arrowprops=dict(facecolor='black', shrink=0.5), fontsize = 12)
plt.title('Enumeration Line for ' + str(time), fontsize = 16)
plt.xlabel('duration %', fontsize = 14)
plt.ylabel('profit $/hour', fontsize = 14)
plt.legend(fontsize = 12, loc=1)
plt.grid(False)
plt.savefig('figure_enum_'+str(time)+'.pdf', transparent=True)
plt.show()
# Figure 3: chronological price series with generate/pump thresholds
# (f(H_G) is the generation cutoff price, f(max-H_G) the pumping cutoff).
prc = np.array(prc_ordered.Price)
gen_prc = np.zeros(len(prc)) # generating price time-series
pump_prc = np.zeros(len(prc)) # pumping price time-series
plot_gen_prc = np.zeros(len(prc)) # this is only for plotting purposes
for i,item in enumerate(prc):
    if float(item) >= f(H_G):
        gen_prc[i] = item # store generating price
        plot_gen_prc[i] = float(max(prc))
    if float(item) <= f(price_duration.Frequency.max()-H_G):
        pump_prc[i] = item # store pumping price
# # plot time-series data
plot_prc = [prc[i] for i in range(len(prc_ordered.Price))]
plt.bar(range(len(pump_prc)), pump_prc, align='center', color='b', label = 'Pumping Price', alpha=0.25)
plt.bar(range(len(plot_gen_prc)), plot_gen_prc, align='center', color='k', label = 'Generating Price', alpha=0.25)
plt.bar(range(len(gen_prc)), gen_prc, align='center', linewidth=0, color='white', alpha=1)
plt.plot(plot_prc, linewidth=1.5, color='r', label = 'Hourly Price') # use "marker = 'o'" to see points
plt.axhline(y=f(H_G), linewidth=2, color='k', label = 'Generate Power', linestyle = 'dashed')
plt.axhline(y=f(price_duration.Frequency.max()-H_G), linewidth=2, color='b', label = 'Pump', linestyle = 'dashed')
plt.legend(fontsize = 12, loc=9)
plt.xlim([0,len(prc_ordered.Price)])
plt.ylim([0,float(max(prc))])
plt.grid(False)
plt.title('15 Min Price Time-series for ' + str(time), fontsize = 16)
plt.ylabel('15 Min price $/MWh', fontsize = 14)
plt.xlabel('15 min', fontsize = 14)
plt.text(5,f(H_G)+1,'Generate', fontsize = 15, color = 'k')
plt.text(5,(f(H_G)-f(price_duration.Frequency.max()-H_G))/2+f(price_duration.Frequency.max()-H_G),'Stop', fontsize = 15,color = 'r')
plt.text(5,f(price_duration.Frequency.max()-H_G)-3,'Pump', fontsize = 15, color = 'b')
plt.savefig('figure_ts_'+str(time)+'.pdf', transparent=True)
plt.show()
print(result) # show EA solver message
print('')
print('*******Optimal Operation at '+ str(round(H_G,2)) + ' % of Total 15 minutes*******')
| mit |
sfanous/Pyecobee | pyecobee/objects/user.py | 1 | 14461 | """
This module is home to the User class
"""
from pyecobee.ecobee_object import EcobeeObject
class User(EcobeeObject):
"""
This class has been auto generated by scraping
https://www.ecobee.com/home/developer/api/documentation/v1/objects/User.shtml
Attribute names have been generated by converting ecobee property
names from camelCase to snake_case.
A getter property has been generated for each attribute.
A setter property has been generated for each attribute whose value
of READONLY is "no".
An __init__ argument without a default value has been generated if
the value of REQUIRED is "yes".
An __init__ argument with a default value of None has been generated
if the value of REQUIRED is "no".
"""
__slots__ = [
'_user_name',
'_display_name',
'_first_name',
'_last_name',
'_honorific',
'_register_date',
'_register_time',
'_default_thermostat_identifier',
'_management_ref',
'_utility_ref',
'_support_ref',
'_phone_number',
'_utility_time_zone',
'_management_time_zone',
'_is_residential',
'_is_developer',
'_is_management',
'_is_utility',
'_is_contractor',
]
attribute_name_map = {
'user_name': 'userName',
'userName': 'user_name',
'display_name': 'displayName',
'displayName': 'display_name',
'first_name': 'firstName',
'firstName': 'first_name',
'last_name': 'lastName',
'lastName': 'last_name',
'honorific': 'honorific',
'register_date': 'registerDate',
'registerDate': 'register_date',
'register_time': 'registerTime',
'registerTime': 'register_time',
'default_thermostat_identifier': 'defaultThermostatIdentifier',
'defaultThermostatIdentifier': 'default_thermostat_identifier',
'management_ref': 'managementRef',
'managementRef': 'management_ref',
'utility_ref': 'utilityRef',
'utilityRef': 'utility_ref',
'support_ref': 'supportRef',
'supportRef': 'support_ref',
'phone_number': 'phoneNumber',
'phoneNumber': 'phone_number',
'utility_time_zone': 'utilityTimeZone',
'utilityTimeZone': 'utility_time_zone',
'management_time_zone': 'managementTimeZone',
'managementTimeZone': 'management_time_zone',
'is_residential': 'isResidential',
'isResidential': 'is_residential',
'is_developer': 'isDeveloper',
'isDeveloper': 'is_developer',
'is_management': 'isManagement',
'isManagement': 'is_management',
'is_utility': 'isUtility',
'isUtility': 'is_utility',
'is_contractor': 'isContractor',
'isContractor': 'is_contractor',
}
attribute_type_map = {
'user_name': 'six.text_type',
'display_name': 'six.text_type',
'first_name': 'six.text_type',
'last_name': 'six.text_type',
'honorific': 'six.text_type',
'register_date': 'six.text_type',
'register_time': 'six.text_type',
'default_thermostat_identifier': 'six.text_type',
'management_ref': 'six.text_type',
'utility_ref': 'six.text_type',
'support_ref': 'six.text_type',
'phone_number': 'six.text_type',
'utility_time_zone': 'six.text_type',
'management_time_zone': 'six.text_type',
'is_residential': 'bool',
'is_developer': 'bool',
'is_management': 'bool',
'is_utility': 'bool',
'is_contractor': 'bool',
}
def __init__(
self,
user_name,
display_name=None,
first_name=None,
last_name=None,
honorific=None,
register_date=None,
register_time=None,
default_thermostat_identifier=None,
management_ref=None,
utility_ref=None,
support_ref=None,
phone_number=None,
utility_time_zone=None,
management_time_zone=None,
is_residential=None,
is_developer=None,
is_management=None,
is_utility=None,
is_contractor=None,
):
"""
Construct an User instance
"""
self._user_name = user_name
self._display_name = display_name
self._first_name = first_name
self._last_name = last_name
self._honorific = honorific
self._register_date = register_date
self._register_time = register_time
self._default_thermostat_identifier = default_thermostat_identifier
self._management_ref = management_ref
self._utility_ref = utility_ref
self._support_ref = support_ref
self._phone_number = phone_number
self._utility_time_zone = utility_time_zone
self._management_time_zone = management_time_zone
self._is_residential = is_residential
self._is_developer = is_developer
self._is_management = is_management
self._is_utility = is_utility
self._is_contractor = is_contractor
@property
def user_name(self):
"""
Gets the user_name attribute of this User instance.
:return: The value of the user_name attribute of this User
instance.
:rtype: six.text_type
"""
return self._user_name
@property
def display_name(self):
"""
Gets the display_name attribute of this User instance.
:return: The value of the display_name attribute of this User
instance.
:rtype: six.text_type
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name attribute of this User instance.
:param display_name: The display_name value to set for the
display_name attribute of this User instance.
:type: six.text_type
"""
self._display_name = display_name
@property
def first_name(self):
"""
Gets the first_name attribute of this User instance.
:return: The value of the first_name attribute of this User
instance.
:rtype: six.text_type
"""
return self._first_name
@first_name.setter
def first_name(self, first_name):
"""
Sets the first_name attribute of this User instance.
:param first_name: The first_name value to set for the
first_name attribute of this User instance.
:type: six.text_type
"""
self._first_name = first_name
@property
def last_name(self):
"""
Gets the last_name attribute of this User instance.
:return: The value of the last_name attribute of this User
instance.
:rtype: six.text_type
"""
return self._last_name
@last_name.setter
def last_name(self, last_name):
"""
Sets the last_name attribute of this User instance.
:param last_name: The last_name value to set for the last_name
attribute of this User instance.
:type: six.text_type
"""
self._last_name = last_name
@property
def honorific(self):
"""
Gets the honorific attribute of this User instance.
:return: The value of the honorific attribute of this User
instance.
:rtype: six.text_type
"""
return self._honorific
@property
def register_date(self):
"""
Gets the register_date attribute of this User instance.
:return: The value of the register_date attribute of this User
instance.
:rtype: six.text_type
"""
return self._register_date
@property
def register_time(self):
"""
Gets the register_time attribute of this User instance.
:return: The value of the register_time attribute of this User
instance.
:rtype: six.text_type
"""
return self._register_time
@property
def default_thermostat_identifier(self):
"""
Gets the default_thermostat_identifier attribute of this User
instance.
:return: The value of the default_thermostat_identifier
attribute of this User instance.
:rtype: six.text_type
"""
return self._default_thermostat_identifier
@property
def management_ref(self):
"""
Gets the management_ref attribute of this User instance.
:return: The value of the management_ref attribute of this User
instance.
:rtype: six.text_type
"""
return self._management_ref
@property
def utility_ref(self):
"""
Gets the utility_ref attribute of this User instance.
:return: The value of the utility_ref attribute of this User
instance.
:rtype: six.text_type
"""
return self._utility_ref
@property
def support_ref(self):
"""
Gets the support_ref attribute of this User instance.
:return: The value of the support_ref attribute of this User
instance.
:rtype: six.text_type
"""
return self._support_ref
@property
def phone_number(self):
"""
Gets the phone_number attribute of this User instance.
:return: The value of the phone_number attribute of this User
instance.
:rtype: six.text_type
"""
return self._phone_number
@phone_number.setter
def phone_number(self, phone_number):
"""
Sets the phone_number attribute of this User instance.
:param phone_number: The phone_number value to set for the
phone_number attribute of this User instance.
:type: six.text_type
"""
self._phone_number = phone_number
@property
def utility_time_zone(self):
"""
Gets the utility_time_zone attribute of this User instance.
:return: The value of the utility_time_zone attribute of this
User instance.
:rtype: six.text_type
"""
return self._utility_time_zone
@utility_time_zone.setter
def utility_time_zone(self, utility_time_zone):
"""
Sets the utility_time_zone attribute of this User instance.
:param utility_time_zone: The utility_time_zone value to set for
the utility_time_zone attribute of this User instance.
:type: six.text_type
"""
self._utility_time_zone = utility_time_zone
@property
def management_time_zone(self):
"""
Gets the management_time_zone attribute of this User instance.
:return: The value of the management_time_zone attribute of this
User instance.
:rtype: six.text_type
"""
return self._management_time_zone
@management_time_zone.setter
def management_time_zone(self, management_time_zone):
"""
Sets the management_time_zone attribute of this User instance.
:param management_time_zone: The management_time_zone value to
set for the management_time_zone attribute of this User
instance.
:type: six.text_type
"""
self._management_time_zone = management_time_zone
@property
def is_residential(self):
"""
Gets the is_residential attribute of this User instance.
:return: The value of the is_residential attribute of this User
instance.
:rtype: bool
"""
return self._is_residential
@is_residential.setter
def is_residential(self, is_residential):
"""
Sets the is_residential attribute of this User instance.
:param is_residential: The is_residential value to set for the
is_residential attribute of this User instance.
:type: bool
"""
self._is_residential = is_residential
@property
def is_developer(self):
"""
Gets the is_developer attribute of this User instance.
:return: The value of the is_developer attribute of this User
instance.
:rtype: bool
"""
return self._is_developer
@is_developer.setter
def is_developer(self, is_developer):
"""
Sets the is_developer attribute of this User instance.
:param is_developer: The is_developer value to set for the
is_developer attribute of this User instance.
:type: bool
"""
self._is_developer = is_developer
@property
def is_management(self):
"""
Gets the is_management attribute of this User instance.
:return: The value of the is_management attribute of this User
instance.
:rtype: bool
"""
return self._is_management
    @is_management.setter
    def is_management(self, is_management):
        """Set the is_management attribute of this User instance.

        :param is_management: New value for the is_management attribute.
        :type: bool
        """
        self._is_management = is_management
    @property
    def is_utility(self):
        """Get the is_utility attribute of this User instance.

        :return: The current value of the is_utility attribute.
        :rtype: bool
        """
        return self._is_utility
    @is_utility.setter
    def is_utility(self, is_utility):
        """Set the is_utility attribute of this User instance.

        :param is_utility: New value for the is_utility attribute.
        :type: bool
        """
        self._is_utility = is_utility
    @property
    def is_contractor(self):
        """Get the is_contractor attribute of this User instance.

        :return: The current value of the is_contractor attribute.
        :rtype: bool
        """
        return self._is_contractor
    @is_contractor.setter
    def is_contractor(self, is_contractor):
        """Set the is_contractor attribute of this User instance.

        :param is_contractor: New value for the is_contractor attribute.
        :type: bool
        """
        self._is_contractor = is_contractor
| mit |
hjanime/bcbio-nextgen | bcbio/variation/bamprep.py | 4 | 6720 | """Provide piped, no disk-IO, BAM preparation for variant calling.
Handles independent analysis of chromosome regions, allowing parallel
runs of this step.
"""
import os
import toolz as tz
from bcbio import bam, broad, utils
from bcbio.distributed.transaction import file_transaction, tx_tmpdir
from bcbio.pipeline import config_utils, shared
from bcbio.pipeline import datadict as dd
from bcbio.provenance import do
from bcbio.variation import realign
# ## GATK/Picard preparation
def region_to_gatk(region):
    """Convert an internal region into GATK's ``chrom:start-end`` string.

    Internal regions are (chromosome, 0-based start, end) sequences while
    GATK expects 1-based inclusive coordinates.  Anything that is not a
    list/tuple is assumed to already be a region string and passed through.
    """
    if not isinstance(region, (list, tuple)):
        return region
    chrom, start, end = region
    return "{0}:{1}-{2}".format(chrom, start + 1, end)
def _gatk_extract_reads_cl(data, region, prep_params, tmp_dir):
    """Build the GATK PrintReads commandline extracting reads for a region.

    Recalibration (BQSR) is folded into the same command when configured and
    a usable recalibration table is available.

    :return: command as a list of strings (program, JVM options, arguments).
    """
    args = ["-T", "PrintReads",
            "-L", region_to_gatk(region),
            "-R", data["sam_ref"],
            "-I", data["work_bam"]]
    max_depth = prep_params.get("max_depth")
    if max_depth:
        args.extend(["--downsample_to_coverage", str(max_depth)])
    recal = prep_params["recal"]
    if recal == "gatk":
        # Only apply BQSR when the table exists and actually has reads.
        if "prep_recal" in data and _recal_has_reads(data["prep_recal"]):
            args.extend(["-BQSR", data["prep_recal"]])
    elif recal:
        raise NotImplementedError("Recalibration method %s" % recal)
    jvm_opts = broad.get_gatk_framework_opts(data["config"],
                                             memscale={"direction": "decrease", "magnitude": 3})
    return [config_utils.get_program("gatk-framework", data["config"])] + jvm_opts + args
def _recal_has_reads(in_file):
with open(in_file) as in_handle:
return not in_handle.readline().startswith("# No aligned reads")
def _piped_input_cl(data, region, tmp_dir, out_base_file, prep_params):
    """Retrieve the commandline for streaming input into the preparation step.

    :return: (input BAM path, extraction command string)
    """
    extract_cmd = " ".join(_gatk_extract_reads_cl(data, region, prep_params, tmp_dir))
    input_bam = data["work_bam"]
    # GATK needs an index on the input BAM before it can subset by region.
    bam.index(input_bam, data["config"])
    return input_bam, extract_cmd
def _piped_realign_gatk(data, region, cl, out_base_file, tmp_dir, prep_params):
    """Perform realignment with GATK, using input commandline.

    GATK requires writing to disk and indexing before realignment, so the
    incoming extraction command ``cl`` is first materialized into a
    pre-alignment BAM, then an indel realignment command over that BAM is
    returned.

    :param cl: shell command producing the region-restricted reads.
    :return: (pre-alignment BAM path, indel realignment command string)
    """
    broad_runner = broad.runner_from_config(data["config"])
    # Intermediate file name derived from the final output name.
    pa_bam = "%s-prealign%s" % os.path.splitext(out_base_file)
    if not utils.file_exists(pa_bam):
        with file_transaction(data, pa_bam) as tx_out_file:
            cmd = "{cl} -o {tx_out_file}".format(**locals())
            do.run(cmd, "GATK pre-alignment {0}".format(region), data)
    bam.index(pa_bam, data["config"])
    # Identify realignment target intervals, then build the realigner command
    # restricted to the current region with the known variant resources.
    recal_file = realign.gatk_realigner_targets(broad_runner, pa_bam, data["sam_ref"], data["config"],
                                                region=region_to_gatk(region),
                                                known_vrns=dd.get_variation_resources(data))
    recal_cl = realign.gatk_indel_realignment_cl(broad_runner, pa_bam, data["sam_ref"],
                                                 recal_file, tmp_dir, region=region_to_gatk(region),
                                                 known_vrns=dd.get_variation_resources(data))
    return pa_bam, " ".join(recal_cl)
def _cleanup_tempfiles(data, tmp_files):
for tmp_file in tmp_files:
if tmp_file and tmp_file != data["work_bam"]:
for ext in [".bam", ".bam.bai", ".bai"]:
fname = "%s%s" % (os.path.splitext(tmp_file)[0], ext)
if os.path.exists(fname):
os.remove(fname)
def _piped_bamprep_region_gatk(data, region, prep_params, out_file, tmp_dir):
    """Perform semi-piped BAM preparation using Picard/GATK tools.

    Extracts region reads (with recalibration folded into the extraction
    command when configured), optionally realigns them, writes the prepared
    BAM to ``out_file`` and removes intermediates afterwards.
    """
    # NOTE(review): broad_runner is created but never used in this function.
    broad_runner = broad.runner_from_config(data["config"])
    cur_bam, cl = _piped_input_cl(data, region, tmp_dir, out_file, prep_params)
    if not prep_params["realign"]:
        prerecal_bam = None
    elif prep_params["realign"] == "gatk":
        prerecal_bam, cl = _piped_realign_gatk(data, region, cl, out_file, tmp_dir,
                                               prep_params)
    else:
        raise NotImplementedError("Realignment method: %s" % prep_params["realign"])
    with file_transaction(data, out_file) as tx_out_file:
        # GATK commands take -o for output; plain streaming commands redirect
        # with the shell's '>'.
        out_flag = ("-o" if (prep_params["realign"] == "gatk"
                             or not prep_params["realign"])
                    else ">")
        cmd = "{cl} {out_flag} {tx_out_file}".format(**locals())
        do.run(cmd, "GATK: realign {0}".format(region), data)
        _cleanup_tempfiles(data, [cur_bam, prerecal_bam])
# ## Shared functionality
def _get_prep_params(data):
"""Retrieve configuration parameters with defaults for preparing BAM files.
"""
algorithm = data["config"]["algorithm"]
recal_param = algorithm.get("recalibrate", True)
recal_param = "gatk" if recal_param is True else recal_param
realign_param = algorithm.get("realign", True)
realign_param = "gatk" if realign_param is True else realign_param
max_depth = algorithm.get("coverage_depth_max", 10000)
return {"recal": recal_param, "realign": realign_param,
"max_depth": max_depth}
def _need_prep(data):
    """Report whether recalibration or realignment is configured for the sample."""
    params = _get_prep_params(data)
    return params["recal"] or params["realign"]
def _piped_bamprep_region(data, region, out_file, tmp_dir):
    """Do the work of preparing the BAM input file on the selected region."""
    if not _need_prep(data):
        raise ValueError("No recalibration or realignment specified")
    prep_params = _get_prep_params(data)
    _piped_bamprep_region_gatk(data, region, prep_params, out_file, tmp_dir)
def piped_bamprep(data, region=None, out_file=None):
    """Perform full BAM preparation using pipes to avoid intermediate disk IO.

    Handles recalibration and realignment of original BAMs.  Special region
    markers ``nochrom`` and ``noanalysis`` copy reads through without
    preparation.

    :return: single-item list with the updated sample dictionary;
        ``work_bam`` points at the prepared BAM when preparation ran.
    """
    data["region"] = region
    if not _need_prep(data):
        # Nothing configured: hand back the input sample unchanged.
        return [data]
    else:
        utils.safe_makedir(os.path.dirname(out_file))
        if region[0] == "nochrom":
            # Reads without a chromosome assignment are written out directly.
            prep_bam = shared.write_nochr_reads(data["work_bam"], out_file, data["config"])
        elif region[0] == "noanalysis":
            # Regions excluded from analysis are copied through untouched.
            prep_bam = shared.write_noanalysis_reads(data["work_bam"], region[1], out_file,
                                                     data["config"])
        else:
            if not utils.file_exists(out_file):
                with tx_tmpdir(data) as tmp_dir:
                    _piped_bamprep_region(data, region, out_file, tmp_dir)
            prep_bam = out_file
        bam.index(prep_bam, data["config"])
        data["work_bam"] = prep_bam
        return [data]
ioannistsanaktsidis/invenio | modules/bibdocfile/lib/bibdocfile_config.py | 29 | 2827 | ## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import re
# Per-doctype permissions for the document file manager.  Each key names an
# action and maps to the doctypes it is allowed on ('*' = any doctype).  The
# running Invenio instance configuration wins; the literals below are the
# fallback defaults used when the variable is absent from invenio.config.
try:
    from invenio.config import CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_MISC
except ImportError:
    CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_MISC = {
        'can_revise_doctypes': ['*'],
        'can_comment_doctypes': ['*'],
        'can_describe_doctypes': ['*'],
        'can_delete_doctypes': ['*'],
        'can_keep_doctypes': ['*'],
        'can_rename_doctypes': ['*'],
        'can_add_format_to_doctypes': ['*'],
        'can_restrict_doctypes': ['*']}

# (internal doctype code, human readable label) pairs offered in the UI.
try:
    from invenio.config import CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_DOCTYPES
except ImportError:
    CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_DOCTYPES = [
        ('Main', 'Main document'),
        ('LaTeX', 'LaTeX'),
        ('Source', 'Source'),
        ('Additional', 'Additional File'),
        ('Audio', 'Audio file'),
        ('Video', 'Video file'),
        ('Script', 'Script'),
        ('Data', 'Data'),
        ('Figure', 'Figure'),
        ('Schema', 'Schema'),
        ('Graph', 'Graph'),
        ('Image', 'Image'),
        ('Drawing', 'Drawing'),
        ('Slides', 'Slides')]

# (restriction code, human readable label) pairs; '' means publicly visible.
try:
    from invenio.config import CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_RESTRICTIONS
except ImportError:
    CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_RESTRICTIONS = [
        ('', 'Public'),
        ('restricted', 'Restricted')]

## CFG_BIBDOCFILE_ICON_SUBFORMAT_RE -- a subformat is an Invenio concept to give
## file formats more semantic. For example "foo.gif;icon" has ".gif;icon"
## 'format', ".gif" 'superformat' and "icon" 'subformat'. That means that this
## particular format/instance of the "foo" document, not only is a ".gif" but
## is in the shape of an "icon", i.e. most probably it will be low-resolution.
## This configuration variable lets the administrator decide which implicit
## convention will be used to know which formats are meant to be used
## as an icon.
CFG_BIBDOCFILE_ICON_SUBFORMAT_RE = re.compile(r"icon.*")

## CFG_BIBDOCFILE_DEFAULT_ICON_SUBFORMAT -- this is the default subformat used
## when creating new icons.
CFG_BIBDOCFILE_DEFAULT_ICON_SUBFORMAT = "icon"
| gpl-2.0 |
nsat/gnuradio | gnuradio-runtime/python/pmt/qa_pmt_to_python.py | 48 | 1554 | #!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import unittest
import pmt
import pmt_to_python as pmt2py
class test_pmt_to_python(unittest.TestCase):
    """Tests for conversions between PMT objects and native Python types."""

    def test_pmt_from_double(self):
        """A double round-trips through PMT back to its Python value."""
        b = pmt.from_double(123765)
        self.assertEqual(pmt.to_python(b), 123765)
        # Smoke-check only: converting a Python sequence to PMT must not
        # raise (the original bound this to an unused local).
        pmt.to_pmt(range(5))

    def test_numpy_to_uvector_and_reverse(self):
        """A complex numpy array survives a round trip through a PMT uvector."""
        import numpy as np
        N = 100
        narr = np.ndarray(N, dtype=np.complex128)
        narr.real[:] = np.random.uniform(size=N)
        narr.imag[:] = np.random.uniform(size=N)
        uvector = pmt2py.numpy_to_uvector(narr)
        nparr = pmt2py.uvector_to_numpy(uvector)
        # assertEqual gives a useful failure message, unlike assertTrue(a==b).
        self.assertEqual(nparr.dtype, narr.dtype)
        self.assertTrue(np.alltrue(nparr == narr))
# Allow running this QA file directly as a script.
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
meantheory/dotfiles | dos/src/dos/roles/hashicorp_release.py | 1 | 3105 | import urllib.request
from pathlib import Path
import re
import structlog
from dos.providers import ZipInstaller
from dos.roles.role import Role
from dos.roles.role import Plans
# Module-level structured logger for this role.
log = structlog.get_logger()

# Error message template used when a mandatory configuration key is absent.
CONFIG_MISSING = 'required config value: "{0}"'
class HashicorpRelease(Role):
    """Role that installs a Hashicorp product from releases.hashicorp.com.

    Resolves the latest stable version when none is configured and plans a
    zip-based install of the product binary.
    """

    def ns(self):
        # Namespace identifying this role's configuration section.
        return "hashicorp_release"

    def role_setup(self):
        """Fill in defaults: url_base, download/install dirs, version, platform.

        :raises EnvironmentError: when the mandatory "product" key is missing.
        """
        self.cfg["url_base"] = "https://releases.hashicorp.com"
        if "product" not in self.cfg:
            raise EnvironmentError(CONFIG_MISSING.format("product"))
        # NOTE(review): this network lookup runs even when an explicit
        # "version" is configured, in which case its result is unused.
        latest_version = self.latest()
        if "download_dir" not in self.cfg:
            self.cfg["download_dir"] = Path("/tmp")
        if "install_dir" not in self.cfg:
            self.cfg["install_dir"] = Path("~/.local/dos/bin").expanduser()
        if "version" not in self.cfg:
            # product and url_base must already be in self.cfg
            self.cfg["version"] = latest_version
        if "platform" not in self.cfg:
            self.cfg["platform"] = "linux_amd64"

    def latest(self):
        """Scrape the product release index and return the newest stable version.

        Pre-release builds (versions carrying a suffix such as -beta) are
        skipped.  Assumes at least one stable release exists; an empty index
        would raise IndexError on versions[-1].
        """
        base = self.cfg["url_base"]
        product = self.cfg["product"]
        version_regex = r"([0-9]+\.[0-9]+\.[0-9]+)(-[a-zA-Z]*)?"
        version_matcher = re.compile(version_regex)
        url = f"{base}/{product}/"
        log.info("fetching latest version", product=product, url=url)
        html = urllib.request.urlopen(url).read().decode("utf8")
        log.debug("parsing fetched html")
        versions = list()
        for version, info in version_matcher.findall(html):
            if len(info) > 0:
                # A non-empty suffix marks a pre-release (e.g. 1.2.3-beta).
                log.debug(
                    f"ignore {product} version", product=product, version=version, status=info
                )
            else:
                versions.append(version)
        # Numeric component sort so e.g. 0.10.0 orders after 0.9.9.
        versions.sort(key=lambda version: [int(u) for u in version.split(".")])
        latest = versions[-1]
        log.info("latest version", product=product, version=latest)
        return latest

    def plan(self, version=None, platform="linux_amd64"):
        """Build the install plan: fetch the release zip and unpack the binary.

        NOTE(review): the ``version``/``platform`` parameters are ignored;
        the values resolved into ``self.cfg`` during role_setup are used, and
        the download dir is hardcoded to /tmp rather than cfg["download_dir"].
        """
        base = self.cfg["url_base"]
        product = self.cfg["product"]
        version = self.cfg["version"]
        platform = self.cfg["platform"]
        package_url = f"{base}/{product}/{version}/{product}_{version}_{platform}.zip"
        install_dir = self.cfg["install_dir"]
        installer = ZipInstaller(
            install_dir=install_dir,
            exe_name=product,
            exe_mod="0o755",
            package_url=package_url,
            download_dir=Path("/tmp"),
            zip_file_name=f"{product}_{version}.zip",
        )
        return Plans(installer)
# define variables and file paths
# dos_bin = config.dos_bin()
# zip_file = f'/tmp/{product}_{version}.zip'
# zip_dir = f'/tmp/{product}_{version}'
# tmp_exe = f'{zip_dir}/{product}'
# final = f'{dos_bin}/{product}'
# url = self.download_url(product, version, platform)
# HashicorpProduct.do_install(url, zip_file, zip_dir, tmp_exe, final)
# log.info(f'installation complete for: {product}')
| mit |
TeachAtTUM/edx-platform | lms/djangoapps/grades/admin.py | 19 | 1072 | """
Django admin page for grades models
"""
from config_models.admin import ConfigurationModelAdmin, KeyedConfigurationModelAdmin
from django.contrib import admin
from lms.djangoapps.grades.config.forms import CoursePersistentGradesAdminForm
from lms.djangoapps.grades.config.models import (
ComputeGradesSetting,
CoursePersistentGradesFlag,
PersistentGradesEnabledFlag
)
class CoursePersistentGradesAdmin(KeyedConfigurationModelAdmin):
    """
    Admin for enabling subsection grades on a course-by-course basis.

    Allows searching by course id.
    """
    # Custom form validates the course id before saving.
    form = CoursePersistentGradesAdminForm
    search_fields = ['course_id']
    fieldsets = (
        (None, {
            'fields': ('course_id', 'enabled'),
            'description': 'Enter a valid course id. If it is invalid, an error message will display.'
        }),
    )
# Per-course flag uses the custom admin with course-id search; the global
# flag and the compute-grades setting use the stock configuration admins.
admin.site.register(CoursePersistentGradesFlag, CoursePersistentGradesAdmin)
admin.site.register(PersistentGradesEnabledFlag, ConfigurationModelAdmin)
admin.site.register(ComputeGradesSetting, ConfigurationModelAdmin)
| agpl-3.0 |
0k/OpenUpgrade | addons/l10n_ro/res_partner.py | 309 | 2255 | # -*- encoding: utf-8 -*-
##############################################################################
#
# @author - Fekete Mihai <feketemihai@gmail.com>
# Copyright (C) 2011 TOTAL PC SYSTEMS (http://www.www.erpsystems.ro).
# Copyright (C) 2009 (<http://www.filsystem.ro>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class res_partner(osv.osv):
    """Romanian localization of res.partner: adds the NRC registry number."""
    _name = "res.partner"
    _inherit = "res.partner"
    _columns = {
        'nrc' : fields.char('NRC', help='Registration number at the Registry of Commerce'),
    }

    def _auto_init(self, cr, context=None):
        result = super(res_partner, self)._auto_init(cr, context=context)
        # Drop the uniqueness indexes on vat/nrc for commercial entities:
        # Romanian legislation does not make the NRC mandatory or unique
        # (some entities have none, and its format has changed several
        # times), so no static validation or uniqueness rule is possible --
        # it can only be checked online on the Ministry of Finance website.
        cr.execute("""
            DROP INDEX IF EXISTS res_partner_vat_uniq_for_companies;
            DROP INDEX IF EXISTS res_partner_nrc_uniq_for_companies;
        """)
        return result

    def _commercial_fields(self, cr, uid, context=None):
        # Propagate 'nrc' from the commercial partner to its child contacts.
        return super(res_partner, self)._commercial_fields(cr, uid, context=context) + ['nrc']
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
sassman/ansible-modules-core | database/mysql/mysql_db.py | 12 | 13375 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Mark Theunissen <mark.theunissen@gmail.com>
# Sponsored by Four Kitchens http://fourkitchens.com.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: mysql_db
short_description: Add or remove MySQL databases from a remote host.
description:
- Add or remove MySQL databases from a remote host.
version_added: "0.6"
options:
name:
description:
- name of the database to add or remove
required: true
default: null
aliases: [ db ]
login_user:
description:
- The username used to authenticate with
required: false
default: null
login_password:
description:
- The password used to authenticate with
required: false
default: null
login_host:
description:
- Host running the database
required: false
default: localhost
login_port:
description:
- Port of the MySQL server. Requires login_host be defined as other than localhost if login_port is used
required: false
default: 3306
login_unix_socket:
description:
- The path to a Unix domain socket for local connections
required: false
default: null
state:
description:
- The database state
required: false
default: present
choices: [ "present", "absent", "dump", "import" ]
collation:
description:
- Collation mode
required: false
default: null
encoding:
description:
- Encoding mode
required: false
default: null
target:
description:
- Location, on the remote host, of the dump file to read from or write to. Uncompressed SQL
files (C(.sql)) as well as bzip2 (C(.bz2)) and gzip (C(.gz)) compressed files are supported.
required: false
notes:
- Requires the MySQLdb Python package on the remote host. For Ubuntu, this
is as easy as apt-get install python-mysqldb. (See M(apt).)
- Both I(login_password) and I(login_user) are required when you are
passing credentials. If none are present, the module will attempt to read
the credentials from C(~/.my.cnf), and finally fall back to using the MySQL
default login of C(root) with no password.
requirements: [ ConfigParser ]
author: Mark Theunissen
'''
EXAMPLES = '''
# Create a new database with name 'bobdata'
- mysql_db: name=bobdata state=present
# Copy database dump file to remote host and restore it to database 'my_db'
- copy: src=dump.sql.bz2 dest=/tmp
- mysql_db: name=my_db state=import target=/tmp/dump.sql.bz2
'''
import ConfigParser
import os
import pipes
try:
import MySQLdb
except ImportError:
mysqldb_found = False
else:
mysqldb_found = True
# ===========================================
# MySQL module specific support methods.
#
def db_exists(cursor, db):
    """Return True when a database named ``db`` exists on the server.

    Underscores are escaped because '_' is a single-character wildcard in
    MySQL LIKE patterns.
    """
    escaped = db.replace("_", "\\_")
    matched = cursor.execute("SHOW DATABASES LIKE %s", (escaped,))
    return bool(matched)
def db_delete(cursor, db):
    """Drop the named database.

    The database name comes from user-supplied module parameters, so it is
    quoted with backticks and literal backticks inside the name are doubled
    (MySQL identifier escaping) to prevent breaking out of the identifier
    and injecting further SQL.

    :return: True (a failure raises through the cursor).
    """
    quoted_name = db.replace("`", "``")
    query = "DROP DATABASE `%s`" % quoted_name
    cursor.execute(query)
    return True
def db_dump(module, host, user, password, db_name, target, port, socket=None):
    """Dump ``db_name`` to ``target`` with mysqldump, compressing by extension.

    A Unix socket takes precedence over host/port.  ``.gz`` and ``.bz2``
    targets are piped through the matching compressor; anything else is
    written as plain SQL via shell redirection.

    :return: (returncode, stdout, stderr) from the shell invocation.
    """
    cmd = module.get_bin_path('mysqldump', True)
    # All user-controlled values are shell-quoted because the final command
    # runs through a shell (use_unsafe_shell) for the pipe/redirection.
    cmd += " --quick --user=%s --password=%s" % (pipes.quote(user), pipes.quote(password))
    if socket is not None:
        cmd += " --socket=%s" % pipes.quote(socket)
    else:
        cmd += " --host=%s --port=%s" % (pipes.quote(host), pipes.quote(port))
    cmd += " %s" % pipes.quote(db_name)
    if os.path.splitext(target)[-1] == '.gz':
        cmd = cmd + ' | gzip > ' + pipes.quote(target)
    elif os.path.splitext(target)[-1] == '.bz2':
        cmd = cmd + ' | bzip2 > ' + pipes.quote(target)
    else:
        cmd += " > %s" % pipes.quote(target)
    rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True)
    return rc, stdout, stderr
def db_import(module, host, user, password, db_name, target, port, socket=None):
    """Restore ``db_name`` from the dump file ``target`` with the mysql client.

    ``.gz``/``.bz2`` dumps are decompressed in place first, imported, then
    recompressed so the original artifact is left as found; plain files are
    fed straight to the client via shell redirection.

    :return: (returncode, stdout, stderr) of the first failing or the final
        command; calls module.fail_json on missing files or tools.
    """
    if not os.path.exists(target):
        return module.fail_json(msg="target %s does not exist on the host" % target)
    cmd = module.get_bin_path('mysql', True)
    # Shell-quote all user-controlled values; the command runs via a shell.
    cmd += " --user=%s --password=%s" % (pipes.quote(user), pipes.quote(password))
    if socket is not None:
        cmd += " --socket=%s" % pipes.quote(socket)
    else:
        cmd += " --host=%s --port=%s" % (pipes.quote(host), pipes.quote(port))
    cmd += " -D %s" % pipes.quote(db_name)
    if os.path.splitext(target)[-1] == '.gz':
        gunzip_path = module.get_bin_path('gunzip')
        if gunzip_path:
            # Decompress next to the source file (gunzip drops the .gz).
            rc, stdout, stderr = module.run_command('%s %s' % (gunzip_path, target))
            if rc != 0:
                return rc, stdout, stderr
            cmd += " < %s" % pipes.quote(os.path.splitext(target)[0])
            rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True)
            if rc != 0:
                return rc, stdout, stderr
            # Recompress to restore the original artifact.
            gzip_path = module.get_bin_path('gzip')
            if gzip_path:
                rc, stdout, stderr = module.run_command('%s %s' % (gzip_path, os.path.splitext(target)[0]))
            else:
                module.fail_json(msg="gzip command not found")
        else:
            module.fail_json(msg="gunzip command not found")
    elif os.path.splitext(target)[-1] == '.bz2':
        # Same decompress/import/recompress sequence for bzip2 archives.
        bunzip2_path = module.get_bin_path('bunzip2')
        if bunzip2_path:
            rc, stdout, stderr = module.run_command('%s %s' % (bunzip2_path, target))
            if rc != 0:
                return rc, stdout, stderr
            cmd += " < %s" % pipes.quote(os.path.splitext(target)[0])
            rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True)
            if rc != 0:
                return rc, stdout, stderr
            bzip2_path = module.get_bin_path('bzip2')
            if bzip2_path:
                rc, stdout, stderr = module.run_command('%s %s' % (bzip2_path, os.path.splitext(target)[0]))
            else:
                module.fail_json(msg="bzip2 command not found")
        else:
            module.fail_json(msg="bunzip2 command not found")
    else:
        cmd += " < %s" % pipes.quote(target)
        rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True)
    return rc, stdout, stderr
def db_create(cursor, db, encoding, collation):
    """Create database ``db``, optionally with an explicit charset/collation.

    Empty/falsy ``encoding``/``collation`` simply omit the clause (the
    original formatted the raw value into the query, which would render
    'None' for None inputs).  The database name is backtick-quoted with
    internal backticks doubled to prevent identifier injection.

    :return: True (a failure raises through the cursor).
    """
    encoding_clause = " CHARACTER SET %s" % encoding if encoding else ""
    collation_clause = " COLLATE %s" % collation if collation else ""
    quoted_name = db.replace("`", "``")
    query = "CREATE DATABASE `%s`%s%s" % (quoted_name, encoding_clause, collation_clause)
    cursor.execute(query)
    return True
def strip_quotes(s):
    """Remove surrounding single or double quotes when both ends match.

    >>> print strip_quotes('hello')
    hello
    >>> print strip_quotes('"hello"')
    hello
    >>> print strip_quotes("'hello'")
    hello
    >>> print strip_quotes("'hello")
    'hello
    """
    # Single quotes are checked first, matching the original precedence.
    for quote_char in ("'", '"'):
        if s.startswith(quote_char) and s.endswith(quote_char):
            return s.strip(quote_char)
    return s
def config_get(config, section, option):
    """ Calls ConfigParser.get and strips quotes

    MySQL option files allow quoted values, so the raw value is passed
    through strip_quotes before being returned.

    See: http://dev.mysql.com/doc/refman/5.0/en/option-files.html
    """
    return strip_quotes(config.get(section, option))
def load_mycnf():
    """Read default MySQL credentials from the user's ~/.my.cnf, if present.

    :return: dict with ``user`` and ``passwd`` keys, or False when the file
        is missing, unreadable, or lacks the required options.
    """
    config = ConfigParser.RawConfigParser()
    mycnf = os.path.expanduser('~/.my.cnf')
    if not os.path.exists(mycnf):
        return False
    try:
        # Use a context manager so the handle is closed even when parsing
        # fails (the original leaked the file object opened inline).
        with open(mycnf) as mycnf_handle:
            config.readfp(mycnf_handle)
    except (IOError):
        return False
    # We support two forms of passwords in .my.cnf, both pass= and password=,
    # as these are both supported by MySQL.
    try:
        passwd = config_get(config, 'client', 'password')
    except (ConfigParser.NoOptionError):
        try:
            passwd = config_get(config, 'client', 'pass')
        except (ConfigParser.NoOptionError):
            return False
    try:
        creds = dict(user=config_get(config, 'client', 'user'), passwd=passwd)
    except (ConfigParser.NoOptionError):
        return False
    return creds
# ===========================================
# Module execution.
#
def main():
    """Entry point: create/drop/dump/import a MySQL database per module params."""
    module = AnsibleModule(
        argument_spec = dict(
            login_user=dict(default=None),
            login_password=dict(default=None),
            login_host=dict(default="localhost"),
            login_port=dict(default="3306"),
            login_unix_socket=dict(default=None),
            name=dict(required=True, aliases=['db']),
            encoding=dict(default=""),
            collation=dict(default=""),
            target=dict(default=None),
            state=dict(default="present", choices=["absent", "present","dump", "import"]),
        )
    )
    if not mysqldb_found:
        module.fail_json(msg="the python mysqldb module is required")
    db = module.params["name"]
    encoding = module.params["encoding"]
    collation = module.params["collation"]
    state = module.params["state"]
    target = module.params["target"]
    # make sure the target path is expanded for ~ and $HOME
    if target is not None:
        target = os.path.expandvars(os.path.expanduser(target))
    # Either the caller passes both a username and password with which to connect to
    # mysql, or they pass neither and allow this module to read the credentials from
    # ~/.my.cnf.
    login_password = module.params["login_password"]
    login_user = module.params["login_user"]
    if login_user is None and login_password is None:
        mycnf_creds = load_mycnf()
        if mycnf_creds is False:
            # No config file either: fall back to the MySQL default login.
            login_user = "root"
            login_password = ""
        else:
            login_user = mycnf_creds["user"]
            login_password = mycnf_creds["passwd"]
    elif login_password is None or login_user is None:
        module.fail_json(msg="when supplying login arguments, both login_user and login_password must be provided")
    login_host = module.params["login_host"]
    if state in ['dump','import']:
        if target is None:
            module.fail_json(msg="with state=%s target is required" % (state))
        connect_to_db = db
    else:
        # For create/drop, connect to the always-present 'mysql' database.
        connect_to_db = 'mysql'
    try:
        # A Unix socket takes precedence; otherwise a non-default port
        # requires an explicit non-localhost host.
        if module.params["login_unix_socket"]:
            db_connection = MySQLdb.connect(host=module.params["login_host"], unix_socket=module.params["login_unix_socket"], user=login_user, passwd=login_password, db=connect_to_db)
        elif module.params["login_port"] != "3306" and module.params["login_host"] == "localhost":
            module.fail_json(msg="login_host is required when login_port is defined, login_host cannot be localhost when login_port is defined")
        else:
            db_connection = MySQLdb.connect(host=module.params["login_host"], port=int(module.params["login_port"]), user=login_user, passwd=login_password, db=connect_to_db)
        cursor = db_connection.cursor()
    except Exception, e:
        # "Unknown database" means the credentials worked but the db is
        # missing; report that precisely, otherwise hint at credentials.
        if "Unknown database" in str(e):
            errno, errstr = e.args
            module.fail_json(msg="ERROR: %s %s" % (errno, errstr))
        else:
            module.fail_json(msg="unable to connect, check login_user and login_password are correct, or alternatively check ~/.my.cnf contains credentials")
    changed = False
    if db_exists(cursor, db):
        if state == "absent":
            try:
                changed = db_delete(cursor, db)
            except Exception, e:
                module.fail_json(msg="error deleting database: " + str(e))
        elif state == "dump":
            rc, stdout, stderr = db_dump(module, login_host, login_user,
                                         login_password, db, target,
                                         port=module.params['login_port'],
                                         socket=module.params['login_unix_socket'])
            if rc != 0:
                module.fail_json(msg="%s" % stderr)
            else:
                module.exit_json(changed=True, db=db, msg=stdout)
        elif state == "import":
            rc, stdout, stderr = db_import(module, login_host, login_user,
                                           login_password, db, target,
                                           port=module.params['login_port'],
                                           socket=module.params['login_unix_socket'])
            if rc != 0:
                module.fail_json(msg="%s" % stderr)
            else:
                module.exit_json(changed=True, db=db, msg=stdout)
    else:
        # Database absent: only 'present' has work to do; 'absent' is a no-op.
        if state == "present":
            try:
                changed = db_create(cursor, db, encoding, collation)
            except Exception, e:
                module.fail_json(msg="error creating database: " + str(e))
    module.exit_json(changed=changed, db=db)
# import module snippets
from ansible.module_utils.basic import *
# Ansible runs modules as standalone scripts, so invoke the entry point.
main()
| gpl-3.0 |
elizaleong/robotframework-selenium2library | build_dist.py | 66 | 2390 | #!/usr/bin/env python
import os, sys, shutil, subprocess, argparse
THIS_DIR = os.path.dirname(os.path.abspath(__file__))  # repo root (this script's dir)
DIST_DIR = os.path.join(THIS_DIR, "dist")              # where built artifacts collect
# Make the library, docs and demo helper modules importable without installing.
sys.path.append(os.path.join(THIS_DIR, "src", "Selenium2Library"))
sys.path.append(os.path.join(THIS_DIR, "doc"))
sys.path.append(os.path.join(THIS_DIR, "demo"))
def main():
    """Parse command-line options and run the requested build steps.

    --winonly rebuilds only the Windows binary installers; --release also
    registers and uploads the artifacts.
    """
    parser = argparse.ArgumentParser(description="Builds a Se2Lib distribution")
    # Typo fix: the help strings previously read "executbale".
    parser.add_argument('py_26_path', action='store', help='Python 2.6 executable file path')
    parser.add_argument('py_27_path', action='store', help='Python 2.7 executable file path')
    parser.add_argument('--release', action='store_true')
    parser.add_argument('--winonly', action='store_true')
    args = parser.parse_args()
    if args.winonly:
        run_builds(args)
        return
    clear_dist_folder()
    run_register(args)
    run_builds(args)
    run_demo_packaging()
    run_doc_gen()
def clear_dist_folder():
    """Delete any previous dist/ directory and recreate it empty."""
    if os.path.exists(DIST_DIR):
        shutil.rmtree(DIST_DIR)
    os.mkdir(DIST_DIR)
def run_doc_gen():
    """Generate the keyword documentation via the doc/generate helper module."""
    import generate
    # Bare `print` emits a blank separator line (this is a Python 2 script).
    print
    generate.main()
def run_register(args):
    """Register the package metadata on PyPI, but only for release builds."""
    if not args.release:
        return
    _run_setup(args.py_27_path, "register", [], False)
def run_builds(args):
    """Build source and binary distributions for both interpreter versions.

    Source archives and eggs are skipped in --winonly mode; Windows installer
    builds only work when actually running on Windows (os.name == 'nt').
    """
    # Bare `print` emits a blank separator line (this is a Python 2 script).
    print
    if not args.winonly:
        _run_setup(args.py_27_path, "sdist", [ "--formats=gztar,zip" ], args.release)
        _run_setup(args.py_26_path, "bdist_egg", [], args.release)
        _run_setup(args.py_27_path, "bdist_egg", [], args.release)
    if os.name == 'nt':
        _run_setup(args.py_27_path, "bdist_wininst", [ "--plat-name=win32" ], args.release)
        _run_setup(args.py_27_path, "bdist_wininst", [ "--plat-name=win-amd64" ], args.release)
    else:
        print
        print("Windows binary installers cannot be built on this platform!")
def run_demo_packaging():
    """Package the demo application via the demo/package helper module."""
    import package
    # Bare `print` emits a blank separator line (this is a Python 2 script).
    print
    package.main()
def _run_setup(py_path, type, params, upload):
    """Invoke setup.py with the given interpreter and command; exit on failure.

    :param py_path: path to the Python interpreter used to run setup.py.
    :param type: distutils/setuptools command (e.g. 'sdist', 'bdist_egg').
    :param params: extra command-line arguments for the command.
    :param upload: when True, append 'upload' to push the artifact to PyPI.
    """
    setup_args = [py_path, os.path.join(THIS_DIR, "setup.py"), type]
    setup_args.extend(params)
    if upload:
        setup_args.append("upload")
    # Bare `print` emits a blank separator line (this is a Python 2 script).
    print
    print("Running: %s" % ' '.join(setup_args))
    returncode = subprocess.call(setup_args)
    if returncode != 0:
        print("Error running setup.py")
        sys.exit(1)
if __name__ == '__main__':
main()
| apache-2.0 |
autopulated/yotta | yotta/test/test_settings.py | 3 | 4733 | #!/usr/bin/env python
# Copyright 2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import unittest
import tempfile
import os
import random
# validate, , validate various things, internal
from yotta.lib import settings
from yotta.lib.fsutils import rmRf
class TestSettings(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_dir = tempfile.mkdtemp()
test_files = [
('1.json', '{"a":{"b":{"c":"1-value"}}}'),
('2.json', '{"a":{"b":{"c":"2-value"}, "b2":"2-value"}}'),
('3.json', '{"a":{"b":{"c":"3-value"}, "b2":"3-value"}, "a2":"3-value"}')
]
cls.filenames = []
for fn, s in test_files:
cls.filenames.append(os.path.join(cls.test_dir, fn))
with open(cls.filenames[-1], 'w') as f:
f.write(s)
    @classmethod
    def tearDownClass(cls):
        # Remove the scratch directory and every fixture written by setUpClass.
        rmRf(cls.test_dir)
def test_merging(self):
p = settings._JSONConfigParser()
p.read(self.filenames)
self.assertEqual(p.get('a.b.c'), '1-value')
self.assertEqual(p.get('a.b2'), '2-value')
self.assertEqual(p.get('a2'), '3-value')
    def test_setting(self):
        """set() writes to the first file by default; writes targeted at later
        files cannot override values shadowed by an earlier file."""
        p = settings._JSONConfigParser()
        p.read(self.filenames)
        p.set('foo', 'xxx')
        self.assertEqual(p.get('foo'), 'xxx')
        p.set('someLongNameHere_etc_etc', 'xxx')
        self.assertEqual(p.get('someLongNameHere_etc_etc'), 'xxx')
        # Values can be set (and re-set) in an explicitly chosen file.
        p.set('someLongNameHere_etc_etc.with.a.path', True, filename=self.filenames[1])
        self.assertEqual(p.get('someLongNameHere_etc_etc.with.a.path'), True)
        p.set('someLongNameHere_etc_etc.with.a.path', False, filename=self.filenames[1])
        self.assertEqual(p.get('someLongNameHere_etc_etc.with.a.path'), False)
        # NB: don't expect it to change when we set a value that's shadowed by
        # an earlier file:
        p.set('someLongNameHere_etc_etc.with.a.path', 7, filename=self.filenames[2])
        self.assertEqual(p.get('someLongNameHere_etc_etc.with.a.path'), False)
        p.set('someLongNameHere_etc_etc.with.another.path', 7, filename=self.filenames[2])
        self.assertEqual(p.get('someLongNameHere_etc_etc.with.another.path'), 7)
def test_writing(self):
p = settings._JSONConfigParser()
p.read(self.filenames)
p.set('foo', 'xxx')
p.set('someLongNameHere_etc_etc', 'xxx')
p.set('someLongNameHere_etc_etc.with.a.path', True, filename=self.filenames[1])
p.set('someLongNameHere_etc_etc.with.a.path', False, filename=self.filenames[1])
p.set('someLongNameHere_etc_etc.with.a.path', 7, filename=self.filenames[2])
p.set('someLongNameHere_etc_etc.with.another.path', 7, filename=self.filenames[2])
# NB: only write settings to the first file
p.write()
self.assertEqual(p.get('foo'), 'xxx')
self.assertEqual(p.get('someLongNameHere_etc_etc'), 'xxx')
self.assertEqual(p.get('someLongNameHere_etc_etc.with.a.path'), False)
self.assertEqual(p.get('someLongNameHere_etc_etc.with.another.path'), 7)
p2 = settings._JSONConfigParser()
p2.read(self.filenames)
self.assertEqual(p2.get('foo'), 'xxx')
self.assertEqual(p2.get('someLongNameHere_etc_etc'), 'xxx')
# check that we only wrote settings to the first file
self.assertEqual(p2.get('someLongNameHere_etc_etc.with.a.path'), None)
# now write settings for the other files, and continue
p.write(self.filenames[1])
p.write(self.filenames[2])
p3 = settings._JSONConfigParser()
p3.read(self.filenames)
self.assertEqual(p3.get('someLongNameHere_etc_etc.with.a.path'), False)
self.assertEqual(p3.get('someLongNameHere_etc_etc.with.another.path'), 7)
p4 = settings._JSONConfigParser()
p4.read([self.filenames[1]])
self.assertEqual(p4.get('foo'), None)
self.assertEqual(p4.get('someLongNameHere_etc_etc.with.a.path'), False)
self.assertEqual(p4.get('someLongNameHere_etc_etc.with.another.path'), None)
p5 = settings._JSONConfigParser()
p5.read([self.filenames[2]])
self.assertEqual(p5.get('foo'), None)
self.assertEqual(p5.get('someLongNameHere_etc_etc.with.a.path'), 7)
self.assertEqual(p5.get('someLongNameHere_etc_etc.with.another.path'), 7)
def test_envvars(self):
testval = str(random.randint(1,100000))
os.environ['YOTTA_SETTINGS_TEST_VARIABLE'] = testval
self.assertEqual(settings.get('settings.TEST.Variable'), testval)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
SimVascular/VTK | Rendering/Volume/Testing/Python/TestFixedPointRayCasterNearest.py | 21 | 15893 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
=========================================================================
Program: Visualization Toolkit
Module: TestNamedColorsIntegration.py
Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================
'''
import vtk
import vtk.test.Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
class FixedPointRayCasterNearest(object):
    """Build a 5x5 grid of volumes rendered with vtkFixedPointVolumeRayCastMapper.

    Rows vary the input data (thresholded gaussian, unsigned char, two
    dependent components, four independent components, four dependent RGBA
    components); columns vary the render mode (MIP, composite, composite
    shaded, composite with gradient opacity, composite GO shaded).  The
    configured property/mapper grids are exposed for reuse by other tests.
    """
    def __init__(self, ren, renWin, iren):
        # Wire the supplied renderer / window / interactor together and build
        # the whole pipeline; order of the VTK calls below is significant.
        self.ren = ren
        self.renWin = renWin
        self.iren = iren
        # Create a gaussian
        gs = vtk.vtkImageGaussianSource()
        gs.SetWholeExtent(0, 30, 0, 30, 0, 30)
        gs.SetMaximum(255.0)
        gs.SetStandardDeviation(5)
        gs.SetCenter(15, 15, 15)
        # threshold to leave a gap that should show up for
        # gradient opacity
        t = vtk.vtkImageThreshold()
        t.SetInputConnection(gs.GetOutputPort())
        t.ReplaceInOn()
        t.SetInValue(0)
        t.ThresholdBetween(150, 200)
        # Use a shift scale to convert to unsigned char
        ss = vtk.vtkImageShiftScale()
        ss.SetInputConnection(t.GetOutputPort())
        ss.SetOutputScalarTypeToUnsignedChar()
        # grid will be used for two component dependent
        grid0 = vtk.vtkImageGridSource()
        grid0.SetDataScalarTypeToUnsignedChar()
        grid0.SetGridSpacing(10, 10, 10)
        grid0.SetLineValue(200)
        grid0.SetFillValue(10)
        grid0.SetDataExtent(0, 30, 0, 30, 0, 30)
        # use dilation to thicken the grid
        d = vtk.vtkImageContinuousDilate3D()
        d.SetInputConnection(grid0.GetOutputPort())
        d.SetKernelSize(3, 3, 3)
        # Now make a two component dependent
        iac = vtk.vtkImageAppendComponents()
        iac.AddInputConnection(d.GetOutputPort())
        iac.AddInputConnection(ss.GetOutputPort())
        # Some more gaussians for the four component indepent case
        gs1 = vtk.vtkImageGaussianSource()
        gs1.SetWholeExtent(0, 30, 0, 30, 0, 30)
        gs1.SetMaximum(255.0)
        gs1.SetStandardDeviation(4)
        gs1.SetCenter(5, 5, 5)
        t1 = vtk.vtkImageThreshold()
        t1.SetInputConnection(gs1.GetOutputPort())
        t1.ReplaceInOn()
        t1.SetInValue(0)
        t1.ThresholdBetween(150, 256)
        gs2 = vtk.vtkImageGaussianSource()
        gs2.SetWholeExtent(0, 30, 0, 30, 0, 30)
        gs2.SetMaximum(255.0)
        gs2.SetStandardDeviation(4)
        gs2.SetCenter(12, 12, 12)
        gs3 = vtk.vtkImageGaussianSource()
        gs3.SetWholeExtent(0, 30, 0, 30, 0, 30)
        gs3.SetMaximum(255.0)
        gs3.SetStandardDeviation(4)
        gs3.SetCenter(19, 19, 19)
        t3 = vtk.vtkImageThreshold()
        t3.SetInputConnection(gs3.GetOutputPort())
        t3.ReplaceInOn()
        t3.SetInValue(0)
        t3.ThresholdBetween(150, 256)
        gs4 = vtk.vtkImageGaussianSource()
        gs4.SetWholeExtent(0, 30, 0, 30, 0, 30)
        gs4.SetMaximum(255.0)
        gs4.SetStandardDeviation(4)
        gs4.SetCenter(26, 26, 26)
        # we need a few append filters ...
        iac1 = vtk.vtkImageAppendComponents()
        iac1.AddInputConnection(t1.GetOutputPort())
        iac1.AddInputConnection(gs2.GetOutputPort())
        iac2 = vtk.vtkImageAppendComponents()
        iac2.AddInputConnection(iac1.GetOutputPort())
        iac2.AddInputConnection(t3.GetOutputPort())
        iac3 = vtk.vtkImageAppendComponents()
        iac3.AddInputConnection(iac2.GetOutputPort())
        iac3.AddInputConnection(gs4.GetOutputPort())
        # create the four component dependend -
        # use lines in x, y, z for colors
        gridR = vtk.vtkImageGridSource()
        gridR.SetDataScalarTypeToUnsignedChar()
        gridR.SetGridSpacing(10, 100, 100)
        gridR.SetLineValue(250)
        gridR.SetFillValue(100)
        gridR.SetDataExtent(0, 30, 0, 30, 0, 30)
        dR = vtk.vtkImageContinuousDilate3D()
        dR.SetInputConnection(gridR.GetOutputPort())
        dR.SetKernelSize(2, 2, 2)
        gridG = vtk.vtkImageGridSource()
        gridG.SetDataScalarTypeToUnsignedChar()
        gridG.SetGridSpacing(100, 10, 100)
        gridG.SetLineValue(250)
        gridG.SetFillValue(100)
        gridG.SetDataExtent(0, 30, 0, 30, 0, 30)
        dG = vtk.vtkImageContinuousDilate3D()
        dG.SetInputConnection(gridG.GetOutputPort())
        dG.SetKernelSize(2, 2, 2)
        gridB = vtk.vtkImageGridSource()
        gridB.SetDataScalarTypeToUnsignedChar()
        gridB.SetGridSpacing(100, 100, 10)
        gridB.SetLineValue(0)
        gridB.SetFillValue(250)
        gridB.SetDataExtent(0, 30, 0, 30, 0, 30)
        dB = vtk.vtkImageContinuousDilate3D()
        dB.SetInputConnection(gridB.GetOutputPort())
        dB.SetKernelSize(2, 2, 2)
        # need some appending
        iacRG = vtk.vtkImageAppendComponents()
        iacRG.AddInputConnection(dR.GetOutputPort())
        iacRG.AddInputConnection(dG.GetOutputPort())
        iacRGB = vtk.vtkImageAppendComponents()
        iacRGB.AddInputConnection(iacRG.GetOutputPort())
        iacRGB.AddInputConnection(dB.GetOutputPort())
        iacRGBA = vtk.vtkImageAppendComponents()
        iacRGBA.AddInputConnection(iacRGB.GetOutputPort())
        iacRGBA.AddInputConnection(ss.GetOutputPort())
        # We need a bunch of opacity functions
        # this one is a simple ramp to .2
        rampPoint2 = vtk.vtkPiecewiseFunction()
        rampPoint2.AddPoint(0, 0.0)
        rampPoint2.AddPoint(255, 0.2)
        # this one is a simple ramp to 1
        ramp1 = vtk.vtkPiecewiseFunction()
        ramp1.AddPoint(0, 0.0)
        ramp1.AddPoint(255, 1.0)
        # this one shows a sharp surface
        surface = vtk.vtkPiecewiseFunction()
        surface.AddPoint(0, 0.0)
        surface.AddPoint(10, 0.0)
        surface.AddPoint(50, 1.0)
        surface.AddPoint(255, 1.0)
        # this one is constant 1
        constant1 = vtk.vtkPiecewiseFunction()
        constant1.AddPoint(0, 1.0)
        constant1.AddPoint(255, 1.0)
        # this one is used for gradient opacity
        gop = vtk.vtkPiecewiseFunction()
        gop.AddPoint(0, 0.0)
        gop.AddPoint(20, 0.0)
        gop.AddPoint(60, 1.0)
        gop.AddPoint(255, 1.0)
        # We need a bunch of color functions
        # This one is a simple rainbow
        rainbow = vtk.vtkColorTransferFunction()
        rainbow.SetColorSpaceToHSV()
        rainbow.HSVWrapOff()
        rainbow.AddHSVPoint(0, 0.1, 1.0, 1.0)
        rainbow.AddHSVPoint(255, 0.9, 1.0, 1.0)
        # this is constant red
        red = vtk.vtkColorTransferFunction()
        red.AddRGBPoint(0, 1, 0, 0)
        red.AddRGBPoint(255, 1, 0, 0)
        # this is constant green
        green = vtk.vtkColorTransferFunction()
        green.AddRGBPoint(0, 0, 1, 0)
        green.AddRGBPoint(255, 0, 1, 0)
        # this is constant blue
        blue = vtk.vtkColorTransferFunction()
        blue.AddRGBPoint(0, 0, 0, 1)
        blue.AddRGBPoint(255, 0, 0, 1)
        # this is constant yellow
        yellow = vtk.vtkColorTransferFunction()
        yellow.AddRGBPoint(0, 1, 1, 0)
        yellow.AddRGBPoint(255, 1, 1, 0)
        #ren = vtk.vtkRenderer()
        #renWin = vtk.vtkRenderWindow()
        self.renWin.AddRenderer(self.ren)
        self.renWin.SetSize(500, 500)
        #iren = vtk.vtkRenderWindowInteractor()
        self.iren.SetRenderWindow(self.renWin)
        # Back-to-front sorting so the translucent volumes composite correctly.
        self.ren.GetCullers().InitTraversal()
        culler = self.ren.GetCullers().GetNextItem()
        culler.SetSortingStyleToBackToFront()
        # We need 25 mapper / actor pairs which we will render
        # in a grid. Going down we will vary the input data
        # with the top row unsigned char, then float, then
        # two dependent components, then four dependent components
        # then four independent components. Going across we
        # will vary the rendering method with MIP, Composite,
        # Composite Shade, Composite GO, and Composite GO Shade.
        # Create the 5 by 5 grids
        self.volumeProperty = [[0 for col in range(0, 5)] for row in range(0, 5)]
        self.volumeMapper = [[0 for col in range(0, 5)] for row in range(0, 5)]
        volume = [[0 for col in range(0, 5)] for row in range(0, 5)]
        for i in range(0, 5):
            for j in range(0, 5):
                self.volumeProperty[i][j] = vtk.vtkVolumeProperty()
                self.volumeMapper[i][j] = vtk.vtkFixedPointVolumeRayCastMapper()
                self.volumeMapper[i][j].SetSampleDistance(0.25)
                self.volumeMapper[i][j].SetNumberOfThreads(1)
                volume[i][j] = vtk.vtkVolume()
                volume[i][j].SetMapper(self.volumeMapper[i][j])
                volume[i][j].SetProperty(self.volumeProperty[i][j])
                # Lay the 25 volumes out in a 5x5 grid, 30 units apart.
                volume[i][j].AddPosition(i * 30, j * 30, 0)
                self.ren.AddVolume(volume[i][j])
        for i in range(0, 5):
            # Rows: input data varies; columns: blend mode / shading varies.
            self.volumeMapper[0][i].SetInputConnection(t.GetOutputPort())
            self.volumeMapper[1][i].SetInputConnection(ss.GetOutputPort())
            self.volumeMapper[2][i].SetInputConnection(iac.GetOutputPort())
            self.volumeMapper[3][i].SetInputConnection(iac3.GetOutputPort())
            self.volumeMapper[4][i].SetInputConnection(iacRGBA.GetOutputPort())
            self.volumeMapper[i][0].SetBlendModeToMaximumIntensity()
            self.volumeMapper[i][1].SetBlendModeToComposite()
            self.volumeMapper[i][2].SetBlendModeToComposite()
            self.volumeMapper[i][3].SetBlendModeToComposite()
            self.volumeMapper[i][4].SetBlendModeToComposite()
            self.volumeProperty[0][i].IndependentComponentsOn()
            self.volumeProperty[1][i].IndependentComponentsOn()
            self.volumeProperty[2][i].IndependentComponentsOff()
            self.volumeProperty[3][i].IndependentComponentsOn()
            self.volumeProperty[4][i].IndependentComponentsOff()
            self.volumeProperty[0][i].SetColor(rainbow)
            self.volumeProperty[0][i].SetScalarOpacity(rampPoint2)
            self.volumeProperty[0][i].SetGradientOpacity(constant1)
            self.volumeProperty[1][i].SetColor(rainbow)
            self.volumeProperty[1][i].SetScalarOpacity(rampPoint2)
            self.volumeProperty[1][i].SetGradientOpacity(constant1)
            self.volumeProperty[2][i].SetColor(rainbow)
            self.volumeProperty[2][i].SetScalarOpacity(rampPoint2)
            self.volumeProperty[2][i].SetGradientOpacity(constant1)
            self.volumeProperty[3][i].SetColor(0, red)
            self.volumeProperty[3][i].SetColor(1, green)
            self.volumeProperty[3][i].SetColor(2, blue)
            self.volumeProperty[3][i].SetColor(3, yellow)
            self.volumeProperty[3][i].SetScalarOpacity(0, rampPoint2)
            self.volumeProperty[3][i].SetScalarOpacity(1, rampPoint2)
            self.volumeProperty[3][i].SetScalarOpacity(2, rampPoint2)
            self.volumeProperty[3][i].SetScalarOpacity(3, rampPoint2)
            self.volumeProperty[3][i].SetGradientOpacity(0, constant1)
            self.volumeProperty[3][i].SetGradientOpacity(1, constant1)
            self.volumeProperty[3][i].SetGradientOpacity(2, constant1)
            self.volumeProperty[3][i].SetGradientOpacity(3, constant1)
            self.volumeProperty[3][i].SetComponentWeight(0, 1)
            self.volumeProperty[3][i].SetComponentWeight(1, 1)
            self.volumeProperty[3][i].SetComponentWeight(2, 1)
            self.volumeProperty[3][i].SetComponentWeight(3, 1)
            self.volumeProperty[4][i].SetColor(rainbow)
            self.volumeProperty[4][i].SetScalarOpacity(rampPoint2)
            self.volumeProperty[4][i].SetGradientOpacity(constant1)
            self.volumeProperty[i][2].ShadeOn()
            self.volumeProperty[i][4].ShadeOn(0)
            self.volumeProperty[i][4].ShadeOn(1)
            self.volumeProperty[i][4].ShadeOn(2)
            self.volumeProperty[i][4].ShadeOn(3)
            self.volumeProperty[0][0].SetScalarOpacity(ramp1)
            self.volumeProperty[1][0].SetScalarOpacity(ramp1)
            self.volumeProperty[2][0].SetScalarOpacity(ramp1)
            self.volumeProperty[3][0].SetScalarOpacity(0, surface)
            self.volumeProperty[3][0].SetScalarOpacity(1, surface)
            self.volumeProperty[3][0].SetScalarOpacity(2, surface)
            self.volumeProperty[3][0].SetScalarOpacity(3, surface)
            self.volumeProperty[4][0].SetScalarOpacity(ramp1)
            self.volumeProperty[0][2].SetScalarOpacity(surface)
            self.volumeProperty[1][2].SetScalarOpacity(surface)
            self.volumeProperty[2][2].SetScalarOpacity(surface)
            self.volumeProperty[3][2].SetScalarOpacity(0, surface)
            self.volumeProperty[3][2].SetScalarOpacity(1, surface)
            self.volumeProperty[3][2].SetScalarOpacity(2, surface)
            self.volumeProperty[3][2].SetScalarOpacity(3, surface)
            self.volumeProperty[4][2].SetScalarOpacity(surface)
            self.volumeProperty[0][4].SetScalarOpacity(surface)
            self.volumeProperty[1][4].SetScalarOpacity(surface)
            self.volumeProperty[2][4].SetScalarOpacity(surface)
            self.volumeProperty[3][4].SetScalarOpacity(0, surface)
            self.volumeProperty[3][4].SetScalarOpacity(1, surface)
            self.volumeProperty[3][4].SetScalarOpacity(2, surface)
            self.volumeProperty[3][4].SetScalarOpacity(3, surface)
            self.volumeProperty[4][4].SetScalarOpacity(surface)
            self.volumeProperty[0][3].SetGradientOpacity(gop)
            self.volumeProperty[1][3].SetGradientOpacity(gop)
            self.volumeProperty[2][3].SetGradientOpacity(gop)
            self.volumeProperty[3][3].SetGradientOpacity(0, gop)
            self.volumeProperty[3][3].SetGradientOpacity(2, gop)
            self.volumeProperty[4][3].SetGradientOpacity(gop)
            self.volumeProperty[3][3].SetScalarOpacity(0, ramp1)
            self.volumeProperty[3][3].SetScalarOpacity(2, ramp1)
            self.volumeProperty[0][4].SetGradientOpacity(gop)
            self.volumeProperty[1][4].SetGradientOpacity(gop)
            self.volumeProperty[2][4].SetGradientOpacity(gop)
            self.volumeProperty[3][4].SetGradientOpacity(0, gop)
            self.volumeProperty[3][4].SetGradientOpacity(2, gop)
            self.volumeProperty[4][4].SetGradientOpacity(gop)
        self.renWin.Render()
        # Nudge the camera for a more informative view of the grid.
        self.ren.GetActiveCamera().Dolly(1.3)
        self.ren.GetActiveCamera().Azimuth(15)
        self.ren.GetActiveCamera().Elevation(5)
        self.ren.ResetCameraClippingRange()
        # self.renWin.Render()
    def GetVolumeProperty(self):
        ''' Return the volumeProperty so other tests can use it.'''
        return self.volumeProperty
    def GetVolumeMapper(self):
        ''' Return the volumeMapper so other tests can use it.'''
        return self.volumeMapper
class TestFixedPointRayCasterNearest(vtk.test.Testing.vtkTest):
    """Regression test: render the 5x5 volume grid and compare to baseline."""
    def testFixedPointRayCasterNearest(self):
        # Build the scene, render it, and compare against the stored baseline
        # image with an intensity-difference threshold of 10.
        ren = vtk.vtkRenderer()
        renWin = vtk.vtkRenderWindow()
        iRen = vtk.vtkRenderWindowInteractor()
        tFPRCN = FixedPointRayCasterNearest(ren, renWin, iRen)
        # render and interact with data
        renWin.Render()
        img_file = "TestFixedPointRayCasterNearest.png"
        vtk.test.Testing.compareImage(iRen.GetRenderWindow(), vtk.test.Testing.getAbsImagePath(img_file), threshold=10)
        vtk.test.Testing.interact()
# Entry point used by the VTK test harness when run directly.
if __name__ == "__main__":
    vtk.test.Testing.main([(TestFixedPointRayCasterNearest, 'test')])
| bsd-3-clause |
quesnel/pyvle | tests/pyvle_conditions.py | 1 | 7254 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest as ut
import pyvle
import sys
class TestConditions(ut.TestCase):
    """Round-trip every VLE value type through the condition ports of
    test_error.vpz: read the initial value, set a new one, and read it back.

    NOTE(review): this module is Python 2 only in places (``exception.message``
    and the ``u"..."`` literal in testChekOnAddFunctions).
    """
    def setUp(self):
        # Fresh experiment handle per test so port edits don't leak across tests.
        self.exp = pyvle.Vle("test_error.vpz", "test_port")
    def testCreateConditionStillExists(self):
        # Creating an already-existing condition must not add a duplicate.
        name = "cond"
        self.assertEqual(len(self.exp.listConditions()),2)#cond and simulation_engine
        self.exp.createCondition(name)
        self.assertEqual(len(self.exp.listConditions()),2)#no cond added
    def testCreateCondition(self):
        name = "condCreate"
        self.exp.createCondition(name)
        conds = self.exp.listConditions()
        self.assertEqual(True, (name in conds))
    def testCreateConditionWithPort(self):
        # Keyword arguments to createCondition become ports with initial values.
        name = "CONFIG"
        port = {'test': 3}
        self.exp.createCondition(name, **port)
        self.assertEqual(port['test'],
            self.exp.getConditionPortValue(name, 'test', 0))
    def testBooleanCondition(self):
        x = self.exp.getConditionPortValue("cond", "cBOOL", 0)
        self.assertTrue(isinstance(x, bool))
        self.assertEqual(x, True)
        self.exp.setConditionPortValue("cond", "cBOOL", False, 0)
        x = self.exp.getConditionPortValue("cond", "cBOOL", 0)
        self.assertTrue(isinstance(x, bool))
        self.assertEqual(x, False)
    def testIntegerCondition(self):
        x = self.exp.getConditionPortValue("cond", "cINTEGER", 0)
        self.assertTrue(isinstance(x, int))
        self.assertEqual(x, 3)
        self.exp.setConditionPortValue("cond", "cINTEGER", 2, 0)
        x = self.exp.getConditionPortValue("cond", "cINTEGER", 0)
        self.assertTrue(isinstance(x, int))
        self.assertEqual(x, 2)
    def testDoubleCondition(self):
        x = self.exp.getConditionPortValue("cond", "cDOUBLE", 0)
        self.assertTrue(isinstance(x, float))
        self.assertEqual(x, 4.8)
        self.exp.setConditionPortValue("cond", "cDOUBLE", 8.5, 0)
        x = self.exp.getConditionPortValue("cond", "cDOUBLE", 0)
        self.assertTrue(isinstance(x, float))
        self.assertEqual(x, 8.5)
    def testStringCondition(self):
        x = self.exp.getConditionPortValue("cond", "cSTRING", 0)
        self.assertTrue(isinstance(x, str))
        self.assertEqual(x, 'hello')
        self.exp.setConditionPortValue("cond", "cSTRING", 'hello2', 0)
        x = self.exp.getConditionPortValue("cond", "cSTRING", 0)
        self.assertTrue(isinstance(x, str))
        self.assertEqual(x, 'hello2')
    def testXMLCondition(self):
        # XML values are wrapped in pyvle.VleXML; also exercises clearing a
        # port and re-adding a value via addXMLCondition.
        x = self.exp.getConditionPortValue("cond", "cXML", 0)
        self.assertTrue(isinstance(x, pyvle.VleXML))
        self.assertEqual(x.val, '<mytag>hello</mytag>')
        self.exp.setConditionPortValue("cond", "cXML", pyvle.VleXML('<mytag>hello2</mytag>'), 0)
        x = self.exp.getConditionPortValue("cond", "cXML", 0)
        self.assertTrue(isinstance(x,pyvle.VleXML))
        self.assertEqual(x.val, '<mytag>hello2</mytag>')
        self.exp.clearConditionPort("cond", "cXML")
        self.exp.addXMLCondition("cond","cXML", pyvle.VleXML('<mytag>hello3</mytag>'))
        x = self.exp.getConditionPortValue("cond", "cXML", 0)
        self.assertTrue(isinstance(x,pyvle.VleXML))
        self.assertEqual(x.val, '<mytag>hello3</mytag>')
    def testSetCondition(self):
        # VLE sets map to Python lists; nested tuples to pyvle.VleTuple.
        x = self.exp.getConditionPortValue("cond", "cSET", 0)
        self.assertTrue(isinstance(x, list))
        self.assertEqual(len(x), 2)
        self.assertTrue(isinstance(x[1], pyvle.VleTuple))
        self.assertTrue(isinstance(x[1].val[0], float))
        self.assertEqual(x[1].val[0],1.9)
        x[1].val[0] = 2.8
        self.exp.setConditionPortValue("cond", "cSET",x, 0)
        x = self.exp.getConditionPortValue("cond", "cSET", 0)
        self.assertAlmostEqual(x[1].val[0],2.8)
    def testMapCondition(self):
        x = self.exp.getConditionPortValue("cond", "cMAP", 0)
        self.assertTrue(isinstance(x, dict))
        self.assertEqual(len(x), 2)
        self.assertAlmostEqual(x['key1'], 2.7)
        x['key2'] = 1.3
        self.exp.setConditionPortValue("cond", "cMAP",x, 0)
        x = self.exp.getConditionPortValue("cond", "cMAP", 0)
        self.assertAlmostEqual(x['key2'], 1.3)
    def testTupleCondition(self):
        x = self.exp.getConditionPortValue("cond", "cTUPLE", 0)
        self.assertTrue(isinstance(x, pyvle.VleTuple))
        self.assertEqual(len(x.val), 3)
        self.assertAlmostEqual(x.val[1], 0)
        x.val[1] = 1.3
        self.exp.setConditionPortValue("cond", "cTUPLE",x, 0)
        x = self.exp.getConditionPortValue("cond", "cTUPLE", 0)
        self.assertAlmostEqual(x.val[1], 1.3)
    def testTableCondition(self):
        # Tables are wrapped values holding a list of rows (lists of floats).
        x = self.exp.getConditionPortValue("cond", "cTABLE", 0)
        self.assertTrue(isinstance(x, pyvle.VleTable))
        self.assertEqual(len(x.val), 3)
        self.assertTrue(isinstance(x.val[1], list))
        self.assertEqual(len(x.val[1]), 2)
        self.assertTrue(isinstance(x.val[1][1], float))
        self.assertAlmostEqual(x.val[1][1], 2)
        x.val[1][1] = 1.3
        self.exp.setConditionPortValue("cond", "cTABLE",x, 0)
        x = self.exp.getConditionPortValue("cond", "cTABLE", 0)
        self.assertAlmostEqual(x.val[1][1], 1.3)
    def testMatrixCondition(self):
        typeMatrix = self.exp.getConditionValueType("cond", "cMATRIX", 0)
        self.assertEqual(typeMatrix, "matrix")
        x = self.exp.getConditionPortValue("cond", "cMATRIX", 0)
        self.assertTrue(isinstance(x, pyvle.VleMatrix))
        self.assertEqual(len(x.val), 2)
        self.assertTrue(isinstance(x.val[1], list))
        self.assertEqual(len(x.val[1]), 4)
        self.assertTrue(isinstance(x.val[1][3], list))
        self.assertEqual(len(x.val[1][3]), 2)
        self.assertAlmostEqual(x.val[1][3][0], 0.8)
        x.val[1][3][0] = 1.3
        self.exp.setConditionPortValue("cond", "cMATRIX",x, 0)
        x = self.exp.getConditionPortValue("cond", "cMATRIX", 0)
        self.assertAlmostEqual(x.val[1][3][0], 1.3)
    def testChekOnAddFunctions(self):
        # NOTE(review): method name has a typo ("Chek"); left unchanged so
        # existing test selection by name keeps working.
        x = self.exp.delConditionValue("cond", "cVALUE", 0)
        with self.assertRaises(ValueError) as cm:
            self.exp.addTupleCondition("cond", "cVALUE", True)
        the_exception = cm.exception
        self.assertEqual(the_exception.message,
            u"Can't convert type <type 'bool'> to VleTuple")
class TestConditions2(ut.TestCase):
    """Condition-creation tests run against the 'dummy.vpz' experiment."""
    def setUp(self):
        # Fresh Vle handle per test so condition edits don't leak between tests.
        self.exp = pyvle.Vle("dummy.vpz", "test_port")
    def testCreateConditionStillExists(self):
        # 'test' already exists (alongside simulation_engine); creating it
        # again must not add a duplicate entry.
        existing = "test"
        self.assertEqual(len(self.exp.listConditions()), 2)
        self.exp.createCondition(existing)
        self.assertEqual(len(self.exp.listConditions()), 2)
    def testCreateCondition(self):
        # A brand-new condition should appear in the condition list.
        new_name = "CONFIG"
        self.exp.createCondition(new_name)
        self.assertEqual(True, (new_name in self.exp.listConditions()))
    def testCreateConditionWithPort(self):
        # Keyword arguments become condition ports holding the given values.
        new_name = "CONFIG"
        ports = {'test': 3}
        self.exp.createCondition(new_name, **ports)
        self.assertEqual(ports['test'],
                         self.exp.getConditionPortValue(new_name, 'test', 0))
# Allow running this test module directly as a script.
if __name__ == '__main__':
    ut.main()
| gpl-3.0 |
stbarnabas/mezzanine | mezzanine/generic/migrations/0006_move_keywords.py | 12 | 10241 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.db.models.fields.related import ReverseSingleRelatedObjectDescriptor
from django.contrib.sites.models import Site
class Migration(DataMigration):
def forwards(self, orm):
"""
This migration exists because prior to giving Keyword a site field, keywords were
shared between different sites. In order to add the site field, the "current site"
was assigned as a default. This migration attempts to determine the correct site
for the keyword by finding what objects the keyword was assigned to, and if
that object contains a reference to the site table, it uses that site.
If there is no reference, however, the default previously assigned is used.
Part of what this does is create new Keywords for cases where a keyword object
is being shared between different sites. A distinct keyword is used in each case.
"""
AssignedKeyword = orm['generic.assignedkeyword']
Keyword = orm['generic.keyword']
keywords = Keyword._base_manager.all()
for keyword in keywords:
#assignments = AssignedKeyword._base_manager.filter(keyword=keyword)
assignments = keyword.assignments.all()
site_dict = {}
uses_original = False
for assignment in assignments:
try:
ct = ContentType._base_manager.get(pk=assignment.content_type.pk)
related = ct.model_class()._base_manager.get(pk=assignment.object_pk)
site = None
for attr_name in dir(related.__class__):
attr = getattr(related.__class__, attr_name)
if isinstance(attr, ReverseSingleRelatedObjectDescriptor) and \
attr.field.rel.to is Site:
site = getattr(related, attr_name)
if site:
break
if site:
dict_keyword = site_dict.get(site.pk, None)
if not dict_keyword:
orm_site = orm['sites.site'].objects.get(pk=site.pk)
dict_keyword = Keyword(site=orm_site, slug=keyword.slug, title=keyword.title)
dict_keyword.save()
site_dict[site.pk] = dict_keyword
assignment.keyword = dict_keyword
assignment.save()
else:
uses_original = True
except Exception, e:
uses_original = True
if not uses_original:
keyword.delete()
    def backwards(self, orm):
        """Not implemented: the per-site split deletes the original shared
        keywords, so this migration cannot be reversed reliably."""
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'comments.comment': {
'Meta': {'ordering': "('submit_date',)", 'object_name': 'Comment', 'db_table': "'django_comments'"},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '3000'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'content_type_set_for_comment'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_removed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_pk': ('django.db.models.fields.TextField', [], {}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'submit_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comment_comments'", 'null': 'True', 'to': "orm['auth.User']"}),
'user_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'user_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'user_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'generic.assignedkeyword': {
'Meta': {'object_name': 'AssignedKeyword'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keyword': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assignments'", 'to': "orm['generic.Keyword']"}),
'object_pk': ('django.db.models.fields.IntegerField', [], {})
},
'generic.keyword': {
'Meta': {'object_name': 'Keyword'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'generic.rating': {
'Meta': {'object_name': 'Rating'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_pk': ('django.db.models.fields.IntegerField', [], {}),
'value': ('django.db.models.fields.IntegerField', [], {})
},
'generic.threadedcomment': {
'Meta': {'ordering': "('submit_date',)", 'object_name': 'ThreadedComment', '_ormbases': ['comments.Comment']},
'by_author': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comment_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['comments.Comment']", 'unique': 'True', 'primary_key': 'True'}),
'email_hash': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'replied_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'null': 'True', 'to': "orm['generic.ThreadedComment']"})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['generic']
| bsd-2-clause |
yawnosnorous/python-for-android | python3-alpha/python3-src/Tools/msi/msilib.py | 45 | 23962 | # Microsoft Installer Library
# (C) 2003 Martin v. Loewis
import win32com.client.gencache
import win32com.client
import pythoncom, pywintypes
from win32com.client import constants
import re, string, os, sets, glob, subprocess, sys, _winreg, struct, _msi
try:
basestring
except NameError:
basestring = (str, unicode)
# Partially taken from Wine
datasizemask= 0x00ff
type_valid= 0x0100
type_localizable= 0x0200
typemask= 0x0c00
type_long= 0x0000
type_short= 0x0400
type_string= 0x0c00
type_binary= 0x0800
type_nullable= 0x1000
type_key= 0x2000
# XXX temporary, localizable?
knownbits = datasizemask | type_valid | type_localizable | \
typemask | type_nullable | type_key
# Summary Info Property IDs
PID_CODEPAGE=1
PID_TITLE=2
PID_SUBJECT=3
PID_AUTHOR=4
PID_KEYWORDS=5
PID_COMMENTS=6
PID_TEMPLATE=7
PID_LASTAUTHOR=8
PID_REVNUMBER=9
PID_LASTPRINTED=11
PID_CREATE_DTM=12
PID_LASTSAVE_DTM=13
PID_PAGECOUNT=14
PID_WORDCOUNT=15
PID_CHARCOUNT=16
PID_APPNAME=18
PID_SECURITY=19
def reset():
    # Reset module state: forget every directory registered so far.
    # NOTE(review): the ``sets`` module is deprecated since Python 2.6; the
    # builtin ``set`` could be used, but only if every other call site in this
    # file is migrated at the same time (sets.Set and builtin set don't mix).
    global _directories
    _directories = sets.Set()
def EnsureMSI():
    # Generate/load the win32com typelib wrapper for the Windows Installer
    # object library (locale 1033, version 1.0).
    win32com.client.gencache.EnsureModule('{000C1092-0000-0000-C000-000000000046}', 1033, 1, 0)
def EnsureMSM():
    # Generate/load the typelib wrapper for the merge-module (Msm) interface;
    # try typelib version 1 first and fall back to version 2.
    try:
        win32com.client.gencache.EnsureModule('{0ADDA82F-2C26-11D2-AD65-00A0C9AF11A6}', 0, 1, 0)
    except pywintypes.com_error:
        win32com.client.gencache.EnsureModule('{0ADDA82F-2C26-11D2-AD65-00A0C9AF11A6}', 0, 2, 0)
_Installer = None
def MakeInstaller():
    # Return the process-wide WindowsInstaller.Installer COM object,
    # creating (and caching) it on first use.
    global _Installer
    if _Installer is not None:
        return _Installer
    EnsureMSI()
    _Installer = win32com.client.Dispatch(
        'WindowsInstaller.Installer',
        resultCLSID='{000C1090-0000-0000-C000-000000000046}')
    return _Installer
_Merge = None
def MakeMerge2():
    # Return the process-wide Msm.Merge2.1 COM object, creating (and
    # caching) it on first use.
    global _Merge
    if _Merge is not None:
        return _Merge
    EnsureMSM()
    _Merge = win32com.client.Dispatch("Msm.Merge2.1")
    return _Merge
class Table:
    """In-memory description of one MSI table's schema.

    Columns are collected via add_field() and turned into the CREATE TABLE
    statement understood by the MSI SQL dialect by sql()/create().
    """
    def __init__(self, name):
        self.name = name
        self.fields = []   # list of (1-based index, column name, MSI type bits)

    def add_field(self, index, name, type):
        """Record column *name* at 1-based position *index* with MSI type bits."""
        self.fields.append((index, name, type))

    def sql(self):
        """Return the CREATE TABLE statement for this table.

        Unknown type bits and unrecognised integer types are reported on
        stdout but do not abort generation.
        """
        keys = []
        self.fields.sort()
        fields = [None] * len(self.fields)
        for index, name, type in self.fields:
            index -= 1     # stored 1-based, placed 0-based
            unk = type & ~knownbits
            if unk:
                # parenthesised print: valid as a statement in Python 2 and a
                # call in Python 3
                print("%s.%s unknown bits %x" % (self.name, name, unk))
            size = type & datasizemask
            dtype = type & typemask
            if dtype == type_string:
                if size:
                    tname = "CHAR(%d)" % size
                else:
                    tname = "CHAR"
            elif dtype == type_short:
                assert size == 2
                tname = "SHORT"
            elif dtype == type_long:
                assert size == 4
                tname = "LONG"
            elif dtype == type_binary:
                assert size == 0
                tname = "OBJECT"
            else:
                tname = "unknown"
                # bug fix: original message lacked the space after "%s.%s"
                print("%s.%s unknown integer type %d" % (self.name, name, size))
            if type & type_nullable:
                flags = ""
            else:
                flags = " NOT NULL"
            if type & type_localizable:
                flags += " LOCALIZABLE"
            fields[index] = "`%s` %s%s" % (name, tname, flags)
            if type & type_key:
                keys.append("`%s`" % name)
        fields = ", ".join(fields)
        keys = ", ".join(keys)
        return "CREATE TABLE %s (%s PRIMARY KEY %s)" % (self.name, fields, keys)

    def create(self, db):
        """Execute this table's CREATE TABLE statement against open database *db*."""
        v = db.OpenView(self.sql())
        v.Execute(None)
        v.Close()
class Binary:
    """Marker wrapper for a value stored as an external binary stream.

    The repr is deliberately a piece of Python source: modules generated by
    gen_sequence() re-create the object relative to their own directory.
    """

    def __init__(self, fname):
        self.name = fname

    def __repr__(self):
        return 'msilib.Binary(os.path.join(dirname,"' + self.name + '"))'
def gen_schema(destpath, schemapath):
    """Dump the table schema of the MSI database *schemapath* as Python code.

    The generated module (written to *destpath*) defines one Table object per
    MSI table, a `tables` list, and the raw _Validation rows.
    NOTE: Python 2 only (backtick repr syntax below).
    """
    d = MakeInstaller()
    schema = d.OpenDatabase(schemapath,
             win32com.client.constants.msiOpenDatabaseModeReadOnly)
    # XXX ORDER BY
    v=schema.OpenView("SELECT * FROM _Columns")
    curtable=None
    tables = []
    v.Execute(None)
    f = open(destpath, "wt")
    f.write("from msilib import Table\n")
    while 1:
        r=v.Fetch()
        if not r:break
        name=r.StringData(1)
        # _Columns rows are grouped by table; start a new Table object
        # whenever the table name changes.
        if curtable != name:
            f.write("\n%s = Table('%s')\n" % (name,name))
            curtable = name
            tables.append(name)
        f.write("%s.add_field(%d,'%s',%d)\n" %
                (name, r.IntegerData(2), r.StringData(3), r.IntegerData(4)))
    v.Close()

    f.write("\ntables=[%s]\n\n" % (", ".join(tables)))

    # Fill the _Validation table
    f.write("_Validation_records = [\n")
    v = schema.OpenView("SELECT * FROM _Validation")
    v.Execute(None)
    while 1:
        r = v.Fetch()
        if not r:break
        # Table, Column, Nullable
        f.write("(%s,%s,%s," %
                (`r.StringData(1)`, `r.StringData(2)`, `r.StringData(3)`))
        def put_int(i):
            # Emit column i as an int literal, or None for SQL NULL.
            if r.IsNull(i):f.write("None, ")
            else:f.write("%d," % r.IntegerData(i))
        def put_str(i):
            # Emit column i as a string repr, or None for SQL NULL.
            if r.IsNull(i):f.write("None, ")
            else:f.write("%s," % `r.StringData(i)`)
        put_int(4) # MinValue
        put_int(5) # MaxValue
        put_str(6) # KeyTable
        put_int(7) # KeyColumn
        put_str(8) # Category
        put_str(9) # Set
        put_str(10)# Description
        f.write("),\n")
    f.write("]\n\n")

    f.close()
def gen_sequence(destpath, msipath):
    """Dump every table of the MSI database *msipath* as Python code.

    Each row becomes a tuple in a per-table list; streams in the Binary
    table are extracted to <name>.bin files next to *destpath* and become
    Binary objects.  NOTE: Python 2 only (`print >>`, string exception).
    """
    dir = os.path.dirname(destpath)
    d = MakeInstaller()
    seqmsi = d.OpenDatabase(msipath,
             win32com.client.constants.msiOpenDatabaseModeReadOnly)

    v = seqmsi.OpenView("SELECT * FROM _Tables");
    v.Execute(None)
    f = open(destpath, "w")
    print >>f, "import msilib,os;dirname=os.path.dirname(__file__)"
    tables = []
    while 1:
        r = v.Fetch()
        if not r:break
        table = r.StringData(1)
        tables.append(table)
        f.write("%s = [\n" % table)
        v1 = seqmsi.OpenView("SELECT * FROM `%s`" % table)
        v1.Execute(None)
        # Column type codes: i/I integer, s/l/S/L string, v stream.
        info = v1.ColumnInfo(constants.msiColumnInfoTypes)
        while 1:
            r = v1.Fetch()
            if not r:break
            rec = []
            for i in range(1,r.FieldCount+1):
                if r.IsNull(i):
                    rec.append(None)
                elif info.StringData(i)[0] in "iI":
                    rec.append(r.IntegerData(i))
                elif info.StringData(i)[0] in "slSL":
                    rec.append(r.StringData(i))
                elif info.StringData(i)[0]=="v":
                    size = r.DataSize(i)
                    bytes = r.ReadStream(i, size, constants.msiReadStreamBytes)
                    bytes = bytes.encode("latin-1") # binary data represented "as-is"
                    if table == "Binary":
                        # Store streams externally and reference them through
                        # msilib.Binary (see Binary.__repr__).
                        fname = rec[0]+".bin"
                        open(os.path.join(dir,fname),"wb").write(bytes)
                        rec.append(Binary(fname))
                    else:
                        rec.append(bytes)
                else:
                    # Python 2 string exception; only reached for unknown codes.
                    raise "Unsupported column type", info.StringData(i)
            f.write(repr(tuple(rec))+",\n")
        v1.Close()
        f.write("]\n\n")
    v.Close()
    f.write("tables=%s\n" % repr(map(str,tables)))
    f.close()
class _Unspecified:
    """Sentinel class: distinguishes "argument not supplied" from None."""
    pass

def change_sequence(seq, action, seqno=_Unspecified, cond=_Unspecified):
    """Change the sequence number and/or condition of *action* in *seq*.

    *seq* is a list of (action, condition, sequence-number) rows as produced
    by gen_sequence(); the matching entry is replaced in place.  An omitted
    *seqno* or *cond* keeps the entry's current value.

    Raises ValueError if *action* is not present.

    FIX: the Python 2-only statement form ``raise ValueError, "..."`` was
    rewritten as a call, which is valid on Python 2 and 3 alike.
    """
    for i in range(len(seq)):
        if seq[i][0] != action:
            continue
        new_cond = seq[i][1] if cond is _Unspecified else cond
        new_seqno = seq[i][2] if seqno is _Unspecified else seqno
        seq[i] = (action, new_cond, new_seqno)
        return
    raise ValueError("Action not found in sequence")
def add_data(db, table, values):
    """Insert each tuple in *values* as one row of *table* in MSI database *db*.

    Field mapping by Python type: int/long -> integer column, string ->
    string column, None -> NULL, Binary -> stream.
    NOTE(review): `long` is the Python 2 builtin; unlike `basestring` above
    it has no Python 3 fallback shim in this module.
    """
    d = MakeInstaller()
    v = db.OpenView("SELECT * FROM `%s`" % table)
    count = v.ColumnInfo(0).FieldCount
    r = d.CreateRecord(count)
    for value in values:
        # Each tuple must supply exactly one field per column.
        assert len(value) == count, value
        for i in range(count):
            field = value[i]
            if isinstance(field, (int, long)):
                r.SetIntegerData(i+1,field)
            elif isinstance(field, basestring):
                r.SetStringData(i+1,field)
            elif field is None:
                pass
            elif isinstance(field, Binary):
                r.SetStream(i+1, field.name)
            else:
                raise TypeError, "Unsupported type %s" % field.__class__.__name__
        v.Modify(win32com.client.constants.msiViewModifyInsert, r)
        r.ClearData()
    v.Close()
def add_stream(db, name, path):
    """Store the file at *path* as stream *name* in the _Streams table of *db*."""
    d = MakeInstaller()
    # *name* is interpolated directly into the SQL; callers only pass
    # identifiers generated by this module.
    v = db.OpenView("INSERT INTO _Streams (Name, Data) VALUES ('%s', ?)" % name)
    r = d.CreateRecord(1)
    r.SetStream(1, path)
    v.Execute(r)
    v.Close()
def init_database(name, schema,
                  ProductName, ProductCode, ProductVersion,
                  Manufacturer,
                  request_uac = False):
    """Create a fresh MSI database file *name* from a schema module.

    *schema* must provide `tables` (Table objects) and `_Validation_records`.
    Relies on the module global `msi_type` set by set_arch_from_file().
    Returns the open (already committed) database object.
    """
    try:
        os.unlink(name)
    except OSError:
        pass
    ProductCode = ProductCode.upper()
    d = MakeInstaller()
    # Create the database
    db = d.OpenDatabase(name,
         win32com.client.constants.msiOpenDatabaseModeCreate)
    # Create the tables
    for t in schema.tables:
        t.create(db)
    # Fill the validation table
    add_data(db, "_Validation", schema._Validation_records)
    # Initialize the summary information, allowing at most 20 properties
    si = db.GetSummaryInformation(20)
    si.SetProperty(PID_TITLE, "Installation Database")
    si.SetProperty(PID_SUBJECT, ProductName)
    si.SetProperty(PID_AUTHOR, Manufacturer)
    si.SetProperty(PID_TEMPLATE, msi_type)
    si.SetProperty(PID_REVNUMBER, gen_uuid())
    if request_uac:
        wc = 2 # long file names, compressed, original media
    else:
        wc = 2 | 8 # +never invoke UAC
    si.SetProperty(PID_WORDCOUNT, wc)
    si.SetProperty(PID_PAGECOUNT, 200)
    si.SetProperty(PID_APPNAME, "Python MSI Library")
    # XXX more properties
    si.Persist()
    add_data(db, "Property", [
        ("ProductName", ProductName),
        ("ProductCode", ProductCode),
        ("ProductVersion", ProductVersion),
        ("Manufacturer", Manufacturer),
        ("ProductLanguage", "1033")])
    db.Commit()
    return db
def add_tables(db, module):
    """Bulk-insert every table defined by a module generated via gen_sequence()."""
    for table in module.tables:
        add_data(db, table, getattr(module, table))
def make_id(str):
    """Sanitize *str* into a legal MSI identifier.

    Spaces, dashes and plus signs become underscores; a leading digit gets
    an underscore prefix.  The assert enforces the MSI identifier grammar
    (letters, digits, underscores and dots; dots are deliberately allowed).
    """
    #str = str.replace(".", "_") # colons are allowed
    for forbidden in (" ", "-", "+"):
        str = str.replace(forbidden, "_")
    if str[0] in string.digits:
        str = "_" + str
    assert re.match("^[A-Za-z_][A-Za-z0-9_.]*$", str), "FILE"+str
    return str
def gen_uuid():
    """Return a fresh GUID as a registry-format string, e.g. '{XXXXXXXX-...}'."""
    return str(pythoncom.CreateGuid())
class CAB:
    """Accumulates files destined for a CAB archive embedded in the MSI.

    Files get sequential indices (the File/Media sequence numbers);
    commit() builds the .cab via _msi.FCICreate and stores it as a
    database stream named "#<name>".
    """
    def __init__(self, name):
        self.name = name
        self.files = []              # (full path, logical name), in sequence order
        self.filenames = sets.Set()  # logical names handed out so far (unique)
        self.index = 0               # last assigned sequence number

    def gen_id(self, dir, file):
        """Return a unique logical id derived from *file*; *dir* is unused."""
        logical = _logical = make_id(file)
        pos = 1
        # Append .1, .2, ... until the name is unique within this CAB.
        while logical in self.filenames:
            logical = "%s.%d" % (_logical, pos)
            pos += 1
        self.filenames.add(logical)
        return logical

    def append(self, full, file, logical = None):
        """Queue the file at *full* under *logical* (generated when None).

        Directories are silently skipped (returning None); otherwise
        returns (sequence number, logical name).
        """
        if os.path.isdir(full):
            return
        if not logical:
            # NOTE(review): there is no local `dir` here, so this passes the
            # builtin dir() function; harmless only because gen_id ignores
            # its first argument.
            logical = self.gen_id(dir, file)
        self.index += 1
        self.files.append((full, logical))
        return self.index, logical

    def commit(self, db):
        """Build the .cab, store it as a stream in *db*, then delete the file."""
        try:
            os.unlink(self.name+".cab")
        except OSError:
            pass
        _msi.FCICreate(self.name+".cab", self.files)
        add_data(db, "Media",
                 [(1, self.index, None, "#"+self.name, None, None)])
        add_stream(db, self.name, self.name+".cab")
        os.unlink(self.name+".cab")
        db.Commit()
# Registry of directory logical names handed out so far (see reset()).
_directories = sets.Set()

class Directory:
    def __init__(self, db, cab, basedir, physical, _logical, default, componentflags=None):
        """Create a new directory in the Directory table. There is a current component
        at each point in time for the directory, which is either explicitly created
        through start_component, or implicitly when files are added for the first
        time. Files are added into the current component, and into the cab file.
        To create a directory, a base directory object needs to be specified (can be
        None), the path to the physical directory, and a logical directory name.
        Default specifies the DefaultDir slot in the directory table. componentflags
        specifies the default flags that new components get."""
        index = 1
        _logical = make_id(_logical)
        logical = _logical
        # Disambiguate against every directory created so far (module global).
        while logical in _directories:
            logical = "%s%d" % (_logical, index)
            index += 1
        _directories.add(logical)
        self.db = db
        self.cab = cab
        self.basedir = basedir
        self.physical = physical
        self.logical = logical
        self.component = None          # current component; see start_component()
        self.short_names = sets.Set()  # 8.3 short names used in this directory
        self.ids = sets.Set()          # File table keys used in this directory
        self.keyfiles = {}             # file name -> logical id for KeyPath files
        self.componentflags = componentflags
        if basedir:
            self.absolute = os.path.join(basedir.absolute, physical)
            blogical = basedir.logical
        else:
            self.absolute = physical
            blogical = None
        # initially assume that all files in this directory are unpackaged
        # as files from self.absolute get added, this set is reduced
        self.unpackaged_files = set()
        for f in os.listdir(self.absolute):
            if os.path.isfile(os.path.join(self.absolute, f)):
                self.unpackaged_files.add(f)
        add_data(db, "Directory", [(logical, blogical, default)])

    def start_component(self, component = None, feature = None, flags = None, keyfile = None, uuid=None):
        """Add an entry to the Component table, and make this component the current for this
        directory. If no component name is given, the directory name is used. If no feature
        is given, the current feature is used. If no flags are given, the directory's default
        flags are used. If no keyfile is given, the KeyPath is left null in the Component
        table."""
        if flags is None:
            flags = self.componentflags
        if uuid is None:
            uuid = gen_uuid()
        else:
            uuid = uuid.upper()
        if component is None:
            component = self.logical
        self.component = component
        # Win64 is a module global set by set_arch_from_file(); bit 256 marks
        # the component as 64-bit.
        if Win64:
            flags |= 256
        if keyfile:
            keyid = self.cab.gen_id(self.absolute, keyfile)
            self.keyfiles[keyfile] = keyid
        else:
            keyid = None
        add_data(self.db, "Component",
                 [(component, uuid, self.logical, flags, None, keyid)])
        if feature is None:
            feature = current_feature
        add_data(self.db, "FeatureComponents",
                 [(feature.id, component)])

    def make_short(self, file):
        """Derive a unique 8.3-style short name for *file* in this directory."""
        file = re.sub(r'[\?|><:/*"+,;=\[\]]', '_', file) # restrictions on short names
        parts = file.split(".")
        if len(parts)>1:
            suffix = parts[-1].upper()
        else:
            suffix = None
        prefix = parts[0].upper()
        if len(prefix) <= 8 and (not suffix or len(suffix)<=3):
            if suffix:
                file = prefix+"."+suffix
            else:
                file = prefix
            assert file not in self.short_names
        else:
            # Truncate and append ~1, ~2, ... until the name is unique.
            prefix = prefix[:6]
            if suffix:
                suffix = suffix[:3]
            pos = 1
            while 1:
                if suffix:
                    file = "%s~%d.%s" % (prefix, pos, suffix)
                else:
                    file = "%s~%d" % (prefix, pos)
                if file not in self.short_names: break
                pos += 1
                assert pos < 10000
                # Shorten the prefix as the counter gains digits.
                if pos in (10, 100, 1000):
                    prefix = prefix[:-1]
        self.short_names.add(file)
        return file

    def add_file(self, file, src=None, version=None, language=None):
        """Add a file to the current component of the directory, starting a new one
        one if there is no current component. By default, the file name in the source
        and the file table will be identical. If the src file is specified, it is
        interpreted relative to the current directory. Optionally, a version and a
        language can be specified for the entry in the File table."""
        if not self.component:
            self.start_component(self.logical, current_feature)
        if not src:
            # Allow relative paths for file if src is not specified
            src = file
            file = os.path.basename(file)
        absolute = os.path.join(self.absolute, src)
        if absolute.startswith(self.absolute):
            # mark file as packaged
            relative = absolute[len(self.absolute)+1:]
            if relative in self.unpackaged_files:
                self.unpackaged_files.remove(relative)
        assert not re.search(r'[\?|><:/*]"', file) # restrictions on long names
        # NOTE(review): dict.has_key is Python 2-only; the portable spelling
        # would be 'file in self.keyfiles'.
        if self.keyfiles.has_key(file):
            logical = self.keyfiles[file]
        else:
            logical = None
        sequence, logical = self.cab.append(absolute, file, logical)
        assert logical not in self.ids
        self.ids.add(logical)
        short = self.make_short(file)
        full = "%s|%s" % (short, file)
        filesize = os.stat(absolute).st_size
        # constants.msidbFileAttributesVital
        # Compressed omitted, since it is the database default
        # could add r/o, system, hidden
        attributes = 512
        add_data(self.db, "File",
                 [(logical, self.component, full, filesize, version,
                   language, attributes, sequence)])
        if not version:
            # Add hash if the file is not versioned
            filehash = MakeInstaller().FileHash(absolute, 0)
            add_data(self.db, "MsiFileHash",
                     [(logical, 0, filehash.IntegerData(1),
                       filehash.IntegerData(2), filehash.IntegerData(3),
                       filehash.IntegerData(4))])
        # Automatically remove .pyc/.pyo files on uninstall (2)
        # XXX: adding so many RemoveFile entries makes installer unbelievably
        # slow. So instead, we have to use wildcard remove entries
        # if file.endswith(".py"):
        #     add_data(self.db, "RemoveFile",
        #              [(logical+"c", self.component, "%sC|%sc" % (short, file),
        #                self.logical, 2),
        #               (logical+"o", self.component, "%sO|%so" % (short, file),
        #                self.logical, 2)])

    def glob(self, pattern, exclude = None):
        """Add a list of files to the current component as specified in the
        glob pattern. Individual files can be excluded in the exclude list."""
        files = glob.glob1(self.absolute, pattern)
        for f in files:
            if exclude and f in exclude: continue
            self.add_file(f)
        return files

    def remove_pyc(self):
        "Remove .pyc/.pyo files from __pycache__ on uninstall"
        directory = self.logical + "_pycache"
        add_data(self.db, "Directory", [(directory, self.logical, "__PYCA~1|__pycache__")])
        flags = 256 if Win64 else 0
        add_data(self.db, "Component",
                 [(directory, gen_uuid(), directory, flags, None, None)])
        add_data(self.db, "FeatureComponents", [(current_feature.id, directory)])
        add_data(self.db, "CreateFolder", [(directory, directory)])
        add_data(self.db, "RemoveFile",
                 [(self.component, self.component, "*.*", directory, 2),
                  ])

    def removefile(self, key, pattern):
        "Add a RemoveFile entry"
        add_data(self.db, "RemoveFile", [(self.component+key, self.component, pattern, self.logical, 2)])
class Feature:
    """One row of the Feature table; only the id is kept for later use."""
    def __init__(self, db, id, title, desc, display, level = 1,
                 parent=None, directory = None, attributes=0):
        self.id = id
        if parent:
            parent = parent.id
        add_data(db, "Feature",
                 [(id, parent, title, desc, display,
                   level, directory, attributes)])

    def set_current(self):
        """Make this the feature that new components attach to by default."""
        global current_feature
        current_feature = self
class Control:
    """A named control on a dialog; helpers for the event/condition tables."""
    def __init__(self, dlg, name):
        self.dlg = dlg
        self.name = name

    def event(self, ev, arg, cond = "1", order = None):
        """Add a ControlEvent row: publish *ev* with *arg* when *cond* holds."""
        add_data(self.dlg.db, "ControlEvent",
                 [(self.dlg.name, self.name, ev, arg, cond, order)])

    def mapping(self, ev, attr):
        """Add an EventMapping row: bind attribute *attr* to event *ev*."""
        add_data(self.dlg.db, "EventMapping",
                 [(self.dlg.name, self.name, ev, attr)])

    def condition(self, action, condition):
        """Add a ControlCondition row: apply *action* when *condition* holds."""
        add_data(self.dlg.db, "ControlCondition",
                 [(self.dlg.name, self.name, action, condition)])
class RadioButtonGroup(Control):
    """A RadioButtonGroup control; add() appends buttons in display order."""
    def __init__(self, dlg, name, property):
        self.dlg = dlg
        self.name = name
        self.property = property
        self.index = 1   # RadioButton table rows are 1-based and ordered

    def add(self, name, x, y, w, h, text, value = None):
        """Append one radio button; *value* defaults to the button name."""
        if value is None:
            value = name
        add_data(self.dlg.db, "RadioButton",
                 [(self.property, self.index, value,
                   x, y, w, h, text, None)])
        self.index += 1
class Dialog:
    """A row of the Dialog table plus factory methods for common controls.

    Coordinates and sizes are in dialog units; each factory returns a
    Control (or RadioButtonGroup) wrapper for further event wiring.
    """
    def __init__(self, db, name, x, y, w, h, attr, title, first, default, cancel):
        self.db = db
        self.name = name
        self.x, self.y, self.w, self.h = x,y,w,h
        add_data(db, "Dialog", [(name, x,y,w,h,attr,title,first,default,cancel)])

    def control(self, name, type, x, y, w, h, attr, prop, text, next, help):
        """Generic Control-table row; returns a Control wrapper."""
        add_data(self.db, "Control",
                 [(self.name, name, type, x, y, w, h, attr, prop, text, next, help)])
        return Control(self, name)

    def text(self, name, x, y, w, h, attr, text):
        """Static Text control."""
        return self.control(name, "Text", x, y, w, h, attr, None,
                            text, None, None)

    def bitmap(self, name, x, y, w, h, text):
        """Bitmap control; *text* names the image source."""
        return self.control(name, "Bitmap", x, y, w, h, 1, None, text, None, None)

    def line(self, name, x, y, w, h):
        """Separator line."""
        return self.control(name, "Line", x, y, w, h, 1, None, None, None, None)

    def pushbutton(self, name, x, y, w, h, attr, text, next):
        """PushButton control; *next* is the tab-order successor."""
        return self.control(name, "PushButton", x, y, w, h, attr, None, text, next, None)

    def radiogroup(self, name, x, y, w, h, attr, prop, text, next):
        """RadioButtonGroup control bound to property *prop*."""
        add_data(self.db, "Control",
                 [(self.name, name, "RadioButtonGroup",
                   x, y, w, h, attr, prop, text, next, None)])
        return RadioButtonGroup(self, name, prop)

    def checkbox(self, name, x, y, w, h, attr, prop, text, next):
        """CheckBox control bound to property *prop*."""
        return self.control(name, "CheckBox", x, y, w, h, attr, prop, text, next, None)
def pe_type(path):
    """Return the machine field of the PE header of the file at *path*.

    NOTE(review): Python 2 only -- the file is read in binary mode but the
    signature is compared with the str literal "PE\\0\\0", which can never
    equal bytes on Python 3.
    """
    header = open(path, "rb").read(1000)
    # offset of PE header is at offset 0x3c
    pe_offset = struct.unpack("<i", header[0x3c:0x40])[0]
    assert header[pe_offset:pe_offset+4] == "PE\0\0"
    machine = struct.unpack("<H", header[pe_offset+4:pe_offset+6])[0]
    return machine
def set_arch_from_file(path):
    """Set the module globals msi_type, Win64 and arch_ext from the PE
    architecture of the file at *path* (typically the built python.exe)."""
    global msi_type, Win64, arch_ext
    machine = pe_type(path)
    if machine == 0x14c:
        # i386
        msi_type = "Intel"
        Win64 = 0
        arch_ext = ''
    elif machine == 0x200:
        # Itanium
        msi_type = "Intel64"
        Win64 = 1
        arch_ext = '.ia64'
    elif machine == 0x8664:
        # AMD64
        msi_type = "x64"
        Win64 = 1
        arch_ext = '.amd64'
    else:
        raise ValueError, "Unsupported architecture"
    # The summary-information template also carries the language id.
    msi_type += ";1033"
| apache-2.0 |
rangadi/beam | sdks/python/apache_beam/testing/benchmarks/nexmark/queries/query1.py | 7 | 1923 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Nexmark Query 1: Convert bid prices from dollars to euros.
The Nexmark suite is a series of queries (streaming pipelines) performed
on a simulation of auction events.
This query converts bid prices from dollars to euros.
It illustrates a simple map.
"""
from __future__ import absolute_import
import apache_beam as beam
from apache_beam.testing.benchmarks.nexmark.models import nexmark_model
from apache_beam.testing.benchmarks.nexmark.nexmark_util import ParseEventFn
from apache_beam.testing.benchmarks.nexmark.nexmark_util import display
def load(raw_events, query_args=None):
    """Nexmark Query 1 pipeline: keep only Bid events and convert each
    bid's price from dollars to euros at a fixed rate (89/100), using
    integral arithmetic on the scaled price."""
    def to_euros(bid):
        # (price * 89) // 100 applies the fixed exchange rate.
        return nexmark_model.Bid(
            bid.auction,
            bid.bidder,
            (float(bid.price) * 89) // 100,
            bid.timestamp,
            bid.extra)

    return (raw_events
            | 'ParseEventFn' >> beam.ParDo(ParseEventFn())
            | 'FilterInBids' >> beam.Filter(
                lambda event: isinstance(event, nexmark_model.Bid))
            | 'ConvertToEuro' >> beam.Map(to_euros)
            | 'DisplayQuery1' >> beam.Map(display)
            )  # pylint: disable=expression-not-assigned
| apache-2.0 |
Kazade/NeHe-Website | django/conf/locale/cs/formats.py | 232 | 1288 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. E Y'
TIME_FORMAT = 'G:i:s'
DATETIME_FORMAT = 'j. E Y G:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i:s'
FIRST_DAY_OF_WEEK = 1 # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
    '%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25'
    # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S', # '14:30:59'
    '%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
    '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M', # '25.10.2006 14:30'
    '%d.%m.%Y', # '25.10.2006'
    '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M', # '2006-10-25 14:30'
    '%Y-%m-%d', # '2006-10-25'
)
# Numbers are rendered like '1 234 567,89': decimal comma, thousands
# separated by a space in groups of three.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' '
NUMBER_GROUPING = 3
| bsd-3-clause |
loggly/skyline | src/analyzer/algorithms.py | 1 | 9913 |
import pandas
import numpy as np
import scipy
import statsmodels.api as sm
import traceback
import logging
from time import time
from msgpack import unpackb, packb
from redis import StrictRedis
from settings import (
ALGORITHMS,
CONSENSUS,
FULL_DURATION,
MAX_TOLERABLE_BOREDOM,
MIN_TOLERABLE_LENGTH,
STALE_PERIOD,
REDIS_SOCKET_PATH,
ENABLE_SECOND_ORDER,
BOREDOM_SET_SIZE,
)
from algorithm_exceptions import *
logger = logging.getLogger("AnalyzerLog")

# Shared Redis handle (unix socket); only used by the second-order anomaly
# code, which is currently commented out below.
redis_conn = StrictRedis(unix_socket_path=REDIS_SOCKET_PATH)
"""
This is no man's land. Do anything you want in here,
as long as you return a boolean that determines whether the input
timeseries is anomalous or not.
To add an algorithm, define it here, and add its name to settings.ALGORITHMS.
"""
def tail_avg(timeseries):
    """Return the mean of the last three datapoint values.

    Smooths the "current" value fed to the detectors so that one noisy
    datapoint is less likely to trigger; the trade-off is reduced
    sensitivity and a small detection delay.  A series with fewer than
    three points falls back to its raw last value.
    """
    if len(timeseries) < 3:
        return timeseries[-1][1]
    newest, middle, oldest = (timeseries[-1][1],
                              timeseries[-2][1],
                              timeseries[-3][1])
    return (newest + middle + oldest) / 3
def median_absolute_deviation(timeseries):
    """Flag the series as anomalous when the latest datapoint deviates from
    the median by more than six times the median absolute deviation."""
    values = pandas.Series([point[1] for point in timeseries])
    deviations = (values - values.median()).abs()
    mad = deviations.median()

    # A zero MAD makes the statistic infinite (hypersensitive), so play it
    # safe and refuse to judge such series.
    if mad == 0:
        return False

    # The 6x multiplier is arbitrary but matches the original tuning.
    if deviations.iloc[-1] / mad > 6:
        return True

    return False
#def grubbs(timeseries):
# """
# A timeseries is anomalous if the Z score is greater than the Grubb's score.
# """
# series = scipy.array([x[1] for x in timeseries])
# stdDev = scipy.std(series)
# mean = np.mean(series)
# tail_average = tail_avg(timeseries)
# z_score = (tail_average - mean) / stdDev
# len_series = len(series)
# threshold = scipy.stats.t.isf(.05 / (2 * len_series), len_series - 2)
# threshold_squared = threshold * threshold
# grubbs_score = ((len_series - 1) / np.sqrt(len_series)) * np.sqrt(threshold_squared / (len_series - 2 + threshold_squared))
# return z_score > grubbs_score
def first_hour_average(timeseries):
    """Flag the series as anomalous when the tail average sits more than
    three standard deviations away from the mean of the oldest hour.

    The baseline window is the hour starting FULL_DURATION seconds ago
    (module-level setting), measured against wall-clock time.
    """
    window_end = time() - (FULL_DURATION - 3600)
    baseline = pandas.Series([point[1] for point in timeseries
                              if point[0] < window_end])
    center = baseline.mean()
    spread = baseline.std()
    return abs(tail_avg(timeseries) - center) > 3 * spread
def stddev_from_average(timeseries):
    """Flag the series as anomalous when the tail average is more than three
    standard deviations from the mean of the whole series.

    Unlike the moving-average variant below, this judges the latest value
    against the entire series, so it catches anomalies relative to the
    long-term behaviour.
    """
    values = pandas.Series([point[1] for point in timeseries])
    deviation = abs(tail_avg(timeseries) - values.mean())
    return deviation > 3 * values.std()
def stddev_from_moving_average(timeseries):
"""
A timeseries is anomalous if the absolute value of the average of the latest
three datapoint minus the moving average is greater than three standard
deviations of the moving average. This is better for finding anomalies with
respect to the short term trends.
"""
series = pandas.Series([x[1] for x in timeseries])
expAverage = pandas.stats.moments.ewma(series, com=50)
stdDev = pandas.stats.moments.ewmstd(series, com=50)
return abs(series.iloc[-1] - expAverage.iloc[-1]) > 3 * stdDev.iloc[-1]
def mean_subtraction_cumulation(timeseries):
    """A timeseries is anomalous if the value of the next datapoint in the
    series is farther than three standard deviations out (in cumulative
    terms) after subtracting the mean from each data point.
    """
    # Falsy values (None, 0) are normalized to 0 so the arithmetic is defined.
    series = pandas.Series([x[1] if x[1] else 0 for x in timeseries])
    # Center on the mean of everything *except* the latest datapoint.
    series = series - series[:-1].mean()
    stdDev = series[:-1].std()
    # FIX: removed a leftover `pandas.stats.moments.ewma(series, com=15)`
    # call whose result was never used; that API was removed in pandas 0.23
    # and made this function crash on modern pandas.
    return abs(series.iloc[-1]) > 3 * stdDev
def least_squares(timeseries):
    """A timeseries is anomalous if the average of the last three residuals
    against a least-squares linear fit exceeds three sigma (and the rounded
    sigma and residual are both non-zero, to suppress flat series).
    """
    x = np.array([t[0] for t in timeseries])
    y = np.array([t[1] for t in timeseries])
    A = np.vstack([x, np.ones(len(x))]).T
    # FIX: the fit was previously computed twice (the first result, including
    # an unused `residual`, was discarded); solve once.  rcond=None selects
    # NumPy's modern default cutoff and silences the FutureWarning.
    m, c = np.linalg.lstsq(A, y, rcond=None)[0]
    # Residual of each observed value against the fitted line.
    errors = [value - (m * x[i] + c) for i, value in enumerate(y)]

    if len(errors) < 3:
        return False

    # FIX: scipy.std was removed from SciPy's top level; np.std is identical.
    std_dev = np.std(errors)
    t = (errors[-1] + errors[-2] + errors[-3]) / 3

    return abs(t) > std_dev * 3 and round(std_dev) != 0 and round(t) != 0
def histogram_bins(timeseries):
    """A timeseries is anomalous if the average of the last three datapoints falls
    into a histogram bin with less than 20 other datapoints (you'll need to tweak
    that number depending on your data).
    """
    # FIX: scipy.array was removed from SciPy's top level; np.array is identical.
    series = np.array([x[1] for x in timeseries])
    t = tail_avg(timeseries)
    h = np.histogram(series, bins=15)
    bins = h[1]
    for index, bin_size in enumerate(h[0]):
        if bin_size <= 20:
            # Is it in the first bin?
            # NOTE(review): for index 0 this tests t against the *left* edge
            # rather than membership in [bins[0], bins[1]); looks suspicious
            # but is preserved as-is to keep detection behaviour unchanged.
            if index == 0:
                if t <= bins[0]:
                    return True
            # Is it in the current bin?
            elif t >= bins[index] and t < bins[index + 1]:
                return True

    return False
#def ks_test(timeseries):
# """
# A timeseries is anomalous if 2 sample Kolmogorov-Smirnov test indicates
# that data distribution for last 10 minutes is different from last hour.
# It produces false positives on non-stationary series so Augmented
# Dickey-Fuller test applied to check for stationarity.
# """
#
# hour_ago = time() - 3600
# ten_minutes_ago = time() - 600
# reference = scipy.array([x[1] for x in timeseries if x[0] >= hour_ago and x[0] < ten_minutes_ago])
# probe = scipy.array([x[1] for x in timeseries if x[0] >= ten_minutes_ago])
# if reference.size < 20 or probe.size < 20:
# return False
# ks_d, ks_p_value = scipy.stats.ks_2samp(reference, probe)
# if ks_p_value < 0.05 and ks_d > 0.5:
# adf = sm.tsa.stattools.adfuller(reference, 10)
# if adf[1] < 0.05:
# return True
# return False
#def is_anomalously_anomalous(metric_name, ensemble, datapoint):
# """
# This method runs a meta-analysis on the metric to determine whether the
# metric has a past history of triggering. TODO: weight intervals based on datapoint
# """
# # We want the datapoint to avoid triggering twice on the same data
# new_trigger = [time(), datapoint]
# # Get the old history
# raw_trigger_history = redis_conn.get('trigger_history.' + metric_name)
# if not raw_trigger_history:
# redis_conn.set('trigger_history.' + metric_name, packb([(time(), datapoint)]))
# return True
# trigger_history = unpackb(raw_trigger_history)
# # Are we (probably) triggering on the same data?
# if (new_trigger[1] == trigger_history[-1][1] and
# new_trigger[0] - trigger_history[-1][0] <= 300):
# return False
# # Update the history
# trigger_history.append(new_trigger)
# redis_conn.set('trigger_history.' + metric_name, packb(trigger_history))
# # Should we surface the anomaly?
# trigger_times = [x[0] for x in trigger_history]
# intervals = [
# trigger_times[i + 1] - trigger_times[i]
# for i, v in enumerate(trigger_times)
# if (i + 1) < len(trigger_times)
# ]
# series = pandas.Series(intervals)
# mean = series.mean()
# stdDev = series.std()
# return abs(intervals[-1] - mean) > 3 * stdDev
def run_selected_algorithm(timeseries, metric_name):
    """
    Filter timeseries and run selected algorithm.

    Returns (anomalous, ensemble, last datapoint, threshold) on the normal
    path; raises TooShort/Stale/Boring for series not worth analyzing.
    """
    # Get rid of short series
    if len(timeseries) < MIN_TOLERABLE_LENGTH:
        raise TooShort()

    # Get rid of stale series
    if time() - timeseries[-1][0] > STALE_PERIOD:
        raise Stale()

    # Get rid of boring series (recent window holds only BOREDOM_SET_SIZE
    # distinct values)
    if len(set(item[1] for item in timeseries[-MAX_TOLERABLE_BOREDOM:])) == BOREDOM_SET_SIZE:
        raise Boring()

    try:
        # Each configured name resolves to one of the detectors defined above.
        ensemble = [globals()[algorithm](timeseries) for algorithm in ALGORITHMS]
        threshold = len(ensemble) - CONSENSUS
        # Anomalous when at least CONSENSUS detectors voted True.
        if ensemble.count(False) <= threshold:
            # if ENABLE_SECOND_ORDER:
            #     if is_anomalously_anomalous(metric_name, ensemble, timeseries[-1][1]):
            #         return True, ensemble, timeseries[-1][1], threshold
            # else:
            return True, ensemble, timeseries[-1][1], threshold

        return False, ensemble, timeseries[-1][1], threshold
    except:
        logging.error("Algorithm error: " + traceback.format_exc())
        # NOTE(review): this arm returns only 3 values while the paths above
        # return 4 -- callers that unpack four elements would fail here;
        # confirm which shape is intended.
        return False, [], 1
| mit |
mattseymour/django | tests/template_tests/filter_tests/test_dictsortreversed.py | 181 | 1686 | from django.template.defaultfilters import dictsortreversed
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
    """Tests for the ``dictsortreversed`` template filter."""

    def test_sort(self):
        """Dicts are ordered by the given key, descending."""
        ordered = dictsortreversed(
            [{'age': 23, 'name': 'Barbara-Ann'},
             {'age': 63, 'name': 'Ra Ra Rasputin'},
             {'name': 'Jonny B Goode', 'age': 18}],
            'age',
        )
        self.assertEqual(
            [sorted(entry.items()) for entry in ordered],
            [[('age', 63), ('name', 'Ra Ra Rasputin')],
             [('age', 23), ('name', 'Barbara-Ann')],
             [('age', 18), ('name', 'Jonny B Goode')]],
        )

    def test_sort_list_of_tuples(self):
        """Plain tuples can be sorted by a positional index."""
        data = [('a', '42'), ('c', 'string'), ('b', 'foo')]
        expected = [('c', 'string'), ('b', 'foo'), ('a', '42')]
        self.assertEqual(dictsortreversed(data, 0), expected)

    def test_sort_list_of_tuple_like_dicts(self):
        """Dicts keyed with string "indexes" behave like tuples."""
        data = [
            {'0': 'a', '1': '42'},
            {'0': 'c', '1': 'string'},
            {'0': 'b', '1': 'foo'},
        ]
        expected = [
            {'0': 'c', '1': 'string'},
            {'0': 'b', '1': 'foo'},
            {'0': 'a', '1': '42'},
        ]
        self.assertEqual(dictsortreversed(data, '0'), expected)

    def test_invalid_values(self):
        """
        If dictsortreversed is passed something other than a list of
        dictionaries, fail silently.
        """
        for invalid in ([1, 2, 3], 'Hello!', {'a': 1}, 1):
            self.assertEqual(dictsortreversed(invalid, 'age'), '')
| bsd-3-clause |
lxylinki/medCC | src/main/resources/output/evalresults2014/avgImp/indexasx.py | 1 | 1983 | # generate one file x-axis is problem indexes 1-20
import os
import math
def stdv(implist):
    """Return the population standard deviation of the values in *implist*.

    FIX: the divisor was hard-coded to 20 (the local ``totalnum`` was
    computed and then never used), so the result was only correct for
    exactly 20 samples -- the 20 budget levels this script happens to read.
    Generalized to any non-empty list; identical output for 20 samples.
    """
    totalnum = len(implist)
    # float() keeps the division exact under Python 2 as well.
    avg = sum(implist) / float(totalnum)
    sq = sum((imp - avg) * (imp - avg) for imp in implist) / totalnum
    return math.sqrt(sq)
def calcuAvg(Mods, Edges, maxindex):
    """Average per-problem improvement data and write it to indexasx.dat.

    For each problem index i < maxindex, reads
    ./overCost/<Mods[i]>_<Edges[i]>_AvgOverCost.txt (one row per budget
    level, 20 levels per file, columns: level, imp-over-HBCS,
    imp-over-ScaleStar) and writes one output row with the mean and
    standard deviation of both improvement columns.
    """
    filedir = './overCost/'
    outfilename = os.path.join('./', 'indexasx.dat')
    # `with` guarantees the handles are closed even if a parse step fails
    # (the originals were left open on error).
    with open(outfilename, 'w') as outfile:
        outfile.write('Idx Imp(HBCS) StdDv Imp(ScaleStar) StdDv\n')
        for i in range(0, maxindex):
            impoverhbcsalllevels = []
            impoverssalllevels = []
            filename = '{}_{}_AvgOverCost.txt'.format(Mods[i], Edges[i])
            filename = os.path.join(filedir, filename)
            with open(filename, 'r') as imps:
                for line in imps:
                    items = line.split()
                    # Skip the header row and (robustness FIX) blank lines,
                    # which previously raised IndexError.
                    if not items or not items[0].isdigit():
                        continue
                    impoverhbcsalllevels.append(float(items[1]))
                    impoverssalllevels.append(float(items[2]))
            # 20 budget levels per problem file (fixed by the experiment).
            avgoverhbcs = sum(impoverhbcsalllevels) / 20
            avgoverss = sum(impoverssalllevels) / 20
            writeline = '%d %.2f %.2f %.2f %.2f\n' % (
                i, avgoverhbcs, stdv(impoverhbcsalllevels),
                avgoverss, stdv(impoverssalllevels))
            outfile.write(writeline)
if __name__=='__main__':
    # Problem sizes: problem i has Mods[i] modules and Edges[i] edges.
    Mods = [5, 10, 15, 20, 25, 30, 35, 40, 45, 50,
            55, 60, 65, 70, 75, 80, 85, 90, 95, 100]
    Edges = [6, 15, 60, 80, 200, 300, 500, 500, 580, 500,
             800, 900, 950, 950, 1000, 1200, 1600, 1600, 2000]
    # Process all 20 problem instances (each data file holds 20 budget levels).
    calcuAvg(Mods, Edges, 20)
| gpl-3.0 |
jchodera/cclib | test/method/testnuclear.py | 5 | 1500 | # This file is part of cclib (http://cclib.github.io), a library for parsing
# and interpreting the results of computational chemistry packages.
#
# Copyright (C) 2014,2015, the cclib development team
#
# The library is free software, distributed under the terms of
# the GNU Lesser General Public version 2.1 or later. You should have
# received a copy of the license along with cclib. You can also access
# the full license online at http://www.gnu.org/copyleft/lgpl.html.
"""Test the Nuclear method in cclib"""
from __future__ import print_function
import os
import re
import logging
import unittest
import numpy
from testall import getfile
from cclib.method import Nuclear
from cclib.parser import QChem
from cclib.parser import utils
class NuclearTest(unittest.TestCase):
    """Checks for cclib's Nuclear calculation method."""
    def test_nre(self):
        """Testing nuclear repulsion energy for one logfile where it is printed."""
        # Parse a reference QChem output shipped with the test data.
        data, logfile = getfile(QChem, "basicQChem4.2", "water_mp4sdq.out")
        nuclear = Nuclear(data)
        nuclear.logger.setLevel(logging.ERROR)
        # Extract the reference value straight from the raw log text.
        with open(logfile.filename) as f:
            output = f.read()
        line = re.search('Nuclear Repulsion Energy = .* hartrees', output).group()
        nre = float(line.split()[4])
        # NOTE(review): applying the Angstrom->bohr length-unit ratio to an
        # energy in hartrees looks suspicious; presumably it compensates for
        # repulsion_energy() being computed from coordinates in Angstrom --
        # confirm against the Nuclear implementation.
        nre = utils.convertor(nre, 'Angstrom', 'bohr')
        self.assertAlmostEqual(nuclear.repulsion_energy(), nre, places=7)
# Test cases exported for the suite collector; presumably consumed by the
# project's top-level test runner -- confirm.
tests = [NuclearTest]
if __name__ == "__main__":
    unittest.TextTestRunner(verbosity=2).run(unittest.makeSuite(NuclearTest))
| lgpl-2.1 |
gauribhoite/personfinder | env/google_appengine/lib/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py | 2360 | 3778 | """The match_hostname() function from Python 3.3.3, essential when using SSL."""
# Note: This file is under the PSF license as the code comes from the python
# stdlib. http://docs.python.org/3/license.html
import re
__version__ = '3.4.0.2'
class CertificateError(ValueError):
    """Raised by match_hostname() when a certificate does not match the hostname."""
    pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
    """Verify that *cert* (decoded, as returned by SSLSocket.getpeercert())
    matches *hostname* following RFC 2818 / RFC 6125 rules.

    IP addresses are not accepted for *hostname*. Returns None on success;
    raises CertificateError on mismatch, ValueError on an empty cert.
    """
    if not cert:
        raise ValueError("empty or no certificate")

    candidates = []
    for key, value in cert.get('subjectAltName', ()):
        if key == 'DNS':
            if _dnsname_match(value, hostname):
                return
            candidates.append(value)

    # RFC 2818: the subject commonName is consulted only when the
    # certificate carries no dNSName entry in subjectAltName.
    if not candidates:
        for rdn in cert.get('subject', ()):
            for key, value in rdn:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    candidates.append(value)

    if len(candidates) > 1:
        raise CertificateError("hostname %r "
            "doesn't match either of %s"
            % (hostname, ', '.join(map(repr, candidates))))
    elif len(candidates) == 1:
        raise CertificateError("hostname %r "
            "doesn't match %r"
            % (hostname, candidates[0]))
    else:
        raise CertificateError("no appropriate commonName or "
            "subjectAltName fields were found")
| apache-2.0 |
Zelgadis87/Sick-Beard | lib/hachoir_parser/image/wmf.py | 90 | 23796 | """
Hachoir parser of Microsoft Windows Metafile (WMF) file format.
Documentation:
- Microsoft Windows Metafile; also known as: WMF,
Enhanced Metafile, EMF, APM
http://wvware.sourceforge.net/caolan/ora-wmf.html
- libwmf source code:
- include/libwmf/defs.h: enums
- src/player/meta.h: arguments parsers
- libemf source code
Author: Victor Stinner
Creation date: 26 december 2006
"""
MAX_FILESIZE = 50 * 1024 * 1024  # byte cap when scanning for the WMF end-of-file record
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import (FieldSet, StaticFieldSet, Enum,
MissingField, ParserError,
UInt32, Int32, UInt16, Int16, UInt8, NullBytes, RawBytes, String)
from lib.hachoir_core.endian import LITTLE_ENDIAN
from lib.hachoir_core.text_handler import textHandler, hexadecimal
from lib.hachoir_core.tools import createDict
from lib.hachoir_parser.image.common import RGBA
# GDI enumerations used to decode WMF record arguments.
POLYFILL_MODE = {1: "Alternate", 2: "Winding"}
BRUSH_STYLE = {
    0: u"Solid",
    1: u"Null",
    2: u"Hollow",
    3: u"Pattern",
    4: u"Indexed",
    5: u"DIB pattern",
    6: u"DIB pattern point",
    7: u"Pattern 8x8",
    8: u"DIB pattern 8x8",
}
HATCH_STYLE = {
    0: u"Horizontal", # -----
    1: u"Vertical", # |||||
    2: u"FDIAGONAL", # \\\\\
    3: u"BDIAGONAL", # /////
    4: u"Cross", # +++++
    5: u"Diagonal cross", # xxxxx
}
PEN_STYLE = {
    0: u"Solid",
    1: u"Dash", # -------
    2: u"Dot", # .......
    3: u"Dash dot", # _._._._
    4: u"Dash dot dot", # _.._.._
    5: u"Null",
    6: u"Inside frame",
    7: u"User style",
    8: u"Alternate",
}
# Binary raster operations
ROP2_DESC = {
    1: u"Black (0)",
    2: u"Not merge pen (DPon)",
    3: u"Mask not pen (DPna)",
    4: u"Not copy pen (PN)",
    5: u"Mask pen not (PDna)",
    6: u"Not (Dn)",
    7: u"Xor pen (DPx)",
    8: u"Not mask pen (DPan)",
    9: u"Mask pen (DPa)",
    10: u"Not xor pen (DPxn)",
    11: u"No operation (D)",
    12: u"Merge not pen (DPno)",
    13: u"Copy pen (P)",
    14: u"Merge pen not (PDno)",
    15: u"Merge pen (DPo)",
    16: u"White (1)",
}
# Generators yielding hachoir fields for the arguments of specific
# WMF record types (referenced from the META table below).
def parseXY(parser):
    # Signed 16-bit (x, y) coordinate pair.
    yield Int16(parser, "x")
    yield Int16(parser, "y")
def parseCreateBrushIndirect(parser):
    # CREATEBRUSHINDIRECT: style, color and hatch of the new brush.
    yield Enum(UInt16(parser, "brush_style"), BRUSH_STYLE)
    yield RGBA(parser, "color")
    yield Enum(UInt16(parser, "brush_hatch"), HATCH_STYLE)
def parsePenIndirect(parser):
    # CREATEPENINDIRECT: style, size and color of the new pen.
    yield Enum(UInt16(parser, "pen_style"), PEN_STYLE)
    yield UInt16(parser, "pen_width")
    yield UInt16(parser, "pen_height")
    yield RGBA(parser, "color")
def parsePolyFillMode(parser):
    yield Enum(UInt16(parser, "operation"), POLYFILL_MODE)
def parseROP2(parser):
    yield Enum(UInt16(parser, "operation"), ROP2_DESC)
def parseObjectID(parser):
    # Index into the metafile object table.
    yield UInt16(parser, "object_id")
class Point(FieldSet):
    """Signed 16-bit (x, y) point."""
    static_size = 32  # two Int16 fields; hachoir sizes are in bits
    def createFields(self):
        yield Int16(self, "x")
        yield Int16(self, "y")
    def createDescription(self):
        return "Point (%s, %s)" % (self["x"].value, self["y"].value)
def parsePolygon(parser):
    # POLYGON arguments: 16-bit vertex count, then that many points.
    yield UInt16(parser, "count")
    for index in xrange(parser["count"].value):  # Python 2 (xrange)
        yield Point(parser, "point[]")
# WMF record types: identifier -> (name, description, argument parser or None).
# Fixed description typos: "streching" -> "stretching" (0x0107) and
# "Fram region" -> "Frame region" (0x0429).
META = {
    0x0000: ("EOF", u"End of file", None),
    0x001E: ("SAVEDC", u"Save device context", None),
    0x0035: ("REALIZEPALETTE", u"Realize palette", None),
    0x0037: ("SETPALENTRIES", u"Set palette entries", None),
    0x00f7: ("CREATEPALETTE", u"Create palette", None),
    0x0102: ("SETBKMODE", u"Set background mode", None),
    0x0103: ("SETMAPMODE", u"Set mapping mode", None),
    0x0104: ("SETROP2", u"Set foreground mix mode", parseROP2),
    0x0106: ("SETPOLYFILLMODE", u"Set polygon fill mode", parsePolyFillMode),
    0x0107: ("SETSTRETCHBLTMODE", u"Set bitmap stretching mode", None),
    0x0108: ("SETTEXTCHAREXTRA", u"Set text character extra", None),
    0x0127: ("RESTOREDC", u"Restore device context", None),
    0x012A: ("INVERTREGION", u"Invert region", None),
    0x012B: ("PAINTREGION", u"Paint region", None),
    0x012C: ("SELECTCLIPREGION", u"Select clipping region", None),
    0x012D: ("SELECTOBJECT", u"Select object", parseObjectID),
    0x012E: ("SETTEXTALIGN", u"Set text alignment", None),
    0x0142: ("CREATEDIBPATTERNBRUSH", u"Create DIB brush with specified pattern", None),
    0x01f0: ("DELETEOBJECT", u"Delete object", parseObjectID),
    0x0201: ("SETBKCOLOR", u"Set background color", None),
    0x0209: ("SETTEXTCOLOR", u"Set text color", None),
    0x020A: ("SETTEXTJUSTIFICATION", u"Set text justification", None),
    0x020B: ("SETWINDOWORG", u"Set window origin", parseXY),
    0x020C: ("SETWINDOWEXT", u"Set window extends", parseXY),
    0x020D: ("SETVIEWPORTORG", u"Set view port origin", None),
    0x020E: ("SETVIEWPORTEXT", u"Set view port extends", None),
    0x020F: ("OFFSETWINDOWORG", u"Offset window origin", None),
    0x0211: ("OFFSETVIEWPORTORG", u"Offset view port origin", None),
    0x0213: ("LINETO", u"Draw a line to", None),
    0x0214: ("MOVETO", u"Move to", None),
    0x0220: ("OFFSETCLIPRGN", u"Offset clipping rectangle", None),
    0x0228: ("FILLREGION", u"Fill region", None),
    0x0231: ("SETMAPPERFLAGS", u"Set mapper flags", None),
    0x0234: ("SELECTPALETTE", u"Select palette", None),
    0x02FB: ("CREATEFONTINDIRECT", u"Create font indirect", None),
    0x02FA: ("CREATEPENINDIRECT", u"Create pen indirect", parsePenIndirect),
    0x02FC: ("CREATEBRUSHINDIRECT", u"Create brush indirect", parseCreateBrushIndirect),
    0x0324: ("POLYGON", u"Draw a polygon", parsePolygon),
    0x0325: ("POLYLINE", u"Draw a polyline", None),
    0x0410: ("SCALEWINDOWEXT", u"Scale window extends", None),
    0x0412: ("SCALEVIEWPORTEXT", u"Scale view port extends", None),
    0x0415: ("EXCLUDECLIPRECT", u"Exclude clipping rectangle", None),
    0x0416: ("INTERSECTCLIPRECT", u"Intersect clipping rectangle", None),
    0x0418: ("ELLIPSE", u"Draw an ellipse", None),
    0x0419: ("FLOODFILL", u"Flood fill", None),
    0x041B: ("RECTANGLE", u"Draw a rectangle", None),
    0x041F: ("SETPIXEL", u"Set pixel", None),
    0x0429: ("FRAMEREGION", u"Frame region", None),
    0x0521: ("TEXTOUT", u"Draw text", None),
    0x0538: ("POLYPOLYGON", u"Draw multiple polygons", None),
    0x0548: ("EXTFLOODFILL", u"Extend flood fill", None),
    0x061C: ("ROUNDRECT", u"Draw a rounded rectangle", None),
    0x061D: ("PATBLT", u"Pattern blitting", None),
    0x0626: ("ESCAPE", u"Escape", None),
    0x06FF: ("CREATEREGION", u"Create region", None),
    0x0817: ("ARC", u"Draw an arc", None),
    0x081A: ("PIE", u"Draw a pie", None),
    0x0830: ("CHORD", u"Draw a chord", None),
    0x0940: ("DIBBITBLT", u"DIB bit blitting", None),
    0x0a32: ("EXTTEXTOUT", u"Draw text (extra)", None),
    0x0b41: ("DIBSTRETCHBLT", u"DIB stretch blitting", None),
    0x0d33: ("SETDIBTODEV", u"Set DIB to device", None),
    0x0f43: ("STRETCHDIB", u"Stretch DIB", None),
}

# Lookup tables derived from META: id -> name, id -> description.
META_NAME = createDict(META, 0)
META_DESC = createDict(META, 1)
#----------------------------------------------------------------------------
# EMF constants
# EMF mapping modes
# Values of the EMF SETMAPMODE record argument.
EMF_MAPPING_MODE = {
    1: "TEXT",
    2: "LOMETRIC",
    3: "HIMETRIC",
    4: "LOENGLISH",
    5: "HIENGLISH",
    6: "TWIPS",
    7: "ISOTROPIC",
    8: "ANISOTROPIC",
}
#----------------------------------------------------------------------------
# EMF parser
# Generators yielding hachoir fields for EMF record arguments
# (referenced from the EMF_META table below).
def parseEmfMappingMode(parser):
    yield Enum(Int32(parser, "mapping_mode"), EMF_MAPPING_MODE)
def parseXY32(parser):
    # Signed 32-bit (x, y) coordinate pair.
    yield Int32(parser, "x")
    yield Int32(parser, "y")
def parseObjectID32(parser):
    # Object handle, shown in hexadecimal.
    yield textHandler(UInt32(parser, "object_id"), hexadecimal)
def parseBrushIndirect(parser):
    # EMR_CREATEBRUSHINDIRECT: handle, style, color and hatch.
    yield UInt32(parser, "ihBrush")
    yield UInt32(parser, "style")
    yield RGBA(parser, "color")
    yield Int32(parser, "hatch")
class Point16(FieldSet):
    """Signed 16-bit (x, y) point used by the *16 EMF records."""
    static_size = 32  # two Int16 fields; hachoir sizes are in bits
    def createFields(self):
        yield Int16(self, "x")
        yield Int16(self, "y")
    def createDescription(self):
        return "Point16: (%i,%i)" % (self["x"].value, self["y"].value)
def parsePoint16array(parser):
    # Shared layout of POLYGON16/POLYLINE16/...: bounding rectangle,
    # 32-bit point count, then the 16-bit points themselves.
    yield RECT32(parser, "bounds")
    yield UInt32(parser, "count")
    for index in xrange(parser["count"].value):  # Python 2 (xrange)
        yield Point16(parser, "point[]")
def parseGDIComment(parser):
    # GDICOMMENT: opaque payload prefixed by its byte size.
    yield UInt32(parser, "data_size")
    size = parser["data_size"].value
    if size:
        yield RawBytes(parser, "data", size)
def parseICMMode(parser):
    yield UInt32(parser, "icm_mode")
def parseExtCreatePen(parser):
    # EXTCREATEPEN: pen handle, optional DIB offsets/sizes, then the
    # pen attributes and a variable-length style array.
    yield UInt32(parser, "ihPen")
    yield UInt32(parser, "offBmi")
    yield UInt32(parser, "cbBmi")
    yield UInt32(parser, "offBits")
    yield UInt32(parser, "cbBits")
    yield UInt32(parser, "pen_style")
    yield UInt32(parser, "width")
    yield UInt32(parser, "brush_style")
    yield RGBA(parser, "color")
    yield UInt32(parser, "hatch")
    yield UInt32(parser, "nb_style")
    for index in xrange(parser["nb_style"].value):  # Python 2 (xrange)
        yield UInt32(parser, "style")
# EMF record types: identifier -> (name, description, argument parser or None).
# Fixed description typo: "Arbort path" -> "Abort path" (record 68).
EMF_META = {
    1: ("HEADER", u"Header", None),
    2: ("POLYBEZIER", u"Draw poly bezier", None),
    3: ("POLYGON", u"Draw polygon", None),
    4: ("POLYLINE", u"Draw polyline", None),
    5: ("POLYBEZIERTO", u"Draw poly bezier to", None),
    6: ("POLYLINETO", u"Draw poly line to", None),
    7: ("POLYPOLYLINE", u"Draw poly polyline", None),
    8: ("POLYPOLYGON", u"Draw poly polygon", None),
    9: ("SETWINDOWEXTEX", u"Set window extend EX", parseXY32),
    10: ("SETWINDOWORGEX", u"Set window origin EX", parseXY32),
    11: ("SETVIEWPORTEXTEX", u"Set viewport extend EX", parseXY32),
    12: ("SETVIEWPORTORGEX", u"Set viewport origin EX", parseXY32),
    13: ("SETBRUSHORGEX", u"Set brush org EX", None),
    14: ("EOF", u"End of file", None),
    15: ("SETPIXELV", u"Set pixel V", None),
    16: ("SETMAPPERFLAGS", u"Set mapper flags", None),
    17: ("SETMAPMODE", u"Set mapping mode", parseEmfMappingMode),
    18: ("SETBKMODE", u"Set background mode", None),
    19: ("SETPOLYFILLMODE", u"Set polyfill mode", None),
    20: ("SETROP2", u"Set ROP2", None),
    21: ("SETSTRETCHBLTMODE", u"Set stretching blitting mode", None),
    22: ("SETTEXTALIGN", u"Set text align", None),
    23: ("SETCOLORADJUSTMENT", u"Set color adjustment", None),
    24: ("SETTEXTCOLOR", u"Set text color", None),
    25: ("SETBKCOLOR", u"Set background color", None),
    26: ("OFFSETCLIPRGN", u"Offset clipping region", None),
    27: ("MOVETOEX", u"Move to EX", parseXY32),
    28: ("SETMETARGN", u"Set meta region", None),
    29: ("EXCLUDECLIPRECT", u"Exclude clipping rectangle", None),
    30: ("INTERSECTCLIPRECT", u"Intersect clipping rectangle", None),
    31: ("SCALEVIEWPORTEXTEX", u"Scale viewport extend EX", None),
    32: ("SCALEWINDOWEXTEX", u"Scale window extend EX", None),
    33: ("SAVEDC", u"Save device context", None),
    34: ("RESTOREDC", u"Restore device context", None),
    35: ("SETWORLDTRANSFORM", u"Set world transform", None),
    36: ("MODIFYWORLDTRANSFORM", u"Modify world transform", None),
    37: ("SELECTOBJECT", u"Select object", parseObjectID32),
    38: ("CREATEPEN", u"Create pen", None),
    39: ("CREATEBRUSHINDIRECT", u"Create brush indirect", parseBrushIndirect),
    40: ("DELETEOBJECT", u"Delete object", parseObjectID32),
    41: ("ANGLEARC", u"Draw angle arc", None),
    42: ("ELLIPSE", u"Draw ellipse", None),
    43: ("RECTANGLE", u"Draw rectangle", None),
    44: ("ROUNDRECT", u"Draw rounded rectangle", None),
    45: ("ARC", u"Draw arc", None),
    46: ("CHORD", u"Draw chord", None),
    47: ("PIE", u"Draw pie", None),
    48: ("SELECTPALETTE", u"Select palette", None),
    49: ("CREATEPALETTE", u"Create palette", None),
    50: ("SETPALETTEENTRIES", u"Set palette entries", None),
    51: ("RESIZEPALETTE", u"Resize palette", None),
    52: ("REALIZEPALETTE", u"Realize palette", None),
    53: ("EXTFLOODFILL", u"EXT flood fill", None),
    54: ("LINETO", u"Draw line to", parseXY32),
    55: ("ARCTO", u"Draw arc to", None),
    56: ("POLYDRAW", u"Draw poly draw", None),
    57: ("SETARCDIRECTION", u"Set arc direction", None),
    58: ("SETMITERLIMIT", u"Set miter limit", None),
    59: ("BEGINPATH", u"Begin path", None),
    60: ("ENDPATH", u"End path", None),
    61: ("CLOSEFIGURE", u"Close figure", None),
    62: ("FILLPATH", u"Fill path", None),
    63: ("STROKEANDFILLPATH", u"Stroke and fill path", None),
    64: ("STROKEPATH", u"Stroke path", None),
    65: ("FLATTENPATH", u"Flatten path", None),
    66: ("WIDENPATH", u"Widen path", None),
    67: ("SELECTCLIPPATH", u"Select clipping path", None),
    68: ("ABORTPATH", u"Abort path", None),
    70: ("GDICOMMENT", u"GDI comment", parseGDIComment),
    71: ("FILLRGN", u"Fill region", None),
    72: ("FRAMERGN", u"Frame region", None),
    73: ("INVERTRGN", u"Invert region", None),
    74: ("PAINTRGN", u"Paint region", None),
    75: ("EXTSELECTCLIPRGN", u"EXT select clipping region", None),
    76: ("BITBLT", u"Bit blitting", None),
    77: ("STRETCHBLT", u"Stretch blitting", None),
    78: ("MASKBLT", u"Mask blitting", None),
    79: ("PLGBLT", u"PLG blitting", None),
    80: ("SETDIBITSTODEVICE", u"Set DIB bits to device", None),
    81: ("STRETCHDIBITS", u"Stretch DIB bits", None),
    82: ("EXTCREATEFONTINDIRECTW", u"EXT create font indirect W", None),
    83: ("EXTTEXTOUTA", u"EXT text out A", None),
    84: ("EXTTEXTOUTW", u"EXT text out W", None),
    85: ("POLYBEZIER16", u"Draw poly bezier (16-bit)", None),
    86: ("POLYGON16", u"Draw polygon (16-bit)", parsePoint16array),
    87: ("POLYLINE16", u"Draw polyline (16-bit)", parsePoint16array),
    88: ("POLYBEZIERTO16", u"Draw poly bezier to (16-bit)", parsePoint16array),
    89: ("POLYLINETO16", u"Draw polyline to (16-bit)", parsePoint16array),
    90: ("POLYPOLYLINE16", u"Draw poly polyline (16-bit)", None),
    91: ("POLYPOLYGON16", u"Draw poly polygon (16-bit)", parsePoint16array),
    92: ("POLYDRAW16", u"Draw poly draw (16-bit)", None),
    93: ("CREATEMONOBRUSH", u"Create monobrush", None),
    94: ("CREATEDIBPATTERNBRUSHPT", u"Create DIB pattern brush PT", None),
    95: ("EXTCREATEPEN", u"EXT create pen", parseExtCreatePen),
    96: ("POLYTEXTOUTA", u"Poly text out A", None),
    97: ("POLYTEXTOUTW", u"Poly text out W", None),
    98: ("SETICMMODE", u"Set ICM mode", parseICMMode),
    99: ("CREATECOLORSPACE", u"Create color space", None),
    100: ("SETCOLORSPACE", u"Set color space", None),
    101: ("DELETECOLORSPACE", u"Delete color space", None),
    102: ("GLSRECORD", u"GLS record", None),
    103: ("GLSBOUNDEDRECORD", u"GLS bound ED record", None),
    104: ("PIXELFORMAT", u"Pixel format", None),
}

# Lookup tables derived from EMF_META: id -> name, id -> description.
EMF_META_NAME = createDict(EMF_META, 0)
EMF_META_DESC = createDict(EMF_META, 1)
class Function(FieldSet):
    """One metafile record: a small header (function id + size) then arguments."""
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # Record sizes: EMF stores bytes, WMF stores 16-bit words;
        # hachoir field sizes are expressed in bits.
        if self.root.isEMF():
            self._size = self["size"].value * 8
        else:
            self._size = self["size"].value * 16
    def createFields(self):
        # Header layout differs: EMF is (function:u32, size:u32),
        # WMF is (size:u32, function:u16).
        if self.root.isEMF():
            yield Enum(UInt32(self, "function"), EMF_META_NAME)
            yield UInt32(self, "size")
            try:
                parser = EMF_META[self["function"].value][2]
            except KeyError:
                parser = None
        else:
            yield UInt32(self, "size")
            yield Enum(UInt16(self, "function"), META_NAME)
            try:
                parser = META[self["function"].value][2]
            except KeyError:
                parser = None
        if parser:
            for field in parser(self):
                yield field
        else:
            # No dedicated argument parser: keep the raw payload.
            size = (self.size - self.current_size) // 8
            if size:
                yield RawBytes(self, "data", size)
    def isValid(self):
        # A record is valid when its function id is a known enum value.
        func = self["function"]
        return func.value in func.getEnum()
    def createDescription(self):
        if self.root.isEMF():
            return EMF_META_DESC[self["function"].value]
        try:
            return META_DESC[self["function"].value]
        except KeyError:
            return "Function %s" % self["function"].display
class RECT16(StaticFieldSet):
    """Signed 16-bit rectangle: (left, top, right, bottom)."""
    format = (
        (Int16, "left"),
        (Int16, "top"),
        (Int16, "right"),
        (Int16, "bottom"),
    )
    def createDescription(self):
        # Rendered as "NAME: WxH at (left,top)".
        return "%s: %ux%u at (%u,%u)" % (
            self.__class__.__name__,
            self["right"].value-self["left"].value,
            self["bottom"].value-self["top"].value,
            self["left"].value,
            self["top"].value)
class RECT32(RECT16):
    """Same rectangle with 32-bit fields; reuses RECT16.createDescription()."""
    format = (
        (Int32, "left"),
        (Int32, "top"),
        (Int32, "right"),
        (Int32, "bottom"),
    )
class PlaceableHeader(FieldSet):
    """
    Header of Placeable Metafile (file extension .APM),
    created by Aldus Corporation
    """
    MAGIC = "\xD7\xCD\xC6\x9A\0\0" # (magic, handle=0x0000)
    def createFields(self):
        yield textHandler(UInt32(self, "signature", "Placeable Metafiles signature (0x9AC6CDD7)"), hexadecimal)
        yield UInt16(self, "handle")
        yield RECT16(self, "rect")
        # Number of metafile units per inch.
        yield UInt16(self, "inch")
        yield NullBytes(self, "reserved", 4)
        yield textHandler(UInt16(self, "checksum"), hexadecimal)
class EMF_Header(FieldSet):
    """Enhanced Metafile (EMF) header record."""
    MAGIC = "\x20\x45\x4D\x46\0\0" # (magic, min_ver=0x0000)
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # Header size is stored in bytes; hachoir sizes are in bits.
        self._size = self["size"].value * 8
    def createFields(self):
        LONG = Int32  # alias matching the Windows type name
        yield UInt32(self, "type", "Record type (always 1)")
        yield UInt32(self, "size", "Size of the header in bytes")
        yield RECT32(self, "Bounds", "Inclusive bounds")
        yield RECT32(self, "Frame", "Inclusive picture frame")
        yield textHandler(UInt32(self, "signature", "Signature ID (always 0x464D4520)"), hexadecimal)
        yield UInt16(self, "min_ver", "Minor version")
        yield UInt16(self, "maj_ver", "Major version")
        yield UInt32(self, "file_size", "Size of the file in bytes")
        yield UInt32(self, "NumOfRecords", "Number of records in the metafile")
        yield UInt16(self, "NumOfHandles", "Number of handles in the handle table")
        yield NullBytes(self, "reserved", 2)
        yield UInt32(self, "desc_size", "Size of description in 16-bit words")
        yield UInt32(self, "desc_ofst", "Offset of description string in metafile")
        yield UInt32(self, "nb_colors", "Number of color palette entries")
        yield LONG(self, "width_px", "Width of reference device in pixels")
        yield LONG(self, "height_px", "Height of reference device in pixels")
        yield LONG(self, "width_mm", "Width of reference device in millimeters")
        yield LONG(self, "height_mm", "Height of reference device in millimeters")
        # Read description (if any) -- only when it directly follows the
        # fixed fields; otherwise it is left inside the padding.
        offset = self["desc_ofst"].value
        current = (self.absolute_address + self.current_size) // 8
        size = self["desc_size"].value * 2
        if offset == current and size:
            yield String(self, "description", size, charset="UTF-16-LE", strip="\0 ")
        # Read padding (if any)
        size = self["size"].value - self.current_size//8
        if size:
            yield RawBytes(self, "padding", size)
class WMF_File(Parser):
    """Parser for WMF, Placeable/APM and Enhanced (EMF) Windows metafiles.

    The concrete variant is detected at runtime by isEMF()/isAPM().
    """
    PARSER_TAGS = {
        "id": "wmf",
        "category": "image",
        "file_ext": ("wmf", "apm", "emf"),
        "mime": (
            u"image/wmf", u"image/x-wmf", u"image/x-win-metafile",
            u"application/x-msmetafile", u"application/wmf", u"application/x-wmf",
            u"image/x-emf"),
        "magic": (
            (PlaceableHeader.MAGIC, 0),
            (EMF_Header.MAGIC, 40*8),
            # WMF: file_type=memory, header size=9, version=3.0
            ("\0\0\x09\0\0\3", 0),
            # WMF: file_type=disk, header size=9, version=3.0
            ("\1\0\x09\0\0\3", 0),
        ),
        "min_size": 40*8,
        "description": u"Microsoft Windows Metafile (WMF)",
    }
    endian = LITTLE_ENDIAN
    FILE_TYPE = {0: "memory", 1: "disk"}
    def validate(self):
        # Returns True when the file looks valid, or an error string.
        if self.isEMF():
            # Check EMF header
            emf = self["emf_header"]
            if emf["signature"].value != 0x464D4520:
                return "Invalid signature"
            if emf["type"].value != 1:
                return "Invalid record type"
            if emf["reserved"].value != "\0\0":
                return "Invalid reserved"
        else:
            # Check AMF header
            if self.isAPM():
                amf = self["amf_header"]
                if amf["handle"].value != 0:
                    return "Invalid handle"
                if amf["reserved"].value != "\0\0\0\0":
                    return "Invalid reserved"
            # Check common header
            if self["file_type"].value not in (0, 1):
                return "Invalid file type"
            if self["header_size"].value != 9:
                return "Invalid header size"
            if self["nb_params"].value != 0:
                return "Invalid number of parameters"
        # Check first functions
        for index in xrange(5):
            try:
                func = self["func[%u]" % index]
            except MissingField:
                # Fewer than 5 records is fine if parsing finished cleanly.
                if self.done:
                    return True
                return "Unable to get function #%u" % index
            except ParserError:
                return "Unable to create function #%u" % index
            # Check first frame values
            if not func.isValid():
                return "Function #%u is invalid" % index
        return True
    def createFields(self):
        if self.isEMF():
            yield EMF_Header(self, "emf_header")
        else:
            if self.isAPM():
                yield PlaceableHeader(self, "amf_header")
            yield Enum(UInt16(self, "file_type"), self.FILE_TYPE)
            yield UInt16(self, "header_size", "Size of header in 16-bit words (always 9)")
            yield UInt8(self, "win_ver_min", "Minor version of Microsoft Windows")
            yield UInt8(self, "win_ver_maj", "Major version of Microsoft Windows")
            yield UInt32(self, "file_size", "Total size of the metafile in 16-bit words")
            yield UInt16(self, "nb_obj", "Number of objects in the file")
            yield UInt32(self, "max_record_size", "The size of largest record in 16-bit words")
            yield UInt16(self, "nb_params", "Not Used (always 0)")
        # Records follow the header until end of stream (both variants).
        while not(self.eof):
            yield Function(self, "func[]")
    def isEMF(self):
        """File is in EMF format?"""
        # Once parsing started, trust the first parsed field's name.
        if 1 <= self.current_length:
            return self[0].name == "emf_header"
        if self.size < 44*8:
            return False
        magic = EMF_Header.MAGIC
        return self.stream.readBytes(40*8, len(magic)) == magic
    def isAPM(self):
        """File is in Aldus Placeable Metafiles format?"""
        if 1 <= self.current_length:
            return self[0].name == "amf_header"
        else:
            magic = PlaceableHeader.MAGIC
            return (self.stream.readBytes(0, len(magic)) == magic)
    def createDescription(self):
        if self.isEMF():
            return u"Microsoft Enhanced Metafile (EMF) picture"
        elif self.isAPM():
            return u"Aldus Placeable Metafile (APM) picture"
        else:
            return u"Microsoft Windows Metafile (WMF) picture"
    def createMimeType(self):
        if self.isEMF():
            return u"image/x-emf"
        else:
            return u"image/wmf"
    def createContentSize(self):
        if self.isEMF():
            return None
        # Locate the WMF EOF record (function 0x0003? raw pattern below)
        # and report the size up to and including it, in bits.
        start = self["func[0]"].absolute_address
        end = self.stream.searchBytes("\3\0\0\0\0\0", start, MAX_FILESIZE * 8)
        if end is not None:
            return end + 6*8
        return None
| gpl-3.0 |
bowen0701/algorithms_data_structures | lc0581_shortest_unsorted_continuous_subarray.py | 1 | 2700 | """Leetcode 581. Shortest Unsorted Continuous Subarray
Easy
URL: https://leetcode.com/problems/shortest-unsorted-continuous-subarray/
Given an integer array, you need to find one continuous subarray that if you only
sort this subarray in ascending order, then the whole array will be sorted in
ascending order, too.
You need to find the shortest such subarray and output its length.
Example 1:
Input: [2, 6, 4, 8, 10, 9, 15]
Output: 5
Explanation: You need to sort [6, 4, 8, 10, 9] in ascending order to make the
whole array sorted in ascending order.
Note:
- Then length of the input array is in range [1, 10,000].
- The input array may contain duplicates, so ascending order here means <=.
"""
class SolutionSortTwoPoinsters(object):
    def findUnsortedSubarray(self, nums):
        """
        :type nums: List[int]
        :rtype: int

        Compare nums against its sorted copy, shrinking the matching
        prefix from the left and the matching suffix from the right;
        whatever remains is the shortest subarray to sort.

        Time complexity: O(n*logn).
        Space complexity: O(n).
        """
        ordered = sorted(nums)
        total = len(nums)

        # Advance past the already-sorted prefix.
        left = 0
        while left < total and nums[left] == ordered[left]:
            left += 1

        # Retreat past the already-sorted suffix (never crossing left).
        right = total - 1
        while right > left and nums[right] == ordered[right]:
            right -= 1

        return right - left + 1
class SolutionMinRHSMaxLHS(object):
    def findUnsortedSubarray(self, nums):
        """
        :type nums: List[int]
        :rtype: int

        In a sorted list every element equals the minimum of the suffix
        starting at it and the maximum of the prefix ending at it.
        Build both running tables, then shrink the matching ends with
        two pointers.

        Time complexity: O(n) -- the previous docstring incorrectly
        claimed O(n*logn); there is no sorting here, only linear scans.
        Space complexity: O(n).
        """
        n = len(nums)

        # suffix_min[j] = min(nums[j:]), built right to left.
        suffix_min = [None] * n
        running = float('inf')
        for idx in range(n - 1, -1, -1):
            running = min(running, nums[idx])
            suffix_min[idx] = running

        # prefix_max[i] = max(nums[:i + 1]), built left to right.
        prefix_max = [None] * n
        running = -float('inf')
        for idx in range(n):
            running = max(running, nums[idx])
            prefix_max[idx] = running

        # Shrink ends that already agree with the sorted-order invariants.
        left, right = 0, n - 1
        while left < n and nums[left] == suffix_min[left]:
            left += 1
        while right > left and nums[right] == prefix_max[right]:
            right -= 1
        return right - left + 1
def main():
    # Demo driver: both implementations should agree.
    # NOTE: Python 2 print statements -- this file targets Python 2.
    # Output: 5
    nums = [2, 6, 4, 8, 10, 9, 15]
    print SolutionSortTwoPoinsters().findUnsortedSubarray(nums)
    print SolutionMinRHSMaxLHS().findUnsortedSubarray(nums)
if __name__ == '__main__':
    main()
| bsd-2-clause |
rcarmo/pythonium | pythonium/compliant/compliant.py | 2 | 34359 | import os
import sys
from collections import namedtuple
from ast import Str
from ast import Name
from ast import List
from ast import Tuple
from ast import parse
from ast import Assign
from ast import Global
from ast import Attribute
from ast import Subscript
from ast import FunctionDef
from ast import NodeVisitor
from ..utils import YieldSearch, Writer
from ..veloce.veloce import Veloce
# Lightweight markers pushed on Compliant._def_stack to record which kind
# of definition (class or function) is currently being translated.
ClassDefNode = namedtuple('ClassDef', 'name')
FunctionDefNode = namedtuple('FunctionDef', 'name')
class Compliant(NodeVisitor):
    @classmethod
    def translate(cls, code):
        """Translate Python source *code* and return the generated JavaScript text."""
        translator = cls()
        ast = parse(code)
        translator.visit(ast)
        return translator.writer.value()
    def __init__(self):
        super().__init__()
        self.dependencies = []  # module paths required by the generated code
        self._def_stack = []    # stack of ClassDefNode/FunctionDefNode currently open
        # Names exported by the translated module; populated elsewhere -- confirm.
        self.__all__ = []
        self.writer = Writer()  # output buffer for the generated JavaScript
        self._uuid = -1         # counter backing uuid()
    def uuid(self):
        """Return a fresh integer, used to build unique temporary JS names."""
        self._uuid += 1
        return self._uuid
    def visit(self, node):
        # With DEBUG set in the environment, trace every visited AST node.
        if os.environ.get('DEBUG', False):
            sys.stderr.write(">>> {} {}\n".format(node.__class__.__name__, node._fields))
        return super().visit(node)
######################################################################
# mod = Module | Interactive | Expression | Suite
#
# stmt = FunctionDef | ClassDef | Return | Delete | Assign | AugAssign
# | For | While | If | With | Raise | Try | Assert | Import | ImportFrom
# | Global | Nonlocal | Expr | Pass | Break | Continue
#
# expr = BoolOp | BinOp | UnaryOp | Lambda | IfExp | Dict | Set
# | ListComp | SetComp | DictComp | GeneratorExp | Yield | YieldFrom
# | Compare | Call | Num | Str | Bytes | Ellipsis | Attribute
# | Subscript | Starred | Name | List | Tuple
#
# expr_context = Load | Store | Del | AugLoad | AugStore | Param
#
# slice = Slice | ExtSlice | Index
#
# boolop = And | Or
#
# operator = Add | Sub | Mult | Div | Mod | Pow | LShift | RShift
# | BitOr | BitXor | BitAnd | FloorDiv
#
# unaryop = Invert | Not | UAdd | USub
#
# cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn
#
# comprehension = (expr target, expr iter, expr* ifs)
#
# excepthandler = ExceptHandler(expr? type, identifier? name, stmt* body)
#
# arguments = (arg* args, identifier? vararg, expr? varargannotation,
# arg* kwonlyargs, identifier? kwarg, expr? kwargannotation,
# expr* defaults, expr* kw_defaults)
#
# arg = (identifier arg, expr? annotation)
#
# keyword = (identifier arg, expr value)
#
# alias = (identifier name, identifier? asname)
#
# withitem = (expr context_expr, expr? optional_vars)
######################################################################
# Interactive(stmt* body)
visit_Interactive = NotImplemented
# Expression(expr body)
visit_Expression = NotImplemented
# Suite(stmt* body)
visit_Suite = NotImplemented
# expr_context = Load | Store | Del | AugLoad | AugStore | Param
visit_Load = NotImplemented
visit_Store = NotImplemented
visit_Del = NotImplemented
visit_AugLoad = NotImplemented
visit_AugStore = NotImplemented
visit_Param = NotImplemented
    # Pass
    def visit_Pass(self, node):
        # Emit a no-op marker so the statement leaves a trace in the output.
        self.writer.write('/* pass */')
    # Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
    def visit_Try(self, node):
        # Translate to a JavaScript try/catch; the caught value is bound to
        # the fixed name __exception__ for the handlers to inspect.
        # NOTE(review): orelse and finalbody are not emitted -- confirm.
        self.writer.write('try {')
        self.writer.push()
        list(map(self.visit, node.body))
        self.writer.pull()
        self.writer.write('} catch (__exception__) {')
        self.writer.push()
        list(map(self.visit, node.handlers))
        self.writer.pull()
        self.writer.write('}')
    # Raise(expr? exc, expr? cause)
    def visit_Raise(self, node):
        # NOTE(review): node.exc is None for a bare ``raise``, which would
        # fail here -- confirm bare raise is unsupported by the translator.
        self.writer.write('throw {};'.format(self.visit(node.exc)))
    # ExceptHandler(expr? type, identifier? name, stmt* body)
    def visit_ExceptHandler(self, node):
        # Typed handlers become an ``if pythonium_is_exception(...)`` guard;
        # a bare ``except:`` emits its body unconditionally.
        if node.type:
            if node.name:
                self.writer.write('var {} = __exception__;'.format(node.name))
                name = node.name
            else:
                name = '__exception__'
            self.writer.write('if (pythonium_is_exception({}, {})) {{'.format(name, self.visit(node.type)))
            self.writer.push()
        list(map(self.visit, node.body))
        if node.type:
            self.writer.pull()
            self.writer.write('}')
    # Yield(expr? value)
    def visit_Yield(self, node):
        # JavaScript generators: translate directly to a yield expression.
        return 'yield {}'.format(self.visit(node.value))
    # YieldFrom(expr value)
    def visit_YieldFrom(self, node):
        # Silently ignored (returns None) -- ``yield from`` unsupported.
        pass
    # In
    def visit_In(self, node):
        # ``a in b`` dispatches to the reversed-contains runtime helper.
        return '__rcontains__'
    # NotIn
    visit_NotIn = NotImplemented
    # Module(stmt* body)
    def visit_Module(self, node):
        list(map(self.visit, node.body))
    # Tuple(expr* elts, expr_context ctx)
    def visit_Tuple(self, node):
        # Non-empty tuples need a named temporary so the element list can
        # be attached as its .jsobject; empty ones are a bare runtime call.
        args = ', '.join(map(self.visit, node.elts))
        if args:
            name = '__a_tuple{}'.format(self.uuid())
            self.writer.write('var {} = pythonium_call(tuple);'.format(name))
            self.writer.write('{}.jsobject = [{}];'.format(name, args))
            return name
        else:
            return 'pythonium_call(tuple)'
    # List(expr* elts, expr_context ctx)
    def visit_List(self, node):
        # Mirrors visit_Tuple, but constructs a runtime list.
        args = ', '.join(map(self.visit, node.elts))
        if args:
            name = '__a_list{}'.format(self.uuid())
            self.writer.write('var {} = pythonium_call(list);'.format(name))
            self.writer.write('{}.jsobject = [{}];'.format(name, args))
            return name
        else:
            return 'pythonium_call(list)'
    # Set(expr* elts)
    visit_Set = NotImplemented
# alias = (identifier name, identifier? asname)
def visit_alias(self, node):
out = ''
name = node.name
asname = node.asname
if not asname:
asname = name
path = []
for module in name.split('.')[:-1]:
path.append(module)
path_to_module = '.'.join(path)
self.writer.write("var {} = typeof({}) == 'undefined' ? {{}} : {}".format(path_to_module, path_to_module, path_to_module))
path.append(asname.split('.')[-1])
path = '/'.join(path)
self.writer.write('var {} = require("{}");'.format(asname, path))
path = '/'.join(name.split('.'))
self.dependencies.append('/' + path) # relative to project root
# Import(alias* names)
visit_Import = NotImplemented
# ImportFrom(identifier? module, alias* names, int? level)
def visit_ImportFrom(self, node):
if len(node.names) > 1:
raise NotImplemented
if len(node.names) == 0:
raise NotImplemented
out = ''
name = node.names[0].name
asname = node.names[0].asname
if not asname:
asname = name
modules = '/'.join(node.module.split('.'))
path = modules
if node.level == 0:
self.writer.write('var {} = require("{}").{};'.format(asname, path, name))
self.dependencies.append('/' + path) # relative to project root
elif node.level == 1:
self.writer.write('var {} = require.toUrl("./{}").{};'.format(asname, path, name))
self.dependencies.append('./' + path) # relative to current file
else:
path = '../' * node.level + path
self.writer.write('var {} = require.toUrl("{}").{};'.format(asname, path, name))
self.dependencies.append(path) # relative to current file
return out
    # Global(identifier* names)
    def visit_Global(self, node):
        # handled in visit_FunctionDef: 'global' names are excluded from
        # the enclosing function's 'var' declarations; nothing to emit.
        return ''
    # Nonlocal(identifier* names)
    visit_Nonlocal = NotImplemented
    def _is_inside_method_definition(self):
        """True when the innermost open definition is a function whose
        direct parent is a class, i.e. we are emitting a method body."""
        if len(self._def_stack) >= 2:
            if isinstance(self._def_stack[-2], ClassDefNode):
                if isinstance(self._def_stack[-1], FunctionDefNode):
                    return True
        return False
    def _is_inside_class_definition(self):
        """True when the innermost open definition is a class body."""
        return isinstance(self._def_stack[-1], ClassDefNode)
    # FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns)
    def visit_FunctionDef(self, node):
        """Emit a JS function (or generator) for a Python function def.

        Methods are emitted under the mangled name ``__<Class>_<name>``
        and tagged with ``is_method`` so the runtime can bind the
        receiver.  Returns ``(python_name, emitted_name)`` for use by
        visit_ClassDef.
        """
        # 'name', 'args', 'body', 'decorator_list', 'returns'
        if len(self._def_stack) == 0: # module level definition must be exported
            self.__all__.append(node.name)
        self._def_stack.append(FunctionDefNode(node.name))
        args, kwargs, varargs, varkwargs = self.visit(node.args)
        # Collect every parameter name so it is not re-declared as a local.
        all_parameters = list(args)
        all_parameters.extend(kwargs.keys())
        if varargs:
            all_parameters.append(varargs)
        if varkwargs:
            all_parameters.append(varkwargs)
        all_parameters = set(all_parameters)
        if self._is_inside_method_definition():
            name = '__{}_{}'.format(self._def_stack[-2].name, node.name)
        else:
            name = node.name
        # handle yield: any 'yield' anywhere in the body turns the whole
        # function into a JavaScript generator ('function*').
        has_yield = False
        for child in node.body:
            searcher = YieldSearch()
            searcher.visit(child)
            if getattr(searcher, 'has_yield', False):
                has_yield = True
                break
        __args = ', '.join(args)
        if has_yield:
            self.writer.write('var {} = function*({}) {{'.format(name, __args))
        else:
            self.writer.write('var {} = function({}) {{'.format(name, __args))
        self.writer.push()
        self._unpack_arguments(args, kwargs, varargs, varkwargs)
        # check for variable creation use var if not global
        def retrieve_vars(body, vars=None):
            # Walk a statement list collecting assigned names (locals)
            # and names declared 'global', recursing into nested blocks
            # but not into nested function definitions.
            local_vars = set()
            global_vars = vars if vars else set()
            for n in body:
                if isinstance(n, Assign) and isinstance(n.targets[0], Name):
                    local_vars.add(n.targets[0].id)
                elif isinstance(n, Assign) and isinstance(n.targets[0], Tuple):
                    for target in n.targets[0].elts:
                        local_vars.add(target.id)
                elif isinstance(n, Global):
                    global_vars.update(n.names)
                elif hasattr(n, 'body') and not isinstance(n, FunctionDef):
                    # do a recursive search inside new block except function def
                    l, g = retrieve_vars(n.body)
                    local_vars.update(l)
                    global_vars.update(g)
                    if hasattr(n, 'orelse'):
                        l, g = retrieve_vars(n.orelse)
                        local_vars.update(l)
                        global_vars.update(g)
            return local_vars, global_vars
        local_vars, global_vars = retrieve_vars(node.body, all_parameters)
        # Declare every non-global local up front with a single 'var'.
        if local_vars - global_vars:
            a = ','.join(local_vars-global_vars)
            self.writer.write('var {};'.format(a))
        self.writer.write('/* BEGIN function */')
        # output function body
        list(map(self.visit, node.body))
        self.writer.pull()
        self.writer.write('};')
        if self._is_inside_method_definition():
            self.writer.write('{}.is_method = true;'.format(name))
        # Decorators are applied innermost-first by rebinding the name.
        for decorator in node.decorator_list:
            decorator = self.visit(decorator)
            self.writer.write('{} = {}({});'.format(name, decorator, name))
        self._def_stack.pop()
        self.writer.write('')
        return node.name, name
    # Slice(expr? lower, expr? upper, expr? step)
    def visit_Slice(self, node):
        # Missing bounds become the JS 'undefined' literal.
        # NOTE(review): the runtime helper is called as
        # slice(start, step, end) -- the argument order differs from
        # Python's slice(start, stop, step); confirm against the
        # pythonium runtime before "fixing" it.
        start = self.visit(node.lower) if node.lower else 'undefined'
        end = self.visit(node.upper) if node.upper else 'undefined'
        step = self.visit(node.step) if node.step else 'undefined'
        return 'slice({}, {}, {})'.format(start, step, end)
    # Index(expr value)
    def visit_Index(self, node):
        # A plain subscript index: just translate the wrapped expression.
        return self.visit(node.value)
    # ExtSlice(slice* dims)
    visit_ExtSlice = NotImplemented
# Subscript(expr value, slice slice, expr_context ctx)
def visit_Subscript(self, node):
return "pythonium_call(pythonium_get_attribute({}, '__getitem__'), {})".format(self.visit(node.value), self.visit(node.slice))
    # arguments = (arg* args, identifier? vararg, expr? varargannotation,
    #              arg* kwonlyargs, identifier? kwarg, expr? kwargannotation,
    #              expr* defaults, expr* kw_defaults)
    def visit_arguments(self, node):
        """Split an arguments node into (names, defaults-map, *args, **kwargs).

        ``kwargs`` maps the trailing positional parameter names onto
        their visited default expressions (``defaults`` always aligns
        with the *last* positional parameters).
        NOTE(review): keyword-only arguments are read into a local but
        never returned or used.
        """
        args = list(map(lambda x: x.arg, node.args))
        vararg = node.vararg
        kwonlyargs = node.kwonlyargs
        varkwargs = node.kwarg
        defaults = list(map(self.visit, node.defaults))
        kwargs = dict(zip(args[-len(defaults):], defaults))
        return args, kwargs, vararg, varkwargs
    # arg = (identifier arg, expr? annotation)
    visit_arg = NotImplemented
# Name(identifier id, expr_context ctx)
def visit_Name(self, node):
if node.id == 'None':
return '__NONE'
elif node.id == 'True':
return '__TRUE'
elif node.id == 'False':
return '__FALSE'
elif node.id == 'null':
return 'null'
return node.id.replace('__DOLLAR__', '$')
# Attribute(expr value, identifier attr, expr_context ctx)
def visit_Attribute(self, node):
name = self.visit(node.value)
attr = node.attr.replace('__DOLLAR__', '$')
return 'pythonium_get_attribute({}, "{}")'.format(name, attr)
# keyword = (identifier arg, expr value)
def visit_keyword(self, node):
if isinstance(node.arg, str):
return node.arg, self.visit(node.value)
return self.visit(node.arg), self.visit(node.value)
    # Call(expr func, expr* args, keyword* keywords, expr? starargs, expr? kwargs)
    def visit_Call(self, node):
        """Emit a call expression.

        Several pseudo-functions are intercepted and compiled specially
        (instanceof, JSObject, var, new, JSArray, jscode); everything
        else becomes ``pythonium_call.apply(...)`` over an argument
        array, with keyword/**kwargs passed after an
        ``__ARGUMENTS_PADDING__`` marker (see _unpack_arguments).
        """
        name = self.visit(node.func)
        if name == 'instanceof':
            # this gets used by "with javascript:" blocks
            # to test if an instance is a JavaScript type
            args = list(map(self.visit, node.args))
            if len(args) == 2:
                return '{} instanceof {}'.format(*tuple(args))
            else:
                raise SyntaxError(args)
        elif name == 'JSObject':
            if node.keywords:
                kwargs = map(self.visit, node.keywords)
                f = lambda x: '"{}": {}'.format(x[0], x[1])
                out = ', '.join(map(f, kwargs))
                # NOTE(review): '{{}}' contains no placeholder, so `out`
                # is discarded and this always yields the literal '{}' --
                # the template should probably be '{{{}}}'.
                return '{{}}'.format(out)
            else:
                return 'Object()'
        elif name == 'var':
            args = map(self.visit, node.args)
            out = ', '.join(args)
            return 'var {}'.format(out)
        elif name == 'new':
            args = list(map(self.visit, node.args))
            object = args[0]
            # NOTE(review): only `object` is interpolated; the second
            # format() argument is ignored.  The surrounding call (see
            # the try/except below) appends the argument list.
            return 'new {}'.format(object, args)
        elif name == 'JSArray':
            if node.args:
                args = map(self.visit, node.args)
                out = ', '.join(args)
            else:
                out = ''
            return '[{}]'.format(out)
        elif name == 'jscode':
            # escape hatch: inline the raw string literal as JavaScript
            return node.args[0].s
        else:
            # is it a call to new?
            try:
                if node.func.func.id == 'new':
                    # Do not convert arguments to Python
                    if node.args:
                        veloce = Veloce()
                        args = [veloce.visit(e) for e in node.args]
                        args = [e for e in args if e]
                    else:
                        args = []
                    # use native call since constructors don't have apply method
                    return '{}({})'.format(name, ', '.join(args))
            except AttributeError:
                # it is not
                pass
            # positional args
            if node.args:
                args = [self.visit(e) for e in node.args]
                args = [e for e in args if e]
            else:
                args = []
            args.insert(0, name)
            # variable arguments aka. starargs
            if node.starargs:
                varargs = self.visit(node.starargs)
                # FIXME(review): `call_arguments` is only assigned further
                # down, so reaching this branch raises NameError in the
                # compiler itself (and the generated push-loop would run
                # before the array is declared).  Star-args calls are
                # effectively unsupported as written.
                code = "for i in {}: jscode('{}.push(i)')".format(varargs, call_arguments)
                self.writer.write(self.translate(code))
            # keywords and variable keywords arguments aka. starkwargs
            if node.kwargs:
                kwargs = self.visit(node.kwargs)
                if node.keywords:
                    for key, value in map(self.visit, node.keywords):
                        self.writer.write('{}.__class__.__setitem__({}, pythonium_call(str, "{}"), {})'.format(kwargs, kwargs, key, value)) # XXX
            elif node.keywords:
                kwargs = '__pythonium_kwargs'
                self.writer.write('var __pythonium_kwargs = pythonium_create_empty_dict();')
                for key, value in map(self.visit, node.keywords):
                    self.writer.write('{}.__class__.__setitem__({}, pythonium_call(str, "{}"), {})'.format(kwargs, kwargs, key, value))
            if node.kwargs or node.keywords:
                # marker consumed by _unpack_arguments at call time
                args.append('__ARGUMENTS_PADDING__')
                args.append(kwargs)
            call_arguments = 'call_arguments{}'.format(self.uuid())
            self.writer.write('var {} = [{}];'.format(call_arguments, ', '.join(args)))
            return 'pythonium_call.apply(undefined, {})'.format(call_arguments)
# ListComp(expr elt, comprehension* generators)
def visit_ListComp(self, node):
# 'elt', 'generators'
comprehension = '__comp{}__'.format(self.uuid())
self.writer.write('var {} = pythonium_call(list);'.format(comprehension))
list(map(self.visit, node.generators))
value = self.visit(node.elt)
self.writer.write('pythonium_get_attribute({}, "append")({});'.format(comprehension, value))
for _ in node.generators:
self.writer.pull()
self.writer.write('}} catch (__exception__) {if (!pythonium_is_exception(__exception__, StopIteration)) { throw x; }}')
self.writer.pull()
return comprehension
    # SetComp(expr elt, comprehension* generators)
    visit_SetComp = NotImplemented
    # DictComp(expr key, expr value, comprehension* generators)
    visit_DictComp = NotImplemented
    # GeneratorExp(expr elt, comprehension* generators)
    visit_GeneratorExp = NotImplemented
    # comprehension = (expr target, expr iter, expr* ifs)
    def visit_comprehension(self, node):
        """Open one generator loop of a comprehension.

        Deliberately leaves a ``try {`` and a ``while (true) {`` block
        open; visit_ListComp closes them after emitting the element
        append.  Termination happens when next() throws StopIteration.
        NOTE(review): ``index`` is computed (consuming a uuid) but never
        used.
        """
        # 'target', 'iter', 'ifs'
        iterator = '__iterator{}__'.format(self.uuid())
        index = '__index{}__'.format(self.uuid())
        self.writer.write('var {} = iter({});'.format(iterator, self.visit(node.iter)))
        self.writer.write('try {')
        self.writer.push()
        self.writer.write('while (true) {')
        self.writer.push()
        self.writer.write('var {} = next({});'.format(self.visit(node.target), iterator))
        if node.ifs:
            # All filter clauses are ANDed into a single guard.
            self.writer.write('if(!pythonium_is_true({})) {{ continue; }}'.format(' && '.join(map(self.visit, node.ifs))))
# While(expr test, stmt* body, stmt* orelse)
def visit_While(self, node):
self.writer.write('while(pythonium_is_true({})) {{'.format(self.visit(node.test)))
self.writer.push()
list(map(self.visit, node.body))
self.writer.pull()
self.writer.write('}')
    # AugAssign(expr target, operator op, expr value)
    def visit_AugAssign(self, node):
        """Emit ``x op= v`` as ``x = runtime_call(x.__op__, v)``."""
        target = self.visit(node.target)
        self.writer.write('{} = pythonium_call(pythonium_get_attribute({}, "{}"), {});'.format(target, target, self.visit(node.op), self.visit(node.value)))
# Str(string s)
def visit_Str(self, node):
s = node.s.replace('\n', '\\n')
if '"' in s:
return "pythonium_call(str, '{}')".format(s)
return 'pythonium_call(str, "{}")'.format(s)
# Bytes(bytes s)
visit_Bytes = NotImplemented
# BinOp(expr left, operator op, expr right)
def visit_BinOp(self, node):
left = self.visit(node.left)
op = self.visit(node.op)
right = self.visit(node.right)
return '(pythonium_call(pythonium_get_attribute({}, "{}"), {}))'.format(left, op, right)
    # Arithmetic / bitwise / comparison operator nodes: each returns the
    # dunder-method name the runtime dispatches on.  NOTE(review): some
    # names ('__gte__', '__lte__', '__neq__', '__div__') follow the
    # pythonium runtime rather than CPython's protocol names.
    def visit_Mult(self, node):
        return '__mul__'
    def visit_Add(self, node):
        return '__add__'
    visit_UAdd = NotImplemented
    def visit_Sub(self, node):
        return '__sub__'
    def visit_USub(self, node):
        return '__neg__'
    def visit_Div(self, node):
        return '__div__'
    visit_FloorDiv = NotImplemented
    visit_Pow = NotImplemented
    visit_Invert = NotImplemented
    def visit_Mod(self, node):
        return '__mod__'
    def visit_Lt(self, node):
        return '__lt__'
    def visit_Gt(self, node):
        return '__gt__'
    def visit_GtE(self, node):
        return '__gte__'
    def visit_LtE(self, node):
        return '__lte__'
    def visit_LShift(self, node):
        return '__lshift__'
    def visit_RShift(self, node):
        return '__rshift__'
    def visit_BitXor(self, node):
        return '__xor__'
    def visit_BitOr(self, node):
        return '__or__'
    def visit_BitAnd(self, node):
        return '__and__'
    def visit_Eq(self, node):
        return '__eq__'
    def visit_NotEq(self, node):
        return '__neq__'
def visit_Num(self, node):
if isinstance(node.n, float):
return 'pythonium_call(float, {})'.format(str(node.n))
else:
return 'pythonium_call(int, {})'.format(str(node.n))
    # Identity / negation operators and unary dispatch
    def visit_Is(self, node):
        return '__is__'
    def visit_Not(self, node):
        return '__not__'
    def visit_IsNot(self, node):
        return '__isnot__'
    # UnaryOp(unaryop op, expr operand)
    def visit_UnaryOp(self, node):
        # Unary ops call the operand's dunder method with no argument.
        return 'pythonium_call(pythonium_get_attribute({}, "{}"))'.format(self.visit(node.operand), self.visit(node.op))
    def visit_And(self, node):
        return '__and__'
    def visit_Or(self, node):
        return '__or__'
    # Delete(expr* targets)
    def visit_Delete(self, node):
        """Emit deletion of each target.

        Subscript targets go through the runtime __delitem__; anything
        else falls back to the JS 'delete' operator.
        """
        for target in node.targets:
            if isinstance(target, Subscript):
                slice = self.visit(target.slice)
                target = self.visit(target.value)
                self.writer.write("pythonium_get_attribute({}, '__delitem__')({});".format(target, slice))
            else:
                target = self.visit(target)
                self.writer.write('delete {};'.format(target))
# Assign(expr* targets, expr value)
def visit_Assign(self, node):
value = self.visit(node.value)
if len(self._def_stack) == 0: # module level definition must be exported
export = True
else:
export = False
if len(node.targets) == 1 and not isinstance(node.targets[0], Tuple):
target = node.targets[0]
if isinstance(target, Attribute):
self.writer.write('pythonium_set_attribute({}, "{}", {});'.format(
self.visit(target.value),
target.attr.replace('__DOLLAR__', '$'),
value
))
return
elif isinstance(target, Subscript):
self.writer.write('pythonium_call(pythonium_get_attribute({}, "__setitem__"), {}, {});'.format(
self.visit(target.value),
self.visit(target.slice),
value,
))
return
else:
target = self.visit(target)
if export:
self.__all__.append(target)
self.writer.write('{} = {};'.format(target, value))
return
self.writer.write('var __assignement = {};'.format(value))
for target in node.targets:
if isinstance(target, Tuple):
targets = map(self.visit, target.elts)
if export:
self.__all__.extend(targets)
self.writer.write('var getitem = pythonium_get_attribute(__assignement, "__getitem__");')
for index, target in enumerate(targets):
self.writer.write('{} = getitem(pythonium_call(int, {}));'.format(target, index))
else:
if isinstance(target, Attribute):
name = self.visit(target.value)
attr = target.attr.replace('__DOLLAR__', '$')
self.writer.write('pythonium_set_attribute({}, "{}", {});'.format(
name,
attr,
value,
))
else:
target = self.visit(target)
if self._def_stack and isinstance(self._def_stack[-1], ClassDefNode):
name = '__{}_{}'.format(self._def_stack[-1].name, target)
else:
name = target
if export:
self.__all__.extend(name)
self.writer.write('{} = __assignement;'.format(name))
if self._def_stack and isinstance(self._def_stack[-1], ClassDefNode):
return target, name
# Expr(expr value)
def visit_Expr(self, node):
self.writer.write(self.visit(node.value) + ';')
# Return(expr? value)
def visit_Return(self, node):
if node.value:
self.writer.write('return {};'.format(self.visit(node.value)))
return
self.writer.write('return __NONE;')
    # Compare(expr left, cmpop* ops, expr* comparators)
    def visit_Compare(self, node):
        """Emit a (possibly chained) comparison.

        Builds the flat list [left, op1, right1, op2, right2, ...] and
        folds it from the right into nested runtime dunder calls.
        NOTE(review): a chained 'a < b < c' therefore compiles to
        a.__lt__(b.__lt__(c)), not '(a < b) and (b < c)' as in Python.
        """
        def merge(a, b, c):
            # Interleave visited operators and comparators into c.
            if a and b:
                c.append(self.visit(a[0]))
                c.append(self.visit(b[0]))
                return merge(a[1:], b[1:], c)
            else:
                return c
        ops = merge(node.ops, node.comparators, [self.visit(node.left)])
        iter = reversed(ops)  # NOTE: shadows the builtin 'iter' locally
        c = next(iter)
        for op in iter:
            c = '(pythonium_get_attribute({}, "{}")({}))'.format(next(iter), op, c)
        return c
    # BoolOp(boolop op, expr* values)
    def visit_BoolOp(self, node):
        """Fold 'a and b and c' into nested runtime calls.

        NOTE(review): each step emits later_value.__op__(accumulated),
        so the *later* operand is the receiver and evaluation is not
        short-circuiting -- confirm against the runtime's __and__/__or__.
        """
        op = self.visit(node.op)
        out = self.visit(node.values[0])
        for value in node.values[1:]:
            v = self.visit(value)
            out = 'pythonium_call(pythonium_get_attribute({}, "{}"), {})'.format(v, op, out)
        return out
# Assert(expr test, expr? msg)
def visit_Assert(self, node):
test = self.visit(node.test)
msg = self.visit(node.msg) if node.msg else 'undefined'
self.writer.write('ASSERT({}, {});'.format(test, msg))
# If(expr test, stmt* body, stmt* orelse)
def visit_If(self, node):
test = self.visit(node.test)
self.writer.write('if (pythonium_is_true({})) {{'.format(test))
self.writer.push()
list(map(self.visit, node.body))
self.writer.pull()
self.writer.write('}')
if node.orelse:
self.writer.write('else {')
self.writer.push()
list(map(self.visit, node.orelse))
self.writer.pull()
self.writer.write('}')
    # IfExp(expr test, expr body, expr orelse)
    def visit_IfExp(self, node):
        """Emit a conditional expression via a temporary variable.

        Both JS branches declare the same ``var``; that is valid because
        'var' is function-scoped (hoisted) in JavaScript.
        """
        name = '__pythonium_ifexp_{}'.format(self.uuid())
        self.writer.write('if (pythonium_is_true({})) {{'.format(self.visit(node.test)))
        self.writer.push()
        body = self.visit(node.body)
        self.writer.write('var {} = {};'.format(name, body))
        self.writer.pull()
        self.writer.write('} else {')
        self.writer.push()
        orelse = self.visit(node.orelse)
        self.writer.write('var {} = {};'.format(name, orelse))
        self.writer.pull()
        self.writer.write('}')
        return name
    # Ellipsis
    visit_Ellipsis = NotImplemented
    # Starred(expr value, expr_context ctx)
    visit_Starred = NotImplemented
    # Dict(expr* keys, expr* values)
    def visit_Dict(self, node):
        """Emit a dict literal as a runtime dict filled via __setitem__."""
        keys = []
        values = []
        for i in range(len(node.keys)):
            k = self.visit(node.keys[i])
            keys.append(k)
            v = self.visit(node.values[i])
            values.append(v)
        if node.keys:
            name = '__a_dict{}'.format(self.uuid())
            self.writer.write('var {} = pythonium_call(dict);'.format(name))
            for key, value in zip(keys, values):
                self.writer.write('{}.__class__.__setitem__({}, {}, {});'.format(name, name, key, value))
            return name
        else:
            return 'pythonium_call(dict)'
    # With(withitem* items, stmt* body)
    visit_With = NotImplemented
    # withitem = (expr context_expr, expr? optional_vars)
    visit_withitem = NotImplemented
# For(expr target, expr iter, stmt* body, stmt* orelse)
def visit_For(self, node):
# support only arrays
target = node.target.id
iterator = self.visit(node.iter) # iter is the python iterator
self.writer.write('try {')
self.writer.push()
nextname = '__next{}__'.format(self.uuid())
self.writer.write('var {} = pythonium_get_attribute(iter({}), "__next__");'.format(nextname, iterator))
self.writer.write('while(true) {')
self.writer.push()
self.writer.write('var {} = {}();'.format(target, nextname))
list(map(self.visit, node.body))
self.writer.pull()
self.writer.write('}')
self.writer.pull()
self.writer.write('} catch (__exception__) { if (!pythonium_is_exception(__exception__, StopIteration)) { throw x; }}')
    # Continue
    def visit_Continue(self, node):
        # Maps directly onto the JS 'continue' statement.
        self.writer.write('continue;')
    # Break
    def visit_Break(self, node):
        # Maps directly onto the JS 'break' statement.
        self.writer.write('break;')
    def _unpack_arguments(self, args, kwargs, varargs, varkwargs):
        """Emit the prologue that rebuilds Python calling conventions.

        Callers (see visit_Call) append an __ARGUMENTS_PADDING__ marker
        followed by a kwargs dict; this prologue detects the marker,
        fills defaulted keywords, and collects surplus positionals into
        *varargs / surplus keywords into **varkwargs.
        """
        self.writer.write('/* BEGIN arguments unpacking */')
        if not varkwargs and kwargs:
            # need somewhere to look keyword defaults up even without **kwargs
            varkwargs = '__kwargs'
        if varargs or (varkwargs and varkwargs != '__kwargs') or kwargs:
            self.writer.write('var __args = Array.prototype.slice.call(arguments);')
        if (varkwargs and varkwargs != '__kwargs') or kwargs:
            self.writer.write('if (__args[__args.length - 2] === __ARGUMENTS_PADDING__) {')
            self.writer.push()
            self.writer.write('var {} = __args[__args.length - 1];'.format(varkwargs))
            self.writer.write('var varkwargs_start = __args.length - 2;')
            self.writer.pull()
            self.writer.write('} else {') # no variable keywords was provided so it's empty
            self.writer.push()
            self.writer.write('var {} = pythonium_create_empty_dict();'.format(varkwargs))
            self.writer.write('var varkwargs_start = undefined;')
            self.writer.pull()
            self.writer.write('}')
        num_args = len(args)
        for index, keyword in enumerate(kwargs.keys()):
            position = num_args + index - 1
            # NOTE(review): both branches pass the JS variable `keyword`
            # (not a quoted string) as the dict key to .get() -- confirm
            # this matches the runtime's dict.get contract.
            self.writer.write('if (varkwargs_start !== undefined && {} > varkwargs_start) {{'.format(position))
            self.writer.push()
            self.writer.write('{} = {}.__class__.get({}, {}) || {};'.format(keyword, varkwargs, varkwargs, keyword, kwargs[keyword]))
            self.writer.pull()
            self.writer.write('} else {')
            self.writer.push()
            self.writer.write('{} = {} || {}.__class__.get({}, {}) || {};'.format(keyword, keyword, varkwargs, varkwargs, keyword, kwargs[keyword]))
            self.writer.pull()
            self.writer.write('}')
            if varkwargs != '__kwargs':
                # a consumed keyword must not also stay in **varkwargs
                self.writer.write('delete {}.{};'.format(varkwargs, keyword))
        if varargs:
            # remaining positionals (minus the padding/kwargs tail) -> *varargs
            self.writer.write('__args = __args.splice({});'.format(len(args)))
            if varkwargs and (varkwargs != '__kwargs' or kwargs):
                self.writer.write('if (varkwargs_start) {{ __args.splice(varkwargs_start - {}) }}'.format(len(args)))
            self.writer.write('var {} = pythonium_call(tuple);'.format(varargs))
            self.writer.write('{}.jsobject = __args;'.format(varargs))
        self.writer.write('/* END arguments unpacking */')
    # Lambda(arguments args, expr body)
    def visit_Lambda(self, node):
        """Emit a lambda as a uniquely named JS function.

        The body expression is visited before the 'return ...' line is
        written, so any helper statements it emits land before the
        return, inside the function.
        """
        args, kwargs, vararg, varkwargs = self.visit(node.args)
        name = '__lambda{}'.format(self.uuid())
        self.writer.write('var {} = function({}) {{'.format(name, ', '.join(args)))
        self.writer.push()
        self._unpack_arguments(args, kwargs, vararg, varkwargs)
        body = 'return '
        body += self.visit(node.body)
        self.writer.write(body)
        self.writer.pull()
        self.writer.write('}')
        return name
    # ClassDef(identifier name, expr* bases, keyword* keywords,
    #          expr? starargs, expr? kwargs, stmt* body, expr* decorator_list)
    def visit_ClassDef(self, node):
        """Emit a class as a pythonium_create_class(...) call.

        Children are visited first -- methods and class attributes come
        back as (python_name, mangled_name) pairs -- then referenced in
        the class-dictionary literal.  NOTE(review): ``decorator_list``,
        ``keywords``, ``starargs`` and ``kwargs`` are ignored for classes.
        """
        # 'name', 'bases', 'keywords', 'starargs', 'kwargs', 'body', 'decorator_list'
        if len(self._def_stack) == 0: # module level definition must be exported
            self.__all__.append(node.name)
        if len(node.bases) == 0:
            # every class implicitly derives from object
            bases = ['object']
        else:
            bases = map(self.visit, node.bases)
        bases = '[{}]'.format(', '.join(bases))
        self._def_stack.append(ClassDefNode(node.name))
        self.writer.write('/* class definition {} */'.format(node.name))
        definitions = []
        for child in node.body:
            definitions.append(self.visit(child))
        self.writer.write('var {} = pythonium_create_class("{}", {}, {{'.format(node.name, node.name, bases))
        self.writer.push()
        for o in definitions:
            if not o:
                # child visitors return None for non-definition statements
                continue
            name, definition = o
            self.writer.write('{}: {},'.format(name, definition))
        self.writer.pull()
        self.writer.write('});')
        self._def_stack.pop()
| lgpl-2.1 |
eayunstack/neutron | neutron/scheduler/dhcp_agent_scheduler.py | 1 | 12957 | # Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from operator import itemgetter
from neutron_lib.api.definitions import availability_zone as az_def
from neutron_lib import constants
from neutron_lib.objects import exceptions
from oslo_config import cfg
from oslo_log import log as logging
from neutron.agent.common import utils as agent_utils
from neutron.objects import agent as agent_obj
from neutron.objects import network
from neutron.scheduler import base_resource_filter
from neutron.scheduler import base_scheduler
LOG = logging.getLogger(__name__)
class AutoScheduler(object):
    def auto_schedule_networks(self, plugin, context, host):
        """Schedule non-hosted networks to the DHCP agent on the specified
        host.

        Returns False when no DHCP-enabled network exists, True otherwise.
        """
        agents_per_network = cfg.CONF.dhcp_agents_per_network
        # a list of (agent, net_ids) tuples
        bindings_to_add = []
        with context.session.begin(subtransactions=True):
            fields = ['network_id', 'enable_dhcp', 'segment_id']
            subnets = plugin.get_subnets(context, fields=fields)
            # net_ids maps network_id -> is_routed_network; a network is
            # "routed" when any of its DHCP-enabled subnets has a segment.
            net_ids = {}
            net_segment_ids = collections.defaultdict(set)
            for s in subnets:
                if s['enable_dhcp']:
                    net_segment_ids[s['network_id']].add(s.get('segment_id'))
            for network_id, segment_ids in net_segment_ids.items():
                is_routed_network = any(segment_ids)
                net_ids[network_id] = is_routed_network
            if not net_ids:
                LOG.debug('No non-hosted networks')
                return False
            dhcp_agents = agent_obj.Agent.get_objects(
                context, agent_type=constants.AGENT_TYPE_DHCP,
                host=host, admin_state_up=True)
            segment_host_mapping = network.SegmentHostMapping.get_objects(
                context, host=host)
            segments_on_host = {s.segment_id for s in segment_host_mapping}
            for dhcp_agent in dhcp_agents:
                if agent_utils.is_agent_down(
                    dhcp_agent.heartbeat_timestamp):
                    LOG.warning('DHCP agent %s is not active', dhcp_agent.id)
                    continue
                for net_id, is_routed_network in net_ids.items():
                    agents = plugin.get_dhcp_agents_hosting_networks(
                        context, [net_id])
                    segments_on_network = net_segment_ids[net_id]
                    if is_routed_network:
                        # routed network: the agent's host must be able to
                        # reach at least one of the network's segments
                        if len(segments_on_network & segments_on_host) == 0:
                            continue
                    else:
                        # non-routed: respect the per-network agent quota
                        if len(agents) >= agents_per_network:
                            continue
                    if any(dhcp_agent.id == agent.id for agent in agents):
                        continue
                    net = plugin.get_network(context, net_id)
                    az_hints = (net.get(az_def.AZ_HINTS) or
                                cfg.CONF.default_availability_zones)
                    if (az_hints and
                        dhcp_agent['availability_zone'] not in az_hints):
                        continue
                    bindings_to_add.append((dhcp_agent, net_id))
        # do it outside transaction so particular scheduling results don't
        # make other to fail
        # (self.resource_filter is supplied by the scheduler class this
        # mix-in is combined with; see ChanceScheduler/WeightScheduler)
        for agent, net_id in bindings_to_add:
            self.resource_filter.bind(context, [agent], net_id)
        return True
class ChanceScheduler(base_scheduler.BaseChanceScheduler, AutoScheduler):
    """Random DHCP agent selection, binding through DhcpFilter."""
    def __init__(self):
        super(ChanceScheduler, self).__init__(DhcpFilter())
class WeightScheduler(base_scheduler.BaseWeightScheduler, AutoScheduler):
    """Load-weighted DHCP agent selection, binding through DhcpFilter."""
    def __init__(self):
        super(WeightScheduler, self).__init__(DhcpFilter())
class AZAwareWeightScheduler(WeightScheduler):
    def select(self, plugin, context, resource_hostable_agents,
               resource_hosted_agents, num_agents_needed):
        """AZ aware scheduling
        If the network has multiple AZs, agents are scheduled as
        follows:
        - select AZ with least agents scheduled for the network
        - for AZs with same amount of scheduled agents, the AZ which
          contains least weight agent will be used first
        - choose agent in the AZ with WeightScheduler
        """
        # The dict to record the agents in each AZ, the record will be sorted
        # according to the weight of agent. So that the agent with less weight
        # will be used first.
        hostable_az_agents = collections.defaultdict(list)
        # The dict to record the number of agents in each AZ. When the number
        # of agents in each AZ is the same and num_agents_needed is less than
        # the number of AZs, we want to select agents with less weight.
        # Use an OrderedDict here, so that the AZ with least weight agent
        # will be recorded first in the case described above. And, as a result,
        # the agent with least weight will be used first.
        num_az_agents = collections.OrderedDict()
        # resource_hostable_agents should be a list with agents in the order of
        # their weight.
        resource_hostable_agents = (
            super(AZAwareWeightScheduler, self).select(
                plugin, context, resource_hostable_agents,
                resource_hosted_agents, len(resource_hostable_agents)))
        # Group candidates by AZ, preserving the weight order within each
        # AZ and the first-seen order of the AZs themselves.
        for agent in resource_hostable_agents:
            az_agent = agent['availability_zone']
            hostable_az_agents[az_agent].append(agent)
            if az_agent not in num_az_agents:
                num_az_agents[az_agent] = 0
        if num_agents_needed <= 0:
            return []
        # Seed the per-AZ counts with agents already hosting the resource
        # so that their AZs are deprioritized.
        for agent in resource_hosted_agents:
            az_agent = agent['availability_zone']
            if az_agent in num_az_agents:
                num_az_agents[az_agent] += 1
        chosen_agents = []
        while num_agents_needed > 0:
            # 'min' will stably output the first min value in the list.
            select_az = min(num_az_agents.items(), key=itemgetter(1))[0]
            # Select the agent in AZ with least weight.
            select_agent = hostable_az_agents[select_az][0]
            chosen_agents.append(select_agent)
            # Update the AZ-agents records.
            del hostable_az_agents[select_az][0]
            if not hostable_az_agents[select_az]:
                del num_az_agents[select_az]
            else:
                num_az_agents[select_az] += 1
            num_agents_needed -= 1
        return chosen_agents
class DhcpFilter(base_resource_filter.BaseResourceFilter):
    def bind(self, context, agents, network_id):
        """Bind the network to the agents.

        Creates one NetworkDhcpAgentBinding row per agent; an agent that
        already has a binding (duplicate entry) is skipped.
        """
        # customize the bind logic
        bound_agents = agents[:]
        for agent in agents:
            # saving agent_id to use it after rollback to avoid
            # DetachedInstanceError
            agent_id = agent.id
            try:
                network.NetworkDhcpAgentBinding(context,
                    dhcp_agent_id=agent_id, network_id=network_id).create()
            except exceptions.NeutronDbObjectDuplicateEntry:
                # it's totally ok, someone just did our job!
                bound_agents.remove(agent)
                LOG.info('Agent %s already present', agent_id)
            LOG.debug('Network %(network_id)s is scheduled to be '
                      'hosted by DHCP agent %(agent_id)s',
                      {'network_id': network_id,
                       'agent_id': agent_id})
        super(DhcpFilter, self).bind(context, bound_agents, network_id)
    def filter_agents(self, plugin, context, network):
        """Return the agents that can host the network.
        This function returns a dictionary which has 3 keys.
        n_agents: The number of agents should be scheduled. If n_agents=0,
        all networks are already scheduled or no more agent can host the
        network.
        hostable_agents: A list of agents which can host the network.
        hosted_agents: A list of agents which already hosts the network.
        """
        agents_dict = self._get_network_hostable_dhcp_agents(
            plugin, context, network)
        if not agents_dict['hostable_agents'] or agents_dict['n_agents'] <= 0:
            return {'n_agents': 0, 'hostable_agents': [],
                    'hosted_agents': agents_dict['hosted_agents']}
        return agents_dict
    def _filter_agents_with_network_access(self, plugin, context,
                                           network, hostable_agents):
        # Keep only agents whose host can actually reach the network; a
        # precomputed 'candidate_hosts' entry on the network wins.
        if 'candidate_hosts' in network:
            hostable_dhcp_hosts = network['candidate_hosts']
        else:
            hostable_dhcp_hosts = plugin.filter_hosts_with_network_access(
                context, network['id'],
                [agent['host'] for agent in hostable_agents])
        reachable_agents = [agent for agent in hostable_agents
                            if agent['host'] in hostable_dhcp_hosts]
        return reachable_agents
    def _get_dhcp_agents_hosting_network(self, plugin, context, network):
        """Return dhcp agents hosting the given network or None if a given
        network is already hosted by enough number of agents.
        """
        agents_per_network = cfg.CONF.dhcp_agents_per_network
        #TODO(gongysh) don't schedule the networks with only
        # subnets whose enable_dhcp is false
        with context.session.begin(subtransactions=True):
            network_hosted_agents = plugin.get_dhcp_agents_hosting_networks(
                context, [network['id']], hosts=network.get('candidate_hosts'))
            if len(network_hosted_agents) >= agents_per_network:
                LOG.debug('Network %s is already hosted by enough agents.',
                          network['id'])
                return
        return network_hosted_agents
    def _get_active_agents(self, plugin, context, az_hints):
        """Return a list of active dhcp agents, optionally AZ-filtered."""
        with context.session.begin(subtransactions=True):
            filters = {'agent_type': [constants.AGENT_TYPE_DHCP],
                       'admin_state_up': [True]}
            if az_hints:
                filters['availability_zone'] = az_hints
            active_dhcp_agents = plugin.get_agent_objects(
                context, filters=filters)
            if not active_dhcp_agents:
                LOG.warning('No more DHCP agents')
                return []
        return active_dhcp_agents
    def _get_network_hostable_dhcp_agents(self, plugin, context, network):
        """Provide information on hostable DHCP agents for network.
        The returned value includes the number of agents that will actually
        host the given network, a list of DHCP agents that can host the given
        network, and a list of DHCP agents currently hosting the network.
        """
        hosted_agents = self._get_dhcp_agents_hosting_network(plugin,
                                                              context, network)
        if hosted_agents is None:
            # already hosted by enough agents: nothing left to schedule
            return {'n_agents': 0, 'hostable_agents': [], 'hosted_agents': []}
        n_agents = cfg.CONF.dhcp_agents_per_network - len(hosted_agents)
        az_hints = (network.get(az_def.AZ_HINTS) or
                    cfg.CONF.default_availability_zones)
        active_dhcp_agents = self._get_active_agents(plugin, context, az_hints)
        hosted_agent_ids = [agent['id'] for agent in hosted_agents]
        if not active_dhcp_agents:
            return {'n_agents': 0, 'hostable_agents': [],
                    'hosted_agents': hosted_agents}
        # candidates: active, eligible, and not already hosting the network
        hostable_dhcp_agents = [
            agent for agent in active_dhcp_agents
            if agent.id not in hosted_agent_ids and plugin.is_eligible_agent(
                context, True, agent)]
        hostable_dhcp_agents = self._filter_agents_with_network_access(
            plugin, context, network, hostable_dhcp_agents)
        if not hostable_dhcp_agents:
            return {'n_agents': 0, 'hostable_agents': [],
                    'hosted_agents': hosted_agents}
        n_agents = min(len(hostable_dhcp_agents), n_agents)
        return {'n_agents': n_agents, 'hostable_agents': hostable_dhcp_agents,
                'hosted_agents': hosted_agents}
| apache-2.0 |
dgilland/alchy | tasks.py | 1 | 2051 | # -*- coding: utf-8 -*-
from invoke import run, task
# File listing the development dependencies installed by the `install` task.
REQUIREMENTS = 'requirements-dev.txt'
# Package under test; also used as the coverage measurement target.
PACKAGE_NAME = 'alchy'
# flake8 error codes ignored project-wide.
FLAKE8_IGNORE = ','.join([
    'F401', # `module` imported but unused
    'F811', # redefinition of unused `name` from line `N`
])
# pylint checks disabled for the error-only run (see the `pylint` task).
PYLINT_IGNORE = ','.join([
    'not-callable',
    'no-self-argument',
    'no-member',
    'no-value-for-parameter',
    'method-hidden'
])
# Paths handed to the linters and the test runner.
TEST_TARGETS = ' '.join([PACKAGE_NAME, 'tests'])
# Package measured by coverage in the `unit` task.
COV_TARGET = PACKAGE_NAME
@task
def clean(ctx):
    """Remove temporary files related to development.

    The ``find`` patterns use raw strings so that ``\*`` and ``\;`` are
    not parsed as (invalid) Python escape sequences -- same bytes passed
    to the shell, but no DeprecationWarning on newer CPython.
    """
    run(r'find . -name \*.py[cod] -type f -delete')
    run(r'find . -depth -name __pycache__ -type d -exec rm -rf {} \;')
    run('rm -rf .tox .coverage .cache .egg* *.egg* dist build')
@task
def install(ctx):
    """Install package development dependencies from REQUIREMENTS."""
    run('pip install -r {0}'.format(REQUIREMENTS))
@task
def flake8(ctx):
    """Run flake8 checker over TEST_TARGETS, ignoring FLAKE8_IGNORE."""
    run('flake8 --ignore={0} {1}'.format(FLAKE8_IGNORE, TEST_TARGETS))
@task
def pylint(ctx):
    """Run pylint checker (errors only, minus PYLINT_IGNORE)."""
    run('pylint -E -d {0} {1}'.format(PYLINT_IGNORE, TEST_TARGETS))
@task(pre=[flake8, pylint])
def lint(ctx):
    """Run static linter (flake8 + pylint via task prerequisites)."""
    pass
@task
def unit(ctx):
    """Run unit tests with coverage measured for COV_TARGET."""
    run('py.test --cov {0} {1}'.format(COV_TARGET, TEST_TARGETS))
@task(pre=[lint, unit])
def test(ctx):
    """Run all tests (lint + unit via task prerequisites)."""
    pass
@task(post=[clean])
def tox(ctx):
    """Run tox testing; temporary files are cleaned up afterwards."""
    run('tox -c tox.ini')
def docs(ctx, serve=False, port=8000):
"""Build documentation."""
run('rm -rf {0}'.format('docs/_build'))
run('cd docs && make doctest && make html')
if serve:
print('Serving docs on port {0} ...'.format(port))
run('cd {0} && python -m http.server {1}'
.format('docs/_build/html', port))
@task
def build(ctx):
    """Build package distribution (sdist + wheel)."""
    run('python setup.py sdist bdist_wheel')
@task(pre=[build], post=[clean])
def release(ctx):
    """Upload package distribution to PyPI via twine."""
    run('twine upload dist/*')
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.