code stringlengths 1 1.49M | vector listlengths 0 7.38k | snippet listlengths 0 7.38k |
|---|---|---|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for custom HTML tags."""
__author__ = 'John Orr (jorr@google.com)'
import logging
import mimetypes
import os
import re
from xml.etree import cElementTree
import html5lib
import lxml.html
import safe_dom
import webapp2
import appengine_config
from common import schema_fields
from models import config
# Course-wide switch: when disabled, custom tags are passed through verbatim
# instead of being rendered by their registered handlers.
CAN_USE_DYNAMIC_TAGS = config.ConfigProperty(
    'gcb_can_use_dynamic_tags', bool, safe_dom.Text(
        'Whether lesson content can make use of custom HTML tags such as '
        '<gcb-youtube videoid="...">. If this is enabled some legacy content '
        'may be rendered differently. '),
    default_value=True)

# Error text emitted when two tags on one page share the same instanceid.
DUPLICATE_INSTANCE_ID_MESSAGE = (
    'Error processing custom HTML tag: duplicate tag id')

# Error prefix used when rendering a custom tag raises an exception.
INVALID_HTML_TAG_MESSAGE = 'Invalid HTML tag'
class BaseTag(object):
    """Base class for the custom HTML tags."""

    @classmethod
    def name(cls):
        """Returns the tag's display name; defaults to the class name."""
        return cls.__name__

    @classmethod
    def vendor(cls):
        """Returns the vendor identifier; defaults to the defining module."""
        return cls.__module__

    @classmethod
    def required_modules(cls):
        """Lists the inputEx modules required by the editor."""
        return []

    @classmethod
    def extra_js_files(cls):
        """Returns a list of JS files to be loaded in the editor lightbox."""
        return []

    @classmethod
    def extra_css_files(cls):
        """Returns a list of CSS files to be loaded in the editor lightbox."""
        return []

    @classmethod
    def additional_dirs(cls):
        """Returns a list of directories searched for files used by the editor.

        These folders will be searched for files to be loaded as Jinja
        templates by the editor, e.g., the files referenced by extra_js_files
        and extra_css_files.

        Returns:
            List of strings.
        """
        return []

    def render(self, node, handler):  # pylint: disable=W0613
        """Receive a node and return a node.

        Args:
            node: cElementTree.Element. The DOM node for the tag which should
                be rendered.
            handler: controllers.utils.BaseHandler. The server runtime.

        Returns:
            A cElementTree.Element holding the rendered DOM.
        """
        return cElementTree.XML('<div>[Unimplemented custom tag]</div>')

    def get_icon_url(self):
        """Return the URL for the icon to be displayed in the rich text editor.

        Images should be placed in a folder called 'resources' inside the main
        package for the tag definitions.

        Returns:
            the URL for the icon to be displayed in the editor.
        """
        # Default icon: an inline base64 data URL so no resource file is
        # required for unimplemented tags.
        return """
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs
4c6QAAAAZiS0dEAP8A/wD/oL2nkwAAAAlwSFlzAAALEwAACxMBAJqcGAAAAAd0SU1FB90EGgAIFHpT6h
8AAAAZdEVYdENvbW1lbnQAQ3JlYXRlZCB3aXRoIEdJTVBXgQ4XAAAC30lEQVRo3u1ZP2sqQRCfVVGUXC
FqoZAmbSBYxFikMojBD2ErkgdC/AxpAn4A2wRMKptgCrWwSApBEG2DCidcI0gIxogXnXnFI5I87y6Jd6
seOHDN7LL7+83u/Nk5hoh/wMTCEJHMTMDGGDMzfrCAyWVL4DdCZLy72YwCxhgDIoKXlxcQRREeHx9BFE
WYTqfg9XohGAxCKBSCnZ0dcDqdhlrFEKlWq8QYIwD49ovFYjQajYiICBF17auLACLSbDaj3d3dObizsz
Nqt9v09PRE8Xhck0gul9NtONADnojI7XbPAXW73YV55XJZk8TFxcX6TuDk5GQORBAE1StxeXmpSaJery
99lWBZ69dqtQUgpVJJcW6/39cksL+/v/oTiEajC0DsdjvNZjPF+Q6HQ5PEsrJ0Huj1egs6WZbh+flZcX
4kEtFcr1KprDaRybKsqL++vlbU+/1+zfVEUVwtAZ/Pp6h/f39X1COi5nqBQGC1iaxUKine5eFwqDg/Fo
tx8QFdYfTm5uYLiPv7e0JExZD4OV/8/+3t7a0vkcmyTJIk0Xg8Vs0Dr6+vmta/vb1dbR74rTw8PKiCPz
09XV8m/qmEQiFF8IeHh7oLOq4EEJGazaam5ddajf5ElKJPNps1BDxXAohIjUbjC3CPx0OTycTQfbiewO
f3QDKZ5LIHVwIf4PP5vGFXZmUErq6uCAAok8lw9TFuBFKp1LxE4GF53eX0d10KSZLg+Pj4X/+SY/ePCw
HGGIzHYzg6OuLfG+W18MHBAYTDYf7daeLRLtv2RrcE9DdvC4UC5PN5mE6r3DvGhtU+RETn5+cLxVsikT
BHIru7u1N9uKTTaS4EDItCiAhWq1V13OVywWg02lwfGA6HmuNvb2+b7cQWi8XcUUgQBPB6varjWmMbE0
Y7nY5q4VYsFs0RRvv9PgmCMI8+VquVWq0WtzBqaC308bMPAGAwGAAiqvZQt8XcthbaELGZ/AbBX0kdVa
SPB+uxAAAAAElFTkSuQmCC
"""

    def get_schema(self, unused_handler):
        """Return the list of fields which will be displayed in the editor.

        This method assembles the list of fields which will be displayed in
        the rich text editor when a user double-clicks on the icon for the
        tag. The fields are a list of SchemaField objects in a FieldRegistry
        container. Each SchemaField has the actual attribute name as used in
        the tag, the display name for the form, and the type (usually
        string).

        The schema field type of "text" plays a special role: a tag is
        allowed to have at most one field of type "text", and this is stored
        in the body of the tag, not as an attribute.

        Args:
            unused_handler: a request handler; if None is received, the
                request is being made by the system and there is no user in
                session; the minimal schema must be returned in this case;
                don't attempt to access course, app_context, file system,
                datastore, etc. in this case; if a valid handler object is
                received, the request is being made by a real user and schema
                can have additional data binding in it; for example:
                'select_data' can be computed and set by accessing course,
                app_context, filesystem, datastore, etc.

        Returns:
            the list of fields to be displayed in the editor.
        """
        reg = schema_fields.FieldRegistry('Unimplemented Custom Tag')
        return reg

    def unavailable_schema(self, message):
        """Utility to generate a schema for a "not available" message."""
        reg = schema_fields.FieldRegistry(self.name())
        # A single read-only field carries the message; the 'visu' hook
        # disables the editor's Save button so the tag can't be submitted.
        reg.add_property(
            schema_fields.SchemaField(
                'unused_id', '', 'string', optional=True,
                editable=False, extra_schema_dict_values={
                    'value': message,
                    'visu': {
                        'visuType': 'funcName',
                        'funcName': 'disableSave'}}))
        return reg
class ContextAwareTag(BaseTag):
    """A tag which shares a context with other tags of the same type."""

    class Context(object):
        """Carries the environment and other data used by the tag."""

        def __init__(self, handler, env):
            """Initialize the context.

            Args:
                handler: controllers.utils.BaseHandler. The server runtime.
                env: dict. A dict of values shared between instances of the
                    tag on the same page. Values stored in this dict will be
                    available to subsequent calls to render() on the same
                    page, and to the call to rollup_header_footer() made at
                    the end of the page. Use this to store things like JS
                    library refs which can be de-dup'd and put in the header
                    or footer.
            """
            self.handler = handler
            self.env = env

    def render(self, node, context):  # pylint: disable=W0613
        """Receive a node and return a node.

        Args:
            node: cElementTree.Element. The DOM node for the tag which should
                be rendered.
            context: Context. The context shared between instances of the tag.

        Returns:
            A cElementTree.Element holding the rendered DOM.
        """
        # Delegate to the base implementation, which only needs the handler.
        return super(ContextAwareTag, self).render(node, context.handler)

    def rollup_header_footer(self, context):
        """Roll up header and footer from data stored in the tag environment.

        This method is called once at the end of page processing. It receives
        the context object, which has been passed to all rendering methods
        for this tag on the page, and which accumulates data stored by the
        renderers.

        Args:
            context: Context. Holds data set in an environment dict by
                previous calls to render, containing, e.g., URLs of CSS or JS
                resources.

        Returns:
            A pair of cElementTree.Element's (header, footer).

        NOTE: the base implementation returns None; subclasses that use a
        shared context must override this to return the (header, footer)
        pair described above.
        """
        pass
class ResourcesHandler(webapp2.RequestHandler):
    """Content handler for resources associated with custom tags."""

    def rebase_path(self, path):
        """Override this method to rebase the path to a different root."""
        return path

    def transform_resource(self, resource_str):
        """Override this method to apply a transformation to the resource."""
        return resource_str

    def get(self):
        """Respond to HTTP GET methods.

        Serves the file named by the (rebased) request path from under
        BUNDLE_ROOT, guessing the MIME type from the file name and applying
        transform_resource() to the content. Responds 404 when the file is
        missing or the path escapes BUNDLE_ROOT.
        """
        path = self.rebase_path(self.request.path)
        if path.startswith('/'):
            path = path[1:]
        path = os.path.normpath(path)
        resource_file = os.path.join(appengine_config.BUNDLE_ROOT, path)

        # The path comes from the request, so defend against directory
        # traversal ('..' components escaping BUNDLE_ROOT).
        base = os.path.abspath(appengine_config.BUNDLE_ROOT)
        if not os.path.abspath(resource_file).startswith(base + os.sep):
            self.error(404)
            return

        mimetype = mimetypes.guess_type(resource_file)[0]
        if mimetype is None:
            mimetype = 'application/octet-stream'

        try:
            self.response.status = 200
            self.response.headers['Content-Type'] = mimetype
            self.response.cache_control.no_cache = None
            self.response.cache_control.public = 'public'
            self.response.cache_control.max_age = 600
            # Open in binary mode (resources may be images) and close the
            # handle deterministically; the original leaked the file object.
            with open(resource_file, 'rb') as stream:
                self.response.write(self.transform_resource(stream.read()))
        except IOError:
            self.error(404)
class JQueryHandler(ResourcesHandler):
    """A content handler which serves jQuery scripts wrapped in $.ready()."""

    def transform_resource(self, resource_str):
        # Defer execution of the served script until the DOM is ready.
        wrapped = '$(function() {%s});' % resource_str
        return wrapped
class IifeHandler(ResourcesHandler):
    """A content handler which serves JavaScript wrapped in an immediately
    invoked function expression (IIFE).
    """

    def transform_resource(self, resource_str):
        # Isolate the served script's scope by wrapping it in an IIFE.
        wrapped = '(function() {%s})();' % resource_str
        return wrapped
class EditorBlacklists(object):
    """Lists tags which should not be supported by various editors.

    Each *_SCOPE attribute is a mutable set of tag names; modules add or
    remove their tags via register()/unregister().
    """

    COURSE_SCOPE = set()
    ASSESSMENT_SCOPE = set()
    DESCRIPTIVE_SCOPE = set()

    @classmethod
    def register(cls, tag_name, editor_set):
        """Adds tag_name to the given blacklist set."""
        editor_set.add(tag_name)

    @classmethod
    def unregister(cls, tag_name, editor_set):
        """Removes tag_name from the given set; a no-op if absent."""
        # set.discard is the idiomatic test-and-remove in one call.
        editor_set.discard(tag_name)
class Registry(object):
    """A class that holds all dynamically registered tags."""

    # Maps tag name -> tag class, shared across the process.
    _bindings = {}

    @classmethod
    def add_tag_binding(cls, tag_name, clazz):
        """Registers a tag name to class binding."""
        cls._bindings[tag_name] = clazz

    @classmethod
    def remove_tag_binding(cls, tag_name):
        """Unregisters a tag binding; a no-op for unknown names."""
        cls._bindings.pop(tag_name, None)

    @classmethod
    def get_all_tags(cls):
        """Returns a snapshot copy of the current bindings."""
        return dict(cls._bindings)
def get_tag_bindings():
    """Returns a snapshot dict mapping tag name to tag class.

    Registry.get_all_tags() already returns a fresh copy, so the extra
    dict(...items()) re-copy in the original was redundant.
    """
    return Registry.get_all_tags()
def html_string_to_element_tree(html_string):
    """Parses an HTML fragment into a cElementTree element.

    The fragment is wrapped in a <div> so the parse always yields a single
    root element (returned as index [0] of the parsed fragment). Namespacing
    of elements is disabled.
    """
    parser = html5lib.HTMLParser(
        tree=html5lib.treebuilders.getTreeBuilder('etree', cElementTree),
        namespaceHTMLElements=False)
    return parser.parseFragment('<div>%s</div>' % html_string)[0]
def html_to_safe_dom(html_string, handler, render_custom_tags=True):
    """Render HTML text as a tree of safe_dom elements.

    Args:
        html_string: str. The (possibly tag-bearing) HTML to convert.
        handler: controllers.utils.BaseHandler. The server runtime, passed
            to each custom tag's render() method.
        render_custom_tags: bool. When False, registered custom tags are
            copied through like ordinary elements instead of being rendered.

    Returns:
        A safe_dom.NodeList holding the converted DOM. Rendering errors for
        individual tags are replaced inline by error-message spans.
    """
    tag_bindings = get_tag_bindings()

    node_list = safe_dom.NodeList()
    if not html_string:
        return node_list

    # Set of all instance id's used in this dom tree, used to detect
    # duplication.
    used_instance_ids = set([])

    # A dictionary of environments, one for each tag type which appears in
    # the page.
    tag_contexts = {}

    def _generate_error_message_node_list(elt, error_message):
        """Generates a node_list representing an error message."""
        logging.error(
            '[%s, %s]: %s.', elt.tag, dict(**elt.attrib), error_message)

        node_list = safe_dom.NodeList()
        node_list.append(safe_dom.Element(
            'span', className='gcb-error-tag'
        ).add_text(error_message))
        # Keep any trailing text that followed the failing element.
        if elt.tail:
            node_list.append(safe_dom.Text(elt.tail))
        return node_list

    def _remove_namespace(tag_name):
        # Remove any namespacing which html5lib may have introduced. Html5lib
        # namespacing is of the form, e.g.,
        #     {http://www.w3.org/2000/svg}svg
        return re.sub(r'^\{[^\}]+\}', '', tag_name, count=1)

    def _process_html_tree(elt):
        """Recursively parses an HTML tree into a safe_dom.NodeList()."""
        # Return immediately with an error message if a duplicate instanceid
        # is detected.
        if 'instanceid' in elt.attrib:
            if elt.attrib['instanceid'] in used_instance_ids:
                return _generate_error_message_node_list(
                    elt, DUPLICATE_INSTANCE_ID_MESSAGE)
            used_instance_ids.add(elt.attrib['instanceid'])

        # Otherwise, attempt to parse this tag and all its child tags.
        original_elt = elt
        try:
            if render_custom_tags and elt.tag in tag_bindings:
                tag = tag_bindings[elt.tag]()
                if isinstance(tag, ContextAwareTag):
                    # Get or initialize a environment dict for this type of
                    # tag. Each tag type gets a separate environment shared
                    # by all instances of that tag.
                    context = tag_contexts.get(elt.tag)
                    if context is None:
                        context = ContextAwareTag.Context(handler, {})
                        tag_contexts[elt.tag] = context
                    # Render the tag
                    elt = tag.render(elt, context)
                else:
                    # Render the tag
                    elt = tag.render(elt, handler)

            # Map the (possibly re-rendered) element onto the matching
            # safe_dom node type.
            if elt.tag == cElementTree.Comment:
                out_elt = safe_dom.Comment()
            elif elt.tag.lower() == 'script':
                # Scripts get a dedicated node type.
                out_elt = safe_dom.ScriptElement()
            else:
                out_elt = safe_dom.Element(_remove_namespace(elt.tag))
            out_elt.add_attribute(**elt.attrib)
            if elt.text:
                out_elt.add_text(elt.text)
            for child in elt:
                out_elt.add_children(
                    _process_html_tree(child))
            node_list = safe_dom.NodeList()
            node_list.append(out_elt)
            # The tail belongs to the ORIGINAL element: a custom tag's
            # rendering must not swallow the text that followed it.
            if original_elt.tail:
                node_list.append(safe_dom.Text(original_elt.tail))
            return node_list
        except Exception as e:  # pylint: disable=broad-except
            logging.exception('Error handling tag: %s', elt.tag)
            return _generate_error_message_node_list(
                original_elt, '%s: %s' % (INVALID_HTML_TAG_MESSAGE, e))

    root = html_string_to_element_tree(html_string)
    if root.text:
        node_list.append(safe_dom.Text(root.text))
    for child_elt in root:
        node_list.append(_process_html_tree(child_elt))

    # After the page is processed, rollup any global header/footer data which
    # the environment-aware tags have accumulated in their env's.
    for tag_name, context in tag_contexts.items():
        header, footer = tag_bindings[tag_name]().rollup_header_footer(context)
        node_list.insert(0, _process_html_tree(header))
        node_list.append(_process_html_tree(footer))

    return node_list
def get_components_from_html(html):
    """Returns a list of dicts representing the components in a lesson.

    Args:
        html: a block of html that may contain some HTML tags representing
            custom components.

    Returns:
        A list of dicts. Each dict represents one component and has two
        keys:
            - instanceid: the instance id of the component
            - cpt_name: the name of the component tag (e.g. gcb-googlegroup)
    """
    # Wrap in a <div> so the fragment always has a single parse root.
    root = lxml.html.fromstring('<div>%s</div>' % html)
    # Every element carrying an instanceid attribute is a component; its
    # attributes are merged after cpt_name so they take precedence, exactly
    # as dict.update() did.
    return [
        dict([('cpt_name', node.tag)] + list(node.attrib.items()))
        for node in root.xpath('.//*[@instanceid]')]
| [
[
8,
0,
0.033,
0.0022,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0374,
0.0022,
0,
0.66,
0.037,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0441,
0.0022,
0,
0.66,
... | [
"\"\"\"Handlers for custom HTML tags.\"\"\"",
"__author__ = 'John Orr (jorr@google.com)'",
"import logging",
"import mimetypes",
"import os",
"import re",
"from xml.etree import cElementTree",
"import html5lib",
"import lxml.html",
"import safe_dom",
"import webapp2",
"import appengine_config"... |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unified method of referring to to heterogenous resources in courses"""
__author__ = 'Mike Gainer (mgainer@google.com)'
class AbstractResourceHandler(object):
    """Unified accessor for heterogeneous resources within CourseBuilder.

    CourseBuilder contains a number of different resources, such as
    questions, units, lessons, course settings, etc. There are a number
    of features that are concerned with acting on some or all of these
    types, and would like to do so polymorphically. (E.g., I18N,
    skill mapping, and other 3rd-party modules).
    """

    # Derived classes must set TYPE to a short, globally-unique string. This
    # string may only contain lowercase letters, numbers, and underscores.
    TYPE = None

    @classmethod
    def get_key(cls, instance):
        """Returns a key for the given instance.

        Args:
            instance: An instance of a Course Builder resource.

        Returns:
            A Key for that instance.
        """
        raise NotImplementedError('Derived classes must implement this.')

    @classmethod
    def get_resource(cls, course, key):
        """Returns an instance of the resource type.

        Args:
            course: A courses.Course instance.
            key: A small fact (string or integer, typically) representing
                the primary key for the desired instance.

        Returns:
            A loaded instance of the type appropriate for the Handler
            subtype. Note that this can be very broadly interpreted. For
            example, since it is so common to need the Unit corresponding to
            a Lesson, this function in ResourceLesson returns a 2-tuple of
            the unit and lesson, rather than just the lesson.
        """
        raise NotImplementedError('Derived classes must implement this.')

    @classmethod
    def get_resource_title(cls, resource):
        """Get a title for the resource.

        Args:
            resource: Whatever is returned from get_resource() (q.v.)

        Returns:
            A short human-friendly string for titling the resource.
            NOTE: This string is not I18N'd - it is the actual string
            from the resource, before translation. This string is
            suitable for display in dashboard contexts, where it is
            OK to presume a reasonable working knowledge of English,
            but not on student-facing pages.
        """
        raise NotImplementedError('Derived classes must implement this.')

    @classmethod
    def get_schema(cls, course, key):
        """Return a schema describing the value returned from get_data_dict().

        Again, note that in theory, the specific identity of the item in
        question should not be required to get what should be a generic
        schema. The difference between theory and practice....

        Args:
            course: A courses.Course instance.
            key: A small fact (string or integer, typically) representing
                the primary key for the desired instance.

        Returns:
            A schema_fields.FieldRegistry instance.
        """
        raise NotImplementedError('Derived classes must implement this.')

    @classmethod
    def get_data_dict(cls, course, key):
        """Return a simple dict expression of the object's data.

        This is typically used in REST editors and other similar import/
        export related scenarios.

        Args:
            course: A courses.Course instance.
            key: A small fact (string or integer, typically) representing
                the primary key for the desired instance.

        Returns:
            A dict corresponding to the schema from get_schema().
        """
        raise NotImplementedError('Derived classes must implement this.')

    @classmethod
    def get_view_url(cls, resource):
        """Return a URL that will show a student view of the item.

        Not all classes need to return a reasonable value here. For
        example, Labels and Skills may just not have a simple
        student-visible representation. It is fine in those cases to return
        None; the caller must deal with this situation appropriately.

        Args:
            resource: Whatever is returned from get_resource() (q.v.)

        Returns:
            A *relative* URL. E.g., dashboard?action=foo&tab=bar Such a
            URL can be placed unmodified on a page which has been set
            up with the default URL prefix pointing to the namespace for
            the current course.
        """
        raise NotImplementedError('Derived classes must implement this.')

    @classmethod
    def get_edit_url(cls, key):
        """Return a dashboard URL for editing the resource.

        All classes should implement this function. If it is hard to
        implement this, then you may have made a poor selection as to
        the noun that you're trying to represent.

        Args:
            key: A small fact (string or integer, typically) representing
                the primary key for the desired instance.

        Returns:
            A *relative* URL. E.g., dashboard?action=foo&tab=bar Such a
            URL can be placed unmodified on a page which has been set
            up with the default URL prefix pointing to the namespace for
            the current course.
        """
        raise NotImplementedError('Derived classes must implement this.')
class Registry(object):
    """Maps resource type names to their AbstractResourceHandler classes."""

    _RESOURCE_HANDLERS = {}

    @classmethod
    def register(cls, resource_handler):
        """Object types wishing to be generically handled register here.

        Args:
            resource_handler: A class that inherits from
                AbstractResourceHandler, above.

        Raises:
            ValueError: if the handler's TYPE is already registered.
        """
        type_name = resource_handler.TYPE
        if type_name in cls._RESOURCE_HANDLERS:
            raise ValueError(
                'The type name "%s" is already registered as a resource.' %
                type_name)
        cls._RESOURCE_HANDLERS[type_name] = resource_handler

    @classmethod
    def get(cls, name):
        """Returns the handler for name, raising ValueError if unknown."""
        try:
            return cls._RESOURCE_HANDLERS[name]
        except KeyError:
            raise ValueError('Unknown resource type: %s' % name)

    @classmethod
    def is_valid_name(cls, name):
        """Whether name is a registered resource type."""
        return name in cls._RESOURCE_HANDLERS
class Key(object):
    """Manages key for Course Builder resource.

    Every Course Builder resource can be identified by a type name and a
    type-contextual key. This class holds data related to this keying, and
    manages serialization/deserialization as strings.
    """

    def __init__(self, type_str, key, course=None):
        """Builds a key, validating the resource type name.

        Args:
            type_str: str. A type name registered with Registry.
            key: A small fact (string or integer, typically) identifying
                the instance within its type.
            course: optional courses.Course; used as a fallback by
                get_resource() when no course is passed in.

        Raises:
            ValueError: if type_str is not a registered resource type.
        """
        # Validate with an explicit raise rather than the original assert:
        # asserts are stripped when Python runs with -O, which would
        # silently skip this check.
        if not Registry.is_valid_name(type_str):
            raise ValueError('Unknown resource type: %s' % type_str)
        self._type = type_str
        self._key = key
        self._course = course

    def __str__(self):
        return '%s:%s' % (self._type, self._key)

    @property
    def type(self):
        return self._type

    @property
    def key(self):
        return self._key

    @classmethod
    def fromstring(cls, key_str):
        """Parses a 'type:key' string produced by __str__ back into a Key.

        Only the first ':' separates type from key, so the key portion may
        itself contain colons.

        Raises:
            ValueError: if key_str contains no ':' separator (the original
            str.index() call raised ValueError in the same situation).
        """
        type_str, sep, key = key_str.partition(':')
        if not sep:
            raise ValueError('Malformed resource key: %s' % key_str)
        return cls(type_str, key)

    def get_resource(self, course):
        """Loads the keyed instance, preferring the passed-in course."""
        course = course or self._course
        return Registry.get(self._type).get_resource(course, self._key)

    def get_schema(self, course):
        """Returns the schema for the keyed resource."""
        return Registry.get(self._type).get_schema(course, self._key)

    def get_data_dict(self, course):
        """Returns the dict form of the keyed resource."""
        return Registry.get(self._type).get_data_dict(course, self._key)
| [
[
8,
0,
0.0685,
0.0046,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0776,
0.0046,
0,
0.66,
0.25,
777,
1,
0,
0,
0,
0,
3,
0
],
[
3,
0,
0.379,
0.5799,
0,
0.66,
... | [
"\"\"\"Unified method of referring to to heterogenous resources in courses\"\"\"",
"__author__ = 'Mike Gainer (mgainer@google.com)'",
"class AbstractResourceHandler(object):\n \"\"\"Unified accessor for heterogenous resources within CourseBuilder.\n\n CourseBuilder contains a number of different resources... |
# -*- coding: utf-8; -*-
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions used for Course Builder locale support."""
__author__ = 'John Orr (jorr@google.com)'
import logging
import os
import re
import appengine_config
# Locale description information based on Babel Locale.display_name. However
# the names are collected here because (i) Babel does not have correct display
# names for all of the locales included, and (ii) Babel cannot access its
# localeinfo data when it is loaded as a Zip file.
# Keys are locale codes; values are the native-language display name with the
# code appended in parentheses.
LOCALES_DISPLAY_NAMES = {
    'af': u'Afrikaans (af)',
    'am': u'አማርኛ (am)',
    'ar': u'العربية (ar)',
    'bg': u'български (bg)',
    'bn': u'বাংলা (bn)',
    'ca': u'català (ca)',
    'cs': u'čeština (cs)',
    'da': u'dansk (da)',
    'de': u'Deutsch (de)',
    'el': u'Ελληνικά (el)',
    'en_GB': u'British English (en_GB)',
    'en_US': u'U.S. English (en_US)',
    'es': u'español (es)',
    'et': u'eesti (et)',
    'eu': u'euskara (eu)',
    'fa': u'فارسی (fa)',
    'fi': u'suomi (fi)',
    'fil': u'Filipino (fil)',
    'fr': u'français (fr)',
    'gl': u'galego (gl)',
    'gu': u'ગુજરાતી (gu)',
    'hi': u'हिन्दी (hi)',
    'hr': u'hrvatski (hr)',
    'hu': u'magyar (hu)',
    'id': u'Bahasa Indonesia (id)',
    'is': u'íslenska (is)',
    'it': u'italiano (it)',
    'iw': u'עברית (iw)',  # Former ISO-639 code for Hebrew; should now be he
    'ja': u'日本語 (ja)',
    'kn': u'ಕನ್ನಡ (kn)',
    'ko': u'한국어 (ko)',
    'ln': u'Fake Translation (ln)',
    'lt': u'lietuvių (lt)',
    'lv': u'latviešu (lv)',
    'ml': u'മലയാളം (ml)',
    'mr': u'मराठी (mr)',
    'ms': u'Bahasa Melayu (ms)',
    'nl': u'Nederlands (nl)',
    'no': u'Nynorsk (no)',  # Correct ISO-369-1 is nn and ISO-369-2 is nno
    'pl': u'polski (pl)',
    'pt_BR': u'português do Brasil (pt_BR)',
    'pt_PT': u'português europeu (pt_PT)',
    'ro': u'română (ro)',
    'ru': u'русский (ru)',
    'sk': u'slovenský (sk)',
    'sl': u'slovenščina (sl)',
    'sr': u'Српски (sr)',
    'sv': u'svenska (sv)',
    'sw': u'Kiswahili (sw)',
    'ta': u'தமிழ் (ta)',
    'te': u'తెలుగు (te)',
    'th': u'ไทย (th)',
    'tr': u'Türkçe (tr)',
    'uk': u'українська (uk)',
    'ur': u'اردو (ur)',
    'vi': u'Tiếng Việt (vi)',
    'zh_CN': u'中文 (简体) (zh_CN)',  # Chinese (Simplified)
    'zh_TW': u'中文 (繁體) (zh_TW)',  # Chinese (Traditional)
    'zu': u'isiZulu (zu)',
}
def get_system_supported_locales():
    """Returns the sorted list of locale codes with installed translations.

    Lists the subdirectories of modules/i18n/resources/locale under
    BUNDLE_ROOT, plus 'ln' (the 'Fake Translation' entry in
    LOCALES_DISPLAY_NAMES, which has no directory of its own).
    """
    translations_path = os.path.join(
        appengine_config.BUNDLE_ROOT, 'modules/i18n/resources/locale')
    return sorted(os.listdir(translations_path) + ['ln'])
def get_locale_display_name(locale):
    """Returns the display name for locale, or the code itself if unknown."""
    try:
        return LOCALES_DISPLAY_NAMES[locale]
    except KeyError:
        return locale
def parse_accept_language(accept_language_str):
    """Parse an RFC 2616 Accept-Language string.

    Accept-Language strings are of the form
        en-US,en;q=0.8,el;q=0.6
    where each language range (en-US, en, el) may be followed by a quality
    score (q). So in the example US English has default quality score (1),
    English has quality score 0.8, and Greek has quality score 0.6.

    Args:
        accept_language_str: str. A string in RFC 2616 format. If the string
            is None or empty, an empty list is returned.

    Returns:
        A list of pairs. The first element of the pair is the language code
        (a str, normalized like 'en_US') and the second element is a float
        between 0 and 1. The list is sorted in decreasing order by q, so
        that the highest quality language is the first element of the list.
        An empty list is returned when the header cannot be parsed.

    Refs:
        http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14
    """
    if not accept_language_str:
        return []
    # Refuse absurdly long headers outright.
    assert len(accept_language_str) < 8 * 1024
    parsed = []
    try:
        for item in accept_language_str.split(','):
            lang = item.strip()
            q = 1.0
            if ';' in lang:
                # Split only on the first ';' and re-strip both halves: the
                # original kept the unstripped left half, so headers like
                # 'en-US, en;q=0.8' silently dropped ' en'.
                lang, q_str = lang.split(';', 1)
                lang = lang.strip()
                q_str = q_str.strip()
                q = float(q_str[2:]) if q_str.startswith('q=') else float(
                    q_str)
            components = lang.split('-')
            if not all(re.match('^[a-zA-Z]+$', c) for c in components):
                continue
            # Normalize 'en-us' -> 'en_US'.
            lang = '_'.join(
                [components[0].lower()] + [c.upper() for c in components[1:]])
            parsed.append((lang, q))
        # Use key= (portable) rather than the Python-2-only positional cmp
        # argument the original passed to sorted().
        return sorted(parsed, key=lambda pair: -pair[1])
    except Exception:  # pylint: disable=broad-except
        logging.exception('Bad Accept-Language: %s', accept_language_str)
        return []
| [
[
8,
0,
0.1088,
0.0068,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.1224,
0.0068,
0,
0.66,
0.1111,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.1361,
0.0068,
0,
0.66,... | [
"\"\"\"Functions used for Course Builder locale support.\"\"\"",
"__author__ = 'John Orr (jorr@google.com)'",
"import logging",
"import os",
"import re",
"import appengine_config",
"LOCALES_DISPLAY_NAMES = {\n 'af': u'Afrikaans (af)',\n 'am': u'አማርኛ (am)',\n 'ar': u'العربية (ar)',\n 'bg': u'... |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom Jinja2 filters used in Course Builder."""
__author__ = 'John Orr (jorr@google.com)'
import sys
import traceback
import jinja2
import safe_dom
import tags
from webapp2_extras import i18n
import appengine_config
from common import caching
from models import config
from models import models
from models.counters import PerfCounter
# Max size (bytes) for the in-process jinja template bytecode cache.
MAX_GLOBAL_CACHE_SIZE_BYTES = 8 * 1024 * 1024

# This cache used to be memcache based; now it's in-process.
CAN_USE_JINJA2_TEMPLATE_CACHE = config.ConfigProperty(
    'gcb_can_use_jinja2_template_cache', bool, safe_dom.Text(
        'Whether jinja2 can cache bytecode of compiled templates in-process.'),
    default_value=True)
def finalize(x):
    """A finalize method which will correctly handle safe_dom elements.

    safe_dom nodes and node lists are emitted via their sanitized form,
    wrapped in Markup so jinja does not escape them again; anything else is
    passed through unchanged.
    """
    # Single isinstance call with a tuple of types, instead of two chained
    # isinstance checks.
    if isinstance(x, (safe_dom.Node, safe_dom.NodeList)):
        return jinja2.utils.Markup(x.sanitized)
    return x
def js_string_raw(data):
    """Escape a string so that it can be put in a JS quote."""
    if not isinstance(data, basestring):
        return data
    # Ordered table of escapes; the backslash must come first so escapes
    # added by later substitutions are not themselves re-escaped.
    escapes = (
        ('\\', '\\\\'),
        ('\r', '\\r'),
        ('\n', '\\n'),
        ('\b', '\\b'),
        ('"', '\\"'),
        ("'", "\\'"),
        ('<', '\\u003c'),
        ('>', '\\u003e'),
        ('&', '\\u0026'),
    )
    for plain, escaped in escapes:
        data = data.replace(plain, escaped)
    return data
def js_string(data):
    """Escapes data for JS quoting and marks it safe against re-escaping."""
    escaped = js_string_raw(data)
    return jinja2.utils.Markup(escaped)
def get_gcb_tags_filter(handler):
    """Builds the 'gcb_tags' jinja filter, bound to the given handler."""

    @appengine_config.timeandlog('get_gcb_tags_filter')
    def gcb_tags(data):
        """Apply GCB custom tags, if enabled. Otherwise pass as if by 'safe'."""
        data = unicode(data)
        if tags.CAN_USE_DYNAMIC_TAGS.value:
            # Render custom tags into a sanitized DOM before marking safe.
            return jinja2.utils.Markup(tags.html_to_safe_dom(data, handler))
        else:
            return jinja2.utils.Markup(data)
    return gcb_tags
class ProcessScopedJinjaCache(caching.ProcessScopedSingleton):
    """This class holds in-process cache of Jinja compiled templates."""

    @classmethod
    def get_cache_len(cls):
        """Returns the number of entries currently in the cache."""
        # len() of the mapping directly; the original materialized
        # items.keys() just to count them.
        return len(ProcessScopedJinjaCache.instance().cache.items)

    @classmethod
    def get_cache_size(cls):
        """Returns the total size, in bytes, of cached entries."""
        return ProcessScopedJinjaCache.instance().cache.total_size

    def __init__(self):
        self.cache = caching.LRUCache(
            max_size_bytes=MAX_GLOBAL_CACHE_SIZE_BYTES)
        # Charge each entry for its key plus its value (shallow sizes).
        self.cache.get_entry_size = self._get_entry_size

    def _get_entry_size(self, key, value):
        """Estimates the memory footprint of one cache entry."""
        return sys.getsizeof(key) + sys.getsizeof(value)
class JinjaBytecodeCache(jinja2.BytecodeCache):
    """Jinja-compatible cache backed by global in-process Jinja cache."""

    def __init__(self, prefix):
        # The prefix namespaces cache keys so entries from different
        # namespaces don't collide in the shared process-wide cache.
        self.prefix = prefix

    def load_bytecode(self, bucket):
        """Populates bucket from the shared cache, if a hit is found."""
        found, _bytes = ProcessScopedJinjaCache.instance().cache.get(
            self.prefix + bucket.key)
        if found and _bytes is not None:
            bucket.bytecode_from_string(_bytes)

    def dump_bytecode(self, bucket):
        """Serializes bucket's bytecode into the shared cache."""
        _bytes = bucket.bytecode_to_string()
        ProcessScopedJinjaCache.instance().cache.put(
            self.prefix + bucket.key, _bytes)
# Perf counters exposing the size of the in-process Jinja bytecode cache.
JINJA_CACHE_LEN = PerfCounter(
    'gcb-models-JinjaBytecodeCache-len',
    'A total number of items in Jinja cache.')
JINJA_CACHE_SIZE_BYTES = PerfCounter(
    'gcb-models-JinjaBytecodeCache-bytes',
    'A total size of items in Jinja cache in bytes.')

# These counters poll the live cache when sampled, rather than being
# incremented explicitly.
JINJA_CACHE_LEN.poll_value = ProcessScopedJinjaCache.get_cache_len
JINJA_CACHE_SIZE_BYTES.poll_value = ProcessScopedJinjaCache.get_cache_size
def create_jinja_environment(loader, locale=None, autoescape=True):
    """Create proper jinja environment.

    Args:
        loader: jinja2 loader used to locate templates.
        locale: str or None. When set, i18n translations for this locale
            are installed into the environment.
        autoescape: bool. Whether HTML autoescaping is enabled.

    Returns:
        A configured jinja2.Environment with the js_string filter and a
        namespaced bytecode cache (when enabled).
    """
    cache = None
    if CAN_USE_JINJA2_TEMPLATE_CACHE.value:
        # Key the bytecode cache by the current memcache namespace so
        # same-named templates from different namespaces don't collide.
        prefix = 'jinja2:bytecode:%s:/' % models.MemcacheManager.get_namespace()
        cache = JinjaBytecodeCache(prefix)

    jinja_environment = jinja2.Environment(
        autoescape=autoescape, finalize=finalize,
        extensions=['jinja2.ext.i18n'], bytecode_cache=cache, loader=loader)

    jinja_environment.filters['js_string'] = js_string

    if locale:
        i18n.get_i18n().set_locale(locale)
        jinja_environment.install_gettext_translations(i18n)

    # Wrap jinja's exception handler so template errors are also printed via
    # traceback before jinja formats them.
    old_handle_exception = jinja_environment.handle_exception

    def _handle_exception(exc_info=None, rendered=False, source_hint=None):
        """Handle template exception."""
        traceback.print_exc(exc_info)
        result = old_handle_exception(exc_info, rendered, source_hint)
        return result

    jinja_environment.handle_exception = _handle_exception

    return jinja_environment
def get_template(
    template_name, dirs, handler=None, autoescape=True):
    """Sets up an environment and gets jinja template.

    Args:
        template_name: str. Name of the template file to load.
        dirs: list of directory paths searched for the template.
        handler: optional request handler, bound into the gcb_tags filter so
            custom tags can access the server runtime.
        autoescape: bool. Whether HTML autoescaping is enabled.

    Returns:
        The compiled jinja2 template.
    """
    # Defer to avoid circular import.
    from controllers import sites

    # Locale resolution: the current request's locale, falling back to the
    # course default, falling back to US English.
    locale = None
    app_context = sites.get_course_for_current_request()
    if app_context:
        locale = app_context.get_current_locale()
        if not locale:
            locale = app_context.default_locale
    if not locale:
        locale = 'en_US'

    jinja_environment = create_jinja_environment(
        jinja2.FileSystemLoader(dirs), locale=locale, autoescape=autoescape)
    jinja_environment.filters['gcb_tags'] = get_gcb_tags_filter(handler)
    return jinja_environment.get_template(template_name)
| [
[
8,
0,
0.0811,
0.0054,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0919,
0.0054,
0,
0.66,
0.0385,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.1027,
0.0054,
0,
0.66,... | [
"\"\"\"Custom Jinja2 filters used in Course Builder.\"\"\"",
"__author__ = 'John Orr (jorr@google.com)'",
"import sys",
"import traceback",
"import jinja2",
"import safe_dom",
"import tags",
"from webapp2_extras import i18n",
"import appengine_config",
"from common import caching",
"from models ... |
#!/usr/bin/python
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for processing various .yaml files in CourseBuilder installations."""
__author__ = 'Mike Gainer (mgainer@google.com)'
import copy
import re
import yaml
# Major top-level app.yaml sections; the custom dumper inserts a blank
# line before each of these for visual separation.
NEWLINE_BEFORE_YAML_SECTIONS = {
    'env_variables',
    'includes',
    'inbound_services',
    'builtins',
    'libraries',
    'handlers',
}
class CourseBuilderYamlFormatter(yaml.Dumper):
    """Custom formatter to generate CourseBuilder style in yaml files."""
    def __init__(self, *args, **kwargs):
        super(CourseBuilderYamlFormatter, self).__init__(*args, **kwargs)
        self.best_width = 0 # Minimize line merging
    # Add newlines before major sections for good visual parsing.
    def emit(self, item):
        # Inject two line breaks before any scalar event whose value names
        # one of the major top-level sections, then emit normally.
        if (isinstance(item, yaml.ScalarEvent) and
            str(item.value) in NEWLINE_BEFORE_YAML_SECTIONS):
            self.write_line_break()
            self.write_line_break()
        super(CourseBuilderYamlFormatter, self).emit(item)
    # For very long lines, don't leave 1st item in element on same line
    # as name of element; instead, move to next line so all parts have
    # the same indent. (E.g., for GCB_REGISTERED_MODULES list)
    def write_plain(self, text, split):
        # NOTE(review): the break is applied both for text longer than 80
        # chars and for any text containing a space.
        if len(text) > 80 or ' ' in text:
            self.write_line_break()
            self.write_indent()
        super(CourseBuilderYamlFormatter, self).write_plain(text, split)
class AppYamlFile(object):
    """Parse, modify, and write app.yaml file."""
    def __init__(self, name):
        # Path to the app.yaml file; parsing is deferred until first use.
        self._name = name
        self._loaded = False
    def _lazy_load(self):
        """Parse the yaml file into a node tree and index key sections."""
        if self._loaded:
            return
        # yaml.compose keeps the node tree (rather than converting to
        # plain Python objects) so the file can be edited and re-emitted.
        with open(self._name) as fp:
            self._root = yaml.compose(fp)
        # Root value is a list of 2-tuples for name/value of top-level
        # items in yaml file.
        for item in self._root.value:
            if item[0].value == 'env_variables':
                self._env_vars = item[1].value
            if item[0].value == 'libraries':
                self._library_list = item[1].value
            if item[0].value == 'application':
                self._application = item[1].value
        # NOTE(review): the attributes above are only set when the matching
        # section exists in the file; later accessors assume they exist.
        # Libraries item is a list of name/value 2-tuples.
        # Extract name and version for each library.
        self._lib_versions = {}
        for lib_spec in self._library_list:
            name = None
            vers = None
            for lib_item in lib_spec.value:
                if lib_item[0].value == 'name':
                    name = lib_item[1].value
                elif lib_item[0].value == 'version':
                    vers = lib_item[1].value
            if name and vers:
                self._lib_versions[name] = vers
        self._loaded = True
    def write(self):
        """Serialize the node tree back to disk in CourseBuilder style."""
        self._lazy_load()
        content = yaml.serialize(self._root, stream=None,
                                 Dumper=CourseBuilderYamlFormatter)
        with open(self._name, 'w') as fp:
            fp.write(content)
    def require_library(self, library, version):
        """Add tree nodes for new library if it is not already called for."""
        self._lazy_load()
        if library in self._lib_versions:
            # Already present: same version is a no-op; a different version
            # is an unsatisfiable conflict.
            if version != self._lib_versions[library]:
                raise ValueError(
                    'Library "%s" is already required ' % library +
                    'at version "%s". ' % self._lib_versions[library] +
                    'Cannot satisfy request for version "%s".' % version)
            return False
        # Clone an existing library node so the new entry gets the same
        # node structure, then overwrite its name and version scalars.
        added_lib = copy.deepcopy(self._library_list[0])
        added_lib.value[0][1].value = library
        added_lib.value[1][1].value = version
        self._library_list.append(added_lib)
        # Keep the libraries section sorted by library name.
        self._library_list.sort(key=lambda x: x.value[0][1].value)
        return True
    def set_env(self, var_name, var_value):
        """Set, replace, or (for a blank value) remove an env variable."""
        self._lazy_load()
        var_value = var_value.strip()
        env_var = None
        for member in self._env_vars:
            if member[0].value == var_name:
                env_var = member
                break
        if var_value:
            if not env_var:
                # Build fresh scalar nodes for a brand-new variable.
                env_var_name = yaml.ScalarNode('tag:yaml.org,2002:str',
                                               var_name)
                env_var_value = yaml.ScalarNode('tag:yaml.org,2002:str',
                                                var_value)
                env_var = (env_var_name, env_var_value)
                self._env_vars.append(env_var)
            else:
                env_var[1].value = var_value
        else:
            # Blank value means: delete the variable, if present.
            if env_var:
                self._env_vars.remove(env_var)
    def get_env(self, var_name):
        """Return the value of the named env variable, or None if absent."""
        self._lazy_load()
        for env_var in self._env_vars:
            if env_var[0].value == var_name:
                return env_var[1].value
        return None
    def get_all_env(self):
        """Return all env variables as a plain {name: value} dict."""
        self._lazy_load()
        ret = {}
        for env_var in self._env_vars:
            ret[env_var[0].value] = env_var[1].value
        return ret
    @property
    def application(self):
        # The 'application' setting parsed from the yaml file.
        self._lazy_load()
        return self._application
class ModuleManifest(object):
    """Parse module.yaml files into object providing convienent properties."""
    def __init__(self, path):
        # Path to the module.yaml file; parsing is deferred to first access.
        self._path = path
        self._loaded = False
    def _lazy_load(self):
        """Read and validate the manifest on first access."""
        if self._loaded:
            return
        with open(self._path) as fp:
            # NOTE(review): yaml.load without an explicit Loader can invoke
            # arbitrary constructors; acceptable for trusted local
            # manifests, but never point this at untrusted input.
            module_spec = yaml.load(fp)
        self._main_module = module_spec['module_name']
        parts = self._main_module.split('.')
        if parts[0] != 'modules' or len(parts) < 2:
            raise ValueError(
                'module_name is expected to name the main python file '
                'under CourseBuilder as: modules.<module>.<filename>')
        self._module_name = parts[1]
        self._required_version = module_spec['container_version']
        self._third_party_libraries = module_spec.get(
            'third_party_libraries', {})
        self._appengine_libraries = module_spec.get(
            'appengine_libraries', {})
        self._tests = module_spec['tests']
        self._loaded = True
    def assert_version_compatibility(self, actual_version):
        """Raise ValueError when CourseBuilder is older than required.

        Compares dotted/dashed version strings numerically, one component
        at a time, most-significant first.
        """
        self._lazy_load()
        for required, actual in zip(re.split(r'[-.]', self._required_version),
                                    re.split(r'[-.]', actual_version)):
            if int(required) < int(actual):
                # Actual is strictly newer; later components don't matter.
                break
            if int(required) > int(actual):
                raise ValueError(
                    'Current CourseBuilder version %s ' % actual_version +
                    'is less than the version %s ' % self._required_version +
                    'required by module %s' % self._module_name)
    @property
    def module_name(self):
        # Short module name: second component of the dotted module_name.
        self._lazy_load()
        return self._module_name
    @property
    def main_module(self):
        # Full dotted module path as given in the manifest.
        self._lazy_load()
        return self._main_module
    @property
    def third_party_libraries(self):
        self._lazy_load()
        return self._third_party_libraries
    @property
    def appengine_libraries(self):
        self._lazy_load()
        return self._appengine_libraries
    @property
    def tests(self):
        self._lazy_load()
        return self._tests
| [
[
8,
0,
0.0733,
0.0043,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0819,
0.0043,
0,
0.66,
0.125,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0905,
0.0043,
0,
0.66,
... | [
"\"\"\"Classes for processing various .yaml files in CourseBuilder installations.\"\"\"",
"__author__ = 'Mike Gainer (mgainer@google.com)'",
"import copy",
"import re",
"import yaml",
"NEWLINE_BEFORE_YAML_SECTIONS = set([\n 'env_variables',\n 'includes',\n 'inbound_services',\n 'builtins',\n ... |
"""Classes to build sanitized HTML."""
__author__ = 'John Orr (jorr@google.com)'
import cgi
import re
def escape(strg):
    """HTML-escape strg, additionally encoding single-quote and backtick.

    cgi.escape with quote=1 covers &, <, >, and double-quote; single-quote
    and backtick are encoded here as numeric character references so the
    result is safe inside any attribute quoting style.
    """
    # BUG FIX: the replacement strings had been de-entitized into the
    # literal characters themselves (e.g. replacing "'" with "'"), which
    # made both replace() calls no-ops.  Restore the numeric entities.
    return cgi.escape(strg, quote=1).replace("'", '&#39;').replace('`', '&#96;')
class Node(object):
    """Base class for the sanitizing module."""

    def __init__(self):
        # Assigned when this node is attached to a container node/list.
        self._parent = None

    def _set_parent(self, parent):
        self._parent = parent

    @property
    def parent(self):
        return self._parent

    @property
    def sanitized(self):
        """Subclasses must render themselves as sanitized markup."""
        raise NotImplementedError()

    def __str__(self):
        return self.sanitized
# pylint: disable=incomplete-protocol
class NodeList(object):
    """Holds a list of Nodes and can bulk sanitize them."""

    def __init__(self):
        self.list = []
        self._parent = None

    def __len__(self):
        return len(self.list)

    def _set_parent(self, parent):
        assert self != parent
        self._parent = parent

    @property
    def parent(self):
        return self._parent

    def append(self, node):
        assert node is not None, 'Cannot add an empty value to the node list'
        self.list.append(node)
        node._set_parent(self)  # pylint: disable=protected-access
        return self

    @property
    def children(self):
        # Shallow copy so callers cannot mutate the internal list.
        return [] + self.list

    def empty(self):
        self.list = []
        return self

    def delete(self, node):
        # Rebuild the list, dropping every child equal to the given node.
        self.list = [child for child in self.list if child != node]

    def insert(self, index, node):
        assert node is not None, 'Cannot add an empty value to the node list'
        self.list.insert(index, node)
        node._set_parent(self)  # pylint: disable=protected-access
        return self

    @property
    def sanitized(self):
        # Concatenate the sanitized form of every child, in order.
        return ''.join(node.sanitized for node in self.list)

    def __str__(self):
        return self.sanitized
class Text(Node):
    """Holds untrusted text which will be sanitized when accessed."""

    def __init__(self, unsafe_string):
        super(Text, self).__init__()
        # Coerce eagerly so escaping always operates on unicode.
        self._value = unicode(unsafe_string)

    @property
    def sanitized(self):
        """Return the held text with HTML metacharacters escaped."""
        return escape(self._value)
class Comment(Node):
    """An HTML comment."""

    def __init__(self, unsafe_string=''):
        super(Comment, self).__init__()
        self._value = unicode(unsafe_string)

    def get_value(self):
        """Return the raw, unescaped comment text."""
        return self._value

    @property
    def sanitized(self):
        # Escape the body so crafted text cannot break out of the comment.
        return '<!--%s-->' % escape(self._value)

    def add_attribute(self, **attr):
        # Comments carry no attributes; accept and ignore for API parity.
        pass

    def add_text(self, unsafe_string):
        self._value += unicode(unsafe_string)
class Element(Node):
    """Embodies an HTML element which will be sanitized when accessed."""
    # Tag and attribute names must look like simple identifiers.
    _ALLOWED_NAME_PATTERN = re.compile(r'^[a-zA-Z][_\-a-zA-Z0-9]*$')
    # HTML5 void elements render self-closed when they have no children.
    _VOID_ELEMENTS = frozenset([
        'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen',
        'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr'])
    def __init__(self, tag_name, **attr):
        """Initializes an element with given tag name and attributes.
        Tag name will be restricted to alpha chars, attribute names
        will be quote-escaped.
        Args:
            tag_name: the name of the element, which must match
                _ALLOWED_NAME_PATTERN.
            **attr: the names and value of the attributes. Names must match
                _ALLOWED_NAME_PATTERN and values will be quote-escaped.
        """
        assert Element._ALLOWED_NAME_PATTERN.match(tag_name), (
            'tag name %s is not allowed' % tag_name)
        for attr_name in attr:
            assert Element._ALLOWED_NAME_PATTERN.match(attr_name), (
                'attribute name %s is not allowed' % attr_name)
        super(Element, self).__init__()
        self._tag_name = tag_name
        self._children = []
        self._attr = {}
        # Attribute names are stored lower-cased for case-insensitive access.
        for _name, _value in attr.items():
            self._attr[_name.lower()] = _value
    def has_attribute(self, name):
        return name.lower() in self._attr
    @property
    def attributes(self):
        return self._attr.keys()
    def set_attribute(self, name, value):
        self._attr[name.lower()] = value
        return self
    def get_escaped_attribute(self, name):
        # Raises KeyError if the attribute is not present.
        return escape(self._attr[name.lower()])
    def add_attribute(self, **attr):
        for attr_name, value in attr.items():
            assert Element._ALLOWED_NAME_PATTERN.match(attr_name), (
                'attribute name %s is not allowed' % attr_name)
            self._attr[attr_name.lower()] = value
        return self
    def add_child(self, node):
        node._set_parent(self) # pylint: disable=protected-access
        self._children.append(node)
        return self
    def append(self, node):
        # Alias for add_child, for NodeList-compatible call sites.
        return self.add_child(node)
    def add_children(self, node_list):
        for child in node_list.list:
            self.add_child(child)
        return self
    def empty(self):
        self._children = []
        return self
    def add_text(self, text):
        # Wrapped in a Text node so the text is escaped on render.
        return self.add_child(Text(text))
    def can_have_children(self):
        return True
    @property
    def children(self):
        # Shallow copy so callers cannot mutate the internal list.
        return [] + self._children
    @property
    def tag_name(self):
        return self._tag_name
    @property
    def sanitized(self):
        """Santize the element and its descendants."""
        assert Element._ALLOWED_NAME_PATTERN.match(self._tag_name), (
            'tag name %s is not allowed' % self._tag_name)
        buff = '<' + self._tag_name
        # Attributes are emitted in sorted order for deterministic output.
        for attr_name, value in sorted(self._attr.items()):
            # 'classname' is the Python-safe spelling of the 'class' attr.
            if attr_name == 'classname':
                attr_name = 'class'
            elif attr_name.startswith('data_'):
                # data_foo -> data-foo.  NOTE(review): this rewrites every
                # underscore in the name, not only the first.
                attr_name = attr_name.replace('_', '-')
            if value is None:
                value = ''
            buff += ' %s="%s"' % (
                attr_name, escape(value))
        if self._children:
            buff += '>'
            for child in self._children:
                buff += child.sanitized
            buff += '</%s>' % self._tag_name
        elif self._tag_name.lower() in Element._VOID_ELEMENTS:
            # Childless void elements self-close.
            buff += '/>'
        else:
            # Non-void elements always get an explicit closing tag.
            buff += '></%s>' % self._tag_name
        return buff
class A(Element):
    """Convenience wrapper on Element embodying an 'a' (anchor) tag."""

    def __init__(self, href, **attr):
        """Initialize an 'a' tag pointing at the given target.

        Args:
            href: The value to put in the 'href' attribute of the element.
            **attr: names and values of any additional attributes. Names
                must match _ALLOWED_NAME_PATTERN; values are quote-escaped.
        """
        super(A, self).__init__('a', **attr)
        self.add_attribute(href=href)
class ScriptElement(Element):
    """Represents an HTML <script> element."""
    def __init__(self, **attr):
        super(ScriptElement, self).__init__('script', **attr)
    def can_have_children(self):
        # Scripts hold a raw text body, never child nodes.
        return False
    def add_child(self, unused_node):
        raise ValueError()
    def add_children(self, unused_nodes):
        raise ValueError()
    def empty(self):
        raise ValueError()
    def add_text(self, text):
        """Add the script body."""
        class Script(Text):
            # Unlike Text, a Script body is NOT escaped when rendered; it
            # is only checked for an embedded closing tag.
            def __init__(self, script):
                # Pylint is just plain wrong about warning here; suppressing.
                # pylint: disable=bad-super-call
                super(Script, self).__init__(None)
                self._script = script
            @property
            def sanitized(self):
                if '</script>' in self._script:
                    raise ValueError('End script tag forbidden')
                return self._script
        self._children.append(Script(text))
class Entity(Node):
    """Holds an XML entity."""

    # Accepts a single named (&amp;), decimal (&#38;) or hex (&#x26;) entity.
    ENTITY_PATTERN = re.compile('^&([a-zA-Z]+|#[0-9]+|#x[0-9a-fA-F]+);$')

    def __init__(self, entity):
        # Reject anything that is not exactly one well-formed entity.
        assert Entity.ENTITY_PATTERN.match(entity)
        super(Entity, self).__init__()
        self._entity = entity

    @property
    def sanitized(self):
        # Re-validate on access in case the stored value was tampered with.
        assert Entity.ENTITY_PATTERN.match(self._entity)
        return self._entity
def assemble_text_message(text, link):
    """Assemble a NodeList of optional text plus a 'Learn more...' link.

    Args:
        text: optional plain message text (HTML-escaped when rendered).
        link: optional URL; when given, an anchor opening in a new tab is
            appended with the caption 'Learn more...'.

    Returns:
        A NodeList holding the assembled nodes.
    """
    node_list = NodeList()
    if text:
        node_list.append(Text(text))
        # BUG FIX: the entity string had been de-entitized into a literal
        # non-breaking-space character, which Entity.ENTITY_PATTERN rejects
        # (asserting at runtime).  Restore the '&nbsp;' entity text.
        node_list.append(Entity('&nbsp;'))
    if link:
        node_list.append(Element(
            'a', href=link, target='_blank').add_text('Learn more...'))
    return node_list
| [
[
8,
0,
0.0032,
0.0032,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0095,
0.0032,
0,
0.66,
0.0769,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0158,
0.0032,
0,
0.66,... | [
"\"\"\"Classes to build sanitized HTML.\"\"\"",
"__author__ = 'John Orr (jorr@google.com)'",
"import cgi",
"import re",
"def escape(strg):\n return cgi.escape(strg, quote=1).replace(\"'\", ''').replace('`', '`')",
" return cgi.escape(strg, quote=1).replace(\"'\", ''').replace('`', '`... |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""General utility functions common to all of CourseBuilder."""
__author__ = 'Mike Gainer (mgainer@google.com)'
import cStringIO
import datetime
import logging
import random
import re
import string
import sys
import traceback
import zipfile
import appengine_config
from google.appengine.api import namespace_manager
# Legacy splitter that also treats square brackets as separators.
BACKWARD_COMPATIBLE_SPLITTER = re.compile(r'[\[\] ,\t\n]+', flags=re.M)
# Standard splitter: commas and whitespace.
SPLITTER = re.compile(r'[ ,\t\n]+', flags=re.M)
# Alphabet used for randomly-generated identifiers.
ALPHANUM = string.ascii_letters + string.digits
def text_to_list(text, splitter=SPLITTER):
    """Split text on the given pattern, dropping empty tokens."""
    if not text:
        return []
    return [token for token in splitter.split(text) if token]
def list_to_text(items):
    """Join items into a single space-delimited unicode string."""
    if not items:
        return ''
    return ' '.join([unicode(entry) for entry in items])
def generate_instance_id():
    """Return a random 12-character alphanumeric identifier."""
    return ''.join(random.choice(ALPHANUM) for _ in xrange(12))
def truncate(x, precision=2):
    """Truncate (not round) x toward zero at the given decimal precision."""
    assert isinstance(precision, int) and precision >= 0
    scale = 10 ** precision
    return int(x * scale) / float(scale)
def iter_all(query, batch_size=100):
    """Yields query results iterator. Proven method for large datasets.

    Walks the result set batch by batch using cursors, so arbitrarily
    large datasets can be traversed without loading them all at once.

    Args:
        query: a query object supporting with_cursor()/run()/cursor().
        batch_size: number of entities fetched per underlying batch.

    Yields:
        Entities matching the query.
    """
    prev_cursor = None
    any_records = True
    while any_records:
        any_records = False
        # Resume from where the previous pass left off.
        query = query.with_cursor(prev_cursor)
        for entity in query.run(batch_size=batch_size):
            any_records = True
            yield entity
        # Loop ends after the first pass that yields no records.
        prev_cursor = query.cursor()
def run_hooks(hooks, *args, **kwargs):
    """Invoke every callback hook, in order, with the given arguments.

    Args:
        hooks: iterable. The callback functions to be invoked. Each function
            is passed the remaining args and kwargs.
        *args: List of arguments passed the hook functions.
        **kwargs: Dict of keyword args passed to the hook functions.
    """
    # TODO(jorr): Add configurable try-catch around call
    for callback in hooks:
        callback(*args, **kwargs)
class Namespace(object):
    """Context manager that temporarily switches the datastore namespace.

    Replaces the verbose save/set/restore pattern:

        old_namespace = namespace_manager.get_namespace()
        try:
            namespace_manager.set_namespace(self._namespace)
            app_specific_stuff()
        finally:
            namespace_manager.set_namespace(old_namespace)

    with the much more terse:

        with Namespace(self._namespace):
            app_specific_stuff()

    Unlike the @in_namespace function annotation, this style works in
    classes that must be pickled, and also allows only a portion of a
    function to run within a namespaced context.
    """

    def __init__(self, new_namespace):
        self.new_namespace = new_namespace

    def __enter__(self):
        # Remember the caller's namespace so __exit__ can restore it.
        self.old_namespace = namespace_manager.get_namespace()
        namespace_manager.set_namespace(self.new_namespace)
        return self

    def __exit__(self, *unused_exception_info):
        namespace_manager.set_namespace(self.old_namespace)
        return False  # Don't suppress exceptions
def log_exception_origin():
    """Log the traceback of the origin of an exception as a critical error.

    Inside a try/except block, logging frequently loses the traceback of
    the originating raise.  This captures the current traceback and sends
    it to the standard logging library as one critical message.
    """
    try:
        # Per the Python docs' recommendation, confine the traceback
        # reference to a try/finally block to avoid a circular reference
        # to the traceback object.
        origin_tb = sys.exc_info()[2]
        logging.critical(''.join(traceback.format_tb(origin_tb)))
    finally:
        pass
def find(predicate, iterable, default=None):
    """Find the first matching item in a list, or a default if not found.

    This is as a more-usable alternative to filter(), in that it does
    not raise an exception if the item is not found.

    Args:
        predicate: A function taking one argument: an item from the iterable.
        iterable: A list or generator providing items passed to "predicate".
        default: Value to return if no item is found.

    Returns:
        The first item in "iterable" where "predicate" returns True, or
        "default" (None unless overridden) if no item matches.
    """
    # DOC FIX: the docstring previously claimed None was returned on no
    # match, contradicting the "default" parameter the code honors.
    # next() with a default consumes the iterable only up to the first hit.
    return next((item for item in iterable if predicate(item)), default)
class ZipAwareOpen(object):
    """Provide open() services for third party libraries in .zip files.
    Some libraries that are commonly downloaded and pushed alongside
    CourseBuilder are shipped with data files. These libraries make the
    assumption that when shipped in a product, they are packaged as plain
    files in a normal directory hierarchy. Thus, when that library is
    actually packaged in a .zip file, the open() call will fail. This
    class provides a convenient syntax around functionality that wraps
    calls to the builtin open() (or in the case of AppEngine, the version
    of 'open()' that AppEngine itself provides). When an attempt is made
    to open a file that is actually packaged within a .zip file, this
    wrapper will intelligently look within the .zip file for that member.
    Only read access is supported.
    No recursion into .zip files within other .zip files is supported.
    Example:
        with common_utils.ZipAwareOpen():
            third_party_module.some_func_that_calls_open()
    """
    # Map of on-disk .zip path -> full path (which may include a directory
    # prefix inside the zip) for each registered third-party library.
    THIRD_PARTY_LIB_PATHS = {
        l.file_path: l.full_path for l in appengine_config.THIRD_PARTY_LIBS}
    def zip_aware_open(self, name, *args, **kwargs):
        """Override open() iff opening a file in a library .zip for reading."""
        # First cut: Don't even consider checking .zip files unless the
        # open is for read-only and ".zip" is in the filename.
        mode = args[0] if args else kwargs['mode'] if 'mode' in kwargs else 'r'
        if '.zip' in name and (not mode or mode == 'r' or mode == 'rb'):
            # Only consider .zip files known in the third-party libraries
            # registered in appengine_config.py
            for path in ZipAwareOpen.THIRD_PARTY_LIB_PATHS:
                # Don't use zip-open if the file we are looking for _is_
                # the sought .zip file. (We are recursed into from the
                # zipfile module when it needs to open a file.)
                if path in name and path != name:
                    zf = zipfile.ZipFile(path, 'r')
                    # Possibly extend simple path to .zip file with relative
                    # path inside .zip file to meaningful contents.
                    name = name.replace(
                        path, ZipAwareOpen.THIRD_PARTY_LIB_PATHS[path])
                    # Strip off on-disk path to .zip file. This leaves
                    # us with the absolute path within the .zip file.
                    name = name.replace(path, '').lstrip('/')
                    # Return a file-like object containing the data extracted
                    # from the .zip file for the given name.
                    data = zf.read(name)
                    return cStringIO.StringIO(data)
        # All other cases pass through to builtin open().
        return self._real_open(name, *args, **kwargs)
    def __enter__(self):
        """Wrap Python's internal open() with our version."""
        # No particular reason to use __builtins__ in the 'zipfile' module; the
        # set of builtins is shared among all modules implemented in Python.
        self._real_open = sys.modules['zipfile'].__builtins__['open']
        sys.modules['zipfile'].__builtins__['open'] = self.zip_aware_open
    def __exit__(self, *unused_exception_info):
        """Reset open() to be the Python internal version."""
        sys.modules['zipfile'].__builtins__['open'] = self._real_open
        return False  # Don't suppress exceptions.
def parse_timedelta_string(timedelta_string):
    """Parse a human-style duration string into a datetime.timedelta.

    Accepts comma- or space-separated components such as '1 week, 2 days'
    or '30s'.  Each unit may be abbreviated to its first letter, spelled
    out, or pluralized; absent units default to zero.
    """
    unit_names = ['weeks', 'days', 'hours', 'minutes', 'seconds']
    # Each unit becomes an optional group like ((?P<weeks>\d+)\s*w(eek)?s?)?
    pattern = r'\s*,?\s*'.join(
        r'((?P<%s>\d+)\s*%s(%s)?s?)?' % (name, name[0], name[1:-1])
        for name in unit_names)
    counts = re.match(pattern, timedelta_string).groupdict(default='0')
    return datetime.timedelta(
        **{name: int(count) for name, count in counts.items()})
| [
[
8,
0,
0.0607,
0.004,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0688,
0.004,
0,
0.66,
0.0385,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0769,
0.004,
0,
0.66,
... | [
"\"\"\"General utility functions common to all of CourseBuilder.\"\"\"",
"__author__ = 'Mike Gainer (mgainer@google.com)'",
"import cStringIO",
"import datetime",
"import logging",
"import random",
"import re",
"import string",
"import sys",
"import traceback",
"import zipfile",
"import appeng... |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Encryption and digest functionality."""
__author__ = 'Mike Gainer (mgainer@google.com)'
import base64
import hashlib
import hmac
import os
import random
import time
import appengine_config
from common import utils
from models import config
from google.appengine.api import users
# PyCrypto is mandatory in production.  In dev mode (e.g. on MacOS, where
# the native extension may be unavailable) fall back to a no-op stand-in
# so the rest of the stack keeps working.
try:
    from Crypto.Cipher import AES
except ImportError:
    if appengine_config.PRODUCTION_MODE:
        raise
    class AES(object):
        """No-op crypto class to permit running on MacOS in dev mode."""
        MODE_CBC = 2
        @staticmethod
        def new(unused_1, unused_2, unused_3):
            # Mirrors AES.new(key, mode, iv); all arguments are ignored.
            return AES()
        def __init__(self):
            pass
        def _reverse(self, message):
            # "Encrypt" by reversing. Just want to ensure that the encrypted
            # version differs from the plaintext so that nothing accidentally
            # relies on being able to read the nominally-encrypted value.
            m_list = list(message)
            m_list.reverse()
            return ''.join(m_list)
        def encrypt(self, message):
            return self._reverse(message)
        def decrypt(self, message):
            return self._reverse(message)
# Both secrets below start at recognizable default values; on first use,
# EncryptionManager._init_secret_if_none replaces any still-default value
# with a random one, so the defaults act only as "uninitialized" sentinels.
XSRF_SECRET_LENGTH = 20
XSRF_SECRET = config.ConfigProperty(
    'gcb_xsrf_secret', str, (
        'Text used to encrypt tokens, which help prevent Cross-site request '
        'forgery (CSRF, XSRF). You can set the value to any alphanumeric text, '
        'preferably using 16-64 characters. Once you change this value, the '
        'server rejects all subsequent requests issued using an old value for '
        'this variable.'),
    'course builder XSRF secret')
ENCRYPTION_SECRET_LENGTH = 48
ENCRYPTION_SECRET = config.ConfigProperty(
    'gcb_encryption_secret', str, (
        'Text used to encrypt messages. You can set this to any text at all, '
        'but the value must be exactly ' + str(ENCRYPTION_SECRET_LENGTH) +
        ' characters long. If you change this value, the server will be '
        'unable to understand items encrypted under the old key.'),
    'default value of CourseBuilder encryption secret',
    validator=config.ValidateLength(ENCRYPTION_SECRET_LENGTH).validator)
class EncryptionManager(object):
    """Manages HMAC and AES secrets and performs digest/encrypt/decrypt."""
    @classmethod
    def _init_secret_if_none(cls, cfg, length):
        """Ensure cfg holds a non-default secret; create a random one if not."""
        # Any non-default value is fine.
        if cfg.value and cfg.value != cfg.default_value:
            return
        # All property manipulations must run in the default namespace.
        with utils.Namespace(appengine_config.DEFAULT_NAMESPACE_NAME):
            # Look in the datastore directly.
            entity = config.ConfigPropertyEntity.get_by_key_name(cfg.name)
            if not entity:
                entity = config.ConfigPropertyEntity(key_name=cfg.name)
            # Any non-default non-None value is fine.
            if (entity.value and not entity.is_draft and
                (str(entity.value) != str(cfg.default_value))):
                return
            # Initialize to random value.  base64 expands 3 bytes into 4
            # chars, so length * 0.75 random bytes yield a length-char string.
            entity.value = base64.urlsafe_b64encode(
                os.urandom(int(length * 0.75)))
            entity.is_draft = False
            entity.put()
    @classmethod
    def _get_hmac_secret(cls):
        """Verifies that non-default XSRF secret exists; creates one if not."""
        cls._init_secret_if_none(XSRF_SECRET, XSRF_SECRET_LENGTH)
        return XSRF_SECRET.value
    @classmethod
    def _get_encryption_secret(cls):
        """Verifies non-default encryption secret exists; creates one if not."""
        cls._init_secret_if_none(ENCRYPTION_SECRET, ENCRYPTION_SECRET_LENGTH)
        return ENCRYPTION_SECRET.value
    @classmethod
    def hmac(cls, components):
        """Generate an XSRF over the array of components strings."""
        secret = cls._get_hmac_secret()
        digester = hmac.new(str(secret))
        for component in components:
            digester.update(component)
        return digester.digest()
    @classmethod
    def _build_crypto(cls, secret):
        # AES-CBC cipher: first 32 chars of the secret are the key, the
        # remaining 16 are the initialization vector.
        if len(secret) != 48:
            raise ValueError('Encryption secret must be exactly 48 characters')
        return AES.new(secret[:32], AES.MODE_CBC, secret[32:])
    @classmethod
    def encrypt(cls, message, secret=None):
        """Encrypt a message. Message value returned is not URL-safe."""
        message = message or ''
        # Frame as '<len>.<payload>' so decrypt() can strip the padding,
        # then pad with '^' up to the AES 16-byte block size.
        message = '%d.%s' % (len(message), message)
        message += '^' * (16 - len(message) % 16)
        secret = secret or cls._get_encryption_secret()
        return cls._build_crypto(secret).encrypt(message)
    @classmethod
    def encrypt_to_urlsafe_ciphertext(cls, message, secret=None):
        """Convenience wrapper to get URL-safe version of encrytped data."""
        return base64.urlsafe_b64encode(cls.encrypt(message, secret))
    @classmethod
    def decrypt(cls, message, secret=None):
        """Decrypt a message, returning the original plaintext."""
        secret = secret or cls._get_encryption_secret()
        crypto = cls._build_crypto(secret)
        message = crypto.decrypt(message)
        # Recover the '<len>.<payload>...padding' framing from encrypt().
        delim_index = message.find('.')
        original_length = int(message[:delim_index])
        return message[delim_index + 1:delim_index + 1 + original_length]
    @classmethod
    def decrypt_from_urlsafe_ciphertext(cls, message, secret=None):
        return cls.decrypt(base64.urlsafe_b64decode(message), secret)
class XsrfTokenManager(object):
    """Provides XSRF protection by managing action/user tokens in memcache."""
    # Max age of the token (4 hours).
    XSRF_TOKEN_AGE_SECS = 60 * 60 * 4
    # Token delimiters.
    DELIMITER_PRIVATE = ':'
    DELIMITER_PUBLIC = '/'
    # Default nickname to use if a user does not have a nickname,
    USER_ID_DEFAULT = 'default'
    @classmethod
    def _create_token(cls, action_id, issued_on):
        """Creates a string representation (digest) of a token."""
        # We have decided to use transient tokens stored in memcache to reduce
        # datastore costs. The token has 4 parts: hash of the actor user id,
        # hash of the action, hash of the time issued and the plain text of time
        # issued.
        # Lookup user id.
        user = users.get_current_user()
        if user:
            user_id = user.user_id()
        else:
            user_id = cls.USER_ID_DEFAULT
        # Round time to seconds.
        issued_on = long(issued_on)
        digest = EncryptionManager.hmac(
            cls.DELIMITER_PRIVATE.join([
                str(user_id), str(action_id), str(issued_on)]))
        # Public token format: '<issued_on>/<b64 digest>'.
        token = '%s%s%s' % (
            issued_on, cls.DELIMITER_PUBLIC, base64.urlsafe_b64encode(digest))
        return token
    @classmethod
    def create_xsrf_token(cls, action):
        """Issue a token for the given action at the current time."""
        return cls._create_token(action, time.time())
    @classmethod
    def is_xsrf_token_valid(cls, token, action):
        """Validate a given XSRF token by retrieving it from memcache."""
        try:
            parts = token.split(cls.DELIMITER_PUBLIC)
            if len(parts) != 2:
                return False
            issued_on = long(parts[0])
            age = time.time() - issued_on
            if age > cls.XSRF_TOKEN_AGE_SECS:
                return False
            # Recompute the token for this action and timestamp; any change
            # in user, action, or time yields a different digest.
            authentic_token = cls._create_token(action, issued_on)
            if authentic_token == token:
                return True
            return False
        except Exception:  # pylint: disable=broad-except
            # Malformed tokens (e.g. bad integer parse) are simply invalid.
            return False
def get_external_user_id(app_id, namespace, email):
    """Gets an id for a user that can be transmitted to external systems.

    The returned key is scoped to a particular user within a particular course
    on a particular Course Builder deployment, and is guaranteed to be
    statistically unique within that scope.

    Args:
        app_id: string. Application ID of the CB App Engine deployment.
        namespace: string. Namespace of a single course. May be the empty string.
        email: string. Unvalidated email address for a user.

    Returns:
        String.
    """
    scope_key = '%s%s%s' % (app_id, namespace, email)
    return hmac.new(scope_key, digestmod=hashlib.sha256).hexdigest()
def hmac_sha_2_256_transform(privacy_secret, value):
    """HMAC-SHA-2-256 for use as a privacy transformation function."""
    digester = hmac.new(
        str(privacy_secret), msg=str(value), digestmod=hashlib.sha256)
    return digester.hexdigest()
def generate_transform_secret_from_xsrf_token(xsrf_token, action):
    """Deterministically generate a secret from an XSRF 'nonce'.

    When multiple data sources are consumed via the REST API, consumers may
    need to correlate data across the different sources (e.g. the analytics
    page on the dashboard). This function turns an opaque, non-forgeable XSRF
    token into an HMAC secret. The secret used for HMAC'ing the PII in data
    source outputs is:

    - Not derived from anything the user may generate, so the user cannot
      manipulate the seed value to experiment to find weaknesses.
    - Not predictable given the information the user has (the user does not
      have the encryption key). The encryption key is used in preference to
      using the HMAC key twice.

    Args:
        xsrf_token: An XSRF token encoded as usual for use as an
            HTML parameter.
        action: Action expected to be present in the token.

    Returns:
        None if the XSRF token is invalid, or an encryption key if it is.
    """
    if not XsrfTokenManager.is_xsrf_token_valid(xsrf_token, action):
        return None
    # Encrypting the publicly-visible token with our private secret yields a
    # value that is entirely deterministic yet cannot be produced by anyone
    # not in possession of the encryption secret.
    encrypted = EncryptionManager.encrypt(xsrf_token)
    # Fold the encrypted bytes into one large integer to seed a PRNG.
    prng_seed = 0
    for byte_char in encrypted:
        prng_seed = prng_seed * 256 + ord(byte_char)
    prng = random.Random(prng_seed)
    # Deterministically emit a secret: identical tokens yield identical keys.
    num_bytes = int(ENCRYPTION_SECRET_LENGTH * 0.75)
    raw = ''.join(chr(prng.getrandbits(8)) for _ in range(num_bytes))
    return base64.urlsafe_b64encode(raw)
| [
[
8,
0,
0.0487,
0.0032,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0552,
0.0032,
0,
0.66,
0.0476,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0617,
0.0032,
0,
0.66,... | [
"\"\"\"Encryption and digest functionality.\"\"\"",
"__author__ = 'Mike Gainer (mgainer@google.com)'",
"import base64",
"import hashlib",
"import hmac",
"import os",
"import random",
"import time",
"import appengine_config",
"from common import utils",
"from models import config",
"from google... |
# -*- coding: utf-8 -*-
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""HTML content transformation and manipulation functions.
About
This module performs complex HTML document transformations, which enable
machine-assisted internationalization (I18N) of content.
Extracting a resource bundle from your HTML content
This is done using extract_resource_bundle_from() function. Here is what
happens behind the scenes.
HTML content is received as text and parsed into an XML ElementTree tree with
html5lib library. ElementTree is then converted into safe_dom tree. Already
parsed tree can be provided as well.
The HTML tags in the tree are inspected to extract all contiguous text chunks.
For example: content of <p>...</p> tag is extracted as one chunk, with a
simple markup (like <a>, <b>, <i> and <br>) left inline.
Text chunks are returned as a list of strings. Each string contains plain
text and an inline markup. The markup uses slightly modified original tag
names with the unique index appended and with most attributes removed. For
example:
'<a href="foo" title="Bar">Foo</a><br/><b>Bar</b>'
becomes
'<a#1 title="Bar">Foo</a#1><br#2 /><b#3>Bar</b#3>'
The list of strings, which we will call a 'resource bundle', is ready to be
sent to translator, who must translate both plain text and the text between
inline markup. Reorder of marked up terms is allowed.
When I said 'plain text', I lied a little bit. The strings are expected to
a) be HTML entity encoded and b) be of unicode type in Python. Each of the
strings will be parsed using minidom XML parser. The translator must take care
of the entity encoding, and you as a developer must take care of using proper
charsets in the user interface given to the translator. During the XML parsing
phase UTF-8 is used internally.
Putting translations into your HTML content
This is done using merge_resource_bundle_into() function. Here is what
happens behind the scenes.
The list of strings is received along with the HTML content or an safe_dom
tree of the content to be inserted into. The content is processed as described
above and both the strings and the markup in the original language are
removed.
New strings are inserted one by one into the proper places of the content tree
and inline markup is expanded to have the proper original tags names and the
attributes. The values of attributes like 'alt' and 'title' can be provided in
the translations, other attributes specified in the translations are ignored.
No attempt is made to make sure new strings correspond to the original
strings. Whatever strings are given, those are the ones we will try to weave
into the content. Thus, when the original content changes, it's your
responsibility to diff the resource bundles before and after the edit, send
the delta to translator and compose new updated resource bundle.
The final safe_dom tree with the translations put in is returned. You have
many options how to render it out, including using functions provided
ContentIO.tostring() function.
Common issues
Where is my whitespace?
Whitespace inside and around translation strings is removed intentionally.
Why do I see 'UnicodeDecodeError: 'ascii' codec can't decode byte...'?
    you most likely forgot the letter 'u' in front of your Python unicode
    string
Resource String Disambiguation
One may encounter two strings that have exact same text in English, but have
to be translated differently due to the context of their use. Simply add a
comment just before the text to be translated. The comment must start with
the 'I18N:' prefix, otherwise it is ignored. For example, here is a valid I18N
comment: '<!-- I18N: "Android" means "a robot" in this context -->'.
Open Issues:
- P0: complete map_source_to_target() for allow_list_reorder=True
- P0: move all schemas out of dashboard into models; leave UX specific
inputEx annotations of those schemas in dashboard
- P0: clean up and streamline Registry/SchemaFields
- P0: update about herein with details of object bind/map/diff
- P0: get rid of minidom, use cElementTree to reduce parser dependency
- P0: drop '#' and allow <a> and <b> while no disambiguation is required
- P0: how shall safedom handle custom tag nodes that are not yet ready to
be expanded; as proxy nodes?
Good luck!
"""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import difflib
import htmlentitydefs
import re
import StringIO
import sys
import unittest
from xml.dom import minidom
import safe_dom
from tags import html_to_safe_dom
XML_ENCODING = 'utf-8'
# Comments having this prefix will be extracted into the resource bundle; for
# example: <!-- I18N: 'Android' here means 'robot', not 'operating system' -->
I18N_COMMENT_PREFIX = 'I18N:'
# These tags are rendered inline if they don't have any children;
# for example: <a#1>Foo!</a#1>
DEFAULT_INLINE_TAG_NAMES = [
    'A', 'B', 'I', 'SPAN', 'BR', 'STRONG', 'EM', 'SMALL']
# These tags are not inspected and are rendered inline without any content;
# for example: <script#1 />
DEFAULT_OPAQUE_TAG_NAMES = ['SCRIPT', 'STYLE']
# These tags are inspected and are rendered inline without any content;
# for example: <ul#1 />; their children are extracted for translation as
# independent items
DEFAULT_OPAQUE_DECOMPOSABLE_TAG_NAMES = [
    'UL', 'TABLE', 'IMG', 'INPUT', 'TEXTAREA']
# The key is an attribute name. The value is a set of tag names, for which
# this attribute can be recomposed from resource bundle. All other attributes
# are not recomposable. The special tag name '*' means "any tag".
DEFAULT_RECOMPOSABLE_ATTRIBUTES_MAP = {
    'ALT': set(['*']), 'TITLE': set(['*']), 'SRC': set(['IMG']),
    'PLACEHOLDER': set(['INPUT', 'TEXTAREA'])}
# Regex that matches a named HTML entity: '&', one or more lowercase letters,
# then ';'.
_ENTITY_REGEX = re.compile('(&[a-z]+;)')
# Items we don't want to change to codes when translating HTML entities in XML.
_XML_ENTITY_NAMES = frozenset(['quot', 'amp', 'lt', 'gt', 'apos'])
# pylint: disable=protected-access
def _get_entity_map():
    """Builds the entity name -> replacement string mapping used by minidom."""
    entities = {}
    entities.update(htmlentitydefs.entitydefs)
    # Python's entity table lacks apos, which XML defines.
    entities['apos'] = None  # Replaced with a named reference below.
    mappings = {}
    for name, code in entities.iteritems():
        if name in _XML_ENTITY_NAMES:
            # Entities XML itself understands stay as named references so that
            # minidom can parse them directly.
            mappings[name] = '&%s;' % name
        elif code.startswith('&'):
            # Already a numeric reference; keep as-is.
            mappings[name] = code
        else:
            # A raw character; convert to a numeric reference for minidom
            # compatibility.
            mappings[name] = '&#%s;' % str(ord(code))
    return mappings
# Map of HTML entity name string ('copy') to ASCII, decimal code string
# ('&#169;'). IMPORTANT: some entities are known to both HTML and XML (see
# _XML_ENTITY_NAMES). In that case, we do not translate to a code because we're
# processing XML. For those items, the value is the named reference (for
# example, for the key 'quot' the value is '&quot;').
_ENTITY_MAP = _get_entity_map()  # Computed once at module import time.
class ContentIO(object):
    """Class that knows how to load and save HTML content to be translated."""
    @classmethod
    def _is_simple_text_content(cls, node):
        """Checks if node only has children of type Text."""
        simple_text_content = True
        children = cls._get_children(node)
        for child in children:
            if not isinstance(child, safe_dom.Text):
                simple_text_content = False
                break
        return simple_text_content
    @classmethod
    def _get_children(cls, node_list_or_element):
        # Uniform child access: NodeList keeps children in .list, Element
        # in .children.
        if isinstance(node_list_or_element, safe_dom.NodeList):
            return node_list_or_element.list
        if isinstance(node_list_or_element, safe_dom.Element):
            return node_list_or_element.children
        raise TypeError(
            'Expected NodeList/Element, found: %s.' % node_list_or_element)
    @classmethod
    def _merge_node_lists(cls, node_list, node):
        """Combines sibling or nested node lists into one."""
        if isinstance(node, safe_dom.NodeList):
            cls._merge_node_list_chidren(node_list, node)
        elif isinstance(node, safe_dom.Element):
            # Wrap a lone Element into a NodeList so one code path handles both.
            root_node_list = safe_dom.NodeList()
            root_node_list.append(node)
            cls._merge_node_list_chidren(node_list, root_node_list)
        else:
            node_list.append(node)
    @classmethod
    def _merge_node_list_chidren(cls, target_node_list, node_list):
        """Inspects NodeList and merges its contents recursively."""
        # Copy the children before emptying the source list we iterate from.
        _children = [] + node_list.children
        node_list.empty()
        for child in _children:
            if isinstance(child, safe_dom.NodeList):
                # Nested NodeLists are flattened into the target.
                cls._merge_node_list_chidren(target_node_list, child)
            else:
                target_node_list.append(child)
                if isinstance(child, safe_dom.Element):
                    cls._merge_element_chidren(child)
    @classmethod
    def _merge_element_chidren(cls, element):
        """Inspects Element and merges its contents recursively."""
        if not element.can_have_children():
            return
        _children = [] + element.children
        element.empty()
        # Accumulator that coalesces runs of consecutive NodeList children
        # into a single NodeList; reset whenever a non-NodeList child appears.
        _last_node_list_child = None
        for child in _children:
            if isinstance(child, safe_dom.NodeList):
                if _last_node_list_child is None:
                    _last_node_list_child = safe_dom.NodeList()
                    cls._merge_node_list_chidren(_last_node_list_child, child)
                    if _last_node_list_child:
                        element.append(_last_node_list_child)
                    else:
                        # Merged to nothing; don't keep an empty accumulator.
                        _last_node_list_child = None
                else:
                    # Grow the accumulator already attached to the element.
                    cls._merge_node_list_chidren(_last_node_list_child, child)
            else:
                _last_node_list_child = None
                element.append(child)
                if isinstance(child, safe_dom.Element):
                    cls._merge_element_chidren(child)
    @classmethod
    def _normalize_tree(cls, tree):
        """Combines sibling or nested node lists into one."""
        node_list = safe_dom.NodeList()
        cls._merge_node_lists(node_list, tree)
        return node_list
    @classmethod
    def fromstring(cls, content):
        """Converts HTML string content into an XML tree."""
        return (
            html_to_safe_dom(unicode(content), None, render_custom_tags=False))
    @classmethod
    def tostring(cls, tree):
        """Renders tree to as HTML text."""
        return tree.sanitized
class TranslationIO(object):
    """Class that knows how to load and save XML translations."""
    @classmethod
    def _is_indexable(cls, node):
        """Checks if node can have an index of style of <a#1 />."""
        # Only real elements get indexes; Text and Comment nodes never do.
        return not (isinstance(node, safe_dom.Text) or isinstance(
            node, safe_dom.Comment))
    @classmethod
    def _is_ancestor(cls, descendant, ancestor):
        """True when 'ancestor' is the node itself or any transitive parent."""
        if descendant == ancestor or descendant.parent == ancestor:
            return True
        if not descendant.parent:
            return False
        return cls._is_ancestor(descendant.parent, ancestor)
    @classmethod
    def _set_children(cls, node, children):
        """Replaces the child list of a NodeList or Element wholesale."""
        if isinstance(node, safe_dom.NodeList):
            node.list = children
        elif isinstance(node, safe_dom.Element):
            node._children = children
        else:
            raise TypeError('Unsupported node type: %s.' % node)
    @classmethod
    def _copy_node_content_from_minidom_to_safe_dom(
        cls, source_node, target_element):
        """Copies child nodes from source to target."""
        if not source_node.childNodes:
            # Nothing to copy; the target keeps its existing children.
            return
        target_element._children = []
        for node in source_node.childNodes:
            if node.nodeType == minidom.Node.TEXT_NODE:
                target_element.add_child(safe_dom.Text(node.nodeValue))
                continue
            if node.nodeType == minidom.Node.COMMENT_NODE:
                target_element.add_child(safe_dom.Comment(node.nodeValue))
                continue
            raise TypeError('Unknown node type: %s.' % node)
    @classmethod
    def _find_replace_for_tag_open(cls, source_delimiter, target_delimiter):
        """Returns regex pattern for replacing delimiter in the open tag."""
        return (
            r'<([a-zA-Z0-9_\-]+)%s([0-9]+)' % source_delimiter,
            '<\\1%s\\2' % target_delimiter)
    @classmethod
    def _find_replace_for_tag_close(cls, source_delimiter, target_delimiter):
        """Returns regex pattern for replacing delimiter in the closing tag."""
        return (
            r'</([a-zA-Z0-9_\-]+)%s([0-9]+)>' % source_delimiter,
            '</\\1%s\\2>' % target_delimiter)
    @classmethod
    def _apply_regex(cls, find_replace, content):
        """Applies one (pattern, replacement) pair to content."""
        _find, _replace = find_replace
        return re.sub(_find, _replace, content)
    @classmethod
    def remove_whitespace(cls, content):
        """Removes whitespace from translation string."""
        _content = content
        _content = re.sub(r'[\r\n]+', ' ', _content)
        _content = re.sub(r'\s\s+', ' ', _content)
        return _content.strip()
    @classmethod
    def _decode_tag_names(cls, content):
        """Decode all tags from 'tag#index' into 'tag-index' style names."""
        # '#' is not a legal XML name character, so it must be replaced with
        # '-' before the string can be fed to an XML parser.
        return cls._apply_regex(
            cls._find_replace_for_tag_open('#', '-'), cls._apply_regex(
                cls._find_replace_for_tag_close('#', '-'), content))
    @classmethod
    def _encode_tag_names(cls, content):
        """Encode all tags from 'tag-index' into 'tag#index' style names."""
        return cls._apply_regex(
            cls._find_replace_for_tag_open('-', '#'), cls._apply_regex(
                cls._find_replace_for_tag_close('-', '#'), content))
    @classmethod
    def _element_to_translation(cls, config, context, element):
        """Converts safe_dom Element into a resource bundle string.

        Returns:
            Tuple (has_content, text) where has_content tells whether the
            element carried any translatable payload.
        """
        lines = []
        index = context.index.get_node_index_in_collation(element)
        assert index
        tag_name = '%s#%s' % (element.tag_name.lower(), index)
        start_tag = tag_name
        _attributes = element.attributes
        if config.sort_attributes:
            _attributes = sorted(_attributes)
        for attr in _attributes:
            # Only whitelisted (recomposable) attributes survive into the
            # translation string; everything else is stripped.
            tag_name_set = config.recomposable_attributes_map.get(attr.upper())
            if tag_name_set and (
                element.tag_name.upper() in tag_name_set
                or '*' in tag_name_set
            ):
                start_tag += ' %s="%s"' % (
                    attr, element.get_escaped_attribute(attr))
        if element.tag_name.upper() in config.opaque_tag_names:
            # Opaque tags never expose their content; emit a self-closed tag.
            return False, '<%s />' % start_tag
        if element.tag_name.upper() in config.opaque_decomposable_tag_names:
            content = None
            if element.tag_name.upper() in config.inline_tag_names:
                content = []
                if element.children:
                    for child in element.children:
                        if not isinstance(child, safe_dom.Text):
                            raise TypeError(
                                'Unsupported node type: %s.' % child)
                        value = child.sanitized
                        content.append(value)
                if content:
                    content = ''.join(content)
                else:
                    content = None
            has_content = content or not config.omit_empty_opaque_decomposable
            if content:
                return has_content, '<%s>%s</%s>' % (
                    start_tag, content, tag_name)
            else:
                return has_content, '<%s />' % start_tag
        has_content = False
        if element.children:
            lines.append('<%s>' % start_tag)
            for child in element.children:
                if not isinstance(child, safe_dom.Text):
                    raise TypeError('Unsupported node type: %s.' % child)
                value = child.sanitized
                if value.strip():
                    has_content = True
                lines.append(value)
            lines.append('</%s>' % tag_name)
        else:
            lines.append('<%s />' % start_tag)
        return has_content, ''.join(lines)
    @classmethod
    def _collation_to_translation(cls, config, context, collation):
        """Converts a list of safe_dom nodes into a resource bundle string."""
        lines = []
        has_content = False
        for node in collation:
            if isinstance(
                node, safe_dom.Comment) or isinstance(node, safe_dom.Text):
                value = node.sanitized
                if value.strip():
                    has_content = True
                lines.append(value)
                continue
            if isinstance(node, safe_dom.Element):
                _has_content, _value = cls._element_to_translation(
                    config, context, node)
                if _has_content:
                    has_content = True
                lines.append(_value)
                continue
            raise TypeError('Unsupported node type: %s.' % node)
        # A collation with no translatable payload yields no bundle entry.
        if not has_content:
            return None
        return ''.join(lines)
    def new_tree(self):
        """Creates new empty tree."""
        # NOTE(review): instance method among classmethods — looks like it
        # should be a @classmethod; confirm no caller relies on an instance.
        return minidom.Document()
    @classmethod
    def parse_indexed_tag_name(cls, node):
        """Splits a 'tag-index' minidom tag name into (tag_name, index)."""
        try:
            # Split off the last component after a '-'. (Note that custom tags
            # may contain '-' in their tag names.)
            parts = node.tagName.split('-')
            index = parts.pop()
            tag_name = '-'.join(parts)
            return tag_name, int(index)
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt; the
            # message typo 'form' (should be 'from') is user-visible — confirm
            # before changing it.
            raise SyntaxError(
                'Error extracting index form the tag <%s>. '
                'Tag name format is <tag_name#index>, '
                'like <a#1>.' % node.tagName)
    @classmethod
    def extract_line_column_from_parse_error(cls, error):
        """Try to extract line, column from the text of parsing error."""
        try:
            msg = error.message
            match = re.match(r'.*\: line ([0-9]+), column ([0-9]+).*', msg)
            if match is not None:
                return int(match.group(1)), int(match.group(2))
        except:  # pylint: disable=bare-except
            pass
        # Best-effort only: (None, None) when the message has no position info.
        return None, None
    @classmethod
    def get_text_fragment(cls, text, line_num, col_num, clip_len=16):
        """Makes an clip_len long excerpt of the text using line and column.

        Args:
            text: text to make a fragment of
            line_num: one-based line number of excerpt start
            col_num: one-based column number of excerpt start
            clip_len: number of character to leave on both sides of start position

        Returns:
            tuple clipped text fragment of the entire text if clipping failed
        """
        assert clip_len > 0
        lines = text.split('\n')
        if (line_num is not None
            and col_num is not None
            and line_num > 0
            and line_num <= len(lines)):
            line = lines[line_num - 1]
            if col_num < 0 or col_num >= len(line):
                return text
            from_col_num = max(col_num - clip_len, 0)
            to_col_num = min(col_num + clip_len, len(line))
            result = ''
            if from_col_num < col_num:
                result += line[from_col_num:col_num]
            # Bracket the character at the error position for visibility.
            result += '[%s]' % line[col_num]
            if to_col_num > col_num:
                result += line[col_num + 1:to_col_num]
            return result
        return text
    @classmethod
    def fromstring(cls, content):
        """Converts XML string content of the translation into an XML tree."""
        # Translate named HTML entities into numeric/XML-safe references, then
        # wrap in <div> so content with multiple root nodes still parses.
        translated_entities = _ENTITY_REGEX.sub(cls._match_to_code, content)
        xml_text = '<div>%s</div>' % cls._decode_tag_names(
            translated_entities).encode(XML_ENCODING)
        try:
            tree = minidom.parseString(xml_text)
        except Exception as e:  # pylint: disable=broad-except
            # Re-raise with an excerpt of the offending text for diagnostics.
            line_num, col_num = cls.extract_line_column_from_parse_error(e)
            raise Exception(
                e.message, cls.get_text_fragment(xml_text, line_num, col_num))
        return tree
    @classmethod
    def _match_to_code(cls, match):
        # Strip the surrounding '&' and ';' to get the entity name.
        return _ENTITY_MAP[match.group()[1:-1]]
    @classmethod
    def toxml(cls, tree):
        """Renders tree as XML text without XML declaration and root node."""
        assert 'DIV' == tree.documentElement.tagName.upper()
        data = StringIO.StringIO()
        for child in tree.documentElement.childNodes:
            child.writexml(data)
        return data.getvalue()
    @classmethod
    def tocollation(cls, tree):
        """Converts a tree into a list of nodes no more than one level deep."""
        collation = []
        for node in tree.documentElement.childNodes:
            if node.nodeType == minidom.Node.TEXT_NODE:
                collation.append(node)
                continue
            if node.nodeType == minidom.Node.COMMENT_NODE:
                collation.append(node)
                continue
            if node.nodeType == minidom.Node.ELEMENT_NODE:
                # Elements may only hold text/comment leaves at this level.
                for child in node.childNodes:
                    if child.nodeType not in [
                        minidom.Node.TEXT_NODE, minidom.Node.COMMENT_NODE]:
                        raise TypeError(
                            'Unsupported node child type: %s.' % child.nodeType)
                collation.append(node)
                continue
            raise TypeError('Unsupported node type: %s.' % node.nodeType)
        return collation
    @classmethod
    def get_indexed_tag_name(cls, node, index):
        """Formats a safe_dom node's name in the 'tag#index' style."""
        return '%s#%s' % (node.tag_name.lower(), index)
    @classmethod
    def tostring(cls, tree):
        """Renders tree as a string with <a#1 /> style markup."""
        return cls._encode_tag_names(cls.toxml(tree))
class ResourceBundleItemError(Exception):
"""An error related to a specific string in a resource bundle."""
def __init__(self, exc_info, original_exception, index):
Exception.__init__(self, 'Error in chunk %s. %s' % (
index + 1, original_exception))
self._exc_info = exc_info
self._original_exception = original_exception
self._index = index
@property
def exc_info(self):
return self._exc_info
@property
def index(self):
return self._index
@property
def original_exception(self):
return self._original_exception
def reraise(self):
"""Re-raises an exception preserving original stack trace."""
raise self._exc_info[0], self, self._exc_info[2]
class Configuration(object):
    """Various options that control content transformation process."""
    def __init__(
        self,
        inline_tag_names=None,
        opaque_tag_names=None,
        opaque_decomposable_tag_names=None,
        recomposable_attributes_map=None,
        omit_empty_opaque_decomposable=True,
        sort_attributes=False):
        # An explicit (even empty) argument always wins; only None falls back
        # to the module-level defaults.
        self.inline_tag_names = (
            inline_tag_names if inline_tag_names is not None
            else DEFAULT_INLINE_TAG_NAMES)
        self.opaque_tag_names = (
            opaque_tag_names if opaque_tag_names is not None
            else DEFAULT_OPAQUE_TAG_NAMES)
        self.opaque_decomposable_tag_names = (
            opaque_decomposable_tag_names
            if opaque_decomposable_tag_names is not None
            else DEFAULT_OPAQUE_DECOMPOSABLE_TAG_NAMES)
        self.recomposable_attributes_map = (
            recomposable_attributes_map
            if recomposable_attributes_map is not None
            else DEFAULT_RECOMPOSABLE_ATTRIBUTES_MAP)
        self.omit_empty_opaque_decomposable = omit_empty_opaque_decomposable
        self.sort_attributes = sort_attributes
class Context(object):
    """Runtime state of the transformation process."""
    def __init__(self, tree):
        self.tree = ContentIO._normalize_tree(tree)
        self.collations = None
        self.index = None
        self.resource_bundle = None
        self.resource_bundle_index_2_collation_index = None
        self.append_to_index = None
        self.is_dirty = False
    def _get_collation_index(self, resource_bundle_index):
        """Maps a resource bundle position to its collation position."""
        mapping = self.resource_bundle_index_2_collation_index
        return mapping[resource_bundle_index]
    def _remove_empty_collations(self):
        """Drops collations that collected no nodes."""
        self.collations = [
            collation for collation in self.collations if collation]
    def _new_collation(self):
        """Starts a fresh collation unless the current one is still empty."""
        assert self.collations is not None
        if not self.collations or self.collations[-1]:
            self.collations.append([])
        self.append_to_index = len(self.collations) - 1
    def _append_collation(self, node):
        """Adds a node to the collation currently being built."""
        if not self.collations:
            self._new_collation()
        self.collations[self.append_to_index].append(node)
class CollationIndex(object):
    """An in-order index of all indexable nodes in the collation."""
    def __init__(self):
        self._node_to_index = {}
    def rebuild(self, context):
        """Recomputes 1-based indexes for every node in every collation."""
        for collation in context.collations:
            next_index = 1
            for node in collation:
                if not TranslationIO._is_indexable(node):
                    # Text/comment nodes are tracked but never numbered.
                    self._node_to_index[node] = None
                    continue
                self._node_to_index[node] = next_index
                next_index += 1
    def get_node_index_in_collation(self, node):
        """Returns the node's 1-based index, or None for non-indexable nodes."""
        return self._node_to_index[node]
    def find_node_in_collation(self, collation, node_index):
        """Finds node that has a specific index in the collation."""
        for candidate in collation:
            if self.get_node_index_in_collation(candidate) == node_index:
                return candidate
        return None
    @classmethod
    def get_all_indexes_in_collation(cls, context, collation):
        """Returns a set of all possible indexes of nodes in the collation."""
        return set(
            context.index.get_node_index_in_collation(node)
            for node in collation if TranslationIO._is_indexable(node))
class ContentTransformer(object):
    """Main class that performs content transformation."""
    def __init__(self, config=None):
        # A default Configuration is used when none is supplied.
        if config is None:
            config = Configuration()
        self.config = config
    def _collate_action_append(self, context, node):
        """Adds the node to the collation currently being built."""
        context._append_collation(node)
    def _collate_action_inspect_children(self, context, node):
        """Dispatches each child to its appropriate collate action."""
        for child in ContentIO._get_children(node):
            action = self._get_collate_action(child)
            if action:
                action(context, child)
    def _collate_action_inspect_inline(self, context, node):
        # An inline tag with only Text children stays in the current chunk;
        # otherwise it is treated as a composite container.
        if ContentIO._is_simple_text_content(node):
            self._collate_action_append(context, node)
        else:
            self._collate_action_inspect_composite(context, node)
    def _collate_action_inspect_opaque(self, context, node):
        """Appends an opaque node whole; its content is never inspected."""
        context._append_collation(node)
    def _collate_action_inspect_opaque_decomposable(self, context, node):
        # The node itself joins the current collation, while its children are
        # collated separately; the append index is restored afterwards so
        # siblings continue accumulating in the original collation.
        context._append_collation(node)
        _append_to_index = context.append_to_index
        context._new_collation()
        self._collate_action_inspect_children(context, node)
        context.append_to_index = _append_to_index
    def _collate_action_inspect_composite(self, context, node):
        # A composite node breaks the text flow: its children form their own
        # collation, and a fresh one is started for whatever follows.
        context._new_collation()
        self._collate_action_inspect_children(context, node)
        context._new_collation()
    def _get_collate_action(self, node):
        """Chooses the collate action for a node; None means 'skip node'."""
        if isinstance(node, safe_dom.NodeList):
            return self._collate_action_inspect_children
        if isinstance(node, safe_dom.Comment):
            # Only comments bearing the I18N: prefix are kept for translators.
            if node.get_value().strip().find(I18N_COMMENT_PREFIX) == 0:
                return self._collate_action_append
            else:
                return None
        if isinstance(node, safe_dom.Text):
            return self._collate_action_append
        if isinstance(node, safe_dom.Element):
            tag_name = node.tag_name
            if tag_name.upper() in self.config.inline_tag_names:
                return self._collate_action_inspect_inline
            if tag_name.upper() in self.config.opaque_tag_names:
                return self._collate_action_inspect_opaque
            if tag_name.upper() in self.config.opaque_decomposable_tag_names:
                return self._collate_action_inspect_opaque_decomposable
            return self._collate_action_inspect_composite
        raise TypeError(
            'Unsupported node type: %s.' % node.__class__.__name__)
    @classmethod
    def _assert_all_indexed_elements_are_consumed(
        cls, context, target_collation, consumed_indexes):
        """Asserts all indexed nodes in the collation were consumed."""
        all_indexes = context.index.get_all_indexes_in_collation(
            context, target_collation)
        if consumed_indexes != all_indexes:
            missing_indexes = set(list(all_indexes))
            missing_indexes.difference_update(consumed_indexes)
            missing_tags = []
            for index in missing_indexes:
                missing_node = context.index.find_node_in_collation(
                    target_collation, index)
                missing_tags.append(TranslationIO.get_indexed_tag_name(
                    missing_node, index))
            raise LookupError(
                'Expected to find the following tags: <%s>.' % (
                    '>, <'.join(missing_tags)))
    @classmethod
    def _get_node_index(cls, node, node_list):
        """Returns the position of node within node_list; asserts presence."""
        node_index = None
        index = 0
        for child in node_list:
            if node == child:
                node_index = index
                break
            index += 1
        assert node_index is not None
        return node_index
    def _replace_children(self, tree, collation, children):
        """Replaces all nodes in the collation with the new nodes."""
        first_node = collation[0]
        parent = first_node.parent
        if not parent:
            # Top-level collation: the tree itself acts as the parent.
            parent = tree
        first_node_index = self._get_node_index(
            first_node, ContentIO._get_children(parent))
        new_children = []
        old_children = ContentIO._get_children(parent)
        for index in range(0, len(old_children)):
            if index == first_node_index:
                # Splice the replacement nodes in at the collation's position.
                for new_child in children:
                    new_children.append(new_child)
            child = old_children[index]
            # Drop old children that are (or contain) nodes of this collation;
            # they have been replaced above.
            ignore = False
            for _child in collation:
                if TranslationIO._is_ancestor(_child, child):
                    ignore = True
                    break
            if not ignore:
                new_children.append(child)
        TranslationIO._set_children(parent, new_children)
    @classmethod
    def _copy_selected_node_attributes(
        cls, config, source_node, target_element):
        """Copy selected attributes from source to target."""
        for key in source_node.attributes.keys():
            tag_name_set = config.recomposable_attributes_map.get(
                key.upper())
            eligible = tag_name_set and (
                (source_node.tagName.upper() in tag_name_set) or (
                    '*' in tag_name_set))
            if eligible:
                # Only overwrite attributes the target already has; a
                # translation cannot inject brand-new attributes.
                if target_element.has_attribute(key):
                    target_element.set_attribute(
                        key, source_node.attributes[key].nodeValue)
    def _recompose(self, context, translation, collation_index):
        """Applies translation to the collation."""
        _tree = TranslationIO.fromstring(translation)
        consumed_indexes = set()
        collation = []
        for node in TranslationIO.tocollation(_tree):
            if node.nodeType == minidom.Node.TEXT_NODE:
                collation.append(safe_dom.Text(node.nodeValue))
                continue
            if node.nodeType == minidom.Node.COMMENT_NODE:
                collation.append(safe_dom.Comment(node.nodeValue))
                continue
            if node.nodeType == minidom.Node.ELEMENT_NODE:
                # Match the translated <tag-index> back to the original node.
                tag_name, index = TranslationIO.parse_indexed_tag_name(node)
                node.tagName = tag_name
                target_node = context.index.find_node_in_collation(
                    context.collations[collation_index], index)
                if not target_node:
                    raise LookupError(
                        'Unexpected tag: <%s#%s>.' % (tag_name, index))
                TranslationIO._copy_node_content_from_minidom_to_safe_dom(
                    node, target_node)
                self._copy_selected_node_attributes(
                    self.config, node, target_node)
                consumed_indexes.add(index)
                collation.append(target_node)
                continue
            raise TypeError('Unknown node type: %s.' % node)
        self._assert_all_indexed_elements_are_consumed(
            context, context.collations[collation_index], consumed_indexes)
        self._replace_children(
            context.tree, context.collations[collation_index], collation)
    def _collate(self, context):
        """Collates XML tree into lists of nodes containing chunks of text."""
        self._collate_action_inspect_children(context, context.tree)
        context._remove_empty_collations()
        context.index.rebuild(context)
    def decompose(self, context):
        """Creates a resource bundle from the collations of nodes."""
        context.collations = []
        context.index = CollationIndex()
        self._collate(context)
        _index = 0
        _collation_index = 0
        context.resource_bundle = []
        context.resource_bundle_index_2_collation_index = {}
        context.append_to_index = None
        for collation in context.collations:
            value = TranslationIO._collation_to_translation(
                self.config, context, collation)
            if value:
                # Bundle positions skip empty collations, so record the
                # bundle-index -> collation-index mapping explicitly.
                context.resource_bundle.append(value)
                context.resource_bundle_index_2_collation_index[
                    _index] = _collation_index
                _index += 1
            _collation_index += 1
    def recompose(self, context, resource_bundle, errors=None):
        """Pushes string translations from resource bundle into the tree."""
        if context.is_dirty:
            raise AssertionError(
                'Please create new context; this context is not reusable.')
        if context.resource_bundle is None:
            raise Exception('Please call decompose() first.')
        if len(context.resource_bundle) != len(resource_bundle):
            raise IndexError(
                'The lists of translations must have the same number of items '
                '(%s) as extracted from the original content (%s).' % (
                    len(resource_bundle), len(context.resource_bundle)))
        if errors is None:
            errors = []
        context.is_dirty = True
        for index, item in enumerate(resource_bundle):
            try:
                self._recompose(
                    context, item,
                    context._get_collation_index(index))
            except Exception as e:  # pylint: disable=broad-except
                # Collect per-item failures so one bad chunk does not stop
                # the others; the last one is re-raised below.
                _error = ResourceBundleItemError(sys.exc_info(), e, index)
                errors.append(_error)
        if errors:
            errors[-1].reraise()
class SourceToTargetMapping(object):
    """Immutable record pairing a source value with its target value."""
    def __init__(self, name, label, type_name, source_value, target_value):
        self._name = name
        self._label = label
        self._type = type_name
        self._source = source_value
        self._target = target_value
    def __str__(self):
        return '%s (%s): %s == %s' % (
            self._name, self._type, self._source, self._target)
    @property
    def name(self):
        """Identifier used to look the mapping up via find_mapping()."""
        return self._name
    @property
    def label(self):
        """Human-readable label for the mapped field."""
        return self._label
    @property
    def type(self):
        """Type name of the mapped field."""
        return self._type
    @property
    def source_value(self):
        """Value on the source side of the mapping."""
        return self._source
    @property
    def target_value(self):
        """Value on the target side of the mapping."""
        return self._target
    @classmethod
    def find_mapping(cls, mappings, name):
        """Returns the first mapping whose name matches, or None."""
        return next(
            (mapping for mapping in mappings if mapping.name == name), None)
class SourceToTargetDiffMapping(SourceToTargetMapping):
    """Class that maps source to target with diff.

    Adds a verb describing how the current source value relates to a
    previously recorded translation, plus the positional indexes of the
    values in their respective lists.
    """

    VERB_NEW = 1  # new source value added, no mapping to target exists
    VERB_CHANGED = 2  # source value changed, mapping to target likely invalid
    VERB_CURRENT = 3  # source value is mapped to valid target value

    ALLOWED_VERBS = [VERB_NEW, VERB_CHANGED, VERB_CURRENT]

    # minimum difflib quick_ratio() for two values to be considered
    # "changed" (rather than brand new) when reordering is allowed
    SIMILARITY_CUTOFF = 0.5

    def __init__(
        self, name, label, type_name,
        source_value, target_value, verb,
        source_value_index, target_value_index):
        assert verb in self.ALLOWED_VERBS
        super(SourceToTargetDiffMapping, self).__init__(
            name, label, type_name, source_value, target_value)
        self._verb = verb
        self._source_value_index = source_value_index
        self._target_value_index = target_value_index

    def __str__(self):
        return '%s (%s, %s): %s | %s' % (
            self._name, self._type, self._verb, self._source, self._target)

    @property
    def verb(self):
        # One of ALLOWED_VERBS.
        return self._verb

    @property
    def source_value_index(self):
        # Position of the source value in its list; may be None.
        return self._source_value_index

    @property
    def target_value_index(self):
        # Position of the target value in its list; None for VERB_NEW.
        return self._target_value_index

    @classmethod
    def _create_value_mapping(
        cls, field_value, source_value, target_value, verb,
        source_value_index, target_value_index):
        """Builds a mapping; name/label/type come from field_value if given."""
        _name = None
        _label = None
        _type = None
        if field_value is not None:
            _name = field_value.name
            _label = field_value.field.label
            _type = field_value.field.type
        return SourceToTargetDiffMapping(
            _name, _label, _type,
            source_value, target_value, verb,
            source_value_index, target_value_index)

    @classmethod
    def map_lists_source_to_target(cls, a, b, allow_reorder=False):
        """Maps items from the source list to a target list."""
        return cls._map_lists_source_to_target_with_reorder(a, b) if (
            allow_reorder) else cls._map_lists_source_to_target_no_reorder(a, b)

    @classmethod
    def _map_lists_source_to_target_no_reorder(cls, a, b):
        """Maps list a onto list b positionally using difflib opcodes."""
        mappings = []
        matcher = difflib.SequenceMatcher(None, a, b)
        for optcode in matcher.get_opcodes():
            tag, i1, i2, j1, j2 = optcode
            # 'insert' means b has items not in a; nothing to map from a.
            if 'insert' == tag:
                continue
            if 'replace' == tag:
                # Pair up as many items as both ranges allow; the leftover
                # source items have no target and are treated as new.
                changed_len = min(i2 - i1, j2 - j1)
                for index in range(i1, i1 + changed_len):
                    entry = cls._create_value_mapping(
                        None, a[index], b[j1 + (index - i1)], cls.VERB_CHANGED,
                        index, j1 + (index - i1))
                    mappings.append(entry)
                for index in range(i1 + changed_len, i2):
                    entry = cls._create_value_mapping(
                        None, a[index], None, cls.VERB_NEW, index, None)
                    mappings.append(entry)
                continue
            for index in range(i1, i2):
                entry = None
                if 'equal' == tag:
                    assert (i2 - i1) == (j2 - j1)
                    entry = cls._create_value_mapping(
                        None, a[index], b[j1 + (index - i1)], cls.VERB_CURRENT,
                        index, j1 + (index - i1))
                elif 'delete' == tag:
                    # Present in a but not in b: no prior translation.
                    entry = cls._create_value_mapping(
                        None, a[index], None, cls.VERB_NEW,
                        index, None)
                else:
                    raise KeyError()
                assert entry is not None
                mappings.append(entry)
        return mappings

    @classmethod
    def _map_lists_source_to_target_with_reorder(cls, a, b):
        """Maps each item of a to its best match anywhere in b."""
        mappings = []
        for new_index, _new in enumerate(a):
            best_match_index = None
            best_score = -1
            entry = None
            for old_index, _old in enumerate(b):
                if _new == _old:
                    # Exact match anywhere in b wins immediately.
                    entry = cls._create_value_mapping(
                        None,
                        a[new_index], b[old_index], cls.VERB_CURRENT,
                        new_index, old_index)
                    break
                score = difflib.SequenceMatcher(None, _new, _old).quick_ratio()
                if score > best_score:
                    best_score = score
                    best_match_index = old_index
            if entry:
                mappings.append(entry)
                continue
            if best_score > cls.SIMILARITY_CUTOFF:
                entry = cls._create_value_mapping(
                    None, a[new_index], b[best_match_index], cls.VERB_CHANGED,
                    new_index, best_match_index)
            else:
                entry = cls._create_value_mapping(
                    None, a[new_index], None, cls.VERB_NEW,
                    new_index, None)
            assert entry is not None
            mappings.append(entry)
        return mappings

    @classmethod
    def map_source_to_target(
        cls, binding,
        existing_mappings=None, allowed_names=None, allow_list_reorder=False,
        errors=None):
        """Maps binding field value to the existing SourceToTargetMapping.

        Args:
            binding: an instance of ValueToTypeBinding object
            existing_mappings: an array of SourceToTargetMapping holding
                existing translations
            allowed_names: field names that are subject to mapping
            allow_list_reorder: controls whether list items can be reordered
                while looking for better matching
            errors: an array to receive errors found during mapping process

        Returns:
            an array of SourceToTargetDiffMapping objects, one per each field
            value in the binding passed in
        """
        name_to_mapping = {}
        if existing_mappings is not None:
            for mapping in existing_mappings:
                name_to_mapping[mapping.name] = mapping
        mapping = []
        if allow_list_reorder:
            raise NotImplementedError()
        for index, field_value in enumerate(binding.value_list):
            if allowed_names is not None and (
                field_value.name not in allowed_names):
                continue
            target_value = None
            verb = cls.VERB_NEW
            translation = name_to_mapping.get(field_value.name)
            if translation:
                if translation.type != field_value.field.type:
                    _error = AssertionError(
                        'Source and target types don\'t match: %s, %s.' % (
                            field_value.field.type, translation.type))
                    if errors is not None:
                        # NOTE(review): there is no active exception here, so
                        # sys.exc_info() is likely (None, None, None) — verify
                        # ResourceBundleItemError tolerates that.
                        _error = ResourceBundleItemError(
                            sys.exc_info(), _error, index)
                        errors.append(_error)
                        continue
                    else:
                        raise _error
                target_value = translation.target_value
                if translation.source_value != field_value.value:
                    verb = cls.VERB_CHANGED
                else:
                    verb = cls.VERB_CURRENT
            source_value = field_value.value
            entry = cls._create_value_mapping(
                field_value, source_value, target_value, verb, None, None)
            mapping.append(entry)
        return mapping
def extract_resource_bundle_from(
    tree=None, html=None, context=None, config=None):
    """Extracts resource bundle from the HTML string or tree.

    Args:
        tree: an XML tree of HTML content to use; required if html and
            context are None
        html: a string with HTML content to use; required if tree and
            context are None
        context: an existing translation Context to use; if None, a new one
            is built from tree/html
        config: configuration options; a default Configuration is used if
            None (note: this parameter shadows the module-level `config`
            import inside this function)

    Returns:
        a (context, transformer) tuple.
    """
    if config is None:
        config = Configuration()
    transformer = ContentTransformer(config=config)
    if tree is None and html is not None:
        tree = ContentIO.fromstring(html)
    # Bug fix: a caller-supplied context used to be silently ignored and
    # replaced by a fresh Context; honor it when provided.
    if context is None:
        context = Context(tree)
    transformer.decompose(context)
    return context, transformer
def merge_resource_bundle_into(
    tree=None, html=None, context=None, config=None, resource_bundle=None,
    errors=None):
    """Weaves strings from the resource bundle into the content.

    First decomposes the content exactly as extract_resource_bundle_from()
    would, then recomposes it using the supplied translations.

    Args:
        tree: an XML tree of HTML content to use; required if html is None
        html: a string with HTML content to use; required if tree is None
        context: translation context
        config: configuration options
        resource_bundle: translated strings, in the same order and quantity
            as those returned by extract_resource_bundle_from()
        errors: a list to receive errors

    Returns:
        a (context, transformer) tuple.
    """
    _context, _transformer = extract_resource_bundle_from(
        tree=tree, html=html, context=context, config=config)
    _transformer.recompose(_context, resource_bundle, errors=errors)
    return _context, _transformer
class ListsDifflibTests(unittest.TestCase):
    """Tests our understanding of difflib as applied to ordered lists."""

    def _assert_opcode_tags(self, newest, oldest, expected_tags):
        """Diffs the two lists and verifies the difflib opcode tags."""
        matcher = difflib.SequenceMatcher(None, newest, oldest)
        for index, opcode in enumerate(matcher.get_opcodes()):
            self.assertEqual(expected_tags[index], opcode[0])

    def _assert_mapped(self, newest, oldest, expected_mappings):
        """Maps newest onto oldest (no reorder) and verifies the result."""
        mappings = SourceToTargetDiffMapping.map_lists_source_to_target(
            newest, oldest)
        actual = [(
            mapping.source_value, mapping.target_value,
            mapping.verb,
            mapping.source_value_index, mapping.target_value_index
        ) for mapping in mappings]
        self.assertEqual(expected_mappings, actual)

    def test_diff_two_string_lists_works(self):
        self._assert_opcode_tags(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'sky', 'was', 'blue', '!'],
            ['equal', 'replace', 'equal'])

    def test_diff_two_string_lists_no_reorder(self):
        self._assert_opcode_tags(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'is', 'sky', 'blue', '!'],
            ['equal', 'insert', 'equal', 'delete', 'equal'])

    def test_map_lists_source_to_target_identity(self):
        self._assert_mapped(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'sky', 'is', 'blue', '!'],
            [('The', 'The', SourceToTargetDiffMapping.VERB_CURRENT, 0, 0),
             ('sky', 'sky', SourceToTargetDiffMapping.VERB_CURRENT, 1, 1),
             ('is', 'is', SourceToTargetDiffMapping.VERB_CURRENT, 2, 2),
             ('blue', 'blue', SourceToTargetDiffMapping.VERB_CURRENT, 3, 3),
             ('!', '!', SourceToTargetDiffMapping.VERB_CURRENT, 4, 4)])

    def test_map_lists_source_to_target_no_reorder_but_changed(self):
        self._assert_mapped(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'sky', 'was', 'blue', '!'],
            [('The', 'The', SourceToTargetDiffMapping.VERB_CURRENT, 0, 0),
             ('sky', 'sky', SourceToTargetDiffMapping.VERB_CURRENT, 1, 1),
             ('is', 'was', SourceToTargetDiffMapping.VERB_CHANGED, 2, 2),
             ('blue', 'blue', SourceToTargetDiffMapping.VERB_CURRENT, 3, 3),
             ('!', '!', SourceToTargetDiffMapping.VERB_CURRENT, 4, 4)])

    def test_map_lists_source_to_target_no_reorder_and_remove_insert(self):
        self._assert_mapped(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'is', 'sky', 'blue', '!'],
            [('The', 'The', SourceToTargetDiffMapping.VERB_CURRENT, 0, 0),
             ('sky', 'sky', SourceToTargetDiffMapping.VERB_CURRENT, 1, 2),
             ('is', None, SourceToTargetDiffMapping.VERB_NEW, 2, None),
             ('blue', 'blue', SourceToTargetDiffMapping.VERB_CURRENT, 3, 3),
             ('!', '!', SourceToTargetDiffMapping.VERB_CURRENT, 4, 4)])

    def test_map_lists_source_to_target_no_reorder_and_new(self):
        self._assert_mapped(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'sky', 'blue', '!'],
            [('The', 'The', SourceToTargetDiffMapping.VERB_CURRENT, 0, 0),
             ('sky', 'sky', SourceToTargetDiffMapping.VERB_CURRENT, 1, 1),
             ('is', None, SourceToTargetDiffMapping.VERB_NEW, 2, None),
             ('blue', 'blue', SourceToTargetDiffMapping.VERB_CURRENT, 3, 2),
             ('!', '!', SourceToTargetDiffMapping.VERB_CURRENT, 4, 3)])

    def test_map_lists_source_to_target_no_reorder_change_and_new(self):
        self._assert_mapped(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'sky', 'is', 'BLUE'],
            [('The', 'The', SourceToTargetDiffMapping.VERB_CURRENT, 0, 0),
             ('sky', 'sky', SourceToTargetDiffMapping.VERB_CURRENT, 1, 1),
             ('is', 'is', SourceToTargetDiffMapping.VERB_CURRENT, 2, 2),
             ('blue', 'BLUE', SourceToTargetDiffMapping.VERB_CHANGED, 3, 3),
             ('!', None, SourceToTargetDiffMapping.VERB_NEW, 4, None)])
class SetsDifflibUtils(unittest.TestCase):
    """Tests our understanding of difflib as applied to lists and sets."""

    def _assert_mapped_with_reorder(self, newest, oldest, expected_mappings):
        """Maps newest onto oldest with reordering allowed and verifies."""
        mappings = SourceToTargetDiffMapping.map_lists_source_to_target(
            newest, oldest, allow_reorder=True)
        actual = [(
            mapping.source_value, mapping.target_value,
            mapping.verb,
            mapping.source_value_index, mapping.target_value_index
        ) for mapping in mappings]
        self.assertEqual(expected_mappings, actual)

    def test_diff_two_string_lists_with_reorder(self):
        self._assert_mapped_with_reorder(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'is', 'sky', 'blue', '!'],
            [('The', 'The', SourceToTargetDiffMapping.VERB_CURRENT, 0, 0),
             ('sky', 'sky', SourceToTargetDiffMapping.VERB_CURRENT, 1, 2),
             ('is', 'is', SourceToTargetDiffMapping.VERB_CURRENT, 2, 1),
             ('blue', 'blue', SourceToTargetDiffMapping.VERB_CURRENT, 3, 3),
             ('!', '!', SourceToTargetDiffMapping.VERB_CURRENT, 4, 4)])

    def test_diff_two_string_lists_with_reorder_over_cutoff(self):
        self._assert_mapped_with_reorder(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'sky', 'is', 'blUe', '!'],
            [('The', 'The', SourceToTargetDiffMapping.VERB_CURRENT, 0, 0),
             ('sky', 'sky', SourceToTargetDiffMapping.VERB_CURRENT, 1, 1),
             ('is', 'is', SourceToTargetDiffMapping.VERB_CURRENT, 2, 2),
             ('blue', 'blUe', SourceToTargetDiffMapping.VERB_CHANGED, 3, 3),
             ('!', '!', SourceToTargetDiffMapping.VERB_CURRENT, 4, 4)])

    def test_diff_two_string_lists_with_reorder_under_cutoff(self):
        self._assert_mapped_with_reorder(
            ['The', 'sky', 'is', 'blue', '!'],
            ['The', 'sky', 'is', 'BLUE', '!'],
            [('The', 'The', SourceToTargetDiffMapping.VERB_CURRENT, 0, 0),
             ('sky', 'sky', SourceToTargetDiffMapping.VERB_CURRENT, 1, 1),
             ('is', 'is', SourceToTargetDiffMapping.VERB_CURRENT, 2, 2),
             ('blue', None, SourceToTargetDiffMapping.VERB_NEW, 3, None),
             ('!', '!', SourceToTargetDiffMapping.VERB_CURRENT, 4, 4)])
class TestCasesForIO(unittest.TestCase):
    """Tests for content/translation input/output.

    NOTE(review): several string literals below were reconstructed from
    extraction-damaged text where HTML entities ('&gt;', '&reg;', etc.)
    had been decoded in place; verify against the original file.
    """
    def _containers(self):
        # Sample nodes that can hold children.
        return [safe_dom.A('http://'), safe_dom.Element('div')]
    def _leafs(self):
        # Sample nodes that cannot hold children.
        return [
            safe_dom.Comment('comment'),
            safe_dom.Entity('&gt;'),
            safe_dom.Text('text'),
            safe_dom.ScriptElement()]
    def _all(self):
        # All sample nodes: containers followed by leafs.
        return [] + self._containers() + self._leafs()
    def test_merge_single_element(self):
        """A bare node merges into the result list unchanged."""
        for _elem in self._all():
            _result = safe_dom.NodeList()
            ContentIO._merge_node_lists(_result, _elem)
            self.assertEqual(_result.list, [_elem])
    def test_merge_stack_of_node_lists_leaf_element(self):
        """Nested NodeLists around a leaf collapse to just the leaf."""
        for _elem in self._all():
            _list1 = safe_dom.NodeList()
            _list2 = safe_dom.NodeList()
            _list3 = safe_dom.NodeList()
            _list1.append(_list2)
            _list2.append(_list3)
            _list3.append(_elem)
            self.assertEqual(_list1.list, [_list2])
            self.assertEqual(_list2.list, [_list3])
            self.assertEqual(_list3.list, [_elem])
            _result = safe_dom.NodeList()
            ContentIO._merge_node_lists(_result, _list1)
            self.assertEqual(_result.list, [_elem])
    def test_merge_stack_of_node_lists_non_leaf_element(self):
        """Nested NodeLists inside a container collapse in place."""
        for _bar in self._containers():
            for _foo in self._all():
                _bar.empty()
                _list1 = safe_dom.NodeList()
                _list2 = safe_dom.NodeList()
                _list3 = safe_dom.NodeList()
                _bar.add_child(_list1)
                _list1.append(_list2)
                _list2.append(_list3)
                _list3.append(_foo)
                self.assertEqual(_list1.list, [_list2])
                self.assertEqual(_list2.list, [_list3])
                self.assertEqual(_list3.list, [_foo])
                _result = safe_dom.NodeList()
                ContentIO._merge_node_lists(_result, _bar)
                self.assertEqual(_result.list, [_bar])
                self.assertEqual(
                    _bar.children[0].list, [_foo],
                    '%s >>> %s' % (_bar, _foo))
    def test_merge_sibling_node_lists_leaf_element(self):
        """Sibling NodeLists flatten into one list preserving order."""
        for _bar in self._all():
            for _foo in self._all():
                _list1 = safe_dom.NodeList()
                _list2 = safe_dom.NodeList()
                _list3 = safe_dom.NodeList()
                _list1.append(_list2)
                _list1.append(_list3)
                _list2.append(_foo)
                _list3.append(_bar)
                self.assertEqual(_list1.list, [_list2, _list3])
                self.assertEqual(_list2.list, [_foo])
                self.assertEqual(_list3.list, [_bar])
                _result = safe_dom.NodeList()
                ContentIO._merge_node_lists(_result, _list1)
                self.assertEqual(_result.list, [_foo, _bar])
    def test_merge_stack_and_sibling_lists(self):
        """Empty nested lists inside a container are removed entirely."""
        for _elem in self._containers():
            _list1 = safe_dom.NodeList()
            _list2 = safe_dom.NodeList()
            _list3 = safe_dom.NodeList()
            _list4 = safe_dom.NodeList()
            _list1.append(_list2)
            _list2.append(_elem)
            _elem.add_child(_list3)
            _list3.append(_list4)
            self.assertEqual(_elem.children, [_list3])
            self.assertEqual(_list3.list, [_list4])
            _result = safe_dom.NodeList()
            ContentIO._merge_node_lists(_result, _list1)
            self.assertEqual(_result.list, [_elem])
            self.assertEqual(_elem.children, [])
    def test_translation_to_minidom(self):
        """Indexed tags like <a#1> round-trip through the minidom form."""
        translation = 'The <a#1 href="foo">skies</a#1> are <b#2>blue</b#2>.'
        tree_as_text = 'The <a-1 href="foo">skies</a-1> are <b-2>blue</b-2>.'
        dom = TranslationIO.fromstring(translation)
        self.assertEqual(tree_as_text, TranslationIO.toxml(dom))
        self.assertEqual(translation, TranslationIO.tostring(dom))
    def test_minidom_is_casesensitive(self):
        """Mismatched-case open/close tags must fail to parse."""
        translation = 'The <SPAN#1>skies</SPAN#1>.'
        TranslationIO.fromstring(translation)
        translation = 'The <span#1>skies</SPAN#1>.'
        with self.assertRaises(Exception):
            TranslationIO.fromstring(translation)
        translation = 'The <SPAN#1>skies</span#1>.'
        with self.assertRaises(Exception):
            TranslationIO.fromstring(translation)
    def test_fromstring_translates_html_entities_for_minidom(self):
        """Named HTML entities are converted to their character values."""
        original = u'The skies&reg; are &copy; copyrighted.'
        parsed = u'The skies\xae are \xa9 copyrighted.'
        dom = TranslationIO.fromstring(original)
        self.assertEqual(parsed, TranslationIO.toxml(dom))
        self.assertEqual(parsed, TranslationIO.tostring(dom))
    def test_fromstring_does_not_translate_xml_entities_for_minidom(self):
        """The five XML entities are passed through, not decoded."""
        original = u'Hello, &quot; &amp; &lt; &gt; &#39; world.'
        dom = TranslationIO.fromstring(original)
        # We leave &#39; as &apos;, but minidom turns it to '.
        self.assertEqual(
            u"Hello, &quot; &amp; &lt; &gt; ' world.",
            TranslationIO.toxml(dom))
        self.assertEqual(
            u"Hello, &quot; &amp; &lt; &gt; ' world.",
            TranslationIO.tostring(dom))
    def test_entity_map_converts_all_html_codes_to_base_10_ascii(self):
        """Non-XML entities map to decimal numeric character references."""
        for name, code in _ENTITY_MAP.iteritems():
            if name not in _XML_ENTITY_NAMES:
                int(code[2:-1], base=10)
                self.assertTrue(code.startswith('&') and code.endswith(';'))
        # Spot check a few values.
        self.assertEqual('&#169;', _ENTITY_MAP.get('copy'))
        self.assertEqual('&#174;', _ENTITY_MAP.get('reg'))
    def test_entity_map_xml_entity_values_are_keynames_with_amp_and_semi(self):
        """XML entities map back to themselves in '&name;' form."""
        for xml_entity in _XML_ENTITY_NAMES:
            self.assertEqual('&%s;' % xml_entity, _ENTITY_MAP.get(xml_entity))
    def test_html_to_safedom(self):
        """Loose HTML parses into safe_dom and serializes predictably."""
        html = '''
            Let's start!
            <p>First!</>
            Some random <b>markup</b> text!
            <p>
                <!-- comment -->
                The <b>skies</b> are <a href="foo">blue</a>.
                The <b>roses</b> are <a href="bar">red</a>!
                <script>alert('Foo!');</script>
                <style>{ width: 100%; }</style>
            </p>
            <p>Last!</p>
            We are done!
            '''
        tree_as_text = '''
            Let's start!
            <p>First!
            Some random <b>markup</b> text!
            </p><p>
                <!-- comment -->
                The <b>skies</b> are <a href="foo">blue</a>.
                The <b>roses</b> are <a href="bar">red</a>!
                <script>alert('Foo!');</script>
                <style>{ width: 100%; }</style>
            </p>
            <p>Last!</p>
            We are done!
            '''
        self.assertEqual(
            tree_as_text,
            ContentIO.tostring(ContentIO.fromstring(html)))
    def test_parse_error_interpretation(self):
        """Line/column extraction and fragment clipping behave sanely."""
        # test expected error message
        error = Exception('not well-formed (invalid token): line 66, column 99')
        line_num, col_num = TranslationIO.extract_line_column_from_parse_error(
            error)
        self.assertEquals(66, line_num)
        self.assertEquals(99, col_num)
        # test text that does not have line & column
        self.assertEquals(
            (None, None),
            TranslationIO.extract_line_column_from_parse_error('Some text.'))
        # test clipping
        text = 'The sky is blue!'
        self.assertEquals(
            '[T]he s',
            TranslationIO.get_text_fragment(text, 1, 0, clip_len=5))
        self.assertEquals(
            'The s[k]y is',
            TranslationIO.get_text_fragment(text, 1, 5, clip_len=5))
        # text out of bounds conditions
        self.assertEquals(
            text,
            TranslationIO.get_text_fragment(text, 1, 16, clip_len=5))
        self.assertEquals(
            text, TranslationIO.get_text_fragment(text, 1, 99))
        self.assertEquals(
            text, TranslationIO.get_text_fragment(text, 1, -1))
        self.assertEquals(text, TranslationIO.get_text_fragment(text, -1, -1))
class TestCasesBase(unittest.TestCase):
    """Base class for testing translations."""

    def setUp(self):
        self.transformer = ContentTransformer()

    def tearDown(self):
        self.tree = None
        self.transformer = None
        self.context = None

    @classmethod
    def _remove_whitespace(cls, content):
        """Collapses whitespace runs and strips whitespace around tags."""
        content = content.replace('\n', ' ').replace('\r', ' ')
        content = re.sub(r'\s+', ' ', content)
        content = re.sub(r'>\s+', '>', content)
        content = re.sub(r'\s+/>', '/>', content)
        content = re.sub(r'\s+<', '<', content)
        return content.strip()

    def _assert_collated_nodes_have_same_parent(self, collation):
        """Asserts all nodes in one collation share a single parent node."""
        parent = None
        for node in collation:
            if parent is None:
                parent = node.parent
            assert parent == node.parent

    def _assert_decomposes(
        self, content, resource_bundle, ignore_whitespace=True):
        """Decomposes content and compares the extracted resource bundle.

        Args:
            content: HTML string to decompose.
            resource_bundle: expected translatable strings, or None to skip
                the bundle comparison.
            ignore_whitespace: if True, whitespace is normalized on both
                sides before comparing.
        """
        self.context = Context(ContentIO.fromstring(content))
        self.transformer.decompose(self.context)
        for collation in self.context.collations:
            self._assert_collated_nodes_have_same_parent(collation)
        if resource_bundle is not None:
            self.assertEqual(
                len(resource_bundle), len(self.context.resource_bundle))
            for index, _ in enumerate(resource_bundle):
                if ignore_whitespace:
                    self.assertEqual(
                        self._remove_whitespace(resource_bundle[index]),
                        self._remove_whitespace(
                            self.context.resource_bundle[index]))
                else:
                    self.assertEqual(
                        resource_bundle[index],
                        self.context.resource_bundle[index])
        if not self.context.resource_bundle:
            # An empty bundle implies an empty index mapping.
            self.assertEqual(
                {},
                self.context.resource_bundle_index_2_collation_index)

    def _assert_recomposes(self, resource_bundle, result):
        """Recomposes translations and checks the resulting content."""
        self.transformer.recompose(self.context, resource_bundle)
        self.assertEqual(
            self._remove_whitespace(result),
            self._remove_whitespace(ContentIO.tostring(self.context.tree)))

    def _assert_recomposes_error(self, resource_bundle):
        """Asserts that recompose() fails and returns the failure.

        Returns:
            the first collected error if any were reported via the errors
            list, otherwise the raised exception itself.

        Raises:
            Exception: if recompose() unexpectedly succeeds.
        """
        errors = []
        try:
            self.transformer.recompose(
                self.context, resource_bundle, errors=errors)
        except Exception as e:  # pylint: disable=broad-except
            return errors[0] if errors else e
        # Bug fix: the previous code applied '%' to a format string with no
        # placeholder, which (with result always None) ended up raising
        # Exception(None) and losing the message.
        raise Exception('Expected to fail.')
class TestCasesForContentDecompose(TestCasesBase):
    """Tests for content decomposition phase.

    NOTE(review): two entity tests below were reconstructed from
    extraction-damaged literals ('&lt;', '&#39;'); verify against the
    original file.
    """
    def test_i18n_comment_is_preserved(self):
        original = 'Hello <!-- I18N: special comment -->world!'
        expected = ['Hello <!-- I18N: special comment -->world!']
        self._assert_decomposes(original, expected)
        # NOTE(review): this return value looks vestigial for a test method.
        return original
    def test_i18n_non_comment_is_removed(self):
        original = 'Hello <!-- just a comment -->world!'
        self._assert_decomposes(original, ['Hello world!'])
    def test_extract_simple_value_no_markup(self):
        original = 'The skies are blue.'
        expected = ['The skies are blue.']
        self._assert_decomposes(original, expected)
    def test_extract_simple_value_with_br(self):
        original = 'The skies are <br />blue.'
        expected = ['The skies are <br#1 />blue.']
        self._assert_decomposes(original, expected)
    def test_extract_value_with_inline(self):
        html = 'The <a href="foo">sky</a> is blue.'
        expected = ['The <a#1>sky</a#1> is blue.']
        self._assert_decomposes(html, expected)
    def test_extract_value_with_nested_inline(self):
        # Nested inline markup splits the content into three collations.
        html = 'The <a href="foo"><b>ocean</b> liner</a> is blue.'
        expected = ['The', '<b#1>ocean</b#1> liner', 'is blue.']
        self._assert_decomposes(html, expected)
    def test_extract_simple_value_with_only_non_ascii_no_markup(self):
        original = u'<p>Трава зеленая.</p>'
        expected = [u'Трава зеленая.']
        self._assert_decomposes(original, expected)
    def test_extract_simple_value_with_only_non_ascii_and_markup(self):
        original = u'Трава <b>зеленая</b>.'
        expected = [u'Трава <b#1>зеленая</b#1>.']
        self._assert_decomposes(original, expected)
    def test_extract_simple_value_with_entity(self):
        original = 'The skies &lt; are blue.'
        expected = ['The skies &lt; are blue.']
        self._assert_decomposes(original, expected)
    def test_extract_simple_value_with_entity_2(self):
        original = '''Let&#39;s start!'''
        expected = ['Let&#39;s start!']
        self._assert_decomposes(original, expected)
    def test_extract_nothing_to_translate(self):
        original = '\n\n <script>alert("Foo!");</script>\n\n '
        self._assert_decomposes(original, [])
    def test_extract_nothing_to_translate_2(self):
        original = '\n\n <a href="#foo" />\n\n '
        self._assert_decomposes(original, [])
    def test_extract_script_value(self):
        # By default <script> is opaque and becomes an indexed placeholder.
        original = 'The skies <script>alert("Foo!");</script> are blue.'
        expected = ['The skies <script#1 /> are blue.']
        self._assert_decomposes(original, expected)
        # With no opaque tags configured, script text is translatable.
        config = Configuration(opaque_tag_names=[])
        self.transformer = ContentTransformer(config=config)
        original = 'The skies <script>alert("Foo!");</script> are blue.'
        expected = ['The skies', 'alert("Foo!");', 'are blue.']
        self._assert_decomposes(original, expected)
    def test_extract_script_and_style_value(self):
        original = (
            'The skies <script>alert("Foo!");</script> are '
            '<style> { color: blue; } </style> blue.')
        expected = ['The skies <script#1 /> are <style#2 /> blue.']
        self._assert_decomposes(original, expected)
    def test_extract_one_complex_value(self):
        html = '''begin
            <p>
                The <a href='foo'>skies</a> are <a href="bar">blue</a>.
            </p>
            end'''
        expected = ['begin', 'The <a#1>skies</a#1> are <a#2>blue</a#2>.', 'end']
        self._assert_decomposes(html, expected)
        self.assertEqual(
            {0: 0, 1: 1, 2: 2},
            self.context.resource_bundle_index_2_collation_index)
    def test_resource_bundle_to_collation_mapping(self):
        # Whitespace-only collations do not emit bundle entries, so the
        # single bundle item maps to collation index 1.
        html = '''
            <p>
                The <a href='foo'>skies</a> are <a href="bar">blue</a>.
            </p>
        '''
        expected = ['The <a#1>skies</a#1> are <a#2>blue</a#2>.']
        self._assert_decomposes(html, expected)
        self.assertEqual(3, len(self.context.collations))
        self.assertEqual(
            {0: 1},
            self.context.resource_bundle_index_2_collation_index)
    def test_extract_many_complex_values(self):
        html = '''begin
            <p>
                The <a href="foo">skies</a> are <a href="bar">blue</a>.
            </p>
            followed by more <a href="baz">text</a> with markup
            <p>
                The <span class="red">roses</span> are <a href="y">red</a>.
            </p>
            end'''
        expected = [
            'begin',
            'The <a#1>skies</a#1> are <a#2>blue</a#2>.',
            'followed by more <a#1>text</a#1> with markup',
            'The <span#1>roses</span#1> are <a#2>red</a#2>.',
            'end']
        self._assert_decomposes(html, expected)
    def test_extract_complex_value_with_unicode(self):
        original = u'''
            begin
            <p>
                The <b>skies</b> are <a href="foo">blue</a>.
                <p>Трава <b>зеленая</b>.</p>
                The <b>roses</b> are <a href="bar">red</a>!
            </p>
            end
        '''
        expected = [
            'begin',
            'The <b#1>skies</b#1> are <a#2>blue</a#2>.',
            u'Трава <b#1>зеленая</b#1>.',
            'The <b#1>roses</b#1> are <a#2>red</a#2>!',
            'end'
            ]
        self._assert_decomposes(original, expected)
    def test_extract_ul_value(self):
        original = '''
            Start!
            <ul>
                The skies are <li>blue</li> and <li>red</li>.
            </ul>
            Done!
            '''
        expected = [
            'Start!\n            <ul#1 />\n            Done!',
            'The skies are',
            'blue',
            'and',
            'red',
            '.']
        self._assert_decomposes(original, expected)
    def test_extract_nested_elements(self):
        original = '''
            <p>
                The skies can be:
                <ul>
                    <li>red</li>
                    <li>blue</li>
                </ul>
                in the fall.
            </p>
            '''
        # TODO(psimakov): undesirable, but the parser closes <p> before new <ul>
        expected = [
            'The skies can be:',
            '<ul#1 />\n                in the fall.',
            'red',
            'blue']
        self._assert_decomposes(original, expected)
    def test_extract_decompose_can_be_called_many_times(self):
        html = 'The <a href="foo">sky</a> is blue.'
        expected = ['The <a#1>sky</a#1> is blue.']
        self._assert_decomposes(html, expected)
        self._assert_decomposes(html, expected)
        self._assert_decomposes(html, expected)
    def test_extract_decompose_opaque_translatable(self):
        # Attributes are emitted sorted when sort_attributes=True.
        config = Configuration(
            omit_empty_opaque_decomposable=False,
            sort_attributes=True)
        self.transformer = ContentTransformer(config)
        html = '<img src="foo" />'
        expected = ['<img#1 src="foo" />']
        self._assert_decomposes(html, expected)
        html = '<img src="foo" alt="bar"/>'
        expected = ['<img#1 alt="bar" src="foo" />']
        self._assert_decomposes(html, expected)
        html = '<img alt="bar" src="foo" />'
        expected = ['<img#1 alt="bar" src="foo" />']
        self._assert_decomposes(html, expected)
        html = '<img alt="bar" src="foo" title="baz"/>'
        expected = ['<img#1 alt="bar" src="foo" title="baz" />']
        self._assert_decomposes(html, expected)
        html = '<img src="foo" alt="bar" title="baz"/>'
        expected = ['<img#1 alt="bar" src="foo" title="baz" />']
        self._assert_decomposes(html, expected)
    def test_extract_decompose_custom_tag_with_attribute(self):
        config = Configuration(
            inline_tag_names=['FOO'],
            opaque_decomposable_tag_names=['FOO'],
            omit_empty_opaque_decomposable=False)
        self.transformer = ContentTransformer(config)
        html = '<div><foo alt="bar"></foo></div>'
        expected = ['<foo#1 alt="bar" />']
        self._assert_decomposes(html, expected)
        html = '<div><foo alt="bar">baz</foo></div>'
        expected = ['<foo#1 alt="bar">baz</foo#1>']
        self._assert_decomposes(html, expected)
    def test_extract_large_sample_document(self):
        self.maxDiff = None
        original = ContentIO.tostring(ContentIO.fromstring(
            SAMPLE_HTML_DOC_CONTENT))
        self._assert_decomposes(original, SAMPLE_HTML_DOC_DECOMPOSE)
    def test_extract_resource_bundle_from(self):
        original = '<p>The <a href="foo">skies</a> are blue!</p>'
        expected = ['The <a#1>skies</a#1> are blue!']
        context, _ = extract_resource_bundle_from(html=original)
        self.assertEqual(expected, context.resource_bundle)
class TestCasesForContentRecompose(TestCasesBase):
"""Tests for content decomposition phase."""
    def test_recompose_i18n_comment_is_preserved(self):
        """Verifies I18N: comments survive a decompose/recompose cycle."""
        html = 'Hello <!-- I18N: special comment -->world!'
        self._assert_decomposes(html, None)
        translations = ['HELLO <!-- I18N: special comment -->WORLD!']
        result = 'HELLO <!-- I18N: special comment -->WORLD!'
        self._assert_recomposes(translations, result)
    def test_recompose_one_complex_value(self):
        """Recomposes a single collation containing two inline anchors."""
        html = '''begin
            <p>
                The <a href="foo">skies</a> are <a href="bar">blue</a>.
            </p>
            end'''
        self._assert_decomposes(html, None)
        translations = [
            'BEGIN', 'The <a#1>SKIES</a#1> ARE <a#2>BLUE</a#2>.', 'END']
        result = '''BEGIN
            <p>
                The <a href="foo">SKIES</a> ARE <a href="bar">BLUE</a>.
            </p>
            END'''
        self._assert_recomposes(translations, result)
    def test_recompose_complex_value_mixed_tags(self):
        """Recomposes a collation that spans multiple mixed inline tags."""
        html = '''
            Start!
            <p>
                The <b>skies</b> are <a href="foo">blue</a>.
                The <b>roses</b> are <a href="bar">red</a>!
            </p>
            Done!
            '''
        expected = [
            'Start!',
            '''The <b#1>skies</b#1> are <a#2>blue</a#2>.
                The <b#3>roses</b#3> are <a#4>red</a#4>!''',
            'Done!']
        self._assert_decomposes(html, expected)
        translations = [
            'START!',
            '''The <b#1>SKIES</b#1> ARE <a#2>BLUE</a#2>.
                The <b#3>roses</b#3> ARE <a#4>RED</a#4>!''',
            'DONE!']
        result = '''START!<p>The <b>SKIES</b> ARE <a href="foo">BLUE</a>.
            The <b>roses</b> ARE <a href="bar">RED</a>!</p>DONE!'''
        self._assert_recomposes(translations, result)
    def test_recompose_multiple_complex_values_with_mixed_tags(self):
        """Recomposes two sibling <p> collations independently."""
        html = '''
            Start!
            <p>
                The <b>skies</b> are <a href="foo">blue</a>.
            </p>
            <p>
                The <b>roses</b> are <a href="bar">red</a>!
            </p>
            Done!
            '''
        expected = [
            'Start!',
            'The <b#1>skies</b#1> are <a#2>blue</a#2>.',
            'The <b#1>roses</b#1> are <a#2>red</a#2>!',
            'Done!']
        self._assert_decomposes(html, expected)
        translations = [
            'START!',
            'The <b#1>SKIES</b#1> ARE <a#2>blue</a#2>.',
            'THE <b#1>roses</b#1> are <a#2>RED</a#2>!',
            'DONE!']
        result = (
            'START!'
            '<p>The <b>SKIES</b> ARE <a href="foo">blue</a>.</p>'
            '<p>THE <b>roses</b> are <a href="bar">RED</a>!</p>'
            'DONE!')
        self._assert_recomposes(translations, result)
    def test_recompose_complex_value(self):
        """Recomposes content where attributes of an <img> are translated."""
        html = """
            <h1>
                <a href="/">
                <img alt="Google"
                    src="//www.google.com/images/logos/google_logo_41.png">
                Open Online Education</a>
            </h1>
            <a class="maia-teleport" href="#content">Skip to content</a>
            """
        expected = [
            '<img#1 src="//www.google.com/images/logos/google_logo_41.png" '
            'alt="Google" />\n                Open Online Education',
            '<a#1>Skip to content</a#1>']
        self._assert_decomposes(html, expected)
        translations = [
            '<img#1 src="//www.google.com/images/logos/google_logo_99.png" '
            'alt="Google+" />\n                Open ONLINE Education',
            '<a#1>SKIP to content</a#1>']
        result = """
            <h1>
                <a href="/">
                <img alt="Google+"
                    src="//www.google.com/images/logos/google_logo_99.png" />
                Open ONLINE Education</a>
            </h1>
            <a class="maia-teleport" href="#content">SKIP to content</a>
            """
        self._assert_recomposes(translations, result)
    def test_recompose_complex_value_2(self):
        # A block-level <p> embedded mid-sentence splits the text into three
        # chunks; the leading/trailing spaces carried by the translations are
        # merged back around the <p> boundaries.
        html = (
            'The <a class="foo">skies</a> '
            '<p>are <i>not</i></p>'
            ' always <a href="bar">blue</a>.')
        expected = [
            'The <a#1>skies</a#1>',
            'are <i#1>not</i#1>',
            'always <a#1>blue</a#1>.']
        self._assert_decomposes(html, expected)
        translations = [
            'The <a#1>SKIES</a#1> ',
            'ARE <i#1>NOT</i#1>',
            ' ALWAYS <a#1>blue</a#1>.']
        result = (
            'The <a class="foo">SKIES</a> '
            '<p>ARE <i>NOT</i></p>'
            ' ALWAYS <a href="bar">blue</a>.')
        self._assert_recomposes(translations, result)
    def test_textarea_self_closing_fails_parse(self):
        # TODO(psimakov): fix this
        # A self-closed <textarea/> is mis-parsed — presumably the HTML
        # parser treats <textarea> as unclosed and swallows the trailing
        # text (confirm against html5lib behavior) — so the clean
        # decomposition is asserted to fail ...
        html = 'foo <textarea name="bar"/> baz'
        expected = ['foo', 'baz']
        with self.assertRaises(AssertionError):
            self._assert_decomposes(html, expected)
        # ... and this pins the (wrong) output produced today, so the TODO
        # surfaces when the underlying parse is ever fixed.
        unexpected = ['foo <textarea#1 />', 'baz</div>']
        self._assert_decomposes(html, unexpected)
def test_placeholder(self):
config = Configuration(omit_empty_opaque_decomposable=False)
self.transformer = ContentTransformer(config)
html = '<textarea class="foo" placeholder="bar">baz</textarea>'
expected = ['<textarea#1 placeholder="bar" />', 'baz']
self._assert_decomposes(html, expected)
    def test_recompose_complex_ul(self):
        # With omit_empty_opaque_decomposable disabled, the <ul> shell is
        # emitted as its own opaque chunk; each <li> body is translated
        # separately and attributes (class, index) are preserved in place.
        config = Configuration(omit_empty_opaque_decomposable=False)
        self.transformer = ContentTransformer(config)
        html = '''
            <ul class="foo">
              <li>sss</li>
              <li index="bar">ttt</li>
              <li>xxx</li>
              <li>yyy</li>
              <li>zzz</li>
            </ul>
            '''
        expected = ['<ul#1 />', 'sss', 'ttt', 'xxx', 'yyy', 'zzz']
        self._assert_decomposes(html, expected)
        translations = ['<ul#1 />', 'SSS', 'TTT', 'XXX', 'YYY', 'ZZZ']
        # Here the original surrounding whitespace is reproduced exactly.
        result = '''
            <ul class="foo">
              <li>SSS</li>
              <li index="bar">TTT</li>
              <li>XXX</li>
              <li>YYY</li>
              <li>ZZZ</li>
            </ul>
            '''
        self._assert_recomposes(translations, result)
    def test_recompose_complex_with_opaque_docomposable(self):
        # NOTE(review): "docomposable" in the name looks like a typo for
        # "decomposable"; kept as-is since renaming changes the test id.
        # Nested opaque containers (<table>, <ul>) each surface as a
        # "<tag#N />" shell while only the leaf text is translated.
        config = Configuration(omit_empty_opaque_decomposable=False)
        self.transformer = ContentTransformer(config)
        html = u"""
            <table border="2">
              <tbody>
                <tr>
                  <td>
                    <i>table</i>
                    <p></p>
                    <ul>
                      <li>a</li>
                      <li>b</li>
                    </ul>
                    <p></p>
                  </td>
                </tr>
              </tbody>
            </table>"""
        expected = [
            '<table#1 />', '<i#1>table</i#1>', '<ul#1 />', 'a', 'b']
        self._assert_decomposes(html, expected)
        # Translated shells may omit the space before "/>" and still match.
        translations = [
            '<table#1/>', '<i#1>TABLE</i#1>', '<ul#1/>', 'A', 'B']
        result = (
            '<table border="2">'
            '<tbody>'
            '<tr>'
            '<td>'
            '<i>TABLE</i>'
            '<p></p>'
            '<ul>'
            '<li>A</li>'
            '<li>B</li>'
            '</ul>'
            '<p></p>'
            '</td>'
            '</tr>'
            '</tbody>'
            '</table>')
        self._assert_recomposes(translations, result)
def test_recompose_empty_p_is_roundtripped(self):
html = 'The skies are blue.<p></p>The roses are red.'
self._assert_decomposes(html, None)
translation = ['The SKIES are blue. ', 'The roses are RED.']
result = 'The SKIES are blue.<p></p>The roses are RED.'
self._assert_recomposes(translation, result)
def test_recompose_translation_with_no_significant_markup(self):
html = 'The skies are blue.<p>Maybe...</p>The roses are red.'
self._assert_decomposes(html, None)
translation = ['The SKIES are blue.', 'MAYBE...', 'The roses are RED.']
result = 'The SKIES are blue.<p>MAYBE...</p>The roses are RED.'
self._assert_recomposes(translation, result)
def test_no_new_tag_attributes_can_be_added_in_translations(self):
html = 'The <a class="foo">skies</a> are blue.'
self._assert_decomposes(html, None)
translation = ['The <a#1 onclick="bar">SKIES</a#1> are blue.']
result = 'The <a class="foo">SKIES</a> are blue.'
self._assert_recomposes(translation, result)
    def test_whitespace_is_preserved(self):
        html = 'foo <b><i>bar</i></b>'
        # Default decomposition drops the trailing space after "foo", and
        # the recomposed output loses it as well.
        expected_no_whitespace = ['foo', '<i#1>bar</i#1>']
        self._assert_decomposes(html, expected_no_whitespace)
        translation_no_whitespace = ['FOO', '<i#1>BAR</i#1>']
        result_no_whitespace = 'FOO<b><i>BAR</i></b>'
        self._assert_recomposes(translation_no_whitespace, result_no_whitespace)
        # With ignore_whitespace=False the space survives decomposition;
        # carried through the translation, it reappears in the output.
        expected_with_whitespace = ['foo ', '<i#1>bar</i#1>']
        self._assert_decomposes(
            html, expected_with_whitespace, ignore_whitespace=False)
        translation_with_whitespace = ['FOO ', '<i#1>BAR</i#1>']
        result_with_whitespace = 'FOO <b><i>BAR</i></b>'
        self._assert_recomposes(
            translation_with_whitespace, result_with_whitespace)
    def test_no_new_tags_can_be_added_in_translations(self):
        original = 'The <a class="foo">skies</a> are blue.'
        self._assert_decomposes(original, None)
        # <b#2> does not exist in the original, so recompose must reject the
        # translation with a LookupError, attributed to chunk index 0.
        translation = ['The <a#1>SKIES</a#1> are <b#2>blue</b#2>.']
        _error = self._assert_recomposes_error(translation)
        if not isinstance(_error.original_exception, LookupError):
            _error.reraise()  # surface the unexpected exception type
        self.assertEquals(
            'Unexpected tag: <b#2>.',
            _error.original_exception.message)
        self.assertEqual(0, _error.index)
    def test_all_tags_must_be_indexed_in_translations(self):
        original = 'The <a class="foo">skies</a> are blue.'
        self._assert_decomposes(original, None)
        # An un-indexed <b> (no "#N" suffix) is a SyntaxError in a
        # translation.
        translation = ['The <a#1>SKIES</a#1> are <b>blue</b>.']
        _error = self._assert_recomposes_error(translation)
        if not isinstance(_error.original_exception, SyntaxError):
            _error.reraise()  # surface the unexpected exception type
        # "form" (sic) — the test must match the transformer's actual
        # message text verbatim.
        self.assertEquals(
            'Error extracting index form the tag <b>. '
            'Tag name format is <tag_name#index>, like <a#1>.',
            _error.original_exception.message)
        self.assertEqual(0, _error.index)
    def test_all_tags_must_be_translated_in_translations(self):
        original = 'The <a class="foo">skies</a> are <a href="bar">blue</a>.'
        expected = ['The <a#1>skies</a#1> are <a#2>blue</a#2>.']
        self._assert_decomposes(original, expected)
        # A translation that drops the original tags entirely is rejected:
        # every indexed tag must reappear exactly once.
        translation = ['The SKIES are blue.']
        _error = self._assert_recomposes_error(translation)
        if not isinstance(_error.original_exception, LookupError):
            _error.reraise()  # surface the unexpected exception type
        self.assertEquals(
            'Expected to find the following tags: <a#1>, <a#2>.',
            _error.original_exception.message)
        self.assertEqual(0, _error.index)
def test_can_recompose_alphanum_tag_names(self):
config = Configuration(
inline_tag_names=['GCB-HTML5VIDEO'],
omit_empty_opaque_decomposable=False)
self.transformer = ContentTransformer(config)
html = 'video <gcb-html5video url="woo.mp4"></gcb-html5video>'
expected = ['video <gcb-html5video#1 />']
self._assert_decomposes(html, expected)
translation = ['VIDEO <gcb-html5video#1 />']
result = 'VIDEO <gcb-html5video url="woo.mp4"></gcb-html5video>'
self._assert_recomposes(translation, result)
    def test_recompose_called_multiple_times_fails(self):
        html = 'The <a class="foo">skies</a> are blue.'
        self._assert_decomposes(html, None)
        translation = ['The <a#1 onclick="bar">SKIES</a#1> are blue.']
        result = 'The <a class="foo">SKIES</a> are blue.'
        # The first recompose succeeds ...
        self._assert_recomposes(translation, result)
        # ... but a context is single-use: the second attempt must fail
        # directly with an AssertionError (not a wrapped exception, hence
        # the bare type check instead of _error.reraise()).
        _error = self._assert_recomposes_error(translation)
        if not isinstance(_error, AssertionError):
            raise Exception()
        self.assertEquals(
            'Please create new context; this context is not reusable.',
            _error.message)
def test_recompose_large_sample_document(self):
self.maxDiff = None
original = ContentIO.tostring(ContentIO.fromstring(
SAMPLE_HTML_DOC_CONTENT))
self._assert_decomposes(original, None)
translations = [] + SAMPLE_HTML_DOC_DECOMPOSE
translations[2] = '<a#1>SKIP TO CONTENT</a#1>'
result = original.replace('Skip to content', 'SKIP TO CONTENT')
self._assert_recomposes(translations, result)
def test_recompose_resource_bundle_into(self):
original = '<p>The <a href="foo">skies</a> are blue!</p>'
translation = [u'<a#1>Небо</a#1> синее!']
expected = u'<p><a href="foo">Небо</a> синее!</p>'
context, _ = merge_resource_bundle_into(
html=original, resource_bundle=translation)
self.assertEqual(expected, ContentIO.tostring(context.tree))
def run_all_unit_tests():
    """Runs all unit tests in this module."""
    loader = unittest.TestLoader()
    combined = unittest.TestSuite()
    for test_class in (
            ListsDifflibTests, SetsDifflibUtils,
            TestCasesForIO,
            TestCasesForContentDecompose, TestCasesForContentRecompose):
        combined.addTest(loader.loadTestsFromTestCase(test_class))
    result = unittest.TextTestRunner().run(combined)
    # Fail loudly so callers (e.g. a build script) notice broken tests.
    if not result.wasSuccessful() or result.errors:
        raise Exception(result)
# Below we keep content needed for test cases. We keep it here so this module
# can be reused in any application; splitting the tests out into /tests/...
# would make this more difficult.
# pylint: disable=line-too-long
# taken from http://www.google.com/edu/openonline/edukit/course-parts.html
SAMPLE_HTML_DOC_CONTENT = u'''
<!DOCTYPE html>
<html class="google" lang="en">
<head>
<script>
(function(H){H.className=H.className.replace(/\bgoogle\b/,'google-js')})(document.documentElement)
</script>
<meta charset="utf-8">
<meta content="initial-scale=1, minimum-scale=1, width=device-width" name="viewport">
<title>
Google Open Online Education
</title>
<script src="//www.google.com/js/google.js">
</script>
<script>
new gweb.analytics.AutoTrack({profile:"UA-12481063-1"});
</script>
<link href="//fonts.googleapis.com/css?family=Open+Sans:300,400,600,700&lang=en" rel=
"stylesheet">
<link href=" /edu/openonline/css/edukit.css" rel="stylesheet">
</head>
<body>
<div class="maia-header" id="maia-header" role="banner">
<div class="maia-aux">
<h1>
<a href="/"><img alt="Google" src="//www.google.com/images/logos/google_logo_41.png">
Open Online Education</a>
</h1><a class="maia-teleport" href="#content">Skip to content</a>
</div>
</div>
<div class="maia-nav" id="maia-nav-x" role="navigation">
<div class="maia-aux">
<ul>
<li>
<a data-g-action="Maia: Level 1" data-g-event="Maia: Site Nav" data-g-label="OOE_Home"
href="/edu/openonline/index.html">Home</a>
</li>
<li>
<a data-g-action="Maia: Level 1" data-g-event="Maia: Site Nav" data-g-label=
"OOE_Insights" href="/edu/openonline/insights/index.html">Insights</a>
</li>
<li>
<a class="active" data-g-action="Maia: Level 1" data-g-event="Maia: Site Nav"
data-g-label="OOE_Edu_Kit" href="/edu/openonline/edukit/index.html">Online Course
Kit</a>
</li>
<li>
<a data-g-action="Maia: Level 1" data-g-event="Maia: Site Nav" data-g-label=
"OOE_Open_edX" href="/edu/openonline/tech/index.html">Technologies</a>
</li>
<li>
<a class="active" data-g-action="Maia: Level 1" data-g-event="Maia: Site Nav"
data-g-label="GOOG_EDU_main" href="/edu/index.html">Google for Education</a>
</li>
</ul>
</div>
</div>
<div id="maia-main" role="main">
<div class="maia-nav-aux">
<div class="edukit_nav">
<ul>
<li>
<a data-g-action="Maia: Level 2" data-g-event="Maia: Site Nav" data-g-label=
"Quick Start" href="/edu/openonline/edukit/quickstart.html">Quick Start</a>
</li>
<li>
<a data-g-action="Maia: Level 2" data-g-event="Maia: Site Nav" data-g-label="Plan"
href="/edu/openonline/edukit/plan.html">Plan</a>
</li>
<li>
<a data-g-action="Maia: Level 2" data-g-event="Maia: Site Nav" data-g-label="Create"
href="/edu/openonline/edukit/create.html">Create</a>
</li>
<li>
<a data-g-action="Maia: Level 2" data-g-event="Maia: Site Nav" data-g-label=
"Implement" href="/edu/openonline/edukit/implement.html">Implement</a>
</li>
<li>
<a data-g-action="Maia: Level 2" data-g-event="Maia: Site Nav" data-g-label="Pilot"
href="/edu/openonline/edukit/pilot.html">Pilot</a>
</li>
<li>
<a data-g-action="Maia: Level 2" data-g-event="Maia: Site Nav" data-g-label=
"Communicate" href="/edu/openonline/edukit/communicate.html">Communicate</a>
</li>
<li>
<a data-g-action="Maia: Level 2" data-g-event="Maia: Site Nav" data-g-label=
"Using Course Builder" href="/edu/openonline/edukit/course-parts.html">Using Course
Builder</a>
</li>
<li>
<a data-g-action="Maia: Level 2" data-g-event="Maia: Site Nav" data-g-label=
"More Resources" href="/edu/openonline/edukit/resource.html">More Resources</a>
</li>
</ul>
</div>
</div>
<div class="maia-teleport" id="content"></div>
<div class="clearfix_nav"></div>
<div class="ooe_content">
<h1>
Parts of a Course Builder Course
</h1>
<p>
The primary parts of a course created with Course Builder are as follows:
</p>
<ul>
<li>
<a href="#course_content_and_delivery">Course content and delivery</a><br>
The material that you formally convey to students. Formal content can be lessons
recorded or written in advance. It can also be live question and answer sessions with
course staff.
</li>
<li>
<a href="#assessments_and_activities">Assessments and activities</a><br>
Graded assessments with a fixed deadline to track student progress. You can also use
ungraded assessments, called <strong>activities</strong>, to provide feedback and hints
to students.
</li>
<li>
<a href="#social_interactions">Social interactions</a><br>
An important component of an online course is the interactions among the students
themselves and the interactions between students and the course staff (the instructors
or teaching assistants).
</li>
<li>
<a href="#administrative_tasks">Administrative tasks</a><br>
Of course, there are tasks such as registering students, setting up the course,
tracking usage, and so on.
</li>
</ul>
<p>
A single course consists of a series of units with individual lessons and activities. The
course can have any number of graded assessments scattered before, between, and after the
units and lessons. It can also have one or more formally-set up avenues for social
interaction for the students.
</p>
<p>
For a quick description of the flow a student typically experiences in a course, see
<a href="courseflow.html">Course Flow for Students</a>. For a description of the design
process we think is effective, see <a href="design-process.html">Design Process</a>.
</p>
<p>
The rest of this page discusses the four main parts of a course in more detail.
</p><a id="course_content_and_delivery" name="course_content_and_delivery"></a>
<h2>
Course content and delivery
</h2>
<p>
To make the content more digestible, consider grouping course material into a number of
units. Each unit contains a series of lessons and possibly activities related to a
particular topic within the content covered by the entire course.
</p><input class="toggle-box-small" id="units1" type="checkbox"> <label for="units1">Units
with lessons and activities</label>
<div class="toggle-small maia-aside">
<p>
In the <a href="power-searching.html">Power Searching with Google</a> course, one unit
is about interpreting search results; another is about checking the reliability of the
content of those search results. Each of those units consists of about five lessons and
about five activities. For these units, course staff creates and releases the lessons
and activities ahead of time. While the material is available to students, course staff
interacts with students through the <a href="forums.html">participant community
mechanisms</a>.
</p>
<p>
For a unit that consists of a series of lessons and activities, we found that around
five lessons and four activities is a good length.
</p>
<p>
A lesson is a coherent and relatively small chunk of information. In Power Searching
with Google, we chose to create each lesson as one video and a text version of the same
content. Your lessons do not have to have both parts. For more information, see
<a href="//code.google.com/p/course-builder/wiki/CreateLessons">Create Lessons</a>.
</p>
<p>
An activity is an ungraded assessment, used to provide feedback to students on how well
they understand the lesson. Activities typically contain optional hints. For more
information, see <a href="#assessments_and_activities">Assessments and activities</a>.
</p>
<p>
Tips:
</p>
<ul>
<li>Make short videos, preferably 3-5 minutes.
</li>
<li>Include closed captions in your videos.
</li>
<li>For the text version, take the time to clean up the transcript.
</li>
<li>When deciding what content to include, design for the average student. To
accommodate other students, consider including background or advanced material in forum
posts (if you want discussion and maybe answers) or in Google+ or blog posts (if you
just want to broadcast the information).
</li>
</ul>
</div><input class="toggle-box-small" id="units2" type="checkbox"> <label for=
"units2">Units using Hangouts on Air</label>
<div class="toggle-small maia-aside">
<p>
A very different type of unit is online office hours where the students submit
questions ahead of time and the course staff answers those questions in real-time using
a <a href="http://www.google.com/+/learnmore/hangouts/onair.html">Hangout On Air</a>.
Depending on your course, you may have some students interacting with the course staff
over video for the Hangout On Air or you may have students submit all of their
questions using <a href="https://www.google.com/moderator/">Google Moderator</a>.
</p>
<p>
For online office hours you have a fixed date and time when the course staff broadcasts
a session for students to watch and interact with.
</p>
<p>
If you have a very small course (fewer than 10 people), you can use a Google Hangout
for your session. If you have more than 10 people, you can use a combination of Google
Hangouts on Air and Google Moderator instead.
</p>
<p>
A <a href="//www.google.com/+/learnmore/hangouts/">Google Hangout</a> is a video chat
that can have up to 10 participants. In a Google Hangout, all participants can share
what\u2019s on each person's screen, collaborate in Google Docs, view presentations and
diagrams together and speak to each other. If your course is small enough, this is a
great way to go. If your course is large, you may still consider having your students
break into small groups for interactive activities with each other over Hangouts.
</p>
<p>
If your course has many more than 10 students, you can use a combination of Hangouts on
Air and Google Moderator to create a live experience with your students. With a
<a href="//code.google.com/p/course-builder/wiki/OnlineOfficeHours#Setting_up_a_Hangout_On_Air">
Google Hangout on Air</a>, you can post a live session with your instructors and any
guests you chose. You post the Hangout on Air to your YouTube channel and to your
Google+ stream. Students cannot talk to you in real-time, but they can ask you
questions through the use of a Google Moderator series, or by posting comments to the
Moderator stream, YouTube stream, or Google+ stream. You can use a <a href=
"https://code.google.com/p/course-builder/wiki/OnlineOfficeHours#Setting_up_a_Google_Moderator_series">
Google Moderator series</a> to collect questions from your students and have them vote
those questions up and down; the collecting can either be done in advance, during the
Hangout on Air, or both.
</p>
<p>
<strong>Tip:</strong> If you do a Hangout on Air, consider using a live captioning
service to help students who are hearing impaired or whose primary language is not the
language used in the Hangout on Air.
</p>
</div>
<p>
For all of these unit types, instructors make course content available to students at
scheduled intervals throughout the course. Once available, the content continues to be
available until the course ends. That is, lessons are not available for only a few days;
students can go back and redo lessons at any time throughout the course. In <a href=
"http://www.powersearchingwithgoogle.com/">Power Searching with Google</a>, soon after
online office hours took place, the course staff posted a video of it. So even if
students missed the office hours, that material was still available.
</p>
<p>
Releasing course content at scheduled intervals has one perhaps unanticipated benefit.
Many students tend to work on content relatively soon after the content becomes
available. For that reason, questions about course material tend to cluster near the
release of that material. Because other students are thinking about the same material,
they are more likely to be interested in getting involved in discussions about the
material or in answering questions.
</p>
<p>
These are only some possibilities for how to model units. You may discover other ways to
do things that suit your material better. For example, instead of all of the teaching
being pushed from the course staff, you may decide to break your students into small
cohorts and have those cohorts work on material together. You could provide them with
lessons and activities to start from and then have them use Hangouts of their own for
group study.
</p><a id="assessments_and_activities" name="assessments_and_activities"></a>
<h2>
Assessments and activities
</h2>
<p>
In Course Builder, an assessment is a test. Assessments can either be graded or ungraded.
Ungraded assessments are also called activities.
</p>
<p>
When you create your course using Course Builder, you supply the code with the
information needed to grade assessments.
</p><input class="toggle-box-small" id="question-types" type="checkbox"> <label for=
"question-types">Question types</label>
<div class="toggle-small maia-aside">
<p>
Graded and ungraded assessments essentially support the same types of questions:
</p>
<ul>
<li>Multiple-choice with one correct answer
</li>
<li>Multiple-choice with more than one correct answer
</li>
<li>Fill-in-the blank
</li>
<li>Go and do something. These are questions that do not have prepared answers and
instead invite the user to engage in some action. For example, in <a href=
"//www.powersearchingwithgoogle.com/">Power Searching with Google</a> one of the
questions was "When was the last historic earthquake in your area? Share your answer in
the forum."
</li>
</ul>
<p>
Telling the experimental code how to grade multiple-choice questions is
straightforward. Telling it how to grade fill-in-the-blank questions can be trickier.
You need to be very careful both in your wording of the question and in what you
include about the correct answer. \u201cGo and do something\u201d questions do not require an
answer, so you don\u2019t have to include anything about the answer.
</p>
</div><input class="toggle-box-small" id="ungraded-activities" type="checkbox"> <label for=
"ungraded-activities">Ungraded activities</label>
<div class="toggle-small maia-aside">
<p>
An activity typically covers material only from the lesson that the activity
immediately follows. You use them to let the students assess their own understanding of
the material in that lesson. An activity does not affect a student\u2019s final score in the
course.
</p>
<p>
When you create a question for an activity, you can provide the following information:
</p>
<ul>
<li>The correct answer to the question, so the code knows what to tell the student.
</li>
<li>A hint about why incorrect answers are incorrect. The hint should point the student
to the correct answer.
</li>
<li>The correct answer and explanatory information.
</li>
</ul>
</div><input class="toggle-box-small" id="graded-assessments" type="checkbox"> <label for=
"graded-assessments">Graded assessments</label>
<div class="toggle-small maia-aside">
<p>
Graded assessments typically cover material from several units and lessons. You use
them to rate students\u2019 performance. Before and after assessments can also help you
gauge the effectiveness of the course.
</p>
<p>
With Course Builder's experimental code, you have control over how many graded
assessments you provide and how each of those assessments counts in the final scoring
for a student\u2019s grade.
</p>
<p>
Because you use a graded assessment to rate performance and measure success, your
practical choices are:
</p>
<ul>
<li>Only let students take a graded assessment once. In this case, you can tell your
students which of their answers are incorrect.
</li>
<li>Let students take a graded assessment multiple times. In this case, do not tell
them which answers are incorrect. (If you do, then they'll have no difficulty getting
100% when retaking the same assessment.)
</li>
</ul>
<p>
If you choose to allow your students to take the same graded assessment multiple times,
consider still giving the students some feedback about what they did wrong. To do this,
map each assessment question to the corresponding unit and lesson within the course.
Then immediately after submission of the assessment, show students the score and list
the lessons to review to improve their score.
</p>
</div>
<h2>
Social interactions
</h2>
<p>
Another critical component of a successful course is student participation. Online office
hours and asking questions of the experts are some examples to elicit participation.
</p>
<p>
For large online courses, the size of the audience means that it is impractical for the
course staff to answer all of the questions and to enter all of the discussions posed by
all of the students. Instead, you can set up avenues in which the students can
participate not just with the instructor but also with other students.
</p>
<p>
The most common types of social interactions are:
</p>
<ul>
<li>
<a href="//code.google.com/p/course-builder/wiki/WebForums">Google Groups or other web
forum</a><br>
A web forum is a great way to get your students to talk to each other. To facilitate
discussion, you can set up your forum with appropriate categories, to guide students to
likely places to read and to post questions on particular topics within your course.
When designing the content of your course, consider creating activities requesting that
students post answers to the forum. You can also use a forum to post material that you
do not want in the main body of your course, either because it is background material
for students who need a bit more help or more challenging questions for more advanced
students.
</li>
<li>
<a href="//code.google.com/p/course-builder/wiki/Announcements">Google+ page or
blog</a><br>
Use Google+ or your blog to share information that you want available to not just your
students, but to other people as well. While students can comment on your posts, these
formats are still primarily methods for instructors to push information out to the
students.
</li>
<li>
<a href="//www.google.com/+/learnmore/hangouts/">Google Hangout</a><br>
You may decide that you want your students to divide into smaller groups to work on
projects together. Your students probably live in distributed areas. You can have them
meet in a Google Hangout to collaborate on their project.
</li>
<li>
<a href=
"https://code.google.com/p/course-builder/wiki/CreateEmailList">Announcements-only
email alias</a><br>
Throughout the course, you may want to send email to students, such as to remind them
of upcoming events.
</li>
</ul>
<p>
In addition to these things that you set up, students may create additional interaction
mechanisms, perhaps an email alias for students interested in a particular aspect of the
course material or weekly in-person meetings for students living close to each other.
</p><a id="administrivia" name="administrivia"></a>
<h2>
Administrative tasks
</h2>
<p>
Of course, as with any class there are various administrative aspects to creating an
online course. Two of the major ones are <a href=
"//code.google.com/p/course-builder/wiki/CreateRegistration">managing student
registration</a> and <a href=
"//code.google.com/p/course-builder/wiki/MeasureEfficacy">collecting and analyzing data
to see how well your course does</a>.
</p>
<p>
For a full list of tasks needed to create a course, see the <a href=
"//code.google.com/p/course-builder/wiki/CourseBuilderChecklist">Course Builder
Checklist</a>.
</p>
</div>
</div>
<div id="maia-signature"></div>
<div class="maia-footer" id="maia-footer">
<div id="maia-footer-global">
<div class="maia-aux">
<ul>
<li>
<a href="/">Google</a>
</li>
<li>
<a href="/intl/en/about/">About Google</a>
</li>
<li>
<a href="/intl/en/policies/">Privacy & Terms</a>
</li>
</ul>
</div>
</div>
</div><script src="//www.google.com/js/maia.js">
</script>
</body>
</html>
'''
SAMPLE_HTML_DOC_DECOMPOSE = [
'Google Open Online Education',
'<img#1 src="//www.google.com/images/logos/google_logo_41.png" alt="Google" />\n Open Online Education',
'<a#1>Skip to content</a#1>',
'<a#1>Home</a#1>',
'<a#1>Insights</a#1>',
'<a#1>Online Course\n Kit</a#1>',
'<a#1>Technologies</a#1>',
'<a#1>Google for Education</a#1>',
'<a#1>Quick Start</a#1>',
'<a#1>Plan</a#1>',
'<a#1>Create</a#1>',
'<a#1>Implement</a#1>',
'<a#1>Pilot</a#1>',
'<a#1>Communicate</a#1>',
'<a#1>Using Course\n Builder</a#1>',
'<a#1>More Resources</a#1>',
'Parts of a Course Builder Course',
'The primary parts of a course created with Course Builder are as follows:',
'<a#1>Course content and delivery</a#1><br#2 />\n The material that you formally convey to students. Formal content can be lessons\n recorded or written in advance. It can also be live question and answer sessions with\n course staff.',
'<a#1>Assessments and activities</a#1><br#2 />\n Graded assessments with a fixed deadline to track student progress. You can also use\n ungraded assessments, called <strong#3>activities</strong#3>, to provide feedback and hints\n to students.',
'<a#1>Social interactions</a#1><br#2 />\n An important component of an online course is the interactions among the students\n themselves and the interactions between students and the course staff (the instructors\n or teaching assistants).',
'<a#1>Administrative tasks</a#1><br#2 />\n Of course, there are tasks such as registering students, setting up the course,\n tracking usage, and so on.',
'A single course consists of a series of units with individual lessons and activities. The\n course can have any number of graded assessments scattered before, between, and after the\n units and lessons. It can also have one or more formally-set up avenues for social\n interaction for the students.',
'For a quick description of the flow a student typically experiences in a course, see\n <a#1>Course Flow for Students</a#1>. For a description of the design\n process we think is effective, see <a#2>Design Process</a#2>.',
'The rest of this page discusses the four main parts of a course in more detail.',
'Course content and delivery',
'To make the content more digestible, consider grouping course material into a number of\n units. Each unit contains a series of lessons and possibly activities related to a\n particular topic within the content covered by the entire course.',
'Units\n with lessons and activities',
'In the <a#1>Power Searching with Google</a#1> course, one unit\n is about interpreting search results; another is about checking the reliability of the\n content of those search results. Each of those units consists of about five lessons and\n about five activities. For these units, course staff creates and releases the lessons\n and activities ahead of time. While the material is available to students, course staff\n interacts with students through the <a#2>participant community\n mechanisms</a#2>.',
'For a unit that consists of a series of lessons and activities, we found that around\n five lessons and four activities is a good length.',
'A lesson is a coherent and relatively small chunk of information. In Power Searching\n with Google, we chose to create each lesson as one video and a text version of the same\n content. Your lessons do not have to have both parts. For more information, see\n <a#1>Create Lessons</a#1>.',
'An activity is an ungraded assessment, used to provide feedback to students on how well\n they understand the lesson. Activities typically contain optional hints. For more\n information, see <a#1>Assessments and activities</a#1>.',
'Tips:',
'Make short videos, preferably 3-5 minutes.',
'Include closed captions in your videos.',
'For the text version, take the time to clean up the transcript.',
'When deciding what content to include, design for the average student. To\n accommodate other students, consider including background or advanced material in forum\n posts (if you want discussion and maybe answers) or in Google+ or blog posts (if you\n just want to broadcast the information).',
'Units using Hangouts on Air',
'A very different type of unit is online office hours where the students submit\n questions ahead of time and the course staff answers those questions in real-time using\n a <a#1>Hangout On Air</a#1>.\n Depending on your course, you may have some students interacting with the course staff\n over video for the Hangout On Air or you may have students submit all of their\n questions using <a#2>Google Moderator</a#2>.',
'For online office hours you have a fixed date and time when the course staff broadcasts\n a session for students to watch and interact with.',
'If you have a very small course (fewer than 10 people), you can use a Google Hangout\n for your session. If you have more than 10 people, you can use a combination of Google\n Hangouts on Air and Google Moderator instead.',
u'A <a#1>Google Hangout</a#1> is a video chat\n that can have up to 10 participants. In a Google Hangout, all participants can share\n what\u2019s on each person's screen, collaborate in Google Docs, view presentations and\n diagrams together and speak to each other. If your course is small enough, this is a\n great way to go. If your course is large, you may still consider having your students\n break into small groups for interactive activities with each other over Hangouts.',
'If your course has many more than 10 students, you can use a combination of Hangouts on\n Air and Google Moderator to create a live experience with your students. With a\n <a#1>\n Google Hangout on Air</a#1>, you can post a live session with your instructors and any\n guests you chose. You post the Hangout on Air to your YouTube channel and to your\n Google+ stream. Students cannot talk to you in real-time, but they can ask you\n questions through the use of a Google Moderator series, or by posting comments to the\n Moderator stream, YouTube stream, or Google+ stream. You can use a <a#2>\n Google Moderator series</a#2> to collect questions from your students and have them vote\n those questions up and down; the collecting can either be done in advance, during the\n Hangout on Air, or both.',
'<strong#1>Tip:</strong#1> If you do a Hangout on Air, consider using a live captioning\n service to help students who are hearing impaired or whose primary language is not the\n language used in the Hangout on Air.',
'For all of these unit types, instructors make course content available to students at\n scheduled intervals throughout the course. Once available, the content continues to be\n available until the course ends. That is, lessons are not available for only a few days;\n students can go back and redo lessons at any time throughout the course. In <a#1>Power Searching with Google</a#1>, soon after\n online office hours took place, the course staff posted a video of it. So even if\n students missed the office hours, that material was still available.',
'Releasing course content at scheduled intervals has one perhaps unanticipated benefit.\n Many students tend to work on content relatively soon after the content becomes\n available. For that reason, questions about course material tend to cluster near the\n release of that material. Because other students are thinking about the same material,\n they are more likely to be interested in getting involved in discussions about the\n material or in answering questions.',
'These are only some possibilities for how to model units. You may discover other ways to\n do things that suit your material better. For example, instead of all of the teaching\n being pushed from the course staff, you may decide to break your students into small\n cohorts and have those cohorts work on material together. You could provide them with\n lessons and activities to start from and then have them use Hangouts of their own for\n group study.',
'Assessments and activities',
'In Course Builder, an assessment is a test. Assessments can either be graded or ungraded.\n Ungraded assessments are also called activities.',
'When you create your course using Course Builder, you supply the code with the\n information needed to grade assessments.',
'Question types',
'Graded and ungraded assessments essentially support the same types of questions:',
'Multiple-choice with one correct answer',
'Multiple-choice with more than one correct answer',
'Fill-in-the blank',
'Go and do something. These are questions that do not have prepared answers and\n instead invite the user to engage in some action. For example, in <a#1>Power Searching with Google</a#1> one of the\n questions was "When was the last historic earthquake in your area? Share your answer in\n the forum."',
u'Telling the experimental code how to grade multiple-choice questions is\n straightforward. Telling it how to grade fill-in-the-blank questions can be trickier.\n You need to be very careful both in your wording of the question and in what you\n include about the correct answer. \u201cGo and do something\u201d questions do not require an\n answer, so you don\u2019t have to include anything about the answer.',
'Ungraded activities',
u'An activity typically covers material only from the lesson that the activity\n immediately follows. You use them to let the students assess their own understanding of\n the material in that lesson. An activity does not affect a student\u2019s final score in the\n course.',
'When you create a question for an activity, you can provide the following information:',
'The correct answer to the question, so the code knows what to tell the student.',
'A hint about why incorrect answers are incorrect. The hint should point the student\n to the correct answer.',
'The correct answer and explanatory information.',
'Graded assessments',
u'Graded assessments typically cover material from several units and lessons. You use\n them to rate students\u2019 performance. Before and after assessments can also help you\n gauge the effectiveness of the course.',
u'With Course Builder's experimental code, you have control over how many graded\n assessments you provide and how each of those assessments counts in the final scoring\n for a student\u2019s grade.',
'Because you use a graded assessment to rate performance and measure success, your\n practical choices are:',
'Only let students take a graded assessment once. In this case, you can tell your\n students which of their answers are incorrect.',
'Let students take a graded assessment multiple times. In this case, do not tell\n them which answers are incorrect. (If you do, then they'll have no difficulty getting\n 100% when retaking the same assessment.)',
'If you choose to allow your students to take the same graded assessment multiple times,\n consider still giving the students some feedback about what they did wrong. To do this,\n map each assessment question to the corresponding unit and lesson within the course.\n Then immediately after submission of the assessment, show students the score and list\n the lessons to review to improve their score.',
'Social interactions',
'Another critical component of a successful course is student participation. Online office\n hours and asking questions of the experts are some examples to elicit participation.',
'For large online courses, the size of the audience means that it is impractical for the\n course staff to answer all of the questions and to enter all of the discussions posed by\n all of the students. Instead, you can set up avenues in which the students can\n participate not just with the instructor but also with other students.',
'The most common types of social interactions are:',
'<a#1>Google Groups or other web\n forum</a#1><br#2 />\n A web forum is a great way to get your students to talk to each other. To facilitate\n discussion, you can set up your forum with appropriate categories, to guide students to\n likely places to read and to post questions on particular topics within your course.\n When designing the content of your course, consider creating activities requesting that\n students post answers to the forum. You can also use a forum to post material that you\n do not want in the main body of your course, either because it is background material\n for students who need a bit more help or more challenging questions for more advanced\n students.',
'<a#1>Google+ page or\n blog</a#1><br#2 />\n Use Google+ or your blog to share information that you want available to not just your\n students, but to other people as well. While students can comment on your posts, these\n formats are still primarily methods for instructors to push information out to the\n students.',
'<a#1>Google Hangout</a#1><br#2 />\n You may decide that you want your students to divide into smaller groups to work on\n projects together. Your students probably live in distributed areas. You can have them\n meet in a Google Hangout to collaborate on their project.',
'<a#1>Announcements-only\n email alias</a#1><br#2 />\n Throughout the course, you may want to send email to students, such as to remind them\n of upcoming events.',
'In addition to these things that you set up, students may create additional interaction\n mechanisms, perhaps an email alias for students interested in a particular aspect of the\n course material or weekly in-person meetings for students living close to each other.',
'Administrative tasks',
'Of course, as with any class there are various administrative aspects to creating an\n online course. Two of the major ones are <a#1>managing student\n registration</a#1> and <a#2>collecting and analyzing data\n to see how well your course does</a#2>.',
'For a full list of tasks needed to create a course, see the <a#1>Course Builder\n Checklist</a#1>.',
'<a#1>Google</a#1>',
'<a#1>About Google</a#1>',
'<a#1>Privacy & Terms</a#1>'
]
# pylint: enable=line-too-long
if __name__ == '__main__':
run_all_unit_tests()
| [
[
8,
0,
0.0233,
0.0356,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0416,
0.0004,
0,
0.66,
0.0244,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0427,
0.0004,
0,
0.66,... | [
"\"\"\"HTML content transformation and manipulation functions.\n\nAbout\n\n This module performs complex HTML document transformations, which enable\n machine-assisted internationalization (I18N) of content.",
"__author__ = 'Pavel Simakov (psimakov@google.com)'",
"import difflib",
"import htmlentitydefs... |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper classes to implement caching."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import collections
import datetime
import logging
import sys
import threading
import unittest
import appengine_config
from models.counters import PerfCounter
def iter_all(query, batch_size=100):
    """Yields every result of a datastore query, paging via cursors.

    Re-runs the query from the last seen cursor until a pass produces no
    entities; suitable for result sets too large for a single fetch.
    """
    cursor = None
    while True:
        saw_any = False
        query = query.with_cursor(cursor)
        for record in query.run(batch_size=batch_size):
            saw_any = True
            yield record
        cursor = query.cursor()
        if not saw_any:
            return
class AbstractScopedSingleton(object):
    """A singleton whose lifetime is tied to a container object.

    The instance is stored inside CONTAINER; wiping the container lets the
    instance be garbage collected. Use a plain dict you clear yourself, or
    a threading.local whose contents vanish automatically on thread exit.
    """

    CONTAINER = None

    @classmethod
    def _instances(cls):
        # Lazily create the per-container registry of live instances.
        assert cls.CONTAINER is not None
        return cls.CONTAINER.setdefault('instances', {})

    @classmethod
    def instance(cls, *args, **kwargs):
        """Creates a new instance or returns the existing one."""
        # pylint: disable=protected-access
        existing = cls._instances().get(cls)
        if not existing:
            try:
                existing = cls(*args, **kwargs)
            except:
                logging.exception(
                    'Failed to instantiate %s: %s, %s', cls, args, kwargs)
                raise
            appengine_config.log_appstats_event('%s.create' % cls.__name__, {})
            # Remember construction args so later calls can be validated.
            existing._init_args = (args, kwargs)
            cls._instances()[cls] = existing
            return existing
        prior_args = existing._init_args
        current_args = (args, kwargs)
        if current_args != prior_args:
            raise AssertionError(
                'Singleton initiated with %s already exists. '
                'Failed to re-initialized it with %s.' % (
                    prior_args, current_args))
        return existing

    @classmethod
    def clear_all(cls):
        """Clears every active instance in this container."""
        if cls._instances():
            for live in list(cls._instances().values()):
                live.clear()
            del cls.CONTAINER['instances']

    def clear(self):
        """Destroys this instance and removes it from the registry."""
        appengine_config.log_appstats_event(
            '%s.destroy' % self.__class__.__name__, {})
        registry = self._instances()
        if registry.get(self.__class__):
            del registry[self.__class__]
# Module-level containers backing the two concrete singleton scopes below.
_process_scoped_singleton = {}
# threading.local: its __dict__ is discarded automatically at thread exit.
_request_scoped_singleton = threading.local()
class ProcessScopedSingleton(AbstractScopedSingleton):
    """A singleton object bound to the process."""
    CONTAINER = _process_scoped_singleton
class RequestScopedSingleton(AbstractScopedSingleton):
    """A singleton object bound to the request scope."""
    # Binds the dict of a thread-local, so each request-serving thread has
    # its own instance registry that is wiped when the thread ends.
    CONTAINER = _request_scoped_singleton.__dict__
class LRUCache(object):
    """A dict-like store with capped capacity and LRU eviction of items.

    Capacity can be limited by item count, by total byte size, or both (at
    least one limit is required). put() evicts least-recently-used entries
    to make room; get() refreshes an entry's recency.
    """

    def __init__(
        self, max_item_count=None,
        max_size_bytes=None, max_item_size_bytes=None):
        assert max_item_count or max_size_bytes
        if max_item_count:
            assert max_item_count > 0
        if max_size_bytes:
            assert max_size_bytes > 0
        # Running total of entry sizes; only maintained when max_size_bytes
        # is set.
        self.total_size = 0
        self.max_item_count = max_item_count
        self.max_size_bytes = max_size_bytes
        self.max_item_size_bytes = max_item_size_bytes
        # Insertion order doubles as LRU order: oldest entries come first.
        self.items = collections.OrderedDict([])

    def get_entry_size(self, key, value):
        """Computes item size. Override and compute properly for your items."""
        return sys.getsizeof(key) + sys.getsizeof(value)

    def _compute_current_size(self):
        """Recomputes the total size of all entries from first principles.

        Bug fix: this previously called nonexistent self.get_item_size()
        and added sys.getsizeof(key) on top (double-counting the key). It
        now delegates to get_entry_size(), matching the accounting done in
        _allocate_space() and honoring subclass overrides.
        """
        total = 0
        for key, value in self.items.items():
            total += self.get_entry_size(key, value)
        return total

    def _allocate_space(self, key, value):
        """Remove items in FIFO order until size constraints are met."""
        entry_size = self.get_entry_size(key, value)
        # An entry larger than the per-item cap can never be stored.
        if self.max_item_size_bytes and entry_size > self.max_item_size_bytes:
            return False
        while True:
            over_count = False
            over_size = False
            if self.max_item_count:
                over_count = len(self.items) >= self.max_item_count
            if self.max_size_bytes:
                over_size = self.total_size + entry_size >= self.max_size_bytes
            if not (over_count or over_size):
                if self.max_size_bytes:
                    self.total_size += entry_size
                    assert self.total_size < self.max_size_bytes
                return True
            if self.items:
                # Evict the least recently used entry (front of the dict).
                _key, _value = self.items.popitem(last=False)
                if self.max_size_bytes:
                    self.total_size -= self.get_entry_size(_key, _value)
                    assert self.total_size >= 0
            else:
                # Nothing left to evict and still over budget.
                break
        return False

    def _record_access(self, key):
        """Pop and re-add the item so it becomes the most recently used."""
        item = self.items.pop(key)
        self.items[key] = item

    def contains(self, key):
        """Checks if item is contained without accessing it."""
        assert key
        return key in self.items

    def put(self, key, value):
        """Stores value under key; returns False if it cannot be fitted."""
        assert key
        if self._allocate_space(key, value):
            self.items[key] = value
            return True
        return False

    def get(self, key):
        """Returns (found, value); a hit refreshes the item's recency."""
        assert key
        if key in self.items:
            self._record_access(key)
            return True, self.items[key]
        return False, None

    def delete(self, key):
        """Removes key if present; returns whether anything was removed.

        Bug fix: also releases the entry's bytes from total_size, which
        previously stayed inflated after deletions and caused premature
        evictions under a max_size_bytes cap.
        """
        assert key
        if key in self.items:
            if self.max_size_bytes:
                self.total_size -= self.get_entry_size(key, self.items[key])
                assert self.total_size >= 0
            del self.items[key]
            return True
        return False
class NoopCacheConnection(object):
    """A cache connection that never stores anything.

    Stands in for a real connection when caching is disabled: every write
    is discarded and every lookup is a miss.
    """

    def put(self, *unused_args, **unused_kwargs):
        # Accept the write and drop it.
        return None

    def get(self, *unused_args, **unused_kwargs):
        # (found, value) protocol: always report a miss.
        return False, None

    def delete(self, *unused_args, **unused_kwargs):
        return None
class AbstractCacheEntry(object):
    """Base class for the in-cache representation of an object.

    Deletions are not tracked by the cache, so a deleted item can linger
    for up to CACHE_ENTRY_TTL_SEC seconds before it expires.
    """

    CACHE_ENTRY_TTL_SEC = 5 * 60

    @classmethod
    def internalize(cls, unused_key, *args, **kwargs):
        """Converts incoming objects into a cache entry object."""
        return (args, kwargs)

    @classmethod
    def externalize(cls, unused_key, *args, **kwargs):
        """Converts a cache entry back into its external object."""
        return (args, kwargs)

    def has_expired(self):
        """True once this entry is older than its TTL (vs. self.created_on)."""
        elapsed = datetime.datetime.utcnow() - self.created_on
        return elapsed.total_seconds() > self.CACHE_ENTRY_TTL_SEC

    def is_up_to_date(self, unused_key, unused_update):
        """Compare entry and the update object to decide if entry is fresh."""
        raise NotImplementedError()

    def updated_on(self):
        """Return last update time for entity."""
        raise NotImplementedError()
class AbstractCacheConnection(object):
    """Base class for a namespaced cache kept in sync with a datastore model.

    Subclasses bind PERSISTENT_ENTITY (the datastore model that is the
    source of truth) and CACHE_ENTRY (the AbstractCacheEntry subclass used
    as the in-cache representation), and must create self.cache in
    __init__.
    """
    # Datastore model class backing this cache; set by subclasses.
    PERSISTENT_ENTITY = None
    # AbstractCacheEntry subclass used for cached values; set by subclasses.
    CACHE_ENTRY = None
    @classmethod
    def init_counters(cls):
        """Creates the per-subclass performance counters used below."""
        name = cls.__name__
        cls.CACHE_RESYNC = PerfCounter(
            'gcb-models-%s-cache-resync' % name,
            'A number of times an vfs cache was updated.')
        cls.CACHE_PUT = PerfCounter(
            'gcb-models-%s-cache-put' % name,
            'A number of times an object was put into cache.')
        cls.CACHE_GET = PerfCounter(
            'gcb-models-%s-cache-get' % name,
            'A number of times an object was pulled from cache.')
        cls.CACHE_DELETE = PerfCounter(
            'gcb-models-%s-cache-delete' % name,
            'A number of times an object was deleted from cache.')
        cls.CACHE_HIT = PerfCounter(
            'gcb-models-%s-cache-hit' % name,
            'A number of times an object was found cache.')
        cls.CACHE_HIT_NONE = PerfCounter(
            'gcb-models-%s-cache-hit-none' % name,
            'A number of times an object was found cache, but it was None.')
        cls.CACHE_MISS = PerfCounter(
            'gcb-models-%s-cache-miss' % name,
            'A number of times an object was not found in the cache.')
        cls.CACHE_NOT_FOUND = PerfCounter(
            'gcb-models-%s-cache-not-found' % name,
            'A number of times an object was requested, but was not found in '
            'the cache or underlying provider.')
        cls.CACHE_UPDATE_COUNT = PerfCounter(
            'gcb-models-%s-cache-update-count' % name,
            'A number of update objects received.')
        cls.CACHE_EVICT = PerfCounter(
            'gcb-models-%s-cache-evict' % name,
            'A number of times an object was evicted from cache because it was '
            'changed.')
        cls.CACHE_EXPIRE = PerfCounter(
            'gcb-models-%s-cache-expire' % name,
            'A number of times an object has expired from cache because it was '
            'too old.')
    @classmethod
    def make_key_prefix(cls, ns):
        # Scopes every cache key to this connection class plus namespace.
        return '%s:%s' % (cls.__name__, ns)
    @classmethod
    def make_key(cls, ns, entry_key):
        return '%s:%s' % (cls.make_key_prefix(ns), entry_key)
    @classmethod
    def is_enabled(cls):
        """Subclasses report whether caching is turned on."""
        raise NotImplementedError()
    @classmethod
    def new_connection(cls, *args, **kwargs):
        """Returns a live connection, or a no-op one if caching is disabled."""
        if not cls.is_enabled():
            return NoopCacheConnection()
        conn = cls(*args, **kwargs)
        # Evict any entries made stale since the cache was last written.
        # pylint: disable=protected-access
        conn.apply_updates(conn._get_incremental_updates())
        return conn
    def __init__(self, namespace):
        """Override this method and properly instantiate self.cache."""
        self.namespace = namespace
        self.cache = None
        appengine_config.log_appstats_event(
            '%s.connect' % self.__class__.__name__, {'namespace': namespace})
    def apply_updates(self, updates):
        """Applies a list of global changes to the local cache.

        Each update is compared to the cached entry under the same key; any
        entry that is None, stale, or expired is evicted. Keys not already
        cached are ignored — nothing is pre-loaded here.
        """
        self.CACHE_RESYNC.inc()
        for key, update in updates.iteritems():
            _key = self.make_key(self.namespace, key)
            found, entry = self.cache.get(_key)
            if not found:
                continue
            # A cached None is treated as unconditionally stale.
            if entry is None:
                self.CACHE_EVICT.inc()
                self.cache.delete(_key)
                continue
            if not entry.is_up_to_date(key, update):
                self.CACHE_EVICT.inc()
                self.cache.delete(_key)
                continue
            if entry.has_expired():
                self.CACHE_EXPIRE.inc()
                self.cache.delete(_key)
                continue
    def _get_most_recent_updated_on(self):
        """Finds the newest updated_on timestamp among cached entries.

        Returns:
            (has_items, max_updated_on): has_items is True if any entry in
            this connection's namespace exists; max_updated_on is the
            newest timestamp seen, or the epoch if none carry one.
        """
        has_items = False
        max_updated_on = datetime.datetime.fromtimestamp(0)
        prefix = self.make_key_prefix(self.namespace)
        # NOTE(review): reaches into self.cache.items, so self.cache is
        # expected to expose an LRUCache-compatible mapping — confirm in
        # subclasses.
        for key, entry in self.cache.items.iteritems():
            if not key.startswith(prefix):
                continue
            has_items = True
            if not entry:
                continue
            updated_on = entry.updated_on()
            if not updated_on:  # old entities may be missing this field
                updated_on = datetime.datetime.fromtimestamp(0)
            if updated_on > max_updated_on:
                max_updated_on = updated_on
        return has_items, max_updated_on
    def get_updates_when_empty(self):
        """Override this method to pre-load cache when it's completely empty."""
        return {}
    def _get_incremental_updates(self):
        """Gets a list of global changes newer than the most recent item cached.
        WARNING!!! We fetch the updates since the timestamp of the oldest item
        we have cached so far. This will bring all objects that have changed or
        were created since that time.
        This will NOT bring the notifications about object deletions. Thus cache
        will continue to serve deleted objects until they expire.
        Returns:
            a dict of {key: update} objects that represent recent updates
        """
        has_items, updated_on = self._get_most_recent_updated_on()
        if not has_items:
            return self.get_updates_when_empty()
        q = self.PERSISTENT_ENTITY.all()
        # NOTE(review): updated_on is always truthy here (at minimum the
        # epoch datetime), so this filter is applied unconditionally.
        if updated_on:
            q.filter('updated_on > ', updated_on)
        result = {
            entity.key().name(): entity for entity in iter_all(q)}
        self.CACHE_UPDATE_COUNT.inc(len(result.keys()))
        return result
    def put(self, key, *args):
        """Internalizes *args via CACHE_ENTRY and stores them under key."""
        self.CACHE_PUT.inc()
        self.cache.put(
            self.make_key(self.namespace, key),
            self.CACHE_ENTRY.internalize(key, *args))
    def get(self, key):
        """Returns (found, value); expired entries are deleted and missed."""
        self.CACHE_GET.inc()
        _key = self.make_key(self.namespace, key)
        found, entry = self.cache.get(_key)
        if not found:
            self.CACHE_MISS.inc()
            return False, None
        # A cached falsy entry counts as a hit whose value is None.
        if not entry:
            self.CACHE_HIT_NONE.inc()
            return True, None
        if entry.has_expired():
            self.CACHE_EXPIRE.inc()
            self.cache.delete(_key)
            return False, None
        self.CACHE_HIT.inc()
        return True, self.CACHE_ENTRY.externalize(key, entry)
    def delete(self, key):
        self.CACHE_DELETE.inc()
        self.cache.delete(self.make_key(self.namespace, key))
class LRUCacheTests(unittest.TestCase):
    """Unit tests for LRUCache eviction, sizing, and LRU ordering.

    Each test is order-sensitive: the cache is stateful and every put/get
    changes which entry is least recently used.
    """
    def test_ordereddict_works(self):
        # Sanity-check the OrderedDict behavior LRUCache relies on:
        # popitem(last=False) removes the oldest entry, last=True the newest.
        _dict = collections.OrderedDict([])
        _dict['a'] = '1'
        _dict['b'] = '2'
        _dict['c'] = '3'
        self.assertEqual(('a', '1'), _dict.popitem(last=False))
        self.assertEqual(('c', '3'), _dict.popitem(last=True))
    def test_initialization(self):
        # At least one cap is required, and caps must be positive.
        with self.assertRaises(AssertionError):
            LRUCache()
        with self.assertRaises(AssertionError):
            LRUCache(max_item_count=-1)
        with self.assertRaises(AssertionError):
            LRUCache(max_size_bytes=-1)
        LRUCache(max_item_count=1)
        LRUCache(max_size_bytes=1)
    def test_evict_by_count(self):
        cache = LRUCache(max_item_count=3)
        self.assertTrue(cache.put('a', '1'))
        self.assertTrue(cache.put('b', '2'))
        self.assertTrue(cache.put('c', '3'))
        # contains() does not refresh recency, so 'a' stays oldest...
        self.assertTrue(cache.contains('a'))
        self.assertTrue(cache.put('d', '4'))
        # ...and is the entry evicted to make room for 'd'.
        self.assertFalse(cache.contains('a'))
        self.assertEquals(cache.get('a'), (False, None))
    def test_evict_by_count_lru(self):
        cache = LRUCache(max_item_count=3)
        self.assertTrue(cache.put('a', '1'))
        self.assertTrue(cache.put('b', '2'))
        self.assertTrue(cache.put('c', '3'))
        # get() refreshes 'a', making 'b' the LRU victim for the next put.
        self.assertEquals(cache.get('a'), (True, '1'))
        self.assertTrue(cache.put('d', '4'))
        self.assertTrue(cache.contains('a'))
        self.assertFalse(cache.contains('b'))
    def test_evict_by_size(self):
        # Byte budget: empty-dict overhead plus room for roughly three
        # small entries.
        min_size = sys.getsizeof(LRUCache(max_item_count=1).items)
        item_size = sys.getsizeof('a1')
        cache = LRUCache(max_size_bytes=min_size + 3 * item_size)
        self.assertTrue(cache.put('a', '1'))
        self.assertTrue(cache.put('b', '2'))
        self.assertTrue(cache.put('c', '3'))
        # Too large to ever fit, even after evicting everything.
        self.assertFalse(cache.put('d', bytearray(1000)))
    def test_evict_by_size_lru(self):
        cache = LRUCache(max_size_bytes=5000)
        self.assertTrue(cache.put('a', bytearray(4500)))
        self.assertTrue(cache.put('b', '2'))
        self.assertTrue(cache.put('c', '3'))
        self.assertTrue(cache.contains('a'))
        # The big, oldest entry 'a' is evicted to make room for 'd'.
        self.assertTrue(cache.put('d', bytearray(1000)))
        self.assertFalse(cache.contains('a'))
        self.assertTrue(cache.contains('b'))
    def test_max_item_size(self):
        # Entries over max_item_size_bytes are rejected outright rather
        # than triggering evictions.
        cache = LRUCache(max_size_bytes=5000, max_item_size_bytes=1000)
        self.assertFalse(cache.put('a', bytearray(4500)))
        self.assertEquals(cache.get('a'), (False, None))
        self.assertTrue(cache.put('a', bytearray(500)))
        found, _ = cache.get('a')
        self.assertTrue(found)
class SingletonTests(unittest.TestCase):
    """Unit tests for the scoped-singleton instance() lifecycle."""
    def test_singleton(self):
        class A(RequestScopedSingleton):
            def __init__(self, data):
                self.data = data
        class B(RequestScopedSingleton):
            def __init__(self, data):
                self.data = data
        # TODO(psimakov): prevent direct instantiation
        A('aaa')
        B('bbb')
        # using instance() creates and returns the same instance
        RequestScopedSingleton.clear_all()
        a = A.instance('bar')
        b = A.instance('bar')
        assert a.data == 'bar'
        assert b.data == 'bar'
        assert a is b
        # re-initialization fails if arguments differ; the plain Exception
        # below escapes the AssertionError handler and fails the test.
        RequestScopedSingleton.clear_all()
        a = A.instance('dog')
        try:
            b = A.instance('cat')
            raise Exception('Expected to fail.')
        except AssertionError:
            pass
        # clearing one singleton keeps the others alive
        RequestScopedSingleton.clear_all()
        a = A.instance('bar')
        b = B.instance('cat')
        a.clear()
        c = B.instance('cat')
        assert c is b
        # clearing all clears all
        RequestScopedSingleton.clear_all()
        a = A.instance('bar')
        b = B.instance('cat')
        RequestScopedSingleton.clear_all()
        c = A.instance('bar')
        d = B.instance('cat')
        assert a is not c
        assert b is not d
def run_all_unit_tests():
    """Runs all unit tests in this module."""
    loader = unittest.TestLoader()
    suites = [
        loader.loadTestsFromTestCase(case)
        for case in [LRUCacheTests, SingletonTests]]
    unittest.TextTestRunner().run(unittest.TestSuite(suites))
# Run this module's self-tests when executed directly.
if __name__ == '__main__':
    run_all_unit_tests()
| [
[
8,
0,
0.0274,
0.0018,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0311,
0.0018,
0,
0.66,
0.0435,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0366,
0.0018,
0,
0.66,... | [
"\"\"\"Helper classes to implement caching.\"\"\"",
"__author__ = 'Pavel Simakov (psimakov@google.com)'",
"import collections",
"import datetime",
"import logging",
"import sys",
"import threading",
"import unittest",
"import appengine_config",
"from models.counters import PerfCounter",
"def ite... |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapping from schema to backend properties."""
__author__ = 'Abhinav Khandelwal (abhinavk@google.com)'
import collections
import copy
import json
class Property(object):
    """A single named schema field with display metadata."""

    def __init__(
        self, name, label, property_type, select_data=None, description=None,
        optional=False, extra_schema_dict_values=None):
        # 'properties' is reserved: child fields are emitted under that key
        # when JSON schema dictionaries are generated.
        if name == 'properties':
            raise ValueError('Cannot name a field "properties"; this conflicts '
                             'with the use of "properties" in generating JSON '
                             'schema dictionaries.')
        self._name = name
        self._label = label
        self._property_type = property_type
        self._description = description
        self._optional = optional
        self._select_data = select_data
        self._extra_schema_dict_values = extra_schema_dict_values or {}

    def __str__(self):
        return '{0}#{1}'.format(self._name, self._property_type)

    @property
    def type(self):
        return self._property_type

    @property
    def name(self):
        return self._name

    @property
    def description(self):
        return self._description

    @property
    def extra_schema_dict_values(self):
        return self._extra_schema_dict_values

    @property
    def label(self):
        return self._label

    def set_select_data(self, select_data):
        self._select_data = select_data

    def get_display_dict(self):
        """Returns a plain dict used to render this field in the UI."""
        display = {}
        display['name'] = self._name
        display['label'] = self._label
        display['repeated'] = False
        display['description'] = self._description
        return display
class Registry(object):
    """An ordered collection of Property fields and nested sub-registries."""

    def __init__(self, title, description=None, extra_schema_dict_values=None):
        self._title = title
        # Seed of the JSON-schema dict emitted for this registry.
        self._registry = {'id': title, 'type': 'object'}
        self._description = description
        if description:
            self._registry['description'] = description
        self._extra_schema_dict_values = extra_schema_dict_values
        self._properties = []
        # Ordered so output preserves registration order.
        self._sub_registries = collections.OrderedDict()

    @property
    def title(self):
        return self._title

    @property
    def sub_registries(self):
        return self._sub_registries

    def add_property(self, schema_field):
        """Add a Property to this Registry."""
        self._properties.append(schema_field)

    def get_property(self, property_name):
        """Returns the first property with the given name, or None."""
        for prop in self._properties:
            if prop.name == property_name:
                return prop
        return None

    def get_sub_registry(self, sub_registry_name):
        return self._sub_registries.get(sub_registry_name)

    def remove_property(self, property_name):
        """Removes and returns the named property; returns None if absent."""
        prop = self.get_property(property_name)
        if prop:
            return self._properties.pop(self._properties.index(prop))

    def add_sub_registry(
        self, name, title=None, description=None, registry=None):
        """Adds (creating if necessary) a sub registry for this Registry."""
        if not registry:
            registry = Registry(title, description)
        self._sub_registries[name] = registry
        return registry

    def has_subregistries(self):
        return True if self._sub_registries else False

    def get_display_dict(self):
        """Returns a plain dict describing this registry for UI display."""
        return {
            'title': self._title,
            'properties': [p.get_display_dict() for p in self._properties],
            'registries': [r.get_display_dict()
                           for r in self._sub_registries.values()],
        }

    def clone_only_items_named(self, paths):
        """Clone only the selected items from a registry.

        Args:
            paths: Each item is a path into the schema, with slashes as
              separators. E.g., "foo" would match things at the top level
              named "foo". Similarly, 'foo/bar/baz' looks in sub-schema
              "foo" for a sub-schema "bar", and within that, "baz." The
              returned schema would include not just the leaf item, but
              sub-registry 'foo' containing 'bar', containing 'baz'.
              NOTE - Schema hierarchy components are stored separately from
              properties, and so "foo" may well match _both_ a subschema
              _and_ a property, if someone were unwise enough to build
              a schema with overloaded names.
              Also note that colons in names are not special to this
              function, though they may well have special meaning to, e.g.,
              the course schema mapping to course.yaml dict hierarchy.
              Picking out a single such field would use a name such as
              "registration/course:send_welcome_notifications".
        Returns:
            A schema with only the named items present.
        """
        # Arbitrary depth instantiate-on-reference dict constructor.
        treebuilder = lambda: collections.defaultdict(treebuilder)
        # Build a tree of nodes from the given paths.
        root = treebuilder()
        for path in paths:
            node = root
            for part in path.split('/'):
                node = node[part]
        registry = copy.deepcopy(self)

        def delete_all_but(registry, node):
            # pylint: disable=protected-access
            # Copy so deleting does not wreck the iterator.
            for prop in copy.copy(registry._properties):
                if prop.name not in node:
                    registry._properties.remove(prop)
            # Bug fix: iterate over a snapshot. The original iterated the
            # live dict (iteritems) while deleting from it, which raises
            # "dictionary changed size during iteration" whenever a
            # sub-registry is actually pruned.
            for name, value in list(registry._sub_registries.items()):
                # If this subregistry is not named at all, remove it.
                if name not in node:
                    del registry._sub_registries[name]
                # If the paths-to-save gives sub-entries within this
                # node, then proceed into the node to prune its members.
                # Otherwise, do nothing, leaving the node and all its
                # children in place.
                elif node[name]:
                    delete_all_but(value, node[name])

        delete_all_but(registry, root)
        return registry
class SchemaField(Property):
    """SchemaField defines a simple field.

    Extends Property with editor-oriented flags (hidden/editable), an
    optional i18n marker, and an optional validator callable.
    """

    def __init__(
        self, name, label, property_type, select_data=None, description=None,
        optional=False, hidden=False, editable=True, i18n=None,
        extra_schema_dict_values=None, validator=None):
        Property.__init__(
            self, name, label, property_type, select_data=select_data,
            description=description, optional=optional,
            extra_schema_dict_values=extra_schema_dict_values)
        self._hidden = hidden
        self._editable = editable
        self._validator = validator
        self._i18n = i18n

    @property
    def hidden(self):
        return self._hidden

    @property
    def editable(self):
        return self._editable

    @property
    def i18n(self):
        return self._i18n

    def get_json_schema_dict(self):
        """Get the JSON schema for this field."""
        prop = {}
        prop['type'] = self._property_type
        if self._optional:
            prop['optional'] = self._optional
        if self._description:
            prop['description'] = self._description
        if self._i18n:
            prop['i18n'] = self._i18n
        return prop

    def _get_schema_dict(self, prefix_key):
        """Get Schema annotation dictionary for this field.

        Returns:
            A list of (key-path, schema-annotation-dict) pairs.
        """
        # NOTE(review): when extra_schema_dict_values was supplied, this
        # aliases and mutates that dict rather than copying it, so repeated
        # calls accumulate keys into instance state. Preserved as-is;
        # confirm no caller relies on the mutation before changing.
        if self._extra_schema_dict_values:
            schema = self._extra_schema_dict_values
        else:
            schema = {}
        schema['label'] = self._label
        if self._hidden:
            schema['_type'] = 'hidden'
        elif not self._editable:
            schema['_type'] = 'uneditable'
        elif self._select_data and '_type' not in schema:
            schema['_type'] = 'select'
        # Bug fix: was "'date' is self._property_type", an identity check
        # that only works while CPython happens to intern both strings; use
        # value equality instead.
        if self._property_type == 'date':
            if 'dateFormat' not in schema:
                schema['dateFormat'] = 'Y/m/d'
            if 'valueFormat' not in schema:
                schema['valueFormat'] = 'Y/m/d'
        elif self._select_data:
            choices = []
            for value, label in self._select_data:
                choices.append(
                    {'value': value, 'label': unicode(label)})
            schema['choices'] = choices
        if self._description:
            schema['description'] = self._description
        return [(prefix_key + ['_inputex'], schema)]

    def validate(self, value, errors):
        """Runs the configured validator (if any) against value."""
        if self._validator:
            self._validator(value, errors)
class FieldArray(SchemaField):
    """An array-typed schema field whose items are simple or object types."""

    def __init__(
        self, name, label, description=None, item_type=None,
        optional=False, extra_schema_dict_values=None):
        """Initializes the array field; item_type describes each element."""
        super(FieldArray, self).__init__(
            name, label, 'array', description=description,
            optional=optional,
            extra_schema_dict_values=extra_schema_dict_values)
        self._item_type = item_type

    @property
    def item_type(self):
        """The schema object describing a single array element."""
        return self._item_type

    def get_json_schema_dict(self):
        """Extends the base JSON schema with the item type's schema."""
        schema = super(FieldArray, self).get_json_schema_dict()
        schema['items'] = self._item_type.get_json_schema_dict()
        return schema

    def _get_schema_dict(self, prefix_key):
        """Appends the item type's annotations under the 'items' key."""
        # pylint: disable=protected-access
        item_entries = self._item_type._get_schema_dict(
            prefix_key + ['items'])
        # pylint: enable=protected-access
        return super(
            FieldArray, self)._get_schema_dict(prefix_key) + item_entries

    def get_display_dict(self):
        """Marks the display dict as repeated and embeds the item type's."""
        display = super(FieldArray, self).get_display_dict()
        display['repeated'] = True
        display['item_type'] = self.item_type.get_display_dict()
        return display
class FieldRegistry(Registry):
    """FieldRegistry is an object with SchemaField properties.

    A registry holds an ordered list of SchemaField properties plus named
    sub-registries, and can emit JSON schema / inputEx annotations as well
    as convert between flat 'a:b:c'-keyed JSON and nested entity dicts.
    """

    def add_sub_registry(
        self, name, title=None, description=None, registry=None):
        """Adds a sub registry to this Registry, creating one if needed."""
        if not registry:
            registry = FieldRegistry(title, description=description)
        self._sub_registries[name] = registry
        return registry

    def get_json_schema_dict(self):
        """Returns the JSON schema dict for this registry and its children."""
        schema_dict = dict(self._registry)
        schema_dict['properties'] = collections.OrderedDict()
        for schema_field in self._properties:
            schema_dict['properties'][schema_field.name] = (
                schema_field.get_json_schema_dict())
        for key, sub_registry in self._sub_registries.items():
            schema_dict['properties'][key] = (
                sub_registry.get_json_schema_dict())
        return schema_dict

    def get_json_schema(self):
        """Get the json schema for this API."""
        return json.dumps(self.get_json_schema_dict())

    def _get_schema_dict(self, prefix_key):
        """Get schema dict for this API."""
        schema_dict = [(prefix_key + ['title'], self._title)]
        if self._extra_schema_dict_values:
            schema_dict.append(
                [prefix_key + ['_inputex'], self._extra_schema_dict_values])
        base_key = prefix_key + ['properties']
        # pylint: disable=protected-access
        for schema_field in self._properties:
            schema_dict += schema_field._get_schema_dict(
                base_key + [schema_field.name])
        for key, sub_registry in self._sub_registries.items():
            schema_dict += sub_registry._get_schema_dict(base_key + [key])
        # pylint: enable=protected-access
        return schema_dict

    def get_schema_dict(self):
        """Get schema dict for this API."""
        return self._get_schema_dict([])

    @classmethod
    def _add_entry(cls, key_part_list, value, entity):
        """Sets value into nested entity; key_part_list holds reversed parts."""
        if len(key_part_list) == 1:
            entity[key_part_list[0]] = value
            return
        key = key_part_list.pop()
        if key not in entity:
            entity[key] = {}
        else:
            assert isinstance(entity[key], dict)
        cls._add_entry(key_part_list, value, entity[key])

    @classmethod
    def convert_json_to_entity(cls, json_entry, entity):
        """Expands flat 'a:b'-keyed JSON into the nested entity dict."""
        assert isinstance(json_entry, dict)
        for key, value in json_entry.items():
            if isinstance(value, dict):
                cls.convert_json_to_entity(value, entity)
            else:
                key_parts = key.split(':')
                key_parts.reverse()
                cls._add_entry(key_parts, value, entity)

    @classmethod
    def _get_field_name_parts(cls, field_name):
        """Splits 'a:b:c' into reversed parts for _get_field_value."""
        field_name_parts = field_name.split(':')
        field_name_parts.reverse()
        return field_name_parts

    @classmethod
    def _get_field_value(cls, key_part_list, entity):
        """Looks up a value in the nested entity; returns None when missing."""
        if len(key_part_list) == 1:
            if isinstance(entity, dict) and key_part_list[0] in entity:
                return entity[key_part_list[0]]
            return None
        key = key_part_list.pop()
        # Guard on dict-ness so malformed entities yield None rather than
        # raising (the original assumed a dict at every level).
        if isinstance(entity, dict) and key in entity:
            return cls._get_field_value(key_part_list, entity[key])
        return None

    def convert_entity_to_json_entity(self, entity, json_entry):
        """Flattens the nested entity into 'a:b'-keyed json_entry."""
        for schema_field in self._properties:
            field_name = schema_field.name
            field_name_parts = self._get_field_name_parts(field_name)
            value = self._get_field_value(field_name_parts, entity)
            if value is not None:
                json_entry[field_name] = value
        for key, sub_registry in self._sub_registries.items():
            json_entry[key] = {}
            sub_registry.convert_entity_to_json_entity(
                entity, json_entry[key])

    def validate(self, payload, errors):
        """Validates payload against all fields and sub-registries."""
        for schema_field in self._properties:
            field_name_parts = self._get_field_name_parts(schema_field.name)
            value = self._get_field_value(field_name_parts, payload)
            schema_field.validate(value, errors)
        for registry in self._sub_registries.values():
            registry.validate(payload, errors)

    @classmethod
    def is_complex_name(cls, name):
        """Whether name addresses a nested value, e.g. 'a:b'."""
        return ':' in name

    @classmethod
    def compute_name(cls, parent_names):
        """Computes non-indexed and indexed entity name given parent names."""
        parts = []
        for parent_name in parent_names:
            # startswith/endswith also handles the empty string safely,
            # where the original parent_name[0] indexing would raise.
            if parent_name.startswith('[') and parent_name.endswith(']'):
                parts.append('[]')
            else:
                parts.append(parent_name)
        return ':'.join(parts), ':'.join(parent_names)
class SchemaFieldValue(object):
    """Couples a named value with the SchemaField describing its type.

    Assigning to .value both updates this wrapper and writes the new value
    back into the underlying data structure through the supplied setter.
    """

    def __init__(self, name, field, value, setter):
        """Initializer.

        Args:
            name: a name of the value
            field: SchemaField object that holds the type
            value: Python object that holds the value
            setter: a function which sets the value in the underlying data
                structure
        """
        self._name, self._field = name, field
        self._value, self._setter = value, setter

    @property
    def name(self):
        """Name of this value."""
        return self._name

    @property
    def field(self):
        """SchemaField describing this value's type."""
        return self._field

    @property
    def value(self):
        """The current value."""
        return self._value

    @value.setter
    def value(self, new_value):
        # Keep the wrapper and the backing store in sync.
        self._value = new_value
        self._setter(new_value)
class FieldRegistryIndex(object):
    """Indexes a FieldRegistry for fast name -> SchemaField lookups."""

    def __init__(self, registry):
        self._registry = registry
        self._names_in_order = []
        self._complex_name_to_field = {}
        self._computed_name_to_field = {}

    @property
    def registry(self):
        return self._registry

    @property
    def names_in_order(self):
        return self._names_in_order

    def _inspect_registry(self, parent_names, registry):
        """Walks a registry, recording each field under its lookup key."""
        # pylint: disable=protected-access
        for field in registry._properties:
            if registry.is_complex_name(field.name):
                # Complex ('a:b') names are indexed verbatim.
                key = field.name
                target = self._complex_name_to_field
                child_prefix = [key, '[]']
            else:
                # Simple names are indexed under their full joined path.
                key = ':'.join(parent_names + [field.name])
                target = self._computed_name_to_field
                child_prefix = parent_names + [field.name, '[]']
            if key in target:
                raise KeyError('Field already defined: %s.' % key)
            if isinstance(field, FieldArray):
                # Index the array's item type before the array itself.
                self._inspect_registry(child_prefix, field.item_type)
            target[key] = field
            self._names_in_order.append(key)
        for child_name, child in registry._sub_registries.items():
            self._inspect_registry(parent_names + [child_name], child)
        # pylint: enable=protected-access

    def rebuild(self):
        """Build an index."""
        self._inspect_registry([], self._registry)

    def find(self, name):
        """Finds and returns a field given field name."""
        return (self._complex_name_to_field.get(name)
                or self._computed_name_to_field.get(name))
class FieldFilter(object):
    """Selects schema field names matching criteria on field attributes.

    Each criterion is a collection of allowed values; a None (or empty)
    criterion matches everything.
    """

    def __init__(
        self, type_names=None, hidden_values=None, i18n_values=None,
        editable_values=None):
        self._type_names = type_names
        self._hidden_values = hidden_values
        self._i18n_values = i18n_values
        self._editable_values = editable_values

    def _matches(self, field):
        """True when the field satisfies every configured criterion."""
        for allowed, actual in (
                (self._type_names, field.type),
                (self._hidden_values, field.hidden),
                (self._editable_values, field.editable),
                (self._i18n_values, field.i18n)):
            if allowed and actual not in allowed:
                return False
        return True

    def _filter(self, named_field_list):
        """Filters a list of name, SchemaField pairs."""
        return set(
            name for name, field in named_field_list
            if self._matches(field))

    def filter_value_to_type_binding(self, binding):
        """Returns a set of value names that pass the criterion."""
        return self._filter(
            [(value.name, value.field) for value in binding.value_list])

    def filter_field_registry_index(self, index):
        """Returns the field names in the schema that pass the criterion."""
        return self._filter(
            [(name, index.find(name)) for name in index.names_in_order])
class ValueToTypeBinding(object):
    """This class provides mapping of entity attributes to their types.

    Built via bind_entity_to_schema(): the entity tree is walked
    recursively and each list/leaf is matched against the indexed
    registry, producing SchemaFieldValue wrappers that can read and write
    the original entity in place.
    """

    def __init__(self):
        self.value_list = []  # a list of all encountered SchemaFieldValues
        self.name_to_value = {}  # field name to SchemaFieldValue mapping
        self.name_to_field = {}  # field name to SchemaField mapping
        self.unmapped_names = set()  # a set of field names where mapping failed
        self.index = None  # the indexed set of schema names

    def find_value(self, name):
        """Returns the SchemaFieldValue for name; raises KeyError if absent."""
        return self.name_to_value[name]

    def find_field(self, name):
        """Returns the SchemaField for name; raises KeyError if absent."""
        return self.name_to_field[name]

    @classmethod
    def _get_setter(cls, entity, key):
        # Returns a closure that writes back into entity[key], so a
        # SchemaFieldValue can update the original data structure in place.
        def setter(value):
            entity[key] = value
        return setter

    @classmethod
    def _visit_dict(cls, index, parent_names, entity, binding):
        """Visit dict entity."""
        # Dicts contribute no binding of their own; recurse into members.
        for _name, _value in entity.items():
            cls._decompose_entity(
                index, parent_names + [_name], _value, binding,
                cls._get_setter(entity, _name))

    @classmethod
    def _visit_list(cls, index, parent_names, entity, binding, setter):
        """Visit list entity."""
        # name_no_index ('a:[]') is used for schema lookup; name ('a:[0]')
        # keys the bindings for this concrete list instance.
        name_no_index, name = index.registry.compute_name(parent_names)
        _field = index.find(name_no_index)
        if _field:
            assert isinstance(_field, FieldArray)
            assert name not in binding.name_to_field
            binding.name_to_field[name] = _field
            assert name not in binding.name_to_value, name
            binding.name_to_value[name] = SchemaFieldValue(
                name, _field, entity, setter)
            # Recurse into each element under its '[i]' pseudo-name.
            for _index, _item in enumerate(entity):
                _item_name = '[%s]' % _index
                cls._decompose_entity(
                    index, parent_names + [_item_name], _item, binding,
                    cls._get_setter(entity, _index))
        else:
            assert name not in binding.unmapped_names
            binding.unmapped_names.add(name)

    @classmethod
    def _visit_attribute(cls, index, parent_names, entity, binding, setter):
        """Visit simple attribute."""
        name_no_index, name = index.registry.compute_name(parent_names)
        _field = index.find(name_no_index)
        if _field:
            _value = SchemaFieldValue(name, _field, entity, setter)
            binding.value_list.append(_value)
            assert name not in binding.name_to_value, name
            binding.name_to_value[name] = _value
            assert name not in binding.name_to_field
            binding.name_to_field[name] = _field
        else:
            assert name not in binding.unmapped_names, name
            binding.unmapped_names.add(name)

    @classmethod
    def _decompose_entity(
        cls, index, parent_names, entity, binding, setter):
        """Recursively decomposes entity."""
        # Dispatch on the concrete container type; dicts produce no binding
        # themselves, while lists and leaf attributes do.
        if isinstance(entity, dict):
            cls._visit_dict(index, parent_names, entity, binding)
        elif isinstance(entity, list):
            cls._visit_list(index, parent_names, entity, binding, setter)
        else:
            cls._visit_attribute(index, parent_names, entity, binding, setter)

    @classmethod
    def bind_entity_to_schema(cls, json_dumpable_entity, registry):
        """Connects schema field type information to the entity attributes.

        Args:
            json_dumpable_entity: a Python dict recursively containing other
                dict, list and primitive objects
            registry: a FieldRegistry that holds entity type information

        Returns:
            an instance of ValueToTypeBinding object that maps entity
            attributes to their types
        """
        binding = ValueToTypeBinding()
        index = FieldRegistryIndex(registry)
        index.rebuild()
        cls._decompose_entity(
            index, [], json_dumpable_entity, binding, None)
        binding.index = index
        return binding
| [
[
8,
0,
0.0225,
0.0015,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0254,
0.0015,
0,
0.66,
0.0769,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0284,
0.0015,
0,
0.66,... | [
"\"\"\"Mapping from schema to backend properties.\"\"\"",
"__author__ = 'Abhinav Khandelwal (abhinavk@google.com)'",
"import collections",
"import copy",
"import json",
"class Property(object):\n \"\"\"Property.\"\"\"\n\n def __init__(\n self, name, label, property_type, select_data=None, des... |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module providing simplistic logger."""
__author__ = 'Mike Gainer (mgainer@google.com)'
import datetime
import logging
import traceback
import appengine_config
_LOG_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
_CRITICAL = 'critical'
_WARNING = 'warning'
_INFO = 'info'


class CatchAndLog(object):
    """Collects noteworthy events so handlers can report them in bulk.

    During processing of a request there may be recoverable errors and
    other noteworthy events.  Components note them here so that all of
    them can be reported to the consumer, rather than reporting only the
    first problem or trying to encode multiple events into a single HTTP
    response code.
    """

    class _Catcher(object):
        """Context manager that records raised exceptions as log entries."""

        def __init__(self, log, consume_exceptions, message):
            self._log = log
            self._consume_exceptions = consume_exceptions
            self._message = message

        def __enter__(self):
            return self

        def __exit__(self, ex_type, value, tb):
            if not ex_type:
                return self._consume_exceptions
            # Report the innermost frame, with the install root stripped.
            frame = list(traceback.extract_tb(tb)[-1])
            frame[0] = frame[0].replace(appengine_config.CODE_ROOT, '')
            text = '%s: %s: %s' % (
                self._message, ex_type.__name__, str(value))
            if not appengine_config.PRODUCTION_MODE:
                text += ' at %s' % traceback.format_list([frame])[0]
            self._log.critical(text)
            # True tells Python to swallow the exception; False re-raises.
            return self._consume_exceptions

    def __init__(self):
        self._messages = []

    def consume_exceptions(self, message):
        """Converts exceptions into 'critical' log messages, swallowing them.

        Convenience for contexts where exceptions may be raised but are not
        fatal and should not propagate.  Usage:

            with log.consume_exceptions('Arming mouse trap'):
                mouse_trap.set_bait('Wensleydale')
                mouse_trap.arm()

        Args:
            message: prepended to exception messages to give more context,
                so the consumer sees e.g. 'Arming mouse trap:
                OutOfCheeseException: ...' rather than the bare exception.

        Returns:
            A context manager for use in a 'with' statement.
        """
        return CatchAndLog._Catcher(
            self, consume_exceptions=True, message=message)

    def propagate_exceptions(self, message):
        """Logs exceptions as 'critical' messages, then re-raises them.

        See consume_exceptions() for usage.

        Args:
            message: prepended to exception messages to give more context.

        Returns:
            A context manager for use in a 'with' statement.
        """
        return CatchAndLog._Catcher(
            self, consume_exceptions=False, message=message)

    def _log(self, level, message):
        # Record the entry with a wall-clock timestamp for the consumer.
        self._messages.append({
            'message': message,
            'level': level,
            'timestamp': datetime.datetime.now().strftime(_LOG_DATE_FORMAT)})

    def critical(self, message):
        self._log(_CRITICAL, message)
        logging.critical(message)

    def warning(self, message):
        self._log(_WARNING, message)
        logging.warning(message)

    def warn(self, message):
        # Alias of warning() kept for callers using the shorter name.
        self.warning(message)

    def info(self, message):
        self._log(_INFO, message)
        logging.info(message)

    def get(self):
        """Returns the accumulated message dicts."""
        return self._messages
| [
[
8,
0,
0.1154,
0.0077,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.1308,
0.0077,
0,
0.66,
0.1,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.1462,
0.0077,
0,
0.66,
... | [
"\"\"\"Module providing simplistic logger.\"\"\"",
"__author__ = 'Mike Gainer (mgainer@google.com)'",
"import datetime",
"import logging",
"import traceback",
"import appengine_config",
"_LOG_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'",
"_CRITICAL = 'critical'",
"_WARNING = 'warning'",
"_INFO = 'info'",... |
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the integration file for Python.
"""
import cgi
import os
import re
import string
def escape(text, replace=None):
    """Converts the special characters '<', '>', '&' and quotes.

    RFC 1866 specifies that these characters be represented in HTML as
    &lt; &gt; and &amp; respectively.  The replacement strings below were
    corrupted by HTML-entity decoding (e.g. '&amp;' had become a bare '&',
    making the function a no-op and breaking the final line's syntax);
    this restores the proper entities.

    Args:
        text: the string to escape.
        replace: optional replace(text, old, new) callable; defaults to
            string.replace (resolved lazily rather than at def time).

    Returns:
        The escaped string.
    """
    if replace is None:
        replace = string.replace
    # '&' must be done first so later entities are not double-escaped.
    text = replace(text, '&', '&amp;')
    text = replace(text, '<', '&lt;')
    text = replace(text, '>', '&gt;')
    text = replace(text, '"', '&quot;')
    text = replace(text, "'", '&#39;')
    return text
# The FCKeditor class
class FCKeditor(object):
    """Server-side helper that renders an FCKeditor instance as HTML.

    For compatible browsers it emits hidden fields plus the editor iframe;
    otherwise it degrades to a plain <textarea>.
    """

    def __init__(self, instanceName):
        self.InstanceName = instanceName
        self.BasePath = '/fckeditor/'
        self.Width = '100%'
        self.Height = '200'
        self.ToolbarSet = 'Default'
        self.Value = ''
        self.Config = {}

    def Create(self):
        """Returns the HTML for this editor instance."""
        return self.CreateHtml()

    def CreateHtml(self):
        """Builds the editor HTML (iframe, or textarea fallback)."""
        HtmlValue = escape(self.Value)
        Html = ""
        if self.IsCompatible():
            File = "fckeditor.html"
            Link = "%seditor/%s?InstanceName=%s" % (
                self.BasePath, File, self.InstanceName)
            if self.ToolbarSet is not None:
                Link += "&Toolbar=%s" % self.ToolbarSet
            # Render the linked hidden field
            Html += "<input type=\"hidden\" id=\"%s\" name=\"%s\" value=\"%s\" style=\"display:none\" />" % (
                self.InstanceName, self.InstanceName, HtmlValue)
            # Render the configurations hidden field
            Html += "<input type=\"hidden\" id=\"%s___Config\" value=\"%s\" style=\"display:none\" />" % (
                self.InstanceName, self.GetConfigFieldString())
            # Render the editor iframe. Bug fix: the id suffix was
            # "\__Frame" (a literal backslash); FCKeditor expects "___Frame".
            Html += "<iframe id=\"%s___Frame\" src=\"%s\" width=\"%s\" height=\"%s\" frameborder=\"0\" scrolling=\"no\"></iframe>" % (
                self.InstanceName, Link, self.Width, self.Height)
        else:
            # Bug fix: the percent check searched for "%%", which can never
            # occur in a plain string; a bare "%" marks a relative size.
            if self.Width.find("%") < 0:
                WidthCSS = "%spx" % self.Width
            else:
                WidthCSS = self.Width
            if self.Height.find("%") < 0:
                HeightCSS = "%spx" % self.Height
            else:
                HeightCSS = self.Height
            Html += "<textarea name=\"%s\" rows=\"4\" cols=\"40\" style=\"width: %s; height: %s;\" wrap=\"virtual\">%s</textarea>" % (
                self.InstanceName, WidthCSS, HeightCSS, HtmlValue)
        return Html

    def IsCompatible(self):
        """Detects from HTTP_USER_AGENT whether the rich editor is usable."""
        sAgent = os.environ.get("HTTP_USER_AGENT", "")
        if "MSIE" in sAgent and "mac" not in sAgent and "Opera" not in sAgent:
            i = sAgent.find("MSIE")
            iVersion = float(sAgent[i + 5:i + 5 + 3])
            return iVersion >= 5.5
        elif sAgent.find("Gecko/") >= 0:
            i = sAgent.find("Gecko/")
            iVersion = int(sAgent[i + 6:i + 6 + 8])
            return iVersion >= 20030210
        elif sAgent.find("Opera/") >= 0:
            i = sAgent.find("Opera/")
            iVersion = float(sAgent[i + 6:i + 6 + 4])
            return iVersion >= 9.5
        elif sAgent.find("AppleWebKit/") >= 0:
            m = re.search(r'AppleWebKit\/(\d+)', sAgent, re.IGNORECASE)
            # Bug fixes: the original compared the *string* group against
            # the int 522, and crashed if the regex failed to match.
            return m is not None and int(m.group(1)) >= 522
        return False

    def GetConfigFieldString(self):
        """Serializes self.Config as an escaped key=value query string."""
        # NOTE(review): upstream FCKeditor joins pairs with '&amp;' for HTML
        # embedding; the '&' here may be entity-decode corruption — confirm
        # against the client-side parser before changing.
        sParams = ""
        bFirst = True
        for sKey in self.Config.keys():
            sValue = self.Config[sKey]
            if not bFirst:
                sParams += "&"
            else:
                bFirst = False
            if sValue:
                k = escape(sKey)
                v = escape(sValue)
                if sValue == "true":
                    sParams += "%s=true" % k
                elif sValue == "false":
                    sParams += "%s=false" % k
                else:
                    sParams += "%s=%s" % (k, v)
        return sParams
| [
[
8,
0,
0.0719,
0.1375,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.15,
0.0063,
0,
0.66,
0.1667,
934,
0,
1,
0,
0,
934,
0,
0
],
[
1,
0,
0.1562,
0.0063,
0,
0.66,
... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2008 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi",
"import os",
"import re",
"import string",
"def escape(text,... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
    """WSGI entry point. Dispatch to the connector or uploader and run it.

    Routing is by SCRIPT_NAME suffix; failures are rendered with cgitb so
    the client sees a traceback page instead of a bare 500.
    """
    if environ['SCRIPT_NAME'].endswith("connector.py"):
        conn = FCKeditorConnector(environ)
    elif environ['SCRIPT_NAME'].endswith("upload.py"):
        conn = FCKeditorQuickUpload(environ)
    else:
        start_response("200 Ok", [('Content-Type', 'text/html')])
        yield "Unknown page requested: "
        yield environ['SCRIPT_NAME']
        return
    try:
        # Run the connector, then send its headers and body.
        data = conn.doResponse()
        start_response("200 Ok", conn.headers)
        yield data
    except Exception:
        # Bug fix: the bare "except:" also caught GeneratorExit (raised
        # when the client disconnects mid-response), which must propagate.
        start_response(
            "500 Internal Server Error", [("Content-type", "text/html")])
        buf = StringIO()  # renamed from "file", which shadowed the builtin
        cgitb.Hook(file=buf).handle()
        yield buf.getvalue()
| [
[
8,
0,
0.2586,
0.431,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.5,
0.0172,
0,
0.66,
0.2,
385,
0,
1,
0,
0,
385,
0,
0
],
[
1,
0,
0.5172,
0.0172,
0,
0.66,
0... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2008 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from connector import FCKeditorConnector",
"from upload import FCKeditorQuickUp... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload(FCKeditorConnectorBase,
                           UploadFileCommandMixin,
                           BaseHttpMixin, BaseHtmlMixin):
    """Connector that implements only the QuickUpload command."""

    def doResponse(self):
        """Process the request, set headers and return a response string."""
        # Check if this connector is disabled
        if not Config.Enabled:
            return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
        command = 'QuickUpload'
        # The file type (from the QueryString, by default 'File').
        resourceType = self.request.get('Type', 'File')
        currentFolder = getCurrentFolder(self.request.get("CurrentFolder", ""))
        # Check for invalid paths
        if currentFolder is None:
            return self.sendUploadResults(102, '', '', "")
        # Check if it is an allowed command
        if command not in Config.ConfigAllowedCommands:
            return self.sendUploadResults(1, '', '', 'The %s command isn\'t allowed' % command)
        if resourceType not in Config.ConfigAllowedTypes:
            return self.sendUploadResults(1, '', '', 'Invalid type specified')
        # Setup paths
        self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
        self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
        if not self.userFilesFolder:  # no absolute path given (dangerous...)
            self.userFilesFolder = mapServerPath(
                self.environ, self.webUserFilesFolder)
        # Ensure that the directory exists.
        if not os.path.exists(self.userFilesFolder):
            try:
                # Bug fix: the method name was doubled
                # ("createServerFoldercreateServerFolder"), so this call
                # always raised and fell through to sendError below.
                self.createServerFolder(self.userFilesFolder)
            except:
                return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
        # File upload doesn't have to return XML, so intercept here
        return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorQuickUpload()
data = conn.doResponse()
for header in conn.headers:
if not header is None:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0213,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.2,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.6... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorQuickUpload(\tFCKeditorConnectorBase,\n\t\t\t\t\t\t\tUploadFileCommandMixin,\n\t\t\t... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase(object):
    """The base connector class. Subclass it to extend functionality."""

    def __init__(self, environ=None):
        """Parses the request fields and initializes header storage."""
        self.request = FCKeditorRequest(environ)
        self.headers = []
        # WSGI passes its own environ; plain CGI falls back to os.environ.
        self.environ = environ if environ else os.environ

    def setHeader(self, key, value):
        """Queues an HTTP header to be emitted with the response."""
        self.headers.append((key, value))
class FCKeditorRequest(object):
    """A wrapper around the request object, for both WSGI and plain CGI."""

    def __init__(self, environ):
        # Bug fix: default get_request so has_key()/get() never hit a
        # missing attribute — the original only assigned it inside the
        # REQUEST_METHOD/QUERY_STRING branch below.
        self.get_request = {}
        if environ:  # WSGI
            self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
                                            environ=environ,
                                            keep_blank_values=1)
            self.environ = environ
        else:  # plain old cgi
            self.environ = os.environ
            self.request = cgi.FieldStorage()
        if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
            if self.environ['REQUEST_METHOD'].upper() == 'POST':
                # We are in a POST, but a GET query string exists; cgi
                # parses POST data by default, so parse QUERY_STRING too.
                self.get_request = cgi.FieldStorage(
                    fp=None,
                    environ={
                        'REQUEST_METHOD': 'GET',
                        'QUERY_STRING': self.environ['QUERY_STRING'],
                    },
                )

    def has_key(self, key):
        """Whether key exists in either the POST or GET data."""
        return key in self.request or key in self.get_request

    def get(self, key, default=None):
        """Returns the value (or file field) for key, else default."""
        if key in self.request.keys():
            field = self.request[key]
        elif key in self.get_request.keys():
            field = self.get_request[key]
        else:
            return default
        if hasattr(field, "filename") and field.filename:
            # File upload: return the field itself, not its string value.
            return field
        return field.value
| [
[
8,
0,
0.1667,
0.2778,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3111,
0.0111,
0,
0.66,
0.1429,
934,
0,
2,
0,
0,
934,
0,
0
],
[
1,
0,
0.3333,
0.0111,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2008 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi, os",
"from fckutil import *",
"from fckcommands import * \t# defa... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector(FCKeditorConnectorBase,
                         GetFoldersCommandMixin,
                         GetFoldersAndFilesCommandMixin,
                         CreateFolderCommandMixin,
                         UploadFileCommandMixin,
                         BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin):
    """The Standard connector class."""

    def doResponse(self):
        """Process the request, set headers and return a string response."""
        s = ""
        # Check if this connector is disabled
        if not Config.Enabled:
            return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
        # Make sure we have valid inputs
        for key in ("Command", "Type", "CurrentFolder"):
            if not self.request.has_key(key):
                return
        # Get command, resource type and current folder
        command = self.request.get("Command")
        resourceType = self.request.get("Type")
        currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
        # Check for invalid paths
        if currentFolder is None:
            return self.sendError(102, "")
        # Check if it is an allowed command
        if command not in Config.ConfigAllowedCommands:
            return self.sendError(1, 'The %s command isn\'t allowed' % command)
        if resourceType not in Config.ConfigAllowedTypes:
            return self.sendError(1, 'Invalid type specified')
        # Setup paths
        if command == "QuickUpload":
            self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
            self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
        else:
            self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
            self.webUserFilesFolder = Config.FileTypesPath[resourceType]
        if not self.userFilesFolder:  # no absolute path given (dangerous...)
            self.userFilesFolder = mapServerPath(
                self.environ, self.webUserFilesFolder)
        # Ensure that the directory exists.
        if not os.path.exists(self.userFilesFolder):
            try:
                # Bug fix: the method name was doubled
                # ("createServerFoldercreateServerFolder"), so the folder
                # was never created and this always returned the error.
                self.createServerFolder(self.userFilesFolder)
            except:
                return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
        # File upload doesn't have to return XML, so intercept here
        if command == "FileUpload":
            return self.uploadFile(resourceType, currentFolder)
        # Create Url
        url = combinePaths(self.webUserFilesFolder, currentFolder)
        # Begin XML
        s += self.createXmlHeader(command, resourceType, currentFolder, url)
        # Execute the command via a dispatch table.
        selector = {"GetFolders": self.getFolders,
                    "GetFoldersAndFiles": self.getFoldersAndFiles,
                    "CreateFolder": self.createFolder,
                    }
        s += selector[command](resourceType, currentFolder)
        s += self.createXmlFooter()
        return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
	try:
		# Create a Connector Instance
		conn = FCKeditorConnector()
		data = conn.doResponse()
		# Emit the HTTP headers, a blank separator line, then the body.
		for header in conn.headers:
			print '%s: %s' % header
		print
		print data
	except:
		# Render any failure as a plain-text CGI page with the traceback.
		print "Content-Type: text/plain"
		print
		import cgi
		cgi.print_exception()
| [
[
1,
0,
0.0132,
0.0132,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0395,
0.0132,
0,
0.66,
0.1667,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0526,
0.0132,
0,
... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckoutput import * \t# base http, xml and html output mixins",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorConnector(\tF... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=string.replace):
"""
Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as < > and & respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
text = replace(text, '&', '&') # must be done 1st
text = replace(text, '<', '<')
text = replace(text, '>', '>')
text = replace(text, '"', '"')
return text
def convertToXmlAttribute(value):
if (value is None):
value = ""
return escape(value)
class BaseHttpMixin(object):
def setHttpHeaders(self, content_type='text/xml'):
"Purpose: to prepare the headers for the xml to return"
# Prevent the browser from caching the result.
# Date in the past
self.setHeader('Expires','Mon, 26 Jul 1997 05:00:00 GMT')
# always modified
self.setHeader('Last-Modified',strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime()))
# HTTP/1.1
self.setHeader('Cache-Control','no-store, no-cache, must-revalidate')
self.setHeader('Cache-Control','post-check=0, pre-check=0')
# HTTP/1.0
self.setHeader('Pragma','no-cache')
# Set the response format.
self.setHeader( 'Content-Type', content_type + '; charset=utf-8' )
return
class BaseXmlMixin(object):
def createXmlHeader(self, command, resourceType, currentFolder, url):
"Purpose: returns the xml header"
self.setHttpHeaders()
# Create the XML document header
s = """<?xml version="1.0" encoding="utf-8" ?>"""
# Create the main connector node
s += """<Connector command="%s" resourceType="%s">""" % (
command,
resourceType
)
# Add the current folder node
s += """<CurrentFolder path="%s" url="%s" />""" % (
convertToXmlAttribute(currentFolder),
convertToXmlAttribute(url),
)
return s
def createXmlFooter(self):
"Purpose: returns the xml footer"
return """</Connector>"""
def sendError(self, number, text):
"Purpose: in the event of an error, return an xml based error"
self.setHttpHeaders()
return ("""<?xml version="1.0" encoding="utf-8" ?>""" +
"""<Connector>""" +
self.sendErrorNode (number, text) +
"""</Connector>""" )
def sendErrorNode(self, number, text):
return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
class BaseHtmlMixin(object):
def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
self.setHttpHeaders("text/html")
"This is the function that sends the results of the uploading process"
"Minified version of the document.domain automatic fix script (#1919)."
"The original script can be found at _dev/domain_fix_template.js"
return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
'errorNumber': errorNo,
'fileUrl': fileUrl.replace ('"', '\\"'),
'fileName': fileName.replace ( '"', '\\"' ) ,
'customMsg': customMsg.replace ( '"', '\\"' ),
}
| [
[
8,
0,
0.1207,
0.1983,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2328,
0.0086,
0,
0.66,
0.1429,
654,
0,
2,
0,
0,
654,
0,
0
],
[
1,
0,
0.2414,
0.0086,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2008 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from time import gmtime, strftime",
"import string",
"def escape(text, replac... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
"The base connector class. Subclass it to extend functionality (see Zope example)"
def __init__(self, environ=None):
"Constructor: Here you should parse request fields, initialize variables, etc."
self.request = FCKeditorRequest(environ) # Parse request
self.headers = [] # Clean Headers
if environ:
self.environ = environ
else:
self.environ = os.environ
# local functions
def setHeader(self, key, value):
self.headers.append ((key, value))
return
class FCKeditorRequest(object):
"A wrapper around the request object"
def __init__(self, environ):
if environ: # WSGI
self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
environ=environ,
keep_blank_values=1)
self.environ = environ
else: # plain old cgi
self.environ = os.environ
self.request = cgi.FieldStorage()
if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
if self.environ['REQUEST_METHOD'].upper()=='POST':
# we are in a POST, but GET query_string exists
# cgi parses by default POST data, so parse GET QUERY_STRING too
self.get_request = cgi.FieldStorage(fp=None,
environ={
'REQUEST_METHOD':'GET',
'QUERY_STRING':self.environ['QUERY_STRING'],
},
)
else:
self.get_request={}
def has_key(self, key):
return self.request.has_key(key) or self.get_request.has_key(key)
def get(self, key, default=None):
if key in self.request.keys():
field = self.request[key]
elif key in self.get_request.keys():
field = self.get_request[key]
else:
return default
if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
return field
else:
return field.value
| [
[
8,
0,
0.1667,
0.2778,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3111,
0.0111,
0,
0.66,
0.1429,
934,
0,
2,
0,
0,
934,
0,
0
],
[
1,
0,
0.3333,
0.0111,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2008 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi, os",
"from fckutil import *",
"from fckcommands import * \t# defa... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
GetFoldersCommandMixin,
GetFoldersAndFilesCommandMixin,
CreateFolderCommandMixin,
UploadFileCommandMixin,
BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
"The Standard connector class."
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
s = ""
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
# Make sure we have valid inputs
for key in ("Command","Type","CurrentFolder"):
if not self.request.has_key (key):
return
# Get command, resource type and current folder
command = self.request.get("Command")
resourceType = self.request.get("Type")
currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
# Check for invalid paths
if currentFolder is None:
return self.sendError(102, "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendError( 1, 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendError( 1, 'Invalid type specified' )
# Setup paths
if command == "QuickUpload":
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
else:
self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
self.webUserFilesFolder = Config.FileTypesPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFoldercreateServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
if (command == "FileUpload"):
return self.uploadFile(resourceType, currentFolder)
# Create Url
url = combinePaths( self.webUserFilesFolder, currentFolder )
# Begin XML
s += self.createXmlHeader(command, resourceType, currentFolder, url)
# Execute the command
selector = {"GetFolders": self.getFolders,
"GetFoldersAndFiles": self.getFoldersAndFiles,
"CreateFolder": self.createFolder,
}
s += selector[command](resourceType, currentFolder)
s += self.createXmlFooter()
return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorConnector()
data = conn.doResponse()
for header in conn.headers:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0132,
0.0132,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0395,
0.0132,
0,
0.66,
0.1667,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0526,
0.0132,
0,
... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckoutput import * \t# base http, xml and html output mixins",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorConnector(\tF... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=string.replace):
"""
Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as < > and & respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
text = replace(text, '&', '&') # must be done 1st
text = replace(text, '<', '<')
text = replace(text, '>', '>')
text = replace(text, '"', '"')
return text
def convertToXmlAttribute(value):
if (value is None):
value = ""
return escape(value)
class BaseHttpMixin(object):
def setHttpHeaders(self, content_type='text/xml'):
"Purpose: to prepare the headers for the xml to return"
# Prevent the browser from caching the result.
# Date in the past
self.setHeader('Expires','Mon, 26 Jul 1997 05:00:00 GMT')
# always modified
self.setHeader('Last-Modified',strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime()))
# HTTP/1.1
self.setHeader('Cache-Control','no-store, no-cache, must-revalidate')
self.setHeader('Cache-Control','post-check=0, pre-check=0')
# HTTP/1.0
self.setHeader('Pragma','no-cache')
# Set the response format.
self.setHeader( 'Content-Type', content_type + '; charset=utf-8' )
return
class BaseXmlMixin(object):
def createXmlHeader(self, command, resourceType, currentFolder, url):
"Purpose: returns the xml header"
self.setHttpHeaders()
# Create the XML document header
s = """<?xml version="1.0" encoding="utf-8" ?>"""
# Create the main connector node
s += """<Connector command="%s" resourceType="%s">""" % (
command,
resourceType
)
# Add the current folder node
s += """<CurrentFolder path="%s" url="%s" />""" % (
convertToXmlAttribute(currentFolder),
convertToXmlAttribute(url),
)
return s
def createXmlFooter(self):
"Purpose: returns the xml footer"
return """</Connector>"""
def sendError(self, number, text):
"Purpose: in the event of an error, return an xml based error"
self.setHttpHeaders()
return ("""<?xml version="1.0" encoding="utf-8" ?>""" +
"""<Connector>""" +
self.sendErrorNode (number, text) +
"""</Connector>""" )
def sendErrorNode(self, number, text):
return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
class BaseHtmlMixin(object):
def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
self.setHttpHeaders("text/html")
"This is the function that sends the results of the uploading process"
"Minified version of the document.domain automatic fix script (#1919)."
"The original script can be found at _dev/domain_fix_template.js"
return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
'errorNumber': errorNo,
'fileUrl': fileUrl.replace ('"', '\\"'),
'fileName': fileName.replace ( '"', '\\"' ) ,
'customMsg': customMsg.replace ( '"', '\\"' ),
}
| [
[
8,
0,
0.1207,
0.1983,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2328,
0.0086,
0,
0.66,
0.1429,
654,
0,
2,
0,
0,
654,
0,
0
],
[
1,
0,
0.2414,
0.0086,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2008 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from time import gmtime, strftime",
"import string",
"def escape(text, replac... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
"WSGI entry point. Run the connector"
if environ['SCRIPT_NAME'].endswith("connector.py"):
conn = FCKeditorConnector(environ)
elif environ['SCRIPT_NAME'].endswith("upload.py"):
conn = FCKeditorQuickUpload(environ)
else:
start_response ("200 Ok", [('Content-Type','text/html')])
yield "Unknown page requested: "
yield environ['SCRIPT_NAME']
return
try:
# run the connector
data = conn.doResponse()
# Start WSGI response:
start_response ("200 Ok", conn.headers)
# Send response text
yield data
except:
start_response("500 Internal Server Error",[("Content-type","text/html")])
file = StringIO()
cgitb.Hook(file = file).handle()
yield file.getvalue()
| [
[
8,
0,
0.2586,
0.431,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.5,
0.0172,
0,
0.66,
0.2,
385,
0,
1,
0,
0,
385,
0,
0
],
[
1,
0,
0.5172,
0.0172,
0,
0.66,
0... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2008 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from connector import FCKeditorConnector",
"from upload import FCKeditorQuickUp... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2008 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload( FCKeditorConnectorBase,
UploadFileCommandMixin,
BaseHttpMixin, BaseHtmlMixin):
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
command = 'QuickUpload'
# The file type (from the QueryString, by default 'File').
resourceType = self.request.get('Type','File')
currentFolder = getCurrentFolder(self.request.get("CurrentFolder",""))
# Check for invalid paths
if currentFolder is None:
return self.sendUploadResults(102, '', '', "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
# Setup paths
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFoldercreateServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorQuickUpload()
data = conn.doResponse()
for header in conn.headers:
if not header is None:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0213,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.2,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.6... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorQuickUpload(\tFCKeditorConnectorBase,\n\t\t\t\t\t\t\tUploadFileCommandMixin,\n\t\t\t... |
# -*- coding: utf-8 -*-
#
# jQuery File Upload Plugin GAE Python Example 2.1.0
# https://github.com/blueimp/jQuery-File-Upload
#
# Copyright 2011, Sebastian Tschan
# https://blueimp.net
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
from __future__ import with_statement
from google.appengine.api import files, images
from google.appengine.ext import blobstore, deferred
from google.appengine.ext.webapp import blobstore_handlers
import json
import re
import urllib
import webapp2
WEBSITE = 'http://blueimp.github.io/jQuery-File-Upload/'
MIN_FILE_SIZE = 1 # bytes
MAX_FILE_SIZE = 5000000 # bytes
IMAGE_TYPES = re.compile('image/(gif|p?jpeg|(x-)?png)')
ACCEPT_FILE_TYPES = IMAGE_TYPES
THUMBNAIL_MODIFICATOR = '=s80' # max width / height
EXPIRATION_TIME = 300 # seconds
def cleanup(blob_keys):
blobstore.delete(blob_keys)
class UploadHandler(webapp2.RequestHandler):
def initialize(self, request, response):
super(UploadHandler, self).initialize(request, response)
self.response.headers['Access-Control-Allow-Origin'] = '*'
self.response.headers[
'Access-Control-Allow-Methods'
] = 'OPTIONS, HEAD, GET, POST, PUT, DELETE'
self.response.headers[
'Access-Control-Allow-Headers'
] = 'Content-Type, Content-Range, Content-Disposition'
def validate(self, file):
if file['size'] < MIN_FILE_SIZE:
file['error'] = 'File is too small'
elif file['size'] > MAX_FILE_SIZE:
file['error'] = 'File is too big'
elif not ACCEPT_FILE_TYPES.match(file['type']):
file['error'] = 'Filetype not allowed'
else:
return True
return False
def get_file_size(self, file):
file.seek(0, 2) # Seek to the end of the file
size = file.tell() # Get the position of EOF
file.seek(0) # Reset the file position to the beginning
return size
def write_blob(self, data, info):
blob = files.blobstore.create(
mime_type=info['type'],
_blobinfo_uploaded_filename=info['name']
)
with files.open(blob, 'a') as f:
f.write(data)
files.finalize(blob)
return files.blobstore.get_blob_key(blob)
def handle_upload(self):
results = []
blob_keys = []
for name, fieldStorage in self.request.POST.items():
if type(fieldStorage) is unicode:
continue
result = {}
result['name'] = re.sub(
r'^.*\\',
'',
fieldStorage.filename
)
result['type'] = fieldStorage.type
result['size'] = self.get_file_size(fieldStorage.file)
if self.validate(result):
blob_key = str(
self.write_blob(fieldStorage.value, result)
)
blob_keys.append(blob_key)
result['deleteType'] = 'DELETE'
result['deleteUrl'] = self.request.host_url +\
'/?key=' + urllib.quote(blob_key, '')
if (IMAGE_TYPES.match(result['type'])):
try:
result['url'] = images.get_serving_url(
blob_key,
secure_url=self.request.host_url.startswith(
'https'
)
)
result['thumbnailUrl'] = result['url'] +\
THUMBNAIL_MODIFICATOR
except: # Could not get an image serving url
pass
if not 'url' in result:
result['url'] = self.request.host_url +\
'/' + blob_key + '/' + urllib.quote(
result['name'].encode('utf-8'), '')
results.append(result)
deferred.defer(
cleanup,
blob_keys,
_countdown=EXPIRATION_TIME
)
return results
def options(self):
pass
def head(self):
pass
def get(self):
self.redirect(WEBSITE)
def post(self):
if (self.request.get('_method') == 'DELETE'):
return self.delete()
result = {'files': self.handle_upload()}
s = json.dumps(result, separators=(',', ':'))
redirect = self.request.get('redirect')
if redirect:
return self.redirect(str(
redirect.replace('%s', urllib.quote(s, ''), 1)
))
if 'application/json' in self.request.headers.get('Accept'):
self.response.headers['Content-Type'] = 'application/json'
self.response.write(s)
def delete(self):
blobstore.delete(self.request.get('key') or '')
class DownloadHandler(blobstore_handlers.BlobstoreDownloadHandler):
def get(self, key, filename):
if not blobstore.get(key):
self.error(404)
else:
# Prevent browsers from MIME-sniffing the content-type:
self.response.headers['X-Content-Type-Options'] = 'nosniff'
# Cache for the expiration time:
self.response.headers['Cache-Control'] = 'public,max-age=%d' % EXPIRATION_TIME
# Send the file forcing a download dialog:
self.send_blob(key, save_as=filename, content_type='application/octet-stream')
app = webapp2.WSGIApplication(
[
('/', UploadHandler),
('/([^/]+)/([^/]+)', DownloadHandler)
],
debug=True
)
| [
[
1,
0,
0.0788,
0.0061,
0,
0.66,
0,
777,
0,
1,
0,
0,
777,
0,
0
],
[
1,
0,
0.0848,
0.0061,
0,
0.66,
0.0556,
279,
0,
2,
0,
0,
279,
0,
0
],
[
1,
0,
0.0909,
0.0061,
0,
... | [
"from __future__ import with_statement",
"from google.appengine.api import files, images",
"from google.appengine.ext import blobstore, deferred",
"from google.appengine.ext.webapp import blobstore_handlers",
"import json",
"import re",
"import urllib",
"import webapp2",
"WEBSITE = 'http://blueimp.g... |
#####################################################
# Python Watch and Warn Files Modification script #
#####################################################
# Vallee Cedric #
# 20/12/2011 #
# Network and Systems Security #
#####################################################
import re
import subprocess
import os
import hashlib
import smtplib
from email.MIMEText import MIMEText
#General analyser class : manage the mail sending
class Analyser:
#Constructor
#mail : where the mail will be sent
#dbpath : db location
#fileList : List of files to analyse, ids.py and ids.cfg are automatically added
def __init__(self, mail, dbpath,fileList):
self.mail = mail
self.dbpath = dbpath
self.fileList = fileList
self.fileList.append("ids.cfg")
self.fileList.append("ids.py")
#report: string which store the mail which will be sent
self.report=""
self.run()
#send the report to the self.mail
def sendMail(self):
if self.report != "":
#header
self.report="\tAutomatic report from ids.py script\n"+ self.report
print self.report
fromaddr = 'cvallee.insa@gmail.com'
toaddrs = self.mail
msg = self.report
# Credentials for connecting on the google spam account
username = 'nss.automatic.message@gmail.com'
password = 'ThisIsMyPassword'
# The actual mail send
server = smtplib.SMTP('smtp.gmail.com:587')
server.starttls()
server.login(username,password)
server.sendmail(fromaddr, toaddrs, msg)
server.quit()
self.report=""
else:
print "No modification"
#the main method
def Run(self):
pass
#database constructor
def create(self):
pass
#RpmAnalyser which is not yet implemented
class RpmAnalyser(Analyser):
def Run(self):
pass
def Create(self):
pass
#Analyser using the md5 hashcode for matching files
class MdAnalyser(Analyser):
def create(self):
#open the self.dbpath file in writing mode
db= open(self.dbpath,'w')
for i in self.fileList:
try:
#info=os.stat(i) brute information(same as ls)
#if i is a directory then we compute the md5 of the ls -lia i string
#if it's a file then we compute directly his md5
if os.path.isdir(i):
ls = subprocess.Popen(
["ls", i],
stdout = subprocess.PIPE,
stderr = subprocess.PIPE
)
out , error = ls.communicate()
m=hashlib.md5(out)
hashTmp=m.hexdigest()+" "+i+"\n"
else:
md5sum = subprocess.Popen(
["md5sum", i],
stdout = subprocess.PIPE,
stderr = subprocess.PIPE
)
hashTmp, error = md5sum.communicate()
#write it in the db file
db.write(hashTmp)
except:
self.report+= "The %s file is missing\n"%i
#Report the creation
self.report+="The database has been created"
db.close()
def run(self):
dbmodified=False
if not os.path.exists(self.dbpath):
#if the db file doesn't exist then we create it
self.create()
else:
#open, put the former md5 value in a list, and remove the '\n' char
dbfile=open(self.dbpath,'r')
reg=re.compile("(\w*)\s*([^\s]*)\n")
db=dbfile.readlines()
dbfile.close()
db = map(lambda x: x.strip(),db)
#we look for each file of the config file
for i in self.fileList:
try:
#info=os.stat(i) #brute information(same as ls)
#if i is a directory then we compute the md5 of the ls -lia i string
#if it's a file then we compute directly his md5
if os.path.isdir(i):
ls = subprocess.Popen(
["ls", i],
stdout = subprocess.PIPE,
stderr = subprocess.PIPE
)
out , error = ls.communicate()
m=hashlib.md5(out)
hashTmp=m.hexdigest()+" "+i
else:
md5sum = subprocess.Popen(
["md5sum", i],
stdout = subprocess.PIPE,
stderr = subprocess.PIPE
)
hashTmp, error = md5sum.communicate()
hashTmp=hashTmp[:-1]
#if the current md5 value and the former one are different then the files have been modified
#so we put it in the report and update the md5 value to the new one
if not hashTmp in db and not i == self.dbpath :
#Update of the database
for n in db:
if i in n:
db.remove(n)
db.append(hashTmp)
break
dbmodified=True
#Additionnal information for report
#We join the ls -lia of the current file or directory that have been modified
self.report+="File %s has been modificated\n"%i
out, err = subprocess.Popen(
["ls", "-lia", i],
stdout = subprocess.PIPE,
stderr = subprocess.PIPE
).communicate()
self.report+=out+"\n"
except:
self.report+= "The %s file is missing\n"%i
if dbmodified:
#if there was a modification of one of the files then we update the db file for the next call
dbfile= open(self.dbpath,'w')
dbfile.write("\n".join(db))
self.report+="The database has been updated"
dbfile.close()
#we check if we need to send the report
self.sendMail()
def main():
try:
cfgfile=open('ids.cfg','r')
except :
#raise Exception('config file not found\nIds abort\n')
print '\tconfiguration file not found\n\tIds script aborted\n'
return
mail=""
rpm=False
dbpath=""
file=False
fileList=[]
#Processing the configuration file, and looking for the specials patterns
for line in cfgfile:
mailPat = re.compile("(email|e-mail)\s*:\s*([^\s]*)",re.IGNORECASE)
pathPat = re.compile("(db|path)\s*:\s*([^\s]*)",re.IGNORECASE)
lPat = re.compile("#.*list of files{0,1}.*#",re.IGNORECASE)
if file and line != '\n' and not line[0] == '#' :
fileList.append(line[:-1])
elif mailPat.match(line):
mail = mailPat.match(line).group(2)
elif pathPat.match(line):
dbpath=pathPat.match(line).group(2)
elif re.match("option\s*:\s*rpm",line,re.IGNORECASE):
rpm=True
elif lPat.match(line):
file=True
else:
#Comment or unknown option
pass
if rpm:
RpmAnalyser(mail,dbpath,fileList)
else:
MdAnalyser(mail,dbpath,fileList)
if __name__ == "__main__":
main()
| [
[
1,
0,
0.0437,
0.0049,
0,
0.66,
0,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.0485,
0.0049,
0,
0.66,
0.1,
394,
0,
1,
0,
0,
394,
0,
0
],
[
1,
0,
0.0534,
0.0049,
0,
0.6... | [
"import re",
"import subprocess",
"import os",
"import hashlib",
"import smtplib",
"from email.MIMEText import MIMEText",
"class Analyser:\n\t#Constructor\n\t#mail : where the mail will be sent\n\t#dbpath : db location\n\t#fileList : List of files to analyse, ids.py and ids.cfg are automatically added\n... |
# email_spam.py
#
# This file does the actual spam detection in Peter Ballard's email spam filter.
# The function check() takes in an email message (the "message" class from
# the standard Python module email), and returns a status
# ("wham", "mham", "spam" or "ham") as well as a one line description.
#
# Feel free to modify this program to your taste.
# But the intention is that you can use this program unchanged,
# and that you only need to modify email_defs.py
#
# Version 1.0, 4-Apr-2005.
#
#
#Copyright (C) 2005 Peter Ballard
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
import string
import re
import email_defs
##################################################
# Functions for getting data from another file
##################################################
# each (whitespace separated) string in the file becomes a list element,
# unless the line begins with "#" or ";"
def file_to_list(filename):
    """Return every whitespace-separated token in filename as a list.

    Lines whose first token starts with "#" or ";" are comments and skipped.
    """
    tokens = []
    # `with` guarantees the file is closed even if reading raises
    with open(filename, "r") as fp:
        for line in fp:
            words = line.split()
            if len(words) and words[0][0] != "#" and words[0][0] != ";":
                tokens.extend(words)
    return tokens
##################################################
# Functions for separating out part of the message
##################################################
def getfield(msg, field):
    """Return the stripped value of a message header, or "" when absent."""
    value = msg.get(field)
    if value is None:
        return ""
    return value.strip()
def subject(msg):
    """Return the stripped Subject header of msg ("" when absent)."""
    return getfield(msg, "Subject")
# body includes subject,
# and bodies of all sub-messages if it's multipart
def body(msg):
    """Return the subject plus the full body text of msg.

    A leading space and a trailing newline are added around the subject to
    aid regexp matching; for multipart mail every sub-message is appended
    in raw string form.
    """
    pieces = [" " + subject(msg) + "\n"]
    if msg.is_multipart():
        for part in msg.get_payload():
            pieces.append(part.as_string())
    else:
        pieces.append(msg.get_payload())
    return "".join(pieces)
def extract_addresses(astr):
    """Return every email address in astr, lower-cased, plus domain suffixes.

    For each address the domain suffixes down to two parts are also added,
    e.g. pballard@ozemail.com.au also yields "ozemail.com.au" and "com.au".
    """
    alist = []
    for obj in re.finditer(r"[-\w\.]+@[-\w\.]+", astr):
        addr = obj.group().lower()
        alist.append(addr)
        # this little bit increases time from about 65 to 75 secs for 5500 msgs
        # it's probably worth it to get more generalised addr checking
        parts = addr.split("@")
        if len(parts) == 2:
            bits = parts[1].split(".")
            for i in range(0, len(bits) - 1):
                alist.append(".".join(bits[i:]))
    return alist
# doing domains like this instead of regexps reduces time from 88 to 61 secs for 5764 messages
def extract_urls(astr):
    """Return every http:// domain in astr plus all its dot-joined suffixes.

    Runs twice: once splitting on ".", once on the quoted-printable "=2e"
    obfuscation of a dot.  Suffixes go down to a single part, e.g.
    http://ozemail.com.au yields "ozemail.com.au", "com.au" and "au".
    """
    alist = []
    # checking for =2e doesnt cost much,
    # because some spams get identified quicker (before we reach regexps)
    for dot in [".", "=2e"]:
        for obj in re.finditer(r"http://[-\w" + dot + "]+", astr):
            addr = obj.group().lower()[7:]  # strip the leading "http://"
            bits = addr.split(dot)
            for i in range(0, len(bits)):
                alist.append(".".join(bits[i:]))
    return alist
def recipient(msg):
    """All addresses (and their domain suffixes) from the To and Cc headers."""
    return extract_addresses(getfield(msg, "To")) + extract_addresses(getfield(msg, "Cc"))
def sender(msg):
    """Return the From addresses/domain suffixes (lower-cased), or []."""
    if "From" in msg:
        # already in lower case coming out of extract_addresses()
        alist = extract_addresses(msg.get("From"))
        if len(alist):
            return alist
    return []
# Content-Type string for all attachments, if any
def msg_attachment_types(msg):
    """Return the Content-Type header of every sub-part of a multipart msg.

    Non-multipart messages yield [].
    """
    if not msg.is_multipart():
        return []
    retlist = []
    for submsg in msg.get_payload():
        if "Content-Type" in submsg:
            retlist.append(submsg.get("Content-Type"))
    return retlist
##################################################
# Functions for actual spam matching
##################################################
def file_to_phrase_list(filename):
    """Return one stripped line per non-comment, non-blank line of filename.

    Lines whose first token starts with "#" or ";" are comments and skipped.
    """
    phrases = []
    with open(filename, "r") as fp:
        for line in fp:
            words = line.split()
            if len(words) and words[0][0] != "#" and words[0][0] != ";":
                phrases.append(line.strip())
    return phrases
def really_from_me(msg):
    """Return my address if msg is genuinely from me, else "".

    "Genuinely" means the sender is one of my addresses AND the X-Mailer
    header matches email_defs.really_from_me (guards against forged From).
    """
    if find_listmember_in_list(email_defs.my_addresses, sender(msg)):
        xmailer = getfield(msg, "X-Mailer")
        if email_defs.really_from_me(xmailer):
            return email_defs.my_address
        else:
            return ""
    # original fell through and returned None here; "" keeps the same
    # falsiness for callers but is consistent with the other branches
    return ""
def bad_attachment(msg, black_attachment_types):
    """Return a "##bad_attachment" marker when any attachment Content-Type
    mentions a blacklisted extension, else 0."""
    for content_type in msg_attachment_types(msg):
        for extension in black_attachment_types:
            if re.search(r'\.' + extension, content_type):
                return "##bad_attachment:." + extension
    return 0
def non_letter():
    """Regexp class matching one char that is not a lower-case letter/digit."""
    return r"[^a-z0-9]"
def non_trickyletter():
    """Like non_letter(), but also excludes the substitution chars @ ! |."""
    return r"[^a-z0-9@!\|]"
def domainchars():
    """Regexp class for one character legal in a domain name."""
    return r"[a-z0-9_\-]"
def nondomainchars():
    """Complement of domainchars()."""
    return r"[^a-z0-9_\-]"
def letteralt(ch):
    """Return a regexp character class matching ch plus its common spam
    look-alike substitutions (e.g. "@" for "a", "0" for "o")."""
    if ch=="a":
        # need to also add the accented a
        return "[a@]"
    elif ch=="i" or ch=="l" or ch=="1":
        # with ascii chars>127, it seems that raw strings dont work
        # need to also add the accented i
        # chr(int("356",8)) == chr(238) = i with circumflex
        return "[il1!\\|" + chr(int("356",8)) + "]"
    elif ch=="o":
        # was a bare `if`; harmless (every branch returns) but made
        # consistent with the rest of the chain
        return "[o0]"
    else:
        return "[" + ch + "]"
def simpleregexp(str):
    """Wrap str so it only matches as a whole word (non-letters on each side)."""
    return non_letter() + str + non_letter()
def trickyregexp(str):
    """Build a regexp that matches str even with letter substitutions and
    junk characters between the letters (e.g. v|agra, v.i.a.g.r.a)."""
    pieces = [non_trickyletter(), letteralt(str[0])]
    for ch in str[1:]:
        pieces.append(non_trickyletter() + "*?" + letteralt(ch))
    pieces.append(non_trickyletter())
    return "".join(pieces)
def findword(str, wordlist):
    """Return the whole-word regexp of the first word found in str, else 0."""
    for candidate in wordlist:
        pattern = simpleregexp(candidate)
        if re.search(pattern, str):
            return pattern
    return 0
def findstring(str, wordlist):
    """Return the first entry of wordlist contained in str, else 0.

    str may be a string (substring test) or a list (membership test); both
    behave exactly like the original string.count()-based check.
    """
    for word in wordlist:
        if word in str:
            return word
    return 0
def findregexp(str, wordlist):
    """Return the first regexp in wordlist that matches str, else 0."""
    for pattern in wordlist:
        # MULTILINE makes no performance or result difference on current set
        if re.search(pattern, str, re.MULTILINE):
            return pattern
    return 0
def find_in_list(str, list):
    """Return str when it is an element of list, else "".

    `in` rather than list.count reduced time from 118 to 75 secs on 5586
    messages.
    """
    return str if str in list else ""
def find_listmember_in_list(strlist, list):
    """Return the first element of strlist that also appears in list, else ""."""
    for candidate in strlist:
        if candidate in list:
            return candidate
    return ""
def bad_recipients(recipient_list):
    """Return a spam marker for suspicious recipient lists, else "".

    Flags mail not addressed to any of my addresses, and mail sent to my
    ISP domain three or more times (bulk-mail pattern).
    """
    if not find_listmember_in_list(email_defs.my_addresses, recipient_list):
        return "##recipient_not_me"
    if recipient_list.count(email_defs.my_isp_domain) >= 3:
        return "##many_my_isp_recipients"
    return ""
def bad_subject(msg):
    """Flag subjects that begin with RFC 2047 encoding ("=?"), else ""."""
    return "=?" if subject(msg).startswith("=?") else ""
def empty_message(msg, body_mod):
    """Flag messages with no subject and a whitespace-only body."""
    if subject(msg)=="" and body_mod.strip()=="":
        return "##empty_message"
    else:
        return ""
##################################################
# The main entry program
##################################################
def check(msg):
    """Classify one email.Message.

    Returns (status, match): status is "wham" (whitelisted ham), "mham"
    (marked ham), "spam" or " ham" (note the leading space), and match is
    the list entry / regexp / marker that decided it ("" when none fired).
    Relies on the module-level white/black lists built at import time.
    """
    # strip any empty tags (i.e. <tag></tag>), and make lower case
    #body_notags = re.sub("<(?P<tag>[a-z]+)></(?P=tag)>", "", string.lower(body(msg)))
    bodylow = string.lower(body(msg))
    bodyearly = bodylow[:300] # first 300 chars in body (including subject)
    thissender = sender(msg)
    # look for whitelisted ham (wham)
    whitematch = (find_listmember_in_list(thissender, whitelist)
                  or really_from_me(msg))
    if whitematch:
        return "wham", whitematch
    # looked for "marked" ham (mham)
    mwhitematch = (findstring(bodyearly, whiteintrostrings)
                   or findregexp(bodyearly, whiteintroregexps))
    if mwhitematch:
        return "mham", mwhitematch
    # bad_recipients gets most hits so do it first
    blackmatch = (bad_recipients(recipient(msg))
                  or findstring(bodylow, blackstrings)
                  or empty_message(msg, bodylow)
                  or bad_attachment(msg, black_attachment_types)
                  or bad_subject(msg)
                  or findstring(extract_urls(bodylow), blackdomains)
                  # regexps are costliest so check them last (sped up 1099 emails from 43 to 24 secs!)
                  or findregexp(bodylow, blackregexps))
    if blackmatch:
        return "spam", blackmatch
    # do this last. It looks like a good trick but only caught 3 of 1508 spams
    body_notags = re.sub("<.*?>", "", bodylow)
    blackmatch = findregexp(body_notags, blackregexps)
    if blackmatch:
        return "spam", blackmatch
    else:
        return " ham", ""
##################################################
# Some top level definitions
##################################################
# Module-level rule tables, built once at import time from email_defs.
trickywords = email_defs.trickywords
blackwords = email_defs.blackwords
blackphrases = email_defs.blackphrases
# NOTE(review): this aliases email_defs.blackregexps and then appends to it,
# so the list in email_defs is mutated in place
blackregexps = email_defs.blackregexps
for word in trickywords:
    # this saves 1/10000, and simplifies some output, but again about 4/3 slowdown
    blackregexps.append(simpleregexp(word))
    blackregexps.append(trickyregexp(word))
# this catches very few extra cases (in fact 1: "meds!"), but increases time 61 -> 82 secs for 6643 msgs
#blackregexps.append(simpleregexp(word))
for word in blackwords:
    blackregexps.append(simpleregexp(word))
# turn each multi-word phrase into a regexp tolerating junk between words
for blackphrase in blackphrases:
    list = string.split(string.strip(blackphrase))  # NOTE(review): shadows builtin `list`
    regexp = list[0]
    for i in range(1, len(list)):
        #regexp = regexp + r"\s" + list[i]
        regexp = regexp + non_letter() + "+?" + list[i]
    blackregexps.append(regexp)
blackstrings = email_defs.blackstrings
blackdomains = file_to_list(email_defs.blackdomains)
black_attachment_types = email_defs.black_attachment_types
# words in the intro, or early in the body, which indicate wham
whiteintrowords = email_defs.whiteintrowords
whiteintroregexps = []
for word in whiteintrowords:
    whiteintroregexps.append(simpleregexp(word))
# strings in the subject line which indicate mail is wham
whiteintrostrings = email_defs.whiteintrostrings
whitelist = file_to_list(email_defs.whitelist)
| [
[
1,
0,
0.0928,
0.003,
0,
0.66,
0,
890,
0,
1,
0,
0,
890,
0,
0
],
[
1,
0,
0.0958,
0.003,
0,
0.66,
0.0222,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.1018,
0.003,
0,
0.6... | [
"import string",
"import re",
"import email_defs",
"def file_to_list(filename):\n list = []\n fp = open(filename, \"r\")\n for line in fp.readlines():\n words = string.split(line)\n if len(words) and words[0][0]!=\"#\" and words[0][0]!=\";\":\n list.extend(words)\n fp.clos... |
# email_defs.py
#
# This file is the "local definitions" part of Peter Ballard's email spam filter.
# Some of this you will need to change,
# and most of this you will want to change.
#
# Note the definitions for whitelist and blackdomains are not done here,
# but are in separate files. This is because these two lists can be quite
# long, and might even be created automatically. In these files,
# each (whitespace separated) string in the file becomes a list element,
# unless the line begins with "#" or ";"
# These files are not included in the package, because obviously I don't
# want to make my whitelist public.
#
# Version 1.0, 4-Apr-2005.
#
#Copyright (C) 2005 Peter Ballard
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#######################################
# definitions for email in and out files
#######################################
# All mail files live under one directory, keyed off the base mailbox name.
SYSMAILDIR = "/home/pballard/sysmail/"
MAILNAME = SYSMAILDIR + "pballard"
srcfilename = MAILNAME + ".raw"      # incoming, unsorted mail
storefilename = MAILNAME + ".store"  # archive of everything processed
spamfilename = MAILNAME + ".spam"
hamfilename = MAILNAME + ".ham"
whamfilename = MAILNAME + ".wham"
mhamfilename = MAILNAME + ".mham"
panicfilename = MAILNAME + ".panic"  # messages the parser choked on
test_srcfilename = MAILNAME + ".store" # i.e. read back all the stored emails
#test_storefilename = "" # never used
test_spamfilename = "/dev/null"
# note these are written, not appended. Those parameters probably belong in here.
test_hamfilename = SYSMAILDIR + "test.ham"
test_whamfilename = SYSMAILDIR + "test.wham"
test_mhamfilename = SYSMAILDIR + "test.mham"
test_panicfilename = SYSMAILDIR + "test.panic"
#######################################
# Stuff used to detect spam
#######################################
# A few notes:
#
# "string" refers to a string
# "word" refers to a string bounded by whitespace
# "regexp" refers to a Python regular expression
# "phrase" is a set of words, separated by whitespace
# "intro" refers to the subject line plus the first 300 characters
# of the message body
#
# All text is (including email addresses) is converted to lower case.
#
# Lists of email names ("my_addresses" and "whitelist")
# may contain domain names also.
# My own identity: used for whitelisting and recipient checks.
my_address = "pballard@ozemail.com.au"
my_isp_domain = "ozemail.com.au"
# you can add secondary email addresses to this list of strings
my_addresses = [my_address]
# words in the intro, or early in the body, which indicate mham
# (Mine are edited out. Add your own).
whiteintrowords = [] # a list of strings
# strings in the subject line (or early in the body) which indicate mail is mham
# (Mine are edited out. Add your own).
whiteintrostrings = [] # a list of strings
# whitelist/blackdomains are file paths; the files are parsed at import time
MAILLISTDIR = "/home/pballard/mail/"
whitelist = MAILLISTDIR + "whitelist.txt"
blackdomains = MAILLISTDIR + "blackdomains.txt"
# making most blackwords tricky (not just the usual suspects in the 1st and last lines)
# reduced (false) hams from 100 to 72,
# and increased time from 38 to 46 seconds for python 2.4a3
# (48 to 66 for python 2.2)
# for a corpus of 4149 messages (284 whams)
# Words matched with obfuscation-tolerant regexps (see email_spam.trickyregexp).
trickywords = ["xanax", "viagra", "cialis", "vicodin", "valium", "penis", "pharmacy",
               "incest", "explicit", "ejaculation", "porno", "erection", "erections",
               "orgasm", "pussy", "slut", "slutty", "sluts",
               "personals",
               "lottery",
               "insurance", "wholesale", "mastercard",
               "medication", "meds", "adipren", "prescription",
               "hydrocodone",
               "spyware",
               "diploma", "diplomas",
               "mortgage", "refinance",
               "rolex"]
# dont put mlm in tricky words, because it can be mistaken for "mim"
# which might be a valid word (e.g. "mim" is an Islamic word).
blackwords = ["elkedeseen", "mlm"]
# Multi-word phrases; email_spam turns each into a regexp tolerating
# non-letter junk between the words.
blackphrases = ["improve your size",
                "rock hard",
                "3 inches",
                #"big dick",
                #"huge dick",
                "business offer",
                "business proposal",
                "future mailing",
                "further mailing",
                "business investment",
                "million dollars",
                "utmost confidentiality",
                "ultmost confidentiality",
                "bank transfer",
                "money transfer",
                "account details",
                "foreign accounts",
                "low price",
                "double your money",
                "money back guarantee",
                "credit card",
                "email database",
                "xp professional",
                "traders report",
                "stock dividend",
                "hot stocks",
                "active stock",
                "stock update",
                "lotto games",
                "want a watch", "cheap watch",
                "red light cameras", "speed cameras"]
# Raw regexps applied to the lower-cased body.
blackregexps = [r"font-size:\s*[01][^0-9\.]", # tiny html font
                r"[a-z]<!.*?>[a-z]", # html comments in middle of a word
                r"http://[0-9]+\.[0-9]+\.[0-9]+\.", # numerical url
                #r"^ =\?", # subject begins with "=?" # covered in function bad_subject()
                r"pills[0-9]*\.gif"]
blackstrings = ["fuck",
                # currently non-alphanumerics go in strings only
                "auto-generated",
                # check for the string because it's often in a domain name
                "casino"]
# .zip is OK from whitelisted people, but some spammers send it
black_attachment_types = ["com", "cpl", "exe", "pif", "scr", "vbs", "bat", "zip"]
# a function to check the X-Mailer field (the string xmailer here)
# to see if it's really from me.
# Note, in contrast to everything else, the X-Mailer field has NOT
# been converted to lower case.
# I've put this in email_defs.py because it probably would vary wildly
# from user to user
def really_from_me(xmailer):
    """Return True when the (case-sensitive) X-Mailer header begins with
    "Emacs", i.e. the mail was sent from my own client."""
    return xmailer.startswith("Emacs")
| [
[
14,
0,
0.2176,
0.0059,
0,
0.66,
0,
17,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.2235,
0.0059,
0,
0.66,
0.0345,
728,
4,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.2353,
0.0059,
0,
0.... | [
"SYSMAILDIR = \"/home/pballard/sysmail/\"",
"MAILNAME = SYSMAILDIR + \"pballard\"",
"srcfilename = MAILNAME + \".raw\"",
"storefilename = MAILNAME + \".store\"",
"spamfilename = MAILNAME + \".spam\"",
"hamfilename = MAILNAME + \".ham\"",
"whamfilename = MAILNAME + \".wham\"",
"mhamfilename = MAILNAME ... |
# email_filter.py
#
# This file is the "bookkeeping" part of Peter Ballard's email spam filter.
# This is the top level program.
# It reads the user options, opens and closes the files, and sends emails
# to the appropriate files.
# However it is the function check() in the module email_spam, which this
# program calls, which does the actual checking.
#
# Feel free to modify this program to your taste.
# But the intention is that you can use this program unchanged,
# and that you only need to modify email_defs.py
#
# Version 1.01, 7-Apr-2005:
# - all messages to stdout;
# - neater way to clear srcfilename
# Version 1.0, 4-Apr-2005.
#
#
#Copyright (C) 2005 Peter Ballard
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Standard Python modules
import sys
import string
import re
import email
import email.Errors
#import mailbox
import os
# Python modules which are part of this package
import email_defs
import email_spam
##########################################
# Set up the options
##########################################
# set up the defaults (for non-test mode)
REAL = 1  # 1 = real run (archives and truncates the source mailbox); 0 = -test mode
srcfilename = email_defs.srcfilename
storefilename = email_defs.storefilename
spamfilename = email_defs.spamfilename
hamfilename = email_defs.hamfilename
whamfilename = email_defs.whamfilename
mhamfilename = email_defs.mhamfilename
panicfilename = email_defs.panicfilename
# all output files are appended to in real mode
spammode = "ab"
hammode = "ab"
whammode = "ab"
mhammode = "ab"
panicmode = "ab"
detail = 0   # 1 = print per-rule hit counts at the end
summary = 1  # 1 = print the one-line totals
verbose = 1  # 1 = print one line per message
limit = 0    # non-zero = only process the last `limit` messages
argv = sys.argv
# -u: print usage and exit
if argv.count("-u"):
    sys.stdout.write("python ~/software/email_filter.py")
    sys.stdout.write(" [-test] \\\n")
    sys.stdout.write(" [-src srcfilename] (default is " + srcfilename + ") \\\n")
    sys.stdout.write(" [-[a]spam spamfilename] (default is " + spamfilename + ") \\\n")
    sys.stdout.write(" [-[a]ham hamfilename] (default is " + hamfilename + ") \\\n")
    sys.stdout.write(" [-[a]wham whamfilename] (default is " + whamfilename + ") \\\n")
    # fixed: line previously showed "whamfilename" for the mham option
    sys.stdout.write(" [-[a]mham mhamfilename] (default is " + mhamfilename + ") \\\n")
    sys.stdout.write(" [-[a]panic panicfilename] (default is " + panicfilename + ") \\\n")
    sys.stdout.write(" [-limit n] (default is " + str(limit) + ") \\\n")
    sys.stdout.write(" [-detail n] (default is " + str(detail) + ") \\\n")
    sys.stdout.write(" [-summary n] (default is " + str(summary) + ") \\\n")
    # fixed: the closing ")n" was missing its backslash, so no newline was printed
    sys.stdout.write(" [-verbose n] (default is " + str(verbose) + ")\n")
    sys.exit(0)
if argv.count("-test"):
    # A different set of defaults for test mode
    REAL = 0
    argv.remove("-test")
    srcfilename = email_defs.test_srcfilename
    spamfilename = email_defs.test_spamfilename
    hamfilename = email_defs.test_hamfilename
    whamfilename = email_defs.test_whamfilename
    mhamfilename = email_defs.test_mhamfilename
    panicfilename = email_defs.test_panicfilename
    spammode = "wb"
    hammode = "wb"
    whammode = "wb"
    mhammode = "wb"
    panicmode = "wb"
    detail = 1
    summary = 1
    verbose = 1
    limit = 0
# names settable from the command line in -test mode
# (fixed: the list said "panicname", so -panicfilename could never be set)
options = ["srcfilename", "spamfilename", "whamfilename", "mhamfilename",
           "hamfilename", "panicfilename",
           "spammode", "whammode", "mhammode", "hammode", "panicmode",
           "detail", "summary", "verbose", "limit"]
if REAL==0:
    i = 1
    while i<len(argv):
        if i+1<len(argv) and argv[i][0]=="-" and (argv[i][1:] in options):
            # evaluate - no checking here!
            # SECURITY NOTE: exec of raw argv is acceptable only for a
            # personal script; never expose this to untrusted input.
            exec (argv[i][1:] + " = " + argv[i+1])
            i += 2
        else:
            raise Exception("Bad input option " + argv[i])
elif len(argv)>1:
    raise Exception("Option " + argv[1] + " only works with -test")
##########################################
# open the various files
##########################################
# Open the source mailbox and all output files.
fp = open(srcfilename, "rb")
spamfile = open(spamfilename, spammode)
hamfile = open(hamfilename, hammode)
# when wham and ham share a file, alias the handle instead of reopening
if whamfilename==hamfilename:
    whamfile = hamfile
else:
    whamfile = open(whamfilename, whammode)
# NOTE(review): mhamfile is only defined in this branch; later code guards
# every use with the same filename comparison
if mhamfilename!=whamfilename:
    mhamfile = open(mhamfilename, mhammode)
panicfile = open(panicfilename, panicmode)
##########################################
# Read the source file (fp),
# Creating a list, msgs.
# Each list item is a string which is a single email message.
##########################################
# Split the mbox-format source into one string per message; messages are
# delimited by lines starting with "From ".
msgs = [] # each list element is an email message
thismsg = [] # each list element is a line of the current message
while 1:
    line = fp.readline()
    if line=="":
        break
    if line.startswith("From "):
        if len(thismsg):
            # this is MUCH faster than a loop of string concatenations,
            # because a big (immutable) string is only built once.
            msgs.append("".join(thismsg))
        thismsg = [line]
    else:
        thismsg.append(line)
# end condition
if len(thismsg):
    msgs.append("".join(thismsg))
fp.close()
# shorten the list if limit is specified.
# Perhaps this should be done dynamically while reading fp,
# So that msgs does not get ridiculously long.
if limit:
    msgs = msgs[-limit:]
##########################################
# Setup up the arrays WHITEHITS and BLACKHITS
# These are only used in the detailed summary if detail==1
##########################################
# rule -> hit-count maps, reported by the -detail summary
WHITEHITS = {}
BLACKHITS = {}
for word in email_spam.whitelist + email_spam.whiteintroregexps + email_spam.whiteintrostrings:
    WHITEHITS[word] = 0
for word in (email_spam.blackstrings + email_spam.blackregexps + email_spam.blackdomains
             + email_spam.black_attachment_types):
    BLACKHITS[word] = 0
##########################################
# process each email using results from the email_spam module.
# The algorithm is:
# if (message cannot be parsed):
# it's a "panic"
# elif (sender is in whitelist):
# it's "wham"
# elif (mail contains a string indicating it's good):
# it's "mham"
# elif (mail looks like spam):
# it's "spam"
# else:
# it's "ham"
##########################################
# Classify every message, write it to the matching output file, and count
# per-rule hits for the -detail report.
spams = 0
hams = 0
whams = 0
mhams = 0
panics = 0
for msgstring in msgs:
    try:
        msg = email.message_from_string(msgstring)
    except Exception:
        # unparseable message: keep a raw copy for manual inspection
        if verbose:
            sys.stdout.write("panic\n")
        panicfile.write(msgstring)
        panics += 1
        continue
    (status, matchword) = email_spam.check(msg)
    if status=="wham":
        whams = whams + 1
        if whamfile:
            whamfile.write(msgstring)
            WHITEHITS[matchword] = WHITEHITS.get(matchword, 0) + 1
    elif status=="mham":
        mhams = mhams + 1
        # always write to whamfile, and optionally ALSO to mhamfile
        # (as a record of addresses that need to be whitelisted)
        whamfile.write(msgstring)
        if mhamfilename!=whamfilename:
            mhamfile.write(msgstring)
        WHITEHITS[matchword] = WHITEHITS.get(matchword, 0) + 1
    elif status=="spam":
        spams = spams+1
        spamfile.write(msgstring)
        BLACKHITS[matchword] = BLACKHITS.get(matchword, 0) + 1
    else:
        hamfile.write(msgstring)
        hams = hams+1
    if verbose:
        # print a one-line summary per email
        sys.stdout.write(status + "\t" + matchword + "\t")
        sys.stdout.write(email_spam.subject(msg) + "\n")
# one-line totals; str() on ints prints identically to the old backtick repr
if summary:
    sys.stdout.write(str(len(msgs)) + " messages: "
                     + str(whams) + " whams, " + str(mhams) + " mhams, "
                     + str(hams) + " hams, " + str(spams) + " spams, "
                     + str(panics) + " panics\n")
# Close the output files; aliased handles are only closed once.
spamfile.close()
hamfile.close()
panicfile.close()
if whamfilename!=hamfilename:
    whamfile.close()
if mhamfilename!=whamfilename:
    mhamfile.close()
##########################################
# For real email reading (as opposed to testing)
# empty srcfilename into storefilename.
##########################################
if REAL:
    # NOTE(review): shell command built from config-file paths; fine for a
    # personal script since the filenames are trusted local constants
    os.system("cat " + srcfilename + " >> " + storefilename)
    # reduce srcfilename to zero size
    fp = open(srcfilename, "w")
    fp.close()
    # the old way
    #os.system("/bin/rm -f " + srcfilename)
    #os.system("touch " + srcfilename)
##########################################
# detailed summary, if requested
##########################################
# Per-rule hit counts, highest first: "+" lines are white rules, "-" black.
if detail:
    for marker in ["+", "-"]:
        if marker=="+":
            hits = WHITEHITS
        else:
            hits = BLACKHITS
        # sort (count, rule) tuples descending; renamed locals so the
        # builtins dict/tuple are no longer shadowed
        counts = []
        for key in hits.keys():
            counts.append((hits[key], key))
        counts.sort()
        counts.reverse()
        for count, rule in counts:
            sys.stdout.write(marker + str(count) + "\t" + rule + "\n")
| [
[
1,
0,
0.1,
0.1,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.2,
0.1,
0,
0.66,
0.1429,
890,
0,
1,
0,
0,
890,
0,
0
],
[
1,
0,
0.3,
0.1,
0,
0.66,
0.2857,
... | [
"import sys",
"import string",
"import re",
"import email",
"import email.Errors",
"import os",
"import email_defs",
"import email_spam"
] |
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import sys
import os
# sys.path entries are not tilde-expanded by Python, so the original
# literal "~/..." entry could never be found; expanduser makes it effective
sys.path.append(os.path.expanduser("~/5a-bucarest-vallee-cedric/asois/project"))
import orange
class Filter:
	"""Base class for mail filters: subclasses implement isSpam()."""
	def __init__(self):
		print("init filt")
		self.file = ""  # path of the filter's data file, set by subclasses
	def isSpam(self, mail):
		# BUG FIX: `self` was missing, so calling instance.isSpam(mail)
		# raised TypeError; same for userDecision below.
		pass
	def userDecision(self):
		pass
class kwordFilter(Filter):
	"""Filter that flags mail containing any blacklisted keyword."""
	def __init__(self):
		print("init keyword filter")
		self.file = "filters/contentBlock.txt"
		# `with` guarantees the file is closed even if reading raises
		with open(self.file, 'r') as f:
			self.list = [line.replace("\n", "") for line in f.readlines()]
	def isSpam(self, mail):
		"""Return True when any blacklisted keyword occurs in str(mail)."""
		for keyword in self.list:
			if keyword in str(mail):
				return True
		return False
if __name__ == "__main__":
	# smoke test: constructing the filter loads filters/contentBlock.txt
	obj = kwordFilter()
| [
[
1,
0,
0.0811,
0.027,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
8,
0,
0.1081,
0.027,
0,
0.66,
0.2,
243,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1351,
0.027,
0,
0.66,
... | [
"import sys",
"sys.path.append(\"~/5a-bucarest-vallee-cedric/asois/project\")",
"import orange",
"class Filter:\n\tdef __init__(self):\n\t\tprint(\"init filt\")\n\t\tself.file=\"\"\n\n\tdef isSpam(mail):\n\t\tpass\n\tdef userDecision():",
"\tdef __init__(self):\n\t\tprint(\"init filt\")\n\t\tself.file=\"\""... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload(	FCKeditorConnectorBase,
							UploadFileCommandMixin,
							BaseHttpMixin, BaseHtmlMixin):
	"""Stand-alone "quick upload" connector: handles one file upload per request."""
	def doResponse(self):
		"Main function. Process the request, set headers and return a string as response."
		# Check if this connector is disabled
		if not(Config.Enabled):
			return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
		command = 'QuickUpload'
		# The file type (from the QueryString, by default 'File').
		resourceType = self.request.get('Type','File')
		currentFolder = "/"
		# Check for invalid paths (vestigial: currentFolder is fixed above)
		if currentFolder is None:
			return self.sendUploadResults(102, '', '', "")
		# Check if it is an allowed command
		if ( not command in Config.ConfigAllowedCommands ):
			return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
		if ( not resourceType in Config.ConfigAllowedTypes ):
			return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
		# Setup paths
		self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
		self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
		if not self.userFilesFolder: # no absolute path given (dangerous...)
			self.userFilesFolder = mapServerPath(self.environ,
								self.webUserFilesFolder)
		# Ensure that the directory exists.
		if not os.path.exists(self.userFilesFolder):
			try:
				# BUG FIX: was self.createServerFoldercreateServerFolder
				# (duplicated name), which always raised AttributeError and
				# returned the error below even when creation could succeed
				self.createServerFolder( self.userFilesFolder )
			except Exception:
				return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
		# File upload doesn't have to return XML, so intercept here
		return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
	try:
		# Create a Connector Instance
		conn = FCKeditorQuickUpload()
		data = conn.doResponse()
		for header in conn.headers:
			if header is not None:
				print('%s: %s' % header)
		print('')
		print(data)
	except:
		# deliberate top-level CGI boundary: report the exception as
		# plain text instead of producing a blank 500 response
		print("Content-Type: text/plain")
		print('')
		import cgi
		cgi.print_exception()
| [
[
1,
0,
0.0213,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.2,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.6... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorQuickUpload(\tFCKeditorConnectorBase,\n\t\t\t\t\t\t\tUploadFileCommandMixin,\n\t\t\t... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=None):
	"""
	Converts the special characters '<', '>', '&' and '"'.
	RFC 1866 specifies that these characters be represented in HTML as
	&lt; &gt; &amp; and &quot; respectively.  This copy of the file had
	lost the entity text (each replacement was a no-op), which is restored
	here.  `replace` may override the replacement function; by default the
	built-in str.replace method is used (the old string.replace default no
	longer exists in Python 3).
	"""
	if replace is None:
		replace = lambda s, old, new: s.replace(old, new)
	text = replace(text, '&', '&amp;') # must be done 1st
	text = replace(text, '<', '&lt;')
	text = replace(text, '>', '&gt;')
	text = replace(text, '"', '&quot;')
	return text
def convertToXmlAttribute(value):
	"""Return value (or "" when None) escaped for use in an XML attribute."""
	return escape(value if value is not None else "")
class BaseHttpMixin(object):
	"""Mixin providing the HTTP response-header boilerplate for connectors."""
	def setHttpHeaders(self, content_type='text/xml'):
		"Purpose: to prepare the headers for the xml to return"
		# Prevent the browser from caching the result: date in the past,
		# always-modified timestamp, HTTP/1.1 and HTTP/1.0 cache directives.
		no_cache_headers = [
			('Expires', 'Mon, 26 Jul 1997 05:00:00 GMT'),
			('Last-Modified', strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())),
			('Cache-Control', 'no-store, no-cache, must-revalidate'),
			('Cache-Control', 'post-check=0, pre-check=0'),
			('Pragma', 'no-cache'),
		]
		for name, value in no_cache_headers:
			self.setHeader(name, value)
		# Set the response format.
		self.setHeader('Content-Type', content_type + '; charset=utf-8')
		return
class BaseXmlMixin(object):
	"""Mixin that renders connector responses as XML."""
	def createXmlHeader(self, command, resourceType, currentFolder, url):
		"Purpose: returns the xml header"
		self.setHttpHeaders()
		header_parts = [
			"""<?xml version="1.0" encoding="utf-8" ?>""",
			"""<Connector command="%s" resourceType="%s">""" % (command, resourceType),
			"""<CurrentFolder path="%s" url="%s" />""" % (
				convertToXmlAttribute(currentFolder),
				convertToXmlAttribute(url),
			),
		]
		return "".join(header_parts)
	def createXmlFooter(self):
		"Purpose: returns the xml footer"
		return """</Connector>"""
	def sendError(self, number, text):
		"Purpose: in the event of an error, return an xml based error"
		self.setHttpHeaders()
		return ("""<?xml version="1.0" encoding="utf-8" ?>"""
				+ """<Connector>"""
				+ self.sendErrorNode(number, text)
				+ """</Connector>""")
	def sendErrorNode(self, number, text):
		# error number 1 carries a human-readable message; others are bare codes
		if number == 1:
			return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
		return """<Error number="%s" />""" % (number)
class BaseHtmlMixin(object):
def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
self.setHttpHeaders("text/html")
"This is the function that sends the results of the uploading process"
"Minified version of the document.domain automatic fix script (#1919)."
"The original script can be found at _dev/domain_fix_template.js"
return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
'errorNumber': errorNo,
'fileUrl': fileUrl.replace ('"', '\\"'),
'fileName': fileName.replace ( '"', '\\"' ) ,
'customMsg': customMsg.replace ( '"', '\\"' ),
}
| [
[
8,
0,
0.1176,
0.1933,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2269,
0.0084,
0,
0.66,
0.1429,
654,
0,
2,
0,
0,
654,
0,
0
],
[
1,
0,
0.2353,
0.0084,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from time import gmtime, strftime",
"import string",
"def escape(text, replac... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload( FCKeditorConnectorBase,
UploadFileCommandMixin,
BaseHttpMixin, BaseHtmlMixin):
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
command = 'QuickUpload'
# The file type (from the QueryString, by default 'File').
resourceType = self.request.get('Type','File')
currentFolder = "/"
# Check for invalid paths
if currentFolder is None:
return self.sendUploadResults(102, '', '', "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
# Setup paths
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFoldercreateServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorQuickUpload()
data = conn.doResponse()
for header in conn.headers:
if not header is None:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0213,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.2,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.6... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorQuickUpload(\tFCKeditorConnectorBase,\n\t\t\t\t\t\t\tUploadFileCommandMixin,\n\t\t\t... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
"The base connector class. Subclass it to extend functionality (see Zope example)"
def __init__(self, environ=None):
"Constructor: Here you should parse request fields, initialize variables, etc."
self.request = FCKeditorRequest(environ) # Parse request
self.headers = [] # Clean Headers
if environ:
self.environ = environ
else:
self.environ = os.environ
# local functions
def setHeader(self, key, value):
self.headers.append ((key, value))
return
class FCKeditorRequest(object):
"A wrapper around the request object"
def __init__(self, environ):
if environ: # WSGI
self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
environ=environ,
keep_blank_values=1)
self.environ = environ
else: # plain old cgi
self.environ = os.environ
self.request = cgi.FieldStorage()
if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
if self.environ['REQUEST_METHOD'].upper()=='POST':
# we are in a POST, but GET query_string exists
# cgi parses by default POST data, so parse GET QUERY_STRING too
self.get_request = cgi.FieldStorage(fp=None,
environ={
'REQUEST_METHOD':'GET',
'QUERY_STRING':self.environ['QUERY_STRING'],
},
)
else:
self.get_request={}
def has_key(self, key):
return self.request.has_key(key) or self.get_request.has_key(key)
def get(self, key, default=None):
if key in self.request.keys():
field = self.request[key]
elif key in self.get_request.keys():
field = self.get_request[key]
else:
return default
if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
return field
else:
return field.value
| [
[
8,
0,
0.1667,
0.2778,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3111,
0.0111,
0,
0.66,
0.1429,
934,
0,
2,
0,
0,
934,
0,
0
],
[
1,
0,
0.3333,
0.0111,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi, os",
"from fckutil import *",
"from fckcommands import * \t# defa... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
"WSGI entry point. Run the connector"
if environ['SCRIPT_NAME'].endswith("connector.py"):
conn = FCKeditorConnector(environ)
elif environ['SCRIPT_NAME'].endswith("upload.py"):
conn = FCKeditorQuickUpload(environ)
else:
start_response ("200 Ok", [('Content-Type','text/html')])
yield "Unknown page requested: "
yield environ['SCRIPT_NAME']
return
try:
# run the connector
data = conn.doResponse()
# Start WSGI response:
start_response ("200 Ok", conn.headers)
# Send response text
yield data
except:
start_response("500 Internal Server Error",[("Content-type","text/html")])
file = StringIO()
cgitb.Hook(file = file).handle()
yield file.getvalue()
| [
[
8,
0,
0.2586,
0.431,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.5,
0.0172,
0,
0.66,
0.2,
385,
0,
1,
0,
0,
385,
0,
0
],
[
1,
0,
0.5172,
0.0172,
0,
0.66,
0... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from connector import FCKeditorConnector",
"from upload import FCKeditorQuickUp... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
GetFoldersCommandMixin,
GetFoldersAndFilesCommandMixin,
CreateFolderCommandMixin,
UploadFileCommandMixin,
BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
"The Standard connector class."
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
s = ""
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
# Make sure we have valid inputs
for key in ("Command","Type","CurrentFolder"):
if not self.request.has_key (key):
return
# Get command, resource type and current folder
command = self.request.get("Command")
resourceType = self.request.get("Type")
currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
# Check for invalid paths
if currentFolder is None:
if (command == "FileUpload"):
return self.sendUploadResults( errorNo = 102, customMsg = "" )
else:
return self.sendError(102, "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendError( 1, 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendError( 1, 'Invalid type specified' )
# Setup paths
if command == "QuickUpload":
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
else:
self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
self.webUserFilesFolder = Config.FileTypesPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
if (command == "FileUpload"):
return self.uploadFile(resourceType, currentFolder)
# Create Url
url = combinePaths( self.webUserFilesFolder, currentFolder )
# Begin XML
s += self.createXmlHeader(command, resourceType, currentFolder, url)
# Execute the command
selector = {"GetFolders": self.getFolders,
"GetFoldersAndFiles": self.getFoldersAndFiles,
"CreateFolder": self.createFolder,
}
s += selector[command](resourceType, currentFolder)
s += self.createXmlFooter()
return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorConnector()
data = conn.doResponse()
for header in conn.headers:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0127,
0.0127,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.038,
0.0127,
0,
0.66,
0.1667,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0506,
0.0127,
0,
0... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckoutput import * \t# base http, xml and html output mixins",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorConnector(\tF... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=string.replace):
"""
Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as < > and & respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
text = replace(text, '&', '&') # must be done 1st
text = replace(text, '<', '<')
text = replace(text, '>', '>')
text = replace(text, '"', '"')
return text
def convertToXmlAttribute(value):
if (value is None):
value = ""
return escape(value)
class BaseHttpMixin(object):
def setHttpHeaders(self, content_type='text/xml'):
"Purpose: to prepare the headers for the xml to return"
# Prevent the browser from caching the result.
# Date in the past
self.setHeader('Expires','Mon, 26 Jul 1997 05:00:00 GMT')
# always modified
self.setHeader('Last-Modified',strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime()))
# HTTP/1.1
self.setHeader('Cache-Control','no-store, no-cache, must-revalidate')
self.setHeader('Cache-Control','post-check=0, pre-check=0')
# HTTP/1.0
self.setHeader('Pragma','no-cache')
# Set the response format.
self.setHeader( 'Content-Type', content_type + '; charset=utf-8' )
return
class BaseXmlMixin(object):
def createXmlHeader(self, command, resourceType, currentFolder, url):
"Purpose: returns the xml header"
self.setHttpHeaders()
# Create the XML document header
s = """<?xml version="1.0" encoding="utf-8" ?>"""
# Create the main connector node
s += """<Connector command="%s" resourceType="%s">""" % (
command,
resourceType
)
# Add the current folder node
s += """<CurrentFolder path="%s" url="%s" />""" % (
convertToXmlAttribute(currentFolder),
convertToXmlAttribute(url),
)
return s
def createXmlFooter(self):
"Purpose: returns the xml footer"
return """</Connector>"""
def sendError(self, number, text):
"Purpose: in the event of an error, return an xml based error"
self.setHttpHeaders()
return ("""<?xml version="1.0" encoding="utf-8" ?>""" +
"""<Connector>""" +
self.sendErrorNode (number, text) +
"""</Connector>""" )
def sendErrorNode(self, number, text):
if number != 1:
return """<Error number="%s" />""" % (number)
else:
return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
class BaseHtmlMixin(object):
def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
self.setHttpHeaders("text/html")
"This is the function that sends the results of the uploading process"
"Minified version of the document.domain automatic fix script (#1919)."
"The original script can be found at _dev/domain_fix_template.js"
return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
'errorNumber': errorNo,
'fileUrl': fileUrl.replace ('"', '\\"'),
'fileName': fileName.replace ( '"', '\\"' ) ,
'customMsg': customMsg.replace ( '"', '\\"' ),
}
| [
[
8,
0,
0.1176,
0.1933,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2269,
0.0084,
0,
0.66,
0.1429,
654,
0,
2,
0,
0,
654,
0,
0
],
[
1,
0,
0.2353,
0.0084,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from time import gmtime, strftime",
"import string",
"def escape(text, replac... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
"The base connector class. Subclass it to extend functionality (see Zope example)"
def __init__(self, environ=None):
"Constructor: Here you should parse request fields, initialize variables, etc."
self.request = FCKeditorRequest(environ) # Parse request
self.headers = [] # Clean Headers
if environ:
self.environ = environ
else:
self.environ = os.environ
# local functions
def setHeader(self, key, value):
self.headers.append ((key, value))
return
class FCKeditorRequest(object):
"A wrapper around the request object"
def __init__(self, environ):
if environ: # WSGI
self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
environ=environ,
keep_blank_values=1)
self.environ = environ
else: # plain old cgi
self.environ = os.environ
self.request = cgi.FieldStorage()
if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
if self.environ['REQUEST_METHOD'].upper()=='POST':
# we are in a POST, but GET query_string exists
# cgi parses by default POST data, so parse GET QUERY_STRING too
self.get_request = cgi.FieldStorage(fp=None,
environ={
'REQUEST_METHOD':'GET',
'QUERY_STRING':self.environ['QUERY_STRING'],
},
)
else:
self.get_request={}
def has_key(self, key):
return self.request.has_key(key) or self.get_request.has_key(key)
def get(self, key, default=None):
if key in self.request.keys():
field = self.request[key]
elif key in self.get_request.keys():
field = self.get_request[key]
else:
return default
if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
return field
else:
return field.value
| [
[
8,
0,
0.1667,
0.2778,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3111,
0.0111,
0,
0.66,
0.1429,
934,
0,
2,
0,
0,
934,
0,
0
],
[
1,
0,
0.3333,
0.0111,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi, os",
"from fckutil import *",
"from fckcommands import * \t# defa... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
"WSGI entry point. Run the connector"
if environ['SCRIPT_NAME'].endswith("connector.py"):
conn = FCKeditorConnector(environ)
elif environ['SCRIPT_NAME'].endswith("upload.py"):
conn = FCKeditorQuickUpload(environ)
else:
start_response ("200 Ok", [('Content-Type','text/html')])
yield "Unknown page requested: "
yield environ['SCRIPT_NAME']
return
try:
# run the connector
data = conn.doResponse()
# Start WSGI response:
start_response ("200 Ok", conn.headers)
# Send response text
yield data
except:
start_response("500 Internal Server Error",[("Content-type","text/html")])
file = StringIO()
cgitb.Hook(file = file).handle()
yield file.getvalue()
| [
[
8,
0,
0.2586,
0.431,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.5,
0.0172,
0,
0.66,
0.2,
385,
0,
1,
0,
0,
385,
0,
0
],
[
1,
0,
0.5172,
0.0172,
0,
0.66,
0... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from connector import FCKeditorConnector",
"from upload import FCKeditorQuickUp... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
GetFoldersCommandMixin,
GetFoldersAndFilesCommandMixin,
CreateFolderCommandMixin,
UploadFileCommandMixin,
BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
"The Standard connector class."
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
s = ""
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
# Make sure we have valid inputs
for key in ("Command","Type","CurrentFolder"):
if not self.request.has_key (key):
return
# Get command, resource type and current folder
command = self.request.get("Command")
resourceType = self.request.get("Type")
currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
# Check for invalid paths
if currentFolder is None:
if (command == "FileUpload"):
return self.sendUploadResults( errorNo = 102, customMsg = "" )
else:
return self.sendError(102, "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendError( 1, 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendError( 1, 'Invalid type specified' )
# Setup paths
if command == "QuickUpload":
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
else:
self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
self.webUserFilesFolder = Config.FileTypesPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
if (command == "FileUpload"):
return self.uploadFile(resourceType, currentFolder)
# Create Url
url = combinePaths( self.webUserFilesFolder, currentFolder )
# Begin XML
s += self.createXmlHeader(command, resourceType, currentFolder, url)
# Execute the command
selector = {"GetFolders": self.getFolders,
"GetFoldersAndFiles": self.getFoldersAndFiles,
"CreateFolder": self.createFolder,
}
s += selector[command](resourceType, currentFolder)
s += self.createXmlFooter()
return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorConnector()
data = conn.doResponse()
for header in conn.headers:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
| [
[
1,
0,
0.0127,
0.0127,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.038,
0.0127,
0,
0.66,
0.1667,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0506,
0.0127,
0,
0... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckoutput import * \t# base http, xml and html output mixins",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorConnector(\tF... |
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the integration file for Python.
"""
import cgi
import os
import re
import string
def escape(text, replace=string.replace):
"""Converts the special characters '<', '>', and '&'.
RFC 1866 specifies that these characters be represented
in HTML as < > and & respectively. In Python
1.5 we use the new string.replace() function for speed.
"""
text = replace(text, '&', '&') # must be done 1st
text = replace(text, '<', '<')
text = replace(text, '>', '>')
text = replace(text, '"', '"')
text = replace(text, "'", ''')
return text
# The FCKeditor class
class FCKeditor(object):
def __init__(self, instanceName):
self.InstanceName = instanceName
self.BasePath = '/fckeditor/'
self.Width = '100%'
self.Height = '200'
self.ToolbarSet = 'Default'
self.Value = '';
self.Config = {}
def Create(self):
return self.CreateHtml()
def CreateHtml(self):
HtmlValue = escape(self.Value)
Html = ""
if (self.IsCompatible()):
File = "fckeditor.html"
Link = "%seditor/%s?InstanceName=%s" % (
self.BasePath,
File,
self.InstanceName
)
if (self.ToolbarSet is not None):
Link += "&Toolbar=%s" % self.ToolbarSet
# Render the linked hidden field
Html += "<input type=\"hidden\" id=\"%s\" name=\"%s\" value=\"%s\" style=\"display:none\" />" % (
self.InstanceName,
self.InstanceName,
HtmlValue
)
# Render the configurations hidden field
Html += "<input type=\"hidden\" id=\"%s___Config\" value=\"%s\" style=\"display:none\" />" % (
self.InstanceName,
self.GetConfigFieldString()
)
# Render the editor iframe
Html += "<iframe id=\"%s\__Frame\" src=\"%s\" width=\"%s\" height=\"%s\" frameborder=\"0\" scrolling=\"no\"></iframe>" % (
self.InstanceName,
Link,
self.Width,
self.Height
)
else:
if (self.Width.find("%%") < 0):
WidthCSS = "%spx" % self.Width
else:
WidthCSS = self.Width
if (self.Height.find("%%") < 0):
HeightCSS = "%spx" % self.Height
else:
HeightCSS = self.Height
Html += "<textarea name=\"%s\" rows=\"4\" cols=\"40\" style=\"width: %s; height: %s;\" wrap=\"virtual\">%s</textarea>" % (
self.InstanceName,
WidthCSS,
HeightCSS,
HtmlValue
)
return Html
def IsCompatible(self):
if (os.environ.has_key("HTTP_USER_AGENT")):
sAgent = os.environ.get("HTTP_USER_AGENT", "")
else:
sAgent = ""
if (sAgent.find("MSIE") >= 0) and (sAgent.find("mac") < 0) and (sAgent.find("Opera") < 0):
i = sAgent.find("MSIE")
iVersion = float(sAgent[i+5:i+5+3])
if (iVersion >= 5.5):
return True
return False
elif (sAgent.find("Gecko/") >= 0):
i = sAgent.find("Gecko/")
iVersion = int(sAgent[i+6:i+6+8])
if (iVersion >= 20030210):
return True
return False
elif (sAgent.find("Opera/") >= 0):
i = sAgent.find("Opera/")
iVersion = float(sAgent[i+6:i+6+4])
if (iVersion >= 9.5):
return True
return False
elif (sAgent.find("AppleWebKit/") >= 0):
p = re.compile('AppleWebKit\/(\d+)', re.IGNORECASE)
m = p.search(sAgent)
if (m.group(1) >= 522):
return True
return False
else:
return False
def GetConfigFieldString(self):
sParams = ""
bFirst = True
for sKey in self.Config.keys():
sValue = self.Config[sKey]
if (not bFirst):
sParams += "&"
else:
bFirst = False
if (sValue):
k = escape(sKey)
v = escape(sValue)
if (sValue == "true"):
sParams += "%s=true" % k
elif (sValue == "false"):
sParams += "%s=false" % k
else:
sParams += "%s=%s" % (k, v)
return sParams
| [
[
8,
0,
0.0719,
0.1375,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.15,
0.0063,
0,
0.66,
0.1667,
934,
0,
1,
0,
0,
934,
0,
0
],
[
1,
0,
0.1562,
0.0063,
0,
0.66,
... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi",
"import os",
"import re",
"import string",
"def escape(text,... |
#-------------------------------------------------------------------------------
# Name: analyze_graph
# Purpose:
#
# Author: Azfar Khandoker
#
# Created: 10/04/2013
# Copyright: (c) Azfar Khandoker 2013
# Licence: <your licence>
#-------------------------------------------------------------------------------
from igraph import *
#this should be *JUST* the filename, not the path to it
#for example, input would be: "apple.orange.gml"
filename = raw_input("filename: ")
#the graphs should be located in the graphs/
#directory, which is one level above this
#working directory
file_to_read = "../graphs/" + filename
print ("reading " + file_to_read)
g = Graph.Read_GML(file_to_read)
summary(g)
print("\naverage path length = " + str(g.average_path_length()))
#this should gives us an array of
#3 elements with the 0th element being
#the seed word, the 1th element being the
#target word and the 2th element being "gml"
result = filename.split('.')
seedWord = result[0]
targetWord = result[1]
print ("seed word = \"" + seedWord + "\"")
print ("target word = \"" + targetWord + "\"")
a = g.vs.select(label_eq=seedWord)[0]
b = g.vs.select(label_eq=targetWord)[0]
path = g.get_all_shortest_paths(a,to=b)[0]
for i in path:
print "%d: %s" % (i, g.vs[i]["label"])
print "Length: %d" % len(path)
#plot(g, "output.png", margin = 50)
| [
[
1,
0,
0.2264,
0.0189,
0,
0.66,
0,
107,
0,
1,
0,
0,
107,
0,
0
],
[
14,
0,
0.3019,
0.0189,
0,
0.66,
0.0625,
275,
3,
1,
0,
0,
821,
10,
1
],
[
14,
0,
0.3962,
0.0189,
0,
... | [
"from igraph import *",
"filename = raw_input(\"filename: \")",
"file_to_read = \"../graphs/\" + filename",
"print (\"reading \" + file_to_read)",
"g = Graph.Read_GML(file_to_read)",
"summary(g)",
"print(\"\\naverage path length = \" + str(g.average_path_length()))",
"result = filename.split('.')",
... |
#-------------------------------------------------------------------------------
# Name: amazon_generator
# Purpose:
#
# Author: Azfar Khandoker
#
# Created: 17/04/2013
# Copyright: (c) Azfar Khandoker 2013
# Licence: <your licence>
#-------------------------------------------------------------------------------
from igraph import *
import re
seeds = [
'las vegas',
'the beatles',
'oreo',
'zebra',
'sony'
]
k = 4
#read the GML data into memory so we do not
#need to keep the GML file open
f = open('amazon.gml', 'r')
g = Graph.Read_GML(f)
f.close()
for seed in seeds:
count = 0
#this regular expression allows only the whole word of the
#seed to be matched
#in other words, if there is a letter before or after the
#seed word, we do not count it as the seed word
#therefore with a seed of 'car', 'card' nor 'scar' will get
#matched and will be ignored
regex = re.compile('.*([^a-z]|\A)' + seed + '([^a-z]|$).*')
for v in g.vs:
#see if the label of the vertex matches the regex
#we first transform the label to lower case to avoid
#the case-sensitivity issue
if regex.match(v['label'].lower()) != None:
#returns a subgraph rooted at the matched vertex
#containing all verticies at most k hops away from it
#this is called the neighborhood of a vertex in a graph
neighborhood = g.neighborhood(int(v['id']), k)
#create a subgraph of the original graph containing
#all the verticies that are in the neighborhood of the
#matched vertex
subgraph = g.subgraph(neighborhood)
#many verticies may match the regular expression,
#but not all of them will generate interesting neighborhoods
#therefore, we only consider those verticies whose
#neighborhoods will generate subgraphs of the original
#graph that have diameter at least k
#since multiple of these such graphs are possible,
#we use a counter to allow for unique filenames when outputting
if subgraph.diameter() >= k:
f = open('output/' + seed + '_' + str(count) + '.gml', 'w')
subgraph.write_gml(f)
f.close()
count += 1
if count == 0:
print(seed + 'not found in graph')
| [
[
1,
0,
0.1644,
0.0137,
0,
0.66,
0,
107,
0,
1,
0,
0,
107,
0,
0
],
[
1,
0,
0.1781,
0.0137,
0,
0.66,
0.1429,
540,
0,
1,
0,
0,
540,
0,
0
],
[
14,
0,
0.2466,
0.0959,
0,
... | [
"from igraph import *",
"import re",
"seeds = [\n 'las vegas',\n 'the beatles',\n 'oreo',\n 'zebra',\n 'sony'\n]",
"k = 4",
"f = open('amazon.gml', 'r')",
"g = Graph.Read_GML(f)",
"f.close()",
"for seed in seeds:\n count = 0\n\n #this regular expression allows only the whole word ... |
#original Amazon data from
#http://snap.stanford.edu/data/bigdata/amazon/amazon-meta.txt.gz
f = open("amazon-meta.txt", "r")
#the ASIN id
count = 0
#dictionary associates an ASIN with
#its title and similar products
d = dict()
#list to keep the ASINs ordered according to
#order in which they were encountered
l = []
for line in f:
#seen line starting with 'ASIN'
#check next line for 'title'
if line.startswith("ASIN"):
ASIN = line[6:-1]
continue
#seen line starting with 'title'
#check next line for 'similar'
elif line.startswith(" title"):
title = line[9:-1]
continue
#once we have seen a line beginning with
#'similar', we must have seen the proper
#ASIN and title of the corresponding product
#therefore, we do execute the remainder of the loop
elif line.startswith(" similar"):
similar = line[12:-1].split(' ')[1:]
#ignore any other lines, read next line
else:
continue
l.append(ASIN)
#add ASIN entry to dictionary with its information
d[ASIN] = [count, title, similar]
count += 1
f.close()
f1 = open("edge-list.txt", "w")
f2 = open("vertex-labels.txt", "w")
for asin in l:
#get the ID for this ASIN
asinID = str(d[asin][0])
#write the edge list for this data in igraph-friendly format
#only add an entry to the edge list if the source and target
#of the edge is within this data set
for similar in d[asin][2]:
if similar in d:
f1.write(asinID + ' ' + str(d[similar][0]) + '\n')
#this file will associate each vertex ID with its 'label'
#attribute
f2.write(asinID + ' ' + str(d[asin][1]) + '\n')
f1.close()
f2.close()
| [
[
14,
0,
0.0435,
0.0145,
0,
0.66,
0,
899,
3,
2,
0,
0,
693,
10,
1
],
[
14,
0,
0.087,
0.0145,
0,
0.66,
0.1,
778,
1,
0,
0,
0,
0,
1,
0
],
[
14,
0,
0.1449,
0.0145,
0,
0.... | [
"f = open(\"amazon-meta.txt\", \"r\")",
"count = 0",
"d = dict()",
"l = []",
"for line in f:\n #seen line starting with 'ASIN'\n #check next line for 'title'\n if line.startswith(\"ASIN\"):\n ASIN = line[6:-1]\n continue\n\n #seen line starting with 'title'",
" if line.startsw... |
#!/bin/usr/python
from igraph import *
print "Filename: "
file = raw_input()
g = Graph.Read_GML(file)
print g.summary()
n = g.degree_distribution().n
f = open(file + "_deg_dist.csv", 'w')
for left, right, count in g.degree_distribution().bins():
print >>f, "%d,%f" % (left, float(float(count)/n))
f.close()
| [
[
1,
0,
0.5,
0.5,
0,
0.66,
0,
107,
0,
1,
0,
0,
107,
0,
0
]
] | [
"from igraph import *"
] |
from igraph import *
num_gs_only = 0; # gs = Google Sets
num_shared = 0;
num_not_gs = 0;
g_name = raw_input("Google sets graph: ")
g = Graph.Read_GML(g_name);
h_name = raw_input("Amazon graph: ")
h = Graph.Read_GML(h_name);
for v in g.vs:
query = v["label"]
query = query.title().replace(" ", "_")
set = h.vs.select(label=query)
if len(set) == 0:
num_gs_only += 1
else:
num_shared += 1
num_not_gs = len(h.vs) - num_shared
print "Num gs only: " + str(num_gs_only)
print "Num shared: " + str(num_shared)
print "Num not gs: " + str(num_not_gs)
p1 = float(num_shared) / float(len(h.vs))
print "Fraction shared: " + str(p1)
| [
[
1,
0,
0.0294,
0.0294,
0,
0.66,
0,
107,
0,
1,
0,
0,
107,
0,
0
],
[
14,
0,
0.0882,
0.0294,
0,
0.66,
0.0714,
866,
1,
0,
0,
0,
0,
1,
0
],
[
14,
0,
0.1176,
0.0294,
0,
... | [
"from igraph import *",
"num_gs_only = 0; # gs = Google Sets",
"num_shared = 0;",
"num_not_gs = 0;",
"g_name = raw_input(\"Google sets graph: \")",
"g = Graph.Read_GML(g_name);",
"h_name = raw_input(\"Amazon graph: \")",
"h = Graph.Read_GML(h_name);",
"for v in g.vs:\n query = v[\"labe... |
#-------------------------------------------------------------------------------
# Name: wiki_generator
# Purpose: Generates subgraphs with k = 4 for the given seed words
#
# Author: Ryan Miller
#
# Created: 17/04/2013
# Copyright: (c) Not Azfar Khandoker 2013
# Licence: <your licence>
#-------------------------------------------------------------------------------
from igraph import *
import re
seeds = [
'Las_Vegas',
'The_Beatles',
'Oreo',
'Zebra',
'Sony'
]
k = 2
print "Reading edge list"
#read the GML data into memory
g = Graph.Read_Edgelist("/homes/millerrv/scratch/links_el_final.txt")
print "Done"
print "Combining data with titles"
#combine data with "title" attribute
titles = []
f = open("/homes/millerrv/scratch/titles-sorted.txt", 'r')
for line in f:
titles.append(line.rstrip())
g.vs["title"] = titles
print "Done"
for seed in seeds:
print "Making subgraph for seed: " + seed
count = 0
vertex = g.vs.select(title=seed)[0]
neighborhood = g.neighborhood(vertex, k, "out")
subgraph = g.subgraph(neighborhood)
new_file = open("/homes/millerrv/scratch/" + seed + "_" + str(count) + ".graphml",
'w')
subgraph.write_graphml(new_file)
new_file.close() | [
[
1,
0,
0.2264,
0.0189,
0,
0.66,
0,
107,
0,
1,
0,
0,
107,
0,
0
],
[
1,
0,
0.2453,
0.0189,
0,
0.66,
0.0769,
540,
0,
1,
0,
0,
540,
0,
0
],
[
14,
0,
0.3396,
0.1321,
0,
... | [
"from igraph import *",
"import re",
"seeds = [\n 'Las_Vegas',\n 'The_Beatles',\n 'Oreo',\n 'Zebra',\n 'Sony'\n]",
"k = 2",
"print(\"Reading edge list\")",
"g = Graph.Read_Edgelist(\"/homes/millerrv/scratch/links_el_final.txt\")",
"print(\"Done\")",
"print(\"Combining data with titles\"... |
from igraph import *
import matplotlib.pyplot as plt
plt.figure()
g = Graph.Read_GML(raw_input())
dd = g.degree_distribution()
xs, ys = zip(*[(left, count / float(dd.n)) for left, _, count in dd.bins()])
plt.xscale('log')
plt.yscale('log')
plt.title(r"$\mathrm{"+name+"\ Degree\ Distribution}$")
plt.xlabel(r"$\mathrm{Degree}$")
plt.ylabel(r"$\mathrm{Probability}$")
plt.plot(xs, ys)
plt.savefig("plot.png")
| [
[
1,
0,
0.0476,
0.0476,
0,
0.66,
0,
107,
0,
1,
0,
0,
107,
0,
0
],
[
1,
0,
0.0952,
0.0476,
0,
0.66,
0.0833,
596,
0,
1,
0,
0,
596,
0,
0
],
[
8,
0,
0.1905,
0.0476,
0,
... | [
"from igraph import *",
"import matplotlib.pyplot as plt",
"plt.figure()",
"g = Graph.Read_GML(raw_input())",
"dd = g.degree_distribution()",
"xs, ys = zip(*[(left, count / float(dd.n)) for left, _, count in dd.bins()])",
"plt.xscale('log')",
"plt.yscale('log')",
"plt.title(r\"$\\mathrm{\"+name+\"\\... |
def cosine_similarity(g,i,j):
adj = g.get_adjacency()
n = 0.0
for k in range(adj.shape[0]):
n = n + (adj[i,k] * adj[k,j])
sim = n / math.sqrt(g.vs[i].degree() * g.vs[j].degree())
return sim
# warning: takes a long time
def all_cosine_similarity(g):
list = []
for i in range(len(g.vs)):
for j in range(len(g.vs)):
list.append([i,j,cosine_similarity(g,i,j)])
return list
| [
[
2,
0,
0.2667,
0.4667,
0,
0.66,
0,
567,
0,
3,
1,
0,
0,
0,
5
],
[
14,
1,
0.1333,
0.0667,
1,
0.06,
0,
761,
3,
0,
0,
0,
985,
10,
1
],
[
14,
1,
0.2,
0.0667,
1,
0.06,
... | [
"def cosine_similarity(g,i,j):\n\tadj = g.get_adjacency()\n\tn = 0.0\n\tfor k in range(adj.shape[0]):\n\t\tn = n + (adj[i,k] * adj[k,j])\n\tsim = n / math.sqrt(g.vs[i].degree() * g.vs[j].degree())\n\treturn sim",
"\tadj = g.get_adjacency()",
"\tn = 0.0",
"\tfor k in range(adj.shape[0]):\n\t\tn = n + (adj[i,k]... |
#-------------------------------------------------------------------------------
# Name: edge_list_to_gml
# Purpose:
#
# Author: Azfar Khandoker
#
# Created: 17/04/2013
# Copyright: (c) Azfar Khandoker 2013
# Licence: <your licence>
#-------------------------------------------------------------------------------
from igraph import *
#read in the edge list
#have to use explicit file handlers to ensure
#data is read properly...otherwise igraph closes
#the file and flushes it when it pleases...messing
#up other scripts in the pipeline
f = open('edge-list.txt', 'r')
g = Graph.Read_Edgelist(f)
#no more need to read from file
f.close()
f = open("vertex-labels.txt", 'r')
#delete any double quotes that appear
#in the labels of the verticies
#double quotes mess things up
labels = []
for line in f:
labels.append(line[:-1].split(' ', 1)[1].replace('"', ''))
f.close()
g.vs["label"] = labels
#transform the graph from edge list format to
#GML format with labels, again using explicit file handles
f = open('amazon.gml', 'w')
g.write_gml(f)
f.close() | [
[
1,
0,
0.2667,
0.0222,
0,
0.66,
0,
107,
0,
1,
0,
0,
107,
0,
0
],
[
14,
0,
0.4222,
0.0222,
0,
0.66,
0.0909,
899,
3,
2,
0,
0,
693,
10,
1
],
[
14,
0,
0.4667,
0.0222,
0,
... | [
"from igraph import *",
"f = open('edge-list.txt', 'r')",
"g = Graph.Read_Edgelist(f)",
"f.close()",
"f = open(\"vertex-labels.txt\", 'r')",
"labels = []",
"for line in f:\n labels.append(line[:-1].split(' ', 1)[1].replace('\"', ''))",
" labels.append(line[:-1].split(' ', 1)[1].replace('\"', '')... |
#!/bin/usr/python
from igraph import *
print "Filename: "
file = raw_input()
g = Graph.Read_GML(file)
print g.summary()
n = g.degree_distribution().n
f = open(file + "_deg_dist.csv", 'w')
for left, right, count in g.degree_distribution().bins():
print >>f, "%d,%f" % (left, float(float(count)/n))
f.close()
| [
[
1,
0,
0.5,
0.5,
0,
0.66,
0,
107,
0,
1,
0,
0,
107,
0,
0
]
] | [
"from igraph import *"
] |
#!/usr/bin/python
import sys
W = {}
W['A'] = []
W['W'] = []
W['F'] = W['W']
for line in sys.stdin:
i = line.find ('#')
if i >= 0:
line = line[:i]
line = line.strip ()
if not len (line):
continue
fields = [x.strip () for x in line.split (';')]
chars = fields[0]
width = fields[1]
if width not in ['A', 'W', 'F']:
continue
if chars.find ('..') > 0:
(start,end) = chars.split ('..')
else:
start = chars
end = chars
start, end = int(start,16), int(end,16)
for i in range (start, end+1):
W[width].append (i)
def write_intervals (S):
S.sort ()
start = S[0];
end = start - 1
for c in S:
if c == end+1:
end += 1
continue
else:
print "{0x%04X, 0x%04X}, " % (start, end)
start = c
end = start
print "{0x%04X, 0x%04X} " % (start, end)
print "table for g_unichar_iswide():"
print
write_intervals (W['W'])
print
print "table for g_unichar_iswide_cjk():"
print
write_intervals (W['A'])
| [
[
1,
0,
0.0556,
0.0185,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
14,
0,
0.0926,
0.0185,
0,
0.66,
0.1111,
94,
0,
0,
0,
0,
0,
6,
0
],
[
14,
0,
0.1111,
0.0185,
0,
0... | [
"import sys",
"W = {}",
"W['A'] = []",
"W['W'] = []",
"W['F'] = W['W']",
"for line in sys.stdin:\n\ti = line.find ('#')\n\tif i >= 0:\n\t\tline = line[:i]\n\tline = line.strip ()\n\tif not len (line):\n\t\tcontinue",
"\ti = line.find ('#')",
"\tif i >= 0:\n\t\tline = line[:i]",
"\t\tline = line[:i]"... |
import sys
import gdb
# Update module path.
dir_ = '/usr/local/share/glib-2.0/gdb'
if not dir_ in sys.path:
sys.path.insert(0, dir_)
from glib import register
register (gdb.current_objfile ())
| [
[
1,
0,
0.1,
0.1,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.2,
0.1,
0,
0.66,
0.2,
430,
0,
1,
0,
0,
430,
0,
0
],
[
14,
0,
0.5,
0.1,
0,
0.66,
0.4,
7... | [
"import sys",
"import gdb",
"dir_ = '/usr/local/share/glib-2.0/gdb'",
"if not dir_ in sys.path:\n sys.path.insert(0, dir_)",
" sys.path.insert(0, dir_)",
"from glib import register",
"register (gdb.current_objfile ())"
] |
import gdb
# This is not quite right, as local vars may override symname
def read_global_var (symname):
return gdb.selected_frame().read_var(symname)
def g_quark_to_string (quark):
if quark == None:
return None
quark = long(quark)
if quark == 0:
return None
val = read_global_var ("g_quarks")
max_q = long(read_global_var ("g_quark_seq_id"))
if quark < max_q:
return val[quark].string()
return None
# We override the node printers too, so that node->next is not expanded
class GListNodePrinter:
"Prints a GList node"
def __init__ (self, val):
self.val = val
def to_string (self):
return "{data=%s, next=0x%x, prev=0x%x}" % (str(self.val["data"]), long(self.val["next"]), long(self.val["prev"]))
class GSListNodePrinter:
"Prints a GSList node"
def __init__ (self, val):
self.val = val
def to_string (self):
return "{data=%s, next=0x%x}" % (str(self.val["data"]), long(self.val["next"]))
class GListPrinter:
"Prints a GList"
class _iterator:
def __init__(self, head, listtype):
self.link = head
self.listtype = listtype
self.count = 0
def __iter__(self):
return self
def next(self):
if self.link == 0:
raise StopIteration
data = self.link['data']
self.link = self.link['next']
count = self.count
self.count = self.count + 1
return ('[%d]' % count, data)
def __init__ (self, val, listtype):
self.val = val
self.listtype = listtype
def children(self):
return self._iterator(self.val, self.listtype)
def to_string (self):
return "0x%x" % (long(self.val))
def display_hint (self):
return "array"
class GHashPrinter:
"Prints a GHashTable"
class _iterator:
def __init__(self, ht, keys_are_strings):
self.ht = ht
if ht != 0:
self.array = ht["nodes"]
self.size = ht["size"]
self.pos = 0
self.keys_are_strings = keys_are_strings
self.value = None
def __iter__(self):
return self
def next(self):
if self.ht == 0:
raise StopIteration
if self.value != None:
v = self.value
self.value = None
return v
while long(self.pos) < long(self.size):
node = self.array[self.pos]
self.pos = self.pos + 1
if long (node["key_hash"]) >= 2:
key = node["key"]
val = node["value"]
if self.keys_are_strings:
key = key.cast (gdb.lookup_type("char").pointer())
# Queue value for next result
self.value = ('[%dv]'% (self.pos), val)
# Return key
return ('[%dk]'% (self.pos), key)
raise StopIteration
def __init__ (self, val):
self.val = val
self.keys_are_strings = False
try:
string_hash = read_global_var ("g_str_hash")
except:
string_hash = None
if self.val != 0 and string_hash != None and self.val["hash_func"] == string_hash:
self.keys_are_strings = True
def children(self):
return self._iterator(self.val, self.keys_are_strings)
def to_string (self):
return "0x%x" % (long(self.val))
def display_hint (self):
return "map"
def pretty_printer_lookup (val):
if is_g_type_instance (val):
return GTypePrettyPrinter (val)
def pretty_printer_lookup (val):
# None yet, want things like hash table and list
type = val.type.unqualified()
# If it points to a reference, get the reference.
if type.code == gdb.TYPE_CODE_REF:
type = type.target ()
if type.code == gdb.TYPE_CODE_PTR:
type = type.target().unqualified()
t = str(type)
if t == "GList":
return GListPrinter(val, "GList")
if t == "GSList":
return GListPrinter(val, "GSList")
if t == "GHashTable":
return GHashPrinter(val)
else:
t = str(type)
if t == "GList":
return GListNodePrinter(val)
if t == "GSList *":
return GListPrinter(val, "GSList")
return None
def register (obj):
if obj == None:
obj = gdb
obj.pretty_printers.append(pretty_printer_lookup)
class ForeachCommand (gdb.Command):
"""Foreach on list"""
def __init__ (self):
super (ForeachCommand, self).__init__ ("gforeach",
gdb.COMMAND_DATA,
gdb.COMPLETE_SYMBOL)
def valid_name (self, name):
if not name[0].isalpha():
return False
return True
def parse_args (self, arg):
i = arg.find(" ")
if i <= 0:
raise Exception ("No var specified")
var = arg[:i]
if not self.valid_name(var):
raise Exception ("Invalid variable name")
while i < len (arg) and arg[i].isspace():
i = i + 1
if arg[i:i+2] != "in":
raise Exception ("Invalid syntax, missing in")
i = i + 2
while i < len (arg) and arg[i].isspace():
i = i + 1
colon = arg.find (":", i)
if colon == -1:
raise Exception ("Invalid syntax, missing colon")
val = arg[i:colon]
colon = colon + 1
while colon < len (arg) and arg[colon].isspace():
colon = colon + 1
command = arg[colon:]
return (var, val, command)
def do_iter(self, arg, item, command):
item = item.cast (gdb.lookup_type("void").pointer())
item = long(item)
to_eval = "set $%s = (void *)0x%x\n"%(arg, item)
gdb.execute(to_eval)
gdb.execute(command)
def slist_iterator (self, arg, container, command):
l = container.cast (gdb.lookup_type("GSList").pointer())
while long(l) != 0:
self.do_iter (arg, l["data"], command)
l = l["next"]
def list_iterator (self, arg, container, command):
l = container.cast (gdb.lookup_type("GList").pointer())
while long(l) != 0:
self.do_iter (arg, l["data"], command)
l = l["next"]
def pick_iterator (self, container):
t = container.type.unqualified()
if t.code == gdb.TYPE_CODE_PTR:
t = t.target().unqualified()
t = str(t)
if t == "GSList":
return self.slist_iterator
if t == "GList":
return self.list_iterator
raise Exception("Invalid container type %s"%(str(container.type)))
def invoke (self, arg, from_tty):
(var, container, command) = self.parse_args(arg)
container = gdb.parse_and_eval (container)
func = self.pick_iterator(container)
func(var, container, command)
ForeachCommand ()
| [
[
1,
0,
0.004,
0.004,
0,
0.66,
0,
430,
0,
1,
0,
0,
430,
0,
0
],
[
2,
0,
0.0181,
0.008,
0,
0.66,
0.0909,
512,
0,
1,
1,
0,
0,
0,
2
],
[
13,
1,
0.0201,
0.004,
1,
0.92,... | [
"import gdb",
"def read_global_var (symname):\n return gdb.selected_frame().read_var(symname)",
" return gdb.selected_frame().read_var(symname)",
"def g_quark_to_string (quark):\n if quark == None:\n return None\n quark = long(quark)\n if quark == 0:\n return None\n val = read_... |
import sys
import gdb
# Update module path.
dir_ = '/usr/local/share/glib-2.0/gdb'
if not dir_ in sys.path:
sys.path.insert(0, dir_)
from gobject import register
register (gdb.current_objfile ())
| [
[
1,
0,
0.1,
0.1,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.2,
0.1,
0,
0.66,
0.2,
430,
0,
1,
0,
0,
430,
0,
0
],
[
14,
0,
0.5,
0.1,
0,
0.66,
0.4,
7... | [
"import sys",
"import gdb",
"dir_ = '/usr/local/share/glib-2.0/gdb'",
"if not dir_ in sys.path:\n sys.path.insert(0, dir_)",
" sys.path.insert(0, dir_)",
"from gobject import register",
"register (gdb.current_objfile ())"
] |
import gdb
import glib
import gdb.backtrace
import gdb.command.backtrace
# This is not quite right, as local vars may override symname
def read_global_var (symname):
return gdb.selected_frame().read_var(symname)
def g_type_to_name (gtype):
def lookup_fundamental_type (typenode):
if typenode == 0:
return None
val = read_global_var ("static_fundamental_type_nodes")
if val == None:
return None
return val[typenode >> 2].address()
gtype = long(gtype)
typenode = gtype - gtype % 4
if typenode > (255 << 2):
typenode = gdb.Value(typenode).cast (gdb.lookup_type("TypeNode").pointer())
else:
typenode = lookup_fundamental_type (typenode)
if typenode != None:
return glib.g_quark_to_string (typenode["qname"])
return None
def is_g_type_instance (val):
def is_g_type_instance_helper (type):
if str(type) == "GTypeInstance":
return True
while type.code == gdb.TYPE_CODE_TYPEDEF:
type = type.target()
if type.code != gdb.TYPE_CODE_STRUCT:
return False
fields = type.fields()
if len (fields) < 1:
return False
first_field = fields[0]
return is_g_type_instance_helper(first_field.type)
type = val.type
if type.code != gdb.TYPE_CODE_PTR:
return False
type = type.target()
return is_g_type_instance_helper (type)
def g_type_name_from_instance (instance):
if long(instance) != 0:
try:
inst = instance.cast (gdb.lookup_type("GTypeInstance").pointer())
klass = inst["g_class"]
gtype = klass["g_type"]
name = g_type_to_name (gtype)
return name
except RuntimeError:
pass
return None
class GTypePrettyPrinter:
"Prints a GType instance pointer"
def __init__ (self, val):
self.val = val
def to_string (self):
name = g_type_name_from_instance (self.val)
if name:
return ("0x%x [%s]")% (long(self.val), name)
return ("0x%x") % (long(self.val))
def pretty_printer_lookup (val):
if is_g_type_instance (val):
return GTypePrettyPrinter (val)
return None
def get_signal_name (id):
if id == None:
return None
id = long(id)
if id == 0:
return None
val = read_global_var ("g_signal_nodes")
max_s = read_global_var ("g_n_signal_nodes")
max_s = long(max_s)
if id < max_s:
return val[id]["name"].string()
return None
class GFrameWrapper:
def __init__ (self, frame):
self.frame = frame;
def name (self):
name = self.frame.name()
if name and name.startswith("IA__"):
return name[4:]
return name
def __getattr__ (self, name):
return getattr (self.frame, name)
# Monkey patch FrameWrapper to avoid IA__ in symbol names
old__init__ = gdb.command.backtrace.FrameWrapper.__init__
def monkey_patched_init(self, frame):
name = frame.name()
if name and name.startswith("IA__"):
frame = GFrameWrapper(frame)
old__init__(self,frame)
gdb.command.backtrace.FrameWrapper.__init__ = monkey_patched_init
class DummyFrame:
def __init__ (self, frame):
self.frame = frame
def name (self):
return "signal-emission-dummy"
def describe (self, stream, full):
stream.write (" <...>\n")
def __getattr__ (self, name):
return getattr (self.frame, name)
class SignalFrame:
def __init__ (self, frames):
self.frame = frames[-1]
self.frames = frames;
def name (self):
return "signal-emission"
def read_var (self, frame, name, array = None):
try:
v = frame.read_var (name)
if v == None or v.is_optimized_out:
return None
if array != None:
array.append (v)
return v
except ValueError:
return None
def read_object (self, frame, name, array = None):
try:
v = frame.read_var (name)
if v == None or v.is_optimized_out:
return None
v = v.cast (gdb.lookup_type("GObject").pointer())
# Ensure this is a somewhat correct object pointer
if v != None and g_type_name_from_instance (v):
if array != None:
array.append (v)
return v
return None
except ValueError:
return None
def append (self, array, obj):
if obj != None:
array.append (obj)
def or_join_array (self, array):
if len(array) == 0:
return "???"
v = {}
for i in range(len(array)):
v[str(array[i])] = 1
array = v.keys()
s = array[0]
for i in range(1, len(array)):
s = s + " or %s"%array[i]
return s
def describe (self, stream, full):
instances = []
signals = []
for frame in self.frames:
name = frame.name()
if name == "signal_emit_unlocked_R":
self.read_object (frame, "instance", instances)
node = self.read_var (frame, "node")
if node:
signal = node["name"].string()
detail = self.read_var (frame, "detail")
detail = glib.g_quark_to_string (detail)
if detail != None:
signal = signal + ":" + detail
self.append (signals, signal)
if name == "g_signal_emitv":
instance_and_params = self.read_var (frame, "instance_and_params")
if instance_and_params:
instance = instance_and_params[0]["v_pointer"].cast (gdb.Type("GObject").pointer())
self.append (instances, instance)
id = self.read_var (frame, "signal_id")
signal = get_signal_name (id)
if signal:
detail = self.read_var (frame, "detail")
detail = glib.g_quark_to_string (detail)
if detail != None:
signal = signal + ":" + detail
self.append (signals, signal)
if name == "g_signal_emit_valist" or name == "g_signal_emit":
self.read_object (frame, "instance", instances)
id = self.read_var (frame, "signal_id")
signal = get_signal_name (id)
if signal:
detail = self.read_var (frame, "detail")
detail = glib.g_quark_to_string (detail)
if detail != None:
signal = signal + ":" + detail
self.append (signals, signal)
if name == "g_signal_emit_by_name":
self.read_object (frame, "instance", instances)
self.read_var (frame, "detailed_signal", signals)
break
instance = self.or_join_array (instances)
signal = self.or_join_array (signals)
stream.write (" <emit signal %s on instance %s>\n" % (signal, instance))
def __getattr__ (self, name):
return getattr (self.frame, name)
class GFrameFilter:
def __init__ (self, iter):
self.queue = []
self.iter = iter
def __iter__ (self):
return self
def fill (self):
while len(self.queue) <= 6:
try:
f = self.iter.next ()
self.queue.append (f)
except StopIteration:
return
def find_signal_emission (self):
for i in range (min (len(self.queue), 3)):
if self.queue[i].name() == "signal_emit_unlocked_R":
return i
return -1
def next (self):
# Ensure we have enough frames for a full signal emission
self.fill()
# Are we at the end?
if len(self.queue) == 0:
raise StopIteration
emission = self.find_signal_emission ()
if emission > 0:
start = emission
while True:
if start == 0:
break
prev_name = self.queue[start-1].name()
if prev_name.find("_marshal_") or prev_name == "g_closure_invoke":
start = start - 1
else:
break
end = emission + 1
while end < len(self.queue):
if self.queue[end].name() in ["g_signal_emitv",
"g_signal_emit_valist",
"g_signal_emit",
"g_signal_emit_by_name"]:
end = end + 1
else:
break
signal_frames = self.queue[start:end]
new_frames = []
for i in range(len(signal_frames)-1):
new_frames.append(DummyFrame(signal_frames[i]))
new_frames.append(SignalFrame(signal_frames))
self.queue[start:end] = new_frames
return self.queue.pop(0)
def register (obj):
if obj == None:
obj = gdb
gdb.backtrace.push_frame_filter (GFrameFilter)
obj.pretty_printers.append(pretty_printer_lookup)
| [
[
1,
0,
0.0033,
0.0033,
0,
0.66,
0,
430,
0,
1,
0,
0,
430,
0,
0
],
[
1,
0,
0.0066,
0.0033,
0,
0.66,
0.0556,
233,
0,
1,
0,
0,
233,
0,
0
],
[
1,
0,
0.0098,
0.0033,
0,
... | [
"import gdb",
"import glib",
"import gdb.backtrace",
"import gdb.command.backtrace",
"def read_global_var (symname):\n return gdb.selected_frame().read_var(symname)",
" return gdb.selected_frame().read_var(symname)",
"def g_type_to_name (gtype):\n def lookup_fundamental_type (typenode):\n ... |
#!/usr/bin/env python
import gobject
import time
import dbus
import dbus.service
import dbus.mainloop.glib
class TestException(dbus.DBusException):
_dbus_error_name = 'com.example.TestException'
class TestService(dbus.service.Object):
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
in_signature='s', out_signature='s')
def HelloWorld(self, hello_message):
if str(hello_message) == 'Yo':
raise TestException('Yo is not a proper greeting')
else:
return "You greeted me with '%s'. Thanks!"%(str(hello_message))
@dbus.service.method("com.example.Frob",
in_signature='ss', out_signature='ss')
def DoubleHelloWorld(self, hello1, hello2):
return ("You greeted me with '%s'. Thanks!"%(str(hello1)), "Yo dawg, you uttered '%s'. Thanks!"%(str(hello2)))
@dbus.service.method("com.example.Frob",
in_signature='', out_signature='su')
def PairReturn(self):
return ("foo", 42)
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
in_signature='ybnqiuxtdsog', out_signature='ybnqiuxtdsog')
def TestPrimitiveTypes(self, val_byte, val_boolean, val_int16, val_uint16, val_int32, val_uint32, val_int64, val_uint64, val_double, val_string, val_objpath, val_signature):
return val_byte + 1, not val_boolean, val_int16 + 1, val_uint16 + 1, val_int32 + 1, val_uint32 + 1, val_int64 + 1, val_uint64 + 1, -val_double + 0.123, val_string * 2, val_objpath + "/modified", val_signature * 2
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
in_signature='ayabanaqaiauaxatad', out_signature='ayabanaqaiauaxatad')
def TestArrayOfPrimitiveTypes(self, val_byte, val_boolean, val_int16, val_uint16, val_int32, val_uint32, val_int64, val_uint64, val_double):
return val_byte*2, val_boolean*2, val_int16*2, val_uint16*2, val_int32*2, val_uint32*2, val_int64*2, val_uint64*2, val_double*2
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
in_signature='asaoag', out_signature='asaoag')
def TestArrayOfStringTypes(self, val_string, val_objpath, val_signature):
return val_string * 2, val_objpath * 2, val_signature * 2
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
in_signature = 'a{yy}a{bb}a{nn}a{qq}a{ii}a{uu}a{xx}a{tt}a{dd}a{ss}a{oo}a{gg}',
out_signature = 'a{yy}a{bb}a{nn}a{qq}a{ii}a{uu}a{xx}a{tt}a{dd}a{ss}a{oo}a{gg}')
def TestHashTables(self, hyy, hbb, hnn, hqq, hii, huu, hxx, htt, hdd, hss, hoo, hgg):
ret_hyy = {}
for i in hyy:
ret_hyy[i*2] = (hyy[i]*3) & 255
ret_hbb = {}
for i in hbb:
ret_hbb[i] = True
ret_hnn = {}
for i in hnn:
ret_hnn[i*2] = hnn[i]*3
ret_hqq = {}
for i in hqq:
ret_hqq[i*2] = hqq[i]*3
ret_hii = {}
for i in hii:
ret_hii[i*2] = hii[i]*3
ret_huu = {}
for i in huu:
ret_huu[i*2] = huu[i]*3
ret_hxx = {}
for i in hxx:
ret_hxx[i + 2] = hxx[i] + 1
ret_htt = {}
for i in htt:
ret_htt[i + 2] = htt[i] + 1
ret_hdd = {}
for i in hdd:
ret_hdd[i + 2.5] = hdd[i] + 5.0
ret_hss = {}
for i in hss:
ret_hss[i + "mod"] = hss[i]*2
ret_hoo = {}
for i in hoo:
ret_hoo[i + "/mod"] = hoo[i] + "/mod2"
ret_hgg = {}
for i in hgg:
ret_hgg[i + "assgit"] = hgg[i]*2
return ret_hyy, ret_hbb, ret_hnn, ret_hqq, ret_hii, ret_huu, ret_hxx, ret_htt, ret_hdd, ret_hss, ret_hoo, ret_hgg
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
                     in_signature='(ii)(s(ii)aya{ss})', out_signature='(ii)(s(ii)aya{ss})')
def TestStructureTypes(self, s1, s2):
    """Unpack both structs, tweak every member, and return new structs."""
    x, y = s1
    desc, (x1, y1), ay, hss = s2
    suffixed = {key: hss[key] + " ... in bed!" for key in hss}
    return (x + 1, y + 1), (desc + " ... in bed!", (x1 + 2, y1 + 2), ay * 2, suffixed)
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
                     in_signature='vb', out_signature='v')
def TestVariant(self, v, modify):
    """Return the variant back, optionally modified.

    When `modify` is true the transformation depends on the dbus type of
    the incoming value; the result is coerced back through type(v) so the
    reply variant carries the same D-Bus type as the input.
    """
    if modify:
        if type(v)==dbus.Boolean:
            ret = False
        elif type(v)==dbus.Dictionary:
            ret = {}
            for i in v:
                ret[i] = v[i] * 2
        elif type(v)==dbus.Struct:
            ret = ["other struct", dbus.Int16(100)]
        else:
            # numeric/string variants: double (or repeat) via the `*` operator
            ret = v * 2
    else:
        ret = v
    # re-wrap in the original dbus type so the variant signature is preserved
    return (type(v))(ret)
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
                     in_signature='a(ii)aa(ii)aasaa{ss}aayavaav', out_signature='a(ii)aa(ii)aasaa{ss}aayavaav')
def TestComplexArrays(self, aii, aaii, aas, ahashes, aay, av, aav):
    """Echo every complex array back, repeated twice."""
    return tuple(arr * 2 for arr in (aii, aaii, aas, ahashes, aay, av, aav))
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
                     in_signature='a{s(ii)}a{sv}a{sav}a{saav}a{sa(ii)}a{sa{ss}}',
                     out_signature='a{s(ii)}a{sv}a{sav}a{saav}a{sa(ii)}a{sa{ss}}')
def TestComplexHashTables(self, h_str_to_pair, h_str_to_variant, h_str_to_av, h_str_to_aav,
                          h_str_to_array_of_pairs, hash_of_hashes):
    """Suffix the keys of the first two tables with "_baz"; the remaining
    four tables are passed through unchanged."""
    renamed_pairs = {key + "_baz": h_str_to_pair[key] for key in h_str_to_pair}
    renamed_variants = {key + "_baz": h_str_to_variant[key] for key in h_str_to_variant}
    return (renamed_pairs, renamed_variants, h_str_to_av, h_str_to_aav,
            h_str_to_array_of_pairs, hash_of_hashes)
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
                     in_signature='', out_signature='')
def Quit(self):
    """Stop the service by quitting the module-level GLib main loop."""
    mainloop.quit()
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
                     in_signature='sv', out_signature='')
def FrobSetProperty(self, prop_name, prop_value):
    """Set a property and emit org.freedesktop.DBus.Properties.PropertiesChanged.

    The signal is built by hand as a low-level message; the append order
    (interface name, changed-properties dict, invalidated-names array)
    matches the PropertiesChanged signature `sa{sv}as`.
    """
    self.frob_props[prop_name] = prop_value
    message = dbus.lowlevel.SignalMessage("/com/example/TestObject",
                                          "org.freedesktop.DBus.Properties",
                                          "PropertiesChanged")
    message.append("com.example.Frob")
    message.append({prop_name : prop_value})
    message.append([], signature="as")  # nothing invalidated
    session_bus.send_message(message)
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob",
                     in_signature='', out_signature='')
def FrobInvalidateProperty(self):
    """Change one property and announce it via the invalidated-names array.

    Unlike FrobSetProperty, the changed-properties dict is left empty and
    only the property's name is reported as invalidated.
    """
    self.frob_props["PropertyThatWillBeInvalidated"] = "OMGInvalidated"
    message = dbus.lowlevel.SignalMessage("/com/example/TestObject",
                                          "org.freedesktop.DBus.Properties",
                                          "PropertiesChanged")
    message.append("com.example.Frob")
    message.append({}, signature="a{sv}")  # empty changed-properties dict
    message.append(["PropertyThatWillBeInvalidated"])
    session_bus.send_message(message)
# ----------------------------------------------------------------------------------------------------
@dbus.service.signal("com.example.Frob",
                     signature="sov")
def TestSignal(self, str1, objpath1, variant1):
    """Signal declaration; the decorator emits it whenever this method is called."""
    pass
@dbus.service.method("com.example.Frob",
                     in_signature='so', out_signature='')
def EmitSignal(self, str1, objpath1):
    """Fire TestSignal, decorating the inputs with fixed suffixes."""
    self.TestSignal (str1 + " .. in bed!", objpath1 + "/in/bed", "a variant")
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("com.example.Frob", in_signature='i', out_signature='',
                     async_callbacks=('return_cb', 'raise_cb'))
def Sleep(self, msec, return_cb, raise_cb):
    """Reply asynchronously after `msec` milliseconds via a GLib timeout.

    `return_cb`/`raise_cb` are supplied by dbus-python because of the
    async_callbacks declaration; the method returns immediately and the
    reply is sent when the timeout fires.
    """
    def return_from_async_wait():
        return_cb()
        return False  # one-shot: returning False removes the timeout source
    gobject.timeout_add(msec, return_from_async_wait)
# ----------------------------------------------------------------------------------------------------
@dbus.service.method("org.freedesktop.DBus.Properties",
                     in_signature='ss',
                     out_signature='v')
def Get(self, interface_name, property_name):
    """org.freedesktop.DBus.Properties.Get for the com.example.Frob interface."""
    if interface_name != "com.example.Frob":
        raise TestException("No such interface " + interface_name)
    return self.frob_props[property_name]
@dbus.service.method("org.freedesktop.DBus.Properties",
                     in_signature='s',
                     out_signature='a{sv}')
def GetAll(self, interface_name):
    """org.freedesktop.DBus.Properties.GetAll for the com.example.Frob interface."""
    if interface_name != "com.example.Frob":
        raise TestException("No such interface " + interface_name)
    return self.frob_props
if __name__ == '__main__':
    # Install the GLib main loop as dbus-python's event loop before connecting.
    dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
    session_bus = dbus.SessionBus()
    # Claim the well-known name and export the test object on the session bus.
    name = dbus.service.BusName("com.example.TestService", session_bus)
    obj = TestService(session_bus, '/com/example/TestObject')
    #print "Our unique name is %s"%(session_bus.get_unique_name())
    # Seed one property per basic D-Bus type; keys mirror the signature
    # codes (y=byte, b=bool, ...; a leading 'a' means array-of).
    obj.frob_props = {}
    obj.frob_props["y"] = dbus.Byte(1)
    obj.frob_props["b"] = dbus.Boolean(True)
    obj.frob_props["n"] = dbus.Int16(2)
    obj.frob_props["q"] = dbus.UInt16(3)
    obj.frob_props["i"] = dbus.Int32(4)
    obj.frob_props["u"] = dbus.UInt32(5)
    obj.frob_props["x"] = dbus.Int64(6)
    obj.frob_props["t"] = dbus.UInt64(7)
    obj.frob_props["d"] = dbus.Double(7.5)
    obj.frob_props["s"] = dbus.String("a string")
    obj.frob_props["o"] = dbus.ObjectPath("/some/path")
    obj.frob_props["ay"] = [dbus.Byte(1), dbus.Byte(11)]
    obj.frob_props["ab"] = [dbus.Boolean(True), dbus.Boolean(False)]
    obj.frob_props["an"] = [dbus.Int16(2), dbus.Int16(12)]
    obj.frob_props["aq"] = [dbus.UInt16(3), dbus.UInt16(13)]
    obj.frob_props["ai"] = [dbus.Int32(4), dbus.Int32(14)]
    obj.frob_props["au"] = [dbus.UInt32(5), dbus.UInt32(15)]
    obj.frob_props["ax"] = [dbus.Int64(6), dbus.Int64(16)]
    obj.frob_props["at"] = [dbus.UInt64(7), dbus.UInt64(17)]
    obj.frob_props["ad"] = [dbus.Double(7.5), dbus.Double(17.5)]
    obj.frob_props["as"] = [dbus.String("a string"), dbus.String("another string")]
    obj.frob_props["ao"] = [dbus.ObjectPath("/some/path"), dbus.ObjectPath("/another/path")]
    obj.frob_props["foo"] = "a frobbed string"
    obj.frob_props["PropertyThatWillBeInvalidated"] = "InitialValue"
    # Block in the GLib main loop until Quit() is invoked over the bus.
    mainloop = gobject.MainLoop()
    mainloop.run()
| [
[
1,
0,
0.0105,
0.0035,
0,
0.66,
0,
90,
0,
1,
0,
0,
90,
0,
0
],
[
1,
0,
0.0139,
0.0035,
0,
0.66,
0.1429,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0209,
0.0035,
0,
0.... | [
"import gobject",
"import time",
"import dbus",
"import dbus.service",
"import dbus.mainloop.glib",
"class TestException(dbus.DBusException):\n _dbus_error_name = 'com.example.TestException'",
" _dbus_error_name = 'com.example.TestException'",
"class TestService(dbus.service.Object):\n\n # --... |
#!/usr/bin/env python
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
import os
import sys
# Locate the directory holding the 'codegen' package, preferring an
# uninstalled source tree when UNINSTALLED_GLIB_SRCDIR is set.
src_override = os.getenv('UNINSTALLED_GLIB_SRCDIR', None)
if src_override is not None:
    codegen_dir = os.path.join(src_override, 'gio', 'gdbus-2.0')
elif os.name == 'nt':
    # Makes gdbus-codegen 'relocatable' at runtime on Windows.
    codegen_dir = os.path.join(os.path.dirname(__file__), '..', 'lib', 'gdbus-2.0')
else:
    # '@libdir@' is substituted with the install libdir at build time.
    codegen_dir = os.path.join('@libdir@', 'gdbus-2.0')

sys.path.insert(0, os.path.abspath(codegen_dir))

from codegen import codegen_main

sys.exit(codegen_main.codegen_main())
| [
[
1,
0,
0.6098,
0.0244,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.6341,
0.0244,
0,
0.66,
0.1667,
509,
0,
1,
0,
0,
509,
0,
0
],
[
14,
0,
0.6829,
0.0244,
0,
... | [
"import os",
"import sys",
"srcdir = os.getenv('UNINSTALLED_GLIB_SRCDIR', None)",
"if srcdir is not None:\n path = os.path.join(srcdir, 'gio', 'gdbus-2.0')\nelif os.name == 'nt':\n # Makes gdbus-codegen 'relocatable' at runtime on Windows.\n path = os.path.join(os.path.dirname(__file__), '..', 'lib',... |
# -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
import sys
import re
from . import config
from . import utils
from . import dbustypes
from . import parser
# ----------------------------------------------------------------------------------------------------
class DocbookCodeGenerator:
    """Generates DocBook XML reference documentation for D-Bus interfaces.

    generate() writes one refentry file per interface, named
    '<docbook>-<interface-name>.xml'.
    """

    def __init__(self, ifaces, docbook):
        self.ifaces = ifaces
        self.docbook = docbook
        self.generate_expand_dicts()

    def print_method_prototype(self, i, m, in_synopsis):
        """Write a C-style prototype for method m of interface i.

        In synopsis mode the name/signature columns are sized across all
        methods of the interface so the synopsis lines up; otherwise only
        this method is measured.
        """
        max_method_len = 0
        if in_synopsis:
            for _m in i.methods:
                max_method_len = max(len(_m.name), max_method_len)
        else:
            max_method_len = max(len(m.name), max_method_len)
        max_signature_len = 0
        if in_synopsis:
            for _m in i.methods:
                for a in _m.in_args:
                    max_signature_len = max(len(a.signature), max_signature_len)
                for a in _m.out_args:
                    max_signature_len = max(len(a.signature), max_signature_len)
        else:
            for a in m.in_args:
                max_signature_len = max(len(a.signature), max_signature_len)
            for a in m.out_args:
                max_signature_len = max(len(a.signature), max_signature_len)
        if in_synopsis:
            self.out.write('<link linkend="gdbus-method-%s.%s">%s</link>%*s ('
                           %(utils.dots_to_hyphens(i.name), m.name, m.name, max_method_len - len(m.name), ''))
        else:
            self.out.write('%s%*s ('
                           %(m.name, max_method_len - len(m.name), ''))
        count = 0
        for a in m.in_args:
            if (count > 0):
                self.out.write(',\n%*s'%(max_method_len + 2, ''))
            self.out.write('IN %s%*s %s'%(a.signature, max_signature_len - len(a.signature), '', a.name))
            count = count + 1
        for a in m.out_args:
            if (count > 0):
                self.out.write(',\n%*s'%(max_method_len + 2, ''))
            self.out.write('OUT %s%*s %s'%(a.signature, max_signature_len - len(a.signature), '', a.name))
            count = count + 1
        self.out.write(');\n')

    def print_signal_prototype(self, i, s, in_synopsis):
        """Write a C-style prototype for signal s of interface i."""
        max_signal_len = 0
        if in_synopsis:
            for _s in i.signals:
                max_signal_len = max(len(_s.name), max_signal_len)
        else:
            max_signal_len = max(len(s.name), max_signal_len)
        max_signature_len = 0
        if in_synopsis:
            for _s in i.signals:
                for a in _s.args:
                    max_signature_len = max(len(a.signature), max_signature_len)
        else:
            for a in s.args:
                max_signature_len = max(len(a.signature), max_signature_len)
        if in_synopsis:
            self.out.write('<link linkend="gdbus-signal-%s.%s">%s</link>%*s ('
                           %(utils.dots_to_hyphens(i.name), s.name, s.name, max_signal_len - len(s.name), ''))
        else:
            self.out.write('%s%*s ('
                           %(s.name, max_signal_len - len(s.name), ''))
        count = 0
        for a in s.args:
            if (count > 0):
                self.out.write(',\n%*s'%(max_signal_len + 2, ''))
            self.out.write('%s%*s %s'%(a.signature, max_signature_len - len(a.signature), '', a.name))
            count = count + 1
        self.out.write(');\n')

    def print_property_prototype(self, i, p, in_synopsis):
        """Write a one-line prototype (name, access, signature) for property p."""
        max_property_len = 0
        if in_synopsis:
            for _p in i.properties:
                max_property_len = max(len(_p.name), max_property_len)
        else:
            max_property_len = max(len(p.name), max_property_len)
        max_signature_len = 0
        if in_synopsis:
            for _p in i.properties:
                max_signature_len = max(len(_p.signature), max_signature_len)
        else:
            max_signature_len = max(len(p.signature), max_signature_len)
        if in_synopsis:
            self.out.write('<link linkend="gdbus-property-%s.%s">%s</link>%*s'
                           %(utils.dots_to_hyphens(i.name), p.name, p.name, max_property_len - len(p.name), ''))
        else:
            self.out.write('%s%*s'
                           %(p.name, max_property_len - len(p.name), ''))
        # trailing spaces keep the 'readable'/'writable' column widths equal
        if p.readable and p.writable:
            access = 'readwrite'
        elif p.readable:
            access = 'readable '
        else:
            access = 'writable '
        self.out.write(' %s %s\n'%(access, p.signature))

    def print_synopsis_methods(self, i):
        """Write the Methods synopsis section for interface i."""
        self.out.write(' <refsynopsisdiv role="synopsis">\n')
        self.out.write(' <title role="synopsis.title">Methods</title>\n')
        self.out.write(' <synopsis>\n')
        for m in i.methods:
            self.print_method_prototype(i, m, in_synopsis=True)
        self.out.write('</synopsis>\n')
        self.out.write(' </refsynopsisdiv>\n')

    def print_synopsis_signals(self, i):
        """Write the Signals synopsis section for interface i."""
        self.out.write(' <refsect1 role="signal_proto">\n')
        self.out.write(' <title role="signal_proto.title">Signals</title>\n')
        self.out.write(' <synopsis>\n')
        for s in i.signals:
            self.print_signal_prototype(i, s, in_synopsis=True)
        self.out.write('</synopsis>\n')
        self.out.write(' </refsect1>\n')

    def print_synopsis_properties(self, i):
        """Write the Properties synopsis section for interface i."""
        self.out.write(' <refsect1 role="properties">\n')
        self.out.write(' <title role="properties.title">Properties</title>\n')
        self.out.write(' <synopsis>\n')
        for p in i.properties:
            self.print_property_prototype(i, p, in_synopsis=True)
        self.out.write('</synopsis>\n')
        self.out.write(' </refsect1>\n')

    def print_method(self, i, m):
        """Write the full refsect2 documentation block for method m."""
        self.out.write('<refsect2 role="method" id="gdbus-method-%s.%s">\n'%(utils.dots_to_hyphens(i.name), m.name))
        self.out.write(' <title>The %s() method</title>\n'%(m.name))
        self.out.write(' <indexterm zone="gdbus-method-%s.%s"><primary sortas="%s.%s">%s.%s()</primary></indexterm>\n'%(utils.dots_to_hyphens(i.name), m.name, i.name_without_prefix, m.name, i.name, m.name))
        self.out.write('<programlisting>\n')
        self.print_method_prototype(i, m, in_synopsis=False)
        self.out.write('</programlisting>\n')
        self.out.write('<para>%s</para>\n'%(self.expand(m.doc_string, True)))
        self.out.write('<variablelist role="params">\n')
        for a in m.in_args:
            self.out.write('<varlistentry>\n')
            self.out.write(' <term><literal>IN %s <parameter>%s</parameter></literal>:</term>\n'%(a.signature, a.name))
            self.out.write(' <listitem><para>%s</para></listitem>\n'%(self.expand(a.doc_string, True)))
            self.out.write('</varlistentry>\n')
        for a in m.out_args:
            self.out.write('<varlistentry>\n')
            self.out.write(' <term><literal>OUT %s <parameter>%s</parameter></literal>:</term>\n'%(a.signature, a.name))
            self.out.write(' <listitem><para>%s</para></listitem>\n'%(self.expand(a.doc_string, True)))
            self.out.write('</varlistentry>\n')
        self.out.write('</variablelist>\n')
        if len(m.since) > 0:
            self.out.write('<para role="since">Since %s</para>\n'%(m.since))
        if m.deprecated:
            self.out.write('<warning><para>The %s() method is deprecated.</para></warning>'%(m.name))
        self.out.write('</refsect2>\n')

    def print_signal(self, i, s):
        """Write the full refsect2 documentation block for signal s."""
        self.out.write('<refsect2 role="signal" id="gdbus-signal-%s.%s">\n'%(utils.dots_to_hyphens(i.name), s.name))
        self.out.write(' <title>The "%s" signal</title>\n'%(s.name))
        self.out.write(' <indexterm zone="gdbus-signal-%s.%s"><primary sortas="%s::%s">%s::%s</primary></indexterm>\n'%(utils.dots_to_hyphens(i.name), s.name, i.name_without_prefix, s.name, i.name, s.name))
        self.out.write('<programlisting>\n')
        self.print_signal_prototype(i, s, in_synopsis=False)
        self.out.write('</programlisting>\n')
        self.out.write('<para>%s</para>\n'%(self.expand(s.doc_string, True)))
        self.out.write('<variablelist role="params">\n')
        for a in s.args:
            self.out.write('<varlistentry>\n')
            self.out.write(' <term><literal>%s <parameter>%s</parameter></literal>:</term>\n'%(a.signature, a.name))
            self.out.write(' <listitem><para>%s</para></listitem>\n'%(self.expand(a.doc_string, True)))
            self.out.write('</varlistentry>\n')
        self.out.write('</variablelist>\n')
        if len(s.since) > 0:
            self.out.write('<para role="since">Since %s</para>\n'%(s.since))
        if s.deprecated:
            self.out.write('<warning><para>The "%s" signal is deprecated.</para></warning>'%(s.name))
        self.out.write('</refsect2>\n')

    def print_property(self, i, p):
        """Write the full refsect2 documentation block for property p."""
        self.out.write('<refsect2 role="property" id="gdbus-property-%s.%s">\n'%(utils.dots_to_hyphens(i.name), p.name))
        self.out.write(' <title>The "%s" property</title>\n'%(p.name))
        self.out.write(' <indexterm zone="gdbus-property-%s.%s"><primary sortas="%s:%s">%s:%s</primary></indexterm>\n'%(utils.dots_to_hyphens(i.name), p.name, i.name_without_prefix, p.name, i.name, p.name))
        self.out.write('<programlisting>\n')
        self.print_property_prototype(i, p, in_synopsis=False)
        self.out.write('</programlisting>\n')
        self.out.write('<para>%s</para>\n'%(self.expand(p.doc_string, True)))
        if len(p.since) > 0:
            self.out.write('<para role="since">Since %s</para>\n'%(p.since))
        if p.deprecated:
            self.out.write('<warning><para>The "%s" property is deprecated.</para></warning>'%(p.name))
        self.out.write('</refsect2>\n')

    def expand(self, s, expandParamsAndConstants):
        """Substitute cross-reference keys (and optionally @param / %CONST
        markup) in doc string s with DocBook markup."""
        for key in self.expand_member_dict_keys:
            s = s.replace(key, self.expand_member_dict[key])
        for key in self.expand_iface_dict_keys:
            s = s.replace(key, self.expand_iface_dict[key])
        if expandParamsAndConstants:
            # replace @foo with <parameter>foo</parameter>
            s = re.sub('@[a-zA-Z0-9_]*', lambda m: '<parameter>' + m.group(0)[1:] + '</parameter>', s)
            # replace e.g. %TRUE with <constant>TRUE</constant>
            s = re.sub('%[a-zA-Z0-9_]*', lambda m: '<constant>' + m.group(0)[1:] + '</constant>', s)
        return s

    def generate_expand_dicts(self):
        """Build the #iface / Iface.Method() / #Iface::Signal / #Iface:Prop
        -> <link> substitution maps used by expand()."""
        self.expand_member_dict = {}
        self.expand_iface_dict = {}
        for i in self.ifaces:
            key = '#%s'%(i.name)
            value = '<link linkend="gdbus-interface-%s.top_of_page">%s</link>'%(utils.dots_to_hyphens(i.name), i.name)
            self.expand_iface_dict[key] = value
            for m in i.methods:
                key = '%s.%s()'%(i.name, m.name)
                value = '<link linkend="gdbus-method-%s.%s">%s()</link>'%(utils.dots_to_hyphens(i.name), m.name, m.name)
                self.expand_member_dict[key] = value
            for s in i.signals:
                key = '#%s::%s'%(i.name, s.name)
                value = '<link linkend="gdbus-signal-%s.%s">"%s"</link>'%(utils.dots_to_hyphens(i.name), s.name, s.name)
                self.expand_member_dict[key] = value
            for p in i.properties:
                key = '#%s:%s'%(i.name, p.name)
                value = '<link linkend="gdbus-property-%s.%s">"%s"</link>'%(utils.dots_to_hyphens(i.name), p.name, p.name)
                self.expand_member_dict[key] = value
        # Expand the keys in reverse-sorted order so e.g. #org.foo.Iface:MediaCompat
        # is evaluated before #org.foo.Iface:Media.
        # sorted() instead of dict.keys().sort(): Python 3 dict views have no
        # .sort() method, and sorted() works on Python 2 as well.
        self.expand_member_dict_keys = sorted(self.expand_member_dict.keys(), reverse=True)
        self.expand_iface_dict_keys = sorted(self.expand_iface_dict.keys(), reverse=True)

    def generate(self):
        """Write one DocBook refentry file per interface."""
        for i in self.ifaces:
            # open() instead of the Python-2-only file() builtin
            self.out = open('%s-%s.xml'%(self.docbook, i.name), 'w')
            self.out.write('<?xml version="1.0" encoding="utf-8"?>\n')
            self.out.write('<!DOCTYPE refentry PUBLIC "-//OASIS//DTD DocBook XML V4.1.2//EN"\n')
            self.out.write(' "http://www.oasis-open.org/docbook/xml/4.1.2/docbookx.dtd" [\n')
            self.out.write(']>\n')
            self.out.write('<refentry id="gdbus-%s">\n'%(i.name))
            self.out.write(' <refmeta>')
            self.out.write(' <refentrytitle role="top_of_page" id="gdbus-interface-%s.top_of_page">%s</refentrytitle>\n'%(utils.dots_to_hyphens(i.name), i.name))
            self.out.write(' <indexterm zone="gdbus-interface-%s.top_of_page"><primary sortas="%s">%s</primary></indexterm>\n'%(utils.dots_to_hyphens(i.name), i.name_without_prefix, i.name))
            self.out.write(' </refmeta>')
            self.out.write(' <refnamediv>')
            self.out.write(' <refname>%s</refname>'%(i.name))
            self.out.write(' <refpurpose>%s</refpurpose>'%(i.doc_string_brief))
            self.out.write(' </refnamediv>')
            if len(i.methods) > 0:
                self.print_synopsis_methods(i)
            if len(i.signals) > 0:
                self.print_synopsis_signals(i)
            if len(i.properties) > 0:
                self.print_synopsis_properties(i)
            self.out.write('<refsect1 role="desc" id="gdbus-interface-%s">\n'%(utils.dots_to_hyphens(i.name)))
            self.out.write(' <title role="desc.title">Description</title>\n')
            self.out.write(' <para>%s</para>\n'%(self.expand(i.doc_string, True)))
            if len(i.since) > 0:
                self.out.write(' <para role="since">Since %s</para>\n'%(i.since))
            if i.deprecated:
                self.out.write('<warning><para>The %s interface is deprecated.</para></warning>'%(i.name))
            self.out.write('</refsect1>\n')
            if len(i.methods) > 0:
                self.out.write('<refsect1 role="details" id="gdbus-methods-%s">\n'%(i.name))
                self.out.write(' <title role="details.title">Method Details</title>\n')
                for m in i.methods:
                    self.print_method(i, m)
                self.out.write('</refsect1>\n')
            if len(i.signals) > 0:
                self.out.write('<refsect1 role="details" id="gdbus-signals-%s">\n'%(i.name))
                self.out.write(' <title role="details.title">Signal Details</title>\n')
                for s in i.signals:
                    self.print_signal(i, s)
                self.out.write('</refsect1>\n')
            if len(i.properties) > 0:
                self.out.write('<refsect1 role="details" id="gdbus-properties-%s">\n'%(i.name))
                self.out.write(' <title role="details.title">Property Details</title>\n')
                for p in i.properties:
                    self.print_property(i, p)
                self.out.write('</refsect1>\n')
            self.out.write('</refentry>\n')
            self.out.write('\n')
            # fix: previously the file handle was never closed (leaked until GC)
            self.out.close()
| [
[
1,
0,
0.0738,
0.0031,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0769,
0.0031,
0,
0.66,
0.1667,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.0831,
0.0031,
0,
... | [
"import sys",
"import re",
"from . import config",
"from . import utils",
"from . import dbustypes",
"from . import parser",
"class DocbookCodeGenerator:\n def __init__(self, ifaces, docbook):\n self.ifaces = ifaces\n self.docbook = docbook\n self.generate_expand_dicts()\n\n d... |
# -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
from . import utils
class Annotation:
    """A single D-Bus annotation: a key/value pair, which may itself carry
    nested annotations."""

    def __init__(self, key, value):
        self.key, self.value = key, value
        self.annotations = []
class Arg:
    """One method/signal argument (or property value): D-Bus name and
    signature plus the C binding details derived in post_process()."""

    # C binding details per basic D-Bus signature.  Tuple layout:
    # (ctype_in_g, ctype_in, ctype_in_dup, ctype_out, gtype, free_func,
    #  gvariant_get, gvalue_get).
    # Signatures not listed here (containers, variants, ...) fall back to
    # an opaque GVariant.  Note the basic numeric types keep the GVariant
    # default for ctype_in_dup, matching the historical behaviour.
    _BASIC_C_INFO = {
        'b':   ('gboolean ', 'gboolean ', 'GVariant *', 'gboolean *', 'G_TYPE_BOOLEAN', None, 'g_variant_get_boolean', 'g_value_get_boolean'),
        'y':   ('guchar ', 'guchar ', 'GVariant *', 'guchar *', 'G_TYPE_UCHAR', None, 'g_variant_get_byte', 'g_value_get_uchar'),
        'n':   ('gint ', 'gint16 ', 'GVariant *', 'gint16 *', 'G_TYPE_INT', None, 'g_variant_get_int16', 'g_value_get_int'),
        'q':   ('guint ', 'guint16 ', 'GVariant *', 'guint16 *', 'G_TYPE_UINT', None, 'g_variant_get_uint16', 'g_value_get_uint'),
        'i':   ('gint ', 'gint ', 'GVariant *', 'gint *', 'G_TYPE_INT', None, 'g_variant_get_int32', 'g_value_get_int'),
        'u':   ('guint ', 'guint ', 'GVariant *', 'guint *', 'G_TYPE_UINT', None, 'g_variant_get_uint32', 'g_value_get_uint'),
        'x':   ('gint64 ', 'gint64 ', 'GVariant *', 'gint64 *', 'G_TYPE_INT64', None, 'g_variant_get_int64', 'g_value_get_int64'),
        't':   ('guint64 ', 'guint64 ', 'GVariant *', 'guint64 *', 'G_TYPE_UINT64', None, 'g_variant_get_uint64', 'g_value_get_uint64'),
        'd':   ('gdouble ', 'gdouble ', 'GVariant *', 'gdouble *', 'G_TYPE_DOUBLE', None, 'g_variant_get_double', 'g_value_get_double'),
        's':   ('const gchar *', 'const gchar *', 'gchar *', 'gchar **', 'G_TYPE_STRING', 'g_free', 'g_variant_get_string', 'g_value_get_string'),
        'o':   ('const gchar *', 'const gchar *', 'gchar *', 'gchar **', 'G_TYPE_STRING', 'g_free', 'g_variant_get_string', 'g_value_get_string'),
        'g':   ('const gchar *', 'const gchar *', 'gchar *', 'gchar **', 'G_TYPE_STRING', 'g_free', 'g_variant_get_string', 'g_value_get_string'),
        'ay':  ('const gchar *', 'const gchar *', 'gchar *', 'gchar **', 'G_TYPE_STRING', 'g_free', 'g_variant_get_bytestring', 'g_value_get_string'),
        'as':  ('const gchar *const *', 'const gchar *const *', 'gchar **', 'gchar ***', 'G_TYPE_STRV', 'g_strfreev', 'g_variant_get_strv', 'g_value_get_boxed'),
        'ao':  ('const gchar *const *', 'const gchar *const *', 'gchar **', 'gchar ***', 'G_TYPE_STRV', 'g_strfreev', 'g_variant_get_objv', 'g_value_get_boxed'),
        'aay': ('const gchar *const *', 'const gchar *const *', 'gchar **', 'gchar ***', 'G_TYPE_STRV', 'g_strfreev', 'g_variant_get_bytestring_array', 'g_value_get_boxed'),
    }
    # These array signatures use GVariant's '^' convenience-conversion
    # prefix in format strings; all other mapped signatures use the bare
    # signature.
    _CONVENIENCE_FORMAT = frozenset(('ay', 'as', 'ao', 'aay'))

    def __init__(self, name, signature):
        self.name = name            # may be None for unnamed args
        self.signature = signature
        self.annotations = []
        self.doc_string = ''
        self.since = ''

    def post_process(self, interface_prefix, cns, cns_upper, cns_lower, arg_number):
        """Resolve docs/since from annotations, synthesize a name for
        unnamed args, and derive the per-signature C binding attributes."""
        if len(self.doc_string) == 0:
            self.doc_string = utils.lookup_docs(self.annotations)
        if len(self.since) == 0:
            self.since = utils.lookup_since(self.annotations)
        if self.name is None:  # fixed: was 'self.name == None'
            self.name = 'unnamed_arg%d'%arg_number
        # default to GVariant for anything not special-cased below
        self.ctype_in_g = 'GVariant *'
        self.ctype_in = 'GVariant *'
        self.ctype_in_dup = 'GVariant *'
        self.ctype_out = 'GVariant **'
        self.gtype = 'G_TYPE_VARIANT'
        self.free_func = 'g_variant_unref'
        self.format_in = '@' + self.signature
        self.format_out = '@' + self.signature
        self.gvariant_get = 'XXX'
        self.gvalue_get = 'g_value_get_variant'
        if utils.lookup_annotation(self.annotations, 'org.gtk.GDBus.C.ForceGVariant'):
            return  # annotation forces the opaque GVariant representation
        info = self._BASIC_C_INFO.get(self.signature)
        if info is None:
            return  # non-basic signature: keep the GVariant defaults
        (self.ctype_in_g, self.ctype_in, self.ctype_in_dup, self.ctype_out,
         self.gtype, self.free_func, self.gvariant_get, self.gvalue_get) = info
        prefix = '^' if self.signature in self._CONVENIENCE_FORMAT else ''
        self.format_in = prefix + self.signature
        self.format_out = prefix + self.signature
class Method:
    """A D-Bus method: a name plus IN and OUT argument lists."""

    def __init__(self, name):
        self.name = name
        self.in_args = []
        self.out_args = []
        self.annotations = []
        self.doc_string = ''
        self.since = ''
        self.deprecated = False

    def post_process(self, interface_prefix, cns, cns_upper, cns_lower):
        """Resolve docs/since, compute the C name variants, post-process
        all arguments, and latch the Deprecated annotation."""
        if len(self.doc_string) == 0:
            self.doc_string = utils.lookup_docs(self.annotations)
        if len(self.since) == 0:
            self.since = utils.lookup_since(self.annotations)
        chosen = utils.lookup_annotation(self.annotations, 'org.gtk.GDBus.C.Name')
        if utils.is_ugly_case(chosen):
            self.name_lower = chosen.lower()
        else:
            base = chosen if chosen else self.name
            self.name_lower = utils.camel_case_to_uscore(base).lower().replace('-', '_')
        self.name_hyphen = self.name_lower.replace('_', '-')
        # IN args are numbered first, OUT args continue the count
        for arg_number, arg in enumerate(self.in_args + self.out_args):
            arg.post_process(interface_prefix, cns, cns_upper, cns_lower, arg_number)
        if utils.lookup_annotation(self.annotations, 'org.freedesktop.DBus.Deprecated') == 'true':
            self.deprecated = True
class Signal:
    """A D-Bus signal: a name plus an argument list."""

    def __init__(self, name):
        self.name = name
        self.args = []
        self.annotations = []
        self.doc_string = ''
        self.since = ''
        self.deprecated = False

    def post_process(self, interface_prefix, cns, cns_upper, cns_lower):
        """Resolve docs/since, compute the C name variants, post-process
        the arguments, and latch the Deprecated annotation."""
        if len(self.doc_string) == 0:
            self.doc_string = utils.lookup_docs(self.annotations)
        if len(self.since) == 0:
            self.since = utils.lookup_since(self.annotations)
        chosen = utils.lookup_annotation(self.annotations, 'org.gtk.GDBus.C.Name')
        if utils.is_ugly_case(chosen):
            self.name_lower = chosen.lower()
        else:
            base = chosen if chosen else self.name
            self.name_lower = utils.camel_case_to_uscore(base).lower().replace('-', '_')
        self.name_hyphen = self.name_lower.replace('_', '-')
        for arg_number, arg in enumerate(self.args):
            arg.post_process(interface_prefix, cns, cns_upper, cns_lower, arg_number)
        if utils.lookup_annotation(self.annotations, 'org.freedesktop.DBus.Deprecated') == 'true':
            self.deprecated = True
class Property:
    """A D-Bus property: a name, a type signature and an access mode."""

    # access-mode string -> (readable, writable)
    _ACCESS_FLAGS = {
        'readwrite': (True, True),
        'read': (True, False),
        'write': (False, True),
    }

    def __init__(self, name, signature, access):
        self.name = name
        self.signature = signature
        self.access = access
        self.annotations = []
        # the property value is modelled as an Arg sharing our annotations
        self.arg = Arg('value', self.signature)
        self.arg.annotations = self.annotations
        if self.access not in self._ACCESS_FLAGS:
            raise RuntimeError('Invalid access type %s'%self.access)
        self.readable, self.writable = self._ACCESS_FLAGS[self.access]
        self.doc_string = ''
        self.since = ''
        self.deprecated = False

    def post_process(self, interface_prefix, cns, cns_upper, cns_lower):
        """Resolve docs/since, compute the C name variants (dodging the
        reserved word 'type'), re-derive the value Arg's C info, and latch
        the Deprecated annotation."""
        if len(self.doc_string) == 0:
            self.doc_string = utils.lookup_docs(self.annotations)
        if len(self.since) == 0:
            self.since = utils.lookup_since(self.annotations)
        chosen = utils.lookup_annotation(self.annotations, 'org.gtk.GDBus.C.Name')
        if utils.is_ugly_case(chosen):
            self.name_lower = chosen.lower()
        else:
            base = chosen if chosen else self.name
            self.name_lower = utils.camel_case_to_uscore(base).lower().replace('-', '_')
        if self.name_lower == 'type':
            self.name_lower = 'type_'  # 'type' would clash in generated C
        self.name_hyphen = self.name_lower.replace('_', '-')
        # recalculate arg (annotations may have been replaced since __init__)
        self.arg.annotations = self.annotations
        self.arg.post_process(interface_prefix, cns, cns_upper, cns_lower, 0)
        if utils.lookup_annotation(self.annotations, 'org.freedesktop.DBus.Deprecated') == 'true':
            self.deprecated = True
class Interface:
    """A D-Bus interface parsed from introspection XML.

    Holds the contained methods, signals, properties and annotations, plus
    the derived C naming attributes (camel_name, name_lower, name_upper,
    name_hyphen, ...) computed by post_process().
    """
    def __init__(self, name):
        # name: fully-qualified D-Bus interface name, e.g. "org.Project.Frobber".
        self.name = name
        self.methods = []
        self.signals = []
        self.properties = []
        self.annotations = []
        self.doc_string = ''
        self.doc_string_brief = ''
        self.since = ''
        self.deprecated = False
    def post_process(self, interface_prefix, c_namespace):
        """Compute C naming attributes and post-process all members.

        interface_prefix -- prefix stripped from the D-Bus name when deriving
                            C identifiers (only when no C.Name override is set)
        c_namespace      -- C namespace prepended to generated identifiers
        """
        # Fall back to annotation-provided docs/since when the parser set none.
        if len(self.doc_string) == 0:
            self.doc_string = utils.lookup_docs(self.annotations)
        if len(self.doc_string_brief) == 0:
            self.doc_string_brief = utils.lookup_brief_docs(self.annotations)
        if len(self.since) == 0:
            self.since = utils.lookup_since(self.annotations)
        # Derive the namespace in camel (cns), UPPER_ and lower_ forms.
        if len(c_namespace) > 0:
            if utils.is_ugly_case(c_namespace):
                cns = c_namespace.replace('_', '')
                cns_upper = c_namespace.upper() + '_'
                cns_lower = c_namespace.lower() + '_'
            else:
                cns = c_namespace
                cns_upper = utils.camel_case_to_uscore(c_namespace).upper() + '_'
                cns_lower = utils.camel_case_to_uscore(c_namespace).lower() + '_'
        else:
            cns = ''
            cns_upper = ''
            cns_lower = ''
        # The org.gtk.GDBus.C.Name annotation overrides the derived C name.
        overridden_name = utils.lookup_annotation(self.annotations, 'org.gtk.GDBus.C.Name')
        if utils.is_ugly_case(overridden_name):
            name = overridden_name.replace('_', '')
            name_with_ns = cns + name
            self.name_without_prefix = name
            self.camel_name = name_with_ns
            self.ns_upper = cns_upper
            self.name_lower = cns_lower + overridden_name.lower()
            self.name_upper = overridden_name.upper()
            #raise RuntimeError('handle Ugly_Case ', overridden_name)
        else:
            if overridden_name:
                name = overridden_name
            else:
                # No override: strip the configured prefix from the D-Bus name.
                name = self.name
                if name.startswith(interface_prefix):
                    name = name[len(interface_prefix):]
            self.name_without_prefix = name
            name = utils.strip_dots(name)
            name_with_ns = utils.strip_dots(cns + '.' + name)
            self.camel_name = name_with_ns
            self.ns_upper = cns_upper
            self.name_lower = cns_lower + utils.camel_case_to_uscore(name)
            self.name_upper = utils.camel_case_to_uscore(name).upper()
        self.name_hyphen = self.name_upper.lower().replace('_', '-')
        if utils.lookup_annotation(self.annotations, 'org.freedesktop.DBus.Deprecated') == 'true':
            self.deprecated = True
        # Members inherit the computed namespace prefixes.
        for m in self.methods:
            m.post_process(interface_prefix, cns, cns_upper, cns_lower)
        for s in self.signals:
            s.post_process(interface_prefix, cns, cns_upper, cns_lower)
        for p in self.properties:
            p.post_process(interface_prefix, cns, cns_upper, cns_lower)
| [
[
1,
0,
0.0573,
0.0024,
0,
0.66,
0,
0,
0,
1,
0,
0,
0,
0,
0
],
[
3,
0,
0.0668,
0.0119,
0,
0.66,
0.1667,
568,
0,
1,
0,
0,
0,
0,
0
],
[
2,
1,
0.068,
0.0095,
1,
0.66,
... | [
"from . import utils",
"class Annotation:\n def __init__(self, key, value):\n self.key = key\n self.value = value\n self.annotations = []",
" def __init__(self, key, value):\n self.key = key\n self.value = value\n self.annotations = []",
" self.key = key"... |
# -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
# Build-time configuration constants for gdbus-codegen.
# NOTE(review): "${prefix}" looks like a token substituted at install time --
# confirm against the build system before relying on the literal below.
DATADIR = "${prefix}/share".replace(
    "${prefix}", "/usr/local")
VERSION = "2.30.0"
| [
[
14,
0,
0.8889,
0.037,
0,
0.66,
0,
366,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.9444,
0.0741,
0,
0.66,
0.5,
366,
3,
2,
0,
0,
293,
10,
1
],
[
14,
0,
1,
0.037,
0,
0.66,
... | [
"DATADIR = \"${prefix}/share\"",
"DATADIR = DATADIR.replace(\n \"${prefix}\", \"/usr/local\")",
"VERSION = \"2.30.0\""
] |
# -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
import sys
import optparse
from . import config
from . import utils
from . import dbustypes
from . import parser
from . import codegen
from . import codegen_docbook
def find_arg(arg_list, arg_name):
    """Return the first Arg in arg_list whose name is arg_name, or None."""
    return next((candidate for candidate in arg_list if candidate.name == arg_name), None)
def find_method(iface, method):
    """Return the method of iface named `method`, or None when absent."""
    matches = [candidate for candidate in iface.methods if candidate.name == method]
    return matches[0] if matches else None
def find_signal(iface, signal):
    """Return the signal of iface named `signal`, or None when absent."""
    match = None
    for candidate in iface.signals:
        if candidate.name == signal:
            match = candidate
            break
    return match
def find_prop(iface, prop):
    """Return the property of iface named `prop`, or None when absent."""
    return next((candidate for candidate in iface.properties if candidate.name == prop), None)
def apply_annotation(iface_list, iface, method, signal, prop, arg, key, value):
    """Attach a (key, value) annotation to the addressed node.

    iface_list -- list of parsed Interface objects to search
    iface      -- D-Bus interface name (required)
    method / signal / prop -- at most one of these selects a member
    arg        -- optional argument name within the method or signal
    Raises RuntimeError when the addressed node cannot be found.
    """
    # Bug fix: iface_obj was previously unbound when no interface matched,
    # so an unknown interface name raised NameError instead of the intended
    # RuntimeError below.  Initialize it explicitly.
    iface_obj = None
    for i in iface_list:
        if i.name == iface:
            iface_obj = i
            break
    if iface_obj is None:
        raise RuntimeError('No interface %s'%iface)
    target_obj = None
    if method:
        method_obj = find_method(iface_obj, method)
        if method_obj is None:
            raise RuntimeError('No method %s on interface %s'%(method, iface))
        if arg:
            # The argument may be either an input or an output argument.
            arg_obj = find_arg(method_obj.in_args, arg)
            if arg_obj is None:
                arg_obj = find_arg(method_obj.out_args, arg)
                if arg_obj is None:
                    raise RuntimeError('No arg %s on method %s on interface %s'%(arg, method, iface))
            target_obj = arg_obj
        else:
            target_obj = method_obj
    elif signal:
        signal_obj = find_signal(iface_obj, signal)
        if signal_obj is None:
            raise RuntimeError('No signal %s on interface %s'%(signal, iface))
        if arg:
            arg_obj = find_arg(signal_obj.args, arg)
            if arg_obj is None:
                raise RuntimeError('No arg %s on signal %s on interface %s'%(arg, signal, iface))
            target_obj = arg_obj
        else:
            target_obj = signal_obj
    elif prop:
        prop_obj = find_prop(iface_obj, prop)
        if prop_obj is None:
            raise RuntimeError('No property %s on interface %s'%(prop, iface))
        target_obj = prop_obj
    else:
        # No member selector: annotate the interface itself.
        target_obj = iface_obj
    # Command-line annotations take precedence over XML ones: prepend.
    target_obj.annotations.insert(0, dbustypes.Annotation(key, value))
def apply_annotations(iface_list, annotation_list):
    """Apply annotations given via --annotate on the command line.

    Each entry is (what, key, value) where `what` addresses either an
    interface ("org.Iface"), a property ("org.Iface:Prop"), a signal
    ("org.Iface::Sig"), a method ("org.Iface.Method()"), or an argument
    of a method/signal via a "[arg_name]" suffix.
    """
    for (what, key, value) in annotation_list:
        sep = what.find('::')
        if sep != -1:
            # Signal, optionally with an [arg] suffix.
            iface_name = what[0:sep]
            signal_name = what[sep + 2:]
            bracket = signal_name.find('[')
            if bracket != -1:
                arg_name = signal_name[bracket + 1:]
                signal_name = signal_name[0:bracket]
                arg_name = arg_name[0:arg_name.find(']')]
                apply_annotation(iface_list, iface_name, None, signal_name, None, arg_name, key, value)
            else:
                apply_annotation(iface_list, iface_name, None, signal_name, None, None, key, value)
            continue
        sep = what.find(':')
        if sep != -1:
            # Property: "Iface:Prop".
            apply_annotation(iface_list, what[0:sep], None, None, what[sep + 1:], None, key, value)
            continue
        sep = what.find('()')
        if sep != -1:
            # Method, optionally with an [arg] suffix after the parentheses.
            combined = what[0:sep]
            dot = combined.rfind('.')
            iface_name = combined[0:dot]
            method_name = combined[dot + 1:]
            bracket = what.find('[')
            if bracket != -1:
                arg_name = what[bracket + 1:]
                arg_name = arg_name[0:arg_name.find(']')]
                apply_annotation(iface_list, iface_name, method_name, None, None, arg_name, key, value)
            else:
                apply_annotation(iface_list, iface_name, method_name, None, None, None, key, value)
            continue
        # No separators at all: annotate the interface itself.
        apply_annotation(iface_list, what, None, None, None, None, key, value)
def codegen_main():
    """Entry point: parse options, read the introspection XML files given as
    positional arguments, and emit Docbook and/or C code as requested."""
    arg_parser = optparse.OptionParser('%prog [options]')
    arg_parser.add_option('', '--xml-files', metavar='FILE', action='append',
                          help='D-Bus introspection XML file')
    arg_parser.add_option('', '--interface-prefix', metavar='PREFIX', default='',
                          help='String to strip from D-Bus interface names for code and docs')
    arg_parser.add_option('', '--c-namespace', metavar='NAMESPACE', default='',
                          help='The namespace to use for generated C code')
    arg_parser.add_option('', '--c-generate-object-manager', action='store_true',
                          help='Generate a GDBusObjectManagerClient subclass when generating C code')
    arg_parser.add_option('', '--generate-c-code', metavar='OUTFILES',
                          help='Generate C code in OUTFILES.[ch]')
    arg_parser.add_option('', '--generate-docbook', metavar='OUTFILES',
                          help='Generate Docbook in OUTFILES-org.Project.IFace.xml')
    arg_parser.add_option('', '--annotate', nargs=3, action='append', metavar='WHAT KEY VALUE',
                          help='Add annotation (may be used several times)')
    (opts, args) = arg_parser.parse_args()

    # Parse every XML document and accumulate the interfaces it declares.
    all_ifaces = []
    for fname in args:
        f = open(fname)
        try:
            xml_data = f.read()
        finally:
            f.close()
        parsed_ifaces = parser.parse_dbus_xml(xml_data)
        all_ifaces.extend(parsed_ifaces)

    if opts.annotate is not None:
        apply_annotations(all_ifaces, opts.annotate)

    for i in all_ifaces:
        i.post_process(opts.interface_prefix, opts.c_namespace)

    # The Docbook generator is always constructed: the C generator uses it
    # for cross-referencing even when no Docbook output is requested.
    docbook = opts.generate_docbook
    docbook_gen = codegen_docbook.DocbookCodeGenerator(all_ifaces, docbook)
    if docbook:
        docbook_gen.generate()

    c_code = opts.generate_c_code
    if c_code:
        # Bug fix: the original used the Python 2 file() builtin and never
        # closed the output handles, relying on GC to flush them.  Open them
        # with open() and close explicitly so output is flushed on all paths.
        h = open(c_code + '.h', 'w')
        c = open(c_code + '.c', 'w')
        try:
            gen = codegen.CodeGenerator(all_ifaces,
                                        opts.c_namespace,
                                        opts.interface_prefix,
                                        opts.c_generate_object_manager,
                                        docbook_gen,
                                        h, c)
            gen.generate()
        finally:
            h.close()
            c.close()

    sys.exit(0)
# Script entry point: run the code generator when this module is executed
# directly rather than imported.
if __name__ == "__main__":
    codegen_main()
| [
[
1,
0,
0.12,
0.005,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.125,
0.005,
0,
0.66,
0.0667,
323,
0,
1,
0,
0,
323,
0,
0
],
[
1,
0,
0.135,
0.005,
0,
0.66,
... | [
"import sys",
"import optparse",
"from . import config",
"from . import utils",
"from . import dbustypes",
"from . import parser",
"from . import codegen",
"from . import codegen_docbook",
"def find_arg(arg_list, arg_name):\n for a in arg_list:\n if a.name == arg_name:\n return ... |
# -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
import sys
from . import config
from . import utils
from . import dbustypes
# ----------------------------------------------------------------------------------------------------
class CodeGenerator:
def __init__(self, ifaces, namespace, interface_prefix, generate_objmanager, docbook_gen, h, c):
self.docbook_gen = docbook_gen
self.generate_objmanager = generate_objmanager
self.ifaces = ifaces
self.h = h
self.c = c
self.namespace = namespace
if len(namespace) > 0:
if utils.is_ugly_case(namespace):
self.namespace = namespace.replace('_', '')
self.ns_upper = namespace.upper() + '_'
self.ns_lower = namespace.lower() + '_'
else:
self.ns_upper = utils.camel_case_to_uscore(namespace).upper() + '_'
self.ns_lower = utils.camel_case_to_uscore(namespace).lower() + '_'
else:
self.ns_upper = ''
self.ns_lower = ''
self.interface_prefix = interface_prefix
self.header_guard = self.h.name.upper().replace('.', '_').replace('-', '_').replace('/', '_')
# ----------------------------------------------------------------------------------------------------
    def generate_intro(self):
        """Write the boilerplate preamble of the generated .c and .h files.

        The .c file gets: a DO-NOT-EDIT banner, includes, private
        _Extended* introspection-info structs, and small value-comparison
        helpers used by the property-change tracking code.  The .h file
        gets its banner, include guard and G_BEGIN_DECLS.
        """
        # --- .c file banner and includes ---
        self.c.write('/*\n'
                     ' * Generated by gdbus-codegen %s. DO NOT EDIT.\n'
                     ' *\n'
                     ' * The license of this code is the same as for the source it was derived from.\n'
                     ' */\n'
                     '\n'
                     %(config.VERSION))
        self.c.write('#ifdef HAVE_CONFIG_H\n'
                     '#  include "config.h"\n'
                     '#endif\n'
                     '\n'
                     '#include "%s"\n'
                     '\n'%(self.h.name))
        self.c.write('#ifdef G_OS_UNIX\n'
                     '#  include <gio/gunixfdlist.h>\n'
                     '#endif\n'
                     '\n')
        # --- private structs extending GDBus introspection info with
        #     codegen-specific fields (hyphenated names, GVariant flags) ---
        self.c.write('typedef struct\n'
                     '{\n'
                     '  GDBusArgInfo parent_struct;\n'
                     '  gboolean use_gvariant;\n'
                     '} _ExtendedGDBusArgInfo;\n'
                     '\n')
        self.c.write('typedef struct\n'
                     '{\n'
                     '  GDBusMethodInfo parent_struct;\n'
                     '  const gchar *signal_name;\n'
                     '  gboolean pass_fdlist;\n'
                     '} _ExtendedGDBusMethodInfo;\n'
                     '\n')
        self.c.write('typedef struct\n'
                     '{\n'
                     '  GDBusSignalInfo parent_struct;\n'
                     '  const gchar *signal_name;\n'
                     '} _ExtendedGDBusSignalInfo;\n'
                     '\n')
        self.c.write('typedef struct\n'
                     '{\n'
                     '  GDBusPropertyInfo parent_struct;\n'
                     '  const gchar *hyphen_name;\n'
                     '  gboolean use_gvariant;\n'
                     '} _ExtendedGDBusPropertyInfo;\n'
                     '\n')
        self.c.write('typedef struct\n'
                     '{\n'
                     '  GDBusInterfaceInfo parent_struct;\n'
                     '  const gchar *hyphen_name;\n'
                     '} _ExtendedGDBusInterfaceInfo;\n'
                     '\n')
        # --- bookkeeping struct for deferred property-change notification ---
        self.c.write('typedef struct\n'
                     '{\n'
                     '  const _ExtendedGDBusPropertyInfo *info;\n'
                     '  guint prop_id;\n'
                     '  GValue orig_value; /* the value before the change */\n'
                     '} ChangedProperty;\n'
                     '\n'
                     'static void\n'
                     '_changed_property_free (ChangedProperty *data)\n'
                     '{\n'
                     '  g_value_unset (&data->orig_value);\n'
                     '  g_free (data);\n'
                     '}\n'
                     '\n')
        # --- NULL-safe equality helpers for strv and GVariant values ---
        self.c.write('static gboolean\n'
                     '_g_strv_equal0 (gchar **a, gchar **b)\n'
                     '{\n'
                     '  gboolean ret = FALSE;\n'
                     '  guint n;\n'
                     '  if (a == NULL && b == NULL)\n'
                     '    {\n'
                     '      ret = TRUE;\n'
                     '      goto out;\n'
                     '    }\n'
                     '  if (a == NULL || b == NULL)\n'
                     '    goto out;\n'
                     '  if (g_strv_length (a) != g_strv_length (b))\n'
                     '    goto out;\n'
                     '  for (n = 0; a[n] != NULL; n++)\n'
                     '    if (g_strcmp0 (a[n], b[n]) != 0)\n'
                     '      goto out;\n'
                     '  ret = TRUE;\n'
                     'out:\n'
                     '  return ret;\n'
                     '}\n'
                     '\n')
        self.c.write('static gboolean\n'
                     '_g_variant_equal0 (GVariant *a, GVariant *b)\n'
                     '{\n'
                     '  gboolean ret = FALSE;\n'
                     '  if (a == NULL && b == NULL)\n'
                     '    {\n'
                     '      ret = TRUE;\n'
                     '      goto out;\n'
                     '    }\n'
                     '  if (a == NULL || b == NULL)\n'
                     '    goto out;\n'
                     '  ret = g_variant_equal (a, b);\n'
                     'out:\n'
                     '  return ret;\n'
                     '}\n'
                     '\n')
        # simplified - only supports the types we use
        self.c.write('G_GNUC_UNUSED static gboolean\n'
                     '_g_value_equal (const GValue *a, const GValue *b)\n'
                     '{\n'
                     '  gboolean ret = FALSE;\n'
                     '  g_assert (G_VALUE_TYPE (a) == G_VALUE_TYPE (b));\n'
                     '  switch (G_VALUE_TYPE (a))\n'
                     '    {\n'
                     '      case G_TYPE_BOOLEAN:\n'
                     '        ret = (g_value_get_boolean (a) == g_value_get_boolean (b));\n'
                     '        break;\n'
                     '      case G_TYPE_UCHAR:\n'
                     '        ret = (g_value_get_uchar (a) == g_value_get_uchar (b));\n'
                     '        break;\n'
                     '      case G_TYPE_INT:\n'
                     '        ret = (g_value_get_int (a) == g_value_get_int (b));\n'
                     '        break;\n'
                     '      case G_TYPE_UINT:\n'
                     '        ret = (g_value_get_uint (a) == g_value_get_uint (b));\n'
                     '        break;\n'
                     '      case G_TYPE_INT64:\n'
                     '        ret = (g_value_get_int64 (a) == g_value_get_int64 (b));\n'
                     '        break;\n'
                     '      case G_TYPE_UINT64:\n'
                     '        ret = (g_value_get_uint64 (a) == g_value_get_uint64 (b));\n'
                     '        break;\n'
                     '      case G_TYPE_DOUBLE:\n'
                     '        ret = (g_value_get_double (a) == g_value_get_double (b));\n'
                     '        break;\n'
                     '      case G_TYPE_STRING:\n'
                     '        ret = (g_strcmp0 (g_value_get_string (a), g_value_get_string (b)) == 0);\n'
                     '        break;\n'
                     '      case G_TYPE_VARIANT:\n'
                     '        ret = _g_variant_equal0 (g_value_get_variant (a), g_value_get_variant (b));\n'
                     '        break;\n'
                     '      default:\n'
                     '        if (G_VALUE_TYPE (a) == G_TYPE_STRV)\n'
                     '          ret = _g_strv_equal0 (g_value_get_boxed (a), g_value_get_boxed (b));\n'
                     '        else\n'
                     '          g_critical ("_g_value_equal() does not handle type %s", g_type_name (G_VALUE_TYPE (a)));\n'
                     '        break;\n'
                     '    }\n'
                     '  return ret;\n'
                     '}\n'
                     '\n')
        # --- .h file banner, include guard and declarations prologue ---
        self.h.write('/*\n'
                     ' * Generated by gdbus-codegen %s. DO NOT EDIT.\n'
                     ' *\n'
                     ' * The license of this code is the same as for the source it was derived from.\n'
                     ' */\n'
                     '\n'
                     '#ifndef __%s__\n'
                     '#define __%s__\n'
                     '\n'%(config.VERSION, self.header_guard, self.header_guard))
        self.h.write('#include <gio/gio.h>\n'
                     '\n'
                     'G_BEGIN_DECLS\n'
                     '\n')
# ----------------------------------------------------------------------------------------------------
def declare_types(self):
for i in self.ifaces:
self.h.write('\n')
self.h.write('/* ------------------------------------------------------------------------ */\n')
self.h.write('/* Declarations for %s */\n'%i.name)
self.h.write('\n')
# First the GInterface
self.h.write('#define %sTYPE_%s (%s_get_type ())\n'%(i.ns_upper, i.name_upper, i.name_lower))
self.h.write('#define %s%s(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_%s, %s))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %sIS_%s(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_%s))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('#define %s%s_GET_IFACE(o) (G_TYPE_INSTANCE_GET_INTERFACE ((o), %sTYPE_%s, %sIface))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('\n')
self.h.write('struct _%s;\n'%(i.camel_name))
self.h.write('typedef struct _%s %s;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sIface %sIface;\n'%(i.camel_name, i.camel_name))
self.h.write('\n')
self.h.write('struct _%sIface\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' GTypeInterface parent_iface;\n')
function_pointers = {}
# vfuncs for methods
if len(i.methods) > 0:
self.h.write('\n')
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
key = (m.since, '_method_%s'%m.name_lower)
value = ' gboolean (*handle_%s) (\n'%(m.name_lower)
value += ' %s *object,\n'%(i.camel_name)
value += ' GDBusMethodInvocation *invocation'%()
if unix_fd:
value += ',\n GUnixFDList *fd_list'
for a in m.in_args:
value += ',\n %sarg_%s'%(a.ctype_in, a.name)
value += ');\n\n'
function_pointers[key] = value
# vfuncs for signals
if len(i.signals) > 0:
self.h.write('\n')
for s in i.signals:
key = (s.since, '_signal_%s'%s.name_lower)
value = ' void (*%s) (\n'%(s.name_lower)
value += ' %s *object'%(i.camel_name)
for a in s.args:
value += ',\n %sarg_%s'%(a.ctype_in, a.name)
value += ');\n\n'
function_pointers[key] = value
# vfuncs for properties
if len(i.properties) > 0:
self.h.write('\n')
for p in i.properties:
key = (p.since, '_prop_get_%s'%p.name_lower)
value = ' %s (*get_%s) (%s *object);\n\n'%(p.arg.ctype_in, p.name_lower, i.camel_name)
function_pointers[key] = value
# Sort according to @since tag, then name.. this ensures
# that the function pointers don't change order assuming
# judicious use of @since
#
# Also use a proper version comparison function so e.g.
# 10.0 comes after 2.0.
#
# See https://bugzilla.gnome.org/show_bug.cgi?id=647577#c5
# for discussion
keys = function_pointers.keys()
if len(keys) > 0:
keys.sort(cmp=utils.my_version_cmp)
for key in keys:
self.h.write('%s'%function_pointers[key])
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %s_get_type (void) G_GNUC_CONST;\n'%(i.name_lower))
self.h.write('\n')
self.h.write('GDBusInterfaceInfo *%s_interface_info (void);\n'%(i.name_lower))
self.h.write('guint %s_override_properties (GObjectClass *klass, guint property_id_begin);\n'%(i.name_lower))
self.h.write('\n')
# Then method call completion functions
if len(i.methods) > 0:
self.h.write('\n')
self.h.write('/* D-Bus method call completion functions: */\n')
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
if m.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_complete_%s (\n'
' %s *object,\n'
' GDBusMethodInvocation *invocation'%(i.name_lower, m.name_lower, i.camel_name))
if unix_fd:
self.h.write(',\n GUnixFDList *fd_list')
for a in m.out_args:
self.h.write(',\n %s%s'%(a.ctype_in, a.name))
self.h.write(');\n')
self.h.write('\n')
self.h.write('\n')
# Then signal emission functions
if len(i.signals) > 0:
self.h.write('\n')
self.h.write('/* D-Bus signal emissions functions: */\n')
for s in i.signals:
if s.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_emit_%s (\n'
' %s *object'%(i.name_lower, s.name_lower, i.camel_name))
for a in s.args:
self.h.write(',\n %sarg_%s'%(a.ctype_in, a.name))
self.h.write(');\n')
self.h.write('\n')
self.h.write('\n')
# Then method call declarations
if len(i.methods) > 0:
self.h.write('\n')
self.h.write('/* D-Bus method calls: */\n')
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
# async begin
if m.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_call_%s (\n'
' %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
for a in m.in_args:
self.h.write(',\n %sarg_%s'%(a.ctype_in, a.name))
if unix_fd:
self.h.write(',\n GUnixFDList *fd_list')
self.h.write(',\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n')
self.h.write('\n')
# async finish
if m.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('gboolean %s_call_%s_finish (\n'
' %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
for a in m.out_args:
self.h.write(',\n %sout_%s'%(a.ctype_out, a.name))
if unix_fd:
self.h.write(',\n GUnixFDList **out_fd_list')
self.h.write(',\n'
' GAsyncResult *res,\n'
' GError **error);\n')
self.h.write('\n')
# sync
if m.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('gboolean %s_call_%s_sync (\n'
' %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
for a in m.in_args:
self.h.write(',\n %sarg_%s'%(a.ctype_in, a.name))
if unix_fd:
self.h.write(',\n GUnixFDList *fd_list')
for a in m.out_args:
self.h.write(',\n %sout_%s'%(a.ctype_out, a.name))
if unix_fd:
self.h.write(',\n GUnixFDList **out_fd_list')
self.h.write(',\n'
' GCancellable *cancellable,\n'
' GError **error);\n')
self.h.write('\n')
self.h.write('\n')
# Then the property accessor declarations
if len(i.properties) > 0:
self.h.write('\n')
self.h.write('/* D-Bus property accessors: */\n')
for p in i.properties:
# getter
if p.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s%s_get_%s (%s *object);\n'%(p.arg.ctype_in, i.name_lower, p.name_lower, i.camel_name))
if p.arg.free_func != None:
if p.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s%s_dup_%s (%s *object);\n'%(p.arg.ctype_in_dup, i.name_lower, p.name_lower, i.camel_name))
# setter
if p.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_set_%s (%s *object, %svalue);\n'%(i.name_lower, p.name_lower, i.camel_name, p.arg.ctype_in, ))
self.h.write('\n')
# Then the proxy
self.h.write('\n')
self.h.write('/* ---- */\n')
self.h.write('\n')
self.h.write('#define %sTYPE_%s_PROXY (%s_proxy_get_type ())\n'%(i.ns_upper, i.name_upper, i.name_lower))
self.h.write('#define %s%s_PROXY(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_%s_PROXY, %sProxy))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %s%s_PROXY_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_%s_PROXY, %sProxyClass))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %s%s_PROXY_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_%s_PROXY, %sProxyClass))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %sIS_%s_PROXY(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_%s_PROXY))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('#define %sIS_%s_PROXY_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_%s_PROXY))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('\n')
self.h.write('typedef struct _%sProxy %sProxy;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sProxyClass %sProxyClass;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sProxyPrivate %sProxyPrivate;\n'%(i.camel_name, i.camel_name))
self.h.write('\n')
self.h.write('struct _%sProxy\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusProxy parent_instance;\n')
self.h.write(' %sProxyPrivate *priv;\n'%(i.camel_name))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sProxyClass\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' GDBusProxyClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %s_proxy_get_type (void) G_GNUC_CONST;\n'%(i.name_lower))
self.h.write('\n')
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_proxy_new (\n'
' GDBusConnection *connection,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n'
%(i.name_lower))
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_proxy_new_finish (\n'
' GAsyncResult *res,\n'
' GError **error);\n'
%(i.camel_name, i.name_lower))
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_proxy_new_sync (\n'
' GDBusConnection *connection,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error);\n'
%(i.camel_name, i.name_lower))
self.h.write('\n')
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_proxy_new_for_bus (\n'
' GBusType bus_type,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n'
%(i.name_lower))
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_proxy_new_for_bus_finish (\n'
' GAsyncResult *res,\n'
' GError **error);\n'
%(i.camel_name, i.name_lower))
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_proxy_new_for_bus_sync (\n'
' GBusType bus_type,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error);\n'
%(i.camel_name, i.name_lower))
self.h.write('\n')
# Then the skeleton
self.h.write('\n')
self.h.write('/* ---- */\n')
self.h.write('\n')
self.h.write('#define %sTYPE_%s_SKELETON (%s_skeleton_get_type ())\n'%(i.ns_upper, i.name_upper, i.name_lower))
self.h.write('#define %s%s_SKELETON(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_%s_SKELETON, %sSkeleton))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %s%s_SKELETON_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_%s_SKELETON, %sSkeletonClass))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %s%s_SKELETON_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_%s_SKELETON, %sSkeletonClass))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %sIS_%s_SKELETON(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_%s_SKELETON))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('#define %sIS_%s_SKELETON_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_%s_SKELETON))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('\n')
self.h.write('typedef struct _%sSkeleton %sSkeleton;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sSkeletonClass %sSkeletonClass;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sSkeletonPrivate %sSkeletonPrivate;\n'%(i.camel_name, i.camel_name))
self.h.write('\n')
self.h.write('struct _%sSkeleton\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusInterfaceSkeleton parent_instance;\n')
self.h.write(' %sSkeletonPrivate *priv;\n'%(i.camel_name))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sSkeletonClass\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' GDBusInterfaceSkeletonClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %s_skeleton_get_type (void) G_GNUC_CONST;\n'%(i.name_lower))
self.h.write('\n')
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_skeleton_new (void);\n'%(i.camel_name, i.name_lower))
self.h.write('\n')
# Finally, the Object, ObjectProxy, ObjectSkeleton and ObjectManagerClient
if self.generate_objmanager:
self.h.write('\n')
self.h.write('/* ---- */\n')
self.h.write('\n')
self.h.write('#define %sTYPE_OBJECT (%sobject_get_type ())\n'%(self.ns_upper, self.ns_lower))
self.h.write('#define %sOBJECT(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_OBJECT, %sObject))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sIS_OBJECT(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_OBJECT))\n'%(self.ns_upper, self.ns_upper))
self.h.write('#define %sOBJECT_GET_IFACE(o) (G_TYPE_INSTANCE_GET_INTERFACE ((o), %sTYPE_OBJECT, %sObject))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObject;\n'%(self.namespace))
self.h.write('typedef struct _%sObject %sObject;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectIface %sObjectIface;\n'%(self.namespace, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObjectIface\n'%(self.namespace))
self.h.write('{\n'
' GTypeInterface parent_iface;\n'
'};\n'
'\n')
self.h.write('GType %sobject_get_type (void) G_GNUC_CONST;\n'
'\n'
%(self.ns_lower))
for i in self.ifaces:
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write ('%s *%sobject_get_%s (%sObject *object);\n'
%(i.camel_name, self.ns_lower, i.name_upper.lower(), self.namespace))
for i in self.ifaces:
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write ('%s *%sobject_peek_%s (%sObject *object);\n'
%(i.camel_name, self.ns_lower, i.name_upper.lower(), self.namespace))
self.h.write('\n')
self.h.write('#define %sTYPE_OBJECT_PROXY (%sobject_proxy_get_type ())\n'%(self.ns_upper, self.ns_lower))
self.h.write('#define %sOBJECT_PROXY(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_OBJECT_PROXY, %sObjectProxy))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_PROXY_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_OBJECT_PROXY, %sObjectProxyClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_PROXY_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_OBJECT_PROXY, %sObjectProxyClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sIS_OBJECT_PROXY(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_OBJECT_PROXY))\n'%(self.ns_upper, self.ns_upper))
self.h.write('#define %sIS_OBJECT_PROXY_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_OBJECT_PROXY))\n'%(self.ns_upper, self.ns_upper))
self.h.write('\n')
self.h.write('typedef struct _%sObjectProxy %sObjectProxy;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectProxyClass %sObjectProxyClass;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectProxyPrivate %sObjectProxyPrivate;\n'%(self.namespace, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObjectProxy\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusObjectProxy parent_instance;\n')
self.h.write(' %sObjectProxyPrivate *priv;\n'%(self.namespace))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sObjectProxyClass\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' GDBusObjectProxyClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %sobject_proxy_get_type (void) G_GNUC_CONST;\n'%(self.ns_lower))
self.h.write('%sObjectProxy *%sobject_proxy_new (GDBusConnection *connection, const gchar *object_path);\n'%(self.namespace, self.ns_lower))
self.h.write('\n')
self.h.write('#define %sTYPE_OBJECT_SKELETON (%sobject_skeleton_get_type ())\n'%(self.ns_upper, self.ns_lower))
self.h.write('#define %sOBJECT_SKELETON(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_OBJECT_SKELETON, %sObjectSkeleton))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_SKELETON_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_OBJECT_SKELETON, %sObjectSkeletonClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_SKELETON_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_OBJECT_SKELETON, %sObjectSkeletonClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sIS_OBJECT_SKELETON(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_OBJECT_SKELETON))\n'%(self.ns_upper, self.ns_upper))
self.h.write('#define %sIS_OBJECT_SKELETON_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_OBJECT_SKELETON))\n'%(self.ns_upper, self.ns_upper))
self.h.write('\n')
self.h.write('typedef struct _%sObjectSkeleton %sObjectSkeleton;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectSkeletonClass %sObjectSkeletonClass;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectSkeletonPrivate %sObjectSkeletonPrivate;\n'%(self.namespace, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObjectSkeleton\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusObjectSkeleton parent_instance;\n')
self.h.write(' %sObjectSkeletonPrivate *priv;\n'%(self.namespace))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sObjectSkeletonClass\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' GDBusObjectSkeletonClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %sobject_skeleton_get_type (void) G_GNUC_CONST;\n'%(self.ns_lower))
self.h.write('%sObjectSkeleton *%sobject_skeleton_new (const gchar *object_path);\n'
%(self.namespace, self.ns_lower))
for i in self.ifaces:
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write ('void %sobject_skeleton_set_%s (%sObjectSkeleton *object, %s *interface_);\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, i.camel_name))
self.h.write('\n')
self.h.write('/* ---- */\n')
self.h.write('\n')
self.h.write('#define %sTYPE_OBJECT_MANAGER_CLIENT (%sobject_manager_client_get_type ())\n'%(self.ns_upper, self.ns_lower))
self.h.write('#define %sOBJECT_MANAGER_CLIENT(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_OBJECT_MANAGER_CLIENT, %sObjectManagerClient))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_MANAGER_CLIENT_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_OBJECT_MANAGER_CLIENT, %sObjectManagerClientClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_MANAGER_CLIENT_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_OBJECT_MANAGER_CLIENT, %sObjectManagerClientClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sIS_OBJECT_MANAGER_CLIENT(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_OBJECT_MANAGER_CLIENT))\n'%(self.ns_upper, self.ns_upper))
self.h.write('#define %sIS_OBJECT_MANAGER_CLIENT_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_OBJECT_MANAGER_CLIENT))\n'%(self.ns_upper, self.ns_upper))
self.h.write('\n')
self.h.write('typedef struct _%sObjectManagerClient %sObjectManagerClient;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectManagerClientClass %sObjectManagerClientClass;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectManagerClientPrivate %sObjectManagerClientPrivate;\n'%(self.namespace, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObjectManagerClient\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusObjectManagerClient parent_instance;\n')
self.h.write(' %sObjectManagerClientPrivate *priv;\n'%(self.namespace))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sObjectManagerClientClass\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' GDBusObjectManagerClientClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %sobject_manager_client_get_type (void) G_GNUC_CONST;\n'%(self.ns_lower))
self.h.write('\n')
self.h.write('GType %sobject_manager_client_get_proxy_type (GDBusObjectManagerClient *manager, const gchar *object_path, const gchar *interface_name, gpointer user_data);\n'%(self.ns_lower))
self.h.write('\n')
self.h.write('void %sobject_manager_client_new (\n'
' GDBusConnection *connection,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n'
%(self.ns_lower))
self.h.write('GDBusObjectManager *%sobject_manager_client_new_finish (\n'
' GAsyncResult *res,\n'
' GError **error);\n'
%(self.ns_lower))
self.h.write('GDBusObjectManager *%sobject_manager_client_new_sync (\n'
' GDBusConnection *connection,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error);\n'
%(self.ns_lower))
self.h.write('\n')
self.h.write('void %sobject_manager_client_new_for_bus (\n'
' GBusType bus_type,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n'
%(self.ns_lower))
self.h.write('GDBusObjectManager *%sobject_manager_client_new_for_bus_finish (\n'
' GAsyncResult *res,\n'
' GError **error);\n'
%(self.ns_lower))
self.h.write('GDBusObjectManager *%sobject_manager_client_new_for_bus_sync (\n'
' GBusType bus_type,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error);\n'
%(self.ns_lower))
self.h.write('\n')
# ----------------------------------------------------------------------------------------------------
def generate_outro(self):
self.h.write('\n'
'G_END_DECLS\n'
'\n'
'#endif /* __%s__ */\n'%(self.header_guard))
# ----------------------------------------------------------------------------------------------------
def generate_annotations(self, prefix, annotations):
if annotations == None:
return
n = 0
for a in annotations:
#self.generate_annotations('%s_%d'%(prefix, n), a.get_annotations())
# skip internal annotations
if a.key.startswith('org.gtk.GDBus'):
continue
self.c.write('static const GDBusAnnotationInfo %s_%d =\n'
'{\n'
' -1,\n'
' "%s",\n'
' "%s",\n'%(prefix, n, a.key, a.value))
if len(a.annotations) == 0:
self.c.write(' NULL\n')
else:
self.c.write(' (GDBusAnnotationInfo **) &%s_%d_pointers\n'%(prefix, n))
self.c.write('};\n'
'\n')
n += 1
if n > 0:
self.c.write('static const GDBusAnnotationInfo * const %s_pointers[] =\n'
'{\n'%(prefix))
m = 0;
for a in annotations:
if a.key.startswith('org.gtk.GDBus'):
continue
self.c.write(' &%s_%d,\n'%(prefix, m))
m += 1
self.c.write(' NULL\n'
'};\n'
'\n')
return n
def generate_args(self, prefix, args):
for a in args:
num_anno = self.generate_annotations('%s_arg_%s_annotation_info'%(prefix, a.name), a.annotations)
self.c.write('static const _ExtendedGDBusArgInfo %s_%s =\n'
'{\n'
' {\n'
' -1,\n'
' "%s",\n'
' "%s",\n'%(prefix, a.name, a.name, a.signature))
if num_anno == 0:
self.c.write(' NULL\n')
else:
self.c.write(' (GDBusAnnotationInfo **) &%s_arg_%s_annotation_info_pointers\n'%(prefix, a.name))
self.c.write(' },\n')
if not utils.lookup_annotation(a.annotations, 'org.gtk.GDBus.C.ForceGVariant'):
self.c.write(' FALSE\n')
else:
self.c.write(' TRUE\n')
self.c.write('};\n'
'\n')
if len(args) > 0:
self.c.write('static const _ExtendedGDBusArgInfo * const %s_pointers[] =\n'
'{\n'%(prefix))
for a in args:
self.c.write(' &%s_%s,\n'%(prefix, a.name))
self.c.write(' NULL\n'
'};\n'
'\n')
  def generate_introspection_for_interface(self, i):
    """Emit the static introspection data for D-Bus interface @i.

    Writes _ExtendedGDBus{Method,Signal,Property}Info structs (each preceded
    by its argument and annotation tables) for every member of @i, the
    _ExtendedGDBusInterfaceInfo struct that ties them together, and the
    public <name>_interface_info() and <name>_override_properties()
    C functions.
    """
    self.c.write('/* ---- Introspection data for %s ---- */\n'
                 '\n'%(i.name))
    if len(i.methods) > 0:
      for m in i.methods:
        # unix_fd only feeds the TRUE/FALSE written as the final struct
        # member below; the org.gtk.GDBus.C.UnixFD annotation itself is
        # filtered out of the emitted annotation data by
        # generate_annotations().
        unix_fd = False
        if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
          unix_fd = True
        self.generate_args('_%s_method_info_%s_IN_ARG'%(i.name_lower, m.name_lower), m.in_args)
        self.generate_args('_%s_method_info_%s_OUT_ARG'%(i.name_lower, m.name_lower), m.out_args)
        num_anno = self.generate_annotations('_%s_method_%s_annotation_info'%(i.name_lower, m.name_lower), m.annotations)
        # NOTE(review): the leading -1 looks like the ref_count field of the
        # embedded GDBusMethodInfo (statically allocated) — confirm against
        # gdbusintrospection.h.
        self.c.write('static const _ExtendedGDBusMethodInfo _%s_method_info_%s =\n'
                     '{\n'
                     '  {\n'
                     '    -1,\n'
                     '    "%s",\n'%(i.name_lower, m.name_lower, m.name))
        if len(m.in_args) == 0:
          self.c.write('    NULL,\n')
        else:
          self.c.write('    (GDBusArgInfo **) &_%s_method_info_%s_IN_ARG_pointers,\n'%(i.name_lower, m.name_lower))
        if len(m.out_args) == 0:
          self.c.write('    NULL,\n')
        else:
          self.c.write('    (GDBusArgInfo **) &_%s_method_info_%s_OUT_ARG_pointers,\n'%(i.name_lower, m.name_lower))
        if num_anno == 0:
          self.c.write('    NULL\n')
        else:
          self.c.write('    (GDBusAnnotationInfo **) &_%s_method_%s_annotation_info_pointers\n'%(i.name_lower, m.name_lower))
        # Extra members: the GObject signal name for dispatch and the
        # unix-fd flag computed above.
        self.c.write('  },\n'
                     '  "handle-%s",\n'
                     '  %s\n'
                     %(m.name_hyphen, 'TRUE' if unix_fd else 'FALSE'))
        self.c.write('};\n'
                     '\n')
      self.c.write('static const _ExtendedGDBusMethodInfo * const _%s_method_info_pointers[] =\n'
                   '{\n'%(i.name_lower))
      for m in i.methods:
        self.c.write('  &_%s_method_info_%s,\n'%(i.name_lower, m.name_lower))
      self.c.write('  NULL\n'
                   '};\n'
                   '\n')
    # ---
    if len(i.signals) > 0:
      for s in i.signals:
        self.generate_args('_%s_signal_info_%s_ARG'%(i.name_lower, s.name_lower), s.args)
        num_anno = self.generate_annotations('_%s_signal_%s_annotation_info'%(i.name_lower, s.name_lower), s.annotations)
        self.c.write('static const _ExtendedGDBusSignalInfo _%s_signal_info_%s =\n'
                     '{\n'
                     '  {\n'
                     '    -1,\n'
                     '    "%s",\n'%(i.name_lower, s.name_lower, s.name))
        if len(s.args) == 0:
          self.c.write('    NULL,\n')
        else:
          self.c.write('    (GDBusArgInfo **) &_%s_signal_info_%s_ARG_pointers,\n'%(i.name_lower, s.name_lower))
        if num_anno == 0:
          self.c.write('    NULL\n')
        else:
          self.c.write('    (GDBusAnnotationInfo **) &_%s_signal_%s_annotation_info_pointers\n'%(i.name_lower, s.name_lower))
        # Extra member: the hyphenated GObject signal name.
        self.c.write('  },\n'
                     '  "%s"\n'
                     %(s.name_hyphen))
        self.c.write('};\n'
                     '\n')
      self.c.write('static const _ExtendedGDBusSignalInfo * const _%s_signal_info_pointers[] =\n'
                   '{\n'%(i.name_lower))
      for s in i.signals:
        self.c.write('  &_%s_signal_info_%s,\n'%(i.name_lower, s.name_lower))
      self.c.write('  NULL\n'
                   '};\n'
                   '\n')
    # ---
    if len(i.properties) > 0:
      for p in i.properties:
        # Map the readable/writable flags onto the GDBusPropertyInfoFlags
        # expression written into the struct.
        if p.readable and p.writable:
          access = 'G_DBUS_PROPERTY_INFO_FLAGS_READABLE | G_DBUS_PROPERTY_INFO_FLAGS_WRITABLE'
        elif p.readable:
          access = 'G_DBUS_PROPERTY_INFO_FLAGS_READABLE'
        elif p.writable:
          access = 'G_DBUS_PROPERTY_INFO_FLAGS_WRITABLE'
        else:
          access = 'G_DBUS_PROPERTY_INFO_FLAGS_NONE'
        num_anno = self.generate_annotations('_%s_property_%s_annotation_info'%(i.name_lower, p.name_lower), p.annotations)
        self.c.write('static const _ExtendedGDBusPropertyInfo _%s_property_info_%s =\n'
                     '{\n'
                     '  {\n'
                     '    -1,\n'
                     '    "%s",\n'
                     '    "%s",\n'
                     '    %s,\n'%(i.name_lower, p.name_lower, p.name, p.arg.signature, access))
        if num_anno == 0:
          self.c.write('    NULL\n')
        else:
          self.c.write('    (GDBusAnnotationInfo **) &_%s_property_%s_annotation_info_pointers\n'%(i.name_lower, p.name_lower))
        # Extra members: the hyphenated GObject property name and the
        # ForceGVariant marshalling flag.
        self.c.write('  },\n'
                     '  "%s",\n'
                     %(p.name_hyphen))
        if not utils.lookup_annotation(p.annotations, 'org.gtk.GDBus.C.ForceGVariant'):
          self.c.write('  FALSE\n')
        else:
          self.c.write('  TRUE\n')
        self.c.write('};\n'
                     '\n')
      self.c.write('static const _ExtendedGDBusPropertyInfo * const _%s_property_info_pointers[] =\n'
                   '{\n'%(i.name_lower))
      for p in i.properties:
        self.c.write('  &_%s_property_info_%s,\n'%(i.name_lower, p.name_lower))
      self.c.write('  NULL\n'
                   '};\n'
                   '\n')
    # Interface-level annotations, then the top-level struct that references
    # all of the member tables generated above (NULL where a member class is
    # empty).
    num_anno = self.generate_annotations('_%s_annotation_info'%(i.name_lower), i.annotations)
    self.c.write('static const _ExtendedGDBusInterfaceInfo _%s_interface_info =\n'
                 '{\n'
                 '  {\n'
                 '    -1,\n'
                 '    "%s",\n'%(i.name_lower, i.name))
    if len(i.methods) == 0:
      self.c.write('    NULL,\n')
    else:
      self.c.write('    (GDBusMethodInfo **) &_%s_method_info_pointers,\n'%(i.name_lower))
    if len(i.signals) == 0:
      self.c.write('    NULL,\n')
    else:
      self.c.write('    (GDBusSignalInfo **) &_%s_signal_info_pointers,\n'%(i.name_lower))
    if len(i.properties) == 0:
      self.c.write('    NULL,\n')
    else:
      self.c.write('    (GDBusPropertyInfo **) &_%s_property_info_pointers,\n'%(i.name_lower))
    if num_anno == 0:
      self.c.write('    NULL\n')
    else:
      self.c.write('    (GDBusAnnotationInfo **) &_%s_annotation_info_pointers\n'%(i.name_lower))
    self.c.write('  },\n'
                 '  "%s",\n'
                 '};\n'
                 '\n'
                 %(i.name_hyphen))
    self.c.write('\n')
    # Public accessor returning the interface info struct.
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * %s_interface_info:\n'
        ' *\n'
        ' * Gets a machine-readable description of the #%s D-Bus interface.\n'
        ' *\n'
        ' * Returns: (transfer none): A #GDBusInterfaceInfo. Do not free.\n'
        %(i.name_lower, i.name), False))
    self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
    self.c.write('GDBusInterfaceInfo *\n'
                 '%s_interface_info (void)\n'
                 '{\n'
                 '  return (GDBusInterfaceInfo *) &_%s_interface_info;\n'
                 '}\n'
                 '\n'%(i.name_lower, i.name_lower))
    # Helper for concrete classes to override all mirrored GObject
    # properties in definition order.
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * %s_override_properties:\n'
        ' * @klass: The class structure for a #GObject<!-- -->-derived class.\n'
        ' * @property_id_begin: The property id to assign to the first overridden property.\n'
        ' *\n'
        ' * Overrides all #GObject properties in the #%s interface for a concrete class.\n'
        ' * The properties are overridden in the order they are defined.\n'
        ' *\n'
        ' * Returns: The last property id.\n'
        %(i.name_lower, i.camel_name), False))
    self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
    self.c.write('guint\n'
                 '%s_override_properties (GObjectClass *klass, guint property_id_begin)\n'
                 '{\n'%(i.name_lower))
    for p in i.properties:
      self.c.write ('  g_object_class_override_property (klass, property_id_begin++, "%s");\n'%(p.name_hyphen))
    self.c.write('  return property_id_begin - 1;\n'
                 '}\n'
                 '\n')
    self.c.write('\n')
# ----------------------------------------------------------------------------------------------------
def generate_interface(self, i):
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s:\n'
' *\n'
' * Abstract interface type for the D-Bus interface #%s.\n'
%(i.camel_name, i.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sIface:\n'
' * @parent_iface: The parent interface.\n'
%(i.camel_name), False))
doc_bits = {}
if len(i.methods) > 0:
for m in i.methods:
key = (m.since, '_method_%s'%m.name_lower)
value = '@handle_%s: '%(m.name_lower)
value += 'Handler for the #%s::handle-%s signal.'%(i.camel_name, m.name_hyphen)
doc_bits[key] = value
if len(i.signals) > 0:
for s in i.signals:
key = (s.since, '_signal_%s'%s.name_lower)
value = '@%s: '%(s.name_lower)
value += 'Handler for the #%s::%s signal.'%(i.camel_name, s.name_hyphen)
doc_bits[key] = value
if len(i.properties) > 0:
for p in i.properties:
key = (p.since, '_prop_get_%s'%p.name_lower)
value = '@get_%s: '%(p.name_lower)
value += 'Getter for the #%s:%s property.'%(i.camel_name, p.name_hyphen)
doc_bits[key] = value
keys = doc_bits.keys()
if len(keys) > 0:
keys.sort(cmp=utils.my_version_cmp)
for key in keys:
self.c.write(' * %s\n'%doc_bits[key])
self.c.write(self.docbook_gen.expand(
' *\n'
' * Virtual table for the D-Bus interface #%s.\n'
%(i.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write('static void\n'
'%s_default_init (%sIface *iface)\n'
'{\n'%(i.name_lower, i.camel_name));
if len(i.methods) > 0:
self.c.write(' /* GObject signals for incoming D-Bus method calls: */\n')
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
self.c.write(self.docbook_gen.expand(
' /**\n'
' * %s::handle-%s:\n'
' * @object: A #%s.\n'
' * @invocation: A #GDBusMethodInvocation.\n'
%(i.camel_name, m.name_hyphen, i.camel_name), False))
if unix_fd:
self.c.write (' * @fd_list: (allow-none): A #GUnixFDList or %NULL.\n')
for a in m.in_args:
self.c.write (' * @arg_%s: Argument passed by remote caller.\n'%(a.name))
self.c.write(self.docbook_gen.expand(
' *\n'
' * Signal emitted when a remote caller is invoking the %s.%s() D-Bus method.\n'
' *\n'
' * If a signal handler returns %%TRUE, it means the signal handler will handle the invocation (e.g. take a reference to @invocation and eventually call %s_complete_%s() or e.g. g_dbus_method_invocation_return_error() on it) and no order signal handlers will run. If no signal handler handles the invocation, the %%G_DBUS_ERROR_UNKNOWN_METHOD error is returned.\n'
' *\n'
' * Returns: %%TRUE if the invocation was handled, %%FALSE to let other signal handlers run.\n'
%(i.name, m.name, i.name_lower, m.name_lower), False))
self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 2)
if unix_fd:
extra_args = 2
else:
extra_args = 1
self.c.write(' g_signal_new ("handle-%s",\n'
' G_TYPE_FROM_INTERFACE (iface),\n'
' G_SIGNAL_RUN_LAST,\n'
' G_STRUCT_OFFSET (%sIface, handle_%s),\n'
' g_signal_accumulator_true_handled,\n'
' NULL,\n' # accu_data
' g_cclosure_marshal_generic,\n'
' G_TYPE_BOOLEAN,\n'
' %d,\n'
' G_TYPE_DBUS_METHOD_INVOCATION'
%(m.name_hyphen, i.camel_name, m.name_lower, len(m.in_args) + extra_args))
if unix_fd:
self.c.write(', G_TYPE_UNIX_FD_LIST')
for a in m.in_args:
self.c.write (', %s'%(a.gtype))
self.c.write(');\n')
self.c.write('\n')
if len(i.signals) > 0:
self.c.write(' /* GObject signals for received D-Bus signals: */\n')
for s in i.signals:
self.c.write(self.docbook_gen.expand(
' /**\n'
' * %s::%s:\n'
' * @object: A #%s.\n'
%(i.camel_name, s.name_hyphen, i.camel_name), False))
for a in s.args:
self.c.write (' * @arg_%s: Argument.\n'%(a.name))
self.c.write(self.docbook_gen.expand(
' *\n'
' * On the client-side, this signal is emitted whenever the D-Bus signal #%s::%s is received.\n'
' *\n'
' * On the service-side, this signal can be used with e.g. g_signal_emit_by_name() to make the object emit the D-Bus signal.\n'
%(i.name, s.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(s, self.c, 2)
self.c.write(' g_signal_new ("%s",\n'
' G_TYPE_FROM_INTERFACE (iface),\n'
' G_SIGNAL_RUN_LAST,\n'
' G_STRUCT_OFFSET (%sIface, %s),\n'
' NULL,\n' # accumulator
' NULL,\n' # accu_data
' g_cclosure_marshal_generic,\n'
' G_TYPE_NONE,\n'
' %d'
%(s.name_hyphen, i.camel_name, s.name_lower, len(s.args)))
for a in s.args:
self.c.write (', %s'%(a.gtype))
self.c.write(');\n')
self.c.write('\n')
if len(i.properties) > 0:
self.c.write(' /* GObject properties for D-Bus properties: */\n')
for p in i.properties:
if p.readable and p.writable:
hint = 'Since the D-Bus property for this #GObject property is both readable and writable, it is meaningful to both read from it and write to it on both the service- and client-side.'
elif p.readable:
hint = 'Since the D-Bus property for this #GObject property is readable but not writable, it is meaningful to read from it on both the client- and service-side. It is only meaningful, however, to write to it on the service-side.'
elif p.writable:
hint = 'Since the D-Bus property for this #GObject property is writable but not readable, it is meaningful to write to it on both the client- and service-side. It is only meaningful, however, to read from it on the service-side.'
else:
raise RuntimeError('Cannot handle property %s that neither readable nor writable'%(p.name))
self.c.write(self.docbook_gen.expand(
' /**\n'
' * %s:%s:\n'
' *\n'
' * Represents the D-Bus property #%s:%s.\n'
' *\n'
' * %s\n'
%(i.camel_name, p.name_hyphen, i.name, p.name, hint), False))
self.write_gtkdoc_deprecated_and_since_and_close(p, self.c, 2)
self.c.write(' g_object_interface_install_property (iface,\n')
if p.arg.gtype == 'G_TYPE_VARIANT':
s = 'g_param_spec_variant ("%s", "%s", "%s", G_VARIANT_TYPE ("%s"), NULL'%(p.name_hyphen, p.name, p.name, p.arg.signature)
elif p.arg.signature == 'b':
s = 'g_param_spec_boolean ("%s", "%s", "%s", FALSE'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'y':
s = 'g_param_spec_uchar ("%s", "%s", "%s", 0, 255, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'n':
s = 'g_param_spec_int ("%s", "%s", "%s", G_MININT16, G_MAXINT16, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'q':
s = 'g_param_spec_uint ("%s", "%s", "%s", 0, G_MAXUINT16, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'i':
s = 'g_param_spec_int ("%s", "%s", "%s", G_MININT32, G_MAXINT32, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'u':
s = 'g_param_spec_uint ("%s", "%s", "%s", 0, G_MAXUINT32, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'x':
s = 'g_param_spec_int64 ("%s", "%s", "%s", G_MININT64, G_MAXINT64, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 't':
s = 'g_param_spec_uint64 ("%s", "%s", "%s", 0, G_MAXUINT64, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'd':
s = 'g_param_spec_double ("%s", "%s", "%s", -G_MAXDOUBLE, G_MAXDOUBLE, 0.0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 's':
s = 'g_param_spec_string ("%s", "%s", "%s", NULL'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'o':
s = 'g_param_spec_string ("%s", "%s", "%s", NULL'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'g':
s = 'g_param_spec_string ("%s", "%s", "%s", NULL'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'ay':
s = 'g_param_spec_string ("%s", "%s", "%s", NULL'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'as':
s = 'g_param_spec_boxed ("%s", "%s", "%s", G_TYPE_STRV'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'ao':
s = 'g_param_spec_boxed ("%s", "%s", "%s", G_TYPE_STRV'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'aay':
s = 'g_param_spec_boxed ("%s", "%s", "%s", G_TYPE_STRV'%(p.name_hyphen, p.name, p.name)
else:
raise RuntimeError('Unsupported gtype %s for GParamSpec'%(p.arg.gtype))
self.c.write(' %s, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));'%s);
self.c.write('\n')
self.c.write('}\n'
'\n')
self.c.write('typedef %sIface %sInterface;\n'%(i.camel_name, i.camel_name))
self.c.write('G_DEFINE_INTERFACE (%s, %s, G_TYPE_OBJECT);\n'%(i.camel_name, i.name_lower))
self.c.write('\n')
# ----------------------------------------------------------------------------------------------------
def generate_property_accessors(self, i):
for p in i.properties:
# getter
if p.readable and p.writable:
hint = 'Since this D-Bus property is both readable and writable, it is meaningful to use this function on both the client- and service-side.'
elif p.readable:
hint = 'Since this D-Bus property is readable, it is meaningful to use this function on both the client- and service-side.'
elif p.writable:
hint = 'Since this D-Bus property is not readable, it is only meaningful to use this function on the service-side.'
else:
raise RuntimeError('Cannot handle property %s that neither readable nor writable'%(p.name))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_get_%s: (skip)\n'
' * @object: A #%s.\n'
' *\n'
' * Gets the value of the #%s:%s D-Bus property.\n'
' *\n'
' * %s\n'
' *\n'
%(i.name_lower, p.name_lower, i.camel_name, i.name, p.name, hint), False))
if p.arg.free_func != None:
self.c.write(' * <warning>The returned value is only valid until the property changes so on the client-side it is only safe to use this function on the thread where @object was constructed. Use %s_dup_%s() if on another thread.</warning>\n'
' *\n'
' * Returns: (transfer none): The property value or %%NULL if the property is not set. Do not free the returned value, it belongs to @object.\n'
%(i.name_lower, p.name_lower))
else:
self.c.write(' * Returns: The property value.\n')
self.write_gtkdoc_deprecated_and_since_and_close(p, self.c, 0)
self.c.write('%s\n'
'%s_get_%s (%s *object)\n'
'{\n'%(p.arg.ctype_in, i.name_lower, p.name_lower, i.camel_name))
self.c.write(' return %s%s_GET_IFACE (object)->get_%s (object);\n'%(i.ns_upper, i.name_upper, p.name_lower))
self.c.write('}\n')
self.c.write('\n')
if p.arg.free_func != None:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_dup_%s: (skip)\n'
' * @object: A #%s.\n'
' *\n'
' * Gets a copy of the #%s:%s D-Bus property.\n'
' *\n'
' * %s\n'
' *\n'
' * Returns: (transfer full): The property value or %%NULL if the property is not set. The returned value should be freed with %s().\n'
%(i.name_lower, p.name_lower, i.camel_name, i.name, p.name, hint, p.arg.free_func), False))
self.write_gtkdoc_deprecated_and_since_and_close(p, self.c, 0)
self.c.write('%s\n'
'%s_dup_%s (%s *object)\n'
'{\n'
' %svalue;\n'%(p.arg.ctype_in_dup, i.name_lower, p.name_lower, i.camel_name, p.arg.ctype_in_dup))
self.c.write(' g_object_get (G_OBJECT (object), "%s", &value, NULL);\n'%(p.name_hyphen))
self.c.write(' return value;\n')
self.c.write('}\n')
self.c.write('\n')
# setter
if p.readable and p.writable:
hint = 'Since this D-Bus property is both readable and writable, it is meaningful to use this function on both the client- and service-side.'
elif p.readable:
hint = 'Since this D-Bus property is not writable, it is only meaningful to use this function on the service-side.'
elif p.writable:
hint = 'Since this D-Bus property is writable, it is meaningful to use this function on both the client- and service-side.'
else:
raise RuntimeError('Cannot handle property %s that neither readable nor writable'%(p.name))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_set_%s: (skip)\n'
' * @object: A #%s.\n'
' * @value: The value to set.\n'
' *\n'
' * Sets the #%s:%s D-Bus property to @value.\n'
' *\n'
' * %s\n'
%(i.name_lower, p.name_lower, i.camel_name, i.name, p.name, hint), False))
self.write_gtkdoc_deprecated_and_since_and_close(p, self.c, 0)
self.c.write('void\n'
'%s_set_%s (%s *object, %svalue)\n'
'{\n'%(i.name_lower, p.name_lower, i.camel_name, p.arg.ctype_in, ))
self.c.write(' g_object_set (G_OBJECT (object), "%s", value, NULL);\n'%(p.name_hyphen))
self.c.write('}\n')
self.c.write('\n')
# ---------------------------------------------------------------------------------------------------
def generate_signal_emitters(self, i):
for s in i.signals:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_emit_%s:\n'
' * @object: A #%s.\n'
%(i.name_lower, s.name_lower, i.camel_name), False))
for a in s.args:
self.c.write(' * @arg_%s: Argument to pass with the signal.\n'%(a.name))
self.c.write(self.docbook_gen.expand(
' *\n'
' * Emits the #%s::%s D-Bus signal.\n'
%(i.name, s.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(s, self.c, 0)
self.c.write('void\n'
'%s_emit_%s (\n'
' %s *object'%(i.name_lower, s.name_lower, i.camel_name))
for a in s.args:
self.c.write(',\n %sarg_%s'%(a.ctype_in, a.name))
self.c.write(')\n'
'{\n'
' g_signal_emit_by_name (object, "%s"'%(s.name_hyphen))
for a in s.args:
self.c.write(', arg_%s'%a.name)
self.c.write(');\n')
self.c.write('}\n'
'\n')
# ---------------------------------------------------------------------------------------------------
  def generate_method_calls(self, i):
    """Emit the client-side C call wrappers for every method of @i.

    For each method three functions are written: <iface>_call_<m>() (async
    begin via g_dbus_proxy_call[_with_unix_fd_list]),
    <iface>_call_<m>_finish() (async completion), and
    <iface>_call_<m>_sync() (blocking variant).  Methods annotated with
    org.gtk.GDBus.C.UnixFD use the *_with_unix_fd_list proxy calls and grow
    fd_list/out_fd_list parameters.
    """
    for m in i.methods:
      unix_fd = False
      if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
        unix_fd = True
      # async begin
      self.c.write('/**\n'
                   ' * %s_call_%s:\n'
                   ' * @proxy: A #%sProxy.\n'
                   %(i.name_lower, m.name_lower, i.camel_name))
      for a in m.in_args:
        self.c.write(' * @arg_%s: Argument to pass with the method invocation.\n'%(a.name))
      if unix_fd:
        self.c.write(' * @fd_list: (allow-none): A #GUnixFDList or %NULL.\n')
      self.c.write(self.docbook_gen.expand(
          ' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
          ' * @callback: A #GAsyncReadyCallback to call when the request is satisfied or %%NULL.\n'
          ' * @user_data: User data to pass to @callback.\n'
          ' *\n'
          ' * Asynchronously invokes the %s.%s() D-Bus method on @proxy.\n'
          ' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
          ' * You can then call %s_call_%s_finish() to get the result of the operation.\n'
          ' *\n'
          ' * See %s_call_%s_sync() for the synchronous, blocking version of this method.\n'
          %(i.name, m.name, i.name_lower, m.name_lower, i.name_lower, m.name_lower), False))
      self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 0)
      self.c.write('void\n'
                   '%s_call_%s (\n'
                   '    %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
      for a in m.in_args:
        self.c.write(',\n    %sarg_%s'%(a.ctype_in, a.name))
      if unix_fd:
        self.c.write(',\n    GUnixFDList *fd_list')
      self.c.write(',\n'
                   '    GCancellable *cancellable,\n'
                   '    GAsyncReadyCallback callback,\n'
                   '    gpointer user_data)\n'
                   '{\n')
      if unix_fd:
        self.c.write('  g_dbus_proxy_call_with_unix_fd_list (G_DBUS_PROXY (proxy),\n')
      else:
        self.c.write('  g_dbus_proxy_call (G_DBUS_PROXY (proxy),\n')
      # In-arguments are packed into a single tuple GVariant built from the
      # concatenated format_in strings.
      self.c.write('    "%s",\n'
                   '    g_variant_new ("('%(m.name))
      for a in m.in_args:
        self.c.write('%s'%(a.format_in))
      self.c.write(')"')
      for a in m.in_args:
        self.c.write(',\n                   arg_%s'%(a.name))
      self.c.write('),\n'
                   '    G_DBUS_CALL_FLAGS_NONE,\n'
                   '    -1,\n')
      if unix_fd:
        self.c.write('    fd_list,\n')
      self.c.write('    cancellable,\n'
                   '    callback,\n'
                   '    user_data);\n')
      self.c.write('}\n'
                   '\n')
      # async finish
      # (sic: the "succeded" typo below is part of the emitted gtk-doc
      # template; left untouched to keep generated output stable.)
      self.c.write('/**\n'
                   ' * %s_call_%s_finish:\n'
                   ' * @proxy: A #%sProxy.\n'
                   %(i.name_lower, m.name_lower, i.camel_name))
      for a in m.out_args:
        self.c.write(' * @out_%s: (out): Return location for return parameter or %%NULL to ignore.\n'%(a.name))
      if unix_fd:
        self.c.write(' * @out_fd_list: (out): Return location for a #GUnixFDList or %NULL.\n')
      self.c.write(self.docbook_gen.expand(
          ' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %s_call_%s().\n'
          ' * @error: Return location for error or %%NULL.\n'
          ' *\n'
          ' * Finishes an operation started with %s_call_%s().\n'
          ' *\n'
          ' * Returns: (skip): %%TRUE if the call succeded, %%FALSE if @error is set.\n'
          %(i.name_lower, m.name_lower, i.name_lower, m.name_lower), False))
      self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 0)
      self.c.write('gboolean\n'
                   '%s_call_%s_finish (\n'
                   '    %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
      for a in m.out_args:
        self.c.write(',\n    %sout_%s'%(a.ctype_out, a.name))
      if unix_fd:
        self.c.write(',\n    GUnixFDList **out_fd_list')
      self.c.write(',\n'
                   '    GAsyncResult *res,\n'
                   '    GError **error)\n'
                   '{\n'
                   '  GVariant *_ret;\n')
      if unix_fd:
        self.c.write('  _ret = g_dbus_proxy_call_with_unix_fd_list_finish (G_DBUS_PROXY (proxy), out_fd_list, res, error);\n')
      else:
        self.c.write('  _ret = g_dbus_proxy_call_finish (G_DBUS_PROXY (proxy), res, error);\n')
      self.c.write('  if (_ret == NULL)\n'
                   '    goto _out;\n')
      # Out-arguments are unpacked from the reply tuple via the concatenated
      # format_out strings.
      self.c.write('  g_variant_get (_ret,\n'
                   '                 \"(')
      for a in m.out_args:
        self.c.write('%s'%(a.format_out))
      self.c.write(')"')
      for a in m.out_args:
        self.c.write(',\n                 out_%s'%(a.name))
      self.c.write(');\n'
                   '  g_variant_unref (_ret);\n')
      self.c.write('_out:\n'
                   '  return _ret != NULL;\n'
                   '}\n'
                   '\n')
      # sync
      self.c.write('/**\n'
                   ' * %s_call_%s_sync:\n'
                   ' * @proxy: A #%sProxy.\n'
                   %(i.name_lower, m.name_lower, i.camel_name))
      for a in m.in_args:
        self.c.write(' * @arg_%s: Argument to pass with the method invocation.\n'%(a.name))
      if unix_fd:
        self.c.write(' * @fd_list: (allow-none): A #GUnixFDList or %NULL.\n')
      for a in m.out_args:
        self.c.write(' * @out_%s: (out): Return location for return parameter or %%NULL to ignore.\n'%(a.name))
      if unix_fd:
        self.c.write(' * @out_fd_list: (out): Return location for a #GUnixFDList or %NULL.\n')
      self.c.write(self.docbook_gen.expand(
          ' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
          ' * @error: Return location for error or %%NULL.\n'
          ' *\n'
          ' * Synchronously invokes the %s.%s() D-Bus method on @proxy. The calling thread is blocked until a reply is received.\n'
          ' *\n'
          ' * See %s_call_%s() for the asynchronous version of this method.\n'
          ' *\n'
          ' * Returns: (skip): %%TRUE if the call succeded, %%FALSE if @error is set.\n'
          %(i.name, m.name, i.name_lower, m.name_lower), False))
      self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 0)
      self.c.write('gboolean\n'
                   '%s_call_%s_sync (\n'
                   '    %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
      for a in m.in_args:
        self.c.write(',\n    %sarg_%s'%(a.ctype_in, a.name))
      if unix_fd:
        self.c.write(',\n    GUnixFDList *fd_list')
      for a in m.out_args:
        self.c.write(',\n    %sout_%s'%(a.ctype_out, a.name))
      if unix_fd:
        self.c.write(',\n    GUnixFDList **out_fd_list')
      self.c.write(',\n'
                   '    GCancellable *cancellable,\n'
                   '    GError **error)\n'
                   '{\n'
                   '  GVariant *_ret;\n')
      if unix_fd:
        self.c.write('  _ret = g_dbus_proxy_call_with_unix_fd_list_sync (G_DBUS_PROXY (proxy),\n')
      else:
        self.c.write('  _ret = g_dbus_proxy_call_sync (G_DBUS_PROXY (proxy),\n')
      self.c.write('    "%s",\n'
                   '    g_variant_new ("('%(m.name))
      for a in m.in_args:
        self.c.write('%s'%(a.format_in))
      self.c.write(')"')
      for a in m.in_args:
        self.c.write(',\n                   arg_%s'%(a.name))
      self.c.write('),\n'
                   '    G_DBUS_CALL_FLAGS_NONE,\n'
                   '    -1,\n')
      if unix_fd:
        self.c.write('    fd_list,\n'
                     '    out_fd_list,\n')
      self.c.write('    cancellable,\n'
                   '    error);\n'
                   '  if (_ret == NULL)\n'
                   '    goto _out;\n')
      self.c.write('  g_variant_get (_ret,\n'
                   '                 \"(')
      for a in m.out_args:
        self.c.write('%s'%(a.format_out))
      self.c.write(')"')
      for a in m.out_args:
        self.c.write(',\n                 out_%s'%(a.name))
      self.c.write(');\n'
                   '  g_variant_unref (_ret);\n')
      self.c.write('_out:\n'
                   '  return _ret != NULL;\n'
                   '}\n'
                   '\n')
# ---------------------------------------------------------------------------------------------------
    def generate_method_completers(self, i):
        """Emits one <iface>_complete_<method>() C helper per method of interface i.

        Service implementations call these helpers to finish handling a D-Bus
        method invocation by replying with the method's out-arguments via
        g_dbus_method_invocation_return_value() (or the _with_unix_fd_list
        variant for methods annotated org.gtk.GDBus.C.UnixFD).
        """
        for m in i.methods:
            # Methods annotated org.gtk.GDBus.C.UnixFD additionally take a
            # GUnixFDList and reply through the _with_unix_fd_list API.
            unix_fd = False
            if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
                unix_fd = True
            # gtk-doc comment block for the generated helper.
            self.c.write('/**\n'
                         ' * %s_complete_%s:\n'
                         ' * @object: A #%s.\n'
                         ' * @invocation: (transfer full): A #GDBusMethodInvocation.\n'
                         %(i.name_lower, m.name_lower, i.camel_name))
            if unix_fd:
                self.c.write (' * @fd_list: (allow-none): A #GUnixFDList or %NULL.\n')
            for a in m.out_args:
                self.c.write(' * @%s: Parameter to return.\n'%(a.name))
            self.c.write(self.docbook_gen.expand(
                    ' *\n'
                    ' * Helper function used in service implementations to finish handling invocations of the %s.%s() D-Bus method. If you instead want to finish handling an invocation by returning an error, use g_dbus_method_invocation_return_error() or similar.\n'
                    ' *\n'
                    ' * This method will free @invocation, you cannot use it afterwards.\n'
                    %(i.name, m.name), False))
            self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 0)
            # Function signature: object + invocation, then optional fd_list,
            # then one parameter per out-argument.
            self.c.write('void\n'
                         '%s_complete_%s (\n'
                         '    %s *object,\n'
                         '    GDBusMethodInvocation *invocation'%(i.name_lower, m.name_lower, i.camel_name))
            if unix_fd:
                self.c.write(',\n    GUnixFDList *fd_list')
            for a in m.out_args:
                self.c.write(',\n    %s%s'%(a.ctype_in, a.name))
            self.c.write(')\n'
                         '{\n')
            # Body: reply with a tuple built from the out-arguments' GVariant
            # format strings.
            if unix_fd:
                self.c.write('  g_dbus_method_invocation_return_value_with_unix_fd_list (invocation,\n'
                             '    g_variant_new ("(')
            else:
                self.c.write('  g_dbus_method_invocation_return_value (invocation,\n'
                             '    g_variant_new ("(')
            for a in m.out_args:
                self.c.write('%s'%(a.format_in))
            self.c.write(')"')
            for a in m.out_args:
                self.c.write(',\n                   %s'%(a.name))
            if unix_fd:
                self.c.write('),\n    fd_list);\n')
            else:
                self.c.write('));\n')
            self.c.write('}\n'
                         '\n')
# ---------------------------------------------------------------------------------------------------
def generate_proxy(self, i):
# class boilerplate
self.c.write('/* ------------------------------------------------------------------------ */\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sProxy:\n'
' *\n'
' * The #%sProxy structure contains only private data and should only be accessed using the provided API.\n'
%(i.camel_name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sProxyClass:\n'
' * @parent_class: The parent class.\n'
' *\n'
' * Class structure for #%sProxy.\n'
%(i.camel_name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write('struct _%sProxyPrivate\n'
'{\n'
' GData *qdata;\n'
'};\n'
'\n'%i.camel_name)
self.c.write('static void %s_proxy_iface_init (%sIface *iface);\n'
'\n'%(i.name_lower, i.camel_name))
self.c.write('G_DEFINE_TYPE_WITH_CODE (%sProxy, %s_proxy, G_TYPE_DBUS_PROXY,\n'%(i.camel_name, i.name_lower))
self.c.write(' G_IMPLEMENT_INTERFACE (%sTYPE_%s, %s_proxy_iface_init));\n\n'%(i.ns_upper, i.name_upper, i.name_lower))
# finalize
self.c.write('static void\n'
'%s_proxy_finalize (GObject *object)\n'
'{\n'%(i.name_lower))
self.c.write(' %sProxy *proxy = %s%s_PROXY (object);\n'%(i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' g_datalist_clear (&proxy->priv->qdata);\n')
self.c.write(' G_OBJECT_CLASS (%s_proxy_parent_class)->finalize (object);\n'
'}\n'
'\n'%(i.name_lower))
# property accessors
#
# Note that we are guaranteed that prop_id starts at 1 and is
# laid out in the same order as introspection data pointers
#
self.c.write('static void\n'
'%s_proxy_get_property (GObject *object,\n'
' guint prop_id,\n'
' GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'%(i.name_lower))
if len(i.properties) > 0:
self.c.write(' const _ExtendedGDBusPropertyInfo *info;\n'
' GVariant *variant;\n'
' g_assert (prop_id != 0 && prop_id - 1 < %d);\n'
' info = _%s_property_info_pointers[prop_id - 1];\n'
' variant = g_dbus_proxy_get_cached_property (G_DBUS_PROXY (object), info->parent_struct.name);\n'
' if (info->use_gvariant)\n'
' {\n'
' g_value_set_variant (value, variant);\n'
' }\n'
' else\n'
' {\n'
# could be that we don't have the value in cache - in that case, we do
# nothing and the user gets the default value for the GType
' if (variant != NULL)\n'
' g_dbus_gvariant_to_gvalue (variant, value);\n'
' }\n'
' if (variant != NULL)\n'
' g_variant_unref (variant);\n'
%(len(i.properties), i.name_lower))
self.c.write('}\n'
'\n')
if len(i.properties) > 0:
self.c.write('static void\n'
'%s_proxy_set_property_cb (GDBusProxy *proxy,\n'
' GAsyncResult *res,\n'
' gpointer user_data)\n'
'{\n'%(i.name_lower))
self.c.write(' const _ExtendedGDBusPropertyInfo *info = user_data;\n'
' GError *error;\n'
' error = NULL;\n'
' if (!g_dbus_proxy_call_finish (proxy, res, &error))\n'
' {\n'
' g_warning ("Error setting property `%%s\' on interface %s: %%s (%%s, %%d)",\n'
' info->parent_struct.name, \n'
' error->message, g_quark_to_string (error->domain), error->code);\n'
' g_error_free (error);\n'
' }\n'
%(i.name))
self.c.write('}\n'
'\n')
self.c.write('static void\n'
'%s_proxy_set_property (GObject *object,\n'
' guint prop_id,\n'
' const GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'%(i.name_lower))
if len(i.properties) > 0:
self.c.write(' const _ExtendedGDBusPropertyInfo *info;\n'
' GVariant *variant;\n'
' g_assert (prop_id != 0 && prop_id - 1 < %d);\n'
' info = _%s_property_info_pointers[prop_id - 1];\n'
' variant = g_dbus_gvalue_to_gvariant (value, G_VARIANT_TYPE (info->parent_struct.signature));\n'
' g_dbus_proxy_call (G_DBUS_PROXY (object),\n'
' "org.freedesktop.DBus.Properties.Set",\n'
' g_variant_new ("(ssv)", "%s", info->parent_struct.name, variant),\n'
' G_DBUS_CALL_FLAGS_NONE,\n'
' -1,\n'
' NULL, (GAsyncReadyCallback) %s_proxy_set_property_cb, (gpointer) info);\n'
' g_variant_unref (variant);\n'
%(len(i.properties), i.name_lower, i.name, i.name_lower))
self.c.write('}\n'
'\n')
# signal received
self.c.write('static void\n'
'%s_proxy_g_signal (GDBusProxy *proxy,\n'
' const gchar *sender_name,\n'
' const gchar *signal_name,\n'
' GVariant *parameters)\n'
'{\n'%(i.name_lower))
self.c.write(' _ExtendedGDBusSignalInfo *info;\n'
' GVariantIter iter;\n'
' GVariant *child;\n'
' GValue *paramv;\n'
' guint num_params;\n'
' guint n;\n'
' guint signal_id;\n');
# Note: info could be NULL if we are talking to a newer version of the interface
self.c.write(' info = (_ExtendedGDBusSignalInfo *) g_dbus_interface_info_lookup_signal ((GDBusInterfaceInfo *) &_%s_interface_info, signal_name);\n'
' if (info == NULL)\n'
' return;\n'
%(i.name_lower))
self.c.write (' num_params = g_variant_n_children (parameters);\n'
' paramv = g_new0 (GValue, num_params + 1);\n'
' g_value_init (¶mv[0], %sTYPE_%s);\n'
' g_value_set_object (¶mv[0], proxy);\n'
%(i.ns_upper, i.name_upper))
self.c.write(' g_variant_iter_init (&iter, parameters);\n'
' n = 1;\n'
' while ((child = g_variant_iter_next_value (&iter)) != NULL)\n'
' {\n'
' _ExtendedGDBusArgInfo *arg_info = (_ExtendedGDBusArgInfo *) info->parent_struct.args[n - 1];\n'
' if (arg_info->use_gvariant)\n'
' {\n'
' g_value_init (¶mv[n], G_TYPE_VARIANT);\n'
' g_value_set_variant (¶mv[n], child);\n'
' n++;\n'
' }\n'
' else\n'
' g_dbus_gvariant_to_gvalue (child, ¶mv[n++]);\n'
' g_variant_unref (child);\n'
' }\n'
)
self.c.write(' signal_id = g_signal_lookup (info->signal_name, %sTYPE_%s);\n'
%(i.ns_upper, i.name_upper))
self.c.write(' g_signal_emitv (paramv, signal_id, 0, NULL);\n')
self.c.write(' for (n = 0; n < num_params + 1; n++)\n'
' g_value_unset (¶mv[n]);\n'
' g_free (paramv);\n')
self.c.write('}\n'
'\n')
# property changed
self.c.write('static void\n'
'%s_proxy_g_properties_changed (GDBusProxy *_proxy,\n'
' GVariant *changed_properties,\n'
' const gchar *const *invalidated_properties)\n'
'{\n'%(i.name_lower))
# Note: info could be NULL if we are talking to a newer version of the interface
self.c.write(' %sProxy *proxy = %s%s_PROXY (_proxy);\n'
' guint n;\n'
' const gchar *key;\n'
' GVariantIter *iter;\n'
' _ExtendedGDBusPropertyInfo *info;\n'
' g_variant_get (changed_properties, "a{sv}", &iter);\n'
' while (g_variant_iter_next (iter, "{&sv}", &key, NULL))\n'
' {\n'
' info = (_ExtendedGDBusPropertyInfo *) g_dbus_interface_info_lookup_property ((GDBusInterfaceInfo *) &_%s_interface_info, key);\n'
' g_datalist_remove_data (&proxy->priv->qdata, key);\n'
' if (info != NULL)\n'
' g_object_notify (G_OBJECT (proxy), info->hyphen_name);\n'
' }\n'
' g_variant_iter_free (iter);\n'
' for (n = 0; invalidated_properties[n] != NULL; n++)\n'
' {\n'
' info = (_ExtendedGDBusPropertyInfo *) g_dbus_interface_info_lookup_property ((GDBusInterfaceInfo *) &_%s_interface_info, invalidated_properties[n]);\n'
' g_datalist_remove_data (&proxy->priv->qdata, invalidated_properties[n]);\n'
' if (info != NULL)\n'
' g_object_notify (G_OBJECT (proxy), info->hyphen_name);\n'
' }\n'
'}\n'
'\n'
%(i.camel_name, i.ns_upper, i.name_upper,
i.name_lower, i.name_lower))
# property vfuncs
for p in i.properties:
nul_value = '0'
if p.arg.free_func != None:
nul_value = 'NULL'
self.c.write('static %s\n'
'%s_proxy_get_%s (%s *object)\n'
'{\n'
' %sProxy *proxy = %s%s_PROXY (object);\n'
' GVariant *variant;\n'
' %svalue = %s;\n'%(p.arg.ctype_in, i.name_lower, p.name_lower, i.camel_name,
i.camel_name, i.ns_upper, i.name_upper,
p.arg.ctype_in, nul_value))
# For some property types, we have to free the returned
# value (or part of it, e.g. the container) because of how
# GVariant works.. see https://bugzilla.gnome.org/show_bug.cgi?id=657100
# for details
#
free_container = False;
if p.arg.gvariant_get == 'g_variant_get_strv' or p.arg.gvariant_get == 'g_variant_get_objpathv' or p.arg.gvariant_get == 'g_variant_get_bytestring_array':
free_container = True;
# If already using an old value for strv, objpathv, bytestring_array (see below),
# then just return that... that way the result from multiple consecutive calls
# to the getter are valid as long as they're freed
#
if free_container:
self.c.write(' value = g_datalist_get_data (&proxy->priv->qdata, \"%s\");\n'
' if (value != NULL)\n'
' return value;\n'
%(p.name))
self.c.write(' variant = g_dbus_proxy_get_cached_property (G_DBUS_PROXY (proxy), \"%s\");\n'%(p.name))
if p.arg.gtype == 'G_TYPE_VARIANT':
self.c.write(' value = variant;\n')
self.c.write(' if (variant != NULL)\n')
self.c.write(' g_variant_unref (variant);\n')
else:
self.c.write(' if (variant != NULL)\n'
' {\n')
extra_len = ''
if p.arg.gvariant_get == 'g_variant_get_string' or p.arg.gvariant_get == 'g_variant_get_strv' or p.arg.gvariant_get == 'g_variant_get_objv' or p.arg.gvariant_get == 'g_variant_get_bytestring_array':
extra_len = ', NULL'
self.c.write(' value = %s (variant%s);\n'%(p.arg.gvariant_get, extra_len))
if free_container:
self.c.write(' g_datalist_set_data_full (&proxy->priv->qdata, \"%s\", (gpointer) value, g_free);\n'
%(p.name))
self.c.write(' g_variant_unref (variant);\n')
self.c.write(' }\n')
self.c.write(' return value;\n')
self.c.write('}\n')
self.c.write('\n')
# class boilerplate
self.c.write('static void\n'
'%s_proxy_init (%sProxy *proxy)\n'
'{\n'
' proxy->priv = G_TYPE_INSTANCE_GET_PRIVATE (proxy, %sTYPE_%s_PROXY, %sProxyPrivate);\n'
' g_dbus_proxy_set_interface_info (G_DBUS_PROXY (proxy), %s_interface_info ());\n'
'}\n'
'\n'
%(i.name_lower, i.camel_name,
i.ns_upper, i.name_upper, i.camel_name,
i.name_lower))
self.c.write('static void\n'
'%s_proxy_class_init (%sProxyClass *klass)\n'
'{\n'
' GObjectClass *gobject_class;\n'
' GDBusProxyClass *proxy_class;\n'
'\n'
' g_type_class_add_private (klass, sizeof (%sProxyPrivate));\n'
'\n'
' gobject_class = G_OBJECT_CLASS (klass);\n'
' gobject_class->finalize = %s_proxy_finalize;\n'
' gobject_class->get_property = %s_proxy_get_property;\n'
' gobject_class->set_property = %s_proxy_set_property;\n'
'\n'
' proxy_class = G_DBUS_PROXY_CLASS (klass);\n'
' proxy_class->g_signal = %s_proxy_g_signal;\n'
' proxy_class->g_properties_changed = %s_proxy_g_properties_changed;\n'
'\n'%(i.name_lower, i.camel_name,
i.camel_name,
i.name_lower, i.name_lower, i.name_lower, i.name_lower, i.name_lower))
if len(i.properties) > 0:
self.c.write('\n'
' %s_override_properties (gobject_class, 1);\n'%(i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static void\n'
'%s_proxy_iface_init (%sIface *iface)\n'
'{\n'%(i.name_lower, i.camel_name))
for p in i.properties:
self.c.write(' iface->get_%s = %s_proxy_get_%s;\n'%(p.name_lower, i.name_lower, p.name_lower))
self.c.write('}\n'
'\n')
# constructors
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_proxy_new:\n'
' * @connection: A #GDBusConnection.\n'
' * @flags: Flags from the #GDBusProxyFlags enumeration.\n'
' * @name: (allow-none): A bus name (well-known or unique) or %%NULL if @connection is not a message bus connection.\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @callback: A #GAsyncReadyCallback to call when the request is satisfied.\n'
' * @user_data: User data to pass to @callback.\n'
' *\n'
' * Asynchronously creates a proxy for the D-Bus interface #%s. See g_dbus_proxy_new() for more details.\n'
' *\n'
' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
' * You can then call %s_proxy_new_finish() to get the result of the operation.\n'
' *\n'
' * See %s_proxy_new_sync() for the synchronous, blocking version of this constructor.\n'
%(i.name_lower, i.name, i.name_lower, i.name_lower), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('void\n'
'%s_proxy_new (\n'
' GDBusConnection *connection,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data)\n'
'{\n'
' g_async_initable_new_async (%sTYPE_%s_PROXY, G_PRIORITY_DEFAULT, cancellable, callback, user_data, "g-flags", flags, "g-name", name, "g-connection", connection, "g-object-path", object_path, "g-interface-name", "%s", NULL);\n'
'}\n'
'\n'
%(i.name_lower, i.ns_upper, i.name_upper, i.name))
self.c.write('/**\n'
' * %s_proxy_new_finish:\n'
' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %s_proxy_new().\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Finishes an operation started with %s_proxy_new().\n'
' *\n'
' * Returns: (transfer full) (type %sProxy): The constructed proxy object or %%NULL if @error is set.\n'
%(i.name_lower, i.name_lower, i.name_lower, i.camel_name))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_proxy_new_finish (\n'
' GAsyncResult *res,\n'
' GError **error)\n'
'{\n'
' GObject *ret;\n'
' GObject *source_object;\n'
' source_object = g_async_result_get_source_object (res);\n'
' ret = g_async_initable_new_finish (G_ASYNC_INITABLE (source_object), res, error);\n'
' g_object_unref (source_object);\n'
' if (ret != NULL)\n'
' return %s%s (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_proxy_new_sync:\n'
' * @connection: A #GDBusConnection.\n'
' * @flags: Flags from the #GDBusProxyFlags enumeration.\n'
' * @name: (allow-none): A bus name (well-known or unique) or %%NULL if @connection is not a message bus connection.\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Synchronously creates a proxy for the D-Bus interface #%s. See g_dbus_proxy_new_sync() for more details.\n'
' *\n'
' * The calling thread is blocked until a reply is received.\n'
' *\n'
' * See %s_proxy_new() for the asynchronous version of this constructor.\n'
' *\n'
' * Returns: (transfer full) (type %sProxy): The constructed proxy object or %%NULL if @error is set.\n'
%(i.name_lower, i.name, i.name_lower, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_proxy_new_sync (\n'
' GDBusConnection *connection,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error)\n'
'{\n'
' GInitable *ret;\n'
' ret = g_initable_new (%sTYPE_%s_PROXY, cancellable, error, "g-flags", flags, "g-name", name, "g-connection", connection, "g-object-path", object_path, "g-interface-name", "%s", NULL);\n'
' if (ret != NULL)\n'
' return %s%s (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper, i.name, i.ns_upper, i.name_upper))
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_proxy_new_for_bus:\n'
' * @bus_type: A #GBusType.\n'
' * @flags: Flags from the #GDBusProxyFlags enumeration.\n'
' * @name: A bus name (well-known or unique).\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @callback: A #GAsyncReadyCallback to call when the request is satisfied.\n'
' * @user_data: User data to pass to @callback.\n'
' *\n'
' * Like %s_proxy_new() but takes a #GBusType instead of a #GDBusConnection.\n'
' *\n'
' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
' * You can then call %s_proxy_new_for_bus_finish() to get the result of the operation.\n'
' *\n'
' * See %s_proxy_new_for_bus_sync() for the synchronous, blocking version of this constructor.\n'
%(i.name_lower, i.name_lower, i.name_lower, i.name_lower), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('void\n'
'%s_proxy_new_for_bus (\n'
' GBusType bus_type,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data)\n'
'{\n'
' g_async_initable_new_async (%sTYPE_%s_PROXY, G_PRIORITY_DEFAULT, cancellable, callback, user_data, "g-flags", flags, "g-name", name, "g-bus-type", bus_type, "g-object-path", object_path, "g-interface-name", "%s", NULL);\n'
'}\n'
'\n'
%(i.name_lower, i.ns_upper, i.name_upper, i.name))
self.c.write('/**\n'
' * %s_proxy_new_for_bus_finish:\n'
' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %s_proxy_new_for_bus().\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Finishes an operation started with %s_proxy_new_for_bus().\n'
' *\n'
' * Returns: (transfer full) (type %sProxy): The constructed proxy object or %%NULL if @error is set.\n'
%(i.name_lower, i.name_lower, i.name_lower, i.camel_name))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_proxy_new_for_bus_finish (\n'
' GAsyncResult *res,\n'
' GError **error)\n'
'{\n'
' GObject *ret;\n'
' GObject *source_object;\n'
' source_object = g_async_result_get_source_object (res);\n'
' ret = g_async_initable_new_finish (G_ASYNC_INITABLE (source_object), res, error);\n'
' g_object_unref (source_object);\n'
' if (ret != NULL)\n'
' return %s%s (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_proxy_new_for_bus_sync:\n'
' * @bus_type: A #GBusType.\n'
' * @flags: Flags from the #GDBusProxyFlags enumeration.\n'
' * @name: A bus name (well-known or unique).\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Like %s_proxy_new_sync() but takes a #GBusType instead of a #GDBusConnection.\n'
' *\n'
' * The calling thread is blocked until a reply is received.\n'
' *\n'
' * See %s_proxy_new_for_bus() for the asynchronous version of this constructor.\n'
' *\n'
' * Returns: (transfer full) (type %sProxy): The constructed proxy object or %%NULL if @error is set.\n'
%(i.name_lower, i.name_lower, i.name_lower, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_proxy_new_for_bus_sync (\n'
' GBusType bus_type,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error)\n'
'{\n'
' GInitable *ret;\n'
' ret = g_initable_new (%sTYPE_%s_PROXY, cancellable, error, "g-flags", flags, "g-name", name, "g-bus-type", bus_type, "g-object-path", object_path, "g-interface-name", "%s", NULL);\n'
' if (ret != NULL)\n'
' return %s%s (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper, i.name, i.ns_upper, i.name_upper))
self.c.write('\n')
# ---------------------------------------------------------------------------------------------------
def generate_skeleton(self, i):
# class boilerplate
self.c.write('/* ------------------------------------------------------------------------ */\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sSkeleton:\n'
' *\n'
' * The #%sSkeleton structure contains only private data and should only be accessed using the provided API.\n'
%(i.camel_name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sSkeletonClass:\n'
' * @parent_class: The parent class.\n'
' *\n'
' * Class structure for #%sSkeleton.\n'
%(i.camel_name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write('struct _%sSkeletonPrivate\n'
'{\n'
' GValueArray *properties;\n'
' GList *changed_properties;\n'
' GSource *changed_properties_idle_source;\n'
' GMainContext *context;\n'
' GMutex *lock;\n'
'};\n'
'\n'%i.camel_name)
self.c.write('static void\n'
'_%s_skeleton_handle_method_call (\n'
' GDBusConnection *connection,\n'
' const gchar *sender,\n'
' const gchar *object_path,\n'
' const gchar *interface_name,\n'
' const gchar *method_name,\n'
' GVariant *parameters,\n'
' GDBusMethodInvocation *invocation,\n'
' gpointer user_data)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (user_data);\n'
' _ExtendedGDBusMethodInfo *info;\n'
' GVariantIter iter;\n'
' GVariant *child;\n'
' GValue *paramv;\n'
' guint num_params;\n'
' guint num_extra;\n'
' guint n;\n'
' guint signal_id;\n'
' GValue return_value = {0};\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' info = (_ExtendedGDBusMethodInfo *) g_dbus_method_invocation_get_method_info (invocation);\n'
' g_assert (info != NULL);\n'
%())
self.c.write (' num_params = g_variant_n_children (parameters);\n'
' num_extra = info->pass_fdlist ? 3 : 2;'
' paramv = g_new0 (GValue, num_params + num_extra);\n'
' n = 0;\n'
' g_value_init (¶mv[n], %sTYPE_%s);\n'
' g_value_set_object (¶mv[n++], skeleton);\n'
' g_value_init (¶mv[n], G_TYPE_DBUS_METHOD_INVOCATION);\n'
' g_value_set_object (¶mv[n++], invocation);\n'
' if (info->pass_fdlist)\n'
' {\n'
'#ifdef G_OS_UNIX\n'
' g_value_init (¶mv[n], G_TYPE_UNIX_FD_LIST);\n'
' g_value_set_object (¶mv[n++], g_dbus_message_get_unix_fd_list (g_dbus_method_invocation_get_message (invocation)));\n'
'#else\n'
' g_assert_not_reached ();\n'
'#endif\n'
' }\n'
%(i.ns_upper, i.name_upper))
self.c.write(' g_variant_iter_init (&iter, parameters);\n'
' while ((child = g_variant_iter_next_value (&iter)) != NULL)\n'
' {\n'
' _ExtendedGDBusArgInfo *arg_info = (_ExtendedGDBusArgInfo *) info->parent_struct.in_args[n - num_extra];\n'
' if (arg_info->use_gvariant)\n'
' {\n'
' g_value_init (¶mv[n], G_TYPE_VARIANT);\n'
' g_value_set_variant (¶mv[n], child);\n'
' n++;\n'
' }\n'
' else\n'
' g_dbus_gvariant_to_gvalue (child, ¶mv[n++]);\n'
' g_variant_unref (child);\n'
' }\n'
)
self.c.write(' signal_id = g_signal_lookup (info->signal_name, %sTYPE_%s);\n'
%(i.ns_upper, i.name_upper))
self.c.write(' g_value_init (&return_value, G_TYPE_BOOLEAN);\n'
' g_signal_emitv (paramv, signal_id, 0, &return_value);\n'
' if (!g_value_get_boolean (&return_value))\n'
' g_dbus_method_invocation_return_error (invocation, G_DBUS_ERROR, G_DBUS_ERROR_UNKNOWN_METHOD, "Method %s is not implemented on interface %s", method_name, interface_name);\n'
' g_value_unset (&return_value);\n'
)
self.c.write(' for (n = 0; n < num_params + num_extra; n++)\n'
' g_value_unset (¶mv[n]);\n'
' g_free (paramv);\n')
self.c.write('}\n'
'\n')
self.c.write('static GVariant *\n'
'_%s_skeleton_handle_get_property (\n'
' GDBusConnection *connection,\n'
' const gchar *sender,\n'
' const gchar *object_path,\n'
' const gchar *interface_name,\n'
' const gchar *property_name,\n'
' GError **error,\n'
' gpointer user_data)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (user_data);\n'
' GValue value = {0};\n'
' GParamSpec *pspec;\n'
' _ExtendedGDBusPropertyInfo *info;\n'
' GVariant *ret;\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' ret = NULL;\n'
' info = (_ExtendedGDBusPropertyInfo *) g_dbus_interface_info_lookup_property ((GDBusInterfaceInfo *) &_%s_interface_info, property_name);\n'
' g_assert (info != NULL);\n'
' pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (skeleton), info->hyphen_name);\n'
' if (pspec == NULL)\n'
' {\n'
' g_set_error (error, G_DBUS_ERROR, G_DBUS_ERROR_INVALID_ARGS, "No property with name %%s", property_name);\n'
' }\n'
' else\n'
' {\n'
' g_value_init (&value, pspec->value_type);\n'
' g_object_get_property (G_OBJECT (skeleton), info->hyphen_name, &value);\n'
' ret = g_dbus_gvalue_to_gvariant (&value, G_VARIANT_TYPE (info->parent_struct.signature));\n'
' g_value_unset (&value);\n'
' }\n'
' return ret;\n'
'}\n'
'\n'
%(i.name_lower))
self.c.write('static gboolean\n'
'_%s_skeleton_handle_set_property (\n'
' GDBusConnection *connection,\n'
' const gchar *sender,\n'
' const gchar *object_path,\n'
' const gchar *interface_name,\n'
' const gchar *property_name,\n'
' GVariant *variant,\n'
' GError **error,\n'
' gpointer user_data)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (user_data);\n'
' GValue value = {0};\n'
' GParamSpec *pspec;\n'
' _ExtendedGDBusPropertyInfo *info;\n'
' gboolean ret;\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' ret = FALSE;\n'
' info = (_ExtendedGDBusPropertyInfo *) g_dbus_interface_info_lookup_property ((GDBusInterfaceInfo *) &_%s_interface_info, property_name);\n'
' g_assert (info != NULL);\n'
' pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (skeleton), info->hyphen_name);\n'
' if (pspec == NULL)\n'
' {\n'
' g_set_error (error, G_DBUS_ERROR, G_DBUS_ERROR_INVALID_ARGS, "No property with name %%s", property_name);\n'
' }\n'
' else\n'
' {\n'
' if (info->use_gvariant)\n'
' g_value_set_variant (&value, variant);\n'
' else\n'
' g_dbus_gvariant_to_gvalue (variant, &value);\n'
' g_object_set_property (G_OBJECT (skeleton), info->hyphen_name, &value);\n'
' g_value_unset (&value);\n'
' ret = TRUE;\n'
' }\n'
' return ret;\n'
'}\n'
'\n'
%(i.name_lower))
self.c.write('static const GDBusInterfaceVTable _%s_skeleton_vtable =\n'
'{\n'
' _%s_skeleton_handle_method_call,\n'
' _%s_skeleton_handle_get_property,\n'
' _%s_skeleton_handle_set_property\n'
'};\n'
'\n'%(i.name_lower, i.name_lower, i.name_lower, i.name_lower))
self.c.write('static GDBusInterfaceInfo *\n'
'%s_skeleton_dbus_interface_get_info (GDBusInterfaceSkeleton *skeleton)\n'
'{\n'
' return %s_interface_info ();\n'
%(i.name_lower, i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static GDBusInterfaceVTable *\n'
'%s_skeleton_dbus_interface_get_vtable (GDBusInterfaceSkeleton *skeleton)\n'
'{\n'
' return (GDBusInterfaceVTable *) &_%s_skeleton_vtable;\n'
%(i.name_lower, i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static GVariant *\n'
'%s_skeleton_dbus_interface_get_properties (GDBusInterfaceSkeleton *_skeleton)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (_skeleton);\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write('\n'
' GVariantBuilder builder;\n'
' guint n;\n'
' g_variant_builder_init (&builder, G_VARIANT_TYPE ("a{sv}"));\n'
' if (_%s_interface_info.parent_struct.properties == NULL)\n'
' goto out;\n'
' for (n = 0; _%s_interface_info.parent_struct.properties[n] != NULL; n++)\n'
' {\n'
' GDBusPropertyInfo *info = _%s_interface_info.parent_struct.properties[n];\n'
' if (info->flags & G_DBUS_PROPERTY_INFO_FLAGS_READABLE)\n'
' {\n'
' GVariant *value;\n'
' value = _%s_skeleton_handle_get_property (g_dbus_interface_skeleton_get_connection (G_DBUS_INTERFACE_SKELETON (skeleton)), NULL, g_dbus_interface_skeleton_get_object_path (G_DBUS_INTERFACE_SKELETON (skeleton)), "%s", info->name, NULL, skeleton);\n'
' if (value != NULL)\n'
' {\n'
' g_variant_take_ref (value);\n'
' g_variant_builder_add (&builder, "{sv}", info->name, value);\n'
' g_variant_unref (value);\n'
' }\n'
' }\n'
' }\n'
'out:\n'
' return g_variant_builder_end (&builder);\n'
'}\n'
'\n'
%(i.name_lower, i.name_lower, i.name_lower, i.name_lower, i.name))
if len(i.properties) > 0:
self.c.write('static gboolean _%s_emit_changed (gpointer user_data);\n'
'\n'
%(i.name_lower))
self.c.write('static void\n'
'%s_skeleton_dbus_interface_flush (GDBusInterfaceSkeleton *_skeleton)\n'
'{\n'
%(i.name_lower))
if len(i.properties) > 0:
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (_skeleton);\n'
' gboolean emit_changed = FALSE;\n'
'\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' if (skeleton->priv->changed_properties_idle_source != NULL)\n'
' {\n'
' g_source_destroy (skeleton->priv->changed_properties_idle_source);\n'
' skeleton->priv->changed_properties_idle_source = NULL;\n'
' emit_changed = TRUE;\n'
' }\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
'\n'
' if (emit_changed)\n'
' _%s_emit_changed (skeleton);\n'
%(i.camel_name, i.ns_upper, i.name_upper, i.name_lower))
self.c.write('}\n'
'\n')
for s in i.signals:
self.c.write('static void\n'
'_%s_on_signal_%s (\n'
' %s *object'%(i.name_lower, s.name_lower, i.camel_name))
for a in s.args:
self.c.write(',\n %sarg_%s'%(a.ctype_in, a.name))
self.c.write(')\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'
' GDBusConnection *connection = g_dbus_interface_skeleton_get_connection (G_DBUS_INTERFACE_SKELETON (skeleton));\n'
%(i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' if (connection == NULL)\n'
' return;\n'
' g_dbus_connection_emit_signal (connection,\n'
' NULL, g_dbus_interface_skeleton_get_object_path (G_DBUS_INTERFACE_SKELETON (skeleton)), "%s", "%s",\n'
' g_variant_new ("('
%(i.name, s.name))
for a in s.args:
self.c.write('%s'%(a.format_in))
self.c.write(')"')
for a in s.args:
self.c.write(',\n arg_%s'%(a.name))
self.c.write('), NULL);\n')
self.c.write('}\n'
'\n')
self.c.write('static void %s_skeleton_iface_init (%sIface *iface);\n'
%(i.name_lower, i.camel_name))
self.c.write('G_DEFINE_TYPE_WITH_CODE (%sSkeleton, %s_skeleton, G_TYPE_DBUS_INTERFACE_SKELETON,\n'%(i.camel_name, i.name_lower))
self.c.write(' G_IMPLEMENT_INTERFACE (%sTYPE_%s, %s_skeleton_iface_init));\n\n'%(i.ns_upper, i.name_upper, i.name_lower))
# finalize
self.c.write('static void\n'
'%s_skeleton_finalize (GObject *object)\n'
'{\n'%(i.name_lower))
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'%(i.camel_name, i.ns_upper, i.name_upper))
if len(i.properties) > 0:
self.c.write(' g_value_array_free (skeleton->priv->properties);\n')
self.c.write(' g_list_foreach (skeleton->priv->changed_properties, (GFunc) _changed_property_free, NULL);\n')
self.c.write(' g_list_free (skeleton->priv->changed_properties);\n')
self.c.write(' if (skeleton->priv->changed_properties_idle_source != NULL)\n')
self.c.write(' g_source_destroy (skeleton->priv->changed_properties_idle_source);\n')
self.c.write(' if (skeleton->priv->context != NULL)\n')
self.c.write(' g_main_context_unref (skeleton->priv->context);\n')
self.c.write(' g_mutex_free (skeleton->priv->lock);\n')
self.c.write(' G_OBJECT_CLASS (%s_skeleton_parent_class)->finalize (object);\n'
'}\n'
'\n'%(i.name_lower))
# property accessors (TODO: generate PropertiesChanged signals in setter)
if len(i.properties) > 0:
self.c.write('static void\n'
'%s_skeleton_get_property (GObject *object,\n'
' guint prop_id,\n'
' GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'%(i.name_lower))
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'
' g_assert (prop_id != 0 && prop_id - 1 < %d);\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' g_value_copy (&skeleton->priv->properties->values[prop_id - 1], value);\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
%(i.camel_name, i.ns_upper, i.name_upper, len(i.properties)))
self.c.write('}\n'
'\n')
# if property is already scheduled then re-use entry.. though it could be
# that the user did
#
# foo_set_prop_bar (object, "");
# foo_set_prop_bar (object, "blah");
#
# say, every update... In this case, where nothing happens, we obviously
# don't want a PropertiesChanged() event. We can easily check for this
# by comparing against the _original value_ recorded before the first
# change event. If the latest value is not different from the original
# one, we can simply ignore the ChangedProperty
#
self.c.write('static gboolean\n'
'_%s_emit_changed (gpointer user_data)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (user_data);\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' GList *l;\n'
' GVariantBuilder builder;\n'
' GVariantBuilder invalidated_builder;\n'
' guint num_changes;\n'
'\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' g_variant_builder_init (&builder, G_VARIANT_TYPE ("a{sv}"));\n'
' g_variant_builder_init (&invalidated_builder, G_VARIANT_TYPE ("as"));\n'
' for (l = skeleton->priv->changed_properties, num_changes = 0; l != NULL; l = l->next)\n'
' {\n'
' ChangedProperty *cp = l->data;\n'
' GVariant *variant;\n'
' const GValue *cur_value;\n'
'\n'
' cur_value = &skeleton->priv->properties->values[cp->prop_id - 1];\n'
' if (!_g_value_equal (cur_value, &cp->orig_value))\n'
' {\n'
' variant = g_dbus_gvalue_to_gvariant (cur_value, G_VARIANT_TYPE (cp->info->parent_struct.signature));\n'
' g_variant_builder_add (&builder, "{sv}", cp->info->parent_struct.name, variant);\n'
' g_variant_unref (variant);\n'
' num_changes++;\n'
' }\n'
' }\n'
' if (num_changes > 0)\n'
' {\n'
' g_dbus_connection_emit_signal (g_dbus_interface_skeleton_get_connection (G_DBUS_INTERFACE_SKELETON (skeleton)),\n'
' NULL, g_dbus_interface_skeleton_get_object_path (G_DBUS_INTERFACE_SKELETON (skeleton)),\n'
' "org.freedesktop.DBus.Properties",\n'
' "PropertiesChanged",\n'
' g_variant_new ("(sa{sv}as)",\n'
' "%s",\n'
' &builder, &invalidated_builder),\n'
' NULL);\n'
' }\n'
' else\n'
' {\n'
' g_variant_builder_clear (&builder);\n'
' g_variant_builder_clear (&invalidated_builder);\n'
' }\n'
%(i.name))
self.c.write(' g_list_foreach (skeleton->priv->changed_properties, (GFunc) _changed_property_free, NULL);\n')
self.c.write(' g_list_free (skeleton->priv->changed_properties);\n')
self.c.write(' skeleton->priv->changed_properties = NULL;\n')
self.c.write(' skeleton->priv->changed_properties_idle_source = NULL;\n')
self.c.write(' g_mutex_unlock (skeleton->priv->lock);\n')
self.c.write(' return FALSE;\n'
'}\n'
'\n')
# holding lock while being called
self.c.write('static void\n'
'_%s_schedule_emit_changed (%sSkeleton *skeleton, const _ExtendedGDBusPropertyInfo *info, guint prop_id, const GValue *orig_value)\n'
'{\n'
' ChangedProperty *cp;\n'
' GList *l;\n'
' cp = NULL;\n'
' for (l = skeleton->priv->changed_properties; l != NULL; l = l->next)\n'
' {\n'
' ChangedProperty *i_cp = l->data;\n'
' if (i_cp->info == info)\n'
' {\n'
' cp = i_cp;\n'
' break;\n'
' }\n'
' }\n'
%(i.name_lower, i.camel_name))
self.c.write(' if (cp == NULL)\n'
' {\n'
' cp = g_new0 (ChangedProperty, 1);\n'
' cp->prop_id = prop_id;\n'
' cp->info = info;\n'
' skeleton->priv->changed_properties = g_list_prepend (skeleton->priv->changed_properties, cp);\n'
' g_value_init (&cp->orig_value, G_VALUE_TYPE (orig_value));\n'
' g_value_copy (orig_value, &cp->orig_value);\n'
' }\n'
'}\n'
'\n'
%())
# Postpone setting up the refresh source until the ::notify signal is emitted as
# this allows use of g_object_freeze_notify()/g_object_thaw_notify() ...
# This is useful when updating several properties from another thread than
# where the idle will be emitted from
self.c.write('static void\n'
'%s_skeleton_notify (GObject *object,\n'
' GParamSpec *pspec)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' if (skeleton->priv->changed_properties != NULL &&\n'
' skeleton->priv->changed_properties_idle_source == NULL)\n'
' {\n'
' skeleton->priv->changed_properties_idle_source = g_idle_source_new ();\n'
' g_source_set_priority (skeleton->priv->changed_properties_idle_source, G_PRIORITY_DEFAULT);\n'
' g_source_set_callback (skeleton->priv->changed_properties_idle_source, _%s_emit_changed, g_object_ref (skeleton), (GDestroyNotify) g_object_unref);\n'
' g_source_attach (skeleton->priv->changed_properties_idle_source, skeleton->priv->context);\n'
' g_source_unref (skeleton->priv->changed_properties_idle_source);\n'
' }\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
'}\n'
'\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper, i.name_lower))
self.c.write('static void\n'
'%s_skeleton_set_property (GObject *object,\n'
' guint prop_id,\n'
' const GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'%(i.name_lower))
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'
' g_assert (prop_id != 0 && prop_id - 1 < %d);\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' g_object_freeze_notify (object);\n'
' if (!_g_value_equal (value, &skeleton->priv->properties->values[prop_id - 1]))\n'
' {\n'
' if (g_dbus_interface_skeleton_get_connection (G_DBUS_INTERFACE_SKELETON (skeleton)) != NULL)\n'
' _%s_schedule_emit_changed (skeleton, _%s_property_info_pointers[prop_id - 1], prop_id, &skeleton->priv->properties->values[prop_id - 1]);\n'
' g_value_copy (value, &skeleton->priv->properties->values[prop_id - 1]);\n'
' g_object_notify_by_pspec (object, pspec);\n'
' }\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
' g_object_thaw_notify (object);\n'
%(i.camel_name, i.ns_upper, i.name_upper, len(i.properties), i.name_lower, i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static void\n'
'%s_skeleton_init (%sSkeleton *skeleton)\n'
'{\n'
' skeleton->priv = G_TYPE_INSTANCE_GET_PRIVATE (skeleton, %sTYPE_%s_SKELETON, %sSkeletonPrivate);\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper, i.camel_name))
self.c.write(' skeleton->priv->lock = g_mutex_new ();\n')
self.c.write(' skeleton->priv->context = g_main_context_get_thread_default ();\n')
self.c.write(' if (skeleton->priv->context != NULL)\n')
self.c.write(' g_main_context_ref (skeleton->priv->context);\n')
if len(i.properties) > 0:
self.c.write(' skeleton->priv->properties = g_value_array_new (%d);\n'%(len(i.properties)))
n = 0
for p in i.properties:
self.c.write(' g_value_array_append (skeleton->priv->properties, NULL);\n')
self.c.write(' g_value_init (&skeleton->priv->properties->values[%d], %s);\n'%(n, p.arg.gtype))
n += 1
self.c.write('}\n'
'\n')
# property vfuncs
n = 0
for p in i.properties:
self.c.write('static %s\n'
'%s_skeleton_get_%s (%s *object)\n'
'{\n'
%(p.arg.ctype_in, i.name_lower, p.name_lower, i.camel_name))
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'%(i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' %svalue;\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' value = %s (&(skeleton->priv->properties->values[%d]));\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
%(p.arg.ctype_in_g, p.arg.gvalue_get, n))
self.c.write(' return value;\n')
self.c.write('}\n')
self.c.write('\n')
n += 1
self.c.write('static void\n'
'%s_skeleton_class_init (%sSkeletonClass *klass)\n'
'{\n'
' GObjectClass *gobject_class;\n'
' GDBusInterfaceSkeletonClass *skeleton_class;\n'
'\n'
' g_type_class_add_private (klass, sizeof (%sSkeletonPrivate));\n'
'\n'
' gobject_class = G_OBJECT_CLASS (klass);\n'
' gobject_class->finalize = %s_skeleton_finalize;\n'
%(i.name_lower, i.camel_name, i.camel_name, i.name_lower))
if len(i.properties) > 0:
self.c.write(' gobject_class->get_property = %s_skeleton_get_property;\n'
' gobject_class->set_property = %s_skeleton_set_property;\n'
' gobject_class->notify = %s_skeleton_notify;\n'
'\n'%(i.name_lower, i.name_lower, i.name_lower))
self.c.write('\n'
' %s_override_properties (gobject_class, 1);\n'%(i.name_lower))
self.c.write('\n'
' skeleton_class = G_DBUS_INTERFACE_SKELETON_CLASS (klass);\n');
self.c.write(' skeleton_class->get_info = %s_skeleton_dbus_interface_get_info;\n'%(i.name_lower))
self.c.write(' skeleton_class->get_properties = %s_skeleton_dbus_interface_get_properties;\n'%(i.name_lower))
self.c.write(' skeleton_class->flush = %s_skeleton_dbus_interface_flush;\n'%(i.name_lower))
self.c.write(' skeleton_class->get_vtable = %s_skeleton_dbus_interface_get_vtable;\n'%(i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static void\n'
'%s_skeleton_iface_init (%sIface *iface)\n'
'{\n'
%(i.name_lower, i.camel_name))
for s in i.signals:
self.c.write(' iface->%s = _%s_on_signal_%s;\n'
%(s.name_lower, i.name_lower, s.name_lower))
for p in i.properties:
self.c.write(' iface->get_%s = %s_skeleton_get_%s;\n'%(p.name_lower, i.name_lower, p.name_lower))
self.c.write('}\n'
'\n')
# constructors
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_skeleton_new:\n'
' *\n'
' * Creates a skeleton object for the D-Bus interface #%s.\n'
' *\n'
' * Returns: (transfer full) (type %sSkeleton): The skeleton object.\n'
%(i.name_lower, i.name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_skeleton_new (void)\n'
'{\n'
' return %s%s (g_object_new (%sTYPE_%s_SKELETON, NULL));\n'
'}\n'
'\n'%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
# ---------------------------------------------------------------------------------------------------
def generate_object(self):
self.c.write('/* ------------------------------------------------------------------------\n'
' * Code for Object, ObjectProxy and ObjectSkeleton\n'
' * ------------------------------------------------------------------------\n'
' */\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * SECTION:%sObject\n'
' * @title: %sObject\n'
' * @short_description: Specialized GDBusObject types\n'
' *\n'
' * This section contains the #%sObject, #%sObjectProxy, and #%sObjectSkeleton types which make it easier to work with objects implementing generated types for D-Bus interfaces.\n'
' */\n'
%(self.namespace, self.namespace, self.namespace, self.namespace, self.namespace), False))
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObject:\n'
' *\n'
' * The #%sObject type is a specialized container of interfaces.\n'
' */\n'
%(self.namespace, self.namespace), False))
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectIface:\n'
' * @parent_iface: The parent interface.\n'
' *\n'
' * Virtual table for the #%sObject interface.\n'
' */\n'
%(self.namespace, self.namespace), False))
self.c.write('\n')
self.c.write('static void\n'
'%sobject_default_init (%sObjectIface *iface)\n'
'{\n'
%(self.ns_lower, self.namespace));
for i in self.ifaces:
self.c.write(self.docbook_gen.expand(
' /**\n'
' * %sObject:%s:\n'
' *\n'
' * The #%s instance corresponding to the D-Bus interface #%s, if any.\n'
' *\n'
' * Connect to the #GObject::notify signal to get informed of property changes.\n'
%(self.namespace, i.name_hyphen, i.camel_name, i.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 2)
self.c.write(' g_object_interface_install_property (iface, g_param_spec_object ("%s", "%s", "%s", %sTYPE_%s, G_PARAM_READWRITE|G_PARAM_STATIC_STRINGS));\n'
'\n'
%(i.name_hyphen, i.name_hyphen, i.name_hyphen, self.ns_upper, i.name_upper))
self.c.write('}\n'
'\n')
self.c.write('typedef %sObjectIface %sObjectInterface;\n'%(self.namespace, self.namespace))
self.c.write('G_DEFINE_INTERFACE_WITH_CODE (%sObject, %sobject, G_TYPE_OBJECT, g_type_interface_add_prerequisite (g_define_type_id, G_TYPE_DBUS_OBJECT));\n'%(self.namespace, self.ns_lower))
self.c.write('\n')
for i in self.ifaces:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_get_%s:\n'
' * @object: A #%sObject.\n'
' *\n'
' * Gets the #%s instance for the D-Bus interface #%s on @object, if any.\n'
' *\n'
' * Returns: (transfer full): A #%s that must be freed with g_object_unref() or %%NULL if @object does not implement the interface.\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, i.camel_name, i.name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write ('%s *%sobject_get_%s (%sObject *object)\n'
%(i.camel_name, self.ns_lower, i.name_upper.lower(), self.namespace))
self.c.write('{\n'
' GDBusInterface *ret;\n'
' ret = g_dbus_object_get_interface (G_DBUS_OBJECT (object), "%s");\n'
' if (ret == NULL)\n'
' return NULL;\n'
' return %s%s (ret);\n'
'}\n'
'\n'
%(i.name, self.ns_upper, i.name_upper))
self.c.write('\n')
for i in self.ifaces:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_peek_%s: (skip)\n'
' * @object: A #%sObject.\n'
' *\n'
' * Like %sobject_get_%s() but doesn\' increase the reference count on the returned object.\n'
' *\n'
' * <warning>It is not safe to use the returned object if you are on another thread than the one where the #GDBusObjectManagerClient or #GDBusObjectManagerServer for @object is running.</warning>\n'
' *\n'
' * Returns: (transfer none): A #%s or %%NULL if @object does not implement the interface. Do not free the returned object, it is owned by @object.\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, self.ns_lower, i.name_upper.lower(), i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write ('%s *%sobject_peek_%s (%sObject *object)\n'
%(i.camel_name, self.ns_lower, i.name_upper.lower(), self.namespace))
self.c.write('{\n'
' GDBusInterface *ret;\n'
' ret = g_dbus_object_get_interface (G_DBUS_OBJECT (object), "%s");\n'
' if (ret == NULL)\n'
' return NULL;\n'
' g_object_unref (ret);\n'
' return %s%s (ret);\n'
'}\n'
'\n'
%(i.name, self.ns_upper, i.name_upper))
self.c.write('\n')
# shared by ObjectProxy and ObjectSkeleton classes
self.c.write('static void\n'
'%sobject_notify (GDBusObject *object, GDBusInterface *interface)\n'
'{\n'
' g_object_notify (G_OBJECT (object), ((_ExtendedGDBusInterfaceInfo *) g_dbus_interface_get_info (interface))->hyphen_name);\n'
'}\n'
'\n'
%(self.ns_lower))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectProxy:\n'
' *\n'
' * The #%sObjectProxy structure contains only private data and should only be accessed using the provided API.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectProxyClass:\n'
' * @parent_class: The parent class.\n'
' *\n'
' * Class structure for #%sObjectProxy.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
# class boilerplate
self.c.write('static void\n'
'%sobject_proxy__%sobject_iface_init (%sObjectIface *iface)\n'
'{\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_lower, self.namespace))
self.c.write('static void\n'
'%sobject_proxy__g_dbus_object_iface_init (GDBusObjectIface *iface)\n'
'{\n'
' iface->interface_added = %sobject_notify;\n'
' iface->interface_removed = %sobject_notify;\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_lower, self.ns_lower))
self.c.write('\n')
self.c.write('G_DEFINE_TYPE_WITH_CODE (%sObjectProxy, %sobject_proxy, G_TYPE_DBUS_OBJECT_PROXY,\n'
' G_IMPLEMENT_INTERFACE (%sTYPE_OBJECT, %sobject_proxy__%sobject_iface_init)\n'
' G_IMPLEMENT_INTERFACE (G_TYPE_DBUS_OBJECT, %sobject_proxy__g_dbus_object_iface_init));\n'
'\n'
%(self.namespace, self.ns_lower, self.ns_upper, self.ns_lower, self.ns_lower, self.ns_lower))
# class boilerplate
self.c.write('static void\n'
'%sobject_proxy_init (%sObjectProxy *object)\n'
'{\n'
'}\n'
'\n'%(self.ns_lower, self.namespace))
self.c.write('static void\n'
'%sobject_proxy_set_property (GObject *gobject,\n'
' guint prop_id,\n'
' const GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'
' G_OBJECT_WARN_INVALID_PROPERTY_ID (gobject, prop_id, pspec);\n'
%(self.ns_lower))
self.c.write('}\n'
'\n'%())
self.c.write('static void\n'
'%sobject_proxy_get_property (GObject *gobject,\n'
' guint prop_id,\n'
' GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'
' %sObjectProxy *object = %sOBJECT_PROXY (gobject);\n'
' GDBusInterface *interface;\n'
'\n'
' switch (prop_id)\n'
' {\n'
%(self.ns_lower, self.namespace, self.ns_upper))
n = 1
for i in self.ifaces:
self.c.write(' case %d:\n'
' interface = g_dbus_object_get_interface (G_DBUS_OBJECT (object), "%s");\n'
' g_value_take_object (value, interface);\n'
' break;\n'
'\n'
%(n, i.name))
n += 1
self.c.write(' default:\n'
' G_OBJECT_WARN_INVALID_PROPERTY_ID (gobject, prop_id, pspec);\n'
' break;\n'
' }\n'
'}\n'
'\n'%())
self.c.write('static void\n'
'%sobject_proxy_class_init (%sObjectProxyClass *klass)\n'
'{\n'
' GObjectClass *gobject_class = G_OBJECT_CLASS (klass);\n'
'\n'
' gobject_class->set_property = %sobject_proxy_set_property;\n'
' gobject_class->get_property = %sobject_proxy_get_property;\n'
'\n'
%(self.ns_lower, self.namespace, self.ns_lower, self.ns_lower))
n = 1
for i in self.ifaces:
self.c.write(' g_object_class_override_property (gobject_class, %d, "%s");'
'\n'
%(n, i.name_hyphen))
n += 1
self.c.write('}\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_proxy_new:\n'
' * @connection: A #GDBusConnection.\n'
' * @object_path: An object path.\n'
' *\n'
' * Creates a new proxy object.\n'
' *\n'
' * Returns: (transfer full): The proxy object.\n'
' */\n'
%(self.ns_lower), False))
self.c.write('%sObjectProxy *\n'
'%sobject_proxy_new (GDBusConnection *connection,\n'
' const gchar *object_path)\n'
'{\n'
' g_return_val_if_fail (G_IS_DBUS_CONNECTION (connection), NULL);\n'
' g_return_val_if_fail (g_variant_is_object_path (object_path), NULL);\n'
' return %sOBJECT_PROXY (g_object_new (%sTYPE_OBJECT_PROXY, "g-connection", connection, "g-object-path", object_path, NULL));\n'
'}\n'
'\n'%(self.namespace, self.ns_lower, self.ns_upper, self.ns_upper))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectSkeleton:\n'
' *\n'
' * The #%sObjectSkeleton structure contains only private data and should only be accessed using the provided API.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectSkeletonClass:\n'
' * @parent_class: The parent class.\n'
' *\n'
' * Class structure for #%sObjectSkeleton.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
# class boilerplate
self.c.write('static void\n'
'%sobject_skeleton__%sobject_iface_init (%sObjectIface *iface)\n'
'{\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_lower, self.namespace))
self.c.write('\n')
self.c.write('static void\n'
'%sobject_skeleton__g_dbus_object_iface_init (GDBusObjectIface *iface)\n'
'{\n'
' iface->interface_added = %sobject_notify;\n'
' iface->interface_removed = %sobject_notify;\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_lower, self.ns_lower))
self.c.write('G_DEFINE_TYPE_WITH_CODE (%sObjectSkeleton, %sobject_skeleton, G_TYPE_DBUS_OBJECT_SKELETON,\n'
' G_IMPLEMENT_INTERFACE (%sTYPE_OBJECT, %sobject_skeleton__%sobject_iface_init)\n'
' G_IMPLEMENT_INTERFACE (G_TYPE_DBUS_OBJECT, %sobject_skeleton__g_dbus_object_iface_init));\n'
'\n'
%(self.namespace, self.ns_lower, self.ns_upper, self.ns_lower, self.ns_lower, self.ns_lower))
# class boilerplate
self.c.write('static void\n'
'%sobject_skeleton_init (%sObjectSkeleton *object)\n'
'{\n'
'}\n'
'\n'%(self.ns_lower, self.namespace))
self.c.write('static void\n'
'%sobject_skeleton_set_property (GObject *gobject,\n'
' guint prop_id,\n'
' const GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'
' %sObjectSkeleton *object = %sOBJECT_SKELETON (gobject);\n'
' GDBusInterfaceSkeleton *interface;\n'
'\n'
' switch (prop_id)\n'
' {\n'
%(self.ns_lower, self.namespace, self.ns_upper))
n = 1
for i in self.ifaces:
self.c.write(' case %d:\n'
' interface = g_value_get_object (value);\n'
' if (interface != NULL)\n'
' {\n'
' g_warn_if_fail (%sIS_%s (interface));\n'
' g_dbus_object_skeleton_add_interface (G_DBUS_OBJECT_SKELETON (object), interface);\n'
' }\n'
' else\n'
' {\n'
' g_dbus_object_skeleton_remove_interface_by_name (G_DBUS_OBJECT_SKELETON (object), "%s");\n'
' }\n'
' break;\n'
'\n'
%(n, self.ns_upper, i.name_upper, i.name))
n += 1
self.c.write(' default:\n'
' G_OBJECT_WARN_INVALID_PROPERTY_ID (gobject, prop_id, pspec);\n'
' break;\n'
' }\n'
'}\n'
'\n'%())
self.c.write('static void\n'
'%sobject_skeleton_get_property (GObject *gobject,\n'
' guint prop_id,\n'
' GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'
' %sObjectSkeleton *object = %sOBJECT_SKELETON (gobject);\n'
' GDBusInterface *interface;\n'
'\n'
' switch (prop_id)\n'
' {\n'
%(self.ns_lower, self.namespace, self.ns_upper))
n = 1
for i in self.ifaces:
self.c.write(' case %d:\n'
' interface = g_dbus_object_get_interface (G_DBUS_OBJECT (object), "%s");\n'
' g_value_take_object (value, interface);\n'
' break;\n'
'\n'
%(n, i.name))
n += 1
self.c.write(' default:\n'
' G_OBJECT_WARN_INVALID_PROPERTY_ID (gobject, prop_id, pspec);\n'
' break;\n'
' }\n'
'}\n'
'\n'%())
self.c.write('static void\n'
'%sobject_skeleton_class_init (%sObjectSkeletonClass *klass)\n'
'{\n'
' GObjectClass *gobject_class = G_OBJECT_CLASS (klass);\n'
'\n'
' gobject_class->set_property = %sobject_skeleton_set_property;\n'
' gobject_class->get_property = %sobject_skeleton_get_property;\n'
'\n'
%(self.ns_lower, self.namespace, self.ns_lower, self.ns_lower))
n = 1
for i in self.ifaces:
self.c.write(' g_object_class_override_property (gobject_class, %d, "%s");'
'\n'
%(n, i.name_hyphen))
n += 1
self.c.write('}\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_skeleton_new:\n'
' * @object_path: An object path.\n'
' *\n'
' * Creates a new skeleton object.\n'
' *\n'
' * Returns: (transfer full): The skeleton object.\n'
' */\n'
%(self.ns_lower), False))
self.c.write('%sObjectSkeleton *\n'
'%sobject_skeleton_new (const gchar *object_path)\n'
'{\n'
' g_return_val_if_fail (g_variant_is_object_path (object_path), NULL);\n'
' return %sOBJECT_SKELETON (g_object_new (%sTYPE_OBJECT_SKELETON, "g-object-path", object_path, NULL));\n'
'}\n'
'\n'%(self.namespace, self.ns_lower, self.ns_upper, self.ns_upper))
for i in self.ifaces:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_skeleton_set_%s:\n'
' * @object: A #%sObjectSkeleton.\n'
' * @interface_: (allow-none): A #%s or %%NULL to clear the interface.\n'
' *\n'
' * Sets the #%s instance for the D-Bus interface #%s on @object.\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, i.camel_name, i.camel_name, i.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write ('void %sobject_skeleton_set_%s (%sObjectSkeleton *object, %s *interface_)\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, i.camel_name))
self.c.write('{\n'
' g_object_set (G_OBJECT (object), "%s", interface_, NULL);\n'
'}\n'
'\n'
%(i.name_hyphen))
self.c.write('\n')
  def generate_object_manager_client(self):
    """Emit C code for the generated GDBusObjectManagerClient subclass.

    Writes to self.c: gtk-doc SECTION/type documentation, the
    <ns>ObjectManagerClient type boilerplate, the
    <ns>object_manager_client_get_proxy_type() GDBusProxyTypeFunc that maps
    D-Bus interface names in self.ifaces to the generated proxy GTypes, and
    the async/sync constructors (plus their _for_bus variants and _finish
    completers).
    """
    self.c.write('/* ------------------------------------------------------------------------\n'
                 ' * Code for ObjectManager client\n'
                 ' * ------------------------------------------------------------------------\n'
                 ' */\n'
                 '\n')
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * SECTION:%sObjectManagerClient\n'
        ' * @title: %sObjectManagerClient\n'
        ' * @short_description: Generated GDBusObjectManagerClient type\n'
        ' *\n'
        ' * This section contains a #GDBusObjectManagerClient that uses %sobject_manager_client_get_proxy_type() as the #GDBusProxyTypeFunc.\n'
        ' */\n'
        %(self.namespace, self.namespace, self.ns_lower), False))
    self.c.write('\n')
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * %sObjectManagerClient:\n'
        ' *\n'
        ' * The #%sObjectManagerClient structure contains only private data and should only be accessed using the provided API.\n'
        %(self.namespace, self.namespace), False))
    self.c.write(' */\n')
    self.c.write('\n')
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * %sObjectManagerClientClass:\n'
        ' * @parent_class: The parent class.\n'
        ' *\n'
        ' * Class structure for #%sObjectManagerClient.\n'
        %(self.namespace, self.namespace), False))
    self.c.write(' */\n')
    self.c.write('\n')
    # class boilerplate
    self.c.write('G_DEFINE_TYPE (%sObjectManagerClient, %sobject_manager_client, G_TYPE_DBUS_OBJECT_MANAGER_CLIENT);\n'
                 '\n'
                 %(self.namespace, self.ns_lower))
    # class boilerplate
    self.c.write('static void\n'
                 '%sobject_manager_client_init (%sObjectManagerClient *manager)\n'
                 '{\n'
                 '}\n'
                 '\n'%(self.ns_lower, self.namespace))
    self.c.write('static void\n'
                 '%sobject_manager_client_class_init (%sObjectManagerClientClass *klass)\n'
                 '{\n'
                 '}\n'
                 '\n'%(self.ns_lower, self.namespace))
    # get_proxy_type: lazily builds a name->GType hash the first time it is
    # called; unknown interface names fall back to plain G_TYPE_DBUS_PROXY.
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * %sobject_manager_client_get_proxy_type:\n'
        ' * @manager: A #GDBusObjectManagerClient.\n'
        ' * @object_path: The object path of the remote object (unused).\n'
        ' * @interface_name: (allow-none): Interface name of the remote object or %%NULL to get the object proxy #GType.\n'
        ' * @user_data: User data (unused).\n'
        ' *\n'
        ' * A #GDBusProxyTypeFunc that maps @interface_name to the generated #GDBusObjectProxy<!-- -->- and #GDBusProxy<!-- -->-derived types.\n'
        ' *\n'
        ' * Returns: A #GDBusProxy<!-- -->-derived #GType if @interface_name is not %%NULL, otherwise the #GType for #%sObjectProxy.\n'
        %(self.ns_lower, self.namespace), False))
    self.c.write(' */\n')
    self.c.write('GType\n'
                 '%sobject_manager_client_get_proxy_type (GDBusObjectManagerClient *manager, const gchar *object_path, const gchar *interface_name, gpointer user_data)\n'
                 '{\n'
                 %(self.ns_lower))
    self.c.write('  static gsize once_init_value = 0;\n'
                 '  static GHashTable *lookup_hash;\n'
                 '  GType ret;\n'
                 '\n'
                 '  if (interface_name == NULL)\n'
                 '    return %sTYPE_OBJECT_PROXY;\n'
                 '  if (g_once_init_enter (&once_init_value))\n'
                 '    {\n'
                 '      lookup_hash = g_hash_table_new (g_str_hash, g_str_equal);\n'
                 %(self.ns_upper))
    for i in self.ifaces:
      self.c.write('      g_hash_table_insert (lookup_hash, "%s", GSIZE_TO_POINTER (%sTYPE_%s_PROXY));\n'
                   %(i.name, i.ns_upper, i.name_upper))
    self.c.write('      g_once_init_leave (&once_init_value, 1);\n'
                 '    }\n')
    self.c.write('  ret = (GType) GPOINTER_TO_SIZE (g_hash_table_lookup (lookup_hash, interface_name));\n'
                 '  if (ret == (GType) 0)\n'
                 '    ret = G_TYPE_DBUS_PROXY;\n')
    self.c.write('  return ret;\n'
                 '}\n'
                 '\n')
    # constructors
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * %sobject_manager_client_new:\n'
        ' * @connection: A #GDBusConnection.\n'
        ' * @flags: Flags from the #GDBusObjectManagerClientFlags enumeration.\n'
        ' * @name: (allow-none): A bus name (well-known or unique) or %%NULL if @connection is not a message bus connection.\n'
        ' * @object_path: An object path.\n'
        ' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
        ' * @callback: A #GAsyncReadyCallback to call when the request is satisfied.\n'
        ' * @user_data: User data to pass to @callback.\n'
        ' *\n'
        ' * Asynchronously creates #GDBusObjectManagerClient using %sobject_manager_client_get_proxy_type() as the #GDBusProxyTypeFunc. See g_dbus_object_manager_client_new() for more details.\n'
        ' *\n'
        ' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
        ' * You can then call %sobject_manager_client_new_finish() to get the result of the operation.\n'
        ' *\n'
        ' * See %sobject_manager_client_new_sync() for the synchronous, blocking version of this constructor.\n'
        %(self.ns_lower, self.ns_lower, self.ns_lower, self.ns_lower), False))
    self.c.write(' */\n')
    self.c.write('void\n'
                 '%sobject_manager_client_new (\n'
                 '    GDBusConnection *connection,\n'
                 '    GDBusObjectManagerClientFlags flags,\n'
                 '    const gchar *name,\n'
                 '    const gchar *object_path,\n'
                 '    GCancellable *cancellable,\n'
                 '    GAsyncReadyCallback callback,\n'
                 '    gpointer user_data)\n'
                 '{\n'
                 '  g_async_initable_new_async (%sTYPE_OBJECT_MANAGER_CLIENT, G_PRIORITY_DEFAULT, cancellable, callback, user_data, "flags", flags, "name", name, "connection", connection, "object-path", object_path, "get-proxy-type-func", %sobject_manager_client_get_proxy_type, NULL);\n'
                 '}\n'
                 '\n'
                 %(self.ns_lower, self.ns_upper, self.ns_lower))
    # NOTE: the _finish docs are written directly (no docbook expand) — they
    # contain no expandable D-Bus references.
    self.c.write('/**\n'
                 ' * %sobject_manager_client_new_finish:\n'
                 ' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %sobject_manager_client_new().\n'
                 ' * @error: Return location for error or %%NULL\n'
                 ' *\n'
                 ' * Finishes an operation started with %sobject_manager_client_new().\n'
                 ' *\n'
                 ' * Returns: (transfer full) (type %sObjectManagerClient): The constructed object manager client or %%NULL if @error is set.\n'
                 %(self.ns_lower, self.ns_lower, self.ns_lower, self.namespace))
    self.c.write(' */\n')
    self.c.write('GDBusObjectManager *\n'
                 '%sobject_manager_client_new_finish (\n'
                 '    GAsyncResult *res,\n'
                 '    GError **error)\n'
                 '{\n'
                 '  GObject *ret;\n'
                 '  GObject *source_object;\n'
                 '  source_object = g_async_result_get_source_object (res);\n'
                 '  ret = g_async_initable_new_finish (G_ASYNC_INITABLE (source_object), res, error);\n'
                 '  g_object_unref (source_object);\n'
                 '  if (ret != NULL)\n'
                 '    return G_DBUS_OBJECT_MANAGER (ret);\n'
                 '  else\n'
                 '    return NULL;\n'
                 '}\n'
                 '\n'
                 %(self.ns_lower))
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * %sobject_manager_client_new_sync:\n'
        ' * @connection: A #GDBusConnection.\n'
        ' * @flags: Flags from the #GDBusObjectManagerClientFlags enumeration.\n'
        ' * @name: (allow-none): A bus name (well-known or unique) or %%NULL if @connection is not a message bus connection.\n'
        ' * @object_path: An object path.\n'
        ' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
        ' * @error: Return location for error or %%NULL\n'
        ' *\n'
        ' * Synchronously creates #GDBusObjectManagerClient using %sobject_manager_client_get_proxy_type() as the #GDBusProxyTypeFunc. See g_dbus_object_manager_client_new_sync() for more details.\n'
        ' *\n'
        ' * The calling thread is blocked until a reply is received.\n'
        ' *\n'
        ' * See %sobject_manager_client_new() for the asynchronous version of this constructor.\n'
        ' *\n'
        ' * Returns: (transfer full) (type %sObjectManagerClient): The constructed object manager client or %%NULL if @error is set.\n'
        %(self.ns_lower, self.ns_lower, self.ns_lower, self.namespace), False))
    self.c.write(' */\n')
    self.c.write('GDBusObjectManager *\n'
                 '%sobject_manager_client_new_sync (\n'
                 '    GDBusConnection *connection,\n'
                 '    GDBusObjectManagerClientFlags flags,\n'
                 '    const gchar *name,\n'
                 '    const gchar *object_path,\n'
                 '    GCancellable *cancellable,\n'
                 '    GError **error)\n'
                 '{\n'
                 '  GInitable *ret;\n'
                 '  ret = g_initable_new (%sTYPE_OBJECT_MANAGER_CLIENT, cancellable, error, "flags", flags, "name", name, "connection", connection, "object-path", object_path, "get-proxy-type-func", %sobject_manager_client_get_proxy_type, NULL);\n'
                 '  if (ret != NULL)\n'
                 '    return G_DBUS_OBJECT_MANAGER (ret);\n'
                 '  else\n'
                 '    return NULL;\n'
                 '}\n'
                 '\n'
                 %(self.ns_lower, self.ns_upper, self.ns_lower))
    self.c.write('\n')
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * %sobject_manager_client_new_for_bus:\n'
        ' * @bus_type: A #GBusType.\n'
        ' * @flags: Flags from the #GDBusObjectManagerClientFlags enumeration.\n'
        ' * @name: A bus name (well-known or unique).\n'
        ' * @object_path: An object path.\n'
        ' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
        ' * @callback: A #GAsyncReadyCallback to call when the request is satisfied.\n'
        ' * @user_data: User data to pass to @callback.\n'
        ' *\n'
        ' * Like %sobject_manager_client_new() but takes a #GBusType instead of a #GDBusConnection.\n'
        ' *\n'
        ' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
        ' * You can then call %sobject_manager_client_new_for_bus_finish() to get the result of the operation.\n'
        ' *\n'
        ' * See %sobject_manager_client_new_for_bus_sync() for the synchronous, blocking version of this constructor.\n'
        %(self.ns_lower, self.ns_lower, self.ns_lower, self.ns_lower), False))
    self.c.write(' */\n')
    self.c.write('void\n'
                 '%sobject_manager_client_new_for_bus (\n'
                 '    GBusType bus_type,\n'
                 '    GDBusObjectManagerClientFlags flags,\n'
                 '    const gchar *name,\n'
                 '    const gchar *object_path,\n'
                 '    GCancellable *cancellable,\n'
                 '    GAsyncReadyCallback callback,\n'
                 '    gpointer user_data)\n'
                 '{\n'
                 '  g_async_initable_new_async (%sTYPE_OBJECT_MANAGER_CLIENT, G_PRIORITY_DEFAULT, cancellable, callback, user_data, "flags", flags, "name", name, "bus-type", bus_type, "object-path", object_path, "get-proxy-type-func", %sobject_manager_client_get_proxy_type, NULL);\n'
                 '}\n'
                 '\n'
                 %(self.ns_lower, self.ns_upper, self.ns_lower))
    self.c.write('/**\n'
                 ' * %sobject_manager_client_new_for_bus_finish:\n'
                 ' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %sobject_manager_client_new_for_bus().\n'
                 ' * @error: Return location for error or %%NULL\n'
                 ' *\n'
                 ' * Finishes an operation started with %sobject_manager_client_new_for_bus().\n'
                 ' *\n'
                 ' * Returns: (transfer full) (type %sObjectManagerClient): The constructed object manager client or %%NULL if @error is set.\n'
                 %(self.ns_lower, self.ns_lower, self.ns_lower, self.namespace))
    self.c.write(' */\n')
    self.c.write('GDBusObjectManager *\n'
                 '%sobject_manager_client_new_for_bus_finish (\n'
                 '    GAsyncResult *res,\n'
                 '    GError **error)\n'
                 '{\n'
                 '  GObject *ret;\n'
                 '  GObject *source_object;\n'
                 '  source_object = g_async_result_get_source_object (res);\n'
                 '  ret = g_async_initable_new_finish (G_ASYNC_INITABLE (source_object), res, error);\n'
                 '  g_object_unref (source_object);\n'
                 '  if (ret != NULL)\n'
                 '    return G_DBUS_OBJECT_MANAGER (ret);\n'
                 '  else\n'
                 '    return NULL;\n'
                 '}\n'
                 '\n'
                 %(self.ns_lower))
    self.c.write(self.docbook_gen.expand(
        '/**\n'
        ' * %sobject_manager_client_new_for_bus_sync:\n'
        ' * @bus_type: A #GBusType.\n'
        ' * @flags: Flags from the #GDBusObjectManagerClientFlags enumeration.\n'
        ' * @name: A bus name (well-known or unique).\n'
        ' * @object_path: An object path.\n'
        ' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
        ' * @error: Return location for error or %%NULL\n'
        ' *\n'
        ' * Like %sobject_manager_client_new_sync() but takes a #GBusType instead of a #GDBusConnection.\n'
        ' *\n'
        ' * The calling thread is blocked until a reply is received.\n'
        ' *\n'
        ' * See %sobject_manager_client_new_for_bus() for the asynchronous version of this constructor.\n'
        ' *\n'
        ' * Returns: (transfer full) (type %sObjectManagerClient): The constructed object manager client or %%NULL if @error is set.\n'
        %(self.ns_lower, self.ns_lower, self.ns_lower, self.namespace), False))
    self.c.write(' */\n')
    self.c.write('GDBusObjectManager *\n'
                 '%sobject_manager_client_new_for_bus_sync (\n'
                 '    GBusType bus_type,\n'
                 '    GDBusObjectManagerClientFlags flags,\n'
                 '    const gchar *name,\n'
                 '    const gchar *object_path,\n'
                 '    GCancellable *cancellable,\n'
                 '    GError **error)\n'
                 '{\n'
                 '  GInitable *ret;\n'
                 '  ret = g_initable_new (%sTYPE_OBJECT_MANAGER_CLIENT, cancellable, error, "flags", flags, "name", name, "bus-type", bus_type, "object-path", object_path, "get-proxy-type-func", %sobject_manager_client_get_proxy_type, NULL);\n'
                 '  if (ret != NULL)\n'
                 '    return G_DBUS_OBJECT_MANAGER (ret);\n'
                 '  else\n'
                 '    return NULL;\n'
                 '}\n'
                 '\n'
                 %(self.ns_lower, self.ns_upper, self.ns_lower))
    self.c.write('\n')
# ---------------------------------------------------------------------------------------------------
def write_gtkdoc_deprecated_and_since_and_close(self, obj, f, indent):
if len(obj.since) > 0:
f.write('%*s *\n'
'%*s * Since: %s\n'
%(indent, '', indent, '', obj.since))
if obj.deprecated:
if isinstance(obj, dbustypes.Interface):
thing = 'The D-Bus interface'
elif isinstance(obj, dbustypes.Method):
thing = 'The D-Bus method'
elif isinstance(obj, dbustypes.Signal):
thing = 'The D-Bus signal'
elif isinstance(obj, dbustypes.Property):
thing = 'The D-Bus property'
else:
raise RuntimeError('Cannot handle object ', obj)
f.write(self.docbook_gen.expand(
'%*s *\n'
'%*s * Deprecated: %s has been deprecated.\n'
%(indent, '', indent, '', thing), False))
f.write('%*s */\n'%(indent, ''))
# ---------------------------------------------------------------------------------------------------
def generate_interface_intro(self, i):
self.c.write('/* ------------------------------------------------------------------------\n'
' * Code for interface %s\n'
' * ------------------------------------------------------------------------\n'
' */\n'
'\n'%(i.name))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * SECTION:%s\n'
' * @title: %s\n'
' * @short_description: Generated C code for the %s D-Bus interface\n'
' *\n'
' * This section contains code for working with the #%s D-Bus interface in C.\n'
' */\n'
%(i.camel_name, i.camel_name, i.name, i.name), False))
self.c.write('\n')
def generate(self):
self.generate_intro()
self.declare_types()
for i in self.ifaces:
self.generate_interface_intro(i)
self.generate_introspection_for_interface(i)
self.generate_interface(i)
self.generate_property_accessors(i)
self.generate_signal_emitters(i)
self.generate_method_calls(i)
self.generate_method_completers(i)
self.generate_proxy(i)
self.generate_skeleton(i)
if self.generate_objmanager:
self.generate_object()
self.generate_object_manager_client()
self.generate_outro()
| [
[
1,
0,
0.0072,
0.0003,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0077,
0.0003,
0,
0.66,
0.25,
0,
0,
1,
0,
0,
0,
0,
0
],
[
1,
0,
0.008,
0.0003,
0,
0.66,
... | [
"import sys",
"from . import config",
"from . import utils",
"from . import dbustypes",
"class CodeGenerator:\n def __init__(self, ifaces, namespace, interface_prefix, generate_objmanager, docbook_gen, h, c):\n self.docbook_gen = docbook_gen\n self.generate_objmanager = generate_objmanager\... |
# -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
import distutils.version
def strip_dots(s):
    """Remove '.' separators from s, upper-casing the character that
    follows each dot (e.g. 'org.foo.Bar' -> 'orgFooBar')."""
    pieces = []
    capitalize_next = False
    for ch in s:
        if ch == '.':
            capitalize_next = True
        elif capitalize_next:
            pieces.append(ch.upper())
            capitalize_next = False
        else:
            pieces.append(ch)
    return ''.join(pieces)
def dots_to_hyphens(s):
    """Return s with every '.' turned into '-'."""
    return '-'.join(s.split('.'))
def camel_case_to_uscore(s):
    """Convert CamelCase to lower_snake_case.

    An underscore is inserted before an upper-case letter only when the
    previous character was not upper-case (digits and punctuation count
    as lower for this purpose), so 'FooBAR' becomes 'foo_bar'.
    """
    out = []
    previous_was_lower = False
    for ch in s:
        if ch.isupper():
            if previous_was_lower:
                out.append('_')
            previous_was_lower = False
        else:
            previous_was_lower = True
        out.append(ch.lower())
    return ''.join(out)
def is_ugly_case(s):
    """Return True when s is non-empty and contains an underscore after
    position 0 (a leading underscore alone does not count)."""
    return bool(s) and s.find('_') > 0
def lookup_annotation(annotations, key):
    """Return the value of the annotation named key, or None when the
    list is empty/None or no annotation matches."""
    for annotation in annotations or ():
        if annotation.key == key:
            return annotation.value
    return None
def lookup_docs(annotations):
    """Return the org.gtk.GDBus.DocString annotation value, or '' if absent."""
    s = lookup_annotation(annotations, 'org.gtk.GDBus.DocString')
    # Idiom fix: compare to None with 'is', not '=='.
    if s is None:
        return ''
    else:
        return s
def lookup_since(annotations):
    """Return the org.gtk.GDBus.Since annotation value, or '' if absent."""
    s = lookup_annotation(annotations, 'org.gtk.GDBus.Since')
    # Idiom fix: compare to None with 'is', not '=='.
    if s is None:
        return ''
    else:
        return s
def lookup_brief_docs(annotations):
    """Return the org.gtk.GDBus.DocString.Short annotation value, or '' if absent."""
    s = lookup_annotation(annotations, 'org.gtk.GDBus.DocString.Short')
    # Idiom fix: compare to None with 'is', not '=='.
    if s is None:
        return ''
    else:
        return s
# I'm sure this could be a lot more elegant if I was
# more fluent in python...
def my_version_cmp(a, b):
    """cmp()-style comparator for (version_string, name) tuples.

    When both version strings are non-empty they are compared as
    distutils LooseVersions; otherwise the raw strings are compared.
    Ties are broken by the second tuple element.

    NOTE: Python 2 only -- relies on the cmp() builtin and
    LooseVersion.__cmp__, both of which were removed in Python 3.
    """
    if len(a[0]) > 0 and len(b[0]) > 0:
        va = distutils.version.LooseVersion(a[0])
        vb = distutils.version.LooseVersion(b[0])
        ret = va.__cmp__(vb)
    else:
        ret = cmp(a[0], b[0])
    if ret != 0:
        return ret
    return cmp(a[1], b[1])
| [
[
1,
0,
0.2308,
0.0096,
0,
0.66,
0,
42,
0,
1,
0,
0,
42,
0,
0
],
[
2,
0,
0.3077,
0.125,
0,
0.66,
0.1111,
914,
0,
1,
1,
0,
0,
0,
1
],
[
14,
1,
0.2596,
0.0096,
1,
0.9,... | [
"import distutils.version",
"def strip_dots(s):\n ret = ''\n force_upper = False\n for c in s:\n if c == '.':\n force_upper = True\n else:\n if force_upper:",
" ret = ''",
" force_upper = False",
" for c in s:\n if c == '.':\n force_upp... |
# -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
import os
# When running against an uninstalled GLib build, extend this package's
# search path so the codegen modules are loaded from the build tree.
builddir = os.environ.get('UNINSTALLED_GLIB_BUILDDIR')
if builddir is not None:
    __path__.append(os.path.abspath(os.path.join(builddir, 'gio', 'gdbus-2.0', 'codegen')))
| [
[
1,
0,
0.8276,
0.0345,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
14,
0,
0.8966,
0.0345,
0,
0.66,
0.5,
555,
3,
1,
0,
0,
607,
10,
1
],
[
4,
0,
0.9828,
0.069,
0,
0.... | [
"import os",
"builddir = os.environ.get('UNINSTALLED_GLIB_BUILDDIR')",
"if builddir is not None:\n __path__.append(os.path.abspath(os.path.join(builddir, 'gio', 'gdbus-2.0', 'codegen')))",
" __path__.append(os.path.abspath(os.path.join(builddir, 'gio', 'gdbus-2.0', 'codegen')))"
] |
# -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
# Build-time configuration: the @...@ tokens are substituted by the
# configure step when this file is generated from its .in template.
DATADIR = "@datarootdir@"
# Autoconf may leave a literal ${prefix} inside @datarootdir@; expand it.
DATADIR = DATADIR.replace(
    "${prefix}", "@prefix@")
VERSION = "@VERSION@"
| [
[
14,
0,
0.8889,
0.037,
0,
0.66,
0,
366,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.9444,
0.0741,
0,
0.66,
0.5,
366,
3,
2,
0,
0,
293,
10,
1
],
[
14,
0,
1,
0.037,
0,
0.66,
... | [
"DATADIR = \"@datarootdir@\"",
"DATADIR = DATADIR.replace(\n \"${prefix}\", \"@prefix@\")",
"VERSION = \"@VERSION@\""
] |
# -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <davidz@redhat.com>
import sys
import xml.parsers.expat
from . import dbustypes
class DBusXMLParser:
    """Expat-based parser for D-Bus introspection XML.

    Runs the document through a small state machine and collects
    dbustypes.Interface objects (with their methods, signals, properties,
    args and annotations) in self.parsed_interfaces.  A gtk-doc style XML
    comment immediately preceding an element is attached to that element
    as documentation.

    Changes vs. the original: dict.has_key() (removed in Python 3) is
    replaced by the 'in' operator, '!= None' by 'is not None', and a
    duplicated '_cur_object = None' assignment was dropped -- no behavior
    change.
    """

    # Parser states, named after the XML element each one corresponds to.
    STATE_TOP = 'top'
    STATE_NODE = 'node'
    STATE_INTERFACE = 'interface'
    STATE_METHOD = 'method'
    STATE_SIGNAL = 'signal'
    STATE_PROPERTY = 'property'
    STATE_ARG = 'arg'
    STATE_ANNOTATION = 'annotation'
    STATE_IGNORED = 'ignored'

    def __init__(self, xml_data):
        """Parse xml_data immediately; results land in parsed_interfaces."""
        self._parser = xml.parsers.expat.ParserCreate()
        self._parser.CommentHandler = self.handle_comment
        self._parser.CharacterDataHandler = self.handle_char_data
        self._parser.StartElementHandler = self.handle_start_element
        self._parser.EndElementHandler = self.handle_end_element
        self.parsed_interfaces = []
        # Current object being populated, plus stacks mirroring element nesting.
        self._cur_object = None
        self.state = DBusXMLParser.STATE_TOP
        self.state_stack = []
        self._cur_object_stack = []
        # Symbol named by the most recent doc comment; matched against the
        # 'name' attribute of the next element.
        self.doc_comment_last_symbol = ''
        self._parser.Parse(xml_data)

    # States for scanning a gtk-doc style comment.
    COMMENT_STATE_BEGIN = 'begin'
    COMMENT_STATE_PARAMS = 'params'
    COMMENT_STATE_BODY = 'body'
    COMMENT_STATE_SKIP = 'skip'

    def handle_comment(self, data):
        """Parse a gtk-doc style XML comment of the form

            SymbolName:
            @param: parameter documentation...

            Body text...

        Stores the symbol name, per-parameter docs and <para>-wrapped body
        so the next start element can pick them up.
        """
        comment_state = DBusXMLParser.COMMENT_STATE_BEGIN
        lines = data.split('\n')
        symbol = ''
        body = ''
        in_para = False
        params = {}
        for line in lines:
            orig_line = line
            line = line.lstrip()
            if comment_state == DBusXMLParser.COMMENT_STATE_BEGIN:
                if len(line) > 0:
                    colon_index = line.find(': ')
                    if colon_index == -1:
                        if line.endswith(':'):
                            symbol = line[0:len(line)-1]
                            comment_state = DBusXMLParser.COMMENT_STATE_PARAMS
                        else:
                            # First non-blank line is not 'Symbol:' -- not a
                            # doc comment; ignore the rest.
                            comment_state = DBusXMLParser.COMMENT_STATE_SKIP
                    else:
                        symbol = line[0:colon_index]
                        rest_of_line = line[colon_index+2:].strip()
                        if len(rest_of_line) > 0:
                            body += '<para>' + rest_of_line + '</para>'
                        comment_state = DBusXMLParser.COMMENT_STATE_PARAMS
            elif comment_state == DBusXMLParser.COMMENT_STATE_PARAMS:
                if line.startswith('@'):
                    colon_index = line.find(': ')
                    if colon_index == -1:
                        comment_state = DBusXMLParser.COMMENT_STATE_BODY
                        if not in_para:
                            body += '<para>'
                            in_para = True
                        body += orig_line + '\n'
                    else:
                        param = line[1:colon_index]
                        docs = line[colon_index + 2:]
                        params[param] = docs
                else:
                    comment_state = DBusXMLParser.COMMENT_STATE_BODY
                    if len(line) > 0:
                        if not in_para:
                            body += '<para>'
                            in_para = True
                        body += orig_line + '\n'
            elif comment_state == DBusXMLParser.COMMENT_STATE_BODY:
                if len(line) > 0:
                    if not in_para:
                        body += '<para>'
                        in_para = True
                    body += orig_line + '\n'
                else:
                    # Blank line closes the current paragraph.
                    if in_para:
                        body += '</para>'
                        in_para = False
        if in_para:
            body += '</para>'
        if symbol != '':
            self.doc_comment_last_symbol = symbol
            self.doc_comment_params = params
            self.doc_comment_body = body

    def handle_char_data(self, data):
        """Character data is not used by the introspection format; ignore it."""
        pass

    def handle_start_element(self, name, attrs):
        """Expat start-element handler.

        Advances the state machine according to the current state and the
        element name, creates the matching dbustypes object, and attaches
        any pending doc comment whose symbol matches the element.
        """
        old_state = self.state
        old_cur_object = self._cur_object
        if self.state == DBusXMLParser.STATE_IGNORED:
            # Everything below an ignored element stays ignored.
            self.state = DBusXMLParser.STATE_IGNORED
        elif self.state == DBusXMLParser.STATE_TOP:
            if name == DBusXMLParser.STATE_NODE:
                self.state = DBusXMLParser.STATE_NODE
            else:
                self.state = DBusXMLParser.STATE_IGNORED
        elif self.state == DBusXMLParser.STATE_NODE:
            if name == DBusXMLParser.STATE_INTERFACE:
                self.state = DBusXMLParser.STATE_INTERFACE
                iface = dbustypes.Interface(attrs['name'])
                self._cur_object = iface
                self.parsed_interfaces.append(iface)
            elif name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = dbustypes.Annotation(attrs['name'], attrs['value'])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED
            # assign docs, if any
            if 'name' in attrs and self.doc_comment_last_symbol == attrs['name']:
                self._cur_object.doc_string = self.doc_comment_body
                if 'short_description' in self.doc_comment_params:
                    short_description = self.doc_comment_params['short_description']
                    self._cur_object.doc_string_brief = short_description
                if 'since' in self.doc_comment_params:
                    self._cur_object.since = self.doc_comment_params['since']
        elif self.state == DBusXMLParser.STATE_INTERFACE:
            if name == DBusXMLParser.STATE_METHOD:
                self.state = DBusXMLParser.STATE_METHOD
                method = dbustypes.Method(attrs['name'])
                self._cur_object.methods.append(method)
                self._cur_object = method
            elif name == DBusXMLParser.STATE_SIGNAL:
                self.state = DBusXMLParser.STATE_SIGNAL
                signal = dbustypes.Signal(attrs['name'])
                self._cur_object.signals.append(signal)
                self._cur_object = signal
            elif name == DBusXMLParser.STATE_PROPERTY:
                self.state = DBusXMLParser.STATE_PROPERTY
                prop = dbustypes.Property(attrs['name'], attrs['type'], attrs['access'])
                self._cur_object.properties.append(prop)
                self._cur_object = prop
            elif name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = dbustypes.Annotation(attrs['name'], attrs['value'])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED
            # assign docs, if any
            if 'name' in attrs and self.doc_comment_last_symbol == attrs['name']:
                self._cur_object.doc_string = self.doc_comment_body
                if 'since' in self.doc_comment_params:
                    self._cur_object.since = self.doc_comment_params['since']
        elif self.state == DBusXMLParser.STATE_METHOD:
            if name == DBusXMLParser.STATE_ARG:
                self.state = DBusXMLParser.STATE_ARG
                arg_name = None
                if 'name' in attrs:
                    arg_name = attrs['name']
                arg = dbustypes.Arg(arg_name, attrs['type'])
                direction = attrs['direction']
                if direction == 'in':
                    self._cur_object.in_args.append(arg)
                elif direction == 'out':
                    self._cur_object.out_args.append(arg)
                else:
                    raise RuntimeError('Invalid direction "%s"'%(direction))
                self._cur_object = arg
            elif name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = dbustypes.Annotation(attrs['name'], attrs['value'])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED
            # assign docs, if any (arg docs come from the @param section of
            # the comment attached to the enclosing method)
            if self.doc_comment_last_symbol == old_cur_object.name:
                if 'name' in attrs and attrs['name'] in self.doc_comment_params:
                    doc_string = self.doc_comment_params[attrs['name']]
                    if doc_string is not None:
                        self._cur_object.doc_string = doc_string
                    if 'since' in self.doc_comment_params:
                        self._cur_object.since = self.doc_comment_params['since']
        elif self.state == DBusXMLParser.STATE_SIGNAL:
            if name == DBusXMLParser.STATE_ARG:
                self.state = DBusXMLParser.STATE_ARG
                arg_name = None
                if 'name' in attrs:
                    arg_name = attrs['name']
                arg = dbustypes.Arg(arg_name, attrs['type'])
                self._cur_object.args.append(arg)
                self._cur_object = arg
            elif name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = dbustypes.Annotation(attrs['name'], attrs['value'])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED
            # assign docs, if any
            if self.doc_comment_last_symbol == old_cur_object.name:
                if 'name' in attrs and attrs['name'] in self.doc_comment_params:
                    doc_string = self.doc_comment_params[attrs['name']]
                    if doc_string is not None:
                        self._cur_object.doc_string = doc_string
                    if 'since' in self.doc_comment_params:
                        self._cur_object.since = self.doc_comment_params['since']
        elif self.state == DBusXMLParser.STATE_PROPERTY:
            if name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = dbustypes.Annotation(attrs['name'], attrs['value'])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED
        elif self.state == DBusXMLParser.STATE_ARG:
            if name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = dbustypes.Annotation(attrs['name'], attrs['value'])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED
        elif self.state == DBusXMLParser.STATE_ANNOTATION:
            # Annotations may nest arbitrarily.
            if name == DBusXMLParser.STATE_ANNOTATION:
                self.state = DBusXMLParser.STATE_ANNOTATION
                anno = dbustypes.Annotation(attrs['name'], attrs['value'])
                self._cur_object.annotations.append(anno)
                self._cur_object = anno
            else:
                self.state = DBusXMLParser.STATE_IGNORED
        else:
            raise RuntimeError('Unhandled state "%s" while entering element with name "%s"'%(self.state, name))
        self.state_stack.append(old_state)
        self._cur_object_stack.append(old_cur_object)

    def handle_end_element(self, name):
        """Pop back to the state/object of the enclosing element."""
        self.state = self.state_stack.pop()
        self._cur_object = self._cur_object_stack.pop()
def parse_dbus_xml(xml_data):
    """Parse D-Bus introspection XML and return the list of parsed
    dbustypes.Interface objects."""
    return DBusXMLParser(xml_data).parsed_interfaces
| [
[
1,
0,
0.0828,
0.0034,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0862,
0.0034,
0,
0.66,
0.25,
573,
0,
1,
0,
0,
573,
0,
0
],
[
1,
0,
0.0931,
0.0034,
0,
0.... | [
"import sys",
"import xml.parsers.expat",
"from . import dbustypes",
"class DBusXMLParser:\n STATE_TOP = 'top'\n STATE_NODE = 'node'\n STATE_INTERFACE = 'interface'\n STATE_METHOD = 'method'\n STATE_SIGNAL = 'signal'\n STATE_PROPERTY = 'property'\n STATE_ARG = 'arg'",
" STATE_TOP = '... |
#!/usr/bin/env python
# -*- encoding:utf8 -*-
# protoc-gen-erl
# Google's Protocol Buffers project, ported to lua.
# https://code.google.com/p/protoc-gen-lua/
#
# Copyright (c) 2010 , 林卓毅 (Zhuoyi Lin) netsnail@gmail.com
# All rights reserved.
#
# Use, modification and distribution are subject to the "New BSD License"
# as listed at <url: http://www.opensource.org/licenses/bsd-license.php >.
import sys
import os.path as path
from cStringIO import StringIO
import plugin_pb2
import google.protobuf.descriptor_pb2 as descriptor_pb2
# Generated output, keyed by output filename -> Lua source text.
# NOTE(review): _packages and _message appear unused in this module -- confirm.
_packages = {}
_files = {}
_message = {}
# Shorthand for protobuf's FieldDescriptorProto message class.
FDP = plugin_pb2.descriptor_pb2.FieldDescriptorProto
# protoc talks to plugins via binary protobufs on stdin/stdout; on Windows
# the standard streams must be switched to binary mode first.
if sys.platform == "win32":
    import msvcrt, os
    msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
    msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
class CppType:
    """Numeric constants mirroring protobuf's
    google::protobuf::FieldDescriptor::CppType enum values."""
    CPPTYPE_INT32 = 1
    CPPTYPE_INT64 = 2
    CPPTYPE_UINT32 = 3
    CPPTYPE_UINT64 = 4
    CPPTYPE_DOUBLE = 5
    CPPTYPE_FLOAT = 6
    CPPTYPE_BOOL = 7
    CPPTYPE_ENUM = 8
    CPPTYPE_STRING = 9
    CPPTYPE_MESSAGE = 10
# Map each protobuf wire/field type to the CppType class it is handled as.
CPP_TYPE ={
    FDP.TYPE_DOUBLE : CppType.CPPTYPE_DOUBLE,
    FDP.TYPE_FLOAT : CppType.CPPTYPE_FLOAT,
    FDP.TYPE_INT64 : CppType.CPPTYPE_INT64,
    FDP.TYPE_UINT64 : CppType.CPPTYPE_UINT64,
    FDP.TYPE_INT32 : CppType.CPPTYPE_INT32,
    FDP.TYPE_FIXED64 : CppType.CPPTYPE_UINT64,
    FDP.TYPE_FIXED32 : CppType.CPPTYPE_UINT32,
    FDP.TYPE_BOOL : CppType.CPPTYPE_BOOL,
    FDP.TYPE_STRING : CppType.CPPTYPE_STRING,
    FDP.TYPE_MESSAGE : CppType.CPPTYPE_MESSAGE,
    FDP.TYPE_BYTES : CppType.CPPTYPE_STRING,
    FDP.TYPE_UINT32 : CppType.CPPTYPE_UINT32,
    FDP.TYPE_ENUM : CppType.CPPTYPE_ENUM,
    FDP.TYPE_SFIXED32 : CppType.CPPTYPE_INT32,
    FDP.TYPE_SFIXED64 : CppType.CPPTYPE_INT64,
    FDP.TYPE_SINT32 : CppType.CPPTYPE_INT32,
    FDP.TYPE_SINT64 : CppType.CPPTYPE_INT64
    }
def printerr(*args):
    """Write args to stderr, space-separated and newline-terminated,
    flushing immediately."""
    sys.stderr.write(" ".join(args) + "\n")
    sys.stderr.flush()
class TreeNode(object):
    """One node of the package/message name tree.

    Each node knows its parent, children, the .proto file it came from
    and the package path it belongs to; dotted names are derived by
    walking the parent chain.
    """

    def __init__(self, name, parent=None, filename=None, package=None):
        super(TreeNode, self).__init__()
        self.child = []
        self.parent = parent
        self.filename = filename
        self.package = package
        if parent:
            self.parent.add_child(self)
        self.name = name

    def add_child(self, child):
        """Attach child to this node."""
        self.child.append(child)

    def find_child(self, child_names):
        """Descend through successive name components; raise StandardError
        (Python 2) when any component is missing."""
        if not child_names:
            return self
        wanted = child_names[0]
        for node in self.child:
            if node.name == wanted:
                return node.find_child(child_names[1:])
        raise StandardError

    def get_child(self, child_name):
        """Return the direct child named child_name, or None."""
        return next((node for node in self.child if node.name == child_name),
                    None)

    def get_path(self, end=None):
        """Dotted name from (but excluding) end down to this node; with no
        end, includes every ancestor up to the root."""
        names = []
        node = self
        while node and node != end:
            names.append(node.name)
            node = node.parent
        return '.'.join(reversed(names))

    def get_global_name(self):
        """Fully qualified dotted name (root included)."""
        return self.get_path()

    def get_local_name(self):
        """Dotted name relative to this node's package root."""
        node = self
        while node.parent:
            node = node.parent
            if self.package and node.name == self.package[-1]:
                break
        return self.get_path(node)

    def __str__(self):
        return self.to_string(0)

    def __repr__(self):
        return str(self)

    def to_string(self, indent=0):
        """Indented multi-line dump of this subtree (debugging aid)."""
        pad = ' ' * indent
        children = ','.join(c.to_string(indent + 4) for c in self.child)
        return pad + '<TreeNode ' + self.name + '(\n' + children + pad + ')>\n'
class Env(object):
    """Mutable code-generation context threaded through the generator.

    Tracks the current file/package scope as a TreeNode tree and buffers
    the pieces of generated Lua (descriptor declarations, descriptor
    attribute assignments, message creations and extension registrations)
    until code_gen_file() flushes them into one output file.
    """
    # Per-file state; (re)initialized by enter_file()/exit_file().
    filename = None
    package = None
    extend = None
    descriptor = None
    message = None
    context = None
    register = None
    def __init__(self):
        self.message_tree = TreeNode('')
        self.scope = self.message_tree
    def get_global_name(self):
        """Fully qualified dotted name of the current scope."""
        return self.scope.get_global_name()
    def get_local_name(self):
        """Name of the current scope relative to the current package."""
        return self.scope.get_local_name()
    def get_ref_name(self, type_name):
        """Resolve a type name to the Lua expression referencing its
        descriptor; types from other files go through '<file>_pb.'."""
        try:
            node = self.lookup_name(type_name)
        except:
            # Not found in the tree: assume the type is defined later in
            # this same file and strip the current package prefix.
            # NOTE(review): bare except also hides unrelated errors.
            return type_name[len('.'.join(self.package)) + 2:]
        if node.filename != self.filename:
            return node.filename + '_pb.' + node.get_local_name()
        return node.get_local_name()
    def lookup_name(self, name):
        """Find the TreeNode for name: absolute (leading '.') names start
        at the root, relative names at the enclosing scope."""
        names = name.split('.')
        if names[0] == '':
            return self.message_tree.find_child(names[1:])
        else:
            return self.scope.parent.find_child(names)
    def enter_package(self, package):
        """Return (creating if needed) the node for dotted package."""
        if not package:
            return self.message_tree
        names = package.split('.')
        pos = self.message_tree
        for i, name in enumerate(names):
            new_pos = pos.get_child(name)
            if new_pos:
                pos = new_pos
            else:
                return self._build_nodes(pos, names[i:])
        return pos
    def enter_file(self, filename, package):
        """Begin generating for a new .proto file: reset the output
        buffers and move the scope to its package."""
        self.filename = filename
        self.package = package.split('.')
        self._init_field()
        self.scope = self.enter_package(package)
    def exit_file(self):
        """Finish the current file and pop back out of its package."""
        self._init_field()
        self.filename = None
        self.package = []
        self.scope = self.scope.parent
    def enter(self, message_name):
        """Push a nested message/enum scope."""
        self.scope = TreeNode(message_name, self.scope, self.filename,
                self.package)
    def exit(self):
        """Pop the innermost scope."""
        self.scope = self.scope.parent
    def _init_field(self):
        # Reset the four output buffers for a fresh file.
        self.descriptor = []
        self.context = []
        self.message = []
        self.register = []
    def _build_nodes(self, node, names):
        # Create a chain of nodes under `node`, one per remaining name.
        parent = node
        for i in names:
            parent = TreeNode(i, parent, self.filename, self.package)
        return parent
class Writer(object):
    """Indentation-aware string buffer used to emit Lua source.

    Calling the instance writes one line prefixed with the current indent
    and (optionally) a fixed prefix; using it as a context manager adds
    one 4-space indentation level for the duration of the block.
    """

    def __init__(self, prefix=None):
        self.io = StringIO()
        self.__indent = ''
        self.__prefix = prefix

    def getvalue(self):
        """Return everything written so far."""
        return self.io.getvalue()

    def __enter__(self):
        self.__indent += '    '
        return self

    def __exit__(self, type, value, trackback):
        self.__indent = self.__indent[:-4]

    def __call__(self, data):
        out = self.io.write
        out(self.__indent)
        if self.__prefix:
            out(self.__prefix)
        out(data)
# Lua literal used as the default value for a field of each wire type when
# the .proto file does not declare an explicit default.
DEFAULT_VALUE = {
    FDP.TYPE_DOUBLE : '0.0',
    FDP.TYPE_FLOAT : '0.0',
    FDP.TYPE_INT64 : '0',
    FDP.TYPE_UINT64 : '0',
    FDP.TYPE_INT32 : '0',
    FDP.TYPE_FIXED64 : '0',
    FDP.TYPE_FIXED32 : '0',
    FDP.TYPE_BOOL : 'false',
    FDP.TYPE_STRING : '""',
    FDP.TYPE_MESSAGE : 'nil',
    FDP.TYPE_BYTES : '""',
    FDP.TYPE_UINT32 : '0',
    FDP.TYPE_ENUM : '1',
    FDP.TYPE_SFIXED32 : '0',
    FDP.TYPE_SFIXED64 : '0',
    FDP.TYPE_SINT32 : '0',
    FDP.TYPE_SINT64 : '0',
    }
def code_gen_enum_item(index, enum_value, env):
    """Emit a Lua EnumValueDescriptor for one enum value and return the
    name of the generated Lua local variable."""
    qualified = env.get_local_name() + '.' + enum_value.name
    obj_name = qualified.upper().replace('.', '_') + '_ENUM'
    env.descriptor.append(
        "local %s = protobuf.EnumValueDescriptor();\n"% obj_name
        )
    w = Writer(obj_name)
    w('.name = "%s"\n' % enum_value.name)
    w('.index = %d\n' % index)
    w('.number = %d\n' % enum_value.number)
    env.context.append(w.getvalue())
    return obj_name
def code_gen_enum(enum_desc, env):
    """Emit a Lua EnumDescriptor (and all its values) for enum_desc;
    return the name of the generated Lua local variable."""
    env.enter(enum_desc.name)
    obj_name = env.get_local_name().upper().replace('.', '_')
    env.descriptor.append(
        "local %s = protobuf.EnumDescriptor();\n"% obj_name
        )
    w = Writer(obj_name)
    w('.name = "%s"\n' % enum_desc.name)
    w('.full_name = "%s"\n' % env.get_global_name())
    value_names = [code_gen_enum_item(i, value, env)
                   for i, value in enumerate(enum_desc.value)]
    w('.values = {%s}\n' % ','.join(value_names))
    env.context.append(w.getvalue())
    env.exit()
    return obj_name
def code_gen_field(index, field_desc, env):
    """Emit a Lua FieldDescriptor for one message field (or extension).

    Returns the name of the generated Lua local variable so the caller
    can reference it from the enclosing message descriptor.
    """
    full_name = env.get_local_name() + '.' + field_desc.name
    obj_name = full_name.upper().replace('.', '_') + '_FIELD'
    env.descriptor.append(
        "local %s = protobuf.FieldDescriptor();\n"% obj_name
        )
    context = Writer(obj_name)
    context('.name = "%s"\n' % field_desc.name)
    context('.full_name = "%s"\n' % (
        env.get_global_name() + '.' + field_desc.name))
    context('.number = %d\n' % field_desc.number)
    context('.index = %d\n' % index)
    context('.label = %d\n' % field_desc.label)
    # Explicit default declared in the .proto file, if any.
    if field_desc.HasField("default_value"):
        context('.has_default_value = true\n')
        value = field_desc.default_value
        if field_desc.type == FDP.TYPE_STRING:
            context('.default_value = "%s"\n'%value)
        else:
            context('.default_value = %s\n'%value)
    else:
        context('.has_default_value = false\n')
    # Type-derived default: {} for repeated, nil for message/enum refs,
    # otherwise the zero value for the wire type.
    if field_desc.label == FDP.LABEL_REPEATED:
        default_value = "{}"
    elif field_desc.HasField('type_name'):
        default_value = "nil"
    else:
        default_value = DEFAULT_VALUE[field_desc.type]
    # NOTE(review): when an explicit default was emitted above, this second
    # '.default_value' assignment overwrites it in the generated Lua --
    # verify this is intended.
    context('.default_value = %s\n' % default_value)
    if field_desc.HasField('type_name'):
        type_name = env.get_ref_name(field_desc.type_name).upper().replace('.', '_')
        if field_desc.type == FDP.TYPE_MESSAGE:
            context('.message_type = %s\n' % type_name)
        else:
            context('.enum_type = %s\n' % type_name)
    # Extensions register themselves against the extended message type.
    if field_desc.HasField('extendee'):
        type_name = env.get_ref_name(field_desc.extendee)
        env.register.append(
            "%s.RegisterExtension(%s)\n" % (type_name, obj_name)
            )
    context('.type = %d\n' % field_desc.type)
    context('.cpp_type = %d\n\n' % CPP_TYPE[field_desc.type])
    env.context.append(context.getvalue())
    return obj_name
def code_gen_message(message_descriptor, env, containing_type = None):
    """Emit a Lua Descriptor for one message, recursing into nested
    messages, enums, fields and extensions.

    containing_type, when given, is the Lua variable name of the parent
    message descriptor.  Returns this message's Lua variable name.
    """
    env.enter(message_descriptor.name)
    full_name = env.get_local_name()
    obj_name = full_name.upper().replace('.', '_')
    env.descriptor.append(
        "local %s = protobuf.Descriptor();\n"% obj_name
        )
    context = Writer(obj_name)
    context('.name = "%s"\n' % message_descriptor.name)
    context('.full_name = "%s"\n' % env.get_global_name())
    # Nested messages first, so their descriptors exist before this one
    # references them.
    nested_types = []
    for msg_desc in message_descriptor.nested_type:
        msg_name = code_gen_message(msg_desc, env, obj_name)
        nested_types.append(msg_name)
    context('.nested_types = {%s}\n' % ', '.join(nested_types))
    enums = []
    for enum_desc in message_descriptor.enum_type:
        enums.append(code_gen_enum(enum_desc, env))
    context('.enum_types = {%s}\n' % ', '.join(enums))
    fields = []
    for i, field_desc in enumerate(message_descriptor.field):
        fields.append(code_gen_field(i, field_desc, env))
    context('.fields = {%s}\n' % ', '.join(fields))
    # A message is extendable iff it declares at least one extension range.
    if len(message_descriptor.extension_range) > 0:
        context('.is_extendable = true\n')
    else:
        context('.is_extendable = false\n')
    extensions = []
    for i, field_desc in enumerate(message_descriptor.extension):
        extensions.append(code_gen_field(i, field_desc, env))
    context('.extensions = {%s}\n' % ', '.join(extensions))
    if containing_type:
        context('.containing_type = %s\n' % containing_type)
    env.message.append('%s = protobuf.Message(%s)\n' % (full_name,
        obj_name))
    env.context.append(context.getvalue())
    env.exit()
    return obj_name
def write_header(writer):
    """Emit the do-not-edit banner at the top of a generated Lua file."""
    writer('-- Generated By protoc-gen-lua Do not Edit\n')
def code_gen_file(proto_file, env, is_gen):
    """Process one FileDescriptorProto.

    Always walks the file so its types are registered in env's name tree;
    only when is_gen is true is the assembled '<name>_pb.lua' source
    recorded in the module-level _files dict.
    """
    filename = path.splitext(proto_file.name)[0]
    env.enter_file(filename, proto_file.package)
    includes = []
    for f in proto_file.dependency:
        inc_file = path.splitext(f)[0]
        includes.append(inc_file)
#    for field_desc in proto_file.extension:
#        code_gen_extensions(field_desc, field_desc.name, env)
    for enum_desc in proto_file.enum_type:
        code_gen_enum(enum_desc, env)
        # Top-level enum values are also exported as module constants.
        for enum_value in enum_desc.value:
            env.message.append('%s = %d\n' % (enum_value.name,
                enum_value.number))
    for msg_desc in proto_file.message_type:
        code_gen_message(msg_desc, env)
    if is_gen:
        lua = Writer()
        write_header(lua)
        lua('local protobuf = require "protobuf"\n')
        for i in includes:
            lua('local %s_pb = require("%s_pb")\n' % (i, i))
        lua("module('%s_pb')\n" % env.filename)
        lua('\n\n')
        # NOTE: map() is used for side effects (writing each buffered
        # chunk) -- this is Python 2 only; under Python 3 map is lazy and
        # these calls would emit nothing.
        map(lua, env.descriptor)
        lua('\n')
        map(lua, env.context)
        lua('\n')
        env.message.sort()
        map(lua, env.message)
        lua('\n')
        map(lua, env.register)
        _files[env.filename+ '_pb.lua'] = lua.getvalue()
    env.exit_file()
def main():
    """protoc plugin entry point.

    Reads a binary CodeGeneratorRequest from stdin, generates Lua for
    every requested .proto file, and writes the CodeGeneratorResponse to
    stdout.
    """
    plugin_require_bin = sys.stdin.read()
    code_gen_req = plugin_pb2.CodeGeneratorRequest()
    code_gen_req.ParseFromString(plugin_require_bin)
    env = Env()
    # All proto files are walked (to populate the name tree); output is
    # only produced for the files protoc explicitly asked us to generate.
    for proto_file in code_gen_req.proto_file:
        code_gen_file(proto_file, env,
                proto_file.name in code_gen_req.file_to_generate)
    code_generated = plugin_pb2.CodeGeneratorResponse()
    for k in _files:
        file_desc = code_generated.file.add()
        file_desc.name = k
        file_desc.content = _files[k]
    sys.stdout.write(code_generated.SerializeToString())
# Script entry point: behave as a protoc plugin when executed directly.
if __name__ == "__main__":
    main()
| [
[
1,
0,
0.0289,
0.0022,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0311,
0.0022,
0,
0.66,
0.0417,
79,
0,
1,
0,
0,
79,
0,
0
],
[
1,
0,
0.0333,
0.0022,
0,
0.... | [
"import sys",
"import os.path as path",
"from cStringIO import StringIO",
"import plugin_pb2",
"import google.protobuf.descriptor_pb2 as descriptor_pb2",
"_packages = {}",
"_files = {}",
"_message = {}",
"FDP = plugin_pb2.descriptor_pb2.FieldDescriptorProto",
"if sys.platform == \"win32\":\n im... |
#!/usr/bin/python2
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011 5sing-crawler contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "antiAgainst@gmail.com"
import re
import os.path
#import urllib
import cookielib
import mechanize
import lxml.html.soupparser as sp
# ASCII control characters 0x00-0x08 and 0x0E-0x1F (everything except the
# whitespace controls TAB..CR, 0x09-0x0D) must be stripped before the
# fetched HTML is handed to the parser.
control_chars = list(range(0x00, 0x09)) + list(range(0x0E, 0x20))
chars = [chr(code) for code in control_chars]
ccre = re.compile('[%s]' % ''.join(chars))
class FiveSingClient(object):
    """
    Client for crawling music from 5sing.com

    Usage: create a FiveSingClient(), call login(id, password), then
    crawl_user(user_id, save_dir) to download that user's songs.
    """
    def __init__(self):
        # mechanize Browser does the fetching; configured to look like a
        # regular Firefox so the site serves normal pages.
        self.br = mechanize.Browser()
        # different parameters of the browser
        self.br.set_handle_equiv(True)
        self.br.set_handle_gzip(False)
        self.br.set_handle_redirect(True)
        self.br.set_handle_referer(True)
        # Ignore robots.txt so crawling is not blocked.
        self.br.set_handle_robots(False)
        # print info about http header/response?
        self.br.set_debug_http(False)
        # print info about redirect?
        self.br.set_debug_redirects(False)
        # print debug response?
        self.br.set_debug_responses(False)
        # add firfox-like headers
        self.br.addheaders = [
                ("User-Agent", "Mozilla/5.0 (X11; Linux i686; rv:5.0) Gecko/20100101 Firefox/5.0"),
                ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
                ("Accept-Language", "en-us,en;q=0.5"),
                ("Accept-Charset", "gb18030,utf-8;q=0.7,*;q=0.7")]
    def login(self, id, passwd):
        """Log in to 5sing.com with the given account; the session cookie
        is kept for all later requests."""
        # create cookie for the current user
        cj = cookielib.CookieJar()
        self.br.set_cookiejar(cj)
        # open the login web page
        self.br.open("http://www.5sing.com/login.aspx")
        self.br.select_form(name='ctl00')
        # fill in the form
        self.br["txtUserName"] = id
        self.br["txtPassword"] = passwd
        # submit the form
        self.br.submit()
    def get_music_url(self, start_url):
        """ get a singer's music's name(s) and id(s) """
        self.br.open(start_url)
        body = self.br.response().read()
        #dom = sp.fromstring(body)
        # Strip control characters first -- the parser rejects them.
        dom = sp.fromstring(ccre.sub('', body))
        # N piece of music
        tot = int(dom.xpath('//div[@class="page_list"]/span[1]/text()')[0])
        print "\n" + "-" * 15 + "%d piece(s) of music" % tot + "-" * 15
        current = 0 # how many crawled
        page = 1
        music_dict = {} # music-id <--> music-name dict
        # Walk the paginated song list until all `tot` entries are seen.
        while current < tot:
            print "\n---[request] %s---\n" % (start_url + '?p=%d' % page)
            self.br.open(start_url + '?p=%d' % page)
            body = self.br.response().read()
            #dom = sp.fromstring(body)
            dom = sp.fromstring(ccre.sub('', body))
            dom = dom.xpath('//table[@class="song"]/descendant::tr/td[2]/a')
            for music in dom:
                name = music.get('title')
                url = music.get('href')
                # Song id is the basename of the link, e.g. .../12345.html
                id = url.split('/')[-1].split('.')[0]
                print "[id] %s [name] %s" % (id, name)
                music_dict[id] = name
            current += len(dom)
            page += 1
        return music_dict
    def download_music(self, type, music, prefix):
        """Download each song in `music` (id -> name) of the given type
        ('yc' original / 'fc' cover) into directory `prefix`; songs that
        are not free, or already downloaded, are skipped."""
        for id in music:
            print "---[request] %s---" % (id)
            self.br.open("http://%s.5sing.com/Down.aspx?sid=%s" % (type, id))
            body = self.br.response().read()
            #dom = sp.fromstring(body)
            dom = sp.fromstring(ccre.sub('', body))
            name = dom.xpath('//div[@class="main ft"]/h1/a')[0].text
            p = dom.xpath('//div[@class="main ft"]/p')
            dtype = p[0].xpath('em')
            # Only proceed when the page marks the song as free to download.
            if len(dtype) == 2 and dtype[1].text == u'免费下载':
                singer = p[0].text.split(u':')[1]
                ftype = p[0].xpath('text()')[1].split(u':')[1]
                url = dom.xpath('//a[@class="d_sing"]/@href')[0]
                print "[id]%s [mime]%s [singer]%s" % (id, ftype, singer)
                save_name = '%s-%s.%s' % (name, singer, ftype)
                #save_name = urllib.quote(save_name, safe='=+,;')
                # '/' in a title would be taken as a path separator.
                save_name = save_name.replace('/', '-')
                save_name = "%s/%s" % (prefix, save_name)
                if not os.path.exists(save_name):
                    print '[download]%s' % save_name
                    self.br.retrieve(url, save_name)
                else:
                    print "[info]exists, skip"
    def crawl_user(self, id, prefix):
        """Download all of user `id`'s original ('yc') and cover ('fc')
        songs into directory `prefix`."""
        print "\n" + "-" * 15 + "http://%s.5sing.com/ycsong.aspx" % id + "-" * 15
        md = self.get_music_url('http://%s.5sing.com/YcSong.aspx' % id)
        print
        self.download_music('yc', md, prefix)
        print "\n" + "-" * 15 + "http://%s.5sing.com/fcsong.aspx" % id + "-" * 15
        md = self.get_music_url('http://%s.5sing.com/fcsong.aspx' % id)
        print
        self.download_music('fc', md, prefix)
def demo():
""" demo: get 清漪's music """
import getpass
print "[info]Your 5sing.com account is required to download music"
id = raw_input("[userID]:")
passwd = getpass.getpass("[password]:")
client = FiveSingClient()
client.login(id, passwd)
print "[info]fetching 清漪(3142067)'s music"
client.crawl_user('3142067')
def fetch_multi():
""" fetch many singers' songs """
import getpass
import os
print "[info]Your 5sing.com account is required to download music"
id = raw_input("[userID]:")
passwd = getpass.getpass("[password]:")
client = FiveSingClient()
client.login(id, passwd)
singerid = raw_input("[singerID]:")
while singerid:
if not os.path.isdir(singerid):
os.mkdir(singerid)
client.crawl_user(singerid, singerid)
singerid = raw_input("[singerID]:")
# Run the interactive multi-singer downloader when executed as a script.
if __name__ == "__main__":
    fetch_multi()

# Public API of this module when star-imported.
__all__ = ['FiveSingClient']
| [
[
14,
0,
0.099,
0.0052,
0,
0.66,
0,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.1094,
0.0052,
0,
0.66,
0.0769,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.1146,
0.0052,
0,
0.... | [
"__author__ = \"antiAgainst@gmail.com\"",
"import re",
"import os.path",
"import cookielib",
"import mechanize",
"import lxml.html.soupparser as sp",
"control_chars = [0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06,\n 0x07, 0x08, 0x0E, 0x0F, 0x10, 0x11, 0x12,\n 0x13, 0x14, 0x15, ... |
# Copyright 2011 Tom SF Haines
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
import math
import numpy
import numpy.random
import scipy.stats.distributions
class DecTree:
    """A decision tree: ID3 with the C4.5 extension for continuous attributes.

    Fairly basic - always grows fully (subject to minimum_size) and stores a
    category distribution at every node so it can fall back gracefully when
    classification meets a previously unseen discrete attribute value.
    Training vectors may be weighted, and instances can be pickled. Kept
    deliberately small, as it is not unusual to have millions of nodes in
    memory.
    """
    # One tends to make a lot of these - best to keep 'em small.
    __slots__ = ['dist_cat', 'dist_weight', 'leaf', 'discrete', 'index',
                 'children', 'threshold', 'low', 'high']

    def __init__(self, int_dm, real_dm, cat, weight = None, index = None, rand = None, minimum_size = 1):
        """Build a tree (node) from the given training data.

        int_dm: numpy.int32 matrix of discrete features, one row per exemplar,
            or None if there are none.
        real_dm: numpy.float32 matrix of continuous features, or None. At
            least one of int_dm/real_dm must be provided.
        cat: numpy.int32 vector giving each exemplar's category.
        weight: optional numpy.float32 vector - a continuous repeat count per
            exemplar, so some can count for more. Defaults to all ones.
        index: optional numpy.int32 vector selecting (and possibly repeating)
            the exemplars this node trains on; defaults to all of them. Used
            internally for recursion, and handy for bagging.
        rand: if an int, each node considers only a random subset of that many
            attributes when choosing its split (random-forest style); if None,
            all attributes are considered at every node.
        minimum_size: nodes with no more than this many exemplars become
            leaves - a simple pruning control. The default of 1 disables it.
        """
        # Fill in weight/index defaults - keeps the code below simple. Note
        # the `is None` tests: `==` against a numpy array is an elementwise
        # comparison and does not yield a usable truth value here.
        if weight is None: weight = numpy.ones(cat.shape[0], dtype=numpy.float32)
        if index is None: index = numpy.arange(cat.shape[0], dtype=numpy.int32)

        # Category statistics for this node - needed if it becomes a leaf, or
        # if a discrete split later meets an unseen value and must fall back.
        cats = numpy.unique(cat[index])
        self.dist_cat = numpy.empty(cats.shape[0], dtype=numpy.int32)
        self.dist_weight = numpy.empty(cats.shape[0], dtype=numpy.float32)
        for i,c in enumerate(cats):
            self.dist_cat[i] = c
            self.dist_weight[i] = weight[index[numpy.where(cat[index]==c)]].sum()

        # Decide if it is worth subdividing this node (may change mind later)...
        if self.dist_cat.shape[0]<=1 or index.shape[0]<=minimum_size: self.leaf = True
        else:
            # It's subdivision time!..
            self.leaf = False

            # Select the set of attributes to consider splitting on...
            int_size = int_dm.shape[1] if int_dm is not None else 0
            real_size = real_dm.shape[1] if real_dm is not None else 0
            if rand is None: options = numpy.arange(int_size+real_size)
            else: options = numpy.random.permutation(int_size+real_size)[:rand]

            # Find the option with the lowest post-split entropy...
            ent = self.entropy()
            choice = (None, (ent,))
            for c in options:
                if c<int_size: ch = (c, self.__entropy_discrete(int_dm, c, cat, weight, index))
                else: ch = (c, self.__entropy_continuous(real_dm, c-int_size, cat, weight, index))
                if ch[1][0]<choice[1][0]: choice = ch

            # Only split if it buys at least a tiny entropy improvement...
            if choice[1][0]+1e-5>ent: self.leaf = True
            else:
                # Recurse to build the children, which are just more DecTree
                # nodes - the form depends on discrete versus continuous...
                if choice[0]<int_size:
                    self.discrete = True
                    self.index = choice[0]
                    self.children = []
                    # (items() rather than the Python-2-only iteritems().)
                    for category, c_index in choice[1][1].items():
                        self.children.append((category, DecTree(int_dm, real_dm, cat, weight, c_index, rand, minimum_size)))
                    self.children = tuple(self.children)
                else:
                    self.discrete = False
                    self.index = choice[0] - int_size
                    self.threshold = choice[1][1]
                    self.low = DecTree(int_dm, real_dm, cat, weight, choice[1][2], rand, minimum_size)
                    self.high = DecTree(int_dm, real_dm, cat, weight, choice[1][3], rand, minimum_size)

    def entropy(self):
        """Returns the entropy of the data used to train this node, in nats.

        Really an internal method, exposed in case of rampant curiosity.
        """
        return scipy.stats.distributions.entropy(self.dist_weight)

    def __entropy_discrete(self, int_dm, column, cat, weight, index):
        """Entropy after splitting on a discrete column.

        Returns (entropy, index dict) where the dict maps each attribute value
        to the index array of exemplars carrying that value.
        """
        # Index dictionary: attribute value -> exemplar indices with that value...
        ind_dict = dict()
        values = numpy.unique(int_dm[index,column])
        for v in values: ind_dict[v] = index[numpy.where(int_dm[index,column]==v)]

        # Weighted average of the child entropies...
        entropy = 0.0
        p_div = weight[index].sum()
        for c_index in ind_dict.values():
            c_div = weight[c_index].sum()
            cats = numpy.unique(cat[c_index])
            dist = numpy.empty(cats.shape[0], dtype=numpy.float32)
            for i,c in enumerate(cats):
                dist[i] = weight[c_index[numpy.where(cat[c_index]==c)]].sum()
            entropy += c_div/p_div * scipy.stats.distributions.entropy(dist)

        return (entropy, ind_dict)

    def __entropy_continuous(self, real_dm, column, cat, weight, index):
        """Entropy after the best continuous split on the given column.

        Returns (entropy, split point, low index array, high index array).
        """
        # Copy of the index, sorted by the chosen column's value...
        s_index = index[numpy.argsort(real_dm[index,column])]

        # Cumulative per-category weight, aligned with the sorted order; the
        # candidate split between positions i and i+1 is summarised by row i...
        cats = numpy.unique(cat[index])
        cum = numpy.zeros((s_index.shape[0],cats.shape[0]), dtype=numpy.float32)
        for i,c in enumerate(cats):
            ind = numpy.where(cat[s_index]==c)
            cum[ind,i] += weight[s_index[ind]]
        cum = numpy.cumsum(cum, axis=0)

        # Weighted entropy of each candidate split point. lowFrac is the
        # fraction of total weight below the split (renamed: the original
        # shadowed the `weight` parameter here)...
        entLow = scipy.stats.distributions.entropy(cum[:-1,:].T)
        entHigh = scipy.stats.distributions.entropy((numpy.reshape(cum[-1,:], (1,-1))-cum[:-1,:]).T)
        lowFrac = cum[:-1,:].sum(axis=1) / cum[-1,:].sum()
        ent = lowFrac*entLow + (1.0-lowFrac)*entHigh

        # Pick the best split; threshold is halfway between the straddling values...
        i = numpy.argmin(ent)
        split = 0.5*(real_dm[s_index[i],column] + real_dm[s_index[i+1],column])
        return (ent[i], split, s_index[:i+1], s_index[i+1:])

    def classify(self, int_vec, real_vec):
        """Return the tree's estimated distribution for one exemplar.

        int_vec/real_vec are the discrete and continuous attribute vectors.
        The result is a dictionary (do not modify) mapping category -> summed
        training weight at the reached leaf. Usually only one category will
        be present, though several scenarios can produce more.
        """
        if self.leaf: return self.prob()
        elif self.discrete:
            key = int_vec[self.index]
            for value, child in self.children:
                if value==key:
                    return child.classify(int_vec, real_vec)
            # Previously unseen attribute value - fall back on this node's
            # distribution rather than failing.
            return self.prob()
        else: # It's continuous.
            itsLow = real_vec[self.index]<self.threshold
            if itsLow: return self.low.classify(int_vec, real_vec)
            else: return self.high.classify(int_vec, real_vec)

    def prob(self):
        """Distribution over categories of the training examples that reached
        this node, as a dict category -> weight, with zero entries omitted.
        Weights are summed input weights and are not normalised."""
        ret = dict()
        for i in range(self.dist_cat.shape[0]): ret[self.dist_cat[i]] = self.dist_weight[i]
        return ret

    def size(self):
        """Returns how many nodes make up the tree."""
        if self.leaf: return 1
        elif self.discrete: return 1 + sum(map(lambda c: c[1].size(), self.children))
        else: return 1 + self.low.size() + self.high.size()

    def isLeaf(self):
        """Returns True if it is a leaf node, False otherwise."""
        return self.leaf

    def isDiscrete(self):
        """True if this node decides on a discrete attribute; False for
        continuous nodes and leaves."""
        if self.leaf: return False
        return self.discrete

    def isContinuous(self):
        """True if this node splits a continuous attribute; False for discrete
        nodes and leaves."""
        if self.leaf: return False
        return not self.discrete

    def getIndex(self):
        """Column index (into the discrete or continuous matrix, as
        appropriate) this node decides on, or None for a leaf."""
        if self.leaf: return None
        return self.index

    def getChildren(self):
        """For a discrete node: dict of children keyed by attribute value
        (unseen values absent). None otherwise."""
        if not self.leaf and self.discrete:
            ret = dict()
            for value, child in self.children: ret[value] = child
            return ret
        else: return None

    def getThreshold(self):
        """For a continuous node: the threshold between the low and high
        branches. None otherwise."""
        if not self.leaf and not self.discrete: return self.threshold
        else: return None

    def getLow(self):
        """For a continuous node: the branch taken when the attribute is less
        than the threshold. None otherwise."""
        if not self.leaf and not self.discrete: return self.low
        else: return None

    def getHigh(self):
        """For a continuous node: the branch taken when the attribute is
        greater than or equal to the threshold. None otherwise."""
        if not self.leaf and not self.discrete: return self.high
        else: return None
| [
[
1,
0,
0.0561,
0.0051,
0,
0.66,
0,
526,
0,
1,
0,
0,
526,
0,
0
],
[
1,
0,
0.0612,
0.0051,
0,
0.66,
0.25,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0663,
0.0051,
0,
0.... | [
"import math",
"import numpy",
"import numpy.random",
"import scipy.stats.distributions",
"class DecTree:\n \"\"\"A decision tree, uses id3 with the c4.5 extension for continuous attributes. Fairly basic - always grows fully and stores a distribution of children at every node so it can fallback for previou... |
# -*- coding: utf-8 -*-
# Copyright (c) 2010, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
class ProgBar:
    """Simple console progress bar.

    Object lifetime is meaningful: construction marks the start of processing
    (and prints the bar's outline), destruction marks the end (and prints the
    elapsed time).
    """

    def __init__(self, width = 60, onCallback = None):
        self.start = time.time()
        self.fill = 0
        self.width = width
        self.onCallback = onCallback
        # Print the outline the bar will fill in.
        sys.stdout.write('_' * self.width + '\n')
        sys.stdout.flush()

    def __del__(self):
        # Complete the bar, then report how long the whole run took.
        self.end = time.time()
        self.__show(self.width)
        sys.stdout.write('\nDone - ' + str(self.end - self.start) + ' seconds\n\n')
        sys.stdout.flush()

    def callback(self, nDone, nToDo):
        """Hand this into the callback of methods to get a progress bar.

        Callers invoke it repeatedly with how many units of work are complete
        (nDone) out of the total required (nToDo).
        """
        if self.onCallback:
            self.onCallback()
        done = min(int(float(nDone) * float(self.width) / float(nToDo)), self.width)
        if done > self.fill:
            self.__show(done)

    def __show(self, target):
        # Extend the bar up to `target` columns and remember where we are.
        sys.stdout.write('|' * (target - self.fill))
        sys.stdout.flush()
        self.fill = target
| [
[
1,
0,
0.2941,
0.0196,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.3137,
0.0196,
0,
0.66,
0.5,
654,
0,
1,
0,
0,
654,
0,
0
],
[
3,
0,
0.6863,
0.6078,
0,
0.6... | [
"import sys",
"import time",
"class ProgBar:\n \"\"\"Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops.\"\"\"\n def __init__(self, width = 60, onCallback = None):\n self.start = time.time()\n self.fill = 0\n ... |
# -*- coding: utf-8 -*-
# Code copied from http://opencv.willowgarage.com/wiki/PythonInterface - license unknown, but presumed to be at least as liberal as bsd (The license for opencv.).
import cv
import numpy as np
def cv2array(im):
    """Convert an OpenCV (legacy `cv`) image to a numpy array.

    The result has shape (height, width, nChannels) with the dtype matching
    the image's IPL depth.
    """
    depth2dtype = {
        cv.IPL_DEPTH_8U: 'uint8',
        cv.IPL_DEPTH_8S: 'int8',
        cv.IPL_DEPTH_16U: 'uint16',
        cv.IPL_DEPTH_16S: 'int16',
        cv.IPL_DEPTH_32S: 'int32',
        cv.IPL_DEPTH_32F: 'float32',
        cv.IPL_DEPTH_64F: 'float64',
      }

    # (The original assigned an unused local `arrdtype = im.depth` here - removed.)
    a = np.fromstring(
        im.tostring(),
        dtype=depth2dtype[im.depth],
        count=im.width*im.height*im.nChannels)
    a.shape = (im.height,im.width,im.nChannels)
    return a
def array2cv(a):
    """Convert a numpy array to an OpenCV (legacy `cv`) image, if possible.

    `a` may be (height, width) for a single channel or
    (height, width, channels); its dtype selects the IPL depth.
    """
    dtype2depth = {
        'uint8': cv.IPL_DEPTH_8U,
        'int8': cv.IPL_DEPTH_8S,
        'uint16': cv.IPL_DEPTH_16U,
        'int16': cv.IPL_DEPTH_16S,
        'int32': cv.IPL_DEPTH_32S,
        'float32': cv.IPL_DEPTH_32F,
        'float64': cv.IPL_DEPTH_64F,
      }
    try:
        nChannels = a.shape[2]
    except IndexError:
        # 2D array - treat as a single-channel image. (Was a bare `except:`,
        # which also swallowed KeyboardInterrupt and unrelated errors.)
        nChannels = 1
    cv_im = cv.CreateImageHeader((a.shape[1],a.shape[0]),
                                 dtype2depth[str(a.dtype)],
                                 nChannels)
    cv.SetData(cv_im, a.tostring(),
               a.dtype.itemsize*nChannels*a.shape[1])
    return cv_im
| [
[
1,
0,
0.1296,
0.0185,
0,
0.66,
0,
492,
0,
1,
0,
0,
492,
0,
0
],
[
1,
0,
0.1481,
0.0185,
0,
0.66,
0.3333,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.3889,
0.3519,
0,
... | [
"import cv",
"import numpy as np",
"def cv2array(im):\n \"\"\"Converts a cv array to a numpy array.\"\"\"\n depth2dtype = {\n cv.IPL_DEPTH_8U: 'uint8',\n cv.IPL_DEPTH_8S: 'int8',\n cv.IPL_DEPTH_16U: 'uint16',\n cv.IPL_DEPTH_16S: 'int16',\n cv.IPL_DEPTH_32S: 'int32',",
" \... |
# Copyright (c) 2012, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
# Some basic matrix operations that come in use...
matrix_code = start_cpp() + """
#ifndef MATRIX_CODE
#define MATRIX_CODE
template <typename T>
inline void MemSwap(T * lhs, T * rhs, int count = 1)
{
while(count!=0)
{
T t = *lhs;
*lhs = *rhs;
*rhs = t;
++lhs;
++rhs;
--count;
}
}
// Calculates the determinant - you give it a pointer to the first elment of the array, and its size (It must be square), plus its stride, which would typically be identical to size, which is the default.
template <typename T>
inline T Determinant(T * pos, int size, int stride = -1)
{
if (stride==-1) stride = size;
if (size==1) return pos[0];
else
{
if (size==2) return pos[0]*pos[stride+1] - pos[1]*pos[stride];
else
{
T ret = 0.0;
for (int i=0; i<size; i++)
{
if (i!=0) MemSwap(&pos[0], &pos[stride*i], size-1);
T sub = Determinant(&pos[stride], size-1, stride) * pos[stride*i + size-1];
if ((i+size)%2) ret += sub;
else ret -= sub;
}
for (int i=1; i<size; i++)
{
MemSwap(&pos[(i-1)*stride], &pos[i*stride], size-1);
}
return ret;
}
}
}
// Inverts a square matrix, will fail on singular and very occasionally on
// non-singular matrices, returns true on success. Uses Gauss-Jordan elimination
// with partial pivoting.
// in is the input matrix, out the output matrix, just be aware that the input matrix is trashed.
// You have to provide its size (Its square, obviously.), and optionally a stride if different from size.
template <typename T>
inline bool Inverse(T * in, T * out, int size, int stride = -1)
{
if (stride==-1) stride = size;
for (int r=0; r<size; r++)
{
for (int c=0; c<size; c++)
{
out[r*stride + c] = (c==r)?1.0:0.0;
}
}
for (int r=0; r<size; r++)
{
// Find largest pivot and swap in, fail if best we can get is 0...
T max = in[r*stride + r];
int index = r;
for (int i=r+1; i<size; i++)
{
if (fabs(in[i*stride + r])>fabs(max))
{
max = in[i*stride + r];
index = i;
}
}
if (index!=r)
{
MemSwap(&in[index*stride], &in[r*stride], size);
MemSwap(&out[index*stride], &out[r*stride], size);
}
if (fabs(max-0.0)<1e-6) return false;
// Divide through the entire row...
max = 1.0/max;
in[r*stride + r] = 1.0;
for (int i=r+1; i<size; i++) in[r*stride + i] *= max;
for (int i=0; i<size; i++) out[r*stride + i] *= max;
// Row subtract to generate 0's in the current column, so it matches an identity matrix...
for (int i=0; i<size; i++)
{
if (i==r) continue;
T factor = in[i*stride + r];
in[i*stride + r] = 0.0;
for (int j=r+1; j<size; j++) in[i*stride + j] -= factor * in[r*stride + j];
for (int j=0; j<size; j++) out[i*stride + j] -= factor * out[r*stride + j];
}
}
return true;
}
#endif
"""
| [
[
1,
0,
0.1,
0.0077,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
14,
0,
0.5692,
0.8692,
0,
0.66,
1,
974,
4,
0,
0,
0,
0,
0,
1
]
] | [
"from utils.start_cpp import start_cpp",
"matrix_code = start_cpp() + \"\"\"\n#ifndef MATRIX_CODE\n#define MATRIX_CODE\n\ntemplate <typename T>\ninline void MemSwap(T * lhs, T * rhs, int count = 1)\n{\n while(count!=0)"
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
from utils.numpy_help_cpp import numpy_util_code
# Provides various functions to assist with manipulating python objects from c++ code.
python_obj_code = numpy_util_code + start_cpp() + """
#ifndef PYTHON_OBJ_CODE
#define PYTHON_OBJ_CODE
// Extracts a boolean from an object...
bool GetObjectBoolean(PyObject * obj, const char * name)
{
PyObject * b = PyObject_GetAttrString(obj, name);
bool ret = b!=Py_False;
Py_DECREF(b);
return ret;
}
// Extracts an int from an object...
int GetObjectInt(PyObject * obj, const char * name)
{
PyObject * i = PyObject_GetAttrString(obj, name);
int ret = PyInt_AsLong(i);
Py_DECREF(i);
return ret;
}
// Extracts a float from an object...
float GetObjectFloat(PyObject * obj, const char * name)
{
PyObject * f = PyObject_GetAttrString(obj, name);
float ret = PyFloat_AsDouble(f);
Py_DECREF(f);
return ret;
}
// Extracts an array from an object, returning it as a new[] unsigned char array. You can also pass in a pointer to an int to have the size of the array stored...
unsigned char * GetObjectByte1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
unsigned char * ret = new unsigned char[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Byte1D(nao,i);
Py_DECREF(nao);
return ret;
}
// Extracts an array from an object, returning it as a new[] float array. You can also pass in a pointer to an int to have the size of the array stored...
float * GetObjectFloat1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
float * ret = new float[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Float1D(nao,i);
Py_DECREF(nao);
return ret;
}
#endif
"""
| [
[
1,
0,
0.1875,
0.0125,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
1,
0,
0.2,
0.0125,
0,
0.66,
0.5,
884,
0,
1,
0,
0,
884,
0,
0
],
[
14,
0,
0.6312,
0.75,
0,
0.66,
... | [
"from utils.start_cpp import start_cpp",
"from utils.numpy_help_cpp import numpy_util_code",
"python_obj_code = numpy_util_code + start_cpp() + \"\"\"\n#ifndef PYTHON_OBJ_CODE\n#define PYTHON_OBJ_CODE\n\n// Extracts a boolean from an object...\nbool GetObjectBoolean(PyObject * obj, const char * name)\n{\n PyObj... |
# -*- coding: utf-8 -*-
# Copyright (c) 2010, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
class ProgBar:
    """Simple console progress bar.

    Object lifetime is meaningful: construction marks the start of processing
    (and prints the bar's outline), destruction marks the end (and prints the
    elapsed time).
    """

    def __init__(self, width = 60, onCallback = None):
        self.start = time.time()
        self.fill = 0
        self.width = width
        self.onCallback = onCallback
        # Print the outline the bar will fill in.
        sys.stdout.write('_' * self.width + '\n')
        sys.stdout.flush()

    def __del__(self):
        # Complete the bar, then report how long the whole run took.
        self.end = time.time()
        self.__show(self.width)
        sys.stdout.write('\nDone - ' + str(self.end - self.start) + ' seconds\n\n')
        sys.stdout.flush()

    def callback(self, nDone, nToDo):
        """Hand this into the callback of methods to get a progress bar.

        Callers invoke it repeatedly with how many units of work are complete
        (nDone) out of the total required (nToDo).
        """
        if self.onCallback:
            self.onCallback()
        done = min(int(float(nDone) * float(self.width) / float(nToDo)), self.width)
        if done > self.fill:
            self.__show(done)

    def __show(self, target):
        # Extend the bar up to `target` columns and remember where we are.
        sys.stdout.write('|' * (target - self.fill))
        sys.stdout.flush()
        self.fill = target
| [
[
1,
0,
0.2941,
0.0196,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.3137,
0.0196,
0,
0.66,
0.5,
654,
0,
1,
0,
0,
654,
0,
0
],
[
3,
0,
0.6863,
0.6078,
0,
0.6... | [
"import sys",
"import time",
"class ProgBar:\n \"\"\"Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops.\"\"\"\n def __init__(self, width = 60, onCallback = None):\n self.start = time.time()\n self.fill = 0\n ... |
# -*- coding: utf-8 -*-
# Code copied from http://opencv.willowgarage.com/wiki/PythonInterface - license unknown, but presumed to be at least as liberal as bsd (The license for opencv.).
import cv
import numpy as np
def cv2array(im):
    """Convert an OpenCV (legacy `cv`) image to a numpy array.

    The result has shape (height, width, nChannels) with the dtype matching
    the image's IPL depth.
    """
    depth2dtype = {
        cv.IPL_DEPTH_8U: 'uint8',
        cv.IPL_DEPTH_8S: 'int8',
        cv.IPL_DEPTH_16U: 'uint16',
        cv.IPL_DEPTH_16S: 'int16',
        cv.IPL_DEPTH_32S: 'int32',
        cv.IPL_DEPTH_32F: 'float32',
        cv.IPL_DEPTH_64F: 'float64',
      }

    # (The original assigned an unused local `arrdtype = im.depth` here - removed.)
    a = np.fromstring(
        im.tostring(),
        dtype=depth2dtype[im.depth],
        count=im.width*im.height*im.nChannels)
    a.shape = (im.height,im.width,im.nChannels)
    return a
def array2cv(a):
    """Convert a numpy array to an OpenCV (legacy `cv`) image, if possible.

    `a` may be (height, width) for a single channel or
    (height, width, channels); its dtype selects the IPL depth.
    """
    dtype2depth = {
        'uint8': cv.IPL_DEPTH_8U,
        'int8': cv.IPL_DEPTH_8S,
        'uint16': cv.IPL_DEPTH_16U,
        'int16': cv.IPL_DEPTH_16S,
        'int32': cv.IPL_DEPTH_32S,
        'float32': cv.IPL_DEPTH_32F,
        'float64': cv.IPL_DEPTH_64F,
      }
    try:
        nChannels = a.shape[2]
    except IndexError:
        # 2D array - treat as a single-channel image. (Was a bare `except:`,
        # which also swallowed KeyboardInterrupt and unrelated errors.)
        nChannels = 1
    cv_im = cv.CreateImageHeader((a.shape[1],a.shape[0]),
                                 dtype2depth[str(a.dtype)],
                                 nChannels)
    cv.SetData(cv_im, a.tostring(),
               a.dtype.itemsize*nChannels*a.shape[1])
    return cv_im
| [
[
1,
0,
0.1296,
0.0185,
0,
0.66,
0,
492,
0,
1,
0,
0,
492,
0,
0
],
[
1,
0,
0.1481,
0.0185,
0,
0.66,
0.3333,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.3889,
0.3519,
0,
... | [
"import cv",
"import numpy as np",
"def cv2array(im):\n \"\"\"Converts a cv array to a numpy array.\"\"\"\n depth2dtype = {\n cv.IPL_DEPTH_8U: 'uint8',\n cv.IPL_DEPTH_8S: 'int8',\n cv.IPL_DEPTH_16U: 'uint16',\n cv.IPL_DEPTH_16S: 'int16',\n cv.IPL_DEPTH_32S: 'int32',",
" \... |
# Copyright (c) 2012, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
# Some basic matrix operations that come in use...
matrix_code = start_cpp() + """
#ifndef MATRIX_CODE
#define MATRIX_CODE
template <typename T>
inline void MemSwap(T * lhs, T * rhs, int count = 1)
{
while(count!=0)
{
T t = *lhs;
*lhs = *rhs;
*rhs = t;
++lhs;
++rhs;
--count;
}
}
// Calculates the determinant - you give it a pointer to the first elment of the array, and its size (It must be square), plus its stride, which would typically be identical to size, which is the default.
template <typename T>
inline T Determinant(T * pos, int size, int stride = -1)
{
if (stride==-1) stride = size;
if (size==1) return pos[0];
else
{
if (size==2) return pos[0]*pos[stride+1] - pos[1]*pos[stride];
else
{
T ret = 0.0;
for (int i=0; i<size; i++)
{
if (i!=0) MemSwap(&pos[0], &pos[stride*i], size-1);
T sub = Determinant(&pos[stride], size-1, stride) * pos[stride*i + size-1];
if ((i+size)%2) ret += sub;
else ret -= sub;
}
for (int i=1; i<size; i++)
{
MemSwap(&pos[(i-1)*stride], &pos[i*stride], size-1);
}
return ret;
}
}
}
// Inverts a square matrix, will fail on singular and very occasionally on
// non-singular matrices, returns true on success. Uses Gauss-Jordan elimination
// with partial pivoting.
// in is the input matrix, out the output matrix, just be aware that the input matrix is trashed.
// You have to provide its size (Its square, obviously.), and optionally a stride if different from size.
template <typename T>
inline bool Inverse(T * in, T * out, int size, int stride = -1)
{
if (stride==-1) stride = size;
for (int r=0; r<size; r++)
{
for (int c=0; c<size; c++)
{
out[r*stride + c] = (c==r)?1.0:0.0;
}
}
for (int r=0; r<size; r++)
{
// Find largest pivot and swap in, fail if best we can get is 0...
T max = in[r*stride + r];
int index = r;
for (int i=r+1; i<size; i++)
{
if (fabs(in[i*stride + r])>fabs(max))
{
max = in[i*stride + r];
index = i;
}
}
if (index!=r)
{
MemSwap(&in[index*stride], &in[r*stride], size);
MemSwap(&out[index*stride], &out[r*stride], size);
}
if (fabs(max-0.0)<1e-6) return false;
// Divide through the entire row...
max = 1.0/max;
in[r*stride + r] = 1.0;
for (int i=r+1; i<size; i++) in[r*stride + i] *= max;
for (int i=0; i<size; i++) out[r*stride + i] *= max;
// Row subtract to generate 0's in the current column, so it matches an identity matrix...
for (int i=0; i<size; i++)
{
if (i==r) continue;
T factor = in[i*stride + r];
in[i*stride + r] = 0.0;
for (int j=r+1; j<size; j++) in[i*stride + j] -= factor * in[r*stride + j];
for (int j=0; j<size; j++) out[i*stride + j] -= factor * out[r*stride + j];
}
}
return true;
}
#endif
"""
| [
[
1,
0,
0.1,
0.0077,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
14,
0,
0.5692,
0.8692,
0,
0.66,
1,
974,
4,
0,
0,
0,
0,
0,
1
]
] | [
"from utils.start_cpp import start_cpp",
"matrix_code = start_cpp() + \"\"\"\n#ifndef MATRIX_CODE\n#define MATRIX_CODE\n\ntemplate <typename T>\ninline void MemSwap(T * lhs, T * rhs, int count = 1)\n{\n while(count!=0)"
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
from utils.numpy_help_cpp import numpy_util_code
# Provides various functions to assist with manipulating python objects from c++ code.
python_obj_code = numpy_util_code + start_cpp() + """
#ifndef PYTHON_OBJ_CODE
#define PYTHON_OBJ_CODE
// Extracts a boolean from an object...
bool GetObjectBoolean(PyObject * obj, const char * name)
{
PyObject * b = PyObject_GetAttrString(obj, name);
bool ret = b!=Py_False;
Py_DECREF(b);
return ret;
}
// Extracts an int from an object...
int GetObjectInt(PyObject * obj, const char * name)
{
PyObject * i = PyObject_GetAttrString(obj, name);
int ret = PyInt_AsLong(i);
Py_DECREF(i);
return ret;
}
// Extracts a float from an object...
float GetObjectFloat(PyObject * obj, const char * name)
{
PyObject * f = PyObject_GetAttrString(obj, name);
float ret = PyFloat_AsDouble(f);
Py_DECREF(f);
return ret;
}
// Extracts an array from an object, returning it as a new[] unsigned char array. You can also pass in a pointer to an int to have the size of the array stored...
unsigned char * GetObjectByte1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
unsigned char * ret = new unsigned char[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Byte1D(nao,i);
Py_DECREF(nao);
return ret;
}
// Extracts an array from an object, returning it as a new[] float array. You can also pass in a pointer to an int to have the size of the array stored...
float * GetObjectFloat1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
float * ret = new float[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Float1D(nao,i);
Py_DECREF(nao);
return ret;
}
#endif
"""
| [
[
1,
0,
0.1875,
0.0125,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
1,
0,
0.2,
0.0125,
0,
0.66,
0.5,
884,
0,
1,
0,
0,
884,
0,
0
],
[
14,
0,
0.6312,
0.75,
0,
0.66,
... | [
"from utils.start_cpp import start_cpp",
"from utils.numpy_help_cpp import numpy_util_code",
"python_obj_code = numpy_util_code + start_cpp() + \"\"\"\n#ifndef PYTHON_OBJ_CODE\n#define PYTHON_OBJ_CODE\n\n// Extracts a boolean from an object...\nbool GetObjectBoolean(PyObject * obj, const char * name)\n{\n PyObj... |
# -*- coding: utf-8 -*-
# Copyright (c) 2010, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
class ProgBar:
"""Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops."""
def __init__(self, width = 60, onCallback = None):
self.start = time.time()
self.fill = 0
self.width = width
self.onCallback = onCallback
sys.stdout.write(('_'*self.width)+'\n')
sys.stdout.flush()
def __del__(self):
self.end = time.time()
self.__show(self.width)
sys.stdout.write('\nDone - '+str(self.end-self.start)+' seconds\n\n')
sys.stdout.flush()
def callback(self, nDone, nToDo):
"""Hand this into the callback of methods to get a progress bar - it works by users repeatedly calling it to indicate how many units of work they have done (nDone) out of the total number of units required (nToDo)."""
if self.onCallback:
self.onCallback()
n = int(float(self.width)*float(nDone)/float(nToDo))
n = min((n,self.width))
if n>self.fill:
self.__show(n)
def __show(self,n):
sys.stdout.write('|'*(n-self.fill))
sys.stdout.flush()
self.fill = n
| [
[
1,
0,
0.2941,
0.0196,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.3137,
0.0196,
0,
0.66,
0.5,
654,
0,
1,
0,
0,
654,
0,
0
],
[
3,
0,
0.6863,
0.6078,
0,
0.6... | [
"import sys",
"import time",
"class ProgBar:\n \"\"\"Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops.\"\"\"\n def __init__(self, width = 60, onCallback = None):\n self.start = time.time()\n self.fill = 0\n ... |
# -*- coding: utf-8 -*-
# Code copied from http://opencv.willowgarage.com/wiki/PythonInterface - license unknown, but presumed to be at least as liberal as bsd (The license for opencv.).
import cv
import numpy as np
def cv2array(im):
"""Converts a cv array to a numpy array."""
depth2dtype = {
cv.IPL_DEPTH_8U: 'uint8',
cv.IPL_DEPTH_8S: 'int8',
cv.IPL_DEPTH_16U: 'uint16',
cv.IPL_DEPTH_16S: 'int16',
cv.IPL_DEPTH_32S: 'int32',
cv.IPL_DEPTH_32F: 'float32',
cv.IPL_DEPTH_64F: 'float64',
}
arrdtype=im.depth
a = np.fromstring(
im.tostring(),
dtype=depth2dtype[im.depth],
count=im.width*im.height*im.nChannels)
a.shape = (im.height,im.width,im.nChannels)
return a
def array2cv(a):
"""Converts a numpy array to a cv array, if possible."""
dtype2depth = {
'uint8': cv.IPL_DEPTH_8U,
'int8': cv.IPL_DEPTH_8S,
'uint16': cv.IPL_DEPTH_16U,
'int16': cv.IPL_DEPTH_16S,
'int32': cv.IPL_DEPTH_32S,
'float32': cv.IPL_DEPTH_32F,
'float64': cv.IPL_DEPTH_64F,
}
try:
nChannels = a.shape[2]
except:
nChannels = 1
cv_im = cv.CreateImageHeader((a.shape[1],a.shape[0]),
dtype2depth[str(a.dtype)],
nChannels)
cv.SetData(cv_im, a.tostring(),
a.dtype.itemsize*nChannels*a.shape[1])
return cv_im
| [
[
1,
0,
0.1296,
0.0185,
0,
0.66,
0,
492,
0,
1,
0,
0,
492,
0,
0
],
[
1,
0,
0.1481,
0.0185,
0,
0.66,
0.3333,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.3889,
0.3519,
0,
... | [
"import cv",
"import numpy as np",
"def cv2array(im):\n \"\"\"Converts a cv array to a numpy array.\"\"\"\n depth2dtype = {\n cv.IPL_DEPTH_8U: 'uint8',\n cv.IPL_DEPTH_8S: 'int8',\n cv.IPL_DEPTH_16U: 'uint16',\n cv.IPL_DEPTH_16S: 'int16',\n cv.IPL_DEPTH_32S: 'int32',",
" \... |
# Copyright (c) 2012, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
# Some basic matrix operations that come in use...
matrix_code = start_cpp() + """
#ifndef MATRIX_CODE
#define MATRIX_CODE
template <typename T>
inline void MemSwap(T * lhs, T * rhs, int count = 1)
{
while(count!=0)
{
T t = *lhs;
*lhs = *rhs;
*rhs = t;
++lhs;
++rhs;
--count;
}
}
// Calculates the determinant - you give it a pointer to the first elment of the array, and its size (It must be square), plus its stride, which would typically be identical to size, which is the default.
template <typename T>
inline T Determinant(T * pos, int size, int stride = -1)
{
if (stride==-1) stride = size;
if (size==1) return pos[0];
else
{
if (size==2) return pos[0]*pos[stride+1] - pos[1]*pos[stride];
else
{
T ret = 0.0;
for (int i=0; i<size; i++)
{
if (i!=0) MemSwap(&pos[0], &pos[stride*i], size-1);
T sub = Determinant(&pos[stride], size-1, stride) * pos[stride*i + size-1];
if ((i+size)%2) ret += sub;
else ret -= sub;
}
for (int i=1; i<size; i++)
{
MemSwap(&pos[(i-1)*stride], &pos[i*stride], size-1);
}
return ret;
}
}
}
// Inverts a square matrix, will fail on singular and very occasionally on
// non-singular matrices, returns true on success. Uses Gauss-Jordan elimination
// with partial pivoting.
// in is the input matrix, out the output matrix, just be aware that the input matrix is trashed.
// You have to provide its size (Its square, obviously.), and optionally a stride if different from size.
template <typename T>
inline bool Inverse(T * in, T * out, int size, int stride = -1)
{
if (stride==-1) stride = size;
for (int r=0; r<size; r++)
{
for (int c=0; c<size; c++)
{
out[r*stride + c] = (c==r)?1.0:0.0;
}
}
for (int r=0; r<size; r++)
{
// Find largest pivot and swap in, fail if best we can get is 0...
T max = in[r*stride + r];
int index = r;
for (int i=r+1; i<size; i++)
{
if (fabs(in[i*stride + r])>fabs(max))
{
max = in[i*stride + r];
index = i;
}
}
if (index!=r)
{
MemSwap(&in[index*stride], &in[r*stride], size);
MemSwap(&out[index*stride], &out[r*stride], size);
}
if (fabs(max-0.0)<1e-6) return false;
// Divide through the entire row...
max = 1.0/max;
in[r*stride + r] = 1.0;
for (int i=r+1; i<size; i++) in[r*stride + i] *= max;
for (int i=0; i<size; i++) out[r*stride + i] *= max;
// Row subtract to generate 0's in the current column, so it matches an identity matrix...
for (int i=0; i<size; i++)
{
if (i==r) continue;
T factor = in[i*stride + r];
in[i*stride + r] = 0.0;
for (int j=r+1; j<size; j++) in[i*stride + j] -= factor * in[r*stride + j];
for (int j=0; j<size; j++) out[i*stride + j] -= factor * out[r*stride + j];
}
}
return true;
}
#endif
"""
| [
[
1,
0,
0.1,
0.0077,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
14,
0,
0.5692,
0.8692,
0,
0.66,
1,
974,
4,
0,
0,
0,
0,
0,
1
]
] | [
"from utils.start_cpp import start_cpp",
"matrix_code = start_cpp() + \"\"\"\n#ifndef MATRIX_CODE\n#define MATRIX_CODE\n\ntemplate <typename T>\ninline void MemSwap(T * lhs, T * rhs, int count = 1)\n{\n while(count!=0)"
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
from utils.numpy_help_cpp import numpy_util_code
# Provides various functions to assist with manipulating python objects from c++ code.
python_obj_code = numpy_util_code + start_cpp() + """
#ifndef PYTHON_OBJ_CODE
#define PYTHON_OBJ_CODE
// Extracts a boolean from an object...
bool GetObjectBoolean(PyObject * obj, const char * name)
{
PyObject * b = PyObject_GetAttrString(obj, name);
bool ret = b!=Py_False;
Py_DECREF(b);
return ret;
}
// Extracts an int from an object...
int GetObjectInt(PyObject * obj, const char * name)
{
PyObject * i = PyObject_GetAttrString(obj, name);
int ret = PyInt_AsLong(i);
Py_DECREF(i);
return ret;
}
// Extracts a float from an object...
float GetObjectFloat(PyObject * obj, const char * name)
{
PyObject * f = PyObject_GetAttrString(obj, name);
float ret = PyFloat_AsDouble(f);
Py_DECREF(f);
return ret;
}
// Extracts an array from an object, returning it as a new[] unsigned char array. You can also pass in a pointer to an int to have the size of the array stored...
unsigned char * GetObjectByte1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
unsigned char * ret = new unsigned char[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Byte1D(nao,i);
Py_DECREF(nao);
return ret;
}
// Extracts an array from an object, returning it as a new[] float array. You can also pass in a pointer to an int to have the size of the array stored...
float * GetObjectFloat1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
float * ret = new float[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Float1D(nao,i);
Py_DECREF(nao);
return ret;
}
#endif
"""
| [
[
1,
0,
0.1875,
0.0125,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
1,
0,
0.2,
0.0125,
0,
0.66,
0.5,
884,
0,
1,
0,
0,
884,
0,
0
],
[
14,
0,
0.6312,
0.75,
0,
0.66,
... | [
"from utils.start_cpp import start_cpp",
"from utils.numpy_help_cpp import numpy_util_code",
"python_obj_code = numpy_util_code + start_cpp() + \"\"\"\n#ifndef PYTHON_OBJ_CODE\n#define PYTHON_OBJ_CODE\n\n// Extracts a boolean from an object...\nbool GetObjectBoolean(PyObject * obj, const char * name)\n{\n PyObj... |
# -*- coding: utf-8 -*-
# Copyright (c) 2010, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
class ProgBar:
"""Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops."""
def __init__(self, width = 60, onCallback = None):
self.start = time.time()
self.fill = 0
self.width = width
self.onCallback = onCallback
sys.stdout.write(('_'*self.width)+'\n')
sys.stdout.flush()
def __del__(self):
self.end = time.time()
self.__show(self.width)
sys.stdout.write('\nDone - '+str(self.end-self.start)+' seconds\n\n')
sys.stdout.flush()
def callback(self, nDone, nToDo):
"""Hand this into the callback of methods to get a progress bar - it works by users repeatedly calling it to indicate how many units of work they have done (nDone) out of the total number of units required (nToDo)."""
if self.onCallback:
self.onCallback()
n = int(float(self.width)*float(nDone)/float(nToDo))
n = min((n,self.width))
if n>self.fill:
self.__show(n)
def __show(self,n):
sys.stdout.write('|'*(n-self.fill))
sys.stdout.flush()
self.fill = n
| [
[
1,
0,
0.2941,
0.0196,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.3137,
0.0196,
0,
0.66,
0.5,
654,
0,
1,
0,
0,
654,
0,
0
],
[
3,
0,
0.6863,
0.6078,
0,
0.6... | [
"import sys",
"import time",
"class ProgBar:\n \"\"\"Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops.\"\"\"\n def __init__(self, width = 60, onCallback = None):\n self.start = time.time()\n self.fill = 0\n ... |
# -*- coding: utf-8 -*-
# Code copied from http://opencv.willowgarage.com/wiki/PythonInterface - license unknown, but presumed to be at least as liberal as bsd (The license for opencv.).
import cv
import numpy as np
def cv2array(im):
"""Converts a cv array to a numpy array."""
depth2dtype = {
cv.IPL_DEPTH_8U: 'uint8',
cv.IPL_DEPTH_8S: 'int8',
cv.IPL_DEPTH_16U: 'uint16',
cv.IPL_DEPTH_16S: 'int16',
cv.IPL_DEPTH_32S: 'int32',
cv.IPL_DEPTH_32F: 'float32',
cv.IPL_DEPTH_64F: 'float64',
}
arrdtype=im.depth
a = np.fromstring(
im.tostring(),
dtype=depth2dtype[im.depth],
count=im.width*im.height*im.nChannels)
a.shape = (im.height,im.width,im.nChannels)
return a
def array2cv(a):
"""Converts a numpy array to a cv array, if possible."""
dtype2depth = {
'uint8': cv.IPL_DEPTH_8U,
'int8': cv.IPL_DEPTH_8S,
'uint16': cv.IPL_DEPTH_16U,
'int16': cv.IPL_DEPTH_16S,
'int32': cv.IPL_DEPTH_32S,
'float32': cv.IPL_DEPTH_32F,
'float64': cv.IPL_DEPTH_64F,
}
try:
nChannels = a.shape[2]
except:
nChannels = 1
cv_im = cv.CreateImageHeader((a.shape[1],a.shape[0]),
dtype2depth[str(a.dtype)],
nChannels)
cv.SetData(cv_im, a.tostring(),
a.dtype.itemsize*nChannels*a.shape[1])
return cv_im
| [
[
1,
0,
0.1296,
0.0185,
0,
0.66,
0,
492,
0,
1,
0,
0,
492,
0,
0
],
[
1,
0,
0.1481,
0.0185,
0,
0.66,
0.3333,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.3889,
0.3519,
0,
... | [
"import cv",
"import numpy as np",
"def cv2array(im):\n \"\"\"Converts a cv array to a numpy array.\"\"\"\n depth2dtype = {\n cv.IPL_DEPTH_8U: 'uint8',\n cv.IPL_DEPTH_8S: 'int8',\n cv.IPL_DEPTH_16U: 'uint16',\n cv.IPL_DEPTH_16S: 'int16',\n cv.IPL_DEPTH_32S: 'int32',",
" \... |
# Copyright (c) 2012, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
# Some basic matrix operations that come in use...
matrix_code = start_cpp() + """
#ifndef MATRIX_CODE
#define MATRIX_CODE
template <typename T>
inline void MemSwap(T * lhs, T * rhs, int count = 1)
{
while(count!=0)
{
T t = *lhs;
*lhs = *rhs;
*rhs = t;
++lhs;
++rhs;
--count;
}
}
// Calculates the determinant - you give it a pointer to the first elment of the array, and its size (It must be square), plus its stride, which would typically be identical to size, which is the default.
template <typename T>
inline T Determinant(T * pos, int size, int stride = -1)
{
if (stride==-1) stride = size;
if (size==1) return pos[0];
else
{
if (size==2) return pos[0]*pos[stride+1] - pos[1]*pos[stride];
else
{
T ret = 0.0;
for (int i=0; i<size; i++)
{
if (i!=0) MemSwap(&pos[0], &pos[stride*i], size-1);
T sub = Determinant(&pos[stride], size-1, stride) * pos[stride*i + size-1];
if ((i+size)%2) ret += sub;
else ret -= sub;
}
for (int i=1; i<size; i++)
{
MemSwap(&pos[(i-1)*stride], &pos[i*stride], size-1);
}
return ret;
}
}
}
// Inverts a square matrix, will fail on singular and very occasionally on
// non-singular matrices, returns true on success. Uses Gauss-Jordan elimination
// with partial pivoting.
// in is the input matrix, out the output matrix, just be aware that the input matrix is trashed.
// You have to provide its size (Its square, obviously.), and optionally a stride if different from size.
template <typename T>
inline bool Inverse(T * in, T * out, int size, int stride = -1)
{
if (stride==-1) stride = size;
for (int r=0; r<size; r++)
{
for (int c=0; c<size; c++)
{
out[r*stride + c] = (c==r)?1.0:0.0;
}
}
for (int r=0; r<size; r++)
{
// Find largest pivot and swap in, fail if best we can get is 0...
T max = in[r*stride + r];
int index = r;
for (int i=r+1; i<size; i++)
{
if (fabs(in[i*stride + r])>fabs(max))
{
max = in[i*stride + r];
index = i;
}
}
if (index!=r)
{
MemSwap(&in[index*stride], &in[r*stride], size);
MemSwap(&out[index*stride], &out[r*stride], size);
}
if (fabs(max-0.0)<1e-6) return false;
// Divide through the entire row...
max = 1.0/max;
in[r*stride + r] = 1.0;
for (int i=r+1; i<size; i++) in[r*stride + i] *= max;
for (int i=0; i<size; i++) out[r*stride + i] *= max;
// Row subtract to generate 0's in the current column, so it matches an identity matrix...
for (int i=0; i<size; i++)
{
if (i==r) continue;
T factor = in[i*stride + r];
in[i*stride + r] = 0.0;
for (int j=r+1; j<size; j++) in[i*stride + j] -= factor * in[r*stride + j];
for (int j=0; j<size; j++) out[i*stride + j] -= factor * out[r*stride + j];
}
}
return true;
}
#endif
"""
| [
[
1,
0,
0.1,
0.0077,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
14,
0,
0.5692,
0.8692,
0,
0.66,
1,
974,
4,
0,
0,
0,
0,
0,
1
]
] | [
"from utils.start_cpp import start_cpp",
"matrix_code = start_cpp() + \"\"\"\n#ifndef MATRIX_CODE\n#define MATRIX_CODE\n\ntemplate <typename T>\ninline void MemSwap(T * lhs, T * rhs, int count = 1)\n{\n while(count!=0)"
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
from utils.numpy_help_cpp import numpy_util_code
# Provides various functions to assist with manipulating python objects from c++ code.
python_obj_code = numpy_util_code + start_cpp() + """
#ifndef PYTHON_OBJ_CODE
#define PYTHON_OBJ_CODE
// Extracts a boolean from an object...
bool GetObjectBoolean(PyObject * obj, const char * name)
{
PyObject * b = PyObject_GetAttrString(obj, name);
bool ret = b!=Py_False;
Py_DECREF(b);
return ret;
}
// Extracts an int from an object...
int GetObjectInt(PyObject * obj, const char * name)
{
PyObject * i = PyObject_GetAttrString(obj, name);
int ret = PyInt_AsLong(i);
Py_DECREF(i);
return ret;
}
// Extracts a float from an object...
float GetObjectFloat(PyObject * obj, const char * name)
{
PyObject * f = PyObject_GetAttrString(obj, name);
float ret = PyFloat_AsDouble(f);
Py_DECREF(f);
return ret;
}
// Extracts an array from an object, returning it as a new[] unsigned char array. You can also pass in a pointer to an int to have the size of the array stored...
unsigned char * GetObjectByte1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
unsigned char * ret = new unsigned char[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Byte1D(nao,i);
Py_DECREF(nao);
return ret;
}
// Extracts an array from an object, returning it as a new[] float array. You can also pass in a pointer to an int to have the size of the array stored...
float * GetObjectFloat1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
float * ret = new float[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Float1D(nao,i);
Py_DECREF(nao);
return ret;
}
#endif
"""
| [
[
1,
0,
0.1875,
0.0125,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
1,
0,
0.2,
0.0125,
0,
0.66,
0.5,
884,
0,
1,
0,
0,
884,
0,
0
],
[
14,
0,
0.6312,
0.75,
0,
0.66,
... | [
"from utils.start_cpp import start_cpp",
"from utils.numpy_help_cpp import numpy_util_code",
"python_obj_code = numpy_util_code + start_cpp() + \"\"\"\n#ifndef PYTHON_OBJ_CODE\n#define PYTHON_OBJ_CODE\n\n// Extracts a boolean from an object...\nbool GetObjectBoolean(PyObject * obj, const char * name)\n{\n PyObj... |
# -*- coding: utf-8 -*-
# Copyright (c) 2010, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
class ProgBar:
"""Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops."""
def __init__(self, width = 60, onCallback = None):
self.start = time.time()
self.fill = 0
self.width = width
self.onCallback = onCallback
sys.stdout.write(('_'*self.width)+'\n')
sys.stdout.flush()
def __del__(self):
self.end = time.time()
self.__show(self.width)
sys.stdout.write('\nDone - '+str(self.end-self.start)+' seconds\n\n')
sys.stdout.flush()
def callback(self, nDone, nToDo):
"""Hand this into the callback of methods to get a progress bar - it works by users repeatedly calling it to indicate how many units of work they have done (nDone) out of the total number of units required (nToDo)."""
if self.onCallback:
self.onCallback()
n = int(float(self.width)*float(nDone)/float(nToDo))
n = min((n,self.width))
if n>self.fill:
self.__show(n)
def __show(self,n):
sys.stdout.write('|'*(n-self.fill))
sys.stdout.flush()
self.fill = n
| [
[
1,
0,
0.2941,
0.0196,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.3137,
0.0196,
0,
0.66,
0.5,
654,
0,
1,
0,
0,
654,
0,
0
],
[
3,
0,
0.6863,
0.6078,
0,
0.6... | [
"import sys",
"import time",
"class ProgBar:\n \"\"\"Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops.\"\"\"\n def __init__(self, width = 60, onCallback = None):\n self.start = time.time()\n self.fill = 0\n ... |
# -*- coding: utf-8 -*-
# Code copied from http://opencv.willowgarage.com/wiki/PythonInterface - license unknown, but presumed to be at least as liberal as bsd (The license for opencv.).
import cv
import numpy as np
def cv2array(im):
"""Converts a cv array to a numpy array."""
depth2dtype = {
cv.IPL_DEPTH_8U: 'uint8',
cv.IPL_DEPTH_8S: 'int8',
cv.IPL_DEPTH_16U: 'uint16',
cv.IPL_DEPTH_16S: 'int16',
cv.IPL_DEPTH_32S: 'int32',
cv.IPL_DEPTH_32F: 'float32',
cv.IPL_DEPTH_64F: 'float64',
}
arrdtype=im.depth
a = np.fromstring(
im.tostring(),
dtype=depth2dtype[im.depth],
count=im.width*im.height*im.nChannels)
a.shape = (im.height,im.width,im.nChannels)
return a
def array2cv(a):
"""Converts a numpy array to a cv array, if possible."""
dtype2depth = {
'uint8': cv.IPL_DEPTH_8U,
'int8': cv.IPL_DEPTH_8S,
'uint16': cv.IPL_DEPTH_16U,
'int16': cv.IPL_DEPTH_16S,
'int32': cv.IPL_DEPTH_32S,
'float32': cv.IPL_DEPTH_32F,
'float64': cv.IPL_DEPTH_64F,
}
try:
nChannels = a.shape[2]
except:
nChannels = 1
cv_im = cv.CreateImageHeader((a.shape[1],a.shape[0]),
dtype2depth[str(a.dtype)],
nChannels)
cv.SetData(cv_im, a.tostring(),
a.dtype.itemsize*nChannels*a.shape[1])
return cv_im
| [
[
1,
0,
0.1296,
0.0185,
0,
0.66,
0,
492,
0,
1,
0,
0,
492,
0,
0
],
[
1,
0,
0.1481,
0.0185,
0,
0.66,
0.3333,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.3889,
0.3519,
0,
... | [
"import cv",
"import numpy as np",
"def cv2array(im):\n \"\"\"Converts a cv array to a numpy array.\"\"\"\n depth2dtype = {\n cv.IPL_DEPTH_8U: 'uint8',\n cv.IPL_DEPTH_8S: 'int8',\n cv.IPL_DEPTH_16U: 'uint16',\n cv.IPL_DEPTH_16S: 'int16',\n cv.IPL_DEPTH_32S: 'int32',",
" \... |
# Copyright (c) 2012, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
# Some basic matrix operations that come in use...
matrix_code = start_cpp() + """
#ifndef MATRIX_CODE
#define MATRIX_CODE
template <typename T>
inline void MemSwap(T * lhs, T * rhs, int count = 1)
{
while(count!=0)
{
T t = *lhs;
*lhs = *rhs;
*rhs = t;
++lhs;
++rhs;
--count;
}
}
// Calculates the determinant - you give it a pointer to the first elment of the array, and its size (It must be square), plus its stride, which would typically be identical to size, which is the default.
template <typename T>
inline T Determinant(T * pos, int size, int stride = -1)
{
if (stride==-1) stride = size;
if (size==1) return pos[0];
else
{
if (size==2) return pos[0]*pos[stride+1] - pos[1]*pos[stride];
else
{
T ret = 0.0;
for (int i=0; i<size; i++)
{
if (i!=0) MemSwap(&pos[0], &pos[stride*i], size-1);
T sub = Determinant(&pos[stride], size-1, stride) * pos[stride*i + size-1];
if ((i+size)%2) ret += sub;
else ret -= sub;
}
for (int i=1; i<size; i++)
{
MemSwap(&pos[(i-1)*stride], &pos[i*stride], size-1);
}
return ret;
}
}
}
// Inverts a square matrix, will fail on singular and very occasionally on
// non-singular matrices, returns true on success. Uses Gauss-Jordan elimination
// with partial pivoting.
// in is the input matrix, out the output matrix, just be aware that the input matrix is trashed.
// You have to provide its size (Its square, obviously.), and optionally a stride if different from size.
template <typename T>
inline bool Inverse(T * in, T * out, int size, int stride = -1)
{
if (stride==-1) stride = size;
for (int r=0; r<size; r++)
{
for (int c=0; c<size; c++)
{
out[r*stride + c] = (c==r)?1.0:0.0;
}
}
for (int r=0; r<size; r++)
{
// Find largest pivot and swap in, fail if best we can get is 0...
T max = in[r*stride + r];
int index = r;
for (int i=r+1; i<size; i++)
{
if (fabs(in[i*stride + r])>fabs(max))
{
max = in[i*stride + r];
index = i;
}
}
if (index!=r)
{
MemSwap(&in[index*stride], &in[r*stride], size);
MemSwap(&out[index*stride], &out[r*stride], size);
}
if (fabs(max-0.0)<1e-6) return false;
// Divide through the entire row...
max = 1.0/max;
in[r*stride + r] = 1.0;
for (int i=r+1; i<size; i++) in[r*stride + i] *= max;
for (int i=0; i<size; i++) out[r*stride + i] *= max;
// Row subtract to generate 0's in the current column, so it matches an identity matrix...
for (int i=0; i<size; i++)
{
if (i==r) continue;
T factor = in[i*stride + r];
in[i*stride + r] = 0.0;
for (int j=r+1; j<size; j++) in[i*stride + j] -= factor * in[r*stride + j];
for (int j=0; j<size; j++) out[i*stride + j] -= factor * out[r*stride + j];
}
}
return true;
}
#endif
"""
| [
[
1,
0,
0.1,
0.0077,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
14,
0,
0.5692,
0.8692,
0,
0.66,
1,
974,
4,
0,
0,
0,
0,
0,
1
]
] | [
"from utils.start_cpp import start_cpp",
"matrix_code = start_cpp() + \"\"\"\n#ifndef MATRIX_CODE\n#define MATRIX_CODE\n\ntemplate <typename T>\ninline void MemSwap(T * lhs, T * rhs, int count = 1)\n{\n while(count!=0)"
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
from utils.numpy_help_cpp import numpy_util_code
# Provides various functions to assist with manipulating python objects from c++ code.
python_obj_code = numpy_util_code + start_cpp() + """
#ifndef PYTHON_OBJ_CODE
#define PYTHON_OBJ_CODE
// Extracts a boolean from an object...
bool GetObjectBoolean(PyObject * obj, const char * name)
{
PyObject * b = PyObject_GetAttrString(obj, name);
bool ret = b!=Py_False;
Py_DECREF(b);
return ret;
}
// Extracts an int from an object...
int GetObjectInt(PyObject * obj, const char * name)
{
PyObject * i = PyObject_GetAttrString(obj, name);
int ret = PyInt_AsLong(i);
Py_DECREF(i);
return ret;
}
// Extracts a float from an object...
float GetObjectFloat(PyObject * obj, const char * name)
{
PyObject * f = PyObject_GetAttrString(obj, name);
float ret = PyFloat_AsDouble(f);
Py_DECREF(f);
return ret;
}
// Extracts an array from an object, returning it as a new[] unsigned char array. You can also pass in a pointer to an int to have the size of the array stored...
unsigned char * GetObjectByte1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
unsigned char * ret = new unsigned char[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Byte1D(nao,i);
Py_DECREF(nao);
return ret;
}
// Extracts an array from an object, returning it as a new[] float array. You can also pass in a pointer to an int to have the size of the array stored...
float * GetObjectFloat1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
float * ret = new float[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Float1D(nao,i);
Py_DECREF(nao);
return ret;
}
#endif
"""
| [
[
1,
0,
0.1875,
0.0125,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
1,
0,
0.2,
0.0125,
0,
0.66,
0.5,
884,
0,
1,
0,
0,
884,
0,
0
],
[
14,
0,
0.6312,
0.75,
0,
0.66,
... | [
"from utils.start_cpp import start_cpp",
"from utils.numpy_help_cpp import numpy_util_code",
"python_obj_code = numpy_util_code + start_cpp() + \"\"\"\n#ifndef PYTHON_OBJ_CODE\n#define PYTHON_OBJ_CODE\n\n// Extracts a boolean from an object...\nbool GetObjectBoolean(PyObject * obj, const char * name)\n{\n PyObj... |
# -*- coding: utf-8 -*-
# Copyright (c) 2010, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
class ProgBar:
"""Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops."""
def __init__(self, width = 60, onCallback = None):
self.start = time.time()
self.fill = 0
self.width = width
self.onCallback = onCallback
sys.stdout.write(('_'*self.width)+'\n')
sys.stdout.flush()
def __del__(self):
self.end = time.time()
self.__show(self.width)
sys.stdout.write('\nDone - '+str(self.end-self.start)+' seconds\n\n')
sys.stdout.flush()
def callback(self, nDone, nToDo):
"""Hand this into the callback of methods to get a progress bar - it works by users repeatedly calling it to indicate how many units of work they have done (nDone) out of the total number of units required (nToDo)."""
if self.onCallback:
self.onCallback()
n = int(float(self.width)*float(nDone)/float(nToDo))
n = min((n,self.width))
if n>self.fill:
self.__show(n)
def __show(self,n):
sys.stdout.write('|'*(n-self.fill))
sys.stdout.flush()
self.fill = n
| [
[
1,
0,
0.2941,
0.0196,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.3137,
0.0196,
0,
0.66,
0.5,
654,
0,
1,
0,
0,
654,
0,
0
],
[
3,
0,
0.6863,
0.6078,
0,
0.6... | [
"import sys",
"import time",
"class ProgBar:\n \"\"\"Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops.\"\"\"\n def __init__(self, width = 60, onCallback = None):\n self.start = time.time()\n self.fill = 0\n ... |
# -*- coding: utf-8 -*-
# Code copied from http://opencv.willowgarage.com/wiki/PythonInterface - license unknown, but presumed to be at least as liberal as bsd (The license for opencv.).
import cv
import numpy as np
def cv2array(im):
"""Converts a cv array to a numpy array."""
depth2dtype = {
cv.IPL_DEPTH_8U: 'uint8',
cv.IPL_DEPTH_8S: 'int8',
cv.IPL_DEPTH_16U: 'uint16',
cv.IPL_DEPTH_16S: 'int16',
cv.IPL_DEPTH_32S: 'int32',
cv.IPL_DEPTH_32F: 'float32',
cv.IPL_DEPTH_64F: 'float64',
}
arrdtype=im.depth
a = np.fromstring(
im.tostring(),
dtype=depth2dtype[im.depth],
count=im.width*im.height*im.nChannels)
a.shape = (im.height,im.width,im.nChannels)
return a
def array2cv(a):
"""Converts a numpy array to a cv array, if possible."""
dtype2depth = {
'uint8': cv.IPL_DEPTH_8U,
'int8': cv.IPL_DEPTH_8S,
'uint16': cv.IPL_DEPTH_16U,
'int16': cv.IPL_DEPTH_16S,
'int32': cv.IPL_DEPTH_32S,
'float32': cv.IPL_DEPTH_32F,
'float64': cv.IPL_DEPTH_64F,
}
try:
nChannels = a.shape[2]
except:
nChannels = 1
cv_im = cv.CreateImageHeader((a.shape[1],a.shape[0]),
dtype2depth[str(a.dtype)],
nChannels)
cv.SetData(cv_im, a.tostring(),
a.dtype.itemsize*nChannels*a.shape[1])
return cv_im
| [
[
1,
0,
0.1296,
0.0185,
0,
0.66,
0,
492,
0,
1,
0,
0,
492,
0,
0
],
[
1,
0,
0.1481,
0.0185,
0,
0.66,
0.3333,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.3889,
0.3519,
0,
... | [
"import cv",
"import numpy as np",
"def cv2array(im):\n \"\"\"Converts a cv array to a numpy array.\"\"\"\n depth2dtype = {\n cv.IPL_DEPTH_8U: 'uint8',\n cv.IPL_DEPTH_8S: 'int8',\n cv.IPL_DEPTH_16U: 'uint16',\n cv.IPL_DEPTH_16S: 'int16',\n cv.IPL_DEPTH_32S: 'int32',",
" \... |
# Copyright (c) 2012, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
# Some basic matrix operations that come in use...
matrix_code = start_cpp() + """
#ifndef MATRIX_CODE
#define MATRIX_CODE
template <typename T>
inline void MemSwap(T * lhs, T * rhs, int count = 1)
{
while(count!=0)
{
T t = *lhs;
*lhs = *rhs;
*rhs = t;
++lhs;
++rhs;
--count;
}
}
// Calculates the determinant - you give it a pointer to the first elment of the array, and its size (It must be square), plus its stride, which would typically be identical to size, which is the default.
template <typename T>
inline T Determinant(T * pos, int size, int stride = -1)
{
if (stride==-1) stride = size;
if (size==1) return pos[0];
else
{
if (size==2) return pos[0]*pos[stride+1] - pos[1]*pos[stride];
else
{
T ret = 0.0;
for (int i=0; i<size; i++)
{
if (i!=0) MemSwap(&pos[0], &pos[stride*i], size-1);
T sub = Determinant(&pos[stride], size-1, stride) * pos[stride*i + size-1];
if ((i+size)%2) ret += sub;
else ret -= sub;
}
for (int i=1; i<size; i++)
{
MemSwap(&pos[(i-1)*stride], &pos[i*stride], size-1);
}
return ret;
}
}
}
// Inverts a square matrix, will fail on singular and very occasionally on
// non-singular matrices, returns true on success. Uses Gauss-Jordan elimination
// with partial pivoting.
// in is the input matrix, out the output matrix, just be aware that the input matrix is trashed.
// You have to provide its size (Its square, obviously.), and optionally a stride if different from size.
template <typename T>
inline bool Inverse(T * in, T * out, int size, int stride = -1)
{
if (stride==-1) stride = size;
for (int r=0; r<size; r++)
{
for (int c=0; c<size; c++)
{
out[r*stride + c] = (c==r)?1.0:0.0;
}
}
for (int r=0; r<size; r++)
{
// Find largest pivot and swap in, fail if best we can get is 0...
T max = in[r*stride + r];
int index = r;
for (int i=r+1; i<size; i++)
{
if (fabs(in[i*stride + r])>fabs(max))
{
max = in[i*stride + r];
index = i;
}
}
if (index!=r)
{
MemSwap(&in[index*stride], &in[r*stride], size);
MemSwap(&out[index*stride], &out[r*stride], size);
}
if (fabs(max-0.0)<1e-6) return false;
// Divide through the entire row...
max = 1.0/max;
in[r*stride + r] = 1.0;
for (int i=r+1; i<size; i++) in[r*stride + i] *= max;
for (int i=0; i<size; i++) out[r*stride + i] *= max;
// Row subtract to generate 0's in the current column, so it matches an identity matrix...
for (int i=0; i<size; i++)
{
if (i==r) continue;
T factor = in[i*stride + r];
in[i*stride + r] = 0.0;
for (int j=r+1; j<size; j++) in[i*stride + j] -= factor * in[r*stride + j];
for (int j=0; j<size; j++) out[i*stride + j] -= factor * out[r*stride + j];
}
}
return true;
}
#endif
"""
| [
[
1,
0,
0.1,
0.0077,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
14,
0,
0.5692,
0.8692,
0,
0.66,
1,
974,
4,
0,
0,
0,
0,
0,
1
]
] | [
"from utils.start_cpp import start_cpp",
"matrix_code = start_cpp() + \"\"\"\n#ifndef MATRIX_CODE\n#define MATRIX_CODE\n\ntemplate <typename T>\ninline void MemSwap(T * lhs, T * rhs, int count = 1)\n{\n while(count!=0)"
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
from utils.numpy_help_cpp import numpy_util_code
# Provides various functions to assist with manipulating python objects from c++ code.
python_obj_code = numpy_util_code + start_cpp() + """
#ifndef PYTHON_OBJ_CODE
#define PYTHON_OBJ_CODE
// Extracts a boolean from an object...
bool GetObjectBoolean(PyObject * obj, const char * name)
{
PyObject * b = PyObject_GetAttrString(obj, name);
bool ret = b!=Py_False;
Py_DECREF(b);
return ret;
}
// Extracts an int from an object...
int GetObjectInt(PyObject * obj, const char * name)
{
PyObject * i = PyObject_GetAttrString(obj, name);
int ret = PyInt_AsLong(i);
Py_DECREF(i);
return ret;
}
// Extracts a float from an object...
float GetObjectFloat(PyObject * obj, const char * name)
{
PyObject * f = PyObject_GetAttrString(obj, name);
float ret = PyFloat_AsDouble(f);
Py_DECREF(f);
return ret;
}
// Extracts an array from an object, returning it as a new[] unsigned char array. You can also pass in a pointer to an int to have the size of the array stored...
unsigned char * GetObjectByte1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
unsigned char * ret = new unsigned char[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Byte1D(nao,i);
Py_DECREF(nao);
return ret;
}
// Extracts an array from an object, returning it as a new[] float array. You can also pass in a pointer to an int to have the size of the array stored...
float * GetObjectFloat1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
float * ret = new float[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Float1D(nao,i);
Py_DECREF(nao);
return ret;
}
#endif
"""
| [
[
1,
0,
0.1875,
0.0125,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
1,
0,
0.2,
0.0125,
0,
0.66,
0.5,
884,
0,
1,
0,
0,
884,
0,
0
],
[
14,
0,
0.6312,
0.75,
0,
0.66,
... | [
"from utils.start_cpp import start_cpp",
"from utils.numpy_help_cpp import numpy_util_code",
"python_obj_code = numpy_util_code + start_cpp() + \"\"\"\n#ifndef PYTHON_OBJ_CODE\n#define PYTHON_OBJ_CODE\n\n// Extracts a boolean from an object...\nbool GetObjectBoolean(PyObject * obj, const char * name)\n{\n PyObj... |
# -*- coding: utf-8 -*-
# Copyright (c) 2010, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
class ProgBar:
"""Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops."""
def __init__(self, width = 60, onCallback = None):
self.start = time.time()
self.fill = 0
self.width = width
self.onCallback = onCallback
sys.stdout.write(('_'*self.width)+'\n')
sys.stdout.flush()
def __del__(self):
self.end = time.time()
self.__show(self.width)
sys.stdout.write('\nDone - '+str(self.end-self.start)+' seconds\n\n')
sys.stdout.flush()
def callback(self, nDone, nToDo):
"""Hand this into the callback of methods to get a progress bar - it works by users repeatedly calling it to indicate how many units of work they have done (nDone) out of the total number of units required (nToDo)."""
if self.onCallback:
self.onCallback()
n = int(float(self.width)*float(nDone)/float(nToDo))
n = min((n,self.width))
if n>self.fill:
self.__show(n)
def __show(self,n):
sys.stdout.write('|'*(n-self.fill))
sys.stdout.flush()
self.fill = n
| [
[
1,
0,
0.2941,
0.0196,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.3137,
0.0196,
0,
0.66,
0.5,
654,
0,
1,
0,
0,
654,
0,
0
],
[
3,
0,
0.6863,
0.6078,
0,
0.6... | [
"import sys",
"import time",
"class ProgBar:\n \"\"\"Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops.\"\"\"\n def __init__(self, width = 60, onCallback = None):\n self.start = time.time()\n self.fill = 0\n ... |
# -*- coding: utf-8 -*-
# Code copied from http://opencv.willowgarage.com/wiki/PythonInterface - license unknown, but presumed to be at least as liberal as bsd (The license for opencv.).
import cv
import numpy as np
def cv2array(im):
"""Converts a cv array to a numpy array."""
depth2dtype = {
cv.IPL_DEPTH_8U: 'uint8',
cv.IPL_DEPTH_8S: 'int8',
cv.IPL_DEPTH_16U: 'uint16',
cv.IPL_DEPTH_16S: 'int16',
cv.IPL_DEPTH_32S: 'int32',
cv.IPL_DEPTH_32F: 'float32',
cv.IPL_DEPTH_64F: 'float64',
}
arrdtype=im.depth
a = np.fromstring(
im.tostring(),
dtype=depth2dtype[im.depth],
count=im.width*im.height*im.nChannels)
a.shape = (im.height,im.width,im.nChannels)
return a
def array2cv(a):
"""Converts a numpy array to a cv array, if possible."""
dtype2depth = {
'uint8': cv.IPL_DEPTH_8U,
'int8': cv.IPL_DEPTH_8S,
'uint16': cv.IPL_DEPTH_16U,
'int16': cv.IPL_DEPTH_16S,
'int32': cv.IPL_DEPTH_32S,
'float32': cv.IPL_DEPTH_32F,
'float64': cv.IPL_DEPTH_64F,
}
try:
nChannels = a.shape[2]
except:
nChannels = 1
cv_im = cv.CreateImageHeader((a.shape[1],a.shape[0]),
dtype2depth[str(a.dtype)],
nChannels)
cv.SetData(cv_im, a.tostring(),
a.dtype.itemsize*nChannels*a.shape[1])
return cv_im
| [
[
1,
0,
0.1296,
0.0185,
0,
0.66,
0,
492,
0,
1,
0,
0,
492,
0,
0
],
[
1,
0,
0.1481,
0.0185,
0,
0.66,
0.3333,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.3889,
0.3519,
0,
... | [
"import cv",
"import numpy as np",
"def cv2array(im):\n \"\"\"Converts a cv array to a numpy array.\"\"\"\n depth2dtype = {\n cv.IPL_DEPTH_8U: 'uint8',\n cv.IPL_DEPTH_8S: 'int8',\n cv.IPL_DEPTH_16U: 'uint16',\n cv.IPL_DEPTH_16S: 'int16',\n cv.IPL_DEPTH_32S: 'int32',",
" \... |
# Copyright (c) 2012, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
# Some basic matrix operations that come in use...
matrix_code = start_cpp() + """
#ifndef MATRIX_CODE
#define MATRIX_CODE
template <typename T>
inline void MemSwap(T * lhs, T * rhs, int count = 1)
{
while(count!=0)
{
T t = *lhs;
*lhs = *rhs;
*rhs = t;
++lhs;
++rhs;
--count;
}
}
// Calculates the determinant - you give it a pointer to the first elment of the array, and its size (It must be square), plus its stride, which would typically be identical to size, which is the default.
template <typename T>
inline T Determinant(T * pos, int size, int stride = -1)
{
if (stride==-1) stride = size;
if (size==1) return pos[0];
else
{
if (size==2) return pos[0]*pos[stride+1] - pos[1]*pos[stride];
else
{
T ret = 0.0;
for (int i=0; i<size; i++)
{
if (i!=0) MemSwap(&pos[0], &pos[stride*i], size-1);
T sub = Determinant(&pos[stride], size-1, stride) * pos[stride*i + size-1];
if ((i+size)%2) ret += sub;
else ret -= sub;
}
for (int i=1; i<size; i++)
{
MemSwap(&pos[(i-1)*stride], &pos[i*stride], size-1);
}
return ret;
}
}
}
// Inverts a square matrix, will fail on singular and very occasionally on
// non-singular matrices, returns true on success. Uses Gauss-Jordan elimination
// with partial pivoting.
// in is the input matrix, out the output matrix, just be aware that the input matrix is trashed.
// You have to provide its size (Its square, obviously.), and optionally a stride if different from size.
template <typename T>
inline bool Inverse(T * in, T * out, int size, int stride = -1)
{
if (stride==-1) stride = size;
for (int r=0; r<size; r++)
{
for (int c=0; c<size; c++)
{
out[r*stride + c] = (c==r)?1.0:0.0;
}
}
for (int r=0; r<size; r++)
{
// Find largest pivot and swap in, fail if best we can get is 0...
T max = in[r*stride + r];
int index = r;
for (int i=r+1; i<size; i++)
{
if (fabs(in[i*stride + r])>fabs(max))
{
max = in[i*stride + r];
index = i;
}
}
if (index!=r)
{
MemSwap(&in[index*stride], &in[r*stride], size);
MemSwap(&out[index*stride], &out[r*stride], size);
}
if (fabs(max-0.0)<1e-6) return false;
// Divide through the entire row...
max = 1.0/max;
in[r*stride + r] = 1.0;
for (int i=r+1; i<size; i++) in[r*stride + i] *= max;
for (int i=0; i<size; i++) out[r*stride + i] *= max;
// Row subtract to generate 0's in the current column, so it matches an identity matrix...
for (int i=0; i<size; i++)
{
if (i==r) continue;
T factor = in[i*stride + r];
in[i*stride + r] = 0.0;
for (int j=r+1; j<size; j++) in[i*stride + j] -= factor * in[r*stride + j];
for (int j=0; j<size; j++) out[i*stride + j] -= factor * out[r*stride + j];
}
}
return true;
}
#endif
"""
| [
[
1,
0,
0.1,
0.0077,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
14,
0,
0.5692,
0.8692,
0,
0.66,
1,
974,
4,
0,
0,
0,
0,
0,
1
]
] | [
"from utils.start_cpp import start_cpp",
"matrix_code = start_cpp() + \"\"\"\n#ifndef MATRIX_CODE\n#define MATRIX_CODE\n\ntemplate <typename T>\ninline void MemSwap(T * lhs, T * rhs, int count = 1)\n{\n while(count!=0)"
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
from utils.numpy_help_cpp import numpy_util_code
# Provides various functions to assist with manipulating python objects from c++ code.
python_obj_code = numpy_util_code + start_cpp() + """
#ifndef PYTHON_OBJ_CODE
#define PYTHON_OBJ_CODE
// Extracts a boolean from an object...
bool GetObjectBoolean(PyObject * obj, const char * name)
{
PyObject * b = PyObject_GetAttrString(obj, name);
bool ret = b!=Py_False;
Py_DECREF(b);
return ret;
}
// Extracts an int from an object...
int GetObjectInt(PyObject * obj, const char * name)
{
PyObject * i = PyObject_GetAttrString(obj, name);
int ret = PyInt_AsLong(i);
Py_DECREF(i);
return ret;
}
// Extracts a float from an object...
float GetObjectFloat(PyObject * obj, const char * name)
{
PyObject * f = PyObject_GetAttrString(obj, name);
float ret = PyFloat_AsDouble(f);
Py_DECREF(f);
return ret;
}
// Extracts an array from an object, returning it as a new[] unsigned char array. You can also pass in a pointer to an int to have the size of the array stored...
unsigned char * GetObjectByte1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
unsigned char * ret = new unsigned char[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Byte1D(nao,i);
Py_DECREF(nao);
return ret;
}
// Extracts an array from an object, returning it as a new[] float array. You can also pass in a pointer to an int to have the size of the array stored...
float * GetObjectFloat1D(PyObject * obj, const char * name, int * size = 0)
{
PyArrayObject * nao = (PyArrayObject*)PyObject_GetAttrString(obj, name);
float * ret = new float[nao->dimensions[0]];
if (size) *size = nao->dimensions[0];
for (int i=0;i<nao->dimensions[0];i++) ret[i] = Float1D(nao,i);
Py_DECREF(nao);
return ret;
}
#endif
"""
| [
[
1,
0,
0.1875,
0.0125,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
1,
0,
0.2,
0.0125,
0,
0.66,
0.5,
884,
0,
1,
0,
0,
884,
0,
0
],
[
14,
0,
0.6312,
0.75,
0,
0.66,
... | [
"from utils.start_cpp import start_cpp",
"from utils.numpy_help_cpp import numpy_util_code",
"python_obj_code = numpy_util_code + start_cpp() + \"\"\"\n#ifndef PYTHON_OBJ_CODE\n#define PYTHON_OBJ_CODE\n\n// Extracts a boolean from an object...\nbool GetObjectBoolean(PyObject * obj, const char * name)\n{\n PyObj... |
# -*- coding: utf-8 -*-
# Copyright (c) 2010, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
class ProgBar:
"""Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops."""
def __init__(self, width = 60, onCallback = None):
self.start = time.time()
self.fill = 0
self.width = width
self.onCallback = onCallback
sys.stdout.write(('_'*self.width)+'\n')
sys.stdout.flush()
def __del__(self):
self.end = time.time()
self.__show(self.width)
sys.stdout.write('\nDone - '+str(self.end-self.start)+' seconds\n\n')
sys.stdout.flush()
def callback(self, nDone, nToDo):
"""Hand this into the callback of methods to get a progress bar - it works by users repeatedly calling it to indicate how many units of work they have done (nDone) out of the total number of units required (nToDo)."""
if self.onCallback:
self.onCallback()
n = int(float(self.width)*float(nDone)/float(nToDo))
n = min((n,self.width))
if n>self.fill:
self.__show(n)
def __show(self,n):
sys.stdout.write('|'*(n-self.fill))
sys.stdout.flush()
self.fill = n
| [
[
1,
0,
0.2941,
0.0196,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.3137,
0.0196,
0,
0.66,
0.5,
654,
0,
1,
0,
0,
654,
0,
0
],
[
3,
0,
0.6863,
0.6078,
0,
0.6... | [
"import sys",
"import time",
"class ProgBar:\n \"\"\"Simple console progress bar class. Note that object creation and destruction matter, as they indicate when processing starts and when it stops.\"\"\"\n def __init__(self, width = 60, onCallback = None):\n self.start = time.time()\n self.fill = 0\n ... |
# -*- coding: utf-8 -*-
# Code copied from http://opencv.willowgarage.com/wiki/PythonInterface - license unknown, but presumed to be at least as liberal as bsd (The license for opencv.).
import cv
import numpy as np
def cv2array(im):
"""Converts a cv array to a numpy array."""
depth2dtype = {
cv.IPL_DEPTH_8U: 'uint8',
cv.IPL_DEPTH_8S: 'int8',
cv.IPL_DEPTH_16U: 'uint16',
cv.IPL_DEPTH_16S: 'int16',
cv.IPL_DEPTH_32S: 'int32',
cv.IPL_DEPTH_32F: 'float32',
cv.IPL_DEPTH_64F: 'float64',
}
arrdtype=im.depth
a = np.fromstring(
im.tostring(),
dtype=depth2dtype[im.depth],
count=im.width*im.height*im.nChannels)
a.shape = (im.height,im.width,im.nChannels)
return a
def array2cv(a):
"""Converts a numpy array to a cv array, if possible."""
dtype2depth = {
'uint8': cv.IPL_DEPTH_8U,
'int8': cv.IPL_DEPTH_8S,
'uint16': cv.IPL_DEPTH_16U,
'int16': cv.IPL_DEPTH_16S,
'int32': cv.IPL_DEPTH_32S,
'float32': cv.IPL_DEPTH_32F,
'float64': cv.IPL_DEPTH_64F,
}
try:
nChannels = a.shape[2]
except:
nChannels = 1
cv_im = cv.CreateImageHeader((a.shape[1],a.shape[0]),
dtype2depth[str(a.dtype)],
nChannels)
cv.SetData(cv_im, a.tostring(),
a.dtype.itemsize*nChannels*a.shape[1])
return cv_im
| [
[
1,
0,
0.1296,
0.0185,
0,
0.66,
0,
492,
0,
1,
0,
0,
492,
0,
0
],
[
1,
0,
0.1481,
0.0185,
0,
0.66,
0.3333,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.3889,
0.3519,
0,
... | [
"import cv",
"import numpy as np",
"def cv2array(im):\n \"\"\"Converts a cv array to a numpy array.\"\"\"\n depth2dtype = {\n cv.IPL_DEPTH_8U: 'uint8',\n cv.IPL_DEPTH_8S: 'int8',\n cv.IPL_DEPTH_16U: 'uint16',\n cv.IPL_DEPTH_16S: 'int16',\n cv.IPL_DEPTH_32S: 'int32',",
" \... |
# Copyright (c) 2012, Tom SF Haines
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from utils.start_cpp import start_cpp
# Some basic matrix operations that come in use...
matrix_code = start_cpp() + """
#ifndef MATRIX_CODE
#define MATRIX_CODE
template <typename T>
inline void MemSwap(T * lhs, T * rhs, int count = 1)
{
while(count!=0)
{
T t = *lhs;
*lhs = *rhs;
*rhs = t;
++lhs;
++rhs;
--count;
}
}
// Calculates the determinant - you give it a pointer to the first elment of the array, and its size (It must be square), plus its stride, which would typically be identical to size, which is the default.
template <typename T>
inline T Determinant(T * pos, int size, int stride = -1)
{
if (stride==-1) stride = size;
if (size==1) return pos[0];
else
{
if (size==2) return pos[0]*pos[stride+1] - pos[1]*pos[stride];
else
{
T ret = 0.0;
for (int i=0; i<size; i++)
{
if (i!=0) MemSwap(&pos[0], &pos[stride*i], size-1);
T sub = Determinant(&pos[stride], size-1, stride) * pos[stride*i + size-1];
if ((i+size)%2) ret += sub;
else ret -= sub;
}
for (int i=1; i<size; i++)
{
MemSwap(&pos[(i-1)*stride], &pos[i*stride], size-1);
}
return ret;
}
}
}
// Inverts a square matrix, will fail on singular and very occasionally on
// non-singular matrices, returns true on success. Uses Gauss-Jordan elimination
// with partial pivoting.
// in is the input matrix, out the output matrix, just be aware that the input matrix is trashed.
// You have to provide its size (Its square, obviously.), and optionally a stride if different from size.
template <typename T>
inline bool Inverse(T * in, T * out, int size, int stride = -1)
{
if (stride==-1) stride = size;
for (int r=0; r<size; r++)
{
for (int c=0; c<size; c++)
{
out[r*stride + c] = (c==r)?1.0:0.0;
}
}
for (int r=0; r<size; r++)
{
// Find largest pivot and swap in, fail if best we can get is 0...
T max = in[r*stride + r];
int index = r;
for (int i=r+1; i<size; i++)
{
if (fabs(in[i*stride + r])>fabs(max))
{
max = in[i*stride + r];
index = i;
}
}
if (index!=r)
{
MemSwap(&in[index*stride], &in[r*stride], size);
MemSwap(&out[index*stride], &out[r*stride], size);
}
if (fabs(max-0.0)<1e-6) return false;
// Divide through the entire row...
max = 1.0/max;
in[r*stride + r] = 1.0;
for (int i=r+1; i<size; i++) in[r*stride + i] *= max;
for (int i=0; i<size; i++) out[r*stride + i] *= max;
// Row subtract to generate 0's in the current column, so it matches an identity matrix...
for (int i=0; i<size; i++)
{
if (i==r) continue;
T factor = in[i*stride + r];
in[i*stride + r] = 0.0;
for (int j=r+1; j<size; j++) in[i*stride + j] -= factor * in[r*stride + j];
for (int j=0; j<size; j++) out[i*stride + j] -= factor * out[r*stride + j];
}
}
return true;
}
#endif
"""
| [
[
1,
0,
0.1,
0.0077,
0,
0.66,
0,
972,
0,
1,
0,
0,
972,
0,
0
],
[
14,
0,
0.5692,
0.8692,
0,
0.66,
1,
974,
4,
0,
0,
0,
0,
0,
1
]
] | [
"from utils.start_cpp import start_cpp",
"matrix_code = start_cpp() + \"\"\"\n#ifndef MATRIX_CODE\n#define MATRIX_CODE\n\ntemplate <typename T>\ninline void MemSwap(T * lhs, T * rhs, int count = 1)\n{\n while(count!=0)"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.