code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
#!/usr/bin/python2.4
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Errors for the library.
All exceptions defined by the library
should be defined in this file.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
from oauth2client.anyjson import simplejson
class Error(Exception):
  """Base error for this module; all library exceptions derive from it."""
class HttpError(Error):
"""HTTP data was invalid or unexpected."""
def __init__(self, resp, content, uri=None):
self.resp = resp
self.content = content
self.uri = uri
def _get_reason(self):
"""Calculate the reason for the error from the response content."""
if self.resp.get('content-type', '').startswith('application/json'):
try:
data = simplejson.loads(self.content)
reason = data['error']['message']
except (ValueError, KeyError):
reason = self.content
else:
reason = self.resp.reason
return reason
def __repr__(self):
if self.uri:
return '<HttpError %s when requesting %s returned "%s">' % (
self.resp.status, self.uri, self._get_reason())
else:
return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
__str__ = __repr__
class InvalidJsonError(Error):
  """Raised when the JSON returned could not be parsed."""
class UnknownLinkType(Error):
  """Raised when a link type is unknown or unexpected."""
class UnknownApiNameOrVersion(Error):
  """Raised when no API with the requested name and version exists."""
class UnacceptableMimeTypeError(Error):
  """Raised when a mimetype is unacceptable for the requested operation."""
class MediaUploadSizeError(Error):
  """Raised when the media to upload is larger than the method accepts."""
class ResumableUploadError(Error):
  """Raised when an error occurs during a resumable upload."""
class BatchError(HttpError):
  """Error occurred during batch operations."""

  def __init__(self, reason, resp=None, content=None):
    """Constructor for a BatchError.

    Args:
      reason: string, short description of the error.
      resp: object, the HTTP response, if any (a batch can fail before
        any HTTP exchange takes place, in which case this is None).
      content: string, the HTTP response body, if any.
    """
    self.resp = resp
    self.content = content
    self.reason = reason

  def __repr__(self):
    # Bug fix: resp defaults to None, so unconditionally dereferencing
    # self.resp.status (as before) raised AttributeError whenever the
    # error was constructed without a response.
    if getattr(self.resp, 'status', None) is None:
      return '<BatchError "%s">' % self.reason
    return '<BatchError %s "%s">' % (self.resp.status, self.reason)

  __str__ = __repr__
class UnexpectedMethodError(Error):
  """Exception raised by RequestMockBuilder on unexpected calls."""

  def __init__(self, methodId=None):
    """Constructor for an UnexpectedMethodError.

    Args:
      methodId: string, id of the method that was unexpectedly invoked.
    """
    message = 'Received unexpected call %s' % methodId
    super(UnexpectedMethodError, self).__init__(message)
class UnexpectedBodyError(Error):
  """Exception raised by RequestMockBuilder on unexpected bodies."""

  def __init__(self, expected, provided):
    """Constructor for an UnexpectedBodyError.

    Args:
      expected: the request body the mock expected to receive.
      provided: the request body that was actually received.
    """
    message = 'Expected: [%s] - Provided: [%s]' % (expected, provided)
    super(UnexpectedBodyError, self).__init__(message)
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import apiclient
import base64
import pickle
from django.db import models
class OAuthCredentialsField(models.Field):
  """Django model field that stores apiclient.oauth.Credentials objects.

  Values are pickled and base64-encoded for storage, and transparently
  decoded back into Credentials instances on access.
  """

  __metaclass__ = models.SubfieldBase

  def db_type(self):
    # The base64-encoded pickle is plain ASCII, so a VARCHAR column works.
    return 'VARCHAR'

  def to_python(self, value):
    """Convert a stored database value back into a Credentials object."""
    if value is None or isinstance(value, apiclient.oauth.Credentials):
      return value
    return pickle.loads(base64.b64decode(value))

  def get_db_prep_value(self, value):
    """Serialize a Credentials object for database storage."""
    return base64.b64encode(pickle.dumps(value))
class FlowThreeLeggedField(models.Field):
  """Django model field that stores apiclient.oauth.FlowThreeLegged objects.

  Values are pickled and base64-encoded for storage, and transparently
  decoded back into FlowThreeLegged instances on access.
  """

  __metaclass__ = models.SubfieldBase

  def db_type(self):
    # The base64-encoded pickle is plain ASCII, so a VARCHAR column works.
    return 'VARCHAR'

  def to_python(self, value):
    """Convert a stored database value back into a FlowThreeLegged object.

    Args:
      value: None, a FlowThreeLegged, or a base64-encoded pickled flow.

    Returns:
      None or a FlowThreeLegged instance.
    """
    # Bug fix: a leftover debugging statement (`print "In to_python", value`)
    # dumped every stored value to stdout on each conversion; removed.
    if value is None:
      return None
    if isinstance(value, apiclient.oauth.FlowThreeLegged):
      return value
    return pickle.loads(base64.b64decode(value))

  def get_db_prep_value(self, value):
    """Serialize a FlowThreeLegged object for database storage."""
    return base64.b64encode(pickle.dumps(value))
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command-line tools for authenticating via OAuth 1.0
Do the OAuth 1.0 Three Legged Dance for
a command line application. Stores the generated
credentials in a common file that is used by
other example apps in the same directory.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = ["run"]
import BaseHTTPServer
import gflags
import logging
import socket
import sys
from optparse import OptionParser
from apiclient.oauth import RequestError
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
FLAGS = gflags.FLAGS
gflags.DEFINE_boolean('auth_local_webserver', True,
('Run a local web server to handle redirects during '
'OAuth authorization.'))
gflags.DEFINE_string('auth_host_name', 'localhost',
('Host name to use when running a local web server to '
'handle redirects during OAuth authorization.'))
gflags.DEFINE_multi_int('auth_host_port', [8080, 8090],
('Port to use when running a local web server to '
'handle redirects during OAuth authorization.'))
class ClientRedirectServer(BaseHTTPServer.HTTPServer):
  """A server to handle OAuth 1.0 redirects back to localhost.

  Waits for a single request and parses the query parameters
  into query_params and then stops serving.
  """
  # Class-level default. ClientRedirectHandler.do_GET replaces this (on the
  # server instance) with the parsed query dict of the redirect request.
  query_params = {}
class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """A handler for OAuth 1.0 redirects back to localhost.

  Waits for a single request and parses the query parameters
  into the servers query_params and then stops serving.
  """

  def do_GET(s):
    """Handle a GET request.

    Parses the query parameters and prints a message
    if the flow has completed. Note that we can't detect
    if an error occurred.
    """
    s.send_response(200)
    s.send_header("Content-type", "text/html")
    s.end_headers()
    # Everything after the first '?' is the OAuth callback query string;
    # stash it on the server so run() can read the verifier out of it.
    raw_query = s.path.split('?', 1)[-1]
    s.server.query_params = dict(parse_qsl(raw_query))
    page = ("<html><head><title>Authentication Status</title></head>"
            "<body><p>The authentication flow has completed.</p>"
            "</body></html>")
    s.wfile.write(page)

  def log_message(self, format, *args):
    """Do not log messages to stdout while running as command line program."""
    pass
def run(flow, storage):
  """Core code for a command-line application.

  Performs the OAuth 1.0 three-legged dance: obtains an authorization URL,
  sends the user to it, collects the verification code (via a local redirect
  web server or manual entry), exchanges it for credentials, and stores them.

  Args:
    flow: Flow, an OAuth 1.0 Flow to step through.
    storage: Storage, a Storage to store the credential in.

  Returns:
    Credentials, the obtained credential.

  Exceptions:
    RequestError: if step2 of the flow fails.
  """
  if FLAGS.auth_local_webserver:
    success = False
    port_number = 0
    # Try each candidate port in turn until one binds; remember which one
    # worked so it can be embedded in the OAuth callback URL below.
    for port in FLAGS.auth_host_port:
      port_number = port
      try:
        httpd = BaseHTTPServer.HTTPServer((FLAGS.auth_host_name, port),
                                          ClientRedirectHandler)
      except socket.error, e:
        pass
      else:
        success = True
        break
    # If no port could be bound, fall back to the out-of-band (manual) flow.
    FLAGS.auth_local_webserver = success

  if FLAGS.auth_local_webserver:
    oauth_callback = 'http://%s:%s/' % (FLAGS.auth_host_name, port_number)
  else:
    # 'oob' asks the provider to display the verification code to the user
    # instead of redirecting.
    oauth_callback = 'oob'
  authorize_url = flow.step1_get_authorize_url(oauth_callback)

  print 'Go to the following link in your browser:'
  print authorize_url
  print

  if FLAGS.auth_local_webserver:
    print 'If your browser is on a different machine then exit and re-run this'
    print 'application with the command-line parameter --noauth_local_webserver.'
    print

  if FLAGS.auth_local_webserver:
    # Block until the browser is redirected back to the local server, then
    # pull the verifier out of the captured query parameters.
    httpd.handle_request()
    if 'error' in httpd.query_params:
      sys.exit('Authentication request was rejected.')
    if 'oauth_verifier' in httpd.query_params:
      code = httpd.query_params['oauth_verifier']
  else:
    # Out-of-band flow: the user types the verification code in by hand.
    accepted = 'n'
    while accepted.lower() == 'n':
      accepted = raw_input('Have you authorized me? (y/n) ')
    code = raw_input('What is the verification code? ').strip()

  try:
    credentials = flow.step2_exchange(code)
  except RequestError:
    sys.exit('The authentication has failed.')

  # Persist the credential and teach it how to re-save itself on refresh.
  storage.put(credentials)
  credentials.set_store(storage.put)
  print "You have successfully authenticated."
  return credentials
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Google App Engine
Utilities for making it easier to use the
Google API Client for Python on Google App Engine.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import pickle
from google.appengine.ext import db
from apiclient.oauth import OAuthCredentials
from apiclient.oauth import FlowThreeLegged
class FlowThreeLeggedProperty(db.Property):
  """Datastore property for apiclient.oauth.FlowThreeLegged values.

  Pickles the flow into a db.Blob for storage and unpickles it on read.
  """

  # Tell what the user type is.
  data_type = FlowThreeLegged

  # For writing to datastore.
  def get_value_for_datastore(self, model_instance):
    """Pickle the flow into a Blob suitable for the datastore."""
    flow = super(FlowThreeLeggedProperty,
                 self).get_value_for_datastore(model_instance)
    return db.Blob(pickle.dumps(flow))

  # For reading from datastore.
  def make_value_from_datastore(self, value):
    """Unpickle a stored Blob back into a FlowThreeLegged (or None)."""
    if value is None:
      return None
    return pickle.loads(value)

  def validate(self, value):
    """Reject values that are neither None nor FlowThreeLegged instances."""
    if value is not None and not isinstance(value, FlowThreeLegged):
      # Bug fix: BadValueError was referenced as a bare, undefined name,
      # which raised NameError instead of the intended validation error;
      # it lives in google.appengine.ext.db.
      raise db.BadValueError('Property %s must be convertible '
                             'to a FlowThreeLegged instance (%s)' %
                             (self.name, value))
    return super(FlowThreeLeggedProperty, self).validate(value)

  def empty(self, value):
    """Any falsy value counts as empty."""
    return not value
class OAuthCredentialsProperty(db.Property):
  """Datastore property for apiclient.oauth.OAuthCredentials values.

  Pickles the credentials into a db.Blob for storage and unpickles on read.
  (Docstring typo fixed: the type lives in apiclient.oauth, not
  "apiclient.oath".)
  """

  # Tell what the user type is.
  data_type = OAuthCredentials

  # For writing to datastore.
  def get_value_for_datastore(self, model_instance):
    """Pickle the credentials into a Blob suitable for the datastore."""
    cred = super(OAuthCredentialsProperty,
                 self).get_value_for_datastore(model_instance)
    return db.Blob(pickle.dumps(cred))

  # For reading from datastore.
  def make_value_from_datastore(self, value):
    """Unpickle a stored Blob back into OAuthCredentials (or None)."""
    if value is None:
      return None
    return pickle.loads(value)

  def validate(self, value):
    """Reject values that are neither None nor OAuthCredentials instances."""
    if value is not None and not isinstance(value, OAuthCredentials):
      # Bug fix: BadValueError was referenced as a bare, undefined name,
      # which raised NameError instead of the intended validation error;
      # it lives in google.appengine.ext.db.
      raise db.BadValueError('Property %s must be convertible '
                             'to an OAuthCredentials instance (%s)' %
                             (self.name, value))
    return super(OAuthCredentialsProperty, self).validate(value)

  def empty(self, value):
    """Any falsy value counts as empty."""
    return not value
class StorageByKeyName(object):
  """Store and retrieve a single credential to and from
  the App Engine datastore.

  This Storage helper presumes the Credentials
  have been stored as a CredenialsProperty
  on a datastore model class, and that entities
  are stored by key_name.
  """

  def __init__(self, model, key_name, property_name):
    """Constructor for Storage.

    Args:
      model: db.Model, model class
      key_name: string, key name for the entity that has the credentials
      property_name: string, name of the property that is a CredentialsProperty
    """
    self.model = model
    self.key_name = key_name
    self.property_name = property_name

  def get(self):
    """Retrieve Credential from datastore.

    Returns:
      Credentials
    """
    record = self.model.get_or_insert(self.key_name)
    cred = getattr(record, self.property_name)
    # Wire the credential back to this storage so refreshed tokens are
    # automatically persisted.
    if cred and hasattr(cred, 'set_store'):
      cred.set_store(self.put)
    return cred

  def put(self, credentials):
    """Write a Credentials to the datastore.

    Args:
      credentials: Credentials, the credentials to store.
    """
    record = self.model.get_or_insert(self.key_name)
    setattr(record, self.property_name, credentials)
    record.put()
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for OAuth.
Utilities for making it easier to work with OAuth 1.0 credentials.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import pickle
import threading
from apiclient.oauth import Storage as BaseStorage
class Storage(BaseStorage):
  """Store and retrieve a single credential to and from a file."""

  def __init__(self, filename):
    """Constructor for Storage.

    Args:
      filename: string, path of the file holding the pickled credential.
    """
    self._filename = filename
    self._lock = threading.Lock()

  def get(self):
    """Retrieve Credential from file.

    Returns:
      apiclient.oauth.Credentials, or None if the file is missing,
      unreadable or does not contain a valid pickled credential.
    """
    credentials = None
    self._lock.acquire()
    try:
      try:
        # Bug fix: open in binary mode -- pickle data is bytes, not text.
        f = open(self._filename, 'rb')
        try:
          credentials = pickle.loads(f.read())
        finally:
          f.close()
        credentials.set_store(self.put)
      except Exception:
        # Preserve best-effort semantics (missing/corrupt file -> None),
        # but no longer swallow SystemExit/KeyboardInterrupt the way the
        # previous bare `except:` did.
        credentials = None
    finally:
      # Bug fix: release in a finally clause so an unexpected error can
      # never leave the lock permanently held.
      self._lock.release()
    return credentials

  def put(self, credentials):
    """Write a pickled Credentials to file.

    Args:
      credentials: Credentials, the credentials to store.
    """
    self._lock.acquire()
    try:
      # Bug fix: binary mode, and the file is now closed even if the
      # write raises.
      f = open(self._filename, 'wb')
      try:
        f.write(pickle.dumps(credentials))
      finally:
        f.close()
    finally:
      self._lock.release()
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for discovery based APIs
A client library for Google's discovery based APIs.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = [
'build',
'build_from_document'
'fix_method_name',
'key2param'
]
import copy
import httplib2
import logging
import os
import random
import re
import uritemplate
import urllib
import urlparse
import mimeparse
import mimetypes
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
from apiclient.errors import HttpError
from apiclient.errors import InvalidJsonError
from apiclient.errors import MediaUploadSizeError
from apiclient.errors import UnacceptableMimeTypeError
from apiclient.errors import UnknownApiNameOrVersion
from apiclient.errors import UnknownLinkType
from apiclient.http import HttpRequest
from apiclient.http import MediaFileUpload
from apiclient.http import MediaUpload
from apiclient.model import JsonModel
from apiclient.model import MediaModel
from apiclient.model import RawModel
from apiclient.schema import Schemas
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from oauth2client.anyjson import simplejson
logger = logging.getLogger(__name__)

# Matches a URI Template expression such as '{userId}' in discovery paths.
URITEMPLATE = re.compile('{[^}]*}')
# Matches a single template variable name inside such an expression.
VARNAME = re.compile('[a-zA-Z0-9_-]+')
# URI Template for fetching a service's discovery document; expanded with
# {api} and {apiVersion} in build().
DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
                 '{api}/{apiVersion}/rest')
# Fallback docstring for generated methods that lack a description.
DEFAULT_METHOD_DOC = 'A description of how to use this function'
# Parameters accepted by the stack, but not visible via discovery.
STACK_QUERY_PARAMETERS = ['trace', 'pp', 'userip', 'strict']
# Python reserved words.
RESERVED_WORDS = ['and', 'assert', 'break', 'class', 'continue', 'def', 'del',
                  'elif', 'else', 'except', 'exec', 'finally', 'for', 'from',
                  'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or',
                  'pass', 'print', 'raise', 'return', 'try', 'while' ]
def fix_method_name(name):
  """Fix method names to avoid reserved word conflicts.

  Args:
    name: string, method name.

  Returns:
    The name with a '_' appended if the name is a reserved word.
  """
  if name not in RESERVED_WORDS:
    return name
  return name + '_'
def _add_query_parameter(url, name, value):
  """Adds a query parameter to a url.

  Replaces the current value if it already exists in the URL.

  Args:
    url: string, url to add the query parameter to.
    name: string, query parameter name.
    value: string, query parameter value.

  Returns:
    The updated url; the url unchanged when value is None.
  """
  if value is None:
    return url
  parts = list(urlparse.urlparse(url))
  # parts[4] is the query component; round-trip it through a dict so an
  # existing parameter of the same name is replaced rather than duplicated.
  query = dict(parse_qsl(parts[4]))
  query[name] = value
  parts[4] = urllib.urlencode(query)
  return urlparse.urlunparse(parts)
def key2param(key):
  """Converts key names into parameter names.

  For example, converting "max-results" -> "max_results"

  Args:
    key: string, the method key name.

  Returns:
    A safe method name based on the key name.
  """
  # Identifiers must start with a letter; pad with 'x' when they don't.
  prefix = '' if key[0].isalpha() else 'x'
  # Replace every non-alphanumeric character with an underscore.
  safe = [c if c.isalnum() else '_' for c in key]
  return prefix + ''.join(safe)
def build(serviceName,
          version,
          http=None,
          discoveryServiceUrl=DISCOVERY_URI,
          developerKey=None,
          model=None,
          requestBuilder=HttpRequest):
  """Construct a Resource for interacting with an API.

  Construct a Resource object for interacting with an API. The serviceName and
  version are the names from the Discovery service.

  Args:
    serviceName: string, name of the service.
    version: string, the version of the service.
    http: httplib2.Http, An instance of httplib2.Http or something that acts
      like it that HTTP requests will be made through.
    discoveryServiceUrl: string, a URI Template that points to the location of
      the discovery service. It should have two parameters {api} and
      {apiVersion} that when filled in produce an absolute URI to the discovery
      document for that service.
    developerKey: string, key obtained from
      https://code.google.com/apis/console.
    model: apiclient.Model, converts to and from the wire format.
    requestBuilder: apiclient.http.HttpRequest, encapsulator for an HTTP
      request.

  Returns:
    A Resource object with methods for interacting with the service.

  Raises:
    UnknownApiNameOrVersion: if the discovery service has no document for
      the given name/version pair (HTTP 404).
    HttpError: for any other HTTP error from the discovery service.
    InvalidJsonError: if the discovery document is not valid JSON.
  """
  params = {
      'api': serviceName,
      'apiVersion': version
      }
  if http is None:
    http = httplib2.Http()
  requested_url = uritemplate.expand(discoveryServiceUrl, params)
  # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
  # variable that contains the network address of the client sending the
  # request. If it exists then add that to the request for the discovery
  # document to avoid exceeding the quota on discovery requests.
  if 'REMOTE_ADDR' in os.environ:
    requested_url = _add_query_parameter(requested_url, 'userIp',
                                         os.environ['REMOTE_ADDR'])
  logger.info('URL being requested: %s' % requested_url)
  resp, content = http.request(requested_url)
  # A 404 means the name/version pair simply doesn't exist; surface the more
  # specific error rather than a generic HttpError.
  if resp.status == 404:
    raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName,
                                                            version))
  if resp.status >= 400:
    raise HttpError(resp, content, requested_url)
  # The JSON is parsed here only to validate it; the raw document string is
  # what gets handed to build_from_document below.
  try:
    service = simplejson.loads(content)
  except ValueError, e:
    logger.error('Failed to parse as JSON: ' + content)
    raise InvalidJsonError()
  return build_from_document(content, discoveryServiceUrl, http=http,
      developerKey=developerKey, model=model, requestBuilder=requestBuilder)
def build_from_document(
    service,
    base,
    future=None,
    http=None,
    developerKey=None,
    model=None,
    requestBuilder=HttpRequest):
  """Create a Resource for interacting with an API.

  Same as `build()`, but constructs the Resource object from a discovery
  document that it is given, as opposed to retrieving one over HTTP.

  Args:
    service: string, discovery document.
    base: string, base URI for all HTTP requests, usually the discovery URI.
    future: string, discovery document with future capabilities (deprecated).
    http: httplib2.Http, An instance of httplib2.Http or something that acts
      like it that HTTP requests will be made through.
    developerKey: string, Key for controlling API usage, generated
      from the API Console.
    model: Model class instance that serializes and de-serializes requests and
      responses.
    requestBuilder: Takes an http request and packages it up to be executed.

  Returns:
    A Resource object with methods for interacting with the service.
  """
  # The future parameter is ignored; it remains only for backward
  # compatibility with older callers.
  future = {}
  descriptor = simplejson.loads(service)
  root_url = urlparse.urljoin(base, descriptor['basePath'])
  schema = Schemas(descriptor)
  if model is None:
    # The 'dataWrapper' feature wraps request/response bodies in a
    # {'data': ...} envelope; let JsonModel handle that transparently.
    features = descriptor.get('features', [])
    model = JsonModel('dataWrapper' in features)
  return _createResource(http, root_url, model, requestBuilder, developerKey,
                         descriptor, descriptor, schema)
def _cast(value, schema_type):
"""Convert value to a string based on JSON Schema type.
See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
JSON Schema.
Args:
value: any, the value to convert
schema_type: string, the type that value should be interpreted as
Returns:
A string representation of 'value' based on the schema_type.
"""
if schema_type == 'string':
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
elif schema_type == 'integer':
return str(int(value))
elif schema_type == 'number':
return str(float(value))
elif schema_type == 'boolean':
return str(bool(value)).lower()
else:
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
# Binary unit suffixes recognized by _media_size_to_long, mapped to their
# byte multipliers.
MULTIPLIERS = {
    "KB": 2 ** 10,
    "MB": 2 ** 20,
    "GB": 2 ** 30,
    "TB": 2 ** 40,
    }
def _media_size_to_long(maxSize):
  """Convert a string media size, such as 10GB or 3TB into an integer.

  Args:
    maxSize: string, size as a string, such as 2MB or 7GB.

  Returns:
    The size as an integer value.
  """
  if len(maxSize) < 2:
    return 0
  suffix = maxSize[-2:].upper()
  factor = MULTIPLIERS.get(suffix, 0)
  if not factor:
    # No recognized unit suffix: the whole string is a plain byte count.
    return int(maxSize)
  return int(maxSize[:-2]) * factor
def _createResource(http, baseUrl, model, requestBuilder,
developerKey, resourceDesc, rootDesc, schema):
"""Build a Resource from the API description.
Args:
http: httplib2.Http, Object to make http requests with.
baseUrl: string, base URL for the API. All requests are relative to this
URI.
model: apiclient.Model, converts to and from the wire format.
requestBuilder: class or callable that instantiates an
apiclient.HttpRequest object.
developerKey: string, key obtained from
https://code.google.com/apis/console
resourceDesc: object, section of deserialized discovery document that
describes a resource. Note that the top level discovery document
is considered a resource.
rootDesc: object, the entire deserialized discovery document.
schema: object, mapping of schema names to schema descriptions.
Returns:
An instance of Resource with all the methods attached for interacting with
that resource.
"""
class Resource(object):
"""A class for interacting with a resource."""
def __init__(self):
self._http = http
self._baseUrl = baseUrl
self._model = model
self._developerKey = developerKey
self._requestBuilder = requestBuilder
def createMethod(theclass, methodName, methodDesc, rootDesc):
"""Creates a method for attaching to a Resource.
Args:
theclass: type, the class to attach methods to.
methodName: string, name of the method to use.
methodDesc: object, fragment of deserialized discovery document that
describes the method.
rootDesc: object, the entire deserialized discovery document.
"""
methodName = fix_method_name(methodName)
pathUrl = methodDesc['path']
httpMethod = methodDesc['httpMethod']
methodId = methodDesc['id']
mediaPathUrl = None
accept = []
maxSize = 0
if 'mediaUpload' in methodDesc:
mediaUpload = methodDesc['mediaUpload']
# TODO(jcgregorio) Use URLs from discovery once it is updated.
parsed = list(urlparse.urlparse(baseUrl))
basePath = parsed[2]
mediaPathUrl = '/upload' + basePath + pathUrl
accept = mediaUpload['accept']
maxSize = _media_size_to_long(mediaUpload.get('maxSize', ''))
if 'parameters' not in methodDesc:
methodDesc['parameters'] = {}
# Add in the parameters common to all methods.
for name, desc in rootDesc.get('parameters', {}).iteritems():
methodDesc['parameters'][name] = desc
# Add in undocumented query parameters.
for name in STACK_QUERY_PARAMETERS:
methodDesc['parameters'][name] = {
'type': 'string',
'location': 'query'
}
if httpMethod in ['PUT', 'POST', 'PATCH'] and 'request' in methodDesc:
methodDesc['parameters']['body'] = {
'description': 'The request body.',
'type': 'object',
'required': True,
}
if 'request' in methodDesc:
methodDesc['parameters']['body'].update(methodDesc['request'])
else:
methodDesc['parameters']['body']['type'] = 'object'
if 'mediaUpload' in methodDesc:
methodDesc['parameters']['media_body'] = {
'description': 'The filename of the media request body.',
'type': 'string',
'required': False,
}
if 'body' in methodDesc['parameters']:
methodDesc['parameters']['body']['required'] = False
argmap = {} # Map from method parameter name to query parameter name
required_params = [] # Required parameters
repeated_params = [] # Repeated parameters
pattern_params = {} # Parameters that must match a regex
query_params = [] # Parameters that will be used in the query string
path_params = {} # Parameters that will be used in the base URL
param_type = {} # The type of the parameter
enum_params = {} # Allowable enumeration values for each parameter
if 'parameters' in methodDesc:
for arg, desc in methodDesc['parameters'].iteritems():
param = key2param(arg)
argmap[param] = arg
if desc.get('pattern', ''):
pattern_params[param] = desc['pattern']
if desc.get('enum', ''):
enum_params[param] = desc['enum']
if desc.get('required', False):
required_params.append(param)
if desc.get('repeated', False):
repeated_params.append(param)
if desc.get('location') == 'query':
query_params.append(param)
if desc.get('location') == 'path':
path_params[param] = param
param_type[param] = desc.get('type', 'string')
for match in URITEMPLATE.finditer(pathUrl):
for namematch in VARNAME.finditer(match.group(0)):
name = key2param(namematch.group(0))
path_params[name] = name
if name in query_params:
query_params.remove(name)
def method(self, **kwargs):
# Don't bother with doc string, it will be over-written by createMethod.
for name in kwargs.iterkeys():
if name not in argmap:
raise TypeError('Got an unexpected keyword argument "%s"' % name)
# Remove args that have a value of None.
keys = kwargs.keys()
for name in keys:
if kwargs[name] is None:
del kwargs[name]
for name in required_params:
if name not in kwargs:
raise TypeError('Missing required parameter "%s"' % name)
for name, regex in pattern_params.iteritems():
if name in kwargs:
if isinstance(kwargs[name], basestring):
pvalues = [kwargs[name]]
else:
pvalues = kwargs[name]
for pvalue in pvalues:
if re.match(regex, pvalue) is None:
raise TypeError(
'Parameter "%s" value "%s" does not match the pattern "%s"' %
(name, pvalue, regex))
for name, enums in enum_params.iteritems():
if name in kwargs:
# We need to handle the case of a repeated enum
# name differently, since we want to handle both
# arg='value' and arg=['value1', 'value2']
if (name in repeated_params and
not isinstance(kwargs[name], basestring)):
values = kwargs[name]
else:
values = [kwargs[name]]
for value in values:
if value not in enums:
raise TypeError(
'Parameter "%s" value "%s" is not an allowed value in "%s"' %
(name, value, str(enums)))
actual_query_params = {}
actual_path_params = {}
for key, value in kwargs.iteritems():
to_type = param_type.get(key, 'string')
# For repeated parameters we cast each member of the list.
if key in repeated_params and type(value) == type([]):
cast_value = [_cast(x, to_type) for x in value]
else:
cast_value = _cast(value, to_type)
if key in query_params:
actual_query_params[argmap[key]] = cast_value
if key in path_params:
actual_path_params[argmap[key]] = cast_value
body_value = kwargs.get('body', None)
media_filename = kwargs.get('media_body', None)
if self._developerKey:
actual_query_params['key'] = self._developerKey
model = self._model
# If there is no schema for the response then presume a binary blob.
if methodName.endswith('_media'):
model = MediaModel()
elif 'response' not in methodDesc:
model = RawModel()
headers = {}
headers, params, query, body = model.request(headers,
actual_path_params, actual_query_params, body_value)
expanded_url = uritemplate.expand(pathUrl, params)
url = urlparse.urljoin(self._baseUrl, expanded_url + query)
resumable = None
multipart_boundary = ''
if media_filename:
# Ensure we end up with a valid MediaUpload object.
if isinstance(media_filename, basestring):
(media_mime_type, encoding) = mimetypes.guess_type(media_filename)
if media_mime_type is None:
raise UnknownFileType(media_filename)
if not mimeparse.best_match([media_mime_type], ','.join(accept)):
raise UnacceptableMimeTypeError(media_mime_type)
media_upload = MediaFileUpload(media_filename, media_mime_type)
elif isinstance(media_filename, MediaUpload):
media_upload = media_filename
else:
raise TypeError('media_filename must be str or MediaUpload.')
# Check the maxSize
if maxSize > 0 and media_upload.size() > maxSize:
raise MediaUploadSizeError("Media larger than: %s" % maxSize)
# Use the media path uri for media uploads
expanded_url = uritemplate.expand(mediaPathUrl, params)
url = urlparse.urljoin(self._baseUrl, expanded_url + query)
if media_upload.resumable():
url = _add_query_parameter(url, 'uploadType', 'resumable')
if media_upload.resumable():
# This is all we need to do for resumable, if the body exists it gets
# sent in the first request, otherwise an empty body is sent.
resumable = media_upload
else:
# A non-resumable upload
if body is None:
# This is a simple media upload
headers['content-type'] = media_upload.mimetype()
body = media_upload.getbytes(0, media_upload.size())
url = _add_query_parameter(url, 'uploadType', 'media')
else:
# This is a multipart/related upload.
msgRoot = MIMEMultipart('related')
# msgRoot should not write out it's own headers
setattr(msgRoot, '_write_headers', lambda self: None)
# attach the body as one part
msg = MIMENonMultipart(*headers['content-type'].split('/'))
msg.set_payload(body)
msgRoot.attach(msg)
# attach the media as the second part
msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
msg['Content-Transfer-Encoding'] = 'binary'
payload = media_upload.getbytes(0, media_upload.size())
msg.set_payload(payload)
msgRoot.attach(msg)
body = msgRoot.as_string()
multipart_boundary = msgRoot.get_boundary()
headers['content-type'] = ('multipart/related; '
'boundary="%s"') % multipart_boundary
url = _add_query_parameter(url, 'uploadType', 'multipart')
logger.info('URL being requested: %s' % url)
return self._requestBuilder(self._http,
model.response,
url,
method=httpMethod,
body=body,
headers=headers,
methodId=methodId,
resumable=resumable)
# Build the generated method's docstring from the discovery document.
docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
if len(argmap) > 0:
docs.append('Args:\n')
# Skip undocumented params and params common to all methods.
skip_parameters = rootDesc.get('parameters', {}).keys()
# NOTE(review): append() adds STACK_QUERY_PARAMETERS as a single (list)
# element, so 'arg in skip_parameters' never matches its members;
# extend() looks intended -- verify against other revisions.
skip_parameters.append(STACK_QUERY_PARAMETERS)
for arg in argmap.iterkeys():
if arg in skip_parameters:
continue
repeated = ''
if arg in repeated_params:
repeated = ' (repeated)'
required = ''
if arg in required_params:
required = ' (required)'
paramdesc = methodDesc['parameters'][argmap[arg]]
paramdoc = paramdesc.get('description', 'A parameter')
# $ref parameters are objects; show the pretty-printed schema inline.
if '$ref' in paramdesc:
docs.append(
(' %s: object, %s%s%s\n The object takes the'
' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
schema.prettyPrintByName(paramdesc['$ref'])))
else:
paramtype = paramdesc.get('type', 'string')
docs.append(' %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
repeated))
# Document enum values when both names and descriptions are present.
enum = paramdesc.get('enum', [])
enumDesc = paramdesc.get('enumDescriptions', [])
if enum and enumDesc:
docs.append(' Allowed values\n')
for (name, desc) in zip(enum, enumDesc):
docs.append(' %s - %s\n' % (name, desc))
if 'response' in methodDesc:
# _media variants return raw bytes rather than a deserialized object.
if methodName.endswith('_media'):
docs.append('\nReturns:\n The media object as a string.\n\n ')
else:
docs.append('\nReturns:\n An object of the form:\n\n ')
docs.append(schema.prettyPrintSchema(methodDesc['response']))
setattr(method, '__doc__', ''.join(docs))
setattr(theclass, methodName, method)
def createNextMethod(theclass, methodName, methodDesc, rootDesc):
  """Creates any _next methods for attaching to a Resource.

  The _next methods allow for easy iteration through list() responses.

  Args:
    theclass: type, the class to attach methods to.
    methodName: string, name of the method to use.
    methodDesc: object, fragment of deserialized discovery document that
      describes the method.
    rootDesc: object, the entire deserialized discovery document.
  """
  methodName = fix_method_name(methodName)
  # Note: the original computed methodDesc['id'] + '.next' into an unused
  # local here; the dead assignment has been removed.

  def methodNext(self, previous_request, previous_response):
    """Retrieves the next page of results.

    Args:
      previous_request: The request for the previous page.
      previous_response: The response from the request for the previous page.

    Returns:
      A request object that you can call 'execute()' on to request the next
      page. Returns None if there are no more items in the collection.
    """
    # No nextPageToken means the collection is exhausted.
    if 'nextPageToken' not in previous_response:
      return None

    # Clone the previous request and swap in the new pageToken as a query
    # parameter, replacing any old pageToken value in the URI.
    request = copy.copy(previous_request)

    pageToken = previous_response['nextPageToken']
    parsed = list(urlparse.urlparse(request.uri))
    q = parse_qsl(parsed[4])

    # Find and remove old 'pageToken' value from URI
    newq = [(key, value) for (key, value) in q if key != 'pageToken']
    newq.append(('pageToken', pageToken))
    parsed[4] = urllib.urlencode(newq)
    uri = urlparse.urlunparse(parsed)

    request.uri = uri

    logger.info('URL being requested: %s' % uri)

    return request

  setattr(theclass, methodName, methodNext)
# Add basic methods to Resource: one callable per RPC in the discovery doc.
if 'methods' in resourceDesc:
for methodName, methodDesc in resourceDesc['methods'].iteritems():
createMethod(Resource, methodName, methodDesc, rootDesc)
# Add in _media methods. The functionality of the attached method will
# change when it sees that the method name ends in _media.
if methodDesc.get('supportsMediaDownload', False):
createMethod(Resource, methodName + '_media', methodDesc, rootDesc)
# Add in nested resources
if 'resources' in resourceDesc:
def createResourceMethod(theclass, methodName, methodDesc, rootDesc):
  """Create a method on the Resource to access a nested Resource.

  Args:
    theclass: type, the class to attach methods to.
    methodName: string, name of the method to use.
    methodDesc: object, fragment of deserialized discovery document that
      describes the method.
    rootDesc: object, the entire deserialized discovery document.
  """
  fixed_name = fix_method_name(methodName)

  def methodResource(self):
    # Build the child Resource on demand, reusing this resource's transport,
    # base URL, model, request builder and developer key.
    return _createResource(self._http, self._baseUrl, self._model,
                           self._requestBuilder, self._developerKey,
                           methodDesc, rootDesc, schema)

  methodResource.__doc__ = 'A collection resource.'
  methodResource.__is_resource__ = True
  setattr(theclass, fixed_name, methodResource)
for methodName, methodDesc in resourceDesc['resources'].iteritems():
createResourceMethod(Resource, methodName, methodDesc, rootDesc)
# Add _next() methods
# Look for response bodies in schema that contain nextPageToken, and methods
# that take a pageToken parameter.
if 'methods' in resourceDesc:
for methodName, methodDesc in resourceDesc['methods'].iteritems():
if 'response' in methodDesc:
responseSchema = methodDesc['response']
# Resolve a $ref response schema against the full schema set.
if '$ref' in responseSchema:
responseSchema = schema.get(responseSchema['$ref'])
hasNextPageToken = 'nextPageToken' in responseSchema.get('properties',
{})
hasPageToken = 'pageToken' in methodDesc.get('parameters', {})
if hasNextPageToken and hasPageToken:
# NOTE(review): the last argument here is methodName, but
# createNextMethod's signature names that parameter rootDesc (which it
# currently never reads) -- confirm the intended argument.
createNextMethod(Resource, methodName + '_next',
resourceDesc['methods'][methodName],
methodName)
return Resource()
| Python |
# Copyright (C) 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes to encapsulate a single HTTP request.
The classes implement a command pattern, with every
object supporting an execute() method that does the
actual HTTP request.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import StringIO
import base64
import copy
import gzip
import httplib2
import mimeparse
import mimetypes
import os
import urllib
import urlparse
import uuid
from email.generator import Generator
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from email.parser import FeedParser
from errors import BatchError
from errors import HttpError
from errors import ResumableUploadError
from errors import UnexpectedBodyError
from errors import UnexpectedMethodError
from model import JsonModel
from oauth2client.anyjson import simplejson
DEFAULT_CHUNK_SIZE = 512*1024
class MediaUploadProgress(object):
  """Status of a resumable upload."""

  def __init__(self, resumable_progress, total_size):
    """Constructor.

    Args:
      resumable_progress: int, bytes sent so far.
      total_size: int, total bytes in complete upload, or None if the total
        upload size isn't known ahead of time.
    """
    # Bytes transmitted so far.
    self.resumable_progress = resumable_progress
    # Full upload size in bytes, or None when unknown (streaming upload).
    self.total_size = total_size

  def progress(self):
    """Percent of upload completed, as a float.

    Returns:
      the percentage complete as a float, returning 0.0 if the total size of
      the upload is unknown.
    """
    if self.total_size is None:
      return 0.0
    return float(self.resumable_progress) / float(self.total_size)
class MediaDownloadProgress(object):
  """Status of a resumable download."""

  def __init__(self, resumable_progress, total_size):
    """Constructor.

    Args:
      resumable_progress: int, bytes received so far.
      total_size: int, total bytes in complete download.
    """
    # Bytes received so far.
    self.resumable_progress = resumable_progress
    # Full download size in bytes, or None when not yet reported.
    self.total_size = total_size

  def progress(self):
    """Percent of download completed, as a float.

    Returns:
      the percentage complete as a float, returning 0.0 if the total size of
      the download is unknown.
    """
    if self.total_size is None:
      return 0.0
    return float(self.resumable_progress) / float(self.total_size)
class MediaUpload(object):
  """Describes a media object to upload.

  Base class that defines the interface of MediaUpload subclasses.

  Note that subclasses of MediaUpload may allow you to control the chunksize
  when upload a media object. It is important to keep the size of the chunk as
  large as possible to keep the upload efficient. Other factors may influence
  the size of the chunk you use, particularly if you are working in an
  environment where individual HTTP requests may have a hardcoded time limit,
  such as under certain classes of requests under Google App Engine.
  """

  def chunksize(self):
    """Chunk size for resumable uploads.

    Returns:
      Chunk size in bytes.
    """
    raise NotImplementedError()

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return 'application/octet-stream'

  def size(self):
    """Size of upload.

    Returns:
      Size of the body, or None if the size is unknown.
    """
    return None

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return False

  def getbytes(self, begin, length):
    """Get bytes from the media.

    Note: the second parameter was previously named 'end' even though the
    docstring and every subclass treat it as a byte count; it is now named
    'length' to match (positional callers are unaffected).

    Args:
      begin: int, offset from beginning of file.
      length: int, number of bytes to read, starting at begin.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    raise NotImplementedError()

  def _to_json(self, strip=None):
    """Utility function for creating a JSON representation of a MediaUpload.

    Args:
      strip: array, An array of names of members to not include in the JSON.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    t = type(self)
    d = copy.copy(self.__dict__)
    if strip is not None:
      for member in strip:
        del d[member]
    # Record the concrete class and module so new_from_json() can locate the
    # matching from_json() implementation later.
    d['_class'] = t.__name__
    d['_module'] = t.__module__
    return simplejson.dumps(d)

  def to_json(self):
    """Create a JSON representation of an instance of MediaUpload.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    return self._to_json()

  @classmethod
  def new_from_json(cls, s):
    """Utility class method to instantiate a MediaUpload subclass from a JSON
    representation produced by to_json().

    Args:
      s: string, JSON from to_json().

    Returns:
      An instance of the subclass of MediaUpload that was serialized with
      to_json().
    """
    data = simplejson.loads(s)
    # Find and call the right classmethod from_json() to restore the object.
    module = data['_module']
    m = __import__(module, fromlist=module.split('.')[:-1])
    kls = getattr(m, data['_class'])
    from_json = getattr(kls, 'from_json')
    return from_json(s)
class MediaFileUpload(MediaUpload):
  """A MediaUpload for a file.

  Construct a MediaFileUpload and pass as the media_body parameter of the
  method. For example, if we had a service that allowed uploading images:

    media = MediaFileUpload('cow.png', mimetype='image/png',
                            chunksize=1024*1024, resumable=True)
    farm.animals().insert(
        id='cow',
        name='cow.png',
        media_body=media).execute()
  """

  def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
               resumable=False):
    """Constructor.

    Args:
      filename: string, Name of the file.
      mimetype: string, Mime-type of the file. If None then a mime-type will
        be guessed from the file extension.
      chunksize: int, File will be uploaded in chunks of this many bytes.
        Only used if resumable=True.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    self._filename = filename
    self._size = os.path.getsize(filename)
    # The file handle is opened lazily on the first getbytes() call.
    self._fd = None
    if mimetype is None:
      # Fall back to guessing the mime type from the file extension.
      mimetype, _ = mimetypes.guess_type(filename)
    self._mimetype = mimetype
    self._chunksize = chunksize
    self._resumable = resumable

  def chunksize(self):
    """Chunk size to use for a resumable upload.

    Returns:
      Chunk size in bytes.
    """
    return self._chunksize

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return self._mimetype

  def size(self):
    """Size of upload.

    Returns:
      Size of the body in bytes.
    """
    return self._size

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return self._resumable

  def getbytes(self, begin, length):
    """Get bytes from the media.

    Args:
      begin: int, offset from beginning of file.
      length: int, number of bytes to read, starting at begin.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    if self._fd is None:
      self._fd = open(self._filename, 'rb')
    self._fd.seek(begin)
    return self._fd.read(length)

  def to_json(self):
    """Create a JSON representation of this MediaFileUpload.

    The open file handle is omitted from the serialized state.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    return self._to_json(strip=['_fd'])

  @staticmethod
  def from_json(s):
    """Rebuild a MediaFileUpload from its to_json() representation."""
    state = simplejson.loads(s)
    return MediaFileUpload(
        state['_filename'], state['_mimetype'], state['_chunksize'],
        state['_resumable'])
class MediaIoBaseUpload(MediaUpload):
  """A MediaUpload for a io.Base objects.

  Note that the Python file object is compatible with io.Base and can be used
  with this class also.

    fh = io.BytesIO('...Some data to upload...')
    media = MediaIoBaseUpload(fh, mimetype='image/png',
                              chunksize=1024*1024, resumable=True)
    farm.animals().insert(
        id='cow',
        name='cow.png',
        media_body=media).execute()
  """

  def __init__(self, fh, mimetype, chunksize=DEFAULT_CHUNK_SIZE,
               resumable=False):
    """Constructor.

    Args:
      fh: io.Base or file object, The source of the bytes to upload. MUST be
        opened in blocking mode, do not use streams opened in non-blocking
        mode.
      mimetype: string, Mime-type of the file. If None then a mime-type will
        be guessed from the file extension.
      chunksize: int, File will be uploaded in chunks of this many bytes.
        Only used if resumable=True.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    self._fh = fh
    self._mimetype = mimetype
    self._chunksize = chunksize
    self._resumable = resumable
    # Size is only discoverable for streams backed by a real file; anything
    # else is left as None (unknown size).
    self._size = None
    try:
      if hasattr(self._fh, 'fileno'):
        fileno = self._fh.fileno()
        # Pipes and such show up as 0 length files.
        nbytes = os.fstat(fileno).st_size
        if nbytes:
          self._size = nbytes
    except IOError:
      pass

  def chunksize(self):
    """Chunk size to use for a resumable upload.

    Returns:
      Chunk size in bytes.
    """
    return self._chunksize

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return self._mimetype

  def size(self):
    """Size of upload.

    Returns:
      Size of the body, or None if the size is unknown.
    """
    return self._size

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return self._resumable

  def getbytes(self, begin, length):
    """Get bytes from the media.

    Args:
      begin: int, offset from beginning of file.
      length: int, number of bytes to read, starting at begin.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    self._fh.seek(begin)
    return self._fh.read(length)

  def to_json(self):
    """This upload type is not serializable."""
    raise NotImplementedError('MediaIoBaseUpload is not serializable.')
class MediaInMemoryUpload(MediaUpload):
  """MediaUpload for a chunk of bytes.

  Construct a MediaInMemoryUpload and pass as the media_body parameter of the
  method.
  """

  def __init__(self, body, mimetype='application/octet-stream',
               chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
    """Create a new MediaInMemoryUpload.

    Args:
      body: string, Bytes of body content.
      mimetype: string, Mime-type of the file or default of
        'application/octet-stream'.
      chunksize: int, File will be uploaded in chunks of this many bytes.
        Only used if resumable=True.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    self._body = body
    self._mimetype = mimetype
    self._resumable = resumable
    self._chunksize = chunksize

  def chunksize(self):
    """Chunk size to use for a resumable upload.

    Returns:
      Chunk size in bytes.
    """
    return self._chunksize

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return self._mimetype

  def size(self):
    """Size of upload.

    Returns:
      Size of the body in bytes.
    """
    return len(self._body)

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return self._resumable

  def getbytes(self, begin, length):
    """Get bytes from the media.

    Args:
      begin: int, offset from beginning of file.
      length: int, number of bytes to read, starting at begin.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    return self._body[begin:begin + length]

  def to_json(self):
    """Create a JSON representation of a MediaInMemoryUpload.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    # The raw body is replaced by a base64 copy so the state is JSON-safe.
    d = dict(self.__dict__)
    del d['_body']
    d['_class'] = type(self).__name__
    d['_module'] = type(self).__module__
    d['_b64body'] = base64.b64encode(self._body)
    return simplejson.dumps(d)

  @staticmethod
  def from_json(s):
    """Rebuild a MediaInMemoryUpload from its to_json() representation."""
    state = simplejson.loads(s)
    return MediaInMemoryUpload(base64.b64decode(state['_b64body']),
                               state['_mimetype'], state['_chunksize'],
                               state['_resumable'])
class MediaIoBaseDownload(object):
  """Download media resources.

  Note that the Python file object is compatible with io.Base and can be used
  with this class also.

  Example:
    request = farms.animals().get_media(id='cow')
    fh = io.FileIO('cow.png', mode='wb')
    downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)

    done = False
    while done is False:
      status, done = downloader.next_chunk()
      if status:
        print "Download %d%%." % int(status.progress() * 100)
    print "Download Complete!"
  """

  def __init__(self, fh, request, chunksize=DEFAULT_CHUNK_SIZE):
    """Constructor.

    Args:
      fh: io.Base or file object, The stream in which to write the downloaded
        bytes.
      request: apiclient.http.HttpRequest, the media request to perform in
        chunks.
      chunksize: int, File will be downloaded in chunks of this many bytes.
    """
    self.fh_ = fh
    self.request_ = request
    self.uri_ = request.uri
    self.chunksize_ = chunksize
    self.progress_ = 0
    # Unknown until the server reports it in a Content-Range header.
    self.total_size_ = None
    self.done_ = False

  def next_chunk(self):
    """Get the next chunk of the download.

    Returns:
      (status, done): (MediaDownloadStatus, boolean)
         The value of 'done' will be True when the media has been fully
         downloaded.

    Raises:
      apiclient.errors.HttpError if the response was not a 2xx.
      httplib2.Error if a transport error has occured.
    """
    # HTTP byte ranges are inclusive on both ends, so the end offset is
    # chunksize - 1; the original requested one extra byte per chunk.
    headers = {
        'range': 'bytes=%d-%d' % (
            self.progress_, self.progress_ + self.chunksize_ - 1)
        }
    http = self.request_.http
    # Handle redirects manually so we can keep using the relocated URI for
    # all subsequent chunks.
    http.follow_redirects = False

    resp, content = http.request(self.uri_, headers=headers)
    if resp.status in [301, 302, 303, 307, 308] and 'location' in resp:
        self.uri_ = resp['location']
        resp, content = http.request(self.uri_, headers=headers)
    if resp.status in [200, 206]:
      self.progress_ += len(content)
      self.fh_.write(content)

      # Learn the total size from Content-Range, e.g. 'bytes 0-99/1234'.
      if 'content-range' in resp:
        content_range = resp['content-range']
        length = content_range.rsplit('/', 1)[1]
        self.total_size_ = int(length)

      if self.progress_ == self.total_size_:
        self.done_ = True
      return MediaDownloadProgress(self.progress_, self.total_size_), self.done_
    else:
      raise HttpError(resp, content, self.uri_)
class HttpRequest(object):
  """Encapsulates a single HTTP request."""

  def __init__(self, http, postproc, uri,
               method='GET',
               body=None,
               headers=None,
               methodId=None,
               resumable=None):
    """Constructor for an HttpRequest.

    Args:
      http: httplib2.Http, the transport object to use to make a request
      postproc: callable, called on the HTTP response and content to transform
                it into a data object before returning, or raising an
                exception on an error.
      uri: string, the absolute URI to send the request to
      method: string, the HTTP method to use
      body: string, the request body of the HTTP request,
      headers: dict, the HTTP request headers
      methodId: string, a unique identifier for the API method being called.
      resumable: MediaUpload, None if this is not a resumable request.
    """
    self.uri = uri
    self.method = method
    self.body = body
    self.headers = headers or {}
    self.methodId = methodId
    self.http = http
    self.postproc = postproc
    self.resumable = resumable
    # Set to True when a chunk upload fails; makes the next next_chunk()
    # call resynchronize with the server before sending more data.
    self._in_error_state = False

    # Note: the original parsed the content-type header here with
    # mimeparse.parse_mime_type() and discarded all three results; the call
    # also raised AttributeError whenever headers was None (the default), so
    # the dead call has been removed.

    # The size of the non-media part of the request.
    self.body_size = len(self.body or '')

    # The resumable URI to send chunks to.
    self.resumable_uri = None

    # The bytes that have been uploaded.
    self.resumable_progress = 0

  def execute(self, http=None):
    """Execute the request.

    Args:
      http: httplib2.Http, an http object to be used in place of the
            one the HttpRequest request object was constructed with.

    Returns:
      A deserialized object model of the response body as determined
      by the postproc.

    Raises:
      apiclient.errors.HttpError if the response was not a 2xx.
      httplib2.Error if a transport error has occured.
    """
    if http is None:
      http = self.http
    if self.resumable:
      # Drive the resumable upload chunk by chunk until a body comes back.
      body = None
      while body is None:
        _, body = self.next_chunk(http)
      return body
    else:
      if 'content-length' not in self.headers:
        self.headers['content-length'] = str(self.body_size)
      resp, content = http.request(self.uri, self.method,
                                   body=self.body,
                                   headers=self.headers)

      if resp.status >= 300:
        raise HttpError(resp, content, self.uri)
    return self.postproc(resp, content)

  def next_chunk(self, http=None):
    """Execute the next step of a resumable upload.

    Can only be used if the method being executed supports media uploads and
    the MediaUpload object passed in was flagged as using resumable upload.

    Example:

      media = MediaFileUpload('cow.png', mimetype='image/png',
                              chunksize=1000, resumable=True)
      request = farm.animals().insert(
          id='cow',
          name='cow.png',
          media_body=media)

      response = None
      while response is None:
        status, response = request.next_chunk()
        if status:
          print "Upload %d%% complete." % int(status.progress() * 100)

    Returns:
      (status, body): (ResumableMediaStatus, object)
         The body will be None until the resumable media is fully uploaded.

    Raises:
      apiclient.errors.HttpError if the response was not a 2xx.
      httplib2.Error if a transport error has occured.
    """
    if http is None:
      http = self.http

    # '*' tells the server the total upload size is not known in advance.
    if self.resumable.size() is None:
      size = '*'
    else:
      size = str(self.resumable.size())

    if self.resumable_uri is None:
      # First call: start the resumable session.  The body is the non-media
      # part of the request; the server replies with the session URI for the
      # media chunks in the 'location' header.
      start_headers = copy.copy(self.headers)
      start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
      if size != '*':
        start_headers['X-Upload-Content-Length'] = size
      start_headers['content-length'] = str(self.body_size)

      resp, content = http.request(self.uri, self.method,
                                   body=self.body,
                                   headers=start_headers)
      if resp.status == 200 and 'location' in resp:
        self.resumable_uri = resp['location']
      else:
        raise ResumableUploadError("Failed to retrieve starting URI.")
    elif self._in_error_state:
      # If we are in an error state then query the server for current state
      # of the upload by sending an empty PUT and reading the 'range' header
      # in the response.
      headers = {
          'Content-Range': 'bytes */%s' % size,
          'content-length': '0'
          }
      resp, content = http.request(self.resumable_uri, 'PUT',
                                   headers=headers)
      status, body = self._process_response(resp, content)
      if body:
        # The upload was complete.
        return (status, body)

    data = self.resumable.getbytes(
        self.resumable_progress, self.resumable.chunksize())

    # A short read implies that we are at EOF, so finish the upload.
    if len(data) < self.resumable.chunksize():
      size = str(self.resumable_progress + len(data))

    headers = {
        'Content-Range': 'bytes %d-%d/%s' % (
            self.resumable_progress, self.resumable_progress + len(data) - 1,
            size)
        }
    try:
      resp, content = http.request(self.resumable_uri, 'PUT',
                                   body=data,
                                   headers=headers)
    except:
      # Remember the failure (then re-raise) so the next call queries the
      # server for the amount of data actually received.
      self._in_error_state = True
      raise

    return self._process_response(resp, content)

  def _process_response(self, resp, content):
    """Process the response from a single chunk upload.

    Args:
      resp: httplib2.Response, the response object.
      content: string, the content of the response.

    Returns:
      (status, body): (ResumableMediaStatus, object)
         The body will be None until the resumable media is fully uploaded.

    Raises:
      apiclient.errors.HttpError if the response was not a 2xx or a 308.
    """
    if resp.status in [200, 201]:
      self._in_error_state = False
      return None, self.postproc(resp, content)
    elif resp.status == 308:
      self._in_error_state = False
      # A "308 Resume Incomplete" indicates we are not done.  The 'range'
      # header looks like 'bytes=0-12345'; the next byte to send is one past
      # its upper bound.
      self.resumable_progress = int(resp['range'].split('-')[1]) + 1
      if 'location' in resp:
        self.resumable_uri = resp['location']
    else:
      self._in_error_state = True
      raise HttpError(resp, content, self.uri)

    return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
            None)

  def to_json(self):
    """Returns a JSON representation of the HttpRequest."""
    d = copy.copy(self.__dict__)
    if d['resumable'] is not None:
      d['resumable'] = self.resumable.to_json()
    # The transport and the postprocessing callable are not serializable.
    del d['http']
    del d['postproc']
    return simplejson.dumps(d)

  @staticmethod
  def from_json(s, http, postproc):
    """Returns an HttpRequest populated with info from a JSON object."""
    d = simplejson.loads(s)
    if d['resumable'] is not None:
      d['resumable'] = MediaUpload.new_from_json(d['resumable'])
    return HttpRequest(
        http,
        postproc,
        uri=d['uri'],
        method=d['method'],
        body=d['body'],
        headers=d['headers'],
        methodId=d['methodId'],
        resumable=d['resumable'])
class BatchHttpRequest(object):
"""Batches multiple HttpRequest objects into a single HTTP request.
Example:
from apiclient.http import BatchHttpRequest
def list_animals(request_id, response):
\"\"\"Do something with the animals list response.\"\"\"
pass
def list_farmers(request_id, response):
\"\"\"Do something with the farmers list response.\"\"\"
pass
service = build('farm', 'v2')
batch = BatchHttpRequest()
batch.add(service.animals().list(), list_animals)
batch.add(service.farmers().list(), list_farmers)
batch.execute(http)
"""
def __init__(self, callback=None, batch_uri=None):
"""Constructor for a BatchHttpRequest.
Args:
callback: callable, A callback to be called for each response, of the
form callback(id, response). The first parameter is the request id, and
the second is the deserialized response object.
batch_uri: string, URI to send batch requests to.
"""
if batch_uri is None:
batch_uri = 'https://www.googleapis.com/batch'
self._batch_uri = batch_uri
# Global callback to be called for each individual response in the batch.
self._callback = callback
# A map from id to request.
self._requests = {}
# A map from id to callback.
self._callbacks = {}
# List of request ids, in the order in which they were added.
self._order = []
# The last auto generated id.
self._last_auto_id = 0
# Unique ID on which to base the Content-ID headers.
self._base_id = None
# A map from request id to (headers, content) response pairs
self._responses = {}
# A map of id(Credentials) that have been refreshed.
self._refreshed_credentials = {}
def _refresh_and_apply_credentials(self, request, http):
  """Refresh the credentials and apply to the request.

  Args:
    request: HttpRequest, the request.
    http: httplib2.Http, the global http object for the batch.
  """
  # Prefer credentials attached to the individual request's http object;
  # fall back to the batch-level http passed into execute().  oauth2client's
  # authorize() attaches a 'credentials' attribute to http.request.
  creds = None
  if request.http is not None and hasattr(request.http.request,
      'credentials'):
    creds = request.http.request.credentials
  elif http is not None and hasattr(http.request, 'credentials'):
    creds = http.request.credentials
  if creds is not None:
    # Refresh each distinct Credentials object at most once per batch,
    # tracked by id() of the instance.
    if id(creds) not in self._refreshed_credentials:
      creds.refresh(http)
      self._refreshed_credentials[id(creds)] = 1

    # Only apply the credentials if we are using the http object passed in,
    # otherwise apply() will get called during _serialize_request().
    if request.http is None or not hasattr(request.http.request,
        'credentials'):
      creds.apply(request.headers)
def _id_to_header(self, id_):
  """Convert an id to a Content-ID header value.

  Args:
    id_: string, identifier of individual request.

  Returns:
    A Content-ID header with the id_ encoded into it. A UUID is prepended to
    the value because Content-ID headers are supposed to be universally
    unique.
  """
  # Lazily create one UUID per batch; all Content-IDs share it.
  base = self._base_id
  if base is None:
    base = self._base_id = uuid.uuid4()
  return '<%s+%s>' % (base, urllib.quote(id_))
def _header_to_id(self, header):
  """Convert a Content-ID header value to an id.

  Presumes the Content-ID header conforms to the format that _id_to_header()
  returns.

  Args:
    header: string, Content-ID header value.

  Returns:
    The extracted id value.

  Raises:
    BatchError if the header is not in the expected format.
  """
  # Expected shape: '<uuid+quoted_id>'.
  if header[0] != '<' or header[-1] != '>' or '+' not in header:
    raise BatchError("Invalid value for Content-ID: %s" % header)
  _, encoded_id = header[1:-1].rsplit('+', 1)
  return urllib.unquote(encoded_id)
def _serialize_request(self, request):
  """Convert an HttpRequest object into a string.

  Args:
    request: HttpRequest, the request to serialize.

  Returns:
    The request as a string in application/http format.
  """
  # Construct status line.  Only the path/params/query portion goes on the
  # request line; scheme and host are carried by the outer batch request.
  parsed = urlparse.urlparse(request.uri)
  request_line = urlparse.urlunparse(
      (None, None, parsed.path, parsed.params, parsed.query, None)
      )
  status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
  major, minor = request.headers.get('content-type', 'application/json').split('/')
  msg = MIMENonMultipart(major, minor)
  headers = request.headers.copy()

  # Apply per-request credentials (if any) so this serialized part carries
  # its own Authorization header.
  if request.http is not None and hasattr(request.http.request,
      'credentials'):
    request.http.request.credentials.apply(headers)

  # MIMENonMultipart adds its own Content-Type header.
  if 'content-type' in headers:
    del headers['content-type']

  for key, value in headers.iteritems():
    msg[key] = value
  msg['Host'] = parsed.netloc
  msg.set_unixfrom(None)

  if request.body is not None:
    msg.set_payload(request.body)
    msg['content-length'] = str(len(request.body))

  # Serialize the mime message.
  fp = StringIO.StringIO()
  # maxheaderlen=0 means don't line wrap headers.
  g = Generator(fp, maxheaderlen=0)
  g.flatten(msg, unixfrom=False)
  body = fp.getvalue()

  # Strip off the \n\n that the MIME lib tacks onto the end of the payload.
  if request.body is None:
    body = body[:-2]

  return status_line.encode('utf-8') + body
def _deserialize_response(self, payload):
  """Convert string into httplib2 response and content.

  Args:
    payload: string, headers and body as a string.

  Returns:
    A pair (resp, content) like would be returned from httplib2.request.
  """
  # Strip off the status line, e.g. 'HTTP/1.1 200 OK'.
  status_line, payload = payload.split('\n', 1)
  protocol, status, reason = status_line.split(' ', 2)

  # Parse the rest of the response (headers + body).
  parser = FeedParser()
  parser.feed(payload)
  msg = parser.close()
  msg['status'] = status

  # Create httplib2.Response from the parsed headers.
  resp = httplib2.Response(msg)
  resp.reason = reason
  # e.g. 'HTTP/1.1' -> 11
  resp.version = int(protocol.split('/', 1)[1].replace('.', ''))

  # NOTE(review): assumes a CRLF blank line separates headers from body; a
  # payload using bare LF separators would raise IndexError here -- confirm
  # the server's batch-part format.
  content = payload.split('\r\n\r\n', 1)[1]

  return resp, content
def _new_id(self):
"""Create a new id.
Auto incrementing number that avoids conflicts with ids already used.
Returns:
string, a new unique id.
"""
self._last_auto_id += 1
while str(self._last_auto_id) in self._requests:
self._last_auto_id += 1
return str(self._last_auto_id)
def add(self, request, callback=None, request_id=None):
"""Add a new request.
Every callback added will be paired with a unique id, the request_id. That
unique id will be passed back to the callback when the response comes back
from the server. The default behavior is to have the library generate it's
own unique id. If the caller passes in a request_id then they must ensure
uniqueness for each request_id, and if they are not an exception is
raised. Callers should either supply all request_ids or nevery supply a
request id, to avoid such an error.
Args:
request: HttpRequest, Request to add to the batch.
callback: callable, A callback to be called for this response, of the
form callback(id, response). The first parameter is the request id, and
the second is the deserialized response object.
request_id: string, A unique id for the request. The id will be passed to
the callback with the response.
Returns:
None
Raises:
BatchError if a media request is added to a batch.
KeyError is the request_id is not unique.
"""
if request_id is None:
request_id = self._new_id()
if request.resumable is not None:
raise BatchError("Media requests cannot be used in a batch request.")
if request_id in self._requests:
raise KeyError("A request with this ID already exists: %s" % request_id)
self._requests[request_id] = request
self._callbacks[request_id] = callback
self._order.append(request_id)
  def _execute(self, http, order, requests):
    """Serialize batch request, send to server, process response.

    Serializes every request into one multipart/mixed POST body, issues a
    single HTTP request to the batch endpoint, then splits the multipart
    response back into individual (headers, content) pairs stored in
    self._responses keyed by request id.

    Args:
      http: httplib2.Http, an http object to be used to make the request with.
      order: list, list of request ids in the order they were added to the
        batch.
      requests: dict, mapping of request id to the request objects to send.

    Raises:
      httplib2.Error if a transport error has occurred.
      apiclient.errors.BatchError if the response is the wrong format.
    """
    message = MIMEMultipart('mixed')
    # Message should not write out its own headers; the outer HTTP request
    # headers are built explicitly below.
    setattr(message, '_write_headers', lambda self: None)

    # Add all the individual requests, one application/http MIME part each.
    for request_id in order:
      request = requests[request_id]

      msg = MIMENonMultipart('application', 'http')
      msg['Content-Transfer-Encoding'] = 'binary'
      # The Content-ID lets the matching response part be mapped back to
      # this request.
      msg['Content-ID'] = self._id_to_header(request_id)

      body = self._serialize_request(request)
      msg.set_payload(body)
      message.attach(msg)

    body = message.as_string()

    headers = {}
    headers['content-type'] = ('multipart/mixed; '
                               'boundary="%s"') % message.get_boundary()

    resp, content = http.request(self._batch_uri, 'POST', body=body,
                                 headers=headers)

    if resp.status >= 300:
      raise HttpError(resp, content, self._batch_uri)

    # Now break out the individual responses and store each one.
    # NOTE(review): `boundary` is parsed but never used below; the FeedParser
    # relies on the prepended content-type header instead.
    boundary, _ = content.split(None, 1)

    # Prepend with a content-type header so FeedParser can handle it.
    header = 'content-type: %s\r\n\r\n' % resp['content-type']
    for_parser = header + content

    parser = FeedParser()
    parser.feed(for_parser)
    mime_response = parser.close()

    if not mime_response.is_multipart():
      raise BatchError("Response not in multipart/mixed format.", resp,
                       content)

    # Each response part carries the Content-ID of the request it answers.
    for part in mime_response.get_payload():
      request_id = self._header_to_id(part['Content-ID'])
      headers, content = self._deserialize_response(part.get_payload())
      self._responses[request_id] = (headers, content)
  def execute(self, http=None):
    """Execute all the requests as a single batched HTTP request.

    Args:
      http: httplib2.Http, an http object to be used in place of the one the
        HttpRequest request object was constructed with. If one isn't
        supplied then use a http object from the requests in this batch.

    Returns:
      None

    Raises:
      httplib2.Error if a transport error has occurred.
      apiclient.errors.BatchError if the response is the wrong format.
    """
    # If http is not supplied use the first valid one given in the requests.
    if http is None:
      for request_id in self._order:
        request = self._requests[request_id]
        if request is not None:
          http = request.http
          break
    if http is None:
      raise ValueError("Missing a valid http object.")
    self._execute(http, self._order, self._requests)

    # Loop over all the requests and check for 401s. For each 401 request the
    # credentials should be refreshed and then sent again in a separate batch.
    redo_requests = {}
    redo_order = []
    for request_id in self._order:
      headers, content = self._responses[request_id]
      if headers['status'] == '401':
        redo_order.append(request_id)
        request = self._requests[request_id]
        self._refresh_and_apply_credentials(request, http)
        redo_requests[request_id] = request
    if redo_requests:
      # Retry only the 401'd requests; their entries in self._responses are
      # overwritten by the retried results.
      self._execute(http, redo_order, redo_requests)

    # Now process all callbacks that are erroring, and raise an exception for
    # ones that return a non-2xx response? Or add extra parameter to callback
    # that contains an HttpError?
    for request_id in self._order:
      headers, content = self._responses[request_id]
      request = self._requests[request_id]
      callback = self._callbacks[request_id]
      response = None
      exception = None
      try:
        # postproc deserializes the body and raises HttpError on non-2xx
        # statuses; the error is handed to the callbacks, not raised here.
        r = httplib2.Response(headers)
        response = request.postproc(r, content)
      except HttpError, e:
        exception = e
      if callback is not None:
        callback(request_id, response, exception)
      if self._callback is not None:
        self._callback(request_id, response, exception)
class HttpRequestMock(object):
  """Mock of HttpRequest.

  Replays a canned (response, content) pair through the supplied
  post-processing function instead of performing real HTTP traffic.

  Do not construct directly, instead use RequestMockBuilder.
  """

  def __init__(self, resp, content, postproc):
    """Constructor for HttpRequestMock

    Args:
      resp: httplib2.Response, the response to emulate coming from the request
      content: string, the response body
      postproc: callable, the post processing function usually supplied by
                the model class. See model.JsonModel.response() as an example.
    """
    if resp is None:
      # No response supplied: default to a plain 200 OK.
      resp = httplib2.Response({'status': 200, 'reason': 'OK'})
    self.resp = resp
    self.content = content
    self.postproc = postproc
    if 'reason' in self.resp:
      # Mirror the header value onto the attribute httplib2 exposes.
      self.resp.reason = self.resp['reason']

  def execute(self, http=None):
    """Execute the request.

    Same behavior as HttpRequest.execute(), but the response is
    mocked and not really from an HTTP request/response.
    """
    return self.postproc(self.resp, self.content)
class RequestMockBuilder(object):
  """A simple mock of HttpRequest

  Pass in a dictionary to the constructor that maps request methodIds to
  tuples of (httplib2.Response, content, opt_expected_body) that should be
  returned when that method is called. None may also be passed in for the
  httplib2.Response, in which case a 200 OK response will be generated.
  If an opt_expected_body (str or dict) is provided, it will be compared to
  the body and UnexpectedBodyError will be raised on inequality.

  Example:
    response = '{"data": {"id": "tag:google.c...'
    requestBuilder = RequestMockBuilder(
      {
        'plus.activities.get': (None, response),
      }
    )
    apiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)

  Methods that you do not supply a response for will return a
  200 OK with an empty string as the response content or raise an exception
  if check_unexpected is set to True. The methodId is taken from the rpcName
  in the discovery document.

  For more details see the project wiki.
  """

  def __init__(self, responses, check_unexpected=False):
    """Constructor for RequestMockBuilder

    The constructed object should be a callable object
    that can replace the class HttpResponse.

    responses - A dictionary that maps methodIds into tuples
                of (httplib2.Response, content). The methodId
                comes from the 'rpcName' field in the discovery
                document.
    check_unexpected - A boolean setting whether or not UnexpectedMethodError
                       should be raised on unsupplied method.
    """
    self.responses = responses
    self.check_unexpected = check_unexpected

  def __call__(self, http, postproc, uri, method='GET', body=None,
               headers=None, methodId=None, resumable=None):
    """Implements the callable interface that discovery.build() expects
    of requestBuilder, which is to build an object compatible with
    HttpRequest.execute(). See that method for the description of the
    parameters and the expected response.
    """
    if methodId not in self.responses:
      # No canned response: either fail loudly or fall back to an empty
      # JSON 200 OK.
      if self.check_unexpected:
        raise UnexpectedMethodError(methodId)
      model = JsonModel(False)
      return HttpRequestMock(None, '{}', model.response)

    response = self.responses[methodId]
    resp, content = response[:2]
    if len(response) > 2:
      # Test the body against the supplied expected_body.
      expected_body = response[2]
      if bool(expected_body) != bool(body):
        # Not expecting a body and provided one
        # or expecting a body and not provided one.
        raise UnexpectedBodyError(expected_body, body)
      if isinstance(expected_body, str):
        # Compare JSON values rather than raw strings so formatting
        # differences do not cause spurious mismatches.
        expected_body = simplejson.loads(expected_body)
      body = simplejson.loads(body)
      if body != expected_body:
        raise UnexpectedBodyError(expected_body, body)
    return HttpRequestMock(resp, content, postproc)
class HttpMock(object):
  """Mock of httplib2.Http

  Replays the contents of a file as the response body, together with a
  fixed set of response headers, for every request() call.
  """

  def __init__(self, filename, headers=None):
    """
    Args:
      filename: string, absolute filename to read response from
      headers: dict, header to return with response; defaults to a bare
               '200 OK' status.
    """
    if headers is None:
      headers = {'status': '200 OK'}
    # Use open() rather than the legacy file() builtin, and guarantee the
    # handle is closed even if read() raises.
    f = open(filename, 'r')
    try:
      self.data = f.read()
    finally:
      f.close()
    self.headers = headers

  def request(self, uri,
              method='GET',
              body=None,
              headers=None,
              redirections=1,
              connection_type=None):
    """Return the canned response regardless of the request parameters."""
    return httplib2.Response(self.headers), self.data
class HttpMockSequence(object):
  """Mock of httplib2.Http

  Mocks a sequence of calls to request returning different responses for each
  call. Create an instance initialized with the desired response headers
  and content and then use as if an httplib2.Http instance.

    http = HttpMockSequence([
      ({'status': '401'}, ''),
      ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
      ({'status': '200'}, 'echo_request_headers'),
      ])
    resp, content = http.request("http://examples.com")

  There are special values you can pass in for content to trigger
  behaviours that are helpful in testing.

  'echo_request_headers' means return the request headers in the response body
  'echo_request_headers_as_json' means return the request headers in
     the response body
  'echo_request_body' means return the request body in the response body
  'echo_request_uri' means return the request uri in the response body
  """

  def __init__(self, iterable):
    """
    Args:
      iterable: iterable, a sequence of pairs of (headers, body)
    """
    self._iterable = iterable
    self.follow_redirects = True

  def request(self, uri,
              method='GET',
              body=None,
              headers=None,
              redirections=1,
              connection_type=None):
    """Pop and return the next canned response, honoring the echo markers."""
    resp_headers, content = self._iterable.pop(0)
    # The canned content may be a marker asking us to echo back part of the
    # request instead of a literal body.
    if content == 'echo_request_uri':
      content = uri
    elif content == 'echo_request_body':
      content = body
    elif content == 'echo_request_headers_as_json':
      content = simplejson.dumps(headers)
    elif content == 'echo_request_headers':
      content = headers
    return httplib2.Response(resp_headers), content
def set_user_agent(http, user_agent):
  """Set the user-agent on every request.

  Args:
     http - An instance of httplib2.Http
         or something that acts like it.
     user_agent: string, the value for the user-agent header.

  Returns:
     A modified instance of http that was passed in.

  Example:

    h = httplib2.Http()
    h = set_user_agent(h, "my-app-name/6.0")

  Most of the time the user-agent will be set doing auth, this is for the rare
  cases where you are accessing an unauthenticated endpoint.
  """
  original_request = http.request

  # Closure installed in place of 'http.request'; it injects the user-agent
  # header before delegating to the original implementation.
  def request_with_user_agent(uri, method='GET', body=None, headers=None,
                              redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                              connection_type=None):
    """Modify the request headers to add the user-agent."""
    if headers is None:
      headers = {}
    if 'user-agent' in headers:
      # Keep whatever agent the caller set, prepending ours.
      headers['user-agent'] = user_agent + ' ' + headers['user-agent']
    else:
      headers['user-agent'] = user_agent
    return original_request(uri, method, body, headers,
                            redirections, connection_type)

  http.request = request_with_user_agent
  return http
def tunnel_patch(http):
  """Tunnel PATCH requests over POST.

  Args:
     http - An instance of httplib2.Http
         or something that acts like it.

  Returns:
     A modified instance of http that was passed in.

  Example:

    h = httplib2.Http()
    h = tunnel_patch(h)

  Useful if you are running on a platform that doesn't support PATCH.
  Apply this last if you are using OAuth 1.0, as changing the method
  will result in a different signature.
  """
  request_orig = http.request

  # The closure that will replace 'httplib2.Http.request'.
  def new_request(uri, method='GET', body=None, headers=None,
                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                  connection_type=None):
    """Rewrite PATCH into a POST carrying an X-HTTP-Method-Override header."""
    if headers is None:
      headers = {}
    if method == 'PATCH':
      if 'oauth_token' in headers.get('authorization', ''):
        # An OAuth 1.0 signature covers the HTTP method, so switching PATCH
        # to POST after signing invalidates the signature.
        logging.warning(
            'OAuth 1.0 request made with Credentials after tunnel_patch.')
      headers['x-http-method-override'] = "PATCH"
      method = 'POST'
    resp, content = request_orig(uri, method, body, headers,
                                 redirections, connection_type)
    return resp, content

  http.request = new_request
  return http
| Python |
#!/usr/bin/python2.4
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model objects for requests and responses.
Each API may support one or more serializations, such
as JSON, Atom, etc. The model classes are responsible
for converting between the wire format and the Python
object representation.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import gflags
import logging
import urllib
from errors import HttpError
from oauth2client.anyjson import simplejson
FLAGS = gflags.FLAGS
gflags.DEFINE_boolean('dump_request_response', False,
'Dump all http server requests and responses. '
)
def _abstract():
  """Raise NotImplementedError; placeholder body for abstract methods."""
  raise NotImplementedError('You need to override this function')
class Model(object):
  """Model base class.

  All Model classes should implement this interface.
  The Model serializes and de-serializes between a wire
  format such as JSON and a Python object representation.
  """

  def request(self, headers, path_params, query_params, body_value):
    """Updates outgoing requests with a serialized body.

    Args:
      headers: dict, request headers
      path_params: dict, parameters that appear in the request path
      query_params: dict, parameters that appear in the query
      body_value: object, the request body as a Python object, which must be
                  serializable.
    Returns:
      A tuple of (headers, path_params, query, body)

      headers: dict, request headers
      path_params: dict, parameters that appear in the request path
      query: string, query part of the request URI
      body: string, the body serialized in the desired wire format.
    """
    # Abstract: the concrete serialization is supplied by subclasses.
    _abstract()

  def response(self, resp, content):
    """Convert the response wire format into a Python object.

    Args:
      resp: httplib2.Response, the HTTP response headers and status
      content: string, the body of the HTTP response

    Returns:
      The body de-serialized as a Python object.

    Raises:
      apiclient.errors.HttpError if a non 2xx response is received.
    """
    # Abstract: the concrete deserialization is supplied by subclasses.
    _abstract()
class BaseModel(Model):
  """Base model class.

  Subclasses should provide implementations for the "serialize" and
  "deserialize" methods, as well as values for the following class attributes.

  Attributes:
    accept: The value to use for the HTTP Accept header.
    content_type: The value to use for the HTTP Content-type header.
    no_content_response: The value to return when deserializing a 204 "No
        Content" response.
    alt_param: The value to supply as the "alt" query parameter for requests.
  """

  accept = None
  content_type = None
  no_content_response = None
  alt_param = None

  def _log_request(self, headers, path_params, query, body):
    """Logs debugging information about the request if requested."""
    # Gated on the --dump_request_response gflag; a no-op otherwise.
    if FLAGS.dump_request_response:
      logging.info('--request-start--')
      logging.info('-headers-start-')
      for h, v in headers.iteritems():
        logging.info('%s: %s', h, v)
      logging.info('-headers-end-')
      logging.info('-path-parameters-start-')
      for h, v in path_params.iteritems():
        logging.info('%s: %s', h, v)
      logging.info('-path-parameters-end-')
      logging.info('body: %s', body)
      logging.info('query: %s', query)
      logging.info('--request-end--')

  def request(self, headers, path_params, query_params, body_value):
    """Updates outgoing requests with a serialized body.

    Args:
      headers: dict, request headers
      path_params: dict, parameters that appear in the request path
      query_params: dict, parameters that appear in the query
      body_value: object, the request body as a Python object, which must be
                  serializable by simplejson.
    Returns:
      A tuple of (headers, path_params, query, body)

      headers: dict, request headers
      path_params: dict, parameters that appear in the request path
      query: string, query part of the request URI
      body: string, the body serialized as JSON
    """
    query = self._build_query(query_params)
    headers['accept'] = self.accept
    headers['accept-encoding'] = 'gzip, deflate'
    # Append the library identifier to any user-agent the caller supplied.
    if 'user-agent' in headers:
      headers['user-agent'] += ' '
    else:
      headers['user-agent'] = ''
    headers['user-agent'] += 'google-api-python-client/1.0'
    # Only set a content-type (and serialize) when there is a body to send.
    if body_value is not None:
      headers['content-type'] = self.content_type
      body_value = self.serialize(body_value)
    self._log_request(headers, path_params, query, body_value)
    return (headers, path_params, query, body_value)

  def _build_query(self, params):
    """Builds a query string.

    Args:
      params: dict, the query parameters

    Returns:
      The query parameters properly encoded into an HTTP URI query string.

    NOTE: mutates the caller's params dict by injecting the 'alt' parameter.
    """
    if self.alt_param is not None:
      params.update({'alt': self.alt_param})
    astuples = []
    for key, value in params.iteritems():
      if type(value) == type([]):
        # Repeated parameter: emit one key=value pair per element.
        # NOTE(review): assumes every list element has .encode (i.e. is a
        # string); non-string elements would raise here -- confirm callers.
        for x in value:
          x = x.encode('utf-8')
          astuples.append((key, x))
      else:
        # Encode string-like values; pass other types through to urlencode.
        if getattr(value, 'encode', False) and callable(value.encode):
          value = value.encode('utf-8')
        astuples.append((key, value))
    return '?' + urllib.urlencode(astuples)

  def _log_response(self, resp, content):
    """Logs debugging information about the response if requested."""
    if FLAGS.dump_request_response:
      logging.info('--response-start--')
      for h, v in resp.iteritems():
        logging.info('%s: %s', h, v)
      if content:
        logging.info(content)
      logging.info('--response-end--')

  def response(self, resp, content):
    """Convert the response wire format into a Python object.

    Args:
      resp: httplib2.Response, the HTTP response headers and status
      content: string, the body of the HTTP response

    Returns:
      The body de-serialized as a Python object.

    Raises:
      apiclient.errors.HttpError if a non 2xx response is received.
    """
    self._log_response(resp, content)
    # Error handling is TBD, for example, do we retry
    # for some operation/error combinations?
    if resp.status < 300:
      if resp.status == 204:
        # A 204: No Content response should be treated differently
        # to all the other success states
        return self.no_content_response
      return self.deserialize(content)
    else:
      logging.debug('Content from bad request was: %s' % content)
      raise HttpError(resp, content)

  def serialize(self, body_value):
    """Perform the actual Python object serialization.

    Args:
      body_value: object, the request body as a Python object.

    Returns:
      string, the body in serialized form.
    """
    # Abstract: wire-format specific, implemented by subclasses.
    _abstract()

  def deserialize(self, content):
    """Perform the actual deserialization from response string to Python
    object.

    Args:
      content: string, the body of the HTTP response

    Returns:
      The body de-serialized as a Python object.
    """
    # Abstract: wire-format specific, implemented by subclasses.
    _abstract()
class JsonModel(BaseModel):
  """Model class for JSON.

  Serializes and de-serializes between JSON and the Python
  object representation of HTTP request and response bodies.
  """

  accept = 'application/json'
  content_type = 'application/json'
  alt_param = 'json'

  def __init__(self, data_wrapper=False):
    """Construct a JsonModel.

    Args:
      data_wrapper: boolean, wrap requests and responses in a data wrapper
    """
    self._data_wrapper = data_wrapper

  def serialize(self, body_value):
    """Dump the body as a JSON string, adding a 'data' envelope if enabled."""
    needs_wrapper = (self._data_wrapper and
                     isinstance(body_value, dict) and
                     'data' not in body_value)
    if needs_wrapper:
      body_value = {'data': body_value}
    return simplejson.dumps(body_value)

  def deserialize(self, content):
    """Parse a JSON string, unwrapping a 'data' envelope when present."""
    body = simplejson.loads(content)
    if isinstance(body, dict) and 'data' in body:
      return body['data']
    return body

  @property
  def no_content_response(self):
    """A 204 No Content maps to an empty dict."""
    return {}
class RawModel(JsonModel):
  """Model class for requests that don't return JSON.

  Serializes and de-serializes between JSON and the Python
  object representation of HTTP request, and returns the raw bytes
  of the response body.
  """

  accept = '*/*'
  content_type = 'application/json'
  # No 'alt' query parameter: the server default response format is desired.
  alt_param = None

  def deserialize(self, content):
    # The response body is returned untouched; no JSON parsing is applied.
    return content

  @property
  def no_content_response(self):
    # A 204 No Content maps to the empty string.
    return ''
class MediaModel(JsonModel):
  """Model class for requests that return Media.

  Serializes and de-serializes between JSON and the Python
  object representation of HTTP request, and returns the raw bytes
  of the response body.
  """

  accept = '*/*'
  content_type = 'application/json'
  # Ask the server for the raw media bytes rather than a JSON resource.
  alt_param = 'media'

  def deserialize(self, content):
    # The response is raw media content; return it untouched.
    return content

  @property
  def no_content_response(self):
    # A 204 No Content maps to the empty string.
    return ''
class ProtocolBufferModel(BaseModel):
  """Model class for protocol buffers.

  Serializes and de-serializes the binary protocol buffer sent in the HTTP
  request and response bodies.
  """

  accept = 'application/x-protobuf'
  content_type = 'application/x-protobuf'
  alt_param = 'proto'

  def __init__(self, protocol_buffer):
    """Constructs a ProtocolBufferModel.

    The serialized protocol buffer returned in an HTTP response will be
    de-serialized using the given protocol buffer class.

    Args:
      protocol_buffer: The protocol buffer class used to de-serialize a
          response from the API.
    """
    self._protocol_buffer = protocol_buffer

  def serialize(self, body_value):
    # body_value is expected to be a protocol buffer message instance.
    return body_value.SerializeToString()

  def deserialize(self, content):
    # Parse the binary wire format into a message of the configured class.
    return self._protocol_buffer.FromString(content)

  @property
  def no_content_response(self):
    # An empty message instance stands in for a 204 No Content body.
    return self._protocol_buffer()
def makepatch(original, modified):
  """Create a patch object.

  Some methods support PATCH, an efficient way to send updates to a resource.
  This method allows the easy construction of patch bodies by looking at the
  differences between a resource before and after it was modified.

  Args:
    original: object, the original deserialized resource
    modified: object, the modified deserialized resource

  Returns:
    An object that contains only the changes from original to modified, in a
    form suitable to pass to a PATCH method.

  Example usage:
    item = service.activities().get(postid=postid, userid=userid).execute()
    original = copy.deepcopy(item)
    item['object']['content'] = 'This is updated.'
    service.activities.patch(postid=postid, userid=userid,
      body=makepatch(original, item)).execute()

  NOTE: a key whose value is None in `modified` is indistinguishable from a
  deleted key; both produce a None (deletion marker) in the patch.
  """
  patch = {}
  # Plain key iteration works on both Python 2 and 3 (unlike iteritems()).
  for key in original:
    original_value = original[key]
    modified_value = modified.get(key, None)
    if modified_value is None:
      # Use None to signal that the element is deleted
      patch[key] = None
    elif original_value != modified_value:
      if isinstance(original_value, dict):
        # Recursively descend objects
        patch[key] = makepatch(original_value, modified_value)
      else:
        # In the case of simple types or arrays we just replace
        patch[key] = modified_value
    else:
      # Don't add anything to patch if there's no change
      pass
  # Keys present only in the modified resource are additions.
  for key in modified:
    if key not in original:
      patch[key] = modified[key]
  return patch
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility module to import a JSON module
Hides all the messy details of exactly where
we get a simplejson module from.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
try: # pragma: no cover
import simplejson
except ImportError: # pragma: no cover
try:
# Try to import from django, should work on App Engine
from django.utils import simplejson
except ImportError:
# Should work for Python2.6 and higher.
import json as simplejson
| Python |
__version__ = "1.0c2"
| Python |
import Cookie
import datetime
import time
import email.utils
import calendar
import base64
import hashlib
import hmac
import re
import logging
# Ripped from the Tornado Framework's web.py
# http://github.com/facebook/tornado/commit/39ac6d169a36a54bb1f6b9bf1fdebb5c9da96e09
#
# Tornado is licensed under the Apache Licence, Version 2.0
# (http://www.apache.org/licenses/LICENSE-2.0.html).
#
# Example:
# from vendor.prayls.lilcookies import LilCookies
# cookieutil = LilCookies(self, application_settings['cookie_secret'])
# cookieutil.set_secure_cookie(name = 'mykey', value = 'myvalue', expires_days= 365*100)
# cookieutil.get_secure_cookie(name = 'mykey')
class LilCookies:
  """Secure-cookie helper ripped from the Tornado Framework's web.py.

  Signs cookie values with HMAC-SHA1 so they cannot be forged, and
  verifies/expires them on the way back in.

  Example:
    cookieutil = LilCookies(self, application_settings['cookie_secret'])
    cookieutil.set_secure_cookie(name='mykey', value='myvalue',
                                 expires_days=365*100)
    cookieutil.get_secure_cookie(name='mykey')
  """

  @staticmethod
  def _utf8(s):
    """Return s as UTF-8 encoded str; unicode is encoded, str passes through."""
    if isinstance(s, unicode):
      return s.encode("utf-8")
    assert isinstance(s, str)
    return s

  @staticmethod
  def _time_independent_equals(a, b):
    """Compare two strings in constant time to avoid timing attacks."""
    if len(a) != len(b):
      return False
    result = 0
    # Always scan the full string so run time does not depend on where the
    # first mismatch occurs.
    for x, y in zip(a, b):
      result |= ord(x) ^ ord(y)
    return result == 0

  @staticmethod
  def _signature_from_secret(cookie_secret, *parts):
    """Takes a secret salt value to create an HMAC-SHA1 signature for the
    values in the `parts` param."""
    # 'digest' rather than 'hash' to avoid shadowing the builtin.
    digest = hmac.new(cookie_secret, digestmod=hashlib.sha1)
    for part in parts:
      digest.update(part)
    return digest.hexdigest()

  @staticmethod
  def _signed_cookie_value(cookie_secret, name, value):
    """Returns a signed value for use in a cookie.

    The result is 'base64(value)|timestamp|signature'. The timestamp lets
    verification enforce an expiry; the signature covers the cookie name,
    the encoded value and the timestamp.
    """
    timestamp = str(int(time.time()))
    value = base64.b64encode(value)
    signature = LilCookies._signature_from_secret(cookie_secret, name, value, timestamp)
    return "|".join([value, timestamp, signature])

  @staticmethod
  def _verified_cookie_value(cookie_secret, name, signed_value):
    """Returns the un-encrypted value given the signed value if it validates, or None."""
    value = signed_value
    if not value: return None
    parts = value.split("|")
    if len(parts) != 3: return None
    signature = LilCookies._signature_from_secret(cookie_secret, name, parts[0], parts[1])
    if not LilCookies._time_independent_equals(parts[2], signature):
      logging.warning("Invalid cookie signature %r", value)
      return None
    timestamp = int(parts[1])
    # Reject cookies older than 31 days even when the signature is valid.
    if timestamp < time.time() - 31 * 86400:
      logging.warning("Expired cookie %r", value)
      return None
    try:
      return base64.b64decode(parts[0])
    except Exception:
      # Corrupt base64 payload: treat the cookie as invalid.
      return None

  def __init__(self, handler, cookie_secret):
    """You must specify the cookie_secret to use any of the secure methods.
    It should be a long, random sequence of bytes to be used as the HMAC
    secret for the signature.
    """
    if len(cookie_secret) < 45:
      raise ValueError("LilCookies cookie_secret should at least be 45 characters long, but got `%s`" % cookie_secret)
    self.handler = handler
    self.request = handler.request
    self.response = handler.response
    self.cookie_secret = cookie_secret

  def cookies(self):
    """A dictionary of Cookie.Morsel objects, parsed lazily on first use."""
    if not hasattr(self, "_cookies"):
      self._cookies = Cookie.BaseCookie()
      if "Cookie" in self.request.headers:
        try:
          self._cookies.load(self.request.headers["Cookie"])
        except Exception:
          # Malformed Cookie header: drop everything rather than serve a
          # partially parsed cookie jar.
          self.clear_all_cookies()
    return self._cookies

  def get_cookie(self, name, default=None):
    """Gets the value of the cookie with the given name, else default."""
    if name in self.cookies():
      return self._cookies[name].value
    return default

  def set_cookie(self, name, value, domain=None, expires=None, path="/",
                 expires_days=None, **kwargs):
    """Sets the given cookie name/value with the given options.

    Additional keyword arguments are set on the Cookie.Morsel
    directly.
    See http://docs.python.org/library/cookie.html#morsel-objects
    for available attributes.
    """
    name = LilCookies._utf8(name)
    value = LilCookies._utf8(value)
    if re.search(r"[\x00-\x20]", name + value):
      # Don't let us accidentally inject bad stuff
      raise ValueError("Invalid cookie %r: %r" % (name, value))
    if not hasattr(self, "_new_cookies"):
      self._new_cookies = []
    new_cookie = Cookie.BaseCookie()
    self._new_cookies.append(new_cookie)
    new_cookie[name] = value
    if domain:
      new_cookie[name]["domain"] = domain
    if expires_days is not None and not expires:
      expires = datetime.datetime.utcnow() + datetime.timedelta(days=expires_days)
    if expires:
      timestamp = calendar.timegm(expires.utctimetuple())
      new_cookie[name]["expires"] = email.utils.formatdate(
          timestamp, localtime=False, usegmt=True)
    if path:
      new_cookie[name]["path"] = path
    for k, v in kwargs.iteritems():
      new_cookie[name][k] = v
    # The 2 lines below were not in Tornado. Instead, they output all their
    # cookies to the headers at once before a response flush.
    for vals in new_cookie.values():
      self.response.headers._headers.append(('Set-Cookie', vals.OutputString(None)))

  def clear_cookie(self, name, path="/", domain=None):
    """Deletes the cookie with the given name."""
    # An expiry date in the past makes the browser drop the cookie.
    expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
    self.set_cookie(name, value="", path=path, expires=expires,
                    domain=domain)

  def clear_all_cookies(self):
    """Deletes all the cookies the user sent with this request."""
    for name in self.cookies().iterkeys():
      self.clear_cookie(name)

  def set_secure_cookie(self, name, value, expires_days=30, **kwargs):
    """Signs and timestamps a cookie so it cannot be forged.

    To read a cookie set with this method, use get_secure_cookie().
    """
    value = LilCookies._signed_cookie_value(self.cookie_secret, name, value)
    self.set_cookie(name, value, expires_days=expires_days, **kwargs)

  def get_secure_cookie(self, name, value=None):
    """Returns the given signed cookie if it validates, or None."""
    if value is None: value = self.get_cookie(name)
    return LilCookies._verified_cookie_value(self.cookie_secret, name, value)

  def _cookie_signature(self, *parts):
    """Sign `parts` with this instance's secret.

    Bug fix: the parts were previously dropped (not forwarded), so every
    call returned the same signature regardless of input.
    """
    return LilCookies._signature_from_secret(self.cookie_secret, *parts)
| Python |
#!/usr/bin/env python
#
# Copyright (c) 2002, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# ---
# Author: Chad Lester
# Design and style contributions by:
# Amit Patel, Bogdan Cocosel, Daniel Dulitz, Eric Tiedemann,
# Eric Veach, Laurence Gonsalves, Matthew Springer
# Code reorganized a bit by Craig Silverstein
"""This module is used to define and parse command line flags.
This module defines a *distributed* flag-definition policy: rather than
an application having to define all flags in or near main(), each python
module defines flags that are useful to it. When one python module
imports another, it gains access to the other's flags. (This is
implemented by having all modules share a common, global registry object
containing all the flag information.)
Flags are defined through the use of one of the DEFINE_xxx functions.
The specific function used determines how the flag is parsed, checked,
and optionally type-converted, when it's seen on the command line.
IMPLEMENTATION: DEFINE_* creates a 'Flag' object and registers it with a
'FlagValues' object (typically the global FlagValues FLAGS, defined
here). The 'FlagValues' object can scan the command line arguments and
pass flag arguments to the corresponding 'Flag' objects for
value-checking and type conversion. The converted flag values are
available as attributes of the 'FlagValues' object.
Code can access the flag through a FlagValues object, for instance
gflags.FLAGS.myflag. Typically, the __main__ module passes the command
line arguments to gflags.FLAGS for parsing.
At bottom, this module calls getopt(), so getopt functionality is
supported, including short- and long-style flags, and the use of -- to
terminate flags.
Methods defined by the flag module will throw 'FlagsError' exceptions.
The exception argument will be a human-readable string.
FLAG TYPES: This is a list of the DEFINE_*'s that you can do. All flags
take a name, default value, help-string, and optional 'short' name
(one-letter name). Some flags have other arguments, which are described
with the flag.
DEFINE_string: takes any input, and interprets it as a string.
DEFINE_bool or
DEFINE_boolean: typically does not take an argument: say --myflag to
set FLAGS.myflag to true, or --nomyflag to set
FLAGS.myflag to false. Alternately, you can say
--myflag=true or --myflag=t or --myflag=1 or
--myflag=false or --myflag=f or --myflag=0
DEFINE_float: takes an input and interprets it as a floating point
number. Takes optional args lower_bound and upper_bound;
if the number specified on the command line is out of
range, it will raise a FlagError.
DEFINE_integer: takes an input and interprets it as an integer. Takes
optional args lower_bound and upper_bound as for floats.
DEFINE_enum: takes a list of strings which represents legal values. If
the command-line value is not in this list, raise a flag
error. Otherwise, assign to FLAGS.flag as a string.
DEFINE_list: Takes a comma-separated list of strings on the commandline.
Stores them in a python list object.
DEFINE_spaceseplist: Takes a space-separated list of strings on the
commandline. Stores them in a python list object.
Example: --myspacesepflag "foo bar baz"
DEFINE_multistring: The same as DEFINE_string, except the flag can be
specified more than once on the commandline. The
result is a python list object (list of strings),
even if the flag is only on the command line once.
DEFINE_multi_int: The same as DEFINE_integer, except the flag can be
specified more than once on the commandline. The
result is a python list object (list of ints), even if
the flag is only on the command line once.
SPECIAL FLAGS: There are a few flags that have special meaning:
--help prints a list of all the flags in a human-readable fashion
--helpshort prints a list of all key flags (see below).
--helpxml prints a list of all flags, in XML format. DO NOT parse
the output of --help and --helpshort. Instead, parse
the output of --helpxml. For more info, see
"OUTPUT FOR --helpxml" below.
--flagfile=foo read flags from file foo.
--undefok=f1,f2 ignore unrecognized option errors for f1,f2.
For boolean flags, you should use --undefok=boolflag, and
--boolflag and --noboolflag will be accepted. Do not use
--undefok=noboolflag.
-- as in getopt(), terminates flag-processing
FLAGS VALIDATORS: If your program:
- requires flag X to be specified
- needs flag Y to match a regular expression
- or requires any more general constraint to be satisfied
then validators are for you!
Each validator represents a constraint over one flag, which is enforced
starting from the initial parsing of the flags and until the program
terminates.
Also, lower_bound and upper_bound for numerical flags are enforced using flag
validators.
Howto:
If you want to enforce a constraint over one flag, use
gflags.RegisterValidator(flag_name,
checker,
message='Flag validation failed',
flag_values=FLAGS)
After flag values are initially parsed, and after any change to the specified
flag, method checker(flag_value) will be executed. If constraint is not
satisfied, an IllegalFlagValue exception will be raised. See
RegisterValidator's docstring for a detailed explanation on how to construct
your own checker.
EXAMPLE USAGE:
FLAGS = gflags.FLAGS
gflags.DEFINE_integer('my_version', 0, 'Version number.')
gflags.DEFINE_string('filename', None, 'Input file name', short_name='f')
gflags.RegisterValidator('my_version',
lambda value: value % 2 == 0,
message='--my_version must be divisible by 2')
gflags.MarkFlagAsRequired('filename')
NOTE ON --flagfile:
Flags may be loaded from text files in addition to being specified on
the commandline.
Any flags you don't feel like typing, throw them in a file, one flag per
line, for instance:
--myflag=myvalue
--nomyboolean_flag
You then specify your file with the special flag '--flagfile=somefile'.
You CAN recursively nest flagfile= tokens OR use multiple files on the
command line. Lines beginning with a single hash '#' or a double slash
'//' are comments in your flagfile.
Any flagfile=<file> will be interpreted as having a relative path from
the current working directory rather than from the place the file was
included from:
myPythonScript.py --flagfile=config/somefile.cfg
If somefile.cfg includes further --flagfile= directives, these will be
referenced relative to the original CWD, not from the directory the
including flagfile was found in!
The caveat applies to people who are including a series of nested files
in a different dir than they are executing out of. Relative path names
are always from CWD, not from the directory of the parent include
flagfile. We do now support '~' expanded directory names.
Absolute path names ALWAYS work!
EXAMPLE USAGE:
FLAGS = gflags.FLAGS
# Flag names are globally defined! So in general, we need to be
# careful to pick names that are unlikely to be used by other libraries.
# If there is a conflict, we'll get an error at import time.
gflags.DEFINE_string('name', 'Mr. President', 'your name')
gflags.DEFINE_integer('age', None, 'your age in years', lower_bound=0)
gflags.DEFINE_boolean('debug', False, 'produces debugging output')
gflags.DEFINE_enum('gender', 'male', ['male', 'female'], 'your gender')
def main(argv):
try:
argv = FLAGS(argv) # parse flags
except gflags.FlagsError, e:
print '%s\\nUsage: %s ARGS\\n%s' % (e, sys.argv[0], FLAGS)
sys.exit(1)
if FLAGS.debug: print 'non-flag arguments:', argv
print 'Happy Birthday', FLAGS.name
if FLAGS.age is not None:
print 'You are a %d year old %s' % (FLAGS.age, FLAGS.gender)
if __name__ == '__main__':
main(sys.argv)
KEY FLAGS:
As we already explained, each module gains access to all flags defined
by all the other modules it transitively imports. In the case of
non-trivial scripts, this means a lot of flags ... For documentation
purposes, it is good to identify the flags that are key (i.e., really
important) to a module. Clearly, the concept of "key flag" is a
subjective one. When trying to determine whether a flag is key to a
module or not, assume that you are trying to explain your module to a
potential user: which flags would you really like to mention first?
We'll describe shortly how to declare which flags are key to a module.
For the moment, assume we know the set of key flags for each module.
Then, if you use the app.py module, you can use the --helpshort flag to
print only the help for the flags that are key to the main module, in a
human-readable format.
NOTE: If you need to parse the flag help, do NOT use the output of
--help / --helpshort. That output is meant for human consumption, and
may be changed in the future. Instead, use --helpxml; flags that are
key for the main module are marked there with a <key>yes</key> element.
The set of key flags for a module M is composed of:
1. Flags defined by module M by calling a DEFINE_* function.
2. Flags that module M explicitly declares as key by using the function
DECLARE_key_flag(<flag_name>)
3. Key flags of other modules that M specifies by using the function
ADOPT_module_key_flags(<other_module>)
This is a "bulk" declaration of key flags: each flag that is key for
<other_module> becomes key for the current module too.
Notice that if you do not use the functions described at points 2 and 3
above, then --helpshort prints information only about the flags defined
by the main module of our script. In many cases, this behavior is good
enough. But if you move part of the main module code (together with the
related flags) into a different module, then it is nice to use
DECLARE_key_flag / ADOPT_module_key_flags and make sure --helpshort
lists all relevant flags (otherwise, your code refactoring may confuse
your users).
Note: each of DECLARE_key_flag / ADOPT_module_key_flags has its own
pluses and minuses: DECLARE_key_flag is more targeted and may lead a
more focused --helpshort documentation. ADOPT_module_key_flags is good
for cases when an entire module is considered key to the current script.
Also, it does not require updates to client scripts when a new flag is
added to the module.
EXAMPLE USAGE 2 (WITH KEY FLAGS):
Consider an application that contains the following three files (two
auxiliary modules and a main module)
File libfoo.py:
import gflags
gflags.DEFINE_integer('num_replicas', 3, 'Number of replicas to start')
gflags.DEFINE_boolean('rpc2', True, 'Turn on the usage of RPC2.')
... some code ...
File libbar.py:
import gflags
gflags.DEFINE_string('bar_gfs_path', '/gfs/path',
'Path to the GFS files for libbar.')
gflags.DEFINE_string('email_for_bar_errors', 'bar-team@google.com',
'Email address for bug reports about module libbar.')
gflags.DEFINE_boolean('bar_risky_hack', False,
'Turn on an experimental and buggy optimization.')
... some code ...
File myscript.py:
import gflags
import libfoo
import libbar
gflags.DEFINE_integer('num_iterations', 0, 'Number of iterations.')
# Declare that all flags that are key for libfoo are
# key for this module too.
gflags.ADOPT_module_key_flags(libfoo)
# Declare that the flag --bar_gfs_path (defined in libbar) is key
# for this module.
gflags.DECLARE_key_flag('bar_gfs_path')
... some code ...
When myscript is invoked with the flag --helpshort, the resulted help
message lists information about all the key flags for myscript:
--num_iterations, --num_replicas, --rpc2, and --bar_gfs_path.
Of course, myscript uses all the flags declared by it (in this case,
just --num_iterations) or by any of the modules it transitively imports
(e.g., the modules libfoo, libbar). E.g., it can access the value of
FLAGS.bar_risky_hack, even if --bar_risky_hack is not declared as a key
flag for myscript.
OUTPUT FOR --helpxml:
The --helpxml flag generates output with the following structure:
<?xml version="1.0"?>
<AllFlags>
<program>PROGRAM_BASENAME</program>
<usage>MAIN_MODULE_DOCSTRING</usage>
(<flag>
[<key>yes</key>]
<file>DECLARING_MODULE</file>
<name>FLAG_NAME</name>
<meaning>FLAG_HELP_MESSAGE</meaning>
<default>DEFAULT_FLAG_VALUE</default>
<current>CURRENT_FLAG_VALUE</current>
<type>FLAG_TYPE</type>
[OPTIONAL_ELEMENTS]
</flag>)*
</AllFlags>
Notes:
1. The output is intentionally similar to the output generated by the
C++ command-line flag library. The few differences are due to the
Python flags that do not have a C++ equivalent (at least not yet),
e.g., DEFINE_list.
2. New XML elements may be added in the future.
3. DEFAULT_FLAG_VALUE is in serialized form, i.e., the string you can
pass for this flag on the command-line. E.g., for a flag defined
using DEFINE_list, this field may be foo,bar, not ['foo', 'bar'].
4. CURRENT_FLAG_VALUE is produced using str(). This means that the
string 'false' will be represented in the same way as the boolean
False. Using repr() would have removed this ambiguity and simplified
parsing, but would have broken the compatibility with the C++
command-line flags.
5. OPTIONAL_ELEMENTS describe elements relevant for certain kinds of
flags: lower_bound, upper_bound (for flags that specify bounds),
enum_value (for enum flags), list_separator (for flags that consist of
a list of values, separated by a special token).
6. We do not provide any example here: please use --helpxml instead.
This module requires at least python 2.2.1 to run.
"""
import cgi
import getopt
import os
import re
import string
import struct
import sys
# pylint: disable-msg=C6204
try:
import fcntl
except ImportError:
fcntl = None
try:
# Importing termios will fail on non-unix platforms.
import termios
except ImportError:
termios = None
import gflags_validators
# pylint: enable-msg=C6204
# Are we running under pychecker?
_RUNNING_PYCHECKER = 'pychecker.python' in sys.modules
def _GetCallingModuleObjectAndName():
"""Returns the module that's calling into this module.
We generally use this function to get the name of the module calling a
DEFINE_foo... function.
"""
# Walk down the stack to find the first globals dict that's not ours.
for depth in range(1, sys.getrecursionlimit()):
if not sys._getframe(depth).f_globals is globals():
globals_for_frame = sys._getframe(depth).f_globals
module, module_name = _GetModuleObjectAndName(globals_for_frame)
if module_name is not None:
return module, module_name
raise AssertionError("No module was found")
def _GetCallingModule():
"""Returns the name of the module that's calling into this module."""
return _GetCallingModuleObjectAndName()[1]
def _GetThisModuleObjectAndName():
"""Returns: (module object, module name) for this module."""
return _GetModuleObjectAndName(globals())
# module exceptions:
class FlagsError(Exception):
"""The base class for all flags errors."""
pass
class DuplicateFlag(FlagsError):
"""Raised if there is a flag naming conflict."""
pass
class CantOpenFlagFileError(FlagsError):
"""Raised if flagfile fails to open: doesn't exist, wrong permissions, etc."""
pass
class DuplicateFlagCannotPropagateNoneToSwig(DuplicateFlag):
"""Special case of DuplicateFlag -- SWIG flag value can't be set to None.
This can be raised when a duplicate flag is created. Even if allow_override is
True, we still abort if the new value is None, because it's currently
impossible to pass None default value back to SWIG. See FlagValues.SetDefault
for details.
"""
pass
class DuplicateFlagError(DuplicateFlag):
"""A DuplicateFlag whose message cites the conflicting definitions.
A DuplicateFlagError conveys more information than a DuplicateFlag,
namely the modules where the conflicting definitions occur. This
class was created to avoid breaking external modules which depend on
the existing DuplicateFlags interface.
"""
def __init__(self, flagname, flag_values, other_flag_values=None):
"""Create a DuplicateFlagError.
Args:
flagname: Name of the flag being redefined.
flag_values: FlagValues object containing the first definition of
flagname.
other_flag_values: If this argument is not None, it should be the
FlagValues object where the second definition of flagname occurs.
If it is None, we assume that we're being called when attempting
to create the flag a second time, and we use the module calling
this one as the source of the second definition.
"""
self.flagname = flagname
first_module = flag_values.FindModuleDefiningFlag(
flagname, default='<unknown>')
if other_flag_values is None:
second_module = _GetCallingModule()
else:
second_module = other_flag_values.FindModuleDefiningFlag(
flagname, default='<unknown>')
msg = "The flag '%s' is defined twice. First from %s, Second from %s" % (
self.flagname, first_module, second_module)
DuplicateFlag.__init__(self, msg)
class IllegalFlagValue(FlagsError):
"""The flag command line argument is illegal."""
pass
class UnrecognizedFlag(FlagsError):
"""Raised if a flag is unrecognized."""
pass
# An UnrecognizedFlagError conveys more information than an UnrecognizedFlag.
# Since there are external modules that create DuplicateFlags, the interface to
# DuplicateFlag shouldn't change. The flagvalue will be assigned the full value
# of the flag and its argument, if any, allowing handling of unrecognized flags
# in an exception handler.
# If flagvalue is the empty string, then this exception is due to a
# reference to a flag that was not already defined.
class UnrecognizedFlagError(UnrecognizedFlag):
def __init__(self, flagname, flagvalue=''):
self.flagname = flagname
self.flagvalue = flagvalue
UnrecognizedFlag.__init__(
self, "Unknown command line flag '%s'" % flagname)
# Global variable used by expvar
_exported_flags = {}
_help_width = 80 # width of help output
def GetHelpWidth():
"""Returns: an integer, the width of help lines that is used in TextWrap."""
if (not sys.stdout.isatty()) or (termios is None) or (fcntl is None):
return _help_width
try:
data = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, '1234')
columns = struct.unpack('hh', data)[1]
# Emacs mode returns 0.
# Here we assume that any value below 40 is unreasonable
if columns >= 40:
return columns
# Returning an int as default is fine, int(int) just return the int.
return int(os.getenv('COLUMNS', _help_width))
except (TypeError, IOError, struct.error):
return _help_width
def CutCommonSpacePrefix(text):
"""Removes a common space prefix from the lines of a multiline text.
If the first line does not start with a space, it is left as it is and
only in the remaining lines a common space prefix is being searched
for. That means the first line will stay untouched. This is especially
useful to turn doc strings into help texts. This is because some
people prefer to have the doc comment start already after the
apostrophe and then align the following lines while others have the
apostrophes on a separate line.
The function also drops trailing empty lines and ignores empty lines
following the initial content line while calculating the initial
common whitespace.
Args:
text: text to work on
Returns:
the resulting text
"""
text_lines = text.splitlines()
# Drop trailing empty lines
while text_lines and not text_lines[-1]:
text_lines = text_lines[:-1]
if text_lines:
# We got some content, is the first line starting with a space?
if text_lines[0] and text_lines[0][0].isspace():
text_first_line = []
else:
text_first_line = [text_lines.pop(0)]
# Calculate length of common leading whitespace (only over content lines)
common_prefix = os.path.commonprefix([line for line in text_lines if line])
space_prefix_len = len(common_prefix) - len(common_prefix.lstrip())
# If we have a common space prefix, drop it from all lines
if space_prefix_len:
for index in xrange(len(text_lines)):
if text_lines[index]:
text_lines[index] = text_lines[index][space_prefix_len:]
return '\n'.join(text_first_line + text_lines)
return ''
def TextWrap(text, length=None, indent='', firstline_indent=None, tabs=' '):
"""Wraps a given text to a maximum line length and returns it.
We turn lines that only contain whitespace into empty lines. We keep
new lines and tabs (e.g., we do not treat tabs as spaces).
Args:
text: text to wrap
length: maximum length of a line, includes indentation
if this is None then use GetHelpWidth()
indent: indent for all but first line
firstline_indent: indent for first line; if None, fall back to indent
tabs: replacement for tabs
Returns:
wrapped text
Raises:
FlagsError: if indent not shorter than length
FlagsError: if firstline_indent not shorter than length
"""
# Get defaults where callee used None
if length is None:
length = GetHelpWidth()
if indent is None:
indent = ''
if len(indent) >= length:
raise FlagsError('Indent must be shorter than length')
# In line we will be holding the current line which is to be started
# with indent (or firstline_indent if available) and then appended
# with words.
if firstline_indent is None:
firstline_indent = ''
line = indent
else:
line = firstline_indent
if len(firstline_indent) >= length:
raise FlagsError('First line indent must be shorter than length')
# If the callee does not care about tabs we simply convert them to
# spaces If callee wanted tabs to be single space then we do that
# already here.
if not tabs or tabs == ' ':
text = text.replace('\t', ' ')
else:
tabs_are_whitespace = not tabs.strip()
line_regex = re.compile('([ ]*)(\t*)([^ \t]+)', re.MULTILINE)
# Split the text into lines and the lines with the regex above. The
# resulting lines are collected in result[]. For each split we get the
# spaces, the tabs and the next non white space (e.g. next word).
result = []
for text_line in text.splitlines():
# Store result length so we can find out whether processing the next
# line gave any new content
old_result_len = len(result)
# Process next line with line_regex. For optimization we do an rstrip().
# - process tabs (changes either line or word, see below)
# - process word (first try to squeeze on line, then wrap or force wrap)
# Spaces found on the line are ignored, they get added while wrapping as
# needed.
for spaces, current_tabs, word in line_regex.findall(text_line.rstrip()):
# If tabs weren't converted to spaces, handle them now
if current_tabs:
# If the last thing we added was a space anyway then drop
# it. But let's not get rid of the indentation.
if (((result and line != indent) or
(not result and line != firstline_indent)) and line[-1] == ' '):
line = line[:-1]
# Add the tabs, if that means adding whitespace, just add it at
# the line, the rstrip() code while shorten the line down if
# necessary
if tabs_are_whitespace:
line += tabs * len(current_tabs)
else:
# if not all tab replacement is whitespace we prepend it to the word
word = tabs * len(current_tabs) + word
# Handle the case where word cannot be squeezed onto current last line
if len(line) + len(word) > length and len(indent) + len(word) <= length:
result.append(line.rstrip())
line = indent + word
word = ''
# No space left on line or can we append a space?
if len(line) + 1 >= length:
result.append(line.rstrip())
line = indent
else:
line += ' '
# Add word and shorten it up to allowed line length. Restart next
# line with indent and repeat, or add a space if we're done (word
# finished) This deals with words that cannot fit on one line
# (e.g. indent + word longer than allowed line length).
while len(line) + len(word) >= length:
line += word
result.append(line[:length])
word = line[length:]
line = indent
# Default case, simply append the word and a space
if word:
line += word + ' '
# End of input line. If we have content we finish the line. If the
# current line is just the indent but we had content in during this
# original line then we need to add an empty line.
if (result and line != indent) or (not result and line != firstline_indent):
result.append(line.rstrip())
elif len(result) == old_result_len:
result.append('')
line = indent
return '\n'.join(result)
def DocToHelp(doc):
"""Takes a __doc__ string and reformats it as help."""
# Get rid of starting and ending white space. Using lstrip() or even
# strip() could drop more than maximum of first line and right space
# of last line.
doc = doc.strip()
# Get rid of all empty lines
whitespace_only_line = re.compile('^[ \t]+$', re.M)
doc = whitespace_only_line.sub('', doc)
# Cut out common space at line beginnings
doc = CutCommonSpacePrefix(doc)
# Just like this module's comment, comments tend to be aligned somehow.
# In other words they all start with the same amount of white space
# 1) keep double new lines
# 2) keep ws after new lines if not empty line
# 3) all other new lines shall be changed to a space
# Solution: Match new lines between non white space and replace with space.
doc = re.sub('(?<=\S)\n(?=\S)', ' ', doc, re.M)
return doc
def _GetModuleObjectAndName(globals_dict):
"""Returns the module that defines a global environment, and its name.
Args:
globals_dict: A dictionary that should correspond to an environment
providing the values of the globals.
Returns:
A pair consisting of (1) module object and (2) module name (a
string). Returns (None, None) if the module could not be
identified.
"""
# The use of .items() (instead of .iteritems()) is NOT a mistake: if
# a parallel thread imports a module while we iterate over
# .iteritems() (not nice, but possible), we get a RuntimeError ...
# Hence, we use the slightly slower but safer .items().
for name, module in sys.modules.items():
if getattr(module, '__dict__', None) is globals_dict:
if name == '__main__':
# Pick a more informative name for the main module.
name = sys.argv[0]
return (module, name)
return (None, None)
def _GetMainModule():
"""Returns: string, name of the module from which execution started."""
# First, try to use the same logic used by _GetCallingModuleObjectAndName(),
# i.e., call _GetModuleObjectAndName(). For that we first need to
# find the dictionary that the main module uses to store the
# globals.
#
# That's (normally) the same dictionary object that the deepest
# (oldest) stack frame is using for globals.
deepest_frame = sys._getframe(0)
while deepest_frame.f_back is not None:
deepest_frame = deepest_frame.f_back
globals_for_main_module = deepest_frame.f_globals
main_module_name = _GetModuleObjectAndName(globals_for_main_module)[1]
# The above strategy fails in some cases (e.g., tools that compute
# code coverage by redefining, among other things, the main module).
# If so, just use sys.argv[0]. We can probably always do this, but
# it's safest to try to use the same logic as _GetCallingModuleObjectAndName()
if main_module_name is None:
main_module_name = sys.argv[0]
return main_module_name
class FlagValues:
"""Registry of 'Flag' objects.
A 'FlagValues' can then scan command line arguments, passing flag
arguments through to the 'Flag' objects that it owns. It also
provides easy access to the flag values. Typically only one
'FlagValues' object is needed by an application: gflags.FLAGS
This class is heavily overloaded:
'Flag' objects are registered via __setitem__:
FLAGS['longname'] = x # register a new flag
The .value attribute of the registered 'Flag' objects can be accessed
as attributes of this 'FlagValues' object, through __getattr__. Both
the long and short name of the original 'Flag' objects can be used to
access its value:
FLAGS.longname # parsed flag value
FLAGS.x # parsed flag value (short name)
Command line arguments are scanned and passed to the registered 'Flag'
objects through the __call__ method. Unparsed arguments, including
argv[0] (e.g. the program name) are returned.
argv = FLAGS(sys.argv) # scan command line arguments
The original registered Flag objects can be retrieved through the use
of the dictionary-like operator, __getitem__:
x = FLAGS['longname'] # access the registered Flag object
The str() operator of a 'FlagValues' object provides help for all of
the registered 'Flag' objects.
"""
def __init__(self):
# Since everything in this class is so heavily overloaded, the only
# way of defining and using fields is to access __dict__ directly.
# Dictionary: flag name (string) -> Flag object.
self.__dict__['__flags'] = {}
# Dictionary: module name (string) -> list of Flag objects that are defined
# by that module.
self.__dict__['__flags_by_module'] = {}
# Dictionary: module id (int) -> list of Flag objects that are defined by
# that module.
self.__dict__['__flags_by_module_id'] = {}
# Dictionary: module name (string) -> list of Flag objects that are
# key for that module.
self.__dict__['__key_flags_by_module'] = {}
# Set if we should use new style gnu_getopt rather than getopt when parsing
# the args. Only possible with Python 2.3+
self.UseGnuGetOpt(False)
def UseGnuGetOpt(self, use_gnu_getopt=True):
"""Use GNU-style scanning. Allows mixing of flag and non-flag arguments.
See http://docs.python.org/library/getopt.html#getopt.gnu_getopt
Args:
use_gnu_getopt: wether or not to use GNU style scanning.
"""
self.__dict__['__use_gnu_getopt'] = use_gnu_getopt
def IsGnuGetOpt(self):
return self.__dict__['__use_gnu_getopt']
def FlagDict(self):
return self.__dict__['__flags']
def FlagsByModuleDict(self):
"""Returns the dictionary of module_name -> list of defined flags.
Returns:
A dictionary. Its keys are module names (strings). Its values
are lists of Flag objects.
"""
return self.__dict__['__flags_by_module']
def FlagsByModuleIdDict(self):
"""Returns the dictionary of module_id -> list of defined flags.
Returns:
A dictionary. Its keys are module IDs (ints). Its values
are lists of Flag objects.
"""
return self.__dict__['__flags_by_module_id']
def KeyFlagsByModuleDict(self):
"""Returns the dictionary of module_name -> list of key flags.
Returns:
A dictionary. Its keys are module names (strings). Its values
are lists of Flag objects.
"""
return self.__dict__['__key_flags_by_module']
def _RegisterFlagByModule(self, module_name, flag):
"""Records the module that defines a specific flag.
We keep track of which flag is defined by which module so that we
can later sort the flags by module.
Args:
module_name: A string, the name of a Python module.
flag: A Flag object, a flag that is key to the module.
"""
flags_by_module = self.FlagsByModuleDict()
flags_by_module.setdefault(module_name, []).append(flag)
def _RegisterFlagByModuleId(self, module_id, flag):
"""Records the module that defines a specific flag.
Args:
module_id: An int, the ID of the Python module.
flag: A Flag object, a flag that is key to the module.
"""
flags_by_module_id = self.FlagsByModuleIdDict()
flags_by_module_id.setdefault(module_id, []).append(flag)
def _RegisterKeyFlagForModule(self, module_name, flag):
"""Specifies that a flag is a key flag for a module.
Args:
module_name: A string, the name of a Python module.
flag: A Flag object, a flag that is key to the module.
"""
key_flags_by_module = self.KeyFlagsByModuleDict()
# The list of key flags for the module named module_name.
key_flags = key_flags_by_module.setdefault(module_name, [])
# Add flag, but avoid duplicates.
if flag not in key_flags:
key_flags.append(flag)
def _GetFlagsDefinedByModule(self, module):
"""Returns the list of flags defined by a module.
Args:
module: A module object or a module name (a string).
Returns:
A new list of Flag objects. Caller may update this list as he
wishes: none of those changes will affect the internals of this
FlagValue object.
"""
if not isinstance(module, str):
module = module.__name__
return list(self.FlagsByModuleDict().get(module, []))
def _GetKeyFlagsForModule(self, module):
"""Returns the list of key flags for a module.
Args:
module: A module object or a module name (a string)
Returns:
A new list of Flag objects. Caller may update this list as he
wishes: none of those changes will affect the internals of this
FlagValue object.
"""
if not isinstance(module, str):
module = module.__name__
# Any flag is a key flag for the module that defined it. NOTE:
# key_flags is a fresh list: we can update it without affecting the
# internals of this FlagValues object.
key_flags = self._GetFlagsDefinedByModule(module)
# Take into account flags explicitly declared as key for a module.
for flag in self.KeyFlagsByModuleDict().get(module, []):
if flag not in key_flags:
key_flags.append(flag)
return key_flags
def FindModuleDefiningFlag(self, flagname, default=None):
"""Return the name of the module defining this flag, or default.
Args:
flagname: Name of the flag to lookup.
default: Value to return if flagname is not defined. Defaults
to None.
Returns:
The name of the module which registered the flag with this name.
If no such module exists (i.e. no flag with this name exists),
we return default.
"""
for module, flags in self.FlagsByModuleDict().iteritems():
for flag in flags:
if flag.name == flagname or flag.short_name == flagname:
return module
return default
def FindModuleIdDefiningFlag(self, flagname, default=None):
  """Return the ID of the module defining this flag, or default.

  Args:
    flagname: Name of the flag to lookup.
    default: Value to return if flagname is not defined. Defaults
      to None.

  Returns:
    The ID of the module which registered the flag with this name.
    If no such module exists (i.e. no flag with this name exists),
    we return default.
  """
  for module_id, registered_flags in self.FlagsByModuleIdDict().items():
    for candidate in registered_flags:
      # A flag matches on either its long or its short name.
      if flagname in (candidate.name, candidate.short_name):
        return module_id
  return default
def AppendFlagValues(self, flag_values):
  """Appends flags registered in another FlagValues instance.

  Args:
    flag_values: registry to copy from
  """
  for flag_name, flag in flag_values.FlagDict().items():
    # A flag with a short name appears in FlagDict twice: once under its
    # long name and once under its short name.  Register it only when the
    # entry is the long-name one, to avoid a DuplicateFlagError from
    # double registration.
    if flag_name != flag.name:
      continue
    try:
      self[flag_name] = flag
    except DuplicateFlagError:
      # Re-raise with the source registry attached for a better message.
      raise DuplicateFlagError(flag_name, self,
                               other_flag_values=flag_values)
def RemoveFlagValues(self, flag_values):
  """Remove flags that were previously appended from another FlagValues.

  Args:
    flag_values: registry containing flags to remove.
  """
  # Deleting the attribute routes through __delattr__, which also cleans
  # up the per-module bookkeeping for each flag.
  for name in flag_values.FlagDict():
    delattr(self, name)
def __setitem__(self, name, flag):
  """Registers a new flag variable.

  Args:
    name: A non-empty string, the flag's (long) name.
    flag: A Flag object, the flag to register under name.

  Raises:
    IllegalFlagValue: if flag is not a Flag instance.
    FlagsError: if name is not a non-empty string.
    DuplicateFlagError: if name (or the flag's short name) is already
      registered and neither side allows overriding.
  """
  fl = self.FlagDict()
  if not isinstance(flag, Flag):
    raise IllegalFlagValue(flag)
  if not isinstance(name, type("")):
    raise FlagsError("Flag name must be a string")
  if len(name) == 0:
    raise FlagsError("Flag name cannot be empty")
  # If running under pychecker, duplicate keys are likely to be
  # defined.  Disable check for duplicate keys when pycheck'ing.
  if (name in fl and not flag.allow_override and
      not fl[name].allow_override and not _RUNNING_PYCHECKER):
    module, module_name = _GetCallingModuleObjectAndName()
    if (self.FindModuleDefiningFlag(name) == module_name and
        id(module) != self.FindModuleIdDefiningFlag(name)):
      # If the flag has already been defined by a module with the same name,
      # but a different ID, we can stop here because it indicates that the
      # module is simply being imported a subsequent time.
      return
    raise DuplicateFlagError(name, self)
  short_name = flag.short_name
  if short_name is not None:
    # A flag with a short name is registered under BOTH names, so either
    # spelling resolves to the same Flag object.
    if (short_name in fl and not flag.allow_override and
        not fl[short_name].allow_override and not _RUNNING_PYCHECKER):
      raise DuplicateFlagError(short_name, self)
    fl[short_name] = flag
  fl[name] = flag
  # Mirror the registration in the module-level export table as well.
  global _exported_flags
  _exported_flags[name] = flag
def __getitem__(self, name):
  """Retrieves the Flag object for the flag --name."""
  flags = self.FlagDict()
  return flags[name]
def __getattr__(self, name):
  """Retrieves the 'value' attribute of the flag --name."""
  flags = self.FlagDict()
  try:
    flag = flags[name]
  except KeyError:
    # Unknown flag: surface the standard attribute error, as callers
    # (and hasattr) expect.
    raise AttributeError(name)
  return flag.value
def __setattr__(self, name, value):
  """Sets the 'value' attribute of the flag --name."""
  flag = self.FlagDict()[name]
  flag.value = value
  # Re-check every validator attached to this flag with the new value.
  self._AssertValidators(flag.validators)
  return value
def _AssertAllValidators(self):
  # Collect each distinct validator exactly once before verifying: a
  # single validator may be attached to several flags.
  pending = set()
  for flag in self.FlagDict().values():
    pending.update(flag.validators)
  self._AssertValidators(pending)
def _AssertValidators(self, validators):
  """Assert if all validators in the list are satisfied.

  Asserts validators in the order they were created.

  Args:
    validators: Iterable(gflags_validators.Validator), validators to be
      verified

  Raises:
    AttributeError: if validators work with a non-existing flag.
    IllegalFlagValue: if validation fails for at least one validator
  """
  # Sorting by insertion_index reports failures in creation order, which
  # keeps the error output deterministic.
  for validator in sorted(
      validators, key=lambda validator: validator.insertion_index):
    try:
      validator.Verify(self)
    except gflags_validators.Error, e:
      # Include the offending flags and their current values in the error.
      message = validator.PrintFlagsWithValues(self)
      raise IllegalFlagValue('%s: %s' % (message, str(e)))
def _FlagIsRegistered(self, flag_obj):
  """Checks whether a Flag object is registered under some name.

  Note: this is non trivial: in addition to its normal name, a flag
  may have a short name too.  In self.FlagDict(), both the normal and
  the short name are mapped to the same flag object.  E.g., calling
  only "del FLAGS.short_name" is not unregistering the corresponding
  Flag object (it is still registered under the longer name).

  Args:
    flag_obj: A Flag object.

  Returns:
    A boolean: True iff flag_obj is registered under some name.
  """
  flag_dict = self.FlagDict()
  # Registered under its long name?
  if flag_dict.get(flag_obj.name, None) == flag_obj:
    return True
  # Registered under its (optional) short name?
  short = flag_obj.short_name
  if short is not None and flag_dict.get(short, None) == flag_obj:
    return True
  # A flag can only ever live under those two keys, so no full scan of
  # self.FlagDict() is needed.
  return False
def __delattr__(self, flag_name):
  """Deletes a previously-defined flag from a flag object.

  This method makes sure we can delete a flag by using

    del flag_values_object.<flag_name>

  E.g.,

    gflags.DEFINE_integer('foo', 1, 'Integer flag.')
    del gflags.FLAGS.foo

  Args:
    flag_name: A string, the name of the flag to be deleted.

  Raises:
    AttributeError: When there is no registered flag named flag_name.
  """
  flags = self.FlagDict()
  if flag_name not in flags:
    raise AttributeError(flag_name)

  flag_obj = flags[flag_name]
  del flags[flag_name]

  if self._FlagIsRegistered(flag_obj):
    # The object is still reachable under its other (short or long) name,
    # so leave the per-module bookkeeping untouched.
    return

  # The object is now completely unregistered (see the docstring of
  # _FlagIsRegistered), so purge it from every internal per-module
  # dictionary as well.
  self.__RemoveFlagFromDictByModule(self.FlagsByModuleDict(), flag_obj)
  self.__RemoveFlagFromDictByModule(self.FlagsByModuleIdDict(), flag_obj)
  self.__RemoveFlagFromDictByModule(self.KeyFlagsByModuleDict(), flag_obj)
def __RemoveFlagFromDictByModule(self, flags_by_module_dict, flag_obj):
  """Removes a flag object from a module -> list of flags dictionary.

  Args:
    flags_by_module_dict: A dictionary that maps module names to lists of
      flags.
    flag_obj: A flag object.
  """
  for flags_in_module in flags_by_module_dict.values():
    # A flag may appear several times in one module's list; a while loop
    # (rather than a single remove) deletes every occurrence.
    while flag_obj in flags_in_module:
      flags_in_module.remove(flag_obj)
def SetDefault(self, name, value):
  """Changes the default value of the named flag object."""
  flags = self.FlagDict()
  try:
    flag = flags[name]
  except KeyError:
    raise AttributeError(name)
  flag.SetDefault(value)
  # The default may have become the current value; re-run the validators.
  self._AssertValidators(flag.validators)
def __contains__(self, name):
  """Returns True if name is a value (flag) in the dict."""
  # Matches both long and short names, since FlagDict() keys on both.
  return name in self.FlagDict()

has_key = __contains__  # a synonym for __contains__(); keeps the dict-like API
def __iter__(self):
  # Iterating a FlagValues object yields registered flag names (including
  # short names), mirroring iteration over self.FlagDict().
  return iter(self.FlagDict())
def __call__(self, argv):
  """Parses flags from argv; stores parsed flags into this FlagValues object.

  All unparsed arguments are returned.  Flags are parsed using the GNU
  Program Argument Syntax Conventions, using getopt:

  http://www.gnu.org/software/libc/manual/html_mono/libc.html#Getopt

  Args:
    argv: argument list. Can be of any type that may be converted to a list.

  Returns:
    The list of arguments not parsed as options, including argv[0]

  Raises:
    FlagsError: on any parsing error
  """
  # Support any sequence type that can be converted to a list
  argv = list(argv)
  shortopts = ""
  longopts = []

  fl = self.FlagDict()

  # This pre parses the argv list for --flagfile=<> options.
  argv = argv[:1] + self.ReadFlagsFromFiles(argv[1:], force_gnu=False)

  # Correct the argv to support the google style of passing boolean
  # parameters.  Boolean parameters may be passed by using --mybool,
  # --nomybool, --mybool=(true|false|1|0).  getopt does not support
  # having options that may or may not have a parameter.  We replace
  # instances of the short form --mybool and --nomybool with their
  # full forms: --mybool=(true|false).
  original_argv = list(argv)  # list() makes a copy
  shortest_matches = None
  for name, flag in fl.items():
    if not flag.boolean:
      continue
    if shortest_matches is None:
      # Determine the smallest allowable prefix for all flag names
      # (computed lazily, only if at least one boolean flag exists).
      shortest_matches = self.ShortestUniquePrefixes(fl)
    no_name = 'no' + name
    prefix = shortest_matches[name]
    no_prefix = shortest_matches[no_name]

    # Replace all occurrences of this boolean with extended forms
    for arg_idx in range(1, len(argv)):
      arg = argv[arg_idx]
      if arg.find('=') >= 0: continue
      if arg.startswith('--'+prefix) and ('--'+name).startswith(arg):
        argv[arg_idx] = ('--%s=true' % name)
      elif arg.startswith('--'+no_prefix) and ('--'+no_name).startswith(arg):
        argv[arg_idx] = ('--%s=false' % name)

  # Loop over all of the flags, building up the lists of short options
  # and long options that will be passed to getopt.  Short options are
  # specified as a string of letters, each letter followed by a colon
  # if it takes an argument.  Long options are stored in an array of
  # strings.  Each string ends with an '=' if it takes an argument.
  for name, flag in fl.items():
    longopts.append(name + "=")
    if len(name) == 1:  # one-letter option: allow short flag type also
      shortopts += name
      if not flag.boolean:
        shortopts += ":"

  longopts.append('undefok=')
  undefok_flags = []

  # In case --undefok is specified, loop to pick up unrecognized
  # options one by one.
  unrecognized_opts = []
  args = argv[1:]
  while True:
    try:
      # NOTE: the gnu_getopt setting is stored directly in __dict__ under
      # its unmangled name, hence the explicit lookup instead of normal
      # attribute access (which __getattr__ would treat as a flag lookup).
      if self.__dict__['__use_gnu_getopt']:
        optlist, unparsed_args = getopt.gnu_getopt(args, shortopts, longopts)
      else:
        optlist, unparsed_args = getopt.getopt(args, shortopts, longopts)
      break
    except getopt.GetoptError, e:
      if not e.opt or e.opt in fl:
        # Not an unrecognized option, re-raise the exception as a FlagsError
        raise FlagsError(e)
      # Remove offender from args and try again
      for arg_index in range(len(args)):
        if ((args[arg_index] == '--' + e.opt) or
            (args[arg_index] == '-' + e.opt) or
            (args[arg_index].startswith('--' + e.opt + '='))):
          unrecognized_opts.append((e.opt, args[arg_index]))
          args = args[0:arg_index] + args[arg_index+1:]
          break
      else:
        # We should have found the option, so we don't expect to get
        # here.  We could assert, but raising the original exception
        # might work better.
        raise FlagsError(e)

  for name, arg in optlist:
    if name == '--undefok':
      flag_names = arg.split(',')
      undefok_flags.extend(flag_names)
      # For boolean flags, if --undefok=boolflag is specified, then we should
      # also accept --noboolflag, in addition to --boolflag.
      # Since we don't know the type of the undefok'd flag, this will affect
      # non-boolean flags as well.
      # NOTE: You shouldn't use --undefok=noboolflag, because then we will
      # accept --nonoboolflag here.  We are choosing not to do the conversion
      # from noboolflag -> boolflag because of the ambiguity that flag names
      # can start with 'no'.
      undefok_flags.extend('no' + name for name in flag_names)
      continue
    if name.startswith('--'):
      # long option
      name = name[2:]
      short_option = 0
    else:
      # short option
      name = name[1:]
      short_option = 1
    if name in fl:
      flag = fl[name]
      if flag.boolean and short_option: arg = 1
      flag.Parse(arg)

  # If there were unrecognized options, raise an exception unless
  # the options were named via --undefok.
  for opt, value in unrecognized_opts:
    if opt not in undefok_flags:
      raise UnrecognizedFlagError(opt, value)

  if unparsed_args:
    if self.__dict__['__use_gnu_getopt']:
      # if using gnu_getopt just return the program name + remainder of argv.
      ret_val = argv[:1] + unparsed_args
    else:
      # unparsed_args becomes the first non-flag detected by getopt to
      # the end of argv.  Because argv may have been modified above,
      # return original_argv for this region.
      ret_val = argv[:1] + original_argv[-len(unparsed_args):]
  else:
    ret_val = argv[:1]

  self._AssertAllValidators()
  return ret_val
def Reset(self):
  """Resets the values to the point before FLAGS(argv) was called."""
  # Unparse() restores each flag to its default and clears .present.
  for flag in self.FlagDict().values():
    flag.Unparse()
def RegisteredFlags(self):
  """Returns: a list of the names and short names of all registered flags."""
  # FlagDict is keyed on both long and short names, so listing its keys
  # yields both spellings.
  return [name for name in self.FlagDict()]
def FlagValuesDict(self):
  """Returns: a dictionary that maps flag names to flag values."""
  flags = self.FlagDict()  # hoisted: one lookup for the whole loop
  result = {}
  for name in self.RegisteredFlags():
    result[name] = flags[name].value
  return result
def __str__(self):
  """Generates a help string for all known flags."""
  # str(FLAGS) is simply the full help text with no extra indentation.
  return self.GetHelp()
def GetHelp(self, prefix=''):
  """Generates a help string for all known flags.

  Args:
    prefix: A string, prepended to each generated help line when flags
      are rendered as one flat list (no per-module registry).

  Returns:
    A single newline-joined help string.
  """
  helplist = []

  flags_by_module = self.FlagsByModuleDict()
  if flags_by_module:
    modules = sorted(flags_by_module)

    # Print the help for the main module first, if possible.
    main_module = _GetMainModule()
    if main_module in modules:
      modules.remove(main_module)
      modules = [main_module] + modules

    for module in modules:
      self.__RenderOurModuleFlags(module, helplist)

    # The special flags (e.g. --flagfile, --undefok) are rendered last,
    # attributed to the 'gflags' module itself.
    self.__RenderModuleFlags('gflags',
                             _SPECIAL_FLAGS.FlagDict().values(),
                             helplist)

  else:
    # Just print one long list of flags.
    self.__RenderFlagList(
        self.FlagDict().values() + _SPECIAL_FLAGS.FlagDict().values(),
        helplist, prefix)

  return '\n'.join(helplist)
def __RenderModuleFlags(self, module, flags, output_lines, prefix=""):
  """Generates a help string for a given module.

  Args:
    module: A module object or a module name (a string).
    flags: The list of Flag objects to render under this module heading.
    output_lines: A list of strings; generated lines are appended to it.
    prefix: A string that is prepended to each generated help line.
  """
  if not isinstance(module, str):
    module = module.__name__
  # Module heading, then the flags indented two spaces beneath it.
  output_lines.append('\n%s%s:' % (prefix, module))
  self.__RenderFlagList(flags, output_lines, prefix + "  ")
def __RenderOurModuleFlags(self, module, output_lines, prefix=""):
  """Generates a help string for a given module.

  Args:
    module: A module object or a module name (a string).
    output_lines: A list of strings; generated lines are appended to it.
    prefix: A string that is prepended to each generated help line.
  """
  flags = self._GetFlagsDefinedByModule(module)
  # Modules that define no flags produce no heading at all.
  if flags:
    self.__RenderModuleFlags(module, flags, output_lines, prefix)
def __RenderOurModuleKeyFlags(self, module, output_lines, prefix=""):
  """Generates a help string for the key flags of a given module.

  Args:
    module: A module object or a module name (a string).
    output_lines: A list of strings.  The generated help message
      lines will be appended to this list.
    prefix: A string that is prepended to each generated help line.
  """
  key_flags = self._GetKeyFlagsForModule(module)
  # Modules without key flags produce no heading at all.
  if key_flags:
    self.__RenderModuleFlags(module, key_flags, output_lines, prefix)
def ModuleHelp(self, module):
  """Describe the key flags of a module.

  Args:
    module: A module object or a module name (a string).

  Returns:
    string describing the key flags of a module.
  """
  lines = []
  self.__RenderOurModuleKeyFlags(module, lines)
  return '\n'.join(lines)
def MainModuleHelp(self):
  """Describe the key flags of the main module.

  Returns:
    string describing the key flags of a module.
  """
  # Convenience wrapper: the "main" module is the one that started the
  # program, as determined by _GetMainModule().
  return self.ModuleHelp(_GetMainModule())
def __RenderFlagList(self, flaglist, output_lines, prefix="  "):
  """Appends a wrapped help paragraph for each flag to output_lines.

  Args:
    flaglist: Iterable of Flag objects to render.
    output_lines: A list of strings; one formatted entry per flag is
      appended to it.
    prefix: A string that is prepended to each generated help line.
  """
  fl = self.FlagDict()
  special_fl = _SPECIAL_FLAGS.FlagDict()
  flaglist = [(flag.name, flag) for flag in flaglist]
  flaglist.sort()
  flagset = {}
  for (name, flag) in flaglist:
    # It's possible this flag got deleted or overridden since being
    # registered in the per-module flaglist.  Check now against the
    # canonical source of current flag information, the FlagDict.
    if fl.get(name, None) != flag and special_fl.get(name, None) != flag:
      # a different flag is using this name now
      continue
    # only print help once
    if flag in flagset: continue
    flagset[flag] = 1
    flaghelp = ""
    if flag.short_name: flaghelp += "-%s," % flag.short_name
    if flag.boolean:
      # Booleans are shown in their --[no]name form.
      flaghelp += "--[no]%s" % flag.name + ":"
    else:
      flaghelp += "--%s" % flag.name + ":"
    flaghelp += "  "
    if flag.help:
      flaghelp += flag.help
    flaghelp = TextWrap(flaghelp, indent=prefix+"  ",
                        firstline_indent=prefix)
    if flag.default_as_str:
      flaghelp += "\n"
      flaghelp += TextWrap("(default: %s)" % flag.default_as_str,
                           indent=prefix+"  ")
    if flag.parser.syntactic_help:
      flaghelp += "\n"
      flaghelp += TextWrap("(%s)" % flag.parser.syntactic_help,
                           indent=prefix+"  ")
    output_lines.append(flaghelp)
def get(self, name, default=None):
  """Returns the value of a flag (if not None) or a default value.

  Args:
    name: A string, the name of a flag.
    default: Default value to use if the flag value is None.  Now
      optional, defaulting to None (mirrors dict.get); existing
      two-argument callers are unaffected.

  Returns:
    The flag's value if that value is not None, otherwise default.

  Raises:
    AttributeError: if no flag called name is registered.
  """
  value = self.__getattr__(name)
  # Explicit None test: values like 0, '' or False are legitimate flag
  # values and must be returned as-is, not replaced by the default.
  if value is not None:
    return value
  return default
def ShortestUniquePrefixes(self, fl):
  """Returns: dictionary; maps flag names to their shortest unique prefix."""
  # Collect every name getopt might see: each flag name, plus the
  # 'no<name>' negated form for boolean flags.
  names = []
  for name, flag in fl.items():
    names.append(name)
    if flag.boolean:
      names.append('no%s' % name)
  names.sort()

  # Walk the sorted names.  A name's shortest unique prefix must differ
  # from both of its sorted neighbours; prev_idx carries the index at
  # which the current name diverged from its predecessor.
  shortest_matches = {}
  prev_idx = 0
  for pos, curr in enumerate(names):
    if pos + 1 < len(names):
      successor = names[pos + 1]
      successor_len = len(successor)
    else:
      successor = None
      successor_len = 0
    for curr_idx in range(len(curr)):
      if (successor is None
          or curr_idx >= successor_len
          or curr[curr_idx] != successor[curr_idx]):
        # curr is longer than successor, or they disagree at curr_idx.
        # The prefix must also be long enough to differ from the
        # predecessor, hence the max() with prev_idx.
        shortest_matches[curr] = curr[:max(prev_idx, curr_idx) + 1]
        prev_idx = curr_idx
        break
    else:
      # curr is a (possibly equal) prefix of successor, so no proper
      # prefix of it is unique: the full name is required.
      shortest_matches[curr] = curr
      prev_idx = curr_idx + 1  # successor needs at least one more char
  return shortest_matches
def __IsFlagFileDirective(self, flag_string):
  """Checks whether flag_string contain a --flagfile=<foo> directive."""
  # Anything that is not a string cannot be a directive.
  if not isinstance(flag_string, type("")):
    return 0
  # Bare form: the filename arrives as the NEXT argument.
  if flag_string == '--flagfile' or flag_string == '-flagfile':
    return 1
  # Inline form: -[-]flagfile=<filename>.
  if (flag_string.startswith('--flagfile=') or
      flag_string.startswith('-flagfile=')):
    return 1
  return 0
def ExtractFilename(self, flagfile_str):
  """Returns filename from a flagfile_str of form -[-]flagfile=filename.

  The cases of --flagfile foo and -flagfile foo (space-separated) are
  handled by the caller, not here.

  Raises:
    FlagsError: if flagfile_str is not of the -[-]flagfile= form.
  """
  for directive in ('--flagfile=', '-flagfile='):
    if flagfile_str.startswith(directive):
      # Strip surrounding whitespace, then expand a leading ~.
      return os.path.expanduser(flagfile_str[len(directive):].strip())
  raise FlagsError('Hit illegal --flagfile type: %s' % flagfile_str)
def __GetFlagFileLines(self, filename, parsed_file_list):
  """Returns the useful (!=comments, etc) lines from a file with flags.

  Args:
    filename: A string, the name of the flag file.
    parsed_file_list: A list of the names of the files we have
      already read.  MUTATED BY THIS FUNCTION.

  Returns:
    List of strings. See the note below.

  Raises:
    CantOpenFlagFileError: if the file cannot be opened.

  NOTE(springer): This function checks for a nested --flagfile=<foo>
  tag and handles the lower file recursively. It returns a list of
  all the lines that _could_ contain command flags. This is
  EVERYTHING except whitespace lines and comments (lines starting
  with '#' or '//').
  """
  line_list = []  # All line from flagfile.
  flag_line_list = []  # Subset of lines w/o comments, blanks, flagfile= tags.
  try:
    file_obj = open(filename, 'r')
  except IOError, e_msg:
    raise CantOpenFlagFileError('ERROR:: Unable to open flagfile: %s' % e_msg)

  line_list = file_obj.readlines()
  file_obj.close()
  # Record this file BEFORE scanning its contents, so a self-referencing
  # flagfile trips the circular-inclusion check below.
  parsed_file_list.append(filename)

  # This is where we check each line in the file we just read.
  for line in line_list:
    if line.isspace():
      pass
    # Checks for comment (a line that starts with '#').
    elif line.startswith('#') or line.startswith('//'):
      pass
    # Checks for a nested "--flagfile=<bar>" flag in the current file.
    # If we find one, recursively parse down into that file.
    elif self.__IsFlagFileDirective(line):
      sub_filename = self.ExtractFilename(line)
      # We do a little safety check for reparsing a file we've already done.
      if not sub_filename in parsed_file_list:
        included_flags = self.__GetFlagFileLines(sub_filename,
                                                parsed_file_list)
        flag_line_list.extend(included_flags)
      else:  # Case of hitting a circularly included file.
        sys.stderr.write('Warning: Hit circular flagfile dependency: %s\n' %
                         (sub_filename,))
    else:
      # Any line that's not a comment or a nested flagfile should get
      # copied into 2nd position.  This leaves earlier arguments
      # further back in the list, thus giving them higher priority.
      flag_line_list.append(line.strip())
  return flag_line_list
def ReadFlagsFromFiles(self, argv, force_gnu=True):
  """Processes command line args, but also allow args to be read from file.

  Args:
    argv: A list of strings, usually sys.argv[1:], which may contain one or
      more flagfile directives of the form --flagfile="./filename".
      Note that the name of the program (sys.argv[0]) should be omitted.
    force_gnu: If False, --flagfile parsing obeys normal flag semantics.
      If True, --flagfile parsing instead follows gnu_getopt semantics.
      *** WARNING *** force_gnu=False may become the future default!

  Returns:
    A new list which has the original list combined with what we read
    from any flagfile(s).

  References: Global gflags.FLAG class instance.

  This function should be called before the normal FLAGS(argv) call.
  This function scans the input list for a flag that looks like:
  --flagfile=<somefile>. Then it opens <somefile>, reads all valid key
  and value pairs and inserts them into the input list between the
  first item of the list and any subsequent items in the list.

  Note that your application's flags are still defined the usual way
  using gflags DEFINE_flag() type functions.

  Notes (assuming we're getting a commandline of some sort as our input):
  --> Flags from the command line argv _should_ always take precedence!
  --> A further "--flagfile=<otherfile.cfg>" CAN be nested in a flagfile.
      It will be processed after the parent flag file is done.
  --> For duplicate flags, first one we hit should "win".
  --> In a flagfile, a line beginning with # or // is a comment.
  --> Entirely blank lines _should_ be ignored.
  """
  parsed_file_list = []
  rest_of_args = argv
  new_argv = []
  while rest_of_args:
    current_arg = rest_of_args[0]
    rest_of_args = rest_of_args[1:]
    if self.__IsFlagFileDirective(current_arg):
      # This handles the case of -(-)flagfile foo.  In this case the
      # next arg really is part of this one.
      if current_arg == '--flagfile' or current_arg == '-flagfile':
        if not rest_of_args:
          raise IllegalFlagValue('--flagfile with no argument')
        flag_filename = os.path.expanduser(rest_of_args[0])
        rest_of_args = rest_of_args[1:]
      else:
        # This handles the case of (-)-flagfile=foo.
        flag_filename = self.ExtractFilename(current_arg)
      # Splice the file's flag lines in where the directive appeared.
      new_argv.extend(
          self.__GetFlagFileLines(flag_filename, parsed_file_list))
    else:
      new_argv.append(current_arg)
      # Stop parsing after '--', like getopt and gnu_getopt.
      if current_arg == '--':
        break
      # Stop parsing after a non-flag, like getopt.
      if not current_arg.startswith('-'):
        if not force_gnu and not self.__dict__['__use_gnu_getopt']:
          break

  # Anything left after an early break is passed through untouched.
  if rest_of_args:
    new_argv.extend(rest_of_args)

  return new_argv
def FlagsIntoString(self):
  """Returns a string with the flags assignments from this FlagValues object.

  Flags whose value is None are skipped; every other assignment is
  terminated by a newline.

  NOTE: MUST mirror the behavior of the C++ CommandlineFlagsIntoString
  from http://code.google.com/p/google-gflags
  """
  # Accumulate chunks and join once, instead of repeated string +=.
  chunks = []
  for flag in self.FlagDict().values():
    if flag.value is not None:
      chunks.append(flag.Serialize() + '\n')
  return ''.join(chunks)
def AppendFlagsIntoFile(self, filename):
  """Appends all flags assignments from this FlagInfo object to a file.

  Output will be in the format of a flagfile.

  NOTE: MUST mirror the behavior of the C++ AppendFlagsIntoFile
  from http://code.google.com/p/google-gflags

  Args:
    filename: A string, the file to append to (created if missing).
  """
  out_file = open(filename, 'a')
  try:
    # try/finally guarantees the handle is closed even if serialization
    # or the write raises; the previous code leaked it on error.
    out_file.write(self.FlagsIntoString())
  finally:
    out_file.close()
def WriteHelpInXMLFormat(self, outfile=None):
  """Outputs flag documentation in XML format.

  NOTE: We use element names that are consistent with those used by
  the C++ command-line flag library, from
  http://code.google.com/p/google-gflags
  We also use a few new elements (e.g., <key>), but we do not
  interfere / overlap with existing XML elements used by the C++
  library.  Please maintain this consistency.

  Args:
    outfile: File object we write to.  Default None means sys.stdout.
  """
  outfile = outfile or sys.stdout

  outfile.write('<?xml version=\"1.0\"?>\n')
  outfile.write('<AllFlags>\n')
  indent = '  '
  _WriteSimpleXMLElement(outfile, 'program', os.path.basename(sys.argv[0]),
                         indent)

  # The <usage> element comes from the main module's docstring, with a
  # generic fallback when the module has none.
  usage_doc = sys.modules['__main__'].__doc__
  if not usage_doc:
    usage_doc = '\nUSAGE: %s [flags]\n' % sys.argv[0]
  else:
    usage_doc = usage_doc.replace('%s', sys.argv[0])
  _WriteSimpleXMLElement(outfile, 'usage', usage_doc, indent)

  # Get list of key flags for the main module.
  key_flags = self._GetKeyFlagsForModule(_GetMainModule())

  # Sort flags by declaring module name and next by flag name.
  flags_by_module = self.FlagsByModuleDict()
  all_module_names = list(flags_by_module.keys())
  all_module_names.sort()
  for module_name in all_module_names:
    flag_list = [(f.name, f) for f in flags_by_module[module_name]]
    flag_list.sort()
    for unused_flag_name, flag in flag_list:
      is_key = flag in key_flags
      flag.WriteInfoInXMLFormat(outfile, module_name,
                                is_key=is_key, indent=indent)

  outfile.write('</AllFlags>\n')
  outfile.flush()
def AddValidator(self, validator):
  """Register new flags validator to be checked.

  Args:
    validator: gflags_validators.Validator

  Raises:
    KeyError (from the flag lookup): if the validator references a flag
      that is not registered in this FlagValues object.
  """
  flags = self.FlagDict()  # hoisted out of the loop
  for flag_name in validator.GetFlagsNames():
    # Attach the validator to every flag it covers; _AssertValidators
    # later walks these lists.
    flags[flag_name].validators.append(validator)
# end of FlagValues definition


# The global FlagValues instance: the shared registry that the module-level
# DEFINE_* helpers and applications importing this library populate.
FLAGS = FlagValues()
def _StrOrUnicode(value):
  """Converts value to a python string or, if necessary, unicode-string."""
  try:
    return str(value)
  except UnicodeEncodeError:
    # str() on a unicode object containing non-ASCII characters raises
    # UnicodeEncodeError under Python 2; fall back to the unicode form.
    # (unicode is the Python 2 builtin.)
    return unicode(value)
def _MakeXMLSafe(s):
  """Escapes <, >, and & from s, and removes XML 1.0-illegal chars."""
  # xml.sax.saxutils.escape replaces exactly &, < and >, which is what the
  # previously-used cgi.escape(s) (with its default quote=False) did.
  # cgi.escape was deprecated and later removed from the standard library
  # (Python 3.8), so we avoid it here.
  from xml.sax.saxutils import escape
  s = escape(s)
  # Remove characters that cannot appear in an XML 1.0 document
  # (http://www.w3.org/TR/REC-xml/#charsets).
  #
  # NOTE: if there are problems with current solution, one may move to
  # XML 1.1, which allows such chars, if they're entity-escaped (&#xHH;).
  s = re.sub(r'[\x00-\x08\x0b\x0c\x0e-\x1f]', '', s)
  # Convert non-ascii characters to entities.  Note: requires python >=2.3
  s = s.encode('ascii', 'xmlcharrefreplace')   # e.g. u'\u0388' -> '&#904;'
  return s
def _WriteSimpleXMLElement(outfile, name, value, indent):
  """Writes a simple XML element.

  Args:
    outfile: File object we write the XML element to.
    name: A string, the name of XML element.
    value: A Python object, whose string representation will be used
      as the value of the XML element.
    indent: A string, prepended to each line of generated output.
  """
  value_str = _StrOrUnicode(value)
  if isinstance(value, bool):
    # Display boolean values as the C++ flag library does: no caps.
    value_str = value_str.lower()
  # Escape the value so the produced document stays well-formed XML.
  safe_value_str = _MakeXMLSafe(value_str)
  outfile.write('%s<%s>%s</%s>\n' % (indent, name, safe_value_str, name))
class Flag:
  """Information about a command-line flag.

  'Flag' objects define the following fields:
    .name - the name for this flag
    .default - the default value for this flag
    .default_as_str - default value as repr'd string, e.g., "'true'" (or None)
    .value - the most recent parsed value of this flag; set by Parse()
    .help - a help string or None if no help is available
    .short_name - the single letter alias for this flag (or None)
    .boolean - if 'true', this flag does not accept arguments
    .present - true if this flag was parsed from command line flags.
    .parser - an ArgumentParser object
    .serializer - an ArgumentSerializer object
    .allow_override - the flag may be redefined without raising an error

  The only public method of a 'Flag' object is Parse(), but it is
  typically only called by a 'FlagValues' object.  The Parse() method is
  a thin wrapper around the 'ArgumentParser' Parse() method.  The parsed
  value is saved in .value, and the .present attribute is updated.  If
  this flag was already present, a FlagsError is raised.

  Parse() is also called during __init__ to parse the default value and
  initialize the .value attribute.  This enables other python modules to
  safely use flags even if the __main__ module neglects to parse the
  command line arguments.  The .present attribute is cleared after
  __init__ parsing.  If the default value is set to None, then the
  __init__ parsing step is skipped and the .value attribute is
  initialized to None.

  Note: The default value is also presented to the user in the help
  string, so it is important that it be a legal value for this flag.
  """

  def __init__(self, parser, serializer, name, default, help_string,
               short_name=None, boolean=0, allow_override=0):
    self.name = name

    if not help_string:
      help_string = '(no help available)'

    self.help = help_string
    self.short_name = short_name
    self.boolean = boolean
    self.present = 0
    self.parser = parser
    self.serializer = serializer
    self.allow_override = allow_override
    self.value = None
    self.validators = []

    # SetDefault also parses the default (via Unparse), initializing
    # .value and .default_as_str.
    self.SetDefault(default)

  def __hash__(self):
    # Identity-based hash; matches the identity semantics of __eq__.
    return hash(id(self))

  def __eq__(self, other):
    # Two Flag objects are equal only if they are the same object.
    return self is other

  def __lt__(self, other):
    # Arbitrary but stable ordering by id(); allows lists of flags to be
    # sorted deterministically within one process.
    if isinstance(other, Flag):
      return id(self) < id(other)
    return NotImplemented

  def __GetParsedValueAsString(self, value):
    # Renders value the way it would appear on a command line, for use
    # in help text (see .default_as_str).
    if value is None:
      return None
    if self.serializer:
      return repr(self.serializer.Serialize(value))
    if self.boolean:
      if value:
        return repr('true')
      else:
        return repr('false')
    return repr(_StrOrUnicode(value))

  def Parse(self, argument):
    """Parses argument via self.parser and stores the result in .value."""
    try:
      self.value = self.parser.Parse(argument)
    except ValueError, e:  # recast ValueError as IllegalFlagValue
      raise IllegalFlagValue("flag --%s=%s: %s" % (self.name, argument, e))
    self.present += 1

  def Unparse(self):
    """Resets .value to the default and clears the .present counter."""
    if self.default is None:
      self.value = None
    else:
      self.Parse(self.default)
    self.present = 0

  def Serialize(self):
    """Returns this flag as a command-line assignment string ('' if None)."""
    if self.value is None:
      return ''
    if self.boolean:
      # Booleans serialize to --name / --noname rather than --name=value.
      if self.value:
        return "--%s" % self.name
      else:
        return "--no%s" % self.name
    else:
      if not self.serializer:
        raise FlagsError("Serializer not present for flag %s" % self.name)
      return "--%s=%s" % (self.name, self.serializer.Serialize(self.value))

  def SetDefault(self, value):
    """Changes the default value (and current value too) for this Flag."""
    # We can't allow a None override because it may end up not being
    # passed to C++ code when we're overriding C++ flags.  So we
    # cowardly bail out until someone fixes the semantics of trying to
    # pass None to a C++ flag.  See swig_flags.Init() for details on
    # this behavior.
    # TODO(olexiy): Users can directly call this method, bypassing all flags
    # validators (we don't have FlagValues here, so we can not check
    # validators).
    # The simplest solution I see is to make this method private.
    # Another approach would be to store reference to the corresponding
    # FlagValues with each flag, but this seems to be an overkill.
    if value is None and self.allow_override:
      raise DuplicateFlagCannotPropagateNoneToSwig(self.name)

    self.default = value
    self.Unparse()
    self.default_as_str = self.__GetParsedValueAsString(self.value)

  def Type(self):
    """Returns: a string that describes the type of this Flag."""
    # NOTE: we use strings, and not the types.*Type constants because
    # our flags can have more exotic types, e.g., 'comma separated list
    # of strings', 'whitespace separated list of strings', etc.
    return self.parser.Type()

  def WriteInfoInXMLFormat(self, outfile, module_name, is_key=False, indent=''):
    """Writes common info about this flag, in XML format.

    This is information that is relevant to all flags (e.g., name,
    meaning, etc.).  If you defined a flag that has some other pieces of
    info, then please override _WriteCustomInfoInXMLFormat.

    Please do NOT override this method.

    Args:
      outfile: File object we write to.
      module_name: A string, the name of the module that defines this flag.
      is_key: A boolean, True iff this flag is key for main module.
      indent: A string that is prepended to each generated line.
    """
    outfile.write(indent + '<flag>\n')
    inner_indent = indent + '  '
    if is_key:
      _WriteSimpleXMLElement(outfile, 'key', 'yes', inner_indent)
    _WriteSimpleXMLElement(outfile, 'file', module_name, inner_indent)
    # Print flag features that are relevant for all flags.
    _WriteSimpleXMLElement(outfile, 'name', self.name, inner_indent)
    if self.short_name:
      _WriteSimpleXMLElement(outfile, 'short_name', self.short_name,
                             inner_indent)
    if self.help:
      _WriteSimpleXMLElement(outfile, 'meaning', self.help, inner_indent)
    # The default flag value can either be represented as a string like on the
    # command line, or as a Python object.  We serialize this value in the
    # latter case in order to remain consistent.
    if self.serializer and not isinstance(self.default, str):
      default_serialized = self.serializer.Serialize(self.default)
    else:
      default_serialized = self.default
    _WriteSimpleXMLElement(outfile, 'default', default_serialized, inner_indent)
    _WriteSimpleXMLElement(outfile, 'current', self.value, inner_indent)
    _WriteSimpleXMLElement(outfile, 'type', self.Type(), inner_indent)
    # Print extra flag features this flag may have.
    self._WriteCustomInfoInXMLFormat(outfile, inner_indent)
    outfile.write(indent + '</flag>\n')

  def _WriteCustomInfoInXMLFormat(self, outfile, indent):
    """Writes extra info about this flag, in XML format.

    "Extra" means "not already printed by WriteInfoInXMLFormat above."

    Args:
      outfile: File object we write to.
      indent: A string that is prepended to each generated line.
    """
    # Usually, the parser knows the extra details about the flag, so
    # we just forward the call to it.
    self.parser.WriteCustomInfoInXMLFormat(outfile, indent)
# End of Flag definition
class _ArgumentParserCache(type):
"""Metaclass used to cache and share argument parsers among flags."""
_instances = {}
def __call__(mcs, *args, **kwargs):
"""Returns an instance of the argument parser cls.
This method overrides behavior of the __new__ methods in
all subclasses of ArgumentParser (inclusive). If an instance
for mcs with the same set of arguments exists, this instance is
returned, otherwise a new instance is created.
If any keyword arguments are defined, or the values in args
are not hashable, this method always returns a new instance of
cls.
Args:
args: Positional initializer arguments.
kwargs: Initializer keyword arguments.
Returns:
An instance of cls, shared or new.
"""
if kwargs:
return type.__call__(mcs, *args, **kwargs)
else:
instances = mcs._instances
key = (mcs,) + tuple(args)
try:
return instances[key]
except KeyError:
# No cache entry for key exists, create a new one.
return instances.setdefault(key, type.__call__(mcs, *args))
except TypeError:
# An object in args cannot be hashed, always return
# a new instance.
return type.__call__(mcs, *args)
class ArgumentParser(object):
  """Base class used to parse and convert arguments.

  The Parse() method checks that a string argument is a legal value and
  converts it to a native type.  If the value cannot be converted, it
  should raise ValueError with a human readable explanation of why the
  value is illegal.

  Subclasses should also define a syntactic_help string which may be
  presented to the user to describe the form of the legal values.

  Argument parser classes must be stateless, since instances are cached
  and shared between flags.  Initializer arguments are allowed, but all
  member variables must be derived from initializer arguments only.
  """
  # Instances are cached/shared via the metaclass; see _ArgumentParserCache.
  __metaclass__ = _ArgumentParserCache

  syntactic_help = ""

  def Parse(self, argument):
    """Default implementation: returns the argument unmodified."""
    return argument

  def Type(self):
    """Returns a short string naming the value type, for help/XML output."""
    return 'string'

  def WriteCustomInfoInXMLFormat(self, outfile, indent):
    """Default implementation: no parser-specific XML info to emit."""
    pass
class ArgumentSerializer:
  """Base class for generating string representations of a flag value."""
  def Serialize(self, value):
    """Returns the default (str/unicode) string form of value."""
    return _StrOrUnicode(value)
class ListSerializer(ArgumentSerializer):
  """Serializes a list of values as a single separator-joined string."""

  def __init__(self, list_sep):
    self.list_sep = list_sep

  def Serialize(self, value):
    """Joins the string form of each element with the configured separator."""
    parts = [_StrOrUnicode(item) for item in value]
    return self.list_sep.join(parts)
# Flags validators
def RegisterValidator(flag_name,
                      checker,
                      message='Flag validation failed',
                      flag_values=FLAGS):
  """Adds a constraint, which will be enforced during program execution.

  The constraint is validated when flags are initially parsed, and
  again after each change of the corresponding flag's value.

  Args:
    flag_name: string, name of the flag to be checked.
    checker: method to validate the flag.
      input - value of the corresponding flag (string, boolean, etc.
        This value will be passed to checker by the library). See file's
        docstring for examples.
      output - Boolean.
        Must return True if validator constraint is satisfied.
        If constraint is not satisfied, it should either return False or
        raise gflags_validators.Error(desired_error_message).
    message: error text to be shown to the user if checker returns False.
      If checker raises gflags_validators.Error, message from the raised
      Error will be shown.
    flag_values: FlagValues
  Raises:
    AttributeError: if flag_name is not registered as a valid flag name.
  """
  validator = gflags_validators.SimpleValidator(flag_name, checker, message)
  flag_values.AddValidator(validator)
def MarkFlagAsRequired(flag_name, flag_values=FLAGS):
  """Ensures that flag is not None during program execution.

  Registers a flag validator, which follows the usual validator rules.

  Args:
    flag_name: string, name of the flag
    flag_values: FlagValues
  Raises:
    AttributeError: if flag_name is not registered as a valid flag name.
  """
  def _IsSpecified(value):
    # The flag passes as soon as it carries any non-None value.
    return value is not None
  RegisterValidator(flag_name,
                    _IsSpecified,
                    message='Flag --%s must be specified.' % flag_name,
                    flag_values=flag_values)
def _RegisterBoundsValidatorIfNeeded(parser, name, flag_values):
  """Enforces lower and upper bounds for numeric flags.

  Args:
    parser: NumericParser (either FloatParser or IntegerParser). Provides
      lower and upper bounds, and help text to display.
    name: string, name of the flag
    flag_values: FlagValues
  """
  if parser.lower_bound is None and parser.upper_bound is None:
    # No bounds configured; nothing to enforce.
    return

  def _CheckBounds(value):
    # None (flag unset) always passes; bounds apply to real values only.
    if value is None or not parser.IsOutsideBounds(value):
      return True
    raise gflags_validators.Error('%s is not %s' % (value,
                                                    parser.syntactic_help))

  RegisterValidator(name,
                    _CheckBounds,
                    flag_values=flag_values)
# The DEFINE functions are explained in mode details in the module doc string.
def DEFINE(parser, name, default, help, flag_values=FLAGS, serializer=None,
           **args):
  """Registers a generic Flag object.

  NOTE: in the docstrings of all DEFINE* functions, "registers" is short
  for "creates a new flag and registers it".

  Auxiliary function: clients should use the specialized DEFINE_<type>
  function instead.

  Args:
    parser: ArgumentParser that is used to parse the flag arguments.
    name: A string, the flag name.
    default: The default value of the flag.
    help: A help string.
    flag_values: FlagValues object the flag will be registered with.
    serializer: ArgumentSerializer that serializes the flag value.
    args: Dictionary with extra keyword args that are passed to the
      Flag __init__.
  """
  flag = Flag(parser, serializer, name, default, help, **args)
  DEFINE_flag(flag, flag_values)
def DEFINE_flag(flag, flag_values=FLAGS):
  """Registers a 'Flag' object with a 'FlagValues' object.

  By default, the global FLAGS 'FlagValue' object is used.

  Typical users will use one of the more specialized DEFINE_xxx
  functions, such as DEFINE_string or DEFINE_integer.  But developers
  who need to create Flag objects themselves should use this function
  to register their flags.
  """
  # Copying the reference to flag_values prevents pychecker warnings.
  fv = flag_values
  fv[flag.name] = flag
  if isinstance(flag_values, FlagValues):
    # Some callers deliberately pass surrogate mappings (e.g., {}) as
    # flag_values in order to skip registration and redefine flags with
    # the same name (e.g., debug).  To avoid breaking their code, module
    # bookkeeping is performed only for genuine FlagValues objects.
    module, module_name = _GetCallingModuleObjectAndName()
    flag_values._RegisterFlagByModule(module_name, flag)
    flag_values._RegisterFlagByModuleId(id(module), flag)
def _InternalDeclareKeyFlags(flag_names,
                             flag_values=FLAGS, key_flag_values=None):
  """Declares a flag as key for the calling module.

  Internal function.  User code should call DECLARE_key_flag or
  ADOPT_module_key_flags instead.

  Args:
    flag_names: A list of strings that are names of already-registered
      Flag objects.
    flag_values: A FlagValues object that the flags listed in
      flag_names have registered with (the value of the flag_values
      argument from the DEFINE_* calls that defined those flags).
      This should almost never need to be overridden.
    key_flag_values: A FlagValues object that (among possibly many
      other things) keeps track of the key flags for each module.
      Default None means "same as flag_values".  This should almost
      never need to be overridden.

  Raises:
    UnrecognizedFlagError: when we refer to a flag that was not
      defined yet.
  """
  key_flag_values = key_flag_values or flag_values
  module = _GetCallingModule()
  for name in flag_names:
    if name not in flag_values:
      raise UnrecognizedFlagError(name)
    flag = flag_values.FlagDict()[name]
    key_flag_values._RegisterKeyFlagForModule(module, flag)
def DECLARE_key_flag(flag_name, flag_values=FLAGS):
  """Declares one flag as key to the current module.

  Key flags are flags that are deemed really important for a module.
  They are important when listing help messages; e.g., if the
  --helpshort command-line flag is used, then only the key flags of the
  main module are listed (instead of all flags, as in the case of
  --help).

  Sample usage:

    gflags.DECLARE_key_flag('flag_1')

  Args:
    flag_name: A string, the name of an already declared flag.
      (Redeclaring flags as key, including flags implicitly key
      because they were declared in this module, is a no-op.)
    flag_values: A FlagValues object.  This should almost never
      need to be overridden.
  """
  if flag_name in _SPECIAL_FLAGS:
    # Take care of the special flags, e.g., --flagfile, --undefok.
    # These flags are defined in _SPECIAL_FLAGS, and are treated
    # specially during flag parsing, taking precedence over the
    # user-defined flags.
    _InternalDeclareKeyFlags([flag_name],
                             flag_values=_SPECIAL_FLAGS,
                             key_flag_values=flag_values)
    return
  _InternalDeclareKeyFlags([flag_name], flag_values=flag_values)
def ADOPT_module_key_flags(module, flag_values=FLAGS):
  """Declares that all flags key to a module are key to the current module.

  Args:
    module: A module object.
    flag_values: A FlagValues object.  This should almost never need
      to be overridden.

  Raises:
    FlagsError: When given an argument that is a module name (a
      string), instead of a module object.
  """
  # NOTE(salcianu): an even better test would be if not
  # isinstance(module, types.ModuleType) but I didn't want to import
  # types for such a tiny use.
  if isinstance(module, str):
    raise FlagsError('Received module name %s; expected a module object.'
                     % module)
  _InternalDeclareKeyFlags(
      [f.name for f in flag_values._GetKeyFlagsForModule(module.__name__)],
      flag_values=flag_values)
  # If module is this flag module, take _SPECIAL_FLAGS into account.
  if module == _GetThisModuleObjectAndName()[0]:
    _InternalDeclareKeyFlags(
        # As we associate flags with _GetCallingModuleObjectAndName(), the
        # special flags defined in this module are incorrectly registered with
        # a different module.  So, we can't use _GetKeyFlagsForModule.
        # Instead, we take all flags from _SPECIAL_FLAGS (a private
        # FlagValues, where no other module should register flags).
        [f.name for f in _SPECIAL_FLAGS.FlagDict().values()],
        flag_values=_SPECIAL_FLAGS,
        key_flag_values=flag_values)
#
# STRING FLAGS
#
def DEFINE_string(name, default, help, flag_values=FLAGS, **args):
  """Registers a flag whose value can be any string."""
  DEFINE(ArgumentParser(), name, default, help, flag_values,
         ArgumentSerializer(), **args)
#
# BOOLEAN FLAGS
#
class BooleanParser(ArgumentParser):
  """Parser of boolean values."""

  def Convert(self, argument):
    """Converts the argument to a boolean; raises ValueError on errors.

    Accepts the strings 'true'/'t'/'1' and 'false'/'f'/'0' in any case,
    plus objects whose value is a genuine boolean (True, False, 0, 1).

    Args:
      argument: string or other object to interpret as a boolean.

    Returns:
      The parsed boolean value.

    Raises:
      ValueError: if argument cannot be interpreted as a boolean.
    """
    # isinstance (instead of "type(argument) == str") also accepts str
    # subclasses; .lower() is computed once instead of twice.
    if isinstance(argument, str):
      lowered = argument.lower()
      if lowered in ('true', 't', '1'):
        return True
      if lowered in ('false', 'f', '0'):
        return False
    bool_argument = bool(argument)
    if argument == bool_argument:
      # The argument is a valid boolean (True, False, 0, or 1), and not just
      # something that always converts to bool (list, string, int, etc.).
      return bool_argument
    raise ValueError('Non-boolean argument to boolean flag', argument)

  def Parse(self, argument):
    """Parses the argument as a boolean; see Convert()."""
    return self.Convert(argument)

  def Type(self):
    return 'bool'
class BooleanFlag(Flag):
  """Basic boolean flag.

  Boolean flags do not take any arguments, and their value is either
  True (1) or False (0).  The false value is specified on the command
  line by prepending the word 'no' to either the long or the short flag
  name.

  For example, if a Boolean flag was created whose long name was
  'update' and whose short name was 'x', then this flag could be
  explicitly unset through either --noupdate or --nox.
  """
  def __init__(self, name, default, help, short_name=None, **args):
    p = BooleanParser()
    # No serializer (None); the positional 1 appears to be Flag's
    # 'boolean' argument marking this as a boolean flag -- confirm
    # against Flag.__init__'s signature.
    Flag.__init__(self, p, None, name, default, help, short_name, 1, **args)
    if not self.help: self.help = "a boolean value"
def DEFINE_boolean(name, default, help, flag_values=FLAGS, **args):
  """Registers a boolean flag.

  Such a boolean flag does not take an argument.  If a user wants to
  specify a false value explicitly, the long option beginning with 'no'
  must be used: i.e. --noflag

  This flag will have a value of None, True or False.  None is possible
  if default=None and the user does not specify the flag on the command
  line.
  """
  flag = BooleanFlag(name, default, help, **args)
  DEFINE_flag(flag, flag_values)
# Match C++ API to unconfuse C++ people: alias DEFINE_bool to DEFINE_boolean.
DEFINE_bool = DEFINE_boolean
class HelpFlag(BooleanFlag):
  """Special boolean flag that prints usage information and exits.

  HelpFlag raises a SystemExit exception if it is ever found in the
  command line arguments.  Note this is registered with allow_override=1,
  so other apps can define their own --help flag, replacing this one,
  if they want.
  """
  def __init__(self):
    BooleanFlag.__init__(self, "help", 0, "show this help",
                         short_name="?", allow_override=1)
  def Parse(self, arg):
    # Only act when the flag is actually set (--help); parsing the
    # default value (0) must not exit the program.
    if arg:
      doc = sys.modules["__main__"].__doc__
      flags = str(FLAGS)
      print doc or ("\nUSAGE: %s [flags]\n" % sys.argv[0])
      if flags:
        print "flags:"
        print flags
      sys.exit(1)
class HelpXMLFlag(BooleanFlag):
  """Similar to HelpFlag, but generates output in XML format."""

  def __init__(self):
    BooleanFlag.__init__(self, 'helpxml', False,
                         'like --help, but generates XML output',
                         allow_override=1)

  def Parse(self, arg):
    # A falsy arg (the default) is a no-op; only --helpxml triggers output.
    if not arg:
      return
    FLAGS.WriteHelpInXMLFormat(sys.stdout)
    sys.exit(1)
class HelpshortFlag(BooleanFlag):
  """Special boolean flag that prints usage for the "main" module and exits.

  HelpshortFlag raises a SystemExit exception if it is ever found in the
  command line arguments.  Note this is registered with allow_override=1,
  so other apps can define their own --helpshort flag, replacing this
  one, if they want.
  """
  def __init__(self):
    BooleanFlag.__init__(self, "helpshort", 0,
                         "show usage only for this module", allow_override=1)
  def Parse(self, arg):
    # Only act when the flag is actually set; parsing the default (0)
    # must not exit the program.
    if arg:
      doc = sys.modules["__main__"].__doc__
      flags = FLAGS.MainModuleHelp()
      print doc or ("\nUSAGE: %s [flags]\n" % sys.argv[0])
      if flags:
        print "flags:"
        print flags
      sys.exit(1)
#
# Numeric parser - base class for Integer and Float parsers
#
class NumericParser(ArgumentParser):
  """Shared logic for parsers of bounded numeric flag values.

  Subclasses supply Convert() and may set lower_bound/upper_bound;
  parsed values are rejected when they fall outside those bounds.
  """

  def IsOutsideBounds(self, val):
    """Returns True iff val violates a configured lower or upper bound."""
    if self.lower_bound is not None and val < self.lower_bound:
      return True
    if self.upper_bound is not None and val > self.upper_bound:
      return True
    return False

  def Parse(self, argument):
    """Converts the argument and enforces bounds; raises ValueError if bad."""
    value = self.Convert(argument)
    if self.IsOutsideBounds(value):
      raise ValueError("%s is not %s" % (value, self.syntactic_help))
    return value

  def WriteCustomInfoInXMLFormat(self, outfile, indent):
    """Emits the configured bounds, when present, as XML elements."""
    if self.lower_bound is not None:
      _WriteSimpleXMLElement(outfile, 'lower_bound', self.lower_bound, indent)
    if self.upper_bound is not None:
      _WriteSimpleXMLElement(outfile, 'upper_bound', self.upper_bound, indent)

  def Convert(self, argument):
    """Default implementation: returns the argument unmodified."""
    return argument
# End of Numeric Parser
#
# FLOAT FLAGS
#
class FloatParser(NumericParser):
  """Parser of floating point values.

  Parsed value may be bounded to a given upper and lower bound.
  """
  number_article = "a"
  number_name = "number"
  syntactic_help = " ".join((number_article, number_name))
  def __init__(self, lower_bound=None, upper_bound=None):
    """Initializes bounds and builds a human-readable syntactic_help.

    Args:
      lower_bound: number, smallest accepted value, or None for no minimum.
      upper_bound: number, largest accepted value, or None for no maximum.
    """
    super(FloatParser, self).__init__()
    self.lower_bound = lower_bound
    self.upper_bound = upper_bound
    sh = self.syntactic_help
    # NOTE: the elif ordering matters.  The both-bounds case wins first;
    # then the special wordings for a zero bound ("non-negative" /
    # "non-positive") take precedence over the generic inequalities.
    if lower_bound is not None and upper_bound is not None:
      sh = ("%s in the range [%s, %s]" % (sh, lower_bound, upper_bound))
    elif lower_bound == 0:
      sh = "a non-negative %s" % self.number_name
    elif upper_bound == 0:
      sh = "a non-positive %s" % self.number_name
    elif upper_bound is not None:
      sh = "%s <= %s" % (self.number_name, upper_bound)
    elif lower_bound is not None:
      sh = "%s >= %s" % (self.number_name, lower_bound)
    self.syntactic_help = sh
  def Convert(self, argument):
    """Converts argument to a float; raises ValueError on errors."""
    return float(argument)
  def Type(self):
    return 'float'
# End of FloatParser
def DEFINE_float(name, default, help, lower_bound=None, upper_bound=None,
                 flag_values=FLAGS, **args):
  """Registers a flag whose value must be a float.

  If lower_bound or upper_bound are set, then this flag must be
  within the given range.
  """
  parser = FloatParser(lower_bound, upper_bound)
  DEFINE(parser, name, default, help, flag_values, ArgumentSerializer(),
         **args)
  # Enforce the bounds on every subsequent value change as well.
  _RegisterBoundsValidatorIfNeeded(parser, name, flag_values=flag_values)
#
# INTEGER FLAGS
#
class IntegerParser(NumericParser):
  """Parser of an integer value.

  Parsed value may be bounded to a given upper and lower bound.
  """
  number_article = "an"
  number_name = "integer"
  syntactic_help = " ".join((number_article, number_name))
  def __init__(self, lower_bound=None, upper_bound=None):
    """Initializes bounds and builds a human-readable syntactic_help.

    Args:
      lower_bound: int, smallest accepted value, or None for no minimum.
      upper_bound: int, largest accepted value, or None for no maximum.
    """
    super(IntegerParser, self).__init__()
    self.lower_bound = lower_bound
    self.upper_bound = upper_bound
    sh = self.syntactic_help
    # NOTE: the elif ordering matters.  The both-bounds case wins first,
    # then the special wordings for +/-1 and 0 bounds, then the generic
    # inequalities.
    if lower_bound is not None and upper_bound is not None:
      sh = ("%s in the range [%s, %s]" % (sh, lower_bound, upper_bound))
    elif lower_bound == 1:
      sh = "a positive %s" % self.number_name
    elif upper_bound == -1:
      sh = "a negative %s" % self.number_name
    elif lower_bound == 0:
      sh = "a non-negative %s" % self.number_name
    elif upper_bound == 0:
      sh = "a non-positive %s" % self.number_name
    elif upper_bound is not None:
      sh = "%s <= %s" % (self.number_name, upper_bound)
    elif lower_bound is not None:
      sh = "%s >= %s" % (self.number_name, lower_bound)
    self.syntactic_help = sh
  def Convert(self, argument):
    """Converts argument to an int; raises ValueError on errors."""
    __pychecker__ = 'no-returnvalues'
    if type(argument) == str:
      base = 10
      # Only a lowercase '0x' prefix triggers hexadecimal parsing; '0X'
      # or a sign before the prefix falls through to base 10 (and then
      # int() raises ValueError for such input).
      if len(argument) > 2 and argument[0] == "0" and argument[1] == "x":
        base = 16
      return int(argument, base)
    else:
      return int(argument)
  def Type(self):
    return 'int'
def DEFINE_integer(name, default, help, lower_bound=None, upper_bound=None,
                   flag_values=FLAGS, **args):
  """Registers a flag whose value must be an integer.

  If lower_bound, or upper_bound are set, then this flag must be
  within the given range.
  """
  parser = IntegerParser(lower_bound, upper_bound)
  DEFINE(parser, name, default, help, flag_values, ArgumentSerializer(),
         **args)
  # Enforce the bounds on every subsequent value change as well.
  _RegisterBoundsValidatorIfNeeded(parser, name, flag_values=flag_values)
#
# ENUM FLAGS
#
class EnumParser(ArgumentParser):
  """Parser of a string enum value (a string value from a given set).

  If enum_values is None or empty, any string is allowed.
  """

  def __init__(self, enum_values=None):
    super(EnumParser, self).__init__()
    self.enum_values = enum_values

  def Parse(self, argument):
    """Returns argument if it is a member of the enum; raises ValueError."""
    if self.enum_values and argument not in self.enum_values:
      raise ValueError("value should be one of <%s>" %
                       "|".join(self.enum_values))
    return argument

  def Type(self):
    return 'string enum'
class EnumFlag(Flag):
  """Basic enum flag; its value can be any string from list of enum_values."""
  def __init__(self, name, default, help, enum_values=None,
               short_name=None, **args):
    # None enum_values degrades to [] -- any string is then accepted.
    enum_values = enum_values or []
    p = EnumParser(enum_values)
    g = ArgumentSerializer()
    Flag.__init__(self, p, g, name, default, help, short_name, **args)
    if not self.help: self.help = "an enum string"
    # Prefix the help text with the set of legal values.
    self.help = "<%s>: %s" % ("|".join(enum_values), self.help)
  def _WriteCustomInfoInXMLFormat(self, outfile, indent):
    # One <enum_value> element per legal value.
    for enum_value in self.parser.enum_values:
      _WriteSimpleXMLElement(outfile, 'enum_value', enum_value, indent)
def DEFINE_enum(name, default, enum_values, help, flag_values=FLAGS,
                **args):
  """Registers a flag whose value can be any string from enum_values."""
  flag = EnumFlag(name, default, help, enum_values, **args)
  DEFINE_flag(flag, flag_values)
#
# LIST FLAGS
#
class BaseListParser(ArgumentParser):
  """Base class for a parser of lists of strings.

  To extend, inherit from this class; from the subclass __init__, call

    BaseListParser.__init__(self, token, name)

  where token is a character used to tokenize, and name is a description
  of the separator.
  """

  def __init__(self, token=None, name=None):
    assert name
    super(BaseListParser, self).__init__()
    self._token = token
    self._name = name
    self.syntactic_help = "a %s separated list" % self._name

  def Parse(self, argument):
    """Splits argument on the separator; lists pass through unchanged."""
    if isinstance(argument, list):
      return argument
    if argument == '':
      # Empty string means an empty list, not [''].
      return []
    return [piece.strip() for piece in argument.split(self._token)]

  def Type(self):
    return '%s separated list of strings' % self._name
class ListParser(BaseListParser):
  """Parser for a comma-separated list of strings."""

  def __init__(self):
    BaseListParser.__init__(self, ',', 'comma')

  def WriteCustomInfoInXMLFormat(self, outfile, indent):
    """Emits the comma separator in addition to the base parser info."""
    BaseListParser.WriteCustomInfoInXMLFormat(self, outfile, indent)
    _WriteSimpleXMLElement(outfile, 'list_separator', repr(','), indent)
class WhitespaceSeparatedListParser(BaseListParser):
  """Parser for a whitespace-separated list of strings."""

  def __init__(self):
    # token=None makes str.split() split on any whitespace run.
    BaseListParser.__init__(self, None, 'whitespace')

  def WriteCustomInfoInXMLFormat(self, outfile, indent):
    """Emits one <list_separator> element per whitespace character.

    Args:
      outfile: File object we write to.
      indent: A string that is prepended to each generated line.
    """
    BaseListParser.WriteCustomInfoInXMLFormat(self, outfile, indent)
    separators = list(string.whitespace)
    separators.sort()
    # Bug fix: iterate over the sorted separators.  Previously the sorted
    # list was built but the loop iterated string.whitespace directly,
    # leaving the sort as dead code and the output order arbitrary.
    for ws_char in separators:
      _WriteSimpleXMLElement(outfile, 'list_separator', repr(ws_char), indent)
def DEFINE_list(name, default, help, flag_values=FLAGS, **args):
  """Registers a flag whose value is a comma-separated list of strings."""
  DEFINE(ListParser(), name, default, help, flag_values,
         ListSerializer(','), **args)
def DEFINE_spaceseplist(name, default, help, flag_values=FLAGS, **args):
  """Registers a flag whose value is a whitespace-separated list of strings.

  Any whitespace can be used as a separator.
  """
  DEFINE(WhitespaceSeparatedListParser(), name, default, help, flag_values,
         ListSerializer(' '), **args)
#
# MULTI FLAGS
#
class MultiFlag(Flag):
  """A flag that can appear multiple time on the command-line.

  The value of such a flag is a list that contains the individual values
  from all the appearances of that flag on the command-line.

  See the __doc__ for Flag for most behavior of this class.  Only
  differences in behavior are described here:

    * The default value may be either a single value or a list of values.
      A single value is interpreted as the [value] singleton list.

    * The value of the flag is always a list, even if the option was
      only supplied once, and even if the default value is a single
      value
  """
  def __init__(self, *args, **kwargs):
    Flag.__init__(self, *args, **kwargs)
    # Extend the help text with a usage hint (assumes Flag.__init__ has
    # set self.help to a string by now -- confirm against Flag).
    self.help += ';\n repeat this option to specify a list of values'
  def Parse(self, arguments):
    """Parses one or more arguments with the installed parser.

    Args:
      arguments: a single argument or a list of arguments (typically a
        list of default values); a single argument is converted
        internally into a list containing one item.
    """
    if not isinstance(arguments, list):
      # Default value may be a list of values.  Most other arguments
      # will not be, so convert them into a single-item list to make
      # processing simpler below.
      arguments = [arguments]
    if self.present:
      # keep a backup reference to list of previously supplied option values
      values = self.value
    else:
      # "erase" the defaults with an empty list
      values = []
    for item in arguments:
      # have Flag superclass parse argument, overwriting self.value reference
      Flag.Parse(self, item) # also increments self.present
      values.append(self.value)
    # put list of option values back in the 'value' attribute
    self.value = values
  def Serialize(self):
    """Serializes all values as one space-joined string.

    Raises:
      FlagsError: if no serializer was installed for this flag.
    """
    if not self.serializer:
      raise FlagsError("Serializer not present for flag %s" % self.name)
    if self.value is None:
      return ''
    s = ''
    multi_value = self.value
    # Temporarily rebind self.value to each element so Flag.Serialize
    # (which reads self.value) serializes one scalar at a time; the full
    # list is restored afterwards.
    for self.value in multi_value:
      if s: s += ' '
      s += Flag.Serialize(self)
    self.value = multi_value
    return s
  def Type(self):
    return 'multi ' + self.parser.Type()
def DEFINE_multi(parser, serializer, name, default, help, flag_values=FLAGS,
                 **args):
  """Registers a generic MultiFlag that parses its args with a given parser.

  Auxiliary function.  Normal users should NOT use it directly.

  Developers who need to create their own 'Parser' classes for options
  which can appear multiple times can call this module function to
  register their flags.
  """
  flag = MultiFlag(parser, serializer, name, default, help, **args)
  DEFINE_flag(flag, flag_values)
def DEFINE_multistring(name, default, help, flag_values=FLAGS, **args):
  """Registers a flag whose value can be a list of any strings.

  Use the flag on the command line multiple times to place multiple
  string values into the list.  The 'default' may be a single string
  (which will be converted into a single-element list) or a list of
  strings.
  """
  DEFINE_multi(ArgumentParser(), ArgumentSerializer(), name, default, help,
               flag_values, **args)
def DEFINE_multi_int(name, default, help, lower_bound=None, upper_bound=None,
                     flag_values=FLAGS, **args):
  """Registers a flag whose value can be a list of arbitrary integers.

  Use the flag on the command line multiple times to place multiple
  integer values into the list.  The 'default' may be a single integer
  (which will be converted into a single-element list) or a list of
  integers.
  """
  DEFINE_multi(IntegerParser(lower_bound, upper_bound), ArgumentSerializer(),
               name, default, help, flag_values, **args)
def DEFINE_multi_float(name, default, help, lower_bound=None, upper_bound=None,
                       flag_values=FLAGS, **args):
  """Registers a flag whose value can be a list of arbitrary floats.

  Use the flag on the command line multiple times to place multiple
  float values into the list.  The 'default' may be a single float
  (which will be converted into a single-element list) or a list of
  floats.
  """
  DEFINE_multi(FloatParser(lower_bound, upper_bound), ArgumentSerializer(),
               name, default, help, flag_values, **args)
# Now register the flags that we want to exist in all applications.
# These are all defined with allow_override=1, so user-apps can use
# these flagnames for their own purposes, if they want.
DEFINE_flag(HelpFlag())
DEFINE_flag(HelpshortFlag())
DEFINE_flag(HelpXMLFlag())
# Define special flags here so that help may be generated for them.
# NOTE: Please do NOT use _SPECIAL_FLAGS from outside this module.
# These are registered in a private FlagValues instance, not in FLAGS;
# they take precedence during flag parsing (see DECLARE_key_flag).
_SPECIAL_FLAGS = FlagValues()
DEFINE_string(
    'flagfile', "",
    "Insert flag definitions from the given file into the command line.",
    _SPECIAL_FLAGS)
DEFINE_string(
    'undefok', "",
    "comma-separated list of flag names that it is okay to specify "
    "on the command line even if the program does not define a flag "
    "with that name. IMPORTANT: flags in this list that have "
    "arguments MUST use the --flag=value format.", _SPECIAL_FLAGS)
| Python |
#!/usr/bin/env python
# Copyright (c) 2010, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Module to enforce different constraints on flags.
A validator represents an invariant, enforced over one or more flags.
See 'FLAGS VALIDATORS' in gflags.py's docstring for a usage manual.
"""
__author__ = 'olexiy@google.com (Olexiy Oryeshko)'
class Error(Exception):
  """Thrown if a validator constraint is not satisfied."""
class Validator(object):
  """Base class for flags validators.

  Users should NOT overload these classes, and use gflags.Register...
  methods instead.
  """

  # Class-wide counter used to assign each validator a unique
  # insertion_index.
  validators_count = 0

  def __init__(self, checker, message):
    """Constructor to create all validators.

    Args:
      checker: function to verify the constraint.
        Input of this method varies, see SimpleValidator and
        DictionaryValidator for a detailed description.
      message: string, error message to be shown to the user
    """
    self.checker = checker
    self.message = message
    Validator.validators_count += 1
    # Preserves registration order so validators can be run in the
    # order they were registered.
    self.insertion_index = Validator.validators_count

  def Verify(self, flag_values):
    """Verifies that the constraint is satisfied.

    flags library calls this method to verify Validator's constraint.

    Args:
      flag_values: gflags.FlagValues, containing all flags

    Raises:
      Error: if constraint is not satisfied.
    """
    checker_input = self._GetInputToCheckerFunction(flag_values)
    if not self.checker(checker_input):
      raise Error(self.message)

  def GetFlagsNames(self):
    """Returns [string], the names of the flags checked by this validator."""
    raise NotImplementedError('This method should be overloaded')

  def PrintFlagsWithValues(self, flag_values):
    raise NotImplementedError('This method should be overloaded')

  def _GetInputToCheckerFunction(self, flag_values):
    """Given flag values, constructs the input to be given to checker.

    Args:
      flag_values: gflags.FlagValues, containing all flags.

    Returns:
      Return type depends on the specific validator.
    """
    raise NotImplementedError('This method should be overloaded')
class SimpleValidator(Validator):
  """Validator behind RegisterValidator() method.

  Validates that a single flag passes its checker function.  The checker
  function takes the flag value and returns True (if value looks fine)
  or, if flag value is not valid, either returns False or raises an
  Exception.
  """

  def __init__(self, flag_name, checker, message):
    """Constructor.

    Args:
      flag_name: string, name of the flag.
      checker: function to verify the validator.
        input - value of the corresponding flag (string, boolean, etc).
        output - Boolean. Must return True if validator constraint is
          satisfied.  If constraint is not satisfied, it should either
          return False or raise Error.
      message: string, error message to be shown to the user if
        validator's condition is not satisfied
    """
    super(SimpleValidator, self).__init__(checker, message)
    self.flag_name = flag_name

  def GetFlagsNames(self):
    return [self.flag_name]

  def PrintFlagsWithValues(self, flag_values):
    flag_value = flag_values[self.flag_name].value
    return 'flag --%s=%s' % (self.flag_name, flag_value)

  def _GetInputToCheckerFunction(self, flag_values):
    """Given flag values, constructs the input to be given to checker.

    Args:
      flag_values: gflags.FlagValues

    Returns:
      value of the corresponding flag.
    """
    return flag_values[self.flag_name].value
class DictionaryValidator(Validator):
  """Validator behind RegisterDictionaryValidator method.

  Validates that flag values pass their common checker function.  The
  checker function takes flag values and returns True (if values look
  fine) or, if values are not valid, either returns False or raises an
  Exception.
  """
  def __init__(self, flag_names, checker, message):
    """Constructor.

    Args:
      flag_names: [string], containing names of the flags used by checker.
      checker: function to verify the validator.
        input - dictionary, with keys() being flag_names, and value for
          each key being the value of the corresponding flag (string,
          boolean, etc).
        output - Boolean. Must return True if validator constraint is
          satisfied.  If constraint is not satisfied, it should either
          return False or raise Error.
      message: string, error message to be shown to the user if
        validator's condition is not satisfied
    """
    super(DictionaryValidator, self).__init__(checker, message)
    self.flag_names = flag_names
  def _GetInputToCheckerFunction(self, flag_values):
    """Given flag values, constructs the input to be given to checker.

    Args:
      flag_values: gflags.FlagValues

    Returns:
      dictionary, with keys() being self.flag_names, and value for each
      key being the value of the corresponding flag (string, boolean,
      etc).
    """
    return dict([key, flag_values[key].value] for key in self.flag_names)
  def PrintFlagsWithValues(self, flag_values):
    prefix = 'flags '
    flags_with_values = []
    for key in self.flag_names:
      flags_with_values.append('%s=%s' % (key, flag_values[key].value))
    return prefix + ', '.join(flags_with_values)
  def GetFlagsNames(self):
    return self.flag_names
| Python |
# Early, and incomplete, implementation of the URI Template draft -04 spec.
#
import re
import urllib
# Characters with reserved meaning in a URI (RFC 3986); kept unescaped
# when the '+' (reserved expansion) operator is used.
RESERVED = ":/?#[]@!$&'()*+,;="
# Operator characters that may follow '{' in a template expression.
# NOTE(review): OPERATOR, EXPLODE and MODIFIER document the grammar but are
# not referenced by expand() below.
OPERATOR = "+./;?|!@"
# Explode modifiers: expand list/dict values element-by-element.
EXPLODE = "*+"
# Partial (prefix/suffix) modifiers; parsed by VAR but never applied.
MODIFIER = ":^"
# Matches one {operator?varlist} expression inside a template.
TEMPLATE = re.compile(r"{(?P<operator>[\+\./;\?|!@])?(?P<varlist>[^}]+)}", re.UNICODE)
# Parses one varspec: name, optional explode or partial modifier, default.
VAR = re.compile(r"^(?P<varname>[^=\+\*:\^]+)((?P<explode>[\+\*])|(?P<partial>[:\^]-?[0-9]+))?(=(?P<default>.*))?$", re.UNICODE)
def _tostring(varname, value, explode, operator, safe=""):
  """Expand one variable using the simple ('' and '+') operator rules.

  Lists and dicts are comma-joined; explode '+' prefixes each element with
  the variable name. Values are percent-encoded except for `safe` chars.
  """
  if type(value) == type([]):
    if explode == "+":
      return ",".join([varname + "." + urllib.quote(item, safe)
                       for item in value])
    return ",".join([urllib.quote(item, safe) for item in value])
  if type(value) == type({}):
    # Keys are emitted in sorted order for deterministic output.
    ordered_keys = sorted(value.keys())
    if explode == "+":
      pairs = [varname + "." + urllib.quote(key, safe) + "," +
               urllib.quote(value[key], safe) for key in ordered_keys]
    else:
      pairs = [urllib.quote(key, safe) + "," + urllib.quote(value[key], safe)
               for key in ordered_keys]
    return ",".join(pairs)
  return urllib.quote(value, safe)
def _tostring_path(varname, value, explode, operator, safe=""):
  """Expand one variable for the path-style operators ('/' and '.').

  The operator character itself joins exploded values; non-exploded
  composite values fall back to comma-joining. An empty scalar yields "".
  """
  joiner = operator
  if type(value) == type([]):
    if explode == "+":
      return joiner.join([varname + "." + urllib.quote(item, safe)
                          for item in value])
    if explode == "*":
      return joiner.join([urllib.quote(item, safe) for item in value])
    return ",".join([urllib.quote(item, safe) for item in value])
  if type(value) == type({}):
    # Keys are emitted in sorted order for deterministic output.
    ordered_keys = sorted(value.keys())
    if explode == "+":
      return joiner.join([varname + "." + urllib.quote(key, safe) + joiner +
                          urllib.quote(value[key], safe)
                          for key in ordered_keys])
    if explode == "*":
      return joiner.join([urllib.quote(key, safe) + joiner +
                          urllib.quote(value[key], safe)
                          for key in ordered_keys])
    return ",".join([urllib.quote(key, safe) + "," +
                     urllib.quote(value[key], safe) for key in ordered_keys])
  if value:
    return urllib.quote(value, safe)
  return ""
def _tostring_query(varname, value, explode, operator, safe=""):
  """Expand one variable for the query-style operators (';' and '?').

  Under '?' the joiner becomes '&' and non-exploded values get a
  'varname=' prefix. Empty lists/dicts expand to ""; an empty scalar
  yields just the variable name.
  """
  joiner = operator
  varprefix = ""
  if operator == "?":
    joiner = "&"
    varprefix = varname + "="
  if type(value) == type([]):
    if not value:
      return ""
    if explode == "+":
      return joiner.join([varname + "=" + urllib.quote(item, safe)
                          for item in value])
    if explode == "*":
      return joiner.join([urllib.quote(item, safe) for item in value])
    return varprefix + ",".join([urllib.quote(item, safe)
                                 for item in value])
  if type(value) == type({}):
    if not value:
      return ""
    # Keys are emitted in sorted order for deterministic output.
    ordered_keys = sorted(value.keys())
    if explode == "+":
      return joiner.join([varname + "." + urllib.quote(key, safe) + "=" +
                          urllib.quote(value[key], safe)
                          for key in ordered_keys])
    if explode == "*":
      return joiner.join([urllib.quote(key, safe) + "=" +
                          urllib.quote(value[key], safe)
                          for key in ordered_keys])
    return varprefix + ",".join([urllib.quote(key, safe) + "," +
                                 urllib.quote(value[key], safe)
                                 for key in ordered_keys])
  if value:
    return varname + "=" + urllib.quote(value, safe)
  return varname
# Maps each operator character to the expansion function implementing its
# joining and encoding rules. '' and '+' share the simple expansion; the
# '+' reserved-expansion behavior comes from the `safe` set chosen in
# expand(), not from a different function.
TOSTRING = {
    "" : _tostring,
    "+": _tostring,
    ";": _tostring_query,
    "?": _tostring_query,
    "/": _tostring_path,
    ".": _tostring_path,
    }
def expand(template, vars):
  """Expand a URI Template (draft -04 subset) using the given variables.

  Args:
    template: string, a URI Template containing {...} expressions.
    vars: dict, maps variable names to strings, lists or dicts.
      (NOTE(review): parameter name shadows the `vars` builtin.)

  Returns:
    string, the template with each {...} expression replaced by its
    expansion. Expressions whose variables are all absent expand to "".
  """
  def _sub(match):
    # Handles one {...} expression.
    groupdict = match.groupdict()
    operator = groupdict.get('operator')
    if operator is None:
      operator = ''
    varlist = groupdict.get('varlist')
    # '@' is always left unescaped; reserved expansion ('+') additionally
    # leaves all RFC 3986 reserved characters unescaped.
    safe = "@"
    if operator == '+':
      safe = RESERVED
    varspecs = varlist.split(",")
    varnames = []
    defaults = {}
    for varspec in varspecs:
      m = VAR.search(varspec)
      # Rebinds `groupdict` to the varspec match; the expression-level
      # dict is no longer needed at this point.
      groupdict = m.groupdict()
      varname = groupdict.get('varname')
      explode = groupdict.get('explode')
      partial = groupdict.get('partial')
      default = groupdict.get('default')
      if default:
        defaults[varname] = default
      # `partial` is collected but never consumed below (incomplete impl).
      varnames.append((varname, explode, partial))
    retval = []
    # The operator determines both the prefix emitted before the first
    # value and the joiner placed between values.
    joiner = operator
    prefix = operator
    if operator == "+":
      prefix = ""
      joiner = ","
    if operator == "?":
      joiner = "&"
    if operator == "":
      joiner = ","
    for varname, explode, partial in varnames:
      if varname in vars:
        value = vars[varname]
        #if not value and (type(value) == type({}) or type(value) == type([])) and varname in defaults:
        # Fall back to the default only for empty non-string values
        # (empty strings intentionally do NOT take the default).
        if not value and value != "" and varname in defaults:
          value = defaults[varname]
      elif varname in defaults:
        value = defaults[varname]
      else:
        # Variable absent and no default: contributes nothing.
        continue
      retval.append(TOSTRING[operator](varname, value, explode, operator, safe=safe))
    # Emit nothing at all (not even the prefix) when no variable
    # produced any text.
    if "".join(retval):
      return prefix + joiner.join(retval)
    else:
      return ""
  return TEMPLATE.sub(_sub, template)
| Python |
#!/usr/bin/env python
usage = """usage: %prog [options] path1 path2 ...
diffstrings compares your primary locale with all your other locales to help you determine which new strings need to be translated. It outputs XML files which can be translated, and then merged back into your strings files.
The path arguments supplied to this script should be the path containing your source files. The file system will be searched to find the .lproj directories containing the Localizable.strings files that the script will read and write.
"""
import os.path, codecs, optparse, re, datetime
from xml.sax.saxutils import escape
from xml.dom import minidom
###################################################################################################
# NOTE(review): `global` at module level is a no-op; kept for emphasis only.
global xmlFormat
# XML output settings ('prefix' and 'appName'), populated by main().
xmlFormat = {}
# Matches "<locale>.lproj" directory names; group 1 is the locale name.
reLprojFileName = re.compile(r'(.+?)\.lproj')
# Matches "<name>.strings" file names; group 1 is the base name.
reStringsFileName = re.compile(r'(.+?)\.strings')
# Matches a /* ... */ comment in a .strings file.
reComment = re.compile(r'/\*(.*?)\*/')
# Matches one '"source" = "target";' entry in a .strings file.
reString = re.compile(r'\s*"((\\.|.)+?)"\s*=\s*"(.+?)";')
# Matches printf-style format variables such as %@, %d, %1.2f.
reVariable = re.compile(r'%(@|d|f|lld|\d+\.?f|\d+\.\d+f|\d+d)')
# Matches positional numbering such as "%1$" added by enumerateStringVariables.
reEnumeratedVariable = re.compile('(%?\d+\$)')
# Placeholder comment emitted by genstrings; carries no information.
defaultComment = "No comment provided by engineer."
###################################################################################################
def generateProjects(projects):
    """Runs string generation (genstrings) for every given project."""
    for proj in projects:
        proj.generateStrings()
def diffProjects(projects, sourceLocaleName, focusedLocaleName=None, verbose=False, dryRun=False):
    """Diffs each locale against the source locale and writes <locale>.xml.

    Args:
        projects: list of XcodeProject.
        sourceLocaleName: string, name of the primary locale (e.g. "en").
        focusedLocaleName: optional string; restricts the diff to one locale.
        verbose: bool, print a per-locale diff report.
        dryRun: bool, when True no XML files are written.
    """
    compiledStrings = {}
    sourceMap = {}
    # Merge strings from every project into per-locale tables.
    for project in projects:
        project.compileStrings(compiledStrings, focusedLocaleName)
        project.compileSourceMap(sourceMap)
    if sourceLocaleName not in compiledStrings:
        print "WARNING: No '%s' locale found!" % sourceLocaleName
    else:
        sourceStrings = compiledStrings[sourceLocaleName]
        if verbose:
            print "\n", "=" * 80
            print "* %s has %d total strings." \
                % (sourceLocaleName, len(sourceStrings.strings))
            print "=" * 80
            for localeName, localizedStrings in compiledStrings.iteritems():
                if localizedStrings != sourceStrings:
                    sourceStrings.diffReport(localizedStrings)
        # Write a translation file for every locale except the source itself.
        for localeName, localizedStrings in compiledStrings.iteritems():
            if localizedStrings != sourceStrings:
                translation = sourceStrings.diff(localizedStrings)
                if not dryRun:
                    translation.save(".", sourceMap=sourceMap)
def mergeProjects(projects, sourceLocaleName, focusedLocaleName=None, verbose=False, dryRun=False):
    """Merges translated <locale>.xml files back into each locale's strings.

    Args:
        projects: list of XcodeProject.
        sourceLocaleName: string, name of the primary locale.
        focusedLocaleName: optional string; restricts the merge to one locale.
        verbose: bool, print a per-locale merge report.
        dryRun: bool, when True the merged .strings files are not saved.
    """
    translations = {}
    for project in projects:
        sourceStrings = project.condenseStringSourceFiles()
        # NOTE(review): this save runs even when dryRun is True — confirm
        # whether that is intentional.
        sourceStrings.save()
        for localeName, localizedStrings in project.locales.iteritems():
            if not focusedLocaleName or focusedLocaleName == localeName:
                # Translation XML files are shared across projects, so
                # each one is opened only once and cached here.
                if localizedStrings.name in translations:
                    translation = translations[localizedStrings.name]
                else:
                    translation = Translation(localizedStrings.name)
                    translation.open(".")
                    translations[localizedStrings.name] = translation
                if translation.strings:
                    if verbose:
                        localizedStrings.mergeReport(sourceStrings, translation)
                    localizedStrings.mergeTranslation(sourceStrings, translation)
                    if not dryRun:
                        localizedStrings.save()
                else:
                    if verbose:
                        print "no translation.strings for %s, sad" % localeName
###################################################################################################
class XcodeProject(object):
    """One project directory: locates its .lproj folders and .strings files."""
    def __init__(self, path, sourceLocaleName):
        self.path = path
        self.sourceLocaleName = sourceLocaleName
        # Path of the "<sourceLocaleName>.lproj" directory, found by a
        # recursive search under `path`; None if no such directory exists.
        self.sourceLocalePath = self.__findStringsFile(sourceLocaleName, self.path)
        self.stringSourceFiles = list(self.__iterateSourceStrings())
        self.locales = {}
        for localeName, localizedStrings in self.__iterateLocalizableStrings():
            self.locales[localeName] = localizedStrings
    def condenseStringSourceFiles(self):
        """ Copies all strings from all sources files into a single file."""
        sourceStrings = LocalizableStrings(self.sourceLocaleName)
        sourceStrings.path = self.__findSourceStringsPath()
        if not sourceStrings.path:
            sourceStrings.path = os.path.join(self.sourceLocalePath, "Localizable.strings")
        for sourceFile in self.stringSourceFiles:
            sourceStrings.update(sourceFile)
        return sourceStrings
    def compileStrings(self, compiledStrings, focusedLocaleName=None):
        """ Copies all strings in a dictionary for each locale."""
        if not self.sourceLocaleName in compiledStrings:
            compiledStrings[self.sourceLocaleName] = LocalizableStrings(self.sourceLocaleName)
        compiledStringsFile = compiledStrings[self.sourceLocaleName]
        for sourceStrings in self.stringSourceFiles:
            compiledStringsFile.update(sourceStrings)
        # Restrict to one locale when focusedLocaleName is given.
        if focusedLocaleName:
            locales = {focusedLocaleName: self.locales[focusedLocaleName]}
        else:
            locales = self.locales
        for localeName, sourceStrings in locales.iteritems():
            if not localeName in compiledStrings:
                compiledStringsFile = LocalizableStrings(localeName)
                compiledStrings[localeName] = compiledStringsFile
            else:
                compiledStringsFile = compiledStrings[localeName]
            compiledStringsFile.update(sourceStrings)
    def compileSourceMap(self, sourceMap):
        """Maps each source string to the .strings file base names defining it."""
        for sourceStrings in self.stringSourceFiles:
            for source in sourceStrings.strings:
                if not source in sourceMap:
                    sourceMap[source] = []
                name,ext = os.path.splitext(os.path.basename(sourceStrings.path))
                sourceMap[source].append(name)
    def generateStrings(self):
        """Runs genstrings over each .m file, keeping one output per source."""
        buildPath = None
        cwd = os.getcwd()
        # genstrings is run from inside the project directory.
        os.chdir(self.path)
        extras = ""
        # Three20 projects use TTLocalizedString rather than NSLocalizedString.
        if os.path.isdir(os.path.join(self.path, "Three20.xcodeproj")):
            extras = " -s TTLocalizedString"
        for fileName in os.listdir(self.path):
            name,ext = os.path.splitext(fileName)
            if ext == ".m":
                # Create the build output directory lazily, on first .m file.
                if not buildPath:
                    buildPath = self.__makeBuildPath()
                    if not os.path.isdir(buildPath):
                        os.makedirs(buildPath)
                command = "genstrings %s -o %s%s" % (fileName, buildPath, extras)
                print " %s" % command
                os.system(command)
                # Rename the generic output so per-file results don't collide.
                resultPath = os.path.join(buildPath, "Localizable.strings")
                if os.path.isfile(resultPath):
                    renamedPath = os.path.join(buildPath, "%s.strings" % name)
                    os.rename(resultPath, renamedPath)
        os.chdir(cwd)
    def __findStringsFile(self, localeName, searchPath):
        """Recursively searches for the "<localeName>.lproj" directory."""
        dirName = "%s.lproj" % localeName
        localeDirPath = os.path.join(searchPath, dirName)
        if os.path.isdir(localeDirPath):
            return localeDirPath
        for name in os.listdir(searchPath):
            path = os.path.join(searchPath, name)
            if os.path.isdir(path):
                localeDirPath = self.__findStringsFile(localeName, path)
                if localeDirPath:
                    return localeDirPath
        return None
    def __iterateSourceStrings(self):
        """Yields LocalizableStrings from the build dir, else the locale dir."""
        buildPath = self.__makeBuildPath()
        if not os.path.exists(buildPath):
            for path in self.__findSourceStrings():
                yield path
        else:
            for fileName in os.listdir(buildPath):
                name,ext = os.path.splitext(fileName)
                if ext == ".strings":
                    strings = LocalizableStrings(self.sourceLocaleName)
                    filePath = os.path.join(buildPath, fileName)
                    strings.open(filePath)
                    yield strings
    def __findSourceStringsPath(self):
        """Returns the path of the first .strings file in the source locale dir.

        Implicitly returns None when no .strings file is found.
        """
        for name in os.listdir(self.sourceLocalePath):
            m = reStringsFileName.match(name)
            if m:
                return os.path.join(self.sourceLocalePath, name)
    def __findSourceStrings(self):
        """Yields a parsed LocalizableStrings per .strings file in the locale dir."""
        for name in os.listdir(self.sourceLocalePath):
            m = reStringsFileName.match(name)
            if m:
                strings = LocalizableStrings(self.sourceLocaleName)
                filePath = os.path.join(self.sourceLocalePath, name)
                strings.open(filePath)
                yield strings
    def __iterateLocalizableStrings(self):
        """Yields (localeName, LocalizableStrings) for each non-source locale."""
        dirPath = os.path.dirname(self.sourceLocalePath)
        for dirName in os.listdir(dirPath):
            m = reLprojFileName.match(dirName)
            if m:
                localeName = m.groups()[0]
                if localeName != self.sourceLocaleName:
                    strings = LocalizableStrings(localeName)
                    localeDirPath = os.path.join(dirPath, dirName)
                    # Only the first .strings file in each directory is read.
                    for name in os.listdir(localeDirPath):
                        m = reStringsFileName.match(name)
                        if m:
                            filePath = os.path.abspath(os.path.join(localeDirPath, name))
                            strings.open(filePath)
                            break
                    yield localeName, strings
    def __makeBuildPath(self):
        """Returns the intermediate genstrings output directory path."""
        return os.path.join(self.path, "build", "i18n")
###################################################################################################
class LocalizableStrings(object):
    """The contents of one Localizable.strings file: strings plus comments."""
    def __init__(self, name):
        # Locale name, e.g. "en".
        self.name = name
        # File path; set by open() or assigned by the caller before save().
        self.path = None
        # Maps source string -> translated/target string.
        self.strings = {}
        # Maps source string -> engineer comment (default comment excluded).
        self.comments = {}
    def open(self, path):
        """Parses the .strings file at `path` if it exists."""
        if os.path.isfile(path):
            self.path = path
            self.__parse()
    def save(self, path=None, suffix=""):
        """Writes the strings out as UTF-16, to `path` or back to self.path."""
        text = self.generate()
        if text:
            if path:
                filePath = self.__makePath(path, suffix)
            else:
                filePath = self.path
            print "***** Saving %s" % filePath
            f = codecs.open(filePath, 'w', 'utf-16')
            f.write(text)
            f.close()
    def generate(self):
        """Returns the .strings file text, entries sorted by source string."""
        lines = []
        # This may not sort non-English strings sensibly, but the order itself
        # doesn't matter - this is just so that the strings come out in some
        # consistent order every time. (Less efficient, but oh well.)
        for source in sorted(self.strings.keys()):
            if source in self.comments:
                comment = self.comments[source]
                lines.append("/* %s */" % comment)
            lines.append('"%s" = "%s";\n' % (source, self.strings[source]))
        return "\n".join(lines)
    def mergeTranslation(self, sourceStrings, translation):
        """Copies translated values from `translation` into self.strings.

        Translation files store strings with numbered variables (e.g. %1$@),
        so lookups are done in enumerated form and results de-numerated.
        """
        for source in sourceStrings.strings:
            sourceEnum = enumerateStringVariables(source)
            if sourceEnum in translation.strings:
                targetEnum = translation.strings[sourceEnum]
                target = denumerateStringVariables(targetEnum)
                self.strings[source] = target
    def update(self, other):
        """Merges another LocalizableStrings' strings and comments into self."""
        self.strings.update(other.strings)
        self.comments.update(other.comments)
    def diff(self, localizedStrings):
        """Returns a Translation of self against `localizedStrings`.

        Entries already present in `localizedStrings` are marked as
        translated; all strings are stored in enumerated-variable form.
        """
        translation = Translation(localizedStrings.name)
        for source, target in self.strings.iteritems():
            sourceEnum = enumerateStringVariables(source)
            if source in localizedStrings.strings:
                target = localizedStrings.strings[source]
                translation.translated[sourceEnum] = True
            targetEnum = enumerateStringVariables(target)
            translation.strings[sourceEnum] = targetEnum
            if source in self.comments:
                translation.comments[sourceEnum] = self.comments[source]
        return translation
    def diffReport(self, localizedStrings):
        """Prints new, obsolete, and suspiciously long ("trouble") strings."""
        name = localizedStrings.name
        newStrings = list(self.__compare(localizedStrings))
        obsoleteStrings = list(localizedStrings.__compare(self))
        troubleStrings = list(self.__compareSizes(localizedStrings))
        print "\n", "=" * 80
        if not len(newStrings):
            if len(obsoleteStrings):
                print "%s is fully translated, but has %s obsolete strings" \
                    % (name, len(obsoleteStrings))
            else:
                print "%s is fully translated" % name
        else:
            existingCount = len(self.strings) - len(newStrings)
            if len(obsoleteStrings):
                print "%s has %s new strings, %s translated, and %s obsolete."\
                    % (name, len(newStrings), existingCount, len(obsoleteStrings))
            else:
                print "%s has %s new strings, with %s already translated."\
                    % (name, len(newStrings), existingCount)
        print "=" * 80
        if len(newStrings):
            print "\n---- %s NEW STRINGS ---\n" % name
            print "\n".join(newStrings)
        if len(obsoleteStrings):
            print "\n---- %s OBSOLETE STRINGS ---\n" % name
            print "\n".join(obsoleteStrings)
        if len(troubleStrings):
            print "\n---- %s TROUBLE STRINGS ---\n" % name
            # Sorted by length delta, largest first (py2 cmp-style sort).
            for source, diff in sorted(troubleStrings, lambda a,b: cmp(b[1], a[1])):
                print "%3d. %s " % (diff, codecs.encode(source, 'utf-8'))
                print " %s " % codecs.encode(localizedStrings.strings[source], 'utf-8')
            print "\n"
    def mergeReport(self, sourceStrings, translation):
        """Prints which strings `translation` updates vs. leaves untranslated."""
        name = self.name
        updatedStrings = []
        ignoredStrings = []
        for source in sourceStrings.strings:
            sourceEnum = enumerateStringVariables(source)
            if sourceEnum in translation.strings:
                targetEnum = translation.strings[sourceEnum]
                target = denumerateStringVariables(targetEnum)
                if source not in self.strings or target != self.strings[source]:
                    updatedStrings.append(source)
            else:
                ignoredStrings.append(source)
        print "\n", "=" * 80
        print self.path
        print "%d newly translated strings and %d untranslated strings" \
            % (len(updatedStrings), len(ignoredStrings))
        print "=" * 80
        if len(updatedStrings):
            print "\n---- %s NEWLY TRANSLATED STRINGS ---\n" % name
            print "\n".join(updatedStrings)
        if len(ignoredStrings):
            print "\n---- %s UNTRANSLATED STRINGS ---\n" % name
            print "\n".join(ignoredStrings)
    def __makePath(self, path=".", suffix=""):
        """Returns the absolute "Localizable<suffix>.strings" path under `path`."""
        fileName = "Localizable%s.strings" % (suffix)
        return os.path.abspath(os.path.join(path, fileName))
    def __parse(self):
        """Parses self.path, filling self.strings and self.comments."""
        # NOTE(review): lastIdentical is never set to True anywhere, so the
        # branch below looks vestigial — confirm before removing.
        lastIdentical = False
        lastComment = None
        for line in openWithProperEncoding(self.path):
            m = reString.search(line)
            if m:
                source = m.groups()[0]
                target = m.groups()[2]
                self.strings[source] = target
                # Attach the most recent comment to this entry.
                if lastComment:
                    self.comments[source] = lastComment
                    lastComment = None
                if lastIdentical:
                    lastIdentical = False
            else:
                m = reComment.search(line)
                if m:
                    comment = m.groups()[0].strip()
                    # Skip the placeholder comment genstrings emits.
                    if comment != defaultComment:
                        lastComment = comment
    def __compare(self, other, compareStrings=False):
        """Yields sources missing from `other` (or, with compareStrings,
        sources whose translation is identical to the source)."""
        for source, target in self.strings.iteritems():
            if source in other.strings:
                target = other.strings[source]
                if compareStrings and target == source:
                    yield source
            else:
                yield source
    def __compareSizes(self, other):
        """Yields (source, length delta) where the translation looks too long."""
        for source, target in self.strings.iteritems():
            if source in other.strings:
                target = other.strings[source]
                ratio = float(len(target)) / len(source)
                diff = len(target) - len(source)
                # Flag translations >30% longer and >5 characters longer.
                if ratio > 1.3 and diff > 5:
                    yield (source, diff)
###################################################################################################
class Translation(object):
    """One <locale>.xml translation interchange file."""
    def __init__(self, name):
        # Locale name; also the base of the XML file name.
        self.name = name
        self.path = None
        # Maps enumerated source string -> enumerated target string.
        self.strings = {}
        # Keys are enumerated sources that already had a translation.
        self.translated = {}
        # Maps enumerated source string -> engineer comment.
        self.comments = {}
    def open(self, path=".", suffix=""):
        """Loads "<name><suffix>.xml" from `path` if it exists."""
        filePath = self.__makePath(path, suffix)
        if os.path.isfile(filePath):
            self.__parse(filePath)
    def save(self, path=None, suffix="", sourceMap=None):
        """Writes the XML document (UTF-16) to `path` or back to self.path."""
        text = self.generate(sourceMap)
        if text:
            if path:
                filePath = self.__makePath(path, suffix)
            else:
                filePath = self.path
            print "***** Saving %s" % filePath
            #print codecs.encode(text, 'utf-8')
            f = codecs.open(filePath, 'w', 'utf-16')
            f.write(text)
            f.close()
    def generate(self, sourceMap=None):
        """Returns the XML text, entries grouped by originating source file.

        Already-translated entries are written as <xtarget>, untranslated
        ones as <target>. NOTE(review): strings whose source is absent from
        sourceMap are omitted entirely (see __invertSourceMap).
        """
        lines = []
        global xmlFormat
        prefix = xmlFormat['prefix']
        lines.append('<?xml version="1.0" encoding="utf-16"?>')
        lines.append('<%sexternal>' % prefix)
        lines.append(' <meta>')
        if xmlFormat['appName']:
            lines.append(' <appName>%s</appName>' % xmlFormat['appName'])
        lines.append(' <date>%s</date>' % datetime.datetime.now().strftime('%Y%m%d'))
        lines.append(' <locale>%s</locale>' % self.name)
        lines.append(' </meta>')
        for sourceFileName, sourceFileStrings in self.__invertSourceMap(sourceMap):
            lines.append(" <!-- %s -->" % sourceFileName)
            for source in sourceFileStrings:
                target = self.strings[source]
                lines.append(" <entry>")
                lines.append(" <%ssource>%s</%ssource>" % (prefix, escape(source), prefix))
                if source in self.translated:
                    lines.append(" <%sxtarget>%s</%sxtarget>" % (prefix, escape(target), prefix))
                else:
                    lines.append(" <%starget>%s</%starget>" % (prefix, escape(target), prefix))
                if source in self.comments:
                    lines.append(" <%sdescription>%s</%sdescription>"
                        % (prefix, escape(self.comments[source]), prefix))
                lines.append(" </entry>")
        lines.append('</%sexternal>' % prefix)
        return "\n".join(lines)
    def __makePath(self, path=".", suffix=""):
        """Returns the absolute path of "<name><suffix>.xml" under `path`."""
        fileName = "%s%s.xml" % (self.name, suffix)
        return os.path.abspath(os.path.join(path, fileName))
    def __parse(self, filePath):
        """Reads entries from the XML file at filePath into self.strings."""
        self.path = filePath
        global xmlFormat
        prefix = xmlFormat['prefix']
        document = minidom.parse(filePath)
        for entry in document.documentElement.childNodes:
            # nodeType 1 == ELEMENT_NODE.
            if entry.nodeType == 1:
                source = None
                target = None
                translated = False
                sources = entry.getElementsByTagName("%ssource" % prefix)
                if len(sources):
                    source = sources[0]
                    source = source.childNodes[0].data
                # Prefer <xtarget>; a plain <target> means the translator
                # filled it in, so the entry is marked as translated.
                targets = entry.getElementsByTagName("%sxtarget" % prefix)
                if not len(targets):
                    targets = entry.getElementsByTagName("%starget" % prefix)
                    translated = True
                if len(targets):
                    target = targets[0]
                    target = target.childNodes[0].data
                if source and target:
                    self.strings[source] = target
                    if translated:
                        self.translated[source] = True
    def __invertSourceMap(self, sourceMap):
        """Yields (sourceFileName, sorted enumerated sources), by file name.

        Each string is attributed to the first file listed in sourceMap;
        strings without a sourceMap entry are dropped.
        """
        sourceFileMap = {}
        for sourceEnum in self.strings:
            source = denumerateStringVariables(sourceEnum)
            if source in sourceMap:
                sourcePaths = sourceMap[source]
                for sourcePath in sourcePaths:
                    if sourcePath not in sourceFileMap:
                        sourceFileMap[sourcePath] = []
                    sourceFileMap[sourcePath].append(sourceEnum)
                    break
        for sourceName, sourceFileStrings in sourceFileMap.iteritems():
            sourceFileStrings.sort()
        keys = sourceFileMap.keys()
        keys.sort()
        for key in keys:
            yield key, sourceFileMap[key]
###################################################################################################
## Helpers
def openProjects(projectDirPaths, sourceLocaleName):
    """Yields an XcodeProject for each of the given project directories."""
    for dirPath in projectDirPaths:
        yield XcodeProject(dirPath, sourceLocaleName)
def openWithProperEncoding(path):
    """Reads a strings file as UTF-16, falling back to UTF-8.

    Returns a list of lines, or an empty list if the file does not exist.
    """
    if not os.path.isfile(path):
        return []
    try:
        handle = codecs.open(path, 'r', 'utf-16')
        lines = handle.read().splitlines()
        handle.close()
    except UnicodeError:
        # Not valid UTF-16 (e.g. no BOM); retry as UTF-8.
        handle = codecs.open(path, 'r', 'utf-8')
        lines = handle.read().splitlines()
        handle.close()
    return lines
def enumerateStringVariables(s):
    """Numbers printf-style variables: '%@ %d' becomes '%1$@ %2$d'."""
    position = 1
    for var in reVariable.findall(s):
        # Replace only the first remaining occurrence of this variable.
        s = s.replace("%%%s" % var, "%%%d$%s" % (position, var), 1)
        position += 1
    return s
def denumerateStringVariables(s):
    """Strips positional numbering from variables: '%1$@' becomes '%@'."""
    for numbered in reEnumeratedVariable.findall(s):
        s = s.replace(numbered, "%")
    return s
###################################################################################################
## Main
def parseOptions():
    """Defines and parses the command-line interface.

    Returns:
        (options, paths): parsed optparse options and the project paths,
        defaulting to ["."] when no arguments are given. Diffing is the
        default action unless --merge is passed.
    """
    parser = optparse.OptionParser(usage)
    parser.set_defaults(locale="en", focus=None, build=False, merge=False, diff=False,
        verbose=False, dryrun=False, appName="", prefix="")
    parser.add_option("-l", "--locale", dest="locale", type="str",
        help = "The name of your source locale. The default is 'en'.")
    parser.add_option("-f", "--focus", dest="focus", type="str",
        help = "The name of the locale to operate on, excluding all others.")
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
        help = "Verbose reporting of activity.")
    parser.add_option("-r", "--dryrun", dest="dryrun", action="store_true",
        help = "Print the output of files instead of saving them.")
    parser.add_option("-b", "--build", dest="build", action="store_true",
        help = "Runs genstrings on each source file in each project.")
    parser.add_option("-d", "--diff", dest="diff", action="store_true",
        help="Generates a diff of each locale against the source locale. Each locale's diff will be stored in a file in the working directory named <locale>.xml.")
    parser.add_option("-m", "--merge", dest="merge", action="store_true",
        help="Merges strings from the <locale>.xml file in the working directory back into the Localized.strings files in each locale.")
    parser.add_option("-p", "--prefix", dest="prefix", type="str",
        help="The prefix to use on the xml tags.")
    parser.add_option("-a", "--appname", dest="appName", type="str",
        help="The name of the application to include in the xml metadata.")
    options, arguments = parser.parse_args()
    # Default to the current directory when no paths are supplied.
    paths = ["."] if not len(arguments) else arguments
    # Diff is the default action when --merge was not requested.
    if not options.merge:
        options.diff = True
    return options, paths
def main():
    """Entry point: builds, merges, and/or diffs per the parsed options."""
    options, projectPaths = parseOptions()
    projectPaths = [os.path.abspath(os.path.expanduser(path)) for path in projectPaths]
    # Publish XML formatting settings for Translation.generate()/__parse().
    global xmlFormat
    xmlFormat['prefix'] = options.prefix
    xmlFormat['appName'] = options.appName
    projects = list(openProjects(projectPaths, options.locale))
    if options.build:
        print "******* Generating strings *******"
        generateProjects(projects)
        print ""
    if options.merge:
        print "******* Merging *******"
        mergeProjects(projects, options.locale, options.focus, options.verbose, options.dryrun)
        print ""
    if options.diff:
        print "******* Diffing *******"
        diffProjects(projects, options.locale, options.focus, options.verbose, options.dryrun)
        print ""
# Script entry point.
if __name__ == "__main__":
    main()
| Python |
#!/usr/bin/env python
usage = """usage: %prog [options] path1 path2 ...
diffstrings compares your primary locale with all your other locales to help you determine which new strings need to be translated. It outputs XML files which can be translated, and then merged back into your strings files.
The path arguments supplied to this script should be the path containing your source files. The file system will be searched to find the .lproj directories containing the Localizable.strings files that the script will read and write.
"""
import os.path, codecs, optparse, re, datetime
from xml.sax.saxutils import escape
from xml.dom import minidom
###################################################################################################
# NOTE(review): `global` at module level is a no-op; kept for emphasis only.
global xmlFormat
# XML output settings ('prefix' and 'appName'), populated by main().
xmlFormat = {}
# Matches "<locale>.lproj" directory names; group 1 is the locale name.
reLprojFileName = re.compile(r'(.+?)\.lproj')
# Matches "<name>.strings" file names; group 1 is the base name.
reStringsFileName = re.compile(r'(.+?)\.strings')
# Matches a /* ... */ comment in a .strings file.
reComment = re.compile(r'/\*(.*?)\*/')
# Matches one '"source" = "target";' entry in a .strings file.
reString = re.compile(r'\s*"((\\.|.)+?)"\s*=\s*"(.+?)";')
# Matches printf-style format variables such as %@, %d, %1.2f.
reVariable = re.compile(r'%(@|d|f|lld|\d+\.?f|\d+\.\d+f|\d+d)')
# Matches positional numbering such as "%1$" added by enumerateStringVariables.
reEnumeratedVariable = re.compile('(%?\d+\$)')
# Placeholder comment emitted by genstrings; carries no information.
defaultComment = "No comment provided by engineer."
###################################################################################################
def generateProjects(projects):
    """Runs string generation (genstrings) for every given project."""
    for proj in projects:
        proj.generateStrings()
def diffProjects(projects, sourceLocaleName, focusedLocaleName=None, verbose=False, dryRun=False):
    """Diffs each locale against the source locale and writes <locale>.xml.

    Args:
        projects: list of XcodeProject.
        sourceLocaleName: string, name of the primary locale (e.g. "en").
        focusedLocaleName: optional string; restricts the diff to one locale.
        verbose: bool, print a per-locale diff report.
        dryRun: bool, when True no XML files are written.
    """
    compiledStrings = {}
    sourceMap = {}
    # Merge strings from every project into per-locale tables.
    for project in projects:
        project.compileStrings(compiledStrings, focusedLocaleName)
        project.compileSourceMap(sourceMap)
    if sourceLocaleName not in compiledStrings:
        print "WARNING: No '%s' locale found!" % sourceLocaleName
    else:
        sourceStrings = compiledStrings[sourceLocaleName]
        if verbose:
            print "\n", "=" * 80
            print "* %s has %d total strings." \
                % (sourceLocaleName, len(sourceStrings.strings))
            print "=" * 80
            for localeName, localizedStrings in compiledStrings.iteritems():
                if localizedStrings != sourceStrings:
                    sourceStrings.diffReport(localizedStrings)
        # Write a translation file for every locale except the source itself.
        for localeName, localizedStrings in compiledStrings.iteritems():
            if localizedStrings != sourceStrings:
                translation = sourceStrings.diff(localizedStrings)
                if not dryRun:
                    translation.save(".", sourceMap=sourceMap)
def mergeProjects(projects, sourceLocaleName, focusedLocaleName=None, verbose=False, dryRun=False):
    """Merges translated <locale>.xml files back into each locale's strings.

    Args:
        projects: list of XcodeProject.
        sourceLocaleName: string, name of the primary locale.
        focusedLocaleName: optional string; restricts the merge to one locale.
        verbose: bool, print a per-locale merge report.
        dryRun: bool, when True the merged .strings files are not saved.
    """
    translations = {}
    for project in projects:
        sourceStrings = project.condenseStringSourceFiles()
        # NOTE(review): this save runs even when dryRun is True — confirm
        # whether that is intentional.
        sourceStrings.save()
        for localeName, localizedStrings in project.locales.iteritems():
            if not focusedLocaleName or focusedLocaleName == localeName:
                # Translation XML files are shared across projects, so
                # each one is opened only once and cached here.
                if localizedStrings.name in translations:
                    translation = translations[localizedStrings.name]
                else:
                    translation = Translation(localizedStrings.name)
                    translation.open(".")
                    translations[localizedStrings.name] = translation
                if translation.strings:
                    if verbose:
                        localizedStrings.mergeReport(sourceStrings, translation)
                    localizedStrings.mergeTranslation(sourceStrings, translation)
                    if not dryRun:
                        localizedStrings.save()
                else:
                    if verbose:
                        print "no translation.strings for %s, sad" % localeName
###################################################################################################
class XcodeProject(object):
def __init__(self, path, sourceLocaleName):
self.path = path
self.sourceLocaleName = sourceLocaleName
self.sourceLocalePath = self.__findStringsFile(sourceLocaleName, self.path)
self.stringSourceFiles = list(self.__iterateSourceStrings())
self.locales = {}
for localeName, localizedStrings in self.__iterateLocalizableStrings():
self.locales[localeName] = localizedStrings
def condenseStringSourceFiles(self):
""" Copies all strings from all sources files into a single file."""
sourceStrings = LocalizableStrings(self.sourceLocaleName)
sourceStrings.path = self.__findSourceStringsPath()
if not sourceStrings.path:
sourceStrings.path = os.path.join(self.sourceLocalePath, "Localizable.strings")
for sourceFile in self.stringSourceFiles:
sourceStrings.update(sourceFile)
return sourceStrings
def compileStrings(self, compiledStrings, focusedLocaleName=None):
""" Copies all strings in a dictionary for each locale."""
if not self.sourceLocaleName in compiledStrings:
compiledStrings[self.sourceLocaleName] = LocalizableStrings(self.sourceLocaleName)
compiledStringsFile = compiledStrings[self.sourceLocaleName]
for sourceStrings in self.stringSourceFiles:
compiledStringsFile.update(sourceStrings)
if focusedLocaleName:
locales = {focusedLocaleName: self.locales[focusedLocaleName]}
else:
locales = self.locales
for localeName, sourceStrings in locales.iteritems():
if not localeName in compiledStrings:
compiledStringsFile = LocalizableStrings(localeName)
compiledStrings[localeName] = compiledStringsFile
else:
compiledStringsFile = compiledStrings[localeName]
compiledStringsFile.update(sourceStrings)
def compileSourceMap(self, sourceMap):
for sourceStrings in self.stringSourceFiles:
for source in sourceStrings.strings:
if not source in sourceMap:
sourceMap[source] = []
name,ext = os.path.splitext(os.path.basename(sourceStrings.path))
sourceMap[source].append(name)
def generateStrings(self):
buildPath = None
cwd = os.getcwd()
os.chdir(self.path)
extras = ""
if os.path.isdir(os.path.join(self.path, "Three20.xcodeproj")):
extras = " -s TTLocalizedString"
for fileName in os.listdir(self.path):
name,ext = os.path.splitext(fileName)
if ext == ".m":
if not buildPath:
buildPath = self.__makeBuildPath()
if not os.path.isdir(buildPath):
os.makedirs(buildPath)
command = "genstrings %s -o %s%s" % (fileName, buildPath, extras)
print " %s" % command
os.system(command)
resultPath = os.path.join(buildPath, "Localizable.strings")
if os.path.isfile(resultPath):
renamedPath = os.path.join(buildPath, "%s.strings" % name)
os.rename(resultPath, renamedPath)
os.chdir(cwd)
def __findStringsFile(self, localeName, searchPath):
dirName = "%s.lproj" % localeName
localeDirPath = os.path.join(searchPath, dirName)
if os.path.isdir(localeDirPath):
return localeDirPath
for name in os.listdir(searchPath):
path = os.path.join(searchPath, name)
if os.path.isdir(path):
localeDirPath = self.__findStringsFile(localeName, path)
if localeDirPath:
return localeDirPath
return None
def __iterateSourceStrings(self):
buildPath = self.__makeBuildPath()
if not os.path.exists(buildPath):
for path in self.__findSourceStrings():
yield path
else:
for fileName in os.listdir(buildPath):
name,ext = os.path.splitext(fileName)
if ext == ".strings":
strings = LocalizableStrings(self.sourceLocaleName)
filePath = os.path.join(buildPath, fileName)
strings.open(filePath)
yield strings
def __findSourceStringsPath(self):
for name in os.listdir(self.sourceLocalePath):
m = reStringsFileName.match(name)
if m:
return os.path.join(self.sourceLocalePath, name)
def __findSourceStrings(self):
for name in os.listdir(self.sourceLocalePath):
m = reStringsFileName.match(name)
if m:
strings = LocalizableStrings(self.sourceLocaleName)
filePath = os.path.join(self.sourceLocalePath, name)
strings.open(filePath)
yield strings
def __iterateLocalizableStrings(self):
dirPath = os.path.dirname(self.sourceLocalePath)
for dirName in os.listdir(dirPath):
m = reLprojFileName.match(dirName)
if m:
localeName = m.groups()[0]
if localeName != self.sourceLocaleName:
strings = LocalizableStrings(localeName)
localeDirPath = os.path.join(dirPath, dirName)
for name in os.listdir(localeDirPath):
m = reStringsFileName.match(name)
if m:
filePath = os.path.abspath(os.path.join(localeDirPath, name))
strings.open(filePath)
break
yield localeName, strings
def __makeBuildPath(self):
    # Strings generated by genstrings are staged under <project>/build/i18n.
    return os.path.join(self.path, "build", "i18n")
###################################################################################################
class LocalizableStrings(object):
    """In-memory model of one .strings localization table.

    Maps source strings to localized values and keeps the /* ... */
    comment (minus delimiters) that precedes each entry.
    """

    def __init__(self, name):
        # name: the locale this table belongs to (e.g. "en").
        self.name = name
        # Path of the file last loaded by open(); None until then.
        self.path = None
        # source string -> localized string
        self.strings = {}
        # source string -> comment text
        self.comments = {}

    def open(self, path):
        """Load and parse the .strings file at path, if it exists."""
        if os.path.isfile(path):
            self.path = path
            self.__parse()

    def save(self, path=None, suffix=""):
        """Write the table out as UTF-16 .strings text.

        With path given, saves to <path>/Localizable<suffix>.strings;
        otherwise overwrites the file it was loaded from. Empty tables are
        not written.
        """
        text = self.generate()
        if text:
            if path:
                filePath = self.__makePath(path, suffix)
            else:
                filePath = self.path
            print "***** Saving %s" % filePath
            f = codecs.open(filePath, 'w', 'utf-16')
            f.write(text)
            f.close()

    def generate(self):
        """Render the table in .strings syntax, entries sorted by source."""
        lines = []
        # This may not sort non-English strings sensibly, but the order itself
        # doesn't matter - this is just so that the strings come out in some
        # consistent order every time. (Less efficient, but oh well.)
        for source in sorted(self.strings.keys()):
            if source in self.comments:
                comment = self.comments[source]
                lines.append("/* %s */" % comment)
            lines.append('"%s" = "%s";\n' % (source, self.strings[source]))
        return "\n".join(lines)

    def mergeTranslation(self, sourceStrings, translation):
        """For every source string in sourceStrings, copy the value from a
        Translation into this table (variables are converted between bare
        and enumerated form on the way)."""
        for source in sourceStrings.strings:
            sourceEnum = enumerateStringVariables(source)
            if sourceEnum in translation.strings:
                targetEnum = translation.strings[sourceEnum]
                target = denumerateStringVariables(targetEnum)
                self.strings[source] = target

    def update(self, other):
        """Absorb another table's strings and comments (other wins)."""
        self.strings.update(other.strings)
        self.comments.update(other.comments)

    def diff(self, localizedStrings):
        """Build a Translation of this (source) table against a localized
        table.

        Every source string is included; strings already present in
        localizedStrings carry their localization and are marked as
        translated, the rest keep the source-locale value as placeholder.
        """
        translation = Translation(localizedStrings.name)
        for source, target in self.strings.iteritems():
            sourceEnum = enumerateStringVariables(source)
            if source in localizedStrings.strings:
                target = localizedStrings.strings[source]
                translation.translated[sourceEnum] = True
            targetEnum = enumerateStringVariables(target)
            translation.strings[sourceEnum] = targetEnum
            if source in self.comments:
                translation.comments[sourceEnum] = self.comments[source]
        return translation

    def diffReport(self, localizedStrings):
        """Print a human-readable report comparing this (source) table with
        a localized one: new, obsolete, and suspiciously long strings."""
        name = localizedStrings.name
        newStrings = list(self.__compare(localizedStrings))
        obsoleteStrings = list(localizedStrings.__compare(self))
        troubleStrings = list(self.__compareSizes(localizedStrings))
        print "\n", "=" * 80
        if not len(newStrings):
            if len(obsoleteStrings):
                print "%s is fully translated, but has %s obsolete strings" \
                    % (name, len(obsoleteStrings))
            else:
                print "%s is fully translated" % name
        else:
            existingCount = len(self.strings) - len(newStrings)
            if len(obsoleteStrings):
                print "%s has %s new strings, %s translated, and %s obsolete."\
                    % (name, len(newStrings), existingCount, len(obsoleteStrings))
            else:
                print "%s has %s new strings, with %s already translated."\
                    % (name, len(newStrings), existingCount)
        print "=" * 80
        if len(newStrings):
            print "\n---- %s NEW STRINGS ---\n" % name
            print "\n".join(newStrings)
        if len(obsoleteStrings):
            print "\n---- %s OBSOLETE STRINGS ---\n" % name
            print "\n".join(obsoleteStrings)
        if len(troubleStrings):
            print "\n---- %s TROUBLE STRINGS ---\n" % name
            # Worst offenders (largest growth) first.
            for source, diff in sorted(troubleStrings, lambda a,b: cmp(b[1], a[1])):
                print "%3d. %s " % (diff, codecs.encode(source, 'utf-8'))
                print " %s " % codecs.encode(localizedStrings.strings[source], 'utf-8')
        print "\n"

    def mergeReport(self, sourceStrings, translation):
        """Print which of sourceStrings' entries the given Translation would
        change in this table versus which remain untranslated."""
        name = self.name
        updatedStrings = []
        ignoredStrings = []
        for source in sourceStrings.strings:
            sourceEnum = enumerateStringVariables(source)
            if sourceEnum in translation.strings:
                targetEnum = translation.strings[sourceEnum]
                target = denumerateStringVariables(targetEnum)
                if source not in self.strings or target != self.strings[source]:
                    updatedStrings.append(source)
            else:
                ignoredStrings.append(source)
        print "\n", "=" * 80
        print self.path
        print "%d newly translated strings and %d untranslated strings" \
            % (len(updatedStrings), len(ignoredStrings))
        print "=" * 80
        if len(updatedStrings):
            print "\n---- %s NEWLY TRANSLATED STRINGS ---\n" % name
            print "\n".join(updatedStrings)
        if len(ignoredStrings):
            print "\n---- %s UNTRANSLATED STRINGS ---\n" % name
            print "\n".join(ignoredStrings)

    def __makePath(self, path=".", suffix=""):
        # Canonical output name: Localizable<suffix>.strings under path.
        fileName = "Localizable%s.strings" % (suffix)
        return os.path.abspath(os.path.join(path, fileName))

    def __parse(self):
        """Parse self.path, filling self.strings and self.comments.

        A comment line is remembered and attached to the next string entry;
        the default genstrings placeholder comment is ignored.
        NOTE(review): lastIdentical is never set True here -- appears
        vestigial.
        """
        lastIdentical = False
        lastComment = None
        for line in openWithProperEncoding(self.path):
            m = reString.search(line)
            if m:
                source = m.groups()[0]
                target = m.groups()[2]
                self.strings[source] = target
                if lastComment:
                    self.comments[source] = lastComment
                    lastComment = None
                if lastIdentical:
                    lastIdentical = False
            else:
                m = reComment.search(line)
                if m:
                    comment = m.groups()[0].strip()
                    if comment != defaultComment:
                        lastComment = comment

    def __compare(self, other, compareStrings=False):
        """Yield each of our source strings missing from other (and, with
        compareStrings, also those whose value in other equals the
        source)."""
        for source, target in self.strings.iteritems():
            if source in other.strings:
                target = other.strings[source]
                if compareStrings and target == source:
                    yield source
            else:
                yield source

    def __compareSizes(self, other):
        """Yield (source, growth) for localized strings markedly longer
        than their source (ratio > 1.3 and more than 5 chars) -- likely
        layout trouble."""
        for source, target in self.strings.iteritems():
            if source in other.strings:
                target = other.strings[source]
                ratio = float(len(target)) / len(source)
                diff = len(target) - len(source)
                if ratio > 1.3 and diff > 5:
                    yield (source, diff)
###################################################################################################
class Translation(object):
    """An XML interchange document of strings sent to and received from
    translators.

    Keys in `strings` are "enumerated" source strings (see
    enumerateStringVariables); `translated` marks entries that already had
    a localization; `comments` carries the genstrings comments.
    """

    def __init__(self, name):
        # name: the target locale; also used as the XML file's base name.
        self.name = name
        # Path of the file last parsed by open(); None until then.
        self.path = None
        # enumerated source -> enumerated target
        self.strings = {}
        # enumerated source -> True for entries considered translated
        self.translated = {}
        # enumerated source -> comment text
        self.comments = {}

    def open(self, path=".", suffix=""):
        """Load <name><suffix>.xml from the given directory, if present."""
        filePath = self.__makePath(path, suffix)
        if os.path.isfile(filePath):
            self.__parse(filePath)

    def save(self, path=None, suffix="", sourceMap=None):
        """Serialize to XML (UTF-16). sourceMap maps source strings to the
        source files they came from and drives the grouping in the
        output."""
        text = self.generate(sourceMap)
        if text:
            if path:
                filePath = self.__makePath(path, suffix)
            else:
                filePath = self.path
            print "***** Saving %s" % filePath
            #print codecs.encode(text, 'utf-8')
            f = codecs.open(filePath, 'w', 'utf-16')
            f.write(text)
            f.close()

    def generate(self, sourceMap=None):
        """Build the XML document text.

        NOTE(review): __invertSourceMap indexes into sourceMap, so passing
        the default None raises a TypeError; callers appear to always pass
        a real map -- confirm.
        """
        lines = []
        global xmlFormat
        prefix = xmlFormat['prefix']
        lines.append('<?xml version="1.0" encoding="utf-16"?>')
        lines.append('<%sexternal>' % prefix)
        lines.append(' <meta>')
        if xmlFormat['appName']:
            lines.append(' <appName>%s</appName>' % xmlFormat['appName'])
        lines.append(' <date>%s</date>' % datetime.datetime.now().strftime('%Y%m%d'))
        lines.append(' <locale>%s</locale>' % self.name)
        lines.append(' </meta>')
        for sourceFileName, sourceFileStrings in self.__invertSourceMap(sourceMap):
            lines.append(" <!-- %s -->" % sourceFileName)
            for source in sourceFileStrings:
                target = self.strings[source]
                lines.append(" <entry>")
                lines.append(" <%ssource>%s</%ssource>" % (prefix, escape(source), prefix))
                # Entries that already have a localization are emitted as
                # <xtarget>; untranslated ones as <target>, to be filled in
                # by the translator.
                if source in self.translated:
                    lines.append(" <%sxtarget>%s</%sxtarget>" % (prefix, escape(target), prefix))
                else:
                    lines.append(" <%starget>%s</%starget>" % (prefix, escape(target), prefix))
                if source in self.comments:
                    lines.append(" <%sdescription>%s</%sdescription>"
                        % (prefix, escape(self.comments[source]), prefix))
                lines.append(" </entry>")
        lines.append('</%sexternal>' % prefix)
        return "\n".join(lines)

    def __makePath(self, path=".", suffix=""):
        # Canonical file name: <locale><suffix>.xml under path.
        fileName = "%s%s.xml" % (self.name, suffix)
        return os.path.abspath(os.path.join(path, fileName))

    def __parse(self, filePath):
        """Parse a translated XML document produced by generate()."""
        self.path = filePath
        global xmlFormat
        prefix = xmlFormat['prefix']
        document = minidom.parse(filePath)
        for entry in document.documentElement.childNodes:
            # nodeType 1 == ELEMENT_NODE; skips whitespace text nodes.
            if entry.nodeType == 1:
                source = None
                target = None
                translated = False
                sources = entry.getElementsByTagName("%ssource" % prefix)
                if len(sources):
                    source = sources[0]
                    source = source.childNodes[0].data
                # Prefer <xtarget> (pre-existing translation); fall back to
                # <target>, whose presence marks the entry as (newly)
                # translated here.
                targets = entry.getElementsByTagName("%sxtarget" % prefix)
                if not len(targets):
                    targets = entry.getElementsByTagName("%starget" % prefix)
                    translated = True
                if len(targets):
                    target = targets[0]
                    target = target.childNodes[0].data
                if source and target:
                    self.strings[source] = target
                    if translated:
                        self.translated[source] = True

    def __invertSourceMap(self, sourceMap):
        """Yield (sourceFilePath, [enumerated strings]) pairs sorted by
        path, grouping this document's strings by the first source file
        each one appears in."""
        sourceFileMap = {}
        for sourceEnum in self.strings:
            source = denumerateStringVariables(sourceEnum)
            if source in sourceMap:
                sourcePaths = sourceMap[source]
                for sourcePath in sourcePaths:
                    if sourcePath not in sourceFileMap:
                        sourceFileMap[sourcePath] = []
                    sourceFileMap[sourcePath].append(sourceEnum)
                    # Attribute the string to its first source file only.
                    break
        for sourceName, sourceFileStrings in sourceFileMap.iteritems():
            sourceFileStrings.sort()
        keys = sourceFileMap.keys()
        keys.sort()
        for key in keys:
            yield key, sourceFileMap[key]
###################################################################################################
## Helpers
def openProjects(projectDirPaths, sourceLocaleName):
    """Yield an XcodeProject for each of the given project directories."""
    for projectDirPath in projectDirPaths:
        yield XcodeProject(projectDirPath, sourceLocaleName)
def openWithProperEncoding(path):
    """Read a text file and return its lines (without line endings).

    .strings files written by this tool (and by Xcode) are UTF-16, but
    hand-edited files are sometimes UTF-8, so fall back when UTF-16
    decoding fails. Returns [] for a missing file.

    Fixes over the original: file handles are now closed even when the
    read raises, and the unused Py2-only `except UnicodeError,exc` binding
    is gone.
    """
    if not os.path.isfile(path):
        return []
    try:
        f = codecs.open(path, 'r', 'utf-16')
        try:
            lines = f.read().splitlines()
        finally:
            f.close()
    except UnicodeError:
        f = codecs.open(path, 'r', 'utf-8')
        try:
            lines = f.read().splitlines()
        finally:
            f.close()
    return lines
def enumerateStringVariables(s):
    """Rewrite format variables in s to positional form.

    Each "%<spec>" occurrence matched by reVariable becomes "%<i>$<spec>"
    in order of appearance (e.g. "%@" -> "%1$@"), so translators can
    reorder variables safely. reVariable is defined elsewhere in this
    file; the exact conversion specs it matches are not visible here.
    """
    i = 1
    for var in reVariable.findall(s):
        # Replace only the first remaining occurrence each time so repeated
        # variables get successive indices.
        s = s.replace("%%%s" % var, "%%%d$%s" % (i, var), 1)
        i += 1
    return s
def denumerateStringVariables(s):
    """Inverse of enumerateStringVariables: collapse each enumerated
    variable matched by reEnumeratedVariable back to a bare "%".

    NOTE(review): the WHOLE matched token is replaced with "%", so the
    conversion spec survives only if reEnumeratedVariable matches just the
    "%<n>$" prefix -- confirm against its definition elsewhere in this
    file.
    """
    for var in reEnumeratedVariable.findall(s):
        s = s.replace(var, "%")
    return s
###################################################################################################
## Main
def parseOptions():
    """Parse command-line options for the localization tool.

    Returns (options, paths) where paths are the project directories to
    operate on (defaulting to the current directory). Diffing is implied
    whenever --merge is not requested.
    """
    parser = optparse.OptionParser(usage)
    parser.set_defaults(locale="en", focus=None, build=False, merge=False, diff=False,
        verbose=False, dryrun=False, appName="", prefix="")
    parser.add_option("-l", "--locale", dest="locale", type="str",
        help = "The name of your source locale. The default is 'en'.")
    parser.add_option("-f", "--focus", dest="focus", type="str",
        help = "The name of the locale to operate on, excluding all others.")
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
        help = "Verbose reporting of activity.")
    parser.add_option("-r", "--dryrun", dest="dryrun", action="store_true",
        help = "Print the output of files instead of saving them.")
    parser.add_option("-b", "--build", dest="build", action="store_true",
        help = "Runs genstrings on each source file in each project.")
    parser.add_option("-d", "--diff", dest="diff", action="store_true",
        help="Generates a diff of each locale against the source locale. Each locale's diff will be stored in a file in the working directory named <locale>.xml.")
    parser.add_option("-m", "--merge", dest="merge", action="store_true",
        help="Merges strings from the <locale>.xml file in the working directory back into the Localized.strings files in each locale.")
    parser.add_option("-p", "--prefix", dest="prefix", type="str",
        help="The prefix to use on the xml tags.")
    parser.add_option("-a", "--appname", dest="appName", type="str",
        help="The name of the application to include in the xml metadata.")
    options, arguments = parser.parse_args()
    # Positional arguments are project paths; default to the current dir.
    paths = ["."] if not len(arguments) else arguments
    # Diff by default unless we're merging.
    if not options.merge:
        options.diff = True
    return options, paths
def main():
    """Entry point: run genstrings (-b), merge translations (-m), and/or
    diff locales (-d, the default when not merging) over each project."""
    options, projectPaths = parseOptions()
    projectPaths = [os.path.abspath(os.path.expanduser(path)) for path in projectPaths]
    # Propagate the XML output options to the module-level format table
    # used by Translation.generate().
    global xmlFormat
    xmlFormat['prefix'] = options.prefix
    xmlFormat['appName'] = options.appName
    projects = list(openProjects(projectPaths, options.locale))
    if options.build:
        print "******* Generating strings *******"
        generateProjects(projects)
        print ""
    if options.merge:
        print "******* Merging *******"
        mergeProjects(projects, options.locale, options.focus, options.verbose, options.dryrun)
        print ""
    if options.diff:
        print "******* Diffing *******"
        diffProjects(projects, options.locale, options.focus, options.verbose, options.dryrun)
        print ""
# Script entry point.
if __name__ == "__main__":
    main()
# ---- file boundary (dataset residue: "| Python") ----
#!/usr/bin/env python
# encoding: utf-8
"""
lint
Validate style guidelines for a given source file.
When run from Xcode, the linter will automatically lint all of the built source files
and headers.
Version 1.0
History:
1.0 - February 27, 2011: Includes a set of simple linters and a delinter for most lints.
Created by Jeff Verkoeyen on 2011-02-27.
Copyright 2009-2011 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import ConfigParser
import logging
import os
import Paths
import pickle
import re
import string
import sys
from optparse import OptionParser
from Pbxproj import Pbxproj
from Pbxproj import relpath
# Year range expected in Facebook copyright headers (see lint_basics).
gcopyrightyears = '2009-2011'
# The canonical slash divider required before each Objective-C method.
gdivider = '///////////////////////////////////////////////////////////////////////////////////////////////////'
# Longest permitted line; overridable via --maxline.
maxlinelength = 100
# Program entry. The meat of this script happens in the lint() method below.
def main():
    """Entry point: lint the Xcode project when invoked from a build phase
    (PROJECT_FILE_PATH set), otherwise lint the files given as
    arguments."""
    usage = '''%prog filename
The Three20 Linter.
Verify Three20 style guidelines for source code.'''
    parser = OptionParser(usage = usage)
    parser.add_option("-d", "--delint", dest="delint",
        help="Delint the source",
        action="store_true")
    parser.add_option("-l", "--maxline", dest="maxlinelength",
        help="Maximum permissible length of a line",type="int",
        action="store")
    (options, args) = parser.parse_args()
    global maxlinelength
    if options.maxlinelength:
        maxlinelength = options.maxlinelength
    # NOTE(review): with no arguments this prints help but still falls
    # through; harmless, since the file loop below then has nothing to do.
    if len(args) == 0:
        parser.print_help()
    enabled_for_projects = True
    # Allow third-party developers to disable the linter entirely. See config.template for
    # more information in the "lint" section.
    configpath = os.path.join(os.path.dirname(Paths.src_dir), 'config')
    if os.path.exists(configpath):
        config = ConfigParser.ConfigParser()
        config.read(configpath)
        enabled_for_projects = config.getboolean('lint', 'enabled_for_projects')
    # If we're running the linter from Xcode, let's just process the project.
    if 'PROJECT_FILE_PATH' in os.environ:
        if enabled_for_projects:
            lint_project(os.environ['PROJECT_FILE_PATH'], options)
    else:
        for filename in args:
            lint(filename, options)
# This filter makes it possible to set the line number on logging.error calls.
class FilenameFilter(logging.Filter):
    """Logging filter that stamps each record with a source line number.

    lint_basics() advances `lineno` as it walks the file so that every
    logger.error() call is reported against the line being inspected.
    """

    def __init__(self):
        # -1 until lint_basics() starts counting from 1.
        self.lineno = -1

    def filter(self, record):
        # Always passes the record through; only annotates it.
        record.linenumber = self.lineno
        return True
def lint_project(project_path, options):
project = Pbxproj.get_pbxproj_by_name(project_path)
tempdir = None
if os.environ['TEMP_FILES_DIR']:
tempdir = os.environ['TEMP_FILES_DIR']
# We avoid relinting the same file over and over again by maintaining a mapping of filenames
# to modified times on disk. We store this information in the project's build directory and
# load it each time we run the linter for this project.
# Because we store the mtimes on a per project basis, we shouldn't run into any performance
# issues with a lint.dat file that's becoming completely massive.
mtimes = {}
# Read the lint.dat file and unpickle it if we find it.
if tempdir:
lintdatpath = os.path.join(os.path.abspath(tempdir), 'lint.dat')
if os.path.exists(lintdatpath):
lintdatfile = open(lintdatpath, 'rb')
mtimes = pickle.load(lintdatfile)
# The linter script may have changed since we last ran this project, so we might have to
# force lint every file to update them because there may be new linters.
# Assume that the linter hasn't been run for this project.
forcelint = True
# Get this script's path
lintfilename = os.path.realpath(__file__)
# Check the mtime.
mtime = os.path.getmtime(lintfilename)
if lintfilename in mtimes:
if mtime <= mtimes[lintfilename]:
# The lint script hasn't changed since we last ran this, so we don't have to force
# lint.
forcelint = False
# Store the linter's mtime for future runs.
mtimes[lintfilename] = mtime
#
# Get all of the "built" filenames in this project.
# The "Compile sources" phase files
filenames = project.get_built_sources()
# The "Copy headers" phase files if they exist
if project.get_built_headers():
filenames = filenames + project.get_built_headers()
# Iterate through and lint each of the files that have been modified since we last ran
# the linter, unless we're forcelinting, in which case we lint everything.
for filename in filenames:
mtime = os.path.getmtime(filename)
# If the filename isn't in the lint data, we have no idea when it was last modified so
# we'll run the linter anyway.
if not forcelint and filename in mtimes:
# Is it older or unchanged?
if mtime <= mtimes[filename]:
# Yeah, let's skip it then.
continue
# The beef.
if lint(filename, options):
# Only update the last known modification time if there weren't any errors.
mtimes[filename] = mtime
else:
print "If you would like to disable the lint tool, please read the instructions in config.template in the root of the Three20 project"
if filename in mtimes:
del mtimes[filename]
# Write out the lint data once we're done with this project. Thanks, pickle!
if tempdir:
lintdatfile = open(lintdatpath, 'wb')
pickle.dump(mtimes, lintdatfile)
# Lint the given filename.
def lint(filename, options):
    """Lint one file; returns True when it linted cleanly.

    Attaches a temporary handler/filter pair to the root logger so that
    warnings come out in an Xcode-clickable file:line format, and removes
    them again before returning.
    """
    logger = logging.getLogger()
    f = FilenameFilter()
    logger.addFilter(f)
    # Set up the warning logger format.
    ch = logging.StreamHandler()
    if 'PROJECT_FILE_PATH' in os.environ:
        formatter = logging.Formatter(filename+":%(linenumber)s: warning: "+relpath(os.getcwd(), filename)+":%(linenumber)s: %(message)s")
    else:
        formatter = logging.Formatter(filename+":%(linenumber)s: %(message)s")
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    # Fix: the original leaked this handle (and shadowed the `file`
    # builtin); read the whole file and close promptly.
    infile = open(filename, 'r')
    try:
        filedata = infile.read()
    finally:
        infile.close()
    did_lint_cleanly = True
    # Everything is set up now, let's run through the linters!
    if not lint_basics(filedata, filename, f, options.delint):
        did_lint_cleanly = False
    logger.removeFilter(f)
    logger.removeHandler(ch)
    return did_lint_cleanly
# Basic lint tests that only look at one line's information.
# If isdelinting is True, this method will try to fix as many lint issues as it can and then
# write the results out to disk.
def lint_basics(filedata, filename, linenofilter, isdelinting = False):
    """Run the per-line lint checks over filedata.

    Returns True when no (unfixable) warnings were found. When isdelinting
    is True, every fixable warning is corrected and the repaired text is
    written back to filename; only unfixable problems are logged.
    linenofilter is the FilenameFilter attached to the root logger; its
    lineno is advanced so log records carry the right line number.
    """
    logger = logging.getLogger()
    lines = string.split(filedata, "\n")
    linenofilter.lineno = 1
    prevline = None
    did_lint_cleanly = True
    nwarningsfixed = 0
    nwarnings = 0
    if isdelinting:
        newfilelines = []
    for line in lines:
        # Check line lengths.
        if len(line) > maxlinelength:
            did_lint_cleanly = False
            nwarnings = nwarnings + 1
            # This is not something we can fix with the delinter.
            if isdelinting:
                logger.error('I don\'t know how to split this line up.')
            else:
                logger.error('Line length > %d'% maxlinelength)
        # Check method dividers (implementation files only).
        if not re.search(r'.h$', filename) and re.search(r'^[-+][ ]*\([\w\s*]+\)', line):
            if prevline != gdivider and prevline != ' */':
                did_lint_cleanly = False
                nwarnings = nwarnings + 1
                if isdelinting:
                    # Replace a malformed slash-divider, otherwise insert one.
                    # Fix: guard against prevline being None on the first line.
                    if prevline is not None and re.match(r'/+', prevline):
                        newfilelines.pop()
                    newfilelines.append(gdivider)
                    nwarningsfixed = nwarningsfixed + 1
                else:
                    logger.error('This method is missing a correct divider before it')
        # Properties
        if re.search(r'^@property', line):
            if re.search(r'(NSString|NSArray|NSDictionary|NSSet)[ ]*\*', line) and not re.search(r'copy|readonly', line):
                nwarnings = nwarnings + 1
                if isdelinting:
                    line = re.sub(r'\bretain\b', r'copy', line)
                    nwarningsfixed = nwarningsfixed + 1
                else:
                    did_lint_cleanly = False
                    logger.error('Objects that have mutable subclasses, such as NSString, should be copied, not retained')
            if re.search(r'^@property\(', line):
                nwarnings = nwarnings + 1
                if isdelinting:
                    # Fix: the original stripped trailing whitespace here,
                    # which never repaired this warning; insert the space.
                    line = re.sub(r'^@property\(', '@property (', line)
                    nwarningsfixed = nwarningsfixed + 1
                else:
                    did_lint_cleanly = False
                    logger.error('Must be a space after the @property declarator')
        # Trailing whitespace
        if re.search('[ \t]+$', line):
            nwarnings = nwarnings + 1
            if isdelinting:
                line = line.rstrip(' \t')
                nwarningsfixed = nwarningsfixed + 1
            else:
                did_lint_cleanly = False
                logger.error('Trailing whitespace')
        # Spaces after logical constructs
        if re.search('(if|while|for)\(', line, re.IGNORECASE):
            nwarnings = nwarnings + 1
            if isdelinting:
                line = re.sub(r'(if|while|for)\(', r'\1 (', line)
                nwarningsfixed = nwarningsfixed + 1
            else:
                did_lint_cleanly = False
                logger.error('Missing space after logical construct')
        # Boolean checks against non-boolean objects
        # This test is really hard to do without knowing the type of the object.
        #if re.search('[^!]!(?!TTIs|[a-z0-9_.]*\.is|is|_is|has|_has|\[|self\.is|[a-z0-9_]+\.)[a-z0-9_]+', line, re.IGNORECASE):
        #  did_lint_cleanly = False
        #  logger.error('Use if (nil == value) instead of boolean checks for pointers')
        # Else statements must have one empty line before them.
        # Fix: guard against prevline being None on the first line.
        if re.search('}[ ]+else', line, re.IGNORECASE) and prevline is not None and prevline != '' and not re.search(r'^[ ]*//', prevline):
            nwarnings = nwarnings + 1
            if isdelinting:
                newfilelines.append('')
                nwarningsfixed = nwarningsfixed + 1
            else:
                did_lint_cleanly = False
                logger.error('There must be one empty line before an else statement')
        # Copyright statement for Facebook
        match = re.match('\/\/ Copyright ([0-9]+-[0-9]+) Facebook', line, re.IGNORECASE)
        if match:
            (copyrightyears, ) = match.groups()
            if copyrightyears != gcopyrightyears:
                nwarnings = nwarnings + 1
                if isdelinting:
                    line = re.sub(r'([0-9]+-[0-9]+)', gcopyrightyears, line)
                    nwarningsfixed = nwarningsfixed + 1
                else:
                    did_lint_cleanly = False
                    logger.error('The copyright statement on this file is outdated. Should be 2009-2011')
        if isdelinting:
            newfilelines.append(line)
        prevline = line
        linenofilter.lineno = linenofilter.lineno + 1
    if isdelinting and nwarnings > 0:
        # Fix: close the output handle (the original leaked it) and avoid
        # shadowing the `file` builtin.
        newfiledata = '\n'.join(newfilelines)
        outfile = open(filename, 'w')
        try:
            outfile.write(newfiledata)
        finally:
            outfile.close()
    return did_lint_cleanly
# Script entry point; main() returns None, so the exit status is 0.
if __name__ == "__main__":
    sys.exit(main())
# ---- file boundary (dataset residue: "| Python") ----
#!/usr/bin/env python
# encoding: utf-8
"""
lint
Validate style guidelines for a given source file.
When run from Xcode, the linter will automatically lint all of the built source files
and headers.
Version 1.0
History:
1.0 - February 27, 2011: Includes a set of simple linters and a delinter for most lints.
Created by Jeff Verkoeyen on 2011-02-27.
Copyright 2009-2011 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import ConfigParser
import logging
import os
import Paths
import pickle
import re
import string
import sys
from optparse import OptionParser
from Pbxproj import Pbxproj
from Pbxproj import relpath
# Year range expected in Facebook copyright headers (see lint_basics).
gcopyrightyears = '2009-2011'
# The canonical slash divider required before each Objective-C method.
gdivider = '///////////////////////////////////////////////////////////////////////////////////////////////////'
# Longest permitted line; overridable via --maxline.
maxlinelength = 100
# Program entry. The meat of this script happens in the lint() method below.
def main():
    """Entry point: lint the Xcode project when invoked from a build phase
    (PROJECT_FILE_PATH set), otherwise lint the files given as
    arguments."""
    usage = '''%prog filename
The Three20 Linter.
Verify Three20 style guidelines for source code.'''
    parser = OptionParser(usage = usage)
    parser.add_option("-d", "--delint", dest="delint",
        help="Delint the source",
        action="store_true")
    parser.add_option("-l", "--maxline", dest="maxlinelength",
        help="Maximum permissible length of a line",type="int",
        action="store")
    (options, args) = parser.parse_args()
    global maxlinelength
    if options.maxlinelength:
        maxlinelength = options.maxlinelength
    # NOTE(review): with no arguments this prints help but still falls
    # through; harmless, since the file loop below then has nothing to do.
    if len(args) == 0:
        parser.print_help()
    enabled_for_projects = True
    # Allow third-party developers to disable the linter entirely. See config.template for
    # more information in the "lint" section.
    configpath = os.path.join(os.path.dirname(Paths.src_dir), 'config')
    if os.path.exists(configpath):
        config = ConfigParser.ConfigParser()
        config.read(configpath)
        enabled_for_projects = config.getboolean('lint', 'enabled_for_projects')
    # If we're running the linter from Xcode, let's just process the project.
    if 'PROJECT_FILE_PATH' in os.environ:
        if enabled_for_projects:
            lint_project(os.environ['PROJECT_FILE_PATH'], options)
    else:
        for filename in args:
            lint(filename, options)
# This filter makes it possible to set the line number on logging.error calls.
class FilenameFilter(logging.Filter):
    """Logging filter that stamps each record with a source line number.

    lint_basics() advances `lineno` as it walks the file so that every
    logger.error() call is reported against the line being inspected.
    """

    def __init__(self):
        # -1 until lint_basics() starts counting from 1.
        self.lineno = -1

    def filter(self, record):
        # Always passes the record through; only annotates it.
        record.linenumber = self.lineno
        return True
def lint_project(project_path, options):
project = Pbxproj.get_pbxproj_by_name(project_path)
tempdir = None
if os.environ['TEMP_FILES_DIR']:
tempdir = os.environ['TEMP_FILES_DIR']
# We avoid relinting the same file over and over again by maintaining a mapping of filenames
# to modified times on disk. We store this information in the project's build directory and
# load it each time we run the linter for this project.
# Because we store the mtimes on a per project basis, we shouldn't run into any performance
# issues with a lint.dat file that's becoming completely massive.
mtimes = {}
# Read the lint.dat file and unpickle it if we find it.
if tempdir:
lintdatpath = os.path.join(os.path.abspath(tempdir), 'lint.dat')
if os.path.exists(lintdatpath):
lintdatfile = open(lintdatpath, 'rb')
mtimes = pickle.load(lintdatfile)
# The linter script may have changed since we last ran this project, so we might have to
# force lint every file to update them because there may be new linters.
# Assume that the linter hasn't been run for this project.
forcelint = True
# Get this script's path
lintfilename = os.path.realpath(__file__)
# Check the mtime.
mtime = os.path.getmtime(lintfilename)
if lintfilename in mtimes:
if mtime <= mtimes[lintfilename]:
# The lint script hasn't changed since we last ran this, so we don't have to force
# lint.
forcelint = False
# Store the linter's mtime for future runs.
mtimes[lintfilename] = mtime
#
# Get all of the "built" filenames in this project.
# The "Compile sources" phase files
filenames = project.get_built_sources()
# The "Copy headers" phase files if they exist
if project.get_built_headers():
filenames = filenames + project.get_built_headers()
# Iterate through and lint each of the files that have been modified since we last ran
# the linter, unless we're forcelinting, in which case we lint everything.
for filename in filenames:
mtime = os.path.getmtime(filename)
# If the filename isn't in the lint data, we have no idea when it was last modified so
# we'll run the linter anyway.
if not forcelint and filename in mtimes:
# Is it older or unchanged?
if mtime <= mtimes[filename]:
# Yeah, let's skip it then.
continue
# The beef.
if lint(filename, options):
# Only update the last known modification time if there weren't any errors.
mtimes[filename] = mtime
else:
print "If you would like to disable the lint tool, please read the instructions in config.template in the root of the Three20 project"
if filename in mtimes:
del mtimes[filename]
# Write out the lint data once we're done with this project. Thanks, pickle!
if tempdir:
lintdatfile = open(lintdatpath, 'wb')
pickle.dump(mtimes, lintdatfile)
# Lint the given filename.
def lint(filename, options):
    """Lint one file; returns True when it linted cleanly.

    Attaches a temporary handler/filter pair to the root logger so that
    warnings come out in an Xcode-clickable file:line format, and removes
    them again before returning.
    """
    logger = logging.getLogger()
    f = FilenameFilter()
    logger.addFilter(f)
    # Set up the warning logger format.
    ch = logging.StreamHandler()
    if 'PROJECT_FILE_PATH' in os.environ:
        formatter = logging.Formatter(filename+":%(linenumber)s: warning: "+relpath(os.getcwd(), filename)+":%(linenumber)s: %(message)s")
    else:
        formatter = logging.Formatter(filename+":%(linenumber)s: %(message)s")
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    # Fix: the original leaked this handle (and shadowed the `file`
    # builtin); read the whole file and close promptly.
    infile = open(filename, 'r')
    try:
        filedata = infile.read()
    finally:
        infile.close()
    did_lint_cleanly = True
    # Everything is set up now, let's run through the linters!
    if not lint_basics(filedata, filename, f, options.delint):
        did_lint_cleanly = False
    logger.removeFilter(f)
    logger.removeHandler(ch)
    return did_lint_cleanly
# Basic lint tests that only look at one line's information.
# If isdelinting is True, this method will try to fix as many lint issues as it can and then
# write the results out to disk.
def lint_basics(filedata, filename, linenofilter, isdelinting = False):
    """Run the per-line lint checks over filedata.

    Returns True when no (unfixable) warnings were found. When isdelinting
    is True, every fixable warning is corrected and the repaired text is
    written back to filename; only unfixable problems are logged.
    linenofilter is the FilenameFilter attached to the root logger; its
    lineno is advanced so log records carry the right line number.
    """
    logger = logging.getLogger()
    lines = string.split(filedata, "\n")
    linenofilter.lineno = 1
    prevline = None
    did_lint_cleanly = True
    nwarningsfixed = 0
    nwarnings = 0
    if isdelinting:
        newfilelines = []
    for line in lines:
        # Check line lengths.
        if len(line) > maxlinelength:
            did_lint_cleanly = False
            nwarnings = nwarnings + 1
            # This is not something we can fix with the delinter.
            if isdelinting:
                logger.error('I don\'t know how to split this line up.')
            else:
                logger.error('Line length > %d'% maxlinelength)
        # Check method dividers (implementation files only).
        if not re.search(r'.h$', filename) and re.search(r'^[-+][ ]*\([\w\s*]+\)', line):
            if prevline != gdivider and prevline != ' */':
                did_lint_cleanly = False
                nwarnings = nwarnings + 1
                if isdelinting:
                    # Replace a malformed slash-divider, otherwise insert one.
                    # Fix: guard against prevline being None on the first line.
                    if prevline is not None and re.match(r'/+', prevline):
                        newfilelines.pop()
                    newfilelines.append(gdivider)
                    nwarningsfixed = nwarningsfixed + 1
                else:
                    logger.error('This method is missing a correct divider before it')
        # Properties
        if re.search(r'^@property', line):
            if re.search(r'(NSString|NSArray|NSDictionary|NSSet)[ ]*\*', line) and not re.search(r'copy|readonly', line):
                nwarnings = nwarnings + 1
                if isdelinting:
                    line = re.sub(r'\bretain\b', r'copy', line)
                    nwarningsfixed = nwarningsfixed + 1
                else:
                    did_lint_cleanly = False
                    logger.error('Objects that have mutable subclasses, such as NSString, should be copied, not retained')
            if re.search(r'^@property\(', line):
                nwarnings = nwarnings + 1
                if isdelinting:
                    # Fix: the original stripped trailing whitespace here,
                    # which never repaired this warning; insert the space.
                    line = re.sub(r'^@property\(', '@property (', line)
                    nwarningsfixed = nwarningsfixed + 1
                else:
                    did_lint_cleanly = False
                    logger.error('Must be a space after the @property declarator')
        # Trailing whitespace
        if re.search('[ \t]+$', line):
            nwarnings = nwarnings + 1
            if isdelinting:
                line = line.rstrip(' \t')
                nwarningsfixed = nwarningsfixed + 1
            else:
                did_lint_cleanly = False
                logger.error('Trailing whitespace')
        # Spaces after logical constructs
        if re.search('(if|while|for)\(', line, re.IGNORECASE):
            nwarnings = nwarnings + 1
            if isdelinting:
                line = re.sub(r'(if|while|for)\(', r'\1 (', line)
                nwarningsfixed = nwarningsfixed + 1
            else:
                did_lint_cleanly = False
                logger.error('Missing space after logical construct')
        # Boolean checks against non-boolean objects
        # This test is really hard to do without knowing the type of the object.
        #if re.search('[^!]!(?!TTIs|[a-z0-9_.]*\.is|is|_is|has|_has|\[|self\.is|[a-z0-9_]+\.)[a-z0-9_]+', line, re.IGNORECASE):
        #  did_lint_cleanly = False
        #  logger.error('Use if (nil == value) instead of boolean checks for pointers')
        # Else statements must have one empty line before them.
        # Fix: guard against prevline being None on the first line.
        if re.search('}[ ]+else', line, re.IGNORECASE) and prevline is not None and prevline != '' and not re.search(r'^[ ]*//', prevline):
            nwarnings = nwarnings + 1
            if isdelinting:
                newfilelines.append('')
                nwarningsfixed = nwarningsfixed + 1
            else:
                did_lint_cleanly = False
                logger.error('There must be one empty line before an else statement')
        # Copyright statement for Facebook
        match = re.match('\/\/ Copyright ([0-9]+-[0-9]+) Facebook', line, re.IGNORECASE)
        if match:
            (copyrightyears, ) = match.groups()
            if copyrightyears != gcopyrightyears:
                nwarnings = nwarnings + 1
                if isdelinting:
                    line = re.sub(r'([0-9]+-[0-9]+)', gcopyrightyears, line)
                    nwarningsfixed = nwarningsfixed + 1
                else:
                    did_lint_cleanly = False
                    logger.error('The copyright statement on this file is outdated. Should be 2009-2011')
        if isdelinting:
            newfilelines.append(line)
        prevline = line
        linenofilter.lineno = linenofilter.lineno + 1
    if isdelinting and nwarnings > 0:
        # Fix: close the output handle (the original leaked it) and avoid
        # shadowing the `file` builtin.
        newfiledata = '\n'.join(newfilelines)
        outfile = open(filename, 'w')
        try:
            outfile.write(newfiledata)
        finally:
            outfile.close()
    return did_lint_cleanly
# Script entry point; main() returns None, so the exit status is 0.
if __name__ == "__main__":
    sys.exit(main())
# ---- file boundary (dataset residue: "| Python") ----
#!/usr/bin/env python
# encoding: utf-8
"""
Pbxproj.py
Working with the pbxproj file format is a pain in the ass.
This object provides a couple basic features for parsing pbxproj files:
* Getting a dependency list
* Adding one pbxproj to another pbxproj as a dependency
Version 1.1.
History:
1.0 - October 20, 2010: Initial hacked-together version finished. It is alive!
1.1 - January 11, 2011: Add configuration settings to all configurations by default.
Created by Jeff Verkoeyen on 2010-10-18.
Copyright 2009-2011 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import hashlib
import logging
import os
import re
import sys
import Paths
pbxproj_cache = {}
# The following relative path methods recycled from:
# http://code.activestate.com/recipes/208993-compute-relative-path-from-one-directory-to-anothe/
# Author: Cimarron Taylor
# Date: July 6, 2003
def pathsplit(p, rest=None):
    """Recursively split path *p* into a list of its components.

    Args:
      p: A path string, e.g. '/a/b/c'.
      rest: Accumulator of components already split off (internal use).

    Returns:
      List of path components; an absolute path keeps '/' as first element.
    """
    # rest=None avoids the shared-mutable-default-argument pitfall.
    if rest is None:
        rest = []
    (h, t) = os.path.split(p)
    if len(h) < 1: return [t]+rest
    if len(t) < 1: return [h]+rest
    return pathsplit(h, [t]+rest)
def commonpath(l1, l2, common=None):
    """Split two component lists into (shared prefix, remainder1, remainder2).

    Args:
      l1, l2: Path component lists as produced by pathsplit().
      common: Accumulator for the shared prefix (internal use).
    """
    # common=None avoids the shared-mutable-default-argument pitfall.
    if common is None:
        common = []
    if len(l1) < 1: return (common, l1, l2)
    if len(l2) < 1: return (common, l1, l2)
    if l1[0] != l2[0]: return (common, l1, l2)
    return commonpath(l1[1:], l2[1:], common+[l1[0]])
def relpath(p1, p2):
    """Return the relative path that leads from directory *p1* to *p2*."""
    (common, l1, l2) = commonpath(pathsplit(p1), pathsplit(p2))
    p = []
    if len(l1) > 0:
        p = ['../' * len(l1)]
    p = p + l2
    if not p:
        # p1 == p2: os.path.join(*[]) would raise TypeError; '.' means "here".
        return '.'
    return os.path.join(*p)
class Pbxproj(object):
@staticmethod
def get_pbxproj_by_name(name, xcode_version = None):
if name not in pbxproj_cache:
pbxproj_cache[name] = Pbxproj(name, xcode_version = xcode_version)
return pbxproj_cache[name]
# Valid names
# Three20
# Three20:Three20-Xcode3.2.5
# /path/to/project.xcodeproj/project.pbxproj
    def __init__(self, name, xcode_version = None):
        """Parse the project/target out of *name* and eagerly load the project.

        *name* may be a bare project name ("Three20"), a "project:target"
        pair, or a path to an .xcodeproj / project.pbxproj (see the comment
        above for examples).
        """
        self._project_data = None
        parts = name.split(':')
        self.name = parts[0]
        if len(parts) > 1:
            # Explicit "project:target" form.
            self.target = parts[1]
        else:
            # No explicit target: derive one from the name.
            valid_file_chars = '[a-zA-Z0-9\.\-:+ "\'!@#$%^&*\(\)]';
            if re.match('^'+valid_file_chars+'+$', self.name):
                # Simple name with no path separators: use it as the target.
                self.target = self.name
            else:
                # Path-style name: use the .xcodeproj basename as the target.
                result = re.search('('+valid_file_chars+'+)\.xcodeproj', self.name)
                if not result:
                    self.target = self.name
                else:
                    (self.target, ) = result.groups()
        # Project name is the .xcodeproj basename, or the raw name if absent.
        match = re.search('([^/\\\\]+)\.xcodeproj', self.name)
        if not match:
            self._project_name = self.name
        else:
            (self._project_name, ) = match.groups()
        self._guid = None
        self._deps = None
        self._xcode_version = xcode_version
        self._projectVersion = None
        # Eagerly parse the project file (via dependencies()) so the guid,
        # build-phase guids and product info are available immediately.
        self.guid()
def __str__(self):
return str(self.name)+" target:"+str(self.target)+" guid:"+str(self._guid)+" prodguid: "+self._product_guid+" prodname: "+self._product_name
def uniqueid(self):
return self.name + ':' + self.target
def path(self):
# TODO: No sense calculating this every time, just store it when we get the name.
if re.match('^[a-zA-Z0-9\.\-:+"]+$', self.name):
return os.path.join(Paths.src_dir, self.name.strip('"'), self.name.strip('"')+'.xcodeproj', 'project.pbxproj')
elif not re.match('project.pbxproj$', self.name):
return os.path.join(self.name, 'project.pbxproj')
else:
return self.name
# A pbxproj file is contained within an xcodeproj file.
# This method simply strips off the project.pbxproj part of the path.
def xcodeprojpath(self):
return os.path.dirname(self.path())
def guid(self):
if not self._guid:
self.dependencies()
return self._guid
def version(self):
if not self._projectVersion:
self.dependencies()
return self._projectVersion
# Load the project data from disk.
def get_project_data(self):
if self._project_data is None:
if not os.path.exists(self.path()):
logging.info("Couldn't find the project at this path:")
logging.info(self.path())
return None
project_file = open(self.path(), 'r')
self._project_data = project_file.read()
return self._project_data
# Write the project data to disk.
def set_project_data(self, project_data):
if self._project_data != project_data:
self._project_data = project_data
project_file = open(self.path(), 'w')
project_file.write(self._project_data)
    # Get and cache the dependencies for this project.
    def dependencies(self):
        """Parse the pbxproj and cache this target's dependency names.

        As a side effect also captures: the project format version, the
        configuration list and configurations, the target guid, the
        Resources/Frameworks build-phase guids, and the product guid/name.

        Returns the list of dependency names, or None on most parse
        failures.
        """
        if self._deps is not None:
            return self._deps
        project_data = self.get_project_data()
        if project_data is None:
            logging.error("Unable to open the project file at this path (is it readable?): "+self.path())
            return None
        # Get project file format version
        result = re.search('\tobjectVersion = ([0-9]+);', project_data)
        if not result:
            logging.error("Can't recover: unable to find the project version for your target at: "+self.path())
            return None
        (self._projectVersion,) = result.groups()
        self._projectVersion = int(self._projectVersion)
        # Get configuration list guid
        result = re.search('[A-Z0-9]+ \/\* '+re.escape(self.target)+' \*\/ = {\n[ \t]+isa = PBXNativeTarget;(?:.|\n)+?buildConfigurationList = ([A-Z0-9]+) \/\* Build configuration list for PBXNativeTarget "'+re.escape(self.target)+'" \*\/;',
                           project_data)
        if result:
            (self.configurationListGuid, ) = result.groups()
        else:
            self.configurationListGuid = None
        # Get configuration list
        if self.configurationListGuid:
            match = re.search(re.escape(self.configurationListGuid)+' \/\* Build configuration list for PBXNativeTarget "'+re.escape(self.target)+'" \*\/ = \{\n[ \t]+isa = XCConfigurationList;\n[ \t]+buildConfigurations = \(\n((?:.|\n)+?)\);', project_data)
            if not match:
                logging.error("Couldn't find the configuration list.")
                # NOTE(review): returns False here but None on other failures;
                # callers only test truthiness, so both behave the same.
                return False
            (configurationList,) = match.groups()
            # List of (guid, configuration name) pairs, e.g. Debug/Release.
            self.configurations = re.findall('[ \t]+([A-Z0-9]+) \/\* (.+) \*\/,\n', configurationList)
        # Get build phases
        result = re.search('([A-Z0-9]+) \/\* '+re.escape(self.target)+' \*\/ = {\n[ \t]+isa = PBXNativeTarget;(?:.|\n)+?buildPhases = \(\n((?:.|\n)+?)\);',
                           project_data)
        if not result:
            logging.error("Can't recover: Unable to find the build phases from your target at: "+self.path())
            return None
        (self._guid, buildPhases, ) = result.groups()
        # Get the build phases we care about.
        match = re.search('([A-Z0-9]+) \/\* Resources \*\/', buildPhases)
        if match:
            (self._resources_guid, ) = match.groups()
        else:
            # Missing Resources phase is tolerated; add_file_to_resources_phase
            # reports it later if needed.
            self._resources_guid = None
        match = re.search('([A-Z0-9]+) \/\* Frameworks \*\/', buildPhases)
        if not match:
            logging.error("Couldn't find the Frameworks phase from: "+self.path())
            logging.error("Please add a New Link Binary With Libraries Build Phase to your target")
            logging.error("Right click your target in the project, Add, New Build Phase,")
            logging.error("  \"New Link Binary With Libraries Build Phase\"")
            return None
        (self._frameworks_guid, ) = match.groups()
        # Get the dependencies
        result = re.search(re.escape(self._guid)+' \/\* '+re.escape(self.target)+' \*\/ = {\n[ \t]+isa = PBXNativeTarget;(?:.|\n)+?dependencies = \(\n((?:[ \t]+[A-Z0-9]+ \/\* PBXTargetDependency \*\/,\n)*)[ \t]*\);\n',
                           project_data)
        if not result:
            logging.error("Unable to get dependencies from: "+self.path())
            return None
        (dependency_set, ) = result.groups()
        dependency_guids = re.findall('[ \t]+([A-Z0-9]+) \/\* PBXTargetDependency \*\/,\n', dependency_set)
        # Parse the dependencies
        dependency_names = []
        for guid in dependency_guids:
            result = re.search(guid+' \/\* PBXTargetDependency \*\/ = \{\n[ \t]+isa = PBXTargetDependency;\n[ \t]*name = (["a-zA-Z0-9\.\-]+);',
                               project_data)
            if result:
                (dependency_name, ) = result.groups()
                dependency_names.append(dependency_name)
        self._deps = dependency_names
        # Get the product guid and name.
        result = re.search(re.escape(self._guid)+' \/\* '+re.escape(self.target)+' \*\/ = {\n[ \t]+isa = PBXNativeTarget;(?:.|\n)+?productReference = ([A-Z0-9]+) \/\* (.+?) \*\/;',
                           project_data)
        if not result:
            logging.error("Unable to get product guid from: "+self.path())
            return None
        (self._product_guid, self._product_name, ) = result.groups()
        return self._deps
# Add a line to the PBXBuildFile section.
#
# <default_guid> /* <name> in Frameworks */ = {isa = PBXBuildFile; fileRef = <file_ref_hash> /* <name> */; };
#
# Returns: <default_guid> if a line was added.
# Otherwise, the existing guid is returned.
def add_buildfile(self, name, file_ref_hash, default_guid):
project_data = self.get_project_data()
match = re.search('\/\* Begin PBXBuildFile section \*\/\n((?:.|\n)+?)\/\* End PBXBuildFile section \*\/', project_data)
if not match:
logging.error("Couldn't find PBXBuildFile section.")
return None
(subtext, ) = match.groups()
buildfile_hash = None
match = re.search('([A-Z0-9]+).+?fileRef = '+re.escape(file_ref_hash), subtext)
if match:
(buildfile_hash, ) = match.groups()
logging.info("This build file already exists: "+buildfile_hash)
if buildfile_hash is None:
match = re.search('\/\* Begin PBXBuildFile section \*\/\n', project_data)
buildfile_hash = default_guid
libfiletext = "\t\t"+buildfile_hash+" /* "+name+" in Frameworks */ = {isa = PBXBuildFile; fileRef = "+file_ref_hash+" /* "+name+" */; };\n"
project_data = project_data[:match.end()] + libfiletext + project_data[match.end():]
self.set_project_data(project_data)
return buildfile_hash
    # Add a line to the PBXFileReference section.
    #
    # <default_guid> /* <name> */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.<file_type>"; name = <name>; path = <rel_path>; sourceTree = <source_tree>; };
    #
    # Returns: <default_guid> if a line was added.
    #          Otherwise, the existing guid is returned.
    def add_filereference(self, name, file_type, default_guid, rel_path, source_tree):
        """Ensure a PBXFileReference row exists for *name*; return its guid.

        Returns False when the PBXFileReference section cannot be found.
        """
        project_data = self.get_project_data()
        quoted_rel_path = '"'+rel_path.strip('"')+'"'
        fileref_hash = None
        # Look for an existing row with the unquoted path first.
        match = re.search('([A-Z0-9]+) \/\* '+re.escape(name)+' \*\/ = \{isa = PBXFileReference; lastKnownFileType = "wrapper.'+file_type+'"; name = '+re.escape(name)+'; path = '+re.escape(rel_path)+';', project_data)
        if not match:
            # Check again for quoted versions, just to be sure.
            match = re.search('([A-Z0-9]+) \/\* '+re.escape(name)+' \*\/ = \{isa = PBXFileReference; lastKnownFileType = "wrapper.'+file_type+'"; name = '+re.escape(name)+'; path = '+re.escape(quoted_rel_path)+';', project_data)
        if match:
            logging.info("This file has already been added.")
            (fileref_hash, ) = match.groups()
        else:
            match = re.search('\/\* Begin PBXFileReference section \*\/\n', project_data)
            if not match:
                logging.error("Couldn't find the PBXFileReference section.")
                return False
            fileref_hash = default_guid
            # New rows are always written with the quoted path form.
            pbxfileref = "\t\t"+fileref_hash+" /* "+name+" */ = {isa = PBXFileReference; lastKnownFileType = \"wrapper."+file_type+"\"; name = "+name+"; path = "+quoted_rel_path+"; sourceTree = "+source_tree+"; };\n"
            project_data = project_data[:match.end()] + pbxfileref + project_data[match.end():]
        self.set_project_data(project_data)
        return fileref_hash
    # Add a file to the given PBXGroup.
    #
    # <guid> /* <name> */,
    def add_file_to_group(self, name, guid, group):
        """Add the child entry <guid> /* <name> */ to the named PBXGroup.

        Returns True on success (including when the file is already in the
        group), False when the group cannot be found.
        """
        project_data = self.get_project_data()
        match = re.search('\/\* '+re.escape(group)+' \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n((?:.|\n)+?)\);', project_data)
        if not match:
            logging.error("Couldn't find the "+group+" children.")
            return False
        (children,) = match.groups()
        match = re.search(re.escape(guid), children)
        if match:
            logging.info("This file is already a member of the "+name+" group.")
        else:
            # Re-anchor at the start of the children list so we can prepend.
            match = re.search('\/\* '+re.escape(group)+' \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n', project_data)
            if not match:
                logging.error("Couldn't find the "+group+" group.")
                return False
            pbxgroup = "\t\t\t\t"+guid+" /* "+name+" */,\n"
            project_data = project_data[:match.end()] + pbxgroup + project_data[match.end():]
        self.set_project_data(project_data)
        return True
# Add a file to the Frameworks PBXGroup.
#
# <guid> /* <name> */,
def add_file_to_frameworks(self, name, guid):
return self.add_file_to_group(name, guid, 'Frameworks')
# Add a file to the Resources PBXGroup.
#
# <guid> /* <name> */,
def add_file_to_resources(self, name, guid):
match = re.search('\/\* '+re.escape('Resources')+' \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n((?:.|\n)+?)\);', self.get_project_data())
if not match:
return self.add_file_to_group(name, guid, 'Supporting Files')
return self.add_file_to_group(name, guid, 'Resources')
    def add_file_to_phase(self, name, guid, phase_guid, phase):
        """Add the build file <guid> to the files list of a build phase.

        Args:
          name: Display name used in the inserted comment.
          guid: PBXBuildFile guid to add.
          phase_guid: guid of the build phase to modify.
          phase: Phase display name, e.g. 'Frameworks' or 'Resources'.

        Returns True on success (including when already present), False
        when the phase cannot be found.
        """
        project_data = self.get_project_data()
        match = re.search(re.escape(phase_guid)+" \/\* "+re.escape(phase)+" \*\/ = {(?:.|\n)+?files = \(((?:.|\n)+?)\);", project_data)
        if not match:
            logging.error("Couldn't find the "+phase+" phase.")
            return False
        (files, ) = match.groups()
        match = re.search(re.escape(guid), files)
        if match:
            logging.info("The file has already been added.")
        else:
            # Re-anchor at the start of the files list so we can prepend.
            # NOTE(review): phase is not re.escape()d here, unlike above.
            match = re.search(re.escape(phase_guid)+" \/\* "+phase+" \*\/ = {(?:.|\n)+?files = \(\n", project_data)
            if not match:
                logging.error("Couldn't find the "+phase+" files")
                return False
            frameworktext = "\t\t\t\t"+guid+" /* "+name+" in "+phase+" */,\n"
            project_data = project_data[:match.end()] + frameworktext + project_data[match.end():]
        self.set_project_data(project_data)
        return True
def get_rel_path_to_products_dir(self):
project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
build_path = os.path.join(os.path.join(os.path.dirname(Paths.src_dir), 'Build'), 'Products')
return relpath(project_path, build_path)
def add_file_to_frameworks_phase(self, name, guid):
return self.add_file_to_phase(name, guid, self._frameworks_guid, 'Frameworks')
def add_file_to_resources_phase(self, name, guid):
if self._resources_guid is None:
logging.error("No resources build phase found in the destination project")
logging.error("Please add a New Copy Bundle Resources Build Phase to your target")
logging.error("Right click your target in the project, Add, New Build Phase,")
logging.error(" \"New Copy Bundle Resources Build Phase\"")
return False
return self.add_file_to_phase(name, guid, self._resources_guid, 'Resources')
def add_header_search_path(self, configuration):
project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
build_path = os.path.join(os.path.join(os.path.join(os.path.dirname(Paths.src_dir), 'Build'), 'Products'), 'three20')
rel_path = relpath(project_path, build_path)
did_add_build_setting = self.add_build_setting(configuration, 'HEADER_SEARCH_PATHS', '"'+rel_path+'"')
if not did_add_build_setting:
return did_add_build_setting
# Version 46 is Xcode 4's file format.
try:
primary_version = int(self._xcode_version.split('.')[0])
except ValueError, e:
primary_version = 0
if self._projectVersion >= 46 or primary_version >= 4:
did_add_build_setting = self.add_build_setting(configuration, 'HEADER_SEARCH_PATHS', '"$(BUILT_PRODUCTS_DIR)/../../three20"')
if not did_add_build_setting:
return did_add_build_setting
did_add_build_setting = self.add_build_setting(configuration, 'HEADER_SEARCH_PATHS', '"$(BUILT_PRODUCTS_DIR)/../three20"')
if not did_add_build_setting:
return did_add_build_setting
return did_add_build_setting
def add_build_setting(self, configuration, setting_name, value):
project_data = self.get_project_data()
match = re.search('\/\* '+configuration+' \*\/ = {\n[ \t]+isa = XCBuildConfiguration;\n(?:.|\n)+?[ \t]+buildSettings = \{\n((?:.|\n)+?)\};', project_data)
if not match:
print "Couldn't find the "+configuration+" configuration in "+self.path()
return False
settings_start = match.start(1)
settings_end = match.end(1)
(build_settings, ) = match.groups()
match = re.search(re.escape(setting_name)+' = ((?:.|\n)+?);', build_settings)
if not match:
# Add a brand new build setting. No checking for existing settings necessary.
settingtext = '\t\t\t\t'+setting_name+' = '+value+';\n'
project_data = project_data[:settings_start] + settingtext + project_data[settings_start:]
else:
# Build settings already exist. Is there one or many?
(search_paths,) = match.groups()
if re.search('\(\n', search_paths):
# Many
match = re.search(re.escape(value), search_paths)
if not match:
# If value has any spaces in it, Xcode will split it up into
# multiple entries.
escaped_value = re.escape(value).replace(' ', '",\n[ \t]+"')
match = re.search(escaped_value, search_paths)
if not match and not re.search(re.escape(value.strip('"')), search_paths):
match = re.search(re.escape(setting_name)+' = \(\n', build_settings)
build_settings = build_settings[:match.end()] + '\t\t\t\t\t'+value+',\n' + build_settings[match.end():]
project_data = project_data[:settings_start] + build_settings + project_data[settings_end:]
else:
# One
if search_paths.strip('"') != value.strip('"'):
existing_path = search_paths
path_set = '(\n\t\t\t\t\t'+value+',\n\t\t\t\t\t'+existing_path+'\n\t\t\t\t)'
build_settings = build_settings[:match.start(1)] + path_set + build_settings[match.end(1):]
project_data = project_data[:settings_start] + build_settings + project_data[settings_end:]
self.set_project_data(project_data)
return True
def get_hash_base(self, uniquename):
examplehash = '320FFFEEEDDDCCCBBBAAA000'
uniquehash = hashlib.sha224(uniquename).hexdigest().upper()
uniquehash = uniquehash[:len(examplehash) - 4]
return '320'+uniquehash
def add_framework(self, framework):
tthash_base = self.get_hash_base(framework)
fileref_hash = self.add_filereference(framework, 'frameworks', tthash_base+'0', 'System/Library/Frameworks/'+framework, 'SDKROOT')
libfile_hash = self.add_buildfile(framework, fileref_hash, tthash_base+'1')
if not self.add_file_to_frameworks(framework, fileref_hash):
return False
if not self.add_file_to_frameworks_phase(framework, libfile_hash):
return False
return True
def add_bundle(self):
tthash_base = self.get_hash_base('Three20.bundle')
project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
build_path = os.path.join(Paths.src_dir, 'Three20.bundle')
rel_path = relpath(project_path, build_path)
fileref_hash = self.add_filereference('Three20.bundle', 'plug-in', tthash_base+'0', rel_path, 'SOURCE_ROOT')
libfile_hash = self.add_buildfile('Three20.bundle', fileref_hash, tthash_base+'1')
if not self.add_file_to_resources('Three20.bundle', fileref_hash):
return False
if not self.add_file_to_resources_phase('Three20.bundle', libfile_hash):
return False
return True
# Get the PBXFileReference from the given PBXBuildFile guid.
def get_filerefguid_from_buildfileguid(self, buildfileguid):
project_data = self.get_project_data()
match = re.search(buildfileguid+' \/\* .+ \*\/ = {isa = PBXBuildFile; fileRef = ([A-Z0-9]+) \/\* .+ \*\/;', project_data)
if not match:
logging.error("Couldn't find PBXBuildFile row.")
return None
(filerefguid, ) = match.groups()
return filerefguid
def get_filepath_from_filerefguid(self, filerefguid):
project_data = self.get_project_data()
match = re.search(filerefguid+' \/\* .+ \*\/ = {isa = PBXFileReference; .+ path = (.+); .+ };', project_data)
if not match:
logging.error("Couldn't find PBXFileReference row.")
return None
(path, ) = match.groups()
return path
# Get all source files that are "built" in this project. This includes files built for
# libraries, executables, and unit testing.
def get_built_sources(self):
project_data = self.get_project_data()
match = re.search('\/\* Begin PBXSourcesBuildPhase section \*\/\n((?:.|\n)+?)\/\* End PBXSourcesBuildPhase section \*\/', project_data)
if not match:
logging.error("Couldn't find PBXSourcesBuildPhase section.")
return None
(buildphasedata, ) = match.groups()
buildfileguids = re.findall('[ \t]+([A-Z0-9]+) \/\* .+ \*\/,\n', buildphasedata)
project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
filenames = []
for buildfileguid in buildfileguids:
filerefguid = self.get_filerefguid_from_buildfileguid(buildfileguid)
filepath = self.get_filepath_from_filerefguid(filerefguid)
filenames.append(os.path.join(project_path, filepath.strip('"')))
return filenames
# Get all header files that are "built" in this project. This includes files built for
# libraries, executables, and unit testing.
def get_built_headers(self):
project_data = self.get_project_data()
match = re.search('\/\* Begin PBXHeadersBuildPhase section \*\/\n((?:.|\n)+?)\/\* End PBXHeadersBuildPhase section \*\/', project_data)
if not match:
logging.error("Couldn't find PBXHeadersBuildPhase section.")
return None
(buildphasedata, ) = match.groups()
buildfileguids = re.findall('[ \t]+([A-Z0-9]+) \/\* .+ \*\/,\n', buildphasedata)
project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
filenames = []
for buildfileguid in buildfileguids:
filerefguid = self.get_filerefguid_from_buildfileguid(buildfileguid)
filepath = self.get_filepath_from_filerefguid(filerefguid)
filenames.append(os.path.join(project_path, filepath.strip('"')))
return filenames
    def add_dependency(self, dep):
        """Wire another Pbxproj, *dep*, into this project as a dependency.

        Performs the full sequence of pbxproj edits Xcode would make:
        file reference, Frameworks group entry, PBXTargetDependency +
        PBXContainerItemProxy rows, project reference, product group,
        reference proxy, and finally linking the dependency's product.
        Each step is idempotent: existing entries are detected and reused.

        Returns True on success, False if any required section or anchor
        cannot be found. Note the project file is rewritten after most
        steps, so a False return can leave it partially updated.
        """
        project_data = self.get_project_data()
        dep_data = dep.get_project_data()
        if project_data is None or dep_data is None:
            return False
        logging.info("\nAdding "+str(dep)+" to "+str(self))
        project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
        dep_path = os.path.abspath(dep.xcodeprojpath())
        rel_path = relpath(project_path, dep_path)
        logging.info("")
        logging.info("Project path: "+project_path)
        logging.info("Dependency path: "+dep_path)
        logging.info("Relative path: "+rel_path)
        # All guids minted below share this stem, with a distinct final digit.
        tthash_base = self.get_hash_base(dep.uniqueid())
        ###############################################
        logging.info("")
        logging.info("Step 1: Add file reference to the dependency...")
        self.set_project_data(project_data)
        pbxfileref_hash = self.add_filereference(dep._project_name+'.xcodeproj', 'pb-project', tthash_base+'0', rel_path, 'SOURCE_ROOT')
        project_data = self.get_project_data()
        logging.info("Done: Added file reference: "+pbxfileref_hash)
        ###############################################
        logging.info("")
        logging.info("Step 2: Add file to Frameworks group...")
        self.set_project_data(project_data)
        if not self.add_file_to_frameworks(dep._project_name+".xcodeproj", pbxfileref_hash):
            return False
        project_data = self.get_project_data()
        logging.info("Done: Added file to Frameworks group.")
        ###############################################
        logging.info("")
        logging.info("Step 3: Add dependencies...")
        pbxtargetdependency_hash = None
        pbxcontaineritemproxy_hash = None
        match = re.search('\/\* Begin PBXTargetDependency section \*\/\n((?:.|\n)+?)\/\* End PBXTargetDependency section \*\/', project_data)
        if not match:
            # No section yet: create an empty one after PBXSourcesBuildPhase.
            logging.info("\tAdding a PBXTargetDependency section...")
            match = re.search('\/\* End PBXSourcesBuildPhase section \*\/\n', project_data)
            if not match:
                logging.error("Couldn't find the PBXSourcesBuildPhase section.")
                return False
            project_data = project_data[:match.end()] + "\n/* Begin PBXTargetDependency section */\n\n/* End PBXTargetDependency section */\n" + project_data[match.end():]
        else:
            (subtext, ) = match.groups()
            match = re.search('([A-Z0-9]+) \/\* PBXTargetDependency \*\/ = {\n[ \t]+isa = PBXTargetDependency;\n[ \t]+name = '+re.escape(dep._project_name)+';\n[ \t]+targetProxy = ([A-Z0-9]+) \/\* PBXContainerItemProxy \*\/;', project_data)
            if match:
                (pbxtargetdependency_hash, pbxcontaineritemproxy_hash,) = match.groups()
                logging.info("This dependency already exists.")
        if pbxtargetdependency_hash is None or pbxcontaineritemproxy_hash is None:
            match = re.search('\/\* Begin PBXTargetDependency section \*\/\n', project_data)
            pbxtargetdependency_hash = tthash_base+'1'
            pbxcontaineritemproxy_hash = tthash_base+'2'
            pbxtargetdependency = "\t\t"+pbxtargetdependency_hash+" /* PBXTargetDependency */ = {\n\t\t\tisa = PBXTargetDependency;\n\t\t\tname = "+dep._project_name+";\n\t\t\ttargetProxy = "+pbxcontaineritemproxy_hash+" /* PBXContainerItemProxy */;\n\t\t};\n"
            project_data = project_data[:match.end()] + pbxtargetdependency + project_data[match.end():]
        logging.info("Done: Added dependency.")
        ###############################################
        logging.info("")
        logging.info("Step 3.1: Add container proxy for dependencies...")
        containerExists = False
        match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n((?:.|\n)+?)\/\* End PBXContainerItemProxy section \*\/', project_data)
        if not match:
            logging.info("\tAdding a PBXContainerItemProxy section...")
            match = re.search('\/\* End PBXBuildFile section \*\/\n', project_data)
            if not match:
                logging.error("Couldn't find the PBXBuildFile section.")
                return False
            project_data = project_data[:match.end()] + "\n/* Begin PBXContainerItemProxy section */\n\n/* End PBXContainerItemProxy section */\n" + project_data[match.end():]
        else:
            (subtext, ) = match.groups()
            match = re.search(re.escape(pbxcontaineritemproxy_hash), subtext)
            if match:
                logging.info("This container proxy already exists.")
                containerExists = True
        if not containerExists:
            match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n', project_data)
            # proxyType 1 = target dependency proxy.
            pbxcontaineritemproxy = "\t\t"+pbxcontaineritemproxy_hash+" /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = "+pbxfileref_hash+" /* "+dep._project_name+".xcodeproj */;\n\t\t\tproxyType = 1;\n\t\t\tremoteGlobalIDString = "+dep.guid()+";\n\t\t\tremoteInfo = "+dep._project_name+";\n\t\t};\n"
            project_data = project_data[:match.end()] + pbxcontaineritemproxy + project_data[match.end():]
        logging.info("Done: Added container proxy.")
        ###############################################
        logging.info("")
        logging.info("Step 3.2: Add module to the dependency list...")
        match = re.search(self.guid()+' \/\* .+? \*\/ = {\n[ \t]+(?:.|\n)+?[ \t]+dependencies = \(\n((?:.|\n)+?)\);', project_data)
        dependency_exists = False
        if not match:
            logging.error("Couldn't find the dependency list.")
            return False
        else:
            (dependencylist, ) = match.groups()
            match = re.search(re.escape(pbxtargetdependency_hash), dependencylist)
            if match:
                logging.info("This dependency has already been added.")
                dependency_exists = True
        if not dependency_exists:
            match = re.search(self.guid()+' \/\* .+? \*\/ = {\n[ \t]+(?:.|\n)+?[ \t]+dependencies = \(\n', project_data)
            if not match:
                logging.error("Couldn't find the dependency list.")
                return False
            dependency_item = '\t\t\t\t'+pbxtargetdependency_hash+' /* PBXTargetDependency */,\n'
            project_data = project_data[:match.end()] + dependency_item + project_data[match.end():]
        logging.info("Done: Added module to the dependency list.")
        ###############################################
        logging.info("")
        logging.info("Step 4: Create project references...")
        match = re.search('\/\* Begin PBXProject section \*\/\n((?:.|\n)+?)\/\* End PBXProject section \*\/', project_data)
        if not match:
            logging.error("Couldn't find the project section.")
            return False
        # Work on the PBXProject section as a substring; splice it back at
        # the end of this step only if it actually changed.
        project_start = match.start(1)
        project_end = match.end(1)
        (project_section, ) = match.groups()
        reference_exists = False
        did_change = False
        productgroup_hash = None
        match = re.search('projectReferences = \(\n((?:.|\n)+?)\n[ \t]+\);', project_section)
        if not match:
            logging.info("Creating project references...")
            match = re.search('projectDirPath = ".*?";\n', project_section)
            if not match:
                logging.error("Couldn't find project references anchor.")
                return False
            did_change = True
            project_section = project_section[:match.end()] + '\t\t\tprojectReferences = (\n\t\t\t);\n' + project_section[match.end():]
        else:
            (refs, ) = match.groups()
            match = re.search('\{\n[ \t]+ProductGroup = ([A-Z0-9]+) \/\* Products \*\/;\n[ \t]+ProjectRef = '+re.escape(pbxfileref_hash), refs)
            if match:
                (productgroup_hash, ) = match.groups()
                logging.info("This product group already exists: "+productgroup_hash)
                reference_exists = True
        if not reference_exists:
            match = re.search('projectReferences = \(\n', project_section)
            if not match:
                logging.error("Missing the project references item.")
                return False
            productgroup_hash = tthash_base+'3'
            reference_text = '\t\t\t\t{\n\t\t\t\t\tProductGroup = '+productgroup_hash+' /* Products */;\n\t\t\t\t\tProjectRef = '+pbxfileref_hash+' /* '+dep._project_name+'.xcodeproj */;\n\t\t\t\t},\n'
            project_section = project_section[:match.end()] + reference_text + project_section[match.end():]
            did_change = True
        if did_change:
            project_data = project_data[:project_start] + project_section + project_data[project_end:]
        logging.info("Done: Created project reference.")
        ###############################################
        logging.info("")
        logging.info("Step 4.1: Create product group...")
        match = re.search('\/\* Begin PBXGroup section \*\/\n', project_data)
        if not match:
            logging.error("Couldn't find the group section.")
            return False
        group_start = match.end()
        lib_hash = None
        match = re.search(re.escape(productgroup_hash)+" \/\* Products \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n((?:.|\n)+?)\);", project_data)
        if match:
            logging.info("This product group already exists.")
            (children, ) = match.groups()
            match = re.search('([A-Z0-9]+) \/\* '+re.escape(dep._product_name)+' \*\/', children)
            if not match:
                logging.error("No product found")
                return False
                # TODO: Add this product.
            else:
                (lib_hash, ) = match.groups()
        else:
            lib_hash = tthash_base+'4'
            productgrouptext = "\t\t"+productgroup_hash+" /* Products */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t"+lib_hash+" /* "+dep._product_name+" */,\n\t\t\t);\n\t\t\tname = Products;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n"
            project_data = project_data[:group_start] + productgrouptext + project_data[group_start:]
        logging.info("Done: Created product group: "+lib_hash)
        ###############################################
        logging.info("")
        logging.info("Step 4.2: Add container proxy for target product...")
        containerExists = False
        targetproduct_hash = tthash_base+'6'
        match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n((?:.|\n)+?)\/\* End PBXContainerItemProxy section \*\/', project_data)
        if not match:
            logging.info("\tAdding a PBXContainerItemProxy section...")
            match = re.search('\/\* End PBXBuildFile section \*\/\n', project_data)
            if not match:
                logging.error("Couldn't find the PBXBuildFile section.")
                return False
            project_data = project_data[:match.end()] + "\n/* Begin PBXContainerItemProxy section */\n\n/* End PBXContainerItemProxy section */\n" + project_data[match.end():]
        else:
            (subtext, ) = match.groups()
            match = re.search(re.escape(targetproduct_hash), subtext)
            if match:
                logging.info("This container proxy already exists.")
                containerExists = True
        if not containerExists:
            match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n', project_data)
            # proxyType 2 = reference proxy for the dependency's product.
            pbxcontaineritemproxy = "\t\t"+targetproduct_hash+" /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = "+pbxfileref_hash+" /* "+dep._project_name+".xcodeproj */;\n\t\t\tproxyType = 2;\n\t\t\tremoteGlobalIDString = "+dep._product_guid+";\n\t\t\tremoteInfo = "+dep._project_name+";\n\t\t};\n"
            project_data = project_data[:match.end()] + pbxcontaineritemproxy + project_data[match.end():]
        logging.info("Done: Added target container proxy.")
        ###############################################
        logging.info("")
        logging.info("Step 4.3: Create reference proxy...")
        referenceExists = False
        match = re.search('\/\* Begin PBXReferenceProxy section \*\/\n((?:.|\n)+?)\/\* End PBXReferenceProxy section \*\/', project_data)
        if not match:
            logging.info("\tAdding a PBXReferenceProxy section...")
            match = re.search('\/\* End PBXProject section \*\/\n', project_data)
            if not match:
                logging.error("Couldn't find the PBXProject section.")
                return False
            project_data = project_data[:match.end()] + "\n/* Begin PBXReferenceProxy section */\n\n/* End PBXReferenceProxy section */\n" + project_data[match.end():]
        else:
            (subtext, ) = match.groups()
            match = re.search(re.escape(lib_hash), subtext)
            if match:
                logging.info("This reference proxy already exists.")
                referenceExists = True
        if not referenceExists:
            match = re.search('\/\* Begin PBXReferenceProxy section \*\/\n', project_data)
            referenceproxytext = "\t\t"+lib_hash+" /* "+dep._product_name+" */ = {\n\t\t\tisa = PBXReferenceProxy;\n\t\t\tfileType = archive.ar;\n\t\t\tpath = \""+dep._product_name+"\";\n\t\t\tremoteRef = "+targetproduct_hash+" /* PBXContainerItemProxy */;\n\t\t\tsourceTree = BUILT_PRODUCTS_DIR;\n\t\t};\n"
            project_data = project_data[:match.end()] + referenceproxytext + project_data[match.end():]
        logging.info("Done: Created reference proxy.")
        ###############################################
        logging.info("")
        logging.info("Step 5: Add target file...")
        self.set_project_data(project_data)
        libfile_hash = self.add_buildfile(dep._product_name, lib_hash, tthash_base+'5')
        project_data = self.get_project_data()
        logging.info("Done: Added target file.")
        ###############################################
        logging.info("")
        logging.info("Step 6: Add frameworks...")
        self.set_project_data(project_data)
        self.add_file_to_frameworks_phase(dep._product_name, libfile_hash)
        project_data = self.get_project_data()
        logging.info("Done: Adding module.")
        self.set_project_data(project_data)
        return True
| Python |
#!/usr/bin/env python
# encoding: utf-8
"""
docs.py
Created by Jeff Verkoeyen on 2010-10-18.
Copyright 2009-2010 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import re
import os
import sys
import shutil
import errno
import git
# Three20 Python Objects
import Paths
from optparse import OptionParser
def generate_appledoc(version):
    """Run appledoc over src/ to build the Three20 API documentation.

    version -- project version string embedded in the docset name/metadata.
    """
    logging.info("Generating appledoc")
    # Assemble the appledoc invocation; joining with single spaces produces
    # exactly the same command line the original string concatenation built.
    arguments = [
        "appledoc",
        "--project-name Three20",
        "--project-company \"Facebook\"",
        "--company-id=com.facebook",
        "--output Docs/",
        "--project-version " + version,
        "--ignore .m --ignore Vendors --ignore UnitTests",
        "--keep-undocumented-objects",
        "--keep-undocumented-members",
        "--warn-undocumented-object",
        "--warn-undocumented-member",
        "--warn-empty-description",
        "--warn-unknown-directive",
        "--warn-invalid-crossref",
        "--warn-missing-arg",
        "--keep-intermediate-files",
        "--docset-feed-name \"Three20 " + version + " Documentation\"",
        "--docset-feed-url http://facebook.github.com/three20/api/%DOCSETATOMFILENAME",
        "--docset-package-url http://facebook.github.com/three20/api/%DOCSETPACKAGEFILENAME",
        "--publish-docset",
        "--verbose 5 src/",
    ]
    os.system(" ".join(arguments))
def publish_ghpages(version):
    """Copy the generated docs into three20's gh-pages branch and push them."""
    logging.info("Cloning and checking out gh-pages")
    checkout_commands = [
        "git clone git@github.com:facebook/three20.git Docs/gh-pages",
        "cd Docs/gh-pages && git pull",
        "cd Docs/gh-pages && git checkout gh-pages",
    ]
    for command in checkout_commands:
        os.system(command)
    logging.info("Copying docset into gh-pages folder")
    os.system("cp -r -f Docs/html/* Docs/gh-pages/api")
    os.system("cp -r -f Docs/publish/ Docs/gh-pages/api")
    logging.info("Committing new docs")
    publish_commands = [
        "cd Docs/gh-pages && git add -A .",
        "cd Docs/gh-pages && git commit -am \"Three20 " + version + " Documentation\"",
        "cd Docs/gh-pages && git push origin gh-pages",
    ]
    for command in publish_commands:
        os.system(command)
def main():
    """Entry point: parse flags and run doc generation and/or publishing.

    Returns None (exit status 0) on success; optparse exits with status 2
    on usage errors.
    """
    usage = '''%prog [options]
The Three20 Appledoc Generator Script.
Use this script to generate appledoc
--generate will generate the docs
--publish will publish the new docs into the three20's gh-pages branch
EXAMPLES:
Most common use case:
> %prog --version 1.0.10-dev --generate
'''
    parser = OptionParser(usage = usage)
    parser.add_option("-o", "--generate", dest="generate",
                      help="Generate appledoc",
                      action="store_true")
    parser.add_option("-p", "--publish", dest="publish",
                      help="publish gh-pages",
                      action="store_true")
    parser.add_option("-v", "--version", dest="version",
                      help="Project version")
    parser.add_option("", "--verbose", dest="verbose",
                      help="Display verbose output",
                      action="store_true")
    (options, args) = parser.parse_args()

    if options.verbose:
        log_level = logging.INFO
    else:
        log_level = logging.WARNING
    logging.basicConfig(level=log_level)

    # Both actions interpolate the version into shell commands; without this
    # guard a missing --version crashed with a TypeError on None + str.
    if (options.generate or options.publish) and not options.version:
        parser.error("--version is required when generating or publishing docs")

    did_anything = False
    if options.generate:
        did_anything = True
        generate_appledoc(options.version)
    if options.publish:
        did_anything = True
        publish_ghpages(options.version)
    if not did_anything:
        parser.print_help()


if __name__ == "__main__":
    sys.exit(main())
| Python |
#!/usr/bin/env python
# encoding: utf-8
"""
ttmodule.py
Most of the documentation is found in Pbxproj.py.
Created by Jeff Verkoeyen on 2010-10-18.
Copyright 2009-2010 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import re
import os
import sys
from optparse import OptionParser
# Three20 Python Objects
import Paths
from Pbxproj import Pbxproj
# Print the given project's dependencies to stdout.
def print_dependencies(name):
    """Write the project's description and each dependency to stdout."""
    pbxproj = Pbxproj.get_pbxproj_by_name(name)
    sys.stdout.write(str(pbxproj) + "...\n")
    deps = pbxproj.dependencies()
    if deps:
        for dep in deps:
            sys.stdout.write("\t" + dep + "\n")
def get_dependency_modules(dependency_names):
    """Recursively resolve the Pbxproj objects for the given module names.

    dependency_names -- iterable of module identifiers (or None/empty).
    Returns a dict mapping each module's uniqueid() to its Pbxproj,
    including all transitive dependencies. Exits the process if any
    module's dependencies cannot be read.
    """
    dependency_modules = {}
    if not dependency_names:
        return dependency_modules
    for name in dependency_names:
        project = Pbxproj.get_pbxproj_by_name(name)
        dependency_modules[project.uniqueid()] = project
        dependencies = project.dependencies()
        if dependencies is None:
            print("Failed to get dependencies; it's possible that the given target doesn't exist.")
            # BUGFIX: exit with a non-zero status so shells and scripts can
            # detect the failure (this previously exited with 0 == success).
            sys.exit(1)
        submodules = get_dependency_modules(dependencies)
        for guid, submodule in submodules.items():
            dependency_modules[guid] = submodule
    return dependency_modules
def add_modules_to_project(module_names, project, configs):
    """Add the named modules (plus transitive dependencies) to project.

    module_names -- identifiers accepted by Pbxproj.get_pbxproj_by_name.
    project      -- destination Pbxproj object (mutated and written to disk).
    configs      -- explicit configuration names, or None/empty to apply the
                    settings to every configuration of the target.
    """
    logging.info(project)
    logging.info("Checking dependencies...")
    # dependencies() returns None when the project file can't be parsed.
    if project.dependencies() is None:
        logging.error("Failed to get dependencies. Check the error logs for more details.")
        sys.exit(0)
    if len(project.dependencies()) == 0:
        logging.info("\tNo dependencies.")
    else:
        logging.info("Existing dependencies:")
        [logging.info("\t"+x) for x in project.dependencies()]

    # Resolve every requested module and everything each one depends on.
    modules = get_dependency_modules(module_names)

    logging.info("Requested dependency list:")
    [logging.info("\t"+str(x)) for k,x in modules.items()]

    logging.info("Adding dependencies...")
    failed = []
    for k,v in modules.items():
        # Certain modules need extra project plumbing beyond the dependency link.
        if v.name == 'Three20UI':
            project.add_framework('QuartzCore.framework')
        if v.name == 'Three20Core':
            project.add_bundle()
        if not project.add_dependency(v):
            failed.append(k)

    if configs:
        # Apply settings only to the explicitly requested configurations.
        for config in configs:
            project.add_header_search_path(config)
            project.add_build_setting(config, 'OTHER_LDFLAGS', '-ObjC')
    else:
        # No explicit configs: apply to every configuration of the target.
        # configuration is a (guid, name) pair; [1] is the name.
        for configuration in project.configurations:
            project.add_header_search_path(configuration[1])
            # NOTE(review): this inner loop re-adds the same flag once per
            # module; add_build_setting skips existing values, so this looks
            # redundant but harmless -- confirm before simplifying.
            for k,v in modules.items():
                project.add_build_setting(configuration[1], 'OTHER_LDFLAGS', '-ObjC')

    if len(failed) > 0:
        logging.error("Some dependencies failed to be added:")
        [logging.error("\t"+str(x)+"\n") for x in failed]
def main():
    """Entry point: parse options, then print dependencies and/or add modules.

    Returns None (exit status 0); optparse exits with status 2 on usage errors.
    """
    usage = '''%prog [options] module(s)
The Three20 Module Script.
Easily add Three20 modules to your projects.
MODULES:
Modules may take the form <module-name>(:<module-target>)
<module-target> defaults to <module-name> if it is not specified
<module-name> may be a path to a .pbxproj file.
EXAMPLES:
Most common use case:
> %prog -p path/to/myApp/myApp.xcodeproj Three20
For adding Xcode 4 support to an Xcode 3.2.# project:
> %prog -p path/to/myApp/myApp.xcodeproj Three20 --xcode-version=4
Print all dependencies for the Three20UI module
> %prog -d Three20UI
Print all dependencies for the extThree20JSON module's extThree20JSON+SBJSON target.
> %prog -d extThree20JSON:extThree20JSON+SBJSON
Add the Three20 project settings specifically to the Debug and Release configurations.
By default, all Three20 settings are added to all project configurations.
This includes adding the header search path and linker flags.
> %prog -p path/to/myApp.xcodeproj -c Debug -c Release
Add the extThree20XML module and all of its dependencies to the myApp project.
> %prog -p path/to/myApp.xcodeproj extThree20XML
Add a specific target of a module to a project.
> %prog -p path/to/myApp.xcodeproj extThree20JSON:extThree20JSON+SBJSON'''
    parser = OptionParser(usage = usage)
    parser.add_option("-d", "--dependencies", dest="print_dependencies",
                      help="Print dependencies for the given modules",
                      action="store_true")
    parser.add_option("-v", "--verbose", dest="verbose",
                      help="Display verbose output",
                      action="store_true")
    parser.add_option("-p", "--project", dest="projects",
                      help="Add the given modules to this project", action="append")
    parser.add_option("--xcode-version", dest="xcode_version",
                      help="Set the xcode version you plan to open this project in. By default uses xcodebuild to determine your latest Xcode version.")
    parser.add_option("-c", "--config", dest="configs",
                      help="Explicit configurations to add Three20 settings to (example: Debug). By default, ttmodule will add configuration settings to every configuration for the given target", action="append")
    (options, args) = parser.parse_args()

    if options.verbose:
        log_level = logging.INFO
    else:
        log_level = logging.WARNING
    logging.basicConfig(level=log_level)

    did_anything = False
    if options.print_dependencies:
        for module_name in args:
            print_dependencies(module_name)
        did_anything = True
    if options.projects is not None:
        did_anything = True
        if not options.xcode_version:
            # Ask xcodebuild for the installed Xcode version.
            f = os.popen("xcodebuild -version")
            output = f.readlines()
            # BUGFIX: guard against empty output (e.g. xcodebuild missing);
            # indexing [0] unconditionally raised IndexError in that case.
            if output:
                match = re.search('Xcode ([a-zA-Z0-9.]+)', output[0])
                if match:
                    (options.xcode_version, ) = match.groups()
        for name in options.projects:
            project = Pbxproj.get_pbxproj_by_name(name, xcode_version = options.xcode_version)
            add_modules_to_project(args, project, options.configs)
    if not did_anything:
        parser.print_help()


if __name__ == "__main__":
    sys.exit(main())
| Python |
#!/usr/bin/env python
# encoding: utf-8
"""
Paths.py
Created by Jeff Verkoeyen on 2010-10-18.
Copyright 2009-2010 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os

# Absolute path to the directory containing this script.
script_dir = os.path.dirname(os.path.realpath(__file__))
# Parent of the scripts directory -- presumably the three20 src/ root;
# verify against the repository layout.
src_dir = os.path.dirname(script_dir)
| Python |
#!/usr/bin/env python
# encoding: utf-8
"""
ttmodule.py
Most of the documentation is found in Pbxproj.py.
Created by Jeff Verkoeyen on 2010-10-18.
Copyright 2009-2010 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import re
import os
import sys
from optparse import OptionParser
# Three20 Python Objects
import Paths
from Pbxproj import Pbxproj
# Print the given project's dependencies to stdout.
def print_dependencies(name):
    """Write the project's description and each dependency to stdout."""
    pbxproj = Pbxproj.get_pbxproj_by_name(name)
    sys.stdout.write(str(pbxproj) + "...\n")
    deps = pbxproj.dependencies()
    if deps:
        for dep in deps:
            sys.stdout.write("\t" + dep + "\n")
def get_dependency_modules(dependency_names):
    """Recursively resolve the Pbxproj objects for the given module names.

    dependency_names -- iterable of module identifiers (or None/empty).
    Returns a dict mapping each module's uniqueid() to its Pbxproj,
    including all transitive dependencies. Exits the process if any
    module's dependencies cannot be read.
    """
    dependency_modules = {}
    if not dependency_names:
        return dependency_modules
    for name in dependency_names:
        project = Pbxproj.get_pbxproj_by_name(name)
        dependency_modules[project.uniqueid()] = project
        dependencies = project.dependencies()
        if dependencies is None:
            print("Failed to get dependencies; it's possible that the given target doesn't exist.")
            # BUGFIX: exit with a non-zero status so shells and scripts can
            # detect the failure (this previously exited with 0 == success).
            sys.exit(1)
        submodules = get_dependency_modules(dependencies)
        for guid, submodule in submodules.items():
            dependency_modules[guid] = submodule
    return dependency_modules
def add_modules_to_project(module_names, project, configs):
    """Add the named modules (plus transitive dependencies) to project.

    module_names -- identifiers accepted by Pbxproj.get_pbxproj_by_name.
    project      -- destination Pbxproj object (mutated and written to disk).
    configs      -- explicit configuration names, or None/empty to apply the
                    settings to every configuration of the target.
    """
    logging.info(project)
    logging.info("Checking dependencies...")
    # dependencies() returns None when the project file can't be parsed.
    if project.dependencies() is None:
        logging.error("Failed to get dependencies. Check the error logs for more details.")
        sys.exit(0)
    if len(project.dependencies()) == 0:
        logging.info("\tNo dependencies.")
    else:
        logging.info("Existing dependencies:")
        [logging.info("\t"+x) for x in project.dependencies()]

    # Resolve every requested module and everything each one depends on.
    modules = get_dependency_modules(module_names)

    logging.info("Requested dependency list:")
    [logging.info("\t"+str(x)) for k,x in modules.items()]

    logging.info("Adding dependencies...")
    failed = []
    for k,v in modules.items():
        # Certain modules need extra project plumbing beyond the dependency link.
        if v.name == 'Three20UI':
            project.add_framework('QuartzCore.framework')
        if v.name == 'Three20Core':
            project.add_bundle()
        if not project.add_dependency(v):
            failed.append(k)

    if configs:
        # Apply settings only to the explicitly requested configurations.
        for config in configs:
            project.add_header_search_path(config)
            project.add_build_setting(config, 'OTHER_LDFLAGS', '-ObjC')
    else:
        # No explicit configs: apply to every configuration of the target.
        # configuration is a (guid, name) pair; [1] is the name.
        for configuration in project.configurations:
            project.add_header_search_path(configuration[1])
            # NOTE(review): this inner loop re-adds the same flag once per
            # module; add_build_setting skips existing values, so this looks
            # redundant but harmless -- confirm before simplifying.
            for k,v in modules.items():
                project.add_build_setting(configuration[1], 'OTHER_LDFLAGS', '-ObjC')

    if len(failed) > 0:
        logging.error("Some dependencies failed to be added:")
        [logging.error("\t"+str(x)+"\n") for x in failed]
def main():
    """Entry point: parse options, then print dependencies and/or add modules.

    Returns None (exit status 0); optparse exits with status 2 on usage errors.
    """
    usage = '''%prog [options] module(s)
The Three20 Module Script.
Easily add Three20 modules to your projects.
MODULES:
Modules may take the form <module-name>(:<module-target>)
<module-target> defaults to <module-name> if it is not specified
<module-name> may be a path to a .pbxproj file.
EXAMPLES:
Most common use case:
> %prog -p path/to/myApp/myApp.xcodeproj Three20
For adding Xcode 4 support to an Xcode 3.2.# project:
> %prog -p path/to/myApp/myApp.xcodeproj Three20 --xcode-version=4
Print all dependencies for the Three20UI module
> %prog -d Three20UI
Print all dependencies for the extThree20JSON module's extThree20JSON+SBJSON target.
> %prog -d extThree20JSON:extThree20JSON+SBJSON
Add the Three20 project settings specifically to the Debug and Release configurations.
By default, all Three20 settings are added to all project configurations.
This includes adding the header search path and linker flags.
> %prog -p path/to/myApp.xcodeproj -c Debug -c Release
Add the extThree20XML module and all of its dependencies to the myApp project.
> %prog -p path/to/myApp.xcodeproj extThree20XML
Add a specific target of a module to a project.
> %prog -p path/to/myApp.xcodeproj extThree20JSON:extThree20JSON+SBJSON'''
    parser = OptionParser(usage = usage)
    parser.add_option("-d", "--dependencies", dest="print_dependencies",
                      help="Print dependencies for the given modules",
                      action="store_true")
    parser.add_option("-v", "--verbose", dest="verbose",
                      help="Display verbose output",
                      action="store_true")
    parser.add_option("-p", "--project", dest="projects",
                      help="Add the given modules to this project", action="append")
    parser.add_option("--xcode-version", dest="xcode_version",
                      help="Set the xcode version you plan to open this project in. By default uses xcodebuild to determine your latest Xcode version.")
    parser.add_option("-c", "--config", dest="configs",
                      help="Explicit configurations to add Three20 settings to (example: Debug). By default, ttmodule will add configuration settings to every configuration for the given target", action="append")
    (options, args) = parser.parse_args()

    if options.verbose:
        log_level = logging.INFO
    else:
        log_level = logging.WARNING
    logging.basicConfig(level=log_level)

    did_anything = False
    if options.print_dependencies:
        for module_name in args:
            print_dependencies(module_name)
        did_anything = True
    if options.projects is not None:
        did_anything = True
        if not options.xcode_version:
            # Ask xcodebuild for the installed Xcode version.
            f = os.popen("xcodebuild -version")
            output = f.readlines()
            # BUGFIX: guard against empty output (e.g. xcodebuild missing);
            # indexing [0] unconditionally raised IndexError in that case.
            if output:
                match = re.search('Xcode ([a-zA-Z0-9.]+)', output[0])
                if match:
                    (options.xcode_version, ) = match.groups()
        for name in options.projects:
            project = Pbxproj.get_pbxproj_by_name(name, xcode_version = options.xcode_version)
            add_modules_to_project(args, project, options.configs)
    if not did_anything:
        parser.print_help()


if __name__ == "__main__":
    sys.exit(main())
| Python |
#!/usr/bin/env python
# encoding: utf-8
"""
Paths.py
Created by Jeff Verkoeyen on 2010-10-18.
Copyright 2009-2010 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os

# Absolute path to the directory containing this script.
script_dir = os.path.dirname(os.path.realpath(__file__))
# Parent of the scripts directory -- presumably the three20 src/ root;
# verify against the repository layout.
src_dir = os.path.dirname(script_dir)
| Python |
#!/usr/bin/env python
# encoding: utf-8
"""
Pbxproj.py
Working with the pbxproj file format is a pain in the ass.
This object provides a couple basic features for parsing pbxproj files:
* Getting a dependency list
* Adding one pbxproj to another pbxproj as a dependency
Version 1.1.
History:
1.0 - October 20, 2010: Initial hacked-together version finished. It is alive!
1.1 - January 11, 2011: Add configuration settings to all configurations by default.
Created by Jeff Verkoeyen on 2010-10-18.
Copyright 2009-2011 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import hashlib
import logging
import os
import re
import sys
import Paths
pbxproj_cache = {}
# The following relative path methods recyled from:
# http://code.activestate.com/recipes/208993-compute-relative-path-from-one-directory-to-anothe/
# Author: Cimarron Taylor
# Date: July 6, 2003
def pathsplit(p, rest=[]):
    """Split path p into a list of its components (iterative form).

    rest -- extra components appended after p's components (recipe API).
    """
    accumulated = list(rest)
    while True:
        (head, tail) = os.path.split(p)
        if len(head) < 1:
            return [tail] + accumulated
        if len(tail) < 1:
            return [head] + accumulated
        accumulated.insert(0, tail)
        p = head
def commonpath(l1, l2, common=[]):
    """Return (shared_prefix, remainder_of_l1, remainder_of_l2)."""
    shared = list(common)
    # Consume matching leading components from both lists.
    while len(l1) >= 1 and len(l2) >= 1 and l1[0] == l2[0]:
        shared.append(l1[0])
        l1 = l1[1:]
        l2 = l2[1:]
    return (shared, l1, l2)
def relpath(p1, p2):
    """Compute the relative path from directory p1 to path p2."""
    (common, remainder1, remainder2) = commonpath(pathsplit(p1), pathsplit(p2))
    # One '../' hop for every component of p1 below the common prefix.
    segments = []
    if len(remainder1) > 0:
        segments = ['../' * len(remainder1)]
    segments = segments + remainder2
    return os.path.join(*segments)
class Pbxproj(object):
@staticmethod
def get_pbxproj_by_name(name, xcode_version = None):
    """Return the cached Pbxproj for name, constructing it on first use."""
    cached = pbxproj_cache.get(name)
    if cached is None:
        cached = Pbxproj(name, xcode_version = xcode_version)
        pbxproj_cache[name] = cached
    return cached
# Valid names
#   Three20
#   Three20:Three20-Xcode3.2.5
#   /path/to/project.xcodeproj/project.pbxproj
def __init__(self, name, xcode_version = None):
    """Parse a module identifier into project name and target.

    name          -- one of the valid forms listed above; an optional
                     ':<target>' suffix selects a specific target.
    xcode_version -- Xcode version string (e.g. "4"); used later when
                     deciding how to write header search paths.
    """
    # Raw pbxproj text cache; loaded lazily by get_project_data().
    self._project_data = None

    parts = name.split(':')
    self.name = parts[0]

    if len(parts) > 1:
        # Explicit target given after the colon.
        self.target = parts[1]
    else:
        valid_file_chars = '[a-zA-Z0-9\.\-:+ "\'!@#$%^&*\(\)]';
        if re.match('^'+valid_file_chars+'+$', self.name):
            # Plain module name: the target shares the module's name.
            self.target = self.name
        else:
            # Looks like a path: derive the target from the .xcodeproj name.
            result = re.search('('+valid_file_chars+'+)\.xcodeproj', self.name)
            if not result:
                self.target = self.name
            else:
                (self.target, ) = result.groups()

    # Human-readable project name: last .xcodeproj path component, if any.
    match = re.search('([^/\\\\]+)\.xcodeproj', self.name)
    if not match:
        self._project_name = self.name
    else:
        (self._project_name, ) = match.groups()

    self._guid = None
    self._deps = None
    self._xcode_version = xcode_version
    self._projectVersion = None

    # Eagerly parse the project; guid() triggers dependencies().
    self.guid()
def __str__(self):
    """One-line summary of the project, target and parsed guids."""
    fields = (str(self.name), str(self.target), str(self._guid),
              self._product_guid, self._product_name)
    return "%s target:%s guid:%s prodguid: %s prodname: %s" % fields
def uniqueid(self):
    """Identity key used for caching and dependency maps: '<name>:<target>'."""
    return ':'.join([self.name, self.target])
def path(self):
    """Return the path to this project's project.pbxproj file."""
    # TODO: No sense calculating this every time, just store it when we get the name.
    if re.match('^[a-zA-Z0-9\.\-:+"]+$', self.name):
        # Bare module name: resolve it inside the three20 src directory.
        return os.path.join(Paths.src_dir, self.name.strip('"'), self.name.strip('"')+'.xcodeproj', 'project.pbxproj')
    elif not re.search('project\.pbxproj$', self.name):
        # An .xcodeproj directory path: append the pbxproj file name.
        # BUGFIX: this used re.match('project.pbxproj$', ...), which anchors
        # at the start of the string and so never matched a full
        # ".../project.pbxproj" path -- such paths fell into this branch and
        # had "project.pbxproj" appended a second time.
        return os.path.join(self.name, 'project.pbxproj')
    else:
        # Already a full path to a project.pbxproj file.
        return self.name
# A pbxproj file is contained within an xcodeproj file.
# This method simply strips off the project.pbxproj part of the path.
def xcodeprojpath(self):
    """Path to the .xcodeproj directory containing the pbxproj file."""
    pbxproj_path = self.path()
    return os.path.dirname(pbxproj_path)
def guid(self):
    """Return this target's guid, parsing the project on first access."""
    # dependencies() populates _guid as a side effect of parsing.
    if not self._guid:
        self.dependencies()
    return self._guid
def version(self):
    """Return the pbxproj objectVersion, parsing the project on first access."""
    # dependencies() populates _projectVersion as a side effect of parsing.
    if not self._projectVersion:
        self.dependencies()
    return self._projectVersion
# Load the project data from disk.
def get_project_data(self):
    """Read and cache the pbxproj file contents.

    Returns the file text, or None if the project file doesn't exist.
    """
    if self._project_data is None:
        if not os.path.exists(self.path()):
            logging.info("Couldn't find the project at this path:")
            logging.info(self.path())
            return None
        project_file = open(self.path(), 'r')
        try:
            self._project_data = project_file.read()
        finally:
            # BUGFIX: the handle was never closed before (file leak).
            project_file.close()
    return self._project_data
# Write the project data to disk.
def set_project_data(self, project_data):
    """Persist project_data to disk (and the cache) if it has changed."""
    if self._project_data != project_data:
        self._project_data = project_data
        project_file = open(self.path(), 'w')
        try:
            project_file.write(self._project_data)
        finally:
            # BUGFIX: the handle was never closed before (file leak; buffered
            # data was only flushed at interpreter exit).
            project_file.close()
# Get and cache the dependencies for this project.
def dependencies(self):
    """Parse the pbxproj and return this target's dependency names.

    Side effects: populates _projectVersion, configurationListGuid,
    configurations, _guid, _resources_guid, _frameworks_guid,
    _product_guid and _product_name. Returns the list of dependency
    names, or None (False for one configuration-list error) on failure;
    callers treat any non-list result as failure.
    """
    if self._deps is not None:
        return self._deps

    project_data = self.get_project_data()
    if project_data is None:
        logging.error("Unable to open the project file at this path (is it readable?): "+self.path())
        return None

    # Get project file format version
    result = re.search('\tobjectVersion = ([0-9]+);', project_data)
    if not result:
        logging.error("Can't recover: unable to find the project version for your target at: "+self.path())
        return None
    (self._projectVersion,) = result.groups()
    self._projectVersion = int(self._projectVersion)

    # Get configuration list guid
    result = re.search('[A-Z0-9]+ \/\* '+re.escape(self.target)+' \*\/ = {\n[ \t]+isa = PBXNativeTarget;(?:.|\n)+?buildConfigurationList = ([A-Z0-9]+) \/\* Build configuration list for PBXNativeTarget "'+re.escape(self.target)+'" \*\/;',
                       project_data)
    if result:
        (self.configurationListGuid, ) = result.groups()
    else:
        self.configurationListGuid = None

    # Get configuration list
    if self.configurationListGuid:
        match = re.search(re.escape(self.configurationListGuid)+' \/\* Build configuration list for PBXNativeTarget "'+re.escape(self.target)+'" \*\/ = \{\n[ \t]+isa = XCConfigurationList;\n[ \t]+buildConfigurations = \(\n((?:.|\n)+?)\);', project_data)
        if not match:
            logging.error("Couldn't find the configuration list.")
            return False
        (configurationList,) = match.groups()
        # Each entry is a (guid, configuration-name) pair.
        self.configurations = re.findall('[ \t]+([A-Z0-9]+) \/\* (.+) \*\/,\n', configurationList)

    # Get build phases
    result = re.search('([A-Z0-9]+) \/\* '+re.escape(self.target)+' \*\/ = {\n[ \t]+isa = PBXNativeTarget;(?:.|\n)+?buildPhases = \(\n((?:.|\n)+?)\);',
                       project_data)
    if not result:
        logging.error("Can't recover: Unable to find the build phases from your target at: "+self.path())
        return None
    (self._guid, buildPhases, ) = result.groups()

    # Get the build phases we care about.
    match = re.search('([A-Z0-9]+) \/\* Resources \*\/', buildPhases)
    if match:
        (self._resources_guid, ) = match.groups()
    else:
        # A missing Resources phase is tolerated here;
        # add_file_to_resources_phase reports it if anything needs it.
        self._resources_guid = None
    match = re.search('([A-Z0-9]+) \/\* Frameworks \*\/', buildPhases)
    if not match:
        logging.error("Couldn't find the Frameworks phase from: "+self.path())
        logging.error("Please add a New Link Binary With Libraries Build Phase to your target")
        logging.error("Right click your target in the project, Add, New Build Phase,")
        logging.error(" \"New Link Binary With Libraries Build Phase\"")
        return None
    (self._frameworks_guid, ) = match.groups()

    # Get the dependencies
    result = re.search(re.escape(self._guid)+' \/\* '+re.escape(self.target)+' \*\/ = {\n[ \t]+isa = PBXNativeTarget;(?:.|\n)+?dependencies = \(\n((?:[ \t]+[A-Z0-9]+ \/\* PBXTargetDependency \*\/,\n)*)[ \t]*\);\n',
                       project_data)
    if not result:
        logging.error("Unable to get dependencies from: "+self.path())
        return None
    (dependency_set, ) = result.groups()
    dependency_guids = re.findall('[ \t]+([A-Z0-9]+) \/\* PBXTargetDependency \*\/,\n', dependency_set)

    # Parse the dependencies
    dependency_names = []
    for guid in dependency_guids:
        # Map each PBXTargetDependency guid back to its target name.
        result = re.search(guid+' \/\* PBXTargetDependency \*\/ = \{\n[ \t]+isa = PBXTargetDependency;\n[ \t]*name = (["a-zA-Z0-9\.\-]+);',
                           project_data)
        if result:
            (dependency_name, ) = result.groups()
            dependency_names.append(dependency_name)
    self._deps = dependency_names

    # Get the product guid and name.
    result = re.search(re.escape(self._guid)+' \/\* '+re.escape(self.target)+' \*\/ = {\n[ \t]+isa = PBXNativeTarget;(?:.|\n)+?productReference = ([A-Z0-9]+) \/\* (.+?) \*\/;',
                       project_data)
    if not result:
        logging.error("Unable to get product guid from: "+self.path())
        return None
    (self._product_guid, self._product_name, ) = result.groups()

    return self._deps
# Add a line to the PBXBuildFile section.
#
#   <default_guid> /* <name> in Frameworks */ = {isa = PBXBuildFile; fileRef = <file_ref_hash> /* <name> */; };
#
# Returns: <default_guid> if a line was added.
#          Otherwise, the existing guid is returned.
def add_buildfile(self, name, file_ref_hash, default_guid):
    """Ensure a PBXBuildFile entry exists for file_ref_hash; return its guid."""
    project_data = self.get_project_data()
    match = re.search('\/\* Begin PBXBuildFile section \*\/\n((?:.|\n)+?)\/\* End PBXBuildFile section \*\/', project_data)
    if not match:
        logging.error("Couldn't find PBXBuildFile section.")
        return None
    (subtext, ) = match.groups()

    # Reuse an existing build file that already references this file ref.
    buildfile_hash = None
    match = re.search('([A-Z0-9]+).+?fileRef = '+re.escape(file_ref_hash), subtext)
    if match:
        (buildfile_hash, ) = match.groups()
        logging.info("This build file already exists: "+buildfile_hash)

    if buildfile_hash is None:
        # Insert a new entry right after the section header.
        match = re.search('\/\* Begin PBXBuildFile section \*\/\n', project_data)
        buildfile_hash = default_guid
        libfiletext = "\t\t"+buildfile_hash+" /* "+name+" in Frameworks */ = {isa = PBXBuildFile; fileRef = "+file_ref_hash+" /* "+name+" */; };\n"
        project_data = project_data[:match.end()] + libfiletext + project_data[match.end():]

    self.set_project_data(project_data)
    return buildfile_hash
# Add a line to the PBXFileReference section.
#
#   <default_guid> /* <name> */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.<file_type>"; name = <name>; path = <rel_path>; sourceTree = <source_tree>; };
#
# Returns: <default_guid> if a line was added.
#          Otherwise, the existing guid is returned.
def add_filereference(self, name, file_type, default_guid, rel_path, source_tree):
    """Ensure a PBXFileReference entry exists for rel_path; return its guid."""
    project_data = self.get_project_data()
    quoted_rel_path = '"'+rel_path.strip('"')+'"'
    fileref_hash = None

    # Look for an existing reference with the unquoted path first.
    match = re.search('([A-Z0-9]+) \/\* '+re.escape(name)+' \*\/ = \{isa = PBXFileReference; lastKnownFileType = "wrapper.'+file_type+'"; name = '+re.escape(name)+'; path = '+re.escape(rel_path)+';', project_data)
    if not match:
        # Check again for quoted versions, just to be sure.
        match = re.search('([A-Z0-9]+) \/\* '+re.escape(name)+' \*\/ = \{isa = PBXFileReference; lastKnownFileType = "wrapper.'+file_type+'"; name = '+re.escape(name)+'; path = '+re.escape(quoted_rel_path)+';', project_data)

    if match:
        logging.info("This file has already been added.")
        (fileref_hash, ) = match.groups()
    else:
        # Insert a new entry right after the section header.
        match = re.search('\/\* Begin PBXFileReference section \*\/\n', project_data)
        if not match:
            logging.error("Couldn't find the PBXFileReference section.")
            return False
        fileref_hash = default_guid
        pbxfileref = "\t\t"+fileref_hash+" /* "+name+" */ = {isa = PBXFileReference; lastKnownFileType = \"wrapper."+file_type+"\"; name = "+name+"; path = "+quoted_rel_path+"; sourceTree = "+source_tree+"; };\n"
        project_data = project_data[:match.end()] + pbxfileref + project_data[match.end():]

    self.set_project_data(project_data)
    return fileref_hash
# Add a file to the given PBXGroup.
#
#   <guid> /* <name> */,
def add_file_to_group(self, name, guid, group):
    """Add the file reference guid to the named PBXGroup's children.

    Returns True on success (or if already present), False if the group
    can't be found.
    """
    project_data = self.get_project_data()
    match = re.search('\/\* '+re.escape(group)+' \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n((?:.|\n)+?)\);', project_data)
    if not match:
        logging.error("Couldn't find the "+group+" children.")
        return False
    (children,) = match.groups()

    match = re.search(re.escape(guid), children)
    if match:
        logging.info("This file is already a member of the "+name+" group.")
    else:
        # Insert the child at the top of the group's children list.
        match = re.search('\/\* '+re.escape(group)+' \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n', project_data)
        if not match:
            logging.error("Couldn't find the "+group+" group.")
            return False
        pbxgroup = "\t\t\t\t"+guid+" /* "+name+" */,\n"
        project_data = project_data[:match.end()] + pbxgroup + project_data[match.end():]

    # set_project_data is a no-op when nothing changed.
    self.set_project_data(project_data)
    return True
# Add a file to the Frameworks PBXGroup.
#
#   <guid> /* <name> */,
def add_file_to_frameworks(self, name, guid):
    # Thin wrapper over add_file_to_group for the Frameworks group.
    return self.add_file_to_group(name, guid, 'Frameworks')
# Add a file to the Resources PBXGroup.
#
#   <guid> /* <name> */,
def add_file_to_resources(self, name, guid):
    """Add to the Resources group, or 'Supporting Files' when no Resources
    group exists (newer Xcode templates use that name)."""
    resources_pattern = '\/\* '+re.escape('Resources')+' \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n((?:.|\n)+?)\);'
    if re.search(resources_pattern, self.get_project_data()):
        return self.add_file_to_group(name, guid, 'Resources')
    return self.add_file_to_group(name, guid, 'Supporting Files')
def add_file_to_phase(self, name, guid, phase_guid, phase):
    """Add the build-file guid to the files list of the given build phase.

    Returns True on success (or if already present), False if the phase
    can't be found.
    """
    project_data = self.get_project_data()

    match = re.search(re.escape(phase_guid)+" \/\* "+re.escape(phase)+" \*\/ = {(?:.|\n)+?files = \(((?:.|\n)+?)\);", project_data)
    if not match:
        logging.error("Couldn't find the "+phase+" phase.")
        return False
    (files, ) = match.groups()

    match = re.search(re.escape(guid), files)
    if match:
        logging.info("The file has already been added.")
    else:
        # Insert the entry at the top of the phase's files list.
        match = re.search(re.escape(phase_guid)+" \/\* "+phase+" \*\/ = {(?:.|\n)+?files = \(\n", project_data)
        if not match:
            logging.error("Couldn't find the "+phase+" files")
            return False
        frameworktext = "\t\t\t\t"+guid+" /* "+name+" in "+phase+" */,\n"
        project_data = project_data[:match.end()] + frameworktext + project_data[match.end():]

    # set_project_data is a no-op when nothing changed.
    self.set_project_data(project_data)
    return True
def get_rel_path_to_products_dir(self):
    """Relative path from this project's directory to Build/Products."""
    here = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    products = os.path.join(os.path.join(os.path.dirname(Paths.src_dir), 'Build'), 'Products')
    return relpath(here, products)
def add_file_to_frameworks_phase(self, name, guid):
    # Link the build file into the target's Frameworks build phase
    # (_frameworks_guid is populated by dependencies()).
    return self.add_file_to_phase(name, guid, self._frameworks_guid, 'Frameworks')
def add_file_to_resources_phase(self, name, guid):
    """Add the build file to the Copy Bundle Resources phase, if one exists."""
    # _resources_guid is populated by dependencies(); None means the target
    # has no Resources build phase at all.
    if self._resources_guid is None:
        logging.error("No resources build phase found in the destination project")
        logging.error("Please add a New Copy Bundle Resources Build Phase to your target")
        logging.error("Right click your target in the project, Add, New Build Phase,")
        logging.error(" \"New Copy Bundle Resources Build Phase\"")
        return False
    return self.add_file_to_phase(name, guid, self._resources_guid, 'Resources')
def add_header_search_path(self, configuration):
    """Add the three20 build-products dir to HEADER_SEARCH_PATHS for the
    named configuration. Returns True on success, False otherwise."""
    project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    build_path = os.path.join(os.path.join(os.path.join(os.path.dirname(Paths.src_dir), 'Build'), 'Products'), 'three20')
    rel_path = relpath(project_path, build_path)

    did_add_build_setting = self.add_build_setting(configuration, 'HEADER_SEARCH_PATHS', '"'+rel_path+'"')
    if not did_add_build_setting:
        return did_add_build_setting

    # Version 46 is Xcode 4's file format.
    try:
        primary_version = int(self._xcode_version.split('.')[0])
    except (ValueError, AttributeError):
        # ValueError: non-numeric version string.
        # AttributeError: no version supplied (_xcode_version is None) --
        # the original py2-only "except ValueError, e" missed this case and
        # crashed. The py3-compatible tuple form also fixes the syntax.
        primary_version = 0

    if self._projectVersion >= 46 or primary_version >= 4:
        # Xcode 4 resolves $(BUILT_PRODUCTS_DIR) differently, so also add
        # the paths relative to the built-products dir.
        did_add_build_setting = self.add_build_setting(configuration, 'HEADER_SEARCH_PATHS', '"$(BUILT_PRODUCTS_DIR)/../../three20"')
        if not did_add_build_setting:
            return did_add_build_setting
        did_add_build_setting = self.add_build_setting(configuration, 'HEADER_SEARCH_PATHS', '"$(BUILT_PRODUCTS_DIR)/../three20"')
        if not did_add_build_setting:
            return did_add_build_setting
    return did_add_build_setting
def add_build_setting(self, configuration, setting_name, value):
    """Add (or append to) a build setting in the named configuration.

    Handles three cases: the setting is absent, the setting holds a
    single value, or the setting already holds a list of values.
    Returns True on success, False when the configuration isn't found.
    """
    project_data = self.get_project_data()
    match = re.search('\/\* '+configuration+' \*\/ = {\n[ \t]+isa = XCBuildConfiguration;\n(?:.|\n)+?[ \t]+buildSettings = \{\n((?:.|\n)+?)\};', project_data)
    if not match:
        print "Couldn't find the "+configuration+" configuration in "+self.path()
        return False

    settings_start = match.start(1)
    settings_end = match.end(1)

    (build_settings, ) = match.groups()

    match = re.search(re.escape(setting_name)+' = ((?:.|\n)+?);', build_settings)
    if not match:
        # Add a brand new build setting. No checking for existing settings necessary.
        settingtext = '\t\t\t\t'+setting_name+' = '+value+';\n'
        project_data = project_data[:settings_start] + settingtext + project_data[settings_start:]
    else:
        # Build settings already exist. Is there one or many?
        (search_paths,) = match.groups()
        if re.search('\(\n', search_paths):
            # Many: the setting is already a pbxproj list literal.
            match = re.search(re.escape(value), search_paths)
            if not match:
                # If value has any spaces in it, Xcode will split it up into
                # multiple entries.
                escaped_value = re.escape(value).replace(' ', '",\n[ \t]+"')
                match = re.search(escaped_value, search_paths)
                if not match and not re.search(re.escape(value.strip('"')), search_paths):
                    # Not present in any form: prepend it to the list.
                    match = re.search(re.escape(setting_name)+' = \(\n', build_settings)
                    build_settings = build_settings[:match.end()] + '\t\t\t\t\t'+value+',\n' + build_settings[match.end():]
                    project_data = project_data[:settings_start] + build_settings + project_data[settings_end:]
        else:
            # One: a single scalar value is already set.
            if search_paths.strip('"') != value.strip('"'):
                # Different value: convert the scalar into a two-element list.
                existing_path = search_paths
                path_set = '(\n\t\t\t\t\t'+value+',\n\t\t\t\t\t'+existing_path+'\n\t\t\t\t)'
                build_settings = build_settings[:match.start(1)] + path_set + build_settings[match.end(1):]
                project_data = project_data[:settings_start] + build_settings + project_data[settings_end:]

    self.set_project_data(project_data)
    return True
def get_hash_base(self, uniquename):
    """Derive a deterministic 23-character guid base from uniquename.

    The base is the '320' prefix plus the first 20 hex digits of the
    SHA-224 of uniquename, uppercased; callers append one more digit to
    mint distinct guids for related project objects.
    """
    # 24-char template guid; dropping 4 chars leaves room for the prefix
    # plus a caller-appended suffix digit.
    template = '320FFFEEEDDDCCCBBBAAA000'
    digest = hashlib.sha224(uniquename).hexdigest().upper()
    return '320' + digest[:len(template) - 4]
def add_framework(self, framework):
    """Link the named system framework (e.g. 'UIKit.framework') into this project.

    Returns True on success, False if either group insertion fails.
    """
    base = self.get_hash_base(framework)
    # Deterministic guids: base+'0' for the file reference, base+'1' for
    # the build file.
    ref_guid = self.add_filereference(framework, 'frameworks', base+'0',
                                      'System/Library/Frameworks/'+framework, 'SDKROOT')
    build_guid = self.add_buildfile(framework, ref_guid, base+'1')
    if not self.add_file_to_frameworks(framework, ref_guid):
        return False
    if not self.add_file_to_frameworks_phase(framework, build_guid):
        return False
    return True
def add_bundle(self):
    """Add the Three20.bundle resource (and its build-phase entry) to this project.

    Returns True on success, False if either resource insertion fails.
    """
    base = self.get_hash_base('Three20.bundle')
    project_dir = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    bundle_path = os.path.join(Paths.src_dir, 'Three20.bundle')
    # The project file stores the bundle location relative to the project.
    relative = relpath(project_dir, bundle_path)
    ref_guid = self.add_filereference('Three20.bundle', 'plug-in', base+'0', relative, 'SOURCE_ROOT')
    build_guid = self.add_buildfile('Three20.bundle', ref_guid, base+'1')
    if not self.add_file_to_resources('Three20.bundle', ref_guid):
        return False
    if not self.add_file_to_resources_phase('Three20.bundle', build_guid):
        return False
    return True
# Look up the PBXFileReference guid referenced by a PBXBuildFile entry.
def get_filerefguid_from_buildfileguid(self, buildfileguid):
    """Return the fileRef guid of the given PBXBuildFile row, or None if absent."""
    pattern = buildfileguid+' \/\* .+ \*\/ = {isa = PBXBuildFile; fileRef = ([A-Z0-9]+) \/\* .+ \*\/;'
    found = re.search(pattern, self.get_project_data())
    if found is None:
        logging.error("Couldn't find PBXBuildFile row.")
        return None
    return found.group(1)
def get_filepath_from_filerefguid(self, filerefguid):
    """Return the `path` field of the PBXFileReference with this guid, or None."""
    pattern = filerefguid+' \/\* .+ \*\/ = {isa = PBXFileReference; .+ path = (.+); .+ };'
    found = re.search(pattern, self.get_project_data())
    if found is None:
        logging.error("Couldn't find PBXFileReference row.")
        return None
    return found.group(1)
# Get all source files that are "built" in this project. This includes files
# built for libraries, executables, and unit testing.
def get_built_sources(self):
    """Return absolute paths for every file in the PBXSourcesBuildPhase
    section, or None when that section is missing."""
    section = re.search('\/\* Begin PBXSourcesBuildPhase section \*\/\n((?:.|\n)+?)\/\* End PBXSourcesBuildPhase section \*\/', self.get_project_data())
    if section is None:
        logging.error("Couldn't find PBXSourcesBuildPhase section.")
        return None
    # Every build-phase row lists the PBXBuildFile guid first.
    build_guids = re.findall('[ \t]+([A-Z0-9]+) \/\* .+ \*\/,\n', section.group(1))
    root = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    paths = []
    for build_guid in build_guids:
        ref_guid = self.get_filerefguid_from_buildfileguid(build_guid)
        rel = self.get_filepath_from_filerefguid(ref_guid)
        paths.append(os.path.join(root, rel.strip('"')))
    return paths
# Get all header files that are "built" in this project. This includes files
# built for libraries, executables, and unit testing.
def get_built_headers(self):
    """Return absolute paths for every file in the PBXHeadersBuildPhase
    section, or None when that section is missing."""
    section = re.search('\/\* Begin PBXHeadersBuildPhase section \*\/\n((?:.|\n)+?)\/\* End PBXHeadersBuildPhase section \*\/', self.get_project_data())
    if section is None:
        logging.error("Couldn't find PBXHeadersBuildPhase section.")
        return None
    # Every build-phase row lists the PBXBuildFile guid first.
    build_guids = re.findall('[ \t]+([A-Z0-9]+) \/\* .+ \*\/,\n', section.group(1))
    root = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    paths = []
    for build_guid in build_guids:
        ref_guid = self.get_filerefguid_from_buildfileguid(build_guid)
        rel = self.get_filepath_from_filerefguid(ref_guid)
        paths.append(os.path.join(root, rel.strip('"')))
    return paths
def add_dependency(self, dep):
    """Wire another project (dep) into this one as a build dependency.

    Performs a series of in-place edits on this project's .pbxproj text:
      1. add a file reference to dep's .xcodeproj
      2. show that reference in the Frameworks group
      3. add a PBXTargetDependency + PBXContainerItemProxy pair and list it
         in this target's `dependencies`
      4. add a projectReferences entry, a Products group, a product
         container proxy and a PBXReferenceProxy for dep's library
      5-6. create a build file for the library and link it into the
         Frameworks build phase
    Every step is idempotent: existing entries are detected and reused.
    Returns True on success, False when a required anchor pattern is missing.
    """
    project_data = self.get_project_data()
    dep_data = dep.get_project_data()
    if project_data is None or dep_data is None:
        return False

    logging.info("\nAdding "+str(dep)+" to "+str(self))

    project_path = os.path.dirname(os.path.abspath(self.xcodeprojpath()))
    dep_path = os.path.abspath(dep.xcodeprojpath())
    # The project file stores the dependency's location relative to itself.
    rel_path = relpath(project_path, dep_path)

    logging.info("")
    logging.info("Project path: "+project_path)
    logging.info("Dependency path: "+dep_path)
    logging.info("Relative path: "+rel_path)

    # All guids minted for this dependency share one deterministic base;
    # the final digit distinguishes the object kind ('0'..'6').
    tthash_base = self.get_hash_base(dep.uniqueid())

    ###############################################
    logging.info("")
    logging.info("Step 1: Add file reference to the dependency...")

    self.set_project_data(project_data)
    pbxfileref_hash = self.add_filereference(dep._project_name+'.xcodeproj', 'pb-project', tthash_base+'0', rel_path, 'SOURCE_ROOT')
    project_data = self.get_project_data()

    logging.info("Done: Added file reference: "+pbxfileref_hash)

    ###############################################
    logging.info("")
    logging.info("Step 2: Add file to Frameworks group...")

    self.set_project_data(project_data)
    if not self.add_file_to_frameworks(dep._project_name+".xcodeproj", pbxfileref_hash):
        return False
    project_data = self.get_project_data()

    logging.info("Done: Added file to Frameworks group.")

    ###############################################
    logging.info("")
    logging.info("Step 3: Add dependencies...")

    pbxtargetdependency_hash = None
    pbxcontaineritemproxy_hash = None

    match = re.search('\/\* Begin PBXTargetDependency section \*\/\n((?:.|\n)+?)\/\* End PBXTargetDependency section \*\/', project_data)
    if not match:
        # No section at all yet; create an empty one after the sources phase.
        logging.info("\tAdding a PBXTargetDependency section...")

        match = re.search('\/\* End PBXSourcesBuildPhase section \*\/\n', project_data)
        if not match:
            logging.error("Couldn't find the PBXSourcesBuildPhase section.")
            return False

        project_data = project_data[:match.end()] + "\n/* Begin PBXTargetDependency section */\n\n/* End PBXTargetDependency section */\n" + project_data[match.end():]
    else:
        # Section exists; reuse an existing dependency row if one matches.
        (subtext, ) = match.groups()
        match = re.search('([A-Z0-9]+) \/\* PBXTargetDependency \*\/ = {\n[ \t]+isa = PBXTargetDependency;\n[ \t]+name = '+re.escape(dep._project_name)+';\n[ \t]+targetProxy = ([A-Z0-9]+) \/\* PBXContainerItemProxy \*\/;', project_data)
        if match:
            (pbxtargetdependency_hash, pbxcontaineritemproxy_hash,) = match.groups()
            logging.info("This dependency already exists.")

    if pbxtargetdependency_hash is None or pbxcontaineritemproxy_hash is None:
        # Mint fresh guids and insert a new dependency row at the section top.
        match = re.search('\/\* Begin PBXTargetDependency section \*\/\n', project_data)

        pbxtargetdependency_hash = tthash_base+'1'
        pbxcontaineritemproxy_hash = tthash_base+'2'

        pbxtargetdependency = "\t\t"+pbxtargetdependency_hash+" /* PBXTargetDependency */ = {\n\t\t\tisa = PBXTargetDependency;\n\t\t\tname = "+dep._project_name+";\n\t\t\ttargetProxy = "+pbxcontaineritemproxy_hash+" /* PBXContainerItemProxy */;\n\t\t};\n"
        project_data = project_data[:match.end()] + pbxtargetdependency + project_data[match.end():]

    logging.info("Done: Added dependency.")

    ###############################################
    logging.info("")
    logging.info("Step 3.1: Add container proxy for dependencies...")

    containerExists = False

    match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n((?:.|\n)+?)\/\* End PBXContainerItemProxy section \*\/', project_data)
    if not match:
        logging.info("\tAdding a PBXContainerItemProxy section...")

        match = re.search('\/\* End PBXBuildFile section \*\/\n', project_data)
        if not match:
            logging.error("Couldn't find the PBXBuildFile section.")
            return False

        project_data = project_data[:match.end()] + "\n/* Begin PBXContainerItemProxy section */\n\n/* End PBXContainerItemProxy section */\n" + project_data[match.end():]
    else:
        (subtext, ) = match.groups()
        match = re.search(re.escape(pbxcontaineritemproxy_hash), subtext)
        if match:
            logging.info("This container proxy already exists.")
            containerExists = True

    if not containerExists:
        # proxyType = 1 marks a target (not product) proxy into dep's project.
        match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n', project_data)

        pbxcontaineritemproxy = "\t\t"+pbxcontaineritemproxy_hash+" /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = "+pbxfileref_hash+" /* "+dep._project_name+".xcodeproj */;\n\t\t\tproxyType = 1;\n\t\t\tremoteGlobalIDString = "+dep.guid()+";\n\t\t\tremoteInfo = "+dep._project_name+";\n\t\t};\n"
        project_data = project_data[:match.end()] + pbxcontaineritemproxy + project_data[match.end():]

    logging.info("Done: Added container proxy.")

    ###############################################
    logging.info("")
    logging.info("Step 3.2: Add module to the dependency list...")

    match = re.search(self.guid()+' \/\* .+? \*\/ = {\n[ \t]+(?:.|\n)+?[ \t]+dependencies = \(\n((?:.|\n)+?)\);', project_data)
    dependency_exists = False
    if not match:
        logging.error("Couldn't find the dependency list.")
        return False
    else:
        (dependencylist, ) = match.groups()
        match = re.search(re.escape(pbxtargetdependency_hash), dependencylist)
        if match:
            logging.info("This dependency has already been added.")
            dependency_exists = True

    if not dependency_exists:
        match = re.search(self.guid()+' \/\* .+? \*\/ = {\n[ \t]+(?:.|\n)+?[ \t]+dependencies = \(\n', project_data)
        if not match:
            logging.error("Couldn't find the dependency list.")
            return False

        dependency_item = '\t\t\t\t'+pbxtargetdependency_hash+' /* PBXTargetDependency */,\n'
        project_data = project_data[:match.end()] + dependency_item + project_data[match.end():]

    logging.info("Done: Added module to the dependency list.")

    ###############################################
    logging.info("")
    logging.info("Step 4: Create project references...")

    match = re.search('\/\* Begin PBXProject section \*\/\n((?:.|\n)+?)\/\* End PBXProject section \*\/', project_data)
    if not match:
        logging.error("Couldn't find the project section.")
        return False

    project_start = match.start(1)
    project_end = match.end(1)

    (project_section, ) = match.groups()

    reference_exists = False
    did_change = False
    productgroup_hash = None

    match = re.search('projectReferences = \(\n((?:.|\n)+?)\n[ \t]+\);', project_section)
    if not match:
        # The project has no projectReferences array; create an empty one
        # right after the projectDirPath attribute.
        logging.info("Creating project references...")

        match = re.search('projectDirPath = ".*?";\n', project_section)
        if not match:
            logging.error("Couldn't find project references anchor.")
            return False

        did_change = True
        project_section = project_section[:match.end()] + '\t\t\tprojectReferences = (\n\t\t\t);\n' + project_section[match.end():]
    else:
        (refs, ) = match.groups()
        match = re.search('\{\n[ \t]+ProductGroup = ([A-Z0-9]+) \/\* Products \*\/;\n[ \t]+ProjectRef = '+re.escape(pbxfileref_hash), refs)
        if match:
            (productgroup_hash, ) = match.groups()
            logging.info("This product group already exists: "+productgroup_hash)
            reference_exists = True

    if not reference_exists:
        match = re.search('projectReferences = \(\n', project_section)
        if not match:
            logging.error("Missing the project references item.")
            return False

        productgroup_hash = tthash_base+'3'
        reference_text = '\t\t\t\t{\n\t\t\t\t\tProductGroup = '+productgroup_hash+' /* Products */;\n\t\t\t\t\tProjectRef = '+pbxfileref_hash+' /* '+dep._project_name+'.xcodeproj */;\n\t\t\t\t},\n'
        project_section = project_section[:match.end()] + reference_text + project_section[match.end():]
        did_change = True

    if did_change:
        project_data = project_data[:project_start] + project_section + project_data[project_end:]

    logging.info("Done: Created project reference.")

    ###############################################
    logging.info("")
    logging.info("Step 4.1: Create product group...")

    match = re.search('\/\* Begin PBXGroup section \*\/\n', project_data)
    if not match:
        logging.error("Couldn't find the group section.")
        return False

    group_start = match.end()

    lib_hash = None
    match = re.search(re.escape(productgroup_hash)+" \/\* Products \*\/ = \{\n[ \t]+isa = PBXGroup;\n[ \t]+children = \(\n((?:.|\n)+?)\);", project_data)
    if match:
        logging.info("This product group already exists.")
        (children, ) = match.groups()
        match = re.search('([A-Z0-9]+) \/\* '+re.escape(dep._product_name)+' \*\/', children)
        if not match:
            logging.error("No product found")
            return False
            # TODO: Add this product.
        else:
            (lib_hash, ) = match.groups()
    else:
        lib_hash = tthash_base+'4'
        productgrouptext = "\t\t"+productgroup_hash+" /* Products */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t"+lib_hash+" /* "+dep._product_name+" */,\n\t\t\t);\n\t\t\tname = Products;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n"
        project_data = project_data[:group_start] + productgrouptext + project_data[group_start:]

    logging.info("Done: Created product group: "+lib_hash)

    ###############################################
    logging.info("")
    logging.info("Step 4.2: Add container proxy for target product...")

    containerExists = False

    targetproduct_hash = tthash_base+'6'

    match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n((?:.|\n)+?)\/\* End PBXContainerItemProxy section \*\/', project_data)
    if not match:
        logging.info("\tAdding a PBXContainerItemProxy section...")

        match = re.search('\/\* End PBXBuildFile section \*\/\n', project_data)
        if not match:
            logging.error("Couldn't find the PBXBuildFile section.")
            return False

        project_data = project_data[:match.end()] + "\n/* Begin PBXContainerItemProxy section */\n\n/* End PBXContainerItemProxy section */\n" + project_data[match.end():]
    else:
        (subtext, ) = match.groups()
        match = re.search(re.escape(targetproduct_hash), subtext)
        if match:
            logging.info("This container proxy already exists.")
            containerExists = True

    if not containerExists:
        # proxyType = 2 marks a product-reference proxy.
        match = re.search('\/\* Begin PBXContainerItemProxy section \*\/\n', project_data)

        pbxcontaineritemproxy = "\t\t"+targetproduct_hash+" /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = "+pbxfileref_hash+" /* "+dep._project_name+".xcodeproj */;\n\t\t\tproxyType = 2;\n\t\t\tremoteGlobalIDString = "+dep._product_guid+";\n\t\t\tremoteInfo = "+dep._project_name+";\n\t\t};\n"
        project_data = project_data[:match.end()] + pbxcontaineritemproxy + project_data[match.end():]

    logging.info("Done: Added target container proxy.")

    ###############################################
    logging.info("")
    logging.info("Step 4.3: Create reference proxy...")

    referenceExists = False

    match = re.search('\/\* Begin PBXReferenceProxy section \*\/\n((?:.|\n)+?)\/\* End PBXReferenceProxy section \*\/', project_data)
    if not match:
        logging.info("\tAdding a PBXReferenceProxy section...")

        match = re.search('\/\* End PBXProject section \*\/\n', project_data)
        if not match:
            logging.error("Couldn't find the PBXProject section.")
            return False

        project_data = project_data[:match.end()] + "\n/* Begin PBXReferenceProxy section */\n\n/* End PBXReferenceProxy section */\n" + project_data[match.end():]
    else:
        (subtext, ) = match.groups()
        match = re.search(re.escape(lib_hash), subtext)
        if match:
            logging.info("This reference proxy already exists.")
            referenceExists = True

    if not referenceExists:
        match = re.search('\/\* Begin PBXReferenceProxy section \*\/\n', project_data)

        referenceproxytext = "\t\t"+lib_hash+" /* "+dep._product_name+" */ = {\n\t\t\tisa = PBXReferenceProxy;\n\t\t\tfileType = archive.ar;\n\t\t\tpath = \""+dep._product_name+"\";\n\t\t\tremoteRef = "+targetproduct_hash+" /* PBXContainerItemProxy */;\n\t\t\tsourceTree = BUILT_PRODUCTS_DIR;\n\t\t};\n"
        project_data = project_data[:match.end()] + referenceproxytext + project_data[match.end():]

    logging.info("Done: Created reference proxy.")

    ###############################################
    logging.info("")
    logging.info("Step 5: Add target file...")

    self.set_project_data(project_data)
    libfile_hash = self.add_buildfile(dep._product_name, lib_hash, tthash_base+'5')
    project_data = self.get_project_data()

    logging.info("Done: Added target file.")

    ###############################################
    logging.info("")
    logging.info("Step 6: Add frameworks...")

    self.set_project_data(project_data)
    self.add_file_to_frameworks_phase(dep._product_name, libfile_hash)
    project_data = self.get_project_data()

    logging.info("Done: Adding module.")

    self.set_project_data(project_data)
    return True
| Python |
#!/usr/bin/env python
# encoding: utf-8
"""
docs.py
Created by Jeff Verkoeyen on 2010-10-18.
Copyright 2009-2010 Facebook
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import re
import os
import sys
import shutil
import errno
import git
# Three20 Python Objects
import Paths
from optparse import OptionParser
def generate_appledoc(version):
    """Run appledoc over src/ to (re)build the Three20 API documentation.

    version is interpolated into the docset name and project version.
    """
    logging.info("Generating appledoc")
    # One flag per list entry; joined with single spaces into the exact
    # command line appledoc expects.
    flags = [
        "appledoc",
        "--project-name Three20",
        "--project-company \"Facebook\"",
        "--company-id=com.facebook",
        "--output Docs/",
        "--project-version " + version,
        "--ignore .m --ignore Vendors --ignore UnitTests",
        "--keep-undocumented-objects",
        "--keep-undocumented-members",
        "--warn-undocumented-object",
        "--warn-undocumented-member",
        "--warn-empty-description",
        "--warn-unknown-directive",
        "--warn-invalid-crossref",
        "--warn-missing-arg",
        "--keep-intermediate-files",
        "--docset-feed-name \"Three20 " + version + " Documentation\"",
        "--docset-feed-url http://facebook.github.com/three20/api/%DOCSETATOMFILENAME",
        "--docset-package-url http://facebook.github.com/three20/api/%DOCSETPACKAGEFILENAME",
        "--publish-docset",
        "--verbose 5 src/",
    ]
    os.system(" ".join(flags))
def publish_ghpages(version):
    """Publish the freshly generated docs to the three20 gh-pages branch."""
    logging.info("Cloning and checking out gh-pages")
    for cmd in ("git clone git@github.com:facebook/three20.git Docs/gh-pages",
                "cd Docs/gh-pages && git pull",
                "cd Docs/gh-pages && git checkout gh-pages"):
        os.system(cmd)

    logging.info("Copying docset into gh-pages folder")
    for cmd in ("cp -r -f Docs/html/* Docs/gh-pages/api",
                "cp -r -f Docs/publish/ Docs/gh-pages/api"):
        os.system(cmd)

    logging.info("Committing new docs")
    for cmd in ("cd Docs/gh-pages && git add -A .",
                'cd Docs/gh-pages && git commit -am "Three20 ' + version + ' Documentation"',
                "cd Docs/gh-pages && git push origin gh-pages"):
        os.system(cmd)
def main():
    """Parse command-line options and run the requested documentation actions."""
    usage = '''%prog [options]
The Three20 Appledoc Generator Script.
Use this script to generate appledoc
--generate will generate the docs
--publish will publish the new docs into the three20's gh-pages branch
EXAMPLES:
Most common use case:
> %prog --version 1.0.10-dev --generate
'''
    parser = OptionParser(usage=usage)
    parser.add_option("-o", "--generate", dest="generate", action="store_true",
                      help="Generate appledoc")
    parser.add_option("-p", "--publish", dest="publish", action="store_true",
                      help="publish gh-pages")
    parser.add_option("-v", "--version", dest="version",
                      help="Project version")
    parser.add_option("", "--verbose", dest="verbose", action="store_true",
                      help="Display verbose output")
    (options, args) = parser.parse_args()

    # Verbose flag drops the log threshold from WARNING to INFO.
    logging.basicConfig(level=logging.INFO if options.verbose else logging.WARNING)

    did_anything = False
    if options.generate:
        did_anything = True
        generate_appledoc(options.version)
    if options.publish:
        did_anything = True
        publish_ghpages(options.version)
    if not did_anything:
        # No action requested: show usage instead of silently exiting.
        parser.print_help()
# Script entry point: main() returns None, so the process exits with status 0.
if __name__ == "__main__":
    sys.exit(main())
| Python |
#!/usr/bin/env python
#
# hanzim2dict
#
# Original version written by Michael Robinson (robinson@netrinsics.com)
# Version 0.0.2
# Copyright 2004
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Usage: Run hanzim2dict in a directory containing the "zidianf.gb",
# "cidianf.gb", and "sanzidianf.gb" files from the Hanzi Master distribution
# (available at http://zakros.ucsd.edu/~arobert/hanzim.html). The output
# will be a StarDict dictionary in 2.4.2 format: hanzim.dict, hanzim.idx,
# and hanzim.ifo
#
# The dictionary and index files may be compressed as follows:
# $ gzip -9 hanzim.idx
# $ dictzip hanzim.dict
#
from string import split
from codecs import getdecoder, getencoder
from struct import pack
class Word:
    """A dictionary headword with its accumulated definition lines."""

    def __init__(self, code, definition):
        # code: UTF-8 headword; definition list starts with the first entry.
        self.code = code
        self.definition = [definition]

    def add(self, definition):
        """Append one more definition line for this headword."""
        self.definition.append(definition)
# Map of UTF-8 headword -> Word (all accumulated definitions).
wordmap = {}

# Codec helpers: the Hanzi Master files are GB2312; StarDict wants UTF-8.
fromGB = getdecoder("GB2312")
toUTF = getencoder("utf_8")

# Pass 1: single-character entries from zidianf.gb.
# Columns (tab-separated): hanzi, variant/simplified info, pinyin, gloss.
file = open("zidianf.gb", "r")
lines = map(lambda x: split(x[:-1], '\t'), file.readlines())
for line in lines:
    code = toUTF(fromGB(line[0])[0])[0]
    pinyin = line[2]
    definition = '<'+pinyin+'> '+line[3]+' ['+line[1]+']'
    if wordmap.has_key(code):
        wordmap[code].add(definition)
    else:
        wordmap[code] = Word(code, definition)

# Pass 2: multi-character entries.  The headword column carries a two-byte
# suffix (kept as the bracketed tag) that is stripped before decoding.
# NOTE(review): the header comment mentions "sanzidianf.gb" but this opens
# "sanzicidianf.gb" -- confirm which filename the distribution actually uses.
for filename in ("cidianf.gb", "sanzicidianf.gb"):
    file = open(filename, "r")
    lines = map(lambda x: split(x[:-1], '\t'), file.readlines())
    for line in lines:
        if len(line) < 2:
            # Malformed row: report its field count and skip it.
            print len(line)
            continue
        code = toUTF(fromGB(line[0][:-2])[0])[0]
        definition = line[1]+' ['+line[0][-1:]+']'
        if wordmap.has_key(code):
            wordmap[code].add(definition)
        else:
            wordmap[code] = Word(code, definition)

# Emit the StarDict 2.4.2 triple: .dict (articles), .idx (sorted keys with
# offset/length into .dict), .ifo (metadata).
dict = open("hanzim.dict", "wb")
idx = open("hanzim.idx", "wb")
ifo = open("hanzim.ifo", "wb")

offset = 0
count = 0
keylen = 0   # running total of key byte lengths, used for idxfilesize below
keys = list(wordmap.keys())
keys.sort()
for key in keys:
    word = wordmap[key]
    # Join this word's definitions with newlines (no trailing newline).
    deftext = ""
    multi = False
    for d in word.definition:
        if multi:
            deftext += '\n'
        deftext += d
        multi = True
    dict.write(deftext)
    # idx record: key, NUL, big-endian 32-bit offset and length.
    idx.write(key+'\0')
    idx.write(pack("!I", offset))
    idx.write(pack("!I", len(deftext)))
    offset += len(deftext)
    count += 1
    keylen += len(key)
dict.close()
idx.close()

# Each idx record is len(key) + 1 (NUL) + 8 (two 32-bit ints) bytes,
# hence idxfilesize = keylen + count*9.
ifo.write("StarDict's dict ifo file\n")
ifo.write("version=2.4.2\n")
ifo.write("bookname=Hanzi Master 1.3\n")
ifo.write("wordcount="+str(count)+"\n")
ifo.write("idxfilesize="+str(keylen+(count*9))+"\n")
ifo.write("author=Adrian Robert\n")
ifo.write("email=arobert@cogsci.ucsd.edu\n")
ifo.write("website=http://zakros.ucsd.edu/~arobert/hanzim.html\n")
ifo.write("sametypesequence=m\n")
ifo.close()
| Python |
# This tool convert KangXiZiDian djvu files to tiff files.
# Download djvu files: http://bbs.dartmouth.edu/~fangq/KangXi/KangXi.tar
# Character page info: http://wenq.org/unihan/Unihan.txt as kIRGKangXi field.
# Character seek position in Unihan.txt http://wenq.org/unihan/unihandata.txt
# DjVuLibre package provides the ddjvu tool.
# The 410 page is bad, but it should be blank page in fact. so just remove 410.tif
import os
if __name__ == "__main__":
    # Output directory for the rendered pages.
    os.system("mkdir tif")
    # The djvu file contains pages 1..1683; render each one to a TIFF.
    for page_number in range(1, 1683+1):
        page = str(page_number)
        print(page)
        os.system("ddjvu -format=tiff -page="+ page + " -scale=100 -quality=150 KangXiZiDian.djvu"+ " tif/" + page + ".tif")
| Python |
#!/usr/bin/python
# WinVNKey Hannom Database to Stardict dictionary source Conversion Tool
# coded by wesnoth@ustc on 070804
# http://winvnkey.sourceforge.net
import sys, os, string, types, pprint
infileencoding = 'utf-16-le'
outfileencoding = 'utf-8'
def showhelp():
    """Print the command-line usage banner to stdout."""
    sys.stdout.write("Usage: %s filename\n" % sys.argv[0])
def ishantu(str):
    """Return True when str starts with a CJK character (code point > U+2E80)."""
    # An empty string cannot be Han text.
    return bool(str) and ord(str[0]) > 0x2e80
def mysplit(line):
    """Split line at the first whitespace that is outside double quotes.

    Returns [] for a blank line, [head] when there is nothing after the
    head token, and [head, tail] otherwise (tail is stripped).
    """
    line = line.lstrip()
    in_quotes = False
    cut = 0
    for cut, ch in enumerate(line):
        if not in_quotes and ch.isspace():
            break
        if ch == u'"':
            in_quotes = not in_quotes
    else:
        # No unquoted whitespace found: the whole line is the head token.
        cut = len(line)
    if cut == 0:
        return []
    head = line[:cut]
    tail = line[cut:].strip()
    if tail == u'':
        return [head]
    return [head, tail]
if __name__ == '__main__':
    if len(sys.argv) <> 2:
        showhelp()
    else:
        fp = open(sys.argv[1], 'r')
        print 'Reading file...'
        # Input is UTF-16-LE; decode the whole file and split into lines.
        lines = unicode(fp.read(), infileencoding).split(u'\n')
        lineno = 0
        hugedict = {}   # Han character -> list of quoc ngu readings
        print 'Generating Han-Viet dict...'
        for line in lines:
            lineno += 1
            # Strip CR (CRLF input) and a possible BOM on the first line.
            if line.endswith(u'\r'):
                line = line[:-1]
            if line.startswith(u'\ufeff'):
                line = line[1:]
            # Drop trailing '#' comments.
            ind = line.find(u'#')
            if ind >= 0:
                line = line[:ind]
            # Split into [hantu, readings]; quoting protects inner spaces.
            line = mysplit(line)
            if len(line) == 0:
                continue
            elif len(line) == 1:
                continue # ignore this incomplete line
            # Unquote the headword; accept U+XXXX escapes for the character.
            if line[0].startswith(u'"') and line[0].endswith(u'"'):
                line[0] = line[0][1:-1]
            if line[0].startswith(u'U+') or line[0].startswith(u'u+'):
                line[0] = unichr(int(line[0][2:], 16))
            if not ishantu(line[0]):
                continue # invalid Han character
            #print 'error occurred on line %d: %s' % (lineno, `line`)
            if line[1].startswith(u'"') and line[1].endswith(u'"'):
                line[1] = line[1][1:-1]
            # Comma-separated readings; strip whitespace, drop empties.
            line[1] = filter(None, map(string.strip, line[1].split(u',')))
            #hugedict[line[0]] = hugedict.get(line[0], []) + line[1]
            # Append each reading once, preserving first-seen order.
            for item in line[1]:
                if not hugedict.has_key(line[0]):
                    hugedict[line[0]] = [item]
                elif not item in hugedict[line[0]]:
                    hugedict[line[0]] += [item]
            #print lineno, `line`
        #for hantu, quocngu in hugedict.iteritems():
        #    print hantu.encode('utf-8'), ':',
        #    for viettu in quocngu:
        #        print viettu.encode('utf-8'), ',',
        #    print
        fp.close()
        print 'Generating Viet-Han dict...'
        # Invert the mapping: reading -> list of Han characters, deduped.
        dicthuge = {}
        for hantu, quocngu in hugedict.iteritems():
            for viettu in quocngu:
                if not dicthuge.has_key(viettu):
                    dicthuge[viettu] = [hantu]
                elif not hantu in dicthuge[viettu]:
                    dicthuge[viettu] += [hantu]
        # Emit tab-separated UTF-8 source files for the two directions.
        print 'Writing Han-Viet dict...'
        gp = open('hanviet.txt', 'w')
        for hantu, quocngu in hugedict.iteritems():
            gp.write(hantu.encode('utf-8'))
            gp.write('\t')
            gp.write((u', '.join(quocngu)).encode('utf-8'))
            gp.write('\n')
        gp.close()
        print 'Writing Viet-Han dict...'
        gp = open('viethan.txt', 'w')
        for quocngu,hantu in dicthuge.iteritems():
            gp.write(quocngu.encode('utf-8'))
            gp.write('\t')
            gp.write((u' '.join(hantu)).encode('utf-8'))
            gp.write('\n')
        gp.close()
| Python |
#!/usr/bin/env python2
#
# converts XML JMDict to Stardict idx/dict format
# JMDict website: http://www.csse.monash.edu.au/~jwb/j_jmdict.html
#
# Date: 3rd July 2003
# Author: Alastair Tse <acnt2@cam.ac.uk>
# License: BSD (http://www.opensource.org/licenses/bsd-license.php)
#
# Usage: jm2stardict expects the file JMdict.gz in the current working
# directory and outputs to files jmdict-ja-en and jmdict-en-ja
#
# To compress the resulting files, use:
#
# gzip -9 jmdict-en-ja.idx
# gzip -9 jmdict-ja-en.idx
# dictzip jmdict-en-ja.dict
# dictzip jmdict-ja-en.dict
#
# note - dictzip is from www.dict.org
#
import xml.sax
from xml.sax.handler import *
import gzip
import struct, sys, string, codecs,os
def text(nodes):
    """Concatenate the data of all '#text' nodes in the given node list."""
    return "".join(node.data for node in nodes if node.nodeName == "#text")
def strcasecmp(a, b):
    """Case-insensitively compare the keys (first elements) of two pairs.

    Returns a negative, zero or positive int, suitable as a sort comparator.
    Avoids the `cmp` builtin so the function also works on Python 3
    (wrap with functools.cmp_to_key there); behavior under Python 2 is
    unchanged.
    """
    key_a = a[0].lower()
    key_b = b[0].lower()
    # Equivalent to cmp(key_a, key_b): -1, 0 or 1.
    return (key_a > key_b) - (key_a < key_b)
def merge_dup(list):
    """Collapse consecutive entries sharing a key into one entry.

    Expects a sorted list of (key, value) pairs; values of equal adjacent
    keys are joined with newlines.  Returns a new list.
    """
    merged = []
    previous_key = ""
    for entry in list:
        if entry[0] == previous_key:
            # Same key as the entry just emitted: fold this value into it.
            merged[-1] = (merged[-1][0], merged[-1][1] + "\n" + entry[1])
        else:
            merged.append(entry)
        previous_key = entry[0]
    return merged
class JMDictHandler(ContentHandler):
    """SAX handler collecting (kanji, readings, glosses) per JMdict <entry>.

    After parsing, self.mapping holds one (kanji-list, reading-list,
    gloss-list) tuple per entry.  <xref> targets are folded into the gloss
    list; <gloss> elements carrying attributes (non-default language) are
    skipped.
    """

    # Elements whose character data we accumulate into self.buffer.
    _TEXT_STATES = ("keb", "reb", "gloss", "xref")

    def __init__(self):
        self.mapping = []
        self.state = ""
        self.buffer = ""

    def startElement(self, name, attrs):
        if name == "entry":
            # Fresh accumulators for this dictionary entry.
            self.kanji = []
            self.chars = []
            self.gloss = []
            self.state = ""
            self.buffer = ""
        elif name in ("keb", "reb", "xref"):
            self.state = name
        elif name == "gloss" and not attrs:
            # Only attribute-free glosses (default language) are captured.
            self.state = "gloss"

    def endElement(self, name):
        if name == "entry":
            self.mapping.append((self.kanji, self.chars, self.gloss))
        elif name == "keb":
            self.kanji.append(self.buffer)
        elif name == "reb":
            self.chars.append(self.buffer)
        elif name == "gloss" and self.buffer:
            self.gloss.append(self.buffer)
        elif name == "xref":
            self.gloss.append(self.buffer)
        # Any close tag resets the accumulation state.
        self.buffer = ""
        self.state = ""

    def characters(self, ch):
        if self.state in self._TEXT_STATES:
            self.buffer = self.buffer + ch
def map_to_file(dictmap, filename):
    """Serialize sorted (word, article) pairs to filename.dict / filename.idx.

    NOTE(review): the .idx stream starts with a textual header plus a packed
    entry count -- this looks like the converter's own variant of the
    StarDict 2.1 layout; confirm against the consuming reader.
    """
    dict_out = open(filename + ".dict", "wb")
    idx_out = open(filename + ".idx", "wb")
    # Header and metadata first, then the big-endian entry count.
    idx_out.write("StarDict's idx file\nversion=2.1.0\n")
    idx_out.write("bookname=" + filename + "\nauthor=Jim Breen\nemail=j.breen@csse.monash.edu.au\nwebsite=http://www.csse.monash.edu.au/~jwb/j_jmdict.html\ndescription=Convert to stardict by Alastair Tse <liquidx@gentoo.org>, http://www-lce.eng.cam.ac.uk/~acnt2/code/\ndate=2003.07.01\n")
    idx_out.write("sametypesequence=m\n")
    idx_out.write("BEGIN:\n")
    idx_out.write(struct.pack("!I", len(dictmap)))
    offset = 0
    for word, article in dictmap:
        word_utf8 = word.encode("utf-8")
        article_utf8 = article.encode("utf-8")
        # idx record: key, NUL, 32-bit offset and length into the .dict file.
        idx_out.write(word_utf8 + "\0")
        idx_out.write(struct.pack("!I", offset))
        idx_out.write(struct.pack("!I", len(article_utf8)))
        offset += len(article_utf8)
        dict_out.write(article_utf8)
    dict_out.close()
    idx_out.close()
if __name__ == "__main__":
    print "opening xml dict .."
    f = gzip.open("JMdict.gz")
    #f = open("jmdict_sample.xml")
    print "parsing xml file .."
    parser = xml.sax.make_parser()
    handler = JMDictHandler()
    parser.setContentHandler(handler)
    parser.parse(f)
    f.close()
    print "creating dictionary .."
    # create a japanese -> english mappings
    # Each kanji keys an article of readings+glosses; each reading keys an
    # article of kanji+glosses.
    jap_to_eng = []
    for kanji,chars,gloss in handler.mapping:
        for k in kanji:
            key = k
            value = string.join(chars + gloss, "\n")
            jap_to_eng.append((key,value))
        for c in chars:
            key = c
            value = string.join(kanji + gloss, "\n")
            jap_to_eng.append((key,value))
    # Reverse direction: each gloss keys an article of kanji+readings.
    eng_to_jap = []
    for kanji,chars,gloss in handler.mapping:
        for k in gloss:
            key = k
            value = string.join(kanji + chars, "\n")
            eng_to_jap.append((key,value))
    print "sorting dictionary .."
    # Case-insensitive sort groups duplicate keys adjacently for merge_dup.
    jap_to_eng.sort(strcasecmp)
    eng_to_jap.sort(strcasecmp)
    print "merging and pruning dups.."
    jap_to_eng = merge_dup(jap_to_eng)
    eng_to_jap = merge_dup(eng_to_jap)
    print "writing to files.."
    # create dict and idx file
    map_to_file(jap_to_eng, "jmdict-ja-en")
    map_to_file(eng_to_jap, "jmdict-en-ja")
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gimpfu import *
import os
def prepare_image(image, visibleLayers, size, numColors = None):
    """Flatten image to the named layers, scale it, optionally index it.

    image - gimp image object, modified in place
    visibleLayers - names of the layers to keep visible; others are removed
    size - target width/height in pixels
    numColors - palette size for indexed conversion, or None to skip it
    """
    for layer in image.layers:
        if layer.name not in visibleLayers:
            image.remove_layer(layer)
        else:
            layer.visible = True
    gimp.pdb.gimp_image_merge_visible_layers(image, CLIP_TO_IMAGE)
    active = gimp.pdb.gimp_image_get_active_layer(image)
    # Scale the merged layer rather than the image: per the original notes,
    # gimp_image_scale_full left the layer smaller than the canvas
    # (e.g. a 27x31 layer inside a 32x32 image).
    gimp.pdb.gimp_layer_scale_full(active, size, size, False, INTERPOLATION_CUBIC)
    if numColors != None:
        gimp.pdb.gimp_image_convert_indexed(image, NO_DITHER, MAKE_PALETTE, numColors, False, False, "")
def save_image(image, dstFilePath):
    """Save image's active layer to dstFilePath, creating missing directories."""
    target_dir = os.path.dirname(dstFilePath)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    active = gimp.pdb.gimp_image_get_active_layer(image)
    gimp.pdb.gimp_file_save(image, active, dstFilePath, dstFilePath)
    # Release the gimp-side objects once written out.
    gimp.delete(active)
    gimp.delete(image)
def create_icon(origImage, visibleLayers, props):
    """Build one multi-resolution icon image from origImage.

    visibleLayers - names of the layers that must stay visible
    props - sequence of (size, bpp) tuples; bpp None keeps the default depth
    Returns a new image with one layer per props entry, named s0, s1, ...
    """
    iconImage = None
    for index, prop in enumerate(props):
        rendition = gimp.pdb.gimp_image_duplicate(origImage)
        prepare_image(rendition, visibleLayers, prop[0], prop[1])
        rendition.layers[0].name = 's{0}'.format(index)
        if iconImage == None:
            # The first rendition becomes the container image.
            iconImage = rendition
        else:
            # Copy the rendition's layer into the container, then drop it.
            newLayer = gimp.pdb.gimp_layer_new_from_drawable(rendition.layers[0], iconImage)
            gimp.pdb.gimp_image_add_layer(iconImage, newLayer, -1)
            gimp.delete(rendition)
    return iconImage
def stardict_images(srcFilePath, rootDir):
    """GIMP plug-in entry point: generate every StarDict image/icon.

    srcFilePath - multilayer source image (normally <root>/pixmaps/stardict.xcf)
    rootDir - root of the StarDict source tree where images are written;
        when blank it is derived from srcFilePath (two directories up)
    """
    if not rootDir:
        # srcFilePath = rootDir + "/pixmaps/stardict.xcf"
        if not srcFilePath.endswith("/pixmaps/stardict.xcf"):
            print 'Unable to automatically detect StarDict root directory. Specify non-blank root directory parameter.'
            return
        dstDirPath = os.path.dirname(srcFilePath)
        dstDirPath = os.path.dirname(dstDirPath)
    else:
        dstDirPath = rootDir
    """
    print 'srcFilePath = {0}'.format(srcFilePath)
    print 'rootDir = {0}'.format(rootDir)
    print 'dstDirPath = {0}'.format(dstDirPath)
    """
    # Destination file names for every generated image.
    dstStarDict_s128_FilePath=os.path.join(dstDirPath, "pixmaps/stardict_128.png")
    dstStarDict_s32_FilePath=os.path.join(dstDirPath, "pixmaps/stardict_32.png")
    dstStarDict_s16_FilePath=os.path.join(dstDirPath, "pixmaps/stardict_16.png")
    dstStarDict_FilePath=os.path.join(dstDirPath, "pixmaps/stardict.png")
    dstStarDictEditor_s128_FilePath=os.path.join(dstDirPath, "pixmaps/stardict-editor_128.png")
    dstStarDictEditor_s32_FilePath=os.path.join(dstDirPath, "pixmaps/stardict-editor_32.png")
    dstStarDictEditor_s16_FilePath=os.path.join(dstDirPath, "pixmaps/stardict-editor_16.png")
    dstStarDictIconFilePath=os.path.join(dstDirPath, "pixmaps/stardict.ico")
    dstStarDictEditorIconFilePath=os.path.join(dstDirPath, "pixmaps/stardict-editor.ico")
    dstStarDictUninstIconFilePath=os.path.join(dstDirPath, "pixmaps/stardict-uninst.ico")
    dstDockletNormalFilePath=os.path.join(dstDirPath, "src/pixmaps/docklet_normal.png")
    dstDockletScanFilePath=os.path.join(dstDirPath, "src/pixmaps/docklet_scan.png")
    dstDockletStopFilePath=os.path.join(dstDirPath, "src/pixmaps/docklet_stop.png")
    dstDockletGPENormalFilePath=os.path.join(dstDirPath, "src/pixmaps/docklet_gpe_normal.png")
    dstDockletGPEScanFilePath=os.path.join(dstDirPath, "src/pixmaps/docklet_gpe_scan.png")
    dstDockletGPEStopFilePath=os.path.join(dstDirPath, "src/pixmaps/docklet_gpe_stop.png")
    dstWordPickFilePath=os.path.join(dstDirPath, "src/win32/acrobat/win32/wordPick.bmp")
    origImage=gimp.pdb.gimp_file_load(srcFilePath, srcFilePath)
    # Application PNGs at several sizes ("book1" + "book2" layers).
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2"), 128)
    save_image(image, dstStarDict_s128_FilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2"), 32)
    save_image(image, dstStarDict_s32_FilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2"), 16)
    save_image(image, dstStarDict_s16_FilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2"), 64)
    save_image(image, dstStarDict_FilePath)
    # Editor PNGs additionally show the "edit" layer.
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2", "edit"), 128)
    save_image(image, dstStarDictEditor_s128_FilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2", "edit"), 32)
    save_image(image, dstStarDictEditor_s32_FilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2", "edit"), 16)
    save_image(image, dstStarDictEditor_s16_FilePath)
    # Multi-resolution Windows .ico files (see create_icon for prop format).
    image = create_icon(origImage, ("book1", "book2"),
        ((16, None), (32, None), (48, None), (16, 256), (32, 256), (48, 256), (256, None))
        )
    save_image(image, dstStarDictIconFilePath)
    image = create_icon(origImage, ("book1", "book2", "edit"),
        ((16, None), (32, None), (48, None), (16, 256), (32, 256), (48, 256), (256, None))
        )
    save_image(image, dstStarDictEditorIconFilePath)
    image = create_icon(origImage, ("book1", "book2", "cross"),
        ((16, None), (32, None), (48, None), (16, 256), (32, 256), (48, 256), (256, None))
        )
    save_image(image, dstStarDictUninstIconFilePath)
    # Tray/docklet images: 32px for desktop, 16px for GPE.
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2"), 32)
    save_image(image, dstDockletNormalFilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2", "search"), 32)
    save_image(image, dstDockletScanFilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2", "stop"), 32)
    save_image(image, dstDockletStopFilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2"), 16)
    save_image(image, dstDockletGPENormalFilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2", "search"), 16)
    save_image(image, dstDockletGPEScanFilePath)
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2", "stop"), 16)
    save_image(image, dstDockletGPEStopFilePath)
    # See AVToolButtonNew function in PDF API Reference
    # Recommended icon size is 18x18, but it looks too small...
    image = gimp.pdb.gimp_image_duplicate(origImage)
    prepare_image(image, ("book1", "book2"), 22)
    # BMP has no alpha: flatten against a light-gray background.
    gimp.set_background(192, 192, 192)
    gimp.pdb.gimp_layer_flatten(image.layers[0])
    save_image(image, dstWordPickFilePath)
# Register this script with GIMP as the "stardict_images" plug-in
# (menu: Toolbox -> Tools -> stardict images) and enter the gimpfu
# plug-in main loop.
register(
    "stardict_images",
    "Create images for StarDict",
    "Create images for StarDict",
    "StarDict team",
    "GPL",
    "Mar 2011",
    "<Toolbox>/Tools/stardict images",
    "",
    [
        (PF_FILE, "src_image", "Multilayer image used as source for all other images in StarDict, "
            + "normally that is pixmaps/stardict.xcf is StarDict source tree.", None),
        (PF_DIRNAME, "stardict_dir", "Root directory of StarDict source tree. New images will be saved here.", None)
    ],
    [],
    stardict_images)
main()
| Python |
import sys, string
base = {}
for line in sys.stdin.readlines():
words = string.split(line[:-1], '\t')
if len(words) != 2:
print "Error!"
exit
if base.has_key(words[0]):
base[words[0]] += [words[1]]
else:
base[words[0]] = [words[1]]
keys = base.keys()
keys.sort()
for key in keys:
print key,'\t',
for val in base[key]:
print val,',',
print
| Python |
#!/usr/bin/env python
#
# uyghur2dict
# By Abdisalam (anatilim@gmail.com), inspired by Michael Robinson's hanzim2dict converter.
#
# Original version, hanzim2dict, written by Michael Robinson (robinson@netrinsics.com)
# Version 0.0.2
# Copyright 2004
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Usage: Run hanzim2dict in a directory containing the "zidianf.gb",
# "cidianf.gb", and "sanzidianf.gb" files from the Hanzi Master distribution
# (available at http://zakros.ucsd.edu/~arobert/hanzim.html). The output
# will be a StarDict dictionary in 2.4.2 format: hanzim.dict, hanzim.idx,
# and hanzim.ifo
#
# The dictionary and index files may be compressed as follows:
# $ gzip -9 hanzim.idx
# $ dictzip hanzim.dict
#
from string import split
from struct import pack
class Word:
    """A headword (`code`) plus every definition seen for it, in input order."""

    def __init__(self, code, definition):
        # Start the definition list with the first definition encountered.
        self.code = code
        self.definition = [definition]

    def add(self, definition):
        """Attach one more definition to this headword."""
        self.definition += [definition]
# Build the StarDict .dict/.idx/.ifo triple from the tab-separated source.
wordmap = {}
# Source file: one entry per line, "<headword>\t\t<definition>".
srcfile = open("ChineseUyghurStarDict.txt", "r")
lines = map(lambda x: split(x[:-1], '\t\t'), srcfile.readlines())
srcfile.close()
for line in lines:
    code = line[0]
    definition = line[1]
    if code in wordmap:
        wordmap[code].add(definition)
    else:
        wordmap[code] = Word(code, definition)

# (renamed from `dict`/`file` to stop shadowing the builtins)
dictfile = open("Anatilim_Chinese_Uyghur.dict", "wb")
idx = open("Anatilim_Chinese_Uyghur.idx", "wb")
ifo = open("Anatilim_Chinese_Uyghur.ifo", "wb")
offset = 0
count = 0
keylen = 0
keys = list(wordmap.keys())
keys.sort()
for key in keys:
    word = wordmap[key]
    # BUG FIX: the original loop did `deftext = d` for every definition,
    # overwriting the previous one, so only the LAST definition of each
    # headword was ever written even though Word.add() collected them all.
    # Join every definition, one per line (plain text, sametypesequence=m).
    deftext = '\n'.join(word.definition)
    deftext += '\0'
    dictfile.write(deftext)
    # Index entry layout: headword + NUL + big-endian offset + size.
    # That is len(key) + 9 bytes per entry - the idxfilesize formula
    # below depends on this.
    idx.write(key+'\0')
    idx.write(pack("!I", offset))
    idx.write(pack("!I", len(deftext)))
    offset += len(deftext)
    count += 1
    keylen += len(key)
dictfile.close()
idx.close()
ifo.write("StarDict's dict ifo file\n")
ifo.write("version=2.4.2\n")
ifo.write("bookname=Anatilim 《汉维词典》-- Anatilim Chinese Uyghur Dictionary\n")
ifo.write("wordcount="+str(count)+"\n")
ifo.write("idxfilesize="+str(keylen+(count*9))+"\n")
ifo.write("author=Abdisalam\n")
ifo.write("email=anatilim@gmail.com\n")
ifo.write("description=感谢新疆维吾尔自治区语委会、新疆青少年出版社为我们提供《汉维词典》的词库\n")
ifo.write("sametypesequence=m\n")
ifo.close()
import sys, string

# Convert tab-separated rhyme-table rows from stdin into StarDict tabfile
# lines keyed by the romanised reading (romazi).
for line in sys.stdin.readlines():
    words = string.split(line[:-1], '\t')
    # Column layout of the input row.  Variable names follow Chinese
    # historical-phonology terms; presumed meanings below - confirm
    # against the actual data source:
    muci = words[1]      # rhyme-book entry name
    sheng = words[2]     # presumably the initial (sheng)
    deng = words[3]      # presumably the division (deng)
    hu = words[4]        # presumably open/closed mouth (hu)
    yunbu = words[5]     # presumably the rhyme group (yunbu)
    diao = words[6]      # presumably the tone (diao)
    fanqie= words[7]     # fanqie spelling
    she = words[8]       # presumably the rhyme class (she)
    chars = words[9]     # characters that share this reading
    romazi= words[10]    # romanised form - used as the headword
    beizhu= words[12]    # optional remark; empty string when absent
    pinyin= words[13]    # modern pinyin
    psyun = words[22]
    # "\\n" is written literally so stardict-editor later turns it into a
    # real line break inside the definition.
    if beizhu == '':
        print "%s\t%s %s%s%s%s%s%s %sQIE PINYIN%s PSYUN%s\\n%s" % (romazi, muci, sheng, yunbu, she, hu, deng, diao, fanqie, pinyin, psyun, chars)
    else:
        print "%s\t%s %s%s%s%s%s%s %sQIE PINYIN%s PSYUN%s\\n%s\\n%s" % (romazi, muci, sheng, yunbu, she, hu, deng, diao, fanqie, pinyin, psyun, chars, beizhu)
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys, os, string, re, glob
import libxml2dom

# Scrape the 康熙字典 (Kangxi dictionary) tab from every *.htm page in the
# current directory and print "<character>\t<definition>" lines to stdout.
fencoding = 'utf-8'
whattoextract = u'康熙字典'
#def TextInNode(node):
#    result = u''
#    for child in node.childNodes:
#        if child.nodeType == child.TEXT_NODE:
#            result += child.nodeValue
#        else:
#            result += TextInNode(child)
#    return result
filelist = glob.glob('*.htm')
filenum = len(filelist)
num = 0
errorfiles = []
for filename in filelist:
    num += 1
    # Progress goes to stderr so stdout remains a clean data stream.
    print >> sys.stderr, filename, num, 'of', filenum
    try:
        fp = open(filename, 'r')
        doc = libxml2dom.parseString(fp.read(), html=1)
        fp.close()
        # The page hides some paragraphs via a CSS class with
        # "display: none"; recover that class name so those paragraphs
        # can be skipped below.
        style = doc.getElementsByTagName("style")[0].textContent
        style = re.search(r'(?s)\s*\.(\S+)\s*{\s*display:\s*none', style)
        displaynone = style.group(1)
        tabpages = doc.getElementsByTagName("div")
        tabpages = filter(lambda s: s.getAttribute("class") == "tab-page", tabpages)
        for tabpage in tabpages:
            # Keep only the tab whose <h2> heading matches whattoextract.
            found = False
            for node in tabpage.childNodes:
                if node.nodeType == node.ELEMENT_NODE and node.name == 'h2':
                    if node.textContent == whattoextract:
                        found = True
                        break
            if found:
                # The character itself is in a span of class "kszi".
                spans = tabpage.getElementsByTagName("span")
                for span in spans:
                    if span.getAttribute("class") == "kszi":
                        character = span.textContent
                paragraphs = tabpage.getElementsByTagName("p")
                thisitem = character + u'\t'
                for paragraph in paragraphs:
                    # `<>` is the archaic Python 2 spelling of `!=`.
                    if paragraph.getAttribute("class") <> displaynone:
                        #print TextInNode(paragraph).encode(fencoding)
                        text = paragraph.textContent
                        #text = filter(lambda s: not s in u' \t\r\n', text)
                        text = re.sub(r'\s+', r' ', text)
                        thisitem += text + u'\\n'
                print thisitem.encode(fencoding)
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt and
        # SystemExit; the failing file is recorded and processing continues.
        print >> sys.stderr, 'error occured'
        errorfiles += [filename]
        continue
if errorfiles:
    print >> sys.stderr, 'Error files:', '\n'.join(errorfiles)
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Script for decoding Lingea Dictionary (.trd) file
# Result is <header>\t<definition> file, convertable easily
# by stardict-editor from package stardict-tools into native
# Stardict dictionary (stardict.sf.net and www.stardict.org)
#
# Copyright (C) 2007 - Klokan Petr Přidal (www.klokan.cz)
#
# Based on script CobuildConv.rb by Nomad
# http://hp.vector.co.jp/authors/VA005784/cobuild/cobuildconv.html
#
# Version history:
# 0.4 (30.10.2007) Patch by Petr Dlouhy, optional HTML generation
# 0.3 (28.10.2007) Patch by Petr Dlouhy, cleanup, bugfix. More dictionaries.
# 0.2 (19.7.2007) Changes, documentation, first 100% dictionary
# 0.1 (20.5.2006) Initial version based on Nomad specs
#
# Supported dictionaries:
# - Lingea Německý Kapesní slovník
# - Lingea Anglický Kapesní slovník
# - Lingea 2002 series (theoretically)
#
# Modified by:
# - Petr Dlouhy (petr.dlouhy | email.cz)
# Generalization of data block rules, sampleFlag 0x04, sound out fix, data phrase prefix with comment (0x04)
# HTML output, debugging patch, options on command line
#
# <write your name here>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
# VERSION
VERSION = "0.4"
import getopt, sys
def usage():
    """Print command-line help for the decoder to stdout."""
    print "Lingea Dictionary Decoder"
    print "-------------------------"
    print "Version: %s" % VERSION
    print "Copyright (C) 2007 - Klokan Petr Pridal, Petr Dlouhy"
    print
    print "Usage: python lingea-trd-decoder.py DICTIONARY.trd > DICTIONARY.tab"
    print "Result convertion by stardict-tools: /usr/lib/stardict-tools/tabfile"
    print
    print " -o <num> --out-style : Output style"
    print " 0 no tags"
    print " 1 \\n tags"
    print " 2 html tags"
    print " -h --help : Print this message"
    print " -d --debug : Degub"
    print " -r --debug-header : Degub - print headers"
    print " -a --debug-all : Degub - print all records"
    print " -l --debug-limit : Degub limit"
    print
    print "For HTML support in StarDict dictionary .ifo has to contain:"
    print "sametypesequence=g"
    print "!!! Change the .ifo file after generation by tabfile !!!"
    print
try:
opts, args = getopt.getopt(sys.argv[1:], "hdo:ral:", ["help", "debug", "out-style=", "debug-header", "debug-all", "debug-limit="])
except getopt.GetoptError:
usage()
print "ERROR: Bad option"
sys.exit(2)
import locale
DEBUG = False
OUTSTYLE = 2
DEBUGHEADER = False
DEBUGALL = False
DEBUGLIMIT = 1
for o, a in opts:
if o in ("-d", "-debug"):
# DEBUGING !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
DEBUG = True
if o in ("-o", "--out-style"):
# output style
OUTSTYLE = locale.atoi(a)
if OUTSTYLE > 2:
usage()
print "ERROR: Output style not specified"
if o in ("-r", "--debug-header"):
# If DEBUG and DEBUGHEADER, then print just all header records
DEBUGHEADER = True
if o in ("-a", "--debug-all"):
# If DEBUG and DEBUGALL then print debug info for all records
DEBUGALL = True
if o in ("-h", "--help"):
usage()
sys.exit(0)
if o in ("-l", "--debug-limit"):
# Number of wrong records for printing to stop during debugging
DEBUGLIMIT = locale.atoi(a)
# FILENAME is a first parameter on the commandline now
if len(args) == 1:
FILENAME = args[0]
else:
usage()
print "ERROR: You have to specify .trd file to decode"
sys.exit(2)
from struct import *
import re
# 6-bit alphabet of the .trd encoding.  Codes below 40 map directly to a
# character; codes >= 40 are escape markers that combine with the NEXT
# code (see decode_alpha_postprocessing).
alpha = ['\x00', 'a','b','c','d','e','f','g','h','i',
    'j','k','l','m','n','o','p','q','r','s',
    't','u','v','w','x','y','z','#AL27#','#AL28#','#AL29#',
    '#AL30#','#AL31#', ' ', '.', '<', '>', ',', ';', '-', '#AL39#',
    '#GRAVE#', '#ACUTE#', '#CIRC#', '#TILDE#', '#UML#', '#AL45#', '#AL46#', '#CARON#', '#AL48#', '#CEDIL#',
    '#AL50#', '#AL51#', '#GREEK#', '#AL53#', '#AL54#', '#AL55#', '#AL56#', '#AL57#', '#AL58#', '#SYMBOL#',
    '#AL60#', '#UPCASE#', '#SPECIAL#', '#UNICODE#'] # 4 bytes after unicode
# Code following the #UPCASE# escape selects from this table.
upcase = ['#UP0#','#UP1#','#UP2#','#UP3#','#UP4#','#UP5#','#UP6#','#UP7#','#UP8#','#UP9#',
    '#UP10#','#UP11#','#UP12#','#UP13#','#UP14#','#UP15#','#UP16#','#UP17#','#UP18#','#UP19#',
    '#UP20#','#UP21#','#UP22#','#UP23#','#UP24#','#UP25#','#UP26#','#UP27#','#UP28#','#UP29#',
    '#UP30#','#UP31#','A','B','C','D','E','F','G','H',
    'I','J','K','L','M','N','O','P','Q','R',
    'S','T','U','V','W','X','Y','Z','#UP58#','#UP59#',
    '#UP60#','#UP61#','#UP62#','#UP63#']
# IPA replacements applied to upcase symbols inside pronunciation strings
# (see pronunciation_encode).
upcase_pron = ['#pr0#', '#pr1#','#pr2#','#pr3#','#pr4#','#pr5#','#pr6#','#pr7#','#pr8#','#pr9#',
    '#pr10#', '#pr11#','#pr12#','#pr13#','#pr14#','#pr15#','#pr16#','#pr17#','#pr18#','#pr19#',
    '#pr20#', '#pr21#','#pr22#','#pr23#','#pr24#','#pr25#','#pr26#','#pr27#','#pr28#','#pr29#',
    '#pr30#', '#pr31#','ɑ','#pr33#','ʧ','ð','ə','ɜ','#pr38#','æ',
    'ɪ', 'ɭ','#pr42#','ŋ','#pr44#','ɳ','ɔ','#pr47#','ɒ','ɽ',
    'ʃ', 'θ','ʊ','ʌ','#pr54#','#pr55#','#pr56#','ʒ','#pr58#','#pr59#',
    '#pr60#', '#pr61#','#pr62#','#pr63#']
# Code following the #SYMBOL# escape.
symbol = ['#SY0#', '#SY1#','#SY2#','#SY3#','§','#SY5#','#SY6#','#SY7#','#SY8#','#SY9#',
    '#SY10#', '#SY11#','#SY12#','#SY13#','#SY14#','™','#SY16#','#SY17#','¢','£',
    '#SY20#', '#SY21#','#SY22#','#SY23#','©','#SY25#','#SY26#','#SY27#','®','°',
    '#SY30#', '²','³','#SY33#','#SY34#','#SY35#','¹','#SY37#','#SY38#','#SY39#',
    '½', '#SY41#','#SY42#','×','÷','#SY45#','#SY46#','#SY47#','#SY48#','#SY49#',
    '#SY50#', '#SY51#','#SY52#','#SY53#','#SY54#','#SY55#','#SY56#','#SY57#','#SY58#','#SY59#',
    '#SY60#', '#SY61#','#SY62#','#SY63#']
# Code following the #SPECIAL# escape (punctuation and digits).
special = ['#SP0#', '!','"','#','$','%','&','\'','(',')',
    '*', '+','#SP12#','#SP13#','#SP14#','/','0','1','2','3',
    '4', '5','6','7','8','9',':',';','<','=',
    '>', '?','@','[','\\',']','^','_','`','{',
    '|', '}','~','#SP43#','#SP44#','#SP45#','#SP46#','#SP47#','#SP48#','#SP49#',
    '#SP50#', '#SP51#','#SP52#','#SP53#','#SP54#','#SP55#','#SP56#','#SP57#','#SP58#','#SP59#',
    '#SP60#', '#SP61#','#SP62#','#SP63#']
# Word-class labels addressed by the wordclass byte in record headers.
wordclass = ('#0#','n:','adj:','pron:','#4#','v:','adv:','prep:','#8#','#9#',
    'intr:','phr:','#12#','#13#','#14#','#15#','#16#','#17#','#18#','#19#',
    '#20#','#21#','#22#','#23#','#24#','#25#','#26#','#27#','#28#','#29#',
    '#30#','#31#')
# `tag` maps a logical record element to an (open, close) marker pair that
# decode() wraps around it; exactly one table is chosen by OUTSTYLE.
# NOTE(review): decode() also looks up tag['ds'] in its phrase-block
# branches, and none of the three styles define that key - reaching those
# branches would raise KeyError.
if OUTSTYLE == 0:
    tag = {
        'db':('' ,''), #Data begining
        'rn':('' ,'\t'), #Record name
        'va':('' ,' '), #Header variant
        'wc':('(' ,')'), #WordClass
        'pa':('' ,' '), #Header parts
        'fo':('(' ,') '), #Header forms
        'on':('(' ,')' ), #Header origin note
        'pr':('[' ,']'), #Header pronunciation
        'dv':('{' ,'} '), #Header dataVariant
        'sa':('`' ,'`' ), #Data sample
        'sw':('' ,''), #Data sample wordclass; is no printed by Lingea
        'do':('`' ,'`' ), #Data origin note
        'df':('' ,' '), #Data definition
        'ps':('"' ,'" '), #Data phrase short form
        'pg':('"' ,' = '), #Data phrase green
        'pc':('`' ,'`'), #Data phrase comment; this comment is not printed by Lingea), but it seems useful
        'p1':('"' ,' = '), #Data phrase 1
        'p2':('' ,'" ' ), #Data phrase 2
        'sp':('"' ,' = ' ),#Data simple phrase
        'b1':('"' ,' = '), #Data phrase (block) 1
        'b2':('" ' ,''), #Data phrase (block) 2
        }
if OUTSTYLE == 1:
    tag = {
        'db':('•' ,''), #Data begining
        'rn':('' ,'\t'), #Record name
        'va':('' ,' '), #Header variant
        'wc':('' ,'\\n'), #WordClass
        'pa':('' ,':\\n'), #Header parts
        'fo':('(' ,') '), #Header forms
        'on':('(' ,')\\n' ), #Header origin note
        'pr':('[' ,']\\n'), #Header pronunciation
        'dv':('{' ,'} '), #Header dataVariant
        'sa':(' ' ,'\\n' ), #Data sample
        'sw':('' ,''), #Data sample wordclass; is not printed by Lingea
        'do':(' ' ,' ' ), #Data origin note
        'df':(' ' ,'\\n'), #Data definition
        'ps':(' ' ,'\\n'), #Data phrase short form
        'pg':(' ' ,' '), #Data phrase green
        'pc':(' ' ,' '), #Data phrase comment; this comment is not printed by Lingea), but it seems useful
        'p1':(' ' ,' '), #Data phrase 1
        'p2':(' ' ,'\\n' ), #Data phrase 2
        'sp':('' ,'\\n' ), #Data simple phrase
        'b1':('"' ,' = '), #Data phrase (block) 1
        'b2':('" ' ,''), #Data phrase (block) 2
        }
if OUTSTYLE == 2:
    # HTML (Pango-markup) output; requires sametypesequence=g in the .ifo.
    tag = {
        'db':('•' ,''), #Data begining
        'rn':('' ,'\t'), #Record name
        'va':('' ,' '), #Header variant
        'wc':('<span size="larger" color="darkred" weight="bold">','</span>\\n'), #WordClass
        'pa':('<span size="larger" color="darkred" weight="bold">',':</span>\\n'), #Header parts
        'fo':('(' ,') '), #Header forms
        'on':('<span color="blue">(' ,')</span>\\n' ), #Header origin note
        'pr':('[' ,']\\n'), #Header pronunciation
        'dv':('{' ,'} '), #Header dataVariant
        'sa':(' <span color="darkred" weight="bold">' ,'</span>\\n' ), #Data sample
        'sw':('' ,''), #Data sample wordclass; is not printed by Lingea
        'do':(' <span color="darkred" weight="bold">' ,'</span> ' ), #Data origin note
        'df':(' <span weight="bold">' ,'</span>\\n'), #Data definition
        'ps':(' <span color="dimgray" weight="bold">' ,'</span>\\n'), #Data phrase short form
        'pg':(' <span color="darkgreen" style="italic">' ,'</span> '), #Data phrase green
        'pc':(' <span color="darkgreen" style="italic">' ,'</span> '), #Data phrase comment; this comment is not printed by Lingea), but it seems useful
        'p1':(' <span color="dimgray" style="italic">' ,'</span> '), #Data phrase 1
        'p2':(' ' ,'\\n' ), #Data phrase 2
        'sp':('<span color="cyan">' ,'</span>\\n' ), #Data simple phrase
        'b1':('"' ,' = '), #Data phrase (block) 1
        'b2':('" ' ,''), #Data phrase (block) 2
        }
# ANSI escape-code helpers used to colorize debug printouts.
def purple(c):
    """Wrap *c* in bold-magenta ANSI escapes."""
    return '\x1b[1;35m' + c + '\x1b[0m'

def blue(c):
    """Wrap *c* in bold-blue ANSI escapes."""
    return '\x1b[1;34m' + c + '\x1b[0m'

def cyan(c):
    """Wrap *c* in cyan ANSI escapes."""
    return '\x1b[36m' + c + '\x1b[0m'

def gray(c):
    """Wrap *c* in bold (bright) ANSI escapes."""
    return '\x1b[1m' + c + '\x1b[0m'
def getRec(n):
    """Return the raw byte stream of record *n*, or '' when out of range.

    Relies on module globals: `f` (the open .trd file), `index` (record
    start addresses) and `entryCount`.
    """
    if not 0 <= n < entryCount:
        return ''
    start, end = index[n], index[n + 1]
    f.seek(start)
    return f.read(end - start)
def decode_alpha(stream, nullstop=True):
    """Unpack the 6-bit character codes packed into *stream* (a byte sequence).

    Four 6-bit codes occupy three bytes; decoding stops at the first NULL
    code unless nullstop is False.  Returns (decoded string, index of the
    last byte consumed - 1), matching the original calling convention.
    """
    char_idx = 0   # how many 6-bit codes have been produced
    byte_idx = 0   # position in the underlying byte stream
    codes = []
    while byte_idx < len(stream):
        phase = char_idx % 4
        if phase == 0:
            code = stream[byte_idx] >> 2
            byte_idx += 1
        elif phase == 1:
            code = (stream[byte_idx - 1] & 0x03) << 4 | stream[byte_idx] >> 4
            byte_idx += 1
        elif phase == 2:
            code = (stream[byte_idx - 1] & 0x0F) << 2 | (stream[byte_idx] & 0xC0) >> 6
            byte_idx += 1
        else:
            # Fourth code lives entirely in the low six bits of the
            # previous byte; no byte is consumed.
            code = stream[byte_idx - 1] & 0x3F
        if code == 0 and nullstop:
            break
        # TODO: ENCODE UNICODE 4 BYTE STREAM!!! and put it after #UNICODE# as unichr()
        char_idx += 1
        codes.append(code)
    return decode_alpha_postprocessing(codes), byte_idx - 1
def decode_alpha_postprocessing( input ):
    """Lowlevel alphabet decoding postprocessing, combines tuples into one character.

    input - list of 6-bit codes from decode_alpha; codes >= 40 are escape
    markers (see `alpha`) that combine with the following code.
    NOTE: pads (mutates) the caller's list so the one-code lookahead below
    is always in range.
    """
    result = ""
    input.extend([0x00]*5)
    # UPCASE, UPCASE_PRON, SYMBOL, SPECIAL
    skip = False
    for i in range(0,len(input)-1):
        if skip:
            # The previous iteration consumed this code as the second half
            # of an escape pair.
            skip = False
            continue
        bc = input[i]
        c = alpha[bc]
        bc1 = input[i+1]
        c1 = alpha[bc1]
        if bc < 40:
            # Plain character: emit it directly.
            result += c
        else:
            # Escape marker: combine with the next code (diacritics,
            # case/symbol tables); unknown pairs are emitted as #...# markers.
            if c == "#GRAVE#":
                if c1 == 'a': result += 'à'
                else: result += '#GRAVE%s#' % c1
            elif c == "#UML#":
                if c1 == 'o': result += 'ö'
                elif c1 == 'u': result += 'ü'
                elif c1 == 'a': result += 'ä'
                elif c1 == ' ': result += 'Ä'
                elif c1 == '#AL46#': result += 'Ö'
                elif c1 == '#GREEK#': result += 'Ü'
                else: result += '#UML%s#' % c1
            elif c == "#ACUTE#":
                if c1 == 'a': result += 'á'
                elif c1 == 'e': result += 'é'
                elif c1 == 'i': result += 'í'
                elif c1 == 'o': result += 'ó'
                elif c1 == 'u': result += 'ú'
                elif c1 == 'y': result += 'ý'
                elif c1 == ' ': result += 'Á'
                elif c1 == '#GRAVE#': result += 'Í'
                else: result += '#ACUTE%s#' % c1
            elif c == "#CARON#":
                if c1 == 'r': result += 'ř'
                elif c1 == 'c': result += 'č'
                elif c1 == 's': result += 'š'
                elif c1 == 'z': result += 'ž'
                elif c1 == 'e': result += 'ě'
                elif c1 == 'd': result += 'ď'
                elif c1 == 't': result += 'ť'
                elif c1 == 'a': result += 'å'
                elif c1 == 'u': result += 'ů'
                elif c1 == 'n': result += 'ň'
                elif c1 == '<': result += 'Č'
                elif c1 == '#CEDIL#': result += 'Ř'
                elif c1 == '#AL50#': result += 'Š'
                elif c1 == '#AL57#': result += 'Ž'
                else: result += '#CARON%s#' % c1
            elif c == "#UPCASE#":
                result += upcase[bc1]
            elif c == "#SYMBOL#":
                result += symbol[bc1]
            elif c == "#AL51#":
                if c1 == 's': result += 'ß'
            elif c == "#AL48#":
                result += "#AL48#%s" % c1
            elif c == "#SPECIAL#":
                result += special[bc1]
            elif c == "#UNICODE#":
                result += '#UNICODE%s#' % bc1
            elif c == "#CIRC#":
                if c1 == 'a': result += 'â'
                # NOTE(review): fallback label says CARON in the CIRC
                # branch - looks like a copy/paste slip (kept as-is).
                else: result += '#CARON%s#' % c1
            else:
                result += '%sX%s#' % (c[:-1], bc1)
            skip = True
    return result
def pronunciation_encode(s):
    """Replace every '#UPn#' upcase marker in *s* with its IPA symbol.

    The `upcase` and `upcase_pron` tables are parallel (64 entries each).
    """
    for plain, ipa in zip(upcase, upcase_pron):
        s = s.replace(plain, ipa)
    return s
# Lingea inline tags <d...>, <w...>, <y...>, <c...>; the payload after the
# one-letter tag name is captured for decode_tag_postprocessing().
re_d = re.compile(r'<d(.*?)>')
re_w = re.compile(r'<w(.*?)>')
re_y = re.compile(r'<y(.*?)>')
re_c = re.compile(r'<c(.*?)>')
def decode_tag_postprocessing(input):
    """Rewrite Lingea-internal <d|w|y|c ...> tags for the selected OUTSTYLE.

    General information in http://www.david-zbiral.cz/El-slovniky-plnaverze.htm#_Toc151656799
    # TODO: Better output handling
    """
    s = input
    if OUTSTYLE in (0, 1):
        # Plain and \n-tag styles use the same rewrite: keep the tag
        # payload, parenthesised.
        for rx in (re_d, re_w, re_y, re_c):
            s = rx.sub(r'(\1)', s)
    elif OUTSTYLE == 2:
        # HTML style: small blue spans; only <d...> keeps parentheses.
        s = re_d.sub(r'<span size="small" color="blue">(\1)</span>', s)
        s = re_w.sub(r'<span size="small" color="blue" style="italic">\1</span>', s)
        s = re_y.sub(r'<span size="small" color="blue" style="italic">\1</span>', s)
        s = re_c.sub(r'<span size="small" color="blue" style="italic">\1</span>', s)
    return s
def toBin(b):
    """Prettify debug output format: hex(bin)dec, e.g. toBin(5) -> '0x05(00000101)005'."""
    # format(b, '08b') yields the zero-padded binary digits directly,
    # replacing the original hand-rolled digit accumulation loop.
    return "0x%02X(%s)%03d" % (b, format(b, '08b'), b)
def out( comment = "", skip = False):
    """Read next byte or string (with skip=True) and output DEBUG info.

    Operates on the module-global byte list `bs` at cursor `pos`:
    - skip=False: consume one byte and return it (int).
    - skip=True: consume a full 6-bit-encoded string and return it.
    A '%s' in *comment* is filled with the value read (DEBUG trace only).
    """
    global bs, pos
    s, triple = decode_alpha(bs[pos:])
    s = s.split('\x00')[0] # give me string until first NULL
    if (comment.find('%') != -1):
        if skip:
            comment = comment % s
        else:
            comment = comment % bs[pos]
    if DEBUG: print "%03d %s %s | %s | %03d" % (pos, toBin(bs[pos]),comment, s, (triple + pos))
    if skip:
        pos += triple + 1
        return s.replace('`','') # Remove '`' character from words
    else:
        pos += 1
        return bs[pos-1]
# Shorthands: outInt consumes one byte, outStr consumes a whole string.
outInt = lambda c: out(c)
outStr = lambda c: out(c, True)
def decode(stream):
    """Decode byte stream of one record, return decoded string with formatting in utf.

    The record is a sequence of flag-driven blocks: a header block, an
    optional sound reference, then `itemCount` data blocks.  Reading is
    strictly sequential via out()/outInt()/outStr() over the globals
    bs/pos.  A trailing '\\n' on the result marks a cleanly decoded
    record (all remaining bytes were NULL padding).
    """
    result = ""
    global bs, pos
    # stream - data byte stream for one record
    bs = unpack("<%sB" % len(stream), stream)
    # bs - list of bytes from stream
    pos = 0
    itemCount = outInt("ItemCount: %s") # Number of blocks in the record
    mainFlag = outInt("MainFlag: %s")
    # HEADER BLOCK
    # ------------
    if mainFlag & 0x01:
        headerFlag = outInt("HeaderFlag: %s") # Blocks in header
        if headerFlag & 0x01:
            result += tag['rn'][0] + outStr("Header record name: %s").replace('_','') + tag['rn'][1] # Remove character '_' from index
        if headerFlag & 0x02:
            result += tag['va'][0] + outStr("Header variant: %s") + tag['va'][1]
        if headerFlag & 0x04:
            s = outInt("Header wordclass: %s")
            if s < 32:
                result += tag['wc'][0] + wordclass[s] + tag['wc'][1]
            else:
                # NOTE(review): string exceptions are illegal since
                # Python 2.6 - this `raise` would itself raise TypeError.
                raise "Header wordclass out of range in: %s" % result
        if headerFlag & 0x08:
            result += tag['pa'][0] + outStr("Header parts: %s") + tag['pa'][1]
        if headerFlag & 0x10:
            result += tag['fo'][0] + outStr("Header forms: %s") + tag['fo'][1]
        if headerFlag & 0x20:
            result += tag['on'][0] + outStr("Header origin note: %s") + tag['on'][1]
        if headerFlag & 0x80:
            result += tag['pr'][0] + pronunciation_encode(outStr("Header pronunciation: %s")) + tag['pr'][1]
    # Header data block
    if mainFlag & 0x02:
        headerFlag = outInt("Header dataFlag: %s") # Blocks in header
        if headerFlag & 0x02:
            result += tag['dv'][0] + outStr("Header dataVariant: %s")+ tag['dv'][1]
        # ??? Link elsewhere
        pass
    # SOUND DATA REFERENCE
    if mainFlag & 0x80:
        # Five opaque bytes; read (to keep the cursor in sync) and dropped.
        outInt("Sound reference byte #1: %s")
        outInt("Sound reference byte #2: %s")
        outInt("Sound reference byte #3: %s")
        outInt("Sound reference byte #4: %s")
        outInt("Sound reference byte #5: %s")
        #out("Sound data reference (5 bytes)", 6)
    # TODO: Test all mainFlags in header!!!!
    #result += ': '
    li = 0
    #print just every first word class identifier
    # TODO: this is not systematic (should be handled by output)
    global lastWordClass
    lastWordClass = 0
    # DATA BLOCK(S)
    # -------------
    for i in range(0, itemCount):
        item = tag['db'][0] + tag['db'][1]
        ol = False
        dataFlag = outInt("DataFlag: %s -----------------------------")
        if dataFlag & 0x01: # small index
            sampleFlag = outInt("Data sampleFlag: %s")
            if sampleFlag & 0x01:
                result += tag['sa'][0] + outStr("Data sample: %s") + tag['sa'][1]
            if sampleFlag & 0x04:
                s = outInt("Data wordclass: %s")
                if s != lastWordClass:
                    if s < 32:
                        result += tag['wc'][0] + wordclass[s] + tag['wc'][1]
                    else:
                        # NOTE(review): string raise - see header note above.
                        raise "Header wordclass out of range in: %s" % result
                    lastWordClass = s
            if sampleFlag & 0x08:
                result += tag['sw'][0] + outStr("Data sample wordclass: %s") + tag['sw'][1]
            if sampleFlag & 0x10:
                # Three bytes of unknown meaning; consumed and ignored.
                outInt("Data sample Int: %s")
                outInt("Data sample Int: %s")
                outInt("Data sample Int: %s")
            if sampleFlag & 0x20:
                item += tag['do'][0] + outStr("Data origin note: %s") + tag['do'][1]
            if sampleFlag & 0x80:
                item += " "
                result += tag['pr'][0] + pronunciation_encode(outStr("Data sample pronunciation: %s")) + tag['pr'][1]
        if dataFlag & 0x02:
            item += " "
            subFlag = outInt("Data subFlag: %s")
            if subFlag == 0x80:
                outStr("Data sub prefix: %s")
            # It seams that data sub prefix content is ignored and there is a generated number for the whole block instead.
            li += 1
            ol = True
        if dataFlag & 0x04: # chart
            pass # ???
        if dataFlag & 0x08: # reference
            item += tag['df'][0] + outStr("Data definition: %s") + tag['df'][1]
        if dataFlag & 0x10:
            pass # ???
        if dataFlag & 0x20: # phrase
            phraseFlag1 = outInt("Data phraseFlag1: %s")
            if phraseFlag1 & 0x01:
                item += tag['ps'][0] + outStr("Data phrase short form: %s") + tag['ps'][1]
            if phraseFlag1 & 0x02:
                phraseCount = outInt("Data phraseCount: %s")
                # NOTE(review): the inner `i` shadows the outer record-item
                # loop variable (harmless in Python, but confusing).
                for i in range(0, phraseCount):
                    phraseComment = outInt("Data phrase prefix")
                    if phraseComment & 0x04:
                        item += tag['pc'][0] + outStr("Data phrase comment: %s") + tag['pc'][1]
                    item += tag['p1'][0] + outStr("Data phrase 1: %s") + tag['p1'][1]
                    item += tag['p2'][0] + outStr("Data phrase 2: %s") + tag['p2'][1]
            if phraseFlag1 & 0x04:
                phraseCount = outInt("Data phraseCount: %s")
                for i in range(0, phraseCount):
                    phraseComment = outInt("Data phrase prefix")
                    if phraseComment & 0x04:
                        item += tag['pc'][0] + outStr("Data phrase 1: %s") + tag['pc'][1]
                    item += tag['pg'][0] + outStr("Data phrase comment: %s") + tag['pg'][1]
                    item += tag['p2'][0] + outStr("Data phrase 2: %s") + tag['p2'][1]
            if phraseFlag1 & 0x08:
                phraseCount = outInt("Data simple phraseCount: %s")
                for i in range(0, phraseCount):
                    item += " "
                    item += tag['sp'][0] + outStr("Data simple phrase: %s") + tag['sp'][1]
            if phraseFlag1 & 0x40:
                item += tag['ps'][0] + outStr("Data phrase short form: %s") + tag['ps'][1]
        # TODO: be careful in changing the rules, to have back compatibility!
        if dataFlag & 0x40: # reference, related language
            #0x01 synonym ?
            #0x02 antonym ?
            pass
        if dataFlag & 0x80: # Phrase block
            flags = [
                out("Data phrase block: %s"),
                out("Data phrase block: %s"),
                out("Data phrase block: %s"),
                out("Data phrase block: %s"),
                out("Data phrase block: %s"),
                out("Data phrase block: %s"),
                out("Data phrase block: %s"),
                out("Data phrase block: %s")]
            if flags == [0x80,0x80,0xF9,0xDF,0x9D,0x00,0x0B,0x01]:
                result += "\\nphr: "
                li = 1
                ol = True
                item += tag['b1'][0]+outStr("Data phrase 1: %s") + tag['b1'][1]
                out("Data phrase block: %s")
                out("Data phrase block: %s")
                out("Data phrase block: %s")
                out("Data phrase block: %s")
                # NOTE(review): tag['ds'] is not defined in any tag table
                # above - reaching this line raises KeyError.
                item += tag['ds'][0] + outStr("Data phrase 2: %s") + tag['ds'][1]
            if flags == [0x80,0x80,0xF9,0xDF,0x9D,0x00,0x23,0x01]:
                result += "\\nphr: "
                li = 1
                ol = True
                item += tag['b1'][0]+outStr("Data phrase 1: %s") + tag['b1'][1]
                out("Data phrase block: %s")
                out("Data phrase block: %s")
                out("Data phrase block: %s")
                out("Data phrase block: %s")
                out("Data phrase block: %s")
                # NOTE(review): tag['ds'] - same undefined-key issue as above.
                item += tag['ds'][0] + outStr("Data phrase 2: %s") + tag['ds'][1]
        if ol:
            # Numbered sub-sense: prefix with its ordinal.
            result += "\\n%d. %s" % (li, item)
        else:
            result += item
    # Remaining bytes must all be NULL padding for the record to count
    # as cleanly decoded; the trailing '\n' signals success to the caller.
    ok = True
    while pos < len(stream):
        ok = (out() == 0x00) and ok
    if ok:
        result += '\n'
    return decode_tag_postprocessing(result)
################################################################
# MAIN
################################################################
f = open(FILENAME,'rb')
# DECODE HEADER OF FILE
copyright = unpack("<64s",f.read(64))[0]
a = unpack("<16L",f.read(64))
entryCount = a[4]        # number of records in the dictionary
indexBaseCount = a[6]    # number of 32-bit base values in the index
indexOffsetCount = a[7]  # total number of record offsets
pos1 = a[8]
indexPos = a[9]          # file position of the index structure
bodyPos = a[10]          # file position of the record data
smallIndex = (a[3] == 2052)
# DECODE INDEX STRUCTURE OF FILE
# Each record address = bodyPos + base + offset * 4; every base covers a
# group of 64 16-bit offsets.
index = []
f.seek(indexPos)
bases = unpack("<%sL" % indexBaseCount, f.read(indexBaseCount * 4))
if smallIndex: # In small dictionaries every base is used 4-times
    bases4 = []
    for i in bases:
        bases4.extend([i,i,i,i])
    bases = bases4
for b in bases:
    offsets = unpack("<64H", f.read(64*2))
    for o in offsets:
        if len(index) < indexOffsetCount:
            #print "Index %s: %s + %s + %s * 4 = %s" % (len(index), bodyPos, b, o, toBin(bodyPos + b + o * 4))
            index.append(bodyPos + b + o * 4)
# DECODE RECORDS
if DEBUG:
    # PRINTOUT DEBUG OF FIRST <DEBUGLIMIT> WRONG RECORDS:
    for i in range(1,entryCount):
        if not DEBUGALL:
            # Silence tracing for the first pass over each record.
            DEBUG = False
        s = decode(getRec(i))
        if DEBUGHEADER:
            # print s.split('\t')[0]
            print s
        if DEBUGLIMIT > 0 and not s.endswith('\n'):
            # Record did not decode cleanly: re-decode it with full tracing.
            DEBUG = True
            print "-"*80
            print "%s) at address %s" % (i, toBin(index[i]))
            print
            s = decode(getRec(i))
            print s
            DEBUGLIMIT -= 1
        DEBUG = True
else:
    # DECODE EACH RECORD AND PRINT IT IN FORMAT FOR stardict-editor <term>\t<definition>
    for i in range(1,entryCount):
        s = decode(getRec(i))
        if s.endswith('\n'):
            # Trailing newline marks a cleanly decoded record (see decode()).
            print s,
        else:
            print s
            print "!!! RECORD STRUCTURE DECODING ERROR !!!"
            print "Please run this script in DEBUG mode and repair DATA BLOCK(S) section in function decode()"
            print "If you succeed with whole dictionary send report (name of the dictionary and source code of script) to slovniky@googlegroups.com"
            break
| Python |
#!/usr/bin/env python3
# This file is part of First Step.
#
# First Step is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
#
# First Step is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with First Step. If not, see <http://www.gnu.org/licenses/>
#
# Copyright (C) Marco Cilloni <marco.cilloni@yahoo.com> 2014
| Python |
import argparse
import atexit
import os
import platform
import subprocess
import sys
# Intermediate .c files scheduled for removal when the compiler exits.
todelete = []


def clean():
    """Best-effort removal of all registered intermediate files at exit.

    A file that has already disappeared is skipped silently; exit-time
    cleanup must never raise.
    """
    for f in todelete:
        try:
            os.remove(f)
        except FileNotFoundError:
            pass  # already gone (e.g. removed by the user)


atexit.register(clean)
# Directory containing this driver script; the helmc1 front-end binary is
# expected to live next to it.
buildpath = os.path.dirname(os.path.abspath(__file__))
helmc1path = os.path.join(buildpath, 'helmc1')
def helmc1(helmfile):
    """Run the helmc1 front-end on *helmfile* and write its C output.

    Returns the path of the generated .c file.
    Exits the process on a bad extension or a non-zero helmc1 status.
    """
    if not helmfile.endswith('.helm'):
        sys.exit((__file__ +
                  ': error: file {} does not end with .helm').format(helmfile))

    # Derive the output name from the suffix itself; str.replace would
    # wrongly rewrite an earlier '.helm' in the middle of the path
    # (e.g. 'my.helmlib.helm' -> 'my.clib.helm').
    cname = helmfile[:-len('.helm')] + '.c'

    # Built-in modules live next to this script; any user-provided
    # HELM_MODULES paths are appended after them.
    newenv = os.environ.copy()
    newenv['HELM_MODULES'] = buildpath + '/libhelm/hemd/'
    if 'HELM_MODULES' in os.environ:
        newenv['HELM_MODULES'] = newenv['HELM_MODULES'] \
            + ':' + os.environ['HELM_MODULES']

    proc = subprocess.Popen([helmc1path, helmfile],
                            env=newenv, stdout=subprocess.PIPE)
    # stderr is not piped, so helmc1 diagnostics pass straight through.
    out, _ = proc.communicate()
    if proc.returncode != 0:
        sys.exit(proc.returncode)

    with open(cname, 'wb') as outfile:
        outfile.write(out)
    return cname
def cc(ccCommand, cfile, ofile=None):
    """Compile *cfile* to an object file with *ccCommand*.

    ofile defaults to the input path with its '.c' suffix replaced by '.o'.
    Exits the process with the compiler's status on failure.
    """
    if not cfile.endswith('.c'):
        sys.exit((__file__
                  + ': error: file {} does not end with .c').format(cfile))
    if ofile is None:
        # Strip the trailing '.c'; str.replace would clobber an earlier
        # '.c' inside the path (e.g. 'my.code.c' -> 'my.oode.c').
        ofile = cfile[:-2] + '.o'
    fpic = []
    # 64-bit targets need position-independent code for shared linking.
    if platform.machine() in ['x86_64', 'amd64']:
        fpic = ['-fPIC']
    retval = subprocess.call(ccCommand.split()
                             + [cfile, '-w', '-g', '-c', '-o', ofile] + fpic)
    if retval != 0:
        sys.exit(retval)
def main():
    """Parse the command line and drive the .helm -> .c -> .o pipeline."""
    parser = argparse.ArgumentParser(
        description="helmc compiles .helm files to objects."
        " Use helml to link them."
        " Set HELM_MODULES to specify where to find more modules.\n")
    parser.add_argument('files', metavar='FILE', type=str, nargs='+',
                        help='.helm file to compile')
    parser.add_argument('-C', '--emit-c', action='store_true',
                        help='emits C code into .c files instead of compiling')
    parser.add_argument('-X', '--cc', default='cc', type=str,
                        help='specifies the C compiler to use. '
                             'Defaults to "cc"')
    parser.add_argument('-o', '--objname', type=str,
                        help='indicates the alternative name '
                             'for the object file. '
                             'Defaults to <helmfile>.o')
    args = parser.parse_args()

    # An explicit object name only makes sense for a single input file.
    if args.objname and len(args.files) > 1:
        sys.exit(__file__ +
                 ': error: cannot specify -o'
                 ' when generating multiple output files')

    # Honour $CC only when the user did not pick a compiler explicitly.
    if args.cc == 'cc':
        args.cc = os.environ.get('CC', args.cc)

    # Normalise both invocation modes into (source, object-name) pairs;
    # a None object name lets cc() derive the default.
    if args.objname:
        jobs = [(args.files[0], args.objname)]
    else:
        jobs = [(source, None) for source in args.files]

    for source, objname in jobs:
        generated = helmc1(source)
        if not args.emit_c:
            # Intermediate .c files are deleted on exit unless -C was given.
            todelete.append(generated)
        cc(args.cc, generated, objname)


if __name__ == '__main__':
    main()
| Python |
import argparse, atexit, os, subprocess, sys
# Directory containing this driver script; the helm runtime object file is
# expected to live next to it.
buildpath = os.path.dirname(os.path.abspath(__file__))
helmrtpath = os.path.join(buildpath, 'helmrt.o')
def ld(ccCommand, ofiles, ofile=None):
    """Link *ofiles* (plus the helm runtime) into an executable.

    ccCommand -- C compiler command line (split on whitespace)
    ofiles    -- object files to link
    ofile     -- output name; defaults to the single input without its '.o'
                 suffix, or 'a.out' for multiple inputs
    Exits the process with the compiler's status on failure.
    """
    if ofile is None:  # identity test, not '== None'
        if len(ofiles) == 1 and ofiles[0].endswith('.o'):
            # Strip the trailing '.o'; str.replace would clobber an earlier
            # '.o' inside the path (e.g. 'my.old.o' -> 'my.ld.o').
            ofile = ofiles[0][:-2]
        else:
            ofile = 'a.out'
    retval = subprocess.call(ccCommand.split() + ofiles + [helmrtpath, '-g', '-L' + buildpath, '-lhelm', '-w', '-o', ofile])
    if retval != 0:
        sys.exit(retval)
def main():
    """Parse the command line and hand the object files to the linker driver."""
    parser = argparse.ArgumentParser(description="helml links object files to executables.\n")
    parser.add_argument('files', metavar='FILE', type=str, nargs='+', help='object files to link')
    parser.add_argument('-X', '--cc', default='cc', type=str, help='specifies the C compiler to use. Defaults to "cc"')
    parser.add_argument('-o', '--objname', type=str, help='indicates the alternative name for the executable. Defaults to <file> if only one file is given, a.out otherwise.')
    args = parser.parse_args()
    # Honour $CC only when the user did not pick a compiler explicitly.
    if args.cc == 'cc':
        args.cc = os.environ.get('CC', args.cc)
    ld(args.cc, args.files, args.objname)


if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/env python3
#
# Does the same thing as the SmartDashboard. Sorta.
#
import sys
import time
from pynetworktables import *

# Robot's NetworkTables server address (10.TE.AM.2 convention, team 2053).
ip = "10.20.53.2"

# Connect to the robot as a NetworkTables client.
NetworkTable.SetIPAddress(ip)
NetworkTable.SetClientMode()
NetworkTable.Initialize()
table = NetworkTable.GetTable("robotMovement")
# Give the connection time to come up before the first write.
time.sleep(10)

# Handshake flags: 'tableInUse' acts as a crude mutex; 'tableRead' is set by
# the robot once it has consumed the published values.
table.PutBoolean("tableInUse",False)
table.PutBoolean("tableRead",False)
# NetworkTable::GetTable("robotMovement")->PutNumber("angleAdjust",0.0)
# NetworkTable::GetTable("robotMovement")->PutNumber("currAngle",10.0)
# NetworkTable::GetTable("robotMovement")->PutBoolean("tableRead",false)
# NetworkTable::GetTable("robotMovement")->PutBoolean("tableUpdated",false)
# NetworkTable::GetTable("robotMovement")->PutBoolean("tableInUse",false)
# NetworkTable::GetTable("robotMovement")->PutBoolean("adjustEnabled",false)
# NetworkTable::GetTable("robotMovement")->PutBoolean("foward",false)
# NetworkTable::GetTable("robotMovement")->PutBoolean("back",false)
# NetworkTable::GetTable("robotMovement")->PutBoolean("left",false)
# NetworkTable::GetTable("robotMovement")->PutBoolean("right",false)
# NetworkTable::GetTable("robotMovement")->PutBoolean("strafeLeft",false)
# NetworkTable::GetTable("robotMovement")->PutBoolean("strafeRight",false)
# NetworkTable::GetTable("robotMovement")->PutString("currMode","None")

# Claim the table, publish one set of test movement commands, release it.
table.PutBoolean("tableInUse",True)
print("Setting values for test\n")
table.PutNumber("angle",70.0)
table.PutBoolean("adjustEnabled",True)
# NOTE(review): the key is spelled "foward" on the robot side too -- do not
# "fix" the spelling here without changing the robot code as well.
table.PutBoolean("foward",True)
table.PutBoolean("back",True)
table.PutBoolean("left",True)
table.PutBoolean("right",True)
table.PutBoolean("strafeLeft",True)
table.PutBoolean("strafeRight",True)
table.PutBoolean("tableUpdated",True)
table.PutBoolean("tableInUse",False)

loop_ctrl = True
inUse = True
time.sleep(6)
print("Entering loop, waiting for robot feedback\n")
# Poll once a second; once the robot reports it has read the values, zero
# everything out (stop all movement) and exit the loop.
while loop_ctrl:
    inUse = table.GetBoolean("tableInUse")
    if(not inUse and table.GetBoolean("tableRead")):
        print("Table was read, now to set all values to stop adjustment\n")
        table.PutBoolean("adjustEnabled",False)
        table.PutBoolean("tableInUse",True)
        table.PutNumber("angle",45)
        table.PutBoolean("foward",False)
        table.PutBoolean("back",False)
        table.PutBoolean("left",False)
        table.PutBoolean("right",False)
        table.PutBoolean("strafeLeft",False)
        table.PutBoolean("strafeRight",False)
        table.PutBoolean("tableRead",False)
        table.PutBoolean("tableUpdated",False)
        table.PutBoolean("tableInUse",False)
        loop_ctrl = False
    # try:
    #     print("SmartDashboard::test: %s" % table.GetNumber('X'))
    #     num = table.GetNumber('X')
    #     num = num + 1
    #     table.PutNumber('X',num)
    # except:
    #     print("No value yet")
    time.sleep(1)
| Python |
#week 1 assignment
#Prime Factors
def primeFactors(n):
    """Return all the prime factors of a positive integer as a list.

    Factors are returned in non-decreasing order, e.g. 12 -> [2, 2, 3].
    Returns [] for n <= 1.
    """
    factors = []
    d = 2
    # Only trial-divide up to sqrt(n); anything left afterwards is prime.
    while d * d <= n:
        while n % d == 0:
            factors.append(d)
            # Floor division keeps n an integer; true division ('/=') would
            # produce floats and lose precision for large inputs.
            n //= d
        d = d + 1
    if n > 1:
        factors.append(n)
    return factors
# Read an integer from the user (Python 2: raw_input returns a str).
userInput = raw_input('enter an integer')
userInput = int(userInput)
pfs = primeFactors(userInput)
# Print one prime factor per line.
for p in pfs:
    print p
| Python |
# -*- encoding: utf-8 -*-
"""
staticDHCPd module: main
Purpose
=======
Runs a staticDHCPd server.
Legal
=====
This file is part of staticDHCPd.
staticDHCPd is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2009 <red.hamsterx@gmail.com>
"""
import os
import signal
import sys
import time
import thread
import src.conf_buffer as conf
import src.dhcp
import src.logging
import src.webface
if not conf.DEBUG: #Suppress all unnecessary prints.
    # Daemon mode: discard everything written to stdout/stderr.
    sys.stdout = sys.stderr = open('/dev/null', 'w')
else:
    # Debug mode: alias stdout to stderr so all output shares one stream.
    sys.stdout = sys.stderr
def _quitHandler(signum, frame):
    """
    Cleanly shuts down this daemon upon receipt of a SIGTERM.

    @type signum: int
    @param signum: The kill-signal constant received. This will always be
        SIGTERM.
    @type frame: int
    @param frame: The stack-frame in which the kill-signal was received.
        This is not used.
    """
    #Remove PID.
    try:
        os.unlink(conf.PID_FILE)
    except OSError:
        #Only filesystem errors are expected here; the previous bare
        #'except:' would also have hidden SystemExit/KeyboardInterrupt.
        pass
    src.logging.logToDisk()
    exit(0)
def _logHandler(signum, frame):
    """
    Flushes DHCP cache and writes log to disk upon receipt of a SIGHUP.

    @type signum: int
    @param signum: The kill-signal constant received. This will always be
        SIGHUP.
    @type frame: int
    @param frame: The stack-frame in which the kill-signal was received.
        This is not used.
    """
    src.dhcp.flushCache()
    #Report success or failure of the disk write through the in-memory log.
    wrote = src.logging.logToDisk()
    if wrote:
        src.logging.writeLog("Wrote log to '%(log)s'" % {'log': conf.LOG_FILE,})
    else:
        src.logging.writeLog("Unable to write logfile: %(log)s" % {'log': conf.LOG_FILE,})
if __name__ == '__main__':
    #Ensure that pre-setup tasks are taken care of.
    conf.init()

    #Start Web server.
    if conf.WEB_ENABLED:
        thread.start_new_thread(src.webface.webserver,())

    #Start DHCP server.
    dhcp_thread = src.dhcp.DHCPService()
    dhcp_thread.start()

    #Record PID.
    try:
        pidfile = open(conf.PID_FILE, 'w')
        pidfile.write(str(os.getpid()) + '\n')
        pidfile.close()
        os.chown(conf.PID_FILE, conf.UID, conf.GID)
    except (IOError, OSError): #Environment errors only; was a bare except.
        src.logging.writeLog("Unable to write pidfile: %(file)s" % {'file': conf.PID_FILE,})

    #Touch logfile.
    try:
        open(conf.LOG_FILE, 'a').close()
        os.chown(conf.LOG_FILE, conf.UID, conf.GID)
    except (IOError, OSError):
        #Bug fix: this branch previously reported "Unable to write pidfile"
        #with the PID path, masking logfile problems.
        src.logging.writeLog("Unable to write logfile: %(file)s" % {'file': conf.LOG_FILE,})

    #Set signal-handlers.
    signal.signal(signal.SIGHUP, _logHandler)
    signal.signal(signal.SIGTERM, _quitHandler)

    #Set proper permissions for execution
    os.setregid(conf.GID, conf.GID)
    os.setreuid(conf.UID, conf.UID)

    #Serve until interrupted.
    tick = 0
    while True:
        time.sleep(1)
        tick += 1
        if tick >= conf.POLLING_INTERVAL: #Perform periodic cleanup.
            dhcp_thread.pollStats()
            src.logging.emailTimeoutCooldown()
            tick = 0
| Python |
# -*- coding: utf-8 -*-
# pep8-ignore: E501
'''
====================================
:mod:`ipcalc` IP subnet calculator
====================================
.. moduleauthor:: Wijnand Modderman-Lenstra <maze@pyth0n.org>
.. note:: BSD License
About
=====
This module allows you to perform network calculations.
References
==========
References:
* http://www.estoile.com/links/ipv6.pdf
* http://www.iana.org/assignments/ipv4-address-space
* http://www.iana.org/assignments/multicast-addresses
* http://www.iana.org/assignments/ipv6-address-space
* http://www.iana.org/assignments/ipv6-tla-assignments
* http://www.iana.org/assignments/ipv6-multicast-addresses
* http://www.iana.org/assignments/ipv6-anycast-addresses
Thanks
======
I wish to thank the following people for their input:
* Bas van Oostveen (trbs)
* Peter van Dijk (Habbie)
* Hans van Kranenburg (Knorrie)
* Jeroen Habraken (VeXocide)
* Torbjörn Lönnemark (tobbez)
* Anthony Cornehl (twinshadow)
'''
__version__ = '1.0.0'
try:
    # Probe for the bin() builtin (added in Python 2.6); define a fallback
    # for older interpreters where the call raises NameError.
    bin(42)
except NameError:
    def bin(x):
        '''
        Stringifies an int or long in base 2.
        '''
        if x < 0:
            return '-' + bin(-x)
        out = []
        if x == 0:
            out.append('0')
        # Collect bits least-significant first, then reverse for output.
        while x > 0:
            out.append('01'[x & 1])
            x >>= 1
            pass  # NOTE(review): dead statement, kept as-is
        try:
            return '0b' + ''.join(reversed(out))
        except NameError:
            # reversed() itself may be missing on very old interpreters.
            out.reverse()
            return '0b' + ''.join(out)
class IP(object):
'''
Represents a single IP address.
:param ip: the ip address
:type ip: :class:`IP` or str or long or int
>>> localhost = IP("127.0.0.1")
>>> print localhost
127.0.0.1
>>> localhost6 = IP("::1")
>>> print localhost6
0000:0000:0000:0000:0000:0000:0000:0001
'''
# Hex-to-Bin conversion masks
_bitmask = {
'0': '0000', '1': '0001', '2': '0010', '3': '0011',
'4': '0100', '5': '0101', '6': '0110', '7': '0111',
'8': '1000', '9': '1001', 'a': '1010', 'b': '1011',
'c': '1100', 'd': '1101', 'e': '1110', 'f': '1111'
}
# IP range specific information, see IANA allocations.
_range = {
4: {
'01': 'CLASS A',
'10': 'CLASS B',
'110': 'CLASS C',
'1110': 'CLASS D MULTICAST',
'11100000': 'CLASS D LINKLOCAL',
'1111': 'CLASS E',
'00001010': 'PRIVATE RFC1918', # 10/8
'101011000001': 'PRIVATE RFC1918', # 172.16/12
'1100000010101000': 'PRIVATE RFC1918', # 192.168/16
},
6: {
'00000000': 'RESERVED', # ::/8
'00000001': 'UNASSIGNED', # 100::/8
'0000001': 'NSAP', # 200::/7
'0000010': 'IPX', # 400::/7
'0000011': 'UNASSIGNED', # 600::/7
'00001': 'UNASSIGNED', # 800::/5
'0001': 'UNASSIGNED', # 1000::/4
'0010000000000000': 'RESERVED', # 2000::/16 Reserved
'0010000000000001': 'ASSIGNABLE', # 2001::/16 Sub-TLA Assignments [RFC2450]
'00100000000000010000000': 'ASSIGNABLE IANA', # 2001:0000::/29 - 2001:01F8::/29 IANA
'00100000000000010000001': 'ASSIGNABLE APNIC', # 2001:0200::/29 - 2001:03F8::/29 APNIC
'00100000000000010000010': 'ASSIGNABLE ARIN', # 2001:0400::/29 - 2001:05F8::/29 ARIN
'00100000000000010000011': 'ASSIGNABLE RIPE', # 2001:0600::/29 - 2001:07F8::/29 RIPE NCC
'0010000000000010': '6TO4', # 2002::/16 "6to4" [RFC3056]
'0011111111111110': '6BONE TEST', # 3ffe::/16 6bone Testing [RFC2471]
'0011111111111111': 'RESERVED', # 3fff::/16 Reserved
'010': 'GLOBAL-UNICAST', # 4000::/3
'011': 'UNASSIGNED', # 6000::/3
'100': 'GEO-UNICAST', # 8000::/3
'101': 'UNASSIGNED', # a000::/3
'110': 'UNASSIGNED', # c000::/3
'1110': 'UNASSIGNED', # e000::/4
'11110': 'UNASSIGNED', # f000::/5
'111110': 'UNASSIGNED', # f800::/6
'1111110': 'UNASSIGNED', # fc00::/7
'111111100': 'UNASSIGNED', # fe00::/9
'1111111010': 'LINKLOCAL', # fe80::/10
'1111111011': 'SITELOCAL', # fec0::/10
'11111111': 'MULTICAST', # ff00::/8
'0' * 96: 'IPV4COMP', # ::/96
'0' * 80 + '1' * 16: 'IPV4MAP', # ::ffff:0:0/96
'0' * 128: 'UNSPECIFIED', # ::/128
'0' * 127 + '1': 'LOOPBACK' # ::1/128
}
}
def __init__(self, ip, mask=None, version=0):
self.mask = mask
self.v = 0
# Parse input
if ip is None:
raise ValueError('Can not pass None')
elif isinstance(ip, IP):
self.ip = ip.ip
self.dq = ip.dq
self.v = ip.v
self.mask = ip.mask
elif isinstance(ip, (int, long)):
self.ip = long(ip)
if self.ip <= 0xffffffff:
self.v = version or 4
self.dq = self._itodq(ip)
else:
self.v = version or 6
self.dq = self._itodq(ip)
else:
# If string is in CIDR or netmask notation
if '/' in ip:
ip, mask = ip.split('/', 1)
self.mask = mask
self.v = version or 0
self.dq = ip
self.ip = self._dqtoi(ip)
assert self.v != 0, 'Could not parse input'
# Netmask defaults to one ip
if self.mask is None:
self.mask = self.v == 4 and 32 or 128
# Netmask is numeric CIDR subnet
elif isinstance(self.mask, (int, long)) or self.mask.isdigit():
self.mask = int(self.mask)
# Netmask is in subnet notation
elif isinstance(self.mask, basestring):
limit = [32, 128][':' in self.mask]
inverted = ~self._dqtoi(self.mask)
count = 0
while inverted & pow(2, count):
count += 1
self.mask = (limit - count)
else:
raise ValueError('Invalid netmask')
# Validate subnet size
if self.v == 6:
self.dq = self._itodq(self.ip)
if not 0 <= self.mask <= 128:
raise ValueError('IPv6 subnet size must be between 0 and 128')
elif self.v == 4:
if not 0 <= self.mask <= 32:
raise ValueError('IPv4 subnet size must be between 0 and 32')
def bin(self):
'''
Full-length binary representation of the IP address.
>>> ip = IP("127.0.0.1")
>>> print ip.bin()
01111111000000000000000000000001
'''
return bin(self.ip).split('b')[1].rjust(self.mask, '0')
def hex(self):
'''
Full-length hexadecimal representation of the IP address.
>>> ip = IP("127.0.0.1")
>>> print ip.hex()
7f000001
'''
if self.v == 4:
return '%08x' % self.ip
else:
return '%032x' % self.ip
def subnet(self):
return self.mask
def version(self):
'''
IP version.
>>> ip = IP("127.0.0.1")
>>> print ip.version()
4
'''
return self.v
def info(self):
'''
Show IANA allocation information for the current IP address.
>>> ip = IP("127.0.0.1")
>>> print ip.info()
CLASS A
'''
b = self.bin()
self.v == 4 and 32 or 128
for i in range(len(b), 0, -1):
if b[:i] in self._range[self.v]:
return self._range[self.v][b[:i]]
return 'UNKNOWN'
def _dqtoi(self, dq):
'''
Convert dotquad or hextet to long.
'''
# hex notation
if dq.startswith('0x'):
ip = long(dq[2:], 16)
if ip > 0xffffffffffffffffffffffffffffffffL:
raise ValueError('%s: IP address is bigger than 2^128' % dq)
if ip <= 0xffffffff:
self.v = 4
else:
self.v = 6
return ip
# IPv6
if ':' in dq:
# Split hextets
hx = dq.split(':')
if ':::' in dq:
raise ValueError("%s: IPv6 address can't contain :::" % dq)
# Mixed address (or 4-in-6), ::ffff:192.0.2.42
if '.' in dq:
return self._dqtoi(hx[-1])
if len(hx) > 8:
raise ValueError('%s: IPv6 address with more than 8 hexlets' % dq)
elif len(hx) < 8:
# No :: in address
if not '' in hx:
raise ValueError('%s: IPv6 address invalid: ' +
'compressed format malformed' % dq)
elif not (dq.startswith('::') or dq.endswith('::')) and len([x for x in hx if x == '']) > 1:
raise ValueError('%s: IPv6 address invalid: ' +
'compressed format malformed' % dq)
ix = hx.index('')
px = len(hx[ix + 1:])
for x in xrange(ix + px + 1, 8):
hx.insert(ix, '0')
elif dq.endswith('::'):
pass
elif '' in hx:
raise ValueError('%s: IPv6 address invalid: ' +
'compressed format detected in full notation' % dq())
ip = ''
hx = [x == '' and '0' or x for x in hx]
for h in hx:
if len(h) < 4:
h = '%04x' % int(h, 16)
if not 0 <= int(h, 16) <= 0xffff:
raise ValueError('%r: IPv6 address invalid: ' +
'hexlets should be between 0x0000 and 0xffff' % dq)
ip += h
self.v = 6
return long(ip, 16)
elif len(dq) == 32:
# Assume full heximal notation
self.v = 6
return long(h, 16)
# IPv4
if '.' in dq:
q = dq.split('.')
q.reverse()
if len(q) > 4:
raise ValueError('%s: IPv4 address invalid: ' +
'more than 4 bytes' % dq)
for x in q:
if not 0 <= int(x) <= 255:
raise ValueError('%s: IPv4 address invalid: ' +
'bytes should be between 0 and 255' % dq)
while len(q) < 4:
q.insert(1, '0')
self.v = 4
return sum(long(byte) << 8 * index for index, byte in enumerate(q))
raise ValueError('Invalid address input')
def _itodq(self, n):
'''
Convert long to dotquad or hextet.
'''
if self.v == 4:
return '.'.join(map(str, [
(n >> 24) & 0xff,
(n >> 16) & 0xff,
(n >> 8) & 0xff,
n & 0xff,
]))
else:
n = '%032x' % n
return ':'.join(n[4 * x:4 * x + 4] for x in xrange(0, 8))
def __str__(self):
'''
Return dotquad representation of the IP.
>>> ip = IP("::1")
>>> print str(ip)
0000:0000:0000:0000:0000:0000:0000:0001
'''
return self.dq
def __int__(self):
return int(self.ip)
def __long__(self):
return self.ip
def __lt__(self, other):
return long(self) < long(IP(other))
def __le__(self, other):
return long(self) <= long(IP(other))
def __ge__(self, other):
return long(self) >= long(IP(other))
def __gt__(self, other):
return long(self) > long(IP(other))
def __eq__(self, other):
return long(self) == long(IP(other))
def size(self):
return 1
def clone(self):
'''
Return a new <IP> object with a copy of this one.
>>> ip = IP('127.0.0.1')
>>> ip.clone() # doctest: +ELLIPSIS
<ipcalc.IP object at 0x...>
'''
return IP(self)
def to_ipv4(self):
'''
Convert (an IPv6) IP address to an IPv4 address, if possible. Only works
for IPv4-compat (::/96) and 6-to-4 (2002::/16) addresses.
>>> ip = IP('2002:c000:022a::')
>>> print ip.to_ipv4()
192.0.2.42
'''
if self.v == 4:
return self
else:
if self.bin().startswith('0' * 96):
return IP(long(self), version=4)
elif long(self) & 0x20020000000000000000000000000000L:
return IP((long(self) - 0x20020000000000000000000000000000L) >> 80, version=4)
else:
return ValueError('%s: IPv6 address is not IPv4 compatible, ' +
'nor an 6-to-4 IP' % self.dq)
@classmethod
def from_bin(cls, value):
value = value.lstrip('b')
if len(value) == 32:
return cls(int(value, 2))
elif len(value) == 128:
return cls(long(value, 2))
else:
return ValueError('%r: invalid binary notation' % (value,))
@classmethod
def from_hex(cls, value):
if len(value) == 8:
return cls(int(value, 16))
elif len(value) == 32:
return cls(long(value, 16))
else:
raise ValueError('%r: invalid hexadecimal notation' % (value,))
def to_ipv6(self, type='6-to-4'):
'''
Convert (an IPv4) IP address to an IPv6 address.
>>> ip = IP('192.0.2.42')
>>> print ip.to_ipv6()
2002:c000:022a:0000:0000:0000:0000:0000
'''
assert type in ['6-to-4', 'compat'], 'Conversion type not supported'
if self.v == 4:
if type == '6-to-4':
return IP(0x20020000000000000000000000000000L | long(self) << 80, version=6)
elif type == 'compat':
return IP(long(self), version=6)
else:
return self
def to_reverse(self):
'''
Convert the IP address to a PTR record in .in-addr.arpa for IPv4 and
.ip6.arpa for IPv6 addresses.
>>> ip = IP('192.0.2.42')
>>> print ip.to_reverse()
42.2.0.192.in-addr.arpa
>>> print ip.to_ipv6().to_reverse()
0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.a.2.2.0.0.0.0.c.2.0.0.2.ip6.arpa
'''
if self.v == 4:
return '.'.join(list(self.dq.split('.')[::-1]) + ['in-addr', 'arpa'])
else:
return '.'.join(list(self.hex())[::-1] + ['ip6', 'arpa'])
def to_tuple(self):
'''
Used for comparisons.
'''
return (self.dq, self.mask)
class Network(IP):
    '''
    Network slice calculations.

    :param ip: network address
    :type ip: :class:`IP` or str or long or int
    :param mask: netmask
    :type mask: int or str

    >>> localnet = Network('127.0.0.1/8')
    >>> print localnet
    127.0.0.1
    '''

    def netmask(self):
        '''
        Network netmask derived from subnet size, as IP object.

        >>> localnet = Network('127.0.0.1/8')
        >>> print localnet.netmask()
        255.0.0.0
        '''
        return IP(self.netmask_long(), version=self.version())

    def netmask_long(self):
        '''
        Network netmask derived from subnet size, as long.

        >>> localnet = Network('127.0.0.1/8')
        >>> print localnet.netmask_long()
        4278190080
        '''
        # Shift right then left to zero out the host bits.
        if self.version() == 4:
            return (0xffffffffL >> (32 - self.mask)) << (32 - self.mask)
        else:
            return (0xffffffffffffffffffffffffffffffffL >> (128 - self.mask)) << (128 - self.mask)

    def network(self):
        '''
        Network address, as IP object.

        >>> localnet = Network('127.128.99.3/8')
        >>> print localnet.network()
        127.0.0.0
        '''
        return IP(self.network_long(), version=self.version())

    def network_long(self):
        '''
        Network address, as long.

        >>> localnet = Network('127.128.99.3/8')
        >>> print localnet.network_long()
        2130706432
        '''
        return self.ip & self.netmask_long()

    def broadcast(self):
        '''
        Broadcast address, as IP object.

        >>> localnet = Network('127.0.0.1/8')
        >>> print localnet.broadcast()
        127.255.255.255
        '''
        # XXX: IPv6 doesn't have a broadcast address, but it's used for other
        # calculations such as <Network.host_last>
        return IP(self.broadcast_long(), version=self.version())

    def broadcast_long(self):
        '''
        Broadcast address, as long.

        >>> localnet = Network('127.0.0.1/8')
        >>> print localnet.broadcast_long()
        2147483647
        '''
        # Network address with all host bits set.
        if self.version() == 4:
            return self.network_long() | (0xffffffffL - self.netmask_long())
        else:
            return self.network_long() \
                | (0xffffffffffffffffffffffffffffffffL - self.netmask_long())

    def host_first(self):
        '''
        First available host in this subnet.
        '''
        # /31, /32 (and v6 /127, /128) subnets have no distinct network
        # address, so the address itself is the first host.
        if (self.version() == 4 and self.mask > 30) or \
                (self.version() == 6 and self.mask > 126):
            return self
        else:
            return IP(self.network_long() + 1, version=self.version())

    def host_last(self):
        '''
        Last available host in this subnet.
        '''
        if (self.version() == 4 and self.mask == 32) or \
                (self.version() == 6 and self.mask == 128):
            return self
        elif (self.version() == 4 and self.mask == 31) or \
                (self.version() == 6 and self.mask == 127):
            return IP(long(self) + 1, version=self.version())
        else:
            # One below the broadcast address.
            return IP(self.broadcast_long() - 1, version=self.version())

    def in_network(self, other):
        '''
        Check if the given IP address is within this network.
        '''
        # NOTE(review): the range check is relative to long(self) (the address
        # as given), not network_long(); confirm that callers always pass a
        # proper network address.
        other = Network(other)
        return long(other) >= long(self) and long(other) < long(self) + self.size() - other.size() + 1

    def __contains__(self, ip):
        '''
        Check if the given ip is part of the network.

        >>> '192.0.2.42' in Network('192.0.2.0/24')
        True
        >>> '192.168.2.42' in Network('192.0.2.0/24')
        False
        '''
        return self.in_network(ip)

    # NOTE(review): ordering/equality below compare subnet *sizes*, not
    # addresses, which shadows IP's value-based comparisons -- confirm this
    # is the intended semantics before relying on Network == Network.
    def __lt__(self, other):
        return self.size() < IP(other).size()

    def __le__(self, other):
        return self.size() <= IP(other).size()

    def __gt__(self, other):
        return self.size() > IP(other).size()

    def __ge__(self, other):
        return self.size() >= IP(other).size()

    def __eq__(self, other):
        return self.size() == IP(other).size()

    def __getitem__(self, key):
        if isinstance(key, slice):
            # Work-around IPv6 subnets being huge. Slice indices don't like
            # long int.
            x = key.start or 0
            slice_stop = (key.stop or self.size()) - 1
            slice_step = key.step or 1
            arr = list()
            # NOTE(review): 'x < slice_stop' yields one element fewer than
            # standard slice semantics (stop is excluded twice); verify
            # against callers before changing.
            while x < slice_stop:
                arr.append(IP(long(self) + x))
                x += slice_step
            return tuple(arr)
        else:
            return IP(long(self) + key)

    def __iter__(self):
        '''
        Generate a range of usable host IP addresses within the network, as IP
        objects.

        >>> for ip in Network('192.168.114.0/30'):
        ...     print str(ip)
        ...
        192.168.114.0
        192.168.114.1
        192.168.114.2
        192.168.114.3
        '''
        curr = long(self.host_first())
        stop = long(self.host_last())
        while curr <= stop:
            yield IP(curr)
            curr += 1

    def has_key(self, ip):
        '''
        Check if the given ip is part of the network.

        :param ip: the ip address
        :type ip: :class:`IP` or str or long or int

        >>> net = Network('192.0.2.0/24')
        >>> net.has_key('192.168.2.0')
        False
        >>> net.has_key('192.0.2.42')
        True
        '''
        return self.__contains__(ip)

    def size(self):
        '''
        Number of ip's within the network.

        >>> net = Network('192.0.2.0/24')
        >>> print net.size()
        256
        '''
        return 2 ** ((self.version() == 4 and 32 or 128) - self.mask)
if __name__ == '__main__':
    # Smoke tests: (address, netmask-or-None, probe addresses) covering IPv4,
    # IPv6, numeric CIDR, dotted/hextet netmasks and embedded '/mask' input.
    tests = [
        ('192.168.114.42', 23, ['192.168.0.1', '192.168.114.128', '10.0.0.1']),
        ('123::', 128, ['123:456::', '::1', '123::456']),
        ('::42', 64, ['::1', '1::']),
        ('2001:dead:beef:1:c01d:c01a::', 48, ['2001:dead:beef:babe::']),
        ('10.10.0.0', '255.255.255.0', ['10.10.0.20', '10.10.10.20']),
        ('2001:dead:beef:1:c01d:c01a::', 'ffff:ffff:ffff::', ['2001:dead:beef:babe::']),
        ('10.10.0.0/255.255.240.0', None, ['10.10.0.20', '10.10.250.0']),
    ]
    for ip, mask, test_ip in tests:
        net = Network(ip, mask)
        print '==========='
        print 'ip address:', net
        print 'to ipv6...:', net.to_ipv6()
        print 'ip version:', net.version()
        print 'ip info...:', net.info()
        print 'subnet....:', net.subnet()
        print 'num ip\'s..:', net.size()
        print 'integer...:', long(net)
        print 'hex.......:', net.hex()
        print 'netmask...:', net.netmask()
        # Not implemented in IPv6
        if net.version() == 4:
            print 'network...:', net.network()
            print 'broadcast.:', net.broadcast()
        print 'first host:', net.host_first()
        print 'reverse...:', net.host_first().to_reverse()
        print 'last host.:', net.host_last()
        print 'reverse...:', net.host_last().to_reverse()
        # Membership probes for each sample address.
        for ip in test_ip:
            print '%s in network: ' % ip, ip in net
| Python |
# -*- encoding: utf-8 -*-
"""
staticDHCPd module: src.conf
Purpose
=======
Provides a buffer to seed options with default values to make upgrading easier
for end users who do not need to manage any newly added features.
Legal
=====
This file is part of staticDHCPd.
staticDHCPd is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2011 <red.hamsterx@gmail.com>
"""
import conf

#Options passed through from conf.py
#For explanations, please consult that file.
##############################################################################
#Defaults are layered *underneath* the user's conf module: any constant not
#defined in conf.py falls back to the value given here, so upgrades that add
#options do not force existing installs to edit their configuration.
_defaults = {}

#General settings
#######################################
_defaults.update({
    'DEBUG': False,
    'POLLING_INTERVAL': 30,
    'LOG_CAPACITY': 1000,
    'POLL_INTERVALS_TO_TRACK': 20,
})

#Server settings
#######################################
_defaults.update({
    'PXE_PORT': None,
})

#Server behaviour settings
#######################################
_defaults.update({
    'ALLOW_LOCAL_DHCP': True,
    'ALLOW_DHCP_RELAYS': False,
    'ALLOWED_DHCP_RELAYS': (),
    'AUTHORITATIVE': False,
    'NAK_RENEWALS': False,
    'UNAUTHORIZED_CLIENT_TIMEOUT': 60,
    'MISBEHAVING_CLIENT_TIMEOUT': 150,
    'ENABLE_SUSPEND': True,
    'SUSPEND_THRESHOLD': 10,
    'WEB_RELOAD_KEY': '5f4dcc3b5aa765d61d8327deb882cf99',
})

#Database settings
#######################################
_defaults.update({
    'USE_CACHE': False,
    'USE_POOL': True,
    'MYSQL_DATABASE': 'dhcp',
    'MYSQL_USERNAME': 'dhcp_user',
    'MYSQL_PASSWORD': 'dhcp_pass',
    'MYSQL_HOST': None,
    'MYSQL_PORT': 3306,
    'MYSQL_MAXIMUM_CONNECTIONS': 4,
    'POSTGRESQL_DATABASE': 'dhcp',
    'POSTGRESQL_USERNAME': 'dhcp_user',
    'POSTGRESQL_PASSWORD': 'dhcp_pass',
    'POSTGRESQL_HOST': None,
    'POSTGRESQL_PORT': 5432,
    'POSTGRESQL_SSLMODE': 'disabled',
    'POSTGRESQL_MAXIMUM_CONNECTIONS': 4,
    'ORACLE_DATABASE': 'dhcp',
    'ORACLE_USERNAME': 'dhcp_user',
    'ORACLE_PASSWORD': 'dhcp_pass',
    'ORACLE_MAXIMUM_CONNECTIONS': 4,
    'SQLITE_FILE': '/etc/staticDHCPd/dhcp.sqlite3',
})

#E-mail settings
#######################################
_defaults.update({
    'EMAIL_ENABLED': False,
    'EMAIL_SERVER': 'mail.yourdomain.com',
    'EMAIL_SOURCE': 'you@yourdomain.com',
    'EMAIL_DESTINATION': 'problems@yourdomain.com',
    'EMAIL_USER': 'you',
    'EMAIL_PASSWORD': 'password',
    'EMAIL_TIMEOUT': 600,
})

#Construct a unified namespace
#######################################
#User-defined constants win; defaults only fill in what conf.py omitted.
for key in [k for k in dir(conf) if k.isupper()]: #Copy everything that looks like a constant.
    globals()[key] = getattr(conf, key)
for (key, value) in _defaults.iteritems():
    if not key in globals():
        globals()[key] = value
del _defaults

#Re-export the user's callbacks under this module's name.
init = conf.init
loadDHCPPacket = conf.loadDHCPPacket

#Inject namespace elements into conf.
##############################################################################
#Expose the RFC helper functions to conf.py so loadDHCPPacket() can use them
#without importing libpydhcpserver itself.
import libpydhcpserver.type_rfc as type_rfc
conf.rfc3046_decode = type_rfc.rfc3046_decode
conf.rfc1035_plus = type_rfc.rfc1035_plus

conf.ipToList = type_rfc.ipToList
conf.ipsToList = type_rfc.ipsToList
conf.intToList = type_rfc.intToList
conf.intsToList = type_rfc.intsToList
conf.longToList = type_rfc.longToList
conf.longsToList = type_rfc.longsToList
conf.strToList = type_rfc.strToList
conf.strToPaddedList = type_rfc.strToPaddedList

conf.rfc2610_78 = type_rfc.rfc2610_78
conf.rfc2610_79 = type_rfc.rfc2610_79
conf.rfc3361_120 = type_rfc.rfc3361_120
conf.rfc3397_119 = type_rfc.rfc3397_119
conf.rfc3925_124 = type_rfc.rfc3925_124
conf.rfc3925_125 = type_rfc.rfc3925_125
conf.rfc4174_83 = type_rfc.rfc4174_83
conf.rfc4280_88 = type_rfc.rfc4280_88
conf.rfc5223_137 = type_rfc.rfc5223_137
conf.rfc5678_139 = type_rfc.rfc5678_139
conf.rfc5678_140 = type_rfc.rfc5678_140
del type_rfc

#NOTE(review): 'logging' here presumably resolves to the project's
#src/logging module (not the stdlib) given the writeLog attribute -- confirm
#against the package layout.
import logging
conf.writeLog = logging.writeLog
del logging
| Python |
import web
import src.sql as sql
import src.conf_buffer as conf
import src.ipcalc as ipcalc
import psycopg2.pool
#URL-to-controller routing for the web.py application.
urls = (
 '/', 'index',
 '/login','login',
 '/logout','logout',
 '/group','group',
 '/maps','maps'
)
#DSN for the configuration database.
#NOTE(review): this interface assumes DATABASE_ENGINE is PostgreSQL
#(psycopg2-style pool and %s placeholders) -- confirm before deploying.
conn_string="dbname='%s' user='%s' password='%s' host='%s'" % (conf.POSTGRESQL_DATABASE,conf.POSTGRESQL_USERNAME,conf.POSTGRESQL_PASSWORD,conf.POSTGRESQL_HOST)
#Template renderer; IP and str are exposed so templates can format addresses.
render=web.template.render("src/templates/",globals={'IP':ipcalc.IP,'str':str})
#Shared connection-pool (min 1, max 5 connections), used by every controller.
pool=sql.SQL_MODULE.pool.SimpleConnectionPool(1,5,conn_string)
def checkauthorisation():
    """
    Verifies the visitor's session cookie against the session token stored in
    the login table, redirecting to the login page on any mismatch.

    The connection is always returned to the pool, even if the query raises,
    and a missing session row is treated as an unauthorised visitor instead of
    raising TypeError (previously fetchone() could return None and crash).
    """
    conn = pool.getconn()
    try:
        cur = conn.cursor()
        try:
            cur.execute("SELECT value FROM login WHERE property='uuid'")
            row = cur.fetchone()
        finally:
            cur.close()
    finally:
        pool.putconn(conn)
    token = row and row[0]
    if not (token and web.cookies().get("uuid") == token):
        web.seeother("/login?fail=1")
def getgroupstable():
    """
    Renders the full contents of the groups table as an HTML fragment.

    @return: The rendered template for the groups listing.
    """
    connection = pool.getconn()
    cursor = connection.cursor()
    cursor.execute("SELECT * FROM groups")
    fragment = render.groupstable(cursor)
    cursor.close()
    pool.putconn(connection)
    return fragment
class group:
    """
    Web controller for the /group page: lists DHCP groups and handles
    add/update/delete actions submitted from the groups table.
    """
    def GET(self):
        """Renders the groups table after verifying the session cookie."""
        checkauthorisation()
        return render.layout(getgroupstable())

    def POST(self):
        """
        Dispatches on the submitted Action field ("Add", "Update", or
        "Delete") and applies the corresponding change to the groups table.

        All form values are passed as bound parameters rather than being
        interpolated into the SQL string, closing the SQL-injection hole in
        the previous implementation.
        """
        checkauthorisation()
        action = web.input(Action=None).Action
        if action == "Add":
            self._execute(
             "INSERT INTO groups (name,start_ip_int,end_ip_int,gateway,domain_name,domain_name_servers,ntp_servers) VALUES (%s,%s,%s,%s,%s,%s,%s)",
             (web.input().name,
              int(ipcalc.IP(web.input().start_ip_int)), int(ipcalc.IP(web.input().end_ip_int)),
              web.input().gateway, web.input().domain_name,
              web.input().domain_name_servers, web.input().ntp_servers)
            )
            return render.layout(getgroupstable())
        elif action == "Update":
            self._execute(
             "UPDATE groups SET name=%s,start_ip_int=%s, end_ip_int=%s, gateway=%s, domain_name=%s, domain_name_servers=%s ,ntp_servers=%s WHERE id=%s",
             (web.input().name,
              int(ipcalc.IP(web.input().start_ip_int)), int(ipcalc.IP(web.input().end_ip_int)),
              web.input().gateway, web.input().domain_name,
              web.input().domain_name_servers, web.input().ntp_servers,
              web.input().id)
            )
            return render.layout(getgroupstable())
        elif action == "Delete":
            self._execute("DELETE FROM groups WHERE id=%s", (web.input().id,))
            return render.layout(getgroupstable())

    def _execute(self, query, params):
        """Runs one parameterized write-query against the pool and commits it."""
        conn = pool.getconn()
        try:
            cur = conn.cursor()
            try:
                cur.execute(query, params)
                conn.commit()
            finally:
                cur.close()
        finally:
            pool.putconn(conn)
def getsubnetstable():
    """
    Renders the first row of the subnets table as an HTML fragment.

    @return: The rendered template for the subnet settings.
    """
    connection = pool.getconn()
    cursor = connection.cursor()
    cursor.execute("SELECT subnet, serial, lease_time, subnet_mask, broadcast_address FROM subnets")
    first_row = cursor.fetchone()
    fragment = render.subnets(first_row)
    cursor.close()
    pool.putconn(connection)
    return fragment
def getindexpage():
    """
    Builds the landing page around the rendered subnets table.
    """
    body = str(getsubnetstable())
    return render.layout(body)
class index:
    """
    Web controller for the landing page: shows and edits subnet settings.
    """
    def GET(self):
        """Renders the subnet-settings page after verifying the session."""
        checkauthorisation()
        return getindexpage()

    def POST(self):
        """
        Applies the submitted subnet settings.

        Bug fix: the previous implementation referenced a connection and
        cursor that were never acquired, raising NameError on every POST;
        the connection is now taken from (and returned to) the pool.
        """
        checkauthorisation()
        if not web.input(submit_subnet=None).submit_subnet is None:
            conn = pool.getconn()
            try:
                cur = conn.cursor()
                try:
                    cur.execute(
                     "UPDATE subnets SET subnet=%s, serial=%s, lease_time=%s, subnet_mask=%s, broadcast_address=%s",
                     (web.input().subnet, web.input().serial, web.input().lease_time,
                      web.input().subnet_mask, web.input().broadcast_address)
                    )
                    conn.commit()
                finally:
                    cur.close()
            finally:
                pool.putconn(conn)
        return getindexpage()
#Login form: a single password field plus a submit button, rendered by the
#login controller below.
password_form=web.form.Form(
 web.form.Password("password", description="Password"),
 web.form.Button("submit", type="submit", description="Submit")
)
class login:
    """
    Web controller for the /login page: renders the password form and
    establishes a session on successful authentication.
    """
    def GET(self):
        """Renders the password form."""
        f = password_form()
        r = f.render()
        form = "<form name='password' method='POST'>%s</form>" % (r,)
        return render.layout(form)

    def POST(self):
        """
        Checks the submitted password against the stored one and, on match,
        issues a fresh session token as a cookie.

        Bug fix: the previous implementation leaked the cursor and pooled
        connection when the password did not match; cleanup now happens on
        both paths via try/finally.
        """
        password = web.input().password
        session_token = None
        conn = pool.getconn()
        try:
            cur = conn.cursor()
            try:
                cur.execute("SELECT value FROM login WHERE property='password'")
                result = cur.fetchone()[0]
                if password == result:
                    import random
                    #NOTE(review): random.randint() is not cryptographically
                    #strong, so the session token is guessable in principle;
                    #Python 2 has no secrets module -- consider os.urandom().
                    session_token = str(random.randint(0, 100000000))
                    cur.execute("UPDATE login SET value=%s WHERE property='uuid'", (session_token,))
                    conn.commit()
            finally:
                cur.close()
        finally:
            pool.putconn(conn)
        if session_token:
            web.setcookie("uuid", session_token, 3600)
            web.seeother("/")
        else:
            web.seeother("/login?fail=1")
class logout:
    """
    Web controller that ends the visitor's session.
    """
    def GET(self):
        """Expires the session cookie and returns to the landing page."""
        web.setcookie("uuid", "", expires=-1)
        web.seeother("/")
def getmapstable(group):
    """
    Renders every MAC-to-IP mapping belonging to *group* as an HTML table.

    @param group: The id of the group whose mappings are listed.
    @return: The rendered template for the mappings table.
    """
    connection = pool.getconn()
    cursor = connection.cursor()
    cursor.execute("SELECT mac,ip FROM maps WHERE parentgroup=%s", (group,))
    rows = cursor.fetchall()
    fragment = render.table(rows, group)
    cursor.close()
    pool.putconn(connection)
    return fragment
class maps:
    """
    Web controller for the /maps page: lists and edits the MAC-to-IP
    mappings of a single group.
    """
    def GET(self):
        """
        Validates the requested group id against the groups table and renders
        its mappings, or an error message for an unknown group.
        """
        checkauthorisation()
        group = web.input(group=None).group
        conn = pool.getconn()
        try:
            cur = conn.cursor()
            try:
                cur.execute("SELECT name FROM groups WHERE id=%s", (group,))
                group = group if (cur.rowcount > 0) else None
            finally:
                cur.close()
        finally:
            pool.putconn(conn)
        if group is not None:
            return render.layout(getmapstable(group))
        else:
            return render.layout("Unrecognized Group")

    def POST(self):
        """
        Deletes a single mapping (delmac) or bulk-adds mappings for a
        comma-separated list of MACs (add_mac), assigning each the next free
        IP in the group's range.

        All values are bound as query parameters; the previous implementation
        built the INSERT by string interpolation, allowing SQL injection via
        the MAC form field. The connection is now also returned to the pool
        on the no-op fall-through branch, which previously leaked it.
        """
        checkauthorisation()
        conn = pool.getconn()
        try:
            cur = conn.cursor()
            try:
                if not web.input(delmac=None).delmac is None:
                    cur.execute("DELETE FROM maps WHERE mac=%s", (web.input().delmac,))
                    conn.commit()
                elif not web.input(add_mac=None).add_mac is None:
                    cur.execute("SELECT subnet,serial FROM subnets LIMIT 1")
                    (subnet, serial) = cur.fetchone()
                    cur.execute("SELECT start_ip_int,end_ip_int FROM groups WHERE id=%s", (web.input().group,))
                    ip_range = cur.fetchone()
                    macs = web.input().macs.split(",")
                    cur.execute(
                     "select t.ip as missing from generate_series(%s, %s) t(ip) left join maps on (t.ip = maps.ip) where maps.ip is null limit %s",
                     (ip_range[0], ip_range[1], len(macs))
                    )
                    ips = cur.fetchall()
                    #NOTE(review): if fewer free IPs exist than MACs were
                    #submitted, this raises IndexError, as before.
                    cur.executemany(
                     "INSERT INTO maps (mac,ip,hostname,subnet,serial,parentgroup) VALUES (%s,%s,NULL,%s,%s,%s)",
                     [(mac, ips[idx][0], subnet, serial, web.input().group) for (idx, mac) in enumerate(macs)]
                    )
                    conn.commit()
            finally:
                cur.close()
        finally:
            pool.putconn(conn)
        return web.seeother("/maps?group=" + web.input().group)
def webserver():
    """
    Starts the blocking web.py application loop for this interface.
    """
    application = web.application(urls, globals())
    application.run()
| Python |
# -*- encoding: utf-8 -*-
"""
staticDHCPd module: src.sql
Purpose
=======
Provides a uniform datasource API, selecting from multiple backends,
for a staticDHCPd server.
Legal
=====
This file is part of staticDHCPd.
staticDHCPd is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2011 <red.hamsterx@gmail.com>
(C) Matthew Boedicker, 2011 <matthewm@boedicker.org>
"""
################################################################################
# The decision of which engine to use occurs at the bottom of this module #
# The chosen class is made accessible via the module-level SQL_BROKER variable #
# The chosen module is accessible via the module-level SQL_MODULE variable #
################################################################################
import threading
import src.conf_buffer as conf
import src.logging
import src.ipcalc
class _SQLBroker(object):
    """
    A stub documenting the features an _SQLBroker object must provide.
    """
    _resource_lock = None #: A lock used to prevent the database from being overwhelmed.
    _cache_lock = None #: A lock used to prevent multiple simultaneous cache updates.
    _mac_cache = None #: A cache used to prevent unnecessary database hits.
    _subnet_cache = None #: A cache used to prevent unnecessary database hits.

    def _getConnection(self):
        """
        Provides a connection to the database.

        @return: The connection object to be used.

        @raise Exception: If a problem occurs while accessing the database.
        """
        raise NotImplementedError("_getConnection must be overridden")

    def _setupBroker(self, concurrency_limit):
        """
        Sets up common attributes of broker objects.

        @type concurrency_limit: int
        @param concurrency_limit: The number of concurrent database hits to
            permit.
        """
        #The semaphore bounds simultaneous lookups; the cache is shared.
        self._resource_lock = threading.BoundedSemaphore(concurrency_limit)
        self._setupCache()

    def _setupCache(self):
        """
        Sets up the SQL broker cache.
        """
        self._cache_lock = threading.Lock()
        self._mac_cache = {}
        self._subnet_cache = {}

    def flushCache(self):
        """
        Resets the cache to an empty state, forcing all lookups to pull fresh
        data.
        """
        if conf.USE_CACHE:
            self._cache_lock.acquire()
            try:
                self._mac_cache = {}
                self._subnet_cache = {}
                src.logging.writeLog("Flushed DHCP cache")
            finally:
                self._cache_lock.release()

    def lookupMAC(self, mac):
        """
        Queries the database for the given MAC address and returns the IP and
        associated details if the MAC is known.

        If enabled, the cache is checked and updated by this function.

        @type mac: basestring
        @param mac: The MAC address to lookup.

        @rtype: tuple(11)|None
        @return: (ip:basestring, hostname:basestring|None,
            gateway:basestring|None, subnet_mask:basestring|None,
            broadcast_address:basestring|None,
            domain_name:basestring|None, domain_name_servers:basestring|None,
            ntp_servers:basestring|None, lease_time:int,
            subnet:basestring, serial:int) or None if no match was
            found.

        @raise Exception: If a problem occurs while accessing the database.
        """
        if conf.USE_CACHE:
            self._cache_lock.acquire()
            try:
                data = self._mac_cache.get(mac)
                if data:
                    (ip, hostname, subnet_id) = data
                    #Reassemble the 11-tuple from the two caches; subnet_id is
                    #itself the (subnet, serial) tail of the tuple.
                    return (ip, hostname,) + self._subnet_cache[subnet_id] + subnet_id
            finally:
                self._cache_lock.release()
        #Cache miss (or caching disabled): hit the database, bounded by the
        #concurrency semaphore.
        self._resource_lock.acquire()
        try:
            data = self._lookupMAC(mac)
            if conf.USE_CACHE:
                if data:
                    (ip, hostname,
                     gateway, subnet_mask, broadcast_address,
                     domain_name, domain_name_servers, ntp_servers,
                     lease_time, subnet, serial) = data
                    subnet_id = (subnet, serial)
                    self._cache_lock.acquire()
                    try:
                        #Split the record so subnet-level details are stored
                        #once and shared by every MAC in the same subnet.
                        self._mac_cache[mac] = (ip, hostname, subnet_id,)
                        self._subnet_cache[subnet_id] = (
                         gateway, subnet_mask, broadcast_address,
                         domain_name, domain_name_servers, ntp_servers,
                         lease_time,
                        )
                    finally:
                        self._cache_lock.release()
            return data
        finally:
            self._resource_lock.release()
class _DB20Broker(_SQLBroker):
    """
    Defines behaviour for a DB API 2.0-compatible broker.
    """
    _module = None #: The db2api-compliant module to use.
    _connection_details = None #: The module-specific details needed to connect to a database.
    _query_mac = None #: The string used to look up a MAC's binding.

    def _lookupMAC(self, mac):
        """
        Queries the database for the given MAC address and returns the IP and
        associated details if the MAC is known.

        @type mac: basestring
        @param mac: The MAC address to lookup.

        @rtype: tuple(11)|None
        @return: (ip:basestring, hostname:basestring|None,
            gateway:basestring|None, subnet_mask:basestring|None,
            broadcast_address:basestring|None,
            domain_name:basestring|None, domain_name_servers:basestring|None,
            ntp_servers:basestring|None, lease_time:int,
            subnet:basestring, serial:int) or None if no match was
            found.

        @raise Exception: If a problem occurs while accessing the database.
        """
        try:
            db = self._getConnection()
            cur = db.cursor()
            cur.execute(self._query_mac, (mac,))
            result = cur.fetchone()
            if result is None:
                #Bug fix: an unknown MAC previously raised TypeError while
                #indexing None instead of returning None as documented.
                return None
            #Normalise the IP through ipcalc so every backend returns the
            #same dotted-quad string form.
            return (
             str(src.ipcalc.IP(result[0])),
             result[1], result[2], result[3], result[4], result[5],
             result[6], result[7], result[8], result[9], result[10],
            )
        finally:
            #Best-effort cleanup: either object may not exist if connecting
            #or cursor-creation failed.
            try:
                cur.close()
            except Exception:
                pass
            try:
                db.close()
            except Exception:
                pass
class _PoolingBroker(_DB20Broker):
    """
    Defines behaviour for a connection-pooling-capable DB API 2.0-compatible
    broker.
    """
    _pool = None #: The database connection pool.
    _eventlet__db_pool = None #: A reference to the eventlet.db_pool module.

    def _setupBroker(self, concurrency_limit):
        """
        Sets up connection-pooling, if it's supported by the environment.

        Also completes the broker-setup process.

        L{_connection_details} must be defined before calling this function.

        @type concurrency_limit: int
        @param concurrency_limit: The number of concurrent database hits to
            permit.
        """
        _DB20Broker._setupBroker(self, concurrency_limit)
        if conf.USE_POOL:
            try:
                import eventlet.db_pool
                self._eventlet__db_pool = eventlet.db_pool
            except ImportError:
                #eventlet is optional; without it, each lookup simply opens
                #its own connection via the non-pooled path below.
                return
            else:
                self._pool = self._eventlet__db_pool.ConnectionPool(
                 SQL_MODULE,
                 max_size=concurrency_limit, max_idle=30, max_age=600, connect_timeout=5,
                 **self._connection_details
                )

    def _getConnection(self):
        """
        Provides a connection to the database.

        @return: The connection object to be used.

        @raise Exception: If a problem occurs while accessing the database.
        """
        if not self._pool is None:
            #The wrapper returns the connection to the pool on close().
            return self._eventlet__db_pool.PooledConnectionWrapper(self._pool.get(), self._pool)
        else:
            return SQL_MODULE.connect(**self._connection_details)
class _NonPoolingBroker(_DB20Broker):
    """
    Defines behaviour for a non-connection-pooling-capable DB API 2.0-compatible
    broker.
    """
    def _getConnection(self):
        """
        Opens a fresh connection to the database on every call.

        @return: The connection object to be used.

        @raise Exception: If a problem occurs while accessing the database.
        """
        connection = SQL_MODULE.connect(**self._connection_details)
        return connection
class _MySQL(_PoolingBroker):
    """
    Implements a MySQL broker.
    """
    _query_mac = """
     SELECT
      m.ip, m.hostname,
      g.gateway, s.subnet_mask, s.broadcast_address, g.domain_name, g.domain_name_servers,
      g.ntp_servers, s.lease_time, s.subnet, s.serial
     FROM maps m, subnets s, groups g
     WHERE
      m.mac = %s AND m.subnet = s.subnet AND m.serial = s.serial AND g.id = m.parentgroup
     LIMIT 1
    """

    def __init__(self):
        """
        Initialises MySQL connection parameters from the configuration.
        """
        details = {
         'db': conf.MYSQL_DATABASE,
         'user': conf.MYSQL_USERNAME,
         'passwd': conf.MYSQL_PASSWORD,
        }
        if conf.MYSQL_HOST is None:
            details['host'] = 'localhost'
        else:
            details['host'] = conf.MYSQL_HOST
            details['port'] = conf.MYSQL_PORT
        self._connection_details = details
        self._setupBroker(conf.MYSQL_MAXIMUM_CONNECTIONS)
class _PostgreSQL(_PoolingBroker):
    """
    Implements a PostgreSQL broker.
    """
    _query_mac = """
     SELECT
      m.ip, m.hostname,
      g.gateway, s.subnet_mask, s.broadcast_address, g.domain_name, g.domain_name_servers,
      g.ntp_servers, s.lease_time, s.subnet, s.serial
     FROM maps m, subnets s, groups g
     WHERE
      m.mac = %s AND m.subnet = s.subnet AND m.serial = s.serial AND g.id = m.parentgroup
     LIMIT 1
    """

    def __init__(self):
        """
        Initialises PostgreSQL connection parameters from the configuration.
        """
        details = {
         'database': conf.POSTGRESQL_DATABASE,
         'user': conf.POSTGRESQL_USERNAME,
         'password': conf.POSTGRESQL_PASSWORD,
        }
        if conf.POSTGRESQL_HOST is not None:
            details['host'] = conf.POSTGRESQL_HOST
            details['port'] = conf.POSTGRESQL_PORT
            details['sslmode'] = conf.POSTGRESQL_SSLMODE
        self._connection_details = details
        self._setupBroker(conf.POSTGRESQL_MAXIMUM_CONNECTIONS)
class _Oracle(_PoolingBroker):
    """
    Implements an Oracle broker.
    """
    #Bug fix: Oracle does not support the LIMIT clause; the single-row
    #restriction is expressed with the ROWNUM pseudocolumn instead.
    _query_mac = """
     SELECT
      m.ip, m.hostname,
      g.gateway, s.subnet_mask, s.broadcast_address, g.domain_name, g.domain_name_servers,
      g.ntp_servers, s.lease_time, s.subnet, s.serial
     FROM maps m, subnets s, groups g
     WHERE
      m.mac = :1 AND m.subnet = s.subnet AND m.serial = s.serial AND g.id = m.parentgroup
      AND ROWNUM = 1
    """

    def __init__(self):
        """
        Initialises Oracle connection parameters from the configuration.
        """
        self._connection_details = {
         'user': conf.ORACLE_USERNAME,
         'password': conf.ORACLE_PASSWORD,
         'dsn': conf.ORACLE_DATABASE,
        }
        self._setupBroker(conf.ORACLE_MAXIMUM_CONNECTIONS)
class _SQLite(_NonPoolingBroker):
    """
    Implements a SQLite broker.
    """
    _query_mac = """
     SELECT
      m.ip, m.hostname,
      g.gateway, s.subnet_mask, s.broadcast_address, g.domain_name, g.domain_name_servers,
      g.ntp_servers, s.lease_time, s.subnet, s.serial
     FROM maps m, subnets s, groups g
     WHERE
      m.mac = ? AND m.subnet = s.subnet AND m.serial = s.serial AND g.id = m.parentgroup
     LIMIT 1
    """

    def __init__(self):
        """
        Initialises the path to the SQLite database file; SQLite cannot pool
        connections, so concurrency is fixed at one.
        """
        self._connection_details = {'database': conf.SQLITE_FILE}
        self._setupBroker(1)
#Decide which SQL engine to use and store the class in SQL_BROKER
#################################################################
SQL_BROKER = None #: The class of the SQL engine to use.
SQL_MODULE = None #: The module of the SQL engine to use.
#Importing the driver under the SQL_MODULE alias lets the broker classes stay
#engine-agnostic: they all connect through SQL_MODULE.connect().
if conf.DATABASE_ENGINE == 'MySQL':
    import MySQLdb as SQL_MODULE
    SQL_BROKER = _MySQL
elif conf.DATABASE_ENGINE == 'PostgreSQL':
    import psycopg2 as SQL_MODULE
    SQL_BROKER = _PostgreSQL
elif conf.DATABASE_ENGINE == 'Oracle':
    import cx_Oracle as SQL_MODULE
    SQL_BROKER = _Oracle
elif conf.DATABASE_ENGINE == 'SQLite':
    import sqlite3 as SQL_MODULE
    SQL_BROKER = _SQLite
else:
    #Fail fast at import-time on a misconfigured engine name.
    raise ValueError("Unknown database engine: %(engine)s" % {
     'engine': conf.DATABASE_ENGINE
    })
| Python |
# -*- encoding: utf-8 -*-
"""
staticDHCPd module: src.web
Purpose
=======
Provides a web interface for viewing and interacting with a staticDHCPd server.
Legal
=====
This file is part of staticDHCPd.
staticDHCPd is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2009 <red.hamsterx@gmail.com>
"""
import BaseHTTPServer
import cgi
import hashlib
import os
import select
import threading
import time
try:
from urlparse import parse_qs
except:
from cgi import parse_qs
import src.conf_buffer as conf
import src.dhcp
import src.logging
import src
class _WebServer(BaseHTTPServer.BaseHTTPRequestHandler):
    """
    The handler that responds to all received HTTP requests.
    """
    _allowed_pages = ('/', '/index.html',) #: A collection of all paths that will be allowed.

    def do_GET(self):
        """
        Handles all HTTP GET requests.
        """
        if not self.path in self._allowed_pages:
            self.send_response(404)
            return
        self._doResponse()

    def do_HEAD(self):
        """
        Handles all HTTP HEAD requests.

        This involves lying about the existence of files and telling the browser
        to always pull a fresh copy.
        """
        if not self.path in self._allowed_pages:
            self.send_response(404)
            return
        try:
            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            #Always report "just modified" so clients never serve a stale copy.
            self.send_header('Last-modified', time.strftime('%a, %d %b %Y %H:%M:%S %Z'))
            self.end_headers()
        except Exception, e:
            src.logging.writeLog("Problem while processing HEAD in Web module: %(error)s" % {'error': str(e),})

    def do_POST(self):
        """
        Handles all HTTP POST requests.

        This checks to see if the user entered the flush key and, if so,
        flushes the cache and writes the memory-log to disk.
        """
        try:
            (ctype, pdict) = cgi.parse_header(self.headers.getheader('content-type'))
            if ctype == 'application/x-www-form-urlencoded':
                query = parse_qs(self.rfile.read(int(self.headers.getheader('content-length'))))
                key = query.get('key')
                if key:
                    #The submitted key is MD5-hashed and compared to the
                    #configured digest before any action is taken.
                    if hashlib.md5(key[0]).hexdigest() == conf.WEB_RELOAD_KEY:
                        src.dhcp.flushCache()
                        if src.logging.logToDisk():
                            src.logging.writeLog("Wrote log to '%(log)s'" % {'log': conf.LOG_FILE,})
                        else:
                            src.logging.writeLog("Unable to write log to '%(log)s'" % {'log': conf.LOG_FILE,})
                    else:
                        src.logging.writeLog("Invalid Web-access-key provided")
        except Exception, e:
            src.logging.writeLog("Problem while processing POST in Web module: %(error)s" % {'error': str(e),})
        #Regardless of outcome, re-render the page so the user sees the result.
        self._doResponse()

    def _doResponse(self):
        """
        Renders the current state of the memory-log as HTML for consumption by
        the client.
        """
        try:
            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            self.send_header('Last-modified', time.strftime('%a, %d %b %Y %H:%M:%S %Z'))
            self.end_headers()
            self.wfile.write('<html><head><title>%(name)s log</title></head><body>' % {'name': conf.SYSTEM_NAME,})
            self.wfile.write('<div style="width: 950px; margin-left: auto; margin-right: auto; border: 1px solid black;">')
            #Polling statistics, newest first.
            self.wfile.write('<div>Statistics:<div style="text-size: 0.9em; margin-left: 20px;">')
            for (timestamp, packets, discarded, time_taken, ignored_macs) in src.logging.readPollRecords():
                if packets:
                    turnaround = time_taken / packets
                else:
                    turnaround = 0.0
                self.wfile.write("%(time)s : received: %(received)i; discarded: %(discarded)i; turnaround: %(turnaround)fs/pkt; ignored MACs: %(ignored)i<br/>" % {
                 'time': time.ctime(timestamp),
                 'received': packets,
                 'discarded': discarded,
                 'turnaround': turnaround,
                 'ignored': ignored_macs,
                })
            self.wfile.write("</div></div><br/>")
            #Event log, newest first; lines are HTML-escaped before emission.
            self.wfile.write('<div>Events:<div style="text-size: 0.9em; margin-left: 20px;">')
            for (timestamp, line) in src.logging.readLog():
                self.wfile.write("%(time)s : %(line)s<br/>" % {
                 'time': time.ctime(timestamp),
                 'line': cgi.escape(line),
                })
            self.wfile.write("</div></div><br/>")
            #Footer: server identity and the flush/write-log form.
            self.wfile.write('<div style="text-align: center;">')
            self.wfile.write('<small>Summary generated %(time)s</small><br/>' % {
             'time': time.asctime(),
            })
            self.wfile.write('<small>%(server)s:%(port)i | PID: %(pid)i | v%(core_version)s | <a href="http://uguu.ca/" onclick="window.open(this.href); return false;">uguu.ca</a></small><br/>' % {
             'pid': os.getpid(),
             'server': conf.DHCP_SERVER_IP,
             'port': conf.DHCP_SERVER_PORT,
             'core_version': src.VERSION,
            })
            self.wfile.write('<form action="/" method="post"><div style="display: inline;">')
            self.wfile.write('<label for="key">Key: </label><input type="password" name="key" id="key"/>')
            if conf.USE_CACHE:
                self.wfile.write('<input type="submit" value="Flush cache and write log to disk"/>')
            else:
                self.wfile.write('<input type="submit" value="Write log to disk"/>')
            self.wfile.write('</div></form>')
            self.wfile.write('</div>')
            self.wfile.write("</div></body></html>")
        except Exception, e:
            src.logging.writeLog("Problem while serving response in Web module: %(error)s" % {'error': str(e),})

    def log_message(*args):
        """
        Just a stub to suppress automatic webserver log messages.
        """
        pass
class WebService(threading.Thread):
    """
    A thread that handles HTTP requests indefinitely, daemonically.
    """
    _web_server = None #: The handler that responds to HTTP requests.

    def __init__(self):
        """
        Sets up the Web server.

        @raise Exception: If a problem occurs while binding the sockets needed
            to handle HTTP traffic.
        """
        threading.Thread.__init__(self)
        self.daemon = True
        self._web_server = BaseHTTPServer.HTTPServer(
         (
          #Round-trips each octet through int() to validate/normalise the
          #configured bind address before use.
          '.'.join([str(int(o)) for o in conf.WEB_IP.split('.')]),
          int(conf.WEB_PORT)
         ),
         _WebServer
        )
        src.logging.writeLog('Configured Web server')

    def run(self):
        """
        Runs the Web server indefinitely.

        In the event of an unexpected error, e-mail will be sent and processing
        will continue with the next request.
        """
        src.logging.writeLog('Running Web server')
        while True:
            try:
                self._web_server.handle_request()
            except select.error:
                #Interrupted system calls are routine (e.g. on signals).
                src.logging.writeLog('Suppressed non-fatal select() error in Web module')
            except Exception, e:
                src.logging.sendErrorReport('Unhandled exception', e)
| Python |
# -*- encoding: utf-8 -*-
"""
staticDHCPd module: src.logging
Purpose
=======
Provides a means of logging information for a staticDHCPd server.
Legal
=====
This file is part of staticDHCPd.
staticDHCPd is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2009 <red.hamsterx@gmail.com>
"""
import email
import smtplib
import traceback
import threading
import time
import conf_buffer as conf #For whatever reason, src.conf_buffer makes this fail.
#Module-level state: each collection is guarded by its own lock; all public
#functions below follow an acquire/try/finally discipline around access.
_LOG_LOCK = threading.Lock() #: A lock used to synchronize access to the memory-log.
_LOG = [] #: The memory-log.
_POLL_RECORDS_LOCK = threading.Lock() #: A lock used to synchronize access to the stats-log.
_POLL_RECORDS = [] #: The stats-log.
_EMAIL_LOCK = threading.Lock() #: A lock used to synchronize access to the e-mail routines.
_EMAIL_TIMEOUT = 0 #: The number of seconds left before another e-mail can be sent.
#Status-recording functions
def writeLog(data):
"""
Adds an entry to the memory-log.
@type data: basestring
@param data: The event to be logged.
"""
global _LOG
_LOG_LOCK.acquire()
try:
_LOG = [(time.time(), data)] + _LOG[:conf.LOG_CAPACITY - 1]
if conf.DEBUG:
print '%(time)s : %(event)s' % {
'time': time.asctime(),
'event': data,
}
finally:
_LOG_LOCK.release()
def readLog():
    """
    Returns a static, immutable copy of the memory-log.

    @rtype: tuple
    @return: A collection of (timestamp:float, details:basestring) values, in
        reverse-chronological order.
    """
    with _LOG_LOCK:
        return tuple(_LOG)
def writePollRecord(packets, discarded, time_taken, ignored_macs):
    """
    Prepends a statistics snapshot to the bounded stats-log.

    @type packets: int
    @param packets: The number of packets processed.
    @type discarded: int
    @param discarded: The number of processed packets that were discarded
        before being fully processed.
    @type time_taken: float
    @param time_taken: The number of seconds spent handling all received,
        non-ignored requests.
    @type ignored_macs: int
    @param ignored_macs: The number of MAC addresses being actively ignored.
    """
    global _POLL_RECORDS
    with _POLL_RECORDS_LOCK:
        record = (time.time(), packets, discarded, time_taken, ignored_macs)
        _POLL_RECORDS = [record] + _POLL_RECORDS[:conf.POLL_INTERVALS_TO_TRACK - 1]
def readPollRecords():
    """
    Returns a static, immutable copy of the stats-log.

    @rtype: tuple
    @return: A collection of (timestamp:float, processed:int, discarded:int,
        processing_time:float, ignored_macs:int) values, in
        reverse-chronological order.
    """
    with _POLL_RECORDS_LOCK:
        return tuple(_POLL_RECORDS)
#Logging functions
def logToDisk():
"""
Writes the current memory-log and stats-log to disk, making it possible to
export information for use by a developer or to track a misbehaving client.
If logging fails, a message will be written to the memory-log.
@rtype: bool
@return: True if the logfile was written.
"""
try:
log_file = None
if conf.LOG_FILE_TIMESTAMP:
log_file = open(conf.LOG_FILE + time.strftime(".%Y%m%d%H%M%S"), 'w')
else:
log_file = open(conf.LOG_FILE, 'w')
log_file.write("Summary generated %(time)s\n" % {'time': time.asctime(),})
log_file.write("\nStatistics:\n")
for (timestamp, packets, discarded, time_taken, ignored_macs) in readPollRecords():
if packets:
turnaround = time_taken / packets
else:
turnaround = 0.0
log_file.write("%(time)s : received: %(received)i; discarded: %(discarded)i; turnaround: %(turnaround)fs/pkt; ignored MACs: %(ignored)i\n" % {
'time': time.ctime(timestamp),
'received': packets,
'discarded': discarded,
'turnaround': turnaround,
'ignored': ignored_macs,
})
log_file.write("\nEvents:\n")
for (timestamp, line) in readLog():
log_file.write("%(time)s : %(line)s\n" % {
'time': time.ctime(timestamp),
'line': line,
})
log_file.close()
return True
except Exception, e:
writeLog('Writing to disk failed: %(error)s' % {'error': str(e),})
return False
#E-mail functions
def emailTimeoutCooldown():
    """
    Ticks the e-mail timeout value, possibly allowing another e-mail to be sent.

    The lock is released in a finally-clause so an exception while reading the
    configuration can no longer leave the e-mail subsystem deadlocked.
    """
    global _EMAIL_TIMEOUT
    _EMAIL_LOCK.acquire()
    try:
        _EMAIL_TIMEOUT = max(0, _EMAIL_TIMEOUT - conf.POLLING_INTERVAL)
    finally:
        _EMAIL_LOCK.release()
def _buildEmail(subject, report):
    """
    Assembles a MIME message addressed per the e-mail configuration.

    @type subject: basestring
    @param subject: The subject-line of the message.
    @type report: basestring
    @param report: The plain-text body of the message.

    @rtype: C{email.MIMEMultipart.MIMEMultipart}
    @return: The prepared, as-yet-unsent message.
    """
    message = email.MIMEMultipart.MIMEMultipart()
    message['From'] = conf.EMAIL_SOURCE
    message['To'] = conf.EMAIL_DESTINATION
    message['Date'] = email.Utils.formatdate(localtime=True)
    message['Subject'] = subject
    message.attach(email.MIMEText.MIMEText(report))
    return message
def _sendEmail(message):
    """
    Sends the given message via the e-mail subsystem.

    @type message: C{email.MIMEMultipart.MIMEMultipart}
    @param message: The message to be sent.

    @raise Exception: A problem occurred while sending the message.
    """
    #Fall back to port 25 / a 10s timeout when unset or falsy in conf.
    port = getattr(conf, 'EMAIL_PORT', None) or 25
    timeout = getattr(conf, 'EMAIL_TIMEOUT', None) or 10
    smtp_server = smtplib.SMTP(host=conf.EMAIL_SERVER, port=port, timeout=timeout)
    if conf.EMAIL_USER:
        smtp_server.login(conf.EMAIL_USER, conf.EMAIL_PASSWORD)
    smtp_server.sendmail(
     conf.EMAIL_SOURCE,
     (conf.EMAIL_DESTINATION,),
     message.as_string()
    )
    smtp_server.close()
def sendErrorReport(summary, exception):
    """
    Sends e-mail using the config options specified, if e-mail is enabled.

    Since it's highly likely that any error that needs to be reported will fire
    for most, if not all, DHCP requests received, a cooldown is imposed to avoid
    flooding the recipient's inbox too quickly.

    If this function is unable to send e-mail, a summary of the error being
    reported will be written to the memory-log.

    @type summary: basestring
    @param summary: A short description of the error, including a probable
        cause, if known.
    @type exception: Exception
    @param exception: The C{Exception} raised to result in this message being
        sent.
    """
    report ="""
A problem occurred with the DHCP server running on %(server)s.

Given description:
\t%(summary)s

Exception type:
\t%(type)s

Exception details:
\t%(details)s

Exception traceback:
%(traceback)s
""" % {
     'server': conf.DHCP_SERVER_IP,
     'summary': summary,
     'type': str(type(exception)),
     'details': str(exception),
     'traceback': traceback.format_exc(),
    }
    if conf.DEBUG:
        print report
    if not conf.EMAIL_ENABLED:
        writeLog(report)
        return
    #Cooldown gate: bail out silently if a report was sent too recently;
    #otherwise arm the timeout before attempting delivery.
    global _EMAIL_TIMEOUT
    _EMAIL_LOCK.acquire()
    try:
        if _EMAIL_TIMEOUT > 0:
            return
        _EMAIL_TIMEOUT = conf.EMAIL_TIMEOUT
    finally:
        _EMAIL_LOCK.release()
    try:
        _sendEmail(
         _buildEmail(
          'Problem with DHCP server',
          report
         )
        )
        writeLog("E-mail about '%(error)s' sent to %(destination)s" % {
         'error': str(exception),
         'destination': conf.EMAIL_DESTINATION,
        })
    except Exception, e:
        #Delivery failed: preserve the report in the memory-log instead.
        writeLog("Unable to send e-mail about '%(error)s': %(e)s" % {
         'error': str(exception),
         'e': str(e),
        })
        writeLog(report)
def sendDeclineReport(mac, ip_4, subnet, subnet_serial):
    """
    Sends e-mail using the config options specified, if e-mail is enabled.

    Unlike L{sendErrorReport}, no cooldown is applied here: DHCPDECLINE events
    are rare and each one may require manual intervention.

    @type mac: basestring
    @param mac: The MAC of the host that identified the conflict.
    @type ip_4: basestring
    @param ip_4: The IPv4 that resulted in a DHCPDECLINE.
    @type subnet: basestring
    @param subnet: The subnet on which the conflict occurred.
    @type subnet_serial: int
    @param subnet_serial: The serial of the subnet on which the conflict
        occurred.
    """
    report ="""
A duplicate IPv4 address assignment was attempted by the DHCP server running on
%(server)s.

Manual intervention may be required.

Reporting MAC:
\t%(mac)s

Affected IPv4:
\t%(ip_4)s

Subnet:
\t(%(subnet)s, %(subnet_serial)i)
""" % {
     'server': conf.DHCP_SERVER_IP,
     'mac': mac,
     'ip_4': ip_4,
     'subnet': subnet,
     'subnet_serial': subnet_serial,
    }
    if conf.DEBUG:
        print report
    if not conf.EMAIL_ENABLED:
        writeLog(report)
        return
    try:
        _sendEmail(
         _buildEmail(
          'Duplicate IPv4 assignment detected by DHCP server',
          report
         )
        )
        writeLog("E-mail about DHCPDECLINE from '%(mac)s' sent to %(destination)s" % {
         'mac': mac,
         'destination': conf.EMAIL_DESTINATION,
        })
    except Exception, e:
        #Delivery failed: preserve the report in the memory-log instead.
        writeLog("Unable to send e-mail about DHCPDECLINE from '%(mac)s': %(e)s" % {
         'mac': mac,
         'e': str(e),
        })
        writeLog(report)
| Python |
# -*- encoding: utf-8 -*-
"""
staticDHCPd package: src
Purpose
=======
Provides the logical implementation of a staticDHCPd daemon.
Legal
=====
This file is part of staticDHCPd.
staticDHCPd is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2011 <red.hamsterx@gmail.com>
"""
VERSION = '1.5.6' #: The core staticDHCPd version-string, displayed in the web interface.
| Python |
# -*- encoding: utf-8 -*-
"""
libpydhcpserver module: type_strlist
Purpose
=======
Defines the libpydhcpserver-specific strlist type.
Legal
=====
This file is part of libpydhcpserver.
libpydhcpserver is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2010 <red.hamsterx@gmail.com>
(C) Mathieu Ignacio, 2008 <mignacio@april.org>
"""
class strlist(object):
    """
    Evaluates and encodes a string for use as part of a DHCP packet, keeping
    both a human-readable form and a packet-encodable byte sequence.
    """
    _list = None #: The value as an encoded sequence of byte-values.
    _str = None #: The value as human-readable text.
    
    def __init__(self, data=""):
        """
        Normalises the given value into both text and byte-sequence forms.
        
        @type data: str|list|tuple
        @param data: The data to be processed.
        
        @raise TypeError: Unsupported data-type was supplied.
        """
        data_type = type(data)
        if data_type is str:
            self._str = data
            self._list = map(ord, data)
        elif data_type is list or data_type is tuple:
            self._list = list(data)
            self._str = ''.join(map(chr, self._list))
        else:
            raise TypeError('Expected str or [int]; got %(type)s' % {
             'type': data_type,
            })
            
    def list(self):
        """
        Returns the packet-encodable data contained within this object.
        
        @rtype: list
        @return: A collection of bytes.
        """
        return self._list
        
    def str(self):
        """
        Returns the human-readable data contained within this object.
        
        @rtype: str
        @return: A human-readable value.
        """
        return self._str
        
    def __cmp__(self, other):
        #Equality-only comparison: 0 when equal, 1 otherwise.
        return 0 if self._str == other else 1
        
    def __hash__(self):
        return hash(self._str)
        
    def __nonzero__(self):
        #Truthiness mirrors that of the underlying string.
        return 1 if self._str else 0
        
    def __repr__(self):
        return self._str
| Python |
# -*- encoding: utf-8 -*-
"""
libpydhcpserver module: dhcp_constants
Purpose
=======
Contains constants needed by libpydhcpserver.
Legal
=====
This file is part of libpydhcpserver.
libpydhcpserver is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2010 <red.hamsterx@gmail.com>
(C) Mathieu Ignacio, 2008 <mignacio@april.org>
"""
MAGIC_COOKIE = [99,130,83,99] #: The DHCP magic cookie value, defined in RFC 1048.
DHCP_FIELDS_NAMES = {
'op': {0: 'ERROR_UNDEF', 1: 'BOOTREQUEST', 2: 'BOOTREPLY',},
'dhcp_message_type': {
0: 'ERROR_UNDEF',
1: 'DHCP_DISCOVER', 2: 'DHCP_OFFER',
3: 'DHCP_REQUEST', 4:'DHCP_DECLINE',
5: 'DHCP_ACK', 6: 'DHCP_NACK',
7: 'DHCP_RELEASE',
8: 'DHCP_INFORM',
9: 'DHCP_FORCERENEW',
10: 'DHCP_LEASEQUERY', 11: 'DHCP_LEASEUNASSIGNED',
12: 'DHCP_LEASEUNKNOWN', 13: 'DHCP_LEASEACTIVE',
}
} #: Mapping from DHCP option values to human-readable names.
DHCP_NAMES = {
'ERROR_UNDEF': 0,
'BOOTREQUEST': 1, 'BOOTREPLY': 2,
'DHCP_DISCOVER': 1, 'DHCP_OFFER': 2,
'DHCP_REQUEST': 3, 'DHCP_DECLINE': 4,
'DHCP_ACK': 5, 'DHCP_NACK': 6,
'DHCP_RELEASE': 7,
'DHCP_INFORM': 8,
'DHCP_FORCERENEW': 9,
'DHCP_LEASEQUERY': 10, 'DHCP_LEASEUNASSIGNED': 11,
'DHCP_LEASEUNKNOWN': 12, 'DHCP_LEASEACTIVE': 13,
} #: Mapping from human-readable names to DHCP option values.
DHCP_FIELDS = {
'op': (0, 1),
'htype': (1, 1),
'hlen': (2, 1),
'hops': (3, 1),
'xid': (4, 4),
'secs': (8, 2),
'flags': (10, 2),
'ciaddr': (12, 4),
'yiaddr': (16, 4),
'siaddr': (20, 4),
'giaddr': (24, 4),
'chaddr': (28, 6),
'sname': (44, 64),
'file': (108, 128),
} #: Maps from human-readable option field names their position within the fixed-size core packet body and the length of each field.
DHCP_FIELDS_SPECS = {
"ipv4": (4, 0, 1), "ipv4+": (0, 4, 4), "ipv4*": (0, 0, 4),
"byte": (1, 0, 1), "byte+": (0, 1, 1),
"char": (1, 0, 1), "char+": (0, 1, 1),
"string": (0, 0, 1),
"bool": (1, 0, 1),
"16-bits": (2, 0, 1), "16-bits+": (0, 2, 2),
"32-bits": (4, 0, 1), "32-bits+": (0, 4, 4),
"identifier": (0, 2, 1),
"none": (0, 0, 1),
}
"""
Provides information about how to validate each basic DHCP option type.
The human-readable format-name is mapped against a
(fixed_length, minimum_length, multiple) tuple, which is handled by the
following algorithm:
if C{fixed_length} == 0:
C{minimum_length} and C{multiple} apply
resulting length must be a multiple of C{multiple}
else:
only C{fixed_length} is considered
"""
DHCP_FIELDS_TYPES = {
'op': "byte",
'htype': "byte",
'hlen': "byte",
'hops': "byte",
'xid': "32-bits",
'secs': "16-bits",
'flags': "16-bits",
'ciaddr': "ipv4",
'yiaddr': "ipv4",
'siaddr': "ipv4",
'giaddr': "ipv4",
'chaddr': "hwmac",
'sname': "string",
'file': "string",
} #: Maps human-readable field-names to DHCP fields specs.
DHCP_OPTIONS_TYPES = {
0: "none",
1: "ipv4",
2: "32-bits",
3: "ipv4+",
4: "ipv4+",
5: "ipv4+",
6: "ipv4+",
7: "ipv4+",
8: "ipv4+",
9: "ipv4+",
10: "ipv4+",
11: "ipv4+",
12: "string",
13: "16-bits",
14: "string",
15: "string",
16: "ipv4",
17: "string",
18: "string",
19: "bool",
20: "bool",
21: "ipv4+",
22: "16-bits",
23: "byte",
24: "32-bits",
25: "16-bits+",
26: "16-bits",
27: "bool",
28: "ipv4",
29: "bool",
30: "bool",
31: "bool",
32: "ipv4",
33: "ipv4+",
34: "bool",
35: "32-bits",
36: "bool",
37: "byte",
38: "32-bits",
39: "bool",
40: "string",
41: "ipv4+",
42: "ipv4+",
43: "byte+",
44: "ipv4+",
45: "ipv4+",
46: "byte",
47: "string",
48: "ipv4+",
49: "ipv4+",
50: "ipv4",
51: "32-bits",
52: "byte",
53: "byte",
54: "ipv4",
55: "byte+",
56: "string",
57: "16-bits",
58: "32-bits",
59: "32-bits",
60: "string",
61: "identifier",
62: "string",
63: "byte+",
64: "string",
65: "ipv4+",
66: "string",
67: "string",
68: "ipv4*",
69: "ipv4+",
70: "ipv4+",
71: "ipv4+",
72: "ipv4+",
73: "ipv4+",
74: "ipv4+",
75: "ipv4+",
76: "ipv4+",
77: "RFC3004_77", #Not implemented; not necessary for static model
78: "RFC2610_78", #Implemented
79: "RFC2610_79", #Implemented
80: "none",
81: "string",
82: "byte+",
83: "RFC4174_83", #Implemented
84: "Unassigned",
85: "ipv4+",
86: "byte+",
87: "byte+",
88: "RFC4280_88", #Implemented
89: "ipv4+",
90: "RFC3118_90", #Not implemented; not necessary for static model
91: "32-bits",
92: "ipv4+",
93: "16-bits+",
94: "byte+",
95: "string", #Specifications not published
96: "Unassigned",
97: "byte+",
98: "string",
99: "byte+",
100: "string",
101: "string",
102: "Unassigned", 103: "Unassigned", 104: "Unassigned", 105: "Unassigned",
106: "Unassigned", 107: "Unassigned", 108: "Unassigned", 109: "Unassigned",
110: "Unassigned", 111: "Unassigned",
112: "string", #Specifications not published
113: "string", #Specifications not published
114: "string", #Specifications not published
115: "Unassigned",
116: "bool",
117: "16-bits+",
118: "ipv4",
119: "RFC3397_119", #Implemented
120: "RFC3361_120", #Implemented
121: "byte+",
122: "string",
123: "byte+",
124: "string",
125: "string",
126: "Unassigned", 127: "Unassigned",
128: "string",
129: "string",
130: "string",
131: "string",
132: "string",
133: "string",
134: "string",
135: "string",
136: "ipv4+",
137: "RFC5223_137", #Implemented
138: "ipv4+",
139: "RFC5678_139", #Implemented
140: "RFC5678_140", #Implemented
141: "Unassigned", 142: "Unassigned", 143: "Unassigned", 144: "Unassigned",
145: "Unassigned", 146: "Unassigned", 147: "Unassigned", 148: "Unassigned",
149: "Unassigned",
150: "Unassigned", #TFTP server address
151: "Unassigned", 152: "Unassigned", 153: "Unassigned", 154: "Unassigned",
155: "Unassigned", 156: "Unassigned", 157: "Unassigned", 158: "Unassigned",
159: "Unassigned", 160: "Unassigned", 161: "Unassigned", 162: "Unassigned",
163: "Unassigned", 164: "Unassigned", 165: "Unassigned", 166: "Unassigned",
167: "Unassigned", 168: "Unassigned", 169: "Unassigned", 170: "Unassigned",
171: "Unassigned", 172: "Unassigned", 173: "Unassigned", 174: "Unassigned",
175: "Unassigned", #Etherboot
176: "Unassigned", #IP Telephone
177: "Unassigned", #Etherboot
178: "Unassigned", 179: "Unassigned", 180: "Unassigned", 181: "Unassigned",
182: "Unassigned", 183: "Unassigned", 184: "Unassigned", 185: "Unassigned",
186: "Unassigned", 187: "Unassigned", 188: "Unassigned", 189: "Unassigned",
190: "Unassigned", 191: "Unassigned", 192: "Unassigned", 193: "Unassigned",
194: "Unassigned", 195: "Unassigned", 196: "Unassigned", 197: "Unassigned",
198: "Unassigned", 199: "Unassigned", 200: "Unassigned", 201: "Unassigned",
202: "Unassigned", 203: "Unassigned", 204: "Unassigned", 205: "Unassigned",
206: "Unassigned", 207: "Unassigned",
208: "32-bits",
209: "string",
210: "string",
211: "32-bits",
212: "Unassigned", 213: "Unassigned", 214: "Unassigned", 215: "Unassigned",
216: "Unassigned", 217: "Unassigned", 218: "Unassigned", 219: "Unassigned",
220: "Unassigned", #Subnet Allocation Option
221: "Unassigned", #Virtual Subnet Selection Option
222: "Unassigned", 223: "Unassigned",
224: "Reserved", 225: "Reserved", 226: "Reserved", 227: "Reserved",
228: "Reserved", 229: "Reserved", 230: "Reserved", 231: "Reserved",
232: "Reserved", 233: "Reserved", 234: "Reserved", 235: "Reserved",
236: "Reserved", 237: "Reserved", 238: "Reserved", 239: "Reserved",
240: "Reserved", 241: "Reserved", 242: "Reserved", 243: "Reserved",
244: "Reserved", 245: "Reserved", 246: "Reserved", 247: "Reserved",
248: "Reserved", 249: "Reserved", 250: "Reserved", 251: "Reserved",
252: "Reserved", 253: "Reserved", 254: "Reserved",
255: "none",
}
"""
Maps DHCP option-numbers to DHCP fields specs.
All values derived from http://www.iana.org/assignments/bootp-dhcp-parameters
"""
DHCP_OPTIONS = {
'pad': 0,
# Vendor Extension
'subnet_mask': 1,
'time_offset': 2,
'router': 3,
'time_server': 4,
'name_server': 5,
'domain_name_servers': 6,
'log_server': 7,
'cookie_server': 8,
'lpr_server': 9,
'impress_server': 10,
'resource_location_server': 11,
'hostname': 12,
'bootfile': 13,
'merit_dump_file': 14,
'domain_name': 15,
'swap_server': 16,
'root_path': 17,
'extensions_path': 18,
# IP layer parameters per host
'ip_forwarding': 19,
'nonlocal_source_routing': 20,
'policy_filter': 21,
'maximum_datagram_reassembly_size': 22,
'default_ip_time-to-live': 23,
'path_mtu_aging_timeout': 24,
'path_mtu_table': 25,
# IP layer parameters per interface
'interface_mtu': 26,
'all_subnets_are_local': 27,
'broadcast_address': 28,
'perform_mask_discovery': 29,
'mask_supplier': 30,
'perform_router_discovery': 31,
'router_solicitation_address': 32,
'static_routes': 33,
# link layer parameters per interface
'trailer_encapsulation': 34,
'arp_cache_timeout': 35,
'ethernet_encapsulation': 36,
# TCP parameters
'tcp_default_ttl': 37,
'tcp_keepalive_interval': 38,
'tcp_keepalive_garbage': 39,
# Applications and service parameters
'nis_domain': 40,
'nis_servers': 41,
'ntp_servers': 42,
'vendor_specific_information': 43,
'nbns': 44,
'nbdd': 45,'nb_node_type': 46,
'nb_scope': 47,
'x_window_system_font_server': 48,
'x_window_system_display_manager': 49,
# DHCP extensions
'requested_ip_address': 50,
'ip_address_lease_time': 51,
'overload': 52,
'dhcp_message_type': 53,
'server_identifier': 54,
'parameter_request_list': 55,
'message': 56,
'maximum_dhcp_message_size': 57,
'renewal_time_value': 58,
'rebinding_time_value': 59,
'vendor_class_identifier': 60,
'client_identifier': 61,
# Add from RFC 2132
'netware_ip_domain_name': 62,
'netware_ip_sub_options': 63,
'nis+_domain': 64,
'nis+_servers': 65,
'tftp_server_name': 66,
'bootfile_name': 67,
'mobile_ip_home_agent': 68,
'smtp_servers': 69,
'pop_servers': 70,
'nntp_servers': 71,
'default_www_server': 72,
'default_finger_server': 73,
'default_irc_server': 74,
'streettalk_server': 75,
'streettalk_directory_assistance_server': 76,
'user_class': 77,
'directory_agent': 78,
'service_scope': 79,
'rapid_commit': 80,
'client_fqdn': 81,
'relay_agent': 82,
'internet_storage_name_service': 83,
#Hole.
'nds_server': 85,
'nds_tree_name': 86,
'nds_context': 87,
'bcmcs_domain_list': 88,
'bcmcs_ipv4_list': 89,
'authentication': 90,
'client_last_transaction_time': 91,
'associated_ip': 92,
'client_system': 93,
'client_ndi': 94,
'ldap': 95,
#Hole
'uuid_guid': 97,
'open_group_user_auth': 98,
'geoconf_civic': 99,
'pcode': 100,
'tcode': 101,
#Hole.
'netinfo_address': 112,
'netinfo_tag': 113,
'url': 114,
#Hole.
'auto_config': 116,
'name_service_search': 117,
'subnet_selection': 118,
'domain_search': 119,
'sip_servers': 120,
'classless_static_route': 121,
'cablelabs_client_configuration': 122,
'geoconf': 123,
'vendor_class': 124,
'vendor_specific': 125,
#Hole.
'pxe_128': 128,
'pxe_129': 129,
'pxe_130': 130,
'pxe_131': 131,
'pxe_132': 132,
'pxe_133': 133,
'pxe_134': 134,
'pxe_135': 135,
'pana_agent': 136,
'v4_lost': 137,
'capwap_ac_v4': 138,
'ipv4_mos': 139,
'fqdn_mos': 140,
#Hole.
'pxelinux_magic': 208,
'configuration_file': 209,
'path_prefix': 210,
'reboot_time': 211,
#Hole.
'end': 255
} #: Maps human-readable DHCP option names to integer values.
#items() (rather than the Python-2-only iteritems()) keeps this line portable,
#and feeding dict() a generator avoids building an intermediate list.
DHCP_OPTIONS_REVERSE = dict((value, name) for (name, value) in DHCP_OPTIONS.items()) #: Maps integer values to human-readable DHCP option names.
| Python |
# -*- encoding: utf-8 -*-
"""
libpydhcpserver module: type_ipv4
Purpose
=======
Defines the libpydhcpserver-specific ipv4 type.
Legal
=====
This file is part of libpydhcpserver.
libpydhcpserver is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2010 <red.hamsterx@gmail.com>
(C) Mathieu Ignacio, 2008 <mignacio@april.org>
"""
#Validation helpers for the ipv4 wrapper type defined below.
def checkNumList(value):
    """
    Indicates whether the given sequence consists of exactly four byte-values.
    
    @type value: sequence
    @param value: The value to be tested.
    
    @rtype: bool
    @return: True if validation succeeds.
    """
    if len(value) != 4:
        return False
    return all(0 <= octet <= 255 for octet in value)
def checkString(value):
    """
    Indicates whether the given value is four dot-delimited byte-values.
    
    @type value: basestring
    @param value: The value to be tested.
    
    @rtype: bool
    @return: True if validation succeeds.
    """
    octets = value.strip().split('.')
    #isdigit() rejects signs and junk before int() is ever attempted.
    return len(octets) == 4 and all(
     octet.isdigit() and 0 <= int(octet) <= 255 for octet in octets
    )
class ipv4(object):
    """
    Evaluates and encodes an IPv4 for use as part of a DHCP packet, exposing
    string, byte-list, and integer representations of the same address.
    """
    _ip_long = None #: A long-encoded IPv4.
    _ip_numlist = None #: An encoded list of bytes.
    _ip_string = None #: A human-readable string.
    
    def __init__(self, data="0.0.0.0"):
        """
        Accepts data and ensures that all three representations are made
        available.
        
        @type data: str|list|tuple|int
        @param data: The data to be processed.
        
        @raise TypeError: Unsupported data-type was supplied.
        @raise ValueError: Invalid data supplied.
        """
        #'long' only exists under Python 2; fall back to int alone elsewhere
        #so the integer branch cannot raise NameError.
        try:
            integer_types = (int, long)
        except NameError:
            integer_types = (int,)
            
        if type(data) == str:
            if not checkString(data):
                raise ValueError("'%(ip)s' is not a valid IP" % {
                 'ip': data,
                })
            self._ip_string = data
            self._stringToNumlist()
            self._numlistToLong()
        elif type(data) in (list, tuple):
            if not checkNumList(data):
                raise ValueError("'%(ip)s' is not a valid IP" % {
                 'ip': str(data),
                })
            self._ip_numlist = data
            self._numlistToString()
            self._numlistToLong()
        elif type(data) in integer_types:
            if not 0 <= data <= 4294967295:
                raise ValueError("'%(ip)i' is not a valid IP" % {
                 'ip': data,
                })
            self._ip_long = data
            self._longToNumlist()
            self._numlistToString()
        else:
            #Bugfix: this previously interpolated the undefined name
            #'ip_type', so unsupported types raised NameError instead of the
            #intended TypeError.
            raise TypeError('Expected str, list, or long; got %(type)s' % {
             'type': type(data),
            })
            
    def _longToNumlist(self):
        """
        Converts the integer value into a big-endian collection of four bytes.
        """
        self._ip_numlist = [self._ip_long >> 24 & 0xFF]
        self._ip_numlist.append(self._ip_long >> 16 & 0xFF)
        self._ip_numlist.append(self._ip_long >> 8 & 0xFF)
        self._ip_numlist.append(self._ip_long & 0xFF)
        
    def _numlistToLong(self):
        """
        Converts the collection of bytes into an integer value.
        """
        self._ip_long = sum([x * 256 ** i for (i, x) in enumerate(reversed(self._ip_numlist))])
        
    def _numlistToString(self):
        """
        Converts the collection of bytes into a dotted-quad string.
        """
        self._ip_string = ".".join(map(str, self._ip_numlist))
        
    def _stringToNumlist(self):
        """
        Converts the dotted-quad string into a collection of bytes.
        """
        #A real list (not a lazy map) so the value can be read repeatedly.
        self._ip_numlist = [int(octet) for octet in self._ip_string.split('.')]
        
    def int(self):
        """
        Returns the integer data contained within this object.
        
        @rtype: int
        @return: A long value.
        """
        return self._ip_long
        
    def list(self):
        """
        Returns the packet-encodable data contained within this object.
        
        @rtype: list
        @return: A collection of bytes.
        """
        return self._ip_numlist
        
    def str(self):
        """
        Returns the human-readable data contained within this object.
        
        @rtype: str
        @return: A human-readable value.
        """
        return self._ip_string
        
    def __cmp__(self, other):
        return cmp(self._ip_long, other._ip_long)
        
    def __hash__(self):
        return self._ip_long.__hash__()
        
    def __nonzero__(self):
        #Only 0.0.0.0 is considered false.
        if self._ip_long:
            return 1
        return 0
        
    def __repr__(self):
        return self._ip_string
| Python |
# -*- encoding: utf-8 -*-
"""
libpydhcpserver module: type_rfc
Purpose
=======
Defines the libpydhcpserver-specific RFC types.
Legal
=====
This file is part of libpydhcpserver.
libpydhcpserver is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2010 <red.hamsterx@gmail.com>
"""
import type_ipv4
import type_strlist
def ipToList(ip):
    """
    Converts a dotted-quad IPv4 address into a collection of four bytes.
    
    @type ip: basestring
    @param ip: The IPv4 to process.
    
    @rtype: list
    @return: The IPv4 expressed as bytes.
    """
    return [int(octet) for octet in ip.split('.')]
    
def ipsToList(ips):
    """
    Converts a comma-delimited list of IPv4s into a flat collection of bytes.
    
    @type ips: basestring
    @param ips: The list of IPv4s to process.
    
    @rtype: list
    @return: A collection of bytes corresponding to the given IPv4s.
    """
    encoded = []
    for address in ips.split(','):
        encoded.extend(ipToList(address.strip()))
    return encoded
def intToList(i):
    """
    A convenience function that converts an int into a big-endian pair of
    bytes.
    
    @type i: int
    @param i: The int value to convert.
    
    @rtype: list
    @return: The converted bytes.
    """
    #divmod makes the floor-division explicit: the old (i / 256) form relied
    #on classic-division semantics and yields floats under true division.
    (high, low) = divmod(i, 256)
    return [high % 256, low]
    
def intsToList(l):
    """
    A convenience function that converts a sequence of ints into pairs of
    bytes.
    
    @type l: sequence
    @param l: The int values to convert.
    
    @rtype: list
    @return: The converted bytes.
    """
    pairs = []
    for value in l:
        pairs += intToList(value)
    return pairs
def longToList(l):
    """
    A convenience function that converts a long into a big-endian set of four
    bytes.
    
    @type l: int
    @param l: The long value to convert.
    
    @rtype: list
    @return: The converted bytes.
    """
    #divmod keeps the arithmetic integral: the old 'l /= 256' form relied on
    #classic-division semantics and produces floats under true division.
    octets = []
    value = l
    for _ in range(4):
        (value, octet) = divmod(value, 256)
        octets.insert(0, octet)
    return octets
    
def longsToList(l):
    """
    A convenience function that converts a sequence of longs into quads of
    bytes.
    
    @type l: sequence
    @param l: The long values to convert.
    
    @rtype: list
    @return: The converted bytes.
    """
    quads = []
    for value in l:
        quads += longToList(value)
    return quads
def strToList(s):
    """
    Converts the given string into an encoded byte format.
    
    @type s: basestring
    @param s: The string to be converted.
    
    @rtype: list
    @return: An encoded byte version of the given string.
    """
    #Delegates the actual encoding to the strlist wrapper type; str() first
    #coerces non-string input into its textual form.
    return type_strlist.strlist(str(s)).list()
def strToPaddedList(s, l):
    """
    Converts the given string into an encoded byte format, exactly equal to
    the specified length.
    
    Strings longer than the given length will be truncated, while those
    shorter will be null-padded.
    
    @type s: basestring
    @param s: The string to be converted.
    @type l: int
    @param l: The length of the list.
    
    @rtype: list
    @return: An encoded byte version of the given string of the specified length.
    """
    encoded = strToList(s)
    #Pad with NULs, then clip to exactly the requested length.
    return (encoded + [0] * l)[:l]
def rfc3046_decode(l):
    """
    Extracts sub-options from an RFC3046 option (82).
    
    @type l: list
    @param l: The option's raw data.
    
    @rtype: dict
    @return: The sub-options, as byte-lists, keyed by ID.
    """
    sub_options = {}
    #Walk the type/length/value stream by index. The previous implementation
    #popped elements off the caller's list, destructively consuming part of
    #the caller's data as a side effect.
    i = 0
    while i < len(l):
        option_id = l[i]
        length = l[i + 1]
        sub_options[option_id] = l[i + 2:i + 2 + length]
        i += 2 + length
    return sub_options
def _rfc1035Parse(domain_name):
    """
    Splits an FQDN on dots, outputting length-prefixed label data like
    ['g', 'o', 'o', 'g', 'l', 'e', 2, 'c', 'a', 0], in conformance with
    RFC1035.
    
    @type domain_name: basestring
    @param domain_name: The FQDN to be converted.
    
    @rtype: list
    @return: The converted FQDN.
    """
    encoded = []
    for label in domain_name.split('.'):
        encoded.append(len(label))
        encoded.extend(ord(character) for character in label)
    #A zero-length label terminates the sequence.
    return encoded + [0]
    
class RFC(object):
    """
    A generic special RFC object, used to simplify the process of setting
    complex options.
    """
    _value = None #: The bytes associated with this object.
    
    def getValue(self):
        """
        Provides the encoded bytes behind this option.
        """
        return self._value
        
    def __hash__(self):
        return hash(self._value)
        
    def __repr__(self):
        return repr(self._value)
        
    def __nonzero__(self):
        #An RFC option is always considered set.
        return 1
        
    def __cmp__(self, other):
        #Equality-only comparison: 0 when equal, 1 otherwise.
        return 0 if self._value == other else 1
        
class rfc1035_plus(RFC):
    def __init__(self, data):
        """
        Parses the given data and stores multiple RFC1035-formatted strings.
        
        @type data: basestring
        @param data: The comma-delimited FQDNs to process.
        """
        self._value = []
        for fqdn in [fragment.strip() for fragment in data.split(',')]:
            if fqdn:
                self._value += _rfc1035Parse(fqdn)
class rfc2610_78(RFC):
    def __init__(self, mandatory, data):
        """
        Parses the given data and stores multiple IPv4 addresses.
        
        @type mandatory: bool
        @param mandatory: True if the IPv4 addresses have to be respected.
        @type data: basestring
        @param data: The comma-delimited IPv4s to process.
        """
        #The mandatory flag is the leading byte, followed by address quads.
        self._value = [int(mandatory)]
        for address in [fragment.strip() for fragment in data.split(',')]:
            if address:
                self._value += type_ipv4.ipv4(address).list()
class rfc2610_79(RFC):
    def __init__(self, mandatory, data):
        """
        Parses the given data and stores a scope-list.
        
        @type mandatory: bool
        @param mandatory: True if the scope-list has to be respected.
        @type data: basestring
        @param data: The scope-list to process.
        """
        #The mandatory flag is the leading byte; the scope-list follows as
        #UTF-8 byte-values.
        encoded_scope = data.encode('utf-8')
        self._value = [int(mandatory)] + [ord(character) for character in encoded_scope]
class rfc3361_120(RFC):
    def __init__(self, data):
        """
        Parses the given data and stores multiple IPv4 addresses or
        RFC1035-formatted strings.
        
        @type data: basestring
        @param data: The comma-delimited IPv4s or FQDNs to process.
        
        @raise ValueError: Both IPv4s and FQDNs were specified.
        """
        ip_4_mode = False
        dns_mode = False
        
        self._value = []
        for token in [tok for tok in [t.strip() for t in data.split(',')] if tok]:
            try:
                #Tokens that parse as IPv4s are encoded as address bytes...
                self._value += type_ipv4.ipv4(token).list()
                ip_4_mode = True
            except ValueError:
                #...anything else is treated as an FQDN.
                self._value += _rfc1035Parse(token)
                dns_mode = True
                
        #NOTE(review): this condition also fires when data contains no tokens
        #at all (both flags False), with a misleading message -- and the
        #message is missing its closing quote after %(data)s. Confirm both.
        if ip_4_mode == dns_mode:
            raise ValueError("'%(data)s contains both IPv4 and DNS-based entries" % {
             'data': data,
            })
            
        #RFC3361's enc byte: 1 for IPv4 entries, 0 for DNS entries.
        self._value.insert(0, int(ip_4_mode))
class rfc3397_119(rfc1035_plus): pass #: Option 119 (domain_search) reuses the RFC1035 FQDN encoding unchanged.
class rfc3925_124(RFC):
    def __init__(self, data):
        """
        Sets vendor_class data.
        
        @type data: list
        @param data: A list of the form [(enterprise_number:int, data:string)].
        """
        self._value = []
        for (enterprise_number, payload) in data:
            #Four-byte enterprise number, then a length-prefixed payload.
            self._value += longToList(enterprise_number)
            #NOTE(review): chr() inserts a one-character str into a list that
            #is otherwise ints (cf. rfc4174_83, which stores int byte-values);
            #verify the packet serializer accepts mixed content here.
            self._value.append(chr(len(payload)))
            self._value += payload
class rfc3925_125(RFC):
    def __init__(self, data):
        """
        Sets vendor_specific data.
        
        @type data: list
        @param data: A list of the form
            [(enterprise_number:int, [(subopt_code:byte, data:string)])].
        """
        self._value = []
        for (enterprise_number, payload) in data:
            #Four-byte enterprise number, then length-prefixed sub-options.
            self._value += longToList(enterprise_number)
            
            subdata = []
            for (subopt_code, subpayload) in payload:
                #NOTE(review): chr() inserts one-character strs into a list
                #that is otherwise ints elsewhere in this module; verify the
                #packet serializer accepts mixed content here.
                subdata.append(chr(subopt_code))
                subdata.append(chr(len(subpayload)))
                subdata += subpayload
                
            self._value.append(chr(len(subdata)))
            self._value += subdata
class rfc4174_83(RFC):
    def __init__(self, isns_functions, dd_access, admin_flags, isns_security, ips):
        """
        Sets iSNS configuration parameters.
        
        @type isns_functions: int
        @param isns_functions: Two bytes.
        @type dd_access: int
        @param dd_access: Two bytes.
        @type admin_flags: int
        @param admin_flags: Two bytes.
        @type isns_security: int
        @param isns_security: Four bytes.
        @type ips: basestring
        @param ips: The comma-delimited IPv4s to process.
        """
        #Each int parameter is rebound to its big-endian byte-list encoding.
        isns_functions = intToList(isns_functions)
        dd_access = intToList(dd_access)
        admin_flags = intToList(admin_flags)
        isns_security = longToList(isns_security)
        
        self._value = isns_functions + dd_access + admin_flags + isns_security
        for token in [tok for tok in [t.strip() for t in ips.split(',')] if tok]:
            self._value += type_ipv4.ipv4(token).list()
class rfc4280_88(rfc1035_plus): pass #: Option 88 (bcmcs_domain_list) reuses the RFC1035 FQDN encoding unchanged.
class rfc5223_137(rfc1035_plus): pass #: Option 137 (v4_lost) reuses the RFC1035 FQDN encoding unchanged.
class rfc5678_139(RFC):
    def __init__(self, values):
        """
        Parses the given data and stores multiple IPv4 addresses
        associated with sub-option codes.
        
        @type values: tuple
        @param values: A collection of (code:int, IPv4s:string) elements.
        """
        self._value = []
        for (code, addresses) in values:
            self._value.append(code)
            #Each comma-delimited address becomes four byte-values.
            for token in [tok for tok in [address.strip() for address in addresses.split(',')] if tok]:
                self._value += type_ipv4.ipv4(token).list()
class rfc5678_140(RFC):
    def __init__(self, values):
        """
        Parses the given data and stores multiple RFC1035-formatted strings
        associated with sub-option codes.
        
        @type values: tuple
        @param values: A collection of (code:int, FQDNs:string) elements.
        """
        self._value = []
        for (code, addresses) in values:
            self._value.append(code)
            #FQDN encoding is delegated to the shared RFC1035 helper.
            self._value += rfc1035_plus(addresses).getValue()
| Python |
# -*- encoding: utf-8 -*-
"""
libpydhcpserver package
Purpose
=======
Provides the pythonic core of a DHCP server.
Legal
=====
This file is part of libpydhcpserver.
libpydhcpserver is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2011 <red.hamsterx@gmail.com>
(C) Mathieu Ignacio, 2008 <mignacio@april.org>
"""
VERSION = '1.1.5' #: The release version of the libpydhcpserver package.
| Python |
# -*- encoding: utf-8 -*-
"""
pydhcplib module: dhcp_network
Purpose
=======
Processes DHCP packets.
Legal
=====
This file is part of libpydhcpserver.
libpydhcpserver is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2011 <red.hamsterx@gmail.com>
(C) Matthew Boedicker, 2011 <matthewm@boedicker.org>
(C) Mathieu Ignacio, 2008 <mignacio@april.org>
"""
import select
import socket
import threading
import dhcp_packet
class DHCPNetwork(object):
"""
Handles the actual network I/O and internal packet-path-routing logic.
"""
_server_address = None #: The IP address of the DHCP service.
_server_port = None #: The port on which DHCP servers and relays listen in this network.
_client_port = None #: The port on which DHCP clients listen in this network.
_pxe_port = None #: The port on which DHCP servers listen for PXE traffic in this network.
_dhcp_socket = None #: The socket used to receive DHCP requests.
_response_socket = None #: The socket used to send DHCP responses. Necessary because of how Linux handles broadcast.
_pxe_socket = None #: The socket used to receive PXE requests.
    def __init__(self, server_address, server_port, client_port, pxe_port):
        """
        Sets up the DHCP network infrastructure.
        
        @type server_address: basestring
        @param server_address: The IP address on which to run the DHCP service.
        @type server_port: int
        @param server_port: The port on which DHCP servers and relays listen in this network.
        @type client_port: int
        @param client_port: The port on which DHCP clients listen in this network.
        @type pxe_port: int|NoneType
        @param pxe_port: The port on which DHCP servers listen for PXE traffic
            in this network; None disables PXE support.
        
        @raise Exception: A problem occurred during setup.
        """
        self._server_address = server_address
        self._server_port = server_port
        self._client_port = client_port
        self._pxe_port = pxe_port
        
        #Sockets must be created before they can be bound.
        self._createSocket()
        self._bindToAddress()
    def _bindToAddress(self):
        """
        Binds the server and response sockets so they may be used.
        
        @raise Exception: A problem occurred while binding the sockets.
        """
        try:
            if self._server_address:
                #Responses go out through an ephemeral port on the configured
                #interface.
                self._response_socket.bind((self._server_address, 0))
            #Inbound DHCP traffic is accepted on all interfaces.
            self._dhcp_socket.bind(('', self._server_port))
            if self._pxe_port:
                self._pxe_socket.bind(('', self._pxe_port))
        except socket.error, e:
            raise Exception('Unable to bind sockets: %(error)s' % {
             'error': str(e),
            })
    def _createSocket(self):
        """
        Creates and configures the server and response sockets.
        
        @raise Exception: A socket was in use or the OS doesn't support proper
            broadcast or reuse flags.
        """
        try:
            self._dhcp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            if self._server_address:
                #A separate response socket is used (see the _response_socket
                #attribute note) because of how Linux handles broadcast.
                self._response_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            else:
                #Without a dedicated server address, requests and responses
                #share one socket.
                self._response_socket = self._dhcp_socket
            if self._pxe_port:
                self._pxe_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        except socket.error, msg:
            raise Exception('Unable to create socket: %(err)s' % {'err': str(msg),})
            
        try:
            #Responses may need to be broadcast to clients that have no
            #address yet.
            self._response_socket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        except socket.error, msg:
            raise Exception('Unable to set SO_BROADCAST: %(err)s' % {'err': str(msg),})
            
        try:
            self._dhcp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            if self._pxe_socket:
                self._pxe_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        except socket.error, msg :
            raise Exception('Unable to set SO_REUSEADDR: %(err)s' % {'err': str(msg),})
def _getNextDHCPPacket(self, timeout=60):
"""
Blocks for up to C{timeout} seconds while waiting for a packet to
arrive; if one does, a thread is spawned to process it.
@type timeout: int
@param timeout: The number of seconds to wait before returning.
@rtype: bool
@return: True if something was received; False otherwise.
"""
active_sockets = None
if self._pxe_socket:
active_sockets = select.select([self._dhcp_socket, self._pxe_socket], [], [], timeout)[0]
else:
active_sockets = select.select([self._dhcp_socket], [], [], timeout)[0]
if active_sockets:
active_socket = active_sockets[0]
(data, source_address) = active_socket.recvfrom(4096)
if data:
packet = dhcp_packet.DHCPPacket(data)
if packet.isDHCPPacket():
pxe = active_socket == self._pxe_socket
if packet.isDHCPRequestPacket():
threading.Thread(target=self._handleDHCPRequest, args=(packet, source_address, pxe)).start()
elif packet.isDHCPDiscoverPacket():
threading.Thread(target=self._handleDHCPDiscover, args=(packet, source_address, pxe)).start()
elif packet.isDHCPInformPacket():
threading.Thread(target=self._handleDHCPInform, args=(packet, source_address, pxe)).start()
elif packet.isDHCPReleasePacket():
threading.Thread(target=self._handleDHCPRelease, args=(packet, source_address, pxe)).start()
elif packet.isDHCPDeclinePacket():
threading.Thread(target=self._handleDHCPDecline, args=(packet, source_address, pxe)).start()
elif packet.isDHCPLeaseQueryPacket():
threading.Thread(target=self._handleDHCPLeaseQuery, args=(packet, source_address, pxe)).start()
return True
return False
def _handleDHCPDecline(self, packet, source_address, pxe):
    """
    Processes a DECLINE packet.
    
    This base implementation does nothing; it is a hook for subclasses to
    override with real behaviour.
    
    @type packet: L{dhcp_packet.DHCPPacket}
    @param packet: The packet to be processed.
    @type source_address: tuple
    @param source_address: The (host, port) pair from which the request was
        received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
def _handleDHCPDiscover(self, packet, source_address, pxe):
    """
    Processes a DISCOVER packet.
    
    This base implementation does nothing; it is a hook for subclasses to
    override with real behaviour.
    
    @type packet: L{dhcp_packet.DHCPPacket}
    @param packet: The packet to be processed.
    @type source_address: tuple
    @param source_address: The (host, port) pair from which the request was
        received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
def _handleDHCPInform(self, packet, source_address, pxe):
    """
    Processes an INFORM packet.
    
    This base implementation does nothing; it is a hook for subclasses to
    override with real behaviour.
    
    @type packet: L{dhcp_packet.DHCPPacket}
    @param packet: The packet to be processed.
    @type source_address: tuple
    @param source_address: The (host, port) pair from which the request was
        received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
def _handleDHCPLeaseQuery(self, packet, source_address, pxe):
    """
    Processes a LEASEQUERY packet.
    
    This base implementation does nothing; it is a hook for subclasses to
    override with real behaviour.
    
    @type packet: L{dhcp_packet.DHCPPacket}
    @param packet: The packet to be processed.
    @type source_address: tuple
    @param source_address: The (host, port) pair from which the request was
        received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
def _handleDHCPRelease(self, packet, source_address, pxe=False):
    """
    Processes a RELEASE packet.
    
    This base implementation does nothing; it is a hook for subclasses to
    override with real behaviour.
    
    The C{pxe} parameter was missing from this signature even though the
    dispatcher in C{_getNextDHCPPacket} always supplies it, which made every
    RELEASE raise TypeError inside its handler thread. It defaults to False
    so any existing two-argument callers keep working.
    
    @type packet: L{dhcp_packet.DHCPPacket}
    @param packet: The packet to be processed.
    @type source_address: tuple
    @param source_address: The address (host, port) from which the request
        was received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
    pass
def _handleDHCPRequest(self, packet, source_address, pxe):
    """
    Processes a REQUEST packet.
    
    This base implementation does nothing; it is a hook for subclasses to
    override with real behaviour.
    
    @type packet: L{dhcp_packet.DHCPPacket}
    @param packet: The packet to be processed.
    @type source_address: tuple
    @param source_address: The (host, port) pair from which the request was
        received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
def _sendDHCPPacketTo(self, packet, ip, port, pxe):
    """
    Encodes and sends a DHCP packet to its destination.
    
    @type packet: L{dhcp_packet.DHCPPacket}
    @param packet: The packet to be sent.
    @type ip: basestring
    @param ip: The IP address to which the packet is to be sent.
    @type port: int
    @param port: The port to which the packet is to be addressed.
    @type pxe: bool
    @param pxe: True if the packet was received via the PXE port
    
    @rtype: int
    @return: The number of bytes written to the socket.
    """
    encoded_packet = packet.encodePacket()
    
    #Broadcast responses must go out through self._response_socket, which
    #has SO_BROADCAST set. (If self._dhcp_socket is anonymously bound, the
    #two sockets are actually one and the same, so nothing is lost.)
    if ip == '255.255.255.255':
        return self._response_socket.sendto(encoded_packet, (ip, port))
        
    #Unicast responses (relays) must originate from port 67, because some
    #relays refuse to forward packets from any other source port, so use
    #self._dhcp_socket -- or the PXE socket when the exchange is PXE.
    if pxe:
        unicast_socket = self._pxe_socket
    else:
        unicast_socket = self._dhcp_socket
    return unicast_socket.sendto(encoded_packet, (ip, port))
| Python |
# -*- encoding: utf-8 -*-
"""
libpydhcpserver module: type_hwmac
Purpose
=======
Defines the libpydhcpserver-specific hwmac type.
Legal
=====
This file is part of libpydhcpserver.
libpydhcpserver is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2010 <red.hamsterx@gmail.com>
(C) Mathieu Ignacio, 2008 <mignacio@april.org>
"""
from binascii import unhexlify, hexlify
class hwmac(object):
    """
    Evaluates and encodes a MAC for use as part of a DHCP packet.
    """
    _hw_numlist = None #: An encoded list of bytes.
    _hw_string = None #: A human-readable MAC.
    
    def __init__(self, data="00:00:00:00:00:00"):
        """
        Accepts data and ensures that both human-readable and packet-encodable
        values are made available.
        
        @type data: str|list|tuple
        @param data: The data to be processed: either a human-readable MAC
            (':', '-', and '.' all accepted as separators) or a sequence of
            six octets.
        
        @raise TypeError: Unsupported data-type was supplied.
        @raise ValueError: Invalid data supplied.
        """
        if isinstance(data, str):
            self._hw_string = data.strip()
            self._stringToNumlist()
            self._checkNumList()
        elif isinstance(data, (list, tuple)):
            self._hw_numlist = list(data)
            self._checkNumList()
            self._numlistToString()
        else:
            raise TypeError('Expected str or [int]; got %(type)s' % {
             'type': type(data),
            })
            
    def _checkNumList(self):
        """
        Validates the MAC address contained within this object.
        
        @raise TypeError: A non-integer octet was supplied.
        @raise ValueError: The wrong number of octets, or an out-of-range
            octet, was supplied.
        """
        if not len(self._hw_numlist) == 6:
            raise ValueError("Expected six octets; received %(count)i" % {
             'count': len(self._hw_numlist),
            })
        for part in self._hw_numlist:
            if not type(part) == int:
                raise TypeError('Expected int; got %(type)s' % {
                 'type': type(part),
                })
            if part < 0 or part > 255:
                raise ValueError("Expected 0 <= x <= 255; received %(x)i" % {
                 'x': part,
                })
                
    def _numlistToString(self):
        """
        Converts a collection of bytes into a human-readable MAC address.
        """
        #"%02x" produces the same lowercase output as the old
        #hexlify(chr(x)) construct, but works under both Python 2 and 3.
        self._hw_string = ":".join(["%02x" % octet for octet in self._hw_numlist])
        
    def _stringToNumlist(self):
        """
        Converts a human-readable MAC address into a collection of bytes.
        
        @raise ValueError: A segment of the MAC is not valid hexadecimal.
        """
        #Normalise common separators to colons before splitting.
        self._hw_string = self._hw_string.replace("-", ":").replace(".", ":").lower()
        #int(x, 16) replaces ord(unhexlify(x)): it works under both Python 2
        #and 3 and raises a clear ValueError on non-hex input; any segment
        #that decodes above 255 is rejected by _checkNumList().
        self._hw_numlist = [int(twochar, 16) for twochar in self._hw_string.split(":")]
        
    def list(self):
        """
        Returns the packet-encodable data contained within this object.
        
        @rtype: list
        @return: A collection of bytes.
        """
        return self._hw_numlist
        
    def str(self):
        """
        Returns the human-readable data contained within this object.
        
        @rtype: str
        @return: A human-readable value.
        """
        return self._hw_string
        
    def __cmp__(self, other):
        #Python 2 comparison fallback; __eq__/__ne__ below serve Python 3.
        if self._hw_string == other:
            return 0
        return 1
        
    def __eq__(self, other):
        #Mirrors __cmp__: equal when the normalized string matches, which
        #also covers hwmac-to-hwmac comparison via reflection.
        return self._hw_string == other
        
    def __ne__(self, other):
        return not self.__eq__(other)
        
    def __hash__(self):
        return self._hw_string.__hash__()
        
    def __nonzero__(self):
        #A MAC of all zeroes is considered falsy.
        return self._hw_string != "00:00:00:00:00:00"
    __bool__ = __nonzero__ #Python 3 name for the truth-value hook.
    
    def __repr__(self):
        return self._hw_string
| Python |
# -*- encoding: utf-8 -*-
"""
libpydhcpserver module: dhcp_packet
Purpose
=======
Extended class to offer convenience functions and processing for DHCP packets.
Legal
=====
This file is part of libpydhcpserver.
libpydhcpserver is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2010 <red.hamsterx@gmail.com>
(C) Mathieu Ignacio, 2008 <mignacio@april.org>
"""
import operator
from struct import unpack
from struct import pack
import warnings
from dhcp_constants import *
from type_hwmac import hwmac
from type_ipv4 import ipv4
from type_strlist import strlist
from type_rfc import *
class DHCPPacket(object):
"""
Handles the construction, management, and export of DHCP packets.
"""
_packet_data = None #: The core 240 bytes that make up a DHCP packet.
_options_data = None #: Any additional options attached to this packet.
_requested_options = None #: Any options explicitly requested by the client.
def __init__(self, data=None):
"""
Initializes a DHCP packet, using real data, if possible.
@type data: str|None
@param data: The raw packet from which this object should be instantiated or None if a
blank packet should be created.
"""
self._options_data = {}
if not data: #Just create a blank packet and bail.
self._packet_data = [0]*240
self._packet_data[236:240] = MAGIC_COOKIE
return
#Transform all data to a list of bytes by unpacking it as C-chars.
unpack_fmt = str(len(data)) + "c"
self._packet_data = [ord(i) for i in unpack(unpack_fmt, data)]
#Some servers or clients don't place the magic cookie immediately
#after the end of the headers block, adding unnecessary padding.
#It's necessary to find the magic cookie before proceding.
position = 236
end_position = len(self._packet_data)
while not self._packet_data[position:position + 4] == MAGIC_COOKIE and position < end_position:
position += 1
position += 4 #Jump to the point immediately after the cookie.
#Extract extended options from the payload.
while position < end_position:
if self._packet_data[position] == 0: #Pad option; skip byte.
opt_first = position + 1
position += 1
elif self._packet_data[position] == 255: #End option; stop processing.
break
elif DHCP_OPTIONS_TYPES.has_key(self._packet_data[position]) and not self._packet_data[position] == 255:
opt_len = self._packet_data[position + 1]
opt_first = position + 1
opt_id = self._packet_data[position]
opt_val = self._packet_data[opt_first + 1:opt_len + opt_first + 1]
try:
self._options_data[DHCP_OPTIONS_REVERSE[opt_id]] = opt_val
except Exception, e:
warnings.warn("Unable to assign '%(value)s' to '%(id)s': %(error)s" % {
'value': opt_val,
'id': opt_id,
'error': str(e),
})
if opt_id == 55: #Handle requested options.
self._requested_options = tuple(set(
[int(i) for i in opt_val] + [1, 3, 6, 15, 51, 53, 54, 58, 59]
))
position += self._packet_data[opt_first] + 2
else:
opt_first = position + 1
position += self._packet_data[opt_first] + 2
#Cut the packet data down to 240 bytes.
self._packet_data = self._packet_data[:236] + MAGIC_COOKIE
def encodePacket(self):
"""
Assembles all data into a single, C-char-packed struct.
All options are arranged in order, per RFC2131 (details under 'router').
@rtype: str
@return: The encoded packet.
"""
#Pull options out of the payload, excluding options not specifically
#requested, assuming any specific requests were made.
options = {}
for key in self._options_data.keys():
option_id = DHCP_OPTIONS[key]
if self._requested_options is None or option_id in self._requested_options:
option_value = self._options_data[key]
options[option_id] = option = []
while True:
if len(option_value) > 255:
option += [option_id, 255] + option_value[:255]
option_value = option_value[255:]
else:
option += [option_id, len(option_value)] + option_value
break
#Order options by number and add them to the output data.
ordered_options = []
for (option_id, value) in sorted(options.iteritems()):
ordered_options += value
#Assemble data.
packet = self._packet_data[:240] + ordered_options
packet.append(255) #Add End option.
#Encode packet.
pack_fmt = str(len(packet)) + "c"
packet = map(chr, packet)
return pack(pack_fmt, *packet)
def _setRfcOption(self, name, value, expected_type):
"""
Handles the process of setting RFC options, digesting the object's
contents if an object of the appropriate type is provided, or directly
assigning the list otherwise.
@type name: basestring
@param name: The option's name.
@type value: L{RFC}|list
@param value: The value to be assigned.
@type expected_type: L{RFC}
@param expected_type: The type of special RFC object associated with
the given option name.
@rtype: bool
@return: True if assignment succeeded.
"""
if type(value) == expected_type:
self._options_data[name] = value.getValue()
return True
else:
self._options_data[name] = value
return True
return False
def deleteOption(self, name):
"""
Drops a value from the DHCP data-set.
If the value is part of the DHCP core, it is set to zero. Otherwise, it
is removed from the option-pool.
@type name: basestring|int
@param name: The option's name or numeric value.
@rtype: bool
@return: True if the deletion succeeded.
"""
if DHCP_FIELDS.has_key(name):
dhcp_field = DHCP_FIELDS[name]
begin = dhcp_field[0]
end = dhcp_field[0] + dhcp_field[1]
self._packet_data[begin:end] = [0]*dhcp_field[1]
return True
else:
if type(name) == int: #Translate int to string.
name = DHCP_OPTIONS_REVERSE.get(name)
if self._options_data.has_key(name):
del self._options_data[name]
return True
return False
def forceOption(self, option, value):
"""
Bypasses validation checks and adds the option number to the
request-list. Useful to force poorly designed clients to perform
specific tasks.
@type name: basestring|int
@param name: The option's name or numeric value.
@type value: list|tuple
@param value: The bytes to assign to this option.
@raise ValueError: The specified option does not exist.
"""
name = id = None
if type(option) == int: #Translate int to string.
name = DHCP_OPTIONS_REVERSE.get(option)
id = option
else: #Translate string into int.
id = DHCP_OPTIONS.get(option)
name = option
if name and id:
if self._requested_options:
self._requested_options += (option,)
self._options_data[name] = list(value)
else:
raise ValueError("Unknown option: %(option)s" % {
'option': option,
})
def getOption(self, name):
"""
Retrieves the value of an option in the packet's data.
@type name: basestring|int
@param name: The option's name or numeric value.
@rtype: list|None
@return: The value of the specified option or None if it hasn't been
set.
"""
if DHCP_FIELDS.has_key(name):
option_info = DHCP_FIELDS[name]
return self._packet_data[option_info[0]:option_info[0] + option_info[1]]
else:
if type(name) == int: #Translate int to string.
name = DHCP_OPTIONS_REVERSE.get(name)
if self._options_data.has_key(name):
return self._options_data[name]
return None
def isOption(self, name):
"""
Indicates whether an option is currently set within the packet.
@type name: basestring|int
@param name: The option's name or numeric value.
@rtype: bool
@return: True if the option has been set.
"""
if type(name) == int: #Translate int to string.
self._options_data.has_key(DHCP_OPTIONS_REVERSE.get(name))
return self._options_data.has_key(name) or DHCP_FIELDS.has_key(name)
def setOption(self, name, value):
"""
Validates and sets the value of a DHCP option associated with this
packet.
@type name: basestring|int
@param name: The option's name or numeric value.
@type value: list|tuple|L{RFC}
@param value: The bytes to assign to this option or the special RFC
object from which they are to be derived.
@rtype: bool
@return: True if the value was set successfully.
@raise ValueError: The specified option does not exist.
"""
if not isinstance(value, RFC):
if not type(value) in (list, tuple):
return False
if [None for v in value if not type(v) == int or not 0 <= v <= 255]:
return False
value = list(value)
#Basic checking: is the length of the value valid?
if DHCP_FIELDS.has_key(name):
dhcp_field = DHCP_FIELDS[name]
if not len(value) == dhcp_field[1]:
return False
begin = dhcp_field[0]
end = dhcp_field[0] + dhcp_field[1]
self._packet_data[begin:end] = value
return True
else:
if type(name) == int:
name = DHCP_OPTIONS_REVERSE.get(name)
dhcp_field_type = DHCP_OPTIONS_TYPES.get(DHCP_OPTIONS.get(name))
if not dhcp_field_type:
return False
#Process normal options.
dhcp_field_specs = DHCP_FIELDS_SPECS.get(dhcp_field_type)
if dhcp_field_specs:
(fixed_length, minimum_length, multiple) = dhcp_field_specs
length = len(value)
if fixed_length == length or (minimum_length <= length and length % multiple == 0):
self._options_data[name] = value
return True
return False
else:
#Process special RFC options.
if dhcp_field_type == 'RFC2610_78':
return self._setRfcOption(name, value, rfc2610_78)
elif dhcp_field_type == 'RFC2610_79':
return self._setRfcOption(name, value, rfc2610_79)
elif dhcp_field_type == 'RFC3361_120':
return self._setRfcOption(name, value, rfc3361_120)
elif dhcp_field_type == 'RFC3397_119':
return self._setRfcOption(name, value, rfc3397_119)
elif dhcp_field_type == 'RFC4174_83':
return self._setRfcOption(name, value, rfc4174_83)
elif dhcp_field_type == 'RFC4280_88':
return self._setRfcOption(name, value, rfc4280_88)
elif dhcp_field_type == 'RFC5223_137':
return self._setRfcOption(name, value, rfc5223_137)
elif dhcp_field_type == 'RFC5678_139':
return self._setRfcOption(name, value, rfc5678_139)
elif dhcp_field_type == 'RFC5678_140':
return self._setRfcOption(name, value, rfc5678_140)
raise ValueError("Unknown option: %(name)s" % {
'name': name,
})
def isDHCPPacket(self):
"""
Indicates whether this packet is a DHCP packet or not.
@rtype: bool
@return: True if this packet is a DHCP packet.
"""
return self._packet_data[236:240] == MAGIC_COOKIE
def _getDHCPMessageType(self):
"""
Returns the DHCP message-type of this packet.
@rtype: int
@return: The DHCP message type of this packet or -1 if the
message-type is undefined.
"""
dhcp_message_type = self.getOption('dhcp_message_type')
if dhcp_message_type is None:
return -1
return dhcp_message_type[0]
def isDHCPDeclinePacket(self):
"""
Indicates whether this is a DECLINE packet.
@rtype: bool
@return: True if this is a DECLINE packet.
"""
return self._getDHCPMessageType() == 4
def isDHCPDiscoverPacket(self):
"""
Indicates whether this is a DISCOVER packet.
@rtype: bool
@return: True if this is a DISCOVER packet.
"""
return self._getDHCPMessageType() == 1
def isDHCPInformPacket(self):
"""
Indicates whether this is an INFORM packet.
@rtype: bool
@return: True if this is an INFORM packet.
"""
return self._getDHCPMessageType() == 8
def isDHCPLeaseQueryPacket(self):
"""
Indicates whether this is a LEASEQUERY packet.
@rtype: bool
@return: True if this is a LEASEQUERY packet.
"""
return self._getDHCPMessageType() == 10
def isDHCPReleasePacket(self):
"""
Indicates whether this is a RELEASE packet.
@rtype: bool
@return: True if this is a RELEASE packet.
"""
return self._getDHCPMessageType() == 7
def isDHCPRequestPacket(self):
"""
Indicates whether this is a REQUEST packet.
@rtype: bool
@return: True if this is a REQUEST packet.
"""
return self._getDHCPMessageType() == 3
def extractPXEOptions(self):
"""
Strips out PXE-specific options from the packet, returning them
separately.
This function is good for scrubbing information that needs to be sent
monodirectionally from the client.
@rtype: tuple(3)
@return: A triple containing, in order, option 93 (client_system) as
a sequence of ints, option 94 (client_ndi) as a sequence of three
bytes, and option 97 (uuid_guid) as digested data:
(type:byte, data:[byte]).
Any unset options are presented as None.
"""
opt_93 = self.getOption("client_system")
opt_94 = self.getOption("client_ndi")
opt_97 = self.getOption("uuid_guid")
if opt_93:
value = []
for i in xrange(0, len(opt_93), 2):
value.append(opt_93[i] * 256 + opt_93[i + 1])
opt_93 = value
if opt_94:
opt_94 = tuple(opt_94)
if opt_97:
opt_97 = (opt_97[0], opt_97[1:])
self.deleteOption("client_system")
self.deleteOption("client_ndi")
self.deleteOption("uuid_guid")
return (opt_93, opt_94, opt_97)
def extractVendorOptions(self):
"""
Strips out vendor-specific options from the packet, returning them
separately.
This function is good for scrubbing information that needs to be sent
monodirectionally from the client.
@rtype: tuple(4)
@return: A four-tuple containing, in order, option 43
(vendor_specific_information) as a string of bytes, option 60
(vendor_class_identifier) as a string, and both option 124
(vendor_class) and option 125 (vendor_specific) as digested data:
[(enterprise_number:int, data:string)] and
[(enterprise_number:int, [(subopt_code:byte, data:string)])],
respectively. Any unset options are presented as None.
"""
opt_43 = self.getOption("vendor_specific_information")
opt_60 = self.getOption("vendor_class_identifier")
opt_124 = self.getOption("vendor_class")
opt_125 = self.getOption("vendor_specific")
if opt_124:
data = []
while opt_124:
enterprise_number = ipv4(opt_124[:4]).int()
opt_124 = opt_124[4:]
payload_size = ord(opt_124[0])
payload = opt_124[1:1 + payload_size]
opt_124 = opt_124[1 + payload_size:]
data.append(enterprise_number, payload)
opt_124 = data
if opt_125:
data = []
while opt_125:
enterprise_number = ipv4(opt_125[:4]).int()
opt_125 = opt_125[4:]
payload_size = ord(opt_125[0])
payload = opt_125[1:1 + payload_size]
opt_125 = opt_125[1 + payload_size:]
subdata = []
while payload:
subopt = ord(payload[0])
subopt_size = ord(payload[1])
subpayload = payload[2:2 + subopt_size]
payload = payload[2 + subopt_size:]
subdata.append(subopt, subpayload)
data.append(enterprise_number, subdata)
opt_125 = data
self.deleteOption("vendor_specific_information")
self.deleteOption("vendor_class_identifier")
self.deleteOption("vendor_class")
self.deleteOption("vendor_specific")
return (opt_43, opt_60, opt_124, opt_125)
def _transformBase(self):
"""
Sets and removes options from the DHCP packet to make it suitable for
returning to the client.
"""
self.setOption("op", [2])
self.setOption("hlen", [6])
self.deleteOption("client_identifier")
self.deleteOption("maximum_message_size")
self.deleteOption("parameter_request_list")
self.deleteOption("request_ip_address")
self.deleteOption("secs")
self.deleteOption("subnet_selection")
def transformToDHCPAckPacket(self):
"""
Transforms a DHCP packet received from a client into an ACK
packet to be returned to the client.
"""
self._transformBase()
self.setOption("dhcp_message_type", [5])
def transformToDHCPLeaseActivePacket(self):
"""
Transforms a DHCP packet received from a client into a LEASEACTIVE
packet to be returned to the client.
"""
self._transformBase()
self.setOption("dhcp_message_type", [13])
self.deleteOption("ciaddr")
self.deleteOption("file")
self.deleteOption("sname")
def transformToDHCPLeaseUnknownPacket(self):
"""
Transforms a DHCP packet received from a client into a LEASEUNKNOWN
packet to be returned to the client.
"""
self._transformBase()
self.setOption("dhcp_message_type", [12])
self.deleteOption("ciaddr")
self.deleteOption("file")
self.deleteOption("sname")
def transformToDHCPOfferPacket(self):
"""
Transforms a DHCP packet received from a client into an OFFER
packet to be returned to the client.
"""
self._transformBase()
self.setOption("dhcp_message_type", [2])
self.deleteOption("ciaddr")
def transformToDHCPNackPacket(self):
"""
Transforms a DHCP packet received from a client into a NAK
packet to be returned to the client.
"""
self._transformBase()
self.setOption("dhcp_message_type", [6])
self.deleteOption("ciaddr")
self.deleteOption("siaddr")
self.deleteOption("yiaddr")
self.deleteOption("file")
self.deleteOption("sname")
self.deleteOption("ip_address_lease_time_option")
def getHardwareAddress(self):
"""
Extracts the client's MAC address from the DHCP packet.
@rtype: str
@return: The client's MAC address.
"""
length = self.getOption("hlen")[0]
full_hw = self.getOption("chaddr")
if length and length < len(full_hw):
return hwmac(full_hw[0:length]).str()
return hwmac(full_hw).str()
def getRequestedOptions(self):
"""
Returns the options requested by the client from which this packet
was sent.
@rtype: tuple|None
@return: The options requested by the client or None if option 55 was
omitted.
"""
return self._requested_options
def isRequestedOption(self, name):
"""
Indicates whether the specified option was requested by the client or
the client omitted option 55, necessitating delivery of all values.
@type name: basestring|int
@param name: The name (or numeric value) of the DHCP option being
tested.
@rtype: bool
@return: True if the option was requested by the client.
"""
if self._requested_options is None:
return True
if not type(name) == int:
return DHCP_OPTIONS.get(name) in self._requested_options
return name in self._requested_options
def __str__(self):
"""
Renders this packet's data in human-readable form.
@rtype: str
@return: A human-readable summary of this packet.
"""
output = ['#Header fields']
op = self._packet_data[DHCP_FIELDS['op'][0]:DHCP_FIELDS['op'][0] + DHCP_FIELDS['op'][1]]
output.append("op: %(type)s" % {
'type': DHCP_FIELDS_NAMES['op'][op[0]],
})
for opt in (
'htype','hlen','hops','xid','secs','flags',
'ciaddr','yiaddr','siaddr','giaddr','chaddr',
'sname','file',
):
begin = DHCP_FIELDS[opt][0]
end = DHCP_FIELDS[opt][0] + DHCP_FIELDS[opt][1]
data = self._packet_data[begin:end]
result = None
if DHCP_FIELDS_TYPES[opt] == "byte":
result = str(data[0])
elif DHCP_FIELDS_TYPES[opt] == "16-bits":
result = str(data[0] * 256 + data[1])
elif DHCP_FIELDS_TYPES[opt] == "32-bits":
result = str(ipv4(data).int())
elif DHCP_FIELDS_TYPES[opt] == "string":
result = []
for c in data:
if c:
result.append(chr(c))
else:
break
result = ''.join(result)
elif DHCP_FIELDS_TYPES[opt] == "ipv4":
result = ipv4(data).str()
elif DHCP_FIELDS_TYPES[opt] == "hwmac":
result = []
hexsym = ('0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f',)
for iterator in xrange(6):
result.append(str(hexsym[data[iterator] / 16] + hexsym[data[iterator] % 16]))
result = ':'.join(result)
output.append("%(opt)s: %(result)s" % {
'opt': opt,
'result': result,
})
output.append('')
output.append("#Options fields")
for opt in self._options_data.keys():
data = self._options_data[opt]
result = None
optnum = DHCP_OPTIONS[opt]
if opt == 'dhcp_message_type':
result = DHCP_FIELDS_NAMES['dhcp_message_type'][data[0]]
elif DHCP_OPTIONS_TYPES[optnum] in ("byte", "byte+", "string"):
result = str(data)
elif DHCP_OPTIONS_TYPES[optnum] in ("char", "char+"):
if optnum == 55: # parameter_request_list
requested_options = []
for d in data:
requested_options.append(DHCP_OPTIONS_REVERSE[int(d)])
result = ', '.join(requested_options)
else:
result = []
for c in data:
if 32 <= c <= 126:
result.append(chr(c))
else:
result.append(str(c))
result = ', '.join(result)
elif DHCP_OPTIONS_TYPES[optnum] in ("16-bits", "16-bits+"):
result = []
for i in xrange(0, len(data), 2):
result.append(str(data[i] * 256 + data[i + 1]))
result = ', '.join(result)
elif DHCP_OPTIONS_TYPES[optnum] in ("32-bits", "32-bits+"):
result = []
for i in xrange(0, len(data), 4):
result.append(str(ipv4(data[i:i+4]).int()))
result = ', '.join(result)
elif DHCP_OPTIONS_TYPES[optnum] in ("ipv4", "ipv4+", "ipv4*"):
result = []
for i in xrange(0, len(data), 4):
result.append(ipv4(data[i:i+4]).str())
result = ', '.join(result)
else:
result = str(data)
output.append("%(opt)s: %(result)s" % {
'opt': opt,
'result': result,
})
return '\n'.join(output)
| Python |
# -*- encoding: utf-8 -*-
"""
staticDHCPd module: src.dhcp
Purpose
=======
Provides the DHCPd side of a staticDHCPd server.
Legal
=====
This file is part of staticDHCPd.
staticDHCPd is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2009 <red.hamsterx@gmail.com>
"""
import select
import threading
import time
import src.conf_buffer as conf
import src.logging
import src.sql
import libpydhcpserver.dhcp_network
from libpydhcpserver.type_rfc import (
ipToList, ipsToList,
intToList, intsToList,
longToList, longsToList,
strToList, strToPaddedList,
)
_dhcp_servers = [] #: Every instantiated DHCP server; expected to hold at most one element.

def flushCache():
    """
    Asks every registered DHCP server to drop its cached DHCP data.
    """
    for server in _dhcp_servers:
        server.flushCache()
def _logInvalidValue(name, value, subnet, serial):
    """
    Writes a log entry describing a rejected option value.
    
    @type name: basestring
    @param name: The name of the option that held the bad value.
    @param value: The rejected value.
    @type subnet: basestring
    @param subnet: The subnet in which the value was found.
    @type serial: int
    @param serial: The serial of the subnet in which the value was found.
    """
    details = {
     'subnet': subnet,
     'serial': serial,
     'name': name,
     'value': value,
    }
    src.logging.writeLog("Invalid value for %(subnet)s:%(serial)i:%(name)s: %(value)s" % details)
class _DHCPServer(libpydhcpserver.dhcp_network.DHCPNetwork):
"""
The handler that responds to all received DHCP requests.
"""
_sql_broker = None #: The SQL broker to be used when handling MAC lookups.
_stats_lock = None #: A lock used to ensure synchronous access to performance statistics.
_dhcp_assignments = None #: The MACs and the number of DHCP "leases" granted to each since the last polling interval.
_ignored_addresses = None #: A list of all MACs currently ignored, plus the time remaining until requests will be honoured again.
_packets_discarded = 0 #: The number of packets discarded since the last polling interval.
_packets_processed = 0 #: The number of packets processed since the last polling interval.
_time_taken = 0.0 #: The amount of time taken since the last polling interval.
def __init__(self, server_address, server_port, client_port, pxe_port):
    """
    Constructs the DHCP handler.
    
    @type server_address: basestring
    @param server_address: The IP of the interface from which DHCP responses
        are to be sent.
    @type server_port: int
    @param server_port: The port on which DHCP requests are expected to
        arrive.
    @type client_port: int
    @param client_port: The port on which clients expect DHCP responses to
        be sent.
    @type pxe_port: int|NoneType
    @param pxe_port: The port on which to listen for PXE requests, or a
        NoneType if PXE support is disabled.
    
    @raise Exception: If a problem occurs while initializing the sockets
        required to process DHCP messages.
    """
    #Set up the statistics-tracking structures before any packets can be
    #received, since the handler threads touch them under _stats_lock.
    self._stats_lock = threading.Lock()
    self._dhcp_assignments = {}
    self._ignored_addresses = []
    
    #Bind the DHCP (and, when enabled, PXE) sockets; this is the step that
    #may raise the documented Exception.
    libpydhcpserver.dhcp_network.DHCPNetwork.__init__(
     self, server_address, server_port, client_port, pxe_port
    )
    
    #The SQL broker performs the MAC-to-lease lookups for this server.
    self._sql_broker = src.sql.SQL_BROKER()
def _evaluateRelay(self, packet, pxe):
    """
    Decides, according to policy, whether a packet -- relayed or local --
    should be processed at all.
    
    @type packet: L{libpydhcpserver.dhcp_packet.DHCPPacket}
    @param packet: The packet to be evaluated.
    @type pxe: bool
    @param pxe: Whether the request is PXE
    
    @rtype: bool
    @return: True if the packet passes relay policy and may be handled.
    """
    giaddr = packet.getOption("giaddr")
    relayed = giaddr != [0, 0, 0, 0] #A non-zero giaddr means a relay forwarded this.
    if relayed:
        if not conf.ALLOW_DHCP_RELAYS: #Relaying disabled outright.
            return False
        relay_ip = '.'.join(map(str, giaddr))
        if conf.ALLOWED_DHCP_RELAYS and relay_ip not in conf.ALLOWED_DHCP_RELAYS:
            src.logging.writeLog('Relayed request from unauthorized relay %(ip)s ignored' % {
             'ip': relay_ip,
            })
            return False
    elif not (conf.ALLOW_LOCAL_DHCP or pxe): #Local request, but denied.
        return False
    return True
def _handleDHCPDecline(self, packet, source_address, pxe):
    """
    Informs the operator of a potential IP collision on the network.
    
    This function checks to make sure the MAC isn't ignored or acting
    maliciously, then checks the database to see whether it has an assigned
    IP. If it does, and the IP it thinks it has a right to matches this IP,
    then a benign message is logged and the operator is informed; if not,
    the decline is flagged as a malicious act.
    
    @type packet: L{libpydhcpserver.dhcp_packet.DHCPPacket}
    @param packet: The DHCPDISCOVER to be evaluated.
    @type source_address: tuple
    @param source_address: The address (host, port) from which the request
        was received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
    if not self._evaluateRelay(packet, pxe):
        return
        
    start_time = time.time()
    mac = packet.getHardwareAddress()
    #Skip MACs that are currently on the ignore-list.
    if not [None for (ignored_mac, timeout) in self._ignored_addresses if mac == ignored_mac]:
        if not self._logDHCPAccess(mac):
            #Access-logging rejected the MAC (rate-limited/abusive); drop.
            self._logDiscardedPacket()
            return
        #Only act on declines addressed to this server.
        if '.'.join(map(str, packet.getOption("server_identifier"))) == self._server_address: #Rejected!
            ip = '.'.join(map(str, packet.getOption("requested_ip_address")))
            result = self._sql_broker.lookupMAC(mac)
            #result[0] is the assigned IP; result[9]/result[10] feed the
            #subnet/serial slots of the log format below.
            if result and result[0] == ip: #Known client.
                src.logging.writeLog('DHCPDECLINE from %(mac)s for %(ip)s on (%(subnet)s, %(serial)i)' % {
                 'ip': ip,
                 'mac': mac,
                 'subnet': result[9],
                 'serial': result[10],
                })
                src.logging.sendDeclineReport(mac, ip, result[9], result[10])
            else:
                #The client declined an IP it was never assigned.
                src.logging.writeLog('Misconfigured client %(mac)s sent DHCPDECLINE for %(ip)s' % {
                 'ip': ip,
                 'mac': mac,
                })
        else:
            self._logDiscardedPacket()
    else:
        self._logDiscardedPacket()
    self._logTimeTaken(time.time() - start_time)
def _handleDHCPDiscover(self, packet, source_address, pxe):
    """
    Evaluates a DHCPDISCOVER request from a client and determines whether a
    DHCPOFFER should be sent.

    The logic here is to make sure the MAC isn't ignored or acting
    maliciously, then check the database to see whether it has an assigned
    IP. If it does, that IP is offered, along with all relevant options; if
    not, the MAC is ignored to mitigate spam from follow-up DHCPDISCOVERS.

    @type packet: L{libpydhcpserver.dhcp_packet.DHCPPacket}
    @param packet: The DHCPDISCOVER to be evaluated.
    @type source_address: tuple
    @param source_address: The address (host, port) from which the request
        was received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
    if not self._evaluateRelay(packet, pxe):
        return
    start_time = time.time()
    mac = packet.getHardwareAddress()
    #A non-empty comprehension result means the MAC is currently blacklisted.
    if not [None for (ignored_mac, timeout) in self._ignored_addresses if mac == ignored_mac]:
        if not self._logDHCPAccess(mac):
            self._logDiscardedPacket()
            return
        src.logging.writeLog('DHCPDISCOVER from %(mac)s' % {
            'mac': mac,
        })
        try:
            result = self._sql_broker.lookupMAC(mac)
            if result:
                #Rapid-commit (RFC 4039): skip the OFFER phase and ACK immediately.
                rapid_commit = not packet.getOption('rapid_commit') is None
                if rapid_commit:
                    packet.transformToDHCPAckPacket()
                    packet.forceOption('rapid_commit', [])
                else:
                    packet.transformToDHCPOfferPacket()
                pxe_options = packet.extractPXEOptions()
                vendor_options = packet.extractVendorOptions()
                self._loadDHCPPacket(packet, result)
                giaddr = packet.getOption("giaddr")
                if not giaddr or giaddr == [0,0,0,0]:
                    giaddr = None
                else:
                    giaddr = tuple(giaddr)
                #Give the site-local hook a chance to veto or tweak the response.
                if conf.loadDHCPPacket(
                    packet,
                    mac, tuple(ipToList(result[0])), giaddr,
                    result[9], result[10],
                    pxe and pxe_options, vendor_options
                ):
                    if rapid_commit:
                        self._sendDHCPPacket(packet, source_address, 'ACK-rapid', mac, result[0], pxe)
                    else:
                        self._sendDHCPPacket(packet, source_address, 'OFFER', mac, result[0], pxe)
                else:
                    src.logging.writeLog('Ignoring %(mac)s per loadDHCPPacket()' % {
                        'mac': mac,
                    })
                    self._logDiscardedPacket()
            else:
                if conf.AUTHORITATIVE:
                    packet.transformToDHCPNackPacket()
                    self._sendDHCPPacket(packet, source_address, 'NAK', mac, '?.?.?.?', pxe)
                else:
                    #Unknown MAC: blacklist temporarily to suppress DISCOVER spam.
                    src.logging.writeLog('%(mac)s unknown; ignoring for %(time)i seconds' % {
                        'mac': mac,
                        'time': conf.UNAUTHORIZED_CLIENT_TIMEOUT,
                    })
                    self._stats_lock.acquire()
                    self._ignored_addresses.append([mac, conf.UNAUTHORIZED_CLIENT_TIMEOUT])
                    self._stats_lock.release()
        except Exception, e:
            src.logging.sendErrorReport('Unable to respond to %(mac)s' % {'mac': mac,}, e)
    else:
        self._logDiscardedPacket()
    self._logTimeTaken(time.time() - start_time)
def _handleDHCPLeaseQuery(self, packet, source_address, pxe):
    """
    Evaluates a DHCPLEASEQUERY request from a relay and determines whether
    a DHCPLEASEACTIVE or DHCPLEASEUNKNOWN should be sent.

    The logic here is to make sure the MAC isn't ignored or acting
    maliciously, then check the database to see whether it has an assigned
    IP. If it does, DHCPLEASEACTIVE is sent. Otherwise, DHCPLEASEUNKNOWN is
    sent.

    @type packet: L{libpydhcpserver.dhcp_packet.DHCPPacket}
    @param packet: The DHCPLEASEQUERY to be evaluated.
    @type source_address: tuple
    @param source_address: The address (host, port) from which the request
        was received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
    if not self._evaluateRelay(packet, pxe):
        return
    start_time = time.time()
    mac = None
    try:
        mac = packet.getHardwareAddress()
    except:
        #Deliberate best-effort: a leasequery may be keyed by IP or
        #client-ID instead of MAC; the None check below handles that.
        pass
    if not mac: #IP/client-ID-based lookup; not supported.
        self._logDiscardedPacket()
        return
    #A non-empty comprehension result means the MAC is currently blacklisted.
    if not [None for (ignored_mac, timeout) in self._ignored_addresses if mac == ignored_mac]:
        if not self._logDHCPAccess(mac):
            self._logDiscardedPacket()
            return
        src.logging.writeLog('DHCPLEASEQUERY for %(mac)s' % {
            'mac': mac,
        })
        try:
            result = self._sql_broker.lookupMAC(mac)
            if result:
                packet.transformToDHCPLeaseActivePacket()
                if packet.setOption('yiaddr', ipToList(result[0])):
                    self._sendDHCPPacket(packet, source_address, 'LEASEACTIVE', mac, result[0], pxe)
                else:
                    #result[-2] and result[-1] are the subnet and serial columns.
                    _logInvalidValue('ip', result[0], result[-2], result[-1])
            else:
                packet.transformToDHCPLeaseUnknownPacket()
                self._sendDHCPPacket(packet, source_address, 'LEASEUNKNOWN', mac, '?.?.?.?', pxe)
        except Exception, e:
            src.logging.sendErrorReport('Unable to respond for %(mac)s' % {'mac': mac,}, e)
    else:
        self._logDiscardedPacket()
    self._logTimeTaken(time.time() - start_time)
def _handleDHCPRequest(self, packet, source_address, pxe):
    """
    Evaluates a DHCPREQUEST request from a client and determines whether a
    DHCPACK should be sent.

    The logic here is to make sure the MAC isn't ignored or acting
    maliciously, then check the database to see whether it has an assigned
    IP. If it does, and the IP it thinks it has a right to matches this IP,
    then an ACK is sent, along with all relevant options; if not, a DHCPNAK
    is sent to inform the client that it is not allowed to use the requested
    IP, forcing it to DISCOVER a new one.

    If policy forbids RENEW and REBIND operations, perhaps to prepare for a
    new configuration rollout, all such requests are NAKed immediately.

    @type packet: L{libpydhcpserver.dhcp_packet.DHCPPacket}
    @param packet: The DHCPREQUEST to be evaluated.
    @type source_address: tuple
    @param source_address: The address (host, port) from which the request
        was received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
    if not self._evaluateRelay(packet, pxe):
        return
    start_time = time.time()
    mac = packet.getHardwareAddress()
    #A non-empty comprehension result means the MAC is currently blacklisted.
    if not [None for (ignored_mac, timeout) in self._ignored_addresses if mac == ignored_mac]:
        if not self._logDHCPAccess(mac):
            self._logDiscardedPacket()
            return
        ip = packet.getOption("requested_ip_address")
        sid = packet.getOption("server_identifier")
        ciaddr = packet.getOption("ciaddr")
        giaddr = packet.getOption("giaddr")
        #Dotted-quad string forms, or a falsy value when the option is unset.
        s_ip = ip and '.'.join(map(str, ip))
        s_sid = sid and '.'.join(map(str, sid))
        s_ciaddr = ciaddr and '.'.join(map(str, ciaddr))
        #Normalise absent/zeroed fields to None so the state tests below
        #can distinguish SELECTING/INIT-REBOOT/RENEW-REBIND per RFC 2131 4.3.2.
        if not ip or ip == [0,0,0,0]:
            ip = None
        if not sid or sid == [0,0,0,0]:
            sid = None
        if not ciaddr or ciaddr == [0,0,0,0]:
            ciaddr = None
        if not giaddr or giaddr == [0,0,0,0]:
            giaddr = None
        else:
            giaddr = tuple(giaddr)
        if sid and not ciaddr: #SELECTING
            if s_sid == self._server_address: #Chosen!
                src.logging.writeLog('DHCPREQUEST:SELECTING from %(mac)s' % {
                    'mac': mac,
                })
                try:
                    result = self._sql_broker.lookupMAC(mac)
                    if result and (not ip or result[0] == s_ip):
                        packet.transformToDHCPAckPacket()
                        pxe_options = packet.extractPXEOptions()
                        vendor_options = packet.extractVendorOptions()
                        self._loadDHCPPacket(packet, result)
                        #Give the site-local hook a chance to veto or tweak the response.
                        if conf.loadDHCPPacket(
                            packet,
                            mac, tuple(ipToList(result[0])), giaddr,
                            result[9], result[10],
                            pxe and pxe_options, vendor_options
                        ):
                            self._sendDHCPPacket(packet, source_address, 'ACK', mac, s_ip, pxe)
                        else:
                            src.logging.writeLog('Ignoring %(mac)s per loadDHCPPacket()' % {
                                'mac': mac,
                            })
                            self._logDiscardedPacket()
                    else:
                        packet.transformToDHCPNackPacket()
                        self._sendDHCPPacket(packet, source_address, 'NAK', mac, 'NO-MATCH', pxe)
                except Exception, e:
                    src.logging.sendErrorReport('Unable to respond to %(mac)s' % {'mac': mac,}, e)
            else:
                #The client chose another server; stay quiet.
                self._logDiscardedPacket()
        elif not sid and not ciaddr and ip: #INIT-REBOOT
            src.logging.writeLog('DHCPREQUEST:INIT-REBOOT from %(mac)s' % {
                'mac': mac,
            })
            try:
                result = self._sql_broker.lookupMAC(mac)
                if result and result[0] == s_ip:
                    packet.transformToDHCPAckPacket()
                    pxe_options = packet.extractPXEOptions()
                    vendor_options = packet.extractVendorOptions()
                    self._loadDHCPPacket(packet, result)
                    if conf.loadDHCPPacket(
                        packet,
                        mac, tuple(ip), giaddr,
                        result[9], result[10],
                        pxe and pxe_options, vendor_options
                    ):
                        self._sendDHCPPacket(packet, source_address, 'ACK', mac, s_ip, pxe)
                    else:
                        src.logging.writeLog('Ignoring %(mac)s per loadDHCPPacket()' % {
                            'mac': mac,
                        })
                        self._logDiscardedPacket()
                else:
                    packet.transformToDHCPNackPacket()
                    self._sendDHCPPacket(packet, source_address, 'NAK', mac, s_ip, pxe)
            except Exception, e:
                src.logging.sendErrorReport('Unable to respond to %(mac)s' % {'mac': mac,}, e)
        elif not sid and ciaddr and not ip: #RENEWING or REBINDING
            if conf.NAK_RENEWALS and not pxe:
                #Policy: force clients back through DISCOVER.
                packet.transformToDHCPNackPacket()
                self._sendDHCPPacket(packet, source_address, 'NAK', mac, 'NAK_RENEWALS', pxe)
            else:
                #A unicast source means RENEW; a broadcast source means REBIND.
                renew = source_address[0] not in ('255.255.255.255', '0.0.0.0', '')
                if renew:
                    src.logging.writeLog('DHCPREQUEST:RENEW from %(mac)s' % {
                        'mac': mac,
                    })
                else:
                    src.logging.writeLog('DHCPREQUEST:REBIND from %(mac)s' % {
                        'mac': mac,
                    })
                try:
                    result = self._sql_broker.lookupMAC(mac)
                    if result and result[0] == s_ciaddr:
                        packet.transformToDHCPAckPacket()
                        pxe_options = packet.extractPXEOptions()
                        vendor_options = packet.extractVendorOptions()
                        packet.setOption('yiaddr', ciaddr)
                        self._loadDHCPPacket(packet, result)
                        if conf.loadDHCPPacket(
                            packet,
                            mac, tuple(ciaddr), giaddr,
                            result[9], result[10],
                            pxe and pxe_options, vendor_options
                        ):
                            #Respond directly to the client's current address.
                            self._sendDHCPPacket(packet, (s_ciaddr, 0), 'ACK', mac, s_ciaddr, pxe)
                        else:
                            src.logging.writeLog('Ignoring %(mac)s per loadDHCPPacket()' % {
                                'mac': mac,
                            })
                            self._logDiscardedPacket()
                    else:
                        if renew:
                            packet.transformToDHCPNackPacket()
                            self._sendDHCPPacket(packet, (s_ciaddr, 0), 'NAK', mac, s_ciaddr, pxe)
                        else:
                            #REBIND from an unknown binding: let another server answer.
                            self._logDiscardedPacket()
                except Exception, e:
                    src.logging.sendErrorReport('Unable to respond to %(mac)s' % {'mac': mac,}, e)
        else:
            #Field combination matches no RFC 2131 REQUEST state.
            src.logging.writeLog('DHCPREQUEST:UNKNOWN (%(sid)s %(ciaddr)s %(ip)s) from %(mac)s' % {
                'sid': str(sid),
                'ciaddr': str(ciaddr),
                'ip': str(ip),
                'mac': mac,
            })
            self._logDiscardedPacket()
    else:
        self._logDiscardedPacket()
    self._logTimeTaken(time.time() - start_time)
def _handleDHCPInform(self, packet, source_address, pxe):
    """
    Evaluates a DHCPINFORM request from a client and determines whether a
    DHCPACK should be sent.

    The logic here is to make sure the MAC isn't ignored or acting
    maliciously, then check the database to see whether it has an assigned
    IP. If it does, and the IP it thinks it has a right to matches this IP,
    then an ACK is sent, along with all relevant options; if not, the
    request is ignored.

    @type packet: L{libpydhcpserver.dhcp_packet.DHCPPacket}
    @param packet: The DHCPINFORM to be evaluated.
    @type source_address: tuple
    @param source_address: The address (host, port) from which the request
        was received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
    if not self._evaluateRelay(packet, pxe):
        return
    start_time = time.time()
    mac = packet.getHardwareAddress()
    #A non-empty comprehension result means the MAC is currently blacklisted.
    if not [None for (ignored_mac, timeout) in self._ignored_addresses if mac == ignored_mac]:
        if not self._logDHCPAccess(mac):
            self._logDiscardedPacket()
            return
        ciaddr = packet.getOption("ciaddr")
        giaddr = packet.getOption("giaddr")
        #NOTE(review): unlike _handleDHCPRequest, this join is not guarded
        #with 'ciaddr and ...'; it appears to assume getOption("ciaddr")
        #always returns a list -- confirm against libpydhcpserver.
        s_ciaddr = '.'.join(map(str, ciaddr))
        if not ciaddr or ciaddr == [0,0,0,0]:
            ciaddr = None
        if not giaddr or giaddr == [0,0,0,0]:
            giaddr = None
        else:
            giaddr = tuple(giaddr)
        src.logging.writeLog('DHCPINFORM from %(mac)s' % {
            'mac': mac,
        })
        if not ciaddr:
            #INFORM requires the client to supply its own address.
            src.logging.writeLog('%(mac)s sent malformed packet; ignoring for %(time)i seconds' % {
                'mac': mac,
                'time': conf.UNAUTHORIZED_CLIENT_TIMEOUT,
            })
            self._stats_lock.acquire()
            self._ignored_addresses.append([mac, conf.UNAUTHORIZED_CLIENT_TIMEOUT])
            self._stats_lock.release()
            self._logDiscardedPacket()
            return
        try:
            result = self._sql_broker.lookupMAC(mac)
            if result:
                packet.transformToDHCPAckPacket()
                pxe_options = packet.extractPXEOptions()
                vendor_options = packet.extractVendorOptions()
                #inform=True: no yiaddr or lease-time in an INFORM response.
                self._loadDHCPPacket(packet, result, True)
                if conf.loadDHCPPacket(
                    packet,
                    mac, tuple(ipToList(result[0])), giaddr,
                    result[9], result[10],
                    pxe and pxe_options, vendor_options
                ):
                    self._sendDHCPPacket(packet, source_address, 'ACK', mac, s_ciaddr, pxe)
                else:
                    src.logging.writeLog('Ignoring %(mac)s per loadDHCPPacket()' % {
                        'mac': mac,
                    })
                    self._logDiscardedPacket()
            else:
                src.logging.writeLog('%(mac)s unknown; ignoring for %(time)i seconds' % {
                    'mac': mac,
                    'time': conf.UNAUTHORIZED_CLIENT_TIMEOUT,
                })
                self._stats_lock.acquire()
                self._ignored_addresses.append([mac, conf.UNAUTHORIZED_CLIENT_TIMEOUT])
                self._stats_lock.release()
                self._logDiscardedPacket()
        except Exception, e:
            src.logging.sendErrorReport('Unable to respond to %(mac)s' % {'mac': mac,}, e)
    else:
        self._logDiscardedPacket()
    self._logTimeTaken(time.time() - start_time)
def _handleDHCPRelease(self, packet, source_address, pxe):
    """
    Informs the DHCP operator that a client has terminated its "lease".

    This function checks to make sure the MAC isn't ignored or acting
    maliciously, then checks the database to see whether it has an assigned
    IP. If it does, and the IP it thinks it has a right to matches this IP,
    then a benign message is logged; if not, the release is flagged as
    a malicious act.

    @type packet: L{libpydhcpserver.dhcp_packet.DHCPPacket}
    @param packet: The DHCPRELEASE to be evaluated.
    @type source_address: tuple
    @param source_address: The address (host, port) from which the request
        was received.
    @type pxe: bool
    @param pxe: True if the packet was received on the PXE port.
    """
    if not self._evaluateRelay(packet, pxe):
        return
    start_time = time.time()
    mac = packet.getHardwareAddress()
    #A non-empty comprehension result means the MAC is currently blacklisted.
    if not [None for (ignored_mac, timeout) in self._ignored_addresses if mac == ignored_mac]:
        if not self._logDHCPAccess(mac):
            self._logDiscardedPacket()
            return
        #Only act if the release was aimed at this server.
        if '.'.join(map(str, packet.getOption("server_identifier"))) == self._server_address: #Released!
            ip = '.'.join(map(str, packet.getOption("ciaddr")))
            result = self._sql_broker.lookupMAC(mac)
            if result and result[0] == ip: #Known client.
                src.logging.writeLog('DHCPRELEASE from %(mac)s for %(ip)s' % {
                    'ip': ip,
                    'mac': mac,
                })
            else:
                src.logging.writeLog('Misconfigured client %(mac)s sent DHCPRELEASE for %(ip)s' % {
                    'ip': ip,
                    'mac': mac,
                })
        else:
            self._logDiscardedPacket()
    else:
        self._logDiscardedPacket()
    self._logTimeTaken(time.time() - start_time)
def _loadDHCPPacket(self, packet, result, inform=False):
    """
    Sets DHCP option fields based on values returned from the database.

    @type packet: L{libpydhcpserver.dhcp_packet.DHCPPacket}
    @param packet: The packet being updated.
    @type result: tuple(11)
    @param result: The value returned from the SQL broker.
    @type inform: bool
    @param inform: True if this is a response to a DHCPINFORM message.
    """
    (ip, hostname,
     gateway, subnet_mask, broadcast_address,
     domain_name, domain_name_servers, ntp_servers,
     lease_time, subnet, serial) = result
    #Core parameters; omitted for DHCPINFORM responses, which carry no lease.
    if not inform:
        if not packet.setOption('yiaddr', ipToList(ip)):
            _logInvalidValue('ip', ip, subnet, serial)
        if not packet.setOption('ip_address_lease_time', longToList(int(lease_time))):
            _logInvalidValue('lease_time', lease_time, subnet, serial)
    #Optional parameters, applied only when the database supplied a value.
    #Each entry is (log-label, DHCP option name, value, list-converter).
    optional_parameters = (
        ('gateway', 'router', gateway, ipToList),
        ('subnet_mask', 'subnet_mask', subnet_mask, ipToList),
        ('broadcast_address', 'broadcast_address', broadcast_address, ipToList),
        ('hostname', 'hostname', hostname, strToList),
        ('domain_name', 'domain_name', domain_name, strToList),
        ('domain_name_servers', 'domain_name_servers', domain_name_servers, ipsToList),
        ('ntp_servers', 'ntp_servers', ntp_servers, ipsToList),
    )
    for (label, option_name, value, converter) in optional_parameters:
        if value:
            if not packet.setOption(option_name, converter(value)):
                _logInvalidValue(label, value, subnet, serial)
def _logDHCPAccess(self, mac):
    """
    Increments the number of times the given MAC address has accessed this
    server. If the value exceeds the policy threshold, the MAC is ignored as
    potentially belonging to a malicious user.

    @type mac: basestring
    @param mac: The MAC being evaluated.

    @rtype: bool
    @return: True if the MAC's request should be processed.
    """
    if conf.ENABLE_SUSPEND:
        with self._stats_lock:
            previous_count = self._dhcp_assignments.get(mac)
            if not previous_count:
                #First sighting within this polling window.
                self._dhcp_assignments[mac] = 1
            else:
                self._dhcp_assignments[mac] = previous_count + 1
                if previous_count + 1 > conf.SUSPEND_THRESHOLD:
                    #Too chatty: blacklist this MAC for a while.
                    src.logging.writeLog('%(mac)s issuing too many requests; ignoring for %(time)i seconds' % {
                        'mac': mac,
                        'time': conf.MISBEHAVING_CLIENT_TIMEOUT,
                    })
                    self._ignored_addresses.append([mac, conf.MISBEHAVING_CLIENT_TIMEOUT])
                    return False
    return True
def _logDiscardedPacket(self):
    """
    Increments the count of packets that were received but not acted upon.
    """
    with self._stats_lock:
        self._packets_discarded += 1
def _logTimeTaken(self, time_taken):
    """
    Adds the processing time of one packet to the running total.

    @type time_taken: float
    @param time_taken: The number of seconds the request took.
    """
    with self._stats_lock:
        self._time_taken += time_taken
def _sendDHCPPacket(self, packet, address, response_type, mac, client_ip, pxe):
    """
    Sends the given packet to the right destination based on its properties.

    If the request originated from a host that knows its own IP, the packet
    is transmitted via unicast; in the event of a relayed request, it is sent
    to the 'server port', rather than the 'client port', per RFC 2131.

    If it was picked up as a broadcast packet, it is sent to the local subnet
    via the same mechanism, but to the 'client port'.

    @type packet: L{libpydhcpserver.dhcp_packet.DHCPPacket}
    @param packet: The packet to be transmitted.
    @type address: tuple
    @param address: The address from which the packet was received:
        (host, port)
    @type response_type: basestring
    @param response_type: The DHCP subtype of this response: 'OFFER', 'ACK',
        'NAK'
    @type mac: basestring
    @param mac: The MAC of the client for which this packet is destined.
    @type client_ip: basestring
    @param client_ip: The IP being assigned to the client.
    @type pxe: bool
    @param pxe: True if the packet was received via the PXE port

    @rtype: int
    @return: The number of bytes transmitted.
    """
    ip = port = None
    if address[0] not in ('255.255.255.255', '0.0.0.0', ''): #Unicast.
        giaddr = packet.getOption("giaddr")
        if giaddr and not giaddr == [0,0,0,0]: #Relayed request.
            #Answer the relay agent on the server port, per RFC 2131.
            ip = '.'.join(map(str, giaddr))
            port = self._server_port
        else: #Request directly from client, routed or otherwise.
            ip = address[0]
            if pxe:
                port = address[1] or self._client_port #BSD doesn't seem to preserve port information
            else:
                port = self._client_port
    else: #Broadcast.
        ip = '255.255.255.255'
        port = self._client_port
    packet.setOption('server_identifier', ipToList(self._server_address))
    #Renamed from 'bytes' to avoid shadowing the builtin of the same name.
    bytes_sent = self._sendDHCPPacketTo(packet, ip, port, pxe)
    src.logging.writeLog('DHCP%(type)s sent to %(mac)s for %(client)s via %(ip)s:%(port)i %(pxe)s[%(bytes)i bytes]' % {
        'type': response_type,
        'mac': mac,
        'client': client_ip,
        'bytes': bytes_sent,
        'ip': ip,
        'port': port,
        'pxe': pxe and '(PXE) ' or '',
    })
    return bytes_sent
def flushCache(self):
    """
    Flushes the DHCP cache by delegating to the SQL broker.
    """
    self._sql_broker.flushCache()
def getNextDHCPPacket(self):
    """
    Listens for a DHCP packet and initiates processing upon receipt,
    counting every packet that was actually handled.
    """
    handled = self._getNextDHCPPacket()
    if handled:
        with self._stats_lock:
            self._packets_processed += 1
def getStats(self):
    """
    Returns the performance statistics of all operations performed since the
    last polling event, resets all counters, and updates the time left before
    ignored MACs' requests will be processed again.
    """
    with self._stats_lock:
        #Age every blacklist entry by one polling interval and drop the
        #ones whose penance is complete, in a single pass.
        still_ignored = []
        for entry in self._ignored_addresses:
            entry[1] -= conf.POLLING_INTERVAL
            if entry[1] > 0:
                still_ignored.append(entry)
        self._ignored_addresses = still_ignored
        snapshot = (
            self._packets_processed,
            self._packets_discarded,
            self._time_taken,
            len(self._ignored_addresses),
        )
        self._packets_processed = 0
        self._packets_discarded = 0
        self._time_taken = 0.0
        if conf.ENABLE_SUSPEND:
            self._dhcp_assignments = {}
        return snapshot
class DHCPService(threading.Thread):
    """
    A thread that handles DHCP requests indefinitely, daemonically.
    """
    _dhcp_server = None #: The handler that responds to DHCP requests.

    def __init__(self):
        """
        Sets up the DHCP server.

        @raise Exception: If a problem occurs while binding the sockets needed
            to handle DHCP traffic.
        """
        threading.Thread.__init__(self)
        self.daemon = True
        #int(o) round-trips each octet to normalise values like '01' -> '1'.
        self._dhcp_server = _DHCPServer(
            '.'.join([str(int(o)) for o in conf.DHCP_SERVER_IP.split('.')]),
            int(conf.DHCP_SERVER_PORT),
            int(conf.DHCP_CLIENT_PORT),
            conf.PXE_PORT and int(conf.PXE_PORT)
        )
        _dhcp_servers.append(self._dhcp_server) #Add this server to the global list.
        src.logging.writeLog('Configured DHCP server')

    def run(self):
        """
        Runs the DHCP server indefinitely.

        In the event of an unexpected error, e-mail will be sent and processing
        will continue with the next request.
        """
        src.logging.writeLog('Running DHCP server')
        while True:
            try:
                self._dhcp_server.getNextDHCPPacket()
            except select.error:
                #Interrupted system calls (e.g. on SIGHUP) are expected; retry.
                src.logging.writeLog('Suppressed non-fatal select() error in DHCP module')
            except Exception, e:
                src.logging.sendErrorReport('Unhandled exception', e)

    def pollStats(self):
        """
        Updates the performance statistics in the in-memory stats-log and
        implicitly updates the ignored MACs values.
        """
        (processed, discarded, time_taken, ignored_macs) = self._dhcp_server.getStats()
        src.logging.writePollRecord(processed, discarded, time_taken, ignored_macs)
| Python |
#This file is interpreted by Python and it may be extended with any Python code
#you wish, allowing you to do things like query web services to get values.

#General settings
#######################################
#If True, all events will be printed to console.
DEBUG = True
#The name to use when referring to this system.
SYSTEM_NAME = 'staticDHCPd'
#The file to which logs should be dumped on receipt of TERM or HUP.
LOG_FILE = '/var/log/' + SYSTEM_NAME + '.log'
#True to write logfiles with the current timestamp; requires that staticDHCPd
#has write-access to the directory above, which may be a security risk.
LOG_FILE_TIMESTAMP = False
#The file to which PID information should be written.
PID_FILE = '/var/run/' + SYSTEM_NAME + '.pid'
#The frequency at which the DHCP server's stats will be polled, in seconds.
POLLING_INTERVAL = 30
#The number of events to keep in the server's in-memory log-buffer.
LOG_CAPACITY = 1000
#The number of polling intervals' worth of statistics to track.
POLL_INTERVALS_TO_TRACK = 20
#Server settings
#######################################
#The UID that will run this daemon.
UID = 0
#The GID that will run this daemon.
GID = 0
#The IP of the interface on which DHCP responses should be sent.
#This value must be set to a real IP.
DHCP_SERVER_IP = '192.168.56.101'
#The port on which DHCP requests are to be received; 67 is the standard.
DHCP_SERVER_PORT = 67
#The port on which clients wait for DHCP responses; 68 is the standard.
DHCP_CLIENT_PORT = 68
#Set this to a port-number (4011 is standard) to enable PXE-processing.
PXE_PORT = None
#True to enable access to server statistics and logs.
WEB_ENABLED = True
#The IP of the interface on which the HTTP interface should be served.
#Use '0.0.0.0' to serve on all interfaces.
WEB_IP = '0.0.0.0'
#The port on which the HTTP interface should be served.
#NOTE(review): a prior comment claimed this value is ignored and 8080 is
#always used -- verify against the web module before relying on this setting.
WEB_PORT = 8080
#Server behaviour settings
#######################################
#If False, local (non-relayed) DHCP requests will be ignored.
ALLOW_LOCAL_DHCP = True
#If False, relayed DHCP requests will be ignored.
ALLOW_DHCP_RELAYS = False
#A list of all relay IPs allowed to relay requests; if empty, all are allowed.
ALLOWED_DHCP_RELAYS = []
#If True, any unknown MACs will be NAKed instead of ignored. If you may have
#more than one DHCP server serving a single LAN, this is NOT something you
#should enable.
AUTHORITATIVE = False
#If True, REBIND and RENEW requests are NAKed when received, forcing clients to
#either wait out their lease or return to the DISCOVER phase.
NAK_RENEWALS = False
#The number of seconds for which to ignore unknown MACs.
UNAUTHORIZED_CLIENT_TIMEOUT = 60
#The number of seconds for which to ignore potentially malicious MACs.
MISBEHAVING_CLIENT_TIMEOUT = 150
#If True, MACs requesting too many addresses will be flagged as malicious.
ENABLE_SUSPEND = True
#The number of times a well-behaved MAC can search for or request an IP
#within the polling interval before being suspended.
SUSPEND_THRESHOLD = 10
#MD5 of the password needed to reload config via the web interface.
WEB_RELOAD_KEY = '5f4dcc3b5aa765d61d8327deb882cf99'
#Database settings
#######################################
#NOTE(review): credentials below are stored in plaintext; restrict read
#access to this file accordingly.
#Allowed values: MySQL, PostgreSQL, Oracle, SQLite
DATABASE_ENGINE = 'PostgreSQL'
#Controls whether DHCP data gleaned from database lookups should be cached until
#manually flushed; consumes more resources and adds a step before a MAC can be
#automatically NAKed or have its details updated, but dramatically improves
#performance under heavy loads.
USE_CACHE = False
#Controls whether SQL daemon connections are pooled. This only works if the
#eventlet library has been installed and you've chosen a pooling-friendly
#engine, which excludes SQLite.
USE_POOL = True

#MYSQL_* values used only with 'MySQL' engine.
#The name of your database.
MYSQL_DATABASE = 'dhcp'
#The name of a user with SELECT access.
MYSQL_USERNAME = 'dhcp_user'
#The password of the user.
MYSQL_PASSWORD = 'dhcp_pass'
#The host on which MySQL is running. None for 'localhost'.
MYSQL_HOST = None
#The port on which MySQL is running; ignored when HOST is None.
MYSQL_PORT = 3306
#The number of threads that may read the database at once.
MYSQL_MAXIMUM_CONNECTIONS = 4

#POSTGRESQL_* values used only with 'PostgreSQL' engine.
#The name of your database.
POSTGRESQL_DATABASE = 'dhcp'
#The name of a user with SELECT access.
POSTGRESQL_USERNAME = 'dhcp_user'
#The password of the user.
POSTGRESQL_PASSWORD = '1234'
#The host on which PostgreSQL is running. None for 'localhost'.
POSTGRESQL_HOST = '127.0.0.1'
#The port on which PostgreSQL is running; ignored when HOST is None.
POSTGRESQL_PORT = 5432
#The SSL mode to use; ignored when HOST is None.
#http://www.postgresql.org/docs/9.0/static/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS
POSTGRESQL_SSLMODE = 'allow'
#The number of threads that may read the database at once.
POSTGRESQL_MAXIMUM_CONNECTIONS = 4

#ORACLE_* values used only with 'Oracle' engine.
#The name of your database (from tnsnames.ora).
ORACLE_DATABASE = 'dhcp'
#The name of a user with SELECT access.
ORACLE_USERNAME = 'dhcp_user'
#The password of the user.
ORACLE_PASSWORD = 'dhcp_pass'
#The number of threads that may read the database at once.
ORACLE_MAXIMUM_CONNECTIONS = 4

#SQLITE_* values used only with 'SQLite' engine.
#The file that contains your SQLite database.
SQLITE_FILE = '/root/Development/py-dhcpd/dhcp.sqlite3'
#E-mail settings
#######################################
#True to allow staticDHCPd to inform you of any problems it cannot handle by
#itself. (*Very* useful for development and fast troubleshooting)
EMAIL_ENABLED = False
#The server that receives your e-mail.
EMAIL_SERVER = 'mail.yourdomain.com'
#The port on the server that receives your e-mail.
EMAIL_PORT = 25
#The number of seconds to wait for e-mail to be accepted before timing out.
#NOTE(review): EMAIL_TIMEOUT is assigned twice in this file (here and below,
#with a different meaning and value); this assignment is silently overridden
#by the later one. One of the two should almost certainly be renamed --
#confirm which meaning src.logging actually relies on.
EMAIL_TIMEOUT = 10
#The user from which e-mail should claim to be sent.
EMAIL_SOURCE = 'you@yourdomain.com'
#The user to whom e-mail should be sent.
EMAIL_DESTINATION = 'problems@yourdomain.com'
#The user who should authenticate to the mail server.
#If None, SMTP authentication is not used.
EMAIL_USER = 'you'
#The password of the user who should authenticate to the mail server.
EMAIL_PASSWORD = 'password'
#The number of seconds to wait between sending e-mails.
#NOTE(review): duplicate assignment -- see the note on the earlier
#EMAIL_TIMEOUT above; this value (600) is the one that takes effect.
EMAIL_TIMEOUT = 600
#DHCP-processing functions
#######################################
def init():
    """Hook called once at startup; perform any required imports or setup
    operations within this function."""
    pass
def loadDHCPPacket(packet, mac, client_ip, relay_ip, subnet, serial, pxe, vendor):
    #This is a custom function, called before each packet is sent, that
    #allows you to tweak the options attached to a DHCP response.
    #
    #If, for any reason, you want to abort sending the packet, return False.
    #
    #If you need to add, test for, or delete an option, consult staticDHCPd's
    #rule-writing guide.
    #
    ##### PARAMETERS #####
    #mac is a human-readable MAC string, lower-case, separated by colons.
    #client_ip is a quadruple of octets: (192, 168, 1, 1)
    #relay_ip is either None or an address as a quadruple of octets,
    # depending on whether this is a response to a relay request.
    #subnet and serial are values passed through from the database, as a
    # string and int, respectively.
    #pxe is False if not used or a triple containing, in order, option 93
    # (client_system) as a sequence of ints, option 94 (client_ndi) as a
    # sequence of three bytes, and option 97 (uuid_guid) as digested data:
    # (type:byte, data:[byte]). Any unset options are presented as None.
    #vendor is a four-tuple containing, in order, option 43
    # (vendor_specific_information) as a string of bytes, option 60
    # (vendor_class_identifier) as a string, and both option 124
    # (vendor_class) and option 125 (vendor_specific) as digested data:
    # [(enterprise_number:int, data:string)] and
    # [(enterprise_number:int, [(subopt_code:byte, data:string)])],
    # respectively. Any unset options are presented as None.
    #
    #The default implementation accepts every packet unchanged.
    return True
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test app for mainloop stuff."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import unittest
import weakref
import google3
import tornado.ioloop
import mainloop
#The line sent through the test socket; handlers assert on receiving it.
REQUEST_STRING = 'this is the request\n'
#Counters incremented by the two idle callbacks below: [IdleFunc, ClassIdleFunc].
idler = [0,0]
@mainloop.WaitUntilIdle
def IdleFunc():
    #Deferred until the loop goes idle; increments the module-level counter.
    print 'i0'
    idler[0] += 1
class IdleClass(object):
    """Exercises WaitUntilIdle applied to a bound method (per-instance dedup)."""

    @mainloop.WaitUntilIdle
    def ClassIdleFunc(self):
        print 'i1: %r' % self
        idler[1] += 1
class MainLoopTest(unittest.TestCase):
  """Tests for mainloop.MainLoop."""

  def _GotLine(self, line):
    """Line-received callback: verify the line, record it, stop the loop."""
    print('got line: %r' % (line,))
    tornado.ioloop.IOLoop.instance().stop()
    self.assertEqual(line, REQUEST_STRING)
    self.got += line

  def _MakeHandler(self, sock, request):
    """Accept callback: wrap the socket in a LineReader.

    Only a weakref to the reader is kept, so testMainLoop can later verify
    the reader was actually destroyed.
    """
    lr = mainloop.LineReader(sock, request, self._GotLine)
    self.handler = weakref.ref(lr)

  def _SendRequest(self, stream):
    """Connect callback: write the canned request to the stream."""
    stream.write(REQUEST_STRING)

  def testMainLoop(self):
    """End-to-end: listen, connect, send one line, verify reader cleanup."""
    self.got = ''
    loop = mainloop.MainLoop()
    listener = loop.ListenInet(('', 0), self._MakeHandler)
    stream = loop.Connect(listener.family, listener.address, self._SendRequest)
    loop.Start(timeout=5)
    print('after loop 1')
    self.assertEqual(self.got, REQUEST_STRING)
    stream.close()
    print('after close')
    loop.RunOnce(timeout=5)
    print('after loop 2')
    # This slightly weird test ensures that the LineReader object actually
    # gets destroyed after its client closes. If it didn't, we would have
    # a memory leak. self.handler is itself a weakref so that its own
    # existence doesn't prevent the object from being destroyed, thus
    # defeating our test.
    self.assertEqual(self.handler(), None)

  def testMainLoop2(self):
    """A MainLoop can be destroyed and a fresh one created afterward."""
    loop = mainloop.MainLoop()
    loop.RunOnce()
    del loop
    loop = mainloop.MainLoop()
    loop.RunOnce()

  def testIdler(self):
    """WaitUntilIdle callbacks coalesce and fire once per RunOnce."""
    print('')
    print('testIdler')
    loop = mainloop.MainLoop()
    loop.RunOnce()
    idler[0] = 0
    idler[1] = 0
    IdleFunc()
    IdleFunc()
    loop.RunOnce()
    # Two queued calls coalesce into a single invocation.
    # (was assertEquals, a deprecated alias of assertEqual)
    self.assertEqual(idler, [1, 0])
    loop.RunOnce()
    # Idle callbacks do not fire again on later iterations.
    self.assertEqual(idler, [1, 0])
    i1 = IdleClass()
    i2 = IdleClass()
    i1.ClassIdleFunc()
    i1.ClassIdleFunc()
    i2.ClassIdleFunc()
    i2.ClassIdleFunc()
    loop.RunOnce()
    # Calls coalesce per-instance: one invocation each for i1 and i2.
    self.assertEqual(idler, [1, 2])
# Run all tests when executed directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for tr-69 Download and Scheduled Download."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import collections
import datetime
import errno
import os
import shutil
import time
import urlparse
import google3
import tornado
import tornado.httpclient
import tornado.ioloop
import tornado.web
import core
import helpers
import http_download
import persistobj
# Persistent object storage filename
# Root name for persisted Download state objects (survive reboots).
DNLDROOTNAME = 'tr69_dnld'
# Root name for persisted Reboot command_key objects.
BOOTROOTNAME = 'tr69_boot'
class Installer(object):
  """Install a downloaded image and reboot.

  This default implementation returns an error response. Platforms are
  expected to implement their own Install object, and set
  tr.download.INSTALLER = their object.
  """

  def install(self, file_type, target_filename, callback):
    """Report failure via the callback; platforms override with a real install.

    Args:
      file_type: tr-69 FileType of the downloaded image (unused here).
      target_filename: destination filename for the image (unused here).
      callback: function(faultcode=, faultstring=, must_reboot=) invoked
        with the result of the install attempt.
    """
    INTERNAL_ERROR = 9002
    # Bug fix: invoke the callback argument; the original called a
    # nonexistent self.callback, raising AttributeError instead of
    # reporting the fault.
    callback(faultcode=INTERNAL_ERROR,
             faultstring='No installer for this platform.',
             must_reboot=False)

  def reboot(self):
    """Initiate a reboot. Default implementation refuses; returns False."""
    return False
# Class to be called after image is downloaded. Platform code is expected
# to put its own installer here, the default returns failed to install.
INSTALLER = Installer
# Unit tests can substitute mock objects here
# Maps URL scheme -> download client class; consulted by
# Download._new_download_object and DownloadManager.NewDownload.
DOWNLOAD_CLIENT = {
    'http': http_download.HttpDownload,
    'https': http_download.HttpDownload
}
# State machine description. Generate a diagram using Graphviz:
# ./download.py
graphviz = r"""
digraph DLstates {
node [shape=box]
START [label="START"]
WAITING [label="WAITING\nstart timer"]
DOWNLOADING [label="DOWNLOADING\nstart download"]
INSTALLING [label="INSTALLING\nstart install"]
REBOOTING [label="REBOOTING\ninitiate reboot"]
EXITING [label="EXITING\nsend TransferComplete"]
DONE [label="DONE\ncleanup, not a\nreal state"]
START -> WAITING
WAITING -> DOWNLOADING [label="timer\nexpired"]
DOWNLOADING -> INSTALLING [label="download\ncomplete"]
DOWNLOADING -> EXITING [label="download\nfailed"]
INSTALLING -> REBOOTING [label="install\ncomplete"]
INSTALLING -> EXITING [label="install\nfailed"]
INSTALLING -> EXITING [label="must_reboot=False"]
REBOOTING -> EXITING [label="rebooted,\ncorrect image"]
REBOOTING -> EXITING [label="rebooted,\nincorrect image"]
EXITING -> DONE [label="receive\nTransferCompleteResponse"]
}
"""
class Download(object):
  """A state machine to handle a single tr-69 Download RPC."""
  # States in the state machine. See docs/download.dot for details
  START = 'START'
  WAITING = 'WAITING'
  DOWNLOADING = 'DOWNLOADING'
  INSTALLING = 'INSTALLING'
  REBOOTING = 'REBOOTING'
  EXITING = 'EXITING'
  # State machine events
  EV_START = 1
  EV_TIMER = 2
  EV_DOWNLOAD_COMPLETE = 3
  EV_INSTALL_COMPLETE = 4
  EV_REBOOT_COMPLETE = 5
  EV_TCRESPONSE = 6
  def __init__(self, stateobj, transfer_complete_cb,
               download_dir=None, ioloop=None):
    """Download object.
    Args:
      stateobj: a PersistentObject to store state across reboots.
        This class requires that command_key and url attributes be present.
      transfer_complete_cb: function to send a TransferComplete message.
      download_dir: directory to download files into.
      ioloop: Tornado ioloop. Unit tests can pass in a mock.
    """
    self.stateobj = self._restore_dlstate(stateobj)
    self.transfer_complete_cb = transfer_complete_cb
    self.download_dir = download_dir
    self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
    self.download = None
    self.downloaded_fileobj = None
    self.downloaded_file = None
    self.wait_handle = None
    # the delay_seconds started when we received the RPC, even if we have
    # downloaded other files and rebooted since then.
    if not hasattr(self.stateobj, 'wait_start_time'):
      self.stateobj.Update(wait_start_time=time.time())
  def CommandKey(self):
    """Return the CommandKey from the originating RPC, or None."""
    return getattr(self.stateobj, 'command_key', None)
  def _restore_dlstate(self, stateobj):
    """Re-enter the state machine at a sane state.
    This state machine is supposed to download a file, install that file,
    reboot, and send a completion. To do this it stores its state to
    the filesystem so it can read it back in after a reboot.
    If we reboot unexpectedly, like a power failure, we may have to backtrack.
    For example if we had downloaded the file to /tmp and then powered off,
    we lose the file and have to download it again.
    The state machine can only resume into the START and REBOOTING states.
    Args:
      stateobj: the PersistentObject for this transfer
    Returns:
      the stateobj
    """
    if not hasattr(stateobj, 'dlstate'):
      stateobj.Update(dlstate=self.START)
    dlstate = stateobj.dlstate
    # EXITING also maps to REBOOTING: we may have rebooted before the
    # TransferCompleteResponse arrived, so re-run the post-reboot path.
    if dlstate == self.REBOOTING or dlstate == self.EXITING:
      stateobj.Update(dlstate=self.REBOOTING)
    else:
      stateobj.Update(dlstate=self.START)
    return stateobj
  def _schedule_timer(self):
    """Arm the ioloop timer for the remaining portion of delay_seconds."""
    delay_seconds = getattr(self.stateobj, 'delay_seconds', 0)
    now = time.time()
    wait_start_time = self.stateobj.wait_start_time
    # sanity checks
    if wait_start_time > now:
      # wall clock moved backwards (e.g. across a reboot); don't wait forever
      wait_start_time = now
    when = wait_start_time + delay_seconds
    if when < now:
      when = now
    self.wait_handle = self.ioloop.add_timeout(
        datetime.timedelta(seconds=when-now),
        self.timer_callback)
  def _new_download_object(self, stateobj):
    """Construct the scheme-appropriate download client for stateobj.url."""
    url = getattr(stateobj, 'url', '')
    username = getattr(stateobj, 'username', None)
    password = getattr(stateobj, 'password', None)
    o = urlparse.urlparse(url)
    # KeyError here for unsupported schemes; NewDownload validates first.
    client = DOWNLOAD_CLIENT[o.scheme]
    return client(url=url, username=username, password=password,
                  download_complete_cb=self.download_complete_callback,
                  download_dir=self.download_dir)
  def _send_transfer_complete(self, faultcode, faultstring, start=0.0, end=0.0):
    """Invoke the TransferComplete callback with this transfer's details."""
    event_code = getattr(self.stateobj, 'event_code', 'M Download')
    self.transfer_complete_cb(dl=self,
                              command_key=self.stateobj.command_key,
                              faultcode=faultcode,
                              faultstring=faultstring,
                              starttime=start, endtime=end,
                              event_code=event_code)
  def state_machine(self, event, faultcode=0, faultstring='',
                    downloaded_file=None, must_reboot=False):
    """Advance the download state machine in response to an event.

    Args:
      event: one of the EV_* constants.
      faultcode: 0 for success, else a tr-69 fault code.
      faultstring: human-readable description of the fault, if any.
      downloaded_file: filename of the completed download (EV_DOWNLOAD_COMPLETE).
      must_reboot: whether the installer requires a reboot (EV_INSTALL_COMPLETE).
    """
    dlstate = self.stateobj.dlstate
    if dlstate == self.START:
      if event == self.EV_START or event == self.EV_REBOOT_COMPLETE:
        self.stateobj.Update(dlstate=self.WAITING)
        self._schedule_timer()
    elif dlstate == self.WAITING:
      if event == self.EV_TIMER:
        self.download = self._new_download_object(self.stateobj)
        self.stateobj.Update(dlstate=self.DOWNLOADING,
                             download_start_time=time.time())
        self.download.fetch()
        # TODO(dgentry) : need a timeout, in case download never finishes.
    elif dlstate == self.DOWNLOADING:
      if event == self.EV_DOWNLOAD_COMPLETE:
        self.download = None # no longer needed
        if faultcode == 0:
          self.installer = INSTALLER(downloaded_file)
          self.stateobj.Update(dlstate=self.INSTALLING)
          file_type = getattr(self.stateobj, 'file_type', None)
          target_filename = getattr(self.stateobj, 'target_filename', None)
          self.installer.install(file_type=file_type,
                                 target_filename=target_filename,
                                 callback=self.installer_callback)
        else:
          self.stateobj.Update(dlstate=self.EXITING)
          self._send_transfer_complete(faultcode, faultstring)
    elif dlstate == self.INSTALLING:
      if event == self.EV_INSTALL_COMPLETE:
        # image has been consumed by the installer; remove the temp file
        if self.downloaded_file:
          helpers.Unlink(self.downloaded_file)
        if faultcode == 0:
          if must_reboot:
            self.stateobj.Update(dlstate=self.REBOOTING)
            self.installer.reboot()
          else:
            end = time.time()
            self.stateobj.Update(dlstate=self.EXITING,
                                 download_complete_time=end)
            start = getattr(self.stateobj, 'download_start_time', 0.0)
            self._send_transfer_complete(faultcode=0, faultstring='',
                                         start=start, end=end)
        else:
          self.stateobj.Update(dlstate=self.EXITING)
          self._send_transfer_complete(faultcode, faultstring)
    elif dlstate == self.REBOOTING:
      if event == self.EV_REBOOT_COMPLETE:
        # TODO(dgentry) check version, whether image was actually installed
        end = time.time()
        self.stateobj.Update(dlstate=self.EXITING, download_complete_time=end)
        if faultcode == 0:
          start = getattr(self.stateobj, 'download_start_time', 0.0)
          self._send_transfer_complete(faultcode=0, faultstring='',
                                       start=start, end=end)
        else:
          self._send_transfer_complete(faultcode, faultstring)
    elif dlstate == self.EXITING:
      # terminal state; wait for TransferCompleteResponse, then cleanup()
      pass
  def do_start(self):
    """Kick off the state machine (START -> WAITING)."""
    return self.state_machine(self.EV_START)
  def timer_callback(self):
    """Called by timer code when timeout expires."""
    return self.state_machine(self.EV_TIMER)
  def download_complete_callback(self, faultcode, faultstring, tmpfile):
    """Called by the download client when the fetch finishes (or fails)."""
    print 'Download complete callback.'
    name = tmpfile and tmpfile.name or None
    self.downloaded_fileobj = tmpfile # keep this around or it auto-deletes
    self.downloaded_file = name
    return self.state_machine(self.EV_DOWNLOAD_COMPLETE,
                              faultcode, faultstring,
                              downloaded_file=name)
  def installer_callback(self, faultcode, faultstring, must_reboot):
    """Called by the platform installer when the install finishes."""
    return self.state_machine(self.EV_INSTALL_COMPLETE, faultcode, faultstring,
                              must_reboot=must_reboot)
  def reboot_callback(self, faultcode, faultstring):
    """Called after a reboot, to resume a transfer persisted before it."""
    return self.state_machine(self.EV_REBOOT_COMPLETE, faultcode, faultstring)
  def cleanup(self):
    """Attempt to stop all activity and clean up resources.
    Returns:
      False - successfully stopped and cleaned up
      string - the reason download cannot be safely cancelled right now.
    """
    # NOTE(review): the success path actually falls through and returns None
    # (which is falsy, so callers testing truthiness still work) -- confirm
    # before relying on an explicit False.
    dlstate = self.stateobj.dlstate
    if dlstate == self.INSTALLING:
      return 'Download is currently installing to flash'
    if dlstate == self.REBOOTING:
      return 'Download has been installed, awaiting reboot'
    if self.wait_handle:
      self.ioloop.remove_timeout(self.wait_handle)
      self.wait_handle = None
    if self.download:
      self.download.close()
      self.download = None
    self.stateobj.Delete()
  def get_queue_state(self):
    """Data needed for GetQueuedTransfers/GetAllQueuedTransfers RPC."""
    # NOTE(review): attributes are set on the namedtuple *class* (not an
    # instance) and the class itself is returned; attribute access works,
    # but each call creates a fresh class.
    q = collections.namedtuple(
        'queued_transfer_struct',
        ('CommandKey State IsDownload FileType FileSize TargetFileName'))
    q.CommandKey = self.stateobj.command_key
    dlstate = self.stateobj.dlstate
    if dlstate == self.START or dlstate == self.WAITING:
      qstate = 1 # Not yet started
    elif dlstate == self.EXITING:
      qstate = 3 # Completed, finishing cleanup
    else:
      qstate = 2 # In progress
    q.State = qstate
    q.IsDownload = True
    q.FileType = getattr(self.stateobj, 'file_type', None)
    q.FileSize = getattr(self.stateobj, 'file_size', 0)
    q.TargetFileName = getattr(self.stateobj, 'target_filename', '')
    return q
# Object to track an individual Download RPC. Unit tests can override this.
# DownloadManager instantiates DOWNLOADOBJ once per tr-69 Download RPC.
DOWNLOADOBJ = Download
class DownloadManager(object):
"""Manage Download requests from the ACS.
Each RPC gets a Download object, which runs a state machine to track
the progress of the operation. The DownloadManager allocates, manages
and deletes the active Download objects.
SPEC: http://www.broadband-forum.org/technical/download/TR-069_Amendment-3.pdf
"""
# Maximum simultaneous downloads. tr-69 requires minimum of 3.
MAXDOWNLOADS = 1
def __init__(self, ioloop=None):
self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
self._downloads = list()
self._pending_complete = list()
self.config_dir = '/tmp/'
self.download_dir = '/tmp/'
# Function to send RPCs, to be filled in by parent object.
self.send_transfer_complete = None
def NewDownload(self, command_key=None, file_type=None, url=None,
username=None, password=None, file_size=0,
target_filename=None, delay_seconds=0):
"""Initiate a new download, handling a tr-69 Download RPC.
Args:
command_key, file_type, url, username, password, file_size:
target_filename, delay_seconds: as defined in tr-69 Amendment 3
(page 82 of $SPEC)
Raises:
core.ResourcesExceededError: too many simultaneous downloads
core.FileTransferProtocolError: Unsupported URL type, ex: ftp
Returns:
(code, starttime, endtime):
code = status to return (1 == send TransferComplete later, $SPEC pg 85)
starttime, endtime = two floating point numbers in seconds for the
StartTime and CompleteTime of the DownloadResponse.
"""
# TODO(dgentry) check free space?
if len(self._downloads) >= self.MAXDOWNLOADS:
faultstring = 'Max downloads (%d) reached.' % self.MAXDOWNLOADS
raise core.ResourcesExceededError(faultstring)
o = urlparse.urlparse(url)
if o.scheme not in DOWNLOAD_CLIENT:
raise core.FileTransferProtocolError(
'Unsupported URL scheme %s' % o.scheme)
kwargs = dict(command_key=command_key,
file_type=file_type,
url=url,
username=username,
password=password,
file_size=file_size,
target_filename=target_filename,
delay_seconds=delay_seconds,
event_code='M Download')
pobj = persistobj.PersistentObject(objdir=self.config_dir,
rootname=DNLDROOTNAME,
filename=None,
ignore_errors=True,
**kwargs)
dl = DOWNLOADOBJ(stateobj=pobj,
transfer_complete_cb=self.TransferCompleteCallback,
download_dir=self.download_dir)
self._downloads.append(dl)
dl.do_start()
return (1, 0.0, 0.0)
def TransferCompleteCallback(self, dl, command_key, faultcode, faultstring,
starttime, endtime, event_code):
self._downloads.remove(dl)
self._pending_complete.append(dl)
if self.send_transfer_complete:
self.send_transfer_complete(command_key, faultcode, faultstring,
starttime, endtime, event_code)
def RestoreDownloads(self):
pobjs = persistobj.GetPersistentObjects(objdir=self.config_dir,
rootname=DNLDROOTNAME)
for pobj in pobjs:
if not hasattr(pobj, 'command_key'):
print 'Download Object %s has no command_key' % pobj.filename
pobj.Delete()
continue
dl = DOWNLOADOBJ(stateobj=pobj,
transfer_complete_cb=self.TransferCompleteCallback,
download_dir=self.download_dir)
self._downloads.append(dl)
dl.reboot_callback(0, None)
def TransferCompleteResponseReceived(self):
dl = self._pending_complete.pop()
dl.cleanup()
def GetAllQueuedTransfers(self):
transfers = list()
for dl in self._downloads:
transfers.append(dl.get_queue_state())
for dl in self._pending_complete:
transfers.append(dl.get_queue_state())
return transfers
def CancelTransfer(self, command_key):
"""Cancel an in-progress transfer.
Args:
command_key: the command_key to cancel. There can be multiple transfers
with the same command_key. $SPEC says to attempt to cancel all of them,
return failure if any cannot be cancelled.
Raises:
core.CancelNotPermitted: download cannot be cancelled right now.
"""
for dl in self._downloads:
if dl.CommandKey() == command_key:
faultstring = dl.cleanup()
if faultstring:
raise core.CancelNotPermitted(faultstring)
else:
self._downloads.remove(dl)
for dl in self._pending_complete:
if dl.CommandKey() == command_key:
raise core.CancelNotPermitted(
'Installed, awaiting TransferCompleteResponse')
def _DelayedReboot(self):
installer = INSTALLER('')
installer.reboot()
def RestoreReboots(self):
pobjs = persistobj.GetPersistentObjects(objdir=self.config_dir,
rootname=BOOTROOTNAME)
reboots = []
for pobj in pobjs:
if hasattr(pobj, 'command_key'):
reboots.append(('M Reboot', pobj.command_key))
else:
print 'Reboot object %s has no command_key' % pobj.filename
pobj.Delete()
return reboots
def Reboot(self, command_key):
"""Reboot the system."""
kwargs = dict(command_key=command_key)
pobj = persistobj.PersistentObject(objdir=self.config_dir, rootname=BOOTROOTNAME,
filename=None, **kwargs)
self.ioloop.add_callback(self._DelayedReboot)
def _MakeDirsIgnoreError(self, directory):
"""Make sure a directory exists."""
try:
os.makedirs(directory, 0755)
except OSError:
pass
def SetDirectories(self, config_dir, download_dir):
self.config_dir = os.path.join(config_dir, 'state')
self.download_dir = os.path.join(download_dir, 'dnld')
self._MakeDirsIgnoreError(self.config_dir)
self._MakeDirsIgnoreError(self.download_dir)
def main():
  """Render the Download state machine diagram with Graphviz dot."""
  import subprocess #pylint: disable-msg=C6204
  dot_cmd = ['dot', '-Tpdf', '-odownloadStateMachine.pdf']
  proc = subprocess.Popen(dot_cmd, stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE)
  # Feed the module-level graphviz description to dot; echo its stdout.
  print(proc.communicate(input=graphviz)[0])
# When run directly, emit the state machine diagram (see main()).
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#pylint: disable-msg=W0404
#
"""Implement the TR-069 style request/response protocol over HTTP."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import binascii
import collections
import datetime
import os
import random
import socket
import sys
import time
import urllib
from curtain import digest
import tornado.httpclient
import tornado.ioloop
import tornado.util
import tornado.web
import api_soap
import cpe_management_server
import cwmp_session
import helpers
# Kernel file listing per-interface IPv6 addresses; parsed by LookupDevIP6.
PROC_IF_INET6 = '/proc/net/if_inet6'
# Restart the process if the CWMP event queue grows beyond this many entries.
MAX_EVENT_QUEUE_SIZE = 64
def _Shorten(s, prefixofs, suffixofs, maxlen):
"""Shorten the given string if its length is >= maxlen.
Note: maxlen should generally be considerably bigger than
prefixofs + suffixofs. It's disconcerting to a reader when
you have a "..." to replace 10 bytes, but it feels fine when the
"..." replaces 500 bytes.
Args:
s: the string to shorten.
prefixofs: the number of chars to keep at the beginning of s.
suffixofs: the number of chars to keep at the end of s.
maxlen: if the string is longer than this, shorten it.
Returns:
A shortened version of the string.
"""
s = str(s)
if len(s) >= maxlen and not os.environ.get('DONT_SHORTEN'):
# When the string exceeds the limit, we deliberately shorten it to
# considerably less than the limit, because it's disconcerting when
# you have a "..." to replace 10 bytes, but it feels right when the
# "..." replaces 500 bytes.
s = s[0:prefixofs] + '\n........\n' + s[-suffixofs:]
return s
class LimitDeque(collections.deque):
  """A deque that enforces a maximum size.

  Whenever a mutation pushes the length past max_size, the supplied
  handler is called; with no handler, print a message and exit.
  """

  def __init__(self, max_size=None, handler=None):
    collections.deque.__init__(self)
    self.max_size = max_size
    self.handler = handler

  def CheckSize(self):
    """Invoke handler (or exit) if the deque has outgrown max_size."""
    if not self.max_size or len(self) <= self.max_size:
      return
    if self.handler:
      self.handler()
    else:
      print('Maximum length of deque (%d) was exceeded' % (self.max_size))
      sys.exit(1)

  def append(self, *args):
    collections.deque.append(self, *args)
    self.CheckSize()

  def appendleft(self, *args):
    collections.deque.appendleft(self, *args)
    self.CheckSize()

  def extend(self, *args):
    collections.deque.extend(self, *args)
    self.CheckSize()

  def extendleft(self, *args):
    collections.deque.extendleft(self, *args)
    self.CheckSize()
# SPEC3 = TR-069_Amendment-3.pdf
# http://www.broadband-forum.org/technical/download/TR-069_Amendment-3.pdf
def SplitUrl(url):
  """Split a URL into a (method, host, port, path) namedtuple.

  Port defaults to 0 when the URL does not specify one.
  """
  Url = collections.namedtuple('Url', ('method host port path'))
  scheme, remainder = urllib.splittype(url)
  netloc, path = urllib.splithost(remainder)
  host, port = urllib.splitport(netloc)
  return Url(scheme, host, int(port or 0), path)
class PingHandler(digest.DigestAuthMixin, tornado.web.RequestHandler):
  """Handles accesses to the ConnectionRequestURL.

  Args:
    callback: the function to call when the URL is accessed.
    cpe_ms: the cpe_management_server object, from which to retrieve
      username and password.
  """

  def initialize(self, callback, cpe_ms):
    self.callback = callback
    self.cpe_ms = cpe_ms

  def getcredentials(self, username):
    """Return the digest credentials dict if username matches, else None."""
    expected = {'auth_username': self.cpe_ms.ConnectionRequestUsername,
                'auth_password': self.cpe_ms.ConnectionRequestPassword}
    if username == expected['auth_username']:
      return expected

  def get(self):
    # Digest authentication handler
    if self.get_authenticated_user(self.getcredentials, 'Authusers'):
      return self.set_status(self.callback())
class Handler(tornado.web.RequestHandler):
  """Serves the CPE/ACS SOAP endpoint; POST bodies go to soap_handler."""

  def initialize(self, soap_handler):
    self.soap_handler = soap_handler

  def get(self):
    self.write('This is the cpe/acs handler. It only takes POST requests.')

  def post(self):
    print('TR-069 server: request received:\n%s' % self.request.body)
    # An empty body is a valid keep-alive; only non-empty bodies get parsed.
    if self.request.body.strip():
      soap_response = self.soap_handler(self.request.body)
      self.write(str(soap_response))
class CPEStateMachine(object):
"""A tr-69 Customer Premises Equipment implementation.
Args:
ip: local ip address to bind to. If None, find address automatically.
cpe: the api_soap.cpe object for this device
listenport: the port number to listen on for ACS ping requests.
acs_url: An ACS URL to use. This overrides platform_config.GetAcsUrl()
ping_path: URL path for the ACS Ping function
ping_ip6dev: ifname to use for the CPE Ping address.
fetch_args: kwargs to pass to HTTPClient.fetch
"""
def __init__(self, ip, cpe, listenport, platform_config, ping_path,
acs_url=None, ping_ip6dev=None, fetch_args=dict(), ioloop=None,
restrict_acs_hosts=None):
self.cpe = cpe
self.cpe_soap = api_soap.CPE(self.cpe)
self.encode = api_soap.Encode()
self.outstanding = None
self.response_queue = []
self.request_queue = []
self.event_queue = LimitDeque(MAX_EVENT_QUEUE_SIZE, self.EventQueueHandler)
self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
self.retry_count = 0 # for Inform.RetryCount
self.start_session_timeout = None # timer for CWMPRetryInterval
self.session = None
self.my_configured_ip = ip
self.ping_ip6dev = ping_ip6dev
self.fetch_args = fetch_args
self.rate_limit_seconds = 60
self.platform_config = platform_config
self.previous_ping_time = 0
self.ping_timeout_pending = None
self._changed_parameters = set()
self._changed_parameters_sent = set()
self.cpe_management_server = cpe_management_server.CpeManagementServer(
acs_url=acs_url, platform_config=platform_config, port=listenport,
ping_path=ping_path, get_parameter_key=cpe.getParameterKey,
start_periodic_session=self.NewPeriodicSession, ioloop=self.ioloop,
restrict_acs_hosts=restrict_acs_hosts)
def EventQueueHandler(self):
"""Called if the event queue goes beyond the maximum threshold."""
print 'Event queue has grown beyond the maximum size, restarting...'
print 'event_queue=%s' % (str(self.event_queue))
sys.exit(1)
def GetManagementServer(self):
"""Return the ManagementServer implementation for tr-98/181."""
return self.cpe_management_server
def Send(self, req):
self.request_queue.append(str(req))
self.Run()
def SendResponse(self, req):
self.response_queue.append(str(req))
self.Run()
def LookupDevIP6(self, name):
"""Returns the global IPv6 address for the named interface."""
with open(PROC_IF_INET6, 'r') as f:
for line in f:
fields = line.split()
if len(fields) < 6:
continue
scope = int(fields[3].strip())
dev = fields[5].strip()
if dev == name and scope == 0:
bin_ip = binascii.unhexlify(fields[0])
return socket.inet_ntop(socket.AF_INET6, bin_ip)
return 0
def _GetLocalAddr(self):
if self.my_configured_ip is not None:
return self.my_configured_ip
if self.ping_ip6dev is not None:
return self.LookupDevIP6(self.ping_ip6dev)
acs_url = self.cpe_management_server.URL
if not acs_url:
return 0
# If not configured with an address it gets a bit tricky: we try connecting
# to the ACS, non-blocking, so we can find out which local IP the kernel
# uses when connecting to that IP. The local address is returned with
# getsockname(). Then we can tell the ACS to use that address for
# connecting to us later. We use a nonblocking socket because we don't
# care about actually connecting; we just care what the local kernel does
# in its implicit bind() when we *start* connecting.
url = SplitUrl(acs_url)
host = url.host
port = url.port or 0
s = socket.socket()
s.setblocking(0)
try:
s.connect((host, port or 1)) # port doesn't matter, but can't be 0
except socket.error:
pass
return s.getsockname()[0]
def EncodeInform(self):
"""Return an Inform message for this session."""
if not self.session.my_ip:
my_ip = self._GetLocalAddr()
self.session.my_ip = my_ip
self.cpe_management_server.my_ip = my_ip
events = []
for ev in self.event_queue:
events.append(ev)
parameter_list = []
try:
ms = self.cpe.root.GetExport('InternetGatewayDevice.ManagementServer')
di = self.cpe.root.GetExport('InternetGatewayDevice.DeviceInfo')
parameter_list += [
('InternetGatewayDevice.ManagementServer.ConnectionRequestURL',
ms.ConnectionRequestURL),
('InternetGatewayDevice.ManagementServer.ParameterKey',
ms.ParameterKey),
('InternetGatewayDevice.DeviceInfo.HardwareVersion',
di.HardwareVersion),
('InternetGatewayDevice.DeviceInfo.SoftwareVersion',
di.SoftwareVersion),
('InternetGatewayDevice.DeviceInfo.SpecVersion', di.SpecVersion),
]
# NOTE(jnewlin): Changed parameters can be set to be sent either
# explicitly with a value change event, or to be sent with the
# periodic inform. So it's not a bug if there is no value change
# event in the event queue.
# Take all of the parameters and put union them with the another
# set that has been previously sent. When we receive an inform
# from the ACS we clear the _sent version. This fixes a bug where
# we send this list of params to the ACS, followed by a PerioidStat
# adding itself to the list here, followed by getting an ack from the
# ACS where we clear the list. Now we just clear the list of the
# params that was sent when the ACS acks.
self._changed_parameters_sent.update(self._changed_parameters)
self._changed_parameters.clear()
parameter_list += self._changed_parameters_sent
except (AttributeError, KeyError):
pass
req = self.encode.Inform(root=self.cpe.root, events=events,
retry_count=self.retry_count,
parameter_list=parameter_list)
return str(req)
def SendTransferComplete(self, command_key, faultcode, faultstring,
starttime, endtime, event_code):
if not self.session:
tc = ('7 TRANSFER COMPLETE', None)
if tc not in self.event_queue:
self.event_queue.appendleft(tc)
self.event_queue.append((event_code, command_key))
cmpl = self.encode.TransferComplete(command_key, faultcode, faultstring,
starttime, endtime)
self.Send(cmpl)
def GetNext(self):
if not self.session:
return None
if self.session.inform_required():
self.session.state_update(sent_inform=True)
return self.EncodeInform()
if self.response_queue and self.session.response_allowed():
return self.response_queue.pop(0)
if self.request_queue and self.session.request_allowed():
return self.request_queue.pop(0)
return ''
def Run(self):
print 'RUN'
if not self.session:
print 'No ACS session, returning.'
return
if not self.session.acs_url:
print 'No ACS URL populated, returning.'
self._ScheduleRetrySession(wait=60)
return
if self.session.should_close():
print 'Idle CWMP session, terminating.'
self.outstanding = None
ping_received = self.session.close()
self.platform_config.AcsAccessSuccess(self.session.acs_url)
self.session = None
self.retry_count = 0 # Successful close
if self._changed_parameters:
# Some values triggered during the prior session, start a new session
# with those changed params. This should also satisfy a ping.
self.NewValueChangeSession()
elif ping_received:
# Ping received during session, start another
self._NewPingSession()
return
if self.outstanding is not None:
# already an outstanding request
return
if self.outstanding is None:
self.outstanding = self.GetNext()
if self.outstanding is None:
# We're not allowed to send anything yet, session not fully open.
return
headers = {}
if self.session.cookies:
headers['Cookie'] = ';'.join(self.session.cookies)
if self.outstanding:
headers['Content-Type'] = 'text/xml; charset="utf-8"'
headers['SOAPAction'] = ''
else:
# Empty message
self.session.state_update(cpe_to_acs_empty=True)
self.platform_config.AcsAccessAttempt(self.session.acs_url)
print('CPE POST (at {0!s}):\n'
'ACS URL: {1!r}\n'
'{2!s}\n'
'{3!s}'.format(time.ctime(), self.session.acs_url,
headers, _Shorten(self.outstanding, 768, 256, 2048)))
req = tornado.httpclient.HTTPRequest(
url=self.session.acs_url, method='POST', headers=headers,
body=self.outstanding, follow_redirects=True, max_redirects=5,
request_timeout=30.0, use_gzip=True, allow_ipv6=True,
**self.fetch_args)
self.session.http.fetch(req, self.GotResponse)
def GotResponse(self, response):
self.outstanding = None
print 'CPE RECEIVED (at %s):' % time.ctime()
if not self.session:
print 'Session terminated, ignoring ACS message.'
return
if not response.error:
cookies = response.headers.get_list('Set-Cookie')
if cookies:
self.session.cookies = cookies
print _Shorten(response.body, 768, 256, 2048)
if response.body:
out = self.cpe_soap.Handle(response.body)
if out is not None:
self.SendResponse(out)
# TODO(dgentry): $SPEC3 3.7.1.6 ACS Fault 8005 == retry same request
else:
self.session.state_update(acs_to_cpe_empty=True)
else:
print 'HTTP ERROR {0!s}: {1}'.format(response.code, response.error)
self._ScheduleRetrySession()
self.Run()
return 200
def _ScheduleRetrySession(self, wait=None):
"""Start a timer to retry a CWMP session.
Args:
wait: Number of seconds to wait. If wait=None, choose a random wait
time according to $SPEC3 section 3.2.1
"""
if self.session:
self.session.close()
self.session = None
if wait is None:
self.retry_count += 1
wait = self.cpe_management_server.SessionRetryWait(self.retry_count)
self.start_session_timeout = self.ioloop.add_timeout(
datetime.timedelta(seconds=wait), self._SessionWaitTimer)
def _SessionWaitTimer(self):
"""Handler for the CWMP Retry timer, to start a new session."""
self.start_session_timeout = None
self.session = cwmp_session.CwmpSession(
acs_url=self.cpe_management_server.URL, ioloop=self.ioloop)
self.Run()
def _CancelSessionRetries(self):
"""Cancel any pending CWMP session retry."""
if self.start_session_timeout:
self.ioloop.remove_timeout(self.start_session_timeout)
self.start_session_timeout = None
self.retry_count = 0
def _NewSession(self, reason):
if not self.session:
self._CancelSessionRetries()
self.event_queue.appendleft((reason, None))
self.session = cwmp_session.CwmpSession(
acs_url=self.cpe_management_server.URL, ioloop=self.ioloop)
self.Run()
def _NewTimeoutPingSession(self):
if self.ping_timeout_pending:
self.ping_timeout_pending = None
self._NewPingSession()
def _NewPingSession(self):
if self.session:
# $SPEC3 3.2.2 initiate at most one new session after this one closes.
self.session.ping_received = True
return
# Rate limit how often new sessions can be started with ping to
# once a minute
current_time = helpers.monotime()
elapsed_time = current_time - self.previous_ping_time
allow_ping = (elapsed_time < 0 or
elapsed_time > self.rate_limit_seconds)
if allow_ping:
self.ping_timeout_pending = None
self.previous_ping_time = current_time
self._NewSession('6 CONNECTION REQUEST')
elif not self.ping_timeout_pending:
# Queue up a new session via tornado.
callback_time = self.rate_limit_seconds - elapsed_time
if callback_time < 1:
callback_time = 1
self.ping_timeout_pending = self.ioloop.add_timeout(
datetime.timedelta(seconds=callback_time),
self._NewTimeoutPingSession)
def NewPeriodicSession(self):
# If the ACS stops responding for some period of time, it's possible
# that we'll already have a periodic inform queued up.
# In this case, don't start the new inform, wait for the session
# retry. The retry has a maximum timer of periodic session.
reason = '2 PERIODIC'
if not (reason, None) in self.event_queue:
self._NewSession(reason)
def SetNotificationParameters(self, parameters):
"""Set the list of parameters that have changed.
The list of parameters that have triggered and should be sent either
with the next periodic inform, or the next active active value change
session.
Args:
parameters: An array of the parameters that have changed, these
need to be sent to the ACS in the parameter list.
"""
for param in parameters:
self._changed_parameters.add(param)
def NewValueChangeSession(self):
"""Start a new session to the ACS for the parameters that have changed."""
# If all the changed parameters have been reported, or there is already
# a session running, don't do anything. The run loop for the session
# will autmatically kick off a new session if there are new changed
# parameters.
if not self._changed_parameters or self.session:
return
reason = '4 VALUE CHANGE'
if not (reason, None) in self.event_queue:
self._NewSession(reason)
def PingReceived(self):
self._NewPingSession()
return 204 # No Content
def _RemoveFromDequeue(self, dq, rmset):
"""Return a new deque which removes events in rmset."""
newdq = collections.deque()
for event in dq:
(reason, unused_command_key) = event
if reason.lower() not in rmset:
newdq.append(event)
return newdq
def TransferCompleteReceived(self):
"""Called when the ACS sends a TransferCompleteResponse."""
reasons = frozenset(['7 transfer complete', 'm download',
'm scheduledownload', 'm upload'])
self.event_queue = self._RemoveFromDequeue(self.event_queue, reasons)
def InformResponseReceived(self):
"""Called when the ACS sends an InformResponse."""
reasons = frozenset(['0 bootstrap', '1 boot', '2 periodic',
'3 scheduled', '4 value change',
'6 connection request', '8 diagnostics complete',
'm reboot', 'm scheduleinform'])
self.event_queue = self._RemoveFromDequeue(self.event_queue, reasons)
self._changed_parameters_sent.clear()
  def Startup(self):
    """Begin CWMP operation at process start.

    Re-queues any reboot/transfer events persisted by the download manager
    across a restart, then opens the initial session and lets the CPE
    finish its own startup.
    """
    # NOTE(review): RestoreReboots presumably returns (reason, command_key)
    # event tuples compatible with event_queue — confirm in download_manager.
    rb = self.cpe.download_manager.RestoreReboots()
    if rb:
      self.event_queue.extend(rb)
    # TODO(dgentry) Check whether we have a config, send '1 BOOT' instead
    self._NewSession('0 BOOTSTRAP')
    # This will call SendTransferComplete, so we have to already be in
    # a session.
    self.cpe.startup()
def Listen(ip, port, ping_path, acs, cpe, cpe_listener, platform_config,
acs_url=None, ping_ip6dev=None, fetch_args=dict(), ioloop=None,
restrict_acs_hosts=None):
if not ping_path:
ping_path = '/ping/%x' % random.getrandbits(120)
while ping_path.startswith('/'):
ping_path = ping_path[1:]
cpe_machine = CPEStateMachine(ip=ip, cpe=cpe, listenport=port,
platform_config=platform_config,
ping_path=ping_path,
restrict_acs_hosts=restrict_acs_hosts,
acs_url=acs_url, ping_ip6dev=ping_ip6dev,
fetch_args=fetch_args, ioloop=ioloop)
cpe.setCallbacks(cpe_machine.SendTransferComplete,
cpe_machine.TransferCompleteReceived,
cpe_machine.InformResponseReceived)
handlers = []
if acs:
acshandler = api_soap.ACS(acs).Handle
handlers.append(('/acs', Handler, dict(soap_handler=acshandler)))
print 'TR-069 ACS at http://*:%d/acs' % port
if cpe and cpe_listener:
cpehandler = cpe_machine.cpe_soap.Handle
handlers.append(('/cpe', Handler, dict(soap_handler=cpehandler)))
print 'TR-069 CPE at http://*:%d/cpe' % port
if ping_path:
handlers.append(('/' + ping_path, PingHandler,
dict(cpe_ms=cpe_machine.cpe_management_server,
callback=cpe_machine.PingReceived)))
print 'TR-069 callback at http://*:%d/%s' % (port, ping_path)
webapp = tornado.web.Application(handlers)
webapp.listen(port)
return cpe_machine
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Boolean handling for CWMP.
TR-069 Amendment 3, Annex A says:
Boolean, where the allowed values are "0", "1", "true", and "false".
The values "1" and "true" are considered interchangeable, where both
equivalently represent the logical value true. Similarly, the values
"0" and "false" are considered interchangeable, where both equivalently
represent the logical value false.
"""
__author__ = 'dgentry@google.com (Denton Gentry)'
def format(arg):
  """Render a value as a CWMP boolean string: '1' if truthy, else '0'."""
  if arg:
    return '1'
  return '0'
def parse(arg):
  """Convert a CWMP boolean ('0', '1', 'true', 'false'; any case) to bool.

  Raises:
    ValueError: if arg is not a recognized CWMP boolean.
  """
  text = str(arg).lower()
  if text in ('false', '0'):
    return False
  if text in ('true', '1'):
    return True
  raise ValueError('Invalid CWMP boolean')
def valid(arg):
  """Return True if arg parses as a CWMP boolean, False otherwise."""
  # Catch only the ValueError that parse() raises for invalid input.
  # The old bare 'except:' also swallowed SystemExit/KeyboardInterrupt.
  try:
    parse(arg)
  except ValueError:
    return False
  return True
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Basic integration tests, sending messages from a fake ACS."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import collections
import datetime
import unittest
import xml.etree.ElementTree as ET
import google3
import api
import core
import http
SOAPNS = '{http://schemas.xmlsoap.org/soap/envelope/}'
CWMPNS = '{urn:dslforum-org:cwmp-1-2}'
XSINS = '{http://www.w3.org/2001/XMLSchema-instance}'
class TestDeviceModelObject(core.Exporter):
  """A trivial exported sub-object with a single 'Foo' parameter."""

  def __init__(self):
    core.Exporter.__init__(self)
    self.Foo = 'bar'
    self.Export(params=['Foo'], objects=[])
class TestDeviceModelRoot(core.Exporter):
  """A class to hold the device models.

  Exports eleven parameters (plus the SubObject object): a plain 'Foo'
  string, four parameters that raise on access (to exercise fault
  handling), typed parameters for xsi:type tests, and a read-only
  parameter. Also records whether transaction hooks were invoked.
  """

  def __init__(self):
    core.Exporter.__init__(self)
    params = []
    objects = []
    self.Foo = 'bar'
    # Export order determines the parameter list the tests receive.
    params.append('Foo')
    params.append('RaiseIndexError')
    params.append('RaiseTypeError')
    params.append('RaiseValueError')
    params.append('RaiseSystemError')
    params.append('BooleanParameter')
    params.append('IntegerParameter')
    params.append('FloatParameter')
    params.append('DateTimeParameter')
    params.append('StringParameter')
    params.append('ReadOnlyParameter')
    self.SubObject = TestDeviceModelObject()
    objects.append('SubObject')
    self.Export(params=params, objects=objects)
    # Flags the tests inspect to verify setter/transaction behavior.
    self.boolean_parameter = True
    self.boolean_parameter_set = False
    self.start_transaction_called = False
    self.commit_transaction_called = False
    self.abandon_transaction_called = False
    # Typed values used by the xsi:type tests.
    self.IntegerParameter = 100
    self.FloatParameter = 3.14159
    self.DateTimeParameter = datetime.datetime(1999, 12, 31, 23, 59, 58)
    self.StringParameter = 'StringParameter'

  @property
  def RaiseIndexError(self):
    """A parameter which, when accessed, will raise an IndexError."""
    # Indexing an empty list raises IndexError naturally.
    l = list()
    return l[0]

  def GetRaiseTypeError(self):
    """A parameter which, when accessed, will raise a TypeError."""
    raise TypeError('RaiseTypeError Parameter')

  def SetRaiseTypeError(self, value):
    """Setter counterpart; also raises TypeError."""
    raise TypeError('RaiseTypeError Parameter')

  RaiseTypeError = property(GetRaiseTypeError, SetRaiseTypeError, None,
                            'RaiseTypeError')

  def GetRaiseValueError(self):
    """A parameter which, when accessed, will raise a ValueError."""
    raise ValueError('RaiseValueError Parameter')

  def SetRaiseValueError(self, value):
    """Setter counterpart; also raises ValueError."""
    raise ValueError('RaiseValueError Parameter')

  RaiseValueError = property(GetRaiseValueError, SetRaiseValueError, None,
                             'RaiseValueError')

  def GetRaiseSystemError(self):
    """A parameter which, when accessed, will raise a SystemError."""
    raise SystemError('RaiseSystemError Parameter')

  def SetRaiseSystemError(self, value):
    """Setter counterpart; also raises SystemError."""
    raise SystemError('RaiseSystemError Parameter')

  RaiseSystemError = property(GetRaiseSystemError, SetRaiseSystemError, None,
                              'RaiseSystemError')

  def GetBooleanParameter(self):
    """Return the current boolean value."""
    return self.boolean_parameter

  def SetBooleanParameter(self, value):
    """Store the boolean value and record that the setter ran."""
    self.boolean_parameter = value
    self.boolean_parameter_set = True

  BooleanParameter = property(GetBooleanParameter, SetBooleanParameter, None,
                              'BooleanParameter')

  def GetReadOnlyParameter(self):
    """Always True; has no setter, so writes must be rejected."""
    return True

  ReadOnlyParameter = property(GetReadOnlyParameter, None, None, 'ReadOnlyParameter')

  def StartTransaction(self):
    # Record the call so tests can assert transaction handling.
    self.start_transaction_called = True

  def CommitTransaction(self):
    self.commit_transaction_called = True

  def AbandonTransaction(self):
    self.abandon_transaction_called = True
class MockDownloadManager(object):
  """Mock download manager that records calls for test assertions."""

  # Shape of the entries returned by GetAllQueuedTransfers(). Defined once
  # at class level; the old code re-created a namedtuple *class* on every
  # AddQueuedTransfer call and set class attributes on it instead of
  # instantiating it.
  _QueuedTransfer = collections.namedtuple(
      'queued_transfer_struct',
      ('CommandKey State IsDownload FileType FileSize TargetFileName'))

  def __init__(self):
    self.new_download_called = False
    self.cancel_called = False
    self.newdl_return = (1, 0.0, 0.0)
    self.newdl_raise_resources = False
    self.newdl_raise_protocol = False
    self.cancel_raise = False
    self.queue = list()
    self.queue_num = 1
    self.reboot_called = False

  def NewDownload(self, command_key=None, file_type=None, url=None,
                  username=None, password=None, file_size=0,
                  target_filename=None, delay_seconds=0):
    """Record the download request; raise a configured fault if requested."""
    self.new_download_called = True
    self.newdl_command_key = command_key
    self.newdl_file_type = file_type
    self.newdl_url = url
    self.newdl_username = username
    self.newdl_password = password
    self.newdl_file_size = file_size
    self.newdl_target_filename = target_filename
    self.newdl_delay_seconds = delay_seconds
    if self.newdl_raise_resources:
      raise core.ResourcesExceededError('FaultString')
    if self.newdl_raise_protocol:
      raise core.FileTransferProtocolError('FaultString')
    return self.newdl_return

  def TransferCompleteResponseReceived(self):
    return

  def GetAllQueuedTransfers(self):
    return self.queue

  def AddQueuedTransfer(self):
    """Append one fake queued transfer with a unique CommandKey."""
    q = self._QueuedTransfer(
        CommandKey='CommandKey' + str(self.queue_num), State=2,
        IsDownload=True, FileType='FileType', FileSize=123,
        TargetFileName='TargetFileName')
    self.queue_num += 1
    self.queue.append(q)

  def CancelTransfer(self, command_key):
    """Record the cancel request; raise CancelNotPermitted if configured."""
    self.cancel_called = True
    self.cancel_command_key = command_key
    if self.cancel_raise:
      raise core.CancelNotPermitted('Refused')

  def Reboot(self, command_key):
    self.reboot_called = True
    self.reboot_command_key = command_key
class FakePlatformConfig(object):
  """Minimal platform_config stand-in reporting no configured ACS URL."""

  def GetAcsUrl(self):
    # No ACS URL is configured for these tests.
    return None
class TransferRpcTest(unittest.TestCase):
  """Test cases for RPCs relating to file transfers."""

  def getCpe(self):
    """Build a CPE state machine with a MockDownloadManager attached."""
    root = TestDeviceModelRoot()
    cpe = api.CPE(root)
    cpe.download_manager = MockDownloadManager()
    cpe_machine = http.Listen(ip=None, port=0,
                              ping_path='/ping/acs_integration_test',
                              acs=None, cpe=cpe, cpe_listener=False,
                              platform_config=FakePlatformConfig())
    return cpe_machine

  def testDownloadSimple(self):
    """A Download RPC is forwarded to the download manager verbatim."""
    cpe = self.getCpe()
    downloadXml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:Download><CommandKey>CommandKey</CommandKey><FileType>1 Firmware Upgrade Image</FileType><URL>http://example.com/image</URL><Username>Username</Username><Password>Password</Password><FileSize>123456</FileSize><TargetFileName>TargetFileName</TargetFileName><DelaySeconds>321</DelaySeconds><SuccessURL/><FailureURL/></cwmp:Download></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    dm = cpe.cpe.download_manager
    responseXml = cpe.cpe_soap.Handle(downloadXml)
    self.assertTrue(dm.new_download_called)
    self.assertEqual(dm.newdl_command_key, 'CommandKey')
    self.assertEqual(dm.newdl_file_type, '1 Firmware Upgrade Image')
    self.assertEqual(dm.newdl_username, 'Username')
    self.assertEqual(dm.newdl_password, 'Password')
    self.assertEqual(dm.newdl_file_size, 123456)
    self.assertEqual(dm.newdl_target_filename, 'TargetFileName')
    self.assertEqual(dm.newdl_delay_seconds, 321)
    root = ET.fromstring(str(responseXml))
    dlresp = root.find(SOAPNS + 'Body/' + CWMPNS + 'DownloadResponse')
    # Compare with None explicitly: ElementTree elements are falsy when
    # they have no children, so assertTrue(element) is unreliable.
    self.assertTrue(dlresp is not None)
    self.assertEqual(dlresp.find('Status').text, '1')
    self.assertEqual(dlresp.find('StartTime').text, '0001-01-01T00:00:00Z')
    self.assertEqual(dlresp.find('CompleteTime').text, '0001-01-01T00:00:00Z')

  def testDownloadFailed(self):
    """A download manager fault maps to a SOAP Fault with CWMP code 9004."""
    cpe = self.getCpe()
    downloadXml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:Download><CommandKey>CommandKey</CommandKey><FileType>1 Firmware Upgrade Image</FileType><URL>invalid</URL><Username>Username</Username><Password>Password</Password><FileSize>123456</FileSize><TargetFileName>TargetFileName</TargetFileName><DelaySeconds>321</DelaySeconds><SuccessURL/><FailureURL/></cwmp:Download></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    dm = cpe.cpe.download_manager
    dm.newdl_raise_resources = True
    responseXml = cpe.cpe_soap.Handle(downloadXml)
    self.assertTrue(dm.new_download_called)
    self.assertEqual(dm.newdl_command_key, 'CommandKey')
    self.assertEqual(dm.newdl_file_type, '1 Firmware Upgrade Image')
    self.assertEqual(dm.newdl_username, 'Username')
    self.assertEqual(dm.newdl_password, 'Password')
    self.assertEqual(dm.newdl_file_size, 123456)
    self.assertEqual(dm.newdl_target_filename, 'TargetFileName')
    self.assertEqual(dm.newdl_delay_seconds, 321)
    root = ET.fromstring(str(responseXml))
    dlresp = root.find(SOAPNS + 'Body/' + CWMPNS + 'DownloadResponse')
    # Explicit None checks: element truthiness is child-count based.
    self.assertTrue(dlresp is None)
    fault = root.find(SOAPNS + 'Body/' + SOAPNS + 'Fault')
    self.assertTrue(fault is not None)
    self.assertEqual(fault.find('faultcode').text, 'Server')
    self.assertEqual(fault.find('faultstring').text, 'CWMP fault')
    detail = fault.find('detail/' + CWMPNS + 'Fault')
    self.assertTrue(detail is not None)
    self.assertEqual(detail.find('FaultCode').text, '9004')
    self.assertEqual(detail.find('FaultString').text, 'FaultString')
    # We don't do a string compare of the XML output, that is too fragile
    # as a test. We parse the XML and look for expected values. Nonetheless
    # here is roughly what responseXml should look like, if you need to debug
    # this test case:
    _ = r"""<?xml version="1.0" encoding="utf-8"?>
      <soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
        <soap:Header>
          <cwmp:ID soap:mustUnderstand="1">TestCwmpId</cwmp:ID>
        </soap:Header>
        <soap:Body>
          <soap:Fault>
            <faultcode>Server</faultcode>
            <faultstring>CWMP fault</faultstring>
            <detail>
              <cwmp:Fault>
                <FaultCode>9004</FaultCode>
                <FaultString>FaultString</FaultString>
              </cwmp:Fault>
            </detail>
          </soap:Fault>
        </soap:Body>
      </soap:Envelope>"""

  def testGetAllQueuedTransfers(self):
    """GetAllQueuedTransfers returns every queued transfer with all fields."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetAllQueuedTransfers></cwmp:GetAllQueuedTransfers></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    dm = cpe.cpe.download_manager
    dm.AddQueuedTransfer()
    dm.AddQueuedTransfer()
    responseXml = cpe.cpe_soap.Handle(soapxml)
    self.assertFalse(dm.new_download_called)
    root = ET.fromstring(str(responseXml))
    transfers = root.findall(SOAPNS + 'Body/' + CWMPNS +
                             'GetAllQueuedTransfersResponse/TransferList')
    self.assertEqual(len(transfers), 2)
    for i, t in enumerate(transfers):
      self.assertEqual(t.find('CommandKey').text, 'CommandKey' + str(i+1))
      self.assertEqual(t.find('State').text, '2')
      self.assertEqual(t.find('IsDownload').text, 'True')
      self.assertEqual(t.find('FileType').text, 'FileType')
      self.assertEqual(t.find('FileSize').text, '123')
      self.assertEqual(t.find('TargetFileName').text, 'TargetFileName')
    # We don't do a string compare of the XML output, that is too fragile
    # as a test. We parse the XML and look for expected values. Nonetheless
    # here is roughly what responseXml should look like, if you need to debug
    # this test case:
    _ = r"""<soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
      <soap:Header>
        <cwmp:ID soap:mustUnderstand="1">TestCwmpId</cwmp:ID>
      </soap:Header>
      <soap:Body>
        <cwmp:GetAllQueuedTransfersResponse>
          <TransferList>
            <CommandKey>CommandKey1</CommandKey>
            <State>2</State>
            <IsDownload>True</IsDownload>
            <FileType>FileType</FileType>
            <FileSize>123</FileSize>
            <TargetFileName>TargetFileName</TargetFileName>
          </TransferList>
          <TransferList>
            <CommandKey>CommandKey2</CommandKey>
            <State>2</State>
            <IsDownload>True</IsDownload>
            <FileType>FileType</FileType>
            <FileSize>123</FileSize>
            <TargetFileName>TargetFileName</TargetFileName>
          </TransferList>
        </cwmp:GetAllQueuedTransfersResponse>
      </soap:Body>
    </soap:Envelope>"""

  def testGetQueuedTransfers(self):
    """GetQueuedTransfers returns CommandKey and State for each transfer."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetQueuedTransfers></cwmp:GetQueuedTransfers></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    dm = cpe.cpe.download_manager
    dm.AddQueuedTransfer()
    dm.AddQueuedTransfer()
    responseXml = cpe.cpe_soap.Handle(soapxml)
    self.assertFalse(dm.new_download_called)
    root = ET.fromstring(str(responseXml))
    transfers = root.findall(SOAPNS + 'Body/' + CWMPNS +
                             'GetQueuedTransfersResponse/TransferList')
    self.assertEqual(len(transfers), 2)
    for i, t in enumerate(transfers):
      self.assertEqual(t.find('CommandKey').text, 'CommandKey' + str(i+1))
      self.assertEqual(t.find('State').text, '2')
    # We don't do a string compare of the XML output, that is too fragile
    # as a test. We parse the XML and look for expected values. Nonetheless
    # here is roughly what responseXml should look like, if you need to debug
    # this test case:
    _ = r"""<soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
      <soap:Header>
        <cwmp:ID soap:mustUnderstand="1">TestCwmpId</cwmp:ID>
      </soap:Header>
      <soap:Body>
        <cwmp:GetQueuedTransfersResponse>
          <TransferList>
            <CommandKey>CommandKey1</CommandKey>
            <State>2</State>
          </TransferList>
          <TransferList>
            <CommandKey>CommandKey2</CommandKey>
            <State>2</State>
          </TransferList>
        </cwmp:GetQueuedTransfersResponse>
      </soap:Body>
    </soap:Envelope>"""

  def testCancelTransfer(self):
    """A CancelTransfer RPC produces a CancelTransferResponse."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:CancelTransfer><CommandKey>CommandKey</CommandKey></cwmp:CancelTransfer></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    self.assertTrue(root.findall(SOAPNS + 'Body/' + CWMPNS +
                                 'CancelTransferResponse'))
    # We don't do a string compare of the XML output, that is too fragile
    # as a test. We parse the XML and look for expected values. Nonetheless
    # here is roughly what responseXml should look like, if you need to debug
    # this test case:
    _ = r"""<?xml version="1.0" encoding="utf-8"?>
      <soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
        <soap:Header>
          <cwmp:ID soap:mustUnderstand="1">TestCwmpId</cwmp:ID>
        </soap:Header>
        <soap:Body>
          <cwmp:CancelTransferResponse />
        </soap:Body>
      </soap:Envelope>"""

  def testCancelTransferRefused(self):
    """A refused cancel maps to a SOAP Fault with CWMP code 9021."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:CancelTransfer><CommandKey>CommandKey</CommandKey></cwmp:CancelTransfer></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    dm = cpe.cpe.download_manager
    dm.cancel_raise = True
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    fault = root.find(SOAPNS + 'Body/' + SOAPNS + 'Fault')
    # Explicit None checks: element truthiness is child-count based.
    self.assertTrue(fault is not None)
    self.assertEqual(fault.find('faultcode').text, 'Client')
    self.assertEqual(fault.find('faultstring').text, 'CWMP fault')
    detail = fault.find('detail/' + CWMPNS + 'Fault')
    self.assertTrue(detail is not None)
    self.assertEqual(detail.find('FaultCode').text, '9021')
    self.assertEqual(detail.find('FaultString').text, 'Refused')
    # We don't do a string compare of the XML output, that is too fragile
    # as a test. We parse the XML and look for expected values. Nonetheless
    # here is roughly what responseXml should look like, if you need to debug
    # this test case:
    _ = r"""<?xml version="1.0" encoding="utf-8"?>
      <soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
        <soap:Header>
          <cwmp:ID soap:mustUnderstand="1">TestCwmpId</cwmp:ID>
        </soap:Header>
        <soap:Body>
          <soap:Fault>
            <faultcode>Client</faultcode>
            <faultstring>CWMP fault</faultstring>
            <detail>
              <cwmp:Fault>
                <FaultCode>9021</FaultCode>
                <FaultString>Refused</FaultString>
              </cwmp:Fault>
            </detail>
          </soap:Fault>
        </soap:Body>
      </soap:Envelope>"""

  def testReboot(self):
    """A Reboot RPC reaches the download manager and yields RebootResponse."""
    cpe = self.getCpe()
    dm = cpe.cpe.download_manager
    downloadXml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:Reboot><CommandKey>CommandKey</CommandKey></cwmp:Reboot></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(downloadXml)
    self.assertTrue(dm.reboot_called)
    self.assertEqual(dm.reboot_command_key, 'CommandKey')
    root = ET.fromstring(str(responseXml))
    rbresp = root.find(SOAPNS + 'Body/' + CWMPNS + 'RebootResponse')
    self.assertTrue(rbresp is not None)
class GetParamsRpcTest(unittest.TestCase):
"""Test cases for RPCs relating to Parameters."""
  def getCpe(self):
    """Build a CPE state machine over a fresh TestDeviceModelRoot."""
    root = TestDeviceModelRoot()
    cpe = api.CPE(root)
    # No ACS handler, no CPE listener: tests drive cpe_soap.Handle directly.
    cpe_machine = http.Listen(ip=None, port=0,
                              ping_path='/ping/acs_integration_test',
                              acs=None, cpe=cpe, cpe_listener=False,
                              platform_config=FakePlatformConfig())
    return cpe_machine
  def testGetParamValue(self):
    """GetParameterValues for 'Foo' returns a ParameterValueStruct for it."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetParameterValues><ParameterNames soapenc:arrayType="{urn:dslforum-org:cwmp-1-2}string[1]"><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">Foo</ns3:string></ParameterNames></cwmp:GetParameterValues></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    name = root.find(
        SOAPNS + 'Body/' + CWMPNS +
        'GetParameterValuesResponse/ParameterList/ParameterValueStruct/Name')
    self.assertTrue(name is not None)
    self.assertEqual(name.text, 'Foo')
    # We don't do a string compare of the XML output, that is too fragile
    # as a test. We parse the XML and look for expected values. Nonetheless
    # here is roughly what responseXml should look like, if you need to debug
    # this test case:
    _ = r"""<?xml version="1.0" encoding="utf-8"?>
      <soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
        <soap:Header>
          <cwmp:ID soap:mustUnderstand="1">TestCwmpId</cwmp:ID>
        </soap:Header>
        <soap:Body>
          <cwmp:GetParameterValuesResponse>
            <ParameterList soap-enc:arrayType="cwmp:ParameterValueStruct[1]">
              <ParameterValueStruct>
                <Name>Foo</Name>
                <Value xsi:type="xsd:string">bar</Value>
              </ParameterValueStruct>
            </ParameterList>
          </cwmp:GetParameterValuesResponse>
        </soap:Body>
      </soap:Envelope>"""
  def testXsiTypes(self):
    """Each typed parameter is serialized with the expected xsi:type."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetParameterValues><ParameterNames soapenc:arrayType="{urn:dslforum-org:cwmp-1-2}string[1]"><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">BooleanParameter</ns3:string><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">IntegerParameter</ns3:string><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">FloatParameter</ns3:string><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">DateTimeParameter</ns3:string><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">StringParameter</ns3:string></ParameterNames></cwmp:GetParameterValues></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    params = root.findall(
        SOAPNS + 'Body/' + CWMPNS +
        'GetParameterValuesResponse/ParameterList/ParameterValueStruct')
    # Results arrive in request order: bool, int, float, datetime, string.
    self.assertEqual(len(params), 5)
    self.assertEqual(params[0].find('Value').get(XSINS + 'type'), 'xsd:boolean')
    self.assertEqual(params[1].find('Value').get(XSINS + 'type'), 'xsd:unsignedInt')
    self.assertEqual(params[2].find('Value').get(XSINS + 'type'), 'xsd:double')
    self.assertEqual(params[3].find('Value').get(XSINS + 'type'), 'xsd:dateTime')
    self.assertEqual(params[4].find('Value').get(XSINS + 'type'), 'xsd:string')
  def testGetParamName(self):
    """GetParameterNames at the root lists all exported names.

    12 names = the 11 exported parameters of TestDeviceModelRoot plus
    its SubObject object.
    """
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetParameterNames><ParameterPath/><NextLevel>true</NextLevel></cwmp:GetParameterNames></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    names = root.findall(
        SOAPNS + 'Body/' + CWMPNS +
        'GetParameterNamesResponse/ParameterList/ParameterInfoStruct/Name')
    self.assertEqual(len(names), 12)
    # We don't do a string compare of the XML output, that is too fragile
    # as a test. We parse the XML and look for expected values. Nonetheless
    # here is roughly what responseXml should look like, if you need to debug
    # this test case:
    _ = r"""<?xml version="1.0" encoding="utf-8"?>
      <soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
        <soap:Header>
          <cwmp:ID soap:mustUnderstand="1">TestCwmpId</cwmp:ID>
        </soap:Header>
        <soap:Body>
          <cwmp:GetParameterNamesResponse>
            <ParameterList soap-enc:arrayType="ParameterInfoStruct[1]">
              <ParameterInfoStruct>
                <Name>Foo</Name>
                <Writable>1</Writable>
              </ParameterInfoStruct>
            </ParameterList>
          </cwmp:GetParameterNamesResponse>
        </soap:Body>
      </soap:Envelope>"""
def _AssertCwmpFaultNopeNotHere(self, root):
fault = root.find(SOAPNS + 'Body/' + SOAPNS + 'Fault')
self.assertTrue(fault)
self.assertEqual(fault.find('faultcode').text, 'Client')
self.assertEqual(fault.find('faultstring').text, 'CWMP fault')
detail = fault.find('detail/' + CWMPNS + 'Fault')
self.assertTrue(detail)
self.assertEqual(detail.find('FaultCode').text, '9005')
self.assertTrue(detail.find('FaultString').text.find('NopeNotHere'))
  def testGetBadParamValue(self):
    """GetParameterValues of a nonexistent parameter returns fault 9005."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetParameterValues><ParameterNames soapenc:arrayType="{urn:dslforum-org:cwmp-1-2}string[1]"><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">NopeNotHere</ns3:string></ParameterNames></cwmp:GetParameterValues></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    # No GetParameterValuesResponse should be present, only a Fault.
    name = root.find(SOAPNS + 'Body/' + CWMPNS + 'GetParameterValuesResponse')
    self.assertTrue(name is None)
    self._AssertCwmpFaultNopeNotHere(root)
    # We don't do a string compare of the XML output, that is too fragile
    # as a test. We parse the XML and look for expected values. Nonetheless
    # here is roughly what responseXml should look like, if you need to debug
    # this test case:
    _ = r"""<?xml version="1.0" encoding="utf-8"?>
<soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
  <soap:Header>
    <cwmp:ID soap:mustUnderstand="1">TestCwmpId</cwmp:ID>
  </soap:Header>
  <soap:Body>
    <soap:Fault>
      <faultcode>Client</faultcode>
      <faultstring>CWMP fault</faultstring>
      <detail>
        <cwmp:Fault>
          <FaultCode>9005</FaultCode>
          <FaultString>No such parameter: NopeNotHere</FaultString>
        </cwmp:Fault>
      </detail>
    </soap:Fault>
  </soap:Body>
</soap:Envelope>"""
def testGetBadParamValueFullPath(self):
cpe = self.getCpe()
soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetParameterValues><ParameterNames soapenc:arrayType="{urn:dslforum-org:cwmp-1-2}string[1]"><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">SubObject.NopeNotHere</ns3:string></ParameterNames></cwmp:GetParameterValues></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
responseXml = cpe.cpe_soap.Handle(soapxml)
root = ET.fromstring(str(responseXml))
name = root.find(SOAPNS + 'Body/' + CWMPNS + 'GetParameterValuesResponse')
self.assertTrue(name is None)
fault = root.find(SOAPNS + 'Body/' + SOAPNS + 'Fault')
self.assertTrue(fault)
self.assertEqual(fault.find('faultcode').text, 'Client')
self.assertEqual(fault.find('faultstring').text, 'CWMP fault')
detail = fault.find('detail/' + CWMPNS + 'Fault')
self.assertTrue(detail)
self.assertEqual(detail.find('FaultCode').text, '9005')
self.assertTrue(
detail.find('FaultString').text.find('SubObject.NopeNotHere'))
  def testGetBadParamName(self):
    """GetParameterNames of a nonexistent path returns fault 9005."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetParameterNames><ParameterPath>NopeNotHere</ParameterPath><NextLevel>true</NextLevel></cwmp:GetParameterNames></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    # No GetParameterNamesResponse should be present, only a Fault.
    name = root.find(SOAPNS + 'Body/' + CWMPNS + 'GetParameterNamesResponse')
    self.assertTrue(name is None)
    self._AssertCwmpFaultNopeNotHere(root)
    # We don't do a string compare of the XML output, that is too fragile
    # as a test. We parse the XML and look for expected values. Nonetheless
    # here is roughly what responseXml should look like, if you need to debug
    # this test case:
    _ = r"""<?xml version="1.0" encoding="utf-8"?>
<soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
  <soap:Header>
    <cwmp:ID soap:mustUnderstand="1">TestCwmpId</cwmp:ID>
  </soap:Header>
  <soap:Body>
    <soap:Fault>
      <faultcode>Client</faultcode>
      <faultstring>CWMP fault</faultstring>
      <detail>
        <cwmp:Fault>
          <FaultCode>9005</FaultCode>
          <FaultString>No such parameter: NopeNotHere</FaultString>
        </cwmp:Fault>
      </detail>
    </soap:Fault>
  </soap:Body>
</soap:Envelope>"""
  def testBadAddObjectName(self):
    """AddObject on a nonexistent object returns fault 9005."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:AddObject><ObjectName>NopeNotHere.</ObjectName><ParameterKey>ParameterKey1</ParameterKey></cwmp:AddObject></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    # No AddObjectResponse should be present, only a Fault.
    name = root.find(SOAPNS + 'Body/' + CWMPNS + 'AddObjectResponse')
    self.assertTrue(name is None)
    self._AssertCwmpFaultNopeNotHere(root)
  def testBadAddObjectNameNoDot(self):
    """<ObjectName> does not end in a dot, as spec requires; expect 9005."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:AddObject><ObjectName>NopeNotHere</ObjectName><ParameterKey>ParameterKey1</ParameterKey></cwmp:AddObject></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    # No AddObjectResponse should be present, only a Fault.
    name = root.find(SOAPNS + 'Body/' + CWMPNS + 'AddObjectResponse')
    self.assertTrue(name is None)
    self._AssertCwmpFaultNopeNotHere(root)
  def testBadDelObjectName(self):
    """DeleteObject on a nonexistent object returns fault 9005."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:DeleteObject><ObjectName>NopeNotHere.</ObjectName><ParameterKey>ParameterKey1</ParameterKey></cwmp:DeleteObject></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    # No DeleteObjectResponse should be present, only a Fault.
    name = root.find(SOAPNS + 'Body/' + CWMPNS + 'DeleteObjectResponse')
    self.assertTrue(name is None)
    self._AssertCwmpFaultNopeNotHere(root)
  def testBadDelObjectNameNoDot(self):
    """<ObjectName> does not end in a dot, as spec requires; expect 9005."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:DeleteObject><ObjectName>NopeNotHere</ObjectName><ParameterKey>ParameterKey1</ParameterKey></cwmp:DeleteObject></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    # No DeleteObjectResponse should be present, only a Fault.
    name = root.find(SOAPNS + 'Body/' + CWMPNS + 'DeleteObjectResponse')
    self.assertTrue(name is None)
    self._AssertCwmpFaultNopeNotHere(root)
  def testNoSuchMethod(self):
    """Calling an undefined RPC returns a Server fault with CWMP code 9000."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:NoSuchMethod><NoSuchArgument/></cwmp:NoSuchMethod></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    fault = root.find(SOAPNS + 'Body/' + SOAPNS + 'Fault')
    self.assertTrue(fault)
    self.assertEqual(fault.find('faultcode').text, 'Server')
    self.assertEqual(fault.find('faultstring').text, 'CWMP fault')
    detail = fault.find('detail/' + CWMPNS + 'Fault')
    self.assertTrue(detail)
    self.assertEqual(detail.find('FaultCode').text, '9000')
    # NOTE(review): str.find() returns -1 (truthy) when the substring is
    # absent, so this assertion can never fail; it should test membership
    # with 'in' instead.
    self.assertTrue(detail.find('FaultString').text.find('NoSuchMethod'))
  def testInvalidArgument(self):
    """A parameter raising IndexError maps to Client fault with code 9003."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetParameterValues><ParameterNames soapenc:arrayType="{urn:dslforum-org:cwmp-1-2}string[1]"><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">RaiseIndexError</ns3:string></ParameterNames></cwmp:GetParameterValues></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    fault = root.find(SOAPNS + 'Body/' + SOAPNS + 'Fault')
    self.assertTrue(fault)
    self.assertEqual(fault.find('faultcode').text, 'Client')
    self.assertEqual(fault.find('faultstring').text, 'CWMP fault')
    detail = fault.find('detail/' + CWMPNS + 'Fault')
    self.assertTrue(detail)
    self.assertEqual(detail.find('FaultCode').text, '9003')
  def testSetParameterValues(self):
    """A valid SetParameterValues returns Status 0 and commits the transaction."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:SetParameterValues><ParameterList><ns2:ParameterValueStruct xmlns:ns2="urn:dslforum-org:cwmp-1-2"><Name>BooleanParameter</Name><Value xmlns:xs="http://www.w3.org/2001/XMLSchema" xsi:type="xs:boolean">true</Value></ns2:ParameterValueStruct></ParameterList><ParameterKey>myParamKey</ParameterKey></cwmp:SetParameterValues></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    resp = root.find(SOAPNS + 'Body/' + CWMPNS + 'SetParameterValuesResponse')
    self.assertTrue(resp)
    status = resp.find('Status')
    self.assertEqual(status.text, '0')
    # The set must run inside a transaction that is committed, not abandoned.
    self.assertTrue(cpe.cpe.root.start_transaction_called)
    self.assertTrue(cpe.cpe.root.commit_transaction_called)
    self.assertFalse(cpe.cpe.root.abandon_transaction_called)
  def testSetParameterFault(self):
    """Each failing parameter yields its own SetParameterValuesFault entry."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:SetParameterValues><ParameterList><ns2:ParameterValueStruct xmlns:ns2="urn:dslforum-org:cwmp-1-2"><Name>RaiseTypeError</Name><Value xmlns:xs="http://www.w3.org/2001/XMLSchema" xsi:type="xs:boolean">true</Value></ns2:ParameterValueStruct><ns2:ParameterValueStruct xmlns:ns2="urn:dslforum-org:cwmp-1-2"><Name>RaiseValueError</Name><Value xmlns:xs="http://www.w3.org/2001/XMLSchema" xsi:type="xs:boolean">true</Value></ns2:ParameterValueStruct><ns2:ParameterValueStruct xmlns:ns2="urn:dslforum-org:cwmp-1-2"><Name>NoSuchParameter</Name><Value xmlns:xs="http://www.w3.org/2001/XMLSchema" xsi:type="xs:boolean">true</Value></ns2:ParameterValueStruct><ns2:ParameterValueStruct xmlns:ns2="urn:dslforum-org:cwmp-1-2"><Name>ReadOnlyParameter</Name><Value xmlns:xs="http://www.w3.org/2001/XMLSchema" xsi:type="xs:boolean">true</Value></ns2:ParameterValueStruct></ParameterList><ParameterKey>myParamKey</ParameterKey></cwmp:SetParameterValues></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    # NOTE(review): assertFalse of an Element is fragile — an Element with
    # no children is falsy even when present; 'is None' would be clearer.
    self.assertFalse(root.find(SOAPNS + 'Body/' +
                               CWMPNS + 'SetParameterValuesResponse'))
    fault = root.find(SOAPNS + 'Body/' + SOAPNS + 'Fault')
    self.assertTrue(fault)
    self.assertEqual(fault.find('faultcode').text, 'Client')
    self.assertEqual(fault.find('faultstring').text, 'CWMP fault')
    detail = fault.find('detail/' + CWMPNS + 'Fault')
    self.assertTrue(detail)
    self.assertEqual(detail.find('FaultCode').text, '9003')
    self.assertEqual(detail.find('FaultString').text, 'Invalid arguments')
    # One SetParameterValuesFault per failing parameter, in request order:
    # 9006 invalid type, 9007 invalid value, 9005 no such name, 9008 readonly.
    setfaults = detail.findall('SetParameterValuesFault')
    self.assertEqual(len(setfaults), 4)
    self.assertEqual(setfaults[0].find('ParameterName').text, 'RaiseTypeError')
    self.assertEqual(setfaults[0].find('FaultCode').text, '9006')
    self.assertTrue(setfaults[0].find('FaultString').text)
    self.assertEqual(setfaults[1].find('ParameterName').text, 'RaiseValueError')
    self.assertEqual(setfaults[1].find('FaultCode').text, '9007')
    self.assertTrue(setfaults[1].find('FaultString').text)
    self.assertEqual(setfaults[2].find('ParameterName').text, 'NoSuchParameter')
    self.assertEqual(setfaults[2].find('FaultCode').text, '9005')
    self.assertTrue(setfaults[2].find('FaultString').text)
    self.assertEqual(setfaults[3].find('ParameterName').text, 'ReadOnlyParameter')
    self.assertEqual(setfaults[3].find('FaultCode').text, '9008')
    self.assertTrue(setfaults[3].find('FaultString').text)
  def testGetRPCMethods(self):
    """GetRPCMethods returns exactly the standard CWMP RPC names."""
    cpe = self.getCpe()
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetRPCMethods></cwmp:GetRPCMethods></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    methods = root.find(SOAPNS + 'Body/' + CWMPNS +
                        'GetRPCMethodsResponse/MethodList')
    self.assertTrue(methods)
    rpcs = methods.findall('string')
    rpcnames = [r.text for r in rpcs]
    # Before adding RPC Names to this list, READ THIS!
    # If this test fails, its because the CPE is responding to a GetRPCMethods
    # call with an RPC which is not defined in the standard. This is ALMOST
    # CERTAINLY because what should be an internal method has been added to
    # http.py:CPE where the first letter is capitalized. That is how we
    # determine which methods to return: everything with a capitalized
    # first letter.
    # Don't just add the name here and think you are done. You need to
    # make the first character of internal methods a lowercase letter
    # or underscore.
    # Don't feel bad. This comment is here because I made the same mistake.
    expected = ['AddObject', 'CancelTransfer', 'ChangeDUState', 'DeleteObject',
                'Download', 'FactoryReset', 'GetAllQueuedTransfers',
                'GetOptions', 'GetParameterAttributes', 'GetParameterNames',
                'GetParameterValues', 'GetQueuedTransfers', 'GetRPCMethods',
                'Reboot', 'ScheduleDownload', 'ScheduleInform',
                'SetParameterAttributes', 'SetParameterValues', 'SetVouchers',
                'Upload']
    self.assertEqual(rpcnames, expected)
  def testInternalError(self):
    """An unexpected exception becomes a Server fault with CWMP code 9002."""
    cpe = self.getCpe()
    # RaiseSystemError simulates an unexpected problem which should
    # turn into a SOAP:Fault INTERNAL_ERROR
    soapxml = r"""<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header><cwmp:ID soapenv:mustUnderstand="1">TestCwmpId</cwmp:ID><cwmp:HoldRequests>0</cwmp:HoldRequests></soapenv:Header><soapenv:Body><cwmp:GetParameterValues><ParameterNames soapenc:arrayType="{urn:dslforum-org:cwmp-1-2}string[1]"><ns3:string xmlns="urn:dslforum-org:cwmp-1-2" xmlns:ns1="http://schemas.xmlsoap.org/soap/encoding/" xmlns:ns3="urn:dslforum-org:cwmp-1-2">RaiseSystemError</ns3:string></ParameterNames></cwmp:GetParameterValues></soapenv:Body></soapenv:Envelope>""" #pylint: disable-msg=C6310
    responseXml = cpe.cpe_soap.Handle(soapxml)
    root = ET.fromstring(str(responseXml))
    self.assertFalse(root.find(SOAPNS + 'Body/' +
                               CWMPNS + 'GetParameterValuesResponse'))
    fault = root.find(SOAPNS + 'Body/' + SOAPNS + 'Fault')
    self.assertTrue(fault)
    self.assertEqual(fault.find('faultcode').text, 'Server')
    self.assertEqual(fault.find('faultstring').text, 'CWMP fault')
    detail = fault.find('detail/' + CWMPNS + 'Fault')
    self.assertTrue(detail)
    self.assertEqual(detail.find('FaultCode').text, '9002')
    self.assertTrue(detail.find('FaultString').text)
# Allow running this test file directly from the command line.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple helper functions that don't belong elsewhere."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import errno
import os
import time
import tornado.util
def Unlink(filename):
  """Like os.unlink, but doesn't raise exception if file was missing already.

  After all, you want the file gone. It's gone. Stop complaining.

  Args:
    filename: the filename to delete

  Raises:
    OSError: if os.unlink() fails with other than ENOENT.
  """
  try:
    os.unlink(filename)
  except OSError as e:  # 'as' syntax works on Python 2.6+ and Python 3.
    # A missing file is the desired end state, so swallow only ENOENT.
    if e.errno != errno.ENOENT:
      raise
def WriteFileAtomic(tmp_file_name, final_file_name, data):
  """Writes data to tmp file, then moves it to the final file atomically.

  os.rename replaces the destination in a single step on POSIX systems,
  so readers of final_file_name never see a partially written file.

  Args:
    tmp_file_name: scratch filename to write to first; should be on the
      same filesystem as final_file_name so the rename can succeed.
    final_file_name: the destination filename.
    data: the contents to write.
  """
  # Use open() rather than the file() builtin: file() was never the
  # recommended spelling and was removed in Python 3.
  with open(tmp_file_name, 'w') as f:
    f.write(data)
  os.rename(tmp_file_name, final_file_name)
def monotime():
  """Return tornado's monotonic clock if available, else wall-clock time."""
  monotonic = getattr(tornado.util, 'monotime', None)
  if monotonic is not None:
    return monotonic()
  return time.time()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for download.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import datetime
import shutil
import tempfile
import time
import unittest
import google3
import core
import download
import persistobj
# Module-level registries of every mock instance created, so tests can
# inspect objects constructed indirectly by the code under test.  The
# test fixtures clear these lists in setUp/tearDown.
mock_http_clients = []
mock_http_downloads = []
mock_installers = []
mock_downloads = []
class MockHttpClient(object):
  """Records fetch() arguments instead of performing any HTTP request."""

  def __init__(self, io_loop=None):
    self.did_fetch = False
    self.request = None
    self.callback = None
    # Register this instance so the test can find clients created by the
    # code under test.
    mock_http_clients.append(self)

  def fetch(self, request, callback):
    # Capture the call instead of doing network I/O.
    self.request = request
    self.callback = callback
    self.did_fetch = True
class MockIoloop(object):
  """Records the most recent add_timeout() call instead of scheduling it."""

  def __init__(self):
    self.timeout = None
    self.callback = None

  def add_timeout(self, timeout, callback, monotonic=None):
    # Remember only the latest scheduling request for later inspection.
    self.timeout, self.callback = timeout, callback
class MockHttpDownload(object):
  """Captures constructor arguments and whether fetch() was invoked."""

  def __init__(self, url, username=None, password=None,
               download_complete_cb=None, download_dir=None, ioloop=None):
    # Keep every argument so tests can verify what the caller passed in.
    self.url = url
    self.username = username
    self.password = password
    self.download_complete_cb = download_complete_cb
    self.download_dir = download_dir
    self.ioloop = ioloop
    self.did_fetch = False
    # Register this instance for inspection by the test.
    mock_http_downloads.append(self)

  def fetch(self):
    # No real transfer happens; just note that fetch was requested.
    self.did_fetch = True
class MockInstaller(object):
  """Records install()/reboot() calls instead of touching the system."""

  def __init__(self, filename):
    self.filename = filename
    self.did_install = False
    self.did_reboot = False
    self.file_type = None
    # Bug fix: this was misspelled 'targe_filename', leaving the
    # target_filename attribute undefined until install() ran.
    self.target_filename = None
    self.install_callback = None
    mock_installers.append(self)

  def install(self, file_type, target_filename, callback):
    self.did_install = True
    self.file_type = file_type
    self.target_filename = target_filename
    self.install_callback = callback
    return True

  def reboot(self):
    self.did_reboot = True
class MockTransferComplete(object):
  """Records the arguments of a SendTransferComplete callback."""

  def __init__(self):
    self.transfer_complete_called = False
    self.dl = None
    self.command_key = None
    self.faultcode = None
    self.faultstring = None
    self.starttime = None
    self.endtime = None
    # Consistency fix: initialize event_code like every other field, so
    # reading it before a call doesn't raise AttributeError (it was
    # previously only assigned inside SendTransferComplete).
    self.event_code = None

  def SendTransferComplete(self, dl, command_key, faultcode, faultstring,
                           starttime, endtime, event_code):
    self.transfer_complete_called = True
    self.dl = dl
    self.command_key = command_key
    self.faultcode = faultcode
    self.faultstring = faultstring
    self.starttime = starttime
    self.endtime = endtime
    self.event_code = event_code
class MockFile(object):
  """Minimal object exposing only a .name attribute, like a file object."""

  def __init__(self, name):
    self.name = name
def _Delta(t):
return datetime.timedelta(seconds=t)
class DownloadTest(unittest.TestCase):
  """Tests for the download.Download state machine.

  setUp monkeypatches download.INSTALLER and download.DOWNLOAD_CLIENT to
  mocks, so no real download or install happens; each test then drives
  the state machine step by step via the mocks' recorded callbacks.
  """
  def setUp(self):
    self.tmpdir = tempfile.mkdtemp()
    download.INSTALLER = MockInstaller
    self.done_command_key = None
    # Save time.time so tests can replace it with mockTime; tearDown restores.
    self.old_time = time.time
    del mock_installers[:]
    del mock_http_downloads[:]
    download.DOWNLOAD_CLIENT['http'] = MockHttpDownload
    download.DOWNLOAD_CLIENT['https'] = MockHttpDownload
  def tearDown(self):
    time.time = self.old_time
    shutil.rmtree(self.tmpdir)
    del mock_installers[:]
    del mock_http_clients[:]
  def mockTime(self):
    # Deterministic stand-in for time.time().
    return 123456.0
  def QCheckBoring(self, dl, args):
    """Check get_queue_state() fields which don't change, and return qstate."""
    q = dl.get_queue_state()
    self.assertEqual(q.CommandKey, args['command_key'])
    self.assertTrue(q.IsDownload)
    self.assertEqual(q.FileType, args['file_type'])
    self.assertEqual(q.FileSize, args['file_size'])
    self.assertEqual(q.TargetFileName, args['target_filename'])
    return q.State
  def testSuccess(self):
    """Drive a download through the complete happy path to completion."""
    ioloop = MockIoloop()
    cmpl = MockTransferComplete()
    time.time = self.mockTime
    kwargs = dict(command_key='testCommandKey',
                  file_type='testFileType',
                  url='http://example.com/foo',
                  username='testUsername',
                  password='testPassword',
                  file_size=1000,
                  target_filename='testTargetFilename',
                  delay_seconds=99)
    stateobj = persistobj.PersistentObject(objdir=self.tmpdir,
                                           rootname='testObj',
                                           filename=None, **kwargs)
    dl = download.Download(stateobj=stateobj,
                           transfer_complete_cb=cmpl.SendTransferComplete,
                           ioloop=ioloop)
    self.assertEqual(self.QCheckBoring(dl, kwargs), 1) # 1: Not Yet Started
    # Step 1: Wait delay_seconds
    dl.do_start()
    self.assertEqual(ioloop.timeout, _Delta(kwargs['delay_seconds']))
    self.assertEqual(self.QCheckBoring(dl, kwargs), 1) # 1: Not Yet Started
    # Step 2: HTTP Download
    dl.timer_callback()
    self.assertEqual(len(mock_http_downloads), 1)
    http = mock_http_downloads[0]
    self.assertEqual(http.url, kwargs['url'])
    self.assertEqual(http.username, kwargs['username'])
    self.assertEqual(http.password, kwargs['password'])
    self.assertTrue(http.download_complete_cb)
    self.assertTrue(http.did_fetch)
    self.assertEqual(self.QCheckBoring(dl, kwargs), 2) # 2: In process
    # Step 3: Install
    dlfile = MockFile('/path/to/downloaded/file')
    http.download_complete_cb(0, '', dlfile)
    self.assertEqual(len(mock_installers), 1)
    inst = mock_installers[0]
    self.assertTrue(inst.did_install)
    self.assertEqual(inst.file_type, kwargs['file_type'])
    self.assertEqual(inst.target_filename, kwargs['target_filename'])
    self.assertEqual(inst.filename, dlfile.name)
    self.assertFalse(inst.did_reboot)
    self.assertEqual(self.QCheckBoring(dl, kwargs), 2) # 2: In process
    # Step 4: Reboot
    inst.install_callback(0, '', must_reboot=True)
    self.assertTrue(inst.did_reboot)
    self.assertEqual(self.QCheckBoring(dl, kwargs), 2) # 2: In process
    # Step 5: Send Transfer Complete
    dl.reboot_callback(0, '')
    self.assertTrue(cmpl.transfer_complete_called)
    self.assertEqual(cmpl.command_key, kwargs['command_key'])
    self.assertEqual(cmpl.faultcode, 0)
    self.assertEqual(cmpl.faultstring, '')
    self.assertEqual(cmpl.starttime, self.mockTime())
    self.assertEqual(cmpl.endtime, self.mockTime())
    self.assertEqual(cmpl.event_code, 'M Download')
    self.assertEqual(self.QCheckBoring(dl, kwargs), 3) # 3: Cleaning up
    # Step 6: Wait for Transfer Complete Response
    self.assertFalse(dl.cleanup())
    self.assertEqual(self.QCheckBoring(dl, kwargs), 3) # 3: Cleaning up
  def testDownloadFailed(self):
    """A failed HTTP fetch reports its fault code; no install is attempted."""
    ioloop = MockIoloop()
    cmpl = MockTransferComplete()
    time.time = self.mockTime
    kwargs = dict(command_key='testCommandKey',
                  url='http://example.com/foo',
                  delay_seconds=1)
    stateobj = persistobj.PersistentObject(objdir=self.tmpdir,
                                           rootname='testObj',
                                           filename=None, **kwargs)
    dl = download.Download(stateobj=stateobj,
                           transfer_complete_cb=cmpl.SendTransferComplete,
                           ioloop=ioloop)
    # Step 1: Wait delay_seconds
    dl.do_start()
    self.assertEqual(ioloop.timeout, _Delta(kwargs['delay_seconds']))
    # Step 2: HTTP Download
    dl.timer_callback()
    self.assertEqual(len(mock_http_downloads), 1)
    http = mock_http_downloads[0]
    self.assertEqual(http.url, kwargs['url'])
    # Step 3: Download fails
    http.download_complete_cb(100, 'TestDownloadError', None)
    self.assertEqual(len(mock_installers), 0)
    self.assertTrue(cmpl.transfer_complete_called)
    self.assertEqual(cmpl.command_key, kwargs['command_key'])
    self.assertEqual(cmpl.faultcode, 100)
    self.assertEqual(cmpl.faultstring, 'TestDownloadError')
    self.assertEqual(cmpl.starttime, 0.0)
    self.assertEqual(cmpl.endtime, 0.0)
    self.assertEqual(cmpl.event_code, 'M Download')
  def testInstallFailed(self):
    """A failed install reports its fault code via TransferComplete."""
    ioloop = MockIoloop()
    cmpl = MockTransferComplete()
    time.time = self.mockTime
    kwargs = dict(command_key='testCommandKey',
                  url='http://example.com/foo',
                  delay_seconds=1)
    stateobj = persistobj.PersistentObject(objdir=self.tmpdir,
                                           rootname='testObj',
                                           filename=None, **kwargs)
    dl = download.Download(stateobj=stateobj,
                           transfer_complete_cb=cmpl.SendTransferComplete,
                           ioloop=ioloop)
    # Step 1: Wait delay_seconds
    dl.do_start()
    self.assertEqual(ioloop.timeout, _Delta(kwargs['delay_seconds']))
    # Step 2: HTTP Download
    dl.timer_callback()
    self.assertEqual(len(mock_http_downloads), 1)
    http = mock_http_downloads[0]
    self.assertEqual(http.url, kwargs['url'])
    # Step 3: Install
    dlfile = MockFile('/path/to/downloaded/file')
    http.download_complete_cb(0, '', dlfile)
    self.assertEqual(len(mock_installers), 1)
    inst = mock_installers[0]
    self.assertTrue(inst.did_install)
    self.assertEqual(inst.filename, dlfile.name)
    self.assertFalse(inst.did_reboot)
    # Step 4: Install Failed
    inst.install_callback(101, 'TestInstallError', must_reboot=False)
    self.assertTrue(cmpl.transfer_complete_called)
    self.assertEqual(cmpl.command_key, kwargs['command_key'])
    self.assertEqual(cmpl.faultcode, 101)
    self.assertEqual(cmpl.faultstring, 'TestInstallError')
    self.assertEqual(cmpl.starttime, 0.0)
    self.assertEqual(cmpl.endtime, 0.0)
    self.assertEqual(cmpl.event_code, 'M Download')
  def testInstallNoReboot(self):
    """An install that needs no reboot completes without a reboot step."""
    ioloop = MockIoloop()
    cmpl = MockTransferComplete()
    time.time = self.mockTime
    kwargs = dict(command_key='testCommandKey',
                  url='http://example.com/foo',
                  delay_seconds=1)
    stateobj = persistobj.PersistentObject(objdir=self.tmpdir,
                                           rootname='testObj',
                                           filename=None, **kwargs)
    dl = download.Download(stateobj=stateobj,
                           transfer_complete_cb=cmpl.SendTransferComplete,
                           ioloop=ioloop)
    # Step 1: Wait delay_seconds
    dl.do_start()
    self.assertEqual(ioloop.timeout, _Delta(kwargs['delay_seconds']))
    # Step 2: HTTP Download
    dl.timer_callback()
    self.assertEqual(len(mock_http_downloads), 1)
    http = mock_http_downloads[0]
    self.assertEqual(http.url, kwargs['url'])
    # Step 3: Install
    dlfile = MockFile('/path/to/downloaded/file')
    http.download_complete_cb(0, '', dlfile)
    self.assertEqual(len(mock_installers), 1)
    inst = mock_installers[0]
    self.assertTrue(inst.did_install)
    self.assertEqual(inst.filename, dlfile.name)
    self.assertFalse(inst.did_reboot)
    # Step 4: Install Succeeded, no reboot
    inst.install_callback(0, '', must_reboot=False)
    self.assertTrue(cmpl.transfer_complete_called)
    self.assertEqual(cmpl.command_key, kwargs['command_key'])
    self.assertEqual(cmpl.faultcode, 0)
    self.assertEqual(cmpl.faultstring, '')
    self.assertEqual(cmpl.starttime, self.mockTime())
    self.assertEqual(cmpl.endtime, self.mockTime())
    self.assertEqual(cmpl.event_code, 'M Download')
  def testCancelRefused(self):
    """cleanup() refuses cancellation while a transfer is still in flight."""
    ioloop = MockIoloop()
    cmpl = MockTransferComplete()
    kwargs = dict(command_key='testCommandKey',
                  url='http://example.com/foo')
    stateobj = persistobj.PersistentObject(objdir=self.tmpdir,
                                           rootname='testObj',
                                           filename=None, **kwargs)
    dl = download.Download(stateobj=stateobj,
                           transfer_complete_cb=cmpl.SendTransferComplete,
                           ioloop=ioloop)
    dl.do_start() # Step 1: Wait delay_seconds
    dl.timer_callback() # Step 2: HTTP Download
    dl.download_complete_callback(0, None, None) # Step 3: Install
    self.assertTrue(dl.cleanup())
    dl.installer_callback(0, None, must_reboot=True) # Step 4: Reboot
    self.assertTrue(dl.cleanup())
    dl.reboot_callback(0, '') # Step 5: Rebooted
    self.assertFalse(dl.cleanup())
  def testCommandKey(self):
    """CommandKey() returns the stored key, or None when absent."""
    kwargs = dict(command_key='testCommandKey')
    stateobj = persistobj.PersistentObject(objdir=self.tmpdir,
                                           rootname='testObj',
                                           filename=None, **kwargs)
    dl = download.Download(stateobj=stateobj, transfer_complete_cb=None)
    self.assertEqual(dl.CommandKey(), kwargs['command_key'])
    kwargs = dict()
    stateobj = persistobj.PersistentObject(objdir=self.tmpdir,
                                           rootname='testObj',
                                           filename=None, **kwargs)
    dl = download.Download(stateobj=stateobj, transfer_complete_cb=None)
    self.assertEqual(dl.CommandKey(), None)
class MockDownloadObj(object):
  """Stand-in for download.DOWNLOADOBJ that records lifecycle calls."""

  def __init__(self, stateobj, transfer_complete_cb, done_cb=None,
               download_dir=None, ioloop=None):
    # Keep every constructor argument for later inspection by the test.
    self.stateobj = stateobj
    self.transfer_complete_cb = transfer_complete_cb
    self.done_cb = done_cb
    self.download_dir = download_dir
    self.ioloop = ioloop
    # Flags recording which lifecycle methods have been invoked.
    self.do_start_called = False
    self.immediate_complete_called = False
    self.faultcode = None
    self.faultstring = None
    self.reboot_callback_called = False
    mock_downloads.append(self)

  def do_start(self):
    self.do_start_called = True

  def do_immediate_complete(self, faultcode, faultstring):
    self.immediate_complete_called = True
    self.faultcode = faultcode
    self.faultstring = faultstring

  def reboot_callback(self, faultcode, faultstring):
    self.reboot_callback_called = True

  def get_queue_state(self):
    return 'This_is_not_a_real_queue_state.'
class DownloadManagerTest(unittest.TestCase):
  """Tests for download.DownloadManager, with the Download object mocked out."""

  def setUp(self):
    self.old_DOWNLOADOBJ = download.DOWNLOADOBJ
    download.DOWNLOADOBJ = MockDownloadObj
    self.tmpdir = tempfile.mkdtemp()
    del mock_downloads[:]

  def tearDown(self):
    download.DOWNLOADOBJ = self.old_DOWNLOADOBJ
    shutil.rmtree(self.tmpdir)
    del mock_downloads[:]

  def allocTestDM(self):
    dm = download.DownloadManager()
    dm.SetDirectories(self.tmpdir, self.tmpdir)
    cmpl = MockTransferComplete()
    dm.send_transfer_complete = cmpl.SendTransferComplete
    return (dm, cmpl)

  def _MakeArgs(self, suffix=''):
    """Standard NewDownload keyword arguments shared by several tests."""
    return {'command_key': 'TestCommandKey' + suffix,
            'file_type': 'TestFileType',
            'url': 'http://example.com/',
            'username': 'TestUser',
            'password': 'TestPassword',
            'file_size': 99,
            'target_filename': 'TestFilename',
            'delay_seconds': 30}

  def _CheckStateobj(self, dl, args):
    """The mock Download's persisted state must mirror the request args."""
    for field in ('command_key', 'file_type', 'url', 'username', 'password',
                  'file_size', 'target_filename', 'delay_seconds'):
      self.assertEqual(getattr(dl.stateobj, field), args[field])

  def testSimpleDownload(self):
    (dm, _) = self.allocTestDM()
    args = self._MakeArgs()
    (code, start, end) = dm.NewDownload(**args)
    self.assertEqual((code, start, end), (1, 0.0, 0.0))
    self.assertEqual(len(mock_downloads), 1)
    self._CheckStateobj(mock_downloads[0], args)

  def testReadonlyConfigDir(self):
    """NewDownload must still succeed when the config dir isn't writable."""
    (dm, _) = self.allocTestDM()
    dm.SetDirectories(config_dir='/user/nonexist', download_dir=self.tmpdir)
    args = self._MakeArgs()
    (code, start, end) = dm.NewDownload(**args)
    self.assertEqual((code, start, end), (1, 0.0, 0.0))
    self.assertEqual(len(mock_downloads), 1)
    self._CheckStateobj(mock_downloads[0], args)

  def testMaxDownloads(self):
    (dm, _) = self.allocTestDM()
    maxdl = download.DownloadManager.MAXDOWNLOADS
    for i in range(maxdl):
      args = {'command_key': 'TestCommandKey' + str(i),
              'url': 'http://example.com/'}
      (code, start, end) = dm.NewDownload(**args)
      self.assertEqual((code, start, end), (1, 0.0, 0.0))
    self.assertEqual(len(mock_downloads), maxdl)
    # One more than MAXDOWNLOADS must be rejected.
    self.assertRaises(core.ResourcesExceededError, dm.NewDownload, **args)

  def testBadUrlScheme(self):
    (dm, _) = self.allocTestDM()
    self.assertRaises(core.FileTransferProtocolError, dm.NewDownload,
                      command_key='TestCommandKey', url='invalid://bad.url/')

  def testRestoreMultiple(self):
    (dm, _) = self.allocTestDM()
    numdl = 4
    for i in range(numdl):
      persistobj.PersistentObject(objdir=dm.config_dir,
                                  rootname=download.DNLDROOTNAME,
                                  filename=None, **self._MakeArgs(str(i)))
    dm.RestoreDownloads()
    self.assertEqual(len(mock_downloads), numdl)
    # Restored downloads go straight to the reboot callback, not do_start.
    for dl in mock_downloads:
      self.assertFalse(dl.do_start_called)
      self.assertFalse(dl.immediate_complete_called)
      self.assertTrue(dl.reboot_callback_called)

  def testRestoreNoCommandKey(self):
    """A persisted object without a command_key must not be restored."""
    (dm, _) = self.allocTestDM()
    persistobj.PersistentObject(objdir=dm.config_dir,
                                rootname=download.DNLDROOTNAME,
                                filename=None, delay_seconds=30)
    dm.RestoreDownloads()
    self.assertEqual(len(mock_downloads), 0)

  def testRestoreReboots(self):
    (dm, _) = self.allocTestDM()
    expected = set()
    for i in range(3):
      key = u'TestCommandKey' + str(i)
      persistobj.PersistentObject(objdir=dm.config_dir,
                                  rootname=download.BOOTROOTNAME,
                                  filename=None, command_key=key)
      expected.add(('M Reboot', key))
    # Plus an invalid object, which must be ignored.
    persistobj.PersistentObject(objdir=dm.config_dir,
                                rootname=download.BOOTROOTNAME,
                                filename=None, foo='bar')
    self.assertEqual(set(dm.RestoreReboots()), expected)

  def testGetAllQueuedTransfers(self):
    (dm, _) = self.allocTestDM()
    dm.NewDownload(**self._MakeArgs('0'))
    self.assertEqual(len(dm.GetAllQueuedTransfers()), 1)
# Allow running this test file directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#
"""Tests for core.py."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import unittest
import core
class TestObject(core.Exporter):
  """A core.Exporter exporting one param, one sub-object, and one object list."""

  def __init__(self):
    core.Exporter.__init__(self)
    self.Export(params=['TestParam'],
                objects=['SubObj'],
                lists=['Counter'])
    self.TestParam = 5
    self.SubObj = TestObject.SubObj()
    # CounterList holds the list instances; Counter is the factory class
    # used when a new 'Counter' list entry is added.
    self.CounterList = {}
    self.Counter = TestObject.SubObj

  class SubObj(core.Exporter):
    # gcount is class-level shared state (a one-element list so that the
    # in-place increment mutates the shared object, not an instance copy).
    gcount = [0]

    def __init__(self):
      core.Exporter.__init__(self)
      self.Export(params=['Count'])
      self.gcount[0] += 1
      self.Count = self.gcount[0]
class CoreTest(unittest.TestCase):
  """Tests for the core.Exporter machinery."""

  def setUp(self):
    # Reset the global gcount
    TestObject.SubObj.gcount = [0]

  def testCore(self):
    """End-to-end check of Export/AddExportObject/ListExports/DumpSchema."""
    o = TestObject()
    self.assertTrue(o)
    o.ValidateExports()
    o.AddExportObject('Counter')
    o.AddExportObject('Counter')
    o.AddExportObject('Counter')
    print o.ListExports(recursive=False)
    print o.ListExports(recursive=True)
    self.assertEqual(list(o.ListExports()),
                     ['Counter.', 'SubObj.', 'TestParam'])
    self.assertEqual(list(o.ListExports(recursive=True)),
                     ['Counter.',
                      'Counter.0.', 'Counter.0.Count',
                      'Counter.1.', 'Counter.1.Count',
                      'Counter.2.', 'Counter.2.Count',
                      'SubObj.', 'SubObj.Count', 'TestParam'])
    # Schema dump must be identical for the class and for an instance of it.
    ds1 = core.DumpSchema(TestObject)
    ds2 = core.DumpSchema(o)
    self.assertEqual(ds1, ds2)
    o.DeleteExportObject('Counter', 1)
    self.assertEqual(list(o.ListExports(recursive=True)),
                     ['Counter.',
                      'Counter.0.', 'Counter.0.Count',
                      'Counter.2.', 'Counter.2.Count',
                      'SubObj.', 'SubObj.Count', 'TestParam'])
    # Count values 2 and 4: the implicit SubObj took 1, the three adds
    # took 2,3,4, and the entry with Count 3 (index 1) was deleted.
    self.assertEqual([(idx, i.Count) for idx, i in o.CounterList.items()],
                     [(0, 2), (2, 4)])
    # Non-numeric indices are allowed too.
    idx, eo = o.AddExportObject('Counter', 'fred')
    eo.Count = 99
    print o.ListExports(recursive=True)
    self.assertEqual([(idx, i.Count) for idx, i in o.CounterList.items()],
                     [(0, 2), (2, 4), ('fred', 99)])
    print core.Dump(o)
    o.ValidateExports()

  def testCanonicalName(self):
    """GetCanonicalName returns the dotted path of a child object."""
    o = TestObject()
    self.assertTrue(o)
    o.ValidateExports()
    name = o.GetCanonicalName(o.SubObj)
    self.assertEqual('SubObj', name)
    (idx1, obj1) = o.AddExportObject('Counter')
    (idx2, obj2) = o.AddExportObject('Counter')
    (idx3, obj3) = o.AddExportObject('Counter')
    name = o.GetCanonicalName(obj3)
    self.assertEqual('Counter.2', name)
# Allow running this test file directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for cwmpboolean.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import unittest
import google3
import cwmpbool
class CwmpBoolTest(unittest.TestCase):
  """Tests for cwmp boolean parsing and formatting."""

  def testParse(self):
    """parse() accepts the six cwmp spellings and rejects anything else."""
    for text in ('true', 'True', '1'):
      self.assertTrue(cwmpbool.parse(text))
    for text in ('false', 'False', '0'):
      self.assertFalse(cwmpbool.parse(text))
    self.assertRaises(ValueError, cwmpbool.parse, 'booga')

  def testFormat(self):
    """format() emits the canonical '1'/'0' strings."""
    self.assertEqual(cwmpbool.format(True), '1')
    self.assertEqual(cwmpbool.format(False), '0')

  def testValid(self):
    """valid() is True exactly for the parseable spellings."""
    for text in ('True', 'true', 'False', 'false', '0', '1'):
      self.assertTrue(cwmpbool.valid(text))
    for text in ('', 'booga'):
      self.assertFalse(cwmpbool.valid(text))
# Allow running this test file directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#
"""A simple command protocol that lets us manipulate a TR-069 tree."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import traceback
import core
import download
import mainloop
import quotedblock
class RemoteCommandStreamer(quotedblock.QuotedBlockStreamer):
  """A simple command protocol that lets us manipulate a TR-069 tree."""

  def __init__(self, sock, address, root):
    """Initialize a RemoteCommandStreamer.

    Args:
      sock: the socket provided by mainloop.Listen
      address: the address provided by mainloop.Listen
      root: the root of the TR-069 (core.Exporter) object tree.
    """
    quotedblock.QuotedBlockStreamer.__init__(self, sock, address)
    self.root = root
    self.download_manager = download.DownloadManager()

  def _ProcessBlock(self, lines):
    # Generator: dispatch each input line to the matching CmdFoo method
    # and yield that method's result (itself an iterable of output rows).
    if not lines:
      raise Exception('try the "help" command')
    for words in lines:
      cmd, args = words[0], tuple(words[1:])
      funcname = 'Cmd%s' % cmd.title()
      print 'command: %r %r' % (cmd, args)
      func = getattr(self, funcname, None)
      if not func:
        raise Exception('no such command %r' % (cmd,))
      yield func(*args)

  def ProcessBlock(self, lines):
    """Process an incoming list of commands and return the result."""
    try:
      # Flatten the per-command row iterables into one list of rows.
      out = sum((list(i) for i in self._ProcessBlock(lines)), [])
    except EOFError:
      raise  # CmdQuit: let the connection close
    except Exception, e:
      print traceback.format_exc()
      return [['ERROR', '-1', str(e)]]
    return [['OK']] + out

  def CmdHelp(self):
    """Return a list of available commands."""
    # Any method named Cmd* is a command; its docstring is the help text.
    for name in sorted(dir(self)):
      if name.startswith('Cmd'):
        func = getattr(self, name)
        yield [name[3:].lower(), func.__doc__ or '']

  def CmdQuit(self):
    """Close the current connection."""
    raise EOFError()

  def CmdCompletions(self, prefix):
    """Return possible completions for the given name prefix."""
    parts = prefix.split('.')
    before, after = parts[:-1], parts[-1]
    # Case-insensitive match on the last dotted component.
    for name in self.root.ListExports('.'.join(before), recursive=False):
      if name.lower().startswith(after.lower()):
        print ' completion: %r %r' % (before, name)
        yield ['.'.join(before + [name])]

  def CmdGet(self, name):
    """Get the value of the given parameter."""
    return [[name, self.root.GetExport(name)]]

  def CmdSet(self, name, value):
    """Set the given parameter to the given value."""
    self.root.SetExportParam(name, value)
    return [[name, value]]

  def _CmdList(self, name, recursive):
    # Shared implementation of 'list' and 'rlist'.
    prefix = name and ('%s.' % name) or ''
    for k in self.root.ListExports(name, recursive=recursive):
      if k.endswith('.'):
        yield [k]  # sub-object: no value to show
      else:
        yield [k, self.root.GetExport(prefix + k)]

  def CmdList(self, name=None):
    """Return a list of objects, non-recursively starting at the given name."""
    return self._CmdList(name, recursive=False)

  CmdLs = CmdList  # 'ls' is an alias for 'list'

  def CmdRlist(self, name=None):
    """Return a list of objects, recursively starting at the given name."""
    return self._CmdList(name, recursive=True)

  def CmdAdd(self, name, idx=None):
    """Add a sub-object to the given list with the given (optional) index."""
    #pylint: disable-msg=W0612
    idx, obj = self.root.AddExportObject(name, idx)
    return [[idx]]

  def CmdDel(self, name, *idxlist):
    """Delete one or more sub-objects from the given list."""
    for idx in idxlist:
      self.root.DeleteExportObject(name, idx)
      yield [idx]

  def CmdDownload(self, url):
    """Download a system image, install it, and reboot."""
    self.download_manager.NewDownload(
        command_key='rcmd',
        file_type='1 IMAGE',
        url=url,
        username=None,
        password=None,
        file_size=0,
        target_filename='rcmd.gi',
        delay_seconds=0)
    return [['OK', 'Starting download.']]
def MakeRemoteCommandStreamer(root):
  """Return a factory(sock, address) that binds root into each new streamer."""
  def _New(sock, address):
    return RemoteCommandStreamer(sock, address, root)
  return _New
def main():
  # Manual test entry point: export a tiny object tree and serve the
  # command protocol on TCP port 12999 and a unix socket.
  loop = mainloop.MainLoop()

  class Sub(core.Exporter):
    def __init__(self):
      core.Exporter.__init__(self)
      self.Export(params=['Value'])
      self.Value = 0

  root = core.Exporter()
  # Sub is the factory class for 'Sub' list entries; SubList holds them.
  root.Sub = Sub
  root.SubList = {}
  root.Test = 'this is a test string'
  root.Export(params=['Test'], lists=['Sub'])
  loop.ListenInet(('', 12999), MakeRemoteCommandStreamer(root))
  loop.ListenUnix('/tmp/cwmpd.sock', MakeRemoteCommandStreamer(root))
  loop.Start()
# Run the manual test server when invoked directly.
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Parser for tr-069-style data model .xml files."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import os.path
import re
import string
import sys
import xml.etree.ElementTree
import google3
import bup.options
optspec = """
parse-schema.py [-d dir] files...
--
d,output-dir= Directory to write files to
"""
DEFAULT_BASE_CLASS = 'core.Exporter'
# chunks: (spec, objtype, name) -> parsed chunk (component/model subtree).
chunks = {}
# imports: (spec, objtype, name) -> the key it is imported from.
imports = {}


def Log(s):
  """Write s to stderr, flushing stdout first so output interleaves sanely."""
  sys.stdout.flush()
  sys.stderr.write('%s\n' % s)


def AddChunk(spec, objtype, name, root):
  """Register a parsed <component>/<model> chunk; duplicates are a bug."""
  key = (spec, objtype, name)
  # 'in' rather than the deprecated dict.has_key() (removed in Python 3).
  assert key not in chunks
  chunks[key] = root
def FixSpec(spec):
  """Canonicalize a spec URN by dropping the fourth (bugfix) version digit.

  A document may refer to tr-xxx-1-0-0 or tr-xxx-1-0 when we actually have
  tr-xxx-1-0-1 (a bugfix revision); stripping the last digit gives a stable
  dictionary key.  'wt' (working text) drafts are normalized to 'tr' too.
  """
  pattern = r':(tr|wt)-(\d+-\d+-\d+)-\d+$'
  return re.sub(pattern, r':tr-\2', spec)
def NiceSpec(spec):
  """Strip well-known URN prefixes, leaving the short spec name."""
  spec = re.sub(r'^urn:broadband-forum-org:', '', spec)
  spec = re.sub(r'^urn:google-com:', '', spec)
  spec = re.sub(r'^urn:catawampus-org:', '', spec)
  return spec


def SpecNameForPython(spec):
  """Turn a spec URN into a valid Python module name (e.g. tr181_v2_0)."""
  spec = NiceSpec(spec)
  spec = re.sub(r'tr-(\d+)-(\d+)-(\d+)', r'tr\1_v\2_\3', spec)
  # str.replace instead of spec.translate(string.maketrans('-', '_')):
  # string.maketrans only exists in Python 2, and a single-character
  # substitution doesn't need a translation table anyway.
  return spec.replace('-', '_')
def ObjNameForPython(name):
  """Turn a model name like 'Device:2.2' into a Python identifier."""
  name = re.sub(r':(\d+)\.(\d+)', r'_v\1_\2', name)  # Device:2.2 -> Device_v2_2
  return name.replace('-', '_')  # X_EXAMPLE-COM_foo vendor data models
def Indented(prefix, s):
  """Prefix every line of str(s) with prefix; blank out whitespace-only lines.

  NOTE(review): unicode() makes this Python 2 only; a Python 3 port would
  use str() here.
  """
  s = unicode(s)
  s = re.sub(re.compile(r'^', re.M), prefix, s)    # prepend prefix per line
  s = re.sub(re.compile(r'^\s+$', re.M), '', s)    # empty out whitespace lines
  return s
# Maps a known-bogus import key to the key that should be used instead.
IMPORT_BUG_FIXES = {
    # bugs in tr-181-2-0-1. It tries to import *_Device2, which doesn't
    # seem to exist anywhere.
    ('urn:broadband-forum-org:tr-143-1-0', 'component',
     'DownloadDiagnostics_Device2'):
        ('urn:broadband-forum-org:tr-143-1-0', 'component',
         'DownloadDiagnostics'),
    ('urn:broadband-forum-org:tr-143-1-0', 'component',
     'UploadDiagnostics_Device2'):
        ('urn:broadband-forum-org:tr-143-1-0', 'component',
         'UploadDiagnostics'),
}
def ParseImports(into_spec, root):
  """Record the aliases declared by an <import> tag.

  Args:
    into_spec: the (already FixSpec'd) spec containing the <import> tag.
    root: the <import> XML element.
  Raises:
    KeyError: on an unrecognized child tag.
  """
  from_spec = FixSpec(root.attrib['spec'])
  for node in root:
    if node.tag in ('component', 'model'):
      from_name = node.attrib.get('ref', node.attrib['name'])
      into_name = node.attrib['name']
      from_key = (from_spec, node.tag, from_name)
      into_key = (into_spec, node.tag, into_name)
      if from_key in IMPORT_BUG_FIXES:
        from_key = IMPORT_BUG_FIXES[from_key]
      # 'in'/'not in' rather than the deprecated dict.has_key()
      # (removed in Python 3).
      assert into_key not in chunks
      assert into_key not in imports
      imports[into_key] = from_key
    elif node.tag == 'dataType':
      continue
    else:
      raise KeyError(node.tag)
def ParseFile(filename):
  """Parse one data-model XML file, registering its chunks and imports."""
  Log(filename)
  # Pass the filename rather than an open() we never close: ElementTree
  # then manages (and closes) the file handle itself.
  root = xml.etree.ElementTree.parse(filename).getroot()
  spec = FixSpec(root.attrib['spec'])
  Log(NiceSpec(spec))
  for node in root:
    if node.tag == 'import':
      ParseImports(spec, node)
    elif node.tag in ('component', 'model'):
      name = node.attrib['name']
      Log('%-12s %-9s %s' % (NiceSpec(spec), node.tag, name))
      AddChunk(spec, node.tag, name, (spec, name, node))
    elif node.tag in ('description', 'dataType', 'bibliography'):
      continue
    else:
      Log('skip %s' % node.tag)
def ResolveImports():
  """Chase every recorded import to its ultimate source and copy the chunk."""
  for k, v in sorted(imports.items()):
    prefix = ' %-12s %-9s %-20s ' % (NiceSpec(k[0]), k[1], k[2])
    Log('%s\n=%-12s %-9s %s' % (prefix, NiceSpec(v[0]), v[1], v[2]))
    # Imports can chain (A imports from B which imports from C); follow
    # the chain until we reach a spec that actually defines the chunk.
    while v in imports:
      v = imports[v]
      Log('=%-12s %-9s %s' % (NiceSpec(v[0]), v[1], v[2]))
    (into_spec, objtype, into_name) = k
    (from_spec, objtype, from_name) = v
    if objtype in ('component', 'model'):
      AddChunk(into_spec, objtype, into_name,
               chunks[(from_spec, objtype, from_name)])
    else:
      raise KeyError(objtype)
class Object(object):
  """Represents an <object> tag."""

  def __init__(self, model, name, prefix):
    self.model = model
    self.name = re.sub(r'-{i}', '', name)
    # A '-{i}' marker in the raw name means this is a multi-instance list.
    self.is_sequence = (self.name != name)
    self.prefix = prefix
    self.params = []
    self.object_sequence = []

  def __str__(self):
    """Render this object (and its children) as Python class source code."""
    pre = []
    out = []
    parent_class_name = DEFAULT_BASE_CLASS
    if self.model.parent_model_name:
      parent_class = self.FindParentClass()
      if parent_class:
        parent_class_name = '%s.%s' % (self.model.parent_model_name,
                                       parent_class.FullName())
    if parent_class_name.endswith('.'):
      # Only happens for toplevel Model objects
      parent_class_name = parent_class_name[:-1]
    fullname_with_seq = re.sub(r'-{i}', '.{i}', '.'.join(self.prefix[:-1]))
    # NOTE: string.maketrans is Python 2 only.
    classname = self.name.translate(string.maketrans('-', '_'))
    pre.append('class %s(%s):' % (classname, parent_class_name))
    classpath = '%s.%s' % (self.model.name, fullname_with_seq)
    if classpath.endswith('.'):
      classpath = classpath[:-1]
    pre.append('  """Represents %s."""' % classpath)
    if self.params or self.object_sequence:
      pre.append('')
      pre.append('  def __init__(self, **defaults):')
      pre.append('    %s.__init__(self, defaults=defaults)'
                 % parent_class_name)
      bits = []
      # Continuation indent for the generated self.Export(...) call.
      space = ',\n                '
      if self.params:
        quoted_param_list = ["'%s'" % param for param in self.params]
        quoted_params = (space+' ').join(quoted_param_list)
        bits.append('params=[%s]' % quoted_params)
      obj_list = [obj.name for obj in self.object_sequence
                  if not obj.is_sequence]
      if obj_list:
        quoted_obj_list = ["'%s'" % obj for obj in obj_list]
        quoted_objs = (space+' ').join(quoted_obj_list)
        bits.append('objects=[%s]' % quoted_objs)
      objlist_list = [obj.name for obj in self.object_sequence
                      if obj.is_sequence]
      if objlist_list:
        quoted_objlist_list = ["'%s'" % obj for obj in objlist_list]
        quoted_objlists = (space+' ').join(quoted_objlist_list)
        bits.append('lists=[%s]' % quoted_objlists)
      pre.append('    self.Export(%s)' % (space.join(bits)))
    for obj in self.object_sequence:
      out.append('')
      out.append(Indented('  ', obj))
    if not self.params and not out:
      out.append('  pass')
    return '\n'.join(pre + out)

  def FindParentClass(self):
    """Find the corresponding Object in the parent model chain, if any."""
    parent_model = models.get((self.model.spec.name,
                               self.model.parent_model_name), None)
    while parent_model:
      parent_class = parent_model.objects.get(self.prefix, None)
      if parent_class:
        return parent_class
      parent_model = models.get((parent_model.spec.name,
                                 parent_model.parent_model_name), None)
    return None

  def FullName(self):
    """Dotted name of this object, with the -{i} list markers removed."""
    return re.sub(r'-{i}', '', '.'.join(self.prefix[:-1]))
# Maps (spec_name, model_name) -> Model, used for parent-class lookups.
models = {}
class Model(object):
  """Represents a <model> tag."""

  def __init__(self, spec, name, parent_model_name):
    self.spec = spec
    self.name = ObjNameForPython(name)
    if parent_model_name:
      self.parent_model_name = ObjNameForPython(parent_model_name)
    else:
      self.parent_model_name = None
    # items: dict used as a set of name-part tuples; a trailing '' element
    # marks an object (container) rather than a parameter.
    self.items = {}
    self.objects = {}
    self.object_sequence = []
    models[(self.spec.name, self.name)] = self

  def _AddItem(self, parts):
    # Record the item and, recursively, all of its ancestor objects.
    self.items[parts] = 1
    if not parts[-1]:
      if len(parts) > 2:
        self._AddItem(parts[:-2] + ('',))
    else:
      if len(parts) > 1:
        self._AddItem(parts[:-1] + ('',))

  def AddItem(self, name):
    """Add a dotted parameter/object name (e.g. 'A.B.{i}.C') to the model."""
    # '.{i}' becomes '-{i}' so list markers survive the split on '.'.
    parts = tuple(re.sub(r'\.{i}', r'-{i}', name).split('.'))
    self._AddItem(parts)

  def ItemsMatchingPrefix(self, prefix):
    """Yield each recorded item under prefix, relative to that prefix."""
    assert (not prefix) or (not prefix[-1])
    for i in sorted(self.items):
      if i[:len(prefix)-1] == prefix[:-1] and i != prefix:
        yield i[len(prefix)-1:]

  def Objectify(self, name, prefix):
    """Using self.items, fill self.objects and self.object_sequence.

    Args:
      name: the basename of this object in the hierarchy.
      prefix: a list of parent object names.
    Returns:
      The toplevel Object generated, which corresponds to the Model itself.
    """
    assert (not prefix) or (not prefix[-1])
    obj = Object(self, name, prefix)
    self.objects[prefix] = obj
    for i in self.ItemsMatchingPrefix(prefix):
      if len(i) == 1 and i[0]:
        # a parameter of this object
        obj.params.append(i[0])
      elif len(i) == 2 and not i[1]:
        # a sub-object of this object
        subobj = self.Objectify(i[0], prefix[:-1] + i)
        obj.object_sequence.append(subobj)
    return obj

  def MakeObjects(self):
    """Build the Object tree; call exactly once, after all AddItem calls."""
    assert not self.object_sequence
    obj = self.Objectify(self.name, ('',))
    self.object_sequence = [obj]

  def __str__(self):
    out = []
    for obj in self.object_sequence:
      out.append(Indented('', obj))
      out.append('')
    return '\n'.join(out)
def RenderParameter(model, prefix, xmlelement):
  """Add one <parameter> to the model under the given dotted prefix."""
  name = xmlelement.attrib.get('base', xmlelement.attrib.get('name', '<??>'))
  model.AddItem('%s%s' % (prefix, name))


def RenderObject(model, prefix, spec, xmlelement):
  """Add an <object> and its children to the model; recurses on sub-objects.

  Raises:
    KeyError: on an unrecognized child tag.
  """
  name = xmlelement.attrib.get('base', xmlelement.attrib.get('name', '<??>'))
  prefix += name
  model.AddItem(prefix)
  for i in xmlelement:
    if i.tag == 'parameter':
      RenderParameter(model, prefix, i)
    elif i.tag == 'object':
      RenderObject(model, prefix, spec, i)
    elif i.tag in ('description', 'uniqueKey'):
      pass
    else:
      raise KeyError(i.tag)


def RenderComponent(model, prefix, spec, xmlelement):
  """Add a <component>'s parameters/objects to the model, following refs.

  Raises:
    KeyError: on an unrecognized child tag.
  """
  for i in xmlelement:
    if i.tag == 'parameter':
      RenderParameter(model, prefix, i)
    elif i.tag == 'object':
      RenderObject(model, prefix, spec, i)
    elif i.tag == 'component':
      #pylint: disable-msg=W0612
      refspec, refname, ref = chunks[spec, 'component', i.attrib['ref']]
      # (an unused 'refpath' computation that used to live here was dead
      # code and has been removed)
      RenderComponent(model, prefix, refspec, ref)
    elif i.tag in ('profile', 'description'):
      pass
    else:
      raise KeyError(i.tag)
# Maps raw spec URN -> Spec instance.
specs = {}
class Spec(object):
  """Represents a <spec> tag."""

  def __init__(self, name):
    self.name = SpecNameForPython(name)
    self.aliases = []
    self.models = []
    self.deps = []
    specs[name] = self

  def _ResolveAliases(self):
    """Alias models imported from other specs; return their import lines.

    This logic used to be duplicated verbatim in __str__ and MakeObjects.
    Side effect: registers models[] entries for aliased names (idempotent).

    Returns:
      a list of (spec, import_statement) tuples, suitable for sorting.
    """
    implist = []
    for (fromspec, fromname), (tospec, toname) in self.aliases:
      fromname = ObjNameForPython(fromname)
      tospec = SpecNameForPython(tospec)
      toname = ObjNameForPython(toname)
      if (fromspec, fromname) not in models:
        models[(fromspec, fromname)] = models[(tospec, toname)]
        Log('aliased %r' % ((fromspec, fromname),))
      if toname != fromname:
        implist.append((tospec,
                        'from %s import %s as %s'
                        % (tospec, toname, fromname)))
      else:
        implist.append((tospec,
                        'from %s import %s'
                        % (tospec, toname)))
    return implist

  def __str__(self):
    """Render the whole spec as a generated Python module."""
    out = []
    for imp in sorted(self._ResolveAliases()):
      out.append(imp[1])
    out.append('')
    out.append('')
    for model in self.models:
      out.append(str(model))
      out.append('')
    if self.models:
      out.append("if __name__ == '__main__':")
      for model in self.models:
        out.append('  print core.DumpSchema(%s)' % model.name)
    return '\n'.join(out) + '\n'

  def MakeObjects(self):
    """Register aliased models; the computed import list is discarded."""
    self._ResolveAliases()
def main():
  """Parse the XML files given on the command line and emit .py modules."""
  o = bup.options.Options(optspec)
  (opt, unused_flags, extra) = o.parse(sys.argv[1:])
  output_dir = opt.output_dir or '.'
  Log('Output directory for generated files is %s' % output_dir)
  for filename in extra:
    ParseFile(filename)
  ResolveImports()
  Log('Finished parsing and importing.')
  items = sorted(chunks.items())
  for (specname, objtype, name), (refspec, refname, xmlelement) in items:
    spec = specs.get(specname, None) or Spec(specname)
    if objtype == 'model':
      objname = ObjNameForPython(name)
      parent = xmlelement.attrib.get('base', None)
      if SpecNameForPython(refspec) != spec.name:
        # Model defined in another spec: just alias it.
        spec.deps.append(refspec)
        spec.aliases.append(((spec.name, objname),
                             (refspec, refname)))
      else:
        # (the old 'if parent:' branch was redundant — both arms passed
        # parent_model_name=parent, since parent is None when absent)
        model = Model(spec, objname, parent_model_name=parent)
        RenderComponent(model, '', refspec, xmlelement)
        model.MakeObjects()
        spec.models.append(model)
  Log('Finished models.')
  for spec in specs.values():
    spec.MakeObjects()
  for specname, spec in sorted(specs.items()):
    pyspec = SpecNameForPython(specname)
    assert pyspec.startswith('tr') or pyspec.startswith('x_')
    # 'with' guarantees the generated file is flushed and closed even if
    # rendering raises; the old code leaked the handle.
    with open(os.path.join(output_dir, '%s.py' % pyspec), 'w') as outf:
      outf.write('#!/usr/bin/python\n'
                 '# Copyright 2011 Google Inc. All Rights Reserved.\n'
                 '#\n'
                 '# AUTO-GENERATED BY parse-schema.py\n'
                 '#\n'
                 '# DO NOT EDIT!!\n'
                 '#\n'
                 '#pylint: disable-msg=C6202\n'
                 '#pylint: disable-msg=C6409\n'
                 '#pylint: disable-msg=C6310\n'
                 '# These should not actually be necessary (bugs in gpylint?):\n'
                 '#pylint: disable-msg=E1101\n'
                 '#pylint: disable-msg=W0231\n'
                 '#\n'
                 '"""Auto-generated from spec: %s."""\n'
                 '\n'
                 'import core\n'
                 % specname)
      outf.write(str(spec))
# Run the code generator when invoked directly.
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for cwmp_session.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import time
import unittest
import google3
import cwmp_session
class CwmpSessionTest(unittest.TestCase):
"""tests for CwmpSession."""
def testStateConnect(self):
cs = cwmp_session.CwmpSession('')
self.assertTrue(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertFalse(cs.response_allowed())
# should be no change
cs.state_update(on_hold=True)
self.assertTrue(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertFalse(cs.response_allowed())
cs.state_update(cpe_to_acs_empty=True)
self.assertTrue(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertFalse(cs.response_allowed())
cs.state_update(acs_to_cpe_empty=True)
self.assertTrue(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertFalse(cs.response_allowed())
# transition to ACTIVE
cs.state_update(sent_inform=True)
self.assertFalse(cs.inform_required())
self.assertTrue(cs.request_allowed())
self.assertTrue(cs.response_allowed())
def testActive(self):
cs = cwmp_session.CwmpSession('')
cs.state_update(sent_inform=True)
self.assertFalse(cs.inform_required())
self.assertTrue(cs.request_allowed())
self.assertTrue(cs.response_allowed())
# should be no change
cs.state_update(sent_inform=True)
self.assertFalse(cs.inform_required())
self.assertTrue(cs.request_allowed())
self.assertTrue(cs.response_allowed())
cs.state_update(acs_to_cpe_empty=True)
self.assertFalse(cs.inform_required())
self.assertTrue(cs.request_allowed())
self.assertTrue(cs.response_allowed())
# transition to ONHOLD
cs.state_update(on_hold=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
# transition back to ACTIVE
cs.state_update(on_hold=False)
self.assertFalse(cs.inform_required())
self.assertTrue(cs.request_allowed())
self.assertTrue(cs.response_allowed())
# transition to NOMORE
cs.state_update(cpe_to_acs_empty=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
def testOnHold(self):
cs = cwmp_session.CwmpSession('')
cs.state_update(sent_inform=True)
cs.state_update(on_hold=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
# No change
cs.state_update(on_hold=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
cs.state_update(sent_inform=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
cs.state_update(cpe_to_acs_empty=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
cs.state_update(acs_to_cpe_empty=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
# back to ACTIVE
cs.state_update(on_hold=False)
self.assertFalse(cs.inform_required())
self.assertTrue(cs.request_allowed())
self.assertTrue(cs.response_allowed())
def testNoMore(self):
cs = cwmp_session.CwmpSession('')
# transition to NOMORE
cs.state_update(sent_inform=True)
cs.state_update(cpe_to_acs_empty=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
# should be no change
cs.state_update(sent_inform=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
cs.state_update(on_hold=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertTrue(cs.response_allowed())
# transition to DONE
cs.state_update(acs_to_cpe_empty=True)
self.assertFalse(cs.inform_required())
self.assertFalse(cs.request_allowed())
self.assertFalse(cs.response_allowed())
def testDone(self):
  """DONE is terminal: no further update may revive the session."""
  cs = cwmp_session.CwmpSession('')
  cs.state_update(sent_inform=True)
  cs.state_update(cpe_to_acs_empty=True)
  cs.state_update(acs_to_cpe_empty=True)
  # Check the initial DONE state, then poke it with every update the
  # original sequence used; nothing may change.
  updates = ({}, dict(sent_inform=True), dict(cpe_to_acs_empty=True),
             dict(acs_to_cpe_empty=True), dict(sent_inform=True))
  for kwargs in updates:
    if kwargs:
      cs.state_update(**kwargs)
    self.assertFalse(cs.inform_required())
    self.assertFalse(cs.request_allowed())
    self.assertFalse(cs.response_allowed())
class SimpleCacheObject(object):
  """Counts how often methods wrapped by cwmp_session.cache actually run."""

  def __init__(self):
    # Each counter records the number of times the (cached) method body
    # executed; cache hits must not increment them.
    self.cache_this_function_n = 0
    self.cache_this_function_args_n = 0

  @cwmp_session.cache
  def cache_this_function(self):
    self.cache_this_function_n = self.cache_this_function_n + 1

  @cwmp_session.cache
  def cache_function_with_args(self, arg1, arg2):  # pylint: disable-msg=W0613
    self.cache_this_function_args_n = self.cache_this_function_args_n + 1
@cwmp_session.cache
def SimpleCacheFunction():
  """Return the current time; caching makes repeated calls identical."""
  return time.time()
class SessionCacheTest(unittest.TestCase):
  """Tests for the cwmp_session.cache decorator."""

  def testCacheObject(self):
    objs = [SimpleCacheObject() for _ in range(3)]
    for _ in range(1001):
      for obj in objs:
        obj.cache_this_function()
    # Each instance ran its body exactly once; repeats hit the cache.
    for obj in objs:
      self.assertEqual(obj.cache_this_function_n, 1)
    cwmp_session.cache.flush()
    # After a flush, only instances that are called again recompute.
    for _ in range(101):
      objs[0].cache_this_function()
      objs[1].cache_this_function()
    self.assertEqual(objs[0].cache_this_function_n, 2)
    self.assertEqual(objs[1].cache_this_function_n, 2)
    self.assertEqual(objs[2].cache_this_function_n, 1)

  def testCacheFunction(self):
    first = SimpleCacheFunction()
    for _ in range(1000):
      self.assertEqual(first, SimpleCacheFunction())
    cwmp_session.cache.flush()
    # time.time() has moved on, so a fresh computation must differ.
    self.assertNotEqual(first, SimpleCacheFunction())

  def testCacheFunctionArgs(self):
    obj = SimpleCacheObject()
    # Every distinct argument tuple must miss the cache.
    for i in range(100):
      obj.cache_function_with_args(i, 0)
    self.assertEqual(obj.cache_this_function_args_n, 100)

  def testCacheFunctionComplicatedArgs(self):
    obj = SimpleCacheObject()
    nested = [1, 2, [3, 4], [5, 6, [7, 8, [9, 10]]], 11, 12]
    for i in range(10):
      obj.cache_function_with_args(i, nested)
    self.assertEqual(obj.cache_this_function_args_n, 10)
    # Repeating the same (99, nested) call computes only once more.
    for i in range(10):
      obj.cache_function_with_args(99, nested)
    self.assertEqual(obj.cache_this_function_args_n, 11)
# Allow running this test file directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fix sys.path so it can find our libraries.
This file is named google3.py because gpylint specifically ignores it when
complaining about the order of import statements - google3 should always
come before other non-python-standard imports.
"""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import fix_path #pylint: disable-msg=C6204,W0611
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mappings from api.ACS and api.CPE to SOAP encodings."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import datetime
import time
import traceback
import google3
import api
import core
import cwmpbool
import cwmpdate
import soap
def Soapify(value):
  """Map a Python value to a (SOAP xsi:type, string encoding) pair."""
  if hasattr(value, 'xsitype'):
    # The object declares its own SOAP type; trust it.
    return (value.xsitype, str(value))
  # NOTE: bool must be checked before int, since bool subclasses int.
  if isinstance(value, bool):
    return ('xsd:boolean', cwmpbool.format(value))
  if isinstance(value, int):
    return ('xsd:unsignedInt', str(value))
  if isinstance(value, float):
    return ('xsd:double', str(value))
  if isinstance(value, datetime.datetime):
    return ('xsd:dateTime', cwmpdate.format(value))
  return ('xsd:string', str(value))
class Encode(object):
  """Encodes outgoing CWMP RPCs as SOAP envelopes.

  Each public method returns the soap.Envelope for the corresponding RPC;
  str() of the returned object is the serialized XML.
  """

  def __init__(self):
    # ID placed in the SOAP header to correlate requests and responses.
    self.request_id = 'catawampus.{0!r}'.format(time.time())
    # Value for the HoldRequests SOAP header; None omits it.
    self.hold_requests = None

  def _Envelope(self):
    """Return a fresh soap.Envelope carrying our request ID and hold flag."""
    return soap.Envelope(self.request_id, self.hold_requests)

  def GetRPCMethods(self):
    with self._Envelope() as xml:
      xml['cwmp:GetRPCMethods'](None)
    return xml

  def Inform(self, root, events=None, max_envelopes=1,
             current_time=None, retry_count=0, parameter_list=None):
    """Encode a cwmp:Inform request.

    Args:
      root: exported data model root; DeviceInfo is looked up under
        Device. or, failing that, InternetGatewayDevice.
      events: list of (event_code, command_key) tuples; None means empty.
        (Previously a mutable default argument `[]`, a Python pitfall;
        None-sentinel keeps the call signature backward compatible.)
      max_envelopes: value for the MaxEnvelopes element.
      current_time: seconds since the epoch; None means "now".
      retry_count: value for the RetryCount element.
      parameter_list: list of (name, value) pairs; None means empty.
        (Also previously a mutable default argument.)
    Returns:
      the populated soap.Envelope.
    """
    if events is None:
      events = []
    if parameter_list is None:
      parameter_list = []
    with self._Envelope() as xml:
      with xml['cwmp:Inform']:
        with xml.DeviceId:
          try:
            di = root.GetExport('Device.DeviceInfo')
          except AttributeError:
            di = root.GetExport('InternetGatewayDevice.DeviceInfo')
          xml.Manufacturer(di.Manufacturer)
          xml.OUI(di.ManufacturerOUI)
          xml.ProductClass(di.ProductClass)
          xml.SerialNumber(di.SerialNumber)
        soaptype = 'EventStruct[{0}]'.format(len(events))
        event_attrs = {'soap-enc:arrayType': soaptype}
        with xml.Event(**event_attrs):
          for event in events:
            with xml.EventStruct:
              xml.EventCode(str(event[0]))
              if event[1] is not None:
                xml.CommandKey(str(event[1]))
              else:
                xml.CommandKey(None)
        if current_time is None:
          current_time = time.time()
        xml.MaxEnvelopes(str(max_envelopes))
        xml.CurrentTime(cwmpdate.format(current_time))
        xml.RetryCount(str(retry_count))
        soaptype = 'cwmp:ParameterValueStruct[{0}]'.format(len(parameter_list))
        parameter_list_attrs = {'soap-enc:arrayType': soaptype}
        with xml.ParameterList(**parameter_list_attrs):
          for name, value in parameter_list:
            with xml.ParameterValueStruct:
              xml.Name(name)
              soapyvalue = Soapify(value)
              xml.Value(soapyvalue[1], xsi__type=soapyvalue[0])
    return xml

  def GetParameterNames(self, parameter_path, next_level_only):
    with self._Envelope() as xml:
      with xml['cwmp:GetParameterNames']:
        xml.ParameterPath(parameter_path)
        # NextLevel is an xsd:boolean encoded as '1'/'0'.
        xml.NextLevel(next_level_only and '1' or '0')
    return xml

  def GetParameterValues(self, parameter_names):
    with self._Envelope() as xml:
      with xml['cwmp:GetParameterValues']:
        with xml.ParameterNames:
          for name in parameter_names:
            xml.string(name)
    return xml

  def SetParameterValues(self, parameter_list, parameter_key):
    """Encode SetParameterValues for (name, value) pairs in parameter_list."""
    with self._Envelope() as xml:
      with xml['cwmp:SetParameterValues']:
        soaptype = 'cwmp:ParameterValueStruct[{0}]'.format(len(parameter_list))
        parameter_list_attrs = {'soap-enc:arrayType': soaptype}
        with xml.ParameterList(**parameter_list_attrs):
          for name, value in parameter_list:
            with xml.ParameterValueStruct:
              xml.Name(str(name))
              xml.Value(str(value))
        xml.ParameterKey(str(parameter_key))
    return xml

  def AddObject(self, object_name, parameter_key):
    with self._Envelope() as xml:
      with xml['cwmp:AddObject']:
        xml.ObjectName(str(object_name))
        xml.ParameterKey(str(parameter_key))
    return xml

  def DeleteObject(self, object_name, parameter_key):
    with self._Envelope() as xml:
      with xml['cwmp:DeleteObject']:
        xml.ObjectName(str(object_name))
        xml.ParameterKey(str(parameter_key))
    return xml

  def TransferComplete(self, command_key, faultcode, faultstring,
                       starttime=None, endtime=None):
    """Encode TransferComplete; faultcode/faultstring fill FaultStruct."""
    with self._Envelope() as xml:
      with xml['cwmp:TransferComplete']:
        xml.CommandKey(str(command_key))
        with xml['FaultStruct']:
          xml.FaultCode(str(faultcode))
          xml.FaultString(str(faultstring))
        xml.StartTime(cwmpdate.format(starttime))
        xml.CompleteTime(cwmpdate.format(endtime))
    return xml
class SoapHandler(object):
  """Dispatches parsed CWMP SOAP requests to an implementation object.

  Subclasses define one method per RPC (e.g. GetParameterValues);
  Handle() looks the handler up by the RPC name found in the SOAP body.
  """

  def __init__(self, impl):
    self.impl = impl

  def _ExceptionListToFaultList(self, errors):
    """Generate a list of Soap Faults for SetParameterValues.

    Turn a list of api.Parameter{Type,Value}Exception objects returned
    from api.SetParameterValues into a list suitable for
    soap.SetParameterValuesFault.
    """
    faults = []
    for error in errors:
      if isinstance(error, api.ParameterTypeError):
        code = soap.CpeFault.INVALID_PARAM_TYPE
      elif isinstance(error, api.ParameterValueError):
        code = soap.CpeFault.INVALID_PARAM_VALUE
      elif isinstance(error, api.ParameterNameError):
        code = soap.CpeFault.INVALID_PARAM_NAME
      elif isinstance(error, api.ParameterNotWritableError):
        code = soap.CpeFault.NON_WRITABLE_PARAM
      else:
        code = soap.CpeFault.INTERNAL_ERROR
      faults.append((error.parameter, code, str(error)))
    return faults

  def Handle(self, body):
    """Parse a SOAP request, dispatch it, and build the response envelope.

    Args:
      body: the raw SOAP request.
    Returns:
      the response envelope, or None if no response should be sent.
    """
    body = str(body)
    obj = soap.Parse(body)
    request_id = obj.Header.get('ID', None)
    req = obj.Body[0]
    method = req.name
    with soap.Envelope(request_id, None) as xml:
      try:
        responder = self._GetResponder(method)
        result = responder(xml, req)
      except api.SetParameterErrors as e:
        faults = self._ExceptionListToFaultList(e.error_list)
        result = soap.SetParameterValuesFault(xml, faults)
      except KeyError as e:
        result = soap.SimpleFault(
            xml, cpefault=soap.CpeFault.INVALID_PARAM_NAME,
            faultstring='No such parameter: %s' % e.args[0])
      except IndexError as e:
        result = soap.SimpleFault(
            xml, cpefault=soap.CpeFault.INVALID_ARGUMENTS,
            faultstring=str(e))
      except NotImplementedError:
        cpefault = soap.CpeFault.METHOD_NOT_SUPPORTED
        faultstring = 'Unsupported RPC method: %s' % method
        result = soap.SimpleFault(xml, cpefault, faultstring)
      except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # still propagate instead of becoming INTERNAL_ERROR faults.
        result = soap.SimpleFault(
            xml, cpefault=soap.CpeFault.INTERNAL_ERROR,
            faultstring=traceback.format_exc())
    if result is not None:
      return xml
    else:
      return None

  def _GetResponder(self, method):
    """Map an RPC name from the wire to a handler method on self.

    Raises:
      NotImplementedError: if there is no handler for this RPC.
    """
    # `method` comes from the remote end; never let it reach private
    # attributes such as _GetResponder via getattr().
    if not method or method.startswith('_'):
      raise NotImplementedError()
    try:
      return getattr(self, method)
    except AttributeError:
      # Was a bare `except:`; only a missing attribute means "unsupported".
      raise NotImplementedError()

  def GetRPCMethods(self, xml, req):
    """Handle GetRPCMethods: list the RPCs the implementation supports."""
    with xml['cwmp:GetRPCMethodsResponse']:
      with xml.MethodList:
        for method in self.impl.GetRPCMethods():
          xml.string(method)
    return xml
class ACS(SoapHandler):
  """SoapHandler for the ACS side: handles RPCs originated by the CPE."""

  def __init__(self, acs):
    SoapHandler.__init__(self, impl=acs)

  def Inform(self, xml, req):
    """Pass a received Inform to the implementation and acknowledge it."""
    with xml['cwmp:InformResponse']:
      self.impl.Inform(None, req.DeviceId, req.Event, req.MaxEnvelopes,
                       req.CurrentTime, req.RetryCount, req.ParameterList)
      # Always respond with MaxEnvelopes=1.
      xml.MaxEnvelopes('1')
    return xml
class CPE(SoapHandler):
  """SoapHandler for the CPE side: handles RPCs originated by the ACS."""

  def __init__(self, cpe):
    SoapHandler.__init__(self, impl=cpe)

  def InformResponse(self, xml, req):
    """ACS acknowledged our Inform; nothing further to send."""
    self.impl.informResponseReceived()
    return None

  def GetParameterNames(self, xml, req):
    """Handle GetParameterNames; trailing dots are stripped from the path."""
    path = str(req.ParameterPath)
    if path.endswith('.'):
      path = path[:-1]
    nextlevel = cwmpbool.parse(req.NextLevel)
    names = list(self.impl.GetParameterNames(path, nextlevel))
    soaptype = 'ParameterInfoStruct[{0}]'.format(len(names))
    parameter_list_attrs = {'soap-enc:arrayType': soaptype}
    with xml['cwmp:GetParameterNamesResponse']:
      with xml.ParameterList(**parameter_list_attrs):
        for name in names:
          with xml['ParameterInfoStruct']:
            xml.Name(name)
            xml.Writable('1')  # TODO(apenwarr): detect true writability here
    return xml

  def GetParameterValues(self, xml, req):
    """Handle GetParameterValues; values are SOAP-typed via Soapify()."""
    names = [str(i) for i in req.ParameterNames]
    values = self.impl.GetParameterValues(names)
    soaptype = 'cwmp:ParameterValueStruct[{0}]'.format(len(values))
    parameter_list_attrs = {'soap-enc:arrayType': soaptype}
    with xml['cwmp:GetParameterValuesResponse']:
      with xml.ParameterList(**parameter_list_attrs):
        for name, value in values:
          with xml.ParameterValueStruct:
            xml.Name(name)
            soapyvalue = Soapify(value)
            xml.Value(soapyvalue[1], xsi__type=soapyvalue[0])
    return xml

  def SetParameterValues(self, xml, req):
    """Handle SetParameterValues; per-parameter errors become SOAP faults
    via SoapHandler.Handle's api.SetParameterErrors handler."""
    names = [(str(p[0]), str(p[1])) for p in req.ParameterList]
    code = self.impl.SetParameterValues(names, req.ParameterKey)
    with xml['cwmp:SetParameterValuesResponse']:
      xml.Status(str(int(code)))
    return xml

  def _CheckObjectName(self, name):
    # KeyError is turned into an INVALID_PARAM_NAME fault by Handle().
    if not name.endswith('.'):
      raise KeyError('ObjectName must end in period: %s' % name)

  def AddObject(self, xml, req):
    self._CheckObjectName(req.ObjectName)
    idx, status = self.impl.AddObject(req.ObjectName, req.ParameterKey)
    with xml['cwmp:AddObjectResponse']:
      xml.InstanceNumber(str(idx))
      xml.Status(str(int(status)))
    return xml

  def DeleteObject(self, xml, req):
    self._CheckObjectName(req.ObjectName)
    code = self.impl.DeleteObject(req.ObjectName, req.ParameterKey)
    with xml['cwmp:DeleteObjectResponse']:
      xml.Status(str(int(code)))
    return xml

  def SetParameterAttributes(self, xml, req):
    # ParameterList will be an array of NodeWrapper's, and each NodeWrapper
    # will have a list of values, for example:
    # (Name, InternetGatewayDevice.PeriodicStatistics.SampleSet.0.Status)
    # (Notification, true)
    # (NotificationChange, true)
    for param in req.ParameterList:
      self.impl.SetParameterAttributes(param)
    xml['cwmp:SetParameterAttributesResponse'](None)
    return xml

  def Download(self, xml, req):
    """Handle a Download RPC, mapping transfer errors to SOAP faults."""
    # Username/Password are optional elements in the Download request.
    try:
      username = req.Username
      password = req.Password
    except AttributeError:
      # Was a bare `except:`; only missing elements mean "no credentials".
      username = password = None
    try:
      (code, starttime, endtime) = self.impl.Download(
          command_key=req.CommandKey, file_type=req.FileType,
          url=req.URL, username=username, password=password,
          file_size=int(req.FileSize), target_filename=req.TargetFileName,
          delay_seconds=int(req.DelaySeconds),
          success_url=req.SuccessURL, failure_url=req.FailureURL)
    except core.ResourcesExceededError as e:
      return soap.SimpleFault(xml, soap.CpeFault.RESOURCES_EXCEEDED, str(e))
    except core.FileTransferProtocolError as e:
      return soap.SimpleFault(xml, soap.CpeFault.FILE_TRANSFER_PROTOCOL,
                              str(e))
    with xml['cwmp:DownloadResponse']:
      xml.Status(str(code))
      xml.StartTime(cwmpdate.format(starttime))
      xml.CompleteTime(cwmpdate.format(endtime))
    return xml

  def TransferCompleteResponse(self, xml, req):
    """Response to a TransferComplete sent by the CPE."""
    self.impl.transferCompleteResponseReceived()
    return None

  def GetQueuedTransfers(self, xml, req):
    transfers = self.impl.GetAllQueuedTransfers()
    with xml['cwmp:GetQueuedTransfersResponse']:
      for q in transfers:
        with xml['TransferList']:
          xml.CommandKey(q.CommandKey)
          xml.State(str(q.State))
    return xml

  def GetAllQueuedTransfers(self, xml, req):
    transfers = self.impl.GetAllQueuedTransfers()
    with xml['cwmp:GetAllQueuedTransfersResponse']:
      for q in transfers:
        with xml['TransferList']:
          xml.CommandKey(q.CommandKey)
          xml.State(str(q.State))
          xml.IsDownload(str(q.IsDownload))
          xml.FileType(str(q.FileType))
          xml.FileSize(str(q.FileSize))
          xml.TargetFileName(str(q.TargetFileName))
    return xml

  def CancelTransfer(self, xml, req):
    try:
      self.impl.CancelTransfer(req.CommandKey)
    except core.CancelNotPermitted as e:
      return soap.SimpleFault(xml, soap.CpeFault.DOWNLOAD_CANCEL_NOTPERMITTED,
                              str(e))
    xml['cwmp:CancelTransferResponse'](None)
    return xml

  def Reboot(self, xml, req):
    self.impl.Reboot(req.CommandKey)
    xml['cwmp:RebootResponse'](None)
    return xml
def main():
  """Smoke test: build a fake data model and print a few RPC round trips."""
  class FakeDeviceInfo(object):
    # Minimal stand-in for Device.DeviceInfo used by Encode.Inform.
    Manufacturer = 'manufacturer'
    ManufacturerOUI = 'oui'
    ProductClass = 'productclass'
    SerialNumber = 'serialnumber'
  root = core.Exporter()
  root.Export(params=['Test', 'Test2'], lists=['Sub'])
  root.Test = '5'
  root.Test2 = 6
  root.SubList = {}
  root.Sub = core.Exporter
  root.DeviceInfo = FakeDeviceInfo()
  real_acs = api.ACS()
  real_cpe = api.CPE(real_acs, root, None)
  cpe = CPE(real_cpe)
  acs = ACS(real_acs)
  encode = Encode()
  # Encode each RPC, feed it straight into the handler, print the response.
  print cpe.Handle(encode.GetRPCMethods())
  print cpe.Handle(encode.GetParameterNames('', False))
  print cpe.Handle(encode.GetParameterValues(['Test']))
  print cpe.Handle(encode.SetParameterValues([('Test', 6), ('Test2', 7)], 77))
  print cpe.Handle(encode.GetParameterValues(['Test', 'Test2']))
  print cpe.Handle(encode.AddObject('Sub.', 5))
  print cpe.Handle(encode.DeleteObject('Sub.0', 5))
  print acs.Handle(encode.Inform(root, [], 1, None, 1, []))

if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for persistobj.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import os
import shutil
import tempfile
import unittest
import google3
import persistobj
class PersistentObjectTest(unittest.TestCase):
  """Tests for persistobj.py PersistentObject."""

  def setUp(self):
    # Each test works in its own scratch directory.
    self.tmpdir = tempfile.mkdtemp()

  def tearDown(self):
    shutil.rmtree(self.tmpdir)

  def testPersistentObjectAttrs(self):
    attrs = {'foo1': 'bar1', 'foo2': 'bar2', 'foo3': 3}
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj', **attrs)
    self.assertEqual(pobj.foo1, 'bar1')
    self.assertEqual(pobj.foo2, 'bar2')
    self.assertEqual(pobj.foo3, 3)

  def testReversibleEncoding(self):
    attrs = dict(foo1='bar1', foo3=3)
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj', **attrs)
    # JSON encode followed by decode must round-trip all attributes.
    decoded = pobj._FromJson(pobj._ToJson())
    self.assertEqual(sorted(attrs.items()), sorted(decoded.items()))

  def testWriteToFile(self):
    attrs = dict(foo1='bar1', foo3=3)
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj', **attrs)
    # The constructor persists state; the on-disk JSON must match.
    with open(pobj.filename) as f:
      decoded = pobj._FromJson(f.read())
    self.assertEqual(sorted(attrs.items()), sorted(decoded.items()))

  def testReadFromFile(self):
    with tempfile.NamedTemporaryFile(dir=self.tmpdir, delete=False) as f:
      f.write('{"foo": "bar", "baz": 4}')
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj',
                                       filename=f.name)
    self.assertEqual(pobj.foo, 'bar')
    self.assertEqual(pobj.baz, 4)

  def testReadFromCorruptFile(self):
    with tempfile.NamedTemporaryFile(dir=self.tmpdir, delete=False) as f:
      f.write('this is not a JSON file')
    self.assertRaises(ValueError, persistobj.PersistentObject,
                      self.tmpdir, 'TestObj', filename=f.name)

  def testUpdate(self):
    attrs = dict(foo1='bar1', foo3=3)
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj', **attrs)
    reread = persistobj.PersistentObject(self.tmpdir, 'TestObj',
                                         filename=pobj.filename)
    self.assertEqual(sorted(pobj.items()), sorted(reread.items()))
    # An Update() must be visible to a later reader of the same file.
    attrs['foo1'] = 'bar2'
    pobj.Update(**attrs)
    reread2 = persistobj.PersistentObject(self.tmpdir, 'TestObj',
                                          filename=pobj.filename)
    self.assertEqual(sorted(pobj.items()), sorted(reread2.items()))

  def testUpdateInline(self):
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj',
                                       foo1='bar1', foo3=3)
    pobj.Update(foo1='bar2')
    self.assertEqual(pobj.foo1, 'bar2')

  def testUpdateInlineMultiple(self):
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj',
                                       foo1='bar1', foo3=3)
    pobj.Update(foo1='bar2', foo3=4)
    self.assertEqual(pobj.foo1, 'bar2')
    self.assertEqual(pobj.foo3, 4)

  def testUpdateInlineDict(self):
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj',
                                       foo1='bar1', foo3=3)
    pobj.Update(**dict(foo1='bar2'))
    self.assertEqual(pobj.foo1, 'bar2')

  def testUpdateFails(self):
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj',
                                       foo1='bar1', foo3=3)
    # Pointing objdir at a nonexistent path makes Update() raise OSError.
    pobj.objdir = '/this_path_should_not_exist_hijhgvWRQ4MVVSDHuheifuh'
    self.assertRaises(OSError, pobj.Update, foo1='bar2', foo3=3)

  def testGetPersistentObjects(self):
    payloads = ['{"foo": "bar1", "baz": 4}',
                '{"foo": "bar2", "baz": 5}',
                '{"foo": "bar3", "baz": 6}',
                'This is not a JSON file']  # tests corrupt file handling
    for payload in payloads:
      with tempfile.NamedTemporaryFile(
          dir=self.tmpdir, prefix='tr69_dnld', delete=False) as f:
        f.write(payload)
    objs = persistobj.GetPersistentObjects(self.tmpdir)
    # The corrupt file is skipped; all valid files are returned.
    self.assertEqual(len(objs), len(payloads) - 1)
    seen = set((entry.foo, entry.baz) for entry in objs)
    for want in [('bar1', 4), ('bar2', 5), ('bar3', 6)]:
      self.assertTrue(want in seen)

  def testDefaultValue(self):
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj', foo=3)
    # getattr() defaults apply to attributes that were never stored.
    self.assertEqual(getattr(pobj, 'foo2', 2), 2)

  def testDelete(self):
    pobj = persistobj.PersistentObject(self.tmpdir, 'TestObj',
                                       foo1='bar1', foo3=3)
    pobj.Delete()
    self.assertRaises(OSError, os.stat, pobj.filename)
# Allow running this test file directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for api_soap.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import datetime
import unittest
import xml.etree.ElementTree as ET
import google3
import api_soap
# Reference TransferComplete envelope, kept for eyeball comparison.
# NOTE(review): not referenced by the test methods visible in this file;
# testTransferComplete parses the generated XML instead of string-matching.
expectedTransferComplete = """<?xml version="1.0" encoding="utf-8"?>
<soap:Envelope xmlns:cwmp="urn:dslforum-org:cwmp-1-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:soap-enc="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<soap:Header>
</soap:Header>
<soap:Body>
<cwmp:TransferComplete>
<CommandKey>cmdkey</CommandKey>
<FaultStruct>
<FaultCode>123</FaultCode>
<FaultString>faultstring</FaultString>
</FaultStruct>
<StartTime>2011-12-05T12:01:02Z</StartTime>
<CompleteTime>2011-12-05T12:01:03Z</CompleteTime>
</cwmp:TransferComplete>
</soap:Body>
</soap:Envelope>"""
# ElementTree requires {namespace-uri} prefixes in find() paths.
SOAPNS = '{http://schemas.xmlsoap.org/soap/envelope/}'
CWMPNS = '{urn:dslforum-org:cwmp-1-2}'
class RpcMessageTest(unittest.TestCase):
  """Tests for formatting of XML objects."""

  def testTransferComplete(self):
    encode = api_soap.Encode()
    # Fixed: the original wrote 01/02, leading-zero (octal-style) literals
    # that are a syntax error in Python 3 and misleading in Python 2.
    start = datetime.datetime(2011, 12, 5, 12, 1, 2)
    end = datetime.datetime(2011, 12, 5, 12, 1, 3)
    xml = str(encode.TransferComplete('cmdkey', 123, 'faultstring', start, end))
    root = ET.fromstring(str(xml))
    xfer = root.find(SOAPNS + 'Body/' + CWMPNS + 'TransferComplete')
    # Element truthiness depends on child count (deprecated); test for
    # presence explicitly instead.
    self.assertTrue(xfer is not None)
    self.assertEqual(xfer.find('CommandKey').text, 'cmdkey')
    self.assertEqual(xfer.find('FaultStruct/FaultCode').text, '123')
    self.assertEqual(xfer.find('FaultStruct/FaultString').text, 'faultstring')
    self.assertTrue(xfer.find('StartTime').text)
    self.assertTrue(xfer.find('CompleteTime').text)
class ApiSoapTest(unittest.TestCase):
  """Tests for methods in api_soap.py."""

  class ThisHasXsiType(object):
    # Any object exposing an xsitype attribute picks its own SOAP type.
    xsitype = 'xsd:foo'

    def __str__(self):
      return 'foo'

  def testSoapify(self):
    # (input value, expected (xsi:type, encoded string)) pairs.
    cases = [
        (self.ThisHasXsiType(), ('xsd:foo', 'foo')),
        (True, ('xsd:boolean', '1')),
        (False, ('xsd:boolean', '0')),
        (100, ('xsd:unsignedInt', '100')),
        (3.14159, ('xsd:double', '3.14159')),
        (datetime.datetime(1999, 12, 31, 23, 59, 58, 999999),
         ('xsd:dateTime', '1999-12-31T23:59:58.999999Z')),
        (datetime.datetime(1999, 12, 31, 23, 59, 58),
         ('xsd:dateTime', '1999-12-31T23:59:58Z')),
    ]
    for value, expected in cases:
      self.assertEqual(api_soap.Soapify(value), expected)
# Allow running this test file directly.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Protocol helpers for multiline sh-style-quoted blocks.
Blocks are formatted as lines, separated by newline characters, each
containing one or more quoted words. The block ends at the first line
containing zero words.
For example:
this is a line
so "is this
stuff"
this 'also' is a "'line'"
this\ is\ one\ word
""
still going
Parses as follows:
[['this', 'is', 'a', 'line'],
['so', 'is this\nstuff'],
['this', 'also', 'is', 'a', "'line'"],
['this is one word'],
[''],
['still', 'going']]
The net result is a human-friendly protocol that can be used for many
purposes.
"""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import bup.shquote
import mainloop
class QuotedBlockProtocol(object):
  """Implement the QuotedBlock protocol.

  Feed incoming bytes to GotData(); at the end of each block (a blank
  line) the handler callback is invoked with the accumulated lines, and
  its return value is quoted and handed back to the caller of GotData().
  See QuotedBlockStreamer for a mainloop-integrated wrapper.
  """

  def __init__(self, handle_lines_func):
    """Initialize a QuotedBlockProtocol instance.

    Args:
      handle_lines_func: called as handle_lines_func(lines) at the end of
        each incoming block; returns a list of lines to send back.
    """
    self.handle_lines_func = handle_lines_func
    self.partial_line = ''
    self.lines = []

  def GotData(self, data):
    """Accumulate incoming bytes; returns the encoded reply at block end.

    Handles lines containing a quoted newline by buffering until the
    quoted section is finished, then passing the merged line to GotLine().

    Args:
      data: a string of bytes received from the remote.
    Returns:
      None, or a string that should be returned to the remote.
    """
    line = self.partial_line + data
    #pylint: disable-msg=W0612
    _, unfinished = bup.shquote.unfinished_word(line)
    if unfinished:
      # A quoted section is still open; keep buffering.
      self.partial_line = line
      return None
    self.partial_line = ''
    return self.GotLine(line)

  def GotLine(self, line):
    """Process one complete (fully-quoted) line of data.

    Prefer GotData() unless you are certain the line contains no
    unfinished quoted sections.

    Args:
      line: a parseable string of bytes received from the remote.
    Returns:
      None, or a string that should be returned to the remote.
    """
    if not line.strip():
      # A blank line ends the block: hand it to the callback.
      block, self.lines = self.lines, []
      result = self.handle_lines_func(block)
      return None if result is None else self.RenderBlock(result)
    # Otherwise append another parsed line to the current block.
    #pylint: disable-msg=W0612
    words = bup.shquote.quotesplit(line)
    self.lines.append([word for offset, word in words])

  def RenderBlock(self, lines):
    """Quote the given lines array back into a parseable string."""
    rendered = [bup.shquote.quotify_list([str(word) for word in line]) + '\r\n'
                for line in (lines or [])]
    # The trailing blank line terminates the block on the wire.
    return ''.join(rendered) + '\r\n'
class QuotedBlockStreamer(object):
"""A simple helper that can be used as the callback to MainLoop.Listen.
Derive from this class and override ProcessBlock() to change how you want
to interpret and respond to blocks. The listener will automatically
accept incoming connections, and ProcessBlock() will be called
automatically for each full block received from the remote. It should
return the lines that should be sent back to the remote. We send back
the lines automatically using tornado.IOStream.
Example:
loop = mainloop.MainLoop()
loop.ListenUnix('/tmp/cwmpd.sock', QuotedBlockStreamer)
loop.Start()
"""
def __init__(self, sock, address):
"""Initialize a QuotedBlockStreamer.
Args:
sock: the socket provided by MainLoop.Listen
address: the address provided by MainLoop.Listen
"""
self.sock = sock
self.address = address
qb = QuotedBlockProtocol(self.ProcessBlock)
mainloop.LineReader(sock, address, qb.GotData)
def ProcessBlock(self, lines):
"""Redefine this function to respond to incoming requests how you want."""
print 'lines: %r' % (lines,)
return [['RESPONSE:']] + lines + [['EOR']]
def main():
  """Demo server: echo each received block wrapped in RESPONSE:/EOR lines."""
  loop = mainloop.MainLoop()
  # Listen on both a TCP port and a Unix-domain socket.
  loop.ListenInet6(('', 12999), QuotedBlockStreamer)
  loop.ListenUnix('/tmp/cwmpd.sock', QuotedBlockStreamer)
  print 'hello'
  loop.Start()

if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#pylint: disable-msg=W0404
#
"""Implement the inner handling for tr-98/181 ManagementServer."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import copy
import datetime
import math
import random
import re
import socket
import time
import urlparse
import google3
import tornado.ioloop
import cwmpbool
import cwmpdate
# Allow unit tests to override with a mock
# (module-level indirection: tests substitute a fake timer class here).
PERIODIC_CALLBACK = tornado.ioloop.PeriodicCallback
class DefaultSetAcsUrl(object):
  """Fallback ACS-URL setter that refuses every update."""

  def SetAcsUrl(self, url):
    # Platforms that can persist an ACS URL provide their own object;
    # the default simply reports failure.
    return False
class ServerParameters(object):
  """Holds the settable state of CpeManagementServer.

  Kept in one object so transactions can deepcopy and restore it wholesale.
  """

  def __init__(self):
    self.EnableCWMP = True
    self.Username = ''
    self.Password = ''
    # The default password is trivial on purpose: during the initial
    # Inform exchange the ACS generally sets
    # ConnectionRequest{Username,Password} to values only it knows.
    # If something goes wrong, a well-known password lets the ACS wake
    # us up and try again.
    self.ConnectionRequestUsername = 'catawampus'
    self.ConnectionRequestPassword = 'cwmp'
    self.CWMPRetryMinimumWaitInterval = 5
    self.CWMPRetryIntervalMultiplier = 2000
    self.DefaultActiveNotificationThrottle = 0
    self._PeriodicInformEnable = True
    # Once every 15 minutes, plus or minus up to one minute so sessions
    # from many devices spread out.
    self._PeriodicInformInterval = (15 * 60) + random.randint(-60, 60)
    self._PeriodicInformTime = 0
class CpeManagementServer(object):
"""Inner class implementing tr-98 & 181 ManagementServer."""
def __init__(self, platform_config, port, ping_path,
             acs_url=None, get_parameter_key=None,
             start_periodic_session=None, ioloop=None,
             restrict_acs_hosts=None):
  """Initialize the management-server model.

  Args:
    platform_config: platform object; its GetAcsUrl() result is validated
      here (other uses are elsewhere in this class).
    port: stored as self.port.  # presumably the connection-request port
    ping_path: stored as self.ping_path.
    acs_url: optional ACS URL; validated before being stored.
    get_parameter_key: callable, stored for later use.
    start_periodic_session: callback, stored for later use.
    ioloop: tornado IOLoop; defaults to the singleton (injectable in tests).
    restrict_acs_hosts: host list consulted by ValidateAcsUrl().
  Raises:
    ValueError: if acs_url or the platform's stored URL fails validation.
  """
  self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
  # restrict_acs_hosts must be set before ValidateAcsUrl() is called.
  self.restrict_acs_hosts = restrict_acs_hosts
  self.ValidateAcsUrl(acs_url)
  if platform_config:
    self.ValidateAcsUrl(platform_config.GetAcsUrl())
  self.acs_url = acs_url
  self.platform_config = platform_config
  self.port = port
  self.ping_path = ping_path
  self.get_parameter_key = get_parameter_key
  self.start_periodic_session = start_periodic_session
  self.my_ip = None
  self._periodic_callback = None
  self._start_periodic_timeout = None
  # config_copy holds a rollback snapshot; see StartTransaction().
  self.config_copy = None
  self.config = ServerParameters()
  self.ConfigurePeriodicInform()
def StartTransaction(self):
  # Snapshot config so AbandonTransaction() can restore it.  Idempotent:
  # only the first call within a transaction takes the snapshot.
  if self.config_copy is None:
    self.config_copy = copy.deepcopy(self.config)

def CommitTransaction(self):
  # Keep the live config; drop the rollback snapshot.
  self.config_copy = None

def AbandonTransaction(self):
  # Restore the snapshot taken by StartTransaction() and re-arm the
  # periodic inform, since its settings may have been rolled back.
  self.config = self.config_copy
  self.config_copy = None
  self.ConfigurePeriodicInform()
def ValidateAcsUrl(self, value):
"""Checks if the URL passed is acceptable. If not raises an exception."""
if not self.restrict_acs_hosts or not value:
return
# Require https for the url scheme.
split_url = urlparse.urlsplit(value)
if split_url.scheme != 'https':
raise ValueError('The ACS Host must be https: %s' % str(value))
# Iterate over the restrict domain name list and see if one of
# the restricted domain names matches the supplied url host name.
restrict_hosts = re.split(r'[\s,]+', self.restrict_acs_hosts)
for host in restrict_hosts:
# Check the full hostname.
if split_url.hostname == host:
return
# Check against the restrict host of form '.foo.com'
if not host.startswith('.'):
dotted_host = '.' + host
else:
dotted_host = host
if split_url.hostname.endswith(dotted_host):
return
# If we don't find a valid host, raise an exception.
raise ValueError('The ACS Host is not permissible: %s' % str(value))
@property
def CWMPRetryMinimumWaitInterval(self):
return self.config.CWMPRetryMinimumWaitInterval
@CWMPRetryMinimumWaitInterval.setter
def CWMPRetryMinimumWaitInterval(self, value):
self.config.CWMPRetryMinimumWaitInterval = int(value)
@property
def CWMPRetryIntervalMultiplier(self):
return self.config.CWMPRetryIntervalMultiplier
@CWMPRetryIntervalMultiplier.setter
def CWMPRetryIntervalMultiplier(self, value):
self.config.CWMPRetryIntervalMultiplier = int(value)
@property
def ConnectionRequestPassword(self):
return self.config.ConnectionRequestPassword
@ConnectionRequestPassword.setter
def ConnectionRequestPassword(self, value):
self.config.ConnectionRequestPassword = value
@property
def ConnectionRequestUsername(self):
return self.config.ConnectionRequestUsername
@ConnectionRequestUsername.setter
def ConnectionRequestUsername(self, value):
self.config.ConnectionRequestUsername = value
@property
def DefaultActiveNotificationThrottle(self):
return self.config.DefaultActiveNotificationThrottle
@DefaultActiveNotificationThrottle.setter
def DefaultActiveNotificationThrottle(self, value):
self.config.DefaultActiveNotificationThrottle = int(value)
@property
def EnableCWMP(self):
return True
@property
def Password(self):
return self.config.Password
@Password.setter
def Password(self, value):
self.config.Password = value
@property
def Username(self):
return self.config.Username
@Username.setter
def Username(self, value):
self.config.Username = value
def GetURL(self):
return self.acs_url or self.platform_config.GetAcsUrl()
def SetURL(self, value):
self.ValidateAcsUrl(value)
if self.acs_url:
self.acs_url = value
else:
self.platform_config.SetAcsUrl(value)
URL = property(GetURL, SetURL, None, 'tr-98/181 ManagementServer.URL')
def _isIp6Address(self, ip):
# pylint: disable-msg=W0702
try:
socket.inet_pton(socket.AF_INET6, ip)
except:
return False
return True
def _formatIP(self, ip):
if self._isIp6Address(ip):
return '[' + ip + ']'
else:
return ip
def GetConnectionRequestURL(self):
if self.my_ip and self.port and self.ping_path:
path = self.ping_path if self.ping_path[0] != '/' else self.ping_path[1:]
ip = self._formatIP(self.my_ip)
return 'http://{0}:{1!s}/{2}'.format(ip, self.port, path)
else:
return ''
ConnectionRequestURL = property(
GetConnectionRequestURL, None, None,
'tr-98/181 ManagementServer.ConnectionRequestURL')
def GetParameterKey(self):
if self.get_parameter_key is not None:
return self.get_parameter_key()
else:
return ''
ParameterKey = property(GetParameterKey, None, None,
'tr-98/181 ManagementServer.ParameterKey')
def GetPeriodicInformEnable(self):
return self.config._PeriodicInformEnable
def SetPeriodicInformEnable(self, value):
self.config._PeriodicInformEnable = cwmpbool.parse(value)
self.ConfigurePeriodicInform()
PeriodicInformEnable = property(
GetPeriodicInformEnable, SetPeriodicInformEnable, None,
'tr-98/181 ManagementServer.PeriodicInformEnable')
def GetPeriodicInformInterval(self):
return self.config._PeriodicInformInterval
def SetPeriodicInformInterval(self, value):
self.config._PeriodicInformInterval = int(value)
self.ConfigurePeriodicInform()
PeriodicInformInterval = property(
GetPeriodicInformInterval, SetPeriodicInformInterval, None,
'tr-98/181 ManagementServer.PeriodicInformInterval')
def GetPeriodicInformTime(self):
return self.config._PeriodicInformTime
def SetPeriodicInformTime(self, value):
self.config._PeriodicInformTime = value
self.ConfigurePeriodicInform()
PeriodicInformTime = property(
GetPeriodicInformTime, SetPeriodicInformTime, None,
'tr-98/181 ManagementServer.PeriodicInformTime')
def ConfigurePeriodicInform(self):
"""Commit changes to PeriodicInform parameters."""
if self._periodic_callback:
self._periodic_callback.stop()
self._periodic_callback = None
if self._start_periodic_timeout:
self.ioloop.remove_timeout(self._start_periodic_timeout)
self._start_periodic_timeout = None
# Delete the old periodic callback.
if self._periodic_callback:
self._periodic_callback.stop()
self._periodic_callback = None
if (self.config._PeriodicInformEnable and
self.config._PeriodicInformInterval > 0):
msec = self.config._PeriodicInformInterval * 1000
self._periodic_callback = PERIODIC_CALLBACK(self.start_periodic_session,
msec, self.ioloop)
if self.config._PeriodicInformTime:
# PeriodicInformTime is just meant as an offset, not an actual time.
# So if it's 25.5 hours in the future and the interval is 1 hour, then
# the interesting part is the 0.5 hours, not the 25.
#
# timetuple might be in the past, but that's okay; the modulus
# makes sure it's never negative. (ie. (-3 % 5) == 2, in python)
timetuple = cwmpdate.parse(self.config._PeriodicInformTime).timetuple()
offset = ((time.mktime(timetuple) - time.time())
% float(self.config._PeriodicInformInterval))
else:
offset = 0.0
self._start_periodic_timeout = self.ioloop.add_timeout(
datetime.timedelta(seconds=offset), self.StartPeriodicInform)
def StartPeriodicInform(self):
self._periodic_callback.start()
def SessionRetryWait(self, retry_count):
"""Calculate wait time before next session retry.
See $SPEC3 section 3.2.1 for a description of the algorithm.
Args:
retry_count: integer number of retries attempted so far.
Returns:
Number of seconds to wait before initiating next session.
"""
if retry_count == 0:
return 0
periodic_interval = self.config._PeriodicInformInterval
if self.config._PeriodicInformInterval <= 0:
periodic_interval = 30
c = 10 if retry_count >= 10 else retry_count
m = float(self.config.CWMPRetryMinimumWaitInterval)
k = float(self.config.CWMPRetryIntervalMultiplier) / 1000.0
start = m * math.pow(k, c-1)
stop = start * k
# pin start/stop to have a maximum value of PerdiodInfomInterval
start = int(min(start, periodic_interval/k))
stop = int(min(stop, periodic_interval))
return random.randrange(start, stop)
def main():
  """No-op entry point; this module is meant to be imported, not run."""


if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for http_download.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
from collections import namedtuple #pylint: disable-msg=C6202
import shutil
import tempfile
import unittest
import google3
import http_download
# MockHttpClient instances register themselves here so tests can inspect
# the fetches that were attempted; cleared in setUp/tearDown.
mock_http_clients = []
class MockHttpClient(object):
  """Stand-in for tornado's HTTP client; records fetch() arguments."""

  def __init__(self, io_loop=None):
    mock_http_clients.append(self)
    self.request = None
    self.callback = None
    self.did_fetch = False

  def fetch(self, request, callback):
    # Record the call instead of performing any network I/O.
    self.request = request
    self.callback = callback
    self.did_fetch = True
class MockHttpResponse(object):
  """Minimal stand-in for tornado's HTTPResponse with an error code."""

  def __init__(self, errorcode):
    # Build a namedtuple *instance* carrying the code. The original
    # assigned the code as a class attribute on the freshly created
    # namedtuple type, which worked only by accident.
    self.error = namedtuple('error', 'code')(code=errorcode)
    self.headers = []
class MockIoloop(object):
  """Stand-in for tornado's IOLoop; records add_timeout() arguments."""

  def __init__(self):
    self.callback = None
    self.time = None

  def add_timeout(self, time, callback):
    # Just remember the scheduling request; never actually fire it.
    self.callback = callback
    self.time = time
class HttpDownloadTest(unittest.TestCase):
  """tests for http_download.py HttpDownload."""

  def setUp(self):
    # Substitute a recording mock for the real tornado HTTP client.
    self.tmpdir = tempfile.mkdtemp()
    http_download.HTTPCLIENT = MockHttpClient
    self.dl_cb_faultcode = None
    self.dl_cb_faultstring = None
    del mock_http_clients[:]

  def tearDown(self):
    shutil.rmtree(self.tmpdir)
    del mock_http_clients[:]

  def testDigest(self):
    # Known-answer test: these inputs and the expected digest are the
    # worked example from RFC 2617 (HTTP digest authentication).
    expected = '6629fae49393a05397450978507c4ef1'
    actual = http_download.calc_http_digest(
        'GET',
        '/dir/index.html',
        'auth',
        nonce='dcd98b7102dd2f0e8b11d0f600bfb0c093',
        cnonce='0a4f113b',
        nc='00000001',
        username='Mufasa',
        password='Circle Of Life',
        realm='testrealm@host.com')
    self.assertEqual(expected, actual)

  def downloadCallback(self, faultcode, faultstring, filename):
    # Records the download-complete arguments for later assertions.
    self.dl_cb_faultcode = faultcode
    self.dl_cb_faultstring = faultstring
    self.dl_cb_filename = filename

  def testFetch(self):
    ioloop = MockIoloop()
    username = 'uname'
    password = 'pword'
    url = 'scheme://host:port/'
    dl = http_download.HttpDownload(url, username=username, password=password,
                                    download_complete_cb=self.downloadCallback,
                                    ioloop=ioloop)
    dl.fetch()
    # Exactly one HTTP client should be created, carrying our credentials.
    self.assertEqual(len(mock_http_clients), 1)
    ht = mock_http_clients[0]
    self.assertTrue(ht.did_fetch)
    self.assertTrue(ht.request is not None)
    self.assertEqual(ht.request.auth_username, username)
    self.assertEqual(ht.request.auth_password, password)
    self.assertEqual(ht.request.url, url)
    # An HTTP error response must surface as a DOWNLOAD_FAILED fault.
    resp = MockHttpResponse(418)
    ht.callback(resp)
    self.assertEqual(self.dl_cb_faultcode, 9010)  # DOWNLOAD_FAILED
    self.assertTrue(self.dl_cb_faultstring)
    self.assertFalse(self.dl_cb_filename)


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#
"""Tests for types.py."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import unittest
import google3
import tr.types
class TestObject(object):
  """Holds one class attribute of each tr.types descriptor under test."""
  a = tr.types.Attr()
  b = tr.types.Bool()
  s = tr.types.String('defaultstring')
  i = tr.types.Int()
  u = tr.types.Unsigned()
  f = tr.types.Float(4)
  e = tr.types.Enum(['one', 'two', 'three', 7, None])
  e2 = tr.types.Enum(['thing'])
class TriggerObject(object):
  """Object with tr.types.Trigger-wrapped attributes; counts callbacks."""

  def __init__(self):
    self.xval = 7
    self.triggers = 0

  def Triggered(self):
    # Invoked by the Trigger wrappers; testTriggers asserts the count.
    self.triggers += 1

  @property
  def val(self):
    return self.xval

  # Trigger wraps the property's setter so that assignments are observed
  # (testTriggers shows the count only rises when the value changes).
  @tr.types.Trigger
  @val.setter
  def val(self, value):
    self.xval = value

  # NOTE: these descriptors are deliberately declared after the methods;
  # they are ordinary class attributes wrapped with Trigger.
  a = tr.types.Trigger(tr.types.Attr())
  b = tr.types.Trigger(tr.types.Bool())
  i = tr.types.Trigger(tr.types.Int())
class ReadOnlyObject(object):
  """Holds one of each read-only tr.types descriptor, with defaults."""
  b = tr.types.ReadOnlyBool(True)
  i = tr.types.ReadOnlyInt('5')
  s = tr.types.ReadOnlyString('foo')
  e = tr.types.ReadOnlyEnum(['x', 'y', 'z'])
class TypesTest(unittest.TestCase):
  """Tests for the tr.types attribute descriptors."""

  def testTypes(self):
    obj = TestObject()
    # Unconfigured attributes default to None; String was given a default.
    self.assertEquals(obj.a, None)
    self.assertEquals(obj.b, None)
    self.assertEquals(obj.s, 'defaultstring')
    self.assertEquals(obj.i, None)
    self.assertEquals(obj.e, None)
    self.assertEquals(obj.e2, None)
    o1 = object()
    obj.a = o1
    self.assertEquals(obj.a, o1)
    obj.a = None
    self.assertEquals(obj.a, None)
    # Bool coerces numbers and (case-insensitive) strings to real
    # True/False, and rejects anything else with ValueError.
    obj.b = 0
    self.assertEquals(obj.b, 0)
    self.assertNotEqual(obj.b, None)
    obj.b = False
    self.assertEquals(obj.b, 0)
    obj.b = 'FaLSe'
    self.assertEquals(obj.b, 0)
    self.assertTrue(obj.b is False)
    self.assertTrue(obj.b is not 0)
    obj.b = 'tRuE'
    self.assertEquals(obj.b, 1)
    self.assertTrue(obj.b is True)
    self.assertTrue(obj.b is not 1)
    self.assertRaises(ValueError, setattr, obj, 'b', '5')
    self.assertRaises(ValueError, setattr, obj, 'b', '')
    self.assertRaises(ValueError, setattr, obj, 'b', object())
    self.assertRaises(ValueError, setattr, obj, 'b', [])
    # String stringifies any value, except None which is stored as None.
    self.assertEquals(obj.s, 'defaultstring')
    obj.s = 1
    self.assertEquals(obj.s, '1')
    obj.s = o1
    self.assertEquals(obj.s, str(o1))
    obj.s = None
    self.assertEquals(obj.s, None)
    self.assertNotEqual(obj.s, str(None))
    obj.s = ''
    self.assertEquals(obj.s, '')
    self.assertNotEqual(obj.s, None)
    obj.i = 7
    self.assertEquals(obj.i, 7)
    obj.i = '8'
    self.assertEquals(obj.i, 8)
    self.assertRaises(ValueError, setattr, obj, 'i', '')
    # Unsigned rejects negative values, whether string or int.
    obj.u = '5'
    self.assertEquals(obj.u, 5)
    obj.u = 0
    self.assertEquals(obj.u, 0)
    self.assertRaises(ValueError, setattr, obj, 'u', '-5')
    self.assertRaises(ValueError, setattr, obj, 'u', -5)
    obj.f = '5'
    self.assertEquals(obj.f, 5.0)
    obj.f = 0
    self.assertEquals(obj.f, 0)
    obj.f = 5e60
    self.assertEquals(obj.f, 5e60)
    # Enum accepts only the listed values, with no type coercion
    # (7 is allowed but '7' is not).
    obj.e = 'one'
    self.assertEquals(obj.e, 'one')
    obj.e = 7
    self.assertEquals(obj.e, 7)
    self.assertRaises(ValueError, setattr, obj, 'e', '7')
    obj.e = None
    obj.e2 = 'thing'
    self.assertRaises(ValueError, setattr, obj, 'e2', None)

  def testTriggers(self):
    obj = TriggerObject()
    self.assertEquals(obj.xval, 7)
    self.assertEquals(obj.triggers, 0)
    # Triggered() fires only when the stored value actually changes.
    obj.val = 99
    self.assertEquals(obj.xval, 99)
    self.assertEquals(obj.val, 99)
    self.assertEquals(obj.triggers, 1)
    obj.val = 99
    self.assertEquals(obj.triggers, 1)
    obj.val = 98
    self.assertEquals(obj.triggers, 2)
    obj.a = 5
    self.assertEquals(obj.triggers, 3)
    obj.a = '5'
    self.assertEquals(obj.triggers, 4)
    obj.a = '5'
    self.assertEquals(obj.triggers, 4)
    # Coercion happens before the change check: 0 then '0' is one change.
    obj.b = 0
    self.assertEquals(obj.triggers, 5)
    obj.b = '0'
    self.assertEquals(obj.triggers, 5)
    obj.b = 'TRuE'
    self.assertEquals(obj.b, 1)
    self.assertEquals(obj.triggers, 6)
    # test that exceptions get passed through
    obj.i = 9
    self.assertEquals(obj.triggers, 7)
    self.assertRaises(ValueError, setattr, obj, 'i', '1.2')
    self.assertEquals(obj.triggers, 7)

  def testReadOnly(self):
    obj = ReadOnlyObject()
    # Ordinary attribute assignment is rejected; only the descriptor's
    # Set() classmethod-style accessor may change a read-only value.
    self.assertRaises(AttributeError, setattr, obj, 'b', True)
    self.assertRaises(AttributeError, setattr, obj, 'b', False)
    self.assertEquals(obj.b, True)
    type(obj).b.Set(obj, False)
    self.assertEquals(obj.b, False)
    self.assertEquals(obj.i, 5)
    self.assertEquals(obj.s, 'foo')
    self.assertEquals(obj.e, None)
    self.assertRaises(AttributeError, setattr, obj, 'i', 5)
    self.assertRaises(AttributeError, setattr, obj, 's', 'foo')
    self.assertRaises(AttributeError, setattr, obj, 'e', None)


if __name__ == '__main__':
  unittest.main()
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Time handling for CWMP.
CWMP uses ISO 8601 time strings, and further specifies that UTC time be
used unless otherwise specified (and then, to my knowledge, never
specifies a case where another timezone can be used).
Python datetime objects are suitable for use with CWMP so long as
they contain a tzinfo specifying UTC offset=0. Most Python programmers
create datetime objects with no tzinfo, so we add one.
"""
__author__ = 'dgentry@google.com (Denton Gentry)'
import datetime
def format(arg):
  """Print a datetime with 'Z' for the UTC timezone, as CWMP requires.

  Accepts None (CWMP Unknown Time), a Unix timestamp float, or a
  datetime (naive datetimes are assumed to already be UTC).
  """
  if not arg:
    return '0001-01-01T00:00:00Z'  # CWMP Unknown Time
  dt = datetime.datetime.utcfromtimestamp(arg) if isinstance(arg, float) else arg
  offset = dt.tzinfo.utcoffset(dt) if dt.tzinfo else None
  if offset:
    # Non-UTC timezone: emit the numeric offset instead of 'Z'.
    return dt.isoformat()
  fmt = '%Y-%m-%dT%H:%M:%S.%fZ' if dt.microsecond else '%Y-%m-%dT%H:%M:%SZ'
  return dt.strftime(fmt)
def parse(arg):
  """Parse a CWMP UTC timestamp string into a naive datetime."""
  # TODO(dgentry) handle timezone properly
  try:
    # Whole-second form first; the fractional form as the fallback.
    return datetime.datetime.strptime(arg, '%Y-%m-%dT%H:%M:%SZ')
  except ValueError:
    return datetime.datetime.strptime(arg, '%Y-%m-%dT%H:%M:%S.%fZ')
def valid(arg):
  """Return True if arg parses as a CWMP timestamp string.

  The bare except here previously swallowed every exception, including
  KeyboardInterrupt and SystemExit; narrowed to the two that parsing
  actually produces.
  """
  try:
    parse(arg)
  except (ValueError, TypeError):
    # ValueError: string matches neither accepted format.
    # TypeError: arg is not a string at all.
    return False
  return True
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for cwmpdate.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import calendar
import datetime
import unittest
import google3
import cwmpdate
class UTC(datetime.tzinfo):
  """Concrete tzinfo for Coordinated Universal Time: zero offset, no DST."""

  def utcoffset(self, dt):
    return datetime.timedelta(0)

  def dst(self, dt):
    return datetime.timedelta(0)

  def tzname(self, dt):
    return 'UTC'
class OTH(datetime.tzinfo):
  """Concrete tzinfo one hour east of UTC, reporting one hour of DST."""

  def utcoffset(self, dt):
    return datetime.timedelta(hours=1)

  def dst(self, dt):
    return datetime.timedelta(hours=1)

  def tzname(self, dt):
    return 'OTH'
class CwmpDateTest(unittest.TestCase):
  """Tests for date formatting."""

  def testDatetimeNone(self):
    # None maps to the CWMP "Unknown Time" sentinel.
    self.assertEqual('0001-01-01T00:00:00Z', cwmpdate.format(None))

  def testDatetimeNaive(self):
    # Naive datetimes are assumed to be UTC and formatted with 'Z'.
    dt = datetime.datetime(1999, 12, 31, 23, 59, 58, 999999)
    self.assertEqual('1999-12-31T23:59:58.999999Z', cwmpdate.format(dt))
    dt2 = datetime.datetime(1999, 12, 31, 23, 59, 58)
    self.assertEqual('1999-12-31T23:59:58Z', cwmpdate.format(dt2))

  def testDatetimeUTC(self):
    # An explicit zero-offset tzinfo also formats with 'Z'.
    dt = datetime.datetime(1999, 12, 31, 23, 59, 58, 999999, tzinfo=UTC())
    self.assertEqual('1999-12-31T23:59:58.999999Z', cwmpdate.format(dt))
    dt2 = datetime.datetime(1999, 12, 31, 23, 59, 58, tzinfo=UTC())
    self.assertEqual('1999-12-31T23:59:58Z', cwmpdate.format(dt2))

  def testDatetimeOTH(self):
    # A non-UTC timezone falls back to isoformat() with a numeric offset.
    dt = datetime.datetime(1999, 12, 31, 23, 59, 58, 999999, tzinfo=OTH())
    self.assertEqual('1999-12-31T23:59:58.999999+01:00',
                     cwmpdate.format(dt))

  def testTimedelta(self):
    # Floats are treated as Unix timestamps (UTC).
    t = 1234567890.987654
    self.assertEqual('2009-02-13T23:31:30.987654Z', cwmpdate.format(t))

  def testParse(self):
    dt = cwmpdate.parse('2012-01-12T00:20:03.217691Z')
    timestamp = calendar.timegm(dt.timetuple())
    self.assertEqual(timestamp, 1326327603.0)

  def testValid(self):
    self.assertTrue(cwmpdate.valid('2009-02-13T23:31:30.987654Z'))
    self.assertTrue(cwmpdate.valid('2009-02-13T23:31:30Z'))
    # The trailing 'Z' is mandatory.
    self.assertFalse(cwmpdate.valid('2009-02-13T23:31:30'))
    self.assertFalse(cwmpdate.valid('booga'))


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A persistence layer that works with tr/core.py data structures."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import json
import sqlite3
import google3
import core
#TODO(apenwarr): consider not just using json encoding for values.
# The only offender is really 'list' type parameters, which sqlite3 can't
# store natively. If it weren't for that, we could just use sqlite3's
# native types, which would be much more elegant.
class Store(object):
  """A data store for persisting tr.core.Exporter objects in sqlite3.

  Usage: call Load() to populate a data model hierarchy from the
  database. Call Save() to save the data model hierarchy to the database.
  """

  def __init__(self, filename, root):
    # filename: path of the sqlite3 database file (created if missing).
    # root: the tr.core object hierarchy to persist.
    self.filename = filename
    self.root = root
    self.db = sqlite3.connect(self.filename)
    #TODO(apenwarr): support schema versioning of some sort
    try:
      self.db.execute('create table cfg (key primary key, value)')
    except sqlite3.OperationalError:
      # Table already exists; reuse it as-is.
      pass

  #TODO(apenwarr): delete objects that exist but are not in the store.
  def Load(self):
    """Load the data model objects from the database."""
    # 'order by key' sorts dotted keys lexicographically, so an object key
    # like 'Foo.' is seen before the parameters stored beneath it.
    # ignore_prefix skips the subtree under an object we failed to create;
    # 'NOTHING' is a sentinel matching no real key.
    ignore_prefix = 'NOTHING'
    q = 'select key, value from cfg order by key'
    for key, json_value in self.db.execute(q):
      if key.startswith(ignore_prefix):
        print 'Skipping %s' % key
        continue
      print 'Loading %s' % key
      value = json.loads(json_value)
      if key.endswith('.'):
        # an object
        key = key[:-1]
        try:
          parent, subname = self.root.FindExport(key, allow_create=True)
        except core.NotAddableError:
          print 'Warning: %s cannot be created manually.' % key
          ignore_prefix = key
        except KeyError:
          print 'Warning: %s does not exist' % key
        else:
          print ' got %r, %r' % (parent, subname)
      else:
        # a value
        try:
          self.root.SetExportParam(key, value)
        except Exception, e: #pylint: disable-msg=W0703
          # Best-effort load: a single bad parameter shouldn't abort the
          # rest of the restore.
          print "Warning: can't set %r=%r:\n\t%s" % (key, value, e)

  #TODO(apenwarr): save only params marked with a "persist" flag.
  #TODO(apenwarr): invent a "persist" flag.
  def Save(self):
    """Save the data model objects into the database."""
    for key in self.root.ListExports(recursive=True):
      #TODO(apenwarr): ListExports should return a value; this is inefficient!
      if key.endswith('.'):
        # Objects are stored with a NULL value; only their existence matters.
        value = None
      else:
        value = self.root.GetExport(key)
      try:
        # Values are json-encoded because sqlite3 can't store lists natively
        # (see the module TODO above).
        self.db.execute('insert or replace into cfg (key, value) values (?,?)',
                        (key, json.dumps(value)))
      except sqlite3.InterfaceError:
        print 'sqlite3 error: key=%r value=%r' % (key, value)
        raise
    self.db.commit()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for http.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import datetime
import mox
import os
import shutil
import sys
import tempfile
import time
import unittest
import xml.etree.ElementTree as ET
import google3
import dm_root
import tornado.httpclient
import tornado.ioloop
import tornado.testing
import tornado.util
import api
import cwmp_session
import cwmpdate
import download
import http
# Set by HttpTest.setUp to the AsyncTestCase stop() method, so that
# MockHttpClient.fetch() can end the ioloop wait.
mock_http_client_stop = None
# Every MockHttpClient instance appends itself here; tests inspect the
# list and clear it in setUp/tearDown.
mock_http_clients = []
# XML namespace prefixes used when parsing SOAP/CWMP request bodies.
SOAPNS = '{http://schemas.xmlsoap.org/soap/envelope/}'
CWMPNS = '{urn:dslforum-org:cwmp-1-2}'
def GetMonotime():
  """Older tornado doesn't have monotime(); stay compatible."""
  # Fall back to time.time when this tornado predates monotime_impl.
  return getattr(tornado.util, 'monotime_impl', time.time)
def SetMonotime(func):
  """Older tornado doesn't have monotime(); stay compatible."""
  if hasattr(tornado.util, 'monotime_impl'):
    tornado.util.monotime_impl = func
    return
  # Old tornado reads time.time directly, so patch that instead.
  time.time = func
def StubOutMonotime(moxinstance):
  """Stub out whichever clock attribute this tornado version reads."""
  if hasattr(tornado.util, 'monotime_impl'):
    target, attr = tornado.util, 'monotime_impl'
  else:
    target, attr = time, 'time'
  moxinstance.StubOutWithMock(target, attr)
class MockHttpClient(object):
def __init__(self, **kwargs):
self.ResetMock()
mock_http_clients.append(self)
def ResetMock(self):
self.req = None
self.fetch_called = False
def fetch(self, req, callback):
print '%s: fetching: %s %s' % (self, req, callback)
self.fetch_req = req
self.fetch_callback = callback
self.fetch_called = True
mock_http_client_stop()
class MockPlatformConfig(object):
  """Platform config stub: fixed ACS URL, no-op access accounting."""

  def GetAcsUrl(self):
    return 'http://example.com/cwmp'

  def AcsAccessAttempt(self, url):
    # Accounting is irrelevant for these tests.
    pass

  def AcsAccessSuccess(self, url):
    pass
class HttpTest(tornado.testing.AsyncTestCase):
  """Tests for the CPE side of the HTTP session machinery in http.py."""

  def setUp(self):
    super(HttpTest, self).setUp()
    # Save the real clock and HTTP client so tearDown can restore them.
    self.old_monotime = GetMonotime()
    self.advance_time = 0
    self.old_HTTPCLIENT = cwmp_session.HTTPCLIENT
    cwmp_session.HTTPCLIENT = MockHttpClient
    global mock_http_client_stop
    mock_http_client_stop = self.stop
    self.removedirs = list()
    self.removefiles = list()
    del mock_http_clients[:]

  def tearDown(self):
    super(HttpTest, self).tearDown()
    SetMonotime(self.old_monotime)
    cwmp_session.HTTPCLIENT = self.old_HTTPCLIENT
    for d in self.removedirs:
      shutil.rmtree(d)
    for f in self.removefiles:
      os.remove(f)
    del mock_http_clients[:]

  def advanceTime(self):
    """Replacement clock: a fixed base plus a test-controlled offset."""
    return 420000.0 + self.advance_time

  def getCpe(self):
    """Build a CPE session machine with a fake platform and temp dirs."""
    dm_root.PLATFORMDIR = '../platform'
    root = dm_root.DeviceModelRoot(self.io_loop, 'fakecpe')
    cpe = api.CPE(root)
    dldir = tempfile.mkdtemp()
    self.removedirs.append(dldir)
    cfdir = tempfile.mkdtemp()
    self.removedirs.append(cfdir)
    cpe.download_manager.SetDirectories(config_dir=cfdir, download_dir=dldir)
    cpe_machine = http.Listen(ip=None, port=0,
                              ping_path='/ping/http_test',
                              acs=None, cpe=cpe, cpe_listener=False,
                              platform_config=MockPlatformConfig(),
                              ioloop=self.io_loop)
    return cpe_machine

  def testMaxEnvelopes(self):
    """The Inform sent at startup must advertise MaxEnvelopes == 1."""
    SetMonotime(self.advanceTime)
    cpe_machine = self.getCpe()
    cpe_machine.Startup()
    self.wait()
    self.assertEqual(len(mock_http_clients), 1)
    ht = mock_http_clients[0]
    self.assertTrue(ht.fetch_called)
    root = ET.fromstring(ht.fetch_req.body)
    envelope = root.find(SOAPNS + 'Body/' + CWMPNS + 'Inform/MaxEnvelopes')
    self.assertTrue(envelope is not None)
    self.assertEqual(envelope.text, '1')

  def testCurrentTime(self):
    """The Inform's CurrentTime must be a valid CWMP timestamp."""
    SetMonotime(self.advanceTime)
    cpe_machine = self.getCpe()
    cpe_machine.Startup()
    self.wait()
    self.assertEqual(len(mock_http_clients), 1)
    ht = mock_http_clients[0]
    self.assertTrue(ht.fetch_called)
    root = ET.fromstring(ht.fetch_req.body)
    ctime = root.find(SOAPNS + 'Body/' + CWMPNS + 'Inform/CurrentTime')
    self.assertTrue(ctime is not None)
    self.assertTrue(cwmpdate.valid(ctime.text))

  def testLookupDevIP6(self):
    """LookupDevIP6 reads /proc/net/if_inet6 (faked via testdata)."""
    http.PROC_IF_INET6 = 'testdata/http/if_inet6'
    cpe_machine = self.getCpe()
    self.assertEqual(cpe_machine.LookupDevIP6('eth0'),
                     '11:2233:4455:6677:8899:aabb:ccdd:eeff')
    self.assertEqual(cpe_machine.LookupDevIP6('foo0'), 0)

  def testRetryCount(self):
    """RetryCount must be 0 on the first Inform and 1 after a failure."""
    SetMonotime(self.advanceTime)
    cpe_machine = self.getCpe()
    cpe_machine.Startup()
    self.wait(timeout=20)
    self.assertEqual(len(mock_http_clients), 1)
    ht = mock_http_clients[0]
    self.assertTrue(ht.fetch_called)
    root = ET.fromstring(ht.fetch_req.body)
    retry = root.find(SOAPNS + 'Body/' + CWMPNS + 'Inform/RetryCount')
    self.assertTrue(retry is not None)
    self.assertEqual(retry.text, '0')
    # Fail the first request
    httpresp = tornado.httpclient.HTTPResponse(ht.fetch_req, 404)
    ht.fetch_callback(httpresp)
    self.advance_time += 10
    self.wait(timeout=20)
    self.assertEqual(len(mock_http_clients), 2)
    ht = mock_http_clients[1]
    root = ET.fromstring(ht.fetch_req.body)
    retry = root.find(SOAPNS + 'Body/' + CWMPNS + 'Inform/RetryCount')
    self.assertTrue(retry is not None)
    self.assertEqual(retry.text, '1')

  def testNewPingSession(self):
    """Ping sessions are rate-limited; excess pings queue a timeout."""
    cpe_machine = self.getCpe()
    cpe_machine.previous_ping_time = 0
    # Create mocks of ioloop, and stubout the time function.
    m = mox.Mox()
    ioloop_mock = m.CreateMock(tornado.ioloop.IOLoop)
    m.StubOutWithMock(cpe_machine, "_NewSession")
    StubOutMonotime(m)
    # First call to _NewSession should get the time and trigger a new session
    GetMonotime()().AndReturn(1000)
    cpe_machine._NewSession(mox.IsA(str))
    # Second call to _NewSession should queue a session
    GetMonotime()().AndReturn(1001)
    ioloop_mock.add_timeout(mox.IsA(datetime.timedelta),
                            mox.IgnoreArg()).AndReturn(1)
    # Third call should get the time and then not do anything
    # since a session is queued.
    GetMonotime()().AndReturn(1001)
    # And the call to _NewTimeoutSession should call through to
    # NewPingSession, and start a new session
    GetMonotime()().AndReturn(1000 + cpe_machine.rate_limit_seconds)
    ioloop_mock.add_timeout(mox.IsA(datetime.timedelta),
                            mox.IgnoreArg()).AndReturn(2)
    cpe_machine.ioloop = ioloop_mock
    m.ReplayAll()
    # Real test starts here.
    cpe_machine._NewPingSession()
    cpe_machine._NewPingSession()
    cpe_machine._NewPingSession()
    cpe_machine._NewTimeoutPingSession()
    # Verify everything was called correctly.
    m.VerifyAll()

  def testNewPeriodicSession(self):
    """Tests that _NewSession is called if the event queue is empty."""
    cpe_machine = self.getCpe()
    # Create mocks of ioloop, and stubout the time function.
    m = mox.Mox()
    m.StubOutWithMock(cpe_machine, '_NewSession')
    cpe_machine._NewSession('2 PERIODIC')
    m.ReplayAll()
    cpe_machine.NewPeriodicSession()
    m.VerifyAll()

  def testNewPeriodicSessionPending(self):
    """Tests that no new periodic session starts if there is one pending."""
    cpe_machine = self.getCpe()
    # Create mocks of ioloop, and stubout the time function.
    m = mox.Mox()
    m.StubOutWithMock(cpe_machine, 'Run')
    cpe_machine.Run()
    m.ReplayAll()
    self.assertFalse(('2 PERIODIC', None) in cpe_machine.event_queue)
    cpe_machine.NewPeriodicSession()
    self.assertTrue(('2 PERIODIC', None) in cpe_machine.event_queue)
    cpe_machine.NewPeriodicSession()
    # Verify Run() was called exactly once despite two session requests.
    # (The original ended with a second ReplayAll(), which silently
    # skipped verification.)
    m.VerifyAll()

  def testEventQueue(self):
    """Overflowing the bounded event queue should call sys.exit(1).

    NOTE(review): assumes the queue bound is 64 entries and that append,
    appendleft, extend and extendleft each trip the limit once when the
    queue is full -- confirm against the queue implementation in http.py.
    """
    cpe_machine = self.getCpe()
    m = mox.Mox()
    m.StubOutWithMock(sys, 'exit')
    sys.exit(1)
    sys.exit(1)
    sys.exit(1)
    sys.exit(1)
    m.ReplayAll()
    for i in range(64):
      cpe_machine.event_queue.append(i)
    cpe_machine.event_queue.append(100)
    cpe_machine.event_queue.appendleft(200)
    cpe_machine.event_queue.extend([300])
    cpe_machine.event_queue.extendleft([400])
    cpe_machine.event_queue.clear()
    cpe_machine.event_queue.append(10)
    cpe_machine.event_queue.clear()
    m.VerifyAll()
# NOTE: both of these classes were previously defined twice, verbatim;
# the redundant second copies have been removed.
class TestManagementServer(object):
  """Minimal ManagementServer stand-in supplying digest-auth credentials."""
  ConnectionRequestUsername = 'username'
  ConnectionRequestPassword = 'password'


class PingTest(tornado.testing.AsyncHTTPTestCase):
  """Tests for the connection-request (ping) handler."""

  def ping_callback(self):
    self.ping_calledback = True

  def get_app(self):
    return tornado.web.Application(
        [('/', http.PingHandler, dict(cpe_ms=TestManagementServer(),
                                      callback=self.ping_callback))])

  def test_ping(self):
    """An unauthenticated ping gets a 401 challenge and no callback."""
    self.ping_calledback = False
    self.http_client.fetch(self.get_url('/'), self.stop)
    response = self.wait()
    self.assertEqual(response.error.code, 401)
    self.assertFalse(self.ping_calledback)
    # The challenge must advertise qop. The original asserted on find()'s
    # raw result, which is -1 (truthy!) when absent, so it never failed.
    self.assertTrue(response.body.find('qop') != -1)


if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An application mainloop based on tornado.ioloop.
This lets us build single-threaded async networking applications that can
listen on sockets, connect to sockets, implement a tornado web server, and
so on.
"""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import datetime
import errno
import os
import socket
import google3
import tornado.ioloop
import tornado.iostream #pylint: disable-msg=W0404
import helpers
def _DeleteOldSock(family, address):
tsock = socket.socket(family, socket.SOCK_STREAM, 0)
try:
tsock.connect(address)
except socket.error, e:
if e.errno == errno.ECONNREFUSED:
helpers.Unlink(address)
def _ListenSocket(family, address):
"""Return a new listening socket on the given family and address."""
sock = socket.socket(family, socket.SOCK_STREAM, 0)
if family == socket.AF_UNIX:
_DeleteOldSock(family, address)
else:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setblocking(0)
sock.bind(address)
sock.listen(10)
return sock
class ListenSocket(object):
  """A class for listening on a socket using the mainloop.

  We create the requested socket, bind() it, listen() it, register it
  with tornado, and then accept() on it whenever an incoming connection
  arrives. Then we pass the incoming connection to the given callback.
  """

  def __init__(self, family, address, onaccept_func):
    """Initialize a ListenSocket.

    Args:
      family: eg. socket.AF_INET, socket.AF_INET6, socket.AF_UNIX
      address: eg. ('0.0.0.0', 1234) or '/tmp/unix/socket/path'
      onaccept_func: called for each newly-accepted connection, with
        parameters (sock, address).
    """
    self.onaccept_func = onaccept_func
    self.family = family
    self.address = address
    # Assign self.sock before creating the real socket so __del__ can
    # rely on the attribute existing even if _ListenSocket() raises.
    self.sock = None
    self.sock = _ListenSocket(family, address)
    if family != socket.AF_UNIX:
      # The caller may have requested port 0 (ephemeral); record the
      # (host, port) the kernel actually assigned.
      self.address = self.sock.getsockname()[:2]
    print 'Listening on %r' % (self.address,)
    ioloop = tornado.ioloop.IOLoop.instance()
    ioloop.add_handler(self.sock.fileno(), self._Accept, ioloop.READ)

  def __del__(self):
    print 'deleting listener: %r' % (self.address,)
    if self.family == socket.AF_UNIX and self.sock:
      # Unix-domain sockets leave a filesystem entry behind; clean it up.
      self.sock.close()
      helpers.Unlink(self.address)

  def _Accept(self, fd, events):  #pylint: disable-msg=W0613
    """Tornado fd handler: accept one pending connection, if any."""
    try:
      sock, address = self.sock.accept()
    except socket.error, e:
      if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
        # Spurious wakeup: nothing is actually ready; wait for the next one.
        return
      raise
    sock.setblocking(0)  #pylint: disable-msg=E1101
    print 'got a connection from %r' % (address,)
    self.onaccept_func(sock, address)
class LineReader(object):
  """A helper for sockets that read newline-delimited data.

  We register with the mainloop to get called whenever incoming data arrives
  on the socket. Then, we call a callback for each line (ie. text ending
  in newline) we receive.
  """

  def __init__(self, sock, address, gotline_func):
    """Initialize a LineReader.

    Args:
      sock: a socket.socket() object.
      address: the remote address of the socket.
      gotline_func: called for each line of data, with parameter (line).
        Its return value, if truthy, is written back to the peer; it may
        raise EOFError to close the connection.
    """
    self.address = address
    self.gotline_func = gotline_func
    self.stream = tornado.iostream.IOStream(sock)
    self.stream.set_close_callback(self.OnClose)
    self._StartRead()

  def __del__(self):
    print 'deleting linereader: %r' % (self.address,)

  def _StartRead(self):
    # Ask tornado to buffer input up to the next newline, then call GotData.
    self.stream.read_until('\n', self.GotData)

  def GotData(self, line):
    """Handle one newline-terminated chunk of data from the stream."""
    try:
      result = self.gotline_func(line)
      if result:
        # The callback's return value is echoed back to the peer.
        self.Write(result)
    except EOFError:
      # The callback signals end-of-conversation by raising EOFError.
      self.stream.close()
      return
    self._StartRead()

  def Write(self, bytestring):
    """Queue bytestring for writing to the peer."""
    return self.stream.write(bytestring)

  def OnClose(self):
    print 'closing %r' % (self.address,)
    # Drop the stream's references back to us so the reference cycle
    # (stream -> callbacks -> self -> stream) can be collected.
    self.stream._read_callback = None
    self.stream.set_close_callback(None)
class MainLoop(object):
  """A slightly more convenient wrapper for tornado.ioloop.IOLoop."""

  def __init__(self):
    # Assign both attributes before calling IOLoop.instance() so __del__
    # is safe even if instance() raises.
    self.loop_timeout = None
    self.ioloop = None
    self.ioloop = tornado.ioloop.IOLoop.instance()

  def __del__(self):
    # we have to do this so objects who have registered with the ioloop
    # can get their refcounts down to zero, so their destructors can be
    # called
    if self.ioloop:
      #gpylint: disable-msg=W0212
      for fd in self.ioloop._handlers.keys():
        self.ioloop.remove_handler(fd)
      self.ioloop._handlers.clear()
      self.ioloop._events.clear()

  def Start(self, timeout=None):
    """Run the mainloop repetitively until the program is finished.

    "Finished" means one of three things: no event handlers remain (unlikely),
    the timeout expires, or someone calls ioloop.stop().

    Args:
      timeout: the time at which the loop will be forcibly stopped. Mostly
        useful in unit tests. None means no timeout; 0 means stop instantly.
    """
    tmo = None
    if timeout is not None:
      self.loop_timeout = tmo = self.ioloop.add_timeout(
          datetime.timedelta(seconds=timeout), self._TimedOut)
    try:
      self.ioloop.start()
    finally:
      # If the loop stopped for some other reason first, unregister the
      # timeout so it cannot fire during a later Start().
      if tmo:
        self.ioloop.remove_timeout(tmo)
        self.loop_timeout = None

  def RunOnce(self, timeout=None):
    """Run the mainloop for exactly one iteration.

    Processes all events that occur during that iteration, including
    timeouts.

    Args:
      timeout: same meaning as in Start().
    """
    # TODO(apenwarr): timeout is effectively always 0 for now. Oops.
    # Write one byte to a self-pipe; its read handler stops the loop, so
    # Start() returns after a single iteration that handles all ready events.
    r, w = os.pipe()
    try:
      os.write(w, 'x')
      self.ioloop.add_handler(r, lambda fd, events: self.ioloop.stop(),
                              self.ioloop.READ)
      self.Start(timeout)
    finally:
      os.close(r)
      os.close(w)
      # NOTE(review): the handler is removed *after* its fd is closed;
      # some IOLoop/poller implementations may object to that ordering --
      # confirm against the tornado version in use.
      self.ioloop.remove_handler(r)

  def _TimedOut(self):
    """add_timeout callback: stop the loop when Start()'s deadline expires."""
    self.ioloop.stop()
    # NOTE(review): this removes a timeout that has already fired; whether
    # that is a harmless no-op depends on the tornado version -- confirm.
    self.ioloop.remove_timeout(self.loop_timeout)
    self.loop_timeout = None

  def _IsIPv4Addr(self, address):
    """Return True if address[0] parses as a dotted-quad IPv4 address."""
    try:
      socket.inet_aton(address[0])
    except socket.error:
      return False
    else:
      return True

  def Listen(self, family, address, onaccept_func):
    """Listen on address; onaccept_func(sock, address) is called per client."""
    return ListenSocket(family, address, onaccept_func)

  def ListenInet(self, address, onaccept_func):
    """Listen on a TCP (host, port), choosing IPv4 or IPv6 automatically."""
    if self._IsIPv4Addr(address):
      return self.Listen(socket.AF_INET, address, onaccept_func)
    else:
      return self.Listen(socket.AF_INET6, address, onaccept_func)

  def ListenUnix(self, filename, onaccept_func):
    """Listen on a unix-domain socket at the given path."""
    return self.Listen(socket.AF_UNIX, filename, onaccept_func)

  def Connect(self, family, address, onconnect_func):
    """Open an outgoing connection; onconnect_func(stream) is called when up.

    If the stream closes first, onconnect_func(None) is called instead.
    """
    sock = socket.socket(family, socket.SOCK_STREAM, 0)
    stream = tornado.iostream.IOStream(sock)
    stream.set_close_callback(lambda: onconnect_func(None))
    stream.connect(address, lambda: onconnect_func(stream))
    return stream

  def ConnectInet(self, address, onconnect_func):
    """Connect to a TCP (host, port), choosing IPv4 or IPv6 automatically."""
    if self._IsIPv4Addr(address):
      return self.Connect(socket.AF_INET, address, onconnect_func)
    else:
      return self.Connect(socket.AF_INET6, address, onconnect_func)

  def ConnectUnix(self, filename, onconnect_func):
    """Connect to a unix-domain socket at the given path."""
    return self.Connect(socket.AF_UNIX, filename, onconnect_func)
class _WaitUntilIdle(object):
  """Manage some state variables for WaitUntilIdle."""

  def __init__(self, func):
    self.func = func
    # Maps (args, sorted kwargs) -> the ioloop timeout handle for the
    # pending call, so duplicate Schedule()s coalesce into a single call.
    self.timeouts = {}

  def __del__(self):
    timeouts = self.timeouts
    self.timeouts = {}
    # BUG FIX: iterate the timeout *handles* (the dict values), not the
    # argument keys: remove_timeout() expects the handle returned by
    # add_timeout().  The old code passed the key tuples and the bare
    # except silently swallowed the resulting errors, leaving the
    # timeouts registered.
    for tmo in timeouts.values():
      try:
        tornado.ioloop.IOLoop.instance().remove_timeout(tmo)
      except:  #gpylint: disable-msg=W0702
        pass  # must catch all exceptions in a destructor

  def _Call(self, *args, **kwargs):
    """Actually call the wrapped function and mark the timeout as done."""
    key = (args, tuple(sorted(kwargs.items())))
    del self.timeouts[key]
    self.func(*args, **kwargs)  # note: discards return value

  def Schedule(self, *args, **kwargs):
    """Schedule a delayed call of the wrapped function with the given args."""
    key = (args, tuple(sorted(kwargs.items())))
    if key not in self.timeouts:
      # NOTE(review): assumes tornado.util is reachable as an attribute of
      # tornado (pulled in transitively by tornado.ioloop) -- confirm.
      if hasattr(tornado.util, 'monotonic'):
        self.timeouts[key] = tornado.ioloop.IOLoop.instance().add_timeout(
            0, lambda: self._Call(*args, **kwargs), monotonic=True)
      else:
        self.timeouts[key] = tornado.ioloop.IOLoop.instance().add_timeout(
            0, lambda: self._Call(*args, **kwargs))
def WaitUntilIdle(func):
  """A decorator that calls the given function when the loop is idle.

  Multiple calls with the same method and args before the mainloop next
  becomes idle are coalesced: the function runs only once.

  Args:
    func: the function to decorate.
  Returns:
    A variation of func() that waits until the ioloop is idle.

  Example:
    class X(object):
      @tr.mainloop.WaitUntilIdle
      def Func(self):
        print 'running!'

    x = X()
    x.Func()
    x.Func()
    loop.Start()  # runs Func exactly once
  """
  # The shared state object is created once, when the decorated function
  # is *declared*; ScheduleIt runs on every *call* and coalesces them.
  state = _WaitUntilIdle(func)

  def ScheduleIt(*args, **kwargs):
    state.Schedule(*args, **kwargs)
  return ScheduleIt
def _TestGotLine(line):
  """Demo line callback for main(): log the line and answer 'response'."""
  print 'got line: %r' % line
  return 'response\r\n'
def main():
  """Demo entry point: run a line-based echo server on TCP port 12999."""
  loop = MainLoop()
  #pylint: disable-msg=C6402
  loop.ListenInet(('', 12999),
                  lambda sock, address: LineReader(sock, address,
                                                   _TestGotLine))
  loop.Start()
if __name__ == '__main__':
  # Run the demo server when this file is executed directly.
  main()
| Python |
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409,W0212
#
"""Type descriptors for common TR-069 data types."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
class Attr(object):
  """A descriptor that holds an arbitrary attribute.

  This isn't very useful on its own, but type-specific child classes
  enforce the data type.  For example:

    class X(object):
      a = Attr()
      b = Bool()
      s = String()
      i = Int()
      e = Enum('Bob', 'Fred')
    x = X()
    x.a = object()
    x.b = '0'      # actually gets set to False
    x.s = [1,2]    # gets set to the string str([1, 2])
    x.i = '9'      # auto-converts to a real int
    x.e = 'Stinky' # raises exception since it's not an allowed value
  """

  def __init__(self, init=None):
    self.init = init

  def _MakeAttrs(self, obj):
    """Return (creating on first use) the per-object value store."""
    try:
      return obj.__Attrs
    except AttributeError:
      obj.__Attrs = {}
      return obj.__Attrs

  def __get__(self, obj, _):
    # One descriptor instance is shared by every instance of the owning
    # class, so per-object values live in a dict attached to the object
    # itself, keyed by id(self) to keep multiple descriptors separate.
    if obj is None:
      return self
    attrs = self._MakeAttrs(obj)
    if id(self) not in attrs:
      if self.init is None:
        # special case: if init==None, don't do consistency checking, in
        # order to support initially-invalid variables
        attrs[id(self)] = None
      else:
        self.__set__(obj, self.init)
    return attrs[id(self)]

  def __set__(self, obj, value):
    self._MakeAttrs(obj)[id(self)] = value
class Bool(Attr):
  """An attribute that is always either True or False.

  Accepts the strings 'true' or 'false' (case insensitive), '0' or '1',
  or the values 0, 1, True, and False.
  """

  def __set__(self, obj, value):
    text = str(value).lower()
    if text in ('true', '1'):
      parsed = True
    elif text in ('false', '0'):
      parsed = False
    else:
      raise ValueError('%r is not a valid boolean' % value)
    Attr.__set__(self, obj, parsed)
class Int(Attr):
  """An attribute that is always an integer."""

  def __set__(self, obj, value):
    # int() raises ValueError/TypeError for non-numeric input.
    super(Int, self).__set__(obj, int(value))
class Unsigned(Attr):
  """An attribute that is always an integer >= 0."""

  def __set__(self, obj, value):
    converted = int(value)
    if converted < 0:
      raise ValueError('%r must be >= 0' % value)
    super(Unsigned, self).__set__(obj, converted)
class Float(Attr):
  """An attribute that is always a floating point number."""

  def __set__(self, obj, value):
    super(Float, self).__set__(obj, float(value))
class String(Attr):
  """An attribute that is always a string or None."""

  def __set__(self, obj, value):
    converted = None if value is None else str(value)
    super(String, self).__set__(obj, converted)
class Enum(Attr):
  """An attribute that is always one of the given values.

  The values are usually strings in TR-069, but this is not enforced.
  """

  def __init__(self, values, init=None):
    """Args:
      values: an iterable of the allowed values.
      init: the initial value, or None for no initial value.
    """
    super(Enum, self).__init__(init=init)
    self.values = set(values)

  def __set__(self, obj, value):
    if value not in self.values:
      # BUG FIX: the message previously read 'value values are', which
      # garbled the error shown to the ACS/operator; it means 'valid'.
      raise ValueError('%r invalid; valid values are %r'
                       % (value, self.values))
    Attr.__set__(self, obj, value)
class Trigger(object):
  """A type descriptor that calls obj.Triggered() whenever its value changes.

  The 'attr' parameter to __init__ must be a descriptor itself.  So it
  could be an object derived from Attr (above), or an @property.  Examples:

    class X(object):
      def __init__(self):
        self._thing = 7
      def Triggered(self):
        print 'woke up!'
      a = Trigger(Attr())
      b = Trigger(Bool())
      @property
      def thing(self):
        return self._thing
      @Trigger
      @thing.setter
      def thing(self, value):
        self._thing = value
    x = X()
    x.a = 'hello'  # triggers
    x.a = 'hello'  # unchanged: no trigger
    b = False      # default value was None, so triggers
    b = '0'        # still false; no trigger
    thing = 7      # same as original value; no trigger
    thing = None   # triggers
  """

  def __init__(self, attr):
    self.attr = attr

  def __get__(self, obj, _):
    return self if obj is None else self.attr.__get__(obj, None)

  def __set__(self, obj, value):
    before = self.attr.__get__(obj, None)
    self.attr.__set__(obj, value)
    after = self.attr.__get__(obj, None)
    # The wrapped attr may coerce or reject the assignment; compare the
    # stored value before and after, and fire only on a *real* change.
    if before != after:
      obj.Triggered()
def TriggerBool(*args, **kwargs):
  """Return a Bool attribute wrapped in a Trigger."""
  return Trigger(Bool(*args, **kwargs))


def TriggerInt(*args, **kwargs):
  """Return an Int attribute wrapped in a Trigger."""
  return Trigger(Int(*args, **kwargs))


def TriggerUnsigned(*args, **kwargs):
  """Return an Unsigned attribute wrapped in a Trigger."""
  # BUG FIX: previously wrapped Int(), which silently accepted negative
  # values; wrap Unsigned() as the name promises.
  return Trigger(Unsigned(*args, **kwargs))


def TriggerFloat(*args, **kwargs):
  """Return a Float attribute wrapped in a Trigger."""
  # BUG FIX: previously wrapped Int(), which truncated fractional values;
  # wrap Float() as the name promises.
  return Trigger(Float(*args, **kwargs))


def TriggerString(*args, **kwargs):
  """Return a String attribute wrapped in a Trigger."""
  return Trigger(String(*args, **kwargs))


def TriggerEnum(*args, **kwargs):
  """Return an Enum attribute wrapped in a Trigger."""
  return Trigger(Enum(*args, **kwargs))
class ReadOnly(object):
  """A type descriptor that prevents setting the wrapped Attr().

  Since usually *someone* needs to be able to set the value, we also add a
  Set() method that overrides the read-only-ness.  The syntax for doing it
  is a little weird, which is a good reminder that you're not supposed to
  change read-only objects.

  Example:
    class X(object):
      b = ReadOnly(Bool(True))
    x = X()
    print x.b          # True
    x.b = False        # raises AttributeError
    X.b.Set(x, False)  # actually sets the bool
  """

  def __init__(self, attr):
    self.attr = attr

  def __get__(self, obj, _):
    return self if obj is None else self.attr.__get__(obj, None)

  def __set__(self, obj, _):
    # Same exception a read-only @property raises.
    raise AttributeError("can't set attribute")

  def Set(self, obj, value):
    """Override the read-only-ness; generally for internal use."""
    return self.attr.__set__(obj, value)
def ReadOnlyBool(*args, **kwargs):
  """Return a Bool attribute wrapped in ReadOnly."""
  return ReadOnly(Bool(*args, **kwargs))


def ReadOnlyInt(*args, **kwargs):
  """Return an Int attribute wrapped in ReadOnly."""
  return ReadOnly(Int(*args, **kwargs))


def ReadOnlyUnsigned(*args, **kwargs):
  """Return an Unsigned attribute wrapped in ReadOnly."""
  return ReadOnly(Unsigned(*args, **kwargs))


def ReadOnlyFloat(*args, **kwargs):
  """Return a Float attribute wrapped in ReadOnly."""
  return ReadOnly(Float(*args, **kwargs))


def ReadOnlyString(*args, **kwargs):
  """Return a String attribute wrapped in ReadOnly."""
  return ReadOnly(String(*args, **kwargs))


def ReadOnlyEnum(*args, **kwargs):
  """Return an Enum attribute wrapped in ReadOnly."""
  return ReadOnly(Enum(*args, **kwargs))
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for cpe_management_server.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import copy
import datetime
import unittest
import google3
import cpe_management_server as ms
import cwmpdate
# Global registry of every MockPeriodicCallback created; tests inspect it
# and setUp() resets it.
periodic_callbacks = []
class MockIoloop(object):
  """Stand-in for tornado.ioloop.IOLoop that records timeout requests."""

  def __init__(self):
    self.timeout_time = None      # deadline from the last add_timeout()
    self.timeout_callback = None  # callback from the last add_timeout()
    self.remove_handle = None     # handle from the last remove_timeout()
    self.handle = 1               # fixed handle returned by add_timeout()

  def add_timeout(self, time, callback, monotonic=None):
    # Record the most recent scheduling request so tests can inspect it.
    self.timeout_time = time
    self.timeout_callback = callback
    return self.handle

  def remove_timeout(self, timeout):
    self.remove_handle = timeout
class MockPeriodicCallback(object):
  """Stand-in for tornado.ioloop.PeriodicCallback.

  Each instance registers itself in the module-level periodic_callbacks
  list so tests can see which callbacks were created, started, and stopped.
  """

  def __init__(self, callback, callback_time, io_loop=None):
    self.callback = callback
    self.callback_time = callback_time
    self.io_loop = io_loop
    self.start_called = False
    self.stop_called = False
    periodic_callbacks.append(self)

  def start(self):
    self.start_called = True

  def stop(self):
    self.stop_called = True
    # Deregister so tests see only the callbacks that are still active.
    periodic_callbacks.remove(self)
class MockPlatformConfig(object):
  """Stand-in platform_config that can simulate a read-only ACS URL."""

  def __init__(self):
    self.set_acs_raise = True       # when True, SetAcsUrl() refuses writes
    self.set_acs_url_called = False
    self.acs_url = 'http://acs.example.com/cwmp'

  def SetAcsUrl(self, url):
    self.set_acs_url_called = True
    if not self.set_acs_raise:
      self.acs_url = url
    else:
      raise AttributeError('read-only param')

  def GetAcsUrl(self):
    return self.acs_url
class FakePlatformConfig(object):
  """Minimal platform_config whose ACS URL is always unset."""

  def GetAcsUrl(self):
    return None
class CpeManagementServerTest(unittest.TestCase):
  """tests for http.py CpeManagementServer."""

  def setUp(self):
    self.start_session_called = False
    # Empty the global registry in place; MockPeriodicCallback appends to it.
    del periodic_callbacks[:]

  def testIsIp6Address(self):
    """_isIp6Address should accept IPv6 literals and reject anything else."""
    cpe_ms = ms.CpeManagementServer(platform_config=FakePlatformConfig(),
                                    port=5, ping_path='/ping/path')
    self.assertTrue(cpe_ms._isIp6Address('fe80::21d:9ff:fe11:f55f'))
    self.assertTrue(cpe_ms._isIp6Address('2620:0:1000:5200:222:3ff:fe44:5555'))
    self.assertFalse(cpe_ms._isIp6Address('1.2.3.4'))
    self.assertFalse(cpe_ms._isIp6Address('foobar'))

  def testConnectionRequestURL(self):
    """IPv6 addresses must be bracketed in the ConnectionRequestURL."""
    cpe_ms = ms.CpeManagementServer(platform_config=FakePlatformConfig(),
                                    port=5, ping_path='/ping/path')
    cpe_ms.my_ip = '1.2.3.4'
    self.assertEqual(cpe_ms.ConnectionRequestURL, 'http://1.2.3.4:5/ping/path')
    cpe_ms.my_ip = '2620:0:1000:5200:222:3ff:fe44:5555'
    self.assertEqual(cpe_ms.ConnectionRequestURL,
                     'http://[2620:0:1000:5200:222:3ff:fe44:5555]:5/ping/path')

  def testAcsUrl(self):
    """URL reads/writes delegate to platform_config, including failures."""
    pc = MockPlatformConfig()
    cpe_ms = ms.CpeManagementServer(platform_config=pc, port=0, ping_path='')
    self.assertEqual(cpe_ms.URL, 'http://acs.example.com/cwmp')
    # platform_config refuses the write; the error must propagate.
    self.assertRaises(AttributeError, cpe_ms.SetURL, 'http://example.com/')
    self.assertTrue(pc.set_acs_url_called)
    pc.set_acs_raise = False
    pc.set_acs_url_called = False
    cpe_ms.URL = 'http://example.com/'
    self.assertTrue(pc.set_acs_url_called)
    self.assertEqual(pc.acs_url, 'http://example.com/')

  def GetParameterKey(self):
    # Not a test: handed to CpeManagementServer as its get_parameter_key
    # callback.
    return 'ParameterKey'

  def testParameterKey(self):
    cpe_ms = ms.CpeManagementServer(platform_config=FakePlatformConfig(),
                                    port=0, ping_path='/',
                                    get_parameter_key=self.GetParameterKey)
    self.assertEqual(cpe_ms.ParameterKey, self.GetParameterKey())

  def start_session(self):
    # Not a test: handed to CpeManagementServer as its
    # start_periodic_session callback.
    self.start_session_called = True

  def testPeriodicEnable(self):
    """Setting Enable and Interval must schedule a periodic callback."""
    ms.PERIODIC_CALLBACK = MockPeriodicCallback
    io = MockIoloop()
    cpe_ms = ms.CpeManagementServer(platform_config=FakePlatformConfig(),
                                    port=0, ping_path='/',
                                    start_periodic_session=self.start_session,
                                    ioloop=io)
    cpe_ms.PeriodicInformEnable = 'true'
    cpe_ms.PeriodicInformInterval = '15'
    # cpe_ms should schedule the callbacks when Enable and Interval both set
    self.assertEqual(io.timeout_time, datetime.timedelta(0.0))
    self.assertEqual(len(periodic_callbacks), 1)
    cb = periodic_callbacks[0]
    self.assertTrue(cb.callback)
    # callback_time is in milliseconds.
    self.assertEqual(cb.callback_time, 15 * 1000)
    self.assertEqual(cb.io_loop, io)
    io.timeout_callback()
    self.assertTrue(cb.start_called)

  def testPeriodicLongInterval(self):
    """With a PeriodicInformTime set, the first inform must be delayed."""
    ms.PERIODIC_CALLBACK = MockPeriodicCallback
    io = MockIoloop()
    cpe_ms = ms.CpeManagementServer(platform_config=FakePlatformConfig(),
                                    port=0, ping_path='/',
                                    start_periodic_session=self.start_session,
                                    ioloop=io)
    cpe_ms.PeriodicInformEnable = 'true'
    cpe_ms.PeriodicInformTime = cwmpdate.format(datetime.datetime.now())
    cpe_ms.PeriodicInformInterval = '1200'
    # Just check that the delay is reasonable
    self.assertNotEqual(io.timeout_time, datetime.timedelta(seconds=0))

  def assertWithinRange(self, c, minr, maxr):
    # NOTE(review): helper assertion, currently unused; the ranges below
    # are checked inline instead.
    self.assertTrue(minr <= c <= maxr)

  def testSessionRetryWait(self):
    """Test $SPEC3 Table3 timings."""
    cpe_ms = ms.CpeManagementServer(platform_config=FakePlatformConfig(),
                                    port=5, ping_path='/')
    cpe_ms.PeriodicInformInterval = 100000
    # The retry wait is randomized; repeat many times to cover the range.
    for _ in range(1000):
      self.assertEqual(cpe_ms.SessionRetryWait(0), 0)
      self.assertTrue(5 <= cpe_ms.SessionRetryWait(1) <= 10)
      self.assertTrue(10 <= cpe_ms.SessionRetryWait(2) <= 20)
      self.assertTrue(20 <= cpe_ms.SessionRetryWait(3) <= 40)
      self.assertTrue(40 <= cpe_ms.SessionRetryWait(4) <= 80)
      self.assertTrue(80 <= cpe_ms.SessionRetryWait(5) <= 160)
      self.assertTrue(160 <= cpe_ms.SessionRetryWait(6) <= 320)
      self.assertTrue(320 <= cpe_ms.SessionRetryWait(7) <= 640)
      self.assertTrue(640 <= cpe_ms.SessionRetryWait(8) <= 1280)
      self.assertTrue(1280 <= cpe_ms.SessionRetryWait(9) <= 2560)
      self.assertTrue(2560 <= cpe_ms.SessionRetryWait(10) <= 5120)
      self.assertTrue(2560 <= cpe_ms.SessionRetryWait(99) <= 5120)
    # The ACS can reconfigure the backoff base and multiplier.
    cpe_ms.CWMPRetryMinimumWaitInterval = 10
    cpe_ms.CWMPRetryIntervalMultiplier = 2500
    for _ in range(1000):
      self.assertEqual(cpe_ms.SessionRetryWait(0), 0)
      self.assertTrue(10 <= cpe_ms.SessionRetryWait(1) <= 25)
      self.assertTrue(25 <= cpe_ms.SessionRetryWait(2) <= 62)
      self.assertTrue(62 <= cpe_ms.SessionRetryWait(3) <= 156)
      self.assertTrue(156 <= cpe_ms.SessionRetryWait(4) <= 390)
      self.assertTrue(390 <= cpe_ms.SessionRetryWait(5) <= 976)
      self.assertTrue(976 <= cpe_ms.SessionRetryWait(6) <= 2441)
      self.assertTrue(2441 <= cpe_ms.SessionRetryWait(7) <= 6103)
      self.assertTrue(6103 <= cpe_ms.SessionRetryWait(8) <= 15258)
      self.assertTrue(15258 <= cpe_ms.SessionRetryWait(9) <= 38146)
      self.assertTrue(38146 <= cpe_ms.SessionRetryWait(10) <= 95367)
      self.assertTrue(38146 <= cpe_ms.SessionRetryWait(99) <= 95367)
    # Check that the time never exceeds the periodic inform time.
    cpe_ms.PeriodicInformInterval = 30
    for _ in range(1000):
      self.assertEqual(cpe_ms.SessionRetryWait(0), 0)
      self.assertTrue(10 <= cpe_ms.SessionRetryWait(1) <= 25)
      self.assertTrue(12 <= cpe_ms.SessionRetryWait(2) <= 30)
      self.assertTrue(12 <= cpe_ms.SessionRetryWait(3) <= 30)

  def testValidateServer(self):
    """ValidateAcsUrl must honor the restrict_acs_hosts domain list."""

    def TryUrl(cpe, value):
      # NOTE(review): the cpe parameter is ignored; the closure always
      # validates against the enclosing scope's current cpe_ms.
      valid = True
      try:
        cpe_ms.ValidateAcsUrl(value)
      except ValueError:
        valid = False
      return valid

    cpe_ms = ms.CpeManagementServer(
        platform_config=FakePlatformConfig(), port=5, ping_path='/',
        restrict_acs_hosts='google.com .gfsvc.com foo.com')
    self.assertTrue(TryUrl(cpe_ms, 'https://bugger.gfsvc.com'))
    self.assertTrue(TryUrl(cpe_ms, 'https://acs.prod.gfsvc.com'))
    self.assertTrue(TryUrl(cpe_ms, 'https://acs.prod.google.com'))
    self.assertTrue(TryUrl(cpe_ms, 'https://google.com'))
    self.assertFalse(TryUrl(cpe_ms, 'https://imposter.evilgfsvc.com'))
    self.assertFalse(TryUrl(cpe_ms, 'https://evilgfsvc.com'))
    self.assertFalse(TryUrl(cpe_ms, 'https://gfsvc.com.evil.com'))
    # No restrictions
    cpe_ms = ms.CpeManagementServer(
        platform_config=FakePlatformConfig(), port=5, ping_path='/')
    self.assertTrue(TryUrl(cpe_ms, 'https://bugger.gfsvc.com'))
    self.assertTrue(TryUrl(cpe_ms, 'https://gfsvc.com.evil.com'))
    # Single domain
    cpe_ms = ms.CpeManagementServer(
        platform_config=FakePlatformConfig(), port=5, ping_path='/',
        restrict_acs_hosts='.gfsvc.com')
    self.assertTrue(TryUrl(cpe_ms, 'https://bugger.gfsvc.com'))
    self.assertTrue(TryUrl(cpe_ms, 'https://acs.prod.gfsvc.com'))
    self.assertFalse(TryUrl(cpe_ms, 'https://acs.prod.google.com'))
    self.assertFalse(TryUrl(cpe_ms, 'https://google.com'))
    self.assertFalse(TryUrl(cpe_ms, 'https://imposter.evilgfsvc.com'))
    self.assertFalse(TryUrl(cpe_ms, 'https://evilgfsvc.com'))
    self.assertFalse(TryUrl(cpe_ms, 'https://gfsvc.com.evil.com'))

  def testReadParameters(self):
    """All exported parameters must at least be readable without error."""
    cpe_ms = ms.CpeManagementServer(
        platform_config=None, port=5, ping_path='/',
        restrict_acs_hosts='.gfsvc.com')
    _ = cpe_ms.CWMPRetryMinimumWaitInterval
    _ = cpe_ms.CWMPRetryIntervalMultiplier
    _ = cpe_ms.ConnectionRequestPassword
    _ = cpe_ms.ConnectionRequestUsername
    _ = cpe_ms.DefaultActiveNotificationThrottle
    _ = cpe_ms.EnableCWMP
    _ = cpe_ms.PeriodicInformEnable
    _ = cpe_ms.PeriodicInformInterval
    _ = cpe_ms.PeriodicInformTime
    _ = cpe_ms.Password
    _ = cpe_ms.Username

  def testWriteParameters(self):
    """All writable parameters must accept assignment without error."""
    cpe_ms = ms.CpeManagementServer(
        platform_config=None, port=5, ping_path='/',
        restrict_acs_hosts='.gfsvc.com')
    cpe_ms.CWMPRetryMinimumWaitInterval = 10
    cpe_ms.CWMPRetryIntervalMultiplier = 100
    cpe_ms.ConnectionRequestPassword = 'pass'
    cpe_ms.ConnectionRequestUsername = 'user'
    cpe_ms.DefaultActiveNotificationThrottle = True
    cpe_ms.PeriodicInformEnable = True
    cpe_ms.PeriodicInformInterval = 10
    cpe_ms.PeriodicInformTime = '2012-08-22T15:50:14.725772Z'
    cpe_ms.Password = ' pass'
    cpe_ms.Username = ' user'

  def testTransaction(self):
    """Abandoned transactions roll back; committed ones persist."""
    cpe_ms = ms.CpeManagementServer(
        platform_config=None, port=5, ping_path='/',
        restrict_acs_hosts='.gfsvc.com')
    orig = copy.deepcopy(cpe_ms.config)
    # sanity
    self.assertEqual(orig.CWMPRetryMinimumWaitInterval,
                     cpe_ms.CWMPRetryMinimumWaitInterval)
    cpe_ms.StartTransaction()
    cpe_ms.AbandonTransaction()
    self.assertEqual(orig.CWMPRetryMinimumWaitInterval,
                     cpe_ms.CWMPRetryMinimumWaitInterval)
    cpe_ms.StartTransaction()
    cpe_ms.CommitTransaction()
    self.assertEqual(orig.CWMPRetryMinimumWaitInterval,
                     cpe_ms.CWMPRetryMinimumWaitInterval)
    cpe_ms.StartTransaction()
    cpe_ms.CWMPRetryMinimumWaitInterval *= 2
    cpe_ms.AbandonTransaction()
    self.assertEqual(orig.CWMPRetryMinimumWaitInterval,
                     cpe_ms.CWMPRetryMinimumWaitInterval)
    cpe_ms.StartTransaction()
    cpe_ms.CWMPRetryMinimumWaitInterval *= 2
    cpe_ms.CommitTransaction()
    self.assertEqual(orig.CWMPRetryMinimumWaitInterval * 2,
                     cpe_ms.CWMPRetryMinimumWaitInterval)
if __name__ == '__main__':
  # Run the tests when this file is executed directly.
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#
"""Test app for TR-069 CPE/ACS interface library."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import unittest
import google3
import api
import core
class Word(core.Exporter):
  """A trivial TR-069 object exporting a single 'word' parameter."""

  def __init__(self):
    core.Exporter.__init__(self)
    self.Export(params=['word'])
    # No initial value; tests set it via SetParameterValues.
    self.word = None
class TestObject(core.Exporter):
  """A TR-069 object exporting a list of Word objects named 'Thingy'."""

  def __init__(self):
    core.Exporter.__init__(self)
    self.Export(lists=['Thingy'])
    # ThingyList holds the created instances; Thingy is the factory used
    # when AddObject creates a new list entry.
    self.ThingyList = {}
    self.Thingy = Word
class TestSimpleRoot(core.Exporter):
  """A root object exporting one string parameter with a fixed value."""

  def __init__(self):
    core.Exporter.__init__(self)
    self.Export(params=['SomeParam'])
    self.SomeParam = 'SomeParamValue'
class ApiTest(unittest.TestCase):
  """Tests for api.CPE AddObject and parameter get/set."""

  def testObject(self):
    """AddObject then set/get a parameter on the newly created instance."""
    root = core.Exporter()
    root.Export(objects=['Test'])
    root.Test = TestObject()
    root.ValidateExports()
    cpe = api.CPE(root)
    #pylint: disable-msg=W0612
    # AddObject returns the new instance's index plus a status code.
    (idx, status) = cpe.AddObject('Test.Thingy.', 0)
    name = 'Test.Thingy.%d' % int(idx)
    #pylint: disable-msg=E1103
    cpe.SetParameterValues([('%s.word' % name, 'word1')], 0)
    self.assertEqual(root.GetExport(name).word, 'word1')
    # Setting a parameter that was never exported must raise KeyError.
    self.assertRaises(KeyError, cpe._SetParameterValue,
                      '%s.not_exist' % name, 'word1')
    result = cpe.GetParameterValues(['%s.word' % name])
    self.assertEqual(result, [('%s.word' % name, 'word1')])

  def testGetParameterValuesEmpty(self):
    """GetParameterValues('') should return the root's parameters."""
    cpe = api.CPE(TestSimpleRoot())
    result = cpe.GetParameterValues([''])
    self.assertTrue(result)
    self.assertEqual(result[0], ('SomeParam', 'SomeParamValue'))
if __name__ == '__main__':
  # Run the tests when this file is executed directly.
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#
"""Tests for auto-generated tr???_*.py."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import unittest
import core
import tr098_v1_2 as tr098
class MyModel(tr098.InternetGatewayDevice_v1_4):
  """Fills in just enough of the generated tr-098 model to validate."""

  def __init__(self):
    tr098.InternetGatewayDevice_v1_4.__init__(self)
    self.InternetGatewayDevice = core.TODO()
    # Replace the generated UDPEchoConfig *class* attribute with an
    # instance of it, then populate every exported parameter so
    # ValidateExports() passes.
    u = self.UDPEchoConfig = self.UDPEchoConfig()
    u.BytesReceived = 0
    u.Enable = True
    u.PacketsReceived = 0
    u.TimeFirstPacketReceived = 0
    u.EchoPlusEnabled = False
    u.UDPPort = 0
    u.EchoPlusSupported = False
    u.Interface = ''
    u.PacketsResponded = ''
    u.SourceIPAddress = '1.2.3.4'
    u.TimeLastPacketReceived = 0
    u.BytesResponded = 0
    # Subtrees this test does not model.
    self.UploadDiagnostics = core.TODO()
    self.Capabilities = core.TODO()
    self.DownloadDiagnostics = core.TODO()
class StdTest(unittest.TestCase):
  """Smoke test for the auto-generated tr-098 data model."""

  def testStd(self):
    # Instantiating and validating catches missing or extra exports.
    o = MyModel()
    o.ValidateExports()
    print core.Dump(o)
if __name__ == '__main__':
  # Run the tests when this file is executed directly.
  unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#pylint: disable-msg=W0404
#
"""Implement the TR-069 CWMP Sesion handling."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import functools
import tornado.httpclient
import tornado.ioloop
# SPEC3 = TR-069_Amendment-3.pdf
# http://www.broadband-forum.org/technical/download/TR-069_Amendment-3.pdf
# State-machine diagram for a CWMP session, in graphviz "dot" syntax;
# main() prints it so it can be rendered to a PDF.
graphviz = r"""
digraph DLstates {
node [shape=box]
CONNECT [label="CONNECT"]
ACTIVE [label="ACTIVE\nsend responses or requests"]
ONHOLD [label="ONHOLD\nsend responses"]
NOMORE [label="NOMORE\nsend responses"]
DONE [label="DONE\nclose session"]
CONNECT -> ACTIVE [label="Send Inform"]
ACTIVE -> ONHOLD [label="onhold=True"]
ONHOLD -> ACTIVE [label="onhold=False"]
ACTIVE -> NOMORE [label="send empty POST"]
NOMORE -> DONE [label="receive empty Body"]
}
"""

# Indirection so unit tests can substitute a mock HTTP client class.
HTTPCLIENT = tornado.httpclient.AsyncHTTPClient
class CwmpSession(object):
  """State machine to handle the lifecycle of a TCP session with the ACS."""

  # Session states; see the graphviz diagram above for the transitions.
  CONNECT = 'CONNECT'
  ACTIVE = 'ACTIVE'
  ONHOLD = 'ONHOLD'
  NOMORE = 'NOMORE'
  DONE = 'DONE'

  def __init__(self, acs_url, ioloop=None):
    """Args:
      acs_url: the URL of the ACS to talk to.
      ioloop: tornado ioloop for the HTTP client (default: the singleton).
    """
    loop = ioloop or tornado.ioloop.IOLoop.instance()
    self.http = HTTPCLIENT(max_simultaneous_connections=1, io_loop=loop)
    self.acs_url = acs_url
    self.cookies = None
    self.my_ip = None
    self.ping_received = False
    self.state = self.CONNECT

  def state_update(self, sent_inform=None, on_hold=None,
                   cpe_to_acs_empty=None, acs_to_cpe_empty=None):
    """Advance the state machine based on what just happened."""
    if self._connect() and sent_inform:
      self.state = self.ACTIVE
    elif self._active():
      if on_hold:
        self.state = self.ONHOLD
      elif cpe_to_acs_empty:
        self.state = self.NOMORE
    elif self._onhold() and on_hold is False:
      # explicitly False, not just the default None
      self.state = self.ACTIVE
    elif self._nomore() and acs_to_cpe_empty:
      self.state = self.DONE

  def _connect(self):
    return self.state == self.CONNECT

  def _active(self):
    return self.state == self.ACTIVE

  def _onhold(self):
    return self.state == self.ONHOLD

  def _nomore(self):
    return self.state == self.NOMORE

  def _done(self):
    return self.state == self.DONE

  def inform_required(self):
    """An Inform must be sent before anything else in a session."""
    return self._connect()

  def request_allowed(self):
    """The CPE may issue its own requests only while ACTIVE."""
    return self._active()

  def response_allowed(self):
    """Responses may be sent in any state after the Inform, until DONE."""
    return self._active() or self._onhold() or self._nomore()

  def should_close(self):
    return self._done()

  def __del__(self):
    self.close()

  def close(self):
    """Tear down the session; returns whether a ping arrived during it."""
    cache.flush()
    self.http = None
    return self.ping_received
class cache(object):
  """A global cache of arbitrary data for the lifetime of one CWMP session.

  @cwmp_session.cache is a decorator to cache the return
  value of a function for the remainder of the session with the ACS.
  Calling the function again with the same arguments will be serviced
  from the cache.

  This is intended for very expensive operations, particularly where
  a process is forked and its output parsed.
  """

  # Shared by all decorated functions; flushed when the session closes.
  _thecache = dict()

  @staticmethod
  def flush():
    """Flush all cached data."""
    # Fixed: the old code deleted keys while iterating .keys(), which is
    # needlessly O(n) on py2 and raises RuntimeError on py3. clear() is
    # the idiomatic single call.
    cache._thecache.clear()

  def __init__(self, func):
    self.func = func
    self.obj = None  # the bound instance, when decorating an instance method

  def __get__(self, obj, objtype):
    """Support instance methods."""
    self.obj = obj
    return functools.partial(self.__call__, obj)

  def __call__(self, *args):
    key = self._cache_key(args)
    try:
      return cache._thecache[key]
    except KeyError:
      val = self.func(*args)
      cache._thecache[key] = val
      return val

  def _cache_key(self, args):
    """Concatenate the function, object, and all arguments into a key.

    Args:
      args: the tuple of positional arguments the function was called with.
        (Fixed: was declared *args but only ever called with a single
        tuple, silently nesting the arguments one level deep.)
    Returns:
      a string key unique to this function/object/arguments combination.
    """
    return '\0'.join([repr(x) for x in [self.func, self.obj, args]])
def main():
  """Emit the session state machine in graphviz format (manual test aid)."""
  # pylint: disable-msg=C6003
  # Fixed typo in the help text: "grapviz" -> "graphviz".
  print('# pipe this to graphviz, ex:')
  print('# ./cwmp_session.py | dot -Tpdf -ocwmp_session.pdf')
  print(graphviz)
if __name__ == '__main__':
main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#
"""Tests for persist.py."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import os
import tempfile
import unittest
import google3
import core
import persist
class Leaf(core.Exporter):
  """A minimal Exporter exporting a single parameter, Number."""

  def __init__(self, n):
    # defaults= pre-sets self.Number so Export() validation can find it.
    core.Exporter.__init__(self, defaults=dict(Number=n))
    self.Export(params=['Number'])
class Tree(Leaf):
  """A Leaf that also exports a sub-object (Tree) and an object list (Sub)."""

  def __init__(self, n=0, subtree=None):
    Leaf.__init__(self, n)
    if not subtree:
      subtree = Leaf(0)
    self.Export(objects=['Tree'], lists=['Sub'])
    self.Tree = subtree
    self.SubList = {}  # exported list 'Sub' is stored as SubList per core
# Assigned after the class body because Tree refers to itself: members of
# the 'Sub' list are constructed as Tree objects.
Tree.Sub = Tree
class PersistTest(unittest.TestCase):
  """Round-trips a Tree hierarchy through persist.Store and checks it."""

  def testPersist(self):
    fd, dbname = tempfile.mkstemp()
    os.close(fd)
    print 'database file: %s' % dbname
    t = Tree(5, Leaf(6))
    t.SubList[7] = Tree(77, Tree(777, Leaf(7777)))
    t.SubList[11] = Tree(88, Tree(888, Leaf(8888)))
    t.SubList[11].SubList[9] = Tree(99, Leaf(9999))
    print core.Dump(t)
    p = persist.Store(dbname, t)
    p.Save()
    # Load into a freshly-constructed, mostly-empty tree.
    t2 = Tree(0, Leaf(0))
    p2 = persist.Store(dbname, t2)
    p2.Load()
    print core.Dump(t2)
    self.assertEqual(t.Number, 5)
    self.assertEqual(t2.Number, 5)
    self.assertEqual(t.SubList[11].SubList[9].Tree.Number, 9999)
    self.assertEqual(t2.SubList[11].SubList[9].Tree.Number, 9999)
    self.assertEqual(t.SubList[11].Tree.Tree.Number, 8888)
    # NOTE(review): Load() apparently does not recreate nested non-list
    # sub-objects (the .Tree.Tree chain) nor absent list indices --
    # confirm this is the intended persist behavior.
    self.assertRaises(AttributeError, lambda: t2.SubList[11].Tree.Tree.Number)
    self.assertRaises(KeyError, lambda: t2.SubList[12].Tree.Tree.Number)
if __name__ == '__main__':
unittest.main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fix sys.path so it can find our libraries."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import os.path
import sys
# Prepend our vendored library copies (and the parent directory) to the
# module search path so they take precedence over any system-installed
# versions. Relative to this file's own directory, not the CWD.
mydir = os.path.dirname(__file__)
sys.path = [
    os.path.join(mydir, 'vendor/tornado'),
    os.path.join(mydir, 'vendor/bup/lib'),
    os.path.join(mydir, 'vendor'),
    os.path.join(mydir, '..'),
] + sys.path
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for tr-69 Download and Scheduled Download."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import hashlib
import json
import os
import random
import sys
import tempfile
import google3
import helpers
import tornado
import tornado.httpclient
import tornado.ioloop
import tornado.web
# Unit tests can override this to pass in a mock
HTTPCLIENT = tornado.httpclient.AsyncHTTPClient
# tr-69 fault codes
DOWNLOAD_FAILED = 9010
def _uri_path(url):
pos = url.find('://')
if pos >= 0:
url = url[pos+3:]
pos = url.find('/')
if pos >= 0:
url = url[pos:]
return url
def calc_http_digest(method, uripath, qop, nonce, cnonce, nc,
                     username, realm, password):
  """Compute the RFC 2617 HTTP Digest 'response' value (MD5, qop=auth).

  Args mirror the fields of the WWW-Authenticate/Authorization headers.
  Returns:
    the hex digest string to send as response="...".
  """
  def H(s):
    # hashlib requires bytes; encode explicitly so this works on both
    # Python 2 and 3 and handles non-ASCII credentials instead of raising.
    if not isinstance(s, bytes):
      s = s.encode('utf-8')
    return hashlib.md5(s).hexdigest()

  def KD(secret, data):
    return H(secret + ':' + data)
  A1 = username + ':' + realm + ':' + password
  A2 = method + ':' + uripath
  digest = KD(H(A1), nonce + ':' + nc + ':' + cnonce + ':' + qop + ':' + H(A2))
  return digest
class HttpDownload(object):
  """Downloads one file over HTTP(S), with optional Digest authentication.

  Uses tornado's AsyncHTTPClient; completion is reported through
  download_complete_cb(faultcode, faultstring, tempfile).
  """

  def __init__(self, url, username=None, password=None,
               download_complete_cb=None, ioloop=None, download_dir=None):
    # NOTE(review): str(None) yields the truthy string 'None'; if a caller
    # passes username=None the "self.username and self.password" checks
    # below still fire -- confirm callers always pass real credentials
    # or empty strings.
    self.url = str(url)
    self.username = str(username)
    self.password = str(password)
    self.download_complete_cb = download_complete_cb
    self.ioloop = ioloop or tornado.ioloop.IOLoop.instance()
    self.download_dir = download_dir

  def fetch(self):
    """Begin downloading file."""
    self.auth_header = None  # filled in after a 401 challenge
    self.tempfile = None
    return self._start_download()

  def _start_download(self):
    """Issue the async HTTP request (also used for the Digest-auth retry)."""
    print 'starting (auth_header=%r)' % self.auth_header
    if not self.tempfile:
      # delete=True: the file disappears if we crash before handoff.
      self.tempfile = tempfile.NamedTemporaryFile(delete=True,
                                                  dir=self.download_dir)
    kwargs = dict(url=self.url,
                  request_timeout=3600.0,
                  streaming_callback=self.tempfile.write,
                  use_gzip=True, allow_ipv6=True,
                  user_agent='catawampus-tr69')
    if self.auth_header:
      # Retry after a 401: send the Digest Authorization header we built.
      kwargs.update(dict(headers=dict(Authorization=self.auth_header)))
    elif self.username and self.password:
      # First attempt: hand credentials to tornado's built-in auth support.
      kwargs.update(dict(auth_username=self.username,
                         auth_password=self.password))
    req = tornado.httpclient.HTTPRequest(**kwargs)
    self.http_client = HTTPCLIENT(io_loop=self.ioloop)
    self.http_client.fetch(req, self._async_fetch_callback)

  def _calculate_auth_header(self, response):
    """HTTP Digest Authentication.

    Builds the Authorization header value from the server's 401
    WWW-Authenticate challenge, or returns None if the challenge is
    absent or not Digest.
    """
    h = response.headers.get('www-authenticate', None)
    if not h:
      return
    authtype, paramstr = h.split(' ', 1)
    if authtype != 'Digest':
      return
    # NOTE(review): naive challenge parsing -- a quoted value containing
    # ',' or '=' (e.g. a base64 opaque with padding) will break this
    # split/assert. Confirm the ACS never sends such values.
    params = {}
    for param in paramstr.split(','):
      name, value = param.split('=')
      assert(value.startswith('"') and value.endswith('"'))
      params[name] = value[1:-1]
    uripath = _uri_path(self.url)
    nc = '00000001'  # first (and only) request with this cnonce
    nonce = params['nonce']
    realm = params['realm']
    opaque = params.get('opaque', None)
    cnonce = str(random.getrandbits(32))
    username = self.username
    password = self.password
    qop = 'auth'
    returns = dict(uri=uripath,
                   qop=qop,
                   nc=nc,
                   cnonce=cnonce,
                   nonce=nonce,
                   username=username,
                   realm=realm)
    if opaque:
      returns['opaque'] = opaque
    returns['response'] = calc_http_digest(method='GET',
                                           uripath=uripath,
                                           qop=qop,
                                           nonce=nonce,
                                           cnonce=cnonce,
                                           nc=nc,
                                           username=username,
                                           realm=realm,
                                           password=password)
    returnlist = [('%s="%s"' % (k, v)) for k, v in returns.items()]
    return 'Digest %s' % ','.join(returnlist)

  def _async_fetch_callback(self, response):
    """Called once when the HTTP request completes (or fails).

    Chunks are written to self.tempfile by streaming_callback; this
    handles the final status, including one Digest-auth retry on 401.
    """
    if (response.error and response.error.code == 401 and
        not self.auth_header and self.username and self.password):
      print '401 error, attempting Digest auth'
      self.auth_header = self._calculate_auth_header(response)
      if self.auth_header:
        self._start_download()
        return
    self.tempfile.flush()
    if response.error:
      print('Download failed: {0!r}'.format(response.error))
      print json.dumps(response.headers, indent=2)
      self.tempfile.close()
      self.download_complete_cb(
          DOWNLOAD_FAILED,
          'Download failed {0!s}'.format(response.error.code),
          None)
    else:
      # Hand the still-open tempfile to the callback; it owns it now.
      self.download_complete_cb(0, '', self.tempfile)
      print('Download success: {0}'.format(self.tempfile.name))
def main():
  """Manual test: ./download.py [url] [username] [password]."""
  ioloop = tornado.ioloop.IOLoop.instance()
  url = sys.argv[1] if len(sys.argv) > 1 else 'http://www.google.com/'
  username = sys.argv[2] if len(sys.argv) > 2 else None
  password = sys.argv[3] if len(sys.argv) > 3 else None
  print('using URL: %s' % url)

  def done(faultcode, faultstring, unused_file):
    # Stop the loop so the script exits once the transfer finishes.
    print('download finished: %r %r' % (faultcode, faultstring))
    ioloop.stop()

  # Bug fixes: the old code passed ioloop as the url positional argument,
  # then called a nonexistent dl.download() with a nonexistent
  # delay_seconds parameter. HttpDownload's entry point is fetch().
  dl = HttpDownload(url, username=username, password=password,
                    download_complete_cb=done, ioloop=ioloop)
  dl.fetch()
  ioloop.start()
if __name__ == '__main__':
main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""External API for TR-069 support.
The classes here represent the ACS (Auto Configuration Server) and CPE
(Customer Premises Equipment) endpoints in the TR-069 standard API. You can
hand them an hierarchy of tr.core.Exporter and use the
TR-069 access methods to manipulate it.
This file doesn't implement the XML (SOAP-like) wrapper around the TR-069
API calls; it's just a python version of the API.
"""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import download
class SetParameterErrors(Exception):
  """Aggregates the exceptions raised during one SetParameterValues transaction."""

  def __init__(self, error_list, msg):
    super(SetParameterErrors, self).__init__(msg)
    # The individual Parameter*Error exceptions, one per failed parameter.
    self.error_list = error_list
class ParameterNameError(KeyError):
  """Raised for a SetParameterValue to a nonexistent parameter."""

  def __init__(self, parameter, msg):
    KeyError.__init__(self, msg)
    self.parameter = parameter  # the full dotted parameter name
class ParameterTypeError(TypeError):
  """Raised when a SetParameterValue has the wrong type."""

  def __init__(self, parameter, msg):
    TypeError.__init__(self, msg)
    self.parameter = parameter  # the full dotted parameter name
class ParameterValueError(ValueError):
  """Raised when a SetParameterValue has an invalid value."""

  def __init__(self, parameter, msg):
    ValueError.__init__(self, msg)
    self.parameter = parameter  # the full dotted parameter name
class ParameterNotWritableError(AttributeError):
  """Raised when a SetParameterValue tries to set a read-only parameter."""

  def __init__(self, parameter, msg):
    AttributeError.__init__(self, msg)
    self.parameter = parameter  # the full dotted parameter name
class TR069Service(object):
  """Represents a TR-069 SOAP RPC service."""

  def __init__(self):
    pass

  def GetRPCMethods(self):
    """Return a sorted list of callable RPC method names.

    By convention, RPC methods are exactly the attributes whose names
    begin with an uppercase letter.
    """
    return [name for name in sorted(dir(self)) if name[0].isupper()]
class ACS(TR069Service):
  """Represents a TR-069 ACS (Auto Configuration Server).

  Only Inform is implemented; the remaining RPCs are stubs that concrete
  ACS implementations must override.
  """

  def __init__(self):
    TR069Service.__init__(self)
    self.cpe = None  # remembered from the most recent Inform

  def Inform(self, cpe, root, events, max_envelopes,
             current_time, retry_count, parameter_list):
    """Called when the CPE first connects to the ACS."""
    print 'ACS.Inform'
    self.cpe = cpe

  def TransferComplete(self, command_key, fault_struct,
                       start_time, complete_time):
    """A file transfer requested by the ACS has been completed."""
    raise NotImplementedError()

  def AutonomousTransferComplete(self,
                                 announce_url, transfer_url,
                                 is_download, file_type,
                                 file_size, target_filename, fault_struct,
                                 start_time, complete_time):
    """A file transfer *not* requested by the ACS has been completed."""
    raise NotImplementedError()

  def Kicked(self, command, referer, arg, next_url):
    """Called whenever the CPE is kicked by the ACS."""
    raise NotImplementedError()

  def RequestDownload(self, file_type, file_type_args):
    """The CPE wants us to tell it to download something."""
    raise NotImplementedError()

  def DUStateChangeComplete(self, results, command_key):
    """A requested ChangeDUState has completed."""
    raise NotImplementedError()

  def AutonomousDUStateChangeComplete(self, results):
    """A DU state change that was not requested by the ACS has completed."""
    raise NotImplementedError()
class CPE(TR069Service):
  """Represents a TR-069 CPE (Customer Premises Equipment).

  Wraps a tr.core.Exporter data-model tree (self.root) and a download
  manager, exposing the TR-069 CPE-side RPCs as Python methods.
  """

  def __init__(self, root):
    TR069Service.__init__(self)
    # ParameterKey from the most recent successful write operation.
    self._last_parameter_key = ''
    self.root = root  # root of the tr.core.Exporter hierarchy
    self.download_manager = download.DownloadManager()
    self.transfer_complete_received_cb = None
    self.inform_response_received_cb = None

  def setCallbacks(self, send_transfer_complete,
                   transfer_complete_received,
                   inform_response_received):
    """Wire up the callbacks used to notify the session layer."""
    self.download_manager.send_transfer_complete = send_transfer_complete
    self.transfer_complete_received_cb = transfer_complete_received
    self.inform_response_received_cb = inform_response_received

  def startup(self):
    """Handle any initialization after reboot."""
    # Resume downloads that were queued before the reboot.
    self.download_manager.RestoreDownloads()

  def _SetParameterKey(self, value):
    # ParameterKey is handled by the CPE itself, not the data-model tree.
    self._last_parameter_key = value

  def getParameterKey(self):
    """Return the ParameterKey from the last completed write RPC."""
    return self._last_parameter_key

  def _SplitParameterName(self, name):
    """Split a name like Top.Object.1.Name into (Top.Object.1, Name)."""
    result = name.rsplit('.', 1)
    if len(result) == 2:
      return result[0], result[1]
    elif len(result) == 1:
      # No dot: there is no parent component.
      return None, result[0]
    elif not result:
      # NOTE(review): str.rsplit always returns at least one element,
      # so this branch looks unreachable -- confirm before relying on it.
      return None
    else:
      assert False

  def _SetParameterValue(self, name, value):
    """Given a parameter (which can include an object), set its value."""
    if name == 'ParameterKey':
      self._SetParameterKey(value)
      return None
    else:
      # Returns the object modified so the caller can track dirty objects.
      return self.root.SetExportParam(name, value)

  def _ConcludeTransaction(self, objects, do_commit):
    """Commit or abandon all pending writes.

    Args:
      objects: list of dirty objects to commit
      do_commit: call CommitTransaction if True, else AbandonTransaction

    Returns: the response code to return

    SetParameterValues is an atomic transaction, all parameters are set or
    none of them are. We set obj.dirty and call obj.StartTransaction on
    every object written to. Now we walk back through the dirtied objects
    to finish the transaction.
    """
    # TODO(dgentry) At some point there will be interdependencies between
    # objects. We'll need to develop a means to express those dependencies
    # and walk the dirty objects in a specific order.
    for obj in objects:
      assert obj.dirty
      obj.dirty = False
      if do_commit:
        obj.CommitTransaction()
      else:
        obj.AbandonTransaction()
    return 0  # all values changed successfully

  def SetParameterValues(self, parameter_list, parameter_key):
    """Sets parameters on some objects.

    Args:
      parameter_list: a list of (name, value) pairs to set.
      parameter_key: the ParameterKey to store if every set succeeds.
    Returns:
      0 if all values changed successfully.
    Raises:
      SetParameterErrors: aggregating the individual parameter failures.
    """
    dirty = set()
    error_list = []
    for name, value in parameter_list:
      obj = None
      try:
        obj = self._SetParameterValue(name, value)
      except TypeError as e:
        error_list.append(ParameterTypeError(parameter=name, msg=str(e)))
      except ValueError as e:
        error_list.append(ParameterValueError(parameter=name, msg=str(e)))
      except KeyError as e:
        error_list.append(ParameterNameError(parameter=name, msg=str(e)))
      except AttributeError as e:
        error_list.append(ParameterNotWritableError(parameter=name, msg=str(e)))
      if obj:
        dirty.add(obj)
    if error_list:
      # Atomic semantics: any failure abandons every pending write.
      self._ConcludeTransaction(objects=dirty, do_commit=False)
      raise SetParameterErrors(error_list=error_list,
                               msg='Transaction Errors: %d' % len(error_list))
    else:
      self._SetParameterKey(parameter_key)
      return self._ConcludeTransaction(dirty, True)

  def _GetParameterValue(self, name):
    """Given a parameter (which can include an object), return its value."""
    if name == 'ParameterKey':
      return self._last_parameter_key
    else:
      return self.root.GetExport(name)

  def GetParameterValues(self, parameter_names):
    """Gets parameters from some objects.

    Args:
      parameter_names: a list of parameter name strings.
    Returns:
      A list of (name, value) tuples.
    """
    result = []
    # Names ending in '.' are expanded in place: the expansion is appended
    # to parameter_names while it is being iterated, which (in CPython)
    # makes the loop also visit the newly appended entries.
    for param in parameter_names:
      if not param:
        # tr69 A.3.2.2: empty string indicates top of the name hierarchy.
        paramlist = self.root.ListExports(None, False)
        parameter_names.extend(paramlist)
      elif param.endswith('.'):
        paramlist = self.root.ListExports(param[:-1], False)
        for p in paramlist:
          parameter_names.append(param + p)
      else:
        result.append((param, self._GetParameterValue(param)))
    return result

  def GetParameterNames(self, parameter_path, next_level_only):
    """Get the names of parameters or objects (possibly recursively)."""
    return self.root.ListExports(parameter_path, not next_level_only)

  def _SetParameterAttribute(self, param, attr, attr_value):
    """Set an attribute of a parameter."""
    # The attribute lives on the parent object, keyed by the leaf name.
    (param, unused_param_name) = self._SplitParameterName(param)
    self.root.SetExportAttr(param, attr, attr_value)

  def SetParameterAttributes(self, parameter_list):
    """Set attributes (access control, notifications) on some parameters.

    Args:
      parameter_list: a dict with a 'Name' key plus attribute/value pairs.
    """
    param_name = parameter_list['Name']
    for attr, attr_value in parameter_list.iteritems():
      if attr != 'Name':
        self._SetParameterAttribute(param_name, attr, attr_value)

  def GetParameterAttributes(self, parameter_names):
    """Get attributes (access control, notifications) on some parameters."""
    raise NotImplementedError()

  def AddObject(self, object_name, parameter_key):
    """Create a new object with default parameters."""
    assert object_name.endswith('.')
    #pylint: disable-msg=W0612
    (idx, obj) = self.root.AddExportObject(object_name[:-1])
    self._SetParameterKey(parameter_key)
    return (idx, 0)  # successfully created

  def DeleteObject(self, object_name, parameter_key):
    """Delete an object and its sub-objects/parameters."""
    assert object_name.endswith('.')
    # 'A.B.2.' splits so path[-2] is the index ('2') within list 'A.B'.
    path = object_name.split('.')
    self.root.DeleteExportObject('.'.join(path[:-2]), path[-2])
    self._SetParameterKey(parameter_key)
    return 0  # successfully deleted

  def Download(self, command_key, file_type, url, username, password,
               file_size, target_filename, delay_seconds,
               success_url, failure_url):  #pylint: disable-msg=W0613
    """Initiate a download immediately or after a delay."""
    return self.download_manager.NewDownload(
        command_key=command_key,
        file_type=file_type,
        url=url,
        username=username,
        password=password,
        file_size=file_size,
        target_filename=target_filename,
        delay_seconds=delay_seconds)

  def Reboot(self, command_key):
    """Reboot the CPE."""
    self.download_manager.Reboot(command_key)

  def GetQueuedTransfers(self):
    """Retrieve a list of queued file transfers (downloads and uploads)."""
    return self.download_manager.GetAllQueuedTransfers()

  def ScheduleInform(self, delay_seconds, command_key):
    """Request that this CPE call Inform() at some point in the future."""
    raise NotImplementedError()

  def SetVouchers(self, voucher_list):
    """Set option vouchers (deprecated)."""
    raise NotImplementedError()

  def GetOptions(self, option_name):
    """Get option vouchers (deprecated)."""
    raise NotImplementedError()

  def Upload(self, command_key, file_type, url,
             username, password, delay_seconds):
    """Initiate a file upload immediately or after a delay."""
    raise NotImplementedError()

  def FactoryReset(self):
    """Factory reset the CPE."""
    raise NotImplementedError()

  def GetAllQueuedTransfers(self):
    """Get a list of all uploads/downloads that are still in the queue."""
    return self.download_manager.GetAllQueuedTransfers()

  def ScheduleDownload(self, command_key, file_type, url,
                       username, password, file_size, target_filename,
                       time_window_list):
    """Schedule a download for some time in the future."""
    raise NotImplementedError()

  def CancelTransfer(self, command_key):
    """Cancel a scheduled file transfer."""
    return self.download_manager.CancelTransfer(command_key)

  def ChangeDUState(self, operations, command_key):
    """Trigger an install, update, or uninstall operation."""
    raise NotImplementedError()

  def transferCompleteResponseReceived(self):
    """The ACS acknowledged our TransferComplete message."""
    if self.transfer_complete_received_cb:
      self.transfer_complete_received_cb()
    return self.download_manager.TransferCompleteResponseReceived()

  def informResponseReceived(self):
    """The ACS acknowledged our Inform message."""
    if self.inform_response_received_cb:
      self.inform_response_received_cb()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#
"""Base classes for TR-069 model objects.
The standard subclasses of these objects are auto-generated from the
tr-*.xml schema files and named tr???_*.py in this directory. You can
also define nonstandard data models by extending those classes or
Exporter yourself.
"""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import string
class NotAddableError(KeyError):
  """Raised when AddObject is not allowed on an object list.

  Also raised when a newly-constructed list member fails ValidateExports.
  """
  pass
class SchemaError(Exception):
  """Raised when an object claims to implement a schema but doesn't.

  See Exporter.ValidateExports, which raises this for exported names
  that have no corresponding attribute.
  """
  pass
class AutoDict(object):
  """A dict-like object whose content is generated dynamically.

  Example: a dict listing the files in a directory should be iterable
  (yielding filenames and their objects) and indexable (object for a given
  filename), but there is no point caching the filename list -- the kernel
  already has it in real time. AutoDict provides the dict interface while
  letting you implement iteritems/getitem/setitem/delitem yourself.

  Either derive from this class or pass your own iteritems, getitem, etc
  to the constructor, depending on how you want to do your namespacing.
  Operations you don't supply raise NotImplementedError when used.
  """

  def __init__(self, name, iteritems=None,
               getitem=None, setitem=None, delitem=None):
    self.__name = name
    self.__iteritems = iteritems or self._Bad('iteritems')
    self.__getitem = getitem or self._Bad('getitem')
    self.__setitem = setitem or self._Bad('setitem')
    self.__delitem = delitem or self._Bad('delitem')

  def _Bad(self, funcname):
    """Return a stub that raises NotImplementedError when invoked."""
    #pylint: disable-msg=W0613
    def Fn(*unused_args, **unused_kwargs):
      raise NotImplementedError('%r must override %s'
                                % (self.__name, funcname))
    return Fn

  def iteritems(self):  #pylint: disable-msg=C6409
    return self.__iteritems()

  def __getitem__(self, key):
    return self.__getitem(key)

  def __setitem__(self, key, value):
    return self.__setitem(key, value)

  def __delitem__(self, key):
    return self.__delitem(key)

  def __contains__(self, key):
    # Membership is defined by whether lookup raises KeyError.
    try:
      self[key]
      return True
    except KeyError:
      return False

  def iterkeys(self):  #pylint: disable-msg=C6409
    for key, unused_value in self.iteritems():
      yield key

  def itervalues(self):  #pylint: disable-msg=C6409
    for unused_key, value in self.iteritems():
      yield value

  def __iter__(self):
    return self.iterkeys()

  def __len__(self):
    # No cached size: count by iterating.
    return sum(1 for unused_key in self)

  def keys(self):  #pylint: disable-msg=C6409
    return [k for k in self.iterkeys()]

  def values(self):  #pylint: disable-msg=C6409
    return [v for v in self.itervalues()]

  def items(self):  #pylint: disable-msg=C6409
    return [kv for kv in self.iteritems()]
def _Int(s):
  """Try to convert s to an int. If we can't, just return s.

  Object-list members may be stored under int or string keys; this lets
  a name element like '1' match an int-keyed entry.
  """
  try:
    return int(s)
  except ValueError:
    assert '.' not in s  # dots aren't allowed in individual element names
    return s
class Exporter(object):
"""An object containing named parameters that can be get/set.
It can also contain sub-objects with their own parameters, and attributes
that represent lists of sub-objects.
"""
  def __init__(self, defaults=None):
    """Initialize an Exporter.

    Args:
      defaults: (optional) a dictionary of attrs to set on the object.
    """
    # Used by _AddExportObject to auto-generate object-list indices.
    self.__lastindex = -1
    self.export_params = set()
    self.export_objects = set()
    self.export_object_lists = set()
    self.dirty = False  # object has pending SetParameters to be committed.
    if defaults:
      for (key, value) in defaults.iteritems():
        setattr(self, key, value)
  def Export(self, params=None, objects=None, lists=None):
    """Export some parameters, objects, or lists to make them visible.

    Once you export these, you still have to manually declare attributes
    named after the exported names. The idea is that mostly auto-generated
    classes will call Export(), but manually-written subclasses will declare
    the actual attributes. If you forget to declare an attribute (or you
    make a typo) then ValidateExports will fail.

    Args:
      params: a list of parameters in this object.
      objects: a list of sub-objects in this object.
      lists: a list of object-list names (lists containing objects) in this
        object.
    """
    # Repeated calls accumulate; update() accepts any iterable of names.
    if params:
      self.export_params.update(params)
    if objects:
      self.export_objects.update(objects)
    if lists:
      self.export_object_lists.update(lists)
def Unexport(self, params=None, objects=None, lists=None):
"""Remove some parameters, objects, or lists to make them invisible.
Some parameters are optional. Auto-generated classes will Export()
all possible attributes. If an implementation chooses not to support
some fields, it must explicitly Unexport them.
The implementation has to deliberately choose not to implement a
parameter, not just overlook it or skip it out of laziness.
Args:
params: a list of parameters to remove
objects: a list of sub-objects to remove
lists: a list of object-list names (lists containing objects) to remove.
"""
if params:
self.export_params.remove(params)
if objects:
self.export_objects.remove(objects)
if lists:
self.export_object_lists.remove(lists)
  def GetCanonicalName(self, obj_to_find):
    """Generate a canonical name for an object.

    Walk through the tree and generate the canonical name for an
    object. The tree walk starts with this object.

    Args:
      obj_to_find: the object to generate the canonical path for.
    Returns:
      The dotted canonical path to the object, or None if it is not
      reachable from this object.
    """
    for name in self.export_objects:
      exp_obj = self._GetExport(self, name)
      if exp_obj == obj_to_find:
        return name
      # Depth-first recursion into the sub-object.
      tmp_path = exp_obj.GetCanonicalName(obj_to_find)
      if tmp_path:
        return name + '.' + tmp_path
    for name in self.export_object_lists:
      objlist = self._GetExport(self, name)
      if objlist == obj_to_find:
        return name
      for (idx, child_obj) in objlist.iteritems():
        if child_obj == obj_to_find:
          return name + '.' + str(idx)
        tmp_path = child_obj.GetCanonicalName(obj_to_find)
        if tmp_path:
          return name + '.' + str(idx) + '.' + tmp_path
    return None
  def ValidateExports(self, path=None):
    """Trace through this object's exports to make sure none are missing.

    Also goes through child objects.

    Args:
      path: (optional) a list of object name elements for use when printing
        errors, so it's easier to see which one caused the problem.

    Raises:
      SchemaError: if schema validation fails.
    """
    if not path:
      path = ['root']

    def Exc(name, msg):
      # Builds (does not raise) a SchemaError carrying the full dotted path.
      fullname = '.'.join(path + [name])
      return SchemaError('%s %s %s' % (fullname, name, msg))
    for name in self.export_params:
      self.AssertValidExport(name, path=path)
      self._GetExport(self, name)  # raises if the attribute is absent
    for name in self.export_objects:
      self.AssertValidExport(name, path=path)
      obj = self._GetExport(self, name)
      if isinstance(obj, type):
        raise Exc(name, 'is a type; instantiate it')
      try:
        obj.Export()
      except AttributeError:
        raise Exc(name, 'is %r, must implement core.Exporter'
                  % type(obj))
      obj.ValidateExports(path + [name])
    for name in self.export_object_lists:
      self.AssertValidExport(name, path=path)
      l = self._GetExport(self, name)
      # The list must at least support iteritems().
      try:
        for (iname, obj) in l.iteritems():  #pylint: disable-msg=W0612
          pass
      except AttributeError:
        raise Exc(name + 'List', 'is an objlist but failed to iteritems')
      for (iname, obj) in l.iteritems():
        if isinstance(obj, type):
          raise Exc('%s.%s' % (name, iname),
                    'is a type; instantiate it')
        try:
          obj.Export()
        except AttributeError:
          raise Exc(name, 'is %r, must implement core.Exporter'
                    % type(obj))
        obj.ValidateExports(path + [name])
def IsValidExport(self, name):
if (name in self.export_params or
name in self.export_objects or
name in self.export_object_lists):
return True
else:
return False
  def AssertValidExport(self, name, path=None):
    """Raise unless name is both exported and backed by a real attribute.

    Raises:
      KeyError: if name was never exported.
      SchemaError: if it was exported but the attribute was not declared.
    """
    if not self.IsValidExport(name):
      raise KeyError(name)
    ename = self._GetExportName(self, name)
    if not hasattr(self, ename):
      if not path:
        path = ['root']
      fullname = '.'.join(path + [ename])
      raise SchemaError('%s is exported but does not exist' % fullname)
  def _GetExportName(self, parent, name):
    """Map an exported name to the attribute name that stores it."""
    if name in parent.export_object_lists:
      # Object lists are stored in an attribute named <name>List.
      return name.replace('-', '_') + 'List'
    else:
      # Vendor models contain a dash in the domain name.
      return name.replace('-', '_')
  def _GetExport(self, parent, name):
    """Look up name in parent: either an Exporter or a dict-like objlist."""
    if hasattr(parent, 'IsValidExport') and not parent.IsValidExport(name):
      raise KeyError(name)
    if hasattr(parent, '_GetExport'):
      # parent is an Exporter: exports are attributes.
      return getattr(parent, self._GetExportName(parent, name))
    elif _Int(name) in parent:
      # parent is an objlist; members may be keyed by int or by string.
      return parent[_Int(name)]
    else:
      return parent[name]
  def FindExport(self, name, allow_create=False):
    """Navigate through the export hierarchy to find the parent of 'name'.

    Args:
      name: the name of the sub-object to find the parent of.
      allow_create: if true, create the final list member when it does
        not exist yet.
    Returns:
      (parent, subname): the parent object and the name of the parameter or
        object referred to by 'name', relative to the parent.
    """
    parent = None
    o = self
    assert not name.endswith('.')
    parts = name.split('.')
    for i in parts[:-1]:
      parent = o
      o = self._GetExport(o, i)
    if allow_create:
      try:
        self._GetExport(o, parts[-1])
      except KeyError:
        # parts[-2] names the object list; parts[-1] is the desired index.
        parent.AddExportObject(parts[-2], parts[-1])
    return o, parts[-1]
  def GetExport(self, name):
    """Get a child of this object (a parameter or object).

    Args:
      name: a dot-separated sub-object name to retrieve.
    Returns:
      An Exporter instance or a parameter value.
    Raises:
      KeyError: if name does not refer to an export.
    """
    parent, subname = self.FindExport(name)
    try:
      return self._GetExport(parent, subname)  #pylint: disable-msg=W0212
    except KeyError:
      # re-raise the KeyError with the full name, not just the subname.
      raise KeyError(name)
  def SetExportParam(self, name, value):
    """Set the value of a parameter of this object.

    Args:
      name: the parameter name to set (parameters only, not objects or lists).
      value: the value to set it to.
    Returns:
      the object modified
    Raises:
      KeyError: if the name is not an exported parameter.
    """
    parent, subname = self.FindExport(name)
    if subname not in parent.export_params:
      raise KeyError(name)
    if not parent.dirty:
      # First write to this object in the transaction: open it.
      parent.StartTransaction()
    parent.dirty = True
    setattr(parent, subname, value)
    return parent
  def SetExportAttr(self, param, attr, value):
    """Set the attribute of a given parameter.

    Args:
      param: the parameter whose attribute is going to be set.
      attr: the attribute to set on the parameter.
      value: the value of the attribute being set.
    Returns:
      the object whose attribute was modified.
    Raises:
      KeyError: if the param is not exported.
    """
    obj = self.GetExport(param)
    # SetAttribute must be implemented by the exported object itself.
    obj.SetAttribute(attr, value)
    return obj
def _AddExportObject(self, name, idx):
objlist = self._GetExport(self, name)
if name not in self.export_object_lists:
raise KeyError(name)
try:
constructor = getattr(self, name)
except KeyError:
raise NotAddableError(name)
if idx is None:
self.__lastindex += 1
while str(self.__lastindex) in objlist:
self.__lastindex += 1
idx = self.__lastindex
idx = str(idx)
assert '.' not in idx
newobj = constructor()
try:
newobj.ValidateExports()
except SchemaError:
raise NotAddableError(name)
objlist[_Int(idx)] = newobj
return idx, newobj
def AddExportObject(self, name, idx=None):
  """Create a new object of type 'name' in the list self.'name'List.

  Args:
    name: the name of the object class. The list name is self.(name+'List').
    idx: the dictionary key to store it under. Default is auto-generated.
  Returns:
    A tuple of (idx, obj), where idx is the key and obj is the new object.
  Raises:
    KeyError: if 'name' is not an exported sub-object type.
  """
  container, leaf = self.FindExport(name)
  #pylint: disable-msg=W0212
  return container._AddExportObject(leaf, idx)
def DeleteExportObject(self, name, idx):
  """Delete the object with index idx in the list named name.

  Args:
    name: the sub-object list to delete from.
    idx: the index of the object to delete.
  Raises:
    KeyError: if the given index is not in the dictionary.
  """
  objlist = self.GetExport(name)
  key = str(idx)
  try:
    # Entries may be stored under either an integer or a string key.
    converted = _Int(key)
    del objlist[converted if converted in objlist else key]
  except KeyError:
    raise KeyError((name, key))
def _ListExportsFromDict(self, objlist, recursive):
  """Yield '<idx>.' (and children if recursive) for each entry in objlist."""
  # Sort for deterministic output ordering.
  for idx, obj in sorted(objlist.iteritems()):
    if obj is None:
      continue
    yield '%s.' % (idx,)
    if recursive:
      #pylint: disable-msg=W0212
      for sub in obj._ListExports(recursive):
        yield '%s.%s' % (idx, sub)
def _ListExports(self, recursive):
  """Yield the names of all exports, sorted; objects end with a dot."""
  exports = set().union(self.export_params,
                        self.export_objects,
                        self.export_object_lists)
  for name in sorted(exports):
    if name in self.export_params:
      yield name
    elif name in self.export_objects:
      yield name + '.'
      if recursive:
        child = self._GetExport(self, name)
        #pylint: disable-msg=W0212
        for sub in child._ListExports(recursive):
          yield name + '.' + sub
    if name in self.export_object_lists:
      yield name + '.'
      if recursive:
        objlist = self._GetExport(self, name)
        for sub in self._ListExportsFromDict(objlist, recursive=recursive):
          yield '%s.%s' % (name, sub)
def ListExports(self, name=None, recursive=False):
  """Return a sorted list of sub-objects and parameters.

  Args:
    name: subobject name to start from (if None, starts at this object).
    recursive: true if you want to include children of children.
  Returns:
    An iterable of strings that can be passed to GetExport().
  """
  obj = self if not name else self.GetExport(name)
  if hasattr(obj, '_ListExports'):
    if recursive:
      obj.ValidateExports()
    #pylint: disable-msg=W0212
    return obj._ListExports(recursive=recursive)
  # Not an Exporter: obj is the dict backing an object list.
  return self._ListExportsFromDict(obj, recursive=recursive)
def StartTransaction(self):
  """Prepare for a series of Set operations, to be applied atomically.

  After StartTransaction the object will receive zero or more set operations
  to its exported parameters. Each Set should check its arguments as best it
  can, and raise ValueError or TypeError if there is a problem.

  The transaction will conclude with either an AbandonTransaction or
  CommitTransaction."""
  # Default is a no-op; subclasses override to buffer pending writes.
  pass

def AbandonTransaction(self):
  """Discard a pending transaction; do not apply the changes to the system."""
  pass

def CommitTransaction(self):
  """Apply a pending modification to the system."""
  pass
class TODO(Exporter):
  """A placeholder that stands in for an unimplemented Exporter.

  Useful when implementing a big TR-069 Model hierarchy: splice this in
  for classes you have not written yet. As a bonus, instances show up
  when you grep the source code for TODO.
  """

  def __init__(self):
    super(TODO, self).__init__()
    self.Export(params=['TODO'])
    self.TODO = 'CLASS NOT IMPLEMENTED YET'
def Dump(root):
  """Return a string representing the contents of an object.

  This function works only if root.ValidateExports() would pass.

  Args:
    root: the object to dump.
  Returns:
    A big string containing lines of the format:
      Object.SubObject.
      Object.SubObject.ParameterName = %r
  """
  rendered = []
  for export in root.ListExports(recursive=True):
    if export.endswith('.'):
      rendered.append(' %s' % (export,))
    else:
      rendered.append(' %s = %r' % (export, root.GetExport(export)))
  return '\n'.join(rendered)
def _DumpSchema(root, out, path):
  """Recursively append the exported-name schema of root to out.

  Args:
    root: an object (or class, which gets instantiated) with export_params,
      export_objects and export_object_lists attributes.
    out: list of dotted-name strings to append to.
    path: list of name components leading to root.
  """
  if isinstance(root, type):
    root = root()  # accept a class; describe a fresh instance
  for export in root.export_params:
    name = export.replace('-', '_')
    out.append('.'.join(path + [name]))
  for export in root.export_objects:
    name = export.replace('-', '_')
    out.append('.'.join(path + [name, '']))
    _DumpSchema(getattr(root, name), out, path + [name])
  for export in root.export_object_lists:
    name = export.replace('-', '_')
    out.append('.'.join(path + [name, '']))
    out.append('.'.join(path + [name, '{i}']))
    _DumpSchema(getattr(root, name), out, path + [name, '{i}'])
def DumpSchema(root):
  """Return a string representing the object model implemented by the object.

  You can use this to show which objects, sub-objects, and parameters
  *should* be implemented by an object, even if that object isn't fully
  implemented yet, by adding the right attrs in a subclass. This is useful
  for figuring out which attrs you *need* to add in a subclass.
  Auto-generated tr*.py files run this automatically when you execute them
  from the command line.

  This function works even if root.ValidateExports() would fail.

  Args:
    root: the object or type to dump. If a type, instantiates it.
  Returns:
    A big string of the format:
      Object.SubObject.
      Object.SubObject.ParameterName
  """
  if isinstance(root, type):
    root = root()
  schema = []
  _DumpSchema(root, schema, [root.__class__.__name__])
  return '\n'.join(sorted(schema))
class ResourcesExceededError(BufferError):
  """Exception to send a RESOURCES_EXCEEDED SOAP:Fault."""
  # NOTE(review): subclasses BufferError, presumably so generic handlers can
  # map a builtin exception category onto this fault -- verify before changing.
  pass

class FileTransferProtocolError(NotImplementedError):
  """Exception to send a FILE_TRANSFER_PROTOCOL SOAP:Fault."""
  pass

class CancelNotPermitted(Exception):
  """Exception to send a DOWNLOAD_CANCEL_NOTPERMITTED SOAP:Fault."""
  pass
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Persistent objects; objects which store themselves to disk."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import glob
import json
import os
import tempfile
class PersistentObject(object):
  """Object holding simple data fields which can persist itself to json."""

  def __init__(self, objdir, rootname='object', filename=None,
               ignore_errors=False, **kwargs):
    """Create either a fresh new object, or restore state from filesystem.

    Args:
      objdir: the directory to write the json file to.
      rootname: the tag for the root of the json file for this object.
      filename: name of a json file on disk, to restore object state from.
        If filename is None then this is a new object, and will create
        a file for itself in objdir.
      ignore_errors: True if you want to ignore common errors (like
        read-only or nonexistent directories) when saving/loading state.
        Otherwise this object will raise exceptions in those cases.
      **kwargs: passed to self.Update.
    Raises:
      ValueError: reading an object from a JSON file failed.
    """
    self.objdir = objdir
    self.rootname = rootname
    self._fields = {}
    self.ignore_errors = ignore_errors
    if filename:
      self._ReadFromFS(filename)
    else:
      # New object: reserve a uniquely-named backing file now, so later
      # writes have a stable destination for the atomic rename.
      prefix = rootname + '_'
      try:
        f = tempfile.NamedTemporaryFile(mode='a+', prefix=prefix,
                                        dir=objdir, delete=False)
      except OSError:
        if self.ignore_errors:
          filename = objdir
        else:
          raise
      else:
        filename = f.name
        f.close()
    self.filename = filename
    if kwargs:
      self.Update(**kwargs)

  def __getattr__(self, name):
    # Unknown attributes fall through to the persisted fields.
    try:
      return self.__getitem__(name)
    except KeyError:
      # Include the name so the error message is actually useful.
      raise AttributeError(name)

  def __getitem__(self, name):
    return self._fields[str(name)]

  def __str__(self):
    return self._ToJson()

  def __unicode__(self):
    # Python 2 unicode() support; harmless under python 3.
    return self.__str__()

  def Update(self, **kwargs):
    """Atomically update one or more parameters of the object.

    One might reasonably ask why this is an explicit call and not just
    setting parameters like self.foo="Bar". The motivation is atomicity.
    We want the state saved to the filesystem to be consistent, and not
    write out a partially updated object each time a parameter is changed.

    When this call returns, the state has been safely written to the
    filesystem. Any errors are reported by raising an exception.

    Args:
      **kwargs: Parameters to be updated.
    """
    self._fields.update(kwargs)
    self._WriteToFS()

  def Get(self, name):
    """Return the value of field 'name', or None if it is not set."""
    return self._fields.get(name, None)

  def values(self):
    return self._fields.values()

  def items(self):
    return self._fields.items()

  def _ToJson(self):
    return json.dumps(self._fields, indent=2)

  def _FromJson(self, string):
    d = json.loads(str(string))
    assert isinstance(d, dict)
    return d

  def _ReadFromFS(self, filename):
    """Read a json file back to a PersistentState object."""
    # Use a context manager so the handle is closed promptly instead of
    # leaking until garbage collection.
    with open(filename) as f:
      self._fields.update(self._FromJson(f.read()))

  def _WriteToFS(self):
    """Write PersistentState object out to a json file."""
    try:
      f = tempfile.NamedTemporaryFile(
          mode='a+', prefix='tmpwrite', dir=self.objdir, delete=False)
    except OSError:
      if not self.ignore_errors:
        raise
    else:
      f.write(self._ToJson())
      f.close()
      # rename() is atomic on POSIX: readers see either the old or the new
      # state, never a partial write.
      os.rename(f.name, self.filename)

  def Delete(self):
    """Remove backing file from filesystem, immediately."""
    os.remove(self.filename)
def GetPersistentObjects(objdir, rootname=''):
  """Return all PersistentObjects in objdir whose filenames match rootname*.

  Files whose contents fail to parse as JSON are deleted, on the assumption
  that a previous run crashed mid-write and left a corrupt state file.

  Args:
    objdir: the directory to scan.
    rootname: filename prefix to match (default: everything).
  Returns:
    A list of PersistentObject instances.
  """
  # os.path.join instead of string concatenation: avoids a doubled slash
  # when objdir already ends with '/'.
  globstr = os.path.join(objdir, rootname + '*')
  objs = []
  for filename in glob.glob(globstr):
    try:
      obj = PersistentObject(objdir, rootname=rootname, filename=filename)
    except ValueError:
      # Corrupt/partial JSON: discard the state file rather than crash.
      os.remove(filename)
      continue
    objs.append(obj)
  return objs
def main():
  # This module is a library; there is nothing to do when run directly.
  pass

if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Encodings for the SOAP-based protocol used by TR-069."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import re
import xml.etree.ElementTree
import google3
import xmlwitch
class FaultType(object):
  """SOAP 1.1 faultcode values: which side is responsible for a fault."""
  # 'Server' = the receiver failed to process; 'Client' = the sender's
  # message was malformed or invalid.
  SERVER = 'Server'
  CLIENT = 'Client'
class CpeFault(object):
  """CPE Fault codes for SOAP:Fault messages."""
  # Each fault is a (numeric TR-069 fault code, FaultType) tuple; the 9xxx
  # range is reserved for faults reported by the CPE.
  METHOD_NOT_SUPPORTED = 9000, FaultType.SERVER
  REQUEST_DENIED = 9001, FaultType.SERVER
  INTERNAL_ERROR = 9002, FaultType.SERVER
  INVALID_ARGUMENTS = 9003, FaultType.CLIENT
  RESOURCES_EXCEEDED = 9004, FaultType.SERVER
  INVALID_PARAM_NAME = 9005, FaultType.CLIENT
  INVALID_PARAM_TYPE = 9006, FaultType.CLIENT
  INVALID_PARAM_VALUE = 9007, FaultType.CLIENT
  NON_WRITABLE_PARAM = 9008, FaultType.CLIENT
  NOTIFICATION_REQUEST_REJECTED = 9009, FaultType.SERVER
  DOWNLOAD_FAILURE = 9010, FaultType.SERVER
  UPLOAD_FAILURE = 9011, FaultType.SERVER
  FILE_TRANSFER_AUTH = 9012, FaultType.SERVER
  FILE_TRANSFER_PROTOCOL = 9013, FaultType.SERVER
  DOWNLOAD_MULTICAST = 9014, FaultType.SERVER
  DOWNLOAD_CONNECT = 9015, FaultType.SERVER
  DOWNLOAD_ACCESS = 9016, FaultType.SERVER
  DOWNLOAD_INCOMPLETE = 9017, FaultType.SERVER
  DOWNLOAD_CORRUPTED = 9018, FaultType.SERVER
  DOWNLOAD_AUTH = 9019, FaultType.SERVER
  DOWNLOAD_TIMEOUT = 9020, FaultType.CLIENT
  DOWNLOAD_CANCEL_NOTPERMITTED = 9021, FaultType.CLIENT
  # codes 9800-9899: vendor-defined faults
class AcsFault(object):
  """ACS Fault codes for SOAP:Fault messages."""
  # Each fault is a (numeric TR-069 fault code, FaultType) tuple; the 8xxx
  # range is reserved for faults reported by the ACS.
  METHOD_NOT_SUPPORTED = 8000, FaultType.SERVER
  REQUEST_DENIED = 8001, FaultType.SERVER
  INTERNAL_ERROR = 8002, FaultType.SERVER
  INVALID_ARGUMENTS = 8003, FaultType.CLIENT
  RESOURCES_EXCEEDED = 8004, FaultType.SERVER
  RETRY_REQUEST = 8005, FaultType.SERVER
  # codes 8800-8899: vendor-defined faults
class _Enterable(object):
  """Adapt a generator into a 'with' context manager.

  __enter__ runs the generator up to its first yield and returns the yielded
  value; __exit__ resumes it once more so any cleanup code after the yield
  gets a chance to run.
  """

  def __init__(self, iterable):
    self.iter = iterable

  def __iter__(self):
    return self.iter

  def __enter__(self):
    # Use the next() builtin (python 2.6+) rather than the python-2-only
    # .next() method, so this also works under python 3.
    return next(self.iter)

  def __exit__(self, exc_type, exc_value, exc_tb):
    try:
      next(self.iter)
    except StopIteration:
      pass
def Enterable(func):
  """Decorator: make a generator function usable in a 'with' statement."""
  def MakeContext(*args, **kwargs):
    return _Enterable(func(*args, **kwargs))
  return MakeContext
@Enterable
def Envelope(request_id, hold_requests):
  """Open a CWMP SOAP envelope; yields the builder inside soap:Body.

  Args:
    request_id: value for the cwmp:ID header, or None to omit it.
    hold_requests: True/False for cwmp:HoldRequests, or None to omit it.
  """
  xml = xmlwitch.Builder(version='1.0', encoding='utf-8')
  envelope_attrs = {'xmlns:soap': 'http://schemas.xmlsoap.org/soap/envelope/',
                    'xmlns:soap-enc': 'http://schemas.xmlsoap.org/soap/encoding/',
                    'xmlns:xsd': 'http://www.w3.org/2001/XMLSchema',
                    'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance',
                    'xmlns:cwmp': 'urn:dslforum-org:cwmp-1-2'}
  with xml['soap:Envelope'](**envelope_attrs):
    with xml['soap:Header']:
      must_understand = {'soap:mustUnderstand': '1'}
      if request_id is not None:
        xml['cwmp:ID'](str(request_id), **must_understand)
      if hold_requests is not None:
        xml['cwmp:HoldRequests']('1' if hold_requests else '0',
                                 **must_understand)
    with xml['soap:Body']:
      yield xml
@Enterable
def Fault(xml, fault, faultstring):
  """Open a SOAP:Fault element; yields with the builder inside cwmp:Fault.

  Args:
    xml: an xmlwitch Builder positioned inside soap:Body.
    fault: a (code, faulttype) tuple such as CpeFault.INTERNAL_ERROR.
    faultstring: human-readable detail for cwmp:FaultString.
  """
  code, blame = fault
  with xml['soap:Fault']:
    xml.faultcode(blame)
    # TR-069 mandates the literal SOAP faultstring 'CWMP fault'; the real
    # message goes into cwmp:Fault/FaultString below.
    xml.faultstring('CWMP fault')
    with xml.detail:
      with xml['cwmp:Fault']:
        xml.FaultCode(str(code))
        xml.FaultString(faultstring)
        yield xml
def GetParameterNames(xml, path, nextlevel):
  """Append a cwmp:GetParameterNames request; returns the builder."""
  with xml['cwmp:GetParameterNames']:
    xml.ParameterPath(path)
    xml.NextLevel('1' if nextlevel else '0')
  return xml
def SetParameterValuesFault(xml, faults):
  """Append a fault report listing each failed SetParameterValues entry.

  Args:
    xml: an xmlwitch Builder positioned inside soap:Body.
    faults: list of (parameter_name, cpefault_tuple, faultstring) triples.
  Returns:
    the builder, for chaining/printing.
  """
  with Fault(xml, CpeFault.INVALID_ARGUMENTS, 'Invalid arguments') as xml:
    for parameter, fault, text in faults:
      with xml.SetParameterValuesFault:
        xml.ParameterName(parameter)
        # fault is a (code, faulttype) tuple; emit only the numeric code.
        xml.FaultCode(str(int(fault[0])))
        xml.FaultString(text)
  return xml
def SimpleFault(xml, cpefault, faultstring):
  """Append a SOAP:Fault with no per-parameter detail; returns the builder."""
  with Fault(xml, cpefault, faultstring) as xml:
    return xml
def _StripNamespace(tagname):
  """Drop a leading '{namespace-uri}' qualifier from an ElementTree tag."""
  bracketed_prefix = r'^\{.*\}'
  return re.sub(bracketed_prefix, '', tagname)
class NodeWrapper(object):
  """Read-only wrapper for a parsed XML node.

  Children can be looked up by tag name (attribute or [] access), by
  integer position in document order, or by slice.
  """

  def __init__(self, name, attrib, items):
    self.name = name      # tag name (namespace already stripped)
    self.attrib = attrib  # XML attribute dict
    self._list = []   # children in document order, as (tag, value) pairs
    self._dict = {}   # last child seen for each tag name
    for key, value in items:
      self._list.append((key, value))
      self._dict[key] = value

  def _Get(self, key):
    """Look up a child by tag name, numeric index, or slice.

    Raises:
      KeyError: if key is neither a known tag nor convertible to an index.
    """
    if isinstance(key, slice):
      return self._list[key]
    try:
      return self._dict[key]
    # 'as' form instead of the python-2-only 'except KeyError, e', so this
    # module keeps working on python 2.6+ and python 3.
    except KeyError as e:
      try:
        idx = int(key)
      except ValueError:
        raise e
      return self._list[idx][1]

  def get(self, key, defval=None):
    """Like _Get(), but return defval instead of raising KeyError."""
    try:
      return self._Get(key)
    except KeyError:
      return defval

  def __getattr__(self, key):
    return self._Get(key)

  def __getitem__(self, key):
    return self._Get(key)

  def iteritems(self):
    # iter() preserves python 2's lazy-iterator contract when dict.items()
    # returns a view/list.
    return iter(self._dict.items())

  def __str__(self):
    out = []
    for key, value in self._list:
      value = str(value)
      if '\n' in value:
        # Indent multi-line child values under their tag name.
        value = '\n' + re.sub(re.compile(r'^', re.M), ' ', value)
      out.append('%s: %s' % (key, value))
    return '\n'.join(out)

  def __repr__(self):
    return str(self._list)
def _Parse(node):
  """Recursively convert an ElementTree node to text or a NodeWrapper."""
  if node.text and node.text.strip():
    return node.text
  children = [(_StripNamespace(child.tag), _Parse(child)) for child in node]
  return NodeWrapper(_StripNamespace(node.tag), node.attrib, children)
def Parse(xmlstring):
  """Parse an XML document string into nested NodeWrapper objects."""
  return _Parse(xml.etree.ElementTree.fromstring(xmlstring))
def main():
  """Smoke test: build, print and re-parse a few sample CWMP messages."""
  with Envelope(1234, False) as xml:
    print GetParameterNames(xml, 'System.', 1)
  with Envelope(11, None) as xml:
    print SetParameterValuesFault(
        xml,
        [('Object.x.y', CpeFault.INVALID_PARAM_TYPE, 'stupid error'),
         ('Object.y.z', CpeFault.INVALID_PARAM_NAME, 'blah error')])
  # Parse after the 'with' exits, so the envelope's closing tags exist.
  parsed = Parse(str(xml))
  print repr(parsed)
  print parsed.Body
  print parsed.Body.Fault.detail.Fault[2:4]
  with Envelope(12, None) as xml:
    print SimpleFault(xml, CpeFault.DOWNLOAD_CORRUPTED, 'bad mojo')

if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#
"""Command-line client for tr/rcommand.py."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import os.path
import re
import socket
import sys
import traceback
import google3
import bup.options
import bup.shquote
import tr.mainloop
import tr.quotedblock
try:
import readline #pylint: disable-msg=C6204
except ImportError:
readline = None
# Option spec in bup.options format: usage line, '--' separator, then one
# 'short,long= description [default]' entry per line.
optspec = """
cwmp [options] [command...]
--
u,unix-path= Unix socket server is listening on [/tmp/cwmpd.sock]
i,ip= IP hostname/ipaddr server is listening on (default: unix socket)
p,port= IP port server is listening on [12999]
"""

# Readline history persisted across interactive sessions.
HISTORY_FILE = os.path.expanduser('~/.cwmp_history')
def Log(s, *args):
  """Write a message plus newline to stderr, %-formatting it if args given."""
  text = str(s)
  if args:
    text %= args
  sys.stderr.write(text + '\n')
# Module-wide flag; set True by Interactive() so Verbose() output appears.
_want_verbose = False

def Verbose(s, *args):
  """Log s (with optional % args) only when verbose mode is enabled."""
  if _want_verbose:
    Log(s, *args)
class Fatal(Exception):
  """Unrecoverable client error; caught at top level to exit cleanly."""
  pass
def HandleFatal(func):
  """Decorator: log a Fatal raised by func and exit(1) instead of crashing."""
  def Fn(*args, **kwargs):
    try:
      return func(*args, **kwargs)
    # 'as' form instead of the python-2-only 'except Fatal, e': works on
    # python 2.6+ (which this code requires anyway) and python 3.
    except Fatal as e:
      Log('Fatal: %s' % e)
      sys.exit(1)
  return Fn
def _NormalizePath(path):
  """Like os.path.normpath, but doesn't remove any trailing slash."""
  normalized = os.path.normpath(path)
  if path.endswith('/') and not normalized.endswith('/'):
    return normalized + '/'
  return normalized
def _DotsToSlashes(s):
  """Convert dotted TR-069 names to slash paths: 'a.b.c' -> 'a/b/c'.

  A dot separates components only when preceded by a normal character, so
  '..' and '/.' sequences are left alone.
  """
  component_dot = re.compile(r'([^/.])\.')
  return component_dot.sub(r'\1/', s)
def _SlashesToDots(s):
  """Convert a slash path back to a dotted name: '/a/b' -> 'a.b'."""
  dotted = s.replace('/', '.')
  return dotted[1:] if dotted.startswith('.') else dotted
class Client(object):
  """Manage the client-side state of an rcommand connection."""

  def __init__(self, loop, connector):
    # loop: a tr.mainloop.MainLoop; connector: callable taking a
    # connection-complete callback (see main() for the two variants).
    self.loop = loop
    self.connector = connector
    self.stream = None
    self.result = None
    self._last_res = None  # cached (qtype, lastword, subs) for readline
    self.cwd = '/'
    self.quotedblock = tr.quotedblock.QuotedBlockProtocol(
        HandleFatal(self.GotBlock))
    self._StartConnect()

  def _StartConnect(self):
    """Begin an async connection attempt; OnConnect fires when done."""
    self.stream = None
    try:
      self.connector(HandleFatal(self.OnConnect))
    except socket.error, e:
      raise Fatal(str(e))

  def Close(self):
    if self.stream:
      self.stream.close()

  def OnConnect(self, stream):
    """Connection-established callback: start reading server replies."""
    if not stream:
      raise Fatal('connection failed')
    Verbose('Connected to server.\n')
    self.stream = stream
    self.stream.set_close_callback(HandleFatal(self.OnClose))
    self._StartRead()
    # Stop the ioloop so the synchronous caller (main) regains control.
    self.loop.ioloop.stop()

  def _StartRead(self):
    self.stream.read_until('\n', HandleFatal(self.GotData))

  def OnClose(self):
    # Server went away: try to reconnect immediately.
    Log('Server connection closed!')
    self._StartConnect()

  def GotData(self, data):
    # Feed each received line to the protocol parser, then keep reading.
    self.quotedblock.GotData(data)
    self._StartRead()

  def GotBlock(self, lines):
    """Protocol callback: a complete reply block arrived."""
    self.result = lines
    self.loop.ioloop.stop()

  def Send(self, lines):
    s = self.quotedblock.RenderBlock(lines)
    self.stream.write(s)

  def Run(self, lines):
    """Send a command block and block until the server's reply arrives."""
    self.Send(lines)
    self.loop.Start()  # runs the ioloop until GotBlock() stops it
    result = self.result
    self.result = None
    return result

  def _RequestCompletions(self, prefix):
    """Ask the server for name completions of prefix; yields each one."""
    prefix = _NormalizePath(prefix)
    # Skip the first line of the reply (the status/echo line).
    completions = self.Run([['completions', _SlashesToDots(prefix)]])[1:]
    for [i] in completions:
      yield i

  def _GetSubstitutions(self, line):
    """Compute completion candidates for the last word of line."""
    (qtype, lastword) = bup.shquote.unfinished_word(line)
    request = os.path.join(self.cwd, _DotsToSlashes(lastword))
    subs = list(self._RequestCompletions(request))
    cmd = line.split(' ', 1)[0]
    if cmd.lower() in ('cd', 'ls', 'list', 'rlist', 'add', 'del'):
      # only return object names, not parameters
      subs = [i for i in subs if i.endswith('.')]
    return (qtype, lastword, subs)

  def _StripPathPrefix(self, oldword, newword):
    """Reduce newword to the suffix readline expects to substitute."""
    # readline is weird: we have to remove all the parts that were before
    # the last '/', but not parts before the last '.', because we have to
    # tell it what to replace everything after the last '/' with.
    after_slash = oldword.split('/')[-1]
    dots = after_slash.split('.')
    if newword.endswith('.'):
      new_last_dot = '.'.join(newword.split('.')[-2:])
    else:
      new_last_dot = newword.split('.')[-1]
    dots[-1] = new_last_dot
    return '.'.join(dots)

  def ReadlineCompleter(self, text, state):
    """Callback for the readline library to autocomplete a line of text.

    Args:
      text: the current input word (basename following the last slash)
      state: a number of 0..n, where n is the number of substitutions.
    Returns:
      One of the available substitutions.
    """
    try:
      text = _DotsToSlashes(text)
      line = readline.get_line_buffer()[:readline.get_endidx()]
      if not state:
        # state==0 is the first call for this word: fetch candidates once.
        self._last_res = self._GetSubstitutions(line)
      (qtype, lastword, subs) = self._last_res
      if state < len(subs):
        new_last_slash = _DotsToSlashes(self._StripPathPrefix(lastword,
                                                              subs[state]))
        is_param = not new_last_slash.endswith('/')
        if is_param and qtype:
          # Close the open quote after completing a parameter name.
          new_last_slash += qtype
        return new_last_slash
    except Exception, e:  #pylint: disable-msg=W0703
      # readline swallows exceptions silently; log them ourselves.
      Log('\n')
      try:
        traceback.print_tb(sys.exc_traceback)
      except Exception, e2:  #pylint: disable-msg=W0703
        Log('Error printing traceback: %s\n' % e2)
      Log('\nError in completion: %s\n' % e)
def DoCmd(client, words):
  """Execute one command (a list of words); return the server's reply.

  'cd' is handled purely client-side (returns None); everything else is
  sent to the server via client.Run().
  """
  cmd = words[0].lower()
  args = words[1:]
  if cmd in ('cd', 'ls', 'list', 'rlist', 'add', 'del', 'get', 'set'):
    # Path-taking commands: resolve the first argument relative to cwd.
    if not args:
      args = [client.cwd]
    relpath = _DotsToSlashes(args[0])
    abspath = os.path.normpath(os.path.join(client.cwd, relpath))
    args[0] = _SlashesToDots(abspath)
  if cmd == 'cd':
    client.cwd = os.path.normpath(os.path.join(client.cwd, relpath))
  else:
    return client.Run([[cmd] + args])
def Interactive(client):
  """Run a read-eval-print loop against the connected rcommand server."""
  global _want_verbose
  _want_verbose = True
  if readline:
    readline.set_completer_delims(' \t\n\r/')
    readline.set_completer(client.ReadlineCompleter)
    readline.parse_and_bind('bind ^I rl_complete')  # MacOS
    readline.parse_and_bind('tab: complete')  # other
  while True:
    print
    line = raw_input('%s> ' % client.cwd) + '\n'
    # Keep prompting for continuation lines until quoting is balanced.
    while 1:
      word = bup.shquote.unfinished_word(line)[1]
      if not word:
        break
      line += raw_input('%*s> ' % (len(client.cwd), '')) + '\n'
    #pylint: disable-msg=W0612
    words = [word for (idx, word) in bup.shquote.quotesplit(line)]
    if not words:
      continue
    result = DoCmd(client, words)
    print client.quotedblock.RenderBlock(result).strip()
def main():
  """Parse options, connect to the server, then run one command or a REPL."""
  o = bup.options.Options(optspec)
  (opt, flags, extra) = o.parse(sys.argv[1:])  #pylint: disable-msg=W0612
  if readline and os.path.exists(HISTORY_FILE):
    readline.read_history_file(HISTORY_FILE)
  client = None
  try:  #pylint: disable-msg=C6405
    loop = tr.mainloop.MainLoop()
    # Choose TCP or unix-socket transport based on the -i option.
    if opt.ip is not None:
      connector = lambda err: loop.ConnectInet((opt.ip, opt.port), err)
    else:
      connector = lambda err: loop.ConnectUnix(opt.unix_path, err)
    client = Client(loop, connector)
    loop.Start()  # runs until the connection completes
    if extra:
      # Non-interactive: run the single command given on the command line.
      result = DoCmd(client, extra)
      code = result.pop(0)
      if code[0] != 'OK':
        raise Fatal(' '.join(code))
      for line in result:
        print ' '.join(line)
    else:
      Interactive(client)
  except Fatal, e:
    Log(e)
    sys.exit(1)
  except EOFError:
    # Normal exit: user pressed ^D at the prompt.
    pass
  finally:
    if readline:
      readline.write_history_file(HISTORY_FILE)
    if client:
      client.Close()

if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#pylint: disable-msg=W0404
#
"""The main server process for our TR-069 CPE device."""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import os.path
import sys
import google3
import bup.options
import tornado.autoreload
import tornado.httpclient
import dm_root
import tr.api
import tr.core
import tr.http
import tr.mainloop
import tr.rcommand
# Option spec in bup.options format: usage line, '--' separator, then one
# 'short,long= description [default]' entry per line.
optspec = """
cwmpd [options]
--
r,rcmd-port= TCP port to listen for rcommands on; 0 to disable [12999]
u,unix-path= Unix socket to listen on [/tmp/cwmpd.sock]
i,ip= IP address to report to ACS. (default=finds interface IP address)
l,listenip= IP address to listen on [::1]
p,port= TCP port to listen for TR-069 on [7547]
ping-path= Force CPE ping listener to this URL path (default=random)
acs-url= URL of the TR-069 ACS server to connect to
fake-acs Run a fake ACS (and auto-set --acs-url to that)
no-cpe Don't run a CPE (and thus never connect to ACS)
cpe-listener Let CPE listen for http requests (not TR-069 compliant)
platform= Activate the platform-specific device tree (see platform/ dir)
close-stdio Close stdout after listeners are running; exit when stdin closes
ping_ip6dev= Use the IPv6 address from dev for the CPE Ping address
ca-certs= SSL ca_certificates.crt file to use
client-cert= SSL client certificate to use
client-key= SSL client private key to use
restrict-acs-hosts= Domain names allowed for ACS URL. Default=unrestricted. Example: 'google.com gfsvc.com'
"""
#pylint: disable-msg=W0613
def _GotData(loop, fd, flags):
  """IOLoop read handler for stdin: stop the main loop on EOF."""
  data = os.read(fd, 1024)
  if not data:
    loop.ioloop.stop()
def main():
  """Parse options, build the device tree, start listeners, run forever."""
  o = bup.options.Options(optspec)
  (opt, flags, extra) = o.parse(sys.argv[1:])  #pylint: disable-msg=W0612
  # Use the curl-based client: supports client SSL certs and connection
  # options the simple client does not.
  tornado.httpclient.AsyncHTTPClient.configure(
      'tornado.curl_httpclient.CurlAsyncHTTPClient')
  loop = tr.mainloop.MainLoop()
  root = dm_root.DeviceModelRoot(loop, opt.platform)
  # Remote-command listeners (debug CLI), over TCP and/or a unix socket.
  if opt.rcmd_port:
    loop.ListenInet((opt.listenip, opt.rcmd_port),
                    tr.rcommand.MakeRemoteCommandStreamer(root))
  if opt.unix_path:
    loop.ListenUnix(opt.unix_path,
                    tr.rcommand.MakeRemoteCommandStreamer(root))
  if opt.port:
    acs = cpe = None
    if opt.fake_acs:
      acs = tr.api.ACS()
      if not opt.acs_url:
        # Point the CPE at our own fake ACS.
        opt.acs_url = 'http://localhost:%d/acs' % opt.port
    if opt.cpe:
      cpe = tr.api.CPE(root)
      if not opt.cpe_listener:
        print 'CPE API is client mode only.'
    if cpe:
      # Arguments to pass to Tornado HTTPClient.fetch
      fetch_args = {'user_agent': 'catawampus-tr69'}
      if opt.ca_certs:
        fetch_args['ca_certs'] = opt.ca_certs
        fetch_args['validate_cert'] = True
      if opt.client_cert and opt.client_key:
        fetch_args['client_cert'] = opt.client_cert
        fetch_args['client_key'] = opt.client_key
      pc = root.get_platform_config(ioloop=loop.ioloop)
      cpe.download_manager.SetDirectories(config_dir=pc.ConfigDir(),
                                          download_dir=pc.DownloadDir())
      cpe_machine = tr.http.Listen(ip=opt.ip, port=opt.port,
                                   ping_path=opt.ping_path,
                                   acs=acs, cpe=cpe,
                                   restrict_acs_hosts=opt.restrict_acs_hosts,
                                   cpe_listener=opt.cpe_listener,
                                   platform_config=pc,
                                   acs_url=opt.acs_url,
                                   ping_ip6dev=opt.ping_ip6dev,
                                   fetch_args=fetch_args)
      ms = cpe_machine.GetManagementServer()
      root.add_management_server(ms)
      root.configure_tr157(cpe_machine)
      cpe_machine.Startup()
  if opt.close_stdio:
    # Daemon mode: detach stdout, and exit when our parent closes stdin.
    nullf = open('/dev/null', 'w+')
    os.dup2(nullf.fileno(), 1)
    nullf.close()
    loop.ioloop.add_handler(sys.stdin.fileno(),
                            lambda *args: _GotData(loop, *args),
                            loop.ioloop.READ)
  loop.Start()
if __name__ == '__main__':
  # Line-buffer stdout/stderr so log output appears promptly even when
  # redirected to a file or pipe.
  sys.stdout = os.fdopen(1, 'w', 1)  # force line buffering even if redirected
  sys.stderr = os.fdopen(2, 'w', 1)  # force line buffering even if redirected
  print
  main()
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fix sys.path so it can find our libraries.
This file is named google3.py because gpylint specifically ignores it when
complaining about the order of import statements - google3 should always
come before other non-python-standard imports.
"""
__author__ = 'apenwarr@google.com (Avery Pennarun)'
import os.path
import sys
# Add the grandparent directory to sys.path so sibling project packages
# (e.g. tr.*) are importable regardless of the current working directory.
mydir = os.path.dirname(__file__)
sys.path += [
    os.path.join(mydir, '../..'),
]
import tr.google3 #pylint: disable-msg=W0611,C6204
| Python |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
"""Device Models for a simulated CPE."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import os
import sys
import google3
import dm.device_info
import dm.igd_time
import dm.periodic_statistics
import dm.storage
import platform_config
import tornado.ioloop
import tr.core
import tr.download
import tr.tr181_v2_2 as tr181
# Cached result of FakeCPEInstance(); populated on first call.
FAKECPEINSTANCE = None
# TR-069 fault code for an internal error (matches CpeFault.INTERNAL_ERROR).
INTERNAL_ERROR = 9002
# NOTE(review): tr.tr098_v1_4 is not imported above -- presumably re-exported
# by the tr package; verify this resolves at import time.
BASE98IGD = tr.tr098_v1_4.InternetGatewayDevice_v1_10.InternetGatewayDevice
class PlatformConfig(platform_config.PlatformConfigMeta):
  """PlatformConfig for FakeCPE."""

  def __init__(self, ioloop=None):
    # NOTE(review): ioloop is accepted for interface compatibility but
    # unused here -- confirm other platforms rely on this signature.
    platform_config.PlatformConfigMeta.__init__(self)

  def ConfigDir(self):
    # Per-instance path so multiple fake CPEs can run on one machine.
    return '/tmp/catawampus.%s/config/' % FakeCPEInstance()

  def DownloadDir(self):
    return '/tmp/catawampus.%s/download/' % FakeCPEInstance()

  def GetAcsUrl(self):
    """FakeCPE requires a --acs_url parameter, there is no platform handling."""
    return None

  def SetAcsUrl(self, url):
    raise AttributeError('URL is read-only')

  def AcsAccessAttempt(self, url):
    # No platform bookkeeping to do for a simulated CPE.
    pass

  def AcsAccessSuccess(self, url):
    pass
class InstallerFakeCPE(tr.download.Installer):
  """Fake Installer to install fake images on a fake CPE."""

  def __init__(self, filename, ioloop=None):
    tr.download.Installer.__init__(self)
    self.filename = filename
    self._install_cb = None
    self._ioloop = ioloop or tornado.ioloop.IOLoop.instance()

  def _call_callback(self, faultcode, faultstring):
    """Invoke the stored download callback, if one was registered."""
    if self._install_cb:
      self._install_cb(faultcode, faultstring, must_reboot=True)

  def install(self, file_type, target_filename, callback):
    """Pretend to install a downloaded image.

    Args:
      file_type: TR-069 FileType string, e.g. '1 Firmware Upgrade Image'.
      target_filename: unused by the fake installer.
      callback: called with (faultcode, faultstring, must_reboot=True).
    Returns:
      True if the install was accepted, False on an unsupported file type.
    """
    ftype = file_type.split()
    if ftype and ftype[0] != '1':
      # BUG FIX: this previously read type[0] (the builtin 'type'), which
      # raised TypeError instead of reporting the unsupported file type.
      self._call_callback(INTERNAL_ERROR,
                          'Unsupported file_type {0}'.format(ftype[0]))
      return False
    self._install_cb = callback
    os.rename(self.filename, 'download.tgz')
    self._call_callback(0, '')
    return True

  def reboot(self):
    # Exit with a distinctive status; presumably the supervising fakecpe
    # script treats 32 as "simulate a reboot" -- verify against the wrapper.
    sys.exit(32)
def FakeCPEInstance():
  """Return this fake CPE's instance id ($FAKECPEINSTANCE, cached)."""
  global FAKECPEINSTANCE
  if FAKECPEINSTANCE is None:
    FAKECPEINSTANCE = os.getenv('FAKECPEINSTANCE', '99999999')
  return FAKECPEINSTANCE
class DeviceIdFakeCPE(dm.device_info.DeviceIdMeta):
  """Parameters for the DeviceInfo object for a FakeCPE platform."""

  @property
  def Manufacturer(self):
    return 'Catawampus'

  @property
  def ManufacturerOUI(self):
    return '001A11'

  @property
  def ModelName(self):
    return 'FakeCPE'

  @property
  def Description(self):
    return 'Simulated CPE device'

  @property
  def SerialNumber(self):
    # The instance id doubles as a serial number, so several fake CPEs
    # running on one machine stay distinguishable to the ACS.
    return str(FakeCPEInstance())

  @property
  def HardwareVersion(self):
    return '0'

  @property
  def AdditionalHardwareVersion(self):
    return '0'

  @property
  def SoftwareVersion(self):
    # Read from a version file dropped by the build; fall back gracefully
    # so a source checkout still runs.
    try:
      with open('platform/fakecpe/version', 'r') as f:
        return f.readline().strip()
    except IOError:
      return 'unknown_version'

  @property
  def AdditionalSoftwareVersion(self):
    return '0'

  @property
  def ProductClass(self):
    return 'Simulation'

  @property
  def ModemFirmwareVersion(self):
    return '0'
class ServicesFakeCPE(tr181.Device_v2_2.Device.Services):
  """Device.Services subtree for the fake CPE: only StorageServices."""

  def __init__(self):
    tr181.Device_v2_2.Device.Services.__init__(self)
    self.Export(objects=['StorageServices'])
    self.StorageServices = dm.storage.StorageServiceLinux26()
class DeviceFakeCPE(tr181.Device_v2_2.Device):
  """Device implementation for a simulated CPE device.

  Most of the tr-181 tree is unexported: the simulation only implements
  DeviceInfo, Services, and PeriodicStatistics.
  """

  def __init__(self, device_id, periodic_stats):
    super(DeviceFakeCPE, self).__init__()
    # Subtrees this simulated device does not implement (same order as the
    # schema-declared exports).
    for subtree in ('ATM', 'Bridging', 'CaptivePortal'):
      self.Unexport(objects=subtree)
    self.Export(objects=['DeviceInfo'])
    for subtree in ('DHCPv4', 'DHCPv6', 'DNS', 'DSL', 'DSLite', 'Ethernet',
                    'Firewall', 'GatewayInfo', 'HPNA', 'HomePlug', 'Hosts',
                    'IEEE8021x', 'IP', 'IPv6rd', 'LANConfigSecurity', 'MoCA',
                    'NAT', 'NeighborDiscovery', 'PPP', 'PTM', 'QoS',
                    'RouterAdvertisement', 'Routing', 'SmartCardReaders',
                    'UPA', 'USB', 'Users', 'WiFi'):
      self.Unexport(objects=subtree)
    self.DeviceInfo = dm.device_info.DeviceInfo181Linux26(device_id)
    # Higher layer code splices the real ManagementServer in later.
    self.ManagementServer = tr.core.TODO()
    self.Services = ServicesFakeCPE()
    self.InterfaceStackNumberOfEntries = 0
    self.InterfaceStackList = {}
    self.Export(objects=['PeriodicStatistics'])
    self.PeriodicStatistics = periodic_stats
class InternetGatewayDeviceFakeCPE(BASE98IGD):
  """Implements tr-98 InternetGatewayDevice for the simulated CPE."""

  def __init__(self, device_id, periodic_stats):
    super(InternetGatewayDeviceFakeCPE, self).__init__()
    # (Unexport kwarg, name) pairs for the tr-98 subtrees this
    # simulation does not implement.
    for kwarg, name in (('objects', 'CaptivePortal'),
                        ('objects', 'DeviceConfig'),
                        ('params', 'DeviceSummary'),
                        ('objects', 'DownloadDiagnostics'),
                        ('objects', 'IPPingDiagnostics'),
                        ('objects', 'LANConfigSecurity'),
                        ('lists', 'LANDevice'),
                        ('objects', 'LANInterfaces'),
                        ('objects', 'Layer2Bridging'),
                        ('objects', 'Layer3Forwarding')):
      self.Unexport(**{kwarg: name})
    self.ManagementServer = tr.core.TODO()  # higher level code splices this in
    for kwarg, name in (('objects', 'QueueManagement'),
                        ('objects', 'Services'),
                        ('objects', 'TraceRouteDiagnostics'),
                        ('objects', 'UploadDiagnostics'),
                        ('objects', 'UserInterface'),
                        ('lists', 'WANDevice')):
      self.Unexport(**{kwarg: name})
    self.DeviceInfo = dm.device_info.DeviceInfo98Linux26(device_id)
    # Per-instance TZ file, so multiple fake CPEs do not clobber each other.
    tzfile = '/tmp/catawampus.%s/TZ' % FakeCPEInstance()
    self.Time = dm.igd_time.TimeTZ(tzfile=tzfile)
    self.Export(objects=['PeriodicStatistics'])
    self.PeriodicStatistics = periodic_stats

  @property
  def LANDeviceNumberOfEntries(self):
    """The simulation models no LAN devices."""
    return 0

  @property
  def WANDeviceNumberOfEntries(self):
    """The simulation models no WAN devices."""
    return 0
def PlatformInit(name, device_model_root):
  """Create platform-specific device models and initialize platform.

  Args:
    name: the platform name (unused by the fake platform).
    device_model_root: the object to attach the device models to.

  Returns:
    A (params, objects) tuple: the names of top-level parameters and
    objects added to device_model_root.
  """
  tr.download.INSTALLER = InstallerFakeCPE
  periodic_stats = dm.periodic_statistics.PeriodicStatistics()
  devid = DeviceIdFakeCPE()
  device_model_root.Device = DeviceFakeCPE(devid, periodic_stats)
  device_model_root.InternetGatewayDevice = InternetGatewayDeviceFakeCPE(
      devid, periodic_stats)
  return ([], ['Device', 'InternetGatewayDevice'])
def main():
  """Build both device models, dump them, and sanity-check their exports."""
  periodic_stats = dm.periodic_statistics.PeriodicStatistics()
  devid = DeviceIdFakeCPE()
  roots = [DeviceFakeCPE(devid, periodic_stats),
           InternetGatewayDeviceFakeCPE(devid, periodic_stats)]
  for root in roots:
    tr.core.Dump(root)
  for root in roots:
    root.ValidateExports()
  print('done')


if __name__ == '__main__':
  main()