code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Manages dynamic properties of an application and/or its modules.
An application must explicitly declare properties and provide a type, doc string
and default value for each. The default property values are overridden by
the new values found in the environment variable with the same name. Those are
further overridden by the values found in the datastore. We also try to do all
of this with performance in mind.
"""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import logging
import os
import threading
import time
import appengine_config
import entities
import transforms
from google.appengine.api import namespace_manager
from google.appengine.ext import db
# The default update interval supported.
DEFAULT_UPDATE_INTERVAL_SEC = 60
# The longest update interval supported.
MAX_UPDATE_INTERVAL_SEC = 60 * 5
# Allowed property types.
TYPE_INT = int
TYPE_STR = str
TYPE_BOOL = bool
ALLOWED_TYPES = frozenset([TYPE_INT, TYPE_STR, TYPE_BOOL])
class ConfigProperty(object):
    """A named, typed configuration property with a doc string and default.

    The effective value is resolved through a chain of overrides: test
    overrides, then datastore overrides, then environment variables, and
    finally the declared default value.
    """

    def __init__(
        self, name, value_type, doc_string,
        default_value=None, multiline=False, validator=None):
        if value_type not in ALLOWED_TYPES:
            raise Exception('Bad value type: %s' % value_type)

        self._validator = validator
        self._multiline = multiline
        self._name = name
        self._type = value_type
        self._doc_string = doc_string
        self._default_value = value_type(default_value)

        # A default that fails its own validator is a programming error.
        if self._validator and self._default_value:
            errors = []
            self._validator(self._default_value, errors)
            if errors:
                raise Exception('Default value is invalid: %s.' % errors)

        Registry.registered[name] = self

    @property
    def validator(self):
        """Optional callable(value, errors) used to vet override values."""
        return self._validator

    @property
    def multiline(self):
        """Whether the value should be edited in a multiline widget."""
        return self._multiline

    @property
    def name(self):
        """Unique name under which this property is registered."""
        return self._name

    @property
    def value_type(self):
        """Declared type; one of ALLOWED_TYPES."""
        return self._type

    @property
    def doc_string(self):
        """Human-readable description of the property."""
        return self._doc_string

    @property
    def default_value(self):
        """Value used when no override applies."""
        return self._default_value

    def get_environ_value(self):
        """Tries to get value from the environment variables.

        Returns:
            A (found, value) pair; value is None when not found. A variable
            that fails to cast to the declared type is deleted from
            os.environ so it is not retried.
        """
        # The variable may be present under either case of the name;
        # lower case wins when both are set.
        for candidate in (self._name.lower(), self._name.upper()):
            if candidate in os.environ:
                try:
                    return True, transforms.string_to_value(
                        os.environ[candidate], self.value_type)
                except Exception:  # pylint: disable-msg=broad-except
                    logging.error(
                        'Property %s failed to cast to type %s; removing.',
                        self._name, self._type)
                    del os.environ[candidate]
                break
        return False, None

    def get_value(self, db_overrides=None):
        """Gets value from overrides (datastore, environment) or default."""
        # Test overrides take priority over datastore overrides.
        for overrides in (Registry.test_overrides, db_overrides):
            if overrides and self.name in overrides:
                return overrides[self.name]
        # Fall back to the environment, then to the declared default.
        has_value, environ_value = self.get_environ_value()
        return environ_value if has_value else self._default_value

    @property
    def value(self):
        """Current effective value, consulting all override sources."""
        return self.get_value(db_overrides=Registry.get_overrides())
class Registry(object):
    """Holds all registered properties and their various overrides."""

    # All ConfigProperty instances ever constructed, keyed by name.
    registered = {}
    # In-process overrides set by tests; highest priority.
    test_overrides = {}
    # Overrides loaded from the datastore; refreshed periodically.
    db_overrides = {}
    # Names whose datastore entity is marked is_draft (stored, not applied).
    names_with_draft = {}
    # Timestamp (seconds) of the last reload attempt, successful or not.
    last_update_time = 0
    # Incremented on every reload attempt; usable for cache invalidation.
    update_index = 0
    # Per-thread reentry guard for get_overrides().
    threadlocal = threading.local()
    REENTRY_ATTR_NAME = 'busy'

    @classmethod
    def get_overrides(cls, force_update=False):
        """Returns current property overrides, maybe cached.

        Args:
            force_update: bool. When True, reload from the datastore even if
                the cached copy has not yet expired.

        Returns:
            Dict of property name to override value; possibly stale if the
            reload failed or was skipped.
        """
        now = long(time.time())
        age = now - cls.last_update_time
        max_age = UPDATE_INTERVAL_SEC.get_value(db_overrides=cls.db_overrides)

        # do not update if call is reentrant or outer db transaction exists
        busy = hasattr(cls.threadlocal, cls.REENTRY_ATTR_NAME) or (
            db.is_in_transaction())

        if (not busy) and (force_update or age < 0 or age >= max_age):
            # Value of '0' disables all datastore overrides.
            if UPDATE_INTERVAL_SEC.get_value() == 0:
                cls.db_overrides = {}
                return cls.db_overrides

            # Load overrides from a datastore.
            setattr(cls.threadlocal, cls.REENTRY_ATTR_NAME, True)
            try:
                # Overrides live in the default namespace regardless of the
                # namespace the caller is currently operating in.
                old_namespace = namespace_manager.get_namespace()
                try:
                    namespace_manager.set_namespace(
                        appengine_config.DEFAULT_NAMESPACE_NAME)
                    cls._load_from_db()
                finally:
                    namespace_manager.set_namespace(old_namespace)
            except Exception as e:  # pylint: disable-msg=broad-except
                logging.error(
                    'Failed to load properties from a database: %s.', str(e))
            finally:
                delattr(cls.threadlocal, cls.REENTRY_ATTR_NAME)

            # Avoid overload and update timestamp even if we failed.
            cls.last_update_time = now
            cls.update_index += 1

        return cls.db_overrides

    @classmethod
    def _load_from_db(cls):
        """Loads dynamic properties from db."""
        logging.info('Reloading properties.')
        overrides = {}
        drafts = set()
        for item in ConfigPropertyEntity.all().fetch(1000):
            name = item.key().name()
            if name not in cls.registered:
                logging.error(
                    'Property is not registered (skipped): %s', name)
                continue

            target = cls.registered[name]
            if target and item.is_draft:
                drafts.add(name)
            if target and not item.is_draft:
                # Enforce value type.
                try:
                    value = transforms.string_to_value(
                        item.value, target.value_type)
                except Exception:  # pylint: disable-msg=broad-except
                    logging.error(
                        'Property %s failed to cast to a type %s; removing.',
                        target.name, target.value_type)
                    continue

                # Enforce value validator.
                if target.validator:
                    errors = []
                    try:
                        target.validator(value, errors)
                    except Exception as e:  # pylint: disable-msg=broad-except
                        # BUGFIX: the original called errors.append() with two
                        # arguments (a logging-style format plus a tuple),
                        # which raises TypeError. Format the message first.
                        errors.append(
                            'Error validating property %s.\n%s' % (
                                target.name, e))
                    if errors:
                        logging.error(
                            'Property %s has invalid value:\n%s',
                            target.name, '\n'.join(errors))
                        continue

                overrides[name] = value

        cls.db_overrides = overrides
        cls.names_with_draft = drafts
class ConfigPropertyEntity(entities.BaseEntity):
    """A class that represents a named configuration property.

    The entity's key name is the registered property name; rows are read
    back by Registry._load_from_db() to override declared defaults.
    """
    # Serialized value; cast with transforms.string_to_value on load.
    value = db.TextProperty(indexed=False)
    # When True the value is stored but not applied (treated as a draft).
    is_draft = db.BooleanProperty(indexed=False)
def run_all_unit_tests():
    """Exercises defaults, environment overrides and type enforcement."""
    prop_str = ConfigProperty('gcb-str-prop', str, ('doc for str_prop'), 'foo')
    prop_int = ConfigProperty('gcb-int-prop', int, ('doc for int_prop'), 123)

    # With no overrides in place, defaults are returned.
    assert 'foo' == prop_str.default_value == prop_str.value
    assert 123 == prop_int.default_value == prop_int.value

    # Check os.environ override works.
    os.environ[prop_str.name] = 'bar'
    assert prop_str.value == 'bar'
    del os.environ[prop_str.name]
    assert prop_str.value == 'foo'

    # Check os.environ override with type casting.
    os.environ[prop_int.name] = '12345'
    assert prop_int.value == 12345

    # Check setting of value is disallowed: .value is a read-only property.
    raised = False
    try:
        prop_str.value = 'foo'
    except AttributeError:
        raised = True
    assert raised

    # Check value of bad type is disregarded in favor of the default.
    os.environ[prop_int.name] = 'foo bar'
    assert prop_int.value == prop_int.default_value
def validate_update_interval(value, errors):
    """Validates a candidate update interval, appending messages to errors.

    The editor documentation for UPDATE_INTERVAL_SEC promises an integer
    "between 1 and MAX_UPDATE_INTERVAL_SEC, inclusive" (0 is reserved for
    app.yaml), so the documented maximum itself must be accepted; the old
    check rejected it with 'value >= MAX_UPDATE_INTERVAL_SEC'.

    Args:
        value: candidate value; anything int() accepts.
        errors: list of strings to append validation failures to.
    """
    value = int(value)
    if value < 1 or value > MAX_UPDATE_INTERVAL_SEC:
        errors.append(
            'Expected a value between 1 and %s, inclusive.' % (
                MAX_UPDATE_INTERVAL_SEC))
# Registered at import time; Registry.get_overrides() reads this property to
# decide how often to reload overrides from the datastore, and treats a
# value of 0 as "datastore overrides disabled".
UPDATE_INTERVAL_SEC = ConfigProperty(
    'gcb_config_update_interval_sec', int, (
        'An update interval (in seconds) for reloading runtime properties '
        'from a datastore. Using this editor, you can set this value to an '
        'integer between 1 and %s, inclusive. To completely disable reloading '
        'properties from a datastore, you must set the value to 0. However, '
        'you can only set the value to 0 by directly modifying the app.yaml '
        'file.' % MAX_UPDATE_INTERVAL_SEC),
    default_value=DEFAULT_UPDATE_INTERVAL_SEC,
    validator=validate_update_interval)

if __name__ == '__main__':
    run_all_unit_tests()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions to work with various models."""
__author__ = [
'johncox@google.com (John Cox)',
'sll@google.com (Sean Lip)',
]
import logging
import transforms
# Module-scoped logger; basicConfig() ensures log output has a handler even
# when the hosting application has not configured logging itself.
_LOG = logging.getLogger('models.utils')
logging.basicConfig()
class Error(Exception):
    """Base class for all errors raised by this module."""
class StopMapping(Error):
    """Raised by user's map function to stop execution of QueryMapper.run."""
class QueryMapper(object):
    """Mapper that applies a function to each result of a db.query.

    QueryMapper works with result sets larger than 1000.

    Usage:

        def map_fn(model, named_arg, keyword_arg=None):
            [...]

        query = MyModel.all()
        # We manipulate query, so it cannot be reused after it's fed to
        # QueryMapper.
        mapper = QueryMapper(query)
        mapper.run(map_fn, 'foo', keyword_arg='bar')
    """

    def __init__(self, query, batch_size=20, counter=None, report_every=None):
        """Constructs a new QueryMapper.

        Args:
            query: db.Query. The query to run. Cannot be reused after the
                query mapper's run() method is invoked.
            batch_size: int. Number of results to fetch per batch.
            counter: entities.PerfCounter or None. If given, the counter to
                increment once for every entity retrieved by query.
            report_every: int or None. If specified, every report_every
                results we will log the number of results processed at level
                info. By default we will do this every 10 batches. Set to 0
                to disable logging.
        """
        if report_every is None:
            report_every = 10 * batch_size
        self._batch_size = batch_size
        self._counter = counter
        self._query = query
        self._report_every = report_every

    def run(self, fn, *fn_args, **fn_kwargs):
        """Runs the query in batches, applying a function to each result.

        Args:
            fn: function. Takes a single query result (either a db.Key or
                db.Model) instance as its first arg, then any number of
                positional and keyword arguments. Called on each result
                returned by the query.
            *fn_args: positional args delegated to fn.
            **fn_kwargs: keyword args delegated to fn.

        Returns:
            Integer. Total number of results processed.
        """
        total_count = 0
        cursor = None
        while True:
            batch_count, cursor = self._handle_batch(
                cursor, fn, *fn_args, **fn_kwargs)
            total_count += batch_count
            # An empty batch or a cleared cursor (set to None when fn raises
            # StopMapping) means iteration is done.
            if not (batch_count and cursor):
                return total_count
            if self._report_every != 0 and not total_count % self._report_every:
                # fn.func_name: Python 2 attribute (fn.__name__ in Python 3).
                _LOG.info(
                    'Models processed by %s.%s so far: %s',
                    fn.__module__, fn.func_name, total_count)

    def _handle_batch(self, cursor, fn, *fn_args, **fn_kwargs):
        # Resume the query where the previous batch left off.
        if cursor:
            self._query.with_cursor(start_cursor=cursor)
        count = 0
        empty = True
        batch = self._query.fetch(limit=self._batch_size)
        if self._counter:
            self._counter.inc(increment=len(batch))
        for result in batch:
            try:
                fn(result, *fn_args, **fn_kwargs)
            except StopMapping:
                # Returning no cursor tells run() to stop iterating; 'count'
                # reflects only the results fully processed before the stop.
                return count, None
            count += 1
            empty = False
        cursor = None
        if not empty:
            cursor = self._query.cursor()
        return count, cursor
def set_answer(answers, assessment_name, answer):
    """Stores the answer array for the given student and assessment.

    The caller must call answers.put() to commit.
    This does not do any type-checking on 'answer'; it just stores whatever
    is passed in.

    Args:
        answers: the StudentAnswers entity in which the answer should be
            stored.
        assessment_name: the name of the assessment.
        answer: an array containing the student's answers.
    """
    # Deserialize the existing answers, defaulting to an empty mapping.
    existing = transforms.loads(answers.data) if answers.data else {}
    existing[assessment_name] = answer
    answers.data = transforms.dumps(existing)
def set_score(student, assessment_name, score):
    """Stores the score for the given student and assessment.

    The caller must call student.put() to commit.
    This does not do any type-checking on 'score'; it just stores whatever
    is passed in.

    Args:
        student: the student whose answer should be stored.
        assessment_name: the name of the assessment.
        score: the student's score.
    """
    if student.scores:
        score_dict = transforms.loads(student.scores)
    else:
        score_dict = {}
    score_dict[assessment_name] = score
    student.scores = transforms.dumps(score_dict)
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Properties and its collections."""
__author__ = 'Abhinav Khandelwal (abhinavk@google.com)'
import collections
class Property(object):
    """A single named schema property and its presentation metadata."""

    def __init__(
        self, name, label, property_type, select_data=None, description=None,
        optional=False, extra_schema_dict_values=None):
        # All arguments are stored verbatim. Only 'name' is exposed through
        # an accessor here; the remaining fields are presumably read by
        # schema-rendering code elsewhere — verify against callers.
        self._name = name
        self._label = label
        self._property_type = property_type
        self._select_data = select_data
        self._description = description
        self._optional = optional
        self._extra_schema_dict_values = extra_schema_dict_values

    @property
    def name(self):
        """Returns the property's name."""
        return self._name
class Registry(object):
    """An ordered collection of Property objects and nested registries."""

    def __init__(self, title, description=None, extra_schema_dict_values=None):
        self._title = title
        self._description = description
        self._extra_schema_dict_values = extra_schema_dict_values
        self._properties = []
        self._sub_registories = collections.OrderedDict()
        # Base JSON-schema style dict; 'description' only appears when given.
        self._registry = {'id': title, 'type': 'object'}
        if description:
            self._registry['description'] = description

    @property
    def title(self):
        """Returns this registry's title."""
        return self._title

    def add_property(self, schema_field):
        """Adds a Property to this Registry."""
        self._properties.append(schema_field)

    def add_sub_registry(
        self, name, title=None, description=None, registry=None):
        """Adds a named sub-registry, creating one if none is supplied."""
        if registry is None:
            registry = Registry(title, description)
        self._sub_registories[name] = registry
        return registry

    def has_subregistries(self):
        """Returns True when at least one sub-registry has been added."""
        return bool(self._sub_registories)
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common classes and methods for processing text content."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
from pyparsing import alphas
from pyparsing import Combine
from pyparsing import Each
from pyparsing import Group
from pyparsing import Literal
from pyparsing import nums
from pyparsing import Optional
from pyparsing import QuotedString
from pyparsing import Regex
from pyparsing import Suppress
from pyparsing import Word
from pyparsing import ZeroOrMore
from tools import verify
def sep(text):
    """Makes a separator: matches literal 'text' but drops it from output."""
    return Suppress(Literal(text))
def key(name):
    """Makes grammar expression for a key.

    Accepts the bare name, or the name wrapped in single or double quotes;
    the quotes themselves are suppressed from the parse results.
    """
    return (
        Literal(name) ^
        (sep('\'') + Literal(name) + sep('\'')) ^
        (sep('"') + Literal(name) + sep('"')))
def list_of(term):
    """Makes a delimited list of terms.

    Matches zero or more comma-separated occurrences of 'term'; one
    trailing comma is tolerated. Comma separators are suppressed.
    """
    return (
        Optional(
            term +
            ZeroOrMore(Suppress(Literal(',')) + term) +
            Optional(Suppress(Literal(',')))
        )
    )
def chunks(l, n):
    """Partitions the list l into disjoint sub-lists of length n.

    Args:
        l: sequence to partition; its length must be a multiple of n.
        n: positive length of each sub-list.

    Returns:
        List of len(l) / n consecutive slices of l, each of length n.

    Raises:
        Exception: if len(l) is not a multiple of n.
    """
    if len(l) % n != 0:
        # BUGFIX: interpolate with '%'; the old logging-style comma form
        # raised Exception('... multiple on %s', n) with the placeholder
        # unexpanded (and the typo 'on' for 'of').
        raise Exception('List length is not a multiple of %s' % n)
    return [l[i:i+n] for i in range(0, len(l), n)]
def make_dict(unused_s, unused_l, toks):
    """Makes a dict from the list using even items as keys, odd as values."""
    result = {}
    for pair_key, pair_value in chunks(toks, 2):
        result[pair_key] = pair_value
    return result
def make_list(unused_s, unused_l, toks):
    """Makes a list out of a token tuple holding a list."""
    # Each token is a pyparsing Group; asList() unwraps it to a plain list.
    return [item.asList() for item in toks]
def make_bool(value):
    """Makes a boolean value lambda.

    Returns a zero-argument factory suitable for setParseAction(); each
    call produces a verify.Term tagged verify.BOOLEAN carrying 'value'.
    """
    def make_value():
        return verify.Term(verify.BOOLEAN, value)
    return make_value
def make_int(value):
    """Parse action: converts the first matched token to an int.

    Unlike make_bool this is not a lambda factory; it is attached
    directly via setParseAction(make_int).
    """
    return int(value[0])
def make_float(value):
    """Parse action: converts the first matched token to a float.

    Unlike make_bool this is not a lambda factory; it is attached
    directly via setParseAction(make_float).
    """
    return float(value[0])
class AssessmentParser13(object):
    """Grammar and parser for the assessment.

    The grammar mirrors the legacy JavaScript-like assessment files: a
    top-level 'assessment = {...}' object whose keys may appear in any
    order (hence Each/Optional) with optional trailing commas.
    """

    string = (
        QuotedString('\'', escChar='\\', multiline=True) ^
        QuotedString('"', escChar='\\', multiline=True))

    boolean = (
        Literal('true').setParseAction(make_bool(True)) ^
        Literal('false').setParseAction(make_bool(False)))

    # Intentionally shadows the builtin inside the class namespace only.
    float = Combine(
        Word(nums) + Optional(Literal('.') + Word(nums))
    ).setParseAction(make_float)

    integer = Word(nums).setParseAction(make_int)

    # A choice is a plain string, or correct("...") marking the right one.
    choice_decl = (
        string ^
        Combine(
            sep('correct(') + string + sep(')')
        ).setParseAction(lambda x: verify.Term(verify.CORRECT, x[0]))
    )

    regex = (
        Regex('/(.*)/i') ^
        Combine(
            sep('regex(') +
            QuotedString('"', escChar='\\') +
            sep(')')
        ).setParseAction(lambda x: verify.Term(verify.REGEX, x[0]))
    )

    question_decl = (
        sep('{') +
        Each(
            Optional(
                key('questionHTML') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('lesson') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('correctAnswerString') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('correctAnswerRegex') + sep(':') +
                regex + Optional(sep(','))) +
            Optional(
                key('correctAnswerNumeric') + sep(':') +
                float + Optional(sep(','))) +
            Optional(
                key('choiceScores') + sep(':') +
                sep('[') +
                Group(list_of(float)).setParseAction(make_list) +
                sep(']') +
                Optional(sep(','))) +
            Optional(
                key('weight') + sep(':') + integer + Optional(sep(','))) +
            Optional(
                key('multiLine') + sep(':') +
                boolean + Optional(sep(','))) +
            Optional(
                key('choices') + sep(':') +
                sep('[') +
                Group(list_of(choice_decl)).setParseAction(make_list) +
                sep(']') +
                Optional(sep(',')))
        ) +
        sep('}')).setParseAction(make_dict)

    assessment_grammar = (
        sep('assessment') +
        sep('=') +
        sep('{') +
        Each(
            Optional(
                key('assessmentName') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('preamble') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('checkAnswers') + sep(':') +
                boolean + Optional(sep(','))) +
            Optional(
                key('questionsList') + sep(':') +
                sep('[') +
                Group(list_of(question_decl)).setParseAction(make_list) +
                sep(']') +
                Optional(sep(',')))
        ) +
        sep('}') +
        Optional(sep(';'))).setParseAction(make_dict)

    @classmethod
    def parse_string(cls, content):
        """Parses raw assessment text; returns pyparsing results."""
        return cls.assessment_grammar.parseString(content)

    @classmethod
    def parse_string_in_scope(cls, content, scope, root_name):
        """Parses assessment text following grammar.

        Returns a dict combining 'scope', empty __builtins__, and the parsed
        AST bound to root_name, mimicking the legacy exec-based evaluator.
        """
        if 'assessment' != root_name:
            # BUGFIX: interpolate with '%'; the old logging-style comma form
            # raised the exception with '%s' left unexpanded.
            raise Exception('Unsupported schema: %s' % root_name)

        # we need to extract the results as a dictionary; so we remove the
        # outer array holding it
        ast = cls.parse_string(content).asList()
        if len(ast) == 1:
            ast = ast[0]

        return dict(
            scope.items() +
            {'__builtins__': {}}.items() +
            {root_name: ast}.items())
class ActivityParser13(object):
    """Grammar and parser for the activity.

    An activity file is 'activity = [...]' holding a mix of plain HTML
    strings and question objects; object keys may appear in any order
    (hence Each/Optional) with optional trailing commas.
    """

    variable = Word(alphas)
    integer = Word(nums).setParseAction(make_int)

    string = (
        QuotedString('\'', escChar='\\', multiline=True) ^
        QuotedString('"', escChar='\\', multiline=True))

    boolean = (
        Literal('true').setParseAction(make_bool(True)) ^
        Literal('false').setParseAction(make_bool(False)))

    regex = (
        Regex('/(.*)/i') ^
        Combine(
            sep('regex(') +
            QuotedString('"', escChar='\\') +
            sep(')')
        ).setParseAction(lambda x: verify.Term(verify.REGEX, x[0]))
    )

    # [text, is_correct, feedback] triple for a multiple-choice option.
    choice_decl = Group(
        sep('[') +
        string + sep(',') +
        boolean + sep(',') +
        string +
        sep(']')
    )

    choices_decl = Group(
        sep('[') +
        Optional(list_of(choice_decl)) +
        sep(']')
    ).setParseAction(make_list)

    multiple_choice_decl = (
        key('questionType') + sep(':') + key('multiple choice') +
        Optional(sep(','))
    )

    multiple_choice = (
        sep('{') +
        multiple_choice_decl +
        Each(
            Optional(
                key('questionHTML') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('choices') + sep(':') +
                choices_decl + Optional(sep(',')))
        ) +
        sep('}')
    ).setParseAction(make_dict)

    free_text_decl = (
        key('questionType') + sep(':') + key('freetext') +
        Optional(sep(','))
    )

    free_text = (
        sep('{') +
        free_text_decl +
        Each(
            Optional(
                key('questionHTML') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('correctAnswerRegex') + sep(':') +
                regex + Optional(sep(','))) +
            Optional(
                key('correctAnswerOutput') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('incorrectAnswerOutput') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('showAnswerPrompt') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('showAnswerOutput') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('outputHeight') + sep(':') +
                string + Optional(sep(',')))
        ) +
        sep('}')
    ).setParseAction(make_dict)

    question_list_decl = (
        sep('{') +
        Each(
            Optional(
                key('questionHTML') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('choices') + sep(':') +
                sep('[') +
                Group(list_of(string)).setParseAction(make_list) +
                sep(']') +
                Optional(sep(','))) +
            Optional(
                key('correctIndex') + sep(':') +
                (integer ^ (
                    sep('[') +
                    Group(list_of(integer)).setParseAction(make_list) +
                    sep(']'))) +
                Optional(sep(','))) +
            Optional(
                key('multiSelect') + sep(':') +
                boolean + Optional(sep(','))),
        ) +
        sep('}')).setParseAction(make_dict)

    questions_list_decl = Group(
        sep('[') +
        Optional(list_of(question_list_decl)) +
        sep(']')
    ).setParseAction(make_list)

    multiple_choice_group_decl = (
        key('questionType') + sep(':') + key('multiple choice group') +
        Optional(sep(','))
    )

    multiple_choice_group = (
        sep('{') +
        multiple_choice_group_decl +
        Each(
            Optional(
                key('questionGroupHTML') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('allCorrectMinCount') + sep(':') +
                integer + Optional(sep(','))) +
            Optional(
                key('allCorrectOutput') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('someIncorrectOutput') + sep(':') +
                string + Optional(sep(','))) +
            Optional(
                key('questionsList') + sep(':') +
                questions_list_decl + Optional(sep(',')))
        ) +
        sep('}')
    ).setParseAction(make_dict)

    activity_grammar = (
        sep('activity') +
        sep('=') +
        sep('[') +
        Optional(list_of(
            string ^ multiple_choice ^ free_text ^ multiple_choice_group)) +
        sep(']') +
        Optional(sep(';')))

    @classmethod
    def parse_string(cls, content):
        """Parses raw activity text; returns pyparsing results."""
        return cls.activity_grammar.parseString(content)

    @classmethod
    def parse_string_in_scope(cls, content, scope, root_name):
        """Parses activity text following grammar.

        Returns a dict combining 'scope', empty __builtins__, and the parsed
        AST bound to root_name, mimicking the legacy exec-based evaluator.
        """
        if 'activity' != root_name:
            # BUGFIX: interpolate with '%'; the old logging-style comma form
            # raised the exception with '%s' left unexpanded.
            raise Exception('Unsupported schema: %s' % root_name)
        return dict(
            scope.items() +
            {'__builtins__': {}}.items() +
            {root_name: cls.parse_string(content).asList()}.items())
# Here we register all the supported parsers, keyed by schema root name.
SUPPORTED_PARSERS = {
    'activity': ActivityParser13, 'assessment': AssessmentParser13}
def verify_activity(activity_text):
    """Parses and semantically verifies activity."""
    scope = verify.Activity().scope
    activity = ActivityParser13.parse_string_in_scope(
        activity_text, scope, 'activity')
    assert activity
    verify.Verifier().verify_activity_instance(activity, 'test')
def verify_assessment(assessment_text):
    """Parses and semantically verifies assessment."""
    scope = verify.Assessment().scope
    assessment = AssessmentParser13.parse_string_in_scope(
        assessment_text, scope, 'assessment')
    assert assessment
    verify.Verifier().verify_assessment_instance(assessment, 'test')
def parse_string_in_scope(content, scope, root_name):
    """Parses content with the parser registered for root_name.

    Args:
        content: string holding the source text to parse.
        scope: dict of names made available to the parsed result.
        root_name: schema root; must be a key of SUPPORTED_PARSERS.

    Returns:
        Dict binding root_name to the parsed AST (see the parser classes).

    Raises:
        Exception: if no parser is registered for root_name.
    """
    parser = SUPPORTED_PARSERS.get(root_name)
    if not parser:
        # BUGFIX: interpolate with '%'; the old logging-style comma form
        # raised Exception('...: %s', root_name) with '%s' left unexpanded.
        raise Exception('Unsupported schema: %s' % root_name)
    return parser.parse_string_in_scope(content, scope, root_name)
def test_activity_multiple_choice_group():
    """Test activity parsing."""
    # Mixes bare and quoted keys, out-of-order fields and trailing commas
    # to exercise the grammar's flexibility.
    activity_text = (
        """activity = [
        '<p>This is text.</p>',
        {
          questionType: 'multiple choice group',
          questionGroupHTML: '<p>This is text.</p>',
          allCorrectMinCount: 55,
          allCorrectOutput: '<p>This is text.</p>',
          someIncorrectOutput: '<p>This is text.</p>',
          questionsList: [
            {questionHTML: '<p>This is text.</p>'},
            {correctIndex: [1, 2, 3]},
            {questionHTML: '<p>This is text.</p>',
             correctIndex: 0, multiSelect: false,
             choices: ['foo', 'bar'],},
          ]
        },
        {
          "questionType": 'multiple choice group',
          questionGroupHTML:
              '<p>This section will test you on colors and numbers.</p>',
          questionsList: [
            {questionHTML: 'Pick all <i>odd</i> numbers:',
             choices: ['1', '2', '3', '4', '5'], correctIndex: [0, 2, 4]},
            {questionHTML: 'Pick one <i>even</i> number:',
             choices: ['1', '2', '3', '4', '5'], correctIndex: [1, 3],
             multiSelect: false},
            {questionHTML: 'What color is the sky?',
             choices: ['#00FF00', '#00FF00', '#0000FF'], correctIndex: 2}
          ],
          allCorrectMinCount: 2,
          allCorrectOutput: 'Great job! You know the material well.',
          someIncorrectOutput: 'You must answer at least two questions correctly.'
        }
    ];
    """)
    verify_activity(activity_text)
def test_activity_multiple_choice():
    """Test activity parsing."""
    # A single multiple-choice question with [text, correct, feedback]
    # choice triples and a trailing comma.
    activity_text = (
        """activity = [
        '<p>This is text.</p>',
        {
          questionType: 'multiple choice',
          questionHTML: '<p>This is text.</p>',
          choices: [
            ['<p>This is text.</p>', false, '<p>This is text.</p>'],
            ['<p>This is text.</p>', true, '<p>This is text.</p>'],
          ]
        }
    ];
    """)
    verify_activity(activity_text)
def test_activity_free_text():
    """Test activity parsing."""
    # Two freetext questions; the first quotes 'questionType', the second
    # omits some optional fields.
    activity_text = (
        """activity = [
        '<p>This is text.</p>',
        {
          'questionType': 'freetext',
          questionHTML: '<p>This is text.</p>',
          showAnswerPrompt: '<p>This is text.</p>',
          showAnswerOutput: '<p>This is text.</p>',
          correctAnswerRegex: regex("/4|four/i"),
          correctAnswerOutput: '<p>This is text.</p>',
          incorrectAnswerOutput: '<p>This is text.</p>',
        },
        {
          questionType: 'freetext',
          questionHTML: '<p>What color is the snow?</p>',
          correctAnswerRegex: regex("/white/i"),
          correctAnswerOutput: 'Correct!',
          incorrectAnswerOutput: 'Try again.',
          showAnswerOutput: 'Our search expert says: white!' },
    ];
    """)
    verify_activity(activity_text)
def test_assessment():
    """Test assessment parsing."""
    # Covers string, numeric, regex and choiceScores question forms.
    # pylint: disable-msg=anomalous-backslash-in-string
    assessment_text = (
        """assessment = {
        assessmentName: '12345',
        preamble: '<p>This is text.</p>',
        checkAnswers: false,
        questionsList: [
            {questionHTML: '<p>This is text.</p>',
             choices:
                ["A and B", "D and B", correct("A and C"), "C and D", "I don't know"]
            },
            {questionHTML: '<p>This is text.</p>',
             choiceScores: [0, 0.5, 1.0],
             weight: 3,
             choices: [correct("True"), "False", "I don't know"]
            },
            {questionHTML: '<p>This is text.</p>',
             correctAnswerString: 'sunrise',
             correctAnswerNumeric: 7.9
            },
            {questionHTML: '<p>This is text.</p>',
             correctAnswerNumeric: 7,
             correctAnswerRegex: regex("/354\s*[+]\s*651/")
            }
        ],
    };
    """)
    # pylint: enable-msg=anomalous-backslash-in-string
    verify_assessment(assessment_text)
def test_activity_ast():
    """Test a mix of various activities using legacy and new parser."""
    activity_text = (
        """activity = [
        '<p>This is just some <i>HTML</i> text!</p>',
        { questionType: 'multiple choice',
          questionHTML: '<p>What letter am I thinking about now?</p>',
          choices: [
              ['A', false, '"A" is wrong, try again.'],
              ['B', true, '"B" is correct!'],
              ['C', false, '"C" is wrong, try again.'],
              ['D', false, '"D" is wrong, try again.']
          ]
        },
        { questionType: 'freetext',
          questionHTML: '<p>What color is the snow?</p>',
          correctAnswerRegex: regex("/white/i"),
          correctAnswerOutput: 'Correct!',
          incorrectAnswerOutput: 'Try again.',
          showAnswerOutput: 'Our search expert says: white!' },
        { questionType: 'multiple choice group',
          questionGroupHTML:
              '<p>This section will test you on colors and numbers.</p>',
          allCorrectMinCount: 2,
          questionsList: [
            {questionHTML: 'Pick all <i>odd</i> numbers:',
             choices: ['1', '2', '3', '4', '5'], correctIndex: [0, 2, 4]},
            {questionHTML: 'Pick one <i>even</i> number:',
             choices: ['1', '2', '3', '4', '5'], correctIndex: [1, 3],
             multiSelect: false},
            {questionHTML: 'What color is the sky?',
             choices: ['#00FF00', '#00FF00', '#0000FF'], correctIndex: 2}
          ],
          allCorrectOutput: 'Great job! You know the material well.',
          someIncorrectOutput: 'You must answer at least two questions correctly.'
        }
    ];
    """)
    verify_activity(activity_text)

    # The pyparsing-based AST must exactly match the legacy eval-based one.
    scope = verify.Activity().scope
    current_ast = ActivityParser13.parse_string_in_scope(
        activity_text, scope, 'activity')
    expected_ast = verify.legacy_eval_python_expression_for_test(
        activity_text, scope, 'activity')
    same = (
        len(current_ast.get('activity')) == 4 and
        current_ast.get('activity') == expected_ast.get('activity') and
        current_ast == expected_ast)
    if not same:
        import pprint  # # pylint: disable-msg=g-import-not-at-top
        pprint.pprint(current_ast.get('activity'))
        pprint.pprint(expected_ast.get('activity'))
    assert same
def test_assessment_ast():
    """Test a mix of various activities using legacy and new parser."""
    # pylint: disable-msg=anomalous-backslash-in-string
    assessment_text = (
        """assessment = {
        preamble: '<p>This is text.</p>',
        questionsList: [
            {'questionHTML': '<p>This is text.</p>',
             choices:
                ["A and B", "D and B", correct("A and C"), "C and D", "I don't know"]
            },
            {"questionHTML": '<p>This is text.</p>',
             choices: [correct("True"), "False", "I don't know"],
             choiceScores: [0, 0.5, 1.0],
             weight: 3
            },
            {questionHTML: '<p>This is text.</p>',
             correctAnswerString: 'sunrise'
            },
            {questionHTML: '<p>This is text.</p>',
             correctAnswerRegex: regex("/354\s*[+]\s*651/")
            }
        ],
        assessmentName: 'Pre',
        checkAnswers: false
    }
    """)
    # pylint: enable-msg=anomalous-backslash-in-string
    verify_assessment(assessment_text)

    # The pyparsing-based AST must exactly match the legacy eval-based one.
    scope = verify.Assessment().scope
    current_ast = AssessmentParser13.parse_string_in_scope(
        assessment_text, scope, 'assessment')
    expected_ast = verify.legacy_eval_python_expression_for_test(
        assessment_text, scope, 'assessment')
    same = (
        len(current_ast.get('assessment')) == 4 and
        len(current_ast.get('assessment').get('questionsList')) == 4 and
        current_ast.get('assessment') == expected_ast.get('assessment') and
        current_ast == expected_ast)
    if not same:
        import pprint  # # pylint: disable-msg=g-import-not-at-top
        pprint.pprint(current_ast.get('assessment'))
        pprint.pprint(expected_ast.get('assessment'))
    assert same
def test_list_of():
    """Test delimited list."""
    grammar = Optional(
        Literal('[') +
        Optional(list_of(Literal('a') ^ Literal('b'))) +
        Literal(']'))

    # Empty list, single items, and a tolerated trailing comma.
    assert str(['[', ']']) == str(grammar.parseString('[]'))
    assert str(['[', 'a', ']']) == str(grammar.parseString('[a]'))
    assert str(['[', 'b', ']']) == str(grammar.parseString('[b]'))
    assert str(['[', 'a', ']']) == str(grammar.parseString('[a,]'))
    assert str(['[', 'b', ']']) == str(grammar.parseString('[b,]'))

    # Separators and whitespace are suppressed from the results.
    assert str(['[', 'a', 'a', 'a', 'a', ']']) == str(
        grammar.parseString('[a, a, a, a]'))
    assert str(['[', 'a', 'a', 'a', 'a', ']']) == str(
        grammar.parseString('[a,a,a,a]'))
    assert str(['[', 'a', 'a', 'a', 'a', ']']) == str(
        grammar.parseString('[a,a,a,a,]'))
    assert str(['[', 'a', 'b', 'a', 'b', ']']) == str(
        grammar.parseString('[a,b,a,b]'))
    assert str(['[', 'b', 'a', 'b', 'a', ']']) == str(
        grammar.parseString('[b,a,b,a]'))
    assert str(['[', 'b', 'b', 'b', 'b', ']']) == str(
        grammar.parseString('[b,b,b,b]'))

    # The whole grammar is Optional, so non-matching input yields nothing.
    assert not grammar.parseString('')
    assert not grammar.parseString('[c]')
    assert not grammar.parseString('[a,c,b]')
def run_all_unit_tests():
    """Runs every parser unit test with verify.parse_content patched."""
    saved_parse_content = verify.parse_content
    verify.parse_content = parse_string_in_scope
    try:
        test_list_of()
        test_activity_multiple_choice()
        test_activity_free_text()
        test_activity_multiple_choice_group()
        test_activity_ast()
        test_assessment()
        test_assessment_ast()
        # Re-run the existing verifier suite, now backed by the parser
        # instead of exec/compile.
        verify.test_sample_assets()
    finally:
        # Always restore the original hook, even when a test fails.
        verify.parse_content = saved_parse_content
# Allow running this module directly as a stand-alone test script.
if __name__ == '__main__':
    run_all_unit_tests()
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes supporting dynamically registering custom modules."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
class Module(object):
    """A class that holds module information.

    Instantiating a Module registers it, by name, in the global Registry.
    A module exposes its routes only while its name is present in
    Registry.enabled_module_names.
    """

    def __init__(self, name, desc, global_routes, namespaced_routes):
        """Initializes and registers a module.

        Args:
            name: string. Name of the module; used as the registry key.
            desc: string. Human-readable description of the module.
            global_routes: list. Routes served outside any namespace.
            namespaced_routes: list. Routes served within a namespace.
        """
        self._name = name
        self._desc = desc
        self._global_routes = global_routes
        self._namespaced_routes = namespaced_routes

        Registry.registered_modules[self._name] = self

    def disable(self):
        # Fix: use discard() rather than remove() so disabling a module that
        # is already disabled is a harmless no-op instead of raising KeyError.
        Registry.enabled_module_names.discard(self.name)

    def enable(self):
        Registry.enabled_module_names.add(self.name)

    @property
    def enabled(self):
        return self.name in Registry.enabled_module_names

    @property
    def name(self):
        return self._name

    @property
    def desc(self):
        return self._desc

    @property
    def global_routes(self):
        # A disabled module exposes no routes.
        if self.name in Registry.enabled_module_names:
            return self._global_routes
        else:
            return []

    @property
    def namespaced_routes(self):
        # A disabled module exposes no routes.
        if self.name in Registry.enabled_module_names:
            return self._namespaced_routes
        else:
            return []
class Registry(object):
    """A registry that holds all custom modules."""

    # Maps module name -> Module instance for every module ever constructed.
    registered_modules = {}
    # Names of the modules that are currently enabled.
    enabled_module_names = set()

    @classmethod
    def get_all_routes(cls):
        """Collects (global_routes, namespaced_routes) from enabled modules."""
        global_routes = []
        namespaced_routes = []

        # Only populate the routing table with enabled modules.
        enabled = [
            module for module in cls.registered_modules.values()
            if module.enabled]
        for module in enabled:
            global_routes.extend(module.global_routes)
            namespaced_routes.extend(module.namespaced_routes)
        return global_routes, namespaced_routes
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Review processor that is used for managing human-reviewed assessments."""
__author__ = [
'sll@google.com (Sean Lip)',
]
from modules.review import domain
import entities
import student_work
import transforms
# Matcher name indicating that a human-graded assessment is peer-graded.
PEER_MATCHER = 'peer'

# All matcher names a unit workflow is allowed to declare.
ALLOWED_MATCHERS = [PEER_MATCHER]
class ReviewsProcessor(object):
    """A class that processes review arrangements.

    Acts as a facade: each operation is routed to the matcher implementation
    declared by the unit's workflow.
    """

    # Maps each matcher name to its implementation; the peer matcher is
    # registered at startup via set_peer_matcher().
    TYPE_IMPL_MAPPING = {
        PEER_MATCHER: None,
    }

    @classmethod
    def set_peer_matcher(cls, matcher):
        cls.TYPE_IMPL_MAPPING[PEER_MATCHER] = matcher

    def __init__(self, course):
        self._course = course

    def _get_course(self):
        return self._course

    def _get_impl(self, unit_id):
        # The unit's workflow declares which matcher implementation to use.
        unit = self._get_course().find_unit_by_id(unit_id)
        matcher_name = unit.workflow.get_matcher()
        return self.TYPE_IMPL_MAPPING[matcher_name]

    def _get_review_step_keys_by(self, unit_id, reviewer_key):
        return self._get_impl(unit_id).get_review_step_keys_by(
            str(unit_id), reviewer_key)

    def _get_submission_and_review_step_keys(self, unit_id, reviewee_key):
        return self._get_impl(unit_id).get_submission_and_review_step_keys(
            str(unit_id), reviewee_key)

    def add_reviewer(self, unit_id, reviewee_key, reviewer_key):
        submission_key = student_work.Submission.get_key(unit_id, reviewee_key)
        return self._get_impl(unit_id).add_reviewer(
            str(unit_id), submission_key, reviewee_key, reviewer_key)

    def delete_reviewer(self, unit_id, review_step_key):
        return self._get_impl(unit_id).delete_reviewer(review_step_key)

    def get_new_review(self, unit_id, reviewer_key):
        return self._get_impl(unit_id).get_new_review(
            str(unit_id), reviewer_key)

    def get_review_steps_by(self, unit_id, reviewer_key):
        step_keys = self._get_review_step_keys_by(unit_id, reviewer_key)
        return self.get_review_steps_by_keys(unit_id, step_keys)

    def get_reviews_by_keys(
        self, unit_id, review_keys, handle_empty_keys=False):
        """Gets a list of reviews, given their review keys.

        If handle_empty_keys is True, then no error is thrown on supplied keys
        that are None; the elements in the result list corresponding to those
        keys simply return None. This usually arises when this method is called
        immediately after get_review_steps_by_keys().

        Args:
            unit_id: string. Id of the unit to get the reviews for.
            review_keys: [db.Key of peer.ReviewStep]. May include None, if
                handle_empty_keys is True.
            handle_empty_keys: if True, the return value contains None for keys
                that are None. If False, the method throws if empty keys are
                supplied.

        Returns:
            List with the same number of elements as review_keys. It contains:
            - the JSON-decoded contents of the review corresponding to that
                review_key, or
            - None if either:
                - no review has been submitted for that review key, or
                - handle_empty_keys == True and the review_key is None.
        """
        impl = self._get_impl(unit_id)
        if not handle_empty_keys:
            reviews = impl.get_reviews_by_keys(review_keys)
        else:
            # Fetch only the non-None keys, then scatter the fetched reviews
            # back into a list aligned with the original key positions.
            present = [
                (index, key) for index, key in enumerate(review_keys)
                if key is not None]
            fetched = impl.get_reviews_by_keys(
                [key for unused_index, key in present])
            reviews = [None] * len(review_keys)
            for (index, unused_key), review in zip(present, fetched):
                reviews[index] = review
        return [
            transforms.loads(review.contents) if review else None
            for review in reviews]

    def get_review_steps_by_keys(self, unit_id, review_step_keys):
        return self._get_impl(unit_id).get_review_steps_by_keys(
            review_step_keys)

    def get_submission_and_review_steps(self, unit_id, reviewee_key):
        """Gets the submission and a list of review steps for a unit/reviewee.

        Note that review steps marked removed are included in the result set.

        Args:
            unit_id: string. Id of the unit to get the data for.
            reviewee_key: db.Key of models.models.Student. The student to get
                the data for.

        Returns:
            - None if no submission was found for the given unit_id,
                reviewee_key pair.
            - (Object, [peer.ReviewStep]) otherwise. The first element is the
                de-JSONified content of the reviewee's submission. The second
                element is a list of review steps for this submission, sorted
                by creation date.
        """
        keys = self._get_submission_and_review_step_keys(
            unit_id, reviewee_key)
        if keys is None:
            return None
        submission_contents = student_work.Submission.get_contents_by_key(
            keys[0])
        ordered_steps = sorted(
            self.get_review_steps_by_keys(unit_id, keys[1]),
            key=lambda step: step.create_date)
        return [submission_contents, ordered_steps]

    def does_submission_exist(self, unit_id, reviewee_key):
        submission_key = student_work.Submission.get_key(unit_id, reviewee_key)
        return bool(entities.get(submission_key))

    def start_review_process_for(self, unit_id, submission_key, reviewee_key):
        return self._get_impl(unit_id).start_review_process_for(
            str(unit_id), submission_key, reviewee_key)

    def write_review(
        self, unit_id, review_step_key, review_payload, mark_completed):
        return self._get_impl(unit_id).write_review(
            review_step_key, transforms.dumps(review_payload),
            mark_completed=mark_completed)
class ReviewUtils(object):
    """A utility class for processing data relating to assessment reviews."""

    @classmethod
    def count_completed_reviews(cls, review_steps):
        """Counts the number of completed reviews in the given set."""
        return len([
            step for step in review_steps
            if step.state == domain.REVIEW_STATE_COMPLETED])

    @classmethod
    def has_completed_all_assigned_reviews(cls, review_steps):
        """Returns whether the student has completed all assigned reviews."""
        return all(
            step.state == domain.REVIEW_STATE_COMPLETED
            for step in review_steps)

    @classmethod
    def has_completed_enough_reviews(cls, reviews, review_min_count):
        """Checks whether the review count is at least the minimum required."""
        return cls.count_completed_reviews(reviews) >= review_min_count

    @classmethod
    def get_review_progress(
        cls, review_steps, review_min_count, progress_tracker):
        """Gets the progress value based on the number of reviews done.

        Args:
            review_steps: a list of ReviewStep objects.
            review_min_count: the minimum number of reviews that the student is
                required to complete for this assessment.
            progress_tracker: the course progress tracker.

        Returns:
            the corresponding progress value: 0 (not started), 1 (in progress)
            or 2 (completed).
        """
        completed_count = cls.count_completed_reviews(review_steps)
        if cls.has_completed_enough_reviews(review_steps, review_min_count):
            return progress_tracker.COMPLETED_STATE
        if completed_count > 0:
            return progress_tracker.IN_PROGRESS_STATE
        return progress_tracker.NOT_STARTED_STATE
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Virtual file system for managing files locally or in the cloud."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import datetime
import os
from common import jinja_filters
import jinja2
from entities import BaseEntity
from models import MemcacheManager
from google.appengine.api import namespace_manager
from google.appengine.ext import db
# We want to use memcache for both objects that exist and do not exist in the
# datastore. If object exists we cache its instance, if object does not exist
# we cache this object below.
# NOTE: this sentinel is falsy and is compared by equality (NO_OBJECT ==
# result), which distinguishes "known missing" from a plain cache miss (None).
NO_OBJECT = {}
class AbstractFileSystem(object):
    """A generic file system interface that forwards to an implementation."""

    def __init__(self, impl):
        self._impl = impl

    @property
    def impl(self):
        return self._impl

    @classmethod
    def normpath(cls, path):
        """Make Windows and Linux filenames to have the same separator '/'."""
        if not path:
            # Covers both None and the empty string.
            return path
        # Replace '\' with '/' and force the result to be Unicode.
        return u'' + path.replace('\\', '/')

    def isfile(self, filename):
        """Checks if file exists, similar to os.path.isfile(...)."""
        return self._impl.isfile(filename)

    def open(self, filename):
        """Returns a stream with the file content, similar to open(...)."""
        return self._impl.get(filename)

    def get(self, filename):
        """Returns bytes with the file content, but no metadata."""
        stream = self._impl.get(filename)
        return stream.read()

    def put(self, filename, stream, **kwargs):
        """Replaces the contents of the file with the bytes in the stream."""
        self._impl.put(filename, stream, **kwargs)

    def delete(self, filename):
        """Deletes a file and metadata associated with it."""
        self._impl.delete(filename)

    def list(self, dir_name):
        """Lists all files in a directory."""
        return self._impl.list(dir_name)

    def get_jinja_environ(self, dir_names):
        """Configures jinja environment loaders for this file system."""
        return self._impl.get_jinja_environ(dir_names)

    def is_read_write(self):
        return self._impl.is_read_write()

    def is_draft(self, stream):
        # A stream counts as draft only when it carries truthy metadata
        # whose is_draft flag is set.
        if not getattr(stream, 'metadata', None):
            return False
        return stream.metadata.is_draft
class LocalReadOnlyFileSystem(object):
    """A read-only file system serving only local files."""

    def __init__(self, logical_home_folder=None, physical_home_folder=None):
        """Creates a new instance of the disk-backed read-only file system.

        Args:
            logical_home_folder: A logical home dir of all files (/a/b/c/...).
            physical_home_folder: A physical location on the file system (/x/y).

        Returns:
            A new instance of the object.
        """
        self._logical_home_folder = AbstractFileSystem.normpath(
            logical_home_folder)
        self._physical_home_folder = AbstractFileSystem.normpath(
            physical_home_folder)

    def _map_path(self, filename, from_root, to_root):
        # Rebases filename from one home folder onto the other; a no-op
        # unless both home folders were configured.
        filename = AbstractFileSystem.normpath(filename)
        if not (self._logical_home_folder and self._physical_home_folder):
            return filename
        rebased = os.path.join(
            to_root, os.path.relpath(filename, from_root))
        return AbstractFileSystem.normpath(rebased)

    def _logical_to_physical(self, filename):
        return self._map_path(
            filename, self._logical_home_folder, self._physical_home_folder)

    def _physical_to_logical(self, filename):
        return self._map_path(
            filename, self._physical_home_folder, self._logical_home_folder)

    def isfile(self, filename):
        return os.path.isfile(self._logical_to_physical(filename))

    def get(self, filename):
        return open(self._logical_to_physical(filename), 'rb')

    def put(self, unused_filename, unused_stream):
        raise Exception('Not implemented.')

    def delete(self, unused_filename):
        raise Exception('Not implemented.')

    def list(self, root_dir):
        """Lists all files in a directory."""
        found = []
        for dirname, unused_dirnames, filenames in os.walk(
                self._logical_to_physical(root_dir)):
            found.extend(
                self._physical_to_logical(os.path.join(dirname, name))
                for name in filenames)
        return sorted(found)

    def get_jinja_environ(self, dir_names):
        """Configure the environment for Jinja templates."""
        physical_dir_names = [
            self._logical_to_physical(dir_name) for dir_name in dir_names]

        jinja_environment = jinja2.Environment(
            autoescape=True, finalize=jinja_filters.finalize,
            extensions=['jinja2.ext.i18n'],
            loader=jinja2.FileSystemLoader(physical_dir_names))
        jinja_environment.filters['js_string'] = jinja_filters.js_string
        return jinja_environment

    def is_read_write(self):
        return False
class FileMetadataEntity(BaseEntity):
    """An entity to represent a file metadata; absolute file name is a key."""

    # TODO(psimakov): do we need 'version' to support concurrent updates
    # TODO(psimakov): can we put 'data' here and still have fast isfile/list?

    # When the row was first created; filled in automatically on first put().
    created_on = db.DateTimeProperty(auto_now_add=True, indexed=False)
    # When the file content was last written; maintained by callers on put().
    updated_on = db.DateTimeProperty(indexed=True)

    # Draft file is just as any other file. It's up to the consumer of the file
    # to decide whether to treat draft differently (not to serve it to the
    # public, for example). This class does not care and just stores the bit.
    is_draft = db.BooleanProperty(indexed=False)

    # Size of the file content, in bytes.
    size = db.IntegerProperty(indexed=False)
class FileDataEntity(BaseEntity):
    """An entity to represent file content; absolute file name is a key."""

    # Raw file bytes; encoding/decoding is entirely the caller's concern.
    data = db.BlobProperty()
class FileStreamWrapped(object):
    """A class that wraps a file stream, but adds extra attributes to it."""

    def __init__(self, metadata, data):
        self._metadata = metadata
        self._data = data

    def read(self):
        """Emulates stream.read(). Returns all bytes and emulates EOF."""
        # Hand back everything we hold and empty the buffer, so a second
        # read() returns '' like a stream at EOF.
        remaining, self._data = self._data, ''
        return remaining

    @property
    def metadata(self):
        return self._metadata
class StringStream(object):
    """A wrapper to pose a string as a UTF-8 byte stream."""

    def __init__(self, text):
        # NOTE: relies on the Python 2 'unicode' builtin; text must be a
        # unicode string.
        self._data = unicode.encode(text, 'utf-8')

    def read(self):
        """Emulates stream.read(). Returns all bytes and emulates EOF."""
        # Hand back everything and empty the buffer so a second read()
        # returns '' like a stream at EOF.
        remaining, self._data = self._data, ''
        return remaining
def string_to_stream(text):
    """Wraps a unicode string in a read-once UTF-8 byte stream."""
    return StringStream(text)
def stream_to_string(stream):
    """Drains a byte stream and decodes the whole content as UTF-8 text."""
    raw_bytes = stream.read()
    return raw_bytes.decode('utf-8')
class VirtualFileSystemTemplateLoader(jinja2.BaseLoader):
    """Loader of jinja2 templates from a virtual file system."""

    def __init__(self, fs, logical_home_folder, dir_names):
        self._fs = fs
        self._logical_home_folder = AbstractFileSystem.normpath(
            logical_home_folder)
        # Normalize every search directory up front.
        self._dir_names = [
            AbstractFileSystem.normpath(dir_name)
            for dir_name in (dir_names or [])]

    def get_source(self, unused_environment, template):
        # Probe each search directory in order; the first hit wins.
        for dir_name in self._dir_names:
            filename = AbstractFileSystem.normpath(
                os.path.join(dir_name, template))
            if not self._fs.isfile(filename):
                continue
            source = self._fs.get(filename).read().decode('utf-8')
            return source, filename, True
        raise jinja2.TemplateNotFound(template)

    def list_templates(self):
        all_templates = []
        for dir_name in self._dir_names:
            all_templates += self._fs.list(dir_name)
        return all_templates
class DatastoreBackedFileSystem(object):
    """A read-write file system backed by a datastore."""

    @classmethod
    def make_key(cls, filename):
        # Memcache key under which a file (or its known absence) is cached.
        return 'vfs:dsbfs:%s' % filename

    def __init__(
        self, ns, logical_home_folder,
        inherits_from=None, inheritable_folders=None):
        """Creates a new instance of the datastore-backed file system.

        Args:
            ns: A datastore namespace to use for storing all data and metadata.
            logical_home_folder: A logical home dir of all files (/a/b/c/...).
            inherits_from: A file system to use for the inheritance.
            inheritable_folders: A list of folders that support inheritance.

        Returns:
            A new instance of the object.

        Raises:
            Exception: if invalid inherits_from is given.
        """
        # We cache files loaded via inherited fs; make sure they don't change.
        if inherits_from and not isinstance(
                inherits_from, LocalReadOnlyFileSystem):
            raise Exception('Can only inherit from LocalReadOnlyFileSystem.')

        self._ns = ns
        self._logical_home_folder = AbstractFileSystem.normpath(
            logical_home_folder)
        self._inherits_from = inherits_from
        self._inheritable_folders = []
        if inheritable_folders:
            for folder in inheritable_folders:
                self._inheritable_folders.append(AbstractFileSystem.normpath(
                    folder))

    def __getattribute__(self, name):
        """Wraps public method calls so they run in this fs's namespace."""
        attr = object.__getattribute__(self, name)

        # Don't intercept access to private methods and attributes.
        if name.startswith('_'):
            return attr

        # Do intercept all methods.
        if hasattr(attr, '__call__'):
            def newfunc(*args, **kwargs):
                """Set proper namespace for each method call."""
                old_namespace = namespace_manager.get_namespace()
                try:
                    namespace_manager.set_namespace(self._ns)
                    return attr(*args, **kwargs)
                finally:
                    # Always restore the caller's namespace.
                    namespace_manager.set_namespace(old_namespace)
            return newfunc

        # Don't intercept access to non-method attributes.
        return attr

    def _logical_to_physical(self, filename):
        filename = AbstractFileSystem.normpath(filename)

        # For now we only support '/' as a physical folder name.
        if self._logical_home_folder == '/':
            return filename
        if not filename.startswith(self._logical_home_folder):
            raise Exception(
                'Expected path \'%s\' to start with a prefix \'%s\'.' % (
                    filename, self._logical_home_folder))

        rel_path = filename[len(self._logical_home_folder):]
        if not rel_path.startswith('/'):
            rel_path = '/%s' % rel_path
        return rel_path

    def physical_to_logical(self, filename):
        """Converts an internal filename to an external filename."""

        # This class receives and stores absolute file names. The logical
        # filename is the external file name. The physical filename is an
        # internal filename. This function does the conversions.

        # Let's say you want to store a file named '/assets/img/foo.png'.
        # This would be a physical filename in the VFS. But the put() operation
        # expects an absolute filename from the root of the app installation,
        # i.e. something like '/dev/apps/coursebuilder/assets/img/foo.png',
        # which is called a logical filename. This is a legacy expectation from
        # the days the course was defined as files on the file system.
        #
        # This function will do the conversion you need.

        return self._physical_to_logical(filename)

    def _physical_to_logical(self, filename):
        filename = AbstractFileSystem.normpath(filename)

        # For now we only support '/' as a physical folder name.
        if filename and not filename.startswith('/'):
            filename = '/' + filename
        if self._logical_home_folder == '/':
            return filename
        return '%s%s' % (self._logical_home_folder, filename)

    def _can_inherit(self, filename):
        """Checks if a file can be inherited from a parent file system."""
        for prefix in self._inheritable_folders:
            if filename.startswith(prefix):
                return True
        return False

    def get(self, afilename):
        """Gets a file from a datastore. Raw bytes stream, no encodings."""
        filename = self._logical_to_physical(afilename)

        # Load from cache. The falsy NO_OBJECT marker means the file is
        # known not to exist, so we skip the datastore lookup entirely.
        result = MemcacheManager.get(
            self.make_key(filename), namespace=self._ns)
        if result:
            return result
        if NO_OBJECT == result:
            return None

        # Load from a datastore.
        metadata = FileMetadataEntity.get_by_key_name(filename)
        if metadata:
            data = FileDataEntity.get_by_key_name(filename)
            if data:
                result = FileStreamWrapped(metadata, data.data)
                MemcacheManager.set(
                    self.make_key(filename), result, namespace=self._ns)
                return result

        result = None
        metadata = None

        # Load from parent fs.
        if self._inherits_from and self._can_inherit(filename):
            result = self._inherits_from.get(afilename)

        # Cache result: the wrapped stream on a hit, NO_OBJECT on a miss.
        if result:
            result = FileStreamWrapped(metadata, result.read())
            MemcacheManager.set(
                self.make_key(filename), result, namespace=self._ns)
        else:
            MemcacheManager.set(
                self.make_key(filename), NO_OBJECT, namespace=self._ns)

        return result

    @db.transactional(xg=True)
    def put(self, filename, stream, is_draft=False, metadata_only=False):
        """Puts a file stream to a database. Raw bytes stream, no encodings."""
        self.non_transactional_put(
            filename, stream, is_draft=is_draft, metadata_only=metadata_only)

    def non_transactional_put(
        self, filename, stream, is_draft=False, metadata_only=False):
        """Non-transactional put; use only when transactions are impossible."""
        filename = self._logical_to_physical(filename)

        metadata = FileMetadataEntity.get_by_key_name(filename)
        if not metadata:
            metadata = FileMetadataEntity(key_name=filename)
        metadata.updated_on = datetime.datetime.now()
        metadata.is_draft = is_draft

        if not metadata_only:
            # We operate with raw bytes. The consumer must deal with encoding.
            raw_bytes = stream.read()
            metadata.size = len(raw_bytes)
            data = FileDataEntity(key_name=filename)
            data.data = raw_bytes
            data.put()

        metadata.put()

        # Invalidate the cache entry so the next get() sees fresh content.
        MemcacheManager.delete(self.make_key(filename), namespace=self._ns)

    @db.transactional(xg=True)
    def delete(self, filename):
        """Deletes a file's data, metadata and cache entry."""
        filename = self._logical_to_physical(filename)

        metadata = FileMetadataEntity.get_by_key_name(filename)
        if metadata:
            metadata.delete()

        # FIX: previously this constructed a fresh entity via
        # FileDataEntity(key_name=filename), which is always truthy and made
        # the existence check below meaningless. Fetch the stored entity
        # instead, mirroring the metadata handling above.
        data = FileDataEntity.get_by_key_name(filename)
        if data:
            data.delete()

        MemcacheManager.delete(self.make_key(filename), namespace=self._ns)

    def isfile(self, afilename):
        """Checks file existence by looking up the datastore row."""
        filename = self._logical_to_physical(afilename)

        # Check cache; NO_OBJECT means known-missing.
        result = MemcacheManager.get(
            self.make_key(filename), namespace=self._ns)
        if result:
            return True
        if NO_OBJECT == result:
            return False

        # Check datastore.
        metadata = FileMetadataEntity.get_by_key_name(filename)
        if metadata:
            return True

        result = False

        # Check with parent fs.
        if self._inherits_from and self._can_inherit(filename):
            result = self._inherits_from.isfile(afilename)

        # Put NO_OBJECT marker into memcache to avoid repeated lookups.
        if not result:
            MemcacheManager.set(
                self.make_key(filename), NO_OBJECT, namespace=self._ns)

        return result

    def list(self, dir_name, include_inherited=False):
        """Lists all files in a directory by using datastore query.

        Args:
            dir_name: string. Directory to list contents of.
            include_inherited: boolean. If True, includes all inheritable files
                from the parent filesystem.

        Returns:
            List of string. Lexicographically-sorted unique filenames
            recursively found in dir_name.
        """
        dir_name = self._logical_to_physical(dir_name)
        result = set()

        # NOTE(review): fetch(1000) caps the listing at 1000 metadata rows;
        # file systems with more files will be silently truncated.
        keys = FileMetadataEntity.all(keys_only=True)
        for key in keys.fetch(1000):
            filename = key.name()
            if filename.startswith(dir_name):
                result.add(self._physical_to_logical(filename))

        if include_inherited and self._inherits_from:
            for inheritable_folder in self._inheritable_folders:
                result.update(set(self._inherits_from.list(
                    self._physical_to_logical(inheritable_folder))))

        return sorted(list(result))

    def get_jinja_environ(self, dir_names):
        jinja_environment = jinja2.Environment(
            autoescape=True, finalize=jinja_filters.finalize,
            extensions=['jinja2.ext.i18n'],
            loader=VirtualFileSystemTemplateLoader(
                self, self._logical_home_folder, dir_names))
        jinja_environment.filters['js_string'] = jinja_filters.js_string
        return jinja_environment

    def is_read_write(self):
        return True
def run_all_unit_tests():
    """Runs all unit tests in the project."""
    # Intentionally empty: this module currently defines no stand-alone tests.
# Allow running this module directly as a stand-alone test script.
if __name__ == '__main__':
    run_all_unit_tests()
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Student progress trackers."""
__author__ = 'Sean Lip (sll@google.com)'
import datetime
import os
from tools import verify
from models import StudentPropertyEntity
import transforms
class UnitLessonCompletionTracker(object):
    """Tracks student completion for a unit/lesson-based linear course."""

    # Name of the StudentPropertyEntity property where progress is stored.
    PROPERTY_KEY = 'linear-course-completion'

    # Here are representative examples of the keys for the various entities
    # used in this class:
    #   Unit 1: u.1
    #   Unit 1, Lesson 1: u.1.l.1
    #   Unit 1, Lesson 1, Video 1: u.1.l.1.v.1
    #   Unit 1, Lesson 1, Activity 2: u.1.l.1.a.2
    #   Unit 1, Lesson 1, Activity 2, Block 4: u.1.l.1.a.2.b.4
    #   Assessment 'Pre': s.Pre
    # At the moment, we do not divide assessments into blocks.
    #
    # IMPORTANT NOTE: The values of the keys mean different things depending on
    # whether the entity is a composite entity or not.
    # If it is a composite entity (unit, lesson, activity), then the value is
    #   - 0 if none of its sub-entities has been completed
    #   - 1 if some, but not all, of its sub-entities have been completed
    #   - 2 if all its sub-entities have been completed.
    # If it is not a composite entity (video, block, assessment), then the value
    # is just the number of times the event has been triggered.

    # Constants for recording the state of composite entities.
    # TODO(sll): Change these to enums.
    NOT_STARTED_STATE = 0
    IN_PROGRESS_STATE = 1
    COMPLETED_STATE = 2

    # Single-letter codes used to build the dotted progress keys shown above.
    EVENT_CODE_MAPPING = {
        'unit': 'u',
        'lesson': 'l',
        'video': 'v',
        'activity': 'a',
        'block': 'b',
        'assessment': 's',
    }
    def __init__(self, course):
        # The course whose student progress this tracker manages.
        self._course = course

    def _get_course(self):
        # Internal accessor for the course this tracker serves.
        return self._course
    def get_activity_as_python(self, unit_id, lesson_id):
        """Gets the corresponding activity as a Python object.

        Args:
            unit_id: id of the unit containing the activity.
            lesson_id: id of the lesson containing the activity.

        Returns:
            The evaluated activity object for the given unit/lesson.
        """
        root_name = 'activity'
        course = self._get_course()
        # Read the raw activity file from the course's virtual file system.
        activity_text = course.app_context.fs.get(
            os.path.join(course.app_context.get_home(),
                         course.get_activity_filename(unit_id, lesson_id)))
        # Convert the JavaScript activity definition into a Python expression,
        # then evaluate it inside the restricted verifier scope.
        content, noverify_text = verify.convert_javascript_to_python(
            activity_text, root_name)
        activity = verify.evaluate_python_expression_from_text(
            content, root_name, verify.Activity().scope, noverify_text)
        return activity
def _get_unit_key(self, unit_id):
return '%s.%s' % (self.EVENT_CODE_MAPPING['unit'], unit_id)
def _get_lesson_key(self, unit_id, lesson_id):
return '%s.%s.%s.%s' % (
self.EVENT_CODE_MAPPING['unit'], unit_id,
self.EVENT_CODE_MAPPING['lesson'], lesson_id
)
def _get_video_key(self, unit_id, lesson_id, video_id):
return '%s.%s.%s.%s.%s.%s' % (
self.EVENT_CODE_MAPPING['unit'], unit_id,
self.EVENT_CODE_MAPPING['lesson'], lesson_id,
self.EVENT_CODE_MAPPING['video'], video_id
)
def _get_activity_key(self, unit_id, lesson_id, activity_id):
return '%s.%s.%s.%s.%s.%s' % (
self.EVENT_CODE_MAPPING['unit'], unit_id,
self.EVENT_CODE_MAPPING['lesson'], lesson_id,
self.EVENT_CODE_MAPPING['activity'], activity_id
)
def _get_block_key(self, unit_id, lesson_id, activity_id, block_id):
return '%s.%s.%s.%s.%s.%s.%s.%s' % (
self.EVENT_CODE_MAPPING['unit'], unit_id,
self.EVENT_CODE_MAPPING['lesson'], lesson_id,
self.EVENT_CODE_MAPPING['activity'], activity_id,
self.EVENT_CODE_MAPPING['block'], block_id
)
def _get_assessment_key(self, assessment_id):
return '%s.%s' % (self.EVENT_CODE_MAPPING['assessment'], assessment_id)
def get_valid_block_ids(self, unit_id, lesson_id):
"""Returns a list of block ids representing interactive activities."""
valid_block_ids = []
# Get the activity corresponding to this unit/lesson combination.
activity = self.get_activity_as_python(unit_id, lesson_id)
for block_id in range(len(activity['activity'])):
block = activity['activity'][block_id]
if isinstance(block, dict):
valid_block_ids.append(block_id)
return valid_block_ids
    def _update_unit(self, progress, event_key):
        """Updates a unit's progress if all its lessons have been completed.

        Args:
            progress: the student's progress entity.
            event_key: string progress key of the form 'u.<unit_id>'.
        """
        split_event_key = event_key.split('.')
        assert len(split_event_key) == 2
        unit_id = split_event_key[1]

        # Nothing to do once the unit is already marked completed.
        if self._get_entity_value(progress, event_key) == self.COMPLETED_STATE:
            return

        # Record that at least one lesson in this unit has been completed.
        self._set_entity_value(progress, event_key, self.IN_PROGRESS_STATE)

        # Check if all lessons in this unit have been completed.
        lessons = self._get_course().get_lessons(unit_id)
        for lesson in lessons:
            # Skip lessons that do not have activities associated with them.
            if not lesson.activity:
                continue

            # Any incomplete lesson leaves the unit in IN_PROGRESS_STATE.
            if (self.get_lesson_status(
                    progress,
                    unit_id, lesson.lesson_id) != self.COMPLETED_STATE):
                return

        # Record that all lessons in this unit have been completed.
        self._set_entity_value(progress, event_key, self.COMPLETED_STATE)
def _update_lesson(self, progress, event_key):
"""Updates a lesson's progress if its activities have been completed."""
split_event_key = event_key.split('.')
assert len(split_event_key) == 4
unit_id = split_event_key[1]
lesson_id = split_event_key[3]
if self._get_entity_value(progress, event_key) == self.COMPLETED_STATE:
return
# Record that at least one activity in this lesson has been completed.
self._set_entity_value(progress, event_key, self.IN_PROGRESS_STATE)
lessons = self._get_course().get_lessons(unit_id)
for lesson in lessons:
if str(lesson.lesson_id) == lesson_id and lesson:
if (self.get_activity_status(
progress, unit_id, lesson_id) != self.COMPLETED_STATE):
return
# Record that all activities in this lesson have been completed.
self._set_entity_value(progress, event_key, self.COMPLETED_STATE)
def _update_activity(self, progress, event_key):
"""Updates activity's progress when all interactive blocks are done."""
split_event_key = event_key.split('.')
assert len(split_event_key) == 6
unit_id = split_event_key[1]
lesson_id = split_event_key[3]
if self._get_entity_value(progress, event_key) == self.COMPLETED_STATE:
return
# Record that at least one block in this activity has been completed.
self._set_entity_value(progress, event_key, self.IN_PROGRESS_STATE)
valid_block_ids = self.get_valid_block_ids(unit_id, lesson_id)
for block_id in valid_block_ids:
if not self.is_block_completed(
progress, unit_id, lesson_id, block_id):
return
# Record that all blocks in this activity have been completed.
self._set_entity_value(progress, event_key, self.COMPLETED_STATE)
UPDATER_MAPPING = {
'activity': _update_activity,
'lesson': _update_lesson,
'unit': _update_unit
}
# Dependencies for recording derived events. The key is the current
# event, and the value is a tuple, each element of which contains:
# - the dependent entity to be updated
# - the transformation to apply to the id of the current event to get the
# id for the new event
DERIVED_EVENTS = {
'block': (
{
'entity': 'activity',
'generate_new_id': (lambda s: '.'.join(s.split('.')[:-2])),
},
),
'activity': (
{
'entity': 'lesson',
'generate_new_id': (lambda s: '.'.join(s.split('.')[:-2])),
},
),
'lesson': (
{
'entity': 'unit',
'generate_new_id': (lambda s: '.'.join(s.split('.')[:-2])),
},
),
}
def put_video_completed(self, student, unit_id, lesson_id):
"""Records that the given student has completed a video."""
if not self._get_course().is_valid_unit_lesson_id(unit_id, lesson_id):
return
self._put_event(
student, 'video', self._get_video_key(unit_id, lesson_id, 0))
def put_activity_completed(self, student, unit_id, lesson_id):
"""Records that the given student has completed an activity."""
if not self._get_course().is_valid_unit_lesson_id(unit_id, lesson_id):
return
self._put_event(
student, 'activity', self._get_activity_key(unit_id, lesson_id, 0))
def put_block_completed(self, student, unit_id, lesson_id, block_id):
"""Records that the given student has completed an activity block."""
if not self._get_course().is_valid_unit_lesson_id(unit_id, lesson_id):
return
if block_id not in self.get_valid_block_ids(unit_id, lesson_id):
return
self._put_event(
student,
'block',
self._get_block_key(unit_id, lesson_id, 0, block_id)
)
def put_assessment_completed(self, student, assessment_id):
"""Records that the given student has completed the given assessment."""
if not self._get_course().is_valid_assessment_id(assessment_id):
return
self._put_event(
student, 'assessment', self._get_assessment_key(assessment_id))
def put_activity_accessed(self, student, unit_id, lesson_id):
"""Records that the given student has accessed this activity."""
# This method currently exists because we need to mark activities
# without interactive blocks as 'completed' when they are accessed.
if not self.get_valid_block_ids(unit_id, lesson_id):
self.put_activity_completed(student, unit_id, lesson_id)
    def _put_event(self, student, event_entity, event_key):
        """Starts a cascade of updates in response to an event taking place."""
        # Unknown event entities are silently ignored rather than raising.
        if event_entity not in self.EVENT_CODE_MAPPING:
            return
        progress = self.get_or_create_progress(student)
        self._update_event(
            student, progress, event_entity, event_key, direct_update=True)
        # Persist the whole updated progress dict in one datastore write.
        # NOTE(review): datetime.now() is a naive local timestamp —
        # presumably UTC on App Engine; confirm before comparing timestamps.
        progress.updated_on = datetime.datetime.now()
        progress.put()
    def _update_event(self, student, progress, event_entity, event_key,
                      direct_update=False):
        """Updates statistics for the given event, and for derived events.

        Args:
            student: the student
            progress: the StudentProgressEntity for the student
            event_entity: the name of the affected entity (unit, video, etc.)
            event_key: the key for the recorded event
            direct_update: True if this event is being updated explicitly;
                False if it is being auto-updated.
        """
        if direct_update or event_entity not in self.UPDATER_MAPPING:
            if event_entity in self.UPDATER_MAPPING:
                # This is a derived event, so directly mark it as completed.
                self._set_entity_value(
                    progress, event_key, self.COMPLETED_STATE)
            else:
                # This is not a derived event, so increment its counter by one.
                self._inc(progress, event_key)
        else:
            # Auto-update: the entity's updater decides whether it is merely
            # in progress or fully completed.
            self.UPDATER_MAPPING[event_entity](self, progress, event_key)

        # Propagate upwards (block -> activity -> lesson -> unit) by
        # recursively updating each dependent entity with a rewritten key.
        if event_entity in self.DERIVED_EVENTS:
            for derived_event in self.DERIVED_EVENTS[event_entity]:
                self._update_event(
                    student=student,
                    progress=progress,
                    event_entity=derived_event['entity'],
                    event_key=derived_event['generate_new_id'](event_key),
                )
def get_unit_status(self, progress, unit_id):
return self._get_entity_value(progress, self._get_unit_key(unit_id))
def get_lesson_status(self, progress, unit_id, lesson_id):
return self._get_entity_value(
progress, self._get_lesson_key(unit_id, lesson_id))
def get_video_status(self, progress, unit_id, lesson_id):
return self._get_entity_value(
progress, self._get_video_key(unit_id, lesson_id, 0))
def get_activity_status(self, progress, unit_id, lesson_id):
return self._get_entity_value(
progress, self._get_activity_key(unit_id, lesson_id, 0))
def get_block_status(self, progress, unit_id, lesson_id, block_id):
return self._get_entity_value(
progress, self._get_block_key(unit_id, lesson_id, 0, block_id))
def get_assessment_status(self, progress, assessment_id):
return self._get_entity_value(
progress, self._get_assessment_key(assessment_id))
def is_video_completed(self, progress, unit_id, lesson_id):
value = self._get_entity_value(
progress, self._get_video_key(unit_id, lesson_id, 0))
return value is not None and value > 0
def is_block_completed(self, progress, unit_id, lesson_id, block_id):
value = self._get_entity_value(
progress, self._get_block_key(unit_id, lesson_id, 0, block_id))
return value is not None and value > 0
def is_assessment_completed(self, progress, assessment_id):
value = self._get_entity_value(
progress, self._get_assessment_key(assessment_id))
return value is not None and value > 0
@classmethod
def get_or_create_progress(cls, student):
progress = StudentPropertyEntity.get(student, cls.PROPERTY_KEY)
if not progress:
progress = StudentPropertyEntity.create(
student=student, property_name=cls.PROPERTY_KEY)
progress.put()
return progress
def get_unit_progress(self, student):
"""Returns a dict with the states of each unit."""
units = self._get_course().get_units()
progress = self.get_or_create_progress(student)
result = {}
for unit in units:
if unit.type == 'A':
result[unit.unit_id] = self.is_assessment_completed(
progress, unit.unit_id)
elif unit.type == 'U':
value = self.get_unit_status(progress, unit.unit_id)
if value is None:
value = 0
result[unit.unit_id] = value
return result
def get_lesson_progress(self, student, unit_id):
"""Returns a dict saying which lessons in this unit are completed."""
lessons = self._get_course().get_lessons(unit_id)
progress = self.get_or_create_progress(student)
result = {}
for lesson in lessons:
value = self.get_lesson_status(progress, unit_id, lesson.lesson_id)
if value is None:
value = 0
result[lesson.lesson_id] = value
return result
def _get_entity_value(self, progress, event_key):
if not progress.value:
return None
return transforms.loads(progress.value).get(event_key)
    def _set_entity_value(self, student_property, key, value):
        """Sets the value of a student property.

        Note: this method does not commit the change. The calling method should
        call put() on the StudentPropertyEntity.

        Args:
            student_property: the StudentPropertyEntity
            key: the student property whose value should be set
            value: the value to set this property to
        """
        try:
            progress_dict = transforms.loads(student_property.value)
        except (AttributeError, TypeError):
            # No progress serialized yet (value is None/absent); start fresh.
            progress_dict = {}

        progress_dict[key] = value
        student_property.value = transforms.dumps(progress_dict)
def _inc(self, student_property, key, value=1):
"""Increments the integer value of a student property.
Note: this method does not commit the change. The calling method should
call put() on the StudentPropertyEntity.
Args:
student_property: the StudentPropertyEntity
key: the student property whose value should be incremented
value: the value to increment this property by
"""
try:
progress_dict = transforms.loads(student_property.value)
except (AttributeError, TypeError):
progress_dict = {}
if key not in progress_dict:
progress_dict[key] = 0
progress_dict[key] += value
student_property.value = transforms.dumps(progress_dict)
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
"""Enforces schema and verifies course files for referential integrity.
Use this script to verify referential integrity of your course definition files
before you import them into the production instance of Google AppEngine.
Here is how to use the script:
- prepare your course files
- edit the data/unit.csv file
- edit the data/lesson.csv file
- edit the assets/js/activity-*.*.js files
- edit the assets/js/assessment-*.js files
- run the script from a command line by navigating to the root
directory of the app and then typing "python tools/verify.py"
- review the report printed to the console for errors and warnings
Good luck!
"""
import csv
import json
import os
import re
from StringIO import StringIO
import sys
# Unique sentinel objects marking primitive types in SCHEMA below; they are
# compared by identity, so each one is distinct from every real value.
BOOLEAN = object()
STRING = object()
FLOAT = object()
INTEGER = object()
CORRECT = object()
REGEX = object()
INTEGER_OR_INTEGER_LIST = object()

# The schema that parsed activity/assessment files must conform to: dicts
# describe {...} containers, lists describe [...] containers, and the
# sentinels above mark the allowed leaf types.
SCHEMA = {
    'assessment': {
        'assessmentName': STRING,
        'preamble': STRING,
        'checkAnswers': BOOLEAN,
        'questionsList': [{
            'questionHTML': STRING,
            'lesson': STRING,
            'choices': [STRING, CORRECT],

            # The fractional score for each choice in this question, if it is
            # multiple-choice. Each of these values should be between 0.0 and
            # 1.0, inclusive.
            'choiceScores': [FLOAT],

            # The weight given to the entire question.
            'weight': INTEGER,

            'multiLine': BOOLEAN,
            'correctAnswerNumeric': FLOAT,
            'correctAnswerString': STRING,
            'correctAnswerRegex': REGEX}]
    }, 'activity': [
        STRING,
        {
            'questionType': 'multiple choice',
            'questionHTML': STRING,
            'choices': [[STRING, BOOLEAN, STRING]]
        }, {
            'questionType': 'multiple choice group',
            'questionGroupHTML': STRING,
            'questionsList': [{
                'questionHTML': STRING,
                'choices': [STRING],
                'correctIndex': INTEGER_OR_INTEGER_LIST,
                'multiSelect': BOOLEAN}],
            'allCorrectMinCount': INTEGER,
            'allCorrectOutput': STRING,
            'someIncorrectOutput': STRING
        }, {
            'questionType': 'freetext',
            'questionHTML': STRING,
            'correctAnswerRegex': REGEX,
            'correctAnswerOutput': STRING,
            'incorrectAnswerOutput': STRING,
            'showAnswerOutput': STRING,
            'showAnswerPrompt': STRING,
            'outputHeight': STRING
        }]}

# Single-character codes used in the unit CSV 'type' column.
UNIT_TYPE_UNIT = 'U'
UNIT_TYPE_LINK = 'O'
UNIT_TYPE_ASSESSMENT = 'A'
UNIT_TYPES = [UNIT_TYPE_UNIT, UNIT_TYPE_LINK, UNIT_TYPE_ASSESSMENT]
UNIT_TYPE_NAMES = {
    UNIT_TYPE_UNIT: 'Unit',
    UNIT_TYPE_LINK: 'Link',
    UNIT_TYPE_ASSESSMENT: 'Assessment'}

# Exact header rows expected in data/unit.csv and data/lesson.csv.
UNITS_HEADER = (
    'id,type,unit_id,title,release_date,now_available')
LESSONS_HEADER = (
    'unit_id,unit_title,lesson_id,lesson_title,lesson_activity,'
    'lesson_activity_name,lesson_notes,lesson_video_id,lesson_objectives')

# Maps a CSV column name to (db attribute name, conversion callable). A None
# entry has no explicit mapping: set_object_attributes() then stores the raw
# value under the CSV column name via its generic type-guessing path.
UNIT_CSV_TO_DB_CONVERTER = {
    'id': None,
    'type': ('type', unicode),
    'unit_id': ('unit_id', unicode),
    'title': ('title', unicode),
    'release_date': ('release_date', unicode),
    'now_available': ('now_available', lambda value: value == 'True')
}
LESSON_CSV_TO_DB_CONVERTER = {
    'unit_id': ('unit_id', int),

    # Field 'unit_title' is a duplicate of Unit.title. We enforce that both
    # values are the same and ignore this value altogether.
    'unit_title': None,

    'lesson_id': ('lesson_id', int),
    'lesson_title': ('title', unicode),
    'lesson_activity': ('activity', lambda value: value == 'yes'),
    'lesson_activity_name': ('activity_title', unicode),
    'lesson_video_id': ('video', unicode),
    'lesson_objectives': ('objectives', unicode),
    'lesson_notes': ('notes', unicode)
}

# Regex fragments marking free-form JavaScript the verifier must skip.
# pylint: disable-msg=anomalous-backslash-in-string
NO_VERIFY_TAG_NAME_OPEN = '<gcb-no-verify>\s*\n'
# pylint: enable-msg=anomalous-backslash-in-string
NO_VERIFY_TAG_NAME_CLOSE = '</gcb-no-verify>'

# Verbosity switches for the console report.
OUTPUT_FINE_LOG = False
OUTPUT_DEBUG_LOG = False
class Term(object):
    """A typed wrapper for one primitive schema value (e.g. REGEX, CORRECT)."""

    def __init__(self, term_type, value=None):
        self.term_type = term_type
        self.value = value

    def __eq__(self, other):
        if type(other) is not Term:
            return False
        else:
            return ((self.term_type == other.term_type) and
                    (self.value == other.value))

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; without this, '!='
        # falls back to identity and disagrees with '=='.
        return not self.__eq__(other)
class SchemaException(Exception):
    """A class to represent a schema error."""

    def format_primitive_value_name(self, name):
        """Formats a display name for a primitive *value* marker."""
        if name == REGEX:
            return 'REGEX(...)'
        if name == CORRECT:
            return 'CORRECT(...)'
        if name == BOOLEAN:
            return 'BOOLEAN'
        # Anything else is shown as-is.
        return name

    def format_primitive_type_name(self, name):
        """Formats a name for a primitive type."""
        if name == BOOLEAN:
            return 'BOOLEAN'
        if name == REGEX:
            return 'REGEX(...)'
        if name == CORRECT:
            return 'CORRECT(...)'
        if name == STRING or isinstance(name, basestring):
            return 'STRING'
        if name == FLOAT:
            return 'FLOAT'
        if name == INTEGER_OR_INTEGER_LIST:
            return 'INTEGER_OR_INTEGER_LIST'
        if name == INTEGER:
            return 'INTEGER'
        if isinstance(name, dict):
            return '{...}'
        if isinstance(name, list):
            return '[...]'
        return 'Unknown type name \'%s\'' % name.__class__.__name__

    def format_type_names(self, names):
        """Formats a single type name, or each member of a list of names."""
        if isinstance(names, list):
            captions = []
            for name in names:
                captions.append(self.format_primitive_type_name(name))
            return captions
        else:
            return self.format_primitive_type_name(names)

    def __init__(self, message, value=None, types=None, path=None):
        """Builds the final message by %-substituting value/types into it.

        NOTE(review): falsy 'value' or 'types' (0, '', []) are treated the
        same as absent, so the message's '%s' placeholders must match which
        of the two are truthy.
        """
        prefix = ''
        if path:
            prefix = 'Error at %s\n' % path
        if types:
            if value:
                message = prefix + message % (
                    self.format_primitive_value_name(value),
                    self.format_type_names(types))
            else:
                message = prefix + message % self.format_type_names(types)
        else:
            if value:
                message = prefix + (
                    message % self.format_primitive_value_name(value))
            else:
                message = prefix + message
        super(SchemaException, self).__init__(message)
class Context(object):
    """Manages a stack of traversal contexts as a canonical path."""

    def __init__(self):
        self.parent = None
        self.path = ['/']

    def new(self, names):
        """Derives a new child context from the current one."""
        child = Context()
        child.parent = self
        child.path = list(self.path)
        if names:
            # Accept a single name or a list of names; falsy entries are
            # skipped so they do not add empty path segments.
            name_list = names if isinstance(names, list) else [names]
            for name in name_list:
                if name:
                    child.path.append('/%s' % name)
        return child

    def format_path(self):
        """Formats the canonical name of this context."""
        return ''.join(self.path)
class SchemaHelper(object):
    """A class that knows how to apply the schema.

    Walks a parsed activity/assessment object in lockstep with a SCHEMA type
    map, raising SchemaException on the first incompatibility, and records a
    parse log plus per-type statistics along the way.
    """

    def __init__(self):
        # Counts how many times each type name has been visited.
        self.type_stats = {}
        # Traversal trace. check_instances_match_schema() resets it per run,
        # but it is also initialized here so that visit_element() cannot hit
        # an AttributeError when called before a full check is started.
        self.parse_log = []

    def visit_element(self, atype, value, context, is_terminal=True):
        """Callback for each schema element being traversed."""
        if atype in self.type_stats:
            count = self.type_stats[atype]
        else:
            count = 0
        self.type_stats[atype] = count + 1
        if is_terminal:
            self.parse_log.append(' TERMINAL: %s %s = %s' % (
                atype, context.format_path(), value))
        else:
            self.parse_log.append(' NON-TERMINAL: %s %s' % (
                atype, context.format_path()))

    def extract_all_terms_to_depth(self, key, values, type_map):
        """Walks schema type map recursively to depth."""
        # Walks schema type map recursively to depth and creates a list of all
        # possible {key: value} pairs. The latter is a list of all non-terminal
        # and terminal terms allowed in the schema. The list of terms from this
        # method can be bound to an execution context for evaluating whether a
        # given instance's map complies with the schema.
        if key:
            type_map.update({key: key})
        if values == REGEX:
            type_map.update({'regex': lambda x: Term(REGEX, x)})
            return
        if values == CORRECT:
            type_map.update({'correct': lambda x: Term(CORRECT, x)})
            return
        if values == BOOLEAN:
            type_map.update(
                {'true': Term(BOOLEAN, True), 'false': Term(BOOLEAN, False)})
            return
        if values == STRING or values == INTEGER:
            return
        if isinstance(values, dict):
            for new_key, new_value in values.items():
                self.extract_all_terms_to_depth(new_key, new_value, type_map)
            return
        if isinstance(values, list):
            for new_value in values:
                self.extract_all_terms_to_depth(None, new_value, type_map)
            return

    def find_selectors(self, type_map):
        """Finds all type selectors."""
        # Finds all elements in the type map where both a key and a value are
        # strings. These elements are used to find one specific type map among
        # several alternative type maps.
        selector = {}
        for akey, avalue in type_map.items():
            if isinstance(akey, basestring) and isinstance(avalue, basestring):
                selector.update({akey: avalue})
        return selector

    def find_compatible_dict(self, value_map, type_map, unused_context):
        """Find the type map most compatible with the value map."""
        # A value map is considered compatible with a type map when former
        # contains the same key names and the value types as the type map.

        # special case when we have just one type; check name and type are the
        # same
        if len(type_map) == 1:
            for value_key in value_map.keys():
                for key in type_map[0].keys():
                    if value_key == key:
                        return key, type_map[0]
            raise SchemaException(
                "Expected: '%s'\nfound: %s", type_map[0].keys()[0], value_map)

        # case when we have several types to choose from
        for adict in type_map:
            dict_selector = self.find_selectors(adict)
            for akey, avalue in dict_selector.items():
                if value_map[akey] == avalue:
                    return akey, adict
        return None, None

    def check_single_value_matches_type(self, value, atype, context):
        """Checks if a single value matches a specific (primitive) type."""
        if atype == BOOLEAN:
            if isinstance(value, bool) or value.term_type == BOOLEAN:
                self.visit_element('BOOLEAN', value, context)
                return True
            else:
                raise SchemaException(
                    'Expected: \'true\' or \'false\'\nfound: %s', value)
        if isinstance(atype, basestring):
            if isinstance(value, basestring):
                self.visit_element('str', value, context)
                return True
            else:
                raise SchemaException('Expected: \'string\'\nfound: %s', value)
        if atype == STRING:
            if isinstance(value, basestring):
                self.visit_element('STRING', value, context)
                return True
            else:
                raise SchemaException('Expected: \'string\'\nfound: %s', value)
        if atype == REGEX and value.term_type == REGEX:
            self.visit_element('REGEX', value, context)
            return True
        if atype == CORRECT and value.term_type == CORRECT:
            self.visit_element('CORRECT', value, context)
            return True
        if atype == FLOAT:
            if is_number(value):
                self.visit_element('NUMBER', value, context)
                return True
            else:
                raise SchemaException('Expected: \'number\'\nfound: %s', value)
        if atype == INTEGER_OR_INTEGER_LIST:
            if is_integer(value):
                self.visit_element('INTEGER', value, context)
                return True
            if is_integer_list(value):
                self.visit_element('INTEGER_OR_INTEGER_LIST', value, context)
                return True
            raise SchemaException(
                'Expected: \'integer\' or '
                '\'array of integer\'\nfound: %s', value,
                path=context.format_path())
        if atype == INTEGER:
            if is_integer(value):
                self.visit_element('INTEGER', value, context)
                return True
            else:
                raise SchemaException(
                    'Expected: \'integer\'\nfound: %s', value,
                    path=context.format_path())
        raise SchemaException(
            'Unexpected value \'%s\'\n'
            'for type %s', value, atype, path=context.format_path())

    def check_value_list_matches_type(self, value, atype, context):
        """Checks if all items in value list match a specific type."""
        for value_item in value:
            found = False
            for atype_item in atype:
                if isinstance(atype_item, list):
                    for atype_item_item in atype_item:
                        if self.does_value_match_type(
                                value_item, atype_item_item, context):
                            found = True
                            break
                else:
                    if self.does_value_match_type(
                            value_item, atype_item, context):
                        found = True
                        break
            if not found:
                raise SchemaException(
                    'Expected: \'%s\'\nfound: %s', atype, value)
        return True

    def check_value_matches_type(self, value, atype, context):
        """Checks if single value or a list of values match a specific type."""
        if isinstance(atype, list) and isinstance(value, list):
            return self.check_value_list_matches_type(value, atype, context)
        else:
            return self.check_single_value_matches_type(value, atype, context)

    def does_value_match_type(self, value, atype, context):
        """Same as other method, but does not throw an exception."""
        try:
            return self.check_value_matches_type(value, atype, context)
        except SchemaException:
            return False

    def does_value_match_one_of_types(self, value, types, context):
        """Checks if a value matches to one of the types in the list."""
        type_names = None
        if isinstance(types, list):
            type_names = types
        if type_names:
            for i in range(0, len(type_names)):
                if self.does_value_match_type(value, type_names[i], context):
                    return True
        return False

    def does_value_match_map_of_type(self, value, types, context):
        """Checks if value matches any variation of {...} type."""
        # find all possible map types
        maps = []
        for atype in types:
            if isinstance(atype, dict):
                maps.append(atype)
        if not maps and isinstance(types, dict):
            maps.append(types)

        # check if the structure of value matches one of the maps
        if isinstance(value, dict):
            aname, adict = self.find_compatible_dict(value, maps, context)
            if adict:
                self.visit_element(
                    'dict', value, context.new(aname), is_terminal=False)
                for akey, avalue in value.items():
                    if akey not in adict:
                        raise SchemaException(
                            'Unknown term \'%s\'', akey,
                            path=context.format_path())
                    self.check_value_of_valid_type(
                        avalue, adict[akey], context.new([aname, akey]))
                return True
            raise SchemaException(
                'The value:\n %s\n'
                'is incompatible with expected type(s):\n %s',
                value, types, path=context.format_path())
        return False

    def format_name_with_index(self, alist, aindex):
        """A function to format a context name with an array element index."""
        if len(alist) == 1:
            return ''
        else:
            return '[%s]' % aindex

    def does_value_match_list_of_types_in_order(
            self, value, types, context, target):
        """Iterates the value and types in given order and checks for match."""
        all_values_are_lists = True
        for avalue in value:
            if not isinstance(avalue, list):
                all_values_are_lists = False

        if all_values_are_lists:
            for i in range(0, len(value)):
                self.check_value_of_valid_type(value[i], types, context.new(
                    self.format_name_with_index(value, i)), in_order=True)
        else:
            if len(target) != len(value):
                raise SchemaException(
                    'Expected: \'%s\' values\n' + 'found: %s.' % value,
                    len(target), path=context.format_path())
            for i in range(0, len(value)):
                self.check_value_of_valid_type(value[i], target[i], context.new(
                    self.format_name_with_index(value, i)))
        return True

    def does_value_match_list_of_types_any_order(self, value, types,
                                                 context, lists):
        """Iterates the value and types, checks if they match in any order."""
        target = lists
        if not target:
            if not isinstance(types, list):
                raise SchemaException(
                    'Unsupported type %s',
                    None, types, path=context.format_path())
            target = types
        for i in range(0, len(value)):
            found = False
            for atarget in target:
                try:
                    self.check_value_of_valid_type(
                        value[i], atarget,
                        context.new(self.format_name_with_index(value, i)))
                    found = True
                    break
                except SchemaException as unused_e:
                    continue
            if not found:
                raise SchemaException(
                    'The value:\n %s\n'
                    'is incompatible with expected type(s):\n %s',
                    value, types, path=context.format_path())
        return True

    def does_value_match_list_of_type(self, value, types, context, in_order):
        """Checks if a value matches a variation of [...] type."""
        # Extra argument controls whether matching must be done in a specific
        # or in any order. A specific order is demanded by [[...]]] construct,
        # i.e. [[STRING, INTEGER, BOOLEAN]], while sub elements inside {...} and
        # [...] can be matched in any order.

        # prepare a list of list types
        lists = []
        for atype in types:
            if isinstance(atype, list):
                lists.append(atype)
        if len(lists) > 1:
            raise SchemaException(
                'Unable to validate types with multiple alternative '
                'lists %s', None, types, path=context.format_path())

        if isinstance(value, list):
            # NOTE(review): this check can never fire — len(lists) > 1 has
            # already raised above. Kept for fidelity.
            if len(lists) > 1:
                raise SchemaException(
                    'Allowed at most one list\nfound: %s.',
                    None, types, path=context.format_path())

            # determine if list is in order or not as hinted by double array
            # [[..]]; [STRING, NUMBER] is in any order, but [[STRING, NUMBER]]
            # demands order
            ordered = len(lists) == 1 and isinstance(types, list)
            if in_order or ordered:
                # NOTE(review): lists[0] raises IndexError if in_order is True
                # while no list alternative exists — confirm callers never hit
                # this combination.
                return self.does_value_match_list_of_types_in_order(
                    value, types, context, lists[0])
            else:
                return self.does_value_match_list_of_types_any_order(
                    value, types, context, lists)
        return False

    def check_value_of_valid_type(self, value, types, context, in_order=None):
        """Check if a value matches any of the given types."""
        if not (isinstance(types, list) or isinstance(types, dict)):
            self.check_value_matches_type(value, types, context)
            return
        if (self.does_value_match_list_of_type(value, types,
                                               context, in_order) or
            self.does_value_match_map_of_type(value, types, context) or
            self.does_value_match_one_of_types(value, types, context)):
            return
        raise SchemaException(
            'Unknown type %s', value, path=context.format_path())

    def check_instances_match_schema(self, values, types, name):
        """Recursively decompose 'values' to see if they match schema types."""
        self.parse_log = []
        context = Context().new(name)
        self.parse_log.append(' ROOT %s' % context.format_path())
        # pylint: disable-msg=protected-access
        values_class = values.__class__
        # pylint: enable-msg=protected-access

        # handle {..} containers
        if isinstance(types, dict):
            if not isinstance(values, dict):
                raise SchemaException(
                    'Error at \'/\': expected {...}, found %s' % (
                        values_class.__name__))
            self.check_value_of_valid_type(values, types, context.new([]))
            return

        # handle [...] containers
        if isinstance(types, list):
            if not isinstance(values, list):
                raise SchemaException(
                    'Error at \'/\': expected [...], found %s' % (
                        values_class.__name__))
            for i in range(0, len(values)):
                self.check_value_of_valid_type(
                    values[i], types, context.new('[%s]' % i))
            return

        raise SchemaException(
            'Expected an array or a dictionary.', None,
            path=context.format_path())
def escape_quote(value):
    """Backslash-escapes single quotes for embedding in a JS string literal."""
    return unicode(value).replace("'", "\\'")
class Unit(object):
    """A class to represent a Unit."""

    def __init__(self):
        self.id = 0
        self.type = ''
        self.unit_id = ''
        self.title = ''
        self.release_date = ''
        self.now_available = False

    def list_properties(self, name, output):
        """Appends the unit's properties to output as JS assignments."""
        output.append('%s[\'id\'] = %s;' % (name, self.id))
        # String-valued fields are single-quoted and escaped.
        for attr, val in (
                ('type', self.type), ('unit_id', self.unit_id),
                ('title', self.title), ('release_date', self.release_date)):
            output.append(
                '%s[\'%s\'] = \'%s\';' % (name, attr, escape_quote(val)))
        output.append('%s[\'now_available\'] = %s;' % (
            name, str(self.now_available).lower()))
class Lesson(object):
    """A class to represent a Lesson."""

    def __init__(self):
        self.unit_id = 0
        self.unit_title = ''
        self.lesson_id = 0
        self.lesson_title = ''
        self.lesson_activity = ''
        self.lesson_activity_name = ''
        self.lesson_notes = ''
        self.lesson_video_id = ''
        self.lesson_objectives = ''

    def list_properties(self, name, output):
        """Appends the lesson's properties to output as JS assignments."""
        activity_flag = 'true' if self.lesson_activity == 'yes' else 'false'
        output.append('%s[\'unit_id\'] = %s;' % (name, self.unit_id))
        output.append('%s[\'unit_title\'] = \'%s\';' % (
            name, escape_quote(self.unit_title)))
        output.append('%s[\'lesson_id\'] = %s;' % (name, self.lesson_id))
        output.append('%s[\'lesson_title\'] = \'%s\';' % (
            name, escape_quote(self.lesson_title)))
        output.append('%s[\'lesson_activity\'] = %s;' % (name, activity_flag))
        # The remaining string fields all follow the same quoting pattern.
        for attr in ('lesson_activity_name', 'lesson_notes',
                     'lesson_video_id', 'lesson_objectives'):
            output.append('%s[\'%s\'] = \'%s\';' % (
                name, attr, escape_quote(getattr(self, attr))))

    def to_id_string(self):
        """Formats 'unit_id.lesson_id.lesson_title' as an identifier."""
        return '%s.%s.%s' % (self.unit_id, self.lesson_id, self.lesson_title)
class Assessment(object):
    """A class to represent a Assessment."""

    def __init__(self):
        # Evaluation scope: every term name permitted by the assessment
        # schema, built by walking SCHEMA['assessment'] to full depth.
        self.scope = {}
        SchemaHelper().extract_all_terms_to_depth(
            'assessment', SCHEMA['assessment'], self.scope)
class Activity(object):
    """A class to represent a Activity."""

    def __init__(self):
        # Evaluation scope: every term name permitted by the activity
        # schema, built by walking SCHEMA['activity'] to full depth.
        self.scope = {}
        SchemaHelper().extract_all_terms_to_depth(
            'activity', SCHEMA['activity'], self.scope)
def silent_echo(unused_message):
    """Discards the message; used to run verification quietly."""
    return None
def echo(message):
    """Prints the message to the console."""
    # Parenthesized form is identical in Python 2 for a single argument.
    print(message)
def is_integer_list(s):
    """Returns True if s is a list whose every element is an int.

    The original wrapped this in try/except ValueError, but neither
    isinstance() nor iteration over a list can raise ValueError, so the
    handler was dead code and has been removed.

    NOTE: bool is a subclass of int, so True/False elements are accepted,
    matching the original behavior.
    """
    if not isinstance(s, list):
        return False
    return all(isinstance(item, int) for item in s)
def is_integer(s):
    """Returns True if s converts to a number with no fractional part."""
    try:
        whole, numeric = int(s), float(s)
    except Exception:  # pylint: disable-msg=broad-except
        return False
    return whole == numeric
def is_boolean(s):
    """Returns True if s is exactly the string 'True' or 'False'.

    The original wrapped the comparison in try/except ValueError, but '=='
    never raises ValueError, so the handler was dead code and is removed.
    """
    return s in ('True', 'False')
def is_number(s):
    """Returns True if s can be parsed as a float."""
    try:
        float(s)
    except ValueError:
        # Only ValueError is swallowed; other errors (e.g. TypeError for
        # None) propagate, exactly as before.
        return False
    return True
def is_one_of(value, values):
    """Returns True if value equals at least one element of values."""
    return any(value == current for current in values)
def text_to_line_numbered_text(text):
    """Adds 1-based line numbers to the provided text."""
    numbered = [
        '%s: %s' % (number, line)
        for number, line in enumerate(text.split('\n'), 1)]
    return '\n '.join(numbered)
def set_object_attributes(target_object, names, values, converter=None):
    """Sets object attributes from provided values.

    Args:
        target_object: the object whose attributes are assigned.
        names: list of attribute/column names, parallel to values.
        values: list of raw (string) values to assign.
        converter: optional dict mapping a name to (target_name, target_type);
            entries that are None or missing fall through to the generic
            type-guessing path below.

    Raises:
        SchemaException: if names and values differ in length.
    """
    if len(names) != len(values):
        raise SchemaException(
            'The number of elements must match: %s and %s' % (names, values))
    for i in range(len(names)):
        if converter:
            target_def = converter.get(names[i])
            if target_def:
                target_name = target_def[0]
                target_type = target_def[1]
                setattr(target_object, target_name, target_type(values[i]))
                continue
        if is_integer(values[i]):
            # if we are setting an attribute of an object that support
            # metadata, try to infer the target type and convert 'int' into
            # 'str' here
            target_type = None
            if hasattr(target_object.__class__, names[i]):
                attribute = getattr(target_object.__class__, names[i])
                if hasattr(attribute, 'data_type'):
                    target_type = attribute.data_type.__name__
            if target_type and (target_type == 'str' or
                                target_type == 'basestring'):
                setattr(target_object, names[i], str(values[i]))
            else:
                setattr(target_object, names[i], int(values[i]))
            continue
        if is_boolean(values[i]):
            # Bug fix: the original used bool(values[i]), and bool('False')
            # is True — every boolean-looking string was stored as True.
            # Compare against the literal instead.
            setattr(target_object, names[i], values[i] == 'True')
            continue
        setattr(target_object, names[i], values[i])
def read_objects_from_csv_stream(stream, header, new_object, converter=None):
    """Reads objects from a CSV stream; see read_objects_from_csv()."""
    # Buffer the whole stream so csv.reader works on an in-memory copy.
    reader = csv.reader(StringIO(stream.read()))
    return read_objects_from_csv(
        reader, header, new_object, converter=converter)
def read_objects_from_csv_file(fname, header, new_object, converter=None):
    """Reads objects from a CSV file, closing the file when done.

    Args:
        fname: path of the CSV file to read.
        header: expected comma-separated header line.
        new_object: factory producing one empty object per data row.
        converter: optional column-name -> (attr, type) map, passed through
            to the stream reader (the original silently dropped it).

    Returns:
        A list of populated objects, one per CSV data row.
    """
    # 'with' guarantees the handle is closed; the original leaked it.
    with open(fname) as stream:
        return read_objects_from_csv_stream(
            stream, header, new_object, converter=converter)
def read_objects_from_csv(value_rows, header, new_object, converter=None):
    """Reads objects from the rows of a CSV file.

    Args:
        value_rows: iterable of row lists (e.g. from csv.reader).
        header: comma-separated string the first row must equal exactly.
        new_object: factory called once per data row.
        converter: optional name -> (attr, type) map; see
            set_object_attributes().

    Returns:
        A list of populated objects, one per non-empty data row.

    Raises:
        SchemaException: on a header mismatch or a row of the wrong width.
    """
    values = []
    for row in value_rows:
        # Skip blank rows so they do not break the column-count checks.
        if not row:
            continue
        values.append(row)
    names = header.split(',')

    # NOTE(review): input with no rows at all raises IndexError on values[0]
    # instead of a SchemaException — confirm callers pre-check for this.
    if names != values[0]:
        raise SchemaException(
            'Error reading CSV header.\n '
            'Header row had %s element(s): %s\n '
            'Expected header row with %s element(s): %s' % (
                len(values[0]), values[0], len(names), names))

    items = []
    for i in range(1, len(values)):
        if len(names) != len(values[i]):
            raise SchemaException(
                'Error reading CSV data row.\n '
                'Row #%s had %s element(s): %s\n '
                'Expected %s element(s): %s' % (
                    i, len(values[i]), values[i], len(names), names))

        # Decode string values in case they were encoded in UTF-8. The CSV
        # reader should do this automatically, but it does not. The issue is
        # discussed here: http://docs.python.org/2/library/csv.html
        decoded_values = []
        for value in values[i]:
            if isinstance(value, basestring):
                value = unicode(value.decode('utf-8'))
            decoded_values.append(value)

        item = new_object()
        set_object_attributes(item, names, decoded_values, converter=converter)
        items.append(item)
    return items
def escape_javascript_regex(text):
    """Wraps JS regex literals after 'correctAnswerRegex:' in regex("...")."""
    pattern = r'correctAnswerRegex([:][ ]*)([/])(.*)([/][ismx]*)'
    replacement = r'correctAnswerRegex: regex("\2\3\4")'
    return re.sub(pattern, replacement, text)
def remove_javascript_single_line_comment(text):
    """Strips '//' comments, both trailing and whole-line.

    A '//' is only treated as a trailing comment when preceded by a space,
    which keeps URLs such as http://example.com intact.
    """
    trailing = re.compile('^(.*?)[ ]+//(.*)$', re.MULTILINE)
    whole_line = re.compile('^//(.*)$', re.MULTILINE)
    without_trailing = trailing.sub(r'\1', text)
    return whole_line.sub(r'', without_trailing)
def remove_javascript_multi_line_comment(text):
    """Removes '/* ... */' comments, which may span multiple lines."""
    # A raw string avoids the anomalous-backslash warning the original
    # suppressed with pylint pragmas; the pattern bytes are identical, and
    # '|' replaces '+' as the idiomatic way to combine regex flags.
    return re.sub(
        re.compile(r'/\*(.*)\*/', re.MULTILINE | re.DOTALL), r'', text)
def parse_content_marked_no_verify(content):
    """Parses and returns a tuple of real content and no-verify text."""
    # Free-form JavaScript in an activity file must sit between
    # //<gcb-no-verify> ... //</gcb-no-verify> tags so the verifier can
    # selectively ignore it.
    pattern = re.compile(
        '%s(.*)%s' % (NO_VERIFY_TAG_NAME_OPEN, NO_VERIFY_TAG_NAME_CLOSE),
        re.DOTALL)
    match = pattern.search(content)
    noverify_text = match.group(1) if match else None
    stripped_content = pattern.sub('', content)
    return (stripped_content, noverify_text)
def convert_javascript_to_python(content, root_name):
    """Removes JavaScript specific syntactic constructs and returns a tuple."""
    # Strip the no-verify section, then comments, then the 'var' keyword,
    # and finally wrap regex literals so the text parses as Python.
    (content, noverify_text) = parse_content_marked_no_verify(content)
    for transform in (remove_javascript_multi_line_comment,
                      remove_javascript_single_line_comment):
        content = transform(content)
    content = content.replace('var %s = ' % root_name, '%s = ' % root_name)
    return (escape_javascript_regex(content), noverify_text)
def convert_javascript_file_to_python(fname, root_name):
    """Reads a JavaScript file and converts its content to Python source.

    Args:
        fname: string. Path of the JavaScript file.
        root_name: string. Name of the variable the file defines.

    Returns:
        tuple of (converted content, no-verify text or None).
    """
    # Use a context manager so the file handle is not leaked, and read() in
    # place of the equivalent but wasteful ''.join(readlines()).
    with open(fname, 'r') as stream:
        return convert_javascript_to_python(stream.read(), root_name)
def legacy_eval_python_expression_for_test(content, scope, unused_root_name):
    """Legacy content parsing function using compile/exec.

    Args:
        content: string. Python source text to compile and execute.
        scope: dict. Name bindings made visible to the executed text.
        unused_root_name: unused; present to match the parse_content
            signature.

    Returns:
        dict. The restricted scope after execution completes.
    """
    # SECURITY: exec-based parsing is exploitable even with __builtins__
    # emptied (see test_exec); this parser is only installed during tests.
    print 'WARNING! This code is unsafe and uses compile/exec!'
    # First compiles and then evaluates a Python script text in a restricted
    # environment using provided bindings. Returns the resulting bindings if
    # evaluation completed.
    # create a new execution scope that has only the schema terms defined;
    # remove all other languages constructs including __builtins__
    restricted_scope = {}
    restricted_scope.update(scope)
    restricted_scope.update({'__builtins__': {}})
    code = compile(content, '<string>', 'exec')
    # pylint: disable-msg=exec-statement
    exec code in restricted_scope
    # pylint: enable-msg=exec-statement
    return restricted_scope
def not_implemented_parse_content(
    unused_content, unused_scope, unused_root_name):
    """Placeholder parser that always fails; install a real parser to use."""
    raise Exception('Not implemented.')
# by default no parser method is configured; set custom parser if you have it
parse_content = not_implemented_parse_content
def evaluate_python_expression_from_text(content, root_name, scope,
                                         noverify_text):
    """Compiles and evaluates a Python script in a restricted environment."""
    # Delegate to the currently configured module-level parser.
    bindings = parse_content(content, scope, root_name)
    if noverify_text:
        bindings['noverify'] = noverify_text
    # The script must have defined the expected root variable.
    if bindings.get(root_name) is None:
        raise Exception('Unable to find \'%s\'' % root_name)
    return bindings
def evaluate_javascript_expression_from_file(fname, root_name, scope, error):
    """Evaluates a JavaScript expression file, logging details on failure.

    Args:
        fname: string. Path of the JavaScript file to evaluate.
        root_name: string. Name of the variable the file must define.
        scope: dict. Bindings available to the evaluated content.
        error: callable taking a string. Receives diagnostic messages.

    Returns:
        dict. The resulting bindings from evaluating the converted content.

    Raises:
        Exception: the original evaluation failure, re-raised after logging.
    """
    (content, noverify_text) = convert_javascript_file_to_python(
        fname, root_name)
    try:
        return evaluate_python_expression_from_text(
            content, root_name, scope, noverify_text)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are
        # not intercepted; the caught exception is always re-raised below.
        error('Unable to parse %s in file %s\n  %s' % (
            root_name, fname, text_to_line_numbered_text(content)))
        for message in sys.exc_info():
            error(str(message))
        raise
class Verifier(object):
    """Verifies Units, Lessons, Assessments, Activities and their relations."""
    def __init__(self):
        # Every log line goes through echo_func; silent by default, replaced
        # by load_and_verify_model() with the caller-supplied function.
        self.echo_func = silent_echo
        self.schema_helper = SchemaHelper()
        self.errors = 0
        self.warnings = 0
        # Accumulates lines of a JavaScript-style export of the course model.
        self.export = []
    def verify_unit_fields(self, units):
        """Checks unit attributes and appends units to the export script."""
        self.export.append('units = Array();')
        for unit in units:
            if not is_one_of(unit.now_available, [True, False]):
                self.error(
                    'Bad now_available \'%s\' for unit id %s; expected '
                    '\'True\' or \'False\'' % (unit.now_available, unit.id))
            if not is_one_of(unit.type, UNIT_TYPES):
                self.error(
                    'Bad type \'%s\' for unit id %s; '
                    'expected: %s.' % (unit.type, unit.id, UNIT_TYPES))
            if unit.type == 'U':
                if not is_integer(unit.unit_id):
                    self.error(
                        'Expected integer unit_id, found %s in unit id '
                        ' %s' % (unit.unit_id, unit.id))
            self.export.append('')
            self.export.append('units[%s] = Array();' % unit.id)
            self.export.append('units[%s][\'lessons\'] = Array();' % unit.id)
            unit.list_properties('units[%s]' % unit.id, self.export)
    def verify_lesson_fields(self, lessons):
        """Checks lesson attributes and appends lessons to the export script."""
        for lesson in lessons:
            if not is_one_of(lesson.lesson_activity, ['yes', '']):
                self.error('Bad lesson_activity \'%s\' for lesson_id %s' % (
                    lesson.lesson_activity, lesson.lesson_id))
            self.export.append('')
            self.export.append('units[%s][\'lessons\'][%s] = Array();' % (
                lesson.unit_id, lesson.lesson_id))
            lesson.list_properties('units[%s][\'lessons\'][%s]' % (
                lesson.unit_id, lesson.lesson_id), self.export)
    def verify_unit_lesson_relationships(self, units, lessons):
        """Checks each lesson points to a unit and all lessons are in use."""
        used_lessons = []
        units.sort(key=lambda x: x.id)
        # for unit in units:
        for i in range(0, len(units)):
            unit = units[i]
            # check that unit ids are 1-based and sequential
            if unit.id != i + 1:
                self.error('Unit out of order: %s' % (unit.id))
            # get the list of lessons for each unit
            self.fine('Unit %s: %s' % (unit.id, unit.title))
            unit_lessons = []
            for lesson in lessons:
                if lesson.unit_id == unit.unit_id:
                    if lesson.unit_title != unit.title:
                        raise Exception(''.join([
                            'A unit_title of a lesson (id=%s) must match ',
                            'title of a unit (id=%s) the lesson belongs to.'
                        ]) % (lesson.lesson_id, lesson.unit_id))
                    unit_lessons.append(lesson)
                    used_lessons.append(lesson)
            # inspect all lessons for the current unit
            unit_lessons.sort(key=lambda x: x.lesson_id)
            for j in range(0, len(unit_lessons)):
                lesson = unit_lessons[j]
                # check that lesson_ids are 1-based and sequential
                if lesson.lesson_id != j + 1:
                    self.warn(
                        'Lesson lesson_id is out of order: expected %s, found '
                        ' %s (%s)' % (
                            j + 1, lesson.lesson_id, lesson.to_id_string()))
                self.fine('  Lesson %s: %s' % (
                    lesson.lesson_id, lesson.lesson_title))
        # find lessons not used by any of the units
        unused_lessons = list(lessons)
        for lesson in used_lessons:
            unused_lessons.remove(lesson)
        for lesson in unused_lessons:
            self.warn('Unused lesson_id %s (%s)' % (
                lesson.lesson_id, lesson.to_id_string()))
        # check all lessons point to known units
        for lesson in lessons:
            has = False
            for unit in units:
                if lesson.unit_id == unit.unit_id:
                    has = True
                    break
            if not has:
                self.error('Lesson has unknown unit_id %s (%s)' % (
                    lesson.unit_id, lesson.to_id_string()))
    def get_activity_as_python(self, unit_id, lesson_id):
        """Loads one activity file and returns its parsed 'activity' dict."""
        fname = os.path.join(
            os.path.dirname(__file__),
            '../assets/js/activity-%s.%s.js' % (unit_id, lesson_id))
        if not os.path.exists(fname):
            self.error('  Missing activity: %s' % fname)
        else:
            activity = evaluate_javascript_expression_from_file(
                fname, 'activity', Activity().scope, self.error)
            self.verify_activity_instance(activity, fname)
        # NOTE(review): if the file is missing, 'activity' is unbound here and
        # this return raises UnboundLocalError; confirm intent before fixing.
        return activity
    def verify_activities(self, lessons):
        """Loads and verifies all activities."""
        self.info('Loading activities:')
        count = 0
        for lesson in lessons:
            if lesson.lesson_activity == 'yes':
                count += 1
                activity = self.get_activity_as_python(
                    lesson.unit_id, lesson.lesson_id)
                self.export.append('')
                self.encode_activity_json(
                    activity, lesson.unit_id, lesson.lesson_id)
        self.info('Read %s activities' % count)
    def verify_assessment(self, units):
        """Loads and verifies all assessments."""
        self.export.append('')
        self.export.append('assessments = Array();')
        self.info('Loading assessment:')
        count = 0
        for unit in units:
            if unit.type == 'A':
                count += 1
                assessment_name = str(unit.unit_id)
                fname = os.path.join(
                    os.path.dirname(__file__),
                    '../assets/js/assessment-%s.js' % assessment_name)
                if not os.path.exists(fname):
                    self.error('  Missing assessment: %s' % fname)
                else:
                    assessment = evaluate_javascript_expression_from_file(
                        fname, 'assessment', Assessment().scope, self.error)
                    self.verify_assessment_instance(assessment, fname)
                    self.export.append('')
                    self.encode_assessment_json(assessment, assessment_name)
        self.info('Read %s assessments' % count)
    # NB: The exported script needs to define a gcb_regex() wrapper function
    @staticmethod
    def encode_regex(regex_str):
        """Encodes a JavaScript-style regex into a Python gcb_regex call."""
        # parse the regex into the base and modifiers. e.g., for /foo/i
        # base is 'foo' and modifiers is 'i'
        assert regex_str[0] == '/'
        # find the LAST '/' in regex_str (because there might be other
        # escaped '/' characters in the middle of regex_str)
        final_slash_index = regex_str.rfind('/')
        assert final_slash_index > 0
        base = regex_str[1:final_slash_index]
        modifiers = regex_str[final_slash_index+1:]
        func_str = 'gcb_regex(' + repr(base) + ', ' + repr(modifiers) + ')'
        return func_str
    def encode_activity_json(self, activity_dict, unit_id, lesson_id):
        """Encodes an activity dictionary into JSON."""
        output = []
        for elt in activity_dict['activity']:
            t = type(elt)
            encoded_elt = None
            if t is str:
                encoded_elt = {'type': 'string', 'value': elt}
            elif t is dict:
                qt = elt['questionType']
                encoded_elt = {'type': qt}
                if qt == 'multiple choice':
                    choices = elt['choices']
                    encoded_choices = [[x, y.value, z] for x, y, z in choices]
                    encoded_elt['choices'] = encoded_choices
                elif qt == 'multiple choice group':
                    # everything inside are primitive types that can be encoded
                    elt_copy = dict(elt)
                    del elt_copy['questionType']  # redundant
                    encoded_elt['value'] = elt_copy
                elif qt == 'freetext':
                    for k in elt.keys():
                        if k == 'questionType':
                            continue
                        elif k == 'correctAnswerRegex':
                            encoded_elt[k] = Verifier.encode_regex(elt[k].value)
                        else:
                            # ordinary string
                            encoded_elt[k] = elt[k]
                else:
                    assert False
            else:
                assert False
            assert encoded_elt
            output.append(encoded_elt)
        # N.B.: make sure to get the string quoting right!
        code_str = "units[%s]['lessons'][%s]['activity'] = " % (
            unit_id, lesson_id) + repr(json.dumps(output)) + ';'
        self.export.append(code_str)
        if 'noverify' in activity_dict:
            self.export.append('')
            noverify_code_str = "units[%s]['lessons'][%s]['code'] = " % (
                unit_id, lesson_id) + repr(activity_dict['noverify']) + ';'
            self.export.append(noverify_code_str)
    def encode_assessment_json(self, assessment_dict, assessment_name):
        """Encodes an assessment dictionary into JSON."""
        real_dict = assessment_dict['assessment']
        output = {}
        output['assessmentName'] = real_dict['assessmentName']
        if 'preamble' in real_dict:
            output['preamble'] = real_dict['preamble']
        output['checkAnswers'] = real_dict['checkAnswers'].value
        encoded_questions_list = []
        for elt in real_dict['questionsList']:
            encoded_elt = {}
            encoded_elt['questionHTML'] = elt['questionHTML']
            if 'lesson' in elt:
                encoded_elt['lesson'] = elt['lesson']
            if 'correctAnswerNumeric' in elt:
                encoded_elt['correctAnswerNumeric'] = elt[
                    'correctAnswerNumeric']
            if 'correctAnswerString' in elt:
                encoded_elt['correctAnswerString'] = elt['correctAnswerString']
            if 'correctAnswerRegex' in elt:
                encoded_elt['correctAnswerRegex'] = Verifier.encode_regex(
                    elt['correctAnswerRegex'].value)
            if 'choices' in elt:
                encoded_choices = []
                correct_answer_index = None
                for (ind, e) in enumerate(elt['choices']):
                    if type(e) is str:
                        encoded_choices.append(e)
                    elif e.term_type == CORRECT:
                        # the last CORRECT choice wins if several are marked
                        encoded_choices.append(e.value)
                        correct_answer_index = ind
                    else:
                        raise Exception("Invalid type in 'choices'")
                encoded_elt['choices'] = encoded_choices
                encoded_elt['correctAnswerIndex'] = correct_answer_index
            encoded_questions_list.append(encoded_elt)
        output['questionsList'] = encoded_questions_list
        # N.B.: make sure to get the string quoting right!
        code_str = 'assessments[\'' + assessment_name + '\'] = ' + repr(
            json.dumps(output)) + ';'
        self.export.append(code_str)
        if 'noverify' in assessment_dict:
            self.export.append('')
            noverify_code_str = ('assessments[\'' + assessment_name +
                                 '\'] = ' + repr(assessment_dict['noverify']) +
                                 ';')
            self.export.append(noverify_code_str)
    def format_parse_log(self):
        """Returns the schema helper's parse log as one printable string."""
        return 'Parse log:\n%s' % '\n'.join(self.schema_helper.parse_log)
    def verify_assessment_instance(self, scope, fname):
        """Verifies compliance of assessment with schema."""
        if scope:
            try:
                self.schema_helper.check_instances_match_schema(
                    scope['assessment'], SCHEMA['assessment'], 'assessment')
                self.info('  Verified assessment %s' % fname)
                if OUTPUT_DEBUG_LOG:
                    self.info(self.format_parse_log())
            except SchemaException as e:
                self.error('  Error in assessment %s\n%s' % (
                    fname, self.format_parse_log()))
                raise e
        else:
            self.error('  Unable to evaluate \'assessment =\' in %s' % fname)
    def verify_activity_instance(self, scope, fname):
        """Verifies compliance of activity with schema."""
        if scope:
            try:
                self.schema_helper.check_instances_match_schema(
                    scope['activity'], SCHEMA['activity'], 'activity')
                self.info('  Verified activity %s' % fname)
                if OUTPUT_DEBUG_LOG:
                    self.info(self.format_parse_log())
            except SchemaException as e:
                self.error('  Error in activity %s\n%s' % (
                    fname, self.format_parse_log()))
                raise e
        else:
            self.error('  Unable to evaluate \'activity =\' in %s' % fname)
    def fine(self, x):
        # Finest-grained logging; emitted only when OUTPUT_FINE_LOG is set.
        if OUTPUT_FINE_LOG:
            self.echo_func('FINE: ' + x)
    def info(self, x):
        # Informational logging; always emitted.
        self.echo_func('INFO: ' + x)
    def warn(self, x):
        # Logs a warning and counts it toward the final summary.
        self.warnings += 1
        self.echo_func('WARNING: ' + x)
    def error(self, x):
        # Logs an error and counts it toward the final summary.
        self.errors += 1
        self.echo_func('ERROR: ' + x)
    def load_and_verify_model(self, echo_func):
        """Loads, parses and verifies all content for a course."""
        self.echo_func = echo_func
        self.info('Started verification in: %s' % __file__)
        unit_file = os.path.join(os.path.dirname(__file__), '../data/unit.csv')
        lesson_file = os.path.join(
            os.path.dirname(__file__), '../data/lesson.csv')
        self.info('Loading units from: %s' % unit_file)
        units = read_objects_from_csv_file(unit_file, UNITS_HEADER, Unit)
        self.info('Read %s units' % len(units))
        self.info('Loading lessons from: %s' % lesson_file)
        lessons = read_objects_from_csv_file(
            lesson_file, LESSONS_HEADER, Lesson)
        self.info('Read %s lessons' % len(lessons))
        self.verify_unit_fields(units)
        self.verify_lesson_fields(lessons)
        self.verify_unit_lesson_relationships(units, lessons)
        try:
            self.verify_activities(lessons)
            self.verify_assessment(units)
        except SchemaException as e:
            self.error(str(e))
        # NOTE(review): the two message parts below concatenate with no
        # separator ('...statistics: %sCompleted verification...'); looks
        # like a missing '\n', but test_sample_assets matches on substrings,
        # so confirm before changing this runtime string.
        info = (
            'Schema usage statistics: %s'
            'Completed verification: %s warnings, %s errors.' % (
                self.schema_helper.type_stats, self.warnings, self.errors))
        self.info(info)
        return self.warnings, self.errors, info
def run_all_regex_unit_tests():
    """Executes all tests related to regular expressions."""
    # pylint: disable-msg=anomalous-backslash-in-string
    # escape_javascript_regex: JS regex literals get wrapped in regex("...").
    assert escape_javascript_regex(
        'correctAnswerRegex: /site:bls.gov?/i, blah') == (
            'correctAnswerRegex: regex(\"/site:bls.gov?/i\"), blah')
    assert escape_javascript_regex(
        'correctAnswerRegex: /site:http:\/\/www.google.com?q=abc/i, blah') == (
            'correctAnswerRegex: '
            'regex(\"/site:http:\/\/www.google.com?q=abc/i\"), blah')
    # comment removal: URLs containing '//' must be preserved.
    assert remove_javascript_multi_line_comment(
        'blah\n/*\ncomment\n*/\nblah') == 'blah\n\nblah'
    assert remove_javascript_multi_line_comment(
        'blah\nblah /*\ncomment\nblah */\nblah') == ('blah\nblah \nblah')
    assert remove_javascript_single_line_comment(
        'blah\n// comment\nblah') == 'blah\n\nblah'
    assert remove_javascript_single_line_comment(
        'blah\nblah http://www.foo.com\nblah') == (
            'blah\nblah http://www.foo.com\nblah')
    assert remove_javascript_single_line_comment(
        'blah\nblah // comment\nblah') == 'blah\nblah\nblah'
    assert remove_javascript_single_line_comment(
        'blah\nblah // comment http://www.foo.com\nblah') == (
            'blah\nblah\nblah')
    assert parse_content_marked_no_verify(
        'blah1\n// <gcb-no-verify>\n/blah2\n// </gcb-no-verify>\nblah3')[0] == (
            'blah1\n// \nblah3')
    # pylint: enable-msg=anomalous-backslash-in-string
    # encode_regex: JS '/base/mods' literals become gcb_regex(base, mods).
    assert Verifier.encode_regex('/white?/i') == """gcb_regex('white?', 'i')"""
    assert (Verifier.encode_regex('/jane austen (book|books) \\-price/i') ==
            r"""gcb_regex('jane austen (book|books) \\-price', 'i')""")
    assert (Verifier.encode_regex('/Kozanji|Kozan-ji|Kosanji|Kosan-ji/i') ==
            r"""gcb_regex('Kozanji|Kozan-ji|Kosanji|Kosan-ji', 'i')""")
    assert (Verifier.encode_regex('/Big Time College Sport?/i') ==
            "gcb_regex('Big Time College Sport?', 'i')")
    assert (Verifier.encode_regex('/354\\s*[+]\\s*651/') ==
            r"""gcb_regex('354\\s*[+]\\s*651', '')""")
def run_all_schema_helper_unit_tests():
    """Executes all tests related to schema validation."""
    def assert_same(a, b):
        """Fails unless a equals b."""
        if a != b:
            raise Exception('Expected:\n  %s\nFound:\n  %s' % (a, b))
    def assert_pass(instances, types, expected_result=None):
        """Asserts instances match the schema, optionally checking the result."""
        try:
            schema_helper = SchemaHelper()
            result = schema_helper.check_instances_match_schema(
                instances, types, 'test')
            if OUTPUT_DEBUG_LOG:
                print '\n'.join(schema_helper.parse_log)
            if expected_result:
                assert_same(expected_result, result)
        except SchemaException as e:
            if OUTPUT_DEBUG_LOG:
                print str(e)
                print '\n'.join(schema_helper.parse_log)
            raise
    def assert_fails(func):
        """Asserts func() raises a SchemaException."""
        try:
            func()
            raise Exception('Expected to fail')
        except SchemaException as e:
            if OUTPUT_DEBUG_LOG:
                print str(e)
    def assert_fail(instances, types):
        """Asserts instances do NOT match the schema."""
        assert_fails(lambda: assert_pass(instances, types))
    def create_python_dict_from_js_object(js_object):
        """Converts a JavaScript object literal into a Python dict."""
        python_str, noverify = convert_javascript_to_python(
            'var x = ' + js_object, 'x')
        ret = evaluate_python_expression_from_text(
            python_str, 'x', Assessment().scope, noverify)
        return ret['x']
    # CSV tests
    units = read_objects_from_csv(
        [
            ['id', 'type', 'now_available'],
            [1, 'U', 'True'],
            [1, 'U', 'False']],
        'id,type,now_available', Unit, converter=UNIT_CSV_TO_DB_CONVERTER)
    assert units[0].now_available
    assert not units[1].now_available
    read_objects_from_csv(
        [['id', 'type'], [1, 'none']], 'id,type', Unit)
    def reader_one():
        # header row shorter than the declared header must fail
        return read_objects_from_csv(
            [['id', 'type'], [1, 'none']], 'id,type,title', Unit)
    assert_fails(reader_one)
    def reader_two():
        # data row shorter than the header must fail
        read_objects_from_csv(
            [['id', 'type', 'title'], [1, 'none']], 'id,type,title', Unit)
    assert_fails(reader_two)
    # context tests
    assert_same(Context().new([]).new(['a']).new(['b', 'c']).format_path(),
                ('//a/b/c'))
    # simple map tests
    assert_pass({'name': 'Bob'}, {'name': STRING})
    assert_fail('foo', 'bar')
    assert_fail({'name': 'Bob'}, {'name': INTEGER})
    assert_fail({'name': 12345}, {'name': STRING})
    assert_fail({'amount': 12345}, {'name': INTEGER})
    assert_fail({'regex': Term(CORRECT)}, {'regex': Term(REGEX)})
    assert_pass({'name': 'Bob'}, {'name': STRING, 'phone': STRING})
    assert_pass({'name': 'Bob'}, {'phone': STRING, 'name': STRING})
    assert_pass({'name': 'Bob'},
                {'phone': STRING, 'name': STRING, 'age': INTEGER})
    # mixed attributes tests
    assert_pass({'colors': ['red', 'blue']}, {'colors': [STRING]})
    assert_pass({'colors': []}, {'colors': [STRING]})
    assert_fail({'colors': {'red': 'blue'}}, {'colors': [STRING]})
    assert_fail({'colors': {'red': 'blue'}}, {'colors': [FLOAT]})
    assert_fail({'colors': ['red', 'blue', 5.5]}, {'colors': [STRING]})
    assert_fail({'colors': ['red', 'blue', {'foo': 'bar'}]},
                {'colors': [STRING]})
    assert_fail({'colors': ['red', 'blue'], 'foo': 'bar'},
                {'colors': [STRING]})
    assert_pass({'colors': ['red', 1]}, {'colors': [[STRING, INTEGER]]})
    assert_fail({'colors': ['red', 'blue']}, {'colors': [[STRING, INTEGER]]})
    assert_fail({'colors': [1, 2, 3]}, {'colors': [[STRING, INTEGER]]})
    assert_fail({'colors': ['red', 1, 5.3]}, {'colors': [[STRING, INTEGER]]})
    assert_pass({'colors': ['red', 'blue']}, {'colors': [STRING]})
    assert_fail({'colors': ['red', 'blue']}, {'colors': [[STRING]]})
    assert_fail({'colors': ['red', ['blue']]}, {'colors': [STRING]})
    assert_fail({'colors': ['red', ['blue', 'green']]}, {'colors': [STRING]})
    # required attribute tests
    assert_pass({'colors': ['red', 5]}, {'colors': [[STRING, INTEGER]]})
    assert_fail({'colors': ['red', 5]}, {'colors': [[INTEGER, STRING]]})
    assert_pass({'colors': ['red', 5]}, {'colors': [STRING, INTEGER]})
    assert_pass({'colors': ['red', 5]}, {'colors': [INTEGER, STRING]})
    assert_fail({'colors': ['red', 5, 'FF0000']},
                {'colors': [[STRING, INTEGER]]})
    # an array and a map of primitive type tests
    assert_pass({'color': {'name': 'red', 'rgb': 'FF0000'}},
                {'color': {'name': STRING, 'rgb': STRING}})
    assert_fail({'color': {'name': 'red', 'rgb': ['FF0000']}},
                {'color': {'name': STRING, 'rgb': STRING}})
    assert_fail({'color': {'name': 'red', 'rgb': 'FF0000'}},
                {'color': {'name': STRING, 'rgb': INTEGER}})
    assert_fail({'color': {'name': 'red', 'rgb': 'FF0000'}},
                {'color': {'name': STRING, 'rgb': {'hex': STRING}}})
    assert_pass({'color': {'name': 'red', 'rgb': 'FF0000'}},
                {'color': {'name': STRING, 'rgb': STRING}})
    assert_pass({'colors':
                 [{'name': 'red', 'rgb': 'FF0000'},
                  {'name': 'blue', 'rgb': '0000FF'}]},
                {'colors': [{'name': STRING, 'rgb': STRING}]})
    assert_fail({'colors':
                 [{'name': 'red', 'rgb': 'FF0000'},
                  {'phone': 'blue', 'rgb': '0000FF'}]},
                {'colors': [{'name': STRING, 'rgb': STRING}]})
    # boolean type tests
    assert_pass({'name': 'Bob', 'active': True},
                {'name': STRING, 'active': BOOLEAN})
    assert_pass({'name': 'Bob', 'active': [5, True, False]},
                {'name': STRING, 'active': [INTEGER, BOOLEAN]})
    assert_pass({'name': 'Bob', 'active': [5, True, 'false']},
                {'name': STRING, 'active': [STRING, INTEGER, BOOLEAN]})
    assert_fail({'name': 'Bob', 'active': [5, True, 'False']},
                {'name': STRING, 'active': [[INTEGER, BOOLEAN]]})
    # optional attribute tests
    assert_pass({'points':
                 [{'x': 1, 'y': 2, 'z': 3}, {'x': 3, 'y': 2, 'z': 1},
                  {'x': 2, 'y': 3, 'z': 1}]},
                {'points': [{'x': INTEGER, 'y': INTEGER, 'z': INTEGER}]})
    assert_pass({'points':
                 [{'x': 1, 'z': 3}, {'x': 3, 'y': 2}, {'y': 3, 'z': 1}]},
                {'points': [{'x': INTEGER, 'y': INTEGER, 'z': INTEGER}]})
    assert_pass({'account':
                 [{'name': 'Bob', 'age': 25, 'active': True}]},
                {'account':
                 [{'age': INTEGER, 'name': STRING, 'active': BOOLEAN}]})
    assert_pass({'account':
                 [{'name': 'Bob', 'active': True}]},
                {'account':
                 [{'age': INTEGER, 'name': STRING, 'active': BOOLEAN}]})
    # nested array tests
    assert_fail({'name': 'Bob', 'active': [5, True, 'false']},
                {'name': STRING, 'active': [[BOOLEAN]]})
    assert_fail({'name': 'Bob', 'active': [True]},
                {'name': STRING, 'active': [[STRING]]})
    assert_pass({'name': 'Bob', 'active': ['true']},
                {'name': STRING, 'active': [[STRING]]})
    assert_pass({'name': 'flowers', 'price': ['USD', 9.99]},
                {'name': STRING, 'price': [[STRING, FLOAT]]})
    assert_pass({'name': 'flowers', 'price':
                 [['USD', 9.99], ['CAD', 11.79], ['RUB', 250.23]]},
                {'name': STRING, 'price': [[STRING, FLOAT]]})
    # selector tests
    assert_pass({'likes': [{'state': 'CA', 'food': 'cheese'},
                           {'state': 'NY', 'drink': 'wine'}]},
                {'likes': [{'state': 'CA', 'food': STRING},
                           {'state': 'NY', 'drink': STRING}]})
    assert_pass({'likes': [{'state': 'CA', 'food': 'cheese'},
                           {'state': 'CA', 'food': 'nuts'}]},
                {'likes': [{'state': 'CA', 'food': STRING},
                           {'state': 'NY', 'drink': STRING}]})
    assert_fail({'likes': {'state': 'CA', 'drink': 'cheese'}},
                {'likes': [{'state': 'CA', 'food': STRING},
                           {'state': 'NY', 'drink': STRING}]})
    # creating from dict tests
    assert_same(create_python_dict_from_js_object('{"active": true}'),
                {'active': Term(BOOLEAN, True)})
    assert_same(create_python_dict_from_js_object(
        '{"a": correct("hello world")}'),
        {'a': Term(CORRECT, 'hello world')})
    assert_same(create_python_dict_from_js_object(
        '{correctAnswerRegex: /hello/i}'),
        {'correctAnswerRegex': Term(REGEX, '/hello/i')})
def run_example_activity_tests():
    """Parses and validates example activity file."""
    fname = os.path.join(
        os.path.dirname(__file__), '../assets/js/activity-examples.js')
    if not os.path.exists(fname):
        # Bug fix: the original passed fname as a second Exception argument
        # ('Missing file: %s', fname), which left the placeholder unformatted.
        raise Exception('Missing file: %s' % fname)
    verifier = Verifier()
    verifier.echo_func = echo
    activity = evaluate_javascript_expression_from_file(
        fname, 'activity', Activity().scope, verifier.echo_func)
    verifier.verify_activity_instance(activity, fname)
def test_exec():
    """This test shows that exec/compile are exploitable, thus not safe."""
    # The payload escapes an "empty __builtins__" sandbox by walking the type
    # hierarchy from a bare tuple back to a loaded module's builtins.
    content = """
foo = [
    c for c in ().__class__.__base__.__subclasses__()
    if c.__name__ == 'catch_warnings'
][0]()._module.__builtins__
"""
    restricted_scope = {}
    restricted_scope.update({'__builtins__': {}})
    code = compile(content, '<string>', 'exec')
    # pylint: disable-msg=exec-statement
    exec code in restricted_scope
    # pylint: enable-msg=exec-statement
    # Despite the emptied __builtins__, the sandboxed code recovered them.
    assert 'isinstance' in restricted_scope.get('foo')
def test_sample_assets():
    """Test assets shipped with the sample course."""
    _, _, output = Verifier().load_and_verify_model(echo)
    # Both the exact schema statistics and a clean summary must be present.
    expected_stats = (
        'Schema usage statistics: {'
        '\'REGEX\': 19, \'STRING\': 415, \'NUMBER\': 1, '
        '\'BOOLEAN\': 81, \'dict\': 73, \'str\': 41, \'INTEGER\': 9, '
        '\'CORRECT\': 9}')
    expected_summary = 'Completed verification: 0 warnings, 0 errors.'
    if expected_stats not in output or expected_summary not in output:
        raise Exception('Sample course verification failed.\n%s' % output)
def run_all_unit_tests():
    """Runs all unit tests in this module."""
    # Temporarily install the unsafe compile/exec parser; always restore the
    # previously configured parser, even if a test fails.
    global parse_content
    saved_parser = parse_content
    try:
        parse_content = legacy_eval_python_expression_for_test
        for test in (
                run_all_regex_unit_tests,
                run_all_schema_helper_unit_tests,
                run_example_activity_tests,
                test_exec,
                test_sample_assets):
            test()
    finally:
        parse_content = saved_parser
# Run this module's self-tests when executed directly as a script.
if __name__ == '__main__':
    run_all_unit_tests()
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remote environment manager for extract-transform-load utilities."""
__author__ = [
'johncox@google.com',
]
import os
import sys
import appengine_config
# Override SERVER_SOFTWARE before doing any App Engine imports so import-time
# detection of dev mode, done against SERVER_SOFTWARE of 'Development*', fails.
# Once imports are done, this environment variable can be reset as needed (for
# tests, etc.). pylint: disable-msg=g-import-not-at-top
# Any value not matching 'Development*' makes the SDK treat this process as
# non-dev (see the comment above).
SERVER_SOFTWARE = 'Production Emulation'
if appengine_config.PRODUCTION_MODE:
    sys.exit('Running etl/tools/remote.py in production is not supported.')
os.environ['SERVER_SOFTWARE'] = SERVER_SOFTWARE
# These imports must happen AFTER SERVER_SOFTWARE is overridden above.
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.tools import appengine_rpc
from google.appengine.tools import remote_api_shell
# String. Used to detect appspot.com servers.
_APPSPOT_SERVER_SUFFIX = 'appspot.com'
# String. Password used when a password is not necessary.
_BOGUS_PASSWORD = 'bogus_password'
# String. Infix for google.com application ids.
_GOOGLE_APPLICATION_INFIX = 'google.com'
# String. Prefix App Engine uses for application ids in the dev appserver.
_LOCAL_APPLICATION_ID_PREFIX = 'dev~'
# String. Prefix used to detect if a server is running locally.
_LOCAL_SERVER_PREFIX = 'localhost'
# String. Prefix App Engine uses for application ids in production.
_REMOTE_APPLICATION_ID_PREFIX = 's~'
# String. Email address used unless os.environ['USER_EMAIL'] is set in tests.
_TEST_EMAIL = 'test@example.com'
# String. os.environ['SERVER_SOFTWARE'] value that indicates we're running
# under the test environment.
TEST_SERVER_SOFTWARE = 'Test'
class Error(Exception):
    """Base error type for all exceptions raised by this module."""
class EnvironmentAuthenticationError(Error):
    """Raised when establishing an environment fails due to bad credentials."""
class Environment(object):
    """Sets up the execution environment to use remote_api for RPCs.
    As with any use of remote_api, this has three important caveats:
    1. By going through the Remote API rather than your application's handlers,
       you are bypassing any business logic in those handlers. It is easy in
       this way to accidentally corrupt the system receiving your RPCs.
    2. There is no guarantee that the code running on the system receiving your
       RPCs is the same version as the code running locally. It is easy to have
       version skew that corrupts the destination system.
    3. Execution is markedly slower than running in production.
    """
    def __init__(
        self, application_id, server, path='/_ah/remote_api'):
        """Constructs a new Environment.
        Args:
            application_id: string. The application id of the environment
                (myapp).
            server: string. The full name of the server to connect to
                (myurl.appspot.com).
            path: string. The URL of your app's remote api entry point.
        """
        self._application_id = application_id
        self._path = path
        self._server = server
    @staticmethod
    def _dev_appserver_auth_func():
        """Auth function to run for dev_appserver (bogus password)."""
        return raw_input('Email: '), _BOGUS_PASSWORD
    @staticmethod
    def _test_auth_func():
        """Auth function to run in tests (bogus username and password)."""
        return os.environ.get('USER_EMAIL', _TEST_EMAIL), _BOGUS_PASSWORD
    def _get_auth_func(self):
        """Returns authentication function for the remote API."""
        # Pick credentials by environment: canned for tests, prompt-with-bogus
        # password for dev_appserver, real interactive auth otherwise.
        if os.environ.get('SERVER_SOFTWARE', '').startswith(
                TEST_SERVER_SOFTWARE):
            return self._test_auth_func
        elif self._is_localhost():
            return self._dev_appserver_auth_func
        else:
            return remote_api_shell.auth_func
    def _get_internal_application_id(self):
        """Returns string containing App Engine's internal id representation."""
        # Production ids look like 's~myapp'; dev_appserver uses 'dev~myapp';
        # non-appspot (google.com) apps look like 's~google.com:myapp'.
        prefix = _REMOTE_APPLICATION_ID_PREFIX
        if self._is_localhost():
            prefix = _LOCAL_APPLICATION_ID_PREFIX
        elif not self._is_appspot():
            prefix = '%s%s:' % (prefix, _GOOGLE_APPLICATION_INFIX)
        return prefix + self._application_id
    def _get_secure(self):
        """Returns boolean indicating whether or not to use https."""
        return not self._is_localhost()
    def _is_appspot(self):
        """Returns True iff server is appspot.com."""
        return self._server.endswith(_APPSPOT_SERVER_SUFFIX)
    def _is_localhost(self):
        """Returns True if environment is dev_appserver and False otherwise."""
        return self._server.startswith(_LOCAL_SERVER_PREFIX)
    def establish(self):
        """Establishes the environment for RPC execution."""
        try:
            remote_api_stub.ConfigureRemoteApi(
                self._get_internal_application_id(), self._path,
                self._get_auth_func(), servername=self._server,
                save_cookies=True, secure=self._get_secure(),
                rpc_server_factory=appengine_rpc.HttpRpcServer)
            remote_api_stub.MaybeInvokeAuthentication()
        except AttributeError:
            # NOTE(review): remote_api_stub appears to surface failed
            # authentication as AttributeError; re-raised here as a
            # domain-specific error. Confirm against the SDK version in use.
            raise EnvironmentAuthenticationError
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Libraries for writing extract-transform-load scripts."""
__author__ = [
'johncox@google.com',
]
import argparse
from controllers import sites
def get_context(course_url_prefix):
    """Gets requested application context from the given course URL prefix.

    Args:
        course_url_prefix: string. Value of etl.py's course_url_prefix flag.

    Returns:
        sites.ApplicationContext for the first matching course, else None.
    """
    wanted_prefix = 'course:%s:' % course_url_prefix
    for context in sites.get_all_courses():
        if context.raw.startswith(wanted_prefix):
            return context
    return None
class Job(object):
"""Abstract base class for user-defined custom ETL jobs.
Custom jobs can be executed by etl.py. The advantage of this is that they
can run arbitrary local computations, but calls to App Engine services
    (db.get() or db.put(), for example) are executed against a remote server.
This allows you to perform arbitrary computations against your app's data,
and to construct data pipelines that are not possible within the App Engine
execution environment.
When you run your custom job under etl.py in this way, it authenticates
    against the remote server, prompting the user for credentials if necessary.
It then configures the local environment so RPCs execute against the
requested remote endpoint.
It then imports your custom job. Your job must be a Python class that is
a child of this class. Before invoking etl.py, you must configure sys.path
so all required libraries are importable. See etl.py for details. Your
class must override main() with the computations you want your job to
perform.
You invoke your custom job via etl.py:
$ python etl.py run path.to.my.Job /cs101 myapp server.appspot.com \
--job_args='more_args --delegated_to my.Job'
Before main() is executed, arguments are parsed. The full set of parsed
arguments passed to etl.py are available in your job as self.etl_args. The
arguments passed as a quote-enclosed string to --job_args, if any, are
delegated to your job. An argument parser is available as self.parser. You
must override self._configure_parser to register command-line arguments for
parsing. They will be parsed in advance of running main() and will be
available as self.args.
See tools/etl/examples.py for some nontrivial sample job implementations.
"""
def __init__(self, parsed_etl_args):
"""Constructs a new job.
Args:
parsed_etl_args: argparse.Namespace. Parsed arguments passed to
etl.py.
"""
self._parsed_args = None
self._parsed_etl_args = parsed_etl_args
self._parser = None
def _configure_parser(self):
"""Configures custom command line parser for this job, if any.
For example:
self.parser.add_argument(
'my_arg', help='A required argument', type=str)
"""
pass
def main(self):
"""Computations made by this job; must be overridden in subclass."""
pass
@property
def args(self):
"""Returns etl.py's parsed --job_args, or None if run() not invoked."""
return self._parsed_args
@property
def etl_args(self):
"""Returns parsed etl.py arguments."""
return self._parsed_etl_args
@property
def parser(self):
"""Returns argparse.ArgumentParser, or None if run() not yet invoked."""
if not self._parser:
self._parser = argparse.ArgumentParser(
prog='%s.%s' % (
self.__class__.__module__, self.__class__.__name__),
usage=(
'etl.py run %(prog)s [etl.py options] [--job_args] '
'[%(prog)s options]'))
return self._parser
def _parse_args(self):
self._configure_parser()
self._parsed_args = self.parser.parse_args(
self._parsed_etl_args.job_args)
def run(self):
"""Executes the job; called for you by etl.py."""
self._parse_args()
self.main()
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Extract-transform-load utility.
There are three features:
1. Download and upload of Course Builder 1.3 data:
$ python etl.py download course /cs101 myapp server.appspot.com archive.zip
This will result in a file called archive.zip that contains the files that make
up the Course Builder 1.3 course found at the URL /cs101 on the application with
id myapp running on the server named server.appspot.com. archive.zip will
contain assets and data files from the course along with a manifest.json
enumerating them. The format of archive.zip will change and should not be relied
upon.
For upload,
$ python etl.py upload course /cs101 myapp server.appspot.com \
--archive_path archive.zip
2. Download of datastore entities. This feature is experimental and upload is
not supported:
$ python etl.py download datastore /cs101 myapp server.appspot.com \
--archive_path archive.zip --datastore_types model1,model2
This will result in a file called archive.zip that contains a dump of all model1
and model2 instances found in the specified course, identified as above. The
archive will contain serialized data along with a manifest. The format of
archive.zip will change and should not be relied upon.
3. Execution of custom jobs.
$ python etl.py run path.to.my.Job /cs101 myapp server.appspot.com \
--job_args='more_args --delegated_to my.Job'
This requires that you have written a custom class named Job found in the
directory path/to/my, relative to the Course Builder root. Job's main method
will be executed against the specified course, identified as above. See
etl_lib.Job for more information.
In order to run this script, you must add the following to the head of sys.path:
1. The absolute path of your Course Builder installation.
2. The absolute path of your App Engine SDK.
3. The absolute paths of third party libraries from the SDK used by Course
Builder:
fancy_urllib
jinja2
webapp2
webob
Their locations in the supported 1.7.7 App Engine SDK are
<sdk_path>/lib/fancy_urllib
<sdk_path>/lib/jinja2-2.6
<sdk_path>/lib/webapp2-2.5.2
<sdk_path>/lib/webob-1.2.3
where <sdk_path> is the absolute path of the 1.7.7 App Engine SDK.
4. If you are running a custom job, the absolute paths of all code required by
your custom job, unless covered above.
When running etl.py against a remote endpoint you will be prompted for a
username and password. If the remote endpoint is a development server, you may
enter any username and password. If the remote endpoint is in production, enter
your username and an application-specific password. See
http://support.google.com/accounts/bin/answer.py?hl=en&answer=185833 for help on
application-specific passwords.
Pass --help for additional usage information.
"""
__author__ = [
'johncox@google.com (John Cox)',
]
import argparse
import functools
import logging
import os
import re
import sys
import traceback
import zipfile

import yaml
# Placeholders for modules we'll import after setting up sys.path. This allows
# us to avoid lint suppressions at every callsite.
announcements = None
appengine_config = None
courses = None
db = None
etl_lib = None
jobs = None
metadata = None
namespace_manager = None
remote = None
transforms = None
vfs = None
# String. Prefix for files stored in an archive.
_ARCHIVE_PATH_PREFIX = 'files'
# String. End of the path to course.json in an archive.
_COURSE_JSON_PATH_SUFFIX = 'data/course.json'
# String. End of the path to course.yaml in an archive.
_COURSE_YAML_PATH_SUFFIX = 'course.yaml'
# Regex. Format of __internal_names__ used by datastore kinds.
_INTERNAL_DATASTORE_KIND_REGEX = re.compile(r'^__.*__$')
# Path prefix strings from local disk that will be included in the archive.
_LOCAL_WHITELIST = frozenset([_COURSE_YAML_PATH_SUFFIX, 'assets', 'data'])
# Path prefix strings that are subdirectories of the whitelist that we actually
# want to exclude because they aren't userland code and will cause conflicts.
_LOCAL_WHITELIST_EXCLUDES = frozenset(['assets/lib'])
# logging.Logger. Module logger.
_LOG = logging.getLogger('coursebuilder.tools.etl')
logging.basicConfig()
# List of string. Valid values for --log_level.
_LOG_LEVEL_CHOICES = ['DEBUG', 'ERROR', 'INFO', 'WARNING']
# String. Name of the manifest file.
_MANIFEST_FILENAME = 'manifest.json'
# String. Identifier for download mode.
_MODE_DOWNLOAD = 'download'
# String. Identifier for custom run mode.
_MODE_RUN = 'run'
# String. Identifier for upload mode.
_MODE_UPLOAD = 'upload'
# List of all modes.
_MODES = [_MODE_DOWNLOAD, _MODE_RUN, _MODE_UPLOAD]
# Int. The number of times to retry remote_api calls.
_RETRIES = 3
# String. Identifier for type corresponding to course definition data.
_TYPE_COURSE = 'course'
# String. Identifier for type corresponding to datastore entities.
_TYPE_DATASTORE = 'datastore'
# Command-line argument configuration.
PARSER = argparse.ArgumentParser()
# Positional: operation to perform (download, run, or upload).
PARSER.add_argument(
    'mode', choices=_MODES,
    help='Indicates whether we are downloading or uploading data', type=str)
# Positional: entity type (course/datastore), or a dotted job path for 'run'.
PARSER.add_argument(
    'type',
    help=(
        'Type of entity to process. If mode is %s or %s, should be one of '
        '%s or %s. If mode is %s, should be an importable dotted path to your '
        'etl_lib.Job subclass') % (
            _MODE_DOWNLOAD, _MODE_UPLOAD, _TYPE_COURSE, _TYPE_DATASTORE,
            _MODE_RUN),
    type=str)
# Positionals identifying the target course and remote endpoint.
PARSER.add_argument(
    'course_url_prefix',
    help=(
        "URL prefix of the course you want to download (e.g. '/foo' in "
        "'course:/foo:/directory:namespace'"), type=str)
PARSER.add_argument(
    'application_id',
    help="The id of the application to read from (e.g. 'myapp')", type=str)
PARSER.add_argument(
    'server',
    help=(
        'The full name of the source application to read from (e.g. '
        'myapp.appspot.com)'), type=str)
# Optional flags; archive_path is validated per-mode in _validate_arguments.
PARSER.add_argument(
    '--archive_path',
    help=(
        'Absolute path of the archive file to read or write; required if mode '
        'is %s or %s' % (_MODE_DOWNLOAD, _MODE_UPLOAD)), type=str)
PARSER.add_argument(
    '--batch_size',
    help='Number of results to attempt to retrieve per batch',
    default=20, type=int)
PARSER.add_argument(
    '--datastore_types',
    help=(
        "When type is '%s', comma-separated list of datastore model types to "
        'process; all models are processed by default' % _TYPE_DATASTORE),
    type=lambda s: s.split(','))
PARSER.add_argument(
    '--job_args', default=[],
    help=(
        'If mode is %s, string containing args delegated to etl_lib.Job '
        'subclass') % _MODE_RUN, type=lambda s: s.split())
PARSER.add_argument(
    '--log_level', choices=_LOG_LEVEL_CHOICES,
    help='Level of logging messages to emit', default='INFO',
    type=lambda s: s.upper())
class _Archive(object):
"""Manager for local archives of Course Builder data.
The internal format of the archive may change from version to version; users
must not depend on it.
Archives contain assets and data from a single course, along with a manifest
detailing the course's raw definition string, version of Course Builder the
course is compatible with, and the list of course files contained within
the archive.
# TODO(johncox): possibly obfuscate this archive so it cannot be unzipped
# outside etl.py. Add a command-line flag for creating a zip instead. For
# uploads, require an obfuscated archive, not a zip.
"""
def __init__(self, path):
"""Constructs a new archive.
Args:
path: string. Absolute path where the archive will be written.
"""
self._path = path
self._zipfile = None
@classmethod
def get_external_path(cls, internal_path):
"""Gets external path string from results of cls.get_internal_path."""
prefix = _ARCHIVE_PATH_PREFIX + os.sep
assert internal_path.startswith(prefix)
return internal_path.split(prefix)[1]
@classmethod
def get_internal_path(cls, external_path):
"""Get path string used in the archive from an external path string.
Generates the path used within an archive for an asset. All assets
(meaning all archive contents except the manifest file) must have
their paths generated this way, and those paths must be re-translated to
external paths via cls.get_external_path before use with systems
external to the archive file.
Args:
external_path: string. Path to generate an internal archive path
from.
Returns:
String. Internal archive path.
"""
assert not external_path.startswith(_ARCHIVE_PATH_PREFIX)
return os.path.join(
_ARCHIVE_PATH_PREFIX, _remove_bundle_root(external_path))
def add(self, filename, contents):
"""Adds contents to the archive.
Args:
filename: string. Path of the contents to add.
contents: bytes. Contents to add.
"""
self._zipfile.writestr(filename, contents)
def add_local_file(self, local_filename, internal_filename):
"""Adds a file from local disk to the archive.
Args:
local_filename: string. Path on disk of file to add.
internal_filename: string. Internal archive path to write to.
"""
self._zipfile.write(local_filename, arcname=internal_filename)
def close(self):
"""Closes archive and test for integrity; must close before read."""
self._zipfile.testzip()
self._zipfile.close()
def get(self, path):
"""Return the raw bytes of the archive entity found at path.
Returns None if path is not in the archive.
Args:
path: string. Path of file to retrieve from the archive.
Returns:
Bytes of file contents.
"""
assert self._zipfile
try:
return self._zipfile.read(path)
except KeyError:
pass
def open(self, mode):
"""Opens archive in the mode given by mode string ('r', 'w', 'a')."""
assert not self._zipfile
self._zipfile = zipfile.ZipFile(self._path, mode)
@property
def manifest(self):
"""Returns the archive's manifest."""
return _Manifest.from_json(self.get(_MANIFEST_FILENAME))
@property
def path(self):
return self._path
class _Manifest(object):
"""Manifest that lists the contents and version of an archive folder."""
def __init__(self, raw, version):
"""Constructs a new manifest.
Args:
raw: string. Raw course definition string.
version: string. Version of Course Builder course this manifest was
generated from.
"""
self._entities = []
self._raw = raw
self._version = version
@classmethod
def from_json(cls, json):
"""Returns a manifest for the given JSON string."""
parsed = transforms.loads(json)
instance = cls(parsed['raw'], parsed['version'])
for entity in parsed['entities']:
instance.add(_ManifestEntity(entity['path'], entity['is_draft']))
return instance
def add(self, entity):
self._entities.append(entity)
def get(self, path):
"""Gets _Entity by path string; returns None if not found."""
for entity in self._entities:
if entity.path == path:
return entity
@property
def entities(self):
return sorted(self._entities, key=lambda e: e.path)
@property
def raw(self):
return self._raw
@property
def version(self):
return self._version
def __str__(self):
"""Returns JSON representation of the manifest."""
manifest = {
'entities': [e.__dict__ for e in self.entities],
'raw': self.raw,
'version': self.version,
}
return transforms.dumps(manifest, indent=2, sort_keys=2)
class _ManifestEntity(object):
"""Object that represents an entity in a manifest."""
def __init__(self, path, is_draft):
self.is_draft = is_draft
self.path = path
class _ReadWrapper(object):
"""Wrapper for raw bytes that supports read()."""
def __init__(self, data):
"""Constructs a new read wrapper.
Args:
data: bytes. The bytes to return on read().
"""
self._data = data
def read(self):
return self._data
def _die(message, with_trace=False):
    """Logs a critical message and exits the process with status 1.

    Args:
        message: string. Message to log before exiting.
        with_trace: bool. If True, append the most recent exception's class,
            message and formatted stack trace to the logged output.
    """
    if with_trace:
        exc_class, exc_message, exc_tb = sys.exc_info()
        message = '%s%s%s%s%s%s%s' % (
            message, os.linesep,
            exc_class, os.linesep,
            exc_message, os.linesep,
            ''.join(traceback.format_tb(exc_tb)))
    _LOG.critical(message)
    sys.exit(1)
def _download(
    download_type, archive_path, course_url_prefix, datastore_types,
    batch_size):
    """Validates and dispatches to a specific download method.

    Args:
        download_type: string. _TYPE_COURSE or _TYPE_DATASTORE.
        archive_path: string. Path of the archive file to create.
        course_url_prefix: string. URL prefix of the course to download.
        datastore_types: list of string or None. Datastore kinds to dump.
        batch_size: int. Number of results to fetch per datastore batch.
    """
    archive_path = os.path.abspath(archive_path)
    context = etl_lib.get_context(course_url_prefix)
    if not context:
        _die('No course found with course_url_prefix %s' % course_url_prefix)
    course = _get_course_from(context)
    if download_type == _TYPE_COURSE:
        _download_course(context, course, archive_path, course_url_prefix)
    # elif (not a second if) so the datastore comparison is skipped once a
    # course download has been dispatched; matches the style of _upload().
    elif download_type == _TYPE_DATASTORE:
        old_namespace = namespace_manager.get_namespace()
        try:
            # Datastore models are namespaced per course; switch before reads.
            namespace_manager.set_namespace(context.get_namespace_name())
            _download_datastore(
                context, course, archive_path, datastore_types, batch_size)
        finally:
            # The namespace is global state; always restore the caller's.
            namespace_manager.set_namespace(old_namespace)
def _download_course(context, course, archive_path, course_url_prefix):
    """Downloads a 1.3+ course's files into a new archive at archive_path.

    Args:
        context: sites.ApplicationContext. Context of the course to download.
        course: courses.Course. The course being downloaded.
        archive_path: string. Absolute path of the archive file to create.
        course_url_prefix: string. URL prefix of the course, for logging.
    """
    # Only 1.3+ courses keep their definition in exportable files.
    if course.version < courses.COURSE_MODEL_VERSION_1_3:
        _die(
            'Cannot export course made with Course Builder version < %s' % (
                courses.COURSE_MODEL_VERSION_1_3))
    archive = _Archive(archive_path)
    archive.open('w')
    manifest = _Manifest(context.raw, course.version)
    _LOG.info('Processing course with URL prefix ' + course_url_prefix)
    # Filesystem-only files are the whitelisted full set (including inherited
    # files) minus those that live in the datastore.
    datastore_files = set(_list_all(context))
    all_files = set(_filter_filesystem_files(_list_all(
        context, include_inherited=True)))
    filesystem_files = all_files - datastore_files
    _LOG.info('Adding files from datastore')
    for external_path in datastore_files:
        internal_path = _Archive.get_internal_path(external_path)
        stream = _get_stream(context, external_path)
        # Record draft status in the manifest so uploads can restore it.
        is_draft = False
        if stream.metadata and hasattr(stream.metadata, 'is_draft'):
            is_draft = stream.metadata.is_draft
        entity = _ManifestEntity(internal_path, is_draft)
        archive.add(internal_path, stream.read())
        manifest.add(entity)
    _LOG.info('Adding files from filesystem')
    for external_path in filesystem_files:
        with open(external_path) as f:
            internal_path = _Archive.get_internal_path(external_path)
            archive.add(internal_path, f.read())
            # Local files have no draft metadata; default to not-draft.
            manifest.add(_ManifestEntity(internal_path, False))
    _finalize_download(archive, manifest)
def _download_datastore(
    context, course, archive_path, datastore_types, batch_size):
    """Dumps requested datastore kinds into a new archive at archive_path.

    Args:
        context: sites.ApplicationContext. Context of the course to download.
        course: courses.Course. The course being downloaded.
        archive_path: string. Absolute path of the archive file to create.
        datastore_types: list of string or None. Kinds to dump; falsy means
            every available kind.
        batch_size: int. Number of rows fetched per datastore round trip.
    """
    available_types = set(_get_datastore_kinds())
    if not datastore_types:
        datastore_types = available_types
    requested_types = set(datastore_types)
    # Fail fast with the full list of valid kinds if any request is unknown.
    missing_types = requested_types - available_types
    if missing_types:
        _die(
            'Requested types not found: %s%sAvailable types are: %s' % (
                ', '.join(missing_types), os.linesep,
                ', '.join(available_types)))
    found_types = requested_types & available_types
    archive = _Archive(archive_path)
    archive.open('w')
    manifest = _Manifest(context.raw, course.version)
    for found_type in found_types:
        # Each kind is dumped to a temporary JSON file beside the archive,
        # copied into the archive, then removed from disk.
        json_path = os.path.join(
            os.path.dirname(archive_path), '%s.json' % found_type)
        _LOG.info(
            'Adding entities of type %s to temporary file %s',
            found_type, json_path)
        json_file = transforms.JsonFile(json_path)
        json_file.open('w')
        _process_models(db.class_for_kind(found_type), json_file, batch_size)
        json_file.close()
        internal_path = _Archive.get_internal_path(
            os.path.basename(json_file.name))
        _LOG.info('Adding %s to archive', internal_path)
        archive.add_local_file(json_file.name, internal_path)
        manifest.add(_ManifestEntity(internal_path, False))
        _LOG.info('Removing temporary file ' + json_file.name)
        os.remove(json_file.name)
    _finalize_download(archive, manifest)
def _filter_filesystem_files(files):
    """Filters out unnecessary files from a local filesystem.

    If we just read from disk, we'll pick up and archive lots of files that we
    don't need to upload later, plus non-userland code that on reupload will
    shadow the system versions (views, assets/lib, etc.).

    Args:
        files: list of string. Absolute file paths.

    Returns:
        List of string. Absolute filepaths we want to archive.
    """
    filtered_files = []
    for path in files:
        relative_name = _remove_bundle_root(path)
        # Generator (not a materialized list) so any() can short-circuit on
        # the first matching exclude prefix.
        excluded = any(
            relative_name.startswith(e) for e in _LOCAL_WHITELIST_EXCLUDES)
        head_directory = relative_name.split(os.path.sep)[0]
        # Keep only non-excluded paths whose top-level entry is whitelisted.
        if not excluded and head_directory in _LOCAL_WHITELIST:
            filtered_files.append(path)
    return filtered_files
def _finalize_download(archive, manifest):
    """Writes the manifest into the archive and closes the archive."""
    _LOG.info('Adding manifest')
    # The manifest's __str__ is its JSON serialization.
    archive.add(_MANIFEST_FILENAME, str(manifest))
    archive.close()
    _LOG.info('Done; archive saved to ' + archive.path)
def _get_course_from(app_context):
    """Gets a courses.Course from the given sites.ApplicationContext."""

    class _Shim(object):
        """Minimal object exposing the app_context attribute Course reads."""

        def __init__(self, app_context):
            self.app_context = app_context

    return courses.Course(_Shim(app_context))
def _import_entity_modules():
    """Import modules that define persistent datastore entities."""
    # TODO(psimakov): Ideally, we would learn how to load entities from the
    # datastore without importing their classes; then we wouldn't need any of
    # these imports below
    # pylint: disable-msg=g-import-not-at-top,global-variable-not-assigned,
    # pylint: disable-msg=redefined-outer-name,unused-variable
    # Rebind the module-level None placeholders so the rest of this file can
    # use these names once sys.path has been configured by the caller.
    global announcements
    global jobs
    try:
        from models import jobs
        from modules.announcements import announcements
    # NOTE: Python 2 'except ImportError, e' syntax; this file targets py2.
    except ImportError, e:
        _die((
            'Unable to import required modules; see tools/etl/etl.py for '
            'docs.'), with_trace=True)
def _import_modules_into_global_scope():
    """Import helper; run after _set_up_sys_path() for imports to resolve."""
    # pylint: disable-msg=g-import-not-at-top,global-variable-not-assigned,
    # pylint: disable-msg=redefined-outer-name,unused-variable
    # Rebind the module-level None placeholders declared near the top of this
    # file; until this runs, those names are unusable.
    global appengine_config
    global namespace_manager
    global db
    global metadata
    global courses
    global transforms
    global vfs
    global etl_lib
    global remote
    try:
        import appengine_config
        from google.appengine.api import namespace_manager
        from google.appengine.ext import db
        from google.appengine.ext.db import metadata
        from models import courses
        from models import transforms
        from models import vfs
        from tools.etl import etl_lib
        from tools.etl import remote
    # NOTE: Python 2 'except ImportError, e' syntax; this file targets py2.
    except ImportError, e:
        _die((
            'Unable to import required modules; see tools/etl/etl.py for '
            'docs.'), with_trace=True)
def _remove_bundle_root(path):
    """Removes BUNDLE_ROOT prefix from a path.

    Args:
        path: string. Path to strip.

    Returns:
        String. path relative to BUNDLE_ROOT with no leading path separator.
    """
    if path.startswith(appengine_config.BUNDLE_ROOT):
        # Fix: slice off the prefix rather than split() on it. split()
        # removes *every* occurrence of the root string, corrupting paths
        # that happen to contain it more than once.
        path = path[len(appengine_config.BUNDLE_ROOT):]
    # Path must not start with path separator so it is os.path.join()able.
    if path.startswith(os.path.sep):
        path = path[1:]
    return path
def _retry(message=None, times=_RETRIES):
    """Returns a decorator that automatically retries functions on error.

    Args:
        message: string or None. The optional message to log on retry.
        times: int. Number of times to retry.

    Returns:
        Function wrapper.
    """
    assert times > 0
    def decorator(fn):
        """Real decorator."""
        # Preserve fn's name/docstring on the wrapper for debugging.
        @functools.wraps(fn)
        def wrapped(*args, **kwargs):
            failures = 0
            while failures < times:
                try:
                    return fn(*args, **kwargs)
                # We can't be more specific by default.
                # pylint: disable-msg=broad-except
                except Exception:
                    if message:
                        _LOG.info(message)
                    failures += 1
                    if failures == times:
                        # Bare raise preserves the original traceback, which
                        # 'raise e' would discard.
                        raise
        return wrapped
    return decorator
@_retry(message='Clearing course cache failed; retrying')
def _clear_course_cache(context):
    """Evicts the cached course for context so the UI reflects new data."""
    courses.CachedCourse13.delete(context)  # Force update in UI.
@_retry(message='Getting list of datastore_types failed; retrying')
def _get_datastore_kinds():
    """Returns user-defined datastore kind names in the active namespace."""
    # Return only user-defined names, not __internal_appengine_names__.
    return [
        k for k in metadata.get_kinds()
        if not _INTERNAL_DATASTORE_KIND_REGEX.match(k)]
@_retry(message='Getting contents for entity failed; retrying')
def _get_stream(context, path):
    """Returns a stream (exposing .metadata and .read()) for path's file."""
    return context.fs.impl.get(path)
@_retry(message='Fetching asset list failed; retrying')
def _list_all(context, include_inherited=False):
    """Lists course files under BUNDLE_ROOT via the context's filesystem."""
    return context.fs.impl.list(
        appengine_config.BUNDLE_ROOT, include_inherited=include_inherited)
def _process_models(kind, json_file, batch_size):
    """Fetch all rows in batches.

    Args:
        kind: db.Model subclass. The datastore kind to dump.
        json_file: transforms.JsonFile. Open file rows are written to.
        batch_size: int. Number of rows to fetch per datastore round trip.
    """
    # Log progress every ten batches so long dumps show signs of life.
    reportable_chunk = batch_size * 10
    total_count = 0
    cursor = None
    while True:
        batch_count, cursor = _process_models_batch(
            kind, cursor, batch_size, json_file)
        if not batch_count:
            break
        # Fix: accumulate the batch before checking the cursor, so rows from
        # a final batch that yields no continuation cursor are still counted
        # in the progress log.
        total_count += batch_count
        if not total_count % reportable_chunk:
            _LOG.info('Loaded records: %s', total_count)
        if not cursor:
            break
@_retry(message='Fetching datastore entity batch failed; retrying')
def _process_models_batch(kind, cursor, batch_size, json_file):
    """Fetch and write out a batch_size number of rows using cursor query.

    Args:
        kind: db.Model subclass. Datastore kind to query.
        cursor: string or None. Query cursor from the previous batch.
        batch_size: int. Maximum number of rows to fetch.
        json_file: transforms.JsonFile. Open output file.

    Returns:
        Tuple of (int, cursor): number of rows written, and the cursor to
        resume from (None when this batch was empty).
    """
    query = kind.all()
    if cursor:
        query.with_cursor(start_cursor=cursor)
    count = 0
    empty = True
    for model in query.fetch(limit=batch_size):
        entity_dict = transforms.entity_to_dict(
            model, force_utf_8_encoding=True)
        # Preserve the entity's key name; unicode() is Python 2 only.
        entity_dict['key.name'] = unicode(model.key().name())
        json_file.write(transforms.dict_to_json(entity_dict, None))
        count += 1
        empty = False
    cursor = None
    if not empty:
        # The cursor marks where the next batch should resume.
        cursor = query.cursor()
    return count, cursor
@_retry(message='Upload failed; retrying')
def _put(context, content, path, is_draft):
    """Writes content (a file-like with read()) to path under BUNDLE_ROOT."""
    context.fs.impl.non_transactional_put(
        os.path.join(appengine_config.BUNDLE_ROOT, path), content,
        is_draft=is_draft)
def _run_custom(parsed_args):
    """Imports and runs the etl_lib.Job subclass named by parsed_args.type."""
    try:
        # parsed_args.type is a dotted path like 'tools.etl.examples.MyJob'.
        module_name, job_class_name = parsed_args.type.rsplit('.', 1)
        module = __import__(module_name, globals(), locals(), [job_class_name])
        job_class = getattr(module, job_class_name)
        assert issubclass(job_class, etl_lib.Job)
        job = job_class(parsed_args)
    except:  # Any error means death. pylint: disable-msg=bare-except
        _die(
            'Unable to import and instantiate %s, or not of type %s' % (
                parsed_args.type, etl_lib.Job.__name__),
            with_trace=True)
    # _die() exits the process, so job is always bound when we get here.
    job.run()
def _upload(upload_type, archive_path, course_url_prefix):
    """Validates the target course and dispatches to an upload method."""
    _LOG.info(
        'Processing course with URL prefix %s from archive path %s',
        course_url_prefix, archive_path)
    context = etl_lib.get_context(course_url_prefix)
    if not context:
        _die('No course found with course_url_prefix %s' % course_url_prefix)
    # Dispatch table keyed by upload type; unknown types are a no-op, exactly
    # as with the previous if/elif chain.
    handlers = {
        _TYPE_COURSE: lambda: _upload_course(
            context, archive_path, course_url_prefix),
        _TYPE_DATASTORE: _upload_datastore,
    }
    handler = handlers.get(upload_type)
    if handler:
        handler()
def _upload_course(context, archive_path, course_url_prefix):
    """Uploads course files from an archive into an empty course.

    Args:
        context: sites.ApplicationContext. Target course context.
        archive_path: string. Path of the archive file to read.
        course_url_prefix: string. URL prefix of the target course.
    """
    course = _get_course_from(context)
    # Refuse to clobber an existing course's content.
    if course.get_units():
        _die(
            'Cannot upload to non-empty course with course_url_prefix %s' % (
                course_url_prefix))
    archive = _Archive(archive_path)
    try:
        archive.open('r')
    except IOError:
        _die('Cannot open archive_path ' + archive_path)
    # Validate course.json (1.3 datastore-backed courses) before any writes.
    course_json = archive.get(
        _Archive.get_internal_path(_COURSE_JSON_PATH_SUFFIX))
    if course_json:
        try:
            courses.PersistentCourse13().deserialize(course_json)
        except (AttributeError, ValueError):
            _die((
                'Cannot upload archive at %s containing malformed '
                'course.json') % archive_path)
    # Validate course.yaml before any writes.
    course_yaml = archive.get(
        _Archive.get_internal_path(_COURSE_YAML_PATH_SUFFIX))
    if course_yaml:
        try:
            yaml.safe_load(course_yaml)
        except Exception:  # pylint: disable-msg=broad-except
            _die((
                'Cannot upload archive at %s containing malformed '
                'course.yaml') % archive_path)
    _LOG.info('Validation passed; beginning upload')
    count = 0
    for entity in archive.manifest.entities:
        external_path = _Archive.get_external_path(entity.path)
        # _put expects a file-like object; wrap the raw archive bytes.
        _put(
            context, _ReadWrapper(archive.get(entity.path)), external_path,
            entity.is_draft)
        count += 1
        _LOG.info('Uploaded ' + external_path)
    # Evict the cached course so the UI shows the uploaded content.
    _clear_course_cache(context)
    _LOG.info(
        'Done; %s entit%s uploaded', count, 'y' if count == 1 else 'ies')
def _upload_datastore():
    """Stub for future datastore entity uploader."""
    # Datastore upload is not supported yet; etl.py documents this mode as
    # download-only.
    raise NotImplementedError
def _validate_arguments(parsed_args):
    """Validate parsed args for additional constraints."""
    mode = parsed_args.mode
    # download and upload both require a target archive file.
    needs_archive = mode in {_MODE_DOWNLOAD, _MODE_UPLOAD}
    if needs_archive and not parsed_args.archive_path:
        _die('--archive_path missing')
    if parsed_args.batch_size < 1:
        _die('--batch_size must be a positive value')
    # Never overwrite an existing archive on download.
    if mode == _MODE_DOWNLOAD and os.path.exists(parsed_args.archive_path):
        _die(
            'Cannot download to archive path %s; file already exists' % (
                parsed_args.archive_path))
def main(parsed_args, environment_class=None):
    """Performs the requested ETL operation.

    Args:
        parsed_args: argparse.Namespace. Parsed command-line arguments.
        environment_class: None or remote.Environment. Environment setup class
            used to configure the service stub map. Injectable for tests only;
            defaults to remote.Environment if not specified.
    """
    _validate_arguments(parsed_args)
    _LOG.setLevel(parsed_args.log_level.upper())
    # sys.path must already be configured by the caller; these resolve lazily.
    _import_modules_into_global_scope()
    _import_entity_modules()
    if not environment_class:
        environment_class = remote.Environment
    _LOG.info('Mode is %s', parsed_args.mode)
    _LOG.info(
        'Target is url %s from application_id %s on server %s',
        parsed_args.course_url_prefix, parsed_args.application_id,
        parsed_args.server)
    # Point App Engine service stubs at the remote endpoint.
    environment_class(
        parsed_args.application_id, parsed_args.server).establish()
    if parsed_args.mode == _MODE_DOWNLOAD:
        _download(
            parsed_args.type, parsed_args.archive_path,
            parsed_args.course_url_prefix, parsed_args.datastore_types,
            parsed_args.batch_size)
    # elif throughout (the original mixed if/if/elif): modes are mutually
    # exclusive, so skip remaining comparisons once one is dispatched.
    elif parsed_args.mode == _MODE_RUN:
        _run_custom(parsed_args)
    elif parsed_args.mode == _MODE_UPLOAD:
        _upload(
            parsed_args.type, parsed_args.archive_path,
            parsed_args.course_url_prefix)
# Script entry point: parse command-line flags and run the requested mode.
if __name__ == '__main__':
    main(PARSER.parse_args())
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Examples of custom extract-transform-load jobs.
Custom jobs are run via tools/etl/etl.py. You must do environment setup before
etl.py can be invoked; see its module docstring for details.
See tools/etl/etl_lib.py for documentation on writing Job subclasses.
"""
__author__ = [
'johncox@google.com',
]
import os
import sys
import appengine_config
from models import models
from tools.etl import etl_lib
from google.appengine.api import memcache
from google.appengine.api import namespace_manager
class PrintMemcacheStats(etl_lib.Job):
    """Example job that prints remote memcache statistics.

    Usage:
    etl.py run tools.etl.examples.PrintMemcacheStats /course myapp \
    server.appspot.com

    Arguments to etl.py are documented in tools/etl/etl.py. You must do some
    environment configuration (setting up imports, mostly) before you can run
    etl.py; see the tools/etl/etl.py module-level docstring for details.
    """
    # String. Template to use when printing memcache stats.
    _STATS_TEMPLATE = """Global memcache stats:
\tHits: %(hits)s
\tItems in cache: %(items)s
\tMisses: %(misses)s
\tOldest item in seconds: %(oldest_item_age)s
\tTotal bytes in cache: %(bytes)s
\tTotal bytes retrieved via get: %(byte_hits)s"""
    def main(self):
        """Fetches and prints global memcache statistics from the server."""
        # Custom jobs execute locally, but can talk to remote services like the
        # datastore and memcache. Here we get the same memcache stats you can
        # see in the Memcache Viewer part of App Engine's admin console.
        print self._STATS_TEMPLATE % memcache.get_stats()
class UploadFileToCourse(etl_lib.Job):
    """Example job that writes a single local file to a remote server.

    Usage:
    etl.py run tools.etl.examples.UploadFileToCourse /course myapp \
    server.appspot.com --job_args='/path/to/local/file path/to/remote/file'

    Arguments to etl.py are documented in tools/etl/etl.py. You must do some
    environment configuration (setting up imports, mostly) before you can run
    etl.py; see the tools/etl/etl.py module-level docstring for details.
    """

    def _configure_parser(self):
        """Registers the job's two required positional arguments."""
        register = self.parser.add_argument
        register(
            'path', help='Absolute path of the file to upload', type=str)
        register(
            'target',
            help=('Internal Course Builder path to upload to (e.g. '
                  '"assets/img/logo.png")'), type=str)

    def main(self):
        """Validates arguments, then writes the file to the remote course."""
        # Arguments were parsed before main() ran; validate beyond what
        # argparse can express here.
        if not os.path.exists(self.args.path):
            sys.exit('%s does not exist' % self.args.path)
        # Resolve the requested course from the etl.py-level arguments.
        context = etl_lib.get_context(self.etl_args.course_url_prefix)
        # Absolute destination path under the course bundle root.
        remote_path = os.path.join(
            appengine_config.BUNDLE_ROOT, self.args.target)
        with open(self.args.path) as upload_source:
            # A real program would also guard against overwriting files, etc.
            context.fs.impl.put(remote_path, upload_source, is_draft=False)
class WriteStudentEmailsToFile(etl_lib.Job):
    """Example job that reads student emails from remote server to local file.

    Usage:
    etl.py run tools.etl.examples.WriteStudentEmailsToFile /course myapp \
    server.appspot.com --job_args=/path/to/output_file

    Arguments to etl.py are documented in tools/etl/etl.py. You must do some
    environment configuration (setting up imports, mostly) before you can run
    etl.py; see the tools/etl/etl.py module-level docstring for details.
    """

    def _configure_parser(self):
        """Registers this job's command-line arguments on self.parser."""
        self.parser.add_argument(
            'path', help='Absolute path to save output to', type=str)
        self.parser.add_argument(
            '--batch_size', default=20,
            help='Number of students to download in each batch', type=int)

    def main(self):
        """Fetches student keys and writes one email per line to args.path."""
        # By the time main() is invoked, arguments are parsed and available as
        # self.args. Do validation beyond what argparse can express here.
        if self.args.batch_size < 1:
            # Fix: message previously said '--batch size', which does not
            # match the actual flag name '--batch_size'.
            sys.exit('--batch_size must be positive')
        if os.path.exists(self.args.path):
            sys.exit('Cannot download to %s; file exists' % self.args.path)
        # Arguments passed to etl.py are also parsed and available as
        # self.etl_args. Here we use them to figure out the requested course's
        # namespace.
        namespace = etl_lib.get_context(
            self.etl_args.course_url_prefix).get_namespace_name()
        # Because our models are namespaced, we need to change to the
        # requested course's namespace before doing datastore reads or we
        # won't find its data. Save the current namespace to restore later.
        old_namespace = namespace_manager.get_namespace()
        try:
            namespace_manager.set_namespace(namespace)
            # For this example, we'll only process the first 1000 results. Can
            # do a keys_only query because the student's email is key.name().
            keys = models.Student.all(keys_only=True).fetch(1000)
        finally:
            # The current namespace is global state. We must change it back to
            # the old value no matter what to prevent corrupting datastore
            # operations that run after us.
            namespace_manager.set_namespace(old_namespace)
        # Write the results, one email per line. ('%s\n' % ... replaces the
        # redundant str() wrapped around a string concatenation.)
        with open(self.args.path, 'w') as f:
            for key in keys:
                f.write('%s\n' % key.name())
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Allows export of Lessons and Units to other systems."""
__author__ = 'psimakov@google.com (Pavel Simakov)'
from datetime import datetime
import os
import verify
# Version tag stamped into the header of every exported file.
RELEASE_TAG = '1.0'


def echo(unused_x):
    """No-op progress callback handed to the course verifier."""
# JavaScript shim prepended to every export so generated code can build
# regular expressions the same way in each target language.
JS_GCB_REGEX = """
function gcb_regex(base, modifiers) {
  // NB: base should already have backslashes escaped
  return new RegExp(base, modifiers);
}
"""


def export_to_javascript(filename, lines, date):
    """Creates JavaScript export function from given lines and writes a file.

    Args:
        filename: base name of the output file; a '.js' suffix is appended.
        lines: list of strings of generated course code, one per line.
        date: datetime stamped into the file header for traceability.
    """
    code = []
    code.append(JS_GCB_REGEX)
    code.append('function gcb_import(){')
    for line in lines:
        # Avoid emitting indentation-only whitespace for blank lines.
        if line:
            code.append(' %s' % line)
        else:
            code.append('')
    code.append('')
    code.append(' course = Array();')
    code.append(' course["units"] = units;')
    code.append(' course["assessments"] = assessments;')
    code.append(' return course;')
    code.append('}')
    # 'with' guarantees the file is closed even if a write raises, unlike
    # the bare open()/close() pair this replaces.
    with open('%s.js' % filename, 'w') as afile:
        afile.write('// Course Builder %s JavaScript Export on %s\n' % (
            RELEASE_TAG, date))
        afile.write('// begin\n')
        afile.write('\n'.join(code))
        afile.write('\n// end')
# Python shim mirroring JS_GCB_REGEX: maps JavaScript-style regex modifier
# letters onto re flags. Indentation inside this literal is significant —
# it becomes part of the generated .py file.
PYTHON_GCB_REGEX = """
import re


def gcb_regex(base, modifiers):
    flags = 0
    if 'i' in modifiers:
        flags |= re.IGNORECASE
    if 'm' in modifiers:
        flags |= re.MULTILINE
    return re.compile(base, flags)
"""


def export_to_python(filename, lines, date):
    """Creates Python export function from given lines and writes a file.

    Args:
        filename: base name of the output file; a '.py' suffix is appended.
        lines: list of strings of generated course code, one per line.
        date: datetime stamped into the file header for traceability.
    """
    code = []
    # Array and true/false shims let the JavaScript-flavored generated code
    # run unchanged as Python.
    code.append('class Array(dict):')
    code.append(' pass')
    code.append('')
    code.append('true = True')
    code.append('false = False')
    code.append(PYTHON_GCB_REGEX)
    code.append('def gcb_import():')
    for line in lines:
        code.append(' %s' % line)
    code.append('')
    code.append(' course = Array();')
    code.append(' course["units"] = units;')
    code.append(' course["assessments"] = assessments;')
    code.append(' return course;')
    # 'with' guarantees the file is closed even if a write raises, unlike
    # the bare open()/close() pair this replaces.
    with open('%s.py' % filename, 'w') as afile:
        afile.write('# Course Builder %s Python Export on %s\n' % (
            RELEASE_TAG, date))
        afile.write('# begin\n')
        afile.write('\n'.join(code))
        afile.write('\n# end')
# TODO(psimakov): implement PHP_GCB_REGEX, but it's unclear how to return a new
# regexp object in PHP. maybe see http://www.regular-expressions.info/php.html
def export_to_php(filename, lines, date):
    """Creates PHP export function from given lines and writes a file.

    Args:
        filename: base name of the output file; a '.php' suffix is appended.
        lines: list of strings of generated course code, one per line.
        date: datetime stamped into the file header for traceability.
    """
    code = []
    code.append('function gcb_import(){')
    for line in lines:
        # Prefix each generated statement with '$' so assignments of the
        # form 'name = ...' become PHP variable assignments.
        if line:
            code.append(' $%s' % line)
        else:
            code.append('')
    code.append('')
    code.append(' $course = Array();')
    code.append(' $course["units"] = $units;')
    code.append(' $course["assessments"] = $assessments;')
    code.append(' return $course;')
    code.append('}')
    # 'with' guarantees the file is closed even if a write raises, unlike
    # the bare open()/close() pair this replaces.
    with open('%s.php' % filename, 'w') as afile:
        afile.write('<?php\n')
        afile.write('// Course Builder %s PHP Export on %s\n' %
                    (RELEASE_TAG, date))
        afile.write('// begin\n')
        afile.write('\n'.join(code))
        afile.write('\n// end')
        afile.write('?>')
def export_to_file(filename, lines):
    """Exports course lines as JavaScript, Python and PHP files.

    All three files share the same timestamp so their headers agree.
    """
    timestamp = datetime.utcnow()
    for exporter in (export_to_javascript, export_to_python, export_to_php):
        exporter(filename, lines, timestamp)
if __name__ == '__main__':
    # Command-line entry point: verify the course data model first, then
    # export it to files in the current working directory.
    print 'Export started using %s' % os.path.realpath(__file__)
    verifier = verify.Verifier()
    # echo is a no-op progress callback; only the returned errors matter.
    errors = verifier.load_and_verify_model(echo)
    if errors:
        raise Exception('Please fix all errors reported by tools/verify.py '
                        'before continuing!')
    fname = os.path.join(os.getcwd(), 'coursebuilder_course')
    # verifier.export holds the generated course code lines to emit.
    export_to_file(fname, verifier.export)
    print 'Export complete to %s' % fname
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and methods to manage all aspects of student assessments."""
__author__ = 'pgbovine@google.com (Philip Guo)'
import datetime
import logging
from models import courses
from models import models
from models import review
from models import student_work
from models import transforms
from models import utils
from models.models import Student
from models.models import StudentAnswersEntity
from tools import verify
from utils import BaseHandler
from utils import HUMAN_READABLE_DATETIME_FORMAT
from google.appengine.ext import db
def store_score(course, student, assessment_type, score):
    """Stores a student's score on a particular assessment.

    Args:
        course: the course containing the assessment.
        student: the student whose data is stored.
        assessment_type: the type of the assessment.
        score: the student's score on this assessment.

    Returns:
        the result of the assessment, if appropriate.
    """
    # FIXME: Course creators can edit this code to implement custom
    # assessment scoring and storage behavior
    # TODO(pgbovine): Note that the latest version of answers are always saved,
    # but scores are only saved if they're higher than the previous attempt.
    # This can lead to unexpected analytics behavior. Resolve this.
    previous_score = course.get_score(student, assessment_type)

    # Stored scores are not ints; cast before comparing. Keep only a strict
    # improvement over the previous attempt.
    if previous_score is not None and score <= int(previous_score):
        return
    utils.set_score(student, assessment_type, score)
class AnswerHandler(BaseHandler):
    """Handler for saving assessment answers."""

    # Find student entity and save answers
    @db.transactional(xg=True)
    def update_assessment_transaction(
        self, email, assessment_type, new_answers, score):
        """Stores answer and updates user scores.

        Runs as a cross-group datastore transaction so the Student and
        StudentAnswersEntity writes succeed or fail together.

        Args:
            email: the student's email address.
            assessment_type: the type of the assessment (as stated in
                unit.csv).
            new_answers: the latest set of answers supplied by the student.
            score: the numerical assessment score.

        Returns:
            the student instance.
        """
        # NOTE(review): get_enrolled_student_by_email can presumably return
        # None for an unenrolled student, which would make the attribute
        # access below raise — confirm callers guarantee enrollment.
        student = Student.get_enrolled_student_by_email(email)
        course = self.get_course()

        # It may be that old Student entities don't have user_id set; fix it.
        if not student.user_id:
            student.user_id = self.get_user().user_id()

        # Answers are keyed by user_id; create the entity on first submission.
        answers = StudentAnswersEntity.get_by_key_name(student.user_id)
        if not answers:
            answers = StudentAnswersEntity(key_name=student.user_id)
        answers.updated_on = datetime.datetime.now()

        # The latest answers always overwrite; scores only improve (see
        # store_score).
        utils.set_answer(answers, assessment_type, new_answers)
        store_score(course, student, assessment_type, score)

        student.put()
        answers.put()

        # Also record the event, which is useful for tracking multiple
        # submissions and history.
        models.EventEntity.record(
            'submit-assessment', self.get_user(), transforms.dumps({
                'type': 'assessment-%s' % assessment_type,
                'values': new_answers, 'location': 'AnswerHandler'}))

        return student

    def post(self):
        """Handles POST requests: validates, scores and stores a submission."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        if not self.assert_xsrf_token_or_fail(self.request, 'assessment-post'):
            return

        course = self.get_course()
        # The assessment type doubles as the unit id of the assessment unit.
        assessment_type = self.request.get('assessment_type')
        if not assessment_type:
            self.error(404)
            logging.error('No assessment type supplied.')
            return
        unit = course.find_unit_by_id(assessment_type)
        if unit is None or unit.type != verify.UNIT_TYPE_ASSESSMENT:
            self.error(404)
            logging.error('No assessment named %s exists.', assessment_type)
            return

        self.template_value['navbar'] = {'course': True}
        self.template_value['assessment'] = assessment_type
        self.template_value['assessment_name'] = unit.title
        self.template_value['is_last_assessment'] = (
            course.is_last_assessment(unit))

        # Convert answers from JSON to dict.
        answers = self.request.get('answers')
        answers = transforms.loads(answers) if answers else []

        grader = unit.workflow.get_grader()

        # Scores are not recorded for human-reviewed assignments.
        score = 0
        if grader == courses.AUTO_GRADER:
            score = int(round(float(self.request.get('score'))))

        # Record assessment transaction.
        student = self.update_assessment_transaction(
            student.key().name(), assessment_type, answers, score)

        if grader == courses.HUMAN_GRADER:
            rp = course.get_reviews_processor()

            # Guard against duplicate submissions of a human-graded
            # assessment.
            previously_submitted = rp.does_submission_exist(
                unit.unit_id, student.get_key())

            if not previously_submitted:
                # Check that the submission due date has not passed.
                time_now = datetime.datetime.now()
                submission_due_date = unit.workflow.get_submission_due_date()
                # NOTE(review): if a workflow defines no due date this
                # compares a datetime to None — confirm due dates are
                # always set for human-graded assessments.
                if time_now > submission_due_date:
                    self.template_value['time_now'] = time_now.strftime(
                        HUMAN_READABLE_DATETIME_FORMAT)
                    self.template_value['submission_due_date'] = (
                        submission_due_date.strftime(
                            HUMAN_READABLE_DATETIME_FORMAT))
                    self.template_value['error_code'] = (
                        'assignment_deadline_exceeded')
                    self.render('error.html')
                    return

                # First submission: persist it and kick off peer review.
                submission_key = student_work.Submission.write(
                    unit.unit_id, student.get_key(), answers)
                rp.start_review_process_for(
                    unit.unit_id, submission_key, student.get_key())
                # Record completion event in progress tracker.
                course.get_progress_tracker().put_assessment_completed(
                    student, assessment_type)

            self.template_value['previously_submitted'] = previously_submitted

            matcher = unit.workflow.get_matcher()
            self.template_value['matcher'] = matcher
            if matcher == review.PEER_MATCHER:
                self.template_value['review_dashboard_url'] = (
                    'reviewdashboard?unit=%s' % unit.unit_id
                )

            self.render('reviewed_assessment_confirmation.html')
            return
        else:
            # Record completion event in progress tracker.
            course.get_progress_tracker().put_assessment_completed(
                student, assessment_type)

            # Save the submission in the datastore, overwriting the earlier
            # version if it exists.
            submission_key = student_work.Submission.write(
                unit.unit_id, student.get_key(), answers)

            self.template_value['result'] = course.get_overall_result(student)
            self.template_value['score'] = score
            self.template_value['overall_score'] = course.get_overall_score(
                student)
            self.render('test_confirmation.html')
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for generating various frontend pages."""
__author__ = 'Saifu Angto (saifu@google.com)'
import datetime
import urllib
import urlparse
from models import models
from models import student_work
from models import transforms
from models.config import ConfigProperty
from models.counters import PerfCounter
from models.review import ReviewUtils
from models.roles import Roles
from models.student_work import StudentWorkUtils
from modules.review import domain
from tools import verify
from utils import BaseHandler
from utils import BaseRESTHandler
from utils import CAN_PERSIST_PAGE_EVENTS
from utils import HUMAN_READABLE_DATETIME_FORMAT
from utils import XsrfTokenManager
from google.appengine.ext import db
# Whether to record events in a database.
CAN_PERSIST_ACTIVITY_EVENTS = ConfigProperty(
    'gcb_can_persist_activity_events', bool, (
        'Whether or not to record student activity interactions in a '
        'datastore. Without event recording, you cannot analyze student '
        'activity interactions. On the other hand, no event recording reduces '
        'the number of datastore operations and minimizes the use of Google '
        'App Engine quota. Turn event recording on if you want to analyze '
        'this data.'),
    False)

# Counts every activity/assessment event the server receives, persisted
# or not.
COURSE_EVENTS_RECEIVED = PerfCounter(
    'gcb-course-events-received',
    'A number of activity/assessment events received by the server.')

# Counts only the events actually recorded in the datastore.
COURSE_EVENTS_RECORDED = PerfCounter(
    'gcb-course-events-recorded',
    'A number of activity/assessment events recorded in a datastore.')

# Values for the 'page_type' template variable; templates use these to tell
# unit pages and activity pages apart.
UNIT_PAGE_TYPE = 'unit'
ACTIVITY_PAGE_TYPE = 'activity'
def extract_unit_and_lesson(handler):
    """Loads unit and lesson specified in the request.

    Falls back to the course's first unit and the unit's first lesson when
    the request does not name them explicitly.

    Args:
        handler: the request handler whose request and course are inspected.

    Returns:
        A (unit, lesson) tuple; (None, None) when no unit can be resolved.
    """
    # Resolve the requested unit, defaulting to the course's first unit.
    requested_unit_id = handler.request.get('unit')
    unit = handler.get_course().find_unit_by_id(requested_unit_id)
    if not unit:
        for candidate in handler.get_course().get_units():
            if candidate.type == verify.UNIT_TYPE_UNIT:
                unit = candidate
                break
    if not unit:
        return None, None

    # Resolve the requested lesson, defaulting to the unit's first lesson.
    requested_lesson_id = handler.request.get('lesson')
    lessons = handler.get_course().get_lessons(unit.unit_id)
    if requested_lesson_id:
        lesson = handler.get_course().find_lesson_by_id(
            unit, requested_lesson_id)
    else:
        lesson = lessons[0] if lessons else None
    return unit, lesson
def get_unit_and_lesson_id_from_url(url):
    """Extracts unit and lesson ids from a URL.

    Args:
        url: string. URL of the form '...?unit=<id>&lesson=<id>'.

    Returns:
        A (unit_id, lesson_id) tuple of strings. Either element is None when
        the corresponding query parameter is absent; previously a missing
        parameter raised KeyError, although the caller (process_event)
        explicitly checks for None values.
    """
    url_components = urlparse.urlparse(url)
    query_dict = urlparse.parse_qs(url_components.query)
    # parse_qs maps each present key to a non-empty list of values; use the
    # first value, or None when the key is missing entirely.
    unit_id = query_dict.get('unit', [None])[0]
    lesson_id = query_dict.get('lesson', [None])[0]
    return unit_id, lesson_id
def create_readonly_assessment_params(content, answers):
    """Creates parameters for a readonly assessment in the view templates.

    Args:
        content: dict with an 'assessment' entry holding 'preamble' and
            'questionsList'.
        answers: the answer list to display alongside the questions.

    Returns:
        A dict with 'preamble', 'questionsList' and 'answers' keys.
    """
    assessment = content['assessment']
    return {
        'preamble': assessment['preamble'],
        'questionsList': assessment['questionsList'],
        'answers': answers,
    }
class CourseHandler(BaseHandler):
    """Handler for generating course page."""

    @classmethod
    def get_child_routes(cls):
        """Add child handlers for REST."""
        return [('/rest/events', EventsRESTHandler)]

    def augment_assessment_units(self, student):
        """Adds additional fields to assessment units.

        Reads the units already stored in template_value['units'] and
        annotates each assessment unit ('A' type) with review-related
        attributes the course template renders.
        """
        course = self.get_course()
        rp = course.get_reviews_processor()
        for unit in self.template_value['units']:
            # Only assessment units ('A') need augmentation.
            if unit.type == 'A':
                unit.needs_human_grader = course.needs_human_grader(unit)
                if unit.needs_human_grader:
                    review_steps = rp.get_review_steps_by(
                        unit.unit_id, student.get_key())
                    review_min_count = unit.workflow.get_review_min_count()

                    unit.matcher = unit.workflow.get_matcher()
                    unit.review_progress = ReviewUtils.get_review_progress(
                        review_steps, review_min_count,
                        course.get_progress_tracker()
                    )

                    unit.is_submitted = rp.does_submission_exist(
                        unit.unit_id, student.get_key())

    def get(self):
        """Handles GET requests."""
        user = self.personalize_page_and_get_user()
        if not user:
            # Anonymous visitors are sent to the course preview page.
            self.redirect('/preview')
            return None

        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        # augment_assessment_units reads template_value['units'], so units
        # must be assigned before it runs.
        self.template_value['units'] = self.get_units()
        self.augment_assessment_units(student)

        self.template_value['progress'] = (
            self.get_progress_tracker().get_unit_progress(student))
        self.template_value['is_progress_recorded'] = (
            CAN_PERSIST_ACTIVITY_EVENTS.value)
        self.template_value['navbar'] = {'course': True}
        self.render('course.html')
class UnitHandler(BaseHandler):
    """Handler for generating unit page."""

    def get(self):
        """Handles GET requests."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        # Extract incoming args
        unit, lesson = extract_unit_and_lesson(self)
        # NOTE(review): extract_unit_and_lesson can return (None, None);
        # the attribute access below would then raise — confirm routing
        # guarantees at least one unit exists.
        unit_id = unit.unit_id

        # If the unit is not currently available, and the user is not an
        # admin, redirect to the main page.
        if (not unit.now_available and
                not Roles.is_course_admin(self.app_context)):
            self.redirect('/')
            return

        # Set template values for nav bar and page type.
        self.template_value['navbar'] = {'course': True}
        self.template_value['page_type'] = UNIT_PAGE_TYPE

        lessons = self.get_lessons(unit_id)

        # Set template values for a unit and its lesson entities
        self.template_value['unit'] = unit
        self.template_value['unit_id'] = unit_id
        self.template_value['lesson'] = lesson
        if lesson:
            self.template_value['objectives'] = lesson.objectives

        self.template_value['lessons'] = lessons

        # If this unit contains no lessons, return.
        if not lesson:
            self.render('unit.html')
            return

        lesson_id = lesson.lesson_id
        self.template_value['lesson_id'] = lesson_id

        index = lesson.index - 1  # indexes are 1-based

        # Format back button. A lesson with an activity routes navigation
        # through its activity page so every page is visited in order.
        if index == 0:
            self.template_value['back_button_url'] = ''
        else:
            prev_lesson = lessons[index - 1]
            if prev_lesson.activity:
                self.template_value['back_button_url'] = (
                    'activity?unit=%s&lesson=%s' % (
                        unit_id, prev_lesson.lesson_id))
            else:
                self.template_value['back_button_url'] = (
                    'unit?unit=%s&lesson=%s' % (
                        unit_id, prev_lesson.lesson_id))

        # Format next button; a lesson's own activity page comes before the
        # next lesson.
        if lesson.activity:
            self.template_value['next_button_url'] = (
                'activity?unit=%s&lesson=%s' % (
                    unit_id, lesson_id))
        else:
            if index >= len(lessons) - 1:
                # Last lesson of the unit: no forward navigation.
                self.template_value['next_button_url'] = ''
            else:
                next_lesson = lessons[index + 1]
                self.template_value['next_button_url'] = (
                    'unit?unit=%s&lesson=%s' % (
                        unit_id, next_lesson.lesson_id))

        # Set template values for student progress
        self.template_value['is_progress_recorded'] = (
            CAN_PERSIST_ACTIVITY_EVENTS.value)
        if CAN_PERSIST_ACTIVITY_EVENTS.value:
            self.template_value['progress'] = (
                self.get_progress_tracker().get_lesson_progress(
                    student, unit_id))

        self.render('unit.html')
class ActivityHandler(BaseHandler):
    """Handler for generating activity page and receiving submissions."""

    def get(self):
        """Handles GET requests."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        # Extract incoming args
        unit, lesson = extract_unit_and_lesson(self)
        # NOTE(review): extract_unit_and_lesson can return (None, None);
        # confirm routing guarantees a unit before this attribute access.
        unit_id = unit.unit_id

        # If the unit is not currently available, and the user is not an
        # admin, redirect to the main page.
        if (not unit.now_available and
                not Roles.is_course_admin(self.app_context)):
            self.redirect('/')
            return

        # Set template values for nav bar and page type.
        self.template_value['navbar'] = {'course': True}
        self.template_value['page_type'] = ACTIVITY_PAGE_TYPE

        lessons = self.get_lessons(unit_id)

        # Set template values for a unit and its lesson entities
        self.template_value['unit'] = unit
        self.template_value['unit_id'] = unit_id
        self.template_value['lesson'] = lesson
        self.template_value['lessons'] = lessons

        # If this unit contains no lessons, return.
        if not lesson:
            self.render('activity.html')
            return

        lesson_id = lesson.lesson_id
        self.template_value['lesson_id'] = lesson_id
        self.template_value['activity_script_src'] = (
            self.get_course().get_activity_filename(unit_id, lesson_id))

        index = lesson.index - 1  # indexes are 1-based

        # Format back button: an activity page always links back to its
        # own lesson page.
        self.template_value['back_button_url'] = (
            'unit?unit=%s&lesson=%s' % (unit_id, lesson_id))

        # Format next button.
        if index >= len(lessons) - 1:
            self.template_value['next_button_url'] = ''
        else:
            next_lesson = lessons[index + 1]
            self.template_value['next_button_url'] = (
                'unit?unit=%s&lesson=%s' % (
                    unit_id, next_lesson.lesson_id))

        # Set template value for event recording
        self.template_value['record_events'] = (
            CAN_PERSIST_ACTIVITY_EVENTS.value)

        # Set template values for student progress
        self.template_value['is_progress_recorded'] = (
            CAN_PERSIST_ACTIVITY_EVENTS.value)
        if CAN_PERSIST_ACTIVITY_EVENTS.value:
            self.template_value['progress'] = (
                self.get_progress_tracker().get_lesson_progress(
                    student, unit_id))

        self.template_value['event_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('event-post'))

        # Mark this page as accessed. This is done after setting the student
        # progress template value, so that the mark only shows up after the
        # student visits the page for the first time.
        self.get_course().get_progress_tracker().put_activity_accessed(
            student, unit_id, lesson_id)

        self.render('activity.html')
class AssessmentHandler(BaseHandler):
    """Handler for generating assessment page."""

    def get(self):
        """Handles GET requests."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        # Extract incoming args
        unit_id = self.request.get('name')
        course = self.get_course()
        unit = course.find_unit_by_id(unit_id)
        if not unit:
            self.error(404)
            return

        self.template_value['navbar'] = {'course': True}
        self.template_value['unit_id'] = unit_id
        self.template_value['record_events'] = (
            CAN_PERSIST_ACTIVITY_EVENTS.value)
        self.template_value['assessment_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('assessment-post'))
        self.template_value['event_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('event-post'))

        self.template_value['grader'] = unit.workflow.get_grader()

        # readonly_view / due_date_exceeded accumulate the reasons the
        # assessment form must be rendered non-editable.
        readonly_view = False
        due_date_exceeded = False

        submission_due_date = unit.workflow.get_submission_due_date()
        if submission_due_date:
            self.template_value['submission_due_date'] = (
                submission_due_date.strftime(HUMAN_READABLE_DATETIME_FORMAT))

            time_now = datetime.datetime.now()
            if time_now > submission_due_date:
                readonly_view = True
                due_date_exceeded = True
                self.template_value['due_date_exceeded'] = True

        if course.needs_human_grader(unit):
            self.template_value['matcher'] = unit.workflow.get_matcher()

            rp = course.get_reviews_processor()
            review_steps_by = rp.get_review_steps_by(
                unit.unit_id, student.get_key())

            # Determine if the student can see others' reviews of his/her
            # work.
            if (ReviewUtils.has_completed_enough_reviews(
                    review_steps_by, unit.workflow.get_review_min_count())):
                submission_and_review_steps = (
                    rp.get_submission_and_review_steps(
                        unit.unit_id, student.get_key()))
                submission_contents = submission_and_review_steps[0]
                review_steps_for = submission_and_review_steps[1]

                # A review is shown only when completed, not removed, and
                # actually written (it has a review key).
                review_keys_for_student = []
                for review_step in review_steps_for:
                    can_show_review = (
                        review_step.state == domain.REVIEW_STATE_COMPLETED
                        and not review_step.removed
                        and review_step.review_key
                    )

                    if can_show_review:
                        review_keys_for_student.append(review_step.review_key)

                reviews_for_student = rp.get_reviews_by_keys(
                    unit.unit_id, review_keys_for_student)

                # NOTE(review): the comprehension variable 'review' shadows
                # the 'models.review' module imported at the top of the file
                # (Python 2 list comprehensions leak their loop variable).
                self.template_value['reviews_received'] = [
                    create_readonly_assessment_params(
                        course.get_review_form_content(unit),
                        StudentWorkUtils.get_answer_list(review)
                    ) for review in reviews_for_student]
            else:
                submission_contents = student_work.Submission.get_contents(
                    unit.unit_id, student.get_key())

            # Determine whether to show the assessment in readonly mode.
            if submission_contents or due_date_exceeded:
                readonly_view = True
                self.template_value['readonly_student_assessment'] = (
                    create_readonly_assessment_params(
                        course.get_assessment_content(unit),
                        StudentWorkUtils.get_answer_list(submission_contents)
                    )
                )

        if not readonly_view:
            self.template_value['assessment_script_src'] = (
                self.get_course().get_assessment_filename(unit_id))

            # If a previous submission exists, reinstate it.
            submission_contents = student_work.Submission.get_contents(
                unit.unit_id, student.get_key())
            saved_answers = (
                StudentWorkUtils.get_answer_list(submission_contents)
                if submission_contents else [])
            self.template_value['saved_answers'] = transforms.dumps(
                saved_answers)

        self.render('assessment.html')
class ReviewDashboardHandler(BaseHandler):
    """Handler for generating the index of reviews that a student has to do."""

    def populate_template(self, unit, review_steps):
        """Adds variables to the template for the review dashboard.

        Args:
            unit: the assessment unit whose reviews are being listed.
            review_steps: the review steps assigned to the current student.
        """
        self.template_value['assessment_name'] = unit.title
        self.template_value['unit_id'] = unit.unit_id
        self.template_value['event_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('event-post'))
        self.template_value['review_dashboard_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('review-dashboard-post'))

        self.template_value['REVIEW_STATE_COMPLETED'] = (
            domain.REVIEW_STATE_COMPLETED)

        self.template_value['review_steps'] = review_steps
        self.template_value['review_min_count'] = (
            unit.workflow.get_review_min_count())

        review_due_date = unit.workflow.get_review_due_date()
        if review_due_date:
            self.template_value['review_due_date'] = review_due_date.strftime(
                HUMAN_READABLE_DATETIME_FORMAT)

        time_now = datetime.datetime.now()
        # NOTE(review): when no review due date is configured this compares
        # a datetime against None (legal in Python 2 only) — confirm
        # workflows always define a review due date.
        self.template_value['due_date_exceeded'] = (time_now > review_due_date)

    def get(self):
        """Handles GET requests."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        course = self.get_course()
        rp = course.get_reviews_processor()
        unit, _ = extract_unit_and_lesson(self)
        if not unit:
            self.error(404)
            return

        self.template_value['navbar'] = {'course': True}

        # Only human-graded assessments have a review dashboard.
        if not course.needs_human_grader(unit):
            self.error(404)
            return

        # Check that the student has submitted the corresponding assignment.
        if not rp.does_submission_exist(unit.unit_id, student.get_key()):
            self.template_value['error_code'] = (
                'cannot_review_before_submitting_assignment')
            self.render('error.html')
            return

        review_steps = rp.get_review_steps_by(unit.unit_id, student.get_key())
        self.populate_template(unit, review_steps)
        required_review_count = unit.workflow.get_review_min_count()

        # The student can request a new submission if:
        # - all his/her current reviews are in Draft/Completed state, and
        # - he/she is not in the state where the required number of reviews
        #   has already been requested, but not all of these are completed.
        self.template_value['can_request_new_review'] = (
            len(review_steps) < required_review_count or
            ReviewUtils.has_completed_all_assigned_reviews(review_steps)
        )
        self.render('review_dashboard.html')

    def post(self):
        """Allows a reviewer to request a new review."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        if not self.assert_xsrf_token_or_fail(
                self.request, 'review-dashboard-post'):
            return

        course = self.get_course()
        unit, unused_lesson = extract_unit_and_lesson(self)
        if not unit:
            self.error(404)
            return

        rp = course.get_reviews_processor()
        review_steps = rp.get_review_steps_by(unit.unit_id, student.get_key())

        self.template_value['navbar'] = {'course': True}

        if not course.needs_human_grader(unit):
            self.error(404)
            return

        # Check that the student has submitted the corresponding assignment.
        if not rp.does_submission_exist(unit.unit_id, student.get_key()):
            self.template_value['error_code'] = (
                'cannot_review_before_submitting_assignment')
            self.render('error.html')
            return

        # Check that the review due date has not passed.
        time_now = datetime.datetime.now()
        review_due_date = unit.workflow.get_review_due_date()
        # NOTE(review): same datetime-vs-None caveat as populate_template.
        if time_now > review_due_date:
            self.template_value['error_code'] = (
                'cannot_request_review_after_deadline')
            self.render('error.html')
            return

        # Check that the student can request a new review.
        review_min_count = unit.workflow.get_review_min_count()
        can_request_new_review = (
            len(review_steps) < review_min_count or
            ReviewUtils.has_completed_all_assigned_reviews(review_steps))
        if not can_request_new_review:
            self.template_value['review_min_count'] = review_min_count
            self.template_value['error_code'] = 'must_complete_more_reviews'
            self.render('error.html')
            return

        # Set up the fallback message shown when no reviewable submission
        # can be assigned (the except branch below).
        self.template_value['no_submissions_available'] = True

        try:
            review_step_key = rp.get_new_review(
                unit.unit_id, student.get_key())
            redirect_params = {
                'key': review_step_key,
                'unit': unit.unit_id,
            }
            self.redirect('/review?%s' % urllib.urlencode(redirect_params))
        except Exception:  # pylint: disable-msg=broad-except
            # No new review could be assigned; re-render the dashboard with
            # no_submissions_available set.
            review_steps = rp.get_review_steps_by(
                unit.unit_id, student.get_key())
            self.populate_template(unit, review_steps)
            self.render('review_dashboard.html')
class ReviewHandler(BaseHandler):
    """Handler for generating the submission page for individual reviews."""

    def get(self):
        """Handles GET requests."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        course = self.get_course()
        rp = course.get_reviews_processor()
        unit, unused_lesson = extract_unit_and_lesson(self)

        if not course.needs_human_grader(unit):
            self.error(404)
            return

        review_step_key = self.request.get('key')
        if not unit or not review_step_key:
            self.error(404)
            return

        try:
            review_step_key = db.Key(encoded=review_step_key)
            review_step = rp.get_review_steps_by_keys(
                unit.unit_id, [review_step_key])[0]
        except Exception:  # pylint: disable-msg=broad-except
            # Malformed keys and lookup failures are all treated as 404.
            self.error(404)
            return

        if not review_step:
            self.error(404)
            return

        # Check that the student is allowed to review this submission.
        if not student.has_same_key_as(review_step.reviewer_key):
            self.error(404)
            return

        self.template_value['navbar'] = {'course': True}
        self.template_value['unit_id'] = unit.unit_id
        self.template_value['key'] = review_step_key

        submission_key = review_step.submission_key
        submission_contents = student_work.Submission.get_contents_by_key(
            submission_key)

        # The reviewed student's answers are always shown read-only.
        readonly_student_assessment = create_readonly_assessment_params(
            course.get_assessment_content(unit),
            StudentWorkUtils.get_answer_list(submission_contents)
        )

        self.template_value['readonly_student_assessment'] = (
            readonly_student_assessment
        )

        review_due_date = unit.workflow.get_review_due_date()
        if review_due_date:
            self.template_value['review_due_date'] = review_due_date.strftime(
                HUMAN_READABLE_DATETIME_FORMAT)

        review_key = review_step.review_key
        rev = rp.get_reviews_by_keys(
            unit.unit_id, [review_key])[0] if review_key else None

        time_now = datetime.datetime.now()
        # NOTE(review): when review_due_date is unset this compares a
        # datetime to None (Python 2 semantics) — confirm workflows always
        # define a review due date.
        show_readonly_review = (
            review_step.state == domain.REVIEW_STATE_COMPLETED or
            time_now > review_due_date)

        self.template_value['due_date_exceeded'] = (time_now > review_due_date)

        if show_readonly_review:
            # Completed or expired reviews are rendered read-only.
            readonly_review_form = create_readonly_assessment_params(
                course.get_review_form_content(unit),
                StudentWorkUtils.get_answer_list(rev)
            )
            self.template_value['readonly_review_form'] = readonly_review_form
        else:
            # Populate the review form,
            self.template_value['assessment_script_src'] = (
                self.get_course().get_review_form_filename(unit.unit_id))

            # Reinstate a previously saved draft, if any.
            saved_answers = (StudentWorkUtils.get_answer_list(rev)
                             if rev else [])
            self.template_value['saved_answers'] = transforms.dumps(
                saved_answers)

        self.template_value['record_events'] = (
            CAN_PERSIST_ACTIVITY_EVENTS.value)
        self.template_value['assessment_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('review-post'))
        self.template_value['event_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('event-post'))

        self.render('review.html')

    def post(self):
        """Handles POST requests, when a reviewer submits a review."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return

        if not self.assert_xsrf_token_or_fail(self.request, 'review-post'):
            return

        course = self.get_course()
        rp = course.get_reviews_processor()

        unit_id = self.request.get('unit_id')
        # NOTE(review): other handlers in this file resolve units via
        # course.find_unit_by_id(); confirm BaseHandler actually exposes
        # find_unit_by_id, otherwise this raises AttributeError.
        unit = self.find_unit_by_id(unit_id)
        if not unit or not course.needs_human_grader(unit):
            self.error(404)
            return

        review_step_key = self.request.get('key')
        if not review_step_key:
            self.error(404)
            return

        try:
            review_step_key = db.Key(encoded=review_step_key)
            review_step = rp.get_review_steps_by_keys(
                unit.unit_id, [review_step_key])[0]
        except Exception:  # pylint: disable-msg=broad-except
            self.error(404)
            return

        # Check that the student is allowed to review this submission.
        if not student.has_same_key_as(review_step.reviewer_key):
            self.error(404)
            return

        self.template_value['navbar'] = {'course': True}
        self.template_value['unit_id'] = unit.unit_id

        # Check that the review due date has not passed.
        time_now = datetime.datetime.now()
        review_due_date = unit.workflow.get_review_due_date()
        if time_now > review_due_date:
            self.template_value['time_now'] = time_now.strftime(
                HUMAN_READABLE_DATETIME_FORMAT)
            self.template_value['review_due_date'] = (
                review_due_date.strftime(HUMAN_READABLE_DATETIME_FORMAT))
            self.template_value['error_code'] = 'review_deadline_exceeded'
            self.render('error.html')
            return

        # The client sends is_draft='false' when the reviewer finalizes.
        mark_completed = (self.request.get('is_draft') == 'false')
        self.template_value['is_draft'] = (not mark_completed)

        review_payload = self.request.get('answers')
        review_payload = transforms.loads(
            review_payload) if review_payload else []
        try:
            rp.write_review(
                unit.unit_id, review_step_key, review_payload, mark_completed)
        except domain.TransitionError:
            # A review that was already finalized cannot be overwritten.
            self.template_value['error_code'] = 'review_already_submitted'
            self.render('error.html')
            return

        self.render('review_confirmation.html')
class EventsRESTHandler(BaseRESTHandler):
    """Provides REST API for an Event."""

    def post(self):
        """Receives event and puts it into datastore."""
        COURSE_EVENTS_RECEIVED.inc()

        # Drop the event unless at least one persistence flag is enabled.
        if not (CAN_PERSIST_ACTIVITY_EVENTS.value or
                CAN_PERSIST_PAGE_EVENTS.value):
            return

        request = transforms.loads(self.request.get('request'))
        if not self.assert_xsrf_token_or_fail(request, 'event-post', {}):
            return

        user = self.get_user()
        if not user:
            return

        source = request.get('source')
        payload_json = request.get('payload')
        models.EventEntity.record(source, user, payload_json)
        COURSE_EVENTS_RECORDED.inc()

        self.process_event(user, source, payload_json)

    def process_event(self, user, source, payload_json):
        """Processes an event after it has been recorded in the event stream."""
        # Only activity-attempt events feed the progress tracker.
        if source != 'attempt-activity':
            return
        student = models.Student.get_enrolled_student_by_email(user.email())
        if not student:
            return
        payload = transforms.loads(payload_json)
        unit_id, lesson_id = get_unit_and_lesson_id_from_url(
            payload['location'])
        if unit_id is None or lesson_id is None:
            return
        self.get_course().get_progress_tracker().put_block_completed(
            student, unit_id, lesson_id, payload['index'])
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers that are not directly related to course content."""
__author__ = 'Saifu Angto (saifu@google.com)'
import base64
import hmac
import os
import time
import urlparse
import appengine_config
from models import transforms
from models.config import ConfigProperty
from models.config import ConfigPropertyEntity
from models.courses import Course
from models.models import Student
from models.roles import Roles
import webapp2
from google.appengine.api import namespace_manager
from google.appengine.api import users
# The name of the template dict key that stores a course's base location.
COURSE_BASE_KEY = 'gcb_course_base'
# The name of the template dict key that stores data from course.yaml.
COURSE_INFO_KEY = 'course_info'
XSRF_SECRET_LENGTH = 20
XSRF_SECRET = ConfigProperty(
'gcb_xsrf_secret', str, (
'Text used to encrypt tokens, which help prevent Cross-site request '
'forgery (CSRF, XSRF). You can set the value to any alphanumeric text, '
'preferably using 16-64 characters. Once you change this value, the '
'server rejects all subsequent requests issued using an old value for '
'this variable.'),
'course builder XSRF secret')
# Whether to record page load/unload events in a database.
CAN_PERSIST_PAGE_EVENTS = ConfigProperty(
'gcb_can_persist_page_events', bool, (
'Whether or not to record student page interactions in a '
'datastore. Without event recording, you cannot analyze student '
'page interactions. On the other hand, no event recording reduces '
'the number of datastore operations and minimizes the use of Google '
'App Engine quota. Turn event recording on if you want to analyze '
'this data.'),
False)
# Date format string for displaying datetimes in UTC.
# Example: 2013-03-21 13:00 UTC
HUMAN_READABLE_DATETIME_FORMAT = '%Y-%m-%d, %H:%M UTC'
# Date format string for displaying dates. Example: 2013-03-21
HUMAN_READABLE_DATE_FORMAT = '%Y-%m-%d'
# Time format string for displaying times. Example: 01:16:40 UTC.
HUMAN_READABLE_TIME_FORMAT = '%H:%M:%S UTC'
class ReflectiveRequestHandler(object):
    """Uses reflection to handle custom get() and post() requests.

    Use this class as a mix-in with any webapp2.RequestHandler to allow request
    dispatching to multiple get() and post() methods based on the 'action'
    parameter.

    Open your existing webapp2.RequestHandler, add this class as a mix-in.
    Define the following class variables:

        default_action = 'list'
        get_actions = ['default_action', 'edit']
        post_actions = ['save']

    Add instance methods named get_list(self), get_edit(self), post_save(self).
    These methods will now be called automatically based on the 'action'
    GET/POST parameter.
    """

    def create_xsrf_token(self, action):
        return XsrfTokenManager.create_xsrf_token(action)

    def get(self):
        """Dispatches GET to get_<action>(), or responds 404."""
        action = self.request.get('action')
        if not action:
            action = self.default_action

        if action not in self.get_actions:
            self.error(404)
            return

        # Default of None so a declared-but-unimplemented action yields a
        # clean 404 instead of an AttributeError (500); the original indexed
        # with getattr(self, name), which made the 'not handler' check below
        # unreachable for missing methods.
        handler = getattr(self, 'get_%s' % action, None)
        if not handler:
            self.error(404)
            return

        return handler()

    def post(self):
        """Dispatches POST to post_<action>() after XSRF validation."""
        action = self.request.get('action')
        if not action or action not in self.post_actions:
            self.error(404)
            return

        handler = getattr(self, 'post_%s' % action, None)
        if not handler:
            self.error(404)
            return

        # Each POST request must have valid XSRF token.
        xsrf_token = self.request.get('xsrf_token')
        if not XsrfTokenManager.is_xsrf_token_valid(xsrf_token, action):
            self.error(403)
            return

        return handler()
class ApplicationHandler(webapp2.RequestHandler):
    """A handler that is aware of the application context."""

    @classmethod
    def is_absolute(cls, url):
        """Tells whether 'url' carries an explicit scheme (http, https...)."""
        return bool(urlparse.urlparse(url).scheme)

    @classmethod
    def get_base_href(cls, handler):
        """Computes current course <base> href."""
        slug = handler.app_context.get_slug()
        base = slug if slug.endswith('/') else '%s/' % slug
        if cls.is_absolute(base):
            return base
        # For IE to work with the <base> tag, its href must be an absolute URL.
        parts = urlparse.urlparse(handler.request.url)
        return urlparse.urlunparse(
            (parts.scheme, parts.netloc, base, None, None, None))

    def __init__(self, *args, **kwargs):
        super(ApplicationHandler, self).__init__(*args, **kwargs)
        self.template_value = {}

    def get_template(self, template_file, additional_dirs=None):
        """Computes location of template files for the current namespace."""
        course_info = self.app_context.get_environ()
        self.template_value[COURSE_INFO_KEY] = course_info
        self.template_value['is_course_admin'] = Roles.is_course_admin(
            self.app_context)
        self.template_value[
            'is_read_write_course'] = self.app_context.fs.is_read_write()
        self.template_value['is_super_admin'] = Roles.is_super_admin()
        self.template_value[COURSE_BASE_KEY] = self.get_base_href(self)
        environ = self.app_context.get_template_environ(
            course_info['course']['locale'], additional_dirs)
        return environ.get_template(template_file)

    def canonicalize_url(self, location):
        """Adds the current namespace URL prefix to the relative 'location'."""
        slug = self.app_context.get_slug()
        needs_prefix = (
            not self.is_absolute(location) and
            not location.startswith(slug) and
            slug and slug != '/')
        return '%s%s' % (slug, location) if needs_prefix else location

    def redirect(self, location, normalize=True):
        if normalize:
            location = self.canonicalize_url(location)
        super(ApplicationHandler, self).redirect(location)
class BaseHandler(ApplicationHandler):
    """Base handler: lazy Course access plus user/enrollment helpers."""

    def __init__(self, *args, **kwargs):
        super(BaseHandler, self).__init__(*args, **kwargs)
        # Lazily-created Course; always go through get_course().
        self.course = None

    def get_course(self):
        # Create the Course on first use and cache it for this handler.
        if not self.course:
            self.course = Course(self)
        return self.course

    def find_unit_by_id(self, unit_id):
        """Gets a unit with a specific id or fails with an exception."""
        return self.get_course().find_unit_by_id(unit_id)

    def get_units(self):
        """Gets all units in the course."""
        return self.get_course().get_units()

    def get_lessons(self, unit_id):
        """Gets all lessons (in order) in the specific course unit."""
        return self.get_course().get_lessons(unit_id)

    def get_progress_tracker(self):
        """Gets the progress tracker for the course."""
        return self.get_course().get_progress_tracker()

    def get_user(self):
        """Returns the signed-in user; otherwise redirects to login, returns None."""
        user = users.get_current_user()
        if not user:
            self.redirect(
                users.create_login_url(self.request.uri), normalize=False)
        else:
            return user

    def personalize_page_and_get_user(self):
        """If the user exists, add personalized fields to the navbar."""
        user = self.get_user()
        if user:
            self.template_value['email'] = user.email()
            self.template_value['logoutUrl'] = (
                users.create_logout_url(self.request.uri))

            # configure page events
            self.template_value['record_page_events'] = (
                CAN_PERSIST_PAGE_EVENTS.value)
            self.template_value['event_xsrf_token'] = (
                XsrfTokenManager.create_xsrf_token('event-post'))
        return user

    def personalize_page_and_get_enrolled(self):
        """If the user is enrolled, add personalized fields to the navbar."""
        user = self.personalize_page_and_get_user()
        if not user:
            # NOTE(review): get_user() above already issued a login redirect;
            # this second redirect preserves the original behavior.
            self.redirect(
                users.create_login_url(self.request.uri), normalize=False)
            return None
        student = Student.get_enrolled_student_by_email(user.email())
        if not student:
            # Signed in but not enrolled: send to the course preview page.
            self.redirect('/preview')
            return None

        # Patch Student models which (for legacy reasons) do not have a user_id
        # attribute set.
        if not student.user_id:
            student.user_id = user.user_id()
            student.put()

        return student

    def assert_xsrf_token_or_fail(self, request, action):
        """Asserts the current request has proper XSRF token or fails."""
        token = request.get('xsrf_token')
        if not token or not XsrfTokenManager.is_xsrf_token_valid(token, action):
            self.error(403)
            return False
        return True

    def render(self, template_file):
        """Renders a template."""
        template = self.get_template(template_file)
        self.response.out.write(template.render(self.template_value))
class BaseRESTHandler(BaseHandler):
    """Base REST handler."""

    def assert_xsrf_token_or_fail(self, token_dict, action, args_dict):
        """Asserts that current request has proper XSRF token or fails."""
        token = token_dict.get('xsrf_token')
        if token and XsrfTokenManager.is_xsrf_token_valid(token, action):
            return True
        # Invalid or missing token: reply with a JSON 403 for REST clients.
        transforms.send_json_response(
            self, 403,
            'Bad XSRF token. Please reload the page and try again',
            args_dict)
        return False
class PreviewHandler(BaseHandler):
    """Handler for viewing course preview."""

    def get(self):
        """Handles GET requests."""
        user = users.get_current_user()
        if user:
            self.template_value['email'] = user.email()
            self.template_value['logoutUrl'] = (
                users.create_logout_url(self.request.uri))
        else:
            self.template_value['loginUrl'] = (
                users.create_login_url(self.request.uri))

        self.template_value['navbar'] = {'course': True}
        self.template_value['units'] = self.get_units()

        # Enrolled students never see the preview; send them to the course.
        is_enrolled = user and Student.get_enrolled_student_by_email(
            user.email())
        if is_enrolled:
            self.redirect('/course')
        else:
            self.render('preview.html')
class RegisterHandler(BaseHandler):
    """Handler for course registration."""

    def get(self):
        """Handles GET request."""
        user = self.personalize_page_and_get_user()
        if not user:
            self.redirect(
                users.create_login_url(self.request.uri), normalize=False)
            return

        student = Student.get_enrolled_student_by_email(user.email())
        if student:
            # Already enrolled; go straight to the course.
            self.redirect('/course')
            return

        self.template_value['navbar'] = {'registration': True}
        self.template_value['register_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('register-post'))

        self.render('register.html')

    def post(self):
        """Handles POST requests."""
        user = self.personalize_page_and_get_user()
        if not user:
            self.redirect(
                users.create_login_url(self.request.uri), normalize=False)
            return

        if not self.assert_xsrf_token_or_fail(self.request, 'register-post'):
            return

        can_register = self.app_context.get_environ(
            )['reg_form']['can_register']
        if not can_register:
            self.template_value['course_status'] = 'full'
        else:
            name = self.request.get('form01')
            # Keep all submitted registration form fields, serialized as JSON.
            additional_fields = transforms.dumps(self.request.POST.items())

            # create new or re-enroll old student
            student = Student.get_by_email(user.email())
            if not student:
                student = Student(key_name=user.email())
                student.user_id = user.user_id()

            # Guard against this email already being bound to a different
            # user id.
            student_by_uid = Student.get_student_by_user_id(user.user_id())
            is_valid_student = (student_by_uid is None or
                                student_by_uid.user_id == student.user_id)
            assert is_valid_student, (
                'Student\'s email and user id do not match.')

            student.user_id = user.user_id()
            student.is_enrolled = True
            student.name = name
            student.additional_fields = additional_fields
            student.put()

        # Render registration confirmation page
        self.template_value['navbar'] = {'registration': True}
        self.render('confirmation.html')
class ForumHandler(BaseHandler):
    """Handler for forum page."""

    def get(self):
        """Renders the forum page for an enrolled student."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return
        self.template_value['navbar'] = {'forum': True}
        self.render('forum.html')
class StudentProfileHandler(BaseHandler):
    """Handles the click to 'My Profile' link in the nav bar."""

    def get(self):
        """Renders the student's profile page with scores and edit token."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return
        course = self.get_course()
        values = self.template_value
        values['navbar'] = {'myprofile': True}
        values['student'] = student
        values['date_enrolled'] = student.enrolled_on.strftime(
            HUMAN_READABLE_DATE_FORMAT)
        values['score_list'] = course.get_all_scores(student)
        values['overall_score'] = course.get_overall_score(student)
        values['student_edit_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('student-edit'))
        self.render('student_profile.html')
class StudentEditStudentHandler(BaseHandler):
    """Handles edits to student records by students."""

    def post(self):
        """Renames the current student after checking the XSRF token."""
        student = self.personalize_page_and_get_enrolled()
        token_ok = student and self.assert_xsrf_token_or_fail(
            self.request, 'student-edit')
        if not token_ok:
            return
        Student.rename_current(self.request.get('name'))
        self.redirect('/student/home')
class StudentUnenrollHandler(BaseHandler):
    """Handler for students to unenroll themselves."""

    def get(self):
        """Shows the unenroll confirmation page."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return
        values = self.template_value
        values['student'] = student
        values['navbar'] = {'registration': True}
        values['student_unenroll_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('student-unenroll'))
        self.render('unenroll_confirmation_check.html')

    def post(self):
        """Marks the current student as unenrolled."""
        student = self.personalize_page_and_get_enrolled()
        if not student:
            return
        if not self.assert_xsrf_token_or_fail(self.request, 'student-unenroll'):
            return
        Student.set_enrollment_status_for_current(False)
        self.template_value['navbar'] = {'registration': True}
        self.render('unenroll_confirmation.html')
class XsrfTokenManager(object):
    """Creates and validates XSRF tokens for user/action pairs.

    NOTE(review): earlier comments here mentioned memcache, but no memcache
    use is visible in this class. Tokens are self-contained: an HMAC digest
    (keyed by XSRF_SECRET) over the user id, action and issue time, plus the
    plain-text issue time; validation recomputes the digest and compares.
    """

    # Max age of the token (4 hours).
    XSRF_TOKEN_AGE_SECS = 60 * 60 * 4

    # Token delimiters.
    DELIMITER_PRIVATE = ':'
    DELIMITER_PUBLIC = '/'

    # Placeholder user id used when no user is signed in.
    USER_ID_DEFAULT = 'default'

    @classmethod
    def init_xsrf_secret_if_none(cls):
        """Verifies that non-default XSRF secret exists; creates one if not."""

        # Any non-default value is fine.
        if XSRF_SECRET.value and XSRF_SECRET.value != XSRF_SECRET.default_value:
            return

        # All property manipulations must run in the default namespace.
        old_namespace = namespace_manager.get_namespace()
        try:
            namespace_manager.set_namespace(
                appengine_config.DEFAULT_NAMESPACE_NAME)

            # Look in the datastore directly.
            entity = ConfigPropertyEntity.get_by_key_name(XSRF_SECRET.name)
            if not entity:
                entity = ConfigPropertyEntity(key_name=XSRF_SECRET.name)

            # Any non-default non-None value is fine.
            if (entity.value and not entity.is_draft and
                (str(entity.value) != str(XSRF_SECRET.default_value))):
                return

            # Initialize to random value.
            entity.value = base64.urlsafe_b64encode(
                os.urandom(XSRF_SECRET_LENGTH))
            entity.is_draft = False
            entity.put()
        finally:
            namespace_manager.set_namespace(old_namespace)

    @classmethod
    def _create_token(cls, action_id, issued_on):
        """Creates a string representation (digest) of a token.

        The token has two public parts: plain-text issue time and an HMAC
        digest over user id, action and issue time.
        """
        cls.init_xsrf_secret_if_none()

        # Lookup user id; fall back to a fixed placeholder when signed out.
        user = users.get_current_user()
        if user:
            user_id = user.user_id()
        else:
            user_id = cls.USER_ID_DEFAULT

        # Round time to seconds.
        issued_on = long(issued_on)

        digester = hmac.new(str(XSRF_SECRET.value))
        digester.update(str(user_id))
        digester.update(cls.DELIMITER_PRIVATE)
        digester.update(str(action_id))
        digester.update(cls.DELIMITER_PRIVATE)
        digester.update(str(issued_on))

        digest = digester.digest()
        token = '%s%s%s' % (
            issued_on, cls.DELIMITER_PUBLIC, base64.urlsafe_b64encode(digest))

        return token

    @classmethod
    def create_xsrf_token(cls, action):
        """Creates a token for 'action' issued at the current time."""
        return cls._create_token(action, time.time())

    @classmethod
    def is_xsrf_token_valid(cls, token, action):
        """Validates a token by recomputing it and checking age and digest."""
        # NOTE(review): the '==' comparison below is not constant-time; a
        # timing-safe comparison (e.g. hmac.compare_digest) would be safer.
        try:
            parts = token.split(cls.DELIMITER_PUBLIC)
            if len(parts) != 2:
                return False

            issued_on = long(parts[0])
            age = time.time() - issued_on
            if age > cls.XSRF_TOKEN_AGE_SECS:
                return False

            authentic_token = cls._create_token(action, issued_on)
            if authentic_token == token:
                return True

            return False
        except Exception:  # pylint: disable-msg=broad-except
            return False
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: psimakov@google.com (Pavel Simakov)
"""Enables hosting of multiple courses in one application instance.
We used to allow hosting of only one course in one Google App Engine instance.
Now we allow hosting of many courses simultaneously. To configure multiple
courses one must set an environment variable in app.yaml file, for example:
...
env_variables:
GCB_COURSES_CONFIG: 'course:/coursea:/courses/a, course:/courseb:/courses/b'
...
This variable holds a ',' or newline separated list of course entries. Each
course entry has up to four ':' separated parts: the word 'course', the URL prefix,
and the file system location for the site files. If the third part is empty,
the course assets are stored in a datastore instead of the file system. The
fourth, optional part, is the name of the course namespace.
The URL prefix specifies how the course URL will appear in the browser. In the
example above, the courses will be mapped to http://www.example.com[/coursea]
and http://www.example.com[/courseb].
The file system location of the files specifies which files to serve for the
course. For each course we expect three sub-folders: 'assets', 'views', and
'data'. The 'data' folder must contain the CSV files that define the course
layout, the 'assets' and 'views' should contain the course specific files and
jinja2 templates respectively. In the example above, the course files are
expected to be placed into folders '/courses/a' and '/courses/b' of your Google
App Engine installation respectively. If this value is absent a datastore is
used to store course assets, not the file system.
By default Course Builder handles static '/assets' files using a custom
handler. You may choose to handle '/assets' files of your course as 'static'
files using Google App Engine handler. You can do so by creating a new static
file handler entry in your app.yaml and placing it before our main course
handler.
If you have an existing course developed using Course Builder and do NOT want
to host multiple courses, there is nothing for you to do. A following default
rule is silently created for you:
...
env_variables:
GCB_COURSES_CONFIG: 'course:/:/'
...
It sets the '/' as the base URL for the course, uses root folder of your Google
App Engine installation to look for course /assets/..., /data/..., and
/views/... and uses blank datastore and memcache namespace. All in all,
everything behaves just as it did in the prior version of Course Builder when
only one course was supported.
If you have existing course developed using Course Builder and DO want to start
hosting multiple courses here are the steps. First, define the courses
configuration environment variable as described above. Second, copy existing
'assets', 'data' and 'views' folders of your course into the new location, for
example '/courses/mycourse'.
If you have an existing course built on a previous version of Course Builder
and you now decided to use new URL prefix, which is not '/', you will need
to update your old course html template and JavaScript files. You typically
would have to make two modifications. First, replace all absolute URLs with
the relative URLs. For example, if you had <a href='/forum'>..</a>, you will
need to replace it with <a href='forum'>..</a>. Second, you need to add <base>
tag at the top of you course 'base.html' and 'base_registration.html' files,
like this:
...
<head>
<base href="{{ gcb_course_base }}" />
...
Current Course Builder release already has all these modifications.
Note, that each 'course' runs in a separate Google App Engine namespace. The
name of the namespace is derived from the course files location. In the example
above, the course files are stored in the folder '/courses/a', which will be mapped
to the namespace name 'gcb-courses-a'. The namespaces can't contain '/', so we
replace them with '-' and prefix the namespace with the project abbreviation
'gcb'. Remember these namespace names, you will need to use them if/when
accessing server administration panel, viewing objects in the datastore, etc.
Don't move the files to another folder after your course starts as a new folder
name will create a new namespace name and old data will no longer be used. You
are free to rename the course URL prefix at any time. Once again, if you are
not hosting multiple courses, your course will run in a default namespace
(None).
Good luck!
"""
import logging
import mimetypes
import os
import posixpath
import re
import threading
import urlparse
import zipfile
import appengine_config
from common import jinja_filters
from common import safe_dom
from models import transforms
from models.config import ConfigProperty
from models.config import ConfigPropertyEntity
from models.config import Registry
from models.counters import PerfCounter
from models.courses import Course
from models.roles import Roles
from models.vfs import AbstractFileSystem
from models.vfs import DatastoreBackedFileSystem
from models.vfs import LocalReadOnlyFileSystem
import webapp2
from webapp2_extras import i18n
import utils
from google.appengine.api import namespace_manager
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import zipserve
# base name for all course namespaces
GCB_BASE_COURSE_NAMESPACE = 'gcb-course'
# these folder and file names are reserved
GCB_ASSETS_FOLDER_NAME = os.path.normpath('/assets/')
GCB_VIEWS_FOLDER_NAME = os.path.normpath('/views/')
GCB_DATA_FOLDER_NAME = os.path.normpath('/data/')
GCB_CONFIG_FILENAME = os.path.normpath('/course.yaml')
# modules do have files that must be inheritable, like oeditor.html
GCB_MODULES_FOLDER_NAME = os.path.normpath('/modules/')
# Files in these folders are inheritable between file systems.
GCB_INHERITABLE_FOLDER_NAMES = [
os.path.join(GCB_ASSETS_FOLDER_NAME, 'css/'),
os.path.join(GCB_ASSETS_FOLDER_NAME, 'img/'),
os.path.join(GCB_ASSETS_FOLDER_NAME, 'lib/'),
GCB_VIEWS_FOLDER_NAME,
GCB_MODULES_FOLDER_NAME]
# supported site types
SITE_TYPE_COURSE = 'course'
# default 'Cache-Control' HTTP header for static files
DEFAULT_CACHE_CONTROL_MAX_AGE = 600
DEFAULT_CACHE_CONTROL_PUBLIC = 'public'
# default HTTP headers for dynamic responses
DEFAULT_EXPIRY_DATE = 'Mon, 01 Jan 1990 00:00:00 GMT'
DEFAULT_PRAGMA = 'no-cache'
# enable debug output
DEBUG_INFO = False
# thread local storage for current request PATH_INFO
PATH_INFO_THREAD_LOCAL = threading.local()
# performance counters
STATIC_HANDLER_COUNT = PerfCounter(
'gcb-sites-handler-static',
'A number of times request was served via static handler.')
DYNAMIC_HANDLER_COUNT = PerfCounter(
'gcb-sites-handler-dynamic',
'A number of times request was served via dynamic handler.')
ZIP_HANDLER_COUNT = PerfCounter(
'gcb-sites-handler-zip',
'A number of times request was served via zip handler.')
NO_HANDLER_COUNT = PerfCounter(
'gcb-sites-handler-none',
'A number of times request was not matched to any handler.')
HTTP_BYTES_IN = PerfCounter(
'gcb-sites-bytes-in',
'A number of bytes received from clients by the handler.')
HTTP_BYTES_OUT = PerfCounter(
'gcb-sites-bytes-out',
'A number of bytes sent out from the handler to clients.')
HTTP_STATUS_200 = PerfCounter(
'gcb-sites-http-20x',
'A number of times HTTP status code 20x was returned.')
HTTP_STATUS_300 = PerfCounter(
'gcb-sites-http-30x',
'A number of times HTTP status code 30x was returned.')
HTTP_STATUS_400 = PerfCounter(
'gcb-sites-http-40x',
'A number of times HTTP status code 40x was returned.')
HTTP_STATUS_500 = PerfCounter(
'gcb-sites-http-50x',
'A number of times HTTP status code 50x was returned.')
COUNTER_BY_HTTP_CODE = {
200: HTTP_STATUS_200, 300: HTTP_STATUS_300, 400: HTTP_STATUS_400,
500: HTTP_STATUS_500}
def count_stats(handler):
    """Records statistics about the request and the response.

    Failures are logged and swallowed so that stats collection can never
    break request serving.

    Args:
        handler: the request handler whose request/response are counted.
    """
    try:
        # Record request bytes in.
        if handler.request and handler.request.content_length:
            HTTP_BYTES_IN.inc(handler.request.content_length)

        # Record response HTTP status code.
        if handler.response and handler.response.status_int:
            rounded_status_code = (handler.response.status_int / 100) * 100
            # Use get() so an unexpected rounded code (e.g. 100) is logged
            # instead of raising KeyError; the original indexed the dict
            # directly, which made the 'unknown code' branch unreachable.
            counter = COUNTER_BY_HTTP_CODE.get(rounded_status_code)
            if not counter:
                logging.error(
                    'Unknown HTTP status code: %s.',
                    handler.response.status_int)
            else:
                counter.inc()

        # Record response bytes out.
        if handler.response and handler.response.content_length:
            HTTP_BYTES_OUT.inc(handler.response.content_length)
    except Exception as e:  # pylint: disable-msg=broad-except
        logging.error('Failed to count_stats(): %s.', str(e))
def has_path_info():
    """Checks if PATH_INFO is defined for the thread local."""
    return hasattr(PATH_INFO_THREAD_LOCAL, 'path')
def set_path_info(path):
    """Stores PATH_INFO in thread local and enters the matching namespace.

    Args:
        path: the request PATH_INFO; must be non-empty.

    Raises:
        Exception: if 'path' is empty, or a path is already set for this
            thread (set/unset calls must be strictly paired).
    """
    if not path:
        raise Exception('Use \'unset()\' instead.')
    if has_path_info():
        raise Exception('Expected no path set.')

    PATH_INFO_THREAD_LOCAL.path = path

    # Remember the caller's namespace so unset_path_info() can restore it.
    PATH_INFO_THREAD_LOCAL.old_namespace = namespace_manager.get_namespace()
    namespace_manager.set_namespace(
        ApplicationContext.get_namespace_name_for_request())
def get_path_info():
    """Gets PATH_INFO from thread local."""
    return PATH_INFO_THREAD_LOCAL.path
def unset_path_info():
    """Removes PATH_INFO from thread local and restores the old namespace.

    Raises:
        Exception: if no path is currently set (must pair with set_path_info).
    """
    if not has_path_info():
        raise Exception('Expected valid path already set.')

    # Restore the namespace captured by set_path_info() before clearing state.
    namespace_manager.set_namespace(
        PATH_INFO_THREAD_LOCAL.old_namespace)

    del PATH_INFO_THREAD_LOCAL.old_namespace
    del PATH_INFO_THREAD_LOCAL.path
def debug(message):
    # Logs 'message' only when the module-level DEBUG_INFO flag is enabled.
    if DEBUG_INFO:
        logging.info(message)
def _validate_appcontext_list(contexts, strict=False):
    """Validates a list of application contexts.

    Args:
        contexts: list of application contexts, in match order.
        strict: when True, additionally require that some context is mapped
            to the root URL '/'.

    Raises:
        Exception: when the ordering would shadow a course, or (in strict
            mode) when no context is mapped to '/'.
    """
    # Check rule order is enforced. If we allowed any order and '/a' was before
    # '/aa', the '/aa' would never match.
    for i, above in enumerate(contexts):
        for below in contexts[i + 1:]:
            if below.get_slug().startswith(above.get_slug()):
                raise Exception(
                    'Please reorder course entries to have '
                    '\'%s\' before \'%s\'.' % (
                        below.get_slug(), above.get_slug()))

    # Make sure '/' is mapped. Use get_slug() for consistency with the
    # ordering check above; the original read the 'slug' attribute directly.
    if strict:
        if not any(context.get_slug() == '/' for context in contexts):
            raise Exception(
                'Please add an entry with \'/\' as course URL prefix.')
def get_all_courses(rules_text=None):
    """Reads all course rewrite rule definitions from environment variable.

    Args:
        rules_text: optional explicit rules text; when None the value of the
            GCB_COURSES_CONFIG property is used.

    Returns:
        A list of ApplicationContext instances, one per course rule.

    Raises:
        Exception: if a rule is malformed, a slug or namespace collides with
            another rule, or the rule ordering would shadow a course.
    """
    # Normalize text definition: entries may be ',' or newline separated.
    if not rules_text:
        rules_text = GCB_COURSES_CONFIG.value
    rules_text = rules_text.replace(',', '\n')

    # Use cached value if exists.
    cached = ApplicationContext.ALL_COURSE_CONTEXTS_CACHE.get(rules_text)
    if cached:
        return cached

    # Compute the list of contexts.
    rules = rules_text.split('\n')
    slugs = {}
    namespaces = {}
    all_contexts = []
    for rule in rules:
        rule = rule.strip()
        # Skip blank lines and '#' comments.
        if not rule or rule.startswith('#'):
            continue
        parts = rule.split(':')

        # validate length
        if len(parts) < 3:
            raise Exception('Expected rule definition of the form '
                            ' \'type:slug:folder[:ns]\', got %s: ' % rule)

        # validate type
        if parts[0] != SITE_TYPE_COURSE:
            raise Exception('Expected \'%s\', found: \'%s\'.'
                            % (SITE_TYPE_COURSE, parts[0]))
        site_type = parts[0]

        # validate slug: must be a bare path with no scheme, host or query
        slug = parts[1]
        slug_parts = urlparse.urlparse(slug)
        if slug != slug_parts[2]:
            raise Exception(
                'Bad rule: \'%s\'. '
                'Course URL prefix \'%s\' must be a simple URL fragment.' % (
                    rule, slug))
        if slug in slugs:
            raise Exception(
                'Bad rule: \'%s\'. '
                'Course URL prefix \'%s\' is already defined.' % (rule, slug))
        slugs[slug] = True

        # validate folder name
        if parts[2]:
            folder = parts[2]
            # Local folder given: serve it via a read-only file system.
            # Note: these lambdas capture 'folder' late-bound, but they are
            # invoked later in this same loop iteration, so this is safe.
            # pylint: disable-msg=g-long-lambda
            create_fs = lambda unused_ns: LocalReadOnlyFileSystem(
                logical_home_folder=folder)
        else:
            folder = '/'
            # No folder: course assets live in the datastore, inheriting
            # selected folders from the local read-only file system.
            # pylint: disable-msg=g-long-lambda
            create_fs = lambda ns: DatastoreBackedFileSystem(
                ns=ns,
                logical_home_folder=appengine_config.BUNDLE_ROOT,
                inherits_from=LocalReadOnlyFileSystem(logical_home_folder='/'),
                inheritable_folders=GCB_INHERITABLE_FOLDER_NAMES)

        # validate or derive namespace; when not given explicitly, it is
        # GCB_BASE_COURSE_NAMESPACE plus the folder with '/' replaced by '-'
        namespace = appengine_config.DEFAULT_NAMESPACE_NAME
        if len(parts) == 4:
            namespace = parts[3]
        else:
            if folder and folder != '/':
                namespace = '%s%s' % (GCB_BASE_COURSE_NAMESPACE,
                                      folder.replace('/', '-'))
        try:
            namespace_manager.validate_namespace(namespace)
        except Exception as e:
            raise Exception(
                'Error validating namespace "%s" in rule "%s"; %s.' % (
                    namespace, rule, e))

        if namespace in namespaces:
            raise Exception(
                'Bad rule \'%s\'. '
                'Namespace \'%s\' is already defined.' % (rule, namespace))
        namespaces[namespace] = True

        all_contexts.append(ApplicationContext(
            site_type, slug, folder, namespace,
            AbstractFileSystem(create_fs(namespace)),
            raw=rule))

    _validate_appcontext_list(all_contexts)

    # Cache result to avoid re-parsing over and over.
    ApplicationContext.ALL_COURSE_CONTEXTS_CACHE = {rules_text: all_contexts}

    return all_contexts
def get_course_for_current_request():
    """Chooses course that matches current request context path."""
    # No PATH_INFO recorded for this thread means no course can match.
    if not has_path_info():
        return None
    path = get_path_info()

    # Match the path against each course rule in declared order.
    # TODO(psimakov): linear search is unacceptable
    for course in get_all_courses():
        slug = course.get_slug()
        if slug == '/' or path == slug or path.startswith('%s/' % slug):
            return course

    debug('No mapping for: %s' % path)
    return None
def path_join(base, path):
    """Joins 'base' and 'path' ('path' is interpreted as a relative path).

    This method is like os.path.join(), but 'path' is interpreted relatively.
    E.g., os.path.join('/a/b', '/c') yields '/c', but this function yields
    '/a/b/c'.

    Args:
        base: The base path.
        path: The path to append to base; this is treated as a relative path.

    Returns:
        The path obtaining by appending 'path' to 'base'.
    """
    if os.path.isabs(path):
        # Strip any drive letter (Windows) and the leading separator so that
        # 'path' joins as a relative path under 'base'.
        path = os.path.splitdrive(path)[1][1:]
    return AbstractFileSystem.normpath(os.path.join(base, path))
def abspath(home_folder, filename):
    """Creates an absolute URL for a filename in a home folder."""
    relative = path_join(home_folder, filename)
    return path_join(appengine_config.BUNDLE_ROOT, relative)
def unprefix(path, prefix):
    """Remove the prefix from path. Append '/' if an empty string results."""
    if not path.startswith(prefix):
        raise Exception('Not prefixed.')
    # The root prefix leaves the path untouched.
    if prefix == '/':
        return path
    remainder = path[len(prefix):]
    return remainder if remainder else '/'
def set_static_resource_cache_control(handler):
    """Properly sets Cache-Control for a WebOb/webapp2 response."""
    # Clear 'no-cache' so the 'public'/'max-age' directives below take effect.
    handler.response.cache_control.no_cache = None
    handler.response.cache_control.public = DEFAULT_CACHE_CONTROL_PUBLIC
    handler.response.cache_control.max_age = DEFAULT_CACHE_CONTROL_MAX_AGE
def set_default_response_headers(handler):
    """Sets the default headers for outgoing responses."""
    # This conditional is needed for the unit tests to pass, since their
    # handlers do not have a response attribute.
    if handler.response:
        # Only set the headers for dynamic responses. This happens precisely
        # when the handler is an instance of utils.ApplicationHandler.
        if isinstance(handler, utils.ApplicationHandler):
            # Dynamic content must never be cached by browsers or proxies.
            handler.response.cache_control.no_cache = True
            handler.response.cache_control.must_revalidate = True
            handler.response.expires = DEFAULT_EXPIRY_DATE
            handler.response.pragma = DEFAULT_PRAGMA
def make_zip_handler(zipfilename):
    """Creates a handler class that serves files from the given zip file."""

    class CustomZipHandler(zipserve.ZipHandler):
        """Custom ZipHandler that properly controls caching."""

        def get(self, *args):
            """Handles GET request."""
            # Prefer an explicitly passed name; fall back to the path the
            # dispatcher computed for this request.
            path = args[0] if args else None
            if not path:
                path = self.path_translated
            # Names inside a zip archive are relative, so a leading slash is
            # dropped (a bare '/' is left unchanged).
            if path and len(path) > 1 and path.startswith('/'):
                path = path[1:]
            if not path:
                self.error(404)
                return
            ZIP_HANDLER_COUNT.inc()
            self.ServeFromZipFile(zipfilename, path)
            count_stats(self)

        def SetCachingHeaders(self):  # pylint: disable=C6409
            """Properly controls caching."""
            set_static_resource_cache_control(self)

    return CustomZipHandler
class CssComboZipHandler(zipserve.ZipHandler):
    """A handler which combines files served from a zip file.

    The paths for the files within the zip file are presented
    as query parameters.
    """

    # Class-level cache of zipfile.ZipFile objects keyed by file name; an
    # empty string is cached to remember that a zip file failed to open.
    zipfile_cache = {}

    def get(self):
        raise NotImplementedError()

    def SetCachingHeaders(self):  # pylint: disable=C6409
        """Properly controls caching."""
        set_static_resource_cache_control(self)

    def serve_from_zip_file(self, zipfilename, static_file_handler):
        """Assemble the download by reading files from the zip file.

        Args:
            zipfilename: the zip archive to read member files from.
            static_file_handler: URL prefix used when rewriting relative CSS
                url() references to absolute ones.
        """
        zipfile_object = self.zipfile_cache.get(zipfilename)
        if zipfile_object is None:
            try:
                zipfile_object = zipfile.ZipFile(zipfilename)
            except (IOError, RuntimeError, zipfile.BadZipfile) as err:
                # If the zipfile can't be opened, that's probably a
                # configuration error in the app, so it's logged as an error.
                logging.error('Can\'t open zipfile %s: %s', zipfilename, err)
                zipfile_object = ''  # Special value to cache negative results.
            self.zipfile_cache[zipfilename] = zipfile_object
        if not zipfile_object:
            self.error(404)
            return
        # Serve a single Content-Type only when all requested names agree on
        # one; otherwise fall back to text/plain.
        all_content_types = set()
        for name in self.request.GET:
            all_content_types.add(mimetypes.guess_type(name))
        if len(all_content_types) == 1:
            content_type = all_content_types.pop()[0]
        else:
            content_type = 'text/plain'
        self.response.headers['Content-Type'] = content_type
        self.SetCachingHeaders()
        for name in self.request.GET:
            try:
                content = zipfile_object.read(name)
                if content_type == 'text/css':
                    content = self._fix_css_paths(
                        name, content, static_file_handler)
                self.response.out.write(content)
            except (KeyError, RuntimeError) as err:
                # Log and skip missing members rather than failing the whole
                # combined download; include the reason for debuggability.
                logging.error(
                    'Not found %s in %s: %s', name, zipfilename, err)

    def _fix_css_paths(self, path, css, static_file_handler):
        """Transform relative url() settings in CSS to absolute.

        This is necessary because a url setting, e.g., url(foo.png), is
        interpreted as relative to the location of the CSS file. However
        in the case of a bundled CSS file, obtained from a URL such as
            http://place.com/cb/combo?a/b/c/foo.css
        the browser would believe that the location for foo.png was
            http://place.com/cb/foo.png
        and not
            http://place.com/cb/a/b/c/foo.png
        Thus we transform the url from
            url(foo.png)
        to
            url(/static_file_service/a/b/c/foo.png)

        Args:
            path: the path to the CSS file within the ZIP file
            css: the content of the CSS file
            static_file_handler: the base handler to serve the referenced file

        Returns:
            The CSS with all relative URIs rewritten to absolute URIs.
        """
        base = static_file_handler + posixpath.split(path)[0] + '/'
        css = css.decode('utf-8')
        # Rewrite only relative references: the negative lookahead skips
        # absolute http(s) URLs. The previous pattern used the character
        # class [^http|^https], which wrongly excluded any URL whose first
        # character was one of 'h', 't', 'p', 's' or '|' instead of
        # excluding http/https prefixes.
        css = re.sub(r'url\((?!https?:)(\S+)\)', r'url(%s\1)' % base, css)
        return css
def make_css_combo_zip_handler(zipfilename, static_file_handler):
    """Creates a CssComboZipHandler subclass bound to one zip file."""

    class CustomCssComboZipHandler(CssComboZipHandler):
        """Serves combined CSS read from the captured zip file."""

        def get(self):
            self.serve_from_zip_file(zipfilename, static_file_handler)

    return CustomCssComboZipHandler
class AssetHandler(webapp2.RequestHandler):
    """Handles serving of static resources located on the file system."""

    def __init__(self, app_context, filename):
        self.app_context = app_context
        self.filename = filename

    def get_mime_type(self, filename, default='application/octet-stream'):
        """Guesses a MIME type from the file name; falls back to default."""
        guess = mimetypes.guess_type(filename)[0]
        return default if guess is None else guess

    def _can_view(self, fs, stream):
        """Checks if current user can view stream."""
        # Published assets are visible to everyone; drafts only to admins.
        if not fs.is_draft(stream):
            return True
        return Roles.is_course_admin(self.app_context)

    def get(self):
        """Handles GET requests."""
        debug('File: %s' % self.filename)
        fs = self.app_context.fs
        if not fs.isfile(self.filename):
            self.error(404)
            return
        stream = fs.open(self.filename)
        if not self._can_view(fs, stream):
            self.error(403)
            return
        set_static_resource_cache_control(self)
        self.response.headers['Content-Type'] = self.get_mime_type(
            self.filename)
        self.response.write(stream.read())
class ApplicationContext(object):
    """An application context for a request/response."""

    # Here we store a map of a text definition of the courses to be parsed, and
    # a fully validated array of ApplicationContext objects that they define.
    # This is cached in process and automatically recomputed when text
    # definition changes.
    ALL_COURSE_CONTEXTS_CACHE = {}

    @classmethod
    def get_namespace_name_for_request(cls):
        """Gets the name of the namespace to use for this request.

        (Examples of such namespaces are NDB and memcache.)

        Returns:
            The namespace of the course matching the current request context
            path, or the application default namespace if no course matches.
        """
        course = get_course_for_current_request()
        if course:
            return course.namespace
        return appengine_config.DEFAULT_NAMESPACE_NAME

    @classmethod
    def after_create(cls, instance):
        """Override this method to manipulate freshly created instance."""
        pass

    def __init__(self, site_type, slug, homefolder, namespace, fs, raw=None):
        """Creates new application context.

        Args:
            site_type: Specifies the type of context. Must be 'course' for now.
            slug: A common context path prefix for all URLs in the context.
            homefolder: A folder with the assets belonging to this context.
            namespace: A name of a datastore namespace for use by this context.
            fs: A file system object to be used for accessing homefolder.
            raw: A raw representation of this course rule (course:/:/).
        """
        self.type = site_type
        self.slug = slug
        self.homefolder = homefolder
        self.namespace = namespace
        self._fs = fs
        self._raw = raw
        # Give subclasses a chance to customize the freshly built instance.
        self.after_create(self)

    @ property
    def raw(self):
        # The original 'course:...' rule text this context was parsed from.
        return self._raw

    @ property
    def fs(self):
        # The AbstractFileSystem used to access this context's home folder.
        return self._fs

    @property
    def now_available(self):
        # True once the course author has published the course.
        course = self.get_environ().get('course')
        return course and course.get('now_available')

    def get_title(self):
        """Returns the course title from the course environment."""
        return self.get_environ()['course']['title']

    def get_namespace_name(self):
        """Returns the datastore namespace name of this context."""
        return self.namespace

    def get_home_folder(self):
        """Returns the home folder of this context."""
        return self.homefolder

    def get_slug(self):
        """Returns the URL path prefix of this context."""
        return self.slug

    def get_config_filename(self):
        """Returns absolute location of a course configuration file."""
        filename = abspath(self.get_home_folder(), GCB_CONFIG_FILENAME)
        debug('Config file: %s' % filename)
        return filename

    def get_environ(self):
        """Returns the course environment settings for this context."""
        return Course.get_environ(self)

    def get_home(self):
        """Returns absolute location of a course folder."""
        path = abspath(self.get_home_folder(), '')
        return path

    def get_template_home(self):
        """Returns absolute location of a course template folder."""
        path = abspath(self.get_home_folder(), GCB_VIEWS_FOLDER_NAME)
        return path

    def get_data_home(self):
        """Returns absolute location of a course data folder."""
        path = abspath(self.get_home_folder(), GCB_DATA_FOLDER_NAME)
        return path

    def get_template_environ(self, locale, additional_dirs):
        """Create and configure jinja template evaluation environment."""
        template_dir = self.get_template_home()
        dirs = [template_dir]
        if additional_dirs:
            dirs += additional_dirs
        jinja_environment = self.fs.get_jinja_environ(dirs)
        # Install gettext translations for the requested locale so that
        # templates can use {% trans %} blocks.
        i18n.get_i18n().set_locale(locale)
        jinja_environment.install_gettext_translations(i18n)
        jinja_environment.filters['gcb_tags'] = jinja_filters.gcb_tags
        return jinja_environment
def _courses_config_validator(rules_text, errors):
    """Validates a textual definition of courses entries."""
    try:
        # Parsing and strict validation both raise on a bad definition.
        contexts = get_all_courses(rules_text=rules_text)
        _validate_appcontext_list(contexts, strict=True)
    except Exception as e:  # pylint: disable-msg=broad-except
        errors.append(str(e))
def validate_new_course_entry_attributes(name, title, admin_email, errors):
    """Validates new course attributes.

    Args:
        name: unique name for the course; used to derive its URL slug and
            datastore namespace.
        title: human-readable course title.
        admin_email: email address of the course administrator.
        errors: a list that validation error messages are appended to.
    """
    if not name or len(name) < 3:
        errors.append(
            'The unique name associated with the course must be at least '
            'three characters long.')
    # Guard the regex check: re.match() raises TypeError on a None name.
    if name and not re.match('[_a-z0-9]+$', name, re.IGNORECASE):
        errors.append(
            'The unique name associated with the course should contain only '
            'lowercase letters, numbers, or underscores.')
    if not title or len(title) < 3:
        errors.append('The course title is too short.')
    if not admin_email or '@' not in admin_email:
        errors.append('Please enter a valid email address.')
@db.transactional()
def _add_new_course_entry_to_persistent_configuration(raw):
    """Adds new raw course entry definition to the datastore settings.

    This loads all current datastore course entries and adds a new one. It
    also find the best place to add the new entry at the further down the list
    the better, because entries are applied in the order of declaration.

    Args:
        raw: The course entry rule: 'course:/foo::ns_foo'.

    Returns:
        True if added, False if not. False almost always means a duplicate rule.
    """
    # Get all current entries from a datastore.
    entity = ConfigPropertyEntity.get_by_key_name(GCB_COURSES_CONFIG.name)
    if not entity:
        entity = ConfigPropertyEntity(key_name=GCB_COURSES_CONFIG.name)
        entity.is_draft = False
    if not entity.value:
        # Seed from the current (possibly default) property value.
        entity.value = GCB_COURSES_CONFIG.value
    lines = entity.value.splitlines()
    # Add new entry to the rest of the entries. Since entries are matched
    # in the order of declaration, try to find insertion point further down.
    # Walking the insertion index from the end keeps the new, more specific
    # rule as late as possible in the list.
    final_lines_text = None
    for index in reversed(range(0, len(lines) + 1)):
        # Create new rule list putting new item at index position.
        new_lines = lines[:]
        new_lines.insert(index, raw)
        new_lines_text = '\n'.join(new_lines)
        # Validate the rule list definition.
        errors = []
        _courses_config_validator(new_lines_text, errors)
        if not errors:
            final_lines_text = new_lines_text
            break
    # Save updated course entries.
    if final_lines_text:
        entity.value = final_lines_text
        entity.put()
        return True
    return False
def add_new_course_entry(unique_name, title, admin_email, errors):
    """Validates course attributes and adds the course."""
    # Reject the request outright when any attribute is invalid.
    validate_new_course_entry_attributes(
        unique_name, title, admin_email, errors)
    if errors:
        return
    # Build the raw rule and verify it parses on its own.
    raw = 'course:/%s::ns_%s' % (unique_name, unique_name)
    try:
        get_all_courses(rules_text=raw)
    except Exception as e:  # pylint: disable-msg=broad-except
        errors.append('Failed to add entry: %s.\n%s' % (raw, e))
    if errors:
        return
    # Persist the rule; failure almost always means a duplicate entry.
    if _add_new_course_entry_to_persistent_configuration(raw):
        return raw
    errors.append(
        'Unable to add new entry \'%s\'. Entry with the '
        'same name \'%s\' already exists.' % (raw, unique_name))
# Module-level configuration property holding the newline-separated list of
# course definition rules; _courses_config_validator vets every edit before
# it is accepted.
GCB_COURSES_CONFIG = ConfigProperty(
    'gcb_courses_config', str,
    safe_dom.NodeList().append(
        safe_dom.Element('p').add_text("""
A newline separated list of course entries. Each course entry has
four parts, separated by colons (':'). The four parts are:""")
    ).append(
        safe_dom.Element('ol').add_child(
            safe_dom.Element('li').add_text(
                'The word \'course\', which is a required element.')
        ).add_child(
            safe_dom.Element('li').add_text("""
A unique course URL prefix. Examples could be '/cs101' or '/art'.
Default: '/'""")
        ).add_child(
            safe_dom.Element('li').add_text("""
A file system location of course asset files. If location is left empty,
the course assets are stored in a datastore instead of the file system. A course
with assets in a datastore can be edited online. A course with assets on file
system must be re-deployed to Google App Engine manually.""")
        ).add_child(
            safe_dom.Element('li').add_text("""
A course datastore namespace where course data is stored in App Engine.
Note: this value cannot be changed after the course is created."""))
    ).append(
        safe_dom.Text(
            'For example, consider the following two course entries:')
    ).append(safe_dom.Element('br')).append(
        safe_dom.Element('blockquote').add_text(
            'course:/cs101::/ns_cs101'
        ).add_child(
            safe_dom.Element('br')
        ).add_text('course:/:/')
    ).append(
        safe_dom.Element('p').add_text("""
Assuming you are hosting Course Builder on http:/www.example.com, the first
entry defines a course on a http://www.example.com/cs101 and both its assets
and student data are stored in the datastore namespace 'ns_cs101'. The second
entry defines a course hosted on http://www.example.com/, with its assets
stored in the '/' folder of the installation and its data stored in the default
empty datastore namespace.""")
    ).append(
        safe_dom.Element('p').add_text("""
A line that starts with '#' is ignored. Course entries are applied in the
order they are defined.""")
    ), 'course:/:/:', multiline=True, validator=_courses_config_validator)
class ApplicationRequestHandler(webapp2.RequestHandler):
    """Handles dispatching of all URL's to proper handlers."""

    # WARNING! never set this value to True, unless for the production load
    # tests; setting this value to True will allow any anonymous third party to
    # act as a Course Builder superuser
    CAN_IMPERSONATE = False

    # the name of the impersonation header
    IMPERSONATE_HEADER_NAME = 'Gcb-Impersonate'

    def dispatch(self):
        if self.CAN_IMPERSONATE:
            self.impersonate_and_dispatch()
        else:
            super(ApplicationRequestHandler, self).dispatch()

    def impersonate_and_dispatch(self):
        """Dispatches request with user impersonation."""
        impersonate_info = self.request.headers.get(
            self.IMPERSONATE_HEADER_NAME)
        if not impersonate_info:
            super(ApplicationRequestHandler, self).dispatch()
            return

        impersonate_info = transforms.loads(impersonate_info)
        email = impersonate_info.get('email')
        user_id = impersonate_info.get('user_id')

        def get_impersonated_user():
            """A method that returns impersonated user."""
            try:
                return users.User(email=email, _user_id=user_id)
            except users.UserNotFoundError:
                return None

        # Temporarily monkey-patch users.get_current_user; the finally
        # clause guarantees it is restored even if dispatch raises.
        old_get_current_user = users.get_current_user
        try:
            logging.info('Impersonating %s.', email)
            users.get_current_user = get_impersonated_user
            super(ApplicationRequestHandler, self).dispatch()
            return
        finally:
            users.get_current_user = old_get_current_user

    @classmethod
    def bind_to(cls, urls, urls_map):
        """Recursively builds a map from a list of (URL, Handler) tuples."""
        for url in urls:
            path_prefix = url[0]
            handler = url[1]
            urls_map[path_prefix] = handler
            # add child handlers
            if hasattr(handler, 'get_child_routes'):
                cls.bind_to(handler.get_child_routes(), urls_map)

    @classmethod
    def bind(cls, urls):
        """Installs the (URL, Handler) tuples as the class-wide route map."""
        urls_map = {}
        cls.bind_to(urls, urls_map)
        cls.urls_map = urls_map

    def get_handler(self):
        """Finds a course suitable for handling this request."""
        course = get_course_for_current_request()
        if not course:
            return None
        path = get_path_info()
        if not path:
            return None
        return self.get_handler_for_course_type(
            course, unprefix(path, course.get_slug()))

    def can_handle_course_requests(self, context):
        """Reject all, but authors requests, to an unpublished course."""
        return context.now_available or Roles.is_course_admin(context)

    def _get_handler_factory_for_path(self, path):
        """Picks a handler to handle the path."""
        # Checks if path maps in its entirety.
        if path in ApplicationRequestHandler.urls_map:
            return ApplicationRequestHandler.urls_map[path]

        # Check if partial path maps. For now, let only zipserve.ZipHandler
        # handle partial matches. We want to find the longest possible match.
        parts = path.split('/')
        candidate = None
        partial_path = ''
        for part in parts:
            if part:
                partial_path += '/' + part
                if partial_path in ApplicationRequestHandler.urls_map:
                    handler = ApplicationRequestHandler.urls_map[partial_path]
                    if (
                            isinstance(handler, zipserve.ZipHandler) or
                            issubclass(handler, zipserve.ZipHandler)):
                        candidate = handler
        return candidate

    def get_handler_for_course_type(self, context, path):
        """Gets the right handler for the given context and path.

        Args:
            context: the ApplicationContext of the matched course.
            path: the request path with the course slug prefix removed.

        Returns:
            A ready-to-run handler instance, or None when nothing matches or
            the course is not available to the current user.
        """
        if not self.can_handle_course_requests(context):
            return None
        norm_path = os.path.normpath(path)

        # Handle static assets here.
        if norm_path.startswith(GCB_ASSETS_FOLDER_NAME):
            abs_file = abspath(context.get_home_folder(), norm_path)
            handler = AssetHandler(self, abs_file)
            handler.request = self.request
            handler.response = self.response
            handler.app_context = context

            debug('Course asset: %s' % abs_file)
            STATIC_HANDLER_COUNT.inc()
            return handler

        # Handle all dynamic handlers here.
        handler_factory = self._get_handler_factory_for_path(path)
        if handler_factory:
            handler = handler_factory()
            handler.app_context = context
            handler.request = self.request
            handler.response = self.response

            # This variable represents the path after the namespace prefix is
            # removed. The full path is still stored in self.request.path. For
            # example, if self.request.path is '/new_course/foo/bar/baz/...',
            # the path_translated would be '/foo/bar/baz/...'.
            handler.path_translated = path

            debug('Handler: %s > %s' % (path, handler.__class__.__name__))
            DYNAMIC_HANDLER_COUNT.inc()
            return handler

        NO_HANDLER_COUNT.inc()
        return None

    def _dispatch_with_path_info(self, path, method_name):
        """Shared dispatch used by get/post/put/delete.

        Sets up the thread-local path info, routes to the matched handler's
        method, then records stats and tears the path info down. The original
        code repeated this identical logic in each HTTP verb method.
        """
        try:
            set_path_info(path)
            handler = self.get_handler()
            if not handler:
                self.error(404)
            else:
                set_default_response_headers(handler)
                getattr(handler, method_name)()
        finally:
            count_stats(self)
            unset_path_info()

    def get(self, path):
        self._dispatch_with_path_info(path, 'get')

    def post(self, path):
        self._dispatch_with_path_info(path, 'post')

    def put(self, path):
        self._dispatch_with_path_info(path, 'put')

    def delete(self, path):
        self._dispatch_with_path_info(path, 'delete')
def assert_mapped(src, dest):
    """Asserts that request path 'src' maps to the course with slug 'dest'."""
    try:
        set_path_info(src)
        course = get_course_for_current_request()
        # A falsy 'dest' means no course is expected to match at all.
        if not dest:
            assert course is None
        else:
            assert course.get_slug() == dest
    finally:
        unset_path_info()
def assert_handled(src, target_handler):
    """Asserts 'src' resolves to an instance of 'target_handler' class."""
    try:
        set_path_info(src)
        dispatcher = ApplicationRequestHandler()
        # For unit tests to work we want all requests to be handled regardless
        # of course.now_available flag value. Here we patch for that.
        dispatcher.can_handle_course_requests = lambda context: True
        found = dispatcher.get_handler()
        if found is None and target_handler is None:
            return None
        assert isinstance(found, target_handler)
        return found
    finally:
        unset_path_info()
def assert_fails(func):
    """Asserts that invoking 'func' raises an exception."""
    try:
        func()
    except Exception:  # pylint: disable=W0703
        # The expected outcome: the call failed.
        return
    raise Exception('Function \'%s\' was expected to fail.' % func)
def setup_courses(course_config):
    """Helper method that allows a test to setup courses on the fly."""
    # Overrides the live course configuration for this process only.
    Registry.test_overrides[GCB_COURSES_CONFIG.name] = course_config
def reset_courses():
    """Cleanup method to complement setup_courses()."""
    # Restores the course configuration to its declared default value.
    Registry.test_overrides[
        GCB_COURSES_CONFIG.name] = GCB_COURSES_CONFIG.default_value
def test_unprefix():
    """Verifies prefix stripping behavior of unprefix()."""
    # The root prefix leaves the path unchanged.
    assert unprefix('/', '/') == '/'
    # A proper prefix is removed; exact match collapses to '/'.
    assert unprefix('/a/b/c', '/a/b') == '/c'
    assert unprefix('/a/b/index.html', '/a/b') == '/index.html'
    assert unprefix('/a/b', '/a/b') == '/'
def test_rule_validations():
    """Test rules validator."""
    courses = get_all_courses(rules_text='course:/:/')
    assert 1 == len(courses)

    # Check comments.
    setup_courses('course:/a:/nsa, course:/b:/nsb')
    assert 2 == len(get_all_courses())
    setup_courses('course:/a:/nsa, # course:/a:/nsb')
    assert 1 == len(get_all_courses())

    # Check slug collisions are not allowed.
    setup_courses('course:/a:/nsa, course:/a:/nsb')
    assert_fails(get_all_courses)

    # Check namespace collisions are not allowed.
    setup_courses('course:/a:/nsx, course:/b:/nsx')
    assert_fails(get_all_courses)

    # Check rule order is enforced. If we allowed any order and '/a' was before
    # '/aa', the '/aa' would never match.
    setup_courses('course:/a:/nsa, course:/aa:/nsaa, course:/aaa:/nsaaa')
    assert_fails(get_all_courses)

    # Check namespace names.
    setup_courses('course:/a::/nsx')
    assert_fails(get_all_courses)

    # Check slug validity.
    setup_courses('course:/a /b::nsa')
    # A space inside the slug is tolerated; must not raise.
    get_all_courses()
    setup_courses('course:/a?/b::nsa')
    assert_fails(get_all_courses)

    # Cleanup.
    reset_courses()
def test_rule_definitions():
    """Test various rewrite rule definitions."""
    # Check that the default site is created when no rules are specified.
    assert len(get_all_courses()) == 1

    # Test one rule parsing.
    setup_courses('course:/google/pswg:/sites/pswg')
    rules = get_all_courses()
    assert len(get_all_courses()) == 1
    rule = rules[0]
    assert rule.get_slug() == '/google/pswg'
    assert rule.get_home_folder() == '/sites/pswg'

    # Test two rule parsing.
    setup_courses('course:/a/b:/c/d, course:/e/f:/g/h')
    assert len(get_all_courses()) == 2

    # Test that two of the same slugs are not allowed.
    setup_courses('foo:/a/b:/c/d, bar:/a/b:/c/d')
    assert_fails(get_all_courses)

    # Test that only 'course' is supported.
    setup_courses('foo:/a/b:/c/d, bar:/e/f:/g/h')
    assert_fails(get_all_courses)

    # Cleanup.
    reset_courses()

    # Test namespaces.
    set_path_info('/')
    try:
        setup_courses('course:/:/c/d')
        # The namespace name is derived from the home folder path '/c/d'.
        assert ApplicationContext.get_namespace_name_for_request() == (
            'gcb-course-c-d')
    finally:
        unset_path_info()

    # Cleanup.
    reset_courses()
def test_url_to_rule_mapping():
    """Tests mapping of a URL to a rule."""
    # default mapping
    assert_mapped('/favicon.ico', '/')
    assert_mapped('/assets/img/foo.png', '/')

    # explicit mapping
    setup_courses('course:/a/b:/c/d, course:/e/f:/g/h')
    assert_mapped('/a/b', '/a/b')
    assert_mapped('/a/b/', '/a/b')
    assert_mapped('/a/b/c', '/a/b')
    assert_mapped('/a/b/c', '/a/b')
    assert_mapped('/e/f', '/e/f')
    assert_mapped('/e/f/assets', '/e/f')
    assert_mapped('/e/f/views', '/e/f')
    # Paths without a leading slash or with unknown prefixes do not match.
    assert_mapped('e/f', None)
    assert_mapped('foo', None)

    # Cleanup.
    reset_courses()
def test_url_to_handler_mapping_for_course_type():
    """Tests mapping of a URL to a handler for course type."""
    # setup rules
    setup_courses('course:/a/b:/c/d, course:/e/f:/g/h')

    # setup helper classes; only ZipHandler subclasses may match partially
    class FakeHandler0(object):
        def __init__(self):
            self.app_context = None

    class FakeHandler1(object):
        def __init__(self):
            self.app_context = None

    class FakeHandler2(zipserve.ZipHandler):
        def __init__(self):
            self.app_context = None

    class FakeHandler3(zipserve.ZipHandler):
        def __init__(self):
            self.app_context = None

    class FakeHandler4(zipserve.ZipHandler):
        def __init__(self):
            self.app_context = None

    # Setup handler.
    handler0 = FakeHandler0
    handler1 = FakeHandler1
    handler2 = FakeHandler2
    urls = [('/', handler0), ('/foo', handler1), ('/bar', handler2)]
    ApplicationRequestHandler.bind(urls)

    # Test proper handler mappings.
    assert_handled('/a/b', FakeHandler0)
    assert_handled('/a/b/', FakeHandler0)
    assert_handled('/a/b/foo', FakeHandler1)
    assert_handled('/a/b/bar', FakeHandler2)

    # Test partial path match.
    assert_handled('/a/b/foo/bee', None)
    assert_handled('/a/b/bar/bee', FakeHandler2)

    # Test assets mapping.
    handler = assert_handled('/a/b/assets/img/foo.png', AssetHandler)
    assert AbstractFileSystem.normpath(
        handler.app_context.get_template_home()).endswith(
            AbstractFileSystem.normpath('/c/d/views'))

    # This is allowed as we don't go out of /assets/...
    handler = assert_handled(
        '/a/b/assets/foo/../models/models.py', AssetHandler)
    assert AbstractFileSystem.normpath(handler.filename).endswith(
        AbstractFileSystem.normpath('/c/d/assets/models/models.py'))

    # This is not allowed as we do go out of /assets/...
    assert_handled('/a/b/assets/foo/../../models/models.py', None)

    # Test negative cases
    assert_handled('/foo', None)
    assert_handled('/baz', None)

    # Site 'views' and 'data' are not accessible
    assert_handled('/a/b/view/base.html', None)
    assert_handled('/a/b/data/units.csv', None)

    # Default mapping
    reset_courses()
    handler3 = FakeHandler3
    handler4 = FakeHandler4
    urls = [
        ('/', handler0),
        ('/foo', handler1),
        ('/bar', handler2),
        ('/zip', handler3),
        ('/zip/a/b', handler4)]
    ApplicationRequestHandler.bind(urls)

    # Positive cases
    assert_handled('/', FakeHandler0)
    assert_handled('/foo', FakeHandler1)
    assert_handled('/bar', FakeHandler2)
    handler = assert_handled('/assets/js/main.js', AssetHandler)
    assert AbstractFileSystem.normpath(
        handler.app_context.get_template_home()).endswith(
            AbstractFileSystem.normpath('/views'))

    # Partial URL matching cases test that the most specific match is found.
    assert_handled('/zip', FakeHandler3)
    assert_handled('/zip/a', FakeHandler3)
    assert_handled('/zip/a/b', FakeHandler4)
    assert_handled('/zip/a/b/c', FakeHandler4)

    # Negative cases
    assert_handled('/baz', None)
    assert_handled('/favicon.ico', None)
    assert_handled('/e/f/index.html', None)
    assert_handled('/foo/foo.css', None)

    # Clean up.
    ApplicationRequestHandler.bind([])
def test_namespace_collisions_are_detected():
    """Test that namespace collisions are detected and are not allowed."""
    # Two different rules whose derived namespaces collide must be rejected.
    setup_courses('foo:/a/b:/c/d, bar:/a/b:/c-d')
    assert_fails(get_all_courses)
    reset_courses()
def test_path_construction():
    """Checks that path_join() works correctly."""
    # Test cases common to all platforms.
    assert (os.path.normpath(path_join('/a/b', '/c')) ==
            os.path.normpath('/a/b/c'))
    assert (os.path.normpath(path_join('/a/b/', '/c')) ==
            os.path.normpath('/a/b/c'))
    assert (os.path.normpath(path_join('/a/b', 'c')) ==
            os.path.normpath('/a/b/c'))
    assert (os.path.normpath(path_join('/a/b/', 'c')) ==
            os.path.normpath('/a/b/c'))

    # Windows-specific test cases; only run where drive letters exist.
    drive, unused_path = os.path.splitdrive('c:\\windows')
    if drive:
        assert (os.path.normpath(path_join('/a/b', 'c:/d')) ==
                os.path.normpath('/a/b/d'))
        assert (os.path.normpath(path_join('/a/b/', 'c:/d')) ==
                os.path.normpath('/a/b/d'))
def run_all_unit_tests():
    """Runs all unit tests in this module."""
    # Impersonation must never be enabled outside of production load tests.
    assert not ApplicationRequestHandler.CAN_IMPERSONATE
    test_namespace_collisions_are_detected()
    test_unprefix()
    test_rule_definitions()
    test_url_to_rule_mapping()
    test_url_to_handler_mapping_for_course_type()
    test_path_construction()
    test_rule_validations()
if __name__ == '__main__':
    # Running standalone: enable verbose debug() output and self-test.
    DEBUG_INFO = True
    run_all_unit_tests()
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for custom HTML tags."""
__author__ = 'John Orr (jorr@google.com)'
import inspect
import mimetypes
import os
import pkgutil
from xml.etree import cElementTree
import appengine_config
from common import schema_fields
from extensions import tags
import html5lib
from models import config
import webapp2
import safe_dom
# Site-wide switch controlling whether custom HTML tags are expanded when
# lessons are rendered; when disabled the raw markup is shown instead.
CAN_USE_DYNAMIC_TAGS = config.ConfigProperty(
    'gcb_can_use_dynamic_tags', bool, safe_dom.Text(
        'Whether lesson content can make use of custom HTML tags such as '
        '<gcb-youtube videoid="...">. If this is enabled some legacy content '
        'may be rendered differently. '),
    default_value=True)
class BaseTag(object):
    """Base class for the custom HTML tags."""

    @classmethod
    def name(cls):
        # Display name of the tag; defaults to the implementing class name.
        return cls.__name__

    @classmethod
    def vendor(cls):
        # Vendor identifier of the tag; defaults to the defining module name.
        return cls.__module__

    def render(self, unused_node):
        """Receive a node and return a node."""
        return cElementTree.XML('[Unimplemented custom tag]')

    def get_icon_url(self):
        """Provide an icon for the visual editor."""
        # Inline data: URI so no separate image resource needs deploying.
        return """
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAAXNSR0IArs
4c6QAAAAZiS0dEAP8A/wD/oL2nkwAAAAlwSFlzAAALEwAACxMBAJqcGAAAAAd0SU1FB90EGgAIFHpT6h
8AAAAZdEVYdENvbW1lbnQAQ3JlYXRlZCB3aXRoIEdJTVBXgQ4XAAAC30lEQVRo3u1ZP2sqQRCfVVGUXC
FqoZAmbSBYxFikMojBD2ErkgdC/AxpAn4A2wRMKptgCrWwSApBEG2DCidcI0gIxogXnXnFI5I87y6Jd6
seOHDN7LL7+83u/Nk5hoh/wMTCEJHMTMDGGDMzfrCAyWVL4DdCZLy72YwCxhgDIoKXlxcQRREeHx9BFE
WYTqfg9XohGAxCKBSCnZ0dcDqdhlrFEKlWq8QYIwD49ovFYjQajYiICBF17auLACLSbDaj3d3dObizsz
Nqt9v09PRE8Xhck0gul9NtONADnojI7XbPAXW73YV55XJZk8TFxcX6TuDk5GQORBAE1StxeXmpSaJery
99lWBZ69dqtQUgpVJJcW6/39cksL+/v/oTiEajC0DsdjvNZjPF+Q6HQ5PEsrJ0Huj1egs6WZbh+flZcX
4kEtFcr1KprDaRybKsqL++vlbU+/1+zfVEUVwtAZ/Pp6h/f39X1COi5nqBQGC1iaxUKine5eFwqDg/Fo
tx8QFdYfTm5uYLiPv7e0JExZD4OV/8/+3t7a0vkcmyTJIk0Xg8Vs0Dr6+vmta/vb1dbR74rTw8PKiCPz
093V8m/qmEQiFF8IeHh7oLOq4EEJGazaam5ddajf5ElKJPNps1BDxXAohIjUbjC3CPx0OTycTQfbiewO
f3QDKZ5LIHVwIf4PP5vGFXZmUErq6uCAAok8lw9TFuBFKp1LxE4GF53eX0d10KSZLg+Pj4X/+SY/ePCw
HGGIzHYzg6OuLfG+W18MHBAYTDYf7daeLRLtv2RrcE9DdvC4UC5PN5mE6n3DvGhtU+RETn5+cLxVsikT
BHIru7u1N9uKTTaS4EDItCiAhWq1V13OVywWg02lwfGA6HmuNvb2+b7cQWi8XcUUgQBPB6varjWmMbE0
Y7nY5q4VYsFs0RRvv9PgmCMI8+VquVWq0WtzBqaC308bMPAGAwGAAiqvZQt8XcthbaELGZ/AbBX0kdVa
SPB+uxAAAAAElFTkSuQmCC
"""

    def get_schema(self, unused_handler):
        """Get the schema for the tag's attributes using schema_fields."""
        # Subclasses override this to describe their editable attributes.
        reg = schema_fields.FieldRegistry('Unimplemented Custom Tag')
        return reg
class ResourcesHandler(webapp2.RequestHandler):
    """Content handler for resources associated with custom tags."""

    def get(self):
        """Respond to HTTP GET methods."""
        path = self.request.path
        if path.startswith('/'):
            path = path[1:]
        path = os.path.normpath(path)

        # Only files under extensions/tags/*/resources may be served. The
        # early returns are essential: without them error(404) only sets the
        # status and execution falls through to serve the file anyway.
        if not path.startswith(os.path.join('extensions', 'tags')):
            self.error(404)
            return
        if os.path.basename(os.path.dirname(path)) != 'resources':
            self.error(404)
            return

        resource_file = os.path.join(appengine_config.BUNDLE_ROOT, path)

        mimetype = mimetypes.guess_type(resource_file)[0]
        if mimetype is None:
            mimetype = 'application/octet-stream'

        try:
            self.response.status = 200
            self.response.headers['Content-Type'] = mimetype
            # Static tag resources are publicly cacheable for 10 minutes.
            self.response.cache_control.no_cache = None
            self.response.cache_control.public = 'public'
            self.response.cache_control.max_age = 600
            stream = open(resource_file)
            try:
                self.response.write(stream.read())
            finally:
                # Close the handle; the original implementation leaked it.
                stream.close()
        except IOError:
            self.error(404)
class Registry(object):
    """A class that holds all dynamically registered tags."""

    # Maps tag names to their implementing classes.
    _bindings = {}

    @classmethod
    def add_tag_binding(cls, tag_name, clazz):
        """Registers a tag name to class binding."""
        cls._bindings[tag_name] = clazz

    @classmethod
    def get_all_tags(cls):
        """Returns a copy of all registered tag bindings."""
        return dict(cls._bindings)
def get_tag_bindings():
    """Return the bindings of tag names to implementing classes.

    Tag bindings work by looking for classes which extend BaseTag and which
    belong to packages inside extensions/tags. The tag name is then composed
    from the package name and the class name, after lower-casing and separated
    with a dash. E.g., the class
        extensions.tags.gcb.YouTube
    is bound to the tag name gcb-youtube.

    Returns:
        the bindings of tag names to implementing classes.
    """
    bindings = {}
    for loader, name, ispkg in pkgutil.walk_packages(tags.__path__):
        if ispkg:
            mod = loader.find_module(name).load_module(name)
            # NOTE: the inner loop rebinds 'name' to the member class name;
            # the package name survives as mod.__name__.
            for name, clazz in inspect.getmembers(mod, inspect.isclass):
                if issubclass(clazz, BaseTag):
                    tag_name = ('%s-%s' % (mod.__name__, name)).lower()
                    bindings[tag_name] = clazz
    # Explicitly registered tags override discovered ones. (Python 2 idiom:
    # dict.items() returns concatenable lists.)
    return dict(bindings.items() + Registry.get_all_tags().items())
def html_to_safe_dom(html_string):
    """Render HTML text as a tree of safe_dom elements.

    Args:
        html_string: untrusted HTML fragment; may be None or empty.

    Returns:
        a safe_dom.NodeList mirroring the parsed fragment, with any
        registered custom tags expanded via their render() method.
    """
    tag_bindings = get_tag_bindings()
    node_list = safe_dom.NodeList()
    if not html_string:
        return node_list

    def _process_html_tree(elt):
        # Converts one etree element (and its subtree) into a NodeList.
        node_list = safe_dom.NodeList()
        # Capture the tail before a custom tag may replace `elt`: the tail
        # belongs to the original position in the document, not the
        # rendered replacement.
        tail = elt.tail
        if elt.tag in tag_bindings:
            # Custom tag: let the bound class render a replacement element.
            elt = tag_bindings[elt.tag]().render(elt)
        if elt.tag.lower() == 'script':
            out_elt = safe_dom.ScriptElement()
        else:
            out_elt = safe_dom.Element(elt.tag)
        out_elt.add_attribute(**elt.attrib)
        if elt.text:
            out_elt.add_text(elt.text)
        for child in elt:
            out_elt.add_children(_process_html_tree(child))
        node_list.append(out_elt)
        if tail:
            node_list.append(safe_dom.Text(tail))
        return node_list

    parser = html5lib.HTMLParser(
        tree=html5lib.treebuilders.getTreeBuilder('etree', cElementTree),
        namespaceHTMLElements=False)
    # Wrap in a div so a bare-text fragment still parses to one root.
    root = parser.parseFragment('<div>%s</div>' % html_string)[0]
    if root.text:
        node_list.append(safe_dom.Text(root.text))
    for elt in root:
        node_list.append(_process_html_tree(elt))
    return node_list
| Python |
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom Jinja2 filters used in Course Builder."""
__author__ = 'John Orr (jorr@google.com)'
import jinja2
import safe_dom
import tags
def finalize(x):
    """Jinja2 finalize hook that renders safe_dom values via .sanitized."""
    if isinstance(x, (safe_dom.Node, safe_dom.NodeList)):
        return jinja2.utils.Markup(x.sanitized)
    return x
def js_string(data):
    """Escape a string so that it can be put in a JS quote."""
    if not isinstance(data, basestring):
        return data
    # Order matters: the backslash must be doubled before the other
    # escapes introduce new backslashes.
    for plain, escaped in (
            ('\\', '\\\\'), ('\r', '\\r'), ('\n', '\\n'), ('\b', '\\b'),
            ('"', '\\"'), ("'", "\\'"),
            ('<', '\\u003c'), ('>', '\\u003e'), ('&', '\\u0026')):
        data = data.replace(plain, escaped)
    return jinja2.utils.Markup(data)
def gcb_tags(data):
    """Apply GCB custom tags, if enabled. Otherwise pass as if by 'safe'."""
    if not isinstance(data, basestring):
        return data
    if tags.CAN_USE_DYNAMIC_TAGS.value:
        rendered = tags.html_to_safe_dom(data)
    else:
        rendered = data
    return jinja2.utils.Markup(rendered)
| Python |
"""Classes to build sanitized HTML."""
__author__ = 'John Orr (jorr@google.com)'
import cgi
import re
def escape(strg):
    """HTML-escape strg, additionally encoding single quotes and backticks.

    cgi.escape with quote=1 handles &, <, > and double quotes; single
    quotes and backticks are replaced with numeric character references
    explicitly (some browsers treat ` as an attribute delimiter).
    """
    # BUG FIX: the replacement strings had been corrupted into no-ops
    # (replacing "'" with "'"); restore the numeric entities.
    return cgi.escape(strg, quote=1).replace("'", '&#39;').replace('`', '&#96;')
class Node(object):
    """Base class for the sanitizing module."""

    @property
    def sanitized(self):
        """Subclasses must return a safely-rendered string for this node."""
        raise NotImplementedError()

    def __str__(self):
        # String conversion always goes through sanitization.
        return self.sanitized
class NodeList(object):
    """Holds a list of Nodes and can bulk sanitize them."""

    def __init__(self):
        self.list = []

    def __len__(self):
        return len(self.list)

    def append(self, node):
        """Append a node and return self so calls can be chained."""
        assert node is not None, 'Cannot add an empty value to the node list'
        self.list.append(node)
        return self

    @property
    def sanitized(self):
        """Concatenation of every member's sanitized rendering."""
        return ''.join(node.sanitized for node in self.list)

    def __str__(self):
        return self.sanitized
class Text(Node):
    """Holds untrusted text which will be sanitized when accessed."""

    def __init__(self, unsafe_string):
        # Stored verbatim; escaping happens lazily in `sanitized`.
        self._value = unsafe_string

    @property
    def sanitized(self):
        """The text with HTML metacharacters escaped."""
        return escape(self._value)
class Element(Node):
    """Embodies an HTML element which will be sanitized when accessed."""

    _ALLOWED_NAME_PATTERN = re.compile('^[a-zA-Z][a-zA-Z0-9]*$')
    # Elements which self-close instead of taking an explicit end tag.
    _VOID_ELEMENTS = frozenset([
        'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen',
        'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr'])

    def __init__(self, tag_name, **attr):
        """Initializes an element with given tag name and attributes.

        Tag name will be restricted to alpha chars, attribute names
        will be quote-escaped.

        Args:
            tag_name: the name of the element, which must match
                _ALLOWED_NAME_PATTERN.
            **attr: the names and value of the attributes. Names must match
                _ALLOWED_NAME_PATTERN and values will be quote-escaped.
        """
        assert Element._ALLOWED_NAME_PATTERN.match(tag_name), (
            'tag name %s is not allowed' % tag_name)
        for attr_name in attr:
            assert Element._ALLOWED_NAME_PATTERN.match(attr_name), (
                'attribute name %s is not allowed' % attr_name)
        self._tag_name = tag_name
        self._attr = attr
        self._children = []

    def add_attribute(self, **attr):
        """Set attributes, validating their names; returns self."""
        for attr_name in attr:
            assert Element._ALLOWED_NAME_PATTERN.match(attr_name), (
                'attribute name %s is not allowed' % attr_name)
            self._attr[attr_name] = attr[attr_name]
        return self

    def add_child(self, node):
        """Append one child node; returns self."""
        self._children.append(node)
        return self

    def add_children(self, node_list):
        """Append every node from a NodeList; returns self."""
        self._children += node_list.list
        return self

    def add_text(self, text):
        """Append untrusted text as a child; returns self."""
        return self.add_child(Text(text))

    @property
    def sanitized(self):
        """Sanitize the element and its descendants."""
        assert Element._ALLOWED_NAME_PATTERN.match(self._tag_name), (
            'tag name %s is not allowed' % self._tag_name)
        parts = ['<' + self._tag_name]
        for attr_name, value in sorted(self._attr.items()):
            # 'className' is accepted as an alias since 'class' cannot be
            # used as a Python keyword argument.
            if attr_name == 'className':
                attr_name = 'class'
            if value is None:
                value = ''
            parts.append(' %s="%s"' % (attr_name, escape(value)))
        if self._children:
            parts.append('>')
            parts.extend(child.sanitized for child in self._children)
            parts.append('</%s>' % self._tag_name)
        elif self._tag_name.lower() in Element._VOID_ELEMENTS:
            parts.append('/>')
        else:
            parts.append('></%s>' % self._tag_name)
        return ''.join(parts)
class ScriptElement(Element):
    """Represents an HTML <script> element."""

    def __init__(self, **attr):
        super(ScriptElement, self).__init__('script', **attr)

    def add_child(self, unused_node):
        # Script bodies may only be added via add_text.
        raise ValueError()

    def add_children(self, unused_nodes):
        # Script bodies may only be added via add_text.
        raise ValueError()

    def add_text(self, text):
        """Add the script body."""

        class Script(Node):
            """Holds script text; rejects embedded end tags on access."""

            def __init__(self, script):
                self._script = script

            @property
            def sanitized(self):
                if '</script>' in self._script:
                    raise ValueError('End script tag forbidden')
                return self._script

        self._children.append(Script(text))
class Entity(Node):
    """Holds an XML entity."""

    ENTITY_PATTERN = re.compile('^&([a-zA-Z]+|#[0-9]+|#x[0-9a-fA-F]+);$')

    def __init__(self, entity):
        assert Entity.ENTITY_PATTERN.match(entity)
        self._entity = entity

    @property
    def sanitized(self):
        # Validate again on access in case the value was mutated.
        assert Entity.ENTITY_PATTERN.match(self._entity)
        return self._entity
| Python |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapping from schema to backend properties."""
__author__ = 'Abhinav Khandelwal (abhinavk@google.com)'
import collections
import json
from models.property import Property
from models.property import Registry
class SchemaField(Property):
    """SchemaField defines a simple field in REST API."""

    def get_json_schema_dict(self):
        """Get the JSON schema for this field."""
        prop = {}
        prop['type'] = self._property_type
        if self._optional:
            prop['optional'] = self._optional
        if self._description:
            prop['description'] = self._description
        return prop

    def _get_schema_dict(self, prefix_key):
        """Get Schema annotation dictionary for this field."""
        if self._extra_schema_dict_values:
            schema = self._extra_schema_dict_values
        else:
            schema = {}
        schema['label'] = self._label
        # BUG FIX: the original used "'date' is self._property_type", which
        # compares string identity and only works by CPython interning
        # accident; use equality instead.
        if self._property_type == 'date':
            schema['dateFormat'] = 'Y/m/d'
            schema['valueFormat'] = 'Y/m/d'
        elif self._property_type == 'select':
            choices = []
            for value, label in self._select_data:
                choices.append({'value': value, 'label': label})
            schema['choices'] = choices
        if self._description:
            schema['description'] = self._description
        return [(prefix_key + ['_inputex'], schema)]
class FieldArray(SchemaField):
    """FieldArray is an array with object or simple items in the REST API."""

    def __init__(
        self, name, label, description=None, item_type=None,
        extra_schema_dict_values=None):
        super(FieldArray, self).__init__(
            name, label, 'array', description=description,
            extra_schema_dict_values=extra_schema_dict_values)
        # Schema object describing the type of each array element.
        self._item_type = item_type

    def get_json_schema_dict(self):
        """Parent schema plus an 'items' entry for the element type."""
        json_schema = super(FieldArray, self).get_json_schema_dict()
        json_schema['items'] = self._item_type.get_json_schema_dict()
        return json_schema

    def _get_schema_dict(self, prefix_key):
        """Own annotations followed by the item type's, under 'items'."""
        dict_list = super(FieldArray, self)._get_schema_dict(prefix_key)
        # pylint: disable-msg=protected-access
        item_dicts = self._item_type._get_schema_dict(prefix_key + ['items'])
        # pylint: enable-msg=protected-access
        return dict_list + item_dicts
class FieldRegistry(Registry):
    """FieldRegistry is an object with SchemaField properties in REST API."""

    def add_sub_registry(
        self, name, title=None, description=None, registry=None):
        """Add a sub registry to for this Registry."""
        if not registry:
            registry = FieldRegistry(title, description)
        self._sub_registories[name] = registry
        return registry

    def get_json_schema_dict(self):
        """Assemble the JSON schema dict for this registry and children."""
        schema_dict = dict(self._registry)
        schema_dict['properties'] = collections.OrderedDict()
        for schema_field in self._properties:
            schema_dict['properties'][schema_field.name] = (
                schema_field.get_json_schema_dict())
        for key in self._sub_registories:
            schema_dict['properties'][key] = (
                self._sub_registories[key].get_json_schema_dict())
        return schema_dict

    def get_json_schema(self):
        """Get the json schema for this API."""
        return json.dumps(self.get_json_schema_dict())

    def _get_schema_dict(self, prefix_key):
        """Get schema dict for this API."""
        title_key = list(prefix_key)
        title_key.append('title')
        schema_dict = [(title_key, self._title)]
        if self._extra_schema_dict_values:
            key = list(prefix_key)
            key.append('_inputex')
            schema_dict.append([key, self._extra_schema_dict_values])
        base_key = list(prefix_key)
        base_key.append('properties')
        # pylint: disable-msg=protected-access
        for schema_field in self._properties:
            key = base_key + [schema_field.name]
            schema_dict += schema_field._get_schema_dict(key)
        # pylint: enable-msg=protected-access
        for key in self._sub_registories:
            sub_registry_key_prefix = list(base_key)
            sub_registry_key_prefix.append(key)
            sub_registry = self._sub_registories[key]
            # pylint: disable-msg=protected-access
            for entry in sub_registry._get_schema_dict(sub_registry_key_prefix):
                schema_dict.append(entry)
            # pylint: enable-msg=protected-access
        return schema_dict

    def get_schema_dict(self):
        """Get schema dict for this API."""
        return self._get_schema_dict(list())

    def _add_entry(self, key_part_list, value, entity):
        # key_part_list arrives reversed and is consumed from the end,
        # descending into (or creating) nested dicts.
        if len(key_part_list) == 1:
            entity[key_part_list[0]] = value
            return
        key = key_part_list.pop()
        # IDIOM FIX: dict.has_key() is deprecated and removed in Python 3.
        if key not in entity:
            entity[key] = {}
        else:
            assert isinstance(entity[key], dict)
        self._add_entry(key_part_list, value, entity[key])

    def convert_json_to_entity(self, json_entry, entity):
        """Copy values from a (possibly nested) JSON dict into entity."""
        # IDIOM FIX: use isinstance instead of type() == type(dict()).
        assert isinstance(json_entry, dict)
        for key, value in json_entry.items():
            if isinstance(value, dict):
                self.convert_json_to_entity(value, entity)
            else:
                # Colon-separated keys address nested entity dicts.
                key_parts = key.split(':')
                key_parts.reverse()
                self._add_entry(key_parts, value, entity)

    def _get_field_value(self, key_part_list, entity):
        # Mirror of _add_entry for reads; None when any part is absent.
        if len(key_part_list) == 1:
            return entity.get(key_part_list[0])
        key = key_part_list.pop()
        if key in entity:
            return self._get_field_value(key_part_list, entity[key])
        return None

    def convert_entity_to_json_entity(self, entity, json_entry):
        """Populate json_entry from entity for every registered field."""
        for schema_field in self._properties:
            field_name = schema_field.name
            field_name_parts = field_name.split(':')
            field_name_parts.reverse()
            value = self._get_field_value(field_name_parts, entity)
            # IDIOM FIX: 'is not None' instead of type(value) != type(None).
            if value is not None:
                json_entry[field_name] = value
        for key in self._sub_registories:
            json_entry[key] = {}
            self._sub_registories[key].convert_entity_to_json_entity(
                entity, json_entry[key])
| Python |
#!/usr/bin/env python
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from StringIO import StringIO
from PIL import Image
import datauri
# A fully opaque black pixel in RGBA mode.
RGBA_BLACK = (0, 0, 0, 255)

def sign_(n):
    """Return -1, 0 or 1 according to the sign of n."""
    return -1 if n < 0 else (1 if n > 0 else 0)

def find_black_region_(im, sx, sy, ex, ey):
    """Find the first run of black pixels on a horizontal or vertical scan.

    Scans im from (sx, sy) towards (ex, ey), which must differ in exactly
    one coordinate, and returns (start, end) offsets of the first black
    run, relative to the scan origin.

    Raises:
        ValueError: if the scan is neither purely horizontal nor vertical.
    """
    dx = sign_(ex - sx)
    dy = sign_(ey - sy)
    if abs(dx) == abs(dy):
        # BUG FIX: the original raised a bare string, which has been a
        # TypeError (not a usable exception) since Python 2.6.
        raise ValueError(
            'findRegion_ can\'t look both horizontally and vertically at once.')
    pixel_changes = []
    pixel_on = False
    x = sx
    y = sy
    while True:
        # Record each transition between black and non-black pixels.
        if not pixel_on and im.getpixel((x, y)) == RGBA_BLACK:
            pixel_changes.append((x, y))
            pixel_on = True
        elif pixel_on and im.getpixel((x, y)) != RGBA_BLACK:
            pixel_changes.append((x, y))
            pixel_on = False
        x += dx
        y += dy
        if x == ex and y == ey:
            break
    # Offsets of the first on/off transitions along the scan axis.
    return (pixel_changes[0][0 if dx else 1] - (sx if dx else sy),
            pixel_changes[1][0 if dx else 1] - (sx if dx else sy))
def image_to_data_uri_(im):
    """Encode a PIL image as a PNG data: URI string."""
    buf = StringIO()
    im.save(buf, 'PNG')
    # The 'foo.png' name only supplies the mime type for the URI.
    uri = datauri.to_data_uri(buf.getvalue(), 'foo.png')
    buf.close()
    return uri
def main():
    """Convert the 9-patch PNG named in argv[1] to stencil XML on stdout."""
    src_im = Image.open(sys.argv[1])

    # read and parse 9-patch stretch and padding regions:
    # stretch markers live on the top row / left column, padding markers
    # on the bottom row / right column of the 1px border.
    stretch_l, stretch_r = find_black_region_(src_im, 0, 0, src_im.size[0], 0)
    stretch_t, stretch_b = find_black_region_(src_im, 0, 0, 0, src_im.size[1])
    pad_l, pad_r = find_black_region_(src_im, 0, src_im.size[1] - 1, src_im.size[0], src_im.size[1] - 1)
    pad_t, pad_b = find_black_region_(src_im, src_im.size[0] - 1, 0, src_im.size[0] - 1, src_im.size[1])

    #padding_box = {}

    template_params = {}
    template_params['id'] = sys.argv[1]
    template_params['icon_uri'] = image_to_data_uri_(src_im)
    template_params['dim_constraint_attributes'] = '' # p:lockHeight="true"
    # crop off the 1px 9-patch marker border for the displayed image
    template_params['image_uri'] = image_to_data_uri_(src_im.crop((1, 1, src_im.size[0] - 1, src_im.size[1] - 1)))
    # all offsets shifted by -1 to account for the removed marker border
    template_params['width_l'] = stretch_l - 1
    template_params['width_r'] = src_im.size[0] - stretch_r - 1
    template_params['height_t'] = stretch_t - 1
    template_params['height_b'] = src_im.size[1] - stretch_b - 1
    template_params['pad_l'] = pad_l - 1
    template_params['pad_t'] = pad_t - 1
    template_params['pad_r'] = src_im.size[0] - pad_r - 1
    template_params['pad_b'] = src_im.size[1] - pad_b - 1
    print open('res/shape_9patch_template.xml').read() % template_params

if __name__ == '__main__':
    main()
#!/usr/bin/env python
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from StringIO import StringIO
from PIL import Image
import datauri
def image_to_data_uri_(im):
    """Encode a PIL image as a PNG data: URI string."""
    buf = StringIO()
    im.save(buf, 'PNG')
    # The 'foo.png' file name exists only to select the PNG mime type.
    uri = datauri.to_data_uri(buf.getvalue(), 'foo.png')
    buf.close()
    return uri
def main():
    """Convert the plain PNG named in argv[1] to stencil XML on stdout."""
    src_im = Image.open(sys.argv[1])
    template_params = {}
    template_params['id'] = sys.argv[1]
    # the same image serves as both the shape and its palette icon
    template_params['image_uri'] = image_to_data_uri_(src_im)
    template_params['icon_uri'] = image_to_data_uri_(src_im)
    template_params['width'] = src_im.size[0]
    template_params['height'] = src_im.size[1]
    print open('res/shape_png_template.xml').read() % template_params

if __name__ == '__main__':
    main()
#!/usr/bin/env python
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import os.path
import shutil
import zipfile
def main():
    """Package a stencil set's shape XML and icons into a distributable zip.

    Usage: <script> <id> <displayname> <description>
    Reads res/sets/<id>/{shapes,icons} and res/stencil_template.xml and
    writes dist/stencil-<id>.zip.
    """
    params = {}
    params['id'] = sys.argv[1]
    params['displayname'] = sys.argv[2]
    params['description'] = sys.argv[3]
    zip_file = zipfile.ZipFile('dist/stencil-%s.zip' % params['id'], 'w',
                               zipfile.ZIP_DEFLATED)
    try:
        # save stencil XML: concatenate every shape file into the template
        shapes_xml = ''
        shapes_folder = 'res/sets/%s/shapes' % params['id']
        for shape_file in os.listdir(shapes_folder):
            if not shape_file.endswith('.xml'):
                continue
            # BUG FIX: file handles were opened and never closed; close
            # each one promptly.
            shape_stream = open(os.path.join(shapes_folder, shape_file))
            try:
                shapes_xml += shape_stream.read()
            finally:
                shape_stream.close()
        params['shapes'] = shapes_xml
        template_stream = open('res/stencil_template.xml')
        try:
            final_xml = template_stream.read() % params
        finally:
            template_stream.close()
        zip_file.writestr('Definition.xml', final_xml)
        # save icons
        icons_folder = 'res/sets/%s/icons' % params['id']
        for icon_file in os.listdir(icons_folder):
            if not icon_file.endswith('.png'):
                continue
            icon_stream = open(os.path.join(icons_folder, icon_file), 'rb')
            try:
                icon_data = icon_stream.read()
            finally:
                icon_stream.close()
            zip_file.writestr('icons/%s' % icon_file, icon_data)
    finally:
        # BUG FIX: the zip was leaked if anything above raised.
        zip_file.close()

if __name__ == '__main__':
    main()
#!/usr/bin/env python
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import sys
import mimetypes
def to_data_uri(data, file_name):
'''Takes a file object and returns its data: string.'''
mime_type = mimetypes.guess_type(file_name)
return 'data:%(mimetype)s;base64,%(data)s' % dict(mimetype=mime_type[0],
data=base64.b64encode(data))
def main():
    """Print the data: URI for the file named in argv[1]."""
    print to_data_uri(open(sys.argv[1], 'rb').read(), sys.argv[1])

if __name__ == '__main__':
    main()
'''
Experimental attempt to port the genetic algorithm to active learning,
thus removing the need for a test file. This is a big deal, as it
means we only need the data sources set up in order to produce a full,
working configuration.
The basic idea is stolen from this paper
http://svn.aksw.org/papers/2012/ESWC_EAGLE/public.pdf
The code is still fairly messy, and very inefficient, so more work is
needed.
'''
# Fine-tune comparators in more detail
# TestFileListener and multi-threading?
import random, sys, threading, time, os
from java.io import FileWriter
from java.util import ArrayList
from no.priv.garshol.duke import ConfigLoader, Processor, PropertyImpl, DukeConfigException, InMemoryLinkDatabase, Link, LinkKind, LinkStatus
from no.priv.garshol.duke.utils import ObjectUtils, LinkFileWriter, TestFileUtils, LinkDatabaseUtils
from no.priv.garshol.duke.matchers import TestFileListener, PrintMatchListener, AbstractMatchListener
POPULATION_SIZE = 100
GENERATIONS = 100
EXAMPLES = 10
SHOW_CONFIGS = True

def score(count):
    '''Scoring function from original paper, slightly skewed towards wrong
    matches.'''
    # Algebraically identical to the paper's
    # (P - count) * (P - (P - count)) with P = POPULATION_SIZE.
    return (POPULATION_SIZE - count) * count
def score2(count):
    """Adjusted scoring function to balance it."""
    return count + score(count)
def score3(count):
    """Helper scoring function to emphasize correct matches."""
    return count
class MostImportantExemplarsTracker(AbstractMatchListener):
    """Counts how many configurations matched each record pair, and picks
    the pairs most worth presenting to the user/oracle."""

    def __init__(self):
        self._counts = {} # (id1, id2) -> count

    def matches(self, r1, r2, conf):
        # Called once per configuration that considers (r1, r2) a match.
        key = makekey(getid(r1), getid(r2))
        self._counts[key] = self._counts.get(key, 0) + 1

    def get_examples(self):
        """Return up to EXAMPLES unresolved record pairs, best-scored first.

        Relies on the module globals `generation`, `linkdb` and `alldb`.
        """
        # In the first generation emphasize probable correct matches;
        # afterwards use the balanced scoring function.
        if generation == 0:
            func = score3
        else:
            func = score2
        # Pairs the link database can already infer need no question.
        ex = [(func(count), (id1, id2)) for ((id1, id2), count)
              in self._counts.items()
              if not linkdb.inferLink(id1, id2)]
        ex.sort()
        ex.reverse()
        ex = ex[ : EXAMPLES]
        return [(alldb.findRecordById(id1), alldb.findRecordById(id2))
                for (sc, (id1, id2)) in ex]
def makekey(id1, id2):
    """Return the two ids as a canonically ordered (smaller, larger) pair."""
    if id2 < id1:
        return (id2, id1)
    return (id1, id2)
def getid(r):
    """Return the first non-empty identity property value of record r."""
    for idprop in idprops:
        value = r.getValue(idprop.getName())
        if value:
            return value
    raise Exception("!!!")
def pick_examples(population):
    """Run every configuration and collect the most informative pairs."""
    tracker = MostImportantExemplarsTracker()
    for candidate in population:
        run_with_config(candidate, tracker)
    return tracker.get_examples()
# def pick_examples(population):
# set1 = get_all(config.getDataSources(1))
# set2 = get_all(config.getDataSources(2))
# return [(random.choice(set1), random.choice(set2)) for ix
# in range(EXAMPLES)]
def ask_the_user(population):
    """Ask the user (or the gold-standard oracle) about selected pairs.

    Answers are asserted into the global `linkdb` and appended to
    answers.txt so a session can be reconstructed later.
    """
    for (r1, r2) in pick_examples(population):
        if SHOW_CONFIGS:
            PrintMatchListener.prettyCompare(r1, r2, 0.0, '=' * 75, properties)
            print
            print 'SAME? (y/n)',
        if golddb:
            # Oracle mode: consult the gold-standard test file instead of
            # prompting; pairs it knows nothing about are assumed false.
            link = golddb.inferLink(getid(r1), getid(r2))
            if not link:
                if SHOW_CONFIGS:
                    print ' ASSUMING FALSE'
                resp = False
            else:
                resp = link.getKind() == LinkKind.SAME
                if SHOW_CONFIGS:
                    print ' ORACLE SAYS', resp
        else:
            # Interactive mode: any answer other than 'y' counts as no.
            resp = (raw_input().strip().lower() == 'y')
        if resp:
            kind = LinkKind.SAME
        else:
            kind = LinkKind.DIFFERENT
        linkdb.assertLink(Link(getid(r1), getid(r2), LinkStatus.ASSERTED, kind))
        outf = open('answers.txt', 'a')
        outf.write(str(Link(getid(r1), getid(r2), LinkStatus.ASSERTED, kind)) + '\n')
        outf.close()
def get_all(datasources):
    """Collect the records from every data source into a single list."""
    records = []
    for source in datasources:
        records.extend(source.getRecords())
    return records
def round(num):
    """Truncate num to two decimal places (intentionally shadows builtin)."""
    hundredths = int(num * 100)
    return hundredths / 100.0
def generate_random_configuration():
    """Build a configuration with random threshold, comparators and bounds."""
    conf = GeneticConfiguration()
    conf.set_threshold(round(random.uniform(lowlimit, 1.0)))
    for name in props:
        if name == "ID":
            # Identity properties carry no comparator or probabilities.
            conf.add_property(PropertyImpl(name))
        else:
            low = round(random.uniform(0.0, 0.5))
            high = round(random.uniform(0.5, 1.0))
            conf.add_property(
                PropertyImpl(name, random.choice(comparators), low, high))
    return conf
def show_best(best, show = True):
    """Print the best configuration's score and its full ancestry chain."""
    print
    print "BEST SO FAR: %s" % index[best]
    if show:
        print best
    parent = best.get_parent()
    while parent:
        print "DERIVED FROM:", parent, index[parent]
        parent = parent.get_parent()
    print
def parent_info(c):
    """Describe c's parent as '#rank, score', or '' if it has none."""
    parent = c.get_parent()
    if parent:
        return "#%s, %s" % (parent.get_rank(), index[parent])
    return ""
def shortname(comparator):
    """Strip the package prefix and object address from a comparator repr."""
    text = str(comparator)  # no...comparators.NumericComparator@6c742397
    return text[text.rfind(".") + 1 : text.find("@")]
class Aspect:
    """One tunable aspect of a configuration that the genetic search may
    modify; subclasses implement modify/get/set."""
class ThresholdAspect(Aspect):
    """Aspect controlling the overall matching threshold."""

    def modify(self, conf):
        # Re-draw the threshold uniformly within the allowed range.
        conf.set_threshold(round(random.uniform(lowlimit, 1.0)))

    def get(self, conf):
        return conf.get_threshold()

    def set(self, conf, new_value):
        conf.set_threshold(new_value)
class PropertyPropertyAspect(Aspect):
    """Aspect bound to one named property and one of its setter methods."""

    def __init__(self, name, method):
        self._name = name
        self._method = method  # e.g. 'setComparator', 'setLowProbability'

    def modify(self, conf):
        """Randomize this aspect's value on conf's matching property."""
        if self._method == "setComparator":
            new_value = random.choice(comparators)
        elif self._method == "setLowProbability":
            new_value = round(random.uniform(0.0, 0.5))
        else:
            new_value = round(random.uniform(0.5, 1.0))
        getattr(self._get_prop(conf), self._method)(new_value)

    def get(self, conf):
        # Derive the getter name from the setter ('setX' -> 'getX').
        getter = "g" + self._method[1 : ]
        return getattr(self._get_prop(conf), getter)()

    def set(self, conf, new_value):
        getattr(self._get_prop(conf), self._method)(new_value)

    def _get_prop(self, conf):
        # Returns None if no property carries our name.
        for prop in conf.get_properties():
            if prop.getName() == self._name:
                return prop
class GeneticConfiguration:
    """A candidate Duke configuration acting as an individual in the
    genetic search: a threshold plus a list of PropertyImpl objects,
    with mutation and mating operators."""

    def __init__(self, parent = None):
        # parent: configuration this one was derived from (for lineage
        # reporting); None for members of the random initial population.
        self._props = []
        self._threshold = 0.0
        self._parent = parent
        self._rank = None

    def set_threshold(self, threshold):
        self._threshold = threshold

    def add_property(self, prop):
        self._props.append(prop)

    def get_properties(self):
        return self._props

    def get_threshold(self):
        return self._threshold

    def get_parent(self):
        return self._parent

    def set_rank(self, rank):
        self._rank = rank

    def get_rank(self):
        return self._rank

    def make_new(self, population):
        # either we make a number or random modifications, or we mate.
        # draw a number, if 0 modifications, we mate.
        mods = random.randint(0, 3)
        if mods:
            return self._mutate(mods)
        else:
            return self._mate(random.choice(population))

    def _mutate(self, mods):
        # Apply `mods` random aspect changes to a copy of this config.
        c = self._copy()
        for ix in range(mods):
            aspect = random.choice(aspects)
            aspect.modify(c)
        return c

    def _mate(self, other):
        # For each aspect, inherit the value from either parent at random.
        c = self._copy()
        for aspect in aspects:
            aspect.set(c, aspect.get(random.choice([self, other])))
        return c

    def _copy(self):
        # Deep-copies the properties; the copy records self as its parent.
        c = GeneticConfiguration(self)
        c.set_threshold(self._threshold)
        for prop in self.get_properties():
            if prop.getName() == "ID":
                c.add_property(PropertyImpl(prop.getName()))
            else:
                c.add_property(PropertyImpl(prop.getName(),
                                            prop.getComparator(),
                                            prop.getLowProbability(),
                                            prop.getHighProbability()))
        return c

    def __str__(self):
        props = ["[Property %s %s %s %s" % (prop.getName(),
                                            shortname(prop.getComparator()),
                                            prop.getLowProbability(),
                                            prop.getHighProbability())
                 for prop in self._props]
        return "[GeneticConfiguration %s %s]" % \
            (self._threshold, " ".join(map(str, props)))

    def __eq__(self, other):
        # NOTE(review): properties present only in `other` are ignored;
        # assumes both configurations cover the same property names.
        if self._threshold != other.get_threshold():
            return False
        for myprop in self._props:
            for yourprop in other.get_properties():
                if myprop.getName() == yourprop.getName():
                    if myprop.getComparator() != yourprop.getComparator():
                        return False
                    if myprop.getLowProbability() != yourprop.getLowProbability():
                        return False
                    if myprop.getHighProbability() != yourprop.getHighProbability():
                        return False
                    break
        return True

    def __hash__(self):
        # Order-insensitive sum of component hashes; equal configurations
        # hash equally, which is what the memoization index needs.
        h = hash(self._threshold)
        for prop in self._props:
            h += hash(prop.getComparator())
            h += hash(prop.getLowProbability())
            h += hash(prop.getHighProbability())
        return h
def evaluate(tstconf, linkdb, pessimistic = False):
    """Compute tstconf's f-number against linkdb; 0.0 if the config fails.

    Results are memoized into the global `index` dict, keyed by the
    configuration object.
    """
    # if index.has_key(tstconf):
    #     return index[tstconf]
    testfile = TestFileListener(linkdb, config, False,
                                processor, False, True)
    testfile.setQuiet(True)
    testfile.setPessimistic(pessimistic)
    try:
        run_with_config(tstconf, testfile)
    except DukeConfigException:
        # this means there's no way to get above the threshold in this config.
        # we consider that total failure, and just return.
        print "FAILED"
        index[tstconf] = 0.0
        return 0.0
    f = testfile.getFNumber()
    if f > 1.0:
        # An f-measure above 1.0 is impossible; abort loudly so the bug
        # gets noticed rather than silently skewing the search.
        sys.exit(1)
    index[tstconf] = f
    return f
def run_with_config(tstconf, listener):
    """Install tstconf into the global config/processor and run matching,
    reporting matches only to the given listener."""
    config.getProperties().clear()
    config.setThreshold(tstconf.get_threshold())
    config.setProperties(ArrayList(tstconf.get_properties()))
    processor.getListeners().clear()
    processor.addMatchListener(listener)
    # NOTE(review): both branches call linkRecords; confirm that this is
    # intended for deduplication mode as well.
    if not linking:
        processor.linkRecords(config.getDataSources())
    else:
        processor.linkRecords(config.getDataSources(2), False)
# (0) decode command-line
# argv[1]: Duke config file; argv[2] (optional): gold-standard test file
# used as an oracle so the script answers its own questions.
configfile = sys.argv[1]
linkdb = InMemoryLinkDatabase()
linkdb.setDoInference(True)
if len(sys.argv) == 3:
    golddb = InMemoryLinkDatabase()
    golddb.setDoInference(True)
    LinkDatabaseUtils.loadTestFile(sys.argv[2], golddb)
else:
    golddb = None
# (1) load configuration
config = ConfigLoader.load(configfile)
properties = config.getProperties()[:]
idprops = config.getIdentityProperties()
linking = not config.isDeduplicationMode()
if linking:
    lowlimit = 0.0
else:
    lowlimit = 0.4
# (2) index up all the data
processor = Processor(config)
alldb = processor.getDatabase()
if not linking:
    processor.index(config.getDataSources(), 40000)
else:
    processor.index(config.getDataSources(1), 40000)
    processor.index(config.getDataSources(2), 40000)
if linking:
    # Build a second index holding only data source 1, used for matching.
    config.setPath((config.getPath() or '/tmp/duke-active-ix-') + '2') # AHEM...
    processor = Processor(config)
    database = processor.getDatabase()
    # NOTE(review): `linking` is known True here, so only the else branch
    # below can run; the first branch appears to be dead code.
    if not linking:
        processor.index(config.getDataSources(), 40000)
    else:
        processor.index(config.getDataSources(1), 40000)
else:
    database = alldb
# start each run with a clean slate of recorded answers
try:
    import os
    os.unlink('answers.txt')
except OSError:
    pass
# (3) actual genetic stuff
pkg = "no.priv.garshol.duke.comparators."
comparators = ["DiceCoefficientComparator",
               "DifferentComparator",
               "ExactComparator",
               "JaroWinkler",
               "JaroWinklerTokenized",
               "Levenshtein",
               "NumericComparator",
               "PersonNameComparator",
               "SoundexComparator",
               "WeightedLevenshtein",
               "NorphoneComparator",
               "MetaphoneComparator",
               "QGramComparator",
               "GeopositionComparator"]
comparators = [ObjectUtils.instantiate(pkg + c) for c in comparators]
# (a) generate 100 random configurations
if linking:
    src = config.getDataSources(2).iterator().next()
else:
    src = config.getDataSources().iterator().next()
props = [col.getProperty() for col in src.getColumns()]
# preparation
aspects = [ThresholdAspect()]
for prop in props:
    if prop != "ID":
        aspects.append(PropertyPropertyAspect(prop, "setComparator"))
        aspects.append(PropertyPropertyAspect(prop, "setLowProbability"))
        aspects.append(PropertyPropertyAspect(prop, "setHighProbability"))
population = []
for ix in range(POPULATION_SIZE):
    c = generate_random_configuration()
    population.append(c)
# (b) evaluate each configuration by running through data
index = {}
for generation in range(GENERATIONS):
    print "===== GENERATION %s ===================================" % generation
    # now, ask the user to give us some examples
    if generation % 2 == 0 or generation == 1:
        ask_the_user(population)
    best = None
    highest = 0.0
    # evaluate
    for ix in range(len(population)):
        c = population[ix]
        if SHOW_CONFIGS:
            print c, "#", ix
        f = evaluate(c, linkdb)
        if SHOW_CONFIGS:
            print " ", f, parent_info(c)
        if f > highest:
            best = c
            highest = f
            if SHOW_CONFIGS:
                show_best(best, False)
    # make new generation: sort best-first by memoized score
    population = sorted(population, key = lambda c: 1.0 - index[c])
    for ix in range(len(population)):
        population[ix].set_rank(ix + 1)
    if SHOW_CONFIGS:
        print "SUMMARY:", [index[c] for c in population], "avg:", (sum([index[c] for c in population]) / float(POPULATION_SIZE))
    # ditch lower quartile ++
    population = population[ : int(POPULATION_SIZE * 0.7)]
    # double upper quartile
    population = (population[ : int(POPULATION_SIZE * 0.02)] +
                  population[ : int(POPULATION_SIZE * 0.03)] +
                  population[: int(POPULATION_SIZE * 0.25)] +
                  population[: int(POPULATION_SIZE * 0.25)] +
                  population[int(POPULATION_SIZE * 0.25) : ])
    population = [c.make_new(population) for c in population]
if golddb:
    # Final held-out evaluation of the best configuration found.
    print "EVALUATING BEST:", best, evaluate(best, golddb, True), parent_info(best)
show_best(best)
| Python |
'''
Genetic algorithm for automatically creating a configuration. See
http://code.google.com/p/duke/wiki/GeneticAlgorithm
for information on how to use it.
NOTE that this script has now been superceded by Java code in the
no.priv.garshol.duke.genetic package.
'''
import random, sys, threading, time, os
from java.util import ArrayList
from no.priv.garshol.duke import ConfigLoader, Processor, PropertyImpl, DukeConfigException
from no.priv.garshol.duke.utils import ObjectUtils
from no.priv.garshol.duke.matchers import TestFileListener
SOUND = False # This works only on MacOS X, using the 'say' command
POPULATION_SIZE = 100
POPULATIONS = 100
SHOW_CONFIGS = True

def round(num):
    """Truncate num to two decimal places (intentionally shadows builtin)."""
    hundredths = int(num * 100)
    return hundredths / 100.0
def one_is_alive(threads):
    """True if at least one of the given threads is still running."""
    return any(thread.isAlive() for thread in threads)
def generate_random_configuration():
    """Build a configuration with random threshold, comparators and bounds."""
    conf = GeneticConfiguration()
    conf.set_threshold(round(random.uniform(lowlimit, 1.0)))
    for name in props:
        if name == "ID":
            # Identity properties carry no comparator or probabilities.
            conf.add_property(PropertyImpl(name))
        else:
            low = round(random.uniform(0.0, 0.5))
            high = round(random.uniform(0.5, 1.0))
            conf.add_property(
                PropertyImpl(name, random.choice(comparators), low, high))
    return conf
def show_best(best, show = True):
    # Announce a new best configuration: optional audio cue, its score
    # (looked up in the global `index`), optionally the configuration
    # itself, and then its full ancestry chain.
    if SOUND:
        os.system('say new best')
    print
    print "BEST SO FAR: %s" % index[best]
    if show:
        print best
    # Walk up the derivation chain so we can see how this config evolved.
    parent = best.get_parent()
    while parent:
        print "DERIVED FROM:", parent, index[parent]
        parent = parent.get_parent()
    print
def parent_info(c):
    """Summarise c's parent as '#rank, score', or '' for a root config."""
    ancestor = c.get_parent()
    if not ancestor:
        return ""
    return "#%s, %s" % (ancestor.get_rank(), index[ancestor])
def shortname(comparator):
    """Reduce a Java object repr like 'no...NumericComparator@6c742397'
    to the bare class name ('NumericComparator')."""
    text = str(comparator)
    return text[text.rfind(".") + 1 : text.find("@")]
class Aspect:
    """Base class for one tunable facet of a configuration.

    Subclasses implement modify(conf) (randomise the facet),
    get(conf) and set(conf, value).
    """
class ThresholdAspect(Aspect):
    """Aspect controlling the configuration's global match threshold."""

    def modify(self, conf):
        # Draw a fresh threshold in [lowlimit, 1.0], two decimals.
        conf.set_threshold(round(random.uniform(lowlimit, 1.0)))

    def get(self, conf):
        return conf.get_threshold()

    def set(self, conf, value):
        conf.set_threshold(value)
class PropertyPropertyAspect(Aspect):
    """Aspect for one setter (comparator / low / high probability) of one
    named property in the configuration."""

    def __init__(self, name, method):
        self._name = name      # property name, e.g. "NAME"
        self._method = method  # Java setter name, e.g. "setComparator"

    def modify(self, conf):
        target = self._get_prop(conf)
        if self._method == "setComparator":
            new_value = random.choice(comparators)
        elif self._method == "setLowProbability":
            new_value = round(random.uniform(0.0, 0.5))
        else:
            new_value = round(random.uniform(0.5, 1.0))
        getattr(target, self._method)(new_value)

    def get(self, conf):
        # Derive the getter name from the setter: setX -> getX.
        getter = "g" + self._method[1 : ]
        return getattr(self._get_prop(conf), getter)()

    def set(self, conf, value):
        getattr(self._get_prop(conf), self._method)(value)

    def _get_prop(self, conf):
        # Locate the PropertyImpl carrying our name (None if absent).
        for candidate in conf.get_properties():
            if candidate.getName() == self._name:
                return candidate
class GeneticConfiguration:
    """One candidate Duke configuration: a match threshold plus one
    PropertyImpl per record property, with lineage metadata (parent,
    rank) used for reporting."""
    def __init__(self, parent = None):
        # parent: the configuration this one was derived from, or None
        # for a randomly generated root.
        self._props = []
        self._threshold = 0.0
        self._parent = parent
        self._rank = None
    def set_threshold(self, threshold):
        self._threshold = threshold
    def add_property(self, prop):
        self._props.append(prop)
    def get_properties(self):
        return self._props
    def get_threshold(self):
        return self._threshold
    def get_parent(self):
        return self._parent
    def set_rank(self, rank):
        # 1-based position after sorting a generation best-first.
        self._rank = rank
    def get_rank(self):
        return self._rank
    def make_new(self, population):
        # either we make a number or random modifications, or we mate.
        # draw a number, if 0 modifications, we mate.
        mods = random.randint(0, 3)
        if mods:
            return self._mutate(mods)
        else:
            return self._mate(random.choice(population))
    def _mutate(self, mods):
        # Apply `mods` random aspect changes to a copy of this config.
        c = self._copy()
        for ix in range(mods):
            aspect = random.choice(aspects)
            aspect.modify(c)
        return c
    def _mate(self, other):
        # Child takes each aspect's value from either parent at random.
        c = self._copy()
        for aspect in aspects:
            aspect.set(c, aspect.get(random.choice([self, other])))
        return c
    def _copy(self):
        # Copy with fresh PropertyImpl objects; the copy records this
        # configuration as its parent.
        c = GeneticConfiguration(self)
        c.set_threshold(self._threshold)
        for prop in self.get_properties():
            if prop.getName() == "ID":
                c.add_property(PropertyImpl(prop.getName()))
            else:
                c.add_property(PropertyImpl(prop.getName(),
                                            prop.getComparator(),
                                            prop.getLowProbability(),
                                            prop.getHighProbability()))
        return c
    def __str__(self):
        # NOTE(review): the "[Property ..." literal never closes its
        # bracket, so the rendered string is slightly malformed.
        props = ["[Property %s %s %s %s" % (prop.getName(),
                                            shortname(prop.getComparator()),
                                            prop.getLowProbability(),
                                            prop.getHighProbability())
                 for prop in self._props]
        return "[GeneticConfiguration %s %s]" % \
            (self._threshold, " ".join(map(str, props)))
    def __eq__(self, other):
        # Equal when thresholds match and every property of ours agrees
        # with the same-named property of `other`.
        if self._threshold != other.get_threshold():
            return False
        for myprop in self._props:
            for yourprop in other.get_properties():
                if myprop.getName() == yourprop.getName():
                    if myprop.getComparator() != yourprop.getComparator():
                        return False
                    if myprop.getLowProbability() != yourprop.getLowProbability():
                        return False
                    if myprop.getHighProbability() != yourprop.getHighProbability():
                        return False
                    break
        return True
    def __hash__(self):
        # Kept consistent with __eq__: combines the threshold with every
        # property's comparator and probabilities.
        h = hash(self._threshold)
        for prop in self._props:
            h += hash(prop.getComparator())
            h += hash(prop.getLowProbability())
            h += hash(prop.getHighProbability())
        return h
def evaluate(tstconf):
    # Score `tstconf` (F-number against the test file), memoised in the
    # global `index`. Mutates the shared Java `config`/`processor`, so the
    # ordering below (clear properties -> set -> clear listeners -> add)
    # must be preserved.
    # NOTE(review): dict.has_key is Python 2 / Jython only.
    if index.has_key(tstconf):
        return index[tstconf]
    config.getProperties().clear()
    config.setThreshold(tstconf.get_threshold())
    try:
        config.setProperties(ArrayList(tstconf.get_properties()))
    except DukeConfigException:
        # this means there's no way to get above the threshold in this config.
        # we consider that total failure, and just return.
        index[tstconf] = 0.0
        return 0.0
    testfile = TestFileListener(testfilename, config, False,
                                processor, False, True)
    testfile.setQuiet(True)
    # Replace any listener left over from the previous evaluation.
    processor.getListeners().clear()
    processor.addMatchListener(testfile)
    # NOTE(review): linkRecords is called in both modes -- for the
    # deduplication branch (`not linking`) one would expect deduplicate();
    # confirm against the Duke Processor API in use.
    if not linking:
        processor.linkRecords(config.getDataSources())
    else:
        processor.linkRecords(config.getDataSources(2))
    f = testfile.getFNumber()
    index[tstconf] = f
    return f
# (0) decode command-line
# Usage: <script> <duke-config.xml> <test-file>
(configfile, testfilename) = sys.argv[1 : ]
# (1) load configuration
config = ConfigLoader.load(configfile)
# An empty plain data-source list means the sources live in groups 1 and
# 2, i.e. record-linkage mode rather than deduplication.
linking = config.getDataSources().isEmpty()
if linking:
    lowlimit = 0.0
else:
    lowlimit = 0.4
# (2) index up all the data
processor = Processor(config)
database = processor.getDatabase()
# 40000 is presumably the indexing batch size -- confirm against the
# Duke Processor.index signature.
if not linking:
    processor.index(config.getDataSources(), 40000)
else:
    processor.index(config.getDataSources(1), 40000)
# (3) actual genetic stuff
# Instantiate one of each available comparator to draw from.
pkg = "no.priv.garshol.duke.comparators."
comparators = ["DiceCoefficientComparator",
               "DifferentComparator",
               "ExactComparator",
               "JaroWinkler",
               "JaroWinklerTokenized",
               "Levenshtein",
               "NumericComparator",
               "PersonNameComparator",
               "SoundexComparator",
               "WeightedLevenshtein",
               "NorphoneComparator",
               "MetaphoneComparator",
               "QGramComparator",
               "GeopositionComparator"]
comparators = [ObjectUtils.instantiate(pkg + c) for c in comparators]
# (a) generate 100 random configurations
# Pull the property names from whichever data source is in use.
if linking:
    src = config.getDataSources(2).iterator().next()
else:
    src = config.getDataSources().iterator().next()
props = [col.getProperty() for col in src.getColumns()]
# preparation
# One aspect per tunable knob: the global threshold, plus comparator and
# low/high probabilities for every non-ID property.
aspects = [ThresholdAspect()]
for prop in props:
    if prop != "ID":
        aspects.append(PropertyPropertyAspect(prop, "setComparator"))
        aspects.append(PropertyPropertyAspect(prop, "setLowProbability"))
        aspects.append(PropertyPropertyAspect(prop, "setHighProbability"))
population = []
for ix in range(POPULATION_SIZE):
    c = generate_random_configuration()
    population.append(c)
# (b) evaluate each configuration by running through data
# `index` maps configuration -> F-score; `best`/`highest` track the
# overall winner across generations.
index = {}
best = None
highest = 0.0
# Evolve for up to POPULATIONS generations, or until a perfect F-score.
for generation in range(POPULATIONS):
    print "===== GENERATION %s ===================================" % generation
    # Score every configuration in this generation.
    for ix in range(len(population)):
        c = population[ix]
        if SHOW_CONFIGS:
            print c, "#", ix
        f = evaluate(c)
        if SHOW_CONFIGS:
            print "  ", f, parent_info(c)
        if f > highest:
            best = c
            highest = f
            if SHOW_CONFIGS:
                show_best(best, False)
            # Stop scanning this generation early on a perfect score.
            if highest == 1.0:
                break
    # if we achieved a perfect score, just stop
    if highest == 1.0:
        break
    # make new generation
    # Sort best-first (the key inverts the score since sort is ascending)
    # and record each configuration's 1-based rank.
    population = sorted(population, key = lambda c: 1.0 - index[c])
    for ix in range(len(population)):
        population[ix].set_rank(ix + 1)
    if SHOW_CONFIGS:
        print "SUMMARY:", [index[c] for c in population], "avg:", (sum([index[c] for c in population]) / float(POPULATION_SIZE))
    else:
        print 'BEST: ', index[population[0]]
    # ditch lower quartile ++
    population = population[ : int(POPULATION_SIZE * 0.7)]
    # double upper quartile
    # The slices re-add the very best configs (2% + 3%) and duplicate the
    # top 25%, keeping the middle band: 2+3+25+25+45 = 100 configs.
    population = (population[ : int(POPULATION_SIZE * 0.02)] +
                  population[ : int(POPULATION_SIZE * 0.03)] +
                  population[: int(POPULATION_SIZE * 0.25)] +
                  population[: int(POPULATION_SIZE * 0.25)] +
                  population[int(POPULATION_SIZE * 0.25) : ])
    # Each survivor produces one child by mutation or mating.
    population = [c.make_new(population) for c in population]
# Final report of the best configuration found.
show_best(best)
| Python |
#"This is a pain in the butt"
class party:
    """Contact record for one party, with the ids of their cases."""

    def __init__(self):
        self.first_name = ""
        self.last_name = ""
        self.street_address = ""
        self.city = ""
        self.zip = ""
        self.mobile = ""
        self.home = ""
        self.work = ""
        self.email = ""
        self.cases = [4, 5, 2, 5, 3, 8]  # Case IDs (ints, placeholder data)
        self.account_id = ""

    def party_change(obj, string):
        # Not implemented yet.
        pass

    def case(self, string):
        """Print and return the first case id matching *string*, else None.

        Fixed: the original compared the str argument directly against the
        int ids (never equal) and printed the undefined name ``L``.
        """
        if type(string) == str:
            for item in self.cases:
                if string == str(item):
                    print(item)
                    return item
        return None

    def remove_case(string):
        # Not implemented yet.
        pass
| Python |
#!/usr/bin/python
import tkinter
#from tkinter.constants import *
#def prompt_question(question):
#class gui:
application = tkinter.Tk()

# Fixed: the original's only active widget line referenced `frame` and
# `question`, neither of which was defined (the frame/button code was
# commented out), so the module crashed with NameError on import.
frame = tkinter.Frame(application, relief=tkinter.RIDGE, borderwidth=17)
frame.pack(fill=tkinter.BOTH, expand=1)

question = "Placeholder question?"  # TODO: supply the real prompt text
label = tkinter.Label(frame, text=question)
label.pack(fill=tkinter.X, expand=1)

confirm = tkinter.Button(frame, text="Yes")
confirm.pack(side=tkinter.LEFT)
nope = tkinter.Button(frame, text="No")
nope.pack(side=tkinter.RIGHT)

application.mainloop()
| Python |
#Homicide
class crime:
    """Prototype model of a criminal offense (NY Penal Law flavoured).

    Nested classes sketch the elements of a crime: mens rea, actus reus,
    causation, the actor, and the prohibited social harm.
    """
    def set_person(name):
        # NOTE(review): ignores the incoming value and rebinds `name` to a
        # fresh crime.actor() -- presumably meant to set the actor's name;
        # confirm intent. Written without `self`, so called on the class.
        if type(name) == str:
            name = crime.actor()
            return name
    class person:
        # Name-mangled to _person__definition; readable from inside this
        # class body only.
        __definition = "\"Person,\" when referring to the victim of a homicide, means a human being who has been born and is alive."
        def show_definition():
            return(crime.person.__definition)
    # Aggravating-circumstance flag.
    driving_a_vehicle_while_intoxicated = False
    class vehicle:
        wieght_rating_of_more_than_eighteen_thousand_pounds = False
        #contains_flammable_gas_radioactive_matirals_or_explosives_in_violation_of_subdivision_one_of_section_eleven_hundred_ninety-two_of_the_vehicle_traffic_law = False
    class mens_rea:
        # The four culpable mental states, least to most culpable.
        possible_states = set(['criminal negligence','recklessly','knowingly','intentionally'])
        criminal_negligence = False
        recklessly = False
        knowingly = False
        intentionally = False
        state = ""
    class actus_reus:
        # Inchoate modes plus the completed ("actual") act.
        class criminal_soliciation:
            pass
        class conspiracy:
            pass
        class attempt:
            pass
        class criminal_facilitation:
            pass
        class actual: # or class mode?
            pass
    class causation:
        def __init__(self):
            self.state = False
        definition = "Did the actor proximately cause the aforementioned prohibited social harm? "
        def set_state(self):
            # Interactive: prompts the user and stores the raw input string.
            self.state = input(self.definition)
    class actor:
        def __init__(self):
            # Defaults to the defendant's name when a module-level `party`
            # object is available; otherwise blank.
            try:
                self.name = party.defendant.name
            except:
                self.name = ""
        def show_name(self):
            return self.name
    class prohibited_social_harm:
        # Catalogue of offense categories (NY Penal Law article headings).
        prohibited_social_harms = set(['criminal solicitation','conspiracy','attempt','criminal facilitation','assault and related offenses','strangulation and related offenses','homicide,abortion and related offenses','sex offenses','kidnapping, coercion and related offenses','burglary and related offenses','criminal mischief and related offenses','arson','larceny','offenses involving computers; definition of terms','welfare fraud','robbery','misapplicaiton of property','unauthorized use of a vehicle','unlawful use of secret scientific matieral','unauthorized us of a vehicle in the first degree','auto strpping','theft of services','unauthorized sale of certain transportation services','unlawful use of credit card, debit card or public benefit card','fraudulently obtaining a signature','jostling','fraudulent accosting','fortune telling','criminal possession of stolen property','trademark counterfeiting','seizure and destruction of goods bearing counterfeit trademarks','forgery','criminal possession of a forged instrument','criminal simulation','criminal possession of an anti-security item','unawlfully using slugs','forgery of a vehicle identification number','illegal possession of a vehicle identificatio number','fraudluent making of an electronic access device'])
        class homicide:
            definition = "Homicide means conduct which causes the death of a person or an unborn child with which a female has been pregnant for more than twenty-four weeks under circumstances constituting murder, manslaughter in the first degree, manslaughter in the second degree, criminally negligent homicide, abortion in the first degree or self-abortion in the first degree." #NY Penal Law 125.00
            class manslaughter:
                definition = ""
                class first_degree:
                    definition = ""
        class abortion:
            pass
            #justifiable_abortional_act
    class psh(prohibited_social_harm):
        # Short alias for prohibited_social_harm.
        pass
    class victim:
        pass
#police_officer_in_the_course_of_performing_regular_duties
#peace_officer_in_the_course_of_performing_regular_duties
# class victim(person):
#class causation:
# def definition():
# return "Homicide means conduct which causes the death of a person or an unborn child with which a female has been pregnant for more than twenty-four weeks under circumstances constituting murder, manslaughter in the first degree, manslaughter in the second degree, criminally negligent homicide, abortion in the first degree or self-abortion in the first degree."
#
# class person:
# def definition():
# return "\"Person,\" when referring to the victim of a homicide, means a human being who has been born and is alive." #PEN 125.05 1.
#
# class abortional_act:
# def definition():
# return "\"Abortional act\" means an act committed upon or with respect to a female, whether by another person or by the female herself, whether she is pregnant or not, whether directly upon her body or by the administering, taking or prescription of drugs or in any other manner, with intent to cause a miscarriage of such female."
#
# class justfiable_abortional_act(abortional_act):
# def definition():
# return "An abortional act is justifiable when committed upon a female with her consent by a duly licensed physician acting (a) under a reasonable belief that such is necessary to preserve her life, or, (b) within twenty-four weeks from the commencement of her pregnancy. A pregnant female's commission of an abortional act upon herself is justifiable when she acts upon the advice of a duly licensed physician (1) that such act is necessary to preserve her life, or, (2) within twenty-four weeks from the commencement of her pregnancy. The submission by a female to an abortional act is justifiable when she believes that it is being committed by a duly licensed physician, acting under a reasonable belief that such act is necessary to preserve her life, or, within twenty-four weeks from the commencement of her pregnancy."
#
# class criminally_negligent_homicide:
| Python |
from tkinter import *
from tkinter import ttk
class display:
    """Question/entry window built while the class body executes.

    NOTE(review): all widgets are created as class attributes, so simply
    importing this module opens the window.
    """

    def __init__(self):
        # Instance-level default; the widgets use the class attribute
        # `question` defined below.
        self.question = "question"

    root = Tk()
    root.title("Tom's sample program")
    mainframe = ttk.Frame(root, padding="3 8 13 18")
    mainframe.pack()

    def getvalue(event=None):
        # Echo the entry's current text. Bound both as a Button command
        # (no args) and to the '<l>' key event (receives an event), hence
        # the optional parameter (fixed: the original took no arguments,
        # so the key binding raised TypeError).
        arb = display.entry.get()
        print(arb)

    frame = Frame(mainframe)
    frame.pack()
    # Fixed: `display.question` was referenced here, but the class name is
    # not bound until the class body finishes executing (NameError); use a
    # plain class attribute instead.
    question = "question"
    query = Label(frame, text=question)
    query.grid(column=0, row=0, columnspan=2)
    entry = Entry(frame, width=40)
    entry.grid(row=1, column=0, columnspan=2)
    # Fixed: `display.getvalue` is likewise unavailable during the class
    # body; refer to the function object directly.
    confirm = Button(frame, text="Enter", command=getvalue)
    confirm.grid(row=2, column=0)
    confirm.bind('<l>', getvalue)
    cancel = Button(frame, text="No")  # , command=display.prohibited_social_harms)
    cancel.grid(row=2, column=1)
    bottomlabel = ttk.Label(frame, text=" ")
    # Fixed: `row-3` (subtraction of an undefined name) -> keyword row=3.
    bottomlabel.grid(row=3, column=0, columnspan=2)
    frame2 = Frame(mainframe)
    frame2.pack()

    class crime:
        """Skeleton of a crime record used by the prototype UI."""

        def __init__(self):
            # Fixed: the original __init__ lacked `self` and stored these
            # in throwaway locals.
            self.actus_reus = ""
            self.mens_rea = ""
            self.prohibited_social_harm = ""
            self.possible_mens_rea = ("negligence", "reckless", "knowing", "purposeful")
#crime.prohibited_social_harms()
## def askyesno(question):
## frame = Frame(display.mainframe)
## frame.grid()
## query = Label(frame, text = question)
## query.grid(column =0, row=0, columnspan = 2)
## yesButton = Button(frame, text="Yes")
## yesButton.grid(column = 0, row = 1)
## noButton = Button(frame, text="No")
## noButton.grid(column = 1, row = 1)
##
## def askyesnomaybe(question):
## frame = Frame(display.mainframe)
## frame.grid()
## query = Label(frame, text = question)
## query.grid(column =0, row=0, columnspan = 3)
## yesButton = Button(frame, text="Yes")
## yesButton.grid(column = 0, row = 1)
## noButton = Button(frame, text="No")
## noButton.grid(column = 1, row = 1)
## maybeButton = Button(frame, text="Should be litigated")
## maybeButton.grid(column = 2, row = 1)
| Python |
import tkinter
import tkinter.constants as TKC
import tkinter.ttk as ttk
import time
import calendar
#from tkinter.constants import *
# NOTE(review): creating a ttk.Style() before any Tk() instantiates the
# default root window implicitly -- confirm this ordering is intended.
style = ttk.Style()
style.configure("day_header", width=20, foreground="black", background="white", relief=TKC.FLAT)
#Python handles time in a funny fashion
localtime = time.localtime()
current_year = localtime[0]
current_month = localtime[1]  # 1-12
current_date = localtime[2]   # day of month
current_day = localtime[6]    # weekday, Monday == 0
days_in_current_month = calendar.monthrange(current_year, current_month)[1] #calendar.monthrange returns two values: the first day of the month AND the number of days in the month.
# NOTE(review): despite the name, this is computed for the *current*
# month, not the previous one -- verify.
days_in_previous_month = calendar.monthrange(current_year, current_month)[1]
def columnOneDate(column, weekday=None, date=None, year=None, month=None):
    """Return the day-of-month shown in calendar grid cell *column*
    (0 = Monday of the current week, 7-13 = next week).

    The keyword arguments default to the module-level "today" values and
    exist so the arithmetic can be tested with fixed dates (a
    backward-compatible generalization).

    Fixes over the original:
    - the seven-way weekday chain reduces to ``date + column - weekday``;
    - the previous month's length was computed from the *current* month;
    - the January wrap-around tested ``month == 0`` (months run 1-12).
    """
    if weekday is None:
        weekday = current_day
    if date is None:
        date = current_date
    if year is None:
        year = current_year
    if month is None:
        month = current_month

    calendar_date = date + column - weekday
    days_in_month = calendar.monthrange(year, month)[1]
    if calendar_date > days_in_month:
        # Ran past the end of this month: wrap into the next one.
        return calendar_date - days_in_month
    if calendar_date < 1:
        # Before the 1st: wrap back into the previous month (December of
        # the previous year when the current month is January).
        if month == 1:
            prev_year, prev_month = year - 1, 12
        else:
            prev_year, prev_month = year, month - 1
        return calendar_date + calendar.monthrange(prev_year, prev_month)[1]
    return calendar_date
application = tkinter.Tk()
CALENDARFRAME = tkinter.Frame(application)
CALENDARFRAME.pack()
# Row 0: weekday header labels, Monday through Sunday.
monday = tkinter.Label(CALENDARFRAME, text="Monday")
monday.grid(row = 0, column = 0,padx=15, pady=13)
tuesday = tkinter.Label(CALENDARFRAME, text="Tuesday")
tuesday.grid(row=0, column =1, padx=15, pady=13)
wednesday = tkinter.Label(CALENDARFRAME, text="Wednesday")
wednesday.grid(row=0, column=2, padx=15, pady=13)
thursday = tkinter.Label(CALENDARFRAME, text="Thursday")
thursday.grid(row=0, column =3, padx=15, pady=13)
friday = tkinter.Label(CALENDARFRAME, text="Friday")
friday.grid(row=0, column=4, padx=15, pady=13)
saturday = tkinter.Label(CALENDARFRAME, text="Saturday")
saturday.grid(row=0,column=5,padx=15, pady=13)
sunday = tkinter.Label(CALENDARFRAME, text="Sunday")
sunday.grid(row=0,column=6,padx=15, pady=13)
# Rows 1-2: one container Frame per day cell
# (A = current week, B = next week; columns 0-6 = Monday-Sunday).
labelA0 = tkinter.Frame(CALENDARFRAME)
labelA0.grid(row=1,column=0)
labelA1 = tkinter.Frame(CALENDARFRAME)
labelA1.grid(row=1,column=1)
labelA2 = tkinter.Frame(CALENDARFRAME)
labelA2.grid(row=1,column=2)
labelA3 = tkinter.Frame(CALENDARFRAME)
labelA3.grid(row=1,column=3)
labelA4 = tkinter.Frame(CALENDARFRAME)
labelA4.grid(row=1,column=4)
labelA5 = tkinter.Frame(CALENDARFRAME)
labelA5.grid(row=1,column=5)
labelA6 = tkinter.Frame(CALENDARFRAME)
labelA6.grid(row=1,column=6)
labelB0 = tkinter.Frame(CALENDARFRAME)
labelB0.grid(row=2,column=0)
labelB1 = tkinter.Frame(CALENDARFRAME)
labelB1.grid(row=2,column=1)
labelB2 = tkinter.Frame(CALENDARFRAME)
labelB2.grid(row=2,column=2)
labelB3 = tkinter.Frame(CALENDARFRAME)
labelB3.grid(row=2,column=3)
labelB4 = tkinter.Frame(CALENDARFRAME)
labelB4.grid(row=2,column=4)
labelB5 = tkinter.Frame(CALENDARFRAME)
labelB5.grid(row=2,column=5)
labelB6 = tkinter.Frame(CALENDARFRAME)
labelB6.grid(row=2,column=6)
#######################Day Buttons##############################
# One button per day cell, labelled with the day-of-month from
# columnOneDate(0..13).
# NOTE(review): ButtonA0 uses ttk with the "day_header" style configured
# above, and ButtonA1/ButtonB0 lack the relief=TKC.FLAT the other plain
# buttons have -- confirm these inconsistencies are intentional.
ButtonA0 = ttk.Button(labelA0, style="day_header", text=columnOneDate(0))
ButtonA0.grid(row=1,column=0)
ButtonA1 = tkinter.Button(labelA1, text=columnOneDate(1))
ButtonA1.grid(row=1,column=1)
ButtonA2 = tkinter.Button(labelA2, relief=TKC.FLAT, text=columnOneDate(2))
ButtonA2.grid(row=1,column=2)
ButtonA3 = tkinter.Button(labelA3, relief=TKC.FLAT, text=columnOneDate(3))
ButtonA3.grid(row=1,column=3)
ButtonA4 = tkinter.Button(labelA4, relief=TKC.FLAT, text=columnOneDate(4))
ButtonA4.grid(row=1,column=4)
ButtonA5 = tkinter.Button(labelA5, relief=TKC.FLAT, text=columnOneDate(5))
ButtonA5.grid(row=1,column=5)
ButtonA6 = tkinter.Button(labelA6, relief=TKC.FLAT, text=columnOneDate(6))
ButtonA6.grid(row=1,column=6)
ButtonB0 = tkinter.Button(labelB0, width=20, text=columnOneDate(7))
ButtonB0.grid(row=1,column=0)
ButtonB1 = tkinter.Button(labelB1, relief=TKC.FLAT, text=columnOneDate(8))
ButtonB1.grid(row=1,column=1)
ButtonB2 = tkinter.Button(labelB2, relief=TKC.FLAT, text=columnOneDate(9))
ButtonB2.grid(row=1,column=2)
ButtonB3 = tkinter.Button(labelB3, relief=TKC.FLAT, text=columnOneDate(10))
ButtonB3.grid(row=1,column=3)
ButtonB4 = tkinter.Button(labelB4, relief=TKC.FLAT, text=columnOneDate(11))
ButtonB4.grid(row=1,column=4)
ButtonB5 = tkinter.Button(labelB5, relief=TKC.FLAT, text=columnOneDate(12))
ButtonB5.grid(row=1,column=5)
ButtonB6 = tkinter.Button(labelB6, relief=TKC.FLAT, text=columnOneDate(13))
ButtonB6.grid(row=1,column=6)
| Python |
#
class time_tracker:
    """Records named time benchmarks, shared at class level.

    Fixes over the original: ``from time import *`` was local to
    ``__init__`` so ``time()`` raised NameError elsewhere; ``__init__``
    lacked ``self``; ``set_benchmark`` returned via the typo name
    ``time_rtime_tracker``; ``get_benchmark`` never returned anything.
    """

    benchmark = {}  # activity name -> timestamp from time.time()

    def __init__(self):
        from time import time
        time_tracker.begin_time = time()

    @staticmethod
    def set_benchmark(activity):
        """Stamp *activity* with the current time and return the stamp."""
        from time import time
        if isinstance(activity, str):
            time_tracker.benchmark[activity] = time()
            return time_tracker.benchmark[activity]

    @staticmethod
    def get_benchmark(activity):
        """Return the stored stamp for *activity*, or None."""
        if isinstance(activity, str):
            return time_tracker.benchmark.get(activity)
| Python |
#!/usr/bin/python
#from tkinter import *
# Name
# Class
# Teacher
# Room Location
# Schedule
# Days
# Time on said days
# Other information from syllabus
# Assignment for each day.
#
#Example (Hello, World):
import tkinter
import tkinter.constants as TKC
import tkinter.ttk as ttk
class View(object):
    """Application bootstrapper: creates the Tk root (published as the
    module-global ``root``) and then builds the menu bar and editor."""
    def __init__(self):
        global root
        root = tkinter.Tk()
        GlobalMenu()
        Editor()
class GlobalMenu:
    """Attaches the application's menu bar to the global Tk root."""

    def __init__(self):
        bar = tkinter.Menu(root)
        bar.add_command(label="File", command="")
        bar.add_command(label="Open", command="")
        bar.add_cascade(label="File")
        root.configure(menu=bar)

    def a_menu(self, parent):
        """Build a stand-alone Help menubutton inside *parent*."""
        self.global_menu_frame = tkinter.Frame(parent)
        self.global_menu_frame.pack(side=TKC.TOP)
        help_button = ttk.Menubutton(self.global_menu_frame, text='Help', underline=0)
        self.help_btn = help_button
        help_button.pack(side=TKC.LEFT, padx="2m")
        dropdown = tkinter.Menu(help_button, tearoff=0)
        self.help_btn.menu = dropdown
        dropdown.add_command(label="How To", underline=0, command="")
        dropdown.add_command(label="About", underline=0, command="")
        help_button['menu'] = dropdown
class Editor(object):
    """Main editor pane: a label, a canvas and a text widget in `root`."""

    def __init__(self):
        self.window_frame = tkinter.Frame(root)
        self.window_frame.pack()
        self.label = tkinter.Label(self.window_frame, text="Hello, World")
        self.label.pack(fill=TKC.X, expand=1)
        self.canvas = tkinter.Canvas(self.window_frame, borderwidth=4, width=30, height=15)
        self.canvas.pack(fill=TKC.BOTH, expand=0)
        self.text = tkinter.Text(self.canvas)
        self.text.config(font="Arial")
        self.text.pack(fill=TKC.Y, expand=0)

    def editorMenu(self):
        """Create and return the editor's menu.

        Fixed two defects: the menu was stored as ``self.editorMenu``,
        clobbering this very method on first call, and ``Menu`` was
        packed — tkinter menus are attached to windows, not packed.
        """
        self.editor_menu = tkinter.Menu(self.window_frame)
        return self.editor_menu
#return help_btn
## def left_frame():
## left_frame = tkinter.Frame(self.window_frame)
## left_frame.pack(side=LEFT)
## cheese = tkinter.Label(left_frame, text="something goes here")
## cheese.pack()
##
## def main_frame(self):
## self.main_frame = tkinter.Frame(self.window_frame, relief=RIDGE, borderwidth=2)
## self.main_frame.pack(fill=BOTH,expand=1)
##
## def menu_bar():
## menubar = tkinter.Menu(self.window_frame)
## menubar.add_command(label="Home", command=root.destroy)
## menubar.add_command(label="Calendar", command=root.destroy)
## menubar.add_command(label="Client Management", command=root.destroy)
## menubar.add_command(label="Case Management", command=root.destroy)
## menubar.add_separator()
## DropDown = menubar.add_cascade(label="DropDown")
## SecondDropDown = help_menu()
## SecondDropDown.pack()
## menubar.add_checkbutton (label="Green")
## menubar.add_radiobutton(label="Orange")
###menubar.insert_checkbutton(DropDown, label="JB")
### display the menu
## self.window_frame.config(menu=menubar)
##
## def text_field():
## label = tkinter.Label(self.main_frame, text="Hello, World")
## label.pack(fill=X, expand=1)
## cv = tkinter.Canvas(self.main_frame, borderwidth=4)
## self.main_frame.configure(background="black")
## cv.pack(fill=BOTH,expand = 0)
## spin = tkinter.Spinbox(cv, from_ = 0, to = 100)
## spin.pack(fill=X, expand=1)
## text = tkinter.Text(cv)
## text.config(font="Arial")
## text.pack(fill=Y, expand=0)
##
## def misc():
## button = tkinter.Button(self.main_frame, text="Exit",command=ben)
## button.pack(side=BOTTOM)
## entry = tkinter.Entry(self.main_frame)
## entry.delete(0, END)
## entry.insert(0, "a default value")
## entry.pack(side=BOTTOM)
## password = tkinter.Entry(self.main_frame, show="*")
## password.pack(side=BOTTOM)
# Build the UI (View publishes `root` as a module global) and enter the
# Tk event loop.
a= View()
root.mainloop()
| Python |
class CalendarHelp:
    """Date arithmetic for a two-week calendar grid.

    Grid ids 0-6 address the current (Monday-first) week and 7-13 the
    next week; smaller or larger ids address earlier/later weeks.
    """

    def __init__(self):
        from time import localtime
        # Month number (as a string key) -> short month name.
        self.Months = {'1':'Jan','2':'Feb','3':'Mar','4':'Apr','5':'May','6':'Jun','7':'Jul','8':'Aug','9':'Sep','10':'Oct','11':'Nov','12':'Dec'}
        # Python handles time in a funny fashion
        self.localtime = localtime()
        self.current_year = self.localtime[0]
        self.current_month = self.localtime[1]    # 1-12
        self.current_day = self.localtime[2]      # day of month
        self.current_weekday = self.localtime[6]  # Monday == 0

    def getCalendarDate(self, grid_number):
        """Return (year, month, day) for calendar cell *grid_number*.

        The cell's offset from today is ``grid_number - current_weekday``;
        the result is then normalised across month/year boundaries.

        Fixes over the original:
        - the backward wrap used ``while day < 0``, so a computed day of
          0 leaked through as an invalid date;
        - it added the *current* month's length when stepping backward
          instead of the previous month's.
        """
        from calendar import monthrange
        day = self.current_day + grid_number - self.current_weekday
        month = self.current_month
        year = self.current_year
        # Walk forward month by month while the day overruns the month.
        while day > monthrange(year, month)[1]:
            day = day - monthrange(year, month)[1]
            month = month + 1
            if month == 13:  # rolled past December
                month = 1
                year = year + 1
        # Walk backward while the day precedes the 1st; day 0 is the last
        # day of the previous month, hence the < 1 bound.
        while day < 1:
            if month == 1:  # rolled before January
                month = 12
                year = year - 1
            else:
                month = month - 1
            day = day + monthrange(year, month)[1]
        return (year, month, day)

    def getCalendarDateReadable(self, date_tuple=None):
        """Return 'Mon DD' for a (year, month, day) tuple; any non-tuple
        argument is treated as a grid id and resolved first."""
        if type(date_tuple) != tuple:  # usually a grid number
            date_tuple = self.getCalendarDate(date_tuple)
        month_string = self.Months[str(date_tuple[1])]
        return month_string + " " + str(date_tuple[2])
| Python |
class case:
    """Module-level registry for one case's parties and metadata.

    All state is class-level, so there is effectively a single shared
    case; methods are written without ``self`` and are meant to be
    called on the class itself (e.g. ``case.add_plaintiff(p)``).
    """

    plaintiffs = []
    prosecution = plaintiffs  # alias: shares the plaintiffs list
    defendants = []
    _statement = ""  # backing text for statement_of_case()
    criminal = False
    court = ""

    def add_plaintiff(party):
        case.plaintiffs.append(party)
        case.show_plaintiffs()

    def add_defendant(party):
        case.defendants.append(party)
        case.show_defendants()

    def remove_plaintiff(party):
        case.plaintiffs.remove(party)
        case.show_plaintiffs()

    def remove_defendant(party):
        # Fixed: previously removed from the *plaintiffs* list.
        case.defendants.remove(party)
        case.show_defendants()

    def show_plaintiffs():
        return case.plaintiffs

    def show_defendants():
        return case.defendants

    def statement_of_case():
        # Fixed: the text lived in a class attribute of the same name,
        # which this method definition silently shadowed; the text is now
        # stored in ``_statement``.
        return case._statement

    def modify_statement_of_case(str):
        # Fixed typo: wrote to ``statementment_of_case``. (Parameter name
        # kept for compatibility although it shadows the builtin.)
        case._statement = str
        return case._statement

    def criminal_case(var):
        """Set the criminal flag when given a bool; always return it."""
        if type(var) == bool:
            case.criminal = var
        return case.criminal
class jurisdiction:
    """Class-level flags describing where the action can be brought."""
    # True once federal subject-matter jurisdiction is established.
    federal_jurisdiction = False
    # States whose courts could hear the matter.
    state_jurisdiction = []
    class conflictOfLaws:
        """Choice-of-law notes when several states are implicated."""
        state_conflicts_law = ""
| Python |
#Rules.py
#Agencies should inherit
class FRCP:
    """Federal Rules of Civil Procedure, modelled as nested classes.

    NOTE(review): relies on module-level ``case`` and ``party`` objects
    defined elsewhere in the project; ``case.jurisdiction`` is compared
    to the string "Federal" below -- confirm that attribute's type.
    """

    def __init__(self):
        # Fixed: returning a value from __init__ raises TypeError at
        # runtime; raise a descriptive error instead.
        if case.jurisdiction != "Federal":
            raise ValueError("This is not a federal matter.")
        if case.criminal == True:
            raise ValueError(
                "This is not a civil case. Please use rules of criminal procedure.")

    class filing:
        def filing():
            case.civil_action = True  # FRCP 3

    class pleading:
        # Black's Law Dictionary definition.
        definition = "A formal document in which a party to a legal proceeding (esp. a civil lawsuit) sets forth or responds to allegations, claims, denials, or defenses. • In federal civil procedure, the main pleadings are the plaintiff's complaint and the defendant's answer."

    class complaint(pleading):
        # Inherits from pleading because Black's classifies a complaint
        # as a type of pleading.
        definition = "The initial pleading that starts a civil action and states the basis for the court's jurisdiction, the basis for the plaintiff's claim, and the demand for relief."

    class summons:
        class contents:  # FRCP 4(a)(1)
            court_name = ""
            plaintiffs = ""
            defendants = ""
            directed_to = ""  # the defendants, FRCP 4(a)(1)(C)
            #plaintiff_attorney.name = ""
            #plaintiff_attorney.address = ""
            time_to_appear = "date"
            notice_of_failure_to_appear = "notify the defendant that a failure to appear and defend will result in a default judgment against the defendant for the relief demanded in the complaint" #FRCP4(a)(1)(E)
            # Fixed: clerk_signature was assigned "" and then immediately
            # reassigned True; the duplicate is removed.
            clerk_signature = True
            court_seal = True

        class amendments:
            permissible = False
            #FRCP 4(a)(2)"The court may permit a summons to be amended."

        def issue():  # FRCP 4(b)
            # Present summons to clerk for signature and seal.
            if party.plaintiff.forma_pauperis == True:
                pass

    class service:
        def __init__(self):
            # Fixed: missing ``self``; FALSE -> False (was a NameError);
            # raise instead of returning strings from __init__.
            if FRCP.complaint.complete == False:
                raise ValueError("The complaint has not been completed yet")
            if FRCP.summons.complete == False:
                raise ValueError("The summons papers have not been completed yet.")
        # FRCP 4(c): the plaintiff must have the summons and complaint
        # served within the Rule 4(m) window; any non-party adult may
        # serve; a marshal or court appointee serves on request (and must
        # when the plaintiff proceeds in forma pauperis, 28 U.S.C. §1915).
def file_a_complaint(court):
    # Record the chosen forum and flag that a civil action has begun.
    # NOTE(review): ``case`` and ``rules`` must exist at module scope;
    # ``civil_action`` is a local that is immediately discarded --
    # presumably ``case.civil_action`` was intended. TODO confirm.
    case.court = court
    civil_action = True
    rules.complaint = True
def summons(plaintff, defendant, plaintiff_attorney, court):
    # Guard: a summons may only issue after a complaint is on file.
    # NOTE(review): parameter 'plaintff' is a typo for 'plaintiff' but is
    # kept to preserve the call signature; ``FRCP`` is defined elsewhere.
    # Falls through (returns None) when the complaint check passes.
    if FRCP.complaint() == False:
        return "File a complaint first."
def save(self):
    """Pickle *self* to the path held in ``self.filename``.

    Bug fixes relative to the original: the ``self`` parameter was
    missing (the body referenced an undefined name), the Python-2-only
    ``cPickle`` module was used without being imported, the file was
    opened in text mode (pickle requires binary), and ``f.close`` was
    referenced but never called, leaking the handle.  A context manager
    now guarantees the file is closed.
    """
    import pickle  # local import: this block's module header is elsewhere
    with open(self.filename, 'wb') as f:
        pickle.dump(self, f)
| Python |
from tkinter import *
from tkinter import ttk
def calculate(*args):
    """Convert the feet entry to meters, rounded to 4 decimal places.

    Bound to the Calculate button and the <Return> key; ``*args`` absorbs
    the event object tkinter passes to key bindings.  Non-numeric input is
    silently ignored.

    Bug fix: the round-half-up idiom requires int() truncation --
    ``int(x * 10000 + 0.5) / 10000`` -- whereas the original omitted the
    int() and so merely added a stray 0.00005 to every result.
    """
    try:
        value = float(display.feet.get())
        display.meters.set(int(0.3048 * value * 10000.0 + 0.5)/10000.0)
    except ValueError:
        pass  # leave the previous result in place on bad input
class display:
    # NOTE(review): this "class" is used as a namespace for module-level UI
    # state -- everything below executes once at class-definition time and
    # the widgets persist as class attributes.
    root = Tk()
    root.title("Feet to Meters")
    style = ttk.Style()
    style.theme_use('default')
    # Padding string is "left top right bottom" in pixels.
    mainframe = ttk.Frame(root, padding="3 3 12 12")
    mainframe.grid(column=0, row=0, sticky=(N, W, E, S))
    mainframe.columnconfigure(0, weight=1)
    mainframe.rowconfigure(0, weight=1)
    # StringVars backing the input (feet) and output (meters) fields.
    feet = StringVar()
    meters = StringVar()
    feet_entry = ttk.Entry(mainframe, width=7, textvariable=feet)
    feet_entry.grid(column=2, row=1, sticky=(W, E))
    ttk.Label(mainframe, textvariable=meters).grid(column=2, row=2, sticky=(W, E))
    ttk.Button(mainframe, text="Calculate", command=calculate).grid(column=3, row=3, sticky=W)
    ttk.Label(mainframe, text="feet").grid(column=3, row=1, sticky=W)
    ttk.Label(mainframe, text="is equivalent to").grid(column=1, row=2, sticky=E)
    ttk.Label(mainframe, text="meters").grid(column=3, row=2, sticky=W)
    # Status label used to demonstrate the mouse-event bindings below.
    l =Label(root, text="Starting...", width=50, bg="white", fg="brown")
    l.grid(column=1, row=3,sticky=N)
    # The lambdas reference display.l (not the class-body local) because the
    # class object exists by the time the events fire.
    l.bind('<Enter>', lambda e: display.l.configure(text='Moved mouse inside'))
    l.bind('<Leave>', lambda e: display.l.configure(text='Moved mouse outside'))
    l.bind('<1>', lambda e: display.l.configure(text='Clicked left mouse button'))
    l.bind('<Double-1>', lambda e: display.l.configure(text='Double clicked'))
    l.bind('<B3-Motion>', lambda e: display.l.configure(text='right button drag to %d,%d' % (e.x, e.y)))
    # Apply uniform padding to every gridded child of the main frame.
    for child in mainframe.winfo_children(): child.grid_configure(padx=5, pady=5)
    feet_entry.focus()
    root.bind('<Return>', calculate)  # Enter key triggers the conversion
    # NOTE(review): no mainloop() call appears in this chunk -- presumably
    # invoked elsewhere; verify.
| Python |
import tkinter
import tkinter.constants as TKC
import tkinter.ttk as ttk
import database #my own class for database functions.
#import UI #my own class. It is an empty class to hold tkinter UI variables, so that they are not in the global namespace.
import calendarHelp
# Module-level singletons: the one Tk root window for the whole app, and
# the shared database handle used by CalendarFrame.saveEvent().
application = tkinter.Tk()
DB = database.Database()
class CalendarFrame:
    """Two-row week calendar with an event-entry dialog.

    NOTE(review): this class is used as a namespace -- the widget-building
    statements below run once at class-definition time, the ``def``s take
    no ``self`` and are invoked as button callbacks that read/write
    ``CalendarFrame.*`` class attributes (valid because the callbacks only
    fire after the class object exists).
    """
    CALENDARFRAME = ttk.Frame(application)
    CALENDARFRAME.pack()
    Weekdays = ["Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"]
    #Creates the labels for days of the week at the top of the calendar
    for i in range(7):
        header = ttk.Label(CALENDARFRAME, text=Weekdays[i])
        header.grid(row=0, column=i, padx=15, pady=13)
    def saveEvent():
        """Collect the dialog fields into a dict and persist via DB.addEvent."""
        array = {}
        array["date"] = CalendarFrame.dateEntry.get()
        array["heading"] = CalendarFrame.headingEntry.get()
        array["details"] = CalendarFrame.detailsEntry.get('1.0','end') #The get method for the tkinter Text box is wierd, and has little documentation. see: http://www.tkdocs.com/tutorial/text.html
        # Best-effort: the location/time widgets only exist after the user
        # clicks "Show more", so their absence is deliberately swallowed.
        # NOTE(review): the bare except also hides any other error here.
        try:
            array["location"] = CalendarFrame.locationEntry.get()
            array["begin_time"] = CalendarFrame.begin_time.get()
            array["end_time"] = CalendarFrame.end_time.get()
        except:
            pass
        #print(array)
        DB.addEvent(array)
    def showMore():
        """Expand the dialog with location/time fields; swap the buttons."""
        #CalendarFrame.save.destroy()
        CalendarFrame.showmore.destroy()
        CalendarFrame.showless = ttk.Button(CalendarFrame.Frame1, text="Show less", command=CalendarFrame.showLess)
        CalendarFrame.showless.grid(row=3, column=2)
        CalendarFrame.moreDetailsFrame = ttk.Frame(CalendarFrame.dialog)
        CalendarFrame.moreDetailsFrame.pack(side=TKC.BOTTOM)
        CalendarFrame.locationLabel = ttk.Label(CalendarFrame.moreDetailsFrame, text="Location")
        CalendarFrame.locationLabel.grid()
        CalendarFrame.locationEntry = ttk.Entry(CalendarFrame.moreDetailsFrame)
        CalendarFrame.locationEntry.grid()
        CalendarFrame.invitees = ttk.Button(CalendarFrame.moreDetailsFrame, text = "Invitees")
        CalendarFrame.invitees.grid()
        CalendarFrame.begin_timeLabel = ttk.Label(CalendarFrame.moreDetailsFrame, text = "Begin Time")
        CalendarFrame.begin_timeLabel.grid()
        CalendarFrame.begin_time = ttk.Entry(CalendarFrame.moreDetailsFrame)
        CalendarFrame.begin_time.grid()
        CalendarFrame.end_timeLabel = ttk.Label(CalendarFrame.moreDetailsFrame, text = "End Time")
        CalendarFrame.end_timeLabel.grid()
        CalendarFrame.end_time = ttk.Entry(CalendarFrame.moreDetailsFrame)
        CalendarFrame.end_time.grid()
        CalendarFrame.save = ttk.Button(CalendarFrame.moreDetailsFrame, text="Save", command=CalendarFrame.saveEvent)
        CalendarFrame.save.grid()
    def showLess():
        """Collapse the extra detail fields; restore the compact buttons."""
        CalendarFrame.save.destroy()
        CalendarFrame.showless.destroy()
        CalendarFrame.moreDetailsFrame.destroy()
        CalendarFrame.save = ttk.Button(CalendarFrame.Frame1, text="Save", command=CalendarFrame.saveEvent)
        CalendarFrame.save.grid(row=3, column=3)
        CalendarFrame.showmore = ttk.Button(CalendarFrame.Frame1, text="Show more", command=CalendarFrame.showMore)
        CalendarFrame.showmore.grid(row=3, column=2)
    def createEvent():
        """Open the new-event dialog with date/heading/details fields."""
        CalendarFrame.dialog = tkinter.Toplevel(application)
        CalendarFrame.dialog.title("About this application...")
        CalendarFrame.Frame1 = ttk.Frame(CalendarFrame.dialog)
        CalendarFrame.Frame1.pack()
        CalendarFrame.dateLabel = ttk.Label(CalendarFrame.Frame1, text="Date:")
        CalendarFrame.dateLabel.grid(row=0, column=0)
        CalendarFrame.dateEntry = ttk.Entry(CalendarFrame.Frame1)
        CalendarFrame.dateEntry.grid(row=0, column=1)
        CalendarFrame.headingLabel = ttk.Label(CalendarFrame.Frame1, text="Heading:")
        CalendarFrame.headingLabel.grid(row=0, column =2)
        CalendarFrame.headingEntry = ttk.Entry(CalendarFrame.Frame1)
        CalendarFrame.headingEntry.grid(row=0, column=3)
        CalendarFrame.detailsLabel = ttk.Label(CalendarFrame.Frame1, text="Details")
        CalendarFrame.detailsLabel.grid(row=1, column = 0)
        CalendarFrame.detailsEntry = tkinter.Text(CalendarFrame.Frame1, height=4, width =50)
        CalendarFrame.detailsEntry.grid(row=2, column = 0, columnspan=4)
        CalendarFrame.save = ttk.Button(CalendarFrame.Frame1, text="Save", command=CalendarFrame.saveEvent)
        CalendarFrame.save.grid(row=3, column=3)
        CalendarFrame.showmore = ttk.Button(CalendarFrame.Frame1, text="Show more", command=CalendarFrame.showMore)
        CalendarFrame.showmore.grid(row=3, column=2)
    # Date-arithmetic helper used by the two calendar-row loops below.
    CH = calendarHelp.CalendarHelp()
    for i in range(7): #Creates the first calendar row of dates and events
        row1 = ttk.Frame(CALENDARFRAME) #Each day is wrapped in a frame
        row1.grid(row=1, column = i)
        button1 = ttk.Button(row1, text=CH.getDate(i), command=createEvent) #This button shows the month and day. Clicking the button will allow user to create new event on that day.
        button1.pack()
        label1 = ttk.Label(row1, text=CH.getDate(i)) #Each event on a day is in a Label widget. clicking the Event allows the user to modify the event.
        label1.bind('<1>', lambda e: print('r'))  # placeholder click handler
        label1.pack()
    for i in range(7): #Creates the second calendar row of dates and events
        row2 = ttk.Frame(CALENDARFRAME)
        row2.grid(row=2, column = i)
        label2 = ttk.Label(row2, text=CH.getDate(i+7))
        label2.pack()
# Enter the tkinter event loop; blocks until the main window is closed.
application.mainloop()
| Python |
########################## MODEL ######################################
class Model(object):
    """Persistence layer for calendar events.

    All methods are invoked through the class (``Model.addEvent(...)``)
    rather than on instances, so none of them take ``self``.
    """
    # Both statements execute once, at class-definition time: the database
    # module is imported and a shared connection to 'puffin' is opened.
    import database
    DB = database.Database('puffin')
    #OLD CODE: sql = sqlite3.connect('puffin') #connects to database. creates it if it does not exist.
    #OLD CODE: queryCurs = sql.cursor() #cursor allows you to run queries.
    def serializeDate(date_string):
        """Strip parentheses from a "(Jan, 1, 2001)"-style string and split it.

        NOTE(review): despite the comments below, this returns a *list*
        like ['Jan', '1', '2001'], not a dictionary.
        """
        #The DateString value will be a string "(Jan, 1, 2001)". This function removes the parentheses from the string and serializes the number values
        date_string = date_string.replace('(','')
        date_string = date_string.replace(')','')#http://www.daniweb.com/software-development/python/threads/71947
        date_string = date_string.split(', ') #http://www.astro.ufl.edu/~warner/prog/python.html
        return date_string
    def dateHelp(dictionary):
        """Add year/month/day keys derived from dictionary["date"]."""
        #Even though dateHelp appears to accept a dictionary value, it actually receives a string value from the addEvent() and modifyEvent(). serializeDate() will turn the string into a dictionary.
        # NOTE(review): positions [0]/[1]/[2] of the split string are mapped
        # to year/month/day -- this only matches a "(year, month, day)"
        # input ordering; confirm against the UI's date format.
        date_string = Model.serializeDate(dictionary["date"])
        dictionary.update({"year":date_string[0]})
        dictionary["month"]=date_string[1]
        dictionary["day"]=date_string[2]
        return dictionary
    def addEvent(dictionary):
        """Normalize the date fields and INSERT the event row."""
        #addEvent accepts a dictionary value
        #My database table has a separate column for day, month, and year. It also has a convenience column for all three values together.
        #Model.dateHelp() will take a string "(Jan, 1, 2001)", and transform it. This function removes the parentheses from the string for the convenience date column, and it serializes the date values for the d - m - y columns
        dictionary = Model.dateHelp(dictionary)
        Model.DB.insert("calendar", dictionary)
    def modifyEvent(dictionary):
        """Normalize the date fields and UPDATE the row keyed by event_id."""
        #modifyEvent accepts a dictionary value
        #My database table has a separate column for day, month, and year. It also has a convenience column for all three values together.
        #Model.dateHelp() will take a string "(Jan, 1, 2001)", and transform it. This function removes the parentheses from the string for the convenience date column, and it serializes the date values for the d - m - y columns
        dictionary = Model.dateHelp(dictionary)
        #(table_name, key_value, primary_key)
        Model.DB.update("calendar",dictionary,"event_id")
    def deleteEvent (event_id):
        """Delete the calendar row whose event_id matches."""
        #(table_name, key, value)
        Model.DB.delete("calendar","event_id",event_id)
    def getEventsHeadings(date):
        """Return [(event_id, heading), ...] for every event on *date*."""
        #get data from DB
        #OLD CODE: data = Model.queryCurs.execute("SELECT event_id, heading FROM calendar WHERE date='"+date+"'")
        #(table_name, parameters, requested_fields=*)
        # NOTE(review): the date value is interpolated straight into the
        # WHERE clause -- SQL injection risk if it is ever user-controlled.
        data = Model.DB.select("calendar","date='"+date+"'",["event_id","heading"])
        #transform SQL object into a list
        ldata = []
        for row in data:
            ldata.append(row)
        return ldata
    def getEventsDetails(event_id):
        """Return the full row for *event_id*, or None when not found."""
        #OLD CODE: k=Model.queryCurs.execute("SELECT * FROM calendar WHERE event_id='"+event_id+"'")
        # NOTE(review): same string-interpolated WHERE clause as above.
        k=Model.DB.select("calendar","event_id='"+event_id+"'")
        # Returns the first matching row only (falls through to None).
        for row in k:
            return row
########################## VIEW ######################################
import tkinter
import tkinter.constants as TKC
import tkinter.ttk as ttk
import calendarHelp
# Shared date-arithmetic helper used by CalendarView and WeekdayEvents.
CH = calendarHelp.CalendarHelp()
class View:
    def __init__(self):
        # Publish the single Tk root window as a module-level global so the
        # other view classes can attach their widgets to it.
        global root
        root = tkinter.Tk()
class GlobalMenu(object):
    """Floating menu window (appears to be work in progress)."""
    def __init__(self):
        # Separate top-level window to host the menu bar.
        self.window_frame = tkinter.Toplevel(root)
        #window_frame.pack()
        self.menu_frame = tkinter.Frame(self.window_frame)
        self.menu_frame.pack(side=TKC.TOP)
        self.label = tkinter.Label(self.menu_frame, text="a")  # placeholder text
        self.label.pack(side=TKC.TOP)
        # NOTE(review): the Menu is created but never attached to a window
        # via config(menu=...), so nothing is displayed yet.
        self.menu_bar = tkinter.Menu(self.label)
        #DropDown = menu_bar.add_cascade(label="Action")
        #DropDown.pack()
class CalendarView: #This creates the main screen when you are looking at the calendar
    """Two-week calendar grid with Previous / Current / Upcoming navigation."""
    Weekdays = ["Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"] #This is a little helper
    def __init__(self):
        """Build the navigation bar, weekday headers and the 14 day cells."""
        self.Cal = tkinter.Frame(root);self.Cal.pack() #Create the top level frame for the Calendar View
        self.week_counter = 0 #Contains state information for which week is being displayed. see showEventsOnCalendar() for more information.
        ######### STATE INFORMATION ###############
        week_counter = self.week_counter #Remember that week_counter was initialized in the __init__. See the comments below for how week_counter works.
        #positive values will display weeks in the future
        #negative values will display weeks in the past
        #if week_counter == 0: it will display the present
        #the value is always changed in increments of 2, because we display 2 weeks at a time. I think it will give me more control in the future.
        #see changeWeek() for more documentation
        ################ NAVIGATION BUTTONS #################
        #navigation ahead in time or backwards in time
        navigation_frame = tkinter.Frame(self.Cal); navigation_frame.pack() #Create the frame to hold the widgets
        goToPast = lambda x=self: self.showDifferentWeek("past") #defining the command to view past events
        last_week = tkinter.Button(self.Cal, text="Previous", command=goToPast); last_week.pack(side=TKC.LEFT, anchor=TKC.N) #creating the widget
        goToPresent = lambda x=self: self.showDifferentWeek("present") #defining the command to return to view of present week
        present = tkinter.Button(self.Cal, text="Current", command=goToPresent); present.pack(side=TKC.LEFT, anchor=TKC.N) #creating the widget
        goToFuture = lambda x=self: self.showDifferentWeek("future") #defining the command to view future events
        next_week = tkinter.Button(self.Cal, text="Upcoming", command=goToFuture); next_week.pack(side=TKC.LEFT, anchor=TKC.N) #creating the widget
        #Creating the M,T,W,R,F,S,S at the top of the calendar
        self.calendar_frame = tkinter.Frame(self.Cal)
        self.calendar_frame.pack()
        for i in range(7): #Create the Days as headers on top of the calendar
            #Create a frame with the text of the Frame corresponding to the day of the week. Example: 0 = Monday, 1=Tuesday, 2=Wednesday, etc.
            tkinter.Label(self.calendar_frame, text=CalendarView.Weekdays[i]).grid(row=0, column=i, padx=15, pady=13)
        initial_value = week_counter*7 #where week_counter=0: the view of the present week.
        #This is for the CalendarView.showDifferentWeek() to work. The list will hold all the objects that must be updated to show different weeks.
        self.events_row = []
        for i in range(initial_value, initial_value+7): #Creates the first calendar row of dates and events
            var = CalendarView.WeekdayEvents(self.calendar_frame, i)
            self.events_row.append(var)
        for i in range(initial_value+7,initial_value+14): #Creates the second calendar row of dates and events
            var = CalendarView.WeekdayEvents(self.calendar_frame, i, 2)
            self.events_row.append(var)
    def showDifferentWeek(self, switch):
        """Shift every day cell two weeks ('future'/'past') or reset to today ('present')."""
        if switch =="future":
            n = 14 #moves all days in the calendar grid ahead by 2 weeks
            self.week_counter = self.week_counter+2 #Increment the counter in a positive direction
        elif switch =="past":
            n = -14 #moves all days in the calendar grid behind by 2 weeks
            self.week_counter = self.week_counter-2 #Increment the counter in a negative direction
        elif switch =="present":
            n = self.week_counter * -7 #we only multiply the week_counter by 7 because the week_counter increments by 2. If week_counter incremented by one, we should multiply by 14 (negative, of course).
            #If pressed forward by 2 week, the week_counter would equal 4. All grid_objects would be 28 days ahead of the dates displayed by those grid_objects on present view
            self.week_counter = 0 # Reset the Counter when we return to view the present weeks.
        for i in self.events_row: #The objects in the events_row have an update() method that will handle destroying old memory and updating with new information.
            i.update(n)
        #One of the advantages of updating the information calendar grid by calendar grid, instead of simply asking the entire calendar to refresh, is this new method minimizes black flashing. tkinter performance in this regard is not ideal.
    class WeekdayEvents:
        """One day cell: a date button plus a button per event on that date."""
        def __init__(self, parent, i, _row=1):
            self.i = i #This allows the update() function to work. It specifies which day the event occurs on
            self.row = _row #This allows the update() function to work. It specifics which row the event occurs in.
            self.parent = [parent] #This allows the update() function to work. It holds a reference to the parent object
            self.registry = [] #This holds all sub-objects of WeekdayView so that old objects can be destroyed during the update() function
            date = CH.getCalendarDate(i)
            #Retrieves a tuple of the date (YYYY,MM,DD)
            readable_date = CH.getCalendarDateReadable(date) #Converts date tuple (2012, 01, 01) into string "Jan 01"
            weekday = tkinter.Frame(parent, height=13, width=18) #Each day is wrapped in a frame
            self.register(weekday) #Only the weekday frame needs to be registered, because when the frame is destroyed, all sub-objects will be destroyed as well.
            weekday.grid(row=_row, column = i%7, sticky=TKC.N)
            # Bind the date as a default argument so each cell's callback
            # captures its own date (avoids the late-binding closure trap).
            createEvent = lambda x = date: EventView.createEvent(x)
            button = tkinter.Button(weekday, text=readable_date, command=createEvent, relief=tkinter.GROOVE) #lambda: EventView.createEvent(date)) #This button shows the month and day. Clicking the button will allow user to create new event on that day.
            button.pack()
            event_info = CalendarController.getHeading(date)
            if event_info != None:
                for row in event_info:
                    event_id = str(row[0])
                    button_text = row[1]
                    modifyEvent = lambda y = event_id: EventView.modifyEvent(y)
                    eventButton = tkinter.Button(weekday, relief=tkinter.FLAT, text=button_text, command=modifyEvent) #Each event on a day is in a Label widget. clicking the Event allows the user to modify the event.
                    eventButton.pack(fill=tkinter.BOTH, expand=1)
            else:
                tkinter.Label(weekday).pack()
            # Subscribe this cell to refresh notifications for its date.
            Observer.register([self,str(date)])
        def update(self, i=0):
            """Destroy this cell's frame and rebuild it shifted by *i* days."""
            self.registry[0].destroy() #remove old data from memory. One might think this is removing the object from the Observer list, but instead, it is destroying the old frame.
            #self.parent is a list containing a reference to the parent frame. [0] points to the only object in the list
            #self.i specifies which day of the week that the event belongs to.
            #self.row specifies which row (1st or 2nd) that the event belongs to.
            self.__init__(self.parent[0], self.i+i, self.row)
        def register(self, subobject):
            """Track *subobject* so update() can destroy it later."""
            self.registry.append(subobject)
        def event_day(self, ifgj):
            # NOTE(review): unused stub -- parameter name looks accidental.
            pass
class EventView:
    """Modal dialog for creating and editing a single calendar event.

    NOTE(review): ``modifyEvent`` and ``createEvent`` are invoked as plain
    functions (button callbacks), not methods -- each builds its own
    EventView instance and names it ``self`` locally.
    """
    def __init__(self):
        self.dialog = tkinter.Toplevel(root)
        self.dialog.title("Event Details")
        escape = lambda e: self.dialog.destroy()# Pressing Escape closes the dialog
        self.dialog.bind("<Escape>", escape)
        self.window_frame = ttk.Frame(self.dialog)
        self.window_frame.pack()
    def modifyEvent(event_id):
        """Open the dialog pre-filled with the stored details of *event_id*."""
        #Most of the stuff in here is tkinter boilerplate
        total_columns=4 #This is the total number of columns in the grid layout
        self = EventView()
        event_details = CalendarController.getEventDetails(event_id)
        event_details = list(event_details)
        print (event_details) #For debugging DEBUGGING
        for i in range(len(event_details)): #Turns None values into empty string values. It first finds the number of variables contained in event_details (which can be an arbitrary number as I expand my program). For each variable in event_details, perform the followin check.
            if event_details[i] == None:
                event_details[i] = ""
        # NOTE(review): positional row indexes below ([1]=date, [5]=heading,
        # [6]=details, [7]/[8]=times, [9]=location) must match the calendar
        # table's column order -- verify against the schema.
        date = event_details[1]
        self.content_frame = ttk.Frame(self.window_frame)
        self.content_frame.pack()
        self.dateLabel = ttk.Label(self.content_frame, text = "Date:"+date)
        self.dateLabel.pack()
        # Hidden state the controller reads back on save/delete.
        self.EVENTID = tkinter.StringVar()
        self.EVENTID.set(event_id)
        self.DATE = tkinter.StringVar()
        self.DATE.set(date)
        self.dateLabel.grid(row=0, column=0)
        self.headingLabel = ttk.Label(self.content_frame, text="Heading:")
        self.headingLabel.grid(row=0, column =2)
        self.headingEntry = ttk.Entry(self.content_frame)
        self.headingEntry.insert(0,event_details[5])
        self.headingEntry.grid(row=0, column=3)
        self.detailsLabel = ttk.Label(self.content_frame, text="Details")
        self.detailsLabel.grid(row=1, column = 0)
        self.detailsEntry = tkinter.Text(self.content_frame, height=4, width =50)
        self.detailsEntry.insert(1.0, event_details[6])
        self.detailsEntry.grid(row=2, column = 0, columnspan=total_columns)
        self.locationLabel = ttk.Label(self.content_frame, text="Location")
        self.locationLabel.grid(columnspan=total_columns)
        self.locationEntry = ttk.Entry(self.content_frame)
        self.locationEntry.insert(0, event_details[9])
        self.locationEntry.grid(columnspan=total_columns)
        self.invitees = ttk.Button(self.content_frame, text = "Invitees")
        self.invitees.grid(columnspan=total_columns)
        self.begin_timeLabel = ttk.Label(self.content_frame, text = "Begin Time")
        self.begin_timeLabel.grid(columnspan=total_columns)
        self.begin_time = ttk.Entry(self.content_frame)
        self.begin_time.insert(0, event_details[7])
        self.begin_time.grid(columnspan=total_columns)
        self.end_timeLabel = ttk.Label(self.content_frame, text = "End Time")
        self.end_timeLabel.grid(columnspan=total_columns)
        self.end_time = ttk.Entry(self.content_frame)
        self.end_time.insert(0, event_details[8])
        self.end_time.grid(columnspan=total_columns)
        # Bind this dialog instance as a default argument so the controller
        # callbacks receive it when the buttons fire.
        deleteEvent = lambda b = self: CalendarController.deleteEvent(b)
        self.deleteButton = ttk.Button(self.content_frame, text = "Delete", command=deleteEvent)
        self.deleteButton.grid(columnspan=total_columns)
        modifyEvent = lambda y = self: CalendarController.modifyEvent(y)
        self.save = ttk.Button(self.content_frame, text="Save", command=modifyEvent)
        self.save.grid(columnspan=total_columns)
    def createEvent(date):
        """Open an empty dialog for a new event on *date*."""
        self = EventView()
        self.content_frame = tkinter.Frame(self.window_frame)
        self.content_frame.pack()
        self.dateLabel = ttk.Label(self.content_frame, text="Date:"+str(date))
        # Hidden state the controller reads back when saving.
        self.DATE = tkinter.StringVar()
        self.DATE.set(date)
        self.dateLabel.grid(row=0, column=0)
        self.headingLabel = ttk.Label(self.content_frame, text="Heading:")
        self.headingLabel.grid(row=0, column =2)
        self.headingEntry = ttk.Entry(self.content_frame)
        self.headingEntry.grid(row=0, column=3)
        self.detailsLabel = ttk.Label(self.content_frame, text="Details")
        self.detailsLabel.grid(row=1, column = 0)
        self.detailsEntry = tkinter.Text(self.content_frame, height=4, width =50)
        self.detailsEntry.grid(row=2, column = 0, columnspan=4)
        self.saveEvent = lambda x = self: CalendarController.saveEvent(x)
        self.save = ttk.Button(self.content_frame, text="Save", command=self.saveEvent)
        self.save.grid(row=3, column=3)
        self.showMore = lambda y = self: EventView.showMore(y)
        self.showmore = ttk.Button(self.content_frame, text="Show more", command=self.showMore)
        self.showmore.grid(row=3, column=2)
    def showMore(self):
        """Expand the dialog with location/time fields; swap the toggle button."""
        self.showmore.destroy()
        # The instance attribute (a lambda) shadows this class method on the
        # instance -- intentional here, so the button calls the bound lambda.
        self.showLess = lambda y = self: EventView.showLess(y) #This is wierd, but it works. Is the this lambda function overwriting the already defined method in the instantiated object?
        self.showless = ttk.Button(self.content_frame, text="Show less", command=self.showLess)
        self.showless.grid(row=3, column=2)
        self.moreDetailsFrame = ttk.Frame(self.dialog)
        self.moreDetailsFrame.pack(side=TKC.BOTTOM)
        self.locationLabel = ttk.Label(self.moreDetailsFrame, text="Location")
        self.locationLabel.grid()
        self.locationEntry = ttk.Entry(self.moreDetailsFrame)
        self.locationEntry.grid()
        self.invitees = ttk.Button(self.moreDetailsFrame, text = "Invitees")
        self.invitees.grid()
        self.begin_timeLabel = ttk.Label(self.moreDetailsFrame, text = "Begin Time")
        self.begin_timeLabel.grid()
        self.begin_time = ttk.Entry(self.moreDetailsFrame)
        self.begin_time.grid()
        self.end_timeLabel = ttk.Label(self.moreDetailsFrame, text = "End Time")
        self.end_timeLabel.grid()
        self.end_time = ttk.Entry(self.moreDetailsFrame)
        self.end_time.grid()
        self.second_save = ttk.Button(self.moreDetailsFrame, text="Save", command=self.saveEvent)
        self.second_save.grid()
    def showLess(self):
        """Collapse the extra detail fields and restore the 'Show more' button."""
        self.second_save.destroy()
        self.showless.destroy()
        self.moreDetailsFrame.destroy()
        self.showmore = ttk.Button(self.content_frame, text="Show more", command=self.showMore)
        self.showmore.grid(row=3, column=2)
class Schedule:
    """Placeholder schedule pane (currently just empty frames on root)."""
    def __init__(self):
        #self.dialog = tkinter.Toplevel(root)
        #self.dialog.title("Event Details")
        self.window_frame = ttk.Frame(root)
        self.window_frame.pack()
        self.content_frame = tkinter.Frame(self.window_frame)
        self.content_frame.pack()
################### OBSERVER ##################################
class Subject:
    """Minimal publish/subscribe registry (classic Observer pattern).

    Each registered observer is a two-item sequence ``[listener, tag]``;
    ``notify(tag)`` calls ``listener.update()`` on every entry whose tag
    equals the modifier, letting calendar cells refresh themselves when
    the date they display changes.
    """

    def __init__(self):
        # Registered [listener, tag] pairs, kept in registration order.
        self._observers = []

    def register(self, observer):
        """Add *observer* unless an equal entry is already registered."""
        if observer not in self._observers:
            self._observers.append(observer)

    def deregister(self, observer):
        """Remove *observer*; unknown entries are silently ignored."""
        try:
            self._observers.remove(observer)
        except ValueError:
            pass

    def notify(self, modifier):
        """Call ``update()`` on every observer whose tag equals *modifier*."""
        for entry in self._observers:
            if entry[1] == modifier:
                entry[0].update()
# Module-level singleton: day cells register here and controller code
# broadcasts date-specific refreshes via Observer.notify(date).
Observer = Subject()
########################## CONTROLLER ######################
class CalendarController:
    """Glue between the tkinter EventView dialogs and the Model layer.

    All methods are called through the class (no ``self``); each receives
    the EventView instance whose widgets hold the user's input.
    """
    def getUserInputFromCalendar(eventObject = None):
        """Read the dialog widgets into a plain dict for the Model."""
        # NOTE(review): the None default is immediately dereferenced below,
        # so calling this without an eventObject raises AttributeError.
        array = {}
        array["date"] = eventObject.DATE.get()
        array["heading"] = eventObject.headingEntry.get()
        array["details"] = eventObject.detailsEntry.get('1.0','end') #The get method for the tkinter Text box is wierd, and has little documentation. see: http://www.tkdocs.com/tutorial/text.html
        # Best-effort: the location/time widgets only exist after the user
        # clicks "Show more".  NOTE(review): the bare except also hides any
        # other failure here.
        try:
            array["location"] = eventObject.locationEntry.get()
            array["begin_time"] = eventObject.begin_time.get()
            array["end_time"] = eventObject.end_time.get()
        except:
            pass
        return array
    def getHeading(date):
        """Return the (event_id, heading) rows for *date* from the Model."""
        a= Model.getEventsHeadings(str(date))
        return a
    def getEventDetails(event_id):
        """Return the full stored row for *event_id*."""
        return Model.getEventsDetails(event_id)
    def deleteEvent(eventObject):
        """Delete the dialog's event, close the dialog and refresh its day cell."""
        event_id = eventObject.EVENTID.get()
        print(event_id+"!!!!!!!!!")  # debugging output
        date = eventObject.DATE.get()
        Model.deleteEvent(event_id)
        eventObject.dialog.destroy()
        Observer.notify(date)
    def saveEvent(eventObject):
        """Persist a new event from the dialog and refresh its day cell."""
        array = CalendarController.getUserInputFromCalendar(eventObject)
        eventObject.dialog.destroy()
        Model.addEvent(array)
        Observer.notify(array["date"])
    def modifyEvent(eventObject):
        """Persist edits to an existing event and refresh its day cell."""
        array = CalendarController.getUserInputFromCalendar(eventObject)
        array["event_id"] = eventObject.EVENTID.get()
        eventObject.dialog.destroy()
        Model.modifyEvent(array)
        Observer.notify(array["date"])
class Controller:
    # NOTE(review): these calls execute at class-definition time -- the
    # class statement itself bootstraps the UI; Controller is never
    # instantiated.
    View()
    CalendarView()
    #Schedule()
    #GlobalMenu()
# Enter the tkinter event loop (blocks until the window closes).
root.mainloop() #root must be called outside of the Controller Class, or else you can't call functions within the Controller class (03-01-2012)
| Python |
import sqlite3
class Database:
    """Thin convenience wrapper around one sqlite3 connection.

    NOTE(review): table names, column names and WHERE-clause fragments
    are still interpolated into the SQL text (sqlite cannot parameterize
    identifiers), so callers must never pass untrusted strings for those.
    Row *values* now always go through sqlite3's ``?`` parameter binding.
    """

    def __init__(self, databaseName="puffin"):
        # Bug fix: databaseName was previously ignored and the connection
        # hard-wired to the 'puffin' file.  The default preserves the old
        # behavior; the file is created if it does not exist.
        self.sql = sqlite3.connect(databaseName)
        self.queryCurs = self.sql.cursor()  # cursor used for all queries

    def select(self, table_name, params, requested_fields="*"):
        """Execute SELECT <fields> FROM <table> WHERE <params>; return the cursor.

        params: a WHERE fragment string, or a list of fragments joined with AND.
        requested_fields: a column-name string, or a list of column names.

        Bug fixes: the original joined list arguments with lstrip(' AND ')
        / lstrip(', '), which strips a *character set* rather than a prefix
        (eating a leading 'A', 'N', 'D' or space from the first item), and
        then ignored the joined WHERE string entirely, concatenating the
        raw ``params`` object into the SQL -- a TypeError whenever params
        was a list.  str.join builds both clauses correctly.
        """
        if isinstance(params, list):
            parameters = " AND ".join(params)
        else:
            parameters = params
        if isinstance(requested_fields, list):
            fields = ", ".join(requested_fields)
        else:
            fields = requested_fields
        return self.queryCurs.execute(
            "SELECT " + fields + " FROM " + table_name + " WHERE " + parameters)

    def insert(self, table_name, key_value):
        """INSERT one row built from the key: value pairs in *key_value*.

        Keys whose value is None, "" or " " are skipped; kept string values
        are stripped of surrounding whitespace.  Values are bound with ``?``
        placeholders so sqlite3 sanitizes them.
        """
        columns = []
        valuesList = []
        for column, value in key_value.items():
            if value not in (None, "", " "):  # keep only meaningful values
                columns.append(column)
                valuesList.append(value.strip())
        placeholders = ", ".join("?" * len(valuesList))  # one ? per value
        self.queryCurs.execute(
            "INSERT INTO " + table_name + " (" + ", ".join(columns) + ")"
            " VALUES (" + placeholders + ")", valuesList)
        self.sql.commit()  # persist the change to disk

    def update(self, table_name, key_value, primary_key="id"):
        """UPDATE the row whose *primary_key* equals key_value[primary_key].

        Bug fixes: the table name was hard-coded to 'calendar', ignoring
        the table_name argument, and the key value was interpolated into
        the SQL text; both the SET values and the key are now bound with
        ``?`` placeholders.  Keys with empty values and the primary key
        itself are excluded from the SET clause, as before.
        """
        assignments = []
        valuesList = []
        for column, value in key_value.items():
            if column != primary_key and value not in (None, "", " "):
                assignments.append(column + " = ?")
                valuesList.append(value.strip())
        valuesList.append(key_value[primary_key])
        self.queryCurs.execute(
            "UPDATE " + table_name + " SET " + ", ".join(assignments)
            + " WHERE " + primary_key + " = ?", valuesList)
        self.sql.commit()

    def delete(self, table_name, key="id", value="None"):
        """DELETE every row where <key> equals <value> (value bound safely)."""
        self.queryCurs.execute(
            "DELETE FROM " + table_name + " WHERE " + key + " = ?", (value,))
        self.sql.commit()

    def create_table(self, database_name):
        """Create the calendar table in *database_name*.

        Opens its own short-lived connection (as the original did) and
        raises sqlite3.OperationalError if the table already exists.
        Bug fix: the original called ``Database.sqlite3.connect``, which
        raised AttributeError -- the module-level sqlite3 import is used
        instead, and the connection is always closed via try/finally.
        """
        table_sql = '''CREATE TABLE calendar
        (event_id INTEGER PRIMARY KEY,
        date VARCHAR(255) NOT NULL,
        year INT(5),
        month INT(2),
        day INT(2),
        heading VARCHAR(255) NOT NULL,
        details TEXT,
        begin_time VARCHAR(255),
        end_time VARCHAR(255),
        location TEXT,
        invitees TEXT,
        attendees TEXT,
        case_id INT,
        assignment_type VARCHAR(255),
        assignment_id INT,
        work_product_url TEXT,
        sequence BOOL,
        sequence_ancestors TEXT,
        sequence_dependents TEXT,
        removable BOOL,
        restricted_modification INT
        )'''
        sql_connection = sqlite3.connect(database_name)
        try:
            sql_connection.cursor().execute(table_sql)
            sql_connection.commit()
        finally:
            sql_connection.close()
#except:
# print('did not work')
#a = Database()
#a.create_table("puffin")
#a.addEvent({"heading":"b","date":"d"})
| Python |
import tkinter
import tkinter.constants as TKC
import tkinter.ttk as ttk
class View:
    def __init__(self):
        # Publish the single Tk root window as a module-level global so the
        # other UI classes in this module can attach widgets to it.
        global root
        root = tkinter.Tk()
class tk:
    # NOTE(review): a namespace for widget helpers, not an instantiable
    # class -- Entry is called as a plain function (no ``self`` parameter).
    # The name shadows the conventional `tk` alias for tkinter; verify that
    # nothing else in the file relies on that alias.
    def Entry(parent, varRegister):
        # The caller-supplied list keeps a reference to the StringVar so it
        # is not garbage-collected while the widget is alive.
        variable = tkinter.StringVar()
        varRegister.append(variable)
        self = tkinter.Entry(parent, textvariable=variable)  # 'self' is just a local name here
        self.pack()
        # Debug companion button: prints the entry's current contents.
        button = tkinter.Button(parent, text="button", command=lambda: print(variable.get()))
        button.pack()
class Scheduler:
    """Form-based wizard for entering a class schedule.

    NOTE(review): relies on the module-level ``root`` window and the
    ``tkinter``/``ttk`` imports from the surrounding file.
    """

    def __init__(self):
        self.window_frame = tkinter.Frame(root)
        self.window_frame.pack()
        self.answers = []  # collected answers (not yet populated in this chunk)
        # NOTE(review): "What is the end date of the class?" appears twice --
        # looks like a copy/paste slip, but the tuple is not consumed in this
        # chunk, so the wording is preserved verbatim.  TODO confirm.
        self.questions = (
            "What is name of the class?",
            "What is the name of the teacher?",
            "What is the start date of the class?",
            "What is the end date of the class?",
            "What is the end date of the class?",
            "How many classes are there?")
        self.moreDetailedQuestion = (
            "What is the room location?",
            "What is the time?",
            "What are the days of the week?"
        )

    def firstFrame(self):
        """Build the 'Class Details' form inside window_frame.

        Bug fix: the original assigned the result of ``Widget(...).pack()``
        / ``Frame(...).grid()`` to the ``self.*`` attributes.  Geometry-
        manager calls return None, so every attribute (including
        ``self.entry2``, which printer() reads) was None and the child
        widgets were silently parented to the default root.  Widgets are
        now created first and packed afterwards.
        """
        from tkinter import StringVar
        class_name = StringVar()
        self.values = {"class_name": class_name,
                       "teacher_name": StringVar(),
                       "begin_date": StringVar(),
                       "end_date": StringVar()}
        self.content_frame = tkinter.Frame(self.window_frame)
        self.content_frame.grid()
        self.headingEntry = ttk.Entry(self.content_frame)
        self.headingEntry.pack()
        self.explanation_label = tkinter.Label(self.content_frame, text="Class Details")
        self.explanation_label.pack()
        tkinter.Label(self.content_frame, text="").pack()  # blank spacer row
        self.label1 = tkinter.Label(self.content_frame, text="Class Name")
        self.label1.pack()
        self.entry1 = ttk.Entry(self.content_frame)
        self.entry1.pack()
        self.label2 = tkinter.Label(self.content_frame, text="Teacher Name")
        self.label2.pack()
        self.entry2 = ttk.Entry(self.content_frame)
        self.entry2.pack()
        self.label3 = tkinter.Label(self.content_frame, text="Begin Date*")
        self.label3.pack()
        self.entry3 = ttk.Entry(self.content_frame)
        self.entry3.pack()
        # NOTE(review): as in the original, the End Date widgets overwrite
        # self.label3/self.entry3, leaving the Begin Date widgets unnamed.
        self.label3 = tkinter.Label(self.content_frame, text="End Date*")
        self.label3.pack()
        self.entry3 = ttk.Entry(self.content_frame)
        self.entry3.pack()
        self.label4 = tkinter.Label(self.content_frame, text="Number of Classes*")
        self.label4.pack()
        self.entry4 = ttk.Entry(self.content_frame)
        self.entry4.pack()
        self.explanatory_note = tkinter.Label(self.content_frame, text="* = Optional")
        self.explanatory_note.pack()
        tkinter.Button(self.content_frame, command=self.printer, text="Proceed").pack()

    def printer(self):
        """Debug hook for the Proceed button: echo the teacher-name entry."""
        print(self.entry2.get())
## def firstFrame(self):
## total_width=2
## from tkinter import StringVar
## #class_name = StringVar()
## #self.values = {"class_name":class_name,"teacher_name":StringVar(),"begin_date":StringVar(),"end_date":StringVar()}
## self.content_frame = tkinter.Frame(self.window_frame).grid()
## self.explanation_label = tkinter.Label(self.content_frame, text="Class Details").grid(row=1, column=1)
## self.label1 = tkinter.Label(self.content_frame, text="Class Name").grid(row=1, column=0)
## self.entry1 = tkinter.Entry(self.content_frame).grid(row=1, column=1)
## self.label2 = tkinter.Label(self.content_frame, text= "Teacher Name").grid(row=2, column=0)
## self.entry2 = tkinter.Entry(self.content_frame).grid(row=2, column=1)
## #frame = tkinter.Frame(self.content_frame, relief=tkinter.RAISED).grid(row=3,columnspan=2)
## self.label3 = tkinter.Label(self.content_frame, text="Begin Date*").grid(column=0)
## self.entry3 = tkinter.Entry(self.content_frame).grid(row=3,column=1)
## self.label3 = tkinter.Label(self.content_frame, text="End Date*").grid(row=4, column=0)
## self.entry3 = tkinter.Entry(self.content_frame).grid(row=4, column=1)
## self.label4 = tkinter.Label(self.content_frame, text="Number of Classes*").grid()
## self.entry4 = tkinter.Entry(self.content_frame).grid(row=5, column=1)
## self.explanatory_note = tkinter.Label(self.content_frame, text="* Optional").grid()
#tkinter.Button(self.content_frame, command=lambda: print(self.values["class_name"]), text="Proceed").grid()
## def promptQuestion(self, questions, i=0):
## j = len(questions)
## try:
## a=self.answer.get()
## answers.append(a)
## except:
## pass
## if i < j:
## question="What is the class name"
## question_label = ttk.Label(self.window_frame, text = question)
## question_label.pack()
## self.answer = ttk.entry(self.window_frame)
## self.answer.pack()
## confirm = ttk.button(self.window_frame, command=self.promptQuestion)
## confirm.pack()
## cancel = ttk.button(self.window_frame, command=self.destroy())
## cancel.pack()
## def processQuestion(self, answers, i):
## pass
def secondFrame(self):
total_width=1
self.content_frame = tkinter.Frame(self.window_frame)
self.content_frame.pack()
self.explanation_label = tkinter.Label(self.content_frame)
self.explanation_label.grid(row=0,column=0, columnspan=total_width)
self.weekday_frame = tkinter.Frame(self.content_frame)
self.weekday_frame.grid(row=1, column=0)
Weekdays = ('Mon','Tues','Wedn','Thurs','Fri','Sat','Sun')
self.begin_time = ['','','','','','','']
self.weekday_values = [tkinter.StringVar(),tkinter.StringVar(),tkinter.StringVar(),tkinter.StringVar(),tkinter.StringVar(),tkinter.StringVar(),tkinter.StringVar()]
for i in range(7):
a = tkinter.Label(self.weekday_frame, text = Weekdays[i])
a.grid(row=0, column = i)
self.begin_time[i] = ttk.OptionMenu(self.weekday_frame, self.weekday_values[i], "00:00","8:00", "8:30","9:00","9:30","10:00","10:30","11:00","11:30","12:00","12:30","1:00","1:30","2:00","2:30","3:00","3:30","4:00","4:30")
self.begin_time[i].grid(row=1, column = i)
self.time_sheet_frame = tkinter.Frame(self.content_frame)
self.time_sheet_frame.grid()
a = tkinter.Listbox(self.time_sheet_frame, selectmode=tkinter.EXTENDED)
a.insert(TKC.END, "Bem","Bob","Bill","Bar")
a.grid()
self.b = tkinter.Menubutton(self.time_sheet_frame, text="b", relief="raised")
self.c = tkinter.Menu(self.b, tearoff=0)
self.c.add_cascade(label="file")
self.c.add_command(label="exit")
self.b.grid()
var=tkinter.StringVar()
ttk.OptionMenu(self.time_sheet_frame, var, "a","b","c").grid()
#self.time_sheet_frame.config(menu=self.b)
## for i in range(7):
## self.b = tkinter.Menubutton(self.time_sheet_frame, text=str(i))
## self.b.children = tkinter.Menu(self.b, tearoff=0)
## self.b.grid(row=1, column=i)
## self.b.children.add_checkbutton(label="mayo")
salad = tkinter.StringVar()
ttk.Checkbutton(self.weekday_frame, variable=salad, onvalue=1, offvalue=0).grid()
button = tkinter.Button(self.content_frame, text="press",command=lambda: print(salad.get()))
button.grid()
location_frame = tkinter.Frame(self.content_frame)
location_frame.grid()
if __name__ == '__main__':
    # Guard so importing this module no longer opens a window: build the
    # root, show the first form, and enter the Tk event loop.
    View()
    Scheduler().firstFrame()
    root.mainloop()
| Python |
#!/usr/bin/env python
"""
Simple tests for basic python-csp functionality.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import os
import random
import sys
import time
sys.path.insert(0, "..")
from csp.csp import *
#from csp.os_process import *
#from csp.os_thread import *
from csp.guards import Timer
#set_debug(True)
@process
def foo(n):
    """Sleep for a random interval, then report the argument received."""
    delay = random.random() * 2
    time.sleep(delay)
    print('foo() got argument {0}'.format(n))
@process
def send(cout):
    """
    readset =
    writeset = cout
    """
    # Announce and write the integers 0..4 down the channel.
    for value in range(5):
        print('send() is sending {0}'.format(value))
        cout.write(value)
@process
def recv(cin):
    """
    readset = cin
    writeset =
    """
    # Read and report five values from the channel.
    for _ in range(5):
        value = cin.read()
        print('recv() has received {0}'.format(str(value)))
@process
def send100(cout):
    """
    readset =
    writeset = cout
    """
    # Announce and write the integers 0..99 down the channel.
    for value in range(100):
        print('send100() is sending {0}'.format(value))
        cout.write(value)
@process
def recv100(cin):
    """
    readset = cin
    writeset =
    """
    # Read and report one hundred values from the channel.
    for _ in range(100):
        value = cin.read()
        print('recv100() has received {0}'.format(str(value)))
class TestOOP(object):
    """Channel read/write via process methods on an object (testoop)."""
    def __init__(self):
        # Single channel shared by the send/recv process methods.
        self.chan = Channel()
        return
    @process
    def send(self, msg):
        """
        readset = self.chan
        writeset =
        """
        # NOTE(review): this method WRITES self.chan, so the readset/
        # writeset lines above look swapped.  Confirm how python-csp
        # consumes these docstrings before changing them.
        self.chan.write(msg)
        return
    @process
    def recv(self):
        """
        readset = self.chan
        writeset =
        """
        print(self.chan.read())
        return
def testoop():
    """Run TestOOP's send and recv process methods in parallel."""
    oop = TestOOP()
    Par(oop.send('hello world'), oop.recv()).start()
@process
def testpoison(chan):
    """Poison *chan*, terminating every process attached to it."""
    print('Sending termination event...')
    chan.poison()
@process
def sendAlt(cout, num):
    """
    readset =
    writeset = cout
    """
    # Wait one second (via the Timer guard), then write num.
    pause = Timer()
    pause.sleep(1)
    cout.write(num)
@process
def testAlt0():
    """Select three times from an Alt of three Skip guards."""
    alt = Alt(Skip(), Skip(), Skip())
    for _ in range(3):
        print('*** TestAlt0 selecting...')
        chosen = alt.select()
        print('* Got this from Alt:' + str(chosen))
@process
def testAlt1(cin):
    """
    readset = cin
    writeset =
    """
    # Keep selecting until one integer has arrived on cin.
    alt = Alt(cin)
    ints_seen = 0
    while ints_seen < 1:
        print('*** TestAlt1 selecting...')
        chosen = alt.select()
        if isinstance(chosen, int):
            ints_seen += 1
        print('* Got this from Alt:' + str(chosen))
@process
def testAlt2(cin1, cin2, cin3):
    """
    readset = cin1, cin2, cin3
    writeset =
    """
    # Select until all three channels have delivered an integer.
    alt = Alt(Skip(), cin1, cin2, cin3)
    ints_seen = 0
    while ints_seen < 3:
        print('*** TestAlt2 selecting...')
        chosen = alt.select()
        if isinstance(chosen, int):
            ints_seen += 1
        print('* Got this from Alt:' + str(chosen))
@process
def testAlt3(cin1, cin2, cin3):
    """
    readset = cin1, cin2, cin3
    writeset =
    """
    # For obvious reasons, SKIP cannot go first in a pri_select.
    alt = Alt(cin1, cin2, cin3, Skip())
    ints_seen = 0
    while ints_seen < 3:
        print('*** TestAlt3 selecting...')
        chosen = alt.pri_select()
        if isinstance(chosen, int):
            ints_seen += 1
        print('* Got this from Alt:' + str(chosen))
@process
def testAlt4(cin1, cin2, cin3):
    """
    readset = cin1, cin2, cin3
    writeset =
    """
    # fair_select until all three channels have delivered an integer.
    alt = Alt(Skip(), cin1, cin2, cin3)
    ints_seen = 0
    while ints_seen < 3:
        print('*** TestAlt4 selecting...')
        chosen = alt.fair_select()
        if isinstance(chosen, int):
            ints_seen += 1
        print('* Got this from Alt:' + str(chosen))
@process
def testOr(cin1, cin2):
    """
    readset = cin1, cin2
    writeset =
    """
    # The | operator reads from whichever channel is ready; do it twice.
    for _ in range(2):
        print(cin1 | cin2)
@process
def testAltRRep(cin1, cin2, cin3):
    """
    readset = cin1, cin2, cin3
    writeset =
    """
    # Alt * 3 yields a generator producing three selections.
    selections = Alt(cin1, cin2, cin3) * 3
    for _ in range(3):
        print(next(selections))
@process
def testAltLRep(cin1, cin2, cin3):
    """
    readset = cin1, cin2, cin3
    writeset =
    """
    # 3 * Alt is the mirrored form of the repetition operator.
    selections = 3 * Alt(cin1, cin2, cin3)
    for _ in range(3):
        print(next(selections))
########## Top level stuff
def _printHeader(name):
random.seed(time.clock()) # Introduce a bit more randomness...
print('')
print('****************************************************')
print('* Testing {0}...'.format(name))
print('****************************************************')
print('')
return
def testSeq():
    """Exercise sequential composition two ways."""
    _printHeader('Seq')
    print('With operator overloading...')
    foo(1) > foo(2) > foo(3)
    print('')
    print('With process objects...')
    pipeline = Seq(foo(1), foo(2), foo(3))
    pipeline.start()
def testPar():
    """Exercise parallel composition with // sugar and Par objects."""
    _printHeader('Par')
    print('5 processes with operator overloading...')
    foo(1) // (foo(2), foo(3), foo(4), foo(5))
    print('')
    print('8 processes with operator overloading...')
    foo(1) // (foo(2), foo(3), foo(4), foo(5), foo(6), foo(7), foo(8))
    print('')
    print('5 processes with process objects...')
    group = Par(foo(1), foo(2), foo(3), foo(4), foo(5))
    group.start()
def testChan():
    """Exercise channel communication in three configurations."""
    _printHeader('Channels')
    print('1 producer, 1 consumer, 1 channel...')
    c1 = Channel()
    Par(recv(c1), send(c1)).start()
    print('')
    print('5 producers, 5 consumers, 5 channels...')
    chans = [Channel() for i in range(5)]
    procs = [send(chan) for chan in chans] + [recv(chan) for chan in chans]
    Par(*procs).start()
    print('')
    print('5 producers, 5 consumers, 1 channel...')
    shared = Channel()
    procs = [send(shared) for i in range(5)] + [recv(shared) for i in range(5)]
    Par(*procs).start()
def testOOP():
    """Exercise channel read/write via object methods."""
    _printHeader('channel read/write using object methods...')
    testoop()
def testPoison():
    """Exercise process termination via channel poisoning."""
    _printHeader('process termination (by poisoning)')
    doomed = Channel()
    Par(send100(doomed), recv100(doomed), testpoison(doomed)).start()
def testAlt():
    """Exercise Alt with select, pri_select and fair_select."""
    _printHeader('Alt')
    print('Alt with 3 SKIPs:')
    skips_only = testAlt0()
    skips_only.start()
    print('')
    print('Alt with 1 channel read:')
    ch1 = Channel()
    Par(testAlt1(ch1), sendAlt(ch1, 100)).start()
    print('')
    print('Alt with 1 SKIP, 3 channel reads:')
    ch2, ch3, ch4 = Channel(), Channel(), Channel()
    Par(testAlt2(ch2, ch3, ch4),
        sendAlt(ch2, 100),
        sendAlt(ch3, 200),
        sendAlt(ch4, 300)).start()
    print('')
    print('Alt with priSelect on 1 SKIP, 3 channel reads:')
    ch5, ch6, ch7 = Channel(), Channel(), Channel()
    prioritised = Par(testAlt3(ch5, ch6, ch7),
                      sendAlt(ch5, 100),
                      sendAlt(ch6, 200),
                      sendAlt(ch7, 300))
    prioritised.start()
    print('')
    print('Alt with fairSelect on 1 SKIP, 3 channel reads:')
    ch8, ch9, ch10 = Channel(), Channel(), Channel()
    Par(testAlt4(ch8, ch9, ch10),
        sendAlt(ch8, 100),
        sendAlt(ch9, 200),
        sendAlt(ch10, 300)).start()
def testChoice():
    """Exercise the | choice operator between two channels."""
    _printHeader('Choice')
    print('Choice with |:')
    left, right = Channel(), Channel()
    Par(sendAlt(left, 100), sendAlt(right, 200), testOr(left, right)).start()
def testRep():
    """Exercise repetition sugar in both orders (Alt * int, int * Alt).

    Bug fix: the second banner used to repeat 'Alt * int' even though
    that run exercises the mirrored int * Alt form.
    """
    _printHeader('Repetition')
    print('Repetition with Alt * int:')
    ch1, ch2, ch3 = Channel(), Channel(), Channel()
    Par(sendAlt(ch1, 100), sendAlt(ch2, 200), sendAlt(ch3, 300),
        testAltRRep(ch1, ch2, ch3)).start()
    print('')
    print('Repetition with int * Alt:')
    ch1, ch2, ch3 = Channel(), Channel(), Channel()
    Par(sendAlt(ch1, 100), sendAlt(ch2, 200), sendAlt(ch3, 300),
        testAltLRep(ch1, ch2, ch3)).start()
    return
if __name__ == '__main__':
    from optparse import OptionParser
    parser = OptionParser()
    # (short flag, long flag, dest, help) for every supported option.
    flag_specs = (
        ('-a', '--all', 'all', 'Test all CSP features'),
        ('-s', '--seq', 'seq', 'Test Seq'),
        ('-p', '--par', 'par', 'Test Par'),
        ('-c', '--chan', 'chan', 'Test Channels'),
        ('-o', '--oop', 'oop', 'Test OOP'),
        ('-t', '--poison', 'term', 'Test process termination'),
        ('-l', '--alt', 'alt', 'Test Alternatives'),
        ('-i', '--choice', 'choice', 'Test syntactic sugar for choice.'),
        ('-r', '--rep', 'rep', 'Test syntactic sugar for repetition.'),
    )
    for short_flag, long_flag, dest, help_text in flag_specs:
        parser.add_option(short_flag, long_flag, dest=dest,
                          action='store_true', help=help_text)
    (options, args) = parser.parse_args()
    _exit = '\nTesting complete.'
    # Ordered (dest, suite) table replaces the if/elif chain; iteration
    # order preserves the original priority when several flags are set.
    suites = (('seq', testSeq), ('par', testPar), ('chan', testChan),
              ('oop', testOOP), ('term', testPoison), ('alt', testAlt),
              ('choice', testChoice), ('rep', testRep))
    if options.all:
        for _, suite in suites:
            suite()
        print(_exit)
        sys.exit()
    for dest, suite in suites:
        if getattr(options, dest):
            suite()
            break
    else:
        parser.print_help()
    print(_exit)
    sys.exit()
| Python |
#!/usr/bin/env python
# -*- mode:python; coding:utf-8; -*-
# Calculation of Pi using quadrature. Using the python-csp package by Sarah Mount.
#
# Copyright © 2009-10 Russel Winder
import time
import multiprocessing
import sys
sys.path.insert(0, "../..")
from csp.csp import *
@process
def calculator ( channel , id , sliceSize , delta ) :
    """
    readset =
    writeset = channel
    """
    # Sum this process's slice of the quadrature series, then send the
    # partial result down the channel.  (Local renamed from `sum`,
    # which shadowed the builtin.)
    total = 0.0
    for i in range(1 + id * sliceSize, (id + 1) * sliceSize + 1):
        x = (i - 0.5) * delta
        total += 1.0 / (1.0 + x * x)
    channel.write(total)
@process
def accumulator ( channel , n , delta , startTime , processCount ) :
    """
    readset = channel
    writeset =
    """
    # Collect one partial sum per calculator, then finish the quadrature.
    partials = [channel.read() for i in range(0, processCount)]
    pi = 4.0 * sum(partials) * delta
    elapseTime = time.time() - startTime
    print("==== Python CSP Single pi = " + str(pi))
    print("==== Python CSP Single iteration count = " + str(n))
    print("==== Python CSP Single elapse = " + str(elapseTime))
    print("==== Python CSP Single process count = " + str(processCount))
    print("==== Python CSP Single processor count = " + str(multiprocessing.cpu_count()))
def execute(processCount):
    """Run one benchmark: *processCount* calculators feeding a single
    shared channel read by one accumulator."""
    n = 100000000  # 10 times fewer due to speed issues.
    delta = 1.0 / n
    startTime = time.time()
    # Floor division: calculator's range() needs an int slice size.
    # Plain / yields a float on Python 3; // floors on both 2 and 3.
    sliceSize = n // processCount
    channel = Channel()
    processes = []
    for i in range(0, processCount):
        processes.append(calculator(channel, i, sliceSize, delta))
    processes.append(accumulator(channel, n, delta, startTime, processCount))
    Par(*processes).start()
if __name__ == '__main__' :
    # Run the benchmark at increasing process counts, blank line between.
    execute ( 1 )
    print ( )
    execute ( 2 )
    print ( )
    execute ( 8 )
    print ( )
    execute ( 32 )
| Python |
#!/usr/bin/env python
# -*- mode:python; coding:utf-8; -*-
# Calculation of Pi using quadrature. Using the python-csp package by Sarah Mount.
#
# Copyright © 2009-10 Russel Winder
import time
import multiprocessing
import sys
sys.path.insert(0, "../..")
from csp.csp import *
def execute(processCount):
    """Benchmark with calculators defined inside the spawning loop,
    all sharing one channel.

    Fixes: `slice` renamed (shadowed the builtin); floor division so
    range() gets an int on Python 3; and the nested calculator binds
    the loop index via a default argument -- the original closed over
    ``i`` lazily, so every calculator that ran after the loop finished
    computed the LAST slice.
    """
    n = 10000000  # 100 times fewer due to speed issues.
    delta = 1.0 / n
    startTime = time.time()
    sliceSize = n // processCount
    channel = Channel()

    @process
    def accumulator():
        """
        readset = channel
        writeset =
        """
        pi = 4.0 * sum([channel.read() for i in range(0, processCount)]) * delta
        elapseTime = time.time() - startTime
        print("==== Python CSP Single NestedDeep pi = " + str(pi))
        print("==== Python CSP Single NestedDeep iteration count = " + str(n))
        print("==== Python CSP Single NestedDeep elapse = " + str(elapseTime))
        print("==== Python CSP Single NestedDeep process count = " + str(processCount))
        print("==== Python CSP Single NestedDeep processor count = " + str(multiprocessing.cpu_count()))

    processes = []
    for i in range(0, processCount):
        @process
        def calculator(slice_index=i):
            """
            readset =
            writeset = channel
            """
            # slice_index=i binds the index at definition time.
            total = 0.0
            for j in range(1 + slice_index * sliceSize, (slice_index + 1) * sliceSize):
                x = (j - 0.5) * delta
                total += 1.0 / (1.0 + x * x)
            channel.write(total)
        processes.append(calculator())
    processes.append(accumulator())
    Par(*processes).start()
if __name__ == '__main__' :
    # Run the benchmark at increasing process counts, blank line between.
    execute ( 1 )
    print ( )
    execute ( 2 )
    print ( )
    execute ( 8 )
    print ( )
    execute ( 32 )
| Python |
#!/usr/bin/env python
# -*- mode:python; coding:utf-8; -*-
# Calculation of Pi using quadrature. Using the python-csp package by Sarah Mount.
#
# Copyright © 2009-10 Russel Winder
import time
import multiprocessing
import sys
sys.path.insert(0, "../..")
from csp.csp import *
@process
def calculator ( channel , id , sliceSize , delta ) :
    """
    readset =
    writeset = channel
    """
    # Sum this process's slice of the series and send the partial
    # result.  (Local renamed from `sum`, which shadowed the builtin.)
    total = 0.0
    for i in range(1 + id * sliceSize, (id + 1) * sliceSize + 1):
        x = (i - 0.5) * delta
        total += 1.0 / (1.0 + x * x)
    channel.write(total)
@process
def accumulator ( channels , n , delta , startTime , processCount ) :
    """
    readset = channels
    writeset =
    """
    # One partial sum arrives on each calculator's private channel.
    partials = [channel.read() for channel in channels]
    pi = 4.0 * sum(partials) * delta
    elapseTime = time.time() - startTime
    print("==== Python CSP Multiple pi = " + str(pi))
    print("==== Python CSP Multiple iteration count = " + str(n))
    print("==== Python CSP Multiple elapse = " + str(elapseTime))
    print("==== Python CSP Multiple process count = " + str(processCount))
    print("==== Python CSP Multiple processor count = " + str(multiprocessing.cpu_count()))
def execute(processCount):
    """Run one benchmark with a private channel per calculator."""
    n = 100000000  # 10 times fewer due to speed issues.
    delta = 1.0 / n
    startTime = time.time()
    # Floor division: calculator's range() needs an int slice size
    # (plain / yields a float on Python 3; // floors on 2 and 3 alike).
    sliceSize = n // processCount
    channels = []
    processes = []
    for i in range(0, processCount):
        channel = Channel()
        channels.append(channel)
        processes.append(calculator(channel, i, sliceSize, delta))
    processes.append(accumulator(channels, n, delta, startTime, processCount))
    Par(*processes).start()
if __name__ == '__main__' :
    # Run the benchmark at increasing process counts, blank line between.
    execute ( 1 )
    print ( )
    execute ( 2 )
    print ( )
    execute ( 8 )
    print ( )
    execute ( 32 )
| Python |
#!/usr/bin/env python
# -*- mode:python; coding:utf-8; -*-
# Calculation of Pi using quadrature. Using the python-csp package by Sarah Mount.
#
# Copyright © 2009-10 Russel Winder
import time
import multiprocessing
import sys
sys.path.insert(0, "../..")
from csp.csp import *
def execute(processCount):
    """Benchmark: one private channel per calculator, calculator
    defined inside the spawning loop.

    Fixes: `slice` renamed (shadowed the builtin); floor division so
    range() gets an int on Python 3; and the nested calculator binds
    the loop index via a default argument -- the original closed over
    ``i`` lazily, so every calculator that ran after the loop finished
    computed the LAST slice.
    """
    n = 10  # was 10#0000000 -- iteration count cut right down for debugging.
    delta = 1.0 / n
    startTime = time.time()
    sliceSize = n // processCount
    channels = []

    @process
    def accumulator():
        """
        readset = channel
        writeset =
        """
        pi = 4.0 * sum([channel.read() for channel in channels]) * delta
        elapseTime = time.time() - startTime
        print("==== Python CSP Multiple NestedDeep pi = " + str(pi))
        print("==== Python CSP Multiple NestedDeep iteration count = " + str(n))
        print("==== Python CSP Multiple NestedDeep elapse = " + str(elapseTime))
        print("==== Python CSP Multiple NestedDeep process count = " + str(processCount))
        print("==== Python CSP Multiple NestedDeep processor count = " + str(multiprocessing.cpu_count()))

    processes = []
    for i in range(0, processCount):
        channel = Channel()
        channels.append(channel)

        @process
        def calculator(channel, slice_index=i):
            """
            readset =
            writeset = channel
            """
            # slice_index=i binds the index at definition time.
            total = 0.0
            for j in range(1 + slice_index * sliceSize, (slice_index + 1) * sliceSize):
                x = (j - 0.5) * delta
                total += 1.0 / (1.0 + x * x)
            channel.write(total)

        processes.append(calculator(channels[i]))
    processes.append(accumulator())
    Par(*processes).start()
if __name__ == '__main__' :
    import gc
    # Report collector activity for each run (leak hunting).
    gc.set_debug(True)
    execute ( 1 )
    print ( )
    execute ( 2 )
    print ( )
    execute ( 8 )
    print ( )
    execute ( 32 )
| Python |
#!/usr/bin/env python
# -*- mode:python; coding:utf-8; -*-
# Calculation of Pi using quadrature. Using the python-csp package by Sarah Mount.
#
# Copyright © 2009-10 Russel Winder
import time
import multiprocessing
import sys
sys.path.insert(0, "../..")
from csp.csp import *
def execute(processCount):
    """Benchmark: one private channel per calculator, both process
    functions nested once (shallow) inside execute.

    Fix: floor division so the nested calculator's range() gets an int
    slice size on Python 3 (// also floors on Python 2, so the change
    is safe on both).
    """
    n = 100000000  # 100 times fewer due to speed issues.
    delta = 1.0 / n
    startTime = time.time()
    sliceSize = n // processCount
    channels = []

    @process
    def calculator(channel, id):
        """
        readset =
        writeset = channel
        """
        total = 0.0
        for i in range(1 + id * sliceSize, (id + 1) * sliceSize + 1):
            x = (i - 0.5) * delta
            total += 1.0 / (1.0 + x * x)
        channel.write(total)

    @process
    def accumulator():
        """
        readset = channel
        writeset =
        """
        pi = 4.0 * sum([channel.read() for channel in channels]) * delta
        elapseTime = time.time() - startTime
        print("==== Python CSP Multiple NestedShallow pi = " + str(pi))
        print("==== Python CSP Multiple NestedShallow iteration count = " + str(n))
        print("==== Python CSP Multiple NestedShallow elapse = " + str(elapseTime))
        print("==== Python CSP Multiple NestedShallow process count = " + str(processCount))
        print("==== Python CSP Multiple NestedShallow processor count = " + str(multiprocessing.cpu_count()))

    processes = []
    for i in range(0, processCount):
        channel = Channel()
        channels.append(channel)
        processes.append(calculator(channel, i))
    processes.append(accumulator())
    Par(*processes).start()
if __name__ == '__main__' :
    # Run the benchmark at increasing process counts, blank line between.
    execute ( 1 )
    print ( )
    execute ( 2 )
    print ( )
    execute ( 8 )
    print ( )
    execute ( 32 )
| Python |
#!/usr/bin/env python
# -*- mode:python; coding:utf-8; -*-
# Calculation of Pi using quadrature. Using the python-csp package by Sarah Mount.
#
# Copyright © 2009-10 Russel Winder
import time
import multiprocessing
import sys
sys.path.insert(0, "../..")
from csp.csp import *
def execute(processCount):
    """Benchmark: one shared channel, process functions nested once
    (shallow) inside execute.

    Fix: floor division so the nested calculator's range() gets an int
    slice size on Python 3 (// also floors on Python 2, so the change
    is safe on both).
    """
    n = 10000000  # 100 times fewer due to speed issues.
    delta = 1.0 / n
    startTime = time.time()
    sliceSize = n // processCount
    channel = Channel()

    @process
    def calculator(id):
        """
        readset =
        writeset = channel
        """
        total = 0.0
        for i in range(1 + id * sliceSize, (id + 1) * sliceSize + 1):
            x = (i - 0.5) * delta
            total += 1.0 / (1.0 + x * x)
        channel.write(total)

    @process
    def accumulator():
        """
        readset = channel
        writeset =
        """
        pi = 4.0 * sum([channel.read() for i in range(0, processCount)]) * delta
        elapseTime = time.time() - startTime
        print("==== Python CSP Single NestedShallow pi = " + str(pi))
        print("==== Python CSP Single NestedShallow iteration count = " + str(n))
        print("==== Python CSP Single NestedShallow elapse = " + str(elapseTime))
        print("==== Python CSP Single NestedShallow process count = " + str(processCount))
        print("==== Python CSP Single NestedShallow processor count = " + str(multiprocessing.cpu_count()))

    processes = []
    for i in range(0, processCount):
        processes.append(calculator(i))
    processes.append(accumulator())
    Par(*processes).start()
if __name__ == '__main__' :
    # Run the benchmark at increasing process counts, blank line between.
    execute ( 1 )
    print ( )
    execute ( 2 )
    print ( )
    execute ( 8 )
    print ( )
    execute ( 32 )
| Python |
import socket

# Simple UDP echo client: send one line to ourselves on PORT and print
# whatever comes back.
HOST = socket.gethostbyname(socket.gethostname())
PORT = 8887
data = 'flibble'
# SOCK_DGRAM is the socket type to use for UDP sockets.
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Bug fix: Python 3 sockets send/receive bytes, not str -- the original
# sendto(data + "\n", ...) raised TypeError.  encode/decode also works
# unchanged on Python 2.
sock.sendto((data + "\n").encode('utf-8'), (HOST, PORT))
received = sock.recv(1024).decode('utf-8')
print("Sent: {0}".format(data))
print("Received: {0}".format(received))
| Python |
import sys
sys.path.insert(0, "..")
from csp.csp import *
from csp.guards import Timer
@process
def hello():
    """Print 0..4 at one-second intervals using the csp Timer guard.

    Bug fix: the Python-2-only statement ``print i`` is a SyntaxError
    on Python 3; ``print(i)`` behaves identically on both versions for
    a single argument.
    """
    t = Timer()
    for i in range(5):
        print(i)
        t.sleep(1)
if __name__ == '__main__':
    # Repetition sugar: run hello three times, then twice more.
    hello() * 3
    2 * hello()
| Python |
#!/usr/bin/env python
import sys
sys.path.insert(0, "..")
from csp.csp import *
from csp.builtins import Generate, Plus, Printer
# Shared channels: two generator inputs and one summed output stream.
in1, in2, out = Channel(), Channel(), Channel()
@process
def foo():
    # Previously deadlocked
    # Run two generators, an adder and a printer alongside a Skip.
    Skip() // (Generate(in1), Generate(in2), Plus(in1, in2, out), Printer(out))
    # Infinite stream of ints (OK)
    #p = Skip()
    #p //= [Generate(out), Printer(out)]
@process
def bar():
    # Infinite stream of even ints (OK)
    # Same network as foo() but composed with an explicit Par object.
    Par(Generate(in1), Generate(in2), Plus(in1, in2, out), Printer(out)).start()
    #PAR //= [Generate(in1), Generate(in2), Plus(in1, in2, out), Printer(out)]
if __name__ == '__main__':
    # Generate an infinite stream on `out` and print it.
    Generate(out) // (Printer(out),)
    #
    # bar().start()
    # foo().start()
| Python |
from csp.csp import *
import random, time
@process
def customer_child(cchildout, n):
    # Child of customer *n*: writes 0..2 on cchildout with random pauses.
    # NOTE(review): Python 2 only (xrange, print statements) -- kept as-is.
    for i in xrange(3):
        print "Customer's "+str(n)+" child sending "+str(i)
        cchildout.write(i)
        time.sleep(random.random() * 3)
    return
@process
def customer(cparentout, cchildout, n):
    # Customer *n*: five sends on cparentout; each spawns a child
    # process writing on cchildout first.  (Python 2 only syntax.)
    for i in xrange(5):
        #print 'customer ', n, ' sending: customer '+str(i)
        print 'Customer '+str(n)+" sending "+str(i)
        Par(customer_child(cchildout, n)).start()
        cparentout.write(i)
        time.sleep(random.random() * 5)
    return
@process
def merchant(cin):
    # Consume up to 15 values from the shared customer channel.
    for i in xrange(15):
        data = cin.read()
        print 'Merchant got:', data
        time.sleep(random.random() * 5)
    return
@process
def merchantswife(cin):
    # Consume up to 15 values from the children's channel.
    for i in xrange(15):
        data = cin.read()
        print "Merchant's wife got: ", data
        time.sleep(random.random() * 4)
    return
@process
def terminator(chan):
    # After 10 seconds, poison *chan* to shut the network down.
    time.sleep(10)
    print 'Terminator is killing channel:', chan.name
    chan.poison()
    return
# Wire three customers, the merchant couple and a terminator to two
# channels; the terminator poisons `doomed` after 10 seconds.
doomed = Channel()
doomed_children = Channel()
Par(customer(doomed, doomed_children, 1), merchant(doomed), merchantswife(doomed_children), customer(doomed, doomed_children, 2), customer(doomed, doomed_children, 3), terminator(doomed)).start()
#send5(doomed) // (recv(doomed), send52(doomed), interrupt(doomed))
| Python |
#!/usr/bin/env python
"""
Simple oscilloscope traces for python-csp.
Requires Pygame.
Features:
* Press 's' to save an oscilloscope trace as a PNG.
* Press UP and DOWN to scale the input more / less.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from csp.csp import *
import copy
import numpy
import pygame
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'November 2009'
__version__ = '0.2'
@forever
def Oscilloscope(inchan, scale=80.0, _process=None):
    """Draw a scrolling oscilloscope trace of values read from inchan.

    Keys: q (or closing the window) quits, s saves a PNG snapshot,
    UP/DOWN increase/decrease the input scaling.  Poisons inchan on
    exit.
    """
    # Constants
    WIDTH, HEIGHT = 512, 256
    TRACE, GREY = (80, 255, 100), (110, 110, 110)
    caption = 'Oscilloscope'
    filename = caption + '.png'
    # Open window
    pygame.init()
    screen = pygame.display.set_mode((WIDTH, HEIGHT), 0)
    pygame.display.set_caption(caption)
    # Create a blank chart with vertical ticks, etc
    blank = numpy.zeros((WIDTH, HEIGHT, 3), dtype=numpy.int8)
    # Draw x-axis
    xaxis = HEIGHT // 2
    blank[::, xaxis] = GREY
    # Draw vertical ticks
    vticks = [-100, -50, +50, +100]
    for vtick in vticks:
        blank[::5, xaxis + vtick] = GREY  # Horizontals
    blank[::50, ::5] = GREY  # Verticals
    # Draw the 'blank' screen.
    pygame.surfarray.blit_array(screen, blank)  # Blit the screen buffer
    pygame.display.flip()  # Flip the double buffer
    # ydata stores data for the trace.
    ydata = [0.0 for i in range(WIDTH)]  # assert len(ydata) <= WIDTH
    QUIT = False
    while not QUIT:
        pixels = copy.copy(blank)
        ydata.append(inchan.read() * scale)
        ydata.pop(0)
        for x in range(WIDTH):
            # Clip samples falling outside the window.  Bug fix: the
            # original bare ``except: pass`` hid every error, not just
            # out-of-range indices.
            try:
                pixels[x][xaxis - int(ydata[x])] = TRACE
            except IndexError:
                pass
        pygame.surfarray.blit_array(screen, pixels)  # Blit the screen buffer
        pygame.display.flip()  # Flip the double buffer
        del pixels  # Use constant space.
        for event in pygame.event.get():
            if event.type == pygame.QUIT \
                    or event.type == pygame.KEYDOWN and event.key == pygame.K_q:
                QUIT = True
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, filename)
                print('Saving oscope image in: ' + str(filename))
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_UP:
                scale += 10.0
                print('Oscilloscope scaling by %f' % scale)
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_DOWN:
                if scale - 10.0 > 0.0:
                    scale -= 10.0
                print('Oscilloscope scaling by %f' % scale)
        yield
    inchan.poison()
    pygame.display.quit()
    return
if __name__ == '__main__':
    # This module only defines the Oscilloscope process; the runnable
    # examples live in traces.py.
    print('For this tutorial run traces.py')
| Python |
#!/usr/bin/env python
#
# Example oscilloscope traces.
#
import sys
from csp.csp import *
from csp.builtins import Sin, Cos, GenerateFloats, Mux2, Delta2
from oscilloscope import Oscilloscope
@forever
def Random(outchan):
    """Random process.

    Generates random data and writes it to outchan.
    """
    from random import random as next_sample
    while True:
        outchan.write(next_sample())
        yield
def trace_random():
    """Test the Oscilloscope with random data.
    """
    data_chan = Channel()
    Par(Random(data_chan), Oscilloscope(data_chan)).start()
def trace_sin():
    """Plot a sine wave on the oscilloscope.
    """
    raw, wave = Channel(), Channel()
    Par(GenerateFloats(raw),
        Sin(raw, wave),
        Oscilloscope(wave)).start()
def trace_cos():
    """Plot a cosine wave on the oscilloscope.
    """
    raw, wave = Channel(), Channel()
    Par(GenerateFloats(raw),
        Cos(raw, wave),
        Oscilloscope(wave)).start()
def trace_mux():
    """Plot sine and cosine waves on the oscilloscope.
    """
    # Split one float stream two ways, run sin/cos over the halves,
    # then multiplex both back onto a single scope channel.
    chans = [Channel() for i in range(6)]
    network = Par(GenerateFloats(chans[0]),
                  Delta2(chans[0], chans[1], chans[2]),
                  Cos(chans[1], chans[3]),
                  Sin(chans[2], chans[4]),
                  Mux2(chans[3], chans[4], chans[5]),
                  Oscilloscope(chans[5]))
    network.start()
# Auto-register every trace_* function above under its short name,
# e.g. trace_sin -> EXAMPLES['sin'].
EXAMPLES = {}
for name, func in globals().items():
    if name.startswith('trace_'):
        EXAMPLES[name[6:]] = func
if __name__ == '__main__':
    # Run the example named on the command line, or print usage.
    if len(sys.argv) != 2:
        print('Syntax: python {0} {1}'.format(sys.argv[0],
                                              ' | '.join(EXAMPLES.keys())))
        for name, func in EXAMPLES.items():
            # Bug fix: func.func_doc is Python-2-only; __doc__ is the
            # portable spelling on both 2 and 3.
            print(' {0:<9} {1}'.format(name, func.__doc__.strip()))
    elif sys.argv[1] not in EXAMPLES:
        print('Unknown example {0}'.format(sys.argv[1]))
    else:
        print('Use cursor up/down for scaling, s for save and q for quit')
        EXAMPLES[sys.argv[1]]()
| Python |
# Example program from Part 01 of the python-csp tutorial
# Copyright (C) Sarah Mount, 2010.
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import socket
import time
from csp.csp import *
def response(code, reason, page):
    """Construct and return a single HTTP response.

    code   -- numeric HTTP status code, e.g. 200.
    reason -- reason phrase, e.g. 'OK'.
    page   -- HTML fragment placed in the response body.

    FIXME: Should read and return files from disk, not a static page.
    FIXME: Should handle other MIME types.
    """
    html = """
<html>
<head><title>%i %s</title></head>
<body>
%s
<hr/>
<p><strong>Date:</strong> %s</p>
</body>
</html>
""" % (code, reason, page, time.ctime())
    # A blank line must separate the header block from the entity body
    # (HTTP/1.0, RFC 1945); the original ran them together.
    template = """HTTP/1.0 %i %s
Content-Type: text/html
Content-Length: %i

%s
""" % (code, reason, len(html), html)
    return template
@process
def server(host, port):
    """Simple CSP based web server.

    Binds a listening TCP socket to (host, port); for every accepted
    connection spawns one handler process: requests beginning with
    'GET' go to handler_ok, anything else to handler_not_found.
    Runs forever (interrupt to stop).

    NOTE(review): sock.recv() returns bytes on Python 3, so
    startswith('GET') only matches under Python 2 — confirm the
    intended interpreter.
    """
    print('Running tutorial web-server on port {0}...'.format(port))
    print('Interrupt with CTRL-C')
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow quick restarts on the same port after a crash or CTRL-C.
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((host, port))
    sock.listen(5)  # backlog of 5 pending connections
    while True:
        conn_sock, conn_addr = sock.accept()
        request = conn_sock.recv(4096).strip()
        if request.startswith('GET'):
            handler_ok(request, conn_sock).start()
        else:
            handler_not_found(request, conn_sock).start()
@process
def handler_ok(request, conn_sock):
    """Handle a single HTTP 200 OK request."""
    parts = ['<h1>My python-csp web server!</h1>',
             '<p><strong>You asked for:</strong><pre>%s</pre></p>' % request]
    conn_sock.send(response(200, 'OK', ''.join(parts)))
    conn_sock.shutdown(socket.SHUT_RDWR)
    conn_sock.close()
    return
@process
def handler_not_found(request, conn_sock):
    """Handle a single HTTP 404 Not Found request."""
    parts = ['<h1>Cannot find your file</h1>',
             '<p><strong>You asked for:</strong><pre>%s</pre></p>' % request]
    conn_sock.send(response(404, 'Not Found', ''.join(parts)))
    conn_sock.shutdown(socket.SHUT_RDWR)
    conn_sock.close()
    return
if __name__ == '__main__':
    # Listen on all interfaces, port 8888.
    server('', 8888).start()
| Python |
#!/usr/bin/env python
"""
Count the words in every file in a given directory.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from csp.csp import *
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'July 2010'
@process
def word_count(filename):
    """Count and report the number of words in *filename*.

    The original counted len() of a list of per-line word lists, which
    is the number of LINES, not words; this sums the words per line.
    open() replaces the Python-2-only file() builtin.
    """
    with open(filename) as fd:
        count = sum(len(line.split()) for line in fd)
    print('%s contains %i words.' % (filename, count))
@process
def directory_count(path):
    """Spawn one word_count process per regular file in directory *path*.

    Exits the interpreter with status 1 if *path* does not exist.
    """
    import glob
    import os.path
    import sys
    # Test if directory exists.
    if not os.path.exists(path):
        # Parenthesised print works on Python 2 and 3 (was a py2 statement).
        print('%s does not exist. Exiting.' % path)
        sys.exit(1)
    # os.path.isfile() already excludes directories, so the original's
    # extra 'not isdir' test was redundant; 'p' avoids shadowing 'path'.
    filenames = [p for p in glob.glob(path + '/*') if os.path.isfile(p)]
    procs = [word_count(f) for f in filenames]
    Par(*procs).start()
if __name__ == '__main__':
    import sys
    # The original compared the list itself ('sys.argv <= 1'), which is a
    # TypeError on Python 3 and always False on Python 2.
    if len(sys.argv) <= 1:
        print('You need to provide this script with a directory path. Exiting.')
        sys.exit(1)
    else:
        directory_count(sys.argv[1]).start()
| Python |
#!/usr/bin/env python
# Mandelbrot set computed in parallel using python-csp.
# Multiple-producer, single consumer architecture.
# Copyright (C) Sarah Mount, 2009.
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
from csp.csp import *
import math
import numpy
import pygame
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'December 2008'
MAXITER = 100
"""@var: Number of iterations used to determine each pixel of the
fractal image.
@see: L{mandelbrot}
"""
def get_colour(mag, cmin=0, cmax=100):
    """Given a float, returns an RGB triple.

    Recipe 9.10 from the Python Cookbook: maps *mag*, normalised into
    [cmin, cmax], onto a blue-to-red colour ramp.

    @param mag: Magnitude value from which to calculate RGB triple.
    @keyword cmin: Minimum possible value for C{mag}.
    @keyword cmax: Maximum possible value for C{mag}.
    @return: An integer (red, green, blue) tuple.
    """
    assert cmin != cmax
    clamp = lambda v: min(max(v, 0.), 1.)
    frac = float(mag - cmin) / (cmax - cmin)
    red = clamp(4 * (frac - 0.25))
    green = clamp(4 * math.fabs(frac - 0.5) - 1.)
    blue = clamp(4 * (0.75 - frac))
    return int(255 * red), int(255 * green), int(255 * blue)
@process
def mandelbrot(xcoord, dimension, cout, acorn=-2.0, bcorn=-1.250):
    """Calculate pixel values for a single column of a Mandelbrot set.

    Writes one image column — (xcoord, list of RGB triples of length
    height) — to C{cout}. Uses the normalized iteration count
    algorithm to smooth colour gradients outside the set.

    readset =
    writeset = cout

    @param xcoord: x-coordinate of this image column.
    @param dimension: (width, height) of the overall fractal.
    @param cout: Channel down which the image column will be sent.
    @keyword acorn: Seed value for fractal generation (real part).
    @keyword bcorn: Seed value for fractal generation (imaginary part).
    """
    width, height = dimension

    def smooth(zz, n):
        # Normalized iteration count algorithm.
        return n + 1 - math.log(math.log(abs(zz))) / math.log(2)

    column = []
    for ycoord in range(height):
        z = complex(0., 0.)
        c = complex(acorn + xcoord * 2.5 / float(width),
                    bcorn + ycoord * 2.5 / float(height))
        for i in range(MAXITER):
            z = complex(z.real ** 2 - z.imag ** 2 + c.real,
                        2 * z.real * z.imag + c.imag)
            if abs(z) ** 2 > 4:
                break
        if i == MAXITER - 1:
            # Point lies inside the Mandelbrot set.
            column.append((0, 0, 0))
        else:
            # Point lies outside the Mandelbrot set.
            column.append(get_colour(smooth(z, i), cmax=MAXITER))
    cout.write((xcoord, column))
    return
@process
def consume(size, filename, cins):
    """Consumer process to aggregate image data for Mandelbrot fractal.

    Reads one (xcoord, column) pair from each input channel, blits it
    into a pygame window, then loops on window events: 's' saves the
    image to *filename*; closing the window poisons all input channels
    and shuts pygame down.

    readset = cins
    writeset =

    @type size: C{tuple}
    @param size: Width and height of generated fractal image.
    @type filename: C{str}
    @param filename: File in which to save generated fractal image.
    @type cins: C{list}
    @param cins: Input channels from which image columns will be read.
    """
    # Create initial pixel data
    pixmap = numpy.zeros((size[0], size[1], 3), dtype=numpy.int8)
    pygame.init()
    screen = pygame.display.set_mode((size[0], size[1]), 0)
    pygame.display.set_caption('python-csp Mandelbrot fractal example.')
    # Wait on channel events.
    # NOTE(review): 'len(cins) * Alt(...)' appears to use python-csp's
    # repeated-Alt operator to build an iterator yielding len(cins)
    # selections — confirm against the csp.csp.Alt documentation.
    gen = len(cins) * Alt(*cins)
    for i in range(len(cins)):
        xcoord, column = next(gen)
        # Update column of blit buffer
        pixmap[xcoord] = column
        # Update image on screen.
        pygame.surfarray.blit_array(screen, pixmap)
        pygame.display.update(xcoord, 0, 1, size[1])
    while True:
        for event in pygame.event.get():
            if event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, filename)
            elif event.type == pygame.QUIT:
                for channel in cins:
                    channel.poison()
                pygame.quit()
@process
def main(size, filename):
    """Manage all processes and channels required to generate fractal.

    @type size: C{tuple}
    @param size: Size of generated Mandelbrot fractal image.
    @type filename: C{str}
    @param filename: Name of file in which to store generated fractal image.
    """
    width = size[0]
    # One producer process and one channel per image column.
    channels = [Channel() for _ in range(width)]
    producers = [mandelbrot(x, size, channels[x]) for x in range(width)]
    # The consumer goes first in the Par, ahead of all producers.
    Par(consume(size, filename, channels), *producers).start()
    return
if __name__ == '__main__':
    size = (320, 240)
    # Output filename fixed: was misspelled 'manelbrot.png'.
    main(size, 'mandelbrot.png').start()
| Python |
#!/usr/bin/env python
from csp.csp import *
from math import sqrt
from decimal import Decimal
def genPair():
    """Return a pair of independent uniform random floats in [0, 1)."""
    # Local import: 'random' is never imported at module level in this
    # script, so the original raised NameError when called.
    import random
    return random.random(), random.random()
def g(x):
    """Quarter-circle function y = sqrt(1 - x^2) for x in [-1, 1].

    Was a lambda assignment; PEP 8 (E731) prefers a def.
    """
    return sqrt(1 - (x * x))

# Number of Monte Carlo samples drawn by each worker process.
perProcess = 100000
# Number of worker processes.
workers = 320
@process
def worker(c):
    """Monte Carlo sampler: count points under the quarter circle.

    readset =
    writeset = c
    """
    hits = 0
    for _ in range(perProcess):
        x, y = genPair()
        if y <= g(x):
            hits += 1
    c.write(Decimal(hits))
    return
@process
def consumer(cins):
    """Sum one partial hit-count from every worker and print pi.

    readset = cins
    writeset =
    """
    alt = Alt(*cins)
    total = Decimal(0)
    for i in range(len(cins)):
        total += alt.select()
    # Output message typo fixed: was 'aproximation'.
    print("Pi approximation: " + str(Decimal((total / (perProcess * workers)) * 4)))
def main():
    """Wire up one channel per worker, plus a single consumer, and run."""
    chans, procs = [], []
    for _ in range(workers):
        chan = Channel()
        chans.append(chan)
        procs.append(worker(chan))
    procs.append(consumer(chans))
    Par(*procs).start()
    return
if __name__ == '__main__':
    # Both names below were used without being imported (NameError):
    # getcontext comes from decimal, time from the stdlib.
    import time
    from decimal import getcontext
    getcontext().prec = 19
    t0 = time.time()
    main()
    t1 = time.time()
    print("Time Taken: " + str(t1 - t0))
| Python |
#!/usr/bin/env python
import copy, math, struct, time
from csp.csp import *
INFINITY = float('infinity')
delta = 0.000000001 # in Java this is java.lang.Math.ulp(1.0)
class Vector(object):
    """A simple 3-component vector. (Consider numpy arrays instead.)"""

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def __repr__(self):
        return 'Vector({0}, {1}, {2})'.format(self.x, self.y, self.z)

    def __add__(self, other):
        return Vector(self.x + other.x, self.y + other.y, self.z + other.z)

    def __sub__(self, other):
        return Vector(self.x - other.x, self.y - other.y, self.z - other.z)

    def dot(self, other):
        """Scalar (dot) product with *other*."""
        return self.x * other.x + self.y * other.y + self.z * other.z

    def mag(self):
        """Euclidean length of the vector."""
        return math.sqrt(self.dot(self))

    def scale(self, scalar):
        """New vector with every component multiplied by *scalar*."""
        return Vector(self.x * scalar, self.y * scalar, self.z * scalar)

    def unitise(self):
        """Unit vector pointing in the same direction."""
        return self.scale(1 / self.mag())
class Ray(object):
    """A ray: an origin point plus a direction vector."""

    def __init__(self, origin, direction):
        self.origin = origin
        self.direction = direction

    def __repr__(self):
        return 'Ray({0}, {1})'.format(repr(self.origin), repr(self.direction))
class Hit(object):
    """An intersection record: distance (llambda) and surface normal."""

    def __init__(self, llambda, normal):
        self.llambda = llambda
        self.normal = normal

    def __repr__(self):
        return 'Hit({0}, {1})'.format(self.llambda, repr(self.normal))
class Scene(object):
    """Abstract base class for scene objects; subclasses override all methods."""

    def intersect(self, hit, ray):
        # Should update *hit* with the nearest intersection along *ray*.
        raise NotImplementedError('Must be overridden in subclass')

    def sintersect(self, ray):
        # Shadow test: should return a bool.
        raise NotImplementedError('Must be overridden in subclass')

    def bound(self, b):
        # Should return a Sphere enclosing this object and *b*.
        raise NotImplementedError('Must be overridden in subclass')
class Sphere(Scene):
    """Sphere scene primitive: a centre Vector plus a radius."""

    def __init__(self, centre, radius):
        self.centre = centre # vector type
        self.radius = radius
        return

    def __repr__(self):
        return 'Sphere({0}, {1})'.format(repr(self.centre), self.radius)

    def ray(self, ray): # Returns a float
        # Distance along *ray* to the nearest visible intersection, or
        # INFINITY when the ray misses the sphere entirely.
        v = self.centre - ray.origin
        b = v.dot(ray.direction)
        # Discriminant of the quadratic |origin + t*dir - centre|^2 = r^2.
        disc = b**2 - v.dot(v) + self.radius**2
        if disc < 0.0: return INFINITY
        d = math.sqrt(disc)
        t2 = b + d # far root
        if t2 < 0.0: return INFINITY # sphere lies entirely behind origin
        t1 = b - d # near root
        if t1 > 0: return t1
        return t2

    def sray(self, ray): # Returns a bool
        # Cheap hit test used for shadow rays: True if *ray* meets the sphere.
        v = self.centre - ray.origin
        b = v.dot(ray.direction)
        disc = b**2 - v.dot(v) + self.radius**2
        if disc < 0.0: return False
        return b + math.sqrt(disc) >= 0.0

    def intersect(self, hit, ray): # Returns a Vector
        # Update *hit* in place when this sphere is closer than hit.llambda.
        l = self.ray(ray)
        if l >= hit.llambda: return
        # Normal direction: hit point minus centre.
        n = ray.origin + (ray.direction.scale(l) - self.centre)
        hit.llambda = l
        hit.normal = n.unitise()
        return

    def sintersect(self, ray): # Returns a bool
        return self.sray(ray)

    def bound(self, sphere): # Returns a Sphere
        # Smallest sphere centred at sphere.centre that encloses both.
        s = (sphere.centre - self.centre).mag() + self.radius
        if sphere.radius > s:
            return Sphere(sphere.centre, sphere.radius)
        else: return Sphere(sphere.centre, s)
class Group(Scene):
    """Composite scene node: a bounding sphere plus child scene objects."""

    def __init__(self, bound, objs):
        self.b, self.objs = bound, objs # b: bounding Sphere, objs: children
        return

    def __repr__(self):
        return 'Group(' + repr(self.b) + ', ' + repr(self.objs) + ')'

    def intersect(self, hit, ray): # Calls intersect() on self.objs, updates hit
        l = self.b.ray(ray)
        # Cull the whole group when its bound is farther than the current hit.
        if l >= hit.llambda: return
        for scene in self.objs:
            scene.intersect(hit, ray)
        return

    def sintersect(self, ray): # Returns a bool
        # Shadow test: True as soon as any child is hit.
        if not self.b.sray(ray): return False
        for scene in self.objs:
            if scene.sintersect(ray): return True
        return False

    def bound(self, sphere): # Returns a Sphere
        # Grow *sphere* until it encloses every child.
        for scene in self.objs:
            sphere = scene.bound(sphere)
        return sphere
def ray_trace(light, ray, scene):
    """Return the greyscale intensity (a float) seen along *ray*.

    *light* is the unit direction towards the light source; points in
    shadow or facing away from the light contribute 0.0.
    """
    hit = Hit(INFINITY, Vector(0.0, 0.0, 0.0))
    scene.intersect(hit, ray)
    if hit.llambda == INFINITY:
        return 0.0
    # Surface point, nudged out along the normal to avoid self-shadowing.
    surface = ray.origin + (ray.direction.scale(hit.llambda) +
                            hit.normal.scale(delta))
    brightness = hit.normal.dot(light)
    if brightness >= 0.0:
        return 0.0
    shadow_ray = Ray(surface, light.scale(-1.0))
    return 0.0 if scene.sintersect(shadow_ray) else -brightness
def create(level, centre, radius):
    """Recursively build the fractal sphere-flake scene; returns a Scene."""
    sphere = Sphere(centre, radius)
    if level == 1:
        return sphere
    offset = 3.0 * radius / math.sqrt(12.0)
    children = [sphere]
    bounding = Sphere(centre + Vector(0.0, radius, 0.0), 2.0 * radius)
    # Four half-size children arranged around and above the parent.
    for dz in (-1, 1):
        for dx in (-1, 1):
            sub_centre = Vector(centre.x + dx * offset,
                                centre.y + offset,
                                centre.z + dz * offset)
            child = create(level - 1, sub_centre, radius / 2.0)
            children.append(child)
            bounding = child.bound(bounding)
    return Group(bounding, children)
def create_run(n, level, ss, filename='scene.pgm'):
    """Render an auto-generated fractal scene, one CSP process per pixel.

    n        -- image width and height in pixels.
    level    -- recursion depth for the generated sphere-flake scene.
    ss       -- supersampling factor per pixel.
    filename -- output path, written in binary .pgm (P5) format.
    """
    scene = create(level, Vector(0.0, -1.0, 0.0), 1.0)
    # open(..., 'wb'), not file(..., 'w'): file() does not exist on
    # Python 3, and struct.pack() below produces bytes, which require a
    # binary stream (the header is encoded to match).
    fp = open(filename, 'wb')
    fp.write('P5\n{0} {1}\n255\n'.format(n, n).encode('ascii'))
    channels, procs = [], []
    for y in range(n):
        for x in range(n):
            chan = Channel()
            channels.append(chan)
            procs.append(perpixel(ss, n, x, y, scene, chan))
            print(' made process for pixel ' + str(y) + ' ' + str(x))
    Par(*procs).start()
    print(len(procs))
    alt = Alt(*channels)
    # Drain one pixel value per channel; poisoning after each read
    # removes the just-read channel from the Alt's guard list.
    while len(alt.guards) > 0:
        print('top of loop, #guards: ' + str(len(alt.guards)))
        if len(alt.guards) == 1:
            print('can only read from channel ' + str(alt.guards[0].name))
        pixel = alt.select()
        fp.write(struct.pack('B', pixel))
        print('About to poison ' + str(alt.last_selected.name))
        alt.poison()
        print(len(alt.guards))
    print('about to close')
    fp.close()
    return
@process
def perpixel(ss, n, x, y, scene, chnl):
    """Ray-trace one supersampled pixel and send its byte value down chnl.

    readset =
    writeset = chnl
    """
    total = 0.0
    # Light direction is constant across samples, so compute it once.
    light = Vector(-1.0, -3.0, 2.0).unitise()
    eye = Vector(0.0, 0.0, -4.0)
    for dx in range(ss):
        for dy in range(ss):
            direction = Vector(x + dx * 1.0 / ss - n / 2.0,
                               y + dy * 1.0 / ss - n / 2.0,
                               n)
            total += ray_trace(light, Ray(eye, direction.unitise()), scene)
    value = int(0.5 + 255.0 * total / ss ** 2)
    print('Value ' + str(value) + ' writing to: ' + str(chnl.name))
    chnl.write(value)
    return
def run(n, scene, ss, filename='scene.pgm'):
    """Ray trace a given scene and write the result to a binary .pgm file.

    n        -- image width and height in pixels.
    scene    -- the Scene object to render.
    ss       -- supersampling factor per pixel.
    filename -- output path, written in .pgm (P5) format.
    """
    # Binary mode: struct.pack() yields bytes; file() is Python-2-only.
    fp = open(filename, 'wb')
    # format(n, n), not format((n, n)): the original passed one tuple to
    # a two-placeholder format string, raising IndexError for {1}.
    fp.write('P5\n{0} {1}\n255\n'.format(n, n).encode('ascii'))
    for y in range(n - 1, -1, -1):
        for x in range(n):
            g = 0.0
            for dx in range(ss):
                for dy in range(ss):
                    d = Vector(x + dx * 1.0 / ss - n / 2.0,
                               y + dy * 1.0 / ss - n / 2.0,
                               n)
                    ray = Ray(Vector(0.0, 0.0, -4.0), d.unitise())
                    g += ray_trace(Vector(-1.0, -3.0, 2.0).unitise(),
                                   ray, scene)
            fp.write(struct.pack('B', int(0.5 + 255.0 * g / ss ** 2)))
    fp.close()
    return
if __name__ == '__main__':
    import pickle
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option('-f', '--file', dest='file',
                      action='store',
                      help='Load a scene from a data file.')
    parser.add_option('-s', '--size', dest='size',
                      action='store', default=4,
                      help='Image width in pixels.')
    parser.add_option('-l', '--level', dest='level',
                      action='store', default=1,
                      help='Recursion level for auto-generated fractal scene.')
    parser.add_option('-o', '--out', dest='out',
                      action='store', default='scene.pgm',
                      help='Output file to write to in .pgm format.')
    (options, args) = parser.parse_args()
    if options.file:
        # 'rb' + with-statement: pickle data is bytes, file() is
        # Python-2-only, and the handle is now closed on error too.
        with open(options.file, 'rb') as fp:
            scene = pickle.loads(fp.read())
        t0 = time.time()
        run(int(options.size), scene, 3, filename=options.out)
        t = time.time() - t0
        # Leading space added: original printed e.g. '3.2seconds.'
        print('Time taken: ' + str(t) + ' seconds.')
    else:
        t0 = time.time()
        create_run(int(options.size), int(options.level), 3,
                   filename=options.out)
        t = time.time() - t0
        print('Time taken: ' + str(t) + ' seconds.')
| Python |
#!/usr/bin/env python
"""Python CSP full adder.
Based on code from PyCSP - Communicating Sequential Processes for
Python. John Markus Bjorndalen, Brian Vinter, Otto Anshus. CPA 2007,
Surrey, UK, July 8-11, 2007. IOS Press 2007, ISBN 978-1-58603-767-3,
Concurrent Systems Engineering Series (ISSN 1383-7575).
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'December 2008'
from csp.csp import *
from csp.builtins import *
@process
def Bool1(cout):
    """Emit the repeating truth-table column 1, 1, 0, 0 forever.

    readset =
    writeset = cout
    """
    while True:
        for bit in (1, 1, 0, 0):
            cout.write(bit)
    return
@process
def Bool2(cout):
    """Emit the repeating truth-table column 1, 0, 1, 0 forever.

    readset =
    writeset = cout
    """
    while True:
        for bit in (1, 0, 1, 0):
            cout.write(bit)
    return
def fulladder(A_in, B_in, C_in, Sum_in, Carry_in):
    """Full adder implementation.

    Based on Bjorndalen, Vinter & Anshus (2007). Sum = A xor B xor C;
    Carry = (A and B) or ((A xor B) and C). Returns the unstarted Par.
    """
    # Fan-out copies of each input bit.
    a1, a2 = Channel(), Channel()
    b1, b2 = Channel(), Channel()
    c1, c2 = Channel(), Channel()
    # Internal wiring: the first xor result (also fanned out) and the
    # two partial carries.
    xor1 = Channel()
    xor1a, xor1b = Channel(), Channel()
    and1 = Channel()
    and2 = Channel()
    return Par(Delta2(A_in, a1, a2),
               Delta2(B_in, b1, b2),
               Delta2(C_in, c1, c2),
               Delta2(xor1, xor1a, xor1b),
               Xor(a1, b1, xor1),
               Xor(xor1a, c1, Sum_in),
               And(a2, b2, and1),
               And(xor1b, c2, and2),
               Or(and1, and2, Carry_in))
if __name__ == '__main__':
    print('\nFull adder implemented in Python CSP\n')
    # Adder inputs, outputs, and the channels feeding the STDOUT printers.
    A, B, Cin = Channel(), Channel(), Channel()
    Carry, Sum = Channel(), Channel()
    PCarry, PSum = Channel(), Channel()
    # Create and start adder
    Par(Bool1(A),
        Bool2(B),
        Zeroes(Cin),
        fulladder(A, B, Cin, Sum, Carry),
        Sign(Carry, PCarry, 'Carry: '),
        Printer(PCarry),
        Sign(Sum, PSum, 'Sum: '),
        Printer(PSum)).start()
| Python |
#!/usr/bin/env python
# FIXME: Nowhere near PEP8 compatible :(
from csp.csp import *
def calculateRowColumnProduct(self, A, row, B, col):
    """Dot product of A's *row* with B's *col* (*self* is unused)."""
    return sum(A[row][i] * B[i][col] for i in range(len(A[row])))
@process
def ParcalculateRowColumnProduct(cout, A, row, B, col):
    """Compute one output cell and send (row, col, value) down cout.

    readset =
    writeset = cout
    """
    value = sum(A[row][i] * B[i][col] for i in range(len(A[row])))
    cout.write((row, col, value))
class Matrix():
    """Simple dense matrix backed by a list of row lists."""

    def __init__(self, h, k):
        # h rows by k columns, zero-filled.
        self.matrix = [[0 for _ in range(k)] for _ in range(h)]

    def Multiply(self, mb):
        """Sequential matrix product self * mb; returns a new Matrix.

        Raises Exception on a dimension mismatch. (An unreachable
        'return' after the raise has been removed.)
        """
        b = mb.matrix
        a = self.matrix
        if len(a[0]) != len(b):
            raise Exception()
        mat = Matrix(len(a), len(b[0]))
        for i in range(len(a)):
            for j in range(len(b[0])):
                mat.matrix[i][j] = calculateRowColumnProduct(self, a, i, b, j)
        return mat

    def ParMultiply(self, mb):
        """Parallel matrix product: one CSP process per output cell.

        Raises Exception on a dimension mismatch.
        """
        b = mb.matrix
        a = self.matrix
        if len(a[0]) != len(b):
            raise Exception()
        procs = []
        chnls = []
        mat = Matrix(len(a), len(b[0]))
        for i in range(len(a)):
            for j in range(len(b[0])):
                ch = Channel()
                chnls.append(ch)
                procs.append(ParcalculateRowColumnProduct(ch, a, i, b, j))
        Par(*procs).start()
        alt = Alt(*chnls)
        # Collect every (row, col, value) triple; renamed from a/b so the
        # matrix references are no longer shadowed by the loop unpack.
        for _ in range(len(chnls)):
            row, col, value = alt.select()
            mat.matrix[row][col] = value
        alt.poison()
        return mat

    def createID(self):
        """Overwrite self.matrix with the identity pattern in place."""
        for i in range(len(self.matrix)):
            for j in range(len(self.matrix[0])):
                self.matrix[i][j] = 1 if i == j else 0

    def printMatrix(self):
        print(self.matrix)
if __name__ == '__main__':
    # Multiply two identity matrices sequentially, then in parallel.
    left = Matrix(3, 3)
    right = Matrix(3, 3)
    left.createID()
    right.createID()
    left.Multiply(right).printMatrix()
    left.ParMultiply(right).printMatrix()
    print("")
| Python |
#!/usr/bin/env python
from csp.csp import *
from csp.builtins import Fibonacci, Generate, Multiply, Printer
if __name__ == '__main__':
    # Fibonacci and Generate streams multiplied pairwise, then printed.
    chans = [Channel() for _ in range(3)]
    Par(Fibonacci(chans[0]),
        Generate(chans[1]),
        Multiply(chans[0], chans[1], chans[2]),
        Printer(chans[2])).start()
| Python |
#!/usr/bin/env python
"""
This is a python-csp implementation of the Kamaelia box, based on
Michael Spark's OCCAM code: http://pastebin.com/B1kqx88G
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'July 2010'
import logging
import os
from csp.csp import *
from csp.builtins import Generate, Printer
import random
_RANGEN = random.Random(os.urandom(16))
class GuardedAlt(Alt):
    """Guarded selection, as in OCCAM.

    In this form of ALTing, the GuardedAlt object is passed a list of
    callable / Guard (usually Channel) pairs. When select is called,
    the result is usually the result of a Channel read from a channel
    which has a waiting writer, for which the corresponding callable
    evaluates to True.

    For example:

        x = 3
        alt = GuardedAlt((lambda: x > 3, c1), (lambda: x <= 3, c2))
        print alt.select()

    will print the result of reading from c2 as soon as c2 is blocked
    on the offer of a channel write.

    Maybe this should be the default implementation of Alting? A great
    deal of code is shared between the Alt class and GuardedAlt. I
    have left this in, for backwards compatibility and efficiency, but
    it should probably be looked at again.
    """

    def __init__(self, *args):
        super(GuardedAlt, self).__init__()
        # Each argument must be a (predicate, Guard) pair.
        for (call, guard) in args:
            assert callable(call)
            assert isinstance(guard, Guard)
        self.guards = list(args)
        self.last_selected = None
        # These two should not be needed, but they should be bound to
        # a sensible method, since they would otherwise be inherited
        # from Alt.
        self.fair_select = self.select
        self.pri_select = self.select

    def poison(self):
        """Poison the last selected guard and unlink from the guard list.

        Sets self.last_selected to None.

        NOTE(review): self.guards holds (call, guard) pairs but
        last_selected is a bare guard — confirm this remove() call can
        ever match an element of the list.
        """
        logging.debug(str(type(self.last_selected)))
        self.last_selected.disable() # Just in case
        try:
            self.last_selected.poison()
        except Exception:
            pass
        logging.debug('Poisoned last selected.')
        self.guards.remove(self.last_selected)
        logging.debug('{0} guards'.format(len(self.guards)))
        self.last_selected = None

    def _preselect(self):
        """Check for special cases when any form of select() is called."""
        if len(self.guards) == 0:
            raise NoGuardInAlt()
        elif len(self.guards) == 1:
            call, guard = self.guards[0]
            logging.debug('Alt Selecting unique guard: {0}'.format(guard.name))
            self.last_selected = guard
            # Busy-wait until the predicate holds and the guard is ready.
            while not (call() and guard.is_selectable()):
                guard.enable()
            return guard.select()
        return None

    def select(self):
        """Randomly select from ready guards."""
        if len(self.guards) < 2:
            return self._preselect()
        ready = []
        # Busy-wait: enable every guard, then collect those whose
        # predicate holds and which are selectable.
        while len(ready) == 0:
            for (call, guard) in self.guards:
                guard.enable()
            for (call, guard) in self.guards:
                if call() and guard.is_selectable():
                    ready.append(guard)
        logging.debug('Alt got {0} items to choose from out of {1}'.format(len(ready), len(self.guards)))
        selected = _RANGEN.choice(ready)
        self.last_selected = selected
        # Stand down every guard we did not pick.
        for call, guard in self.guards:
            if guard is not selected:
                guard.disable()
        return selected.select()
@process
def BoundedQueue(cin, cout, maxsize):
    """Port of Michael Spark's OCCAM code.

    Forwards messages from cin to cout through an internal FIFO
    holding at most maxsize items.
    """
    @process
    def inproc(chan_in, chan_next, chan_pass):
        # Accept new input only while there is room in the queue; hand
        # the head item to outproc only while the queue is non-empty.
        queue = []
        alt = GuardedAlt((lambda : len(queue) < maxsize, chan_in),
                         (lambda : len(queue) > 0, chan_next))
        while True:
            msg = alt.select()
            if alt.last_selected == chan_in:
                queue.append(msg)
                print 'QUEUE:', queue # Bad style (Python 2 print statement)
            elif alt.last_selected == chan_next:
                chan_pass.write(queue[0])
                queue = queue[1:]
    @process
    def outproc(chan_next, chan_pass, chan_out):
        # Request the next item, receive it, forward it downstream.
        while True:
            chan_next.write("ANY")
            msg = chan_pass.read()
            chan_out.write(msg)
    chan_pass, chan_next = Channel(), Channel()
    Par(inproc(cin, chan_next, chan_pass),
        outproc(chan_next, chan_pass, cout)).start()
@process
def test_queue():
    """Push 15 integers through a size-5 BoundedQueue into a Printer."""
    @process
    def source(cout, n=15):
        i = 0
        while i < n:
            cout.write(i)
            i += 1
        cout.poison()
    cin, cout = Channel(), Channel()
    Par(source(cin), BoundedQueue(cin, cout, 5), Printer(cout)).start()
    return
@process
def test_alt(cout1, cout2):
    """Read forever from an Alt over one (or more) channels and print."""
    alt = Alt(cout1) # Simplest case, should act like read.
    # alt = Alt(cout1, cout2) # Basic test.
    # alt = GuardedAlt((lambda: True, cout1), (lambda: True, cout))
    while True:
        print alt.select()
@process
def test_runner():
    """Feed two Generate streams into test_alt."""
    chan1, chan2 = Channel(), Channel()
    Par(Generate(chan1), Generate(chan2), test_alt(chan1, chan2)).start()
if __name__ == '__main__':
    # set_debug(True)
    # Run the Alt demo; swap the comments to exercise the queue instead.
    test_runner().start()
    # test_queue().start()
| Python |
"""
Solution to the sleeping barber problem in python-csp.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__date__ = 'July 2010'
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
from csp.csp import *
from csp.builtins import Printer
from csp.guards import Timer
from queue import BoundedQueue as Queue
@process
def generate_customers(out_chan, printer):
    """Endlessly send randomly chosen customers into the barbershop."""
    import random
    pythons = ['Michael Palin', 'John Cleese', 'Terry Jones',
               'Terry Gilliam', 'Graham Chapman']
    while True:
        customer = random.choice(pythons)
        printer.write('{0} needs a good shave!'.format(customer))
        out_chan.write(customer)
@process
def barber(door, printer):
    """Sleep until a customer arrives through *door*, then shave them.

    Progress messages are written to the *printer* channel.
    """
    import random
    timer = Timer()
    while True:
        printer.write('Barber is sleeping.')
        customer = door.read()
        # The original wrote to the undefined name 'print_c' (NameError);
        # 'printer' is the message channel passed to this process.
        printer.write('The barber has woken to give {0} a shave.'.format(customer))
        timer.sleep(random.random() * 5)
@process
def main(max_chairs):
    """Wire up the shop: customers -> bounded waiting room -> barber.

    max_chairs bounds the waiting-room queue between the door channels.
    """
    door_in, door_out = Channel(), Channel()
    printer = Channel()
    # Printer(printer) added: both generate_customers and barber write
    # progress messages to this channel, and with no reader those
    # writes would block forever.
    Par(generate_customers(door_in, printer),
        Queue(door_in, door_out, max_chairs),
        barber(door_out, printer),
        Printer(printer)).start()
if __name__ == '__main__':
    # Start simulation with 5 chairs in waiting room.
    main(5).start()
| Python |
#!/usr/bin/env python
"""Mandelbrot set computed in parallel using python-csp.
Multiple-producer, single consumer architecture.
"""
from csp.csp import *
import logging, math, Numeric, pygame, time
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'December 2008'
MAXITER = 100
"""@var: Number of iterations used to determine each pixel of the fractal image.
@see: L{mandelbrot}
"""
def get_colour(mag, cmin=0, cmax=100):
    """Given a float, returns an RGB triple.

    Recipe 9.10 from the Python Cookbook: *mag* is normalised into
    [cmin, cmax] and mapped onto a blue-to-red ramp.

    @param mag: Magnitude value from which to calculate RGB triple.
    @keyword cmin: Minimum possible value for C{mag}.
    @keyword cmax: Maximum possible value for C{mag}.
    @return: An integer (red, green, blue) tuple.
    """
    assert cmin != cmax
    clamp = lambda v: min(max(v, 0.), 1.)
    frac = float(mag - cmin) / (cmax - cmin)
    red = clamp(4 * (frac - 0.25))
    green = clamp(4 * math.fabs(frac - 0.5) - 1.)
    blue = clamp(4 * (0.75 - frac))
    return int(255 * red), int(255 * green), int(255 * blue)
@process
def mandelbrot(xcoord, dimension, cout, acorn=-2.0, bcorn=-1.250):
    """Calculate pixel values for a single column of a Mandelbrot set.

    Writes one image column — (xcoord, list of RGB triples of length
    height) — to C{cout}. Uses the normalized iteration count
    algorithm to smooth colour gradients outside the set.

    readset =
    writeset = cout

    @param xcoord: x-coordinate of this image column.
    @param dimension: (width, height) of the overall fractal.
    @param cout: Channel down which the image column will be sent.
    @keyword acorn: Seed value for fractal generation (real part).
    @keyword bcorn: Seed value for fractal generation (imaginary part).
    """
    width, height = dimension

    def smooth(zz, n):
        # Normalized iteration count algorithm.
        return n + 1 - math.log(math.log(abs(zz))) / math.log(2)

    column = []
    for ycoord in range(height):
        z = complex(0., 0.)
        c = complex(acorn + xcoord * 2.5 / float(width),
                    bcorn + ycoord * 2.5 / float(height))
        for i in range(MAXITER):
            z = complex(z.real ** 2 - z.imag ** 2 + c.real,
                        2 * z.real * z.imag + c.imag)
            if abs(z) ** 2 > 4:
                break
        if i == MAXITER - 1:
            # Point lies inside the Mandelbrot set.
            column.append((0, 0, 0))
        else:
            # Point lies outside the Mandelbrot set.
            column.append(get_colour(smooth(z, i), cmax=MAXITER))
    logging.debug('sending column for x={0}'.format(xcoord))
    cout.write((xcoord, column))
    return
@process
def consume(IMSIZE, filename, chan):
    """Consumer process to aggregate image data for Mandelbrot fractal.

    Reads one (xcoord, column) pair per image column from the single
    shared channel, blits each into a pygame window, saves the image,
    then loops on window events: closing the window quits; 's'
    re-saves the image.

    readset = chan
    writeset =

    @type IMSIZE: C{tuple}
    @param IMSIZE: Width and height of generated fractal image.
    @type filename: C{str}
    @param filename: File in which to save generated fractal image.
    @type chan: L{csp.csp.Channel}
    @param chan: Input channel from which image columns will be read.
    """
    pygame.init()
    screen = pygame.display.set_mode((IMSIZE[0], IMSIZE[1]), 0)
    pygame.display.set_caption('python-csp Mandelbrot fractal example.')
    # Create initial pixel data
    pixmap = Numeric.zeros((IMSIZE[0], IMSIZE[1], 3))
    # Wait on channel events
    for i in range(IMSIZE[0]):
        xcoord, column = chan.read()
        logging.debug('Consumer got some data for column {0}'.format(xcoord))
        # Update column of blit buffer
        pixmap[xcoord] = column
        # Update image on screen.
        logging.debug('Consumer drawing image on screen')
        pygame.surfarray.blit_array(screen, pixmap)
        pygame.display.update(xcoord, 0, 1, IMSIZE[1])
    pygame.image.save(screen, filename)
    logging.info('Consumer finished processing image data')
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                return
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, filename)
                print('Saving fractal image in: ' + str(filename))
def main(IMSIZE, filename, level='info'):
    """Manage all processes and channels required to generate fractal.

    @type IMSIZE: C{tuple}
    @param IMSIZE: Size of generated Mandelbrot fractal image.
    @type filename: C{str}
    @param filename: Name of file in which to store generated fractal image.
    @type level: C{str}
    @precondition: C{level in ['debug', 'info', 'warning', 'error', 'critical']}
    @param level: Level of log output (written to L{sys.stdout}).
    """
    LEVELS = {'debug': logging.DEBUG,
              'info': logging.INFO,
              'warning': logging.WARNING,
              'error': logging.ERROR,
              'critical': logging.CRITICAL}
    assert(level in list(LEVELS.keys()))
    logging.basicConfig(level=LEVELS[level])
    channel = Channel()
    # One producer process per image column, all sharing one channel.
    producers = [mandelbrot(x, IMSIZE, channel) for x in range(IMSIZE[0])]
    # Start the consumer first so it is ready before producers write.
    consume(IMSIZE, filename, channel).start()
    time.sleep(1)
    logging.info('Image size: {0}x{1}'.format(*IMSIZE))
    logging.info('{0} producer processes, {1} consumer processes'.format(len(producers), 1))
    # Start and join producer processes.
    Par(*producers).start()
    logging.info('All processes joined.')
    return
if __name__ == '__main__':
    # Alternative image sizes, largest to smallest:
    # (1024, 768), (640, 480), (480, 320), (320, 240), (250, 150)
    IMSIZE = (800, 600)
    import sys
    filename = sys.argv[1] if len(sys.argv) > 1 else 'mandelbrot.png'
    del sys
    main(IMSIZE, filename, level='info')
| Python |
#!/usr/bin/env python
"""Mandelbrot set computed in parallel using python-csp.
Multiple-producer, single consumer architecture.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
from csp.csp import *
import logging, math, pygame, time
import Numeric
__author__ = 'Sam Wilson'
__credits__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'December 2008'
MAXITER = 100
"""@var: Number of iterations used to determine each pixel of the fractal image.
@see: L{mandelbrot}
"""
SOFAR = 0
"""@var: Number of columns processed so far.
@see: L{consumer}
"""
def get_colour(mag, cmin=0, cmax=100):
    """Map a magnitude onto an RGB triple.

    Recipe 9.10 from the Python Cookbook.

    @type mag: C{int}
    @param mag: Magnitude value from which to calculate RGB triple.
    @type cmin: C{int}
    @keyword cmin: Minimum possible value for C{mag}.
    @type cmax: C{int}
    @keyword cmax: Maximum possible value for C{mag}.
    @rtype: C{tuple}
    @return: An integer tuple representing an RGB value.
    """
    assert cmin != cmax
    # Normalise mag into [0, 1] relative to the (cmin, cmax) range.
    unit = float(mag - cmin) / (cmax - cmin)
    clamp = lambda v: min(max(v, 0.), 1.)
    red = clamp(4 * (unit - 0.25))
    green = clamp(4 * math.fabs(unit - 0.5) - 1.)
    blue = clamp(4 * (0.75 - unit))
    return int(255 * red), int(255 * green), int(255 * blue)
@process
def mandelbrot(xcoord, dimension, cout, acorn=-2.0, bcorn=-1.250):
    """Calculate pixel values for a single column of a Mandelbrot set.

    Writes an image column to C{cout}.  An image column is a list of
    RGB triples of length C{height}.  Uses the normalized iteration
    count algorithm to smooth the colour gradients of the area outside
    the set.  After each column this process reads its next
    x-coordinate back from C{cout}; a value of -1 tells it to stop.

    readset = cout
    writeset = cout

    @type xcoord: C{int}
    @param xcoord: x-coordinate of the first image column to compute.
    @type dimension: C{tuple}
    @param dimension: (width, height) of the overall Mandelbrot fractal.
    @type cout: L{csp.csp.Channel}
    @param cout: Channel down which image columns will be sent.
    @type acorn: C{float}
    @keyword acorn: Seed value for fractal generation (real part).
    @type bcorn: C{float}
    @keyword bcorn: Seed value for fractal generation (imaginary part).
    """
    (width, height) = dimension
    # nu implements the normalized iteration count algorithm
    nu = lambda zz, n: n + 1 - math.log(math.log(abs(zz)))/math.log(2)
    imgcolumn = [0. for i in range(height)]
    while 1:
        for ycoord in range(height):
            z = complex(0., 0.)
            c = complex(acorn + xcoord*2.5/float(width),
                        bcorn + ycoord*2.5/float(height))
            # Iterate z -> z**2 + c until escape (|z| > 2) or MAXITER.
            for i in range(MAXITER):
                z = complex(z.real**2 - z.imag**2 + c.real,
                            2*z.real*z.imag + c.imag)
                if abs(z)**2 > 4: break
            if i == MAXITER - 1:
                # Point lies inside the Mandelbrot set.
                colour = (0,0,0)
            else:
                # Point lies outside the Mandelbrot set.
                colour = get_colour(nu(z, i),0, cmax=MAXITER)
            imgcolumn[ycoord] = colour
        cout.write((xcoord, imgcolumn))
        # Work-stealing protocol: the consumer replies on the same
        # channel with the next column index, or -1 when done.
        xcoord = cout.read()
        if xcoord == -1:
            return
@process
def consume(IMSIZE, filename, cins):
    """Consumer process to aggregate image data for Mandelbrot fractal.

    Reads columns from the producers by ALTing over C{cins}, then hands
    the selected producer its next column index (or -1 to terminate).

    readset = cins
    writeset = cins

    @type IMSIZE: C{tuple}
    @param IMSIZE: Width and height of generated fractal image.
    @type filename: C{str}
    @param filename: File in which to save generated fractal image.
    @type cins: C{list}
    @param cins: Input channels from which image columns will be read.
    """
    # Create initial pixel data
    pixmap = Numeric.zeros((IMSIZE[0], IMSIZE[1], 3))
    pygame.init()
    screen = pygame.display.set_mode((IMSIZE[0], IMSIZE[1]), 0)
    pygame.display.set_caption('python-csp Mandelbrot fractal example.')
    # Wait on channel events
    t0 = time.time()
    alt = Alt(*cins)
    logging.debug('Consumer about to begin ALT loop')
    # j is the next column index to hand out to an idle producer.
    # NOTE(review): j starts at 0 while producers already start at
    # columns 0..len(cins)-1, so early columns appear to be computed
    # twice -- confirm against the producer protocol.
    j = 0
    for i in range(IMSIZE[0]):
        xcoord, column = alt.fair_select()
        logging.debug('Consumer got some data for column {0}'.format(xcoord))
        # Update column of blit buffer
        pixmap[xcoord] = column
        # Update image on screen.
        pygame.surfarray.blit_array(screen, pixmap)
        pygame.display.update(xcoord, 0, 1, IMSIZE[1])
        # Reply to the producer we just read from: next column or -1.
        if j < IMSIZE[0]:
            alt.last_selected.write(j)
            j += 1
        else:
            alt.last_selected.write(-1)
    print('TIME TAKEN: ' + str(time.time() - t0) +'seconds.')
    logging.debug('Consumer drawing image on screen')
    pygame.image.save(screen, filename)
    logging.info('Consumer finished processing image data')
    # Event loop: keep the window alive; 's' re-saves the screenshot.
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                return
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, filename)
                print('Saving fractal image in: ' + str(filename))
def main(IMSIZE, filename, granularity=10, level='info'):
    """Manage all processes and channels required to generate fractal.

    @type IMSIZE: C{tuple}
    @param IMSIZE: Size of generated Mandelbrot fractal image.
    @type filename: C{str}
    @param filename: Name of file in which to store generated fractal image.
    @type granularity: C{int}
    @param granularity: Number of producer processes (and channels) created.
    @type level: C{str}
    @precondition: C{level in ['debug', 'info', 'warning', 'error', 'critical']}
    @param level: Level of log output (written to L{sys.stdout}).
    """
    LEVELS = {'debug': logging.DEBUG,
              'info': logging.INFO,
              'warning': logging.WARNING,
              'error': logging.ERROR,
              'critical': logging.CRITICAL}
    assert(level in list(LEVELS.keys()))
    logging.basicConfig(level=LEVELS[level])
    # Channel and process lists.
    channels, processes = [], []
    # FIX: removed the dead store `SOFAR = granularity - 1`.  Without a
    # `global SOFAR` declaration it only created an unused local; the
    # module-level SOFAR counter was never updated by it.
    # Create channels and add producer processes to process list.
    for x in range(granularity):
        channels.append(Channel())
        processes.append(mandelbrot(x, IMSIZE, channels[x]))
    # The consumer runs inside the same Par as the producers.
    processes.insert(0, consume(IMSIZE, filename, channels))
    # Start and join producer processes.
    mandel = Par(*processes)
    mandel.start()
    logging.info('Image size: {0}x{1}'.format(*IMSIZE))
    logging.info('{0} producer processes, {1} consumer processes'.format(len(processes)-1, 1))
    logging.info('All processes joined.')
    return
if __name__ == '__main__':
    import sys
    # First CLI argument (if any) chooses the producer process count.
    numprocs = int(sys.argv[1]) if len(sys.argv) > 1 else 10
    del sys
    # Smaller alternatives: (320, 240) and (480, 320).
    main((640,480), 'mandelbrot.png', granularity=numprocs, level='info')
| Python |
#!/usr/bin/env python
"""Mandelbrot set computed in parallel using python-csp.
Multiple-producer, single consumer architecture.
"""
from csp.csp import *
import logging, math, Numeric, pygame, time
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'December 2008'
MAXITER = 100
"""@var: Number of iterations used to determine each pixel of the fractal image.
@see: L{mandelbrot}
"""
def get_colour(mag, cmin=0, cmax=100):
    """Turn a magnitude in [cmin, cmax] into an (r, g, b) byte triple.

    Recipe 9.10 from the Python Cookbook.

    @type mag: C{int}
    @param mag: Magnitude value from which to calculate RGB triple.
    @type cmin: C{int}
    @keyword cmin: Minimum possible value for C{mag}.
    @type cmax: C{int}
    @keyword cmax: Maximum possible value for C{mag}.
    @rtype: C{tuple}
    @return: An integer tuple representing an RGB value.
    """
    assert cmin != cmax
    fraction = float(mag - cmin) / (cmax - cmin)
    # Each channel ramps linearly over part of the range, clamped to [0, 1].
    red = min(max(4 * (fraction - 0.25), 0.), 1.)
    green = min(max(4 * math.fabs(fraction - 0.5) - 1., 0), 1.)
    blue = min(max(4 * (0.75 - fraction), 0.), 1.)
    return int(255 * red), int(255 * green), int(255 * blue)
@process
def mandelbrot(xcoord, dimension, cout, acorn=-2.0, bcorn=-1.250):
    """Produce one column of Mandelbrot pixel data and send it down cout.

    The column is a list of C{height} RGB triples.  Colours outside the
    set are smoothed with the normalized iteration count algorithm.

    readset =
    writeset = cout

    @type xcoord: C{int}
    @param xcoord: x-coordinate of this image column.
    @type dimension: C{tuple}
    @param dimension: (width, height) of the overall Mandelbrot fractal.
    @type cout: L{csp.csp.Channel}
    @param cout: Channel down which the image column will be sent.
    @type acorn: C{float}
    @keyword acorn: Seed value for fractal generation (real part).
    @type bcorn: C{float}
    @keyword bcorn: Seed value for fractal generation (imaginary part).
    """
    width, height = dimension
    # Normalized iteration count: smooths colour gradients outside the set.
    def smooth(zz, n):
        return n + 1 - math.log(math.log(abs(zz))) / math.log(2)
    # Real part is constant down a column, so compute it once.
    real_part = acorn + xcoord*2.5/float(width)
    column = []
    for ycoord in range(height):
        c = complex(real_part, bcorn + ycoord*2.5/float(height))
        z = complex(0., 0.)
        for i in range(MAXITER):
            z = complex(z.real**2 - z.imag**2 + c.real,
                        2*z.real*z.imag + c.imag)
            if abs(z)**2 > 4:
                break
        if i == MAXITER - 1:
            # Point lies inside the Mandelbrot set.
            column.append((0,0,0))
        else:
            # Point lies outside the Mandelbrot set.
            column.append(get_colour(smooth(z, i), cmax=MAXITER))
    logging.debug('sending column for x={0}'.format(xcoord))
    cout.write((xcoord, column))
    return
@process
def consume(IMSIZE, filename, cins):
    """Consumer process to aggregate image data for Mandelbrot fractal.

    readset = cins
    writeset =

    @type IMSIZE: C{tuple}
    @param IMSIZE: Width and height of generated fractal image.
    @type filename: C{str}
    @param filename: File in which to save generated fractal image.
    @type cins: C{list}
    @param cins: Input channels from which image columns will be read.
    """
    pygame.init()
    screen = pygame.display.set_mode((IMSIZE[0], IMSIZE[1]), 0)
    pygame.display.set_caption('python-csp Mandelbrot fractal example.')
    # Create initial pixel data
    pixmap = Numeric.zeros((IMSIZE[0], IMSIZE[1], 3))
    # Wait on channel events.  One dedicated channel per producer, read
    # in channel order -- a slow early column blocks the later ones.
    for cin in cins:
        xcoord, column = cin.read()
        logging.debug('Consumer got some data for column {0}'.format(xcoord))
        # Update column of blit buffer
        pixmap[xcoord] = column
        # Update image on screen.
        logging.debug('Consumer drawing image on screen')
        pygame.surfarray.blit_array(screen, pixmap)
        pygame.display.update(xcoord, 0, 1, IMSIZE[1])
    pygame.image.save(screen, filename)
    logging.info('Consumer finished processing image data')
    # Event loop: keep the window alive; 's' re-saves the screenshot.
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                return
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, filename)
                print('Saving fractal image in: ' + str(filename))
def main(IMSIZE, filename, level='info'):
    """Manage all processes and channels required to generate fractal.

    @type IMSIZE: C{tuple}
    @param IMSIZE: Size of generated Mandelbrot fractal image.
    @type filename: C{str}
    @param filename: Name of file in which to store generated fractal image.
    @type level: C{str}
    @precondition: C{level in ['debug', 'info', 'warning', 'error', 'critical']}
    @param level: Level of log output (written to L{sys.stdout}).
    """
    LEVELS = {'debug': logging.DEBUG,
              'info': logging.INFO,
              'warning': logging.WARNING,
              'error': logging.ERROR,
              'critical': logging.CRITICAL}
    assert(level in list(LEVELS.keys()))
    logging.basicConfig(level=LEVELS[level])
    # Channel and process lists.
    channels, processes = [], []
    # Create channels and add producer processes to process list.
    for x in range(IMSIZE[0]):
        channels.append(Channel())
        processes.append(mandelbrot(x, IMSIZE, channels[x]))
    # Start consumer processes separately.
    con = consume(IMSIZE, filename, channels)
    con.start()
    time.sleep(1)
    # BUG FIX: this call was missing its closing parenthesis in the
    # original, which made the whole module a SyntaxError.
    logging.info('Image size: {0}x{1}'.format(*IMSIZE))
    logging.info('{0} producer processes, {1} consumer processes'.format(len(processes), 1))
    # Start and join producer processes.
    mandel = Par(*processes)
    mandel.start()
    logging.info('All processes joined.')
    return
if __name__ == '__main__':
    # Startup note from the original author: the process count does not
    # change the rendered output, but the image height affects termination.
    print("""
Increasing the number of processes here makes no difference
to the result. However, increasing the height of the image
leads to early or non-termination.
""")
    # IMSIZE = (640,480) # Ideal value.
    # IMSIZE = (480, 320) # Can't start new thread (Queue problem).
    IMSIZE = (320, 240) # Works OK.
    import sys
    if len(sys.argv) > 1:
        filename = sys.argv[1]
    else:
        filename = 'mandelbrot.png'
    # Drop sys from the namespace once arguments have been parsed.
    del sys
    main(IMSIZE, filename, level='info')
| Python |
#!/usr/bin/env python
"""Mandelbrot set computed in parallel using python-csp.
Multiple-producer, single consumer architecture.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
from csp.csp import *
import logging
import math
import Numeric
import pygame
import time
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'December 2008'
MAXITER = 100
"""@var: Number of iterations used to determine each pixel of the
fractal image.
@see: L{mandelbrot}
"""
def get_colour(mag, cmin=0, cmax=100):
    """Given a magnitude, return an RGB triple of 0-255 integers.

    Recipe 9.10 from the Python Cookbook.

    @type mag: C{int}
    @param mag: Magnitude value from which to calculate RGB triple.
    @type cmin: C{int}
    @keyword cmin: Minimum possible value for C{mag}.
    @type cmax: C{int}
    @keyword cmax: Maximum possible value for C{mag}.
    @rtype: C{tuple}
    @return: An integer tuple representing an RGB value.
    """
    assert cmin != cmax
    def clamp01(value):
        # Restrict a channel intensity to the unit interval.
        if value < 0.:
            return 0.
        if value > 1.:
            return 1.
        return value
    a = float(mag - cmin) / (cmax - cmin)
    red = clamp01(4 * (a - 0.25))
    green = clamp01(4 * math.fabs(a - 0.5) - 1.)
    blue = clamp01(4 * (0.75 - a))
    return int(255 * red), int(255 * green), int(255 * blue)
@process
def mandelbrot(xcoord, dimension, cout, acorn=-2.0, bcorn=-1.250):
    """Calculate pixel values for a single column of a Mandelbrot set.

    Writes an image column to C{cout}.  An image column is a list of
    RGB triples of length C{height}.  Uses the normalized iteration
    count algorithm to smooth the colour gradients of the area outside
    the set.

    readset =
    writeset = cout

    @type xcoord: C{int}
    @param xcoord: x-coordinate of this image column.
    @type dimension: C{tuple}
    @param dimension: (width, height) of the overall Mandelbrot fractal.
    @type cout: L{csp.csp.Channel}
    @param cout: Channel down which image column will be sent.
    @type acorn: C{float}
    @keyword acorn: Seed value for fractal generation (real part).
    @type bcorn: C{float}
    @keyword bcorn: Seed value for fractal generation (imaginary part).
    """
    (width, height) = dimension
    # nu implements the normalized iteration count algorithm
    nu = lambda zz, n: n + 1 - math.log(math.log(abs(zz)))/math.log(2)
    imgcolumn = [0. for i in range(height)]
    for ycoord in range(height):
        z = complex(0., 0.)
        c = complex(acorn + xcoord*2.5/float(width),
                    bcorn + ycoord*2.5/float(height))
        # Iterate z -> z**2 + c until escape (|z| > 2) or MAXITER reached.
        for i in range(MAXITER):
            z = complex(z.real**2 - z.imag**2 + c.real,
                        2*z.real*z.imag + c.imag)
            if abs(z)**2 > 4:
                break
        if i == MAXITER - 1:
            # Point lies inside the Mandelbrot set.
            colour = (0, 0, 0)
        else:
            # Point lies outside the Mandelbrot set.
            colour = get_colour(nu(z, i), cmax=MAXITER)
        imgcolumn[ycoord] = colour
    logging.debug('sending column for x={0}'.format(xcoord))
    cout.write((xcoord, imgcolumn))
    return
@process
def consume(IMSIZE, filename, cins):
    """Consumer process to aggregate image data for Mandelbrot fractal.

    readset = cins
    writeset =

    @type IMSIZE: C{tuple}
    @param IMSIZE: Width and height of generated fractal image.
    @type filename: C{str}
    @param filename: File in which to save generated fractal image.
    @type cins: C{list}
    @param cins: Input channels from which image columns will be read.
    """
    # Create initial pixel data
    pixmap = Numeric.zeros((IMSIZE[0], IMSIZE[1], 3))
    pygame.init()
    screen = pygame.display.set_mode((IMSIZE[0], IMSIZE[1]), 0)
    pygame.display.set_caption('python-csp Mandelbrot fractal example.')
    # Wait on channel events
    t0 = time.time()
    # alt = Alt(*cins)
    # NOTE(review): multiplying an Alt by n appears to yield a generator
    # performing n channel selections (one per next() call) -- confirm
    # against the csp.csp API.
    gen = len(cins) * Alt(*cins)
    logging.debug('Consumer about to begin ALT loop')
    for i in range(len(cins)):
        xcoord, column = next(gen) #alt.select()
        logging.debug('Consumer got some data for column {0}'.format(xcoord))
        # Update column of blit buffer
        pixmap[xcoord] = column
        # Update image on screen.
        pygame.surfarray.blit_array(screen, pixmap)
        pygame.display.update(xcoord, 0, 1, IMSIZE[1])
        # Service window events during rendering so the app stays responsive.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                # Poison all channels so the producers terminate cleanly.
                for channel in cins:
                    channel.poison()
                pygame.time.wait(1000)
                pygame.quit()
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, filename)
    print('TIME TAKEN: ' +str(time.time() - t0) + 'seconds.')
    logging.debug('Consumer drawing image on screen')
    # Timing notes from the original author:
    # With ALT poisoning 320 cols: 211.819334984 seconds
    # Without poisoning 320 cols: 212.845579147 seconds
    # WITH poisoning, without pygame: 210.228826046 seconds.
    # WithOUT poisoning, without pygame: 212.00081706 seconds.
    pygame.image.save(screen, filename)
    logging.info('Consumer finished processing image data')
    # Event loop: keep the window alive; 's' re-saves the screenshot.
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                for channel in cins: channel.poison()
                pygame.time.wait(1000)
                pygame.quit()
                return
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, filename)
    return
@process
def main(IMSIZE, filename, level='info'):
    """Manage all processes and channels required to generate fractal.

    @type IMSIZE: C{tuple}
    @param IMSIZE: Size of generated Mandelbrot fractal image.
    @type filename: C{str}
    @param filename: Name of file in which to store generated fractal image.
    @type level: C{str}
    @precondition: C{level in ['debug', 'info', 'warning', 'error', 'critical']}
    @param level: Level of log output (written to L{sys.stdout}).
    """
    LEVELS = {'debug': logging.DEBUG,
              'info': logging.INFO,
              'warning': logging.WARNING,
              'error': logging.ERROR,
              'critical': logging.CRITICAL}
    assert level in LEVELS
    logging.basicConfig(level=LEVELS[level])
    # One channel + producer per image column; the consumer goes first
    # in the Par so it is ready before the producers write.
    channels = []
    processes = []
    for col in range(IMSIZE[0]):
        channels.append(Channel())
        processes.append(mandelbrot(col, IMSIZE, channels[col]))
    processes.insert(0, consume(IMSIZE, filename, channels))
    # Start and join all processes together.
    Par(*processes).start()
    logging.info('Image size: {0}x{1}'.format(*IMSIZE))
    logging.info('{0} producer processes, {1} consumer processes'.format(len(processes)-1, 1))
    logging.info('All processes joined.')
    return
if __name__ == '__main__':
    # Size notes from the original author: (640, 480) runs out of file
    # descriptors; (320, 240) triggers the pickle bug; (480, 320) and
    # (250, 150) were further alternatives.
    IMSIZE = (320, 240)
    import sys
    filename = sys.argv[1] if len(sys.argv) > 1 else 'mandelbrot.png'
    del sys
    # main is a @process, so it must be started explicitly.
    main(IMSIZE, filename, level='info').start()
| Python |
"""
Boids simulation using python-csp and pygame.
Part4 -- Adding full flocking behaviour.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
"""
from csp.csp import *
import math
import operator
from functools import reduce
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'October 2009'
def distance(first_point, second_point):
    """Euclidean distance between two 2-D points."""
    x1, y1 = first_point
    x2, y2 = second_point
    return math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
def dot_add(first_point, second_point):
    """Componentwise sum of two 2-D points, returned as a list."""
    x1, y1 = first_point
    x2, y2 = second_point
    return [x1 + x2, y1 + y2]
def dot_minus(first_point, second_point):
    """Componentwise difference of two 2-D points, returned as a list."""
    x1, y1 = first_point
    x2, y2 = second_point
    return [x1 - x2, y1 - y2]
def dot_prod(first_point, second_point):
    """Componentwise product of two 2-D points, returned as a list."""
    x1, y1 = first_point
    x2, y2 = second_point
    return [x1 * x2, y1 * y2]
def scale(point, scalar):
    """Multiply both components of a 2-D point by a scalar."""
    x, y = point
    return [x * scalar, y * scalar]
def match_neighbour_velocities(near_vel):
    """Return the mean velocity of a group of neighbouring boids.

    @param near_vel: non-empty sequence of (vx, vy) velocity pairs.
    @return: [mean vx, mean vy] as a two-element list.
    """
    # FIX: the original wrapped zip() in a redundant double list() and
    # used reduce(operator.add, ...) where the builtin sum() is clearer.
    xs, ys = zip(*near_vel)
    n = len(near_vel)
    return [sum(xs) / n, sum(ys) / n]
def avoid_collision(near_pos):
    """Steering vector pushing a boid away from very close neighbours.

    Neighbours closer than VCLOSE pixels contribute the negation of their
    relative position; returns (0.0, 0.0) when none are that close.
    """
    VCLOSE = 7  # Boids are very close if they are within VCLOSE pixels.
    too_close = [p for p in near_pos
                 if math.sqrt(p[0] ** 2 + p[1] ** 2) < VCLOSE]
    if not too_close:
        return (0.0, 0.0)
    # Sum the negated relative positions (inlined dot_prod with (-1, -1)).
    away_x = sum(-1.0 * p[0] for p in too_close)
    away_y = sum(-1.0 * p[1] for p in too_close)
    return (away_x, away_y)
def stay_with_flock(near_pos, numnear):
    """Centroid of nearby flockmate positions (cohesion target)."""
    xs, ys = zip(*near_pos)
    return [sum(xs) / numnear, sum(ys) / numnear]
def apply_speed_limit(velocity):
    """Damp a velocity whose squared magnitude exceeds the speed limit.

    NOTE(review): the damping factor is LIMIT**2 / |v|**2 rather than the
    LIMIT / |v| a strict clamp would use -- preserved exactly as-is.
    """
    SPEED_LIMIT = 7.0  # Velocity limit (applies to both X and Y directions).
    speed_sq = velocity[0] ** 2 + velocity[1] ** 2
    if speed_sq > SPEED_LIMIT ** 2:
        slowdown = SPEED_LIMIT ** 2 / speed_sq
        # Inlined scale(velocity, slowdown).
        velocity = [velocity[0] * slowdown, velocity[1] * slowdown]
    return velocity
@process
def simulate(infochan, SIZE):
    """Single-boid process with full flocking behaviour.

    Each iteration writes this boid's (centre, velocity) to the flock
    manager, reads back its neighbours' relative states, and updates its
    velocity from the alignment, separation and cohesion rules.

    readchan = infochan
    writechan = infochan
    """
    COHESION = 0.03 # Cohesion weight.
    AVOIDANCE = 0.25 # Separation weight.
    ALIGNMENT = 0.120 # Alignment weight.
    ACCEL = 0.8 # Ideal acceleration weight.
    # NOTE(review): `random` is not imported at the top of this module;
    # presumably it is re-exported by `from csp.csp import *` -- confirm.
    centre = [random.randint(0, SIZE[0]), random.randint(0, SIZE[1])]
    default_velocity = [random.choice((-1.0, 0.0, 1.0)),
                        random.choice((-1.0, 0.0, 1.0))]
    velocity = default_velocity
    while True:
        infochan.write((centre, velocity))
        possible_flockmates = infochan.read()
        if not possible_flockmates:
            # No neighbours: fall back to the initial wander velocity.
            velocity = default_velocity
        else:
            near_pos, near_vel = list(zip(*possible_flockmates))
            numnear = len(near_pos)
            # Weighted sum of the three classic boids rules.
            accel = scale(match_neighbour_velocities(near_vel), ALIGNMENT)
            accel = dot_add(accel, scale(avoid_collision(near_pos), AVOIDANCE))
            accel = dot_add(accel, scale(stay_with_flock(near_pos, numnear), COHESION))
            velocity = dot_add(velocity, scale(accel, ACCEL))
            velocity = apply_speed_limit(velocity)
        centre = dot_add(centre, velocity)
        # Wrap the screen.
        if centre[0]<0: centre[0] += SIZE[0]
        elif centre[0]>SIZE[0]: centre[0] -= SIZE[0]
        if centre[1]<0: centre[1] += SIZE[1]
        elif centre[1]>SIZE[1]: centre[1] -= SIZE[1]
    return
def nearby(first_point, second_point):
    """True when two distinct (position, velocity) records are within 20px."""
    pos1, vel1 = first_point
    pos2, vel2 = second_point
    # Identical records: a boid is never its own neighbour.
    if pos1 == pos2 and vel1 == vel2:
        return False
    # Inlined Euclidean distance check against the 20 pixel neighbourhood.
    dx = pos1[0] - pos2[0]
    dy = pos1[1] - pos2[1]
    return math.sqrt(dx ** 2 + dy ** 2) <= 20
@process
def FlockManager(channels, drawchan, NUMBOIDS):
    """Central coordinator: gathers every boid's state, forwards it to
    the renderer, then sends each boid its neighbours' relative states.

    readchan = channels
    writechan = drawchan, channels
    """
    info = [(0,0) for i in range(len(channels))]
    # relify converts an absolute (position, velocity) record into a
    # position relative to boid i.  It reads the loop variable i at call
    # time, so it must only be invoked inside the loop below.
    relify = lambda x_y_vel: ([info[i][0][0]-x_y_vel[0][0], info[i][0][1]-x_y_vel[0][1]], x_y_vel[1])
    while True:
        # Phase 1: collect the current (position, velocity) of every boid.
        for i in range(NUMBOIDS): info[i] = channels[i].read()
        drawchan.write(info)
        # Phase 2: answer each boid with its nearby flockmates, expressed
        # relative to that boid's own position.
        for i in range(NUMBOIDS):
            near = [posvel for posvel in info if nearby(info[i], posvel)]
            rel = list(map(relify, near))
            channels[i].write(rel)
    return
class Slider(object):
    """Minimal horizontal slider widget drawn with pygame surfaces.

    Based on a demo by PyMike.  The knob's position along a 120px bar
    maps to a float value (100px of travel == 1.0).

    NOTE(review): relies on `pygame` and `Rect` being in scope at call
    time (presumably via `from csp.csp import *`) -- confirm.
    """
    def __init__(self, pos, name, value=0):
        # pos: top-left of the bar; name: label prefix; value: initial setting.
        self.pos = pos
        self.bar = pygame.Surface((120, 15))
        self.bar.fill((200, 200, 200))
        self.slider = pygame.Surface((20, 15))
        self.slider.fill((230, 230, 230))
        pygame.draw.rect(self.bar, (0, 0, 0), (0, 0, 120, 15), 2)
        pygame.draw.rect(self.slider, (0, 0, 0), (-1, -1, 20, 15), 2)
        self.slider.set_at((19, 14), (0, 0, 0))
        self.brect = self.bar.get_rect(topleft = pos)
        self.srect = self.slider.get_rect(topleft = pos)
        # Position the knob to reflect the initial value (value * 100 px).
        self.srect.left = (value * 100.0) + pos[0]
        self.clicked = False
        self.value = value
        self.name = name
        self.label = self.name + ' {0}g'.format(self.value)
        return
    def update(self):
        """Track the mouse: drag the knob and recompute value/label."""
        mousebutton = pygame.mouse.get_pressed()
        cursor = Rect(pygame.mouse.get_pos()[0], pygame.mouse.get_pos()[1], 1, 1)
        if cursor.colliderect(self.brect):
            if mousebutton[0]: self.clicked = True
            else: self.clicked = False
        if not mousebutton[0]: self.clicked = False
        if self.clicked: self.srect.center = cursor.center
        # Keep the knob inside the bar, then derive value from its offset.
        self.srect.clamp_ip(self.brect)
        if self.srect.left - self.brect.left == 0.0: self.value = 0.0
        else: self.value = (self.srect.left - self.brect.left) / 100.0
        self.label = self.name + ' {0}'.format(self.value)
        return
    def render(self, surface):
        """Blit the bar and knob onto the given surface."""
        surface.blit(self.bar, self.brect)
        surface.blit(self.slider, self.srect)
        return
@process
def drawboids(drawchan, SIZE):
    """Render boid positions read from drawchan in a pygame window.

    readchan = drawchan
    writechan =
    """
    import pygame
    FGCOL = (137, 192, 210, 100) # Foreground colour.
    BGCOL = pygame.Color('black') # Background colour.
    FPS = 60 # Maximum frames per second.
    CAPTION = 'python-csp example: Boids'
    FILENAME = 'boids.png' # Screenshot file.
    # BUG FIX: the original referenced COHESION/AVOIDANCE/ALIGNMENT/ACCEL,
    # which are locals of simulate() and raise NameError here.  The
    # slider start values below mirror the defaults used in simulate().
    COHESION = 0.03
    AVOIDANCE = 0.25
    ALIGNMENT = 0.120
    ACCEL = 0.8
    QUIT = False
    clock = pygame.time.Clock()
    dirty, last = [], []
    # BUG FIX: pygame.init() must run before pygame.font.SysFont(), so
    # initialisation is hoisted above the font/slider setup.
    pygame.init()
    screen = pygame.display.set_mode((SIZE[0], SIZE[1]), 0)
    pygame.display.set_caption(CAPTION)
    # NOTE(review): font and sliders are created but never updated or
    # rendered in the loop below -- presumably unfinished UI work.
    font = pygame.font.SysFont('Times New Roman', 16)
    sliders = [Slider((120, SIZE[1] - 100), 'Cohesion', value=COHESION),
               Slider((120, SIZE[1] - 75), 'Avoidance', value=AVOIDANCE),
               Slider((120, SIZE[1] - 50), 'Alignment', value=ALIGNMENT),
               Slider((120, SIZE[1] - 25), 'Acceleration', value=ACCEL)]
    while not QUIT:
        ms_elapsed = clock.tick(FPS)
        print(ms_elapsed)
        # Erase last frame's boids; their rects seed this frame's dirty list.
        dirty = last
        for rect in last: screen.fill(BGCOL, rect)
        last = []
        positions, vels = list(zip(*drawchan.read()))
        for (x, y) in positions:
            rect = pygame.draw.circle(screen, FGCOL, (int(x), int(y)), 2, 0)
            dirty.append(rect)
            last.append(rect)
        pygame.display.update(dirty) # Update dirty rects.
        for event in pygame.event.get(): # Process events.
            if event.type == pygame.QUIT:
                QUIT = True
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, FILENAME)
                print('Saving boids in:' + str(FILENAME))
    # Poison the draw channel so the flock manager terminates too.
    drawchan.poison()
    pygame.quit()
    return
@process
def main():
    """Build the channels and processes for the full boids simulation."""
    NUMBOIDS = 100 # Number of boids in simulation.
    SIZE = (800, 600) # Screen size.
    # One bidirectional info channel per boid, plus one for the renderer.
    infochans = [Channel() for _ in range(NUMBOIDS)]
    drawchan = Channel()
    # Flock manager.
    fm = FlockManager(infochans, drawchan, NUMBOIDS)
    # All processes in the simulation: boids, manager, renderer.
    procs = [simulate(chan, SIZE) for chan in infochans]
    procs.append(fm)
    procs.append(drawboids(drawchan, SIZE))
    # Run everything in parallel.
    Par(*procs).start()
    return
if __name__ == '__main__':
    # main is a @process, so it only runs once .start() is called.
    main().start()
| Python |
"""
Boids simulation using python-csp and pygame.
Part 2 -- Adding movement to the boids.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
"""
from csp.csp import *
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'October 2009'
@process
def simulate(poschan, SIZE):
    """Single-boid process: drifts with a constant random velocity.

    readset =
    writeset = poschan
    """
    # NOTE(review): `random` is not imported at the top of this module;
    # presumably it is re-exported by `from csp.csp import *` -- confirm.
    centre = [random.randint(0, SIZE[0]), random.randint(0, SIZE[1])]
    default_velocity = [random.choice((-1.0, 0.0, 1.0)),
                        random.choice((-1.0, 0.0, 1.0))]
    velocity = default_velocity
    while True:
        centre = [centre[0] + velocity[0],
                  centre[1] + velocity[1]]
        # Wrap the screen.
        if centre[0]<0: centre[0] += SIZE[0]
        elif centre[0]>SIZE[0]: centre[0] -= SIZE[0]
        if centre[1]<0: centre[1] += SIZE[1]
        elif centre[1]>SIZE[1]: centre[1] -= SIZE[1]
        # Write to the drawing process.
        poschan.write(centre)
    return
@process
def drawboids(poschans, SIZE):
    """Drawing process: renders every boid's position with pygame.

    readset = poschans
    writeset =
    """
    import pygame
    FGCOL = (137, 192, 210, 100) # Foreground colour.
    BGCOL = pygame.Color('black') # Background colour.
    FPS = 60 # Maximum frames per second.
    CAPTION = 'python-csp example: Boids'
    FILENAME = 'boids.png' # Screenshot file.
    QUIT = False
    clock = pygame.time.Clock()
    dirty, last = [], []
    # chansize = len(poschans)
    pygame.init()
    screen = pygame.display.set_mode((SIZE[0], SIZE[1]), 0)
    pygame.display.set_caption(CAPTION)
    while not QUIT:
        # Cap the frame rate; ms_elapsed is time since the previous frame.
        ms_elapsed = clock.tick(FPS)
        print(ms_elapsed)
        # Erase last frame's boids; their rects seed this frame's dirty list.
        dirty = last
        for rect in last: screen.fill(BGCOL, rect)
        last = []
        # One blocking read per boid channel, in a fixed order.
        for channel in poschans:
            x, y = channel.read()
            rect = pygame.draw.circle(screen, FGCOL, (int(x), int(y)), 2, 0)
            dirty.append(rect)
            last.append(rect)
        pygame.display.update(dirty) # Update dirty rects.
        for event in pygame.event.get(): # Process events.
            if event.type == pygame.QUIT:
                QUIT = True
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, FILENAME)
                print('Saving boids in:', FILENAME)
    # Poison every channel so the producers terminate, then shut down.
    for chan in poschans: chan.poison()
    pygame.quit()
    return
@process
def main():
    """Create one simulate process per boid plus a single renderer."""
    NUMBOIDS = 100 # Number of boids in simulation.
    SIZE = (800, 600) # Screen size.
    # One position channel per boid, all read by the drawing process.
    poschans = [Channel() for _ in range(NUMBOIDS)]
    procs = [simulate(chan, SIZE) for chan in poschans]
    procs.append(drawboids(poschans, SIZE)) # Drawing process.
    # Run the whole simulation in parallel.
    Par(*procs).start()
    return
if __name__ == '__main__':
    # main is a @process, so it only runs once .start() is called.
    main().start()
| Python |
"""
Boids simulation using python-csp and pygame.
Part3 -- Adding basic flocking behaviour.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have rceeived a copy of the GNU General Public License
along with this program; if not, write to the Free Software
"""
from csp.csp import *
import math
import operator
from functools import reduce
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'October 2009'
def distance(first_point, second_point):
    """Return the Euclidean distance between two (x, y) points."""
    dx = first_point[0] - second_point[0]
    dy = first_point[1] - second_point[1]
    return math.sqrt(dx ** 2 + dy ** 2)
def dot_add(first_point, second_point):
    """Component-wise sum of two points, returned as an [x, y] list."""
    return [first_point[0] + second_point[0],
            first_point[1] + second_point[1]]
def match_neighbour_velocities(near_vel):
    """Return the mean velocity [vx, vy] of a non-empty list of (vx, vy) pairs.

    Fixes: the original wrapped ``zip`` in a redundant ``list(list(...))``
    and used ``reduce(operator.add, ...)`` where the builtin ``sum`` is the
    idiomatic, equivalent form.
    """
    xs, ys = zip(*near_vel)
    n = len(near_vel)
    return [sum(xs) / n, sum(ys) / n]
@process
def simulate(infochan, SIZE):
    """
    readset = infochan
    writeset = infochan
    """
    # NOTE(review): `random` is not imported in this module; presumably it
    # arrives via `from csp.csp import *` -- confirm.
    # Random start position and a unit-step starting velocity.
    centre = [random.randint(0, SIZE[0]), random.randint(0, SIZE[1])]
    default_velocity = [random.choice((-1.0, 0.0, 1.0)),
                        random.choice((-1.0, 0.0, 1.0))]
    velocity = default_velocity
    while True:
        # Handshake with FlockManager: publish our (position, velocity),
        # then read back nearby flockmates in boid-relative coordinates.
        infochan.write((centre, velocity))
        possible_flockmates = infochan.read()
        if not possible_flockmates:
            # No neighbours: fall back to the initial wander velocity.
            velocity = default_velocity
        else:
            near_pos, near_vel = list(zip(*possible_flockmates))
            # Alignment only in part 3: copy the neighbours' mean velocity.
            velocity = match_neighbour_velocities(near_vel)
        centre = dot_add(centre, velocity)
        # Wrap the screen.
        if centre[0]<0: centre[0] += SIZE[0]
        elif centre[0]>SIZE[0]: centre[0] -= SIZE[0]
        if centre[1]<0: centre[1] += SIZE[1]
        elif centre[1]>SIZE[1]: centre[1] -= SIZE[1]
    return
def nearby(first_point, second_point):
    """True when two distinct (position, velocity) states are within 20px."""
    pos1, vel1 = first_point
    pos2, vel2 = second_point
    # A boid is never its own neighbour.
    if pos1 == pos2 and vel1 == vel2:
        return False
    return math.sqrt((pos1[0] - pos2[0]) ** 2 + (pos1[1] - pos2[1]) ** 2) <= 20
@process
def FlockManager(channels, drawchan, NUMBOIDS):
    """
    readchan = channels
    writechan = channels, drawchan
    """
    # Latest (position, velocity) reported by each boid.
    info = [(0,0) for i in range(len(channels))]
    # Translate a flockmate's state into coordinates relative to boid i.
    # NOTE(review): the lambda closes over the loop variable `i` below and
    # is only ever called inside that loop, so it sees the current index.
    relify = lambda x_y_vel: ([info[i][0][0]-x_y_vel[0][0], info[i][0][1]-x_y_vel[0][1]], x_y_vel[1])
    while True:
        # Phase 1: gather state from every boid, then publish the frame.
        for i in range(NUMBOIDS): info[i] = channels[i].read()
        drawchan.write(info)
        # Phase 2: send each boid its nearby flockmates, relative to itself.
        for i in range(NUMBOIDS):
            near = [posvel for posvel in info if nearby(info[i], posvel)]
            rel = list(map(relify, near))
            channels[i].write(rel)
    return
@process
def drawboids(drawchan, SIZE):
    """
    readchan = drawchan
    writechan =
    """
    import pygame
    FGCOL = (137, 192, 210, 100) # Foreground colour.
    BGCOL = pygame.Color('black') # Background colour.
    FPS = 60 # Maximum frames per second.
    CAPTION = 'python-csp example: Boids'
    FILENAME = 'boids.png' # Screenshot file.
    QUIT = False
    clock = pygame.time.Clock()
    dirty, last = [], []
    pygame.init()
    screen = pygame.display.set_mode((SIZE[0], SIZE[1]), 0)
    pygame.display.set_caption(CAPTION)
    while not QUIT:
        ms_elapsed = clock.tick(FPS)
        # print ms_elapsed
        dirty = last
        # Blank out the circles drawn on the previous frame.
        for rect in last: screen.fill(BGCOL, rect)
        last = []
        # One whole-frame read: all boid positions and velocities.
        positions, vels = list(zip(*drawchan.read()))
        for (x, y) in positions:
            rect = pygame.draw.circle(screen, FGCOL, (int(x), int(y)), 2, 0)
            dirty.append(rect)
            last.append(rect)
        pygame.display.update(dirty) # Update dirty rects.
        for event in pygame.event.get(): # Process events.
            if event.type == pygame.QUIT:
                QUIT = True
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, FILENAME)
                print('Saving boids in:', FILENAME)
    # Poison the channel to shut down the rest of the process network.
    drawchan.poison()
    pygame.quit()
    return
@process
def main():
    """Build channels and processes for the part-3 flocking demo and run it."""
    NUMBOIDS = 75      # Number of boids in simulation.
    SIZE = (800, 600)  # Screen size.
    # One bidirectional info channel per boid.
    infochans = [Channel() for _ in range(NUMBOIDS)]
    # Channel carrying whole-frame data to the drawing process.
    drawchan = Channel()
    # Boid processes, then the flock manager, then the drawing process.
    procs = [simulate(chan, SIZE) for chan in infochans]
    procs.append(FlockManager(infochans, drawchan, NUMBOIDS))
    procs.append(drawboids(drawchan, SIZE))
    Par(*procs).start()
    return
# Entry point: run the CSP simulation when executed as a script.
if __name__ == '__main__':
    main().start()
| Python |
"""
Boids simulation using python-csp and pygame.
Part 1 -- Setting up Pygame.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
"""
from csp.csp import *
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'October 2009'
@process
def simulate(poschan, SIZE):
    """
    readset =
    writeset = poschan
    """
    # Initial random position (superseded on the first loop iteration,
    # but kept so the random sequence matches the original).
    centre = [random.randint(0, SIZE[0]), random.randint(0, SIZE[1])]
    width, height = SIZE
    while True:
        # Teleport to a fresh random spot each frame and report it.
        centre = random.randint(0, width), random.randint(0, height)
        poschan.write(centre)
    return
@process
def drawboids(poschans, SIZE):
    """
    readset = poschans
    writeset =
    """
    import pygame
    FGCOL = (137, 192, 210, 100) # Foreground colour.
    BGCOL = pygame.Color('black') # Background colour.
    FPS = 60 # Maximum frames per second.
    CAPTION = 'python-csp example: Boids'
    FILENAME = 'boids.png' # Screenshot file.
    QUIT = False
    clock = pygame.time.Clock()
    dirty, last = [], []
    # chansize = len(poschans)
    pygame.init()
    screen = pygame.display.set_mode((SIZE[0], SIZE[1]), 0)
    pygame.display.set_caption(CAPTION)
    while not QUIT:
        ms_elapsed = clock.tick(FPS)
        print(ms_elapsed)
        dirty = last
        # Blank out the circles drawn on the previous frame.
        for rect in last: screen.fill(BGCOL, rect)
        last = []
        # Read one (x, y) position from every boid and draw it.
        for channel in poschans:
            x, y = channel.read()
            rect = pygame.draw.circle(screen, FGCOL, (int(x), int(y)), 2, 0)
            dirty.append(rect)
            last.append(rect)
        pygame.display.update(dirty) # Update dirty rects.
        for event in pygame.event.get(): # Process events.
            if event.type == pygame.QUIT:
                QUIT = True
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, FILENAME)
                print('Saving boids in:', FILENAME)
    # Poison every channel to terminate the producer processes.
    for chan in poschans: chan.poison()
    pygame.quit()
    return
@process
def main():
    """Assemble and run the part-1 boid demo (random teleporting boids)."""
    NUMBOIDS = 100     # Number of boids in simulation.
    SIZE = (800, 600)  # Screen size.
    # A position-reporting channel for each boid.
    poschans = [Channel() for _ in range(NUMBOIDS)]
    # All boid processes followed by the drawing process.
    procs = [simulate(chan, SIZE) for chan in poschans]
    procs.append(drawboids(poschans, SIZE))
    Par(*procs).start()
    return
# Entry point: run the CSP simulation when executed as a script.
if __name__ == '__main__':
    main().start()
| Python |
"""
Boids simulation using python-csp and pygame.
Part4 -- Adding full flocking behaviour.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
"""
from csp.csp import *
import math
import operator
from functools import reduce
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'October 2009'
def distance(first_point, second_point):
    """Euclidean distance between two (x, y) points."""
    (x1, y1) = first_point
    (x2, y2) = second_point
    return math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
def dot_add(first_point, second_point):
    """Component-wise sum of two points, as an [x, y] list."""
    (x1, y1) = first_point
    (x2, y2) = second_point
    return [x1 + x2, y1 + y2]
def dot_minus(first_point, second_point):
    """Component-wise difference first - second, as an [x, y] list."""
    (x1, y1) = first_point
    (x2, y2) = second_point
    return [x1 - x2, y1 - y2]
def dot_prod(first_point, second_point):
    """Component-wise product of two points, as [x, y] (not a scalar dot)."""
    (x1, y1) = first_point
    (x2, y2) = second_point
    return [x1 * x2, y1 * y2]
def scale(point, scalar):
    """Multiply both components of point by scalar, returning [x, y]."""
    (x, y) = point
    return [x * scalar, y * scalar]
def match_neighbour_velocities(near_vel):
    """Return the mean velocity [vx, vy] of a non-empty list of (vx, vy) pairs.

    Fixes: the original wrapped ``zip`` in a redundant ``list(list(...))``
    and used ``reduce(operator.add, ...)`` where the builtin ``sum`` is the
    idiomatic, equivalent form.
    """
    xs, ys = zip(*near_vel)
    n = len(near_vel)
    return [sum(xs) / n, sum(ys) / n]
def avoid_collision(near_pos):
    """Return a (dx, dy) steering vector pointing away from very close boids.

    near_pos holds neighbour positions relative to this boid; neighbours
    closer than VCLOSE pixels repel.  Returns (0.0, 0.0) when none qualify.
    """
    VCLOSE = 7 # Boids are very close if they are within VCLOSE pixels.
    very_close = [p for p in near_pos
                  if math.sqrt(p[0] ** 2 + p[1] ** 2) < VCLOSE]
    if not very_close:
        return (0.0, 0.0)
    # Steer in the opposite direction of each too-close neighbour.
    away_x = sum(-1.0 * p[0] for p in very_close)
    away_y = sum(-1.0 * p[1] for p in very_close)
    return (away_x, away_y)
def stay_with_flock(near_pos, numnear):
    """Mean relative position [x, y] of numnear flockmates (cohesion target)."""
    total_x = total_y = 0.0
    for (x, y) in near_pos:
        total_x += x
        total_y += y
    return [total_x / numnear, total_y / numnear]
def apply_speed_limit(velocity):
    """Slow down a [vx, vy] velocity whose magnitude exceeds SPEED_LIMIT.

    NOTE(review): the slowdown factor is LIMIT^2 / |v|^2 (not LIMIT / |v|),
    so fast boids end up below the limit rather than clamped to it --
    behaviour preserved exactly from the original.
    """
    SPEED_LIMIT = 7.0 # Velocity limit (applies to both X and Y directions).
    speed_sq = velocity[0] ** 2 + velocity[1] ** 2
    if speed_sq > SPEED_LIMIT ** 2:
        slowdown = SPEED_LIMIT ** 2 / speed_sq
        velocity = [velocity[0] * slowdown, velocity[1] * slowdown]
    return velocity
@process
def simulate(infochan, SIZE):
    """
    readset = infochan
    writeset = infochan
    """
    # Steering weights (hand-tuned).
    COHESION = 0.03 # Cohesion weight.
    AVOIDANCE = 0.25 # Separation weight.
    ALIGNMENT = 0.120 # Alignment weight.
    ACCEL = 0.8 # Ideal acceleration weight.
    # NOTE(review): `random` is not imported in this module; presumably it
    # arrives via `from csp.csp import *` -- confirm.
    centre = [random.randint(0, SIZE[0]), random.randint(0, SIZE[1])]
    default_velocity = [random.choice((-1.0, 0.0, 1.0)),
                        random.choice((-1.0, 0.0, 1.0))]
    velocity = default_velocity
    while True:
        # Handshake with FlockManager: publish state, read back nearby
        # flockmates expressed relative to this boid.
        infochan.write((centre, velocity))
        possible_flockmates = infochan.read()
        if not possible_flockmates:
            # No neighbours: revert to the initial wander velocity.
            velocity = default_velocity
        else:
            near_pos, near_vel = list(zip(*possible_flockmates))
            numnear = len(near_pos)
            # Blend alignment, separation and cohesion steering vectors,
            # then damp with the acceleration weight and the speed limit.
            accel = scale(match_neighbour_velocities(near_vel), ALIGNMENT)
            accel = dot_add(accel, scale(avoid_collision(near_pos), AVOIDANCE))
            accel = dot_add(accel, scale(stay_with_flock(near_pos, numnear), COHESION))
            velocity = dot_add(velocity, scale(accel, ACCEL))
            velocity = apply_speed_limit(velocity)
        centre = dot_add(centre, velocity)
        # Wrap the screen.
        if centre[0]<0: centre[0] += SIZE[0]
        elif centre[0]>SIZE[0]: centre[0] -= SIZE[0]
        if centre[1]<0: centre[1] += SIZE[1]
        elif centre[1]>SIZE[1]: centre[1] -= SIZE[1]
    return
def nearby(first_point, second_point):
    """True when two distinct (position, velocity) states lie within 20px."""
    pos1, vel1 = first_point
    pos2, vel2 = second_point
    if pos1 == pos2 and vel1 == vel2:
        # Identical state means it is the same boid, never a neighbour.
        return False
    dx2 = (pos1[0] - pos2[0]) ** 2
    dy2 = (pos1[1] - pos2[1]) ** 2
    return math.sqrt(dx2 + dy2) <= 20
@process
def FlockManager(channels, drawchan, NUMBOIDS):
    """
    readset = channels
    writeset = drawchan, channels
    """
    # Latest (position, velocity) reported by each boid.
    info = [(0,0) for i in range(len(channels))]
    # Translate a flockmate's state into coordinates relative to boid i.
    # NOTE(review): the lambda closes over the loop variable `i` below and
    # is only ever called inside that loop, so it sees the current index.
    relify = lambda x_y_vel: ([info[i][0][0]-x_y_vel[0][0], info[i][0][1]-x_y_vel[0][1]], x_y_vel[1])
    while True:
        # Phase 1: gather state from every boid, then publish the frame.
        for i in range(NUMBOIDS): info[i] = channels[i].read()
        drawchan.write(info)
        # Phase 2: send each boid its nearby flockmates, relative to itself.
        for i in range(NUMBOIDS):
            near = [posvel for posvel in info if nearby(info[i], posvel)]
            rel = list(map(relify, near))
            channels[i].write(rel)
    return
@process
def drawboids(drawchan, SIZE):
    """
    readset = drawchan
    writeset =
    """
    import pygame
    FGCOL = (137, 192, 210, 100) # Foreground colour.
    BGCOL = pygame.Color('black') # Background colour.
    FPS = 60 # Maximum frames per second.
    CAPTION = 'python-csp example: Boids'
    FILENAME = 'boids.png' # Screenshot file.
    QUIT = False
    clock = pygame.time.Clock()
    dirty, last = [], []
    pygame.init()
    screen = pygame.display.set_mode((SIZE[0], SIZE[1]), 0)
    pygame.display.set_caption(CAPTION)
    while not QUIT:
        ms_elapsed = clock.tick(FPS)
        # print ms_elapsed
        dirty = last
        # Blank out the circles drawn on the previous frame.
        for rect in last: screen.fill(BGCOL, rect)
        last = []
        # One whole-frame read: all boid positions and velocities.
        positions, vels = list(zip(*drawchan.read()))
        for (x, y) in positions:
            rect = pygame.draw.circle(screen, FGCOL, (int(x), int(y)), 2, 0)
            dirty.append(rect)
            last.append(rect)
        pygame.display.update(dirty) # Update dirty rects.
        for event in pygame.event.get(): # Process events.
            if event.type == pygame.QUIT:
                QUIT = True
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, FILENAME)
                print('Saving boids in:' + str(FILENAME))
    # Poison the channel to shut down the rest of the process network.
    drawchan.poison()
    pygame.quit()
    return
@process
def main():
    """Assemble and run the full (part-4) flocking simulation."""
    NUMBOIDS = 50      # Number of boids in simulation.
    SIZE = (800, 600)  # Screen size.
    # One bidirectional info channel per boid.
    infochans = [Channel() for _ in range(NUMBOIDS)]
    # Channel carrying whole-frame data to the drawing process.
    drawchan = Channel()
    # Boid processes, then the flock manager, then the drawing process.
    procs = [simulate(chan, SIZE) for chan in infochans]
    procs.append(FlockManager(infochans, drawchan, NUMBOIDS))
    procs.append(drawboids(drawchan, SIZE))
    Par(*procs).start()
    return
# Entry point: run the CSP simulation when executed as a script.
if __name__ == '__main__':
    main().start()
| Python |
#!/usr/bin/env python
"""
Generic interface to any USB HID sensor.
Copyright (C) Sarah Mount 2008.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import struct
import hid
import os
import sys
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'November 2008'
__version__ = '0.1'
#DEBUG = True
DEBUG = False
class HIDError(Exception):
    """Wrapper for exceptions resulting from calls to libhid.

    value should be (<method-name>, <hid-retval>).
    """
    def __init__(self, value):
        super(HIDError, self).__init__()
        self.value = value
        return
    def __str__(self):
        method, code = self.value
        return 'Call to {0} failed with return code {1}'.format(method, code)
class HIDMatcher(object):
    """Wraps hid.HIDInterfaceMatcher configured for one (vendor, product) pair."""
    def __init__(self, vid, pid, errstream=sys.stderr):
        # NOTE(review): errstream is accepted but never stored or used --
        # confirm whether it was intended for debug output.
        self.matcher = hid.HIDInterfaceMatcher()
        self.matcher.vendor_id = vid
        self.matcher.product_id = pid
        return
    @property
    def vendor(self):
        # USB vendor id this matcher selects on.
        return self.matcher.vendor_id
    @property
    def product(self):
        # USB product id this matcher selects on.
        return self.matcher.product_id
class HIDSensor(object):
    """Generic interface to a single USB HID sensor.

    Subclasses must define VID and PID class attributes; a shared
    HIDMatcher is lazily attached to each subclass as MATCHER.

    Fixes over the original:
    * ``vendor``/``product`` read MATCHER's public properties -- the old
      ``MATCHER._vendor`` / ``MATCHER._product`` attributes never existed
      and raised AttributeError.
    * ``open`` uses ``tempfile.TemporaryFile`` -- ``os.tmpfile()`` was
      removed in Python 3 (and this file otherwise uses Python 3 syntax).
    """
    def __init__(self, id=None, errstream=sys.stderr):
        self._id = None            # Set by open().
        self._error = errstream    # Stream for libhid debug output.
        # Debugging
        if DEBUG:
            hid.hid_set_debug(hid.HID_DEBUG_ALL)
            hid.hid_set_debug_stream(self._error)
            hid.hid_set_usb_debug(0)
        # Initialisation
        if not hid.hid_is_initialised(): # Doesn't seem to work
            try: # Belt AND braces, Sir?
                hid.hid_init()
            except HIDError:
                pass
        self._interface = hid.hid_new_HIDInterface()
        # Ensure we only use one HIDMatcher class per
        # type of sensor, with some reflection-fu.
        if not hasattr(self.__class__, 'MATCHER'):
            self.__class__.MATCHER = HIDMatcher(self.__class__.VID,
                                                self.__class__.PID)
        return
    def open(self):
        """Force-open the matched device and return its identification text.

        Raises HIDError when hid_force_open fails.
        """
        import tempfile
        self._check(hid.hid_force_open(self._interface,
                                       0,
                                       self.__class__.MATCHER.matcher,
                                       3),
                    'hid_force_open')
        self._id = self._interface.id
        # libhid writes the identification to a real file; TemporaryFile
        # provides an OS-level fd.  NOTE(review): assumes the SWIG wrapper
        # accepts a Python file object here, as os.tmpfile() did -- confirm.
        with tempfile.TemporaryFile(mode='w+') as tmpfile:
            hid.hid_write_identification(tmpfile, self._interface)
            tmpfile.flush()
            tmpfile.seek(0)
            details = tmpfile.read() + '\n'
        return details
    def __del__(self): # Destructor
        ### WHY IS THIS BROKE?
        if self._interface is None: return
        # Original cleanup kept below, still disabled:
        # elif hid.hid_is_opened(self._interface):
        #     hid.hid_delete_HIDInterface(self._interface)
        # else:
        #     print('Closing interface {1}'.format(self._id))
        #     self._check(hid.hid_close(self._interface), 'hid_close')
        return
    def interrupt_read(self, endpoint, size, timeout):
        """Interrupt-read `size` bytes from `endpoint`; HIDError on failure."""
        ret, bytes = hid.hid_interrupt_read(self._interface,
                                            endpoint, size, timeout)
        self._check(ret, 'hid_interrupt_read')
        return bytes
    def _check(self, retval, method, success=hid.HID_RET_SUCCESS):
        """Raise HIDError((method, retval)) unless retval == success."""
        if retval != success:
            raise HIDError((method, retval))
    @property
    def vendor(self):
        # Fix: HIDMatcher exposes `vendor`, not `_vendor`.
        return self.__class__.MATCHER.vendor
    @property
    def product(self):
        # Fix: HIDMatcher exposes `product`, not `_product`.
        return self.__class__.MATCHER.product
    @property
    def id(self):
        # Device id assigned by libhid once open() has succeeded.
        return self._id
    def set_feature_report(self, path, buffer):
        """Write a feature report; raises HIDError on failure."""
        self._check(hid.hid_set_feature_report(self._interface, path, buffer),
                    'set_feature_report')
        return
    def get_feature_report(self, path, size):
        """Read a feature report; returns libhid's raw (retval, data) reply."""
        reply = hid.hid_get_feature_report(self._interface, path, size)
        #self._check(ret, 'get_feature_report')
        return reply
class HIDSensorCollection:
    """Interface to all attached sensors of known types.

    Fixes over the original:
    * ``open`` pre-binds ``hidif`` -- previously, if ``hidclass()`` itself
      raised HIDError, the handler's ``del hidif`` hit an unbound name.
    * ``_debug`` star-unpacks ``get_data()`` into the format string, whose
      placeholders ({0}, {1}, ...) expect one argument per field.
    """
    MAX_HID_DEVS = 20 # Limited by libhid
    def __init__(self, hidclasses):
        self._hidtypes = {}
        for hidclass in hidclasses:
            print('Searching for {0} sensors...'.format(hidclass.__name__))
            self._hidtypes[hidclass] = HIDMatcher(hidclass.VID, hidclass.PID)
        self._interfaces = {}   # Maps libhid device id -> open sensor.
        retval = hid.hid_init()
        if retval != hid.HID_RET_SUCCESS:
            raise HIDError(('hid_init', retval))
        return
    def __del__(self):
        return
    def open(self):
        """Probe for up to MAX_HID_DEVS devices of each known sensor type."""
        for hidclass in self._hidtypes:
            for i in range(HIDSensorCollection.MAX_HID_DEVS):
                hidif = None
                try:
                    hidif = hidclass()
                    details = hidif.open()
                    if details:
                        print('Found HID sensor: {0}'.format(hidif._id))
                        self._interfaces[hidif._id] = hidif
                except HIDError:
                    # Discard the partially-opened interface, keep probing.
                    del hidif
        return
    def get_all_data(self):
        """Poll every opened sensor once and return the list of readings."""
        return [hidif.get_data() for hidif in list(self._interfaces.values())]
    def _debug(self):
        """Print one line per attached sensor with its current reading."""
        print(len(self._interfaces), 'HID sensors attached')
        for hidif in self._interfaces:
            print('Interface: {0},'.format(str(hidif)), end=' ')
            print(self._interfaces[hidif]._debug_str().format(*self._interfaces[hidif].get_data()))
| Python |
#!/usr/bin/env python
"""
Simple oscilloscope traces for python-csp.
Requires Pygame.
Features:
* Press 's' to save an oscilloscope trace as a PNG.
* Press UP and DOWN to scale the input more / less.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from csp.csp import *
import copy
import pygame
import numpy
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'November 2009'
__version__ = '0.2'
@forever
def Oscilloscope(inchan, scale=1.0, _process=None):
    """Scrolling oscilloscope trace of the values read from inchan.

    Keys: 's' saves a screenshot; UP/DOWN adjust the vertical scaling.

    Fixes over the original:
    * ``xaxis`` now uses integer division (HEIGHT // 2); the old float
      value made every numpy pixel index a float, which numpy rejects, so
      the trace writes were silently swallowed by the bare ``except``.
    * The bare ``except`` is narrowed to IndexError/TypeError so only
      out-of-window samples are skipped, not unrelated errors.
    """
    # Constants
    WIDTH, HEIGHT = 512, 256
    TRACE, GREY = (80, 255, 100), (110, 110, 110)
    caption = 'Oscilloscope'
    filename = caption + '.png'
    # Open window
    pygame.init()
    screen = pygame.display.set_mode((WIDTH, HEIGHT), 0)
    pygame.display.set_caption(caption)
    # Create a blank chart with vertical ticks, etc
    blank = numpy.zeros((WIDTH, HEIGHT, 3), dtype=numpy.int16)
    # Draw x-axis (integer row index; HEIGHT / 2 would be a float).
    xaxis = HEIGHT // 2
    blank[::, xaxis] = GREY
    # Draw vertical ticks
    vticks = [-100, -50, +50, +100]
    for vtick in vticks: blank[::5, xaxis + vtick] = GREY # Horizontals
    # NOTE(review): this loop ignores vtick and repaints the same slice;
    # preserved as-is since "fixing" it would change the rendered grid.
    for vtick in vticks: blank[::50, ::5] = GREY # Verticals
    # Draw the 'blank' screen.
    pygame.surfarray.blit_array(screen, blank) # Blit the screen buffer
    pygame.display.flip() # Flip the double buffer
    # ydata stores data for the trace.
    ydata = [0.0 for i in range(WIDTH)] # assert len(ydata) <= WIDTH
    QUIT = False
    while not QUIT:
        pixels = copy.copy(blank)
        ydata.append(inchan.read() * scale)
        ydata.pop(0)
        for x in range(WIDTH):
            # Skip samples that fall outside the visible window.
            try: pixels[x][xaxis - int(ydata[x])] = TRACE
            except (IndexError, TypeError): pass
        pygame.surfarray.blit_array(screen, pixels) # Blit the screen buffer
        pygame.display.flip() # Flip the double buffer
        #pygame.display.update(0, xaxis-100, WIDTH, 201) # Flip the double buffer
        del pixels # Use constant space.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                QUIT = True
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_s:
                pygame.image.save(screen, filename)
                print('Saving oscope image in:' + str(filename))
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_UP:
                scale += 10.0
                print('Oscilloscope scaling by {0}'.format(scale))
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_DOWN:
                if scale - 10.0 > 0.0: scale -= 10.0
                print('Oscilloscope scaling by {0}'.format(scale))
        yield
    inchan.poison()
    pygame.display.quit()
    return
@forever
def MultipleOscilloscope(inchannels, _process=None):
    """Placeholder for drawing several oscilloscope traces in one window.

    Not yet implemented; always raises NotImplementedError.
    """
    # TODO: add multiple Oscilloscope traces.
    # Place and size the traces automatically:
    # >>> pygame.display.list_modes()
    # [(1024, 600), (800, 600), (720, 400), (640, 480), (640, 400), (640, 350)]
    # >>>
    raise NotImplementedError('Not implemented just yet...')
@forever
def __Random(outchan, _process=None):
    """Random process.

    Generates random data and writes it to outchan.
    """
    import random
    generate = random.random  # Hoist the lookup out of the hot loop.
    while True:
        outchan.write(generate())
        yield
    return
def __test_random():
    """Test the Oscilloscope with random data.
    """
    chan = Channel()
    Par(__Random(chan), Oscilloscope(chan)).start()
    return
def __test_sin():
    """Plot a sine wave on the oscilloscope.

    Fix: ``dsp`` was referenced without being imported (the other test
    helpers import it locally), so this test raised NameError when run.
    """
    import dsp
    channel = Channel()
    Par(dsp.Sin(channel), Oscilloscope(channel)).start()
    return
def __test_cos():
    """Plot a cosine wave on the oscilloscope.

    Fix: ``dsp`` was referenced without being imported (the other test
    helpers import it locally), so this test raised NameError when run.
    """
    import dsp
    channel = Channel()
    Par(dsp.Cos(channel), Oscilloscope(channel)).start()
    return
def __test_mux():
    """Plot sine and cosine waves on the oscilloscope.
    """
    import dsp
    from csp.builtins import Delta2, Mux2
    chans = [Channel() for _ in range(6)]
    # Pipeline: floats -> split -> cos/sin -> multiplex -> scope.
    processes = (dsp.GenerateFloats(chans[0]),
                 Delta2(chans[0], chans[1], chans[2]),
                 dsp.Cos(chans[1], chans[3]),
                 dsp.Sin(chans[2], chans[4]),
                 Mux2(chans[3], chans[4], chans[5]),
                 Oscilloscope(chans[5]))
    Par(*processes).start()
    return
def __test_tan():
    """Plot a tangent wave on the oscilloscope.
    """
    import dsp
    chans = [Channel() for _ in range(2)]
    # Pipeline: floats -> tan -> scope.
    Par(dsp.GenerateFloats(chans[0]),
        dsp.Tan(chans[0], chans[1]),
        Oscilloscope(chans[1])).start()
    return
# Entry point: uncomment exactly one demo pipeline to run.
if __name__ == '__main__':
    # __test_tan()
    __test_mux()
    # __test_cos()
    # __test_sin()
    # __test_random()
| Python |
#!/usr/bin/env python
"""
python-csp process for Toradex Oak sensors.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from csp.csp import *
from toradex import ToradexCurrent, ToradexMagR, ToradexMotion, ToradexDist
from toradex import ToradexTilt, ToradexLux, ToradexG, ToradexRH, ToradexP
from toradex import Toradex8ChannelA2D
@forever
def Current(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak current sensor.

    Writes each current reading to outchan (and to debugchan when given);
    blink flashes the sensor LED around each read.

    Fix: ``senbsor.open()`` was a typo for ``sensor.open()`` and raised
    NameError as soon as the process started.
    """
    debugstring = 'Toradex current sensor'
    sensor = ToradexCurrent()
    sensor.open()
    while True:
        if blink: sensor.led_on()
        current = sensor.get_data()[1]
        if blink: sensor.led_off()
        outchan.write(current)
        if debugchan is not None: debugchan.write(current)
        yield
    return
@forever
def MagR(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak Magnetometer.

    Writes (magnitude, angle) tuples to outchan, mirroring each reading
    to debugchan when one is supplied.
    """
    debugstring = 'Toradex Magnetometer'
    sensor = ToradexMagR()
    sensor.open()
    while True:
        if blink:
            sensor.led_on()
        mag, angle = sensor.get_data()[1:]
        if blink:
            sensor.led_off()
        reading = (mag, angle)
        outchan.write(reading)
        if debugchan is not None:
            debugchan.write(reading)
        yield
    return
@forever
def Motion(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak motion sensor.

    Writes each motion reading to outchan (and to debugchan when given).
    """
    debugstring = 'Toradex Motion Sensor'
    sensor = ToradexMotion()
    sensor.open()
    while True:
        if blink:
            sensor.led_on()
        motion = sensor.get_data()[1]
        if blink:
            sensor.led_off()
        outchan.write(motion)
        if debugchan is not None:
            debugchan.write(motion)
        yield
    return
@forever
def Dist(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak distance sensor.

    Writes each distance reading to outchan (and to debugchan when given).
    """
    debugstring = 'Toradex Oak Distance Sensor'
    sensor = ToradexDist()
    sensor.open()
    while True:
        if blink:
            sensor.led_on()
        dist = sensor.get_data()[1]
        if blink:
            sensor.led_off()
        outchan.write(dist)
        if debugchan is not None:
            debugchan.write(dist)
        yield
    return
@forever
def Tilt(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak tilt sensor.

    Writes (acceleration, zenith, azimuth) tuples to outchan, mirroring
    each reading to debugchan when one is supplied.
    """
    debugstring = 'Toradex Tilt Sensor'
    sensor = ToradexTilt()
    sensor.open()
    while True:
        if blink:
            sensor.led_on()
        accel, zen, azi = sensor.get_data()[1:]
        if blink:
            sensor.led_off()
        reading = (accel, zen, azi)
        outchan.write(reading)
        if debugchan is not None:
            debugchan.write(reading)
        yield
    return
@forever
def Lux(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak lux sensor.

    Writes each illuminance reading to outchan (and to debugchan when given).
    """
    debugstring = 'Toradex Lux Sensor'
    sensor = ToradexLux()
    sensor.open()
    while True:
        if blink:
            sensor.led_on()
        lux = sensor.get_data()[1]
        if blink:
            sensor.led_off()
        outchan.write(lux)
        if debugchan is not None:
            debugchan.write(lux)
        yield
    return
@forever
def Accelerometer(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak 3-axis accelerometer.

    Writes (x, y, z) tuples to outchan, mirroring each reading to
    debugchan when one is supplied.
    """
    debugstring = 'Toradex G 3-axis Accelerometer'
    sensor = ToradexG()
    sensor.open()
    while True:
        if blink:
            sensor.led_on()
        x, y, z = sensor.get_data()[1:]
        if blink:
            sensor.led_off()
        reading = (x, y, z)
        outchan.write(reading)
        if debugchan is not None:
            debugchan.write(reading)
        yield
    return
@forever
def RelativeHumidity(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak RH sensor.

    Writes (relative humidity, temperature) tuples to outchan, mirroring
    each reading to debugchan when one is supplied.
    """
    debugstring = 'Toradex RH Sensor'
    sensor = ToradexRH()
    sensor.open()
    while True:
        if blink:
            sensor.led_on()
        rh, temp = sensor.get_data()[1:]
        if blink:
            sensor.led_off()
        reading = (rh, temp)
        outchan.write(reading)
        if debugchan is not None:
            debugchan.write(reading)
        yield
    return
@forever
def Pressure(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak pressure sensor.

    Writes (pressure, temperature) tuples to outchan, mirroring each
    reading to debugchan when one is supplied.
    """
    debugstring = 'Toradex Pressure Sensor'
    sensor = ToradexP()
    sensor.open()
    while True:
        if blink:
            sensor.led_on()
        press, temp = sensor.get_data()[1:]
        if blink:
            sensor.led_off()
        reading = (press, temp)
        outchan.write(reading)
        if debugchan is not None:
            debugchan.write(reading)
        yield
    return
@forever
def A2D8Channel(outchan, debugchan=None, blink=True, _process=None):
    """python-csp interface to the Toradex Oak 8 channel A2D.

    Writes the tuple of channel readings to outchan (and to debugchan
    when given).

    Fix: the original instantiated ToradexG (the accelerometer) instead
    of Toradex8ChannelA2D -- an apparent copy/paste slip; the A2D class
    is imported at the top of this module and matches the docstring.
    """
    debugstring = 'Toradex 8 Channel A2D'
    sensor = Toradex8ChannelA2D()
    sensor.open()
    while True:
        if blink: sensor.led_on()
        data = sensor.get_data()[1:]
        if blink: sensor.led_off()
        outchan.write(data)
        if debugchan is not None: debugchan.write(data)
        yield
    return
if __name__ == '__main__':
    # Print data from a ToradexG accelerometer.
    # NOTE(review): Printer is not defined or imported in this module;
    # presumably it comes from csp.builtins -- confirm before running.
    channel = Channel()
    Par(Accelerometer(channel),
        Printer(channel)).start()
| Python |
#!/usr/bin/env python
"""
Generic interface to all Toradex OAK sensors.
Copyright (C) Sarah Mount, 2008.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import struct
from hidsensor import *
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__credits__ = 'Mat Murray'
__date__ = 'November 2008'
__version__ = '0.1'
#DEBUG = True
DEBUG = False
def _debug(*args):
    """Customised debug logging.
    FIXME: Replace this with the builtin logging module.
    """
    parts = [str(arg) for arg in args]
    if DEBUG:
        print('DEBUG: ' + ' '.join(parts))
class ToradexSensor(HIDSensor):
    """Generic interface to a single Toradex sensor.

    Fixes over the original:
    * ``_parse`` now raises NotImplementedError -- ``raise
      NotImplemented(...)`` called the NotImplemented singleton, which is
      not callable, so subclass-contract violations surfaced as TypeError.
    * ``_unpack`` uses the ``%d`` integer format for the field count
      instead of the fragile float format ``%g``.
    * The five LED-mode methods share one private helper instead of five
      copies of the same struct.pack/feature_report sequence.
    """
    VID = 0x1b67 # Vendor ID. PID *MUST* be set in subclasses.
    EP = 0x82 # Endpoint address for interrupt reads.
    # Get or set a datum.
    SET = 0x00
    GET = 0x01
    # Store data in flash or RAM.
    RAM = 0x0
    FLASH = 0x01
    # Sampling rates.
    AFTER_SAMPLING = 0x0 # Factory default.
    AFTER_CHANGE = 0x1
    FIXED_RATE = 0x2
    # LED modes.
    OFF = 0x0 # Factory default.
    ON = 0x1
    BLINK_SLOW = 0x2
    BLINK_FAST = 0x3
    BLINK_PULSE = 0x4
    def __init__(self):
        HIDSensor.__init__(self)
    def feature_report(self, path, buff):
        """Send the feature report in buff and drain the sensor's replies.

        First flushes any pending reports, then writes buff, then reads
        replies until the sensor signals completion (first byte == 0xff).
        """
        size = 32 # For all Toradex sensors
        # Drain any stale reports before sending ours.
        while True:
            reply = self.get_feature_report(path, size)
            if reply[0] == hid.HID_RET_FAIL_GET_REPORT:
                _debug('Did not get report.')
                break
            else:
                report = struct.unpack('32B', reply[1])
                _debug('Report length:', len(report))
                _debug(report)
                if report[0] == 0xff:
                    break
        reply = hid.hid_set_feature_report(self._interface, path, buff)
        if reply == hid.HID_RET_FAIL_SET_REPORT:
            _debug('Did not set report.')
        else:
            _debug('Response:', reply)
        # Get reply
        while True:
            reply = hid.hid_get_feature_report(self._interface, path, size)
            if reply[0] == hid.HID_RET_FAIL_GET_REPORT:
                _debug('Did not get report.')
                break
            else:
                _debug('Error message:', reply[0])
                report = struct.unpack('32B', reply[1])
                _debug(report)
                if report[0] == 0xff:
                    break
    def blink_led(self, rate='slow'):
        """Blink the sensor LED at 'slow', 'fast' or 'pulse' rate."""
        if rate == 'fast': self.blink_led_fast()
        elif rate == 'pulse': self.blink_led_pulse()
        else: self.blink_led_slow()
        return
    def _set_led_mode(self, mode):
        """Send the LED-mode feature report (mode: one of the LED constants)."""
        buff = struct.pack('6b27x',
                           ToradexSensor.SET, ToradexSensor.FLASH,
                           0x01, 0x01, 0x00,
                           mode)
        self.feature_report((), buff)
        return
    def blink_led_slow(self):
        self._set_led_mode(ToradexSensor.BLINK_SLOW)
        return
    def blink_led_fast(self):
        self._set_led_mode(ToradexSensor.BLINK_FAST)
        return
    def blink_led_pulse(self):
        self._set_led_mode(ToradexSensor.BLINK_PULSE)
        return
    def led_on(self):
        self._set_led_mode(ToradexSensor.ON)
        return
    def led_off(self):
        self._set_led_mode(ToradexSensor.OFF)
        return
    def set_sample_rate(self, rate):
        """Set the sample rate of the sensor.
        N.B. This is distinct from the "data rate", which is the rate at
        which the sensor provides data for interrupt reads.
        """
        raise NotImplementedError('Need report paths!')
    def set_data_rate(self, rate):
        """Set the data rate of the sensor.
        N.B This is not the rate at which data is sampled from the
        transducer, but the rate at which the sensor makes data
        available for an interrupt read.
        """
        raise NotImplementedError('Need report paths!')
    def _unpack(self, bytes, size=2):
        """Unpack data from an interrupt read into a sequence of values.

        size is the number of little-endian unsigned shorts expected.
        Returns None when `bytes` does not match the expected length.
        Intended to be used in conjunction with _parse and _read_data:
        _parse(_unpack(_read_data()))
        """
        try:
            data = struct.unpack('<%dH' % size, bytes)
        except struct.error:
            return None
        return data
    def _parse(self, bytes):
        """Parse bytes from an interrupt read into Python types.
        Intended to be used in conjunction with _unpack and _read_data:
        _parse(_unpack(_read_data()))
        """
        raise NotImplementedError('Must be overridden by subclasses')
    def _read_data(self, size=4):
        """Perform an interrupt read on the sensor.
        Intended to be used in conjunction with _unpack and _parse:
        _parse(_unpack(_read_data()))
        """
        bytes = self.interrupt_read(ToradexSensor.EP, size, 1000)
        if bytes: return bytes
        else: return None
    def __str__(self):
        return 'Generic Python interface to Toradex OAK sensors'
class ToradexCurrent(ToradexSensor):
    """Interface to a single Toradex current sensor."""
    PID = 0x0009
    def _parse(self, bytes):
        # (frame time in seconds, current in amps).
        return bytes[0]/100.0, bytes[1]/100000.0
    def get_current(self):
        """Just the current reading in amps, or None on a read failure."""
        parsed = self._parse(self._unpack(self._read_data()))
        return parsed[1] if parsed else None
    def get_data(self):
        """(frame, current) parsed from one interrupt read."""
        return self._parse(self._unpack(self._read_data()))
    def __str__(self):
        return 'Python interface to the Toradex current sensor'
    def _debug_str(self):
        return 'Frame: {0}s Current: {1}A'
class ToradexMagR(ToradexSensor):
    """Interface to a single Toradex magnetic rotation sensor."""
    PID = 0x000b

    def _parse(self, data):
        # (frame s, angle rad, magnitude, status)
        return data[0] / 1000.0, data[1] / 1000.0, data[2], data[3]

    def _sample(self):
        # One 8-byte interrupt read decoded as four 16-bit words.
        return self._parse(self._unpack(self._read_data(size=8), size=4))

    def get_angle(self):
        sample = self._sample()
        return sample[1] if sample else None

    def get_magnitute(self):
        # NOTE: original (misspelled) name kept for API compatibility.
        sample = self._sample()
        return sample[2] if sample else None

    def get_status(self):
        sample = self._sample()
        return sample[3] if sample else None

    def get_data(self):
        # The trailing status word is dropped from the bulk reading.
        sample = self._sample()
        return sample[:-1] if sample else None

    def __str__(self):
        return 'Python interface to the Toradex magnetic rotation sensor'

    def _debug_str(self):
        return 'Frame: {0}s Angle: {1}rad Magnitude: {2}'
class ToradexMotion(ToradexSensor):
    """Interface to a single Toradex IR motion sensor."""
    PID = 0x0006

    def _parse(self, data):
        # (frame time in s, motion event count)
        return data[0] / 1000.0, data[1]

    def get_motion(self):
        sample = self._parse(self._unpack(self._read_data()))
        return sample[1] if sample else None

    def get_data(self):
        sample = self._parse(self._unpack(self._read_data()))
        return sample if sample else None

    def __str__(self):
        return 'Python interface to the Toradex IR motion sensor'

    def _debug_str(self):
        return 'Frame: {0}s Motion: {1} # motion events'
class ToradexDist(ToradexSensor):
    """Interface to a single Toradex distance sensor."""
    PID = 0x0005

    def _parse(self, data):
        # (frame time in s, distance in m)
        return data[0] / 1000.0, data[1] / 1000.0

    def get_dist(self):
        sample = self._parse(self._unpack(self._read_data()))
        return sample[1] if sample else None

    def get_data(self):
        sample = self._parse(self._unpack(self._read_data()))
        return sample if sample else None

    def __str__(self):
        return 'Python interface to the Toradex distance sensor'

    def _debug_str(self):
        return 'Frame: {0}s Dist: {1}m'
class ToradexTilt(ToradexSensor):
    """Interface to a single Toradex tilt sensor."""
    PID = 0x0004

    def _parse(self, data):
        # (frame s, acceleration, zenith rad, azimuth rad)
        return (data[0] / 1000.0, data[1] / 100.0,
                data[2] / 1000.0, data[3] / 1000.0)

    def _sample(self):
        # One 8-byte interrupt read decoded as four 16-bit words.
        return self._parse(self._unpack(self._read_data(size=8), size=4))

    def get_accel(self):
        sample = self._sample()
        return sample[1] if sample else None

    def get_zenith(self):
        sample = self._sample()
        return sample[2] if sample else None

    def get_azimuth(self):
        sample = self._sample()
        return sample[3] if sample else None

    def get_data(self):
        sample = self._sample()
        return sample if sample else None

    def __str__(self):
        return 'Python interface to the Toradex tilt sensor'

    def _debug_str(self):
        return 'Frame: {0}s accel: {1}ms^-2 zen: {2}rad ax:{3}rad'
class ToradexLux(ToradexSensor):
    """Interface to a single Toradex lux sensor."""
    PID = 0x0003

    def _parse(self, data):
        # (frame time in s, illuminance)
        return data[0] / 1000.0, data[1]

    def get_lux(self):
        sample = self._parse(self._unpack(self._read_data(size=4)))
        return sample[1] if sample else None

    def get_data(self):
        sample = self._parse(self._unpack(self._read_data(size=4)))
        return sample if sample else None

    def __str__(self):
        return 'Python interface to the Toradex lux sensor'

    def _debug_str(self):
        return 'Frame: {0}s lux: {1}Lux'
class ToradexG(ToradexSensor):
    """Interface to a single ToradexG (3-axis accelerometer) sensor."""
    PID = 0x000a

    def _parse(self, data):
        # (frame s, x, y, z) -- axes scaled by 1/1000.
        return (data[0] / 1000.0, data[1] / 1000.0,
                data[2] / 1000.0, data[3] / 1000.0)

    def _sample(self):
        # One 8-byte interrupt read decoded as four 16-bit words.
        return self._parse(self._unpack(self._read_data(size=8), size=4))

    def get_x(self):
        sample = self._sample()
        return sample[1] if sample else None

    def get_y(self):
        sample = self._sample()
        return sample[2] if sample else None

    def get_z(self):
        sample = self._sample()
        return sample[3] if sample else None

    def get_data(self):
        sample = self._sample()
        return sample if sample else None

    def __str__(self):
        return 'Python interface to the Toradex G (3-axis accel) sensor'

    def _debug_str(self):
        return 'Frame: {0}s x: {1}ms^-2 y: {2}ms^-2 z:{3}ms^-2'
class ToradexRH(ToradexSensor):
    """Interface to a single ToradexRH (humidity) sensor."""
    PID = 0x0001

    def _parse(self, data):
        # (frame s, humidity %, temperature C)
        return data[0] / 1000.0, data[1] / 100.0, data[2] / 100.0 - 273.0

    def _sample(self):
        # One 6-byte interrupt read decoded as three 16-bit words.
        return self._parse(self._unpack(self._read_data(size=6), size=3))

    def get_temp(self):
        sample = self._sample()
        return sample[2] if sample else None

    def get_humidity(self):
        sample = self._sample()
        return sample[1] if sample else None

    def get_data(self):
        sample = self._sample()
        return sample if sample else None

    def __str__(self):
        return 'Python interface to the Toradex RH sensor'

    def _debug_str(self):
        return 'Frame: {0}s Humidity: {1}% Temperature: {2}C'
class ToradexP(ToradexSensor):
    """Interface to a single ToradexP (pressure) sensor."""
    PID = 0x0002

    def _parse(self, data):
        # Note that the Toradex / Oak data sheet is incorrect here.
        # (frame s, pressure, temperature C)
        return data[0] / 1000.0, data[1] * 10.0, data[2] / 10.0 - 273.0

    def _sample(self):
        # One 6-byte interrupt read decoded as three 16-bit words.
        return self._parse(self._unpack(self._read_data(size=6), size=3))

    def get_temp(self):
        sample = self._sample()
        return sample[2] if sample else None

    def get_pressure(self):
        sample = self._sample()
        return sample[1] if sample else None

    def get_data(self):
        sample = self._sample()
        return sample if sample else None

    def __str__(self):
        return 'Python interface to the ToradexP sensor'

    def _debug_str(self):
        return 'Frame: {0}s Pressure: {1}Pa Temperature: {2}C'
class Toradex8ChannelA2D(ToradexSensor):
    """Interface to a single Toradex +/-10V 8 Channel ADC"""
    PID = 0x000e
    MODE_SINGLE_ENDED = 0x0  # Factory default.
    MODE_PSEUDO_DIFFERENTIAL = 0x01

    def __init__(self):
        ToradexSensor.__init__(self)
        # Assume the factory default until told otherwise.
        self.mode = Toradex8ChannelA2D.MODE_SINGLE_ENDED
        return

    def set_channel_name(self, channel, name):
        """Set user channel name."""
        raise NotImplementedError('Need report paths!')

    def get_channel_name(self, channel):
        """Get user channel name."""
        raise NotImplementedError('Need report paths!')

    def set_offset_channel(self, channel, offset):
        """Set absolute offset of a given channel (mV)."""
        raise NotImplementedError('Need report paths!')

    def get_offset_channel(self, channel):
        """Get absolute offset of a given channel (mV)."""
        raise NotImplementedError('Need report paths!')

    def set_input_mode(self, mode):
        """Set input mode.

        mode must be one of:
            Toradex8ChannelA2D.MODE_SINGLE_ENDED
            Toradex8ChannelA2D.MODE_PSEUDO_DIFFERENTIAL
        """
        valid_modes = (Toradex8ChannelA2D.MODE_SINGLE_ENDED,
                       Toradex8ChannelA2D.MODE_PSEUDO_DIFFERENTIAL)
        if mode not in valid_modes:
            raise HIDError('Toradex8ChannelA2D in undefined input mode!')
        raise NotImplementedError('Need report paths!')

    def get_input_mode(self):
        """Get input mode.

        return value will be one of:
            Toradex8ChannelA2D.MODE_SINGLE_ENDED
            Toradex8ChannelA2D.MODE_PSEUDO_DIFFERENTIAL
        """
        raise NotImplementedError('Need report paths!')

    def _parse(self, data):
        # Frame time plus eight channel readings, all scaled by 1/1000.
        return tuple(word / 1000.0 for word in data)

    def get_data(self):
        # One 18-byte interrupt read decoded as nine 16-bit words.
        sample = self._parse(self._unpack(self._read_data(size=18), size=9))
        return sample if sample else None

    def get_channel(self, channel):
        """Return the reading for one channel (index 0-7), or None."""
        sample = self.get_data()
        if sample:
            return sample[channel + 1]
        return None

    def __str__(self):
        return 'Python interface to the Toradex +/-10V 8 Channel ADC'

    def _debug_str(self):
        if self.mode == Toradex8ChannelA2D.MODE_SINGLE_ENDED:
            return ('Frame no: {0}s, CH0-GNDi: {1}V, CH1-GNDi: {2}V, '
                    'CH2-GNDi: {3}V, CH3-GNDi: {4}V, CH4-GNDi: {5}V, '
                    'CH5-GNDi: {6}V, CH6-GNDi: {7}V, CH7-GNDi: {8}V')
        if self.mode == Toradex8ChannelA2D.MODE_PSEUDO_DIFFERENTIAL:
            return ('Frame no: {0}s, CH0-1: {1}V, CH1-0: {2}V, CH2-3: {3}V, '
                    'CH3-2: {4}V, CH4-5: {5}V, CH5-4: {6}V, CH6-7: {7}V, '
                    'CH7-6: {8}V,')
        raise HIDError('Toradex8ChannelA2D in undefined input mode!')
# Define a collection of Toradex sensors. Use this when you
# have more than one sensor connected to the USB bus(ses).
# Each entry is the sensor CLASS; the collection is responsible for
# instantiating whichever devices are actually present.
ToradexSensorCollection = HIDSensorCollection([Toradex8ChannelA2D,
                                               ToradexCurrent,
                                               ToradexMagR,
                                               ToradexMotion,
                                               ToradexDist,
                                               ToradexTilt,
                                               ToradexLux,
                                               ToradexG,
                                               ToradexRH,
                                               ToradexP])
### Test interfaces
def __test(sensorclass):
    """Open one sensor of the given class and print readings forever.

    Requires the matching Toradex hardware to be attached; never returns.
    """
    sensor = sensorclass()
    print(sensor.open())
    while True:
        sensor.blink_led()
        # BUG FIX: the original used '&sensor.get_data()', which is a
        # syntax error; '*' unpacks the reading into format() arguments.
        print(sensor._debug_str().format(*sensor.get_data()))
# Convenience entry points: one hardware smoke test per sensor type.
def __test_rh():
    __test(ToradexRH)

def __test_g():
    __test(ToradexG)

def __test_tilt():
    __test(ToradexTilt)

def __test_lux():
    __test(ToradexLux)

def __test_dist():
    __test(ToradexDist)

def __test_motion():
    __test(ToradexMotion)

def __test_magr():
    __test(ToradexMagR)

def __test_current():
    __test(ToradexCurrent)

def __test_pressure():
    __test(ToradexP)

def __test_A2D():
    __test(Toradex8ChannelA2D)
def __test_collection(n=10):
    """Open every known sensor and print n rounds of debug output."""
    import time
    collection = ToradexSensorCollection
    print(collection.open())
    for _ in range(n):
        time.sleep(0.1)
        collection._debug()
    del collection
if __name__ == '__main__':
    # Exactly one smoke test is enabled at a time; each blocks forever
    # and requires the matching sensor hardware on a USB port.
    # Test sensor collection
    # __test_collection()
    # Test individual sensors
    # __test_rh()
    # __test_g()
    # __test_tilt()
    # __test_lux()
    # __test_dist()
    # __test_motion()
    __test_magr()
    # __test_current()
    # __test_pressure()
    # __test_A2D()
    # def __test2():
    #     sensor1 = Toradex8ChannelA2D()
    #     sensor2 = Toradex8ChannelA2D()
    #     print sensor1.open()
    #     print sensor2.open()
    #     while True:
    #         print sensor1._debug_str().format(*sensor1.get_data())
    #         print sensor2._debug_str().format(*sensor2.get_data())
    # __test2()
| Python |
#!/usr/bin/env python
"""
Digital signal processing for python-csp.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from csp.csp import *
import math
# TODO: Use numpy for more sophisticated processes.
ACCEL_DUE_TO_GRAVITY = 9.80665
@forever
def Zip(outchan, inchannels, _process=None):
    """Take data from a number of input channels, and write that
    data as a single list to L{outchan}.
    """
    while True:
        readings = [channel.read() for channel in inchannels]
        outchan.write(readings)
        yield
    return
@forever
def Unzip(inchan, outchans, _process=None):
    """Continuously read tuples of data from a single input channel and
    send each datum out down its own output channel.
    """
    while True:
        data = inchan.read()
        # BUG FIX: the original iterated "range(data)", which raises a
        # TypeError for any sequence; iterate the data's indices instead.
        for i, datum in enumerate(data):
            outchans[i].write(datum)
        yield
    return
@forever
def Sin(inchan, outchan, _process=None):
    """Write the sine of every value read from inchan to outchan."""
    while True:
        value = inchan.read()
        outchan.write(math.sin(value))
        yield
    return
@forever
def Cos(inchan, outchan, _process=None):
    """Write the cosine of every value read from inchan to outchan."""
    while True:
        value = inchan.read()
        outchan.write(math.cos(value))
        yield
    return
@forever
def Tan(inchan, outchan, _process=None):
    """Write the tangent of every value read from inchan to outchan."""
    while True:
        value = inchan.read()
        outchan.write(math.tan(value))
        yield
    return
@forever
def GenerateFloats(outchan, _process=None):
    """Write the sequence 0.0, 0.1, 0.2, ... to outchan forever."""
    counter = 0.0
    while True:
        outchan.write(counter)
        counter += 0.1
        yield
    return
@forever
def Magnitude(inchan, outchan, _process=None):
    """Write the Euclidean magnitude of each vector read from inchan."""
    while True:
        acceldata = inchan.read()
        sum_of_squares = sum(axis ** 2 for axis in acceldata)
        outchan.write(math.sqrt(sum_of_squares))
        yield
    return
@forever
def Difference(inchan, outchan, window=1, _process=None):
    """Write the difference between each reading and the previous one.

    NOTE(review): 'window' is accepted but never used, and the
    IndexError handler looks unreachable for scalar data -- confirm
    the original intent before removing either.
    """
    previous = 0.0
    while True:
        reading = inchan.read()
        try:
            outchan.write(reading - previous)
            previous = reading
        except IndexError:
            pass
        yield
    return
@forever
def Square(inchan, outchan, _process=None):
    """Write the square of every value read from inchan to outchan."""
    while True:
        value = inchan.read()
        outchan.write(value ** 2)
        yield
    return
@forever
def Normalise(inchan, outchan, _process=None, start=0.0, end=100.0):
    """Scale each reading by the width of the [start, end] interval.

    NOTE(review): only divides by (end - start); 'start' is never
    subtracted from the reading -- confirm that is intended.
    """
    scale = end - start
    while True:
        reading = inchan.read()
        outchan.write(reading / scale)
        yield
    return
@forever
def Threshold(thresh, inchan, outchan, _process=None):
    """Pass through only readings greater than or equal to thresh."""
    while True:
        mag = inchan.read()
        if mag >= thresh:
            outchan.write(mag)
        yield
    return
| Python |
#!/usr/bin/env python
"""
Chart the output of a Toradex Oak accelerometer.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'November 2009'
__version__ = '0.2'
from oscilloscope import Oscilloscope
def chart_accel():
    """Requires a Toradex Oak G to be attached to a USB port.

    Builds and starts a pipeline:
        accelerometer -> unzip -> difference -> square -> oscilloscope

    NOTE(review): Channel, Par and Blackhole are not imported in this
    module -- presumably from csp.csp; verify the imports.
    NOTE(review): channels[0] is both the accelerometer's output and
    part of the Unzip output slice channels[0:3] -- confirm the wiring.
    """
    import dsp
    from toradex_csp import Accelerometer
    # Seven channels created, but only 0-3 are wired below.
    channels = [Channel() for i in range(7)]
    par = Par(Accelerometer(channels[0]),
              dsp.Unzip(channels[0], (channels[0:3])),
              Blackhole(channels[1]),
              Blackhole(channels[2]),
              dsp.Difference(channels[1], channels[2]),
              dsp.Square(channels[2], channels[3]),
              Oscilloscope(channels[3]))
    par.start()
    return
# Run the charting demo when executed as a script.
if __name__ == '__main__':
    chart_accel()
| Python |
#!/usr/bin/env python
"""Interactive interpreter for python-csp, with online help.
Features:
* CSP primitives are imported automatically.
* History is saved between sessions in C{~/.csp-console-history}.
* Tab-completion can be used to complete keywords or variables.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
# readline is optional (absent on some platforms); without it the
# console still works, but lacks history and tab-completion.
try:
    import readline
except ImportError:
    print("Module readline not available.")
else:
    import rlcompleter
    readline.parse_and_bind("tab: complete")
import atexit
import code
import os
import sys
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'December 2008'
class _Printer(object):
    """Print documentation in twenty-line chunks.

    Based on a class of the same name from the site.py module.
    """
    # Number of lines shown before pausing for user input.
    MAXLINES = 20

    def __init__(self, documentation):
        # Pre-split the text so __call__ can page through it by index.
        self.__lines = documentation.split('\n')
        self.__linecnt = len(self.__lines)

    def __call__(self):
        """Page the documentation to stdout, MAXLINES at a time."""
        prompt = '\n*** Hit Return for more, or q (and Return) to quit: '
        lineno = 0
        while True:
            try:
                # Print the next chunk; IndexError marks end of text.
                for i in range(lineno, lineno + self.MAXLINES):
                    print(self.__lines[i])
            except IndexError:
                break
            else:
                lineno += self.MAXLINES
                key = None
                # Accept only Return (continue) or q/Q (stop prompting).
                while key not in ('', 'q', 'Q'):
                    key = input(prompt)
                # NOTE(review): only lowercase 'q' quits the pager;
                # 'Q' exits the prompt loop but keeps paging -- confirm.
                if key == 'q':
                    break
        print()
class TabSafeCompleter(rlcompleter.Completer):
    """Enable safe use of Tab for either tab completion or nested scope."""

    def complete(self, text, state):
        # An empty line means the user wants a literal tab character,
        # not a completion: offer '\t' first, then nothing.
        if text == '':
            return ['\t', None][state]
        return rlcompleter.Completer.complete(self, text, state)
class CSPConsole(code.InteractiveConsole):
    """python-csp interactive console with REPL.

    Features:
     * CSP channel server is started automatically.
     * CSP primitives are imported automatically.
     * History is saved between sessions in C{~/.csp-console-history}.
     * Tab-completion can be used to complete keywords or variables.
    """

    # Adapted from the readline module documentation.
    def __init__(self, locals=None, filename="<console>",
                 histfile=os.path.expanduser("~/.csp-console-history")):
        code.InteractiveConsole.__init__(self)
        self.init_history(histfile)

    def init_history(self, histfile):
        """Configure tab completion and load any saved history."""
        readline.parse_and_bind("tab: complete")
        readline.set_completer_delims(' \t\n`!@#$%^&*()-=+[{]}\\|;:,<>?')
        readline.set_completer(TabSafeCompleter().complete)
        if hasattr(readline, "read_history_file"):
            try:
                readline.read_history_file(histfile)
            except IOError:
                # First run: no history file yet.
                pass
            atexit.register(self.save_history, histfile)

    def save_history(self, histfile):
        """Persist the session history at interpreter exit."""
        readline.write_history_file(histfile)

    def raw_input(self, *args):
        return code.InteractiveConsole.raw_input(self, *args)
# Banner printed when the console starts.
_ban = "\npython-csp (c) 2008. Licensed under the GPL(v2).\n\n"
if __name__ == '__main__':
    c = CSPConsole(locals=locals())
    # Don't expect the csp types to be available in locals()
    c.push('from csp.csp import *')
    c.push('from csp.builtins import *')
    c.push('from csp.guards import *')
    c.interact(banner=_ban)
| Python |
#!/usr/bin/env python
"""
Debugger for the python-csp library.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'May 2010'
import sys
import csp.tracer.tracer as tracer
from optparse import OptionParser
# Command-line interface: -p names the program to debug; the remaining
# flags select which debugging artefacts to produce.
parser = OptionParser()
parser.add_option('-p', '--prog', dest='program',
                  action='store',
                  help='Program to be debugged')
parser.add_option('-a', '--all', dest='all',
                  action='store_true',
                  help='Provide all available debugging information')
parser.add_option('-m', '--model', dest='model',
                  action='store_true',
                  help='Provide a CSP_M model of PROGRAM, suitable for use with the FDR2 model checker')
parser.add_option('-g', '--graph', dest='graph',
                  action='store_true',
                  help='Provide (as a PNG) a graph of the processes and guards created by PROGRAM')
parser.add_option('-t', '--trace', dest='trace',
                  action='store_true',
                  help='Provide a Hoare-style CSP trace of PROGRAM')
parser.add_option('-v', '--vcr', dest='vcr',
                  action='store_true',
                  help='Provide a view-centric reasoning trace of PROGRAM')
parser.add_option('-s', '--struct', dest='struct',
                  action='store_true',
                  help='Provide a structural trace of PROGRAM')
def create_icode():
    """Build and return an ICODE model of the given program.

    Not yet implemented.
    """
    raise NotImplementedError('ICODE models not yet implemented')
def create_model(icode):
    """Build a CSP_M model of the given program from its ICODE.

    Not yet implemented.
    """
    raise NotImplementedError('CSP_M models not yet implemented')
def create_graph(icode):
    """Build a process graph of the given program from its ICODE.

    Not yet implemented.
    """
    raise NotImplementedError('Process graphs not yet implemented')
def create_trace(icode):
    """Build a Hoare-style CSP trace of the given program.

    Not yet implemented.
    """
    raise NotImplementedError('CSP traces not yet implemented')
def create_vcr(icode):
    """Build a view-centric reasoning trace of the given program.

    Not yet implemented.
    """
    raise NotImplementedError('VCR traces not yet implemented')
def create_struct(icode):
    """Create a structural trace of the given program.

    Not yet implemented.
    """
    # BUG FIX: the placeholder message was copy-pasted from
    # create_icode and wrongly said 'ICODE models'.
    raise NotImplementedError('Structural traces not yet implemented')
if __name__ == '__main__':
    (options, args) = parser.parse_args()
    if options.program:
        # Run the target program under the CSP tracer.
        with tracer.csptrace():
            exec(compile(open(options.program).read(),
                         options.program, 'exec'))
    else:
        # BUG FIX: the original printed help but fell through to the
        # option handling below, hitting a NameError on 'icode'.
        parser.print_help()
        sys.exit()
    if options.all:
        icode = create_icode()
        create_model(icode)
        create_graph(icode)
        create_trace(icode)
        create_vcr(icode)
        create_struct(icode)
        sys.exit()
    # BUG FIX: 'icode' was only defined under --all; build it here for
    # any individually requested artefact.
    if (options.model or options.graph or options.trace
            or options.vcr or options.struct):
        icode = create_icode()
        if options.model: create_model(icode)
        if options.graph: create_graph(icode)
        if options.trace: create_trace(icode)
        if options.vcr: create_vcr(icode)
        if options.struct: create_struct(icode)
| Python |
#!/usr/bin/env python
from optparse import OptionParser
import csp.lint.lint
import sys
# Command-line options for the csplint static checker.
parser = OptionParser()
parser.add_option('-l', '--list', dest='listall',
                  action='store_true',
                  help='List all error messages and exit')
parser.add_option('-p', '--prog', dest='program',
                  action='store',
                  help='Program to be statically checked')
parser.add_option('-x', '--exclude', dest='excluded',
                  action='store',
                  help='Comma-separated list of error codes to exclude.')
if __name__ == '__main__':
    import exstatic.cspwarnings
    (options, args) = parser.parse_args()
    # --list prints the catalogue of error codes and exits immediately.
    if options.listall:
        exstatic.cspwarnings.list_error_codes()
        sys.exit()
    # Deal with the list of excluded error codes, if used.
    ex_list = []
    if options.excluded:
        ex_list = options.excluded.strip().split(',')
    # Run the checker, or show usage if no program was named.
    if options.program:
        csp.lint.lint.run(options.program, excluded=ex_list)
    else:
        parser.print_help()
| Python |
#!/usr/bin/env python
from setuptools import setup, find_packages
version = '0.1'
setup(name='python-csp',
version=version,
description="Communicating sequential processes for Python",
long_description="""\
python-csp adds communicating sequential processes to Python""",
classifiers=["Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries",
"Topic :: System :: Distributed Computing"],
keywords='concurrency multicore parallel',
author='Sarah Mount',
author_email='s.mount@wlv.ac.uk',
url='http://code.google.com/p/python-csp/',
license='GPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests',
'reactive', 'applications', 'benchmark',
'jythonsetup', 'logo', 'rst', 'scripts',
'test', 'tutorial']),
include_package_data=True,
zip_safe=True,
scripts=['scripts/python-csp',
'scripts/csplint',
'scripts/cspdb'],
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
| Python |
#!/usr/bin/env python
"""
Generic warnings and errors for Exstatic.
TODO: Document this module.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'May 2010'
import os.path
import sys
# Maps the first letter of an error code to a human-readable severity.
severity = {'I':'Information', 'W':'Warning', 'E':'Error'}
__all__ = ['severity', 'ExstaticErrorList', 'ExstaticError',
           'ExstaticErrorFactory']
class ExstaticErrorList(object):
    """Accumulates ExstaticError objects built by an ExstaticErrorFactory."""

    def __init__(self, errcodes):
        """
        @param errcodes: dictionary mapping error codes to explanations
        """
        self.error_factory = ExstaticErrorFactory(errcodes)
        self.errors = []

    def create_error(self, filename, lineno, scope, errcode):
        """Create a new error via the factory and record it."""
        error = self.error_factory.create_error(filename, lineno, scope,
                                                errcode)
        self.errors.append(error)

    def get_errors(self, excluded=()):
        """Return a list of the current errors, excluding any whose code
        appears in the excluded collection.
        """
        # Immutable default replaces the original mutable [] default;
        # a comprehension replaces the manual filter loop.
        return [error for error in self.errors
                if error.errcode not in excluded]

    def print_errors(self, out=sys.stdout, excluded=()):
        """Print the current errors to out, one per line, excluding any
        whose code appears in the excluded collection.
        """
        for error in self.errors:
            if error.errcode not in excluded:
                out.write(str(error))
                out.write('\n')

    def reset_errors(self):
        """Discard all recorded errors."""
        self.errors = []
class ExstaticErrorFactory(object):
    """Builds ExstaticError objects with their explanations attached."""

    def __init__(self, errcodes):
        """
        @param errcodes: dictionary of error codes -> explanations
        """
        self.errcodes = errcodes

    def create_error(self, filename, lineno, scope, errcode):
        """Create and return a new error for the given location/code."""
        error = ExstaticError(filename, lineno, scope, errcode)
        error.set_explaination(self.errcodes[errcode])
        return error
class ExstaticError(object):
    """A single static-analysis diagnostic tied to a file location."""

    def __init__(self, filename, lineno, scope, errcode):
        """
        @param filename: name of the file in which error occurs
        @param lineno: line number on which error occurs
        @param scope: scope that the error occurs in (e.g. function name)
        @param errcode: name of this particular error
        """
        # Only the basename is kept, to keep reports short.
        self.filename = os.path.basename(filename)
        self.lineno = lineno
        self.scope = scope
        self.errcode = errcode
        self.explaination = ''

    def get_severity(self):
        """
        @return 'E' for an error and 'W' for a warning.
        """
        return severity[self.errcode[0]]

    def set_explaination(self, explain):
        # Spelling kept for backwards compatibility with callers.
        self.explaination = explain

    def __str__(self):
        fields = (self.filename, self.lineno, self.get_severity(),
                  self.errcode, self.scope, self.explaination)
        return '[{0}:{1}] {2} ({3}, {4}): {5}'.format(*fields)
| Python |
#!/usr/bin/env python
"""
Exstatic errors and warnings for CSP.
TODO: Document this module.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import exstatic.warnings
__all__ = ['errcodes', 'list_error_codes', 'create_error', 'reset_errors',
           'get_errors', 'print_errors']
# Catalogue of csplint diagnostics: the leading letter encodes severity
# (I = information, W = warning, E = error).
errcodes = {
    # Information.
    'I001':'Function is a CSP process or server process',
    # Warnings.
    'W001':'Channel in both readset and writeset.',
    'W002':'No readset given in documentation.',
    'W003':'No writeset given in documentation.',
    'W004':'@process or @forever applied to method (rather than function)',
    # Errors.
    'E001':'Process / forever decorator wraps a method, not a function.',
    'E002':'Channel in readset is not a formal parameter to this process.',
    'E003':'Channel in writeset is not a formal parameter to this process.',
    'E004':'Channel appears in documented readset but not read from in function body.',
    'E005':'Channel is read from in function body but does not appear in documented readset',
    'E006':'Channel appears in documented writeset but not written to in function body.',
    'E007':'Channel is written to in function body but does not appear in documented writeset'
    }
# Module-wide error list shared by the helper functions below.
csp_error_list = exstatic.warnings.ExstaticErrorList(errcodes)
def list_error_codes():
    """Print a formatted table of all available error codes."""
    sep = '--------------------------------------------------------------------'
    print(sep)
    print(' CODE | MESSAGE')
    current_type = ''
    for key in sorted(errcodes.keys()):
        # Start a new section whenever the severity letter changes.
        if key[0] != current_type:
            print(sep)
        print(str(key) + ': |' + str(errcodes[key]))
        current_type = key[0]
    print(sep)
    return
def create_error(filename, lineno, scope, errcode):
    """Create a new error on the module-wide list and return it."""
    return csp_error_list.create_error(filename, lineno, scope, errcode)
def reset_errors():
    """Empty the module-wide error list of all errors."""
    csp_error_list.reset_errors()
def get_errors(excluded=()):
    """Return the list of current errors, excluding any codes in excluded.

    Uses an immutable default (mutable-default-argument fix); any
    iterable of code strings is accepted.

    @return list of current errors.
    @type list
    """
    return csp_error_list.get_errors(excluded=excluded)
def print_errors(excluded=()):
    """Print the list of current errors, excluding any codes in excluded.

    Uses an immutable default (mutable-default-argument fix); any
    iterable of code strings is accepted.
    """
    csp_error_list.print_errors(excluded=excluded)
| Python |
#!/usr/bin/env python
"""
Visitor pattern for ICODE.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = '2010-05-16'
# Names exported by this module
# BUG FIX: __all__ previously listed 'GraphIcodeVisitor', but the class
# defined below is named 'GraphVisitor'; a star-import of this module
# would therefore raise AttributeError.
__all__ = ['IcodeVisitor', 'ExampleIcodeVisitor', 'GraphVisitor']
class IcodeVisitor:
    """Performs a depth-first walk of an ICODE tree.

    Largely taken from compiler.visitor in the Python2.6 distribution.

    For each node visited, the visitor argument is checked for a method
    named 'visitNodeType' (NodeType being the node's class name); if it
    exists it is called with the node, otherwise default() recurses into
    the node's children. Handlers are memoised per class. The visitor
    gains a 'visit' attribute bound to dispatch, so handlers can recurse
    into children of arbitrary type during a preorder walk.
    """
    VERBOSE = 0

    def __init__(self):
        self.node = None
        self._cache = {}

    def default(self, node, *args):
        """Fallback handler: visit each child of node in turn."""
        for child in node.getChildNodes():
            self.dispatch(child, *args)

    def dispatch(self, node, *args):
        """Find (and memoise) the handler for node's class, then call it."""
        self.node = node
        klass = node.__class__
        handler = self._cache.get(klass, None)
        if handler is None:
            handler = getattr(self.visitor, 'visit' + klass.__name__,
                              self.default)
            self._cache[klass] = handler
        return handler(node, *args)

    def preorder(self, tree, visitor, *args):
        """Do preorder walk of tree using visitor"""
        self.visitor = visitor
        visitor.visit = self.dispatch
        self.dispatch(tree, *args)
class ExampleIcodeVisitor(IcodeVisitor):
    """Prints examples of the nodes that aren't visited.

    This visitor-driver is only useful for development, when it's
    helpful to develop a visitor incrementally, and get feedback on what
    you still have to do.
    """
    # Node classes already reported, so each is printed only once.
    examples = {}

    def dispatch(self, node, *args):
        self.node = node
        # Unlike the base class, a missing handler is cached as 0, so
        # the "unhandled node" branch below can fire.
        meth = self._cache.get(node.__class__, None)
        className = node.__class__.__name__
        if meth is None:
            meth = getattr(self.visitor, 'visit' + className, 0)
            self._cache[node.__class__] = meth
        if self.VERBOSE > 1:
            print ( "dispatch", className, (meth and meth.__name__ or '') )
        if meth:
            meth(node, *args)
        elif self.VERBOSE > 0:
            # No handler: dump one example of this node class, then
            # continue the walk through its children.
            klass = node.__class__
            if klass not in self.examples:
                self.examples[klass] = klass
                print ( )
                print ( self.visitor )
                print ( klass )
                for attr in dir(node):
                    if attr[0] != '_':
                        print ( "\t", "{0}-12.12s".format(attr), getattr(node, attr) )
                print ( )
            return self.default(node, *args)
class GraphVisitor(IcodeVisitor):
    """Visitor which produces a Graphviz representation of the Icode
    tree.

    All visit methods are currently unimplemented stubs.
    NOTE(review): __init__ does not call IcodeVisitor.__init__, so
    self.node/self._cache are never initialised -- confirm intent.
    """
    def __init__(self):
        # Presumably a counter for numbering graph nodes -- TODO confirm.
        self.num = 0
        return
    def visitETA(self, node):
        pass
    def visitVal(self, node):
        pass
    def visitArith(self, node):
        pass
    def visitBool(self, node):
        pass
    def visitPrim(self, node):
        pass
    def visitAssign(self, node):
        pass
    def visitCall(self, node):
        pass
    def visitSelect(self, node):
        pass
    def visitIterate(self, node):
        pass
    def visitNu(self, node):
        pass
    def visitNameSpace(self, node):
        pass
    def visitParamNameSpace(self, node):
        pass
# Walker class used when the caller does not supply one.
_walker = IcodeVisitor

def walk(tree, visitor, walker=None, verbose=None):
    """Walk tree in preorder with visitor, returning the visitor."""
    driver = _walker() if walker is None else walker
    if verbose is not None:
        driver.VERBOSE = verbose
    driver.preorder(tree, visitor)
    return driver.visitor
# class CopyAndPasteVisitor(IcodeVisitor):
# def __init__(self):
# return
# def visitETA(self, node):
# pass
# def visitVal(self, node):
# pass
# def visitArith(self, node):
# pass
# def visitBool(self, node):
# pass
# def visitPrim(self, node):
# pass
# def visitAssign(self, node):
# pass
# def visitCall(self, node):
# pass
# def visitSelect(self, node):
# pass
# def visitIterate(self, node):
# pass
# def visitNu(self, node):
# pass
# def visitNameSpace(self, node):
# pass
# def visitParamNameSpace(self, node):
# pass
| Python |
#!/usr/bin/env python
"""
Generic stack type for Python.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__all__ = ['Stack']
class Stack:
    """A generic LIFO stack backed by a Python list.

    The top of the stack is the end of the underlying list, so push,
    pop and peek are all O(1) (amortised for push).
    """

    def __init__(self):
        # Underlying storage; index 0 is the bottom of the stack.
        self.__items = []

    def push(self, value):
        """Place value on top of the stack."""
        self.__items.append(value)

    def pop(self):
        """Remove and return the top item; the stack must be non-empty."""
        assert len(self.__items) > 0
        return self.__items.pop()

    def peek(self):
        """Return the top item without removing it; must be non-empty."""
        assert len(self.__items) > 0
        return self.__items[-1]

    def issubset(self, other):
        """Determine whether other stack is a subset of this one.

        Order matters: elements are compared pairwise from the bottom,
        up to the length of the shorter of the two sequences.
        """
        return all(mine == theirs
                   for mine, theirs in zip(self.__items, other))

    def __contains__(self, item):
        return item in self.__items

    def __len__(self):
        return len(self.__items)

    def __getitem__(self, index):
        return self.__items[index]

    def __iter__(self):
        return iter(self.__items)

    def __repr__(self):
        return repr(self.__items)

    def __str__(self):
        return str(self.__items)
| Python |
"""ICODE types defined in Python.
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = ''
__all__ = ['IcodeNode', 'ETA', 'Val', 'Arith', 'Bool',
'Prim', 'Assign', 'Call', 'Select', 'Iterate',
'Nu', 'NameSpace', 'ParamNameSpace']
# Functions to easily print XML.
def make_tag(name, attributes):
    """Return an opening XML tag for name with the given attributes.

    @param name the element name
    @param attributes a dict mapping attribute names to values
    @return a string of the form '<name key="value" ...>'
    """
    # BUG FIX: the attributes argument was previously ignored and the
    # tag opened with '< {0}' (a stray space), producing malformed XML.
    tag = '<{0}'.format(name)
    for key, val in attributes.items():
        tag += ' {0}="{1}"'.format(key, val)
    tag += '>'
    return tag
# ICODE types.
class IcodeNode(object):
"""Abstract base class for all ICODE types.
"""
def __init__(self, lineno, annote):
"""
@param lineno line number
@param annote dictionary of annotations
"""
self.annote = annote
self.lineno = lineno
return
def _annote2xml(self):
"""Convert dict of annotations to xml.
"""
x = '<annote lineno=' + str(self.lineno) + ' '
for key, val in self.annote.values():
x+= str(key) + '=' + str(val) + ' '
return x + '/>\n'
def xml(self):
raise NotImplementedError
class ETA(IcodeNode):
    """The empty node: represents None / null and similar absent values."""

    def __init__(self, lineno, annote):
        IcodeNode.__init__(self, lineno, annote)

    def xml(self):
        """Serialise as an <eta> element carrying only the annotations."""
        return ''.join(['<eta>', self._annote2xml(), '</eta>\n'])
class Val(IcodeNode):
    """A literal or other concrete value."""

    def __init__(self, lineno, val, annote):
        IcodeNode.__init__(self, lineno, annote)
        # The wrapped literal value.
        self.val = val

    def xml(self):
        """Serialise as a <val> element containing the value's str()."""
        return ''.join(['<val>', str(self.val),
                        self._annote2xml(), '</val>\n'])
class Arith(IcodeNode):
    """An arithmetic expression: operands e1, e2 joined by operator aop."""

    def __init__(self, lineno, e1, e2, aop, annote):
        IcodeNode.__init__(self, lineno, annote)
        self.aop = aop
        self.e1 = e1
        self.e2 = e2

    def xml(self):
        # Serialisation for this node type has not been written yet.
        raise NotImplementedError
class Bool(IcodeNode):
    """A boolean expression: operands e1, e2 joined by operator bop."""

    def __init__(self, lineno, e1, e2, bop, annote):
        IcodeNode.__init__(self, lineno, annote)
        self.bop = bop
        self.e1 = e1
        self.e2 = e2

    def xml(self):
        # Serialisation for this node type has not been written yet.
        raise NotImplementedError
class Prim(IcodeNode):
    """Statements...should rename this really.

    Operands e1 and e2 combined by the primitive operator pop.
    """
    def __init__(self, lineno, e1, e2, pop, annote):
        # BUG FIX: the base initialiser takes only (lineno, annote); the
        # operands were previously passed through as well, raising
        # TypeError on every construction.
        IcodeNode.__init__(self, lineno, annote)
        self.e1 = e1
        self.e2 = e2
        self.pop = pop
        return
    def xml(self):
        # Serialisation for this node type has not been written yet.
        raise NotImplementedError
class Assign(IcodeNode):
    """An assignment of rvalue to lvalue."""

    def __init__(self, lineno, lvalue, rvalue, annote):
        IcodeNode.__init__(self, lineno, annote)
        self.lvalue = lvalue
        self.rvalue = rvalue

    def xml(self):
        """Serialise as nested <lvalue>/<rvalue> elements."""
        pieces = ['<assign><lvalue>',
                  self.lvalue.xml(),
                  '</lvalue>',
                  '<rvalue>',
                  self.rvalue.xml(),
                  '</rvalue>',
                  self._annote2xml(),
                  '</assign>\n']
        return ''.join(pieces)
class Call(IcodeNode):
    """A call to a function, closure, method, continuation, etc."""

    def __init__(self, lineno, name, args, annote):
        IcodeNode.__init__(self, lineno, annote)
        self.args = args
        self.name = name

    def xml(self):
        # Serialisation for this node type has not been written yet.
        raise NotImplementedError
class Select(IcodeNode):
    """A selection (guarded choice) statement."""

    def __init__(self, lineno, guards, annote):
        IcodeNode.__init__(self, lineno, annote)
        self.guards = guards

    def xml(self):
        # Serialisation for this node type has not been written yet.
        raise NotImplementedError
class Iterate(IcodeNode):
    """An iteration (guarded loop) statement."""

    def __init__(self, lineno, guards, annote):
        IcodeNode.__init__(self, lineno, annote)
        self.guards = guards

    def xml(self):
        # Serialisation for this node type has not been written yet.
        raise NotImplementedError
class Nu(IcodeNode):
    """A name (identifier)."""

    def __init__(self, lineno, n, annote):
        IcodeNode.__init__(self, lineno, annote)
        # The identifier itself.
        self.n = n

    def xml(self):
        """Serialise as a <nu> element containing the name's str()."""
        return ''.join(['<nu>', str(self.n), self._annote2xml(), '</nu>\n'])
class NameSpace(IcodeNode):
    """An unparameterised name space."""

    def __init__(self, lineno, name, space, annote):
        IcodeNode.__init__(self, lineno, annote)
        self.name = name
        # An iterable of the definitions contained in this space.
        self.space = space

    def xml(self):
        # Serialisation for this node type has not been written yet.
        raise NotImplementedError
class ParamNameSpace(IcodeNode):
    """A parameterised name space."""

    def __init__(self, lineno, name, args, space, annote):
        IcodeNode.__init__(self, lineno, annote)
        self.args = args
        self.name = name
        # An iterable of the definitions contained in this space.
        self.space = space

    def xml(self):
        # Serialisation for this node type has not been written yet.
        raise NotImplementedError
| Python |
#!/usr/bin/env python
"""
py2icode.py produces an ICODE representation of a Python file.
Usage: py2icode.py <in_file> ?<out_file>
<in_file> should be a python file for processing. The full path is not
needed. <out_file> is the name of the file which should store the
ICODE translation of <in_file>. If no name is given STDOUT is used.
"""
__author__ = 'Sarah Mount <s.mount@coventry.ac.uk>'
__date__ = '2010-05-16'
DEBUG = True
import ast, sys
from pyicode import *
__all__ = ['Ast2IcodeVisitor']
class Ast2IcodeVisitor(ast.NodeTransformer):
    """
    AST Visitor which creates an ICODE translation of the AST, stored
    in its icode attribute.
    """
    def __init__(self):
        # BUG FIX: the super call previously named an undefined class
        # (AST2ICODEVisitor) and, lacking .__init__(), never actually
        # initialised the base class.
        super(Ast2IcodeVisitor, self).__init__()
        # Accumulates the ICODE text generated so far.
        self.icode = ''
    def visit_FunctionDef(self, node):
        # BUG FIX: 'vist_Function' was a typo that ast.NodeTransformer
        # never dispatched; the hook is visit_FunctionDef. Returning the
        # node keeps the tree intact until translation is implemented.
        self.generic_visit(node)
        return node
# def default(self, node):
# """Gives debug info in place of unwritten visit methods."""
# self.generic_visit(node)
# return node
# self.icode += "\nDEBUG: START NODE\n"
# self.icode += '\t__repr__:' + node.__repr__() + '\n'
# self.icode += '\t__dict__:' + str(node.__dict__) + '\n'
# self.icode += "DEBUG: END NODE\n"
if __name__ == '__main__':
    if DEBUG: print ( 'Debugging: ON. Script arguments:' + str ( sys.argv ) )
    # Determine the input file.
    if len(sys.argv) == 1:
        print ( 'You must specify a Python file for processing.' )
        sys.exit(1)
    else:
        i_file = sys.argv[1]
    # Determine the output file. Use sys.stdout if none specified.
    if len(sys.argv) > 2:
        o_file = sys.argv[2]
        o_fd = open(sys.argv[2], 'w')
        if DEBUG: print ( 'Output file:' + str ( sys.argv[2] ) )
    else:
        if DEBUG: print ( 'Using STDOUT for output.' )
        o_file = ''
        o_fd = sys.stdout
    # This is the important stuff.
    infile = open(i_file).read()
    # BUG FIX: parse to an AST (PyCF_ONLY_AST was commented out, leaving
    # a code object that the visitor cannot walk) and use the class's
    # real name -- 'AST2ICODEVisitor' was an undefined identifier.
    tree = compile(infile, '<string>', 'exec', ast.PyCF_ONLY_AST)
    outtree = Ast2IcodeVisitor().visit(tree)
    # NOTE(review): ast.Module has no xml() method; this relies on the
    # transformer eventually substituting ICODE nodes -- confirm once
    # the visit methods are implemented.
    o_fd.write(outtree.xml())
    if not o_file == '':
        o_fd.close()
    # ...end of important stuff.
####################################################################
# SCRATCH SPACE #
####################################################################
#
# self.icode += "\tSTART CHILD NODES\n"
# for i in node.getChildNodes():
# self.icode += '\t\t__repr__:' + i.__repr__() + '\n'
# self.icode += '\t\t__dict__:' + str(i.__dict__) + '\n'
# self.dispatch(i)
# self.icode += "\tEND CHILD NODES\n"
############# SCRATCH
# def visit_Const(self, node):
# self.generic_visit(node)
# return IcodeConst(node.value, lineno=node.lineno)
# def visit_Assign(self, node):
# print 'ASSIGN'
# for key,val in node.__dict__.items():
# print key, val
# print 'END'
# self.generic_visit(node)
# return IcodeAssign(node.nodes, node.expr, lineno=node.lineno)
# def visit_Import(self, node):
# self.generic_visit(node)
# return IcodeImport(node.names)
| Python |
import Channel as chnl
import cPickle
import uuid
from csp.guards import Guard
class CChannel(Guard):
    """A CSP guard backed by a native channel from the Channel C module.

    Values are pickled before crossing the native boundary and
    unpickled on the way back.
    """
    def __init__(self):
        # Random 24-bit keys used by the native layer for its IPC
        # resources -- TODO(review): confirm what each key identifies.
        p = uuid.uuid4().int & 0xffffff
        av = uuid.uuid4().int & 0xffffff
        tak = uuid.uuid4().int & 0xffffff
        shm = uuid.uuid4().int & 0xffffff
        self.channel = chnl.getChannel(p, av, tak, shm)
        self.name = uuid.uuid1()
        return
    def __del__(self):
        # Release the native channel when this wrapper is collected.
        chnl.removeChannel(self.channel)
        self.channel = None
        return
    def put(self, item):
        """Pickle item and put it on the native channel."""
        a = cPickle.dumps(item)
        chnl.put(self.channel, a)
        return
    def get(self):
        """Take the next item from the native channel and unpickle it."""
        # BUG FIX: 'ret' was previously read before ever being assigned
        # (NameError). Capture the native call's result instead.
        # TODO(review): confirm chnl.get returns the pickled payload
        # rather than filling an out-parameter.
        ret = chnl.get(self.channel)
        item = cPickle.loads(ret)
        print(item)
        return item
    def is_selectable(self):
        """True when the native channel reports it is ready for select."""
        return chnl.is_selectable(self.channel) == 1
    def write(self, item):
        """Pickle item and write it to the native channel."""
        a = cPickle.dumps(item)
        chnl._write(self.channel, a, len(a))
        return
    def read(self):
        """Read, unpickle and return one item from the native channel."""
        print("invoked read")
        ret = chnl._read(self.channel)
        print(ret)
        item = cPickle.loads(ret)
        print(item)
        return item
    def enable(self):
        chnl.enable(self.channel)
        return
    def disable(self):
        chnl.disable(self.channel)
        return
    def select(self):
        """Complete a select on this channel and return the item read."""
        ret = chnl._select(self.channel)
        item = cPickle.loads(ret)
        print(item)
        return item
    def poison(self):
        chnl.poison(self.channel)
        return
    def getStatus(self):
        chnl.getStatus(self.channel)
        return
    def checkpoison(self):
        chnl.checkpoison()
        return
| Python |
#!/usr/bin/env python
"""CSP Commstime benchmark.
See F.R.M. Barnes (2006) Compiling CSP. In Proceedings of
Communicating Process Architectures 2006.
Code adapted from PyCSP by John Markus Bjorndalen, available:
http://www.cs.uit.no/~johnm/code/PyCSP/
PyCSP - Communicating Sequential Processes for Python. John Markus
Bjorndalen, Brian Vinter, Otto Anshus. CPA 2007, Surrey, UK, July
8-11, 2007. IOS Press 2007, ISBN 978-1-58603-767-3, Concurrent
Systems Engineering Series (ISSN 1383-7575).
"""
from csp.csp import Par, process #, Channel
from csp.builtins import Prefix, Delta2, Succ
import sys
sys.path.append('../')
from CChannel import CChannel as Channel
del sys
import time
@process
def Consumer(cin):
    """Commstime consumer process
    readset = cin
    writeset =
    """
    N = 5000
    now = time.time
    # Discard the first message: it warms the ring up before timing.
    cin.read()
    start = now()
    for _ in range(N):
        cin.read()
    elapsed = now() - start
    # Four channel operations happen in the ring per consumed message.
    per_chan = elapsed / (4 * N)
    print("DT = {0}\nTime per ch : {1}/(4*{2}) = {3} s = {4} us".format(
        elapsed, elapsed, N, per_chan, per_chan * 1000000))
    print("consumer done, poisoning channel")
    cin.poison()
def CommsTimeBM():
    """Wire up and run one Commstime network."""
    print('Creating channels now...')
    # One channel per edge of the Commstime ring.
    a, b, c, d = Channel(), Channel(), Channel(), Channel()
    print("Running commstime test")
    network = Par(Prefix(c, a, prefix_item=0),  # Initiator
                  Delta2(a, b, d),              # Forwarding to two
                  Succ(b, c),                   # Feeding back to prefix
                  Consumer(d))                  # Timing process
    network.start()
    print('Finished run...')
if __name__ == '__main__':
    # Repeat the whole benchmark a fixed number of times.
    N_BM = 10
    for run in xrange(N_BM):
        print("----------- run {0}/{1} -------------".format(run + 1, N_BM))
        CommsTimeBM()
    print("------- Commstime finished ---------")
| Python |
#! /bin/env python2.6
"""
Benchmark based on variable sized ring buffer.
See also PyCSP papers in CPA2009 proceedings.
Usage: tokenring-processes.py [options]
Options:
-h, --help show this help message and exit
-t TOKENS, --tokens=TOKENS
Number of tokens in token ring
-n NODES, --nodes=NODES
Number of nodes in token ring
-x, --experiment Experimental mode. Run 10 token rings with nodes 2^1
to 2^10 and print results
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'November 2009'
from csp.csp import process, Par
from patterns import TokenRing
import sys
sys.path.append('../')
from CChannel import CChannel as Channel
del sys
import time
# Number of times each node forwards the token during one run.
TRIALS = 10000

@process
def ringproc(index=0, numnodes=64, tokens=1, inchan=None, outchan=None):
    """
    readset = inchan
    writeset = outchan
    """
    # Node 0 injects the single token into the ring.
    if tokens == 1 and index == 0:
        outchan.write(1)
    for _ in xrange(TRIALS):
        outchan.write(inchan.read())
    # Avoid deadlock: node 1 drains the final circulating token.
    if index == 1:
        inchan.read()
if __name__ == '__main__':
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option('-t', '--tokens', dest='tokens',
                      action='store', type="int",
                      default=1,
                      help='Number of tokens in token ring')
    parser.add_option('-n', '--nodes', dest='nodes',
                      action='store', type="int",
                      default=64,
                      help='Number of nodes in token ring')
    parser.add_option('-x', '--experiment', dest='exp',
                      action='store_true', default=False,
                      help=('Experimental mode. Run 10 token rings with nodes '
                            + '2^1 to 2^10 and print results'))
    (options, args) = parser.parse_args()
    if options.exp:
        print('All times measured in microseconds.')
        for size in xrange(2, 50):
            try:
                print('Token ring with {0} nodes.'.format(2 ** size))
                starttime = time.time()
                TokenRing(ringproc, 2 ** size, numtoks=options.tokens).start()
                elapsed = time.time() - starttime
                # Mean time per channel operation, in microseconds.
                mu = elapsed * 1000000 / float((TRIALS * (2 ** size)))
                # BUG FIX: the value is in microseconds (as announced
                # above), not milliseconds -- label corrected.
                print('{0}us'.format(mu))
            # BUG FIX: a bare 'except:' also swallowed KeyboardInterrupt
            # and SystemExit; catch ordinary errors only.
            except Exception:
                continue
    else:
        TokenRing(ringproc, options.nodes, numtoks=options.tokens).start()
| Python |
#!/usr/bin/env python
"""Design pattern support for python-csp.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'May 2010'
from csp.csp import Par
from CChannel import CChannel as Channel
__all__ = ['TokenRing']
class TokenRing(Par):
    """A ring of CSP processes connected by channels.

    Node i reads from channel i-1 and writes to channel i; node 0 reads
    from the last channel (index -1 wraps), which closes the ring.
    """

    def __init__(self, func, size, numtoks=1):
        self.chans = [Channel() for _ in xrange(size)]
        self.procs = []
        for i in xrange(size):
            self.procs.append(func(index=i,
                                   tokens=numtoks,
                                   numnodes=size,
                                   inchan=self.chans[i - 1],
                                   outchan=self.chans[i]))
        super(TokenRing, self).__init__(*self.procs)
| Python |
from csp.csp import Par, process
import sys
sys.path.append('../')
from CChannel import CChannel as Channel
del sys
@process
def out(cout):
    """Endlessly write an incrementing counter to cout, with tracing."""
    counter = 0
    while True:
        print ( "PYTHON: About to write " + str ( counter ) + "\n" )
        cout.write(counter)
        print ( "PYTHON: Have written " + str ( counter ) + "\n" )
        counter += 1
@process
def inn(cin):
    """Endlessly read values from cin, with tracing."""
    while True:
        print ( "PYTHON: About to read \n" )
        value = cin.read()
        print ( "PYTHON: Read "+ str ( value ) + "\n" )
if __name__ == '__main__':
    # One shared channel: 'out' produces, 'inn' consumes.
    chan = Channel()
    Par(out(chan), inn(chan)).start()
| Python |
#!/usr/bin/env python
"""Python CSP full adder.
Based on code from PyCSP - Communicating Sequential Processes for
Python. John Markus Bjorndalen, Brian Vinter, Otto Anshus. CPA 2007,
Surrey, UK, July 8-11, 2007. IOS Press 2007, ISBN 978-1-58603-767-3,
Concurrent Systems Engineering Series (ISSN 1383-7575).
Copyright (C) Sarah Mount, 2009.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'December 2008'
from csp.csp import Par, process
import sys
sys.path.append('../')
from CChannel import CChannel as Channel
del sys
from csp.builtins import *
@process
def Bool1(cout):
    """
    readset =
    writeset = cout
    """
    # Endlessly emit the pattern 1, 1, 0, 0: the 'A' truth-table column.
    while True:
        for bit in (1, 1, 0, 0):
            cout.write(bit)
@process
def Bool2(cout):
    """
    readset =
    writeset = cout
    """
    # Endlessly emit the pattern 1, 0, 1, 0: the 'B' truth-table column.
    while True:
        for bit in (1, 0, 1, 0):
            cout.write(bit)
def fulladder(A_in, B_in, C_in, Sum_in, Carry_in):
    """Full adder implementation.
    Based on Bjorndalen, Vinter & Anshus (2007).

    Builds Sum = A xor B xor Cin and Carry = A.B + (A xor B).Cin as a
    network of CSP gate processes.

    @param A_in channel carrying input bit A
    @param B_in channel carrying input bit B
    @param C_in channel carrying the carry-in bit
    @param Sum_in output channel for the sum bit
    @param Carry_in output channel for the carry-out bit
    @return an (unstarted) Par network wiring the gates together
    """
    # Each input is duplicated (Delta2) because it feeds two gates.
    Aa = Channel()
    Ab = Channel()
    Ba = Channel()
    Bb = Channel()
    Ca = Channel()
    Cb = Channel()
    # i1 carries A xor B, duplicated into i1a (sum path) / i1b (carry path).
    i1 = Channel()
    i1a = Channel()
    i1b = Channel()
    # i2 carries A and B; i3 carries (A xor B) and Cin.
    i2 = Channel()
    i3 = Channel()
    return Par(Delta2(A_in, Aa, Ab),
               Delta2(B_in, Ba, Bb),
               Delta2(C_in, Ca, Cb),
               Delta2(i1, i1a, i1b),
               Xor(Aa, Ba, i1),
               Xor(i1a, Ca, Sum_in),
               And(Ab, Bb, i2),
               And(i1b, Cb, i3),
               Or(i2, i3, Carry_in))
if __name__ == '__main__':
    print('\nFull adder implemented in Python CSP\n')
    # Inputs to full adder
    A = Channel()
    B = Channel()
    Cin = Channel()
    # Outputs of full adder
    Carry = Channel()
    Sum = Channel()
    # Channels for printing to STDOUT
    PCarry = Channel()
    PSum = Channel()
    # Create and start adder.
    # Bool1/Bool2 drive the A/B truth-table columns; Zeroes holds the
    # carry-in at 0. Sign labels each output value and Printer writes
    # it to STDOUT. Runs until interrupted.
    adder = Par(Bool1(A),
                Bool2(B),
                Zeroes(Cin),
                fulladder(A, B, Cin, Sum, Carry),
                Sign(Carry, PCarry, 'Carry: '),
                Printer(PCarry),
                Sign(Sum, PSum, 'Sum: '),
                Printer(PSum))
    adder.start()
| Python |
#!/usr/bin/env python
"""
Check that every process in a file has correct readsets and writesets.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import compiler
import compiler.ast as ast
import compiler.visitor as visitor
import exstatic.cspwarnings
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'April 2010'
__all__ = ['ChannelChecker']
class ChannelChecker(visitor.ASTVisitor):
    """Check that documented readsets and writesets are correct
    w.r.t. code.

    For each @process / @forever function, the 'readset =' and
    'writeset =' lines of its docstring are compared against the
    channel read() / write() calls actually made in the body.
    """
    def __init__(self, filename):
        visitor.ASTVisitor.__init__(self)
        self.filename = filename
        # Name / line of the process currently being checked.
        self.current_process = ''
        self.current_process_lineno = 0
        # Channels actually used in the body: {lineno: channel name}.
        self.writeset = {}
        self.readset = {}
        # Docstring lines where the sets were declared (for reporting).
        self.readset_lineno = 0
        self.writeset_lineno = 0
        return
    def extract_sets(self, doc):
        """Extract the readset and writeset from function
        documentation.

        @param doc the process's docstring, or None
        @return a pair (readset, writeset) of sets of channel names
        """
        readset = []
        writeset = []
        has_readset = False
        has_writeset = False
        lineno = 0
        if doc is not None:
            for line in doc.split('\n'):
                lineno += 1
                words = line.strip().split('=')
                if words is not None:
                    if words[0].strip() == 'readset':
                        has_readset = True
                        self.readset_lineno += lineno
                        chans = words[1].strip().split(',')
                        # BUG FIX: compare with != rather than 'is not';
                        # identity comparison against a string literal
                        # only worked by accident of interning.
                        readset = [y for y in [x.strip() for x in chans]
                                   if y != '']
                    elif words[0].strip() == 'writeset':
                        has_writeset = True
                        self.writeset_lineno += lineno
                        chans = words[1].strip().split(',')
                        writeset = [y for y in [x.strip() for x in chans]
                                    if y != '']
        # 'W002':'No readset given in documentation.'
        if not has_readset:
            exstatic.cspwarnings.create_error(self.filename,
                                              self.readset_lineno,
                                              self.current_process,
                                              'W002')
        # 'W003':'No writeset given in documentation.'
        if not has_writeset:
            exstatic.cspwarnings.create_error(self.filename,
                                              self.writeset_lineno,
                                              self.current_process,
                                              'W003')
        return set(readset), set(writeset)
    def is_process(self, decorators):
        """Determine whether or not the current function is a CSP
        process.
        """
        for decorator in decorators:
            if (decorator.name == 'process' or decorator.name == 'forever'):
                return True
        return False
    def check_sets(self, readset, writeset):
        """Check that the documented readset and writeset of the
        current function match the code inside the function
        definition.
        @param readset the documented readset of the current process
        @param writeset the documented writeset of the current process
        """
        # 'W001':'Channel in both readset and writeset.'
        if len(readset.intersection(writeset)) > 0:
            exstatic.cspwarnings.create_error(self.filename,
                                              self.readset_lineno,
                                              self.current_process,
                                              'W001')
        # BUG FIX: the set differences for E004/E005 (and E006/E007)
        # were reversed, so each error fired in exactly the wrong case.
        # 'E004':'Channel appears in documented readset but not read
        # from in function body.'
        for channel in readset.difference(self.readset.values()):
            exstatic.cspwarnings.create_error(self.filename,
                                              self.readset_lineno,
                                              self.current_process,
                                              'E004')
        # 'E005':'Channel is read from in function body but does not
        # appear in documented readset' -- reported at each such read.
        for lineno, channel in self.readset.items():
            if channel not in readset:
                exstatic.cspwarnings.create_error(self.filename,
                                                  lineno,
                                                  self.current_process,
                                                  'E005')
        # 'E006':'Channel appears in documented writeset but not
        # written to in function body.'
        for channel in writeset.difference(self.writeset.values()):
            exstatic.cspwarnings.create_error(self.filename,
                                              self.writeset_lineno,
                                              self.current_process,
                                              'E006')
        # 'E007':'Channel is written to in function body but does not
        # appear in documented writeset' -- reported at each such write.
        for lineno, channel in self.writeset.items():
            if channel not in writeset:
                exstatic.cspwarnings.create_error(self.filename,
                                                  lineno,
                                                  self.current_process,
                                                  'E007')
        return
    def visitFunction(self, node):
        """Visit function definition.
        """
        # If this function definition is not a CSP process, ignore it.
        # BUG FIX: is_process returns True/False, never None, so the
        # previous 'is None' test could never skip a plain function.
        if (node.decorators is None or
                not self.is_process(node.decorators)):
            return
        # Store useful information about this process.
        self.current_process = node.name
        self.current_process_lineno = node.lineno
        self.readset_lineno, self.writeset_lineno = node.lineno, node.lineno
        readset, writeset = self.extract_sets(node.doc)
        # 'E002':'Channel in readset is not a formal parameter to this
        # process.',
        for channel in readset:
            if not channel in node.argnames:
                exstatic.cspwarnings.create_error(self.filename,
                                                  self.readset_lineno,
                                                  node.name,
                                                  'E002')
        # 'E003':'Channel in writeset is not a formal parameter to
        # this process.',
        for channel in writeset:
            if not channel in node.argnames:
                exstatic.cspwarnings.create_error(self.filename,
                                                  self.writeset_lineno,
                                                  node.name,
                                                  'E003')
        # Ensure that we visit every statement inside this function.
        for stmt in node.code:
            self.visit(stmt)
        # Check the documented readset and writeset against actual
        # method calls within the function.
        self.check_sets(readset, writeset)
        # Remove information held about this function.
        self.current_process = ''
        self.current_process_lineno = 0
        self.writeset = {}
        self.readset = {}
        return
    def visitCallFunc(self, node):
        """Visit function call.
        TODO: Deal with Alt and Barrier types.
        """
        callee = node.node
        if isinstance(callee, ast.Getattr):
            if not isinstance(callee.expr, ast.Getattr):
                # Catch all calls to channel read().
                if callee.attrname == 'read':
                    self.readset[callee.lineno] = callee.expr.name
                # Catch all calls to channel write()
                elif callee.attrname == 'write':
                    self.writeset[callee.lineno] = callee.expr.name
        return
if __name__ == '__main__':
    import sys
    checker = ChannelChecker(sys.argv[1])
    # The checker doubles as the walker so its visit methods fire.
    compiler.walk(compiler.parseFile(sys.argv[1]),
                  checker, walker=checker, verbose=5)
    exstatic.cspwarnings.print_errors(excluded=[])
| Python |
#!/usr/bin/env python
"""
Check for errors in process definitions.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import compiler
import compiler.ast as ast
import compiler.visitor as visitor
import exstatic.cspwarnings
__author__ = 'Sarah Mount <s.mount@wlv.ac.uk>'
__date__ = 'May 2010'
__all__ = ['ProcessChecker']
class ProcessChecker(visitor.ASTVisitor):
    """Report every CSP process definition and warn when @process or
    @forever has been applied to a method rather than a function.
    """
    def __init__(self, filename):
        visitor.ASTVisitor.__init__(self)
        self.filename = filename
        # Name / line of the process currently being examined.
        self.current_process = ''
        self.current_process_lineno = 0
        return
    def is_process(self, decorators):
        """Determine whether or not the current function is a CSP
        process.
        """
        for decorator in decorators:
            if (decorator.name == 'process' or decorator.name == 'forever'):
                return True
        return False
    def visitFunction(self, node):
        """Visit function definition.
        """
        # If this function definition is not a CSP process, ignore it.
        # BUG FIX: is_process returns True/False, never None, so the
        # previous 'is None' test could never skip a plain function.
        if (node.decorators is None or
                not self.is_process(node.decorators)):
            return
        # Store useful information about this process.
        self.current_process = node.name
        self.current_process_lineno = node.lineno
        # 'I001':'Function is a CSP process or server process',
        exstatic.cspwarnings.create_error(self.filename,
                                          self.current_process_lineno,
                                          self.current_process,
                                          'I001')
        # 'W004':'@process or @forever applied to method (rather than function)'
        if 'self' in node.argnames:
            exstatic.cspwarnings.create_error(self.filename,
                                              self.current_process_lineno,
                                              self.current_process,
                                              'W004')
        return
if __name__ == '__main__':
    import sys
    checker = ProcessChecker(sys.argv[1])
    # The checker doubles as the walker so its visit methods fire.
    compiler.walk(compiler.parseFile(sys.argv[1]),
                  checker, walker=checker, verbose=5)
    exstatic.cspwarnings.print_errors(excluded=[])
| Python |
#!/usr/bin/env python
"""
Combined linting for python-csp.
Copyright (C) Sarah Mount, 2010.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import compiler
import csp.lint.channels
import csp.lint.processes
import exstatic.cspwarnings
__all__ = ['run']
# The static checkers applied to each file, in order.
checkers = [csp.lint.channels.ChannelChecker,
            csp.lint.processes.ProcessChecker]

def run(filename, excluded=None):
    """Run every registered checker over filename and print the errors.

    @param filename path of the Python file to lint
    @param excluded error codes to suppress in the report
    """
    # BUG FIX: avoid the shared mutable default argument 'excluded=[]';
    # None now means "exclude nothing".
    if excluded is None:
        excluded = []
    exstatic.cspwarnings.reset_errors()
    for checker in checkers:
        lint = checker(filename)
        # Each checker doubles as the walker so its visit methods fire.
        compiler.walk(compiler.parseFile(filename),
                      lint,
                      walker=lint,
                      verbose=5)
    exstatic.cspwarnings.print_errors(excluded=excluded)
    return
if __name__ == '__main__':
    import sys
    # BUG FIX: 'sys.argv > 1' compared a list with an int (vacuously
    # true on Python 2, TypeError on Python 3); count arguments instead.
    if len(sys.argv) > 1:
        run(sys.argv[1])
    sys.exit()
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.