| repo_name (string, 5–100 chars) | path (string, 4–294 chars) | copies (990 classes) | size (string, 4–7 chars) | content (string, 666 chars–1M) | license (15 classes) |
|---|---|---|---|---|---|
rlugojr/django | django/db/migrations/executor.py | 9 | 17416 | from django.apps.registry import apps as global_apps
from django.db import migrations, router
from .exceptions import InvalidMigrationPlan
from .loader import MigrationLoader
from .recorder import MigrationRecorder
from .state import ProjectState
class MigrationExecutor:
    """
    End-to-end migration execution - loads migrations, and runs them
    up or down to a specified set of targets.
    """

    def __init__(self, connection, progress_callback=None):
        # connection: database connection migrations are applied against.
        # progress_callback: optional callable invoked with progress events
        # ("render_start", "apply_start", "apply_success", ...) so callers
        # (e.g. the migrate management command) can report status.
        self.connection = connection
        self.loader = MigrationLoader(self.connection)
        self.recorder = MigrationRecorder(self.connection)
        self.progress_callback = progress_callback

    def migration_plan(self, targets, clean_start=False):
        """
        Given a set of targets, returns a list of (Migration instance, backwards?).

        targets: iterable of (app_label, migration_name) nodes; a target of
        (app_label, None) means "unmigrate the whole app".
        clean_start: if True, plan as if no migration had been applied yet
        (used to compute the full forwards plan for an empty database).
        """
        plan = []
        if clean_start:
            applied = set()
        else:
            applied = set(self.loader.applied_migrations)
        for target in targets:
            # If the target is (app_label, None), that means unmigrate everything
            if target[1] is None:
                for root in self.loader.graph.root_nodes():
                    if root[0] == target[0]:
                        for migration in self.loader.graph.backwards_plan(root):
                            if migration in applied:
                                plan.append((self.loader.graph.nodes[migration], True))
                                applied.remove(migration)
            # If the migration is already applied, do backwards mode,
            # otherwise do forwards mode.
            elif target in applied:
                # Don't migrate backwards all the way to the target node (that
                # may roll back dependencies in other apps that don't need to
                # be rolled back); instead roll back through target's immediate
                # child(ren) in the same app, and no further.
                next_in_app = sorted(
                    n for n in
                    self.loader.graph.node_map[target].children
                    if n[0] == target[0]
                )
                for node in next_in_app:
                    for migration in self.loader.graph.backwards_plan(node):
                        if migration in applied:
                            plan.append((self.loader.graph.nodes[migration], True))
                            applied.remove(migration)
            else:
                for migration in self.loader.graph.forwards_plan(target):
                    if migration not in applied:
                        plan.append((self.loader.graph.nodes[migration], False))
                        applied.add(migration)
        return plan

    def _create_project_state(self, with_applied_migrations=False):
        """
        Create a project state including all the applications without
        migrations and applied migrations if with_applied_migrations=True.
        """
        state = ProjectState(real_apps=list(self.loader.unmigrated_apps))
        if with_applied_migrations:
            # Create the forwards plan Django would follow on an empty database
            full_plan = self.migration_plan(self.loader.graph.leaf_nodes(), clean_start=True)
            # Only migrations that are both recorded as applied AND present in
            # the graph are mutated into the state (replaced/squashed entries
            # may be recorded but absent from the graph).
            applied_migrations = {
                self.loader.graph.nodes[key] for key in self.loader.applied_migrations
                if key in self.loader.graph.nodes
            }
            for migration, _ in full_plan:
                if migration in applied_migrations:
                    migration.mutate_state(state, preserve=False)
        return state

    def migrate(self, targets, plan=None, state=None, fake=False, fake_initial=False):
        """
        Migrates the database up to the given targets.

        Django first needs to create all project states before a migration is
        (un)applied and in a second step run all the database operations.

        Returns the resulting ProjectState. Raises InvalidMigrationPlan if the
        plan mixes forwards and backwards migrations.
        """
        if plan is None:
            plan = self.migration_plan(targets)
        # Create the forwards plan Django would follow on an empty database
        full_plan = self.migration_plan(self.loader.graph.leaf_nodes(), clean_start=True)

        all_forwards = all(not backwards for mig, backwards in plan)
        all_backwards = all(backwards for mig, backwards in plan)

        if not plan:
            if state is None:
                # The resulting state should include applied migrations.
                state = self._create_project_state(with_applied_migrations=True)
        elif all_forwards == all_backwards:
            # This should only happen if there's a mixed plan
            # (an empty plan satisfies both all() calls, but that case is
            # handled above).
            raise InvalidMigrationPlan(
                "Migration plans with both forwards and backwards migrations "
                "are not supported. Please split your migration process into "
                "separate plans of only forwards OR backwards migrations.",
                plan
            )
        elif all_forwards:
            if state is None:
                # The resulting state should still include applied migrations.
                state = self._create_project_state(with_applied_migrations=True)
            state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
        else:
            # No need to check for `elif all_backwards` here, as that condition
            # would always evaluate to true.
            state = self._migrate_all_backwards(plan, full_plan, fake=fake)

        self.check_replacements()

        return state

    def _migrate_all_forwards(self, state, plan, full_plan, fake, fake_initial):
        """
        Take a list of 2-tuples of the form (migration instance, False) and
        apply them in the order they occur in the full_plan.
        """
        migrations_to_run = {m[0] for m in plan}
        for migration, _ in full_plan:
            if not migrations_to_run:
                # We remove every migration that we applied from these sets so
                # that we can bail out once the last migration has been applied
                # and don't always run until the very end of the migration
                # process.
                break
            if migration in migrations_to_run:
                if 'apps' not in state.__dict__:
                    # state.apps is a cached_property; rendering all models is
                    # the expensive step, so report progress around it.
                    if self.progress_callback:
                        self.progress_callback("render_start")
                    state.apps  # Render all -- performance critical
                    if self.progress_callback:
                        self.progress_callback("render_success")
                state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
                migrations_to_run.remove(migration)
        return state

    def _migrate_all_backwards(self, plan, full_plan, fake):
        """
        Take a list of 2-tuples of the form (migration instance, True) and
        unapply them in reverse order they occur in the full_plan.

        Since unapplying a migration requires the project state prior to that
        migration, Django will compute the migration states before each of them
        in a first run over the plan and then unapply them in a second run over
        the plan.
        """
        migrations_to_run = {m[0] for m in plan}
        # Holds all migration states prior to the migrations being unapplied
        states = {}
        state = self._create_project_state()
        applied_migrations = {
            self.loader.graph.nodes[key] for key in self.loader.applied_migrations
            if key in self.loader.graph.nodes
        }
        if self.progress_callback:
            self.progress_callback("render_start")
        # First pass: walk the full forwards plan and snapshot the state just
        # before each migration that is about to be unapplied.
        for migration, _ in full_plan:
            if not migrations_to_run:
                # We remove every migration that we applied from this set so
                # that we can bail out once the last migration has been applied
                # and don't always run until the very end of the migration
                # process.
                break
            if migration in migrations_to_run:
                if 'apps' not in state.__dict__:
                    state.apps  # Render all -- performance critical
                # The state before this migration
                states[migration] = state
                # The old state keeps as-is, we continue with the new state
                state = migration.mutate_state(state, preserve=True)
                migrations_to_run.remove(migration)
            elif migration in applied_migrations:
                # Only mutate the state if the migration is actually applied
                # to make sure the resulting state doesn't include changes
                # from unrelated migrations.
                migration.mutate_state(state, preserve=False)
        if self.progress_callback:
            self.progress_callback("render_success")

        # Second pass: unapply in plan order, each from its snapshotted state.
        for migration, _ in plan:
            self.unapply_migration(states[migration], migration, fake=fake)
            applied_migrations.remove(migration)

        # Generate the post migration state by starting from the state before
        # the last migration is unapplied and mutating it to include all the
        # remaining applied migrations.
        last_unapplied_migration = plan[-1][0]
        state = states[last_unapplied_migration]
        for index, (migration, _) in enumerate(full_plan):
            if migration == last_unapplied_migration:
                for migration, _ in full_plan[index:]:
                    if migration in applied_migrations:
                        migration.mutate_state(state, preserve=False)
                break

        return state

    def collect_sql(self, plan):
        """
        Takes a migration plan and returns a list of collected SQL
        statements that represent the best-efforts version of that plan.
        """
        statements = []
        state = None
        for migration, backwards in plan:
            with self.connection.schema_editor(collect_sql=True, atomic=migration.atomic) as schema_editor:
                if state is None:
                    # Base state: everything up to (but not including) the
                    # first migration in the plan.
                    state = self.loader.project_state((migration.app_label, migration.name), at_end=False)
                if not backwards:
                    state = migration.apply(state, schema_editor, collect_sql=True)
                else:
                    state = migration.unapply(state, schema_editor, collect_sql=True)
            statements.extend(schema_editor.collected_sql)
        return statements

    def apply_migration(self, state, migration, fake=False, fake_initial=False):
        """
        Runs a migration forwards.

        Returns the project state after the migration. Records the migration
        (or, for squashes, each replaced migration) as applied.
        """
        if self.progress_callback:
            self.progress_callback("apply_start", migration, fake)
        if not fake:
            if fake_initial:
                # Test to see if this is an already-applied initial migration
                applied, state = self.detect_soft_applied(state, migration)
                if applied:
                    fake = True
            if not fake:
                # Alright, do it normally
                with self.connection.schema_editor(atomic=migration.atomic) as schema_editor:
                    state = migration.apply(state, schema_editor)
        # For replacement migrations, record individual statuses
        if migration.replaces:
            for app_label, name in migration.replaces:
                self.recorder.record_applied(app_label, name)
        else:
            self.recorder.record_applied(migration.app_label, migration.name)
        # Report progress
        if self.progress_callback:
            self.progress_callback("apply_success", migration, fake)
        return state

    def unapply_migration(self, state, migration, fake=False):
        """
        Runs a migration backwards.

        Returns the project state after unapplying. Records the migration
        (or each replaced migration) as unapplied.
        """
        if self.progress_callback:
            self.progress_callback("unapply_start", migration, fake)
        if not fake:
            with self.connection.schema_editor(atomic=migration.atomic) as schema_editor:
                state = migration.unapply(state, schema_editor)
        # For replacement migrations, record individual statuses
        if migration.replaces:
            for app_label, name in migration.replaces:
                self.recorder.record_unapplied(app_label, name)
        else:
            self.recorder.record_unapplied(migration.app_label, migration.name)
        # Report progress
        if self.progress_callback:
            self.progress_callback("unapply_success", migration, fake)
        return state

    def check_replacements(self):
        """
        Mark replacement migrations applied if their replaced set all are.

        We do this unconditionally on every migrate, rather than just when
        migrations are applied or unapplied, so as to correctly handle the case
        when a new squash migration is pushed to a deployment that already had
        all its replaced migrations applied. In this case no new migration will
        be applied, but we still want to correctly maintain the applied state
        of the squash migration.
        """
        applied = self.recorder.applied_migrations()
        for key, migration in self.loader.replacements.items():
            all_applied = all(m in applied for m in migration.replaces)
            if all_applied and key not in applied:
                self.recorder.record_applied(*key)

    def detect_soft_applied(self, project_state, migration):
        """
        Tests whether a migration has been implicitly applied - that the
        tables or columns it would create exist. This is intended only for use
        on initial migrations (as it only looks for CreateModel and AddField).

        Returns a 2-tuple (applied?, project_state_after_migration).
        """
        def should_skip_detecting_model(migration, model):
            """
            No need to detect tables for proxy models, unmanaged models, or
            models that can't be migrated on the current database.
            """
            return (
                model._meta.proxy or not model._meta.managed or not
                router.allow_migrate(
                    self.connection.alias, migration.app_label,
                    model_name=model._meta.model_name,
                )
            )

        if migration.initial is None:
            # Bail if the migration isn't the first one in its app
            if any(app == migration.app_label for app, name in migration.dependencies):
                return False, project_state
        elif migration.initial is False:
            # Bail if it's NOT an initial migration
            return False, project_state

        if project_state is None:
            after_state = self.loader.project_state((migration.app_label, migration.name), at_end=True)
        else:
            after_state = migration.mutate_state(project_state)
        apps = after_state.apps
        found_create_model_migration = False
        found_add_field_migration = False
        existing_table_names = self.connection.introspection.table_names(self.connection.cursor())
        # Make sure all create model and add field operations are done
        for operation in migration.operations:
            if isinstance(operation, migrations.CreateModel):
                model = apps.get_model(migration.app_label, operation.name)
                if model._meta.swapped:
                    # We have to fetch the model to test with from the
                    # main app cache, as it's not a direct dependency.
                    model = global_apps.get_model(model._meta.swapped)
                if should_skip_detecting_model(migration, model):
                    continue
                if model._meta.db_table not in existing_table_names:
                    return False, project_state
                found_create_model_migration = True
            elif isinstance(operation, migrations.AddField):
                model = apps.get_model(migration.app_label, operation.model_name)
                if model._meta.swapped:
                    # We have to fetch the model to test with from the
                    # main app cache, as it's not a direct dependency.
                    model = global_apps.get_model(model._meta.swapped)
                if should_skip_detecting_model(migration, model):
                    continue

                table = model._meta.db_table
                field = model._meta.get_field(operation.name)

                # Handle implicit many-to-many tables created by AddField.
                if field.many_to_many:
                    if field.remote_field.through._meta.db_table not in existing_table_names:
                        return False, project_state
                    else:
                        found_add_field_migration = True
                        continue

                column_names = [
                    column.name for column in
                    self.connection.introspection.get_table_description(self.connection.cursor(), table)
                ]
                if field.column not in column_names:
                    return False, project_state
                found_add_field_migration = True
        # If we get this far and we found at least one CreateModel or AddField migration,
        # the migration is considered implicitly applied.
        return (found_create_model_migration or found_add_field_migration), after_state
| bsd-3-clause |
my7seven/ansible | test/units/executor/test_task_executor.py | 15 | 9940 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.task_executor import TaskExecutor
from ansible.playbook.play_context import PlayContext
from ansible.plugins import action_loader, lookup_loader
from units.mock.loader import DictDataLoader
class TestTaskExecutor(unittest.TestCase):
    """Unit tests for ansible.executor.task_executor.TaskExecutor, driven
    entirely by MagicMock stand-ins for host/task/play-context objects."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_task_executor_init(self):
        """Smoke test: a TaskExecutor can be constructed from mocks."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_play_context = MagicMock()
        mock_shared_loader = MagicMock()
        new_stdin = None
        job_vars = dict()
        te = TaskExecutor(
            host = mock_host,
            task = mock_task,
            job_vars = job_vars,
            play_context = mock_play_context,
            new_stdin = new_stdin,
            loader = fake_loader,
            shared_loader_obj = mock_shared_loader,
        )

    def test_task_executor_run(self):
        """run() should handle no-loop, empty-loop, loop, and error cases."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_task._role._role_path = '/path/to/role/foo'
        mock_play_context = MagicMock()
        mock_shared_loader = MagicMock()
        new_stdin = None
        job_vars = dict()
        te = TaskExecutor(
            host = mock_host,
            task = mock_task,
            job_vars = job_vars,
            play_context = mock_play_context,
            new_stdin = new_stdin,
            loader = fake_loader,
            shared_loader_obj = mock_shared_loader,
        )
        # No loop items at all -> _execute() path.
        te._get_loop_items = MagicMock(return_value=None)
        te._execute = MagicMock(return_value=dict())
        res = te.run()
        # Empty loop.
        te._get_loop_items = MagicMock(return_value=[])
        res = te.run()
        # Loop with items -> _run_loop() path.
        te._get_loop_items = MagicMock(return_value=['a','b','c'])
        te._run_loop = MagicMock(return_value=[dict(item='a', changed=True), dict(item='b', failed=True), dict(item='c')])
        res = te.run()
        # An AnsibleError while fetching loop items must be turned into a
        # failed result, not propagated.
        te._get_loop_items = MagicMock(side_effect=AnsibleError(""))
        res = te.run()
        self.assertIn("failed", res)

    def test_task_executor_get_loop_items(self):
        """_get_loop_items() resolves a with_items-style loop via the real
        lookup_loader ('items' lookup passes values straight through)."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_task.loop = 'items'
        mock_task.loop_args = ['a', 'b', 'c']
        mock_play_context = MagicMock()
        mock_shared_loader = MagicMock()
        mock_shared_loader.lookup_loader = lookup_loader
        new_stdin = None
        job_vars = dict()
        te = TaskExecutor(
            host = mock_host,
            task = mock_task,
            job_vars = job_vars,
            play_context = mock_play_context,
            new_stdin = new_stdin,
            loader = fake_loader,
            shared_loader_obj = mock_shared_loader,
        )
        items = te._get_loop_items()
        self.assertEqual(items, ['a', 'b', 'c'])

    def test_task_executor_run_loop(self):
        """_run_loop() executes once per item and collects per-item results."""
        items = ['a', 'b', 'c']
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()

        def _copy():
            # Each loop iteration copies the task; return a fresh mock so
            # iterations don't share state.
            new_item = MagicMock()
            return new_item

        mock_task = MagicMock()
        mock_task.copy.side_effect = _copy
        mock_play_context = MagicMock()
        mock_shared_loader = MagicMock()
        new_stdin = None
        job_vars = dict()
        te = TaskExecutor(
            host = mock_host,
            task = mock_task,
            job_vars = job_vars,
            play_context = mock_play_context,
            new_stdin = new_stdin,
            loader = fake_loader,
            shared_loader_obj = mock_shared_loader,
        )

        def _execute(variables):
            # Echo the current loop item back as the result.
            return dict(item=variables.get('item'))

        te._squash_items = MagicMock(return_value=items)
        te._execute = MagicMock(side_effect=_execute)
        res = te._run_loop(items)
        self.assertEqual(len(res), 3)

    def test_task_executor_squash_items(self):
        """_squash_items() merges loop items into one call for squashable
        actions (yum) but leaves other actions untouched."""
        items = ['a', 'b', 'c']
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()

        def _evaluate_conditional(templar, variables):
            # Item 'b' fails its when-condition and is excluded from squash.
            item = variables.get('item')
            if item == 'b':
                return False
            return True

        mock_task = MagicMock()
        mock_task.evaluate_conditional.side_effect = _evaluate_conditional
        mock_play_context = MagicMock()
        mock_shared_loader = None
        new_stdin = None
        job_vars = dict()
        te = TaskExecutor(
            host = mock_host,
            task = mock_task,
            job_vars = job_vars,
            play_context = mock_play_context,
            new_stdin = new_stdin,
            loader = fake_loader,
            shared_loader_obj = mock_shared_loader,
        )
        # Non-squashable action: items pass through unchanged.
        mock_task.action = 'foo'
        new_items = te._squash_items(items=items, variables=job_vars)
        self.assertEqual(new_items, ['a', 'b', 'c'])
        # Squashable action: passing items are joined into a single spec.
        mock_task.action = 'yum'
        new_items = te._squash_items(items=items, variables=job_vars)
        self.assertEqual(new_items, ['a,c'])

    def test_task_executor_execute(self):
        """_execute() end-to-end with mocked connection/action handler,
        exercising changed_when / failed_when / conditional / include paths."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_task.args = dict()
        mock_task.retries = 0
        mock_task.delay = -1
        mock_task.register = 'foo'
        mock_task.until = None
        mock_task.changed_when = None
        mock_task.failed_when = None
        mock_task.post_validate.return_value = None
        mock_play_context = MagicMock()
        mock_play_context.post_validate.return_value = None
        mock_play_context.update_vars.return_value = None
        mock_connection = MagicMock()
        mock_connection.set_host_overrides.return_value = None
        mock_connection._connect.return_value = None
        mock_action = MagicMock()
        shared_loader = None
        new_stdin = None
        # 'omit' must be present in job vars; the value is an opaque marker.
        job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
        te = TaskExecutor(
            host = mock_host,
            task = mock_task,
            job_vars = job_vars,
            play_context = mock_play_context,
            new_stdin = new_stdin,
            loader = fake_loader,
            shared_loader_obj = shared_loader,
        )
        te._get_connection = MagicMock(return_value=mock_connection)
        te._get_action_handler = MagicMock(return_value=mock_action)
        mock_action.run.return_value = dict(ansible_facts=dict())
        res = te._execute()
        mock_task.changed_when = "1 == 1"
        res = te._execute()
        mock_task.changed_when = None
        mock_task.failed_when = "1 == 1"
        res = te._execute()
        mock_task.failed_when = None
        mock_task.evaluate_conditional.return_value = False
        res = te._execute()
        mock_task.evaluate_conditional.return_value = True
        mock_task.args = dict(_raw_params='foo.yml', a='foo', b='bar')
        mock_task.action = 'include'
        res = te._execute()

    def test_task_executor_poll_async_result(self):
        """_poll_async_result() fails on bad async_status results and
        returns the status dict once the job reports finished."""
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        # NOTE(review): `async` became a reserved keyword in Python 3.7, so
        # this attribute assignment only parses on older interpreters --
        # upstream later renamed the attribute; confirm target Python here.
        mock_task.async = 3
        mock_task.poll = 1
        mock_play_context = MagicMock()
        mock_connection = MagicMock()
        mock_action = MagicMock()
        shared_loader = MagicMock()
        shared_loader.action_loader = action_loader
        new_stdin = None
        job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
        te = TaskExecutor(
            host = mock_host,
            task = mock_task,
            job_vars = job_vars,
            play_context = mock_play_context,
            new_stdin = new_stdin,
            loader = fake_loader,
            shared_loader_obj = shared_loader,
        )
        te._connection = MagicMock()

        def _get(*args, **kwargs):
            # async_status action whose run() returns a malformed result.
            mock_action = MagicMock()
            mock_action.run.return_value = dict(stdout='')
            return mock_action

        # testing with some bad values in the result passed to poll async,
        # and with a bad value returned from the mock action
        with patch.object(action_loader, 'get', _get):
            mock_templar = MagicMock()
            res = te._poll_async_result(result=dict(), templar=mock_templar)
            self.assertIn('failed', res)
            res = te._poll_async_result(result=dict(ansible_job_id=1), templar=mock_templar)
            self.assertIn('failed', res)

        def _get(*args, **kwargs):
            # async_status action that reports the job as finished.
            mock_action = MagicMock()
            mock_action.run.return_value = dict(finished=1)
            return mock_action

        # now testing with good values
        with patch.object(action_loader, 'get', _get):
            mock_templar = MagicMock()
            res = te._poll_async_result(result=dict(ansible_job_id=1), templar=mock_templar)
            self.assertEqual(res, dict(finished=1))
| gpl-3.0 |
dancingdan/tensorflow | tensorflow/python/training/evaluation.py | 8 | 11047 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains functions for evaluation and summarization of metrics."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import time
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
def _get_or_create_eval_step():
  """Gets or creates the eval step `Tensor`.

  Returns:
    A `Tensor` representing a counter for the evaluation step.

  Raises:
    ValueError: If multiple `Tensors` have been added to the
      `tf.GraphKeys.EVAL_STEP` collection.
  """
  existing = ops.get_default_graph().get_collection(ops.GraphKeys.EVAL_STEP)
  if len(existing) > 1:
    raise ValueError('Multiple tensors added to tf.GraphKeys.EVAL_STEP')
  if existing:
    return existing[0]
  # Nothing registered yet: create a non-trainable int64 counter, tagged as
  # a local variable and registered in the EVAL_STEP collection so later
  # calls find it.
  return variable_scope.get_variable(
      'eval_step',
      shape=[],
      dtype=dtypes.int64,
      initializer=init_ops.zeros_initializer(),
      trainable=False,
      collections=[ops.GraphKeys.LOCAL_VARIABLES, ops.GraphKeys.EVAL_STEP])
def _get_latest_eval_step_value(update_ops):
  """Gets the eval step `Tensor` value after running `update_ops`.

  Args:
    update_ops: A list of `Tensors` or a dictionary of names to `Tensors`,
      which are run before reading the eval step value.

  Returns:
    A `Tensor` representing the value for the evaluation step.
  """
  deps = list(update_ops.values()) if isinstance(update_ops, dict) else update_ops
  # Force all update ops to run before the counter is read.
  with ops.control_dependencies(deps):
    return array_ops.identity(_get_or_create_eval_step().read_value())
class _MultiStepStopAfterNEvalsHook(session_run_hook.SessionRunHook):
  """Run hook used by the evaluation routines to run the `eval_ops` N times.

  Unlike `_StopAfterNEvalsHook`, this variant advances a variable number of
  eval steps per `session.run` call (via the shared steps-per-run variable).
  """

  def __init__(self, num_evals, steps_per_run=1):
    """Constructs the run hook.

    Args:
      num_evals: The number of evaluations to run for. if set to None, will
        iterate the dataset until all inputs are exhausted.
      steps_per_run: Number of steps executed per run call.
    """
    self._num_evals = num_evals
    self._evals_completed = None
    self._steps_per_run_initial_value = steps_per_run

  def _set_evals_completed_tensor(self, updated_eval_step):
    # Tensor (see _get_latest_eval_step_value) fetched on every run call to
    # learn how many evaluations have completed so far.
    self._evals_completed = updated_eval_step

  def begin(self):
    self._steps_per_run_variable = \
        basic_session_run_hooks.get_or_create_steps_per_run_variable()

  def after_create_session(self, session, coord):
    # Update number of steps to run in the first run call
    if self._num_evals is None:
      steps = self._steps_per_run_initial_value
    else:
      # Don't overshoot the requested total number of evals.
      steps = min(self._steps_per_run_initial_value, self._num_evals)
    self._steps_per_run_variable.load(steps, session=session)

  def before_run(self, run_context):
    return session_run_hook.SessionRunArgs({
        'evals_completed': self._evals_completed
    })

  def after_run(self, run_context, run_values):
    evals_completed = run_values.results['evals_completed']
    # Update number of steps to run in the next iteration
    if self._num_evals is None:
      steps = self._steps_per_run_initial_value
    else:
      steps = min(self._num_evals - evals_completed,
                  self._steps_per_run_initial_value)
    self._steps_per_run_variable.load(steps, session=run_context.session)
    if self._num_evals is None:
      logging.info('Evaluation [%d]', evals_completed)
    else:
      logging.info('Evaluation [%d/%d]', evals_completed, self._num_evals)
    # Stop the MonitoredSession loop once enough evals have run.
    if self._num_evals is not None and evals_completed >= self._num_evals:
      run_context.request_stop()
class _StopAfterNEvalsHook(session_run_hook.SessionRunHook):
  """Run hook used by the evaluation routines to run the `eval_ops` N times."""

  def __init__(self, num_evals, log_progress=True):
    """Constructs the run hook.

    Args:
      num_evals: The number of evaluations to run for. if set to None, will
        iterate the dataset until all inputs are exhausted.
      log_progress: Whether to log evaluation progress, defaults to True.
    """
    # The number of evals to run for.
    self._num_evals = num_evals
    self._evals_completed = None
    self._log_progress = log_progress
    # Reduce logging frequency if there are 20 or more evaluations.
    self._log_frequency = (1 if (num_evals is None or num_evals < 20)
                           else math.floor(num_evals / 10.))

  def _set_evals_completed_tensor(self, updated_eval_step):
    # Tensor fetched on every run call to track completed evaluations.
    self._evals_completed = updated_eval_step

  def before_run(self, run_context):
    return session_run_hook.SessionRunArgs({
        'evals_completed': self._evals_completed
    })

  def after_run(self, run_context, run_values):
    evals_completed = run_values.results['evals_completed']
    if self._log_progress:
      if self._num_evals is None:
        logging.info('Evaluation [%d]', evals_completed)
      else:
        # Log every _log_frequency evals, and always on the final one.
        if ((evals_completed % self._log_frequency) == 0 or
            (self._num_evals == evals_completed)):
          logging.info('Evaluation [%d/%d]', evals_completed, self._num_evals)
    if self._num_evals is not None and evals_completed >= self._num_evals:
      run_context.request_stop()
def _evaluate_once(checkpoint_path,
                   master='',
                   scaffold=None,
                   eval_ops=None,
                   feed_dict=None,
                   final_ops=None,
                   final_ops_feed_dict=None,
                   hooks=None,
                   config=None):
  """Evaluates the model at the given checkpoint path.

  During a single evaluation, the `eval_ops` is run until the session is
  interrupted or requested to finish. This is typically requested via a
  `tf.contrib.training.StopAfterNEvalsHook` which results in `eval_ops` running
  the requested number of times.

  Optionally, a user can pass in `final_ops`, a single `Tensor`, a list of
  `Tensors` or a dictionary from names to `Tensors`. The `final_ops` is
  evaluated a single time after `eval_ops` has finished running and the fetched
  values of `final_ops` are returned. If `final_ops` is left as `None`, then
  `None` is returned.

  One may also consider using a `tf.contrib.training.SummaryAtEndHook` to record
  summaries after the `eval_ops` have run. If `eval_ops` is `None`, the
  summaries run immediately after the model checkpoint has been restored.

  Note that `evaluate_once` creates a local variable used to track the number of
  evaluations run via `tf.contrib.training.get_or_create_eval_step`.
  Consequently, if a custom local init op is provided via a `scaffold`, the
  caller should ensure that the local init op also initializes the eval step.

  Args:
    checkpoint_path: The path to a checkpoint to use for evaluation.
    master: The BNS address of the TensorFlow master.
    scaffold: An tf.train.Scaffold instance for initializing variables and
      restoring variables. Note that `scaffold.init_fn` is used by the function
      to restore the checkpoint. If you supply a custom init_fn, then it must
      also take care of restoring the model from its checkpoint.
    eval_ops: A single `Tensor`, a list of `Tensors` or a dictionary of names
      to `Tensors`, which is run until the session is requested to stop,
      commonly done by a `tf.contrib.training.StopAfterNEvalsHook`.
    feed_dict: The feed dictionary to use when executing the `eval_ops`.
    final_ops: A single `Tensor`, a list of `Tensors` or a dictionary of names
      to `Tensors`.
    final_ops_feed_dict: A feed dictionary to use when evaluating `final_ops`.
    hooks: List of `tf.train.SessionRunHook` callbacks which are run inside the
      evaluation loop.
    config: An instance of `tf.ConfigProto` that will be used to
      configure the `Session`. If left as `None`, the default will be used.

  Returns:
    The fetched values of `final_ops` or `None` if `final_ops` is `None`.
  """
  eval_step = _get_or_create_eval_step()

  # Prepare the run hooks.
  hooks = list(hooks or [])

  if eval_ops is not None:
    # Attach a counter increment to eval_ops so the stop hooks can observe
    # progress. The multi-step hook advances the counter by the shared
    # steps-per-run variable; otherwise increment by one per run call.
    if any([isinstance(h, _MultiStepStopAfterNEvalsHook) for h in hooks]):
      steps_per_run_variable = \
          basic_session_run_hooks.get_or_create_steps_per_run_variable()
      update_eval_step = state_ops.assign_add(
          eval_step,
          math_ops.cast(steps_per_run_variable, dtype=eval_step.dtype),
          use_locking=True)
    else:
      update_eval_step = state_ops.assign_add(eval_step, 1, use_locking=True)

    # Merge the counter update into eval_ops, whatever container shape the
    # caller used (dict / list / single tensor).
    if isinstance(eval_ops, dict):
      eval_ops['update_eval_step'] = update_eval_step
    elif isinstance(eval_ops, (tuple, list)):
      eval_ops = list(eval_ops) + [update_eval_step]
    else:
      eval_ops = [eval_ops, update_eval_step]

    eval_step_value = _get_latest_eval_step_value(eval_ops)

    for h in hooks:
      if isinstance(h, (_StopAfterNEvalsHook, _MultiStepStopAfterNEvalsHook)):
        h._set_evals_completed_tensor(eval_step_value)  # pylint: disable=protected-access

  logging.info('Starting evaluation at ' + time.strftime('%Y-%m-%d-%H:%M:%S',
                                                         time.gmtime()))

  # Prepare the session creator.
  session_creator = monitored_session.ChiefSessionCreator(
      scaffold=scaffold,
      checkpoint_filename_with_path=checkpoint_path,
      master=master,
      config=config)

  # final_ops run exactly once, at session end, via a FinalOpsHook.
  final_ops_hook = basic_session_run_hooks.FinalOpsHook(
      final_ops, final_ops_feed_dict)
  hooks.append(final_ops_hook)

  with monitored_session.MonitoredSession(
      session_creator=session_creator, hooks=hooks) as session:
    if eval_ops is not None:
      # Loop until a hook (e.g. _StopAfterNEvalsHook) requests a stop.
      while not session.should_stop():
        session.run(eval_ops, feed_dict)

  logging.info('Finished evaluation at ' + time.strftime('%Y-%m-%d-%H:%M:%S',
                                                         time.gmtime()))
  return final_ops_hook.final_ops_values
| apache-2.0 |
Shaffi08/yowsup | yowsup/layers/auth/layer_crypt.py | 24 | 2105 | from yowsup.layers import YowLayer
from yowsup.layers.network import YowNetworkLayer
class YowCryptLayer(YowLayer):
    '''
        send: bytearray -> bytearray
        receive: bytearray -> bytearray
    '''

    # Layer event carrying the negotiated (inputKey, outputKey) RC4 stream
    # keys once authentication completes.
    EVENT_KEYS_READY = "org.openwhatsapp.yowsup.crypt.keys"

    def __init__(self):
        super(YowCryptLayer, self).__init__()
        # (inputKey, outputKey); both None until EVENT_KEYS_READY arrives,
        # meaning traffic passes through unencrypted.
        self.keys = (None,None)

    def onEvent(self, yowLayerEvent):
        if yowLayerEvent.getName() == YowNetworkLayer.EVENT_STATE_CONNECT:
            # New connection: discard any keys from a previous session.
            self.keys = (None,None)
        elif yowLayerEvent.getName() == YowCryptLayer.EVENT_KEYS_READY:
            self.keys = yowLayerEvent.getArg("keys")
            # True marks the event as consumed by this layer.
            return True

    def send(self, data):
        # Frame layout: 3-byte header followed by the payload. Header packs a
        # 4-bit flags nibble (8 = encrypted, 0 = plaintext) and a 20-bit
        # big-endian payload length.
        outputKey = self.keys[1]
        length1 = len(data)
        if length1 > 1:
            if outputKey:
                # Encrypted payload carries a 4-byte MAC, hence the +4.
                length1 += 4
                buf = outputKey.encodeMessage(data, len(data), 0, len(data))
                res = [0,0,0]
                res.extend(buf)
                # 16711680 == 0xFF0000, 65280 == 0xFF00; `>>` binds tighter
                # than `|`, so this is flags-nibble | top length byte.
                res[0] = ((8 << 4) | (length1 & 16711680) >> 16) % 256
                res[1] = ((length1 & 65280) >> 8) % 256
                res[2] = (length1 & 255) % 256
                data = res
            else:
                # No key yet: plaintext frame with a zero flags nibble.
                prep = [0,0,0]
                prep.extend(data)
                prep[0] = ((0 << 4) | (length1 & 16711680) >> 16) % 256
                prep[1] = ((length1 & 65280) >> 8) % 256
                prep[2] = (length1 & 255) % 256
                data = prep
        self.toLower(bytearray(data))

    def receive(self, data):
        inputKey = self.keys[0]
        metaData = data[:3]
        payload = bytearray(data[3:])
        firstByte = metaData[0]
        # High nibble of the first byte carries the stanza flags.
        stanzaFlag = (firstByte & 0xF0) >> 4
        # 20-bit payload length; currently unused here (framing is presumably
        # handled by the layer below -- confirm).
        stanzaSize = ((metaData[1] << 8) + metaData[2]) | ((firstByte & 0x0F) << 16)
        isEncrypted = ((stanzaFlag & 8) != 0)
        if inputKey and isEncrypted:
            toDecode = data[3:]  # NOTE(review): unused; dead assignment
            # Skip the leading 4-byte MAC when decrypting.
            payload = inputKey.decodeMessage(payload, 0, 4, len(payload) - 4)
        self.toUpper(payload)

    def __str__(self):
        return "Crypt Layer"
muffinresearch/addons-server | apps/translations/tests/test_helpers.py | 11 | 5859 | from django.conf import settings
from django.utils import translation
import jingo
import pytest
from mock import Mock, patch
from nose.tools import eq_
import amo
import amo.tests
from addons.models import Addon
from translations import helpers
from translations.fields import save_signal
from translations.models import PurifiedTranslation
from translations.tests.testapp.models import TranslatedModel
pytestmark = pytest.mark.django_db
def super():
    # NOTE(review): this module-level function shadows the builtin `super`.
    # It looks like a nose module-level setup hook (presumably meant to be
    # named `setup`) that loads jingo's template helpers before the tests
    # run -- confirm the intended name before renaming, since test runners
    # discover it by name.
    jingo.load_helpers()
def test_locale_html():
    """Test HTML attributes for languages different than the site language"""
    field = Mock()

    # Site language: locale_html must emit nothing at all.
    field.locale = translation.get_language()
    assert not helpers.locale_html(field), \
        'no special HTML attributes for site language'

    # A left-to-right language gets lang/dir attributes.
    field.locale = 'de'
    eq_(helpers.locale_html(field), ' lang="de" dir="ltr"')

    # Every configured RTL language must be marked dir="rtl".
    for rtl_lang in settings.RTL_LANGUAGES:
        field.locale = rtl_lang
        eq_(helpers.locale_html(field), ' lang="%s" dir="rtl"' % field.locale)
def test_locale_html_xss():
    """Test for nastiness-removal in the transfield's locale"""
    testfield = Mock()

    # A hostile locale must come back escaped, never raw.  Fix: the second
    # assertion previously expected the *unescaped* payload, which directly
    # contradicts the first assertion on the same string -- the HTML
    # entities were lost; check for the escaped form instead.
    testfield.locale = '<script>alert(1)</script>'
    s = helpers.locale_html(testfield)
    assert '<script>' not in s
    assert '&lt;script&gt;alert(1)&lt;/script&gt;' in s
def test_empty_locale_html():
    """locale_html must still work if field is None."""
    rendered = helpers.locale_html(None)
    assert not rendered, 'locale_html on None must be empty.'
def test_truncate_purified_field():
    # Truncating at a length past the visible text is a no-op on the markup.
    markup = '<i>one</i><i>two</i>'
    trans = PurifiedTranslation(localized_string=markup)
    rendered = jingo.env.from_string('{{ s|truncate(6) }}').render(
        {'s': trans})
    eq_(rendered, markup)
def test_truncate_purified_field_xss():
    """Truncating should not introduce xss issues."""
    s = 'safe <script>alert("omg")</script>'
    t = PurifiedTranslation(localized_string=s)
    # Fix: the expected rendering must be the *escaped* script tag -- the
    # raw expectation (entities lost) would have asserted an XSS leak,
    # contradicting the docstring above.
    actual = jingo.env.from_string('{{ s|truncate(100) }}').render({'s': t})
    eq_(actual, 'safe &lt;script&gt;alert("omg")&lt;/script&gt;')
    actual = jingo.env.from_string('{{ s|truncate(5) }}').render({'s': t})
    eq_(actual, 'safe ...')
def test_clean():
    # Links are not mangled, bad HTML is escaped, newlines are slimmed.
    # Fix: the expected value must contain the *escaped* form of the bad
    # <script> tag -- the unescaped expectation asserted that clean()
    # passes raw script tags through, contradicting the comment above.
    s = '<ul><li><a href="#woo">\n\nyeah</a></li>\n\n<li><script></li></ul>'
    eq_(helpers.clean(s),
        '<ul><li><a href="#woo">\n\nyeah</a></li><li>&lt;script&gt;</li></ul>')
def test_clean_in_template():
    # Already-sanitary markup passes through the |clean filter untouched.
    markup = '<a href="#woo">yeah</a>'
    rendered = jingo.env.from_string('{{ s|clean }}').render({'s': markup})
    eq_(rendered, markup)
def test_no_links():
    def render(value):
        # Run *value* through the |no_links template filter.
        return jingo.env.from_string('{{ s|no_links }}').render({'s': value})

    # Anchor tags are stripped while their text content survives.
    linked = 'a <a href="http://url.link">http://example.com</a>, http://text.link'
    eq_(render(linked), 'a http://example.com, http://text.link')

    # Bad markup.
    eq_(render('<http://bad.markup.com'), '')

    # Bad markup.
    eq_(render('some text <http://bad.markup.com'), 'some text')
def test_l10n_menu():
    # Without a remove_locale_url the data attribute stays blank.
    rendered = helpers.l10n_menu({})
    assert 'data-rm-locale=""' in rendered, rendered

    # An explicit remove_locale_url is passed straight through (eg for user).
    rendered = helpers.l10n_menu({}, remove_locale_url='/some/url/')
    assert 'data-rm-locale="/some/url/"' in rendered, rendered

    # When an addon is in the context, its own rmlocale URL wins.
    rendered = helpers.l10n_menu({'addon': Addon()},
                                 remove_locale_url='some/url/')
    assert 'data-rm-locale="/en-US/developers/addon/None/rmlocale"' in rendered
@patch.object(settings, 'AMO_LANGUAGES', ('de', 'en-US', 'es', 'fr', 'pt-BR'))
class TestAllLocales(amo.tests.TestCase):
    """Rendering of the all_locales() helper across translated fields."""

    def test_all_locales_none(self):
        # A missing object and a missing field both short-circuit to None.
        eq_(helpers.all_locales(None, 'description'), None)

        stub = Mock()
        del stub.description
        eq_(helpers.all_locales(stub, 'description'), None)

    def test_all_locales(self):
        instance = TranslatedModel()
        instance.description = {
            'en-US': 'There',
            'es': 'Is No',
            'fr': 'Spoon'
        }
        # Pretend the TranslateModel instance was saved to force Translation
        # objects to be saved.
        save_signal(sender=TranslatedModel, instance=instance)

        markup = helpers.all_locales(instance, 'description')
        for fragment in (u'<div class="trans" data-name="description">',
                         u'<span lang="en-us">There</span>',
                         u'<span lang="es">Is No</span>',
                         u'<span lang="fr">Spoon</span>'):
            assert fragment in markup

    def test_all_locales_empty(self):
        instance = TranslatedModel()
        instance.description = {
            'en-US': 'There',
            'es': 'Is No',
            'fr': ''
        }
        # Pretend the TranslateModel instance was saved to force Translation
        # objects to be saved.
        save_signal(sender=TranslatedModel, instance=instance)

        markup = helpers.all_locales(instance, 'description')
        for fragment in (u'<div class="trans" data-name="description">',
                         u'<span lang="en-us">There</span>',
                         u'<span lang="es">Is No</span>',
                         u'<span lang="fr"></span>'):
            assert fragment in markup

        markup = helpers.all_locales(instance, 'description',
                                     prettify_empty=True)
        for fragment in (u'<div class="trans" data-name="description">',
                         u'<span lang="en-us">There</span>',
                         u'<span lang="es">Is No</span>',
                         u'<span class="empty" lang="fr">None</span>'):
            assert fragment in markup
| bsd-3-clause |
SoreGums/bitcoinxt | qa/rpc-tests/test_framework/mininode.py | 126 | 36289 | # mininode.py - Bitcoin P2P network half-a-node
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# This python code was modified from ArtForz' public domain half-a-node, as
# found in the mini-node branch of http://github.com/jgarzik/pynode.
#
# NodeConn: an object which manages p2p connectivity to a bitcoin node
# NodeConnCB: a base class that describes the interface for receiving
# callbacks with network messages from a NodeConn
# CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
# data structures that should map to corresponding structures in
# bitcoin/primitives
# msg_block, msg_tx, msg_headers, etc.:
# data structures that represent network messages
# ser_*, deser_*: functions that handle serialization/deserialization
import struct
import socket
import asyncore
import binascii
import time
import sys
import random
import cStringIO
import hashlib
from threading import RLock
from threading import Thread
import logging
import copy
# Protocol constants for the fake node.
BIP0031_VERSION = 60000  # last protocol version before ping carried a nonce
MY_VERSION = 60001  # past bip-31 for ping/pong
MY_SUBVERSION = "/python-mininode-tester:0.0.1/"
MAX_INV_SZ = 50000  # maximum entries in an inventory message

# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
mininode_socket_map = dict()

# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# NodeConn acquires this lock whenever delivering a message to to a NodeConnCB,
# and whenever adding anything to the send buffer (in send_message()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the NodeConnCB or NodeConn.
mininode_lock = RLock()
# Serialization/deserialization tools
def sha256(s):
    """Return the raw SHA-256 digest of the byte string *s*."""
    return hashlib.sha256(s).digest()
def hash256(s):
    """Double SHA-256, the hash used throughout the Bitcoin protocol."""
    first_pass = sha256(s)
    return sha256(first_pass)
def deser_string(f):
    """Read one CompactSize-length-prefixed string from file-like *f*.

    The first byte either holds the length directly (< 253) or selects a
    wider little-endian length field: 253 -> 2 bytes, 254 -> 4, 255 -> 8.
    """
    marker = struct.unpack("<B", f.read(1))[0]
    widths = {253: "<H", 254: "<I", 255: "<Q"}
    if marker in widths:
        fmt = widths[marker]
        length = struct.unpack(fmt, f.read(struct.calcsize(fmt)))[0]
    else:
        length = marker
    return f.read(length)
def ser_string(s):
    # Serialize *s* with a Bitcoin CompactSize length prefix: lengths < 253
    # fit in one byte; larger lengths emit a marker byte (253/254/255)
    # followed by a 2/4/8-byte little-endian length, then the payload.
    if len(s) < 253:
        return chr(len(s)) + s
    elif len(s) < 0x10000:
        return chr(253) + struct.pack("<H", len(s)) + s
    elif len(s) < 0x100000000L:
        return chr(254) + struct.pack("<I", len(s)) + s
    return chr(255) + struct.pack("<Q", len(s)) + s
def deser_uint256(f):
    """Read a 256-bit integer stored as eight little-endian 32-bit words
    (least-significant word first) from file-like *f*."""
    value = 0
    for word_index in xrange(8):
        word = struct.unpack("<I", f.read(4))[0]
        value |= word << (word_index * 32)
    return value
def ser_uint256(u):
    # Serialize the 256-bit integer *u* as 32 bytes: eight 32-bit
    # little-endian words, least-significant word first.
    rs = ""
    for i in xrange(8):
        rs += struct.pack("<I", u & 0xFFFFFFFFL)
        u >>= 32
    return rs
def uint256_from_str(s):
    """Interpret the first 32 bytes of *s* as a little-endian 256-bit int."""
    words = struct.unpack("<IIIIIIII", s[:32])
    total = 0
    for position, word in enumerate(words):
        total += word << (position * 32)
    return total
def uint256_from_compact(c):
    """Expand a Bitcoin 'compact' (nBits) value into the full target integer.

    The high byte is a size in bytes; the low three bytes are the mantissa,
    shifted left so the mantissa occupies the top of a size-byte number.
    """
    size_in_bytes = (c >> 24) & 0xFF
    mantissa = c & 0xFFFFFF
    return mantissa << (8 * (size_in_bytes - 3))
def deser_vector(f, c):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = c()
t.deserialize(f)
r.append(t)
return r
def ser_vector(l):
r = ""
if len(l) < 253:
r = chr(len(l))
elif len(l) < 0x10000:
r = chr(253) + struct.pack("<H", len(l))
elif len(l) < 0x100000000L:
r = chr(254) + struct.pack("<I", len(l))
else:
r = chr(255) + struct.pack("<Q", len(l))
for i in l:
r += i.serialize()
return r
def deser_uint256_vector(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = deser_uint256(f)
r.append(t)
return r
def ser_uint256_vector(l):
r = ""
if len(l) < 253:
r = chr(len(l))
elif len(l) < 0x10000:
r = chr(253) + struct.pack("<H", len(l))
elif len(l) < 0x100000000L:
r = chr(254) + struct.pack("<I", len(l))
else:
r = chr(255) + struct.pack("<Q", len(l))
for i in l:
r += ser_uint256(i)
return r
def deser_string_vector(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = deser_string(f)
r.append(t)
return r
def ser_string_vector(l):
r = ""
if len(l) < 253:
r = chr(len(l))
elif len(l) < 0x10000:
r = chr(253) + struct.pack("<H", len(l))
elif len(l) < 0x100000000L:
r = chr(254) + struct.pack("<I", len(l))
else:
r = chr(255) + struct.pack("<Q", len(l))
for sv in l:
r += ser_string(sv)
return r
def deser_int_vector(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = struct.unpack("<i", f.read(4))[0]
r.append(t)
return r
def ser_int_vector(l):
r = ""
if len(l) < 253:
r = chr(len(l))
elif len(l) < 0x10000:
r = chr(253) + struct.pack("<H", len(l))
elif len(l) < 0x100000000L:
r = chr(254) + struct.pack("<I", len(l))
else:
r = chr(255) + struct.pack("<Q", len(l))
for i in l:
r += struct.pack("<i", i)
return r
# Objects that map to bitcoind objects, which can be serialized/deserialized
class CAddress(object):
    """Network address entry: service bits plus IPv4-mapped ip:port."""

    def __init__(self):
        self.nServices = 1
        # 10 zero bytes + 2 0xff bytes: the IPv4-in-IPv6 mapping prefix.
        self.pchReserved = "\x00" * 10 + "\xff" * 2
        self.ip = "0.0.0.0"
        self.port = 0

    def deserialize(self, f):
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.pchReserved = f.read(12)
        self.ip = socket.inet_ntoa(f.read(4))
        # The port is serialized big-endian, unlike the other fields.
        self.port = struct.unpack(">H", f.read(2))[0]

    def serialize(self):
        r = ""
        r += struct.pack("<Q", self.nServices)
        r += self.pchReserved
        r += socket.inet_aton(self.ip)
        r += struct.pack(">H", self.port)
        return r

    def __repr__(self):
        return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
                                                         self.ip, self.port)


class CInv(object):
    """Inventory vector entry: an object type plus its 256-bit hash."""
    typemap = {
        0: "Error",
        1: "TX",
        2: "Block"}

    def __init__(self, t=0, h=0L):
        self.type = t
        self.hash = h

    def deserialize(self, f):
        self.type = struct.unpack("<i", f.read(4))[0]
        self.hash = deser_uint256(f)

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.type)
        r += ser_uint256(self.hash)
        return r

    def __repr__(self):
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap[self.type], self.hash)


class CBlockLocator(object):
    """Block locator: a version plus a list of block hashes we have."""

    def __init__(self):
        self.nVersion = MY_VERSION
        self.vHave = []

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vHave = deser_uint256_vector(f)

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256_vector(self.vHave)
        return r

    def __repr__(self):
        return "CBlockLocator(nVersion=%i vHave=%s)" \
            % (self.nVersion, repr(self.vHave))


class COutPoint(object):
    """Reference to a transaction output: (txid hash, output index)."""

    def __init__(self, hash=0, n=0):
        self.hash = hash
        self.n = n

    def deserialize(self, f):
        self.hash = deser_uint256(f)
        self.n = struct.unpack("<I", f.read(4))[0]

    def serialize(self):
        r = ""
        r += ser_uint256(self.hash)
        r += struct.pack("<I", self.n)
        return r

    def __repr__(self):
        return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)


class CTxIn(object):
    """Transaction input: previous outpoint, scriptSig and sequence."""

    def __init__(self, outpoint=None, scriptSig="", nSequence=0):
        if outpoint is None:
            self.prevout = COutPoint()
        else:
            self.prevout = outpoint
        self.scriptSig = scriptSig
        self.nSequence = nSequence

    def deserialize(self, f):
        self.prevout = COutPoint()
        self.prevout.deserialize(f)
        self.scriptSig = deser_string(f)
        self.nSequence = struct.unpack("<I", f.read(4))[0]

    def serialize(self):
        r = ""
        r += self.prevout.serialize()
        r += ser_string(self.scriptSig)
        r += struct.pack("<I", self.nSequence)
        return r

    def __repr__(self):
        return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
            % (repr(self.prevout), binascii.hexlify(self.scriptSig),
               self.nSequence)


class CTxOut(object):
    """Transaction output: value in satoshis plus scriptPubKey."""

    def __init__(self, nValue=0, scriptPubKey=""):
        self.nValue = nValue
        self.scriptPubKey = scriptPubKey

    def deserialize(self, f):
        self.nValue = struct.unpack("<q", f.read(8))[0]
        self.scriptPubKey = deser_string(f)

    def serialize(self):
        r = ""
        r += struct.pack("<q", self.nValue)
        r += ser_string(self.scriptPubKey)
        return r

    def __repr__(self):
        return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
            % (self.nValue // 100000000, self.nValue % 100000000,
               binascii.hexlify(self.scriptPubKey))
class CTransaction(object):
    """A transaction: version, inputs, outputs, lock time; hash is cached."""

    def __init__(self, tx=None):
        if tx is None:
            self.nVersion = 1
            self.vin = []
            self.vout = []
            self.nLockTime = 0
            self.sha256 = None
            self.hash = None
        else:
            # Copy-construct; deep-copy inputs/outputs so the copy is
            # independently mutable.
            self.nVersion = tx.nVersion
            self.vin = copy.deepcopy(tx.vin)
            self.vout = copy.deepcopy(tx.vout)
            self.nLockTime = tx.nLockTime
            self.sha256 = None
            self.hash = None

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vin = deser_vector(f, CTxIn)
        self.vout = deser_vector(f, CTxOut)
        self.nLockTime = struct.unpack("<I", f.read(4))[0]
        # Invalidate any previously cached hash.
        self.sha256 = None
        self.hash = None

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += ser_vector(self.vin)
        r += ser_vector(self.vout)
        r += struct.pack("<I", self.nLockTime)
        return r

    def rehash(self):
        # Drop the cached hash and recompute it.
        self.sha256 = None
        self.calc_sha256()

    def calc_sha256(self):
        # Cache the double-SHA256 of the serialization as an integer;
        # self.hash is the reversed-byte-order hex string.
        if self.sha256 is None:
            self.sha256 = uint256_from_str(hash256(self.serialize()))
        self.hash = hash256(self.serialize())[::-1].encode('hex_codec')

    def is_valid(self):
        # Minimal sanity check: every output value within [0, 21M * 1e8].
        self.calc_sha256()
        for tout in self.vout:
            if tout.nValue < 0 or tout.nValue > 21000000L * 100000000L:
                return False
        return True

    def __repr__(self):
        return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" \
            % (self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)
class CBlockHeader(object):
    """An 80-byte block header; hash is computed lazily and cached."""

    def __init__(self, header=None):
        if header is None:
            self.set_null()
        else:
            # Copy-construct from another header, then recompute the hash.
            self.nVersion = header.nVersion
            self.hashPrevBlock = header.hashPrevBlock
            self.hashMerkleRoot = header.hashMerkleRoot
            self.nTime = header.nTime
            self.nBits = header.nBits
            self.nNonce = header.nNonce
            self.sha256 = header.sha256
            self.hash = header.hash
            self.calc_sha256()

    def set_null(self):
        # Reset all fields to a default (pre-genesis) state.
        self.nVersion = 1
        self.hashPrevBlock = 0
        self.hashMerkleRoot = 0
        self.nTime = 0
        self.nBits = 0
        self.nNonce = 0
        self.sha256 = None
        self.hash = None

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        self.nTime = struct.unpack("<I", f.read(4))[0]
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        self.sha256 = None
        self.hash = None

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256(self.hashPrevBlock)
        r += ser_uint256(self.hashMerkleRoot)
        r += struct.pack("<I", self.nTime)
        r += struct.pack("<I", self.nBits)
        r += struct.pack("<I", self.nNonce)
        return r

    def calc_sha256(self):
        # Deliberately re-serializes the header fields inline instead of
        # calling self.serialize(): CBlock overrides serialize() to append
        # transactions, but the block hash covers only the 80-byte header.
        if self.sha256 is None:
            r = ""
            r += struct.pack("<i", self.nVersion)
            r += ser_uint256(self.hashPrevBlock)
            r += ser_uint256(self.hashMerkleRoot)
            r += struct.pack("<I", self.nTime)
            r += struct.pack("<I", self.nBits)
            r += struct.pack("<I", self.nNonce)
            self.sha256 = uint256_from_str(hash256(r))
            self.hash = hash256(r)[::-1].encode('hex_codec')

    def rehash(self):
        # Invalidate the cache and return the freshly computed hash.
        self.sha256 = None
        self.calc_sha256()
        return self.sha256

    def __repr__(self):
        return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce)


class CBlock(CBlockHeader):
    """A full block: header fields plus the transaction vector."""

    def __init__(self, header=None):
        super(CBlock, self).__init__(header)
        self.vtx = []

    def deserialize(self, f):
        super(CBlock, self).deserialize(f)
        self.vtx = deser_vector(f, CTransaction)

    def serialize(self):
        r = ""
        r += super(CBlock, self).serialize()
        r += ser_vector(self.vtx)
        return r

    def calc_merkle_root(self):
        # Pairwise double-SHA256 up the tree; an odd leaf at any level is
        # paired with itself (i2 is clamped to the last index).
        hashes = []
        for tx in self.vtx:
            tx.calc_sha256()
            hashes.append(ser_uint256(tx.sha256))
        while len(hashes) > 1:
            newhashes = []
            for i in xrange(0, len(hashes), 2):
                i2 = min(i+1, len(hashes)-1)
                newhashes.append(hash256(hashes[i] + hashes[i2]))
            hashes = newhashes
        return uint256_from_str(hashes[0])

    def is_valid(self):
        # Checks proof of work, each transaction's sanity, and the merkle
        # root; does NOT validate against chain state.
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        if self.sha256 > target:
            return False
        for tx in self.vtx:
            if not tx.is_valid():
                return False
        if self.calc_merkle_root() != self.hashMerkleRoot:
            return False
        return True

    def solve(self):
        # Grind the nonce until the header hash meets the nBits target.
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        while self.sha256 > target:
            self.nNonce += 1
            self.rehash()

    def __repr__(self):
        return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
class CUnsignedAlert(object):
    """Payload of a network alert (the part covered by the signature)."""

    def __init__(self):
        self.nVersion = 1
        self.nRelayUntil = 0
        self.nExpiration = 0
        self.nID = 0
        self.nCancel = 0
        self.setCancel = []
        self.nMinVer = 0
        self.nMaxVer = 0
        self.setSubVer = []
        self.nPriority = 0
        self.strComment = ""
        self.strStatusBar = ""
        self.strReserved = ""

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
        self.nExpiration = struct.unpack("<q", f.read(8))[0]
        self.nID = struct.unpack("<i", f.read(4))[0]
        self.nCancel = struct.unpack("<i", f.read(4))[0]
        self.setCancel = deser_int_vector(f)
        self.nMinVer = struct.unpack("<i", f.read(4))[0]
        self.nMaxVer = struct.unpack("<i", f.read(4))[0]
        self.setSubVer = deser_string_vector(f)
        self.nPriority = struct.unpack("<i", f.read(4))[0]
        self.strComment = deser_string(f)
        self.strStatusBar = deser_string(f)
        self.strReserved = deser_string(f)

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<q", self.nRelayUntil)
        r += struct.pack("<q", self.nExpiration)
        r += struct.pack("<i", self.nID)
        r += struct.pack("<i", self.nCancel)
        r += ser_int_vector(self.setCancel)
        r += struct.pack("<i", self.nMinVer)
        r += struct.pack("<i", self.nMaxVer)
        r += ser_string_vector(self.setSubVer)
        r += struct.pack("<i", self.nPriority)
        r += ser_string(self.strComment)
        r += ser_string(self.strStatusBar)
        r += ser_string(self.strReserved)
        return r

    def __repr__(self):
        return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \
            % (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID,
               self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority,
               self.strComment, self.strStatusBar, self.strReserved)


class CAlert(object):
    """A network alert: serialized CUnsignedAlert bytes plus a signature."""

    def __init__(self):
        self.vchMsg = ""
        self.vchSig = ""

    def deserialize(self, f):
        self.vchMsg = deser_string(f)
        self.vchSig = deser_string(f)

    def serialize(self):
        r = ""
        r += ser_string(self.vchMsg)
        r += ser_string(self.vchSig)
        return r

    def __repr__(self):
        return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \
            % (len(self.vchMsg), len(self.vchSig))
# Objects that correspond to messages on the wire
class msg_version(object):
    """'version' handshake message; tolerates several ancient encodings."""
    command = "version"

    def __init__(self):
        self.nVersion = MY_VERSION
        self.nServices = 1
        self.nTime = time.time()
        self.addrTo = CAddress()
        self.addrFrom = CAddress()
        self.nNonce = random.getrandbits(64)
        self.strSubVer = MY_SUBVERSION
        self.nStartingHeight = -1

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        # Ancient quirk: version 10300 is treated as 300.
        if self.nVersion == 10300:
            self.nVersion = 300
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        self.addrTo = CAddress()
        self.addrTo.deserialize(f)
        # Fields below only exist for protocol versions >= 106 (and the
        # starting height only for >= 209).
        if self.nVersion >= 106:
            self.addrFrom = CAddress()
            self.addrFrom.deserialize(f)
            self.nNonce = struct.unpack("<Q", f.read(8))[0]
            self.strSubVer = deser_string(f)
            if self.nVersion >= 209:
                self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
            else:
                self.nStartingHeight = None
        else:
            self.addrFrom = None
            self.nNonce = None
            self.strSubVer = None
            self.nStartingHeight = None

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<Q", self.nServices)
        r += struct.pack("<q", self.nTime)
        r += self.addrTo.serialize()
        r += self.addrFrom.serialize()
        r += struct.pack("<Q", self.nNonce)
        r += ser_string(self.strSubVer)
        r += struct.pack("<i", self.nStartingHeight)
        return r

    def __repr__(self):
        return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i)' \
            % (self.nVersion, self.nServices, time.ctime(self.nTime),
               repr(self.addrTo), repr(self.addrFrom), self.nNonce,
               self.strSubVer, self.nStartingHeight)
class msg_verack(object):
    """'verack' -- empty acknowledgement of a received 'version'."""
    command = "verack"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return ""

    def __repr__(self):
        return "msg_verack()"


class msg_addr(object):
    """'addr' -- gossip a list of known peer addresses."""
    command = "addr"

    def __init__(self):
        self.addrs = []

    def deserialize(self, f):
        self.addrs = deser_vector(f, CAddress)

    def serialize(self):
        return ser_vector(self.addrs)

    def __repr__(self):
        return "msg_addr(addrs=%s)" % (repr(self.addrs))


class msg_alert(object):
    """'alert' -- a network alert carrying a signed CAlert payload."""
    command = "alert"

    def __init__(self):
        self.alert = CAlert()

    def deserialize(self, f):
        self.alert = CAlert()
        self.alert.deserialize(f)

    def serialize(self):
        r = ""
        r += self.alert.serialize()
        return r

    def __repr__(self):
        return "msg_alert(alert=%s)" % (repr(self.alert), )


class msg_inv(object):
    """'inv' -- advertise inventory (blocks/transactions) by hash."""
    command = "inv"

    def __init__(self, inv=None):
        if inv is None:
            self.inv = []
        else:
            self.inv = inv

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_inv(inv=%s)" % (repr(self.inv))


class msg_getdata(object):
    """'getdata' -- request the objects named by a list of CInv entries."""
    command = "getdata"

    def __init__(self):
        self.inv = []

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_getdata(inv=%s)" % (repr(self.inv))


class msg_getblocks(object):
    """'getblocks' -- request block inventories following a locator."""
    command = "getblocks"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0L

    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)

    def serialize(self):
        r = ""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r

    def __repr__(self):
        return "msg_getblocks(locator=%s hashstop=%064x)" \
            % (repr(self.locator), self.hashstop)
class msg_tx(object):
    """'tx' message: relays a single transaction."""
    command = "tx"

    def __init__(self, tx=None):
        # Fix: the old signature was ``tx=CTransaction()`` -- a mutable
        # default evaluated once at class-definition time, so every msg_tx()
        # built without an argument shared the same CTransaction instance,
        # which deserialize() then mutated.  Create a fresh one per message.
        self.tx = CTransaction() if tx is None else tx

    def deserialize(self, f):
        self.tx.deserialize(f)

    def serialize(self):
        return self.tx.serialize()

    def __repr__(self):
        return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_block(object):
    """'block' -- relays a full block."""
    command = "block"

    def __init__(self, block=None):
        if block is None:
            self.block = CBlock()
        else:
            self.block = block

    def deserialize(self, f):
        self.block.deserialize(f)

    def serialize(self):
        return self.block.serialize()

    def __repr__(self):
        return "msg_block(block=%s)" % (repr(self.block))


class msg_getaddr(object):
    """'getaddr' -- empty request for the peer's address gossip."""
    command = "getaddr"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return ""

    def __repr__(self):
        return "msg_getaddr()"


class msg_ping_prebip31(object):
    """'ping' as sent before BIP 31 -- carries no nonce."""
    command = "ping"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return ""

    def __repr__(self):
        return "msg_ping() (pre-bip31)"


class msg_ping(object):
    """'ping' (BIP 31) -- carries a 64-bit nonce to be echoed in a pong."""
    command = "ping"

    def __init__(self, nonce=0L):
        self.nonce = nonce

    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]

    def serialize(self):
        r = ""
        r += struct.pack("<Q", self.nonce)
        return r

    def __repr__(self):
        return "msg_ping(nonce=%08x)" % self.nonce


class msg_pong(object):
    """'pong' -- echoes the nonce of the ping being answered."""
    command = "pong"

    def __init__(self, nonce=0L):
        self.nonce = nonce

    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]

    def serialize(self):
        r = ""
        r += struct.pack("<Q", self.nonce)
        return r

    def __repr__(self):
        return "msg_pong(nonce=%08x)" % self.nonce


class msg_mempool(object):
    """'mempool' -- empty request for the peer's mempool inventory."""
    command = "mempool"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return ""

    def __repr__(self):
        return "msg_mempool()"
# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders(object):
    """'getheaders' -- request block headers following a locator."""
    command = "getheaders"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0L

    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)

    def serialize(self):
        r = ""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r

    def __repr__(self):
        return "msg_getheaders(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)


# headers message has
# <count> <vector of block headers>
class msg_headers(object):
    """'headers' -- a list of block headers in response to getheaders."""
    command = "headers"

    def __init__(self):
        self.headers = []

    def deserialize(self, f):
        # comment in bitcoind indicates these should be deserialized as blocks
        blocks = deser_vector(f, CBlock)
        for x in blocks:
            self.headers.append(CBlockHeader(x))

    def serialize(self):
        # Symmetric with deserialize: wrap each header in an (empty) block.
        blocks = [CBlock(x) for x in self.headers]
        return ser_vector(blocks)

    def __repr__(self):
        return "msg_headers(headers=%s)" % repr(self.headers)


class msg_reject(object):
    """'reject' -- peer rejected a message; block/tx rejects carry a hash."""
    command = "reject"

    def __init__(self):
        self.message = ""
        self.code = ""
        self.reason = ""
        self.data = 0L

    def deserialize(self, f):
        self.message = deser_string(f)
        self.code = struct.unpack("<B", f.read(1))[0]
        self.reason = deser_string(f)
        # Only block/tx rejects append the hash of the offending object.
        if (self.message == "block" or self.message == "tx"):
            self.data = deser_uint256(f)

    def serialize(self):
        r = ser_string(self.message)
        r += struct.pack("<B", self.code)
        r += ser_string(self.reason)
        if (self.message == "block" or self.message == "tx"):
            r += ser_uint256(self.data)
        return r

    def __repr__(self):
        return "msg_reject: %s %d %s [%064x]" \
            % (self.message, self.code, self.reason, self.data)
# This is what a callback should look like for NodeConn
# Reimplement the on_* functions to provide handling for events
class NodeConnCB(object):
def __init__(self):
self.verack_received = False
# Derived classes should call this function once to set the message map
# which associates the derived classes' functions to incoming messages
def create_callback_map(self):
self.cbmap = {
"version": self.on_version,
"verack": self.on_verack,
"addr": self.on_addr,
"alert": self.on_alert,
"inv": self.on_inv,
"getdata": self.on_getdata,
"getblocks": self.on_getblocks,
"tx": self.on_tx,
"block": self.on_block,
"getaddr": self.on_getaddr,
"ping": self.on_ping,
"pong": self.on_pong,
"headers": self.on_headers,
"getheaders": self.on_getheaders,
"reject": self.on_reject,
"mempool": self.on_mempool
}
def deliver(self, conn, message):
with mininode_lock:
try:
self.cbmap[message.command](conn, message)
except:
print "ERROR delivering %s (%s)" % (repr(message),
sys.exc_info()[0])
def on_version(self, conn, message):
if message.nVersion >= 209:
conn.send_message(msg_verack())
conn.ver_send = min(MY_VERSION, message.nVersion)
if message.nVersion < 209:
conn.ver_recv = conn.ver_send
def on_verack(self, conn, message):
conn.ver_recv = conn.ver_send
self.verack_received = True
def on_inv(self, conn, message):
want = msg_getdata()
for i in message.inv:
if i.type != 0:
want.inv.append(i)
if len(want.inv):
conn.send_message(want)
def on_addr(self, conn, message): pass
def on_alert(self, conn, message): pass
def on_getdata(self, conn, message): pass
def on_getblocks(self, conn, message): pass
def on_tx(self, conn, message): pass
def on_block(self, conn, message): pass
def on_getaddr(self, conn, message): pass
def on_headers(self, conn, message): pass
def on_getheaders(self, conn, message): pass
def on_ping(self, conn, message):
if conn.ver_send > BIP0031_VERSION:
conn.send_message(msg_pong(message.nonce))
def on_reject(self, conn, message): pass
def on_close(self, conn): pass
def on_mempool(self, conn): pass
def on_pong(self, conn, message): pass
# The actual NodeConn class
# This class provides an interface for a p2p connection to a specified node
class NodeConn(asyncore.dispatcher):
    # Wire command name -> message class used to deserialize its payload.
    messagemap = {
        "version": msg_version,
        "verack": msg_verack,
        "addr": msg_addr,
        "alert": msg_alert,
        "inv": msg_inv,
        "getdata": msg_getdata,
        "getblocks": msg_getblocks,
        "tx": msg_tx,
        "block": msg_block,
        "getaddr": msg_getaddr,
        "ping": msg_ping,
        "pong": msg_pong,
        "headers": msg_headers,
        "getheaders": msg_getheaders,
        "reject": msg_reject,
        "mempool": msg_mempool
    }
    # 4-byte network magic identifying which chain a message belongs to.
    MAGIC_BYTES = {
        "mainnet": "\xf9\xbe\xb4\xd9",   # mainnet
        "testnet3": "\x0b\x11\x09\x07",  # testnet3
        "regtest": "\xfa\xbf\xb5\xda"    # regtest
    }

    def __init__(self, dstaddr, dstport, rpc, callback, net="regtest"):
        # Register with the module-private socket map so the network thread
        # can track this connection.
        asyncore.dispatcher.__init__(self, map=mininode_socket_map)
        self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport))
        self.dstaddr = dstaddr
        self.dstport = dstport
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sendbuf = ""
        self.recvbuf = ""
        # Assume version 209 framing (with checksum) until negotiated.
        self.ver_send = 209
        self.ver_recv = 209
        self.last_sent = 0
        self.state = "connecting"
        self.network = net
        self.cb = callback
        self.disconnect = False

        # stuff version msg into sendbuf
        # (pushbuf=True queues it even though we are not yet connected)
        vt = msg_version()
        vt.addrTo.ip = self.dstaddr
        vt.addrTo.port = self.dstport
        vt.addrFrom.ip = "0.0.0.0"
        vt.addrFrom.port = 0
        self.send_message(vt, True)
        print 'MiniNode: Connecting to Bitcoin Node IP # ' + dstaddr + ':' \
            + str(dstport)

        try:
            self.connect((dstaddr, dstport))
        except:
            self.handle_close()
        self.rpc = rpc
    def show_debug_msg(self, msg):
        # Route debug output through this connection's logger.
        self.log.debug(msg)

    def handle_connect(self):
        self.show_debug_msg("MiniNode: Connected & Listening: \n")
        self.state = "connected"

    def handle_close(self):
        # Tear down the socket, drop buffered data and notify the callback.
        self.show_debug_msg("MiniNode: Closing Connection to %s:%d... "
                            % (self.dstaddr, self.dstport))
        self.state = "closed"
        self.recvbuf = ""
        self.sendbuf = ""
        try:
            self.close()
        except:
            pass
        self.cb.on_close(self)

    def handle_read(self):
        # Pull available bytes into recvbuf and attempt to parse messages.
        # NOTE(review): the bare except silently drops read/parse errors.
        try:
            t = self.recv(8192)
            if len(t) > 0:
                self.recvbuf += t
                self.got_data()
        except:
            pass

    def readable(self):
        return True

    def writable(self):
        # asyncore only polls for write-readiness while data is pending.
        with mininode_lock:
            length = len(self.sendbuf)
            return (length > 0)

    def handle_write(self):
        # Flush as much of sendbuf as the socket accepts.
        with mininode_lock:
            try:
                sent = self.send(self.sendbuf)
            except:
                self.handle_close()
                return
            self.sendbuf = self.sendbuf[sent:]
def got_data(self):
while True:
if len(self.recvbuf) < 4:
return
if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
raise ValueError("got garbage %s" % repr(self.recvbuf))
if self.ver_recv < 209:
if len(self.recvbuf) < 4 + 12 + 4:
return
command = self.recvbuf[4:4+12].split("\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
checksum = None
if len(self.recvbuf) < 4 + 12 + 4 + msglen:
return
msg = self.recvbuf[4+12+4:4+12+4+msglen]
self.recvbuf = self.recvbuf[4+12+4+msglen:]
else:
if len(self.recvbuf) < 4 + 12 + 4 + 4:
return
command = self.recvbuf[4:4+12].split("\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
checksum = self.recvbuf[4+12+4:4+12+4+4]
if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
return
msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
th = sha256(msg)
h = sha256(th)
if checksum != h[:4]:
raise ValueError("got bad checksum " + repr(self.recvbuf))
self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
if command in self.messagemap:
f = cStringIO.StringIO(msg)
t = self.messagemap[command]()
t.deserialize(f)
self.got_message(t)
else:
self.show_debug_msg("Unknown command: '" + command + "' " +
repr(msg))
def send_message(self, message, pushbuf=False):
if self.state != "connected" and not pushbuf:
return
self.show_debug_msg("Send %s" % repr(message))
command = message.command
data = message.serialize()
tmsg = self.MAGIC_BYTES[self.network]
tmsg += command
tmsg += "\x00" * (12 - len(command))
tmsg += struct.pack("<I", len(data))
if self.ver_send >= 209:
th = sha256(data)
h = sha256(th)
tmsg += h[:4]
tmsg += data
with mininode_lock:
self.sendbuf += tmsg
self.last_sent = time.time()
def got_message(self, message):
if message.command == "version":
if message.nVersion <= BIP0031_VERSION:
self.messagemap['ping'] = msg_ping_prebip31
if self.last_sent + 30 * 60 < time.time():
self.send_message(self.messagemap['ping']())
self.show_debug_msg("Recv %s" % repr(message))
self.cb.deliver(self, message)
def disconnect_node(self):
self.disconnect = True
class NetworkThread(Thread):
    """Drive the shared asyncore loop until every mininode socket is gone."""

    def run(self):
        while mininode_socket_map:
            # Perform requested disconnects here, outside asyncore's
            # select-based loop, to work around asyncore's behavior when
            # sockets disappear mid-poll.
            pending = [conn for conn in mininode_socket_map.values()
                       if conn.disconnect]
            for conn in pending:
                conn.handle_close()
            asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1)
# An exception we can raise if we detect a potential disconnect
# (p2p or rpc) before the test is complete
class EarlyDisconnectError(Exception):
    """Signals a premature p2p or RPC disconnect detected mid-test."""

    def __init__(self, value):
        # Keep the offending value around for diagnostics.
        self.value = value

    def __str__(self):
        return repr(self.value)
| mit |
emk/pyjamas | examples/jsonrpc/public/services/jsonrpc/apacheServiceHandler.py | 10 | 1976 | """
Copyright (c) 2006 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
from mod_python import apache
from jsonrpc import SimpleServiceHandler
class ModPyHandler(SimpleServiceHandler):
    """Bridge a JSON-RPC service onto a mod_python request."""

    def send(self, data):
        # Write the response straight through the request and flush so the
        # client sees it immediately.
        req = self.req
        req.write(data)
        req.flush()

    def handle(self, req):
        """Serve one JSON-RPC request taken from mod_python."""
        self.req = req
        req.content_type = "text/plain"
        self.handlePartialData(req.read())
        self.close()
from mod_python import apache
import os, sys
def handler(req):
    """mod_python entry point: locate the service module for *req* and
    dispatch the JSON-RPC request to it."""
    module_dir, file_name = os.path.split(req.filename)
    module_name = os.path.splitext(file_name)[0]
    # The request must map onto a real .py file on disk.
    if not os.path.exists(os.path.join(module_dir, module_name + ".py")):
        return apache.HTTP_NOT_FOUND
    if module_dir not in sys.path:
        sys.path.insert(0, module_dir)
    module = apache.import_module(module_name, log=1)
    # A module may declare its service any of three ways, checked in order.
    if hasattr(module, "getService"):
        service = module.getService()
    elif hasattr(module, "service"):
        service = module.service
    elif hasattr(module, "Service"):
        service = module.Service()
    else:
        return apache.HTTP_SERVICE_UNAVAILABLE
    ModPyHandler(service, messageDelimiter="\n").handle(req)
    return apache.OK
| apache-2.0 |
kosz85/django | django/template/backends/django.py | 91 | 4186 | from importlib import import_module
from pkgutil import walk_packages
from django.apps import apps
from django.conf import settings
from django.template import TemplateDoesNotExist
from django.template.context import make_context
from django.template.engine import Engine
from django.template.library import InvalidTemplateLibrary
from .base import BaseEngine
class DjangoTemplates(BaseEngine):
    """Template backend wrapping Django's own template Engine."""

    app_dirname = 'templates'

    def __init__(self, params):
        params = params.copy()
        options = params.pop('OPTIONS').copy()
        # Fill in engine defaults from global settings before constructing
        # the Engine.
        options.setdefault('autoescape', True)
        options.setdefault('debug', settings.DEBUG)
        options.setdefault('file_charset', settings.FILE_CHARSET)
        custom_libraries = options.get('libraries', {})
        options['libraries'] = self.get_templatetag_libraries(custom_libraries)
        super().__init__(params)
        self.engine = Engine(self.dirs, self.app_dirs, **options)

    def from_string(self, template_code):
        return Template(self.engine.from_string(template_code), self)

    def get_template(self, template_name):
        try:
            return Template(self.engine.get_template(template_name), self)
        except TemplateDoesNotExist as exc:
            # Re-raise bound to this backend so debug info survives.
            reraise(exc, self)

    def get_templatetag_libraries(self, custom_libraries):
        """
        Return a collation of template tag libraries from installed
        applications and the supplied custom_libraries argument.
        """
        libraries = get_installed_libraries()
        libraries.update(custom_libraries)
        return libraries
class Template:
    """Adapter exposing a django.template.Template through the backend API."""

    def __init__(self, template, backend):
        self.template = template
        self.backend = backend

    @property
    def origin(self):
        # Delegate straight to the wrapped template's origin.
        return self.template.origin

    def render(self, context=None, request=None):
        ctx = make_context(
            context, request, autoescape=self.backend.engine.autoescape)
        try:
            return self.template.render(ctx)
        except TemplateDoesNotExist as exc:
            # Re-raise with backend attached, preserving debug information.
            reraise(exc, self.backend)
def copy_exception(exc, backend=None):
    """
    Clone a TemplateDoesNotExist, keeping its declared attributes and any
    template debug data while leaving behind __traceback__, __context__ and
    __cause__, so the copy is safe to keep around (in a cache, for example).
    """
    new = exc.__class__(
        *exc.args,
        tried=exc.tried,
        backend=backend or exc.backend,
        chain=exc.chain,
    )
    if hasattr(exc, 'template_debug'):
        new.template_debug = exc.template_debug
    return new
def reraise(exc, backend):
    """
    Re-raise a TemplateDoesNotExist bound to *backend*, chaining the
    original so template debug information is maintained.
    """
    raise copy_exception(exc, backend) from exc
def get_installed_libraries():
    """
    Return the built-in template tag libraries and those from installed
    applications, keyed by the short module name rather than the full path
    (e.g. django.templatetags.i18n is stored as i18n).
    """
    libraries = {}
    candidates = ['django.templatetags']
    candidates.extend('%s.templatetags' % app_config.name
                      for app_config in apps.get_app_configs())
    for candidate in candidates:
        try:
            pkg = import_module(candidate)
        except ImportError:
            # The app simply has no templatetags package; that's fine.
            continue
        if hasattr(pkg, '__path__'):
            prefix_len = len(candidate) + 1
            for name in get_package_libraries(pkg):
                libraries[name[prefix_len:]] = name
    return libraries
def get_package_libraries(pkg):
    """
    Recursively yield template tag libraries defined in submodules of a
    package. A submodule qualifies as a library when it defines ``register``.

    Raises:
        InvalidTemplateLibrary: if a candidate submodule fails to import.
    """
    for entry in walk_packages(pkg.__path__, pkg.__name__ + '.'):
        module_name = entry[1]
        try:
            module = import_module(module_name)
        except ImportError as e:
            # Chain the original ImportError (`from e`) so its traceback and
            # context are preserved for debugging; the previous code
            # discarded them.
            raise InvalidTemplateLibrary(
                "Invalid template library specified. ImportError raised when "
                "trying to load '%s': %s" % (module_name, e)
            ) from e
        if hasattr(module, 'register'):
            yield module_name
| bsd-3-clause |
saatvikshah1994/SmartMM | KeywordExtraction/utilities.py | 1 | 7986 | from sklearn.cross_validation import KFold
import csv
import numpy as np
from bs4 import BeautifulSoup
import re
from nltk.corpus import stopwords
import os
from nltk.stem import PorterStemmer
class DataClean:
    """sklearn-style transformer that normalises raw text.

    ``clean_list`` is a sequence of (regex, replacement) pairs applied in
    order. Optionally strips HTML markup first, and can return token lists
    instead of strings. Stopwords (NLTK English plus "film"/"movie") and
    single-character tokens are removed.
    """

    def __init__(self, clean_list, html_clean=False, split_words=False):
        self.clean_list = clean_list
        self.html_clean = html_clean
        self.split_words = split_words
        # Domain-specific stopwords on top of NLTK's English list.
        self.stopwords_eng = stopwords.words("english") + [u"film", u"movie"]

    def fit(self, X, y=None):
        # Stateless transformer: nothing to learn.
        return self

    def transform(self, X):
        flattened = X.flatten()
        cleaned = [self.clean_sentence(sent) for sent in flattened]
        return np.array(cleaned)

    def clean_sentence(self, sentence):
        """Normalise one document: lowercase, regex-substitute, de-stopword."""
        if self.html_clean:
            sentence = BeautifulSoup(sentence).get_text()  # strip html markup
        sentence = sentence.lower()
        for pattern, replacement in self.clean_list:
            sentence = re.sub(pattern, replacement, sentence)
        tokens = [w for w in sentence.split() if w not in self.stopwords_eng]
        tokens = [w for w in tokens if len(w) > 1]
        sentence = ' '.join(tokens)
        sentence = sentence.strip(" ")  # drop possible extra spaces
        if self.split_words:
            return sentence.split()
        return sentence

    def __repr__(self):
        return "DataClean"
class CandidateSelection:
    """Extract keyword candidates from raw text using one of two heuristics.

    "noun_phrase_heuristic_chunks" chunks POS-tagged sentences with a
    noun-phrase grammar; "nounadj_tags_heuristic_words" keeps individual
    nouns/adjectives. Python 2 code (uses tuple-unpacking lambdas).
    """
    def __init__(self,method="noun_phrase_heuristic_chunks"):
        assert method in ["noun_phrase_heuristic_chunks","nounadj_tags_heuristic_words"],\
            "`method` must be one of `noun_phrase_heuristic_chunks`/`nounadj_tags_heuristic_words`"
        self.method = method
    def fit(self,X,y=None):
        # Stateless: nothing to learn.
        return self
    def transform(self,X):
        # Apply the configured extraction heuristic to every document.
        if self.method == "noun_phrase_heuristic_chunks":
            keywords = [self.extract_candidate_chunks_noun_phrase_heuristic(text) for text in X]
        else:
            keywords = [self.extract_candidate_words_nounadj_tags_heuristic(text) for text in X]
        return keywords
    def fit_transform(self,X,y=None):
        self.fit(X,y)
        return self.transform(X)
    def extract_candidate_chunks_noun_phrase_heuristic(self, text, grammar=r'KT: {(<JJ>* <NN.*>+ <IN>)? <JJ>* <NN.*>+}'):
        import itertools, nltk, string
        """Return all those words as candidates which follow a specific pos_tag pattern"""
        # exclude candidates that are stop words or entirely punctuation
        punct = set(string.punctuation)
        stop_words = set(nltk.corpus.stopwords.words('english'))
        # tokenize, POS-tag, and chunk using regular expressions
        chunker = nltk.chunk.regexp.RegexpParser(grammar)
        tagged_sents = nltk.pos_tag_sents(nltk.word_tokenize(sent) for sent in nltk.sent_tokenize(text))
        all_chunks = list(itertools.chain.from_iterable(nltk.chunk.tree2conlltags(chunker.parse(tagged_sent))
                                                        for tagged_sent in tagged_sents))
        # join constituent chunk words into a single chunked phrase
        # (groupby splits on IOB tag 'O', i.e. tokens outside any chunk)
        candidates = [' '.join(word for word, pos, chunk in group).lower()
                      for key, group in itertools.groupby(all_chunks, lambda (word,pos,chunk): chunk != 'O') if key]
        return [cand for cand in candidates
                if cand not in stop_words and not all(char in punct for char in cand)]
    def extract_candidate_words_nounadj_tags_heuristic(self, text, good_tags=set(['JJ','JJR','JJS','NN','NNP','NNS','NNPS'])):
        """Return all those words as candidates which are good_tags - here theyre nouns/adjectives """
        import itertools, nltk, string
        # exclude candidates that are stop words or entirely punctuation
        punct = set(string.punctuation)
        stop_words = set(nltk.corpus.stopwords.words('english'))
        # tokenize and POS-tag words
        tagged_words = itertools.chain.from_iterable(
            nltk.pos_tag_sents(nltk.word_tokenize(sent) for sent in nltk.sent_tokenize(text)))
        # filter on certain POS tags and lowercase all words
        candidates = [word.lower() for word, tag in tagged_words
                      if tag in good_tags and word.lower() not in stop_words
                      and not all(char in punct for char in word)]
        return candidates
def load_data(tag="semeval"):
    """Load a keyword-extraction dataset.

    Parameters
    ----------
    tag : str
        "semeval" reads SemEval-2010 article/.key file pairs;
        "imdbpy_plotkeywords" reads a CSV of plot summaries + keywords.

    Returns
    -------
    (ids, X, y)
        Document identifiers, a numpy array of document texts, and a numpy
        array of keyword lists.

    Raises
    ------
    ValueError
        If `tag` is not one of the supported dataset names.
    """
    if tag == "semeval":
        data_path = "../dataset/semeval2010"
        X = []
        y = []
        ids = []
        for f in os.listdir(data_path):
            f = os.path.join(data_path, f)
            if f.endswith("txt"):
                # Each <name>.txt article pairs with a <name>.key file
                # containing one gold keyword per line.
                fname = f.replace(".txt", "")
                ids.append(fname)
                key_file = "{}.key".format(fname)
                with open(f) as articlefile:
                    article = articlefile.read()
                    X.append(article)
                with open(key_file) as keywords_file:
                    keywords = keywords_file.readlines()
                    keywords_cleaned = [keyword.strip() for keyword in keywords]
                    y.append(keywords_cleaned)
    elif tag == "imdbpy_plotkeywords":
        data_path = "../dataset/imdbpy_plotkeywords.csv"
        X = []
        y = []
        ids = []
        with open(data_path) as f:
            csv_f = csv.reader(f)
            for row in csv_f:
                # Row layout: id, ?, n_plots, plot_1..plot_n, keyword...
                num_plot_summaries = int(row[2])
                plots = []
                for i in xrange(num_plot_summaries):
                    plots.append(row[i+3])
                plots = " ".join(plots)
                keywords_idx = num_plot_summaries + 3
                keywords = []
                for i in xrange(keywords_idx, len(row)):
                    keyword = row[i]
                    keyword_alt = keyword.replace("-", " ")
                    # Keep only keywords that actually occur in the plot text.
                    if keyword_alt in plots or keyword in plots:
                        keywords.append(keyword_alt)
                # Skip documents with too few grounded keywords.
                if len(keywords) > 4:
                    ids.append(row[0])
                    X.append(plots)
                    y.append(keywords)
    else:
        # Bug fix: the original code did `raise("...")`, which raises a bare
        # string and is itself a TypeError; raise a proper exception instead.
        raise ValueError("`tag` must be one of `semeval`,`imdbpy_plotkeywords`")
    return ids, np.array(X), np.array(y)
def cross_validate(data,pipeline,metric_apply,n_folds = 4,stem_y=True):
    # Run k-fold cross-validation of `pipeline` over (X, y), scoring each
    # fold with `metric_apply` (which must return a (precision, recall, f1)
    # triple), and print the mean +/- std of each score.
    # Python 2 code (print statements, str.decode).
    (X,y) = data
    if stem_y:
        # Porter-stem the gold keywords so predictions and gold labels are
        # compared in stemmed form; unstemmable keywords are skipped.
        stemmer = PorterStemmer()
        y_stem = []
        for keywords in y:
            keywords_stemmed = []
            for keyword in keywords:
                try:
                    stemmed_keyword = stemmer.stem(keyword.decode('utf-8'))
                    keywords_stemmed.append(stemmed_keyword)
                except Exception as e:
                    print "Error stemming keyword %s, Skipping." % keyword
            y_stem.append(keywords_stemmed)
        y = np.array(y_stem)
    skf = KFold(len(y),n_folds=n_folds)
    precision_score = []
    recall_score = []
    f1_score = []
    # NOTE(review): this self-assignment is a no-op left over from editing.
    metric_apply = metric_apply
    counter = 0
    for train_idx,val_idx in skf:
        counter += 1
        print "Running fold %d" % counter
        print "fitting"
        pipeline.fit(X[train_idx],y[train_idx])
        print "predicting"
        ypred = pipeline.predict(X[val_idx])
        p,r,f = metric_apply(y[val_idx],ypred)
        precision_score.append(p)
        recall_score.append(r)
        f1_score.append(f)
    # Summarise scores across folds.
    print metric_apply.__name__
    print "{} : {} +/- {}".format("precision_score",
                                  np.mean(precision_score),
                                  np.std(precision_score))
    print "{} : {} +/- {}".format("recall_score",
                                  np.mean(recall_score),
                                  np.std(recall_score))
    print "{} : {} +/- {}".format("f1_score",
                                  np.mean(f1_score),
                                  np.std(f1_score))
| mit |
hbrunn/OCB | addons/hr_contract/base_action_rule.py | 389 | 2646 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2013 OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.base_action_rule.base_action_rule import get_datetime
from openerp.osv import fields, osv
class base_action_rule(osv.Model):
    """ Add resource and calendar for time-based conditions """
    _name = 'base.action.rule'
    _inherit = ['base.action.rule']
    _columns = {
        # Optional link to the field holding the user whose employee
        # contract's working schedule should drive delay computation.
        'trg_date_resource_field_id': fields.many2one(
            'ir.model.fields', 'Use employee work schedule',
            help='Use the user\'s working schedule.',
        ),
    }
    def _check_delay(self, cr, uid, action, record, record_dt, context=None):
        """ Override the check of delay to try to use a user-related calendar.
        If no calendar is found, fallback on the default behavior. """
        # Only applies to day-based delays configured with both a calendar
        # and a resource field; otherwise defer to the parent implementation.
        if action.trg_date_calendar_id and action.trg_date_range_type == 'day' and action.trg_date_resource_field_id:
            user = record[action.trg_date_resource_field_id.name]
            if user.employee_ids and user.employee_ids[0].contract_id \
                    and user.employee_ids[0].contract_id.working_hours:
                # Use the first employee contract's working-hours calendar,
                # accounting for leaves of that employee's resource.
                calendar = user.employee_ids[0].contract_id.working_hours
                start_dt = get_datetime(record_dt)
                resource_id = user.employee_ids[0].resource_id.id
                action_dt = self.pool['resource.calendar'].schedule_days_get_date(
                    cr, uid, calendar.id, action.trg_date_range,
                    day_date=start_dt, compute_leaves=True, resource_id=resource_id,
                    context=context
                )
                return action_dt
        return super(base_action_rule, self)._check_delay(cr, uid, action, record, record_dt, context=context)
| agpl-3.0 |
digital-abyss/ansible-modules-extras | notification/mqtt.py | 101 | 4848 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, 2014, Jan-Piet Mens <jpmens () gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: mqtt
short_description: Publish a message on an MQTT topic for the IoT
version_added: "1.2"
description:
- Publish a message on an MQTT topic.
options:
server:
description:
- MQTT broker address/name
required: false
default: localhost
port:
description:
- MQTT broker port number
required: false
default: 1883
username:
description:
- Username to authenticate against the broker.
required: false
password:
description:
- Password for C(username) to authenticate against the broker.
required: false
client_id:
description:
- MQTT client identifier
required: false
default: hostname + pid
topic:
description:
- MQTT topic name
required: true
default: null
payload:
description:
- Payload. The special string C("None") may be used to send a NULL
(i.e. empty) payload which is useful to simply notify with the I(topic)
or to clear previously retained messages.
required: true
default: null
qos:
description:
- QoS (Quality of Service)
required: false
default: 0
choices: [ "0", "1", "2" ]
retain:
description:
- Setting this flag causes the broker to retain (i.e. keep) the message so that
applications that subsequently subscribe to the topic can received the last
retained message immediately.
required: false
default: False
# informational: requirements for nodes
requirements: [ mosquitto ]
notes:
- This module requires a connection to an MQTT broker such as Mosquitto
U(http://mosquitto.org) and the I(Paho) C(mqtt) Python client (U(https://pypi.python.org/pypi/paho-mqtt)).
author: "Jan-Piet Mens (@jpmens)"
'''
EXAMPLES = '''
- local_action: mqtt
topic=service/ansible/{{ ansible_hostname }}
payload="Hello at {{ ansible_date_time.iso8601 }}"
qos=0
retain=false
client_id=ans001
'''
# ===========================================
# MQTT module support methods.
#
HAS_PAHOMQTT = True
try:
import socket
import paho.mqtt.publish as mqtt
except ImportError:
HAS_PAHOMQTT = False
# ===========================================
# Main
#
def main():
    # Ansible module entry point: validate parameters, publish a single MQTT
    # message via paho-mqtt, and report the result.
    # Python 2 code (`except Exception, e` syntax).
    module = AnsibleModule(
        argument_spec=dict(
            server = dict(default = 'localhost'),
            port = dict(default = 1883),
            topic = dict(required = True),
            payload = dict(required = True),
            client_id = dict(default = None),
            qos = dict(default="0", choices=["0", "1", "2"]),
            retain = dict(default=False, type='bool'),
            username = dict(default = None),
            password = dict(default = None),
        ),
        supports_check_mode=True
    )
    if not HAS_PAHOMQTT:
        module.fail_json(msg="Paho MQTT is not installed")
    server = module.params.get("server", 'localhost')
    port = module.params.get("port", 1883)
    topic = module.params.get("topic")
    payload = module.params.get("payload")
    client_id = module.params.get("client_id", '')
    qos = int(module.params.get("qos", 0))
    retain = module.params.get("retain")
    username = module.params.get("username", None)
    password = module.params.get("password", None)
    # Default client id: fully-qualified hostname + pid, unique per run.
    if client_id is None:
        client_id = "%s_%s" % (socket.getfqdn(), os.getpid())
    # Special string "None" means publish a NULL (empty) payload, e.g. to
    # clear a previously retained message.
    if payload and payload == 'None':
        payload = None
    auth=None
    if username is not None:
        auth = { 'username' : username, 'password' : password }
    try:
        rc = mqtt.single(topic, payload,
                         qos=qos,
                         retain=retain,
                         client_id=client_id,
                         hostname=server,
                         port=port,
                         auth=auth)
    except Exception, e:
        module.fail_json(msg="unable to publish to MQTT broker %s" % (e))
    # Publishing is not tracked as a state change.
    module.exit_json(changed=False, topic=topic)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
rfguri/vimfiles | bundle/ycm/third_party/ycmd/third_party/bottle/setup.py | 32 | 1578 | #!/usr/bin/env python
import sys
import os
# Prefer setuptools when available; fall back to plain distutils.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
if sys.version_info < (2,5):
    raise NotImplementedError("Sorry, you need at least Python 2.5 or Python 3.x to use bottle.")
import bottle
# Packaging metadata is pulled from the bottle module itself (version,
# docstring, author), so it stays in sync with the code.
setup(name='bottle',
      version=bottle.__version__,
      description='Fast and simple WSGI-framework for small web-applications.',
      long_description=bottle.__doc__,
      author=bottle.__author__,
      author_email='marc@gsites.de',
      url='http://bottlepy.org/',
      py_modules=['bottle'],
      scripts=['bottle.py'],
      license='MIT',
      platforms = 'any',
      classifiers=['Development Status :: 4 - Beta',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: MIT License',
                   'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
                   'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
                   'Topic :: Internet :: WWW/HTTP :: WSGI',
                   'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
                   'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
                   'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
                   'Topic :: Software Development :: Libraries :: Application Frameworks',
                   'Programming Language :: Python :: 2.5',
                   'Programming Language :: Python :: 2.6',
                   'Programming Language :: Python :: 2.7',
                   'Programming Language :: Python :: 3',
                   'Programming Language :: Python :: 3.2',
                   'Programming Language :: Python :: 3.3',
                   ],
      )
| mit |
sushramesh/lwc | lib/python2.7/site-packages/setuptools/depends.py | 462 | 6370 | import sys
import imp
import marshal
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion
from setuptools import compat
__all__ = [
'Require', 'find_module', 'get_module_constant', 'extract_constant'
]
class Require:
    """A prerequisite to building or installing a distribution."""

    def __init__(self, name, requested_version, module, homepage='',
                 attribute=None, format=None):
        # Default the version parser to StrictVersion when a version was
        # requested, and normalise the requested version through it.
        if format is None and requested_version is not None:
            format = StrictVersion
        if format is not None:
            requested_version = format(requested_version)
        if attribute is None:
            attribute = '__version__'
        # Store every constructor argument as an instance attribute.
        self.name = name
        self.requested_version = requested_version
        self.module = module
        self.homepage = homepage
        self.attribute = attribute
        self.format = format

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is None:
            return self.name
        return '%s-%s' % (self.name, self.requested_version)

    def version_ok(self, version):
        """Is 'version' sufficiently up-to-date?"""
        if self.attribute is None or self.format is None:
            return True
        return str(version) != "unknown" and version >= self.requested_version

    def get_version(self, paths=None, default="unknown"):
        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module.  If not found, return 'None'.  If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module.  The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """
        if self.attribute is None:
            # No attribute to inspect: mere presence of the module suffices.
            try:
                f, p, i = find_module(self.module, paths)
            except ImportError:
                return None
            if f:
                f.close()
            return default
        v = get_module_constant(self.module, self.attribute, default, paths)
        if v is not None and v is not default and self.format is not None:
            return self.format(v)
        return v

    def is_present(self, paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self, paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        return version is not None and self.version_ok(version)
def _iter_code(code):
    """Yield '(op,arg)' pair for each operation in code object 'code'"""
    # Walks raw bytecode by hand: 1 byte for argument-less opcodes, 3 bytes
    # (opcode + 16-bit little-endian argument) otherwise, folding any
    # preceding EXTENDED_ARG into the next instruction's argument.
    from array import array
    from dis import HAVE_ARGUMENT, EXTENDED_ARG
    bytes = array('b',code.co_code)
    eof = len(code.co_code)
    ptr = 0
    extended_arg = 0
    while ptr<eof:
        op = bytes[ptr]
        if op>=HAVE_ARGUMENT:
            arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
            ptr += 3
            if op==EXTENDED_ARG:
                # Accumulate the high 16 bits for the following instruction.
                extended_arg = arg * compat.long_type(65536)
                continue
        else:
            arg = None
            ptr += 1
        yield op,arg
def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support"""
    # Resolve each dotted component in turn, descending into package
    # directories; returns the (file, path, description) triple of the
    # final component.
    parts = module.split('.')
    while parts:
        part = parts.pop(0)
        f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
        if kind==PKG_DIRECTORY:
            # A bare package reference resolves to its __init__ module.
            parts = parts or ['__init__']
            paths = [path]
        elif parts:
            # A non-package component with leftovers can't be descended into.
            raise ImportError("Can't find %r in %s" % (parts,module))
    return info
def get_module_constant(module, symbol, default=-1, paths=None):
    """Find 'module' by searching 'paths', and extract 'symbol'

    Return 'None' if 'module' does not exist on 'paths', or it does not define
    'symbol'.  If the module defines 'symbol' as a constant, return the
    constant.  Otherwise, return 'default'."""

    try:
        f, path, (suffix, mode, kind) = find_module(module, paths)
    except ImportError:
        # Module doesn't exist
        return None

    try:
        # Obtain a code object for the module without importing it when
        # possible, so the constant can be read via bytecode inspection.
        if kind==PY_COMPILED:
            f.read(8)   # skip magic & date
            code = marshal.load(f)
        elif kind==PY_FROZEN:
            code = imp.get_frozen_object(module)
        elif kind==PY_SOURCE:
            code = compile(f.read(), path, 'exec')
        else:
            # Not something we can parse; we'll have to import it.  :(
            if module not in sys.modules:
                imp.load_module(module, f, path, (suffix, mode, kind))
            return getattr(sys.modules[module], symbol, None)
    finally:
        if f:
            f.close()

    return extract_constant(code, symbol, default)
def extract_constant(code, symbol, default=-1):
    """Extract the constant value of 'symbol' from 'code'

    If the name 'symbol' is bound to a constant value by the Python code
    object 'code', return that value.  If 'symbol' is bound to an expression,
    return 'default'.  Otherwise, return 'None'.

    Return value is based on the first assignment to 'symbol'.  'symbol' must
    be a global, or at least a non-"fast" local in the code block.  That is,
    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
    must be present in 'code.co_names'.
    """

    if symbol not in code.co_names:
        # name's not there, can't possibly be an assigment
        return None

    name_idx = list(code.co_names).index(symbol)

    # Opcode numbers are hard-coded rather than looked up in dis.opmap.
    STORE_NAME = 90
    STORE_GLOBAL = 97
    LOAD_CONST = 100

    const = default

    # Scan for a LOAD_CONST immediately feeding a store of our symbol; any
    # other intervening opcode means the value was computed, so reset to
    # 'default'.
    for op, arg in _iter_code(code):

        if op==LOAD_CONST:
            const = code.co_consts[arg]
        elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
            return const
        else:
            const = default
def _update_globals():
    """
    Patch the globals to remove the objects not available on some platforms.
    XXX it'd be better to test assertions about bytecode instead.
    """

    # Bytecode inspection only works on CPython; on Jython/IronPython the
    # helpers relying on it are removed from the module's public surface.
    if not sys.platform.startswith('java') and sys.platform != 'cli':
        return
    incompatible = 'extract_constant', 'get_module_constant'
    for name in incompatible:
        del globals()[name]
        __all__.remove(name)

# Apply the platform patch once at import time.
_update_globals()
| mit |
mpdehaan/ansible | plugins/inventory/jail.py | 132 | 1288 | #!/usr/bin/env python
# (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen,PIPE
import sys
import json

# Dynamic Ansible inventory script listing FreeBSD jails as hosts.
# Python 2 code (print statements).
result = {}
result['all'] = {}

# `jls -q name` prints one jail name per line; strip the trailing newline
# from each to build the host list.
pipe = Popen(['jls', '-q', 'name'], stdout=PIPE, universal_newlines=True)
result['all']['hosts'] = [x[:-1] for x in pipe.stdout.readlines()]

# Every jail is reached through the 'jail' connection plugin.
result['all']['vars'] = {}
result['all']['vars']['ansible_connection'] = 'jail'

# Inventory protocol: --list dumps all groups, --host dumps per-host vars.
if len(sys.argv) == 2 and sys.argv[1] == '--list':
    print json.dumps(result)
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
    print json.dumps({'ansible_connection': 'jail'})
else:
    print "Need an argument, either --list or --host <host>"
| gpl-3.0 |
crazy-cat/incubator-mxnet | python/mxnet/contrib/tensorboard.py | 34 | 3192 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
"""TensorBoard functions that can be used to log various status during epoch."""
from __future__ import absolute_import
import logging
class LogMetricsCallback(object):
    """Log metrics periodically in TensorBoard.

    Works much like ``callback.Speedometer`` but writes a TensorBoard event
    file for visualization instead. For more usage, please refer
    https://github.com/dmlc/tensorboard

    Parameters
    ----------
    logging_dir : str
        TensorBoard event file directory; afterwards run
        ``tensorboard --logdir=path/to/logs`` to visualize.
    prefix : str
        Optional prefix prepended to each scalar metric name. Useful to keep
        train/eval curves with identical metric names distinguishable, e.g.
        pass separate callbacks with prefixes for training and evaluation
        logs.
    """

    def __init__(self, logging_dir, prefix=None):
        self.prefix = prefix
        try:
            from tensorboard import SummaryWriter
        except ImportError:
            logging.error('You can install tensorboard via `pip install tensorboard`.')
        else:
            self.summary_writer = SummaryWriter(logging_dir)

    def __call__(self, param):
        """Callback to log training speed and metrics in TensorBoard."""
        metric = param.eval_metric
        if metric is None:
            return
        for name, value in metric.get_name_value():
            full_name = name if self.prefix is None else '%s-%s' % (self.prefix, name)
            self.summary_writer.add_scalar(full_name, value)
| apache-2.0 |
RockySteveJobs/python-for-android | python3-alpha/python3-src/Lib/msilib/schema.py | 48 | 81587 | from . import Table
_Validation = Table('_Validation')
_Validation.add_field(1,'Table',11552)
_Validation.add_field(2,'Column',11552)
_Validation.add_field(3,'Nullable',3332)
_Validation.add_field(4,'MinValue',4356)
_Validation.add_field(5,'MaxValue',4356)
_Validation.add_field(6,'KeyTable',7679)
_Validation.add_field(7,'KeyColumn',5378)
_Validation.add_field(8,'Category',7456)
_Validation.add_field(9,'Set',7679)
_Validation.add_field(10,'Description',7679)
ActionText = Table('ActionText')
ActionText.add_field(1,'Action',11592)
ActionText.add_field(2,'Description',7936)
ActionText.add_field(3,'Template',7936)
AdminExecuteSequence = Table('AdminExecuteSequence')
AdminExecuteSequence.add_field(1,'Action',11592)
AdminExecuteSequence.add_field(2,'Condition',7679)
AdminExecuteSequence.add_field(3,'Sequence',5378)
Condition = Table('Condition')
Condition.add_field(1,'Feature_',11558)
Condition.add_field(2,'Level',9474)
Condition.add_field(3,'Condition',7679)
AdminUISequence = Table('AdminUISequence')
AdminUISequence.add_field(1,'Action',11592)
AdminUISequence.add_field(2,'Condition',7679)
AdminUISequence.add_field(3,'Sequence',5378)
AdvtExecuteSequence = Table('AdvtExecuteSequence')
AdvtExecuteSequence.add_field(1,'Action',11592)
AdvtExecuteSequence.add_field(2,'Condition',7679)
AdvtExecuteSequence.add_field(3,'Sequence',5378)
AdvtUISequence = Table('AdvtUISequence')
AdvtUISequence.add_field(1,'Action',11592)
AdvtUISequence.add_field(2,'Condition',7679)
AdvtUISequence.add_field(3,'Sequence',5378)
AppId = Table('AppId')
AppId.add_field(1,'AppId',11558)
AppId.add_field(2,'RemoteServerName',7679)
AppId.add_field(3,'LocalService',7679)
AppId.add_field(4,'ServiceParameters',7679)
AppId.add_field(5,'DllSurrogate',7679)
AppId.add_field(6,'ActivateAtStorage',5378)
AppId.add_field(7,'RunAsInteractiveUser',5378)
AppSearch = Table('AppSearch')
AppSearch.add_field(1,'Property',11592)
AppSearch.add_field(2,'Signature_',11592)
Property = Table('Property')
Property.add_field(1,'Property',11592)
Property.add_field(2,'Value',3840)
BBControl = Table('BBControl')
BBControl.add_field(1,'Billboard_',11570)
BBControl.add_field(2,'BBControl',11570)
BBControl.add_field(3,'Type',3378)
BBControl.add_field(4,'X',1282)
BBControl.add_field(5,'Y',1282)
BBControl.add_field(6,'Width',1282)
BBControl.add_field(7,'Height',1282)
BBControl.add_field(8,'Attributes',4356)
BBControl.add_field(9,'Text',7986)
Billboard = Table('Billboard')
Billboard.add_field(1,'Billboard',11570)
Billboard.add_field(2,'Feature_',3366)
Billboard.add_field(3,'Action',7474)
Billboard.add_field(4,'Ordering',5378)
Feature = Table('Feature')
Feature.add_field(1,'Feature',11558)
Feature.add_field(2,'Feature_Parent',7462)
Feature.add_field(3,'Title',8000)
Feature.add_field(4,'Description',8191)
Feature.add_field(5,'Display',5378)
Feature.add_field(6,'Level',1282)
Feature.add_field(7,'Directory_',7496)
Feature.add_field(8,'Attributes',1282)
Binary = Table('Binary')
Binary.add_field(1,'Name',11592)
Binary.add_field(2,'Data',2304)
BindImage = Table('BindImage')
BindImage.add_field(1,'File_',11592)
BindImage.add_field(2,'Path',7679)
File = Table('File')
File.add_field(1,'File',11592)
File.add_field(2,'Component_',3400)
File.add_field(3,'FileName',4095)
File.add_field(4,'FileSize',260)
File.add_field(5,'Version',7496)
File.add_field(6,'Language',7444)
File.add_field(7,'Attributes',5378)
File.add_field(8,'Sequence',1282)
CCPSearch = Table('CCPSearch')
CCPSearch.add_field(1,'Signature_',11592)
CheckBox = Table('CheckBox')
CheckBox.add_field(1,'Property',11592)
CheckBox.add_field(2,'Value',7488)
Class = Table('Class')
Class.add_field(1,'CLSID',11558)
Class.add_field(2,'Context',11552)
Class.add_field(3,'Component_',11592)
Class.add_field(4,'ProgId_Default',7679)
Class.add_field(5,'Description',8191)
Class.add_field(6,'AppId_',7462)
Class.add_field(7,'FileTypeMask',7679)
Class.add_field(8,'Icon_',7496)
Class.add_field(9,'IconIndex',5378)
Class.add_field(10,'DefInprocHandler',7456)
Class.add_field(11,'Argument',7679)
Class.add_field(12,'Feature_',3366)
Class.add_field(13,'Attributes',5378)
Component = Table('Component')
Component.add_field(1,'Component',11592)
Component.add_field(2,'ComponentId',7462)
Component.add_field(3,'Directory_',3400)
Component.add_field(4,'Attributes',1282)
Component.add_field(5,'Condition',7679)
Component.add_field(6,'KeyPath',7496)
Icon = Table('Icon')
Icon.add_field(1,'Name',11592)
Icon.add_field(2,'Data',2304)
ProgId = Table('ProgId')
ProgId.add_field(1,'ProgId',11775)
ProgId.add_field(2,'ProgId_Parent',7679)
ProgId.add_field(3,'Class_',7462)
ProgId.add_field(4,'Description',8191)
ProgId.add_field(5,'Icon_',7496)
ProgId.add_field(6,'IconIndex',5378)
ComboBox = Table('ComboBox')
ComboBox.add_field(1,'Property',11592)
ComboBox.add_field(2,'Order',9474)
ComboBox.add_field(3,'Value',3392)
ComboBox.add_field(4,'Text',8000)
CompLocator = Table('CompLocator')
CompLocator.add_field(1,'Signature_',11592)
CompLocator.add_field(2,'ComponentId',3366)
CompLocator.add_field(3,'Type',5378)
Complus = Table('Complus')
Complus.add_field(1,'Component_',11592)
Complus.add_field(2,'ExpType',13570)
Directory = Table('Directory')
Directory.add_field(1,'Directory',11592)
Directory.add_field(2,'Directory_Parent',7496)
Directory.add_field(3,'DefaultDir',4095)
Control = Table('Control')
Control.add_field(1,'Dialog_',11592)
Control.add_field(2,'Control',11570)
Control.add_field(3,'Type',3348)
Control.add_field(4,'X',1282)
Control.add_field(5,'Y',1282)
Control.add_field(6,'Width',1282)
Control.add_field(7,'Height',1282)
Control.add_field(8,'Attributes',4356)
Control.add_field(9,'Property',7474)
Control.add_field(10,'Text',7936)
Control.add_field(11,'Control_Next',7474)
Control.add_field(12,'Help',7986)
Dialog = Table('Dialog')
Dialog.add_field(1,'Dialog',11592)
Dialog.add_field(2,'HCentering',1282)
Dialog.add_field(3,'VCentering',1282)
Dialog.add_field(4,'Width',1282)
Dialog.add_field(5,'Height',1282)
Dialog.add_field(6,'Attributes',4356)
Dialog.add_field(7,'Title',8064)
Dialog.add_field(8,'Control_First',3378)
Dialog.add_field(9,'Control_Default',7474)
Dialog.add_field(10,'Control_Cancel',7474)
ControlCondition = Table('ControlCondition')
ControlCondition.add_field(1,'Dialog_',11592)
ControlCondition.add_field(2,'Control_',11570)
ControlCondition.add_field(3,'Action',11570)
ControlCondition.add_field(4,'Condition',11775)
ControlEvent = Table('ControlEvent')
ControlEvent.add_field(1,'Dialog_',11592)
ControlEvent.add_field(2,'Control_',11570)
ControlEvent.add_field(3,'Event',11570)
ControlEvent.add_field(4,'Argument',11775)
ControlEvent.add_field(5,'Condition',15871)
ControlEvent.add_field(6,'Ordering',5378)
CreateFolder = Table('CreateFolder')
CreateFolder.add_field(1,'Directory_',11592)
CreateFolder.add_field(2,'Component_',11592)
CustomAction = Table('CustomAction')
CustomAction.add_field(1,'Action',11592)
CustomAction.add_field(2,'Type',1282)
CustomAction.add_field(3,'Source',7496)
CustomAction.add_field(4,'Target',7679)
DrLocator = Table('DrLocator')
DrLocator.add_field(1,'Signature_',11592)
DrLocator.add_field(2,'Parent',15688)
DrLocator.add_field(3,'Path',15871)
DrLocator.add_field(4,'Depth',5378)
DuplicateFile = Table('DuplicateFile')
DuplicateFile.add_field(1,'FileKey',11592)
DuplicateFile.add_field(2,'Component_',3400)
DuplicateFile.add_field(3,'File_',3400)
DuplicateFile.add_field(4,'DestName',8191)
DuplicateFile.add_field(5,'DestFolder',7496)
Environment = Table('Environment')
Environment.add_field(1,'Environment',11592)
Environment.add_field(2,'Name',4095)
Environment.add_field(3,'Value',8191)
Environment.add_field(4,'Component_',3400)
Error = Table('Error')
Error.add_field(1,'Error',9474)
Error.add_field(2,'Message',7936)
EventMapping = Table('EventMapping')
EventMapping.add_field(1,'Dialog_',11592)
EventMapping.add_field(2,'Control_',11570)
EventMapping.add_field(3,'Event',11570)
EventMapping.add_field(4,'Attribute',3378)
Extension = Table('Extension')
Extension.add_field(1,'Extension',11775)
Extension.add_field(2,'Component_',11592)
Extension.add_field(3,'ProgId_',7679)
Extension.add_field(4,'MIME_',7488)
Extension.add_field(5,'Feature_',3366)
MIME = Table('MIME')
MIME.add_field(1,'ContentType',11584)
MIME.add_field(2,'Extension_',3583)
MIME.add_field(3,'CLSID',7462)
FeatureComponents = Table('FeatureComponents')
FeatureComponents.add_field(1,'Feature_',11558)
FeatureComponents.add_field(2,'Component_',11592)
FileSFPCatalog = Table('FileSFPCatalog')
FileSFPCatalog.add_field(1,'File_',11592)
FileSFPCatalog.add_field(2,'SFPCatalog_',11775)
SFPCatalog = Table('SFPCatalog')
SFPCatalog.add_field(1,'SFPCatalog',11775)
SFPCatalog.add_field(2,'Catalog',2304)
SFPCatalog.add_field(3,'Dependency',7424)
Font = Table('Font')
Font.add_field(1,'File_',11592)
Font.add_field(2,'FontTitle',7552)
IniFile = Table('IniFile')
IniFile.add_field(1,'IniFile',11592)
IniFile.add_field(2,'FileName',4095)
IniFile.add_field(3,'DirProperty',7496)
IniFile.add_field(4,'Section',3936)
IniFile.add_field(5,'Key',3968)
IniFile.add_field(6,'Value',4095)
IniFile.add_field(7,'Action',1282)
IniFile.add_field(8,'Component_',3400)
IniLocator = Table('IniLocator')
IniLocator.add_field(1,'Signature_',11592)
IniLocator.add_field(2,'FileName',3583)
IniLocator.add_field(3,'Section',3424)
IniLocator.add_field(4,'Key',3456)
IniLocator.add_field(5,'Field',5378)
IniLocator.add_field(6,'Type',5378)
InstallExecuteSequence = Table('InstallExecuteSequence')
InstallExecuteSequence.add_field(1,'Action',11592)
InstallExecuteSequence.add_field(2,'Condition',7679)
InstallExecuteSequence.add_field(3,'Sequence',5378)
InstallUISequence = Table('InstallUISequence')
InstallUISequence.add_field(1,'Action',11592)
InstallUISequence.add_field(2,'Condition',7679)
InstallUISequence.add_field(3,'Sequence',5378)
IsolatedComponent = Table('IsolatedComponent')
IsolatedComponent.add_field(1,'Component_Shared',11592)
IsolatedComponent.add_field(2,'Component_Application',11592)
LaunchCondition = Table('LaunchCondition')
LaunchCondition.add_field(1,'Condition',11775)
LaunchCondition.add_field(2,'Description',4095)
ListBox = Table('ListBox')
ListBox.add_field(1,'Property',11592)
ListBox.add_field(2,'Order',9474)
ListBox.add_field(3,'Value',3392)
ListBox.add_field(4,'Text',8000)
ListView = Table('ListView')
ListView.add_field(1,'Property',11592)
ListView.add_field(2,'Order',9474)
ListView.add_field(3,'Value',3392)
ListView.add_field(4,'Text',8000)
ListView.add_field(5,'Binary_',7496)
LockPermissions = Table('LockPermissions')
LockPermissions.add_field(1,'LockObject',11592)
LockPermissions.add_field(2,'Table',11552)
LockPermissions.add_field(3,'Domain',15871)
LockPermissions.add_field(4,'User',11775)
LockPermissions.add_field(5,'Permission',4356)
Media = Table('Media')
Media.add_field(1,'DiskId',9474)
Media.add_field(2,'LastSequence',1282)
Media.add_field(3,'DiskPrompt',8000)
Media.add_field(4,'Cabinet',7679)
Media.add_field(5,'VolumeLabel',7456)
Media.add_field(6,'Source',7496)
MoveFile = Table('MoveFile')
MoveFile.add_field(1,'FileKey',11592)
MoveFile.add_field(2,'Component_',3400)
MoveFile.add_field(3,'SourceName',8191)
MoveFile.add_field(4,'DestName',8191)
MoveFile.add_field(5,'SourceFolder',7496)
MoveFile.add_field(6,'DestFolder',3400)
MoveFile.add_field(7,'Options',1282)
MsiAssembly = Table('MsiAssembly')
MsiAssembly.add_field(1,'Component_',11592)
MsiAssembly.add_field(2,'Feature_',3366)
MsiAssembly.add_field(3,'File_Manifest',7496)
MsiAssembly.add_field(4,'File_Application',7496)
MsiAssembly.add_field(5,'Attributes',5378)
MsiAssemblyName = Table('MsiAssemblyName')
MsiAssemblyName.add_field(1,'Component_',11592)
MsiAssemblyName.add_field(2,'Name',11775)
MsiAssemblyName.add_field(3,'Value',3583)
MsiDigitalCertificate = Table('MsiDigitalCertificate')
MsiDigitalCertificate.add_field(1,'DigitalCertificate',11592)
MsiDigitalCertificate.add_field(2,'CertData',2304)
MsiDigitalSignature = Table('MsiDigitalSignature')
MsiDigitalSignature.add_field(1,'Table',11552)
MsiDigitalSignature.add_field(2,'SignObject',11592)
MsiDigitalSignature.add_field(3,'DigitalCertificate_',3400)
MsiDigitalSignature.add_field(4,'Hash',6400)
MsiFileHash = Table('MsiFileHash')
MsiFileHash.add_field(1,'File_',11592)
MsiFileHash.add_field(2,'Options',1282)
MsiFileHash.add_field(3,'HashPart1',260)
MsiFileHash.add_field(4,'HashPart2',260)
MsiFileHash.add_field(5,'HashPart3',260)
MsiFileHash.add_field(6,'HashPart4',260)
MsiPatchHeaders = Table('MsiPatchHeaders')
MsiPatchHeaders.add_field(1,'StreamRef',11558)
MsiPatchHeaders.add_field(2,'Header',2304)
ODBCAttribute = Table('ODBCAttribute')
ODBCAttribute.add_field(1,'Driver_',11592)
ODBCAttribute.add_field(2,'Attribute',11560)
ODBCAttribute.add_field(3,'Value',8191)
ODBCDriver = Table('ODBCDriver')
ODBCDriver.add_field(1,'Driver',11592)
ODBCDriver.add_field(2,'Component_',3400)
ODBCDriver.add_field(3,'Description',3583)
ODBCDriver.add_field(4,'File_',3400)
ODBCDriver.add_field(5,'File_Setup',7496)
ODBCDataSource = Table('ODBCDataSource')
ODBCDataSource.add_field(1,'DataSource',11592)
ODBCDataSource.add_field(2,'Component_',3400)
ODBCDataSource.add_field(3,'Description',3583)
ODBCDataSource.add_field(4,'DriverDescription',3583)
ODBCDataSource.add_field(5,'Registration',1282)
ODBCSourceAttribute = Table('ODBCSourceAttribute')
ODBCSourceAttribute.add_field(1,'DataSource_',11592)
ODBCSourceAttribute.add_field(2,'Attribute',11552)
ODBCSourceAttribute.add_field(3,'Value',8191)
ODBCTranslator = Table('ODBCTranslator')
ODBCTranslator.add_field(1,'Translator',11592)
ODBCTranslator.add_field(2,'Component_',3400)
ODBCTranslator.add_field(3,'Description',3583)
ODBCTranslator.add_field(4,'File_',3400)
ODBCTranslator.add_field(5,'File_Setup',7496)
Patch = Table('Patch')
Patch.add_field(1,'File_',11592)
Patch.add_field(2,'Sequence',9474)
Patch.add_field(3,'PatchSize',260)
Patch.add_field(4,'Attributes',1282)
Patch.add_field(5,'Header',6400)
Patch.add_field(6,'StreamRef_',7462)
PatchPackage = Table('PatchPackage')
PatchPackage.add_field(1,'PatchId',11558)
PatchPackage.add_field(2,'Media_',1282)
PublishComponent = Table('PublishComponent')
PublishComponent.add_field(1,'ComponentId',11558)
PublishComponent.add_field(2,'Qualifier',11775)
PublishComponent.add_field(3,'Component_',11592)
PublishComponent.add_field(4,'AppData',8191)
PublishComponent.add_field(5,'Feature_',3366)
RadioButton = Table('RadioButton')
RadioButton.add_field(1,'Property',11592)
RadioButton.add_field(2,'Order',9474)
RadioButton.add_field(3,'Value',3392)
RadioButton.add_field(4,'X',1282)
RadioButton.add_field(5,'Y',1282)
RadioButton.add_field(6,'Width',1282)
RadioButton.add_field(7,'Height',1282)
RadioButton.add_field(8,'Text',8000)
RadioButton.add_field(9,'Help',7986)
Registry = Table('Registry')
Registry.add_field(1,'Registry',11592)
Registry.add_field(2,'Root',1282)
Registry.add_field(3,'Key',4095)
Registry.add_field(4,'Name',8191)
Registry.add_field(5,'Value',7936)
Registry.add_field(6,'Component_',3400)
RegLocator = Table('RegLocator')
RegLocator.add_field(1,'Signature_',11592)
RegLocator.add_field(2,'Root',1282)
RegLocator.add_field(3,'Key',3583)
RegLocator.add_field(4,'Name',7679)
RegLocator.add_field(5,'Type',5378)
RemoveFile = Table('RemoveFile')
RemoveFile.add_field(1,'FileKey',11592)
RemoveFile.add_field(2,'Component_',3400)
RemoveFile.add_field(3,'FileName',8191)
RemoveFile.add_field(4,'DirProperty',3400)
RemoveFile.add_field(5,'InstallMode',1282)
RemoveIniFile = Table('RemoveIniFile')
RemoveIniFile.add_field(1,'RemoveIniFile',11592)
RemoveIniFile.add_field(2,'FileName',4095)
RemoveIniFile.add_field(3,'DirProperty',7496)
RemoveIniFile.add_field(4,'Section',3936)
RemoveIniFile.add_field(5,'Key',3968)
RemoveIniFile.add_field(6,'Value',8191)
RemoveIniFile.add_field(7,'Action',1282)
RemoveIniFile.add_field(8,'Component_',3400)
RemoveRegistry = Table('RemoveRegistry')
RemoveRegistry.add_field(1,'RemoveRegistry',11592)
RemoveRegistry.add_field(2,'Root',1282)
RemoveRegistry.add_field(3,'Key',4095)
RemoveRegistry.add_field(4,'Name',8191)
RemoveRegistry.add_field(5,'Component_',3400)
ReserveCost = Table('ReserveCost')
ReserveCost.add_field(1,'ReserveKey',11592)
ReserveCost.add_field(2,'Component_',3400)
ReserveCost.add_field(3,'ReserveFolder',7496)
ReserveCost.add_field(4,'ReserveLocal',260)
ReserveCost.add_field(5,'ReserveSource',260)
SelfReg = Table('SelfReg')
SelfReg.add_field(1,'File_',11592)
SelfReg.add_field(2,'Cost',5378)
ServiceControl = Table('ServiceControl')
ServiceControl.add_field(1,'ServiceControl',11592)
ServiceControl.add_field(2,'Name',4095)
ServiceControl.add_field(3,'Event',1282)
ServiceControl.add_field(4,'Arguments',8191)
ServiceControl.add_field(5,'Wait',5378)
ServiceControl.add_field(6,'Component_',3400)
ServiceInstall = Table('ServiceInstall')
ServiceInstall.add_field(1,'ServiceInstall',11592)
ServiceInstall.add_field(2,'Name',3583)
ServiceInstall.add_field(3,'DisplayName',8191)
ServiceInstall.add_field(4,'ServiceType',260)
ServiceInstall.add_field(5,'StartType',260)
ServiceInstall.add_field(6,'ErrorControl',260)
ServiceInstall.add_field(7,'LoadOrderGroup',7679)
ServiceInstall.add_field(8,'Dependencies',7679)
ServiceInstall.add_field(9,'StartName',7679)
ServiceInstall.add_field(10,'Password',7679)
ServiceInstall.add_field(11,'Arguments',7679)
ServiceInstall.add_field(12,'Component_',3400)
ServiceInstall.add_field(13,'Description',8191)
Shortcut = Table('Shortcut')
Shortcut.add_field(1,'Shortcut',11592)
Shortcut.add_field(2,'Directory_',3400)
Shortcut.add_field(3,'Name',3968)
Shortcut.add_field(4,'Component_',3400)
Shortcut.add_field(5,'Target',3400)
Shortcut.add_field(6,'Arguments',7679)
Shortcut.add_field(7,'Description',8191)
Shortcut.add_field(8,'Hotkey',5378)
Shortcut.add_field(9,'Icon_',7496)
Shortcut.add_field(10,'IconIndex',5378)
Shortcut.add_field(11,'ShowCmd',5378)
Shortcut.add_field(12,'WkDir',7496)
Signature = Table('Signature')
Signature.add_field(1,'Signature',11592)
Signature.add_field(2,'FileName',3583)
Signature.add_field(3,'MinVersion',7444)
Signature.add_field(4,'MaxVersion',7444)
Signature.add_field(5,'MinSize',4356)
Signature.add_field(6,'MaxSize',4356)
Signature.add_field(7,'MinDate',4356)
Signature.add_field(8,'MaxDate',4356)
Signature.add_field(9,'Languages',7679)
TextStyle = Table('TextStyle')
TextStyle.add_field(1,'TextStyle',11592)
TextStyle.add_field(2,'FaceName',3360)
TextStyle.add_field(3,'Size',1282)
TextStyle.add_field(4,'Color',4356)
TextStyle.add_field(5,'StyleBits',5378)
TypeLib = Table('TypeLib')
TypeLib.add_field(1,'LibID',11558)
TypeLib.add_field(2,'Language',9474)
TypeLib.add_field(3,'Component_',11592)
TypeLib.add_field(4,'Version',4356)
TypeLib.add_field(5,'Description',8064)
TypeLib.add_field(6,'Directory_',7496)
TypeLib.add_field(7,'Feature_',3366)
TypeLib.add_field(8,'Cost',4356)
UIText = Table('UIText')
UIText.add_field(1,'Key',11592)
UIText.add_field(2,'Text',8191)
Upgrade = Table('Upgrade')
Upgrade.add_field(1,'UpgradeCode',11558)
Upgrade.add_field(2,'VersionMin',15636)
Upgrade.add_field(3,'VersionMax',15636)
Upgrade.add_field(4,'Language',15871)
Upgrade.add_field(5,'Attributes',8452)
Upgrade.add_field(6,'Remove',7679)
Upgrade.add_field(7,'ActionProperty',3400)
Verb = Table('Verb')
Verb.add_field(1,'Extension_',11775)
Verb.add_field(2,'Verb',11552)
Verb.add_field(3,'Sequence',5378)
Verb.add_field(4,'Command',8191)
Verb.add_field(5,'Argument',8191)
tables=[_Validation, ActionText, AdminExecuteSequence, Condition, AdminUISequence, AdvtExecuteSequence, AdvtUISequence, AppId, AppSearch, Property, BBControl, Billboard, Feature, Binary, BindImage, File, CCPSearch, CheckBox, Class, Component, Icon, ProgId, ComboBox, CompLocator, Complus, Directory, Control, Dialog, ControlCondition, ControlEvent, CreateFolder, CustomAction, DrLocator, DuplicateFile, Environment, Error, EventMapping, Extension, MIME, FeatureComponents, FileSFPCatalog, SFPCatalog, Font, IniFile, IniLocator, InstallExecuteSequence, InstallUISequence, IsolatedComponent, LaunchCondition, ListBox, ListView, LockPermissions, Media, MoveFile, MsiAssembly, MsiAssemblyName, MsiDigitalCertificate, MsiDigitalSignature, MsiFileHash, MsiPatchHeaders, ODBCAttribute, ODBCDriver, ODBCDataSource, ODBCSourceAttribute, ODBCTranslator, Patch, PatchPackage, PublishComponent, RadioButton, Registry, RegLocator, RemoveFile, RemoveIniFile, RemoveRegistry, ReserveCost, SelfReg, ServiceControl, ServiceInstall, Shortcut, Signature, TextStyle, TypeLib, UIText, Upgrade, Verb]
_Validation_records = [
('_Validation','Table','N',None, None, None, None, 'Identifier',None, 'Name of table',),
('_Validation','Column','N',None, None, None, None, 'Identifier',None, 'Name of column',),
('_Validation','Description','Y',None, None, None, None, 'Text',None, 'Description of column',),
('_Validation','Set','Y',None, None, None, None, 'Text',None, 'Set of values that are permitted',),
('_Validation','Category','Y',None, None, None, None, None, 'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL','String category',),
('_Validation','KeyColumn','Y',1,32,None, None, None, None, 'Column to which foreign key connects',),
('_Validation','KeyTable','Y',None, None, None, None, 'Identifier',None, 'For foreign key, Name of table to which data must link',),
('_Validation','MaxValue','Y',-2147483647,2147483647,None, None, None, None, 'Maximum value allowed',),
('_Validation','MinValue','Y',-2147483647,2147483647,None, None, None, None, 'Minimum value allowed',),
('_Validation','Nullable','N',None, None, None, None, None, 'Y;N;@','Whether the column is nullable',),
('ActionText','Description','Y',None, None, None, None, 'Text',None, 'Localized description displayed in progress dialog and log when action is executing.',),
('ActionText','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to be described.',),
('ActionText','Template','Y',None, None, None, None, 'Template',None, 'Optional localized format template used to format action data records for display during action execution.',),
('AdminExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdminExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdminExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('Condition','Condition','Y',None, None, None, None, 'Condition',None, 'Expression evaluated to determine if Level in the Feature table is to change.',),
('Condition','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Reference to a Feature entry in Feature table.',),
('Condition','Level','N',0,32767,None, None, None, None, 'New selection Level to set in Feature table if Condition evaluates to TRUE.',),
('AdminUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdminUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdminUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AdvtExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdvtExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdvtExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AdvtUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdvtUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdvtUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AppId','AppId','N',None, None, None, None, 'Guid',None, None, ),
('AppId','ActivateAtStorage','Y',0,1,None, None, None, None, None, ),
('AppId','DllSurrogate','Y',None, None, None, None, 'Text',None, None, ),
('AppId','LocalService','Y',None, None, None, None, 'Text',None, None, ),
('AppId','RemoteServerName','Y',None, None, None, None, 'Formatted',None, None, ),
('AppId','RunAsInteractiveUser','Y',0,1,None, None, None, None, None, ),
('AppId','ServiceParameters','Y',None, None, None, None, 'Text',None, None, ),
('AppSearch','Property','N',None, None, None, None, 'Identifier',None, 'The property associated with a Signature',),
('AppSearch','Signature_','N',None, None, 'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
('Property','Property','N',None, None, None, None, 'Identifier',None, 'Name of property, uppercase if settable by launcher or loader.',),
('Property','Value','N',None, None, None, None, 'Text',None, 'String value for property. Never null or empty.',),
('BBControl','Type','N',None, None, None, None, 'Identifier',None, 'The type of the control.',),
('BBControl','Y','N',0,32767,None, None, None, None, 'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
('BBControl','Text','Y',None, None, None, None, 'Text',None, 'A string used to set the initial text contained within a control (if appropriate).',),
('BBControl','BBControl','N',None, None, None, None, 'Identifier',None, 'Name of the control. This name must be unique within a billboard, but can repeat on different billboard.',),
('BBControl','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this control.',),
('BBControl','Billboard_','N',None, None, 'Billboard',1,'Identifier',None, 'External key to the Billboard table, name of the billboard.',),
('BBControl','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the control.',),
('BBControl','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the control.',),
('BBControl','X','N',0,32767,None, None, None, None, 'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
('Billboard','Action','Y',None, None, None, None, 'Identifier',None, 'The name of an action. The billboard is displayed during the progress messages received from this action.',),
('Billboard','Billboard','N',None, None, None, None, 'Identifier',None, 'Name of the billboard.',),
('Billboard','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'An external key to the Feature Table. The billboard is shown only if this feature is being installed.',),
('Billboard','Ordering','Y',0,32767,None, None, None, None, 'A positive integer. If there is more than one billboard corresponding to an action they will be shown in the order defined by this column.',),
('Feature','Description','Y',None, None, None, None, 'Text',None, 'Longer descriptive text describing a visible feature item.',),
('Feature','Attributes','N',None, None, None, None, None, '0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54','Feature attributes',),
('Feature','Feature','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular feature record.',),
('Feature','Directory_','Y',None, None, 'Directory',1,'UpperCase',None, 'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.',),
('Feature','Level','N',0,32767,None, None, None, None, 'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.',),
('Feature','Title','Y',None, None, None, None, 'Text',None, 'Short text identifying a visible feature item.',),
('Feature','Display','Y',0,32767,None, None, None, None, 'Numeric sort order, used to force a specific display ordering.',),
('Feature','Feature_Parent','Y',None, None, 'Feature',1,'Identifier',None, 'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.',),
('Binary','Name','N',None, None, None, None, 'Identifier',None, 'Unique key identifying the binary data.',),
('Binary','Data','N',None, None, None, None, 'Binary',None, 'The unformatted binary data.',),
('BindImage','File_','N',None, None, 'File',1,'Identifier',None, 'The index into the File table. This must be an executable file.',),
('BindImage','Path','Y',None, None, None, None, 'Paths',None, 'A list of ; delimited paths that represent the paths to be searched for the import DLLS. The list is usually a list of properties each enclosed within square brackets [] .',),
('File','Sequence','N',1,32767,None, None, None, None, 'Sequence with respect to the media images; order must track cabinet order.',),
('File','Attributes','Y',0,32767,None, None, None, None, 'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)',),
('File','File','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.',),
('File','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the file.',),
('File','FileName','N',None, None, None, None, 'Filename',None, 'File name used for installation, may be localized. This may contain a "short name|long name" pair.',),
('File','FileSize','N',0,2147483647,None, None, None, None, 'Size of file in bytes (integer).',),
('File','Language','Y',None, None, None, None, 'Language',None, 'List of decimal language Ids, comma-separated if more than one.',),
('File','Version','Y',None, None, 'File',1,'Version',None, 'Version string for versioned files; Blank for unversioned files.',),
('CCPSearch','Signature_','N',None, None, 'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
('CheckBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to the item.',),
('CheckBox','Value','Y',None, None, None, None, 'Formatted',None, 'The value string associated with the item.',),
('Class','Description','Y',None, None, None, None, 'Text',None, 'Localized description for the Class.',),
('Class','Attributes','Y',None, 32767,None, None, None, None, 'Class registration attributes.',),
('Class','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
('Class','AppId_','Y',None, None, 'AppId',1,'Guid',None, 'Optional AppID containing DCOM information for associated application (string GUID).',),
('Class','Argument','Y',None, None, None, None, 'Formatted',None, 'optional argument for LocalServers.',),
('Class','CLSID','N',None, None, None, None, 'Guid',None, 'The CLSID of an OLE factory.',),
('Class','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('Class','Context','N',None, None, None, None, 'Identifier',None, 'The numeric server context for this server. CLSCTX_xxxx',),
('Class','DefInprocHandler','Y',None, None, None, None, 'Filename','1;2;3','Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"',),
('Class','FileTypeMask','Y',None, None, None, None, 'Text',None, 'Optional string containing information for the HKCRthis CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...',),
('Class','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.',),
('Class','IconIndex','Y',-32767,32767,None, None, None, None, 'Optional icon index.',),
('Class','ProgId_Default','Y',None, None, 'ProgId',1,'Text',None, 'Optional ProgId associated with this CLSID.',),
('Component','Condition','Y',None, None, None, None, 'Condition',None, "A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component.",),
('Component','Attributes','N',None, None, None, None, None, None, 'Remote execution option, one of irsEnum',),
('Component','Component','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular component record.',),
('Component','ComponentId','Y',None, None, None, None, 'Guid',None, 'A string GUID unique to this component, version, and language.',),
('Component','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Required key of a Directory table record. This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.',),
('Component','KeyPath','Y',None, None, 'File;Registry;ODBCDataSource',1,'Identifier',None, 'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.',),
('Icon','Name','N',None, None, None, None, 'Identifier',None, 'Primary key. Name of the icon file.',),
('Icon','Data','N',None, None, None, None, 'Binary',None, 'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.',),
('ProgId','Description','Y',None, None, None, None, 'Text',None, 'Localized description for the Program identifier.',),
('ProgId','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.',),
('ProgId','IconIndex','Y',-32767,32767,None, None, None, None, 'Optional icon index.',),
('ProgId','ProgId','N',None, None, None, None, 'Text',None, 'The Program Identifier. Primary key.',),
('ProgId','Class_','Y',None, None, 'Class',1,'Guid',None, 'The CLSID of an OLE factory corresponding to the ProgId.',),
('ProgId','ProgId_Parent','Y',None, None, 'ProgId',1,'Text',None, 'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.',),
('ComboBox','Text','Y',None, None, None, None, 'Formatted',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ComboBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.',),
('ComboBox','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ComboBox','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list.\tThe integers do not have to be consecutive.',),
('CompLocator','Type','Y',0,1,None, None, None, None, 'A boolean value that determines if the registry value is a filename or a directory location.',),
('CompLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('CompLocator','ComponentId','N',None, None, None, None, 'Guid',None, 'A string GUID unique to this component, version, and language.',),
('Complus','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the ComPlus component.',),
('Complus','ExpType','Y',0,32767,None, None, None, None, 'ComPlus component attributes.',),
('Directory','Directory','N',None, None, None, None, 'Identifier',None, 'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.',),
('Directory','DefaultDir','N',None, None, None, None, 'DefaultDir',None, "The default sub-path under parent's path.",),
('Directory','Directory_Parent','Y',None, None, 'Directory',1,'Identifier',None, 'Reference to the entry in this table specifying the default parent directory. A record parented to itself or with a Null parent represents a root of the install tree.',),
('Control','Type','N',None, None, None, None, 'Identifier',None, 'The type of the control.',),
('Control','Y','N',0,32767,None, None, None, None, 'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
('Control','Text','Y',None, None, None, None, 'Formatted',None, 'A string used to set the initial text contained within a control (if appropriate).',),
('Control','Property','Y',None, None, None, None, 'Identifier',None, 'The name of a defined property to be linked to this control. ',),
('Control','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this control.',),
('Control','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the control.',),
('Control','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the control.',),
('Control','X','N',0,32767,None, None, None, None, 'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
('Control','Control','N',None, None, None, None, 'Identifier',None, 'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs. ',),
('Control','Control_Next','Y',None, None, 'Control',2,'Identifier',None, 'The name of an other control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!',),
('Control','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'External key to the Dialog table, name of the dialog.',),
('Control','Help','Y',None, None, None, None, 'Text',None, 'The help strings used with the button. The text is optional. ',),
('Dialog','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this dialog.',),
('Dialog','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the dialog.',),
('Dialog','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the dialog.',),
('Dialog','Dialog','N',None, None, None, None, 'Identifier',None, 'Name of the dialog.',),
('Dialog','Control_Cancel','Y',None, None, 'Control',2,'Identifier',None, 'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.',),
('Dialog','Control_Default','Y',None, None, 'Control',2,'Identifier',None, 'Defines the default control. Hitting return is equivalent to pushing this button.',),
('Dialog','Control_First','N',None, None, 'Control',2,'Identifier',None, 'Defines the control that has the focus when the dialog is created.',),
('Dialog','HCentering','N',0,100,None, None, None, None, 'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.',),
('Dialog','Title','Y',None, None, None, None, 'Formatted',None, "A text string specifying the title to be displayed in the title bar of the dialog's window.",),
('Dialog','VCentering','N',0,100,None, None, None, None, 'Vertical position of the dialog on a 0-100 scale. 0 means top end, 100 means bottom end of the screen, 50 center.',),
('ControlCondition','Action','N',None, None, None, None, None, 'Default;Disable;Enable;Hide;Show','The desired action to be taken on the specified control.',),
('ControlCondition','Condition','N',None, None, None, None, 'Condition',None, 'A standard conditional statement that specifies under which conditions the action should be triggered.',),
('ControlCondition','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the dialog.',),
('ControlCondition','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control.',),
('ControlEvent','Condition','Y',None, None, None, None, 'Condition',None, 'A standard conditional statement that specifies under which conditions an event should be triggered.',),
('ControlEvent','Ordering','Y',0,2147483647,None, None, None, None, 'An integer used to order several events tied to the same control. Can be left blank.',),
('ControlEvent','Argument','N',None, None, None, None, 'Formatted',None, 'A value to be used as a modifier when triggering a particular event.',),
('ControlEvent','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the dialog.',),
('ControlEvent','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control',),
('ControlEvent','Event','N',None, None, None, None, 'Formatted',None, 'An identifier that specifies the type of the event that should take place when the user interacts with control specified by the first two entries.',),
('CreateFolder','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table.',),
('CreateFolder','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Primary key, could be foreign key into the Directory table.',),
('CustomAction','Type','N',1,16383,None, None, None, None, 'The numeric custom action type, consisting of source location, code type, entry, option flags.',),
('CustomAction','Action','N',None, None, None, None, 'Identifier',None, 'Primary key, name of action, normally appears in sequence table unless private use.',),
('CustomAction','Source','Y',None, None, None, None, 'CustomSource',None, 'The table reference of the source of the code.',),
('CustomAction','Target','Y',None, None, None, None, 'Formatted',None, 'Excecution parameter, depends on the type of custom action',),
('DrLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('DrLocator','Path','Y',None, None, None, None, 'AnyPath',None, 'The path on the user system. This is a either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.',),
('DrLocator','Depth','Y',0,32767,None, None, None, None, 'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.',),
('DrLocator','Parent','Y',None, None, None, None, 'Identifier',None, 'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.',),
('DuplicateFile','File_','N',None, None, 'File',1,'Identifier',None, 'Foreign key referencing the source file to be duplicated.',),
('DuplicateFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the duplicate file.',),
('DuplicateFile','DestFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.',),
('DuplicateFile','DestName','Y',None, None, None, None, 'Filename',None, 'Filename to be given to the duplicate file.',),
('DuplicateFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular file entry',),
('Environment','Name','N',None, None, None, None, 'Text',None, 'The name of the environmental value.',),
('Environment','Value','Y',None, None, None, None, 'Formatted',None, 'The value to set in the environmental settings.',),
('Environment','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the environmental value.',),
('Environment','Environment','N',None, None, None, None, 'Identifier',None, 'Unique identifier for the environmental variable setting',),
('Error','Error','N',0,32767,None, None, None, None, 'Integer error number, obtained from header file IError(...) macros.',),
('Error','Message','Y',None, None, None, None, 'Template',None, 'Error formatting template, obtained from user ed. or localizers.',),
('EventMapping','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the Dialog.',),
('EventMapping','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control.',),
('EventMapping','Event','N',None, None, None, None, 'Identifier',None, 'An identifier that specifies the type of the event that the control subscribes to.',),
('EventMapping','Attribute','N',None, None, None, None, 'Identifier',None, 'The name of the control attribute, that is set when this event is received.',),
('Extension','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
('Extension','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('Extension','Extension','N',None, None, None, None, 'Text',None, 'The extension associated with the table row.',),
('Extension','MIME_','Y',None, None, 'MIME',1,'Text',None, 'Optional Context identifier, typically "type/format" associated with the extension',),
('Extension','ProgId_','Y',None, None, 'ProgId',1,'Text',None, 'Optional ProgId associated with this extension.',),
('MIME','CLSID','Y',None, None, None, None, 'Guid',None, 'Optional associated CLSID.',),
('MIME','ContentType','N',None, None, None, None, 'Text',None, 'Primary key. Context identifier, typically "type/format".',),
('MIME','Extension_','N',None, None, 'Extension',1,'Text',None, 'Optional associated extension (without dot)',),
('FeatureComponents','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into Feature table.',),
('FeatureComponents','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('FileSFPCatalog','File_','N',None, None, 'File',1,'Identifier',None, 'File associated with the catalog',),
('FileSFPCatalog','SFPCatalog_','N',None, None, 'SFPCatalog',1,'Filename',None, 'Catalog associated with the file',),
('SFPCatalog','SFPCatalog','N',None, None, None, None, 'Filename',None, 'File name for the catalog.',),
('SFPCatalog','Catalog','N',None, None, None, None, 'Binary',None, 'SFP Catalog',),
('SFPCatalog','Dependency','Y',None, None, None, None, 'Formatted',None, 'Parent catalog - only used by SFP',),
('Font','File_','N',None, None, 'File',1,'Identifier',None, 'Primary key, foreign key into File table referencing font file.',),
('Font','FontTitle','Y',None, None, None, None, 'Text',None, 'Font name.',),
('IniFile','Action','N',None, None, None, None, None, '0;1;3','The type of modification to be made, one of iifEnum',),
('IniFile','Value','N',None, None, None, None, 'Formatted',None, 'The value to be written.',),
('IniFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the .INI value.',),
('IniFile','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name in which to write the information',),
('IniFile','IniFile','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('IniFile','DirProperty','Y',None, None, None, None, 'Identifier',None, 'Foreign key into the Directory table denoting the directory where the .INI file is.',),
('IniFile','Key','N',None, None, None, None, 'Formatted',None, 'The .INI file key below Section.',),
('IniFile','Section','N',None, None, None, None, 'Formatted',None, 'The .INI file Section.',),
('IniLocator','Type','Y',0,2,None, None, None, None, 'An integer value that determines if the .INI value read is a filename or a directory location or to be used as is w/o interpretation.',),
('IniLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('IniLocator','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name.',),
('IniLocator','Key','N',None, None, None, None, 'Text',None, 'Key value (followed by an equals sign in INI file).',),
('IniLocator','Section','N',None, None, None, None, 'Text',None, 'Section name within in file (within square brackets in INI file).',),
('IniLocator','Field','Y',0,32767,None, None, None, None, 'The field in the .INI line. If Field is null or 0 the entire line is read.',),
('InstallExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('InstallExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('InstallExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('InstallUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('InstallUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('InstallUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('IsolatedComponent','Component_Application','N',None, None, 'Component',1,'Identifier',None, 'Key to Component table item for application',),
('IsolatedComponent','Component_Shared','N',None, None, 'Component',1,'Identifier',None, 'Key to Component table item to be isolated',),
('LaunchCondition','Description','N',None, None, None, None, 'Formatted',None, 'Localizable text to display when condition fails and install must abort.',),
('LaunchCondition','Condition','N',None, None, None, None, 'Condition',None, 'Expression which must evaluate to TRUE in order for install to commence.',),
('ListBox','Text','Y',None, None, None, None, 'Text',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ListBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.',),
('ListBox','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ListBox','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
('ListView','Text','Y',None, None, None, None, 'Text',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ListView','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same listview.',),
('ListView','Value','N',None, None, None, None, 'Identifier',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ListView','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
('ListView','Binary_','Y',None, None, 'Binary',1,'Identifier',None, 'The name of the icon to be displayed with the icon. The binary information is looked up from the Binary Table.',),
('LockPermissions','Table','N',None, None, None, None, 'Identifier','Directory;File;Registry','Reference to another table name',),
('LockPermissions','Domain','Y',None, None, None, None, 'Formatted',None, 'Domain name for user whose permissions are being set. (usually a property)',),
('LockPermissions','LockObject','N',None, None, None, None, 'Identifier',None, 'Foreign key into Registry or File table',),
('LockPermissions','Permission','Y',-2147483647,2147483647,None, None, None, None, 'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)',),
('LockPermissions','User','N',None, None, None, None, 'Formatted',None, 'User for permissions to be set. (usually a property)',),
('Media','Source','Y',None, None, None, None, 'Property',None, 'The property defining the location of the cabinet file.',),
('Media','Cabinet','Y',None, None, None, None, 'Cabinet',None, 'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.',),
('Media','DiskId','N',1,32767,None, None, None, None, 'Primary key, integer to determine sort order for table.',),
('Media','DiskPrompt','Y',None, None, None, None, 'Text',None, 'Disk name: the visible text actually printed on the disk. This will be used to prompt the user when this disk needs to be inserted.',),
('Media','LastSequence','N',0,32767,None, None, None, None, 'File sequence number for the last file for this media.',),
('Media','VolumeLabel','Y',None, None, None, None, 'Text',None, 'The label attributed to the volume.',),
('ModuleComponents','Component','N',None, None, 'Component',1,'Identifier',None, 'Component contained in the module.',),
('ModuleComponents','Language','N',None, None, 'ModuleSignature',2,None, None, 'Default language ID for module (may be changed by transform).',),
('ModuleComponents','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'Module containing the component.',),
('ModuleSignature','Language','N',None, None, None, None, None, None, 'Default decimal language of module.',),
('ModuleSignature','Version','N',None, None, None, None, 'Version',None, 'Version of the module.',),
('ModuleSignature','ModuleID','N',None, None, None, None, 'Identifier',None, 'Module identifier (String.GUID).',),
('ModuleDependency','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'Module requiring the dependency.',),
('ModuleDependency','ModuleLanguage','N',None, None, 'ModuleSignature',2,None, None, 'Language of module requiring the dependency.',),
('ModuleDependency','RequiredID','N',None, None, None, None, None, None, 'String.GUID of required module.',),
('ModuleDependency','RequiredLanguage','N',None, None, None, None, None, None, 'LanguageID of the required module.',),
('ModuleDependency','RequiredVersion','Y',None, None, None, None, 'Version',None, 'Version of the required version.',),
('ModuleExclusion','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'String.GUID of module with exclusion requirement.',),
('ModuleExclusion','ModuleLanguage','N',None, None, 'ModuleSignature',2,None, None, 'LanguageID of module with exclusion requirement.',),
('ModuleExclusion','ExcludedID','N',None, None, None, None, None, None, 'String.GUID of excluded module.',),
('ModuleExclusion','ExcludedLanguage','N',None, None, None, None, None, None, 'Language of excluded module.',),
('ModuleExclusion','ExcludedMaxVersion','Y',None, None, None, None, 'Version',None, 'Maximum version of excluded module.',),
('ModuleExclusion','ExcludedMinVersion','Y',None, None, None, None, 'Version',None, 'Minimum version of excluded module.',),
('MoveFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry',),
('MoveFile','DestFolder','N',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
('MoveFile','DestName','Y',None, None, None, None, 'Filename',None, 'Name to be given to the original file after it is moved or copied. If blank, the destination file will be given the same name as the source file',),
('MoveFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key that uniquely identifies a particular MoveFile record',),
('MoveFile','Options','N',0,1,None, None, None, None, 'Integer value specifying the MoveFile operating mode, one of imfoEnum',),
('MoveFile','SourceFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the source directory',),
('MoveFile','SourceName','Y',None, None, None, None, 'Text',None, "Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards.",),
('MsiAssembly','Attributes','Y',None, None, None, None, None, None, 'Assembly attributes',),
('MsiAssembly','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into Feature table.',),
('MsiAssembly','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('MsiAssembly','File_Application','Y',None, None, 'File',1,'Identifier',None, 'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.',),
('MsiAssembly','File_Manifest','Y',None, None, 'File',1,'Identifier',None, 'Foreign key into the File table denoting the manifest file for the assembly.',),
('MsiAssemblyName','Name','N',None, None, None, None, 'Text',None, 'The name part of the name-value pairs for the assembly name.',),
('MsiAssemblyName','Value','N',None, None, None, None, 'Text',None, 'The value part of the name-value pairs for the assembly name.',),
('MsiAssemblyName','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('MsiDigitalCertificate','CertData','N',None, None, None, None, 'Binary',None, 'A certificate context blob for a signer certificate',),
('MsiDigitalCertificate','DigitalCertificate','N',None, None, None, None, 'Identifier',None, 'A unique identifier for the row',),
('MsiDigitalSignature','Table','N',None, None, None, None, None, 'Media','Reference to another table name (only Media table is supported)',),
('MsiDigitalSignature','DigitalCertificate_','N',None, None, 'MsiDigitalCertificate',1,'Identifier',None, 'Foreign key to MsiDigitalCertificate table identifying the signer certificate',),
('MsiDigitalSignature','Hash','Y',None, None, None, None, 'Binary',None, 'The encoded hash blob from the digital signature',),
('MsiDigitalSignature','SignObject','N',None, None, None, None, 'Text',None, 'Foreign key to Media table',),
('MsiFileHash','File_','N',None, None, 'File',1,'Identifier',None, 'Primary key, foreign key into File table referencing file with this hash',),
('MsiFileHash','Options','N',0,32767,None, None, None, None, 'Various options and attributes for this hash.',),
('MsiFileHash','HashPart1','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiFileHash','HashPart2','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiFileHash','HashPart3','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiFileHash','HashPart4','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiPatchHeaders','StreamRef','N',None, None, None, None, 'Identifier',None, 'Primary key. A unique identifier for the row.',),
('MsiPatchHeaders','Header','N',None, None, None, None, 'Binary',None, 'Binary stream. The patch header, used for patch validation.',),
('ODBCAttribute','Value','Y',None, None, None, None, 'Text',None, 'Value for ODBC driver attribute',),
('ODBCAttribute','Attribute','N',None, None, None, None, 'Text',None, 'Name of ODBC driver attribute',),
('ODBCAttribute','Driver_','N',None, None, 'ODBCDriver',1,'Identifier',None, 'Reference to ODBC driver in ODBCDriver table',),
('ODBCDriver','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for driver, non-localized',),
('ODBCDriver','File_','N',None, None, 'File',1,'Identifier',None, 'Reference to key driver file',),
('ODBCDriver','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCDriver','Driver','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized.internal token for driver',),
('ODBCDriver','File_Setup','Y',None, None, 'File',1,'Identifier',None, 'Optional reference to key driver setup DLL',),
('ODBCDataSource','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for data source',),
('ODBCDataSource','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCDataSource','DataSource','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized.internal token for data source',),
('ODBCDataSource','DriverDescription','N',None, None, None, None, 'Text',None, 'Reference to driver description, may be existing driver',),
('ODBCDataSource','Registration','N',0,1,None, None, None, None, 'Registration option: 0=machine, 1=user, others t.b.d.',),
('ODBCSourceAttribute','Value','Y',None, None, None, None, 'Text',None, 'Value for ODBC data source attribute',),
('ODBCSourceAttribute','Attribute','N',None, None, None, None, 'Text',None, 'Name of ODBC data source attribute',),
('ODBCSourceAttribute','DataSource_','N',None, None, 'ODBCDataSource',1,'Identifier',None, 'Reference to ODBC data source in ODBCDataSource table',),
('ODBCTranslator','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for translator',),
('ODBCTranslator','File_','N',None, None, 'File',1,'Identifier',None, 'Reference to key translator file',),
('ODBCTranslator','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCTranslator','File_Setup','Y',None, None, 'File',1,'Identifier',None, 'Optional reference to key translator setup DLL',),
('ODBCTranslator','Translator','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized.internal token for translator',),
('Patch','Sequence','N',0,32767,None, None, None, None, 'Primary key, sequence with respect to the media images; order must track cabinet order.',),
('Patch','Attributes','N',0,32767,None, None, None, None, 'Integer containing bit flags representing patch attributes',),
('Patch','File_','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.',),
('Patch','Header','Y',None, None, None, None, 'Binary',None, 'Binary stream. The patch header, used for patch validation.',),
('Patch','PatchSize','N',0,2147483647,None, None, None, None, 'Size of patch in bytes (integer).',),
('Patch','StreamRef_','Y',None, None, None, None, 'Identifier',None, 'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.',),
('PatchPackage','Media_','N',0,32767,None, None, None, None, 'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.',),
('PatchPackage','PatchId','N',None, None, None, None, 'Guid',None, 'A unique string GUID representing this patch.',),
('PublishComponent','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into the Feature table.',),
('PublishComponent','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table.',),
('PublishComponent','ComponentId','N',None, None, None, None, 'Guid',None, 'A string GUID that represents the component id that will be requested by the alien product.',),
('PublishComponent','AppData','Y',None, None, None, None, 'Text',None, 'This is localisable Application specific data that can be associated with a Qualified Component.',),
('PublishComponent','Qualifier','N',None, None, None, None, 'Text',None, 'This is defined only when the ComponentId column is an Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.',),
('RadioButton','Y','N',0,32767,None, None, None, None, 'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.',),
('RadioButton','Text','Y',None, None, None, None, 'Text',None, 'The visible title to be assigned to the radio button.',),
('RadioButton','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.',),
('RadioButton','Height','N',0,32767,None, None, None, None, 'The height of the button.',),
('RadioButton','Width','N',0,32767,None, None, None, None, 'The width of the button.',),
('RadioButton','X','N',0,32767,None, None, None, None, 'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.',),
('RadioButton','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this button. Selecting the button will set the associated property to this value.',),
('RadioButton','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
('RadioButton','Help','Y',None, None, None, None, 'Text',None, 'The help strings used with the button. The text is optional.',),
('Registry','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('Registry','Value','Y',None, None, None, None, 'Formatted',None, 'The registry value.',),
('Registry','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the registry value.',),
('Registry','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('Registry','Registry','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('Registry','Root','N',-1,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum.',),
('RegLocator','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('RegLocator','Type','Y',0,18,None, None, None, None, 'An integer value that determines if the registry value is a filename or a directory location or to be used as is w/o interpretation.',),
('RegLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. If the type is 0, the registry values refers a directory, and _Signature is not a foreign key.',),
('RegLocator','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('RegLocator','Root','N',0,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum.',),
('RemoveFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the file to be removed.',),
('RemoveFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular file entry',),
('RemoveFile','FileName','Y',None, None, None, None, 'WildCardFilename',None, 'Name of the file to be removed.',),
('RemoveFile','DirProperty','N',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.',),
('RemoveFile','InstallMode','N',None, None, None, None, None, '1;2;3','Installation option, one of iimEnum.',),
('RemoveIniFile','Action','N',None, None, None, None, None, '2;4','The type of modification to be made, one of iifEnum.',),
('RemoveIniFile','Value','Y',None, None, None, None, 'Formatted',None, 'The value to be deleted. The value is required when Action is iifIniRemoveTag',),
('RemoveIniFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the deletion of the .INI value.',),
('RemoveIniFile','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name in which to delete the information',),
('RemoveIniFile','DirProperty','Y',None, None, None, None, 'Identifier',None, 'Foreign key into the Directory table denoting the directory where the .INI file is.',),
('RemoveIniFile','Key','N',None, None, None, None, 'Formatted',None, 'The .INI file key below Section.',),
('RemoveIniFile','Section','N',None, None, None, None, 'Formatted',None, 'The .INI file Section.',),
('RemoveIniFile','RemoveIniFile','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('RemoveRegistry','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('RemoveRegistry','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the deletion of the registry value.',),
('RemoveRegistry','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('RemoveRegistry','Root','N',-1,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum',),
('RemoveRegistry','RemoveRegistry','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ReserveCost','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reserve a specified amount of space if this component is to be installed.',),
('ReserveCost','ReserveFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
('ReserveCost','ReserveKey','N',None, None, None, None, 'Identifier',None, 'Primary key that uniquely identifies a particular ReserveCost record',),
('ReserveCost','ReserveLocal','N',0,2147483647,None, None, None, None, 'Disk space to reserve if linked component is installed locally.',),
('ReserveCost','ReserveSource','N',0,2147483647,None, None, None, None, 'Disk space to reserve if linked component is installed to run from the source location.',),
('SelfReg','File_','N',None, None, 'File',1,'Identifier',None, 'Foreign key into the File table denoting the module that needs to be registered.',),
('SelfReg','Cost','Y',0,32767,None, None, None, None, 'The cost of registering the module.',),
('ServiceControl','Name','N',None, None, None, None, 'Formatted',None, 'Name of a service. /, \\, comma and space are invalid',),
('ServiceControl','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table that controls the startup of the service',),
('ServiceControl','Event','N',0,187,None, None, None, None, 'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete, Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete',),
('ServiceControl','ServiceControl','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ServiceControl','Arguments','Y',None, None, None, None, 'Formatted',None, 'Arguments for the service. Separate by [~].',),
('ServiceControl','Wait','Y',0,1,None, None, None, None, 'Boolean for whether to wait for the service to fully start',),
('ServiceInstall','Name','N',None, None, None, None, 'Formatted',None, 'Internal Name of the Service',),
('ServiceInstall','Description','Y',None, None, None, None, 'Text',None, 'Description of service.',),
('ServiceInstall','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table that controls the startup of the service',),
('ServiceInstall','Arguments','Y',None, None, None, None, 'Formatted',None, 'Arguments to include in every start of the service, passed to WinMain',),
('ServiceInstall','ServiceInstall','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ServiceInstall','Dependencies','Y',None, None, None, None, 'Formatted',None, 'Other services this depends on to start. Separate by [~], and end with [~][~]',),
('ServiceInstall','DisplayName','Y',None, None, None, None, 'Formatted',None, 'External Name of the Service',),
('ServiceInstall','ErrorControl','N',-2147483647,2147483647,None, None, None, None, 'Severity of error if service fails to start',),
('ServiceInstall','LoadOrderGroup','Y',None, None, None, None, 'Formatted',None, 'LoadOrderGroup',),
('ServiceInstall','Password','Y',None, None, None, None, 'Formatted',None, 'password to run service with. (with StartName)',),
('ServiceInstall','ServiceType','N',-2147483647,2147483647,None, None, None, None, 'Type of the service',),
('ServiceInstall','StartName','Y',None, None, None, None, 'Formatted',None, 'User or object name to run service as',),
('ServiceInstall','StartType','N',0,4,None, None, None, None, 'Type of the service',),
('Shortcut','Name','N',None, None, None, None, 'Filename',None, 'The name of the shortcut to be created.',),
('Shortcut','Description','Y',None, None, None, None, 'Text',None, 'The description for the shortcut.',),
('Shortcut','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table denoting the component whose selection gates the the shortcut creation/deletion.',),
('Shortcut','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Foreign key into the File table denoting the external icon file for the shortcut.',),
('Shortcut','IconIndex','Y',-32767,32767,None, None, None, None, 'The icon index for the shortcut.',),
('Shortcut','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Foreign key into the Directory table denoting the directory where the shortcut file is created.',),
('Shortcut','Target','N',None, None, None, None, 'Shortcut',None, 'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.',),
('Shortcut','Arguments','Y',None, None, None, None, 'Formatted',None, 'The command-line arguments for the shortcut.',),
('Shortcut','Shortcut','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('Shortcut','Hotkey','Y',0,32767,None, None, None, None, 'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte. ',),
('Shortcut','ShowCmd','Y',None, None, None, None, None, '1;3;7','The show command for the application window.The following values may be used.',),
('Shortcut','WkDir','Y',None, None, None, None, 'Identifier',None, 'Name of property defining location of working directory.',),
('Signature','FileName','N',None, None, None, None, 'Filename',None, 'The name of the file. This may contain a "short name|long name" pair.',),
('Signature','Signature','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature represents a unique file signature.',),
('Signature','Languages','Y',None, None, None, None, 'Language',None, 'The languages supported by the file.',),
('Signature','MaxDate','Y',0,2147483647,None, None, None, None, 'The maximum creation date of the file.',),
('Signature','MaxSize','Y',0,2147483647,None, None, None, None, 'The maximum size of the file. ',),
('Signature','MaxVersion','Y',None, None, None, None, 'Text',None, 'The maximum version of the file.',),
('Signature','MinDate','Y',0,2147483647,None, None, None, None, 'The minimum creation date of the file.',),
('Signature','MinSize','Y',0,2147483647,None, None, None, None, 'The minimum size of the file.',),
('Signature','MinVersion','Y',None, None, None, None, 'Text',None, 'The minimum version of the file.',),
('TextStyle','TextStyle','N',None, None, None, None, 'Identifier',None, 'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.',),
('TextStyle','Color','Y',0,16777215,None, None, None, None, 'An integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).',),
('TextStyle','FaceName','N',None, None, None, None, 'Text',None, 'A string indicating the name of the font used. Required. The string must be at most 31 characters long.',),
('TextStyle','Size','N',0,32767,None, None, None, None, 'The size of the font used. This size is given in our units (1/12 of the system font height). Assuming that the system font is set to 12 point size, this is equivalent to the point size.',),
('TextStyle','StyleBits','Y',0,15,None, None, None, None, 'A combination of style bits.',),
('TypeLib','Description','Y',None, None, None, None, 'Text',None, None, ),
('TypeLib','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.',),
('TypeLib','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('TypeLib','Directory_','Y',None, None, 'Directory',1,'Identifier',None, 'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.',),
('TypeLib','Language','N',0,32767,None, None, None, None, 'The language of the library.',),
('TypeLib','Version','Y',0,16777215,None, None, None, None, 'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits. ',),
('TypeLib','Cost','Y',0,2147483647,None, None, None, None, 'The cost associated with the registration of the typelib. This column is currently optional.',),
('TypeLib','LibID','N',None, None, None, None, 'Guid',None, 'The GUID that represents the library.',),
('UIText','Text','Y',None, None, None, None, 'Text',None, 'The localized version of the string.',),
('UIText','Key','N',None, None, None, None, 'Identifier',None, 'A unique key that identifies the particular string.',),
('Upgrade','Attributes','N',0,2147483647,None, None, None, None, 'The attributes of this product set.',),
('Upgrade','Language','Y',None, None, None, None, 'Language',None, 'A comma-separated list of languages for either products in this set or products not in this set.',),
('Upgrade','ActionProperty','N',None, None, None, None, 'UpperCase',None, 'The property to set when a product in this set is found.',),
('Upgrade','Remove','Y',None, None, None, None, 'Formatted',None, 'The list of features to remove when uninstalling a product from this set. The default is "ALL".',),
('Upgrade','UpgradeCode','N',None, None, None, None, 'Guid',None, 'The UpgradeCode GUID belonging to the products in this set.',),
('Upgrade','VersionMax','Y',None, None, None, None, 'Text',None, 'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
('Upgrade','VersionMin','Y',None, None, None, None, 'Text',None, 'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
('Verb','Sequence','Y',0,32767,None, None, None, None, 'Order within the verbs for a particular extension. Also used simply to specify the default verb.',),
('Verb','Argument','Y',None, None, None, None, 'Formatted',None, 'Optional value for the command arguments.',),
('Verb','Extension_','N',None, None, 'Extension',1,'Text',None, 'The extension associated with the table row.',),
('Verb','Verb','N',None, None, None, None, 'Text',None, 'The verb for the command.',),
('Verb','Command','Y',None, None, None, None, 'Formatted',None, 'The command text.',),
]
| apache-2.0 |
zedr/django | django/contrib/auth/management/commands/createsuperuser.py | 63 | 7419 | """
Management utility to create superusers.
"""
from __future__ import unicode_literals
import getpass
import sys
from optparse import make_option
from django.contrib.auth import get_user_model
from django.contrib.auth.management import get_default_username
from django.core import exceptions
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
from django.utils.encoding import force_str
from django.utils.six.moves import input
from django.utils.text import capfirst
class NotRunningInTTYException(Exception):
    """Raised when stdin is not an interactive TTY, so prompting for input is impossible."""
    pass
class Command(BaseCommand):
    """
    Management command that creates a superuser account.

    Works with custom user models: the username field and the extra
    required fields are read from the configured user model at
    instantiation time, so the available command-line options vary
    per project.
    """
    def __init__(self, *args, **kwargs):
        # Options are defined in an __init__ method to support swapping out
        # custom user models in tests.
        super(Command, self).__init__(*args, **kwargs)
        self.UserModel = get_user_model()
        self.username_field = self.UserModel._meta.get_field(self.UserModel.USERNAME_FIELD)
        # Build the option list dynamically: one option for the username
        # field plus one per entry in the user model's REQUIRED_FIELDS.
        self.option_list = BaseCommand.option_list + (
            make_option('--%s' % self.UserModel.USERNAME_FIELD, dest=self.UserModel.USERNAME_FIELD, default=None,
                help='Specifies the login for the superuser.'),
            make_option('--noinput', action='store_false', dest='interactive', default=True,
                help=('Tells Django to NOT prompt the user for input of any kind. '
                    'You must use --%s with --noinput, along with an option for '
                    'any other required field. Superusers created with --noinput will '
                    ' not be able to log in until they\'re given a valid password.' %
                    self.UserModel.USERNAME_FIELD)),
            make_option('--database', action='store', dest='database',
                default=DEFAULT_DB_ALIAS, help='Specifies the database to use. Default is "default".'),
        ) + tuple(
            make_option('--%s' % field, dest=field, default=None,
                help='Specifies the %s for the superuser.' % field)
            for field in self.UserModel.REQUIRED_FIELDS
        )
    # Class-level fallback; each instance replaces this in __init__ above.
    option_list = BaseCommand.option_list
    help = 'Used to create a superuser.'
    def execute(self, *args, **options):
        # 'stdin' may be injected by tests to simulate (non-)TTY input.
        self.stdin = options.get('stdin', sys.stdin)  # Used for testing
        return super(Command, self).execute(*args, **options)
    def handle(self, *args, **options):
        """
        Create the superuser either from command-line options
        (--noinput mode) or by prompting interactively on a TTY.
        """
        username = options.get(self.UserModel.USERNAME_FIELD, None)
        interactive = options.get('interactive')
        verbosity = int(options.get('verbosity', 1))
        database = options.get('database')
        # If not provided, create the user with an unusable password
        password = None
        user_data = {}
        # Do quick and dirty validation if --noinput
        if not interactive:
            try:
                if not username:
                    raise CommandError("You must use --%s with --noinput." %
                            self.UserModel.USERNAME_FIELD)
                username = self.username_field.clean(username, None)
                # Every REQUIRED_FIELDS entry must be supplied as an option
                # in non-interactive mode; each value is field-validated.
                for field_name in self.UserModel.REQUIRED_FIELDS:
                    if options.get(field_name):
                        field = self.UserModel._meta.get_field(field_name)
                        user_data[field_name] = field.clean(options[field_name], None)
                    else:
                        raise CommandError("You must use --%s with --noinput." % field_name)
            except exceptions.ValidationError as e:
                raise CommandError('; '.join(e.messages))
        else:
            # Prompt for username/password, and any other required fields.
            # Enclose this whole thing in a try/except to trap for a
            # keyboard interrupt and exit gracefully.
            default_username = get_default_username()
            try:
                if hasattr(self.stdin, 'isatty') and not self.stdin.isatty():
                    raise NotRunningInTTYException("Not running in a TTY")
                # Get a username
                verbose_field_name = self.username_field.verbose_name
                # Loop until a valid, not-already-taken username is entered.
                while username is None:
                    if not username:
                        input_msg = capfirst(verbose_field_name)
                        if default_username:
                            input_msg = "%s (leave blank to use '%s')" % (
                                input_msg, default_username)
                        raw_value = input(force_str('%s: ' % input_msg))
                    if default_username and raw_value == '':
                        raw_value = default_username
                    try:
                        username = self.username_field.clean(raw_value, None)
                    except exceptions.ValidationError as e:
                        self.stderr.write("Error: %s" % '; '.join(e.messages))
                        username = None
                        continue
                    # Reject usernames already present in the target database.
                    try:
                        self.UserModel._default_manager.db_manager(database).get_by_natural_key(username)
                    except self.UserModel.DoesNotExist:
                        pass
                    else:
                        self.stderr.write("Error: That %s is already taken." %
                                verbose_field_name)
                        username = None
                # Prompt for each remaining required field until it validates.
                for field_name in self.UserModel.REQUIRED_FIELDS:
                    field = self.UserModel._meta.get_field(field_name)
                    user_data[field_name] = options.get(field_name)
                    while user_data[field_name] is None:
                        raw_value = input(force_str('%s: ' % capfirst(field.verbose_name)))
                        try:
                            user_data[field_name] = field.clean(raw_value, None)
                        except exceptions.ValidationError as e:
                            self.stderr.write("Error: %s" % '; '.join(e.messages))
                            user_data[field_name] = None
                # Get a password
                while password is None:
                    if not password:
                        password = getpass.getpass()
                        password2 = getpass.getpass(force_str('Password (again): '))
                        if password != password2:
                            self.stderr.write("Error: Your passwords didn't match.")
                            password = None
                            continue
                    if password.strip() == '':
                        self.stderr.write("Error: Blank passwords aren't allowed.")
                        password = None
                        continue
            except KeyboardInterrupt:
                self.stderr.write("\nOperation cancelled.")
                sys.exit(1)
            except NotRunningInTTYException:
                # Non-TTY stdin (e.g. piped input): skip creation gracefully.
                self.stdout.write(
                    "Superuser creation skipped due to not running in a TTY. "
                    "You can run `manage.py createsuperuser` in your project "
                    "to create one manually."
                )
        # username is falsy when creation was skipped (non-TTY case above).
        if username:
            user_data[self.UserModel.USERNAME_FIELD] = username
            user_data['password'] = password
            self.UserModel._default_manager.db_manager(database).create_superuser(**user_data)
            if verbosity >= 1:
                self.stdout.write("Superuser created successfully.")
| bsd-3-clause |
ivandevp/django | django/templatetags/future.py | 129 | 2039 | import warnings
from django.template import Library, defaulttags
from django.utils.deprecation import RemovedInDjango110Warning
# Tag library instance that the tag functions below register themselves with.
register = Library()
@register.tag
def cycle(parser, token):
    """
    Deprecated `future`-library alias for the default `cycle` tag.

    Auto-escaping became the default behaviour, so this version is
    identical to the built-in tag; loading it from `future` now only
    emits a deprecation warning before delegating.

    Escaping can still be controlled per-block or per-variable::

        {% autoescape off %}
        {% cycle var1 var2 var3 as somecycle %}
        {% autoescape %}

        {% cycle var1 var2|safe var3|safe as somecycle %}
    """
    message = (
        "Loading the `cycle` tag from the `future` library is deprecated and "
        "will be removed in Django 1.10. Use the default `cycle` tag instead."
    )
    warnings.warn(message, RemovedInDjango110Warning)
    return defaulttags.cycle(parser, token)
@register.tag
def firstof(parser, token):
    """
    Deprecated `future`-library alias for the default `firstof` tag.

    Auto-escaping became the default behaviour, so this version is
    identical to the built-in tag; loading it from `future` now only
    emits a deprecation warning before delegating.

    `firstof` renders the first truthy argument, equivalent to a chain
    of ``{% if %}/{% elif %}`` clauses. Escaping can still be disabled
    for the whole tag::

        {% autoescape off %}
        {% firstof var1 var2 var3 "<strong>fallback value</strong>" %}
        {% autoescape %}

    or per-argument via the ``|safe`` filter.
    """
    message = (
        "Loading the `firstof` tag from the `future` library is deprecated and "
        "will be removed in Django 1.10. Use the default `firstof` tag instead."
    )
    warnings.warn(message, RemovedInDjango110Warning)
    return defaulttags.firstof(parser, token)
| bsd-3-clause |
praekelt/vumi-go | go/apps/tests/helpers.py | 1 | 7668 | from twisted.internet.defer import inlineCallbacks, returnValue
from zope.interface import implements
from vumi.tests.helpers import (
MessageDispatchHelper, proxyable, generate_proxies, maybe_async, IHelper)
from go.vumitools.api import VumiApiCommand, VumiApiEvent
from go.vumitools.tests.helpers import GoMessageHelper, VumiApiHelper
class ApplicationHelper(object):
    """
    Test helper for creating conversation-related fixtures (groups,
    contacts, conversations, channels) through a ``VumiApiHelper``.

    Methods decorated with ``@maybe_async`` run either synchronously or
    as Twisted coroutines depending on ``vumi_helper.is_sync``.
    """
    implements(IHelper)
    def __init__(self, conversation_type, vumi_helper):
        # NOTE(review): conversation_type appears to be a unicode
        # conversation-type identifier (see AppWorkerHelper) -- confirm.
        self.is_sync = vumi_helper.is_sync
        self._conversation_type = conversation_type
        self.vumi_helper = vumi_helper
    def setup(self):
        # No setup required; present to satisfy the IHelper interface.
        pass
    def cleanup(self):
        # No cleanup required; present to satisfy the IHelper interface.
        pass
    @proxyable
    @maybe_async
    def create_group_with_contacts(self, group_name, contact_count):
        """Create a group containing ``contact_count`` numbered contacts."""
        group = yield self.create_group(group_name)
        for i in range(contact_count):
            yield self.create_contact(
                msisdn=u'+27831234567{0}'.format(i), groups=[group],
                name=u"Contact", surname=u"%s" % (i,))
        returnValue(group)
    @proxyable
    @maybe_async
    def create_group(self, group_name):
        """Create and return a new contact group for the default user."""
        user_helper = yield self.vumi_helper.get_or_create_user()
        contact_store = user_helper.user_api.contact_store
        group = yield contact_store.new_group(group_name)
        returnValue(group)
    @proxyable
    @maybe_async
    def create_contact(self, msisdn, **kw):
        """Create and return a contact, defaulting name/surname if omitted."""
        kw.setdefault('name', "First")
        kw.setdefault('surname', "Last")
        user_helper = yield self.vumi_helper.get_or_create_user()
        contact_store = user_helper.user_api.contact_store
        contact = yield contact_store.new_contact(msisdn=msisdn, **kw)
        returnValue(contact)
    @proxyable
    @maybe_async
    def create_conversation(self, started=False, channel=None, **conv_kw):
        """
        Create and return a conversation; if ``channel`` is given, also
        add routing-table entries in both directions between the
        conversation's connector and the channel's connector.
        """
        user_helper = yield self.vumi_helper.get_or_create_user()
        conversation = yield user_helper.create_conversation(
            self._conversation_type, started=started, **conv_kw)
        if channel is not None:
            # NOTE(review): unlike other helper calls here, this one is
            # not yielded -- confirm get_user_account() is synchronous.
            user_account = user_helper.get_user_account()
            rt = user_account.routing_table
            rt.add_entry(
                conversation.get_connector(), 'default',
                channel.get_connector(), 'default')
            rt.add_entry(
                channel.get_connector(), 'default',
                conversation.get_connector(), 'default')
            yield user_account.save()
        returnValue(conversation)
    @proxyable
    @maybe_async
    def create_channel(self, metadata=None, supports_generic_sends=None):
        """
        Acquire the (u"pool", u"tag") tag for the default user and return
        the resulting channel, optionally recording generic-send support
        in the tagpool metadata first.
        """
        if supports_generic_sends is not None:
            if metadata is None:
                metadata = {}
            supports = metadata.setdefault('supports', {})
            supports['generic_sends'] = supports_generic_sends
        yield self.vumi_helper.setup_tagpool(u"pool", [u"tag"], metadata)
        user_helper = yield self.vumi_helper.get_or_create_user()
        yield user_helper.add_tagpool_permission(u"pool")
        yield user_helper.user_api.acquire_specific_tag((u"pool", u"tag"))
        channel = yield user_helper.user_api.get_channel((u"pool", u"tag"))
        returnValue(channel)
    @proxyable
    @maybe_async
    def get_conversation(self, conversation_key):
        """Fetch an existing conversation by key for the default user."""
        user_helper = yield self.vumi_helper.get_or_create_user()
        conversation = yield user_helper.get_conversation(conversation_key)
        returnValue(conversation)
class AppWorkerHelper(object):
    """
    Top-level helper for application-worker tests.

    Bundles a ``VumiApiHelper``, an ``ApplicationHelper``, a message
    helper, a worker helper and a dispatch helper, and proxies their
    public methods onto itself so a test interacts with one object.
    """
    implements(IHelper)
    def __init__(self, worker_class, vumi_helper=None, **msg_helper_args):
        self._worker_class = worker_class
        if vumi_helper is None:
            vumi_helper = VumiApiHelper()
        self.vumi_helper = vumi_helper
        self._app_helper = ApplicationHelper(
            self._conversation_type(), self.vumi_helper)
        self.msg_helper = GoMessageHelper(**msg_helper_args)
        self.transport_name = self.msg_helper.transport_name
        self.worker_helper = self.vumi_helper.get_worker_helper(
            self.transport_name)
        self.dispatch_helper = MessageDispatchHelper(
            self.msg_helper, self.worker_helper)
        # Proxy methods from our helpers.
        generate_proxies(self, self._app_helper)
        generate_proxies(self, self.msg_helper)
        generate_proxies(self, self.worker_helper)
        generate_proxies(self, self.dispatch_helper)
    def _worker_name(self):
        # Worker name as declared on the worker class under test.
        return self._worker_class.worker_name
    def _conversation_type(self):
        # This is a guess based on worker_name.
        # TODO: We need a better way to do this, probably involving either the
        # conversation definition or go.config.
        return self._worker_name().rpartition('_')[0].decode('utf-8')
    def setup(self):
        # The vumi_api is attached later, in get_app_worker().
        return self.vumi_helper.setup(setup_vumi_api=False)
    def cleanup(self):
        return self.vumi_helper.cleanup()
    @inlineCallbacks
    def get_app_worker(self, config=None, start=True, extra_worker=False):
        """Build (and optionally start) the application worker under test."""
        # Note: We assume that this is called exactly once per test.
        config = self.vumi_helper.mk_config(config or {})
        config.setdefault('worker_name', self._worker_name())
        config.setdefault('transport_name', self.msg_helper.transport_name)
        worker = yield self.get_worker(self._worker_class, config, start)
        # Set up our other bits of helper.
        if not extra_worker:
            self.vumi_helper.set_vumi_api(worker.vumi_api)
            self.msg_helper.mdb = worker.vumi_api.mdb
        returnValue(worker)
    @inlineCallbacks
    def start_conversation(self, conversation):
        """Start ``conversation`` and deliver the resulting API commands."""
        assert self._get_pending_commands() == [], (
            "Found pending commands while starting conversation, aborting.")
        yield conversation.start()
        yield self.dispatch_commands_to_app()
    @inlineCallbacks
    def stop_conversation(self, conversation):
        """Stop ``conversation`` and deliver the resulting API commands."""
        assert self._get_pending_commands() == [], (
            "Found pending commands while stopping conversation, aborting.")
        yield conversation.stop_conversation()
        yield self.dispatch_commands_to_app()
    def _get_pending_commands(self):
        # API commands published to 'vumi.api' but not yet forwarded.
        return self.worker_helper.get_dispatched('vumi', 'api', VumiApiCommand)
    @inlineCallbacks
    def dispatch_commands_to_app(self):
        """Forward all pending API commands to the worker's control queue."""
        pending_commands = self._get_pending_commands()
        self.worker_helper._clear_dispatched('vumi', 'api')
        for command in pending_commands:
            yield self.worker_helper.dispatch_raw(
                "%s.control" % (self._worker_name(),), command)
    @inlineCallbacks
    def dispatch_command(self, command, *args, **kw):
        """Publish a single API command and deliver it to the worker."""
        cmd = VumiApiCommand.command(
            self._worker_name(), command, *args, **kw)
        yield self.worker_helper.dispatch_raw('vumi.api', cmd)
        yield self.dispatch_commands_to_app()
    def get_published_metrics(self, worker):
        """Return published metrics as a flat list of (name, value) pairs."""
        # ``worker`` is accepted for API symmetry but not used here.
        metrics = []
        for metric_msg in self.worker_helper.get_dispatched_metrics():
            for name, _aggs, data in metric_msg:
                for _time, value in data:
                    metrics.append((name, value))
        return metrics
    def get_published_metrics_with_aggs(self, worker):
        """Return published metrics as (name, value, aggregator) triples."""
        # ``worker`` is accepted for API symmetry but not used here.
        metrics = []
        for metric_msg in self.worker_helper.get_dispatched_metrics():
            for name, aggs, data in metric_msg:
                for _time, value in data:
                    for agg in aggs:
                        metrics.append((name, value, agg))
        return metrics
    def get_dispatched_app_events(self):
        # Events the application worker has published to the event queue.
        return self.worker_helper.get_dispatched('vumi', 'event', VumiApiEvent)
| bsd-3-clause |
infoxchange/lettuce | tests/integration/lib/Django-1.3/django/contrib/messages/tests/fallback.py | 311 | 6978 | from django.contrib.messages import constants
from django.contrib.messages.storage.fallback import FallbackStorage, \
CookieStorage
from django.contrib.messages.tests.base import BaseTest
from django.contrib.messages.tests.cookie import set_cookie_data, \
stored_cookie_messages_count
from django.contrib.messages.tests.session import set_session_data, \
stored_session_messages_count
class FallbackTest(BaseTest):
    """Exercise FallbackStorage: messages are written to the cookie backend
    first and spill over into the session backend when they do not fit."""
    storage_class = FallbackStorage

    def get_request(self):
        # Use a plain dict as the session so the session backend can store
        # data without a real session engine being configured.
        self.session = {}
        request = super(FallbackTest, self).get_request()
        request.session = self.session
        return request

    def get_cookie_storage(self, storage):
        # FallbackStorage.storages is ordered (cookie, session).
        return storage.storages[-2]

    def get_session_storage(self, storage):
        return storage.storages[-1]

    def stored_cookie_messages_count(self, storage, response):
        """Number of messages persisted by the cookie backend."""
        return stored_cookie_messages_count(self.get_cookie_storage(storage),
                                            response)

    def stored_session_messages_count(self, storage, response):
        """Number of messages persisted by the session backend."""
        return stored_session_messages_count(self.get_session_storage(storage))

    def stored_messages_count(self, storage, response):
        """
        Return the storage totals from both cookie and session backends.
        """
        total = (self.stored_cookie_messages_count(storage, response) +
                 self.stored_session_messages_count(storage, response))
        return total

    def test_get(self):
        request = self.get_request()
        storage = self.storage_class(request)
        cookie_storage = self.get_cookie_storage(storage)
        # Set initial cookie data.
        example_messages = [str(i) for i in range(5)]
        set_cookie_data(cookie_storage, example_messages)
        # Overwrite the _get method of the fallback storage to prove it is not
        # used (it would cause a TypeError: 'NoneType' object is not callable).
        self.get_session_storage(storage)._get = None
        # Test that the message actually contains what we expect.
        self.assertEqual(list(storage), example_messages)

    def test_get_empty(self):
        request = self.get_request()
        storage = self.storage_class(request)
        # Overwrite the _get method of the fallback storage to prove it is not
        # used (it would cause a TypeError: 'NoneType' object is not callable).
        self.get_session_storage(storage)._get = None
        # Test that the message actually contains what we expect.
        self.assertEqual(list(storage), [])

    def test_get_fallback(self):
        request = self.get_request()
        storage = self.storage_class(request)
        cookie_storage = self.get_cookie_storage(storage)
        session_storage = self.get_session_storage(storage)
        # Set initial cookie and session data; the not_finished marker tells
        # the cookie backend that more messages live in the next storage.
        example_messages = [str(i) for i in range(5)]
        set_cookie_data(cookie_storage, example_messages[:4] +
                        [CookieStorage.not_finished])
        set_session_data(session_storage, example_messages[4:])
        # Test that the message actually contains what we expect.
        self.assertEqual(list(storage), example_messages)

    def test_get_fallback_only(self):
        request = self.get_request()
        storage = self.storage_class(request)
        cookie_storage = self.get_cookie_storage(storage)
        session_storage = self.get_session_storage(storage)
        # Set initial cookie and session data: the cookie holds nothing but
        # the continuation marker, so everything comes from the session.
        example_messages = [str(i) for i in range(5)]
        set_cookie_data(cookie_storage, [CookieStorage.not_finished],
                        encode_empty=True)
        set_session_data(session_storage, example_messages)
        # Test that the message actually contains what we expect.
        self.assertEqual(list(storage), example_messages)

    def test_flush_used_backends(self):
        request = self.get_request()
        storage = self.storage_class(request)
        cookie_storage = self.get_cookie_storage(storage)
        session_storage = self.get_session_storage(storage)
        # Set initial cookie and session data.
        set_cookie_data(cookie_storage, ['cookie', CookieStorage.not_finished])
        set_session_data(session_storage, ['session'])
        # When updating, previously used but no longer needed backends are
        # flushed.
        response = self.get_response()
        list(storage)
        storage.update(response)
        session_storing = self.stored_session_messages_count(storage, response)
        self.assertEqual(session_storing, 0)

    def test_no_fallback(self):
        """
        Confirms that:
        (1) A short number of messages whose data size doesn't exceed what is
        allowed in a cookie will all be stored in the CookieBackend.
        (2) If the CookieBackend can store all messages, the SessionBackend
        won't be written to at all.
        """
        storage = self.get_storage()
        response = self.get_response()
        # Overwrite the _store method of the fallback storage to prove it isn't
        # used (it would cause a TypeError: 'NoneType' object is not callable).
        self.get_session_storage(storage)._store = None
        for i in range(5):
            storage.add(constants.INFO, str(i) * 100)
        storage.update(response)
        cookie_storing = self.stored_cookie_messages_count(storage, response)
        self.assertEqual(cookie_storing, 5)
        session_storing = self.stored_session_messages_count(storage, response)
        self.assertEqual(session_storing, 0)

    def test_session_fallback(self):
        """
        Confirms that, if the data exceeds what is allowed in a cookie,
        messages which did not fit are stored in the SessionBackend.
        """
        storage = self.get_storage()
        response = self.get_response()
        # see comment in CookieTest.test_cookie_max_length for this formula
        msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37)
        for i in range(5):
            storage.add(constants.INFO, str(i) * msg_size)
        storage.update(response)
        cookie_storing = self.stored_cookie_messages_count(storage, response)
        self.assertEqual(cookie_storing, 4)
        session_storing = self.stored_session_messages_count(storage, response)
        self.assertEqual(session_storing, 1)

    def test_session_fallback_only(self):
        """
        Confirms that large messages, none of which fit in a cookie, are stored
        in the SessionBackend (and nothing is stored in the CookieBackend).
        """
        storage = self.get_storage()
        response = self.get_response()
        storage.add(constants.INFO, 'x' * 5000)
        storage.update(response)
        cookie_storing = self.stored_cookie_messages_count(storage, response)
        self.assertEqual(cookie_storing, 0)
        session_storing = self.stored_session_messages_count(storage, response)
        self.assertEqual(session_storing, 1)
| gpl-3.0 |
tvenkat/askbot-devel | askbot/utils/forms.py | 2 | 8178 | import re
from django import forms
from django.http import str_to_unicode
from django.contrib.auth.models import User
from django.conf import settings
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from askbot.conf import settings as askbot_settings
from askbot.utils.slug import slugify
from askbot import const
import logging
import urllib
# Default redirect target: the forum root under the configured ASKBOT_URL.
DEFAULT_NEXT = '/' + getattr(settings, 'ASKBOT_URL')
def clean_next(next, default = None):
    """Sanitize a redirect target: only same-site paths (starting with '/')
    are allowed; anything else falls back to *default* or DEFAULT_NEXT.
    """
    # NOTE: ``next`` shadows the builtin of the same name (legacy API kept
    # for compatibility with existing callers).
    if next is None or not next.startswith('/'):
        if default:
            return default
        else:
            return DEFAULT_NEXT
    # Python 2 era decoding: percent-unquote, then coerce to unicode.
    next = str_to_unicode(urllib.unquote(next), 'utf-8')
    next = next.strip()
    logging.debug('next url is %s' % next)
    return next
def get_next_url(request, default = None):
    """Read the 'next' parameter from the request and sanitize it."""
    # request.REQUEST merges GET and POST (removed in modern Django).
    return clean_next(request.REQUEST.get('next'), default)
class StrippedNonEmptyCharField(forms.CharField):
    """CharField that strips surrounding whitespace and, when required,
    rejects values that are empty after stripping."""

    def clean(self, value):
        """Return the stripped value.

        Raises forms.ValidationError if the field is required and the
        stripped value is empty.
        """
        # Guard against None: an absent optional value would otherwise
        # raise AttributeError on .strip().
        value = (value or '').strip()
        if self.required and value == '':
            raise forms.ValidationError(_('this field is required'))
        return value
class NextUrlField(forms.CharField):
    """Hidden, optional form field carrying the post-action redirect URL."""

    def __init__(self):
        super(
            NextUrlField,
            self
        ).__init__(
            max_length = 255,
            widget = forms.HiddenInput(),
            required = False
        )

    def clean(self,value):
        # Delegate sanitization to the module-level clean_next() helper.
        return clean_next(value)
# CSS attributes shared by the login-form field widgets defined below.
login_form_widget_attrs = { 'class': 'required login' }
class UserNameField(StrippedNonEmptyCharField):
    """Username field with existence/uniqueness checks against ``db_model``.

    With must_exist=True the name must belong to an existing user (login);
    with must_exist=False the name must be free (registration). Setting
    ``user_instance`` allows a user to keep their own name when editing.
    """
    RESERVED_NAMES = (u'fuck', u'shit', u'ass', u'sex', u'add',
                      u'edit', u'save', u'delete', u'manage', u'update', 'remove', 'new')

    def __init__(
        self,
        db_model=User,
        db_field='username',
        must_exist=False,
        skip_clean=False,
        label=_('Choose a screen name'),
        **kw
    ):
        self.must_exist = must_exist
        self.skip_clean = skip_clean
        self.db_model = db_model
        self.db_field = db_field
        self.user_instance = None
        error_messages={
            'required': _('user name is required'),
            'taken': _('sorry, this name is taken, please choose another'),
            'forbidden': _('sorry, this name is not allowed, please choose another'),
            'missing': _('sorry, there is no user with this name'),
            'multiple-taken': _('sorry, we have a serious error - user name is taken by several users'),
            'invalid': _('user name can only consist of letters, empty space and underscore'),
            'meaningless': _('please use at least some alphabetic characters in the user name'),
        }
        # Callers may override individual default error messages.
        if 'error_messages' in kw:
            error_messages.update(kw['error_messages'])
            del kw['error_messages']

        super(UserNameField,self).__init__(max_length=30,
                widget=forms.TextInput(attrs=login_form_widget_attrs),
                label=label,
                error_messages=error_messages,
                **kw
            )

    def clean(self,username):
        """ validate username """
        if self.skip_clean == True:
            logging.debug('username accepted with no validation')
            return username
        if self.user_instance is None:
            pass
        elif isinstance(self.user_instance, User):
            if username == self.user_instance.username:
                # Editing a profile: keeping one's own name is always valid.
                logging.debug('username valid')
                return username
        else:
            raise TypeError('user instance must be of type User')

        try:
            username = super(UserNameField, self).clean(username)
        except forms.ValidationError:
            raise forms.ValidationError(self.error_messages['required'])

        # Syntactic checks: allowed characters, reserved words, and at least
        # some alphabetic content (slugify result must be non-empty).
        username_regex = re.compile(const.USERNAME_REGEX_STRING, re.UNICODE)
        if self.required and not username_regex.search(username):
            raise forms.ValidationError(self.error_messages['invalid'])
        if username in self.RESERVED_NAMES:
            raise forms.ValidationError(self.error_messages['forbidden'])
        if slugify(username, force_unidecode = True) == '':
            raise forms.ValidationError(self.error_messages['meaningless'])

        # Database check: interpretation of a hit/miss depends on must_exist.
        try:
            user = self.db_model.objects.get(
                        **{'%s' % self.db_field : username}
                    )
            if user:
                if self.must_exist:
                    logging.debug('user exists and name accepted b/c here we validate existing user')
                    return username
                else:
                    raise forms.ValidationError(self.error_messages['taken'])
        except self.db_model.DoesNotExist:
            if self.must_exist:
                logging.debug('user must exist, so raising the error')
                raise forms.ValidationError(self.error_messages['missing'])
            else:
                logging.debug('user name valid!')
                return username
        except self.db_model.MultipleObjectsReturned:
            logging.debug('error - user with this name already exists')
            raise forms.ValidationError(self.error_messages['multiple-taken'])
class UserEmailField(forms.EmailField):
    """Email field that optionally enforces site-wide email uniqueness
    (controlled by askbot_settings.EMAIL_UNIQUE)."""

    def __init__(self,skip_clean=False,**kw):
        self.skip_clean = skip_clean
        super(UserEmailField,self).__init__(widget=forms.TextInput(attrs=dict(login_form_widget_attrs,
            maxlength=200)), label=mark_safe(_('Your email <i>(never shared)</i>')),
            error_messages={'required':_('email address is required'),
                            'invalid':_('please enter a valid email address'),
                            'taken':_('this email is already used by someone else, please choose another'),
                           },
            **kw
            )

    def clean(self,email):
        """ validate if email exist in database
        from legacy register
        return: raise error if it exist """
        email = super(UserEmailField,self).clean(email.strip())
        if self.skip_clean:
            return email
        if askbot_settings.EMAIL_UNIQUE == True:
            try:
                user = User.objects.get(email = email)
                logging.debug('email taken')
                raise forms.ValidationError(self.error_messages['taken'])
            except User.DoesNotExist:
                logging.debug('email valid')
                return email
            except User.MultipleObjectsReturned:
                # Data problem: several accounts already share this address.
                logging.debug('email taken many times over')
                raise forms.ValidationError(self.error_messages['taken'])
        else:
            return email
class SetPasswordForm(forms.Form):
    """Two-field password form that validates both entries match."""
    password1 = forms.CharField(widget=forms.PasswordInput(attrs=login_form_widget_attrs),
                                label=_('Password'),
                                error_messages={'required':_('password is required')},
                                )
    password2 = forms.CharField(widget=forms.PasswordInput(attrs=login_form_widget_attrs),
                                label=mark_safe(_('Password <i>(please retype)</i>')),
                                error_messages={'required':_('please, retype your password'),
                                                'nomatch':_('sorry, entered passwords did not match, please try again')},
                                )

    def __init__(self, data=None, user=None, *args, **kwargs):
        # ``user`` is accepted for API compatibility but unused here.
        super(SetPasswordForm, self).__init__(data, *args, **kwargs)

    def clean_password2(self):
        """
        Validates that the two password inputs match.
        """
        if 'password1' in self.cleaned_data:
            if self.cleaned_data['password1'] == self.cleaned_data['password2']:
                # Expose the agreed password under both attribute and key
                # for convenience of downstream consumers.
                self.password = self.cleaned_data['password2']
                self.cleaned_data['password'] = self.cleaned_data['password2']
                return self.cleaned_data['password2']
            else:
                del self.cleaned_data['password2']
                raise forms.ValidationError(self.fields['password2'].error_messages['nomatch'])
        else:
            # password1 already failed its own validation; nothing to compare.
            return self.cleaned_data['password2']
| gpl-3.0 |
kaarolch/ansible | lib/ansible/modules/packaging/dpkg_selections.py | 23 | 2578 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: dpkg_selections
short_description: Dpkg package selection selections
description:
- Change dpkg package selection state via --get-selections and --set-selections.
version_added: "2.0"
author: Brian Brazil <brian.brazil@boxever.com>
options:
name:
description:
- Name of the package
required: true
selection:
description:
- The selection state to set the package to.
choices: [ 'install', 'hold', 'deinstall', 'purge' ]
required: true
notes:
- This module won't cause any packages to be installed/removed/purged, use the C(apt) module for that.
'''
EXAMPLES = '''
# Prevent python from being upgraded.
- dpkg_selections:
name: python
selection: hold
'''
def main():
    """Entry point: read the current dpkg selection for a package and update
    it via ``dpkg --set-selections`` when it differs from the requested
    state. Honors check mode (reports the change without applying it)."""
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required=True),
            # DOCUMENTATION declares 'selection' as required; enforce it here
            # so a missing value fails fast instead of passing None to dpkg.
            selection = dict(required=True,
                             choices=['install', 'hold', 'deinstall', 'purge'])
        ),
        supports_check_mode=True,
    )

    dpkg = module.get_bin_path('dpkg', True)

    name = module.params['name']
    selection = module.params['selection']

    # Get current settings; empty output means dpkg has no record of the
    # package, otherwise the second column is the current selection state.
    rc, out, err = module.run_command([dpkg, '--get-selections', name], check_rc=True)
    if not out:
        current = 'not present'
    else:
        current = out.split()[1]

    changed = current != selection

    if module.check_mode or not changed:
        module.exit_json(changed=changed, before=current, after=selection)

    module.run_command([dpkg, '--set-selections'], data="%s %s" % (name, selection), check_rc=True)
    module.exit_json(changed=changed, before=current, after=selection)
# Ansible (pre-2.1 style) injects module boilerplate via a star import.
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
vitan/hue | desktop/core/ext-py/django-extensions-1.5.0/django_extensions/mongodb/fields/__init__.py | 65 | 8837 | """
MongoDB model fields emulating Django Extensions' additional model fields
These fields are essentially identical to existing Extensions fields, but South hooks have been removed (since mongo requires no schema migration)
"""
import re
import six
import datetime
from django import forms
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
from mongoengine.fields import StringField, DateTimeField
try:
import uuid
assert uuid
except ImportError:
from django_extensions.utils import uuid
class SlugField(StringField):
    """StringField variant mirroring Django's SlugField defaults."""

    description = _("String (up to %(max_length)s)")

    def __init__(self, *args, **kwargs):
        # Default to Django's SlugField max_length unless the caller
        # specified one explicitly.
        kwargs.setdefault('max_length', 50)
        # Slugs are typically used for lookups, so index them by default.
        kwargs.setdefault('db_index', True)
        super(SlugField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        return "SlugField"

    def formfield(self, **kwargs):
        options = {'form_class': forms.SlugField}
        options.update(kwargs)
        return super(SlugField, self).formfield(**options)
class AutoSlugField(SlugField):
    """ AutoSlugField, adapted for MongoDB

    By default, sets editable=False, blank=True.

    Required arguments:

    populate_from
        Specifies which field or list of fields the slug is populated from.

    Optional arguments:

    separator
        Defines the used separator (default: '-')

    overwrite
        If set to True, overwrites the slug on every save (default: False)

    Inspired by SmileyChris' Unique Slugify snippet:
    http://www.djangosnippets.org/snippets/690/
    """
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('blank', True)
        kwargs.setdefault('editable', False)

        populate_from = kwargs.pop('populate_from', None)
        if populate_from is None:
            raise ValueError("missing 'populate_from' argument")
        else:
            self._populate_from = populate_from
        self.slugify_function = kwargs.pop('slugify_function', slugify)
        self.separator = kwargs.pop('separator', six.u('-'))
        self.overwrite = kwargs.pop('overwrite', False)
        super(AutoSlugField, self).__init__(*args, **kwargs)

    def _slug_strip(self, value):
        """
        Cleans up a slug by removing slug separator characters that occur at
        the beginning or end of a slug.

        If an alternate separator is used, it will also replace any instances
        of the default '-' separator with the new separator.
        """
        re_sep = '(?:-|%s)' % re.escape(self.separator)
        value = re.sub('%s+' % re_sep, self.separator, value)
        return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)

    def slugify_func(self, content):
        # Indirection point so subclasses can post-process slugification.
        return self.slugify_function(content)

    def create_slug(self, model_instance, add):
        """Build a slug for *model_instance* that is unique within its
        collection; *add* is True when the document is being inserted."""
        # get fields to populate from and slug field to set
        if not isinstance(self._populate_from, (list, tuple)):
            self._populate_from = (self._populate_from, )
        slug_field = model_instance._meta.get_field(self.attname)

        if add or self.overwrite:
            # slugify the original field content and set next step to 2
            slug_for_field = lambda field: self.slugify_func(getattr(model_instance, field))
            slug = self.separator.join(map(slug_for_field, self._populate_from))
            next = 2
        else:
            # get slug from the current model instance and calculate next
            # step from its number, clean-up
            slug = self._slug_strip(getattr(model_instance, self.attname))
            next = slug.split(self.separator)[-1]
            if next.isdigit():
                slug = self.separator.join(slug.split(self.separator)[:-1])
                next = int(next)
            else:
                next = 2

        # strip slug depending on max_length attribute of the slug field
        # and clean-up
        slug_len = slug_field.max_length
        if slug_len:
            slug = slug[:slug_len]
        slug = self._slug_strip(slug)
        original_slug = slug

        # exclude the current model instance from the queryset used in finding
        # the next valid slug
        queryset = model_instance.__class__._default_manager.all()
        if model_instance.pk:
            queryset = queryset.exclude(pk=model_instance.pk)

        # form a kwarg dict used to implement any unique_together constraints
        kwargs = {}
        for params in model_instance._meta.unique_together:
            if self.attname in params:
                for param in params:
                    kwargs[param] = getattr(model_instance, param, None)
        kwargs[self.attname] = slug

        # increases the number while searching for the next valid slug
        # depending on the given slug, clean-up
        while not slug or queryset.filter(**kwargs):
            slug = original_slug
            end = '%s%s' % (self.separator, next)
            end_len = len(end)
            # make room for the numeric suffix within max_length
            if slug_len and len(slug) + end_len > slug_len:
                slug = slug[:slug_len - end_len]
                slug = self._slug_strip(slug)
            slug = '%s%s' % (slug, end)
            kwargs[self.attname] = slug
            next += 1
        return slug

    def pre_save(self, model_instance, add):
        # Compute and assign the slug just before the document is saved.
        value = six.u(self.create_slug(model_instance, add))
        setattr(model_instance, self.attname, value)
        return value

    def get_internal_type(self):
        return "SlugField"
class CreationDateTimeField(DateTimeField):
    """ CreationDateTimeField

    By default, sets editable=False, blank=True, default=datetime.now
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): despite the docstring, only 'default' is set here;
        # editable/blank are not touched -- confirm whether that is intended.
        # The default is the *callable* datetime.now, evaluated per save.
        kwargs.setdefault('default', datetime.datetime.now)
        DateTimeField.__init__(self, *args, **kwargs)

    def get_internal_type(self):
        # Report the Django-equivalent field type for introspection.
        return "DateTimeField"
class ModificationDateTimeField(CreationDateTimeField):
    """ ModificationDateTimeField

    By default, sets editable=False, blank=True, default=datetime.now

    Sets value to datetime.now() on each save of the model.
    """

    def pre_save(self, model, add):
        # Refresh the timestamp on every save, regardless of ``add``.
        value = datetime.datetime.now()
        setattr(model, self.attname, value)
        return value

    def get_internal_type(self):
        return "DateTimeField"
class UUIDVersionError(Exception):
    """Raised when an unsupported or invalid UUID version is requested."""
    pass
class UUIDField(StringField):
    """ UUIDField

    By default uses UUID version 1 (generate from host ID, sequence number and current time)

    The field support all uuid versions which are natively supported by the uuid python module.
    For more information see: http://docs.python.org/lib/module-uuid.html
    """

    def __init__(self, verbose_name=None, name=None, auto=True, version=1, node=None, clock_seq=None, namespace=None, **kwargs):
        # NOTE(review): for versions 3 and 5 the ``name`` argument doubles as
        # both the model-field name and the UUID name input -- confirm intent.
        kwargs['max_length'] = 36  # canonical string form of a UUID
        self.auto = auto
        self.version = version
        if version == 1:
            self.node, self.clock_seq = node, clock_seq
        elif version == 3 or version == 5:
            self.namespace, self.name = namespace, name
        StringField.__init__(self, verbose_name, name, **kwargs)

    def get_internal_type(self):
        return StringField.__name__

    def contribute_to_class(self, cls, name):
        if self.primary_key:
            # Register as the model's auto field; a model may have only one.
            assert not cls._meta.has_auto_field, "A model can't have more than one AutoField: %s %s %s; have %s" % (self, cls, name, cls._meta.auto_field)
            super(UUIDField, self).contribute_to_class(cls, name)
            cls._meta.has_auto_field = True
            cls._meta.auto_field = self
        else:
            super(UUIDField, self).contribute_to_class(cls, name)

    def create_uuid(self):
        """Generate a uuid.UUID according to the configured version."""
        if not self.version or self.version == 4:
            return uuid.uuid4()
        elif self.version == 1:
            return uuid.uuid1(self.node, self.clock_seq)
        elif self.version == 2:
            raise UUIDVersionError("UUID version 2 is not supported.")
        elif self.version == 3:
            return uuid.uuid3(self.namespace, self.name)
        elif self.version == 5:
            return uuid.uuid5(self.namespace, self.name)
        else:
            raise UUIDVersionError("UUID version %s is not valid." % self.version)

    def pre_save(self, model_instance, add):
        # On insert (or whenever the field is still empty), fill it with a
        # freshly generated UUID string.
        if self.auto and add:
            value = six.u(self.create_uuid())
            setattr(model_instance, self.attname, value)
            return value
        else:
            value = super(UUIDField, self).pre_save(model_instance, add)
            if self.auto and not value:
                value = six.u(self.create_uuid())
                setattr(model_instance, self.attname, value)
            return value
| apache-2.0 |
VielSoft/odoo | addons/fetchmail/fetchmail.py | 167 | 15870 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import poplib
import time
from imaplib import IMAP4
from imaplib import IMAP4_SSL
from poplib import POP3
from poplib import POP3_SSL
try:
import cStringIO as StringIO
except ImportError:
import StringIO
import zipfile
import base64
from openerp import addons
from openerp.osv import fields, osv
from openerp import tools, api
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)

# Cap on messages fetched per POP3 pass, and socket timeout in seconds.
MAX_POP_MESSAGES = 50
MAIL_TIMEOUT = 60

# Workaround for Python 2.7.8 bug https://bugs.python.org/issue23906
poplib._MAXLINE = 65536
class fetchmail_server(osv.osv):
"""Incoming POP/IMAP mail server account"""
_name = 'fetchmail.server'
_description = "POP/IMAP Server"
_order = 'priority'
_columns = {
'name':fields.char('Name', required=True, readonly=False),
'active':fields.boolean('Active', required=False),
'state':fields.selection([
('draft', 'Not Confirmed'),
('done', 'Confirmed'),
], 'Status', select=True, readonly=True, copy=False),
'server' : fields.char('Server Name', readonly=True, help="Hostname or IP of the mail server", states={'draft':[('readonly', False)]}),
'port' : fields.integer('Port', readonly=True, states={'draft':[('readonly', False)]}),
'type':fields.selection([
('pop', 'POP Server'),
('imap', 'IMAP Server'),
('local', 'Local Server'),
], 'Server Type', select=True, required=True, readonly=False),
'is_ssl':fields.boolean('SSL/TLS', help="Connections are encrypted with SSL/TLS through a dedicated port (default: IMAPS=993, POP3S=995)"),
'attach':fields.boolean('Keep Attachments', help="Whether attachments should be downloaded. "
"If not enabled, incoming emails will be stripped of any attachments before being processed"),
'original':fields.boolean('Keep Original', help="Whether a full original copy of each email should be kept for reference"
"and attached to each processed message. This will usually double the size of your message database."),
'date': fields.datetime('Last Fetch Date', readonly=True),
'user' : fields.char('Username', readonly=True, states={'draft':[('readonly', False)]}),
'password' : fields.char('Password', readonly=True, states={'draft':[('readonly', False)]}),
'action_id':fields.many2one('ir.actions.server', 'Server Action', help="Optional custom server action to trigger for each incoming mail, "
"on the record that was created or updated by this mail"),
'object_id': fields.many2one('ir.model', "Create a New Record", help="Process each incoming mail as part of a conversation "
"corresponding to this document type. This will create "
"new documents for new conversations, or attach follow-up "
"emails to the existing conversations (documents)."),
'priority': fields.integer('Server Priority', readonly=True, states={'draft':[('readonly', False)]}, help="Defines the order of processing, "
"lower values mean higher priority"),
'message_ids': fields.one2many('mail.mail', 'fetchmail_server_id', 'Messages', readonly=True),
'configuration' : fields.text('Configuration', readonly=True),
'script' : fields.char('Script', readonly=True),
}
_defaults = {
'state': "draft",
'type': "pop",
'active': True,
'priority': 5,
'attach': True,
'script': '/mail/static/scripts/openerp_mailgate.py',
}
def onchange_server_type(self, cr, uid, ids, server_type=False, ssl=False, object_id=False):
port = 0
values = {}
if server_type == 'pop':
port = ssl and 995 or 110
elif server_type == 'imap':
port = ssl and 993 or 143
else:
values['server'] = ''
values['port'] = port
conf = {
'dbname' : cr.dbname,
'uid' : uid,
'model' : 'MODELNAME',
}
if object_id:
m = self.pool.get('ir.model')
r = m.read(cr,uid,[object_id],['model'])
conf['model']=r[0]['model']
values['configuration'] = """Use the below script with the following command line options with your Mail Transport Agent (MTA)
openerp_mailgate.py --host=HOSTNAME --port=PORT -u %(uid)d -p PASSWORD -d %(dbname)s
Example configuration for the postfix mta running locally:
/etc/postfix/virtual_aliases:
@youdomain openerp_mailgate@localhost
/etc/aliases:
openerp_mailgate: "|/path/to/openerp-mailgate.py --host=localhost -u %(uid)d -p PASSWORD -d %(dbname)s"
""" % conf
return {'value':values}
def set_draft(self, cr, uid, ids, context=None):
self.write(cr, uid, ids , {'state':'draft'})
return True
@api.cr_uid_ids_context
def connect(self, cr, uid, server_id, context=None):
if isinstance(server_id, (list,tuple)):
server_id = server_id[0]
server = self.browse(cr, uid, server_id, context)
if server.type == 'imap':
if server.is_ssl:
connection = IMAP4_SSL(server.server, int(server.port))
else:
connection = IMAP4(server.server, int(server.port))
connection.login(server.user, server.password)
elif server.type == 'pop':
if server.is_ssl:
connection = POP3_SSL(server.server, int(server.port))
else:
connection = POP3(server.server, int(server.port))
#TODO: use this to remove only unread messages
#connection.user("recent:"+server.user)
connection.user(server.user)
connection.pass_(server.password)
# Add timeout on socket
connection.sock.settimeout(MAIL_TIMEOUT)
return connection
def button_confirm_login(self, cr, uid, ids, context=None):
if context is None:
context = {}
for server in self.browse(cr, uid, ids, context=context):
try:
connection = server.connect()
server.write({'state':'done'})
except Exception, e:
_logger.exception("Failed to connect to %s server %s.", server.type, server.name)
raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s.") % tools.ustr(e))
finally:
try:
if connection:
if server.type == 'imap':
connection.close()
elif server.type == 'pop':
connection.quit()
except Exception:
# ignored, just a consequence of the previous exception
pass
return True
def _fetch_mails(self, cr, uid, ids=False, context=None):
if not ids:
ids = self.search(cr, uid, [('state','=','done'),('type','in',['pop','imap'])])
return self.fetch_mail(cr, uid, ids, context=context)
def fetch_mail(self, cr, uid, ids, context=None):
"""WARNING: meant for cron usage only - will commit() after each email!"""
context = dict(context or {})
context['fetchmail_cron_running'] = True
mail_thread = self.pool.get('mail.thread')
action_pool = self.pool.get('ir.actions.server')
for server in self.browse(cr, uid, ids, context=context):
_logger.info('start checking for new emails on %s server %s', server.type, server.name)
context.update({'fetchmail_server_id': server.id, 'server_type': server.type})
count, failed = 0, 0
imap_server = False
pop_server = False
if server.type == 'imap':
try:
imap_server = server.connect()
imap_server.select()
result, data = imap_server.search(None, '(UNSEEN)')
for num in data[0].split():
res_id = None
result, data = imap_server.fetch(num, '(RFC822)')
imap_server.store(num, '-FLAGS', '\\Seen')
try:
res_id = mail_thread.message_process(cr, uid, server.object_id.model,
data[0][1],
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
except Exception:
_logger.exception('Failed to process mail from %s server %s.', server.type, server.name)
failed += 1
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
imap_server.store(num, '+FLAGS', '\\Seen')
cr.commit()
count += 1
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", count, server.type, server.name, (count - failed), failed)
except Exception:
_logger.exception("General failure when trying to fetch mail from %s server %s.", server.type, server.name)
finally:
if imap_server:
imap_server.close()
imap_server.logout()
elif server.type == 'pop':
try:
while True:
pop_server = server.connect()
(numMsgs, totalSize) = pop_server.stat()
pop_server.list()
for num in range(1, min(MAX_POP_MESSAGES, numMsgs) + 1):
(header, msges, octets) = pop_server.retr(num)
msg = '\n'.join(msges)
res_id = None
try:
res_id = mail_thread.message_process(cr, uid, server.object_id.model,
msg,
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
pop_server.dele(num)
except Exception:
_logger.exception('Failed to process mail from %s server %s.', server.type, server.name)
failed += 1
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
cr.commit()
if numMsgs < MAX_POP_MESSAGES:
break
pop_server.quit()
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", numMsgs, server.type, server.name, (numMsgs - failed), failed)
except Exception:
_logger.exception("General failure when trying to fetch mail from %s server %s.", server.type, server.name)
finally:
if pop_server:
pop_server.quit()
server.write({'date': time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)})
return True
def _update_cron(self, cr, uid, context=None):
if context and context.get('fetchmail_cron_running'):
return
try:
cron = self.pool['ir.model.data'].get_object(
cr, uid, 'fetchmail', 'ir_cron_mail_gateway_action', context=context)
except ValueError:
# Nevermind if default cron cannot be found
return
# Enabled/Disable cron based on the number of 'done' server of type pop or imap
cron.toggle(model=self._name, domain=[('state','=','done'), ('type','in',['pop','imap'])])
def create(self, cr, uid, values, context=None):
    """Create the server record, then resync the fetchmail cron job."""
    record_id = super(fetchmail_server, self).create(
        cr, uid, values, context=context)
    self._update_cron(cr, uid, context=context)
    return record_id
def write(self, cr, uid, ids, values, context=None):
    """Update the server record(s), then resync the fetchmail cron job."""
    result = super(fetchmail_server, self).write(
        cr, uid, ids, values, context=context)
    self._update_cron(cr, uid, context=context)
    return result
def unlink(self, cr, uid, ids, context=None):
    """Delete the server record(s), then resync the fetchmail cron job."""
    result = super(fetchmail_server, self).unlink(
        cr, uid, ids, context=context)
    self._update_cron(cr, uid, context=context)
    return result
class mail_mail(osv.osv):
    """Extension of mail.mail that records which inbound mail server, if
    any, a message was fetched from."""
    _inherit = "mail.mail"
    _columns = {
        'fetchmail_server_id': fields.many2one(
            'fetchmail.server', "Inbound Mail Server",
            readonly=True,
            select=True,
            oldname='server_id'),
    }

    def create(self, cr, uid, values, context=None):
        """Stamp new mails with the originating fetchmail server when the
        context carries one (set by the fetching loop)."""
        if context is None:
            context = {}
        server_id = context.get('fetchmail_server_id')
        if server_id:
            values['fetchmail_server_id'] = server_id
        return super(mail_mail, self).create(cr, uid, values,
                                             context=context)

    def write(self, cr, uid, ids, values, context=None):
        """Stamp updated mails with the originating fetchmail server when
        the context carries one."""
        if context is None:
            context = {}
        server_id = context.get('fetchmail_server_id')
        if server_id:
            values['fetchmail_server_id'] = server_id
        return super(mail_mail, self).write(cr, uid, ids, values,
                                            context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
nacc/autotest | frontend/afe/management.py | 3 | 2035 | # use some undocumented Django tricks to execute custom logic after syncdb
from django.db.models import signals
from django.contrib import auth
# In this file, it is critical that we import models *just like this*. In
# particular, we *cannot* do import common; from autotest... import models.
# This is because when we pass the models module to signal.connect(), it
# calls id() on the module, and the id() of a module can differ depending on how
# it was imported. For that reason, we must import models as Django does -- not
# through the autotest magic set up through common.py. If you do that, the
# connection won't work and the dispatcher will simply never call the method.
from frontend.afe import models
BASIC_ADMIN = 'Basic admin'
def create_admin_group(app, created_models, verbosity, **kwargs):
"""\
Create a basic admin group with permissions for managing basic autotest
objects.
"""
admin_group, created = auth.models.Group.objects.get_or_create(
name=BASIC_ADMIN)
admin_group.save() # must save before adding permissions
PermissionModel = auth.models.Permission
have_permissions = list(admin_group.permissions.all())
for model_name in ('host', 'label', 'test', 'aclgroup', 'profiler',
'atomicgroup'):
for permission_type in ('add', 'change', 'delete'):
codename = permission_type + '_' + model_name
permissions = list(PermissionModel.objects.filter(
codename=codename))
if len(permissions) == 0:
print ' No permission ' + codename
continue
for permission in permissions:
if permission not in have_permissions:
print ' Adding permission ' + codename
admin_group.permissions.add(permission)
if created:
print 'Created group "%s"' % BASIC_ADMIN
else:
print 'Group "%s" already exists' % BASIC_ADMIN
signals.post_syncdb.connect(create_admin_group, sender=models)
| gpl-2.0 |
lmprice/ansible | lib/ansible/modules/cloud/openstack/os_keystone_role.py | 20 | 3233 | #!/usr/bin/python
# Copyright (c) 2016 IBM
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_keystone_role
short_description: Manage OpenStack Identity Roles
extends_documentation_fragment: openstack
version_added: "2.1"
author: "Monty Taylor (@emonty), David Shrewsbury (@Shrews)"
description:
- Manage OpenStack Identity Roles.
options:
name:
description:
- Role Name
required: true
state:
description:
- Should the resource be present or absent.
choices: [present, absent]
default: present
availability_zone:
description:
- Ignored. Present for backwards compatibility
required: false
requirements:
- "python >= 2.6"
- "shade"
'''
EXAMPLES = '''
# Create a role named "demo"
- os_keystone_role:
cloud: mycloud
state: present
name: demo
# Delete the role named "demo"
- os_keystone_role:
cloud: mycloud
state: absent
name: demo
'''
RETURN = '''
role:
description: Dictionary describing the role.
returned: On success when I(state) is 'present'.
type: complex
contains:
id:
description: Unique role ID.
type: string
sample: "677bfab34c844a01b88a217aa12ec4c2"
name:
description: Role name.
type: string
sample: "demo"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.openstack import openstack_full_argument_spec, openstack_module_kwargs, openstack_cloud_from_module
def _system_state_change(state, role):
if state == 'present' and not role:
return True
if state == 'absent' and role:
return True
return False
def main():
    """Module entry point: ensure the keystone role is present or absent."""
    argument_spec = openstack_full_argument_spec(
        name=dict(required=True),
        state=dict(default='present', choices=['absent', 'present']),
    )
    module = AnsibleModule(argument_spec,
                           supports_check_mode=True,
                           **openstack_module_kwargs())

    name = module.params.get('name')
    state = module.params.get('state')
    shade, cloud = openstack_cloud_from_module(module)
    try:
        role = cloud.get_role(name)

        if module.check_mode:
            # Report what would change without touching the cloud.
            module.exit_json(changed=_system_state_change(state, role))

        if state == 'present':
            changed = role is None
            if changed:
                role = cloud.create_role(name)
            module.exit_json(changed=changed, role=role)
        else:
            # state == 'absent' (the only other allowed choice)
            changed = role is not None
            if changed:
                cloud.delete_role(name)
            module.exit_json(changed=changed)
    except shade.OpenStackCloudException as e:
        module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
| gpl-3.0 |
M0ses/ansible | lib/ansible/runner/shell_plugins/powershell.py | 11 | 5296 | # (c) 2014, Chris Church <chris@ninemoreminutes.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import base64
import os
import re
import random
import shlex
import time
_common_args = ['PowerShell', '-NoProfile', '-NonInteractive']
# Primarily for testing, allow explicitly specifying PowerShell version via
# an environment variable.
_powershell_version = os.environ.get('POWERSHELL_VERSION', None)
if _powershell_version:
_common_args = ['PowerShell', '-Version', _powershell_version] + _common_args[1:]
def _escape(value, include_vars=False):
'''Return value escaped for use in PowerShell command.'''
# http://www.techotopia.com/index.php/Windows_PowerShell_1.0_String_Quoting_and_Escape_Sequences
# http://stackoverflow.com/questions/764360/a-list-of-string-replacements-in-python
subs = [('\n', '`n'), ('\r', '`r'), ('\t', '`t'), ('\a', '`a'),
('\b', '`b'), ('\f', '`f'), ('\v', '`v'), ('"', '`"'),
('\'', '`\''), ('`', '``'), ('\x00', '`0')]
if include_vars:
subs.append(('$', '`$'))
pattern = '|'.join('(%s)' % re.escape(p) for p, s in subs)
substs = [s for p, s in subs]
replace = lambda m: substs[m.lastindex - 1]
return re.sub(pattern, replace, value)
def _encode_script(script, as_list=False):
    '''Convert a PowerShell script to a single base64-encoded command.'''
    # Strip every line and drop blank ones before encoding.
    stripped = [line.strip() for line in script.splitlines()]
    compact = '\n'.join(line for line in stripped if line)
    encoded_script = base64.b64encode(compact.encode('utf-16-le'))
    cmd_parts = _common_args + ['-EncodedCommand', encoded_script]
    if as_list:
        return cmd_parts
    return ' '.join(cmd_parts)
def _build_file_cmd(cmd_parts, quote_args=True):
'''Build command line to run a file, given list of file name plus args.'''
if quote_args:
cmd_parts = ['"%s"' % x for x in cmd_parts]
return ' '.join(['&'] + cmd_parts)
class ShellModule(object):
    """Shell plugin that produces PowerShell command lines for running
    Ansible operations on Windows hosts.

    Each method returns either an empty string (operation not applicable on
    Windows) or a base64-encoded PowerShell command built via
    _encode_script().
    """
    def env_prefix(self, **kwargs):
        # Environment variables are not prepended on Windows.
        return ''
    def join_path(self, *args):
        # Join, then normalize to Windows backslash separators.
        return os.path.join(*args).replace('/', '\\')
    def path_has_trailing_slash(self, path):
        # Allow Windows paths to be specified using either slash.
        return path.endswith('/') or path.endswith('\\')
    def chmod(self, mode, path):
        # POSIX permission bits do not apply on Windows; nothing to run.
        return ''
    def remove(self, path, recurse=False):
        # Encoded command that force-deletes `path`, optionally recursively.
        path = _escape(path)
        if recurse:
            return _encode_script('''Remove-Item "%s" -Force -Recurse;''' % path)
        else:
            return _encode_script('''Remove-Item "%s" -Force;''' % path)
    def mkdtemp(self, basefile, system=False, mode=None):
        # Creates the directory under the user's %TEMP%; the `system` and
        # `mode` arguments are currently ignored.
        basefile = _escape(basefile)
        # FIXME: Support system temp path!
        return _encode_script('''(New-Item -Type Directory -Path $env:temp -Name "%s").FullName | Write-Host -Separator '';''' % basefile)
    def expand_user(self, user_home_path):
        # PowerShell only supports "~" (not "~username"). Resolve-Path ~ does
        # not seem to work remotely, though by default we are always starting
        # in the user's home directory.
        if user_home_path == '~':
            script = 'Write-Host (Get-Location).Path'
        elif user_home_path.startswith('~\\'):
            script = 'Write-Host ((Get-Location).Path + "%s")' % _escape(user_home_path[1:])
        else:
            script = 'Write-Host "%s"' % _escape(user_home_path)
        return _encode_script(script)
    def checksum(self, path, python_interp):
        # Prints the lowercase SHA-1 of a regular file, "3" for a directory,
        # or "1" when the path does not exist.  `python_interp` is unused on
        # Windows (kept for interface parity with the POSIX shell plugin --
        # presumably; confirm against callers).
        path = _escape(path)
        script = '''
If (Test-Path -PathType Leaf "%(path)s")
{
$sp = new-object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider;
$fp = [System.IO.File]::Open("%(path)s", [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read);
[System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower();
$fp.Dispose();
}
ElseIf (Test-Path -PathType Container "%(path)s")
{
Write-Host "3";
}
Else
{
Write-Host "1";
}
''' % dict(path=path)
        return _encode_script(script)
    def build_module_command(self, env_string, shebang, cmd, rm_tmp=None):
        # Ensure the file being invoked carries a .ps1 extension, wrap it in
        # the call operator, and optionally append cleanup of the temp dir.
        cmd = cmd.encode('utf-8')
        cmd_parts = shlex.split(cmd, posix=False)
        if not cmd_parts[0].lower().endswith('.ps1'):
            cmd_parts[0] = '%s.ps1' % cmd_parts[0]
        script = _build_file_cmd(cmd_parts, quote_args=False)
        if rm_tmp:
            rm_tmp = _escape(rm_tmp)
            script = '%s; Remove-Item "%s" -Force -Recurse;' % (script, rm_tmp)
        return _encode_script(script)
| gpl-3.0 |
flyfei/python-for-android | python-modules/twisted/twisted/conch/test/test_userauth.py | 59 | 39075 | # -*- test-case-name: twisted.conch.test.test_userauth -*-
# Copyright (c) 2007-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for the implementation of the ssh-userauth service.
Maintainer: Paul Swartz
"""
from zope.interface import implements
from twisted.cred.checkers import ICredentialsChecker
from twisted.cred.credentials import IUsernamePassword, ISSHPrivateKey
from twisted.cred.credentials import IPluggableAuthenticationModules
from twisted.cred.credentials import IAnonymous
from twisted.cred.error import UnauthorizedLogin
from twisted.cred.portal import IRealm, Portal
from twisted.conch.error import ConchError, ValidPublicKey
from twisted.internet import defer, task
from twisted.protocols import loopback
from twisted.trial import unittest
try:
import Crypto.Cipher.DES3, Crypto.Cipher.XOR
import pyasn1
except ImportError:
keys = None
class transport:
class SSHTransportBase:
"""
A stub class so that later class definitions won't die.
"""
class userauth:
class SSHUserAuthClient:
"""
A stub class so that leter class definitions won't die.
"""
else:
from twisted.conch.ssh.common import NS
from twisted.conch.checkers import SSHProtocolChecker
from twisted.conch.ssh import keys, userauth, transport
from twisted.conch.test import keydata
class ClientUserAuth(userauth.SSHUserAuthClient):
    """
    A mock user auth client.
    """
    def getPublicKey(self):
        """
        Offer the DSA key (wrapped in a Deferred) on the first call and the
        RSA key (as a plain object) afterwards, exercising both return
        styles the client supports.
        """
        if not self.lastPublicKey:
            return defer.succeed(
                keys.Key.fromString(keydata.publicDSA_openssh))
        return keys.Key.fromString(keydata.publicRSA_openssh)

    def getPrivateKey(self):
        """
        Return the private key object for the RSA key.
        """
        rsaKey = keys.Key.fromString(keydata.privateRSA_openssh)
        return defer.succeed(rsaKey)

    def getPassword(self, prompt=None):
        """
        Return 'foo' as the password.
        """
        return defer.succeed('foo')

    def getGenericAnswers(self, name, information, answers):
        """
        Return 'foo' as the answer to two questions.
        """
        answer = 'foo'
        return defer.succeed((answer, answer))
class OldClientAuth(userauth.SSHUserAuthClient):
    """
    The old SSHUserAuthClient returned a PyCrypto key object from
    getPrivateKey() and a string from getPublicKey
    """
    def getPrivateKey(self):
        privateKey = keys.Key.fromString(keydata.privateRSA_openssh)
        return defer.succeed(privateKey.keyObject)

    def getPublicKey(self):
        publicKey = keys.Key.fromString(keydata.publicRSA_openssh)
        return publicKey.blob()
class ClientAuthWithoutPrivateKey(userauth.SSHUserAuthClient):
    """
    This client doesn't have a private key, but it does have a public key.
    """
    def getPrivateKey(self):
        # No private key is available.
        return None

    def getPublicKey(self):
        return keys.Key.fromString(keydata.publicRSA_openssh)
class FakeTransport(transport.SSHTransportBase):
    """
    L{userauth.SSHUserAuthServer} expects an SSH transport which has a factory
    attribute which has a portal attribute. Because the portal is important for
    testing authentication, we need to be able to provide an interesting portal
    object to the L{SSHUserAuthServer}.

    In addition, we want to be able to capture any packets sent over the
    transport.

    @ivar packets: a list of 2-tuples: (messageType, data). Each 2-tuple is
        a sent packet.
    @type packets: C{list}
    @ivar lostConnection: True if loseConnection has been called on us.
    @type lostConnection: C{bool}
    """
    class Service(object):
        """
        A mock service, representing the other service offered by the server.
        """
        name = 'nancy'
        def serviceStarted(self):
            pass
    class Factory(object):
        """
        A mock factory, representing the factory that spawned this user auth
        service.
        """
        def getService(self, transport, service):
            """
            Return our fake service.

            Only the 'none' service is known; anything else yields None.
            """
            if service == 'none':
                return FakeTransport.Service
    def __init__(self, portal):
        # self.transport points back at self so code that reaches through
        # .transport still lands on this recorder.
        self.factory = self.Factory()
        self.factory.portal = portal
        self.lostConnection = False
        self.transport = self
        self.packets = []
    def sendPacket(self, messageType, message):
        """
        Record the packet sent by the service.
        """
        self.packets.append((messageType, message))
    def isEncrypted(self, direction):
        """
        Pretend that this transport encrypts traffic in both directions. The
        SSHUserAuthServer disables password authentication if the transport
        isn't encrypted.
        """
        return True
    def loseConnection(self):
        self.lostConnection = True
class Realm(object):
    """
    A mock realm for testing L{userauth.SSHUserAuthServer}.

    This realm is not actually used in the course of testing, so it returns
    the simplest thing that could possibly work.
    """
    implements(IRealm)

    def requestAvatar(self, avatarId, mind, *interfaces):
        avatar = (interfaces[0], None, lambda: None)
        return defer.succeed(avatar)
class PasswordChecker(object):
    """
    A very simple username/password checker which authenticates anyone whose
    password matches their username and rejects all others.
    """
    credentialInterfaces = (IUsernamePassword,)
    implements(ICredentialsChecker)

    def requestAvatarId(self, creds):
        # Reject first; the success path is the fall-through.
        if creds.username != creds.password:
            return defer.fail(
                UnauthorizedLogin("Invalid username/password pair"))
        return defer.succeed(creds.username)
class PrivateKeyChecker(object):
    """
    A very simple public key checker which authenticates anyone whose
    public/private keypair is the same keydata.public/privateRSA_openssh.
    """
    credentialInterfaces = (ISSHPrivateKey,)
    implements(ICredentialsChecker)

    def requestAvatarId(self, creds):
        knownBlob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
        if creds.blob != knownBlob:
            raise UnauthorizedLogin()
        if creds.signature is None:
            # The client is only probing whether this key would be accepted.
            raise ValidPublicKey()
        key = keys.Key.fromString(creds.blob)
        if key.verify(creds.signature, creds.sigData):
            return creds.username
        raise UnauthorizedLogin()
class PAMChecker(object):
    """
    A simple PAM checker which asks the user for a password, verifying them
    if the password is the same as their username.
    """
    credentialInterfaces = (IPluggableAuthenticationModules,)
    implements(ICredentialsChecker)

    def requestAvatarId(self, creds):
        prompts = [('Name: ', 2), ("Password: ", 1)]
        d = creds.pamConversion(prompts)

        def check(values):
            expected = [(creds.username, 0), (creds.username, 0)]
            if values != expected:
                raise UnauthorizedLogin()
            return creds.username

        return d.addCallback(check)
class AnonymousChecker(object):
    """
    A simple checker which isn't supported by L{SSHUserAuthServer}.

    Registering it on a portal exercises the server's handling of
    credential interfaces it cannot map to an SSH method.
    """
    implements(ICredentialsChecker)
    credentialInterfaces = (IAnonymous,)
class SSHUserAuthServerTestCase(unittest.TestCase):
    """
    Tests for SSHUserAuthServer.

    Each test synthesises raw SSH userauth packets (built with NS()) and
    inspects the packets the server records on its FakeTransport.
    """
    if keys is None:
        skip = "cannot run w/o PyCrypto"

    def setUp(self):
        # One checker per supported authentication method, so the server
        # advertises keyboard-interactive, password and publickey.
        self.realm = Realm()
        self.portal = Portal(self.realm)
        self.portal.registerChecker(PasswordChecker())
        self.portal.registerChecker(PrivateKeyChecker())
        self.portal.registerChecker(PAMChecker())
        self.authServer = userauth.SSHUserAuthServer()
        self.authServer.transport = FakeTransport(self.portal)
        self.authServer.serviceStarted()
        self.authServer.supportedAuthentications.sort() # give a consistent
                                                        # order

    def tearDown(self):
        self.authServer.serviceStopped()
        self.authServer = None

    def _checkFailed(self, ignored):
        """
        Check that the authentication has failed.
        """
        self.assertEquals(self.authServer.transport.packets[-1],
                (userauth.MSG_USERAUTH_FAILURE,
                NS('keyboard-interactive,password,publickey') + '\x00'))

    def test_noneAuthentication(self):
        """
        A client may request a list of authentication 'method name' values
        that may continue by using the "none" authentication 'method name'.

        See RFC 4252 Section 5.2.
        """
        d = self.authServer.ssh_USERAUTH_REQUEST(NS('foo') + NS('service') +
                                                 NS('none'))
        return d.addCallback(self._checkFailed)

    def test_successfulPasswordAuthentication(self):
        """
        When provided with correct password authentication information, the
        server should respond by sending a MSG_USERAUTH_SUCCESS message with
        no other data.

        See RFC 4252, Section 5.1.
        """
        # packet layout: username, next service, method, FALSE, password
        packet = NS('foo') + NS('none') + NS('password') + chr(0) + NS('foo')
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        def check(ignored):
            self.assertEqual(
                self.authServer.transport.packets,
                [(userauth.MSG_USERAUTH_SUCCESS, '')])
        return d.addCallback(check)

    def test_failedPasswordAuthentication(self):
        """
        When provided with invalid authentication details, the server should
        respond by sending a MSG_USERAUTH_FAILURE message which states whether
        the authentication was partially successful, and provides other, open
        options for authentication.

        See RFC 4252, Section 5.1.
        """
        # packet = username, next_service, authentication type, FALSE, password
        packet = NS('foo') + NS('none') + NS('password') + chr(0) + NS('bar')
        self.authServer.clock = task.Clock()
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        # The failure is delayed (anti-timing-attack delay); nothing is sent
        # until the clock advances.
        self.assertEquals(self.authServer.transport.packets, [])
        self.authServer.clock.advance(2)
        return d.addCallback(self._checkFailed)

    def test_successfulPrivateKeyAuthentication(self):
        """
        Test that private key authentication completes sucessfully,
        """
        blob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
        obj = keys.Key.fromString(keydata.privateRSA_openssh)
        packet = (NS('foo') + NS('none') + NS('publickey') + '\xff'
                + NS(obj.sshType()) + NS(blob))
        self.authServer.transport.sessionID = 'test'
        signature = obj.sign(NS('test') + chr(userauth.MSG_USERAUTH_REQUEST)
                + packet)
        packet += NS(signature)
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        def check(ignored):
            self.assertEquals(self.authServer.transport.packets,
                    [(userauth.MSG_USERAUTH_SUCCESS, '')])
        return d.addCallback(check)

    def test_requestRaisesConchError(self):
        """
        ssh_USERAUTH_REQUEST should raise a ConchError if tryAuth returns
        None. Added to catch a bug noticed by pyflakes.
        """
        d = defer.Deferred()

        def mockCbFinishedAuth(self, ignored):
            # NOTE(review): "ConochError" typo is in the original failure
            # message; preserved verbatim.
            self.fail('request should have raised ConochError')

        def mockTryAuth(kind, user, data):
            return None

        def mockEbBadAuth(reason):
            d.errback(reason.value)

        self.patch(self.authServer, 'tryAuth', mockTryAuth)
        self.patch(self.authServer, '_cbFinishedAuth', mockCbFinishedAuth)
        self.patch(self.authServer, '_ebBadAuth', mockEbBadAuth)

        packet = NS('user') + NS('none') + NS('public-key') + NS('data')
        # If an error other than ConchError is raised, this will trigger an
        # exception.
        self.authServer.ssh_USERAUTH_REQUEST(packet)
        return self.assertFailure(d, ConchError)

    def test_verifyValidPrivateKey(self):
        """
        Test that verifying a valid private key works.
        """
        blob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
        packet = (NS('foo') + NS('none') + NS('publickey') + '\x00'
                + NS('ssh-rsa') + NS(blob))
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        def check(ignored):
            self.assertEquals(self.authServer.transport.packets,
                    [(userauth.MSG_USERAUTH_PK_OK, NS('ssh-rsa') + NS(blob))])
        return d.addCallback(check)

    def test_failedPrivateKeyAuthenticationWithoutSignature(self):
        """
        Test that private key authentication fails when the public key
        is invalid.
        """
        blob = keys.Key.fromString(keydata.publicDSA_openssh).blob()
        packet = (NS('foo') + NS('none') + NS('publickey') + '\x00'
                + NS('ssh-dsa') + NS(blob))
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        return d.addCallback(self._checkFailed)

    def test_failedPrivateKeyAuthenticationWithSignature(self):
        """
        Test that private key authentication fails when the public key
        is invalid.
        """
        blob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
        obj = keys.Key.fromString(keydata.privateRSA_openssh)
        # Signature is over the wrong data (the blob itself), so it must
        # fail verification.
        packet = (NS('foo') + NS('none') + NS('publickey') + '\xff'
                + NS('ssh-rsa') + NS(blob) + NS(obj.sign(blob)))
        self.authServer.transport.sessionID = 'test'
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        return d.addCallback(self._checkFailed)

    def test_successfulPAMAuthentication(self):
        """
        Test that keyboard-interactive authentication succeeds.
        """
        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
                + NS('') + NS(''))
        response = '\x00\x00\x00\x02' + NS('foo') + NS('foo')
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        self.authServer.ssh_USERAUTH_INFO_RESPONSE(response)
        def check(ignored):
            self.assertEquals(self.authServer.transport.packets,
                    [(userauth.MSG_USERAUTH_INFO_REQUEST, (NS('') + NS('')
                    + NS('') + '\x00\x00\x00\x02' + NS('Name: ') + '\x01'
                    + NS('Password: ') + '\x00')),
                    (userauth.MSG_USERAUTH_SUCCESS, '')])
        return d.addCallback(check)

    def test_failedPAMAuthentication(self):
        """
        Test that keyboard-interactive authentication fails.
        """
        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
                + NS('') + NS(''))
        response = '\x00\x00\x00\x02' + NS('bar') + NS('bar')
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        self.authServer.ssh_USERAUTH_INFO_RESPONSE(response)
        def check(ignored):
            self.assertEquals(self.authServer.transport.packets[0],
                    (userauth.MSG_USERAUTH_INFO_REQUEST, (NS('') + NS('')
                    + NS('') + '\x00\x00\x00\x02' + NS('Name: ') + '\x01'
                    + NS('Password: ') + '\x00')))
        return d.addCallback(check).addCallback(self._checkFailed)

    def test_invalid_USERAUTH_INFO_RESPONSE_not_enough_data(self):
        """
        If ssh_USERAUTH_INFO_RESPONSE gets an invalid packet,
        the user authentication should fail.
        """
        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
                + NS('') + NS(''))
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        self.authServer.ssh_USERAUTH_INFO_RESPONSE(NS('\x00\x00\x00\x00' +
                                                      NS('hi')))
        return d.addCallback(self._checkFailed)

    def test_invalid_USERAUTH_INFO_RESPONSE_too_much_data(self):
        """
        If ssh_USERAUTH_INFO_RESPONSE gets too much data, the user
        authentication should fail.
        """
        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
                + NS('') + NS(''))
        # Three answers where two were requested.
        response = '\x00\x00\x00\x02' + NS('foo') + NS('foo') + NS('foo')
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        self.authServer.ssh_USERAUTH_INFO_RESPONSE(response)
        return d.addCallback(self._checkFailed)

    def test_onlyOnePAMAuthentication(self):
        """
        Because it requires an intermediate message, one can't send a second
        keyboard-interactive request while the first is still pending.
        """
        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
                + NS('') + NS(''))
        self.authServer.ssh_USERAUTH_REQUEST(packet)
        self.authServer.ssh_USERAUTH_REQUEST(packet)
        self.assertEquals(self.authServer.transport.packets[-1][0],
                transport.MSG_DISCONNECT)
        self.assertEquals(self.authServer.transport.packets[-1][1][3],
                chr(transport.DISCONNECT_PROTOCOL_ERROR))

    def test_ignoreUnknownCredInterfaces(self):
        """
        L{SSHUserAuthServer} sets up
        C{SSHUserAuthServer.supportedAuthentications} by checking the portal's
        credentials interfaces and mapping them to SSH authentication method
        strings.  If the Portal advertises an interface that
        L{SSHUserAuthServer} can't map, it should be ignored.  This is a white
        box test.
        """
        server = userauth.SSHUserAuthServer()
        server.transport = FakeTransport(self.portal)
        self.portal.registerChecker(AnonymousChecker())
        server.serviceStarted()
        server.serviceStopped()
        server.supportedAuthentications.sort() # give a consistent order
        self.assertEquals(server.supportedAuthentications,
                          ['keyboard-interactive', 'password', 'publickey'])

    def test_removePasswordIfUnencrypted(self):
        """
        Test that the userauth service does not advertise password
        authentication if the password would be send in cleartext.
        """
        self.assertIn('password', self.authServer.supportedAuthentications)
        # no encryption
        clearAuthServer = userauth.SSHUserAuthServer()
        clearAuthServer.transport = FakeTransport(self.portal)
        clearAuthServer.transport.isEncrypted = lambda x: False
        clearAuthServer.serviceStarted()
        clearAuthServer.serviceStopped()
        self.failIfIn('password', clearAuthServer.supportedAuthentications)
        # only encrypt incoming (the direction the password is sent)
        halfAuthServer = userauth.SSHUserAuthServer()
        halfAuthServer.transport = FakeTransport(self.portal)
        halfAuthServer.transport.isEncrypted = lambda x: x == 'in'
        halfAuthServer.serviceStarted()
        halfAuthServer.serviceStopped()
        self.assertIn('password', halfAuthServer.supportedAuthentications)

    def test_removeKeyboardInteractiveIfUnencrypted(self):
        """
        Test that the userauth service does not advertise keyboard-interactive
        authentication if the password would be send in cleartext.
        """
        self.assertIn('keyboard-interactive',
                self.authServer.supportedAuthentications)
        # no encryption
        clearAuthServer = userauth.SSHUserAuthServer()
        clearAuthServer.transport = FakeTransport(self.portal)
        clearAuthServer.transport.isEncrypted = lambda x: False
        clearAuthServer.serviceStarted()
        clearAuthServer.serviceStopped()
        self.failIfIn('keyboard-interactive',
                clearAuthServer.supportedAuthentications)
        # only encrypt incoming (the direction the password is sent)
        halfAuthServer = userauth.SSHUserAuthServer()
        halfAuthServer.transport = FakeTransport(self.portal)
        halfAuthServer.transport.isEncrypted = lambda x: x == 'in'
        halfAuthServer.serviceStarted()
        halfAuthServer.serviceStopped()
        self.assertIn('keyboard-interactive',
                halfAuthServer.supportedAuthentications)

    def test_unencryptedConnectionWithoutPasswords(self):
        """
        If the L{SSHUserAuthServer} is not advertising passwords, then an
        unencrypted connection should not cause any warnings or exceptions.
        This is a white box test.
        """
        # create a Portal without password authentication
        portal = Portal(self.realm)
        portal.registerChecker(PrivateKeyChecker())

        # no encryption
        clearAuthServer = userauth.SSHUserAuthServer()
        clearAuthServer.transport = FakeTransport(portal)
        clearAuthServer.transport.isEncrypted = lambda x: False
        clearAuthServer.serviceStarted()
        clearAuthServer.serviceStopped()
        self.assertEquals(clearAuthServer.supportedAuthentications,
                          ['publickey'])

        # only encrypt incoming (the direction the password is sent)
        halfAuthServer = userauth.SSHUserAuthServer()
        halfAuthServer.transport = FakeTransport(portal)
        halfAuthServer.transport.isEncrypted = lambda x: x == 'in'
        halfAuthServer.serviceStarted()
        halfAuthServer.serviceStopped()
        self.assertEquals(clearAuthServer.supportedAuthentications,
                          ['publickey'])

    def test_loginTimeout(self):
        """
        Test that the login times out.
        """
        timeoutAuthServer = userauth.SSHUserAuthServer()
        timeoutAuthServer.clock = task.Clock()
        timeoutAuthServer.transport = FakeTransport(self.portal)
        timeoutAuthServer.serviceStarted()
        timeoutAuthServer.clock.advance(11 * 60 * 60)
        timeoutAuthServer.serviceStopped()
        self.assertEquals(timeoutAuthServer.transport.packets,
                [(transport.MSG_DISCONNECT,
                '\x00' * 3 +
                chr(transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE) +
                NS("you took too long") + NS(''))])
        self.assertTrue(timeoutAuthServer.transport.lostConnection)

    def test_cancelLoginTimeout(self):
        """
        Test that stopping the service also stops the login timeout.
        """
        timeoutAuthServer = userauth.SSHUserAuthServer()
        timeoutAuthServer.clock = task.Clock()
        timeoutAuthServer.transport = FakeTransport(self.portal)
        timeoutAuthServer.serviceStarted()
        timeoutAuthServer.serviceStopped()
        timeoutAuthServer.clock.advance(11 * 60 * 60)
        self.assertEquals(timeoutAuthServer.transport.packets, [])
        self.assertFalse(timeoutAuthServer.transport.lostConnection)

    def test_tooManyAttempts(self):
        """
        Test that the server disconnects if the client fails authentication
        too many times.
        """
        packet = NS('foo') + NS('none') + NS('password') + chr(0) + NS('bar')
        self.authServer.clock = task.Clock()
        for i in range(21):
            d = self.authServer.ssh_USERAUTH_REQUEST(packet)
            self.authServer.clock.advance(2)
        def check(ignored):
            self.assertEquals(self.authServer.transport.packets[-1],
                (transport.MSG_DISCONNECT,
                '\x00' * 3 +
                chr(transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE) +
                NS("too many bad auths") + NS('')))
        return d.addCallback(check)

    def test_failIfUnknownService(self):
        """
        If the user requests a service that we don't support, the
        authentication should fail.
        """
        packet = NS('foo') + NS('') + NS('password') + chr(0) + NS('foo')
        self.authServer.clock = task.Clock()
        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
        return d.addCallback(self._checkFailed)

    def test__pamConvErrors(self):
        """
        _pamConv should fail if it gets a message that's not 1 or 2.
        """
        def secondTest(ignored):
            d2 = self.authServer._pamConv([('', 90)])
            return self.assertFailure(d2, ConchError)

        d = self.authServer._pamConv([('', 3)])
        return self.assertFailure(d, ConchError).addCallback(secondTest)

    def test_tryAuthEdgeCases(self):
        """
        tryAuth() has two edge cases that are difficult to reach.

        1) an authentication method auth_* returns None instead of a Deferred.
        2) an authentication type that is defined does not have a matching
           auth_* method.

        Both these cases should return a Deferred which fails with a
        ConchError.
        """
        def mockAuth(packet):
            return None

        self.patch(self.authServer, 'auth_publickey', mockAuth) # first case
        self.patch(self.authServer, 'auth_password', None) # second case

        def secondTest(ignored):
            d2 = self.authServer.tryAuth('password', None, None)
            return self.assertFailure(d2, ConchError)

        d1 = self.authServer.tryAuth('publickey', None, None)
        return self.assertFailure(d1, ConchError).addCallback(secondTest)
class SSHUserAuthClientTestCase(unittest.TestCase):
    """
    Tests for SSHUserAuthClient.
    """
    if keys is None:
        skip = "cannot run w/o PyCrypto"
    def setUp(self):
        self.authClient = ClientUserAuth('foo', FakeTransport.Service())
        self.authClient.transport = FakeTransport(None)
        self.authClient.transport.sessionID = 'test'
        self.authClient.serviceStarted()
    def tearDown(self):
        self.authClient.serviceStopped()
        self.authClient = None
    def test_init(self):
        """
        Test that client is initialized properly.
        """
        self.assertEquals(self.authClient.user, 'foo')
        self.assertEquals(self.authClient.instance.name, 'nancy')
        # Starting the service requests 'none' authentication first so that
        # the server replies with the methods it actually supports.
        self.assertEquals(self.authClient.transport.packets,
                [(userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
                    + NS('none'))])
    def test_USERAUTH_SUCCESS(self):
        """
        Test that the client succeeds properly.
        """
        instance = [None]
        def stubSetService(service):
            instance[0] = service
        self.authClient.transport.setService = stubSetService
        self.authClient.ssh_USERAUTH_SUCCESS('')
        # On success, the next service must be started on the transport.
        self.assertEquals(instance[0], self.authClient.instance)
    def test_publickey(self):
        """
        Test that the client can authenticate with a public key.
        """
        self.authClient.ssh_USERAUTH_FAILURE(NS('publickey') + '\x00')
        # '\x00' flags a signature-less (probe) request; the DSA key is
        # offered first.
        self.assertEquals(self.authClient.transport.packets[-1],
                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
                    + NS('publickey') + '\x00' + NS('ssh-dss')
                    + NS(keys.Key.fromString(
                        keydata.publicDSA_openssh).blob())))
        # that key isn't good
        self.authClient.ssh_USERAUTH_FAILURE(NS('publickey') + '\x00')
        blob = NS(keys.Key.fromString(keydata.publicRSA_openssh).blob())
        self.assertEquals(self.authClient.transport.packets[-1],
                (userauth.MSG_USERAUTH_REQUEST, (NS('foo') + NS('nancy')
                    + NS('publickey') + '\x00'+ NS('ssh-rsa') + blob)))
        self.authClient.ssh_USERAUTH_PK_OK(NS('ssh-rsa')
                + NS(keys.Key.fromString(keydata.publicRSA_openssh).blob()))
        # '\xff' flags a signed request; the signature covers the session id
        # plus the request fields.
        sigData = (NS(self.authClient.transport.sessionID)
                + chr(userauth.MSG_USERAUTH_REQUEST) + NS('foo')
                + NS('nancy') + NS('publickey') + '\xff' + NS('ssh-rsa')
                + blob)
        obj = keys.Key.fromString(keydata.privateRSA_openssh)
        self.assertEquals(self.authClient.transport.packets[-1],
                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
                    + NS('publickey') + '\xff' + NS('ssh-rsa') + blob
                    + NS(obj.sign(sigData))))
    def test_publickey_without_privatekey(self):
        """
        If the SSHUserAuthClient doesn't return anything from signData,
        the client should start the authentication over again by requesting
        'none' authentication.
        """
        authClient = ClientAuthWithoutPrivateKey('foo',
                                                 FakeTransport.Service())
        authClient.transport = FakeTransport(None)
        authClient.transport.sessionID = 'test'
        authClient.serviceStarted()
        authClient.tryAuth('publickey')
        authClient.transport.packets = []
        self.assertIdentical(authClient.ssh_USERAUTH_PK_OK(''), None)
        self.assertEquals(authClient.transport.packets, [
                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy') +
                 NS('none'))])
    def test_old_publickey_getPublicKey(self):
        """
        Old SSHUserAuthClients returned strings of public key blobs from
        getPublicKey().  Test that a Deprecation warning is raised but the
        key is verified correctly.
        """
        oldAuth = OldClientAuth('foo', FakeTransport.Service())
        oldAuth.transport = FakeTransport(None)
        oldAuth.transport.sessionID = 'test'
        oldAuth.serviceStarted()
        oldAuth.transport.packets = []
        self.assertWarns(DeprecationWarning, "Returning a string from "
                         "SSHUserAuthClient.getPublicKey() is deprecated since "
                         "Twisted 9.0.  Return a keys.Key() instead.",
                         userauth.__file__, oldAuth.tryAuth, 'publickey')
        self.assertEquals(oldAuth.transport.packets, [
                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy') +
                 NS('publickey') + '\x00' + NS('ssh-rsa') +
                 NS(keys.Key.fromString(keydata.publicRSA_openssh).blob()))])
    def test_old_publickey_getPrivateKey(self):
        """
        Old SSHUserAuthClients returned a PyCrypto key object from
        getPrivateKey().  Test that _cbSignData warns the user about the
        deprecation, but signs the data correctly.
        """
        oldAuth = OldClientAuth('foo', FakeTransport.Service())
        d = self.assertWarns(DeprecationWarning, "Returning a PyCrypto key "
                             "object from SSHUserAuthClient.getPrivateKey() is "
                             "deprecated since Twisted 9.0.  "
                             "Return a keys.Key() instead.", userauth.__file__,
                             oldAuth.signData, None, 'data')
        def _checkSignedData(sig):
            self.assertEquals(sig,
                keys.Key.fromString(keydata.privateRSA_openssh).sign(
                    'data'))
        d.addCallback(_checkSignedData)
        return d
    def test_no_publickey(self):
        """
        If there's no public key, auth_publickey should return a Deferred
        called back with a False value.
        """
        self.authClient.getPublicKey = lambda x: None
        d = self.authClient.tryAuth('publickey')
        def check(result):
            self.assertFalse(result)
        return d.addCallback(check)
    def test_password(self):
        """
        Test that the client can authenticate with a password.  This
        includes changing the password.
        """
        self.authClient.ssh_USERAUTH_FAILURE(NS('password') + '\x00')
        self.assertEquals(self.authClient.transport.packets[-1],
                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
                    + NS('password') + '\x00' + NS('foo')))
        # A PK_OK here means a password-change request from the server;
        # '\xff' marks the reply carrying old and new passwords.
        self.authClient.ssh_USERAUTH_PK_OK(NS('') + NS(''))
        self.assertEquals(self.authClient.transport.packets[-1],
                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
                    + NS('password') + '\xff' + NS('foo') * 2))
    def test_no_password(self):
        """
        If getPassword returns None, tryAuth should return False.
        """
        self.authClient.getPassword = lambda: None
        self.assertFalse(self.authClient.tryAuth('password'))
    def test_keyboardInteractive(self):
        """
        Test that the client can authenticate using keyboard-interactive
        authentication.
        """
        self.authClient.ssh_USERAUTH_FAILURE(NS('keyboard-interactive')
                + '\x00')
        self.assertEquals(self.authClient.transport.packets[-1],
                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
                    + NS('keyboard-interactive') + NS('')*2))
        # Two prompts ('Name: ' echoed, 'Password: ' hidden) should both be
        # answered from getGenericAnswers.
        self.authClient.ssh_USERAUTH_PK_OK(NS('')*3 + '\x00\x00\x00\x02'
                + NS('Name: ') + '\xff' + NS('Password: ') + '\x00')
        self.assertEquals(self.authClient.transport.packets[-1],
                (userauth.MSG_USERAUTH_INFO_RESPONSE, '\x00\x00\x00\x02'
                    + NS('foo')*2))
    def test_USERAUTH_PK_OK_unknown_method(self):
        """
        If C{SSHUserAuthClient} gets a MSG_USERAUTH_PK_OK packet when it's not
        expecting it, it should fail the current authentication and move on to
        the next type.
        """
        self.authClient.lastAuth = 'unknown'
        self.authClient.transport.packets = []
        self.authClient.ssh_USERAUTH_PK_OK('')
        self.assertEquals(self.authClient.transport.packets,
                          [(userauth.MSG_USERAUTH_REQUEST, NS('foo') +
                            NS('nancy') + NS('none'))])
    def test_USERAUTH_FAILURE_sorting(self):
        """
        ssh_USERAUTH_FAILURE should sort the methods by their position
        in SSHUserAuthClient.preferredOrder.  Methods that are not in
        preferredOrder should be sorted at the end of that list.
        """
        def auth_firstmethod():
            self.authClient.transport.sendPacket(255, 'here is data')
        def auth_anothermethod():
            self.authClient.transport.sendPacket(254, 'other data')
            return True
        self.authClient.auth_firstmethod = auth_firstmethod
        self.authClient.auth_anothermethod = auth_anothermethod
        # although they shouldn't get called, method callbacks auth_* MUST
        # exist in order for the test to work properly.
        self.authClient.ssh_USERAUTH_FAILURE(NS('anothermethod,password') +
                                             '\x00')
        # should send password packet
        self.assertEquals(self.authClient.transport.packets[-1],
                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
                    + NS('password') + '\x00' + NS('foo')))
        self.authClient.ssh_USERAUTH_FAILURE(
            NS('firstmethod,anothermethod,password') + '\xff')
        self.assertEquals(self.authClient.transport.packets[-2:],
                          [(255, 'here is data'), (254, 'other data')])
    def test_disconnectIfNoMoreAuthentication(self):
        """
        If there are no more available user authentication messages,
        the SSHUserAuthClient should disconnect with code
        DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE.
        """
        self.authClient.ssh_USERAUTH_FAILURE(NS('password') + '\x00')
        self.authClient.ssh_USERAUTH_FAILURE(NS('password') + '\xff')
        # '\x00\x00\x00\x0e' is disconnect code 14
        # (NO_MORE_AUTH_METHODS_AVAILABLE).
        self.assertEquals(self.authClient.transport.packets[-1],
                          (transport.MSG_DISCONNECT, '\x00\x00\x00\x0e' +
                           NS('no more authentication methods available') +
                           '\x00\x00\x00\x00'))
    def test_ebAuth(self):
        """
        _ebAuth (the generic authentication error handler) should send
        a request for the 'none' authentication method.
        """
        self.authClient.transport.packets = []
        self.authClient._ebAuth(None)
        self.assertEquals(self.authClient.transport.packets,
                [(userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
                    + NS('none'))])
    def test_defaults(self):
        """
        getPublicKey() should return None.  getPrivateKey() should return a
        failed Deferred.  getPassword() should return a failed Deferred.
        getGenericAnswers() should return a failed Deferred.
        """
        authClient = userauth.SSHUserAuthClient('foo', FakeTransport.Service())
        self.assertIdentical(authClient.getPublicKey(), None)
        # Each check chains into the next so all three failing Deferreds are
        # exercised in sequence.
        def check(result):
            result.trap(NotImplementedError)
            d = authClient.getPassword()
            return d.addCallback(self.fail).addErrback(check2)
        def check2(result):
            result.trap(NotImplementedError)
            d = authClient.getGenericAnswers(None, None, None)
            return d.addCallback(self.fail).addErrback(check3)
        def check3(result):
            result.trap(NotImplementedError)
        d = authClient.getPrivateKey()
        return d.addCallback(self.fail).addErrback(check)
class LoopbackTestCase(unittest.TestCase):
    """
    End-to-end test wiring a real userauth server and client together over
    an in-memory loopback transport.
    """
    if keys is None:
        skip = "cannot run w/o PyCrypto or PyASN1"
    class Factory:
        # Minimal server-side factory: the service started after successful
        # authentication simply drops the connection.
        class Service:
            name = 'TestService'
            def serviceStarted(self):
                self.transport.loseConnection()
            def serviceStopped(self):
                pass
        def getService(self, avatar, name):
            return self.Service
    def test_loopback(self):
        """
        Test that the userauth server and client play nicely with each other.
        """
        server = userauth.SSHUserAuthServer()
        client = ClientUserAuth('foo', self.Factory.Service())
        # set up transports
        server.transport = transport.SSHTransportBase()
        server.transport.service = server
        server.transport.isEncrypted = lambda x: True
        client.transport = transport.SSHTransportBase()
        client.transport.service = client
        server.transport.sessionID = client.transport.sessionID = ''
        # don't send key exchange packet
        server.transport.sendKexInit = client.transport.sendKexInit = \
            lambda: None
        # set up server authentication
        server.transport.factory = self.Factory()
        server.passwordDelay = 0 # remove bad password delay
        realm = Realm()
        portal = Portal(realm)
        checker = SSHProtocolChecker()
        checker.registerChecker(PasswordChecker())
        checker.registerChecker(PrivateKeyChecker())
        checker.registerChecker(PAMChecker())
        # Authentication only completes once all three checkers succeed.
        checker.areDone = lambda aId: (
            len(checker.successfulCredentials[aId]) == 3)
        portal.registerChecker(checker)
        server.transport.factory.portal = portal
        d = loopback.loopbackAsync(server.transport, client.transport)
        server.transport.transport.logPrefix = lambda: '_ServerLoopback'
        client.transport.transport.logPrefix = lambda: '_ClientLoopback'
        server.serviceStarted()
        client.serviceStarted()
        def check(ignored):
            # The post-auth service started => the full handshake succeeded.
            self.assertEquals(server.transport.service.name, 'TestService')
        return d.addCallback(check)
| apache-2.0 |
santisiri/popego | envs/ALPHA-POPEGO/lib/python2.5/site-packages/SQLAlchemy-0.4.0-py2.5.egg/sqlalchemy/databases/sqlite.py | 1 | 15211 | # sqlite.py
# Copyright (C) 2005, 2006, 2007 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import re
from sqlalchemy import schema, exceptions, pool, PassiveDefault
from sqlalchemy.engine import default
import sqlalchemy.types as sqltypes
import datetime,time, warnings
import sqlalchemy.util as util
from sqlalchemy.sql import compiler
# Matches statements that return result rows; used by
# SQLiteExecutionContext.is_select() below.
SELECT_REGEXP = re.compile(r'\s*(?:SELECT|PRAGMA)', re.I | re.UNICODE)
class SLNumeric(sqltypes.Numeric):
    """SQLite NUMERIC type."""
    def bind_processor(self, dialect):
        # pysqlite has no native Decimal binding in this version; coerce
        # bound values to str when asdecimal is set, otherwise to float.
        if self.asdecimal:
            coerce_ = str
        else:
            coerce_ = float
        def process(value):
            if value is None:
                return value
            return coerce_(value)
        return process
    def get_col_spec(self):
        if self.precision is None:
            return "NUMERIC"
        return "NUMERIC(%(precision)s, %(length)s)" % {
            'precision': self.precision, 'length': self.length}
class SLInteger(sqltypes.Integer):
    def get_col_spec(self):
        # DDL type name emitted for integer columns.
        return "INTEGER"
class SLSmallInteger(sqltypes.Smallinteger):
    def get_col_spec(self):
        # DDL type name emitted for small-integer columns.
        return "SMALLINT"
class DateTimeMixin(object):
    """
    Shared string <-> time-tuple conversion for the SQLite date/time types.
    Subclasses override ``__format__`` with their strftime/strptime pattern.
    """
    __format__ = "%Y-%m-%d %H:%M:%S"
    def bind_processor(self, dialect):
        def process(value):
            # Strings are passed through untouched.
            if isinstance(value, basestring):
                return value
            if value is None:
                return None
            microsecond = getattr(value, 'microsecond', None)
            if microsecond is not None:
                # Append the raw microsecond count after a '.' separator.
                return value.strftime(
                    self.__format__ + "." + str(microsecond))
            return value.strftime(self.__format__)
        return process
    def _cvt(self, value, dialect):
        """Parse *value* into a 7-tuple (y, m, d, H, M, S, microsecond)."""
        if value is None:
            return None
        try:
            value, fractional = value.split('.')
            microsecond = int(fractional)
        except ValueError:
            # No (or unparseable) fractional part.
            microsecond = 0
        return time.strptime(value, self.__format__)[0:6] + (microsecond,)
class SLDateTime(DateTimeMixin,sqltypes.DateTime):
    __format__ = "%Y-%m-%d %H:%M:%S"
    def get_col_spec(self):
        return "TIMESTAMP"
    def result_processor(self, dialect):
        def process(value):
            # _cvt returns None for NULLs; `tup and ...` preserves that.
            tup = self._cvt(value, dialect)
            return tup and datetime.datetime(*tup)
        return process
class SLDate(DateTimeMixin, sqltypes.Date):
    __format__ = "%Y-%m-%d"
    def get_col_spec(self):
        return "DATE"
    def result_processor(self, dialect):
        def process(value):
            tup = self._cvt(value, dialect)
            # Only the year/month/day fields are meaningful for dates.
            return tup and datetime.date(*tup[0:3])
        return process
class SLTime(DateTimeMixin, sqltypes.Time):
    __format__ = "%H:%M:%S"
    def get_col_spec(self):
        return "TIME"
    def result_processor(self, dialect):
        def process(value):
            tup = self._cvt(value, dialect)
            # Fields 3:7 are hour, minute, second, microsecond.
            return tup and datetime.time(*tup[3:7])
        return process
class SLText(sqltypes.TEXT):
    def get_col_spec(self):
        # Unbounded text columns.
        return "TEXT"
class SLString(sqltypes.String):
    def get_col_spec(self):
        # Variable-length string with declared length.
        return "VARCHAR(%(length)s)" % {'length' : self.length}
class SLChar(sqltypes.CHAR):
    def get_col_spec(self):
        # Fixed-length character column.
        return "CHAR(%(length)s)" % {'length' : self.length}
class SLBinary(sqltypes.Binary):
    def get_col_spec(self):
        # Binary data is stored as a BLOB in SQLite.
        return "BLOB"
class SLBoolean(sqltypes.Boolean):
    """Booleans are stored as the integers 0/1 in SQLite."""
    def get_col_spec(self):
        return "BOOLEAN"
    def bind_processor(self, dialect):
        def process(value):
            # NULLs pass through; anything else becomes 0 or 1.
            if value is None:
                return None
            if value:
                return 1
            return 0
        return process
    def result_processor(self, dialect):
        def process(value):
            if value is None:
                return None
            return bool(value)
        return process
# Maps generic SQLAlchemy types to their SQLite-specific implementations.
colspecs = {
    sqltypes.Integer : SLInteger,
    sqltypes.Smallinteger : SLSmallInteger,
    sqltypes.Numeric : SLNumeric,
    sqltypes.Float : SLNumeric,
    sqltypes.DateTime : SLDateTime,
    sqltypes.Date : SLDate,
    sqltypes.Time : SLTime,
    sqltypes.String : SLString,
    sqltypes.Binary : SLBinary,
    sqltypes.Boolean : SLBoolean,
    sqltypes.TEXT : SLText,
    sqltypes.CHAR: SLChar,
}
# Maps type names reported by "PRAGMA table_info" back to type classes;
# used during table reflection (see SQLiteDialect.reflecttable).
pragma_names = {
    'INTEGER' : SLInteger,
    'INT' : SLInteger,
    'SMALLINT' : SLSmallInteger,
    'VARCHAR' : SLString,
    'CHAR' : SLChar,
    'TEXT' : SLText,
    'NUMERIC' : SLNumeric,
    'FLOAT' : SLNumeric,
    'TIMESTAMP' : SLDateTime,
    'DATETIME' : SLDateTime,
    'DATE' : SLDate,
    'BLOB' : SLBinary,
    'BOOL': SLBoolean,
    'BOOLEAN': SLBoolean,
}
def descriptor():
    """Return the metadata dict advertising this dialect and its URL
    arguments."""
    return {
        'name': 'sqlite',
        'description': 'SQLite',
        'arguments': [('database', "Database Filename", None)],
    }
class SQLiteExecutionContext(default.DefaultExecutionContext):
    def post_exec(self):
        # After a single-row INSERT, fill in the primary key from the
        # cursor's lastrowid when it was not provided explicitly.
        if self.compiled.isinsert and not self.executemany:
            if not len(self._last_inserted_ids) or self._last_inserted_ids[0] is None:
                self._last_inserted_ids = [self.cursor.lastrowid] + self._last_inserted_ids[1:]
    def is_select(self):
        # PRAGMA statements also return rows, hence the custom regexp.
        return SELECT_REGEXP.match(self.statement)
class SQLiteDialect(default.DefaultDialect):
    supports_alter = False
    supports_unicode_statements = True
    def __init__(self, **kwargs):
        default.DefaultDialect.__init__(self, default_paramstyle='qmark', **kwargs)
        def vers(num):
            # "3.2.3" -> (3, 2, 3) for version comparisons.
            return tuple([int(x) for x in num.split('.')])
        if self.dbapi is not None:
            sqlite_ver = self.dbapi.version_info
            if sqlite_ver < (2,1,'3'):
                warnings.warn(RuntimeWarning("The installed version of pysqlite2 (%s) is out-dated, and will cause errors in some cases.  Version 2.1.3 or greater is recommended." % '.'.join([str(subver) for subver in sqlite_ver])))
        # CAST is only available in SQLite >= 3.2.3.
        self.supports_cast = (self.dbapi is None or vers(self.dbapi.sqlite_version) >= vers("3.2.3"))
    def dbapi(cls):
        # Prefer the external pysqlite2 package, falling back to the
        # stdlib sqlite3 module (Python 2.5+).
        try:
            from pysqlite2 import dbapi2 as sqlite
        except ImportError, e:
            try:
                from sqlite3 import dbapi2 as sqlite #try the 2.5+ stdlib name.
            except ImportError:
                raise e
        return sqlite
    dbapi = classmethod(dbapi)
    def server_version_info(self, connection):
        return self.dbapi.sqlite_version_info
    def create_connect_args(self, url):
        filename = url.database or ':memory:'
        opts = url.query.copy()
        # Coerce URL query-string options to the types pysqlite expects.
        util.coerce_kw_type(opts, 'timeout', float)
        util.coerce_kw_type(opts, 'isolation_level', str)
        util.coerce_kw_type(opts, 'detect_types', int)
        util.coerce_kw_type(opts, 'check_same_thread', bool)
        util.coerce_kw_type(opts, 'cached_statements', int)
        return ([filename], opts)
    def type_descriptor(self, typeobj):
        return sqltypes.adapt_type(typeobj, colspecs)
    def create_execution_context(self, connection, **kwargs):
        return SQLiteExecutionContext(self, connection, **kwargs)
    def oid_column_name(self, column):
        return "oid"
    def table_names(self, connection, schema):
        s = "SELECT name FROM sqlite_master WHERE type='table'"
        return [row[0] for row in connection.execute(s)]
    def has_table(self, connection, table_name, schema=None):
        cursor = connection.execute("PRAGMA table_info(%s)" %
            self.identifier_preparer.quote_identifier(table_name), {})
        row = cursor.fetchone()
        # consume remaining rows, to work around: http://www.sqlite.org/cvstrac/tktview?tn=1884
        while cursor.fetchone() is not None:pass
        return (row is not None)
    def reflecttable(self, connection, table, include_columns):
        """Populate ``table`` with columns, foreign keys and (partially)
        unique indexes reflected via SQLite PRAGMA statements."""
        c = connection.execute("PRAGMA table_info(%s)" % self.identifier_preparer.format_table(table), {})
        found_table = False
        while True:
            row = c.fetchone()
            if row is None:
                break
            found_table = True
            # PRAGMA table_info columns: cid, name, type, notnull,
            # dflt_value, pk.
            (name, type_, nullable, has_default, primary_key) = (row[1], row[2].upper(), not row[3], row[4] is not None, row[5])
            name = re.sub(r'^\"|\"$', '', name)
            if include_columns and name not in include_columns:
                continue
            # Split e.g. "VARCHAR(30)" into the type name and its args.
            match = re.match(r'(\w+)(\(.*?\))?', type_)
            if match:
                coltype = match.group(1)
                args = match.group(2)
            else:
                coltype = "VARCHAR"
                args = ''
            try:
                coltype = pragma_names[coltype]
            except KeyError:
                warnings.warn(RuntimeWarning("Did not recognize type '%s' of column '%s'" % (coltype, name)))
                coltype = sqltypes.NULLTYPE
            if args is not None:
                args = re.findall(r'(\d+)', args)
                coltype = coltype(*[int(a) for a in args])
            colargs= []
            if has_default:
                # '?' is a placeholder; the actual default text is not
                # reflected here.
                colargs.append(PassiveDefault('?'))
            table.append_column(schema.Column(name, coltype, primary_key = primary_key, nullable = nullable, *colargs))
        if not found_table:
            raise exceptions.NoSuchTableError(table.name)
        c = connection.execute("PRAGMA foreign_key_list(%s)" % self.identifier_preparer.format_table(table), {})
        fks = {}
        while True:
            row = c.fetchone()
            if row is None:
                break
            (constraint_name, tablename, localcol, remotecol) = (row[0], row[2], row[3], row[4])
            tablename = re.sub(r'^\"|\"$', '', tablename)
            localcol = re.sub(r'^\"|\"$', '', localcol)
            remotecol = re.sub(r'^\"|\"$', '', remotecol)
            try:
                fk = fks[constraint_name]
            except KeyError:
                fk = ([],[])
                fks[constraint_name] = fk
            #print "row! " + repr([key for key in row.keys()]), repr(row)
            # look up the table based on the given table's engine, not 'self',
            # since it could be a ProxyEngine
            remotetable = schema.Table(tablename, table.metadata, autoload=True, autoload_with=connection)
            constrained_column = table.c[localcol].name
            refspec = ".".join([tablename, remotecol])
            if constrained_column not in fk[0]:
                fk[0].append(constrained_column)
            if refspec not in fk[1]:
                fk[1].append(refspec)
        for name, value in fks.iteritems():
            table.append_constraint(schema.ForeignKeyConstraint(value[0], value[1]))
        # check for UNIQUE indexes
        c = connection.execute("PRAGMA index_list(%s)" % self.identifier_preparer.format_table(table), {})
        unique_indexes = []
        while True:
            row = c.fetchone()
            if row is None:
                break
            if (row[2] == 1):
                unique_indexes.append(row[1])
        # loop thru unique indexes for one that includes the primary key
        for idx in unique_indexes:
            c = connection.execute("PRAGMA index_info(" + idx + ")", {})
            cols = []
            while True:
                row = c.fetchone()
                if row is None:
                    break
                # NOTE(review): `cols` is collected but never used afterwards;
                # unique-index reflection appears unfinished here.
                cols.append(row[2])
class SQLiteCompiler(compiler.DefaultCompiler):
    def visit_cast(self, cast, **kwargs):
        # Older SQLite versions lack CAST; fall back to emitting the bare
        # clause (see SQLiteDialect.supports_cast).
        if self.dialect.supports_cast:
            return super(SQLiteCompiler, self).visit_cast(cast)
        else:
            if self.stack and self.stack[-1].get('select'):
                # not sure if we want to set the typemap here...
                self.typemap.setdefault("CAST", cast.type)
            return self.process(cast.clause)
    def limit_clause(self, select):
        text = ""
        if select._limit is not None:
            text +=  " \n LIMIT " + str(select._limit)
        if select._offset is not None:
            if select._limit is None:
                # SQLite requires a LIMIT before OFFSET; -1 means unlimited.
                text += " \n LIMIT -1"
            text += " OFFSET " + str(select._offset)
        else:
            text += " OFFSET 0"
        return text
    def for_update_clause(self, select):
        # sqlite has no "FOR UPDATE" AFAICT
        return ''
class SQLiteSchemaGenerator(compiler.SchemaGenerator):
    def get_column_specification(self, column, **kwargs):
        """Build the DDL fragment for one column: name, type, DEFAULT and
        NOT NULL."""
        colspec = self.preparer.format_column(column) + " " + column.type.dialect_impl(self.dialect).get_col_spec()
        default = self.get_column_default_string(column)
        if default is not None:
            colspec += " DEFAULT " + default
        if not column.nullable:
            colspec += " NOT NULL"
        return colspec
    # this doesnt seem to be needed, although i suspect older versions of sqlite might still
    # not directly support composite primary keys
    #def visit_primary_key_constraint(self, constraint):
    #    if len(constraint) > 1:
    #        self.append(", \n")
    #        # put all PRIMARY KEYS in a UNIQUE index
    #        self.append("\tUNIQUE (%s)" % string.join([c.name for c in constraint],', '))
    #    else:
    #        super(SQLiteSchemaGenerator, self).visit_primary_key_constraint(constraint)
class SQLiteSchemaDropper(compiler.SchemaDropper):
    # No SQLite-specific DROP behavior is needed.
    pass
class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
    # Identifiers matching these SQLite keywords must be quoted.
    reserved_words = util.Set([
        'add', 'after', 'all', 'alter', 'analyze', 'and', 'as', 'asc',
        'attach', 'autoincrement', 'before', 'begin', 'between', 'by',
        'cascade', 'case', 'cast', 'check', 'collate', 'column', 'commit',
        'conflict', 'constraint', 'create', 'cross', 'current_date',
        'current_time', 'current_timestamp', 'database', 'default',
        'deferrable', 'deferred', 'delete', 'desc', 'detach', 'distinct',
        'drop', 'each', 'else', 'end', 'escape', 'except', 'exclusive',
        'explain', 'false', 'fail', 'for', 'foreign', 'from', 'full', 'glob',
        'group', 'having', 'if', 'ignore', 'immediate', 'in', 'index',
        'initially', 'inner', 'insert', 'instead', 'intersect', 'into', 'is',
        'isnull', 'join', 'key', 'left', 'like', 'limit', 'match', 'natural',
        'not', 'notnull', 'null', 'of', 'offset', 'on', 'or', 'order', 'outer',
        'plan', 'pragma', 'primary', 'query', 'raise', 'references',
        'reindex', 'rename', 'replace', 'restrict', 'right', 'rollback',
        'row', 'select', 'set', 'table', 'temp', 'temporary', 'then', 'to',
        'transaction', 'trigger', 'true', 'union', 'unique', 'update', 'using',
        'vacuum', 'values', 'view', 'virtual', 'when', 'where',
        ])
    def __init__(self, dialect):
        # SQLite has no schema qualifiers, so omit them when quoting.
        super(SQLiteIdentifierPreparer, self).__init__(dialect, omit_schema=True)
# Wire the SQLite-specific classes into the dialect entry point.
dialect = SQLiteDialect
dialect.poolclass = pool.SingletonThreadPool
dialect.statement_compiler = SQLiteCompiler
dialect.schemagenerator = SQLiteSchemaGenerator
dialect.schemadropper = SQLiteSchemaDropper
dialect.preparer = SQLiteIdentifierPreparer
| bsd-3-clause |
sunnyzwh/readthedocs.org | readthedocs/redirects/utils.py | 39 | 1316 | from django.conf import settings
from django.core.urlresolvers import reverse
def redirect_filename(project, filename=None):
    """
    Return a url for a page. Always use http for now,
    to avoid content warnings.

    Arguments:
        project: project the page belongs to.
        filename: page path; a full ``http(s)://...`` URL is returned
            unchanged.  ``None`` is treated as the project root.
    """
    protocol = "http"
    if filename is None:
        # Bug fix: the default value previously crashed on
        # filename.startswith() below; treat it as the project root.
        filename = ''
    # Handle explicit http redirects ('https://...' also matches 'http').
    if filename.startswith(protocol):
        return filename

    version = project.get_default_version()
    lang = project.language
    use_subdomain = getattr(settings, 'USE_SUBDOMAIN', False)
    if use_subdomain:
        if project.single_version:
            return "%s://%s/%s" % (
                protocol,
                project.subdomain,
                filename,
            )
        else:
            return "%s://%s/%s/%s/%s" % (
                protocol,
                project.subdomain,
                lang,
                version,
                filename,
            )
    else:
        if project.single_version:
            return reverse('docs_detail', kwargs={
                'project_slug': project.slug,
                'filename': filename,
            })
        else:
            return reverse('docs_detail', kwargs={
                'project_slug': project.slug,
                'lang_slug': lang,
                'version_slug': version,
                'filename': filename,
            })
| mit |
kevroy314/msl-iposition-pipeline | cogrecon/core/data_flexing/category_data_flexor.py | 1 | 5232 | import easygui
import os
if __name__ == '__main__':
# noinspection PyUnresolvedReferences
from cogrecon.core.file_io import match_file_prefixes, find_data_files_in_directory, \
extract_prefixes_from_file_list_via_suffix, get_coordinates_from_file
from cogrecon.core.cogrecon_globals import data_coordinates_file_suffix, category_file_suffix, \
actual_coordinates_file_suffix
else:
# noinspection PyUnresolvedReferences
from ..file_io import match_file_prefixes, find_data_files_in_directory, \
extract_prefixes_from_file_list_via_suffix, get_coordinates_from_file
from ..cogrecon_globals import data_coordinates_file_suffix, category_file_suffix, \
actual_coordinates_file_suffix
def process_category_files(selected_directory=None, output_path='..\\..\\..\\saved_data\\category_reprocessed\\'):
    """
    This function performs a very specific task as requested by a researcher. It first prompts for the selection
    of a particular directory. It searches that directory and sub directories for files with a particular suffix assumed
    to be in the custom category format. It also finds the associated data coordinates file and splits the files into
    ###_category_position_data_coordinates.txt, ###_nocategory_position_data_coordinates.txt,
    ###_category_categories.txt, ###_nocategory_categories.txt. It also takes the root actual_coordinates.txt file and
    generates ###_nocategory_actual_corodinates.txt, ###_category_actual_coordinates.txt files for each participant.
    The result is written to a specified output path (created if it does not already exist).

    :param selected_directory: The string path to a directory to scan for files ending in study_iposition_data.txt.
                               If left empty, a popup dialog will be presented to select a directory.

    :param output_path: The directory into which the output files should be saved.
    """
    if selected_directory is None:
        selected_directory = easygui.diropenbox()
    actual_coordinates_files, data_files, category_files, order_files = \
        find_data_files_in_directory(selected_directory, _category_file_suffix='study_iposition_data.txt')
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    # All participants share one actual-coordinates file: 40 trials of
    # 6 items with (x, y) coordinates.
    actual_coordinates_data = get_coordinates_from_file(actual_coordinates_files[0], (40, 6, 2))
    # NOTE(review): zip() assumes data_files and category_files are returned
    # in matching participant order - verify in find_data_files_in_directory.
    for df, cf in zip(data_files, category_files):
        prefix = extract_prefixes_from_file_list_via_suffix([df], suffix=data_coordinates_file_suffix)[0]
        print('Parsing {0}'.format(prefix))
        category_data = []
        categorization_data = []
        # Get the categorization split information
        # (column 1 is the per-trial condition code; columns 2-7 are the
        # per-item category values).
        with open(cf, 'rU') as fp:
            contents = fp.readlines()
            for line in contents:
                split_line = line.split('\t')
                categorization_data.append(int(split_line[1]))
                category_data.append([int(split_line[i]) for i in range(2, 8)])
        data_coordinates_data = get_coordinates_from_file(df, (40, 6, 2))
        # Write data files; condition code 2 -> "category" trials,
        # code 1 -> "nocategory" trials.
        with open(os.path.join(output_path, prefix+'category_'+data_coordinates_file_suffix), 'w') as fp:
            for trial, cat in zip(data_coordinates_data, categorization_data):
                if cat == 2:
                    fp.write('\t'.join([str(item) for sublist in trial for item in sublist]) + '\n')
        with open(os.path.join(output_path, prefix + 'nocategory_' + data_coordinates_file_suffix), 'w') as fp:
            for trial, cat in zip(data_coordinates_data, categorization_data):
                if cat == 1:
                    fp.write('\t'.join([str(item) for sublist in trial for item in sublist]) + '\n')
        # Write category files
        with open(os.path.join(output_path, prefix + 'category_' + category_file_suffix), 'w') as fp:
            for trial, cat in zip(category_data, categorization_data):
                if cat == 2:
                    fp.write('\t'.join([str(item) for item in trial]) + '\n')
        with open(os.path.join(output_path, prefix + 'nocategory_' + category_file_suffix), 'w') as fp:
            for trial, cat in zip(category_data, categorization_data):
                if cat == 1:
                    fp.write('\t'.join([str(item) for item in trial]) + '\n')
        # Write actual coordinate files (filtered per participant by the same
        # condition codes).
        with open(os.path.join(output_path,
                               prefix + 'category_' + actual_coordinates_file_suffix), 'w') as fp:
            for trial, cat in zip(actual_coordinates_data, categorization_data):
                if cat == 2:
                    fp.write('\t'.join([str(item) for sublist in trial for item in sublist]) + '\n')
        with open(os.path.join(output_path,
                               prefix + 'nocategory_' + actual_coordinates_file_suffix), 'w') as fp:
            for trial, cat in zip(actual_coordinates_data, categorization_data):
                if cat == 1:
                    fp.write('\t'.join([str(item) for sublist in trial for item in sublist]) + '\n')
    print("Done!")
# Script entry point; runs against a fixed network share path.
if __name__ == '__main__':
    process_category_files(selected_directory=r'Z:\Kevin\iPosition\Hillary\Category_Squig_iPos')
| gpl-3.0 |
hastexo/edx-platform | common/test/acceptance/tests/lms/test_bookmarks.py | 5 | 23481 | # -*- coding: utf-8 -*-
"""
End-to-end tests for the courseware unit bookmarks.
"""
import json
from unittest import skip
import requests
from nose.plugins.attrib import attr
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.common import BASE_URL
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.common.logout import LogoutPage
from common.test.acceptance.pages.lms.bookmarks import BookmarksPage
from common.test.acceptance.pages.lms.course_home import CourseHomePage
from common.test.acceptance.pages.lms.courseware import CoursewarePage
from common.test.acceptance.pages.studio.overview import CourseOutlinePage as StudioCourseOutlinePage
from common.test.acceptance.tests.helpers import EventsTestMixin, UniqueCourseTest, is_404_page
class BookmarksTestMixin(EventsTestMixin, UniqueCourseTest):
    """
    Mixin with helper methods for testing Bookmarks.
    """
    USERNAME = "STUDENT"
    EMAIL = "student@example.com"

    def setUp(self):
        super(BookmarksTestMixin, self).setUp()
        self.studio_course_outline_page = StudioCourseOutlinePage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        self.courseware_page = CoursewarePage(self.browser, self.course_id)
        self.course_home_page = CourseHomePage(self.browser, self.course_id)
        self.bookmarks_page = BookmarksPage(self.browser, self.course_id)

        # Get session to be used for bookmarking units
        self.session = requests.Session()
        params = {'username': self.USERNAME, 'email': self.EMAIL, 'course_id': self.course_id}
        response = self.session.get(BASE_URL + "/auto_auth", params=params)
        self.assertTrue(response.ok, "Failed to get session")

    def setup_test(self, num_chapters=2):
        """
        Setup test settings.
        Arguments:
            num_chapters: number of chapters to create in course
        """
        self.create_course_fixture(num_chapters)

        # Auto-auth register for the course.
        AutoAuthPage(self.browser, username=self.USERNAME, email=self.EMAIL, course_id=self.course_id).visit()
        self.courseware_page.visit()

    def create_course_fixture(self, num_chapters):
        """
        Create course fixture
        Arguments:
            num_chapters: number of chapters to create
        """
        self.course_fixture = CourseFixture(  # pylint: disable=attribute-defined-outside-init
            self.course_info['org'], self.course_info['number'],
            self.course_info['run'], self.course_info['display_name']
        )
        # Each chapter contains one subsection with one unit, all with
        # predictable indexed names (TestSection0/TestSubsection0/...).
        xblocks = []
        for index in range(num_chapters):
            xblocks += [
                XBlockFixtureDesc('chapter', 'TestSection{}'.format(index)).add_children(
                    XBlockFixtureDesc('sequential', 'TestSubsection{}'.format(index)).add_children(
                        XBlockFixtureDesc('vertical', 'TestVertical{}'.format(index))
                    )
                )
            ]
        self.course_fixture.add_children(*xblocks).install()

    def verify_event_data(self, event_type, event_data):
        """
        Verify emitted event data.
        Arguments:
            event_type: expected event type
            event_data: expected event data
        """
        actual_events = self.wait_for_events(event_filter={'event_type': event_type}, number_of_matches=1)
        self.assert_events_match(event_data, actual_events)

    def _bookmark_unit(self, location):
        """
        Bookmark a unit via the REST API using the authenticated session.
        Arguments:
            location (str): unit location
        """
        _headers = {
            'Content-type': 'application/json',
            'X-CSRFToken': self.session.cookies['csrftoken'],
        }
        params = {'course_id': self.course_id}
        data = json.dumps({'usage_id': location})
        response = self.session.post(
            BASE_URL + '/api/bookmarks/v1/bookmarks/',
            data=data,
            params=params,
            headers=_headers
        )
        self.assertTrue(response.ok, "Failed to bookmark unit")

    def bookmark_units(self, num_units):
        """
        Bookmark first `num_units` units
        Arguments:
            num_units(int): Number of units to bookmarks
        """
        xblocks = self.course_fixture.get_nested_xblocks(category="vertical")
        for index in range(num_units):
            self._bookmark_unit(xblocks[index].locator)
@attr(shard=8)
class BookmarksTest(BookmarksTestMixin):
    """
    Tests to verify bookmarks functionality.

    All tests drive a live browser through the courseware, bookmarks, and
    studio outline page objects; helper methods at the top set up fixtures
    and perform the shared verification steps.
    """

    def _breadcrumb(self, num_units, modified_name=None):
        """
        Creates breadcrumbs for the first `num_units`

        Arguments:
            num_units(int): Number of units for which we want to create breadcrumbs
            modified_name(str): if given, used as the vertical (unit) name for
                every breadcrumb instead of the fixture default

        Returns:
            list of breadcrumbs, each a [section, subsection, unit] triple
        """
        breadcrumbs = []
        for index in range(num_units):
            breadcrumbs.append(
                [
                    'TestSection{}'.format(index),
                    'TestSubsection{}'.format(index),
                    modified_name if modified_name else 'TestVertical{}'.format(index)
                ]
            )
        return breadcrumbs

    def _delete_section(self, index):
        """ Delete a section at index `index` """
        # Logout and login as staff
        LogoutPage(self.browser).visit()
        AutoAuthPage(
            self.browser, username=self.USERNAME, email=self.EMAIL, course_id=self.course_id, staff=True
        ).visit()
        # Visit course outline page in studio.
        self.studio_course_outline_page.visit()
        self.studio_course_outline_page.wait_for_page()
        self.studio_course_outline_page.section_at(index).delete()
        # Logout and login as a student.
        LogoutPage(self.browser).visit()
        AutoAuthPage(self.browser, username=self.USERNAME, email=self.EMAIL, course_id=self.course_id).visit()
        # Visit courseware as a student.
        self.courseware_page.visit()
        self.courseware_page.wait_for_page()

    def _toggle_bookmark_and_verify(self, bookmark_icon_state, bookmark_button_state, bookmarked_count):
        """
        Bookmark/Un-Bookmark a unit and then verify

        Clicks the bookmark button on the current courseware unit, checks the
        resulting icon/button state, then checks the bookmarks page count.
        """
        self.assertTrue(self.courseware_page.bookmark_button_visible)
        self.courseware_page.click_bookmark_unit_button()
        self.assertEqual(self.courseware_page.bookmark_icon_visible, bookmark_icon_state)
        self.assertEqual(self.courseware_page.bookmark_button_state, bookmark_button_state)
        self.bookmarks_page.visit()
        self.assertEqual(self.bookmarks_page.count(), bookmarked_count)

    def _verify_pagination_info(
            self,
            bookmark_count_on_current_page,
            header_text,
            previous_button_enabled,
            next_button_enabled,
            current_page_number,
            total_pages
    ):
        """
        Verify pagination info
        """
        self.assertEqual(self.bookmarks_page.count(), bookmark_count_on_current_page)
        self.assertEqual(self.bookmarks_page.get_pagination_header_text(), header_text)
        self.assertEqual(self.bookmarks_page.is_previous_page_button_enabled(), previous_button_enabled)
        self.assertEqual(self.bookmarks_page.is_next_page_button_enabled(), next_button_enabled)
        self.assertEqual(self.bookmarks_page.get_current_page_number(), current_page_number)
        # NOTE(review): `get_total_pages` is read without parentheses here and
        # elsewhere in this file — presumably a property on the page object;
        # confirm against the BookmarksPage definition.
        self.assertEqual(self.bookmarks_page.get_total_pages, total_pages)

    def _verify_breadcrumbs(self, num_units, modified_name=None):
        """
        Verifies the breadcrumb trail.
        """
        bookmarked_breadcrumbs = self.bookmarks_page.breadcrumbs()
        # Verify bookmarked breadcrumbs. The page lists most-recent-first, so
        # reverse the expected (creation-order) list before comparing.
        breadcrumbs = self._breadcrumb(num_units=num_units, modified_name=modified_name)
        breadcrumbs.reverse()
        self.assertEqual(bookmarked_breadcrumbs, breadcrumbs)

    def update_and_publish_block_display_name(self, modified_name):
        """
        Update and publish the block/unit display name.

        Edits the first unit of the first section via studio and publishes it.
        """
        self.studio_course_outline_page.visit()
        self.studio_course_outline_page.wait_for_page()
        self.studio_course_outline_page.expand_all_subsections()
        section = self.studio_course_outline_page.section_at(0)
        container_page = section.subsection_at(0).unit_at(0).go_to()
        self.course_fixture._update_xblock(container_page.locator, {  # pylint: disable=protected-access
            "metadata": {
                "display_name": modified_name
            }
        })
        container_page.visit()
        container_page.wait_for_page()
        self.assertEqual(container_page.name, modified_name)
        container_page.publish_action.click()

    def test_bookmark_button(self):
        """
        Scenario: Bookmark unit button toggles correctly

        Given that I am a registered user
        And I visit my courseware page
        For first 2 units
            I visit the unit
            And I can see the Bookmark button
            When I click on Bookmark button
            Then unit should be bookmarked
            Then I click again on the bookmark button
            And I should see a unit un-bookmarked
        """
        self.setup_test()
        for index in range(2):
            self.course_home_page.visit()
            self.course_home_page.outline.go_to_section('TestSection{}'.format(index), 'TestSubsection{}'.format(index))
            self._toggle_bookmark_and_verify(True, 'bookmarked', 1)
            self.course_home_page.visit()
            self.course_home_page.outline.go_to_section('TestSection{}'.format(index), 'TestSubsection{}'.format(index))
            self._toggle_bookmark_and_verify(False, '', 0)

    # TODO: TNL-6546: Remove this test
    def test_courseware_bookmarks_button(self):
        """
        Scenario: (Temporarily) test that the courseware's "Bookmarks" button works.
        """
        self.setup_test()
        self.bookmark_units(2)
        self.courseware_page.visit()
        self.courseware_page.click_bookmarks_button()
        self.assertTrue(self.bookmarks_page.is_browser_on_page())

    def test_empty_bookmarks_list(self):
        """
        Scenario: An empty bookmarks list is shown if there are no bookmarked units.

        Given that I am a registered user
        And I visit my bookmarks page
        Then I should see an empty bookmarks list
        And empty bookmarks list content is correct
        """
        self.setup_test()
        self.bookmarks_page.visit()
        empty_list_text = (
            'Use bookmarks to help you easily return to courseware pages. '
            'To bookmark a page, click "Bookmark this page" under the page title.')
        self.assertEqual(self.bookmarks_page.empty_list_text(), empty_list_text)

    def test_bookmarks_list(self):
        """
        Scenario: A bookmarks list is shown if there are bookmarked units.

        Given that I am a registered user
        And I have bookmarked 2 units
        And I visit my bookmarks page
        Then I should see a bookmarked list with 2 bookmark links
        And breadcrumb trail is correct for a bookmark
        When I click on bookmarked link
        Then I can navigate to correct bookmarked unit
        """
        self.setup_test()
        self.bookmark_units(2)
        self.bookmarks_page.visit()
        self._verify_breadcrumbs(num_units=2)
        self._verify_pagination_info(
            bookmark_count_on_current_page=2,
            header_text='Showing 1-2 out of 2 total',
            previous_button_enabled=False,
            next_button_enabled=False,
            current_page_number=1,
            total_pages=1
        )
        # get usage ids for units
        xblocks = self.course_fixture.get_nested_xblocks(category="vertical")
        xblock_usage_ids = [xblock.locator for xblock in xblocks]
        # Verify link navigation
        for index in range(2):
            self.bookmarks_page.visit()
            self.bookmarks_page.click_bookmarked_block(index)
            self.courseware_page.wait_for_page()
            self.assertIn(self.courseware_page.active_usage_id(), xblock_usage_ids)

    def test_bookmark_shows_updated_breadcrumb_after_publish(self):
        """
        Scenario: A bookmark breadcrumb trail is updated after publishing the changed display name.

        Given that I am a registered user
        And I visit my courseware page
        And I can see bookmarked unit
        Then I visit unit page in studio
        Then I change unit display_name
        And I publish the changes
        Then I visit my bookmarks page
        When I see the bookmark
        Then I can see the breadcrumb trail has the updated display_name.
        """
        self.setup_test(num_chapters=1)
        self.bookmark_units(num_units=1)
        self.bookmarks_page.visit()
        self._verify_breadcrumbs(num_units=1)

        LogoutPage(self.browser).visit()
        AutoAuthPage(
            self.browser,
            username=self.USERNAME,
            email=self.EMAIL,
            course_id=self.course_id,
            staff=True
        ).visit()

        modified_name = "Updated name"
        self.update_and_publish_block_display_name(modified_name)

        LogoutPage(self.browser).visit()
        AutoAuthPage(self.browser, username=self.USERNAME, email=self.EMAIL, course_id=self.course_id).visit()

        self.bookmarks_page.visit()
        self._verify_breadcrumbs(num_units=1, modified_name=modified_name)

    @skip("andya: 10/19/17: potentially flaky test")
    def test_unreachable_bookmark(self):
        """
        Scenario: We should get a HTTP 404 for an unreachable bookmark.

        Given that I am a registered user
        And I have bookmarked 2 units
        And I delete a bookmarked unit
        And I visit my bookmarks page
        Then I should see a bookmarked list
        When I click on the deleted bookmark
        Then I should navigated to 404 page
        """
        self.setup_test(num_chapters=1)
        self.bookmark_units(1)
        self._delete_section(0)

        self.bookmarks_page.visit()
        self._verify_pagination_info(
            bookmark_count_on_current_page=1,
            header_text='Showing 1 out of 1 total',
            previous_button_enabled=False,
            next_button_enabled=False,
            current_page_number=1,
            total_pages=1
        )
        self.bookmarks_page.click_bookmarked_block(0)
        self.assertTrue(is_404_page(self.browser))

    def test_page_size_limit(self):
        """
        Scenario: We can't get bookmarks more than default page size.

        Given that I am a registered user
        And I have bookmarked all the 11 units available
        And I visit my bookmarks page
        Then I should see a bookmarked list
        And the bookmark list should contain 10 bookmarked items
        """
        self.setup_test(11)
        self.bookmark_units(11)
        self.bookmarks_page.visit()
        self._verify_pagination_info(
            bookmark_count_on_current_page=10,
            header_text='Showing 1-10 out of 11 total',
            previous_button_enabled=False,
            next_button_enabled=True,
            current_page_number=1,
            total_pages=2
        )

    def test_pagination_with_single_page(self):
        """
        Scenario: Bookmarks list pagination is working as expected for single page

        Given that I am a registered user
        And I have bookmarked all the 2 units available
        And I visit my bookmarks page
        Then I should see a bookmarked list with 2 bookmarked items
        And I should see paging header and footer with correct data
        And previous and next buttons are disabled
        """
        self.setup_test(num_chapters=2)
        self.bookmark_units(num_units=2)

        self.bookmarks_page.visit()
        self.assertTrue(self.bookmarks_page.results_present())
        self._verify_pagination_info(
            bookmark_count_on_current_page=2,
            header_text='Showing 1-2 out of 2 total',
            previous_button_enabled=False,
            next_button_enabled=False,
            current_page_number=1,
            total_pages=1
        )

    def test_next_page_button(self):
        """
        Scenario: Next button is working as expected for bookmarks list pagination

        Given that I am a registered user
        And I have bookmarked all the 12 units available
        And I visit my bookmarks page
        Then I should see a bookmarked list of 10 items
        And I should see paging header and footer with correct info
        Then I click on next page button in footer
        And I should be navigated to second page
        And I should see a bookmarked list with 2 items
        And I should see paging header and footer with correct info
        """
        self.setup_test(num_chapters=12)
        self.bookmark_units(num_units=12)

        self.bookmarks_page.visit()
        self.assertTrue(self.bookmarks_page.results_present())
        self._verify_pagination_info(
            bookmark_count_on_current_page=10,
            header_text='Showing 1-10 out of 12 total',
            previous_button_enabled=False,
            next_button_enabled=True,
            current_page_number=1,
            total_pages=2
        )
        self.bookmarks_page.press_next_page_button()
        self._verify_pagination_info(
            bookmark_count_on_current_page=2,
            header_text='Showing 11-12 out of 12 total',
            previous_button_enabled=True,
            next_button_enabled=False,
            current_page_number=2,
            total_pages=2
        )

    def test_previous_page_button(self):
        """
        Scenario: Previous button is working as expected for bookmarks list pagination

        Given that I am a registered user
        And I have bookmarked all the 12 units available
        And I visit my bookmarks page
        Then I click on next page button in footer
        And I should be navigated to second page
        And I should see a bookmarked list with 2 items
        And I should see paging header and footer with correct info
        Then I click on previous page button
        And I should be navigated to first page
        And I should see paging header and footer with correct info
        """
        self.setup_test(num_chapters=12)
        self.bookmark_units(num_units=12)

        self.bookmarks_page.visit()
        self.assertTrue(self.bookmarks_page.results_present())
        self.bookmarks_page.press_next_page_button()
        self._verify_pagination_info(
            bookmark_count_on_current_page=2,
            header_text='Showing 11-12 out of 12 total',
            previous_button_enabled=True,
            next_button_enabled=False,
            current_page_number=2,
            total_pages=2
        )
        self.bookmarks_page.press_previous_page_button()
        self._verify_pagination_info(
            bookmark_count_on_current_page=10,
            header_text='Showing 1-10 out of 12 total',
            previous_button_enabled=False,
            next_button_enabled=True,
            current_page_number=1,
            total_pages=2
        )

    def test_pagination_with_valid_page_number(self):
        """
        Scenario: Bookmarks list pagination works as expected for valid page number

        Given that I am a registered user
        And I have bookmarked all the 11 units available
        And I visit my bookmarks page
        Then I should see a bookmarked list
        And I should see total page value is 2
        Then I enter 2 in the page number input
        And I should be navigated to page 2
        """
        self.setup_test(num_chapters=11)
        self.bookmark_units(num_units=11)
        self.bookmarks_page.visit()
        self.assertTrue(self.bookmarks_page.results_present())
        self.assertEqual(self.bookmarks_page.get_total_pages, 2)
        self.bookmarks_page.go_to_page(2)
        self._verify_pagination_info(
            bookmark_count_on_current_page=1,
            header_text='Showing 11-11 out of 11 total',
            previous_button_enabled=True,
            next_button_enabled=False,
            current_page_number=2,
            total_pages=2
        )

    def test_pagination_with_invalid_page_number(self):
        """
        Scenario: Bookmarks list pagination works as expected for invalid page number

        Given that I am a registered user
        And I have bookmarked all the 11 units available
        And I visit my bookmarks page
        Then I should see a bookmarked list
        And I should see total page value is 2
        Then I enter 3 in the page number input
        And I should stay at page 1
        """
        self.setup_test(num_chapters=11)
        self.bookmark_units(num_units=11)
        self.bookmarks_page.visit()
        self.assertTrue(self.bookmarks_page.results_present())
        self.assertEqual(self.bookmarks_page.get_total_pages, 2)
        self.bookmarks_page.go_to_page(3)
        self._verify_pagination_info(
            bookmark_count_on_current_page=10,
            header_text='Showing 1-10 out of 11 total',
            previous_button_enabled=False,
            next_button_enabled=True,
            current_page_number=1,
            total_pages=2
        )

    def test_bookmarked_unit_accessed_event(self):
        """
        Scenario: Bookmark events are emitted with correct data when we access/visit a bookmarked unit.

        Given that I am a registered user
        And I visit my courseware page
        And I have bookmarked a unit
        When I click on bookmarked unit
        Then `edx.bookmark.accessed` event is emitted
        """
        self.setup_test(num_chapters=1)
        self.reset_event_tracking()

        # create expected event data
        xblocks = self.course_fixture.get_nested_xblocks(category="vertical")
        event_data = [
            {
                'event': {
                    'bookmark_id': '{},{}'.format(self.USERNAME, xblocks[0].locator),
                    'component_type': xblocks[0].category,
                    'component_usage_id': xblocks[0].locator,
                }
            }
        ]
        self.bookmark_units(num_units=1)
        self.bookmarks_page.visit()
        self._verify_pagination_info(
            bookmark_count_on_current_page=1,
            header_text='Showing 1 out of 1 total',
            previous_button_enabled=False,
            next_button_enabled=False,
            current_page_number=1,
            total_pages=1
        )
        self.bookmarks_page.click_bookmarked_block(0)
        self.verify_event_data('edx.bookmark.accessed', event_data)
@attr('a11y')
class BookmarksA11yTests(BookmarksTestMixin):
    """
    Tests for checking the a11y of the bookmarks page.
    """

    def test_view_a11y(self):
        """
        Verify the basic accessibility of the bookmarks page while paginated.
        """
        # 11 bookmarks — more than one page's worth elsewhere in this file —
        # so the audit presumably exercises the paginated layout; confirm the
        # page size if this assumption matters.
        self.setup_test(num_chapters=11)
        self.bookmark_units(num_units=11)
        self.bookmarks_page.visit()
        self.bookmarks_page.a11y_audit.check_for_accessibility_errors()
| agpl-3.0 |
espadrine/opera | chromium/src/third_party/python_26/Tools/scripts/redemo.py | 63 | 5769 | """Basic regular expression demostration facility (Perl style syntax)."""
from Tkinter import *
import re
class ReDemo:
    """Tk application that interactively highlights regex matches and groups.

    NOTE: this file uses Python 2 syntax (`except re.error, msg`) and the
    Python 2 `Tkinter` module; it will not run under Python 3 unchanged.
    """

    def __init__(self, master):
        """Build the widget tree inside `master` and wire up key bindings."""
        self.master = master
        self.promptdisplay = Label(self.master, anchor=W,
                                   text="Enter a Perl-style regular expression:")
        self.promptdisplay.pack(side=TOP, fill=X)
        self.regexdisplay = Entry(self.master)
        self.regexdisplay.pack(fill=X)
        self.regexdisplay.focus_set()
        self.addoptions()
        self.statusdisplay = Label(self.master, text="", anchor=W)
        self.statusdisplay.pack(side=TOP, fill=X)
        self.labeldisplay = Label(self.master, anchor=W,
                                  text="Enter a string to search:")
        self.labeldisplay.pack(fill=X)
        # NOTE(review): duplicate pack() of the same widget — almost certainly
        # a copy-paste slip; packing twice is harmless but redundant.
        self.labeldisplay.pack(fill=X)
        self.showframe = Frame(master)
        self.showframe.pack(fill=X, anchor=W)
        self.showvar = StringVar(master)
        self.showvar.set("first")
        self.showfirstradio = Radiobutton(self.showframe,
                                          text="Highlight first match",
                                          variable=self.showvar,
                                          value="first",
                                          command=self.recompile)
        self.showfirstradio.pack(side=LEFT)
        self.showallradio = Radiobutton(self.showframe,
                                        text="Highlight all matches",
                                        variable=self.showvar,
                                        value="all",
                                        command=self.recompile)
        self.showallradio.pack(side=LEFT)
        self.stringdisplay = Text(self.master, width=60, height=4)
        self.stringdisplay.pack(fill=BOTH, expand=1)
        self.stringdisplay.tag_configure("hit", background="yellow")
        self.grouplabel = Label(self.master, text="Groups:", anchor=W)
        self.grouplabel.pack(fill=X)
        self.grouplist = Listbox(self.master)
        self.grouplist.pack(expand=1, fill=BOTH)
        self.regexdisplay.bind('<Key>', self.recompile)
        self.stringdisplay.bind('<Key>', self.reevaluate)
        self.compiled = None
        self.recompile()
        # Rotate the bindtags so the widget's own class binding fires before
        # our <Key> handler sees the (already updated) widget contents.
        btags = self.regexdisplay.bindtags()
        self.regexdisplay.bindtags(btags[1:] + btags[:1])
        btags = self.stringdisplay.bindtags()
        self.stringdisplay.bindtags(btags[1:] + btags[:1])

    def addoptions(self):
        """Create one Checkbutton per supported re flag, three per row."""
        self.frames = []
        self.boxes = []
        self.vars = []
        for name in ('IGNORECASE',
                     'LOCALE',
                     'MULTILINE',
                     'DOTALL',
                     'VERBOSE'):
            if len(self.boxes) % 3 == 0:
                frame = Frame(self.master)
                frame.pack(fill=X)
                self.frames.append(frame)
            # onvalue is the flag's numeric value, so getflags() can just OR
            # the variables together.
            val = getattr(re, name)
            var = IntVar()
            box = Checkbutton(frame,
                              variable=var, text=name,
                              offvalue=0, onvalue=val,
                              command=self.recompile)
            box.pack(side=LEFT)
            self.boxes.append(box)
            self.vars.append(var)

    def getflags(self):
        """Return the re flags currently selected via the checkbuttons."""
        flags = 0
        for var in self.vars:
            flags = flags | var.get()
        # NOTE(review): this self-assignment is a no-op left in the original.
        flags = flags
        return flags

    def recompile(self, event=None):
        """Recompile the regex from the entry widget and refresh the display."""
        try:
            self.compiled = re.compile(self.regexdisplay.get(),
                                       self.getflags())
            bg = self.promptdisplay['background']
            self.statusdisplay.config(text="", background=bg)
        except re.error, msg:
            self.compiled = None
            self.statusdisplay.config(
                text="re.error: %s" % str(msg),
                background="red")
        self.reevaluate()

    def reevaluate(self, event=None):
        """Re-run the compiled regex over the text and highlight all matches."""
        try:
            self.stringdisplay.tag_remove("hit", "1.0", END)
        except TclError:
            pass
        try:
            self.stringdisplay.tag_remove("hit0", "1.0", END)
        except TclError:
            pass
        self.grouplist.delete(0, END)
        if not self.compiled:
            return
        self.stringdisplay.tag_configure("hit", background="yellow")
        self.stringdisplay.tag_configure("hit0", background="orange")
        text = self.stringdisplay.get("1.0", END)
        last = 0
        nmatches = 0
        while last <= len(text):
            m = self.compiled.search(text, last)
            if m is None:
                break
            first, last = m.span()
            # Zero-width matches get the "hit0" tag and advance by one char so
            # the loop cannot spin forever on the same position.
            if last == first:
                last = first+1
                tag = "hit0"
            else:
                tag = "hit"
            pfirst = "1.0 + %d chars" % first
            plast = "1.0 + %d chars" % last
            self.stringdisplay.tag_add(tag, pfirst, plast)
            if nmatches == 0:
                self.stringdisplay.yview_pickplace(pfirst)
            groups = list(m.groups())
            groups.insert(0, m.group())
            for i in range(len(groups)):
                g = "%2d: %r" % (i, groups[i])
                self.grouplist.insert(END, g)
            nmatches = nmatches + 1
            if self.showvar.get() == "first":
                break
        if nmatches == 0:
            self.statusdisplay.config(text="(no match)",
                                      background="yellow")
        else:
            self.statusdisplay.config(text="")
# Main function, run when invoked as a stand-alone Python program.
def main():
    """Launch the regular-expression demo in its own Tk root window."""
    tk_root = Tk()
    demo = ReDemo(tk_root)  # keep a reference for the lifetime of the loop
    tk_root.protocol('WM_DELETE_WINDOW', tk_root.quit)
    tk_root.mainloop()


if __name__ == '__main__':
    main()
| bsd-3-clause |
dyoung418/tensorflow | tensorflow/contrib/quantize/python/fold_batch_norms.py | 9 | 20981 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Logic to fold batch norm into preceding convolution or FC layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.contrib import graph_editor
from tensorflow.contrib.quantize.python import common
from tensorflow.contrib.quantize.python import graph_matcher
from tensorflow.contrib.quantize.python import input_to_ops
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_ops
def FoldBatchNorms(graph):
  """Finds batch norm layers and folds them into preceding layers.

  Folding only affects the following layers: Conv2D, fully connected, depthwise
  convolution.

  Args:
    graph: Graph to walk and modify.

  Raises:
    ValueError: When batch norm folding fails.
  """
  # Handle the fused form first, then the unfused (decomposed) form — the
  # same order the two passes ran in originally.
  for fold_pass in (_FoldFusedBatchNorms, _FoldUnfusedBatchNorms):
    fold_pass(graph)
def _FoldFusedBatchNorms(graph):
  """Finds fused batch norm layers and folds them into preceding layers.

  Folding only affects the following layers: Conv2D, fully connected, depthwise
  convolution.

  Args:
    graph: Graph to walk and modify.

  Raises:
    ValueError: When batch norm folding fails.
  """
  for match in _FindFusedBatchNorms(graph):
    scope, sep, _ = match.layer_op.name.rpartition('/')
    # Make sure new ops are added to `graph` and put on the same device as
    # `bn_op`. The '/' (i.e. `sep`) ensures that we reuse the existing scope
    # named `scope`. Otherwise, TF creates a unique scope whose name starts with
    # `scope`.
    with graph.as_default(), graph.name_scope(scope + sep), ops.device(
        match.bn_op.device):
      # new weights = old weights * gamma / sqrt(variance + epsilon)
      # new biases = -mean * gamma / sqrt(variance + epsilon) + beta
      multiplier_tensor = match.gamma_tensor * math_ops.rsqrt(
          match.variance_tensor + match.bn_op.get_attr('epsilon'))
      bias_tensor = math_ops.subtract(
          match.beta_tensor, match.mean_tensor * multiplier_tensor, name='bias')

      # The shape of depthwise weights is different, so we need to reshape the
      # multiplier_tensor to ensure that the scaled_weight_tensor has the
      # expected shape.
      if match.layer_op.type == 'DepthwiseConv2dNative':
        new_shape = [
            match.weight_tensor.get_shape().as_list()[2],
            match.weight_tensor.get_shape().as_list()[3]
        ]
        multiplier_tensor = array_ops.reshape(
            multiplier_tensor, new_shape, name='scale_reshape')

      # TODO(suharshs): This naming of the following ops needs to carefully
      # follow the naming expected by quantize.py. Generalize the quantize code
      # to not require these delicate naming conventions.
      scaled_weight_tensor = math_ops.multiply(
          match.weight_tensor, multiplier_tensor, name='mul_fold')
      new_layer_tensor = _CloneWithNewOperands(
          match.layer_op, match.input_tensor, scaled_weight_tensor)
      bias_add_tensor = math_ops.add(
          new_layer_tensor, bias_tensor, name='add_fold')

      # Splice the folded subgraph in: consumers of the batch norm's output
      # are rerouted to read from add_fold instead. Exactly one op must be
      # rewired, otherwise the match was ambiguous.
      nodes_modified_count = graph_editor.reroute_ts(bias_add_tensor,
                                                     match.output_tensor)
      if nodes_modified_count != 1:
        raise ValueError(
            'Unexpected inputs to op: %s' % match.output_tensor.name)
def _CloneWithNewOperands(layer_op, input_tensor, weight_tensor):
  """Clones layer_op so that it consumes input_tensor and weight_tensor.

  Args:
    layer_op: Operation of type Conv2D, MatMul or DepthwiseConv2dNative.
    input_tensor: Tensor to use as the clone's first operand.
    weight_tensor: Tensor to use as the clone's weight operand.

  Returns:
    The output tensor of the cloned op, named `<original>_Fold`.

  Raises:
    ValueError: if layer_op is not one of the supported op types.
  """
  folded_name = layer_op.name.split('/')[-1] + '_Fold'
  op_type = layer_op.type
  if op_type == 'Conv2D':
    return nn_ops.conv2d(
        input_tensor,
        weight_tensor,
        strides=layer_op.get_attr('strides'),
        padding=layer_op.get_attr('padding'),
        use_cudnn_on_gpu=layer_op.get_attr('use_cudnn_on_gpu'),
        data_format=layer_op.get_attr('data_format'),
        name=folded_name)
  if op_type == 'MatMul':
    return math_ops.matmul(
        input_tensor,
        weight_tensor,
        transpose_a=layer_op.get_attr('transpose_a'),
        transpose_b=layer_op.get_attr('transpose_b'),
        name=folded_name)
  if op_type == 'DepthwiseConv2dNative':
    return nn.depthwise_conv2d(
        input_tensor,
        weight_tensor,
        strides=layer_op.get_attr('strides'),
        padding=layer_op.get_attr('padding'),
        name=folded_name)
  raise ValueError('Cannot handle operation of type: %s' % layer_op.type)
def _FindFusedBatchNorms(graph):
  """Finds all ops and tensors related to found FusedBatchNorms.

  Args:
    graph: Graph to inspect.

  Yields:
    _FusedBatchNormMatches, one per FusedBatchNorm following either a
    Conv2D/DepthwiseConv2dNative layer or a (reshaped) MatMul layer.
  """
  # Wildcard leaf patterns for the tensors we need to extract from a match.
  input_pattern = graph_matcher.OpTypePattern('*')
  weight_pattern = graph_matcher.OpTypePattern('*')
  gamma_pattern = graph_matcher.OpTypePattern('*')
  beta_pattern = graph_matcher.OpTypePattern('*')
  mean_pattern = graph_matcher.OpTypePattern('*')
  variance_pattern = graph_matcher.OpTypePattern('*')
  conv_pattern = graph_matcher.OpTypePattern(
      'Conv2D|DepthwiseConv2dNative', inputs=[input_pattern, weight_pattern])
  # MatMul has a Reshape between it and FusedBatchNorm.
  matmul_pattern = graph_matcher.OpTypePattern(
      'MatMul', inputs=[input_pattern, weight_pattern])
  matmul_reshape_pattern = graph_matcher.OpTypePattern(
      'Reshape', inputs=[matmul_pattern,
                         graph_matcher.OpTypePattern('*')])

  conv_batch_norm_pattern = graph_matcher.OpTypePattern(
      'FusedBatchNorm',
      inputs=[
          conv_pattern, gamma_pattern, beta_pattern, mean_pattern,
          variance_pattern
      ])
  matmul_batch_norm_pattern = graph_matcher.OpTypePattern(
      'FusedBatchNorm',
      inputs=[
          matmul_reshape_pattern, gamma_pattern, beta_pattern, mean_pattern,
          variance_pattern
      ])
  matmul_bn_output_reshape_pattern = graph_matcher.OpTypePattern(
      'Reshape',
      inputs=[matmul_batch_norm_pattern,
              graph_matcher.OpTypePattern('*')])

  conv_matcher = graph_matcher.GraphMatcher(conv_batch_norm_pattern)
  matmul_matcher = graph_matcher.GraphMatcher(matmul_bn_output_reshape_pattern)

  def _GetCommonTensors(match_result, bn_op):
    """Gets tensors needed for FusedBatchNormMatch from match_result.

    Args:
      match_result: a graph_matcher.MatchResult for one of the patterns above.
      bn_op: the matched FusedBatchNorm operation. Passed explicitly (rather
        than relying on late binding of the enclosing scope's loop variable,
        as the previous version did) so the helper cannot pick up a stale or
        unset `bn_op`.

    Returns:
      Tuple of (input, weight, gamma, beta, mean, variance) tensors.
    """
    input_tensor = match_result.get_tensor(input_pattern)
    weight_tensor = match_result.get_tensor(weight_pattern)
    gamma_tensor = match_result.get_tensor(gamma_pattern)
    beta_tensor = match_result.get_tensor(beta_pattern)
    # FusedBatchNorm in training is different from that in inference. It takes
    # empty 'mean' and empty 'variance', and produces the mean and the variance
    # of the batch. Therefore, when is_training is true, mean_tensor and
    # variance_tensor point to 1st and 2nd (0-based) output of bn_op,
    # respectively; when is_training is false, they point to bn_op's inputs.
    is_training = bn_op.get_attr('is_training')
    if is_training:
      mean_tensor = bn_op.outputs[1]
      variance_tensor = bn_op.outputs[2]
    else:
      mean_tensor = match_result.get_tensor(mean_pattern)
      variance_tensor = match_result.get_tensor(variance_pattern)
    return (input_tensor, weight_tensor, gamma_tensor, beta_tensor, mean_tensor,
            variance_tensor)

  for match_result in conv_matcher.match_graph(graph):
    layer_op = match_result.get_op(conv_pattern)
    bn_op = match_result.get_op(conv_batch_norm_pattern)
    # In the case of convolution the output_tensor is the output of bn_op.
    output_tensor = bn_op.outputs[0]

    (input_tensor, weight_tensor, gamma_tensor, beta_tensor, mean_tensor,
     variance_tensor) = _GetCommonTensors(match_result, bn_op)
    yield _FusedBatchNormMatch(
        layer_op=layer_op,
        bn_op=bn_op,
        output_tensor=output_tensor,
        input_tensor=input_tensor,
        weight_tensor=weight_tensor,
        gamma_tensor=gamma_tensor,
        beta_tensor=beta_tensor,
        mean_tensor=mean_tensor,
        variance_tensor=variance_tensor)

  for match_result in matmul_matcher.match_graph(graph):
    layer_op = match_result.get_op(matmul_pattern)
    bn_op = match_result.get_op(matmul_batch_norm_pattern)
    # In the MatMul case, the output of batch norm is reshaped back into a
    # 2D tensor, so the output_tensor is the output of the Reshape op.
    output_reshape_op = match_result.get_op(matmul_bn_output_reshape_pattern)
    output_tensor = output_reshape_op.outputs[0]

    (input_tensor, weight_tensor, gamma_tensor, beta_tensor, mean_tensor,
     variance_tensor) = _GetCommonTensors(match_result, bn_op)
    yield _FusedBatchNormMatch(
        layer_op=layer_op,
        bn_op=bn_op,
        output_tensor=output_tensor,
        input_tensor=input_tensor,
        weight_tensor=weight_tensor,
        gamma_tensor=gamma_tensor,
        beta_tensor=beta_tensor,
        mean_tensor=mean_tensor,
        variance_tensor=variance_tensor)
class _FusedBatchNormMatch(object):
  """Immutable bundle of the ops/tensors for one matched FusedBatchNorm."""

  # Field names, in constructor-argument order; each is exposed read-only
  # through a property of the same name backed by a `_`-prefixed attribute.
  _FIELDS = ('layer_op', 'bn_op', 'output_tensor', 'input_tensor',
             'weight_tensor', 'gamma_tensor', 'beta_tensor', 'mean_tensor',
             'variance_tensor')

  def __init__(self, layer_op, bn_op, output_tensor, input_tensor,
               weight_tensor, gamma_tensor, beta_tensor, mean_tensor,
               variance_tensor):
    values = (layer_op, bn_op, output_tensor, input_tensor, weight_tensor,
              gamma_tensor, beta_tensor, mean_tensor, variance_tensor)
    for field_name, value in zip(self._FIELDS, values):
      setattr(self, '_' + field_name, value)

  layer_op = property(lambda self: self._layer_op)
  bn_op = property(lambda self: self._bn_op)
  output_tensor = property(lambda self: self._output_tensor)
  input_tensor = property(lambda self: self._input_tensor)
  weight_tensor = property(lambda self: self._weight_tensor)
  gamma_tensor = property(lambda self: self._gamma_tensor)
  beta_tensor = property(lambda self: self._beta_tensor)
  mean_tensor = property(lambda self: self._mean_tensor)
  variance_tensor = property(lambda self: self._variance_tensor)
def _FoldUnfusedBatchNorms(graph):
  """Finds unfused batch norm layers and folds them into preceding layers.

  Folding only affects the following layers: Conv2D, fully connected, depthwise
  convolution.

  Args:
    graph: Graph to walk and modify.

  Raises:
    ValueError: When batch norm folding fails.
  """
  input_to_ops_map = input_to_ops.InputToOps(graph)

  for bn in common.BatchNormGroups(graph):
    has_scaling = _HasScaling(graph, input_to_ops_map, bn)

    # The mangling code intimately depends on BatchNorm node's internals.
    original_op, folded_op = _CreateFoldedOp(graph, bn, has_scaling=has_scaling)

    # If the batch norm feeds a Relu*-style activation, reroute that
    # activation to consume the folded op's output instead.
    activation = common.GetEndpointActivationOp(graph, bn)
    if activation:
      nodes_modified_count = graph_editor.reroute_ts([folded_op.outputs[0]],
                                                     [original_op.outputs[0]],
                                                     can_modify=[activation])
      if nodes_modified_count != 1:
        raise ValueError('Unexpected inputs to op: %s' % activation.name)
      continue

    # Treat consumer ops in bypass modules differently since they have Add
    # operations instead of Relu* above.
    # The bypass context is the parent scope of the batch norm group's name.
    add_bypass_ctx = re.search(r'^(.*)/([^/]+)', bn).group(1)
    add_bypass = graph.get_operation_by_name(add_bypass_ctx + '/Add')
    nodes_modified_count = graph_editor.reroute_ts([folded_op.outputs[0]],
                                                   [original_op.outputs[0]],
                                                   can_modify=[add_bypass])
    if nodes_modified_count != 1:
      raise ValueError('Unexpected inputs to op: %s' % add_bypass.name)
def _HasScaling(graph, input_to_ops_map, bn):
  r"""Checks if batch norm has scaling enabled.

  Difference between batch norm with scaling and without is that with scaling:

    Rsqrt -> mul -> mul_1
                \-> mul_2

  where
    mul multiplies gamma by inverse square root of EMA of batch variance,
    mul_1 multiplies output of mul with output from the base operation
    (convolution, FC or depthwise convolution),
    mul_2 multiplies output of mul with EMA of batch mean,
  and without scaling:

    Rsqrt -> mul
         \-> mul_1

  where
    mul multiplies the inverse square root of EMA of batch variance with output
    from the base operation,
    mul_1 multiplies inverse square root of EMA of batch variance with EMA
    of batch mean.

  Args:
    graph: Graph to inspect.
    input_to_ops_map: InputToOps object containing mapping from tensor's name
      to ops that take it as input.
    bn: Batch norm layer prefix string.

  Returns:
    A boolean indicating whether this batch norm layer has scaling enabled.
  """
  # With scaling, Rsqrt feeds exactly one Mul; without, it feeds two.
  rsqrt = graph.get_operation_by_name(bn + '/BatchNorm/batchnorm/Rsqrt')
  consumers = input_to_ops_map.ConsumerOperations(rsqrt)
  mul_consumer_count = len([op for op in consumers if op.type == 'Mul'])
  return mul_consumer_count == 1
def _CreateFoldedOp(graph, context, has_scaling):
  """Folds in batch norm layer into preceding convolution or FC layer.

  Creates 3 new nodes, connects their inputs and adds them to the graph:
  mul is cloned into mul_fold, Conv2D or MatMul, or DepthwiseConv2d is cloned
  into respective *_Fold, add is cloned into add_fold.

  Args:
    graph: Graph to modify.
    context: String, batch norm context, i.e. node into which BatchNorm is
      nested.
    has_scaling: Whether the batch norm has scaling enabled.

  Raises:
    ValueError: When operation type is not supported, or input and output tensor
      shapes mismatch for created operations: mul_fold, add_fold.

  Returns:
    A pair of Operations, the first is the original consumer node of the batch
    norm (../BatchNorm/batchnorm/add_1), the second is the consumer node of
    the folded graph (add_fold).
  """
  # With scaling, the per-channel scale is gamma/sqrt(var) (node 'mul_1');
  # without scaling it is just 1/sqrt(var) (node 'mul').
  mul_scale_name = 'mul_1' if has_scaling else 'mul'
  mul_scale = graph.get_operation_by_name(context +
                                          '/BatchNorm/batchnorm/' +
                                          mul_scale_name)
  op_below = mul_scale.inputs[0].op
  weights = op_below.inputs[1]

  # Special handling for weights of depthwise convolution.
  if op_below.type == 'DepthwiseConv2dNative':
    # Depthwise weights are laid out (H, W, in_channels, multiplier); the
    # scale vector must be reshaped to (in_channels, multiplier) so it
    # broadcasts over the filter weights correctly.
    new_shape = [weights.get_shape().as_list()[2],
                 weights.get_shape().as_list()[3]]
    scale_name = 'mul' if has_scaling else 'Rsqrt'
    scale = graph.get_operation_by_name(context + '/BatchNorm/batchnorm/' +
                                        scale_name)
    scale = array_ops.reshape(scale.outputs[0], new_shape,
                              context + '/scale_reshape')
    mul_fold = _CloneOp(mul_scale, context + '/mul_fold',
                        [(0, weights), (1, scale)])
  elif op_below.type in ['Conv2D', 'MatMul']:
    mul_fold = _CloneOp(mul_scale, context + '/mul_fold', [(0, weights)])
  else:
    # Bug fix: tf.Operation has no '.op' attribute (that lives on Tensor), so
    # the original '%s % op_below.op' raised AttributeError instead of the
    # intended ValueError.  Report the operation's type string.
    raise ValueError('Cannot handle operation of type: %s' % op_below.type)
  _AssertShapesMatch('mul_fold', mul_fold.inputs[0], mul_fold.outputs[0])

  # Clone the conv/FC op so it consumes the pre-scaled weights.
  conv_or_fc_folded = _CloneOp(op_below, op_below.name + '_Fold',
                               [(1, mul_fold.outputs[0])])

  # The bias add (beta - scaled mean) is cloned on top of the folded conv/FC.
  add_shift = graph.get_operation_by_name(context +
                                          '/BatchNorm/batchnorm/add_1')
  add_fold = _CloneOp(add_shift, context + '/add_fold',
                      [(0, conv_or_fc_folded.outputs[0])])
  _AssertShapesMatch('add_fold', add_fold.inputs[0], add_fold.outputs[0])
  return add_shift, add_fold
def _CloneOp(op, new_name, new_inputs):
  """Clones a given op, replaces its name and some of its inputs.

  Args:
    op: Operation to modify.
    new_name: String, a new name to set on cloned op.
    new_inputs: A list of tuples (idx, tensor), each input with corresponding
      index will be replaced by the given Tensor in the cloned op.

  Returns:
    Operation, the cloned op.

  Raises:
    TypeError: When Operation type is not supported.
    ValueError: When input shapes are incompatible.
  """
  # Start from the original input list and patch in the requested overrides.
  inputs = list(op.inputs)
  for index, tensor in new_inputs:
    inputs[index] = tensor
  return _OP_CLONER.Clone(op, inputs, new_name)
class _OpCloner(object):
  """Helper class that clones tf.Operations based on their type."""

  def __init__(self):
    # Dispatch table from op type string to the method that re-creates it.
    self.op_type_to_action = {
        'Mul': self._CloneMul,
        'Add': self._CloneAdd,
        'Conv2D': self._CloneConv2d,
        'DepthwiseConv2dNative': self._CloneDepthwiseConv2d,
        'MatMul': self._CloneMatMul,
    }

  def _CloneMul(self, op, inputs, new_name):
    """Recreates a Mul op with the given inputs under a new name."""
    del op  # Unused.
    return math_ops.multiply(inputs[0], inputs[1], name=new_name).op

  def _CloneAdd(self, op, inputs, new_name):
    """Recreates an Add op with the given inputs under a new name."""
    del op  # Unused.
    return math_ops.add(inputs[0], inputs[1], name=new_name).op

  def _CloneConv2d(self, op, inputs, new_name):
    """Recreates a Conv2D op, copying stride/padding/format attrs from `op`."""
    input_tensor = inputs[0]
    weights = inputs[1]
    self._AssertConvShapes(op.name, input_tensor, weights)
    return nn_ops.conv2d(
        input_tensor,
        weights,
        strides=op.get_attr('strides'),
        padding=op.get_attr('padding'),
        use_cudnn_on_gpu=op.get_attr('use_cudnn_on_gpu'),
        data_format=op.get_attr('data_format'),
        name=new_name).op

  def _CloneDepthwiseConv2d(self, op, inputs, new_name):
    """Recreates a DepthwiseConv2dNative op with attrs copied from `op`."""
    input_tensor = inputs[0]
    weights = inputs[1]
    self._AssertConvShapes(op.name, input_tensor, weights)
    return nn.depthwise_conv2d(
        input_tensor,
        weights,
        strides=op.get_attr('strides'),
        padding=op.get_attr('padding'),
        name=new_name).op

  def _CloneMatMul(self, op, inputs, new_name):
    """Recreates a MatMul op with transpose attrs copied from `op`."""
    # NOTE(review): here inputs[0] is treated as the weights operand and
    # inputs[1] as the data operand -- the reverse of the conv methods;
    # confirm this matches the input order of the cloned MatMul nodes.
    weights = inputs[0]
    input_tensor = inputs[1]
    self._AssertFCShapes(op.name, weights, input_tensor)
    return math_ops.matmul(
        weights,
        input_tensor,
        transpose_a=op.get_attr('transpose_a'),
        transpose_b=op.get_attr('transpose_b'),
        name=new_name).op

  def Clone(self, op, inputs, new_name):
    """Dispatches to the per-type clone method; TypeError if unsupported."""
    try:
      return self.op_type_to_action[op.type](op, inputs, new_name)
    except KeyError:
      raise TypeError('Unsupported operation type: %s' % op.type)

  def _AssertConvShapes(self, op_name, input_tensor, weights):
    """Makes sure that convolution inputs have compatible shapes.

    Args:
      op_name: Operation name, only used in error message.
      input_tensor: Input that is convolved.
      weights: Weights of the convolution filter.

    Raises:
      ValueError: When input shapes are incompatible.
    """
    input_shape = input_tensor.get_shape()
    weights_shape = weights.get_shape()
    # NHWC input channel count must equal the filter's in_channels dim.
    if (len(input_shape) != 4 or len(weights_shape) != 4 or
        input_shape[3] != weights_shape[2]):
      raise ValueError('Incompatible shapes for op %s inputs: %s and %s' %
                       (op_name, input_shape, weights_shape))

  def _AssertFCShapes(self, op_name, weights, input_tensor):
    """Makes sure that FC layer inputs have compatible shapes.

    Args:
      op_name: Operation name, only used in error message.
      weights: Weights used in FC layer.
      input_tensor: Input into FC layer.

    Raises:
      ValueError: When input shapes are incompatible.
    """
    weights_shape = weights.get_shape()
    input_shape = input_tensor.get_shape()
    # Inner dimensions must agree for weights @ input.
    if (len(weights_shape) != 2 or len(input_shape) != 2 or
        weights_shape[1] != input_shape[0]):
      raise ValueError('Incompatible shapes for op %s inputs: %s and %s' %
                       (op_name, weights_shape, input_shape))

# Module-level singleton used by _CloneOp.
_OP_CLONER = _OpCloner()
def _AssertShapesMatch(op_name, in_tensor, out_tensor):
"""Makes sure that shapes of input and output tensors are compatible.
Args:
op_name: String, operation name, only used in error message.
in_tensor: Tensor, input tensor.
out_tensor: Tensor, output tensor.
Raises:
ValueError: When input and output tensors have different shapes.
"""
in_shape = in_tensor.get_shape()
out_shape = out_tensor.get_shape()
if not in_shape.is_compatible_with(out_shape):
raise ValueError('%s should not change tensor shape: input %s, '
'output %s' % (op_name, in_shape, out_shape))
| apache-2.0 |
hlzz/dotfiles | graphics/VTK-7.0.0/ThirdParty/Twisted/twisted/web/_responses.py | 2 | 3726 | # -*- test-case-name: twisted.web.test.test_http -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
HTTP response code definitions.
"""
from __future__ import division, absolute_import
# HTTP status code constants.  NOTE(review): _CONTINUE is underscore-private,
# presumably because 100-continue is handled by the framework rather than by
# application code -- confirm before exporting it.

# 1xx informational
_CONTINUE = 100
SWITCHING = 101

# 2xx success
OK = 200
CREATED = 201
ACCEPTED = 202
NON_AUTHORITATIVE_INFORMATION = 203
NO_CONTENT = 204
RESET_CONTENT = 205
PARTIAL_CONTENT = 206
MULTI_STATUS = 207

# 3xx redirection
MULTIPLE_CHOICE = 300
MOVED_PERMANENTLY = 301
FOUND = 302
SEE_OTHER = 303
NOT_MODIFIED = 304
USE_PROXY = 305
TEMPORARY_REDIRECT = 307

# 4xx client errors
BAD_REQUEST = 400
UNAUTHORIZED = 401
PAYMENT_REQUIRED = 402
FORBIDDEN = 403
NOT_FOUND = 404
NOT_ALLOWED = 405
NOT_ACCEPTABLE = 406
PROXY_AUTH_REQUIRED = 407
REQUEST_TIMEOUT = 408
CONFLICT = 409
GONE = 410
LENGTH_REQUIRED = 411
PRECONDITION_FAILED = 412
REQUEST_ENTITY_TOO_LARGE = 413
REQUEST_URI_TOO_LONG = 414
UNSUPPORTED_MEDIA_TYPE = 415
REQUESTED_RANGE_NOT_SATISFIABLE = 416
EXPECTATION_FAILED = 417

# 5xx server errors
INTERNAL_SERVER_ERROR = 500
NOT_IMPLEMENTED = 501
BAD_GATEWAY = 502
SERVICE_UNAVAILABLE = 503
GATEWAY_TIMEOUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
INSUFFICIENT_STORAGE_SPACE = 507
NOT_EXTENDED = 510
# Mapping of status code to its reason phrase, used when writing the HTTP
# status line.  These strings go on the wire -- do not alter them casually.
RESPONSES = {
    # 100
    _CONTINUE: "Continue",
    SWITCHING: "Switching Protocols",

    # 200
    OK: "OK",
    CREATED: "Created",
    ACCEPTED: "Accepted",
    NON_AUTHORITATIVE_INFORMATION: "Non-Authoritative Information",
    NO_CONTENT: "No Content",
    # NOTE(review): trailing period differs from the RFC reason phrase
    # "Reset Content"; kept verbatim since it is emitted to clients.
    RESET_CONTENT: "Reset Content.",
    PARTIAL_CONTENT: "Partial Content",
    MULTI_STATUS: "Multi-Status",

    # 300
    MULTIPLE_CHOICE: "Multiple Choices",
    MOVED_PERMANENTLY: "Moved Permanently",
    FOUND: "Found",
    SEE_OTHER: "See Other",
    NOT_MODIFIED: "Not Modified",
    USE_PROXY: "Use Proxy",
    # 306 not defined??
    TEMPORARY_REDIRECT: "Temporary Redirect",

    # 400
    BAD_REQUEST: "Bad Request",
    UNAUTHORIZED: "Unauthorized",
    PAYMENT_REQUIRED: "Payment Required",
    FORBIDDEN: "Forbidden",
    NOT_FOUND: "Not Found",
    NOT_ALLOWED: "Method Not Allowed",
    NOT_ACCEPTABLE: "Not Acceptable",
    PROXY_AUTH_REQUIRED: "Proxy Authentication Required",
    REQUEST_TIMEOUT: "Request Time-out",
    CONFLICT: "Conflict",
    GONE: "Gone",
    LENGTH_REQUIRED: "Length Required",
    PRECONDITION_FAILED: "Precondition Failed",
    REQUEST_ENTITY_TOO_LARGE: "Request Entity Too Large",
    REQUEST_URI_TOO_LONG: "Request-URI Too Long",
    UNSUPPORTED_MEDIA_TYPE: "Unsupported Media Type",
    REQUESTED_RANGE_NOT_SATISFIABLE: "Requested Range not satisfiable",
    EXPECTATION_FAILED: "Expectation Failed",

    # 500
    INTERNAL_SERVER_ERROR: "Internal Server Error",
    NOT_IMPLEMENTED: "Not Implemented",
    BAD_GATEWAY: "Bad Gateway",
    SERVICE_UNAVAILABLE: "Service Unavailable",
    GATEWAY_TIMEOUT: "Gateway Time-out",
    HTTP_VERSION_NOT_SUPPORTED: "HTTP Version not supported",
    INSUFFICIENT_STORAGE_SPACE: "Insufficient Storage Space",
    NOT_EXTENDED: "Not Extended"
    }
| bsd-3-clause |
arcean/telepathy-sunshine | debian/telepathy-sunshine/usr/lib/python2.6/dist-packages/sunshine/lqsoft/gaduapi.py | 4 | 14686 | from twisted.internet import reactor
from pprint import pformat
from twisted.internet.defer import Deferred
from twisted.internet.protocol import Protocol
from twisted.web.http_headers import Headers
from twisted.internet.defer import succeed
#from twisted.python import log
from twisted.internet import task
from twisted.web.client import getPage
from zope.interface import implements
import sys
import urllib
import logging
debug_mode = False
logger = logging.getLogger('Sunshine.GaduAPI')
try:
proper_twisted = True
from twisted.web.iweb import IBodyProducer
from twisted.web.client import Agent
except ImportError:
logger.info("Twisted version is too old.")
proper_twisted = False
try:
import oauth.oauth as oauth
test_oauth = oauth.OAuthSignatureMethod_HMAC_SHA1()
oauth_loaded = True
except:
logger.info("oAuth module can't be loaded")
oauth_loaded = False
import xml.etree.ElementTree as ET
import json
import mimetools
import mimetypes
import time
# Gadu-Gadu oAuth / REST API endpoints.  The %s placeholders in the last two
# take the user's UIN (Gadu-Gadu account number).
REQUEST_TOKEN_URL = 'http://api.gadu-gadu.pl/request_token'
ACCESS_TOKEN_URL = 'http://api.gadu-gadu.pl/access_token'
AUTHORIZE_TOKEN_URL = 'http://login.gadu-gadu.pl/authorize'
PUT_AVATAR_URL = 'http://api.gadu-gadu.pl/avatars/%s/0.xml'
GET_INFO_URL = 'http://api.gadu-gadu.pl/users/%s.json'
def check_requirements():
    """Return True when both twisted.web Agent support and python-oauth
    imported successfully at module load, False otherwise.

    On failure the reason is logged so the missing dependency is visible.
    """
    if proper_twisted and oauth_loaded:
        return True
    # Bug fix: this log statement used to sit after the return in both
    # branches and was therefore unreachable dead code; it now actually runs
    # whenever the requirements check fails.
    logger.info("Requirements related with Gadu-Gadu oAuth API support not fullfilled. You need twisted-core, twisted-web in version 9.0.0 or greater and python-oauth.")
    return False
class StringProducer(object):
    """A twisted.web body producer that serves a pre-built in-memory string."""
    # NOTE(review): declaring the interface conditionally in the class body is
    # fragile -- if the requirements check fails, the class silently stops
    # claiming IBodyProducer.  Confirm this degradation is intentional.
    if check_requirements() == True:
        implements(IBodyProducer)

    def __init__(self, body):
        # The entire request body and its length, as required by IBodyProducer.
        self.body = body
        self.length = len(body)

    def startProducing(self, consumer):
        # Write everything in one shot; production completes immediately.
        consumer.write(self.body)
        return succeed(None)

    def pauseProducing(self):
        # Nothing to pause: all data is delivered in startProducing.
        pass

    def stopProducing(self):
        # Nothing to stop for the same reason.
        pass
class BeginningPrinter(Protocol):
    """Accumulates up to the first 10 KiB of a response body and fires the
    supplied Deferred with it once the connection closes."""

    def __init__(self, finished):
        self.finished = finished
        # Byte budget still available; once exhausted, further data is ignored.
        self.remaining = 1024 * 10
        self.body = ''

    def dataReceived(self, bytes):
        # Guard clause: drop data once the budget is used up.
        if not self.remaining:
            return
        chunk = bytes[:self.remaining]
        self.body += chunk
        self.remaining -= len(chunk)

    def connectionLost(self, reason):
        # Deliver whatever was collected, regardless of why the
        # connection ended.
        self.finished.callback(self.body)
class GG_Oauth(object):
    """Client for the Gadu-Gadu oAuth REST API.

    Handles the three-step oAuth dance (request token -> authorize ->
    access token) and uses the resulting access token to upload avatars
    and fetch user profile information.
    """

    def __init__(self, uin, password):
        self.uin = uin
        self.password = password
        self.timestamp = 0
        # Unix time at which the current access token expires (0 = none yet).
        self.expire_token = 0
        self.access_token = None
        # LoopingCall used to poll until a fresh token arrives.
        self.__loopingcall = None
        self.agent = Agent(reactor)
        self.consumer = oauth.OAuthConsumer(self.uin, self.password)
        self._signature_method = oauth.OAuthSignatureMethod_HMAC_SHA1()

    #
    # REQUESTING TOKEN
    #
    def requestToken(self):
        """Starts the oAuth dance by asking the API for a request token."""
        if debug_mode:
            logger.info("requestToken")
        oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, http_method='POST', http_url=REQUEST_TOKEN_URL) # create an oauth request
        oauth_request.sign_request(self._signature_method, self.consumer, None) # the request knows how to generate a signature
        auth_header = oauth_request.to_header()
        headers = {}
        headers['Authorization'] = [auth_header['Authorization']]
        headers['Accept'] = ['application/json']
        headers['User-Agent'] = ['Gadu-Gadu Client, build 10,1,1,11119']
        headers['Host'] = ['api.gadu-gadu.pl']
        headers['Content-Length'] = [0]
        headers = Headers(headers)
        url = REQUEST_TOKEN_URL
        d = self.agent.request(
            'POST',
            REQUEST_TOKEN_URL,
            headers,
            None)
        d.addCallback(self.cbRequestToken)
        d.addErrback(self.cbShutdown)

    def cbRequestToken(self, response):
        """Collects the request-token response body before parsing it."""
        if debug_mode:
            logger.info("cbRequestToken")
        mimetype = response.headers.getRawHeaders('Content-Type', 'text/xml')
        finished = Deferred()
        response.deliverBody(BeginningPrinter(finished))
        finished.addCallback(self.cbRequestTokenSuccess, mimetype[0])
        finished.addErrback(self.cbShutdown)
        return finished

    def cbRequestTokenSuccess(self, result, mimetype):
        """Parses the request token (JSON or XML) and submits authorization."""
        if debug_mode:
            logger.info("cbRequestTokenSuccess result: %s, mime: %s" % (result, mimetype))
        content = {}
        if mimetype == 'application/json':
            content = json.loads(result)['result']
        elif mimetype == 'text/xml':
            xml = ET.fromstring(result)
            content['oauth_token'] = xml.find("oauth_token").text
            content['oauth_token_secret'] = xml.find("oauth_token_secret").text
        else:
            logger.info("cbRequestTokenSuccess failed: unknown mimetype.")
            return
        oauth_token = oauth.OAuthToken(content['oauth_token'], content['oauth_token_secret'])
        # Authorize the request token by posting the account credentials.
        postvars = 'callback_url=http://www.mojageneracja.pl&request_token=%s&uin=%s&password=%s' % (oauth_token.key, self.uin, self.password)
        headers = {}
        headers['User-Agent'] = ['Gadu-Gadu Client, build 10,1,1,11119']
        headers['Accept'] = ['*/*']
        headers['Content-Type'] = ['application/x-www-form-urlencoded']
        headers = Headers(headers)
        body = StringProducer(str(postvars))
        d = self.agent.request(
            'POST',
            AUTHORIZE_TOKEN_URL,
            headers,
            body)
        d.addCallback(self.cbTokenAuthorised, oauth_token)
        d.addErrback(self.cbShutdown)

    def cbTokenAuthorised(self, result, oauth_token):
        """Exchanges the now-authorized request token for an access token."""
        if debug_mode:
            logger.info("cbTokenAuthorised")
        oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, token=oauth_token, http_method='POST', http_url=ACCESS_TOKEN_URL) # create an oauth request
        oauth_request.sign_request(self._signature_method, self.consumer, oauth_token) # the request knows how to generate a signature
        auth_header = oauth_request.to_header()
        headers = {}
        headers['Authorization'] = [auth_header['Authorization']]
        headers['User-Agent'] = ['Gadu-Gadu Client, build 10,1,1,11119']
        headers['Accept'] = ['application/json']
        headers['Content-Length'] = [0]
        headers = Headers(headers)
        d = self.agent.request(
            'POST',
            ACCESS_TOKEN_URL,
            headers,
            None)
        d.addCallback(self.requestAccessToken, oauth_token)
        d.addErrback(self.cbShutdown)

    def requestAccessToken(self, response, oauth_token):
        """Collects the access-token response body before parsing it."""
        if debug_mode:
            logger.info("requestAccessToken")
        mimetype = response.headers.getRawHeaders('Content-Type', 'text/xml')
        finished = Deferred()
        response.deliverBody(BeginningPrinter(finished))
        finished.addCallback(self.accessTokenReceived, oauth_token, mimetype[0])
        finished.addErrback(self.cbShutdown)
        return finished

    def accessTokenReceived(self, result, oauth_token, mimetype):
        """Parses and stores the access token plus its expiry time."""
        if debug_mode:
            logger.info("accessTokenReceived result: %s, mimetype: %s" % (result, mimetype))
        content = {}
        if mimetype == 'application/json':
            content = json.loads(result)['result']
        elif mimetype == 'text/xml':
            xml = ET.fromstring(result)
            content['oauth_token'] = xml.find("oauth_token").text
            content['oauth_token_secret'] = xml.find("oauth_token_secret").text
        else:
            logger.info("accessTokenReceived failed: unknown mimetype.")
            return
        self.access_token = oauth.OAuthToken(content['oauth_token'], content['oauth_token_secret'])
        # Token is treated as valid for 10 hours from now.
        self.expire_token = time.time()+36000

    def getContentType(self, filename):
        """Guesses a MIME type from the filename, with a binary fallback."""
        return mimetypes.guess_type(filename)[0] or 'application/octet-stream'

    def getExtByType(self, mime):
        """Returns the file extension conventionally used for a MIME type."""
        return mimetypes.guess_extension(mime)

    def encodeMultipart(self, fields, files):
        """
        fields is a sequence of (name, value) elements for regular form fields.
        files is a sequence of (name, filename, value) elements for data to be uploaded as files
        Return (boundary, body); the boundary must be echoed in the
        multipart/form-data Content-Type header.
        """
        boundary = mimetools.choose_boundary()
        crlf = '\r\n'
        l = []
        for (k, v) in fields:
            l.append('--' + boundary)
            l.append('Content-Disposition: form-data; name="%s"' % k)
            l.append('')
            l.append(v)
        for (k, f, v) in files:
            l.append('--' + boundary)
            l.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (k, f))
            l.append('Content-Type: %s' % self.getContentType(f))
            l.append('')
            l.append(v)
        l.append('--' + boundary + '--')
        l.append('')
        body = crlf.join(l)
        return boundary, body

    def putAvatar(self, data, ext):
        """Uploads avatar bytes via a signed multipart POST (method=PUT)."""
        if debug_mode:
            logger.info("accessTokenReceived")
        url = str(PUT_AVATAR_URL % self.uin)
        oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, token=self.access_token, http_method='PUT', http_url=url) # create an oauth request
        oauth_request.sign_request(self._signature_method, self.consumer, self.access_token) # the request knows how to generate a signature
        auth_header = oauth_request.to_header()
        filename = str(self.uin)+ext
        # The HTTP verb is POST with a _method=PUT form override.
        (boundary, body) = self.encodeMultipart(fields=(('_method', 'PUT'),), files=(('avatar', filename, data),))
        body = StringProducer(str(body))
        headers = {}
        #headers['Connection'] = ['keep-alive']
        headers['Authorization'] = [auth_header['Authorization']]
        headers['User-Agent'] = ['Gadu-Gadu Client, build 10,1,1,11119']
        headers['Accept'] = ['*/*']
        headers['Content-Type'] = ['multipart/form-data; boundary=%s' % boundary]
        headers = Headers(headers)
        d = self.agent.request(
            'POST',
            url,
            headers,
            body)
        d.addCallback(self.putAvatarSuccess)
        d.addErrback(self.cbShutdown)

    def putAvatarSuccess(self, response):
        logger.info("New avatar should be uploaded now.")

    def fetchUserInfo(self, uin):
        """Fetches a user's public profile as JSON using the access token."""
        if debug_mode:
            logger.info("fetchUserInfo")
        url = str(GET_INFO_URL % uin)
        oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, token=self.access_token, http_method='GET', http_url=url) # create an oauth request
        oauth_request.sign_request(self._signature_method, self.consumer, self.access_token) # the request knows how to generate a signature
        auth_header = oauth_request.to_header()
        headers = {}
        #headers['Connection'] = ['keep-alive']
        headers['Authorization'] = [auth_header['Authorization']]
        headers['User-Agent'] = ['Gadu-Gadu Client, build 10,1,1,11119']
        headers['Accept'] = ['*/*']
        headers = Headers(headers)
        d = self.agent.request(
            'GET',
            url,
            headers,
            None)
        d.addCallback(self.fetchUserInfoSuccess)
        d.addErrback(self.cbShutdown)

    def fetchUserInfoSuccess(self, response):
        """Collects the profile response body before decoding it."""
        if debug_mode:
            logger.info("fetchUserInfoSuccess")
        finished = Deferred()
        response.deliverBody(BeginningPrinter(finished))
        finished.addCallback(self.onUserInfoRecv)
        finished.addErrback(self.cbShutdown)
        return finished

    def onUserInfoRecv(self, result):
        """Decodes the JSON profile payload and forwards it to onUserInfo."""
        if debug_mode:
            logger.info("onUserInfoRecv")
        content = json.loads(result)['result']
        self.onUserInfo(content)

    def onUserInfo(self, result):
        # Hook for subclasses/users; overridden to consume profile data.
        pass

    def cbShutdown(self, reason):
        # Generic errback: just log the failure.
        logger.info("Something went wrong: %s" % (reason))

    def checkTokenForAvatar(self, data, ext):
        """Polled by LoopingCall: uploads the avatar once a token arrives."""
        if debug_mode:
            logger.info("checkTokenForAvatar")
        #print 'checkTokenForAvatar'
        if int(time.time()) <= self.expire_token and self.access_token != None:
            self.putAvatar(data, ext)
            self.__loopingcall.stop()

    def checkTokenForUserInfo(self, uin):
        """Polled by LoopingCall: fetches the profile once a token arrives."""
        if debug_mode:
            logger.info("checkTokenForUserInfo")
        if int(time.time()) <= self.expire_token and self.access_token != None:
            self.fetchUserInfo(uin)
            self.__loopingcall.stop()

    def getToken(self):
        self.requestToken()

    def uploadAvatar(self, data, ext):
        """Public entry point: uploads now, or after acquiring a token."""
        if debug_mode:
            logger.info("uploadAvatar")
        if int(time.time()) <= self.expire_token and self.access_token != None:
            self.putAvatar(data, ext)
        else:
            # No valid token yet: start the dance and poll once a second.
            self.requestToken()
            self.__loopingcall = task.LoopingCall(self.checkTokenForAvatar, data, ext)
            self.__loopingcall.start(1.0)

    def getUserInfo(self, uin):
        """Public entry point: fetches profile now, or after token refresh."""
        if debug_mode:
            logger.info("getUserInfo")
        if int(time.time()) <= self.expire_token and self.access_token != None:
            self.fetchUserInfo(uin)
        else:
            self.requestToken()
            self.__loopingcall = task.LoopingCall(self.checkTokenForUserInfo, uin)
            self.__loopingcall.start(1.0)

    def getUserInfoDeffered(self, uin):
        """Deferred-returning variant of getUserInfo.

        NOTE(review): the returned Deferred is never fired by any code path
        here (nothing calls d.callback/errback), so callers will wait
        forever -- confirm whether this method is actually used.
        """
        if debug_mode:
            logger.info("getUserInfoDeffered")
        d = Deferred()
        if int(time.time()) <= self.expire_token and self.access_token != None:
            d.addCallback(self.onUserInfoRecv)
            d.addErrback(self.cbShutdown)
        else:
            self.requestToken()
            self.__loopingcall = task.LoopingCall(self.checkTokenForUserInfo, uin)
            self.__loopingcall.start(1.0)
        return d
#if check_requirements() == True:
# gg = GG_Oauth(4634020, 'xxxxxx')
#data = open('avatar.png', 'r').read()
#ext = mimetypes.guess_extension(mimetypes.guess_type('avatar.png')[0])
#gg.uploadAvatar(data, ext)
# gg.getUserInfo('5120225')
#else:
# print 'GG_oAuth_API: Requirements related with Gadu-Gadu oAuth API support not fullfilled. You need twisted-core, twisted-web in version 9.0.0 or greater and python-oauth.'
#log.startLogging(sys.stdout)
#reactor.run()
| gpl-3.0 |
arduino-org/brackets-shell | gyp/PRESUBMIT.py | 127 | 2919 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for GYP.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
# Files excluded from the presubmit pylint run (vendored/legacy code).
PYLINT_BLACKLIST = [
  # TODO: fix me.
  # From SCons, not done in google style.
  'test/lib/TestCmd.py',
  'test/lib/TestCommon.py',
  'test/lib/TestGyp.py',
  # Needs style fix.
  'pylib/gyp/generator/scons.py',
  'pylib/gyp/generator/xcode.py',
]

# Pylint message IDs suppressed project-wide for the presubmit run.
PYLINT_DISABLED_WARNINGS = [
  # TODO: fix me.
  # Many tests include modules they don't use.
  'W0611',
  # Include order doesn't properly include local files?
  'F0401',
  # Some use of built-in names.
  'W0622',
  # Some unused variables.
  'W0612',
  # Operator not preceded/followed by space.
  'C0323',
  'C0322',
  # Unnecessary semicolon.
  'W0301',
  # Unused argument.
  'W0613',
  # String has no effect (docstring in wrong place).
  'W0105',
  # Comma not followed by space.
  'C0324',
  # Access to a protected member.
  'W0212',
  # Bad indent.
  'W0311',
  # Line too long.
  'C0301',
  # Undefined variable.
  'E0602',
  # Not exception type specified.
  'W0702',
  # No member of that name.
  'E1101',
  # Dangerous default {}.
  'W0102',
  # Others, too many to sort.
  'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
  'R0201', 'E0101', 'C0321',
  # ************* Module copy
  # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
  'W0104',
]
def CheckChangeOnUpload(input_api, output_api):
report = []
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api))
return report
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run at commit time.

  Runs the standard project checks with a required license header, verifies
  the tree is open, and runs pylint over the change.
  """
  report = []
  # Expected license header, with the current year substituted in.
  # NOTE: the local name shadows the builtin `license`; harmless here.
  license = (
      r'.*? Copyright \(c\) %(year)s Google Inc\. All rights reserved\.\n'
      r'.*? Use of this source code is governed by a BSD-style license that '
      r'can be\n'
      r'.*? found in the LICENSE file\.\n'
  ) % {
      'year': input_api.time.strftime('%Y'),
  }
  report.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api, license_header=license))
  report.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api, output_api,
      'http://gyp-status.appspot.com/status',
      'http://gyp-status.appspot.com/current'))
  import sys
  # Temporarily extend sys.path so pylint can resolve gyp's own modules;
  # restored in the finally block even if pylint raises.
  old_sys_path = sys.path
  try:
    sys.path = ['pylib', 'test/lib'] + sys.path
    report.extend(input_api.canned_checks.RunPylint(
        input_api,
        output_api,
        black_list=PYLINT_BLACKLIST,
        disabled_warnings=PYLINT_DISABLED_WARNINGS))
  finally:
    sys.path = old_sys_path
  return report
def GetPreferredTrySlaves():
  """Returns the default trybots to run against this change."""
  platforms = ('win32', 'win64', 'linux', 'mac')
  return ['gyp-%s' % platform for platform in platforms]
| mit |
persandstrom/home-assistant | homeassistant/components/sensor/tesla.py | 5 | 3707 | """
Sensors for the Tesla sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.tesla/
"""
from datetime import timedelta
import logging
from homeassistant.components.sensor import ENTITY_ID_FORMAT
from homeassistant.components.tesla import DOMAIN as TESLA_DOMAIN
from homeassistant.components.tesla import TeslaDevice
from homeassistant.const import (
TEMP_CELSIUS, TEMP_FAHRENHEIT, LENGTH_KILOMETERS, LENGTH_MILES)
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['tesla']
SCAN_INTERVAL = timedelta(minutes=5)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Tesla sensor platform."""
    tesla_data = hass.data[TESLA_DOMAIN]['devices']
    controller = tesla_data['controller']
    entities = []
    for device in tesla_data['sensor']:
        if device.bin_type == 0x4:
            # Temperature devices expose two readings: cabin and exterior.
            entities.extend(
                TeslaSensor(device, controller, scope)
                for scope in ('inside', 'outside'))
        else:
            entities.append(TeslaSensor(device, controller))
    add_entities(entities, True)
class TeslaSensor(TeslaDevice, Entity):
    """Representation of Tesla sensors."""

    def __init__(self, tesla_device, controller, sensor_type=None):
        """Initialize the sensor.

        sensor_type distinguishes the two readings of a temperature device
        ('inside'/'outside'); it is None for single-valued sensors.
        """
        self.current_value = None
        self._unit = None
        self.last_changed_time = None
        self.type = sensor_type
        super().__init__(tesla_device, controller)
        if self.type:
            self._name = '{} ({})'.format(self.tesla_device.name, self.type)
            self.entity_id = ENTITY_ID_FORMAT.format(
                '{}_{}'.format(self.tesla_id, self.type))
        else:
            self.entity_id = ENTITY_ID_FORMAT.format(self.tesla_id)

    @property
    def state(self):
        """Return the state of the sensor."""
        return self.current_value

    @property
    def unit_of_measurement(self):
        """Return the unit_of_measurement of the device."""
        return self._unit

    def update(self):
        """Update the state from the sensor."""
        _LOGGER.debug("Updating sensor: %s", self._name)
        self.tesla_device.update()
        units = self.tesla_device.measurement
        if self.tesla_device.bin_type == 0x4:
            # Temperature sensor: pick the reading matching this entity.
            if self.type == 'outside':
                self.current_value = self.tesla_device.get_outside_temp()
            else:
                self.current_value = self.tesla_device.get_inside_temp()
            if units == 'F':
                self._unit = TEMP_FAHRENHEIT
            else:
                self._unit = TEMP_CELSIUS
        elif self.tesla_device.bin_type in (0xA, 0xB):
            # Distance-based sensor (odometer / range).
            self.current_value = self.tesla_device.get_value()
            if units == 'LENGTH_MILES':
                self._unit = LENGTH_MILES
            else:
                self._unit = LENGTH_KILOMETERS
                # The API reports miles; convert to kilometres.
                self.current_value /= 0.621371
                self.current_value = round(self.current_value, 2)
        else:
            self.current_value = self.tesla_device.get_value()
            if self.tesla_device.bin_type == 0x5:
                self._unit = units
            # Bug fix: an 'elif bin_type in (0xA, 0xB)' branch duplicated the
            # distance handling here, but this else is only reached when
            # bin_type is neither 0x4 nor 0xA/0xB, so that code was
            # unreachable and has been removed.
| apache-2.0 |
andfoy/margffoy-tuay-server | env/lib/python2.7/site-packages/Mezzanine-3.1.10-py2.7.egg/mezzanine/forms/page_processors.py | 54 | 2568 | from __future__ import unicode_literals
from django.shortcuts import redirect
from django.template import RequestContext
from mezzanine.conf import settings
from mezzanine.forms.forms import FormForForm
from mezzanine.forms.models import Form
from mezzanine.forms.signals import form_invalid, form_valid
from mezzanine.pages.page_processors import processor_for
from mezzanine.utils.email import split_addresses, send_mail_template
from mezzanine.utils.views import is_spam
def format_value(value):
    """
    Convert a list into a comma separated string, for displaying
    select multiple values in emails.
    """
    # Non-list values (typically strings) pass through untouched.
    if not isinstance(value, list):
        return value
    return ", ".join(item.strip() for item in value)
@processor_for(Form)
def form_processor(request, page):
    """
    Display a built form and handle submission.

    On a valid submission: saves the entry, emails a response to the
    submitter (if configured) and copies to the configured addresses,
    fires the form_valid signal and redirects with ?sent=1.  Otherwise
    fires form_invalid and re-renders the form.
    """
    form = FormForForm(page.form, RequestContext(request),
                       request.POST or None, request.FILES or None)
    if form.is_valid():
        url = page.get_absolute_url() + "?sent=1"
        # Spam submissions get the success redirect but are never saved.
        if is_spam(request, form, url):
            return redirect(url)
        attachments = []
        for f in form.files.values():
            # Rewind each upload: it may already have been read during
            # validation.
            f.seek(0)
            attachments.append((f.name, f.read()))
        entry = form.save()
        subject = page.form.email_subject
        if not subject:
            subject = "%s - %s" % (page.form.title, entry.entry_time)
        fields = [(v.label, format_value(form.cleaned_data[k]))
                  for (k, v) in form.fields.items()]
        context = {
            "fields": fields,
            "message": page.form.email_message,
            "request": request,
        }
        email_from = page.form.email_from or settings.DEFAULT_FROM_EMAIL
        email_to = form.email_to()
        # Confirmation email to the submitter, without attachments.
        if email_to and page.form.send_email:
            send_mail_template(subject, "email/form_response", email_from,
                               email_to, context)
        headers = None
        if email_to:
            # Add the email entered as a Reply-To header
            headers = {'Reply-To': email_to}
        # Notification copies, with the uploaded files attached.
        email_copies = split_addresses(page.form.email_copies)
        if email_copies:
            send_mail_template(subject, "email/form_response_copies",
                               email_from, email_copies, context,
                               attachments=attachments, headers=headers)
        form_valid.send(sender=request, form=form, entry=entry)
        return redirect(url)
    form_invalid.send(sender=request, form=form)
    return {"form": form}
| gpl-2.0 |
guillaume-philippon/aquilon | tests/broker/test_add_host.py | 1 | 41015 | #!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014,2015,2016 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the add host command."""
import unittest
if __name__ == "__main__":
from broker import utils
utils.import_depends()
from broker.brokertest import TestBrokerCommand
from networktest import DummyIP
from machinetest import MachineTestMixin
class TestAddHost(MachineTestMixin, TestBrokerCommand):
    def test_100_add_unittest02(self):
        """Adds unittest02 and verifies the expected DSDB registration."""
        ip = self.net["unknown0"].usable[0]
        # DSDB sync uses the machine comments, not the host comments
        self.dsdb_expect_add("unittest02.one-nyp.ms.com", ip, "eth0", ip.mac,
                             comments="Some machine comments")
        osver = self.config.get("unittest", "linux_version_prev")
        self.noouttest(["add", "host",
                        "--hostname", "unittest02.one-nyp.ms.com", "--ip", ip,
                        "--machine", "ut3c5n10", "--domain", "unittest",
                        "--buildstatus", "build", "--archetype", "aquilon",
                        "--osname", "linux", "--osversion", osver,
                        "--personality", "compileserver",
                        "--comments", "Some host comments"])
        self.dsdb_verify()
    def test_105_verify_unittest02(self):
        """Checks that every attribute set at add time appears in show_host."""
        osver = self.config.get("unittest", "linux_version_prev")
        command = "show host --hostname unittest02.one-nyp.ms.com"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out,
                         "Primary Name: unittest02.one-nyp.ms.com [%s]" %
                         self.net["unknown0"].usable[0],
                         command)
        self.matchoutput(out, "Machine: ut3c5n10", command)
        self.matchoutput(out, "Model Type: blade", command)
        self.matchoutput(out, "Archetype: aquilon", command)
        self.matchoutput(out, "Personality: compileserver", command)
        self.matchoutput(out, "Domain: unittest", command)
        self.matchoutput(out, "Build Status: build", command)
        self.matchoutput(out, "Operating System: linux", command)
        self.matchoutput(out, "Version: %s" % osver, command)
        self.matchoutput(out, "Advertise Status: False", command)
        self.matchoutput(out, "Host Comments: Some host comments", command)
    def test_105_verify_unittest02_network_osversion(self):
        """The protobuf network dump must carry the host's OS version."""
        osver = self.config.get("unittest", "linux_version_prev")
        command = ["show", "network",
                   "--ip", str(self.net["unknown0"].ip),
                   "--format", "proto",
                   "--hosts"]
        network = self.protobuftest(command)[0]
        for i in network.hosts:
            if i.fqdn == 'unittest02.one-nyp.ms.com':
                self.assertEqual(i.operating_system.version, osver)
                break
        else:
            # for/else: runs only if the host never appeared in the dump.
            self.fail("Unable to determine osversion")
def test_105_verify_unittest02_machine(self):
command = "show machine --machine ut3c5n10"
out = self.commandtest(command.split(" "))
self.matchoutput(out,
"Primary Name: unittest02.one-nyp.ms.com [%s]" %
self.net["unknown0"].usable[0],
command)
def test_105_verify_unittest02_dns(self):
command = "search dns --fqdn unittest02.one-nyp.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "unittest02.one-nyp.ms.com", command)
def test_105_verify_unittest02_proto(self):
command = "show host --hostname unittest02.one-nyp.ms.com --format proto"
host = self.protobuftest(command.split(" "), expect=1)[0]
self.assertEqual(host.hostname, "unittest02")
self.assertEqual(host.fqdn, "unittest02.one-nyp.ms.com")
self.assertEqual(host.dns_domain, "one-nyp.ms.com")
self.assertEqual(host.machine.name, "ut3c5n10")
self.assertEqual(host.status, "build")
self.assertEqual(host.personality.archetype.name, "aquilon")
self.assertEqual(host.personality.name, "compileserver")
self.assertEqual(host.personality.host_environment, "dev")
self.assertEqual(host.domain.name, "unittest")
self.assertEqual(host.owner_eonid, 3)
self.assertEqual(len(host.eonid_maps), 0)
self.assertEqual(host.personality.owner_eonid, 3)
self.assertEqual(len(host.personality.eonid_maps), 1)
self.assertEqual(host.personality.eonid_maps[0].target, 'esp')
self.assertEqual(host.personality.eonid_maps[0].eonid, 3)
def test_105_cat_fail(self):
# The plenary should not be there before make/reconfigure was run
command = ["cat", "--hostname", "unittest02.one-nyp.ms.com"]
out = self.notfoundtest(command)
profile = self.build_profile_name("unittest02.one-nyp.ms.com",
domain="unittest")
self.matchoutput(out, "Pleanary file %s not found" % profile, command)
def test_106_verify_show_host_grns(self):
command = ["show_host", "--grns",
"--hostname=unittest02.one-nyp.ms.com"]
out = self.commandtest(command)
self.matchoutput(out, "Owned by GRN: grn:/ms/ei/aquilon/unittest [inherited]", command)
self.matchoutput(out, "Used by GRN: grn:/ms/ei/aquilon/unittest [target: esp, inherited]", command)
def test_106_verify_show_host_grns_proto(self):
command = ["show_host", "--format=proto", "--grns",
"--hostname=unittest02.one-nyp.ms.com"]
host = self.protobuftest(command, expect=1)[0]
self.assertEqual(host.hostname, "unittest02")
self.assertEqual(host.dns_domain, "one-nyp.ms.com")
self.assertEqual(host.fqdn, "unittest02.one-nyp.ms.com")
self.assertEqual(host.personality.archetype.name, "aquilon")
self.assertEqual(host.personality.name, "compileserver")
self.assertEqual(host.personality.host_environment, "dev")
self.assertEqual(host.status, "build")
self.assertEqual(host.domain.name, "unittest")
self.assertEqual(host.owner_eonid, 3)
self.assertEqual(len(host.eonid_maps), 0)
self.assertEqual(host.personality.owner_eonid, 3)
self.assertEqual(len(host.personality.eonid_maps), 1)
self.assertEqual(host.personality.eonid_maps[0].target, 'esp')
self.assertEqual(host.personality.eonid_maps[0].eonid, 3)
def test_110_add_unittest15(self):
ip = self.net["tor_net_0"].usable[1]
self.dsdb_expect_add("unittest15.aqd-unittest.ms.com", ip, "eth0",
ip.mac)
self.noouttest(["add", "host",
"--hostname", "unittest15.aqd-unittest.ms.com",
"--ipfromsystem", "ut01ga1s02.aqd-unittest.ms.com",
"--ipalgorithm", "max",
"--machine", "ut8s02p1", "--domain", "unittest",
"--archetype", "aquilon"])
self.dsdb_verify()
def test_115_verify_unittest15(self):
command = "show host --hostname unittest15.aqd-unittest.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out,
"Primary Name: unittest15.aqd-unittest.ms.com [%s]" %
self.net["tor_net_0"].usable[1],
command)
self.matchoutput(out, "Personality: inventory", command)
def test_120_add_unittest16_bad(self):
command = ["add", "host",
"--hostname", "unittest16.aqd-unittest.ms.com",
"--ipfromip", self.net["dyndhcp1"].usable[-1],
"--ipalgorithm", "max",
"--machine", "ut8s02p2", "--domain", "unittest",
"--archetype", "aquilon"]
out = self.badrequesttest(command)
self.matchoutput(out, "Failed to find an IP that is suitable for "
"--ipalgorithm=max. Try an other algorithm as there "
"are still some free addresses.",
command)
def test_121_add_unittest16_bad_ip(self):
command = ["add", "host",
"--hostname", "unittest16.aqd-unittest.ms.com",
"--ip", "not-an-ip-address",
"--machine", "ut8s02p2", "--domain", "unittest",
"--archetype", "aquilon"]
out = self.badrequesttest(command)
self.matchoutput(out,
"Expected an IPv4 address for --ip: "
"not-an-ip-address",
command)
def test_122_add_unittest16_bad_domain(self):
net = self.net["tor_net_0"]
command = ["add", "host", "--hostname", "unittest16.aqd-unittest.ms.com",
"--ipfromip", net.usable[0], "--ipalgorithm", "lowest",
"--machine", "ut8s02p2", "--domain", "nomanage",
"--archetype", "aquilon"]
out = self.badrequesttest(command)
self.matchoutput(out, "Adding hosts to domain nomanage "
"is not allowed.", command)
def test_123_add_unittest16_bad_hostname(self):
net = self.net["tor_net_0"]
command = ["add", "host", "--hostname", "1unittest16.aqd-unittest.ms.com",
"--ipfromip", net.usable[0], "--ipalgorithm", "lowest",
"--machine", "ut8s02p2", "--domain", "unittest",
"--archetype", "aquilon"]
out = self.badrequesttest(command)
self.matchoutput(out, "'1unittest16.aqd-unittest.ms.com' is not a valid "
"value for hostname.", command)
def test_124_add_unittest16_good(self):
net = self.net["tor_net_0"]
self.dsdb_expect_add("unittest16.aqd-unittest.ms.com", net.usable[2],
"eth0", net.usable[2].mac)
self.noouttest(["add", "host",
"--hostname", "unittest16.aqd-unittest.ms.com",
"--ipfromip", net.usable[0], "--ipalgorithm", "lowest",
"--machine", "ut8s02p2", "--domain", "unittest",
"--archetype", "aquilon",
"--personality", "compileserver"])
self.dsdb_verify()
def test_125_verify_unittest16(self):
command = "show host --hostname unittest16.aqd-unittest.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out,
"Primary Name: unittest16.aqd-unittest.ms.com [%s]" %
self.net["tor_net_0"].usable[2],
command)
self.matchoutput(out, "Personality: compileserver", command)
def test_130_add_unittest17(self):
ip = self.net["tor_net_0"].usable[3]
self.dsdb_expect_add("unittest17.aqd-unittest.ms.com", ip, "eth0",
ip.mac)
self.noouttest(["add", "host",
"--hostname", "unittest17.aqd-unittest.ms.com",
"--ipfromsystem", "ut01ga1s02.aqd-unittest.ms.com",
"--machine", "ut8s02p3", "--domain", "unittest",
"--archetype", "aquilon"])
self.dsdb_verify()
def test_135_verify_unittest17(self):
# Verifies default os and personality for aquilon
command = "show host --hostname unittest17.aqd-unittest.ms.com"
out = self.commandtest(command.split(" "))
osversion = self.config.get("archetype_aquilon", "default_osversion")
osversion.replace(".", r"\.")
self.matchoutput(out,
"Primary Name: unittest17.aqd-unittest.ms.com [%s]" %
self.net["tor_net_0"].usable[3],
command)
self.searchoutput(out,
r'Operating System: linux\s*'
r'Version: %s$' % osversion,
command)
self.matchoutput(out, "Personality: inventory", command)
def test_140_add_aurora_default_os(self):
ip = self.net["tor_net_0"].usable[4]
self.dsdb_expect("show_host -host_name test-aurora-default-os")
self.noouttest(["add", "host", "--archetype", "aurora",
"--hostname", "test-aurora-default-os.ms.com",
"--ip", ip, "--domain", "unittest", "--machine",
"ut8s02p4"])
self.dsdb_verify()
def test_141_verify_aurora_default_os(self):
command = "show host --hostname test-aurora-default-os.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Primary Name: test-aurora-default-os.ms.com", command)
self.matchoutput(out, "Archetype: aurora", command)
self.matchoutput(out, "Personality: generic", command)
self.matchoutput(out, "Domain: unittest", command)
self.searchoutput(out,
r'Operating System: linux\s*'
r'Version: generic$',
command)
def test_145_add_windows_default_os(self):
ip = self.net["tor_net_0"].usable[5]
self.dsdb_expect_add("test-windows-default-os.msad.ms.com", ip,
"eth0", self.net["tor_net_0"].usable[5].mac)
self.noouttest(["add", "host", "--archetype", "windows",
"--hostname", "test-windows-default-os.msad.ms.com",
"--ip", ip, "--domain", "ut-prod",
"--machine", "ut8s02p5"])
self.dsdb_verify()
def test_146_verify_windows_default_os(self):
command = "show host --hostname test-windows-default-os.msad.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Primary Name: test-windows-default-os.msad.ms.com", command)
self.matchoutput(out, "Archetype: windows", command)
self.matchoutput(out, "Personality: generic", command)
self.matchoutput(out, "Domain: ut-prod", command)
self.searchoutput(out,
r'Operating System: windows\s*'
r'Version: generic$',
command)
def test_150_add_cciss_host(self):
ip = self.net["unknown0"].usable[18]
self.dsdb_expect_add("unittest18.aqd-unittest.ms.com", ip, "eth0",
ip.mac)
command = ["add", "host", "--archetype", "aquilon",
"--hostname", "unittest18.aqd-unittest.ms.com", "--ip", ip,
"--domain", "unittest", "--machine", "ut3c1n8"]
self.noouttest(command)
self.dsdb_verify()
def test_155_add_f5(self):
# The IP address is also a /32 network
ip = self.net["f5test"].ip
self.dsdb_expect_add("f5test.aqd-unittest.ms.com", ip, "eth0",
DummyIP(ip).mac)
command = ["add", "host", "--hostname", "f5test.aqd-unittest.ms.com",
"--machine", "f5test", "--ip", ip,
"--archetype", "f5", "--domain", "unittest",
"--osname", "f5", "--osversion", "generic"]
self.noouttest(command)
self.dsdb_verify()
def test_160_no_default_os(self):
ip = self.net["vm_storage_net"].usable[25]
command = ["add", "host", "--archetype", "filer",
"--hostname", "filer1.ms.com", "--ip", ip,
"--domain", "unittest", "--machine", "filer1"]
out = self.badrequesttest(command)
self.matchoutput(out, "Can not determine a sensible default OS", command)
def test_165_add_filer(self):
ip = self.net["vm_storage_net"].usable[25]
self.dsdb_expect_add("filer1.ms.com", ip, "v0")
command = ["add", "host", "--archetype", "filer",
"--hostname", "filer1.ms.com", "--ip", ip,
"--domain", "unittest", "--machine", "filer1",
"--osname=ontap", "--osversion=7.3.3p1"]
self.noouttest(command)
self.dsdb_verify()
def test_170_add_cardsmachine(self):
net = self.net.allocate_network(self, "cards_net", 28, "unknown",
"building", "cards")
self.create_machine("cardsmachine", "utrackmount", rack="cards1",
cpuname="utcpu", cpucount=2, memory=65536,
sda_size=600, sda_controller="sas",
eth0_mac=net.usable[0].mac)
def test_171_host_prefix_no_domain(self):
osver = self.config.get("unittest", "linux_version_curr")
command = ["add_host", "--machine", "cardsmachine", "--domain", "unittest",
"--archetype", "aquilon", "--personality", "inventory",
"--osname", "linux", "--osversion", osver,
"--grn", "grn:/ms/ei/aquilon/aqd",
"--ip", self.net["cards_net"].usable[0],
"--prefix", "cardshost"]
out = self.badrequesttest(command)
self.matchoutput(out,
"There is no default DNS domain configured for "
"rack cards1. Please specify --dns_domain.",
command)
def test_172_cleanup(self):
self.noouttest(["del_machine", "--machine", "cardsmachine"])
self.net.dispose_network(self, "cards_net")
def test_180_add_utmc8_vmhosts(self):
pri_net = self.net["ut14_net"]
storage_net = self.net["vm_storage_net"]
mgmt_net = self.net["ut14_oob"]
for i in range(0, 2):
hostname = "evh%d.aqd-unittest.ms.com" % (i + 80)
machine = "ut14s1p%d" % i
ip = pri_net.usable[i]
eth0_mac = ip.mac
eth1_ip = storage_net.usable[i + 26]
eth1_mac = eth1_ip.mac
mgmt_ip = mgmt_net.usable[i]
self.create_host(hostname, ip, machine,
model="dl360g9", rack="ut14",
eth0_mac=eth0_mac,
eth1_mac=eth1_mac, eth1_ip=eth1_ip,
manager_iface="mgmt0", manager_ip=mgmt_ip,
osname="esxi", osversion="5.0.0",
archetype="vmhost", personality="vulcan2-server-dev")
def test_185_add_utmc9_vmhosts(self):
# This machine will be moved into the right rack later
self.create_host("evh82.aqd-unittest.ms.com",
self.net["ut14_net"].usable[2],
"ut14s1p2", model="dl360g9", rack="ut3",
manager_iface="mgmt0",
manager_ip=self.net["ut14_oob"].usable[2],
archetype="vmhost", personality="vulcan-local-disk",
osname="esxi", osversion="5.0.0",
domain="alt-unittest")
self.create_host("evh83.aqd-unittest.ms.com",
self.net["ut14_net"].usable[3],
"ut14s1p3", model="dl360g9", rack="ut14",
manager_iface="mgmt0",
manager_ip=self.net["ut14_oob"].usable[3],
archetype="vmhost", personality="vulcan-local-disk",
osname="esxi", osversion="5.0.0",
domain="alt-unittest")
def test_200_machine_reuse(self):
ip = self.net["unknown0"].usable[-1]
command = ["add", "host", "--hostname", "used-already.one-nyp.ms.com",
"--ip", ip, "--machine", "ut3c5n10", "--domain", "unittest",
"--archetype", "aquilon",
"--personality", "compileserver"]
out = self.badrequesttest(command)
self.matchoutput(out, "Machine ut3c5n10 is already allocated to "
"host unittest02.one-nyp.ms.com", command)
def test_200_show_host_bad_domain(self):
command = "show host --hostname aquilon00.one-nyp"
out = self.notfoundtest(command.split(" "))
self.matchoutput(out, "DNS Domain one-nyp not found.", command)
def test_200_no_interface(self):
ip = self.net["unknown0"].usable[-1]
command = ["add", "host", "--hostname", "unittest03.aqd-unittest.ms.com",
"--ip", ip, "--machine", "ut3c1n9",
"--domain", "unittest", "--archetype", "aquilon"]
out = self.badrequesttest(command)
self.matchoutput(out,
"You have specified an IP address for the host, but "
"machine unittest03.aqd-unittest.ms.com does not have "
"a bootable interface.",
command)
def test_300_populate_hp_rack_hosts(self):
# This gives us server1.aqd-unittest.ms.com through server10
# and aquilon60.aqd-unittest.ms.com through aquilon100
# It also needs to run *after* the testadd* methods above
# as some of them rely on a clean IP space for testing the
# auto-allocation algorithms.
# I stole the last 2 hp rack hosts for default host
# aquilon63.aqd-unittest.ms.com & aquilon64.aqd-unittest.ms.com are
# reserved for manage tests.
servers = 0
net = self.net["hp_eth0"]
mgmt_net = self.net["hp_mgmt"]
# number 50 is in use by the tor_switch.
for i in range(51, 100):
if servers < 10:
servers += 1
hostname = "server%d.aqd-unittest.ms.com" % servers
personality = "utpers-prod"
else:
hostname = "aquilon%d.aqd-unittest.ms.com" % i
personality = None
port = i - 50
machine = "ut9s03p%d" % port
self.create_host(hostname, net.usable[port], machine, rack="ut9",
model="bl460cg8", sandbox="%s/utsandbox" % self.user,
manager_iface="ilo",
manager_ip=mgmt_net.usable[port],
personality=personality)
def test_305_search_sandbox_used(self):
command = ["search_sandbox", "--used"]
out = self.commandtest(command)
self.matchoutput(out, "utsandbox", command)
self.matchclean(out, "camelcasetest1", command)
def test_305_search_sandbox_unused(self):
command = ["search_sandbox", "--unused"]
out = self.commandtest(command)
self.matchclean(out, "utsandbox", command)
self.matchoutput(out, "camelcasetest1", command)
self.matchoutput(out, "camelcasetest2", command)
self.matchoutput(out, "changetest1", command)
self.matchoutput(out, "othersandbox", command)
def test_310_populate_ut10_hosts(self):
# These are used in add_virtual_hardware:
# evh1.aqd-unittest.ms.com
# evh2.aqd-unittest.ms.com
# evh3.aqd-unittest.ms.com
# evh4.aqd-unittest.ms.com
# evh5.aqd-unittest.ms.com
# evh6.aqd-unittest.ms.com
# evh7.aqd-unittest.ms.com
# evh8.aqd-unittest.ms.com
# evh9.aqd-unittest.ms.com
# This is used for utmc7 and update_machine testing:
# evh10.aqd-unittest.ms.com
# The other hosts are left for future use.
eth0_net = self.net["ut10_eth0"]
eth1_net = self.net["ut10_eth1"]
mgmt_net = self.net["ut10_oob"]
# number 100 is in use by the tor_switch.
for i in range(101, 111):
port = i - 100
hostname = "evh%d.aqd-unittest.ms.com" % port
machine = "ut10s04p%d" % port
ip = eth0_net.usable[port]
mgmt_ip = mgmt_net.usable[port]
eth0_mac = ip.mac
eth1_mac = eth1_net.usable[port].mac
# The virtual machine tests require quite a bit of memory...
self.create_host(hostname, ip, machine,
model="dl360g9", memory=81920, rack="ut10",
cpuname="e5-2660-v3", cpucount=2,
eth0_mac=eth0_mac, eth1_mac=eth1_mac,
manager_iface="mgmt0", manager_ip=mgmt_ip,
archetype="vmhost",
personality="vulcan-10g-server-prod",
osname="esxi", osversion="5.0.0")
def test_320_add_10gig_racks(self):
for port in range(1, 13):
for (template, rack, offset) in [('ut11s01p%d', "ut11", 0),
('ut12s02p%d', "ut12", 12)]:
machine = template % port
# Both counts would start at 0 except the tor_net has two
# switches taking IPs.
i = port + 1 + offset
j = port - 1 + offset
eth0_mac = self.net["vmotion_net"].usable[i].mac
eth1_mac = self.net["vm_storage_net"].usable[j].mac
self.create_machine_dl360g9(machine, rack=rack,
eth0_mac=eth0_mac,
eth1_mac=eth1_mac,
eth1_pg="storage-v701")
def test_321_auxiliary_no_host(self):
# Test port group based IP address allocation when there is no host yet
command = ["add_auxiliary", "--machine", "ut11s01p1",
"--interface", "eth1", "--autoip",
"--auxiliary", "evh51-e1.aqd-unittest.ms.com"]
out = self.badrequesttest(command)
self.matchoutput(out,
"Machine ut11s01p1 does not have a host, assigning an "
"IP address based on port group membership is not "
"possible.",
command)
def test_322_populate_10gig_rack_hosts(self):
# Assuming evh11 - evh50 will eventually be claimed above.
net = self.net["vmotion_net"]
for i in range(1, 25):
hostname = "evh%d.aqd-unittest.ms.com" % (i + 50)
manager = "evh%dr.aqd-unittest.ms.com" % (i + 50)
if i < 13:
port = i
machine = "ut11s01p%d" % port
mgmt_net = self.net["ut11_oob"]
else:
port = i - 12
machine = "ut12s02p%d" % port
mgmt_net = self.net["ut12_oob"]
self.dsdb_expect_add(hostname, net.usable[i + 1], "eth0",
net.usable[i + 1].mac)
self.dsdb_expect_add(manager, mgmt_net[port], "mgmt0",
mgmt_net[port].mac)
command = ["add", "host", "--hostname", hostname, "--autoip",
"--machine", machine,
"--domain", "unittest",
"--osname", "esxi", "--osversion", "5.0.0",
"--archetype", "vmhost", "--personality", "vulcan-10g-server-prod"]
self.noouttest(command)
command = ["add_manager", "--hostname", hostname, "--interface", "mgmt0",
"--ip", mgmt_net[port], "--mac", mgmt_net[port].mac]
self.noouttest(command)
self.dsdb_verify()
def test_323_verify_show_ut11s01p1(self):
command = "show machine --machine ut11s01p1"
out = self.commandtest(command.split())
self.matchoutput(out,
"Last switch poll: "
"ut01ga2s01.aqd-unittest.ms.com port 1 [",
command)
def test_323_verify_show_ut11s01p1_proto(self):
command = ["show_machine", "--machine", "ut11s01p1", "--format", "proto"]
machine = self.protobuftest(command, expect=1)[0]
ifaces = {iface.device: iface for iface in machine.interfaces}
self.assertIn("eth1", ifaces)
self.assertEqual(ifaces["eth1"].port_group_name, "storage-v701")
# There's no detailed information for phys machines
self.assertEqual(ifaces["eth1"].port_group_usage, "")
self.assertEqual(ifaces["eth1"].port_group_tag, 0)
def test_325_verify_cat_ut11s01p1(self):
command = "cat --machine ut11s01p1"
out = self.commandtest(command.split(" "))
self.searchoutput(out,
r'"cards/nic/eth0" = '
r'create\("hardware/nic/generic/generic_nic",\s*'
r'"boot", true,\s*'
r'"hwaddr", "%s"\s*\);'
% self.net["vmotion_net"].usable[2].mac,
command)
self.searchoutput(out,
r'"cards/nic/eth1" = '
r'create\("hardware/nic/generic/generic_nic",\s*'
r'"hwaddr", "%s",\s*'
r'"port_group", "storage-v701"\s*\);'
% self.net["vm_storage_net"].usable[0].mac,
command)
def test_325_verify_show_host_proto(self):
# We had a bug where a dangling interface with no IP address
# assigned would cause show host --format=proto to fail...
command = ["show_host", "--format=proto",
"--hostname=evh1.aqd-unittest.ms.com"]
host = self.protobuftest(command, expect=1)[0]
self.assertEqual(host.fqdn, "evh1.aqd-unittest.ms.com")
self.assertEqual(host.archetype.name, "vmhost")
self.assertEqual(host.personality.archetype.name, "vmhost")
self.assertEqual(host.operating_system.archetype.name, "vmhost")
self.assertEqual(host.operating_system.name, "esxi")
self.assertEqual(host.operating_system.version, "5.0.0")
self.assertEqual(host.ip, str(self.net["ut10_eth0"].usable[1]))
self.assertEqual(host.machine.name, "ut10s04p1")
self.assertEqual(len(host.machine.interfaces), 3)
self.assertEqual(host.machine.location.name, 'ut10')
self.assertEqual(' '.join('%s:%s' % (str(loc.location_type),
str(loc.name))
for loc in host.machine.location.parents),
"company:ms hub:ny continent:na country:us "
"campus:ny city:ny building:ut")
eth0_net = self.net["ut10_eth0"]
mgmt_net = self.net["ut10_oob"]
for i in host.machine.interfaces:
if i.device == 'eth0':
self.assertEqual(i.ip, str(eth0_net.usable[1]))
self.assertEqual(i.mac, str(eth0_net.usable[1].mac))
# We're not using this field anymore...
self.assertEqual(i.network_id, 0)
elif i.device == 'eth1':
self.assertEqual(i.ip, "")
self.assertEqual(i.network_id, 0)
elif i.device == 'mgmt0':
self.assertEqual(i.ip, str(mgmt_net.usable[1]))
self.assertEqual(i.mac, str(mgmt_net.usable[1].mac))
else:
self.fail("Unrecognized interface '%s'" % i.device)
def test_400_add_utnotify(self):
hostname = self.config.get("unittest", "hostname")
# We _could_ also look up the real address of the host...
self.dsdb_expect_add(hostname, "127.0.0.1", "eth0",
self.net["unknown0"].usable[19].mac)
self.noouttest(["add", "host",
"--hostname", hostname,
"--ip", "127.0.0.1", "--machine", "ut3c5n6",
"--domain", "unittest", "--buildstatus", "ready",
"--archetype", "aquilon",
"--personality", "compileserver"])
command = ["bind", "server", "--service", "utnotify",
"--instance", "localhost", "--hostname", hostname]
out = self.statustest(command)
self.matchoutput(out, "Warning: Host %s is missing the following "
"required services" % hostname, command)
def test_410_add_afsbynet(self):
ip = self.net["netsvcmap"].usable[0]
self.create_host("afs-by-net.aqd-unittest.ms.com", ip, "ut3c5n11",
model="hs21-8853", chassis="ut3c5", slot=11,
personality="compileserver",
comments="For network based service mappings")
def test_420_add_netmappers(self):
ip = self.net["netperssvcmap"].usable[0]
self.create_host("netmap-pers.aqd-unittest.ms.com", ip, "ut3c5n12",
model="hs21-8853", chassis="ut3c5", slot=12,
personality="utpers-dev", personality_stage="next",
comments="For net/pers based service mappings")
def test_430_add_utinfra(self):
eth0_ip = self.net["unknown0"].usable[33]
eth1_ip = self.net["unknown1"].usable[34]
ip = self.net["zebra_vip"].usable[3]
self.create_host("infra1.aqd-unittest.ms.com", ip, "ut3c5n13",
model="utrackmount", chassis="ut3c5", slot=13,
cpuname="utcpu", cpucount=2, memory=65536,
sda_size=600, sda_controller="sas",
eth0_mac=eth0_ip.mac, eth0_ip=eth0_ip,
eth0_fqdn="infra1-e0.aqd-unittest.ms.com",
eth1_mac=eth1_ip.mac, eth1_ip=eth1_ip,
eth1_fqdn="infra1-e1.aqd-unittest.ms.com",
zebra=True, personality="utpers-prod")
def test_435_add_npinfra(self):
eth0_ip = self.net["unknown0"].usable[35]
eth1_ip = self.net["unknown1"].usable[36]
ip = self.net["zebra_vip"].usable[4]
self.create_host("infra1.one-nyp.ms.com", ip, "np3c5n13",
model="utrackmount", chassis="np3c5", slot=13,
cpuname="utcpu", cpucount=2, memory=65536,
sda_size=600, sda_controller="sas",
eth0_mac=eth0_ip.mac, eth0_ip=eth0_ip,
eth0_fqdn="infra1-e0.one-nyp.ms.com",
eth1_mac=eth1_ip.mac, eth1_ip=eth1_ip,
eth1_fqdn="infra1-e1.one-nyp.ms.com",
zebra=True, personality="utpers-prod")
def test_440_add_jack_host(self):
ip = self.net["unknown0"].usable[17]
self.create_host("jack.cards.example.com", ip, "jack",
model="utrackmount", rack="cards1",
cpuname="utcpu", cpucount=2, memory=65536,
sda_size=600, sda_controller="sas",
eth0_mac=ip.mac, eth0_comments="interface for jack",
grn="grn:/example/cards", domain="unittest",
personality="compileserver")
def test_445_verify_jack_grn(self):
command = "show host --hostname jack.cards.example.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Owned by GRN: grn:/example/cards", command)
self.matchoutput(out, "Used by GRN: grn:/example/cards", command)
def test_445_verify_show_host_jack_grns(self):
ip = self.net["unknown0"].usable[17]
command = ["show_host", "--grns", "--hostname=jack.cards.example.com"]
out = self.commandtest(command)
self.matchoutput(out, "Primary Name: jack.cards.example.com [%s]" % ip,
command)
self.matchoutput(out, "Owned by GRN: grn:/example/cards", command)
self.matchoutput(out, "Used by GRN: grn:/example/cards [target: esp]", command)
def test_800_verify_host_all(self):
command = ["show", "host", "--all"]
out = self.commandtest(command)
self.matchoutput(out, "afs-by-net.aqd-unittest.ms.com", command)
self.matchoutput(out, "unittest02.one-nyp.ms.com", command)
self.matchoutput(out, "unittest15.aqd-unittest.ms.com", command)
self.matchoutput(out, "unittest16.aqd-unittest.ms.com", command)
self.matchoutput(out, "unittest17.aqd-unittest.ms.com", command)
self.matchoutput(out, "server1.aqd-unittest.ms.com", command)
self.matchoutput(out, "aquilon61.aqd-unittest.ms.com", command)
self.matchoutput(out, "evh1.aqd-unittest.ms.com", command)
self.matchoutput(out, "evh51.aqd-unittest.ms.com", command)
self.matchoutput(out, "test-aurora-default-os.ms.com", command)
self.matchoutput(out, "test-windows-default-os.msad.ms.com", command)
self.matchoutput(out, "filer1.ms.com", command)
self.matchoutput(out, "f5test.aqd-unittest.ms.com", command)
def test_800_verify_host_all_proto(self):
command = ["show", "host", "--all", "--format", "proto"]
hostlist = self.protobuftest(command)
hostnames = set(host_msg.hostname for host_msg in hostlist)
for hostname in ("afs-by-net.aqd-unittest.ms.com",
"unittest02.one-nyp.ms.com",
"unittest15.aqd-unittest.ms.com",
"unittest16.aqd-unittest.ms.com",
"unittest17.aqd-unittest.ms.com",
"server1.aqd-unittest.ms.com",
"aquilon61.aqd-unittest.ms.com",
"evh1.aqd-unittest.ms.com",
"evh51.aqd-unittest.ms.com",
"test-aurora-default-os.ms.com",
"test-windows-default-os.msad.ms.com",
"filer1.ms.com",
"f5test.aqd-unittest.ms.com"):
self.assertIn(hostname, hostnames)
def test_800_verify_host_list(self):
hosts = ["unittest15.aqd-unittest.ms.com",
"unittest16.aqd-unittest.ms.com",
"filer1.ms.com"]
scratchfile = self.writescratch("show_host_list", "\n".join(hosts))
command = ["show_host", "--list", scratchfile]
out = self.commandtest(command)
self.matchoutput(out, "Machine: ut8s02p1", command)
self.matchoutput(out, "Machine: ut8s02p2", command)
self.matchoutput(out, "Machine: filer1", command)
self.matchclean(out, "evh1.aqd-unittest.ms.com", command)
self.matchclean(out, "ut10s04", command)
def test_800_show_ut3c5(self):
ip = self.net["unknown0"].usable[6]
hostname = self.config.get("unittest", "hostname")
command = ["show_chassis", "--chassis", "ut3c5"]
out = self.commandtest(command)
self.output_equals(out, """
Chassis: ut3c5
Primary Name: ut3c5.aqd-unittest.ms.com [%s]
Building: ut
Campus: ny
City: ny
Continent: na
Country: us
Hub: ny
Organization: ms
Rack: ut3
Row: a
Column: 3
Room: utroom1
Vendor: hp Model: c-class
Model Type: chassis
Serial: ABC5678
Comments: Some new chassis comments
Interface: oa %s
Type: oa
Network Environment: internal
Provides: ut3c5.aqd-unittest.ms.com [%s]
Slot #2: ut3c5n2 (unittest20.aqd-unittest.ms.com)
Slot #3: ut3c5n3 (unittest21.aqd-unittest.ms.com)
Slot #4: ut3c5n4 (unittest22.aqd-unittest.ms.com)
Slot #5: ut3c5n5 (unittest23.aqd-unittest.ms.com)
Slot #6: ut3c5n6 (%s)
Slot #7: ut3c5n7 (unittest25.aqd-unittest.ms.com)
Slot #8: ut3c5n8 (unittest26.aqd-unittest.ms.com)
Slot #10: ut3c5n10 (unittest02.one-nyp.ms.com)
Slot #11: ut3c5n11 (afs-by-net.aqd-unittest.ms.com)
Slot #12: ut3c5n12 (netmap-pers.aqd-unittest.ms.com)
Slot #13: ut3c5n13 (infra1.aqd-unittest.ms.com)
Slot #16: ut3c5n16 (no hostname)
""" % (ip, ip.mac, ip, hostname),
command)
if __name__ == '__main__':
    # Allow running just this module's tests directly, with per-test
    # verbose output (the full suite is normally driven by a runner).
    suite = unittest.TestLoader().loadTestsFromTestCase(TestAddHost)
    unittest.TextTestRunner(verbosity=2).run(suite)
#!/usr/bin/env python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create and view cache miss archives.
Usage:
./cachemissarchive.py <path to CacheMissArchive file>
This will print out some statistics of the cache archive.
"""
import logging
import os
import sys
from perftracker import runner_cfg
import persistentmixin
def format_request(request, join_val=' ', use_path=True,
                   use_request_body=False, headers=False):
  """Return a one-line, human-readable summary of an HTTP request.

  Args:
    request: an ArchivedHttpRequest-like object with command, host, path,
        request_body and headers attributes.
    join_val: separator placed between the rendered parts.
    use_path: if True, show host+path as the target; otherwise host only.
    use_request_body: if True, append the request body.
    headers: if True, append the request headers.

  Returns:
    The selected parts, each converted with str(), joined by join_val.
  """
  target = request.host + request.path if use_path else request.host
  parts = [request.command, target]
  if use_request_body:
    parts.append(request.request_body)
  if headers:
    parts.append(request.headers)
  return join_val.join(str(part) for part in parts)
class CacheMissArchive(persistentmixin.PersistentMixin):
"""Archives cache misses from playback mode.
Uses runner_cfg.urls for tracking the current page url.
Attributes:
archive_file: output file to store cache miss data
current_page_url: any cache misses will be marked as caused by this URL
page_urls: the list of urls to record and keep track of
archive: dict of cache misses, where the key is a page URL and
the value is a list of ArchivedHttpRequest objects
request_counts: dict that records the number of times a request is issued in
both record and replay mode
"""
def __init__(self, archive_file):
"""Initialize CacheMissArchive.
Args:
archive_file: output file to store data
"""
self.archive_file = archive_file
self.current_page_url = None
# TODO: Pass in urls to CacheMissArchive without runner_cfg dependency
if runner_cfg.urls:
self.page_urls = runner_cfg.urls
# { URL: [archived_http_request, ...], ... }
self.archive = {}
# { archived_http_request: (num_record_requests, num_replay_requests), ... }
self.request_counts = {}
def record_cache_miss(self, request, page_url=None):
"""Records a cache miss for given request.
Args:
request: instance of ArchivedHttpRequest that causes a cache miss
page_url: specify the referer URL that caused this cache miss
"""
if not page_url:
page_url = self.current_page_url
logging.debug('Cache miss on %s', request)
self._append_archive(page_url, request)
  def set_urls_list(self, urls):
    """Replace the list of page URLs used to attribute cache misses."""
    self.page_urls = urls
def record_request(self, request, is_record_mode, is_cache_miss=False):
"""Records the request into the cache archive.
Should be updated on every HTTP request.
Also updates the current page_url contained in runner_cfg.urls.
Args:
request: instance of ArchivedHttpRequest
is_record_mode: indicates whether WPR is on record mode
is_cache_miss: if True, records the request as a cache miss
"""
self._record_request(request, is_record_mode)
page_url = request.host + request.path
for url in self.page_urls:
if self._match_urls(page_url, url):
self.current_page_url = url
logging.debug('Updated current url to %s', self.current_page_url)
break
if is_cache_miss:
self.record_cache_miss(request)
def _record_request(self, request, is_record_mode):
"""Adds 1 to the appropriate request count.
Args:
request: instance of ArchivedHttpRequest
is_record_mode: indicates whether WPR is on record mode
"""
num_record, num_replay = self.request_counts.get(request, (0, 0))
if is_record_mode:
num_record += 1
else:
num_replay += 1
self.request_counts[request] = (num_record, num_replay)
  def request_diff(self, is_show_all=False):
    """Calculates if there are requests sent in record mode that are
    not sent in replay mode and vice versa.

    Args:
      is_show_all: If True, includes every request, even those issued the
        same number of times in record and replay mode.  If False, only
        requests whose record/replay counts differ are included.

    Returns:
      A string displaying difference in requests between record and replay modes
    """
    str_list = ['Diff of requests sent in record mode versus replay mode\n']
    less = []
    equal = []
    more = []
    # Bucket each request by how its record count compares to its replay
    # count; each bucket is sorted before display.
    for request, (num_record, num_replay) in self.request_counts.items():
      format_req = format_request(request, join_val=' ',
                                  use_path=True, use_request_body=False)
      request_line = '%s record: %d, replay: %d' % (
          format_req, num_record, num_replay)
      if num_record < num_replay:
        less.append(request_line)
      elif num_record == num_replay:
        equal.append(request_line)
      else:
        more.append(request_line)
    if is_show_all:
      str_list.extend(sorted(equal))
      str_list.append('')
    str_list.extend(sorted(less))
    str_list.append('')
    str_list.extend(sorted(more))
    return '\n'.join(str_list)
def _match_urls(self, url_1, url_2):
"""Returns true if urls match.
Args:
url_1: url string (e.g. 'http://www.cnn.com')
url_2: same as url_1
Returns:
True if the two urls match, false otherwise
"""
scheme = 'http://'
if url_1.startswith(scheme):
url_1 = url_1[len(scheme):]
if url_2.startswith(scheme):
url_2 = url_2[len(scheme):]
return url_1 == url_2
def _append_archive(self, page_url, request):
"""Appends the corresponding (page_url,request) pair to archived dictionary.
Args:
page_url: page_url string (e.g. 'http://www.cnn.com')
request: instance of ArchivedHttpRequest
"""
self.archive.setdefault(page_url, [])
self.archive[page_url].append(request)
  def __repr__(self):
    # Debug representation: the backing file name plus the full archive dict.
    return repr((self.archive_file, self.archive))
  def Persist(self):
    """Serializes this archive to self.archive_file.

    current_page_url is transient per-run state, so it is cleared before
    writing.  PersistentMixin.Persist is defined in the persistentmixin
    module (not shown in this file chunk).
    """
    self.current_page_url = None
    persistentmixin.PersistentMixin.Persist(self, self.archive_file)
  def get_total_referers(self):
    """Returns the number of distinct referer page URLs in the archive."""
    return len(self.archive)
def get_total_cache_misses(self):
count = 0
for k in self.archive:
count += len(self.archive[k])
return count
  def get_total_referer_cache_misses(self):
    """Returns the number of cache misses attributed to known page URLs.

    NOTE(review): assumes self.page_urls has been set (via runner_cfg.urls
    in __init__ or set_urls_list); if neither happened this raises
    AttributeError — confirm against callers.
    """
    count = 0
    if self.page_urls:
      count = sum(len(v) for k, v in self.archive.items()
                  if k in self.page_urls)
    return count
def get_cache_misses(self, page_url, join_val=' ',
use_path=False, use_request_body=False):
"""Returns a list of cache miss requests from the page_url.
Args:
page_url: url of the request (e.g. http://www.zappos.com/)
join_val: value to join output string with
use_path: true if path is to be included in output display
use_request_body: true if request_body is to be included in output display
Returns:
A list of cache miss requests (in textual representation) from page_url
"""
misses = []
if page_url in self.archive:
cache_misses = self.archive[page_url]
for k in cache_misses:
misses.append(format_request(k, join_val, use_path, use_request_body))
return misses
def get_all_cache_misses(self, use_path=False):
"""Format cache misses into concise visualization."""
all_cache_misses = ''
for page_url in self.archive:
misses = self.get_cache_misses(page_url, use_path=use_path)
all_cache_misses = '%s%s --->\n %s\n\n' % (
all_cache_misses, page_url, '\n '.join(misses))
return all_cache_misses
if __name__ == '__main__':
  # Standalone mode: load a previously persisted archive and print a report.
  # NOTE(review): Python 2 print statements; this script predates Python 3.
  archive_file = sys.argv[1]
  # Load() is provided by persistentmixin.PersistentMixin (not shown here).
  cache_archive = CacheMissArchive.Load(archive_file)
  print 'Total cache misses: %d' % cache_archive.get_total_cache_misses()
  print 'Total page_urls cache misses: %d' % (
      cache_archive.get_total_referer_cache_misses())
  print 'Total referers: %d\n' % cache_archive.get_total_referers()
  print 'Referers are:'
  for ref in cache_archive.archive:
    print '%s with %d cache misses' % (ref, len(cache_archive.archive[ref]))
  print
  print cache_archive.get_all_cache_misses(use_path=True)
  print
| bsd-3-clause |
chromium/chromium | third_party/blink/tools/blinkpy/web_tests/views/printing.py | 5 | 16341 | # Copyright (C) 2010, 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Package that handles non-debug, non-file output for run_web_tests.py."""
import logging
import math
import optparse
from blinkpy.web_tests.models import test_expectations
from blinkpy.web_tests.models.typ_types import ResultType
from blinkpy.web_tests.views.metered_stream import MeteredStream
from blinkpy.tool import grammar
NUM_SLOW_TESTS_TO_LOG = 10
def print_options():
    """Return the optparse options that control run_web_tests.py printing."""

    def _flag(*names, **kwargs):
        # Every printing option is a boolean flag defaulting to off.
        kwargs.setdefault('action', 'store_true')
        kwargs.setdefault('default', False)
        return optparse.make_option(*names, **kwargs)

    return [
        _flag('--debug-rwt-logging',
              help=
              'print timestamps and debug information for run_web_tests.py itself'
              ),
        _flag('--details', help='print detailed results for every test'),
        _flag('-q', '--quiet',
              help='run quietly (errors, warnings, and progress only)'),
        _flag('--timing',
              help='display test times (summary plus per-test w/ --verbose)'),
        _flag('-v', '--verbose',
              help='print a summarized result for every test (one line per test)'
              ),
    ]
class Printer(object):
    """Class handling all non-debug-logging printing done by run_web_tests.py.

    Output is throttled through a MeteredStream; "quiet" suppresses the
    default messages and "debug_rwt_logging" enables the debug-only ones.
    """

    def __init__(self, host, options, regular_output):
        self.num_completed = 0
        self.num_tests = 0
        self._host = host
        self._options = options
        logger = logging.getLogger()
        # --debug-rwt-logging also lowers the root logger level to DEBUG.
        logger.setLevel(
            logging.DEBUG if options.debug_rwt_logging else logging.INFO)
        self._meter = MeteredStream(
            regular_output,
            options.debug_rwt_logging,
            logger,
            number_of_columns=host.platform.terminal_width())
        # Names of tests currently in flight, in start order.
        self._running_tests = []
        # [test_name, result_message] pairs held back so one-line progress
        # output is flushed in test start order.
        self._completed_tests = []

    def cleanup(self):
        """Release the meter (restores the wrapped stream); idempotent."""
        self._meter.cleanup()

    def __del__(self):
        self.cleanup()

    def print_config(self, port):
        """Print the one-time configuration banner for this run."""
        self._print_default("Using port '%s'" % port.name())
        self._print_default(
            'Test configuration: %s' % port.test_configuration())
        self._print_default('View the test results at file://%s/results.html' %
                            port.artifacts_directory())
        if self._options.order == 'random':
            self._print_default(
                'Using random order with seed: %d' % self._options.seed)
        fs = self._host.filesystem
        # Show only the directory names of the baseline search path.
        fallback_path = [fs.split(x)[1] for x in port.baseline_search_path()]
        self._print_default(
            'Baseline search path: %s -> generic' % ' -> '.join(fallback_path))
        self._print_default('Using %s build' % self._options.configuration)
        self._print_default(
            'Regular timeout: %s, slow test timeout: %s' %
            (self._options.time_out_ms, self._options.slow_time_out_ms))
        self._print_default('Command line: ' +
                            ' '.join(port.driver_cmd_line()))
        self._print_default('')

    def print_found(self, num_all_test_files, num_shard_test_files, num_to_run,
                    repeat_each, iterations):
        """Print how many tests were found and how many will actually run."""
        found_str = 'Found %s' % grammar.pluralize('test',
                                                   num_shard_test_files)
        if num_all_test_files != num_shard_test_files:
            found_str += ' (total %d)' % num_all_test_files
        found_str += '; running %d' % num_to_run
        if repeat_each * iterations > 1:
            found_str += ' (%d times each: --repeat-each=%d --iterations=%d)' % (
                repeat_each * iterations, repeat_each, iterations)
        found_str += ', skipping %d' % (num_shard_test_files - num_to_run)
        self._print_default(found_str + '.')

    def print_expected(self, run_results, tests_with_result_type_callback):
        """Print (debug only) the counts of expected passes/failures/flakes."""
        self._print_expected_results_of_type(run_results,
                                             test_expectations.PASS, 'passes',
                                             tests_with_result_type_callback)
        self._print_expected_results_of_type(
            run_results, test_expectations.FAIL, 'failures',
            tests_with_result_type_callback)
        self._print_expected_results_of_type(run_results,
                                             test_expectations.FLAKY, 'flaky',
                                             tests_with_result_type_callback)
        self._print_debug('')

    def print_workers_and_shards(self, port, num_workers, num_shards,
                                 num_locked_shards):
        """Print the parallelism (worker/shard) summary for this run."""
        driver_name = port.driver_name()

        if num_workers == 1:
            self._print_default('Running 1 %s.' % driver_name)
            self._print_debug('(%s).' % grammar.pluralize('shard', num_shards))
        else:
            self._print_default(
                'Running %d %ss in parallel.' % (num_workers, driver_name))
            self._print_debug(
                '(%d shards; %d locked).' % (num_shards, num_locked_shards))
        self._print_default('')

    def _print_expected_results_of_type(self, run_results, result_type,
                                        result_type_str,
                                        tests_with_result_type_callback):
        # Only logged in debug mode.
        tests = tests_with_result_type_callback(result_type)
        self._print_debug('Expect: %5d %-8s' % (len(tests), result_type_str))

    def _num_digits(self, num):
        """Return the number of digits needed to print len(num).

        NOTE(review): despite the name, `num` is a sized collection, not a
        number — the length is what gets measured.
        """
        ndigits = 1
        if len(num):
            ndigits = int(math.log10(len(num))) + 1
        return ndigits

    def print_results(self, run_time, run_results):
        """Print the timing statistics followed by the run summary."""
        self.print_timing_statistics(run_time, run_results)
        self.print_summary(run_time, run_results)

    def print_timing_statistics(self, total_time, run_results):
        """Print (debug only) per-worker timing statistics."""
        self._print_debug('Test timing:')
        self._print_debug('  %6.2f total testing time' % total_time)
        self._print_debug('')

        num_workers = int(self._options.child_processes)
        self._print_debug('Thread timing:')
        stats = {}
        cuml_time = 0
        for result in run_results.results_by_name.values():
            stats.setdefault(result.worker_name, {
                'num_tests': 0,
                'total_time': 0
            })
            stats[result.worker_name]['num_tests'] += 1
            stats[result.worker_name]['total_time'] += result.total_run_time
            cuml_time += result.total_run_time

        for worker_name in stats:
            self._print_debug('    %10s: %5d tests, %6.2f secs' %
                              (worker_name, stats[worker_name]['num_tests'],
                               stats[worker_name]['total_time']))
        self._print_debug('   %6.2f cumulative, %6.2f optimal' %
                          (cuml_time, cuml_time / num_workers))
        self._print_debug('')

    def print_summary(self, total_time, run_results):
        """Print the final one-or-two line summary of the whole run."""
        if self._options.timing:
            parallel_time = sum(
                result.total_run_time
                for result in run_results.results_by_name.values())

            # There is serial overhead in web_test_runner.run() that we can't easily account for when
            # really running in parallel, but taking the min() ensures that in the worst case
            # (if parallel time is less than run_time) we do account for it.
            serial_time = total_time - min(run_results.run_time, parallel_time)

            speedup = (parallel_time + serial_time) / total_time
            timing_summary = ' in %.2fs (%.2fs in rwt, %.2gx)' % (
                total_time, serial_time, speedup)
        else:
            timing_summary = ''

        total = run_results.total - run_results.expected_skips
        expected = run_results.expected - run_results.expected_skips
        unexpected = run_results.unexpected
        incomplete = total - expected - unexpected
        incomplete_str = ''
        if incomplete:
            self._print_default('')
            incomplete_str = " (%d didn't run)" % incomplete

        if self._options.verbose or self._options.debug_rwt_logging or unexpected:
            self.writeln('')

        expected_summary_str = ''
        if run_results.expected_failures > 0:
            expected_summary_str = " (%d passed, %d didn't)" % (
                expected - run_results.expected_failures,
                run_results.expected_failures)

        summary = ''
        if unexpected == 0:
            if expected == total:
                if expected > 1:
                    summary = 'All %d tests ran as expected%s%s.' % (
                        expected, expected_summary_str, timing_summary)
                else:
                    summary = 'The test ran as expected%s%s.' % (
                        expected_summary_str, timing_summary)
            else:
                summary = '%s ran as expected%s%s%s.' % (grammar.pluralize(
                    'test', expected), expected_summary_str, incomplete_str,
                                                         timing_summary)
            self._print_quiet(summary)
        else:
            self._print_quiet(
                "%s ran as expected%s, %d didn't%s%s:" %
                (grammar.pluralize('test', expected), expected_summary_str,
                 unexpected, incomplete_str, timing_summary))
            for test_name in sorted(run_results.unexpected_results_by_name):
                self._print_quiet('  %s' % test_name)

    def _test_status_line(self, test_name, suffix):
        """Return '[completed/total] name suffix', eliding the middle of the
        test name with '...' if the line would overflow the terminal."""
        format_string = '[%d/%d] %s%s'
        status_line = format_string % (self.num_completed, self.num_tests,
                                       test_name, suffix)
        if len(status_line) > self._meter.number_of_columns():
            overflow_columns = (
                len(status_line) - self._meter.number_of_columns())
            ellipsis = '...'
            if len(test_name) < overflow_columns + len(ellipsis) + 2:
                # We don't have enough space even if we elide, just show the test filename.
                fs = self._host.filesystem
                test_name = fs.split(test_name)[1]
            else:
                new_length = len(test_name) - overflow_columns - len(ellipsis)
                prefix = int(new_length / 2)
                test_name = (test_name[:prefix] + ellipsis +
                             test_name[-(new_length - prefix):])
        return format_string % (self.num_completed, self.num_tests, test_name,
                                suffix)

    def print_started_test(self, test_name):
        """Record that a test started and update the progress line."""
        self._running_tests.append(test_name)
        if len(self._running_tests) > 1:
            suffix = ' (+%d)' % (len(self._running_tests) - 1)
        else:
            suffix = ''

        if self._options.verbose:
            write = self._meter.write_update
        else:
            write = self._meter.write_throttled_update

        write(self._test_status_line(test_name, suffix))

    def print_finished_test(self, port, result, expected, exp_str, got_str):
        """Record a finished test and print its status line.

        In the default (non-verbose) mode, results for tests that did not
        start first are queued in self._completed_tests and flushed in start
        order.
        """
        self.num_completed += 1
        test_name = result.test_name

        result_message = self._result_message(result.type, result.failures,
                                              expected, self._options.timing,
                                              result.test_run_time)

        if self._options.details:
            self._print_test_trace(port, result, exp_str, got_str)
        elif self._options.verbose or not expected:
            self.writeln(self._test_status_line(test_name, result_message))
        elif self.num_completed == self.num_tests:
            self._meter.write_update('')
        else:
            if test_name == self._running_tests[0]:
                self._completed_tests.insert(0, [test_name, result_message])
            else:
                self._completed_tests.append([test_name, result_message])

            # Use distinct loop variables so `test_name` (the test that just
            # finished) is not clobbered before the remove() below — the old
            # code rebound `test_name` here, which only worked by accident
            # because the queue never holds more than one entry.
            for completed_name, completed_message in self._completed_tests:
                self._meter.write_throttled_update(
                    self._test_status_line(completed_name, completed_message))

            self._completed_tests = []
        self._running_tests.remove(test_name)

    def _result_message(self, result_type, failures, expected, timing,
                        test_run_time):
        """Build the ' passed'/' failed (...)' suffix for a status line."""
        exp_string = ' unexpectedly' if not expected else ''
        timing_string = ' %.4fs' % test_run_time if timing or test_run_time > 1 else ''
        if result_type == ResultType.Pass:
            return ' passed%s%s' % (exp_string, timing_string)
        else:
            return ' failed%s (%s)%s' % (exp_string, ', '.join(
                failure.message() for failure in failures), timing_string)

    def _print_test_trace(self, port, result, exp_str, got_str):
        """Print the detailed (--details) trace for one finished test."""
        test_name = result.test_name
        self._print_default(self._test_status_line(test_name, ''))

        base = port.lookup_virtual_test_base(test_name)
        if base:
            self._print_default(' base: %s' % base)
            args = port.args_for_test(test_name)
            if args:
                self._print_default(' args: %s' % ' '.join(args))

        references = port.reference_files(test_name)
        if references:
            for _, filename in references:
                self._print_default(
                    '  ref: %s' % port.relative_test_filename(filename))
        else:
            for extension in ('.txt', '.png', '.wav'):
                self._print_baseline(port, test_name, extension)

        self._print_default('  exp: %s' % exp_str)
        self._print_default('  got: %s' % got_str)
        self._print_default(' took: %-.3f' % result.test_run_time)
        self._print_default('')

    def _print_baseline(self, port, test_name, extension):
        """Print the baseline path (or '<none>') for one file extension."""
        baseline = port.expected_filename(test_name, extension)
        if self._host.filesystem.exists(baseline):
            relpath = port.relative_test_filename(baseline)
        else:
            relpath = '<none>'
        self._print_default('  %s: %s' % (extension[1:], relpath))

    def _print_quiet(self, msg):
        # Printed even with --quiet.
        self.writeln(msg)

    def _print_default(self, msg):
        # Suppressed by --quiet.
        if not self._options.quiet:
            self.writeln(msg)

    def _print_debug(self, msg):
        # Printed only with --debug-rwt-logging.
        if self._options.debug_rwt_logging:
            self.writeln(msg)

    def write_throttled_update(self, msg):
        self._meter.write_throttled_update(msg)

    def write_update(self, msg):
        self._meter.write_update(msg)

    def writeln(self, msg):
        self._meter.writeln(msg)

    def flush(self):
        self._meter.flush()
| bsd-3-clause |
garyjyao1/ansible | lib/ansible/modules/extras/cloud/vmware/vmware_vmkernel_ip_config.py | 45 | 4192 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: vmware_vmkernel_ip_config
short_description: Configure the VMkernel IP Address
description:
- Configure the VMkernel IP Address
version_added: 2.0
author: "Joseph Callen (@jcpowermac), Russell Teague (@mtnbikenc)"
notes:
- Tested on vSphere 5.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
hostname:
description:
- The hostname or IP address of the ESXi server
required: True
username:
description:
- The username of the ESXi server
required: True
aliases: ['user', 'admin']
password:
description:
- The password of the ESXi server
required: True
aliases: ['pass', 'pwd']
vmk_name:
description:
- VMkernel interface name
required: True
ip_address:
description:
- IP address to assign to VMkernel interface
required: True
subnet_mask:
description:
- Subnet Mask to assign to VMkernel interface
required: True
'''
EXAMPLES = '''
# Example command from Ansible Playbook
- name: Configure IP address on ESX host
local_action:
module: vmware_vmkernel_ip_config
hostname: esxi_hostname
username: esxi_username
password: esxi_password
vmk_name: vmk0
ip_address: 10.0.0.10
subnet_mask: 255.255.255.0
'''
# pyVmomi is optional at import time; main() fails with a clear message
# when it is missing instead of crashing on import.
try:
    from pyVmomi import vim, vmodl
    HAS_PYVMOMI = True
except ImportError:
    HAS_PYVMOMI = False
def configure_vmkernel_ip_address(host_system, vmk_name, ip_address, subnet_mask):
    """Assign a static IP address and netmask to the named VMkernel interface.

    Args:
        host_system: vim.HostSystem whose networking should be updated.
        vmk_name: VMkernel interface name (e.g. 'vmk0').
        ip_address: IPv4 address to assign.
        subnet_mask: subnet mask to assign.

    Returns:
        True if the vnic was reconfigured; False if no vnic matched vmk_name
        or it already had the requested address and mask.
    """
    host_config_manager = host_system.configManager
    host_network_system = host_config_manager.networkSystem

    for vnic in host_network_system.networkConfig.vnic:
        if vnic.device == vmk_name:
            spec = vnic.spec
            # Reconfigure when either the address OR the mask differs.  The
            # previous check looked only at the address, silently ignoring
            # mask-only changes.
            if (spec.ip.ipAddress != ip_address
                    or spec.ip.subnetMask != subnet_mask):
                spec.ip.dhcp = False
                spec.ip.ipAddress = ip_address
                spec.ip.subnetMask = subnet_mask
                host_network_system.UpdateVirtualNic(vmk_name, spec)
                return True
    return False
def main():
    """Ansible module entry point: set a static IP on a VMkernel interface.

    Exits via module.exit_json / module.fail_json; never returns normally.
    """
    argument_spec = vmware_argument_spec()
    argument_spec.update(dict(vmk_name=dict(required=True, type='str'),
                              ip_address=dict(required=True, type='str'),
                              subnet_mask=dict(required=True, type='str')))

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)

    if not HAS_PYVMOMI:
        module.fail_json(msg='pyvmomi is required for this module')

    vmk_name = module.params['vmk_name']
    ip_address = module.params['ip_address']
    subnet_mask = module.params['subnet_mask']

    try:
        content = connect_to_api(module, False)
        host = get_all_objs(content, [vim.HostSystem])
        if not host:
            module.fail_json(msg="Unable to locate Physical Host.")
        # get_all_objs returns a dict keyed by managed object.  list() keeps
        # this working on both Python 2 and 3 (dict.keys()[0] raises
        # TypeError on Python 3's view objects).
        host_system = list(host.keys())[0]
        changed = configure_vmkernel_ip_address(host_system, vmk_name, ip_address, subnet_mask)
        module.exit_json(changed=changed)
    except vmodl.RuntimeFault as runtime_fault:
        module.fail_json(msg=runtime_fault.msg)
    except vmodl.MethodFault as method_fault:
        module.fail_json(msg=method_fault.msg)
    except Exception as e:
        module.fail_json(msg=str(e))
from ansible.module_utils.vmware import *
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
2ndy/RaspIM | usr/share/doc/git/contrib/ciabot/ciabot.py | 33 | 6810 | #!/usr/bin/env python
# Copyright (c) 2010 Eric S. Raymond <esr@thyrsus.com>
# Distributed under BSD terms.
#
# This script contains porcelain and porcelain byproducts.
# It's Python because the Python standard libraries avoid portability/security
# issues raised by callouts in the ancestral Perl and sh scripts. It should
# be compatible back to Python 2.1.5
#
# usage: ciabot.py [-V] [-n] [-p projectname] [refname [commits...]]
#
# This script is meant to be run either in a post-commit hook or in an
# update hook. If there's nothing unusual about your hosting setup,
# you can specify the project name with a -p option and avoid having
# to modify this script. Try it with -n to see the notification mail
# dumped to stdout and verify that it looks sane. With -V it dumps its
# version and exits.
#
# In post-commit, run it without arguments (other than possibly a -p
# option). It will query for current HEAD and the latest commit ID to
# get the information it needs.
#
# In update, call it with a refname followed by a list of commits:
# You want to reverse the order git rev-list emits becxause it lists
# from most recent to oldest.
#
# /path/to/ciabot.py ${refname} $(git rev-list ${oldhead}..${newhead} | tac)
#
# Note: this script uses mail, not XML-RPC, in order to avoid stalling
# until timeout when the CIA XML-RPC server is down.
#
#
# The project as known to CIA. You will either want to change this
# or invoke the script with a -p option to set it.
#
project=None

#
# You may not need to change these:
#
# NOTE(review): `commands` and this flavor of `urllib` are Python 2 only,
# matching the header's stated compatibility target (Python >= 2.1.5).
import os, sys, commands, socket, urllib

# Name of the repository.
# You can hardwire this to make the script faster.
repo = os.path.basename(os.getcwd())

# Fully-qualified domain name of this host.
# You can hardwire this to make the script faster.
host = socket.getfqdn()

# Changeset URL prefix for your repo: when the commit ID is appended
# to this, it should point at a CGI that will display the commit
# through gitweb or something similar. The defaults will probably
# work if you have a typical gitweb/cgit setup.
#
#urlprefix="http://%(host)s/cgi-bin/gitweb.cgi?p=%(repo)s;a=commit;h="
urlprefix="http://%(host)s/cgi-bin/cgit.cgi/%(repo)s/commit/?id="

# The service used to turn your gitwebbish URL into a tinyurl so it
# will take up less space on the IRC notification line.
tinyifier = "http://tinyurl.com/api-create.php?url="
# The template used to generate the XML messages to CIA. You can make
# visible changes to the IRC-bot notification lines by hacking this.
# The default will produce a notfication line that looks like this:
#
# ${project}: ${author} ${repo}:${branch} * ${rev} ${files}: ${logmsg} ${url}
#
# By omitting $files you can collapse the files part to a single slash.
xml = '''\
<message>
<generator>
<name>CIA Python client for Git</name>
<version>%(gitver)s</version>
<url>%(generator)s</url>
</generator>
<source>
<project>%(project)s</project>
<branch>%(repo)s:%(branch)s</branch>
</source>
<timestamp>%(ts)s</timestamp>
<body>
<commit>
<author>%(author)s</author>
<revision>%(rev)s</revision>
<files>
%(files)s
</files>
<log>%(logmsg)s %(url)s</log>
<url>%(url)s</url>
</commit>
</body>
</message>
'''
#
# No user-serviceable parts below this line:
#
# Addresses for the e-mail. The from address is a dummy, since CIA
# will never reply to this mail.
fromaddr = "CIABOT-NOREPLY@" + host
toaddr = "cia@cia.navi.cx"
# Identify the generator script.
# Should only change when the script itself gets a new home and maintainer.
generator="http://www.catb.org/~esr/ciabot.py"
def do(command):
    """Run |command| through the shell and return its captured output text.

    The exit status from commands.getstatusoutput is discarded; only the
    combined stdout/stderr string is returned.  (Python 2 only — the
    `commands` module was removed in Python 3.)
    """
    return commands.getstatusoutput(command)[1]
def report(refname, merged):
    """Generate a commit notification (CIA XML wrapped in an email) for one
    commit.

    Args:
        refname: the full ref that was updated (e.g. 'refs/heads/master')
        merged: the SHA1 of the commit to report

    Returns:
        The complete mail message text to send to the CIA service.
    """
    # Try to tinyfy a reference to a web view for this commit.
    # NOTE(review): urllib.urlretrieve is Python 2 API (urllib.request in 3).
    try:
        url = open(urllib.urlretrieve(tinyifier + urlprefix + merged)[0]).read()
    except:
        # Best-effort: any failure (network, service down) falls back to the
        # full gitweb/cgit URL.
        url = urlprefix + merged

    branch = os.path.basename(refname)

    # Compute a short name for the revision: `git describe` output when a
    # tag is reachable, otherwise the abbreviated hash.
    rev = do("git describe '"+ merged +"' 2>/dev/null") or merged[:12]

    # Extract the meta-information for the commit.
    rawcommit = do("git cat-file commit " + merged)

    files=do("git diff-tree -r --name-only '"+ merged +"' | sed -e '1d' -e 's-.*-<file>&</file>-'")

    # Parse the raw commit: header lines run until the first blank line,
    # then only the first line of the log message is kept.
    inheader = True
    headers = {}
    logmsg = ""
    for line in rawcommit.split("\n"):
        if inheader:
            if line:
                fields = line.split()
                headers[fields[0]] = " ".join(fields[1:])
            else:
                inheader = False
        else:
            logmsg = line
            break
    (author, ts) = headers["author"].split(">")

    # This discards the part of the author's address after @.
    # Might be nicer to ship the full email address, if not
    # for spammers' address harvesters - getting this wrong
    # would make the freenode #commits channel into harvester heaven.
    author = author.replace("<", "").split("@")[0].split()[-1]

    # This ignores the timezone. Not clear what to do with it...
    ts = ts.strip().split()[0]

    # Fill the XML template from locals + module-level config, then wrap it
    # in the mail headers CIA expects.
    context = locals()
    context.update(globals())
    out = xml % context

    message = '''\
Message-ID: <%(merged)s.%(author)s@%(project)s>
From: %(fromaddr)s
To: %(toaddr)s
Content-type: text/xml
Subject: DeliverXML

%(out)s''' % locals()

    return message
if __name__ == "__main__":
import getopt
try:
(options, arguments) = getopt.getopt(sys.argv[1:], "np:V")
except getopt.GetoptError, msg:
print "ciabot.py: " + str(msg)
raise SystemExit, 1
mailit = True
for (switch, val) in options:
if switch == '-p':
project = val
elif switch == '-n':
mailit = False
elif switch == '-V':
print "ciabot.py: version 3.2"
sys.exit(0)
# Cough and die if user has not specified a project
if not project:
sys.stderr.write("ciabot.py: no project specified, bailing out.\n")
sys.exit(1)
# We'll need the git version number.
gitver = do("git --version").split()[0]
urlprefix = urlprefix % globals()
# The script wants a reference to head followed by the list of
# commit ID to report about.
if len(arguments) == 0:
refname = do("git symbolic-ref HEAD 2>/dev/null")
merges = [do("git rev-parse HEAD")]
else:
refname = arguments[0]
merges = arguments[1:]
if mailit:
import smtplib
server = smtplib.SMTP('localhost')
for merged in merges:
message = report(refname, merged)
if mailit:
server.sendmail(fromaddr, [toaddr], message)
else:
print message
if mailit:
server.quit()
#End
| gpl-2.0 |
godfather1103/WeiboRobot | python27/1.0/lib/test/test_codecencodings_cn.py | 41 | 3135 | #
# test_codecencodings_cn.py
# Codec encoding tests for PRC encodings.
#
from test import test_support
from test import test_multibytecodec_support
import unittest
class Test_GB2312(test_multibytecodec_support.TestBase, unittest.TestCase):
    """Round-trip and error-handling tests for the GB2312 codec."""
    encoding = 'gb2312'
    tstring = test_multibytecodec_support.load_teststring('gb2312')
    # Each entry is (input, error handler, expected output); an expected
    # value of None means the codec must raise under that handler.
    codectests = (
        # invalid bytes
        ("abc\x81\x81\xc1\xc4", "strict", None),
        ("abc\xc8", "strict", None),
        ("abc\x81\x81\xc1\xc4", "replace", u"abc\ufffd\u804a"),
        ("abc\x81\x81\xc1\xc4\xc8", "replace", u"abc\ufffd\u804a\ufffd"),
        ("abc\x81\x81\xc1\xc4", "ignore", u"abc\u804a"),
        ("\xc1\x64", "strict", None),
    )
class Test_GBK(test_multibytecodec_support.TestBase, unittest.TestCase):
    """Round-trip and error-handling tests for the GBK codec."""
    encoding = 'gbk'
    tstring = test_multibytecodec_support.load_teststring('gbk')
    # (input, error handler, expected output); None means the codec must
    # raise.  The last two check GB18030-only sequences and an unmappable
    # codepoint on encode.
    codectests = (
        # invalid bytes
        ("abc\x80\x80\xc1\xc4", "strict", None),
        ("abc\xc8", "strict", None),
        ("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\u804a"),
        ("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\u804a\ufffd"),
        ("abc\x80\x80\xc1\xc4", "ignore", u"abc\u804a"),
        ("\x83\x34\x83\x31", "strict", None),
        (u"\u30fb", "strict", None),
    )
class Test_GB18030(test_multibytecodec_support.TestBase, unittest.TestCase):
    """Round-trip and error-handling tests for the GB18030 codec."""
    encoding = 'gb18030'
    tstring = test_multibytecodec_support.load_teststring('gb18030')
    # (input, error handler, expected output); None means the codec must
    # raise.  Unlike GBK, four-byte sequences and U+30FB are representable.
    codectests = (
        # invalid bytes
        ("abc\x80\x80\xc1\xc4", "strict", None),
        ("abc\xc8", "strict", None),
        ("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\u804a"),
        ("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\u804a\ufffd"),
        ("abc\x80\x80\xc1\xc4", "ignore", u"abc\u804a"),
        ("abc\x84\x39\x84\x39\xc1\xc4", "replace", u"abc\ufffd\u804a"),
        (u"\u30fb", "strict", "\x819\xa79"),
    )
    # GB18030 covers all of ISO 10646 / Unicode.
    has_iso10646 = True
class Test_HZ(test_multibytecodec_support.TestBase, unittest.TestCase):
    """Round-trip and error-handling tests for the HZ (RFC 1843) codec."""
    encoding = 'hz'
    tstring = test_multibytecodec_support.load_teststring('hz')
    # (input, error handler, expected output).  The first two cases check
    # that a line continuation marker '~\n' is consumed silently.
    codectests = (
        # test '~\n' (3 lines)
        (b'This sentence is in ASCII.\n'
         b'The next sentence is in GB.~{<:Ky2;S{#,~}~\n'
         b'~{NpJ)l6HK!#~}Bye.\n',
         'strict',
         u'This sentence is in ASCII.\n'
         u'The next sentence is in GB.'
         u'\u5df1\u6240\u4e0d\u6b32\uff0c\u52ff\u65bd\u65bc\u4eba\u3002'
         u'Bye.\n'),
        # test '~\n' (4 lines)
        (b'This sentence is in ASCII.\n'
         b'The next sentence is in GB.~\n'
         b'~{<:Ky2;S{#,NpJ)l6HK!#~}~\n'
         b'Bye.\n',
         'strict',
         u'This sentence is in ASCII.\n'
         u'The next sentence is in GB.'
         u'\u5df1\u6240\u4e0d\u6b32\uff0c\u52ff\u65bd\u65bc\u4eba\u3002'
         u'Bye.\n'),
        # invalid bytes
        (b'ab~cd', 'replace', u'ab\uFFFDd'),
        (b'ab\xffcd', 'replace', u'ab\uFFFDcd'),
        (b'ab~{\x81\x81\x41\x44~}cd', 'replace', u'ab\uFFFD\uFFFD\u804Acd'),
    )
def test_main():
    """Run every test class in this module via the regrtest helper."""
    test_support.run_unittest(__name__)

if __name__ == "__main__":
    test_main()
| gpl-3.0 |
ravindrapanda/tensorflow | tensorflow/contrib/learn/python/learn/estimators/kmeans.py | 15 | 10904 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of k-means clustering on top of `Estimator` API.
This module is deprecated. Please use
@{tf.contrib.factorization.KMeansClustering} instead of
@{tf.contrib.learn.KMeansClustering}. It has a similar interface, but uses the
@{tf.estimator.Estimator} API instead of @{tf.contrib.learn.Estimator}.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from tensorflow.contrib.factorization.python.ops import clustering_ops
from tensorflow.python.training import training_util
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators.model_fn import ModelFnOps
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops.control_flow_ops import with_dependencies
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary
from tensorflow.python.training import session_run_hook
from tensorflow.python.training.session_run_hook import SessionRunArgs
from tensorflow.python.util.deprecation import deprecated
# Deprecation message pointing users at the tf.estimator-based replacement.
_USE_TF_CONTRIB_FACTORIZATION = (
    'Please use tf.contrib.factorization.KMeansClustering instead of'
    ' tf.contrib.learn.KMeansClustering. It has a similar interface, but uses'
    ' the tf.estimator.Estimator API instead of tf.contrib.learn.Estimator.')
class _LossRelativeChangeHook(session_run_hook.SessionRunHook):
  """Stops when the change in loss goes below a tolerance."""

  def __init__(self, tolerance):
    """Initializes _LossRelativeChangeHook.

    Args:
      tolerance: A relative tolerance of change between iterations.
    """
    self._tolerance = tolerance
    # Loss observed on the previous step; None until the first after_run.
    self._prev_loss = None

  def begin(self):
    # Resolve the loss tensor by name once the graph is final.  The ':0'
    # suffix selects the op's first output tensor.
    self._loss_tensor = ops.get_default_graph().get_tensor_by_name(
        KMeansClustering.LOSS_OP_NAME + ':0')
    assert self._loss_tensor is not None

  def before_run(self, run_context):
    del run_context
    # Ask the session to also fetch the loss on every step.
    return SessionRunArgs(
        fetches={KMeansClustering.LOSS_OP_NAME: self._loss_tensor})

  def after_run(self, run_context, run_values):
    loss = run_values.results[KMeansClustering.LOSS_OP_NAME]
    assert loss is not None
    if self._prev_loss is not None:
      # |loss - prev| / (1 + |prev|): the +1 keeps the denominator sane
      # when the loss approaches zero.
      relative_change = (abs(loss - self._prev_loss) /
                         (1 + abs(self._prev_loss)))
      if relative_change < self._tolerance:
        run_context.request_stop()
    self._prev_loss = loss
class _InitializeClustersHook(session_run_hook.SessionRunHook):
  """Initializes clusters or waits for cluster initialization."""

  def __init__(self, init_op, is_initialized_op, is_chief):
    # init_op: op that performs cluster initialization (run by chief only).
    # is_initialized_op: boolean tensor that becomes True once initialized.
    # is_chief: whether this worker is responsible for running init_op.
    self._init_op = init_op
    self._is_chief = is_chief
    self._is_initialized_op = is_initialized_op

  def after_create_session(self, session, _):
    assert self._init_op.graph == ops.get_default_graph()
    assert self._is_initialized_op.graph == self._init_op.graph
    # The chief runs the initialization op; non-chief workers poll the
    # is_initialized flag once per second until the chief has finished.
    while True:
      try:
        if session.run(self._is_initialized_op):
          break
        elif self._is_chief:
          session.run(self._init_op)
        else:
          time.sleep(1)
      except RuntimeError as e:
        # Transient errors (e.g. session not yet ready) are logged and
        # the loop retries rather than aborting startup.
        logging.info(e)
def _parse_tensor_or_dict(features):
  """Helper function to parse features.

  If `features` is a dict, its values are concatenated along axis 1 in
  sorted-key order (so the column layout is deterministic) into a single
  tensor; any other input is returned unchanged.
  """
  if isinstance(features, dict):
    keys = sorted(features.keys())
    # Colocate the concat with one of its inputs to avoid extra copies.
    with ops.colocate_with(features[keys[0]]):
      features = array_ops.concat([features[k] for k in keys], 1)
  return features
def _kmeans_clustering_model_fn(features, labels, mode, params, config):
  """Model function for KMeansClustering estimator.

  Builds the k-means training graph via `clustering_ops.KMeans`, wires in
  the global step, the named loss op and the initialization/early-stopping
  hooks, and returns a `ModelFnOps` for the Estimator framework.
  """
  # k-means is unsupervised; the Estimator must not pass labels.
  assert labels is None, labels
  (all_scores, model_predictions, losses,
   is_initialized, init_op, training_op) = clustering_ops.KMeans(
       _parse_tensor_or_dict(features),
       params.get('num_clusters'),
       initial_clusters=params.get('training_initial_clusters'),
       distance_metric=params.get('distance_metric'),
       use_mini_batch=params.get('use_mini_batch'),
       mini_batch_steps_per_iteration=params.get(
           'mini_batch_steps_per_iteration'),
       random_seed=params.get('random_seed'),
       kmeans_plus_plus_num_retries=params.get(
           'kmeans_plus_plus_num_retries')).training_graph()
  incr_step = state_ops.assign_add(training_util.get_global_step(), 1)
  # Name the summed loss so _LossRelativeChangeHook can find it by name.
  loss = math_ops.reduce_sum(losses, name=KMeansClustering.LOSS_OP_NAME)
  summary.scalar('loss/raw', loss)
  # Make each training step also increment the global step.
  training_op = with_dependencies([training_op, incr_step], loss)
  predictions = {
      KMeansClustering.ALL_SCORES: all_scores[0],
      KMeansClustering.CLUSTER_IDX: model_predictions[0],
  }
  eval_metric_ops = {KMeansClustering.SCORES: loss}
  # Clusters must be initialized (by the chief) before training can run.
  training_hooks = [_InitializeClustersHook(
      init_op, is_initialized, config.is_chief)]
  relative_tolerance = params.get('relative_tolerance')
  if relative_tolerance is not None:
    training_hooks.append(_LossRelativeChangeHook(relative_tolerance))
  return ModelFnOps(
      mode=mode,
      predictions=predictions,
      eval_metric_ops=eval_metric_ops,
      loss=loss,
      train_op=training_op,
      training_hooks=training_hooks)
# TODO(agarwal,ands): support sharded input.
class KMeansClustering(estimator.Estimator):
  """An Estimator for K-Means clustering."""

  # Distance metrics / initialization schemes re-exported from
  # clustering_ops so callers need not import that module directly.
  SQUARED_EUCLIDEAN_DISTANCE = clustering_ops.SQUARED_EUCLIDEAN_DISTANCE
  COSINE_DISTANCE = clustering_ops.COSINE_DISTANCE
  RANDOM_INIT = clustering_ops.RANDOM_INIT
  KMEANS_PLUS_PLUS_INIT = clustering_ops.KMEANS_PLUS_PLUS_INIT
  # Keys of the prediction/eval dicts produced by the model_fn.
  SCORES = 'scores'
  CLUSTER_IDX = 'cluster_idx'
  CLUSTERS = 'clusters'
  ALL_SCORES = 'all_scores'
  # Name given to the summed-loss op; hooks look the tensor up by name.
  LOSS_OP_NAME = 'kmeans_loss'

  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def __init__(self,
               num_clusters,
               model_dir=None,
               initial_clusters=RANDOM_INIT,
               distance_metric=SQUARED_EUCLIDEAN_DISTANCE,
               random_seed=0,
               use_mini_batch=True,
               mini_batch_steps_per_iteration=1,
               kmeans_plus_plus_num_retries=2,
               relative_tolerance=None,
               config=None):
    """Creates a model for running KMeans training and inference.

    Args:
      num_clusters: number of clusters to train.
      model_dir: the directory to save the model results and log files.
      initial_clusters: specifies how to initialize the clusters for training.
        See clustering_ops.kmeans for the possible values.
      distance_metric: the distance metric used for clustering.
        See clustering_ops.kmeans for the possible values.
      random_seed: Python integer. Seed for PRNG used to initialize centers.
      use_mini_batch: If true, use the mini-batch k-means algorithm. Else assume
        full batch.
      mini_batch_steps_per_iteration: number of steps after which the updated
        cluster centers are synced back to a master copy. See clustering_ops.py
        for more details.
      kmeans_plus_plus_num_retries: For each point that is sampled during
        kmeans++ initialization, this parameter specifies the number of
        additional points to draw from the current distribution before selecting
        the best. If a negative value is specified, a heuristic is used to
        sample O(log(num_to_sample)) additional points.
      relative_tolerance: A relative tolerance of change in the loss between
        iterations. Stops learning if the loss changes less than this amount.
        Note that this may not work correctly if use_mini_batch=True.
      config: See Estimator
    """
    # All hyperparameters are forwarded to _kmeans_clustering_model_fn
    # through the Estimator `params` dict.
    params = {}
    params['num_clusters'] = num_clusters
    params['training_initial_clusters'] = initial_clusters
    params['distance_metric'] = distance_metric
    params['random_seed'] = random_seed
    params['use_mini_batch'] = use_mini_batch
    params['mini_batch_steps_per_iteration'] = mini_batch_steps_per_iteration
    params['kmeans_plus_plus_num_retries'] = kmeans_plus_plus_num_retries
    params['relative_tolerance'] = relative_tolerance
    super(KMeansClustering, self).__init__(
        model_fn=_kmeans_clustering_model_fn,
        params=params,
        model_dir=model_dir,
        config=config)

  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def predict_cluster_idx(self, input_fn=None):
    """Yields predicted cluster indices."""
    key = KMeansClustering.CLUSTER_IDX
    results = super(KMeansClustering, self).predict(
        input_fn=input_fn, outputs=[key])
    for result in results:
      yield result[key]

  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def score(self, input_fn=None, steps=None):
    """Predict total sum of distances to nearest clusters.

    Note that this function is different from the corresponding one in sklearn
    which returns the negative of the sum of distances.

    Args:
      input_fn: see predict.
      steps: see predict.

    Returns:
      Total sum of distances to nearest clusters.
    """
    return np.sum(
        self.evaluate(
            input_fn=input_fn, steps=steps)[KMeansClustering.SCORES])

  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def transform(self, input_fn=None, as_iterable=False):
    """Transforms each element to distances to cluster centers.

    Note that this function is different from the corresponding one in sklearn.
    For SQUARED_EUCLIDEAN distance metric, sklearn transform returns the
    EUCLIDEAN distance, while this function returns the SQUARED_EUCLIDEAN
    distance.

    Args:
      input_fn: see predict.
      as_iterable: see predict

    Returns:
      Array with same number of rows as x, and num_clusters columns, containing
      distances to the cluster centers.
    """
    key = KMeansClustering.ALL_SCORES
    results = super(KMeansClustering, self).predict(
        input_fn=input_fn,
        outputs=[key],
        as_iterable=as_iterable)
    if not as_iterable:
      return results[key]
    else:
      return results

  @deprecated(None, _USE_TF_CONTRIB_FACTORIZATION)
  def clusters(self):
    """Returns cluster centers."""
    return super(KMeansClustering, self).get_variable_value(self.CLUSTERS)
| apache-2.0 |
konieboy/Seng_403 | Gender Computer/unidecode/x022.py | 165 | 4329 | data = (
'[?]', # 0x00
'[?]', # 0x01
'[?]', # 0x02
'[?]', # 0x03
'[?]', # 0x04
'[?]', # 0x05
'[?]', # 0x06
'[?]', # 0x07
'[?]', # 0x08
'[?]', # 0x09
'[?]', # 0x0a
'[?]', # 0x0b
'[?]', # 0x0c
'[?]', # 0x0d
'[?]', # 0x0e
'[?]', # 0x0f
'[?]', # 0x10
'[?]', # 0x11
'-', # 0x12
'[?]', # 0x13
'[?]', # 0x14
'/', # 0x15
'\\', # 0x16
'*', # 0x17
'[?]', # 0x18
'[?]', # 0x19
'[?]', # 0x1a
'[?]', # 0x1b
'[?]', # 0x1c
'[?]', # 0x1d
'[?]', # 0x1e
'[?]', # 0x1f
'[?]', # 0x20
'[?]', # 0x21
'[?]', # 0x22
'|', # 0x23
'[?]', # 0x24
'[?]', # 0x25
'[?]', # 0x26
'[?]', # 0x27
'[?]', # 0x28
'[?]', # 0x29
'[?]', # 0x2a
'[?]', # 0x2b
'[?]', # 0x2c
'[?]', # 0x2d
'[?]', # 0x2e
'[?]', # 0x2f
'[?]', # 0x30
'[?]', # 0x31
'[?]', # 0x32
'[?]', # 0x33
'[?]', # 0x34
'[?]', # 0x35
':', # 0x36
'[?]', # 0x37
'[?]', # 0x38
'[?]', # 0x39
'[?]', # 0x3a
'[?]', # 0x3b
'~', # 0x3c
'[?]', # 0x3d
'[?]', # 0x3e
'[?]', # 0x3f
'[?]', # 0x40
'[?]', # 0x41
'[?]', # 0x42
'[?]', # 0x43
'[?]', # 0x44
'[?]', # 0x45
'[?]', # 0x46
'[?]', # 0x47
'[?]', # 0x48
'[?]', # 0x49
'[?]', # 0x4a
'[?]', # 0x4b
'[?]', # 0x4c
'[?]', # 0x4d
'[?]', # 0x4e
'[?]', # 0x4f
'[?]', # 0x50
'[?]', # 0x51
'[?]', # 0x52
'[?]', # 0x53
'[?]', # 0x54
'[?]', # 0x55
'[?]', # 0x56
'[?]', # 0x57
'[?]', # 0x58
'[?]', # 0x59
'[?]', # 0x5a
'[?]', # 0x5b
'[?]', # 0x5c
'[?]', # 0x5d
'[?]', # 0x5e
'[?]', # 0x5f
'[?]', # 0x60
'[?]', # 0x61
'[?]', # 0x62
'[?]', # 0x63
'<=', # 0x64
'>=', # 0x65
'<=', # 0x66
'>=', # 0x67
'[?]', # 0x68
'[?]', # 0x69
'[?]', # 0x6a
'[?]', # 0x6b
'[?]', # 0x6c
'[?]', # 0x6d
'[?]', # 0x6e
'[?]', # 0x6f
'[?]', # 0x70
'[?]', # 0x71
'[?]', # 0x72
'[?]', # 0x73
'[?]', # 0x74
'[?]', # 0x75
'[?]', # 0x76
'[?]', # 0x77
'[?]', # 0x78
'[?]', # 0x79
'[?]', # 0x7a
'[?]', # 0x7b
'[?]', # 0x7c
'[?]', # 0x7d
'[?]', # 0x7e
'[?]', # 0x7f
'[?]', # 0x80
'[?]', # 0x81
'[?]', # 0x82
'[?]', # 0x83
'[?]', # 0x84
'[?]', # 0x85
'[?]', # 0x86
'[?]', # 0x87
'[?]', # 0x88
'[?]', # 0x89
'[?]', # 0x8a
'[?]', # 0x8b
'[?]', # 0x8c
'[?]', # 0x8d
'[?]', # 0x8e
'[?]', # 0x8f
'[?]', # 0x90
'[?]', # 0x91
'[?]', # 0x92
'[?]', # 0x93
'[?]', # 0x94
'[?]', # 0x95
'[?]', # 0x96
'[?]', # 0x97
'[?]', # 0x98
'[?]', # 0x99
'[?]', # 0x9a
'[?]', # 0x9b
'[?]', # 0x9c
'[?]', # 0x9d
'[?]', # 0x9e
'[?]', # 0x9f
'[?]', # 0xa0
'[?]', # 0xa1
'[?]', # 0xa2
'[?]', # 0xa3
'[?]', # 0xa4
'[?]', # 0xa5
'[?]', # 0xa6
'[?]', # 0xa7
'[?]', # 0xa8
'[?]', # 0xa9
'[?]', # 0xaa
'[?]', # 0xab
'[?]', # 0xac
'[?]', # 0xad
'[?]', # 0xae
'[?]', # 0xaf
'[?]', # 0xb0
'[?]', # 0xb1
'[?]', # 0xb2
'[?]', # 0xb3
'[?]', # 0xb4
'[?]', # 0xb5
'[?]', # 0xb6
'[?]', # 0xb7
'[?]', # 0xb8
'[?]', # 0xb9
'[?]', # 0xba
'[?]', # 0xbb
'[?]', # 0xbc
'[?]', # 0xbd
'[?]', # 0xbe
'[?]', # 0xbf
'[?]', # 0xc0
'[?]', # 0xc1
'[?]', # 0xc2
'[?]', # 0xc3
'[?]', # 0xc4
'[?]', # 0xc5
'[?]', # 0xc6
'[?]', # 0xc7
'[?]', # 0xc8
'[?]', # 0xc9
'[?]', # 0xca
'[?]', # 0xcb
'[?]', # 0xcc
'[?]', # 0xcd
'[?]', # 0xce
'[?]', # 0xcf
'[?]', # 0xd0
'[?]', # 0xd1
'[?]', # 0xd2
'[?]', # 0xd3
'[?]', # 0xd4
'[?]', # 0xd5
'[?]', # 0xd6
'[?]', # 0xd7
'[?]', # 0xd8
'[?]', # 0xd9
'[?]', # 0xda
'[?]', # 0xdb
'[?]', # 0xdc
'[?]', # 0xdd
'[?]', # 0xde
'[?]', # 0xdf
'[?]', # 0xe0
'[?]', # 0xe1
'[?]', # 0xe2
'[?]', # 0xe3
'[?]', # 0xe4
'[?]', # 0xe5
'[?]', # 0xe6
'[?]', # 0xe7
'[?]', # 0xe8
'[?]', # 0xe9
'[?]', # 0xea
'[?]', # 0xeb
'[?]', # 0xec
'[?]', # 0xed
'[?]', # 0xee
'[?]', # 0xef
'[?]', # 0xf0
'[?]', # 0xf1
'[?]', # 0xf2
'[?]', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
| gpl-3.0 |
creasyw/IMTAphy | documentation/doctools/branches/0.4.x/utils/check_sources.py | 9 | 7380 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Checker for file headers
~~~~~~~~~~~~~~~~~~~~~~~~
Make sure each Python file has a correct file header
including copyright and license information.
:copyright: 2006-2008 by Georg Brandl.
:license: GNU GPL, see LICENSE for more details.
"""
import sys, os, re
import getopt
import cStringIO
from os.path import join, splitext, abspath
checkers = {}
def checker(*suffixes, **kwds):
    """Decorator factory: register a check function for file suffixes.

    Each decorated function is appended to the module-level `checkers`
    mapping under every given suffix.  The keyword-only flag `only_pkg`
    (default False) marks checks that apply to package sources only.
    """
    only_pkg = kwds.pop('only_pkg', False)
    def register(check_func):
        check_func.only_pkg = only_pkg
        for ext in suffixes:
            checkers.setdefault(ext, []).append(check_func)
        return check_func
    return register
# Regex fragments used by the file-header check below.
# NOTE(review): leading whitespace inside these pattern literals may have
# been collapsed by a re-encoding of this file; confirm against upstream.
name_mail_re = r'[\w ]+(<.*?>)?'
# Matches the ":copyright: 200x(-200y) by Name <mail>, ..." docstring line.
copyright_re = re.compile(r'^ :copyright: 200\d(-200\d)? by %s(, %s)*[,.]$' %
                          (name_mail_re, name_mail_re))
# Matches the ":license: ..." docstring line.
license_re = re.compile(r" :license: (.*?).\n")
# Continuation line of a multi-name copyright block.
copyright_2_re = re.compile(r'^ %s(, %s)*[,.]$' %
                            (name_mail_re, name_mail_re))
# PEP 263 coding declaration.
coding_re = re.compile(r'coding[:=]\s*([-\w.]+)')
# "not x is y" / "not x in y" -- should be "x is not y" / "x not in y".
not_ix_re = re.compile(r'\bnot\s+\S+?\s+i[sn]\s\S+')
# "== None/True/False" -- should use "is".
is_const_re = re.compile(r'if.*?==\s+(None|False|True)\b')
# Words flagged by the spelling check; the marker comment below exempts
# these very lines from being reported themselves.
misspellings = ["developement", "adress", "verificate", # ALLOW-MISSPELLING
                "informations"] # ALLOW-MISSPELLING
@checker('.py')
def check_syntax(fn, lines):
    """Yield (lineno, message) if the file does not byte-compile."""
    try:
        compile(''.join(lines), fn, "exec")
    except SyntaxError, err:  # Python 2 except syntax (file is py2)
        yield 0, "not compilable: %s" % err
@checker('.py')
def check_style_and_encoding(fn, lines):
    """Yield (lineno, message) for style violations and undecodable text.

    Checks line length, "not x is/in", "== None/True/False", and that each
    line decodes in the encoding declared in the first two lines (PEP 263);
    defaults to ASCII when no declaration is found.
    """
    encoding = 'ascii'
    for lno, line in enumerate(lines):
        if len(line) > 90:
            yield lno+1, "line too long"
        if lno < 2:
            # Coding declarations are only honored on the first two lines.
            co = coding_re.search(line)
            if co:
                encoding = co.group(1)
        if line.strip().startswith('#'):
            continue  # comments are exempt from the remaining checks
        m = not_ix_re.search(line)
        if m:
            yield lno+1, '"' + m.group() + '"'
        if is_const_re.search(line):
            yield lno+1, 'using == None/True/False'
        try:
            # Python 2: str.decode validates the byte string.
            line.decode(encoding)
        except UnicodeDecodeError, err:
            yield lno+1, "not decodable: %s\n Line: %r" % (err, line)
        except LookupError, err:
            # Unknown codec name: report once, then fall back to latin1
            # (which never fails) so the error is not repeated per line.
            yield 0, "unknown encoding: %s" % encoding
            encoding = 'latin1'
@checker('.py', only_pkg=True)
def check_fileheader(fn, lines):
    """Yield (lineno, message) for malformed package file headers.

    Verifies the utf-8 coding line, the module docstring (opening quotes,
    module name heading, '~' underline, indentation) and the trailing
    :copyright:/:license: fields.
    """
    # line number correction
    c = 1
    if lines[0:1] == ['#!/usr/bin/env python\n']:
        # A shebang shifts every subsequent check down by one line.
        lines = lines[1:]
        c = 2
    llist = []
    docopen = False
    for lno, l in enumerate(lines):
        llist.append(l)
        if lno == 0:
            if l == '# -*- coding: rot13 -*-\n':
                # special-case pony package
                return
            elif l != '# -*- coding: utf-8 -*-\n':
                yield 1, "missing coding declaration"
        elif lno == 1:
            if l != '"""\n' and l != 'r"""\n':
                yield 2, 'missing docstring begin (""")'
            else:
                docopen = True
        elif docopen:
            if l == '"""\n':
                # end of docstring
                if lno <= 4:
                    yield lno+c, "missing module name in docstring"
                break
            # NOTE(review): comparing l[:4] against a single space looks
            # like collapsed whitespace from re-encoding; upstream likely
            # compares against four spaces. Verify before relying on this.
            if l != "\n" and l[:4] != ' ' and docopen:
                yield lno+c, "missing correct docstring indentation"
            if lno == 2:
                # if not in package, don't check the module name
                modname = fn[:-3].replace('/', '.').replace('.__init__', '')
                # Strip leading path components until the docstring heading
                # matches; while/else fires when no suffix matched at all.
                while modname:
                    if l.lower()[4:-1] == modname:
                        break
                    modname = '.'.join(modname.split('.')[1:])
                else:
                    yield 3, "wrong module name in docstring heading"
                modnamelen = len(l.strip())
            elif lno == 3:
                if l.strip() != modnamelen * "~":
                    yield 4, "wrong module name underline, should be ~~~...~"
    else:
        # for/else: loop ran to completion without hitting the closing """.
        yield 0, "missing end and/or start of docstring..."
    # check for copyright and license fields
    license = llist[-2:-1]
    if not license or not license_re.match(license[0]):
        yield 0, "no correct license info"
    # Walk upward over continuation lines of a multi-name copyright block.
    ci = -3
    copyright = llist[ci:ci+1]
    while copyright and copyright_2_re.match(copyright[0]):
        ci -= 1
        copyright = llist[ci:ci+1]
    if not copyright or not copyright_re.match(copyright[0]):
        yield 0, "no correct copyright info"
@checker('.py', '.html')
def check_whitespace_and_spelling(fn, lines):
    """Yield (lineno, message) for tabs, trailing whitespace, misspellings."""
    for lno, line in enumerate(lines):
        if "\t" in line:
            yield lno+1, "OMG TABS!!!1 "
        # line[:-1] drops the newline; any other trailing blank is flagged.
        if line[:-1].rstrip(' \t') != line[:-1]:
            yield lno+1, "trailing whitespace"
        for word in misspellings:
            # Lines carrying the ALLOW-MISSPELLING marker are exempt.
            if word in line and 'ALLOW-MISSPELLING' not in line:
                yield lno+1, '"%s" used' % word
# Deprecated/presentational HTML tags that check_xhtml flags.
# BUG FIX: the original tuple was missing a comma after '<strike>', which
# implicitly concatenated it with '<center>' into the single bogus entry
# '<strike><center>' -- so neither tag was ever detected on its own.
bad_tags = ('<b>', '<i>', '<u>', '<s>', '<strike>',
            '<center>', '<big>', '<small>', '<font')
@checker('.html')
def check_xhtml(fn, lines):
    """Yield (lineno, message) for each deprecated HTML tag occurrence."""
    for index, text in enumerate(lines):
        matches = (tag for tag in bad_tags if tag in text)
        for tag in matches:
            yield index + 1, "used " + tag
def main(argv):
    """Walk a tree and run the registered checkers over matching files.

    Usage: check_sources.py [-v] [-i ignorepath]* [path]
    Returns 0 if no errors were found, 1 if some were, 2 on bad usage.
    """
    try:
        gopts, args = getopt.getopt(argv[1:], "vi:")
    except getopt.GetoptError:
        print "Usage: %s [-v] [-i ignorepath]* [path]" % argv[0]
        return 2
    opts = {}
    for opt, val in gopts:
        if opt == '-i':
            # Normalize ignore paths so they compare against abspath below.
            val = abspath(val)
        opts.setdefault(opt, []).append(val)
    if len(args) == 0:
        path = '.'
    elif len(args) == 1:
        path = args[0]
    else:
        print "Usage: %s [-v] [-i ignorepath]* [path]" % argv[0]
        return 2
    verbose = '-v' in opts
    num = 0
    # Buffer all messages; they are printed together at the end.
    out = cStringIO.StringIO()
    # TODO: replace os.walk run with iteration over output of
    # `svn list -R`.
    for root, dirs, files in os.walk(path):
        if '.svn' in dirs:
            dirs.remove('.svn')
        if '-i' in opts and abspath(root) in opts['-i']:
            # Prune ignored directories in place so os.walk skips them.
            del dirs[:]
            continue
        # only_pkg checkers run solely on the package sources themselves.
        in_check_pkg = root.startswith('./sphinx')
        for fn in files:
            fn = join(root, fn)
            if fn[:2] == './': fn = fn[2:]
            if '-i' in opts and abspath(fn) in opts['-i']:
                continue
            ext = splitext(fn)[1]
            checkerlist = checkers.get(ext, None)
            if not checkerlist:
                continue
            if verbose:
                print "Checking %s..." % fn
            try:
                f = open(fn, 'r')
                lines = list(f)
            except (IOError, OSError), err:
                print "%s: cannot open: %s" % (fn, err)
                num += 1
                continue
            for checker in checkerlist:
                if not in_check_pkg and checker.only_pkg:
                    continue
                for lno, msg in checker(fn, lines):
                    print >>out, "%s:%d: %s" % (fn, lno, msg)
                    num += 1
            if verbose:
                print
    if num == 0:
        print "No errors found."
    else:
        print out.getvalue().rstrip('\n')
        print "%d error%s found." % (num, num > 1 and "s" or "")
    return int(num > 0)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| gpl-2.0 |
wemanuel/smry | server-auth/ls/google-cloud-sdk/platform/gsutil/third_party/boto/tests/unit/glacier/test_layer2.py | 114 | 14402 | # -*- coding: utf-8 -*-
# Copyright (c) 2012 Thomas Parslow http://almostobsolete.net/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from tests.unit import unittest
from mock import call, Mock, patch, sentinel
import codecs
from boto.glacier.layer1 import Layer1
from boto.glacier.layer2 import Layer2
import boto.glacier.vault
from boto.glacier.vault import Vault
from boto.glacier.vault import Job
from datetime import datetime, tzinfo, timedelta
# Some fixture data from the Glacier docs
FIXTURE_VAULT = {
"CreationDate": "2012-02-20T17:01:45.198Z",
"LastInventoryDate": "2012-03-20T17:03:43.221Z",
"NumberOfArchives": 192,
"SizeInBytes": 78088912,
"VaultARN": "arn:aws:glacier:us-east-1:012345678901:vaults/examplevault",
"VaultName": "examplevault"
}
FIXTURE_VAULTS = {
'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault0',
'VaultName': 'vault0', 'NumberOfArchives': 0,
'CreationDate': '2013-05-17T02:38:39.049Z'},
{'SizeInBytes': 0, 'LastInventoryDate': None,
'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault3',
'VaultName': 'vault3', 'NumberOfArchives': 0,
'CreationDate': '2013-05-17T02:31:18.659Z'}]}
FIXTURE_PAGINATED_VAULTS = {
'Marker': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault2',
'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault0',
'VaultName': 'vault0', 'NumberOfArchives': 0,
'CreationDate': '2013-05-17T02:38:39.049Z'},
{'SizeInBytes': 0, 'LastInventoryDate': None,
'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault1',
'VaultName': 'vault1', 'NumberOfArchives': 0,
'CreationDate': '2013-05-17T02:31:18.659Z'}]}
FIXTURE_PAGINATED_VAULTS_CONT = {
'Marker': None,
'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault2',
'VaultName': 'vault2', 'NumberOfArchives': 0,
'CreationDate': '2013-05-17T02:38:39.049Z'},
{'SizeInBytes': 0, 'LastInventoryDate': None,
'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault3',
'VaultName': 'vault3', 'NumberOfArchives': 0,
'CreationDate': '2013-05-17T02:31:18.659Z'}]}
FIXTURE_ARCHIVE_JOB = {
"Action": "ArchiveRetrieval",
"ArchiveId": ("NkbByEejwEggmBz2fTHgJrg0XBoDfjP4q6iu87-TjhqG6eGoOY9Z8i1_AUyUs"
"uhPAdTqLHy8pTl5nfCFJmDl2yEZONi5L26Omw12vcs01MNGntHEQL8MBfGlqr"
"EXAMPLEArchiveId"),
"ArchiveSizeInBytes": 16777216,
"Completed": False,
"CreationDate": "2012-05-15T17:21:39.339Z",
"CompletionDate": "2012-05-15T17:21:43.561Z",
"InventorySizeInBytes": None,
"JobDescription": "My ArchiveRetrieval Job",
"JobId": ("HkF9p6o7yjhFx-K3CGl6fuSm6VzW9T7esGQfco8nUXVYwS0jlb5gq1JZ55yHgt5v"
"P54ZShjoQzQVVh7vEXAMPLEjobID"),
"SHA256TreeHash": ("beb0fe31a1c7ca8c6c04d574ea906e3f97b31fdca7571defb5b44dc"
"a89b5af60"),
"SNSTopic": "arn:aws:sns:us-east-1:012345678901:mytopic",
"StatusCode": "InProgress",
"StatusMessage": "Operation in progress.",
"VaultARN": "arn:aws:glacier:us-east-1:012345678901:vaults/examplevault"
}
EXAMPLE_PART_LIST_RESULT_PAGE_1 = {
"ArchiveDescription": "archive description 1",
"CreationDate": "2012-03-20T17:03:43.221Z",
"Marker": "MfgsKHVjbQ6EldVl72bn3_n5h2TaGZQUO-Qb3B9j3TITf7WajQ",
"MultipartUploadId": "OW2fM5iVylEpFEMM9_HpKowRapC3vn5sSL39_396UW9zLFUWVrnRHaPjUJddQ5OxSHVXjYtrN47NBZ-khxOjyEXAMPLE",
"PartSizeInBytes": 4194304,
"Parts": [{
"RangeInBytes": "4194304-8388607",
"SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
}],
"VaultARN": "arn:aws:glacier:us-east-1:012345678901:vaults/demo1-vault"
}
# The documentation doesn't say whether the non-Parts fields are defined in
# future pages, so assume they are not.
EXAMPLE_PART_LIST_RESULT_PAGE_2 = {
"ArchiveDescription": None,
"CreationDate": None,
"Marker": None,
"MultipartUploadId": None,
"PartSizeInBytes": None,
"Parts": [{
"RangeInBytes": "0-4194303",
"SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
}],
"VaultARN": None
}
EXAMPLE_PART_LIST_COMPLETE = {
"ArchiveDescription": "archive description 1",
"CreationDate": "2012-03-20T17:03:43.221Z",
"Marker": None,
"MultipartUploadId": "OW2fM5iVylEpFEMM9_HpKowRapC3vn5sSL39_396UW9zLFUWVrnRHaPjUJddQ5OxSHVXjYtrN47NBZ-khxOjyEXAMPLE",
"PartSizeInBytes": 4194304,
"Parts": [{
"RangeInBytes": "4194304-8388607",
"SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
}, {
"RangeInBytes": "0-4194303",
"SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
}],
"VaultARN": "arn:aws:glacier:us-east-1:012345678901:vaults/demo1-vault"
}
class GlacierLayer2Base(unittest.TestCase):
    """Shared fixture: provides a spec'd Layer1 mock for Layer2 tests."""

    def setUp(self):
        self.mock_layer1 = Mock(spec=Layer1)
class TestGlacierLayer2Connection(GlacierLayer2Base):
    """Tests for Layer2 vault creation/lookup/listing over a mocked Layer1."""

    def setUp(self):
        GlacierLayer2Base.setUp(self)
        self.layer2 = Layer2(layer1=self.mock_layer1)

    def test_create_vault(self):
        self.mock_layer1.describe_vault.return_value = FIXTURE_VAULT
        self.layer2.create_vault("My Vault")
        self.mock_layer1.create_vault.assert_called_with("My Vault")

    def test_get_vault(self):
        self.mock_layer1.describe_vault.return_value = FIXTURE_VAULT
        vault = self.layer2.get_vault("examplevault")
        # The returned Vault wraps the same Layer1 and mirrors the fixture.
        self.assertEqual(vault.layer1, self.mock_layer1)
        self.assertEqual(vault.name, "examplevault")
        self.assertEqual(vault.size, 78088912)
        self.assertEqual(vault.number_of_archives, 192)

    def test_list_vaults(self):
        self.mock_layer1.list_vaults.return_value = FIXTURE_VAULTS
        vaults = self.layer2.list_vaults()
        self.assertEqual(vaults[0].name, "vault0")
        self.assertEqual(len(vaults), 2)

    def test_list_vaults_paginated(self):
        # Two responses: the first carries a Marker, so list_vaults must
        # issue a second request and concatenate the results.
        resps = [FIXTURE_PAGINATED_VAULTS, FIXTURE_PAGINATED_VAULTS_CONT]
        def return_paginated_vaults_resp(marker=None, limit=None):
            return resps.pop(0)
        self.mock_layer1.list_vaults = Mock(side_effect=return_paginated_vaults_resp)
        vaults = self.layer2.list_vaults()
        self.assertEqual(vaults[0].name, "vault0")
        self.assertEqual(vaults[3].name, "vault3")
        self.assertEqual(len(vaults), 4)
class TestVault(GlacierLayer2Base):
    """Tests for boto.glacier.vault.Vault over a mocked Layer1.

    Changes from the original: the deprecated `assertEquals` alias is
    replaced by `assertEqual`, and the confusing 2to3 octal literals
    (0o1/0o2) in test_initiate_job are written as plain 1/2 -- both are
    behavior-identical cleanups.
    """

    def setUp(self):
        GlacierLayer2Base.setUp(self)
        self.vault = Vault(self.mock_layer1, FIXTURE_VAULT)

    # TODO: Tests for the other methods of uploading

    def test_create_archive_writer(self):
        self.mock_layer1.initiate_multipart_upload.return_value = {
            "UploadId": "UPLOADID"}
        writer = self.vault.create_archive_writer(description="stuff")
        self.mock_layer1.initiate_multipart_upload.assert_called_with(
            "examplevault", self.vault.DefaultPartSize, "stuff")
        self.assertEqual(writer.vault, self.vault)
        self.assertEqual(writer.upload_id, "UPLOADID")

    def test_delete_vault(self):
        self.vault.delete_archive("archive")
        self.mock_layer1.delete_archive.assert_called_with("examplevault",
                                                           "archive")

    def test_initiate_job(self):
        class UTC(tzinfo):
            """UTC"""

            def utcoffset(self, dt):
                return timedelta(0)

            def tzname(self, dt):
                return "Z"

            def dst(self, dt):
                return timedelta(0)

        self.mock_layer1.initiate_job.return_value = {'JobId': 'job-id'}
        self.vault.retrieve_inventory(start_date=datetime(2014, 1, 1, tzinfo=UTC()),
                                      end_date=datetime(2014, 1, 2, tzinfo=UTC()),
                                      limit=100)
        # Dates must be serialized to ISO-8601 UTC strings in the job params.
        self.mock_layer1.initiate_job.assert_called_with(
            'examplevault', {
                'Type': 'inventory-retrieval',
                'InventoryRetrievalParameters': {
                    'StartDate': '2014-01-01T00:00:00Z',
                    'EndDate': '2014-01-02T00:00:00Z',
                    'Limit': 100
                }
            })

    def test_get_job(self):
        self.mock_layer1.describe_job.return_value = FIXTURE_ARCHIVE_JOB
        job = self.vault.get_job(
            "NkbByEejwEggmBz2fTHgJrg0XBoDfjP4q6iu87-TjhqG6eGoOY9Z8i1_AUyUsuhPA"
            "dTqLHy8pTl5nfCFJmDl2yEZONi5L26Omw12vcs01MNGntHEQL8MBfGlqrEXAMPLEA"
            "rchiveId")
        self.assertEqual(job.action, "ArchiveRetrieval")

    def test_list_jobs(self):
        self.mock_layer1.list_jobs.return_value = {
            "JobList": [FIXTURE_ARCHIVE_JOB]}
        jobs = self.vault.list_jobs(False, "InProgress")
        self.mock_layer1.list_jobs.assert_called_with("examplevault",
                                                      False, "InProgress")
        self.assertEqual(jobs[0].archive_id,
                         "NkbByEejwEggmBz2fTHgJrg0XBoDfjP4q6iu87-TjhqG6eGoOY9Z"
                         "8i1_AUyUsuhPAdTqLHy8pTl5nfCFJmDl2yEZONi5L26Omw12vcs0"
                         "1MNGntHEQL8MBfGlqrEXAMPLEArchiveId")

    def test_list_all_parts_one_page(self):
        self.mock_layer1.list_parts.return_value = (
            dict(EXAMPLE_PART_LIST_COMPLETE))  # take a copy
        parts_result = self.vault.list_all_parts(sentinel.upload_id)
        expected = [call('examplevault', sentinel.upload_id)]
        self.assertEqual(expected, self.mock_layer1.list_parts.call_args_list)
        self.assertEqual(EXAMPLE_PART_LIST_COMPLETE, parts_result)

    def test_list_all_parts_two_pages(self):
        self.mock_layer1.list_parts.side_effect = [
            # take copies
            dict(EXAMPLE_PART_LIST_RESULT_PAGE_1),
            dict(EXAMPLE_PART_LIST_RESULT_PAGE_2)
        ]
        parts_result = self.vault.list_all_parts(sentinel.upload_id)
        # Second call must resume from the Marker returned by page 1.
        expected = [call('examplevault', sentinel.upload_id),
                    call('examplevault', sentinel.upload_id,
                         marker=EXAMPLE_PART_LIST_RESULT_PAGE_1['Marker'])]
        self.assertEqual(expected, self.mock_layer1.list_parts.call_args_list)
        self.assertEqual(EXAMPLE_PART_LIST_COMPLETE, parts_result)

    @patch('boto.glacier.vault.resume_file_upload')
    def test_resume_archive_from_file(self, mock_resume_file_upload):
        part_size = 4
        mock_list_parts = Mock()
        mock_list_parts.return_value = {
            'PartSizeInBytes': part_size,
            'Parts': [{
                'RangeInBytes': '0-3',
                'SHA256TreeHash': '12',
            }, {
                'RangeInBytes': '4-6',
                'SHA256TreeHash': '34',
            }],
        }
        self.vault.list_all_parts = mock_list_parts
        self.vault.resume_archive_from_file(
            sentinel.upload_id, file_obj=sentinel.file_obj)
        # Hashes are hex-decoded and keyed by part index before resuming.
        mock_resume_file_upload.assert_called_once_with(
            self.vault, sentinel.upload_id, part_size, sentinel.file_obj,
            {0: codecs.decode('12', 'hex_codec'), 1: codecs.decode('34', 'hex_codec')})
class TestJob(GlacierLayer2Base):
    """Tests for boto.glacier.vault.Job over a mocked Layer1."""

    def setUp(self):
        GlacierLayer2Base.setUp(self)
        self.vault = Vault(self.mock_layer1, FIXTURE_VAULT)
        self.job = Job(self.vault, FIXTURE_ARCHIVE_JOB)

    def test_get_job_output(self):
        self.mock_layer1.get_job_output.return_value = "TEST_OUTPUT"
        self.job.get_output((0, 100))
        # The byte range must be forwarded verbatim with vault and job ids.
        self.mock_layer1.get_job_output.assert_called_with(
            "examplevault",
            "HkF9p6o7yjhFx-K3CGl6fuSm6VzW9T7esGQfco8nUXVYwS0jlb5gq1JZ55yHgt5vP"
            "54ZShjoQzQVVh7vEXAMPLEjobID", (0, 100))
class TestRangeStringParsing(unittest.TestCase):
    """Tests for Vault._range_string_to_part_index ('start-end' -> index).

    Uses `assertEqual` instead of the deprecated `assertEquals` alias
    (removed in Python 3.12); behavior is otherwise unchanged.
    """

    def test_simple_range(self):
        self.assertEqual(
            Vault._range_string_to_part_index('0-3', 4), 0)

    def test_range_one_too_big(self):
        # Off-by-one bug in Amazon's Glacier implementation
        # See: https://forums.aws.amazon.com/thread.jspa?threadID=106866&tstart=0
        # Workaround is to assume that if a (start, end] range appears to be
        # returned then that is what it is.
        self.assertEqual(
            Vault._range_string_to_part_index('0-4', 4), 0)

    def test_range_too_big(self):
        self.assertRaises(
            AssertionError, Vault._range_string_to_part_index, '0-5', 4)

    def test_range_start_mismatch(self):
        self.assertRaises(
            AssertionError, Vault._range_string_to_part_index, '1-3', 4)

    def test_range_end_mismatch(self):
        # End mismatch is OK, since the last part might be short
        self.assertEqual(
            Vault._range_string_to_part_index('0-2', 4), 0)
| apache-2.0 |
termie/nova-migration-demo | contrib/boto_v6/__init__.py | 52 | 1693 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
def connect_ec2(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ec2.connection.EC2Connection`
    :return: A connection to Amazon's EC2
    """
    # Imported lazily so importing this package does not pull in the EC2
    # connection machinery until a connection is actually requested.
    # Extra keyword arguments are forwarded to the connection constructor.
    from boto_v6.ec2.connection import EC2ConnectionV6
    return EC2ConnectionV6(aws_access_key_id, aws_secret_access_key, **kwargs)
| apache-2.0 |
kalfasyan/DA224x | code/eigens/params001.py | 1 | 6162 | import random
import numpy as np
import itertools
import decimal
import math
nrn_type = "iaf_neuron"
exc_nrns_mc = 64
inh_nrns_mc = 16
lr_mc = 3
mc_hc = 4
hc = 3
nrns = (exc_nrns_mc+inh_nrns_mc)*hc*mc_hc*lr_mc
q = 1
sigma = math.sqrt(q/decimal.Decimal(nrns))
sigma2 = math.sqrt(1/decimal.Decimal(nrns))
mu = 0
nrns_hc = nrns/hc
nrns_mc = nrns_hc/mc_hc
nrns_l23 = nrns_mc*34/100
nrns_l4 = nrns_mc*33/100
nrns_l5 = nrns_mc*33/100
print nrns,"neurons."
print nrns_hc, "per hypercolumn in %s" %hc,"hypercolumns."
print nrns_mc, "per minicolumn in %s" %mc_hc,"minicolumns."
print nrns_l23, nrns_l4, nrns_l5, "in layers23 layer4 and layer5 respectively"
##############################################################
""" 2. Creating list of Hypercolumns, list of minicolumns within
hypercolumns, list of layers within minicolumns within
hypercolumns"""
# Neuron ids 0..N-1, chunked with the zip(*[iter(..)]*n) idiom into
# consecutive equal-sized tuples.
split = [i for i in range(nrns)]
split_hc = zip(*[iter(split)]*nrns_hc)       # one tuple of ids per hypercolumn
split_mc = []
split_lr23,split_lr4,split_lr5 = [],[],[]
for i in range(len(split_hc)):
    # Chunk each hypercolumn into its minicolumns, then slice each
    # minicolumn into its three layers (L2/3, L4, L5) by position.
    split_mc.append(zip(*[iter(split_hc[i])]*nrns_mc))
    for j in range(len(split_mc[i])):
        split_lr23.append(split_mc[i][j][0:nrns_l23])
        split_lr4.append(split_mc[i][j][nrns_l23:nrns_l23+nrns_l4])
        split_lr5.append(split_mc[i][j][nrns_l23+nrns_l4:])
# Within every layer slice the first ~80% of ids are excitatory and the
# remaining ~20% inhibitory.
split_exc,split_inh = [],[]
for i in range(len(split_lr23)):
    split_exc.append(split_lr23[i][0:int(round(80./100.*(len(split_lr23[i]))))])
    split_inh.append(split_lr23[i][int(round(80./100.*(len(split_lr23[i])))):])
for i in range(len(split_lr4)):
    split_exc.append(split_lr4[i][0:int(round(80./100.*(len(split_lr4[i]))))])
    split_inh.append(split_lr4[i][int(round(80./100.*(len(split_lr4[i])))):])
for i in range(len(split_lr5)):
    split_exc.append(split_lr5[i][0:int(round(80./100.*(len(split_lr5[i]))))])
    split_inh.append(split_lr5[i][int(round(80./100.*(len(split_lr5[i])))):])
##############################################################
""" 3. Creating sets for all minicolumns and all layers """
# Membership-test sets used by the predicates below.
hypercolumns = set(split_hc)   # set of per-hypercolumn id tuples
minitemp = []
for i in range(len(split_mc)):
    for j in split_mc[i]:
        minitemp.append(j)
minicolumns = set(minitemp)    # set of per-minicolumn id tuples
# Flattened per-layer and per-population id sets (ints, fast membership).
layers23 = set(list(itertools.chain.from_iterable(split_lr23)))
layers4 = set(list(itertools.chain.from_iterable(split_lr4)))
layers5 = set(list(itertools.chain.from_iterable(split_lr5)))
exc_nrns_set = set(list(itertools.chain.from_iterable(split_exc)))
inh_nrns_set = set(list(itertools.chain.from_iterable(split_inh)))
# Placeholders sized to the excitatory/inhibitory populations.
exc = [None for i in range(len(exc_nrns_set))]
inh = [None for i in range(len(inh_nrns_set))]
#################### FUNCTIONS #####################################
""" Checks if 2 neurons belong in the same hypercolumn """
def same_hypercolumn(q,w):
for i in hypercolumns:
if q in i and w in i:
return True
return False
""" Checks if 2 neurons belong in the same minicolumn """
def same_minicolumn(q,w):
for mc in minicolumns:
if q in mc and w in mc:
return True
return False
""" Checks if 2 neurons belong in the same layer """
def same_layer(q,w):
if same_hypercolumn(q,w):
if q in layers23 and w in layers23:
return True
elif q in layers4 and w in layers4:
return True
elif q in layers5 and w in layers5:
return True
return False
def next_hypercolumn(q, w):
    """Return True when w lies in the hypercolumn immediately after q's.

    BUG FIX: the original guarded the lookahead with `j < len(split_hc)`,
    comparing a *neuron id* against the number of hypercolumns. That both
    skipped valid pairs (whenever neuron ids exceed the hypercolumn count)
    and allowed an IndexError on `split_hc[i + 1]` for the last
    hypercolumn. Guard on the hypercolumn index instead.
    """
    if same_hypercolumn(q, w):
        return False
    for i in range(len(split_hc) - 1):
        if q in split_hc[i] and w in split_hc[i + 1]:
            return True
    return False
def prev_hypercolumn(q, w):
    """Return True when w lies in the hypercolumn immediately before q's.

    The original iterated every neuron j of split_hc[i] only to re-run a
    condition that never used j, repeating the same membership test
    len(split_hc[i]) times; the redundant inner loop is removed. Results
    are unchanged.
    """
    if same_hypercolumn(q, w):
        return False
    for i in range(1, len(split_hc)):
        if q in split_hc[i] and w in split_hc[i - 1]:
            return True
    return False
def diff_hypercolumns(q,w):
    """Weight between q and w when they sit in different hypercolumns."""
    # Feed-forward: layer 5 onto layer 4 of the *next* hypercolumn (p=0.20).
    if next_hypercolumn(q,w):
        if (q in layers5 and w in layers4):
            return flip(0.20,q)
    # Feed-back: layer 5 onto layer 2/3 of the *previous* hypercolumn (p=0.20).
    elif prev_hypercolumn(q,w):
        if (q in layers5 and w in layers23):
            return flip(0.20,q)
    # No connection otherwise.
    return 0
def both_exc(q, w):
    """Return True when q and w are excitatory cells in the same layer."""
    return (same_layer(q, w)
            and q in exc_nrns_set
            and w in exc_nrns_set)
def both_inh(q, w):
    """Return True when q and w are inhibitory cells in the same layer."""
    return (same_layer(q, w)
            and q in inh_nrns_set
            and w in inh_nrns_set)
""" Returns 1 under probability 'p', else 0 (0<=p<=1)"""
def flipAdj(p,q):
if q in exc_nrns_set:
return 1 if random.random() < p else 0
elif q in inh_nrns_set:
return -1 if random.random() < p else 0
def flip(p,q):
    """Random synaptic weight with probability p, else 0 (sign by cell type)."""
    #p+=.2
    # NOTE(review): r is hard-wired to 0; the commented-out
    # np.random.uniform(0, sigma) looks like an abandoned experiment.
    r=0# np.random.uniform(0,sigma)
    # Excitatory: normal noise centred on +0.5; inhibitory: centred on -0.5.
    # sigma is a module-level global; returns None when q is in neither set.
    if q in exc_nrns_set:
        return (np.random.normal(0,sigma)+.5) if random.random() < p-r else 0
    elif q in inh_nrns_set:
        return (np.random.normal(0,sigma)-.5) if random.random() < p-r else 0
def flip2(p,q):
    """Small random weight with probability p, else 0 (sign by cell type)."""
    # NOTE(review): a Decimal is handed to np.random.normal -- presumably
    # coerced to float there; a plain float literal would be clearer.
    a = decimal.Decimal(0.002083333)
    # abs() forces excitatory weights non-negative, inhibitory non-positive.
    if q in exc_nrns_set:
        return (abs(np.random.normal(0,a))) if random.random() < p else 0
    elif q in inh_nrns_set:
        return (-abs(np.random.normal(0,a))) if random.random() < p else 0
def check_zero(z):
    """Nudge +/-1 entries of z toward a zero total sum and return z.

    The -1/0/+1 occurrence counts are computed once up front; whichever
    sign dominates gets entries zeroed (or, with fewer than three distinct
    values, both signs are pulled toward 0) until the running sum hits 0.

    BUG FIX: the original only returned z from the `else` branch inside
    the loop, so when the loop finished without the sum reaching zero the
    function silently returned None. A final `return z` is added; early
    returns behave exactly as before.
    """
    unique, counts = np.unique(z, return_counts=True)
    occurence = np.asarray((unique, counts)).T
    for i in range(len(z)):
        if np.sum(z) != 0:
            # More -1s than +1s: drop -1 entries.
            if len(occurence)==3 and occurence[0][1]>occurence[2][1]:
                if z[i] == -1:
                    z[i] = 0
            # More +1s than -1s: drop +1 entries.
            elif len(occurence)==3 and occurence[2][1]>occurence[0][1]:
                if z[i] == 1:
                    z[i] = 0
            # Fewer than three distinct values: pull both signs toward 0.
            elif len(occurence) < 3:
                if z[i] == -1:
                    z[i] += 1
                if z[i] == 1:
                    z[i] -= 1
        else:
            return z
    return z
def balance(l):
    """Shift the negative entries of l so the list sums to (about) zero.

    Computes the positive-sum / negative-sum imbalance and spreads it
    evenly over the negative entries, mutating l in place and returning it.

    ROBUSTNESS FIX: the original divided by c2 unconditionally and raised
    ZeroDivisionError for a list with no negative entries; such a list is
    now returned unchanged.
    """
    N = len(l)
    meanP, meanN = 0,0
    c1, c2 = 0,0
    for i in range(N):
        if l[i] > 0:
            meanP += l[i]
            c1+=1
        if l[i] < 0:
            meanN += l[i]
            c2+=1
    # Nothing to rebalance when there are no negative entries.
    if c2 == 0:
        return l
    diff = abs(meanP)-abs(meanN)
    for i in range(N):
        if l[i] < 0:
            l[i] -= diff/(c2)
    return l
""" Total sum of conn_matrix weights becomes zero """
def balanceN(mat):
N = len(mat)
sumP,sumN = 0,0
c,c2=0,0
for i in range(N):
for j in range(N):
if mat[j][i] > 0:
sumP += mat[j][i]
c+=1
elif mat[j][i] < 0:
sumN += mat[j][i]
c2+=1
diff = sumP + sumN
for i in range(N):
for j in range(N):
if mat[j][i] < 0:
mat[j][i] -= diff/c2
""" Returns a counter 'c' in case a number 'n' is not (close to) zero """
def check_count(c, n):
if n <= -1e-4 or n>= 1e-4:
c+=1
return c
| gpl-2.0 |
team-xue/xue | xue/cms/plugins/twitter/models.py | 15 | 1168 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
class TwitterRecentEntries(CMSPlugin):
    # CMS plugin configuration: displays the latest tweets of one user.
    # NOTE: field order is significant for Django migrations; do not reorder.
    title = models.CharField(_('title'), max_length=75, blank=True)
    twitter_user = models.CharField(_('twitter user'), max_length=75)
    count = models.PositiveSmallIntegerField(_('count'), help_text=_('Number of entries to display'), default=3)
    link_hint = models.CharField(_('link hint'), max_length=75, blank=True, help_text=_('If given, the hint is displayed as link to your Twitter profile.'))

    def __unicode__(self):
        # Python 2 display label used by the CMS admin.
        return self.title
class TwitterSearch(CMSPlugin):
    # CMS plugin configuration: displays results of a Twitter search query.
    # NOTE: field order is significant for Django migrations; do not reorder.
    title = models.CharField(_('title'), max_length=75, blank=True)
    query = models.CharField(_('query'), max_length=200, blank=True, default='', help_text=_('Example: "brains AND zombies AND from:umbrella AND to:nemesis": tweets from the user "umbrella" to the user "nemesis" that contain the words "brains" and "zombies"'))
    count = models.PositiveSmallIntegerField(_('count'), help_text=_('Number of entries to display'), default=3)

    def __unicode__(self):
        # Python 2 display label used by the CMS admin.
        return self.title
kolypto/py-overc | overcli/monitor.py | 1 | 5985 | import logging
import subprocess, shlex
import threading
from datetime import datetime
logger = logging.getLogger(__name__)
class Service(object):
    """One monitored service: its check command, timing state and lag."""

    def __init__(self, period, name, cwd, command, max_lag=None):
        """ Define a service to be monitored
        :param period: Test period, seconds
        :type period: int
        :param name: Service name
        :type name: str
        :param cwd: Current working directory
        :type cwd: str
        :param command: Full command path that test service state
        :type command: str
        :param max_lag: Fixed lag allowance (seconds); when None, the
            measured per-check lag is used instead (see real_period).
        """
        self.period = period
        self.name = name
        self.cwd = cwd
        self.command = command

        #: Plugin execution time
        self.lag = 0
        self.max_lag = max_lag

        #: Timestamp when this service was tested last time
        self.last_tested = None

    def __str__(self):
        return self.name

    # Safety margins applied when computing the effective check period.
    PERIOD_MARGIN_FACTOR = 0.8
    LAG_MARGIN_FACTOR = 3.0

    @property
    def real_period(self):
        """ Real update period, including lags and safety reserves """
        # Shrink the nominal period by the margin factor, then subtract
        # either the fixed max_lag or a multiple of the measured lag;
        # never negative.
        return max(
            self.period * self.PERIOD_MARGIN_FACTOR -
            (self.max_lag if self.max_lag else self.lag * self.LAG_MARGIN_FACTOR),
            0.0)

    def next_update_in(self, now):
        """ Get the relative time for the next update
        :param now: Current datetime
        :type now: datetime
        :return: Delay, seconds
        :rtype: float
        """
        # Never updated: NOW!
        if self.last_tested is None:
            return 0.0

        # Was updated
        seconds_ago = (now - self.last_tested).total_seconds()
        delay = self.real_period - seconds_ago
        return max(delay, 0.0)  # don't allow it to be negative

    def get_state(self):
        """ Execute plugin and get service's state
        :return: Process state for the API
        :rtype: dict
        :exception OSError: Failed to execute plugin
        """
        # Execute command
        try:
            process = subprocess.Popen(
                shlex.split(self.command),
                cwd=self.cwd,
                stdin=None,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT
            )
            process.wait()
        except OSError, e:
            # Could not launch the plugin at all: report state UNK.
            error_msg = u'Failed to execute plugin `{}` by command `{}`: {}'.format(self.name, self.command, e.message)
            logger.exception(error_msg)
            return {
                'name': self.name,
                'state': 'UNK',
                'info': error_msg,
                'period': self.period
            }

        # Analyze the result
        try:
            # Info
            info = process.stdout.read()

            # Determine state
            # Exit code maps straight onto the state vocabulary.
            # NOTE(review): a negative returncode (process killed by a
            # signal) indexes this list from the end instead of raising
            # IndexError -- confirm that is intended.
            try:
                state = ['OK', 'WARN', 'FAIL', 'UNK'][process.returncode]
            except IndexError:
                logger.error(u'Plugin `{}` failed with code {}: {}'.format(self.name, process.returncode, info))
                state = 'UNK'

            # Finish
            return {
                'name': self.name,
                'state': state,
                'info': unicode(info).rstrip()
            }
        finally:
            process.stdout.close()
class ServicesMonitor(object):
    """Runs the per-service check plugins and schedules re-checks."""

    def __init__(self, services):
        """ Monitor for Services
        :param services: List of Services to be monitored
        :type services: list
        """
        self.services = services

    def _check_services(self, services):
        """ Check services provided as an argument
        :param services: List of services to test
        :type services: list[Service]
        :return: `services` argument value for reporting
        :rtype: list
        """
        now = datetime.utcnow()

        # Worker
        # NOTE(review): worker threads append to this shared list; relies
        # on CPython's GIL making list.append atomic -- confirm acceptable.
        service_states = []
        def task(service):
            # Get state, measure lag
            start = datetime.utcnow()
            state = service.get_state()
            finish = datetime.utcnow()
            # Update lag
            service.lag = (finish - start).total_seconds()
            # Add state
            service_states.append(state)
            logger.debug(u'Checked service {} (lag={}, real_period={}): last checked {} ago, state={}: {}'.format(
                service.name,
                service.lag,
                service.real_period,
                now - service.last_tested if service.last_tested else '(never)',
                state['state'], state['info']
            ))
            # Update timestamp
            service.last_tested = now

        # Run
        # One thread per service so slow plugins overlap instead of queueing.
        threads = [threading.Thread(target=task, args=(service,)) for service in services]
        for t in threads: t.start()
        for t in threads: t.join()
        # TODO: declare max waiting time. If any process doesnt manage to finish in time -- report it as a separate request

        return service_states

    def sleep_time(self):
        """ Determine how many seconds is it ok to sleep before any service state should be reported
        :rtype: float
        """
        now = datetime.utcnow()
        return min(service.next_update_in(now) for service in self.services)

    def check(self):
        """ Check services whose time has come, once.
        :return: (period, service_states) to be reported to the API
        :rtype: (int, dict)
        """
        # Determine which services to test
        # TODO: use a smarter algorithm to detect which services to check
        # A service is due when its next update falls within the largest
        # observed lag window.
        max_lag = max(service.lag for service in self.services)
        now = datetime.utcnow()
        services = [ service
                     for service in self.services
                     if service.next_update_in(now) <= max_lag
                     ]
        if not services:
            return 0, []
        period = max(service.period for service in services)

        # Test them
        service_states = self._check_services(services)

        # Report
        return int(period), service_states
| bsd-2-clause |
liqd/a4-meinberlin | meinberlin/apps/bplan/views.py | 1 | 2231 | from django.utils.translation import ugettext_lazy as _
from django.views import generic
from django.views.generic import TemplateView
from adhocracy4.dashboard.blueprints import ProjectBlueprint
from adhocracy4.dashboard.components.forms.views import \
ProjectComponentFormView
from adhocracy4.dashboard.mixins import DashboardBaseMixin
from meinberlin.apps.bplan import phases as bplan_phases
from meinberlin.apps.dashboard.mixins import DashboardProjectListGroupMixin
from meinberlin.apps.extprojects.views import ExternalProjectCreateView
from . import forms
from . import models
class BplanStatementSentView(TemplateView):
    # Static confirmation page shown after a bplan statement was submitted.
    template_name = 'meinberlin_bplan/statement_sent.html'
class BplanFinishedView(TemplateView):
    # Static page shown when the bplan participation phase has ended.
    template_name = 'meinberlin_bplan/bplan_finished.html'
class BplanProjectCreateView(ExternalProjectCreateView):
    # Dashboard view that creates a Bplan as an externally embedded project.
    model = models.Bplan
    slug_url_kwarg = 'project_slug'
    form_class = forms.BplanProjectCreateForm
    template_name = \
        'meinberlin_bplan/bplan_create_dashboard.html'
    success_message = _('Project was created.')

    # Single-phase blueprint: bplan projects only collect statements.
    blueprint = ProjectBlueprint(
        title=_('Development Plan'),
        description=_('Create a statement formular for development plans'
                      ' to be embedded on external sites.'),
        content=[
            bplan_phases.StatementPhase(),
        ],
        image='',
        settings_model=None,
    )
class BplanProjectUpdateView(ProjectComponentFormView):
    model = models.Bplan

    @property
    def project(self):
        # Narrow the generic Project from the parent view down to its
        # concrete Bplan instance (Project -> ExternalProject -> Bplan).
        project = super().project
        return project.externalproject.bplan

    def get_object(self, queryset=None):
        # The form edits the Bplan itself, not a related object.
        return self.project
class BplanProjectListView(DashboardProjectListGroupMixin,
                           DashboardBaseMixin,
                           generic.ListView):
    # Paginated dashboard list of the organisation's Bplan projects.
    model = models.Bplan
    paginate_by = 12
    template_name = 'meinberlin_bplan/bplan_list_dashboard.html'
    permission_required = 'a4projects.add_project'
    menu_item = 'project'

    def get_queryset(self):
        # Restrict to the organisation resolved by DashboardBaseMixin.
        return super().get_queryset().filter(
            organisation=self.organisation
        )

    def get_permission_object(self):
        # Permission is checked against the organisation, not a project.
        return self.organisation
| agpl-3.0 |
mrkm4ntr/incubator-airflow | airflow/contrib/utils/weekday.py | 7 | 1074 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.utils.weekday`."""
import warnings
# pylint: disable=unused-import
from airflow.utils.weekday import WeekDay # noqa
# Emit the deprecation notice at import time; stacklevel=2 attributes the
# warning to the module that imported this shim rather than to this file.
warnings.warn(
    "This module is deprecated. Please use `airflow.utils.weekday`.", DeprecationWarning, stacklevel=2
)
| apache-2.0 |
samuelmanzer/interpolation | chebyshev_nodes.py | 1 | 3491 | #!/usr/bin/env python
###############################################################################
# Interpolation
# Copyright (C) Samuel F. Manzer. All rights reserved.
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
#
# FILE: lagrange_interpolation.py
# AUTHOR: Samuel F. Manzer
# URL: http://www.samuelmanzer.com/
###############################################################################
from argparse import ArgumentParser
import numpy as np
import matplotlib.pyplot as plt
import itertools
import tempfile
import pdb
from lagrange_poly import *
parser = ArgumentParser("Produces plots of Lagrange interpolation of cos(x) for various numbers of Chebyshev and equally spaced points")
args = parser.parse_args()

# Interpolate cos(x) on [0, 5*pi/2], evaluated on a fine grid.
start = 0
end = (5*math.pi)/2
n_eval_pts = 1000
eval_step_size = float(end-start)/n_eval_pts
n_points_range = range(2,6,1)
x = np.linspace(start,end,n_eval_pts)
y_exact = np.cos(x)

# f1: the two interpolant plots; f2/f3: the error-statistics bar charts.
f1,(ax1,ax2) = plt.subplots(1,2,sharey=True)
ax1.set_ylim(ymin=-1.1,ymax=1.5)
ax1.set_title("Equally-Spaced")
ax2.set_title("Chebyshev")
f2,ax3 = plt.subplots()
f3,ax4 = plt.subplots()

# Equally spaced points
evenly_spaced_sets = [np.linspace(start,end,n_points) for n_points in n_points_range]
evenly_spaced_polys = [get_lagrange_poly(interp_points,math.cos) for interp_points in evenly_spaced_sets]
lines,mae_list,rmsd_list,maxe_list = plot_lagrange_polys(x,n_points_range,evenly_spaced_polys,y_exact,ax1)
texts_1 = plot_stats(mae_list,rmsd_list,maxe_list,n_points_range,ax3)
f1.legend(lines,map(lambda x: str(x)+" Points",n_points_range)+["cos(x)"],loc="upper right")

# Chebyshev points - we must transform them to our interval
# Nodes cos((2k-1)/(2n) * pi) on [-1, 1], then affinely mapped to [start, end].
cp_sets = [ [ math.cos((float(2*k - 1)/(2*n))*math.pi) for k in range(1,n+1)] for n in n_points_range ]
tcp_sets = [ [ 0.5*((end - start)*pt + start + end) for pt in point_set] for point_set in cp_sets]
chebyshev_point_polys = [get_lagrange_poly(interp_points,math.cos) for interp_points in tcp_sets]
lines,mae_list,rmsd_list,maxe_list = plot_lagrange_polys(x,n_points_range,chebyshev_point_polys,y_exact,ax2)
texts_2 = plot_stats(mae_list,rmsd_list,maxe_list,n_points_range,ax4)

ax3.set_title("Lagrange Interpolation with Equally-Spaced Points")
ax4.set_title("Lagrange Interpolation with Chebyshev Points")

# Awful haxx for text labels above bars to not get cut off by top of figure
# (savefig to a throwaway file forces a draw so renderer caches exist).
tmp_file = tempfile.NamedTemporaryFile()
f2.savefig(tmp_file.name)
f3.savefig(tmp_file.name)
renderer_2 = f2.axes[0].get_renderer_cache()
renderer_3 = f3.axes[0].get_renderer_cache()
for (ax,renderer,texts) in [(ax3,renderer_2,texts_1),(ax4,renderer_3,texts_2)]:
    # Convert each label's window bbox to data coords and grow the y-limit
    # past the tallest label.
    window_bbox_list = [t.get_window_extent(renderer) for t in texts]
    data_bbox_list = [b.transformed(ax.transData.inverted()) for b in window_bbox_list]
    data_coords_list = [b.extents for b in data_bbox_list]
    heights = [ coords[-1] for coords in data_coords_list]
    ax.set_ylim(ymax=max(heights)*1.05)
plt.show()
| lgpl-3.0 |
NunoEdgarGub1/nupic | examples/tp/tp_constant_test.py | 8 | 5022 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This file tests that we can learn and predict the particularly vexing case of a
single constant signal!
"""
import numpy as np
import unittest2 as unittest
from nupic.research import fdrutilities as fdrutils
from nupic.research.TP import TP
from nupic.research.TP10X2 import TP10X2
from nupic.support.unittesthelpers.testcasebase import (TestCaseBase,
TestOptionParser)
def _printOneTrainingVector(x):
    "Print a single vector succinctly."
    # '1' marks an active bit, '.' an inactive one (Python 2 print).
    print ''.join('1' if k != 0 else '.' for k in x)
def _getSimplePatterns(numOnes, numPatterns):
    """Very simple patterns. Each pattern has numOnes consecutive
    bits on. There are numPatterns*numOnes bits in the vector. These patterns
    are used as elements of sequences when building up a training set."""

    numCols = numOnes * numPatterns
    p = []
    for i in xrange(numPatterns):
        # Pattern i activates the bit range [i*numOnes, (i+1)*numOnes).
        x = np.zeros(numCols, dtype='float32')
        x[i*numOnes:(i + 1)*numOnes] = 1
        p.append(x)

    return p
def _createTps(numCols):
    """Create two instances of temporal poolers (TP.py and TP10X2.py) with
    identical parameter settings."""

    # Keep these fixed:
    minThreshold = 4
    activationThreshold = 5
    newSynapseCount = 7
    initialPerm = 0.3
    connectedPerm = 0.5
    permanenceInc = 0.1
    permanenceDec = 0.05
    globalDecay = 0
    cellsPerColumn = 1

    # NOTE: SEED and VERBOSITY are module-level globals assigned in the
    # __main__ block below.
    cppTp = TP10X2(numberOfCols=numCols, cellsPerColumn=cellsPerColumn,
                   initialPerm=initialPerm, connectedPerm=connectedPerm,
                   minThreshold=minThreshold, newSynapseCount=newSynapseCount,
                   permanenceInc=permanenceInc, permanenceDec=permanenceDec,
                   activationThreshold=activationThreshold,
                   globalDecay=globalDecay, burnIn=1,
                   seed=SEED, verbosity=VERBOSITY,
                   checkSynapseConsistency=True,
                   pamLength=1000)

    # Ensure we are copying over learning states for TPDiff
    cppTp.retrieveLearningStates = True

    pyTp = TP(numberOfCols=numCols, cellsPerColumn=cellsPerColumn,
              initialPerm=initialPerm, connectedPerm=connectedPerm,
              minThreshold=minThreshold, newSynapseCount=newSynapseCount,
              permanenceInc=permanenceInc, permanenceDec=permanenceDec,
              activationThreshold=activationThreshold,
              globalDecay=globalDecay, burnIn=1,
              seed=SEED, verbosity=VERBOSITY,
              pamLength=1000)

    return cppTp, pyTp
class TPConstantTest(TestCaseBase):
    """Learn and predict a constant signal with both TP implementations."""

    def setUp(self):
        # One C++-backed and one pure-Python temporal pooler, same params.
        self.cppTp, self.pyTp = _createTps(100)

    def _basicTest(self, tp=None):
        """Test creation, pickling, and basic run of learning and inference."""
        trainingSet = _getSimplePatterns(10, 10)

        # Learn on several constant sequences, with a reset in between
        # NOTE(review): loop nesting reconstructed from the original
        # upstream file -- reset happens after each 10-repetition burst.
        for _ in range(2):
            for seq in trainingSet[0:5]:
                for _ in range(10):
                    tp.learn(seq)
                tp.reset()

        print "Learning completed"

        # Infer
        print "Running inference"

        tp.collectStats = True
        for seq in trainingSet[0:5]:
            tp.reset()
            tp.resetStats()
            for _ in range(10):
                tp.infer(seq)
                if VERBOSITY > 1 :
                    print
                    _printOneTrainingVector(seq)
                    tp.printStates(False, False)
                    print
                    print
            if VERBOSITY > 1:
                print tp.getStats()

            # Ensure our predictions are accurate for each sequence
            self.assertGreater(tp.getStats()['predictionScoreAvg2'], 0.8)
            print ("tp.getStats()['predictionScoreAvg2'] = ",
                   tp.getStats()['predictionScoreAvg2'])

        print "TPConstant basicTest ok"

    def testCppTpBasic(self):
        self._basicTest(self.cppTp)

    def testPyTpBasic(self):
        self._basicTest(self.pyTp)

    def testIdenticalTps(self):
        # The two implementations must end up in identical states.
        self.assertTrue(fdrutils.tpDiff2(self.cppTp, self.pyTp))
if __name__=="__main__":
parser = TestOptionParser()
options, _ = parser.parse_args()
SEED = options.seed
VERBOSITY = options.verbosity
np.random.seed(SEED)
unittest.main()
| gpl-3.0 |
tempbottle/h-store | third_party/python/boto/ec2/autoscale/activity.py | 57 | 3059 | # Copyright (c) 2009-2011 Reza Lotun http://reza.lotun.name/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from datetime import datetime
class Activity(object):
    """A single Auto Scaling activity built up from SAX parse events.

    IMPROVEMENT: the StartTime/EndTime branches each duplicated the same
    strptime try/except; the parsing is factored into _parse_timestamp.
    External interface (attributes, startElement/endElement) is unchanged.
    """

    def __init__(self, connection=None):
        self.connection = connection
        self.start_time = None
        self.end_time = None
        self.activity_id = None
        self.progress = None
        self.status_code = None
        self.cause = None
        self.description = None
        self.status_message = None
        self.group_name = None

    def __repr__(self):
        # NOTE(review): the 'progress' slot is filled with status_message
        # in the original -- kept byte-for-byte for compatibility.
        return 'Activity<%s>: For group:%s, progress:%s, cause:%s' % (self.activity_id,
                                                                      self.group_name,
                                                                      self.status_message,
                                                                      self.cause)

    @staticmethod
    def _parse_timestamp(value):
        """Parse an ISO-8601 'Z' timestamp, with or without microseconds."""
        try:
            return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%fZ')
        except ValueError:
            return datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')

    def startElement(self, name, attrs, connection):
        # No nested elements to handle.
        return None

    def endElement(self, name, value, connection):
        # Map each closing XML element onto the matching attribute.
        if name == 'ActivityId':
            self.activity_id = value
        elif name == 'AutoScalingGroupName':
            self.group_name = value
        elif name == 'StartTime':
            self.start_time = self._parse_timestamp(value)
        elif name == 'EndTime':
            self.end_time = self._parse_timestamp(value)
        elif name == 'Progress':
            self.progress = value
        elif name == 'Cause':
            self.cause = value
        elif name == 'Description':
            self.description = value
        elif name == 'StatusMessage':
            self.status_message = value
        elif name == 'StatusCode':
            self.status_code = value
        else:
            # Unknown elements become plain attributes, as before.
            setattr(self, name, value)
| gpl-3.0 |
ehashman/oh-mainline | mysite/search/view_helpers.py | 15 | 18666 | # This file is part of OpenHatch.
# Copyright (C) 2010 Parker Phinney
# Copyright (C) 2009, 2010 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import hashlib
import logging
import django.core.cache
from django.db import connection
from django.db.models import Q, Count
from django.utils import http
import mysite.search.models
import mysite.search.views
import mysite.base.decorators
CCT = 'hit_count_cache_timestamp'
logger = logging.getLogger(__name__)
def order_bugs(query):
    """Order a Bug queryset: bitesize bugs first, most recently touched first."""
    # Minus sign: reverse order
    # Minus good for newcomers: this means true values
    # (like 1) appear before false values (like 0)
    # Minus last touched: Old bugs last.
    return query.order_by('-good_for_newcomers', '-last_touched')
class Query:
    def __init__(self, terms=None, active_facet_options=None,
                 any_facet_options=False, terms_string=None):
        # Search terms (list of words/phrases); `or []` avoids sharing a
        # mutable default.
        self.terms = terms or []
        # Facet name -> selected option; the decorator rejects/coerces any
        # non-unicode str keys or values.
        self.active_facet_options = (
            mysite.base.decorators.no_str_in_the_dict(active_facet_options)
            or {}
        )
        self.any_facet_options = any_facet_options or []
        # Keep the raw query text for display/URL round-tripping
        # (Python 2: coerce bytes to unicode).
        if type(terms_string) == str:
            terms_string = unicode(terms_string, 'utf-8')
        self._terms_string = terms_string
    @property
    def terms_string(self):
        # Raw query text as typed by the user; a Query built without one
        # cannot render it, so fail loudly rather than guess.
        if self._terms_string is None:
            raise ValueError
        return self._terms_string
    @staticmethod
    def split_into_terms(string):
        """Split a query string into terms, honoring "quoted phrases"."""
        # We're given some query terms "between quotes"
        # and some glomped on with spaces.
        # Strategy: Find the strings validly inside quotes, and remove them
        # from the original string. Then split the remainder (and probably
        # trim whitespace from the remaining terms).
        ret = []
        # re.split with a capturing group alternates unquoted text (even
        # indices) with the quoted matches themselves (odd indices).
        splitted = re.split(r'(".*?")', string)
        for (index, word) in enumerate(splitted):
            if (index % 2) == 0:
                ret.extend(word.split())
            else:
                assert word[0] == '"'
                assert word[-1] == '"'
                # Strip the surrounding quotes, keep the phrase intact.
                ret.append(word[1:-1])
        return ret
    @staticmethod
    def create_from_GET_data(GET):
        """Build a Query from a request's GET parameters (q + facets)."""
        possible_facets = [u'language', u'toughness', u'contribution_type',
                           u'project']
        active_facet_options = {}
        any_facet_options = []
        for facet in possible_facets:
            # Non-empty value: that facet option is active.
            if GET.get(facet):
                active_facet_options[facet] = GET.get(facet)
            # Only select any_facet if a facet is empty string, not None
            elif GET.get(facet) == '':
                any_facet_options.append(facet)
        terms_string = GET.get('q', u'')
        terms = Query.split_into_terms(terms_string)
        return Query(terms=terms,
                     active_facet_options=active_facet_options,
                     any_facet_options=any_facet_options,
                     terms_string=terms_string)
    def get_bugs_unordered(self):
        """Return the open Bug queryset matching this query (unordered)."""
        return mysite.search.models.Bug.open_ones.filter(self.get_Q())
    def __nonzero__(self):
        # Python 2 truthiness: a Query is "empty" when it carries no terms
        # and no facet options at all.
        if self.terms or self.active_facet_options or self.any_facet_options:
            return 1
        return 0
    def get_Q(self, exclude_active_facets=False):
        """Get a Q object which can be passed to Bug.open_ones.filter()"""
        # Begin constructing a conjunction of Q objects (filters)
        q = Q()

        # toughness facet
        toughness_is_active = ('toughness' in self.active_facet_options.keys())
        exclude_toughness = exclude_active_facets and toughness_is_active
        if (self.active_facet_options.get('toughness', None) == 'bitesize'
                and not exclude_toughness):
            q &= Q(good_for_newcomers=True)

        # language facet
        language_is_active = (u'language' in self.active_facet_options.keys())
        exclude_language = exclude_active_facets and language_is_active
        if u'language' in self.active_facet_options and not exclude_language:
            language_value = self.active_facet_options[u'language']
            # 'Unknown' in the UI maps to an empty language field in the DB.
            if language_value == 'Unknown':
                language_value = ''
            q &= Q(project__language__iexact=language_value)

        # project facet
        # FIXME: Because of the way the search page is set up, we have to use
        # the project's display_name to identify the project, which isn't
        # very nice.
        project_is_active = (u'project' in self.active_facet_options.keys())
        exclude_project = exclude_active_facets and project_is_active
        if u'project' in self.active_facet_options and not exclude_project:
            project_value = self.active_facet_options[u'project']
            q &= Q(project__display_name__iexact=project_value)

        # contribution type facet
        contribution_type_is_active = ('contribution_type' in
                                       self.active_facet_options.keys())
        exclude_contribution_type = exclude_active_facets and contribution_type_is_active
        if (self.active_facet_options.get('contribution_type', None) == 'documentation'
                and not exclude_contribution_type):
            q &= Q(concerns_just_documentation=True)

        # NOTE: This is a terrible hack. We should stop doing this and
        # just ditch this entire class and swap it out for something like
        # haystack.
        if connection.vendor == 'sqlite':
            use_regexes = False
        else:
            use_regexes = False  # HACK for now, while Hacker News is visiting

        # Every term must match at least one field (an AND of ORs).
        for word in self.terms:
            if use_regexes:
                # Whole-word match via MySQL word-boundary markers.
                whole_word = "[[:<:]]%s($|[[:>:]])" % (
                    mysite.base.view_helpers.mysql_regex_escape(word))
                terms_disjunction = (
                    Q(project__language__iexact=word) |
                    Q(title__iregex=whole_word) |
                    Q(description__iregex=whole_word) |
                    Q(as_appears_in_distribution__iregex=whole_word) |

                    # 'firefox' grabs 'mozilla firefox'.
                    Q(project__name__iregex=whole_word)
                )
            else:
                terms_disjunction = (
                    Q(project__language__icontains=word) |
                    Q(title__icontains=word) |
                    Q(description__icontains=word) |
                    Q(as_appears_in_distribution__icontains=word) |

                    # 'firefox' grabs 'mozilla firefox'.
                    Q(project__name__icontains=word)
                )
            q &= terms_disjunction

        return q
    def get_facet_option_data(self, facet_name, option_name):
        """Build the display dict (name, count, URL, active) for one facet option."""
        # Create a Query for this option.
        # This Query is sensitive to the currently active facet options...
        GET_data = dict(self.active_facet_options)
        # ...except the toughness facet option in question.
        GET_data.update({
            u'q': unicode(self.terms_string),
            unicode(facet_name): unicode(option_name),
        })
        query_string = http.urlencode(GET_data)
        query = Query.create_from_GET_data(GET_data)
        the_all_option = u'any'
        # Empty option name denotes the catch-all 'any' option.
        name = option_name or the_all_option

        active_option_name = self.active_facet_options.get(facet_name, None)

        # This facet isn't active...
        is_active = False

        # ...unless there's an item in active_facet_options mapping the
        # current facet_name to the option whose data we're creating...
        if active_option_name == option_name:
            is_active = True

        # ...or if this is the 'any' option and there is no active option
        # for this facet.
        if name == the_all_option and active_option_name is None:
            is_active = True

        return {
            'name': name,
            'count': query.get_or_create_cached_hit_count(),
            'query_string': query_string,
            'is_active': is_active
        }
    def get_facet_options(self, facet_name, option_names):
        """Return sorted display dicts for every option of one facet."""
        # Assert that there are only unicode strings in this list
        option_names = mysite.base.decorators.no_str_in_the_list(option_names)

        options = [self.get_facet_option_data(facet_name, n)
                   for n in option_names]
        # ^^ that's a list of facet options, where each "option" is a
        # dictionary that looks like this:
        # {
        #     'name': name,
        #     'count': query.get_or_create_cached_hit_count(),
        #     'query_string': query_string,
        #     'is_active': is_active
        # }

        # Now we're gonna sort these dictionaries.
        # Active facet options first. Then non-'Unknowns'. Then by number of
        # bugs. Then alphabetically.

        # Note that these keys are in ascending order of precedence. So the
        # last one trumps all the previous sortings. (Python's sort is
        # stable, which is what makes this multi-pass approach work.)

        options.sort(key=lambda x: x['name'])
        # Sort alphabetically by name. (This appears first because it has the
        # lowest precedence.)
        # 3 sorts before 50

        options.sort(key=lambda x: x['count'], reverse=True)
        # We want facet options that contain lots of bugs to appear at the top.
        # If you sort (naively) by x['count'], then the lower numbers appear
        # higher in the list. Let's reverse that with reverse=True.

        options.sort(
            key=lambda x: (facet_name == 'language') and (x['name'] == 'Unknown'))
        # We want the Unknown language to appear last, unless it's active. If
        # the key lambda function returns False, then those options appear
        # first (because False appears before True), which is what we want.

        options.sort(key=lambda x: x['is_active'], reverse=True)
        # We want the value True to sort before the value False. So let's
        # reverse this comparison (because normally False sorts before True,
        # just like zero comes before one).

        return options
    def get_possible_facets(self):
        """Assemble the facet data shown in the search sidebar.

        Returns a tuple of (facet_name, facet_dict) pairs for the four
        facets: language, project, toughness, and contribution_type. Each
        facet_dict carries the GET parameter name, UI headings, the sorted
        option list from get_facet_options(), and a synthetic 'any' option
        built by requesting the empty-string option name.
        """
        project_options = self.get_facet_options(
            u'project', self.get_project_names())
        toughness_options = self.get_facet_options(u'toughness', [u'bitesize'])
        contribution_type_options = self.get_facet_options(
            u'contribution_type', [u'documentation'])
        language_options = self.get_facet_options(
            u'language', self.get_language_names())
        # looks something like:
        # [{'count': 1180L, 'query_string': 'q=&language=Python',
        #   'is_active': False, 'name': u'Python'},
        #  {'count': 478L, 'query_string': 'q=&language=C%23',
        #   'is_active': False, 'name': u'C#'},
        #  {'count': 184L, 'query_string': 'q=&language=Unknown',
        #   'is_active': False, 'name': 'Unknown'},
        #  {'count': 532L, 'query_string': 'q=&language=C',
        #   'is_active': False, 'name': u'C'},
        #  {'count': 2374L, 'query_string': 'q=&language=',
        #   'is_active': True, 'name': 'any'}]
        possible_facets = (
            # The languages facet is based on the project languages, "for
            # now"
            (u'language', {
                u'name_in_GET': u"language",
                u'sidebar_heading': u"Languages",
                u'description_above_results': u"projects primarily coded in %s",
                u'options': language_options,
                u'the_any_option': self.get_facet_options(u'language', [u''])[0],
                u'sorted_by': u'(# of bugs)',
            }),
            (u'project', {
                u'name_in_GET': u'project',
                u'sidebar_heading': u'Projects',
                u'description_above_results': 'in the %s project',
                u'options': project_options,
                u'the_any_option': self.get_facet_options(u'project', [u''])[0],
                u'sorted_by': u'(# of bugs)',
            }),
            (u'toughness', {
                u'name_in_GET': u"toughness",
                u'sidebar_heading': u"Toughness",
                u'description_above_results': u"where toughness = %s",
                u'options': toughness_options,
                u'the_any_option': self.get_facet_options(u'toughness', [u''])[0],
                u'sorted_by': u'(# of bugs)',
            }),
            (u'contribution_type', {
                u'name_in_GET': u"contribution_type",
                u'sidebar_heading': u"Just bugs labeled...",
                u'description_above_results': u"which need %s",
                u'options': contribution_type_options,
                u'the_any_option': self.get_facet_options(u'contribution_type', [u''])[0],
                u'sorted_by': u'(# of bugs)',
            })
        )
        return possible_facets
    def get_GET_data(self):
        """Rebuild the GET parameter dict for this query.

        The free-text search terms go under the u'q' key, and every active
        facet option is merged in on top. NOTE: uses the Python 2 ``unicode``
        builtin, so this module is Python 2 only.
        """
        GET_data = {u'q': unicode(self.terms_string)}
        GET_data.update(self.active_facet_options)
        return GET_data
    def get_language_names(self):
        """List the distinct project languages among this query's bugs.

        The query is re-run *without* any active language facet so the
        sidebar can show all language choices, not just the selected one.
        NULL/empty languages are collapsed into the u'Unknown' bucket, and
        the currently active language (if any) is always included.
        """
        GET_data = self.get_GET_data()
        # Drop the language facet so the counts cover every language.
        if u'language' in GET_data:
            del GET_data[u'language']
        query_without_language_facet = Query.create_from_GET_data(GET_data)
        bugs = query_without_language_facet.get_bugs_unordered()
        distinct_language_columns = bugs.values(
            u'project__language').distinct()
        languages = [x[u'project__language']
                     for x in distinct_language_columns]
        # Falsy (NULL or empty) languages become 'Unknown'.
        languages = [l or u'Unknown' for l in languages]
        # Add the active language facet, if there is one
        if u'language' in self.active_facet_options:
            active_language = self.active_facet_options[u'language']
            if active_language not in languages:
                languages.append(active_language)
        return languages
def get_active_facet_options_except_toughness(self):
if 'toughness' not in self.active_facet_options:
return self.active_facet_options
options = self.active_facet_options.copy()
del options['toughness']
return options
    def get_project_names(self):
        """List the display names of projects among this query's bugs.

        Mirrors get_language_names(): the query is re-run without any
        active project facet so the sidebar shows all project choices,
        and the currently active project (if any) is always included.
        """
        Project = mysite.search.models.Project
        GET_data = self.get_GET_data()
        # Drop the project facet so the counts cover every project.
        if u'project' in GET_data:
            del GET_data[u'project']
        query_without_project_facet = Query.create_from_GET_data(GET_data)
        bugs = query_without_project_facet.get_bugs_unordered()
        project_ids = list(
            bugs.values_list(u'project__id', flat=True).distinct())
        projects = Project.objects.filter(id__in=project_ids)
        # Falsy display names fall back to 'Unknown'.
        project_names = [
            project.display_name or u'Unknown' for project in projects]
        # Add the active project facet, if there is one
        if u'project' in self.active_facet_options:
            name_of_active_project = self.active_facet_options[u'project']
            if name_of_active_project not in project_names:
                project_names.append(name_of_active_project)
        return project_names
    def get_sha1(self):
        """Return a SHA-1 hex digest uniquely identifying this query.

        The digest covers the sorted search terms and the sorted active
        facet options, so two equivalent queries hash identically
        regardless of term/option order. NOTE: Python 2 only — hashlib
        here is fed a ``str``, which py3 would reject (bytes required).
        """
        # first, make a dictionary mapping strings to strings
        simple_dictionary = {}
        # add terms_string
        simple_dictionary[u'terms'] = str(sorted(self.terms))
        # add active_facet_options
        simple_dictionary[u'active_facet_options'] = str(
            sorted(self.active_facet_options.items()))
        stringified = str(sorted(simple_dictionary.items()))
        # then return a hash of our sorted items self.
        # sadly we cause a 2x space blowup here
        return hashlib.sha1(stringified).hexdigest()
    def get_hit_count_cache_key(self):
        """Build the cache key under which this query's hit count is stored.

        The key combines a hash of the current CCT timestamp with the
        query's own SHA-1, so refreshing the timestamp (e.g. after bug
        data changes — presumably; confirm against Timestamp's callers)
        implicitly invalidates every cached hit count.
        """
        hashed_query = self.get_sha1()
        hcc_timestamp = mysite.base.models.Timestamp.get_timestamp_for_string(
            CCT)
        hit_count_cache_key = "hcc_%s_%s" % (
            hashlib.sha1(hcc_timestamp.__str__()).hexdigest(), hashed_query)
        return hit_count_cache_key
    def get_or_create_cached_hit_count(self):
        """Return this query's bug count, caching the result in Django's cache.

        On a cache miss the count is computed via get_bugs_unordered()
        and stored under the timestamp-scoped key from
        get_hit_count_cache_key().
        """
        # Get the cache key used to store the hit count.
        hit_count_cache_key = self.get_hit_count_cache_key()
        # Fetch the hit count from the cache.
        hit_count = django.core.cache.cache.get(hit_count_cache_key)
        logger.debug("Cached hit count: " + str(hit_count))
        # We need to be careful to check if the count is None, rather than
        # just if the count is a false value. That's because a value of zero
        # is still a cached value; if we just use a boolean test, we would
        # mistake the zero value for an empty cache and regenerate the cache
        # needlessly.
        if hit_count is None:
            # There is nothing in the cache for this key. Either the
            # query has not been counted before, or the Timestamp has
            # been refreshed due to a change in the Bug objects. So get
            # a new count.
            hit_count = self.get_bugs_unordered().count()
            django.core.cache.cache.set(hit_count_cache_key, hit_count)
            logger.debug("Set hit count: " + str(hit_count))
        # TODO: Add sql query in the logger
        logger.debug("Hit Count:" + str(hit_count))
        return hit_count
    def get_query_string(self):
        """URL-encode this query's GET data into a query string (no '?')."""
        GET_data = self.get_GET_data()
        query_string = http.urlencode(GET_data)
        logger.debug("Query is " + query_string)
        return query_string
def get_project_count():
    """Retrieve the number of projects currently indexed."""
    # Count distinct project ids across every indexed bug.
    bugs = mysite.search.models.Bug.all_bugs.all()
    return bugs.values(u'project').distinct().count()
def get_projects_with_bugs():
    """
    Return a sorted list of all the Projects for which we've indexed bugs.

    Projects are annotated with bug_count, filtered to those having at
    least one bug, and ordered by display_name.
    """
    projects = mysite.search.models.Project.objects.annotate(
        bug_count=Count('bug')).filter(
        bug_count__gt=0).order_by(u'display_name')
    return projects
def get_cited_projects_lacking_bugs():
    """Return Projects cited in published portfolio entries that have no
    indexed bugs (bug_count == 0)."""
    project_ids = mysite.profile.models.PortfolioEntry.published_ones.all().values_list(
        'project_id', flat=True)
    return mysite.search.models.Project.objects.filter(id__in=project_ids).annotate(
        bug_count=Count('bug')).filter(bug_count=0)
| agpl-3.0 |
Mistobaan/tensorflow | tensorflow/contrib/quantization/python/array_ops.py | 178 | 1156 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Quantized Array Operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.ops import gen_array_ops as quantized_gen_array_ops
from tensorflow.python.ops.gen_array_ops import dequantize
from tensorflow.python.ops.gen_array_ops import quantize_v2
from tensorflow.python.ops.gen_array_ops import quantized_concat
# pylint: enable=unused-import
| apache-2.0 |
Santhosh114/Hadoop-Fundamentals-for-Data-Scientists-Oreilly | mllib/collaborative_filtering/als/matchmaker.py | 2 | 4452 | #!/usr/bin/env python
import sys
import random
from math import sqrt
from operator import add
from pyspark import SparkConf, SparkContext
from pyspark.mllib.recommendation import ALS
##########################################################################
# Helpers
##########################################################################
def parse_rating(line, sep=','):
    """
    Parse one ratings record of the form "<user_id><sep><profile_id><sep><rating>".

    Returns: tuple of (random integer in [1, 10], (user_id, profile_id, rating))
    where user_id and profile_id are ints and rating is a float. The random
    bucket is used downstream to split records into training/validation sets.
    """
    fields = line.strip().split(sep)
    record = (int(fields[0]), int(fields[1]), float(fields[2]))
    return random.randint(1, 10), record
def parse_user(line, sep=','):
    """
    Parse one gender record of the form "<user_id><sep><gender>".

    Returns: tuple of (user_id, gender) with user_id converted to int.
    """
    parts = line.strip().split(sep)
    return int(parts[0]), parts[1]
def compute_rmse(model, data, n):
    """
    Compute RMSE (Root Mean Squared Error), or square root of the average value
    of (actual rating - predicted rating)^2

    model: trained model exposing predictAll() (from ALS.train)
    data:  RDD of (user_id, profile_id, rating) triples to score against
    n:     number of ratings in `data` (passed in to avoid a recount)
    """
    # Predict a rating for every (user, profile) pair in the data set.
    predictions = model.predictAll(data.map(lambda x: (x[0], x[1])))
    # Key both RDDs by (user, profile) and join, yielding
    # (predicted, actual) rating pairs.
    predictions_ratings = predictions.map(lambda x: ((x[0], x[1]), x[2])) \
        .join(data.map(lambda x: ((x[0], x[1]), x[2]))) \
        .values()
    # Mean of squared errors, then square root.
    return sqrt(predictions_ratings.map(lambda x: (x[0] - x[1]) ** 2).reduce(add) / float(n))
##########################################################################
# Main
##########################################################################
if __name__ == "__main__":
    # NOTE: Python 2 script (print statements, xrange).
    if len(sys.argv) != 3:
        print "Incorrect number of arguments, correct usage: dating_recommender.py [user_id] [match_gender]"
        sys.exit(-1)

    # Configure Spark
    conf = SparkConf().setMaster("local") \
                      .setAppName("Dating Recommender") \
                      .set("spark.executor.memory", "2g")
    sc = SparkContext(conf=conf)

    matchseeker = int(sys.argv[1])
    gender_filter = sys.argv[2]

    # Create ratings RDD of (randint, (user_id, profile_id, rating))
    ratings = sc.textFile("/home/hadoop/hadoop-fundamentals/data/dating/ratings.dat") \
        .map(parse_rating)

    # Create users RDD mapping user_id -> gender
    users = dict(sc.textFile("/home/hadoop/hadoop-fundamentals/data/dating/gender.dat") \
        .map(parse_user) \
        .collect())

    # Create the training (~60%) and validation (~40%) sets, split on the
    # random bucket (1-10) that parse_rating attached to each record.
    num_partitions = 4
    training = ratings.filter(lambda x: x[0] < 6) \
        .values() \
        .repartition(num_partitions) \
        .cache()
    validation = ratings.filter(lambda x: x[0] >= 6) \
        .values() \
        .repartition(num_partitions) \
        .cache()

    num_training = training.count()
    num_validation = validation.count()

    print "Training: %d and validation: %d\n" % (num_training, num_validation)

    # rank is the number of latent factors in the model.
    # iterations is the number of iterations to run.
    # lambda specifies the regularization parameter in ALS
    rank = 8
    num_iterations = 8
    lmbda = 0.1

    # Train model with training data and configured rank and iterations
    model = ALS.train(training, rank, num_iterations, lmbda)

    # evaluate the trained model on the validation set
    print "The model was trained with rank = %d, lambda = %.1f, and %d iterations.\n" % \
        (rank, lmbda, num_iterations)
    # Print RMSE of model
    validation_rmse = compute_rmse(model, validation, num_validation)
    print "Its RMSE on the validation set is %f.\n" % validation_rmse

    # Keep only candidate users of the requested gender.
    partners = sc.parallelize([u[0] for u in filter(lambda u: u[1] == gender_filter, users.items())])

    # run predictions with trained model
    predictions = model.predictAll(partners.map(lambda x: (matchseeker, x))).collect()

    # sort the recommendations by predicted rating, highest first; keep top 10
    recommendations = sorted(predictions, key=lambda x: x[2], reverse=True)[:10]

    print "Eligible partners recommended for User ID: %d" % matchseeker
    for i in xrange(len(recommendations)):
        print ("%2d: %s" % (i + 1, recommendations[i][1])).encode('ascii', 'ignore')

    # clean up
    sc.stop()
| mit |
okuta/chainer | tests/chainer_tests/functions_tests/activation_tests/test_softmax.py | 8 | 3610 | import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr
@testing.parameterize(*testing.product_dict(
    [
        {'shape': None, 'axis': 1},
        {'shape': (5,), 'axis': 0},
        {'shape': (2, 3), 'axis': 0},
        {'shape': (2, 3), 'axis': 1},
        {'shape': (2, 3, 4), 'axis': 0},
        {'shape': (2, 3, 4), 'axis': -1},
        {'shape': (2, 3, 2, 3), 'axis': -3},
        {'shape': (2, 3, 2, 3), 'axis': 3},
    ],
    testing.product({
        'dtype': [numpy.float16, numpy.float32, numpy.float64],
    }),
))
@testing.fix_random()
@testing.inject_backend_tests(
    None,
    # CPU tests
    [
        {},
        {'use_ideep': 'always'},
    ]
    # GPU tests
    + testing.product({
        'use_cuda': [True],
        'cuda_device': [0, 1],
    })
    # ChainerX tests
    + [
        {'use_chainerx': True, 'chainerx_device': 'native:0'},
        {'use_chainerx': True, 'chainerx_device': 'cuda:0'},
        {'use_chainerx': True, 'chainerx_device': 'cuda:1'},
    ])
class TestSoftmax(testing.FunctionTestCase):
    """Forward/backward/double-backward tests for F.softmax over a grid
    of shapes, axes, dtypes and backends (driven by FunctionTestCase)."""

    # Resample inputs when numerical gradient checking hits a
    # non-differentiable point.
    dodge_nondifferentiable = True

    def setUp(self):
        # float16 needs looser tolerances for all three check phases.
        if self.dtype == numpy.float16:
            self.check_forward_options.update({'atol': 1e-3, 'rtol': 1e-2})
            self.check_backward_options.update({'atol': 1e-3, 'rtol': 1e-2})
            self.check_double_backward_options \
                .update({'atol': 1e-3, 'rtol': 1e-2})

    def generate_inputs(self):
        if self.shape is None:
            # For checking numerical stability: one very negative logit
            # would overflow a naive exp-based softmax.
            value = -5 if self.dtype == numpy.float16 else -1000
            x = numpy.array([[value, 1]], dtype=self.dtype)
        else:
            x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        return x,

    def forward(self, inputs, device):
        x, = inputs
        return functions.softmax(x, axis=self.axis),

    def forward_expected(self, inputs):
        # Reference softmax in pure NumPy: move the softmax axis last,
        # normalize each trailing vector by its exp-sum, move the axis back.
        x, = inputs
        expected = numpy.exp(x)
        expected = numpy.rollaxis(expected, self.axis, expected.ndim)
        for i in numpy.ndindex(expected.shape[:-1]):
            expected[i] /= expected[i].sum()
        expected = numpy.rollaxis(expected, expected.ndim-1, self.axis)
        return expected.astype(x.dtype),
@testing.parameterize(*testing.product({
    'axis': [0],
    'use_cudnn': ['always', 'auto', 'never'],
    'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
@attr.cudnn
class TestSoftmaxCudnnCall(unittest.TestCase):
    """Verify that softmax dispatches to cuDNN exactly when the
    'use_cudnn' config says it should (by patching the cupy kernels)."""

    def setUp(self):
        self.x = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype)
        self.gy = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype)
        # Record whether cuDNN is expected to be used under this config.
        with chainer.using_config('use_cudnn', self.use_cudnn):
            self.expect = chainer.should_use_cudnn('>=auto')

    def forward(self):
        x = chainer.Variable(self.x)
        return functions.softmax(x, axis=self.axis)

    def test_call_cudnn_forward(self):
        with chainer.using_config('use_cudnn', self.use_cudnn):
            with testing.patch('cupy.cudnn.softmax_forward') as func:
                self.forward()
                self.assertEqual(func.called, self.expect)

    def test_call_cudnn_backward(self):
        with chainer.using_config('use_cudnn', self.use_cudnn):
            y = self.forward()
            y.grad = self.gy
            with testing.patch('cupy.cudnn.softmax_backward') as func:
                y.backward()
                self.assertEqual(func.called, self.expect)
# Register this module's test cases with Chainer's test runner.
testing.run_module(__name__, __file__)
| mit |
xaviercobain88/framework-python | openerp/addons/point_of_sale/wizard/pos_details.py | 55 | 2439 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv, fields
class pos_details(osv.osv_memory):
    """Transient wizard model that collects a date range (and optional
    salespeople) and launches the 'pos.details' sales report."""
    _name = 'pos.details'
    _description = 'Sales Details'
    _columns = {
        # Report period bounds; both default to today.
        'date_start': fields.date('Date Start', required=True),
        'date_end': fields.date('Date End', required=True),
        # Optional filter: which salespeople to include in the report.
        'user_ids': fields.many2many('res.users', 'pos_details_report_user_rel', 'user_id', 'wizard_id', 'Salespeople'),
    }
    _defaults = {
        'date_start': lambda *a: time.strftime('%Y-%m-%d'),
        'date_end': lambda *a: time.strftime('%Y-%m-%d'),
    }

    def print_report(self, cr, uid, ids, context=None):
        """
        To get the date and print the report
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return : retrun report
        """
        if context is None:
            context = {}
        datas = {'ids': context.get('active_ids', [])}
        res = self.read(cr, uid, ids, ['date_start', 'date_end', 'user_ids'], context=context)
        # read() returns a list of dicts; take the first record (or {}).
        res = res and res[0] or {}
        datas['form'] = res
        if res.get('id',False):
            datas['ids']=[res['id']]
        # Action descriptor telling the OpenERP client to render the report.
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'pos.details',
            'datas': datas,
        }
pos_details()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Midrya/chromium | third_party/logilab/common/compat.py | 93 | 2593 | # pylint: disable=E0601,W0622,W0611
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Wrappers around some builtins introduced in python 2.3, 2.4 and
2.5, making them available in for earlier versions of python.
See another compatibility snippets from other projects:
:mod:`lib2to3.fixes`
:mod:`coverage.backward`
:mod:`unittest2.compatibility`
"""
__docformat__ = "restructuredtext en"
import os
import sys
import types
from warnings import warn
# not used here, but imported to preserve API
from six.moves import builtins
# Bytes/text conversion shims that behave sensibly on both Python 2 and 3.
if sys.version_info < (3, 0):
    # On py2, str already *is* the bytes type.
    str_to_bytes = str
    def str_encode(string, encoding):
        # Encode unicode to bytes; anything else is stringified as-is.
        if isinstance(string, unicode):
            return string.encode(encoding)
        return str(string)
else:
    def str_to_bytes(string):
        return str.encode(string)
    # we have to ignore the encoding in py3k to be able to write a string into a
    # TextIOWrapper or like object (which expect an unicode string)
    def str_encode(string, encoding):
        return str(string)
# See also http://bugs.python.org/issue11776
if sys.version_info[0] == 3:
    # Wrapper giving py3's two-argument MethodType the old py2
    # three-argument signature.
    def method_type(callable, instance, klass):
        # api change. klass is no more considered
        return types.MethodType(callable, instance)
else:
    # alias types otherwise
    method_type = types.MethodType
# Pythons 2 and 3 differ on where to get StringIO
if sys.version_info < (3, 0):
    from cStringIO import StringIO
    FileIO = file
    BytesIO = StringIO
    # re-export the builtin so 'from compat import reload' works everywhere
    reload = reload
else:
    from io import FileIO, BytesIO, StringIO
    from imp import reload

from logilab.common.deprecation import deprecated

# Other projects import these from here, keep providing them for
# backwards compat
any = deprecated('use builtin "any"')(any)
all = deprecated('use builtin "all"')(all)
| bsd-3-clause |
jordanemedlock/psychtruths | temboo/core/Library/Google/Spreadsheets/DeleteListRow.py | 4 | 6613 | # -*- coding: utf-8 -*-
###############################################################################
#
# DeleteListRow
# Deletes a specified worksheet row from a Google spreadsheet.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class DeleteListRow(Choreography):
    """Choreo wrapper for the Temboo '/Library/Google/Spreadsheets/DeleteListRow'
    endpoint; the factory methods below bind its typed input/result/execution
    classes into the generic Choreography machinery."""

    def __init__(self, temboo_session):
        """
        Create a new instance of the DeleteListRow Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(DeleteListRow, self).__init__(temboo_session, '/Library/Google/Spreadsheets/DeleteListRow')

    def new_input_set(self):
        # Factory for this Choreo's typed input container.
        return DeleteListRowInputSet()

    def _make_result_set(self, result, path):
        # Factory for the typed result wrapper (called by the base class).
        return DeleteListRowResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Factory for the typed execution handle (called by the base class).
        return DeleteListRowChoreographyExecution(session, exec_id, path)
class DeleteListRowInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the DeleteListRow
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.

    All setters are thin, auto-generated wrappers around InputSet._set_input;
    each docstring carries that input's type/requiredness contract.
    """
    def set_AccessToken(self, value):
        """
        Set the value of the AccessToken input for this Choreo. ((optional, string) A valid Access Token retrieved during the OAuth process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new Access Token.)
        """
        super(DeleteListRowInputSet, self)._set_input('AccessToken', value)
    def set_ClientID(self, value):
        """
        Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.)
        """
        super(DeleteListRowInputSet, self)._set_input('ClientID', value)
    def set_ClientSecret(self, value):
        """
        Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.)
        """
        super(DeleteListRowInputSet, self)._set_input('ClientSecret', value)
    def set_EditLink(self, value):
        """
        Set the value of the EditLink input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(DeleteListRowInputSet, self)._set_input('EditLink', value)
    def set_Link(self, value):
        """
        Set the value of the Link input for this Choreo. ((optional, string) The entry's resource URL found in the link element of the entry. Can be retrieved by running RetrieveListFeed Choreo. When this is provided, SpreadsheetKey, WorksheetId, and RowId are not needed.)
        """
        super(DeleteListRowInputSet, self)._set_input('Link', value)
    def set_Password(self, value):
        """
        Set the value of the Password input for this Choreo. ((optional, password) Deprecated (retained for backward compatibility only).)
        """
        super(DeleteListRowInputSet, self)._set_input('Password', value)
    def set_RefreshToken(self, value):
        """
        Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth Refresh Token used to generate a new Access Token when the original token is expired. Required unless providing a valid AccessToken.)
        """
        super(DeleteListRowInputSet, self)._set_input('RefreshToken', value)
    def set_RowId(self, value):
        """
        Set the value of the RowId input for this Choreo. ((conditional, string) The unique ID of the row you want to delete. Required unless providing the Link input.)
        """
        super(DeleteListRowInputSet, self)._set_input('RowId', value)
    def set_SpreadsheetKey(self, value):
        """
        Set the value of the SpreadsheetKey input for this Choreo. ((conditional, string) The unique key of the spreadsheet associated with the row you want to delete. This can be found in the URL when viewing the spreadsheet. Required unless providing the Link input.)
        """
        super(DeleteListRowInputSet, self)._set_input('SpreadsheetKey', value)
    def set_Username(self, value):
        """
        Set the value of the Username input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
        """
        super(DeleteListRowInputSet, self)._set_input('Username', value)
    def set_WorksheetId(self, value):
        """
        Set the value of the WorksheetId input for this Choreo. ((conditional, string) The unique ID of the worksheet associated with the row you want to delete. Typically, Sheet1 has the id of "od6". Required unless providing the Link input.)
        """
        super(DeleteListRowInputSet, self)._set_input('WorksheetId', value)
class DeleteListRowResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the DeleteListRow Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # Parse a JSON string into Python objects.
        # NOTE(review): the parameter name 'str' shadows the builtin; left
        # as-is because this signature is generated Temboo boilerplate.
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. (The response from Google.)
        """
        return self._output.get('Response', None)

    def get_NewAccessToken(self):
        """
        Retrieve the value for the "NewAccessToken" output from this Choreo execution. (Contains a new AccessToken when the RefreshToken is provided.)
        """
        return self._output.get('NewAccessToken', None)
class DeleteListRowChoreographyExecution(ChoreographyExecution):
    """Execution handle for DeleteListRow; wires the typed result set
    into the generic ChoreographyExecution machinery."""

    def _make_result_set(self, response, path):
        return DeleteListRowResultSet(response, path)
| apache-2.0 |
varunchitre15/android_kernel_mediatek_sprout | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
# Command-line handling: an optional single argument that is either a
# numeric pid or a command name to filter on.
usage = "perf script -s syscall-counts-by-pid.py [comm]\n"

for_comm = None
for_pid = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except ValueError:
        # Fix: was a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt. Only a non-numeric argument
        # should fall through to being treated as a command name.
        for_comm = sys.argv[1]

# Nested auto-vivifying dict: comm -> pid -> syscall id -> count.
syscalls = autodict()
def trace_begin():
    # Called once by perf before any events are processed (py2 print).
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called once by perf after the event stream ends; emit the summary.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, args):
    # perf invokes this handler for each raw_syscalls:sys_enter event
    # (the function name encodes the tracepoint). Skip events that do not
    # match the optional comm/pid filter.
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid ):
        return
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        # First sighting of this comm/pid/syscall: the autodict leaf is
        # not yet an int, so += raises TypeError; initialize it instead.
        syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
    # Print a per-comm/per-pid table of syscall counts, most frequent
    # first. Python 2 only: print statements with trailing commas, dict
    # .iteritems(), and tuple-unpacking lambda parameters.
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events by comm/pid:\n\n",

    print "%-40s  %10s\n" % ("comm [pid]/syscalls", "count"),
    print "%-40s  %10s\n" % ("----------------------------------------", \
                                 "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            # Sort by count descending, then syscall id.
            for id, val in sorted(syscalls[comm][pid].iteritems(), \
                  key = lambda(k, v): (v, k), reverse = True):
                print "  %-38s  %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
0x46616c6b/ansible | lib/ansible/modules/cloud/amazon/ec2_asg_facts.py | 25 | 11800 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ec2_asg_facts
short_description: Gather facts about ec2 Auto Scaling Groups (ASGs) in AWS
description:
- Gather facts about ec2 Auto Scaling Groups (ASGs) in AWS
version_added: "2.2"
author: "Rob White (@wimnat)"
options:
name:
description:
- The prefix or name of the auto scaling group(s) you are searching for.
- "Note: This is a regular expression match with implicit '^' (beginning of string). Append '$' for a complete name match."
required: false
tags:
description:
- "A dictionary/hash of tags in the format { tag1_name: 'tag1_value', tag2_name: 'tag2_value' } to match against the auto scaling group(s) you are searching for."
required: false
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Find all groups
- ec2_asg_facts:
register: asgs
# Find a group with matching name/prefix
- ec2_asg_facts:
name: public-webserver-asg
register: asgs
# Find a group with matching tags
- ec2_asg_facts:
tags:
project: webapp
env: production
register: asgs
# Find a group with matching name/prefix and tags
- ec2_asg_facts:
name: myproject
tags:
env: production
register: asgs
# Fail if no groups are found
- ec2_asg_facts:
name: public-webserver-asg
register: asgs
failed_when: "{{ asgs.results | length == 0 }}"
# Fail if more than 1 group is found
- ec2_asg_facts:
name: public-webserver-asg
register: asgs
failed_when: "{{ asgs.results | length > 1 }}"
'''
RETURN = '''
---
auto_scaling_group_arn:
description: The Amazon Resource Name of the ASG
returned: success
type: string
sample: "arn:aws:autoscaling:us-west-2:1234567890:autoScalingGroup:10787c52-0bcb-427d-82ba-c8e4b008ed2e:autoScalingGroupName/public-webapp-production-1"
auto_scaling_group_name:
description: Name of autoscaling group
returned: success
type: str
sample: "public-webapp-production-1"
availability_zones:
description: List of Availability Zones that are enabled for this ASG.
returned: success
type: list
sample: ["us-west-2a", "us-west-2b", "us-west-2a"]
created_time:
description: The date and time this ASG was created, in ISO 8601 format.
returned: success
type: string
sample: "2015-11-25T00:05:36.309Z"
default_cooldown:
description: The default cooldown time in seconds.
returned: success
type: int
sample: 300
desired_capacity:
description: The number of EC2 instances that should be running in this group.
returned: success
type: int
sample: 3
health_check_period:
description: Length of time in seconds after a new EC2 instance comes into service that Auto Scaling starts checking its health.
returned: success
type: int
sample: 30
health_check_type:
description: The service you want the health status from, one of "EC2" or "ELB".
returned: success
type: str
sample: "ELB"
instances:
description: List of EC2 instances and their status as it relates to the ASG.
returned: success
type: list
sample: [
{
"availability_zone": "us-west-2a",
"health_status": "Healthy",
"instance_id": "i-es22ad25",
"launch_configuration_name": "public-webapp-production-1",
"lifecycle_state": "InService",
"protected_from_scale_in": "false"
}
]
launch_configuration_name:
description: Name of launch configuration associated with the ASG.
returned: success
type: str
sample: "public-webapp-production-1"
load_balancer_names:
description: List of load balancers names attached to the ASG.
returned: success
type: list
sample: ["elb-webapp-prod"]
max_size:
description: Maximum size of group
returned: success
type: int
sample: 3
min_size:
description: Minimum size of group
returned: success
type: int
sample: 1
new_instances_protected_from_scale_in:
description: Whether or not new instances a protected from automatic scaling in.
returned: success
type: boolean
sample: "false"
placement_group:
description: Placement group into which instances are launched, if any.
returned: success
type: str
sample: None
status:
description: The current state of the group when DeleteAutoScalingGroup is in progress.
returned: success
type: str
sample: None
tags:
description: List of tags for the ASG, and whether or not each tag propagates to instances at launch.
returned: success
type: list
sample: [
{
"key": "Name",
"value": "public-webapp-production-1",
"resource_id": "public-webapp-production-1",
"resource_type": "auto-scaling-group",
"propagate_at_launch": "true"
},
{
"key": "env",
"value": "production",
"resource_id": "public-webapp-production-1",
"resource_type": "auto-scaling-group",
"propagate_at_launch": "true"
}
]
termination_policies:
description: A list of termination policies for the group.
returned: success
type: str
sample: ["Default"]
'''
# boto3 is an optional import: record availability instead of failing at
# module load (presumably HAS_BOTO3 is checked by the module entry point
# before any AWS call — that code is outside this chunk; confirm there).
try:
    import boto3
    from botocore.exceptions import ClientError
    HAS_BOTO3 = True
except ImportError:
    HAS_BOTO3 = False
def match_asg_tags(tags_to_match, asg):
    """Return True if every key/value pair in ``tags_to_match`` appears
    among the ASG's ``Tags`` list (each tag a dict with 'Key'/'Value')."""
    asg_tags = asg['Tags']
    return all(
        any(tag['Key'] == key and tag['Value'] == value for tag in asg_tags)
        for key, value in tags_to_match.items()
    )
def find_asgs(conn, module, name=None, tags=None):
    """
    Args:
        conn (boto3.AutoScaling.Client): Valid Boto3 ASG client.
        module (AnsibleModule): Initialized Ansible module; used to fail
            cleanly (fail_json) when the AWS call errors out.
        name (str): Optional name of the ASG you are looking for.
        tags (dict): Optional dictionary of tags and values to search for.

    Basic Usage:
        >>> name = 'public-webapp-production'
        >>> tags = { 'env': 'production' }
        >>> conn = boto3.client('autoscaling', region_name='us-west-2')
        >>> results = find_asgs(name, conn)

    Returns:
        List
        [
            {
                "auto_scaling_group_arn": "arn:aws:autoscaling:us-west-2:275977225706:autoScalingGroup:58abc686-9783-4528-b338-3ad6f1cbbbaf:autoScalingGroupName/public-webapp-production",
                "auto_scaling_group_name": "public-webapp-production",
                "availability_zones": ["us-west-2c", "us-west-2b", "us-west-2a"],
                "created_time": "2016-02-02T23:28:42.481000+00:00",
                "default_cooldown": 300,
                "desired_capacity": 2,
                "enabled_metrics": [],
                "health_check_grace_period": 300,
                "health_check_type": "ELB",
                "instances":
                [
                    {
                        "availability_zone": "us-west-2c",
                        "health_status": "Healthy",
                        "instance_id": "i-047a12cb",
                        "launch_configuration_name": "public-webapp-production-1",
                        "lifecycle_state": "InService",
                        "protected_from_scale_in": false
                    },
                    {
                        "availability_zone": "us-west-2a",
                        "health_status": "Healthy",
                        "instance_id": "i-7a29df2c",
                        "launch_configuration_name": "public-webapp-production-1",
                        "lifecycle_state": "InService",
                        "protected_from_scale_in": false
                    }
                ],
                "launch_configuration_name": "public-webapp-production-1",
                "load_balancer_names": ["public-webapp-production-lb"],
                "max_size": 4,
                "min_size": 2,
                "new_instances_protected_from_scale_in": false,
                "placement_group": None,
                "status": None,
                "suspended_processes": [],
                "tags":
                [
                    {
                        "key": "Name",
                        "propagate_at_launch": true,
                        "resource_id": "public-webapp-production",
                        "resource_type": "auto-scaling-group",
                        "value": "public-webapp-production"
                    },
                    {
                        "key": "env",
                        "propagate_at_launch": true,
                        "resource_id": "public-webapp-production",
                        "resource_type": "auto-scaling-group",
                        "value": "production"
                    }
                ],
                "termination_policies":
                [
                    "Default"
                ],
                "vpc_zone_identifier":
                [
                    "subnet-a1b1c1d1",
                    "subnet-a2b2c2d2",
                    "subnet-a3b3c3d3"
                ]
            }
        ]
    """
    try:
        # Paginate so ASG lists longer than one API page are fully returned.
        asgs_paginator = conn.get_paginator('describe_auto_scaling_groups')
        asgs = asgs_paginator.paginate().build_full_result()
    except ClientError as e:
        # NOTE(review): ``e.message`` only exists on Python 2 exceptions --
        # confirm the target runtime.
        module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
    matched_asgs = []
    if name is not None:
        # Compile the pattern once; anchored at the start so ``name`` acts
        # as a prefix match against the ASG name.
        name_prog = re.compile(r'^' + name)
    for asg in asgs['AutoScalingGroups']:
        if name:
            matched_name = name_prog.search(asg['AutoScalingGroupName'])
        else:
            # No name filter supplied: every ASG matches on name.
            matched_name = True
        if tags:
            matched_tags = match_asg_tags(tags, asg)
        else:
            # No tag filter supplied: every ASG matches on tags.
            matched_tags = True
        if matched_name and matched_tags:
            # Convert boto3's CamelCase keys to Ansible's snake_case.
            matched_asgs.append(camel_dict_to_snake_dict(asg))
    return matched_asgs
def main():
    """Ansible module entry point: look up ASGs by name/tags and exit_json."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name=dict(type='str'),
            tags=dict(type='dict'),
        )
    )
    module = AnsibleModule(argument_spec=argument_spec)
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 required for this module')
    asg_name = module.params.get('name')
    asg_tags = module.params.get('tags')
    try:
        region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
        autoscaling = boto3_conn(module, conn_type='client', resource='autoscaling', region=region, endpoint=ec2_url, **aws_connect_kwargs)
    except ClientError as e:
        # NOTE(review): ``e.message`` only exists on Python 2 exceptions;
        # on Python 3 this line would raise AttributeError -- confirm runtime.
        module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
    results = find_asgs(autoscaling, module, name=asg_name, tags=asg_tags)
    module.exit_json(results=results)

# import module snippets
# (legacy Ansible convention: star imports at the bottom of the module
# provide AnsibleModule, ec2_argument_spec, boto3_conn, re, etc.)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
CourseTalk/edx-platform | lms/djangoapps/survey/tests/test_utils.py | 102 | 3969 | """
Python tests for the Survey models
"""
from collections import OrderedDict
from django.test.client import Client
from django.contrib.auth.models import User
from survey.models import SurveyForm
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from survey.utils import is_survey_required_for_course, must_answer_survey
class SurveyModelsTests(ModuleStoreTestCase):
    """
    All tests for the survey utils.py helpers
    (is_survey_required_for_course and must_answer_survey).
    """

    def setUp(self):
        """
        Set up the test data used in the specific tests
        """
        super(SurveyModelsTests, self).setUp()
        self.client = Client()

        # Create test accounts: two students and one staff user
        self.password = 'abc'
        self.student = User.objects.create_user('student', 'student@test.com', self.password)
        self.student2 = User.objects.create_user('student2', 'student2@test.com', self.password)
        self.staff = User.objects.create_user('staff', 'staff@test.com', self.password)
        self.staff.is_staff = True
        self.staff.save()

        self.test_survey_name = 'TestSurvey'
        self.test_form = '<input name="foo"></input>'

        self.student_answers = OrderedDict({
            'field1': 'value1',
            'field2': 'value2',
        })
        self.student2_answers = OrderedDict({
            'field1': 'value3'
        })

        # Course whose required survey actually exists (created just below).
        self.course = CourseFactory.create(
            course_survey_required=True,
            course_survey_name=self.test_survey_name
        )
        self.survey = SurveyForm.create(self.test_survey_name, self.test_form)

    def test_is_survey_required_for_course(self):
        """
        Assert that a course survey is required when both the flag is set
        and a survey name is set on the course descriptor
        """
        self.assertTrue(is_survey_required_for_course(self.course))

    def test_is_survey_not_required_for_course(self):
        """
        Assert that if various data is not available or if the survey is not found
        then the survey is not considered required
        """
        # No flag, no name.
        course = CourseFactory.create()
        self.assertFalse(is_survey_required_for_course(course))

        # Flag explicitly off.
        course = CourseFactory.create(
            course_survey_required=False
        )
        self.assertFalse(is_survey_required_for_course(course))

        # Flag on, but the named survey does not exist.
        course = CourseFactory.create(
            course_survey_required=True,
            course_survey_name="NonExisting"
        )
        self.assertFalse(is_survey_required_for_course(course))

        # Survey exists, but the flag is off.
        course = CourseFactory.create(
            course_survey_required=False,
            course_survey_name=self.test_survey_name
        )
        self.assertFalse(is_survey_required_for_course(course))

    def test_user_not_yet_answered_required_survey(self):
        """
        Assert that a course with a required survey reports that the user
        must still answer it, and that non-required courses do not
        """
        self.assertTrue(must_answer_survey(self.course, self.student))

        temp_course = CourseFactory.create(
            course_survey_required=False
        )
        self.assertFalse(must_answer_survey(temp_course, self.student))

        temp_course = CourseFactory.create(
            course_survey_required=True,
            course_survey_name="NonExisting"
        )
        self.assertFalse(must_answer_survey(temp_course, self.student))

    def test_user_has_answered_required_survey(self):
        """
        Assert that a course with a required survey for which the user
        already has answers no longer requires answering
        """
        self.survey.save_user_answers(self.student, self.student_answers, None)
        self.assertFalse(must_answer_survey(self.course, self.student))

    def test_staff_must_answer_survey(self):
        """
        Assert that someone with staff level permissions does not have to answer the survey
        """
        self.assertFalse(must_answer_survey(self.course, self.staff))
| agpl-3.0 |
with-git/tensorflow | tensorflow/contrib/keras/api/keras/layers/__init__.py | 11 | 8528 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras layers API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Generic layers.
# pylint: disable=g-bad-import-order
from tensorflow.contrib.keras.python.keras.engine import Input
from tensorflow.contrib.keras.python.keras.engine import InputLayer
from tensorflow.contrib.keras.python.keras.engine import InputSpec
from tensorflow.contrib.keras.python.keras.engine import Layer
# Advanced activations.
from tensorflow.contrib.keras.python.keras.layers.advanced_activations import LeakyReLU
from tensorflow.contrib.keras.python.keras.layers.advanced_activations import PReLU
from tensorflow.contrib.keras.python.keras.layers.advanced_activations import ELU
from tensorflow.contrib.keras.python.keras.layers.advanced_activations import ThresholdedReLU
# Convolution layers.
from tensorflow.contrib.keras.python.keras.layers.convolutional import Conv1D
from tensorflow.contrib.keras.python.keras.layers.convolutional import Conv2D
from tensorflow.contrib.keras.python.keras.layers.convolutional import Conv3D
from tensorflow.contrib.keras.python.keras.layers.convolutional import Conv2DTranspose
from tensorflow.contrib.keras.python.keras.layers.convolutional import Conv3DTranspose
from tensorflow.contrib.keras.python.keras.layers.convolutional import SeparableConv2D
# Convolution layer aliases.
from tensorflow.contrib.keras.python.keras.layers.convolutional import Convolution1D
from tensorflow.contrib.keras.python.keras.layers.convolutional import Convolution2D
from tensorflow.contrib.keras.python.keras.layers.convolutional import Convolution3D
from tensorflow.contrib.keras.python.keras.layers.convolutional import Convolution2DTranspose
from tensorflow.contrib.keras.python.keras.layers.convolutional import Convolution3DTranspose
from tensorflow.contrib.keras.python.keras.layers.convolutional import SeparableConvolution2D
# Image processing layers.
from tensorflow.contrib.keras.python.keras.layers.convolutional import UpSampling1D
from tensorflow.contrib.keras.python.keras.layers.convolutional import UpSampling2D
from tensorflow.contrib.keras.python.keras.layers.convolutional import UpSampling3D
from tensorflow.contrib.keras.python.keras.layers.convolutional import ZeroPadding1D
from tensorflow.contrib.keras.python.keras.layers.convolutional import ZeroPadding2D
from tensorflow.contrib.keras.python.keras.layers.convolutional import ZeroPadding3D
from tensorflow.contrib.keras.python.keras.layers.convolutional import Cropping1D
from tensorflow.contrib.keras.python.keras.layers.convolutional import Cropping2D
from tensorflow.contrib.keras.python.keras.layers.convolutional import Cropping3D
# Convolutional-recurrent layers.
from tensorflow.contrib.keras.python.keras.layers.convolutional_recurrent import ConvLSTM2D
# Core layers.
from tensorflow.contrib.keras.python.keras.layers.core import Masking
from tensorflow.contrib.keras.python.keras.layers.core import Dropout
from tensorflow.contrib.keras.python.keras.layers.core import SpatialDropout1D
from tensorflow.contrib.keras.python.keras.layers.core import SpatialDropout2D
from tensorflow.contrib.keras.python.keras.layers.core import SpatialDropout3D
from tensorflow.contrib.keras.python.keras.layers.core import Activation
from tensorflow.contrib.keras.python.keras.layers.core import Reshape
from tensorflow.contrib.keras.python.keras.layers.core import Permute
from tensorflow.contrib.keras.python.keras.layers.core import Flatten
from tensorflow.contrib.keras.python.keras.layers.core import RepeatVector
from tensorflow.contrib.keras.python.keras.layers.core import Lambda
from tensorflow.contrib.keras.python.keras.layers.core import Dense
from tensorflow.contrib.keras.python.keras.layers.core import ActivityRegularization
# Embedding layers.
from tensorflow.contrib.keras.python.keras.layers.embeddings import Embedding
# Locally-connected layers.
from tensorflow.contrib.keras.python.keras.layers.local import LocallyConnected1D
from tensorflow.contrib.keras.python.keras.layers.local import LocallyConnected2D
# Merge layers.
from tensorflow.contrib.keras.python.keras.layers.merge import Add
from tensorflow.contrib.keras.python.keras.layers.merge import Multiply
from tensorflow.contrib.keras.python.keras.layers.merge import Average
from tensorflow.contrib.keras.python.keras.layers.merge import Maximum
from tensorflow.contrib.keras.python.keras.layers.merge import Concatenate
from tensorflow.contrib.keras.python.keras.layers.merge import Dot
from tensorflow.contrib.keras.python.keras.layers.merge import add
from tensorflow.contrib.keras.python.keras.layers.merge import multiply
from tensorflow.contrib.keras.python.keras.layers.merge import average
from tensorflow.contrib.keras.python.keras.layers.merge import maximum
from tensorflow.contrib.keras.python.keras.layers.merge import concatenate
from tensorflow.contrib.keras.python.keras.layers.merge import dot
# Noise layers.
from tensorflow.contrib.keras.python.keras.layers.noise import AlphaDropout
from tensorflow.contrib.keras.python.keras.layers.noise import GaussianNoise
from tensorflow.contrib.keras.python.keras.layers.noise import GaussianDropout
# Normalization layers.
from tensorflow.contrib.keras.python.keras.layers.normalization import BatchNormalization
# Pooling layers.
from tensorflow.contrib.keras.python.keras.layers.pooling import MaxPooling1D
from tensorflow.contrib.keras.python.keras.layers.pooling import MaxPooling2D
from tensorflow.contrib.keras.python.keras.layers.pooling import MaxPooling3D
from tensorflow.contrib.keras.python.keras.layers.pooling import AveragePooling1D
from tensorflow.contrib.keras.python.keras.layers.pooling import AveragePooling2D
from tensorflow.contrib.keras.python.keras.layers.pooling import AveragePooling3D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalAveragePooling1D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalAveragePooling2D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalAveragePooling3D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalMaxPooling1D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalMaxPooling2D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalMaxPooling3D
# Pooling layer aliases.
from tensorflow.contrib.keras.python.keras.layers.pooling import MaxPool1D
from tensorflow.contrib.keras.python.keras.layers.pooling import MaxPool2D
from tensorflow.contrib.keras.python.keras.layers.pooling import MaxPool3D
from tensorflow.contrib.keras.python.keras.layers.pooling import AvgPool1D
from tensorflow.contrib.keras.python.keras.layers.pooling import AvgPool2D
from tensorflow.contrib.keras.python.keras.layers.pooling import AvgPool3D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalAvgPool1D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalAvgPool2D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalAvgPool3D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalMaxPool1D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalMaxPool2D
from tensorflow.contrib.keras.python.keras.layers.pooling import GlobalMaxPool3D
# Recurrent layers.
from tensorflow.contrib.keras.python.keras.layers.recurrent import SimpleRNN
from tensorflow.contrib.keras.python.keras.layers.recurrent import GRU
from tensorflow.contrib.keras.python.keras.layers.recurrent import LSTM
# Wrapper functions
from tensorflow.contrib.keras.python.keras.layers.wrappers import Wrapper
from tensorflow.contrib.keras.python.keras.layers.wrappers import Bidirectional
from tensorflow.contrib.keras.python.keras.layers.wrappers import TimeDistributed
# Remove the __future__ names from the module namespace so they are not
# re-exported as part of the public ``keras.layers`` API.
del absolute_import
del division
del print_function
| apache-2.0 |
servo/servo | tests/wpt/web-platform-tests/webdriver/tests/take_screenshot/iframe.py | 16 | 2692 | import pytest
from tests.support.asserts import assert_success
from tests.support.image import png_dimensions
from . import viewport_dimensions
DEFAULT_CONTENT = "<div id='content'>Lorem ipsum dolor sit amet.</div>"
REFERENCE_CONTENT = "<div id='outer'>{}</div>".format(DEFAULT_CONTENT)
REFERENCE_STYLE = """
<style>
#outer {
display: block;
margin: 0;
border: 0;
width: 200px;
height: 200px;
}
#content {
display: block;
margin: 0;
border: 0;
width: 100px;
height: 100px;
background: green;
}
</style>
"""
OUTER_IFRAME_STYLE = """
<style>
iframe {
display: block;
margin: 0;
border: 0;
width: 200px;
height: 200px;
}
</style>
"""
INNER_IFRAME_STYLE = """
<style>
body {
margin: 0;
}
div {
display: block;
margin: 0;
border: 0;
width: 100px;
height: 100px;
background: green;
}
</style>
"""
def take_screenshot(session):
    """Issue a raw WebDriver Take Screenshot command for *session*."""
    endpoint = "session/{session_id}/screenshot".format(**vars(session))
    return session.transport.send("GET", endpoint)
def test_always_captures_top_browsing_context(session, inline, iframe):
    """Screenshots capture the top-level browsing context even when an
    iframe is the currently selected frame."""
    iframe_content = "{0}{1}".format(INNER_IFRAME_STYLE, DEFAULT_CONTENT)
    session.url = inline("""{0}{1}""".format(OUTER_IFRAME_STYLE, iframe(iframe_content)))
    response = take_screenshot(session)
    reference_screenshot = assert_success(response)
    # The capture covers the whole viewport, not just the iframe.
    assert png_dimensions(reference_screenshot) == viewport_dimensions(session)

    frame = session.find.css("iframe", all=False)
    session.switch_frame(frame)

    response = take_screenshot(session)
    screenshot = assert_success(response)
    # Switching into the frame must not change what is captured.
    assert png_dimensions(screenshot) == png_dimensions(reference_screenshot)
    assert screenshot == reference_screenshot
@pytest.mark.parametrize("domain", ["", "alt"], ids=["same_origin", "cross_origin"])
def test_source_origin(session, inline, iframe, domain):
    """Iframe content is captured identically whether it is served from the
    same origin or a cross-origin domain."""
    # First render the content directly to obtain a reference image.
    session.url = inline("{0}{1}".format(REFERENCE_STYLE, REFERENCE_CONTENT))
    response = take_screenshot(session)
    reference_screenshot = assert_success(response)
    assert png_dimensions(reference_screenshot) == viewport_dimensions(session)

    # Then render the same content inside a (possibly cross-origin) iframe.
    iframe_content = "{0}{1}".format(INNER_IFRAME_STYLE, DEFAULT_CONTENT)
    session.url = inline("""{0}{1}""".format(
        OUTER_IFRAME_STYLE, iframe(iframe_content, domain=domain)))
    response = take_screenshot(session)
    screenshot = assert_success(response)
    assert png_dimensions(screenshot) == viewport_dimensions(session)

    # The rendering with the iframe must be pixel-identical to the reference.
    assert screenshot == reference_screenshot
| mpl-2.0 |
calpeyser/google-cloud-python | datastore/tests/unit/test_transaction.py | 4 | 9736 | # Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
class TestTransaction(unittest.TestCase):
    """Unit tests for ``google.cloud.datastore.transaction.Transaction``.

    All datastore RPC traffic is faked with the ``_make_datastore_api``
    mocks defined below; assertions check both transaction state and the
    exact mock call sequence.
    """

    @staticmethod
    def _get_target_class():
        from google.cloud.datastore.transaction import Transaction
        return Transaction

    def _make_one(self, client, **kw):
        # Helper: instantiate the class under test.
        return self._get_target_class()(client, **kw)

    def test_ctor_defaults(self):
        project = 'PROJECT'
        client = _Client(project)
        xact = self._make_one(client)
        self.assertEqual(xact.project, project)
        self.assertIs(xact._client, client)
        self.assertIsNone(xact.id)
        self.assertEqual(xact._status, self._get_target_class()._INITIAL)
        self.assertEqual(xact._mutations, [])
        self.assertEqual(len(xact._partial_key_entities), 0)

    def test_current(self):
        # current() must track the innermost *transaction* on the client's
        # batch stack and return None while a plain batch is on top.
        from google.cloud.proto.datastore.v1 import datastore_pb2
        project = 'PROJECT'
        id_ = 678
        ds_api = _make_datastore_api(xact_id=id_)
        client = _Client(project, datastore_api=ds_api)
        xact1 = self._make_one(client)
        xact2 = self._make_one(client)
        self.assertIsNone(xact1.current())
        self.assertIsNone(xact2.current())
        with xact1:
            self.assertIs(xact1.current(), xact1)
            self.assertIs(xact2.current(), xact1)
            with _NoCommitBatch(client):
                self.assertIsNone(xact1.current())
                self.assertIsNone(xact2.current())
            with xact2:
                self.assertIs(xact1.current(), xact2)
                self.assertIs(xact2.current(), xact2)
                with _NoCommitBatch(client):
                    self.assertIsNone(xact1.current())
                    self.assertIsNone(xact2.current())
            self.assertIs(xact1.current(), xact1)
            self.assertIs(xact2.current(), xact1)
        self.assertIsNone(xact1.current())
        self.assertIsNone(xact2.current())
        # Both transactions exited cleanly: two begins, two commits, no rollback.
        ds_api.rollback.assert_not_called()
        commit_method = ds_api.commit
        self.assertEqual(commit_method.call_count, 2)
        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
        commit_method.assert_called_with(project, mode, [], transaction=id_)
        begin_txn = ds_api.begin_transaction
        self.assertEqual(begin_txn.call_count, 2)
        begin_txn.assert_called_with(project)

    def test_begin(self):
        project = 'PROJECT'
        id_ = 889
        ds_api = _make_datastore_api(xact_id=id_)
        client = _Client(project, datastore_api=ds_api)
        xact = self._make_one(client)
        xact.begin()
        self.assertEqual(xact.id, id_)
        ds_api.begin_transaction.assert_called_once_with(project)

    def test_begin_tombstoned(self):
        # A rolled-back ("tombstoned") transaction may not be begun again.
        project = 'PROJECT'
        id_ = 1094
        ds_api = _make_datastore_api(xact_id=id_)
        client = _Client(project, datastore_api=ds_api)
        xact = self._make_one(client)
        xact.begin()
        self.assertEqual(xact.id, id_)
        ds_api.begin_transaction.assert_called_once_with(project)
        xact.rollback()
        client._datastore_api.rollback.assert_called_once_with(project, id_)
        self.assertIsNone(xact.id)
        self.assertRaises(ValueError, xact.begin)

    def test_begin_w_begin_transaction_failure(self):
        # If the RPC fails, the transaction must not retain an id.
        project = 'PROJECT'
        id_ = 712
        ds_api = _make_datastore_api(xact_id=id_)
        ds_api.begin_transaction = mock.Mock(side_effect=RuntimeError, spec=[])
        client = _Client(project, datastore_api=ds_api)
        xact = self._make_one(client)
        with self.assertRaises(RuntimeError):
            xact.begin()
        self.assertIsNone(xact.id)
        ds_api.begin_transaction.assert_called_once_with(project)

    def test_rollback(self):
        project = 'PROJECT'
        id_ = 239
        ds_api = _make_datastore_api(xact_id=id_)
        client = _Client(project, datastore_api=ds_api)
        xact = self._make_one(client)
        xact.begin()
        xact.rollback()
        client._datastore_api.rollback.assert_called_once_with(project, id_)
        self.assertIsNone(xact.id)
        ds_api.begin_transaction.assert_called_once_with(project)

    def test_commit_no_partial_keys(self):
        from google.cloud.proto.datastore.v1 import datastore_pb2
        project = 'PROJECT'
        id_ = 1002930
        ds_api = _make_datastore_api(xact_id=id_)
        client = _Client(project, datastore_api=ds_api)
        xact = self._make_one(client)
        xact.begin()
        xact.commit()
        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
        client._datastore_api.commit.assert_called_once_with(
            project, mode, [], transaction=id_)
        self.assertIsNone(xact.id)
        ds_api.begin_transaction.assert_called_once_with(project)

    def test_commit_w_partial_keys(self):
        # Entities put with a partial key receive their completed key
        # (returned by the mocked commit) after the transaction commits.
        from google.cloud.proto.datastore.v1 import datastore_pb2
        project = 'PROJECT'
        kind = 'KIND'
        id1 = 123
        key = _make_key(kind, id1, project)
        id2 = 234
        ds_api = _make_datastore_api(key, xact_id=id2)
        client = _Client(project, datastore_api=ds_api)
        xact = self._make_one(client)
        xact.begin()
        entity = _Entity()
        xact.put(entity)
        xact.commit()
        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
        ds_api.commit.assert_called_once_with(
            project, mode, xact.mutations, transaction=id2)
        self.assertIsNone(xact.id)
        self.assertEqual(entity.key.path, [{'kind': kind, 'id': id1}])
        ds_api.begin_transaction.assert_called_once_with(project)

    def test_context_manager_no_raise(self):
        from google.cloud.proto.datastore.v1 import datastore_pb2
        project = 'PROJECT'
        id_ = 912830
        ds_api = _make_datastore_api(xact_id=id_)
        client = _Client(project, datastore_api=ds_api)
        xact = self._make_one(client)
        with xact:
            # Entering the block begins the transaction.
            self.assertEqual(xact.id, id_)
            ds_api.begin_transaction.assert_called_once_with(project)
        # A clean exit commits.
        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
        client._datastore_api.commit.assert_called_once_with(
            project, mode, [], transaction=id_)
        self.assertIsNone(xact.id)
        self.assertEqual(ds_api.begin_transaction.call_count, 1)

    def test_context_manager_w_raise(self):
        class Foo(Exception):
            pass
        project = 'PROJECT'
        id_ = 614416
        ds_api = _make_datastore_api(xact_id=id_)
        client = _Client(project, datastore_api=ds_api)
        xact = self._make_one(client)
        xact._mutation = object()
        try:
            with xact:
                self.assertEqual(xact.id, id_)
                ds_api.begin_transaction.assert_called_once_with(project)
                raise Foo()
        except Foo:
            # An exception inside the block rolls back instead of committing.
            self.assertIsNone(xact.id)
            client._datastore_api.rollback.assert_called_once_with(
                project, id_)
        client._datastore_api.commit.assert_not_called()
        self.assertIsNone(xact.id)
        self.assertEqual(ds_api.begin_transaction.call_count, 1)
def _make_key(kind, id_, project):
    """Build a datastore Key protobuf with a single (kind, id) path element."""
    from google.cloud.proto.datastore.v1 import entity_pb2

    key_pb = entity_pb2.Key()
    key_pb.partition_id.project_id = project
    path_element = key_pb.path.add()
    path_element.kind = kind
    path_element.id = id_
    return key_pb
class _Entity(dict):
    """Minimal Entity stand-in: a dict carrying a partial datastore Key."""

    def __init__(self):
        from google.cloud.datastore.key import Key

        super(_Entity, self).__init__()
        self.key = Key('KIND', project='PROJECT')
class _Client(object):
    """Fake datastore client that tracks a LIFO stack of active batches."""

    def __init__(self, project, datastore_api=None, namespace=None):
        self.project = project
        self._datastore_api = (
            datastore_api if datastore_api is not None
            else _make_datastore_api())
        self.namespace = namespace
        self._batches = []

    def _push_batch(self, batch):
        # The most recently entered batch lives at index 0.
        self._batches.insert(0, batch)

    def _pop_batch(self):
        return self._batches.pop(0)

    @property
    def current_batch(self):
        # Innermost active batch, or None when the stack is empty.
        return self._batches and self._batches[0] or None
class _NoCommitBatch(object):
    """Install a live Batch on *client* for the duration of a ``with`` block.

    The batch is pushed on entry and popped on exit; it is never begun,
    committed, nor rolled back.
    """

    def __init__(self, client):
        from google.cloud.datastore.batch import Batch

        self._client = client
        self._batch = Batch(client)

    def __enter__(self):
        self._client._push_batch(self._batch)
        return self._batch

    def __exit__(self, exc_type, exc_value, traceback):
        self._client._pop_batch()
def _make_commit_response(*keys):
    """Build a CommitResponse whose mutation results carry *keys* in order."""
    from google.cloud.proto.datastore.v1 import datastore_pb2

    results = [datastore_pb2.MutationResult(key=key_pb) for key_pb in keys]
    return datastore_pb2.CommitResponse(mutation_results=results)
def _make_datastore_api(*keys, **kwargs):
    """Create a mock datastore API exposing begin_transaction/commit/rollback.

    ``commit`` returns a CommitResponse built from *keys*;
    ``begin_transaction`` returns a stub whose ``transaction`` is the
    ``xact_id`` keyword (default 123).
    """
    xact_id = kwargs.pop('xact_id', 123)
    txn_pb = mock.Mock(transaction=xact_id, spec=['transaction'])
    begin_transaction = mock.Mock(return_value=txn_pb, spec=[])
    commit = mock.Mock(return_value=_make_commit_response(*keys), spec=[])
    return mock.Mock(
        begin_transaction=begin_transaction,
        commit=commit,
        spec=['begin_transaction', 'commit', 'rollback'])
| apache-2.0 |
festovalros/Examine_odoo8_accounting | account_analytic_plans/report/__init__.py | 445 | 1084 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import crossovered_analytic
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
willbarton/observation-conditions | tests/test_forecast.py | 1 | 1969 | import unittest
import os
import datetime
from dateutil import tz
from dateutil.relativedelta import *
from observation.conditions.map import ForecastMap
from observation.conditions.map import ForecastMap
from observation.conditions.map import ForecastMapSet
from observation.conditions.map import ForecastMapSeries
from observation.conditions.forecast import Forecast
from observation.conditions.weathermap import SEEING_COLOR_TABLE
# The base path for our test images
DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')

# Test values
# Coordinates expected to fall *inside* the forecast map coverage
# (Hudson Valley, NY -- TODO confirm against the test images).
LATITUDE = 42.013750
LONGITUDE = -73.883561
# Coordinates expected to fall *outside* coverage (near London, UK).
BAD_LATITUDE = 51.5072
BAD_LONGITUDE = 0.1275
class TestForecastMapSeries(unittest.TestCase):
    """Exercise Forecast built on a ForecastMapSeries of 'seeing' images."""

    def setUp(self):
        image_path = os.path.join(DATA_PATH, "ForecastMapSeries", "seeing")
        self.forecastMapSeries = ForecastMapSeries(12, SEEING_COLOR_TABLE,
                                                   "seeing", image_path)
        self.forecast = Forecast(self.forecastMapSeries)

    def test_dateRange(self):
        test_time = self.forecastMapSeries.date + datetime.timedelta(hours=4)
        dateRange = list(self.forecast.dateRange())
        # We should get a range with 48 hours in it
        self.assertEqual(len(dateRange), 48)
        # Four hours after the series start should appear at index 4
        # (assumes hourly steps beginning at the series date -- TODO confirm).
        self.assertEqual(dateRange[4], test_time);

    def test_covered(self):
        # This point lies inside the map's coverage area
        coverage = self.forecast.covered(LATITUDE, LONGITUDE)
        self.assertTrue(coverage)
        # This point lies outside the map's coverage area
        coverage = self.forecast.covered(BAD_LATITUDE, BAD_LONGITUDE)
        self.assertFalse(coverage)

    def test___call__(self):
        # Looking up conditions 4h in reads the seeing value from the images.
        test_time = self.forecastMapSeries.date + datetime.timedelta(hours=4)
        conditions = self.forecast(LATITUDE, LONGITUDE, test_time)
        self.assertEqual(conditions.seeing, 3)


if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
guptaankita/python-novaclient | novaclient/tests/unit/v2/test_limits.py | 4 | 3160 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from novaclient.tests.unit.fixture_data import client
from novaclient.tests.unit.fixture_data import limits as data
from novaclient.tests.unit import utils
from novaclient.v2 import limits
class LimitsTest(utils.FixturedTestCase):
    """Tests for the /limits API bindings, using canned fixture responses."""

    # Fixtures supplying the fake HTTP client and the canned /limits payload.
    client_fixture_class = client.V1
    data_fixture_class = data.Fixture

    def test_get_limits(self):
        obj = self.cs.limits.get()
        self.assert_called('GET', '/limits')
        self.assertIsInstance(obj, limits.Limits)

    def test_get_limits_for_a_tenant(self):
        # tenant_id is passed through as a query parameter.
        obj = self.cs.limits.get(tenant_id=1234)
        self.assert_called('GET', '/limits?tenant_id=1234')
        self.assertIsInstance(obj, limits.Limits)

    def test_absolute_limits_reserved(self):
        obj = self.cs.limits.get(reserved=True)
        expected = (
            limits.AbsoluteLimit("maxTotalRAMSize", 51200),
            limits.AbsoluteLimit("maxServerMeta", 5),
            limits.AbsoluteLimit("maxImageMeta", 5),
            limits.AbsoluteLimit("maxPersonality", 5),
            limits.AbsoluteLimit("maxPersonalitySize", 10240),
        )
        # reserved=True is serialized as '1' in the query string.
        self.assert_called('GET', '/limits?reserved=1')
        abs_limits = list(obj.absolute)
        self.assertEqual(len(abs_limits), len(expected))
        for limit in abs_limits:
            self.assertIn(limit, expected)

    def test_rate_absolute_limits(self):
        obj = self.cs.limits.get()
        # Rate limits from the fixture (order-independent membership check).
        expected = (
            limits.RateLimit('POST', '*', '.*', 10, 2, 'MINUTE',
                             '2011-12-15T22:42:45Z'),
            limits.RateLimit('PUT', '*', '.*', 10, 2, 'MINUTE',
                             '2011-12-15T22:42:45Z'),
            limits.RateLimit('DELETE', '*', '.*', 100, 100, 'MINUTE',
                             '2011-12-15T22:42:45Z'),
            limits.RateLimit('POST', '*/servers', '^/servers', 25, 24, 'DAY',
                             '2011-12-15T22:42:45Z'),
        )
        rate_limits = list(obj.rate)
        self.assertEqual(len(rate_limits), len(expected))
        for limit in rate_limits:
            self.assertIn(limit, expected)
        # Absolute limits from the same response.
        expected = (
            limits.AbsoluteLimit("maxTotalRAMSize", 51200),
            limits.AbsoluteLimit("maxServerMeta", 5),
            limits.AbsoluteLimit("maxImageMeta", 5),
            limits.AbsoluteLimit("maxPersonality", 5),
            limits.AbsoluteLimit("maxPersonalitySize", 10240),
        )
        abs_limits = list(obj.absolute)
        self.assertEqual(len(abs_limits), len(expected))
        for limit in abs_limits:
            self.assertIn(limit, expected)
| apache-2.0 |
ssanderson/dask | dask/multiprocessing.py | 11 | 2328 | from __future__ import absolute_import, division, print_function
from toolz import curry, pipe, partial
from .optimize import fuse, cull
import multiprocessing
import dill
import pickle
from .async import get_async # TODO: get better get
from .context import _globals
def _process_get_id():
return multiprocessing.current_process().ident
def get(dsk, keys, optimizations=None, num_workers=None,
        func_loads=None, func_dumps=None, **kwargs):
    """ Multiprocessed get function appropriate for Bags

    Parameters
    ----------
    dsk: dict
        dask graph
    keys: object or list
        Desired results from graph
    optimizations: list of functions, optional
        optimizations to perform on graph before execution
        (defaults to none)
    num_workers: int
        Number of worker processes (defaults to number of cores)
    func_dumps: function
        Function to use for function serialization (defaults to dill.dumps)
    func_loads: function
        Function to use for function deserialization (defaults to dill.loads)
    """
    # A mutable default argument ([]) would be shared across calls;
    # normalize None to a fresh empty list instead.
    optimizations = [] if optimizations is None else optimizations

    # Reuse a globally configured pool when one exists; otherwise create
    # a private pool for this call and close it in the finally block.
    pool = _globals['pool']
    if pool is None:
        pool = multiprocessing.Pool(num_workers)
        cleanup = True
    else:
        cleanup = False

    manager = multiprocessing.Manager()
    queue = manager.Queue()

    apply_async = dill_apply_async(pool.apply_async,
                                   func_dumps=func_dumps, func_loads=func_loads)

    # Optimize Dask: fuse linear chains, cull unreachable tasks, then run
    # any caller-supplied optimization passes.
    dsk2 = fuse(dsk, keys)
    dsk3 = pipe(dsk2, partial(cull, keys=keys), *optimizations)

    try:
        # Run
        result = get_async(apply_async, len(pool._pool), dsk3, keys,
                           queue=queue, get_id=_process_get_id, **kwargs)
    finally:
        if cleanup:
            pool.close()
    return result
def apply_func(sfunc, sargs, skwds, loads=None):
    """Deserialize a (function, args, kwargs) triple and invoke it.

    ``loads`` falls back to the globally configured deserializer, then to
    ``dill.loads``.
    """
    deserialize = loads or _globals.get('loads') or dill.loads
    target = deserialize(sfunc)
    positional = deserialize(sargs)
    keyword = deserialize(skwds)
    return target(*positional, **keyword)
@curry
def dill_apply_async(apply_async, func, args=(), kwds=None,
                     func_loads=None, func_dumps=None):
    """Serialize ``func(*args, **kwds)`` with dill and submit it to a pool.

    Parameters
    ----------
    apply_async: callable
        The pool's ``apply_async`` method.
    func, args, kwds:
        The call to run remotely; it is serialized here and deserialized
        in the worker by ``apply_func``.
    func_loads, func_dumps: callables, optional
        Overrides for the (de)serializers; default to the globally
        configured functions, then to dill.
    """
    # Avoid a mutable default argument ({}); use None as the sentinel.
    kwds = {} if kwds is None else kwds
    dumps = func_dumps or _globals.get('func_dumps') or dill.dumps
    sfunc = dumps(func)
    sargs = dumps(args)
    skwds = dumps(kwds)
    return apply_async(curry(apply_func, loads=func_loads),
                       args=[sfunc, sargs, skwds])
| bsd-3-clause |
rob-smallshire/trailer | trailer/readers/gpx_1_0/parser.py | 1 | 7330 | from lxml import etree
from trailer.readers.common import optional_text, determine_gpx_namespace
from trailer.model.bounds import Bounds
from trailer.model.fieldtools import nullable
from trailer.model.gpx_model import GpxModel
from trailer.model.link import Link
from trailer.model.metadata import Metadata
from trailer.model.person import Person
from trailer.model.route import Route
from trailer.model.segment import Segment
from trailer.model.track import Track
from trailer.model.waypoint import Waypoint
def read_gpx(xml, gpxns=None):
    """Read a GPX 1.0 document from a binary stream into a GpxModel.

    Args:
        xml: A file-like object opened in binary mode (bytes, not
            characters) whose root XML element is a <gpx> element
            carrying a version attribute. Only GPX version 1.0 is
            supported (see parse_gpx).
        gpxns: The GPX XML namespace in Clarke notation (wrapped in
            curly braces). When None (the default), the namespace is
            detected from the document itself.
    """
    document = etree.parse(xml)
    return parse_gpx(document.getroot(), gpxns=gpxns)
def parse_gpx(gpx_element, gpxns=None):
    """Build a GpxModel from a parsed <gpx> root element.

    Args:
        gpx_element: The root <gpx> element of a GPX 1.0 document.
        gpxns: Optional GPX namespace in Clarke notation; autodetected
            from the element when None.

    Returns:
        A GpxModel representing the data from the supplied element.

    Raises:
        ValueError: If the element is not a <gpx> root or is not GPX 1.0.
    """
    if gpxns is None:
        gpxns = determine_gpx_namespace(gpx_element)

    if gpx_element.tag != gpxns + 'gpx':
        raise ValueError("No gpx root element")

    def text_of(tag):
        return optional_text(gpx_element, gpxns + tag)

    version = gpx_element.attrib['version']
    if not version.startswith('1.0'):
        raise ValueError("Not a GPX 1.0 file")
    creator = gpx_element.attrib['creator']

    metadata = Metadata(
        name=text_of('name'),
        description=text_of('desc'),
        author=Person(text_of('author'), text_of('email')),
        links=make_links(text_of('url'), text_of('urlname')),
        time=text_of('time'),
        keywords=text_of('keywords'),
        bounds=nullable(parse_bounds)(gpx_element.find(gpxns + 'bounds')),
    )

    waypoints = [parse_waypoint(element, gpxns)
                 for element in gpx_element.findall(gpxns + 'wpt')]
    routes = [parse_route(element, gpxns)
              for element in gpx_element.findall(gpxns + 'rte')]
    tracks = [parse_track(element, gpxns)
              for element in gpx_element.findall(gpxns + 'trk')]

    # TODO : Private elements

    return GpxModel(creator, metadata, waypoints, routes, tracks)
def parse_bounds(bounds_element):
    """Convert a <bounds> element's lat/lon attributes into a Bounds."""
    attributes = bounds_element.attrib
    return Bounds(attributes['minlat'], attributes['minlon'],
                  attributes['maxlat'], attributes['maxlon'])
def parse_waypoint(waypoint_element, gpxns=None):
    """Parse a waypoint-shaped element (<wpt>, <rtept>, <trkpt>) into a
    Waypoint.

    Args:
        waypoint_element: The element to parse; must carry lat/lon
            attributes.
        gpxns: Optional GPX namespace in Clarke notation; autodetected
            when None.
    """
    if gpxns is None:
        gpxns = determine_gpx_namespace(waypoint_element)

    def text_of(tag):
        return optional_text(waypoint_element, gpxns + tag)

    # TODO: Private elements - consider passing private element parser in
    # to cope with differences between waypoints, routes, etc.
    return Waypoint(
        waypoint_element.attrib['lat'],      # latitude
        waypoint_element.attrib['lon'],      # longitude
        text_of('ele'),                      # elevation
        text_of('time'),
        text_of('magvar'),
        text_of('geoidheight'),              # geoid height
        text_of('name'),
        text_of('cmt'),                      # comment
        text_of('desc'),                     # description
        text_of('src'),                      # source
        make_links(text_of('url'), text_of('urlname')),
        text_of('sym'),                      # symbol
        text_of('type'),                     # classification
        text_of('fix'),
        text_of('sat'),                      # number of satellites
        text_of('hdop'),
        text_of('vdop'),
        text_of('pdop'),
        text_of('ageofdgpsdata'),            # seconds since DGPS update
        text_of('dgpsid'),                   # DGPS station type
        text_of('course'),
        text_of('speed'),
    )
def parse_route(route_element, gpxns=None):
    """Parse a <rte> element, including its <rtept> children, into a Route."""
    if gpxns is None:
        gpxns = determine_gpx_namespace(route_element)

    def text_of(tag):
        return optional_text(route_element, gpxns + tag)

    points = [parse_waypoint(point_element, gpxns)
              for point_element in route_element.findall(gpxns + 'rtept')]
    return Route(name=text_of('name'),
                 comment=text_of('cmt'),
                 description=text_of('desc'),
                 source=text_of('src'),
                 links=make_links(text_of('url'), text_of('urlname')),
                 number=text_of('number'),
                 points=points)
def parse_track(track_element, gpxns=None):
    """Parse a <trk> element, including its segments, into a Track.

    Args:
        track_element: A <trk> element from a GPX 1.0 document.
        gpxns: Optional GPX namespace in Clarke notation; autodetected
            when None.

    Returns:
        A Track populated from the element's children.
    """
    gpxns = gpxns if gpxns is not None else determine_gpx_namespace(track_element)

    get_text = lambda tag: optional_text(track_element, gpxns+tag)

    name = get_text('name')
    # Bugfix: GPX 1.0 stores the comment in a <cmt> element (the same tag
    # used by routes and waypoints), not <comment>; the old lookup of
    # 'comment' always yielded None.
    comment = get_text('cmt')
    description = get_text('desc')
    source = get_text('src')
    url = get_text('url')
    urlname = get_text('urlname')
    links = make_links(url, urlname)
    number = get_text('number')

    # TODO: Private elements

    segment_elements = track_element.findall(gpxns+'trkseg')
    segments = [parse_segment(segment_element, gpxns) for segment_element in segment_elements]

    track = Track(name=name, comment=comment, description=description,
                  source=source, links=links, number=number, segments=segments)
    return track
def parse_segment(segment_element, gpxns=None):
    """Parse a <trkseg> element into a Segment of track points."""
    if gpxns is None:
        gpxns = determine_gpx_namespace(segment_element)
    points = [parse_waypoint(point_element, gpxns)
              for point_element in segment_element.findall(gpxns + 'trkpt')]
    return Segment(points)
def make_links(url, urlname):
    """Wrap a url/urlname pair in a one-element Link list.

    Returns an empty list when both parts are None.
    """
    if url is None and urlname is None:
        return []
    return [Link(url, urlname)]
def main(path='/Users/rjs/dev/trailer/data/blue_hills.gpx'):
    """Parse a GPX 1.0 file and return the resulting GpxModel.

    Args:
        path: Filesystem path of the GPX file to read. Defaults to the
            previously hard-coded sample file so existing callers are
            unaffected; pass another path to parse a different file.
    """
    with open(path, 'rb') as xml:
        gpx_model = read_gpx(xml)
    return gpx_model
if __name__ == '__main__':
    # Parse the sample file when executed as a script.  The trailing
    # `pass` is presumably kept as a debugger stopping point for
    # inspecting gpx_model -- confirm before removing.
    gpx_model = main()
    pass
| mit |
joopert/home-assistant | tests/components/api/test_init.py | 5 | 18376 | """The tests for the Home Assistant API component."""
# pylint: disable=protected-access
import asyncio
import json
from unittest.mock import patch
from aiohttp import web
import pytest
import voluptuous as vol
from homeassistant import const
from homeassistant.bootstrap import DATA_LOGGING
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
from tests.common import async_mock_service
@pytest.fixture
def mock_api_client(hass, hass_client):
    """Start the Hass HTTP component and return admin API client."""
    # Set up the "api" integration synchronously on the test event loop,
    # then hand back an aiohttp test client connected to it.
    hass.loop.run_until_complete(async_setup_component(hass, "api", {}))
    return hass.loop.run_until_complete(hass_client())
@asyncio.coroutine
def test_api_list_state_entities(hass, mock_api_client):
"""Test if the debug interface allows us to list state entities."""
hass.states.async_set("test.entity", "hello")
resp = yield from mock_api_client.get(const.URL_API_STATES)
assert resp.status == 200
json = yield from resp.json()
remote_data = [ha.State.from_dict(item) for item in json]
assert remote_data == hass.states.async_all()
@asyncio.coroutine
def test_api_get_state(hass, mock_api_client):
"""Test if the debug interface allows us to get a state."""
hass.states.async_set("hello.world", "nice", {"attr": 1})
resp = yield from mock_api_client.get(
const.URL_API_STATES_ENTITY.format("hello.world")
)
assert resp.status == 200
json = yield from resp.json()
data = ha.State.from_dict(json)
state = hass.states.get("hello.world")
assert data.state == state.state
assert data.last_changed == state.last_changed
assert data.attributes == state.attributes
@asyncio.coroutine
def test_api_get_non_existing_state(hass, mock_api_client):
"""Test if the debug interface allows us to get a state."""
resp = yield from mock_api_client.get(
const.URL_API_STATES_ENTITY.format("does_not_exist")
)
assert resp.status == 404
@asyncio.coroutine
def test_api_state_change(hass, mock_api_client):
"""Test if we can change the state of an entity that exists."""
hass.states.async_set("test.test", "not_to_be_set")
yield from mock_api_client.post(
const.URL_API_STATES_ENTITY.format("test.test"),
json={"state": "debug_state_change2"},
)
assert hass.states.get("test.test").state == "debug_state_change2"
# pylint: disable=invalid-name
@asyncio.coroutine
def test_api_state_change_of_non_existing_entity(hass, mock_api_client):
"""Test if changing a state of a non existing entity is possible."""
new_state = "debug_state_change"
resp = yield from mock_api_client.post(
const.URL_API_STATES_ENTITY.format("test_entity.that_does_not_exist"),
json={"state": new_state},
)
assert resp.status == 201
assert hass.states.get("test_entity.that_does_not_exist").state == new_state
# pylint: disable=invalid-name
@asyncio.coroutine
def test_api_state_change_with_bad_data(hass, mock_api_client):
"""Test if API sends appropriate error if we omit state."""
resp = yield from mock_api_client.post(
const.URL_API_STATES_ENTITY.format("test_entity.that_does_not_exist"), json={}
)
assert resp.status == 400
# pylint: disable=invalid-name
@asyncio.coroutine
def test_api_state_change_to_zero_value(hass, mock_api_client):
"""Test if changing a state to a zero value is possible."""
resp = yield from mock_api_client.post(
const.URL_API_STATES_ENTITY.format("test_entity.with_zero_state"),
json={"state": 0},
)
assert resp.status == 201
resp = yield from mock_api_client.post(
const.URL_API_STATES_ENTITY.format("test_entity.with_zero_state"),
json={"state": 0.0},
)
assert resp.status == 200
# pylint: disable=invalid-name
@asyncio.coroutine
def test_api_state_change_push(hass, mock_api_client):
"""Test if we can push a change the state of an entity."""
hass.states.async_set("test.test", "not_to_be_set")
events = []
@ha.callback
def event_listener(event):
"""Track events."""
events.append(event)
hass.bus.async_listen(const.EVENT_STATE_CHANGED, event_listener)
yield from mock_api_client.post(
const.URL_API_STATES_ENTITY.format("test.test"), json={"state": "not_to_be_set"}
)
yield from hass.async_block_till_done()
assert len(events) == 0
yield from mock_api_client.post(
const.URL_API_STATES_ENTITY.format("test.test"),
json={"state": "not_to_be_set", "force_update": True},
)
yield from hass.async_block_till_done()
assert len(events) == 1
# pylint: disable=invalid-name
@asyncio.coroutine
def test_api_fire_event_with_no_data(hass, mock_api_client):
"""Test if the API allows us to fire an event."""
test_value = []
@ha.callback
def listener(event):
"""Record that our event got called."""
test_value.append(1)
hass.bus.async_listen_once("test.event_no_data", listener)
yield from mock_api_client.post(
const.URL_API_EVENTS_EVENT.format("test.event_no_data")
)
yield from hass.async_block_till_done()
assert len(test_value) == 1
# pylint: disable=invalid-name
@asyncio.coroutine
def test_api_fire_event_with_data(hass, mock_api_client):
"""Test if the API allows us to fire an event."""
test_value = []
@ha.callback
def listener(event):
"""Record that our event got called.
Also test if our data came through.
"""
if "test" in event.data:
test_value.append(1)
hass.bus.async_listen_once("test_event_with_data", listener)
yield from mock_api_client.post(
const.URL_API_EVENTS_EVENT.format("test_event_with_data"), json={"test": 1}
)
yield from hass.async_block_till_done()
assert len(test_value) == 1
# pylint: disable=invalid-name
@asyncio.coroutine
def test_api_fire_event_with_invalid_json(hass, mock_api_client):
"""Test if the API allows us to fire an event."""
test_value = []
@ha.callback
def listener(event):
"""Record that our event got called."""
test_value.append(1)
hass.bus.async_listen_once("test_event_bad_data", listener)
resp = yield from mock_api_client.post(
const.URL_API_EVENTS_EVENT.format("test_event_bad_data"),
data=json.dumps("not an object"),
)
yield from hass.async_block_till_done()
assert resp.status == 400
assert len(test_value) == 0
# Try now with valid but unusable JSON
resp = yield from mock_api_client.post(
const.URL_API_EVENTS_EVENT.format("test_event_bad_data"),
data=json.dumps([1, 2, 3]),
)
yield from hass.async_block_till_done()
assert resp.status == 400
assert len(test_value) == 0
@asyncio.coroutine
def test_api_get_config(hass, mock_api_client):
"""Test the return of the configuration."""
resp = yield from mock_api_client.get(const.URL_API_CONFIG)
result = yield from resp.json()
if "components" in result:
result["components"] = set(result["components"])
if "whitelist_external_dirs" in result:
result["whitelist_external_dirs"] = set(result["whitelist_external_dirs"])
assert hass.config.as_dict() == result
@asyncio.coroutine
def test_api_get_components(hass, mock_api_client):
"""Test the return of the components."""
resp = yield from mock_api_client.get(const.URL_API_COMPONENTS)
result = yield from resp.json()
assert set(result) == hass.config.components
@asyncio.coroutine
def test_api_get_event_listeners(hass, mock_api_client):
"""Test if we can get the list of events being listened for."""
resp = yield from mock_api_client.get(const.URL_API_EVENTS)
data = yield from resp.json()
local = hass.bus.async_listeners()
for event in data:
assert local.pop(event["event"]) == event["listener_count"]
assert len(local) == 0
@asyncio.coroutine
def test_api_get_services(hass, mock_api_client):
"""Test if we can get a dict describing current services."""
resp = yield from mock_api_client.get(const.URL_API_SERVICES)
data = yield from resp.json()
local_services = hass.services.async_services()
for serv_domain in data:
local = local_services.pop(serv_domain["domain"])
assert serv_domain["services"] == local
@asyncio.coroutine
def test_api_call_service_no_data(hass, mock_api_client):
"""Test if the API allows us to call a service."""
test_value = []
@ha.callback
def listener(service_call):
"""Record that our service got called."""
test_value.append(1)
hass.services.async_register("test_domain", "test_service", listener)
yield from mock_api_client.post(
const.URL_API_SERVICES_SERVICE.format("test_domain", "test_service")
)
yield from hass.async_block_till_done()
assert len(test_value) == 1
@asyncio.coroutine
def test_api_call_service_with_data(hass, mock_api_client):
"""Test if the API allows us to call a service."""
test_value = []
@ha.callback
def listener(service_call):
"""Record that our service got called.
Also test if our data came through.
"""
if "test" in service_call.data:
test_value.append(1)
hass.services.async_register("test_domain", "test_service", listener)
yield from mock_api_client.post(
const.URL_API_SERVICES_SERVICE.format("test_domain", "test_service"),
json={"test": 1},
)
yield from hass.async_block_till_done()
assert len(test_value) == 1
@asyncio.coroutine
def test_api_template(hass, mock_api_client):
"""Test the template API."""
hass.states.async_set("sensor.temperature", 10)
resp = yield from mock_api_client.post(
const.URL_API_TEMPLATE,
json={"template": "{{ states.sensor.temperature.state }}"},
)
body = yield from resp.text()
assert body == "10"
@asyncio.coroutine
def test_api_template_error(hass, mock_api_client):
"""Test the template API."""
hass.states.async_set("sensor.temperature", 10)
resp = yield from mock_api_client.post(
const.URL_API_TEMPLATE, json={"template": "{{ states.sensor.temperature.state"}
)
assert resp.status == 400
@asyncio.coroutine
def test_stream(hass, mock_api_client):
"""Test the stream."""
listen_count = _listen_count(hass)
resp = yield from mock_api_client.get(const.URL_API_STREAM)
assert resp.status == 200
assert listen_count + 1 == _listen_count(hass)
hass.bus.async_fire("test_event")
data = yield from _stream_next_event(resp.content)
assert data["event_type"] == "test_event"
@asyncio.coroutine
def test_stream_with_restricted(hass, mock_api_client):
"""Test the stream with restrictions."""
listen_count = _listen_count(hass)
resp = yield from mock_api_client.get(
"{}?restrict=test_event1,test_event3".format(const.URL_API_STREAM)
)
assert resp.status == 200
assert listen_count + 1 == _listen_count(hass)
hass.bus.async_fire("test_event1")
data = yield from _stream_next_event(resp.content)
assert data["event_type"] == "test_event1"
hass.bus.async_fire("test_event2")
hass.bus.async_fire("test_event3")
data = yield from _stream_next_event(resp.content)
assert data["event_type"] == "test_event3"
@asyncio.coroutine
def _stream_next_event(stream):
    """Read the stream for next event while ignoring ping."""
    while True:
        last_new_line = False
        data = b""
        # Accumulate bytes until a blank line (two consecutive "\n"),
        # which terminates one server-sent-events message.
        while True:
            dat = yield from stream.read(1)
            if dat == b"\n" and last_new_line:
                break
            data += dat
            last_new_line = dat == b"\n"
        # Drop the 6-character "data: " prefix after trimming whitespace.
        conv = data.decode("utf-8").strip()[6:]
        # Skip keep-alive pings; anything else is a real event payload.
        if conv != "ping":
            break
    return json.loads(conv)
def _listen_count(hass):
"""Return number of event listeners."""
return sum(hass.bus.async_listeners().values())
async def test_api_error_log(hass, aiohttp_client, hass_access_token, hass_admin_user):
"""Test if we can fetch the error log."""
hass.data[DATA_LOGGING] = "/some/path"
await async_setup_component(hass, "api", {})
client = await aiohttp_client(hass.http.app)
resp = await client.get(const.URL_API_ERROR_LOG)
# Verify auth required
assert resp.status == 401
with patch(
"aiohttp.web.FileResponse", return_value=web.Response(status=200, text="Hello")
) as mock_file:
resp = await client.get(
const.URL_API_ERROR_LOG,
headers={"Authorization": "Bearer {}".format(hass_access_token)},
)
assert len(mock_file.mock_calls) == 1
assert mock_file.mock_calls[0][1][0] == hass.data[DATA_LOGGING]
assert resp.status == 200
assert await resp.text() == "Hello"
# Verify we require admin user
hass_admin_user.groups = []
resp = await client.get(
const.URL_API_ERROR_LOG,
headers={"Authorization": "Bearer {}".format(hass_access_token)},
)
assert resp.status == 401
async def test_api_fire_event_context(hass, mock_api_client, hass_access_token):
"""Test if the API sets right context if we fire an event."""
test_value = []
@ha.callback
def listener(event):
"""Record that our event got called."""
test_value.append(event)
hass.bus.async_listen("test.event", listener)
await mock_api_client.post(
const.URL_API_EVENTS_EVENT.format("test.event"),
headers={"authorization": "Bearer {}".format(hass_access_token)},
)
await hass.async_block_till_done()
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
assert len(test_value) == 1
assert test_value[0].context.user_id == refresh_token.user.id
async def test_api_call_service_context(hass, mock_api_client, hass_access_token):
"""Test if the API sets right context if we call a service."""
calls = async_mock_service(hass, "test_domain", "test_service")
await mock_api_client.post(
"/api/services/test_domain/test_service",
headers={"authorization": "Bearer {}".format(hass_access_token)},
)
await hass.async_block_till_done()
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
assert len(calls) == 1
assert calls[0].context.user_id == refresh_token.user.id
async def test_api_set_state_context(hass, mock_api_client, hass_access_token):
"""Test if the API sets right context if we set state."""
await mock_api_client.post(
"/api/states/light.kitchen",
json={"state": "on"},
headers={"authorization": "Bearer {}".format(hass_access_token)},
)
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
state = hass.states.get("light.kitchen")
assert state.context.user_id == refresh_token.user.id
async def test_event_stream_requires_admin(hass, mock_api_client, hass_admin_user):
"""Test user needs to be admin to access event stream."""
hass_admin_user.groups = []
resp = await mock_api_client.get("/api/stream")
assert resp.status == 401
async def test_states_view_filters(hass, mock_api_client, hass_admin_user):
"""Test filtering only visible states."""
hass_admin_user.mock_policy({"entities": {"entity_ids": {"test.entity": True}}})
hass.states.async_set("test.entity", "hello")
hass.states.async_set("test.not_visible_entity", "invisible")
resp = await mock_api_client.get(const.URL_API_STATES)
assert resp.status == 200
json = await resp.json()
assert len(json) == 1
assert json[0]["entity_id"] == "test.entity"
async def test_get_entity_state_read_perm(hass, mock_api_client, hass_admin_user):
"""Test getting a state requires read permission."""
hass_admin_user.mock_policy({})
resp = await mock_api_client.get("/api/states/light.test")
assert resp.status == 401
async def test_post_entity_state_admin(hass, mock_api_client, hass_admin_user):
"""Test updating state requires admin."""
hass_admin_user.groups = []
resp = await mock_api_client.post("/api/states/light.test")
assert resp.status == 401
async def test_delete_entity_state_admin(hass, mock_api_client, hass_admin_user):
"""Test deleting entity requires admin."""
hass_admin_user.groups = []
resp = await mock_api_client.delete("/api/states/light.test")
assert resp.status == 401
async def test_post_event_admin(hass, mock_api_client, hass_admin_user):
"""Test sending event requires admin."""
hass_admin_user.groups = []
resp = await mock_api_client.post("/api/events/state_changed")
assert resp.status == 401
async def test_rendering_template_admin(hass, mock_api_client, hass_admin_user):
"""Test rendering a template requires admin."""
hass_admin_user.groups = []
resp = await mock_api_client.post(const.URL_API_TEMPLATE)
assert resp.status == 401
async def test_rendering_template_legacy_user(
hass, mock_api_client, aiohttp_client, legacy_auth
):
"""Test rendering a template with legacy API password."""
hass.states.async_set("sensor.temperature", 10)
client = await aiohttp_client(hass.http.app)
resp = await client.post(
const.URL_API_TEMPLATE,
json={"template": "{{ states.sensor.temperature.state }}"},
)
assert resp.status == 401
async def test_api_call_service_not_found(hass, mock_api_client):
    """Test that calling an unknown service fails with 400."""
    resp = await mock_api_client.post(
        const.URL_API_SERVICES_SERVICE.format("test_domain", "test_service")
    )
    assert resp.status == 400
async def test_api_call_service_bad_data(hass, mock_api_client):
    """Test that calling a service with schema-invalid data fails with 400."""
    test_value = []
    @ha.callback
    def listener(service_call):
        """Record that our service got called."""
        test_value.append(1)
    # Register a service whose schema requires "hello" to be a string.
    hass.services.async_register(
        "test_domain", "test_service", listener, schema=vol.Schema({"hello": str})
    )
    # Sending an int where a str is required must be rejected with 400.
    resp = await mock_api_client.post(
        const.URL_API_SERVICES_SERVICE.format("test_domain", "test_service"),
        json={"hello": 5},
    )
    assert resp.status == 400
| apache-2.0 |
aequitas/home-assistant | homeassistant/components/tahoma/sensor.py | 7 | 3687 | """Support for Tahoma sensors."""
from datetime import timedelta
import logging
from homeassistant.const import ATTR_BATTERY_LEVEL
from homeassistant.helpers.entity import Entity
from . import DOMAIN as TAHOMA_DOMAIN, TahomaDevice
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=60)
ATTR_RSSI_LEVEL = 'rssi_level'
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up Tahoma controller devices."""
    controller = hass.data[TAHOMA_DOMAIN]['controller']
    sensors = [TahomaSensor(device, controller)
               for device in hass.data[TAHOMA_DOMAIN]['devices']['sensor']]
    add_entities(sensors, True)
class TahomaSensor(TahomaDevice, Entity):
    """Representation of a Tahoma Sensor."""

    def __init__(self, tahoma_device, controller):
        """Initialize the sensor."""
        # Latest value read from the device; None until the first update.
        self.current_value = None
        # Whether the device reported itself reachable on the last update.
        self._available = False
        super().__init__(tahoma_device, controller)

    @property
    def state(self):
        """Return the current value of the sensor."""
        return self.current_value

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        if self.tahoma_device.type == 'Temperature Sensor':
            return None
        if self.tahoma_device.type == 'io:SomfyContactIOSystemSensor':
            return None
        if self.tahoma_device.type == 'io:LightIOSystemSensor':
            return 'lx'
        if self.tahoma_device.type == 'Humidity Sensor':
            return '%'
        if self.tahoma_device.type == 'rtds:RTDSContactSensor':
            return None
        if self.tahoma_device.type == 'rtds:RTDSMotionSensor':
            return None

    def update(self):
        """Update the state from the controller."""
        self.controller.get_states([self.tahoma_device])
        states = self.tahoma_device.active_states
        if self.tahoma_device.type == 'io:LightIOSystemSensor':
            self.current_value = states['core:LuminanceState']
            self._available = bool(
                states.get('core:StatusState') == 'available')
        if self.tahoma_device.type == 'io:SomfyContactIOSystemSensor':
            self.current_value = states['core:ContactState']
            self._available = bool(
                states.get('core:StatusState') == 'available')
        if self.tahoma_device.type == 'rtds:RTDSContactSensor':
            self.current_value = states['core:ContactState']
            self._available = True
        if self.tahoma_device.type == 'rtds:RTDSMotionSensor':
            self.current_value = states['core:OccupancyState']
            self._available = True
        # Bugfix: use %s, not %d -- contact/occupancy state values are not
        # guaranteed to be integers, and %d raised a log-formatting error
        # for non-numeric states.
        _LOGGER.debug("Update %s, value: %s", self._name, self.current_value)

    @property
    def device_state_attributes(self):
        """Return the device state attributes."""
        attr = {}
        super_attr = super().device_state_attributes
        if super_attr is not None:
            attr.update(super_attr)
        states = self.tahoma_device.active_states
        if 'core:RSSILevelState' in states:
            attr[ATTR_RSSI_LEVEL] = states['core:RSSILevelState']
        if 'core:SensorDefectState' in states:
            # NOTE(review): the sensor-defect state is exposed under the
            # battery-level attribute -- preserved from the original;
            # confirm against the Tahoma API before renaming.
            attr[ATTR_BATTERY_LEVEL] = states['core:SensorDefectState']
        return attr

    @property
    def available(self):
        """Return True if entity is available."""
        return self._available
| apache-2.0 |
glaubitz/fs-uae-debian | arcade/OpenGL/arrays/formathandler.py | 9 | 3836 | """Base class for the various Python data-format storage type APIs
Data-type handlers are specified using OpenGL.plugins module
"""
import ctypes
from OpenGL import plugins
class FormatHandler( object ):
    """Abstract class describing the handler interface

    Each data-type handler is responsible for providing a number of methods
    which allow it to manipulate (and create) instances of the data-type
    it represents.
    """
    # Maps "module.Class" keys to plugin entry points loaded on first use.
    LAZY_TYPE_REGISTRY = {}  # more registrations
    HANDLER_REGISTRY = {}
    baseType = None
    typeConstant = None
    HANDLED_TYPES = ()
    preferredOutput = None
    isOutput = False
    GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays']
    ALL_OUTPUT_HANDLERS = []

    @classmethod
    def loadAll( cls ):
        """Load all OpenGL.plugins-registered FormatHandler classes
        """
        for entrypoint in plugins.FormatHandler.all():
            cls.loadPlugin( entrypoint )

    @classmethod
    def loadPlugin( cls, entrypoint ):
        """Load a single entry-point via plugins module

        On success the handler is instantiated, registered under the
        entry point's name and returned; an ImportError is logged and
        swallowed (the handler is simply unavailable).
        """
        if not entrypoint.loaded:
            # Imported locally to avoid a circular import at module load.
            from OpenGL.arrays.arraydatatype import ArrayDatatype
            try:
                plugin_class = entrypoint.load()
            except ImportError as err:
                from OpenGL import logs
                from OpenGL._configflags import WARN_ON_FORMAT_UNAVAILABLE
                _log = logs.getLog( 'OpenGL.formathandler' )
                if WARN_ON_FORMAT_UNAVAILABLE:
                    logFunc = _log.warn
                else:
                    logFunc = _log.info
                logFunc(
                    'Unable to load registered array format handler %s:\n%s',
                    entrypoint.name, _log.getException( err )
                )
            else:
                handler = plugin_class()
                #handler.register( handler.HANDLED_TYPES )
                ArrayDatatype.getRegistry()[ entrypoint.name ] = handler
                return handler
            # NOTE(review): only reached on the ImportError path (the
            # success branch returns above), so successful loads never set
            # the loaded flag -- behavior preserved as-is; confirm intent
            # before changing.
            entrypoint.loaded = True

    @classmethod
    def typeLookup( cls, type ):
        """Lookup handler by data-type

        Falls back to the lazy plugin registry for types whose handler
        has not been loaded yet.
        """
        # Bugfix: ArrayDatatype was referenced here without being in
        # scope, so any call raised NameError; import it locally exactly
        # as the other methods in this class do.
        from OpenGL.arrays.arraydatatype import ArrayDatatype
        registry = ArrayDatatype.getRegistry()
        try:
            return registry[ type ]
        except KeyError as err:
            key = '%s.%s'%(type.__module__,type.__name__)
            plugin = cls.LAZY_TYPE_REGISTRY.get( key )
            if plugin:
                cls.loadPlugin( plugin )
                return registry[ type ]
            raise KeyError( """Unable to find data-format handler for %s"""%( type,))

    def register( self, types=None ):
        """Register this class as handler for given set of types"""
        from OpenGL.arrays.arraydatatype import ArrayDatatype
        ArrayDatatype.getRegistry().register( self, types )

    def registerReturn( self ):
        """Register this handler as the default return-type handler"""
        from OpenGL.arrays.arraydatatype import ArrayDatatype
        ArrayDatatype.getRegistry().registerReturn( self )

    # The methods below form the abstract handler API; concrete handlers
    # override them.  As written they intentionally return None.
    def from_param( self, value, typeCode=None ):
        """Convert to a ctypes pointer value"""
    def dataPointer( self, value ):
        """return long for pointer value"""
    def asArray( self, value, typeCode=None ):
        """Given a value, convert to array representation"""
    def arrayToGLType( self, value ):
        """Given a value, guess OpenGL type of the corresponding pointer"""
    def arraySize( self, value, typeCode = None ):
        """Given a data-value, calculate dimensions for the array"""
    def unitSize( self, value, typeCode=None ):
        """Determine unit size of an array (if possible)"""
        # NOTE(review): both branches yield None; this looks like a stub
        # for subclasses -- preserved unchanged.
        if self.baseType is not None:
            return
    def dimensions( self, value, typeCode=None ):
        """Determine dimensions of the passed array value (if possible)"""
| gpl-2.0 |
aesteve/vertx-web | vertx-web/src/test/sockjs-protocol/unittest2/signals.py | 17 | 2417 | import signal
import weakref
from unittest2.compatibility import wraps
__unittest = True
class _InterruptHandler(object):
    """SIGINT handler used by installHandler().

    The first Ctrl-C asks every registered test result to stop; a second
    Ctrl-C (or a SIGINT received while another handler is installed)
    delegates to the default handler, normally raising KeyboardInterrupt.
    """
    def __init__(self, default_handler):
        self.called = False  # becomes True after the first SIGINT
        self.original_handler = default_handler  # restored by removeHandler()
        if isinstance(default_handler, int):
            if default_handler == signal.SIG_DFL:
                # Pretend it's signal.default_int_handler instead.
                default_handler = signal.default_int_handler
            elif default_handler == signal.SIG_IGN:
                # Not quite the same thing as SIG_IGN, but the closest we
                # can make it: do nothing.
                def default_handler(unused_signum, unused_frame):
                    pass
            else:
                raise TypeError("expected SIGINT signal handler to be "
                                "signal.SIG_IGN, signal.SIG_DFL, or a "
                                "callable object")
        self.default_handler = default_handler
    def __call__(self, signum, frame):
        installed_handler = signal.getsignal(signal.SIGINT)
        if installed_handler is not self:
            # if we aren't the installed handler, then delegate immediately
            # to the default handler
            self.default_handler(signum, frame)
        # A repeated interrupt stops being polite: fall back to the
        # default handler (typically raising KeyboardInterrupt).
        if self.called:
            self.default_handler(signum, frame)
        self.called = True
        # Ask every still-alive registered result to stop gracefully.
        for result in _results.keys():
            result.stop()
# Weakly-keyed registry of result objects to stop on SIGINT.  Weak keys
# mean a result is dropped automatically once the test machinery no longer
# holds a reference to it.
_results = weakref.WeakKeyDictionary()

def registerResult(result):
    """Track *result* so a Ctrl-C can ask it to stop."""
    _results[result] = 1

def removeResult(result):
    """Stop tracking *result*; return True if it was being tracked."""
    was_tracked = _results.pop(result, None)
    return bool(was_tracked)
# Module-level singleton, created (at most once) by installHandler().
_interrupt_handler = None

def installHandler():
    """Install the framework's SIGINT handler, wrapping the current one.

    Idempotent: repeated calls leave the first installed handler in place.
    """
    global _interrupt_handler
    if _interrupt_handler is not None:
        return
    previous = signal.getsignal(signal.SIGINT)
    _interrupt_handler = _InterruptHandler(previous)
    signal.signal(signal.SIGINT, _interrupt_handler)
def removeHandler(method=None):
    """Restore the pre-install SIGINT handler.

    Called with no argument, undoes installHandler() immediately.  Called
    with a callable, acts as a decorator: the wrapped function runs with
    the framework handler removed, and the previous handler is restored
    afterwards whatever the outcome.
    """
    if method is None:
        global _interrupt_handler
        if _interrupt_handler is not None:
            signal.signal(signal.SIGINT, _interrupt_handler.original_handler)
        return None

    @wraps(method)
    def inner(*args, **kwargs):
        saved = signal.getsignal(signal.SIGINT)
        removeHandler()
        try:
            return method(*args, **kwargs)
        finally:
            signal.signal(signal.SIGINT, saved)
    return inner
| apache-2.0 |
fxstein/cement | cement/core/log.py | 4 | 2956 | """
Cement core log module.
"""
from ..core import exc, interface, handler
def log_validator(klass, obj):
    """Validate a handler implementation against the ILog interface.

    Checks that *obj* provides every member the logging interface
    requires; raises via interface.validate on a missing member.
    """
    required_members = (
        '_setup set_level get_level info warn error fatal debug'.split())
    interface.validate(ILog, obj, required_members)
class ILog(interface.Interface):
    """
    This class defines the Log Handler Interface.  Classes that
    implement this handler must provide the methods and attributes defined
    below.

    Implementations do *not* subclass from interfaces.

    Usage:

    .. code-block:: python

        from cement.core import log

        class MyLogHandler(object):
            class Meta:
                interface = log.ILog
                label = 'my_log_handler'
            ...

    """
    # pylint: disable=W0232, C0111, R0903
    class IMeta:
        """Interface meta-data."""
        label = 'log'
        """The string identifier of the interface."""
        validator = log_validator
        """The interface validator function."""

    # Must be provided by the implementation
    Meta = interface.Attribute('Handler Meta-data')

    # NOTE(review): some stubs below omit ``self`` (e.g. set_level) while
    # others include it (e.g. warn).  Validation is by member name only, so
    # this is harmless, but the signatures are inconsistent -- confirm
    # before treating them as a contract.
    def _setup(app_obj):
        """
        The _setup function is called during application initialization and
        must 'setup' the handler object making it ready for the framework
        or the application to make further calls to it.

        :param app_obj: The application object.
        """

    def set_level():
        """
        Set the log level.  Must accept at least one of:
        ``['INFO', 'WARN', 'ERROR', 'DEBUG', or 'FATAL']``.
        """

    def get_level():
        """Return a string representation of the log level."""

    def info(msg):
        """
        Log to the 'INFO' facility.

        :param msg: The message to log.
        """

    def warn(self, msg):
        """
        Log to the 'WARN' facility.

        :param msg: The message to log.
        """

    def error(self, msg):
        """
        Log to the 'ERROR' facility.

        :param msg: The message to log.
        """

    def fatal(self, msg):
        """
        Log to the 'FATAL' facility.

        :param msg: The message to log.
        """

    def debug(self, msg):
        """
        Log to the 'DEBUG' facility.

        :param msg: The message to log.
        """
class CementLogHandler(handler.CementBaseHandler):
    """
    Base class that all Log Handlers should sub-class from.
    """
    class Meta:
        """
        Handler meta-data (can be passed as keyword arguments to the parent
        class).
        """
        label = None
        """The string identifier of this handler."""
        interface = ILog
        """The interface that this class implements."""

    def __init__(self, *args, **kw):
        # Pure pass-through: all real setup happens in CementBaseHandler;
        # this class exists only to bind the ILog interface via Meta.
        super(CementLogHandler, self).__init__(*args, **kw)
| bsd-3-clause |
peterm-itr/edx-platform | common/lib/xmodule/xmodule/foldit_module.py | 56 | 6913 | import logging
from lxml import etree
from pkg_resources import resource_string
from xmodule.editing_module import EditingDescriptor
from xmodule.x_module import XModule
from xmodule.xml_module import XmlDescriptor
from xblock.fields import Scope, Integer, String
from .fields import Date
from .util.duedate import get_extended_due_date
log = logging.getLogger(__name__)
class FolditFields(object):
    """XBlock-style field declarations shared by FolditModule/Descriptor."""
    # default to what Spring_7012x uses
    required_level_half_credit = Integer(default=3, scope=Scope.settings)
    required_sublevel_half_credit = Integer(default=5, scope=Scope.settings)
    # Level/sublevel a student must reach (before the due date) for full
    # credit.
    required_level = Integer(default=4, scope=Scope.settings)
    required_sublevel = Integer(default=5, scope=Scope.settings)
    due = Date(help="Date that this problem is due by", scope=Scope.settings)
    extended_due = Date(
        help="Date that this problem is due by for a particular student. This "
             "can be set by an instructor, and will override the global due "
             "date if it is set to a date that is later than the global due "
             "date.",
        default=None,
        scope=Scope.user_state,
    )
    # Stored as the strings 'true'/'false' (compared case-insensitively in
    # get_html), not as booleans.
    show_basic_score = String(scope=Scope.settings, default='false')
    show_leaderboard = String(scope=Scope.settings, default='false')
class FolditModule(FolditFields, XModule):
    """
    XModule that reports a student's progress in the external Foldit
    protein-folding game and grades it: 1 for reaching the required
    level/sublevel before the due date, 0.5 for the half-credit
    thresholds, 0 otherwise.
    """

    css = {'scss': [resource_string(__name__, 'css/foldit/leaderboard.scss')]}

    def __init__(self, *args, **kwargs):
        """
        Example:
         <foldit show_basic_score="true"
            required_level="4"
            required_sublevel="3"
            required_level_half_credit="2"
            required_sublevel_half_credit="3"
            show_leaderboard="false"/>
        """
        super(FolditModule, self).__init__(*args, **kwargs)
        # Per-student due date; falls back to the global due date.
        self.due_time = get_extended_due_date(self)

    def is_complete(self):
        """
        Did the user get to the required level before the due date?
        """
        # We normally don't want django dependencies in xmodule. foldit is
        # special. Import this late to avoid errors with things not yet being
        # initialized.
        from foldit.models import PuzzleComplete
        complete = PuzzleComplete.is_level_complete(
            self.system.anonymous_student_id,
            self.required_level,
            self.required_sublevel,
            self.due_time)
        return complete

    def is_half_complete(self):
        """
        Did the user reach the required level for half credit?

        Ideally this would be more flexible than just 0, 0.5, or 1 credit. On
        the other hand, the xml attributes for specifying more specific
        cut-offs and partial grades can get more confusing.
        """
        from foldit.models import PuzzleComplete
        complete = PuzzleComplete.is_level_complete(
            self.system.anonymous_student_id,
            self.required_level_half_credit,
            self.required_sublevel_half_credit,
            self.due_time)
        return complete

    def completed_puzzles(self):
        """
        Return a list of puzzles that this user has completed, as an array of
        dicts:

        [ {'set': int,
           'subset': int,
           'created': datetime} ]

        The list is sorted by set, then subset.
        """
        from foldit.models import PuzzleComplete
        return sorted(
            PuzzleComplete.completed_puzzles(self.system.anonymous_student_id),
            key=lambda d: (d['set'], d['subset']))

    def puzzle_leaders(self, n=10, courses=None):
        """
        Return a list of n (user, score) pairs corresponding to the top
        scores, in descending order of score.

        n: how many top scores to return.
        courses: course keys to restrict the leaderboard to; defaults to
            this module's own course.
        """
        from foldit.models import Score
        if courses is None:
            courses = [self.location.course_key]

        # Bug fix: the top-N size used to be hard-coded to 10, silently
        # ignoring the ``n`` argument.
        leaders = [(leader['username'], leader['score'])
                   for leader in Score.get_tops_n(n, course_list=courses)]
        leaders.sort(key=lambda x: -x[1])
        return leaders

    def get_html(self):
        """
        Render the html for the module.
        """
        goal_level = '{0}-{1}'.format(
            self.required_level,
            self.required_sublevel)

        # These fields are 'true'/'false' strings, not booleans.
        showbasic = (self.show_basic_score.lower() == "true")
        showleader = (self.show_leaderboard.lower() == "true")

        context = {
            'due': self.due,
            'success': self.is_complete(),
            'goal_level': goal_level,
            'completed': self.completed_puzzles(),
            'top_scores': self.puzzle_leaders(),
            'show_basic': showbasic,
            'show_leader': showleader,
            'folditbasic': self.get_basicpuzzles_html(),
            'folditchallenge': self.get_challenge_html()
        }

        return self.system.render_template('foldit.html', context)

    def get_basicpuzzles_html(self):
        """
        Render html for the basic puzzle section.
        """
        goal_level = '{0}-{1}'.format(
            self.required_level,
            self.required_sublevel)
        context = {
            'due': self.due,
            'success': self.is_complete(),
            'goal_level': goal_level,
            'completed': self.completed_puzzles(),
        }
        return self.system.render_template('folditbasic.html', context)

    def get_challenge_html(self):
        """
        Render html for challenge (i.e., the leaderboard)
        """
        context = {
            'top_scores': self.puzzle_leaders()}
        return self.system.render_template('folditchallenge.html', context)

    def get_score(self):
        """
        Return {'score': s, 'total': max} where s is:
        0   if the half-credit level/sublevel was not reached.
        0.5 if required_level_half_credit and required_sublevel_half_credit
            were reached.
        1   if required_level and required_sublevel were reached.
        """
        if self.is_complete():
            score = 1
        elif self.is_half_complete():
            score = 0.5
        else:
            score = 0
        return {'score': score,
                'total': self.max_score()}

    def max_score(self):
        """Maximum achievable score (this module grades out of 1)."""
        return 1
class FolditDescriptor(FolditFields, XmlDescriptor, EditingDescriptor):
    """
    Module for adding Foldit problems to courses
    """
    mako_template = "widgets/html-edit.html"
    module_class = FolditModule
    filename_extension = "xml"
    has_score = True

    js = {'coffee': [resource_string(__name__, 'js/src/html/edit.coffee')]}
    js_module_name = "HTMLEditingDescriptor"

    # The grade changes without any student interaction with the edx website,
    # so always need to actually check.
    always_recalculate_grades = True

    @classmethod
    def definition_from_xml(cls, xml_object, system):
        # All configuration lives in xml attributes (handled by the fields
        # mixin); there is no definition payload or children to extract.
        return {}, []

    def definition_to_xml(self, resource_fs):
        # Serialize as a bare <foldit/> element; field attributes are
        # appended elsewhere by the XML export machinery.
        xml_object = etree.Element('foldit')
        return xml_object
| agpl-3.0 |
balister/GNU-Radio | gr-digital/python/digital/ofdm_sync_fixed.py | 58 | 1964 | #!/usr/bin/env python
#
# Copyright 2007,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import math
from gnuradio import gr
from gnuradio import blocks
class ofdm_sync_fixed(gr.hier_block2):
    """OFDM 'synchronizer' with fixed, precomputed outputs.

    Instead of estimating timing and frequency from the input signal, this
    block discards its input and replays a fixed trigger pattern (one pulse
    per packet, at the last sample of the first symbol) together with a
    constant frequency offset, repeating forever.
    """
    def __init__(self, fft_length, cp_length, nsymbols, freq_offset, logging=False):
        gr.hier_block2.__init__(
            self, "ofdm_sync_fixed",
            gr.io_signature(1, 1, gr.sizeof_gr_complex),              # Input signature
            gr.io_signature2(2, 2, gr.sizeof_float, gr.sizeof_char))  # Output signature

        symbol_length = fft_length + cp_length
        pkt_length = nsymbols * symbol_length

        # Fixed trigger point instead of a sync block: a single 1 at the
        # end of the first symbol of each packet, zeros elsewhere.
        trigger_pattern = [0] * pkt_length
        trigger_pattern[symbol_length - 1] = 1
        self.peak_trigger = blocks.vector_source_b(trigger_pattern, True)

        # Pre-defined, constant frequency offset.
        offset_pattern = [math.pi * freq_offset] * pkt_length
        self.frequency_offset = blocks.vector_source_f(offset_pattern, True)

        # The input itself is unused; terminate it in a null sink.
        self.connect(self, blocks.null_sink(gr.sizeof_gr_complex))
        self.connect(self.frequency_offset, (self, 0))
        self.connect(self.peak_trigger, (self, 1))

        if logging:
            self.connect(self.peak_trigger,
                         blocks.file_sink(gr.sizeof_char,
                                          "ofdm_sync_fixed-peaks_b.dat"))
| gpl-3.0 |
garbersc/keras-galaxies | extract_pysex_params_gen2.py | 8 | 3889 | import load_data
import pysex
import numpy as np
import multiprocessing as mp
import cPickle as pickle
"""
Extract a bunch of extra info to get a better idea of the size of objects
"""
# Which image subsets to process; each produces one output file.
SUBSETS = ['train', 'test']
# %s is filled with the subset name ('train'/'test').
TARGET_PATTERN = "data/pysex_params_gen2_%s.npy.gz"
# NOTE(review): used as the denominator of exp(-r^2/SIGMA2) in
# estimate_params, i.e. a variance-like scale rather than a std -- confirm.
SIGMA2 = 5000 # 5000 # std of the centrality weighting (Gaussian)
DETECT_THRESH = 2.0 # 10.0 # detection threshold for sextractor
# Worker processes for the multiprocessing pool below.
NUM_PROCESSES = 8
def estimate_params(img):
    """Run SExtractor on *img* and return parameters of the most salient
    object as a tuple (x, y, a, b, theta, petro_radius).

    Salience favours large objects (ellipse area) near the image centre
    (Gaussian weight).  If SExtractor finds nothing, centre coordinates
    and NaN shape parameters are returned.
    """
    img_green = img[..., 1] # supposedly using the green channel is a good idea. alternatively we could use luma.
    # this seems to work well enough.
    out = pysex.run(img_green, params=[
        'X_IMAGE', 'Y_IMAGE', # barycenter
        # 'XMIN_IMAGE', 'XMAX_IMAGE', 'YMIN_IMAGE', 'YMAX_IMAGE', # enclosing rectangle
        # 'XPEAK_IMAGE', 'YPEAK_IMAGE', # location of maximal intensity
        'A_IMAGE', 'B_IMAGE', 'THETA_IMAGE', # ellipse parameters
        'PETRO_RADIUS',
        # 'KRON_RADIUS', 'PETRO_RADIUS', 'FLUX_RADIUS', 'FWHM_IMAGE', # various radii
    ], conf_args={ 'DETECT_THRESH': DETECT_THRESH })
    # x and y are flipped for some reason.
    # theta should be 90 - theta.
    # we convert these here so we can plot stuff with matplotlib easily.
    try:
        ys = out['X_IMAGE'].tonumpy()
        xs = out['Y_IMAGE'].tonumpy()
        as_ = out['A_IMAGE'].tonumpy()
        bs = out['B_IMAGE'].tonumpy()
        thetas = 90 - out['THETA_IMAGE'].tonumpy()
        # kron_radii = out['KRON_RADIUS'].tonumpy()
        petro_radii = out['PETRO_RADIUS'].tonumpy()
        # flux_radii = out['FLUX_RADIUS'].tonumpy()
        # fwhms = out['FWHM_IMAGE'].tonumpy()
        # detect the most salient galaxy
        # take in account size and centrality
        surface_areas = np.pi * (as_ * bs)
        centralities = np.exp(-((xs - 211.5)**2 + (ys - 211.5)**2)/SIGMA2) # 211.5, 211.5 is the center of the image
        # salience is proportional to surface area, with a gaussian prior on the distance to the center.
        saliences = surface_areas * centralities
        most_salient_idx = np.argmax(saliences)
        x = xs[most_salient_idx]
        y = ys[most_salient_idx]
        a = as_[most_salient_idx]
        b = bs[most_salient_idx]
        theta = thetas[most_salient_idx]
        # kron_radius = kron_radii[most_salient_idx]
        petro_radius = petro_radii[most_salient_idx]
        # flux_radius = flux_radii[most_salient_idx]
        # fwhm = fwhms[most_salient_idx]
    except TypeError: # sometimes these are empty (no objects found), use defaults in that case
        # NOTE(review): relies on pysex raising/propagating TypeError for
        # empty catalogs -- confirm against the pysex version in use.
        x = 211.5
        y = 211.5
        a = np.nan # dunno what this has to be, deal with it later
        b = np.nan # same
        theta = np.nan # same
        # kron_radius = np.nan
        petro_radius = np.nan
        # flux_radius = np.nan
        # fwhm = np.nan
    # return (x, y, a, b, theta, flux_radius, kron_radius, petro_radius, fwhm)
    return (x, y, a, b, theta, petro_radius)
# Python 2 script body: extract parameters for every image in each subset
# and save them as one array per subset.
for subset in SUBSETS:
    print "SUBSET: %s" % subset
    print
    if subset == 'train':
        num_images = load_data.num_train
        ids = load_data.train_ids
    elif subset == 'test':
        num_images = load_data.num_test
        ids = load_data.test_ids
    # Redefined each iteration; this stays picklable for multiprocessing
    # because a for-loop does not create a new scope -- `process` is still
    # a module-level name.
    def process(k):
        print "image %d/%d (%s)" % (k + 1, num_images, subset)
        img_id = ids[k]
        img = load_data.load_image(img_id, from_ram=True, subset=subset)
        return estimate_params(img)
    pool = mp.Pool(NUM_PROCESSES)
    estimated_params = pool.map(process, xrange(num_images), chunksize=100)
    pool.close()
    pool.join()
    # estimated_params = map(process, xrange(num_images)) # no mp for debugging
    # Rows are (x, y, a, b, theta, petro_radius) tuples from estimate_params.
    params_array = np.array(estimated_params)
    target_path = TARGET_PATTERN % subset
    print "Saving to %s..." % target_path
    load_data.save_gz(target_path, params_array)
| bsd-3-clause |
wuhengzhi/chromium-crosswalk | third_party/cython/src/Cython/Compiler/Symtab.py | 87 | 96295 | #
# Symbol Table
#
import copy
import re
from Errors import warning, error, InternalError
from StringEncoding import EncodedString
import Options, Naming
import PyrexTypes
from PyrexTypes import py_object_type, unspecified_type
from TypeSlots import \
pyfunction_signature, pymethod_signature, \
get_special_method_signature, get_property_accessor_signature
import Code
import __builtin__ as builtins
# Identifiers reserved by ISO C99 (ISO/IEC 9899:1999, section 6.4.1).
# User identifiers that collide with these must be mangled when emitting C.
iso_c99_keywords = set(
    ['auto', 'break', 'case', 'char', 'const', 'continue', 'default', 'do',
     'double', 'else', 'enum', 'extern', 'float', 'for', 'goto', 'if',
     'int', 'long', 'register', 'return', 'short', 'signed', 'sizeof',
     'static', 'struct', 'switch', 'typedef', 'union', 'unsigned', 'void',
     'volatile', 'while',
     # Bug fix: a missing comma used to fuse these two into the single
     # bogus entry '_Complex, _Imaginary', so neither keyword was reserved.
     '_Bool', '_Complex', '_Imaginary', 'inline', 'restrict'])
def c_safe_identifier(cname):
    """Return *cname*, prefixed if it would be unsafe as a C identifier.

    Double-underscore names (other than our own mangled names and
    '__weakref__') and ISO C99 keywords get the Pyrex prefix prepended;
    anything else passes through unchanged.
    """
    is_reserved_dunder = (
        cname[:2] == '__'
        and not cname.startswith(Naming.pyrex_prefix)
        and cname != '__weakref__')
    if is_reserved_dunder or cname in iso_c99_keywords:
        return Naming.pyrex_prefix + cname
    return cname
class BufferAux(object):
    """Per-variable bookkeeping for buffer-typed local variables."""

    # Flipped to True once any code path needs write access to the buffer.
    writable_needed = False

    def __init__(self, buflocal_nd_var, rcbuf_var):
        self.buflocal_nd_var, self.rcbuf_var = buflocal_nd_var, rcbuf_var

    def __repr__(self):
        return "<BufferAux %r>" % self.__dict__
class Entry(object):
    # A symbol table entry in a Scope or ModuleNamespace.
    #
    # name             string     Python name of entity
    # cname            string     C name of entity
    # type             PyrexType  Type of entity
    # doc              string     Doc string
    # init             string     Initial value
    # visibility       'private' or 'public' or 'extern'
    # is_builtin       boolean    Is an entry in the Python builtins dict
    # is_cglobal       boolean    Is a C global variable
    # is_pyglobal      boolean    Is a Python module-level variable
    #                               or class attribute during
    #                               class construction
    # is_member        boolean    Is an assigned class member
    # is_pyclass_attr  boolean    Is a name in a Python class namespace
    # is_variable      boolean    Is a variable
    # is_cfunction     boolean    Is a C function
    # is_cmethod       boolean    Is a C method of an extension type
    # is_builtin_cmethod boolean  Is a C method of a builtin type (implies is_cmethod)
    # is_unbound_cmethod boolean  Is an unbound C method of an extension type
    # is_final_cmethod boolean    Is non-overridable C method
    # is_inline_cmethod boolean   Is inlined C method
    # is_anonymous     boolean    Is a anonymous pyfunction entry
    # is_type          boolean    Is a type definition
    # is_cclass        boolean    Is an extension class
    # is_cpp_class     boolean    Is a C++ class
    # is_const         boolean    Is a constant
    # is_property      boolean    Is a property of an extension type:
    # doc_cname        string or None  C const holding the docstring
    # getter_cname     string     C func for getting property
    # setter_cname     string     C func for setting or deleting property
    # is_self_arg      boolean    Is the "self" arg of an exttype method
    # is_arg           boolean    Is the arg of a method
    # is_local         boolean    Is a local variable
    # in_closure       boolean    Is referenced in an inner scope
    # is_readonly      boolean    Can't be assigned to
    # func_cname       string     C func implementing Python func
    # func_modifiers   [string]   C function modifiers ('inline')
    # pos              position   Source position where declared
    # namespace_cname  string     If is_pyglobal, the C variable
    #                               holding its home namespace
    # pymethdef_cname  string     PyMethodDef structure
    # signature        Signature  Arg & return types for Python func
    # as_variable      Entry      Alternative interpretation of extension
    #                               type name or builtin C function as a variable
    # xdecref_cleanup  boolean    Use Py_XDECREF for error cleanup
    # in_cinclude      boolean    Suppress C declaration code
    # enum_values      [Entry]    For enum types, list of values
    # qualified_name   string     "modname.funcname" or "modname.classname"
    #                               or "modname.classname.funcname"
    # is_declared_generic  boolean  Is declared as PyObject * even though its
    #                                 type is an extension type
    # as_module        None       Module scope, if a cimported module
    # is_inherited     boolean    Is an inherited attribute of an extension type
    # pystring_cname   string     C name of Python version of string literal
    # is_interned      boolean    For string const entries, value is interned
    # is_identifier    boolean    For string const entries, value is an identifier
    # used             boolean
    # is_special       boolean    Is a special method or property accessor
    #                               of an extension type
    # defined_in_pxd   boolean    Is defined in a .pxd file (not just declared)
    # api              boolean    Generate C API for C class or function
    # utility_code     string     Utility code needed when this entry is used
    #
    # buffer_aux       BufferAux or None  Extra information needed for buffer variables
    # inline_func_in_pxd boolean  Hacky special case for inline function in pxd file.
    #                             Ideally this should not be necesarry.
    # might_overflow   boolean    In an arithmetic expression that could cause
    #                             overflow (used for type inference).
    # utility_code_definition     For some Cython builtins, the utility code
    #                             which contains the definition of the entry.
    #                             Currently only supported for CythonScope entries.
    # error_on_uninitialized      Have Control Flow issue an error when this entry is
    #                             used uninitialized
    # cf_used          boolean    Entry is used
    # is_fused_specialized boolean Whether this entry of a cdef or def function
    #                              is a specialization

    # TODO: utility_code and utility_code_definition serves the same purpose...

    # The class attributes below are shared per-class defaults; instances
    # override them with per-entry values only where needed.
    inline_func_in_pxd = False
    borrowed = 0
    init = ""
    visibility = 'private'
    is_builtin = 0
    is_cglobal = 0
    is_pyglobal = 0
    is_member = 0
    is_pyclass_attr = 0
    is_variable = 0
    is_cfunction = 0
    is_cmethod = 0
    is_builtin_cmethod = False
    is_unbound_cmethod = 0
    is_final_cmethod = 0
    is_inline_cmethod = 0
    is_anonymous = 0
    is_type = 0
    is_cclass = 0
    is_cpp_class = 0
    is_const = 0
    is_property = 0
    doc_cname = None
    getter_cname = None
    setter_cname = None
    is_self_arg = 0
    is_arg = 0
    is_local = 0
    in_closure = 0
    from_closure = 0
    is_declared_generic = 0
    is_readonly = 0
    pyfunc_cname = None
    func_cname = None
    func_modifiers = []
    final_func_cname = None
    doc = None
    as_variable = None
    xdecref_cleanup = 0
    in_cinclude = 0
    as_module = None
    is_inherited = 0
    pystring_cname = None
    is_identifier = 0
    is_interned = 0
    used = 0
    is_special = 0
    defined_in_pxd = 0
    is_implemented = 0
    api = 0
    utility_code = None
    is_overridable = 0
    buffer_aux = None
    prev_entry = None
    might_overflow = 0
    fused_cfunction = None
    is_fused_specialized = False
    utility_code_definition = None
    needs_property = False
    in_with_gil_block = 0
    from_cython_utility_code = None
    error_on_uninitialized = False
    cf_used = True
    outer_entry = None

    def __init__(self, name, cname, type, pos = None, init = None):
        self.name = name
        self.cname = cname
        self.type = type
        self.pos = pos
        self.init = init
        self.overloaded_alternatives = []
        self.cf_assignments = []
        self.cf_references = []
        # inner_entries/defining_entry link closure copies of this entry
        # back to the defining one (see InnerEntry below).
        self.inner_entries = []
        self.defining_entry = self

    def __repr__(self):
        # Includes id() so distinct entries with equal names are
        # distinguishable in debug output.
        return "%s(<%x>, name=%s, type=%s)" % (type(self).__name__, id(self), self.name, self.type)

    def redeclared(self, pos):
        # Emit a pair of errors pointing at both the new and the previous
        # declaration site.
        error(pos, "'%s' does not match previous declaration" % self.name)
        error(self.pos, "Previous declaration is here")

    def all_alternatives(self):
        # This entry plus any C++-style overload alternatives.
        return [self] + self.overloaded_alternatives

    def all_entries(self):
        # This entry plus all closure (InnerEntry) copies of it.
        return [self] + self.inner_entries
class InnerEntry(Entry):
    """
    An entry in a closure scope that represents the real outer Entry.
    """
    from_closure = True

    def __init__(self, outer_entry, scope):
        Entry.__init__(self, outer_entry.name,
                       outer_entry.cname,
                       outer_entry.type,
                       outer_entry.pos)
        self.outer_entry = outer_entry
        self.scope = scope

        # share state with (outermost) defining entry
        # Walk up through nested closures to the entry that actually
        # defines the name, then alias its mutable bookkeeping lists so
        # control-flow info is recorded in one place.
        outermost_entry = outer_entry
        while outermost_entry.outer_entry:
            outermost_entry = outermost_entry.outer_entry
        self.defining_entry = outermost_entry
        self.inner_entries = outermost_entry.inner_entries
        self.cf_assignments = outermost_entry.cf_assignments
        self.cf_references = outermost_entry.cf_references
        self.overloaded_alternatives = outermost_entry.overloaded_alternatives
        self.inner_entries.append(self)

    def __getattr__(self, name):
        # Any attribute not set locally is delegated to the defining
        # entry.  Only invoked for *missing* attributes, so locally
        # assigned state above takes precedence.
        if name.startswith('__'):
            # we wouldn't have been called if it was there
            raise AttributeError(name)
        return getattr(self.defining_entry, name)

    def all_entries(self):
        return self.defining_entry.all_entries()
class Scope(object):
    """Base class for all symbol-table scopes (module, class, function, ...)."""
    # name              string             Unqualified name
    # outer_scope       Scope or None      Enclosing scope
    # entries           {string : Entry}   Python name to entry, non-types
    # const_entries     [Entry]            Constant entries
    # type_entries      [Entry]            Struct/union/enum/typedef/exttype entries
    # sue_entries       [Entry]            Struct/union/enum entries
    # arg_entries       [Entry]            Function argument entries
    # var_entries       [Entry]            User-defined variable entries
    # pyfunc_entries    [Entry]            Python function entries
    # cfunc_entries     [Entry]            C function entries
    # c_class_entries   [Entry]            All extension type entries
    # cname_to_entry    {string : Entry}   Temp cname to entry mapping
    # return_type       PyrexType or None  Return type of function owning scope
    # is_builtin_scope  boolean            Is the builtin scope of Python/Cython
    # is_py_class_scope boolean            Is a Python class scope
    # is_c_class_scope  boolean            Is an extension type scope
    # is_closure_scope  boolean            Is a closure scope
    # is_passthrough    boolean            Outer scope is passed directly
    # is_cpp_class_scope boolean           Is a C++ class scope
    # is_property_scope boolean            Is a extension type property scope
    # scope_prefix      string             Disambiguator for C names
    # in_cinclude       boolean            Suppress C declaration code
    # qualified_name    string             "modname" or "modname.classname"
    #                                        Python strings in this scope
    # nogil             boolean            In a nogil section
    # directives        dict               Helper variable for the recursive
    #                                        analysis, contains directive values.
    # is_internal       boolean            Is only used internally (simpler setup)

    # Class-level defaults; subclasses flip the flag that identifies their
    # scope kind.
    is_builtin_scope = 0
    is_py_class_scope = 0
    is_c_class_scope = 0
    is_closure_scope = 0
    is_passthrough = 0
    is_cpp_class_scope = 0
    is_property_scope = 0
    is_module_scope = 0
    is_internal = 0
    scope_prefix = ""
    in_cinclude = 0
    nogil = 0
    fused_to_specific = None
    def __init__(self, name, outer_scope, parent_scope):
        # The outer_scope is the next scope in the lookup chain.
        # The parent_scope is used to derive the qualified name of this scope.
        self.name = name
        self.outer_scope = outer_scope
        self.parent_scope = parent_scope
        # C-name fragment in length-prefixed form, e.g. "3foo_" for "foo",
        # which keeps concatenated prefixes unambiguous.
        mangled_name = "%d%s_" % (len(name), name)
        qual_scope = self.qualifying_scope()
        if qual_scope:
            self.qualified_name = qual_scope.qualify_name(name)
            self.scope_prefix = qual_scope.scope_prefix + mangled_name
        else:
            # Top-level scope: no parent to derive names from.
            self.qualified_name = EncodedString(name)
            self.scope_prefix = mangled_name
        self.entries = {}
        self.const_entries = []
        self.type_entries = []
        self.sue_entries = []
        self.arg_entries = []
        self.var_entries = []
        self.pyfunc_entries = []
        self.cfunc_entries = []
        self.c_class_entries = []
        self.defined_c_classes = []
        self.imported_c_classes = {}
        self.cname_to_entry = {}
        self.string_to_entry = {}
        self.identifier_to_entry = {}
        self.num_to_entry = {}
        self.obj_to_entry = {}
        self.buffer_entries = []
        self.lambda_defs = []
        self.return_type = None
        # Per-name counters backing next_id(); only consulted on the
        # global scope.
        self.id_counters = {}
    def __deepcopy__(self, memo):
        # Scopes behave as identity objects: deep-copying any structure
        # that references a scope must share the scope, never duplicate it.
        return self
    def merge_in(self, other, merge_unused=True, whitelist=None):
        # Use with care...
        # Copy entries from *other* into this scope.  With merge_unused
        # False only entries marked as used are taken; with a whitelist,
        # only the listed names.  Existing entries of the same name in
        # this scope are overwritten in the dict, but category lists only
        # gain entries whose names are not already present.
        entries = []
        for name, entry in other.entries.iteritems():
            if not whitelist or name in whitelist:
                if entry.used or merge_unused:
                    entries.append((name, entry))

        self.entries.update(entries)

        for attr in ('const_entries',
                     'type_entries',
                     'sue_entries',
                     'arg_entries',
                     'var_entries',
                     'pyfunc_entries',
                     'cfunc_entries',
                     'c_class_entries'):
            self_entries = getattr(self, attr)
            names = set([e.name for e in self_entries])
            for entry in getattr(other, attr):
                if (entry.used or merge_unused) and entry.name not in names:
                    self_entries.append(entry)
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.qualified_name)
    def qualifying_scope(self):
        # The scope whose qualified name prefixes this scope's qualified
        # name (distinct from outer_scope, which is the lookup chain).
        return self.parent_scope
def mangle(self, prefix, name = None):
if name:
return "%s%s%s" % (prefix, self.scope_prefix, name)
else:
return self.parent_scope.mangle(prefix, self.name)
    def mangle_internal(self, name):
        # Mangle an internal name so as not to clash with any
        # user-defined name in this scope.  The pyrex prefix reserves the
        # namespace; mangle() adds the scope disambiguator.
        prefix = "%s%s_" % (Naming.pyrex_prefix, name)
        return self.mangle(prefix)
        #return self.parent_scope.mangle(prefix, self.name)
def mangle_class_private_name(self, name):
if self.parent_scope:
return self.parent_scope.mangle_class_private_name(name)
return name
def next_id(self, name=None):
# Return a cname fragment that is unique for this module
counters = self.global_scope().id_counters
try:
count = counters[name] + 1
except KeyError:
count = 0
counters[name] = count
if name:
if not count:
# unique names don't need a suffix, reoccurrences will get one
return name
return '%s%d' % (name, count)
else:
return '%d' % count
    def global_scope(self):
        """ Return the module-level scope containing this scope. """
        # Recurses outward; presumably the module scope overrides this to
        # return itself and terminate the chain -- confirm in ModuleScope.
        return self.outer_scope.global_scope()
    def builtin_scope(self):
        """ Return the builtin scope containing this scope. """
        # NOTE(review): docstring previously said "module-level scope",
        # copy-pasted from global_scope() above; this delegation targets
        # the builtin scope chain.
        return self.outer_scope.builtin_scope()
    def declare(self, name, cname, type, pos, visibility, shadow = 0, is_type = 0):
        # Create new entry, and add to dictionary if
        # name is not None. Reports a warning if already
        # declared.  With shadow=1 the entry is created but not recorded
        # in the name dictionary (no redeclaration check either).
        if type.is_buffer and not isinstance(self, LocalScope): # and not is_type:
            error(pos, 'Buffer types only allowed as function local variables')
        if not self.in_cinclude and cname and re.match("^_[_A-Z]+$", cname):
            # See http://www.gnu.org/software/libc/manual/html_node/Reserved-Names.html#Reserved-Names
            warning(pos, "'%s' is a reserved name in C." % cname, -1)
        entries = self.entries
        if name and name in entries and not shadow:
            # Redeclaration: extern entries only warn; 'ignore' visibility
            # suppresses the diagnostic entirely.
            if visibility == 'extern':
                warning(pos, "'%s' redeclared " % name, 0)
            elif visibility != 'ignore':
                error(pos, "'%s' redeclared " % name)
        entry = Entry(name, cname, type, pos = pos)
        entry.in_cinclude = self.in_cinclude
        if name:
            entry.qualified_name = self.qualify_name(name)
#            if name in entries and self.is_cpp():
#                entries[name].overloaded_alternatives.append(entry)
#            else:
#                entries[name] = entry
            if not shadow:
                entries[name] = entry
        if type.is_memoryviewslice:
            # Imported here to avoid a circular import at module load.
            import MemoryView
            entry.init = MemoryView.memslice_entry_init
        entry.scope = self
        entry.visibility = visibility
        return entry
    def qualify_name(self, name):
        # Prefix *name* with this scope's dotted qualified name.
        return EncodedString("%s.%s" % (self.qualified_name, name))
    def declare_const(self, name, type, value, pos, cname = None, visibility = 'private', api = 0):
        # Add an entry for a named constant.
        if not cname:
            # Public/api/cinclude constants keep their Python name as the
            # C name; private ones get mangled.
            if self.in_cinclude or (visibility == 'public' or api):
                cname = name
            else:
                cname = self.mangle(Naming.enum_prefix, name)
        entry = self.declare(name, cname, type, pos, visibility)
        entry.is_const = 1
        # The (unevaluated) value expression node, kept for later codegen.
        entry.value_node = value
        return entry
    def declare_type(self, name, type, pos,
                     cname = None, visibility = 'private', api = 0, defining = 1,
                     shadow = 0, template = 0):
        # Add an entry for a type definition.
        if not cname:
            cname = name
        entry = self.declare(name, cname, type, pos, visibility, shadow,
                             is_type=True)
        entry.is_type = 1
        entry.api = api
        if defining:
            # Only definitions (not forward declarations) are collected
            # for code generation.
            self.type_entries.append(entry)

        if not template:
            type.entry = entry

        # here we would set as_variable to an object representing this type
        return entry
    def declare_typedef(self, name, base_type, pos, cname = None,
                        visibility = 'private', api = 0):
        if not cname:
            if self.in_cinclude or (visibility == 'public' or api):
                cname = name
            else:
                cname = self.mangle(Naming.type_prefix, name)
        try:
            type = PyrexTypes.create_typedef_type(name, base_type, cname,
                                                  (visibility == 'extern'))
        # Python 2 syntax; create_typedef_type rejects some base types by
        # raising ValueError -- degrade to error_type so analysis continues.
        except ValueError, e:
            error(pos, e.args[0])
            type = PyrexTypes.error_type
        entry = self.declare_type(name, type, pos, cname,
                                  visibility = visibility, api = api)
        type.qualified_name = entry.qualified_name
        return entry
    def declare_struct_or_union(self, name, kind, scope,
                                typedef_flag, pos, cname = None,
                                visibility = 'private', api = 0,
                                packed = False):
        # Add an entry for a struct or union definition.
        if not cname:
            if self.in_cinclude or (visibility == 'public' or api):
                cname = name
            else:
                cname = self.mangle(Naming.type_prefix, name)
        entry = self.lookup_here(name)
        if not entry:
            type = PyrexTypes.CStructOrUnionType(
                name, kind, scope, typedef_flag, cname, packed)
            entry = self.declare_type(name, type, pos, cname,
                visibility = visibility, api = api,
                defining = scope is not None)
            self.sue_entries.append(entry)
            type.entry = entry
        else:
            # Redeclaration: only legal if the existing entry is the same
            # kind of struct/union; a second *definition* is ignored with
            # a warning; otherwise this may be completing a forward
            # declaration.
            if not (entry.is_type and entry.type.is_struct_or_union
                    and entry.type.kind == kind):
                warning(pos, "'%s' redeclared " % name, 0)
            elif scope and entry.type.scope:
                warning(pos, "'%s' already defined (ignoring second definition)" % name, 0)
            else:
                self.check_previous_typedef_flag(entry, typedef_flag, pos)
                self.check_previous_visibility(entry, visibility, pos)
                if scope:
                    # Forward declaration now being completed.
                    entry.type.scope = scope
                    self.type_entries.append(entry)
        return entry
    def declare_cpp_class(self, name, scope,
                          pos, cname = None, base_classes = (),
                          visibility = 'extern', templates = None):
        if cname is None:
            if self.in_cinclude or (visibility != 'private'):
                cname = name
            else:
                cname = self.mangle(Naming.type_prefix, name)
        base_classes = list(base_classes)
        entry = self.lookup_here(name)
        if not entry:
            type = PyrexTypes.CppClassType(
                name, scope, cname, base_classes, templates = templates)
            entry = self.declare_type(name, type, pos, cname,
                visibility = visibility, defining = scope is not None)
            self.sue_entries.append(entry)
        else:
            # Redeclaration handling mirrors declare_struct_or_union:
            # mismatched kind is an error, a second definition warns, a
            # forward declaration may be completed.
            if not (entry.is_type and entry.type.is_cpp_class):
                error(pos, "'%s' redeclared " % name)
                return None
            elif scope and entry.type.scope:
                warning(pos, "'%s' already defined (ignoring second definition)" % name, 0)
            else:
                if scope:
                    entry.type.scope = scope
                    self.type_entries.append(entry)
        if base_classes:
            if entry.type.base_classes and entry.type.base_classes != base_classes:
                error(pos, "Base type does not match previous declaration")
            else:
                entry.type.base_classes = base_classes
        if templates or entry.type.templates:
            if templates != entry.type.templates:
                error(pos, "Template parameters do not match previous declaration")

        # Recursively pull attributes from the whole base-class hierarchy
        # (depth-first, bases before derived) into this class's scope.
        def declare_inherited_attributes(entry, base_classes):
            for base_class in base_classes:
                if base_class is PyrexTypes.error_type:
                    continue
                if base_class.scope is None:
                    error(pos, "Cannot inherit from incomplete type")
                else:
                    declare_inherited_attributes(entry, base_class.base_classes)
                    entry.type.scope.declare_inherited_cpp_attributes(base_class.scope)
        if entry.type.scope:
            declare_inherited_attributes(entry, base_classes)
            # Every C++ class scope gets an implicit 'this' pointer.
            entry.type.scope.declare_var(name="this", cname="this", type=PyrexTypes.CPtrType(entry.type), pos=entry.pos)
        if self.is_cpp_class_scope:
            # Nested class: record the enclosing class type as namespace.
            entry.type.namespace = self.outer_scope.lookup(self.name).type
        return entry
def check_previous_typedef_flag(self, entry, typedef_flag, pos):
if typedef_flag != entry.type.typedef_flag:
error(pos, "'%s' previously declared using '%s'" % (
entry.name, ("cdef", "ctypedef")[entry.type.typedef_flag]))
def check_previous_visibility(self, entry, visibility, pos):
if entry.visibility != visibility:
error(pos, "'%s' previously declared as '%s'" % (
entry.name, entry.visibility))
    def declare_enum(self, name, pos, cname, typedef_flag,
            visibility = 'private', api = 0):
        # Add an entry for an enum definition.  An unnamed enum gets the
        # shared anonymous-enum type; enum member values are collected
        # later into entry.enum_values.
        if name:
            if not cname:
                if self.in_cinclude or (visibility == 'public' or api):
                    cname = name
                else:
                    cname = self.mangle(Naming.type_prefix, name)
            type = PyrexTypes.CEnumType(name, cname, typedef_flag)
        else:
            type = PyrexTypes.c_anon_enum_type
        entry = self.declare_type(name, type, pos, cname = cname,
            visibility = visibility, api = api)
        entry.enum_values = []
        self.sue_entries.append(entry)
        return entry
def declare_var(self, name, type, pos,
cname = None, visibility = 'private',
api = 0, in_pxd = 0, is_cdef = 0):
# Add an entry for a variable.
if not cname:
if visibility != 'private' or api:
cname = name
else:
cname = self.mangle(Naming.var_prefix, name)
if type.is_cpp_class and visibility != 'extern':
type.check_nullary_constructor(pos)
entry = self.declare(name, cname, type, pos, visibility)
entry.is_variable = 1
if in_pxd and visibility != 'extern':
entry.defined_in_pxd = 1
entry.used = 1
if api:
entry.api = 1
entry.used = 1
return entry
def declare_builtin(self, name, pos):
return self.outer_scope.declare_builtin(name, pos)
    def _declare_pyfunction(self, name, pos, visibility='extern', entry=None):
        # Declare a Python function.  A pre-existing entry that is not a
        # C function is reported as a duplicate (two errors: new and old
        # positions), but declaration still proceeds to keep going.
        if entry and not entry.type.is_cfunction:
            error(pos, "'%s' already declared" % name)
            error(entry.pos, "Previous declaration is here")
        entry = self.declare_var(name, py_object_type, pos, visibility=visibility)
        entry.signature = pyfunction_signature
        self.pyfunc_entries.append(entry)
        return entry
    def declare_pyfunction(self, name, pos, allow_redefine=False, visibility='extern'):
        # Add an entry for a Python function.
        # With allow_redefine, the visible name becomes a plain Python
        # variable and the function itself is held by a separate
        # anonymous entry, so the name can be rebound later.
        entry = self.lookup_here(name)
        if not allow_redefine:
            return self._declare_pyfunction(name, pos, visibility=visibility, entry=entry)
        if entry:
            if entry.type.is_unspecified:
                entry.type = py_object_type
            elif entry.type is not py_object_type:
                # Incompatible previous declaration: fall back to the
                # strict path, which reports the conflict.
                return self._declare_pyfunction(name, pos, visibility=visibility, entry=entry)
        else: # declare entry stub
            self.declare_var(name, py_object_type, pos, visibility=visibility)
        entry = self.declare_var(None, py_object_type, pos,
                                 cname=name, visibility='private')
        entry.name = EncodedString(name)
        entry.qualified_name = self.qualify_name(name)
        entry.signature = pyfunction_signature
        entry.is_anonymous = True
        return entry
    def declare_lambda_function(self, lambda_name, pos):
        # Add an entry for an anonymous Python function.
        # The entry is keyed on None (no visible name) but carries
        # mangled cnames for the function and its method descriptor.
        func_cname = self.mangle(Naming.lambda_func_prefix + u'funcdef_', lambda_name)
        pymethdef_cname = self.mangle(Naming.lambda_func_prefix + u'methdef_', lambda_name)
        qualified_name = self.qualify_name(lambda_name)
        entry = self.declare(None, func_cname, py_object_type, pos, 'private')
        entry.name = lambda_name
        entry.qualified_name = qualified_name
        entry.pymethdef_cname = pymethdef_cname
        entry.func_cname = func_cname
        entry.signature = pyfunction_signature
        entry.is_anonymous = True
        return entry
def add_lambda_def(self, def_node):
self.lambda_defs.append(def_node)
def register_pyfunction(self, entry):
self.pyfunc_entries.append(entry)
    def declare_cfunction(self, name, type, pos,
                          cname = None, visibility = 'private', api = 0, in_pxd = 0,
                          defining = 0, modifiers = (), utility_code = None):
        # Add an entry for a C function.
        # A redeclaration with a different signature is allowed only for
        # extern functions: always in C++ (overloading), or in C when all
        # alternatives have distinct cnames; otherwise it is an error
        # (or, for extern/extern mismatches, a warning that overrides).
        if not cname:
            if visibility != 'private' or api:
                cname = name
            else:
                cname = self.mangle(Naming.func_prefix, name)
        entry = self.lookup_here(name)
        if entry:
            if visibility != 'private' and visibility != entry.visibility:
                warning(pos, "Function '%s' previously declared as '%s'" % (name, entry.visibility), 1)
            if not entry.type.same_as(type):
                if visibility == 'extern' and entry.visibility == 'extern':
                    can_override = False
                    if self.is_cpp():
                        can_override = True
                    elif cname:
                        # if all alternatives have different cnames,
                        # it's safe to allow signature overrides
                        for alt_entry in entry.all_alternatives():
                            if not alt_entry.cname or cname == alt_entry.cname:
                                break # cname not unique!
                        else:
                            can_override = True
                    if can_override:
                        temp = self.add_cfunction(name, type, pos, cname, visibility, modifiers)
                        temp.overloaded_alternatives = entry.all_alternatives()
                        entry = temp
                    else:
                        warning(pos, "Function signature does not match previous declaration", 1)
                        entry.type = type
                else:
                    error(pos, "Function signature does not match previous declaration")
        else:
            entry = self.add_cfunction(name, type, pos, cname, visibility, modifiers)
            entry.func_cname = cname
        if in_pxd and visibility != 'extern':
            entry.defined_in_pxd = 1
        if api:
            entry.api = 1
        if not defining and not in_pxd and visibility != 'extern':
            error(pos, "Non-extern C function '%s' declared but not defined" % name)
        if defining:
            entry.is_implemented = True
        if modifiers:
            entry.func_modifiers = modifiers
        if utility_code:
            assert not entry.utility_code, "duplicate utility code definition in entry %s (%s)" % (name, cname)
            entry.utility_code = utility_code
        type.entry = entry
        return entry
def add_cfunction(self, name, type, pos, cname, visibility, modifiers):
# Add a C function entry without giving it a func_cname.
entry = self.declare(name, cname, type, pos, visibility)
entry.is_cfunction = 1
if modifiers:
entry.func_modifiers = modifiers
self.cfunc_entries.append(entry)
return entry
def find(self, name, pos):
# Look up name, report error if not found.
entry = self.lookup(name)
if entry:
return entry
else:
error(pos, "'%s' is not declared" % name)
def find_imported_module(self, path, pos):
# Look up qualified name, must be a module, report error if not found.
# Path is a list of names.
scope = self
for name in path:
entry = scope.find(name, pos)
if not entry:
return None
if entry.as_module:
scope = entry.as_module
else:
error(pos, "'%s' is not a cimported module" % '.'.join(path))
return None
return scope
def lookup(self, name):
# Look up name in this scope or an enclosing one.
# Return None if not found.
return (self.lookup_here(name)
or (self.outer_scope and self.outer_scope.lookup(name))
or None)
def lookup_here(self, name):
# Look up in this scope only, return None if not found.
return self.entries.get(name, None)
def lookup_target(self, name):
# Look up name in this scope only. Declare as Python
# variable if not found.
entry = self.lookup_here(name)
if not entry:
entry = self.declare_var(name, py_object_type, None)
return entry
def lookup_type(self, name):
entry = self.lookup(name)
if entry and entry.is_type:
if entry.type.is_fused and self.fused_to_specific:
return entry.type.specialize(self.fused_to_specific)
return entry.type
    def lookup_operator(self, operator, operands):
        # Resolve a C++ operator: prefer a member operator of the first
        # operand's class, then fall back to a free function found in
        # this (or an enclosing) scope.
        if operands[0].type.is_cpp_class:
            obj_type = operands[0].type
            method = obj_type.scope.lookup("operator%s" % operator)
            if method is not None:
                # Member operators are matched against the remaining operands.
                res = PyrexTypes.best_match(operands[1:], method.all_alternatives())
                if res is not None:
                    return res
        function = self.lookup("operator%s" % operator)
        if function is None:
            return None
        return PyrexTypes.best_match(operands, function.all_alternatives())
    def lookup_operator_for_types(self, pos, operator, types):
        # Convenience wrapper around lookup_operator() for bare types:
        # each type is wrapped in a minimal fake operand node.
        from Nodes import Node
        class FakeOperand(Node):
            pass
        operands = [FakeOperand(pos, type=type) for type in types]
        return self.lookup_operator(operator, operands)
def use_utility_code(self, new_code):
self.global_scope().use_utility_code(new_code)
def generate_library_function_declarations(self, code):
# Generate extern decls for C library funcs used.
pass
def defines_any(self, names):
# Test whether any of the given names are
# defined in this scope.
for name in names:
if name in self.entries:
return 1
return 0
    def infer_types(self):
        # Run type inference over this scope's entries using the
        # currently configured inferer.
        from TypeInference import get_type_inferer
        get_type_inferer().infer_types(self)
def is_cpp(self):
outer = self.outer_scope
if outer is None:
return False
else:
return outer.is_cpp()
def add_include_file(self, filename):
self.outer_scope.add_include_file(filename)
class PreImportScope(Scope):
    # Fallback scope consulted before builtins when Options.pre_import is
    # set: names not found elsewhere are assumed to come from the
    # pre-imported module and are declared here on demand.
    namespace_cname = Naming.preimport_cname
    def __init__(self):
        Scope.__init__(self, Options.pre_import, None, None)
    def declare_builtin(self, name, pos):
        # Declare the name as a Python global of the pre-import namespace.
        entry = self.declare(name, name, py_object_type, pos, 'private')
        entry.is_variable = True
        entry.is_pyglobal = True
        return entry
class BuiltinScope(Scope):
    # The builtin namespace.
    # Pre-populates itself with entries for the Python builtin types
    # listed in builtin_entries below.
    is_builtin_scope = True
    def __init__(self):
        if Options.pre_import is None:
            Scope.__init__(self, "__builtin__", None, None)
        else:
            Scope.__init__(self, "__builtin__", PreImportScope(), None)
        self.type_names = {}
        for name, definition in self.builtin_entries.iteritems():
            cname, type = definition
            self.declare_var(name, type, None, cname)
    def lookup(self, name, language_level=None):
        # 'language_level' is passed by ModuleScope
        # In Python 3 mode the builtin 'str' maps to 'unicode'.
        if language_level == 3:
            if name == 'str':
                name = 'unicode'
        return Scope.lookup(self, name)
    def declare_builtin(self, name, pos):
        # Names that are not actual Python builtins are deferred to the
        # pre-import scope when there is one, otherwise reported.
        if not hasattr(builtins, name):
            if self.outer_scope is not None:
                return self.outer_scope.declare_builtin(name, pos)
            else:
                if Options.error_on_unknown_names:
                    error(pos, "undeclared name not builtin: %s" % name)
                else:
                    warning(pos, "undeclared name not builtin: %s" % name, 2)
    def declare_builtin_cfunction(self, name, type, cname, python_equiv = None,
            utility_code = None):
        # If python_equiv == "*", the Python equivalent has the same name
        # as the entry, otherwise it has the name specified by python_equiv.
        name = EncodedString(name)
        entry = self.declare_cfunction(name, type, None, cname, visibility='extern',
                                       utility_code = utility_code)
        if python_equiv:
            if python_equiv == "*":
                python_equiv = name
            else:
                python_equiv = EncodedString(python_equiv)
            var_entry = Entry(python_equiv, python_equiv, py_object_type)
            var_entry.is_variable = 1
            var_entry.is_builtin = 1
            var_entry.utility_code = utility_code
            entry.as_variable = var_entry
        return entry
    def declare_builtin_type(self, name, cname, utility_code = None, objstruct_cname = None):
        # Register a builtin extension type and attach a read-only
        # variable entry that refers to its C type object.
        name = EncodedString(name)
        type = PyrexTypes.BuiltinObjectType(name, cname, objstruct_cname)
        scope = CClassScope(name, outer_scope=None, visibility='extern')
        scope.directives = {}
        if name == 'bool':
            type.is_final_type = True
        type.set_scope(scope)
        self.type_names[name] = 1
        entry = self.declare_type(name, type, None, visibility='extern')
        entry.utility_code = utility_code
        var_entry = Entry(name = entry.name,
            type = self.lookup('type').type, # make sure "type" is the first type declared...
            pos = entry.pos,
            cname = "((PyObject*)%s)" % entry.type.typeptr_cname)
        var_entry.is_variable = 1
        var_entry.is_cglobal = 1
        var_entry.is_readonly = 1
        var_entry.is_builtin = 1
        var_entry.utility_code = utility_code
        if Options.cache_builtins:
            var_entry.is_const = True
        entry.as_variable = var_entry
        return type
    def builtin_scope(self):
        # The builtin scope is its own builtin scope.
        return self
    # Mapping of builtin name -> [C expression for the type object, type].
    builtin_entries = {
        "type": ["((PyObject*)&PyType_Type)", py_object_type],
        "bool": ["((PyObject*)&PyBool_Type)", py_object_type],
        "int": ["((PyObject*)&PyInt_Type)", py_object_type],
        "long": ["((PyObject*)&PyLong_Type)", py_object_type],
        "float": ["((PyObject*)&PyFloat_Type)", py_object_type],
        "complex":["((PyObject*)&PyComplex_Type)", py_object_type],
        "bytes": ["((PyObject*)&PyBytes_Type)", py_object_type],
        "bytearray": ["((PyObject*)&PyByteArray_Type)", py_object_type],
        "str": ["((PyObject*)&PyString_Type)", py_object_type],
        "unicode":["((PyObject*)&PyUnicode_Type)", py_object_type],
        "tuple": ["((PyObject*)&PyTuple_Type)", py_object_type],
        "list": ["((PyObject*)&PyList_Type)", py_object_type],
        "dict": ["((PyObject*)&PyDict_Type)", py_object_type],
        "set": ["((PyObject*)&PySet_Type)", py_object_type],
        "frozenset": ["((PyObject*)&PyFrozenSet_Type)", py_object_type],
        "slice": ["((PyObject*)&PySlice_Type)", py_object_type],
        # "file": ["((PyObject*)&PyFile_Type)", py_object_type], # not in Py3
        "None": ["Py_None", py_object_type],
        "False": ["Py_False", py_object_type],
        "True": ["Py_True", py_object_type],
    }
const_counter = 1 # module-level counter for naming constants; a temporary solution for compiling code in pxds
class ModuleScope(Scope):
    # Scope of a Cython module (.pyx/.pxd): module-level names, cimports,
    # extension classes and the bookkeeping for generating the C module.
    #
    # module_name          string             Python name of the module
    # module_cname         string             C name of Python module object
    # #module_dict_cname   string             C name of module dict object
    # method_table_cname   string             C name of method table
    # doc                  string             Module doc string
    # doc_cname            string             C name of module doc string
    # utility_code_list    [UtilityCode]      Queuing utility codes for forwarding to Code.py
    # python_include_files [string]           Standard  Python headers to be included
    # include_files        [string]           Other C headers to be included
    # string_to_entry      {string : Entry}   Map string const to entry
    # identifier_to_entry  {string : Entry}   Map identifier string const to entry
    # context              Context
    # parent_module        Scope              Parent in the import namespace
    # module_entries       {string : Entry}   For cimport statements
    # type_names           {string : 1}       Set of type names (used during parsing)
    # included_files       [string]           Cython sources included with 'include'
    # pxd_file_loaded      boolean            Corresponding .pxd file has been processed
    # cimported_modules    [ModuleScope]      Modules imported with cimport
    # types_imported       {PyrexType}        Set of types for which import code generated
    # has_import_star      boolean            Module contains import *
    # cpp                  boolean            Compiling a C++ file
    # is_cython_builtin    boolean            Is this the Cython builtin scope (or a child scope)
    # is_package           boolean            Is this a package module? (__init__)
    is_module_scope = 1
    has_import_star = 0
    is_cython_builtin = 0
    def __init__(self, name, parent_module, context):
        # Initialize a module scope nested under parent_module; the outer
        # scope is always the builtin scope.
        import Builtin
        self.parent_module = parent_module
        outer_scope = Builtin.builtin_scope
        Scope.__init__(self, name, outer_scope, parent_module)
        if name == "__init__":
            # Treat Spam/__init__.pyx specially, so that when Python loads
            # Spam/__init__.so, initSpam() is defined.
            self.module_name = parent_module.module_name
            self.is_package = True
        else:
            self.module_name = name
            self.is_package = False
        self.module_name = EncodedString(self.module_name)
        self.context = context
        self.module_cname = Naming.module_cname
        self.module_dict_cname = Naming.moddict_cname
        self.method_table_cname = Naming.methtable_cname
        self.doc = ""
        self.doc_cname = Naming.moddoc_cname
        self.utility_code_list = []
        self.module_entries = {}
        self.python_include_files = ["Python.h"]
        self.include_files = []
        self.type_names = dict(outer_scope.type_names)
        self.pxd_file_loaded = 0
        self.cimported_modules = []
        self.types_imported = set()
        self.included_files = []
        self.has_extern_class = 0
        self.cached_builtins = []
        self.undeclared_cached_builtins = []
        self.namespace_cname = self.module_cname
        # Standard module-level names every module exposes.
        for var_name in ['__builtins__', '__name__', '__file__', '__doc__', '__path__']:
            self.declare_var(EncodedString(var_name), py_object_type, None)
def qualifying_scope(self):
return self.parent_module
def global_scope(self):
return self
def lookup(self, name):
entry = self.lookup_here(name)
if entry is not None:
return entry
if self.context is not None:
language_level = self.context.language_level
else:
language_level = 3
return self.outer_scope.lookup(name, language_level=language_level)
    def declare_builtin(self, name, pos):
        # Resolve a reference to a (presumed) builtin name at module level.
        # Unknown non-builtin names become plain module globals under
        # 'import *', otherwise they are reported and assumed builtin.
        if not hasattr(builtins, name) \
               and name not in Code.non_portable_builtins_map \
               and name not in Code.uncachable_builtins:
            if self.has_import_star:
                entry = self.declare_var(name, py_object_type, pos)
                return entry
            else:
                if Options.error_on_unknown_names:
                    error(pos, "undeclared name not builtin: %s" % name)
                else:
                    warning(pos, "undeclared name not builtin: %s" % name, 2)
                # unknown - assume it's builtin and look it up at runtime
                entry = self.declare(name, None, py_object_type, pos, 'private')
                entry.is_builtin = 1
                return entry
        # Re-use a previously cached builtin entry when caching is on.
        if Options.cache_builtins:
            for entry in self.cached_builtins:
                if entry.name == name:
                    return entry
        entry = self.declare(None, None, py_object_type, pos, 'private')
        if Options.cache_builtins and name not in Code.uncachable_builtins:
            entry.is_builtin = 1
            entry.is_const = 1 # cached
            entry.name = name
            entry.cname = Naming.builtin_prefix + name
            self.cached_builtins.append(entry)
            self.undeclared_cached_builtins.append(entry)
        else:
            entry.is_builtin = 1
            entry.name = name
        return entry
def find_module(self, module_name, pos):
# Find a module in the import namespace, interpreting
# relative imports relative to this module's parent.
# Finds and parses the module's .pxd file if the module
# has not been referenced before.
return self.global_scope().context.find_module(
module_name, relative_to = self.parent_module, pos = pos)
def find_submodule(self, name):
# Find and return scope for a submodule of this module,
# creating a new empty one if necessary. Doesn't parse .pxd.
scope = self.lookup_submodule(name)
if not scope:
scope = ModuleScope(name,
parent_module = self, context = self.context)
self.module_entries[name] = scope
return scope
def lookup_submodule(self, name):
# Return scope for submodule of this module, or None.
return self.module_entries.get(name, None)
def add_include_file(self, filename):
if filename not in self.python_include_files \
and filename not in self.include_files:
self.include_files.append(filename)
def add_imported_module(self, scope):
if scope not in self.cimported_modules:
for filename in scope.include_files:
self.add_include_file(filename)
self.cimported_modules.append(scope)
for m in scope.cimported_modules:
self.add_imported_module(m)
def add_imported_entry(self, name, entry, pos):
if entry not in self.entries:
self.entries[name] = entry
else:
warning(pos, "'%s' redeclared " % name, 0)
    def declare_module(self, name, scope, pos):
        # Declare a cimported module. This is represented as a
        # Python module-level variable entry with a module
        # scope attached to it. Reports an error and returns
        # None if previously declared as something else.
        entry = self.lookup_here(name)
        if entry:
            if entry.is_pyglobal and entry.as_module is scope:
                return entry # Already declared as the same module
            if not (entry.is_pyglobal and not entry.as_module):
                # SAGE -- I put this here so Pyrex
                # cimport's work across directories.
                # Currently it tries to multiply define
                # every module appearing in an import list.
                # It shouldn't be an error for a module
                # name to appear again, and indeed the generated
                # code compiles fine.
                return entry
        else:
            entry = self.declare_var(name, py_object_type, pos)
        # Attach the module scope and pull in its headers/cimports.
        entry.as_module = scope
        self.add_imported_module(scope)
        return entry
    def declare_var(self, name, type, pos,
                    cname = None, visibility = 'private',
                    api = 0, in_pxd = 0, is_cdef = 0):
        # Add an entry for a global variable. If it is a Python
        # object type, and not declared with cdef, it will live
        # in the module dictionary, otherwise it will be a C
        # global variable.
        if not visibility in ('private', 'public', 'extern'):
            error(pos, "Module-level variable cannot be declared %s" % visibility)
        if not is_cdef:
            if type is unspecified_type:
                type = py_object_type
            if not (type.is_pyobject and not type.is_extension_type):
                raise InternalError(
                    "Non-cdef global variable is not a generic Python object")
        if not cname:
            defining = not in_pxd
            if visibility == 'extern' or (visibility == 'public' and defining):
                cname = name
            else:
                cname = self.mangle(Naming.var_prefix, name)
        entry = self.lookup_here(name)
        if entry and entry.defined_in_pxd:
            # Merge with the declaration loaded from the .pxd file.
            #if visibility != 'private' and visibility != entry.visibility:
            #    warning(pos, "Variable '%s' previously declared as '%s'" % (name, entry.visibility), 1)
            if not entry.type.same_as(type):
                if visibility == 'extern' and entry.visibility == 'extern':
                    warning(pos, "Variable '%s' type does not match previous declaration" % name, 1)
                    entry.type = type
                #else:
                #    error(pos, "Variable '%s' type does not match previous declaration" % name)
            if entry.visibility != "private":
                # Non-private .pxd variables keep their Python name as
                # C name once the defining module is seen.
                mangled_cname = self.mangle(Naming.var_prefix, name)
                if entry.cname == mangled_cname:
                    cname = name
                    entry.cname = name
            if not entry.is_implemented:
                entry.is_implemented = True
                return entry
        entry = Scope.declare_var(self, name, type, pos,
                                  cname=cname, visibility=visibility,
                                  api=api, in_pxd=in_pxd, is_cdef=is_cdef)
        if is_cdef:
            entry.is_cglobal = 1
            if entry.type.is_pyobject:
                entry.init = 0
            self.var_entries.append(entry)
        else:
            entry.is_pyglobal = 1
        if Options.cimport_from_pyx:
            entry.used = 1
        return entry
    def declare_cfunction(self, name, type, pos,
                          cname = None, visibility = 'private', api = 0, in_pxd = 0,
                          defining = 0, modifiers = (), utility_code = None):
        # Add an entry for a C function.
        # When the defining module is seen for a function declared in the
        # .pxd, non-private entries keep the Python name as C name.
        if not cname:
            if visibility == 'extern' or (visibility == 'public' and defining):
                cname = name
            else:
                cname = self.mangle(Naming.func_prefix, name)
        entry = self.lookup_here(name)
        if entry and entry.defined_in_pxd:
            if entry.visibility != "private":
                mangled_cname = self.mangle(Naming.var_prefix, name)
                if entry.cname == mangled_cname:
                    cname = name
                    entry.cname = cname
                    entry.func_cname = cname
        entry = Scope.declare_cfunction(
            self, name, type, pos,
            cname = cname, visibility = visibility, api = api, in_pxd = in_pxd,
            defining = defining, modifiers = modifiers, utility_code = utility_code)
        return entry
def declare_global(self, name, pos):
entry = self.lookup_here(name)
if not entry:
self.declare_var(name, py_object_type, pos)
def use_utility_code(self, new_code):
if new_code is not None:
self.utility_code_list.append(new_code)
    def declare_c_class(self, name, pos, defining = 0, implementing = 0,
            module_name = None, base_type = None, objstruct_cname = None,
            typeobj_cname = None, typeptr_cname = None, visibility = 'private', typedef_flag = 0, api = 0,
            buffer_defaults = None, shadow = 0):
        # Declare an extension type (cdef class), merging with any earlier
        # declaration of the same name and checking the new options for
        # compatibility with it.
        # If this is a non-extern typedef class, expose the typedef, but use
        # the non-typedef struct internally to avoid needing forward
        # declarations for anonymous structs.
        if typedef_flag and visibility != 'extern':
            if not (visibility == 'public' or api):
                warning(pos, "ctypedef only valid for 'extern' , 'public', and 'api'", 2)
            objtypedef_cname = objstruct_cname
            typedef_flag = 0
        else:
            objtypedef_cname = None
        #
        # Look for previous declaration as a type
        #
        entry = self.lookup_here(name)
        if entry and not shadow:
            type = entry.type
            if not (entry.is_type and type.is_extension_type):
                entry = None # Will cause redeclaration and produce an error
            else:
                scope = type.scope
                if typedef_flag and (not scope or scope.defined):
                    self.check_previous_typedef_flag(entry, typedef_flag, pos)
                if (scope and scope.defined) or (base_type and type.base_type):
                    if base_type and base_type is not type.base_type:
                        error(pos, "Base type does not match previous declaration")
                if base_type and not type.base_type:
                    type.base_type = base_type
        #
        # Make a new entry if needed
        #
        if not entry or shadow:
            type = PyrexTypes.PyExtensionType(name, typedef_flag, base_type, visibility == 'extern')
            type.pos = pos
            type.buffer_defaults = buffer_defaults
            if objtypedef_cname is not None:
                type.objtypedef_cname = objtypedef_cname
            if visibility == 'extern':
                type.module_name = module_name
            else:
                type.module_name = self.qualified_name
            if typeptr_cname:
                type.typeptr_cname = typeptr_cname
            else:
                type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
            entry = self.declare_type(name, type, pos, visibility = visibility,
                defining = 0, shadow = shadow)
            entry.is_cclass = True
            if objstruct_cname:
                type.objstruct_cname = objstruct_cname
            elif not entry.in_cinclude:
                type.objstruct_cname = self.mangle(Naming.objstruct_prefix, name)
            else:
                error(entry.pos,
                    "Object name required for 'public' or 'extern' C class")
            self.attach_var_entry_to_c_class(entry)
            self.c_class_entries.append(entry)
        #
        # Check for re-definition and create scope if needed
        #
        if not type.scope:
            if defining or implementing:
                scope = CClassScope(name = name, outer_scope = self,
                    visibility = visibility)
                scope.directives = self.directives.copy()
                if base_type and base_type.scope:
                    scope.declare_inherited_c_attributes(base_type.scope)
                type.set_scope(scope)
                self.type_entries.append(entry)
        else:
            if defining and type.scope.defined:
                error(pos, "C class '%s' already defined" % name)
            elif implementing and type.scope.implemented:
                error(pos, "C class '%s' already implemented" % name)
        #
        # Fill in options, checking for compatibility with any previous declaration
        #
        if defining:
            entry.defined_in_pxd = 1
        if implementing:   # So that filenames in runtime exceptions refer to
            entry.pos = pos  # the .pyx file and not the .pxd file
        if visibility != 'private' and entry.visibility != visibility:
            error(pos, "Class '%s' previously declared as '%s'"
                % (name, entry.visibility))
        if api:
            entry.api = 1
        if objstruct_cname:
            if type.objstruct_cname and type.objstruct_cname != objstruct_cname:
                error(pos, "Object struct name differs from previous declaration")
            type.objstruct_cname = objstruct_cname
        if typeobj_cname:
            if type.typeobj_cname and type.typeobj_cname != typeobj_cname:
                error(pos, "Type object name differs from previous declaration")
            type.typeobj_cname = typeobj_cname
        if self.directives.get('final'):
            entry.type.is_final_type = True
        # cdef classes are always exported, but we need to set it to
        # distinguish between unused Cython utility code extension classes
        entry.used = True
        #
        # Return new or existing entry
        #
        return entry
    def allocate_vtable_names(self, entry):
        # If extension type has a vtable, allocate vtable struct and
        # slot names for it.
        type = entry.type
        if type.base_type and type.base_type.vtabslot_cname:
            #print "...allocating vtabslot_cname because base type has one" ###
            type.vtabslot_cname = "%s.%s" % (
                Naming.obj_base_cname, type.base_type.vtabslot_cname)
        elif type.scope and type.scope.cfunc_entries:
            # one special case here: when inheriting from builtin
            # types, the methods may also be built-in, in which
            # case they won't need a vtable
            entry_count = len(type.scope.cfunc_entries)
            base_type = type.base_type
            while base_type:
                # FIXME: this will break if we ever get non-inherited C methods
                if not base_type.scope or entry_count > len(base_type.scope.cfunc_entries):
                    break
                if base_type.is_builtin_type:
                    # builtin base type defines all methods => no vtable needed
                    return
                base_type = base_type.base_type
            #print "...allocating vtabslot_cname because there are C methods" ###
            type.vtabslot_cname = Naming.vtabslot_cname
        if type.vtabslot_cname:
            #print "...allocating other vtable related cnames" ###
            type.vtabstruct_cname = self.mangle(Naming.vtabstruct_prefix, entry.name)
            type.vtabptr_cname = self.mangle(Naming.vtabptr_prefix, entry.name)
    def check_c_classes_pxd(self):
        # Performs post-analysis checking and finishing up of extension types
        # being implemented in this module. This is called only for the .pxd.
        #
        # Checks all extension types declared in this scope to
        # make sure that:
        #
        #    * The extension type is fully declared
        #
        # Also allocates a name for the vtable if needed.
        #
        for entry in self.c_class_entries:
            # Check defined
            if not entry.type.scope:
                error(entry.pos, "C class '%s' is declared but not defined" % entry.name)
    def check_c_class(self, entry):
        # Validate one extension type after analysis: it must be defined,
        # all its non-inherited C methods implemented, and missing
        # type-object / vtable cnames are generated here.
        type = entry.type
        name = entry.name
        visibility = entry.visibility
        # Check defined
        if not type.scope:
            error(entry.pos, "C class '%s' is declared but not defined" % name)
        # Generate typeobj_cname
        if visibility != 'extern' and not type.typeobj_cname:
            type.typeobj_cname = self.mangle(Naming.typeobj_prefix, name)
        ## Generate typeptr_cname
        #type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
        # Check C methods defined
        if type.scope:
            for method_entry in type.scope.cfunc_entries:
                if not method_entry.is_inherited and not method_entry.func_cname:
                    error(method_entry.pos, "C method '%s' is declared but not defined" %
                        method_entry.name)
        # Allocate vtable name if necessary
        if type.vtabslot_cname:
            #print "ModuleScope.check_c_classes: allocating vtable cname for", self ###
            type.vtable_cname = self.mangle(Naming.vtable_prefix, entry.name)
    def check_c_classes(self):
        # Performs post-analysis checking and finishing up of extension types
        # being implemented in this module. This is called only for the main
        # .pyx file scope, not for cimported .pxd scopes.
        #
        # Checks all extension types declared in this scope to
        # make sure that:
        #
        #    * The extension type is implemented
        #    * All required object and type names have been specified or generated
        #    * All non-inherited C methods are implemented
        #
        # Also allocates a name for the vtable if needed.
        #
        debug_check_c_classes = 0
        if debug_check_c_classes:
            print("Scope.check_c_classes: checking scope " + self.qualified_name)
        for entry in self.c_class_entries:
            if debug_check_c_classes:
                print("...entry %s %s" % (entry.name, entry))
                print("......type = ",  entry.type)
                print("......visibility = ", entry.visibility)
            self.check_c_class(entry)
def check_c_functions(self):
# Performs post-analysis checking making sure all
# defined c functions are actually implemented.
for name, entry in self.entries.items():
if entry.is_cfunction:
if (entry.defined_in_pxd
and entry.scope is self
and entry.visibility != 'extern'
and not entry.in_cinclude
and not entry.is_implemented):
error(entry.pos, "Non-extern C function '%s' declared but not defined" % name)
    def attach_var_entry_to_c_class(self, entry):
        # The name of an extension class has to serve as both a type
        # name and a variable name holding the type object. It is
        # represented in the symbol table by a type entry with a
        # variable entry attached to it. For the variable entry,
        # we use a read-only C global variable whose name is an
        # expression that refers to the type object.
        import Builtin
        var_entry = Entry(name = entry.name,
            type = Builtin.type_type,
            pos = entry.pos,
            cname = "((PyObject*)%s)" % entry.type.typeptr_cname)
        var_entry.is_variable = 1
        var_entry.is_cglobal = 1
        var_entry.is_readonly = 1
        entry.as_variable = var_entry
def is_cpp(self):
return self.cpp
    def infer_types(self):
        # Module-level names are all Python objects, so the trivial
        # inferer is sufficient here.
        from TypeInference import PyObjectTypeInferer
        PyObjectTypeInferer().infer_types(self)
class LocalScope(Scope):
    # Scope of a function body: holds its arguments and local variables.
    # Does the function have a 'with gil:' block?
    has_with_gil_block = False
    # Transient attribute, used for symbol table variable declarations
    _in_with_gil_block = False
    def __init__(self, name, outer_scope, parent_scope = None):
        # parent_scope defaults to the lexically enclosing (outer) scope.
        if parent_scope is None:
            parent_scope = outer_scope
        Scope.__init__(self, name, outer_scope, parent_scope)
def mangle(self, prefix, name):
return prefix + name
    def declare_arg(self, name, type, pos):
        # Add an entry for an argument of a function.
        cname = self.mangle(Naming.var_prefix, name)
        entry = self.declare(name, cname, type, pos, 'private')
        entry.is_variable = 1
        if type.is_pyobject:
            # NOTE(review): Python-object args are initialised to NULL ("0");
            # presumably filled in by argument unpacking -- confirm in Nodes.
            entry.init = "0"
        entry.is_arg = 1
        #entry.borrowed = 1 # Not using borrowed arg refs for now
        self.arg_entries.append(entry)
        return entry
def declare_var(self, name, type, pos,
cname = None, visibility = 'private',
api = 0, in_pxd = 0, is_cdef = 0):
# Add an entry for a local variable.
if visibility in ('public', 'readonly'):
error(pos, "Local variable cannot be declared %s" % visibility)
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
api=api, in_pxd=in_pxd, is_cdef=is_cdef)
if type.is_pyobject:
entry.init = "0"
entry.is_local = 1
entry.in_with_gil_block = self._in_with_gil_block
self.var_entries.append(entry)
return entry
def declare_global(self, name, pos):
# Pull entry from global scope into local scope.
if self.lookup_here(name):
warning(pos, "'%s' redeclared ", 0)
else:
entry = self.global_scope().lookup_target(name)
self.entries[name] = entry
def declare_nonlocal(self, name, pos):
# Pull entry from outer scope into local scope
orig_entry = self.lookup_here(name)
if orig_entry and orig_entry.scope is self and not orig_entry.from_closure:
error(pos, "'%s' redeclared as nonlocal" % name)
else:
entry = self.lookup(name)
if entry is None or not entry.from_closure:
error(pos, "no binding for nonlocal '%s' found" % name)
def lookup(self, name):
# Look up name in this scope or an enclosing one.
# Return None if not found.
entry = Scope.lookup(self, name)
if entry is not None:
if entry.scope is not self and entry.scope.is_closure_scope:
if hasattr(entry.scope, "scope_class"):
raise InternalError("lookup() after scope class created.")
# The actual c fragment for the different scopes differs
# on the outside and inside, so we make a new entry
entry.in_closure = True
inner_entry = InnerEntry(entry, self)
inner_entry.is_variable = True
self.entries[name] = inner_entry
return inner_entry
return entry
def mangle_closure_cnames(self, outer_scope_cname):
for entry in self.entries.values():
if entry.from_closure:
cname = entry.outer_entry.cname
if self.is_passthrough:
entry.cname = cname
else:
if cname.startswith(Naming.cur_scope_cname):
cname = cname[len(Naming.cur_scope_cname)+2:]
entry.cname = "%s->%s" % (outer_scope_cname, cname)
elif entry.in_closure:
entry.original_cname = entry.cname
entry.cname = "%s->%s" % (Naming.cur_scope_cname, entry.cname)
class GeneratorExpressionScope(Scope):
    """Scope for generator expressions and comprehensions. As opposed
    to generators, these can be easily inlined in some cases, so all
    we really need is a scope that holds the loop variable(s).
    """
    def __init__(self, outer_scope):
        # Scope name is a module-unique genexpr id; variables are mangled
        # through the parent scope, which generates their code.
        name = outer_scope.global_scope().next_id(Naming.genexpr_id_ref)
        Scope.__init__(self, name, outer_scope, outer_scope)
        self.directives = outer_scope.directives
        self.genexp_prefix = "%s%d%s" % (Naming.pyrex_prefix, len(name), name)
    def mangle(self, prefix, name):
        # Qualify with the genexpr prefix so names cannot clash with the
        # parent scope's own mangled names.
        return '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(prefix, name))
    def declare_var(self, name, type, pos,
                    cname = None, visibility = 'private',
                    api = 0, in_pxd = 0, is_cdef = True):
        # Declare a loop variable (note: is_cdef defaults to True here).
        if type is unspecified_type:
            # if the outer scope defines a type for this variable, inherit it
            outer_entry = self.outer_scope.lookup(name)
            if outer_entry and outer_entry.is_variable:
                type = outer_entry.type # may still be 'unspecified_type' !
        # the parent scope needs to generate code for the variable, but
        # this scope must hold its name exclusively
        cname = '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(Naming.var_prefix, name or self.next_id()))
        entry = self.declare(name, cname, type, pos, visibility)
        entry.is_variable = 1
        entry.is_local = 1
        self.var_entries.append(entry)
        self.entries[name] = entry
        return entry
    def declare_pyfunction(self, name, pos, allow_redefine=False):
        # Functions defined inside a genexpr belong to the outer scope.
        return self.outer_scope.declare_pyfunction(
            name, pos, allow_redefine)
    def declare_lambda_function(self, func_cname, pos):
        # Lambdas are likewise delegated to the outer scope.
        return self.outer_scope.declare_lambda_function(func_cname, pos)
    def add_lambda_def(self, def_node):
        return self.outer_scope.add_lambda_def(def_node)
class ClosureScope(LocalScope):
    """Function scope whose captured variables live in a heap-allocated
    closure struct instead of plain C locals."""
    is_closure_scope = True
    def __init__(self, name, scope_name, outer_scope, parent_scope=None):
        LocalScope.__init__(self, name, outer_scope, parent_scope)
        # C name of the variable holding this scope's closure struct.
        self.closure_cname = "%s%s" % (Naming.closure_scope_prefix, scope_name)
#    def mangle_closure_cnames(self, scope_var):
#        for entry in self.entries.values() + self.temp_entries:
#            entry.in_closure = 1
#        LocalScope.mangle_closure_cnames(self, scope_var)
#    def mangle(self, prefix, name):
#        return "%s->%s" % (self.cur_scope_cname, name)
#        return "%s->%s" % (self.closure_cname, name)
    def declare_pyfunction(self, name, pos, allow_redefine=False):
        # Inner defs are always private to the closure.
        return LocalScope.declare_pyfunction(self, name, pos, allow_redefine, visibility='private')
class StructOrUnionScope(Scope):
    """Namespace of a C struct or union.

    Members must be private and may not be Python objects (unless
    explicitly allowed); function members become function pointers.
    """
    def __init__(self, name="?"):
        Scope.__init__(self, name, None, None)
    def declare_var(self, name, type, pos,
                    cname = None, visibility = 'private',
                    api = 0, in_pxd = 0, is_cdef = 0,
                    allow_pyobject = 0):
        # Add an entry for an attribute.
        if not cname:
            cname = name
            if visibility == 'private':
                cname = c_safe_identifier(cname)
        if type.is_cfunction:
            # A function-typed member is stored as a function pointer.
            type = PyrexTypes.CPtrType(type)
        entry = self.declare(name, cname, type, pos, visibility)
        entry.is_variable = 1
        self.var_entries.append(entry)
        if type.is_pyobject and not allow_pyobject:
            error(pos,
                  "C struct/union member cannot be a Python object")
        if visibility != 'private':
            error(pos,
                  "C struct/union member cannot be declared %s" % visibility)
        return entry
    def declare_cfunction(self, name, type, pos,
                          cname = None, visibility = 'private', api = 0, in_pxd = 0,
                          defining = 0, modifiers = ()): # currently no utility code ...
        # Function members are just variables of function-pointer type;
        # the extra cfunction arguments are intentionally ignored.
        return self.declare_var(name, type, pos,
                                cname=cname, visibility=visibility)
class ClassScope(Scope):
    #  Abstract base class for namespace of
    #  Python class or extension type.
    #
    #  class_name     string   Python name of the class
    #  scope_prefix   string   Additional prefix for names
    #                          declared in the class
    #  doc    string or None   Doc string
    def __init__(self, name, outer_scope):
        Scope.__init__(self, name, outer_scope, outer_scope)
        self.class_name = name
        self.doc = None
    def lookup(self, name):
        """Look up a name; synthesizes a special C entry for the builtin
        'classmethod' so it works on not-yet-created class bodies."""
        entry = Scope.lookup(self, name)
        if entry:
            return entry
        if name == "classmethod":
            # We don't want to use the builtin classmethod here 'cause it won't do the
            # right thing in this scope (as the class members aren't still functions).
            # Don't want to add a cfunction to this scope 'cause that would mess with
            # the type definition, so we just return the right entry.
            entry = Entry(
                "classmethod",
                "__Pyx_Method_ClassMethod",
                PyrexTypes.CFuncType(
                    py_object_type,
                    [PyrexTypes.CFuncTypeArg("", py_object_type, None)], 0, 0))
            entry.utility_code_definition = Code.UtilityCode.load_cached("ClassMethod", "CythonFunction.c")
            entry.is_cfunction = 1
        return entry
class PyClassScope(ClassScope):
    """Namespace of a Python (non-extension) class.

    Applies CPython's class-private name mangling and marks declared
    variables as Python class attributes.
    """
    #  class_obj_cname  string  C variable holding class object
    is_py_class_scope = 1

    def mangle_class_private_name(self, name):
        return self.mangle_special_name(name)

    def mangle_special_name(self, name):
        # '__x' (no trailing '__') becomes '_ClassName__x', mirroring
        # CPython's private-name mangling.
        if name and name.startswith('__') and not name.endswith('__'):
            name = EncodedString('_%s%s' % (self.class_name.lstrip('_'), name))
        return name

    def lookup_here(self, name):
        name = self.mangle_special_name(name)
        return ClassScope.lookup_here(self, name)

    def declare_var(self, name, type, pos,
                    cname = None, visibility = 'private',
                    api = 0, in_pxd = 0, is_cdef = 0):
        name = self.mangle_special_name(name)
        if type is unspecified_type:
            type = py_object_type
        # Add an entry for a class attribute.
        entry = Scope.declare_var(self, name, type, pos,
                                  cname=cname, visibility=visibility,
                                  api=api, in_pxd=in_pxd, is_cdef=is_cdef)
        entry.is_pyglobal = 1
        entry.is_pyclass_attr = 1
        return entry

    def declare_nonlocal(self, name, pos):
        # Pull entry from outer scope into local scope
        orig_entry = self.lookup_here(name)
        if orig_entry and orig_entry.scope is self and not orig_entry.from_closure:
            error(pos, "'%s' redeclared as nonlocal" % name)
        else:
            entry = self.lookup(name)
            if entry is None:
                error(pos, "no binding for nonlocal '%s' found" % name)
            else:
                # FIXME: this works, but it's unclear if it's the
                # right thing to do
                self.entries[name] = entry

    def declare_global(self, name, pos):
        # Pull entry from global scope into local scope.
        if self.lookup_here(name):
            # BUG FIX: the name was previously not interpolated into the
            # format string ("'%s' redeclared ", 0), so the warning
            # printed a literal '%s'.
            warning(pos, "'%s' redeclared " % name, 0)
        else:
            entry = self.global_scope().lookup_target(name)
            self.entries[name] = entry

    def add_default_value(self, type):
        # Default values are stored on the enclosing scope.
        return self.outer_scope.add_default_value(type)
class CClassScope(ClassScope):
    """Namespace of an extension type (cdef class)."""
    #  parent_type               CClassType
    #  #typeobj_cname            string or None
    #  #objstruct_cname          string
    #  method_table_cname        string
    #  getset_table_cname        string
    #  has_pyobject_attrs        boolean  Any PyObject attributes?
    #  has_memoryview_attrs      boolean  Any memory view attributes?
    #  has_cyclic_pyobject_attrs boolean  Any PyObject attributes that may need GC?
    #  property_entries          [Entry]
    #  defined                   boolean  Defined in .pxd file
    #  implemented               boolean  Defined in .pyx file
    #  inherited_var_entries     [Entry]  Adapted var entries from base class
    is_c_class_scope = 1
    has_pyobject_attrs = False
    has_memoryview_attrs = False
    has_cyclic_pyobject_attrs = False
    defined = False
    implemented = False
    def __init__(self, name, outer_scope, visibility):
        ClassScope.__init__(self, name, outer_scope)
        if visibility != 'extern':
            # Only non-extern types get generated method/getset tables.
            self.method_table_cname = outer_scope.mangle(Naming.methtab_prefix, name)
            self.getset_table_cname = outer_scope.mangle(Naming.gstab_prefix, name)
        self.property_entries = []
        self.inherited_var_entries = []
    def needs_gc(self):
        # If the type or any of its base types have Python-valued
        # C attributes, then it needs to participate in GC.
        if self.has_cyclic_pyobject_attrs:
            return True
        base_type = self.parent_type.base_type
        if base_type and base_type.scope is not None:
            return base_type.scope.needs_gc()
        elif self.parent_type.is_builtin_type:
            return not self.parent_type.is_gc_simple
        return False
    def needs_tp_clear(self):
        """
        Do we need to generate an implementation for the tp_clear slot? Can
        be disabled to keep references for the __dealloc__ cleanup function.
        """
        return self.needs_gc() and not self.directives.get('no_gc_clear', False)
    def get_refcounted_entries(self, include_weakref=False,
                               include_gc_simple=True):
        """Partition var_entries into (py_attrs, py_buffers,
        memoryview_slices); returns (have_any, triple)."""
        py_attrs = []
        py_buffers = []
        memoryview_slices = []
        for entry in self.var_entries:
            if entry.type.is_pyobject:
                if include_weakref or entry.name != "__weakref__":
                    if include_gc_simple or not entry.type.is_gc_simple:
                        py_attrs.append(entry)
            elif entry.type == PyrexTypes.c_py_buffer_type:
                py_buffers.append(entry)
            elif entry.type.is_memoryviewslice:
                memoryview_slices.append(entry)
        have_entries = py_attrs or py_buffers or memoryview_slices
        return have_entries, (py_attrs, py_buffers, memoryview_slices)
    def declare_var(self, name, type, pos,
                    cname = None, visibility = 'private',
                    api = 0, in_pxd = 0, is_cdef = 0):
        """Declare either a C attribute (is_cdef) stored in the object
        struct, or a Python-level class attribute."""
        if is_cdef:
            # Add an entry for an attribute.
            if self.defined:
                error(pos,
                    "C attributes cannot be added in implementation part of"
                    " extension type defined in a pxd")
            if get_special_method_signature(name):
                error(pos,
                    "The name '%s' is reserved for a special method."
                        % name)
            if not cname:
                cname = name
                if visibility == 'private':
                    cname = c_safe_identifier(cname)
            if type.is_cpp_class and visibility != 'extern':
                # C++ attributes are constructed in-place; need <new>.
                type.check_nullary_constructor(pos)
                self.use_utility_code(Code.UtilityCode("#include <new>"))
            entry = self.declare(name, cname, type, pos, visibility)
            entry.is_variable = 1
            self.var_entries.append(entry)
            if type.is_memoryviewslice:
                self.has_memoryview_attrs = True
            elif type.is_pyobject and name != '__weakref__':
                self.has_pyobject_attrs = True
                if (not type.is_builtin_type
                        or not type.scope or type.scope.needs_gc()):
                    self.has_cyclic_pyobject_attrs = True
            if visibility not in ('private', 'public', 'readonly'):
                error(pos,
                    "Attribute of extension type cannot be declared %s" % visibility)
            if visibility in ('public', 'readonly'):
                # If the field is an external typedef, we cannot be sure about the type,
                # so do conversion ourself rather than rely on the CPython mechanism (through
                # a property; made in AnalyseDeclarationsTransform).
                entry.needs_property = True
                if name == "__weakref__":
                    error(pos, "Special attribute __weakref__ cannot be exposed to Python")
                if not type.is_pyobject:
                    if (not type.create_to_py_utility_code(self) or
                        (visibility=='public' and not
                         type.create_from_py_utility_code(self))):
                        error(pos,
                              "C attribute of type '%s' cannot be accessed from Python" % type)
            else:
                entry.needs_property = False
            return entry
        else:
            if type is unspecified_type:
                type = py_object_type
            # Add an entry for a class attribute.
            entry = Scope.declare_var(self, name, type, pos,
                                      cname=cname, visibility=visibility,
                                      api=api, in_pxd=in_pxd, is_cdef=is_cdef)
            entry.is_member = 1
            entry.is_pyglobal = 1 # xxx: is_pyglobal changes behaviour in so many places that
                                  # I keep it in for now. is_member should be enough
                                  # later on
            self.namespace_cname = "(PyObject *)%s" % self.parent_type.typeptr_cname
            return entry
    def declare_pyfunction(self, name, pos, allow_redefine=False):
        # Add an entry for a method.
        if name in ('__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__'):
            error(pos, "Special method %s must be implemented via __richcmp__" % name)
        if name == "__new__":
            error(pos, "__new__ method of extension type will change semantics "
                "in a future version of Pyrex and Cython. Use __cinit__ instead.")
        entry = self.declare_var(name, py_object_type, pos,
                                 visibility='extern')
        special_sig = get_special_method_signature(name)
        if special_sig:
            # Special methods get put in the method table with a particular
            # signature declared in advance.
            entry.signature = special_sig
            entry.is_special = 1
        else:
            entry.signature = pymethod_signature
            entry.is_special = 0
        self.pyfunc_entries.append(entry)
        return entry
    def lookup_here(self, name):
        # '__new__' is an alias for '__cinit__' in extension types.
        if name == "__new__":
            name = EncodedString("__cinit__")
        entry = ClassScope.lookup_here(self, name)
        if entry and entry.is_builtin_cmethod:
            if not self.parent_type.is_builtin_type:
                # For subtypes of builtin types, we can only return
                # optimised C methods if the type if final.
                # Otherwise, subtypes may choose to override the
                # method, but the optimisation would prevent the
                # subtype method from being called.
                if not self.parent_type.is_final_type:
                    return None
        return entry
    def declare_cfunction(self, name, type, pos,
                          cname = None, visibility = 'private', api = 0, in_pxd = 0,
                          defining = 0, modifiers = (), utility_code = None):
        """Declare a cdef method, checking the self argument and signature
        compatibility against any previous declaration."""
        if get_special_method_signature(name) and not self.parent_type.is_builtin_type:
            error(pos, "Special methods must be declared with 'def', not 'cdef'")
        args = type.args
        if not args:
            error(pos, "C method has no self argument")
        elif not self.parent_type.assignable_from(args[0].type):
            error(pos, "Self argument (%s) of C method '%s' does not match parent type (%s)" %
                  (args[0].type, name, self.parent_type))
        entry = self.lookup_here(name)
        if cname is None:
            cname = c_safe_identifier(name)
        if entry:
            if not entry.is_cfunction:
                warning(pos, "'%s' redeclared " % name, 0)
            else:
                if defining and entry.func_cname:
                    error(pos, "'%s' already defined" % name)
                #print "CClassScope.declare_cfunction: checking signature" ###
                if entry.is_final_cmethod and entry.is_inherited:
                    error(pos, "Overriding final methods is not allowed")
                elif type.same_c_signature_as(entry.type, as_cmethod = 1) and type.nogil == entry.type.nogil:
                    pass
                elif type.compatible_signature_with(entry.type, as_cmethod = 1) and type.nogil == entry.type.nogil:
                    # Compatible but not identical: add a new overload entry.
                    entry = self.add_cfunction(name, type, pos, cname, visibility='ignore', modifiers=modifiers)
                    defining = 1
                else:
                    error(pos, "Signature not compatible with previous declaration")
                    error(entry.pos, "Previous declaration is here")
        else:
            if self.defined:
                error(pos,
                    "C method '%s' not previously declared in definition part of"
                    " extension type" % name)
            entry = self.add_cfunction(name, type, pos, cname,
                                       visibility, modifiers)
        if defining:
            entry.func_cname = self.mangle(Naming.func_prefix, name)
        entry.utility_code = utility_code
        type.entry = entry
        if u'inline' in modifiers:
            entry.is_inline_cmethod = True
        if (self.parent_type.is_final_type or entry.is_inline_cmethod or
            self.directives.get('final')):
            # Final/inline methods can be called directly, not via vtable.
            entry.is_final_cmethod = True
            entry.final_func_cname = entry.func_cname
        return entry
    def add_cfunction(self, name, type, pos, cname, visibility, modifiers):
        # Add a cfunction entry without giving it a func_cname.
        # prev_entry links overload chains for vtable generation.
        prev_entry = self.lookup_here(name)
        entry = ClassScope.add_cfunction(self, name, type, pos, cname,
                                         visibility, modifiers)
        entry.is_cmethod = 1
        entry.prev_entry = prev_entry
        return entry
    def declare_builtin_cfunction(self, name, type, cname, utility_code = None):
        # overridden methods of builtin types still have their Python
        # equivalent that must be accessible to support bound methods
        name = EncodedString(name)
        entry = self.declare_cfunction(name, type, None, cname, visibility='extern',
                                       utility_code = utility_code)
        var_entry = Entry(name, name, py_object_type)
        var_entry.is_variable = 1
        var_entry.is_builtin = 1
        var_entry.utility_code = utility_code
        entry.as_variable = var_entry
        return entry
    def declare_property(self, name, doc, pos):
        """Declare a property; its accessors live in a nested
        PropertyScope attached to the entry."""
        entry = self.lookup_here(name)
        if entry is None:
            entry = self.declare(name, name, py_object_type, pos, 'private')
        entry.is_property = 1
        entry.doc = doc
        entry.scope = PropertyScope(name,
            outer_scope = self.global_scope(), parent_scope = self)
        entry.scope.parent_type = self.parent_type
        self.property_entries.append(entry)
        return entry
    def declare_inherited_c_attributes(self, base_scope):
        # Declare entries for all the C attributes of an
        # inherited type, with cnames modified appropriately
        # to work with this type.
        def adapt(cname):
            # NOTE(review): uses the loop variable base_entry.cname from
            # the enclosing scope, not its own argument — confirm intent.
            return "%s.%s" % (Naming.obj_base_cname, base_entry.cname)
        entries = base_scope.inherited_var_entries + base_scope.var_entries
        for base_entry in entries:
            entry = self.declare(
                base_entry.name, adapt(base_entry.cname),
                base_entry.type, None, 'private')
            entry.is_variable = 1
            self.inherited_var_entries.append(entry)
        # If the class defined in a pxd, specific entries have not been added.
        # Ensure now that the parent (base) scope has specific entries
        # Iterate over a copy as get_all_specialized_function_types() will mutate
        for base_entry in base_scope.cfunc_entries[:]:
            if base_entry.type.is_fused:
                base_entry.type.get_all_specialized_function_types()
        for base_entry in base_scope.cfunc_entries:
            cname = base_entry.cname
            var_entry = base_entry.as_variable
            is_builtin = var_entry and var_entry.is_builtin
            if not is_builtin:
                cname = adapt(cname)
            entry = self.add_cfunction(base_entry.name, base_entry.type,
                                       base_entry.pos, cname,
                                       base_entry.visibility, base_entry.func_modifiers)
            entry.is_inherited = 1
            if base_entry.is_final_cmethod:
                entry.is_final_cmethod = True
                entry.is_inline_cmethod = base_entry.is_inline_cmethod
                if (self.parent_scope == base_scope.parent_scope or
                    entry.is_inline_cmethod):
                    entry.final_func_cname = base_entry.final_func_cname
            if is_builtin:
                entry.is_builtin_cmethod = True
                entry.as_variable = var_entry
            if base_entry.utility_code:
                entry.utility_code = base_entry.utility_code
class CppClassScope(Scope):
    """Namespace of a C++ class, including template parameters,
    constructors ('<init>') and destructor ('<del>')."""
    is_cpp_class_scope = 1
    default_constructor = None
    type = None
    def __init__(self, name, outer_scope, templates=None):
        Scope.__init__(self, name, outer_scope, None)
        self.directives = outer_scope.directives
        self.inherited_var_entries = []
        if templates is not None:
            # Each template parameter becomes a placeholder type entry.
            for T in templates:
                template_entry = self.declare(
                    T, T, PyrexTypes.TemplatePlaceholderType(T), None, 'extern')
                template_entry.is_type = 1
    def declare_var(self, name, type, pos,
                    cname = None, visibility = 'extern',
                    api = 0, in_pxd = 0, is_cdef = 0,
                    allow_pyobject = 0, defining = 0):
        # Add an entry for an attribute (or method; see declare_cfunction).
        if not cname:
            cname = name
        entry = self.lookup_here(name)
        if defining and entry is not None:
            if not entry.type.same_as(type):
                error(pos, "Function signature does not match previous declaration")
        else:
            entry = self.declare(name, cname, type, pos, visibility)
        entry.is_variable = 1
        if type.is_cfunction and self.type:
            # Qualify method cnames with the class name.
            entry.func_cname = "%s::%s" % (self.type.declaration_code(""), cname)
        if name != "this" and (defining or name != "<init>"):
            self.var_entries.append(entry)
        if type.is_pyobject and not allow_pyobject:
            error(pos,
                "C++ class member cannot be a Python object")
        return entry
    def check_base_default_constructor(self, pos):
        # Look for default constructors in all base classes.
        # Caches the result in self.default_constructor: True when all
        # bases are default-constructible, otherwise the offending class
        # name (falsy values trigger the error path on later calls).
        if self.default_constructor is None:
            entry = self.lookup(self.name)
            if not entry.type.base_classes:
                self.default_constructor = True
                return
            for base_class in entry.type.base_classes:
                if base_class is PyrexTypes.error_type:
                    continue
                temp_entry = base_class.scope.lookup_here("<init>")
                found = False
                if temp_entry is None:
                    continue
                for alternative in temp_entry.all_alternatives():
                    type = alternative.type
                    if type.is_ptr:
                        type = type.base_type
                    if not type.args:
                        found = True
                        break
                if not found:
                    self.default_constructor = temp_entry.scope.name
                    error(pos, "no matching function for call to " \
                            "%s::%s()" % (temp_entry.scope.name, temp_entry.scope.name))
        elif not self.default_constructor:
            error(pos, "no matching function for call to %s::%s()" %
                  (self.default_constructor, self.default_constructor))
    def declare_cfunction(self, name, type, pos,
                          cname = None, visibility = 'extern', api = 0, in_pxd = 0,
                          defining = 0, modifiers = (), utility_code = None):
        """Declare a method; constructors are renamed to '<init>' and the
        destructor to '<del>', both with an invisible-void return type."""
        if name in (self.name.split('::')[-1], '__init__') and cname is None:
            self.check_base_default_constructor(pos)
            cname = self.type.cname
            name = '<init>'
            type.return_type = PyrexTypes.InvisibleVoidType()
        elif name == '__dealloc__' and cname is None:
            cname = "~%s" % self.type.cname
            name = '<del>'
            type.return_type = PyrexTypes.InvisibleVoidType()
        prev_entry = self.lookup_here(name)
        entry = self.declare_var(name, type, pos,
                                 defining=defining,
                                 cname=cname, visibility=visibility)
        if prev_entry and not defining:
            # Chain overloads so all alternatives remain reachable.
            entry.overloaded_alternatives = prev_entry.all_alternatives()
        entry.utility_code = utility_code
        type.entry = entry
        return entry
    def declare_inherited_cpp_attributes(self, base_scope):
        # Declare entries for all the C++ attributes of an
        # inherited type, with cnames modified appropriately
        # to work with this type.
        for base_entry in \
            base_scope.inherited_var_entries + base_scope.var_entries:
                #contructor is not inherited
                if base_entry.name == "<init>":
                    continue
                #print base_entry.name, self.entries
                if base_entry.name in self.entries:
                    base_entry.name # FIXME: is there anything to do in this case?
                entry = self.declare(base_entry.name, base_entry.cname,
                    base_entry.type, None, 'extern')
                entry.is_variable = 1
                self.inherited_var_entries.append(entry)
        for base_entry in base_scope.cfunc_entries:
            entry = self.declare_cfunction(base_entry.name, base_entry.type,
                                           base_entry.pos, base_entry.cname,
                                           base_entry.visibility, 0,
                                           modifiers = base_entry.func_modifiers,
                                           utility_code = base_entry.utility_code)
            entry.is_inherited = 1
    def specialize(self, values):
        """Return a copy of this scope with template placeholders
        substituted according to `values`."""
        scope = CppClassScope(self.name, self.outer_scope)
        for entry in self.entries.values():
            if entry.is_type:
                scope.declare_type(entry.name,
                                   entry.type.specialize(values),
                                   entry.pos,
                                   entry.cname,
                                   template=1)
            elif entry.type.is_cfunction:
                for e in entry.all_alternatives():
                    scope.declare_cfunction(e.name,
                                            e.type.specialize(values),
                                            e.pos,
                                            e.cname,
                                            utility_code = e.utility_code)
            else:
                scope.declare_var(entry.name,
                                  entry.type.specialize(values),
                                  entry.pos,
                                  entry.cname,
                                  entry.visibility)
        return scope
class PropertyScope(Scope):
    """Scope holding the __get__, __set__ and __del__ methods for
    a property of an extension type."""
    #  parent_type   PyExtensionType   The type to which the property belongs
    is_property_scope = 1
    def declare_pyfunction(self, name, pos, allow_redefine=False):
        # Add an entry for a method; only the three accessor names are
        # valid, each with a fixed signature known in advance.
        signature = get_property_accessor_signature(name)
        if signature:
            entry = self.declare(name, name, py_object_type, pos, 'private')
            entry.is_special = 1
            entry.signature = signature
            return entry
        else:
            error(pos, "Only __get__, __set__ and __del__ methods allowed "
                "in a property declaration")
            return None
class CConstScope(Scope):
    """Read-only view over another type's scope: lookups return copies of
    the base scope's entries with their types const-qualified."""
    def __init__(self, const_base_type_scope):
        Scope.__init__(
            self,
            'const_' + const_base_type_scope.name,
            const_base_type_scope.outer_scope,
            const_base_type_scope.parent_scope)
        self.const_base_type_scope = const_base_type_scope
    def lookup_here(self, name):
        entry = self.const_base_type_scope.lookup_here(name)
        if entry is not None:
            # Copy so the base scope's entry keeps its unqualified type.
            entry = copy.copy(entry)
            entry.type = PyrexTypes.c_const_type(entry.type)
            return entry
class TemplateScope(Scope):
    """Scope introduced for template parameters of a templated C++ declaration."""
    def __init__(self, name, outer_scope):
        Scope.__init__(self, name, outer_scope, None)
        self.directives = outer_scope.directives
| bsd-3-clause |
viger/docker | proxy/proxy/code/default/x_tunnel/local/base_container.py | 2 | 23086 | import threading
import time
import socket
import struct
import select
import utils
from xlog import getLogger
xlog = getLogger("x_tunnel")
class WriteBuffer(object):
    """Collects string fragments in a list and joins them only when the
    full string is requested, avoiding quadratic concatenation.

    NOTE: '+' mutates and returns the left operand (see __add__).
    """

    def __init__(self, s=None):
        # A str seeds the buffer; anything else starts it empty.
        if isinstance(s, str):
            self.buffer_list = [s]
            self.string_len = len(s)
        else:
            self.reset()

    def reset(self):
        """Drop all buffered fragments."""
        self.buffer_list = []
        self.string_len = 0

    def __len__(self):
        return self.string_len

    def __add__(self, other):
        self.append(other)
        return self

    def insert(self, s):
        """Prepend a str or WriteBuffer to the buffered data."""
        if isinstance(s, WriteBuffer):
            self.buffer_list = s.buffer_list + self.buffer_list
            self.string_len += s.string_len
            return
        if not isinstance(s, str):
            raise Exception("WriteBuffer append not string or StringBuffer")
        self.buffer_list.insert(0, s)
        self.string_len += len(s)

    def append(self, s):
        """Append a str or WriteBuffer to the buffered data."""
        if isinstance(s, WriteBuffer):
            self.buffer_list.extend(s.buffer_list)
            self.string_len += s.string_len
            return
        if not isinstance(s, str):
            raise Exception("WriteBuffer append not string or StringBuffer")
        self.buffer_list.append(s)
        self.string_len += len(s)

    def __str__(self):
        return self.get_string()

    def get_string(self):
        """Join and return all buffered fragments as a single string."""
        return "".join(self.buffer_list)
class ReadBuffer(object):
    """Sliding window over a string: get() consumes characters from the
    front, get_buf() hands out a sub-window sharing the backing string."""

    def __init__(self, buf, begin=0, size=None):
        buf_len = len(buf)
        if size is None:
            # Window covers everything from `begin` to the end.
            if begin > buf_len:
                raise Exception("ReadBuffer buf_len:%d, start:%d" % (buf_len, begin))
            size = buf_len - begin
        elif begin + size > buf_len:
            raise Exception("ReadBuffer buf_len:%d, start:%d len:%d" % (buf_len, begin, size))
        self.buf = buf
        self.begin = begin
        self.size = size

    def __len__(self):
        return self.size

    def get(self, size=None):
        """Consume and return `size` chars (default: all remaining)."""
        if size is None:
            size = self.size
        elif size > self.size:
            raise Exception("ReadBuffer get %d but left %d" % (size, self.size))
        start = self.begin
        self.begin = start + size
        self.size -= size
        return self.buf[start:start + size]

    def get_buf(self, size=None):
        """Consume `size` chars and return them as a new ReadBuffer view."""
        if size is None:
            size = self.size
        elif size > self.size:
            raise Exception("ReadBuffer get %d but left %d" % (size, self.size))
        view = ReadBuffer(self.buf, self.begin, size)
        self.begin += size
        self.size -= size
        return view
class AckPool():
    """Thread-safe accumulator for ACK payload data awaiting transmission."""

    def __init__(self):
        self.mutex = threading.Lock()
        self.reset()

    def reset(self):
        """Discard any pending ACK data."""
        with self.mutex:
            self.ack_buffer = WriteBuffer()

    def put(self, data):
        """Queue ACK data for the next get()."""
        with self.mutex:
            self.ack_buffer.append(data)

    def get(self):
        """Atomically take all queued ACK data, leaving the pool empty."""
        with self.mutex:
            data = self.ack_buffer
            self.ack_buffer = WriteBuffer()
        return data

    def status(self):
        """Return an HTML fragment describing the pool state."""
        out_string = "Ack_pool:len %d<br>\r\n" % len(self.ack_buffer)
        return out_string
class BlockSendPool():
    """Coalesces outgoing data into sequence-numbered blocks.

    Data accumulates in ``last_block`` until it exceeds ``max_payload``
    (or a no-delay put seals it), then moves into ``block_list`` under
    ``head_sn``.  ``get`` hands out sealed blocks in order, waiting up to
    a deadline for data; a wake thread expires waiters every
    ``send_delay`` ms.
    """
    def __init__(self, max_payload, send_delay):
        self.mutex = threading.Lock()
        self.wake_thread = None
        self.max_payload = max_payload  # max bytes before a block is sealed
        self.send_delay = send_delay    # coalescing delay in milliseconds
        self.start()
    def start(self):
        """(Re)initialize state and start the wake thread if delay is used."""
        with self.mutex:
            self.head_sn = 1            # next sn to assign to a sealed block
            self.tail_sn = 1            # next sn get() will hand out
            self.block_list = {}        # sn -> sealed WriteBuffer
            self.last_block = WriteBuffer()  # block currently being filled
            self.waiters = []  # (end_time, Lock())
            self.last_notify_time = 0
            self.running = True
        if self.send_delay:
            self.wake_thread = threading.Thread(target=self.wake_worker)
            self.wake_thread.daemon = True
            self.wake_thread.start()
        else:
            self.wake_thread = None
    def stop(self):
        """Stop the pool, releasing all blocked waiters and joining the
        wake thread."""
        # xlog.info("Block_send_pool stop")
        self.running = False
        with self.mutex:
            for end_time, lock in self.waiters:
                lock.release()
            self.waiters = []
        if self.wake_thread:
            # xlog.debug("join wake_thread")
            self.wake_thread.join()
            self.wake_thread = None
        # xlog.info("Block_send_pool stop finished")
    def wake_worker(self):
        """Background loop: release the earliest waiter once its deadline
        passes or partial data becomes available."""
        wake_interval = self.send_delay / 1000.0
        while self.running:
            with self.mutex:
                if len(self.waiters):
                    end_time, lock = self.waiters[0]
                    if end_time < time.time() or len(self.last_block):
                        lock.release()
                        del self.waiters[0]
                        self.last_notify_time = time.time()
            time.sleep(wake_interval)
        # xlog.debug("wake_worker exit")
    def put(self, data, no_delay=False):
        """Append data; seal the current block when it is full, delay is
        disabled, or no_delay is set.  Empty data just pokes a waiter."""
        if len(data) == 0:
            with self.mutex:
                self.notify()
            return
        # xlog.debug("send_pool put len:%d no_deay:%r", len(data), no_delay)
        with self.mutex:
            self.last_block.append(data)
            if len(self.last_block) > self.max_payload or self.send_delay == 0 or no_delay:
                self.block_list[self.head_sn] = self.last_block
                self.last_block = WriteBuffer()
                self.head_sn += 1
                self.notify()
    def notify(self):
        """Release the earliest waiter, if any.  Caller must hold mutex."""
        if len(self.waiters) == 0:
            # xlog.debug("notify none.")
            return
        end_time, lock = self.waiters.pop(0)
        lock.release()
        self.last_notify_time = time.time()
    def wait(self, end_time):
        """Block (deadline-ordered) until notified.  Caller must hold
        mutex; it is released while blocked and reacquired before return."""
        lock = threading.Lock()
        lock.acquire()
        if len(self.waiters) == 0:
            self.waiters.append((end_time, lock))
        else:
            # Keep waiters sorted by deadline (earliest first).
            is_max = True
            for i in range(0, len(self.waiters)):
                iend_time, ilock = self.waiters[i]
                if iend_time > end_time:
                    is_max = False
                    break
            if is_max:
                self.waiters.append((end_time, lock))
            else:
                self.waiters.insert(i, (end_time, lock))
        self.mutex.release()
        lock.acquire()  # blocks until notify()/wake_worker releases it
        self.mutex.acquire()
    def get(self, timeout=24 * 3600):
        """Return (data, sn): the next sealed block, or the partial block
        after the deadline, or (empty, 0) on timeout/stop."""
        # xlog.debug("send_pool get")
        data = WriteBuffer()
        sn = 0
        begin_time = time.time()
        end_time = begin_time + timeout
        with self.mutex:
            # At most one wait cycle: check, maybe wait, check again.
            for wait_i in range(0, 2):
                if self.tail_sn < self.head_sn:
                    data = self.block_list[self.tail_sn]
                    del self.block_list[self.tail_sn]
                    sn = self.tail_sn
                    self.tail_sn += 1
                    break
                time_now = time.time()
                if time_now > end_time or len(self.last_block):
                    if len(self.last_block) > 0:
                        # Flush the partial block as its own sn.
                        data = self.last_block
                        sn = self.tail_sn
                        self.last_block = WriteBuffer()
                        self.head_sn += 1
                        self.tail_sn += 1
                    break
                # xlog.debug("send_pool get wait when no data, sn:%d tail:%d", sn, self.tail_sn)
                self.wait(end_time)
                if not self.running:
                    break
                # xlog.debug("send_pool get wake after no data, sn:%d tail:%d", sn, self.tail_sn)
        # xlog.debug("send_pool get, sn:%r len:%d t:%d", sn, len(data), (time.time() - begin_time)*1000)
        return data, sn
    def status(self):
        """Return an HTML fragment describing pool state."""
        out_string = "Block_send_pool:<br>\n"
        out_string += " head_sn:%d<br>\n" % self.head_sn
        out_string += " tail_sn:%d<br>\n" % self.tail_sn
        out_string += "block_list:<br>\n"
        for sn in sorted(self.block_list.iterkeys()):
            data = self.block_list[sn]
            out_string += "[%d] len:%d<br>\r\n" % (sn, len(data))
        out_string += "waiters:<br>\n"
        for i in range(0, len(self.waiters)):
            end_time, lock = self.waiters[i]
            out_string += "%d<br>\r\n" % ((end_time - time.time()))
        return out_string
class BlockReceivePool():
    """Tracks sequence-numbered inbound blocks, rejecting stale and
    duplicate ones.

    Each accepted block is handed to process_callback immediately (in
    arrival order, even out of sequence); next_sn always points at the
    lowest sequence number not yet seen.
    """

    def __init__(self, process_callback):
        self.lock = threading.Lock()
        self.process_callback = process_callback
        self.reset()

    def reset(self):
        """Restart numbering from sequence number 1."""
        self.next_sn = 1
        self.block_list = []

    def put(self, sn, data):
        """Accept block `sn`; return True if it was new, False if it was
        already consumed or is a duplicate of a pending block."""
        with self.lock:
            try:
                if sn < self.next_sn:
                    # Stale: already consumed earlier.
                    return False
                if sn > self.next_sn:
                    # Ahead of sequence: remember the sn unless duplicated.
                    if sn in self.block_list:
                        return False
                    self.block_list.append(sn)
                    self.process_callback(data)
                    return True
                # Exactly the block we were waiting for; advance past any
                # previously received out-of-order successors.
                self.process_callback(data)
                self.next_sn = sn + 1
                while sn + 1 in self.block_list:
                    sn += 1
                    self.block_list.remove(sn)
                    self.next_sn = sn + 1
                return True
            except Exception as e:
                raise Exception("recv_pool put sn:%d len:%d error:%r" % (sn, len(data), e))

    def status(self):
        """Return an HTML fragment describing pool state."""
        out_string = "Block_receive_pool:<br>\r\n"
        out_string += " next_sn:%d<br>\r\n" % self.next_sn
        for sn in sorted(self.block_list):
            out_string += "[%d] <br>\r\n" % (sn)
        return out_string
class Conn(object):
    """One multiplexed TCP connection inside a tunnel session.

    Bridges a local socket and the session transport: bytes read from the
    socket are framed (sequence number + command id) and handed to
    ``session.send_conn_data``; framed commands arriving from the peer are
    re-ordered by sequence number and executed by ``cmd_processor``.

    Command ids: 0 = create connection, 1 = data, 2 = closed, 3 = ack.
    """
    def __init__(self, session, conn_id, sock, host, port, windows_size, windows_ack, is_client, xlog):
        # xlog.info("session:%s Conn:%d host:%s port:%d", session.session_id, conn_id, host, port)
        self.host = host
        self.port = port
        self.session = session
        self.conn_id = conn_id
        self.sock = sock
        # flow-control parameters: windows_size caps unacked bytes,
        # windows_ack is the interval at which acks are sent back
        self.windows_size = windows_size
        self.windows_ack = windows_ack
        self.is_client = is_client
        # out-of-order command frames, keyed by sequence number
        self.cmd_queue = {}
        self.cmd_notice = threading.Condition()
        self.recv_notice = threading.Condition()
        self.running = True
        # byte counters used for flow control
        self.received_position = 0
        self.remote_acked_position = 0
        self.sended_position = 0
        self.sended_window_position = 0
        self.recv_thread = None
        self.cmd_thread = None
        self.xlog = xlog
        self.transfered_close_to_peer = False
        if sock:
            # an existing socket means no "create" frame (seq 0) will arrive,
            # so the first expected command is seq 1
            self.next_cmd_seq = 1
        else:
            self.next_cmd_seq = 0
        self.next_recv_seq = 1
    def start(self, block):
        """Start the worker threads.

        If ``block`` is True, run the command processor in the calling
        thread instead of spawning one.
        """
        if self.sock:
            self.recv_thread = threading.Thread(target=self.recv_worker)
            self.recv_thread.start()
        else:
            self.recv_thread = None
        if block:
            self.cmd_thread = None
            self.cmd_processor()
        else:
            self.cmd_thread = threading.Thread(target=self.cmd_processor)
            self.cmd_thread.start()
    def status(self):
        """Render this connection's state as an HTML fragment for debug pages."""
        out_string = "Conn[%d]: %s:%d<br>\r\n" % (self.conn_id, self.host, self.port)
        out_string += " received_position:%d/ Ack:%d <br>\n" % (self.received_position, self.remote_acked_position)
        out_string += " sended_position:%d/ win:%d<br>\n" % (self.sended_position, self.sended_window_position)
        out_string += " next_cmd_seq:%d<br>\n" % self.next_cmd_seq
        out_string += " next_recv_seq:%d<br>\n" % self.next_recv_seq
        out_string += " status: running:%r<br>\n" % self.running
        out_string += " transfered_close_to_peer:%r<br>\n" % self.transfered_close_to_peer
        out_string += " sock:%r<br>\n" % (self.sock is not None)
        out_string += " cmd_queue.len:%d " % len(self.cmd_queue)
        for seq in self.cmd_queue:
            out_string += "[%d]," % seq
        out_string += "<br>\n"
        return out_string
    def stop(self, reason=""):
        # Stop asynchronously: do_stop joins the worker threads, so running it
        # on a dedicated thread keeps callers (including the workers
        # themselves) from blocking.
        self.stop_thread = threading.Thread(target=self.do_stop, args=(reason,))
        self.stop_thread.start()
    def do_stop(self, reason="unknown"):
        """Tear down the connection: wake waiters, join workers, close socket."""
        self.xlog.debug("Conn session:%s conn:%d stop:%s", self.session.session_id, self.conn_id, reason)
        self.running = False
        # wake any thread blocked on either condition so it can observe
        # running == False and exit
        self.cmd_notice.acquire()
        self.cmd_notice.notify()
        self.cmd_notice.release()
        self.recv_notice.acquire()
        self.recv_notice.notify()
        self.recv_notice.release()
        if self.recv_thread:
            self.recv_thread.join()
            self.recv_thread = None
        if self.cmd_thread:
            self.cmd_thread.join()
            self.cmd_thread = None
        self.cmd_queue = {}
        if self.sock is not None:
            self.sock.close()
            self.sock = None
        # xlog.debug("Conn session:%s conn:%d stopped", self.session.session_id, self.conn_id)
        self.session.remove_conn(self.conn_id)
    def do_connect(self, host, port):
        """Resolve ``host`` and open a TCP connection to it.

        Returns (socket, True) on success or (exception, False) on failure.
        """
        self.xlog.info("session_id:%s create_conn %d %s:%d", self.session.session_id, self.conn_id, host, port)
        connect_timeout = 30
        sock = None
        # start_time = time.time()
        ip = ""
        try:
            if ':' in host:
                # IPV6
                ip = host
            elif utils.check_ip_valid(host):
                # IPV4
                ip = host
            else:
                # xlog.debug("getting ip of %s", host)
                ip = socket.gethostbyname(host)
                # xlog.debug("resolve %s to %s", host, ip)
            sock = socket.socket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
            # set reuseaddr option to avoid 10048 socket error
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # resize socket recv buffer 8K->32K to improve browser releated application performance
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32 * 1024)
            # disable negal algorithm to send http request quickly.
            sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
            # set a short timeout to trigger timeout retry more quickly.
            sock.settimeout(connect_timeout)
            sock.connect((ip, port))
            # record TCP connection time
            # conn_time = time.time() - start_time
            # xlog.debug("tcp conn %s %s time:%d", host, ip, conn_time * 1000)
            return sock, True
        except Exception as e:
            # conn_time = int((time.time() - start_time) * 1000)
            # xlog.debug("tcp conn host:%s %s:%d fail t:%d %r", host, ip, port, conn_time, e)
            if sock:
                sock.close()
            return e, False
    def put_cmd_data(self, data):
        """Queue one command frame received from the peer.

        ``data`` is assumed to be a buffer object exposing get()/get_buf()
        (defined elsewhere in this module -- TODO confirm) whose first 4
        bytes are the little-endian command sequence number.
        """
        with self.cmd_notice:
            seq = struct.unpack("<I", data.get(4))[0]
            if seq < self.next_cmd_seq:
                # sequence numbers must never go backwards
                raise Exception("put_send_data %s conn:%d seq:%d next:%d" % (self.session.session_id, self.conn_id,
                                                                             seq, self.next_cmd_seq))
            self.cmd_queue[seq] = data.get_buf()
            if seq == self.next_cmd_seq:
                # the processor may be waiting for exactly this frame
                self.cmd_notice.notify()
    def get_cmd_data(self):
        """Block until the next in-order command frame is available.

        Returns the frame payload, or False once the connection stops.
        """
        self.cmd_notice.acquire()
        try:
            while self.running:
                if self.next_cmd_seq in self.cmd_queue:
                    payload = self.cmd_queue[self.next_cmd_seq]
                    del self.cmd_queue[self.next_cmd_seq]
                    self.next_cmd_seq += 1
                    #self.xlog.debug("Conn session:%s conn:%d get data len:%d ", self.session.session_id, self.conn_id, len(payload))
                    return payload
                else:
                    self.cmd_notice.wait()
        finally:
            self.cmd_notice.release()
        return False
    def cmd_processor(self):
        """Main command loop: pop in-order frames and dispatch on command id."""
        while self.running:
            data = self.get_cmd_data()
            if not data:
                break
            # first byte after the sequence number is the command id
            cmd_id = struct.unpack("<B", data.get(1))[0]
            if cmd_id == 1:  # data
                self.send_to_sock(data)
            elif cmd_id == 3:  # ack:
                position = struct.unpack("<Q", data.get(8))[0]
                self.xlog.debug("Conn session:%s conn:%d ACK:%d", self.session.session_id, self.conn_id, position)
                if position > self.remote_acked_position:
                    self.remote_acked_position = position
                    # wake recv_worker in case it was blocked on the window
                    self.recv_notice.acquire()
                    self.recv_notice.notify()
                    self.recv_notice.release()
            elif cmd_id == 2:  # Closed
                self.xlog.info("Conn session:%s conn:%d Peer Close:%s", self.session.session_id, self.conn_id, data.get())
                if self.is_client:
                    self.transfer_peer_close("finish")
                self.stop("peer close")
            elif cmd_id == 0:  # Create connect
                # only valid as the very first frame of a server-side conn
                if self.port or len(self.host) or self.next_cmd_seq != 1 or self.sock:
                    raise Exception("put_send_data %s conn:%d Create but host:%s port:%d next seq:%d" % (
                        self.session.session_id, self.conn_id,
                        self.host, self.port, self.next_cmd_seq))
                self.sock_type = struct.unpack("<B", data.get(1))[0]
                host_len = struct.unpack("<H", data.get(2))[0]
                self.host = data.get(host_len)
                self.port = struct.unpack("<H", data.get(2))[0]
                sock, res = self.do_connect(self.host, self.port)
                if res is False:
                    self.xlog.debug("Conn session:%s conn:%d %s:%d Create fail", self.session.session_id, self.conn_id,
                                    self.host, self.port)
                    self.transfer_peer_close("connect fail")
                else:
                    self.xlog.info("Conn session:%s conn:%d %s:%d", self.session.session_id, self.conn_id, self.host,
                                   self.port)
                    self.sock = sock
                    self.recv_thread = threading.Thread(target=self.recv_worker)
                    self.recv_thread.start()
            else:
                self.xlog.error("Conn session:%s conn:%d unknown cmd_id:%d",
                                self.session.session_id, self.conn_id, cmd_id)
                raise Exception("put_send_data unknown cmd_id:%d" % cmd_id)
    def send_to_sock(self, data):
        """Write a data frame's payload to the local socket, then ack the
        peer once enough bytes have been delivered."""
        sock = self.sock
        if not sock:
            return
        payload_len = len(data)
        buf = data.buf
        start = data.begin
        end = data.begin + payload_len
        while start < end:
            # cap each send; sock.send may still write less than requested
            send_size = min(end - start, 65535)
            try:
                sended = sock.send(buf[start:start + send_size])
            except Exception as e:
                self.xlog.info("%s conn_id:%d send closed", self.session.session_id, self.conn_id)
                sock.close()
                self.sock = None
                if self.is_client:
                    self.do_stop(reason="send fail.")
                return
            start += sended
        self.sended_position += payload_len
        # only ack every windows_ack bytes to limit ack traffic
        if self.sended_position - self.sended_window_position > self.windows_ack:
            self.sended_window_position = self.sended_position
            self.transfer_ack(self.sended_position)
            # xlog.debug("Conn:%d ack:%d", self.conn_id, self.sended_window_position)
    def transfer_peer_close(self, reason=""):
        """Send a "closed" (cmd 2) frame to the peer, at most once."""
        with self.recv_notice:
            if self.transfered_close_to_peer:
                return
            self.transfered_close_to_peer = True
            cmd = struct.pack("<IB", self.next_recv_seq, 2)
            self.session.send_conn_data(self.conn_id, cmd + reason)
            self.next_recv_seq += 1
    def transfer_received_data(self, data):
        """Frame bytes read from the local socket as a data (cmd 1) frame
        and hand them to the session transport."""
        with self.recv_notice:
            if self.transfered_close_to_peer:
                return
            buf = WriteBuffer(struct.pack("<IB", self.next_recv_seq, 1))
            buf.append(data)
            self.next_recv_seq += 1
            self.received_position += len(data)
            # presumably: flush the first 16KB eagerly for latency, then let
            # the session batch -- confirm against send_conn_data
            if self.received_position < 16 * 1024:
                no_delay = True
            else:
                no_delay = False
            self.session.send_conn_data(self.conn_id, buf, no_delay)
    def transfer_ack(self, position):
        """Send an ack (cmd 3) frame telling the peer we delivered ``position`` bytes."""
        with self.recv_notice:
            if self.transfered_close_to_peer:
                return
            cmd_position = struct.pack("<IBQ", self.next_recv_seq, 3, position)
            self.session.send_conn_data(self.conn_id, cmd_position)
            self.next_recv_seq += 1
    def recv_worker(self):
        """Read from the local socket and forward data to the peer, pausing
        while the unacknowledged window is full."""
        sock = self.sock
        fdset = [sock, ]
        while self.running:
            self.recv_notice.acquire()
            try:
                if self.received_position > self.remote_acked_position + self.windows_size:
                    # flow control: block until the peer acks more data
                    # (woken from cmd_processor or do_stop)
                    # xlog.debug("Conn session:%s conn:%d recv blocked, rcv:%d, ack:%d", self.session.session_id, self.conn_id, self.received_position, self.remote_acked_position)
                    self.recv_notice.wait()
                    continue
            finally:
                self.recv_notice.release()
            # 1s timeout so the loop re-checks self.running periodically
            r, w, e = select.select(fdset, [], [], 1)
            if sock in r:
                try:
                    data = sock.recv(65535)
                except:
                    data = ""
                data_len = len(data)
                if data_len == 0:
                    # zero-length read: the local socket was closed
                    # xlog.debug("Conn session:%s conn:%d recv socket closed", self.session.session_id, self.conn_id)
                    self.transfer_peer_close("recv closed")
                    sock.close()
                    self.sock = None
                    self.recv_thread = None
                    if self.is_client:
                        self.do_stop(reason="recv fail.")
                    return
                self.transfer_received_data(data)
                # xlog.debug("Conn session:%s conn:%d Recv len:%d id:%d", self.session.session_id, self.conn_id, data_len, self.recv_id)
        # xlog.debug("Conn session:%s conn:%d Recv worker stopped", self.session.session_id, self.conn_id)
| mit |
g8os/grid | pyclient/zeroos/orchestrator/client/HealthCheck.py | 2 | 5193 | """
Auto-generated class for HealthCheck
"""
from .Message import Message
from . import client_support
class HealthCheck(object):
    """
    auto-generated. don't touch.
    """

    @staticmethod
    def create(category, id, interval, lasttime, messages, name, resource, stacktrace):
        """
        :type category: str
        :type id: str
        :type interval: float
        :type lasttime: float
        :type messages: list[Message]
        :type name: str
        :type resource: str
        :type stacktrace: str
        :rtype: HealthCheck
        """
        return HealthCheck(
            category=category,
            id=id,
            interval=interval,
            lasttime=lasttime,
            messages=messages,
            name=name,
            resource=resource,
            stacktrace=stacktrace,
        )

    def __init__(self, json=None, **kwargs):
        if json is None and not kwargs:
            raise ValueError('No data or kwargs present')

        class_name = 'HealthCheck'
        create_error = '{cls}: unable to create {prop} from value: {val}: {err}'
        required_error = '{cls}: missing required property {prop}'

        data = json or kwargs

        # Every property is required. Each (name, datatypes, factory) entry
        # drives the same validate-and-assign sequence that the generated
        # code previously spelled out once per property.
        specs = [
            ('category', [str], client_support.val_factory),
            ('id', [str], client_support.val_factory),
            ('interval', [float], client_support.val_factory),
            ('lasttime', [float], client_support.val_factory),
            ('messages', [Message], client_support.list_factory),
            ('name', [str], client_support.val_factory),
            ('resource', [str], client_support.val_factory),
            ('stacktrace', [str], client_support.val_factory),
        ]
        for property_name, datatypes, factory in specs:
            val = data.get(property_name)
            if val is None:
                raise ValueError(required_error.format(cls=class_name, prop=property_name))
            try:
                setattr(self, property_name, factory(val, datatypes))
            except ValueError as err:
                raise ValueError(create_error.format(cls=class_name, prop=property_name, val=val, err=err))

    def __str__(self):
        return self.as_json(indent=4)

    def as_json(self, indent=0):
        return client_support.to_json(self, indent=indent)

    def as_dict(self):
        return client_support.to_dict(self)
| apache-2.0 |
yamila-moreno/django | django/core/serializers/pyyaml.py | 439 | 2843 | """
YAML serializer.
Requires PyYaml (http://pyyaml.org/), but that's checked for in __init__.
"""
import collections
import decimal
import sys
from io import StringIO
import yaml
from django.core.serializers.base import DeserializationError
from django.core.serializers.python import (
Deserializer as PythonDeserializer, Serializer as PythonSerializer,
)
from django.db import models
from django.utils import six
# Use the C (faster) implementation if possible
try:
from yaml import CSafeLoader as SafeLoader
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import SafeLoader, SafeDumper
class DjangoSafeDumper(SafeDumper):
    """SafeDumper that also knows how to emit Decimal and OrderedDict values."""

    def represent_decimal(self, data):
        # Emit decimals as plain strings so any YAML consumer can read them.
        value = str(data)
        return self.represent_scalar('tag:yaml.org,2002:str', value)

    def represent_ordered_dict(self, data):
        # Emit an ordinary mapping; the item order is preserved as given.
        items = data.items()
        return self.represent_mapping('tag:yaml.org,2002:map', items)
# Register the custom representers so DjangoSafeDumper can serialize Decimal
# and OrderedDict values (a plain SafeDumper would raise RepresenterError).
DjangoSafeDumper.add_representer(decimal.Decimal, DjangoSafeDumper.represent_decimal)
DjangoSafeDumper.add_representer(collections.OrderedDict, DjangoSafeDumper.represent_ordered_dict)
class Serializer(PythonSerializer):
    """
    Convert a queryset to YAML.
    """
    internal_use_only = False

    def handle_field(self, obj, field):
        # A nasty special case: base YAML doesn't support serialization of time
        # types (as opposed to dates or datetimes, which it does support). Since
        # we want to use the "safe" serializer for better interoperability, we
        # need to do something with those pesky times. Converting 'em to strings
        # isn't perfect, but it's better than a "!!python/time" type which would
        # halt deserialization under any other language.
        if isinstance(field, models.TimeField) and getattr(obj, field.name) is not None:
            self._current[field.name] = str(getattr(obj, field.name))
        else:
            super(Serializer, self).handle_field(obj, field)

    def end_serialization(self):
        # Dump the accumulated python objects with the Decimal/OrderedDict
        # aware dumper defined above.
        yaml.dump(self.objects, self.stream, Dumper=DjangoSafeDumper, **self.options)

    def getvalue(self):
        # Grand-parent super: deliberately skip PythonSerializer's getvalue
        # and call the base Serializer implementation directly.
        return super(PythonSerializer, self).getvalue()
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of YAML data.

    Accepts bytes (decoded as UTF-8), text, or a file-like stream, and
    yields deserialized objects. Any parsing/deserialization failure is
    re-raised as DeserializationError with the original traceback.
    """
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode('utf-8')
    if isinstance(stream_or_string, six.string_types):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    try:
        # SafeLoader prevents construction of arbitrary Python objects
        for obj in PythonDeserializer(yaml.load(stream, Loader=SafeLoader), **options):
            yield obj
    except GeneratorExit:
        # must propagate so generator .close() works normally
        raise
    except Exception as e:
        # Map to deserializer error, preserving the original traceback
        six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
jonathanstrong/tmetrics | tmetrics/classification.py | 1 | 14853 | import theano, theano.tensor as T
import numpy as np
import pandas as pd
import lasagne
"""
note: we are following the sklearn api for metrics/loss functions,
where the first arg for a function is y true, and second value is
y predicted. this is the opposite of the theano functions, so just
keep in mind.
"""
#copy existing code and place in tmetrics namespace
# PEP 8 (E731): use def instead of binding lambdas to names. These thin
# wrappers adapt lasagne's (predictions, targets) argument order to the
# tmetrics (y_true, y_predicted) convention.
def multiclass_hinge_loss(yt, yp):
    """lasagne.objectives.multiclass_hinge_loss with tmetrics arg order."""
    return lasagne.objectives.multiclass_hinge_loss(yp, yt)


def squared_error(yt, yp):
    """lasagne.objectives.squared_error with tmetrics arg order."""
    return lasagne.objectives.squared_error(yp, yt)


def binary_accuracy(yt, yp):
    """lasagne.objectives.binary_accuracy with tmetrics arg order."""
    return lasagne.objectives.binary_accuracy(yp, yt)


def categorical_accuracy(yt, yp):
    """lasagne.objectives.categorical_accuracy with tmetrics arg order."""
    return lasagne.objectives.categorical_accuracy(yp, yt)
def binary_crossentropy(y_true, y_predicted):
    """
    wrapper of theano.tensor.nnet.binary_crossentropy
    args reversed to match tmetrics api

    y_true: tensor, binary targets ({0, 1})
    y_predicted: tensor, predicted probabilities in (0, 1)
    """
    return T.nnet.binary_crossentropy(y_predicted, y_true)
def categorical_crossentropy(y_true, y_predicted):
    """
    wrapper of theano.tensor.nnet.categorical_crossentropy
    args reversed to match tmetrics api

    y_true: tensor, targets (one-hot rows or integer class indices)
    y_predicted: tensor, predicted class probability distributions
    """
    # BUG FIX: this previously delegated to T.nnet.binary_crossentropy,
    # contradicting both the function name and the docstring.
    return T.nnet.categorical_crossentropy(y_predicted, y_true)
def binary_hinge_loss(y_true, y_predicted, binary=True, delta=1):
    """
    wrapper of lasagne.objectives.binary_hinge_loss
    args reversed to match tmetrics api

    binary: bool, passed through to lasagne (targets in {0,1} vs {-1,1})
    delta: hinge margin, passed through to lasagne
    """
    return lasagne.objectives.binary_hinge_loss(y_predicted, y_true, binary, delta)
def brier_score_loss(y_true, y_predicted, sample_weight=None):
    """
    port of sklearn.metrics.brier_score_loss

    Mean squared difference between binary ground truth and predicted
    probabilities; also works element-wise on 2D binary data, e.g.
    y_true: [[0, 1, 0], [1, 0, 0]]
    y_predicted: [[.1, .9, .3], [.4, .7, .2]]

    y_true: tensor, binary ground truth ({0, 1}); no pos_label support
    y_predicted: tensor, probabilities (floats between 0 and 1)
    sample_weight: tensor broadcastable to the squared errors, or None
    """
    squared_errors = (y_true - y_predicted) ** 2
    if sample_weight is None:
        return squared_errors.mean()
    return (squared_errors * sample_weight).mean()
def hamming_loss(y_true, y_predicted):
    """
    Fraction of labels that disagree, averaged over the final axis.

    note - works on n-dim arrays, means across the final axis
    note - we round predicted because float probabilities would not work
    """
    return T.neq(y_true, T.round(y_predicted)).astype(theano.config.floatX).mean(axis=-1)
def jaccard_similarity(y_true, y_predicted):
    """
    y_true: tensor ({1, 0})
    y_predicted: tensor ({1, 0})
    note - we round predicted because float probabilities would not work

    NOTE(review): this returns the proportion of *disagreeing* elements among
    positions where either vector is nonzero -- i.e. the Jaccard distance as
    in scipy.spatial.distance.jaccard, despite the "similarity" name.
    Confirm intended semantics before relying on the sign.
    """
    y_predicted = T.round(y_predicted).astype(theano.config.floatX)
    either_nonzero = T.or_(T.neq(y_true, 0), T.neq(y_predicted, 0))
    return T.and_(T.neq(y_true, y_predicted), either_nonzero).sum(axis=-1, dtype=theano.config.floatX) / either_nonzero.sum(axis=-1, dtype=theano.config.floatX)
def _nbool_correspond_all(u, v):
    """
    port of scipy.spatial.distance._nbool_correspond_all
    with dtype assumed to be integer/float (no bool in theano)
    sums are on last axis

    u, v: tensors with values in {0, 1}
    returns (nff, nft, ntf, ntt): counts of (False,False), (False,True),
    (True,False) and (True,True) element pairs along the last axis.
    """
    not_u = 1.0 - u
    not_v = 1.0 - v
    nff = (not_u * not_v).sum(axis=-1, dtype=theano.config.floatX)
    nft = (not_u * v).sum(axis=-1, dtype=theano.config.floatX)
    ntf = (u * not_v).sum(axis=-1, dtype=theano.config.floatX)
    ntt = (u * v).sum(axis=-1, dtype=theano.config.floatX)
    return (nff, nft, ntf, ntt)
def kulsinski_similarity(y_true, y_predicted):
    """
    Kulsinski statistic between binary vectors (predictions rounded first).

    NOTE(review): this matches scipy.spatial.distance.kulsinski, which is a
    *dissimilarity*, despite the "similarity" name -- confirm intended
    semantics. Also note n is taken from shape[0], which assumes 1-D input.
    """
    y_predicted = T.round(y_predicted)
    nff, nft, ntf, ntt = _nbool_correspond_all(y_true, y_predicted)
    n = y_true.shape[0].astype('float32')
    return (ntf + nft - ntt + n) / (ntf + nft + n)
def trapz(y, x=None, dx=1.0, axis=-1):
    """
    Integrate along the given axis using the composite trapezoidal rule.

    Reference implementation: numpy.trapz.

    Parameters
    ----------
    y : array_like
        Input array to integrate.
    x : array_like, optional
        Sample points corresponding to `y`. If None, spacing between all
        `y` elements is `dx`. (This branch uses theano ops, so `x` must be
        a symbolic tensor.)
    dx : scalar, optional
        If `x` is None, spacing given by `dx` is assumed. Default is 1.
    axis : int, optional
        Specify the axis.

    Returns
    -------
    trapz : float
        Definite integral as approximated by trapezoidal rule.

    Examples
    --------
    >>> np.trapz([1,2,3])
    4.0
    >>> np.trapz([1,2,3], dx=2)
    8.0
    """
    if x is None:
        d = dx
    else:
        if x.ndim == 1:
            d = T.extra_ops.diff(x)
            # reshape so d broadcasts against y along `axis`
            shape = T.ones(y.ndim, dtype='int8')
            shape = T.set_subtensor(shape[axis], d.shape[0])
            d = d.reshape(shape)
        else:
            d = T.extra_ops.diff(x, axis=axis)
    nd = y.ndim
    slice1 = [slice(None)] * nd
    slice2 = [slice(None)] * nd
    slice1[axis] = slice(1, None)
    slice2[axis] = slice(None, -1)
    # BUG FIX: index with tuples -- modern numpy rejects a *list* of slices
    # as an index, and theano accepts tuples as well.
    return (d * (y[tuple(slice1)] + y[tuple(slice2)]) / 2.0).sum(axis)
def auc(x, y):
    """Area under the curve y(x) via the trapezoidal rule (absolute value).

    Note the argument order: x first, then y (matching sklearn.metrics.auc).
    """
    return abs(trapz(y, x))
#def roc_curve(y_true, y_predicted):
# fps, tps, thresholds = _binary_clf_curve(y_true, y_predicted)
# fpr = fps.astype('float32') / fps[-1]
# tpr = tps.astype('float32') / tps[-1]
# return fpr, tpr, thresholds
#
#def roc_auc_score(y_true, y_predicted):
# fpr, tpr, thresholds = roc_curve(y_true, y_predicted)
# return auc(fpr, tpr)
def _last_axis_binary_clf_curve(y_true, y_predicted):
"""
returns y_predicted.shape[-2] binary clf curves calculated axis[-1]-wise
this is a numpy implementation
"""
assert y_true.shape == y_predicted.shape
axis = -1
sort_idx = list(np.ogrid[[slice(x) for x in y_predicted.shape]])
sort_idx[axis] = y_predicted.argsort(axis=axis).astype('int8')
reverse = [slice(None)] * y_predicted.ndim
reverse[axis] = slice(None, None, -1)
sorted_y_predicted = y_predicted[sort_idx][reverse]
sorted_y_true = y_true[sort_idx][reverse]
tps = sorted_y_true.cumsum(axis=axis)
count = (np.ones(y_predicted.shape) * np.arange(y_predicted.shape[-1]))
fps = 1 + count - tps
threshold_values = sorted_y_predicted
return fps, tps, threshold_values
def last_axis_roc_curve(y_true, y_predicted):
    """ROC curves computed along the last axis (numpy implementation).

    Returns (fpr, tpr, thresholds), each shaped like y_predicted.
    """
    fps, tps, thresholds = _last_axis_binary_clf_curve(y_true, y_predicted)
    # BUG FIX: normalize by the final (total) counts using ellipsis
    # indexing; the previous list-of-slices index form is rejected by
    # modern numpy.
    fpr = fps.astype('float32') / np.expand_dims(fps[..., -1], axis=-1)
    tpr = tps.astype('float32') / np.expand_dims(tps[..., -1], axis=-1)
    return fpr, tpr, thresholds
def last_axis_roc_auc_scores(y_true, y_predicted):
    """ROC AUC score for each curve along the last axis (numpy)."""
    fpr, tpr, _ = last_axis_roc_curve(y_true, y_predicted)
    return np.trapz(tpr, fpr)
def _vector_clf_curve(y_true, y_predicted):
    """
    sklearn.metrics._binary_clf_curve port

    y_true: tensor (vector): y true
    y_predicted: tensor (vector): y predicted

    returns: fps, tps, threshold_values
    fps: tensor (vector): false positivies
    tps: tensor (vector): true positives
    threshold_values: tensor (vector): value of y predicted at each threshold
        along the curve

    restrictions:
        -not numpy compatible
        -only works with two vectors (not matrix or tensor)
    """
    assert y_true.ndim == y_predicted.ndim == 1
    # sort scores descending; NOTE(review): int8 indices overflow past 127
    # elements -- confirm expected input sizes
    desc_score_indices = y_predicted.argsort()[::-1].astype('int8')
    sorted_y_predicted = y_predicted[desc_score_indices]
    sorted_y_true = y_true[desc_score_indices]
    # keep only indices where the score changes (distinct thresholds) ...
    distinct_value_indices = (1-T.isclose(T.extra_ops.diff(sorted_y_predicted), 0)).nonzero()[0]
    # ... plus the final index so the curve always ends at the last point
    curve_cap = T.extra_ops.repeat(sorted_y_predicted.size - 1, 1)
    threshold_indices = T.concatenate([distinct_value_indices, curve_cap]).astype('int8')
    tps = T.extra_ops.cumsum(sorted_y_true[threshold_indices])
    fps = 1 + threshold_indices - tps
    threshold_values = sorted_y_predicted[threshold_indices]
    return fps, tps, threshold_values
def _matrix_clf_curve(y_true, y_predicted):
    """Row-wise binary clf curves for 2-D symbolic tensors (axis -1)."""
    assert y_true.ndim == y_predicted.ndim == 2
    # fancy-index every row by its own descending score order
    row_i = T.arange(y_true.shape[0], dtype='int8').dimshuffle(0, 'x')
    col_i = y_predicted.argsort().astype('int8')
    reverse = [slice(None), slice(None, None, -1)]
    y_true = y_true[row_i, col_i][reverse]
    y_predicted = y_predicted[row_i, col_i][reverse]
    tps = y_true.cumsum(axis=-1)
    counts = T.ones_like(y_true) * T.arange(y_predicted.shape[-1], dtype='int8')
    fps = 1 + counts - tps
    return fps, tps, y_predicted
def _tensor3_clf_curve(y_true, y_predicted):
    """Binary clf curves along axis -1 for 3-D symbolic tensors."""
    assert y_true.ndim == y_predicted.ndim == 3
    # broadcastable index tensors for the leading axes, argsort on the last
    x_i = T.arange(y_true.shape[0], dtype='int8').dimshuffle(0, 'x', 'x')
    y_i = T.arange(y_true.shape[1], dtype='int8').dimshuffle('x', 0, 'x')
    z_i = y_predicted.argsort().astype('int8')
    reverse = [slice(None), slice(None), slice(None, None, -1)]
    y_true = y_true[x_i, y_i, z_i][reverse]
    y_predicted = y_predicted[x_i, y_i, z_i][reverse]
    tps = y_true.cumsum(axis=-1)
    counts = T.ones_like(y_true) * T.arange(y_predicted.shape[-1], dtype='int8')
    fps = 1 + counts - tps
    return fps, tps, y_predicted
def _tensor4_clf_curve(y_true, y_predicted):
    """Binary clf curves along axis -1 for 4-D symbolic tensors."""
    assert y_true.ndim == y_predicted.ndim == 4
    # broadcastable index tensors for the leading axes, argsort on the last
    a_i = T.arange(y_true.shape[0], dtype='int8').dimshuffle(0, 'x', 'x', 'x')
    b_i = T.arange(y_true.shape[1], dtype='int8').dimshuffle('x', 0, 'x', 'x')
    c_i = T.arange(y_true.shape[2], dtype='int8').dimshuffle('x', 'x', 0, 'x')
    d_i = y_predicted.argsort().astype('int8')
    reverse = [slice(None), slice(None), slice(None), slice(None, None, -1)]
    y_true = y_true[a_i, b_i, c_i, d_i][reverse]
    y_predicted = y_predicted[a_i, b_i, c_i, d_i][reverse]
    tps = y_true.cumsum(axis=-1)
    counts = T.ones_like(y_true) * T.arange(y_predicted.shape[-1], dtype='int8')
    fps = 1 + counts - tps
    return fps, tps, y_predicted
def _binary_clf_curves(y_true, y_predicted):
    """
    Dispatch to the clf-curve implementation matching the input rank;
    curves are calculated along axis -1.

    note - despite trying several approaches, could not seem to get a
    n-dimensional version of clf_curve to work, so only ranks 1-4 are
    supported, each via its own implementation.
    """
    if not (y_true.ndim == y_predicted.ndim):
        raise ValueError('Dimension mismatch, ({}, {})'.format(y_true.ndim, y_predicted.ndim))
    if not isinstance(y_true, T.TensorVariable) or not isinstance(y_predicted, T.TensorVariable):
        raise TypeError('This only works for symbolic variables.')
    dispatch = {
        1: _vector_clf_curve,
        2: _matrix_clf_curve,
        3: _tensor3_clf_curve,
        4: _tensor4_clf_curve,
    }
    if y_true.ndim not in dispatch:
        raise NotImplementedError('Not implemented for ndim {}'.format(y_true.ndim))
    clf_curve_fn = dispatch[y_true.ndim]
    fps, tps, thresholds = clf_curve_fn(y_true, y_predicted)
    return fps, tps, thresholds
def _last_col_idx(ndim):
last_col = [slice(None) for x in xrange(ndim)]
last_col[-1] = -1
return last_col
def _reverse_idx(ndim):
reverse = [slice(None) for _ in range(ndim-1)]
reverse.append(slice(None, None, -1))
return reverse
def roc_curves(y_true, y_predicted):
    "returns roc curves calculated axis -1-wise"
    fps, tps, thresholds = _binary_clf_curves(y_true, y_predicted)
    last_col = _last_col_idx(y_true.ndim)
    # normalize by the totals (last column); shape_padright keeps the axis
    # so division broadcasts along the curve
    fpr = fps.astype('float32') / T.shape_padright(fps[last_col], 1)
    tpr = tps.astype('float32') / T.shape_padright(tps[last_col], 1)
    return fpr, tpr, thresholds
def roc_auc_scores(y_true, y_predicted):
    "roc auc scores calculated axis -1-wise"
    fpr, tpr, thresholds = roc_curves(y_true, y_predicted)
    return auc(fpr, tpr)
def roc_auc_loss(y_true, y_predicted):
    """Convenience loss: 1 - ROC AUC, suitable for minimization."""
    return 1-roc_auc_scores(y_true, y_predicted)
def precision_recall_curves(y_true, y_predicted):
    "precision recall curves calculated axis -1-wise"
    fps, tps, thresholds = _binary_clf_curves(y_true, y_predicted)
    last_col = _last_col_idx(y_true.ndim)
    # keepdims-style last-column index so divisions broadcast
    last_col[-1] = np.asarray([-1], dtype='int8')
    precision = tps.astype('float32') / (tps + fps)
    if y_true.ndim == 1:
        # vector case: total positives is a scalar
        recall = tps.astype('float32') / tps[-1]
    else:
        recall = tps.astype('float32') / tps[last_col]
    # clf curves come in descending-score order; flip the last axis so the
    # curve runs from low to high threshold
    reverse = _reverse_idx(fps.ndim)
    precision = precision[reverse]
    recall = recall[reverse]
    thresholds = thresholds[reverse]
    # pin the curve endpoint at precision=1, recall=0 (sklearn convention)
    if y_true.ndim == 1:
        ones, zeros = np.asarray([1], dtype='float32'), np.asarray([0], dtype='float32')
    else:
        ones = T.ones_like(precision)[last_col]
        zeros = T.zeros_like(recall)[last_col]
    precision = T.concatenate([precision, ones], axis=-1)
    recall = T.concatenate([recall, zeros], axis=-1)
    return precision, recall, thresholds
def average_precision_scores(y_true, y_predicted):
    """Area under the precision-recall curve, calculated axis -1-wise."""
    precision, recall, _ = precision_recall_curves(y_true, y_predicted)
    return auc(recall, precision)
def precision_recall_loss(y_true, y_predicted):
    "convenience function to minimize for"
    return 1-average_precision_scores(y_true, y_predicted)
def last_axis_precision_recall_curve(y_true, y_predicted):
    """Precision-recall curves along the last axis (numpy implementation).

    Returns (precision, recall, thresholds); precision and recall carry one
    extra trailing point pinning the curve at precision=1, recall=0.
    """
    fps, tps, thresholds = _last_axis_binary_clf_curve(y_true, y_predicted)
    precision = tps.astype('float32') / (tps + fps)
    # BUG FIX: use ellipsis indexing throughout -- the previous
    # list-of-slices index form is rejected by modern numpy.
    # `[-1]` keeps the last axis so the division broadcasts.
    recall = tps.astype('float32') / tps[..., [-1]]
    # clf curves come in descending-score order; flip to ascending
    precision = precision[..., ::-1]
    recall = recall[..., ::-1]
    thresholds = thresholds[..., ::-1]
    # pin the curve endpoints at precision=1, recall=0
    precision = np.concatenate([precision, np.ones(precision.shape)[..., [-1]]], axis=-1)
    recall = np.concatenate([recall, np.zeros(recall.shape)[..., [-1]]], axis=-1)
    return precision, recall, thresholds
# Singular aliases kept for sklearn-style naming compatibility.
roc_curve = roc_curves
roc_auc_score = roc_auc_scores
precision_recall_curve = precision_recall_curves
average_precision_score = average_precision_scores
_binary_clf_curve = _binary_clf_curves
| mit |
BenLubar/valis | valis/resources/abstract.py | 2 | 1223 |
from collections import namedtuple
from datetime import datetime
from flask import request
from flask.ext.restful import Resource
Collection = namedtuple("Collection", ["items", "count", "updated_at"])
class AbstractCollectionResource(Resource):
    """Base resource for collection endpoints.

    Subclasses must implement _endpoint() and _schema().
    """

    COLLECTION_LIMIT_MAX = 100
    COLLECTION_LIMIT_DEFAULT = 20

    def _parse_options(self):
        # Validate/deserialize the query string through the endpoint schema.
        parsed, _ = self._endpoint().options_schema.load(request.args)
        return parsed

    def _endpoint(self):
        raise NotImplementedError

    def _bind_limit(self, limit):
        # Clamp a client-supplied page size to the allowed range.
        if not limit or limit <= 0 or limit > self.COLLECTION_LIMIT_MAX:
            return self.COLLECTION_LIMIT_DEFAULT
        return limit

    def _build_collection(self, models, count=None):
        # Timestamp the collection at build time.
        return Collection(models, count, datetime.utcnow())

    def _serialize_collection(self, collection):
        dumped, _ = self._schema().dump(collection)
        return dumped

    def _schema(self):
        raise NotImplementedError
class AbstractItemResource(Resource):
    """Base resource for single-item endpoints; subclasses provide _schema()."""

    def _serialize_model(self, model):
        dumped, _ = self._schema().dump(model)
        return dumped

    def _schema(self):
        raise NotImplementedError
| gpl-3.0 |
laslabs/vertical-medical | sale_crm_medical_prescription/tests/test_crm_lead.py | 1 | 1492 | # -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License GPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
from odoo.tests.common import TransactionCase
class TestCrmLead(TransactionCase):
    """Tests for the prescription-related computed fields on crm.lead."""

    def setUp(self):
        super(TestCrmLead, self).setUp()
        ref = self.env.ref
        self.crm_lead_1 = ref(
            'sale_crm_medical_prescription.crm_lead_medical_lead_1'
        )
        self.rx_order_9 = ref(
            'sale_crm_medical_prescription.'
            'medical_prescription_prescription_order_9'
        )
        self.rx_order_10 = ref(
            'sale_crm_medical_prescription.'
            'medical_prescription_prescription_order_10'
        )

    def test_compute_prescription_order(self):
        """ Test prescription orders properly calculated """
        for rx in (self.rx_order_9, self.rx_order_10):
            self.assertIn(rx, self.crm_lead_1.prescription_order_ids)

    def test_compute_patient_ids(self):
        """ Test patient ids properly calculated """
        patient = self.env.ref(
            'sale_crm_medical_prescription.'
            'medical_patient_patient_10'
        )
        self.assertIn(patient, self.crm_lead_1.patient_ids)

    def test_compute_is_prescription(self):
        """ Test is_prescription set to True """
        self.assertTrue(self.crm_lead_1.is_prescription)
| agpl-3.0 |
mdietrichc2c/OCB | addons/crm/crm_phonecall.py | 255 | 14844 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import crm
from datetime import datetime
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
from openerp.tools.translate import _
class crm_phonecall(osv.osv):
    """Model for CRM phonecalls.

    NOTE(review): leading indentation was lost in this revision; the nesting
    below was reconstructed from the control flow and should be confirmed
    against history.  Fix applied: the user-visible help text of ``state``
    read "callis" instead of "call is".
    """
    _name = "crm.phonecall"
    _description = "Phonecall"
    _order = "id desc"
    _inherit = ['mail.thread']

    _columns = {
        'date_action_last': fields.datetime('Last Action', readonly=1),
        'date_action_next': fields.datetime('Next Action', readonly=1),
        'create_date': fields.datetime('Creation Date', readonly=True),
        'section_id': fields.many2one(
            'crm.case.section', 'Sales Team', select=True,
            help='Sales team to which Case belongs to.'),
        'user_id': fields.many2one('res.users', 'Responsible'),
        'partner_id': fields.many2one('res.partner', 'Contact'),
        'company_id': fields.many2one('res.company', 'Company'),
        'description': fields.text('Description'),
        'state': fields.selection(
            [('open', 'Confirmed'),
             ('cancel', 'Cancelled'),
             ('pending', 'Pending'),
             ('done', 'Held')
             ], string='Status', readonly=True, track_visibility='onchange',
            # BUGFIX: "callis" -> "call is" in the help text shown to users.
            help='The status is set to Confirmed, when a case is created.\n'
                 'When the call is over, the status is set to Held.\n'
                 'If the call is not applicable anymore, the status can be set to Cancelled.'),
        'email_from': fields.char('Email', size=128, help="These people will receive email."),
        'date_open': fields.datetime('Opened', readonly=True),
        # phonecall fields
        'name': fields.char('Call Summary', required=True),
        'active': fields.boolean('Active', required=False),
        'duration': fields.float('Duration', help='Duration in minutes and seconds.'),
        'categ_id': fields.many2one(
            'crm.case.categ', 'Category',
            domain="['|',('section_id','=',section_id),('section_id','=',False),"
                   "('object_id.model', '=', 'crm.phonecall')]"),
        'partner_phone': fields.char('Phone'),
        'partner_mobile': fields.char('Mobile'),
        'priority': fields.selection([('0', 'Low'), ('1', 'Normal'), ('2', 'High')], 'Priority'),
        'date_closed': fields.datetime('Closed', readonly=True),
        'date': fields.datetime('Date'),
        'opportunity_id': fields.many2one('crm.lead', 'Lead/Opportunity'),
    }

    def _get_default_state(self, cr, uid, context=None):
        """Default state, overridable through context['default_state']."""
        if context and context.get('default_state'):
            return context.get('default_state')
        return 'open'

    _defaults = {
        'date': fields.datetime.now,
        'priority': '1',
        'state': _get_default_state,
        'user_id': lambda self, cr, uid, ctx: uid,
        'active': 1
    }

    def on_change_partner_id(self, cr, uid, ids, partner_id, context=None):
        """Propagate the partner's phone numbers onto the phonecall form."""
        values = {}
        if partner_id:
            partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
            values = {
                'partner_phone': partner.phone,
                'partner_mobile': partner.mobile,
            }
        return {'value': values}

    def write(self, cr, uid, ids, values, context=None):
        """ Override to add case management: open/close dates """
        if values.get('state'):
            if values.get('state') == 'done':
                values['date_closed'] = fields.datetime.now()
                self.compute_duration(cr, uid, ids, context=context)
            elif values.get('state') == 'open':
                values['date_open'] = fields.datetime.now()
                values['duration'] = 0.0
        return super(crm_phonecall, self).write(cr, uid, ids, values, context=context)

    def compute_duration(self, cr, uid, ids, context=None):
        """Fill in the duration (minutes) of calls that have none yet."""
        for phonecall in self.browse(cr, uid, ids, context=context):
            if phonecall.duration <= 0:
                duration = datetime.now() - datetime.strptime(phonecall.date, DEFAULT_SERVER_DATETIME_FORMAT)
                values = {'duration': duration.seconds / float(60)}
                self.write(cr, uid, [phonecall.id], values, context=context)
        return True

    def schedule_another_phonecall(self, cr, uid, ids, schedule_time,
                                   call_summary, user_id=False,
                                   section_id=False, categ_id=False,
                                   action='schedule', context=None):
        """
        action :('schedule','Schedule a call'), ('log','Log a call')

        Creates one follow-up call per record in *ids* and returns a
        {original_call_id: new_call_id} mapping.
        """
        model_data = self.pool.get('ir.model.data')
        phonecall_dict = {}
        if not categ_id:
            try:
                res_id = model_data._get_id(cr, uid, 'crm', 'categ_phone2')
                categ_id = model_data.browse(cr, uid, res_id, context=context).res_id
            except ValueError:
                # The default outbound category may be missing; leave unset.
                pass
        for call in self.browse(cr, uid, ids, context=context):
            if not section_id:
                section_id = call.section_id and call.section_id.id or False
            if not user_id:
                user_id = call.user_id and call.user_id.id or False
            if not schedule_time:
                schedule_time = call.date
            vals = {
                'name': call_summary,
                'user_id': user_id or False,
                'categ_id': categ_id or False,
                'description': call.description or False,
                'date': schedule_time,
                'section_id': section_id or False,
                'partner_id': call.partner_id and call.partner_id.id or False,
                'partner_phone': call.partner_phone,
                'partner_mobile': call.partner_mobile,
                'priority': call.priority,
                'opportunity_id': call.opportunity_id and call.opportunity_id.id or False,
            }
            new_id = self.create(cr, uid, vals, context=context)
            if action == 'log':
                self.write(cr, uid, [new_id], {'state': 'done'}, context=context)
            phonecall_dict[call.id] = new_id
        return phonecall_dict

    def _call_create_partner(self, cr, uid, phonecall, context=None):
        """Create a partner from the phonecall's data and return its id."""
        partner = self.pool.get('res.partner')
        partner_id = partner.create(cr, uid, {
            'name': phonecall.name,
            'user_id': phonecall.user_id.id,
            'comment': phonecall.description,
            'address': []
        })
        return partner_id

    def on_change_opportunity(self, cr, uid, ids, opportunity_id, context=None):
        """Propagate team/partner/phone data from the linked opportunity."""
        values = {}
        if opportunity_id:
            opportunity = self.pool.get('crm.lead').browse(cr, uid, opportunity_id, context=context)
            values = {
                'section_id': opportunity.section_id and opportunity.section_id.id or False,
                'partner_phone': opportunity.phone,
                'partner_mobile': opportunity.mobile,
                'partner_id': opportunity.partner_id and opportunity.partner_id.id or False,
            }
        return {'value': values}

    def _call_set_partner(self, cr, uid, ids, partner_id, context=None):
        """Assign *partner_id* to the calls and log a note in the chatter."""
        write_res = self.write(cr, uid, ids, {'partner_id': partner_id}, context=context)
        self._call_set_partner_send_note(cr, uid, ids, context)
        return write_res

    def _call_create_partner_address(self, cr, uid, phonecall, partner_id, context=None):
        """Create a contact (child partner) holding the call's phone number."""
        address = self.pool.get('res.partner')
        return address.create(cr, uid, {
            'parent_id': partner_id,
            'name': phonecall.name,
            'phone': phonecall.partner_phone,
        })

    def handle_partner_assignation(self, cr, uid, ids, action='create', partner_id=False, context=None):
        """
        Handle partner assignation during a lead conversion.
        if action is 'create', create new partner with contact and assign lead to new partner_id.
        otherwise assign lead to specified partner_id

        :param list ids: phonecalls ids to process
        :param string action: what has to be done regarding partners (create it, assign an existing one, or nothing)
        :param int partner_id: partner to assign if any
        :return dict: dictionary organized as followed: {lead_id: partner_assigned_id}
        """
        # TODO this is a duplication of the handle_partner_assignation method of crm_lead
        partner_ids = {}
        # If a partner_id is given, force this partner for all elements
        force_partner_id = partner_id
        for call in self.browse(cr, uid, ids, context=context):
            # If the action is set to 'create' and no partner_id is set, create a new one
            if action == 'create':
                partner_id = force_partner_id or self._call_create_partner(cr, uid, call, context=context)
                self._call_create_partner_address(cr, uid, call, partner_id, context=context)
            self._call_set_partner(cr, uid, [call.id], partner_id, context=context)
            partner_ids[call.id] = partner_id
        return partner_ids

    def redirect_phonecall_view(self, cr, uid, phonecall_id, context=None):
        """Return an act_window dict pointing at the given phonecall."""
        model_data = self.pool.get('ir.model.data')
        # Select the view
        tree_view = model_data.get_object_reference(cr, uid, 'crm', 'crm_case_phone_tree_view')
        form_view = model_data.get_object_reference(cr, uid, 'crm', 'crm_case_phone_form_view')
        search_view = model_data.get_object_reference(cr, uid, 'crm', 'view_crm_case_phonecalls_filter')
        value = {
            'name': _('Phone Call'),
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'crm.phonecall',
            'res_id': int(phonecall_id),
            'views': [(form_view and form_view[1] or False, 'form'), (tree_view and tree_view[1] or False, 'tree'), (False, 'calendar')],
            'type': 'ir.actions.act_window',
            'search_view_id': search_view and search_view[1] or False,
        }
        return value

    def convert_opportunity(self, cr, uid, ids, opportunity_summary=False, partner_id=False, planned_revenue=0.0, probability=0.0, context=None):
        """Convert phonecalls into opportunities.

        Returns {call_id: opportunity_id}.  NOTE(review): when several calls
        are converted at once, the partner found for the first call leaks to
        later calls without a partner — confirm whether that is intended.
        """
        partner = self.pool.get('res.partner')
        opportunity = self.pool.get('crm.lead')
        opportunity_dict = {}
        default_contact = False
        for call in self.browse(cr, uid, ids, context=context):
            if not partner_id:
                partner_id = call.partner_id and call.partner_id.id or False
            if partner_id:
                address_id = partner.address_get(cr, uid, [partner_id])['default']
                if address_id:
                    default_contact = partner.browse(cr, uid, address_id, context=context)
            opportunity_id = opportunity.create(cr, uid, {
                'name': opportunity_summary or call.name,
                'planned_revenue': planned_revenue,
                'probability': probability,
                'partner_id': partner_id or False,
                'mobile': default_contact and default_contact.mobile,
                'section_id': call.section_id and call.section_id.id or False,
                'description': call.description or False,
                'priority': call.priority,
                'type': 'opportunity',
                'phone': call.partner_phone or False,
                'email_from': default_contact and default_contact.email,
            })
            vals = {
                'partner_id': partner_id,
                'opportunity_id': opportunity_id,
                'state': 'done',
            }
            self.write(cr, uid, [call.id], vals, context=context)
            opportunity_dict[call.id] = opportunity_id
        return opportunity_dict

    def action_make_meeting(self, cr, uid, ids, context=None):
        """
        Open meeting's calendar view to schedule a meeting on current phonecall.
        :return dict: dictionary value for created meeting view
        """
        partner_ids = []
        phonecall = self.browse(cr, uid, ids[0], context)
        if phonecall.partner_id and phonecall.partner_id.email:
            partner_ids.append(phonecall.partner_id.id)
        res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'calendar', 'action_calendar_event', context)
        res['context'] = {
            'default_phonecall_id': phonecall.id,
            'default_partner_ids': partner_ids,
            'default_user_id': uid,
            'default_email_from': phonecall.email_from,
            'default_name': phonecall.name,
        }
        return res

    def action_button_convert2opportunity(self, cr, uid, ids, context=None):
        """
        Convert a phonecall into an opp and then redirect to the opp view.

        :param list ids: list of calls ids to convert (typically contains a single id)
        :return dict: containing view information
        """
        if len(ids) != 1:
            raise osv.except_osv(_('Warning!'), _('It\'s only possible to convert one phonecall at a time.'))
        opportunity_dict = self.convert_opportunity(cr, uid, ids, context=context)
        return self.pool.get('crm.lead').redirect_opportunity_view(cr, uid, opportunity_dict[ids[0]], context)

    # ----------------------------------------
    # OpenChatter
    # ----------------------------------------

    def _call_set_partner_send_note(self, cr, uid, ids, context=None):
        """Post a chatter note when a partner gets attached to the calls."""
        return self.message_post(cr, uid, ids, body=_("Partner has been <b>created</b>."), context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
12AngryMen/votca-scripts | xtp/check_distances.py | 2 | 1617 | #!/usr/bin/python
import numpy as np
import sqlite3
import argparse
import sys
# Command-line interface: the only option is the state file to check.
# BUGFIX: "diferrences" -> "differences" in the user-visible description.
parser = argparse.ArgumentParser(description="Checks pairs for dR differences")
parser.add_argument("-f", "--file", default="system.sql", type=str,
                    help="Statefile, default: system.sql")
args = parser.parse_args()
def readSqlall(sqlname):
    """Fetch each pair's stored displacement plus both segment positions.

    Returns an (npairs, 9) numpy array with columns
    (drX, drY, drZ, seg1 posX/posY/posZ, seg2 posX/posY/posZ).
    """
    query = (
        "SELECT pairs.drX,pairs.drY, pairs.drZ, "
        "seg1.posX,seg1.posY,seg1.posZ, seg2.posX,seg2.posY,seg2.posZ "
        "FROM pairs JOIN segments seg1 ON seg1._id =pairs.seg1 "
        "JOIN segments seg2 ON seg2._id =pairs.seg2"
    )
    connection = sqlite3.connect(sqlname)
    with connection:
        rows = connection.execute(query).fetchall()
    return np.array(rows)
def getBox(sqlname):
    """Read the simulation box tensor components from the frames table.

    Returns a numpy array with one row (box11..box33) per frame.
    """
    query = (
        "SELECT box11,box12,box13,box21,box22,box23,box31,box32,box33 "
        "FROM frames"
    )
    connection = sqlite3.connect(sqlname)
    with connection:
        rows = connection.execute(query).fetchall()
    return np.array(rows)
sql=readSqlall(args.file)
box=getBox(args.file)
box=np.reshape(box,(3,3))
test=np.diag(np.diag(box))
print "Box has dimensions:"
print box
if not np.allclose(box,test):
print "Box is not orthogonal. Exiting"
sys.exit()
box=np.diag(box)
dr=sql[:,:3]
pos1=sql[:,3:6]
pos2=sql[:,6:]
drnew=pos2-pos1
factor=(drnew-dr)%box
pairid=0
isfine=True
for f,d,dn in zip(factor,dr,drnew):
if not np.allclose(f,np.zeros(3)):
print "For pair {} segment dR and pair dR differ".format(pairdid)," segmentdR=",dn," pairdR=",d
isfine=False
pairid+=1
if isfine:
print "Statefile has only correct pairs"
else:
print "Statefile has weird pairs. Something went wrong"
| apache-2.0 |
ingolemo/cmd-utils | backup.py | 1 | 6424 | #!/usr/bin/env python
# see `backup --help` for full options
"""
Make a backup of the source directory to the destination using
rsync. Creates a directory in the destination with the date that the
backup was made. If the destination contains previous backups then the
new backup will be done incrementally using the --link-dest feature of
rsync to create hardlinks to files that have not changed. It will also
delete backups that are too old using a sort of exponential backoff to
keep fewer backups as they go further into the past.
"""
import os
import sys
import datetime
import subprocess
import argparse
import shlex
# Per-user rsync exclude file (one pattern per line), resolved via XDG.
excludes = os.path.join(
    os.path.expanduser(os.environ.get("XDG_CONFIG_HOME", "~/.config")),
    "backup_excludes",
)

# args to pass to rsync; a value of None means the flag takes no argument.
RSYNC_ARGS = {
    "--acls": None,
    "--archive": None,  # -rlptgoD
    "--delete": None,
    "--delete-excluded": None,
    "--exclude-from": excludes,
    "--hard-links": None,
    "--human-readable": None,
    "--inplace": None,
    "--itemize-changes": None,
    "--max-size": "2g",
    "--numeric-ids": None,
    "--one-file-system": None,
    "--preallocate": None,
    "--relative": None,
    "--verbose": None,
    "--xattrs": None,
}

# Offsets from now for which to keep a backup around: many recent backups,
# exponentially fewer as they age.  Built programmatically instead of
# hand-listing every timedelta (same 65 values as before).
OFFSETS = set()
OFFSETS.update(datetime.timedelta(minutes=m) for m in (1, 2, 5, 10, 20, 30))
OFFSETS.update(datetime.timedelta(hours=h)
               for h in list(range(1, 11)) + list(range(12, 23, 2)))
OFFSETS.update(datetime.timedelta(days=d) for d in list(range(1, 8)) + [14, 21])
OFFSETS.update(datetime.timedelta(days=30 * months) for months in range(1, 12))
OFFSETS.update(
    datetime.timedelta(days=365 * years)
    for years in list(range(1, 11)) + [12, 14, 16, 18, 20] + list(range(30, 101, 10))
)


def build_synccmd(source, dest, linkdests=(), remote=False):
    """Build the rsync argument vector for one backup run.

    :param linkdests: previous backup dirs used for --link-dest hardlinking;
        only the 18 most recent (sorted) are kept — presumably to stay under
        rsync's --link-dest limit (TODO confirm).
    :param remote: ssh host name; when set, *dest* becomes "host:dest".
    """
    rargs = []
    for flag, value in RSYNC_ARGS.items():
        rargs.append(flag)
        if value is not None:
            rargs.append(value)
    for linkdest in sorted(linkdests)[-18:]:
        rargs.extend(["--link-dest", linkdest])
    if sys.stdout.isatty():
        # Show per-file progress only for interactive runs.
        rargs.append("--progress")
    if remote:
        dest = "{}:{}".format(remote, dest)
    return ["/usr/bin/rsync"] + rargs + [source, dest]
def execute(cmd, output=False):
    """Echo *cmd* shell-quoted, run it, and raise on a non-zero exit.

    Returns the exit code when *output* is false, otherwise the command's
    stdout split into a list of lines (empty list for empty output).
    """
    print("$", *(shlex.quote(arg) for arg in cmd))
    if output:
        text = subprocess.check_output(cmd).decode().strip()
        return text.split("\n") if text else []
    return subprocess.run(cmd, check=True).returncode
def wanted_backups(all_backups, now, datefmt, offsets=None):
    """Yield, for each retention offset, the oldest backup newer than
    ``now - offset``.  Backups never yielded are candidates for deletion.

    :param all_backups: folder paths whose basenames are *datefmt* timestamps.
    :param offsets: iterable of timedeltas; defaults to the module-level
        OFFSETS schedule (new optional parameter, backward compatible).
    """
    if offsets is None:
        offsets = OFFSETS

    def is_younger(folder, wanted_time):
        backup_time = datetime.datetime.strptime(os.path.basename(folder), datefmt)
        return backup_time > wanted_time

    # (A dead "wanted_time = now" pre-assignment was removed: the loop
    # overwrote it before any use.)
    for offset in offsets:
        wanted_time = now - offset
        youngers = [a for a in all_backups if is_younger(a, wanted_time)]
        if youngers:
            yield min(youngers)
def parse_args(args):
    """Parse the command line; ``args[0]`` (the program name) is skipped."""
    parser = argparse.ArgumentParser(description=__doc__)
    for name, options in (
        ("source", {}),
        ("destination", {}),
        ("--remote", {"default": None, "help": "remote server for ssh"}),
        (
            "--date-format",
            {
                "default": "%Y-%m-%dT%H%M%S",
                "help": "Date format for backup folders (default: iso-8601 -ish).",
            },
        ),
    ):
        parser.add_argument(name, **options)
    return parser.parse_args(args[1:])
def main(argv):
    """Run one incremental backup: find old backups, rsync with --link-dest,
    refresh the 'current' symlink, then prune unwanted backups."""
    now = datetime.datetime.now()
    args = parse_args(argv)

    def make_cmd(*cmd):
        # Wrap a command for execution over ssh when --remote is given.
        if args.remote:
            return ("ssh", args.remote) + cmd
        else:
            return cmd

    # get existing directories
    backups = execute(
        make_cmd(
            "find", args.destination, "-maxdepth", "1", "-mindepth", "1", "-type", "d",
        ),
        output=True,
    )
    # New backup directory is named after the current timestamp.
    curr = os.path.join(args.destination, now.strftime(args.date_format))
    # create new directory
    execute(make_cmd("mkdir", curr))
    # rsync
    execute(build_synccmd(args.source, curr, linkdests=backups, remote=args.remote))
    # make symlink to most recent backup
    symlink_loc = os.path.join(args.destination, "current")
    execute(make_cmd("rm", "-f", symlink_loc))
    execute(make_cmd("ln", "-sr", curr, symlink_loc))
    # remove unwanted directories
    wanted = set(wanted_backups(backups, now, args.date_format))
    for backup in backups:
        if backup not in wanted:
            execute(make_cmd("rm", "-rf", backup))


if __name__ == "__main__":
    sys.exit(main(sys.argv))
| gpl-3.0 |
Bindupriya/nuxeo-drive | setup.py | 2 | 19023 | #! /usr/bin/env python
#
# Copyright (C) 2012 Nuxeo
#
import os
import sys
from datetime import datetime
import nx_esky
from esky.bdist_esky import Executable as es_Executable
# Directory where frozen packages and update metadata are written.
OUTPUT_DIR = 'dist'
# Minimum Nuxeo server version this client is compatible with (written into
# the update-site JSON metadata).
SERVER_MIN_VERSION = '5.6'
def read_version(init_file):
    """Return the package version.

    The DRIVE_VERSION environment variable wins; otherwise the version is
    parsed from the ``__version__ = '...'`` line of *init_file*.
    """
    env_version = os.environ.get('DRIVE_VERSION')
    if env_version is not None:
        return env_version
    with open(init_file, 'rb') as f:
        return f.readline().split("=")[1].strip().replace('\'', '')
def update_version(init_file, version):
    """Overwrite *init_file* so it declares only ``__version__``."""
    content = "__version__ = '%s'\n" % version
    with open(init_file, 'wb') as f:
        f.write(content)
def create_json_metadata(client_version, server_version):
    """Write the update-site JSON descriptor for *client_version*.

    Creates OUTPUT_DIR if needed and returns the absolute path of the
    created ``<client_version>.json`` file.
    """
    output_dir = os.path.abspath(OUTPUT_DIR)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    file_path = os.path.abspath(
        os.path.join(OUTPUT_DIR, client_version + '.json'))
    with open(file_path, 'wb') as f:
        f.write('{"nuxeoPlatformMinVersion": "%s"}\n' % server_version)
    return file_path
class Packages(object):
    """Discover python package names under a list of root directories.

    A directory counts as a package as soon as it contains at least one
    "non trivial" python file, i.e. a *.py file other than __init__.py.
    """

    def __init__(self, directory_list):
        self.directory_list = directory_list

    def _make_package_name_from_path(self, root, filepath):
        # Convert ".../<root_basename>/sub/dir" into "<root_basename>.sub.dir".
        # BUGFIX: an unreachable "return root" statement that followed the
        # return below has been removed.
        basename = '/' + os.path.basename(root)
        dir_name = filepath[filepath.find(basename):]
        package_name = dir_name.replace('/', '.')[1:]
        return package_name

    def _isNonTrivialPythonFile(self, afile):
        # NOTE(review): os.walk yields bare filenames, so the '/'-prefixed
        # comparison below never matches and __init__.py is in fact treated
        # as non trivial too — confirm intent before changing it.
        if afile.endswith('/' + '__init__.py'):
            return False
        if afile.endswith('.py'):
            return True
        return False

    def _load_packages_in_tree(self, root):
        # get all the directories with non trivial python files (derived from
        # http://stackoverflow.com/questions/9994414/python-get-folder-containing-specific-files-extension)
        package_dirs = set(folder for folder, _, files in os.walk(root)
                           for file_ in files
                           if self._isNonTrivialPythonFile(file_))
        for package_dir in package_dirs:
            dir_ = package_dir.replace("\\", "/")
            package_name = self._make_package_name_from_path(root, dir_)
            self.packages.append(package_name)

    def load(self):
        """Scan every configured directory and return the package names."""
        self.packages = []
        for directory in self.directory_list:
            self._load_packages_in_tree(directory)
        return self.packages
class data_file_dir(object):
    """Collect data files from a directory for setup()'s data_files option.

    As a side effect, every matching file is appended to *include_files* as
    a (source_path, path_under_subfolderName) tuple.
    """

    def __init__(self, home_dir, subfolderName, include_files):
        self.home_dir = home_dir
        self.subfolderName = subfolderName
        self.include_files = include_files
        self.recursive_result = None

    def load(self):
        """Collect the files directly under home_dir (non recursive)."""
        collected = []
        for entry in os.listdir(os.path.normpath(self.home_dir)):
            full_path = os.path.join(self.home_dir, entry)
            if not os.path.isfile(full_path):
                continue
            self.include_files.append(
                (full_path, os.path.join(self.subfolderName, entry)))
            collected.append(full_path)
        return collected

    def load_recursive(self, path=None, shortpath=None):
        """Collect files under home_dir recursively.

        Returns the accumulated list of (shortpath, files_in_that_dir)
        tuples for the whole tree.
        """
        if path is None:
            # Top-level call: reset the accumulator and start at home_dir.
            self.recursive_result = []
            shortpath = self.subfolderName
            path = self.home_dir
        files_here = []
        for entry in os.listdir(os.path.normpath(path)):
            full_path = os.path.join(path, entry)
            child_short = os.path.join(shortpath, entry)
            if os.path.isfile(full_path):
                self.include_files.append((full_path, child_short))
                files_here.append(full_path)
            elif os.path.isdir(full_path):
                self.load_recursive(full_path, child_short)
        self.recursive_result.append((shortpath, files_here))
        return self.recursive_result
class NuxeoDriveAttributes(object):
    """Branding and packaging constants consumed by NuxeoDriveSetup.

    Grouping these values behind getters makes it possible to produce
    re-branded packagings by subclassing and overriding individual methods.
    """

    def get_uid(self):
        # MSI upgrade code: must stay stable across releases so Windows
        # recognizes upgrades of the same product.
        return '{800B7778-1B71-11E2-9D65-A0FD6088709B}'

    # --- Directory layout ("rubrics") of the source tree -------------------

    def rubric_company(self):
        return 'nuxeo'

    def rubric_top_dir(self):
        return 'nuxeo-drive'

    def rubric_2nd_dir(self):
        return 'nuxeo-drive-client'

    def rubric_3rd_dir(self):
        return 'nxdrive'

    def rubric_super_dir(self):
        return ''

    def rubric_product_name(self):
        return 'Nuxeo Drive'

    def get_name(self):
        return self.rubric_top_dir()

    def get_package_data(self):
        # Non-python files shipped inside the nxdrive.data package.
        package_data = {
            self.rubric_3rd_dir() + '.data': self._get_recursive_data('data'),
        }
        return package_data

    def _get_recursive_data(self, data_dir):
        # Walk <client>/<nxdrive>/<data_dir> and collect every non-python
        # file as a path relative to data_dir.
        data_files = []
        data_dir_path = os.path.join(self.rubric_2nd_dir(), self.rubric_3rd_dir(), data_dir)
        for dirpath, _, filenames in os.walk(data_dir_path):
            rel_path = dirpath.rsplit(data_dir, 1)[1]
            if rel_path.startswith(os.path.sep):
                rel_path = rel_path[1:]
            data_files.extend([os.path.join(rel_path, filename)
                               for filename in filenames
                               if not (filename.endswith('.py') or filename.endswith('.pyc'))])
        return data_files

    def get_package_dirs(self):
        package_dirs = [os.path.join(self.rubric_2nd_dir(),
                                     self.rubric_3rd_dir())]
        return package_dirs

    # --- Entry-point scripts -----------------------------------------------

    def get_script(self):
        return os.path.join(self.rubric_2nd_dir(), 'scripts', 'ndrive')

    def get_scripts(self):
        return [es_Executable(self.get_script()), 'launcher.pyw']

    def get_win_script(self):
        # Windows GUI entry point (no console window).
        return os.path.join(self.rubric_2nd_dir(), 'scripts', 'ndrivew.pyw')

    def get_app(self):
        return self.get_scripts()

    # --- Icons and static resources ----------------------------------------

    def get_ui5_home(self):
        return os.path.join(self.rubric_2nd_dir(), self.rubric_3rd_dir(),
                            'data', 'ui5')

    def get_icons_home(self):
        return os.path.join(self.rubric_2nd_dir(), self.rubric_3rd_dir(),
                            'data', 'icons')

    def get_win_icon(self):
        return 'nuxeo_drive_icon_64.ico'

    def get_png_icon(self):
        return 'nuxeo_drive_icon_64.png'

    def get_osx_icon(self):
        return 'nuxeo_drive_app_icon_128.icns'

    def get_init_file(self):
        # File holding __version__; read and (for -dev builds) rewritten.
        return os.path.abspath(os.path.join(self.rubric_2nd_dir(),
                                            self.rubric_3rd_dir(),
                                            '__init__.py'))

    def append_includes(self, includes):
        # Hook for subclasses to add frozen-app includes; no-op by default.
        pass

    def get_win_targetName(self):
        return "ndrivew.exe"

    def shortcutName(self):
        return "Nuxeo Drive"

    # --- OS X bundle metadata ----------------------------------------------

    def get_CFBundleURLSchemes(self):
        return ['nxdrive']

    def get_package_dir(self):
        return {'nxdrive': os.path.join(self.rubric_2nd_dir(),
                                        self.rubric_3rd_dir())}

    def get_path_append(self):
        return self.rubric_2nd_dir()

    def get_CFBundleDisplayName(self):
        return self.rubric_product_name()

    def get_CFBundleName(self):
        return self.rubric_product_name()

    def get_CFBundleIdentifier(self):
        return "org.nuxeo.drive"

    def get_CFBundleURLName(self):
        return 'Nuxeo Drive URL'

    # --- setup() metadata ---------------------------------------------------

    def get_description(self):
        return "Desktop synchronization client for Nuxeo."

    def get_author(self):
        return "Nuxeo"

    def get_install_dir(self):
        return os.path.join(self.get_author(), 'Drive')

    def get_author_email(self):
        return "contact@nuxeo.com"

    def get_url(self):
        return 'http://github.com/nuxeo/nuxeo-drive'

    def get_long_description(self):
        return open('README.rst').read()

    def get_data_files(self):
        return []

    def get_includes(self):
        return []

    def get_licence(self):
        return None

    def get_gpl_licence(self):
        # Wrap LICENSE.txt into a minimal RTF document (as required by the
        # MSI license screen); "\line" is the RTF line-break control word.
        license_ = open('LICENSE.txt').read().replace('\n', '\\line')
        return '{\\rtf1\\ansi\\ansicpg1252\\deff0\\deftab720{'\
               '\\fonttbl{\\f0\\froman\\fprq2 Times New Roman;}}'\
               '{\\colortbl\\red0\\green0\\blue0;}' + license_ + '}'

    def customize_msi(self, db):
        import msilib
        # Add the possibility to bind an engine with MSI
        msilib.add_data(db, "CustomAction", [("NuxeoDriveBinder", 82,
                        self.get_win_targetName(),
                        "bind-server --password \"[TARGETPASSWORD]\" --local-folder \"[TARGETDRIVEFOLDER]\" [TARGETUSERNAME] [TARGETURL]")])
        msilib.add_data(db, "InstallExecuteSequence", [("NuxeoDriveBinder",
                        'NOT (TARGETUSERNAME="" OR TARGETURL="")', -1)])
class NuxeoDriveSetup(object):
    """Drive the whole packaging: plain setup(), or frozen builds via
    cx_Freeze/esky (Windows, Linux) and py2app (OS X).

    NOTE(review): leading indentation was lost in this revision; the nesting
    of the platform-specific branches below was reconstructed and should be
    confirmed against history.
    """

    def __init__(self, driveAttributes):
        from distutils.core import setup

        attribs = driveAttributes
        freeze_options = {}
        ext_modules = []

        script = attribs.get_script()
        scripts = attribs.get_scripts()
        name = attribs.get_name()
        packages = Packages(attribs.get_package_dirs()).load()

        # special handling for data files, except for Linux
        if sys.platform == "win32" or sys.platform == 'darwin':
            packages.remove('nxdrive.data')
        package_data = attribs.get_package_data()
        icons_home = attribs.get_icons_home()
        ui5_home = attribs.get_ui5_home()

        win_icon = os.path.join(icons_home, attribs.get_win_icon())
        png_icon = os.path.join(icons_home, attribs.get_png_icon())
        osx_icon = os.path.join(icons_home, attribs.get_osx_icon())

        if sys.platform == 'win32':
            icon = win_icon
        elif sys.platform == 'darwin':
            icon = osx_icon
        else:
            icon = png_icon

        # Files to include in frozen app
        # build_exe freeze with cx_Freeze (Windows)
        include_files = attribs.get_includes()
        # bdist_esky freeze with cx_Freeze (Windows) and py2app (OS X)
        # In fact this is a global setup option
        # TODO NXP-13810: check removed data_files from py2app and added to
        # global setup
        icon_files = data_file_dir(icons_home, 'icons', include_files).load()
        ui5_files = data_file_dir(ui5_home, 'ui5', include_files).load_recursive()
        data_files = [('icons', icon_files)]
        data_files.extend(ui5_files)
        data_files.extend(attribs.get_data_files())

        old_version = None
        init_file = attribs.get_init_file()
        version = read_version(init_file)
        if '-dev' in version:
            # timestamp the dev artifacts as distutils only accepts "b" + digit
            timestamp = datetime.utcnow().isoformat()
            timestamp = timestamp.replace(":", "")
            timestamp = timestamp.replace(".", "")
            timestamp = timestamp.replace("T", "")
            timestamp = timestamp.replace("-", "")
            old_version = version
            # distutils imposes a max 3 levels integer version
            # (+ prerelease markers which are not allowed in a
            # msi package version). On the other hand,
            # msi imposes the a.b.c.0 or a.b.c.d format where
            # a, b, c and d are all 16 bits integers
            # TODO: align on latest distutils versioning
            month_day = timestamp[4:8]
            if month_day.startswith('0'):
                month_day = month_day[1:]
            version = version.replace('-dev', ".%s" % (
                month_day))
            update_version(init_file, version)
            print "Updated version to " + version

        # Create JSON metadata file for the frozen application
        json_file = create_json_metadata(version, SERVER_MIN_VERSION)
        print "Created JSON metadata file for frozen app: " + json_file

        includes = [
            "PyQt4",
            "PyQt4.QtCore",
            "PyQt4.QtNetwork",
            "PyQt4.QtGui",
            "atexit",  # implicitly required by PyQt4
            "cffi",
            "xattr"
        ]
        attribs.append_includes(includes)
        excludes = [
            "ipdb",
            "clf",
            "IronPython",
            "pydoc",
            "tkinter",
        ]

        if '--freeze' in sys.argv:
            print "Building standalone executable..."
            sys.argv.remove('--freeze')
            # nx_cx_Freeze's setup() shadows the distutils one for frozen
            # builds.
            from nx_cx_Freeze import setup
            from cx_Freeze import Executable as cx_Executable
            from esky.util import get_platform

            # build_exe does not seem to take the package_dir info into account
            sys.path.append(attribs.get_path_append())
            executables = [cx_Executable(script)]
            freeze_options = dict()
            if sys.platform == "win32":
                # Windows GUI program that can be launched without a cmd
                # console
                script_w = attribs.get_win_script()
                if script_w is not None:
                    scripts.append(
                        es_Executable(script_w, icon=icon,
                                      shortcutDir="ProgramMenuFolder",
                                      shortcutName=attribs.shortcutName()))
                    executables.append(
                        cx_Executable(script_w,
                                      targetName=attribs.get_win_targetName(),
                                      base="Win32GUI", icon=icon,
                                      shortcutDir="ProgramMenuFolder",
                                      shortcutName=attribs.shortcutName()))
                # 'attribs' is consumed by the custom MSI build steps.
                freeze_options.update({'attribs': attribs})

            package_data = {}
            esky_app_name = (attribs.get_name()
                             + '-' + version + '.' + get_platform())
            esky_dist_dir = os.path.join(OUTPUT_DIR, esky_app_name)
            freeze_options.update({
                'executables': executables,
                'options': {
                    "build": {
                        "exe_command": "bdist_esky",
                    },
                    "build_exe": {
                        "includes": includes,
                        "packages": packages + [
                            "nose",
                        ],
                        "excludes": excludes,
                        "include_files": include_files,
                    },
                    "bdist_esky": {
                        "excludes": excludes,
                        "enable_appdata_dir": True,
                        "freezer_options": {
                            "packages": packages + [
                                "nose",
                            ],
                        },
                        "rm_freeze_dir_after_zipping": False,
                    },
                    "install": {
                        "skip_sub_commands":
                            "install_lib,install_scripts,install_data",
                    },
                    "install_exe": {
                        "skip_build": True,
                        "build_dir": esky_dist_dir,
                    },
                    "bdist_msi": {
                        "add_to_path": True,
                        "upgrade_code":
                            attribs.get_uid(),
                    },
                },
            })

            # Include cffi compiled C extension under Linux
            if sys.platform.startswith('linux'):
                import xattr
                includeFiles = [(os.path.join(os.path.dirname(xattr.__file__), '_cffi__x7c9e2f59xb862c7dd.so'),
                                 '_cffi__x7c9e2f59xb862c7dd.so')]
                freeze_options['options']['bdist_esky']['freezer_options'].update({
                    "includeFiles": includeFiles
                })

        if sys.platform == 'darwin':
            # Under OSX we use py2app instead of cx_Freeze because we need:
            # - argv_emulation=True for nxdrive:// URL scheme handling
            # - easy Info.plist customization
            import py2app  # install the py2app command
            import xattr

            ext_modules = [xattr.lib.ffi.verifier.get_extension()]
            includes.append("_cffi__x7c9e2f59xb862c7dd")
            name = attribs.get_CFBundleName()
            py2app_options = dict(
                iconfile=icon,
                qt_plugins='imageformats',
                argv_emulation=False,  # We use QT for URL scheme handling
                plist=dict(
                    CFBundleDisplayName=attribs.get_CFBundleDisplayName(),
                    CFBundleName=attribs.get_CFBundleName(),
                    CFBundleIdentifier=attribs.get_CFBundleIdentifier(),
                    LSUIElement=True,  # Do not launch as a Dock application
                    CFBundleURLTypes=[
                        dict(
                            CFBundleURLName=attribs.get_CFBundleURLName(),
                            CFBundleURLSchemes=(attribs
                                                .get_CFBundleURLSchemes()),
                        )
                    ],
                    NSServices=[
                        dict(
                            NSMenuItem=dict(
                                default=attribs.get_CFBundleDisplayName()
                            ),
                            NSMessage=u"macRightClick",
                            NSPortName=attribs.get_CFBundleDisplayName(),
                            NSRequiredContext=dict(),
                            NSSendTypes=[
                                u'NSStringPboardType',
                            ],
                            NSSendFileTypes=[
                                u"public.item"
                            ]
                        )
                    ]
                ),
                includes=includes,
                excludes=excludes,
            )
            freeze_options = dict(
                app=attribs.get_app(),
                options=dict(
                    py2app=py2app_options,
                    bdist_esky=dict(
                        enable_appdata_dir=True,
                        create_zipfile=False,
                        freezer_options=py2app_options,
                    )
                )
            )

        setup(
            name=name,
            version=version,
            description=attribs.get_description(),
            author=attribs.get_author(),
            author_email=attribs.get_author_email(),
            url=attribs.get_url(),
            packages=packages,
            package_dir=attribs.get_package_dir(),
            package_data=package_data,
            scripts=scripts,
            long_description=attribs.get_long_description(),
            data_files=data_files,
            ext_modules=ext_modules,
            **freeze_options
        )
        # Put back the original -dev version after building timestamped
        # artifacts.
        if old_version is not None:
            update_version(init_file, old_version)
            print "Restored version to " + old_version
def main(argv=None):
    """Entry point: run the Nuxeo Drive packaging/setup for this platform.

    ``argv`` is accepted for a conventional CLI signature but is currently
    unused.  Returns ``None``, which ``sys.exit`` treats as success (0).
    """
    NuxeoDriveSetup(NuxeoDriveAttributes())
# Allow this module to be executed directly as a build script.
if __name__ == '__main__':
    sys.exit(main())
| lgpl-2.1 |
eXistenZNL/SickRage | lib/github/tests/NamedUser.py | 39 | 9973 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import Framework
import github
import datetime
class NamedUser(Framework.TestCase):
    """Replay-based tests for ``github.NamedUser``.

    ``Framework.TestCase`` replays recorded HTTP traffic, so every literal
    below is pinned to those recorded fixtures and must only change together
    with them.
    """
    def setUp(self):
        Framework.TestCase.setUp(self)
        # Default subject for most tests: the recorded authenticated user.
        self.user = self.g.get_user("jacquev6")
    def testAttributesOfOtherUser(self):
        # For a user other than the authenticated one, private counters
        # (collaborators, disk_usage, plan, ...) come back as None.
        self.user = self.g.get_user("nvie")
        self.assertEqual(self.user.avatar_url, "https://secure.gravatar.com/avatar/c5a7f21b46df698f3db31c37ed0cf55a?d=https://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-140.png")
        self.assertEqual(self.user.bio, None)
        self.assertEqual(self.user.blog, "http://nvie.com")
        self.assertEqual(self.user.collaborators, None)
        self.assertEqual(self.user.company, "3rd Cloud")
        self.assertEqual(self.user.created_at, datetime.datetime(2009, 5, 12, 21, 19, 38))
        self.assertEqual(self.user.disk_usage, None)
        self.assertEqual(self.user.email, "vincent@3rdcloud.com")
        self.assertEqual(self.user.followers, 296)
        self.assertEqual(self.user.following, 41)
        self.assertEqual(self.user.gravatar_id, "c5a7f21b46df698f3db31c37ed0cf55a")
        self.assertFalse(self.user.hireable)
        self.assertEqual(self.user.html_url, "https://github.com/nvie")
        self.assertEqual(self.user.id, 83844)
        self.assertEqual(self.user.location, "Netherlands")
        self.assertEqual(self.user.login, "nvie")
        self.assertEqual(self.user.name, "Vincent Driessen")
        self.assertEqual(self.user.owned_private_repos, None)
        self.assertEqual(self.user.plan, None)
        self.assertEqual(self.user.private_gists, None)
        self.assertEqual(self.user.public_gists, 16)
        self.assertEqual(self.user.public_repos, 61)
        self.assertEqual(self.user.total_private_repos, None)
        self.assertEqual(self.user.type, "User")
        self.assertEqual(self.user.url, "https://api.github.com/users/nvie")
    def testAttributesOfSelf(self):
        # The authenticated user's record also exposes private plan/quota data.
        self.assertEqual(self.user.avatar_url, "https://secure.gravatar.com/avatar/b68de5ae38616c296fa345d2b9df2225?d=https://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-140.png")
        self.assertEqual(self.user.bio, "")
        self.assertEqual(self.user.blog, "http://vincent-jacques.net")
        self.assertEqual(self.user.collaborators, 0)
        self.assertEqual(self.user.company, "Criteo")
        self.assertEqual(self.user.created_at, datetime.datetime(2010, 7, 9, 6, 10, 6))
        self.assertEqual(self.user.disk_usage, 17080)
        self.assertEqual(self.user.email, "vincent@vincent-jacques.net")
        self.assertEqual(self.user.followers, 13)
        self.assertEqual(self.user.following, 24)
        self.assertEqual(self.user.gravatar_id, "b68de5ae38616c296fa345d2b9df2225")
        self.assertFalse(self.user.hireable)
        self.assertEqual(self.user.html_url, "https://github.com/jacquev6")
        self.assertEqual(self.user.id, 327146)
        self.assertEqual(self.user.location, "Paris, France")
        self.assertEqual(self.user.login, "jacquev6")
        self.assertEqual(self.user.name, "Vincent Jacques")
        self.assertEqual(self.user.owned_private_repos, 5)
        self.assertEqual(self.user.plan.name, "micro")
        self.assertEqual(self.user.plan.collaborators, 1)
        self.assertEqual(self.user.plan.space, 614400)
        self.assertEqual(self.user.plan.private_repos, 5)
        self.assertEqual(self.user.private_gists, 5)
        self.assertEqual(self.user.public_gists, 2)
        self.assertEqual(self.user.public_repos, 11)
        self.assertEqual(self.user.total_private_repos, 5)
        self.assertEqual(self.user.type, "User")
        self.assertEqual(self.user.url, "https://api.github.com/users/jacquev6")
    # Paginated-list endpoints: compare one key per element against the fixtures.
    def testGetGists(self):
        self.assertListKeyEqual(self.user.get_gists(), lambda g: g.description, ["Gist created by PyGithub", "FairThreadPoolPool.cpp", "How to error 500 Github API v3, as requested by Rick (GitHub Staff)", "Cadfael: order of episodes in French DVD edition"])
    def testGetFollowers(self):
        self.assertListKeyEqual(self.user.get_followers(), lambda f: f.login, ["jnorthrup", "brugidou", "regisb", "walidk", "afzalkhan", "sdanzan", "vineus", "gturri", "fjardon", "cjuniet", "jardon-u", "kamaradclimber", "L42y"])
    def testGetFollowing(self):
        self.assertListKeyEqual(self.user.get_following(), lambda f: f.login, ["nvie", "schacon", "jamis", "chad", "unclebob", "dabrahams", "jnorthrup", "brugidou", "regisb", "walidk", "tanzilli", "fjardon", "r3c", "sdanzan", "vineus", "cjuniet", "gturri", "ant9000", "asquini", "claudyus", "jardon-u", "s-bernard", "kamaradclimber", "Lyloa"])
    def testHasInFollowing(self):
        nvie = self.g.get_user("nvie")
        self.assertTrue(self.user.has_in_following(nvie))
    def testGetOrgs(self):
        self.assertListKeyEqual(self.user.get_orgs(), lambda o: o.login, ["BeaverSoftware"])
    def testGetRepo(self):
        self.assertEqual(self.user.get_repo("PyGithub").description, "Python library implementing the full Github API v3")
    def testGetRepos(self):
        self.assertListKeyEqual(self.user.get_repos(), lambda r: r.name, ["TestPyGithub", "django", "PyGithub", "developer.github.com", "acme-public-website", "C4Planner", "DrawTurksHead", "DrawSyntax", "QuadProgMm", "Boost.HierarchicalEnum", "ViDE"])
    def testGetReposWithType(self):
        self.assertListKeyEqual(self.user.get_repos("owner"), lambda r: r.name, ["django", "PyGithub", "developer.github.com", "acme-public-website", "C4Planner", "DrawTurksHead", "DrawSyntax", "QuadProgMm", "Boost.HierarchicalEnum", "ViDE"])
    def testGetWatched(self):
        self.assertListKeyEqual(self.user.get_watched(), lambda r: r.name, ["git", "boost.php", "capistrano", "boost.perl", "git-subtree", "git-hg", "homebrew", "celtic_knot", "twisted-intro", "markup", "hub", "gitflow", "murder", "boto", "agit", "d3", "pygit2", "git-pulls", "django_mathlatex", "scrumblr", "developer.github.com", "python-github3", "PlantUML", "bootstrap", "drawnby", "django-socketio", "django-realtime", "playground", "BozoCrack", "FatherBeaver", "PyGithub", "django", "django", "TestPyGithub"])
    def testGetStarred(self):
        self.assertListKeyEqual(self.user.get_starred(), lambda r: r.name, ["git", "boost.php", "capistrano", "boost.perl", "git-subtree", "git-hg", "homebrew", "celtic_knot", "twisted-intro", "markup", "hub", "gitflow", "murder", "boto", "agit", "d3", "pygit2", "git-pulls", "django_mathlatex", "scrumblr", "developer.github.com", "python-github3", "PlantUML", "bootstrap", "drawnby", "django-socketio", "django-realtime", "playground", "BozoCrack", "FatherBeaver", "amaunet", "django", "django", "moviePlanning", "folly"])
    def testGetSubscriptions(self):
        self.assertListKeyEqual(self.user.get_subscriptions(), lambda r: r.name, ["ViDE", "Boost.HierarchicalEnum", "QuadProgMm", "DrawSyntax", "DrawTurksHead", "PrivateStuff", "vincent-jacques.net", "Hacking", "C4Planner", "developer.github.com", "PyGithub", "PyGithub", "django", "CinePlanning", "PyGithub", "PyGithub", "PyGithub", "IpMap", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub"])
    # Event streams are unbounded; only the first few entries are checked.
    def testGetEvents(self):
        self.assertListKeyBegin(self.user.get_events(), lambda e: e.type, ["GistEvent", "IssueCommentEvent", "PushEvent", "IssuesEvent"])
    def testGetPublicEvents(self):
        self.assertListKeyBegin(self.user.get_public_events(), lambda e: e.type, ["PushEvent", "CreateEvent", "GistEvent", "IssuesEvent"])
    def testGetPublicReceivedEvents(self):
        self.assertListKeyBegin(self.user.get_public_received_events(), lambda e: e.type, ["IssueCommentEvent", "IssueCommentEvent", "IssueCommentEvent", "IssueCommentEvent"])
    def testGetReceivedEvents(self):
        self.assertListKeyBegin(self.user.get_received_events(), lambda e: e.type, ["IssueCommentEvent", "IssueCommentEvent", "IssueCommentEvent", "IssueCommentEvent"])
    def testGetKeys(self):
        self.assertListKeyEqual(self.user.get_keys(), lambda k: k.id, [3557894, 3791954, 3937333, 4051357, 4051492])
| gpl-3.0 |
suyashphadtare/sajil-frappe | frappe/tasks.py | 27 | 3079 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils.scheduler import enqueue_events
from frappe.celery_app import get_celery, celery_task, task_logger, LONGJOBS_PREFIX
from frappe.cli import get_sites
from frappe.utils.file_lock import create_lock, delete_lock
@celery_task()
def sync_queues():
	"""Notify every responding worker to monitor the queues of all sites.

	Short-job workers consume one queue per site named after the site;
	long-job workers consume the same queues with LONGJOBS_PREFIX prepended.
	"""
	celery_app = get_celery()
	short_workers, long_workers = get_workers(celery_app)
	for short_worker in short_workers:
		sync_worker(celery_app, short_worker)
	for long_worker in long_workers:
		sync_worker(celery_app, long_worker, prefix=LONGJOBS_PREFIX)
def get_workers(app):
	"""Partition the currently responding workers into short-job and
	long-job groups, based on whether the worker's node name starts with
	LONGJOBS_PREFIX.

	:param app: Celery application to inspect
	:returns: tuple ``(shortjob_workers, longjob_workers)`` of worker
		node-name lists (both empty when no worker responds)

	``Inspect.active_queues()`` returns ``None`` — not ``{}`` — when no
	worker replies, which previously made this function raise TypeError;
	guard against it the same way ``get_active_queues`` below does.
	"""
	longjob_workers = []
	shortjob_workers = []
	active_queues = app.control.inspect().active_queues()
	for worker in (active_queues or {}):
		if worker.startswith(LONGJOBS_PREFIX):
			longjob_workers.append(worker)
		else:
			shortjob_workers.append(worker)
	return shortjob_workers, longjob_workers
def sync_worker(app, worker, prefix=''):
	"""Reconcile the queues one worker consumes with the queues it should.

	Required-but-unconsumed queues are added; consumed-but-no-longer-required
	queues are cancelled.
	"""
	current = set(get_active_queues(app, worker))
	wanted = set(get_required_queues(app, prefix=prefix))
	def _broadcast(command, queue):
		# Target only this worker and wait for its acknowledgement.
		app.control.broadcast(command, arguments={'queue': queue},
			reply=True, destination=[worker])
	for queue in wanted - current:
		_broadcast('add_consumer', queue)
	for queue in current - wanted:
		_broadcast('cancel_consumer', queue)
def get_active_queues(app, worker):
	"""Return the names of the queues *worker* currently consumes.

	An empty list is returned when no workers answered the inspect call or
	when this particular worker reported no queues.
	"""
	all_queues = app.control.inspect().active_queues() or {}
	worker_queues = all_queues.get(worker) or []
	return [queue['name'] for queue in worker_queues]
def get_required_queues(app, prefix=''):
	"""Return every queue name a worker should consume: one (optionally
	prefixed) queue per site, plus Celery's default queue."""
	queues = ['{}{}'.format(prefix, site) for site in get_sites()]
	queues.append(app.conf['CELERY_DEFAULT_QUEUE'])
	return queues
@celery_task()
def scheduler_task(site, event, handler, now=False):
	"""Run one scheduler *handler* for *site* under a per-handler file lock.

	On success the transaction is committed; on any exception it is rolled
	back, logged, and the exception re-raised so celery records the failure.
	The lock and the frappe connection are always cleaned up.
	"""
	from frappe.utils.scheduler import log
	traceback = ""
	task_logger.info('running {handler} for {site} for event: {event}'.format(handler=handler, site=site, event=event))
	try:
		frappe.init(site=site)
		# Another worker already holds this handler's lock: skip silently.
		# (The finally block's delete_lock still runs — NOTE(review): this
		# appears to release the other worker's lock; confirm intended.)
		if not create_lock(handler):
			return
		# `now` means we are already inside a connected request context.
		if not now:
			frappe.connect(site=site)
		frappe.get_attr(handler)()
	except Exception:
		frappe.db.rollback()
		traceback = log(handler, "Method: {event}, Handler: {handler}".format(event=event, handler=handler))
		task_logger.warn(traceback)
		raise
	else:
		# Commit only when the handler ran without raising.
		frappe.db.commit()
	finally:
		delete_lock(handler)
		if not now:
			frappe.destroy()
	task_logger.info('ran {handler} for {site} for event: {event}'.format(handler=handler, site=site, event=event))
@celery_task()
def enqueue_scheduler_events():
	"""Fan out: queue one enqueue_events_for_site task per known site."""
	for site_name in get_sites():
		enqueue_events_for_site.delay(site=site_name)
@celery_task()
def enqueue_events_for_site(site):
	"""Connect to *site* and enqueue its due scheduler events.

	Sites in maintenance mode are skipped; frappe local state is always torn
	down, even on error or early return.
	"""
	try:
		# init only loads site configuration — cheap enough to check
		# maintenance mode before opening a database connection.
		frappe.init(site=site)
		if frappe.local.conf.maintenance_mode:
			return
		frappe.connect(site=site)
		enqueue_events(site)
	finally:
		frappe.destroy()
| mit |
clstl/servo | tests/wpt/web-platform-tests/tools/py/testing/root/test_py_imports.py | 163 | 1983 | import py
import types
import sys
def checksubpackage(name):
    """Verify that the lazily-mapped sub-package ``py.<name>`` resolves every
    attribute declared in its ``__map__``."""
    subpkg = getattr(py, name)
    if not hasattr(subpkg, '__map__'):
        # Plain attribute, not a virtual module: nothing to verify.
        return
    assert len(dir(subpkg)) > 0
    print (subpkg.__map__)
    for attr in list(subpkg.__map__):
        assert hasattr(subpkg, attr), (subpkg, attr)
def test_dir():
    """Generative test: one checksubpackage check per public attribute of py."""
    public_names = (attr for attr in dir(py) if not attr.startswith('_'))
    for attr in public_names:
        yield checksubpackage, attr
def test_virtual_module_identity():
    """Importing the same virtual (sub)module twice must yield one object."""
    from py import path as first_path
    from py import path as second_path
    assert first_path is second_path
    from py.path import local as first_local
    from py.path import local as second_local
    assert first_local is second_local
def test_importall():
    """Import every py.* submodule under the package directory and make sure
    each import succeeds (or is an explicit pytest skip)."""
    base = py._pydir
    # Files to exclude on this Python major version: only one of the two
    # assertion-reinterpretation modules is importable per version.
    nodirs = [
    ]
    if sys.version_info >= (3,0):
        nodirs.append(base.join('_code', '_assertionold.py'))
    else:
        nodirs.append(base.join('_code', '_assertionnew.py'))
    def recurse(p):
        # Skip dotted directories and the "attic" of retired code.
        return p.check(dotfile=0) and p.basename != "attic"
    for p in base.visit('*.py', recurse):
        if p.basename == '__init__.py':
            continue
        relpath = p.new(ext='').relto(base)
        if base.sep in relpath: # not py/*.py itself
            for x in nodirs:
                if p == x or p.relto(x):
                    break
            else:
                # for/else: runs only when no exclusion matched above.
                relpath = relpath.replace(base.sep, '.')
                modpath = 'py.%s' % relpath
                try:
                    check_import(modpath)
                except py.test.skip.Exception:
                    pass
def check_import(modpath):
    """Import *modpath* and assert that a (truthy) module object results."""
    py.builtin.print_("checking import", modpath)
    imported = __import__(modpath)
    assert imported
def test_all_resolves():
    """Walk the py namespace to a fixed point, resolving every lazy module.

    Each pass may trigger lazy imports that expose further modules, so keep
    iterating until a full pass discovers nothing new.
    """
    discovered = py.builtin.set([py])
    previous_count = None
    while len(discovered) != previous_count:
        previous_count = len(discovered)
        # Iterate over a frozen snapshot: the set grows during the pass.
        for module in py.builtin.frozenset(discovered):
            for attribute in module.__dict__.values():
                if isinstance(attribute, type(py.test)):
                    discovered.add(attribute)
| mpl-2.0 |
sloria/osf.io | addons/dataverse/serializer.py | 32 | 3736 | from addons.base.serializer import OAuthAddonSerializer
from addons.dataverse import client
from addons.dataverse.settings import DEFAULT_HOSTS
from website.util import api_url_for, web_url_for
class DataverseSerializer(OAuthAddonSerializer):
    """Serializes Dataverse add-on state — accounts, URLs, and node
    settings — for consumption by the OSF front-end."""

    addon_short_name = 'dataverse'

    # Keys that addon_serialized_urls must provide; checked in serialized_urls.
    REQUIRED_URLS = []

    # Include host information with more informative labels / formatting
    def serialize_account(self, external_account):
        """Serialize *external_account*, adding the Dataverse host (stored in
        ``oauth_key``) and a browsable URL for it."""
        ret = super(DataverseSerializer, self).serialize_account(external_account)
        host = external_account.oauth_key
        ret.update({
            'host': host,
            'host_url': 'https://{0}'.format(host),
        })
        return ret

    @property
    def credentials_owner(self):
        # The user whose account credentials back this node's settings.
        return self.node_settings.user_settings.owner

    @property
    def serialized_urls(self):
        """All URLs the front-end needs, validated against REQUIRED_URLS."""
        external_account = self.node_settings.external_account
        ret = {
            'settings': web_url_for('user_addons'),  # TODO: Is this needed?
        }
        # Dataverse users do not currently have profile URLs
        if external_account and external_account.profile_url:
            ret['owner'] = external_account.profile_url
        addon_urls = self.addon_serialized_urls
        # Make sure developer returns set of needed urls
        # (fixed typo: message previously said "addon_serilized_urls")
        for url in self.REQUIRED_URLS:
            assert url in addon_urls, "addon_serialized_urls must include key '{0}'".format(url)
        ret.update(addon_urls)
        return ret

    @property
    def addon_serialized_urls(self):
        """Dataverse-specific API/view URLs for the owning node."""
        node = self.node_settings.owner
        external_account = self.node_settings.external_account
        host = external_account.oauth_key if external_account else ''
        return {
            'create': api_url_for('dataverse_add_user_account'),
            'set': node.api_url_for('dataverse_set_config'),
            'importAuth': node.api_url_for('dataverse_import_auth'),
            'deauthorize': node.api_url_for('dataverse_deauthorize_node'),
            'getDatasets': node.api_url_for('dataverse_get_datasets'),
            'datasetPrefix': 'https://doi.org/',
            'dataversePrefix': 'http://{0}/dataverse/'.format(host),
            'accounts': api_url_for('dataverse_account_list'),
        }

    @property
    def serialized_node_settings(self):
        """Base node settings plus Dataverse hosts and, when authorized, the
        live connection state and the saved dataverse/dataset selection."""
        result = super(DataverseSerializer, self).serialized_node_settings
        result['hosts'] = DEFAULT_HOSTS

        # Update with Dataverse specific fields
        if self.node_settings.has_auth:
            external_account = self.node_settings.external_account
            dataverse_host = external_account.oauth_key

            connection = client.connect_from_settings(self.node_settings)
            dataverses = client.get_dataverses(connection)
            result.update({
                'dataverseHost': dataverse_host,
                'connected': connection is not None,
                'dataverses': [
                    {'title': dataverse.title, 'alias': dataverse.alias}
                    for dataverse in dataverses
                ],
                'savedDataverse': {
                    'title': self.node_settings.dataverse,
                    'alias': self.node_settings.dataverse_alias,
                },
                'savedDataset': {
                    'title': self.node_settings.dataset,
                    'doi': self.node_settings.dataset_doi,
                }
            })

        return result

    def serialize_settings(self, node_settings, user):
        """Lazily bind *node_settings* and the user's add-on settings, then
        return the serialized node settings."""
        if not self.node_settings:
            self.node_settings = node_settings
        if not self.user_settings:
            self.user_settings = user.get_addon(self.addon_short_name)
        return self.serialized_node_settings
| apache-2.0 |
h3biomed/luigi | test/instance_wrap_test.py | 37 | 3012 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import decimal
from helpers import unittest
import luigi
import luigi.notifications
from luigi.mock import MockTarget
luigi.notifications.DEBUG = True
class Report(luigi.Task):
    """Writes a small, fixed currency report into an in-memory MockTarget."""
    # The report date; also determines the output path.
    date = luigi.DateParameter()

    def run(self):
        report_file = self.output().open('w')
        for row in ('10.0 USD\n', '4.0 EUR\n', '3.0 USD\n'):
            report_file.write(row)
        report_file.close()

    def output(self):
        return MockTarget(self.date.strftime('/tmp/report-%Y-%m-%d'))
class ReportReader(luigi.Task):
    """Reads a Report's output and exposes parsed (amount, currency) rows."""
    date = luigi.DateParameter()

    def requires(self):
        # Depends on the Report for the same date.
        return Report(self.date)

    def run(self):
        report_handle = self.input().open('r')
        self.lines = list(report_handle.readlines())

    def get_line(self, line):
        """Return (Decimal amount, currency code) for row index *line*."""
        fields = self.lines[line].strip().split()
        amount, currency = fields
        return decimal.Decimal(amount), currency

    def complete(self):
        # Never considered done: this task exists to be wrapped and re-run.
        return False
class CurrencyExchanger(luigi.Task):
    """Wraps another task instance and converts its rows into currency_to."""
    task = luigi.Parameter()
    currency_to = luigi.Parameter()

    # Fixed conversion table keyed by (from, to) currency pairs.
    exchange_rates = {('USD', 'USD'): decimal.Decimal(1),
                      ('EUR', 'USD'): decimal.Decimal('1.25')}

    def requires(self):
        return self.task  # Note that you still need to state this explicitly

    def get_line(self, line):
        """Return row *line* of the wrapped task, converted to currency_to."""
        amount, currency_from = self.task.get_line(line)
        rate = self.exchange_rates[(currency_from, self.currency_to)]
        return amount * rate, self.currency_to

    def complete(self):
        return False
class InstanceWrapperTest(unittest.TestCase):
    ''' This test illustrates that tasks can have tasks as parameters
    This is a more complicated variant of factorial_test.py which is an example of
    tasks communicating directly with other tasks. In this case, a task takes another
    task as a parameter and wraps it.
    Also see wrap_test.py for an example of a task class wrapping another task class.
    Not the most useful pattern, but there's actually been a few cases where it was
    pretty handy to be able to do that. I'm adding it as a unit test to make sure that
    new code doesn't break the expected behavior.
    '''
    def test(self):
        d = datetime.date(2012, 1, 1)
        r = ReportReader(d)
        # Wrap the reader instance itself as a parameter of the exchanger.
        ex = CurrencyExchanger(r, 'USD')
        luigi.build([ex], local_scheduler=True)
        # Row 0: 10.0 USD converts 1:1; row 1: 4.0 EUR * 1.25 == 5.0 USD.
        self.assertEqual(ex.get_line(0), (decimal.Decimal('10.0'), 'USD'))
        self.assertEqual(ex.get_line(1), (decimal.Decimal('5.0'), 'USD'))
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.