repo_name
stringlengths
6
100
path
stringlengths
4
294
copies
stringlengths
1
5
size
stringlengths
4
6
content
stringlengths
606
896k
license
stringclasses
15 values
yongtang/tensorflow
tensorflow/python/training/monitored_session.py
7
58340
# pylint: disable=g-bad-file-header # Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """A wrapper of Session API which runs hooks.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import os import sys import six from tensorflow.core.protobuf import config_pb2 from tensorflow.python.distribute import distribute_coordinator_context from tensorflow.python.framework import errors from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import lookup_ops from tensorflow.python.ops import resources from tensorflow.python.ops import variables from tensorflow.python.platform import tf_logging as logging from tensorflow.python.summary import summary from tensorflow.python.training import basic_session_run_hooks from tensorflow.python.training import coordinator from tensorflow.python.training import queue_runner from tensorflow.python.training import saver as training_saver from tensorflow.python.training import session_manager as sm from tensorflow.python.training import session_run_hook from tensorflow.python.training.tracking import graph_view from tensorflow.python.training.tracking import util as trackable_util from tensorflow.python.util import function_utils from 
tensorflow.python.util.tf_export import tf_export # The list of exceptions that we should recover from. Exceptions not in this # list may terminate the job. _PREEMPTION_ERRORS = (errors.AbortedError, errors.UnavailableError) # Value that indicates no value was provided. USE_DEFAULT = object() @tf_export(v1=['train.Scaffold']) class Scaffold(object): """Structure to create or gather pieces commonly needed to train a model. When you build a model for training you usually need ops to initialize variables, a `Saver` to checkpoint them, an op to collect summaries for the visualizer, and so on. Various libraries built on top of the core TensorFlow library take care of creating some or all of these pieces and storing them in well known collections in the graph. The `Scaffold` class helps pick these pieces from the graph collections, creating and adding them to the collections if needed. If you call the scaffold constructor without any arguments, it will pick pieces from the collections, creating default ones if needed when `scaffold.finalize()` is called. You can pass arguments to the constructor to provide your own pieces. Pieces that you pass to the constructor are not added to the graph collections. The following pieces are directly accessible as attributes of the `Scaffold` object: * `saver`: A `tf.compat.v1.train.Saver` object taking care of saving the variables. Picked from and stored into the `SAVERS` collection in the graph by default. * `init_op`: An op to run to initialize the variables. Picked from and stored into the `INIT_OP` collection in the graph by default. * `ready_op`: An op to verify that the variables are initialized. Picked from and stored into the `READY_OP` collection in the graph by default. * `ready_for_local_init_op`: An op to verify that global state has been initialized and it is alright to run `local_init_op`. Picked from and stored into the `READY_FOR_LOCAL_INIT_OP` collection in the graph by default. 
This is needed when the initialization of local variables depends on the values of global variables. * `local_init_op`: An op to initialize the local variables. Picked from and stored into the `LOCAL_INIT_OP` collection in the graph by default. * `summary_op`: An op to run and merge the summaries in the graph. Picked from and stored into the `SUMMARY_OP` collection in the graph by default. You can also pass the following additional pieces to the constructor: * `init_feed_dict`: A session feed dictionary that should be used when running the init op. * `init_fn`: A callable to run after the init op to perform additional initializations. The callable will be called as `init_fn(scaffold, session)`. """ def __init__(self, init_op=None, init_feed_dict=None, init_fn=None, ready_op=None, ready_for_local_init_op=None, local_init_op=None, summary_op=None, saver=None, copy_from_scaffold=None, local_init_feed_dict=None): """Create a scaffold. Args: init_op: Optional op for initializing variables. init_feed_dict: Optional session feed dictionary to use when running the init_op. init_fn: Optional function to use to initialize the model after running the init_op. Will be called as `init_fn(scaffold, session)`. ready_op: Optional op to verify that the variables are initialized. Must return an empty 1D string tensor when the variables are initialized, or a non-empty 1D string tensor listing the names of the non-initialized variables. ready_for_local_init_op: Optional op to verify that the global variables are initialized and `local_init_op` can be run. Must return an empty 1D string tensor when the global variables are initialized, or a non-empty 1D string tensor listing the names of the non-initialized global variables. local_init_op: Optional op to initialize local variables. summary_op: Optional op to gather all summaries. Must return a scalar string tensor containing a serialized `Summary` proto. 
saver: Optional `tf.compat.v1.train.Saver` object to use to save and restore variables. May also be a `tf.train.Checkpoint` object, in which case object-based checkpoints are saved. This will also load some object-based checkpoints saved from elsewhere, but that loading may be fragile since it uses fixed keys rather than performing a full graph-based match. For example if a variable has two paths from the `Checkpoint` object because two `Model` objects share the `Layer` object that owns it, removing one `Model` may change the keys and break checkpoint loading through this API, whereas a graph-based match would match the variable through the other `Model`. copy_from_scaffold: Optional scaffold object to copy fields from. Its fields will be overwritten by the provided fields in this function. local_init_feed_dict: Optional session feed dictionary to use when running the local_init_op. """ if copy_from_scaffold is not None: if not isinstance(copy_from_scaffold, Scaffold): raise TypeError('copy_from_scaffold is not a Scaffold instance.') # We need _coalesce since Tensor is not converted to bool automatically, # so the common idiom of (a or b) does not work. coalesce = lambda a, b: a if a is not None else b init_op = coalesce(init_op, copy_from_scaffold.init_op) init_feed_dict = coalesce(init_feed_dict, copy_from_scaffold.init_feed_dict) # Use the original init_fn provided by the user to init the new Scaffold. 
init_fn = coalesce(init_fn, copy_from_scaffold._user_init_fn) # pylint: disable=protected-access ready_op = coalesce(ready_op, copy_from_scaffold.ready_op) ready_for_local_init_op = coalesce( ready_for_local_init_op, copy_from_scaffold.ready_for_local_init_op) local_init_op = coalesce(local_init_op, copy_from_scaffold.local_init_op) local_init_feed_dict = coalesce(local_init_feed_dict, copy_from_scaffold.local_init_feed_dict) summary_op = coalesce(summary_op, copy_from_scaffold.summary_op) saver = coalesce(saver, copy_from_scaffold.saver) # NOTE(touts): modifying the init function to be passed the scaffold is a # hack to make it easy to find the saver. Is there a better way? self._user_init_fn = init_fn if init_fn: self._init_fn = lambda sess: init_fn(self, sess) else: self._init_fn = None self._init_op = init_op self._init_feed_dict = init_feed_dict self._ready_op = ready_op self._ready_for_local_init_op = ready_for_local_init_op self._local_init_op = local_init_op self._local_init_feed_dict = local_init_feed_dict self._summary_op = summary_op self._saver = saver def finalize(self): """Creates operations if needed and finalizes the graph.""" if self._init_op is None: def default_init_op(): return control_flow_ops.group( variables.global_variables_initializer(), resources.initialize_resources(resources.shared_resources()), ops.get_collection('saved_model_initializers')) self._init_op = Scaffold.get_or_default('init_op', ops.GraphKeys.INIT_OP, default_init_op) if self._ready_op is None: def default_ready_op(): return array_ops.concat([ variables.report_uninitialized_variables(), resources.report_uninitialized_resources() ], 0) self._ready_op = Scaffold.get_or_default('ready_op', ops.GraphKeys.READY_OP, default_ready_op) if self._ready_for_local_init_op is None: def default_ready_for_local_init_op(): return array_ops.concat([ variables.report_uninitialized_variables( variables.global_variables()), resources.report_uninitialized_resources( 
resources.shared_resources()) ], 0) self._ready_for_local_init_op = Scaffold.get_or_default( 'ready_for_local_init_op', ops.GraphKeys.READY_FOR_LOCAL_INIT_OP, default_ready_for_local_init_op) if self._local_init_op is None: self._local_init_op = Scaffold.get_or_default( 'local_init_op', ops.GraphKeys.LOCAL_INIT_OP, Scaffold.default_local_init_op) if self._summary_op is None: self._summary_op = Scaffold.get_or_default('summary_op', ops.GraphKeys.SUMMARY_OP, summary.merge_all) # pylint: disable=g-long-lambda if self._saver is None: self._saver = training_saver._get_saver_or_default() # pylint: disable=protected-access # pylint: enable=g-long-lambda if isinstance(self._saver, trackable_util.Checkpoint): self._saver = training_saver.Saver( var_list=graph_view.ObjectGraphView( self._saver).frozen_saveable_objects(), sharded=True) else: self._saver.build() ops.get_default_graph().finalize() logging.info('Graph was finalized.') return self @property def init_fn(self): return self._init_fn @property def init_op(self): return self._init_op @property def ready_op(self): return self._ready_op @property def ready_for_local_init_op(self): return self._ready_for_local_init_op @property def local_init_op(self): return self._local_init_op @property def local_init_feed_dict(self): return self._local_init_feed_dict @property def summary_op(self): return self._summary_op @property def saver(self): return self._saver @property def init_feed_dict(self): return self._init_feed_dict @staticmethod def get_or_default(arg_name, collection_key, default_constructor): """Get from cache or create a default operation.""" elements = ops.get_collection(collection_key) if elements: if len(elements) > 1: raise RuntimeError( 'More than one item in the collection "%s". 
' 'Please indicate which one to use by passing it to ' 'the tf.Scaffold constructor as: ' 'tf.Scaffold(%s=item to use)', collection_key, arg_name) return elements[0] op = default_constructor() if op is not None: ops.add_to_collection(collection_key, op) return op @staticmethod def default_local_init_op(): """Returns an op that groups the default local init ops. This op is used during session initialization when a Scaffold is initialized without specifying the local_init_op arg. It includes `tf.compat.v1.local_variables_initializer`, `tf.compat.v1.tables_initializer`, and also initializes local session resources. Returns: The default Scaffold local init op. """ return control_flow_ops.group( variables.local_variables_initializer(), lookup_ops.tables_initializer(), resources.initialize_resources(resources.local_resources())) def _create_monitored_session_with_worker_context( worker_context, # pylint: disable=missing-docstring scaffold, checkpoint_dir=None, hooks=None, chief_only_hooks=None, save_checkpoint_secs=None, save_summaries_steps=None, save_summaries_secs=None, config=None, stop_grace_period_secs=120, log_step_count_steps=100, max_wait_secs=7200, save_checkpoint_steps=None, summary_dir=None, save_graph_def=True): all_hooks = [] if hooks: all_hooks.extend(hooks) if chief_only_hooks and worker_context.is_chief: all_hooks.extend(chief_only_hooks) # We need to call save or summary ops on all workers since these ops may # contain collective ops, only running save ops on some workers would make # collective ops hang. Therefore on those workers that don't need to actually # write checkpoints or summaries, we let them write to a temp directory. 
# pylint: disable=protected-access if type( worker_context._strategy).__name__ in ('CollectiveAllReduceStrategy', 'CollectiveAllReduceStrategyV1', 'MultiWorkerMirroredStrategy'): if worker_context.task_type: tmpdir = 'tmp_%s_%d' % (worker_context.task_type, worker_context.task_id) else: tmpdir = 'tmp' if save_checkpoint_secs: logging.warning('Collective ops may deadlock with ' '`save_checkpoints_secs` please use ' '`save_checkpoint_steps` instead. Clearing ' '`save_checkpoint_secs` and setting ' '`save_checkpoint_steps` to 1000 now.') save_checkpoint_secs = None save_checkpoint_steps = 1000 if save_summaries_secs: logging.warning('Collective ops may run out of sync with' '`save_summaries_secs`, please use ' '`save_summaries_steps` instead.') else: tmpdir = None summary_dir = summary_dir or checkpoint_dir if summary_dir and log_step_count_steps and log_step_count_steps > 0: if worker_context.should_save_summary: all_hooks.append( basic_session_run_hooks.StepCounterHook( output_dir=summary_dir, every_n_steps=log_step_count_steps)) elif tmpdir: all_hooks.append( basic_session_run_hooks.StepCounterHook( output_dir=os.path.join(summary_dir, tmpdir), every_n_steps=log_step_count_steps)) if (((save_summaries_steps and save_summaries_steps > 0) or (save_summaries_secs and save_summaries_secs > 0)) and summary_dir): if worker_context.should_save_summary: all_hooks.append( basic_session_run_hooks.SummarySaverHook( scaffold=scaffold, save_steps=save_summaries_steps, save_secs=save_summaries_secs, output_dir=summary_dir)) elif tmpdir: all_hooks.append( basic_session_run_hooks.SummarySaverHook( scaffold=scaffold, save_steps=save_summaries_steps, save_secs=save_summaries_secs, output_dir=os.path.join(summary_dir, tmpdir))) if (((save_checkpoint_secs and save_checkpoint_secs > 0) or (save_checkpoint_steps and save_checkpoint_steps > 0)) and checkpoint_dir): if worker_context.should_checkpoint: all_hooks.append( basic_session_run_hooks.CheckpointSaverHook( checkpoint_dir, 
save_steps=save_checkpoint_steps, save_secs=save_checkpoint_secs, scaffold=scaffold, save_graph_def=save_graph_def)) elif tmpdir: all_hooks.append( basic_session_run_hooks.CheckpointSaverHook( os.path.join(checkpoint_dir, tmpdir), save_steps=save_checkpoint_steps, save_secs=save_checkpoint_secs, scaffold=scaffold, save_graph_def=save_graph_def)) logging.info('all_hooks %r', all_hooks) session_creator = worker_context.session_creator( scaffold, config=config, checkpoint_dir=checkpoint_dir, max_wait_secs=max_wait_secs) return MonitoredSession( session_creator=session_creator, hooks=all_hooks, stop_grace_period_secs=stop_grace_period_secs) @tf_export(v1=['train.MonitoredTrainingSession']) def MonitoredTrainingSession( master='', # pylint: disable=invalid-name is_chief=True, checkpoint_dir=None, scaffold=None, hooks=None, chief_only_hooks=None, save_checkpoint_secs=USE_DEFAULT, save_summaries_steps=USE_DEFAULT, save_summaries_secs=USE_DEFAULT, config=None, stop_grace_period_secs=120, log_step_count_steps=100, max_wait_secs=7200, save_checkpoint_steps=USE_DEFAULT, summary_dir=None, save_graph_def=True): """Creates a `MonitoredSession` for training. For a chief, this utility sets proper session initializer/restorer. It also creates hooks related to checkpoint and summary saving. For workers, this utility sets proper session creator which waits for the chief to initialize/restore. Please check `tf.compat.v1.train.MonitoredSession` for more information. Args: master: `String` the TensorFlow master to use. is_chief: If `True`, it will take care of initialization and recovery the underlying TensorFlow session. If `False`, it will wait on a chief to initialize or recover the TensorFlow session. checkpoint_dir: A string. Optional path to a directory where to restore variables. scaffold: A `Scaffold` used for gathering or building supportive ops. If not specified, a default one is created. It's used to finalize the graph. hooks: Optional list of `SessionRunHook` objects. 
chief_only_hooks: list of `SessionRunHook` objects. Activate these hooks if `is_chief==True`, ignore otherwise. save_checkpoint_secs: The frequency, in seconds, that a checkpoint is saved using a default checkpoint saver. If both `save_checkpoint_steps` and `save_checkpoint_secs` are set to `None`, then the default checkpoint saver isn't used. If both are provided, then only `save_checkpoint_secs` is used. Default 600. save_summaries_steps: The frequency, in number of global steps, that the summaries are written to disk using a default summary saver. If both `save_summaries_steps` and `save_summaries_secs` are set to `None`, then the default summary saver isn't used. Default 100. save_summaries_secs: The frequency, in secs, that the summaries are written to disk using a default summary saver. If both `save_summaries_steps` and `save_summaries_secs` are set to `None`, then the default summary saver isn't used. Default not enabled. config: an instance of `tf.compat.v1.ConfigProto` proto used to configure the session. It's the `config` argument of constructor of `tf.compat.v1.Session`. stop_grace_period_secs: Number of seconds given to threads to stop after `close()` has been called. log_step_count_steps: The frequency, in number of global steps, that the global step/sec is logged. max_wait_secs: Maximum time workers should wait for the session to become available. This should be kept relatively short to help detect incorrect code, but sometimes may need to be increased if the chief takes a while to start up. save_checkpoint_steps: The frequency, in number of global steps, that a checkpoint is saved using a default checkpoint saver. If both `save_checkpoint_steps` and `save_checkpoint_secs` are set to `None`, then the default checkpoint saver isn't used. If both are provided, then only `save_checkpoint_secs` is used. Default not enabled. summary_dir: A string. Optional path to a directory where to save summaries. If None, checkpoint_dir is used instead. 
save_graph_def: Whether to save the GraphDef and MetaGraphDef to `checkpoint_dir`. The GraphDef is saved after the session is created as `graph.pbtxt`. MetaGraphDefs are saved out for every checkpoint as `model.ckpt-*.meta`. Returns: A `MonitoredSession` object. """ if save_summaries_steps == USE_DEFAULT and save_summaries_secs == USE_DEFAULT: save_summaries_steps = 100 save_summaries_secs = None elif save_summaries_secs == USE_DEFAULT: save_summaries_secs = None elif save_summaries_steps == USE_DEFAULT: save_summaries_steps = None if (save_checkpoint_steps == USE_DEFAULT and save_checkpoint_secs == USE_DEFAULT): save_checkpoint_steps = None save_checkpoint_secs = 600 elif save_checkpoint_secs == USE_DEFAULT: save_checkpoint_secs = None elif save_checkpoint_steps == USE_DEFAULT: save_checkpoint_steps = None scaffold = scaffold or Scaffold() worker_context = distribute_coordinator_context.get_current_worker_context() if worker_context: return _create_monitored_session_with_worker_context( worker_context, scaffold, checkpoint_dir=checkpoint_dir, hooks=hooks, chief_only_hooks=chief_only_hooks, save_checkpoint_secs=save_checkpoint_secs, save_summaries_steps=save_summaries_steps, save_summaries_secs=save_summaries_secs, config=config, stop_grace_period_secs=stop_grace_period_secs, log_step_count_steps=log_step_count_steps, max_wait_secs=max_wait_secs, save_checkpoint_steps=save_checkpoint_steps, summary_dir=summary_dir, save_graph_def=save_graph_def) if not is_chief: session_creator = WorkerSessionCreator( scaffold=scaffold, master=master, config=config, max_wait_secs=max_wait_secs) return MonitoredSession( session_creator=session_creator, hooks=hooks or [], stop_grace_period_secs=stop_grace_period_secs) all_hooks = [] if chief_only_hooks: all_hooks.extend(chief_only_hooks) session_creator = ChiefSessionCreator( scaffold=scaffold, checkpoint_dir=checkpoint_dir, master=master, config=config) summary_dir = summary_dir or checkpoint_dir if summary_dir: if 
log_step_count_steps and log_step_count_steps > 0: all_hooks.append( basic_session_run_hooks.StepCounterHook( output_dir=summary_dir, every_n_steps=log_step_count_steps)) if (save_summaries_steps and save_summaries_steps > 0) or (save_summaries_secs and save_summaries_secs > 0): all_hooks.append( basic_session_run_hooks.SummarySaverHook( scaffold=scaffold, save_steps=save_summaries_steps, save_secs=save_summaries_secs, output_dir=summary_dir)) if checkpoint_dir: if (save_checkpoint_secs and save_checkpoint_secs > 0) or (save_checkpoint_steps and save_checkpoint_steps > 0): all_hooks.append( basic_session_run_hooks.CheckpointSaverHook( checkpoint_dir, save_steps=save_checkpoint_steps, save_secs=save_checkpoint_secs, scaffold=scaffold, save_graph_def=save_graph_def)) if hooks: all_hooks.extend(hooks) return MonitoredSession( session_creator=session_creator, hooks=all_hooks, stop_grace_period_secs=stop_grace_period_secs) @tf_export(v1=['train.SessionCreator']) @six.add_metaclass(abc.ABCMeta) class SessionCreator(object): """A factory for tf.Session.""" @abc.abstractmethod def create_session(self): raise NotImplementedError( 'create_session is not implemented for {}.'.format(self)) @tf_export(v1=['train.ChiefSessionCreator']) class ChiefSessionCreator(SessionCreator): """Creates a tf.compat.v1.Session for a chief.""" def __init__(self, scaffold=None, master='', config=None, checkpoint_dir=None, checkpoint_filename_with_path=None): """Initializes a chief session creator. Args: scaffold: A `Scaffold` used for gathering or building supportive ops. If not specified a default one is created. It's used to finalize the graph. master: `String` representation of the TensorFlow master to use. config: `ConfigProto` proto used to configure the session. checkpoint_dir: A string. Optional path to a directory where to restore variables. checkpoint_filename_with_path: Full file name path to the checkpoint file. 
""" self._checkpoint_dir = checkpoint_dir self._checkpoint_filename_with_path = checkpoint_filename_with_path self._scaffold = scaffold or Scaffold() self._session_manager = None self._master = master self._config = config def _get_session_manager(self): """Gets or creates a SessionManager.""" if self._session_manager: return self._session_manager self._session_manager = sm.SessionManager( local_init_op=self._scaffold.local_init_op, local_init_feed_dict=self._scaffold.local_init_feed_dict, ready_op=self._scaffold.ready_op, ready_for_local_init_op=self._scaffold.ready_for_local_init_op, graph=ops.get_default_graph()) return self._session_manager def create_session(self): self._scaffold.finalize() return self._get_session_manager().prepare_session( self._master, saver=self._scaffold.saver, checkpoint_dir=self._checkpoint_dir, checkpoint_filename_with_path=self._checkpoint_filename_with_path, config=self._config, init_op=self._scaffold.init_op, init_feed_dict=self._scaffold.init_feed_dict, init_fn=self._scaffold.init_fn) @tf_export(v1=['train.WorkerSessionCreator']) class WorkerSessionCreator(SessionCreator): """Creates a tf.compat.v1.Session for a worker.""" def __init__(self, scaffold=None, master='', config=None, max_wait_secs=30 * 60): """Initializes a worker session creator. Args: scaffold: A `Scaffold` used for gathering or building supportive ops. If not specified a default one is created. It's used to finalize the graph. master: `String` representation of the TensorFlow master to use. config: `ConfigProto` proto used to configure the session. max_wait_secs: Maximum time to wait for the session to become available. 
""" self._scaffold = scaffold or Scaffold() self._session_manager = None self._master = master self._config = config self._max_wait_secs = max_wait_secs def _get_session_manager(self): """Gets or creates a SessionManager.""" if self._session_manager: return self._session_manager self._session_manager = sm.SessionManager( local_init_op=self._scaffold.local_init_op, local_init_feed_dict=self._scaffold.local_init_feed_dict, ready_op=self._scaffold.ready_op, ready_for_local_init_op=self._scaffold.ready_for_local_init_op, graph=ops.get_default_graph()) return self._session_manager def create_session(self): self._scaffold.finalize() return self._get_session_manager().wait_for_session( self._master, config=self._config, max_wait_secs=self._max_wait_secs) class _MonitoredSession(object): """See `MonitoredSession` or `SingularMonitoredSession`.""" def __init__(self, session_creator, hooks, should_recover, stop_grace_period_secs=120): """Sets up a Monitored or Hooked Session. Args: session_creator: A factory object to create session. Typically a `ChiefSessionCreator` or a `WorkerSessionCreator`. hooks: An iterable of `SessionRunHook' objects. should_recover: A bool. Indicates whether to recover from `AbortedError` and `UnavailableError` or not. stop_grace_period_secs: Number of seconds given to threads to stop after `close()` has been called. """ self._graph_was_finalized = ops.get_default_graph().finalized self._hooks = hooks or [] for h in self._hooks: h.begin() worker_context = distribute_coordinator_context.get_current_worker_context() if not session_creator and worker_context: session_creator = worker_context.session_creator() # Create the session. 
self._coordinated_creator = self._CoordinatedSessionCreator( session_creator=session_creator or ChiefSessionCreator(), hooks=self._hooks, stop_grace_period_secs=stop_grace_period_secs) if should_recover: self._sess = _RecoverableSession(self._coordinated_creator) else: self._sess = self._coordinated_creator.create_session() @property def graph(self): """The graph that was launched in this session.""" if self._tf_sess() is None: return None return self._tf_sess().graph def run(self, fetches, feed_dict=None, options=None, run_metadata=None): """Run ops in the monitored session. This method is completely compatible with the `tf.Session.run()` method. Args: fetches: Same as `tf.Session.run()`. feed_dict: Same as `tf.Session.run()`. options: Same as `tf.Session.run()`. run_metadata: Same as `tf.Session.run()`. Returns: Same as `tf.Session.run()`. """ return self._sess.run( fetches, feed_dict=feed_dict, options=options, run_metadata=run_metadata) def run_step_fn(self, step_fn): """Run ops using a step function. Args: step_fn: A function or a method with a single argument of type `StepContext`. The function may use methods of the argument to perform computations with access to a raw session. The returned value of the `step_fn` will be returned from `run_step_fn`, unless a stop is requested. In that case, the next `should_stop` call will return True. Example usage: ```python with tf.Graph().as_default(): c = tf.compat.v1.placeholder(dtypes.float32) v = tf.add(c, 4.0) w = tf.add(c, 0.5) def step_fn(step_context): a = step_context.session.run(fetches=v, feed_dict={c: 0.5}) if a <= 4.5: step_context.request_stop() return step_context.run_with_hooks(fetches=w, feed_dict={c: 0.1}) with tf.MonitoredSession() as session: while not session.should_stop(): a = session.run_step_fn(step_fn) ``` Hooks interact with the `run_with_hooks()` call inside the `step_fn` as they do with a `MonitoredSession.run` call. Returns: Returns the returned value of `step_fn`. 
Raises: StopIteration: if `step_fn` has called `request_stop()`. It may be caught by `with tf.MonitoredSession()` to close the session. ValueError: if `step_fn` doesn't have a single argument called `step_context`. It may also optionally have `self` for cases when it belongs to an object. """ step_fn_arguments = function_utils.fn_args(step_fn) if step_fn_arguments != ('step_context',) and step_fn_arguments != ( 'self', 'step_context', ): raise ValueError( '`step_fn` may either have one `step_context` argument, or' ' `self` and `step_context` arguments if it\'s an instance' ' method. Got {} instead.'.format(step_fn_arguments)) # `self._sess` is either `_RecoverableSession` or a `_CoordinatedSession`. # Setting `run_with_hooks` to `None` will cause `run_with_hooks` to be # `_CoordinatedSession.run` downstream in either case. This allows # `_PREEMPTION_ERRORS` to propage from within `step_fn` to # `_RecoverableSession.run_step_fn`. return self._sess.run_step_fn(step_fn, self._tf_sess(), run_with_hooks=None) class StepContext(object): """Control flow instrument for the `step_fn` from `run_step_fn()`. Users of `step_fn` may perform `run()` calls without running hooks by accessing the `session`. A `run()` call with hooks may be performed using `run_with_hooks()`. Computation flow can be interrupted using `request_stop()`. """ def __init__(self, session, run_with_hooks_fn): """Initializes the `step_context` argument for a `step_fn` invocation. Args: session: An instance of `tf.compat.v1.Session`. run_with_hooks_fn: A function for running fetches and hooks. """ self._session = session self._run_with_hooks_fn = run_with_hooks_fn @property def session(self): return self._session def run_with_hooks(self, *args, **kwargs): """Same as `MonitoredSession.run`. Accepts the same arguments.""" return self._run_with_hooks_fn(*args, **kwargs) def request_stop(self): """Exit the training loop by causing `should_stop()` to return `True`. Causes `step_fn` to exit by raising an exception. 
Raises: StopIteration """ raise StopIteration('step_fn has requested the iterations to stop.') def should_stop(self): return self._sess is None or self._sess.should_stop() def close(self): self._close_internal() def __enter__(self): return self def __exit__(self, exception_type, exception_value, traceback): if exception_type in [errors.OutOfRangeError, StopIteration]: exception_type = None self._close_internal(exception_type) # __exit__ should return True to suppress an exception. return exception_type is None class _CoordinatedSessionCreator(SessionCreator): """Factory for _CoordinatedSession.""" def __init__(self, session_creator, hooks, stop_grace_period_secs): self._session_creator = session_creator self._hooks = hooks self.coord = None self.tf_sess = None self._stop_grace_period_secs = stop_grace_period_secs def create_session(self): """Creates a coordinated session.""" # Keep the tf_sess for unit testing. self.tf_sess = self._session_creator.create_session() # We don't want coordinator to suppress any exception. self.coord = coordinator.Coordinator(clean_stop_exception_types=[]) if ops.get_collection(ops.GraphKeys.QUEUE_RUNNERS): queue_runner.start_queue_runners(sess=self.tf_sess, coord=self.coord) # Inform the hooks that a new session has been created. 
for hook in self._hooks: hook.after_create_session(self.tf_sess, self.coord) return _CoordinatedSession( _HookedSession(self.tf_sess, self._hooks), self.coord, self._stop_grace_period_secs) def _close_internal(self, exception_type=None): try: if not exception_type: for h in self._hooks: h.end(self._coordinated_creator.tf_sess) finally: try: if self._sess is None: raise RuntimeError('Session is already closed.') self._sess.close() finally: self._sess = None self._coordinated_creator.tf_sess = None self._coordinated_creator.coord = None if not self._graph_was_finalized: ops.get_default_graph()._unsafe_unfinalize() # pylint: disable=protected-access def _is_closed(self): """Return True if the monitored session is closed. For tests only. Returns: A boolean. """ return self._coordinated_creator.tf_sess is None def _tf_sess(self): """Return underlying tf.compat.v1.Session object. Warning: accessing the returned object in user code is likely to cause races or "flaky tests". Returns: A tf.compat.v1.Session object. """ return self._coordinated_creator.tf_sess @tf_export(v1=['train.MonitoredSession']) class MonitoredSession(_MonitoredSession): """Session-like object that handles initialization, recovery and hooks. Example usage: ```python saver_hook = CheckpointSaverHook(...) summary_hook = SummarySaverHook(...) 
with MonitoredSession(session_creator=ChiefSessionCreator(...), hooks=[saver_hook, summary_hook]) as sess: while not sess.should_stop(): sess.run(train_op) ``` Initialization: At creation time the monitored session does following things in given order: * calls `hook.begin()` for each given hook * finalizes the graph via `scaffold.finalize()` * create session * initializes the model via initialization ops provided by `Scaffold` * restores variables if a checkpoint exists * launches queue runners * calls `hook.after_create_session()` Run: When `run()` is called, the monitored session does following things: * calls `hook.before_run()` * calls TensorFlow `session.run()` with merged fetches and feed_dict * calls `hook.after_run()` * returns result of `session.run()` asked by user * if `AbortedError` or `UnavailableError` occurs, it recovers or reinitializes the session before executing the run() call again Exit: At the `close()`, the monitored session does following things in order: * calls `hook.end()` * closes the queue runners and the session * suppresses `OutOfRange` error which indicates that all inputs have been processed if the monitored_session is used as a context How to set `tf.compat.v1.Session` arguments: * In most cases you can set session arguments as follows: ```python MonitoredSession( session_creator=ChiefSessionCreator(master=..., config=...)) ``` * In distributed setting for a non-chief worker, you can use following: ```python MonitoredSession( session_creator=WorkerSessionCreator(master=..., config=...)) ``` See `MonitoredTrainingSession` for an example usage based on chief or worker. Note: This is not a `tf.compat.v1.Session`. For example, it cannot do following: * it cannot be set as default session. * it cannot be sent to saver.save. * it cannot be sent to tf.train.start_queue_runners. Args: session_creator: A factory object to create session. Typically a `ChiefSessionCreator` which is the default one. 
hooks: An iterable of `SessionRunHook' objects. Returns: A MonitoredSession object. """ def __init__(self, session_creator=None, hooks=None, stop_grace_period_secs=120): super(MonitoredSession, self).__init__( session_creator, hooks, should_recover=True, stop_grace_period_secs=stop_grace_period_secs) @tf_export(v1=['train.SingularMonitoredSession']) class SingularMonitoredSession(_MonitoredSession): """Session-like object that handles initialization, restoring, and hooks. Please note that this utility is not recommended for distributed settings. For distributed settings, please use `tf.compat.v1.train.MonitoredSession`. The differences between `MonitoredSession` and `SingularMonitoredSession` are: * `MonitoredSession` handles `AbortedError` and `UnavailableError` for distributed settings, but `SingularMonitoredSession` does not. * `MonitoredSession` can be created in `chief` or `worker` modes. `SingularMonitoredSession` is always created as `chief`. * You can access the raw `tf.compat.v1.Session` object used by `SingularMonitoredSession`, whereas in MonitoredSession the raw session is private. This can be used: - To `run` without hooks. - To save and restore. * All other functionality is identical. Example usage: ```python saver_hook = CheckpointSaverHook(...) summary_hook = SummarySaverHook(...) 
with SingularMonitoredSession(hooks=[saver_hook, summary_hook]) as sess: while not sess.should_stop(): sess.run(train_op) ``` Initialization: At creation time the hooked session does following things in given order: * calls `hook.begin()` for each given hook * finalizes the graph via `scaffold.finalize()` * create session * initializes the model via initialization ops provided by `Scaffold` * restores variables if a checkpoint exists * launches queue runners Run: When `run()` is called, the hooked session does following things: * calls `hook.before_run()` * calls TensorFlow `session.run()` with merged fetches and feed_dict * calls `hook.after_run()` * returns result of `session.run()` asked by user Exit: At the `close()`, the hooked session does following things in order: * calls `hook.end()` * closes the queue runners and the session * suppresses `OutOfRange` error which indicates that all inputs have been processed if the `SingularMonitoredSession` is used as a context. """ def __init__(self, hooks=None, scaffold=None, master='', config=None, checkpoint_dir=None, stop_grace_period_secs=120, checkpoint_filename_with_path=None): """Creates a SingularMonitoredSession. Args: hooks: An iterable of `SessionRunHook' objects. scaffold: A `Scaffold` used for gathering or building supportive ops. If not specified a default one is created. It's used to finalize the graph. master: `String` representation of the TensorFlow master to use. config: `ConfigProto` proto used to configure the session. checkpoint_dir: A string. Optional path to a directory where to restore variables. stop_grace_period_secs: Number of seconds given to threads to stop after `close()` has been called. checkpoint_filename_with_path: A string. Optional path to a checkpoint file from which to restore variables. 
""" session_creator = ChiefSessionCreator( scaffold=scaffold, master=master, config=config, checkpoint_dir=checkpoint_dir, checkpoint_filename_with_path=checkpoint_filename_with_path) super(SingularMonitoredSession, self).__init__( session_creator, hooks, should_recover=False, stop_grace_period_secs=stop_grace_period_secs) def raw_session(self): """Returns underlying `TensorFlow.Session` object.""" return self._tf_sess() class _WrappedSession(object): """Wrapper around a `tf.compat.v1.Session`. This wrapper is used as a base class for various session wrappers that provide additional functionality such as monitoring, coordination, and recovery. In addition to the methods exported by `SessionInterface` the wrapper provides a method to check for stop and never raises exceptions from calls to `close()`. """ def __init__(self, sess): """Creates a `_WrappedSession`. Args: sess: A `tf.compat.v1.Session` or `_WrappedSession` object. The wrapped session. """ self._sess = sess self._wrapped_is_stoppable = isinstance(self._sess, _WrappedSession) @property def graph(self): return self._sess.graph @property def sess_str(self): return self._sess.sess_str def should_stop(self): """Return true if this session should not be used anymore. Always return True if the session was closed. Returns: True if the session should stop, False otherwise. """ if self._check_stop(): return True if self._sess: return self._wrapped_is_stoppable and self._sess.should_stop() return True def _check_stop(self): """Hook for subclasses to provide their own stop condition. Returns: True if the session should stop, False otherwise. """ return False def close(self): if self._sess: try: self._sess.close() except _PREEMPTION_ERRORS as e: logging.error( 'An error occurred when attempting to close the ' 'session. This may be due to a preemption in a ' 'connected worker or parameter server. 
Error: %s', e) finally: self._sess = None def run(self, *args, **kwargs): return self._sess.run(*args, **kwargs) def run_step_fn(self, step_fn, raw_session, run_with_hooks): # `_RecoverableSession` sets `run_with_hooks` to `_CoordinatedSession.run`. # It is `None` when called from `_CoordinatedSession`. In that case # `self.run` is `_CoordinatedSession.run`. run_with_hooks = run_with_hooks or self.run return step_fn(_MonitoredSession.StepContext(raw_session, run_with_hooks)) class _RecoverableSession(_WrappedSession): """A wrapped session that recreates a session upon certain kinds of errors. The constructor is passed a SessionCreator object, not a session. Calls to `run()` are delegated to the wrapped session. If a call raises the exception `tf.errors.AbortedError` or `tf.errors.UnavailableError`, the wrapped session is closed, and a new one is created by calling the factory again. """ def __init__(self, sess_creator): """Create a new `_RecoverableSession`. The value returned by calling `sess_creator.create_session()` will be the session wrapped by this recoverable session. Args: sess_creator: A 'SessionCreator' to be wrapped by recoverable. """ self._sess_creator = sess_creator _WrappedSession.__init__(self, self._create_session()) def _create_session(self): while True: try: return self._sess_creator.create_session() except _PREEMPTION_ERRORS as e: logging.info( 'An error was raised while a session was being created. ' 'This may be due to a preemption of a connected worker ' 'or parameter server. A new session will be created. ' 'This error may also occur due to a gRPC failure caused ' 'by high memory or network bandwidth usage in the ' 'parameter servers. If this error occurs repeatedly, try ' 'increasing the number of parameter servers assigned to ' 'the job. 
Error: %s', e) def _check_stop(self): try: if self._sess: return self._sess._check_stop() # pylint: disable=protected-access else: return True except _PREEMPTION_ERRORS as e: logging.info( 'An error was raised while considering whether the ' 'session is complete. This may be due to a preemption in ' 'a connected worker or parameter server. The current ' 'session will be closed and a new session will be ' 'created. This error may also occur due to a gRPC failure ' 'caused by high memory or network bandwidth usage in the ' 'parameter servers. If this error occurs repeatedly, try ' 'increasing the number of parameter servers assigned to ' 'the job. Error: %s', e) self.close() self._sess = self._create_session() # Since we have just recreated the session, the overall computation should # not stop: return False except Exception: # pylint: disable=broad-except # `should_stop` should return True instead of raising an exception. return True def run(self, fetches, feed_dict=None, options=None, run_metadata=None): while True: try: if not self._sess: self._sess = self._create_session() return self._sess.run( fetches, feed_dict=feed_dict, options=options, run_metadata=run_metadata) except _PREEMPTION_ERRORS as e: logging.info( 'An error was raised. This may be due to a preemption in ' 'a connected worker or parameter server. The current ' 'session will be closed and a new session will be ' 'created. This error may also occur due to a gRPC failure ' 'caused by high memory or network bandwidth usage in the ' 'parameter servers. If this error occurs repeatedly, try ' 'increasing the number of parameter servers assigned to ' 'the job. Error: %s', e) self.close() self._sess = None def run_step_fn(self, step_fn, raw_session, run_with_hooks): while True: try: if not self._sess: self._sess = self._create_session() run_with_hooks = self._sess.run return self._sess.run_step_fn(step_fn, raw_session, run_with_hooks) except _PREEMPTION_ERRORS as e: logging.info( 'An error was raised. 
This may be due to a preemption in ' 'a connected worker or parameter server. The current ' 'session will be closed and a new session will be ' 'created. This error may also occur due to a gRPC failure ' 'caused by high memory or network bandwidth usage in the ' 'parameter servers. If this error occurs repeatedly, try ' 'increasing the number of parameter servers assigned to ' 'the job. Error: %s', e) self.close() self._sess = None class _CoordinatedSession(_WrappedSession): """A wrapped session that works with a `tf.Coordinator`. Calls to `run()` are delegated to the wrapped session. If a call raises an exception, the exception is reported to the coordinator. In addition, after each call to `run()` this session ask the coordinator if the session should stop. In that case it will join all the threads registered with the coordinator before returning. If the coordinator was requested to stop with an exception, that exception will be re-raised from the call to `run()`. """ def __init__(self, sess, coord, stop_grace_period_secs=120): """Create a new `_CoordinatedSession`. Args: sess: A `tf.compat.v1.Session` object. The wrapped session. coord: A `tf.train.Coordinator` object. stop_grace_period_secs: Number of seconds given to threads to stop after `close()` has been called. """ _WrappedSession.__init__(self, sess) self._coord = coord self._stop_grace_period_secs = stop_grace_period_secs def _check_stop(self): # If the coordinator was asked to stop due to an exception, then it needs # to be propagated to this stack. self._coord.raise_requested_exception() # At this point, no exceptions are recorded in the coordinator. 
return self._coord.should_stop() def close(self): self._coord.request_stop() try: self._coord.join( stop_grace_period_secs=self._stop_grace_period_secs, ignore_live_threads=True) finally: try: _WrappedSession.close(self) except Exception: # pylint: disable=broad-except # We intentionally suppress exceptions from the close() here since # useful exceptions are already reported by join(). pass def run(self, *args, **kwargs): try: return self._sess.run(*args, **kwargs) except _PREEMPTION_ERRORS: raise except Exception: # pylint: disable=broad-except # A non-preemption error could have been caused by a preemption error # in the coordinator. If this is the case, raise that exception instead, # since it's the root cause. Otherwise, stick to the `original_exc_info`. original_exc_info = sys.exc_info() try: self._coord.raise_requested_exception() except _PREEMPTION_ERRORS: raise except Exception: # pylint: disable=broad-except raise six.reraise(*original_exc_info) else: raise six.reraise(*original_exc_info) class _HookedSession(_WrappedSession): """A _WrappedSession that calls hooks during calls to run(). The list of hooks to call is passed in the constructor. Before each call to `run()` the session calls the `before_run()` method of the hooks, which can return additional ops or tensors to run. These are added to the arguments of the call to `run()`. When the `run()` call finishes, the session calls the `after_run()` methods of the hooks, passing the values returned by the `run()` call corresponding to the ops and tensors that each hook requested. If any call to the hooks, requests stop via run_context the session will be marked as needing to stop and its `should_stop()` method will now return `True`. """ def __init__(self, sess, hooks): """Initializes a _HookedSession object. Args: sess: A `tf.compat.v1.Session` or a `_WrappedSession` object. hooks: An iterable of `SessionRunHook' objects. 
""" _WrappedSession.__init__(self, sess) self._hooks = hooks self._should_stop = False def _check_stop(self): """See base class.""" return self._should_stop def run(self, fetches, feed_dict=None, options=None, run_metadata=None): """See base class.""" if self.should_stop(): raise RuntimeError('Run called even after should_stop requested.') actual_fetches = {'caller': fetches} run_context = session_run_hook.SessionRunContext( original_args=session_run_hook.SessionRunArgs(fetches, feed_dict), session=self._sess) options = options or config_pb2.RunOptions() feed_dict = self._call_hook_before_run(run_context, actual_fetches, feed_dict, options) # Do session run. run_metadata = run_metadata or config_pb2.RunMetadata() outputs = _WrappedSession.run( self, fetches=actual_fetches, feed_dict=feed_dict, options=options, run_metadata=run_metadata) for hook in self._hooks: hook.after_run( run_context, session_run_hook.SessionRunValues( results=outputs[hook] if hook in outputs else None, options=options, run_metadata=run_metadata)) self._should_stop = self._should_stop or run_context.stop_requested return outputs['caller'] def _call_hook_before_run(self, run_context, fetch_dict, user_feed_dict, options): """Calls hooks.before_run and handles requests from hooks.""" hook_feeds = {} for hook in self._hooks: request = hook.before_run(run_context) if request is not None: if request.fetches is not None: fetch_dict[hook] = request.fetches if request.feed_dict: self._raise_if_feeds_intersects(hook_feeds, request.feed_dict, 'Same tensor is fed by two hooks.') hook_feeds.update(request.feed_dict) if request.options: self._merge_run_options(options, request.options) if not hook_feeds: return user_feed_dict if not user_feed_dict: return hook_feeds self._raise_if_feeds_intersects( user_feed_dict, hook_feeds, 'Same tensor is fed by a SessionRunHook and user.') hook_feeds.update(user_feed_dict) return hook_feeds def _raise_if_feeds_intersects(self, feeds1, feeds2, message): intersection = 
set(feeds1.keys()) & set(feeds2.keys()) if intersection: raise RuntimeError(message + ' Conflict(s): ' + str(list(intersection))) def _merge_run_options(self, options, incoming_options): """Merge two instances of RunOptions into the first one. During the merger, the numerical fields including trace_level, timeout_in_ms, inter_op_thread_pool are set to the larger one of the two. The boolean value is set to the logical OR of the two. debug_tensor_watch_opts of the original options is extended with that from the incoming one. Args: options: The options to merge into. incoming_options: The options to be merged into the first argument. """ options.trace_level = max(options.trace_level, incoming_options.trace_level) options.timeout_in_ms = max(options.timeout_in_ms, incoming_options.timeout_in_ms) options.inter_op_thread_pool = max(options.inter_op_thread_pool, incoming_options.inter_op_thread_pool) options.output_partition_graphs = max( options.output_partition_graphs, incoming_options.output_partition_graphs) options.debug_options.debug_tensor_watch_opts.extend( incoming_options.debug_options.debug_tensor_watch_opts) options.debug_options.reset_disk_byte_usage = ( options.debug_options.reset_disk_byte_usage or incoming_options.debug_options.reset_disk_byte_usage) options.report_tensor_allocations_upon_oom = ( options.report_tensor_allocations_upon_oom or incoming_options.report_tensor_allocations_upon_oom)
apache-2.0
mattsanf/legallydistinctpocketmonsters
node_modules/utf8/tests/generate-test-data.py
1788
1435
#!/usr/bin/env python import re import json # https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae # http://stackoverflow.com/a/13436167/96656 def unisymbol(codePoint): if codePoint >= 0x0000 and codePoint <= 0xFFFF: return unichr(codePoint) elif codePoint >= 0x010000 and codePoint <= 0x10FFFF: highSurrogate = int((codePoint - 0x10000) / 0x400) + 0xD800 lowSurrogate = int((codePoint - 0x10000) % 0x400) + 0xDC00 return unichr(highSurrogate) + unichr(lowSurrogate) else: return 'Error' def hexify(codePoint): return 'U+' + hex(codePoint)[2:].upper().zfill(6) def writeFile(filename, contents): print filename with open(filename, 'w') as f: f.write(contents.strip() + '\n') data = [] for codePoint in range(0x000000, 0x10FFFF + 1): # Skip non-scalar values. if codePoint >= 0xD800 and codePoint <= 0xDFFF: continue symbol = unisymbol(codePoint) # http://stackoverflow.com/a/17199950/96656 bytes = symbol.encode('utf8').decode('latin1') data.append({ 'codePoint': codePoint, 'decoded': symbol, 'encoded': bytes }); jsonData = json.dumps(data, sort_keys=False, indent=2, separators=(',', ': ')) # Use tabs instead of double spaces for indentation jsonData = jsonData.replace(' ', '\t') # Escape hexadecimal digits in escape sequences jsonData = re.sub( r'\\u([a-fA-F0-9]{4})', lambda match: r'\u{}'.format(match.group(1).upper()), jsonData ) writeFile('data.json', jsonData)
gpl-3.0
marcuskelly/recover
Lib/site-packages/passlib/utils/decor.py
5
7651
""" passlib.utils.decor -- helper decorators & properties """ #============================================================================= # imports #============================================================================= # core from __future__ import absolute_import, division, print_function import logging log = logging.getLogger(__name__) from functools import wraps, update_wrapper import types from warnings import warn # site # pkg from passlib.utils.compat import PY3 # local __all__ = [ "classproperty", "hybrid_method", "memoize_single_value", "memoized_property", "deprecated_function", "deprecated_method", ] #============================================================================= # class-level decorators #============================================================================= class classproperty(object): """Function decorator which acts like a combination of classmethod+property (limited to read-only properties)""" def __init__(self, func): self.im_func = func def __get__(self, obj, cls): return self.im_func(cls) @property def __func__(self): """py3 compatible alias""" return self.im_func class hybrid_method(object): """ decorator which invokes function with class if called as class method, and with object if called at instance level. """ def __init__(self, func): self.func = func update_wrapper(self, func) def __get__(self, obj, cls): if obj is None: obj = cls if PY3: return types.MethodType(self.func, obj) else: return types.MethodType(self.func, obj, cls) #============================================================================= # memoization #============================================================================= def memoize_single_value(func): """ decorator for function which takes no args, and memoizes result. exposes a ``.clear_cache`` method to clear the cached value. 
""" cache = {} @wraps(func) def wrapper(): try: return cache[True] except KeyError: pass value = cache[True] = func() return value def clear_cache(): cache.pop(True, None) wrapper.clear_cache = clear_cache return wrapper class memoized_property(object): """ decorator which invokes method once, then replaces attr with result """ def __init__(self, func): self.__func__ = func self.__name__ = func.__name__ self.__doc__ = func.__doc__ def __get__(self, obj, cls): if obj is None: return self value = self.__func__(obj) setattr(obj, self.__name__, value) return value if not PY3: @property def im_func(self): """py2 alias""" return self.__func__ def clear_cache(self, obj): """ class-level helper to clear stored value (if any). usage: :samp:`type(self).{attr}.clear_cache(self)` """ obj.__dict__.pop(self.__name__, None) def peek_cache(self, obj, default=None): """ class-level helper to peek at stored value usage: :samp:`value = type(self).{attr}.clear_cache(self)` """ return obj.__dict__.get(self.__name__, default) # works but not used ##class memoized_class_property(object): ## """function decorator which calls function as classmethod, ## and replaces itself with result for current and all future invocations. ## """ ## def __init__(self, func): ## self.im_func = func ## ## def __get__(self, obj, cls): ## func = self.im_func ## value = func(cls) ## setattr(cls, func.__name__, value) ## return value ## ## @property ## def __func__(self): ## "py3 compatible alias" #============================================================================= # deprecation #============================================================================= def deprecated_function(msg=None, deprecated=None, removed=None, updoc=True, replacement=None, _is_method=False, func_module=None): """decorator to deprecate a function. 
:arg msg: optional msg, default chosen if omitted :kwd deprecated: version when function was first deprecated :kwd removed: version when function will be removed :kwd replacement: alternate name / instructions for replacing this function. :kwd updoc: add notice to docstring (default ``True``) """ if msg is None: if _is_method: msg = "the method %(mod)s.%(klass)s.%(name)s() is deprecated" else: msg = "the function %(mod)s.%(name)s() is deprecated" if deprecated: msg += " as of Passlib %(deprecated)s" if removed: msg += ", and will be removed in Passlib %(removed)s" if replacement: msg += ", use %s instead" % replacement msg += "." def build(func): is_classmethod = _is_method and isinstance(func, classmethod) if is_classmethod: # NOTE: PY26 doesn't support "classmethod().__func__" directly... func = func.__get__(None, type).__func__ opts = dict( mod=func_module or func.__module__, name=func.__name__, deprecated=deprecated, removed=removed, ) if _is_method: def wrapper(*args, **kwds): tmp = opts.copy() klass = args[0] if is_classmethod else args[0].__class__ tmp.update(klass=klass.__name__, mod=klass.__module__) warn(msg % tmp, DeprecationWarning, stacklevel=2) return func(*args, **kwds) else: text = msg % opts def wrapper(*args, **kwds): warn(text, DeprecationWarning, stacklevel=2) return func(*args, **kwds) update_wrapper(wrapper, func) if updoc and (deprecated or removed) and \ wrapper.__doc__ and ".. deprecated::" not in wrapper.__doc__: txt = deprecated or '' if removed or replacement: txt += "\n " if removed: txt += "and will be removed in version %s" % (removed,) if replacement: if removed: txt += ", " txt += "use %s instead" % replacement txt += "." if not wrapper.__doc__.strip(" ").endswith("\n"): wrapper.__doc__ += "\n" wrapper.__doc__ += "\n.. 
deprecated:: %s\n" % (txt,) if is_classmethod: wrapper = classmethod(wrapper) return wrapper return build def deprecated_method(msg=None, deprecated=None, removed=None, updoc=True, replacement=None): """decorator to deprecate a method. :arg msg: optional msg, default chosen if omitted :kwd deprecated: version when method was first deprecated :kwd removed: version when method will be removed :kwd replacement: alternate name / instructions for replacing this method. :kwd updoc: add notice to docstring (default ``True``) """ return deprecated_function(msg, deprecated, removed, updoc, replacement, _is_method=True) #============================================================================= # eof #=============================================================================
bsd-2-clause
bendikro/deluge-yarss-plugin
yarss2/lib/requests/packages/urllib3/util/url.py
553
5836
from collections import namedtuple from ..exceptions import LocationParseError url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] class Url(namedtuple('Url', url_attrs)): """ Datastructure for representing an HTTP URL. Used as a return value for :func:`parse_url`. """ slots = () def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): if path and not path.startswith('/'): path = '/' + path return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) @property def hostname(self): """For backwards-compatibility with urlparse. We're nice like that.""" return self.host @property def request_uri(self): """Absolute path including the query string.""" uri = self.path or '/' if self.query is not None: uri += '?' + self.query return uri @property def netloc(self): """Network location including host and port""" if self.port: return '%s:%d' % (self.host, self.port) return self.host @property def url(self): """ Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: :: >>> U = parse_url('http://google.com/mail/') >>> U.url 'http://google.com/mail/' >>> Url('http', 'username:password', 'host.com', 80, ... '/path', 'query', 'fragment').url 'http://username:password@host.com:80/path?query#fragment' """ scheme, auth, host, port, path, query, fragment = self url = '' # We use "is not None" we want things to happen with empty strings (or 0 port) if scheme is not None: url += scheme + '://' if auth is not None: url += auth + '@' if host is not None: url += host if port is not None: url += ':' + str(port) if path is not None: url += path if query is not None: url += '?' 
+ query if fragment is not None: url += '#' + fragment return url def __str__(self): return self.url def split_first(s, delims): """ Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. """ min_idx = None min_delim = None for d in delims: idx = s.find(d) if idx < 0: continue if min_idx is None or idx < min_idx: min_idx = idx min_delim = d if min_idx is None or min_idx < 0: return s, '', None return s[:min_idx], s[min_idx+1:], min_delim def parse_url(url): """ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. Partly backwards-compatible with :mod:`urlparse`. Example:: >>> parse_url('http://google.com/mail/') Url(scheme='http', host='google.com', port=None, path='/mail/', ...) >>> parse_url('google.com:80') Url(scheme=None, host='google.com', port=80, path=None, ...) >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) """ # While this code has overlap with stdlib's urlparse, it is much # simplified for our needs and less annoying. # Additionally, this implementations does silly things to be optimal # on CPython. 
if not url: # Empty return Url() scheme = None auth = None host = None port = None path = None fragment = None query = None # Scheme if '://' in url: scheme, url = url.split('://', 1) # Find the earliest Authority Terminator # (http://tools.ietf.org/html/rfc3986#section-3.2) url, path_, delim = split_first(url, ['/', '?', '#']) if delim: # Reassemble the path path = delim + path_ # Auth if '@' in url: # Last '@' denotes end of auth part auth, url = url.rsplit('@', 1) # IPv6 if url and url[0] == '[': host, url = url.split(']', 1) host += ']' # Port if ':' in url: _host, port = url.split(':', 1) if not host: host = _host if port: # If given, ports must be integers. if not port.isdigit(): raise LocationParseError(url) port = int(port) else: # Blank ports are cool, too. (rfc3986#section-3.2.3) port = None elif not host and url: host = url if not path: return Url(scheme, auth, host, port, path, query, fragment) # Fragment if '#' in path: path, fragment = path.split('#', 1) # Query if '?' in path: path, query = path.split('?', 1) return Url(scheme, auth, host, port, path, query, fragment) def get_host(url): """ Deprecated. Use :func:`.parse_url` instead. """ p = parse_url(url) return p.scheme or 'http', p.hostname, p.port
gpl-3.0
mancoast/CPythonPyc_test
cpython/263_test_codecencodings_cn.py
64
2054
#!/usr/bin/env python # # test_codecencodings_cn.py # Codec encoding tests for PRC encodings. # from test import test_support from test import test_multibytecodec_support import unittest class Test_GB2312(test_multibytecodec_support.TestBase, unittest.TestCase): encoding = 'gb2312' tstring = test_multibytecodec_support.load_teststring('gb2312') codectests = ( # invalid bytes ("abc\x81\x81\xc1\xc4", "strict", None), ("abc\xc8", "strict", None), ("abc\x81\x81\xc1\xc4", "replace", u"abc\ufffd\u804a"), ("abc\x81\x81\xc1\xc4\xc8", "replace", u"abc\ufffd\u804a\ufffd"), ("abc\x81\x81\xc1\xc4", "ignore", u"abc\u804a"), ("\xc1\x64", "strict", None), ) class Test_GBK(test_multibytecodec_support.TestBase, unittest.TestCase): encoding = 'gbk' tstring = test_multibytecodec_support.load_teststring('gbk') codectests = ( # invalid bytes ("abc\x80\x80\xc1\xc4", "strict", None), ("abc\xc8", "strict", None), ("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\u804a"), ("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\u804a\ufffd"), ("abc\x80\x80\xc1\xc4", "ignore", u"abc\u804a"), ("\x83\x34\x83\x31", "strict", None), (u"\u30fb", "strict", None), ) class Test_GB18030(test_multibytecodec_support.TestBase, unittest.TestCase): encoding = 'gb18030' tstring = test_multibytecodec_support.load_teststring('gb18030') codectests = ( # invalid bytes ("abc\x80\x80\xc1\xc4", "strict", None), ("abc\xc8", "strict", None), ("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\u804a"), ("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\u804a\ufffd"), ("abc\x80\x80\xc1\xc4", "ignore", u"abc\u804a"), ("abc\x84\x39\x84\x39\xc1\xc4", "replace", u"abc\ufffd\u804a"), (u"\u30fb", "strict", "\x819\xa79"), ) has_iso10646 = True def test_main(): test_support.run_unittest(__name__) if __name__ == "__main__": test_main()
gpl-3.0
qskycolor/viewfinder
backend/logs/itunes_trends.py
13
11557
# Copyright 2012 Viewfinder Inc. All Rights Reserved
"""Interface to the iTunes Connect sales and trend reports.

Fetches daily download stats from iTunes Connect and saves them to the metrics table.
Specify the apple user to log in with (eg: --user=marc to use marc@emailscrubbed.com). We get the
user's apple password from the secret 'itunes_connect_${user}'. Default user is 'itunes_viewer',
a special user with "sales" data access only.

Run with:
$ python -m viewfinder.backend.logs.itunes_trends --start_date=2013-01-20

Appropriate for cron: detect start date, update metrics table. Don't run if last run was less
than 6h ago.
$ python -m viewfinder.backend.logs.itunes_trends --dry_run=False --smart_scan=True --hours_between_runs=6

Options:
- user: default=itunes_viewer: apple user. We expand this to ${user}@emailscrubbed.com
- vendor_id: default=<our ID>: the vendor ID, from the iTunes Connect dashboard.
- dry_run: default=True: display stats only, do not update Metrics table or write job summary.
- start_date: default=None: look up stats from that date until yesterday. format: YYYY-MM-DD.
- smart_scan: default=False: determine the start date from previous successful run.
- require_lock: default=True: grab the job:itunes_trends lock for the duration of the job.
- hours_between_runs: default=0: don't run if the last successful run was less than this many hours ago.
"""

import gzip
import cStringIO
import getpass
import json
import logging
import os
import re
import sys
import time
import traceback
import urllib
import urllib2

from tornado import gen, options
from urlparse import urljoin
from viewfinder.backend.base import constants, main, secrets, util
from viewfinder.backend.base.dotdict import DotDict
from viewfinder.backend.db import db_client, metric
from viewfinder.backend.db.job import Job
from viewfinder.backend.logs import logs_util
from viewfinder.backend.services import itunes_trends_codes
from viewfinder.backend.storage.object_store import ObjectStore

options.define('user', default='itunes_viewer',
               help='User for iTunesConnect. Will expand to user@emailscrubbed.com and '
                    'lookup the itunes password in secrets/itunes_connect_${user}')
# vendor_id comes from the iTunes Connect dashboard.
options.define('vendor_id', default='85575078', help='iTunes vendor ID')
options.define('dry_run', default=True, help='Print output only, do not write to metrics table.')
options.define('start_date', default=None, help='Lookup stats from date onwards (YYYY-MM-DD)')
options.define('smart_scan', type=bool, default=False,
               help='determine start_date from previous successful runs. Overrides start_date.')
options.define('require_lock', type=bool, default=True,
               help='attempt to grab the job:itunes_trends lock before running. Exit if acquire fails.')
options.define('hours_between_runs', type=int, default=0,
               help='minimum time since start of last successful run (with dry_run=False)')
options.define('download_from_s3', type=bool, default=False,
               help='Fetch raw gzipped files from S3 (if false, fetch from iTunesConnect)')

kITCBaseURL = 'https://reportingitc.apple.com/autoingestion.tft'
kS3Bucket = ObjectStore.SERVER_DATA
kS3Base = 'itunes-trends/'


class ITunesTrends(object):
  """Downloads, archives (to S3) and parses daily iTunes Connect sales reports."""

  def __init__(self, apple_id, password, vendor_id, html_retries=3):
    self._apple_id = apple_id
    self._password = password
    self._vendor_id = vendor_id
    # BUG FIX: this was hard-coded as 'self._html_retries=3', silently ignoring
    # the 'html_retries' argument.
    self._html_retries = html_retries
    self._form_fields = None
    self._available_days = None
    self._available_weeks = None
    self._object_store = ObjectStore.GetInstance(kS3Bucket)

  def _Fetch(self, url, data=None):
    """Fetch 'url' with optional POST 'data' and return the response body.

    Retries up to self._html_retries times with exponential backoff,
    regardless of the error; re-raises once retries are exhausted.
    """
    retries = 0
    while True:
      logging.info('fetching (%d) %s' % (retries, url))
      # BUG FIX: the request/read used to sit *before* an empty 'try: pass'
      # block, so the function returned (or raised) on the first attempt and
      # the retry logic below was unreachable dead code. The fetch now runs
      # inside the try so failures actually trigger the backoff/retry path.
      try:
        request = urllib2.Request(url, data)
        handle = urllib2.urlopen(request)
        logging.info('fetch reply headers: %s' % handle.info())
        return handle.read()
      except Exception:
        if retries >= self._html_retries:
          raise
        time.sleep(2 ** retries)
        retries += 1

  @gen.engine
  def FetchOneDay(self, day, callback):
    """Fetch a single day's worth of data.

    'day' is YYYY-MM-DD. Invokes 'callback' with a DotDict of
    'itunes.<type>.<version>.<store>' -> units.
    Exception could be due to http errors, unavailable date, or failed parsing.
    TODO(marc): handle these cases separately.
    """
    s3_filename = os.path.join(kS3Base, '%s.gz' % day)

    def DownloadFromiTunes():
      # We use our own date format in the entire tool. Only now do we convert
      # to iTunesConnect's YYYYMMDD.
      y, m, d = day.split('-')
      itunes_date = '%s%s%s' % (y, m, d)
      data = urllib.urlencode({'USERNAME': self._apple_id,
                               'PASSWORD': self._password,
                               'VNDNUMBER': self._vendor_id,
                               'TYPEOFREPORT': 'Sales',
                               'DATETYPE': 'Daily',
                               'REPORTTYPE': 'Summary',
                               'REPORTDATE': itunes_date})
      buf = self._Fetch(kITCBaseURL, data)
      return buf

    def ParseContents(contents):
      # Parse the tab-separated report into a DotDict of metric name -> units.
      result = DotDict()
      skipped_lines = []
      for line in contents.splitlines():
        tokens = line.split('\t')
        if tokens[0] == 'Provider':
          # Skip header line.
          skipped_lines.append(line)
          continue
        # Replace dots with underscores as we'll be using the version in a DotDict.
        version = tokens[5].replace('.', '_')
        if not version or version == ' ':
          # subscriptions do not have a version, use 'all'.
          version = 'all'
        type_id = tokens[6]
        # Use the type id if we don't have a name for it.
        type_name = itunes_trends_codes.PRODUCT_TYPE_IDENTIFIER.get(type_id, type_id)
        units = int(tokens[7])
        # Ignore proceeds, it does not reflect in-app purchases.
        store = tokens[12]
        result['itunes.%s.%s.%s' % (type_name, version, store)] = units
      assert len(skipped_lines) <= 1, 'Skipped too many lines: %r' % skipped_lines
      return result

    # Failures in any of Get/Download/Put will interrupt this day's processing.
    if options.options.download_from_s3:
      logging.info('S3 get %s' % s3_filename)
      buf = yield gen.Task(self._object_store.Get, s3_filename)
    else:
      buf = DownloadFromiTunes()
      logging.info('S3 put %s' % s3_filename)
      yield gen.Task(self._object_store.Put, s3_filename, buf)

    iobuffer = cStringIO.StringIO(buf)
    # BUG FIX: GzipFile's first positional parameter is 'filename', not 'mode';
    # the old call passed 'rb' as a bogus filename. Pass mode by keyword.
    gzipIO = gzip.GzipFile(mode='rb', fileobj=iobuffer)
    contents = gzipIO.read()
    iobuffer.close()
    logging.info('Contents: %s' % contents)
    callback(ParseContents(contents))


@gen.engine
def DetermineStartDate(client, job, callback):
  """If smart_scan is true, lookup the start date from previous job summaries,
  otherwise use --start_date. --start_date and job summary days are of the
  form YYYY-MM-DD. Invokes 'callback' with the start date, or None to skip
  this run.
  """
  start_date = options.options.start_date

  # Lookup previous runs started in the last week.
  if options.options.smart_scan:
    # Search for successful full-scan run in the last week.
    last_run = yield gen.Task(job.FindLastSuccess, with_payload_key='stats.last_day')
    if last_run is None:
      logging.info('No previous successful scan found, rerun with --start_date')
      callback(None)
      return

    last_run_start = last_run['start_time']
    if (last_run_start + options.options.hours_between_runs * constants.SECONDS_PER_HOUR >
        time.time()):
      logging.info('Last successful run started at %s, less than %d hours ago; skipping.' %
                   (time.asctime(time.localtime(last_run_start)),
                    options.options.hours_between_runs))
      callback(None)
      return

    # Set start_date to the last processed day + 1.
    last_day = last_run['stats.last_day']
    start_time = util.ISO8601ToUTCTimestamp(last_day) + constants.SECONDS_PER_DAY
    start_date = util.TimestampUTCToISO8601(start_time)
    logging.info('Last successful run (%s) scanned up to %s, setting start date to %s' %
                 (time.asctime(time.localtime(last_run_start)), last_day, start_date))

  callback(start_date)


@gen.engine
def RunOnce(client, job, apple_id, password, callback):
  """Fetch metrics for every day from the start date up to (but excluding)
  today, then write them to the metrics table. Invokes 'callback' with the
  last day processed, or None if nothing was done."""
  start_date = yield gen.Task(DetermineStartDate, client, job)
  if start_date is None:
    logging.info('Start date not specified, last run too recent, or smart_scan could not '
                 'determine a date; exiting.')
    callback(None)
    return

  query_dates = []
  start_time = util.ISO8601ToUTCTimestamp(start_date)
  today = util.NowUTCToISO8601()
  # Today's report is not final; never fetch it.
  while start_time < time.time():
    date = util.TimestampUTCToISO8601(start_time)
    if date != today:
      query_dates.append(date)
    start_time += constants.SECONDS_PER_DAY

  logging.info('fetching data for dates: %r' % query_dates)
  results = {}
  itc = ITunesTrends(apple_id, password, options.options.vendor_id)
  failed = False
  for day in query_dates:
    if failed:
      break
    try:
      result = yield gen.Task(itc.FetchOneDay, day)
      if not result:
        # We don't get an exception when iTunesConnect has no data. We also
        # don't want to fail as there's no way it will have this data later.
        logging.warning('No data for day %s' % day)
      else:
        results[day] = result
    except Exception:
      msg = traceback.format_exc()
      logging.warning('Error fetching iTunes Connect data for day %s: %s', day, msg)
      failed = True

  if len(results) == 0:
    callback(None)
  else:
    # Replace the entire 'itunes' category of previous metrics. This is so we
    # can fix any processing errors we may have had.
    hms = logs_util.kDailyMetricsTimeByLogType['itunes_trends']
    yield gen.Task(logs_util.UpdateMetrics, client, results, prefix_to_erase='itunes',
                   dry_run=options.options.dry_run, hms_tuple=hms)
    callback(sorted(results.keys())[-1])


@gen.engine
def _Start(callback):
  """Grab a lock on job:itunes_trends and call RunOnce.
  If we get a return value, write it to the job summary."""
  assert options.options.user is not None and options.options.vendor_id is not None
  apple_id = '%s@emailscrubbed.com' % options.options.user
  # Attempt to lookup iTunes Connect password from secrets.
  password = secrets.GetSecret('itunes_connect_%s' % options.options.user)
  assert password

  client = db_client.DBClient.Instance()
  job = Job(client, 'itunes_trends')

  if options.options.require_lock:
    got_lock = yield gen.Task(job.AcquireLock)
    if got_lock == False:
      logging.warning('Failed to acquire job lock: exiting.')
      callback()
      return

  result = None
  job.Start()
  try:
    result = yield gen.Task(RunOnce, client, job, apple_id, password)
  except:
    # Intentionally broad: any failure must be recorded in the job summary.
    msg = traceback.format_exc()
    logging.info('Registering failed run with message: %s' % msg)
    yield gen.Task(job.RegisterRun, Job.STATUS_FAILURE, failure_msg=msg)
  else:
    if result is not None and not options.options.dry_run:
      # Successful run with data processed and not in dry-run mode: write run summary.
      stats = DotDict()
      stats['last_day'] = result
      logging.info('Registering successful run with stats: %r' % stats)
      yield gen.Task(job.RegisterRun, Job.STATUS_SUCCESS, stats=stats)
  finally:
    yield gen.Task(job.ReleaseLock)

  callback()


if __name__ == '__main__':
  sys.exit(main.InitAndRun(_Start))
apache-2.0
sonnyhu/scikit-learn
examples/svm/plot_separating_hyperplane_unbalanced.py
329
1850
""" ================================================= SVM: Separating hyperplane for unbalanced classes ================================================= Find the optimal separating hyperplane using an SVC for classes that are unbalanced. We first find the separating plane with a plain SVC and then plot (dashed) the separating hyperplane with automatically correction for unbalanced classes. .. currentmodule:: sklearn.linear_model .. note:: This example will also work by replacing ``SVC(kernel="linear")`` with ``SGDClassifier(loss="hinge")``. Setting the ``loss`` parameter of the :class:`SGDClassifier` equal to ``hinge`` will yield behaviour such as that of a SVC with a linear kernel. For example try instead of the ``SVC``:: clf = SGDClassifier(n_iter=100, alpha=0.01) """ print(__doc__) import numpy as np import matplotlib.pyplot as plt from sklearn import svm #from sklearn.linear_model import SGDClassifier # we create 40 separable points rng = np.random.RandomState(0) n_samples_1 = 1000 n_samples_2 = 100 X = np.r_[1.5 * rng.randn(n_samples_1, 2), 0.5 * rng.randn(n_samples_2, 2) + [2, 2]] y = [0] * (n_samples_1) + [1] * (n_samples_2) # fit the model and get the separating hyperplane clf = svm.SVC(kernel='linear', C=1.0) clf.fit(X, y) w = clf.coef_[0] a = -w[0] / w[1] xx = np.linspace(-5, 5) yy = a * xx - clf.intercept_[0] / w[1] # get the separating hyperplane using weighted classes wclf = svm.SVC(kernel='linear', class_weight={1: 10}) wclf.fit(X, y) ww = wclf.coef_[0] wa = -ww[0] / ww[1] wyy = wa * xx - wclf.intercept_[0] / ww[1] # plot separating hyperplanes and samples h0 = plt.plot(xx, yy, 'k-', label='no weights') h1 = plt.plot(xx, wyy, 'k--', label='with weights') plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired) plt.legend() plt.axis('tight') plt.show()
bsd-3-clause
fhaoquan/kbengine
kbe/res/scripts/common/Lib/test/test_json/test_dump.py
109
1626
from io import StringIO
from test.test_json import PyTest, CTest

from test.support import bigmemtest, _1G


class TestDump:
    # Mixin: combined with PyTest / CTest below, which presumably supply
    # self.json / self.dumps bound to the pure-Python or C json
    # implementation respectively — see test.test_json.

    def test_dump(self):
        # Stream-based API: dump writes into a file-like object.
        sio = StringIO()
        self.json.dump({}, sio)
        self.assertEqual(sio.getvalue(), '{}')

    def test_dumps(self):
        self.assertEqual(self.dumps({}), '{}')

    def test_encode_truefalse(self):
        # bool keys serialize as the strings "true"/"false".
        self.assertEqual(self.dumps(
                 {True: False, False: True}, sort_keys=True),
                 '{"false": true, "true": false}')
        self.assertEqual(self.dumps(
                {2: 3.0, 4.0: 5, False: 1, 6: True}, sort_keys=True),
                '{"false": 1, "2": 3.0, "4.0": 5, "6": true}')

    # Issue 16228: Crash on encoding resized list
    def test_encode_mutated(self):
        a = [object()] * 10
        def crasher(obj):
            # Shrinks the list while the encoder is iterating over it; the
            # encoder must not crash, and ends up emitting 5 elements.
            del a[-1]
        self.assertEqual(self.dumps(a, default=crasher),
                         '[null, null, null, null, null]')


class TestPyDump(TestDump, PyTest): pass

class TestCDump(TestDump, CTest):

    # The size requirement here is hopefully over-estimated (actual
    # memory consumption depending on implementation details, and also
    # system memory management, since this may allocate a lot of
    # small objects).

    @bigmemtest(size=_1G, memuse=1)
    def test_large_list(self, size):
        # Scale the list size with the memory budget granted by bigmemtest.
        N = int(30 * 1024 * 1024 * (size / _1G))
        l = [1] * N
        encoded = self.dumps(l)
        # Each element contributes "1, " (3 chars); check length and both ends
        # without materializing a second N-sized comparison where avoidable.
        self.assertEqual(len(encoded), N * 3)
        self.assertEqual(encoded[:1], "[")
        self.assertEqual(encoded[-2:], "1]")
        self.assertEqual(encoded[1:-2], "1, " * (N - 1))
lgpl-3.0
maloL/nao-fsm
tracking_and_storing.py
1
11706
# object tracking algorithm with trajectory points plotting on # video stream window # after taking 20 points it stores them in txt files from naoqi import ALProxy, ALBroker, ALModule import time from vision_definitions import kVGA, kBGRColorSpace import cv2 as opencv import numpy as np import random from ghmm import * import ConfigParser, argparse import training global ObjectTracker # object tracking module class ObjectTrackerModule(ALModule): def __init__(self, name): ALModule.__init__(self, name) self.data = 0 self.behaviors = [] self.exists = [] self.kindNames = [] self.waiting = [] self.tts = ALProxy("ALTextToSpeech") self.gestureProxy = ALProxy("NAOObjectGesture", myBroker) self.motionProxy = ALProxy("ALMotion", myBroker) self.memProxy = ALProxy("ALMemory", myBroker) self.motionProxy.setStiffnesses("Head", 1.0) self.gestureProxy.startTracker(15, 0) #self.log = open("temp.txt", "w") ############################################################ def startTracker(self, camId): self.gestureProxy.startTracker(15, camId) #self.gestureProxy.focusObject(-1) def stopTracker(self): self.gestureProxy.stopTracker() self.gestureProxy.stopFocus() def load(self, path, name): self.gestureProxy.loadDataset(path) self.kindNames.append(name) self.exists.append(False) self.behaviors.append([]) self.waiting.append(None) self.gestureProxy.trackObject(name, -len(self.kindNames)) self.memProxy.subscribeToMicroEvent(name, "ObjectTracker", name, "storeData") def getIdx(self, name): if (name in self.kindNames): return self.kindNames.index(name) else: return None def getBehaviors(self, name): idx = self.getIdx(name) if idx!=None: return self.behaviors[idx] else: return None def getExist(self, name): idx = self.getIdx(name) if idx!=None: return self.exists[idx] else: return None def getWaiting(self, name): idx = self.getIdx(name) if idx!=None: return self.waiting[idx] else: return None def clearWaiting(self): for i in range(len(self.waiting)): self.waiting[i] = None def 
waitForBehavior(self, name, behavior): idx = self.getIdx(name) self.gestureProxy.clearEventTraj(name) print('Waiting for behavior: ' + str(behavior)) if idx!=None: if behavior == "Frog": self.waiting[idx] = ["FrogL", "FrogR"] else: if behavior == "Plane": self.waiting[idx] = ["PlaneL", "PlaneR"] else: self.waiting[idx] = [behavior] else: return None def onObjGet(self, key, value, message): id = -1 if (key in self.kindNames): id = self.kindNames.index(key) else: return if (value != None): if (value[0] != 0): self.exists[id]=True if (value[5]!=None): print (value[5]) self.behaviors[id] = value[5] if (self.waiting[id]!= None): for tmp in self.waiting[id]: if tmp in value[5]: self.waiting[id] = None break else: self.exists[id]=False if (value[1]!=None): print (value[1]) self.behaviors[id] = value[1] if (self.waiting[id]!= None): for tmp in self.waiting[id]: if tmp in value[1]: self.waiting[id] = None break def storeData(self, key, value, message): if value: if value[0]: print("I see the cup") #self.log.write(str(value[3][0])+", "+str(value[3][1])+"\n") ######################################## self.data = value[3] else: self.data = 0 print("I don't see the cup") def unload(self): self.gestureProxy.stopTracker() #self.log.close() for i in range(0, len(self.exists)): self.gestureProxy.removeObjectKind(0) self.gestureProxy.removeEvent(self.kindNames[i]) # class with functions for Kalman filter class KalmanFilter(object): def __init__(self, process_variance, estimated_measurement_variance): self.process_variance = process_variance self.estimated_measurement_variance = estimated_measurement_variance self.posteri_estimate = 0.0 self.posteri_error_estimate = 1.0 def input_latest_noisy_measurement(self, measurement): priori_estimate = self.posteri_estimate priori_error_estimate = self.posteri_error_estimate + self.process_variance blending_factor = priori_error_estimate / (priori_error_estimate + self.estimated_measurement_variance) self.posteri_estimate = priori_estimate + 
blending_factor * (measurement - priori_estimate) self.posteri_error_estimate = (1 - blending_factor) * priori_error_estimate def get_latest_estimated_measurement(self): return self.posteri_estimate # function for getting video stream from nao camera def nao_image_getter(alvideoproxy, video): alimg = alvideoproxy.getImageRemote(video) imgheader = opencv.cv.CreateImageHeader((alimg[0], alimg[1]), opencv.cv.IPL_DEPTH_8U, 3) opencv.cv.SetData(imgheader, alimg[6]) img = np.asarray(imgheader[:, :]) return img if __name__ == '__main__': # initializing proxies and other required parameters IP = "192.168.1.105" PORT = 9559 myBroker = ALBroker("myBroker", "0.0.0.0", 0, IP, PORT) #opencv.namedWindow("Robot camera feed") # get sample image to detect size alvideoproxy = ALProxy("ALVideoDevice", IP, PORT) video = alvideoproxy.subscribeCamera("video", 0, kVGA, kBGRColorSpace, 30) motionproxy=ALProxy('ALMotion', myBroker) motionproxy.killAll() tts = ALProxy('ALTextToSpeech', myBroker) behaveproxy = ALProxy('ALBehaviorManager', myBroker) postureproxy = ALProxy('ALRobotPosture', myBroker) navigationProxy = ALProxy('ALNavigation', myBroker) sound = ALProxy('ALAudioDevice', myBroker) memory = ALProxy('ALMemory', myBroker) memory.insertData('ObjectGrabber', int(0)) camProxy = ALProxy("ALVideoDevice", IP, PORT) postureproxy.goToPosture("StandInit", 0.8) motionproxy.setAngles('HeadPitch', 0, 0.5) time.sleep(0.5) motionproxy.setAngles('HeadYaw', 0, 0.5) time.sleep(0.5) motionproxy.setStiffnesses("Head", 1.0) cfile = "Config.ini" config = ConfigParser.ConfigParser() config.read(cfile) set_num = config.get("Grab settings", "dataset") volume = config.get('Grab settings', 'Volume') volume = int(float(volume)) sound.setOutputVolume(volume) new_set = int(set_num) + 1 filename = '/home/luka/Documents/FER_projekt/Diplomski_rad/Temp_set/Pos/gest' + str(new_set) config.set("Grab settings", "Dataset", str(new_set)) with open(cfile, 'wb') as configfile: config.write(configfile) # try object tracking 
try: # kalman filter preparations iteration_count = 500 measurement_standard_deviation = np.std([random.random() * 2.0 - 1.0 for j in xrange(iteration_count)]) process_variance = 1e-1 # greater = faster, worse estimation, lower = slower, better estimation estimated_measurement_variance = measurement_standard_deviation ** 2 # 0.05 ** 2 kalman_filter = KalmanFilter(process_variance, estimated_measurement_variance) posteri_estimate_graph = [] # initilazing tracking ObjectTracker = ObjectTrackerModule("ObjectTracker") ObjectTracker.load("/home/nao/ImageSets/cup", 'Cup') ObjectTracker.gestureProxy.stopTracker() time.sleep(2) #tts.say("Now you repeat the gesture") time.sleep(2) print ('Starting tracker...') ObjectTracker.startTracker(0) image_position = np.zeros(shape=2) pos_vec = np.zeros(shape=2) i = 0 log = open(filename + ".txt", "w") #################################################################################### estimation = np.zeros(shape=(1, 2)) # while loop where tracking is executed tts.say("Now you repeat the gesture") time.sleep(0.5) while len(estimation) < 30: # if object is detected do data analysis image = nao_image_getter(alvideoproxy, video) if ObjectTracker.data: # angular position data from micro event pos_data = np.asarray(ObjectTracker.data) print "data: " print ObjectTracker.data # calculating image position based on angular position of object image_position = camProxy.getImagePositionFromAngularPosition(0, [pos_data[0], pos_data[1]]) image_position = np.asarray(image_position) print image_position # applying kalman filter on image position data kalman_filter.input_latest_noisy_measurement(image_position) posteri_estimate_graph.append(kalman_filter.get_latest_estimated_measurement()) # separating estimated values for easier plotting estimation = np.zeros(shape=(len(posteri_estimate_graph), 2)) for i in range(0, len(posteri_estimate_graph)): temp2 = posteri_estimate_graph[i] estimation[i, 0] = temp2[0] estimation[i, 1] = temp2[1] # video frame 
size height, width = image.shape[:2] opencv.ellipse(image, (int(estimation[-1, 0] * width), int(estimation[-1, 1] * height + 15)), (70, 90), -180, 0, 360, (255, 0, 0), 2) # plotting trajectory points for j in range(2, len(estimation)): opencv.circle(image, (int(estimation[j, 0] * width), int(estimation[j, 1] * height + 15)), 5, (0, 0, 255), -1) opencv.putText(image, "Object", (10, 70), opencv.FONT_HERSHEY_SIMPLEX, 3, (0, 255, 0), 5) opencv.putText(image, "tracking", (10, 140), opencv.FONT_HERSHEY_SIMPLEX, 3, (0, 255, 0), 5) #opencv.putText(image, "Object tracking", (100, 100), opencv.FONT_HERSHEY_DUPLEX, 2.0, (0, 0, 255)) opencv.imshow("Robot camera feed", image) #opencv.imwrite("Slike/Tracking/image" + str(len(estimation)) + ".png", image) if opencv.waitKey(10) == 27: break # if try doesn't work for any reason program breaks and stops after # stopping video subscribe and other things finally: n = len(estimation) for i in range(0, n): log.write(str(estimation[i, 0])+", "+str(estimation[i, 1])+"\n") log.close() ObjectTracker.gestureProxy.stopTracker() print('Ending tracking...') time.sleep(1) alvideoproxy.unsubscribe(video) opencv.destroyAllWindows() ObjectTracker.unload() behaveproxy.stopAllBehaviors() time.sleep(1.0) motionproxy.killAll() myBroker.shutdown()
lgpl-3.0
ChanderG/scipy
doc/source/conf.py
40
10928
# -*- coding: utf-8 -*- import sys, os, re # Check Sphinx version import sphinx if sphinx.__version__ < "1.1": raise RuntimeError("Sphinx 1.1 or newer required") needs_sphinx = '1.1' # ----------------------------------------------------------------------------- # General configuration # ----------------------------------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. sys.path.insert(0, os.path.abspath('../sphinxext')) sys.path.insert(0, os.path.abspath(os.path.dirname(__file__))) extensions = ['sphinx.ext.autodoc', 'sphinx.ext.mathjax', 'numpydoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.autosummary', 'scipyoptdoc'] # Determine if the matplotlib has a recent enough version of the # plot_directive. try: from matplotlib.sphinxext import plot_directive except ImportError: use_matplotlib_plot_directive = False else: try: use_matplotlib_plot_directive = (plot_directive.__version__ >= 2) except AttributeError: use_matplotlib_plot_directive = False if use_matplotlib_plot_directive: extensions.append('matplotlib.sphinxext.plot_directive') else: raise RuntimeError("You need a recent enough version of matplotlib") # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General substitutions. project = 'SciPy' copyright = '2008-2014, The Scipy community' # The default replacements for |version| and |release|, also used in various # other places throughout the built documents. 
import scipy version = re.sub(r'\.dev-.*$', r'.dev', scipy.__version__) release = scipy.__version__ print "Scipy (VERSION %s)" % (version,) # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # The reST default role (used for this markup: `text`) to use for all documents. default_role = "autolink" # List of directories, relative to source directories, that shouldn't be searched # for source files. exclude_dirs = [] # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = False # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. show_authors = False # The name of the Pygments (syntax highlighting) style to use. 
pygments_style = 'sphinx' # ----------------------------------------------------------------------------- # HTML output # ----------------------------------------------------------------------------- themedir = os.path.join(os.pardir, 'scipy-sphinx-theme', '_theme') if os.path.isdir(themedir): html_theme = 'scipy' html_theme_path = [themedir] if 'scipyorg' in tags: # Build for the scipy.org website html_theme_options = { "edit_link": True, "sidebar": "right", "scipy_org_logo": True, "rootlinks": [("http://scipy.org/", "Scipy.org"), ("http://docs.scipy.org/", "Docs")] } else: # Default build html_theme_options = { "edit_link": False, "sidebar": "left", "scipy_org_logo": False, "rootlinks": [] } html_logo = '_static/scipyshiny_small.png' html_sidebars = {'index': 'indexsidebar.html'} else: # Build without scipy.org sphinx theme present if 'scipyorg' in tags: raise RuntimeError("Get the scipy-sphinx-theme first, " "via git submodule init & update") else: html_style = 'scipy_fallback.css' html_logo = '_static/scipyshiny_small.png' html_sidebars = {'index': 'indexsidebar.html'} html_title = "%s v%s Reference Guide" % (project, version) html_static_path = ['_static'] html_last_updated_fmt = '%b %d, %Y' html_additional_pages = {} html_use_modindex = True html_copy_source = False html_file_suffix = '.html' htmlhelp_basename = 'scipy' pngmath_use_preview = True pngmath_dvipng_args = ['-gamma', '1.5', '-D', '96', '-bg', 'Transparent'] # ----------------------------------------------------------------------------- # LaTeX output # ----------------------------------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, document class [howto/manual]). 
_stdauthor = 'Written by the SciPy community' latex_documents = [ ('index', 'scipy-ref.tex', 'SciPy Reference Guide', _stdauthor, 'manual'), # ('user/index', 'scipy-user.tex', 'SciPy User Guide', # _stdauthor, 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. latex_preamble = r''' \usepackage{amsmath} \DeclareUnicodeCharacter{00A0}{\nobreakspace} % In the parameters etc. sections, align uniformly, and adjust label emphasis \usepackage{expdlist} \let\latexdescription=\description \let\endlatexdescription=\enddescription \renewenvironment{description}% {\begin{latexdescription}[\setleftmargin{60pt}\breaklabel\setlabelstyle{\bfseries\itshape}]}% {\end{latexdescription}} % Make Examples/etc section headers smaller and more compact \makeatletter \titleformat{\paragraph}{\normalsize\normalfont\bfseries\itshape}% {\py@NormalColor}{0em}{\py@NormalColor}{\py@NormalColor} \titlespacing*{\paragraph}{0pt}{1ex}{0pt} \makeatother % Save vertical space in parameter lists and elsewhere \makeatletter \renewenvironment{quote}% {\list{}{\topsep=0pt% \parsep \z@ \@plus\p@}% \item\relax}% {\endlist} \makeatother % Fix footer/header \renewcommand{\chaptermark}[1]{\markboth{\MakeUppercase{\thechapter.\ #1}}{}} \renewcommand{\sectionmark}[1]{\markright{\MakeUppercase{\thesection.\ #1}}} ''' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. 
latex_use_modindex = False # ----------------------------------------------------------------------------- # Intersphinx configuration # ----------------------------------------------------------------------------- intersphinx_mapping = { 'http://docs.python.org/dev': None, 'http://docs.scipy.org/doc/numpy': None, } # ----------------------------------------------------------------------------- # Numpy extensions # ----------------------------------------------------------------------------- # If we want to do a phantom import from an XML file for all autodocs phantom_import_file = 'dump.xml' # Generate plots for example sections numpydoc_use_plots = True # ----------------------------------------------------------------------------- # Autosummary # ----------------------------------------------------------------------------- if sphinx.__version__ >= "0.7": import glob autosummary_generate = glob.glob("*.rst") # ----------------------------------------------------------------------------- # Coverage checker # ----------------------------------------------------------------------------- coverage_ignore_modules = r""" """.split() coverage_ignore_functions = r""" test($|_) (some|all)true bitwise_not cumproduct pkgload generic\. 
""".split() coverage_ignore_classes = r""" """.split() coverage_c_path = [] coverage_c_regexes = {} coverage_ignore_c_items = {} #------------------------------------------------------------------------------ # Plot #------------------------------------------------------------------------------ plot_pre_code = """ import numpy as np np.random.seed(123) """ plot_include_source = True plot_formats = [('png', 96), 'pdf'] plot_html_show_formats = False import math phi = (math.sqrt(5) + 1)/2 font_size = 13*72/96.0 # 13 px plot_rcparams = { 'font.size': font_size, 'axes.titlesize': font_size, 'axes.labelsize': font_size, 'xtick.labelsize': font_size, 'ytick.labelsize': font_size, 'legend.fontsize': font_size, 'figure.figsize': (3*phi, 3), 'figure.subplot.bottom': 0.2, 'figure.subplot.left': 0.2, 'figure.subplot.right': 0.9, 'figure.subplot.top': 0.85, 'figure.subplot.wspace': 0.4, 'text.usetex': False, } if not use_matplotlib_plot_directive: import matplotlib matplotlib.rcParams.update(plot_rcparams) # ----------------------------------------------------------------------------- # Source code links # ----------------------------------------------------------------------------- import inspect from os.path import relpath, dirname for name in ['sphinx.ext.linkcode', 'linkcode', 'numpydoc.linkcode']: try: __import__(name) extensions.append(name) break except ImportError: pass else: print "NOTE: linkcode extension not found -- no links to source generated" def linkcode_resolve(domain, info): """ Determine the URL corresponding to Python object """ if domain != 'py': return None modname = info['module'] fullname = info['fullname'] submod = sys.modules.get(modname) if submod is None: return None obj = submod for part in fullname.split('.'): try: obj = getattr(obj, part) except: return None try: fn = inspect.getsourcefile(obj) except: fn = None if not fn: try: fn = inspect.getsourcefile(sys.modules[obj.__module__]) except: fn = None if not fn: return None try: source, lineno = 
inspect.getsourcelines(obj) except: lineno = None if lineno: linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1) else: linespec = "" fn = relpath(fn, start=dirname(scipy.__file__)) if 'dev' in scipy.__version__: return "http://github.com/scipy/scipy/blob/master/scipy/%s%s" % ( fn, linespec) else: return "http://github.com/scipy/scipy/blob/v%s/scipy/%s%s" % ( scipy.__version__, fn, linespec)
bsd-3-clause
vperron/sentry
src/sentry/migrations/0006_auto.py
36
3873
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): pass def backwards(self, orm): pass models = { 'sentry.filtervalue': { 'Meta': {'unique_together': "(('key', 'value'),)", 'object_name': 'FilterValue'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'sentry.groupedmessage': { 'Meta': {'unique_together': "(('logger', 'view', 'checksum'),)", 'object_name': 'GroupedMessage'}, 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}), 'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}), 'message': ('django.db.models.fields.TextField', [], {}), 'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'view': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}) }, 'sentry.message': { 'Meta': {'object_name': 'Message'}, 
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}), 'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'message_set'", 'null': 'True', 'to': "orm['sentry.GroupedMessage']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}), 'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}), 'message': ('django.db.models.fields.TextField', [], {}), 'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}), 'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'view': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}) } } complete_apps = ['sentry']
bsd-3-clause
isazi/Transpose
analysis/manage.py
1
2009
#!/usr/bin/env python # Copyright 2014 Alessio Sclocco <a.sclocco@vu.nl> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. def get_tables(queue): """Get a list of the tables""" queue.execute("SHOW TABLES") return queue.fetchall() def create_table(queue, table): """Create a table to store auto-tuning results for transpose.""" queue.execute("CREATE table " + table + "(id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, M INTEGER NOT NULL, N INTEGER NOT NULL, itemsPerBlock INTEGER NOT NULL, GBs FLOAT UNSIGNED NOT NULL, time FLOAT UNSIGNED NOT NULL, time_err FLOAT UNSIGNED NOT NULL, cov FLOAT UNSIGNED NOT NULL)") def delete_table(queue, table): """Delete table.""" queue.execute("DROP table " + table) def load_file(queue, table, input_file): """Load input_file into a table in the database.""" for line in input_file: if (line[0] != "#") and (line[0] != "\n"): items = line.split(sep=" ") queue.execute("INSERT INTO " + table + " VALUES (NULL, " + items[0] + ", " + items[1] + ", " + items[2] + ", " + items[3] + ", " + items[4] + ", " + items[5] + ", " + items[6].rstrip("\n") + ")") def print_results(confs): """Print the result tuples.""" for conf in confs: for item in conf: print(item, end=" ") print() def get_M_range(queue, table, N): """Return the M in the scenario.""" queue.execute("SELECT DISTINCT M FROM " + table + " WHERE (N = " + N + ") ORDER BY M") return queue.fetchall()
apache-2.0
defionscode/ansible-modules-core
commands/raw.py
49
3161
# this is a virtual module that is entirely implemented server side # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: raw short_description: Executes a low-down and dirty SSH command version_added: historical options: free_form: description: - the raw module takes a free form command to run required: true executable: description: - change the shell used to execute the command. Should be an absolute path to the executable. - when using privilege escalation (C(become)), a default shell will be assigned if one is not provided as privilege escalation requires a shell. required: false version_added: "1.0" description: - Executes a low-down and dirty SSH command, not going through the module subsystem. This is useful and should only be done in two cases. The first case is installing C(python-simplejson) on older (Python 2.4 and before) hosts that need it as a dependency to run modules, since nearly all core modules require it. Another is speaking to any devices such as routers that do not have any Python installed. In any other case, using the M(shell) or M(command) module is much more appropriate. Arguments given to M(raw) are run directly through the configured remote shell. Standard output, error output and return code are returned when available. There is no change handler support for this module. 
- This module does not require python on the remote system, much like the M(script) module. notes: - "If using raw from a playbook, you may need to disable fact gathering using C(gather_facts: no) if you're using C(raw) to bootstrap python onto the machine." - If you want to execute a command securely and predictably, it may be better to use the M(command) or M(shell) modules instead. - the C(environment) keyword does not work with raw normally, it requires a shell which means it only works if C(executable) is set or using the module with privilege escalation (C(become)). author: - Ansible Core Team - Michael DeHaan ''' EXAMPLES = ''' # Bootstrap a legacy python 2.4 host - raw: yum -y install python-simplejson # Bootstrap a host without python2 installed - raw: dnf install -y python2 python2-dnf libselinux-python # Run a command that uses non-posix shell-isms (in this example /bin/sh # doesn't handle redirection and wildcards together but bash does) - raw: cat < /tmp/*txt args: executable: /bin/bash '''
gpl-3.0
ojengwa/oh-mainline
vendor/packages/html5lib/html5lib/filters/formfiller.py
135
5839
# # The goal is to finally have a form filler where you pass data for # each form, using the algorithm for "Seeding a form with initial values" # See http://www.whatwg.org/specs/web-forms/current-work/#seeding # import _base from html5lib.constants import spaceCharacters spaceCharacters = u"".join(spaceCharacters) class SimpleFilter(_base.Filter): def __init__(self, source, fieldStorage): _base.Filter.__init__(self, source) self.fieldStorage = fieldStorage def __iter__(self): field_indices = {} state = None field_name = None for token in _base.Filter.__iter__(self): type = token["type"] if type in ("StartTag", "EmptyTag"): name = token["name"].lower() if name == "input": field_name = None field_type = None input_value_index = -1 input_checked_index = -1 for i,(n,v) in enumerate(token["data"]): n = n.lower() if n == u"name": field_name = v.strip(spaceCharacters) elif n == u"type": field_type = v.strip(spaceCharacters) elif n == u"checked": input_checked_index = i elif n == u"value": input_value_index = i value_list = self.fieldStorage.getlist(field_name) field_index = field_indices.setdefault(field_name, 0) if field_index < len(value_list): value = value_list[field_index] else: value = "" if field_type in (u"checkbox", u"radio"): if value_list: if token["data"][input_value_index][1] == value: if input_checked_index < 0: token["data"].append((u"checked", u"")) field_indices[field_name] = field_index + 1 elif input_checked_index >= 0: del token["data"][input_checked_index] elif field_type not in (u"button", u"submit", u"reset"): if input_value_index >= 0: token["data"][input_value_index] = (u"value", value) else: token["data"].append((u"value", value)) field_indices[field_name] = field_index + 1 field_type = None field_name = None elif name == "textarea": field_type = "textarea" field_name = dict((token["data"])[::-1])["name"] elif name == "select": field_type = "select" attributes = dict(token["data"][::-1]) field_name = attributes.get("name") is_select_multiple = 
"multiple" in attributes is_selected_option_found = False elif field_type == "select" and field_name and name == "option": option_selected_index = -1 option_value = None for i,(n,v) in enumerate(token["data"]): n = n.lower() if n == "selected": option_selected_index = i elif n == "value": option_value = v.strip(spaceCharacters) if option_value is None: raise NotImplementedError("<option>s without a value= attribute") else: value_list = self.fieldStorage.getlist(field_name) if value_list: field_index = field_indices.setdefault(field_name, 0) if field_index < len(value_list): value = value_list[field_index] else: value = "" if (is_select_multiple or not is_selected_option_found) and option_value == value: if option_selected_index < 0: token["data"].append((u"selected", u"")) field_indices[field_name] = field_index + 1 is_selected_option_found = True elif option_selected_index >= 0: del token["data"][option_selected_index] elif field_type is not None and field_name and type == "EndTag": name = token["name"].lower() if name == field_type: if name == "textarea": value_list = self.fieldStorage.getlist(field_name) if value_list: field_index = field_indices.setdefault(field_name, 0) if field_index < len(value_list): value = value_list[field_index] else: value = "" yield {"type": "Characters", "data": value} field_indices[field_name] = field_index + 1 field_name = None elif name == "option" and field_type == "select": pass # TODO: part of "option without value= attribute" processing elif field_type == "textarea": continue # ignore token yield token
agpl-3.0
Asuka52/jubatus
unittest_gtest.py
3
175890
#!/usr/bin/env python # encoding: ISO8859-1 """ Copyright (c)2011, Hideyuki Tanaka All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Hideyuki Tanaka nor the names of other contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" import os, subprocess, sys from waflib.TaskGen import before, after, feature from waflib import Options, Task, Utils, Logs, Errors C1 = '#XXX'.encode() C2 = '#YYY'.encode() UNPACK_DIR = '.unittest-gtest' GTEST_DIR = 'gtest-1.7.0/fused-src' def cleanup(): import shutil try: shutil.rmtree(UNPACK_DIR) except OSError: pass def unpack_gtest(conf): cwd = os.getcwd() fname = __file__ if fname.endswith('.pyc'): fname = fname[0:-1] f = open(fname, 'rb') while 1: line = f.readline() if not line: Logs.error('not contain gtest archive') sys.exit(1) if line == '#==>\n'.encode(): txt = f.readline() if not txt: Logs.error('corrupt archive') if f.readline() != '#<==\n'.encode(): Logs.error('corrupt archive') break txt = txt[1:-1].replace(C1, '\n'.encode()).replace(C2, '\r'.encode()) cleanup() tmp = 't.tar.bz2' os.makedirs(UNPACK_DIR) os.chdir(UNPACK_DIR) t = open(tmp, 'wb') t.write(txt) t.close() def check_call(args): if subprocess.call(args): raise try: check_call(['tar', 'xf', tmp]) check_call(['mkdir', GTEST_DIR + '/gtest/gtest']) check_call(['cp', GTEST_DIR + '/gtest/gtest.h', GTEST_DIR + '/gtest/gtest/gtest.h']) except: os.chdir(cwd) cleanup() Logs.error('gtest cannot be unpacked.') os.unlink(tmp) conf.env.UNITTEST_GTEST_PATH = os.path.abspath(os.getcwd()) os.chdir(cwd) def configure(conf): try: unpack_gtest(conf) conf.msg('Unpacking gtest', 'yes') except: conf.msg('Unpacking gtest', 'no') Logs.error(sys.exc_info()[1]) conf.check_cxx(lib = 'pthread', uselib_store = 'GTEST_PTHREAD') def options(opt): opt.add_option('--check', action = 'store_true', default = False, help = 'Execute unit tests') opt.add_option('--checkall', action = 'store_true', default = False, help = 'Execute all unit tests') opt.add_option('--checkone', action = 'store', default = False, help = 'Execute specified unit test') opt.add_option('--checkfilter', action = 'store', default = False, help = 'Execute unit tests sprcified by pattern') def match_filter(filt, targ): if isinstance(filt, str): (pat, _, 
_) = filt.partition('.') if pat == '*': return True return pat == targ return False @feature('testt', 'gtest') @before('process_rule') def test_remover(self): if not Options.options.check and not Options.options.checkall and self.target != Options.options.checkone and not match_filter(Options.options.checkfilter, self.target): self.meths[:] = [] @feature('gtest') @before('process_source') def gtest_attach(self): if not hasattr(self.bld, 'def_gtest_objects'): self.bld.objects( source = [UNPACK_DIR + '/' + GTEST_DIR + '/gtest/gtest-all.cc', UNPACK_DIR + '/' + GTEST_DIR + '/gtest/gtest_main.cc'], target = 'GTEST_OBJECTS' ) self.bld.def_gtest_objects = True DIR = self.env.UNITTEST_GTEST_PATH + '/' + GTEST_DIR self.includes = self.to_list(getattr(self, 'includes', [])) + [DIR] self.use = self.to_list(getattr(self, 'use', [])) + ['GTEST_PTHREAD', 'GTEST_OBJECTS'] @feature('testt', 'gtest') @after('apply_link') def make_test(self): if not 'cprogram' in self.features and not 'cxxprogram' in self.features: Logs.error('test cannot be executed %s'%self) return self.default_install_path = None self.create_task('utest', self.link_task.outputs) import threading testlock = threading.Lock() class utest(Task.Task): """ Execute a unit test """ color = 'PINK' after = ['vnum','inst'] ext_in = ['.bin'] vars = [] def runnable_status(self): stat = super(utest, self).runnable_status() if stat != Task.SKIP_ME: return stat if Options.options.checkall: return Task.RUN_ME if Options.options.checkone == self.generator.name: return Task.RUN_ME if isinstance(Options.options.checkfilter, str): if match_filter(Options.options.checkfilter, self.generator.name): return Task.RUN_ME return stat def run(self): """ Execute the test. The execution is always successful, but the results are stored on ``self.generator.bld.utest_results`` for postprocessing. 
""" status = 0 filename = self.inputs[0].abspath() self.ut_exec = getattr(self, 'ut_exec', [filename]) if getattr(self.generator, 'ut_fun', None): self.generator.ut_fun(self) try: fu = getattr(self.generator.bld, 'all_test_paths') except AttributeError: fu = os.environ.copy() lst = [] for g in self.generator.bld.groups: for tg in g: if getattr(tg, 'link_task', None): lst.append(tg.link_task.outputs[0].parent.abspath()) def add_path(dct, path, var): dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')]) if sys.platform == 'win32': add_path(fu, lst, 'PATH') elif sys.platform == 'darwin': add_path(fu, lst, 'DYLD_LIBRARY_PATH') add_path(fu, lst, 'LD_LIBRARY_PATH') else: add_path(fu, lst, 'LD_LIBRARY_PATH') self.generator.bld.all_test_paths = fu if isinstance(Options.options.checkfilter, str): (_, _, filt) = Options.options.checkfilter.partition('.') if filt != "": self.ut_exec += ['--gtest_filter=' + filt] cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath() proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE) (stdout, stderr) = proc.communicate() tup = (filename, proc.returncode, stdout, stderr) self.generator.utest_result = tup testlock.acquire() try: bld = self.generator.bld Logs.debug("ut: %r", tup) try: bld.utest_results.append(tup) except AttributeError: bld.utest_results = [tup] a = getattr(self.generator.bld, 'added_post_fun', False) if not a: self.generator.bld.add_post_fun(summary) self.generator.bld.added_post_fun = True finally: testlock.release() def summary(bld): lst = getattr(bld, 'utest_results', []) if not lst: return total = len(lst) fail = len([x for x in lst if x[1]]) Logs.pprint('CYAN', 'test summary') Logs.pprint('CYAN', ' tests that pass %d/%d' % (total-fail, total)) for (f, code, out, err) in lst: if not code: Logs.pprint('GREEN', ' %s' % f) if isinstance(Options.options.checkfilter, str): print(out) if fail>0: Logs.pprint('RED', ' 
tests that fail %d/%d' % (fail, total)) for (f, code, out, err) in lst: if code: Logs.pprint('RED', ' %s' % f) print(out.decode('utf-8')) if err: print(err.decode('utf-8')) raise Errors.WafError('test failed') #==> #BZh91AY&SYîì;Ò®¤ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿb`|¾Qž¥EöVñíé½îO|îíînÙzîžÉ´d0䯛kõÛ\Û¯=ÝkxØwh*ÉÕP* 8»gšAò dÑ*‰GÀùô+UlUß6ï.Üà#XXXåÍ>º<d}w̑•;¥¬îõíš>ï€hÔ>¯¾|wWÝí·nl÷´/{w«¶èhlK"Ù*Õ¶#XXXìfî¶iÙۏ¦ó[fÚûï¯Qê>õ¨û`òR’W¾`r;Ì !=ºåï=ì()(H«}sï4.ځªÐÖÌÔ[PKTšjÖ[#YYY#XXXÍ´d+gµÜÖ¨Ó@#XXXú`"ìˆh€YU¶Qi‚õè÷ªéÝx=Jˆ {Ïo½ï!ÙPU\;ާyµ`†öäCÊ^à¶ú^`%èî`4S£èû±æûš4¾Ü<O¯•S¢ W¶5PN€æÖƒ®ï[qÝÇÑò½÷sAÝ»ˆ‚vQ³£Õp°äÍÕv#YYYDŠ"¤’Ù··z»¾Á¼ðs¾]sݶiKŒSÄô­ÐГÏ{‡r°ŠŠÆӜ Ýn={`^aÚÔvîºáˆîç {¹ÓMß/¾ûrå7Û>´ïn}}sÛ7·wejí¼ïpõÀ+‡vwhÏ+kÚxB$ gË0öj>Á€šPgÐϽZç³t¹‡C"²ú +·¯ywۀQO¯§¢[6ÜïlÐÊP€{2ˆ;XlØ*èVèœiÞ͵z÷±ˆë­W6C±‡6:΂JØ4$u奴ÛZSLœ^¡ìk‹'¬/OC{ì>AÞÁÒ€^€­±‚hï¼ÂžeºVì=Ã2»ºö÷o§W,ޝ½Ëæ}4ï&=·¬æèDÅÚº7ØèªìizQ¹Ø|î¨í÷7>#!ÞîªBõ­¬s:MØuÑï¾;í‡Ý’‰ŠåuÊÙ¦ØÛš¨©¶9³ns&!£ßoª÷¬÷ÑïX͞@4è—`! .Ãè{ÐC콦®:Þöšóà2vÔII÷}Z}°Ó‘zÚÓÓ+n¡w`zEɈôSÅßYŸÞúplÙËs #YYYšÙV6s…(ú v¥íÈ:›z1èkØ*¤ƒ0œÇl¥\f†#»:+KÛàt¶>‡O¡4d:ÁU#XXX5#YYYP»:¶öí'·ëO^O­>¢éŸx7ZØÏAÀIU4Q®ˆP#YYY–VØi­Ð#YYY˜:Ī•öô—™ÉœÊu«Zƒ¢ ¾m'‘æ0¢©Q¥·ÑéìÕfÓÑ@h:¢š¤ûÎS¬Q·Ó äÒ¾(í€ÀÒbі(R¨èÕkSA¡Ý€TÕjØwlø˜@ª *R§³$)(Aë>̆Ûîï6¬* ì÷c±ˆ¡]Á·D‡NÛA.vtè굪|£/`4R‘*özâŠ(BJzûž,kÖ¯{¹@»=šAeFlօ<›êéKì0#XXX{°^˸hV¨íœ&ŽìdË ètîÝQ³4ÊÌh:®×]ҍ'hŽs(÷ÛÍIIê"á)¤Ð!¡0ƒS€™ <¦Lƒj˜CM=F˜êS@A$È4ÒmQ§‘¥=Oԍ=CÔÈÈhi 4h#YYYH$$ šBdÅ1 '¤ÕOô™€‚žQíShƒõPõ6£M42“Õ%"$§¦‘¦Œ§¤õ6“DÈ<©šõF&#hL ä i„¤H M&ÐMO&†…<4&Mª~F §ê™£&¦š) 4 ˆ1Fšb&Á#ÉáFFšd@Ѓÿ ÿýÿiÿÕòoºS#YYYJ°ÌKìü?»åîf`ªIŠÌvf`ÐH‹ ?…ïè¿áãrw×5B¤¦•‘/éÊLĐ¶Â¿ñ2$b«þïú}ÿºsþOù?ÇÙ·ý»sÎgÿNŽÿéõú<ޟG³µóA 0D‘Q"T+ìë·ÿgDL¤î¸3úÿ^®ìiüÞ«š¿ȅkñ˜±øðˆ¢-VrŸFfÞheúõ?E\~§WÚ¹±~~w€ß¦\ìÁº¹K À½ÕÄ éÑdþsHH“Tú»sH•¿ùg„Š:ÊÈÑÞf 4‰J|ŸÝ›bš„›ž8k®“wuÐëÏɼ¼‘žØ¢Š(¢Š(£ÀIUА9† ñ”\`ª¦¨‚@¤¦ ð8ìÎ]q8?÷ÞóÙÀgãþýϪúGçCþç»~úuÏ·Ì·1ˆN„m<ià‡ÿޘ¨^5¦Ku}šuó–绝Ç]î¹w¶±r‹ÿ¢DFiç….2Lx?/òi“Ž‹— º9MgDÒÿ†ëþíjž!R‡û·u×ZÒk·JýßþôÝî¦Ì ŸÜƒß‚©d=Ðtè‚ÿ挈F¤1îF÷å[Ü;wԍ“]+©ª¸¥gË9‚«³§)8EdyR¥=D~ê*×1ˏÿ±ŒüH¿ž’×Ð*ºÔFû¼ Bo奼VÉ7¬ 
]ük†Øs»,”Nó„n²ŚƒÞ¼|×%]u„}žôT;¾¿#YYYA|t¾ýïgü_±Ä¹ZB¼™9~*¦¿”_‰t|ËÄ~?”nÇ™™ú)ËFÝ&öïZy¤%Ãâ%cŽ{·t0PFäÒ/TíNNC°G +IeIy(¥o(OšçŽõá«dc_eÂÆqéíã¾C§¸âìçÛpZÓ+ ¢e7×ãE ¸wü×–J* ªžyáìSzå}öQ¸óæm¯Ñÿ•þ›Ñðû½oBLêHÂϛ!õdû=·Nwè¿¢Æwå£OKÝê•x¬øx§>6¼`Ü8c» d„„„„„„„„˜´T!Ûî÷{½Þïw» X@$¯11ûnsœç9Î5ÝÇwVfUn®3TêL‚²#XXXó|\÷´ûã¢ïL”w}/ˌ'ό3 ùÍ»?æBfÁÿƒMaH«§¤!ÉÿÛh4~Ÿcí¤]N9©€›äûé$¥”áIBQÊ›ýÚü¤Á†)ÿŽ ÷_"5ìQ1Çãrÿõ8Ӛ`ëZ+d]ÿÚ×ïØÞ¯×ëõúý~¼9¶ôgŸg п]_Ìð(ÀdL4T²~gr@à š ɸºà‘i‰ &q+D.GœWšBˆÆÜäÓYÌɈ‰šÜÓûÿ2 ƒÚì÷–„[Ës‰’$bf rLšxÖdXÚ§”6ÎèY²Ù§ږöH¢H‡%lm×¶w’^éï^ç…ùîýOíÛéoÓ»½ð4¦ÝC‹ˆ-s‘iäZiwddV{3zÊ;e#XXXÎQoë;a(4b‰¶ÆÆnGX#XXXF„ƶÕ\35oÿ²˜×4ÑmC°ŠybBˆ Eämæ)ÕI#YYYD…<=ÚS{Úó$w@” áòJàönÄ+t¨ ¾ øÜì4ÔÓ˜!«§Ù»VcÇÂpŽš?/󿊞Ì>Z/žm?|ó(È÷ç¬5ð¦×3ÃÕÌuÇϾó\QU¾<,ƃæˆ1ŒƒmŒ¶V$)ç®õ±NÄãQ¾HmMhL,;€¥¡¤$É 'HRSoô†’zT‚¿ú­I½÷™ ŚL-í¼ ©]ùË¢·ÃtÍúvíÚ$ ¡öѸÈÂ7,3 €Ãá¦øRйêV‘¾]ƒp£Ë~M›™ !!,°Ã$A3Âç ”)£8ó÷‚‹ÏݕS&™ã¨wpÓÿöƒÒ¤XÀ§ŸžÍäÙ¤“1s0ù3 ]kõ‘ˆ ÉÖýhyPÒBYa†I ¦˜Í Iõ©âõAö§¢"¯F6‚žz½HNcF¬gË´/!Ï®¯HhŽ!¶†„åÐÖ¥†þï. ÝçÎôýñ?1óî®Ñû|é>œ-d9#T<è>ß»Ø{U˜ôæãYïͶjç B&{Ï7«Ü}#YYY1\«_½*JYÝ㹢ΈÇCF¿#XXXÇ_(µ/C¿g—ÚWémñ0ǰaùµ5êMýP#YYY4ià&>Á:™&ùóÙòØÒ%Öx;Î;nÂsV³iF@áö)mv¢b †–;å¬Jeºì¬kál#±ïãS,ö©"]“ ¹?ùw…ß\Ð†Ä ‡I­¢6â/ìùzë\õ<ú¨Æ7~]0l»N1%ZÆãÝí"?·º©L̖”"¥Úÿˆëtü¾|8eh¹òƦòá%읒§º·¥0šD¥Ü„ðEÅ$’’#“É4&žC‘wLˆƒº Û§hcY}Ö .8¿¿rø‘tj[u—ßïó'Ï>Š3ꕯÇòÎ$ü«?ûúNçì#XXX@Š=7q*Þ/Š.êɯ‹Ó’„IºPyÀ”]u÷¼ÊJ炉vdäü¶çL‘Uõ—Öî{Þ8××eåúªÃÿÄ­ ]´V]pв\ûð*ûm—¶æžYPO¤ÕŽ«¸È†RÛ¨†ßlöú{Ñ0ߖWÛ¨x¤Üê"ç/ J§:÷ÞEz«5šö«­¢ÈF&-º_(\ý¿ü A]œ¤qƒ—)}wôÒÜvۍ²7¸¨eçé|³w¶BÖè˜ðÅé*ݾ”Ôõz<1VxÇ·œ¯E9UUJck.·gûzœÎ޲Âй´vQc iÆqËþ<g¼Óïw·£r=HfçÛÓ;sœQ1–é‚kyú&ÄÓÿ#Î~ȅ¶ƒ¦6Ù§¥#qãóƒê ÍiŸ—ûÜ:Í¢2c7â#YYYþl6hOMÝÓeÉvÚ`ª4½Ìn°1¶ië&X˜Å xÂc9#dƒµÊ€]\ôbŒcOcÕúYOi;ZwÃ1g5W¿#ý9{†ÐWJõü3Mùî В’ `‘m¤îV˜T#XXX£+.D<Õ5#XXXk){ZëBÓ&¦õ.¢5žšÌDzâ£]…†3VN,°ÆßîwF‰R7¾tȖÍwå0cU¤¼™ÒÜq(5#ÜW.iŒgmI Õ£le§#XXX¢EîwÍìÖ»óqô^É ]sf³—_ÇË{0à¨ÒÊu·³:aÎGP+˜e°®7ðp¼§Œ.+©Ðì5KA¸Pga£fM¦#v"ëµÆ´úHkðÙÎFäGecoU›)†¤f5}Hîå<ºlWFŽBhIMö!ÂVĈ‰#YYYRé>!ÀFêE¢3ku8î7â«o (ªª‘¡C» 6X öæŽ6VœôÖ|F{=½ ðƒá £=§Xm[­VØç7—ÓÞí~Ó½«ç×Î÷ϗ|P_F±ý:°8·—TԍJFAF358J!Ë3D±±{ÈÈ¡ ‰Z+‰“p:hR'¥±$“îã‹í¸(^é¢8£¯¿ƒƒŽ)MH/a)©)T-ç`RœH¨“ 
Áàe³Ýùé΃t?‡ž5€©³–d‘ÿ‘Ô²›õ¡Ä³¸ÜDuùöq¿Ž‰>œn¦»žº¬¦R¦=¯'Ðö¾£ºèù¼eÏt¬­7þhB/ùÈE7mÝ ÷n™0vvm#YYYÿ}ç4G¶›ß·¬¹7yIŸ¯™Kv¾”$K×Ý0©R¸˜û–õ¢©ù”åÂWh°›¦GRs€Õ]'ñPSÁ!}…™òõ#ºÙj]GðOÒò±·Ýîš~NM¤ytrµ†%–.$ee¿¢Ýç»oeûoÕ~[ù¯æí;ŽÔÇ8Ö¢aöY{™ÃÚ Æ;¶& Š‚d„Xݶ C—[²EԜ ·\h”û×邸F2±ÔÒ~+ˆi¬ÍZs‰…ÌܚÖþnܳ¥Þx8z@ÛÁÿ“’äQØÝdªµ+‘~]¼ˆ4ŒCIÞÉgҘk+ž;¡¨¾:VýŽhÆ¢¿£Ý±±«Íåy/Ë]¦÷](…ü W?_væi £µ¼ßÚÙ¢üËS¨„üR¦ðò_§Óµýt¨¿É@Yíôo&í#jM&`ÈĄ _~¢QNCYæµGüKüËÒf¤9¾„„˜Ö?G?4I*X-°%åȚb¬ó.$7j^ZÅ8œ#ˆå ¸l ħ0 %%Kû=qk§¶Ú821sù!º ò[Þ¦dtÁHeû¦%b„"t.`Ý ÷(x»¡5å̓9a#åŸ×å8Ûêîo#YYY—Oœ•ѧVÉ1´üˆÙ’ºF _€ïïþû?ö_‡¥öG~èÿû×òm4mkèvÑퟞœ‹p®•8ªÆDðÁrKÃ÷z"×2ýԅãVïÃëråÆNjö;:N—Qö¸ÕX=ÿq ®~S+âºPٖ‰Ìýr"¿FwÄ{ðç®äîé^1£HtEÂöÌý¡cú1T½Ø°­=­ºÌà9‘ù'e²ž;míåÁa„UU٤ƨß˳ñåϟ:ߏî.ïĐkëd,° š‚ §ƒ¶S” ÷Á©þ³î\ß܊8¹Ùé,á”/´sú\MÄã[ãŽ»¿UmÎoS7Ã<®kësÂGóÏ?f1Så¶Æïš|´"CRCܳvý¿šï<pÍùäÁƒ!bÜÁ%ùÔ똸AøàsŠ\Öÿ*FúRÖ­>°åwo]‰Öäþ領Ž#YYY†"èîÓ· +§É#YYYN§#YYY;?YKå…2üà4ÁÓ̐ 2lSÁpçη°p;¤ã‰&ÅÚ#XXXT¨f<»­ԏ„ßÞª¨’0?§•¡ݖƒ_Ö¡¢w,¢y·=¿ 7¾Øøa„-áÀbhÏ.ãXéSF‰j]ý I> Næêb!_‰¸ø±‡åÓLÂBތZïCº:Þ òd¡‚Þ:R5l8 #vG""=\m¡sÖk Ùñ]SXN܇·¤ÐÄÆ{Ú'èŠ5ܘ&ԝ€^_˜hr-Túñ~¾k€:1Àl_—™éŸ#YYYÌvpûÚK¨úï…ü’ƒ‡Ìù“ÑEÍKYHnB,Ž}~ 3€Ö°]v·„6Æ, .;»ðÈ#YYY+÷5u£Ì•:ˆÒmÐìéø;²FŽé0— ¾-ú/ׂ¬«z€éÄFvh9Û¦¶e|Ç;hëxÀ@AQû¤Ôúgoæ3"[â3&¸°óû1_úuËZ„¨óçî¡©÷8A4Ï­î™ ‘I$Ž[¶CU%N0‚L„Ov°Ø¥Dֆ]™9â?Ãïxœcçêç==M<n§xuï¿´ÌDC¼u 9ÿ›ã¯Z}÷£žü[[åF•Ó*½Æ9þâܞi0~# 0„??ÒF&? 
añÇMyü!£Â#XXXÌüߺ˜þ×½òÿQ´RûžˆÊùê#'™®G*°5Žò1ˆëHs…T¨H¬}ï(UQu" €jÕeaçñNmaû"‘÷N½ým崈®¸ÖµÝj7ÔõV§ÓuX’$Ë¹Úè”øÖ¿và ¥Àº/À»áyLÔàp‹Ao¿?™Gfsñ7MâçZ!GJ3êxJEC.3›Ä„—vÊ«Œç7RhWØ÷ç³Õ .S:ÏeùʗÄEÝ:cº£“¦1å†Â®‹®K?éÒ×ÏÚ®5žB,0ÑÎçÅÙHs›?¦54ˆÍ £#ªÏ< Ÿ!Þ1¤6)ál¡*;xÛí¾ZñR.s·S)ÖJ?ðUö»÷Qeûgª©—ßU/׎>“É‹MÄ?4(ž¿ôÉJ¤wŽHÊÍÛ:HÛÝRً=k¢œ5t«Ÿ ÊØt$æò+Bó#YYYêµ¢E§ïû9‹×¼–Ê¾DvšŸ_Ž)eç8O.\OÖs®ñÉáÍÄâS“»'ÙâBèˆê·º‡Çf'À#YYYqEC´F/QÜÛ|Î›÷qó-ÇOíêø‡n'ŽeàzŸ7Ê÷ЍÁ3J’%ÆC_û?/Pÿ|¼N¼+ë–ÝÇ¢kG¡t#YYY‘ªÇ”Ëß"éøª¨{zÍW¥}û³5—‹øÿ·|›MÇçB߉¹yÏÍb~¬È((‚¨$¤é^å­ç~ðÑ=KLi/îtۛOpܕï‹ x·DÓg†A§Œõ9(Þ"®²”u¡xòùoLHéòÎ$8ÆåŠ!ޏ«c’“1ʔ̽օZ5ó M N±“g=ùö‘ŒT"qã¸T¼IpõÝUŠïók(6¼öfú5s²J¦TÁð^ö6Ȧ¨þ[˜¡lÖ «ŠfR‚… A³2P[m÷„@¤‹àcºBBlшË@´Œ^ŒÊEÌ¼Æ1ŒcÆ1ŒcÆ1ŒcÆ1ŒcÆ1ŒbŠ(¢F{®/Ný¾ùïã@Ìîè8ÀóÖ³3"2°0ԙSn°Â<^ÿ×zØà9ädWхjž‰y´•Ì•žŒ<·ßdŽåÜ’É¥(¢’“­°Ö*#YYYtú~W#XXX%¿Tí8žsfL4ì¦ϬÔîWíß-¦DTƒ?5Ƥ$h@)@h"îŒ ”3Üøž…?¬%ˆ¢£ï1$ëQõäÊY§øÿÁöý¿Q.ÌT¤üzj,Á3#XXXüÉ*€S W L€þ¿·ùÔ9‘Ë&b¬”¡¬xŒR4Æ#XXXŸ³Ýáå8n‚´¾’AëvŽAä%?_¯ÿÇþ{yó1L€°ÑßûìñÙñÈ÷À¨t„#XXXh¤)PJ)>¨°‚jB€(çã£L‡ç¸Ò û¸õiþÔ ¡Ä#XXX‹Î@úºz¡ÓĽ‘Õ?.#XXX®B‹¸à„³Ç‡KÂBÿ,®³Ý@ÓK·´Çú<@u®4ªle Aƒ7f°RF†ž­ Ó#ƒ¡%¥Æ‰«%Ç0ÈV’a”¤h’%¤û±D>yΦ}FÁԃÁ"HÇñGwfgâÕxqœ)ÁÜG„/|‡d¥Dù£D§ãBrÆPˆ‹ Ãù¸ù¿£ó}¿·óþŸ¯»Ù ^å#XXX‘O<@®`ŸŒ$$?·¿¿ààÏ߉ïøpÂJCõIÙª>Õø¬ø`7§5+HýÒ|R›”àãÑ|°kxœ OfT¼ÊS¬ J­8Ф‰ yH|ðtº’óåóP‹Y’e\˜Ö•È9FK®A×®"„rlj]Êäã4'ê;óÜ  “!9B©J) c‰Œ£…“çòà&åíNP#¡Båä •zfvf¡h1T•0…*!"^ ·1Q(N'qJ²-II?LúÊ+øÍÀ@Õü'ÐVÄÚVÉ÷æ–jï;i©M©êÊÁv5ÚvÁ‰KG~hB $„˜(—d&IŒ‰HÌ÷¥È))4bAA†cù`]DΊr…×M»®¸WRš™WKn»«éÓÛIEEÝåÚÕ{DàŽ@PÀš„<Ѹ)¶ùq×m4ŠéréiØÚnÔ;m(ŠY2 ”ÈÆ%'v«V×#YYYk›øk˜–‹.ÉMź–á»\BµÊÝZùÓ·|í\´k¢4PœL JV Ôæ`cď=8ͨ1’€2 "‘¥²4÷kZ¸m°FŽæ (BikPâ€Dˆ©37¢PLb„)BdZT&H(9OfAÉ#XXXQ$`B…fF ÇÃËÔ¶¨ÇŸ‹ÃÍÁÉÊÿŠ;U'$ߦÉhEoõçÍ7kôÐwgLÿޚAHoã‘]—¾ü?OèÙ'ô\Ïô]f­Y‹©ÖW—{ò‘ûãký°{Eb±fn“~•0fmuAœñ$tZ`Ù] Ô&‰/Àµ†ÎM)Àˆ5va §g.qÊK;ªÞ´GmóÔQ[v°ª¾šÉ¤g ´™QØok††fënÖ}ñÆqÏ.Úñu:͚ƒ(¨?½ÖV–üýsÑ©Èû»¶cØÔù.¹Êw³´v!_³sÚ¥AÈ7ü‘”Ú[ãāHª ¹‘ùp(ßZˆ!HQœ¯V®$Mµ§v»5ÌÅ»,Ë|ϗòýI¨ÜjÃ0ý~ß¾‘?ô’jÈo¨ûyþâ-ö&û–ç¿íŸKã×1š¼EÌQæÃ;’ïwQgÐD°¤’½û¢GÒó=}”+&Z±é¼ÓXÇÉאàtû[‹ëÓþȨñ{Žîâ£~5bººË씦HwgŸÖF\dl©•¶•íùݛ_Mû錶š¬œë $M73 ™Gu˔å 
L‡ý÷sQ›ëý\2a´8O–l1GF)Ûë¨#YYYé«"ß:Eäòâ~›S#)Áœ…Òo¥37ʺà[é»5ä·Â#ùSãu(s.7÷³õC…Ë$Þ#E;Ø©µ·Cwñò¾%‡Tæ± ëz„Rù©¬v,ϚÕ5×á,e{l—ˆm‹V9sÊES"oõ6G(æ"qÍcr¤>.^#XXX·C«ð|\±î' äݰǿþ'açü>lYì£b.ôÝ/q¾ÈJ½p“p¿g•Óq5HÝ4oα—]Ö½—¶÷k˜ß»«dˤW>#XXX¬ÀtùÝ»”.å‹%~\÷Åð«D'²vǎúœ/˜ÑŒÁƒ•ù6Ô·ßf 看BªÊƒ»RWµvÝ>‹csa`Ø'nÈKkºéÝï‹.|.ï08Üáûã-m$[ðR"o’ú@ݔvóÍ1µžô?Á@ ·@ˆ’7.Uq€_%ù×(:/qÒbô[æ0 cááPðMÉc#XXXý{qß]Uͨ˜د ‘ù–jíU ]M¤¢ˆF‹è&ø·aÕǕ¹väg61è\Feü>3ü½ýë½oÉ8~ BQ Æ‚_‰äf<S ޖÌðíóh?À=~Þçùyöéî;v V÷[7²)ßðøÝsˆ;Pé•]®QGo‘kËêcañŽˆ/O~2#Ã]¡ö;Ãևöp%7"¸þÞpœ—½ú½¿‡áJ÷Ó;=(•Ö¤%æ9ø|—Ü7Á»:ÑÛ9Ô¯P t{ÊÂ=jM`£SÑ*ìݳa½>^ϲ^Œ½†;.ÑãWv3#èD¹ªW<‹¿ÅÑ˜ >í\ftÁ½ý6¿'2¼Þ›Ó÷DJõŽPäúÎ#XXX§™›³Å¬`\ù{ÁÌ>hšÀgžìbæõ\ò­XQ†M‰FÒÍA?¾”:õ¡Sc޾ Þ :?¹ë”Fô¡¦›Z;ði)r‡Ú†n~‡Ýʱ’´ð~ôÑFBÓ¸ðy»çö™:äÀ̈́›Â(<Ê«JûÜbïÒ°ßEN¿¢XµÄ0»²¦}m#Î7$’BH¤ŠH¤ŠC#XXX{—;ïMváVƒ³d`í«Ê,Hݝ½Ò%’Ç~ý¸à-BL܇5ÝDzãØó˜òÚù–¹kÅú²sä]öÖx¿’å±¢ßwYöyn×Çi"ʵ₩P¶˜A¸?Á¿ûù܊fÂкzÞOÛIÂJË æ2z&¢ŸöÈ萿Wïÿm\Ó0iQ˪ó²gÙމ‹n*R¬($˜dDn{LÍ$ªñ±›ýwë­GÅTLý,f`oI†Äà ¢$³_’O*"´ BüS³‹]EÓÔ^‰™€f”ސ##¢`‘ë´ÏÞÃ\!‡òs-»ÆeÆ9:3SOºí"‚înÄ2˜,”qx¢#XXXŠI ‰+b7¢ïé<Ö÷T¸\û»_îŒc™qæfffI?áû?'åÿUǙ™™™3&fIÛº#YYY÷ƒi´7A žÃ1˃ÂPBƒ‹Œ_‚qvWª›§ÛKÑWÒzú|¾_,Ì̸ó333$Ÿ7Ãåòù\y™™™“2fEÞí8bû¶rާ>H™Ô éÞ(Ýw‚n~DëVfAQàIÝ¥„-IM4ݦíÜ8oálºûف¸Gl6níÚݐéæƒ Ø~~7]«©xJúbÝ0ñ’½»ÛGƒn|ó\I õãô™ûµõ÷ákփh°ÃŒÃšàD¥æóÖÎ@[Ǔßf¾Ÿ>ÅßӒêùÌãÐqçêÂhGòHþX5S0̬a}ÿ‹^>Ÿ~Ÿ‹ô¿Ò.ÏËö úEþX¼^{Å÷‹qh[ÿqÇÿLü·áùõý‚û€iÝ»…k/SB­3rtvéÌ:Šv°óUM¨)ë˜UHÔô‡9ròs6¼Ûç¡æ7çÏßO7®ðÃLØó__H‘úO΢4?0Û§#XXXBC‰3~)å–̾ƒí›á¹pnÖ³ Ž‘£—þº×òS³oÈ×ُ@†j´óªÇ~Í£ qúµ¿4@êHæf½? 
û?•x¾ p#YYY˜ 5›f$¨¸ÜBÒâ#YYYÊPßJÕm¨6͗b}\ñŸÃ¡;É4U)Rx ÈMa2Qrå{E邞#YYY›ÃÛ²P낗âl/‚AúsÁCl~R1…Ø;r}•ugzT„îq™÷k#YYYôs_õ0ÅÛhÎÛ¥»÷êDíÃsE¤õâ_ÿ ;o<ãOÁÄOð~~Ø_œ/¶ïñó}SëСïcõ>/‡5?+-•–ÊËee²²ÙYl¬¶V[/ã†1±±±±±±±÷>˜úJÿ<J+üÔKÃ(“F !©6ÊVD#XXXfîÒêm§>öÁRûg#XXXêä¢P\O¬O~q!֏EìÃvÞÇC¹¤é™¥`ýƒ«aø£“I͈³l•Іçq7‚à#÷€Ûg«×é8ø{eååâÀß uAÁ!hÀ ߇TIöSsC§\Á¹Ç~}òx£„3m#XXXYá4ÄmÑÈ ±9õϔl×@‡fÑ/a#YYY dÇv;"#*QÛ Dý 7”#XXX~ÝK¨ÀÞZ_ƒ¼¡HZµzÄN¥×-Ñf„hö”µŽßª­ÃXFëKČc™:~EŸh 9ôjÌ$hì*ž©Û©NQOЪ!ys”µvB…Õ @Œ©Œd¦Pœ õÜMŠóöïŽYStô‹:ï½›<@o“Éw]3Ô'ÿ¢†-ÝqäÛÕf):4þnNö$‚¹.䈑Ɖñ™=\f.¿mâï¢xþ­Kn÷ˆü5m œF¬ÑÚ~Xáí¼Âtø™~î?UÙßڞø?…Tf„ed¾üDåÚùµ™¯ç'ju{])ÛìÒm%úÏvßfÊýñóâIÀýñ¿Ôgr’q‡1ª=õÖ‡[ÒÌlÌŘٙ‹1³3cff,ÆÌÌY™˜³31f6fbÌ ˆq«‘ă‡ËßÉ5%¢ªªþ_WøC0ëÜDò·3³Q{Åùä§ìIù;›³»ƒ‘¸&†ÐïZ¿éz‰Žz[á%ókö;D¶ÏŸçþ9&b6Ãæu~ñôÇ®Ô)Km!#ÛjÚª-H•°ajǁˆËB¥j²Òª­ÝÆ1)j¡R¶×e®°¹–ÖÏHÖeÈäm±Â8G$„’Fär8HäŽI$„ÂFärIhÙ#ƒ“Îä#Ý_ȏÑ$ÈñôIcèéd˜Ôd¯$m¸I 1G™qcLi€ò$"Œ±0ˆˆˆˆ›÷¿¹·Ï¯Ç¹÷i$2ºÚl»ãÔ.a–¦N«´r(,ýŠTzùþ¬4C/Ë0TÌ50rÁ‘!zP(#YYY€šU*”JHT"Љ¸ ¢**…I@€THPUAU*£p¢qÒO7¯«I1æ$+2¤ý5¯±ªlKjž”¥Y?+šÄN%³\IQgvÆ®ŒœY殆"›#YYYGÅðÏÍáԚVq~Æý+c#YYYû€×Î×l½Þ{vÃ.—ÛB_ èýîß^[׃{šžÛùž¼¶¼•I?šî(ó<ïÁ“ÛLæŸ}É®&­9Ŝkž®t¶è:˜âJÂîùÁéjÔI§G¤°»³'ôÄ÷€Ý;Àkmã]t{¨S8e¸Š|ãÂ,óaŸс½cE™°ÅÝ#YYYÍÝÝ÷ŽÃ²Ý#YYY~¬0؊; ©ÛŽÓ|€n_˜aßó‹ù…§Ãøþî°?àa|Å·÷ð³#YYY€0SÞ̅”¾Þóœ†äCÜÙ¨ “äv=ŸM·äQiÈã$ $å˜Adc{låôgÑ£~ü|»:Õ¢ll‰j¢ƒ-rw¯AÍٚž^~B>UÛjmÜLÆò2 ÙÙöŒðG¤‘1s?taõ­Š¤mR9Ëba²Ó Èw’#YYYY—|=´%ÖÓÒôM08Î{v N¯cUÕ¢ÀóQ8ÔêLÌnÏ ;Ô×A_΀HpLÄÓ)áÁï®êD¢mç·ôÒY®Àðº€k5o{¿¨#YYYÛ#|›×œ_(­ˆöð‹QÌL½ù€Ô%·s÷&®´ß#çž2"¦ÃoSÖ^Èö!¹²4O»¬î¼]ž‘[)Iåôû ¬iBPƒß·H÷k¿Œö}‘¥¡ûøåŒÖ§¶Œk1Ùí>Z¿/õÞîJAÚó¶ÿ®M'Œõ‹ÖØ|Q£TDæ{nÏH#YYYÛhH>Ðþ4/f„/ÃÒ£h÷ÏMœd&>¿Á“CîK´•ZlRÎ I›ÎS:ËW´pa› ’EV4‘ªI”ža*±æ³BÃ¥,5ÄÑ·wær)¿›{¹õ³ÐZ©YEiTøü¼ÞG<J+%αËG—ð€ýÂú…¡~Ñh\ quº^߬^ûEÈ¿l- î}Ó§”ÇÏÓíòÏ÷y”}:‡# Äô¨ȟ¯;;rÃ(Ä&¢ÚŒ÷%\Ài•;_—c›Êå¶/Sˆ€ õ€ùðŸáñóv¶w‡jUU_Gnl2–”°Œ$±,« …‘…‹ȾÆþÆ|lžÅ*]S¾™ˆï4fD!¡y+¹3lu¦WLƒä¡•$½ùuçÐß®.2Y•+ïèý‰Ø˜hù&…þCPÙ1PÄÖnø±Ñ߃ŀ °ÎºÕ¡*Nk*Bq0¹š¬ÁCAç#’: us³¦||¾ïÐ&­×ÑÊ·â)žeã¥âÒö÷û+ƒ"üZ.;ýô¾o©´g¹2ݼ)¾rÍóDŒbó2Çõæc2M³•úà&©}^Q1lT™ªìðaw+BkŸiJ)NÉAÉÌ*„SFŠ¥DÏ MšÿtjÅõ9¾ûûÀjö€ßÿį¯˜ïáÑ×{·)¾(£!-f:ÊrÞ|à3ÃYNù€Ú]ºâ„±áÊ=1å.YUåÈI\u!Ã\%¤™¨& ÔW¼a÷¼h¿håÇX鎹¢.ƒÄìN 
l{gi5È.:oT$©»â•wmb4é‹Á¢+“;d΋‰Y²'ƒÐd«ÓIºPżnѤY¦¯§^Q<jêO”‡”©E#YYY½öeÇÞ;Ð.¤û£FMœµw;+•„¡¾}TöaÂûqAÕ5Ú€ &d̒²ÃM#XXXPÈÐ?¾Jù¦@ÝÊ=|»´ó‚!ˆO~Šæä‚º\'\'Ñ»b[ÇâCsˆ,IYåm4 ›UѦZ¦µ › ‘6”d!²¨Ó5­mQµ#EÉF˜Õ­iCe(Ú#*Se#LjÖµ!´ŒHÚ¡•CdF˜Õ­jCb6Õ#YYYŠ#XXXcLhÖµ*چÔT[$4Ʀµ¨!´#YYY$(4Ɩµ¨¶ƒj«H#YYY4Ɩµ 6Ò­%VÊ«Li}Ú¶eÆ´éÜoÍT샋Kš©ˆ“B–³E[UhUµ+dU¦˜ÒÖ´¥²­¢´’¶R´Æ–µª–Ò¶©i#XXXÙ¦4µ­)lVÒZJ[*ZcKZÐ[RڋH–É-1¥­jÉfˆ±-¡eRi¦4µ­Ch²–ÕLª!iZւØ[Ji ²©¦5kZ”Ú¦µ&¶JiZÖ¢l¦Rm 6RiY-0Ô3Xâ¸Ý)ÂW§#YYYˆ–BZÍTm&Õ&Á1E1©­iFÄÚFª#YYY•cKZÔQµ#YYY"6HÓZ֐ÙA¤†ÊcKZÔ¶†ÕZ@Ù˜ÒÖ´#YYYƒ]"èEpÂ0s1JéGd­1¥­j-•´ZIl¥¦4µ­E´¶©¤-‘i-kU6-¤ÒSeM1¥“L´ZӅÆéN*[ñJl¤ZHµš£jf„Âڌ¥i¦4kZFɔÚ%²„UIA QE$ 4#YYY#Â1ÌÁt²éHWB(2AA d$AQR‚Jˆ È)"T* …D È)"*ª(ˆ2#XXXH©`Š(2 ¢2ÑkN'©8%ºªk'MTœ–”ˆ5Hh5‰!@-TšJ§ZeªkN.7’qTÎ$˜%8Šše¤Öœ8Þ¨âS(˜”œ"šeªkN8Ü'œBiQ8¤še¤ÊªÛLÊ$,NäÉWIbש)¥#XXX)ulÝ=Õuµ0õú€¿X¾üa~¾Ñ¿¹…¸¸ø²Û.@ÎÄ7¤ç®\wÇ’¹0;¾Fcw í1›&1Æ_n¦:¡ýñÈ""’"""")"""""’"" ¢Šš(¢ŽYĀñíî7îßUîmƒÅßîÄé#Àv`“Äá¬ó䣯~ jCWÊϖ{zg4µÙ³™Úî4›%‹$[&j‚›\fšgfÆ{^{õ‡mßKÙ¹Í_½t6/ ¹’…‡ã1~ïQßå"}ôCeTðC”q·fÄi8Þ )§ÕhÅ×O'£Ô¹ã¾™›Kå—eð³G!–ÄâöM×WPk¦KxL:ÓOJß*«oßñïÑÚz*1ÎÝÉçù’)ú^|»;0àîö!SIŒ‘ÏèÁ5nrÊÌ£¥yy3^®vÙÝ\ìæ¹“H6#YYY¯½Š&AÆAƂHBE«®Æò½Ý=Û®÷ohÞt›–„É]ݵ̝Ûã]E^nkwnG+ºë¬ë¶îv‰Êå¹Ó¯.•FwYžqyvYÝ]Ìw]_žšå\¹°¹ØßjlÖÃë©bE:íg#YYYÏ)Î9D®ç‹±”ˆx]X‰¹cÖ’g‰¤Éªi'¯×ýΰAˆHí"ê±iÐ=×GREêã<Ùâwô:qÛ|î+b•/;KˊÞã¼ jéÓmºDڐÄGêýw~Ÿ—Ïæýuà   *ª«g?NÍØyœç¶ Ã;<ܨž qÄ5.y<^Nÿ#XXX>|î5¼¤ö¨Oõ0Å3ÃfpÔ/l œ•â*rPgŠãTÏ¿¯¸1Ïä³ãáõ(æp38³S¤ýï‹PW^‡vtäޖ•é•ïéuŶ† `9óÒõ÷‡{'Hèìé$Àœ½:ìLuï÷n˔ê×í¡/vI$’I$’I$’I$’I$’I$’I$’I$’I$’I$’I$’I$’I$’I$’I$’I$’I$’I$’OÉÍ|dcø~Ÿ)0×Ï(n%ÑØæM™ú}‡°½ 5ÀÍnããž<ðæÌÙ¸ÈI 2BRk÷Ý»­Ý×6£dNî†Iz7½žïCõ÷ÂzuíÒõ“¬–Uܚ4„·vùË\élÛÃ,&cêz 7ìbÆ ½¨í&¶ûŒücŒyòïùà?f¿Ž~æ¨eb×ôh?Î+ûÕiåQ±<î&¨‘Û`¸"_ÏV.Á͇îîèÇl#YYYecìŒ ?4áúùÿ’ƒß0ÁdR.?œZ÷9 “˜}héó`›ä`n9žÄ@Ë¥A„)M;À’–ªôT±#YYY~EÛ§ê}xÿF¿!òû¸ED&|ºúF¨¥ù~PÕª–‰’Ì7üK±rø(*ð«3³EìB;šžž¨Çö­â¿*@»,.ڋ÷#XXXÞ[|qütQ»ÖĈ·wê%Ïë×1Š•{ñÅOÜ_®ZŽS¿˜¥\ùÔìw:P¿iF•—Í$’Úp«ï#YYYO#YYY<lOuÎS°:7zƒÇ|nN_ORåvîsÖ²rԜ¿Ýu¿Ý®§+þ5Yaz÷xôÃO±ÉÓM«ÝªéÚE$dìR´g—‹¼R¦ßü =½¯ý2þélÜwÜÅþŸ0_‘ÎLt™¼ÓÙ¼‡eÚG2â!÷ˆÈB‘i :ëhv[¬³q6ЭE£Éý_=ÉáqÜÊOÃ/uáJ¯¾,¯ß'²uéø–|G/ÒíÛñ6â¹YÆtÝqpfùɓùñ®køLö»áúœA ühíBxáÓðB§;[’zˆéF9;TÓ· 
'n"œrþÜó<kŠÇõ†·.p \ý 7´"ÙêULÇé_D {øÏ;Û.^Ù;½.B“”c”üCNûñ/ÃH¸/£uZæÔ?"\5Cã§èÒZÑÆîó}<•ÊåÓcÓëž5PN‹ZÚ{ÜSûOoOåõ7öývtI¢#XXXQkÑÎo<$kgSÚnú%߭ȐƒR©Aá͓ú® 7Õm%å.2²¹kGˆ! ¯KîÛñgõÚmrøo‚ž3¥>íþ}aãX#ç˜-2CÁ˜4žêÍåҒˆ£ÙÓm®³“ìË#YYYpÓ'nwØòž·#ABَ‹ƒ¶0¡:8N³@:ݼ,> —˜vžxãÎnùÞà“B®8©)rh¯Ä©×¥i߇ç¾{ÝÿöΠ×\bªˆ?I¢SCÎK:—“‚MBv¾zf’œ#“ÆÙn;hóÐçfߙ[K»ôL›Q.tƒ­0ï,ñ¼ƒ©|.?L’´rôE™A*â7?i}ùòûäxæÎ.à©;ÜÄ"=áÿŸõÌ<;Ö@øóʨ'C.ßñY.Ô¸ð¸'Ƹ‹Ð%ÞêÚÊ;Óå»Zð†SïÁ4¥wCœmÇCxéïFãS«“j$כîí¹Èm¢qvñ{{8x[2«4U)ä’=qœã"‡t?ˆ‘ûP?ã:~#XXX;ªœýÿߟõŸ« å¿®¿²Q—H‡iÍå‡ã?ÙtoΗôø·à,Eßr…•[¡é‡9îýÿ¶[å¶÷»cý*]JÂ-?5|JJ–™è”%Cl½\..ûX6Öé5÷›*yßùø¬móËίÙó­f˜Þ‹ýs“ïž.u²¿nvÜ}j¶ó•GEC#YYYB< ù’h_®¸øÉC6ZÑåÙgJv›šÑ·œa?´#zšEŠÅ÷ò¶Úƀ\+œÊ†yο&åԎ’jý9ó·™ðÞÒ8nßóÉÇÐ#XXXV~õäüݞnä7ÄՋ¿¨ÑoxQ<ßy|+K‰@´¦v!ªBVØþhì±KcBH¥êMkúàUR÷/…Ãû(ø]€tFc…T>ºa²*EÀä¯BDªåÂQÕ(ÏOª]|®ûEö'ÊK£_P YøÙ>ÎÞØhòåá×ÒN˜AVúú|Ghé£aÔJb>_TçuÆ íÉr6ɐ ãõôö‡ ”(9°‰ VQ¢íÅuS:F#YYYõ4I*NY:WˆBHW ÆçôüŸÊÿ|qèϺ çøÏÕW­?§•ê0”Ïíµ¬*çdWٟb©¶˜¿Ñú¹½*^·ü#¿¥ÐŸ¥9 kÑ+’Iøªê„ò[çL햒K 3Ž¿1yjñÚíÙãšïãyéÚ3Óþ‰ÜyÎþn»ãŸ^ön•Ìp'.γ…Ø@xzßü?ͳƒKqÒ75»êBr1IA¸¾Â<$Ðàä;Õ¾yҜPÛDž¶~ފ®s Çई¿Ë#XXX_sò`FVŒ2C\ '¤8¾hŽÂxý3„ü·U¸ýhK¯Í9Ýf¯áêHÞy]d¦Çޒb<<wëR(#YYYiЧAH±Îׯó{ÜOðüï …Ö#A™—”“1oÄ‹1µ˜—C?;ë·fü~að‹¿®TëhÑ…†¬6äÐøXÌcÊ꺖-7sÒV’ËI¹…ÜA·w‘ÖÞ̦YŒe¯ÎoEÐ=Z-#XXX<‡$ÜÃY¼Þ1éj<#YYY6чÂE¡ì¡Å¸C™dÑsP–k†æÜԗ¢¦8à&'SR‹ö¼hûpóÙÔ¥gRò†gºñqóՓùá_ÃQqíòááûkï÷ö§Kð^£Ýן3ϼOO§]!  
e†&L„V`뺺öØÖ‰ Ú*è 4ÞcϛÊInuF1xÝ!Ícç¾!uë¤oS@èª`·R`“c-©nƒLì #YYY@=R¾J/ÙÚ÷$‡‚Õ®ù ö„?`ýÿ@|æ( )Z(v°­³½» 5 ‘¶H±µq”%£8ƒ‚ö[ïǤj”r†ƒ {ð<íɼß(íË­V¾ÙÌÇÌÝwsïö`}p‡Q¢îëE å–[³èXŠ*&I5̱x²r)RȁN½@§&Õç‘õê ð·<‡Q õ6šooáç îðóéMGdäžØ8¼’S¬˜Ã Cãò--M”Y&ÕCŠbd1Azf„‰"*h1¡.ÍÔú6?òÃr""#YYYà.›×àßó¬ß—^ñø“=T4WÒø­ñ¿tD¸mâå™Õə…Jç€Lxâí!lK¶‘]…Æ^£éh1}QÊ6úœ<#®¡ËñȺ½s‘Ù&‹ß¦Tò™4^ZCæwG»Xç³!0KšÑ ºÃo’ô¢ö\Òö율k!ßLôî:Pä}‘"¯Zi%ÅÎr þ¥vµìüä¹néÑü1ý½dÐ ÷ۘ>h>‚rkÈ@dD!²O”ÒYÔ³—®D~—÷ÍÑ/µ éã'DŒü¿³ù~ÿțVc@¶Àr£ÝôúÿYÆwgÅz× /àI•©d×Ý»÷A×'8úë}ÖæÏ3!‚dzóÌ ¾‚¯Q­”:ù;ë£S6˜»­Q†…½ª„!²T ìš\a6Ê3 ·ñ¨´`™5‡Ç(3®Á>L¶3Æru#XXXÏ<ëkêÚå畫¯:c¬¦Zq$öŒR0ˆö~ºr^Yk€í ÓFº¾[ßbš,¢©bG×ãÆÍqÅ7" 8++7çç:uÑî¿uõDÚþ¨tˑŽoŒµC‘ ֎uu)ðŒ—H‘ËEBâÝ9NÏ{x5W#XXX³#9RO¬ÍîB«ûšǍ èØ¢^;«LžyÊ5ﯤí·$\œÖ-(s~^X×­ä®Tqža¥ØyNú¤ŠÇÕHt‚¤ÕµßÙúK÷õåó{v)ycTƒa«;\KÖû`n2‰L–Øî¾ü÷;¦ˆ/qLm†Úè½?gê¬)`ùøñÅЎb7 ¼:glV@Joîݟ(+{Òó‹Ê_ŒnZ|L þ¾òÇý>.~é©£m|úwgía·ƒ[g§• ÚÑÍ oíݰŽÇÆê—iŒ $˜ð…ÁOõύð$#ëÌ·Y_PØ éEÒ¶Ò¼²ôJœ°6”26à£v$°G1 íêdAfž½D^jZ•*lÙefšR™hˆ&Q€‘†B #È´›2åµíÍìâKxÅQbd !•FHT‚&DJB–<U Üà7ëxKµ ŸL.Òdð“!ÐщQ4&ñÆHì\è‡.¸Ða±´Ì&hóg´7ώ2M XÉ&ÚÈA-95$žo„€aš¼7°½OAÀaâÔj4J9gõôIOgrÑpYl¢7Q^±Ÿ6#XXXˆíeé«Ñm±(ï…;ö¯[½L2r<yæ¾÷¥sçÄþ‹q,œ&‘O»ÂöÑï,#YYY;â6¨žýÎX]21Vôèñ|Lª÷é–X¬fÆ~­6©ï¡®!~,`ô¤-qNÔõ)m‹> æ¼§±¶À ™#*iô_b›‹®CUOŽÝ…7κvö„åö̦În ìÒEkl鈯1/µ;÷ÈÈÔj5­TJ#XXX#XXX#XXX"""E! 
Aõ/(àâ›*'dğSºNÌ®ÆeQË·žþº—SOt£œ5˜Â4OÇS÷Î7âyë.^¥=#YYYR÷º×‘#±wr½c<¯zÛyËN0¤÷íºe~RWêø_$–îQ‰TíÊ®Xñ|fßÎ/K©‹”ÓیoâWØüs_únøZ×÷Tå ñû°L 22²””²7ï}}ûäjJY*CI¦$Æ~J ÙózN»{Üýö¥ëã6b}le!òD#XXX÷ >S$»"ðx/ك’ŸÔI|;¶¼±ýˆ¢Ð;²$’„;³ˆ0œÓ¬¼«èX{üM¶ÎNf‰òðlVÕ èÉߥ]S7fœg_éÇRúãÖë¨ÃÚ©TÆô±|‰>q GÂßIØ¾L¨kë¶^z¶]†‘”çÞùžnH]Sr ©Ñ̨ý{\-<çí²‰w2­ ¶É…Ëe?3â#÷N}³íÏ?” OÝ:îbyŽî£9õÃûWÝ=’ÌnÎ8xÁÉá°*V’­öì^ÎdÕÚ ‡í C/lð%}ŒIPœ_u e>B֏¬CÁ–Ú̹㺸p0áßX *V‘#YYYËlÐsšaG#YYYºò‹ç­œry“Çšµpx@8ÑîovлcHÞX½Uâîµ^yIõØHÁ~qwŠç|.øþ˜B>&[^sٛjˆy( êˆqÃ|(GUÓjú‹’pñõúÇJ•»ÂûíÛH $qéÊÕђqߎܯ°±f#éí«Áò¥H||˜ñ‘vÎí¤Vy:³ß.øk~Èí§ØŠ|Ã0€@†L]Òs&„Ãù5jôåÅTËi=ûøTq_ŒP¹i)¤Zø{Ý¢Sœ¡‰\İ^Rä5ßJjk›Î°­0„ äúÛÈ씃&tL|ÈFQ6?ޏø0ØÔ!½™-¯‡Ø4Ezq37¿sÅä”ÃÓ¦Hëîgí„&F窩¯öø¸û´iñ:qãfì2`L͎+#XXXi7vR¬"o«†u¶&£Q¡¨ÈÔ`ÐÁ K‹.,80àɉ&$`N3Œa^ï.vö¡ÈéÙËFuóú¢J)„!&Iò×0~·HÓ÷9ÜÑÉYMÓI(’f…ØM ‹“VH>ªímêúW|?Šk á•4üÓ»W•Yñs°s.ÙþöžÆ#YYY#XXX½ng,?(”´åÌ&5TE}™ý±À‹†5õn4»ªô’@Ó¦˜æf÷±Ó¹Loƒ@Œh_V³=Ú¢òîS0¹'dݺãué‹¢#XXXehe|þD®{ç0­Ò"´çùÊo·s±×áúöŸ>9ŸJˆõ¨]ÆÖ*uï©á!çÉoàEQÈ´¸åB:¦á¼¦…U²êêÛBpùÞS<%ÅõêL¤”&Ñ^‡ Ç4ºªõ«±±1µ3^™¢¢™ˆmS+ƒz»nމËL4ÃÚa=i·}þîѝʛ :…ï•ñ…w„#m÷¦6û8éõzwû>¼ŒíÄþÄÛnt44S덭iD¢H>ð%°PPþ„­±$ŒVØ"ü¨¶ÅH?h[Y%þE-wvâƒ~þðú÷ºdYø®îí÷#YYYH¢EYŸ¸nÝȤÅT„?³ºMGŸÊ_EôY÷`þ”;Uʧ"œp“药#XXX-ôÏÓ´Æh"<ïµûš­‚ »ßsMV%$Cé„É#wÇÍBë¹0gõ”¡#êrxïƒ3Ý}¿LùýžfÑ ’A\oÒí=ç²\ˆ!Ãrß ]›`iSO• ¾“³4Óð‰y+ªBèzF½|¹Á ð¦*#XXXk9BÜdk‡Þ(†]ëž&b,׿uÖÖwÂÖŸâÿLHÛÂwî1ý(!#YYY;ߞeÛȹD*SËÙòìfΡuÞï7gù4ñ“l=¸Æ6XºgÎþ" ãZ8G­á¹£b/n×­÷lúëâfóÅâc;å~ϑãtÜòƒb˜aŒÄ2L“Åi¶9aH~?9ôcÁK̺'Ê#XXX[0ߓN ¹y8„umàØ—ÀºÙ<±yΚ¸þ؆õB»y@Š¡²º×å ÊæJóZJŽ][.3ڟ(í¾Ù¼öT‡[¶WÑé;ˆOmô©I£íz÷;.MåT«#u~ŸMyjÕ ÷=®¦þ0‘Šp¢pÕB)$’Iš8Áð5Å~ÅP}‡x?„Z?™Õ_»èƒðCèZ€£¹ðN¼lúÍñ®yÔq¾Tò‡»ue#Ǘ!ø-N‘æ«ïÅe­ž9¹K­Ç¶eՐ; ðËšËòrú2¥ä£ú»½oµ¿/P'»Ê;®|U’`µ³eŠÒöÔy)éĨñÖqƒFT¸¾g¬¤“(= {'èøúhr"’f‘ÞÎÛ,wÙjϳ7ÓêïÖ+ö #YYY[½¹ßœ¯5æÛÝÅ,#YYYñ¦nfß½A+A5P®vîÍ«Ù;ûéÚÿ«‡½k?'”ôæ›\í5Ÿ=žSw&eÛôx4n#YYYr’CÂ1NßlîfÊk™‡jP:©Y—ýb‹Ïë·3T¦OÝìVßrÀÏ8c!sȉØà:SºWp’#XXX&ÊÆµºeBÌçŒf0¬ñL#XXXát£F³ÍïÅìŸÑ fõߎöõ¾k¯Ê©rï¢fÄ<zæûþÝ7¿=}:ß^¢£lÅôþϚ6#öFñŶB€!WäHZu"ʧʻÓ©A>±ÚÚu"H'èM§R }¡´´êFTýÂÓ© _Ðm´êI”DïÇ×´\·zW\£ØêæâqSí픥8B.7Ozòñ{òVù@H¾œÆF®Ñ5ö¿’oV%cÅÁªú <8k8IÇm6Ýx£‰ò¦wf.b–â ñ×&՛Ÿ˜«ÏM>›j·]Œ4Yá#YYY™ÄÖ#:d*&3I:tÚdÓdbøóÓàiéOŒ 
ð;2ùþ~2ôq¾õ•Ãêz;¯=òß;GãË</ˆîn“œ4apË<LðÓç§”ŠfÐcvß*@™ÁáŒd"SŸ²Ô­ì%s1¶Ê2{å!ã@ݶn|ÇOXž{jüغ°`—Íkú<Ô3rT´2ʑœv¯hÿŒë\)ßÕ7ÖúFx¸˜1„Ý´SEñ~ÌŽ#XXXšSX]Y:˜à¯ïƒÓ Õº½ù|h¤÷ÞE@â%ÝȤ P±3_sÇ wõ‡x3ï¹±êE„!»ÝÙç.„^‚d1,÷Ã~¥Zq“Þ¸Æ+ê êX¼!ò¤býw:› NY,EHÉ$ðêÊ>4–u„jZFëžNî]î‡ʛp¯uŧôÃdð!¤ è_å}`“òo[y&Èn÷ýÀl`2c" ÆÜz‰HH<NïÔ=ÜxGéããÇò¾g(´3EÇf#YYYD9¢)!’H?¸—¦|Ê҇Øû~»»ÔÃgVĎNùÓ(¬´´£Jíå·]|hª„ˆ´¾¯L éé)œœE};ðŽ_›=U_âZ•=Tïå‚öÍD£iœãÎFxoö&~ܒd\lvóüœäMÆgC0b«i^Äv]){-4®Þ{ƒÌÎw­K¸sgë豒ÂXM€b\–¤Mdäe%+!ÈIõ¯Æ½*ʔ—úq0aI;w¹_UDHšf EèKù’ã1°/*´n¸†·CïU-¯D¨ý¨Û>Ù>)a?š'Iç„3ÛZ”/wE¹¸žmç·ûGÂ6ã«•&Hðîðýø"eDLrSœ— ²ߨù-ƒð«=ü.ff"x í´%M-utwï¥.#J ž7i¯õ­õÛ2úá|­²¿ß#YYYý^·NÞìËòvÃGK¥¡-cÇÏ{?gåiÃSüõ 2áJÞÚrŽ&Ãû*»š‰FËUòEkÀµ£på@åŽý€Ñ¹zŸOZÝ6ÐC}Ò¢ßÏK=¾Ú,éõNÿ—Q|rÐãÁ¨™C»Q4‘ìʤâa@˜˜7`ïýT[}knÁ`+a)†!æyHHµnÕµr ӌ¾£ü©åR,·âïÝsœÀük„I ƒ8[«jöÌqþ7ÚU¯#ÀLošåÐw*2 r­8âcûTœ[þ¨ªóügOÕj¿#YYYž‹Í͇¡Å[šÆ©ðK"ð®±P0L̙>Tžý9Ñ;äæòõñô•˜¢œS­¦8˜¾K¥ìùÂÖ½wG„áýbwÓ¨•|Ñ>ZIl:å=.*q"´Â–C´]ü÷šÁ¢|ÄGÏõù\íÎa“¤ºwNÔ¯¾ô{|[½Ý;5>ú‘Ã0ÔEö¨ÒúO ÓkVM0?¹&¶þÇÆmŸY?Ä#~Ï~œ¾ÃÒ”Θg!÷2zî<}ê4~3vÿ|–ζ·l0P²%–(î㋌~Ÿ†ßO5œ3òLÀÎ:%£RŒÒek¯¯må3$Zò—>pz¾‰ž‘Í<–.ÖîœÃZ¦¡QáÓweÔkëÝ ¯™«øm¶Þèp†ßu0÷©ÝXR¾Óݰ{ˆ$âÏµhÐá¶jGwÀm"eÂøËd#YYYèt=Ò (9#YYYwKæŒÉê)F¶‘´º»ô5¸‹mvÒόèð:‘–räV·(ñôGã+Þ¯ž×Æ!Ï­V])Æìߺ5þccó@ ‘Ý8$Bÿ?UñÜg¾.á B¸CZΓ;ÿ$ñnoY‰+½~'Q#YYYO{켿6ŠZ<!]„_~Qè’%§”›é¾˜<aBuoV—a'TºÈ%?égÑe8‡>òы2w{Õg\õ‘ŠÜæs+u²n<™Þ}^g78$ûòî˜nk9ÔÝ?·üšÕ݈ ®¾M«óxׯp¥|TB&ÈD¹éþÎ6V2Â"V­Ðʈ™‚!ÕŠ®#qÝÍx¼¹ì`ª‡ ¯>ØúۍÄÔ$"Úå²'Í ûbë3wƶœjû̼®ÉÂÓÒ¼¿?iRRÉC=¹é”äpyÆXMÈJw\çY‰BK€æ1ÊPvpõB;ÎÙD_4K£q²ýO«öñ)+[t~K†ž5ÕU¿وѲ­Æ¢È6³†WOX¶YIè‰ß5>‡Ô¨¡§„©;ä;CZY¨¤|õw!ëŽ`Û/£ˆQ?•´çQIöÿ”?<pټߨêÀ}ë¾æº~-zjfoã»Ï»øj·5}&çýx³¥Pâ:Á{Oêx{ó¸=µ¨¢·ép«ôEÃé;¦xxé{…ªO¸i™ªöÁ¶Ç¶ß_Ÿž¾¤Lˆ5Š0(S71¬$û»3¦3ÙsƒàÌ3“Ã0&}^“ØçOœ­N½i‰Íw½+6ý/mûuxî=ýˆñ”ñLÉÃÍsf[o÷d띐Xà‰^vÆ?Tœ½þè¼,&G‹©ãîy1S DŒk)~1¯#YYYo§ˆûtÕýÓÉþîíqú{äÿju­R—3Jˆ#XXXq„cß*Ê2‘¸“ΗÜF»Ÿõ.Ï@;]7Ÿ+F̎Øfã˜TêGÝi]d‡×–Èó,ì×&h૤¤O8G.ˆ8\ÍB,ND¯ž»9™u—Ëd25=n[M›Íè8å¶g¿Më}o…o¥k¹nŒiôHRI—$Vm58ß³£D„¶`E1R;ÝðXúéBC&˜ã« Tž1mÚ>m†‚åDóݶçyÆR¦±ÇYúµÓ-|r{ûzJ«áµHR‹ö¯IyVÖK{K9Íê8‡/Oд­: —è\õøwÏ{m  ý‘Iö÷݇ƒäòáÎQÃC·¥»yýÍñ5ã0¤#YYY¥œG‘„Xˆa1p–ZY 
,oŒpRŠ#YYY³X¥‡ÃÞÚFTð¼aTü#XXX.*DþØ\EÇ_*•¥ñ'yÆë¾òSùHºÎY6õß³Z—O9B°\¯ûçÇHA‹-+3«|0ž˜È‰ZW²OçÖu""Ç¿ÁGAŒé£ïފpŽº*ºµˆ`ÿԊ—„ÄãŠ×E ÖêºïS÷û&ØMБ­‡½ÊÎg‹”Þ ȲŠkT©#XXX´9D»½8#YYY+Õõgf-¾‹«å)¥œ'ú˜#XXXpŠgz·%¹[¹£çeàÖ!2ëáûµùuê9yË#XXX‘¶“áӌ«WAsǪÎ=¨½*×…]޲ºÊY<}_naš‘ûÇÛ쿪2µV7ë¼7  ¹%\߯X#‰°Ó\ºbPDˆ#YYY8ÓXsŒã[~>7õá{ǺSÅóøÓ\q®yþbƒpx zwÖWí‘Û±¶’àFEO9Ž~+¶Ÿ\(}ÝqÙáü>W³™ôðžÈ^Éܵ [õÄK]#XXXñ«Á4Úá]Òá/¨´Êîø[<`õá.¼¥këÎÆ°ÂÎ3MßÑ.gxò½N0ìxš“™ôEýxÁ°@7ÝÁæxJIº¶x5£¹K*ŽLUÛüŽ0½–û«‘lºÛSʝ—±^šļHKV}¦1Ê:Â1€…ÛGÃÝb1Ÿ™ê|ÁÆ}×ÖÒù8é(B#YYY¥ä ŽöRœ{Ö¼)†nü k¸Öü ӓ²ûD Æ÷ð”+ì€bxw–8ê{ÅÏ낒ïaٖdãîØþRO¬Zß3×Û÷?7\^5Sas­æ´Cviºdù?Õ²ŸÄ>`Y÷}}¯ºøÂ:Ù,Ž\2ã`¢šn r·ÏFIÑ6Ïí„ç±M§gq§cô°Jïº& ÑDÕ䖯[RßËÕ×Þ6é»ôZáê ¹}'TŸðȄ¹¡< #³|V 4zòˆö{F¦¶ŽAd ˜š Ó¦;@$Ááëc–ÐÌ@ä×yáèd¶št;i6ãY-_síx:åÃ|U¼ÎL$ÑdR\n’֍i™ÓrȆ¤º¤ç]@h€Éš7.Â×^Ԓbš”`iÜâò=t‘êÒocAçsMøò“Ëj[>~žÕä-é‘>ü»y7]FïŠëw ã–ý×Y­xˆÚ[-Y-Ç^‹¿´‘’Њƈei¤ì¸í8§In»"éÑŽ¡mõ<ê`üà85¸wO¥É²W?ÓŠnËöõäs¨ü´ÚԔÐT’—yù¿|ï_ގLc͹ª×sI4Úq…µÖ¦oVɛ7£7üÆsðåӀÆ.5]üxF#YYYJ4ûRâҌÖàó¡ä´ƒ70­£ Œcë¬Q¬ùOèÑ­n}ñ`xç~<ÆJ‘ª¸`W©9ª³M<¿Ðâø60ÐìY:’âñsM=(Ò¦#XXXy\î ñz¢Ø$qí؎^Ê|Ê^ÒЙ#YYYN\0C>ÎiDS9KáÇ Þ‰ûhºT“ò㈿Uº¿\ÌåÞÌUçe”kT˜Tþ³ZSq–ïv¸…¸.Æí#b'‹ˆLΩtÅè;ãΆ¼ýj/\+•­å3ÝE®óǜޡÍË$='»î䇇½KxUÃ֗ßLüÓdò¹›íÚ_Ž‘÷oï’ËU²·40œMhóŠɨB®DÛ>¸}øe$e‹ÆÞÿFº%ƒÆ¡êK÷õѨ#Źë4®S?~ÒÔ¨Ê8ä ;RR ʯ(Pœëqý~újdŽ!zH©éDx>½è§z¯«•·—$S$Q£Â?§í°ß¿’#ÝýoÑVùjx*{ÃЮ š·-GÙ} DEœz¢)ʾ«_¢Ðãß¾Ü#XXX°¶øá㾓͘ӽ†f€lM]àM±ƒÖ§sW9Ó¢õ*_ϺÌü5½D¤0L`„О¹<…úß¶¹²‚썥é†ò}©kƒÎ°²Û}6oP…†L~ØÃÜíù&ö¬²å‡ÙtAcË®“ÐÎ#i·‘†0ž7Î¥M#XXX—Ôwþ'twœ.vAFG)/½ßĜî'àºÄ¼eIú‰Ñ¢ø%…©&Œ×ß#ª5ҕ²Q≓Þù<|×îµóÇKÀ¶ µá1Ï¢¤Îªcùá$&’ Bû•tïqnVÐ@7¿öˆ‡ªn]íÕ{÷­ÞFªN€¼²L6 .]”ªÉ  ‘Pƒ3¢eüü8Â<ÞH®ïš>}ž‰içlÕ¥`2°%Ô*ÍàòHì˜{kHt÷YO$×Ý×®¿ ƒ#YYYøÈßáQÌî]2¢9“99´ìû]âû/.Cëë±ïâ(}P`íx_^L’wˆéºÚbyJÿ_lĦ¶´Ñ$?¬[Ëu|Œ‡è¤B޲®šJ†ôB.Æ2)ˆFø»h‹“Qœžˆ¯×¿>H¾›‰ožȽAË£¬ˆ±Ý ¿W"ÜÌ(×%·²ä*Ù=`3ˆ„5ԌH©¦–ß“äÄØé„‚xFæÞ1£§Dîw±êPL z¤LŒð„„)ju¨Ï{9‚)|õ?©%ÕíÖпЉ"¾Ä<ºàÒô[(í¾Ô„“jº½[²Îü¸  5´Äår.Âmb†ôVÞ!­§ŽokÇÑÛZSw$#(‘³¯S¼$ÁË=EºŒã©h”}¼²R׋#÷n4{šw:W0ú½È"|¡Øç¹`¯Ë“AŸ²žrôYü£úveŽÚ˰åù(L“̇(á œ6ÎyÚâ¸í¿'|íg))Â=Ò.M-ªËäÌԚͅ“ôœX[rž•é¶-¦[X¦Ÿæ”ée¸ŽWé"hú"õBgJÇ2;u7힜Øá)ºå©CËÞ6-¼Àââ¯RDÑi!m4½ST¶žþ3ðß[á@—Y≝Ÿ¹É¥BªUS¥:¡Ä»;ì?Ë&fC33$‚#ç]:ø-ÏWïöZ÷Fm­O7ﺛKØTÆ 
žãUiÁŒ¤q¶Î»u½ÿls7lñgD³ô'P~~"U¡óMݞo|6©*lXyDìð@¹>éÇXËO¥Î}»å†7í‡#YYY6™ú¹Ïv³7ï2ŵ<¤ô<2&2#XXXþI_0ñïØz艏Âxñˆ[qfn#0Ìx¡‡eº5›¡^úLZ‘ƒ‘„;vCãžü#‚ã6›ŒQÛv#YYY¿åª»Yx¸âFÌr#jD‡[`»°!±ÆÁ‚‘ýZK̋ÌŸ˜¤p„ÆôYøŒƒõzüñ˜÷QeŸìs³£z¿´,|¡÷¾3|ÓjÉ]s·0–ú¶È:èâz8Á(aY®Þ†²ŽnU¾¹f·ÕðôW(¹Û°sú§o9³1Ù ˜ƒ&D”ö[¹˜æûV©¡ã¤e Fˎ‘®.·ž+š]¦¸G4×÷=ðß#[KJ;™¼Þ6d_9ȋLƯ‡$BíÕ­³î¤,÷§§5Æ>ÜF`âØ=ÀÂÁØ$ôþ¬}ˆt£®›Ü¦©Xو»¿«ÕRҐÃZ·¼%$÷[œ—‚î£0xëü=Ö\A´¤ì”Êg‘{ÝIf¿Á=Ψûò]÷e&p¨˜÷íƒ!1YÖm}ÂßÛQ*Á´Ý.¥¢‰#XXXHíæÊvGÏ»#XXXÔÙ`[÷uÛ]‘~—]#CïyžÍi0á"l™ÌÀc10M4dˆyu%0ß;§òÖúA’a„É®Êv¾Õ©R¨gO$ìKP3ʔ£àTðx³ö­YOÝÄHÚ¸ØR8*§D^°¾3{p][“!¼Å#XXXY-Àó珑½Òaö¿Ñòé hᙃ׼þz¯õ<ܲÞ>ò呋W-γ뙮ø–Æï·ãt:v­›ýÅûžã!kÞÌì2aÇlnÞѓãªòsï°é3ÌI3j¯«¢õÖ°"“ÇeЦü{ô»¢*dãá.ž$"”²Xª´g‹"'Õúi·åÍ?¨T߯n=qñë8q„ôr›‡=Ô.×-ñž)ï•ôϗÂs…Û½XN½`ôqÄÓ#0g¾ÓÎ紉1¼~fþ‰3¸˜’4Ê0±) ¡Eo{@¶`™UŽ»­gÎ6"Ø{:ùü#õqÜï¾ëXç£û«Ïcó^²¸u§ý10‰Ç #YYYc³»¥î=‰êè“äDã°¤bf€žëºø,©Ïàˆ9“³q†õ6vûðI­ý~¤±w×Î×^q¦¤ò­Hsgi]Ne£ MBv瑅î]ûp…ç©ñ[ ™ì^qW™Lð2­kRÒfüàí8ĉì‡Ï1¨¹©˜òjA›w]Յôjkr¬ª‰v÷¬ümCÚ>̾Hø$ºwž§¥ã#YYYÌ}Œ³k7–Hï›Gau$ÿa8è£W¥n/ˆBSžÀ4ÞŽÇ$}e]¾‹9#YYYú@~û©vVº‰h&Ðí]P]÷q<%¶ÝŸFô¼ÔzÛyÂM}³mð†ñ¼a,sÖ=á&7ožœð'¯:‡ßÇÆíî̹¡ÓcQLùKïø†}mëz“íЂ¾þ„ã}>=ý&F}¢1…H,c‘”=wPô҅.ӛ?Ì¿ot¶#YYY¦˜à1zffË®’ÑtL&¸W'»£Ã5˜;¢ÑQqח¢®‹ÕC×·±˜ïœ3%ƒ½ gºë {€èI0­€ãAM`˜t™™Ÿ3¯5èdõ«:I’BjmJDˆÝžÏ/B¿‹¸ÐrUµÞ!çsVÒîPo”M9ø#YYY$¬&Jþj“â4‘4^ìŤuìRެ°ÌºYäc³ Äq\ÿvÙʛÇS™:Pú¿4ZMãÈìîLün¼•”?¿b”mŽŸKÏÒ<´î_5ê #n£Ä4Û#YYY¾pbø[ÑF²w½(¦€ðtäsąÚéND¸!*oCÑOYcƒÓ‡,€›jŶìJ]#¡²g¦øCsÌæ¢Â(Œ/̐‘TãË)n7ɶUg ×IÊçôF2Ú{ÜÑÝðƒ—ľ\‚Ù/Y¬p¬ï5Áhøû<K›GÆ*>r—Ž]ßs$©D‰¼y»FµP‰ýÿ†âx¼Knôß˜_±gò?¦£\ç֙ñ“Ž&›¼0þ|_ìœ'Îæ¯Ü£U´G؝ß,¯zB=›Ãæ|³Ü{ÅCr7Ì'"B "X†@„ÈB#²OÖ<&ÜYz°/†›¯&9͍Àž )J Äö›v–m¸O)˜5—¼“ñbsÅêkêãù%¼í'¨„;ªÿ¡Ywâ÷>½zímuW¸Ñ~´inûƒòUÂËéoýœ¶‘ÉIúrüììú½ki鎕Ëýv•÷#YYYpl…ûf$õÄwo¡¸x©ïTKIfJƒ¡ÐëµBšÍd4ÓV÷\¿ºãoÞw(\öö‹\Ûû@è·'Úên=ì¯;׆ã~ÕyȋðtÄW5+î}O¤ §IÈBNpá+„zAòáù6S2ŠRO{y— ¶+Þ÷;Žôº~-±R.Ý⟎ÝÓKÃNjŽÃìó¹Pd¹ê¼Ûv!ûF蜂6h…åð§ÃkÔbÞRóí‚ÓfþWÉ5é¹Ä¶¸¸´1‰8@©T Ù »mõ™Za¬˜#XXXð„F²'^øB žÌ#RPhن8õdÆù9DrÒ¬1-«\ìçE³Ÿ_6Ž.A C¢4Ä:iÔ›´„‘TL=MG#XXX"®ïñÃÊ%´¥m5¸¢ ‘4§wC­p¿n³mçNÜÔw!D·5qZúÖ»~XâI’íÜ <zŽ(rwˆD÷ÔDÍï¡„˜¸ÙïÆ4¤ìïœ!#YYY~Wk 
hjšQÙTð9ã{ø<³šîàÚ{‹ÉòjƒúþkđÚܳäxT©·Ì:òF)TµåþêvD*ðþ~\´—NڍCð#sÖÿ˞þ4zèàÎ_Fàòašßñހ}j.e¨n"ÅѨz6>ϜF?\ï¨M5ìÖ ê/}ÞFçLñ˜››§¹àýú¢/‘¹ðɵHw•ØÃœñ¯#XXX䙊`]ŽÞ9Ôöb˜6äBž“T¸õ<JOçãŠåº¸ã…Jù\C¨|îecÞµ4}:ºß@7Rkž8ž_/Åx¬¥uËÇUÒð>L—˜mæžbu¬Ýk­ÙG³œë½uÆõ{¦É1öäxx´C/Tð•ð„˰q,n% }xS"–³ZߖÆ?ª…T»ñÁ¢±qÑp½f^•ÅïÖz/<¾Ž¸rÐýŽÖšô¶ÎL ·ŽÐ#YYY´ðQ-6ü¦ ò(Ùuºg®e¼u×êö¸SËñ¥hç8%§~,‹ï©æ¯¾÷Â×c蔝Á“;åLšXR_áf¬²àß4tµÅlWǝe¯Šëƒ]p×Ãu/Ôu]¼ucÁ„¿T+Néòa¶½2Çv+îoG5ҌÅp¢8CÇ4ÒêÔ»Lî÷ÄÍ8ã•q<̨H5~“>Þµ7WRмÔñLÞ3¾?çN,úNÁٞ3µ>r~VxžÂJóSI¯™€Fš½IÃø•ß­´žË‡ŽìîðîëÂÙÌíyE¢›FÀjg¾È©5^.6]6úˆÇæþèäã¼x¼#YYY®8òÑÄâöˆ‚M¿ˆœœ¡ÊÑá),R ϛíð_²zđ„øèq¥À†¡¿'ðëÁa-/Ù¹!)w®Ÿ8®•Vš¸1Ù$ë§ÞÉC¤:D¸º"9åè½wß}]ª€uŸóߒ©¹ñçÏK¶MÊfýÎÓz±†ÉÞ­ž.”kªþ{}Å÷, j›ÓØH#XXX#ZÆüsxL¢7™O=$×@±c#‹Í\òÌ9~!âßL¹‘o{Ë(Ûí¸IÚuì¾oÞ¹÷ó›Ÿ#YYYãDjkG#YYY4¾î‰Bgðã”æôO%Ñȃv1~Hù®Úù¯tsŸ&ÕÚÒhðAöUãòñ˜“:LÄ8ˆ#YYYÅx}ùëNûy¨{æًԣiý2'@ëlçÆ£Òž24µ\÷ªKõZqo›Ùzޑ¾/Çì†êµâ»Ãº ¾çÛ? ñ@â' Ùê½G:Ü=y¥=5½;<CÅ»/2q&×;Ü"ýÃ%…4 ÄÚ§Ø·Eu#YYYÀŠƒÒ9©iMŒ±àLdŒ‘šÉr?;k„WgæyöÒ¸óÆõD£XÿEñfk7rââ ÛL)—.ÎeëUå'<fFFmzâ¡bOqr«›»MI¿ª· ä›8ô>_!@#“ªÉ¤¼L#YYYJY6…¨e¼ˆš­ [·>Ÿ":Ñ<ì˜xì§ä\Lp?[€—b‚•­Å)¦àFÜj¼¤£ë¾¾‘Èq‹o~2ÅÖ¸½ ­-jhc¿³ÈQ£¯;éš/½VpQöñíÞrÕ¶àEïp¯ÊÑ›Í4õ®GhIªÒž"´Iñ=y(9àà5Ñ­æçN(üáûœa0˜¾W?æœù[v€»¾¹vç£ÌyäõâÜç骓pf#š¨èíÖÌ'šÜ ­<™Þ#m½ŽŸŸiž-u®ç?SÝ»ŠxÍu³êàMj½wZu|5i#ûÿ6çÇ,oÖwOâòæ§ó·jÉ©ŸqxÖҖ—Xžùÿ‘Ó Z$ñ~Ó|"3ùȎè‚átÐÛc¿э/[R5†’ðªrCMá*Ȭ%²¬î²‹å½6âxí‡2é®×ã’ò­Hö¸Ý‹*҆E—7Où&fö³QdN¤;ëýßw¾ãïWó›^×ÜÃyåç¬é†ø–€úg“9Ø*o¾àVÅô2³FµELi£õÜ]½¡[¢¾—#YYY€‰#XXXV¤÷ò}ó­Sˆª€‚°žjzJ_tiÝI™“ø~²Ï-Þ2°šØq½ÇîÙVHmômÉ(ÕÌ&›ô÷:O«ï‹d©˜Útâ'˜;ç„ȝB¼R[_d&e —´dÿlÝêw„¦U岕§‡•Õ‹FY×¤@Ô^¹Dl"ɵÆÙÛ»Öj4\—>õ÷êPR#XXX¯Äü“±ÃêY\:oõªe,Óg#;ǜ.§^*H­6é Jk{ίt(_ct¸@ŽdzåF6KOE¸}s‰4®øZÌò0â.–‘œ`äT9ãU¬DÒ¥ÂYhêD™¯ÆÜÑ)Zhr1F¬r“xÇâàÇüàÔNæ<¾¨_Gó¾}ÇÀ·}µe´©†aŽNç*ï>xÑ6mѽµ½s{7¾µ£çEöʜJ¤ƒ½žõ< Ùn菅+Šz„ä¨$_¾O*üê>vl»¹—®µO’´!4ø¶dÒó]0Ä !5§QN'µ¡ë8ƒÊH õI¾yK4QÜ®Mп•o×ì/ âð³ÅØ&Àæ ›ÛÝi‹Jû‡HÑd†K˜ÈB{Üu^I ™Þ4½+XóϽ©9¨bû F;ÚPq<‹|'¤™ŠC1¬Flù—â5Õ87ŽV´Ts#XXXûÙW¯Òœ)˜ãÏÂ8ŒZ¹G¥Â¤fÄäkÇKHœý¦e6ÉÜHÊM<ÄÂXp=ڑüHÍÆºÉ鏠øÛë}ÛO±Õ¼» sÃu-U­ý‘ÙÛ'’Âçå¿X#YYY0µ«„nŠ%”ç»´s㜥©[ZF¸`zg#IÚ+G+S Gi3k‡‡?íù?·û;¿²k ^®[œÌÞŽ?g› ˎwæÑø~—?Ÿ1í¡K^ž¢þS,™~„>5€Ì2X¦t0FøøÎÿaˏtÌGÌ %†ÄÁA@ 
w™1R6»ùþßaåvÀÅ7H<™Rëå¾ÈL…ÚíæzÎ#YYYµø–žn¿Þ£¸ži¬Tv/ƒ¢…'-õ%:{O¸˜‚‰ù›™PT”§`ϲæ Vv¦­Ã¦”v˜Æ­•òd@#Éã9?U½ùe€[ïw­~|Ò9K»¾„»C8©Ñá Nßû²(nb·1^Š«Ã'!áÝÚ®J¦>ú¹~éNmë}S³­<-QüõEêãr‰f§íë›3«q÷¤Ç,ŒÙá]}e®èc¨Gyq³`6ó;¼6nC[Ä ²;§ãxÕf|sæáçYO c×$ÜÁˆc#R2Ô+$"#_°¡¡Ÿ·¿¯>ZFìC¥øâä[K<h!(êÛ/•×Rr²g³fð) •ž³éF~P;^~QÇ¿¦ºÙ.Gsa^ËGMáÃkÆ¿¿\LyR؎•¯ŒóØe¿`ÿ8Hg>($î™ÁêSQèʏÞa1C"lÖ2Œª°OÜÖ,±CeGîk&Y!1Gîšd†@ɹ¬Œd«"°ÜÖÅVPýÍdLdYˆ~æ¬"²ÆdI°#O”Aíqûÿ—¿™á‡ÇZ±«ã¡²S> ¬’•z™1*2#XXXèȈQÊd0@ "@¾S&$&_² d0¦%ðjÀ±&Cå2‰‘ʙ  *yLœÊŒ¡Š{µIÔ-:ØõÖ¬2Ó#YYY%0‡£0LQ’z5“2 # ËXÌC Ê7k$Ë%ÂÆ@ wR,#w4ƒ Ê÷Dƒ*B÷R¤‰#Üd҄݂'@:h;³`È0Q$G¸¢„$ Nã hea;„0C™°°#/q’÷'q•$wÜdLw` ÍTçÝ˜Ì’¢BOF3DÙS¦²Ë$˜%:k+,ª0£#XXXtÖ`LA”'S$FdY!Çàòmó}=þŸ‡~·™¤1‘‚ZÖ³#G©÷çðjßÏ{a¯Åô¼Õçëß)ö#YYY0W³t=ê’Ðèå'ðÊ>õÂ#YYY`âê#YYYjâÁo\Ÿ®eKøoö}<ü³B»,y#nçh¡š×¿ 9¶W6úè×zÐcGM™(úe"鯻Ñ5¡«¶ñ|±»h…qLzà_ 4½Ižk×m×QÐI/ž;&ôqáWè ÍoÍ­$jXzLÂÂô'c7ƒÙ¯jÔ­§¯íqá¿?½ÔCb҄É.'¨×è¬E׺¥©¾lÙálku´w#&@ó”ç× g‹6ó<ŸBûèñx­HKé¥9ÙëYNtvV¿ó“²ê:ø”û÷FSæ–Æ`@A &Í7e»ö7&âçÆ!‚húFË ,q,F°CDID”ô¾Zæ_*$pÖDFëRcDš±s;x¢6;#XXXìpšô†-½õøªSñÜúZÆwÀöv]]»_c3Fè§ (Iâ-º}P¨æO(ёÚÝ?+˜‹3"› JþEh¢Üu‹…÷Áþx­n¡q{—}¼®¯ë,÷lÁ¯¸v2Š—”çH¤Ð<ó”{ ¤ò ï~sÎ;,·ÔÿMýÓÁ\'ÁëŽ Õ§ a÷/è'¥IiÛí&؇éáGìPу¸©ü"£¨…ׇç`çÌÄ£^ë¯Ò›ørFQÅvr·YV©wiJÊdH÷AF?cò„án{nÕ~Ÿãwó2ûIú°`õ-˜pE©–—îŒéF8/j”½#Ã#XXXzp·TèÔ~Î#ÞŒÈÆùÝsÑûìÔ3¾ž»¹ìè¥á=sÂjÙgãõǺ:ë­(Sàt´4;Ý÷«CF0ž‘~ñ”ÞOy?rŒæä?unØBkCÇí!¥6"¿úÔìwI˜H"‚·S5|¹qÒ×ÊäÕG÷ۏ}¢µÞuʪãò}¦OÃîÄ¥çc¼%h;eejù¢i>N9æ~‡™o»8Q﬽›8‘ö%âT2½¶É³n~[]¥OŒ#XXXÑxÄʑGè¡^(Œ5œl‘¥ðJb|á+ßÉfŸå~ï~ÿW4›+"!‘ì¢)y[šàž"Lü‘ç4yŽùûõÅpwÈõ޵Ƌ‡åoñ²È~(‰v þ™º“g×Rs‘¹Ø ñ®){ñ:#XXXˆ`d­?•ú¶Õ‹KÃÃ`üÒáþŠÊœí¸½Õ¿§Ý»º'GƒpÌûp#æww¿)û4=Ë:?2ä9 Ï^U¹¦×ŽN?6x÷m­÷eˉ‘¹—ærW´º^w„’÷L˜¢ˆÜ9®ùµoWä>„œH«V/=ˆÅ—ȽO*ò«gó덙îÍó´!Þ0p´O/ý‡ú®ÆÖ·ŸÕÈî/Ÿ‡h㻙X¡ËÆ9†,N”“´/UŒ¢}0={µ\&Boƒ‡é“QÑógÄöIGmI^ÙìqÆÛ ^N1YŠA&7#YYYӐ¤„“@³CžSE¥3 BÉ#XXX#XXX"È%)IAÈù”öòɈÓA˜;#XXXÏV!VM$uÈ M¦a26˜ìñþáþÙþáþá¼Y™™™šÌ|ÌÌ̼̌ÌÌʪª®88ãŽ8ãŽ8Þ,ÌÌÌÍf>fff^fFffeUUW 3ŠcÙ$Ã0éíyû7Cu!-ŠHÝ'•ÒT\!CºQIüäétäågsÞO¯×ëõúý~¿_®³333/3#332³3'33'*ªVoq»û65=®Ô@Ç6»1;¶Ø'1 öíÛ·nÝ»q#YYYNƨ1͉½9ś¡¾“¡^y|ÿ¦;I d>_ñô9ì}PˆÀ“#YYY°Q¥$PŒ¡4XÙ)IZAbÊß.ùÛø¸ñÂxå§a/?Y#Íö 
$¤$‘ª%GŠIUÌóv+wr/K¢<J‚s„;3ìû®t}¤üD1ïa˜‹³Ì?¿õ~ï¯ô~˜UFGÌ¢OõÎ#YYYØxbdðkæüþœŒ©ª‰‚f‰¨EJ€iˆˆƒ²E¢.îÓi´U°Z "áîþáʚwîz½+};ÝrƒQZ6¹Ô¦Æfo£ë®õëÞîÏ{CÙµHS %$,Ð'ñk󞍃£•——9©+ïü˜HÍJLQ%î¿'Íþ`¼žÍÏ>ÎA ø>N¿ån¿=š”]¤à„)KþÙ_õë#XXXþiÃÍø Tÿât˜k˜“1ê·èŸùòÝßø*Ä×ëÆ0\ˆ#ÝöeòÿsjÁûîwîf8åÝ)¯/¯1àfdr8»}kÿº,Ì;³7¼LÔ~|+ópÔËwÊãր5#ëÖËé> o»s¾–Ùö•YÓì§d&ÒOƒÉ ¿ÔàݨB²ŽFBTÁØvø>ñ`ÎrðQþ?ÊÝuõÜ37Ž «æÍ|üÿÏÂî܇ÿE˜}úSlšù#XXXfօJþéMUÓ¤#Œ÷ÒûßuÍÏuùÏZS2Ð8l¦±µßa¯ëËòÃt1KnÈ^·k§¶6Éþ¯÷åÕ§ÿɇåÚû:Ê-:xVæþû·Ÿ‘mFØ=<‰¿fFð=ÝþK³õ(oëÛ³6ïÙ°<Ä0H|pШÏù˜O÷Àþjƒd`š Ä –LŠ4FBfa“ŸÉ9FáÚô²SWQ½7Òù5^\ÉÐþírØÞîÒ]ÝÝÅW*åiяÍôu@ôªs‘@ᕏäڇ¤>øL÷iŽˆœ¡#XXXTˆ.páL¸* ÈD¥F$<ÇÓ¤GL Ð´¨L)@4¤Åzcç´:Ç;#YYYkB¼©-¸1õ—îùœ(+›5ýäÊ0ä&oŽãîxì@baïzô&|óf I\ÊGP¼‰ÃɾÄ3#YYY“M榲jù ;€þŒßÐoÉ?xuþéýßß ÀC¸C¿+¾€€ï©X^ü¥‡¾†RNü¤€ƒ¾¥‡¿&b;êc¸öwþž Æ?zH)ˆ ]·ÓÏÑ»ŒôÂpJF¸¹éÆÔŽÉ˜ùïǟ^Þÿ;Þ÷½ï{ÌÌÌÌÌúffffffffffffffffffffwíàúž¡ô½†ù£æ@K‘‡úvÞη3ZéÀú³_P¯pàäà Îü8l×âÚ9dÞ禚†3|±Èzocߚ§khð’t͈kDŠ xný°+ñÆÏ%×Ëӂ<Ú-ð§E©ñ¬˜×ä¡ùþðšÑºâE)~àiDýB(0I#YYY|<Ïãå\„-û?ʵßðý‚´¾ŒóÛ´Ì?M=?áüvÝêº]Æqji¡åßqmÒўÙűoßZt‘þ1ôQëÍK è—NÏÙFù¿v??áVïA§-ƒ¿fÅîôv®ûƒŽî?»÷Èb\üÿ<r·ï¤X?²`?‚a HªHcԁ޶ ¡ÄY7ïÿ'‚áçÂ#YYY45Kò®Š?#YYYy_ó0<3ȐýonÃL?fAìŽ@È@\hý±®!ÿ=ðÿW™NH_àì<ÉD6U@þ·ŸOï’mp¼Cü¾f FèÒ©‚×B[¼ê(]·öéšÝ´oÓQÿLgϞ}ÿäìÒ{ã±üQµÁîi>3PüÊ'Wú%‡,9uèd¡£L„DÍV)‹‘7+EÙÿ#XXX;·#XXXfæc…aϯ´·HÜWò¼Ò×ïM¾œu‰‘€…¢ÑBpáþÝ=]õ=ÿì?¥ÂŸûzëóîƒÌ|úêØHƒžÉ¼­nŠ.f0·Ûk[h.ûYŽo”ÍT‹¡—àMÿÁ?ËóŒíâR"ÊW™cÕTiYL‹n–”=.ŠÇŸûúé;>ÅJ§w¬(€Q43>Ï?£C@Ðvõ2 ½$TŠ >®‡Ž%õ­uNԝ_ǝÞÍk.zÕq¸5ëÀôƶIüµ—fG"sýØÃDÝ»½uñ\ÿ?;16^­ „k¡áÒ³i¨É SxaOZ¼mÓ¼·]p£Â—Ü ¥‰”%:užÅâ*ïq B·Kvg\c6|“¾9K)C§õùTœñœEº|i€ç>¾zãØ>UˆøéDñXwcþÊIY5-0YuïrQøæFZÍ%™ècëýS©’§¦ýº½1º_q \cM/ÒÅë t€3Èu8PcOåü¿/çý?»úþ—äƒ7#YYYê:Î^Ò/²KŽê4™#‡uÐ`RfCºNr.,L¬†i°~¾HÍÔÄäóE3#YYYÀÌ6¶~ÿÑxüüN0sæÈÅÌ #“óã^aÿðg9 ùvªfC޵â»Ãߍ¥íz ¯Œ'Hl¿e}tbø¤À€€‘†I –Ië—Íäõh9ñŸ8CnÏgüó.Òê‘6³³r^Ø_˜Ç\þìË|7Ró´†\†SáC×ðµÙRº¾Xr€û# #XXXwÕK·‚’£‘qȑòxB.Büe"‰ÉP}öîîyKƱ³»¨s%}SÖ)á>½µzÝ"zŽ Ɓ%m׆2û®›cܸ-n—ëš„.]å]Éï&üto©øæ)øÃ©HX”3rjêeË<SZý¤x„î´ê+†ØH^ƒñHþúüxáå[ðÖôæÌ}ÇÃÎËvKH£ã!-ÈSÍ)oQSt¦Eâ·ªI‰~Æã°".›®bLyfd3 ·ÐNHxL*©»Í¢ÑZfìÇˍѥ6ˆ A’ëstÀMõ·û‡ÑNé(ѓw5þ,E¨ˆÅ ;âðY—Uq‹1ø9ß÷APà»}x,!pÅøSyr¸ž—Ö4ü?#XXX›Ž`ÛVh¯ûÓ¶I?q™ÃŒf-¯»Áž)PFÆõ3‡ 
HØDÞv»IªåꐎdåDÌ3fî®=Êð˜ÇÃÅ3x¦>)›ïM÷¦ûÐßzo½}öµ­kZֺ뮺뮺뮻333333333– ƒÙßÂó?•‡¿ÒÞ#XXX%G]ÿÖDRÝÖý/´¸@¦¿úÈ`#YYYn>TÍàö1ŽÎ ÈCdáÛ}î75“3Ö$~¦ÀZŸyü߂Uâóøq¸—cÂqÚ-dÄى”>•e/®VV¨z+¦r|sÝu×äh’¤†3ÜòbJ#ôÝëµN‡q'V¶ÊÆ\ñ­LfŸê†/‰WÇÎpµ‡f6-ªJ"¨¢²áBႝ8ïß­ùœºÇ øÉ$’I$’I$’I'¯èý£ô~Ñãççɘ¼c"÷5‹Ã 0à 0ÂÀÖm³å#YYYd-¬¸xlñøDéårr5Ž(B˜EV©‘!Ž©¤¨Ë£#Û%ìúyèØº›#YYY”Rd dÌ#YYYÒÅxF#YYY±2a_•ûÁ‘úFwÂ4&\Ͷ輳¯ÔÓdÑ#XXXzG¯qhjxA5ô5ÏDÓÁ_H‘¢/¼G#XXXîCb¦7wV5™“5pÆróSR¥M›,³M)Lɰ ïw¢HJr7g£pì0M“±øÕí!Ø&i8àþÏÝÙö¶8ÁäÖ2‹–RÇpÂa˜üȒÁòÛT.{ù7¹î+=ÍíÈÌQÅ6ð«UŸKŠ7Å;&‘€Á…e`¡$€ ˜ø [.<ù«`éÎr}Ñu7v«ÎhìÁ#±X<%ùÿVËKé~F”‚&ü,ç ésÊWo~#XXXñ>šÕ£øá‰°ê(¨ ÉS‹ÿëô…6—©nGµ%êÁþÈ47SK$Û*.?f,Üuòu#YYYù—Ÿt¨~³qmx‹¯;]Ãò÷‚‰SϘq¥ú"I鍞^‹×>Œ1ÿá¿Nó©å9s ى¬¥ýn?? ŸÐOè 8g†MBjPšý·àþíkéæwZaLª¨'éCÍÍYzSTª£˜aáR4ï Ñbàëob^¦ròëcøŽÙ%„ÇÙ8å™ûÏú+5Î'ý èw^òL¸O|Oâª## —‘}Ÿ?·ÍùÿW¯Ýâ5<D|éÏO¼õht-‘a^Pòóp¤yܟ˜¿¡Š5îÆá›ÇßpÌÃMÅt3×a*ß>´—¥3#m¿Rž£wCû.`6æÌÆ|0—ÔZÂé5ñoDá¼Î:Ù9®-mÎß½%¿Ñ(L,C˜™Á2£*1F(É#Â2†PÄ1 ƒ Àd&ʬUбE`YFQ„aA…‚Ê™SʆHd˜L&Q”b1 †U”ÂÂÈÈÀÀ¬"ÊBBHZ•*lÙešiJfMVmLªd¦JaL)”™I‰aŠÒæéÝ$$’¾¢Î­U۔EƒÍd˜IŒ#XXX¯X¤;6rÅžð*Åȹ@Øø{åçè£#XXX.}‚ìû¤Ú'’æcW¢O:"Ñ{F vvȌ8¡ðºâÅæ®’•$Æ/(qüéÈç>¢ÓǏ ÚF ±_óøÐúyõ‘úÎ~ƒ¬ßlt"±Fȸò]Úmžç»há»víÛ·nÛîÅ0iéóË=>Ýð`c™xn8Jìá_¹Êƒóÿ.‘ûyX—^oÁOK“yõúÎ{UXmþÁÇIٝÏá´sÓ<x5žóÃÑÒ½[ž­==/¤=[Þ,Ù”`f˳GëmO\|=ßß áIŸ«•9ôì¯?ÏÓòíú¥ÊOÍlܽNÝG A]¼z®÷z»|}Þï<[/0ívŽ«4Í ÌnÓü'Sc3±õxz‰ÂSí!¾ËunËêðºˆ;–-Ù0=hö §J|ò–©˜×Ùþ}<<<;vkë=ȏN?ñú¼WÇì¼øïÛ×ÇϳϿÎþr2–³2ó37½ï7½ï5;¨­Þ÷™ž<ú>ßÜÿ‡ÏÙ÷&ÆlÐo^ìèxtô~‰v§]ßÊTöõ*~Žÿ»‡Ìo#YYYgYE¡š(uðHf<44³äSßô€Ñ¼¨Ô£6˜#YYYpæîÅPB3[ým"Dçb íëù¦Ot/Uñ̈́9 4ØG\ |æŒügÚa†a꽁ŒÓ¥ØîÓÁõLCƛµù'›±r>(c»›ŸË'²"‡ºÓúÅòü&—G²¤óÓ:…¤O“FJ@’A¬—Óʼ°ƨŒwç¶!Ši{¶ƒ:ћ ÛØõ­½g¯‹åï÷û¾o5  #XXXeCðGÍnhy®å!É&Âr!B][âÝRsŸ‹·¥4†Rfн;6k4ÅȖ_£iNuùÆ,Ùd9ޘï×oç'ŠÂ2Mޙ©ìÞÌzP5pGþ´Ò±Â.šl6³ýÛwÇõwèöò¾3EòɊ3Öü毵ӷ&ÇCS8Û¯ü¥çÇ¥F/cdäVɳ”2¼ŒÏ±ßL0)NÑî¶ÝÛѐÄZˆ¦åK9°jfÙV1ï2á+‰`d‹ð>Ȃõ9ôS_¨,$ç˱|µ´$u¿£}—’2y‰¢úÈu lA†>ék…š¤ã<¯í.Ù\QÝËÝî÷kù°5†©q_·«qÇ¿ßMׅúïÚ¾zÏM4ÓM6z3ãŽ8ãŽ8ãx³3335˜ù™™™y™™™•—wwћfÏ.„M†G^öãQ9ë`³µ€z= ÷4ëw nß~ºu ø‰ÿ<Iª!÷3…ÐGµ}:9%éFÅ#ç[a™BhW£Ð˜õ -£eIÓoÒð™é˜íƒÅkGÆ_XëÁ"3ý1ýZúE~eAô†bT(R†Çïý³öþïßü?£¿Ù™™™™™™™™™™™™™™™™™™™™ý¼ŸDÍÔÕëyÇ=vÃ~k¿)9ø~/éç/®$ã-–¿ 
rÏMvãh3á)Ò·Zü.Ëc†ÄØïÂÐø?߬¤~˜‚ÂggéÅ¿Šc³ô³7â37¼Iþ/¾Âô‰dv»Êá„qÚódÛîx)ÁÝ7—ÑêºLÌPÆoÜwJmÇçf€ÕGœ›â{ΊEW³ÏÏÏ)[]»vH’#YYYÐfIš<ƒ¾0¿ˆíÅ~äL Å{SBlÐ0Íö£ªV=yî^Ò÷QÒð±¾Rá„0xÁ#±@Ä8àn1Îúõü5_UW0! ºLýx‰$ãÙÝ2G€:o‰;mÙ<âì*ǹ¶0Ý­ý÷±A‹1Üü§IôO¼~Óê:ºWWWWK«¥Øl0sÃ Ì  26oxÍ CÆ0؆#Æ Í©ÒL0 ÜwÈñãSdX5`Ô#YYYYµV5#YYY[h„—¯£¡³"=?ÏíéwHtî$8†8CŒ8ñãÅû甆vñÿSœùR#YYY¼fhÎRϞÉñ©Q Õ|#YYYV—¹hÎÆ£Œ8j·Fqœè8t^7ß>±0$z¢²½ Ñ1=C l®\wÆ3Ÿ­æ!φek¡ÛCgy´Â^ ˆ)…>}ü{þ0¢ü½ùÓ¿ÏâúÈúU|PöCåÍ/š¯­/®WØWEt§Qu'Bê§JtމÐv¤êŽ‘¨t]UÑu.¥Ñt9IÔt:WUÑÑÕÕӁ:æ'"sQÊ®QŒNÝr×U#Œœœ“†ók¿‚( J78A0r¿é¾gIÖ¶bÙ Þ;ïv‘Š8¿gÙiãzjþ[ñw°öæ~Ãz×ÕÊ¿³«ïpaǹWFWý>ïM§ªÛ{Ù]ü=¢ý³†1¤aÏëÓ²H¥D·¹ß×÷¨ôý™TÃÁ]M‘¬¦Ïl6À]ìÜ;T 8G›¢e·†g+†8¯Íå‘€n\ âžsµ\¾ˆ£oì¬Âû([zž<]¤ðx 4í“­ØGÈxÉéÑ mƒƒ3.·Òy»nvEæN~É4•—î#½As¥ªúÚŒ¡#YYYÏùF:èRøI¨EɧƒÁúò©Ö£·ªâëìðVŒHÆY¾ÉT€ü߀Í%ºÓ·Ë G<ŸÏãݯ²¿ÃQŸ©UßÖö„ ¯:˜ñáçv²eáñN/sgÎÁ´Á`|—f$a0v²vD@þn·ß(ôåÏYÅ~·gNe¼øuÑÓ´pÚò„¶FùUìôŽÅ=bÓF:B+&ŠÞ›jlòñ{üˆ{xAÎýkã&tÉÕ¤g›AqÃǼ²J»Šs%#XXXþ«óêã¼ÚQO1Ú¾.ѳü±,¨b­“Åso*FËl*dœñžòQP²n²(œ#XðR¯YE¢@_¿øµò:øh7¬“ T÷·_(万ÕvSf¨nÍÔéµðæ¸qøBÙÒÜPCN”Ó'Ϫr”ôüÿ>´çóü>¯«êùïé|e:Ví+ZÖµ­oÂ뮺ë³33333333333333Ë. 
„s律¹c_\êȋG¯‹ÃgXZ_{ FWB÷œßÅyMÚ$¯„q’Öå·ZÉÃtÖS½&îÇmнvYH…EÜ`ïˆÃ¶*2ø¦%Ž0WÎ?;Kϵ˦V~w< ¤nAC´;×ç•ß€ÌØj3&ˆû¤ÐÀvs·µb½rÝaåòqÞ5ë‹þ,m3w0#YYY¦Úý+l:lˆä]oªsÏ6ï›#?¯’ûÝ®9Fú;N¡xßêjÉÀoBhj&lr¿}÷D0A¯8$Ùí÷ƒW›H»xÎÄM¬k­ÔŠRú©P™ÏCºhÛýˆßñÑ5¶°}mƒ¾´ÄpÙd+áÑ͞‡.Ã>L89›¤;!~žETøúU³®˜‰*"ä?/Eæ…Ò`‘‘«ÒïãX?T’Pñ³¡Òi*&ÆhØmñ`Ôr8ó'ś,Äʨ#XXXTŽ …JâD¼%{BÀ™°çi·Éði36¶7ÙP¼ë!²ÎÔ&:3!¸KWÊÍzý]Ñ· .ð4’BiôÖ/¶J/¶øY¨G£]ö¦"›,‰óWæà¢"YœðäöiÏððv¿fGé|áúŠ$©;3Pz—Q_|™’¿/•©£ñÃè* ?æL!“¦À# /¯ŸÃ½ŸóXEþ‰0ÈÊ÷è× Pu!Ï«õœâŸ—(.a“`08aˆÿ·?Û-jP†þÌ3i_Ç×6Ç+”Ã# ÷ä[ÿ_ðЭ#YYY £øÓZ´`?/Éóiïëå=(P´4”ÑYÿŒºìïð“Îo>MA¦#XXX#XXXÎÚüfÊ'Ë?/åüh ôŸz‘ŸÑ!ÂªÏäŠÇôQgõJeð¨ÏçƒÕK5Jx“OgˆÇ‹†œÔK–—»bö@ºŽ!GˆÜ!÷ìÎçµyÒvQæ“ËÐswwZ»»®îñß»Ê5ŒÌc c½¶÷!¶Ù°ÜŒc4#YYY®oÇóás4ÛÀ/Ì\;4Mv%1UϞù¸ýúí;¤î—Ï'žA^R<¦s•O.¿·ßµ|ŽzÇr†ZcDYzfÁ1±VZ&[j)‡gÙÇå×±ƒjÆ;ÌâÍÕóAj€c•HøÐñâ#YYY¶¢Ù-…ôÔêNÝ¡¢ê]Q¼o\qÛ³kãöpú#¤t:W¢ôO'•åñôNjìÏoHôAé^‹Òz—«Þ§Ÿw®Í»Î-§­‰ëdõ°õ°õbvd½z0íu5M0îÊîeݗrnÄ6 ±hÇ D!±ÛFÖÅ´1M¼;#¬<¥ì :ÉՓlW׏ÍwC/kƒeKôŒ¬¬8Âðž‡µØÆö3Òâ¶`Ã&<äݹ3‰2ª~¤÷;îJB!xÛëŠjy#îÆ“D¶Öò áF¡¸Eoþ‹Ñ÷ék\nõ¢ìHùùùùù¶()¨¬bBZIÊÅN¢Ľ±ÈwíG©†…Ê!¯²a¥O©ËîÙ÷Bí½!U³ßüWG¯Cöýßã´Ù=rÛqd9ι}÷? 
¬¾•Ûþézf&œßz©ÌxÀÙ Gdq€c'ýZc­ÿÊ!»WÝí¿¶ !v¸pZÛûýp‹E}v6¦ö/\Ès¼¡Õ,õ½G«·mKúw¬rÂb§#‰ã³Àî?¸‹Š'“Þ ùô>áJêmž#YYYÌHC%»nµÑƒšÂíîîCá#ƒ»‚:áðr 9ŠI&؎#ð !ßr.FÀ"Âˎ弜˜ß¦„Äaº˜œn|}çWя+¨þžd½À#YYY<U9p¾+Yïv‡¶¤™VŒvhkë«ÖX²³ŽgÕ8ú¢Ý?Ÿ#YYYºöú¯ö}_W—[j^Áfk1p\UåÑi9Îtç9ÎGnyçž~?_¥Q¥ÌG5#YYYzPg¾qU3ß*íJ׊–w’Ï»Õ5íIÝÌ©·x™´¨kÅVv•ž"íS†Ïghg‰w¦¼Vw›E5ãMxŠfòONèð—)Ür4Û»³Û"riÂðpìך¹Ëgã #YYYî#YYYä)—U-_Érõ*?gí×ÌÔôszûuӵن晘m@ÃûáG NrŽü_#XXX›‡|+ïʞrSX¼êOu|oмxÑ:Á5йëTxʎrWYþ5C¶;aO‡8«¯Tk*·Â;àñzeGœ_/ùý‚ÁëøgÇ?_§nÎùÏt%ˆ#YYY&ò"{øGÂ)_Ët¦Á:3t©Ž û_oèþ¹%<*ŠRx”OG‰Pðûá'˜#ȵJ]Ž”Fª¡ÒèAÔ¥TêItQuI;C²ª®”N¨¦ÑSdS©# N¤Ž¢@uR¹.B«ª©Ô“ª#YYYÔ7©uRêSªŽ¥uG©]F%JäEr Ì4Àa†ÚÚyóµf×órôxÆ© Ùø3›÷ïß=Û·nßu×]u×]uÖºÜvðЁÏ(pêï¦åSvߣÙËæG‚= #YYYózôk3#hË_<ÊwÖ |ËÁsDXR‹Í <qc ;w›¡C¾LH$Ò%m.0fbü¡n¤'Ž<a°ƒ‹£3@ïà‚5¥7ù™F…ŸÆ4‘3ÕKS6éû÷Ýìįß2Çãþ©ÀMèçé¶Íû«¼ácJowšÒP”z‹(ú•ðìGlî©hK¦_L Éç…cÙº;ˆáH_%dZýÊå§óœòÙ`j¾ü3Öø›Ê*l‡åÎæšÌñöC‡(<íÕ6ŠÞÑ:lÀÖáîàÁè¨o<ÃÞT™m£ÏnÂÜ®2½›0lØÌ>h_»;€¨Ô ™Á©ð:0[& †³í}ß=ãq98šÔ:¢j„N+lq£Bé!Õ#XXX¤@¯+‹ºÿG)øLfšD™2.­c¤{¡äËJžßPøèZdÕ`x¤2‹3#YYYð´þÃeԥ׏éwyuxoÆÍ‰ö¢#YYY²Ì€€d0xŒ†âŐ7€òE}Qü¾`Üp>jo…ºåݝk’¸¿Íô¸`C4¨hUjM+êZ‹êš 8Çp,ãÛëµ[=´›R´ùNèèå¡C¶½)…ò‘ªG]Š•ŒyÆ tVl¥,dêcö(¶j¾~Š}TºZîƒÂ^²ÚpsÙ1PfíÕÚÈޘáƒá—á—á…¤'H’ Pçó¶}ø8¹Éød9å9Ê<óãxœeã'Œ¥8Èã'ŒR¼xñãmã|‘d Llã’éc¹Cï„9GªèÝãôúe—/cÖÑé¾KàK/Œ['/¡#ZÉJ Ëã|ñôRêU¼’ïØ­Þ3}ó0¥°“ìÎ\«F)»³¿¿¼OÜVµs¢ºAåŸÅèRu?ÎÍSI¥TóÒ\‘²ÓáÞ¯¾3Ì||H=5ëªð½ÿœvq=ˆuH¾ü ÒïNÌÏäÔöGèµ ãF.ŒÝ[kž²W•±|(!ª!„ɍñƒ‘"5bðÑ^ÚÈ÷‡6f¾ìNÙé¦>r´U<N#YYYÚ.MßÄÐëüpú=8Ï|ºµãézgºí×Sx¶1 GÝLýg05zÓ½?oçÓtýJ#YYY=J¶¤Vº°Xeón”i#ü¢xÛ«,1-ëËk•TTð‚í‰ º(§Óê“ͪš/tI÷µ†üy¥û½}«ŸˆáŒòÇ"ÉØ;ÀóÖÇ6÷&ÏMÛØ,±xžÙåçéø~ʉáF¼<k¡ 胠븥®Š¬Ñ+ ­tRèK]Ί#XXXv‡hK~䙱FÂ5܇p5ÜVw%Ü.kn»$Zè¦tGÖåt-tÑt8®¹ ¸èÕknÅSZí%;*O†ïÌü¶ø½Ì_oß9À¦ŸÂX°X]T#sºI$„`î’HFçz¨>eTG§2«Ò(u:¢¯R1¨b!t†%Ô«æ†%óªÉ_5XWt—„´W…X¦Ô²/ dž*`¼ *x©ŠxSâL'‰2•ÙÁ5G‚b<#!¼0¼+*ð^’ñ,—Šax¦"䇄ÔxŒ+ŠÊð°ñ0ðeᗆ$áKƒrSæ&Tr&*æ£}·_;ø—¥·™òø,d›4YWGfË'ØqݹÛˆŒ~ç"ˆ¢+º®Fœ) …‰(KV#XXXB»í8Vƒ{+ªé N8A¢•oÀçè‘D{`Bqº¥Ï»Á›í½ÏäFÖõ8Ã'¯µóV£ÂDaŒ¹µ'ÝÞpod9­‹¶Þ8²ÿC?Íþý_÷¿×ÏôsûêP0ÉÞ8F…ÿÇÄÆ¿çfƐ§ú ܲÿº8eâNøM뚎  ×³Ôù8~œÁ$¿â“˜ôL~ÿL?Gз›û¶#YYYLA>»ó³ÿØÙ¯ÃõQä3„WÖ.„…’Ú¿H<¯û¨¾†–ú"ä tõî>ôÿE 
®.€f·zéýö>M÷lÇý­‡ï÷cóèߨüÐ?UÓ‹´¸9/Ç9õúW: yvÊјÒ#YYYñj‘ß³õìÝÄéÜ|ø½NÏ'o‹äÙÉJÌv»„(xpéC‚$8ßÄblJ•8„Q=ƒfӍ»F½kÄÕ#Hô@móÃ#öû6 Æ f8&d.|!ˆÞèбCç«ÚäfþßÂÈ·ãv2V=n}ò ü󄉢Ӌ©Gÿj/"÷¿÷Àt‰&ºÏ‡è{瑞¬í²-*K«f“;³¡#YYYpXA]pưy¬zÂSL<-REåºxPb´Ícƒß¥÷«œ§Je KcßÌhcŽ˜'x›ì·­j¯®i¿WµÝ_û£"ÛÊuδýËK҄´ß¶qñJú=aøD—­š5#XXXŒvy^~÷µf÷½%²lyC÷÷L½ˆº„EE¬•§¦Û_â¿¥¦×ÌãŠR»ØF“‰ðø©,#XXX=qÏÛ²s‰y+¿-Ó¯9éÑ&7]n>&›Þ[‰$ÛõåÍmqr¿Vdäò—ŠYގ [ëuCü'!#XXXŸJÓ¹¨Œ>íuwƧ>è®EGÚØqµE¢D™0ñ¿e9ã‚õãS­×˜<ó½|)ß÷kìóùžÅ?/ˆ>“Úh+WãEWc¾øeltž¹§º6/j›·ù½®jaø†…ÿš£õB–ˆ‡|î)|Þ-º0€;#Ç>é-‘…”W)ø)²“Š;ñÇŧ:©ÚàNsj"¶å!3ɸ0דÂr'‘þ³¾Õh-±t˜ò!ýcMiÔú’=»v.Õ9[—ªNgSj©-ւ«Ê¯ªRã'AD:ˆÉéŠjvþoM‘õUÉUÓ°Mÿ~dUOÑBª9v˜-iÿ(¾,¢Eø9 «š§ÚùáþóU©§”¿rÒRy½&i¾?K™+pûOâ V5Қ«ª=,÷FPD=Ÿá%'_á#z‚kÝï´#©þp³ËÕs÷ãíٌë3@Êwß@Çæ; ¦£E~`7”c5S¯ Wû/{J«dKEi#XXX+ޓŽ=¾Qœþˆ.1,T’qüO?³MçȬ@7@7Ï?ï<twÆOzŤæÕfQX9ô‹Äv~p»®S„jXºêë×(ü*S‹ÅZ0 ߬~‘ˆ۸€ÊHóõ\(W¦ßÛÑ÷ÞVÛ!la|Ökò{Ñ×ç´åŒbОp£ìÎWV°Ùè[µI.MoU[Ñõº©zGìfèT“Jþ0F¿„RŸÉû]’ÿªÏÎ~ÓP”Q Н}Û¸ý•=¹í­KÉåïo+Ê£äRŠøŸ?N´º[7ÎȍËsYéé+X‹AÇEžkXõ>â?•ÔQb’áÞõûé#XXX¬Ö¹o‹¾κß:£îޟëº××JïÍNX<ïC/ B¯¦ž”Çeýü¹“Þ#ߍÙ]õê:Þû÷¿øßuÝÝñ´ÞèxDÙÝ8VÏâ*÷¼Ô-(GÕÏÞò:-?d)ÍGCf¸@Œˆ,m'X¼ç)ÑJ6¶ÏDIEÞ½;½¾>Á<#*c’ËÐûL4µÕçþUÛ)rÃõÇ,<'\N°àÿ%µñÃ^YÂoL¡ŒùsÕ»íõãÕlÓäGòQyý¢Å pmãþþ~ju8NìSÍ<"â"?(Š„håÃÞJƒÙkRƒz­8ŽØÍáÉ<Íðƒºœ§ßŒ}ÉÕBW翌:¿"5ãÆUzç6? 
•ú5É\öT´Ö±Ü”»Oëò"×»môo«ùÌ]ÛûDÃä$¦Åüf×çøh•uµ Uô””û»%­AAÍN¦úîD+^’Â}\éB÷½R4´Þ‘Ÿ]%GRtñ¡¶/—ÛVêŶ}ïJgxð†·D}>øq¼^¯ˆ<äûßG?R#ëxGîEÈžåt}ÝßZ×ßúòrºß¼¸"\¯Âõî«]Ž|¯œ‘=^.Ù?B# ºÇ5³ÞîÁL]ðnï%³³Åö.š—þr%ßGZÎõ9zÓYk¬Œ‰hóÍa­zš»üæ'_^–Q.Â_²HÉßÊù¡ÿ°áÈE-¶’•£HƒüÀc?-¸³#XXXÎIݹ§œ`uÀ¯tŒº«²pŠÖ¹ª/Ÿ Ôöñv?FZÁ¯@ɲv‚Q8OÅ tJûWý¹p'/ñý‡ºýù5ó›>·ø’3úQ¬ûb;Tͺì[q™Û5¶ïòKòý|ý\ŽlÂ$së‚b4WëŒÆ‘QÝ‹¦w#¼¸µÝÞq³1³V#YYYŒÀàÍÌnLrmÌ¿QF.ŸV`V°QÅòzJ¾‚Lٙ™;h|ÜùÃøš·cLAÝ#YYY$𝠝¿°Ašy©9óùÇÎ>cç_5óŸ3ç|ÞS×+ç•–™#XXX>ßx ©«÷h‰퇐fyW ß3t÷ùEÔ»“ƒ uÔ;Ñvº«Ò3~å5ÛJéÑ×øž+ù-Lߍ¼Ï‹ä¥cÉ2ÑÆô-;ÕóŒ<a2R¾ø8Ú$îíó=±ÚdïÃó7Âòëö‡ð/ Iz†ì.©ŽÉèZ+e´ŒvïrnTymi|‡ÝÏ_—ÏÍ×ê¶ÿz#¾IR~–÷e$S%7õ§?óìü¡Ñª§I'ýÇõ×mUÚÚ¶‹Y4h¢‹U%¥-QfXM­±’ª5±XÚMS"Ó4•ŠÑµ’Ù©«s³QÛúíý¥sÊÓ_ü{•ŠþËt«Ûy¼¤5Ó4Ëc`’ynGjv¬–JÌ£gòuÇú=¶¹25óº•×\ÄùN‹*õÚê_;¢!ËÞÑhºß;µ)5²EÁÝv¢Éb™.ë±E§ås#Ý­×\îOµÓ)7œ,dÒ&‰Tm¨Ûi±HdkX†e%0ÛæÓW©^R’QhÌMl‹»k¤[ý»m刟óúöÂÿéÝUôÅÒÚÆbhR¨ÃL2m©6šêÝV®[DaݪÜÃfönå²RhM³cÝÂîä¤X››š²“—ú?Óí÷û]×ìþ×·àbŸžÖäͱ%&ŌT`¬µ™Ý/y•aú½yr™¤”¾*¿kÇb¶ÍñÜ»M”•û;¿Ú]€“&ϝXåþ¯p‘_}t‘, –üwcy®leD)›%}u»lÈi’æÝ$L‘l†¬Íh²T¢Yc3\·"÷ú¾¿µü¯«^•>€iBû\P³*›ºíÝq(Þµw½¸žîòìÔS4eʹ b5)±kð·)6CWšáÝÍ ¹;®ÕÔ¿gt½Ú.D“,JÙe¦Mˆ°h­MÍîèæêJjÈZÄh»WqC7:iˤ¹WYª„ÔDÒ>º¸"¯Ø™;®h(¦i“v$#XXXY0×uÃ"¤£&ˆåÝÝ]ÛtÊUå»<Ü­FË&dÈVCF=ÝF¤ÉvÒ÷®(ÒìÚRa†[bÖ1ÝÛ6Úéwӄ>us÷ÝI*ō ñ¯¦ôŠ%bQ¢’íÜ¿”툷»­»;`¥4Iš&2’[»Þágµw—³1"^–äžk‰ûV潩óW·hŠ‹F-¹p~zášøÜÌÈÇÎݙJHy],PewtȔýçiî5pÚ5ot`©#XXXÜ…™‘­ô췜ÐÈÁŒ1E˜¦ù.Q$ÞöéîíÍ£œÑ²@Kçs·ô?¡þíw ùƒÈfôÎø}Nzgù©ùÿÏûFžè}íðÿ³à0ÿ‡ÿýÇÆ_*áYyvHü믳ñhr¯)õÊþ†Ù·ÚóîÄRNÆ¡ÛFí#YYYùâ ¡zLli-¬#YYY8ĜƒcæÑˆÝìbS@"Ö7>Ö¶öØÛÀýŽ.I ècþ ÿÙ8Rû¡#YYYŸÇ}#Æ>3ϘŠ’QÃä#XXXé‰&ì.(åÐ㚊ö¢§j«Ú€mç½ï|nfd™™›,9ý¥—Þq:CÄԝ£¥Ùÿè§[ËÍû/v0äY­æ âitÛI¸áËÇ*áNÀߗ¶ÖÝúë·~»9ç·W5rµ'fl °«A6l–ÏÝÿžÙ™þoM=#½Qâ•Õ¶ÖÞ<}1¡n¢7¢œI:xù±·^cvG»ÇŽþ·»³Ú=õ#º¯bx`žû͘6w<ôœÈÌ2f?­äN@JL¼ÃŸOòˆÄÊVË$ØÀŒàÝÁÑæh_ž6Jxœ@9ëÁR›åÓ-²Ûm¸Ù%—0ÌÌÌÌÌÌÌÌÌÇ.ÄÌÌËm,ÌÈe̶Ý~/Äêù­óqÐÿAíM{ԘJYA:*ôDïNû¸e­#BpNVûmÆë„À€NxàN8Ío‡eýÒF•‰€I›§ü™Ø:ÜìLÀèÎ_ôû>_òï-ß;éëõ¼D4×6/LyF^ü8ÚR”¥ ŠQvÄ`mÈӉÝÇ^•°l<°zn|ÿd^‚¥$=#.Yžt#´‹‘V"ø%>_]Ýûü;öíÛxÆ1„!døkx4 
N•­(Í£&!Nœ|Z£pÓqø¬ãžtwaÚ}æxOV¹bB¢øcŸ<k¸ùþú=yCÌZ¤ó#ޝâÓ_f^xç¿]rú÷ŠÞÞÕÄäì8Työ0j5#XXXO@Ò@ƒƒoÃNÎêCwnÁÓ¹*ǓÝ(íí#Úˆß^î8çïғUSqIÅT2ENh<º›{˜456ŽÙևd€yŒ|Zºc¦šgݶø½Õ{ø¾ ôÚ>CÔ¨ò)z²…¤yàŒIqKtn=ô;~NjɺˆáÎ <ÃpÁÂ6ÔayÙ6ŒÙñi*ާ óäG<#ۚè %Ž$’ª„ˆlÑ~«ŠÌ{^™={#XXX:š¨J‡à;S°÷|V³½Ìݽ€›³s[nÖ·‰»ß%ú¿ztø{y­…^¯kĦÑ2ÅFaIaóðΜ—"¯ýÚwí©+‡/{3µs”8¤bșs­(`ƒX(Õþ™ûÿ¾vùëW9—Eðôó%lJÝŲ¦ÒŒ±#$0*bll["²¦ÖÂm–Dc*!‚Sä÷ïÆeÁqT±RÓkj¤21Æd«(–Aêý^¼s™r\”±þ¿®ÔK7q~~bpC dQbyãœË‚ç·h–¶ÝŸMb Š*ôC[§`ìPdAÃNˆÚªÆ`b¯Æ~ž¸ç2乡b›[$ÙU…Y%LKUµAPz ¯gäÍ0Ó?OÔxçä’I$”ñàzÖ¤š4¿¹s[¹™—3333330râY™–Û’ÜÄæfff~±ˆ6>w ¾s¯Ã—,avÛ՟„{|SHæ5ŒÁ\oÆ£Íx´ŽÈ½öUìOdáÔu#Ìw¨æ9òº…Õm®7ê;Þ5ã¨ÔhnŽª9…Ô®¨ê9ÍmÇ>*¾ì`4lÉÓt89¥šÿ'bâ”ô?n»= e–8윥)IÜD©)38 ¨ìØÁ¶5ÑõÕØ6½¦Ã È`6fLS7–a³/ãWs33?”Ìõþ·<û&bA‰N—]J1y²FëKç@5eu¯£¯¢]mPÛ®LºŒµ!Haü9#XXXG>¼å®Zñ³Vöc#XXX¡˜ãí÷<³×¯k׫;*ï{ÚY¨ið°ÐõV`*ÈÃfãyzl0†Vë_k5QÃۊGj²»;.7î:®Dßxð8Œ\8o¥ôT×ÉyJ±´uöª£â޼OD´I!u*zPÇ`×on8ßî®%u!Ù)ÁáÂåCäÛµwæ½ÌƇ°y¬&ÿ¯Ÿ__O,ãÈÁG\B[ˆÊäçŠ%¤M¡¸ï„'nû¶ïæ]±®ClZŒœû§{33N¶ØS²b¯Þ%wÇåÖÝ»æ]˼©…M6¶ƒh¬™•,•&RÖٖŲSÆ¢›7·#aX¥“ÖٖŵIh“w6RÀ²BeNNLߌ˂à“(šml«j,™²*0_'äü§æ\—0˜M®ûÈÞE‘bXÄY T™SϞ9̸.êQ³{r¶¥Œ ÅðÛ|Ë`íD!ÃN”tŠR R)«"v·NÁØJ†êj¦e&DÄ0SRm¾e±m!„4ÚÚ-²Ä™‘2£(ŒŽ"Ri‘€>¿¬öv\L=•Ìý ˆ‡wwxç’ÞîÝÞË9oÈA¹z½UMU;Z¥UW32æff$¶õ­k330–æw3330ò@ÄÕU:R½Ò`b,ÎFú>–ùŽl` ˜»Ž­†ÑƒëGãŽ#†£ˆÚ>ÛÖâìj>ˆ^ù[ÑŽÏ9[#YYY£Ìn9Gp¹®”tWC¦ÜoÏÄgxÁÜtš£jÜ:Qܬl0 O5VYúØ f=7£=6ÏÓð7=g»¬ÌÌ}³í©9Ü ÁTˆ. 
€.³<k3333å™Î$Äë{ÚÛ%ÍogÁ£Ñî{o×Äٕîõu¡âfìx j!›lÏñð\¼@q%ÒX9Îf·ÎsiSÍv擲àìn8®5ªë+eCzëÅb·»œ]íë§¿f1ô½1é6:ˆêK̵®þ>+x#qÒG)N‹š–M§O5಻óߗ…à‘ÁKr$^#XXXÊç„8©Â¶!ÅsÏ³ÄøLÆ3xÂè[Tc$X£% 'Äå¿—Å+#XXX¸iÒ.… ‘„=ÿ_.9UÉrUÿ.ŸSçXÉøñ6ŠåÚÛ²˜ËzÍ!ÌÁ¨wÓ>'¦Í¹í™v-êíâ¿ôe¸®ÜI´,‘‘A…ÿúÖéØ;dG#YYY:Ð @°<FoÆeÁqEˆ´Ó M´+"È+ Û­Ó°vʦt!¥)AGåÖéØ;E S#YYY:Ќƒ"¡)š§bÙ&DÓkdl,©Œ§ÏméØ;T!C#YYY:WH“R„‚“éqµ€°~/×=ÔëËËè-´¶É;÷77½îIvw\¹u½kY­f\ə™nfc™ˆZy™¬ÌÌÈíʹ™™™Ÿk@m™­ï°õûO¢¯º°[Ö«C+ì¨n0n4?Þå](ô+âª=G]Ï8¶­‡‘½w¥ÂêGxw®ª7çn8ì9¬w]ë¨ÐØl·WR:¥Ý€Æfc­DÕYgé`>–ökÎOÒÇ=z™ìè|UV#YYYùû—³VúªTê èU1†313?Z™™™™ûNhdóWv^DÕêÏ¡³~çÏïZùãã©ê•åêõiâfìwh– Ù¤t'³€§.$Žàbzó™­ó‘ SB"ŠÍ¢!t.%Jl;{ãÏqPñUâœÒžÅß[ü§2¡¼`Nj]”œ’/oÊͬðð½옼wñÝÊôž‰p »$]¸Cp-Ô¹ú<ïóàÅü¹pôW·4>‡Ñy«h™…˜LFÈö}¬t»g=³.ÅÚ ›[ÙŠ†ã87Å<À¬‹† d°2¤ÁÇãŽs.Kš]ô–Í»SB°² KÏ[§`ìTÃOŽÄ؄  æ·NÁÚ'<Ñ·hi#XXX 3TèñÀ„@Ùô4²j¥apÓ¤42H„f©Ð:S#YYY:52 emPUQQ´'×3M­н·Ü¯®8æ"""""#Ž {»²",áµsSuUSS9qæf[s1ÜÀZy™¬ÌÌÈíʓ¹™™™¿€•åvñœ¹óæìÞ|±PȚX~*û«zÊÞµ_MÚ云éKÒ§­uÕvyÙm^keÑ®S¤º•:s¾ûóÂìVw¬]×CUµm7.ˆèÃ\Q®˜›Â2”ɝlfӶۏ9uc•vo¾ûîþçÝãñxÎ'¢ã^#XXX^ðÒfg™™ŸÃ}m‡¦¯Z¾uWª>ÆÏ“ë«>ûpµô€kf–¬éY§„c9šWI‰µ#XXX͋1&»*ì§iv7žvߎyÉ#YYYã)6ö›ÜN6‡\lOSzÑqÞbo7nç¶ëãézg“ƒ´.Ò»E=“ãí¿_‘ ‡#XXX©¼‘¼§z“™å¯>'¬ô:í×g3ɯ0sS…y›”ãÇ÷3¬÷üzÞ>`FbdÛÍÍxdJJL4›)Oóv{7GõîµÃ›¯-¶Ûm·ÓÐÜÞ÷»mÙè¹s[Öµ­kY™˜ff[n9s4õ­k332;r‰ÜÌÌÌϵ¡o7Î9ça~#YYYžßm>¹ˆÞjidù—à·X·Z_UÞå:#Ê^CÊø*uÒìó´Ùy[NÕs)Èê‡Qu:m·;óÄì/Á®–N§ŠÒÙlpž(x‹Å^' 3ñ5VYÃZ#YYYø}çìâùי™™%Û ’i IbH ÌøffffgοFÁÁ¦¯Z¤ñ5Dø=¹^gÖ·œ…榯Iâgž_[qÂç·iÑÞî;%Ù¥ÚíQÏ15w Ãc3 A0ÕMÈnìaµŒA›j#YYYϟÕÓªõO5ä}öÝy眄›É-¢Ty’qEOWÉtz+¹ëǎúàâ·#XXX¦èn¤Xh…˜ÂIŠŒ?Q•kÜÔћ»îvöÑÛî=­¶¨õμ(ˆˆˆwˆúýå½ÝÜDEŸ¢jꪪªffLÌËmÇ.`=kZÌÌÌ,ÌÄL¹™™™öºsœg<s±5éI•kð66˜|Óð›Ì›ÍMÇ;EÕ]OªS·iÙÖÆÓ©±Ü\.é]é¿<qÇbgy‡s¥©´ÚÜt‡Bé]ç[FR™3µ€W,›¤nµùÆ1ŒNÔÔ1˜`0lB6 ff{ó3333áÎ&"æ·½9r¨ð{}Uë|5î&¯Iâf쀖ªa¶ÔX‚ЖÁÅÁ%½ÜÖ÷‘eÌeÀÜ‹~»õbáç]¯£ˆżï$ð½îûqÏ>栎#XXX-à7•ÕQÌJ}Ï+՗mûwïïô¼· ìTä—%ÚJoU¼›Ów‘9îzO#×éó¯!ôý’yÄwãw¢""#ßà·»»wˆ³ØÔM]UUULÌÉ33)âe‚•UTÌÌÉ3 ó333õL©]\gË7Ë o ìà›Ð>£s#YYYÍ®èw…x>e;v;:ÚØèÚê×'U:—T9ß~xìsags.÷SFÆÍ×PuS©uuI挥2g«åŽmë9ñ¿ rÙÆ1$ª‰f|,Ìî„twAˆ33>Y™™™™óöàÀ‰«ºO^Ö/sè<Õüï‚÷zÒx™»>|ÐXÚ #YYYP3P† 
1WW~r™šÛ®ivíwl.6ôyqwsۛ—^x¤â¦ôOÀÖÝüütíI7©[Ñà'"“ÕÓÐòï§úë³ÆÀ;#XXXp•Iº­äޛ¼Tsé:]šÇךýõç%9“ÅDD(ˆwˆö÷-îîÔDYÙ¨ººªªªœÌÃ32Ûr˘žfµ™™˜Y™ŠeÌÌÌß´—•Åˎ\ƒæOCÚaŒyÚ흸ìïzž%âøÒvívu›6º·v.R^3aszkÛ#f´4H`ƒ1€Ø`Ãñ5VYô`>ÿ©ç菏\s×s9™Ÿ8ø=€ bØ Ø°I%™™ðÌÌəŸi#YYYWtž&¨ò{ùéÿ_¶ó~{¾jõzO7c±#YYY%E530kVñ5{`™=8ܸj¦önÞ&¤Ø¹íÖ8»Nyß¿—®"wߝöí+yDؔp¢qU8­ã§n;sµDÑUؓr›†Æ`‘†bF'26œßÁüѱt‘¾""""""=½Ë{»¸ˆ‹>|›š½UUUTÌ̓™–Û–\Ä´ó5¬ÌÌÂÌÌ!—2Û~†ºWW{7ºìî/`û"G$С˜Àl>F֍aa‹`c™±˜Í‘S5V1Ù×V͛¸‰ÕWRuNºÍm¿¹}0=ž9ú¯¢íÿ.«ì0+7åíº¦lÜRùªä¦VÙ1,‘IýÏ_o–évsqš¬Õ5(¥S0”%#YYY+Ã0­à=p9o/óa¹õÍ_ª!$&)Is{-ºìšTR£÷ÂYF ­a³°<¾m;’Gÿ'”=œ;°; å‡å¤þL¾c;únÌéä¾ÍÞûš¦gáìm5Š>ö¯÷öÐ$î¸8gv:5¤äpk®?“Õý¿²‡Íñ£¼¾“®ýf(`ö\÷Os‡¼åÝÃÄùÉw'D! ©£!ÿ Z<pr=ýwßϟNŠyµ0É®]:ïãÔÀ¼fÿb¬ø6£ä¼Lâ{˜ƒl¢A´ {÷Ñ#üÍw`d•^†[{ÜL'˔qœMQCt"Ðfm|ý~χæú¼þ8l¶¸Œò3"LÑC3 m‚ÙYôÞ/ƒ‡æìùñ*ê4Eã¼cÐQ„]tû»´4i¥4ôóζ$|ðttYˆä›BQpØf(³úò¯ñúg–;ÝðÒµŽWŒgQVü¹BtF$'w‹zùûƒ™ BFü-º{¶Õõ`þ‹é²Uÿ#XXXI¹ãò%]ÄÁéÅPÈz •@DØY ˆ“h¬%Ÿþ»WšKO_4¿T\ðp?¬0ëIþià$à—‘JУwů‘F3ÄÓ½RMF›5p0(;^Ï(•J(ò į8FÐU4•EG†> >¹PEpý—ɵ'œU+éÉ }ùŸ£^~ýOáþMZu±Ù£W™”ýPÑUI`„ŽÄWiÀ8ð² ¡in1Ã"灁Ýncœ7`Û`ÐÐÞDe&…õ0{õ…·c[®"ž›àŒIà³ôrÔ<2’^eÒ`éòY…D ,}¦ D#XXXÙbq媯XF6ë8Ë5JےYVMZhÃòèÅÝöÞdȽ×ä/û_V ll®~WFÈ×q¢Ôá#ێ4íÀÈ&üܳBÇ.ü;vqjðŒC¾8¹À¡¹¥ðóg=áÿ.zæRMó¸8?àaóiôþh°S}maµRr£µPfþO¿2´€Z¾!"{¼Ddñaᬒ3¥ä<¼ ªÅþ-tÚv]ÑÙ%¾ÛRÎÜð„üòR<¯,'l¦É#YYYþ~ÞÃôH’ È_hÄLÁÉ?ØÀ’k-ø°8’"ìåfaÓ Å_ÏoLßÍyLÕÛÜ}a'§»óÑÝÇwd æ»3Ѱâb{eA™#¦~iã¾Øçoµèd«Û1XåÒ1£´hìÎ*& ‰'LeÀ’€8#XXXpÌÏܨ[#XXX§¤“7Ên#ÚkÎãBG¸é:Jõ·vŒb/“‚?•͸0MÍË`$œÝIXâX×¹éüXoÌq´—›ìí_K]XMF!#¨ÐÄ$ԒŽSù>?GR–… оÆVëHwm£cìÌh¡ߜÇ1§m›dÔL”Uñ_ŠoÇ©øLœ SdDN±8_㩱¿‘>cmOGNé- ׯHBÐІ‡ãK’5‹Š^ŠÉ†Ú¤›u%ÉõG =8ý²Þ›­m»£ù'å“SÝ—Ó€‰O®òÚ²?úGž6²ç?€ú%6zÛ·µlÈÂ#YYY7e³ƒ‘D"BäÛo[Öõ½o_zûׯ@´0ƒ¤ú~¿d&¯*œB#YYYPÝ2øÖÍüZ³\D™ Ν{w(PÂožvm43µ}døG‹ý#XXXÊðè;:HÁúÝ ½Œ‚K¨Cµé›øå±Ç«ôÊW“éåyUÉ"hÓl|#=ö'Ÿ®u”‹âr£™:¡Ó $´pq$šuNA’<ž0Ž1 pvf’‘ ŒñE®×úáiÜ0™#YYYø8Ûl¬P²#YYYÙÕîg†#®þœA,XÜÌP#ðÿö ‚æbi •mkBâ0Á$‚˜QEÛg§åÒs€”Š…û½xs¨9’œOÂx'SOm0ApòAý²Pü¿³/øïýZãSÀjµ)ŸèVwSþ8lÄ9™¹xH ™ˆ«ø¡ÿ_,y)rAÉ\€d†ŠŠ""ƒ¤‡~íÊÖG µë·<X;ùè4l 
sõæzmé2…#YYY#YYY#YYY#YYY#YYY#YYY#YYY#YYYÉ'É÷þ‡·ã)Îsœþsæ.ÎôɰY@fàÀ™Ì‡ƒhHˆ³{òhýß.Ærcž5%P^küBê>>>>>>>7Æffffffffffffff=a™Y›Î7½ï{Þ÷Æfo{Þ÷½ï30âjó‹¾8½oŽ.îîïyÆeÝÝÝæfg½ïs½ïŽ7½ï{Þ÷™™½ï{Þ÷¼ÌÎ1`¹á’wá’Î!Üá›$]ÓdÝ6HÓdá6H=Yë}l]÷Âq`&06nÀ„0QÜf#XXXá ²†YeL³®÷½ï{ÌÌÞ÷½ï{Þfa;·»»ÌÍï{Þ÷™™½ï{Þ÷¼ÌÍìÙ³f͛6a†0p!†aŽ~èÏ<h« óg“— ¤FõFs¡PŇ=™>m¸n€ã_,‡ê‡å‹¶H¶vGÔûK`K€õÏ ÅKÖÈñDC•VC X´ÚÔÑß'YÝÕÈúEÿpçiG#ï€ÿÇÑßÏ!Ð#YYY9Ĭ5)Ã@m¬ä^²BâDîêm{?ÏõFžDDЉÙ(²@½”­Bÿd͈è¬þ ´Ù~üLÕ¡ªòÚö¥c½vS¦^÷™{·jeßÈâÖùٛ4æKrîUÉ\jû¤”MS«j¬FŠbÚÇkØ^ï{Ýʛagw.;”b„mÚ6HQÔcB§š…+"2*5œo`ñQòž›?o.ub%"HŽH€ !Úöy‹û|»øI` ü??6þgΜ¿ j£oæšÕîý•»«8OüÛù±íù½Ù{½Þïwºæ`c¸!ϬOº„×¥gǞʏ·îÁ½kî6@@\{¹HÁa‘¿"#YYYžšìÛ ‹Ïä] ‘ÐÙHò{ᅬN+ZÖµ­k×·,ÆŒulO£§‡Ú"ÍÓrÝð¾>>>>>>>5˜ù™™™y™™™•™™9™™9UUÐ|xî>?Ú¡‡û½ ³Þfûööööööööûc¡Î~:뮺뮷‹3333Y™™™—™‘™™™Yww}†I¾ŽçH;Iíþ¼ö§ÏŸ>|ùó¬ÇÌÌÌËÌÈÌÌ̬ÌÉÌÌÉʪ®øãŽ8ÌÌã3333333333333333333=þ=vxñ^xñãǏ<k1ó332ó2333+32s32rª«ZˆˆxšÖó5­k[ÌÌÖµ­kZÌÌÍkZÖµ«®ºì/ÀY3HrFÕ¥Úi¦ši¥4³nAÃg>ˆ:Ó6ôÅÖ^»¼<ÂxœxñãǏ={-Êt­ÖªxFS¥o»“"™¦5¨\Ö-Ž6µ­kZØqÉú|}<Œ#YYYR_<xñãÇ\5zÞqÖfffffffffffffffffffkZÖµ­ffffffffffffffffffff{3ã¾ûÅ®»ã˜ˆë»W½ãÜn–b­ï"3V³ZÞkZ×±ä_Ÿ· “æg}%ÝQ× ;˜ëãý]EÃÆ¶°dÁ´#YYY¬Ü†äÇ ä܎yô[²Äã$ñ#põ‹×²éجgIYÝP´§Ý|ç×Ñv>PëE¨©ÔŸS ûwoáǗ>—Z뮺뮻333333333333333ZÖµ­k33333333333333333333ì{.—á÷ý>ý¯f#YYY?w ÆÞ¼^$F&Ì#YYYA¿IûŸ®;'ãÎÖ­ÎßLpÓ}ü8òçÓN­ÌdýœfHßt“Júvð7 Íãð#YYYã6ðmãÀÚ6ðë9™0hbc:Vë_†7qÌÞwzg™éë_†9g¥µàÁ}÷ß}÷ß~ŒÂ5¿&±`^ÆÚJQ4='µ;åJ¼®§œ¡'¿¶bÌ.ã¨êÆx ÃR£ÎÇN˜ÔŒ…¶$ˆÄ™6Œ¸îǏ§ÚWÖf¾ÛÝÆFV÷¸ÌŒÅ—Y„åffxePï;ž~O/³àçæBI#XXXBd Ž££ð±E¹çâCDS·¯ÑÅÈYQ(º‚„¹ù“i—°¹ÎÃlùó)®#YYYÀ^›Ý•Û^üv²´}1Xɬ}£Ì}õ~ îŸyâüuGCªéu::ºuÔu«¥Ôèêéç¨ñâ<fk#YYYf,k‡jdíofXÅ¡D÷ýp%-淛éî7¤Æ|h¯ˆè?ëëˆï’ÿG<O"ìüÇø>÷È|¯_.§ ÿן@4öJRŸ×€öŸ›þ?«>¡{µ¡pø† _ä•?ôžßê?Í?¶Ÿð±ö¸ãÿ/Þ¬´X,ãÍ·i/mM}ë…ëaqÞowïÎ{f (jV…šÿºÛÜ/0¿—¢½ÞÇE]Tg±[Þ2⁵é,•è›3–Ô/QîŒNÉXÔÚ¾Ÿ6âÉ]ÂÜ+©ì6­”c÷] ƒâ º®EþдqÃxw´r—2¯³’¼œ »íoDWÇ|èàíŸeÿ$êÔPêM=$A…oÚùv¶óïk_ÚÚÿê7…Ô<jY%‹)1…bd½÷m”G°¿—Ãä>c̪¨Aìè@·(ª``”»}§ ò_Y»Wÿ¾òÁ|³O°^¯tû®ÔÏêk۝çâ1ÿûßïѾ"Äøˆcð‹ö}Ûþ+¶é ¬‚Ö?,j7ó⁢NÉ£ê0zoº¢·ôÂóOû¯”#YYY æ`òOöGÃü2œß:Å4Gâs °Ã ²]`аwÿVó wåēØ\þ*ÖÖíUU_ébC›­D‡è9¦T.˜H„¹X`By!Ò„4„†!å!ۏ~y~Ogè·üå~µ¤¾äæVèþap, vëC‹Æ¤Ãq/ÏÚZœÖFŒûüYÈ¿~\ˆä«É 
ü=tú2×ÚÖæXù6Ô«ÞH©ùey§Y–ѹm^Ù0ÔOW½èæ¦ÕIÈ¿Vre}‘ï;œÒuŽïþ?ìÖf3Ý/-`_?GÕfU´>Xp¹'õz¢"]ð$½–`áá¼@í¿ÂT/õ7ñ¾/¾' hÖ?Ni"щ¿¸Þ·„¥Œ¡úÈLœ‘.ì6o‰Êü?ýµbOÉùƒ˜¿ÛÏÿœuÿ'wӞu¬Ñ¾ÿ¡,ÿã^ÂØÇw±MhØ[+ƒ“0þëÀ¹·Ní §ïúTwà/þt~Éh¬¦S4Ø[Txv/u1AŸ’Ú·÷ài:²?wÔÒ[‹&e‚<©:ykúoܸv=ýü7ô¸/Èó0íbTç\ ÔZ6Sîeô¾‡ÓÐ `ྠ=;¢$‚ œð†Œeü°ÐÐÐÐЛbm/ßùH¤"ŠB)¤"ŠCÃj#øÙöð`}ðS՚€Â¤ÕþŸAÀ¡ø` 3ì“îû9&¸zÿqÙwwHû@ízîÏBÿähŒŠþѯÞýî4"`þ ^¬?gÑՐÁh¢"#Ïé NîøCÎO ÿä­~•‡°“Ò ™ü8yOó¨ìÃù6Wx,@q€Ð>½•ž?¨À3 -ïf³'O­ˆo3¤("{`èBŽùxG—­rû°rNø÷“ê3)éj­Ö±­-çÀ^Œ?aåüGéýmóõmµ¶5¸¸ ˜,‹ßP±Á2þBš™ (Ð$`Ð*@7ù“#oßpñ`¿<Çx⧞zj•û‹»TÆÔ6kö kXç•W“è¿Ç·'=¯On…à^•‡÷¾®¡l-ÇsÝbåãߔ^ú»Ëˆ¯,—mÀÇt‡`@>'NBràÌÊr™CÈ≄OãD#YYY6ꧤ6c±³õ@0첸âƎ:Ç[Á¿"wþ€Oõ~o·‹¾¾ÌšHÝp7P‹Å&Æ«¬X0Ášÿ6ddSåõd({]ÐÁŽ×‚/FŸú;¼ýÿ×)+óW;b÷êxЗÁ¡´×/‹gC„£øEÏy>;~ø¾3éna¬2œŸàëšã?Ž»†.ùMÈ®åiW#q³ö2sôšÓ6mšmfâÿ_«p/Ô,÷ð.`à\ÿD^¢à\7h[ú7圽κٯe»BæyêÚ)ÿŠãs {ý,ÅúѸ–•#¸"-ÿ̌7ˆzô$ŽI$×Oºß¾èU=¹9€-\l29:.™½Ä#¬àÄߎZöËÃ}¯Íj‹Äîu»»Å)Ÿ|„Ú£`¶e1UüÊWÀ½8ùk¡l,Ö ø‹|yÍ/G9›-óûæG&W¸hê»[àîÚÚïq!–V¬zA–.]®Ùñ/'¤Ý7î;PħÞÚeõ'µ/_ŽÍ» w µ_jUTr/a/˜¸›=ƒÍp>"ì,{ |Ŋ¼x<Oˆ|¸žá{®¯‘OS½ÖQö?.Ϟù­kZa¬i̪°Øp¯ õ¤]!à_1h›½Î×£‘ì,ŸhùºØ£³ÓB÷‹e«ÌŠàZç㙮©­¦Yž–_@¾bßò‹Ö{ïûS%¤ïHß”+q/Tåê/†<ý¼.x³ÞVõ…ý­j/ŽÁ?½ˆå¾üêOÕ.êCs¨@ÂÔÓü1É_ï#ýp¯S`>„<} za0(ª¡¯>óê"9;ÂE"3mA­Üp¬CŽuœ9VKno¦['Úíj—?5V×;³3Žge‚}–-æ—ä¸ìë,Ó,u¦ß‚óĵ‹²x0è© É*ƒ¦9LQÞüŒ‡×æí½¯˜Ï¤§½âÈ«õ4þ5ùàÁ0d ’@ÌQ^öÉa´iÈ)ýçîmÿ./Ñï^ÅîûŸÔ› â©TÛ?Ò2{-úº Ø!ó™óœÌ!.d¶z­~ü3ùÛçò\tcG‰û5¢+²NÚ3þþ×Õ¯Ðxþpä”úÁ£z4r*˜ü$¨”ƒ¯à£ªª‰#XXX¡$)båÑ;0ýÏà¼?Äs㡏i_·µ#YYY¢ð-[Ém‹9Àµ)?—··˜³‡™äeP¹ý¿Q\§ ¤¦š  œ}IÌÿuõq¿·ê>k"¯û{£A„<Àæpi 1~„Ncðͦ&ÿ Á1ÙzÓÅ><ìt$ÅПڄá„#YYY€ÆljlÛgé‹ÈïÎ;۔#>ÇúÄÓx'ŽÓþ&¿Œ”ÇìŽ×}¤ý_ªPètýاT‡ÈcBö±Ä½þkI¯Wò]¿úY)A5çR|ïLðíÙá¼`òíý4Rjö@ìHòIígÂcG±:í^Æ¿ ¥Î6`}úWâNÈHäÁ^È ÈH<']°?t#YYY*û¬>ϱ€>¸Òqˆ¹¯ßM2{þ·"ì.gÂÉüíSz/à[Ø,+N£P´/λ\ùשñûzzf¨Jx“%ˆ€"©†¥†¥†¥†¥†¥‰jB†÷Ê{§ L0ÌAP™+,ܵ.íjj±òÓWõªli¥Vÿ=rm>¤›§Níºþ›Iü«-_lM*1K^jŒký匭øm¿¬¾òúúp EFÛôÓåTÚ¶þÃWñªû•Á@Ÿ¶ä‘¹ìI‚ %W@‡y€ãÞd”Wgø³Bm…;ªTðºÿÏî!ݕ[…ˆ#XXXV!Ök4åA¢ˆ¬²NHD²uŽ.·– âË,®<ßʋ}‘2TaF$fÅ `iTk#XXX±+%,%ˆ³ÅL”̨0LQ˜F!’¬+ÌKR…$B… 
a$e•‚˜¤Ì¢k!é†ÙߖžŸõë†Mw~‡WŠ{Z¶D#îڐCc?[ù˜LΤ:ŒÖãυƒè&«ÿºÞ|tĀx°MÛ¹zp·ÉÜc|GB6°7©:ó}¨ü‡ÖL‘ìõèҌÀAY•²RkMëЫz^÷¯Z™ZJM³*’’³+%&¦T”–e%çâû«ÛYsÕÿÎiQåÔÑÜ$¤Å#YYY…=ñ‚HGfðêâWÃ~ø’þ˜üañÁô“¤‡H5c¾Aa<¡à§Ï!Î{å  BÚ¾ì9…øÒIö€âba„!DN)û@v‡BøGb·¦4ª\#XXX´&«H¾Ìù’XoÈ¿Âùë¿-R£ùVËi?ÝPq8‚УÇì ÛæÌH}?VyaÜ!˜«ÀI˜½È?žgúìÿ¯óÙk?Ëј¤«ÖóÞilÙ¡ð1à±á˜#XXX͜|phQÃÚxÁé÷ç¬Ç_l“?ƒm¸ßù?Ä#Q~AgùÃ%l¹5ø-ïqµ÷Åú7ÀþÐí›8~quz5Ù¦Áhþ®áÐ[Eç¬/NÂä-«!{Kü~߯›ٙƒF_ bl 6!é.íN‚ô¡æxZ Ee£Ñ¥K¾…þÞ*¯ÎÂÖ ˆ¯#—awk1X–#Ÿ#XXX7¼ZŸ,cAkþ÷üU¤·L·³Ïn¢ZSòÁ@S´PôñèÙù¾Ï.ôm@áQ‚ñv¬>WDß»âÕ½'½‚Ì)Üsv½¥_oÇgÄèX´,â‘ÛÀ¾Án.?bô¹‡_èO> /SÑóöòì{ð½Ã!—‰\{TäG°('â<ø @ù4P~;?–AüVü7¸b"Ìœ‹*Ì3è—#ï¿£G#XXXþPŸÚ¹ŸÎØðwAýòܱµ…V0ßäo›‚ýýö?T4ê)òC‡›Ž)œ?‡ËàjФ9|•vçcSûðè×Ág<2+®¯ÁË12¶Î§dgñ?Ÿgó…õˆïüوd”¨”´)Dâ?³¼Éü¼'SȉÖv2íC}èjÞà:ý¨"ƒU5ÁEÔ´rÁþ ‹ÜGÖ³P¯tî—ŸG@9©Šy¾‹Q¬Be “åL¦Ÿ53@Ææ‹ ú«gj/àqüOèÙgڐ‘ãìqAåßï} Å„û>þ¿§îýÝß=Ýtþõ†°_Êö"衝“Ó…Ä^'68BXåÌãßÐt6¢f¹¡zi¡ašß&¥â4®™Wí?tW°¿Ž?¹; jŽÞ¨Èª¨h<ž™à[òK1ÎÂl‡#XXXŠBE€lÎBnù±kB~²Ìk2áÈZ<Ù&êV£%Â`²ÈÞÆÉǰKû¶\…°· ó(Â}ïð† *<䩿ÚY±…gÑy+Ö]6WÊ´.éæoñâ¾w¢j]91qŸ¶äöìÑéDjݽÿ&üù|XÊõßb­ƒVª±\hÕ4fO0ñÁ ”Ž^OMðß%ñ‡ã1=§‹‹ï5Î5?v~.Ê0ƒI”ü¡Ú€DL4ï^Rý.>vLѨ>hñ#YYYxîÚwimüÙô“ï0.ulCœ†žÂ˅Ý}‚î–1C'"âv¦*¬ú¤q]ªÉ ›Ê¯¶»pWG÷2[urlløbÚ¾ /×öM7Üðø¡Ä7W¬ô@ÝC™½õ¦}ìÞe¥1y4ˆpÄ£‡pð˜£R5ÒôÖ(.§2ÛDrÊ;…?€Àv³<¨ª¿$vèƒ6gœ°dÑ1¼_ƒÒD±žÍ_êÛù¿“\‘Ç*hX?±¨Ÿ£¬‘ážïú×@]7ÔM¢o齒¡>¡{:é;âùN?qäÅCiâ¤H-AŠw¬e«€Àr Ï<z!lÄrqZMEd¶–š”Õ®s9 $Cê³$Q@ùº¦L#YYY :0 ê.¾X‹ÄG†ɃÏR֌̸Qu¦í÷’·'òœ‡»ŽejmjQ³ÀZÆ¢Äß#vl-åÒ4«'DM½èBz˜Pðƒ&ÆÂj¢"¿_óᢧ‰Wp=4v'ûïQ؏kٍš¤Ë…hï#YYYæì[½æ‡Ì½‡oÃóã.|Õ%˜SMP{g3„‡6ÇúLæŽ|Ë ü <ÂMïͪû¸~µµ¼DÆ"•˜‹#XXXŠ? 
ŸB´€R0DK0C)„Jғ*Á@´%!F2;#HJ;mé?}ÙÈ>¾-÷GÑó~°©A½OãÒ¿·ëR¯ð‰‘jüýùü…mWæÖ÷¼ ó- Ø.ö‰•’àgÐç¸uv?܉ï”wç™=ñ;‹Ä4{âÎÑmÇë7sñ/zjne~‘1x¹8²”×Ä_ëÓBð­ž¿¦õê;I9ñÈ\_ëúÛïÛ3Ôý8ßÇt¯ X/|S¸°£—˜W®4,|,|ï§æÿsøe³±_«î® Ü[‹ÍuïN# 2 åòÐíbÌíýž¨7¸óv˜&˵>žÀ;_b<}oàÐ~ ÅÛÝÝa âËöôþ›Äñü>|¥^ò÷°ñGÈfof¾ôØñâїë´jåù®Eý©©O&H²ìÙÇèÅÅ~‡ñיe«+RA=À$*„<È;Kw¿õË·…qí~±s-¤Áwé‘|>þB½âðš¿IеaL0Z[{ïÅeÂqà]èmdtÜ^•)¹»Ý^;ÎÏñG«!)`ÂÒý„!û3Ó,„ªÈJ¬Ë 0Â$’ 2$BâBŒÇ?rFÔªq úxäëHiÁC{„¤@èÃØ¥ 9~Ü:KÖßLzѼÜZÃê`5/ÌwI5P¿—úØÉå«h’[Ç“[Î?ç¡’æãIÿtɺxµ?ËýyÓ¼9WëyVÏÔþgj쓬p¥m±ƒdÒ*'¿„Z?›˜Î“íÞ Åf"d‡(ôƃ›€A׫ü÷3HZ†&¤‹…åï3ÓøÆ(½Pܚ!"‚(üÌh<‰ø<4a¥ý˜}#XXX3N*ý2 ^80óàµ`X¸´‹3ÆÑ’Z!ã`•è`ø/æ .’U“~õÒ©wKøy´°\ÈdgŒ¸?\`Iìǐ#YYY®?Ÿz‚=‡áó²²G­kB<žù¸ÚÂO»×òû zä|)·t=kK,X±—1<f0øê“’NÏæä Ãrî»¿xëm³i&©ÆÒÝÍCù¥xÕ/(ÅÀ¹…‡z] ‚ËY{òØ BAïÉã! (ŽŽ+3Q«%/DqØ.‚Ð!Ã:'FxùùžŒ ùߊ~ÄÓ¹>buòÎw$.§%:ÿÔ÷ˆ> 1!ñsÒ@>Ÿåªªòex™…²üÚ»Óa !ëÌÐu“í«Jœáû#XXXrOŒîîèwprË˜›U`äHé!"¤0ÌH–¦óÅێˆJ31$#XXXOÙš¯Ám@3Š–åILm{ٍ®oe47éojPÒÁða‡ßò€èÝ6òúÿÙõp8#”6ßnÁiû"Æ ÷…ØZ ¡n þ^%Ûóèj ¤ökÆ(o[£ßU€³¶…Ø[··¦¡u‚õ¯~Ómµü?‘j†®ÆNE°µŸÁd[ ÝÀÆõÞ}l¶Þ·=âæG¤¢äZN¥ÌÕdŒ[EÍ3g#‘z~—£x]“¸½æþod?oÊìvð¦…¥í°\‹"ÖS%…µ,†RvÊË:Ä=¥sI®ëà.•o•û·t¤2m’“Õou ED“Z½2}Ö¿j´ÛD±P(€4¢ö œÀxª˜ÿQ˜Œƒ \äÂy•*ש!/+½"Ò‚u_ ¤’‡sڎ1ŒTØ÷]j¼ý:j˜ï}jîNRvM ´pqpPÓˆ‡o`¯åö‡SÙ@Ƀô} ?+˜îË"Ü/ñÃÐ óýÿ¡ #XXX?Í£Ÿµ(¿L?#XXXÖ=Ìþ8`a²2úøY¬÷¦ÀZ¡ÎÁل- ˜ÜrŒR£B=χá£Ç—~Ì^¿¡8ŸyŸ¶G€½×»×̤„÷.ÇÛ/iÎók¦ŒOuâµ=vÆyífƒêß9Š(³?Ž8ô‘ïH½ÀÕ6)øËÝf:N);¤ÿ聱®|›þm”PwwÙ)ÚõæÅ:ÐùÖXA0‰_ù‡:ŠÙ$&ï<%f€†pÛ]·¨}~i(AG%QW-íÅç";öœ‰˜ºÂšüŽiµ2¦ řD/ü?ÉëÕ·õjo°ý€;éoŸ<]<®pü B\Àl÷ ûAqvi‚ÁnìØZÖ(Ã| _ö}U<M±k!È ‹#YYYaàœµAŹøÈ}üÓ<¨HR¨lŠé#XXXšØPõíìñÒl5ª*®þÔ^pS’‰‰ø.Ý`0¯#YYYN†bë½¹£ZËÕPëZñçÓh¾q¢HhZ»sßk`ìÝ®>]öc/Ñc}ô§¢b "ÉA¨ÑѐXÌi„SQQä¯ 30s131ÈJ,@0PPÜEþkD7…CES#XXXŠ@, Á‘2„@ÛWu֪唅V„h,ÅIR¬30ÌÃ3 ÈI !$$„’7$”hLj*’(Ò¦XJ˜)ˆ‘ÅQEQE“njA(É1¬îîs•ÎW9\ås•ÎW9\è2È2U!¤ÂUUʚ\Ã1˜³¡‘hP6€âÒ¥½ˆ5+‘šU#YYYò£$e ¶¢II²jMIi6“j%"UG$]È¢ÓKB„Â*³ŠÄˆ‹ (ƒÅ«\¨ÛUnã»»wvæ,:š®ÖÖËhcV·:ZÕB°"Ú·uÕ[›j·HˆˆˆˆŠ«Ý­®ÉÁ6rÀc’E$RE$RG3Ìs1ÌÇ3S Ä¡DHL!,ˆU¶A5°B. 
€ˆ¦H†ªhIF”š¾ºê»n³ƒ4hÑ£FFµsb¶ØÖl"‹,X±bō­¢-³-¶«–«›mUÂ( :šºª#YYYb*Ìf33)K0 † `e€ÞÕM°ÌÌR ‡€ª&@š¢€"™´‘­ÊÖ¸€‹mw]œˆ™UF"""5hîëjŠ1ŒcÔ»µX䈒$$ €’ I¡¤ŒU‹,mQUE«s[_Y‚X…˜n ¹$8L28x!ˆ€tâªpä. R'ôc‰†Lbe„£lªm€w!BR5ˀÀ‚ &‡ P࢈Tà„E#YYYmÀ˜†`dÉP7!‰¸QàŠ™œˆ&åãÜiÙ)æ„)Ë ¦€‹,!(Á1œ*Ìmˆ„ÖAȹÃøÛ-…˜Ä¯£Ýx•Ý„àLq’}1w;7íÆÀÐT ò€ö"ª¡žÛÈ4§7³ôètó7T0ÌÕP¬€Ë"L˜Q†L‘• ú>œ}:Äx_¢A9QUA ì&­™«32ÕRܕtÖf­£VHe‚¡9qµ°ûã٘±“#XXXļÖÈnˆø à-I{Ÿdx3 2NT¿/ç%oݸ´6 Ìs5}³t1™Õ,2-L¯ˆ´á¨70òžx²¯!ãû.¸·vµaä[(ðZ R–Èójí¤{Ly¦Í8Ì, Á€| ;9…d †#XXX8zƒÃ¹î{èЙ 'v1i†tÌ8u¢,G8mi_‘Ö°USj ªˆL…ë]\ŽÛWé?)5¯)(¼q‚X5Qrl@c•Vt3AVŒe\Ì$°\Êà ¡ª¥<8ÅLL'zAÍV`J•`æcϨ`'(GWÜßwßn»·wnîÝݜìâ@ —gNÝÝ»»sZœ9˜h”ä½ãÌ1ÔÕ(”„pvžˆk»òâ}n~xÆg×?ŸIí“P õ_BÁ¯Bë·Ò|Z£ãÎ4¨ý0ƙ"×søZ=Û˯&D ÑÎßôQ&äã?>² Œ(È]@Òêæöq½í_4d›ýkP’,+“PùN9qÐz“1ZÓ|Z’h®ÊGH¹%ª ¼MSc—^ցÓ_ÏÐÆÕ†ÎâuÈÊ©ªÒØÏ!#YYY3åû½óïjf¿îk\;úap•ÁÉo»<m?Iyà0žRëEUD^‹]¥á¤¦Ÿqϕky©Òꬃ¨O%ÿŸ']Ý5 µl'ŸøÃÂÐbÕ^TFÎý•4Gv¾ßÜF1EjÝLÁÔû’FÄÆ`˜tèڒë cš.EŒ\=.‘Ñòa—2¡ä'A´‡Š¯Z‰,DÌ8a‚Òç•]–±½#YYY#YYYYÆL˜25Ւg#YYY*9?åÿthÞ`;:`[!^S.T2õú=þ×ÎÀ?B8 5Y·?ý盰ô4DYšÇµ4¾i;ŽÜ} Ô3͒h0º„ɸ1þµØÁƒ4Θ_¸óýï°äqÁÜo}#XXX‚h”ëÝcþÊ¿ôOóüwƧÁZN3<ŠQ)ùìÀô4 { L‡ÿh\RZw({.$÷H<@.¡@é ½Æ_ë„öGYóÏÕΏÄ^†rÖ¾A‹¿_×)ß´IQž¼Àèï?<.ä5ŠÎÉ÷JUYØkÂ?´ÃÀN;SÇ.`u#BÿçÉÉ×i{ö{fB®îÈI‘çŠBM#XXXÀ»ûçX8°s0Æךèèêx£·Ô=¨è“ÍUUUUP t.°¯UÅô–‘ƒ—gù*ë˜uªƒ2i·]ãìo^í×´ˆŒteÝa´)…<21b+?,Wjúåæ~¼;Ü{øˆhÉ<3Ìiˆà´ÃùµÏ!JäIì'RQEQ"""" ÄDDDD""7K~Æ«zK̓õ«úÔIV‚?a?º?³þÏ €Úk&øxeX²þ‹ìHïGOðQh™Ò·üþ-sñì›×y(!ÔíþøõÙ(ÜîV¿Ý›M—Õ.^Ðõ—û÷û¦üÅÀ¾ô*oÃä-μI?€YêY#µOº÷'ixn’кÞ2›—ivN¶ ©ýYç,c2¶ ÇRèvp`cÝÉÅÚìÜþçñ‰&.¢’6D``JV|®¿.úƵ§M+«‹$ Ê¿ !ýï¬Ã𷔻D>Éïå€·Š˜vçö§Š$ô¾áùSEBϗûÿê@¹©‚Åù&SãɏÎ- Ÿó ÷ºüýpšîôMçÁø¿é0ƒ*´¾âbbbS&L™2–L™2dɓ&L¥“&L™2dɓ)dɓ&L™2dÊ æ÷}ܯçáñ…_¼þ¼³'åǎú¿Í—WìŸèËëŒ~À·}ÔäF¢µœMÄ-›Õ~+*¿c˜8#¸=Ï=E@|”)ú¥/ן«ßžð¨C”ù DŸÊü?9É AÏc[cÅ;‚­ü¿äÀìÿòډúIùì- IsP5¿³Õϟ*¹˜Æ—í 8U<¤ªl¦I-¯¶¬ë}«{îxì=£]Ûi4azkþv=ÿ/Æp/È, ø¿”cqn/·áÝÁȰ.wö¯â—Gò…ÊÊNO</±ÙOsã~:¶þÞGúþ«÷ÿ³LÿèzÞ?õ4º?°“‡³_»“öBnÙü 1M;`’ýQ×O½ƒØÁérÅö!¿ó?Ú;r€%CÕ!@>Ó#YYY„ÆWÿðÔX<A#ÊMGòœÌ¤¿~j#XXX#XXX-“kÿ+©©6ÛÑÿW}´–"PÑ@Bu1GzKáÿÛ;¤åõÃÎyÎ_ô!8ò9œÿöë|ØÎO🾿Õý݇îxHJIÏäßϧ?Æ=7ƒË™LÜϓ 
ûóèôo¶úºõ"§úÿgûÁBÏü3_Y¿»øŸŸýsüÝs»®'£|¢R†ôfÎõŸÐ¦y‰ŒH3#YYY÷ü>(5ÀþŸæÝZÖÿå?«TÞeŒW‹Ûá~í͹?Ã@£Ñc'‹íZI$’I$’_õgO³Â‰sS·ÏٓŽ•ÿ§'ӇìzçÞÁÌlÝÃ{ÿ0ù7þ‡#YYYˆºÿBá·]Ÿ÷6ácÒt§P@ö–LË£ºÍÿ[õEFNÿëþo10-s„T=}L@u3u#YYYÔÇPu7Y­¯Y…§Ñ—\šD“xÈbL£6ƒhƁ£hi¸Ë\ÙѓÚ=‘ìÊö/döËØð~¬{½=#Ò=¥z/Ièz^—ãø}sWØÆnãj¶L330&¿`‚Ônhå9ÜÁ>ɌÅzQ‚ Ñ¾× ×á.ê0]l;sŠÍãYWL(™n®WI˜!}ë‡c`ŒÝƒv1È;°Ïˆ>np:ǂ¹Å¿k-'IÔuÛ8û?÷ôôrǗ.\¹rçÇX^Áx³^5ì^µç£o!&ÞÌzwðHHnÞ§RŸö‰_0ðn@݄¸ÄLQ‘LTvkUƒòEÓhÐ#YYY½DR¥—$Ùuec?)¹,֘’ÄÎR90sæÁÌlÜÆæÇ0èÝFzpöÛfÜÆ4`Ð#YYY´F4#YYYCL›LóPüÙa˜0ff ـك€f#YYY˜‡Ì°Ý6­ÇSî鹺æX騘À-%vëÝáâ {(ÌræÁV`sfæ769‡6æ]tu«¥Ôèêé×QÔt:®—S ¹®36eôe²çìëU#Îmµ¶¶ÖÚÝLI¤Òi4›J+ Ä¿±w”'†NWþÐnÑ:cq»¯x1´ŽáòWȾIöµ}§Úú}ê÷Zù}7ËÙº& î¨÷(÷H÷î¡îCݸ¡uUÒ­J让t—Qt.ªt§!u'DꍑÔ:ªé]K¢â©Õ:N£¡½tº]7Të‰N#XXXqIÂO§Û‹ê~GÊ~€L"[Ü &fÓM4Ï<óÓÄé”/`¼ Ù¯ö/ ÚòîwñŽñÇqÇ®­kZÖ·f1Ø+ZÖµ­kü_ÝšÏásÏÂ=ÑÀ⽤ô=.ž½ã¼wëºï;ïºŽ£¡Õtº]]ã¸ï]×yÜïwv¨à éWj\‹©:GUtZ ¨éuq)ÂO|£°w¯+¤î<¯bx¥HïCºÍW‰\•á.bñS•8…Ñ2Ž¡Èt®eÕ9N‡5ÔäéµSžê›”ïIãÚ0fåí}1¾Øß~8ä'„e:VÙ-ÈO»¿{æOvÖmw_†9yõíï¬ÇÌÌÌËÌÈÌÌ̬ÌÉÌÌÉʪ¯Ìúþ§ÙÓ. ™Ù‘&ÎɃ0362 ›é»¨ÝH8ÿ€’Bv+ÀÌ`Ì`؃6€Ú h`qÔÄÇólÙ9íY´HBgSzBœá5z*òÿÊ#µwnÝ»víÛ¸Yø0X 3Xk1`³XÜÇÿ–m›’Lw"ûï¾ûïÇ<êÍ Ø,†³ 5ŒØÃƒ#x½Z뺎 ¥$Ovýç`Ók†&Ù3w ˜DÀ$0cÛ݇~ŸÎëmîë9aóZ»äҊ—”|¢N·{+dZÄeEùêéƒjiAèñŠþ|»z»z÷õõõöß¹=»F Ø Ùšöúy5ætê:WK©ÑÕÓ¯χ¦üsÛ¾qÏ_^»‰`¦ LÛà9nŽw½ï{Þøï¦aíkZÖµ¹ìÓz0b,؍‹†-‰ªÁP*ÍQªÅB­S//ÿ-®hûeÒ2žêÎ*`ÚÖm£mchmm¦Û`Œ®Å¹7æÁ˜³f6lfëÜøuÔt:®—S£«§Ñíâ'ÄO5Ty‘ä4<¡æ í ª®•jWEuK¤º‹¡uS¥9 ©:'TlŽ¡ÐuWJê]N©Òu#YYYë¥Ôèê麧\J$ ę†$0Æ»¶6Å¢í[¶Qù§æï.*RRhƒ’ytêëïS9ï¯dVwî§WWWWWWVW<#)Ò·]uٙ™™™™™™™™™™™™™™™™ž>ïǪ½o8ç­îµ­kZåÕÉ>°~=k‹Ï]’˜ô–Îי+­Ë¤!;Ni¤.×yÆþTœåUh‰Š©«"ôÜ8ñãǏ<xìµ­kZÖµ´ã°c-Ë é3yïù˜ Aô‘ć@ý#XXX'à6vB€x<ßuÃ$Ԓˆˆ‰þÖ5hv·õméMnY|&Í¥“´zé*ÿJÁ^µû«”Òá¶ñg\s­qx¤ý½Ÿ÷ïãÃÒ/©psòñ”Ó@I(mBööPM?&š6;Gÿ d¡o32Âò<d@OèSaåe9]Çë0C¬Ǒˆˆ#XXXA(1O=$Ì¡ÚOh’+¬rCÎ%ío0âÞsaYC[©v= HW3:™™àvôvŠyèš)|Xˆؾü†Øä\ ëóΧt÷Ó×÷Dȁ6‡°™d¢ê¤­SÎ÷¢”ÓÕè8M’#YYYl| ™ÞšUk>_ÍãzŽ«3 Èöaïû¥¼äS߃¾fMÌ·ÂÔáñÒ: bC*@˳ €½F!&H%“ÌÖO#a‰Ž(‹Ï!”Kñ7#YYYfÍ¢¾Vº·å]$ÂzTǙ ID‘#MÄ´””¾fY؝^a‰×;ʦ‚?ÔÛë±~?ÀJ¶ë_«êßCÁӜ”Œt6é|ÁžsDZ×´àAÀšàÃîzпí¡ÅLØ4É)pòf339Oˆqxí§fÚ=˜Èò¿œ¶Lè ߈0Ú*ª>R‚| ĚdìÌÖ6jM–m…8#YYY¡DI 
q½eŽ†q—„:¸ùÏSaᚭ9˜Öõ<¼ÐRDURÖÕLÇ1†öDñx{=žÏg³ÙìµK…u×]u×ZæcùñÇqÇqÃ'‰«ÖðOW­àfffffffff`ž&¯[ÌQ5zÞfÌ7lÃyôÂC46»×}÷ß}ïÆfffqœ ÌÌÌÍæ™™™™€Ù™™™™™™™™™™™™™™™™™™™õìo'Ð}@ø‰·Êł|ò;èñUOÀü¨L§ÎÀsàOHÆ{ÿò‚ìA~?}EQBJDÜ]Ú¥7‡¾Û%³H«â^ÃH„IDû{nº6¥÷»©ØƒžçÌGp®ßìLpCŒŒƒ?¯ùkþGþì×  þ¨ÂXi>\ÿ± þܘÈ^)ïCù=¿0Ÿ¿öœŸˆŠ*ý!ùCT\{;þ9¿Cä:‰×¾„÷·lb"LPÒ°)CɁƒÁ…üCø7ûâ?yÁ¨©ƒ–„á$2µ€Ã…ð7}†Ì[ÞÛX Sèæý%À “"Ï-(:jlàٌ²#»@˜ç‰Ã%‰މ N0\Û ßgê ƒ\""‰Š›êý†f³5 Ð]<«sÌ€“&€7 EÀjÔ«9D58_Ä8ñ²ËíáÕs¹ïuÞnâï—ïhöÞ|þƒJñæFÞOr\™Ý*É9¼"°èg€…tŸ6Gƒ¹Õóñô× ä$Lp 6gäº#YYYɰùdÚ¶V©!êb’t^ þ#g|5˳ÂODfYXÓ,Ë*¾5p3»¾|îzÚKxd…PRM#s{öâèÛÓ^1UUÛ˼9†Éy‚øöìCIÌV•h=*sQÊöh­|«q!+æN¶Žg¿®˜6»#¶C9á@½“Ñð4’ø£ #úöÏOm¼; ²g⦦©0 &?nA µâ°å¬&Z \a0Imb©·eҕSNä ¿¤/&rßÃ:_(ÜÇÐÕÂd‰GB18½[¦š ‹ãì<ܴ塂˜YœÌÈÃÒ`äaç±E¾®’m#ÀÕMÄÃå!8ØìwSUUUŒdã©Á  šj™^„p}º…“—žTÓÛQ”D¤c/ÙûsÈöà¯Lvûu®žÞÆN)qÿ‡o¥Új³w÷¿QÊO鄥ˆ$~(#YYY@´Ì£ˆRr$#YYYê˦#XXX‘\óbƒÄ¿,95 ¼ÉQÏF&•×õÝœ„‡IÖ÷& `ÝÝcî¼A÷PV* 'u{±#/~vråEU3'÷s(‰(½@ԆoçÑòëðŒyfQÜ4^"‹Íæ(2MESèjt©B#YYY‚åo>8ÙbQ½I¸jf©‘‘ mög`ÇJa„{Fa4KšÏ&؉Š‚‘8#XXX ÌÃã¹ìQ_?ˆeCçðkÂ#XXXÀl’#YYYÅ"%ó”` »ø³3¯^®­¯KwZnæDOwµ¹íÙïqܝåÚ¸Õ¶÷V³/”aŽÃè$Ož "¥~†ÑÓ¸ëÏÄóþ:+Ɏáq"#XXXuÝúæõêá#XXX±!%Afý<Ó;óG<7Ĝ Ô¹ôSD±C0H³±×ü#XXX“íÖ'þ$ñÂ~nå=åeÂh‚"vüx:Y¨¢hÑ`$NáTd>ˆéiWÓ œCùˆô†”Í¢û/€w…o¹;Mm¡Š(i#YYY‡À)͏ű_„žŠIýp>Òë“äúñ]@ü0:‘ZP(0k+?#YYY%Çg·<Câ㚴~IêN¡^ãíøþLçÈÚº"Þ¨m5Æ#YYYd¸DÑ0’']æ³e$FHÒÑE¢b¶l´ª–µùªuQQêÌè¯åœHd#YYY:CG˜_£Ãø¿Dwbºó¨|ý²¢ˆñÁó$½ÈDވa àÅêÑpõì%ö?µßâòŠñ ˜©.™çó(`á¤ÊïIørˆL̘Ûè‘Æ™µ¥mjñ‚SI½êÜÄÆeØî™Ë6t*Mto+¶æÏŸ†´ÆNSY&Äë/"DÖBî#YYY‘7´]–‘NÌ#YYY:ŒÄõKô°  àÛÓ£:Ù;1r ѽ¬#XXXTÈ%mHc Ãa½<î[yoCšsš“ƒÁ݅ҵ-¶QÒsª€¡T"F•)¿ñ³¹áר|çór×%Ô!54Ìvhž‘À•&*üx%™^þ]­ó5“h $˜"Ó½=ðЃÊ Å^á6Ä* ½Ÿeâr‹ceS‹áqÈí{ÊW§6â™aØ;òˆ9@[ˆ™æ¥dW~—Ù~–ÓÏÆ%ò°ŒÌ”̰¬ÊFb,e€²¨iOñ¼#ªhˆ¨:멤ÚVF±S¥Â+UM-`af@ÀJ „©A0 uîɂh"OÉ*d#’£’#XXX(¤ý‡(u0LÐe’Ùb̲BÉ#XXXW Íc…ݺKa¯iaJbA‘›F‰a¤Æ“&¤“Z+"&jk[²òÛ֛^vÁØ¹å ‘~4„pŒR°Äɤ ¤%á2ûöö ç#YYY+Ù,BÄ<'G{l=jrDh}&Ô§æž yElà4{ ih0€uH¦a ¸Õ#YYY1-98pâ}MDq¶Shq‰Ã‡Çn“}‰ß>zò?:»ã„ùÅ4(¼Žg»³Br_2R{aë”c@ÂDúÃPWÕÐ^r!©ÿ²~¸tŠ;–  xB\”®ê†…¤¢Ÿ±¦½fJJßߢ:|i¤ê‰ääõ̓QO抩$‘…>~} 
|×4î³aÒ¢«‘i56]¶QµÝ%²Ú,Œß´àú/ÒÚ=?OkÔ_‹÷ÄÁÐ=Âû޾Ýó5\g»O!à!2&·ŽLœ"BܖˆÖº^œAi‚†ƒÉ)„…,ëg™Ù³¯cğtõÒ÷uü»i°h5«»Ÿ’¯Á©òXbÄüï®~½Xáî#ÚáEϛª$f&…þ¸ºÅa‘ $ïa½ :c#YYY#XXX Æ·©Æ@›äçèñ4 ‰e`â;84€uÀÌ ÀÓQ’j«YYimŒlZ¤@R+#XXX2,_p¤§A!Ë˙åux‡¥:ga§ēVã݆A7¥çÙöÏlçSOùHšáLD‘C™cIvec,¯‰‘E #YYY^q¡1š`„M0ì#f^ÿ¦¬L8i&}­QüW“デæòy;°ˆáÇ®„cä#á~FæòŽïVJäæ4AzPCdB¯Ìf æ_Æv!í†|§È„#YYYQQ0ß¾¤´4K¹›YÕѦµ"+JˆAŠ|â+ó|›á í„9,IIKvû#ÚV8ç×­í vk#LîË#gué„Â%Àƒ!"bJbf¿¤·6,½ÖÜ¥N ™ŒA•'šÉ_>`”Âo1Ö:‡H.î‚Å×:«ÞºŠ¶i˜/»0ú•0^Ÿ0‡£Î`>rPz7ف„k ˜ L•6Ó­A•±¦Ñò¹>#YYYy·uvõ芖ºkËÒi½î«¯Š†³¾^—YfR½wMí]×ví¤#YYYŒ¼¶kBdš#XXXœ‡%…kuíT–ÒÕ$T‰RAU&eEM¿G×n•zù󢅕´'Ê>Dø¾”:Xˆ×§ÄÞlžÏW¿²y¡/¥ˆ3fTÌH{$šR“çCðgÃÙ ž¨§9J¡Àë&—¬b‡Ëú±Ï>ÝSM¦NÉônÛ8É*NyÃJúC‰´<'‹ÖŠ@³<Ö ?<®ÈøK$MøN%Mw‘€”œ@î‰R€ Ét$„2jx!)©žl¥l™–ß;¦š½ë·UX˜…ÄcIhO+P…±Plº€”P“+‘0Æ%t´+‹è_¹#YYY#ÐCóÑT¡#ÃÌf!§Ö!wã¥#YYYRG†bj …Œ`¬ììò}>Ü.˜±£M¨\ ÎÖ:¢Õ„4ƒÎ¢3ýr×&º'O>·¥¼eû·û4"Þqõ£¼ z]ò>a RH]vg1NùN•jhŠNóiÚ9Yu0l AQ)҃#YYYĐ­苉aKËÞXã[|ÞÚl¯¾K“ST4Ò¦››Âñ¬ˆ›ÜãkO{ZÕᕏlé 4kp0Ê n†",)¯ñkW0³†éÝaÚ ®rÉžƒg“ªfT%Q%0w`šcEcà9™k*3(º[*ÚÊõŠ8ÅB™=|;vև™Òfblæ³Î¯ùß5iv2ñ Zõ…äYV»Z:ٍ<ó'QöÊë}B0i0ëDMÖ…â&F2Ë3:žK–dLm7…mïw+NÛB'ôííH»Æõ&)[¶AÀ83„ƒ°˜Ø¢›ÂvA"HÒ¶uɨ†”é$ý¬Æ_-ôà˜ÒÞ|4¦¹Õ[1hUùAV²Û”X-f`1‚í£Ú¦õyU<ÕD¤$.±ƒ¸uã£YfLÏÌ=Ðø}x¾Ä/f¥ëßÔ|Gñw¢öÈ|Òû…<§#¦GCõý#XXX<E¬Øf‹êS617Èʉ)ñËê1Đq_«ƒ!Ô cå˜Ê#YYY°pd:b´’ºD9û2a…ÖÛdDãZbrÁªÁ‚Ã͛¼ô#Àîc[ûØ&ž,mÐw{•j…åøÄËJ¦?“½Àƒ=:òrÁOÇe¤`úOĦ#XXXn€‹™7âÐI;ö&üàvž ¥q?vFT½2¬Ã2ù±é#XXX¬ÿMÿ5þ”ÆëM?wùù…APTAPTA@ P(J¥R¢TTTTEEEB…#XXXUUEJR‰^¹„²!G¸ÊY“‘}míÄÑÖª=!¾S,ñÀ#ÂRŽÛ<„úK/0(?NÜÐvè!Gü¤ Ä¡ÙÜâÌ$2Ȃ—3ÍÉMhH4S4Ý2ÝLnw]’ŽšÈZ卣RåC]i3+²Õ¯T¯-RªrMêií~<_õ­ É*)`ƒÉ~² —xc¸4O÷ÿéÈ‡‰hNPıQ:ñѕHb†§E†kÍþ³2²ZC*§¡¡êŸMõ_SÅîq:®¬žÒÖÓ&•ñ® Ã,9‘óÍ2’’™:…ÆA†¬\€ßÿ¶Ä©kñ?~aüßêÄê"ùQüÕ  ðµ¥DÆ+ΘCø3Œ®`ày#XXX×+I,‡âðƒ3FâèÄþKíB±ª}ÇfÇ÷éK{Ð>†±3–>…óT‰¥€gdgV}0<Â]æ31— /:ìÞç™ÅRDþN¿82>U¾‚öí¿#¡7)T¡áÆ3ʈ6ްM*šK­ÂýXü2¾’AÀŸŒzH´‡„˜”´ÂÃ*daæÌ¯¸ƒ(–bW¯…AÐy{¿N݆±§FDH\twkâYß{SËfgjìҞ>"²¬oçzüüùoq¤R1ˆ’QŠMó™Ñ!R‡œõİňÐúþŽ¿z¯â—R²¬lÑFÁÀ§KvEåÌ.,"µ9C=ºÄ©‹Y…½æ‰‹‡)ˆ´j²2hÉdÒò¹ã9š“6-iz¬£^sÊfÙ—ÏmzˆÚl´M¢{­¸ÙEu«v]ÑÎæËçy¼Ñqˆ™"&&@6Ô¡ŽÉ“"ÿ½BÝTiIª22L ˆÑ2‰×áïqét#XXX:óÖÝáUJÒ1•ú¿6ŒFfSj3e11Ø8´LÌ<Î=\Š„di 
ŽÔYˆqŒ„‚D¢b¥®–òÚjN~ŽOkº[“¼ß>çŃ,V(† £À{HöÐv]kRÀÔ¡ƒø¸9Hò" Oª“Q+P´ª* }Rea‡ÇŽ)0 ø'ª>ç´%¦«ÝmÂǧ 0ßáÂÒÝfÊ÷¾ç#YYY¥Nµ>·ÐÎ:-E3‡}èíw®Öw¥x¼ÜÝz4í·g{]524Md›Ý[f÷ºÝ$ X#YYYÖKñt+cËÐÚòÒv·ô–ýÌ2îDDDD@¦æñ‘5¨~ýÁ´4!¥•™Ð2‘LÅF³42d¤Î³]š(’Ýt­.¬T„Úþd¿¤ÕW†òûÒjÌ2—¤P‚Ã˪£@š; Äþ¢}<`×<·ìýy£È@€{ˆi‘7ïWɞ^úS·H§‘€Ä ŒIô©YD¨vÆN¢¥«—œ;ɉ>A¤>GÛA{ó¡¬ÌNSÿÞÑ*Ò¾¾ìí—ùáš@<%Nw8´Lÿ.lþO‡¬m­86>ìxÌ2#¼>&¼eyÉ\dxÉя-.¬ÖGݛ¶hCM@$…pþÜ%_ü¢J Çü_î‹Ù†(¼J9J\  0û¥ %€Ê’IS$‚L¨‰Lйã´O‘å1ÔU%¢CÄ1B€–! ˜`‰ C„7­ëÒ«¶ƒR×j·[¾þé¶#YYYDr̉ïǝ8&=?Gü4‡ö/û=6ª‹~9ØrbTqZ´”±„-„¢tz„Ë©É"¿ýçûû3gH壜¯ÿh6$ J uÊ]œØQ•4:¼c1€n?÷?éÆ¿èøþoف»yú|zèSq~t3·&fR•ä&†Jfˆ÷É¿ù_Êãïÿ…Aªá¼‰HL2·Èm¢ Ó¸‹Ÿ(#XXX7ô9ãÖ#XXX©SøâÎÐ#YYY³¨y¹ÜÅÿÒM×ý5[>±h‰!áè=`äBôRi€4ÄI##YYYJÛÿëJéª;ñúµ…zё|“‰ÁDáóI‡’2Rvr„&–¦¥R¥³ee–šT¤š`È©é¤çͧJR~LÀ”ýРPCI†§ìþŒ]BèW<˜Šÿ¦@ç~ Â+b‡’Áü2«ù?Œ ÃTS(t¨»\r" ́’Ù†><:;5³Gíã÷ÿ´xH@Ú^2ÂýNRß³ë8_כ±4¢¦åÓnÔ|ï^´’h(b'0ցÝNÍbB… ¸1ÌL\\É)78J¶³>tɝ®«ÓUé[»ŠÛ›Ê¹’˜¡ZfëMm|»ÛÖŒ ‰€¤ÒW9­q‰°ÅE+¯]röéÍÝk¦ÎíÛjæô$ÉFJFlI“1cSdÙ[&f^Ó$l«#XXXW‚2)Q(YW;Ž~·›Ú@Àò?`•õùüôkÉ*„%ð^Ó[ƒü]÷ú¶ŽQ´sÇ1´sÚ0vÇa¼boGhã´o£; ­ë‘Ú·‡aÇa¸ì3µaØò^cµ€Úéy¯×kÍnc&Ӑ󛬭Ö˲ã²ÝvYÚa¹Ììo1nŠŽ#\F#ˆ|±l8°0(p5ÀÀà>&aµÃ Uqk‹àóg0k#XXXìã³wfSé¼ij֎ё÷Æ2>ƒÑúA¡KæÙŸ#YYYÃù½÷¦pgöa‚ødz´(Ç9bŸ»×}»4d‘TÆo-H¿4âÈÆ6>j¢°C?Ìà4斞oçr›xœWòXQuº¤òÔd 2vÒ¥fsJÒìM¶ÿru#YYY6B"¨ØÙ$D3,"ÉT°Ä#YYYHè„5Ã6î»ÝÛEJ{•†4ˆy"Cû[™¢Û#XXXÒd1Š•‹lH­£c"A #·Y@XðÉp%*d¦³žðÞˀ¦B& `ÆØKQXÊ8M†,Ã,TdlqÄ#R#YYY£q8ÜӆœDgÞl¶5F÷tS·mÍWwtº÷­ôƒ–8q‘G<ÎKfXSÆbR!X–4’­%©±²¨ ¹5m¤Ðö2˜EZ4È4M¨®aÌ+æÊÝÁAs#XXXdA8ÈÖÚé…7#ð¢Õâšù¯.K}µ=ÉyvÑh€´ÊWq©­ÁŒSœÑr D°#ƕ0ˆ ðá–›„ÓJc ²QQ#XXX®˜iGˆÒ‹. ÌYÈFÐ-COXõ¤#YYYS…3Zdfò\‰¡3lszNPiܯ"(ԉ˜à `¦äÔÚ±Š4[çvѤûëwwL±KB”(L‘yQ*ÏPVC©Ù)9àÖ9»²0ŽmÚͨ{j©\+TdkÍÎ]snDP³¨ì”5q‡KBÕÓ™¸¶hÿe‚¸è›mïÐCîÏÛ}÷'Y”ûÄîáãXV!Œ…(&rÅ%š¹‹L‰5å! 
ô"¤%Ó`à†#]B$ŒÂÄ^¡î&Ú|{woªp51ٍЃÛ6Ò¬é£f¶ÁÊQÐCo‘¹ëFœŒç¡ÀSCDÊya¬4馃ë(TôÁÉEbCƅsDÉÅϖ0±C´§–¡rMÃÊÕo³¤œN¢ ¡ˆˆ³-æ­øyy­ô·˜®QWÒÓkm0m#Ri™‘m›Íï'##nLĆ‘IaF !!ÿ¶q ”ÔðªÆvðhu‘M)Î6†ˆ08ä‡ÁÓNìt!{ž1VÃiÞ~ ìí,WbxmNchØÙړ{%ݓKra©Œ.9hÝ#XXX <Â&ÆXˆRʛÛ#XXXÑ3(ØpWXąß* ´¡ 5Ìá5+I’Aˆx† ˆN ­IçÙòÌÊcsò¬ôœ7aåÕ{ï‘‹E`˜›GÛuHEjj5+ºé5rù×½t^W3×V͘Ô]w*–rI3½ç]J!ˆV” ÈÆÂÄÇŽjÏzÆ$÷dßü0µZ _¡ˆÐÎÞÝ¢öøy×~šæÐø=)ÌJÆê4Í\do$ˆÐbM&î0ÖÍçBS<ܱa€L0•P,”¨ÁQæð(w|?÷þ¿ªLeOàmlچ‘d«³M˜Ë4Éf2ÍÿÀk\ߕÝVYH-¥c2JˆÛ`õÀwÅèÄq—'! ždájüR“E¸s¨Ô&áÁ“D\³Ós”^;cb§•9ãÔÙ)¤ôš>¾Ã¦€å¯¯±]z}~ôæwknÉ‚`†ÄØÖ!ØÂAFPY =Í!C—¼Sƒ9ï^Ù¦¦O&c\Ä7'wi‚÷ØՊ¼*=¥ç#YYYAÜ2:¤)($H¦”ÑJùgï1;À þŸºÏ¿ÎpÑϾqô_#XXXO™k–’_B¦æ£Zt‚‰ &¢%7AÐ'y±¿ÕýŒÛƒB;æy‰­°û<]Šãú'¶I NC0’GȰ.*ùXŒÅ¬µcy¡dÔ,„¤ùO(ãÐÙåŽévՎÀd—åÒZþ®µk‡ÉE] ñÜkÔD’ž@a#YYYªÞD4p±/™kåYN¡¶™‘ä1²Y(ëÄ*^Ôò5 q㔮-‹—•VâH¡X´$­j”Â6ÚU„’dÐÝn8lΪyDçÎAš¢"dØÁæ0‹8þž¡µ:ý*2g­& ímDÙôoR@ŽŽ=¿§Qkû Áþ0¬÷Ê##QR}ݲÃùE¨P¨M´é´…§ÏLí%ÿ>÷L<¾q1¤›ÝtšÑgÄÒäéÉ-P¹îñÞäqooÌq#YYYÃíÿ߯ó j'&+D5åGnæÝòœ„=ó2*OÛ¸“:ᔠK{ÕMx~Ï üyx‡5¼‚8€©'uR,Ôõ\xq’Â/õ¦+ǦZ©*à‹í=-®Ëõ@3æóƒc‡·„p_…8²F»¸²Üw燝 I»ïC­ÎµšñÅûèˆrí÷½qÞ¹ÍÑ GŸûPy^|k6*õbðO+Ié»å­P„ÊßÊfŸT¸Å)jzÙGdËs#ǤtÞdç¹9[t¸PÇ WTâ<ø”ý¢ÎðŸŬ‹dhž†–Õ´©jʕKkË¥­t¡¹D à c“|'„i#YYYˆ¤R•ÛG„W:È$›s¹8ž¸ò¢†ÒÜøV˜„:i…ht’z˜òºè¿;;LÇ|AB÷í‡dnx*6^øyid@†@ˆƒiy>Ú%áG¹;5}ãϗESu:œ„rJA°@Ûbm ¢BàVù¯uêÏ]èZò’ò›¶>ºìwn‹àrná_\-ÈÖÞ­ˆ¾2 '”ðxAél¦[†a°%œnZîŠ8’ÑzŒOF‘);#XXX0ŽŠ I.J‚ ™#XXX¯Èç™sƸÛÑ$¥ 9RR3àÔM6#˜M±°~¤k7ӓí‰]1`ÆõßÄ&-YJ"+ÍGŽ–ˆz‡ñ#YYYr>ƒÄ¹*£Ã݇ [ÒÏhˆÙdàiï‰cÅ ”lgmtvŽ˜Õñ“3ZnÒccíµ‰6w‘¸xšŽëAág¼‰•–D-–#XXX¶æí¦¯˜#YYYM^}ܪ{šöÝòxëvµËië–a⺯ kôj#YYYªõ {2˜éÝ4xGnyˆ‡<ï¸I7”:Nçeò?}n£)Ù{ãù7ó¦}ÍüéÀ¾ ñ»pvd’|gªG³7ž×9}6Ƀ–aH™G\”Pß.œ;ƒ!&–·\°ÈEäð™¸A²íã#XXX°7>cކfvaØçãv™ŒLRåe8’Be°YÁ#YYYÇQ’Õ—ß'I®‰á¾$ë§\޽Gn£—0!Þ×5J^Ä©È8øÏ>¨ ù,Q1ð‹¥O^Þ¯B÷7ce¾XwDç•N¶+¿ ×j¦Ï„3ˆÀT¼jŠ2AeI€7<™-~¸èn:bˆuBPžH#YYYÍy‰<¼öDÉH“ùþv@:¡õµßœÜø7æÐ¹8ß"¤ÍÏd¤Œ0êÁ8#YYYSÖË Z›v®·oÔt¸=ó'_/»7#XXXÓuÛâ´5i3 ٙ¸9øº'¾tÍ탁ájzòE#Ç´1Ѻô¸P)¦òC‹Àb#XXX<? 
‹pî{‚`éÂ@‡V£H:æ.$f Lб Üu‘\Ë ùާWçk•A@¨Â8[¢¼;”­Š×^{ÒcÀBv<%Û5‹Xîôp‹ÚcBðï¤#XXX™Íû˜ wŒ¬ÀíPDñ‹Ì53;Ú-yŽî3"³#YYYŒÞÂ8b— ˜­u9Ã÷uŽ|3·ô™™Ðût<J²8Æv oƦ0Äü:4Á§¾…Óa38“‹¾ÌwC5'¾Œ€l¥²Ñ¤©a‰»»0f=>Õ=ŸÍ™¹„úêH–ñœ•¶f8±qE/#YYYUUÌùàt0™3iÀjÉó÷=ÉíæâxÙI¦qù{RºëGˆuփZ˜ƒ.Ì֐TÆÎSæ‡Ü¨|§ù8B4×ßMPBjìz; K¤ƒ^e7‹LÌhÔXþ#—ùö9Àˆ~[¦PÔ0ܰòˆWÛ¬µ#YYY1<J¥0ì“xw`ÒÃ4Õ® uÉ^oÏNݦvmŸåqÄ7s§¦–beùÓxË6“ Œ’Ь[:/«#<Óô×­JŒk‡˜ƒÐç¯Ìå<†²4ÚD$ï=.QjûŠdɃf3?.“3(܎¬LŽÎ¼ù:]Í[8Ìä'óÇ®}lÐviΦ y‡Ýׯ5׊æ]Û`7ê´c]ò\M<¹sQ>ÏáùÖËÜæ¡ 8Ô©npD¸0éŽxˆÃ{-n¬PÞº4ݲ„Ú#>$ÑÊòEû1îäӃŠ[ÔäL7§uï<z‡ÎÓøàŠS Úõ®­¸1h¶ôì?#XXX¸Z×´Ç)–Ùt8:/Äž!õtÉÇaùn&Lì°íú–-‰f/YܕzdÎîi1匱6Àõ)Æè†lAó,í&Þۗ9GËÙ3xìt§l+Ù=©AOÇ4€ÛíØºvÕ¹ªp×± o±øÃõA”QøõÖSÑûbžg­/­ËÝ7êwÌtò\hm#Ù4a¹ÔÞ<‹·–I&;|óÕ!-¿„К=ž#’k®¶ñ®]4Ô#—å‚OhVßhmˍÊèÁÊ6ÛÇb­Ð(s‚[p͸Ù”ÁÎ!Ò4S¦žM¶½Ž£…*¥Bóˆ$öNÝ t™2ñ.™´8:#Ÿ7ÙǕ³¯/si»wÔõ³¤ê86öwæÖ”+‘#YYYvH<¤VHq [òk§ˆ†s±B"Nò'¸Å<òœ⋎šÐ÷ÝPŸ6üÜ»ò£nªuTƒtŸÄ§òªSÎ=ŠP”&twŽÜ©.ൻO÷¿W½k¥ˆ¥È#YYY\ÁÓKESó”;ÇKh8½pôl†ÙniÒ¾¨­Û&Xò@„´à­ãÃÚµÛ»v=†4ƒã‰µrF߇m98Ëԛ‚¯‰ïJÍ÷Þ »z±Æ¢uI¥8!&:O{ëŸ2x®Î¼ðkníŽëBó3n\j>I<Mq­>ž|G"={u×´DõëU'eB"$¨ÔõêŽ\å¹ÜæH8^!Î<çt®=ŽãòæC°rCã1Hv´ 0ÝÎu¤ÛcCÝެj¼=çsçµ Î¥Gz¦Ÿ‚Ññ#XXXY¹éÆå7) ‡CˆÒðS(°ñFÝC[µëËíS0é2¾þYUáJٍDÄcîÏoSÆôwðz#YYY’ æ¬Ìœ'<dòÚHÓ4âDV¯­£~zÎB£.½Løä€­Jš*7φ+ˆÇ0³[æ‡àˆ4õ7}×\À•èZç\Ã3€Õ®ŽÑkÂë–á̳ n¹Ü4ð£Q0;$ɐÜHïãƒÅR¾˜ûLãü=~ߤª5|’a™Þ¸¯B֖+ž¼CJzõ³bœӍi=Ü=þP—ÑáwÃì\Sùíì-©»'‰Uu|b´=k‡Ç–b¥«sd˜¡FŒÔ­J˜š¢gŠ/†ª·†\)c삄$¡ÝNÜ-Ѧ!M¸o4©BЄKæò8vºMß9Äñ7ÅK˜Ù¹%˜mê…z¦Ã{å‚<ÛP¥ùätº9sOI‘|b˜ÇÅ-º½*Ž|Ïž8f6@Ž5˜ü•:˹Aêu›"½Nâé·\—ÃêÊJ—EpB`ˆrµ.varÜ7-×Pü½Ïª“‰“qÐýS§¹–Ü¼!òX…Ô­ÖÔnt¶Ëś6ü9 ¯U&žMÖs4NÞ´ÕS â¨CÒÒ»¨"¤ʂ…P¢ÚæZ.³o+b¨äɧ)ßn´†Ê‹’åîòÉ*ª!MÅU Ñ[vªM(ræžØûJz҆ÊÞôQÂWߛKÑ~7jîѬƒtºº—¯At;¶mòw@A‹­o ³Z™Žä BÓ¼JPÙ:!‰Íºx]jkG¬§l("%Žø¶â Z4ŠÌæKŽt¥Œ€rÇ$†t?)A#YYY¸*„óµÉ•\VãdQoÞ­ß©89]ï9röç³P‰£¸¦‡ßl>+b؛bµu½Ëð03]kŒ½¢UN±öÓÀiö#ŽÃVJÍrÄÁµ[g8ÐÁi’'Œ›'‡dõûÚçc&Û`‰ú:Ùg*….͂ˆ3‘ì\ØÎI¢Ê(»íhˆÌH‰v«ë×çñ{Ýâ;©Ó˾´¡=pÀóbn–n7TEnovYñ<HL§Þ\[ŽF”´Äh!æ0qéTÓk‹r¼Coj³\$Û»0¡ˆ:^eåaÑÁÔduuO­iNѲä‘õ5ÔÛ¹¤õÔӕAµß\./sE(r.ë%o¼ÓÖ·•5<Lózå¤ËÅS®zã/¬ZZ~­šut…¥œl•RpŒê÷Ä᪹ަmiÍÀ”qMPŠƒ˜ßQµÏH½P9Êê3.‹Q{xxW¹¯/[ã#ÖÆô€}óXñ[ê Š„¥i†% ÕIö>ô!™&cÅp‘i#YYY™ñ„ö½5DçžÇ×ÀÇtqQl :@ï ÌybìÚJ06ýSzåýïµ 
ŸÁÃe50‘3ÞD㔚‰®æµlÂM¹}Ž¢{¨¤žË’¹¨µW/#£k¹“På-ZïJ³zxº4J¬¦„J`Ó×g%‘¢þ'¸¶æS€ÙÝ=6(Ѕ¡0€^²CK¶u¾¹R(b%ޗLÒñÎԝnRH ùÌçxµ·pÍèÍnE¢ ¢#m¤wÔw2 å`!@–¬Ä²iìeëMÒ«¶Ã‡^ÜãÅ<WY™¸òG`Ô]!`SÂKIpÚÖÒ!áLׇ¸çlØb<z­TÆK…js+ʧUð<©ÃšìÆa–%æí]ž­7ËiÚßd”ÎàÄ\$Áí:杢تt$f®6†©cÔßixhVV4w®gˆžã‰‰0—ÃNà‰˜BòftÍx[6†¦#†¢CšðX­õPžX3URÞ#Pݍ;¸½B]s»Ó1w¥ Ĺ·#XXX‰…qvÐ‡d³CÌޞ#XXX`®j^L€¢íÚwáÎþ&Ÿb=1qTû¾U8Ç!ó Êá´Ç#YYY,ÝÌtÌmƒL€ÐÍ,ô |ž=NŽ=*©€¢Š  %V¯Ú¿]®ûüüÁ¢#Ï,ÏKZGØzO§´N-;¼KÃk Ç9É|"o¸çåD8„øoòl—û*‚p‡zÅÚ%2ø1Äqdl* #XXX#XXX™jƒ^cˆxÌUåU£û\GË0'/Gm³ ì1¡ÿ¶„ÿҀü©A|O%‹®b© Èï%Òñ´Ðr¨‘•JB&XSÅWý’Œ¨Êh”öì÷#YYY'Sý°Áæè®†]/¼ñ>èb÷ß>«²ÉFN¡QÁ^¥IçMõcZTôÈéô)!½l ?Ün§²{#Êymèg5¥xÖ ¹ÉLŒàցÜÛÄÕoìp¢‚Œ–´›ÇIĚ8°)ÔâÀÆ` Á³j §šYa½¶"ˆ‚ƒ@(.º†Ê*È88ã¹ÌZkTؼáfÁv¼»#q)÷vøÿ²µà¢MW·'³J÷º’îX)Š Uw¬(LіVµªPQÒP !eå YÞÛ#ô ¬˜s¥Ê“ÄD®¹­ÌwÇ\Zäž9PîÂöv«u¥ ÙøÇÊxЏÙqDº&Æ¢¦ë©æ·§nRhØä²}Px7'­iꍔ¬]Fâh—„¢™Õ)Ëgè»Q>áô®ç*·÷¾ O™Š¹8²<hëÃ/W^îöVÃ×N©ücÏÒ;|úëdÍÌ¡!K¸’çUko­pRàtôˆ‡Ñ¥Í/ezvºéj^n%s‘äjwË®Eeܞ&h1'ã ]oŠñ8Fûê*îTƒÛÔoŽtgz@D˜F¸¸˜#YYYô¦”ªló#XXX$ÏdT0yíÕËà-¨‚@­zW†DeêIžØS&€n²xñãŠÑËŠæ^N<ê[²¹âž<ëEÈ£Ü(Õ9 îó÷ ñîòßFº“Hb‘P,ëÏ5i«·ƒ§?©Þ¶D"­ØÇ¹{\@¶Í-NÌljÆFƆySÃMCD»¤)IŽ«C”—¨ò.yqh§×¦æO=¤Õԛwpøˆ:#XXXmžÕÍ;´Ø#YYYŠÝU#XXXír…(zdé ÞŠ±®ƒLBT­§Zâ'M:N¤~ûèø±uß&HpÕ9od144c7D>Åf¸ªuàÞhNõÖD$A]f.M‚ê۞%˜Q›<qÓtÕmi’Û xäÞË´0ÁRCŽ<{l—N>€YuŠ#YYYÚs¬ï†¹çHç‡ÖLq­fÝ,ÖòŸwùª5o7~Ìï§­MðŠ2FK­r8;A´ÌÒçˆ1VD5¡í…#XXXwâ®/ ·›bI¤¨Ãªƒ¤¶6wò畵¼ô™\'Dz°8¢Ä€ü~jÕóõó£&•›BM=ý¦Þòç<ôM`œ„åÞNÉ%¾\åäÙ'ÌGǔva6y"ƒ­ìÄ0¸ÖMY£'ØÚïiő‘“Sfdžx71!BZÇ 1M‘MnÇIˆ{áÞF(×ÄõSSï˜#1qÉìòœ<ÁWü£—€=P@10ù^®€O…FUÁvHÁ$U!³½€ƒÎeÀ(§$`¤ &ÖÔjb†DØÙ*Æú€#XXX"¢€¤™#XXXXRqî;ÞL~öƬ½áÃЫÓ)YaÜ¿æ)@(DSÒ,± DDréÈz&Þ hüÄ=hJ›6·´Ý·n¸‚d±@&B@ˀ ì7¶dú’eÉ´`¯bíWê‡õÿe½x‰†dø~:ýòPvnнÀ{ž`Fd„Q }ô24¼ùù4´æT Ÿ¼/²SÜ;O&µt>àš#XXX¢¡%fIPj=<Þ¿TäÙŠ~æÁˆ˜M&`ùÐ/×±«&;ÅRôM8ø”`;¡s¿œ‘KEÏ‹U˜aY”ýíäv§DàDå´ö„ŠDD†Á2ÆÈˆ¤Æ¢Ói¶Ål.t–[]¨Ím¨msÆC’9ú'%ÙɈƒ’")酦‚%YQ€ˆ•’IëÈ>ˆO‰-eTö=š;ôwÏÆÑÀWï}yž™¤¢-ôåõ×¾¾]„bh1Œ,@ÜMÉë|jc#YYYm1â#YYY£bi¥6N_*Þ×½‚íÇ3 )—! 
‡!)0O&c¼q‘®QT'×¢ÑDÁŒu¼ju#XXX¿‘~f¶WÇfô¾Ÿš ä ©ÔìÌ'»¿}ÞÇíÑò\î»ó7áì¾³[íz#YYY رJ™‚â<·hGIþl0‘IPíLÎÃ0óþ³äÓê^n›Oë—ì“é!ìŸÛt5%¨ÈÔdZ41MT¸²† 80$˜’8`Bc8˄a/Az&#û`ä%¤~TÄ«F±¿¥ÛuL±#XXXmb­&‹0„ŠL¼Y~¯ëm귓ƒ±ñȁ˳‡pX(û@?JH’T¬ÐŒ¢±¶X¬UM¥m¥™PŒI¥>·[ùµäþÑùw¾ÇîëqðšGÈœ#ïÔWf8S¾NX;Ú@a·Ú,Åaûb?h²ZÓÜ Eš‘¢g¬ ™ÌC Hۄ˜Æ±‘¦ÙÆ¥¹$pÈ%#YYYPM1„‚‚e–;8˜ƒ´J6DœƒLؒÓAI ót‘ Ø7©”Šât'QÌT4¸IˆiqÈN8p4‘ÌÅo¾æ­·6¡M´QQJLÃÆÿ…ÇÈç921ˆíã-k¸åÞNñÞ/ºô2ɚ,»u† ‰ZB!ì7‹Ý »ŒÌ²Öj!.G÷"ŠG×~+©:NÀüÂôÀxÙé=˜O9äô0½4¾£²B©ÒŸ‘f%3],kÁ” bH‚BR‰©eƒgЦšá=hú ŽRPÐb€óÌKê“}¯t’ÔØv';ÉE' Ý¿~.YZ²'ëSÍ(/ä’‰uE%+Æ··¨²#YYY+N‚-vµ»4”h74<l‰9»Ñ©§Ä<¨A ­!5!ÔpÏö_ׇ0[ä&–Î*ÑÌt +¥š¸wº`H‹lLn¢§Šc¬ÈØ#VìRBv¢“BfÝʬ`ȒÇm:ršVj÷S bÄÄ8ˆà&0Ñ[U=µÙ‘ÕÞnwµô͵ÇÃdǐÔÖµÙàÁEàaß®­}»‹tQPä#XXXÌ=»Õ+&)TÃ÷^ˆçŽf£GVÄ<bfÜ©Ž#YYY6ƒ^*[7´R†‰®ˆ^¶P•o ûW­:hm¶ÍE*G쩍^üÌÙzÈvkJëËl«™›W%è°å&£éWxÎìJsÐí…Îé¡­JhuÁ,RD¾"áÀ)os ‹„`ffl4¸^o‹Hç‰}%åP¨èñݳAÒݽ¹¨cÆ&>œ4È6ÆÙA‹Ò·¾kF!­¼j=±È‡à6ZxŠ 0c1óSË_“<ô#u¶ÜI#‚ÜGG}5îöŽéÛ#YYYd=G¼Lƒ-FÊ×ußw*½‘à› {ЭðwG&#YYY¼ „rüÓËc„C‰…†òIdT¾¡IÐ#YYY.Dñ—T©ZêvC>ˆ<;În৆bY3@££á)ÈQäxÏs'!ȳZ×»QU¾y­™¾ÌM°ÐPHl‘]cñ¦’CCýÑRpó)€b‚P†$w€_8a¦Æ±x;JÆ4ÐwpM¦u£­FæÎ·5Üæ3# 3HáŠt„y/EÄá”Yკ­F±K &SBL\`ÈQ¡VÂÉLcˆ €æ¥¤m'Y¶·+ZL‘”ŒdqH¢‘#XXX—3ĨÇT`©a”W1"ˆHBBQ­A’S*F2ÔPbæ %2EÄÉ0”ï9hCoPñ€²J‰¥,:*ZãÀ‚_8_Và䕱¦INù“áz½ÍÅí#YYY–×ýfF7yÇo‡PÖ—y.iVÔ]ìåÃz¦‚ÉIWñcìú5O)qLߑ:)ë?†ø~ üPd¥{áÁ6›]þŸ˜ùì±_šY ˜ëž…÷鑵#r6£½@(PÖz éGLE´b’ATÒrê ˜aŒ)BLV–93äâ¯y™³¶p<¯¡F´˜†„…]óé ^†ÂUرˆ¨I ⠓"ö&k #YYY‚wqݕcà¹*á­«~ÛZÂ6Â/HVðªlÃÞãá ´½Ìš¥j&ª(‹YËÃ+¨úólS3¿}ÌÃyqÖþ„Dª«Õ‹†›ÓoRðSÇqgeÌš Š89ºå–´¹wzñM¨,âW›àùÓùF‹™©QÆ{ÏUQEëXÆç³Ï ÂÓ0ÃìƒßbX½ùxeihÏ(@Í5¿1X¾PŠƒUòª¸È× ðª¶X–¢Âê_"d·˜I¤qwáç܃¢3Ÿ~zó¿>'¾ïˆïDuÈÁOØòËʄc€ÐB8qw¼7©;½£wž;ÎMVsŸ7æÛ²9S×Iï‡ö~<ÙzߔÁǙt“!,Ê͒yªF‘ðÌhˆðsәÒ4Ÿç¼Ö‹çü1ßZ®:n«j5)éb‚#YYYÔê*(÷eEl5"®m\ü_T#YYY®×‘Hªñ§¤Ý³7Ÿ?ãÓ¡ˆ"“‘”ËGPû/Ö¶Ö@Gtzèøvì9L“¾q̯CûÈÏÕÓ¯h#XXXVÿ–8A”&gBƒnKêdBØ".^$.¸¤ôM¾ÑY\óªñ¢/^·\im˜ë2&Ó§pHÔòðw­øu¶¥7Äß}×]bßâŠ~š.7ÆâwL÷œôÁ#YYYíÔqÀa]“)$„„$„$$$$$ `HHHHHHHHH@!!!!!!!!!„„„„„„„„„‘¾Å]wšµiaêuFÎÁÈ m!¶4 `#YYY4ÑPuŽãN%Pàì·Vtâ7xòִݳfírúç̾üq#ó{*¦‰²¨‡Û1ýæxˆ¼v @+H{ØÒàù¬½SC†¦ÌÌk›\­t«Ê¹¢­}F’RˆÐ²„°M¶hEDJÙ6L•¤S[vגm±²ÚÓkõ×çBBD†Q†¿›ÝK›]`2ÇŹ) 
E-¨Ù2kj2”J5Ý®Z$„Õ$ÐÊ$dÚ5ùÝ®©š„ÊZMNî°Û¸ÖþÞj×ShØ5L§ËºFJÈ´zîS–êý_^òÐTÌ¥·Ú±mՈŠE²¤›TGìíÒMÍn[M•‘‰‚bBÛ-²m”YŠUÙb•`±I4D¡mϙËYòRWuR ³âRKÑ<— ¥A2%›lKeb£bب¶FEE¾¾²­‘e I1­#1-f)4Åi¶‰³S)Jš¥-m-XªM¶4‡OÁêÌ>«c©}O_¡‹•¬ ãÓX16) 5¦Š—(†¢ô^@Šré‘!N 5 ІˆŠ¥i¢ B:)µáC›âY^àà_U<5èÌÆRޙòåQo{òÅf…Â}H¸Ú£«„ùk„ö“Þ÷öøjòDDSIÎ4hfø]#YYYé§ó'Ԗ(¶+œ@­jýÆÔ²±XCd“Y²Ø¢ ›J&¤ÛQQ1 ÐP0\ÁöÔ;þƒÅçÕ½#XXX=¼ä0ùjÚK?¡ ˆš#Ô>È5uË_Rhì_愇5@ýp’Bl—Ô‹¾öNQ‘YSvèÊt²í¹%FŽn»·w.¹D»Î§]s‰7ÔéÙµü¶£@TÒ4RÅ0ÉlžÀSç<˜ðZ"©·'0¤Í˜ €ß¦po,ߎӣr”>‡Í¤‰èJˆ…»ßÕ][üêbuWo]Õ&¥7ñŸD‰ù`(WwYéÀ0°„Ä3ÏꗟxÒÑõæ1)î ÅvxªÊTwCú㺆ê{‘¬ª©¡ÜÂ@ŸJ/Ç÷ŸB Ÿ2)3¬U¯éµìÅ2¦š8Ì Á܇Ö2fˆ2$ÈŽ,gùÀ!ƒ=ÍyªS„"Â&e*B82jˆ£lÁe¬Ö¤…ÚÛã×u·°¹“ʽ’êùóJ¬Z€¦*!(CJ ¤-ý=ï®Ñ§Òê+^”TZàÚëXc5¨†´8͆  Â°Š"²”ˆ#,@ Ù˜fWnÓlß,å®%8© ÅC”‡:m,d…z©íÀÒbÓ±«¹FhCdX`ÆðvÅ #YYY¥¹\2 ,Á„±6e¨5$žîE]{¼œ»AˆÆÑguºÞkstÕëˤ¡ÝÚ*½.®„ÝOur½7½7^»j,ÁÇ$²£ÒƒÄï®å+bž"Cí°Ó”äTmŒhi;¢.Ñ\j®ÞîÆìGVB)Æso]©ˆÈùSQ>j±1£"ínšçÊ\RÈÆ8¦ã ,¡¥—pECÑ1b60ÆuUQlC#i#´“tJmY"6›@Á¦]0:Bcƒ#YYYl€ØÄF­q…·#§lö=F{ژaÝÜ÷ŸZ"½cçø†h˜> xÚòێ @ýRŽ ÑbbgÇøØdaƒ|Mœ?ÎÀȈÿiüßë>Í!õxb•ÖsÛeTØûñÙ»~QÎ˂ ¸p¯„5UUÑ=Â;þÐ¥¸‰=Iê6“ØœP)h í23ð”Ö=ÓƒIÎo‰˜Æ-¨3F ×Õõð‘ѝ@ Tx†…í4ʧyëóFh|¿¯XhÖ°þYM•öυˌ÷æ>Ýe kÆAy`ÝYÊùIa˜¡a×ñm7KÝjš¼6kÎcŸ¨i@ЧdD42ʑ%Rè”ÅM¨îN‰:×>ô•û.—IäÏ©Pß@4‚¾o£Õ†g¡ÜµA¼ST@êôPHz=ÐÐç*/ùòÖH¯%]²§³ê×§ÉCFèoAãŠ8ññb–Ÿ-Ê ”c€HÕôq‰Ùq¯ƒ¦1N¡Ø ˜`ÀÈñäËˋ©{ ä’y¡Sï…Gʤ€LÁ×û¡z·€¶?­1âÇñC#YYYœâ&܄€Š!{°Œl’óÂ©Ý P§”$B—Ðt´ºÖ°:xZ@>yV@|®ñpUJ"·$f<¤]K ŽèÉhÔÓNB”- „(r w0FÖÜ+K[&™[tÛ\¶¸kV¹ÇCæÚ{ªÝˆ–Ú¹n—)ÝÍ˕uݱ­_|Ç>^÷wǗª/®»&¤yÑ¢ÛûÕµØUo12{Èwmƒ‹ â0€‰á#XXXV‡x㦜Óà!ÚzòÆEY©«e©µ¥©jjiX#Ü2–•€!ae—UÀeeCÄÄɐ`È`Á† \À`c!¦CX4€ò²JB–XC©«µ¶H™¥¯[-Œ-XyÜ3E¨Î2šù ÊàÄz‡›]dH?£èBÜáƘús…È2e–¥'PáD…ÝuMLÙVM`ÆfH°¦ŠÅ’Í»{¸ÔÙfj$‰hªYI^d9|Ðþ©þ»ßÓ}PRwéZVÿ\aœl;{±hð‘Ô‘mÂ)¡$5±Œ„½‹Sø >3æ]ñŒÎÒ’‡µ6zÏbaä„DÒ ´Zž¸õà…nöÌõ·qЧڈhà”¤©¥ܙ[Uø²cn™Õ_þOïøÚ:ԓþŸ:—ü>º Ÿ—÷·?IÝïÿ·&û•-\fÓöëöKë¹›œÖ»5RˆQŽÇ§Ëþÿ_ÿ;ù¬e("}.‹ñŽ^˜tM1ä¼ÓOèww#XXX‡ƒÎä?­æ¡Ý8w* ;\£@çtöÃq7f³ÎO’;ùãIæA÷áêWäAt õÁø?´²©>|2œœ(É1¡‹Pà͍ݯ6ãݙh´‘ŠÄË`¬Éw\¤ÔõÜø×;¯ËÑ}MñzåÕ}Ræy»Þµ¢ZUcds$¤Hr#YYY¢`à ÎwtÒ'Nš±X6œëLÛ=Ú睡h¨¶Mclf•·ÅtåËtœëšŠ]ÚêçH +œ"fO—mŽj(ŸyƵIDbəšLPÒÀk5W‘¯Ù—`º%=iµïjµ²ØÔß\V(fM&¨Õûì÷fY±#YYYú|/BJezqÈâësþ[ÿux¯©È…" ŠP”$ì 
¦#*[-ª‡lxGoæq;»‡ú`i¡„”L‘C!¨$ €™FÌÃH'¡æ&>¾Ã˜'8#XXX•)RÚ-cV1Z4™›X¨¢´š2¥±DÂ4ċÚJ™¬Ö1lXªKŒE&Ê4Û|W+Q±Z#YYYRmh֍idm˜ÂEŠÆÕ"¤ÛS5&‹EX¨±dÑ¢#U¬˜R’Ô•cVÒiI+A%I‚Œ„–#Z1¬ÍbÌÈLѨÚ+QªÆÉ­clh*-QcfX­²Y´¶6¶ TmФ#kFµ""Ź[¤–ÉVÉd‹k%£U£IZfJ*MŠ6¤Ú(±HËM²›Š’¢C#i@Ú!"#XXXôšUڝÀEB×|#YYYT€yT¼{ÏÁB"ëûX¥N\ðÄ )LÌ]HdD³†êï{Ö-æót։H¼¹BV°ƒ¿–÷À«ê‘; Š<®JgoÌnÓÜ=µçy½ÝlÌôítíÃ0ƊJL‰¥¡ty‘5Ä!Ÿm£TÓPEMŒµ}y(BHJ&Õµ~Uþkó÷>¯±Q$”“¨ä‚›C€éކÀä&˜"¥¸ƒ 1^u0Ú•1°S†"—Ü®šY*4Ù³M•™)‰´ÌšBj‚b#BM£*ŒÚ°Ëd’cÄF´i¥ÝÙI¤Ê±4wmeb˜Åƒ Ê£·\ö¼¸ƒmžÝÏeqÃÃv=µÃYi:áÈlÅÉô0c׎ì5RZäõ‹ë÷éáNŠ¡Õ`øM€jw†z»ûG î*h¡‚@ V­ d vOžÌ@—"̔ƒ°E:@=½°Ù`ª†'ʝC²J¶xwG ãÃxÖ%~M▏7†Å5· kôŸð¿\‹²?Þ |9±Gå}g­Ô©@½Ç–*jfª!¡fl/#ÁŒR½­?à°kž1ÛZ2E˜¬O0„#YYY2‘8úà6‡Uãyª:]344Ëeê×µªž¡Œ¢÷\š·gr­Ô¬*Óÿ»GõXÕú»3íf‘Ö@fÚÓUiõ¨ºkOZÕÄê#YYYÈÖ%nªÒBȹÍDÄ*ª Èt¾¯“|AN¹á'P¥jÀmÄkÚV¹pÈ)Àk’#XXXs;Xi÷Âfû°ôó=x;Üçdvm„ ™WèÅûùWõËIë„A Lx8Ðî‚ý+cŒR!³r˜qQòn´'¨7†åÏk+œkeòðkâúèø^ÎjbÎ#YYYP#YYYÚîÎògm*wô9ó÷@D¾©Â¦(˜%2÷ŠKÿÈ ÇØwª<ŸŒž%ÊñæìټΜ÷דZNê(DHj†Ø´Ã\o‘VqZƒDHoh/ˆÈC"ýgL#?ìüX¿ÁÜþ•‚Æ`æÝuÂ+k®”$çJM˦iNëš8{{Jü]·j5cW*阻¤ÕÜ».:í£UÇ*u×f&uŽƒð}ت·òŽu•AW$„C¸UõÁ^pҞSÌ&Àvý֑餣RjФŽ¤ÄC° ìξ½Œ]荎a}gÖ½^ŠAëþÄC±ò3{„ùD‚&””SÅ=0cÜw©â«þBhGå¤ó͏ž2H€F%CÊ?󾁊æ@k»¸ámE ÁY½ÜÔÖS(«ø§•ÊÕígºêòÝWUkµ_›ÓE’FZÛWWìÍ{U¤I{¨PQáÚ9 +±=P|ò%êDË$û¬<þ?€R"CµCʞ>++PUSÇÚ<ŒÌK§ŠyÇ^Ù”˜,WØdè,GKéºÁÚ²›…GÝôÅýLlƒ? ÿ²£÷”Ô)–G~ùw( ã*˜ÀÔØâSòGº@ýõBM©ÀÀ§Ë(d D'á\V,šÞVÕû¼¶¯+ÂW@2€d Äۃ!CL ^d)öØ..b‚ädbÔ­l0ÁDùSâOœäkß4P|0ó9>ÿšáCI/§ÏJAâײe(š4Ì&ÉF£*5ßû[†&LT Éè$¥;§Ù|W‡Íò_AWŽ}<y5²5ìè¬P}¤÷êù3UL~|Éò|º×Ïe@<äÀõ`ŸÀRÁ=î3òÑSZˆˆˆˆˆ¶µ™†dhóI„Êó1„l”1",Q»1ÕDF¡¥ö·mû äЇ"€‰ ^%@¥}ƒÓh̀v.ãœ<à¦?ˆæúcÀiB?ziÀèñ3Ÿ¾Ÿí$Õñ1³´ŸàÕ5‹?Õä_aå4ˆÎ3yK…\'*§xD'ðq½+¸;W.˓ټ·ju ))[Õ¢ ‘öªŠÛþËsÈëÈŠLFZæÕ:˜„¸®žÇ]r×Ç:2#YYY3ñ>u Ña±yDø^8ya!SâpÃðÈ®ñ­kP_£‘¬´Õý¾êŽ"Kšó2sf¬í·N×!b´ËdDožn«o«4‡›º›#D½Cò´°-õ¹’ÚdÔuê ágÓ.\|ÓŊjf‰5W£U*ÇîŠy¢7½XuåÎæ~°ÚèñrŽ'3 sîYôϵƒyu[ÎXü¸úÐ v›™(¨¬ðJ,éëMLCˏíkF{Sß.ܼ¨ûÝöpät*L¦üïD‹Ù\+MãÚوµÉ³…(ÈmÑýS+ž:;®Ny,{xŒô‘p¬ZãŠÙn!Onè.. 
Lý´28¸Õ+"^ö’Ç递4Á¨’’Dˆ"·F]å'(ÂsY†±¦<M.jÑ­DFhÞ;¶£ÓÍÝÐÔ§…ß1¦a˜j @ƒHnҀµÁŠ- ö¬`oõä öwÌÉvb¦JмÏX'¸}ð^¬q‡·+ö­Àºqì</ÁD#XXX;?…Yš™ ¾Ï›Qâ—ϲ[FP~_êÓþŒürÇ_<_‡} #þýa¸¾ŒCþÿîYq¾ˆgf¬ÔdÉ/ìÍþÝ¡ƒGù§fçVi?#YYYÿHñîÀ?ݶ컂§,w;Öu-4•߬]’àû#YYY!­ÉŒ/†© üm%¤f"ž¦R䮸“ƒËdU!Âé% Ód'ªGS@†a BД­“#YYYF¬bm65ºü$Àé”$‚ (Jc G¸!Á³`qrß»X÷íUUUUl݂]óÆŒ‰öt´VÁ°‰øF<Þ4†ª"!žÔ ‚€R RŒ"I ‚$“%6Ù2i)(±FÅùŒšN’60ÿªF¾Ÿýt¨ßϞù¦Õ¿F×ß·~€€   $ߣÒ4×~küž¯¿ª>þA<èùÕtƒ˜JÑõ¿WÒ´ä‹ üC䋧âu£¡÷TÐ%¿tˆ˜Ãs¦£ÕcXō´cmD–£i4lZŠÅQ%´R¶SS-%­sZãK¹µÒÔRmš"MMT‹)k#XXXXƙDi6ʲªMlID[jŠMIµh‚?% SD‹•KÚ„.@Íz'2r("xS™Gã¤ó^äæ§#XXXýÑâ ò0=çòL„êY‚O´;Wæd£VLµü²í«·‹ßPùy8…ÙŒNÅÚyh ©ñÇÇX¥4DԔ”X«õRÛ¡|t¨šåÛckdTÛHbÐÒ¿Ôr¥Ù3lÍ£Y‘ŒRmꚹ£Kh¶–"š³IW×[]CMTàë¨iL–ÚP*¤#"eû!SapñDÓKIQ$•ôA’'(r˜€b%äá…)-µ¶M4›0&±µT„È$¢î魒†É/[.v%]RâQ¢=߸àø.Ýð—Õ|¼òã=`û$š@0µ†ýJ>2§z>Ÿ~ýÊ|¼åƒï'šòæ˜vvvýŽL¼„víKöö4û$ý’Õ=Ôb-ËÉn]ו¹<Ï1ޛå\Œˆ¤h¢%&j)Š ¢9Ÿ(vûºó#d 0&pr#?Ô÷÷G¤‚æãÙÛlÿ݌ë†#YYYçEǔüpÁ®_Ž<0ç¨Üœ»üýTÚD Ê#YYYIÊÈ‘ЄÙ#Eö`êõ}º]%¬Ðiè¢Kb‘€ÙߐŒÌñb’î'áMt5®[ÜFZ$é*t¸ß==UþP;9’¸\âbI"ù;—ài6ûô³ãšúµÍÙ¶&<ûh;°6¯£ƒïü¦>k!G3³"R1Œ,1J…Ë`rވҔo0ûùàoêѯ§Û®»DýÒ/óÞwÒ¢~å‘&G1òH!¡8 7{}B}qÄ>ÃoÑ}™ˆ@ULHd(˜7ʯÁ ?#XXX/Ëåù,~@ùwJdzGfÝXÚF8ôeò3X¶:iX•j#YYYyr£])\fókmƒ¶ca؛¥Þ.æ#YYYG§«Xábó¥=<á³eø‘#<˜.Æàƒ Ž2¾›1ÀÕ£1óM†¡ürQ‰²È¥Ëލ®g!ò±Ìü‰÷wX·áZ쪙Võ™&Y±‰Ø»I<sã䵅fNðÍHá#ŒD04×n.*˜jjن9ob™ØÂŒKÀ¹ÁÆ'uüð#Ý#XXX9*pÈÐú>8ù¬>°ý¡Ëiå”ü¯¬òÔ ½k?|Òe#YYYP„¬)èB¦%BwD$°ž³ì³–&3³ŒP˜Ù˜€l[,$Aú[áÚ»n¦‡GŒêc/ãO!Äy•Â`pH}òr“SAx}5 †£:Jî+ªC¾0NB²ô8u1a‚sBVÒèüšþñKLD1q™–K’ÀŽ7#‚uG#žñ•‰Qtƒ‹8?š[B錳hÛÙ¨6_vùmW®nbÙr<Ýü|qåaő¤˜Öj%÷> у€ñىà3×:Ru¹V‹5§3I[t Ã{¯æ‘÷SE1qºÂƒä°ŠGq»\GfÆÙ½Ä«–ˆpÆ#¥ÀC³K fb(†×-†öâĀu‰#XXX€DBE6“L€-1Q…d 6 8ÓR8ä‚#Z¶Éõ;°Ó4f#YYY*7Ÿ]Q7Ŷù†a butäéw]r×w@rRD‚ÉLjá¬5¡-F~(ዐšMF(§nBåÒÖvÌîÝwDÓêëÌ0•Ébry-dƳyš!Ê"¶¨1’7&D #º )#‚†˜’7Ä¢c‹xÊ8É#Ô+T]´jæ†)ÌPKý;"Z ŠZ41-µ¦ ¬«NvÈ»°©#YYYÆ.pÐDšiÚPôÎĦI¢=]‡\µkÓQ;’ “½xΧ9D½ró[4zSND —Lb#RÞ±…$ HgÏÈñy"ì˜9ØQ5D`ÿ—[†7kх9˜W.)Œ1†6òÌÐ&‡imWÊ¤¸‰—’ÀËéµPèìñüïԟ:ô@Å$RÀd~¡}av­óÏËÇ­'¸IðžìLdë-0¹CŸ~5|.4£kômZ‹¬I¶”#Mì^#ú{ÇÞuÁR„ܦ‹Új#XXX!$žaÛ½0(Ðc­–ÑڇǛòSˆ°ê¦Ð*Ä[#JŒH¢h [’12‘‰À¬ˆ#XXX†’¤s0”¨°&‡ðÑöa¿?-vdZµ~¹*•;áÒ4îúÈ 
·tè­l²1Ó²“¦cÓÇSšA™˜–HØãve¹‰i£OLxÒndäµq„£sÇ\5£IÖ±…x×~·k#YYY1´Ûfu8ÌLÑ»Áa4â9’ÀÂpºñªÑ„o84… OHÍÙRäFŒ#YYY´­Œ7y˜ÅΞ„Æ#YYYˆÞîˆ;„"k­w7ºÞsFR‚ U­7¬#€Ñ§4M ÅÔfs ’$šîäšÙÞÝ99D#YYY©YޏöË0‰=Íu&ô«!(›PI'SƒNÀ¶O>{"b`ÚŽ6E˜…DC{Ô+ ]ŽâР¶“k-‘—¨ò/„±¶—Èröc+z°]ýHyø<a87#iHÂÅ£A%Üåsš‹¯¯c‹$Þw]¶æÉKœåî½eîY.ºîÖêWFL¯ßå>G©Êz½SÀèèˆí}4X&f]˕v¹p¥©uª±`Qc4ºc—É_Ÿ½cõßQ}dyفJT”É!0‘ÝéÒOïs"L!} ªÞý8Úd­Êx@q¡~Ò¼i‚!#XXXƒ$éÏ )­÷l;é2·~œ|2c.9ûù\<eфö¨{ðRî ß;Ò|¶W×Í}ÖΨu;lú¶òÈG™<º\.Gӆ®Ÿ öÁ£\) Ò@2G¼» Ðç,_Ïd/®{‘Ò#þ„Å›¢.)ø‰¹ u 9&­ö÷ûT´J]’÷ÁÿæÈ¥u~ø<üö::‡!ƒØ~{„øpIµbZËö›l}}9‰óÃÏ»pSyl%-CE-©C Zü%î´ÏËd4•´À0Þ³¿–¨R²Õ*Cp  Û5:rh´¡‚¹'¦‰ c½kZ¥‚¾|G„€Öˆà—ˆÑ"Ð8 ÒIíՀí4 XòxÛm—4Fך,FÚßj‰M7ŸãAøaƒÕ _d)}#XXX¿”g›ÊDúíQABO?lI‡Ûì0|!<ÓôhúAÂd¨ß!è=xä;ñZ_r‰×ëúE‹#YYY(“4í¡ú;~€sB}>û¶Í#sìÁ—7+ªß€öôº—«^×Óº­%ÆÐ™©¶&ª’Õö¤§µU8‘) …E…]oNJ £° UGjª{ÝÞÔö°¬ƒ†l@„ †?ŸÉbkè˜N~#Í5¢é<õ,ˆ‰€ãÈìŸ>°÷f€µ[tçæÆsX«oq}p‰í¶m¨Î 9ÈöÉ1¶­Ë>®÷Mkˆ“¥¤Dì̄p¦€È¢1‘Ö«d?¹hÊØðꖒa‘kn¨†ÔÈéfI%dÃìÌ-äHlãMàxTè‚#%‚Þ]~çZéu¨™®ˆU-Y½‹ t¹™¸ZÞ÷¢³1( 9°Õf¡ª#YYY&£´ÉK¼ ¬ŒX¥f ߯´…(ïZ »·×Ã]óçw’µûínQ%&=1A",jÈKX•®9I$Á¡´Gl4B0r<ˆ†G1`ÛuÁ„–Áä†<¶»(Q6c2ÄbËÊÃþ#YYY"B‚%Ë0Àç(©Ô¶XÖBsъ@ü¨x«!¥7í@=?®Æõ}öÍóáfžÓ-hûÌú4öj{·¥Þ•èšï.Â#YYY$+Óï.Od¼øz ¤¯ô0ê©€Èú¹s›˜®å¼%ü>Dï$"Q`„  aZBi˜(¯Æ'œê€cÐҞÄðaf*Rª† TNlžmvmt».eŽ»s Ýæ“owTMsºmÞíé²n\Ûdª{u®svó•Ýé¼Ù¥x}õߢø;9tçKS,šem¢©š²mDZ¢ÔjXm±Ý]„Ł*lÖM©–k×mç¥P†)™»v¿:M`¼® 3°> îæ‡T ª%6t»:²”æ8ì*|Ӓaˆ#YYYóuëÛ®ÜQ-ÕëSNêè×E֍ʙCµÓW6¨S%W¢E;dDØ *= 9ڬ˂o´ï=·]˺ޖÞUҝÒ)¨l¢t€{XàæCŽðÞ]»t¹[{{րÚü9±­j\Ÿ¢ÿý•ôÅø³"„äVY-™1ø£!udwãõh7øs”!ØŽÿ´w#XXX ì7ªþó £i\ìÑzèÙam_ l)©^S NDê<¦î=A?#YYYûxÀëÕ$I)lða³tWëCëƒþ„Ä;%|ï9†Éé Ø‡hüá>h¢¨°2b_ŠÉï2|/üY²Æòµ¾Aå±ðàxÜt„q>¿º~F#YYYì4ˆlèª/ý§”¢¤¨$ǃáâ.žj"ƚZE—SZ¶¾šŠ¬kh“,š{ t®!m r0W骈 ƒèí„à6Ք¾Q–2bmÕì´TjA³fk'0<û‹0¢oŠ´êF )ãñã‡;ÀæPüº5 Ž,”`h‚vf„ßãðô']ûÕQ«AÃ\“p÷C&Ï8Wþ¸Í^ӕ2CT…v²ŽŽu[Wò‘”Œ:ÈánºÑ½h|ÈVõŸ(>Á¡N<efâÈñžŠâA£E+Ó”k$ðý]Üm$›ƒQ<¢kÍÈÅ ÚZPðöÍf±ÒËHÍØM¬Ð«ã~J¨†@DÊïâ1S mˆàъzM4ó…L6ó|‹5§KU؈»ÀO£5B#YYY•¦4SxÙ½„îÂLØÌO)FÚªZ #4DpÁD¥ƒzrEY`ٖÖË-[Cq¬Jª×k†Ž¯gƒ’o¹_g6ÉÆ«:`#XXX&«}5§Ñ­uf›LЍ(bEI41#@/B#eØ÷9£1ekDC»è·r½ìÑY§”ÊÒºo Ç¥5¼ÕkicY·†’›ÔÛ ŽcÑNžŒ$HåÖãxN9±JT ×a–À[uÃN$q†4Øh˥ޘ…­vŽ™àÁV‚Ès3C³0cµDPz 
`†I¥sL‚Ýâ4lÞ`Ð<b’S`C,D,+ˆ¨(\‘t†—‘Sj5Û§A·U±Ù9¦:ôb‹30Æ·r¼†¼i"w›„X×G4p1![™Aë¸íQ‚G†‘m¤»â­;:U*E"(ºcÊ5£Á­³#YYY05ÚkPpˬQ„NlÒµ#YYY*›Èž_ ƒEX݅(U …Ó…§&P–î#YYYQ¶*6Z¬ÊC$º±X=TPD "aˆv,@6+s X#N¼· •rºmS,ý:ôžu¸dµÒA՘`Ïɂ##ˆo‡œh˜‡¥¢hµ³K˜óÓ¬ ¦Õ×#XXXóŠ}¶ó{gºº”~™’ dV™:a¶“8‘“fd&iåȉ¢Å¶cCi'µ $mš1•…¯'ÏvWœ%ËÒaìÖý›ä“"Òµd‚ kP7ˆß #ÃjtäãñïfºŸxÊrˆªZ˜K]tIyu4ßàZõzÑR™›cô!ýI7Ëëϯ³B¸M ój™6ì1½ÇˆÓßr Ôäk(©‘ Ãn$ L&4̝eÜVe.™Ðm³œBM6ó܈—PGÁ˜Ñ¼ *ÈyNç,Û¢I:ȹÁ˜‡\ÅÓSLÆoF–°Ó’&ƒ#ÜE´Ä°aÄÀ8µ#YYY4iǍ4úrԄIZbs‘KaË:6Xé9@†0ºÈ¡ç£)N‚[“Ñ3‘TÝ {xRK5QåF$ÇcæÅØå½l§VåݵáJßzW~:áðåß35¸…#XXXbÚh(ÁQaÝ;mÒv“L|ޗ{ÆœUj'}˜µ;Ìlšp:N—>O3Ú84Ñob½k `PÔ-1q]¦Öü© õÛU²2JjZ— bL5Ÿãã•ù‰;“JHd`€‚¦ ¨c,0Ñ4ˆçüø¡ÆÌ2*Œ8ø`4_kžFX”NNµŠæe!‘…d㉑¤ 0¡Ï—F–>#fžDÍlÃ,Œ,²Âdú"¾™WM ñ80ð2¤)PđJ1 ûç¨6ØóK#XXX?ºôEt…õ—¦Ô<©ÆvØ×*¾À²7û*¾©Óo­“F™É9eºë”Ľ:׫ Gð{Ó6½NR¶ùÜۓ…Ø«¦G "Šå0ã­ZžW¤#XXXP¾çB`m*{øCA=88æ†÷Ž¡ rŠaÔH8@#YYYoX1&£S´Þ°RQÒ¥ n]٘;µj œ˜Ì#XXXDÈÑðbÐT ƒnÉLƒ!WPâĪ” ¬Áxƒ#XXXÛÐ0àÄ7¬D˜BTd%(TÌSÍá¡¡ 5&ê†s–ÖYqº6ƒ(¨A¼#YYY). HòH! Mò$LStJA*QD®#ÇM Ò:‚!(fD"— ð’ÚSPJ¤!jTpY@wU#XXX1»dXÇ0Ä Œ£¦¶hw„TRR‚èd´ÐME¬•F)l)²¬˜¥-b3e¥fD”Õ-jÓF)VÀ³Ù8AR#ÚH#XXX `€<4>IŽPDŽ҆ ægv]ïžå½#mP5ä+ñjYÛ}«×+Ç9:(…TÄELT›1£3LÊ6–4!í¢¬E `#XXX" ZB’¨))¾Óèç£àý&Ž„kùÿߘ†±”Œ²_´Ÿ{üp0D?©ó×<4x‰à{ðÄÄƄ¤H¨Š•Mi3lS&ÄYµß¬uU‡O‹a°6 B%D ƒI+*„QºDaZ«¥¶5T³U¹[šÔUZ6±´mˆÕ«F«nB™ R¦@Ð#XXX†b46Š„¨®¦±Vßv½]wßZ$#XXX0H9±7 v#YYY.Ud šˆêW¢)1™a†…‰h±µcŠæ)HurâšqMMdiåVʖ¦Ò±: ©"a5Í OmEý=ã¾\Þ{‡šJ‰3“:4¶#XXX»< ðÀŒx`8CÔA&© dÑñ{‰ú:¹ò[?£â<û@?t/Qœ|0Råƒg§ (p[š°(Ò$´”ˆ²iº]I,뵺€(}$¸wàÜêM!ÚB/;Û/%ùçûÁ©""ÍM?ŒO´õ¡‹édN½ÝJ”Š/Û÷Zùzí«ÓIŒd…ÒB)#„µoÃÀ #`–ŸŒ™¶­$ÔTŽ0ƍ-Yî8PÜd‡)uj]ÁÆôdZP 3‘¡\„ i ¡iL2åø­jÎÎmtø´Ž)͋AÍSWhli©Ú›7Ý¢Tf"¢Ë#XXX#YYY ˆ!p¤…8¬ e4¬¦6E#KU뛈bêÙÎf`ë†2BÝÈ,qÇe­7…Übr'JÅ(†&³nK«™”iuë¼77/Žf®í_žî™“3(ÛÒÛ­$Ĭ¬™& -fðA4$®ª\xA20ì“&5oOPf‹4’Ì¡Þë©YbÇ 3Fµ!†F$˜3vrí™$µòꮢÖJ—»™"ù´¬ªºÝq$¸Ó0áÀ V°ÅՃlŒI‡&€Ã,!wèÖ Ú±ÞfŒ\ك²5oFªÑ@U˜(®rR@$Ê)ÀQ2ÙhÉÌıwÂ(ÀrC"p´˜1¼q!ÛfŠÙ”Àņ)€°Œ°bšdlÀI£‘ê“P¢èGkÖÞ×PÙ1}ë_}´j&bd‰xbBy<h4Š$Óm”Ñkl­E–› ¬¦ l©¹•nqA˜nw³o[à©¡ÍÝÕo¬Õ‡5J˜˜ƒœñCFª 
’fmÙ­¹r£b$–‰¡#UTIr´ùW×è\øÞhþy³7:álô„eQ#XXX}8šžD`ö÷ù`Á;6tHhÑå")˜òˆº6ÊP@PR´OO˜EôÂïu‹Èd#YYY͇Åïsìü·ÙösHFÚW_8Ì_è-@u¨àÑYõ³G¨QÇMVªËé©Î{%ÈÈMGdqYi«¥‰Ò0¹ÉJt‡¡FñՐnNY‚h–)Ül¶!s'.VÔ9q ®¸¤*0Æà)2 xxß",Þ²ZP7˜ð@ÙX«J4aÅ¡.`̏"CȍŒöu¯ü¨tnö`ft×K!ª¡d#YYYhiÎòÈf¢+GMœ¤Þ¶gÞøƒ^uÆ*KÒìÀæ‡NžvŠ#XXX6ÒåPA¶¶ã ËUu›k»±i‚l±m¬g»¨€äOLzAĎ º·Ó#­—##XXXQ‡@΀‡b“ 4n±0tˆT n¦IžÍŽ´HÕõ:™†äs‚ -4òw^#XXXVN¬ÖáÐÉ´¤òδD0† ©ÌŠîe0%"žŽ$1‹!Ï(‘hûµ=9*”m"0Ñ6úÀì™ý œÔâK”ãŽÃyº³YH3­‘Éãr÷k4’MϞh&Ó,Q’¾:SO\®ûŽ:'3zHÉ#”^®\˜ÆöMN#YYY§µ,Å'ýxѕOö{•÷Ò ;Ö¾‰Ûè³ýæ…ý¦ª*dYîFXX6%Ð`'ýAä#YYYòf{Èït&SYS­œˆOb’ºäàà=Ǎìƒ$Jž÷ê_â«ôÍQ¿f[n‘ª5Þâ!P¨5”¤®»]¬ƒ)Àzª¦ßÙ’4}¿Âz¾öWá‹YÃL4X•ãÕ*?f"«ó«õ¦§M¦ß;y‚ù×ÕÕd½ÿNöØ>aGw#XXXŽöçµ"•V†‘#XXXSÿ×ÂDž=žZñ÷£ýÒF!fYU ¥@ˆzà}Óí;€ñ'ç#YYY©î^r¨y¥K¨ËL€jC3K â~'^£°£²ËñŸ)­|±TaU#XXX4¤5O»ú»Ì֞À#YYYœy€} `?<PD&ùly#æ=„öB<ÐdyE8ǖ­¸2†mæAB¸R8cQE&b¦°”6Z#YYYŸ¢L­7æýÆÜ7;œš¦W÷)bBjÃʼÌ^C+@@g©¢D¬Si¨ÖTÚÅ«b+Rµ~ŸH~¥~ÐÏÓ²Uč&¿†j´îÛ©ÅEžºC%4#YYY0ooæA­KrÛí¯"ɑ3óÜ«|kr™)B‘ €0šV”BÃrfnrØÌ¥ñÙMÒÖ[ͺDͲX©+G#YYY_®úâ> Z˜ tnJP)ƒÞ¤JP{f‡OB!)U¤#XXX†N¡Æ)†¨0p;$_iöv¿ñÌl̘˜7.Ù#á|xH ‘ûôcŠA¢qòvŽn(¡¤„Sxy0ñIuþ½š<vbdÑ¹f7þëndڅF²-öÚØ–²O íP7“]ÐäØv’<z¾°1ÉeMÁÆ´9Ñ~˜A¥#XXXM* ·µT,‡ÉdªÐЮöVwˆ`©¯*µ±miZUImm(•$˜ˆ’!$XEæH¹#T#YYY)3P´‡p¯ùŒsuuݦÊÂJerî–í’¢V5“óñ¬¡Še=ÍÖWéaà‚Ø,@Ê»ÅÈcîñãêúI´öž›¿jcËv ÛØxµó=8 Ìݧf÷Äð˜ÝHdü䀔u…Ä€(¤¡8EO`m€îïÜ<'É-'ÁÔ¦d#XXX(€H€`Hô›`y"µ÷c]ŸŽšãY»UÕm²‰úˆc äÚË1 »l"В<µ¨Ð¶q…f3büó¨áEÌ99Ƥ¶RwWM—&K}Ý®ù×}7³(1ŠÑŽ9+#•ŠٙTCià@ƒ1–ÈÓq$)!bÆkxqh“.[#AKE:Öì­îrq`Èäj")ºê@LdXõ†¤x’ß,LžËa˚kQ¦¬%½4@,e™Ê%4Ñ2ëÀ#XXX¬2j8MHæ®´íYôö½w&\{ÛÚõ˵î»^\¹Y:Tj‰úPá’îB•ÙhƒZç£-é ÉFZDÚ,s~2fޙ°[Ùøñ# Pw#XXX¹ äR¤Y†a äܰå¥Î0ÈTõÙ7D™˜;e*_x`îÌeINù/ F±Ep•È(êõÃΆ+ýؼGùg냃è`zCå“ˆ‡y…¨Äƒ3yß&†R”™Ó©Mýôý?׿uɏÔz•â`|uj3îý;)>Þ`DÑ` %(€ FD†–z>CʧÂÂzϯ„CÙ!ì@ö@f"\´ڪ󀁇PKa!Œ(d™#XXX¦M$„Ú]Æ$û (CÌ@àJ¿,ˆö¿Â£é°£ ¡@þ‰TɑÔ€GÕv÷y äÌ`5–#YYYI–Àû_“tBùÎáhƒÔKîì¢íYkNB/¼ÕSóù#YYYÇSG€áý͞D†UNòU2iQ É='ÝmòÛbh"¼Êž!­{€ èïBÀ¥¥Cü²ž¸V$ª»2”òЉìŒ$Òböâ€u % Ÿz÷#XXX{3ñ‡û%LÌÍš£lµý¯¦¿‘i,„ûÂÂÁ¬=ͯ¨ªŒÌÉ@»+•É5F“XÜ»5W¿‹êgÓ8eŸ *fF„J)hJ ×v¨yO€ƒ¹ÆUk—Ð*þ“{}ð_Çwð#YYYâuç³bm°ÖAe$•u×ZÉEêõIïkÞÛõ­½¤'¬ö'êÒ;Ì1 Ï·î0À_F’%¦D0E“ª¨A{“׌Pì%rÕýèÆÝ =a0úÙžËø13“ĺpÁ1kåàǧŠ#ʽ½¯c 
Þ΅£T'×#Ê!À¨jt‹+¯+å°"óf䇔‘ÒS²«1޹™­hl“!Ö+¢Y|~ˆ=‡eé)Eçúg…£‰ìÂ]½T°Eg_Òg3|ֆ¶CEDKö³)[#XXXŽ(PRãÕ*«Ù$Ӗˆ¨’\ôQT–eÔ$k]:þ>×Èpw}XåãďFû¥ªÛ,Ú½Ž.–½ï¿#XXXƒñÙG¯‡–®ÿì™qÆÓùޜÎP^Œ?S‡ôoÙýëî?£Ù¡¤ýòÌ/'¿êü\{Ìa)¥j†–ˆ¯ÖÇó²/oï}5>9LXa[‹ÛOÃusÞ+ÉolíB÷A÷Íyÿ<:üZ:]’\.Zc‚"9ú£7oã³Q<Z>¡p{Ìn’j¨Ñ¦—؃ÁOy(y¤•"a™#XXXPVÿ*ö¥÷ÕOÃ/ÂdS8s9ÄìîÚÛ6“VFÂa)|¬ŽÕ¹oø1˜Æâ¢§‰¼a3ŒÔaè|}ƃÖ#XXXÅtÅԃ¢%Äm Û%=¿çŒ2±ív¸ËkåÛÖÉ=0Hð÷ñÙö;vìâM‡Ñ*<øñ›•9}lœŒÙ†µ•OöBŸž™ÎÎÝáyâÖë3(Lbf" ¡hf Z•Øbˆ¸§puQ1FB˜ôqÒ¿ÁÃÜÿO‡:Cû~.§1 3— ½&:‘ìö|É·øáäGCáÄø¾cZ„˜ecæýGо•ô##G¾rÚ>ˆ|XºD›L@Ì6Û/Þð×\¹ÄNаM°Ró4P‚S(#YYY¤R€ éÜœB Ovôã£G‚ý2"ÁØC–w9ù~D> þªZHO¤Œ]F)F|F•ó• €ùÀ^¨‰z>òÇ´ÉîŸê›RÛö5KL–™Ûkík_°¢‡¨ÐID ^ßÇÜš÷!R/ˆ¡ø¼Òy5‘rü„/)TèCü0Ьš(§õFQËä~³ ‘SýЏt´DLAsØ;Ë>X!ìïöµæ í5¬¿aŸR5¡%¤b>•ÁËÂ4‰üVClþuõ@úT‘û9––ZˆßA-%'d9d.JI+OdPëºñ?)Í1#XXX[™YšsF ¯W¶å¦wêuí­XÚ«d¸úò!ëö‘Ío´4zi~ÒÑó·³",£²Œ‹ÀBÑõùÎP’È}2þþ´}Ù@ý”|GÅL½5Z&l÷²Ä›<©J7Ð;B1d­XD§ëë¯ðšù•é¹ÐC rîU»Êé$ÈÒêïŸÜúúõñŸZ+\é±òUÂ{®š6÷n14Äí–ÔŒæEGeŠÖ°o¯w ¤]~¡ŠøÙ.#XXX4»¥Û›kLÛé\¬ Ë‘EI·›Þ¹K&-Âà44fËXØÁ›Ú¶nPcm2#,JTwŽ) N› E¡ìÆ¢#Mü;²×»´õ’9Ø}Ï7}ú{š¶ún@ÊyÚZRÌJÍ}]֊¾uÆÅµ“6‰m<®Ý¢»šŠdL‰ZçÆÚå>J»•Ä×Ò»i³šì¹ÍTˆ/ç5Ń1G{·ãçY6M*ŸämÃtÑJ6#YYYñÞ¬[wW]Æ×Ó½ÛvÊKY"mwW&CX¥ßõ듧ɫ–kH­ ò,!-bd›Ì݇Žq¨‘tøàހqHäcir´À3lÍHÃFÖ8Wçǯ§JItä¡Ms‹äÞ½]}7,“4Ñõ:Mo«ÞÕ¼´“0ÔT˜¥¬« ‹&M)™¤&–#XXX#XXXî«â¼Æ9sk&¼ÜÀÕK!KGÇ{瀢†eÞe/Ò ÂˆYy·U–ZÌd×Ò®.+%½9Üùw{·Y®À»$Y"0Â¥hæÃJlã#YYY@ÎóZÍæìZRŽ'r–¦á́É'Ê fiÖ±íÄÚjÄå K¤ˆœêqd #h€ÓKâg¹âÔ5„¦8<SR. 
#YYY ÈÙFû0Ù¶ƒFÃa›œ5‘Z{—~7Î÷}W©½Ù^F?¨µá Êʳ,±¶D”ó3z1§:=9ê”ç*ádEŠAÒ H4"bIâèV‡U˜=é)@ÇF³0È£iŽ ¥Óí­aÍÆÇƒÚbcÑ„€ÚÆfMa`J®ˆ#YYY©æ\³zxԋP4òéK/.ÆÉD›>wJÛóN‘‘%ç?[†d²E&HfÖ-­÷hQ£!G–‹ÉàÑšé—6²»µ-ŸHˆ“_¥zÚ®òÑj#å*WMsšŠ¢-²X¨¶"ÖKˆ¶’Å‹DZ’ÆŠˆ¶KX‹IcE“_}º©Rj#XXX|뉺jíÜÊé_K®˜›¨CFc„)™I9…‚º¹_‡VÒU¨­E[Uåœ1AÌ˦L#YYY˜k 2o7¥26ÕD•÷ºí®Úé;v;pÐ<™ L6€mÐíÂÁX¢(¤`E„³#YYYhÅ0qcˆ“d©MW\Ó]#YYY+ÝÚfâï'L´U˜XcZœÛƒG/‡]Ý;\Ñ]»¥ò²êõ‹/ÈÛ@c{áÞ~ø{ 2H¤„èf¥vÚ}Cü©={v¯$èS0©üpÄf!–V@ˆH‚ˆÌŊa”$ÃNâëJ+ ”¤×c¾6,îƒäóþF߈ÀÊý8sƒ\¬ú„‘>R7;„ó+Þ)ûÕËÏý¬ìGŠz†ç™TÁñ‘ÙÇ1ÈFP–¯Å›^WœµÒÅovéš ¨Q(P(D#XXXbF„\ÍZа–²†ØÚ#YYYª³E#XXXPˆ2ª%#KHéCCþi;¯ES唟3ÔЙÑ÷wÀIÀ…ÁÀð6߀tF‘äMÒ2*R# u™2Q ÞúïœãÙ]ª°n¬l‘ÊÁ•Ñ–rÁÓYŒÆ(Ã-ˆP±”©‘¯MÏlלÖHÑFëÖâé¨ÝI2ÇhԆÌZ֊ÒX6ŠBƒPiÖ/ŸXƒlÁË Üê!‘„)vØÒ­e‘ˆVVXV˜ÊšÙfÐݼ¡´d­ 5•¡0dÇ(b2Èä³PBµz_+Ý9œ\ÍÝwÍî½rJ#S%˜æc•VôZ¤SåøÅˆPÌÊ@«¯S&ô‰öpóÂp¾ˆÈzð} é¢"IB@ø[¨ŠX˜|‰Èa…íô¯ž†e™Z&Qt7ŸÊù‹#YYY.‰|øg# %yG:«ûÃ9ÈWF´mCIrć|Ù8žc‰Å\(Bt‘âV‚ŠÅ>O;Dz4êC$>o¿¤Bh&RUœÐ×å#XXXä&zèÒ%j„†'ôÎ^è~]¾ý©ïé«á“Œ|áš{˜®,õŽØ5û¹RY¨“|]3i”1E%Ž5ÃBÁJ˜#XXX¢Pƒ‹*”’y¼y1Úú Gŕ „”†•(C†;=^¬<ãPhÌÊKZÓ¢ ÍQ)bQ¤B%O‰õÂ•x‰3œ!¾w¿#©}¥¦¼qŒÂ¡úmiÆ & ¿ ˜œfH˜<BÀUÙêC@8ÐÓ璏D!¡'QEY…u[©ÝœësMËnÚ)–¥"*`ûät¤¦™jƒ0º¹—v×+XÕÈÇM]µÕn),lQU›ýÕ¹u¤Á0ÌZŒÆ’ŸõI𼡥 ‡‰ë?ÞÕTó¼¹P+úBc#YYY¡ã%ylŠ]Š;æ> úr!ÅL¦©øª^Ns–28£¿û‚ €`MˆmyõæÍü"¼a‹¤žÈwœ`pR&€’‡YhΦ×v¡5"‰¨PÔ#XXXìفôßL¨õ«Me«ª–¨ÓAª•¢#XXX"*¸Á£à¤ê¡»û>ڒ€Ì —;Cƒ{Ü èÊ)HIò‰2Šò܈I*A ­Ìt(ô±ƒB¦H|^\Cb¤H ”Ò¡À9L±°ªA’‘ HQ¶J5m)µQZۺ쨓Ne $êE^ýAiL%(Ä1#YYYI)¦ `JPC0È´wº< L&%F•;Ÿ$d§ $‰Ö¥pQap‰€IÞ±´à 28!ø`JQ)F…P&)˜‚¡K@=Þ'8ý#XXXÐ䝐QT̑QH= /záhCaI#YYYÓ ž»µN¾¡¹AٌË0ñÊÁˆÌ‡yíÞ~/Œ0H•=»(’f)`‰–VR«Aº6o–+ݰҚŸÉ P'mLb8Å0¥1½ÝÕv®ÔSS×p$$ê(Ž3œœRk€¢-Ê.Bræúm¹“¹J¤`†#YYY˜„£qaŠ!‰„ sFêJƒ–™njܖÒF6Òéd]»¹NµÝ# –Us¬cB±]×W:êê[IivÚÔ·j»]–¦WufcSP“X%…Vì-”Âb¶´ÖŒÄý"¥I›PfA™˜LR‘QÐQ¼"¼•~è‚*%R%$GÌǙNs!#XXXÀ„€~ÑÍè©1ÚUäç32ÑÆÄ ¦Ð܄©„VЭW dIïnïRo%ºâC+…Œƒ’ š"Â3 Ðwëwø½zD«¶mǯõVù©Î~ë™~¹¡õÉJ6WS:ד]k$ÖvÏ*k3VOãªódš•@o#ºŠš©ŠKNHñÞ^¥Ó†ÛjrÜՀÎîÅ$;¡Ž£B©ÒR™ÙP¸áõK”CwÏì›M¶çÌFM ì—Íúú/í£v̳Äàϐ‹dCÃwØY“S½ì2Ï×FN ´t·M²;/#t×RÞÕÒ]VšÝ&–u©CÙ= Å! 
[Þ§ßõ*×êÑ37>ééJ>‚.³"OLÒìß'8ߏG©¿l¦²ž54F²55®"9†õ©@Èz!¤ÁÌLNôK&LÊ[Vä&8BAHÕT^¸XÂVç3˜ÖPºë°ãӛ¯ýûFÐÒ@ÚfoÃ4h«Kɀ–#XXX4±ñÆ0²w»l:“îÖ¬øyoG4z&՘HÎýñŽÚ˜öÈûë>Û7­mhr'©”,c ÁIm£­bøÍq£Z؍ŠwG#XXXšØE9s#YYYRJjꈁïq”Äõ9ÈI|S-8Šlûµj“%b&ê#YYY]"\’¶HU½‰ž‡O4¨ ­{E/V¢…µâïx€µi÷:K‰h´é䆁:ŒR=T•;+ÐÚ­t-m¤îGL€²¨ùÏεÍê\sßÌâqP—«Cʤ|Ÿ‡¹[jvÞxô*§37®¼ Q¯'Ô£ÍV×øœtE?g™hj«ƒñ ”âxÝïş—e¯¶ø¼ëº#YYYµ[÷Í·ƒóR¥í>‡|3@ušKšÙlڂ¡–‘Ý뙞·¥ÈÿTÍÔÁ{7ïÖxí]p۞Ô:¿#c­MáEepõr]j×^‹[ñ!IUõ7Nœsݰ¥·¸gàæD֍£IpºE&“,ª¢h·I­\|ÆÖ§!„V9# <ÀìÚD¸æ1¦8$‹žªV?\´73ŠqÎK^¤ld ciµz‡w1è0xCúå°çz‚_jRxDT9FÈx-înŸ*7N:ÕiËÖ컖Q±épd#XXXĂѤ„rTdË2X‹ÍBT#XXXdë¥14ÃÜ»ï#YYYk½æMˆc?³îØ5ˆâа-Q†"oDžµ“–Nà7ÆÓÎÓÛ9Í8øY¬Q9€NûjtîЪä=jû©íÚq3$#ºșˆôrñ¶³—öTQן:º¡ÂÐËq,õ3Ð@Æý¹³#YYYÄہ›ž"ðq hõ#Úî¨Ï^Öõ#XXXʽÎ&úS!!ò®ç§â\Ix‰„š·Ù_¾ÍÃŸÉø Þ¤œ°©¾üδќãz3W'>;(@ûҚÛÌ”ü@ôX‹Óx¹¤Øü\è]+ŠJ!ªG^ж+_†×ály¨¥î䔚ɯ®»ƂÙíS€-Øã牗•øu¢àjÙnÃJ4VcvȁiàÁâ¢W3FTäLÁ´™„Ri§EäTõS_~·ô\|\78T3¢1×0Ýù§lE¤CÜ¿•91èðÎpÃbÚÌ`Fðg¶M¾¨ë—D¯°X«9"ˆôdk6Ÿ¹ëG]sg€uUºÅ¶P¸-¼9:–¤CŒl—if#XXXØÐ2xñõ«r¡±#YYY Fö©õð Õs¦fsš¿8÷®#ҝaö¸PZâÇLҙñœQâ òïUPÚïÏ'hºéÆÙÒ𧒁%FlmdŒ5£Ð¨ßnܰτé•úCy6iœ2ð’0@©ŸÌïßTZH¤xqö'oÅKgsëpYävb!×¼H“7e‹ÍO'¢ÍèÖyfîžÞ™äzÈVàÛÈ@¹!¤6ñàÙ!–T{œo}öut64#XXX,R½abD6¡ œØ&éýªŠK¾üµ¡³´Þݐ™jQöȇTUïùæ¼NŽs×rÈ<}³oIÀxfT“bC*îÍ5¢¾²Â&1¹'±‡Hœ:gvº(žù#;R1ž[º<ìº$k×#XXXñg†cKŠ-”N‘§oUÚ«NÈwõ3Óâà—֍©´è2Ÿ„#XXXwV„ûhaXˆT½âqì÷àðç‹*êM¹œnSÓ§† †[†òZÝÞۍ?Âf®õL£ŸzÕÜ<î)ùz×1v˃Rov„D{ÄS ¸ôµžT&ƒÖ¢]x8“ħ)#XXXD™(@óoFZ¢Ãƒ¿”ôÆ#5…4S*§gD¦î»’ZÓÅw|»°J<ûŸ9ãÑ>#YYY‹Ñ²j${žX»ržZԐëÖpõ›Ä̯[!·ë/´­z™Îœìß)j“Q̈’P#YYYlÎdÈt;Öh¬#;½…áà¯2üUˆ”?QvQÝà™µÛÓ&Ô? 
wX¨Ò~£É¤æþ2¬ªMˆé¢ÒW/Â}D,À¶}”Tú9 $ê"Ù¯9l;G;oµ¡T£ÕŽÁõ9¿ ÛH“¿Ÿ®Ü×H^¼;DñΉÖéý±^áúVچ£ÜùmùhÜñ”&aæà„;ï‡Õêþ‘¸¿U)Û]‹ljK׿t&~(çUáie߇×%Ë0™¶ˆCBfI™¥3`@ûƓ a=ð¿<â&6Ž‘ðf= œ2µ[–ÌaEÎ/ùåN8`û¹Ý¾lYÊéžČ}ž]²»#Wê>vÿP™ãÓvYÃgzž$Žû¯¯ û‘=éÈÔr•`3Ü'ë 㒇e׍ÁßO ÝÇÍAQ&Ut¼&¤ÁIóËßќØõË“ñWvj8ÚcÆØÖµñh|ü¹†óšI%iòÑÃÎùæ:v25D PÉåˆï~Žuw#XXXäë‡Î*öñ%DÏ1ë[ß«äQƍ’ÏÓä=¹C¸ëTBéW¼Å®1´$…ggNâqãeiFôZŽE8µÀ|7^Ï ÙÎ$×3ùùÜo¸â™ÌXzvz£K­ØJ'iôõ½RêY՘¾«i‘#vo¹~4v…û|¬êÏ#XXX¬‘Š«,¥(Ñ¢6Ȉ sÄúéÃh·šeLMZQ6ð›U#“òTøE@î#¿\ËÊ%²ä7ݨú·ª¢99'µlSëßO5DƒPò:óy›òæsCm(4Е:0,ÈȘÇЇ"8 Ð>A¹iK1¥ë:e&½ ›×èVíVÆyƒì˜!¬ÓÑ)#YYYlm¬IvG¸*ј6^p0þú´¢(n|˜žÎ…YèùÓøBϲ–—H_ ¨vx‡·áu­=dpÈ”czas€~”½•1'á¸vçÅÁ:NûÃ@ø“Ø:# §±~Ù셥CÀé(½ÿ?3æ¾ ,ÜŸ¸Ž½@Å6ôHeáöÅAûvyþæñ° ¾ûZ‰®=}Šm‚#YYY|ë£Ï£UžæP·b¬¨m‘ÅÇ`Œ1vl‚‰¢Y}’LéIF³ž#XXXH#YYYã[95­ÑmDÊÊf i0¹UHB3XÄ ‰¥SãT¼em#YYYYµ§ªb1 ÆÆ²·NŸ±ê¶¶øõcV8á•Íf?¥íˆn³Czi´Á­­3᫚å+kŽ!p]aĤ$å$ÁTIc)Y-oÊ·-÷ÍÓÔ áÒ%µkÒÕn(܄®:<y™Bd-›aEƒÓløšZÅ.ÆÚ¡£)3+)b‡˜’™ G ÔĕýŸ²z¦ /d¾K²>M®êòceg-fhYm4A·¬ô-•”üÂ(išb0¬ƒ»8ЫZƚÎdÃ%ASÒ;®z§|Wu«ôþbLUûÉL,}„yu‘¼r(9eHcû?dž|Jr`5wüŒHÚx÷aåLC ^’¹æb”÷wȉE2Z”ò‹¹QˆCÚBÃÕ8ôk¼c½•‹¬V¸BI57+wÅ£c[ ¢Ó£Ä݊:Ú& €ad~V"°ë»éü† ô¡ñ(Þ1_¿þhM²SÎGs/˜þL}çaå[Ê£ÏAä@ðòš™óÒ Õ¸ª“ø<0ß 9dÅã)óú{ä;˜~ú€´?…Çí2Á|2Ô}f,«± ˆt!$À÷‡œY.81xËmCá›ë»/¤ƒ`œÎ»u?ĝÏ$é#YYY˜£M¼Ë Â>LÎ6(šN„«ÊCÒ¾‰Hž &)#XXXH¾M!ºv’„’Ç;6rT{Lï#D„q‘²5’ mÅqÆ3NËáˆAkŒA˜¦Ç:Û2¼/Λp¹u4Á™c¦îΌ8jü^ºœ£ÖÁI1W«ZGäíǔ$#YYYAž¯Þfo#YYYÿ×ç_,þ t§ ó©âk”üÂP’òÁi\²ˆ$ñãøåÜ®×ôÓb‘™£RH͌~<SÖGÂ=›0»ØÀ­ìUà“öÝÄ€yª¨šR¨Ã¹UØsɃ2lRd̾‘Ì‹òkMMOÅsMÝÌ㫇ëaÜÇ ²íàš(‚:€¨4Deû:¹¬j±¶s”J±P¤³S@ͽ?‹{¨l’û¾ûÚÝ>®õꙥ3÷õz⽦¶ÖPÚ"¯ç«5¬ªÍµ¦ÚI:X¬CeݘÛ3&Ù2=1C Àîö8 "#XXXˆ®ŒðtèŒkëȉ¸#YYYV0#XXXË`™ ººCIÀ«BêDW$†R:…ASD&¨ämyNåǍiă‰MRœ®0Ø›8ҊòŠG„ÌÁæ= Í©¦ÙÛW"Ø1F©“jMU¦DÅÐóyXÂÃŒ¨I¢F†„ØcƒE3wNUK©[DKu‚€Ú"Ü[ÃM=V45°Œ†:ÒHƨÈЊڴ"N¡± WfaeÊtŠHŒÖ·a¥hF,A5k4÷HlÖÈÅD´„#*W[Ý ÚC`&j]Éî6¨„ZÀ1#r8°”q#¸#XXX„tS9qœˆC†‰^R @ï75Á¥`aj8ÅN$ÚiLrhªVšxµ-zm‹H¢Rgßëõ_–¯Â¹rÃP­ÐÉR–:Óe7DA¹˜"ª 5@¹¨+kâßD³(ڙ#YYY™I6Ëj±Uo’ëemÎʐ«‹Áy¢˜ÀãE8™XSñ1FöAo’äa:¿·•^Í#YYY° =Çsì86ñœÞaÚä¥D'~ÔóEsØ"r<jl/žÐƒü…±”jÆ@ˆÔhÂt'áz(fð b‘#XXX )*?兘Z‘23<š(B|fê^??h£ÿ,@LT%ˆÉ ƒZU‘YK(°Xªd™QˆÈ2¬¥²$„¬2 ŠJ‰(C_»p}AAm÷«="kì„ø“ö·ý„!ô¢ãˆM¦ÆŽ©Uº•Œ*m¯ºA´ 
ùÛ¦®ë¸œúW—¥±±‘2HWÚYJf‚WˆL’?²0„ç1ö선óïë ·ïŽ‹'äûê=½ë1øÉ œW|_Ô9ù.Ï£}S#YYY6"vÀ–bef9#XXX+‡~ÌM0PÄ|Ž{&£;Êĸ}Á°<†›F9Ê Âá8åÅüœ9hlÄ ¥ÖYÀ=¦ÑÜPþ×ÀıóèҚ2œUr“dØ`ZÇð_~\ÅyÉÔ@s<CÚxi0!Ÿm„ÆÖ/8É»/ü š2õÖ¹Ød†X>G2¢`¨õYrÀà!‡WRšºn•ÓnWJUv^óºM÷æíLD`e{¬H­`c˜áÎ)ŠR:Ê>¤#YYY†AÄV0<b˜¼ Š#XXXj;Æ9ÆRL[V*Û1~릆Òëà6i@UEҘø‚i}².@¢mWMUÒ֙µ&Ґ¦ƒÃœ¦f8XNßjþkj; ÷x†ªrTì–aO`@}çh;BUñÚg8ž˜éÝø*z¤: $#AD1Ãۇå_˳̛O\½†ê“#YYYõiðM×ÁÈߦhEÈÉ<e þqT_cWŽœZ7e¿V|Ô+åÿžè- ÿBõ?z}}µáQ-þ=øóD;¾LC%ۋ]ËN:aæi˜§Çôñ­ñ¦Ôô Ž¯ƒ'#YYYw…ÜÚ{Œ #Ð#YYYÛî%sÚ•Ësã˜Í½ÈçRkoçw8&ü/òrÁf)ÒÈgàvTE#Ÿjˆ»î cߨoÎE’ӓ؈ÐhD)ÝÃg#YYY¢¼„…O¼¾–· ƒÒ͛‚àÖ1ƒ›qˆ´ŽÌ4°Yð›'4Ž6h" -¯z‘ú4€¬Û4H ζzwÍa¼’¬Mr#²bb×z">™ØDÙÜÇde8Ð M ]á.“B°;eÄä0†ŠUŒÁ 1¡^%DD­€Œ:—%Czq;4h偢Á" Y…ÁˆKjj• ’›÷öë¨u½—Ldl½ù.)Û6Öb¤–M÷ÙÛv­ìØÖCßÅî=ùRÙ«¦éXžG‰…:7Lðw-¤•“Ü»i4ÁrwPŽ#XXXH¨å¤áàÙÆ‡%H™Ç„Çpñ²Gƒ+æ[ÒeAçÚwÍxkÏIœM#YYYÚÕ½-ü¾¾{†¦JÊB¼Ü”S§]¢qTC¦&d—dWÕÚr–'íßb¾Ýª»†W-Stò¶ìÌ1 ˜:C#YYYŠ+b^2˜˜ê±Ä¡(q5¾àt‰X„–@'&uѹ•ÛÞU;%Ý¢àš&Â'ŽoHvÑ`´&”/·"ŠÛÅ*˜B¬“m¶¨NÐú8îÛ9ŽAŽ'eqÕRFžxõª\ðÏ"Hu$»´”X¤5£SᣦÞm ‰¥^èƒfÓ´@™•;¾‘v"æ,¸i’'eCænIFäÝ6ۙڔRCîâd¨<W§2¸Áy’¢Ý†¾™"¦E,·f•ð­ S³!q叛ӒIŒ(Ýå46ûMÜfêßB`“J¡øM &VŠ7i΢Ñâ5É5.B”i‚â(ÙrÒòä&†¶}æÌ]M?tru:ç4t¹Ú‰.qå̜–_.ù_[ƒ,¾œ^9Žçè¦ÈÖÊœtE½ê‹‘â^j îܑ#E=¡HåR•TSÄÔ»ÊZ&‚ôYá“—U`ô `ŒUo|¦€;lqÁÆyž›Î$øžø8€lXžùˆ¢M;î³·C'Y¨t;ÇS3X÷8èOáߖÝÀÜ>uۄu“>Ób#§™Î#XXXQ­®,}î%n%Èa êiSY#xphƒc<ܱ¸715UHj‰^t“F¤µš<Ë­ 6³h”ØÖ3³WfB—¤0xzÙ¸–ûÈiò½24H£”o•¤Už‚¡‘žfΣdìè0wÅ ´5&V_RÝ­í\h^&6è~KiïV‚ µ8ðÐ" v‡«Þ¨Ê±‚G´ƒgsÔμÀp¹ô%¢é&B=.Íq±ÎSµ9,jAÍ]6œ¸µhLHéí"%å/X ˜i®¡ÇæØ°“ò!5Î^xÀ]ê¸=óÎÜô¢H×véN¶GçQ[2&ò÷d0™nŸPˆÁ¦›Òs¹¸G‰)C$™ NŽ* Í’÷çnZa[¦NÛ»ª¿^‹`ún÷äYÓ#YYYh$k¤Ev#XXXi‘G£yAo¥¢ìÍf#YYYˆ£Ü¶¡–Bäèg«ß!¡ÞϪ£þ6úÝÞ¨»¼¨xcŸ]/T’DùwC«DHÌàçm²‰!õžxÈX&bvÔVcÔÓsde+P{ß9ƒñÚ`=i&Q"ˆì…*1¦£µÎÓ6õ^Y@!¥Äúóğg¡‡mDB@™—d½3Ї^\àáä Ijù¬B5˜±é„b>¯Åà'ï'lZç}ß8µ„k¦‹¤gG|¦ÈF8šÙcƒïaç:¹k~WËÈðÕîxC<jí­×ZBAƒ:nG vi4f¦,ÝDRCl°‰0&ÈE~/ʼ“úN9ã+uÄXjŒcdRfÚ.'9¥|¨1…&ûöÖ°’GƁ»$Æ %ZõkšÙkFì{o/[ѽ©Øîl«fªÜ-ŒÔ–­pãc5of”Ûv ÐÞî#YYYÑ`p‹^5׍ëZ¥Lh6.i#XXX!­ëIé¾…„yÌdØÔu c|֕Œ»4V¨¯€bušòM5&ãÁ•]Lµìbxbá”””aÖDŒRµ¿˜Íÿ&“)¦}G)AÅvQc.Ý˺ˆx¸j)la9ÇU+#XXXԄHÂçò°Ö¥›#(zk+6Xq˜±­Qi«Gh¢˜E~÷ߍPÉžØP‘áÁùŽj`́ “p,Ø lvaÒ‘i‘ˆÙ·m#2bÝ­RB/-w;̙­&ðxüӑ"b7ÉÌ7Ñ 
ÍiÚjsõ$õ…Ʊ!s³)¢u§Ì«x^<LZ¾´Ñê'Û×.º„¨‰s)\aI%ÉÖd°ÖåoE¯+d}¦V,t¢µ·mºÓÒ÷E'–N’^ß!më"#YYYñ¦Þj÷R´šЦ 6Lòó$ÔÄ×n)Š\Ɖ£/)¨sWmá§¶uÝS™5ÊN:m÷,ÑÍÎGYT5¢_˜#YYY̐iËz{—iCíÜÍh’«dœºæ÷ï¦hLb-è$^@È`[J¥¥³R²¥¦Ê–Y¤ Bj†¤ 2㞛¢Ê°L€”i—ÛM*šÛMX6Á¬¨£®Ò½º#í€@4 VhCGgŽ·˜îpŽDn¸3ps#YYY±˜Eë'GUÐ"èç0t!˜`£’j0—PbHa„Êb²ÚòÛÙم$X’ºn#YYYƒ…0á´6˜Z0t E` ¾¡E­ð#YYY—±CZ7"(O€˜ŘwÊÚhN^M“w׆ékm^.@åÞÌaÅ]:«¬fN»)wnJî¬ü4šÛr¶ŠW´6⥳zSL½rt‚‡&“‚á´oÃmÎÚc‰V[1ŠIb’±4¶j[L±¬ Ôº›š8;-µž“‡ÇLÂ%h`LU¡9Ž„ ÞjÌvÕ+œf#YYYÕßCÅmh;«Gh$iÇF:ÅC%ÅqN4šßad1¦„*qÄéÐÈVœB.ì"·5©­î¸ #XXX3â˜(šI%׆(؊h]`ݛÙ#YYYaőtfL»í„"›R”²j(ˆ©C£¼çÏ HN‰™LLV¼YϦž;Îm“R¶&†FÇdÚ2t—BéGK‰wR’2ËØo·ºÄçÓ4FÚۚ™Ò¯ó@¬‹Ãx½8ØFáô0¢Ûx*ÀâQÈp¨wʽa(¦½Wdˆª@„J$1MDJªÒ„J ŠˆÝ:Ê ’@½" #YYY@F¡3`ð…í_U)0:;^PDR ë  y’KDI;Þð8‡yk·XjP5dÚÈ¢Ôަ‚ƒR¼A©2#XXXu4¦u݃­wÇÆ’&;»é»{çt×KpÈa$„I ëEnI¨ÚQ@êRÞ)¬À8µa%!š1 zÀ13]:Þj„?¢Cø ?1„’@“d6ö%¾Þy‡Î ÝÆ*¶É°"cmF¿4Öcc¥º»¥kOÎFm#YYYE§¼‘16m”mᣀ-æ¤cNÏ:˜ñÆÈŽFs3®ƒ¬õN’¹ÆÚ°áD=ÌǦTjj1éš&dÉrhÕ¸«Iia„§ij˜ÅäŠsº)(ùv‡Öƒ@õcŒÌÌÌÌÌÌÌ̪´¤’JÉ%#YYY#YYY Ê #XXX³Ä }Œ¢is<è…ñXƒu×kî;´#XXXÑ͔žx#YYY(û½«SÆë?”Þ¢êÅø§;ñ6Þ³¼(Ôv°Y¾-Z/E‘e´؋b\§¾â$sIØÖùÝ št(åoi¨w{ ³žV›DF˜¬&%_g7\m°}…›3äñs­E&VQªk¦™#XXXš†‡x¼©e2%±fØíášD$·½Ã\pÖõ¬ÒëZ3{R#…KÃé›ocÉ­2YY—¬Ë«&ÝÞ¨ÐSM½Ü `.¶fŦ˜ÓqĘuvJàeh¸« #·YÊÍñϧqšà²”)H(‚X!¥a!FQ™TˆR˜)dàC­ST "§0åÔWÀ|ŠKóèSß!2I2̨ꥬ¢•‚îښWs£ôÔ&(KV‘ƒxy¸ÎÌÏ$ò@>î#XXX_LÆ2G# p†åbÜd"‡ ½åØ"¸ó‚ûpÁc ÀjÁ3°¥ãñÀὺLJL˜BLAI¤Â^%ÓMSÙVãƒ[üæ_F63(†™‘¡Ù¯Á³?ÑÁAñŸ'ylé<ã¦ñN _lqøp?Ss¹CùË#YYY”f|x}ÔIÙžÑtƒLk͊é„7³†öâPü#YYY„šéôkEÉù³:(ßç¿/fÕàòǎñ¤KÜÒÿŽyQcl=þ°ÃÁtvx`áù£‚ cå,A!Ž ˆgš=‰¾hþLƒ­HD6ßKt”Ñ™µ´U&íÜ}×kت5±k|Qªì¿±ÖK%*‘pÉäOÄ|ª|úNnÈt3FüçÓ͒ú>L~f€‰Õ÷HŒ§ísà<wY¤ð4jdjDk®wºÜò—ÝÖä½×Ü®žŠW/¯u¬½i}j^¦½w§»]Qµt÷U¨ÚKs!²Åô6¯¤mÂ}®²nožùy¶Mòho‘5adV¬¢òƒ )0¢#YYY—ð`ŠyeCP£æ–ÅJÌI țýú‚~LRIõƒÞ^ܽffà™’dPbÆa«QBX ;(äH€Ú8æ"]Å¢3?–µ† U1Ë$tŒòUŒi‰<ҁ±5Ÿ¤‘ˆmÁ¼y}DHŒ?ÚÃýÿê€/„‚¿ªjhîÑH•ÜD^ôÞ)À¿ˆŽC݅ 쌉±Ç݂U9÷Å$'Ëö]´å#XXXÛlbcæ! 
ø{ÄïÁØL*†mX¹Æ(Ù·7Þ§•VÁæ`ÍJÂ(d#XXX”þ)ùkÃD¨ðp‡_‹¦¡(€ˆ€ŽxOKÌER‡W¦ïH¡Š ó|˜Ç®õñ°yqaZú=É~]ôBÚ×Át˜Ú4â\œH'Æ3Þ[=ô‘Å[IVŽÕMA7r܉Üx3Y'Çe:„÷j£M7=Ž´bëz¦Ì«²yyɆõ-;Ó[H4’³{B ’B`Ail06õC»Ë•²cÌÁ+Ž4¼‡p\m#YYY˜ÈÁØIiR̚&i61ƒ°Dl°Ó´²R²îDc#YYYSN•Z›MÜÙ½iêHlFâXÙ\‹()3ŒJ\œƒ#rÄjݏ#XXXÑ"#"P´¬È\F4†3”n8œÓ1莔•ÈŒy`ò#YYY7’&£ ÑœŒw´Ì‹*´Ç)#d%ƒKHÊY¡Ó`)^õG@Ê¢¥tR#YYY©i$ÀBL 5B"XDø]xTÖ0y¡·š‘·l“-¦€˜ ãJªÈTZ2ln=]5 ZeÐØ5¤Ñe˜àŠÆÚ’»\±Èä¥v\0”C¸.:;Sxq±¶Üi§«ÁŽéu˵zïyÀm&–ãµ8Îq¦SôܝffˆHc™#YYYQfdP¸†¼tuiÊZ#XXX¡)U<Ò|0‡©eÜB- (#XXXzáÉJZ#XXXU#XXX ª)€ø \„¤R„<A86ÿD¦v„ù#ºÕGÍT‚†‘yòè=psC¬¿õI\ÓÒ¯¯äÃÏ,À)•Ñª+lURbÚs:·n»kµG»šÞZ+®Ó¦Í ¨«i5U5bî×m]×$¹»0&¹s«Ú5jÙ6´³F«k¶k$Ê)URÁˆM¯®ªS@AâsàTî…;àsJ·Gº÷kìçuGw†Y`‰ý~ܲêê)6´Ü®K*BljD£)ZK)QœÏáz}ÆÖLm0–8œ2'´kAB§hžh`Iڑ|m…óßr4¡)̐N„ 2ÈÀ20h È´ð¤Á·‚€ÖÕcQU¦e’-™3MU•²Ø)´Ó-E·µ§•Æ(Ìc$1µQùY=‹þ:QGùBƒðþé§ñ³ãtþm U5&Hͯ¹}t]a5­?£ß9&r ãƒ3‹µ³f¶kCN[]Õ¤l¤KÃW5#ÒMI½ïa±¹¨H·P?»„Å#YYYS¬Q…Ý,†–aœÖ±È;a[‘­y¶µ0ª¸ÚbQ’Hw¡{Å,‹[6Λ&›z*•효ħ‡„ƒ5cMNʨÆR%F„ÚeàWé Úb9`ËÅÆFbF±£ ¶ƒcjL’¦¨ÎÌl­>Ö-ÃZÅ_V©¾ ©³™ºdÎE¦KțlÓ­Æ ­‚cv6žæ4±Ì–AVFHÍÂTÚmµ³Q(ÖÈ©§¢±–<ºÃXôi)’ëZÌu2d‚ԍ•¾&L]I!G\2a CÚ¬ÛvX‚;‹AJ!q#ëžl¬m¡›„`Á  äöºM<a—ÀBú­üý”SLcXÓ%ˆ¬ªbÄ^ht§¨;C‘庩çÒžÛ hP;êD„”ËáÞj2ÍfõJ|˜*{UENôÍ!!Ÿæ<Å)ã˜yp֜=f6€#{ÇZÍJldDDÒLə¶‹ãùE²·E÷1ûxìyÌõfRTÌë žhFiqÌÆ›J(Ñû(ë#XXXê2@ðŠNÏR®|W±=P2H)˜Œ€1°‚PŒLû‰Yu Ü)-Uè·¥‘¬G‰Î®v¬B£¼5†„Ñ<ã*£™³A5ù³ÙÈÏÍ3|¦#XXX ˜˜ ™”]9¬iMËÝ*ó[î¾TƒÛL×ñ7†X«QcíȊš ˆ²KËFQfjRBQ„`%I`Hd !`”˜ƒIL•ªÀÆZ£<‰ZmÉQ™)Œ”2PfÛi¦¬’M¢¬’³%K2%’,•LÊSX­4҈Ô$ŠŠ ŠÈ£H@R¢¸‚H…"´ƒ"H´†BŒBQ#mDI$D‘ÐEH…DÚ @q9’ ¶XH‡‡Âà|€ë*¦úzL¿@{[#XXXŸ¢@ÈGÿ˜œ#Óõn½­¶J¬¢U2ږ{(Ɣâ,ÏÕKktÿhÅ÷ OE_Œf‰"YY™˜ê¨Q„JZ #XXXKHüj ‹Q*0 §QUêC$€Áá­wa•–E*g3$4±†8B‘4¿½8cŽjÛ))(ɚ-RQ¶---”¤Õ×ì^õÁA®W#,e#XXXj©U\$Â(¥1ÆÅ˜sˆ™ˆ›Vïê«£+‰ü4£Yì‡ê¿ÙºÿLÁ»Á>‡sOß#¤›_l²Ð>r'әƒÀ OÀn:/(óøÐÇ`wyJ¨A™#XXX†BL¢´ÑR¦Ú£U$%ЦUf¬¶Ø +2I Jèî4ÀÈêõG!>îѬÇY[>ݶ[}&lÉaµ_#XXXáb›#YYY•Ë‘î®_ ˜Ëý˜3DG8WÑP`!è=…æóQ$ú´>cX~¦ý:å#XXX‡Åd(RLҔ4·“Ö¯/?ä•;¥~p;@ƒåsûCŒ=‰.™<Þb€p?ã뮆Á×ՆTv©F9¨<ˆ£%Ï.÷ñ?âg¤ŸÝŠ(н°l?:‹4õË à;CÞ{Þîñ•+BÊ,‰"#% À¼<nA‚ Ä†@þZ Á¹ o?ffFdBdd^ìÏ ŠÛŒ ZL ÌjMhɓmK&‹C$Ê%ˆ€˜R&$Cï–;¡á¦ÈJJáÀ ¾ž1QDî• Ú;7DÿŠRLŠÄ¬ÂÌÆ÷`}fÀ¡ÝÂC؟UM™"=ՙ…k[¯›Y­Ã©03ˆÐicqò”BShŠ,e+QEKóÕs¸Å 
œ¨È<ëŽ/’EzÀä?Šº_T¢¿®¼ÕªúÉ6“D¥o0®ÐñKçœS1ͯâ?2|º<HM†bû¤Χ&>/§í¾¿³$5)HPäÑ@UN8å€TSÿð=ÁØy¤ò=dFÈl ßáUÒ É ~"*»çz^ˎº:»tº'uqmÚºn%YÄî\\æw]w;ºé(aˆ"’…’‡°{`ë3Œí{ýÜÌvIœÁ4ꐂæó‚ ’(‚¢‚”NÏÜ)æqõç·quij#/¢w˜6~f !]Æ-Ûé+¨îÂ÷Ÿ>‰£3ê5¯¬Š€¿³Lf¼ A“A—-â¡þr¹aKCKBY@D‚BƒóÂÕ"b Š$B©’$JД`‰,`¦°Xə+#YYYTj&(ÀšÉXÄ0ҍ ’5¤–$ D1PPr(Z#XXX#YYY¥C*¦±caª‘ˆbªÐÒ# É&±3!˜Yj#YYYPÀb£X¬Ã0Yhƒúñ¡kD5˜³1“( qÈâPˆÚ¬ 0,â® ¬¢92à®, ˜JP³ˆ¸«¡‘$8 ‚²Œ RÄ8£ˆ°ŒŠä HFà,ƒ¹+KJÆ âŒ9P„8‚2¦bP´1Š˜ƒ#XXXB®AE)#XXXb#’2b‚-KoLd*“¦šÄ±¼™¬S“U0˜Q¦‰¥2Œ…¬Faš£R-JÚæÄZWZº´Õ6«…£l»UÛR©ZÜ¢]U։XDÈ¥,W!aC!l$` [Å``\„ª#Å\€dÀZ’Þ ˜áaå©ä4…Šb0&JÆ €Èf"ӈJ@J™ ,#,„0&8’¹’Á “ ӌ‘74[`¶Ú[XióªÖâ—r­UŠQ10TÀJaSX&¥5T™U8È´$#XXX`©(’#XXXäÁ% Âb‰ˆ¤"B£’‘˜"@S¦g™óÈÒ'wÒð@\àSþd`‰OùOÎu#YYY10AG,9¨Àdê#ø*<Ò;—D+ùa7 ¯þqåm¯‡”Ec¢ÛÇ0ƒòã0Á.AˆLB9!£fdÓ8–2kžþ Éÿ/óÕ§Ä#YYYûÔŒgL+Œî5ƒæ0Q¤?0/“9IøáẀ'Ýòg¬»Vîjk$6—uÝk5Út#"r0`IÒL'́|Îî­wwwuÃ;\¶ê²'8KÃ1Wè®x³™Z²Æ ´-NФ4TQ±3~ò¹E¹¶ºÈ,Lˆ˜À,Ë9ž°À€b¹#lÁöç1U\cSò::I@P6QP°ÀDœ‘]p¡ÐäƒKAGt†*›ÂºØ™‘‡.QzfŒÆ’´_½ËLÁI ªš£BcF&ÉЁ¥=h@Ä!)HR”@@ÊOóN3BÊCB>\°Ÿ1̺_Å*]pN´„i¥9V܌MLZJ¢©.kˆÒ DKo&ªrPВ ©ŽPÕ šXCËÍ>$òáÏü¾@Ôüö­¬T«Î›]½]®›FŽÒ¶:T&4/þ^®×·j•s6ÂÓN_Ö²Wö½>u?ˆ_ŸŽr‘Õ“zÔ¼°n©Ji¤Éʟ~½o”_›"½ÑáZZY2e?&ü‡ùZÔ_NwÈ4Äv²W–eñ»Û }&=O2—ÁÈÝHòÀDýÒ¡¡Ç{A#XXX)܇ë„h)kÛqHA ±D’ëd©œÌMôÚ4°¡ÐA#åE£ÝÑøßùÐë#ÛHî¢71ÖF›Š’20v4J§ýpš“ü°?»M1¶V¯ÁÝùmF¿¥ÝKÇEd³»ÛÝ|p˜™ÝÝ۔üýúök¡‹qpbñpΘ^4±ÐÖ„ÄŒ83NXñÀmp€üÎ}4ÐRÂÉ¥JJ(±jMµ#XXXzHé É– ™sS0+-C×îóåê†lÿ:rNkB¬¤ªÂ ¢Ä¤È1_óÿäcÕ_ù0?7ÛØúú G}ÑsW›AØ@§êЁ<;½?4‹‡HOzûÿ»ïߍË•‹óuÀäØìˆ…R]w°Ýwbêé°Ê£žâÄ;åß,¡ž}ý.Hß(eÍwÑÌÕ{B¼*P#YYY0ãéé̂í‘ce ™§M4°–¾É=hº6¶šô·gL!ª™Z֝„oqyDÂvllg*ÃfIÇñ±TêENˆ“!AÈðÚèwHM±ÎXíi¹jh‹ÄÊuwuε1µ² ©im®Ó@’ÒPÔgÚ n«a\ª“Rv{•û\*ãº}JÒ]jdé퉢=!¥½Üó—”EØüK”ŠLœFŸjÑ~ÔÆëLX‰&)-Ü2ç)Ƽ`ô›V¡ FHâŽHÊô»vΉ¢+ì”2oD7œ*ºwDCÀUu^*•ʱr"YRÈ©tGl9JJ´†™0#ÿã„\ŠÄ©ÏÜzíù}êÝc0zê†26°¡LÜfœ gV±V©ÈË™É幒~×jò›øÌ¬ÑF>Ó²Q¡´`EÉ÷þíìiêé[…d­e(æIirÂróÝñÌ»|ß3Tzš¹ÎSOcÑǛæ$ùÇ#XXXÊÛW‚K¡æh×50ZËí„\wGTŠb¬»ÎM¯Œ÷0ÜìgËh^Ù匎Çfˆë*å£AÜX¼f¼ ùzú¾¯³'â<ö¬<|=^•_Ҟû—ÉqôóŠÊ|<Qy+Þ ¾°ìû X¦(Rhj‘¨UF´Ë *R’M‰)´)"e6ÍF­ÅjRm LVª[ „¶+%­’ÛlµEE2Ò«H€Š‘#AC)%%Ê2Ê&{ö«¾UÒ°ŒO¯§¾LeœÞX¿‰¯ãpÕ±=´•OÙU èÂ>yP>ì£ný<Â~¨œ'È¢5zÄ_ä:hF¿#YYYUüküŒÏwFÒÚ긲hÛIX#YYY´[KM€Ճj›EZ¤Ûo 
F”3FƒQ“j5!E%Dm­zÚºHSGçu#XXXŸ“#YYYHD”!2D”5"M¡µ˜ß5—$×ԌÅ1¯ïV·~z»&¡³ãq«•&•(8!rçÏ|*›«ãŽ?§“Â@DÎ?§ÿk^nÏÉ¿O+ú4ïäòBG²PhbR€F€F‘þdF;ƒÑyÝk[«#XXXÉ¥~ÝÝP5oÀ_¶iÿ+@‰ #XXXÿüÅd™Md9› LÇþÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿü!…yÞ#ž½°—sVáÊR·½ÃՁ$«ï¾ùõö÷^÷žJsçU_Íjµ€øõ#XXXB®}÷ŠCF€wÜ_CéO®”vÏ}a÷ØøI #YYY>”÷]È R‚íƒ½#XXX H"B¥¨÷®ç@@ªu #XXX*T*E Û#YYY4½UíÛ#XXX|ÞTß{ ÷›ï²*¾Q@PP=í])Gw]¯>{€|yƒå%T¨f·Þ¤"ŠW;o»rîÀh>ï»ßo¢¾ºtêö/¯²Þ3”£À;Ú«S5l3tÝ=u[ïWl͚÷w½Ü½÷w·›–Ð6±ëë‹É6Þæë[ȧ§§¥›Û}»ížÞ¾î›Í÷ÍW{×yízÎûM‡½»FXzï³½2ŠÝÝÍ·¶™Þmz»ã¾^uÝ¢÷›ï´ö{ìFÕîç7NSžöK'ßn÷|w±Õ×Þ÷Îwgßxôéíë}±Ù÷ÝÞÓ»šÍåòú½¦.ûß>ՓV¡öÄyL÷¸T2ùåP(=4Q¶µ#XXXP¥õ¼3{Ô}\9ó|Ï=ƒºyÕ{à5½ï+¾öù øãíƒD­µk[ƒ“îï^îªëM€ëu›ƒ«ŠÀW;®¾îï]uöv˜;Mš-™}Þ¬O|¾cí{{†!¶šÑæÒÝBû½æ2ÏW¥Ý{ÙÜûÞu¼ÎÍ_÷Œ]¯KÝwß{æ#fX»glí}ííîÛ*ÞêÒI­]k@ºvv¼õVól½·dÛo­Üï>ú›ç—@§OoZ»¤ökYö_qÈϾÊT÷n6°ÂïWW ®×VûÞtòà…ÂU•‚î]ï” WFŒ­¾€2}Þìssá÷}öûyµ]»ïf¯¬í‡®Ýº»|õÞ·#ß)¾÷ô ô >_dž¯o^ƒcÖóÝÅìÎyG9¹Éõ|áôû㹎ãsÎÄ÷^÷ÝÕóff¶ÆØ=ښ´Ò×P¤Õ×;ß}U»ÉðBSD€h ¦šhh&‚b™¦©íFIåIêzM©šžQå=@”È"A&‚54Ó&†€SÊoTò™#YYYO)€ÐÔiF£B0 4R$ ©²OL˜M5<š™M‰¨z›Ð2#YYYªzG¨õ67ªÅ´M¡§¨J$ˆÒLU?ÓMT~J{&Jz‡”ýMOÕ#YYY=OQ£õM¨P S@ RD¡'¦‰‰<"z4ÓSU?d˜ªŸ´ByÑOFÔ2§©æ£ È4Ê$G …$ €$Á6¦©¶“ §êyFÒ1OÔ¦SŸªi¡“ÒHdК4ÍëÀ@ÿú~OÃÿd©ûˆCû¥@?9+¼#XXXq8˜¡ú¡ÚM?ÜJ¢Œ‡ô¤ŸWøØÒWæúíýÏôuË‚š*¯[IEiÄ1kliˆyc–iÿ§EÊ=K¦$ ¦Š‹Ìês×1vt×ÊLE>òùrf˜Ì¨)ÚT<5Yá¿íïC¯í+ͰQ|Yµ†J¢Œ’‚ŠÀãG?Ë©ªÕzcDŠ'-"#Ñÿ:„É$Sú±¬¦IÎõ³‚_þÆzRú)¿íýîЇ[Im”R瞆{NՖgîV®Q:f߃ƒÒ‡ÿ#mÁåj¿Å9Ntø#šÔ×OH±Gj T«É4 Úaº&kÏš5»˜!-h‡i9ÙuÃá]â“hçBÖ¥2;¤Ô=Qäˆ;–}~h"„@nXzòÈ?ŠA#XXX˜“dŒÑ!/ZÍYÕ监¼ÇüўèÙ1Šk#XXX¯x‘g”œ’cT_«”֌sç#YYYŒ¸u-Eµ–´íµØRòxÙÂ馚i‡Y"9¸`KÃ÷S&.ºŒ±L%¤;fÿævÿ‘삿õå—onRÍ<(BQ§ÙöÌþŒÄ’ÒêÆdÏrå#XXXZAâÏØÆ×ãìÿEҕhgè_öe/ê5ínôÕ<Ï95¿ñ²—Ûo}Ò\Rn÷Rh`tBN‡&7e^½È¯òØv¦|S׺/OǪ;ù1ßo#XXXc×r¢0UO“jõï|^¾üz΢*ȼåŸô×s®Sψ ¡­N|œ#м¢Ìk²÷υ\‹9xÇá·fñë ßÃRÄÖù–ÜŒ­¸h[«ºVÓ§k͍Q˜N®dÇ7$ˆŸO<–.çO#XXXP$1CönZjŠ ƒßLë†8Ê #YYY崅ênñؒmÌ0e1Ú¤Ô2PªYn.(íE¶³Ýòç:ûÒöÒÛoú=›=•\£<7Ó¹nuƒ-@ Aåd½Á5Sü´Euÿ'ᴛÿìK…j‰ËC›¦adÐuŽó‘ÇUÛäE<ñR:ò¿*ҝ;ŸÅyt§ûŸ]¼Þ¿<J´‰õvͲ§¡W§PÝçwh|E’ÆOWL#XXX\âi'êÁΞTÞRNŽîóC‹ø¯,ϑÇV*•²SzrŠu•1gaÏjÌKŽ3'ʈ<(”á´ÛnÜ[·$O¨R¥˜­K<Ì_Ò£ÒîÜ#QÞ¦¦¦‘Neÿô‘yä¡|YßJN#XXXIɖϨú}cAÁ^Æ2žW¿B¿ÒõæËI—Ï•Wis&tý¶{Ëޔm1®8֚§f(&bðwTÇéÞFXÿLÉýŸ«!¥ª`c 
µqÎÉÍÛ¸äÓ!¨6ЩǰùDç}î‚B˜˜ø|DÄ(÷ÕÜ99³!BœåíQ(LbSˆ»ÎýuE“ù\„Œ ³Ñ®g,i™œ¥ndRõ+¥°œâxd±E]õâ¼K爙Ë(8?𣛑 ÞœŸpˆ†^ØRûŽ<AϞĽ•ðÍœpŖAðç}Æ]àN¸ÿG¯8ǫᶻy°þŸž„Ì?N0;ú#ﱝ”u֖ڍË\K¯÷où.¡Nþ’ÃøQv4º%Ȓ‚E'/öóü½&ÙiÌ6…Û¦(éAIºþŠtËdÁ¯¥j…#XXXÑ ôÉPpŠÛl»™'Y"‡Ô”ª§ ÉÑ®6㝼QP—ó¸ju ù)J^êIžß©b$|rù=|ú¤×wùbÐQ3{çyã›Ñ3GËY™,Qtî⦎ð|ªÛ^ëÊœ‰”äò¶„—Žúq컎¼lüóy ‚Æ3”ç>©m#XãÑë/Ç?øèð_ÉÀ„¾ù†×ät$»‡eСh õñuÑuœSàè3ž;¾ð\˜êŸ#YYYåÖñ÷1~šQœ= V>l2$Ý0h7r¬LmÒ!L]•:A%~O͘±äAˉMJ_›ëU‰\¨Ér„šй-õcʘZm#XXXŠ÷l+,š8½ñ†PÌrt £·R†’>±•J%T&­×8tRŒÚĦ³Ó—žWtÊZ=ôeßIxÃ3‡’4Ú¯G滍ø´k¶í²)zÕ͡ˋQŰ¾æÒeánލåz²î¶Kw)ÎWûúR¼sL#ÅâÅ8pô.pr´nòáŽré±hÆ­²•× ‹R™„ˆœ™Hû\?¦õ¥§8†Ô5IK9_÷ÑDÛC2gßÕU«–àÀ¤¡”"§‘DÍ!Ò‡#ÎÊô§W”ދ5Öåaõ®MÄ:öæ·(òÈlNp ˆ ²çëY 5ÆJ­\ÐÛGòÞê•àC¡B%ϔT(Bª’s7t!œ<D³XfÁ^n* oIµ?3ˆ>í* Ïð>H+_|ìßX)A—Sdús 6 ·¦e{ÛC¡èHVujÃjjüž\&<½yۀ¡Š¢¡( ñgHU-KUJ‘Wßm:ñƒ#²É(hjb)ò&/w€ç'ßÇ" 'È¢d7¼f,„†Å {ÝØ/R¾GˆCˆx»‘ùdsã{/ו &šUX)J“Gê¤K4ÏHJ?q®ú™h׆R?‚¾ã¯'^4#YYY™†*¼y´r‹dΝ%S 'ІàÔ[ôzeáxãC·k,ò_>d})˾Jø[[ˎͧ"”ŵS ‘-6¢”‰IG×>9nuhîþãÛÑ^Åhrd¾© fƒoúJ¾?“äëWî<ÕùõAÈMCXÁËsœV4ÛãôÏ ð#XXXmiçÎT<4…YGå2Ðó#YYYðg¼P÷âܹW,Ãæ¡Â^q»ÐÈÛLäÁvd$£p‚ã1É ¡Ä÷Õ!çË>qœ“òzúm°A3š2ϛHzmžØâA=’œ¥? ,™#YYYSÔòŽ ÿ“‡¤¼‘¢2ƒkvçÛïáÕ¼~O-&¶Nº@2]¡)G™Þ8;IÂ9K«ïìþAæ;nå˜sÀÇY#XXXÈ¡~šÉhÔ¾òcT±¬G¯GñÍ»gDP<™ÕwÊû׺)ÝÂ1i|ëŒvROhÛê–æ¦²È§gŠ••V|±ÿç>cW–ñ†ôÀÎÍ!2re*ôÞ­æ¿ã‰ƒ3<Ñýs»¼+7•BF—½2¡µžCҋûsrEFáP¾ŽÒ‰iå•-ycÔ>)šàau,mi“EFu¥`ê{DbÙHÄî0ĐºÇˆøŸ,97VSÑ_œ¢$ö6¾:T·å:3ô¿Ky“m7É>ýÑ?Ó½—Ù/žþº4Ç*a˜°70¬±SV×#$6ºß;`Ja(-Ž•.=[4SG1†å¥L 9…û?P úS1ñYü½È–“ û1;PÙ0=Ùhàböi‘ÿQ†â×#YYY R¤>‹_>òOJ´ádirdŸ?,o#XXX0ŸJC yé2ꈰ•UÉ=- ÂÒÏW,vq Ô!$cz™"D³Ô•.D?֟9ˆÇ™#G•°øÏ:*Z"¤ë.ҖV9Y†}¸#Ý ÄWIãŒB )¥+ºðŽ0ïn[A©#YYYíA´ÙOÂԁÄä”ԏ;ikh(JŽyª”¡#|NĤ#YYYzòŠ×¯ÿ,¾²hõŠŠî?¶P} “i2@4¯òbˆbHš•7¯îýÛ(›À!IH€Rr‘?ê#XXX¤”?rÔqØÐ¨ü F‘˜Z)¨ ngoكJ R’šS°RiyCØß³À‘å¦B¥#øEÉ'^ñÈAè5ˆ‚–©iC݀L¦—(¨DÔ žjD| *@ò傆Ìïyç-±Ø2ˆš*“ìŒ8aòA¢ÿ[$[MRºGëâd—])@w4'Âi­ÌH²õÛ"z%ô}\悟³w†ƒÈ­Ø#YYY-?méÞË®k|R"yÏ1Ë65ÇÝÏ´>G’è/1£„އÔ¹rH¥]ãÊDÂ^Còê^r ô*Ûl`dd#¼/;ˆgófÂööZYñ ý˜z®*’’ #ÛbÐQê|뎲ùœˆPù.È8…›¸CP?„™;´Ð„³E3DHeÒ¡ŒK’‡°š½cóI„‘#XXX@#XXX? 
Æ1§$¡iAЉ¥T#&P†EtŒÔ´`•éÌü|ùpA¤U3%#XXXÄ£11BL1™›óď_þçjÕòKꟍ$~‹·üÿéèå1*Êi¾›8h&ýY9¡÷}±®^¨Å®j&N<6ž²¹ì£…ÍHS1–Ò½£ÃξÝÕ¯ïB=>¤Î8tN›„äò„þ&þ?vkL>‡…Ý*o¥ˆUðÁI­Íéì<© ›š;Ñ܋גkhȔÚY©õpx‘HßW›#XXXï8wCa«Û‡}"Ù¾w/¥Òn) ¢iÒtȝnÃòÛ L™Ç)#YYYښ†Ptͨší»O‰Þ›¦ Ɖ‰,òð®\XØV$<y9¦bm$Æã÷C3›pàØÔ¢ãPå&®4 B2ٯƱ³E°!sào[tâTk "ú>s›¦0à41ÔTžPC6”é~hÉi“©ó%–TQŠ"RŸ±ˆ´Äz@Ó· âb‚Øðƒ±tY¦k3°d‹;Di7–º<Ñv”T˙%ô\Qg¤1ŠQž]Å÷cø¹78à˔è¹=äjQD¹DˆZqÌ#XXX#XXX §U#XXX!¦R&`r0¿UàŒ=eý!¼þã—Ù{½f-^\…ÀÅñƒØOÃ'wÿá±ü犠ÐVûÿÀrUvw,ÿ%½³™>­)òúw÷yÍîb{ÎV †TUI–~¯dð¿YÜz€Hü_А™&Ó Å:Õ°ªÞÛ¼æ‘Ïo¦vUãB ”®í[_ ZŽU¾$Æpé7’di,µ•eõ°†m²¥d5“M5ߓ’ÅóMéôÄ6Ø»CáñçvÊ{äÏÝ`š¢”Ù†#I9Oq‰ì³Âg’X¨¯Íב/š.ª†JtwFFYS umƳ¦ÑÄoHÙJi’'‘¦×‘H‰-ÇÙ2\£¯¹Ï=ºó1Û>O–B¡ÐçSOŒðLئÄÿÖ$cÍüŸªÖA,÷Ž¥’f5[UƶnàI›¾#žç–SÊö£0hœ²3CO,†tÁÁ#YYYš$Öþ_GÇéÚukpv ¼@Æ8 8ömŒŒmˆÁŒŽìC( ’&˜â&ÅB¢(ˆF)š‡0Íömë=eXãÖÙ5Ó$ݤFþÉ¶Ö6ÚG#Kµ“»W$NUg‰°t8q4²&²ˆßžæÛ ™«õaùIÛn|=Øný/´õgÍéHQ2 —PßM‚M’™Aቋîêñúò´|¸ÅŒi„/HòôS™}^%åfôã®?7òåv ›T8i-¤H%ëY‘JBÒRñ“o윅Ú§ß#YYY²özœÑ'§ü1ñž’•Ÿuݶ›Ee·Õ~W’ªñG©‘Øe•ëù/ÌûùlþRF&¢*~¼H¼_.+Ûô=›9Ö^ž^UïHGÄ3ò8Ø`aƒ4ϓË)I6¿+g_ˆžTù9^¯{fï6¢@–(ɓÀàè®Wä±+ð];îxhõ À©®ÉM uªü´›7CõúšÙ8W8¼s$ð ,HgC¤„$:CŸ7 5-Оøù9ü¾\¦Ùa€z§Û<tR·OQGTI¤$&LPƒ„ðŠÃyéšçÞ6¶¶pDüo}vûÔX֟Â=MÁvÎYè4±å3J+iźz‹ùÏg•É,ÐþoHNÒF_gý½Sü×;Ј©k ÅËq“/A»tê맫ڟ5 á†]y”­QÁØ.w?g{NÊPöôÛßé-rË[?=öÁü¥C±;[ùÓß󞇩&çgÇßéšwcÓ,ÿ lÍßÞ0ÎVGäìlR½°¤Ìmßí,ãzÙ~s‹Ã”xy¼T6«~™bh.Z©q*‚N‰ðwSý~¹ë—³ÍluˋðL”xâë§¾zýºvð‚EgC&žTðSs¬¡ÝuÎÇÈ“†ÖÓ°o^‹ž•ç' €ó‡ ÈАsDÐ&í`óónŸƒtžÿxrÉW¿6:&¶]©™µ¿>|™øNµ"se/džk€ñìv{ÖÝ&uؤ!ŒO¦òÚ T›Ë#YYY‘æ«‘)Þ6ñw}¿o›jé”r¾ó8šEf¥fؕbNxI05¼ìïIžÏ'Êê2‚Ý!ˆ/.dÚ˚ÖÒаêOu2˛T¯cKÏV½›‹Iã Ø›™á6¡[¤Kcçzd®…4Ï—j#'¼ºÑӧۊóOì|ú‡×Tq©=}'¤ob¹%™ôù¼WªŸ;ÇM¨ÜW_£v’8AފØkß8«ñTyÎþ›ç=ßZè¾@Û4ˆÁV„杜ÎJ*cDnrÂ,R5“íÕ¯£¥y‡¨ Wõ°C… ÏÃö1Ñ*&„Ñ*fT#YYY=õÉôê&VŽ9Ê~½žó쓃ªs#YYYD$‰ÉäñûÉ1è´îÔêÅÕ[j8 Aš9ÆÚNœDüS8ҝ²Êíÿn3Êá±m²v‘"\s›‹e7·W£Ó–2«$ \/ž™3éh-‹Že#XXXËúpé#®´R¯˜þ*Cpábýÿ³ï†ò5ùSÍÝÚÛ÷8Й·õ~‰ú•-ˆÀ”¤höx‹Àì8=¿W‹ä·XØÀØ”áà½Dƒ Ä0ðÐ—/\A$[éOšµÝ©W}S·æûlÒ#XXX8Aæ+7 Bfhi3Ny~y¸Ýõƒu#YYY‰¡\ïòÓéžzŽyÒ³„8_T¾ÆxwO™Eû¢ÍA™Ï␶6ÆÜ{#YYYÃpíëъ,Œ¾žÍílO9©V§Vãä#pýcy´ÑبÓò`Òš@v!˜†a1³Ã#YYYTUøjKõÖ3—‡œkõÆVà5Íà—V/Ôxy¸Ü3ÊÏ\ԉY9zÛ¢–MÕkž3º#XXX d%=ȏ¶íwþkò#YYY6q“{>œe’\éÚ6GI¸] 
Z±°9@¹²«67çØø%Á[t¸•rl¹à×çÇÍS×èÖ7”ir Œ,9Ðs^®^‰ìˆN쪑ƒB?R\«ÞáæqÙf‚4lýpÞGͧ£,`Ô±ÝjU!êìJ^yûˆïaEÞ1ÝY@ÚkI³”¿PÛM$’Ì$Žn™¼ˆ47Êñ÷"¯×#XXXàcn¿Vý_ª ®üÊb«Ò°Æ#XXX¢")ìk¯_!Öùvۖ<’H""p̦(&%¥¢‹l°Ó1pöæH’wV¿ªLuá.P,¼-*,9Ú¹­èœøÔÖ¥"Lµ]h¦©Óf31]*~ÍÄo,çUêŠSÈMK¬±Ûž aïµ¢^e6§v^2.øg>%Òoù|ÃzzA«t/´?‰ÑÎ9¼×=ãe‘½ãWéÆÀnÁ‚YËÚ68OٝvÈk#YYY:â¶IÝٓ»5ˆÛA©hQ>Ù²M>¿A6œg>‰ã߉93ыYlt~‰1ÍÁ•mä¨CY1è¶BJÏÛýéŠVì8Å7l_£zx(^yuچˆ«Y5wiɝƒÏ༣|îà߇Qh¿1±bfç·)WžCO~·n5Òø·²ƒrèsyüƒÕ¸Ç[&*MÈ#YYYÚzpÀ4¹l5æÎ4ମ8(–0µÇú͙½B± §kYÑ#Y<ª tC z3•ۅ>œ{"HîÃ#Ùÿ¸y¸—…±œ¾G¬î;]~a½#sÆøÆqª4†¡!ºúüÃsØ5F˜Ð3Œ†ÒüØØÜ’°Ú ïɏò’ŒbÆÊlÎät&ÛqoãªSŒèÅrÏÇÉ6²Š÷ õûs§T½]»trzz†²3¦ÝÝí|¨JŽì 2›Šugé>N|ÌËcÓq¦’X¯1ƒî'W$—„r±È Œw,.wù¥„>bòǪ42¦2fï/Hˆ4³sÁâ¸l:GB=("ŒhGI(’„ˆcÃ:à2ajh˜<))ԍ)|Ÿ~³¨o;5ïÝÈí¸ÏŸ{šXo`4èÚö'œ\ðþ -}×ÖâÀK†ø)æ}®L@¹w_’ncÁçóo·„û,Òê³dYY ;šIp¤Éàr è\«5Šo¶…[TºŸšÙ9NµÖØ+Cúþ½l7h6€ÈoTúÁ¦ÌÝcK¦ûLarÞá$ÀyW9#YYYC¼¼Þaª×.>cæ'ôÔöztê|¯2u¨ä/^xp””àVüZñ˕AT¬„‡ž%¥„Ò½ò›²f ?jì(&W¡Ûº5îÇlI±#L1i3kÃ&Û1¦&ñ,y,8~(hÑ"Û`ªŒR|¨LÀª¶Ò+RZ×6JÜÇ£F´dfu2´0”E|DÓô¸bpö¹í=ÓVCR‡¬o'ˆÜg]ù<&ó$&.þz7%:íí;9åIW¾$£(|“›ƒ7!«Û~zjû‡päI¯Ý!ñDC´!»ÑkÔá*À]0ðɆÛkªLMlJ\dèÜIŽ5y=ážJÝӝ(7œlùrñêåq°<7ߦŽÌ(uáIÕO|M†4dDƹi׫ÇC×Ûd{Ͱ5/tú9mLú¶§¥¸4ƒZÇÖóì“`2`ÎË*«Èdg“pt—Ñ7«ÞÉ&4Ãù ‘üÄjÜÌí†í1ÍÄë=8W`ÿÝ~OÛïøvbÂ,Þ9ÍþS>Yq¼c 2õq~yùr]ùqêþ«iP3N#YYY!Ï_œVoIšr‰‚ãz¿×7ªÒXÁ^Îj9…ûÈƲ±0æ™Áésµ®9üÓpßÊ;ÍÉÇ3.-×oÇ#YYYkG„¤ã¼*nb…ËC릔WT* Gš›ß½ªñú öDiv,S߉¬e–Ò]6[Ðú¼‹÷²ùe£Ò…ÐÈRÕ áF×õ¤ȶ¸æŠê¯¬ìÒì™ÄÊÈv³ëŽ/‘|ìKâÎÙñ LØs”æ2·9ëRÖO¿8BÖN3aݤæ5\êEÇwèÏt®22¬«–-Ž`D1¶ƒ‹xzz5rûhVÌTiÿ“ûlZ÷­—íèrå5• ZêiÙîó!⎿âýÅ;GÇ·ê2ËÏlо¹šhÒ!ÁÞî#XXXçÃäé§GÕÔy]©íV¡Ìý“(+ü ñƒ/¥'Ú×zšÆÅ<P‰õ¯É¨óŒµ ´&$ˆDÓE]Œ%Œ¤à§éßË,SU燚p@à‘Ÿ‘c»êó‡ß‘Œv'³ûßô7—ô¯Ì«oß}¿›Xí¬Fˏ_àÂ(ÃñW¨­á4næjh¥’:¬þö#YYY^µ¯Íi~?ŸWBSÅpÝ"ݸ˜H|M]ä÷'íÞõ÷ríáLdü—©Cbw¢2æókjô֒¦õÊð4üc#XXXWå¨ÒÛ"7¥ñê>2å}¹ÂÒý0b0Ôra£MR´Ï9É¿V±Ÿ#XXX–2žP؍¢IŽ‹IN’Bk§¶#;]«!§åJŠðùGA¿\ëˆvu³Ä¸Ø®-¯ÎÁ֏w.ö!+˜½F͉ýPK¨ê¥KÕ¡ÓoŸlM²´ÝæZs]Ý=Òçh£R‡Ñ•¿j*É\ÒkL2'—~BùÐue|—g¿B\<ëia³F²"±_ØÒ¢Ï19­q56GHE p–K¿²+l¸^Øë`Ų‹"çÞ–ž®äOµ´º¯#Ò`^‰¥ YÈo"ûÇ$ҕ5­·&ý öüg1s/*8°.ìòˆW¡É2ªT9úI#XXX™0™],.]*´Â‰NDÄ ]M#YYYÆ®2Ü^°Qe2ßß$«Å7™2Xã)×N––µîÃñ|ð&îûÚì]%Ö'h 
ò×9Qy‚»ñúÈÀjYÜê~zð‰Ó-$r¬»«ŒÆß?Rú¿§ãuUÊÚîìÂúý®\wïî»,6Ç63"™(¨qÛ­ðÞvۊž/.Þuæ³ö'ìýý—ºf~»yYø*™ws~çušŒ¾ʬf›…ݲrøHꣲš»ÀéêõL8Žˆ®Ù=HEû†$,¹ù‚‚ |ÂXôખRK½´‰³¡’Ƈ³…þS&‹Ãô[)F"bƒº‹RŒp¼É1(<wÉ/ÖRÏí=Éb¾ÿö•ó—N÷y¾ Ú°‹ é*]?ÓP©^ÿEYfÂi3F±¬±“•Ê„sL[¸¸ÃµNÏ!©>6LÍ?±ƒpù÷ÛÄýÖ÷ñýÝÁì1! H~`Yüü­òZ“ÒÞg¦¿§Ê¼Áì #Væ¨ÇŸÈe&mïÓÓ·WÍɨ˜;ßÝ'ægFS»YÁ=þøð燬«¦–$¡X‚%P̆fj;¡°›:bñµ¼9E4·}ð¶«Ï*E#‚©Û(øy¨Ü%®}k,g¬/,º¹éðˆ….ü]Ñ¬ˆ!S¦BšqL–.;rMeßõv‘Fu—tàÂõwÄÍ#ªMHË8<±Õ1›ÁygLHË)kã¦4{ÑIMÊs’ùaúõ¼gÂHi™Îú=*=Vçì¼=Zªˆ‡&:ùü͉%˄_ÅÙÚᘣ$ܝ»µ'#8”¸Ê³;å]ñÇå»=äæpõOHwýâ¦/ǎ~ð†Eöß:Õúë €ÅŽyí‰ò|=øb”Œ„A£'ÂM3´Ðfló#YYY²…20Uzï$R#XXX#XXXXˆ ]Tá2†•Èa‚•`Yñ|œ®½ði·Ÿ„µê©Õ)ÅtïÒ.ô£qÃhÃĐÔɬ™GÂûuõ—ÄQ‹Ê˜?Š~Æ~#XXX(Gåæ߁c9Yµ’è¨LÉÉQLöuÔÁÁQ“¤ì„Baï‚Òp"zqvÌÏ(0N™5S÷®hÑIPAÂR…œ…>ø!ÐæjHiH­½S½Ìo.ýÜyæöŸÇ~[誫v³¢ugó#h>vç¯ÏÎÿ®0ç×F­KÞ>Ïë…VhOX¯ÍÆrԐb=38óǨ=¨ÐNü¡»O¯vå¦?3Â)ü´ØmRh_£nÙ4ayoÌúãëå¹$”²9F-ÅUI-ãã¯(©ƒßÌ_„¹e֚{bVçP.›ë»a‡íÂX¨ªg9%<hB§¡øZ!]ûíC(#YYYQ=ÚwåÛEA¾—ÚWç|õÇÛÚ¿#YYYîêÆ³-k«+ٕ«ð÷ý4¦Ì~-yK6ì«R®Ìé¤#.¾ªüNûö¾–#XXXÅ>޼­Vrædȝœï§èQ’ZÒ¶wðTro¾tã=#XXXQ<.a$ûb>Vk½NÑcNÚûÚ^^ÌÝxÛâfÉ!’I}`™†m| XÍ U;½}'$ÿ—S{C¨óÙVÞÉ>‚’ft6®Ùð8$!&bC}ýûc‰¦#YYY^<`jða<›âÍÚ󛹈¼QÏ~æ}9´‡©}O¶_„™Ê¯ÎFŸSºè‡=ߴʝœ¨üмÎÌó©¯mòâdëØó’àç&1ÁÇïm­òÓ©e…¬‰ŠåêÓëÀàJ§¦ÕM,¢b`lU¾ßͨìÕýHã'‡ŒòÀ¦.÷퇽[IÙ6Uy’É÷V•4H’!XÈqÉRüã©“±Ûòwë&Úé}#J÷óÚ%}"ú¥%íM>þÛõ¸;ñu)sð¤Óåd#XXX#XXXó{¸< Ò/YHq¯¶=W,Ã'º{ð9Ê[û›©›¬áEÌ7µágÝâÄ«ß8ÑÙòÞ~ް˲}8>íø®ÔQÈîcÅO¹¨˜KH9»$2HB!áixÀûäHçwò ü“a¸ëñ®—©®Ù5:“´—´…Xt:šn£½8TøùûÃàøy§µP£ÅõmOž!³ 8©—|9D›Xá¬Ð©…MvŸÕ¾tĞ\ †˜h8Ïs®c›ý‘–†‹Ér΄€ÄéJwÌ>¿¦cUx&ž›Â>•”,¥Û¸™ÓvgWIÎ}þYqGvžjxNm øO'î–{aS»—G;ßÛ^Ò3ð¿Ëçõف³FZx³qò£¢&wŸ;ïε‘)VR“÷ÆÙòŠâÕeÍïDZk>‰ÓB</é©´etV‘÷n¡¥¦ÇÃ#XXX KUõ[ÓlÔ×j5xÓ½¶»W–Z#YYY#YYY5²(bþµ‹&¨Ðí2ßSð´ÅM¥uŸ-PzÎÆµ/±~[⯠3 Œ‰ˆ,´E:œò‘ÍVó#ÀCêaŒV¤G‰_ðé¹k–1ãáZ[ëìNxcëÉÆ¢Õ±D!ˆf’¡â‹Iòyƅ¦|÷â˜êƏ–r××OI‹Ê~ÏFÜQ'Ž —’ðÖÊÑidðx ñːÑÂR“##¤¾xõÆýæûùxºùúûµÞcMäq~óÇ朗÷ª,¯…zEӕ*R%û6‘؉VH—ñ·ÛY¶–ðŒHñ±"‚–o)¸|þaöço>;{š³AÊQ<¦F´¦Š­…6WºìñùO7gˆ™ ÀüJт``dŸ·x2ùµ€hû©Ù#Æwå‡Ô³Àóp®žc®®izñÓÞ.xÆÒdb_ ©D½ vÈvȰvÁ3è]ÃTo‡åêÌÎ;˜áCbýÎvøúÿcíõiDâ`hª’ŠºA¤h…"R"ø3ʄ#YYYÓÊóë£áKÜø¦:Í='µe9ȁÿ[# UŠE½ÿNóÒ£ãÀ–)Kk)[³ìœYæÎäiWÃkĵ{QÏvù¯§ÐÀ€rë#YYYÄõüÙ§ˆ6¢>°8!ŠjøOÜ«ä›êMÈMH… 
ˆJòÎwëI[µ}M„ç^©&ú/ü®ôzt‚«°DRšAŒqê‹Ït2´&Rôö^uZ”ÎX^•0GÔ¸·²†ê ³!]ý¸ËãÈjKi‘¹·“)J³;äŸËH–ûª“ØÉ#6úÊò„Ói-}Þ}÷}ú{Žy%þú ­_4GoÅM¢}ü¦¦AÛáÈöùüÓLÂL’E˜ìdp›c ‰pø£Xo\™Ö ÕF¡Áï>ø5ŠûN"{ö;iVsEäYSŽŠö;k3GcßL()JZ«ÖuÛøÍé‡ïœí?væ]¨èîL„;2fnŠR0aJ)¸iú%Ìqp²ƒU«ð³£³4&ÇÊãÑقx´]§‹±ªJ%«3M¦åg:Š´aP!§®ÇLdg¯cœºzç ý˜<;>”Û•#YYYÅOh󈖿s¾yûaä¿m}s#XXXðúéŸÁ÷…Ûl6ÊvÆVÏÓ(EèV$ž©¬ý«Σ©gé,R{Ô^í±M•*êò¼J‡ÚóÁR–›Š,íôàµàÙMšËnƒ2…¿èÕ~KGúu¤¿HÙÊV“´CwHò‰Ä¹qŠ·4NJMž’Ï¡(TüÐ<Òñ¦d…+˜{-–;QÖF={í\gžec³§å~º¶zõÙ½)$½®:[úGŽ­ñæíšYp5ÎRۆV}ìòjt”­‹…I_zšç*(KŸÇêæÿuÏ;âbt´+QŠðÈñí³Îkß/‚ØWÕAéÅürßïQ~u•¡#Kg|¤T@å²ÑÍñÈltŸ:iU¡“Å#XXX|MÞX˜È.Âl°ÉMßkstý4%÷¬¾$ôUpÃïwK®r” |·ù c2ùæô)H4l$¿.²hþ;iކ‡¿(㲛ë©(®dv@Ðî3Žà=`0\Gqt)nŏû#XXX;ʼ~¯”‹É˜>yxÆÔšhëY8®Ôëz³;Ö''ºULáÓ+Òcô”êÓïè¼~þ€ê㶸cöCÈÑùW½Ù°‚ˆì ÐÂU'qãŒS&¯ß|A k>d3a#XXXH¢¸±m‘8:Z‹èI¸®»=dº›—#YYYx8úéž#YYY‘ rrXv7 V/îÛe¾tš*gœóË»w—7=rsQð»_V%»U[ÜõÝ7͉É[—L'jûjýÏÊv^õ¾“°§åñ¿#YYY=Óºß0Á.¼»FÂríð‚R~wfë½2”$5# JIVRvÍÈõŸÚ¾97ˆ„µzˆpö¢䲚­4†Ì¼<S±¸&ŽåóWÌüñï+QIÃä呴¶üE֜—S‘©kí†ð»Ç=;ÞMc—èÄjåm%¤ê¬¦Ñ%áHžn³).’±$Ý;³vhÅK·2lÕ-”|tyrÄhζQÔ2þ¹CU—9¸R $BLoÔãwq✮òœYq ¹³‘c‘l¬- »=c É3#YYY‘:¦]»§!í÷+# ‘ zI7˜{·8¨ÎÀ½­ž1‚…Ý·¨šd˜âÿŒTœºæ>+Jyö´0’\¦ážá»ÝgòÉ;;ξnòÿ¶G_—­;q#XXXʂ…‡Å=ÛS·êæ’D–Þ˜ê¦3.O‡Y[%—ž¦£q¯jËÞ~ùð<¸ÿ3ðï]šW¡)?ÌÆ]¬ì ”æûãNƤÎq=aÞbž1\“tÈÒ9ޗc5S5¶6§Ø}ú+½Þ…<¶ß£ž§øo΍äc|`T1Ly#XXX1•E‰Bi~Bk*›5¨&GÆgõkô¾AdoG…ßZ š7S&ñ ô噻ŠÝY\¹g¥p¬C "²­ÉB•Ñ[„íôT¤ªœH3‹c#܈-\tüî¨4ˆ¾ø©úŸ ;çêç‹äQ¨%4àđòF3yáÎ,…ŠÏ9?>x/La"¸$ü’Ø{ù]¼×¦[ªªÝé­#XXX"YM§*ëÏÿ^8àm•Átêzwù3TÞsô¸)þ/öôb[©ê’§CþM~*ہ|¤#b|ëÁ¼Ä°F~VÔ7õc¤c’#1«éùc‹/x¸!—ï}¿=‚ãÒ_/6rœ›íÆT~Œµû°Ïï›6ó釅#YYY)ËÒû¦ÞxÐëÂÍOñ«©•¶Ò¸ï™¤¨Áñ“=ú“Óa'’ä䝝:˧f£ANÅ|l{ä!*$¹Ë¦ÓnÝ å•JÝ­U5þâ„:Wí™%F~Ûªü0·H°¨@£z4FÚ[´ê=ße5ôFzK »çʄ¼Ij¬ Ûj÷FP7¤¨à“-‡c[åÌéᥛDÍdÆ(cykäô7¿$^XxÛi'ß/!jfÏ-Ägy¬%º åR=‰Ôž3Й0“?_CFÑ¥Ó,UL:öÚÆº¾‹+½Æé§W…t.šÊPüD! 
MšÂ„øÊ#úG®U»Ì±M;g jšÌ¡i<÷Â}b-c¥d³¶gž"´_š„7`ÍËQHƒ^…¶{V1ì”OçlD‘Iqҟ”—î»áÀuÓ=õ[Ù´E7¦R˜ŽNI“aBÎo¼ÉkIð4’ðr#YYYn¦=…ÆÛùZ]c}Gtî»=>ÎñºÚ#YYYSoÙÃn#YYYôN„¤œdÈ ÜLí’()) i &¿«ì«ù_ӆ¾ÍkJ¯>jP;³¤’ÀA§¦:#YYY£ØŸ©ýW£[°ž,¼Çe©¬Û‘žÉØÈi#YYYQßۓài€6¦BÅÌt¹R|«†Žrbï¨Ñ±mÔA½«iZ£ÏJ3¦ Šµ™'úèJUy3[t¬®ç…Šë}$wÕï¬ó|pœ¡W y;OTßLëdv£gú.« ÀL&JÔ탮š|¾Ý5l4㡘ô¦‹#XXXµ#XXX·§`p’ôvõ[«&tEÒ)»¾öÝÚ_YAÚúušDò⌧±L¹2µ†ëYŠ¶5缍»Û[ä¤6kTÌF·åyh0ýåóq@Ý2ÍI儸QžltÞÖRçGmrR¥„}ýïÒ.BÌL^q#|W, %)¼Ç9Çs,‡3ωZ#YYYKµÐˆË(6qn•¦~ܱ×‹xnÓ©€öJð>¹Ci¶!42‡?t·’ºp.¤¢šJ ºà®BD”†›$¤(÷ׄÇ}r¾zãÑmpʅL$J0pÿhpåx¨™‰˜zQ*8ãÄ#XXXgà‚£p#XXX¡jÆø#>›gAUð$W…4ú% „û:ŠÑ6ºV;¥“a&ñ«Ê;C0ùÌfbqÇàìåÑÄ;à#YYYwùmáO?‰ðۙn[,[4½p–¸ýô¢G<¾JHlp¼p ~œwìæÛÏPʰֻœüK2HHøÛ­ÚÛ£c'¯l¯åΙ';í¼Ìç§2³âŒú±å]§Ú¾' ÿõáÉ,øâ¦]´3ù#XXX³Qñ”…œú‡ñl¨vv}u¥uÎÇo>SʸÞú&XR¤ÓÑØw·áՌ‹¯.yøp;VÅ1 &dš2¬˜I-±8ïm \%Ֆò®O>O"Uµå4tÛÕVk̶ÏLeyýRŸ²—PŒª2à60¶\,>óéîmgkåÂÙe, < ¾Rçæ<½AäÝlüe®mQÈMꦽR9O¸t°~Ušiìsg¬VÛ2EybÛáهÛ=;謮'«æïîΓîOlŒ9Ì];»nˆ™Þ#XXXÈ§‡kñ“_3KŸÛȤ盦az‰&Ó 3$GíÂHÆà€ô}!/}¬ˆ¬#YYY4±ÅŸ÷⻟ŒåÅÝ/…·U¬„덶lqkÆËcÍíåP©±¾¹ö] ‘ˆè Ù3*¸‰•,FAó,àÃSÇ ±žO~Ø«I ¾ÌÇ1¤S³–Ç#YYYa±ò®¥àº¢J0C‚à¿,O"Mû¤V¶ ðœÛ×hd®È±2A%…Úgɔ+øNE„dãÖ¯ÙØ¼\Å[7Z¹$`aÆK§a‘ÕYˆ8>cã¿¶²ÉT;¥§xÝɊV® 'Ó³©¯—¯×Òt4ŠØZzøiIxP1tóe—ïnÖl=JÕá[iLB¯o%v•ëç¶)?Kt‘£§õ‰piI¯™?&Êïl‡ž‘ºX_IBË(‡}Â\÷Ûk0°‡‚ÖdɃ¹cA(z"¡­¢UŠÐ‘”eÁ±} ­­AË -¿žµ6­D!Ó;âË»ü²aOº%vƒr>ä*F¢‰ñ$ú]dø°5N7úV¦p­ÚÀéM$û»9åj•‚Œ˜²Î¹%múP5¥óQßrìû>¢ Yõ’n ?6TÁÊ*¤±‚ë ºa1ëX¾'†3›%Jüä3毯>ȅŽ>~ÉtÙóJÜГˆø‘’%yÔͼÅd…ÑEƗ·£ëƄ!>.¦:\g¯|xg^ÞR0êŸ=¦¹m9ZŒ"ú#ó{u™f§ý u^~Ÿo·Jõ-’lö¸¹‹w®î#YYY«#(Kì†|$%Y¥ê™‘‚#XXX¾0U”4CsE\%ì¥J,¥Úίs÷u‘2â½·<ô„ÏèÌuÁßäúä©Ë‡šjßw}볯:×Bà#sKNtşjxßw^[®džm¥ÁÊê#XXXZã›Z#(y(ÊIÚڞիvêÚj;ÓH'½É=ê}û͖bé·æ²šh¹4eîù•r“ƒÆ8ñ\s“֎VuW ¹*MŒ3­Å6à³ÛñŸe×`«ÆÎû}qÞÍyïY\ruºÂ9ÕcàóԄ†½‘êv¹íõÕJ7®=þ'…‹µÏ‡çx™3+WØ·¦Ì¯3wºtEñ£XMOtÖ°Ù>˜QŠ­+Ž–1¯dRñ]Ò9tìôWˆÕmO*%jÜpÌ$ÙÉç\êßZƒ¯gÞ¶¥Šg5l¹õ|÷;ç0¯¾lØ;g†×Žx¢øÃ÷z“3’yðCð¿ÞSµw“ÑØ &q0“f™&N9–ÔÎMªÕdOQR‰J8žÍæ÷W¯œÂÊ]·ë{í솸õ¬Ö 5QMñ-"qiB‘”r¦¶Ð®Ü£|ò|•vô35Âr9ÌÒês¶ÔÕñ-jr¸â#YYY¬ìpJ¦8ËŠcá²»vRP(œ¹$Æý0×Û¿;՞gÁ×÷+<s¦Ðø*¿²,°Â†JԂmÖÒªäÉÉ1;$Š—¾ùÕxN1 Ç¡ ¡©õ'~DIã,P‰|{O:Ñ­?M:’wì¦ëš£*¤,–#nVù»¨ÎüO­òä 
.×td[‹Û+î§8D÷“™nž»àúk]¿Â?w«§Õyï”låöÏjڒ£Yu%8†ª±h¬[:èñ¬äM«¥¥æ±Î{ÆÃo=¦ºÞ.{õÖ·Ãgƒ‰÷Ï_}=§àÄ6¯’fª­/­³†„¼ž\-£OËÅ鄏Þ£Ã#YYY¬a¯q#XXXuKÍwšò)¨¦¬}øÈÔ÷•t¾žÞ}’ÃG2ì0Ò50’ó ×ޞMM;DùóNÑßóŞÌñ‘÷ÿ¡ÁË ÞG­ès4Ïm¾½ò‡?¯¡Öœ%ã—M±uù}аË`NߝŒBÝj°‚ÑÚ|¸•–Þq[ÄÔ×ÏX©Æ)„æ›ÀSú¥¢ÀËEKKŽ1ýp׎#¤Wîó÷ò¾qoo6{w5ôŒ>êt˜Í&Éýú “Ü\”ë$®ææ¦‘‰/JõæKÇS±Gµÿœ¤öü:˳ªæ&ùÉǡżÑvQ{uTC“)ÔV#,Âd™þøó÷æk®UßhÅQk‹È“Î%œ¢ø=`Ÿb¥uPµ¯•¨H§§ÐßiÃñ‘Œý[…"ü}©§—¨†‹“‰¯QÁaéØêÆËƒ™·r”™}Zøøí¸ýP>ÿn ùúô­ÜÒU$P̺¿+–ïÓºc=½±yÔðhðƒpú˜á—:êî#¡PNcÓ9± &ËXÖçëlÿƒeŸ}´ÅæÎÍrÇpQ C´èbÜx׊1çÅr(Ž˜â·~‡g&ӌT~UÅW*wk2½Ö†lêEÝ|czÖÿ’ÚÞ<RF¶~ÿÏÎõ¤ã3˜.㓿H$dãY@QþºººèÉ Iª!Ó®ÎÎ>ý3®¶Z™ÐœB“ÊOžQ5‘±ˆP¶ñ¹ŽÝmÝØX»W2rtö>U¥{¾í.Á\q¦é °2ˆFÍ#sT«Y8Î&f‘ÄO]øÚ¤¦?ˆaÔË5¼"”Ñ8Œ²Ãvñ§ ¿¿(ÑN³„¸XÝÞÜÛ)Ÿó¬rA¼é ©piü%Æ¿dsæua-9ˆfn §Ԓò¸}IÙáë>ßxyïX›~ۘc"iÃkÍí ¥dN$U3±xâO{Ô`q9%ŒñB¢çÕEÉו009c…‚#Ä–Ûßnj¬on3lH—‰O~½uñw½O¯yÄÖwÑ<¿,8¼²‹ãY}×iHVTÏõOŒï­±4(G]kêgXmŸ‚¶nôÉQ;˜¦†ë‡'Øä«&#¿ñd!¶8ç×§<+ZÐËëæm¼Ê}׋3鄆“uu-Ô.ç¢h×;¤Òï4~5i,ßçRjÎW ö?V™kŽz>*„ÞO Ÿ¶Ó;4³VV·j|ì¦P›y¾4—íûga­áö`Ó]½“–í"/„1ÀR˜æîõڅq²ëœ97ù>5ñ˜s'™£ùÞ%¤Ùèé$ÂM†tn夾÷x]PaBÔ%Íñ ØšžZ¬5;w¶ú)‘‘â#YYYxžôåׇÛh—Yg—Q4%#UBR|¦†††´%&(¡¼˜a4R8ˆ|²›ÆÕU- Ò4PWi¤(i¤ër7»å#Ç)HRÐ*^¡…B…!KK)% -A)#YYY%2B–un éœZ€Ê-µØûöÓÖ8dt"ƒD($ÎBo¡ ÓõÇÖ6_||O÷}ÿ×jö; ÞÇ?3C–0󒰪ä†Þz¾]¦«]öhÌÏ-{ÏÛ#XXX„M)P,äÕ 0/ÐX3@A š‘OÉû¼ggӇNIêîÏÚ¡´¡ýˆj(`Š /óÁ‘õŚFªª@Ÿ<¯£þ“Ø}/ºCrf¬5éäläë0%Õ`k-iðƒø~ëïùÍõs_ÝJG0‘—þøÿ‰0HqØÜóóø%Ǎ…bEO¹¯æn…îçFsñ8ûדM‰¹TÞk3ŸùIˆ7NŒÄÁ.Qü‰†Ž(ª×4¿ð†nh°ºEbí3Œdò@II¼Ã&Ç04LßÁv/£ðôú$~»9ÄLp2ÿY ÿ%þT~äĖñò “ááL#×Û.Û~¯ÕG¨#XXX'Ì^j"–*ID3#‚vÁ“µAԎŒ"›ô)-p<Äþø{+Òþ%~#XXX#XXXIß«å<Wˀß&9ýl6ÍÝBeyÀüϟñ?¢Ä}iÿ‹ÿ¢tžÁ½ÒÎR4ï…ogf÷mò‘,ûb ¿ï®“íۏù°§«¡k¬–I›“CËw!¤“;³’ÂÇùÜݧgù½G§ßÛcFÏhÉcòÚY»OT, Õhót„Óÿ_²§´³i‹¤µõüFŒ !“Íû¨uðÄǞOÛNØãMÿv[ã°Ûˆö£üaø³ºM$“ Hd—ûÝ&„ß?þnІ3û²åz#î\ÅU—Œð†G³øÿ唦^¸`R“TjGiŸ>ÿïþ›y·ø¾¾ß¢?™Rá/'ãJӏ”ó”(–1ل°_Ä^«;c!ý™Ó+Ù!Ááß²c©=Ÿ ëǤd©ûC óÚ|â¿ÕOûPÉÆIݦ>P±`uùå2(~ý¡Ü¶WÆ©þVžép€ç J›Æ5¡r!®æ4Gü™ÒAöOdé`¶¦'ÔºÖÜ´ØÓXˆÒ$NÈ)oÊ2xÊ’è•ØÜW¨DõOãþ$¡m‡úÝ|ûz×ø8§ï¸~]ÍiôÎ~ÄÌcdSœÍh0’k៯±ÚœpÊöHzƒc{×±þ ÙÏà8AžÎ–ù Û3h¡‹AùÇh‘è¾n¿¾|ÝÁÂãL Nõ<Âlð@ÌJaI¸“gˆÀÇòàÏܰÕkþÌÿ]µ'þWhևBzƪ‘>ñKˆÍø}|8ˆÂ³‰€Qf»Ku°ÁD&Ëåíðþï¯Çßa—³æ÷+[IXýëøèþ‡ov²*Öþ^»åoµy>¼Y8ð,‘ 
8z*;¸Úþî¨c¢ü?«ü?ÆAÙ§ýî}‰íþ®Ùç×IÐñüzpóymÝßÛÛ¬É,§83ÛÌY¾»3zP„)o'fߎ¸oô~<½}3ÉþÙ‡£ú¿Éûþ/ëÞ»ï¿C««üPMï{Þõ9rìwå÷üsþ¿G¢‹ nHb©‚Âf¥¡ªúF´ßÑ'œÍU'eÔßÁõ÷®¡"¿f|—ï6ÀˆýqæÑƒóæÌtGñëügFs>m‡žÇ,Ⱦ~>ü*ôCIäÞÖ§k;¹T~ll¨©8’i·ѭ͹:ëþNGr¼É/d&\žºW#æéZŸï•6ø0U‘ú ‚= «,Ø×Ž¡ˆ<qÔ´¸ ”¿»¬ì5dÇúΪ5ÁښŠ®~žÃ¦iBï_íAc–Ø‘ÜìÚ¤¿˜Xª&­[€"*hâèruZ®#YYYÎ"ïŵ"Š×%Q¤-Qø(¥ik|Vø]ÇOÓ¿›ësyÿ—TqãŸD[¨‡ÍÕÞ®*®ÔÓùñžŽ;ß|cç‰:ÎçéæÚþ¼'¦›\Ïß}7§äÇ!͛ rðÊX^¹§œå"ÏE|̨0÷¸“TV²òo©²cSN\¬¾¸îçNôðŠùK—–• •sP¥(/lyLx¨…±¦Ù2ñÅ*·;R˜u©³íÉB0I£K ÃÖ\'›-ó²TDuÚû~ʉ Ü¢T2ÿ‡âsÜf##ÄÞí–ùҖ_.úúþÁš,Á$õ´GÖ§?`?ϖ[…ò%}þ#YYYϯz• í"|áB&!‘b‚Ä›”ÈŽ(Mi\ä•pV`Î阌à…zÁQ•«§jíØ¯èòËÚݪÊû¼s4kÞô(FaZ~C¾£½Λíé-3Jx‡èËÕBî¢$cؖäëÆò«6Ði ã e\öŸ¡fÑ7Ô½ë]Ñë|\4„bÉhð ”QP§¼»á~̋ó¸ƒ*­xꐄ¹Ô"ìYye¯s‚ðY1b„î'`Bb’#YYYk$eó)›Y¢0Òõ¦_LW‰rÕfü ‹uàNMb£#XXXƒ@yâÚæÉŒ“$7ƒT¼¦Nª_ÅÁ£°óÀ݈i_üï&ûÒ;ÿª ®ÇH.waËÖþa»øáÙHäýÎá:¯ürp$\¸î©•ú——üÙÿ±kéÏL$™ŽÈ3Ldp uY'i *ýͯ… íp©…Ç´Ùºöj{ûyšÏ:¦[3çIћ­J2ïLB šq§pq´Vö•KoïpL‹Î:)h5?oEdèá\RÕ¥)ã¦#úP¥$µÎi‘ìì…>5…EK`„ÛP„2bO(þŝûáÿ7nª¢;?•¯[å3º«Q-±iÈS¤Yw‡všÁ+¾-w²Ò%8RkMí/WÁ“&î»ãWý)fo 5®#YYYÚÀä’UF:¾ãÎ/`½³¤9Ýџ¼Øs8á›ß#XXX¢ãþÑçùŸã›QÔtttU5ãNI ;§¼pó=~9ÒT/ˆ#…hH95aö5ӯϾx uƒ{õØÊg[y]šÖyCÃF&O¨ÖLÜ£ÅÜÀN—U¬†šXföë^{f9çhO#¯GYüó- V™¦Ž¶̃¯,y}^ÍÛs3È*Ÿé6l°ðsu&æÂãÀLüD¶^8½ÉˆFÖ.ÀüÜYò©ŸÐTÌï¹HNi<WÈ㸌gÔv¬ 3]6gÁ<o œ—e#®òör¤DéDì÷#<t8b>_ŸÌߚmïÐzê{W€]ü$Ö3¦øÚ°#—µê4̨_do#6¡Xz£nÞ˜F®J“CeFT#YYY%d ¶Ÿ5´f¥j:/Â:i>#XXXKÒΟß•Z¡ƒ™îS·|ž˜?÷XÀ$v1»f œ'*ݜÅ&ã¬g-‹ØrWL c/#YYYȲÆ5çMæÍÊ0Íî¦nê(kÕvqën#¸øoFøÍ•‡½ÚHB5P³XÖïBiK—ª)«O‡Q®¶ló0âKÎÄÝ­†«M$ØæY ¥õ:|T"·´1òI‰pnj5P¡#…$ǟ¬ß8`§ÉŽ"æ³»jnúKI9 ð-¤¦ówõŸ1ùùßÍàÞ¸¼p…“À¿CãÐë{#ïÓGí=o)·Gx(‡R´¤`²o6ñ&1Í̖$îêMs޲§#XXX^ØRê“tI?‰!ÛnöÙ"6ûlëüOO(öâyþÞØpøiäúºû՛ŽŸ¼Ãvà©ß»¿‘ß–‹xÑ´Çޚõ´·q!žzWÂî°¦?¿Lv¹jã—¢Úæ|N)ù8]¹˜ïOLçږX@<éÙ ’0@U¿ÛžpIŠ_<е>–•+ S²°r1^¨#YYYW$“ALrJ”“s‚A%ǨWú©®,s{V2”¼°µ$›`¤Î•ÉõwB 'göÈÏ#YYY·‘mäFãSOõ.™ØÙ ÓG‘Æ÷QÇ;]†8ð†Zš×ÑÆ6(,˜Ssô£Ÿ/$ÿ 
`.&Á0¦ùV9öuKwo!íq!û³O\pHIu¶{Cl›dÕÝÍô1›gè¯+­‚í\|ÑS¯I#YYYnNxìHII+þY×IéÃŽÛï\4/‚oôñT—sºÇÛRáÉ\Û>ºY[;"³~ôªjo·njdóBvÂÎø/Öñi³áNõö&néD¯Œ´ÅÛÿ¬<%Iú'ãŒq/µÁÇãúãÿTQ[ô”uù¤î­ma:®ý¯ýU#!SÞ<¯Ç…áêý‰óÂ|ægßÂ՝óÍ[ÙÇßÝÜÜnՎÈ’™'”N÷„AîüŒæ™Eäu#kW¡Écõˆ#YYY¯ñP¥hMÓ¶—ÂMKQ<Sõm¦﬏d´If“¦ÚS†”³xâÚc"AÛlcyi0ü<*Ã^4ÇØÜòŸ¬¸ãÞë4ßE?c±î"/;ô½kÊÎNðPB®@a½©-òàHà°XŽgïáŽTgÍ ø\¼‘×H—W9ƒŽÞJE»wÛ¿Ùõ덛 .?“Ë塕Nòäw7 ‘ÉO¿´vËË¥²—YjuS™ëk€! 3“Ûùv’z¥"^®Y_J{÷Ek&(f˜lí´·’'-¡¹‹×W®Ô<ßK9¢/²ý>WÂKeð#Z|"ôwPÉ2…úåÛÖØŽwӊù—±>ìïîŸr%¿4¼$w.Š’^+iqƒ›{žÊZx'Çì.ǒ˜5CÓ HïÍ>HþT´nmT¢ÈšA’iþÇð_j› 2ëÝóãQÆÝ3­Óm{þ:ÛÍL.#ÐLJMY»£#µÂIµÉ9¹Ò-&=C+/|ÇiËÇ­ö$üꓣXx#ïÆOw%‡§|ç,X—§„ã¥4×1‡Ï;z+"†Õ#YYY‰æÓ"SÉt"Þœ ë“*=½15ÉrÿÇKóJ}ò=þü¥)Nï<ëݞ¿~Ýݾ~{r5l¨~‚£¬e€¥Ã Mò¼bՙ뒗4Ò¤Ùý“Òîå'QÁ¿q«t;痲)¦®ÝqõYÝ靽¸óŒäÝQ¤Š~¥Ùê¼õV<˜¼cÖ馪¨Ð¨¹VT—‰‘7¯5C§ë3ÆïùlsÒ+„L¾RñZ«OIs“M:õÄâ™"ÚùúÎ¬ô®XÕäOÑn±Lˆ®Aå…ß)O‡[âØO ÖA*(&’QsÓæ®ÊsW—mÂԖOzmä«ëG‚Ñ’kŽùÉ«±Gå)¶Ÿ^S—mGӎ+¬ãByùZˆó+Úó´Ö½q'—w¶¬³¦§dÞ#XXX£Ú19”Ïw§Ñû¾œIaw$É2ˆÏDz‡kïNò/C)Ùw5dˆ²n ½ÇÀÄB×(˜¡\óœZþ¹Ì欥H‚¦¶Ã„±£–s·Ï× ÄQ Ul&”ä8ísnžY`Ò+y#XXX¹ñÒrG˜EÑÓ~ÅI4‡z¿voØâ‰1ØDKª­ÇÅÞVœ¡ç>sEKpÕë&æŒ,’~I¢N¹'ãíå^eyßßÕ.gœÝ£¯{äÂỡÎ)ã9—¤ü²sÍ_?éñÞ³ãc.zÒ3‘¬Žk­~Œqöc<ÝùgM&y—–•Á鿇Ӷ\7ï­Î*úÊT£âšKZ:q:N'åÜÔ»©²¤•¹ïïÇ#ðšýRe#XXXь:dQÖhž>—ƆG¢ñ·šXυ+~#o¦R)^,«1Ÿ}o|áœVÆ1ڑ¤Z|.ùÈ'€Êo^w'Yï¿K³Ùì$3d™ C£Ì|nÍÚº½ÕÛ×Vê8Øøccê¿QáÿPëlDð¸]µš³Ú’𐆿8zo¶³Ã[ݒwA̞—hGréë¡9PfÆET\J_€ Ñq×3*4pærÛ[Ñ }W{¤?e¼;0>ó\Á4ôÞ¬æRrd™ªhsÆí…HdƒU{¦f&º’WG F6­'·ÃTf£Çðk¼-t',Nœ·‚‰¹üd²ú“Ж=pÇ1ª#YYYÞ%'ï#YYYT/æÇ·…b§¼4?˜øçïšy5ªZÈyK„·¸¨X+–ŠªëÉÓ£¶š¸åêI첺UýeCþd¾Ò´Ž%S˜È}»‡CÒ(’ „¤rî҆£öôH} þ²/՟1>žù2PT¾©˜þƒg?ô‡âlú¿þ†ˆ† 3Å0àê÷/]iݹy1ǽ!Ï‘2N_V¸ûš›‰Ï±30C(…Ï~?9ù6žu{#XXXñëL›]âYÏ7›Cáwü‹ š©Û%žÏ,~§†ÃÙËøþ©F¿¸a‡ |ñʾÆëÀ³ò/ƒMdˆCñÕëKF—— ÄÏZëó³o,«”De€°ÁùÛ-2ҙò_Vô¦QY]dQÞ\`xOõâÀV97Íøïô~þ«[Fn¥¨Ü–¼g¿oåÝ«_D4dK±úô¹§k¾¬¸ðR“le#+ÍúpzÌq4Vd(wü’íRÇ!µéøÝùø?Q>ô¤HÑà9R™*Ží:ñòAgVoƒO Y‹½ìWÃß#YYY‘7þ:îæåôùc‡K뉆L’Æ8oèÓ%°oC ~BoáoN’¼_Öëà]¼›ßl¼½Æ¼»y(?¨Ü㯄vLž¸úüer{h©-ƗýޟgR_°…xØâgøËïçۈÝC/¿¶çjÇӏYÖ7‰üçê÷?oX܆Cwx c\ùò5ýo*Ô7ô †~GÈ6z󟳉‰ïâ7‹Ræ#\Þü*îΛ2!‘]FÓá´*x#YYYÂï>cË»ãµ6ïð’ÁçŸaTDäø&ˆ|½é³ÚÕ/Ô|zè6Ôo*ô±dŒvù³§P²öJ/¿XÄ,øg1SÔ¶µhrwBÙºo`ø•g“#ɯµ·¨M“Å£¬î"•Æ­ñcè—rãC˜FÓ7­g¬yû)uú°ë:Ì=Ě1>­9ïÈá.#XXXmsK΍%MÎ&Ëáû3'ˆÖ 
Ányʈ­'CFŒ©7¤»í1¡‚ÕJ\±kP#çÊÄ¥[c%=ï?¦õ–F•Gbi&$5Šx6ÁªÇ$ނå7¡PÉòõHÐÉ»ú°.gìh-Šä\øÌuÞòÑÒÎÓãÓ_5ƒöS«~Iô"ypÒz4ŒðúñÙÖxNñ ä)’NCR¸!Ñ^þÖÂ.¦¤òLò§º#9'ŽÜà-)J3¬V£‘ ¬¨¡¥F#u‹mßy”í´#`Åö¨-2&\Å×zåÝážòm|rãœÛAK§Puo]åß":¶ØÁ¤u¿bЏì–Å­fì'M(¥ëUïIR$ÞŒ;«®Qû2•¦Ö Ê H:ðåÆDò±y½è¤_Èu·cy–¾8uk›mß8á;«Î½‘©BmiɦJ–&ž‘œ¹c&#XXX÷½pÕÏU0ö¦üÍü00™ ´Ÿ¿F”ý9òàÿáNêî¾ÓpÿPtÖÉÀZýß»Òzí€?žFHZv¿d<þ½+Øt|lþóQ9žóÒZÆ­ósY“íöxÆ´¼Äúü/ÅÒ«"Ÿkç1=Í:tç¦h}¡³u#V¥Áy9Ï ìÁyCmfêãÙý¿§öõôtZw}^Ñ«²êc_ÞG#YYYͨe7¯ƒç=± ɚ‡ƒÙ)Nl6huy”¥F£Rq'Ck7«ŒöM9¦g ÿžÿ‘É£=O¿È4Úcc÷6éöŸŒýÊ]ñyúùÑtý_5 8§íäæzNxçI­g*~¯ÍJê»§øZEñÉÅð§)}¼eKÑíi¬/RŠ”ôWè^s9Wm9ImMþÏÓôõ¿ÃÓ\.¹þªw|êðmóíWù/‹ùŸ\Tëê\#;›\;7Gšµ‰ÕTÅ\¥Gª§efKâ5—Ù®K.Xf–iè&Õ¢jó&¾<ß·Vh¤-¾ö™oçvWk?MpÚ¿Ï1ñ¦­ùî]sù-êùo+ë«ã/¤^çòlc>$x¸—G„4OR²Wn*>ÎÁ°•œ®Yõe<-2²ò W9½P˜TY¸Õ]-—:¶0:& ˆ•–.Ñ=ýeªGljVmuzœUYç4,M%[ªöwýÉNëÓòÑíkm>9gû¥ÖúóO|)£õà¨i'%¦u+JM2‘“©›É$¿ŸZÂøØü¾/µoŸÍAE9Þy]ÚWTÆ-Jrž хҶ„ðD^l?괋|Ê;fÅÝÒ6¢lc”1 Frý-áz%²I(^?W\ûæ|U¬ÛUhûj3ã ó%´ýׅ†”lñ;%.[=Ô°PéøæÚæa6 ª‡ôŠgÐõG¤óNIŸ©²æí‹x‰ÌøÜ:ÖçÇ;ã^V¯œÏZÕÈTÊ]nê»öªL­ëñyOIb@¬É"ÛðsêÒq’n¤c„ׁZã:”:!ҍ)(ò&¦šåƒaY”‰.­Ôòý)³•÷O^(ùã»B“/Ž™[?Z  ö‹f¾?9I B|bÙCe;£yWCNs<F¯ì)áZF/‚i*ø¸AÍòêÓÕ<É:ii¦>£ó¼S—ìޔ–9i;tÅç]¯¶ol¹çœ”ÞØÍˆÒûµïë’;꿽ÝõGù´M3Ë­«.²$ü¤àˁŽ&òz¡ã®Â)ºŽ·wcÿ?´ü~CùŸ5zìÛ1ìñïïã I×&ëLõ‡x“Ê!û#XXXÒnÎÔt°îNšöj)ÀÑc€W²Xù±)JNXS7•¥_š½Œ»(úk½”Ë1U Ïè¡#§oº9S%/t:§$m"ÿFU(Åå•"ü¬†bÝ®FqeJ E#ôà"¢=ÙýSIbO\!$p¬üU鈄(/÷æ"0Šzè…Çì_Y¤ñøt¸rQ-Ÿ¤°ÂP¡ØÛ"K”Ë@-6cŽõ5d‰!Ü¿Õà¡#XXX-³‰ã·3¡C“Ì’e  à±Òܪ˂A§”šÂ÷…Ĥ#B‚fÎ9( I%˜ãԛÇ€À¸€¡#XXXõ^¯“:óWÎ?9 $ýÁÿ#XXXR#YYYŠÊšÚ¬$„HOóP/î7ÿ„:?ÑdÿcÑ?¢X¦öÎHH©ŠVÖ#XXX"!ˆƒ©Ù¶fƒB û5ç6 š–#XXX8j* ­#çBЬX0 %=‘{‰‚ˆ„‚6ÁQDºLËm± ÌòâNÖÛg›‘\ÙÖØ†– Ö­ÉÁÉu\‹Z(¡¡®c"ÖE–™È§/#YYYþÏÐl ,RE’(°"Ň¡z ~êw'IÔ2WRªêUX#XXX ®JZ·Zaý ÍͶh«DL\œ#VØ« ¬Íh™‚Ìì‚ŭK¶ Ëu‚åқu×'Té”U­]¬#XXXšTµQh#u’Ç"¨tE§¨)j«m¤mU­¥*µ€²)®Õ¶¥eEZ™&k’ ™W%#XXXÊÅ[¶-¥J ¡µ¦£v™©#XXX&I™˜¤Î1X Á¸ÚQ*#XXXŒ«JDÔlÈPi#YYYÜ+»3lV²SµXt Z¦a\¢« «#XXX—#YYYDV5Â6˜s(«-¬QÝ\%mlµNÛms‹˜Ã% Ë5‹š#XXX[(¢{ßÎ|åø~Åðù|?³?w~/ôý~‹iû}móRU?εû=ÏØ*¦¿¾¦çÍÓß'‘o”«4åŽqÙå„ÜD°ã£lkM¸*ÒßsU{Äáð†‚à¥?.؇õ’~‘þ_÷¶ôüC'æ'S’‘#D '÷_OiJ!Þ3F ÛòqœþCãá#YYYþtì~.çÄ\=\éÿˑ h™ž?«ñHBüÍÄõѳƒ·á¤ûã¶ÂéPR)ý?â88ЎÇÏðü×ðè<£ğ)Ì?Oû´i<«² èì.{&p´_‰Í*A€)' 
EG‘%1ä‘èÕ=(QE=-õ[y·(ª ­)nÅ;7®·uÔ¯×çQMPî%k¶0Ìbº1VŒSž—#YYY#YYYÁÁ¸¸î•úN¸?›Í£ùŒÛžÂ`kÊ0*¢ˆU0ULTöŽÎog\´mÀðø„ãlÊˇ¶é_ÏCB+h{²ؓJ$囜dÔSé9H¡C'ksrɇA®Ç¤·ë™t˜c°ÆÌ#YYYVôΦ»qÿOݽq?l˜8&f$2$˜cÖwçÀÜ-Tjÿæ÷ÉÄ&0ÆÏ,ˆh5ÏvâA’ôf «Kã4Ì8¦1A&]…xû~Vßõ5LIǬ#YYY¦îS:náMÎsŸë#XXXÂp=,C<ø¿ÞK ¯Y< k;Æ#?Í~þëƒM¸˜h„ÚQ>š0>¼7ù~üA·oâAEĹe¡çœªxEàŸæ—áxqÝ~zí³Ïûž#YYYáüù|z :g;Ñ4C ý//ï3‹—$ìj4aûS4¿"“Ë($Ó\¡¤çÉËÿçé—m¸2ܔz!ÚQ(R–©K…©±')Ö¤˜"KÁ»"iÄv¬oý0u‹ úônÁª´Y·Cëiµ¹A¹ë4;x2쉱7‹ªÞÞÚ/süËêøÎô‡?Âv_gd!j·’ËÔ8Q]¤Z,_ó€õÛŒVxC:ìœä¶‡LÍäL&ØÚF{$95Hɰ$µÌv`Ö/ö*&y¥ «®a©‡|÷Bé,ˆW®H&ÒÂ.f7ŽÁ@ˆP‘rj¥8¨Î½ó^#YYY&c޵ûa.[s¢È‹7‘";EƵ&Ûs7º™cn¡ÄK€÷»:¢CõGû'Ã÷bœÀ‘#XXXA~qTRU {üm¶6Û¯»øg§‡šôƒII6:s&ô}cŒ‹þo#w³0ÀߛɅ#XXX÷±ú:"ÂISéÞwd×dj>ýu=_àêøh ?ô™üLúRõOq#XXXΚ#hfëåh¤¦ïø®»|!"ɯúÚˆ·•ÌUV¨aÖv‹»>›~2õ¹Å ±ð¢êÃûZÍÅ_ð¤þDÌqޑMÇÒ}pLJ¯W䨚;¯i¶Iª×qU:äðˆft™1á´Ɖþ…ôn]3õUt¬†b’ˆo™`åyó$1òx—8ɊhL—óÄ7b)AÇÜ ê£dþͽýfRãó9ÚyòÊH(~U|2P÷N¬Š&«¹Ýå~¯‡Uï?£’y@Ä0!/"ݚñ4c›øpÕ³›’QÇ|¸`ð/M—Ëó-¼»$õœw÷S‘™ÙÕ ÉŽh„j/¿N~ãtøÑøiñêÃÛ±4÷SçO¨R½) I-Ӟw:„Çt¥„‚ªH¢ &„7|{\PôÿC´ùrŽ´ÛÕÆûvꎹ&H’eÁ굗Ÿðêà8Y)|Ÿ¿vAHvþfg5ž×5aŸ¸É§½žÝ7zhEÐIZ¯T f™‰k¾¶›¸ðÖwlƝwÕH'|äUIQØ*ŠÅ9Hœ¹›È´‡cʘÉVnÌ#`s©6Yçpg*Îã#YYYd:î´G¡MÇés#XXXuCÂts{ˆw¼‹©œèãYÞåǪMÑgÍË#UÇ ª ¡ÝâO”4êՎ‘(Éš>µ4¶LE1F¨’ªADÙ(LÒæíÅB’Háü”Žüå'ñ†œ;nšÉxđ¢2£.†ªÛü)LžÚÓ¤„amé&Ÿ5ÆðD߻ׄxìåÿâåᛉ•ëD½yåR:‘9f vÔù{3offȧ_Ó,ëzczàî5U‘²dŽÌ°Œ.¹g!’ð:Ñq¾²#YYYOu®˜=³’»ûpÚå-ÐÜpÀžûm±ñy³§Nzh¤4ÔE__núGCIõ9žæ°¦æ›yë#m\8pøum½¸Ùƒ©Òk_@5»j6u´í‚¢n´ !è™|޹;6dþÎOè¾­ŽÏ\gænڞJR²¢|]1T+«¢simF±ê¹y¿iÓ´ßìäR!è€"! 
Ûãq9(ÔKHDP!AI#YYYDÒQ˞‹ògfãŸãú6¸þÖßólûãôÏ/ì?÷çí ¿ÖÿlM·õò.Üa²Ð„Êr;3'¿âÂs˜>>}é#YYY›Õc4”i4QzÇ JÖѾŠUý³¬Ù¡<Ðã!B´Ðÿ,WtM#XXXöeFsĸk@tðÄ#YYYAÊ(j‡Û4£íóù{ÓâGª,òæ#YYY)ɾF'áË?¾ùfאF#YYYr)0ÿ~h`ÚRê@Uô’¡é(|‰±ab#XXX™¢#XXX(£ÍDx9?ÈÎ#XXX<9UQDUÀRš4•®Ý:ÝWÃ'*(¦"Â;±Õ7#YYY !¬+à‘æ†aÀJcÓ·S¦¡£X೉"‚lM™3@Ì3P”3ìΠј Ò4›mVN¸Nq”T£¥²…k1)ÒqG†åޛ†6]óÝÊ%’p¼Ø¼Ç!^Í Ètæf4/!Ò?jî<ÁZۍ.]™†ÖNZf´ø¥§ÅòÁL^rI«ôqûYZC#YYYVj#XXX#XXX¤¿=¢ì›øZj$“< ú#XXX$åÑËM ƒE‰¹6HH!';–rHgwА@„“I0ӛï[6­®A/ɋ«²ëك­Õl`EWú¿š å6#K°|8ؒ8LŸ'a.îh˜e¦]÷íôÔ9ËÃþ³Pÿgúû÷£Ÿ~i˶«©ÜŽÙd±âìÍè?áYÊ9;¤îxaùd¥ƒ¾è½uïíòcoH!$$Ù3µlµÙ¿oi—Ÿ^§°ÕCˆ‡óûþŸÞþOсƒ‡æ<äÅþÒ Ì¦"ÿP.°Âööôà¸6Ñÿ#ÍÞ:ë ^õè$ð¯öÃú  4 rfÍ·¶ Ø F 5—tÚ›¿q<ÿظ¼¼hwÁÁL&2$4ÛÎ5 Ô¬$ c0°ÀÔcÀHðîW­øºj#XXX‘=Ãýÿs zx<2³†â6…K1¿±™8{‡ Hæ@„~öäVlGô¤§$Ø0ål7#ôᙃ~@ü¡VõI‰àT#ÜÎèõ™4ÂìáÿÎà!:î|A!ö„›>=ܕû WQ&A2x¡ÍÁאv“„¡Ö£ªÿ#YYYÜÚ¹‰ëú€Áõþýg`Ú°8ÀfÁžóý£¯ ä!Àˆ“Íñ¹ØÇóŸügzò&1×!¾„Žàòþ$޶:Ãü† ÈAћµ{~@0¸xSZ&µ>¿¨útoÃϯ°_‘J[;×ãªDÜnÞq $Žð`¸ÎrãS!¾Pؾ€'ˆ÷‰ˆ½ŠBàîân!å¥ð¹'^¡üHMC/8áßÜ~“Øû ¨†¥è>$y =l‰¡ìO±9:æ¡ÜkÝò4|,;¢œ&AO|ùÏÿö´©íâ~Àü8ÇV<NÆ&ð¡Ý 0@?œL8HA‹ò;Cyܜ¡qj(Bô0MœÇPL@PŒÚcdƒ¤§?³m¸ {Cý*Ç©lžr'¯¸;wâ"<RXýŸ#?v½ùó‘óÙ-W;ƒ|N0}óüAú†}~Í †Ä˜Û–­#ü‰Ód='¡Ë ¤Ÿ – ä#YYYê>0q‘RQpü3õÊÀ3l6>A‰ Hl›´uËXröÏôæ:£‡ûuÈ5x<䯩¤4ý·ýõ ÐÜFk €¸`cˆÆÓ˜òwsìƒBm 0äßO·É!7Óéÿ(¥îyQž _ÉÐsq7ååñ§\çÝýk"Y*>éü}'¬€i‡Ì† Ý pEàˆ5ÏY™°r1Ø¢ÒM\c >ÞáϽý}<ù‰;«×åJ ±lÚ#yj˜9;†d„†pÄÏÎüdøÌIú ÔŸpþ8³®Ã°<‡qúGC@߀Ê~q¤#YYYÀy¢W¨Îaö§Aïîz:_[;ÜKí0õɋ ŠD­'.«xFhª#Z/ónbì[Dg˜ˆñًܢ[ƒÉÅ¡:¼Œ˜xŒ=A+£šÒF©'2L©ŠŸr)݇Â#YYY uºï߬Üz§ˆšå×.Êø3ížìÖù~`ÈnOÜkbÌÝCpÂ¥H»G®6ã;7PØTƒ|†Ð“@ùö9xHwR!˜¨Ü†Ûi6ClÁÿòÖYÚP4À°ÆL#YYYÜ·æ¨xûG{=Cçæ 9Lù˜gl ‹€ç´xWžÆéäâ8¾0ôòþqÅÈúWOù1Ú$:;W¾ºñîs±î+âØè'f›<Ç:’p åØÇ±™&ôž¿D½?7ÝP“›ú=Æ&‰IÝÀ”¥)|Eåj0ÔÞÍ啬—ÍâôþæÂÏñ›'Š:”èàøæÏgß幃pYŒ§—§lDv›©£ùNmªX·ó*ª{¼3ÿý©ÏŠiê'øºy4l’\<=ÇÀ}À#YYYëd…ôþydzb:8ùƒº>'oÏÒöNesØY7Áð7pÈ{î#éGòûÌÁoi{“e~ÃØØ{‡B»#XXX'f㛈YŸçï#YYY»Ëk@ý/2;_ÂÇÞyƒÂCØO(ÌâLÊ÷É&Dž£F`ÐïîÇõÌâ2¿½C…‡Â:Šâf¢01M øûÛÇéTx?ˆåÕ×ÜU÷›9Ù㇃ÛÖ¢M͎f±þ>OBŠ¿¾$ÛNˆçi:Ÿ—5îÞ·ÅÔ>%Ð <‰‰‰›Ò¹Pq‡E1³#Иv“ZEÜ⪜;·v àGöT¹ý 1NÌ2÷ýì§@釐ç«Õ«#w™>ùEýò?Œcø~QO°”#YYYŸ´Ý?O‡›ÕÌt;àìÇã°;Àbð‚~•ÀÙ3 àÖM öR±Fˆý©µ\T½³koÍÏ¿xs«Ã5l´T~yW² Í'¡IöÝs@›ÊRR14ŒA´ÔDôjx I'h#XXXì½leoàö(ÿí:ÎL 
‘Íé†dÇYßËnÉø—éCX÷À}×õm”ÆÖ“çïÛb‹Å)øÏÑwÕø¯’ù¯öӂ£í!)ežVŠ{ÏËͺ`ª#Ó#XXX£ãÎÍBÁÛÔ4 5Ä2û¬Þ)€o{`|w¥>ÚOæ\¢Œôj÷{Óõɜ‚Î*ºQ4¿·BÀêWd>èƇû óȅö;—€y†¾` Ÿë^ŸâÛ_o»Vçì`Ę&@&gЛo–³føÿ@¹DßóGÙ?Ä`ë¼j3ٚ`Q¿hØHi#YYYô65 Ø#YYYc嵀ɜ‡éÇqêËF³sHs?Wz/ñ³)ƒ)ˆ8)ÔCä<y‹(~á#YYYÞ ²cƒT¿gþªÝ%¨ÍÆãn:4à0vF'QÀ9u€~¡åõâi n`?¸qOˆ|ƒÔ;vvƒý&ÀáÖ=ˆø w6GÁÍ&áÛܜ‘fï=ƒõIØ úÔ´hÓéìÀý? Aý‘Y Öµh ’'Bò¶Oõš#YYYÑý:´Má">ùN'êwê~ðë!"é'gU5¡#³÷§Ò}änÃp$?Zxa|MQµßôˆùˆ ®‚2SFmù^mk6‰’Oo‹äl~«¬åF«ù?I½¼t„Ö°ÿ#YYYüÛeG++3ðÙ!Û#ÈCñ¿n»ŒûJ`ã„¥sŸ‹žÇÜyþÕ¡6$thpb HB¶Ä;¸~H —JqŽ›Ô‰¥N@8e†hö8mü0 p¢vÀ6_¤ƒ­ûz‘|ƒ}¥5ÿBa÷~d)‚`Ú葆| ô»!GµdÆ"ú©?h†ïôh°î9€BrAÛ¶~$C#XXXd ðê1>bB9QB9AïÓÙ²ô©Òv„4Ýsµ¸=Æ{;Eñ^2%‡ë_Xü>·ëgÈ÷»cã~m/³—Õ鄌Bæ{æ4ñ:ü×ãPZÍù¿'ɘs;ן×<@@χý&åI2(À~Ñ_½¼Þ@*#YYYærãU€É˜Mß`0{' øNJRî Ðâ;Ðu óˤõ7·Ö|ºO×kò¦Éû#P|ïðò>D#YYYÐöKì!>ƒ§·Ց5‡¡áœô|)tŒ<ª#YYYõ˜N™¹ÙÒc :;ð†nðnÁÉÿ¡BTi Jñ‰öúEô ¾Ã̘F @ /Rs8ñŽVjâAëæ?pý£º<`W˜IýxÜbDƒ;³U±6y4ÚC7Ý[Mšèyáîù^¼õtÅÀá*Ž7éÐ_Tåaå€ÂGR‰Øå%Gï´ýäè€OðþžÐ̧xz÷ŒÚµ`mƒcò¤˜í;¢>òؑ !&Ôq_rm‰å·—/_¸Î1ëÃ^O_·—‡Ýð#XXX>⩆‘ç›:ü } ¡ºˆëÉ4¿¨Å{ñW·‘<¡0{07AøÐ¦ XkX`¯ƒTi:C\mZVi3TL¡ÇC¿‹Ø;oÛùÌÜzCÄ€s¤}JnnœßÚ`ñs·èè!¼§&üþ¿îžªî0ù^±ۘ„=#î“ÃÛ¸ðzìw uà ¤ñ5Ôÿ67ÃõÉ>”ûŸ6Æ´»c|ÿ·ôC®û;Ûlm¦–Ûim.im-ØÏÐyÏ£`>?q›Ç˜?¤žÃiú—òÙ?jrýßð¯ Éæ~O¯ÍÀ<„2ü¶f¥oIê½¶{e†š#ãÄ}Iò¿:¿Rý š=ÿDðÈ~¯±:þÓÔq”:¹ç_¼x›!§k ÆËÙٙóÆ3@޲ۀH8a]™žãu¦oÒØ·_ÁŸoÁ™äýDiµCšõ¿)÷ü3 (ðO‡ñõa®}¼!ý~Ó-tHø.ÛDÑi89ÌÇ[fÁâæ9\z'çx¨rúLÜÀce(wKØÿûK°t–Ð7õŒ˜´œ~lÀc˜#YYY0g¿mŸmñÚÛs1`Šƒ?ú¼üš>±hšÁô?€üÂqah`}ƒÏ/2’`Çñ^_Á·©rôO)Ôøbd~Ž‹ ýD¸&†í4ûøxº”€1^gÃà¯n#XXXé:ŒyI¶ÃÄ/âÀ¿$ét<žowAO¸¦‡ñ{yr òp âN"iÝSó·çð_CÄxq˜Ã,$4âäI<׸C v]Ƭ $”$ÂÏñ|Ÿ ¼¾Sòøb{ÿ—ÎiJrof’ÊúC›í}¬<WŽÿVþß×±ÈëÎ$õçÑcak^ÜØèü]÷lv³Ìª{ù8_UÓȔõÉæ@ú&bЊ¢?|ä#YYYä–@´ìzŒElº&|”ñ4w¾àë ôƒû´zÃÏt>¡?<””QèÌÉʇÒ3æL°è}Pÿ7i àޑ½Ã#YYY/<È«#YYYsÅI• >`ìe‹ ýç´Ð÷íû0ê=FýÜ 2móõl;ŽÇ7¹êwÉç¹ß¾âpñ먈è(/˜:0‹ˆVHh…e~²Ÿ“§¾£Þ=<PÛ[£±©ÆíPÁƒj€hœì¸—g(¤ïÓêcÑA&âi(.º‚/Gߘx¾Ñ„9ñð½À>2÷™ÛȄ;P#YYY—©¡0rqŠò>¼[¹|ËTñë<ß9`þf©êi‡¨M ãa›H;‘ñ{™³ö¤ÿ qýú„•“)xýÜ"íé}àpu?ïŽwmø£5uè~´„÷{«™û/kdWòú_»3CÛ>Þmû#nŒÿ2K3q‹û#ñl½‚Š¢†‚ªjµôà}ÂÑøè,n»ü æûïÀê>ýÞ¤™züŸ!õeO/Ëdc.þJí›—£ ôí“ÿ_g«ÙaŒ…÷`¡ê߈öú;/Ö»llfÑ©#YYY%â"kZ8<­}à n;¤™#XXX*Gâ%d–ÛÌa•TTHø~`§Oú8Ÿ`=? 
|ÿíN×ó/#Ãoö0QÂ9ðõ#YYY!Iï“ûá7 H“­5:kǟ¹Ï»Ïj}龩œWƒB)ŸTóØ­F^ç*ª1A“D ‚Îìã~`—©3ñ#‡ÚAâiƒÔ¯[úyMÈÏv²ыãJXˆ•…Iƒ ¤@ðÙmm MAû¹sf•<zúÂÎu9"ˆà8¬z£»u@æ½zËúþ¬÷s‘‡Áfæ=`ö$¯p0ñ;4pNƒ¨q¨0ão"t'P<Pí< 6'#YYY‡ïG®G±æaÓO+©AÈ@ܓ¶#YYY` ¬1ïĘz›`lÊ+Úª ªz&€’<ymäHGÿ—b~¯æfó úDðæ?xÛñ†Ñç’q-¶EkX¦´g使wÏ?—ÇWuxNú{aúNºuüU¶pàš2˜ŸÅj~¸˜ÁE9øén~:ÝÞ% %—Q¶•åvýÒ=ùÛû·æœ«GùáO}õsÝӒt.•UUY¿ç:ž)ó“§ò.Ïw’à>4íy§ ²{Ý'Ò³haÛ1Fü hIœ#YYY.fE8)ñf2; 0;©0¬S *„v:@ž]žØVèJ%VÌ@{S9®M[vf††.Õ Ì€<S¢½ï]bÒÖ¾¾ó¹çÖ_ópþ¡ÜÔz™#XXXÃdXp™ºÀ#YYY˜Úf¬Ü˜Ìûÿã6aMÑñtò~yìw$!&Àl“5/#ÄëÏ65Xa™¢äð ÆÜ<…~@§ý0®¿läô“˜õÁŎ¦kïêD‡ëÑ¿‘€ùŽF.êöå1 ¸†B2´Sû6L6ï:lxqÁ®©8ÀA#àjŸî„ÚE.¢#è!/tÆ>†d`~AøŒäcƒýB[M!ƒ‹ü®ˆòu†|>¡>¡>¯Ç¸|à À¤ÿ¨gÝÓ†} ÔôŸéïZ@»õ.}'Šp~iY»·-‡^hG†8ÄýƒÏˆöáä?·sàpp9¼=± wz7x›®âú|L?/èôÃCíû3 ~–]@}åð„ÕÏâ ÐÓæO*NA¥nî·gçý"ŠŠd ”˜óI‰jÐÞàÑ#®Ly›Ùùó¥Z3~—€¡7Lƒ`û}uq¤Ø=§Ç÷~]‰$útŸˆ;€C>—Aóއ總‰=?>Gëÿ3ý)È8<£Í–'Ëú>˜!5=+Ü#YYYÊí°LLB•FvIh<aöùÞòð7U>¤Áqܓ³>"bÒH¯gÍü½ò<r=gcòÞÁ“rWÂOº\ã!è;G£È۝f|ß©‚"“g·xكןá?ˆ¹p{T;ä\SÛûöûÏÎw vÁ{0{‡Ä"މŒcD4ô{ï$'§ú;Çú¾aÀ~QÁíéCGªƒÓëg‚phŽ |mWʌ9õL"rl‰… &ݨ(š„Æ'qÃý\¹|HüÁÊ?÷Á×Ýc`¬ìþ¹„¦‰8—j|›—ÀT?åÆ÷ùÁϬqþ%“;¶¬ñÓڍÑ$Z8ÄÓù¢ˆ¤7üsfVgÙFËûûÿ·É鈂ƒB?܅'?õLiÕ|°ÿT¦ §ûP?©D#YYYå"³p>;†•ÊÓç¾Å£¤|¸ˆ‚B"€R¯Þûæ8(0g)õ0< ÇQǃ‚ ÏGéñŒç#%8þ¾ê~‰àד¦s¸ÈÓ~Ûµ’BEåéq %ªÒm:2a7®â›hpM#YYY5½×ÅLg÷ 6ã\¸g˜FõžÛ1燘´gf´SÿãÒ^ãÌ~érg}ˆùøHñùýßïô×ñãq†pœG߀ñÒV¡G÷|GßÿMñ­‰Jo©™NùdϪ$ìõ’„²#þï#çígòiö3¡5ÿ’£g*èÑðü¿oŸwðs½™ú•¾æø¿D¿7_åqCMKÉåɦý-õg\аC6ú#XXX4¥ñ½Os¸¾üý#XXX²êZ‡S2¦$_ËE© AýïÈ~}<ïÿ¥ÕøÎ¼ŽÏ¡×ÿpêÐí.m¿r\^>¬:{Æì2«³õC™°Ô(ÊG<ŸÁµad¤£WזjµM2¢wÑ€åmcYIcRçsÏû/<æÆ#XXXæ€ÏKH—xÓiÁ¨¢/؁“(Ì$WFLùM$eF"(xSþ„6µíõ^eÓ&Bg6¥×„¦Pæ"’Ê}£’r¤ÉóØéÔtŽG-Dz! 
á)ӏ·3@gIÚ¶ 2¹8_w”;NK¯Ë$ž}µéEVôh³®5xŒROG!åÖo3nÞp“Pý¾i‹Ç´0ú,À5LNökzø=§#XXX÷Ÿ2``Ûý~Ï×°të້Åñ÷ørBñ#¬µÞb¢g,:ý¼DâÛ|Ó Äs¹"/Ûa¼TqÐÈ& %ùàÀ@rü¿+35K0Xi‰4Øï¶NOÿgaG {y/”sÏyõn&Ž|#YYYtTóϓ¯kdàO™ëÖߗˆ•àoµUUQÙ 1tÆâ]Mf™BŠæ0%àÔ Vyò”Úƒ6{ dS1éŽÁë[BaÌ æv‹]¤é«ÖÌí0î¶ ’3éˆ3ð<±‰"¡ao…Gå÷0zçCzža›PpüìW֎ËÈû@éîž8~oÚUì„¡‰D¸‰X¢šš˜í>åö"`0 ©!!¦¢A·#XXXè¢[ò#YYYÿ¡^àñVwFR¢ÚS9+%"ˆ‹Zä¨ns…œsÎw‰åîOIjÐÐÈìrlû“´3 þuN±SÌœÄàa$—Žt‘…¸1ëæ åý^ߺþýÕNû³ þð™ˆí™òù ’&ÍšÌÖj¼Cä|ØòÈiôN¶f¨ªˆ¨šbbF)ˆ)*kÑ0€ò¤&áwPÑÝdFúÍF—ƒºò®‡¢}i€û]@9=Á*x¾NUQµÓhž6ÚŌÔÔ|cï|ùÅéO¯Ûט+azzdá4M2tÍS@vB†ŸPýev›+Í\â§_ÉìfñÙ8š¥Ó.-^æÌ#XXX8<¦°¢Áv#gÞð°4–lûێqVh†$å&Ÿ3ä“WW å;¯\$†9!è홊ËS©+–Øï.ÅUà†'S˱Ѹì·ÖD_Ê{‡Œ’8ªŸà›*éꪚ#XXXкˆš´]nÀc$îŽO\]1 Ú‡ßë¯ùÃnC§ÄÐP¯— ò’pH8<¬åŒÍP8kBG‡W„‰ Ö“*º#XXXŽˆø›†ŸEVé Ø<´šF®C¶ÍݏÉ2‚~¢Ͷ3qêÖ¤§æ<!Ô\\3ÜûOH0& L!‡‰~‚Ô<£Ê5ð±å8L)0SÝٜ¹?#XXX ª"´^1D©@PžKõé&ÙªòT”όij„¶ƒJZU•n˜3–+S>=)S&ÕUÁéÉÜv&_7®”•¡¢Ø1§çèúkBvÜx¡$?. É碢#XXX8#XXXÉτ\Mˆ><D2#ðèi@_ž¨˜°w7tU´J"b‰<¬Öì âÕbÅçV¡1ä L„%fTLíÀOÑ"‡a‹›îe`˜MÜÐùÁÛ(1zk%ê3fH!Ý$ ³ùžø98ôRêHìõ»ÈN³gA·ÅÆ9œÃý‚q›–²ëö&áϝcè‡6#YYY’’J(–@Ü©ŸîÅ3 <‡äIÏ璉‰¢ ea~¢C2Å/ƒcNE’‹£UDGÃé€&ƒÀp6{Ä⃮F F…S9¯g3GIQô†!ÏÃéíZ`ƒˆŸIþpÈ 8?á${ öÉ´R¯üYPχóŸË¯¯3ü§³¦Èy %>Ç~ýmy›ޖ@»JÀ\òÈ~7 ÃÿÔúp('¾@Ìâm¯‘êšùöãËÞQ_õŽ€1’J$I„<(òN‹Xˆ•dX$bVºá°Üºkm°c×Fæ­b&çí#XXX‡ù“úLÌz©ç´鍙Åcª?#XXX¿4‡r[mìµxXo«W= uº CŽJ÷ÀåÅzÄ#[ÁI0ÄÎË¥á˜w:º`t–ã[›o´:龤#XXXD¼XÆOgÙSß>Àaè=ùW!ìL8ñ#ÒõS‚€t8›¨3‘àxÁB€" …—†ÇDæÓKÙò¨’bH‚õ`dEFþüÑ,|¡Ât~Â!ë ‘È߬'O³`؛æ¶é°È’UqœžŒ×iQñC08é:òAïSàB¡Â½ïäúƒAÞúÀÖ²J0 ý}ì)aª"J"’H" !™""‚#XXXQ¢‰d¢ ah$&E=½éH$Á#YYYA:Ãû bÅ ’Œ”DS"‰BƒÊ@dÈòJä&…B{M@}‡Á>˜Ð;+~Ò¯Ì1©<Œ!‘ª“h1_D»ÛÛ/›† –`Ö!A“Œ™†·škÝßh<ŒAP•Þ@H,|Ù†oG3¾ƒ7bf.‚¿ÛŒrÒê#’þýD“,¡@@ʧéíZ™XØÞé¾å#YYYNæ,p=7óðfnH͂ŒYÿ©y¿äáû¦3óÞÝ]âf(ÒoVÒ#k¤Á÷¡‡p5®ÒuTFÌxPNņƒI‡•à„ƒKìןDp,©ÍŠÌâ($–Òß°’AqK¾ç÷Ž‘‡ ‘œ¼y␒Dkƒ«ŽÁÀÖ@»*@IpÁ8E@¿ŠJA„”¦‘$¯!(Gƒ ­ù*½:l’IˆQ(Øb8ÂS®bDç–'l®¡Z#YYYÆËéz÷ØÛ¢‹¿¢:õŸíÇìÒA/C4øšÑŠ ÃøÆIðþ½jä†`âa.Ƹ‡#YYYÆðø ²æ{`Ãkž%vþ•ö¼À¦W´ô@a*ŸY„Æ×â€Ä¤"T‘ÊdÎ%ITë=¼¾ÁÏðôÁùwœ³ô>mõÈh")¤˜išÙLÚ¥hX‘-m6eHêÚ¦aE,DA}Y°&3ëõ#XXX…]òL&H+MrHTÌ~“n8‰o¦D!úHÑ¥%"Ε0Q(2¯’RTBK";۞ÿTËÛ²>žôÙ A2…ø!G€Ÿt;&ØÃèÇê—¬Û·Orˆ_´KÄá a3~O’wÌFuÑ~[G€‚‹$È*V†V±J:—d"Š 
ÔӐ@RP½ÄÂaêi-TQ©ˆ†A̐؅ãçìx§@û`$‰(I(§Ý=ªŸÐ’nŠqth,9m´såņXh9á"dL”•)/[)l˜‘W’‹&y/e˜‰DC16»b®Éæ*ò5e[¢zÕîY4"í@lD@vFôjˆB¢eL‘Ûh8˜9¼>œÕîåDyâ*‹,‚R0)$%*†@‚ȀX @à€“² ¼;8úñ<Ö}n¸¨j<"@ý‘Fª©€éYŒJ­jx„M ¥XÅEuo.¤º¼Ö¶IÕæY~o'‚«ÏkÚ¼•C õ%Ó¹‘âkâAKŸ/“ÑöÄS¹;½¡ÕŠaüç<ÿÐÄñyÐ?.î9­!.éùñªv÷¤1`1¼’9ë ašûôf\ó#YYY£Ç™E#˜Ö€íúO[ë;³Ö/BV”|--漊OÌu˜ª0„xJ¡÷=ðÀÙCÞžTçI†ð`GØpéy ÃOQ4_3Pþ÷›´7”„üè@M7>ø†—†FAR4›L¶4´hM ˆfPBDàtÛf´õ¦•Z’¨#XXX‰ßùk/Ÿ,?Ù?ë‡iv‡Nî¯ó”„JÁ2iHG¼\î'ö€~ʈG7·~KÊ9¸‰Þwæïôuuºð’:òd×ðà’‚•žØW¸zb£æ#XXX%vXK߆М?ñ[ßÞÏåžG%iÂAˆ,ûuû#YYYmsîùl¿K(NŒ@Ý6ÿ3åŝ¸<+)ìT1ék× £©³éugµêtxìíÍ`õØuד°‡3ÎÏéCñ”¸C±€6*u¶ó#ÞHÈӊ‰øyDv!ómgϚCܰuÊnAèµ£º8.ÊnÀÚ €ÊgžÜ»ƒbdc@£Ž‘8ìªýG¼pF˜ùÍ‹F/ŒÍ†6\êÑHي˜4EhÐÕ'‡¨ò0*Ð,§„ŸB‰ˆ¢>±ÃŽã52iP¨Š@¸;ï©Ç;—‹ Ýç7+.˜”¢aR®|Î|+Fù¢–Cs]g¡ié”dk˜<y¤Ô”BÒPPĔ4LÔ"UL£Ì2€þ"!‘é菱ÀÖb@µ) 𾽡7R¨.¤©C˜8>}Bh8åé0Ç]cřî»a;¦ÄN—°ò¹HñdäC耈á'!T?®µÃd*#XXXÊ(À‚ˆùÕÁ¡ûáìëٗ¿#YYYž„Ê¥]f¦ÅÇb#YYY¸Þ÷ï¸>gîûq÷½Ê¿AaúŸX 0š>€ÆƒŽÂSdRL!€ÁP4ШœAô¬DxÈȪ"‡Ç1v.дqÑ!åê³Öcòõëaú70}RB‡¦#YYY&z£ÙêÛm¶Ûl5Ñ9/yÁ‰,"Š“¼ª¤k*¶ zM‹FÉÏë…Ø‘æ>°Òpoß©Ç÷ž­Í¶0rƒ¼„¾@HóÌ©Ôbœ0Àõ³ÒôAÿòMÖQ‘—„®òžcFYz-8°[bŸÕîõ1ý©Xv€ÆŸÑ@üé?£—"ï- 仓ê¾Ävƒ ‹C0j“c$ºT³ìºË–Ӑc&ypD¨*! âæ´ÂNþQn㼅‰-#XXX§=F †%Ðþ|?ãôΦ ¬H“Éý¨•k„1DÍäò—)ç?ê½ko÷¼ ò*¤þ~÷‚hLÂ…bù{,µùê•b€–û²î6Ÿ~á%Ü/cîØG•T@lU„>ç=î¿ÝÖä+Åã²Õ[—‡'/‡¼ê‡Y#äZüUƒ^é=Q ŸlÿŒ4èôb‡ò‰ûù‰Î0®>†[ZÉhTE€Cé5 hˆ1DùÄ?á÷ëò.ÉÂDçØt× Ò!O­.ʧÞ?8I@D´ÊW"¡pȼ€¥4šAQá ]!ÈÒòv#XXXÙ,M6)ǐùG^π{óœ €…d´-RJP’@Á‘Ԕð/Ôl ! 4®MÓÐÁË#YYYEÿÝHLÀÚõý§B݊%&Ƹß#YYY÷v1>R¯àplDTçÃcÊìý—™Îh¾õÌu兜KõM‘÷Œ¦Jí¨¥O.A –¤­MMêªnè «Š’˜‹¹T]²U9%A8™Ó!‡ÿf<Îpžc—>ù@Ò)Üei6ʺŠÔŒ8œ7íôpô¡æš'h‰ÆjöéB¸¤j@-e82ljäe#YYYB‡W9åñ9/MzYݝ¿éœ)áð‘ VšæuU7`©"„4á*»uI@h™µI#XXXµ“@;@ĄK¶äÎMGÒÏ(¢²Nøí¼eb㉦AìR)@ š…xœ'\Ý,âpmͪ˜<ùs“$z6‘¡¢#XXXG¦°PŠq¢#XXX÷åçaH|JŽ£ÚzðÁʞ÷9“³¡¡6±Duý퇵! 
KPÑ˒6“kjI§žyÈ®³:v8¦›2“æ |ÄÔBB¥Úý¢h.Q@Ò2¡—'h%]ß3#£wV:Rå¥âÁ#E@Á¨8™MÑQ ¢’@…ÅêÂÜBTÈJœÕ1ÄJÑn6"®œ#XXX"ŠÚ&Ù2ÃzŽ9œ!Nðmœ4Ö£RÒPð“ˑڃìŽñl£ëª ò£ÍíáÛä\åµaêA¸P`P{(¾d¨È$øôç/ÜHq‚"bˆ,C 8ä` Èp™$‘dIÜAàË£,Á1b1³.µ´0蝌jÕsÀ”â>¢ÈF§Á^áãŸSØè„Njñ¢D,D¯°÷"Ë°AJ̇KÁ>A%þÀuùsø­å0pH2êü’˜´M$ÐÄHÈÅDÔB%F@ÂÀ@K*ǧQ}ðIë4|õ…é¡Óð ·ÔGêt)&€:á¶àëã¿¥ýš&òÔ\d²`þàZ²,ÃPÕ·#XXXš¢J#ä¹ :ƒÌ!?8çlÊÂ\m™õÀð0?ËI)ÎûîDÕ9Ò<:ww1¹1¼y”Að­ÓÃ8×Âü#YYY‹ŒõMê8¬)úJ<5¡k¶Odöïêû;Qǀ'(Š”é v}O‡}±^í:§“Xºóѱ¬07ilÜó<S¼íñ°Ø'²3ùˆî«ËÀö“ÇaìǾ2ãP‘=N|¶Aº È<â$ô²#YYY#XXXr`|ÜÖ MCãIÚ`€‰d  I‘a ^óâ1Õy¼•z¹ xJ§YR=í™xÎ¥s´ÙôK`™ ¸N’†­&=¦×l»MºÝÆ”?e¼)ž^âZIÒdÐïcT¢Í„ÑÃÌCëHuØ“>B†=ëî|Wo™˜zùý߅Àz§„áy}RvÇÓð²c^ÌïOˆL™¥WõþÎʯ\ðEáÏBv—òƒYÉZTÇû­@B.˜¹—tÌIÍaW ‘3Üä-$ª,$=ÐA&ÔÌCe¹f¶ø—žæ!…¨ŽŒ©MÇ?9&‘¯L6Ñ ´ṳ̈i®5Եѷ;Çãҙ&0d÷#âÄy°ôžsiá™(ÌKyEo/s{ˆh)@MQ"Ü|ˆñ.ü…›ÔìhÕ:Ó80#YYY±-¢U}fíH‚ŧ„ˆfJ"#XXX†@àœZz8n±_ƒ^yâ.JB½yãNõ¸æ©NÅJ[`ÊC\£Ÿq"؁Âà†9#‚;¦ׯDÅ m*§ Š ‚8§—é<!ØoO[žŽ™¢œNA—@Àp£É²÷w a¢Èˆ¨~QÖó¯J=Ô3ËâK™N‰1Ï1æ̼ÍIðÍfô</Ƙpƒ cbæ²ÅHÐm¤žE+´í»‘©³O+æìžñÑ®’HDTðvýC‰ó%àt¹ÕÌyÎkPî!s£“F–¬WÑnl) ð@ƒaח Ⱥ'=I ·#YYY÷~Vf‘RÞ×5MÁ5"%öî:Q¸ÀÌstxQf¸ôèx'‹*F¹Ó£PÁذމï‰/‹r»e¡á†CÔ´K•끩:Ûf1S™1JJ#YYYødWŸ=ž"5»FmG9Z!>dI@!#Ìä‘çË҈òcGê5pO.öz¼ñãD£~¤”¢M³Œîc•œ°9/‡¤öv`Ým¡åÓȬ|pGŽê15‡Š[êó/ÚÍÑÇO‚A¨ô£Ð.HÕó1æËxÍÖB"Ȧ{#XXX­Rìõ#XXXâ#³^&#®<j"0c™¢“<¡ÄÍodžt5ë¼æ‹Ž<zìÛ5½I®(U³UÃ$ iÛ9Ð`Bf åìÒmòÊUÎ(ŦÅÍn©Wœ5uê»q—/[“Îmć#ݑts³Û/¶!B҃¤ñó×TcfDoŽÔª’VåÈà™~}æÈ+Òø8䎣GM u$ã!Š2IÃ\6í¬ØÆ´4C3JŠ†ËV‰\”G˜C瞯6²=W×VU2™ÍÕ¡MŸ÷jOgúîwã{K´ý²Y™?<Öpz‘Ľr S%$v¢3¿mù0ö“ŠPåá7A’ÛvùòØÜìя™]ƒˆñ/}àÂeÁ¶“ØLÔ³¶6m°o½̂VÔh¡4¬0ØÓð+QåoI”"}ØL8FžÖ{&r¸˜½µQèXŠeé¥T ID¯š¨àѳך ð†™ïQáDöüs’@ZBGc†£¥orÐ(ZCŠchHög[®ÑêØï㸕Á€&ÉhõƒÑéqA’¬aKեưExm”'Í_7%-f6éoZ—I ó4S¦eQ5pPLQׇhÚ̐axã'QƒéŒáËå* -,í¼„§|cŠ ¶Jó£Û#¸Ñ[=¬ã0»‰2'ž5^; çæ¥{k"D×T#†+ùáT‘g<ßy‡–`£³Á×HJãŽyo^þ˝1®/Ž•© šæô­¶¸UĶÒ2kíA­–$`J)TšŽ—])Áâ$¬õVßY¢#XXXÊ¥QQS-ÙØ1À/ŸqB eüñh&‰~·È¹g““p¢ZúE±Êc P UîðÉæúÂŊð<I­sÜÆøŽ· ¾°¬á¹_`4$#XXX-)0T£˜Ž'žš•©ÐŠãzæÕêŸ֏î{7Ú®R­ÖcœQ5¹î0„«ÖÆi•*äS©[å†KâcwѨ҉¬Cìíù^TòÌî©“àPçÇc켓s‘Ünfhó'-EÎvó¡}sÅcàE-”Qáx¨y}\ tÒz¸å-óÆî/S1ƒrúÝ*&8×ä×<Î#eÒ'Q1% &”²ÕÁHÛ¤‘ñ©ËÝ"©GZœ~Û´]CèNš#YYYFî²fš»pÏÚâc!í±H±ñšÉ—ШQ:D.òàU­œíœ. $8Ý>'—Îè×îu}\¨å8õŒ:ON%Dš]Nü\ð 
8Gâ`:ћ­ª@læ¡ 9PÎêkaÔ,å”#k€:Î)zªD˜d_lb…Ó6• ŒR˜ÓÐ*ÎßÁë™hらígÊÚVË^_¬ÑqK5O¾¨›<7Rʍë.8”+|æ·ECªÖ³&¸¹´j¸)T‚ž²¦ýÞ§ÆsZŒêTêfÜR׎£Š‰ñ:êøJùæøs¤gà}È:…ß ž’ñ‘P\É$œí.ü”hDi{jã=XCõ%bP†¸˜Ür»IE(´Ð\Zk[bBÚcX‚éƖh•#YYY”Jµ*²knÉ-e¹Ý¼D猸´•¦ᴐ‘Æî’u 4…k‰„ZŠ®Ã!„½3¨Ì¨=ÐåÝL“sÎßg»ï:_uCDwÆXÏ|ÉD½@ÖÌr'‘ÛÌÜ:]#YYYqÜF _%QDqÚ¬äÂZàQrD° ‰ Œ"b€³¨¨ŒSs„Å4 -Í9öLZØ؁eê,V'‡ÉNp&™â{ âu°BÄ`r866åÐuNY§[ Ì]è-CÂE4v¡_Û>£ƒ»â9EË£‰³e=Q2jí²`„$ AJCÑ54±$˅\F€A‘æ( ȃt!ôbC†Õ”Àö@Àžg§H¢;Þa°“% HQÕ:Q3ETU1Û½SÁS¬9Žƒ­$à¦æèyðȘ7±4wšÓØÈR†ðUMˆ05çe,Á°A¨û ëi‚HA1ŽÈT#YYYàDÐtˆ|_æÿ.àxrÿ:¦¿®t'y™ñxi³e#YYYŽiš];.¤   ¦¨D…;T?Å-P)IK" &1¸`-ǂÀ{:A' ò‰=Àvõ§U‡`ïDÃ9$<HÃôTCÜ&’ÜëþáôÎðü6‰§“¹}0Ñ£AÖT¥òSžAö^H„Ž!z80ž‡àÛVÉzRJšaÒÍð1{Ÿ’qȞž2<˜vÁ¯'IK–}iÊSL–^¢’0™/ÏE߬žW¤Ðwy<F¼ù h¼’ÿm÷ŸÐrah«{!Wh¤9¶ Æö,Փèd­&ʗ&AçĎNç~;#‘Àáø¹ðVîúïhWF&Ž*f(„O‰æJÊ·£­rsCëM<æc¾N Ëê~ºàþ.h#XXXrbç©ék–$¨ÇÄ ïåVØÞèâš[q˜ë…5/VUèáG»¤zÎgZ™:Ֆ²ö¥Áql¹á-ñ¿{¸˜ÍºTr…p 2yÕMYs#YYYM„4ÍH­6ô`²¬•–liÂà¦àÅÜÔ¡>yÍE‹æ¤ë]ç;žo®õϧ|Ԅòß\”ëSlRBë‡[3 Г„(Q_½a¬q;u8"®ÊÈP'–OÏ\\gŽcPLDóË÷gTA‡îj*‚capqÏ%ߥ\ܿާQì'R¿AUpäê1‹ƒ’’#XXX„4æ<DE²ŒñÐ2¢: NÄÓ¢ø2>ý¯NÄ:Ç©a]§kvÙø™€Œo³Þü 1Íj0ÿºú€}"”!aC„ø†:d)ÒE*õ Êt]´¸óXˆxÄH¤¡#XXXÿW ”Í$Ò5QÉV"hL*#YYY^ìpLk¶ÌÆiŽ#&8•±p$}Æ>“×/‚ø5ÃÁ™“™ºÍS³±ð-7Õ.6H ‚Ž\ñÃqôkð>Ï¥<‰ºýDBLJ—™SDÿ¤þm4¥D 'oéøæ?,õgÖ;µE-0CeOÒ&ÜõB_ŸNX‚"V*¨'`€Òi(cæzLùÄ$b7ï%Y“É}+ï§eæÉ"ù{$4è µw£¬Û³c”nӉúXd)¹¬,v%É Fã‚:”ãA#YYYå/of¬‹'ý¯yÈte›y !™®M›ß”8’#>ߕÙvnoT÷u¿6²ŽŽ¤ ‹Àrã¹'æÝÕ¬;ÃC·È±,’PŒ3‘E4”SÐõl:ùàá â¨cì¢HŠ"|þE>ÈF!= <G™î8ˆHë~þ½ñÂf²¨É5ˆg›†ÍÄèÐïÓÅ ³£âÁìMà’eöڀb¶¡¼’‰Ê°õï¹.;¨fCl›–ÈäyyfçÎîNQ||,GÊÀi Ä0Pӂî“A2å‘Ðô?NÜÍÞvp<Àô=9rAŠ`#XXX!Kí²y¾0ÇÉ&™î2>7ãPçÚöËДûBIõ)â‡ÔÃ@L‘PD ²Ô^ô†‚#XXXN³Û‰¨ J%I$‚„”6Þ0G¯Wëfi»0Á`Àŀ‚ÌwϜaÛë읓§Kq™úÉPEñ1Sà”Hð|¨œU¨£E¦¸™äAj9üMê£2hO¿*º9ΦW‘§—øóˆ4iFELH5V#G¿>f7ËBáù\TeŽQ®2%¨¥¸§—†à)ˆŒP ðÍéߤ8BÛ/›¾w’u›ªáÉ㻀µÆ zn¸cKÈHIhéíi8#XXX¢éÄõ6k…¸ë®p6¥k‡JRHˆ¥üBŠB( 9ÏfÚ5$UÓÛ®™…0Àñw–™iKdÑ1#YYYM*Žh#žüC…ww½O>‘‘’/‘¶9M¯…¢¯\nU´«&f*„‡‡j=l»ßÎ!øŸ2üOGä¹>þÃèéÕhR‡ ‡.#ˆó¾|WFŽâ@0 «QN.CʐT+â&âÍ'b&€í]ÒV‡ª~Çr‹q:²¦Î |¥J |R*>~q@9µK÷IôŸuŠßÁúlgË-³óÇճՕCåv’ÿŽ}¾Hmu$›…šN:B“‰2~}SˣÓ´¡ª™>ѯ9¡iïRޓôm|Ð, 
2L–EP3ìiòù£)õKL¾Ð‡Š3w‹^ùÚÖ҆0çòóÑ®¤/oL†o±GE{6ç‚ÍT?¯6x\¸ñŸ¥wë¼­Ôw"Z2Þ¼ùÆW/™vÃmÝ·¢Ðˆ¤¸ç“à<Œ6À‹<õÎ[}Æ.؁¦³+•è¨)Ž,°ãljÚil0.nãޏ5¨î¸¯ˆ³³æ:ñÄY¨*ë­]ê‰ê†fܨK!ÎôHq.#YYYê#6Å[)^²8¬%#YYYÆ¡ösÅ:óŸÏ;KF—B®‘ÍÑÒ¨ž·$|²PsW#ÕOÙn”Gwþë^üõDsÙÀA(ï¸à…“”ƈ­ ÒXa£Q†AšŒ#YYYÞÀ̏Ø^€Éˆ dˆ‰8¯¿¸¢™ "€“^? ŽäèȄJ“Râdw¢4:#XXX#XXX ­†šE‰€&b¿Ùè10JB”BT•ÑKMHÃT®ÔvÙx펡 ì9œäÈ"¤ª#XXX’¡ë°"ˆy¦ITÑ@,DDPSHvïâj‰½²è@‚V€)j(¾­¢˜`¢T =¥Ë@;8#XXXH’I’Z¡Z…lˆƒÀ0ˆ/’ë8 0„{ǔÜ'‰l ›ËD )@1±HGžD!QDA 0U)D‰*Ñ4B2¡„ Ò>×ÉøžÍjË5›}øw[§ß`rx¦“·Go 6LfŽ=0@>I ¡^„¨âixÝØ ㇼ6]3c©´‘SOqc¤8žN0; ø;…ļôü¶úMÒyõEF!Z d³oÑ‘­òxò ZbZ$ªˆ$¢Zˆˆ†jB¤¥ZX(0á>'áß¾ðwy/OèÓºJÌ>ò½9A^üû÷Ø ±³óg¤~Lí$˜ð2<Š û%3š`øñß§Kàzê¯ì£º#YYY‹. É!¶"b&¨(¤ ªŠŠš#XXXîCêp6!(ˆq7ÓÂÎô~ý±ü“¼ÀLzÏã׿; þî4†ì=`ì 2ðœWQÑ´ò7RpM#ùaû`æ3K˜`ˆ$)¡:Ò5‰”þŸ+Ã,ÐA> ï×X&>ÌM†üfPeOæ¬ÍÞÓå"†¦I_Ïìó|Ÿƒä>¢þ<b Y‚þïï9ÂîUM^C…D9Ÿ:êЁÀŒ½îC„‡f]ì”5HvNB\ƹ&ƒ†äIŠ"#YYYQ†¨9'Ø#YYY‡×L Ç…BÐxÓõ©a9J$NZÉ+#XXXíf°*æ¬5.-Š""ÈfdV”ÈèXD¶­*$0¦La¶£uihÙ«ë!µF n¢?ïE|‡šéAˆƒ(’Ýå`ëcm°Ð¿Œ›»¯Fråˆu.Ð'Î=Óé« sØF"}ŸIý}¸Ùa͹?•~jy0ŸPŒ;¡/ì‚süøƒ†‘ ÿ|ªQ0Ãú^§âRhZBz"#YYYEņ`¿³Hmn:#YYYM¢*¦Ù‰OÓAl@dküÚöüàdé@íˆ!º8·Pá \zÑ5)mƒ¿Ë­ÝÐ1ÑÈN§5yÀ:þ;¸¤4‡ðŽgž~<J@?MØKŸ‘,{#XXX§.¥Ñ™÷ç¬¹aã&Iþ¿Ôzà|ÞÁí>l4PìV'N¤æ{"ȟ2…(EÒD¤?iiẋÝLEX¨õÞ^ó¨u¸XTH>9k'+d.ùؙ„¾‰á9 ¥ .¥N¨ÿßUí ØV9?ÅÅPœ£{jß#YYY삒–/æÒhŠ%ÞSµ|3¸@“LÐKÙ~§6ãƒ#Ö#YYYôLiè3z)0ݙf‡A/HÈ:Ô¡ùܹPꅠÒGjúÏN9Yò‡óÍ)Úè}Šq•æ<«¿$¾BÑü¨o*Ï.ýåÑÙ'Õ*A!L=²dL„ïÉrJ@ZI–'¶–!zV€}Hô‚©S°P÷9R„‚’Ð.ÃBò(Ry+ÙUéÉ͐РH³l®ˆŒÔl†@’aiTƒ×­·9§I ̆R)šN gI=ê½³(́*±‚)Ü eí© êºœ‰¨yFh!#XXX}2(c#;`è$tšÀp$#Ÿdöl ²yºv8ÙDþn/™:t’t§hpÜO‰îX&„6Ç@ðAHCB±D*D *G——òÉãžç8ˆ&Šâœ¶AÞ8–<{zi4„005Ó¥0A˜!€©ªU„{®•#YYYÀ‹·J,R²®ÖS‹ÞgÅ€ÿ»'1 Êÿ Žä9pßéõ#YYYüºôBT4Á=×ÚsÔxDGô©m°-¶‰C\"†&CZÀáy]™žiÐÒh’~-ÍàNb=¸w+LvœöOԞ¯Mx`äTT0AF¬VB¨˜‘ ¥eOë5Ú.­0šfIª%ª5¨¹×1¨‘¢­œƑ¦‚€’P(„(^í2sJ‘ËIs\ÎÛDAïéN–‚ bQÎɌLTL°’S33DM#YYY ~…5pHqØm:á#¾xs"cD'@09‚@HÌPþP×ϛ#|ˆ\¦ *¼„€ìÚAÿMJ*µ1‘RJ²Ÿ|ªã(̝ïH€#XXX¢*Z¨ÅÐ<¦öׂs„€¤|'*-w!NKº!ÂE¢•(`„#XXX (–– i¦šZ¥JR‘ Z#XXXª¢Ši ªF% iF …¤¤‚© )B& f†¨‚‰&ˆA¡Z¨¥‰"#XXX#XXXB V ‘ˆ¥@ ‘Š‚¦ˆ”i&—Ôs¢"¶KcÇ7Öáz‡Ÿ=Lÿ;T`ÀjƒB曀Ÿ’“´µ)Ö³S0]™Q¬·ÓV-]‚IÀ“Ç£Y@í;£û:Ÿ”ž¿·Ää«Ú@ùöv#ÝJ„Á$€l!Ìæ;ÈlÁ%ãë7wÍÚ(‚" Ó˜ 
ÐÑM3“L¬AEU¡K3#Ë«$بs÷Ïn…è#XXX"cM"ÅÅxA BBÜ´)¾ÐxÂOo:‚{˜îä=Ë)! 5xe#YYY ~OÞ¨ˆ1²• ð>ó{»! ‚h(°|ƒ˜iòç±ÿ…*?Lû¸®í”zJ2O_>c>ÛZž ù³ZWÛÙlÎmƒ-ú®[çhaÔ=f#YYYØ+ôŸµ?_•<Ç6`[Cø‡]ôFÎûÃ͟°Ä#YYY^#!Èøc ˆ`eWú·¹¤Ø¦G7[_Nn¸àˆ§#Ûðøçðþü̆õ‡³CÆ®–G¨r" ;²wrá}r®„B€’$?#YYY¹X/¿zZ4‘Ô0zÚ룤­ÁÌ%Åävwäc ­CŠØøúú7`û#XXX°ÊÍPiRÃC}Hųµ”k·@o+äp߬¼zÞÀ»4ìIËzÙn½i2#XXXAü°}q_.NG͹†²Uhömr#YYYØÂy.ƒ²í(D%$Â'‡8 Åk±ÉJ¢dÛNœCݹܜ &l”¾-#ô€3­V²bœF±cgKA°²*~3ß&'Õ#à>=úS1cÏGñ3Î×l~ïg4ØËAû Ôԇ«¡ûtuv„õˆŠ"h¤!`<?Þe&0VÈ÷üóáÌè“òxúɈ¦€óO`ˆü‹«þBÅq”(q!êæä~ˆy , UIÙ#XXXj“J#œ—i „^š]†Se}±‹íà§0äQ] „}¤Ù‰FBd<ÛOWåû|‡~ªy§€E2ÀfPÛä ¼ò¯µCé"€¥˜‘8ƒ)æOCá=°}&Jé¾ybN##XXXxäq»¼È`½r?l¡/"—ÛyZ©­är ªy†@?Í"?܅é´·³Ø¡/#YYY:V؍7.ëévqå%ãÙ8Á§Û¥ ¿ÚDÜ_|4Ãõ$üo_Ä#õF/ä Ûðù¼ÍRẗ¢BˆèþsA¯ÂНÔ‹'¸÷#XXX³/N‡Ø† +ñÆfˆÑWÔÊ2~•!ýX#YYY@E Yi_­=ßÌ´ž"G°ØÈð#ól vÉíûÿÉ¿7kþhˆ”8ÙÒ úLšqz#XXX–ñi&Á5tBNJL¶¾¾c¦oÜÆ‡ÇõXøi÷éë|ró“À”AO<{zW·÷NÊWNÈ"Ò$2¤±¤)Ä ðRzó9X¥—ФZƒ‘ñ í9HbáÙÔÇT/ã5$$+ò‹ÌŠÉµspâ”BFþQu\¶5-ÄkŽMWǼQKŒ¦#±Ž¸"\³ÃC `ÆÒ ¨Å&’K¯WlòښâoužX˜ŽâaÓòX¢#XXXäý—(s:Ú`Tâ@¤iS`„ X !æÍA‘¨Äó¼ç9ç炨Î(½ÐD‡zúr†Cˆ@PH ‘9©˜Á ÀŸ':§0Ò ÆNI€òt'=«uii^&Ÿ/Ëæü:Â2ÎÏO¦­`pÔóÄœxwŒ'5x~`Ð1õ ùåþ/“¯H@GÝË<s,Ô3#É`›ði!¢î_·çaº/ø˜vÏò$<˜ÏÉã²%¡EÀk ˆc‹Áæ@û†7¹Jׂ‡™ =2ž)MC¸2eEBH† ÿ¡Þ3ŸŒðOÄÒ¼žÀ}“ß9ÉðÛÿ¦<FÜÕÈCGáq“eÿXë2‘^ ˜+óƒ›}AÞ÷ª$`‘Íҗ¥§ågÖ{'˜§Öu#“õ@R@ATÅJÄSKA@±(P5´2AH‘$@­$ÌÑRQJD@0P•CA 4 @DQ3J#‹BQH'[îe=Ýf@ŸH§ñÈ<öXGéOgR÷¤?¦ZMÄ!÷rq˜RD}²Š¡À~;J;OˆŠ ˜)IÇœã”¨(ª¥šJ‘¡Š¨ˆh¤ H¦&©%#YYYƒ#$3-Œ`¨š *fŠJif!¨øN’”H•,ŒÈX „ʆ|½ôOú××!ßÓ}ä6C“üé£úÄqËGL6<¼†eŠP ¤±<~<òSŸÚy¿cž±ä‡ðH™ËÙ „i¡‰³„>#œÄÃ##11#! 
m÷¨óQ‡}o‚ÿYüúƒcû>½}óÛܘr—ˆBA)¸IO@³;8#YYY×7¹Þ6”úg”í„çÏs3  zº/ÝÝÙÈᆐ9{}Gtœr»èFTFTŽe @àNßjžÃBׯ1™Lp؎Ç9‚ÝÝì$õ^ü+YQ]htöʉ~Ð1ÅOùäÀ܅<P;‚}e0§ ¾¾=~çÌIu:š#YYYàà35 ý¥Óªö§½µé/0.ìzòœš#Xe¢WG.Ì|RFÒ>cŽ€ß[nðþ¢){€øƒ–þå)¤ÄB^Ê ~’@è›Îÿ¢*%á©WLEED ›iØÇ’šÊÀvPÒèé†ç§ó`c ù’wÁ²ž·§˜ˆìË2HÌ\4Žf—5¾z™3ñ#Š3…•A áÇE ¶nÕlÓI¹N@ü;q;£lÀï²÷—É ƒQ»)÷X†Üðz¨íjçäm.Ap¡)p=¸3#XXXöù›xêr(ŠÌq3Â{#šnnk¡ÀsJ8·@“ê4ŸN±ØY˒rëž@øö!!c\Ôñ»Im)|zøm$û ø’έcü\âý0{KãRh6ÂFÂ`a¶>[(œ¥%Js D¥i SiØ­pæ¸AiÒBeìnÝ^PEAc/iåtØ´¦kNvÉlud ÙJ*«^FØ¢ŽŸ†4QØÖplú”|ﮝ|€=„Òg4èÁ ö‡AçZnº48C'‹Ù7".\=ŽÄDQ +C©Ûvs%WŒ4³Ð1LÅ#@Ð4A$Ã,]rº˜2¯è÷úz<‡Móí ÂQœÎ;, ô9òéæ/€½˜Œ #YYYQ‚t  Šˆ‚„#·ðSßõ'¬}”È”­)B’P(L$¬¡RRàïÓøûÐ;:ÀÄÑÁ٘/ï¨V%‰B{¤ô¾?:áéóTwt“á£ò`n›ž.甕«#%HcŒµ_’^)!\ŒgÝ_Ò~“ö“wÔ~g¦<ÛJèá‹MŒbŠrpP—·AÝtt¨Žhõz@ñåº÷nôÁçpKãíyzpk»­gHçORhè^‡ßê÷ΒK0M',#YYY«A‘mű…CÍL8*±B©‡Šœu.'‹b\Ré l°3bÜù ú¹ryíž ³Ç‘1pUN  fáE‰Hîœ8*!\ˆ¬Î©wâh@69òtî/ ‘Àñêpóۋ¨"!x!ÔAH@öç…ä "|½¯BC€Ÿ.Ûå‡om¶a1d”6`øãªJƒC((Ȫ"(©m¥ g$â’ÔÑ130ò<fç7‡wM-!ïa" T$åÇðŸe¢ûߌ|ÑhËx€ª¾j(õ:/ßÁ‚ˆèý¤¶‚²óósFhtc:ðÓ ú« êÓä ĉëæfÆmðí]¼ø`ÁÛ¼é‰6ëц1»Œ†:¤Í.Nj ¹”þ@™cÇ{TQ|Uaßì öÕÐx5÷üÚ5 þxúøš’‡Œá%§Ëå#ËYc‡†8=O†ê¿'“nŒÄ),„ÐàhÀ‘>E‰‚—ìϗdÎñm˜A`tt½§MyôRQ¶*y´˜Dyä£eíò™'ÊXYЋ±SA(¬kГ²q”{ä].ZÁ“£ïÓ"ø²D$K¢3†ƒÂ7cbÞGHy_qAäÄI ‡iرŸªæ:±j&s¢g­#YYYƒóƧ¼ŒxüòPІ½°x+F!ün8Wì Ž‚?1Œ—ç¶ÉÁç2d͹ ìt„¡H° %#ãŠWbPù`@4¹°‰Ï5ºæP2ýYÁè*3É*Á¿Zu‘Æ$9®g»§VÚðùMê¾|Û}™'àZ‹ô¿#XXXa†›j%ª&#XXXÒ%NRsªàÿ*°0¹é€òÛR—,Ȗ„¸©ª”b¥˜8ӌ8‡2EP)`Hˆ–Gæ_ñþ_ÙGIρ¦é<Ÿ <²"M‰±æLrӖ³%ú‘»C›FÆÜ"* b†ewoÁOy‡I'B#!ÛS½LA+ŸpI$&C«&MfЬÉY£}e8{§.'ÅÑCà >ë³ ‡3¶x²Í<t’A› ÐµôƒÐáû_»°áì€9ô(%ì×˶ÆtNµwãøhå²ëŠ*b|¤½eÎ_t¯—éÕ®L>>jžýàWDLýáÄÃ\¶Mð{à]T6pAAO‘Îl”-ÙÏc@nc˜0D0T¶Å‡ÆL&”­   Ju¡‚ˆ‚gF(ƒN™‰FŠc/ÆÚ'ašÜ`p{ ³òíc?wÚøòBbž¯î*´,ºÌ×<¶,勸3´5scƒ<•§¬EM¹8!€d‘ãLÓ…£4p\ê½t½zI¨MÀ5(§C³òëëâŽL‘†íûŒ·0UÜhcme¶}»üå÷OU€ê€öÂK›[.k¤“rx‹D¤E¤AK‰#vgÜ #XXXŽLj¹èùÿþ’•öÉîæ{¡yzÐêé #YYYº•!ì`ÏÄ|ésø¥Î¹ñÔ@ ÁìvðÔ!‚QüHÄŠêÀ°„à®ò§ø冘&$‚%!BP… ^‚C⒏$©-ӆœ¡É!O‡£ŠgHá¤Ñ«vlB³ÏAåèmD†ü j“ûŠÔ>ôƒdù0y[Hv šç¡’$L86¼ÕÞ¾âãðÛÂó;]lñ¸9îsOÂSøÎô;Ñ»øº&% ÿŽ^¼AW¡Qµ.©5›É»d¡ŽK¤ä"‘ #YYY ²XT€¸´ËPfL3«;ÇQq8Õ(p3QXÁ„…œÇ; ø,¯H£:¦L÷—­Šxë;t“n­{oG'2ÎxN­qƒE5ý¨(Šu2\°“H.-mpŒ*áèi·h·҃¤c4JUvâ$]¦ 
¹ržáAPDNӂøº#XXX7e#YYYËݓ¡ëÃÎøtš®#XXXoYÞ^;u»½… €†‡!Ó!x”N‰M™=EÍz:‡–¼8 9Ê8xžK‘"äB²òª²,|ÂE1”­p]…”ªKª„ŠŠpɉ)eTSJ[ÍËRm - a (žÎÚæØöõ¼H ±I^åÉò^&– ¤ÝóßÏš/0öìóÎIêW»gë‘é×£rëU“ºÀy¿Nº³©£›®YN2ÊÎn7.—&4á(„@"‘êDUQ(®@]BîQ1$#XXXz†#¬cúw{ú³&=ÿ½¦Ötš#¥#XXXÀà¾%ôó³2‡¡l¶¬¶ärÓeÐëS3…F1°kÍÆQ>Yq(1AJˆ-6Ðô( ELÄ)RjÐe¸ Pƒâ’Ç(´eð¢)ŽeÀ #%ÄZ8OÍg(ÞN¦µZNV–kh8„ ¡äcȧ·›##=¤Y€(ɎE—@H($.5uQ³(I ‰J*#XXXˆ «¢rC\FÐ ðô˜L&g§Èϝö‡<ýC¬òs|dÂyI<D=Îô®*jÊW–ܰÐ)@­*ñ`˜LÑatž³Â?†q¢((§Û¡vMˆ„?£õàìUäú½ç4ü_#XXX‰2ÌÈăÑ_$§T#?,~«‡Ca‘qþ%TayL¤$©$Jsl¢Ÿî°pؔÅO؁U,3$௒’ YWàâ“ß=]¡ï°6ÎÍñíǔ„æ(³£ÐxÿÎx yDJ\F̒;F°ç»È.<$5˜š#¸ÅÄ£'„W® …+$/#XXXâw™^P#YYY’¾Hv]Žƒ:0Ð;5nùâyÙ|íÌiíÉ¨iL"ðHèZIIJ°¸ Bj¤‘à û%³ $ŠIRwn3Œ×½¡¡.&úa&b©™*HX¡Šìz¦Ùãöbböìú)˜bøŒm†?t02ÁҍÅã‚OHžd4#YYYBà÷Áׯz…)J!H¸{©ßÇüÿ-A†§IBÏûð;‚vÛ+‡TJEU0²D‘@PQ@ȌÁ(è`‘d”ˆTfh)A¤@JZù"bŒ@ Oh…:0ï#æêy{Iƒž3×Û|oÆÁVÚk<Α•cÓÐÊ|Ó@TG˜ 9I8¾þœè9Ü^ q ŽTT/,”³A:Î2­( ·y(&lì¡Ú…ž„_b·õ<ø"y=N€ìD†;ð:å •–`€¤~’hhh#XXXÁ;ôýϽÎ$äþV-•úƒ³Úy|bi<¿/…Cä…»!ŸFO¯ÅNó;éÍÅ$É8œw„Ô¢ßp)ۇ8ê{܊b‘¦©SæéòR[lð¹¦{%†MŽ3Å x(p]„4mˆø<G„ )@«2ªÇ°è•XY½÷Ê|~–†ÔÍ‹(‹Ô`Á‹lÄÅgPÀL£MRA^Ü7.Ú$˜"Š˜`;Î&ŽÚ%Hdç5r@"H CHbM¤ Ps`ªX&‰«fàË\“:d€ÁF#XXXЮÁ­‹h199¶vĔQ#YYY ÛDSp¹„¥ÒP?Äaê£!ސFÄ C2Ô˘™RCƒá#XXXš€Ó/!ÉJ‡È#YYYmš¦y›‹½=<‘4ª(Á ®¬˜;»§8ØÙ‚(Ã›ÃìNj_KêCa nÇ ¥”¦IŠ4Žaҙ¸A¤>óî9ú‡Ù*Ž÷ú±€›÷o#õ|´þgÛC¤ =RÚOݝ†KܺðFRRÊLÕ ·‚¶#XXX©¥„ Ð?'Þ@ùä1„Ç=?—ã(ñŒ?݃¨ˆ’—¯3LÉØN§Uüv.aøØ}VÏ®øbÖ\“¦åŽ£ ZÔÖ³AqC‹J¢%G* Ç;{Öø[‚Ô­Sã‰Ê4¨!-ÍrK<¸J#YYY²_…å¶@4œùäpo]z*ҍÞŸ½ö‚‘ʈu©ñ<[ƒµ@x1HŽû2W ‰ D…2|ðfCÉ%ˆ#XXXÏô]#XXX8t¡—>EÇBv’ssUÆ=’xÖ%ö:lHÁ£ññ‘‚ÿhø½ˆ˜ר[ÿf”ÒôÌ,Žæè 2õnècß‘Ð<ìê@=ÿŠ#XXXZý~,È@僜ܥtÐbBLGg¸FÿÐw¡ó“é9V@>b*þHO× §Ê¡Zòw¾h|ò¥<š<€ à9²¡1á~Õ*„#ÇßåîT?¸…!*u?0ytºÙD:ÁKÝÔÃ0‹¨n¦À°œz=~¢~ã<_Ø4SC'åê@; ü4Ä !ÎïcÀö8ËۈµQ㳈N ÏÏÊî?ڛMQa„!CÚï½ÌÜ#XXX)&xŒ˜´„ɸ|­=w„ªÂ@Ž£7¯Âˆ1ÆYB’‘¯kk&O!ã#YYY ét˜€Ñ<²ÊPÁBuœ ’ƒ±}—Ë0(0қÕDMPq‘( há9t‰ýWᆇˆ…=H”×Ðó#XXXô1ðÞ$Ä'‘Ô|²ù`ä2,Êl½ä‘ºoò(ÅôD6bªlN·„×,Xæ¨@Ç ] #™Õ% A¥‰$…#XXXJ!pÉ òõÉç¨Ão¹U%[§§ÓëÁ= 2Ms×B{‡”þÊ1a×7fp”'9›3ÛË~cÀAïÉA!‰!Ñ|Âe‡9 {F½},+%Ïfj÷]ž¬½ØòmØ”:J´ŠnL<Ö$?ï·õ“ÏϘdu”cºRøç¬s7ÊLÃG±@áûåʊd9Ԗꕖ¨0r¦…Ô÷ðèÝqj-Š¢6Û¡ 9!“-„–N0NE¹œW Pø¶ô7¨Nfy—#†Ë¬í3^g‘ȉ ¤Ûf}‰Ï <`DžDƒ Y cÀ FC{`søÓ´O‘֬˜›/dúkc 
†‰ŽX˜;®¬˜€u¡d‡ˆŸ÷AÜ~S<¿ÎtCøî´É4–cRX¦&U"ü·`œ]˜©ˆD$€ß8ï#YYYSÝ!óÚ Daä“Ú>DùªªHɘb)C@&¶t1ø ^)ä”î!Ê¢|\:¿•ÀÇXÀ“R„PažOўŸ/•wý%æàøûÊë±B€1ŸoQš:$&wkØã±ç;ž¿ôß÷hÁe ¿Fnƒé•~€”2QóTWëÜÝ}/¡÷*7|C‚cãN|ÇYêyf¤¤ ŸO ÁM(s.a|îŠ`Žä;?ÆàñüÆß¿¿)hvÚ_á#YYY|×ÏÔûOwââ£á%œÙR”*i Re€àvqî3¯_B'¼mèƒâà=™ìn­§-5¦db©¤0WR§£ó¬ÿsŠ;ÂØ5¨ªñXUý¸=høŠ"UÄ ä©ivǗZ“DhÇôFÔÔîþ신BY&)š_¾WQ‹ ÀO¤ëŠ|Ó³ÀMµÔÆIŠÑèC²Àì§ñnâ<Š"„ã8$X¥O_Íõ}ËO+rf{©ÏÚN©{ó“5R~ºPiÊ nuZ²­¯¦Ô·ÿÄИ‹LøŽ&ƺyh'¥íí¯D½øßàä|E•›!ì@(އP̗P#YYYƒ!®Ä±>èŠ%#]\ýáòª˜ÄCø…I÷þ¿—ë/äÝõ(0§yC™S÷c˧W€¸v3®³ ÿ;Åý˜×(éäôäa„bÖuïeÙ6C»7þе1¾>vÇl]ÖlDSÚJޞð¼~Y’̰DÉ0˜&%²é5€-ˆ ˜0H²“‘‘(Ê!ÄâÀœP8»ÄÜñɤùi"ÂéJ#Jëœ …Kéìƒ%(Ç÷øº9na¦ñØ0 ã’m¼pSÓ «‡b§`õl)¹ ÝM?zr¿a‡ãÛ)Ž˜ïM¾$Ñbàpïx:9¿´4ö¯¡33ÌÄb‡Óïù½ßÞÝNQà$;Ðñ“oPAóNJi䃠Š@Õ¦di”H7ñß¾û±‰Ó¥Zèn#YYYe :„ =ÜÓ1(òiœÌÛcÍçÌH™|~ûm ªEÔ¦>ÉA#'€@x }¹‘–PnD ñ¨rá%ÁÀ.Ë1pÀ<ðt@Öò0“Ÿ‹¯àÖx˜8ìS–SSW˜áÆ}3—ແ´«çۏ懴ƒã)üRoƒP$m€™ùü©Ùvž|óðB9@@@ÃĘ{Iu*zøŸŸ†³Õ•²ÈÜ=d0¥!©CWÝÊﲼǔG©Üêh;"t»C䜈öaBE“…IŽ*dÈú9/ë<°SëNàzx&€2k„†#Ùâíã²îÈʚ¦°Ã.y­!ÅÄ©4i~þ™Š“! Ë0=Ršw:ë–'-%‚$êm©Çˆ= B‹m±‘PŸç¨q󹴍;âæUmÁ(Œcz4rq#YYY HÒÓE%C¨ù ‡$µ[)Dê܈FA²*¢ŠZN¦*¥¥A‡#<6H#YYY³±N€iÑ]¥*­ZôkZfΐ_D385ˆ/ Ú!m&h±Ŗ#&ˆÑ>¸<ôNùócÕ]¤í—¶Q«QTçM)Ì\Š¥­’ÀQSDhv±²ÐD3%Vݹ3ÒÅÉç#YYYøíAÜ5ª$¢˜„&µ'¢Û”~.°þjH•›r!C0¡WRö WrP,ˆH#JPÐrPéC»^XÔx^u ­R²3ÅÏGoîaÕµ/BcÖ^Ž•äQ4VG£ž¡ëc»„¾»z½lºíVUQ‰(ß.el%ÔÕK’ö:¦¦ÔºA2*·Zl]ݸAé(¦e$ JE „#ÔBEÑ%*ÝÆ+dcb ™¹rö° = Ô¸P:³ÎΜV([Ôqµ Ù:¹‡ŠwÊqDÖX°¢­3JuaW…Úõeà‹z·82þ)Ò]Ž"Áó$;¢0!¬wÎwì1áp¬ºP­áJ6^è8è»äðä½HIŒ1 J@K™MˆüQÚ¨#XXXNÉ ¶y„:‡¡/'¡ŠY¥!dgPy d€r&&&Ì¡Ô@bÈ$£éž€é7N]-àÀ%ˆô9 @BA>e?^ 1‰­u¾'æÌÏ)õzOǹ·ŒW¸d~á׃¢O3ÃÐ6妊h „Áò„#XXXä1DÒ1L3ìø‰±Ë„N»œ‡tÀ@Â2C+,AH´QB œêêZ7R)O±ƒä¼¶X<øñÔè£z“t[…iÆP¨Y©L9”¨ÐN3´ÓY84J•Ú÷ÎpŒjr™™lá£qZ²é- h"ôrÏwáàt ŽÕ0Õ]ޝyšÝ›øà®<ÇØs=GçFËEùvuÊ%&BçÍâm~y3·7S밊”>ô ½¿‰D Péð@Ð`çº)˜…“Ö`aQTÁíMÉ<ƒ÷ûP ޝ¬ÏfƒO2œœÍsÇ÷AðøyÑßt â@ê €ã¢ÜSîò½|øk؄æ‘'Ýø*'«C®3cÅJ&¶b'û,™z%ÂgàIŒ›I(x—TŠ#ùäÃpFLfÂ}FâæX85ÑîææìbŸÞü<`¤ ¢(8VɍA¦–q!„˜âb&œ‰&>¯7(¯ëè]¡³ÜJxÀ<¹<žYN›!Òw&Pá Ùé.É<XXH2ÍdºÍü¦ít˜IÖ¾ì=4$ù¸‚`’¢ |Øð}ø«®íCÙG×àQbŸ¨[ÑW\9ŠÙÕ*ŽCÛÚA=!3 
9xÖO¢ÐlvIÙÅÛ¢_{™@†ÒÅøGÌÒD4D0RÁ35U05$‘”6¡¤"öm<„4ÒÌÒD›ëâ83SE)@;…¹‡R…+@DIÄ4F&0&‡`>|/’>à÷CÙ'¬=ž€žà_á„5ÒX”fX‚Hˆ‘fB7ëêanÍÑv§©@EÒAðúŽÁÍ÷ ”{!óÌz‡Ýõ§ð/1W§–Pr žP?D `’*¢¾J‚nÀqäšU $¨¤IA€eJb’Ñ À>¸ _ïLÂ&”g"À!8ò‘ëÐ*n qIüùÔ#YYY·¶#XXX@¦X"‰@ F„>1(”€… ,L´"R#XXX;¸;À>5 6”VWAøh>#XXXjA¨…H€=#XXXà'©9©àªbÃâS»Ð` x-ÄæA@Á „uNÕ1ÎcàúŽŽà#pæI$&ð7öØæxùHàö|$!© Üð8*Œ‡ 1p‘™¨ "†ˆ•#YYYÖÚiñÖiã‡NœÎH#XXXuK¢Ã‚‚"–“5t— rìrî¶'FbŽ‰úçˆd1‡ÜD ª#XXX¦OMè:/RH\Ê`–€†,Jí6²EŒž8`!#XXXj¥¢„^#YYY+ˆ‚Ò:ŽaŠËe-e¡GCH®ÊæŠ)"YEõéõÈ|6Ùt»H`ƒ*ü<|N´# <_AZ—,æ™ „MË4dŶ1;ŽªiÀ44Ñ2ºd¢^̉·ÐKɸó­ÎÂXRç:cybñ„lj&”C¦¿çÜ%©×"GÈÝ-Ì~NaNj¹“ÅfùšqÒÏ齇9ýªHôææù}FÝ9‰_Øs]ÞœcKFvžt}žyå·ôM ÀÅ랛lÓѲ!:=B(^’v^«ßÜ¥’†ï݋”ï\ëYšvïŒÌ³&/p¿?x@çÎþ_Щ‚E/ár°É{²rQíI A´{»§Èô(‰LàF¥*b¡é#XXX6„KPØv!>W M·%e\\fV:œšœB±ôϥЉ‡zšůÆñ#YYY@œ&ðp¤í~e£kä…ð¥X΋.ÆqƎMч#XXX.˜YT(ûÀÔlšD³U46I3ª*®qœçÑie<Ÿ=T±Zr¿Oo“ Œã}ñ°œ!äµýÅ>néÙ<·Ú[HtÅé(ÜÞDïœJ’r#H–(c®Ð‘m•ÛùíJޓVj*¾˜Èzã²IHš®nŕEß,8=™Â€ºxjnŠTen‘Ëɇb#YYYMM%¶wRÝièûöãžõ1±¥Á³…ÝsV«҆ û4Iã7 –r|zè2¦É–IJ* Vj.žÃ3>Òq1ìÉødîºy+¸{æ#XXXÃ>ÄcqÄï¥6‰”ã¶E×ÍM±ÝÑÍ\Åó&+¤#YYYcçÅjÒYґkh¶žîډÔ,6Î-p‹Éؔ(Dh¤£”¯+¨#й‚XàÜZDZ´ÅwVø$yéÒîcKéÑ×q3•·¬,ªÈV*‰@$c‘ªúž÷EvvÈúÎë²õƸ眫•=ucÊIñÓW$ïX°Á-ÔkJkšßd\Ufôâ-&ñG¹æo†Ö ݱ®pïDCœNðÙ¤QÚ«žg(²5\¡xßÏ7¸@…ð®™c9™ñÍNõ¥âbùp¥A½ó‘‡ÑsύµFnk‡3ʊdQl`Žº»ËÈÂIt¼ÕžôÄzŸ3(|òy¸Äí)‰ä[¢X®ìCÔÃ_Ãîsp/e""[ÎmÊÏ­vm°"@¡œ#É᜜Ñ:]8ß6MGˆg*ÁÙâºÍ5}L˻ՙªô>‹GÌõyÞ»ˆU0…UèÙhIñÚ÷(šãWGÏ&/2‹1(d& Üâ¨2êEöjlõyKmËãÞË£”á¯üÕ{7TÉ<ã5Ç|îîlùöõãXÝ<.9dy=¹ÝZ7àîVò4däu'›óÌֵ˃5ĝ!ks#YYY5)”+%·Ò·ëܧ¾°¨KèÚ_O’L“G(ïÈ7|(ãj'º*ˆ¦¼B“¤4GÚËR\ú˹mED1#XXXԍ˜™!Ž’ÄÅÒk_wx/ڗ2PñÀs£GC•Oc-Oîèç›8?n ûvã>Tes… úâ ·¾Ty“|¸ÍK¿ŸiÒڔ-ŸqË+ÉWÖæºQŸ_\b‡É´áQÍ2‚áWŠéQõ0aêÏuÑÒWuï5Ú¨5œÑ3§''0£ÉEïW5Œ´*>=ãV-›íL#7hm¸Ä§ÍÊ«Õ÷Òф#YYY1œ%)ˆuÞwÂ÷­±‹,n¦TÚCm¾î£¥Õг‰ÝÐÕÖH’I $•ejí]dÑÀ‹Q§d«M9R&dÊR*Öæ³&R¹Iª jD{³c5­Ç•Á­–-F™…Ý(ö#XXX¶×w púÛåÖ5LÓ&«8Ìèn»k½Š{îRcï7q#XXX Dž¾§ìÞf>Ц¦£ˆúØÍecŽ©¬†}E¸#XXX‚B°QøF|ÄX(L=ÏDhŒÔsÒðLaÄqfx0,#YYYÀìÐ  b2rØàØú•Ò=ºÙ\žäÝÑ/âf&<C5ì)8ñ<0ì¸V±Z[l=®)`»˜Î³ŠQƒM@£*^À™*1Žs“ï"°wz)ɀ=°vO#pÓ¥8p#YYY‘4NǖÙÝÄNA ¾¿¥ñ.ýŽ£#Y­ŠŠuü ä]ç×(J(kÊ/ t{O‹î_W­|’RP›_«mKuïß;$£ŒK¾`µ„(X Î4IwnE¨ˆ-}-—ñˆmŠJZ'l2kD|I{ç;å,¼ÔS—ÉÐAéÎà—ÄzÓ°I`t"“! 
“%"d¢¤„ö!ëoÄí€<çœ=3äÞoa©Æt â‘#XXXeðK؄Á°à9”ÊJ@H@Hz`.'˜cR7!) ¹ÄLí—,æ0ãCCÂb]‰Oô”4€î̀Ëe!2"6t ;#ÌŸ9=vsîÌxýxè ÐÉù´x£ÔrïŽ;^¹¡¸#XXXˆà\XÇóÛß²!@½ÎÃiü dÐ)D‰&Q2 êsõŸ+¾7vàGè3ÄþL`ãZÖÀ¿S9lY'ŒûteÂâuŠ\<·uÄW÷sÀáàÞg–@óñ¨ ž°RáJS띸LãKÇ Ô!˜n'qö>ã*;3ž0 cdÝ3NßY§z{>AÖ w8hçל`ýžÓù+‘ÊCF¨ðBóõϤ§+æ‚I5¬2Cqñ_~p‡©µXï8ý–3Ph†'E±OiNŠx¨qy¤ø/š2ºG`×ǏïÔ÷`Uþ¨4"Vn:“^¡êfޙ’×ãxr˜“ÿ¤éç0ö_Å£꒔ øÍƒØÒC 1ü&&´öü¼u õ÷C؅ùÓ³<ÎÉ#¯B›t’²Á€"V©¦áÑy›-Ÿ™Úâf/qÜfùc/Ë4_Îðò7¾…%h;L#XXXùã’ÒP,A§AÌ)T£Ë³§k`ˆ"E%CCóOcäI h™>°C„TT _f&ƒŽ~7åϊEÀý?yùyå¥tÝÀxk&ã„ÀÑRE É!:ÁÉåÖÝ#YYYŽè#YYYÆ T@D̾˜åbÃé€ìt…y”°Bn± fD¬(* „(‹a0LŘT¸j¦‘xuçé;‚tìHêm%‘aôtCw®K^põzeñ9 ¾cG¯Bz“p:àâ®É€Y„.ªnK0RK ¢iÀ8†Q¨e{@́¢%ÐSIF”Ã"úST— PÈ#XXXy‡#XXX†`4"À€q€tà&#郊zD0„¦L¹:„€ ÙxPë×àd@=É­*Zr‘-ò“H_’½_lÂvüß>¾)q'éŒzÝñ}´‘Uôb¹L„ÌÅTILÓ|ÂöÊ>;Õ˜PO‚í#XXXyØ¢Ÿçbòž†ÙM˜w€x|FÎnöÛ=³ŒÈzoE«0CIhZh„Š˜’”à¶´ð´Qlˆ†0IAÏXx/~8¿O°öN’lî£}4C¢)ŸgÌ/iÜ{? t€†(ùÏCcŸtØf:¼ `>÷<ôPíQV5꬘fåÞ ˆGˆì…wcŸ€Œªä´ÄV´Zv…4'öî.¦•3ƒåyÂË9/*|ú Xfsû*T„Ê̇KÍP€*‚ÊÒÍQæmµvm)²Êd”5¾k`àVçàt'¯|ô ù±6Ù{,EöA@ìQD”SHTµAIŽàow“`ªƒi#XXXŇŒˆˆq<SðsFB‚èh¢ ¨T8kŸˉy´š³Ö}z¡ €Ð]„»Ù‹é¡·Š‚‰*¡×`éR•iV`ŒKÒ¤ÊD“ B§k´Ü‰Øôº,øo8Ú¢O-°þ)>ÌWºð>3¾f¶‘çÞx¨çð Žøè9öaã¼Pw™Döf*6OcGÔg9#XXX3Y±ö¦ó¬i£W‘Óñlé&š'_Ý4MÝÚOø·Î‹6ž~œ©às4Î$ØD÷s1½nÄSQ#2Mͬéjup@ŽïjL/÷jëÁ‡'ª×¨kKTXYXEበÃQ­i9ªk”@`(@Ÿ žÍÏb®õ:¹YKà·XKä© ±j¥N¥ùñrZŽ"XqºÑ\`„Y®¨(Dóãm”Ú£\úæôéãpà!%%î8L݅÷—é Ù%æ¡M$Á #4ðµ¾úÆ75†’‹ƒÝoÖoºvVFŏ Ú3{¼g‰9%ܦ3«×:…Ñ»²Kf®O⹕od¦(B»ÜEïŽd‚)Ìï·¯= èäòu¶ñ(Gly`5D.ᐑ‘ëE×bàz#YYYLKîMãЖĺ$|œ%ã¿3˜oÍP¤¬ªŽ JNs¹¨¶ü[ÍC„$²hY¸Á&b2H/ÄëWƒ|qS­A¥P!1 AŒIÂÍú(“L j#=–³ÆÅ[„­Œëd⨾7+HDÑDV„2Mu”ZÈT$f‹5F@’D!K< 7]ú¡ËŒ9!TiO=_(.4xª \íœáuq<­Ó˕ˊàžxÕÊÒ F¢«xùŒ%RɌÓIs‘PXš`àiç{šQQ#XXXȑ>¦•€+%ö¬šr\’§ž™ÁÞ1Æôo%\!ÇG‹”Fb5$`¢’k»œŠpY˂VK5·\z½3£l;g·Æ'·ÊÅÎ̇,írI'Z•hŸ8¯iW‘¢u!s„y}ݒËíÊK§k¼ð÷¸ŽëqÄEfB\_­C\ÆâT2tj®h€‘«ž13Â-7UA¡ËH¥1AŠ#›ÒæŽç|Ãéø’vC7„¼KñDÇ]O0]ðöªs‰N½ÄòìDs´> ff3³ß*V¶õj¶¤·µâž´·'²¥†aì­‹HÈ~Ÿ¶«;§$hÉd¤zðEkÚ¬w¤Lv"vm LªÎӓus^Š|TiqOglŠGœzÙ´-×*`@¢°â*£šòõÇœ`ôf)•0‰G¨I¾ zU£XÁ3à@ðÖ;kÂ^¨0¦vݚ‰žè:E¯Û²޲¥œAæ4=Ì#XXX”8(¨¦\ÚsÜÕÕ#XXXKі6âñH£†ê°Zd§WN•ۍ*ƒEi†µ€]°Œ…„]D]Ñ1EDñ[ÁÁ.fk{ÐJ(#YYY„ëdû۞0Ü hQ±0F¨ãM^D`\ÃF0ŶF*ýN ԅ“j‚è‹¶rÉ¥ÍåT´&ÛB!wEN«8V!-Õê® 
SäMçîf%4ÄÖÅwo艁 ǹÛΗ¢SV¨êKp"Þ±1R˰Ì(²cӛOKM¿WށNYìG#XXX&{™¼ŽîÀ띍qÓx7¨©™lø¨¾2&`}7¾î.ÆB8PC›U:cë&©D·‹âÇ­Î$„²åo/&ø½a“#YYYjLɹ©æ³Þüj„I·%“`ù"еP‡†ín2±3SFÑr†R)2{ª,åå1r2¥Õ½Jj˜lMVÇ]K;ñr«¦x뎗:4„Œ[4÷"tŠBBEÀ7ƒÃ‡$ˆ‚&!fhJ˜jBR#XXXªk¾v»túýnÞ½¸eLÌkå^Ð=ºRõ¹›ýìðê QA’œxñ?ç݇)ß2¢*£€ Kô{ßO@ÀpU~£€tœ8Yëƒ&\ú!>AÐõðIíÂH’‰‹ŒˆÁ˜:ŒnpàˎÏm`°nëq7å²¾T8&âñH$Ò0pgÞPÝ££”ðpN¼BS„½=žJ·â/]ù<—uw9(nἕ-5K@Ì!1SK#YYY)2Àr)SÈnHï&ê‡#FÆÁÕ`ðÜà!‡p(@(w(&Ì-JJ  æ-àcÑLûæ0ÑGä_B=A5@`û€‹*(ê jLϲõ#XXX0l„eħbÃÒåðdT8A#YYYLÏX0š(¡4Âó5…ô|áëÀa0w§=‚dqâ¢ñƒÅæ²ÔBz:9WÓãà<@:’pX‚\DÁðÁE4ôæ 됉 ‚Y='‰ÁdðL\šϬÆÎÙV%E¨}EDT  €?b¬õš£€o{´8fb¹R=¬©¡:eâ\)ÊïöçËÿŽàö @I L.Û¯.ÍÆ (NÕì‘pPВàxòÂPؼä1$*›€îsÜ’0’Â1£/ùtH‡;²à†æ.ÁÅÍ´¢*Dp#DŔ´w;Ýt=3/dæâ¦D2³…îr…e1)„£[rÃ=“½Âø——ƒ:IKày»¥é4"`p¬:CôÉÁ9´^>Þ_ LC9>Žl1O‡Ú˜½¡Kkbª¤’B™»i×ê#‹ÊMjXIUø¢M˜‘H/«Å<¹[®GÓq;.rEê"B#XXX„# އ^R„+víDO¼#XXX ñ„=ºòÀ3Ÿ¾1UpC2ÂbÂÛ®œvg¿Ä×±VNöV¡FH£tµbµ%ß±uSq‹üää´f̚6:ª 8Ö­,@eDÕª§Hsâ•ÚLQfÉ Q6e›÷{Òq&ð"êéïjdÑZ¡À”$R*Jts%ë,$#XXXù‘4$ßiYS[ ©cR$"¢äÜ6:€ç̕€ànªýþKÕD㺁xS!Ð4ëHçY£‰wlXñ­hÈ7’ðìí¼C ÄrSs÷¦ïv{Zƍ&:[¯€ÏÛ'{[›±ð&M)!؁Ý~<]˜æ"iAp!ˆùdÒA ššDˆóaˆIôtN±!ÀP…ø‹íä¤É:›ÏR0`#+ãË`þå=†‚}¤¢ïBïÀ[åNqûÁõÂܧ('àlo²;L00ŽœFXVKÊYû”¢Dܘ ôzS‹á„`2aP§´Ùq“ÿ³óþ—S¡þZx‡¶)ì‡1%þ¾i0Yì·%ZÏeÄ©¼¯.|ƒ†Cb¿Ï£anO ëšMƒøý­{2ê!Àšސ€ˆ)¡#XXX•7XÀ"…:ž¬ÓHo»Ë s=>»¼(=qòŽ{6 œO¼NÃâ<¤Ÿ‰>_µcõ¿¹ <fç ¹N"êdS÷vӃ#XXXtÊq¤§>ï9'%6½a3'I‰ª2öE³¡¥©¯YÍ1UlœƒÐțZn³B‚lI³ '@øZ”ûØ*¾Th¡H‘O¦D2ә^Y?n‡XkSe(z ."þÿ4u,îH qF XC9ôÙÍ1ŒÏ@õ'òÍ÷4#üR!ðQ8Xúb6ͱºf Þx¢‡Ð>­Ïº¸ôt¯8§ìà 3? 
æÙŒ ûìθ74ÔÙ8“÷ܽf1Dj Ì3<~b¸;ªïï7Mæ…Ó¸î}Aê9ÌÆ#bdÁí\ÓN¢Å=í'›4ëCfeëŸÂµÑP3÷MùŽ„G´1œ§ñM²â]UmÓó}™V ËÑ.@‹#YYYMŸÌ¹–¡™’í·‚Y6µ¤A9 AEÃÓ ¹Ã‰‘9Áۚ¥aS´ÉÆ/A¥I”½Û0Î\]enBÛk+e%´Y:J!5”ŒÔ,Õ½Ù¹§±¹±®Ü£¡ÓâIPËrèhÏ ©H¥áNºØé8GâRœå@ËKS¤ôž sÒ{y³ÀŠDHI§”QWӆY™´áÐ×Àwúƒ†í±•EHDžùr)p”Jh…#XXX)ì<Œ=œÝ;Æ"{ƒ7ÁcǗŸaÉzm‡ë}]ñÌØrMü¦7ll‚tRE RºÕˆAÎÀ1<uSЖv*@‡ð¿|,F#XXX)‡ìp¦”»Yï|=Óâö¯rq5BЪÓKC ބc?¯¡äBÝåŒYÆÛ~Ìҝ-û#YYYl#XXXt’™Hf""HH ¨ !Þ:"$ƒÑã£é9D_#XXXR* ™$˜i%(¦… YT†‘ BB$BU:§0xJl@…Oqٓ_£öö_NÚÞÛÝ{LÞX8Lk#Ì|i>Äf=úç¬T8‹ŽXkN'ŽÓ®ì”{Ôxd±*œ#û UÕ êZB¨u­LÂ1QöLUèRN G08b#YYY•ºŒ1©”J8v˧y±Ì’P®líݚòK¾ñ“ÍSא|'Påì§ì<`v˜ˆa …IËyi–+MÓaž#XXX"“”¼¹€¼Ô^å8Ô2' Gºg¶ï-0Cˆ„;E$^©óeôŸ–À|´E¬I£æðìА¬,¤T±ÚìÌ<¬"jC ˆØ¨Œ`Ø8#YYYLR2u@{‰GHÓÎh:Gð œ#XXXG_mÉèàE™…‹¥"‰&ԉ£)CP bXä‘Óðé ІɈqC¤¤œ5äDLw;#XXX÷Íh؉.'ö$IÝèWÿÛÚ|Oä!+þ,"¨ˆ0Gð¡v}\À:"hÑP!#$Œ´ÈU*òR~ò"ƒ¡žJ¯RTȺ〠B×ïì‘1TQQUUJKIîlLâ#XXXpPUÐÐ[ÌH,üßáayÊd¦ì1ZC½_§‘»¡|Žjø’hþ#HBb=ZíQ|ö¿€¹o›ýÔ±·ÆDh#XXX"&*ŠV¡¦€HU„…–!!… !%OóÐjQÜÚ\°;V;Òò!º‡r½ˆH:]/èêŽQþØi)‚gÈʂ±SO@ò£ã¬'âþ·Á9©Ö_æç¨ÇÛåp:Ÿ÷bt5ŸÛ~¨O‘H~°˜Õü˜8ßÏieø½É;gÖ±²ãC Óà'±,ÇÄ¢¤›˜žÀúoËFØ{¼³a‘~‘1Ü#wò±}€ÈÉs鿯C‡¿¬êón‰¶Ô\·ÐikF4®XE ÎT‚fˆ$” < ̋’‰Å¡¤Ñ¾´'.5P ?ÒJ">îHö€È±<ž‰‚{‡?èÇ©ôaa€71€ú;_m$#XXXd¨¹âá'¯äÃͨ0õBï;ÊOT€xÎcˆ–Çé€ü±Ê ZsE.||]^øˆø9„@Ó>^O“´ã¾«cðcŒáFÔE‰ZgCÅ´QX­8ɤ*A¨)hˆ¥ˆOãa]†—‹Ôsγ@x‡˜kèc–g§̃Ĉ~ùF8‡‘††‘¾ à‡¯… Â@4GCÌ;ÞÃøgäpañP¨Š‚ ìd0wš‰æñ8;z"á>st¦€£ïÉ)+ºýƒšp”¨#‚#XXX-c)ì¼ß®16ÔêÓùÈÿþhéîlCòÞÌ+Z$N桑t´bÄ`Ã+{:üªîEülñ<Oñà>ùíð¦²H½Þ%~¾x^²‘Œ[*6 ×åœyœRªqxü2CÇ8v (|ÙÛ…¤”ñ0@ëüíY I¡Ú¦&Td·$£…¼9±c‡Cçà˜Op3À}ÅɋÇÏFqu1áNsÍÃ#YYYÍÉ^|°rU@‡žîÃèøtßôÁ I(Ó MDËLER‰KŽ-ÁÜÐrCŽcߛ^²b!~=©ßŒÒIU' |¶4†Äíi= 14áÞ?àÄØ àòÿ³<† í”þ/oœƒÞ;*·ô{—$:`öFOùlIö?ÕíVA>˜SPètLp; À€“É(ž9{Ig1¦}R¡ï“²JW©(vM3Üaã#XXX>ôŸ½¶×Ì â¦*6Ùݔ<%Š&$=PàW@Ë»¸Åäø­ÈÕ'R@ú`Ãöãã´æáîØ?línì;Žýp?X(s17ð)ýQ _ÁŠd¢dÇòøiÚvd*#YYY³=g×úðáÃnö©2T=—@® EDÿæë¬›B*î ”<M,&^ðʧ9AV â % †T‘„ MVú9ð†úC„”Iˆ%‚ƒ¸ÿïÝùh~ïGÍà¢'Ê­<•hˆÒć&rl#YYYAŠÿwÜÐ0l yÛÛó=ø|³œ¾8)ç™á,Ö¿§ti`OîLNâ[$cÒ T1ñÅ7‡”çŒUF<µ†o[oÞ:ý,<´ÊhÍJ-¸Ç’·«´ŠäTIZ‘M1…£\dîCÑ©ʔËnZ¯PJš3ø$*R9x¤á&LS´R#YYY1Ûyß6/OGH#KŝC"©éõ\bß™šÞ5ÁNbó¶7bÌs7 Ì8[TtÄ¥çZ³Ül*î½ya[ʌ¢%©rBjbæy“‹Ù:ÖÛD’‰ —t筝®^1ç 
cRù98Ø0mM“1xMÁ@TETˆëÌÀœ$`“dΚíÙç—>¶GìGO*lñ³âÌ7æMŐàoÔt€ðâ'Àˆe%¡’ …¨P)ÈCÄN£.°ÄW¨Bp'©#XXXRªè"f±í='þrÿŸÿÀ†4ÈHC È> ?áÔ¤9 Áû@•OøýϹ#YYYƒckiL@R€PiŠ[e4”*hCBP4#XXX”ƒE4 #XXXЦ ªõ؟Öô"ú‚<…Þª`ŽF‚™¨…à`šáµ°f‘d3ë>¸tø%!óàg˜™ÛÙ³ö#YYY!ÿ‘w$S… #YYY9É` #<==
lgpl-2.1
dpac-vlsi/SynchroTrace
src/python/m5/SimObject.py
7
42060
# Copyright (c) 2012 ARM Limited # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Copyright (c) 2004-2006 The Regents of The University of Michigan # Copyright (c) 2010 Advanced Micro Devices, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Steve Reinhardt # Nathan Binkert # Andreas Hansson import sys from types import FunctionType, MethodType, ModuleType import m5 from m5.util import * # Have to import params up top since Param is referenced on initial # load (when SimObject class references Param to create a class # variable, the 'name' param)... from m5.params import * # There are a few things we need that aren't in params.__all__ since # normal users don't need them from m5.params import ParamDesc, VectorParamDesc, \ isNullPointer, SimObjectVector, Port from m5.proxy import * from m5.proxy import isproxy ##################################################################### # # M5 Python Configuration Utility # # The basic idea is to write simple Python programs that build Python # objects corresponding to M5 SimObjects for the desired simulation # configuration. For now, the Python emits a .ini file that can be # parsed by M5. In the future, some tighter integration between M5 # and the Python interpreter may allow bypassing the .ini file. # # Each SimObject class in M5 is represented by a Python class with the # same name. The Python inheritance tree mirrors the M5 C++ tree # (e.g., SimpleCPU derives from BaseCPU in both cases, and all # SimObjects inherit from a single SimObject base class). To specify # an instance of an M5 SimObject in a configuration, the user simply # instantiates the corresponding Python object. 
# The parameters for
# that SimObject are given by assigning to attributes of the Python
# object, either using keyword assignment in the constructor or in
# separate assignment statements.  For example:
#
# cache = BaseCache(size='64KB')
# cache.hit_latency = 3
# cache.assoc = 8
#
# The magic lies in the mapping of the Python attributes for SimObject
# classes to the actual SimObject parameter specifications.  This
# allows parameter validity checking in the Python code.  Continuing
# the example above, the statements "cache.blurfl=3" or
# "cache.assoc='hello'" would both result in runtime errors in Python,
# since the BaseCache object has no 'blurfl' parameter and the 'assoc'
# parameter requires an integer, respectively.  This magic is done
# primarily by overriding the special __setattr__ method that controls
# assignment to object attributes.
#
# Once a set of Python objects have been instantiated in a hierarchy,
# calling 'instantiate(obj)' (where obj is the root of the hierarchy)
# will generate a .ini file.
#
#####################################################################

# list of all SimObject classes
allClasses = {}

# dict to look up SimObjects based on path
instanceDict = {}

def public_value(key, value):
    # Returns True for names that may be stored directly in a
    # SimObject class __dict__: "private" (underscore-prefixed)
    # attributes, plus functions, methods, modules, classmethods, and
    # nested classes.  Anything else is treated by MetaSimObject as a
    # param/port assignment and diverted into _value_dict.
    return key.startswith('_') or \
           isinstance(value, (FunctionType, MethodType, ModuleType,
                              classmethod, type))

# The metaclass for SimObject.  This class controls how new classes
# that derive from SimObject are instantiated, and provides inherited
# class behavior (just like a class controls how instances of that
# class are instantiated, and provides inherited instance behavior).
class MetaSimObject(type):
    """Metaclass for SimObject.

    Filters param/port assignments out of a SimObject class body at
    class-creation time, wires up inheritance of params, ports, values
    and children via multidicts, and implements class-level attribute
    get/set so that assigning to a class attribute sets a parameter
    value or connects a port.  Also generates the SWIG interface file
    and the C++ param-struct header for each SimObject class.
    """

    # Attributes that can be set only at initialization time
    init_keywords = { 'abstract' : bool, 'cxx_class' : str,
                      'cxx_type' : str, 'type' : str }
    # Attributes that can be set any time
    keywords = { 'check' : FunctionType }

    # __new__ is called before __init__, and is where the statements
    # in the body of the class definition get loaded into the class's
    # __dict__.  We intercept this to filter out parameter & port assignments
    # and only allow "private" attributes to be passed to the base
    # __new__ (starting with underscore).
    def __new__(mcls, name, bases, dict):
        assert name not in allClasses, "SimObject %s already present" % name

        # Copy "private" attributes, functions, and classes to the
        # official dict.  Everything else goes in _init_dict to be
        # filtered in __init__.
        cls_dict = {}
        value_dict = {}
        for key,val in dict.items():
            if public_value(key, val):
                cls_dict[key] = val
            else:
                # must be a param/port setting
                value_dict[key] = val
        if 'abstract' not in value_dict:
            value_dict['abstract'] = False
        cls_dict['_value_dict'] = value_dict
        cls = super(MetaSimObject, mcls).__new__(mcls, name, bases, cls_dict)
        # only classes that declare a 'type' are concrete SimObject
        # classes that get registered in the global table
        if 'type' in value_dict:
            allClasses[name] = cls
        return cls

    # subclass initialization
    def __init__(cls, name, bases, dict):
        """Finish setting up a new SimObject subclass: create the
        multidict attribute tables, link them to the base class's
        tables, and process the deferred param/port/keyword
        assignments captured by __new__."""
        # calls type.__init__()... I think that's a no-op, but leave
        # it here just in case it's not.
        super(MetaSimObject, cls).__init__(name, bases, dict)

        # initialize required attributes

        # class-only attributes
        cls._params = multidict() # param descriptions
        cls._ports = multidict()  # port descriptions

        # class or instance attributes
        cls._values = multidict()   # param values
        cls._children = multidict() # SimObject children
        cls._port_refs = multidict() # port ref objects
        cls._instantiated = False # really instantiated, cloned, or subclassed

        # We don't support multiple inheritance.  If you want to, you
        # must fix multidict to deal with it properly.
        if len(bases) > 1:
            raise TypeError, "SimObjects do not support multiple inheritance"

        base = bases[0]

        # Set up general inheritance via multidicts.  A subclass will
        # inherit all its settings from the base class.  The only time
        # the following is not true is when we define the SimObject
        # class itself (in which case the multidicts have no parent).
        if isinstance(base, MetaSimObject):
            cls._base = base
            cls._params.parent = base._params
            cls._ports.parent = base._ports
            cls._values.parent = base._values
            cls._children.parent = base._children
            cls._port_refs.parent = base._port_refs
            # mark base as having been subclassed
            base._instantiated = True
        else:
            cls._base = None

        # default keyword values
        if 'type' in cls._value_dict:
            if 'cxx_class' not in cls._value_dict:
                cls._value_dict['cxx_class'] = cls._value_dict['type']

            cls._value_dict['cxx_type'] = '%s *' % cls._value_dict['cxx_class']

        # Export methods are automatically inherited via C++, so we
        # don't want the method declarations to get inherited on the
        # python side (and thus end up getting repeated in the wrapped
        # versions of derived classes). The code below basically
        # suppresses inheritance by substituting in the base (null)
        # versions of these methods unless a different version is
        # explicitly supplied.
        for method_name in ('export_methods', 'export_method_cxx_predecls',
                            'export_method_swig_predecls'):
            if method_name not in cls.__dict__:
                base_method = getattr(MetaSimObject, method_name)
                # Python 2 unbound method bound to this specific class
                m = MethodType(base_method, cls, MetaSimObject)
                setattr(cls, method_name, m)

        # Now process the _value_dict items.  They could be defining
        # new (or overriding existing) parameters or ports, setting
        # class keywords (e.g., 'abstract'), or setting parameter
        # values or port bindings.  The first 3 can only be set when
        # the class is defined, so we handle them here.  The others
        # can be set later too, so just emulate that by calling
        # setattr().
        for key,val in cls._value_dict.items():
            # param descriptions
            if isinstance(val, ParamDesc):
                cls._new_param(key, val)

            # port objects
            elif isinstance(val, Port):
                cls._new_port(key, val)

            # init-time-only keywords
            elif cls.init_keywords.has_key(key):
                cls._set_keyword(key, val, cls.init_keywords[key])

            # default: use normal path (ends up in __setattr__)
            else:
                setattr(cls, key, val)

    def _set_keyword(cls, keyword, val, kwtype):
        """Type-check and store a class keyword (see init_keywords /
        keywords); plain functions are wrapped as classmethods."""
        if not isinstance(val, kwtype):
            raise TypeError, 'keyword %s has bad type %s (expecting %s)' % \
                  (keyword, type(val), kwtype)
        if isinstance(val, FunctionType):
            val = classmethod(val)
        type.__setattr__(cls, keyword, val)

    def _new_param(cls, name, pdesc):
        """Register a new param description on this class and apply
        its default value, if any."""
        # each param desc should be uniquely assigned to one variable
        assert(not hasattr(pdesc, 'name'))
        pdesc.name = name
        cls._params[name] = pdesc
        if hasattr(pdesc, 'default'):
            cls._set_param(name, pdesc.default, pdesc)

    def _set_param(cls, name, value, param):
        """Convert and store a parameter value, annotating any
        conversion error with the class/param context."""
        assert(param.name == name)
        try:
            value = param.convert(value)
        except Exception, e:
            msg = "%s\nError setting param %s.%s to %s\n" % \
                  (e, cls.__name__, name, value)
            e.args = (msg, )
            raise
        cls._values[name] = value
        # if param value is a SimObject, make it a child too, so that
        # it gets cloned properly when the class is instantiated
        if isSimObjectOrVector(value) and not value.has_parent():
            cls._add_cls_child(name, value)

    def _add_cls_child(cls, name, child):
        # It's a little funky to have a class as a parent, but these
        # objects should never be instantiated (only cloned, which
        # clears the parent pointer), and this makes it clear that the
        # object is not an orphan and can provide better error
        # messages.
        child.set_parent(cls, name)
        cls._children[name] = child

    def _new_port(cls, name, port):
        """Register a new port description on this class."""
        # each port should be uniquely assigned to one variable
        assert(not hasattr(port, 'name'))
        port.name = name
        cls._ports[name] = port

    # same as _get_port_ref, effectively, but for classes
    def _cls_get_port_ref(cls, attr):
        # Return reference that can be assigned to another port
        # via __setattr__.  There is only ever one reference
        # object per port, but we create them lazily here.
        ref = cls._port_refs.get(attr)
        if not ref:
            ref = cls._ports[attr].makeRef(cls)
            cls._port_refs[attr] = ref
        return ref

    # Set attribute (called on foo.attr = value when foo is an
    # instance of class cls).
    def __setattr__(cls, attr, value):
        """Class-level attribute assignment: dispatches to keyword,
        port, param, or implicit-child handling in that order."""
        # normal processing for private attributes
        if public_value(attr, value):
            type.__setattr__(cls, attr, value)
            return

        if cls.keywords.has_key(attr):
            cls._set_keyword(attr, value, cls.keywords[attr])
            return

        if cls._ports.has_key(attr):
            cls._cls_get_port_ref(attr).connect(value)
            return

        if isSimObjectOrSequence(value) and cls._instantiated:
            raise RuntimeError, \
                  "cannot set SimObject parameter '%s' after\n" \
                  "    class %s has been instantiated or subclassed" \
                  % (attr, cls.__name__)

        # check for param
        param = cls._params.get(attr)
        if param:
            cls._set_param(attr, value, param)
            return

        if isSimObjectOrSequence(value):
            # If RHS is a SimObject, it's an implicit child assignment.
            cls._add_cls_child(attr, coerceSimObjectOrVector(value))
            return

        # no valid assignment... raise exception
        raise AttributeError, \
              "Class %s has no parameter \'%s\'" % (cls.__name__, attr)

    def __getattr__(cls, attr):
        """Class-level attribute lookup fallback: derived cxx_class_*
        names, then param values, then children."""
        if attr == 'cxx_class_path':
            return cls.cxx_class.split('::')

        if attr == 'cxx_class_name':
            return cls.cxx_class_path[-1]

        if attr == 'cxx_namespaces':
            return cls.cxx_class_path[:-1]

        if cls._values.has_key(attr):
            return cls._values[attr]

        if cls._children.has_key(attr):
            return cls._children[attr]

        raise AttributeError, \
              "object '%s' has no attribute '%s'" % (cls.__name__, attr)

    def __str__(cls):
        return cls.__name__

    # See ParamValue.cxx_predecls for description.
    def cxx_predecls(cls, code):
        code('#include "params/$cls.hh"')

    # See ParamValue.swig_predecls for description.
    def swig_predecls(cls, code):
        code('%import "python/m5/internal/param_$cls.i"')

    # Hook for exporting additional C++ methods to Python via SWIG.
    # Default is none, override using @classmethod in class definition.
    def export_methods(cls, code):
        pass

    # Generate the code needed as a prerequisite for the C++ methods
    # exported via export_methods() to be compiled in the _wrap.cc
    # file.  Typically generates one or more #include statements.  If
    # any methods are exported, typically at least the C++ header
    # declaring the relevant SimObject class must be included.
    def export_method_cxx_predecls(cls, code):
        pass

    # Generate the code needed as a prerequisite for the C++ methods
    # exported via export_methods() to be processed by SWIG.
    # Typically generates one or more %include or %import statements.
    # If any methods are exported, typically at least the C++ header
    # declaring the relevant SimObject class must be included.
    def export_method_swig_predecls(cls, code):
        pass

    # Generate the declaration for this object for wrapping with SWIG.
    # Generates code that goes into a SWIG .i file.  Called from
    # src/SConscript.
    def swig_decl(cls, code):
        class_path = cls.cxx_class.split('::')
        classname = class_path[-1]
        namespaces = class_path[:-1]

        # The 'local' attribute restricts us to the params declared in
        # the object itself, not including inherited params (which
        # will also be inherited from the base class's param struct
        # here).
        params = cls._params.local.values()
        ports = cls._ports.local

        code('%module(package="m5.internal") param_$cls')
        code()
        code('%{')
        code('#include "params/$cls.hh"')
        for param in params:
            param.cxx_predecls(code)
        cls.export_method_cxx_predecls(code)
        # NOTE(review): exact in-string whitespace below was lost in
        # transit and has been reconstructed; it only affects the
        # cosmetics of the generated .i file.
        code('''\
/**
 * This is a workaround for bug in swig. Prior to gcc 4.6.1 the STL
 * headers like vector, string, etc. used to automatically pull in
 * the cstddef header but starting with gcc 4.6.1 they no longer do.
 * This leads to swig generated a file that does not compile so we
 * explicitly include cstddef. Additionally, including version 2.0.4,
 * swig uses ptrdiff_t without the std:: namespace prefix which is
 * required with gcc 4.6.1. We explicitly provide access to it.
 */
#include <cstddef>
using std::ptrdiff_t;
''')
        code('%}')
        code()

        for param in params:
            param.swig_predecls(code)
        cls.export_method_swig_predecls(code)
        code()

        if cls._base:
            code('%import "python/m5/internal/param_${{cls._base}}.i"')
        code()

        for ns in namespaces:
            code('namespace $ns {')

        if namespaces:
            code('// avoid name conflicts')
            sep_string = '_COLONS_'
            flat_name = sep_string.join(class_path)
            code('%rename($flat_name) $classname;')

        code()
        code('// stop swig from creating/wrapping default ctor/dtor')
        code('%nodefault $classname;')
        code('class $classname')
        if cls._base:
            code('    : public ${{cls._base.cxx_class}}')
        code('{')
        code('  public:')
        cls.export_methods(code)
        code('};')

        for ns in reversed(namespaces):
            code('} // namespace $ns')

        code()
        code('%include "params/$cls.hh"')

    # Generate the C++ declaration (.hh file) for this SimObject's
    # param struct.  Called from src/SConscript.
    def cxx_param_decl(cls, code):
        # The 'local' attribute restricts us to the params declared in
        # the object itself, not including inherited params (which
        # will also be inherited from the base class's param struct
        # here).
        params = cls._params.local.values()
        ports = cls._ports.local
        try:
            ptypes = [p.ptype for p in params]
        except:
            print cls, p, p.ptype_str
            print params
            raise

        class_path = cls._value_dict['cxx_class'].split('::')

        code('''\
#ifndef __PARAMS__${cls}__
#define __PARAMS__${cls}__

''')

        # A forward class declaration is sufficient since we are just
        # declaring a pointer.
        for ns in class_path[:-1]:
            code('namespace $ns {')
        code('class $0;', class_path[-1])
        for ns in reversed(class_path[:-1]):
            code('} // namespace $ns')
        code()

        # The base SimObject has a couple of params that get
        # automatically set from Python without being declared through
        # the normal Param mechanism; we slip them in here (needed
        # predecls now, actual declarations below)
        if cls == SimObject:
            code('''
#ifndef PY_VERSION
struct PyObject;
#endif

#include <string>

class EventQueue;
''')
        for param in params:
            param.cxx_predecls(code)
        for port in ports.itervalues():
            port.cxx_predecls(code)
        code()

        if cls._base:
            code('#include "params/${{cls._base.type}}.hh"')
            code()

        for ptype in ptypes:
            if issubclass(ptype, Enum):
                code('#include "enums/${{ptype.__name__}}.hh"')
        code()

        # now generate the actual param struct
        code("struct ${cls}Params")
        if cls._base:
            code("    : public ${{cls._base.type}}Params")
        code("{")
        # abstract classes (and classes without a concrete 'type') get
        # no create() factory method
        if not hasattr(cls, 'abstract') or not cls.abstract:
            if 'type' in cls.__dict__:
                code("    ${{cls.cxx_type}} create();")

        code.indent()
        if cls == SimObject:
            code('''
    SimObjectParams()
    {
        extern EventQueue mainEventQueue;
        eventq = &mainEventQueue;
    }
    virtual ~SimObjectParams() {}

    std::string name;
    PyObject *pyobj;
    EventQueue *eventq;
''')
        for param in params:
            param.cxx_decl(code)
        for port in ports.itervalues():
            port.cxx_decl(code)

        code.dedent()
        code('};')
        code()
        code('#endif // __PARAMS__${cls}__')

        return code

#
The SimObject class is the root of the special hierarchy. Most of # the code in this class deals with the configuration hierarchy itself # (parent/child node relationships). class SimObject(object): # Specify metaclass. Any class inheriting from SimObject will # get this metaclass. __metaclass__ = MetaSimObject type = 'SimObject' abstract = True @classmethod def export_method_cxx_predecls(cls, code): code(''' #include <Python.h> #include "sim/serialize.hh" #include "sim/sim_object.hh" ''') @classmethod def export_method_swig_predecls(cls, code): code(''' %include <std_string.i> ''') @classmethod def export_methods(cls, code): code(''' enum State { Running, Draining, Drained }; void init(); void loadState(Checkpoint *cp); void initState(); void regStats(); void regFormulas(); void resetStats(); void startup(); unsigned int drain(Event *drain_event); void resume(); void switchOut(); void takeOverFrom(BaseCPU *cpu); ''') # Initialize new instance. For objects with SimObject-valued # children, we need to recursively clone the classes represented # by those param values as well in a consistent "deep copy"-style # fashion. That is, we want to make sure that each instance is # cloned only once, and that if there are multiple references to # the same original object, we end up with the corresponding # cloned references all pointing to the same cloned instance. def __init__(self, **kwargs): ancestor = kwargs.get('_ancestor') memo_dict = kwargs.get('_memo') if memo_dict is None: # prepare to memoize any recursively instantiated objects memo_dict = {} elif ancestor: # memoize me now to avoid problems with recursive calls memo_dict[ancestor] = self if not ancestor: ancestor = self.__class__ ancestor._instantiated = True # initialize required attributes self._parent = None self._name = None self._ccObject = None # pointer to C++ object self._ccParams = None self._instantiated = False # really "cloned" # Clone children specified at class level. 
No need for a # multidict here since we will be cloning everything. # Do children before parameter values so that children that # are also param values get cloned properly. self._children = {} for key,val in ancestor._children.iteritems(): self.add_child(key, val(_memo=memo_dict)) # Inherit parameter values from class using multidict so # individual value settings can be overridden but we still # inherit late changes to non-overridden class values. self._values = multidict(ancestor._values) # clone SimObject-valued parameters for key,val in ancestor._values.iteritems(): val = tryAsSimObjectOrVector(val) if val is not None: self._values[key] = val(_memo=memo_dict) # clone port references. no need to use a multidict here # since we will be creating new references for all ports. self._port_refs = {} for key,val in ancestor._port_refs.iteritems(): self._port_refs[key] = val.clone(self, memo_dict) # apply attribute assignments from keyword args, if any for key,val in kwargs.iteritems(): setattr(self, key, val) # "Clone" the current instance by creating another instance of # this instance's class, but that inherits its parameter values # and port mappings from the current instance. If we're in a # "deep copy" recursive clone, check the _memo dict to see if # we've already cloned this instance. def __call__(self, **kwargs): memo_dict = kwargs.get('_memo') if memo_dict is None: # no memo_dict: must be top-level clone operation. # this is only allowed at the root of a hierarchy if self._parent: raise RuntimeError, "attempt to clone object %s " \ "not at the root of a tree (parent = %s)" \ % (self, self._parent) # create a new dict and use that. memo_dict = {} kwargs['_memo'] = memo_dict elif memo_dict.has_key(self): # clone already done & memoized return memo_dict[self] return self.__class__(_ancestor = self, **kwargs) def _get_port_ref(self, attr): # Return reference that can be assigned to another port # via __setattr__. 
There is only ever one reference # object per port, but we create them lazily here. ref = self._port_refs.get(attr) if not ref: ref = self._ports[attr].makeRef(self) self._port_refs[attr] = ref return ref def __getattr__(self, attr): if self._ports.has_key(attr): return self._get_port_ref(attr) if self._values.has_key(attr): return self._values[attr] if self._children.has_key(attr): return self._children[attr] # If the attribute exists on the C++ object, transparently # forward the reference there. This is typically used for # SWIG-wrapped methods such as init(), regStats(), # regFormulas(), resetStats(), startup(), drain(), and # resume(). if self._ccObject and hasattr(self._ccObject, attr): return getattr(self._ccObject, attr) raise AttributeError, "object '%s' has no attribute '%s'" \ % (self.__class__.__name__, attr) # Set attribute (called on foo.attr = value when foo is an # instance of class cls). def __setattr__(self, attr, value): # normal processing for private attributes if attr.startswith('_'): object.__setattr__(self, attr, value) return if self._ports.has_key(attr): # set up port connection self._get_port_ref(attr).connect(value) return if isSimObjectOrSequence(value) and self._instantiated: raise RuntimeError, \ "cannot set SimObject parameter '%s' after\n" \ " instance been cloned %s" % (attr, `self`) param = self._params.get(attr) if param: try: value = param.convert(value) except Exception, e: msg = "%s\nError setting param %s.%s to %s\n" % \ (e, self.__class__.__name__, attr, value) e.args = (msg, ) raise self._values[attr] = value # implicitly parent unparented objects assigned as params if isSimObjectOrVector(value) and not value.has_parent(): self.add_child(attr, value) return # if RHS is a SimObject, it's an implicit child assignment if isSimObjectOrSequence(value): self.add_child(attr, value) return # no valid assignment... 
raise exception raise AttributeError, "Class %s has no parameter %s" \ % (self.__class__.__name__, attr) # this hack allows tacking a '[0]' onto parameters that may or may # not be vectors, and always getting the first element (e.g. cpus) def __getitem__(self, key): if key == 0: return self raise TypeError, "Non-zero index '%s' to SimObject" % key # Also implemented by SimObjectVector def clear_parent(self, old_parent): assert self._parent is old_parent self._parent = None # Also implemented by SimObjectVector def set_parent(self, parent, name): self._parent = parent self._name = name # Also implemented by SimObjectVector def get_name(self): return self._name # Also implemented by SimObjectVector def has_parent(self): return self._parent is not None # clear out child with given name. This code is not likely to be exercised. # See comment in add_child. def clear_child(self, name): child = self._children[name] child.clear_parent(self) del self._children[name] # Add a new child to this object. def add_child(self, name, child): child = coerceSimObjectOrVector(child) if child.has_parent(): print "warning: add_child('%s'): child '%s' already has parent" % \ (name, child.get_name()) if self._children.has_key(name): # This code path had an undiscovered bug that would make it fail # at runtime. It had been here for a long time and was only # exposed by a buggy script. Changes here will probably not be # exercised without specialized testing. self.clear_child(name) child.set_parent(self, name) self._children[name] = child # Take SimObject-valued parameters that haven't been explicitly # assigned as children and make them children of the object that # they were assigned to as a parameter value. This guarantees # that when we instantiate all the parameter objects we're still # inside the configuration hierarchy. 
def adoptOrphanParams(self): for key,val in self._values.iteritems(): if not isSimObjectVector(val) and isSimObjectSequence(val): # need to convert raw SimObject sequences to # SimObjectVector class so we can call has_parent() val = SimObjectVector(val) self._values[key] = val if isSimObjectOrVector(val) and not val.has_parent(): print "warning: %s adopting orphan SimObject param '%s'" \ % (self, key) self.add_child(key, val) def path(self): if not self._parent: return '<orphan %s>' % self.__class__ ppath = self._parent.path() if ppath == 'root': return self._name return ppath + "." + self._name def __str__(self): return self.path() def ini_str(self): return self.path() def find_any(self, ptype): if isinstance(self, ptype): return self, True found_obj = None for child in self._children.itervalues(): if isinstance(child, ptype): if found_obj != None and child != found_obj: raise AttributeError, \ 'parent.any matched more than one: %s %s' % \ (found_obj.path, child.path) found_obj = child # search param space for pname,pdesc in self._params.iteritems(): if issubclass(pdesc.ptype, ptype): match_obj = self._values[pname] if found_obj != None and found_obj != match_obj: raise AttributeError, \ 'parent.any matched more than one: %s and %s' % (found_obj.path, match_obj.path) found_obj = match_obj return found_obj, found_obj != None def find_all(self, ptype): all = {} # search children for child in self._children.itervalues(): if isinstance(child, ptype) and not isproxy(child) and \ not isNullPointer(child): all[child] = True if isSimObject(child): # also add results from the child itself child_all, done = child.find_all(ptype) all.update(dict(zip(child_all, [done] * len(child_all)))) # search param space for pname,pdesc in self._params.iteritems(): if issubclass(pdesc.ptype, ptype): match_obj = self._values[pname] if not isproxy(match_obj) and not isNullPointer(match_obj): all[match_obj] = True return all.keys(), True def unproxy(self, base): return self def 
unproxyParams(self): for param in self._params.iterkeys(): value = self._values.get(param) if value != None and isproxy(value): try: value = value.unproxy(self) except: print "Error in unproxying param '%s' of %s" % \ (param, self.path()) raise setattr(self, param, value) # Unproxy ports in sorted order so that 'append' operations on # vector ports are done in a deterministic fashion. port_names = self._ports.keys() port_names.sort() for port_name in port_names: port = self._port_refs.get(port_name) if port != None: port.unproxy(self) def print_ini(self, ini_file): print >>ini_file, '[' + self.path() + ']' # .ini section header instanceDict[self.path()] = self if hasattr(self, 'type'): print >>ini_file, 'type=%s' % self.type if len(self._children.keys()): print >>ini_file, 'children=%s' % \ ' '.join(self._children[n].get_name() \ for n in sorted(self._children.keys())) for param in sorted(self._params.keys()): value = self._values.get(param) if value != None: print >>ini_file, '%s=%s' % (param, self._values[param].ini_str()) for port_name in sorted(self._ports.keys()): port = self._port_refs.get(port_name, None) if port != None: print >>ini_file, '%s=%s' % (port_name, port.ini_str()) print >>ini_file # blank line between objects # generate a tree of dictionaries expressing all the parameters in the # instantiated system for use by scripts that want to do power, thermal # visualization, and other similar tasks def get_config_as_dict(self): d = attrdict() if hasattr(self, 'type'): d.type = self.type if hasattr(self, 'cxx_class'): d.cxx_class = self.cxx_class # Add the name and path of this object to be able to link to # the stats d.name = self.get_name() d.path = self.path() for param in sorted(self._params.keys()): value = self._values.get(param) if value != None: try: # Use native type for those supported by JSON and # strings for everything else. 
skipkeys=True seems # to not work as well as one would hope if type(self._values[param].value) in \ [str, unicode, int, long, float, bool, None]: d[param] = self._values[param].value else: d[param] = str(self._values[param]) except AttributeError: pass for n in sorted(self._children.keys()): child = self._children[n] # Use the name of the attribute (and not get_name()) as # the key in the JSON dictionary to capture the hierarchy # in the Python code that assembled this system d[n] = child.get_config_as_dict() for port_name in sorted(self._ports.keys()): port = self._port_refs.get(port_name, None) if port != None: # Represent each port with a dictionary containing the # prominent attributes d[port_name] = port.get_config_as_dict() return d def getCCParams(self): if self._ccParams: return self._ccParams cc_params_struct = getattr(m5.internal.params, '%sParams' % self.type) cc_params = cc_params_struct() cc_params.pyobj = self cc_params.name = str(self) param_names = self._params.keys() param_names.sort() for param in param_names: value = self._values.get(param) if value is None: fatal("%s.%s without default or user set value", self.path(), param) value = value.getValue() if isinstance(self._params[param], VectorParamDesc): assert isinstance(value, list) vec = getattr(cc_params, param) assert not len(vec) for v in value: vec.append(v) else: setattr(cc_params, param, value) port_names = self._ports.keys() port_names.sort() for port_name in port_names: port = self._port_refs.get(port_name, None) if port != None: port_count = len(port) else: port_count = 0 setattr(cc_params, 'port_' + port_name + '_connection_count', port_count) self._ccParams = cc_params return self._ccParams # Get C++ object corresponding to this object, calling C++ if # necessary to construct it. Does *not* recursively create # children. 
def getCCObject(self): if not self._ccObject: # Make sure this object is in the configuration hierarchy if not self._parent and not isRoot(self): raise RuntimeError, "Attempt to instantiate orphan node" # Cycles in the configuration hierarchy are not supported. This # will catch the resulting recursion and stop. self._ccObject = -1 params = self.getCCParams() self._ccObject = params.create() elif self._ccObject == -1: raise RuntimeError, "%s: Cycle found in configuration hierarchy." \ % self.path() return self._ccObject def descendants(self): yield self for child in self._children.itervalues(): for obj in child.descendants(): yield obj # Call C++ to create C++ object corresponding to this object def createCCObject(self): self.getCCParams() self.getCCObject() # force creation def getValue(self): return self.getCCObject() # Create C++ port connections corresponding to the connections in # _port_refs def connectPorts(self): for portRef in self._port_refs.itervalues(): portRef.ccConnect() def getMemoryMode(self): if not isinstance(self, m5.objects.System): return None return self._ccObject.getMemoryMode() def changeTiming(self, mode): if isinstance(self, m5.objects.System): # i don't know if there's a better way to do this - calling # setMemoryMode directly from self._ccObject results in calling # SimObject::setMemoryMode, not the System::setMemoryMode self._ccObject.setMemoryMode(mode) def takeOverFrom(self, old_cpu): self._ccObject.takeOverFrom(old_cpu._ccObject) # Function to provide to C++ so it can look up instances based on paths def resolveSimObject(name): obj = instanceDict[name] return obj.getCCObject() def isSimObject(value): return isinstance(value, SimObject) def isSimObjectClass(value): return issubclass(value, SimObject) def isSimObjectVector(value): return isinstance(value, SimObjectVector) def isSimObjectSequence(value): if not isinstance(value, (list, tuple)) or len(value) == 0: return False for val in value: if not isNullPointer(val) and not 
isSimObject(val): return False return True def isSimObjectOrSequence(value): return isSimObject(value) or isSimObjectSequence(value) def isRoot(obj): from m5.objects import Root return obj and obj is Root.getInstance() def isSimObjectOrVector(value): return isSimObject(value) or isSimObjectVector(value) def tryAsSimObjectOrVector(value): if isSimObjectOrVector(value): return value if isSimObjectSequence(value): return SimObjectVector(value) return None def coerceSimObjectOrVector(value): value = tryAsSimObjectOrVector(value) if value is None: raise TypeError, "SimObject or SimObjectVector expected" return value baseClasses = allClasses.copy() baseInstances = instanceDict.copy() def clear(): global allClasses, instanceDict allClasses = baseClasses.copy() instanceDict = baseInstances.copy() # __all__ defines the list of symbols that get exported when # 'from config import *' is invoked. Try to keep this reasonably # short to avoid polluting other namespaces. __all__ = [ 'SimObject' ]
bsd-3-clause
sgzsh269/django
tests/auth_tests/test_auth_backends.py
8
27450
from __future__ import unicode_literals from datetime import date from django.contrib.auth import ( BACKEND_SESSION_KEY, SESSION_KEY, authenticate, get_user, signals, ) from django.contrib.auth.backends import ModelBackend from django.contrib.auth.hashers import MD5PasswordHasher from django.contrib.auth.models import AnonymousUser, Group, Permission, User from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ImproperlyConfigured, PermissionDenied from django.http import HttpRequest from django.test import ( SimpleTestCase, TestCase, mock, modify_settings, override_settings, ) from .models import ( CustomPermissionsUser, CustomUser, CustomUserWithoutIsActiveField, ExtensionUser, UUIDUser, ) class CountingMD5PasswordHasher(MD5PasswordHasher): """Hasher that counts how many times it computes a hash.""" calls = 0 def encode(self, *args, **kwargs): type(self).calls += 1 return super(CountingMD5PasswordHasher, self).encode(*args, **kwargs) class BaseModelBackendTest(object): """ A base class for tests that need to validate the ModelBackend with different User models. Subclasses should define a class level UserModel attribute, and a create_users() method to construct two users for test purposes. 
""" backend = 'django.contrib.auth.backends.ModelBackend' def setUp(self): self.patched_settings = modify_settings( AUTHENTICATION_BACKENDS={'append': self.backend}, ) self.patched_settings.enable() self.create_users() def tearDown(self): self.patched_settings.disable() # The custom_perms test messes with ContentTypes, which will # be cached; flush the cache to ensure there are no side effects # Refs #14975, #14925 ContentType.objects.clear_cache() def test_has_perm(self): user = self.UserModel._default_manager.get(pk=self.user.pk) self.assertIs(user.has_perm('auth.test'), False) user.is_staff = True user.save() self.assertIs(user.has_perm('auth.test'), False) user.is_superuser = True user.save() self.assertIs(user.has_perm('auth.test'), True) user.is_staff = True user.is_superuser = True user.is_active = False user.save() self.assertIs(user.has_perm('auth.test'), False) def test_custom_perms(self): user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) # reloading user to purge the _perm_cache user = self.UserModel._default_manager.get(pk=self.user.pk) self.assertEqual(user.get_all_permissions(), {'auth.test'}) self.assertEqual(user.get_group_permissions(), set()) self.assertIs(user.has_module_perms('Group'), False) self.assertIs(user.has_module_perms('auth'), True) perm = Permission.objects.create(name='test2', content_type=content_type, codename='test2') user.user_permissions.add(perm) perm = Permission.objects.create(name='test3', content_type=content_type, codename='test3') user.user_permissions.add(perm) user = self.UserModel._default_manager.get(pk=self.user.pk) self.assertEqual(user.get_all_permissions(), {'auth.test2', 'auth.test', 'auth.test3'}) self.assertIs(user.has_perm('test'), False) self.assertIs(user.has_perm('auth.test'), True) 
self.assertIs(user.has_perms(['auth.test2', 'auth.test3']), True) perm = Permission.objects.create(name='test_group', content_type=content_type, codename='test_group') group = Group.objects.create(name='test_group') group.permissions.add(perm) user.groups.add(group) user = self.UserModel._default_manager.get(pk=self.user.pk) exp = {'auth.test2', 'auth.test', 'auth.test3', 'auth.test_group'} self.assertEqual(user.get_all_permissions(), exp) self.assertEqual(user.get_group_permissions(), {'auth.test_group'}) self.assertIs(user.has_perms(['auth.test3', 'auth.test_group']), True) user = AnonymousUser() self.assertIs(user.has_perm('test'), False) self.assertIs(user.has_perms(['auth.test2', 'auth.test3']), False) def test_has_no_object_perm(self): """Regressiontest for #12462""" user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) self.assertIs(user.has_perm('auth.test', 'object'), False) self.assertEqual(user.get_all_permissions('object'), set()) self.assertIs(user.has_perm('auth.test'), True) self.assertEqual(user.get_all_permissions(), {'auth.test'}) def test_anonymous_has_no_permissions(self): """ #17903 -- Anonymous users shouldn't have permissions in ModelBackend.get_(all|user|group)_permissions(). 
""" backend = ModelBackend() user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user') group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group') user.user_permissions.add(user_perm) group = Group.objects.create(name='test_group') user.groups.add(group) group.permissions.add(group_perm) self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'}) self.assertEqual(backend.get_user_permissions(user), {'auth.test_user', 'auth.test_group'}) self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'}) with mock.patch.object(self.UserModel, 'is_anonymous', True): self.assertEqual(backend.get_all_permissions(user), set()) self.assertEqual(backend.get_user_permissions(user), set()) self.assertEqual(backend.get_group_permissions(user), set()) def test_inactive_has_no_permissions(self): """ #17903 -- Inactive users shouldn't have permissions in ModelBackend.get_(all|user|group)_permissions(). 
""" backend = ModelBackend() user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user') group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group') user.user_permissions.add(user_perm) group = Group.objects.create(name='test_group') user.groups.add(group) group.permissions.add(group_perm) self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'}) self.assertEqual(backend.get_user_permissions(user), {'auth.test_user', 'auth.test_group'}) self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'}) user.is_active = False user.save() self.assertEqual(backend.get_all_permissions(user), set()) self.assertEqual(backend.get_user_permissions(user), set()) self.assertEqual(backend.get_group_permissions(user), set()) def test_get_all_superuser_permissions(self): """A superuser has all permissions. Refs #14795.""" user = self.UserModel._default_manager.get(pk=self.superuser.pk) self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all())) @override_settings(PASSWORD_HASHERS=['auth_tests.test_auth_backends.CountingMD5PasswordHasher']) def test_authentication_timing(self): """Hasher is run once regardless of whether the user exists. Refs #20760.""" # Re-set the password, because this tests overrides PASSWORD_HASHERS self.user.set_password('test') self.user.save() CountingMD5PasswordHasher.calls = 0 username = getattr(self.user, self.UserModel.USERNAME_FIELD) authenticate(username=username, password='test') self.assertEqual(CountingMD5PasswordHasher.calls, 1) CountingMD5PasswordHasher.calls = 0 authenticate(username='no_such_user', password='test') self.assertEqual(CountingMD5PasswordHasher.calls, 1) class ModelBackendTest(BaseModelBackendTest, TestCase): """ Tests for the ModelBackend using the default User model. 
""" UserModel = User user_credentials = {'username': 'test', 'password': 'test'} def create_users(self): self.user = User.objects.create_user(email='test@example.com', **self.user_credentials) self.superuser = User.objects.create_superuser( username='test2', email='test2@example.com', password='test', ) def test_authenticate_inactive(self): """ An inactive user can't authenticate. """ self.assertEqual(authenticate(**self.user_credentials), self.user) self.user.is_active = False self.user.save() self.assertIsNone(authenticate(**self.user_credentials)) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithoutIsActiveField') def test_authenticate_user_without_is_active_field(self): """ A custom user without an `is_active` field is allowed to authenticate. """ user = CustomUserWithoutIsActiveField.objects._create_user( username='test', email='test@example.com', password='test', ) self.assertEqual(authenticate(username='test', password='test'), user) @override_settings(AUTH_USER_MODEL='auth_tests.ExtensionUser') class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase): """ Tests for the ModelBackend using the custom ExtensionUser model. This isn't a perfect test, because both the User and ExtensionUser are synchronized to the database, which wouldn't ordinary happen in production. As a result, it doesn't catch errors caused by the non- existence of the User table. The specific problem is queries on .filter(groups__user) et al, which makes an implicit assumption that the user model is called 'User'. In production, the auth.User table won't exist, so the requested join won't exist either; in testing, the auth.User *does* exist, and so does the join. However, the join table won't contain any useful data; for testing, we check that the data we expect actually does exist. 
""" UserModel = ExtensionUser def create_users(self): self.user = ExtensionUser._default_manager.create_user( username='test', email='test@example.com', password='test', date_of_birth=date(2006, 4, 25) ) self.superuser = ExtensionUser._default_manager.create_superuser( username='test2', email='test2@example.com', password='test', date_of_birth=date(1976, 11, 8) ) @override_settings(AUTH_USER_MODEL='auth_tests.CustomPermissionsUser') class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase): """ Tests for the ModelBackend using the CustomPermissionsUser model. As with the ExtensionUser test, this isn't a perfect test, because both the User and CustomPermissionsUser are synchronized to the database, which wouldn't ordinary happen in production. """ UserModel = CustomPermissionsUser def create_users(self): self.user = CustomPermissionsUser._default_manager.create_user( email='test@example.com', password='test', date_of_birth=date(2006, 4, 25) ) self.superuser = CustomPermissionsUser._default_manager.create_superuser( email='test2@example.com', password='test', date_of_birth=date(1976, 11, 8) ) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUser') class CustomUserModelBackendAuthenticateTest(TestCase): """ Tests that the model backend can accept a credentials kwarg labeled with custom user model's USERNAME_FIELD. """ def test_authenticate(self): test_user = CustomUser._default_manager.create_user( email='test@example.com', password='test', date_of_birth=date(2006, 4, 25) ) authenticated_user = authenticate(email='test@example.com', password='test') self.assertEqual(test_user, authenticated_user) @override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser') class UUIDUserTests(TestCase): def test_login(self): """ A custom user with a UUID primary key should be able to login. 
""" user = UUIDUser.objects.create_user(username='uuid', password='test') self.assertTrue(self.client.login(username='uuid', password='test')) self.assertEqual(UUIDUser.objects.get(pk=self.client.session[SESSION_KEY]), user) class TestObj(object): pass class SimpleRowlevelBackend(object): def has_perm(self, user, perm, obj=None): if not obj: return # We only support row level perms if isinstance(obj, TestObj): if user.username == 'test2': return True elif user.is_anonymous and perm == 'anon': return True elif not user.is_active and perm == 'inactive': return True return False def has_module_perms(self, user, app_label): if not user.is_anonymous and not user.is_active: return False return app_label == "app1" def get_all_permissions(self, user, obj=None): if not obj: return [] # We only support row level perms if not isinstance(obj, TestObj): return ['none'] if user.is_anonymous: return ['anon'] if user.username == 'test2': return ['simple', 'advanced'] else: return ['simple'] def get_group_permissions(self, user, obj=None): if not obj: return # We only support row level perms if not isinstance(obj, TestObj): return ['none'] if 'test_group' in [group.name for group in user.groups.all()]: return ['group_perm'] else: return ['none'] @modify_settings(AUTHENTICATION_BACKENDS={ 'append': 'auth_tests.test_auth_backends.SimpleRowlevelBackend', }) class RowlevelBackendTest(TestCase): """ Tests for auth backend that supports object level permissions """ def setUp(self): self.user1 = User.objects.create_user('test', 'test@example.com', 'test') self.user2 = User.objects.create_user('test2', 'test2@example.com', 'test') self.user3 = User.objects.create_user('test3', 'test3@example.com', 'test') def tearDown(self): # The get_group_permissions test messes with ContentTypes, which will # be cached; flush the cache to ensure there are no side effects # Refs #14975, #14925 ContentType.objects.clear_cache() def test_has_perm(self): self.assertIs(self.user1.has_perm('perm', TestObj()), 
False) self.assertIs(self.user2.has_perm('perm', TestObj()), True) self.assertIs(self.user2.has_perm('perm'), False) self.assertIs(self.user2.has_perms(['simple', 'advanced'], TestObj()), True) self.assertIs(self.user3.has_perm('perm', TestObj()), False) self.assertIs(self.user3.has_perm('anon', TestObj()), False) self.assertIs(self.user3.has_perms(['simple', 'advanced'], TestObj()), False) def test_get_all_permissions(self): self.assertEqual(self.user1.get_all_permissions(TestObj()), {'simple'}) self.assertEqual(self.user2.get_all_permissions(TestObj()), {'simple', 'advanced'}) self.assertEqual(self.user2.get_all_permissions(), set()) def test_get_group_permissions(self): group = Group.objects.create(name='test_group') self.user3.groups.add(group) self.assertEqual(self.user3.get_group_permissions(TestObj()), {'group_perm'}) @override_settings( AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'], ) class AnonymousUserBackendTest(SimpleTestCase): """ Tests for AnonymousUser delegating to backend. """ def setUp(self): self.user1 = AnonymousUser() def test_has_perm(self): self.assertIs(self.user1.has_perm('perm', TestObj()), False) self.assertIs(self.user1.has_perm('anon', TestObj()), True) def test_has_perms(self): self.assertIs(self.user1.has_perms(['anon'], TestObj()), True) self.assertIs(self.user1.has_perms(['anon', 'perm'], TestObj()), False) def test_has_module_perms(self): self.assertIs(self.user1.has_module_perms("app1"), True) self.assertIs(self.user1.has_module_perms("app2"), False) def test_get_all_permissions(self): self.assertEqual(self.user1.get_all_permissions(TestObj()), {'anon'}) @override_settings(AUTHENTICATION_BACKENDS=[]) class NoBackendsTest(TestCase): """ Tests that an appropriate error is raised if no auth backends are provided. 
""" def setUp(self): self.user = User.objects.create_user('test', 'test@example.com', 'test') def test_raises_exception(self): with self.assertRaises(ImproperlyConfigured): self.user.has_perm(('perm', TestObj())) @override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend']) class InActiveUserBackendTest(TestCase): """ Tests for an inactive user """ def setUp(self): self.user1 = User.objects.create_user('test', 'test@example.com', 'test') self.user1.is_active = False self.user1.save() def test_has_perm(self): self.assertIs(self.user1.has_perm('perm', TestObj()), False) self.assertIs(self.user1.has_perm('inactive', TestObj()), True) def test_has_module_perms(self): self.assertIs(self.user1.has_module_perms("app1"), False) self.assertIs(self.user1.has_module_perms("app2"), False) class PermissionDeniedBackend(object): """ Always raises PermissionDenied in `authenticate`, `has_perm` and `has_module_perms`. """ def authenticate(self, username=None, password=None): raise PermissionDenied def has_perm(self, user_obj, perm, obj=None): raise PermissionDenied def has_module_perms(self, user_obj, app_label): raise PermissionDenied class PermissionDeniedBackendTest(TestCase): """ Tests that other backends are not checked once a backend raises PermissionDenied """ backend = 'auth_tests.test_auth_backends.PermissionDeniedBackend' def setUp(self): self.user1 = User.objects.create_user('test', 'test@example.com', 'test') self.user_login_failed = [] signals.user_login_failed.connect(self.user_login_failed_listener) def tearDown(self): signals.user_login_failed.disconnect(self.user_login_failed_listener) def user_login_failed_listener(self, sender, credentials, **kwargs): self.user_login_failed.append(credentials) @modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend}) def test_permission_denied(self): "user is not authenticated after a backend raises permission denied #2550" self.assertIsNone(authenticate(username='test', 
password='test')) # user_login_failed signal is sent. self.assertEqual(self.user_login_failed, [{'password': '********************', 'username': 'test'}]) @modify_settings(AUTHENTICATION_BACKENDS={'append': backend}) def test_authenticates(self): self.assertEqual(authenticate(username='test', password='test'), self.user1) @modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend}) def test_has_perm_denied(self): content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') self.user1.user_permissions.add(perm) self.assertIs(self.user1.has_perm('auth.test'), False) self.assertIs(self.user1.has_module_perms('auth'), False) @modify_settings(AUTHENTICATION_BACKENDS={'append': backend}) def test_has_perm(self): content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') self.user1.user_permissions.add(perm) self.assertIs(self.user1.has_perm('auth.test'), True) self.assertIs(self.user1.has_module_perms('auth'), True) class NewModelBackend(ModelBackend): pass class ChangedBackendSettingsTest(TestCase): """ Tests for changes in the settings.AUTHENTICATION_BACKENDS """ backend = 'auth_tests.test_auth_backends.NewModelBackend' TEST_USERNAME = 'test_user' TEST_PASSWORD = 'test_password' TEST_EMAIL = 'test@example.com' def setUp(self): User.objects.create_user(self.TEST_USERNAME, self.TEST_EMAIL, self.TEST_PASSWORD) @override_settings(AUTHENTICATION_BACKENDS=[backend]) def test_changed_backend_settings(self): """ Tests that removing a backend configured in AUTHENTICATION_BACKENDS make already logged-in users disconnect. 
""" # Get a session for the test user self.assertTrue(self.client.login( username=self.TEST_USERNAME, password=self.TEST_PASSWORD) ) # Prepare a request object request = HttpRequest() request.session = self.client.session # Remove NewModelBackend with self.settings(AUTHENTICATION_BACKENDS=[ 'django.contrib.auth.backends.ModelBackend']): # Get the user from the request user = get_user(request) # Assert that the user retrieval is successful and the user is # anonymous as the backend is not longer available. self.assertIsNotNone(user) self.assertTrue(user.is_anonymous) class TypeErrorBackend(object): """ Always raises TypeError. """ def authenticate(self, username=None, password=None): raise TypeError class TypeErrorBackendTest(TestCase): """ Tests that a TypeError within a backend is propagated properly. Regression test for ticket #18171 """ backend = 'auth_tests.test_auth_backends.TypeErrorBackend' def setUp(self): self.user1 = User.objects.create_user('test', 'test@example.com', 'test') @override_settings(AUTHENTICATION_BACKENDS=[backend]) def test_type_error_raised(self): with self.assertRaises(TypeError): authenticate(username='test', password='test') class ImproperlyConfiguredUserModelTest(TestCase): """ Tests that an exception from within get_user_model is propagated and doesn't raise an UnboundLocalError. 
Regression test for ticket #21439 """ def setUp(self): self.user1 = User.objects.create_user('test', 'test@example.com', 'test') self.client.login( username='test', password='test' ) @override_settings(AUTH_USER_MODEL='thismodel.doesntexist') def test_does_not_shadow_exception(self): # Prepare a request object request = HttpRequest() request.session = self.client.session with self.assertRaises(ImproperlyConfigured): get_user(request) class ImportedModelBackend(ModelBackend): pass class CustomModelBackend(ModelBackend): pass class OtherModelBackend(ModelBackend): pass class ImportedBackendTests(TestCase): """ #23925 - The backend path added to the session should be the same as the one defined in AUTHENTICATION_BACKENDS setting. """ backend = 'auth_tests.backend_alias.ImportedModelBackend' @override_settings(AUTHENTICATION_BACKENDS=[backend]) def test_backend_path(self): username = 'username' password = 'password' User.objects.create_user(username, 'email', password) self.assertTrue(self.client.login(username=username, password=password)) request = HttpRequest() request.session = self.client.session self.assertEqual(request.session[BACKEND_SESSION_KEY], self.backend) class SelectingBackendTests(TestCase): backend = 'auth_tests.test_auth_backends.CustomModelBackend' other_backend = 'auth_tests.test_auth_backends.OtherModelBackend' username = 'username' password = 'password' def assertBackendInSession(self, backend): request = HttpRequest() request.session = self.client.session self.assertEqual(request.session[BACKEND_SESSION_KEY], backend) @override_settings(AUTHENTICATION_BACKENDS=[backend]) def test_backend_path_login_without_authenticate_single_backend(self): user = User.objects.create_user(self.username, 'email', self.password) self.client._login(user) self.assertBackendInSession(self.backend) @override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend]) def test_backend_path_login_without_authenticate_multiple_backends(self): user = 
User.objects.create_user(self.username, 'email', self.password) expected_message = ( 'You have multiple authentication backends configured and ' 'therefore must provide the `backend` argument or set the ' '`backend` attribute on the user.' ) with self.assertRaisesMessage(ValueError, expected_message): self.client._login(user) @override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend]) def test_backend_path_login_with_explicit_backends(self): user = User.objects.create_user(self.username, 'email', self.password) self.client._login(user, self.other_backend) self.assertBackendInSession(self.other_backend) @override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend']) class AllowAllUsersModelBackendTest(TestCase): """ Inactive users may authenticate with the AllowAllUsersModelBackend. """ user_credentials = {'username': 'test', 'password': 'test'} @classmethod def setUpTestData(cls): cls.user = User.objects.create_user( email='test@example.com', is_active=False, **cls.user_credentials ) def test_authenticate(self): self.assertFalse(self.user.is_active) self.assertEqual(authenticate(**self.user_credentials), self.user) def test_get_user(self): self.client.force_login(self.user) request = HttpRequest() request.session = self.client.session user = get_user(request) self.assertEqual(user, self.user)
bsd-3-clause
abhishek-ch/hue
desktop/core/ext-py/lxml/src/lxml/html/_diffcommand.py
36
2082
import optparse import sys import re import os from lxml.html.diff import htmldiff description = """\ """ parser = optparse.OptionParser( usage="%prog [OPTIONS] FILE1 FILE2\n" "%prog --annotate [OPTIONS] INFO1 FILE1 INFO2 FILE2 ...", description=description, ) parser.add_option( '-o', '--output', metavar="FILE", dest="output", default="-", help="File to write the difference to", ) parser.add_option( '-a', '--annotation', action="store_true", dest="annotation", help="Do an annotation") def main(args=None): if args is None: args = sys.argv[1:] options, args = parser.parse_args(args) if options.annotation: return annotate(options, args) if len(args) != 2: print 'Error: you must give two files' parser.print_help() sys.exit(1) file1, file2 = args input1 = read_file(file1) input2 = read_file(file2) body1 = split_body(input1)[1] pre, body2, post = split_body(input2) result = htmldiff(body1, body2) result = pre + result + post if options.output == '-': if not result.endswith('\n'): result += '\n' sys.stdout.write(result) else: f = open(options.output, 'wb') f.write(result) f.close() def read_file(filename): if filename == '-': c = sys.stdin.read() elif not os.path.exists(filename): raise OSError( "Input file %s does not exist" % filename) else: f = open(filename, 'rb') c = f.read() f.close() return c body_start_re = re.compile( r"<body.*?>", re.I|re.S) body_end_re = re.compile( r"</body.*?>", re.I|re.S) def split_body(html): match = body_start_re.search(html) if match: pre = html[:match.end()] html = html[match.end():] match = body_end_re.search(html) if match: post = html[match.start():] html = html[:match.start()] return pre, html, post def annotate(options, args): print "Not yet implemented" sys.exit(1)
apache-2.0
yushiro/svg-edit
extras/tojson.py
48
1598
import sys, json, codecs infile = codecs.open(sys.argv[1], "r", "utf-8") outfile = codecs.open(sys.argv[1][:-3], "w", "utf-8") indata = infile.readlines() look = False out = "[\n" js = [] jss = "" def readfrompos(pos): global out global js if (indata[pos].startswith("#, -x-svg-edit-title")) or (indata[pos].startswith("#, -x-svg-edit-textContent")): out += '{' out += '"id": ' out += " ".join(indata[pos+1].split()[1:]) + ", " out += '"' + line[15:].strip() + '": ' out += " ".join(indata[pos+2].split()[1:]) out += '}' elif (indata[pos].startswith("#, -x-svg-edit-both")): out += '{' out += '"id": ' out += " ".join(indata[pos+1].split()[1:]) + ", " out += '"textContent": ' out += '"' + " ".join(indata[pos+2].split()[1:]).split('|')[1] + ', ' out += '"title": ' out += " ".join(indata[pos+2].split()[1:]).split('|')[0] + '"' out += '}' elif (indata[pos].startswith("#, -x-svg-edit-js_strings")): js.append((" ".join(indata[pos+1].split()[1:]), " ".join(indata[pos+2].split()[1:]))) for pos, line in enumerate(indata): if (not look) and (line.startswith('# ---')): look = True marker = pos elif (look) and (line.startswith('#, -x-svg-edit')): readfrompos(pos) js.sort() for j in js: jss += " %s: %s,\n" % (j[0], j[1]) out += '{\n "js_strings": {\n' out += str(jss) out += ' "": ""\n }' out += "\n}" out += "\n]" out = out.replace('}{', '},\n{') outfile.write(out)
mit
Emergen/zivios-agent
modules/ntp.py
1
2447
""" * Copyright (c) 2008 Zivios, LLC. * * This file is part of Zivios. * * Zivios is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Zivios is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Zivios. If not, see <http://www.gnu.org/licenses/>. * * @package ZiviosAgent * @copyright Copyright (c) 2008 Zivios, LLC. (http://www.zivios.org) * @license http://www.zivios.org/legal/license * @version $Id: Exception.php 908 2008-08-25 11:03:00Z fkhan $ * @subpackage Core """ from twisted.web import xmlrpc import logging import os import popen2 import re import string import time import datetime from twisted.python import log import ZiviosAgent class ntp(ZiviosAgent.ZiviosAgent): def xmlrpc_addService(self): print 'ntp addService function called' def xmlrpc_serviceStatus(self): response,regexcode,exitcode = self.command("statusntpcommand") return (exitcode==0) def xmlrpc_stopService(self): response,regexcode,exitcode = self.command("stopntpcommand") return (exitcode==0) def xmlrpc_startService(self): response,regexcode,exitcode = self.command("startntpcommand") return (exitcode==0) def xmlrpc_currentTime(self): now = datetime.datetime.now() return now.ctime() def xmlrpc_getTimezone(self): tz,tzm = time.tzname return tzm; def xmlrpc_getsyncstatus(self): #sanitizing output! 
response,regexcode,exitcode = self.command("ntpq"); resp = response.split('\n') if (len(resp) <= 2): return -1 del resp[0:2] length = len(resp) del resp[length-1] retarray = [] for a in resp: a = a.lstrip() a = a.rstrip() joinarray = re.split('\s+',a) retarray.append(joinarray) return retarray def xmlrpc_getGmtOffset(self): return time.timezone/3600;
gpl-3.0
ancafarcas/superdesk-core
superdesk/datalayer.py
1
4241
# -*- coding: utf-8; -*- # # This file is part of Superdesk. # # Copyright 2013, 2014 Sourcefabric z.u. and contributors. # # For the full copyright and license information, please see the # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license import superdesk from eve.io.base import DataLayer from eve.io.mongo import Mongo from eve.utils import config, ParsedRequest from eve_elastic import Elastic, InvalidSearchString # noqa from flask import current_app from superdesk.lock import lock, unlock from superdesk.json_utils import SuperdeskJSONEncoder class SuperdeskDataLayer(DataLayer): """Superdesk Data Layer. Implements eve data layer interface, is used to make eve work with superdesk service layer. It handles app initialization and later it forwards eve calls to respective service. """ serializers = {} serializers.update(Mongo.serializers) serializers.update({'datetime': Elastic.serializers['datetime']}) def init_app(self, app): app.data = self # app.data must be set for locks to work self.mongo = Mongo(app) self.driver = self.mongo.driver self.storage = self.driver self.elastic = Elastic(app, serializer=SuperdeskJSONEncoder(), skip_index_init=True, retry_on_timeout=True) def pymongo(self, resource=None, prefix=None): return self.mongo.pymongo(resource, prefix) def init_elastic(self, app): """Init elastic index. It will create index and put mapping. It should run only once so locks are in place. Thus mongo must be already setup before running this. 
""" with app.app_context(): if lock('elastic', expire=10): try: self.elastic.init_index(app) finally: unlock('elastic') def find(self, resource, req, lookup): return superdesk.get_resource_service(resource).get(req=req, lookup=lookup) def find_all(self, resource, max_results=1000): req = ParsedRequest() req.max_results = max_results return self._backend(resource).find(resource, req, None) def find_one(self, resource, req, **lookup): return superdesk.get_resource_service(resource).find_one(req=req, **lookup) def find_one_raw(self, resource, _id): return self._backend(resource).find_one_raw(resource, _id) def find_list_of_ids(self, resource, ids, client_projection=None): return self._backend(resource).find_list_of_ids(resource, ids, client_projection) def insert(self, resource, docs, **kwargs): return superdesk.get_resource_service(resource).create(docs, **kwargs) def update(self, resource, id_, updates, original): return superdesk.get_resource_service(resource).update(id=id_, updates=updates, original=original) def update_all(self, resource, query, updates): datasource = self.datasource(resource) driver = self._backend(resource).driver collection = driver.db[datasource[0]] return collection.update(query, {'$set': updates}, multi=True) def replace(self, resource, id_, document, original): return superdesk.get_resource_service(resource).replace(id=id_, document=document, original=original) def remove(self, resource, lookup=None): if lookup is None: lookup = {} return superdesk.get_resource_service(resource).delete(lookup=lookup) def is_empty(self, resource): return self._backend(resource).is_empty(resource) def _search_backend(self, resource): if resource.endswith(current_app.config['VERSIONS']): return datasource = self.datasource(resource) backend = config.SOURCES.get(datasource[0], {}).get('search_backend', None) return getattr(self, backend) if backend is not None else None def _backend(self, resource): datasource = self.datasource(resource) backend = 
config.SOURCES.get(datasource[0], {'backend': 'mongo'}).get('backend', 'mongo') return getattr(self, backend) def get_mongo_collection(self, resource): return self.mongo.pymongo('users').db[resource]
agpl-3.0
chrisVdd/Time2web
vendor/sonata-project/exporter/docs/conf.py
63
7898
# -*- coding: utf-8 -*- # # IoC documentation build configuration file, created by # sphinx-quickstart on Fri Mar 29 01:43:00 2013. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sensio.sphinx.refinclude', 'sensio.sphinx.configurationblock', 'sensio.sphinx.phpcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Sonata Project ~ Exporter' copyright = u'2010-2014, Thomas Rabaix' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. #version = '0.0.1' # The full version, including alpha/beta/rc tags. #release = '0.0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
#language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- import sphinx_rtd_theme # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. 
#html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. 
htmlhelp_basename = 'doc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). #latex_documents = [ # ('index', 'PythonElement.tex', u'Python Documentation', # u'Thomas Rabaix', 'manual'), #] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples #(source start file, name, description, authors, manual section). #man_pages = [ # ('index', 'ioc', u'IoC Documentation', # [u'Thomas Rabaix'], 1) #] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) #texinfo_documents = [ # ('index', 'IoC', u'IoC Documentation', # u'Thomas Rabaix', 'IoC', 'One line description of project.', # 'Miscellaneous'), #] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. 
#texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote'
mit
SravanthiSinha/edx-platform
lms/djangoapps/support/tests/test_views.py
70
3760
""" Tests for support views. """ import ddt from django.test import TestCase from django.core.urlresolvers import reverse from student.roles import GlobalStaff, SupportStaffRole from student.tests.factories import UserFactory class SupportViewTestCase(TestCase): """ Base class for support view tests. """ USERNAME = "support" EMAIL = "support@example.com" PASSWORD = "support" def setUp(self): """Create a user and log in. """ super(SupportViewTestCase, self).setUp() self.user = UserFactory(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD) success = self.client.login(username=self.USERNAME, password=self.PASSWORD) self.assertTrue(success, msg="Could not log in") @ddt.ddt class SupportViewAccessTests(SupportViewTestCase): """ Tests for access control of support views. """ @ddt.data( ("support:index", GlobalStaff, True), ("support:index", SupportStaffRole, True), ("support:index", None, False), ("support:certificates", GlobalStaff, True), ("support:certificates", SupportStaffRole, True), ("support:certificates", None, False), ("support:refund", GlobalStaff, True), ("support:refund", SupportStaffRole, True), ("support:refund", None, False), ) @ddt.unpack def test_access(self, url_name, role, has_access): if role is not None: role().add_users(self.user) url = reverse(url_name) response = self.client.get(url) if has_access: self.assertEqual(response.status_code, 200) else: self.assertEqual(response.status_code, 403) @ddt.data("support:index", "support:certificates", "support:refund") def test_require_login(self, url_name): url = reverse(url_name) # Log out then try to retrieve the page self.client.logout() response = self.client.get(url) # Expect a redirect to the login page redirect_url = "{login_url}?next={original_url}".format( login_url=reverse("signin_user"), original_url=url, ) self.assertRedirects(response, redirect_url) class SupportViewIndexTests(SupportViewTestCase): """ Tests for the support index view. 
""" EXPECTED_URL_NAMES = [ "support:certificates", "support:refund", ] def setUp(self): """Make the user support staff. """ super(SupportViewIndexTests, self).setUp() SupportStaffRole().add_users(self.user) def test_index(self): response = self.client.get(reverse("support:index")) self.assertContains(response, "Support") # Check that all the expected links appear on the index page. for url_name in self.EXPECTED_URL_NAMES: self.assertContains(response, reverse(url_name)) class SupportViewCertificatesTests(SupportViewTestCase): """ Tests for the certificates support view. """ def setUp(self): """Make the user support staff. """ super(SupportViewCertificatesTests, self).setUp() SupportStaffRole().add_users(self.user) def test_certificates_no_query(self): # Check that an empty initial query is passed to the JavaScript client correctly. response = self.client.get(reverse("support:certificates")) self.assertContains(response, "userQuery: ''") def test_certificates_with_query(self): # Check that an initial query is passed to the JavaScript client. url = reverse("support:certificates") + "?query=student@example.com" response = self.client.get(url) self.assertContains(response, "userQuery: 'student@example.com'")
agpl-3.0
artdent/mingus-python3
unittest/test_Note.py
3
4184
#!/usr/bin/python # -*- coding: utf-8 -*- import sys sys.path += ['../'] from mingus.containers.Note import Note import unittest from mingus.containers.mt_exceptions import NoteFormatError class test_Note(unittest.TestCase): def setUp(self): self.c = Note('C', 5) self.c1 = Note('C') self.c2 = Note('C', 3) self.b4 = Note('B', 4) self.b5 = Note('B', 5) def test_cmp(self): self.assert_(self.c1 <= self.b5) self.assert_(self.c < self.b5) self.assert_(self.c1 < self.b5) self.assert_(self.c2 < self.b5) self.assert_(self.c > self.b4, '%s %s' % (self.c, self.b4)) self.assert_(self.c1 < self.b4) self.assert_(self.c2 < self.b4) self.assert_(self.b4 < self.b5) self.assert_(Note('C') > Note('Cb')) self.assert_(self.c > None) def test_eq(self): self.assert_(self.c != self.c1) self.assert_(self.c == self.c) self.assert_(Note('C') == Note('C')) self.assert_(self.c != None) def test_to_int(self): self.assertEqual(48, Note('C', 4)) self.assertEqual(47, Note('Cb', 4)) self.assertEqual(36, int(self.c2)) self.assertEqual(71, int(self.b5)) self.assertEqual(59, int(self.b4)) def test_set_note(self): n = Note() self.assert_(n.set_note('C', 5, {})) n.empty() self.assert_(n.set_note('C-5')) self.assert_(n.set_note('C', 5)) self.assert_(n.set_note('C#-12', 5)) self.assertRaises(NoteFormatError, n.set_note, 'H') self.assertRaises(NoteFormatError, n.set_note, 'C 23') self.assertRaises(NoteFormatError, n.set_note, 'C# 123') def test_to_hertz(self): self.assertEqual(Note('A', 0).to_hertz(), 27.5) self.assertEqual(Note('A', 1).to_hertz(), 55) self.assertEqual(Note('A', 2).to_hertz(), 110) self.assertEqual(Note('A', 3).to_hertz(), 220) self.assertEqual(Note('A', 4).to_hertz(), 440) self.assertEqual(Note('A', 5).to_hertz(), 880) self.assertEqual(Note('A', 6).to_hertz(), 1760) def test_from_hertz(self): a = Note() self.assertEqual(a.from_hertz(55.5), Note('A', 1)) self.assertEqual(a.from_hertz(110), Note('A', 2)) a.from_hertz(220) self.assertEqual(a, Note('A', 3)) a.from_hertz(440) 
self.assertEqual(a, Note('A', 4)) a.from_hertz(880) self.assertEqual(a, Note('A', 5)) a.from_hertz(1760) self.assertEqual(a, Note('A', 6)) def test_transpose(self): a = Note('C') a.transpose('3') self.assertEqual(Note('E'), a) a.transpose('b2') self.assertEqual(Note('F'), a) a.transpose('5') self.assertEqual(Note('C', 5), a) a.transpose('5', False) self.assertEqual(Note('F'), a) a = Note('G-5') a.transpose('5') self.assertEqual(Note('D-6'), a) a.transpose('5', False) self.assertEqual(Note('G-5'), a) a.transpose('5', False) self.assertEqual(Note('C-5'), a) def test_from_int(self): self.assertEqual(Note('C', 0), Note().from_int(0)) self.assertEqual(Note('C', 1), Note().from_int(12)) def test_measure(self): self.assert_(Note('C').measure(Note('D')) == 2) self.assert_(Note('D').measure(Note('C')) == -2) def test_to_shorthand(self): self.assert_(Note('C-0').to_shorthand() == 'C,,') self.assert_(Note('C-2').to_shorthand() == 'C') self.assert_(Note('C-3').to_shorthand() == 'c') self.assert_(Note('C-4').to_shorthand() == "c'") self.assert_(Note('C-9').to_shorthand() == "c''''''") def test_from_shorthand(self): self.assert_(Note().from_shorthand('C,,') == Note('C-0')) self.assert_(Note().from_shorthand('C') == Note('C-2')) self.assert_(Note().from_shorthand('c') == Note('C-3')) self.assert_(Note().from_shorthand("c'") == Note('C-4')) self.assert_(Note().from_shorthand("c''''''") == Note('C-9')) def suite(): return unittest.TestLoader().loadTestsFromTestCase(test_Note)
gpl-3.0
ff94315/hiwifi-openwrt-HC5661-HC5761
staging_dir/target-mipsel_r2_uClibc-0.9.33.2/usr/lib/python2.7/json/tests/test_encode_basestring_ascii.py
143
2004
from collections import OrderedDict from json.tests import PyTest, CTest CASES = [ (u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'), (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'), (u'controls', '"controls"'), (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'), (u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'), (u' s p a c e d ', '" s p a c e d "'), (u'\U0001d120', '"\\ud834\\udd20"'), (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'), ('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'), (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'), ('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'), (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'), (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'), (u"`1~!@#$%^&*()_+-={':[,]}|;.</>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.</>?"'), (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'), (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'), ] class TestEncodeBasestringAscii(object): def test_encode_basestring_ascii(self): fname = self.json.encoder.encode_basestring_ascii.__name__ for input_string, expect in CASES: result = self.json.encoder.encode_basestring_ascii(input_string) self.assertEqual(result, expect, '{0!r} != {1!r} for {2}({3!r})'.format( result, expect, fname, input_string)) def test_ordered_dict(self): # See issue 6105 items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)] s = self.dumps(OrderedDict(items)) self.assertEqual(s, '{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}') class TestPyEncodeBasestringAscii(TestEncodeBasestringAscii, PyTest): pass class TestCEncodeBasestringAscii(TestEncodeBasestringAscii, CTest): pass
gpl-2.0
dxwu/BinderFilter
resources/android-toolchain-16/lib/python2.7/wsgiref/util.py
247
5576
"""Miscellaneous WSGI-related Utilities""" import posixpath __all__ = [ 'FileWrapper', 'guess_scheme', 'application_uri', 'request_uri', 'shift_path_info', 'setup_testing_defaults', ] class FileWrapper: """Wrapper to convert file-like objects to iterables""" def __init__(self, filelike, blksize=8192): self.filelike = filelike self.blksize = blksize if hasattr(filelike,'close'): self.close = filelike.close def __getitem__(self,key): data = self.filelike.read(self.blksize) if data: return data raise IndexError def __iter__(self): return self def next(self): data = self.filelike.read(self.blksize) if data: return data raise StopIteration def guess_scheme(environ): """Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https' """ if environ.get("HTTPS") in ('yes','on','1'): return 'https' else: return 'http' def application_uri(environ): """Return the application's base URI (no PATH_INFO or QUERY_STRING)""" url = environ['wsgi.url_scheme']+'://' from urllib import quote if environ.get('HTTP_HOST'): url += environ['HTTP_HOST'] else: url += environ['SERVER_NAME'] if environ['wsgi.url_scheme'] == 'https': if environ['SERVER_PORT'] != '443': url += ':' + environ['SERVER_PORT'] else: if environ['SERVER_PORT'] != '80': url += ':' + environ['SERVER_PORT'] url += quote(environ.get('SCRIPT_NAME') or '/') return url def request_uri(environ, include_query=1): """Return the full request URI, optionally including the query string""" url = application_uri(environ) from urllib import quote path_info = quote(environ.get('PATH_INFO',''),safe='/;=,') if not environ.get('SCRIPT_NAME'): url += path_info[1:] else: url += path_info if include_query and environ.get('QUERY_STRING'): url += '?' + environ['QUERY_STRING'] return url def shift_path_info(environ): """Shift a name from PATH_INFO to SCRIPT_NAME, returning it If there are no remaining path segments in PATH_INFO, return None. 
Note: 'environ' is modified in-place; use a copy if you need to keep the original PATH_INFO or SCRIPT_NAME. Note: when PATH_INFO is just a '/', this returns '' and appends a trailing '/' to SCRIPT_NAME, even though empty path segments are normally ignored, and SCRIPT_NAME doesn't normally end in a '/'. This is intentional behavior, to ensure that an application can tell the difference between '/x' and '/x/' when traversing to objects. """ path_info = environ.get('PATH_INFO','') if not path_info: return None path_parts = path_info.split('/') path_parts[1:-1] = [p for p in path_parts[1:-1] if p and p != '.'] name = path_parts[1] del path_parts[1] script_name = environ.get('SCRIPT_NAME','') script_name = posixpath.normpath(script_name+'/'+name) if script_name.endswith('/'): script_name = script_name[:-1] if not name and not script_name.endswith('/'): script_name += '/' environ['SCRIPT_NAME'] = script_name environ['PATH_INFO'] = '/'.join(path_parts) # Special case: '/.' on PATH_INFO doesn't get stripped, # because we don't strip the last element of PATH_INFO # if there's only one path part left. Instead of fixing this # above, we fix it here so that PATH_INFO gets normalized to # an empty string in the environ. if name=='.': name = None return name def setup_testing_defaults(environ): """Update 'environ' with trivial defaults for testing purposes This adds various parameters required for WSGI, including HTTP_HOST, SERVER_NAME, SERVER_PORT, REQUEST_METHOD, SCRIPT_NAME, PATH_INFO, and all of the wsgi.* variables. It only supplies default values, and does not replace any existing settings for these variables. This routine is intended to make it easier for unit tests of WSGI servers and applications to set up dummy environments. It should *not* be used by actual WSGI servers or applications, since the data is fake! 
""" environ.setdefault('SERVER_NAME','127.0.0.1') environ.setdefault('SERVER_PROTOCOL','HTTP/1.0') environ.setdefault('HTTP_HOST',environ['SERVER_NAME']) environ.setdefault('REQUEST_METHOD','GET') if 'SCRIPT_NAME' not in environ and 'PATH_INFO' not in environ: environ.setdefault('SCRIPT_NAME','') environ.setdefault('PATH_INFO','/') environ.setdefault('wsgi.version', (1,0)) environ.setdefault('wsgi.run_once', 0) environ.setdefault('wsgi.multithread', 0) environ.setdefault('wsgi.multiprocess', 0) from StringIO import StringIO environ.setdefault('wsgi.input', StringIO("")) environ.setdefault('wsgi.errors', StringIO()) environ.setdefault('wsgi.url_scheme',guess_scheme(environ)) if environ['wsgi.url_scheme']=='http': environ.setdefault('SERVER_PORT', '80') elif environ['wsgi.url_scheme']=='https': environ.setdefault('SERVER_PORT', '443') _hoppish = { 'connection':1, 'keep-alive':1, 'proxy-authenticate':1, 'proxy-authorization':1, 'te':1, 'trailers':1, 'transfer-encoding':1, 'upgrade':1 }.__contains__ def is_hop_by_hop(header_name): """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header""" return _hoppish(header_name.lower())
mit
gatkin/declxml
tests/test_hooks_validation.py
1
14857
"""Tests using hooks for validation"""
from collections import namedtuple

import pytest

import declxml as xml
from .helpers import strip_xml


# Simple value types round-tripped by the processors under test.
_UserTuple = namedtuple('_UserTuple', [
    'name',
    'age',
])


class _UserClass(object):

    def __init__(self, name=None, age=None):
        self.name = name
        self.age = age

    def __eq__(self, other):
        return isinstance(other, _UserClass) and\
            other.name == self.name and\
            other.age == self.age

    def __repr__(self):
        return '_UserClass(name=\'{}\', age={})'.format(
            self.name, self.age
        )


class _ValidationError(Exception):
    """Custom validation error class"""


class TestCustomErrorMessage(object):
    """Provide custom validation error messages."""

    def test_array_non_root(self):
        """Custom error message for array values."""
        processor = xml.dictionary('data', [
            xml.array(xml.integer('value'), nested='values', hooks=self._hooks)
        ])

        xml_string = strip_xml("""
        <data>
            <values>
                <value>1</value>
            </values>
        </data>
        """)

        value = {
            'values': [1],
        }

        location = 'data/values'

        self._assert_error_message(processor, value, xml_string, location)

    def test_array_root(self):
        """Custom error message for array values."""
        processor = xml.array(xml.integer('value'), nested='data', hooks=self._hooks)

        xml_string = strip_xml("""
        <data>
            <value>1</value>
        </data>
        """)

        value = [1]

        location = 'data'

        self._assert_error_message(processor, value, xml_string, location)

    def test_dictionary_non_root(self):
        """Custom error message for dictionary values."""
        processor = xml.dictionary('data', [
            xml.dictionary('user', [
                xml.string('name'),
                xml.integer('age'),
            ], hooks=self._hooks)
        ])

        xml_string = strip_xml("""
        <data>
            <user>
                <name>Bob</name>
                <age>24</age>
            </user>
        </data>
        """)

        value = {
            'user': {
                'name': 'Bob',
                'age': 24,
            }
        }

        location = 'data/user'

        self._assert_error_message(processor, value, xml_string, location)

    def test_dictionary_root(self):
        """Custom error message for dictionary values."""
        processor = xml.dictionary('data', [
            xml.string('name'),
            xml.integer('age'),
        ], hooks=self._hooks)

        xml_string = strip_xml("""
        <data>
            <name>Bob</name>
            <age>24</age>
        </data>
        """)

        value = {
            'name': 'Bob',
            'age': 24,
        }

        location = 'data'

        self._assert_error_message(processor, value, xml_string, location)

    def test_named_tuple_non_root(self):
        """Custom error message for namedtuple values."""
        processor = xml.dictionary('data', [
            xml.named_tuple('user', _UserTuple, [
                xml.string('name'),
                xml.integer('age'),
            ], hooks=self._hooks)
        ])

        xml_string = strip_xml("""
        <data>
            <user>
                <name>Bob</name>
                <age>24</age>
            </user>
        </data>
        """)

        value = {'user': _UserTuple(name='Bob', age=24)}

        location = 'data/user'

        self._assert_error_message(processor, value, xml_string, location)

    def test_named_tuple_root(self):
        """Custom error message for namedtuple values."""
        processor = xml.named_tuple('data', _UserTuple, [
            xml.string('name'),
            xml.integer('age'),
        ], hooks=self._hooks)

        xml_string = strip_xml("""
        <data>
            <name>Bob</name>
            <age>24</age>
        </data>
        """)

        value = _UserTuple(name='Bob', age=24)

        location = 'data'

        self._assert_error_message(processor, value, xml_string, location)

    def test_primitive(self):
        """Custom error message for primitive values."""
        processor = xml.dictionary('data', [
            xml.integer('value', hooks=self._hooks)
        ])

        xml_string = strip_xml("""
        <data>
            <value>1</value>
        </data>
        """)

        value = {'value': 1}

        location = 'data/value'

        self._assert_error_message(processor, value, xml_string, location)

    def test_user_object_non_root(self):
        """Custom error message for user object values."""
        processor = xml.dictionary('data', [
            xml.user_object('user', _UserClass, [
                xml.string('name'),
                xml.integer('age'),
            ], hooks=self._hooks)
        ])

        xml_string = strip_xml("""
        <data>
            <user>
                <name>Bob</name>
                <age>24</age>
            </user>
        </data>
        """)

        value = {'user': _UserClass(name='Bob', age=24)}

        location = 'data/user'

        self._assert_error_message(processor, value, xml_string, location)

    def test_user_object_root(self):
        """Custom error message for user object values."""
        processor = xml.user_object('data', _UserClass, [
            xml.string('name'),
            xml.integer('age'),
        ], hooks=self._hooks)

        xml_string = strip_xml("""
        <data>
            <name>Bob</name>
            <age>24</age>
        </data>
        """)

        value = _UserClass(name='Bob', age=24)

        location = 'data'

        self._assert_error_message(processor, value, xml_string, location)

    @staticmethod
    def _assert_error_message(processor, value, xml_string, expected_location):
        # The custom error must be raised on both parse and serialize, and its
        # message must end with the processor location that failed.
        with pytest.raises(_ValidationError) as parse_exception:
            xml.parse_from_string(processor, xml_string)

        actual_parse_message = str(parse_exception.value)
        print(actual_parse_message)
        assert actual_parse_message.endswith(expected_location)

        with pytest.raises(_ValidationError) as serialize_exception:
            xml.serialize_to_string(processor, value)

        actual_serialize_message = str(serialize_exception.value)
        assert actual_serialize_message.endswith(expected_location)

    @property
    def _hooks(self):
        def validate(state, _):
            state.raise_error(_ValidationError, 'Invalid value')

        return xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )


class TestValidateArray(object):
    """Use hooks to validate array values."""

    def test_invalid(self):
        """Invalid array value."""
        xml_string = strip_xml("""
        <data>
            <value>1</value>
            <value>3</value>
            <value>3</value>
        </data>
        """)

        value = [1, 3, 3]

        _assert_invalid(self._processor, value, xml_string)

    def test_valid(self):
        """Valid array value."""
        xml_string = strip_xml("""
        <data>
            <value>1</value>
            <value>2</value>
            <value>3</value>
        </data>
        """)

        value = [1, 2, 3]

        _assert_valid(self._processor, value, xml_string)

    @property
    def _processor(self):
        def validate(state, value):
            if len(value) != len(set(value)):
                state.raise_error(_ValidationError)
            return value

        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )

        processor = xml.array(xml.integer('value'), hooks=hooks, nested='data')

        return processor


class TestValidateDictionary(object):
    """Use hooks to validate dictionary values."""

    def test_invalid(self):
        """Invalid dictionary value."""
        xml_string = strip_xml("""
        <data>
            <a>5</a>
            <b>6</b>
        </data>
        """)

        value = {
            'a': 5,
            'b': 6,
        }

        _assert_invalid(self._processor, value, xml_string)

    def test_valid(self):
        """Valid dictionary value."""
        xml_string = strip_xml("""
        <data>
            <a>32</a>
            <b>67</b>
        </data>
        """)

        value = {
            'a': 32,
            'b': 67,
        }

        _assert_valid(self._processor, value, xml_string)

    @property
    def _processor(self):
        def validate(state, value):
            if value['a'] == 5 and value['b'] == 6:
                state.raise_error(_ValidationError)
            return value

        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )

        processor = xml.dictionary('data', [
            xml.integer('a'),
            xml.integer('b'),
        ], hooks=hooks)

        return processor


class TestValidateNamedTuple(object):
    """Use hooks for validating namedtuple values."""

    def test_invalid(self):
        """Invalid namedtuple value"""
        xml_string = strip_xml("""
        <user>
            <name>Bob</name>
            <age>24</age>
        </user>
        """)

        value = _UserTuple(name='Bob', age=24)

        _assert_invalid(self._processor, value, xml_string)

    def test_valid(self):
        """Valid namedtuple value"""
        xml_string = strip_xml("""
        <user>
            <name>Jill</name>
            <age>28</age>
        </user>
        """)

        value = _UserTuple(name='Jill', age=28)

        _assert_valid(self._processor, value, xml_string)

    @property
    def _processor(self):
        def validate(state, value):
            if value.name == 'Bob' and value.age == 24:
                state.raise_error(_ValidationError)
            return value

        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )

        processor = xml.named_tuple('user', _UserTuple, [
            xml.string('name'),
            xml.integer('age')
        ], hooks=hooks)

        return processor


class TestValidatePrimitive(object):
    """Use hooks for validating primitive values."""

    def test_invalid(self):
        """Invalid primitive value"""
        xml_string = strip_xml("""
        <data>
            <value>-91</value>
        </data>
        """)

        value = {'value': -91}

        _assert_invalid(self._processor, value, xml_string)

    def test_valid(self):
        """Valid primitive value"""
        xml_string = strip_xml("""
        <data>
            <value>32</value>
        </data>
        """)

        value = {'value': 32}

        _assert_valid(self._processor, value, xml_string)

    @property
    def _processor(self):
        def validate(state, value):
            if value < 0:
                state.raise_error(_ValidationError)
            return value

        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate
        )

        processor = xml.dictionary('data', [
            xml.integer('value', hooks=hooks)
        ])

        return processor


class TestValidateUserObject(object):
    """Use hooks for validating user object values."""

    def test_invalid(self):
        """Invalid user object value"""
        xml_string = strip_xml("""
        <user>
            <name>Bob</name>
            <age>24</age>
        </user>
        """)

        value = _UserClass(name='Bob', age=24)

        _assert_invalid(self._processor, value, xml_string)

    def test_valid(self):
        """Valid user object value"""
        xml_string = strip_xml("""
        <user>
            <name>Jill</name>
            <age>28</age>
        </user>
        """)

        value = _UserClass(name='Jill', age=28)

        _assert_valid(self._processor, value, xml_string)

    @property
    def _processor(self):
        def validate(state, value):
            if value.name == 'Bob' and value.age == 24:
                state.raise_error(_ValidationError)
            return value

        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )

        processor = xml.user_object('user', _UserClass, [
            xml.string('name'),
            xml.integer('age')
        ], hooks=hooks)

        return processor


def test_aggregate_missing_hooks():
    """Process with missing aggregate hooks."""
    hooks = xml.Hooks(
        after_parse=None,
        before_serialize=None
    )

    processor = xml.dictionary('data', [
        xml.integer('a'),
        xml.integer('b')
    ], hooks=hooks)

    xml_string = strip_xml("""
    <data>
        <a>1</a>
        <b>2</b>
    </data>
    """)

    value = {
        'a': 1,
        'b': 2,
    }

    _assert_valid(processor, value, xml_string)


def test_primitive_missing_hooks():
    """Process primitive value with missing hooks."""
    hooks = xml.Hooks(
        after_parse=None,
        before_serialize=None
    )

    processor = xml.dictionary('data', [
        xml.integer('value', hooks=hooks)
    ])

    xml_string = strip_xml("""
    <data>
        <value>1</value>
    </data>
    """)

    value = {'value': 1}

    _assert_valid(processor, value, xml_string)


def test_processor_locations_parsing():
    """Get processor location in hooks callback."""
    expected_locations = [
        xml.ProcessorLocation(element_path='data', array_index=None),
        xml.ProcessorLocation(element_path='value', array_index=None)
    ]

    def trace(state, _):
        assert isinstance(state, xml.ProcessorStateView)
        assert expected_locations == list(state.locations)

    hooks = xml.Hooks(
        after_parse=trace,
        before_serialize=trace,
    )

    processor = xml.dictionary('data', [
        xml.integer('value', hooks=hooks),
    ])

    xml_string = strip_xml("""
    <data>
        <value>1</value>
    </data>
    """)

    value = {'value': 1}

    xml.parse_from_string(processor, xml_string)
    xml.serialize_to_string(processor, value)


def _assert_invalid(processor, value, xml_string):
    """Assert the processor rejects the XML and value as invalid."""
    with pytest.raises(_ValidationError):
        xml.parse_from_string(processor, xml_string)

    with pytest.raises(_ValidationError):
        xml.serialize_to_string(processor, value)


def _assert_valid(processor, value, xml_string):
    """Assert the processor accepts the XML and value as valid."""
    actual_value = xml.parse_from_string(processor, xml_string)
    assert value == actual_value

    actual_xml_string = xml.serialize_to_string(processor, value)
    assert xml_string == actual_xml_string
mit
openaid-IATI/OIPA
OIPA/iati_synchroniser/tests/test_create_publisher_organisation.py
1
1867
import unittest

from django.test import TestCase

from iati.factory import iati_factory
from iati_organisation.models import Organisation
from iati_synchroniser.create_publisher_organisation import (
    create_publisher_organisation
)
from iati_synchroniser.factory import synchroniser_factory


class CreatePublisherOrganisationTestCase(TestCase):
    """
    Test creation of an organisation on adding a publisher
    """

    def setUp(self):
        # Reference data required by the organisation factories/models.
        iati_factory.LanguageFactory.create(code='en', name='English')
        iati_factory.VersionFactory.create(code='2.02', name='2.02')
        iati_factory.OrganisationTypeFactory.create(
            code='22', name='Multilateral')

    @unittest.skip("Not implemented")
    def test_update_or_create_publisher_organisation(self):
        """
        check if dataset is saved as expected
        """

        # setup
        publisher = synchroniser_factory.PublisherFactory.create(
            organisation=None)
        publisher_organization_type = "22"

        # call
        create_publisher_organisation(publisher, publisher_organization_type)

        # prepare
        publisher.refresh_from_db()
        organisation = Organisation.objects.get(
            organisation_identifier=publisher.publisher_iati_id)

        # assert
        self.assertEqual(publisher.publisher_iati_id,
                         organisation.organisation_identifier)
        self.assertEqual(publisher.display_name,
                         organisation.name.narratives.first().content)
        self.assertEqual(publisher_organization_type,
                         organisation.type.code)
        self.assertEqual(publisher.publisher_iati_id,
                         organisation.reporting_org.reporting_org_identifier)
        self.assertEqual(publisher.display_name,
                         organisation.reporting_org.narratives.first().content)
agpl-3.0
zakuro9715/lettuce
tests/integration/lib/Django-1.3/tests/modeltests/proxy_model_inheritance/tests.py
50
1253
"""
XX. Proxy model inheritance

Proxy model inheritance across apps can result in syncdb not creating the table
for the proxied model (as described in #12286). This test creates two dummy
apps and calls syncdb, then verifies that the table has been created.
"""

import os
import sys

from django.conf import settings, Settings
from django.core.management import call_command
from django.db.models.loading import load_app
from django.test import TransactionTestCase


class ProxyModelInheritanceTests(TransactionTestCase):
    """Regression test for #12286: proxy inheritance across two apps."""

    def setUp(self):
        # Make the dummy apps (app1, app2) importable and install them.
        self.old_sys_path = sys.path[:]
        sys.path.append(os.path.dirname(os.path.abspath(__file__)))
        self.old_installed_apps = settings.INSTALLED_APPS
        settings.INSTALLED_APPS = ('app1', 'app2')
        # BUG FIX: the original used ``map(load_app, settings.INSTALLED_APPS)``,
        # which is eager only on Python 2; on Python 3 ``map`` returns a lazy
        # iterator and the apps would never be loaded. Loop explicitly so the
        # side effect always runs.
        for app_name in settings.INSTALLED_APPS:
            load_app(app_name)
        call_command('syncdb', verbosity=0)
        # Expose the dynamically-loaded models to the test methods.
        global ProxyModel, NiceModel
        from app1.models import ProxyModel
        from app2.models import NiceModel

    def tearDown(self):
        # Restore the settings and import path mutated in setUp.
        settings.INSTALLED_APPS = self.old_installed_apps
        sys.path = self.old_sys_path

    def test_table_exists(self):
        # Both queries would raise a database error if syncdb had skipped
        # creating the proxied model's table.
        self.assertEqual(NiceModel.objects.all().count(), 0)
        self.assertEqual(ProxyModel.objects.all().count(), 0)
gpl-3.0
ErykB2000/home-assistant
homeassistant/components/notify/pushover.py
8
3132
"""
homeassistant.components.notify.pushover
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Pushover platform for notify component.

Configuration:

To use the Pushover notifier you will need to add something like the following
to your config/configuration.yaml

notify:
  platform: pushover
  api_key: ABCDEFGHJKLMNOPQRSTUVXYZ
  user_key: ABCDEFGHJKLMNOPQRSTUVXYZ

Variables:

api_key
*Required
This parameter is optional but should be configured, in order to get an API
key you should go to https://pushover.net and register a new application.

This is a quote from the pushover website regarding free/open source apps:
"If you are creating a client-side library, application, or open source
project that will be redistributed and installed by end-users, you may want
to require each of your users to register their own application rather than
including your own API token with the software."

When setting up the application I recommend using the icon located here:
https://home-assistant.io/images/favicon-192x192.png

user_key
*Required
To retrieve this value log into your account at https://pushover.net
"""
import logging

from homeassistant.helpers import validate_config
from homeassistant.components.notify import (
    DOMAIN, ATTR_TITLE, BaseNotificationService)
from homeassistant.const import CONF_API_KEY

REQUIREMENTS = ['python-pushover>=0.2']
_LOGGER = logging.getLogger(__name__)


# pylint: disable=unused-variable
def get_service(hass, config):
    """ Get the pushover notification service.

    Returns a PushoverNotificationService on success, or None when the
    config is incomplete, the library is missing, or the API key is bad.
    """

    if not validate_config(config,
                           {DOMAIN: ['user_key', CONF_API_KEY]},
                           _LOGGER):
        return None

    try:
        # Imported lazily so a missing optional dependency is reported
        # instead of crashing component setup.
        # pylint: disable=no-name-in-module, unused-variable
        from pushover import InitError
    except ImportError:
        _LOGGER.exception(
            "Unable to import pushover. "
            "Did you maybe not install the 'python-pushover.py' package?")
        return None

    try:
        api_token = config[DOMAIN].get(CONF_API_KEY)
        return PushoverNotificationService(
            config[DOMAIN]['user_key'],
            api_token)

    except InitError:
        # Falls through to an implicit None return on a bad API key.
        _LOGGER.error(
            "Wrong API key supplied. "
            "Get it at https://pushover.net")


# pylint: disable=too-few-public-methods
class PushoverNotificationService(BaseNotificationService):
    """ Implements notification service for Pushover. """

    def __init__(self, user_key, api_token):
        # pylint: disable=no-name-in-module, unused-variable
        from pushover import Client
        self._user_key = user_key
        self._api_token = api_token
        self.pushover = Client(
            self._user_key, api_token=self._api_token)

    def send_message(self, message="", **kwargs):
        """ Send a message to a user. """
        # pylint: disable=no-name-in-module
        from pushover import RequestError

        title = kwargs.get(ATTR_TITLE)

        try:
            self.pushover.send_message(message, title=title)
        except RequestError:
            # Best-effort delivery: log the failure rather than raising.
            _LOGGER.exception("Could not send pushover notification")
mit
sergiocorato/account-invoicing
account_invoice_merge_payment/__openerp__.py
12
1535
# -*- coding: utf-8 -*- ############################################################################## # # This file is part of account_invoice_merge_payment, # an Odoo module. # # Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>) # # account_invoice_merge_payment is free software: # you can redistribute it and/or modify it under the terms of the GNU # Affero General Public License as published by the Free Software # Foundation,either version 3 of the License, or (at your option) any # later version. # # account_invoice_merge_payment is distributed # in the hope that it will be useful, but WITHOUT ANY WARRANTY; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR # PURPOSE. See the GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with account_invoice_merge_payment. # If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': "account_invoice_merge_payment", 'summary': """ Use invoice merge regarding fields on Account Payment Partner""", 'author': "ACSONE SA/NV,Odoo Community Association (OCA)", 'website': "http://acsone.eu", 'category': 'Invoicing & Payments', 'version': '8.0.0.1.0', 'license': 'AGPL-3', 'depends': [ 'account_invoice_merge', 'account_payment_partner', ], 'auto_install': True }
agpl-3.0
40223119/2015cd_0505
static/Brython3.1.1-20150328-091302/Lib/site-packages/pygame/colordict.py
621
24077
## pygame - Python Game Library ## Copyright (C) 2000-2003 Pete Shinners ## ## This library is free software; you can redistribute it and/or ## modify it under the terms of the GNU Library General Public ## License as published by the Free Software Foundation; either ## version 2 of the License, or (at your option) any later version. ## ## This library is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## Library General Public License for more details. ## ## You should have received a copy of the GNU Library General Public ## License along with this library; if not, write to the Free ## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ## ## Pete Shinners ## pete@shinners.org THECOLORS = { 'gray17' : (43, 43, 43, 255) , 'gold' : (255, 215, 0, 255) , 'gray10' : (26, 26, 26, 255) , 'yellow' : (255, 255, 0, 255) , 'gray11' : (28, 28, 28, 255) , 'grey61' : (156, 156, 156, 255) , 'grey60' : (153, 153, 153, 255) , 'darkseagreen' : (143, 188, 143, 255) , 'grey62' : (158, 158, 158, 255) , 'grey65' : (166, 166, 166, 255) , 'gray12' : (31, 31, 31, 255) , 'grey67' : (171, 171, 171, 255) , 'grey66' : (168, 168, 168, 255) , 'grey69' : (176, 176, 176, 255) , 'gray21' : (54, 54, 54, 255) , 'lightsalmon4' : (139, 87, 66, 255) , 'lightsalmon2' : (238, 149, 114, 255) , 'lightsalmon3' : (205, 129, 98, 255) , 'lightsalmon1' : (255, 160, 122, 255) , 'gray32' : (82, 82, 82, 255) , 'green4' : (0, 139, 0, 255) , 'gray30' : (77, 77, 77, 255) , 'gray31' : (79, 79, 79, 255) , 'green1' : (0, 255, 0, 255) , 'gray37' : (94, 94, 94, 255) , 'green3' : (0, 205, 0, 255) , 'green2' : (0, 238, 0, 255) , 'darkslategray1' : (151, 255, 255, 255) , 'darkslategray2' : (141, 238, 238, 255) , 'darkslategray3' : (121, 205, 205, 255) , 'aquamarine1' : (127, 255, 212, 255) , 'aquamarine3' : (102, 205, 170, 255) , 'aquamarine2' : (118, 238, 198, 255) , 
'papayawhip' : (255, 239, 213, 255) , 'black' : (0, 0, 0, 255) , 'darkorange3' : (205, 102, 0, 255) , 'oldlace' : (253, 245, 230, 255) , 'lightgoldenrod4' : (139, 129, 76, 255) , 'gray90' : (229, 229, 229, 255) , 'orchid1' : (255, 131, 250, 255) , 'orchid2' : (238, 122, 233, 255) , 'orchid3' : (205, 105, 201, 255) , 'grey68' : (173, 173, 173, 255) , 'brown' : (165, 42, 42, 255) , 'purple2' : (145, 44, 238, 255) , 'gray80' : (204, 204, 204, 255) , 'antiquewhite3' : (205, 192, 176, 255) , 'antiquewhite2' : (238, 223, 204, 255) , 'antiquewhite1' : (255, 239, 219, 255) , 'palevioletred3' : (205, 104, 137, 255) , 'hotpink' : (255, 105, 180, 255) , 'lightcyan' : (224, 255, 255, 255) , 'coral3' : (205, 91, 69, 255) , 'gray8' : (20, 20, 20, 255) , 'gray9' : (23, 23, 23, 255) , 'grey32' : (82, 82, 82, 255) , 'bisque4' : (139, 125, 107, 255) , 'cyan' : (0, 255, 255, 255) , 'gray0' : (0, 0, 0, 255) , 'gray1' : (3, 3, 3, 255) , 'gray6' : (15, 15, 15, 255) , 'bisque1' : (255, 228, 196, 255) , 'bisque2' : (238, 213, 183, 255) , 'bisque3' : (205, 183, 158, 255) , 'skyblue' : (135, 206, 235, 255) , 'gray' : (190, 190, 190, 255) , 'darkturquoise' : (0, 206, 209, 255) , 'rosybrown4' : (139, 105, 105, 255) , 'deepskyblue3' : (0, 154, 205, 255) , 'grey63' : (161, 161, 161, 255) , 'indianred1' : (255, 106, 106, 255) , 'grey78' : (199, 199, 199, 255) , 'lightpink' : (255, 182, 193, 255) , 'gray88' : (224, 224, 224, 255) , 'gray22' : (56, 56, 56, 255) , 'red' : (255, 0, 0, 255) , 'grey11' : (28, 28, 28, 255) , 'lemonchiffon3' : (205, 201, 165, 255) , 'lemonchiffon2' : (238, 233, 191, 255) , 'lemonchiffon1' : (255, 250, 205, 255) , 'indianred3' : (205, 85, 85, 255) , 'violetred1' : (255, 62, 150, 255) , 'plum2' : (238, 174, 238, 255) , 'plum1' : (255, 187, 255, 255) , 'lemonchiffon4' : (139, 137, 112, 255) , 'gray99' : (252, 252, 252, 255) , 'grey13' : (33, 33, 33, 255) , 'grey55' : (140, 140, 140, 255) , 'darkcyan' : (0, 139, 139, 255) , 'chocolate4' : (139, 69, 19, 255) , 
'lightgoldenrodyellow' : (250, 250, 210, 255) , 'gray54' : (138, 138, 138, 255) , 'lavender' : (230, 230, 250, 255) , 'chartreuse3' : (102, 205, 0, 255) , 'chartreuse2' : (118, 238, 0, 255) , 'chartreuse1' : (127, 255, 0, 255) , 'grey48' : (122, 122, 122, 255) , 'grey16' : (41, 41, 41, 255) , 'thistle' : (216, 191, 216, 255) , 'chartreuse4' : (69, 139, 0, 255) , 'darkorchid4' : (104, 34, 139, 255) , 'grey42' : (107, 107, 107, 255) , 'grey41' : (105, 105, 105, 255) , 'grey17' : (43, 43, 43, 255) , 'dimgrey' : (105, 105, 105, 255) , 'dodgerblue4' : (16, 78, 139, 255) , 'darkorchid2' : (178, 58, 238, 255) , 'darkorchid3' : (154, 50, 205, 255) , 'blue' : (0, 0, 255, 255) , 'rosybrown2' : (238, 180, 180, 255) , 'honeydew' : (240, 255, 240, 255) , 'gray18' : (46, 46, 46, 255) , 'cornflowerblue' : (100, 149, 237, 255) , 'grey91' : (232, 232, 232, 255) , 'gray14' : (36, 36, 36, 255) , 'gray15' : (38, 38, 38, 255) , 'gray16' : (41, 41, 41, 255) , 'maroon4' : (139, 28, 98, 255) , 'maroon3' : (205, 41, 144, 255) , 'maroon2' : (238, 48, 167, 255) , 'maroon1' : (255, 52, 179, 255) , 'gray13' : (33, 33, 33, 255) , 'gold3' : (205, 173, 0, 255) , 'gold2' : (238, 201, 0, 255) , 'gold1' : (255, 215, 0, 255) , 'grey79' : (201, 201, 201, 255) , 'palevioletred1' : (255, 130, 171, 255) , 'palevioletred2' : (238, 121, 159, 255) , 'gold4' : (139, 117, 0, 255) , 'gray41' : (105, 105, 105, 255) , 'gray84' : (214, 214, 214, 255) , 'mediumpurple' : (147, 112, 219, 255) , 'rosybrown1' : (255, 193, 193, 255) , 'lightblue2' : (178, 223, 238, 255) , 'lightblue3' : (154, 192, 205, 255) , 'grey57' : (145, 145, 145, 255) , 'lightblue1' : (191, 239, 255, 255) , 'lightblue4' : (104, 131, 139, 255) , 'gray33' : (84, 84, 84, 255) , 'skyblue4' : (74, 112, 139, 255) , 'grey97' : (247, 247, 247, 255) , 'skyblue1' : (135, 206, 255, 255) , 'gray27' : (69, 69, 69, 255) , 'skyblue3' : (108, 166, 205, 255) , 'skyblue2' : (126, 192, 238, 255) , 'lavenderblush1' : (255, 240, 245, 255) , 'darkgrey' : (169, 169, 
169, 255) , 'lavenderblush3' : (205, 193, 197, 255) , 'darkslategrey' : (47, 79, 79, 255) , 'lavenderblush4' : (139, 131, 134, 255) , 'deeppink4' : (139, 10, 80, 255) , 'grey99' : (252, 252, 252, 255) , 'gray36' : (92, 92, 92, 255) , 'coral4' : (139, 62, 47, 255) , 'magenta3' : (205, 0, 205, 255) , 'lightskyblue4' : (96, 123, 139, 255) , 'mediumturquoise' : (72, 209, 204, 255) , 'gray34' : (87, 87, 87, 255) , 'floralwhite' : (255, 250, 240, 255) , 'grey39' : (99, 99, 99, 255) , 'grey36' : (92, 92, 92, 255) , 'grey37' : (94, 94, 94, 255) , 'grey34' : (87, 87, 87, 255) , 'gray26' : (66, 66, 66, 255) , 'royalblue2' : (67, 110, 238, 255) , 'grey33' : (84, 84, 84, 255) , 'turquoise1' : (0, 245, 255, 255) , 'grey31' : (79, 79, 79, 255) , 'steelblue1' : (99, 184, 255, 255) , 'sienna4' : (139, 71, 38, 255) , 'steelblue3' : (79, 148, 205, 255) , 'lavenderblush2' : (238, 224, 229, 255) , 'sienna1' : (255, 130, 71, 255) , 'steelblue4' : (54, 100, 139, 255) , 'sienna3' : (205, 104, 57, 255) , 'aquamarine4' : (69, 139, 116, 255) , 'lightyellow1' : (255, 255, 224, 255) , 'lightyellow2' : (238, 238, 209, 255) , 'lightsteelblue' : (176, 196, 222, 255) , 'lightyellow4' : (139, 139, 122, 255) , 'magenta2' : (238, 0, 238, 255) , 'lightskyblue1' : (176, 226, 255, 255) , 'lightgoldenrod' : (238, 221, 130, 255) , 'magenta4' : (139, 0, 139, 255) , 'gray87' : (222, 222, 222, 255) , 'greenyellow' : (173, 255, 47, 255) , 'navajowhite4' : (139, 121, 94, 255) , 'darkslategray4' : (82, 139, 139, 255) , 'olivedrab' : (107, 142, 35, 255) , 'navajowhite1' : (255, 222, 173, 255) , 'navajowhite2' : (238, 207, 161, 255) , 'darkgoldenrod1' : (255, 185, 15, 255) , 'sienna' : (160, 82, 45, 255) , 'blue1' : (0, 0, 255, 255) , 'yellow1' : (255, 255, 0, 255) , 'gray61' : (156, 156, 156, 255) , 'magenta1' : (255, 0, 255, 255) , 'grey52' : (133, 133, 133, 255) , 'orangered4' : (139, 37, 0, 255) , 'palegreen' : (152, 251, 152, 255) , 'gray86' : (219, 219, 219, 255) , 'grey80' : (204, 204, 204, 255) , 
'seashell' : (255, 245, 238, 255) , 'royalblue' : (65, 105, 225, 255) , 'firebrick3' : (205, 38, 38, 255) , 'blue4' : (0, 0, 139, 255) , 'peru' : (205, 133, 63, 255) , 'gray60' : (153, 153, 153, 255) , 'aquamarine' : (127, 255, 212, 255) , 'grey53' : (135, 135, 135, 255) , 'tan4' : (139, 90, 43, 255) , 'darkgoldenrod' : (184, 134, 11, 255) , 'tan2' : (238, 154, 73, 255) , 'tan1' : (255, 165, 79, 255) , 'darkslategray' : (47, 79, 79, 255) , 'royalblue3' : (58, 95, 205, 255) , 'red2' : (238, 0, 0, 255) , 'red1' : (255, 0, 0, 255) , 'dodgerblue' : (30, 144, 255, 255) , 'violetred4' : (139, 34, 82, 255) , 'lightyellow' : (255, 255, 224, 255) , 'paleturquoise1' : (187, 255, 255, 255) , 'firebrick2' : (238, 44, 44, 255) , 'mediumaquamarine' : (102, 205, 170, 255) , 'lemonchiffon' : (255, 250, 205, 255) , 'chocolate' : (210, 105, 30, 255) , 'orchid4' : (139, 71, 137, 255) , 'maroon' : (176, 48, 96, 255) , 'gray38' : (97, 97, 97, 255) , 'darkorange4' : (139, 69, 0, 255) , 'mintcream' : (245, 255, 250, 255) , 'darkorange1' : (255, 127, 0, 255) , 'antiquewhite' : (250, 235, 215, 255) , 'darkorange2' : (238, 118, 0, 255) , 'grey18' : (46, 46, 46, 255) , 'grey19' : (48, 48, 48, 255) , 'grey38' : (97, 97, 97, 255) , 'moccasin' : (255, 228, 181, 255) , 'grey10' : (26, 26, 26, 255) , 'chocolate1' : (255, 127, 36, 255) , 'chocolate2' : (238, 118, 33, 255) , 'chocolate3' : (205, 102, 29, 255) , 'saddlebrown' : (139, 69, 19, 255) , 'grey15' : (38, 38, 38, 255) , 'darkslateblue' : (72, 61, 139, 255) , 'lightskyblue' : (135, 206, 250, 255) , 'gray69' : (176, 176, 176, 255) , 'gray68' : (173, 173, 173, 255) , 'deeppink' : (255, 20, 147, 255) , 'gray65' : (166, 166, 166, 255) , 'gray64' : (163, 163, 163, 255) , 'gray67' : (171, 171, 171, 255) , 'gray66' : (168, 168, 168, 255) , 'gray25' : (64, 64, 64, 255) , 'coral' : (255, 127, 80, 255) , 'gray63' : (161, 161, 161, 255) , 'gray62' : (158, 158, 158, 255) , 'goldenrod4' : (139, 105, 20, 255) , 'grey35' : (89, 89, 89, 255) , 'gray89' : 
(227, 227, 227, 255) , 'goldenrod1' : (255, 193, 37, 255) , 'goldenrod2' : (238, 180, 34, 255) , 'goldenrod3' : (205, 155, 29, 255) , 'springgreen1' : (0, 255, 127, 255) , 'springgreen2' : (0, 238, 118, 255) , 'springgreen3' : (0, 205, 102, 255) , 'springgreen4' : (0, 139, 69, 255) , 'mistyrose1' : (255, 228, 225, 255) , 'sandybrown' : (244, 164, 96, 255) , 'grey30' : (77, 77, 77, 255) , 'seashell2' : (238, 229, 222, 255) , 'seashell3' : (205, 197, 191, 255) , 'tan' : (210, 180, 140, 255) , 'seashell1' : (255, 245, 238, 255) , 'mistyrose3' : (205, 183, 181, 255) , 'magenta' : (255, 0, 255, 255) , 'pink' : (255, 192, 203, 255) , 'ivory2' : (238, 238, 224, 255) , 'ivory1' : (255, 255, 240, 255) , 'lightcyan2' : (209, 238, 238, 255) , 'mediumseagreen' : (60, 179, 113, 255) , 'ivory4' : (139, 139, 131, 255) , 'darkorange' : (255, 140, 0, 255) , 'powderblue' : (176, 224, 230, 255) , 'dodgerblue1' : (30, 144, 255, 255) , 'gray95' : (242, 242, 242, 255) , 'firebrick1' : (255, 48, 48, 255) , 'gray7' : (18, 18, 18, 255) , 'mistyrose4' : (139, 125, 123, 255) , 'tomato' : (255, 99, 71, 255) , 'indianred2' : (238, 99, 99, 255) , 'steelblue2' : (92, 172, 238, 255) , 'gray100' : (255, 255, 255, 255) , 'seashell4' : (139, 134, 130, 255) , 'grey89' : (227, 227, 227, 255) , 'grey88' : (224, 224, 224, 255) , 'grey87' : (222, 222, 222, 255) , 'grey86' : (219, 219, 219, 255) , 'grey85' : (217, 217, 217, 255) , 'grey84' : (214, 214, 214, 255) , 'midnightblue' : (25, 25, 112, 255) , 'grey82' : (209, 209, 209, 255) , 'grey81' : (207, 207, 207, 255) , 'yellow3' : (205, 205, 0, 255) , 'ivory3' : (205, 205, 193, 255) , 'grey22' : (56, 56, 56, 255) , 'gray85' : (217, 217, 217, 255) , 'violetred3' : (205, 50, 120, 255) , 'dodgerblue2' : (28, 134, 238, 255) , 'gray42' : (107, 107, 107, 255) , 'sienna2' : (238, 121, 66, 255) , 'grey72' : (184, 184, 184, 255) , 'grey73' : (186, 186, 186, 255) , 'grey70' : (179, 179, 179, 255) , 'palevioletred' : (219, 112, 147, 255) , 'lightslategray' : (119, 
136, 153, 255) , 'grey77' : (196, 196, 196, 255) , 'grey74' : (189, 189, 189, 255) , 'slategray1' : (198, 226, 255, 255) , 'pink1' : (255, 181, 197, 255) , 'mediumpurple1' : (171, 130, 255, 255) , 'pink3' : (205, 145, 158, 255) , 'antiquewhite4' : (139, 131, 120, 255) , 'lightpink1' : (255, 174, 185, 255) , 'honeydew2' : (224, 238, 224, 255) , 'khaki4' : (139, 134, 78, 255) , 'darkolivegreen4' : (110, 139, 61, 255) , 'gray45' : (115, 115, 115, 255) , 'slategray3' : (159, 182, 205, 255) , 'darkolivegreen1' : (202, 255, 112, 255) , 'khaki1' : (255, 246, 143, 255) , 'khaki2' : (238, 230, 133, 255) , 'khaki3' : (205, 198, 115, 255) , 'lavenderblush' : (255, 240, 245, 255) , 'honeydew4' : (131, 139, 131, 255) , 'salmon3' : (205, 112, 84, 255) , 'salmon2' : (238, 130, 98, 255) , 'gray92' : (235, 235, 235, 255) , 'salmon4' : (139, 76, 57, 255) , 'gray49' : (125, 125, 125, 255) , 'gray48' : (122, 122, 122, 255) , 'linen' : (250, 240, 230, 255) , 'burlywood1' : (255, 211, 155, 255) , 'green' : (0, 255, 0, 255) , 'gray47' : (120, 120, 120, 255) , 'blueviolet' : (138, 43, 226, 255) , 'brown2' : (238, 59, 59, 255) , 'brown3' : (205, 51, 51, 255) , 'peachpuff' : (255, 218, 185, 255) , 'brown4' : (139, 35, 35, 255) , 'firebrick4' : (139, 26, 26, 255) , 'azure1' : (240, 255, 255, 255) , 'azure3' : (193, 205, 205, 255) , 'azure2' : (224, 238, 238, 255) , 'azure4' : (131, 139, 139, 255) , 'tomato4' : (139, 54, 38, 255) , 'orange4' : (139, 90, 0, 255) , 'firebrick' : (178, 34, 34, 255) , 'indianred' : (205, 92, 92, 255) , 'orange1' : (255, 165, 0, 255) , 'orange3' : (205, 133, 0, 255) , 'orange2' : (238, 154, 0, 255) , 'darkolivegreen' : (85, 107, 47, 255) , 'gray2' : (5, 5, 5, 255) , 'slategrey' : (112, 128, 144, 255) , 'gray81' : (207, 207, 207, 255) , 'darkred' : (139, 0, 0, 255) , 'gray3' : (8, 8, 8, 255) , 'lightsteelblue1' : (202, 225, 255, 255) , 'lightsteelblue2' : (188, 210, 238, 255) , 'lightsteelblue3' : (162, 181, 205, 255) , 'lightsteelblue4' : (110, 123, 139, 255) , 
'tomato3' : (205, 79, 57, 255) , 'gray43' : (110, 110, 110, 255) , 'darkgoldenrod4' : (139, 101, 8, 255) , 'grey50' : (127, 127, 127, 255) , 'yellow4' : (139, 139, 0, 255) , 'mediumorchid' : (186, 85, 211, 255) , 'yellow2' : (238, 238, 0, 255) , 'darkgoldenrod2' : (238, 173, 14, 255) , 'darkgoldenrod3' : (205, 149, 12, 255) , 'chartreuse' : (127, 255, 0, 255) , 'mediumblue' : (0, 0, 205, 255) , 'gray4' : (10, 10, 10, 255) , 'springgreen' : (0, 255, 127, 255) , 'orange' : (255, 165, 0, 255) , 'gray5' : (13, 13, 13, 255) , 'lightsalmon' : (255, 160, 122, 255) , 'gray19' : (48, 48, 48, 255) , 'turquoise' : (64, 224, 208, 255) , 'lightseagreen' : (32, 178, 170, 255) , 'grey8' : (20, 20, 20, 255) , 'grey9' : (23, 23, 23, 255) , 'grey6' : (15, 15, 15, 255) , 'grey7' : (18, 18, 18, 255) , 'grey4' : (10, 10, 10, 255) , 'grey5' : (13, 13, 13, 255) , 'grey2' : (5, 5, 5, 255) , 'grey3' : (8, 8, 8, 255) , 'grey0' : (0, 0, 0, 255) , 'grey1' : (3, 3, 3, 255) , 'gray50' : (127, 127, 127, 255) , 'goldenrod' : (218, 165, 32, 255) , 'grey58' : (148, 148, 148, 255) , 'grey59' : (150, 150, 150, 255) , 'gray51' : (130, 130, 130, 255) , 'grey54' : (138, 138, 138, 255) , 'mediumorchid4' : (122, 55, 139, 255) , 'grey56' : (143, 143, 143, 255) , 'navajowhite3' : (205, 179, 139, 255) , 'mediumorchid1' : (224, 102, 255, 255) , 'grey51' : (130, 130, 130, 255) , 'mediumorchid3' : (180, 82, 205, 255) , 'mediumorchid2' : (209, 95, 238, 255) , 'cyan2' : (0, 238, 238, 255) , 'cyan3' : (0, 205, 205, 255) , 'gray23' : (59, 59, 59, 255) , 'cyan1' : (0, 255, 255, 255) , 'darkgreen' : (0, 100, 0, 255) , 'gray24' : (61, 61, 61, 255) , 'cyan4' : (0, 139, 139, 255) , 'darkviolet' : (148, 0, 211, 255) , 'peachpuff4' : (139, 119, 101, 255) , 'gray28' : (71, 71, 71, 255) , 'slateblue4' : (71, 60, 139, 255) , 'slateblue3' : (105, 89, 205, 255) , 'peachpuff1' : (255, 218, 185, 255) , 'peachpuff2' : (238, 203, 173, 255) , 'peachpuff3' : (205, 175, 149, 255) , 'gray29' : (74, 74, 74, 255) , 'paleturquoise' : 
(175, 238, 238, 255) , 'darkgray' : (169, 169, 169, 255) , 'grey25' : (64, 64, 64, 255) , 'darkmagenta' : (139, 0, 139, 255) , 'palegoldenrod' : (238, 232, 170, 255) , 'grey64' : (163, 163, 163, 255) , 'grey12' : (31, 31, 31, 255) , 'deeppink3' : (205, 16, 118, 255) , 'gray79' : (201, 201, 201, 255) , 'gray83' : (212, 212, 212, 255) , 'deeppink2' : (238, 18, 137, 255) , 'burlywood4' : (139, 115, 85, 255) , 'palevioletred4' : (139, 71, 93, 255) , 'deeppink1' : (255, 20, 147, 255) , 'slateblue2' : (122, 103, 238, 255) , 'grey46' : (117, 117, 117, 255) , 'royalblue4' : (39, 64, 139, 255) , 'yellowgreen' : (154, 205, 50, 255) , 'royalblue1' : (72, 118, 255, 255) , 'slateblue1' : (131, 111, 255, 255) , 'lightgoldenrod3' : (205, 190, 112, 255) , 'lightgoldenrod2' : (238, 220, 130, 255) , 'navy' : (0, 0, 128, 255) , 'orchid' : (218, 112, 214, 255) , 'ghostwhite' : (248, 248, 255, 255) , 'purple' : (160, 32, 240, 255) , 'darkkhaki' : (189, 183, 107, 255) , 'grey45' : (115, 115, 115, 255) , 'gray94' : (240, 240, 240, 255) , 'wheat4' : (139, 126, 102, 255) , 'gray96' : (245, 245, 245, 255) , 'gray97' : (247, 247, 247, 255) , 'wheat1' : (255, 231, 186, 255) , 'gray91' : (232, 232, 232, 255) , 'wheat3' : (205, 186, 150, 255) , 'wheat2' : (238, 216, 174, 255) , 'indianred4' : (139, 58, 58, 255) , 'coral2' : (238, 106, 80, 255) , 'coral1' : (255, 114, 86, 255) , 'violetred' : (208, 32, 144, 255) , 'rosybrown3' : (205, 155, 155, 255) , 'deepskyblue2' : (0, 178, 238, 255) , 'deepskyblue1' : (0, 191, 255, 255) , 'bisque' : (255, 228, 196, 255) , 'grey49' : (125, 125, 125, 255) , 'khaki' : (240, 230, 140, 255) , 'wheat' : (245, 222, 179, 255) , 'lightslateblue' : (132, 112, 255, 255) , 'mediumpurple3' : (137, 104, 205, 255) , 'gray55' : (140, 140, 140, 255) , 'deepskyblue' : (0, 191, 255, 255) , 'gray98' : (250, 250, 250, 255) , 'steelblue' : (70, 130, 180, 255) , 'aliceblue' : (240, 248, 255, 255) , 'lightskyblue2' : (164, 211, 238, 255) , 'lightskyblue3' : (141, 182, 205, 255) , 
'lightslategrey' : (119, 136, 153, 255) , 'blue3' : (0, 0, 205, 255) , 'blue2' : (0, 0, 238, 255) , 'gainsboro' : (220, 220, 220, 255) , 'grey76' : (194, 194, 194, 255) , 'purple3' : (125, 38, 205, 255) , 'plum4' : (139, 102, 139, 255) , 'gray56' : (143, 143, 143, 255) , 'plum3' : (205, 150, 205, 255) , 'plum' : (221, 160, 221, 255) , 'lightgrey' : (211, 211, 211, 255) , 'mediumslateblue' : (123, 104, 238, 255) , 'mistyrose' : (255, 228, 225, 255) , 'lightcyan1' : (224, 255, 255, 255) , 'grey71' : (181, 181, 181, 255) , 'darksalmon' : (233, 150, 122, 255) , 'beige' : (245, 245, 220, 255) , 'grey24' : (61, 61, 61, 255) , 'azure' : (240, 255, 255, 255) , 'honeydew1' : (240, 255, 240, 255) , 'slategray2' : (185, 211, 238, 255) , 'dodgerblue3' : (24, 116, 205, 255) , 'slategray4' : (108, 123, 139, 255) , 'grey27' : (69, 69, 69, 255) , 'lightcyan3' : (180, 205, 205, 255) , 'cornsilk' : (255, 248, 220, 255) , 'tomato1' : (255, 99, 71, 255) , 'gray57' : (145, 145, 145, 255) , 'mediumvioletred' : (199, 21, 133, 255) , 'tomato2' : (238, 92, 66, 255) , 'snow4' : (139, 137, 137, 255) , 'grey75' : (191, 191, 191, 255) , 'snow2' : (238, 233, 233, 255) , 'snow3' : (205, 201, 201, 255) , 'snow1' : (255, 250, 250, 255) , 'grey23' : (59, 59, 59, 255) , 'cornsilk3' : (205, 200, 177, 255) , 'lightcoral' : (240, 128, 128, 255) , 'orangered' : (255, 69, 0, 255) , 'navajowhite' : (255, 222, 173, 255) , 'mediumpurple2' : (159, 121, 238, 255) , 'slategray' : (112, 128, 144, 255) , 'pink2' : (238, 169, 184, 255) , 'grey29' : (74, 74, 74, 255) , 'grey28' : (71, 71, 71, 255) , 'gray82' : (209, 209, 209, 255) , 'burlywood' : (222, 184, 135, 255) , 'mediumpurple4' : (93, 71, 139, 255) , 'mediumspringgreen' : (0, 250, 154, 255) , 'grey26' : (66, 66, 66, 255) , 'grey21' : (54, 54, 54, 255) , 'grey20' : (51, 51, 51, 255) , 'blanchedalmond' : (255, 235, 205, 255) , 'pink4' : (139, 99, 108, 255) , 'gray78' : (199, 199, 199, 255) , 'tan3' : (205, 133, 63, 255) , 'gray76' : (194, 194, 194, 255) , 
'gray77' : (196, 196, 196, 255) , 'white' : (255, 255, 255, 255) , 'gray75' : (191, 191, 191, 255) , 'gray72' : (184, 184, 184, 255) , 'gray73' : (186, 186, 186, 255) , 'gray70' : (179, 179, 179, 255) , 'gray71' : (181, 181, 181, 255) , 'lightgray' : (211, 211, 211, 255) , 'ivory' : (255, 255, 240, 255) , 'gray46' : (117, 117, 117, 255) , 'gray74' : (189, 189, 189, 255) , 'lightyellow3' : (205, 205, 180, 255) , 'lightpink2' : (238, 162, 173, 255) , 'lightpink3' : (205, 140, 149, 255) , 'paleturquoise4' : (102, 139, 139, 255) , 'lightpink4' : (139, 95, 101, 255) , 'paleturquoise3' : (150, 205, 205, 255) , 'seagreen4' : (46, 139, 87, 255) , 'seagreen3' : (67, 205, 128, 255) , 'seagreen2' : (78, 238, 148, 255) , 'seagreen1' : (84, 255, 159, 255) , 'paleturquoise2' : (174, 238, 238, 255) , 'gray52' : (133, 133, 133, 255) , 'cornsilk4' : (139, 136, 120, 255) , 'cornsilk2' : (238, 232, 205, 255) , 'darkolivegreen3' : (162, 205, 90, 255) , 'cornsilk1' : (255, 248, 220, 255) , 'limegreen' : (50, 205, 50, 255) , 'darkolivegreen2' : (188, 238, 104, 255) , 'grey' : (190, 190, 190, 255) , 'violetred2' : (238, 58, 140, 255) , 'salmon1' : (255, 140, 105, 255) , 'grey92' : (235, 235, 235, 255) , 'grey93' : (237, 237, 237, 255) , 'grey94' : (240, 240, 240, 255) , 'grey95' : (242, 242, 242, 255) , 'grey96' : (245, 245, 245, 255) , 'grey83' : (212, 212, 212, 255) , 'grey98' : (250, 250, 250, 255) , 'lightgoldenrod1' : (255, 236, 139, 255) , 'palegreen1' : (154, 255, 154, 255) , 'red3' : (205, 0, 0, 255) , 'palegreen3' : (124, 205, 124, 255) , 'palegreen2' : (144, 238, 144, 255) , 'palegreen4' : (84, 139, 84, 255) , 'cadetblue' : (95, 158, 160, 255) , 'violet' : (238, 130, 238, 255) , 'mistyrose2' : (238, 213, 210, 255) , 'slateblue' : (106, 90, 205, 255) , 'grey43' : (110, 110, 110, 255) , 'grey90' : (229, 229, 229, 255) , 'gray35' : (89, 89, 89, 255) , 'turquoise3' : (0, 197, 205, 255) , 'turquoise2' : (0, 229, 238, 255) , 'burlywood3' : (205, 170, 125, 255) , 'burlywood2' : (238, 
197, 145, 255) , 'lightcyan4' : (122, 139, 139, 255) , 'rosybrown' : (188, 143, 143, 255) , 'turquoise4' : (0, 134, 139, 255) , 'whitesmoke' : (245, 245, 245, 255) , 'lightblue' : (173, 216, 230, 255) , 'grey40' : (102, 102, 102, 255) , 'gray40' : (102, 102, 102, 255) , 'honeydew3' : (193, 205, 193, 255) , 'dimgray' : (105, 105, 105, 255) , 'grey47' : (120, 120, 120, 255) , 'seagreen' : (46, 139, 87, 255) , 'red4' : (139, 0, 0, 255) , 'grey14' : (36, 36, 36, 255) , 'snow' : (255, 250, 250, 255) , 'darkorchid1' : (191, 62, 255, 255) , 'gray58' : (148, 148, 148, 255) , 'gray59' : (150, 150, 150, 255) , 'cadetblue4' : (83, 134, 139, 255) , 'cadetblue3' : (122, 197, 205, 255) , 'cadetblue2' : (142, 229, 238, 255) , 'cadetblue1' : (152, 245, 255, 255) , 'olivedrab4' : (105, 139, 34, 255) , 'purple4' : (85, 26, 139, 255) , 'gray20' : (51, 51, 51, 255) , 'grey44' : (112, 112, 112, 255) , 'purple1' : (155, 48, 255, 255) , 'olivedrab1' : (192, 255, 62, 255) , 'olivedrab2' : (179, 238, 58, 255) , 'olivedrab3' : (154, 205, 50, 255) , 'orangered3' : (205, 55, 0, 255) , 'orangered2' : (238, 64, 0, 255) , 'orangered1' : (255, 69, 0, 255) , 'darkorchid' : (153, 50, 204, 255) , 'thistle3' : (205, 181, 205, 255) , 'thistle2' : (238, 210, 238, 255) , 'thistle1' : (255, 225, 255, 255) , 'salmon' : (250, 128, 114, 255) , 'gray93' : (237, 237, 237, 255) , 'thistle4' : (139, 123, 139, 255) , 'gray39' : (99, 99, 99, 255) , 'lawngreen' : (124, 252, 0, 255) , 'hotpink3' : (205, 96, 144, 255) , 'hotpink2' : (238, 106, 167, 255) , 'hotpink1' : (255, 110, 180, 255) , 'lightgreen' : (144, 238, 144, 255) , 'hotpink4' : (139, 58, 98, 255) , 'darkseagreen4' : (105, 139, 105, 255) , 'darkseagreen3' : (155, 205, 155, 255) , 'darkseagreen2' : (180, 238, 180, 255) , 'darkseagreen1' : (193, 255, 193, 255) , 'deepskyblue4' : (0, 104, 139, 255) , 'gray44' : (112, 112, 112, 255) , 'navyblue' : (0, 0, 128, 255) , 'darkblue' : (0, 0, 139, 255) , 'forestgreen' : (34, 139, 34, 255) , 'gray53' : (135, 135, 
135, 255) , 'grey100' : (255, 255, 255, 255) , 'brown1' : (255, 64, 64, 255) , }
agpl-3.0
andykimpe/chromium-test-npapi
android_webview/tools/webview_licenses.py
17
13334
#!/usr/bin/python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Checks third-party licenses for the purposes of the Android WebView build. The Android tree includes a snapshot of Chromium in order to power the system WebView. This tool checks that all code uses open-source licenses compatible with Android, and that we meet the requirements of those licenses. It can also be used to generate an Android NOTICE file for the third-party code. It makes use of src/tools/licenses.py and the README.chromium files on which it depends. It also makes use of a data file, third_party_files_whitelist.txt, which whitelists indicidual files which contain third-party code but which aren't in a third-party directory with a README.chromium file. """ import glob import imp import optparse import os import re import subprocess import sys import textwrap REPOSITORY_ROOT = os.path.abspath(os.path.join( os.path.dirname(__file__), '..', '..')) # Import third_party/PRESUBMIT.py via imp to avoid importing a random # PRESUBMIT.py from $PATH, also make sure we don't generate a .pyc file. sys.dont_write_bytecode = True third_party = \ imp.load_source('PRESUBMIT', \ os.path.join(REPOSITORY_ROOT, 'third_party', 'PRESUBMIT.py')) sys.path.append(os.path.join(REPOSITORY_ROOT, 'tools')) import licenses import known_issues class InputApi(object): def __init__(self): self.re = re def GetIncompatibleDirectories(): """Gets a list of third-party directories which use licenses incompatible with Android. This is used by the snapshot tool. Returns: A list of directories. 
""" result = [] for directory in _FindThirdPartyDirs(): if directory in known_issues.KNOWN_ISSUES: result.append(directory) continue try: metadata = licenses.ParseDir(directory, REPOSITORY_ROOT, require_license_file=False) except licenses.LicenseError as e: print 'Got LicenseError while scanning ' + directory raise if metadata.get('License Android Compatible', 'no').upper() == 'YES': continue license = re.split(' [Ll]icenses?$', metadata['License'])[0] if not third_party.LicenseIsCompatibleWithAndroid(InputApi(), license): result.append(directory) return result def GetUnknownIncompatibleDirectories(): """Gets a list of third-party directories which use licenses incompatible with Android which are not present in the known_issues.py file. This is used by the AOSP bot. Returns: A list of directories. """ incompatible_directories = frozenset(GetIncompatibleDirectories()) known_incompatible = [] for path, exclude_list in known_issues.KNOWN_INCOMPATIBLE.iteritems(): for exclude in exclude_list: if glob.has_magic(exclude): exclude_dirname = os.path.dirname(exclude) if glob.has_magic(exclude_dirname): print ('Exclude path %s contains an unexpected glob expression,' \ ' skipping.' % exclude) exclude = exclude_dirname known_incompatible.append(os.path.normpath(os.path.join(path, exclude))) known_incompatible = frozenset(known_incompatible) return incompatible_directories.difference(known_incompatible) class ScanResult(object): Ok, Warnings, Errors = range(3) def _CheckLicenseHeaders(excluded_dirs_list, whitelisted_files): """Checks that all files which are not in a listed third-party directory, and which do not use the standard Chromium license, are whitelisted. Args: excluded_dirs_list: The list of directories to exclude from scanning. whitelisted_files: The whitelist of files. 
Returns: ScanResult.Ok if all files with non-standard license headers are whitelisted and the whitelist contains no stale entries; ScanResult.Warnings if there are stale entries; ScanResult.Errors if new non-whitelisted entries found. """ excluded_dirs_list = [d for d in excluded_dirs_list if not 'third_party' in d] # Using a common pattern for third-partyies makes the ignore regexp shorter excluded_dirs_list.append('third_party') # VCS dirs excluded_dirs_list.append('.git') excluded_dirs_list.append('.svn') # Build output excluded_dirs_list.append('out/Debug') excluded_dirs_list.append('out/Release') # 'Copyright' appears in license agreements excluded_dirs_list.append('chrome/app/resources') # Quickoffice js files from internal src used on buildbots. crbug.com/350472. excluded_dirs_list.append('chrome/browser/resources/chromeos/quickoffice') # This is a test output directory excluded_dirs_list.append('chrome/tools/test/reference_build') # blink style copy right headers. excluded_dirs_list.append('content/shell/renderer/test_runner') # blink style copy right headers. excluded_dirs_list.append('content/shell/tools/plugin') # This is tests directory, doesn't exist in the snapshot excluded_dirs_list.append('content/test/data') # This is a tests directory that doesn't exist in the shipped product. excluded_dirs_list.append('gin/test') # This is a test output directory excluded_dirs_list.append('data/dom_perf') # This is a tests directory that doesn't exist in the shipped product. excluded_dirs_list.append('tools/perf/page_sets') excluded_dirs_list.append('tools/perf/page_sets/tough_animation_cases') # Histogram tools, doesn't exist in the snapshot excluded_dirs_list.append('tools/histograms') # Swarming tools, doesn't exist in the snapshot excluded_dirs_list.append('tools/swarming_client') # Arm sysroot tools, doesn't exist in the snapshot excluded_dirs_list.append('arm-sysroot') # Data is not part of open source chromium, but are included on some bots. 
excluded_dirs_list.append('data') # This is not part of open source chromium, but are included on some bots. excluded_dirs_list.append('skia/tools/clusterfuzz-data') args = ['android_webview/tools/find_copyrights.pl', '.' ] + excluded_dirs_list p = subprocess.Popen(args=args, cwd=REPOSITORY_ROOT, stdout=subprocess.PIPE) lines = p.communicate()[0].splitlines() offending_files = [] allowed_copyrights = '^(?:\*No copyright\*' \ '|20[0-9][0-9](?:-20[0-9][0-9])? The Chromium Authors\. ' \ 'All rights reserved.*)$' allowed_copyrights_re = re.compile(allowed_copyrights) for l in lines: entries = l.split('\t') if entries[1] == "GENERATED FILE": continue copyrights = entries[1].split(' / ') for c in copyrights: if c and not allowed_copyrights_re.match(c): offending_files.append(os.path.normpath(entries[0])) break unknown = set(offending_files) - set(whitelisted_files) if unknown: print 'The following files contain a third-party license but are not in ' \ 'a listed third-party directory and are not whitelisted. You must ' \ 'add the following files to the whitelist.\n%s' % \ '\n'.join(sorted(unknown)) stale = set(whitelisted_files) - set(offending_files) if stale: print 'The following files are whitelisted unnecessarily. You must ' \ 'remove the following files from the whitelist.\n%s' % \ '\n'.join(sorted(stale)) missing = [f for f in whitelisted_files if not os.path.exists(f)] if missing: print 'The following files are whitelisted, but do not exist.\n%s' % \ '\n'.join(sorted(missing)) if unknown: return ScanResult.Errors elif stale or missing: return ScanResult.Warnings else: return ScanResult.Ok def _ReadFile(path): """Reads a file from disk. Args: path: The path of the file to read, relative to the root of the repository. Returns: The contents of the file as a string. """ return open(os.path.join(REPOSITORY_ROOT, path), 'rb').read() def _FindThirdPartyDirs(): """Gets the list of third-party directories. Returns: The list of third-party directories. 
""" # Please don't add here paths that have problems with license files, # as they will end up included in Android WebView snapshot. # Instead, add them into known_issues.py. prune_paths = [ # Temporary until we figure out how not to check out quickoffice on the # Android license check bot. Tracked in crbug.com/350472. os.path.join('chrome', 'browser', 'resources', 'chromeos', 'quickoffice'), # Placeholder directory, no third-party code. os.path.join('third_party', 'adobe'), # Apache 2.0 license. See # https://code.google.com/p/chromium/issues/detail?id=140478. os.path.join('third_party', 'bidichecker'), # Isn't checked out on clients os.path.join('third_party', 'gles2_conform'), # The llvm-build doesn't exist for non-clang builder os.path.join('third_party', 'llvm-build'), # Binaries doesn't apply to android os.path.join('third_party', 'widevine'), # third_party directories in this tree aren't actually third party, but # provide a way to shadow experimental buildfiles into those directories. os.path.join('build', 'secondary'), # Not shipped, Chromium code os.path.join('tools', 'swarming_client'), ] third_party_dirs = licenses.FindThirdPartyDirs(prune_paths, REPOSITORY_ROOT) return licenses.FilterDirsWithFiles(third_party_dirs, REPOSITORY_ROOT) def _Scan(): """Checks that license meta-data is present for all third-party code and that all non third-party code doesn't contain external copyrighted code. Returns: ScanResult.Ok if everything is in order; ScanResult.Warnings if there are non-fatal problems (e.g. stale whitelist entries) ScanResult.Errors otherwise. """ third_party_dirs = _FindThirdPartyDirs() # First, check designated third-party directories using src/tools/licenses.py. 
all_licenses_valid = True for path in sorted(third_party_dirs): try: licenses.ParseDir(path, REPOSITORY_ROOT) except licenses.LicenseError, e: if not (path in known_issues.KNOWN_ISSUES): print 'Got LicenseError "%s" while scanning %s' % (e, path) all_licenses_valid = False # Second, check for non-standard license text. files_data = _ReadFile(os.path.join('android_webview', 'tools', 'third_party_files_whitelist.txt')) whitelisted_files = [] for line in files_data.splitlines(): match = re.match(r'([^#\s]+)', line) if match: whitelisted_files.append(match.group(1)) licenses_check = _CheckLicenseHeaders(third_party_dirs, whitelisted_files) return licenses_check if all_licenses_valid else ScanResult.Errors def GenerateNoticeFile(): """Generates the contents of an Android NOTICE file for the third-party code. This is used by the snapshot tool. Returns: The contents of the NOTICE file. """ third_party_dirs = _FindThirdPartyDirs() # Don't forget Chromium's LICENSE file content = [_ReadFile('LICENSE')] # We provide attribution for all third-party directories. # TODO(steveblock): Limit this to only code used by the WebView binary. 
for directory in sorted(third_party_dirs): metadata = licenses.ParseDir(directory, REPOSITORY_ROOT, require_license_file=False) license_file = metadata['License File'] if license_file and license_file != licenses.NOT_SHIPPED: content.append(_ReadFile(license_file)) return '\n'.join(content) def _ProcessIncompatibleResult(incompatible_directories): if incompatible_directories: print ("Incompatibly licensed directories found:\n" + "\n".join(sorted(incompatible_directories))) return ScanResult.Errors return ScanResult.Ok def main(): class FormatterWithNewLines(optparse.IndentedHelpFormatter): def format_description(self, description): paras = description.split('\n') formatted_paras = [textwrap.fill(para, self.width) for para in paras] return '\n'.join(formatted_paras) + '\n' parser = optparse.OptionParser(formatter=FormatterWithNewLines(), usage='%prog [options]') parser.description = (__doc__ + '\nCommands:\n' \ ' scan Check licenses.\n' \ ' notice Generate Android NOTICE file on stdout.\n' \ ' incompatible_directories Scan for incompatibly' ' licensed directories.\n' ' all_incompatible_directories Scan for incompatibly' ' licensed directories (even those in' ' known_issues.py).\n') (_, args) = parser.parse_args() if len(args) != 1: parser.print_help() return ScanResult.Errors if args[0] == 'scan': scan_result = _Scan() if scan_result == ScanResult.Ok: print 'OK!' return scan_result elif args[0] == 'notice': print GenerateNoticeFile() return ScanResult.Ok elif args[0] == 'incompatible_directories': return _ProcessIncompatibleResult(GetUnknownIncompatibleDirectories()) elif args[0] == 'all_incompatible_directories': return _ProcessIncompatibleResult(GetIncompatibleDirectories()) parser.print_help() return ScanResult.Errors if __name__ == '__main__': sys.exit(main())
bsd-3-clause
AusTac/parma
b3/parsers/et.py
1
7934
# BigBrotherBot(B3) (www.bigbrotherbot.net)
# Copyright (C) 2005 Michael "ThorN" Thornton
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#

__author__ = 'ThorN'
__version__ = '0.0.1'

import re, string
import b3
from b3.parsers.q3a.abstractParser import AbstractParser
import PunkBuster


class EtParser(AbstractParser):
    """B3 log parser for Wolfenstein: Enemy Territory.

    Translates game-server log lines (matched against _lineFormats) into B3
    events and keeps the client list in sync with connect/userinfo lines.
    """
    gameName = 'et'
    # The ET rcon interface used here has no private-message command.
    privateMsg = False

    # Chat-wrapping limits used by getWrap() in message().
    _settings = {}
    _settings['line_length'] = 65
    _settings['min_wrap_length'] = 90

    # rcon command templates; %s placeholders are filled by AbstractParser.
    _commands = {}
    _commands['message'] = 'qsay %s %s ^8[pm]^7 %s'
    _commands['say'] = 'qsay %s %s'
    _commands['set'] = 'set %s %s'
    _commands['kick'] = 'clientkick %s %s'
    _commands['ban'] = 'banid %s %s'
    # NOTE(review): tempban is just a kick here — ET has no timed ban command.
    _commands['tempban'] = 'clientkick %s %s'

    # Maps log actions to B3 game events.
    _eventMap = {
        'warmup' : b3.events.EVT_GAME_WARMUP,
        'restartgame' : b3.events.EVT_GAME_ROUND_END
    }

    # remove the time off of the line
    _lineClear = re.compile(r'^(?:[0-9:.]+\s?)?')

    # Tried in order; the first pattern that matches a log line wins, so the
    # most specific formats must come first.
    _lineFormats = (
        #1579:03ConnectInfo: 0: E24F9B2702B9E4A1223E905BF597FA92: ^w[^2AS^w]^2Lead: 3: 3: 24.153.180.106:2794
        re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+):\s*(?P<pbid>[0-9A-Z]{32}):\s*(?P<name>[^:]+):\s*(?P<num1>[0-9]+):\s*(?P<num2>[0-9]+):\s*(?P<ip>[0-9.]+):(?P<port>[0-9]+))$', re.IGNORECASE),

        #1536:17sayc: 0: ^w[^2AS^w]^2Lead: sorry...
        #1536:34sayteamc: 17: ^1[^7DP^1]^4Timekiller: ^4ammo ^2here !!!!!
        re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+):\s*(?P<name>.+):\s+(?P<text>.*))$', re.IGNORECASE),

        #1536:37Kill: 1 18 9: ^1klaus killed ^1[pura]fox.nl by MOD_MP40
        re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+)\s(?P<acid>[0-9]+)\s(?P<aweap>[0-9]+):\s*(?P<text>.*))$', re.IGNORECASE),

        re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+):\s*(?P<text>.*))$', re.IGNORECASE),
        re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+)\s(?P<text>.*))$', re.IGNORECASE),
        re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>.*)$', re.IGNORECASE)
    )

    # Instance set in startup(); shadows the module-level PunkBuster import.
    PunkBuster = None

    def startup(self):
        """Register the synthetic 'World' client and attach PunkBuster."""
        # add the world client
        client = self.clients.newBaseClient()
        client.name = 'World'
        client.cid = -1
        client.guid = self.gameName + ':WORLD'
        client.maxLevel = -1
        client.hide = True
        self.clients.update(client)

        self.PunkBuster = PunkBuster.PunkBuster(self)

    def message(self, client, text):
        """Send a wrapped private-style message to a client via qsay.

        Falls back to a public say when client is None; silently drops the
        message for clients without a cid.
        """
        # NOTE(review): bare except and '== None' comparisons are legacy
        # style; any rcon failure is deliberately swallowed (best-effort).
        try:
            if client == None:
                self.say(text)
            elif client.cid == None:
                pass
            else:
                lines = []
                for line in self.getWrap(text, self._settings['line_length'], self._settings['min_wrap_length']):
                    lines.append('qsay %s ^8[%s^8]^7 %s' % (self.msgPrefix, client.exactName, line))

                self.writelines(lines)
        except:
            pass

    # join
    #1579:03ConnectInfo: 0: E24F9B2702B9E4A1223E905BF597FA92: ^w[^2AS^w]^2Lead: 3: 3: 24.153.180.106:2794
    def OnConnectinfo(self, action, data, match=None):
        """Handle a ConnectInfo line: reconcile or (re)create the client.

        Returns an EVT_CLIENT_JOIN event when the already-known client with
        the same guid reconnects; otherwise registers a new client and
        returns None.
        """
        guid = match.group('pbid')
        client = self.clients.getByCID(match.group('cid'))
        if client:
            if client.guid == guid:
                # this is the same player
                if client.exactName != match.group('name'):
                    client.exactName = match.group('name')
                    client.setName(self.stripColors(client.exactName))
                return b3.events.Event(b3.events.EVT_CLIENT_JOIN, None, client)
            else:
                # disconnect the existing client
                self.verbose('disconnect the existing client %s %s => %s %s', match.group('cid'), guid, client.cid, client)
                client.disconnect()

        client = self.clients.newBaseClient()
        client.cid = match.group('cid')

        #if match.group('guid') == '0':
        #    client.guid = None
        #else:
        client.pbid = client.guid = self.gameName + ':' + guid

        client.ip = match.group('ip')
        client.exactName = match.group('name')
        client.name = self.stripColors(client.exactName)
        self.clients.update(client)

    #1579:03ClientUserinfoChangedGUID: 0 E24F9B2702B9E4A1223E905BF597FA92 n\^w[^2AS^w]^2Lead\t\3\c\3\r\0\m\0000000\s\0000000\dn\\dr\0\w\3\lw\3\sw\7\mu\0\ref\0
    def OnClientuserinfochangedguid(self, action, data, match=None):
        """Handle a userinfo-changed line: re-parse and update the client."""
        client = self.clients.getByCID(match.group('cid'))
        # Drop the pbid token; parseUserInfo expects "<cid> <userinfo>".
        cid, pbid, data = string.split(data, ' ', 2)
        bclient = self.parseUserInfo(cid + ' ' + data)
        if bclient:
            self.clients.update(bclient, client)

    def OnGib(self, action, data, match=None):
        """Handle a Gib line and emit the matching EVT_CLIENT_GIB* event.

        Returns None when either victim or attacker is unknown.
        """
        #1538:42Gib: 5 10 1: ^0Apache Death gibbed ^,^t^9^8that ^2guy by MOD_MACHINEGUN
        victim = self.clients.getByCID(match.group('cid'))
        if not victim:
            self.debug('No victim')
            #self.OnJ(action, data, match)
            return None

        attacker = self.clients.getByCID(match.group('acid'))
        if not attacker:
            self.debug('No attacker')
            return None

        # Self-gib and team-gib get their own event types.
        event = b3.events.EVT_CLIENT_GIB
        if attacker.cid == victim.cid:
            event = b3.events.EVT_CLIENT_GIB_SELF
        elif attacker.team != b3.TEAM_UNKNOWN and attacker.team == victim.team:
            event = b3.events.EVT_CLIENT_GIB_TEAM

        return b3.events.Event(event, (100, match.group('aweap'), ''), attacker, victim)

    def OnKill(self, action, data, match=None):
        """Handle a Kill line and emit EVT_CLIENT_KILL / SUICIDE / KILL_TEAM.

        Returns None when either victim or attacker is unknown.
        """
        #1536:37Kill: 1 18 9: ^1klaus killed ^1[pura]fox.nl by MOD_MP40
        victim = self.clients.getByCID(match.group('cid'))
        if not victim:
            self.debug('No victim')
            #self.OnJ(action, data, match)
            return None

        attacker = self.clients.getByCID(match.group('acid'))
        if not attacker:
            self.debug('No attacker')
            return None

        event = b3.events.EVT_CLIENT_KILL
        if attacker.cid == victim.cid:
            event = b3.events.EVT_CLIENT_SUICIDE
        elif attacker.team != b3.TEAM_UNKNOWN and attacker.team == victim.team:
            event = b3.events.EVT_CLIENT_KILL_TEAM

        return b3.events.Event(event, (100, match.group('aweap'), ''), attacker, victim)

    def OnSayteamc(self, action, data, match=None):
        """Handle a team-chat line; returns EVT_CLIENT_TEAM_SAY or None."""
        #1536:34sayteamc: 17: ^1[^7DP^1]^4Timekiller: ^4ammo ^2here !!!!!
        client = self.clients.getByCID(match.group('cid'))
        if not client:
            self.debug('No client - attempt join')
            #self.OnJ(action, data, match)
            #client = self.clients.getByCID(match.group('cid'))
            #if not client:
            return None

        return b3.events.Event(b3.events.EVT_CLIENT_TEAM_SAY, match.group('text'), client)

    def OnSayc(self, action, data, match=None):
        """Handle a public-chat line; returns EVT_CLIENT_SAY or None."""
        #1536:17sayc: 0: ^w[^2AS^w]^2Lead: sorry...
        client = self.clients.getByCID(match.group('cid'))
        if not client:
            self.debug('No client - attempt join')
            #self.OnJ(action, data, match)
            #client = self.clients.getByCID(match.group('cid'))
            #if not client:
            return None

        return b3.events.Event(b3.events.EVT_CLIENT_SAY, match.group('text'), client)
gpl-2.0
symoro/symoro
symoroutils/filemgr.py
3
2549
# -*- coding: utf-8 -*-


# This file is part of the OpenSYMORO project. Please see
# https://github.com/symoro/symoro/blob/master/LICENCE for the licence.


"""Perform file management operations for the SYMORO package."""


import errno
import os


SYMORO_ROBOTS_FOLDER = "symoro-robots"


def get_base_path(base_folder=SYMORO_ROBOTS_FOLDER):
    """
    Return the base path for storing all SYMORO robot files.

    Args:
        base_folder: The folder name (string) to append to the user's
            home folder. Defaults to SYMORO_ROBOTS_FOLDER.

    Returns:
        A string specifying the base folder path.
    """
    home_folder = os.path.expanduser("~")
    return os.path.join(home_folder, base_folder)


def get_clean_name(name, char='-'):
    """
    Return a string that is lowercase and all whitespaces are replaced
    by a specified character.

    Args:
        name: The string to be cleaned up.
        char: The character to replace all whitespaces. The default
            character is "-" (hyphen).

    Returns:
        A string that is fully lowercase and all whitespaces replaced
        by the specified character.

    >>> get_clean_name('Random teXt')
    'random-text'
    >>> get_clean_name('Random teXt', '#')
    'random#text'
    """
    return name.lower().replace(' ', char)


def make_folders(folder_path):
    """
    Create a specified folder path (including any intermediate folders)
    if it does not already exist.

    Args:
        folder_path: The folder path (string) to check and create.
    """
    # EAFP instead of exists()+makedirs(): the original check-then-create
    # raced with concurrent creation of the same path.
    try:
        os.makedirs(folder_path)
    except OSError as exc:
        # Ignore "already exists" only when the path really is a folder;
        # re-raise genuine errors (permissions, a file in the way, ...).
        if exc.errno != errno.EEXIST or not os.path.isdir(folder_path):
            raise


def get_folder_path(robot_name):
    """
    Return the folder path to store the robot data. Also create the
    folders if they are not already present.

    Args:
        robot_name: The name of the robot (string).

    Returns:
        A string specifying the folder path.
    """
    robot_name = get_clean_name(robot_name)
    folder_path = os.path.join(get_base_path(), robot_name)
    make_folders(folder_path)
    return folder_path


def get_file_path(robo, ext=None):
    """
    Create the file path with the appropriate extension appended to the
    file name using an underscore.

    Args:
        robo: An instance of the `Robot` class.
        ext: The extension (string) that is to be appended to the file
            name with an underscore.

    Returns:
        The file path (string) created.
    """
    # No extension -> the robot's .par parameter file; otherwise a
    # "<name>_<ext>.txt" auxiliary file.
    if ext is None:
        fname = '{0}.par'.format(get_clean_name(robo.name))
    else:
        fname = '{0}_{1}.txt'.format(get_clean_name(robo.name), ext)
    file_path = os.path.join(robo.directory, fname)
    make_folders(robo.directory)
    return file_path
mit
sarvex/tensorflow
tensorflow/lite/python/convert_phase.py
6
7967
# Lint as: python2, python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for collecting TFLite metrics."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections
import enum
import functools
from typing import Text

from tensorflow.lite.python.metrics_wrapper import converter_error_data_pb2

# pylint: disable=g-import-not-at-top
# Prefer the portable metrics implementation; fall back to the
# non-portable one when it is not available in this build.
try:
  from tensorflow.lite.python import metrics_portable as metrics
except ImportError:
  from tensorflow.lite.python import metrics_nonportable as metrics
# pylint: enable=g-import-not-at-top


class Component(enum.Enum):
  """Enum class defining name of the converter components."""
  # Validate the given input and prepare and optimize TensorFlow Model.
  PREPARE_TF_MODEL = "PREPARE_TF_MODEL"

  # Convert to TFLite model format.
  CONVERT_TF_TO_TFLITE_MODEL = "CONVERT_TF_TO_TFLITE_MODEL"

  # RUN quantization and sparsification.
  OPTIMIZE_TFLITE_MODEL = "OPTIMIZE_TFLITE_MODEL"


# Each SubComponent enum member's value is one of these pairs, tying the
# subcomponent name to its owning Component.
SubComponentItem = collections.namedtuple("SubComponentItem",
                                          ["name", "component"])


class SubComponent(enum.Enum):
  """Enum class defining name of the converter subcomponents.

  This enum only defines the subcomponents in Python, there might be more
  subcomponents defined in C++.
  """

  def __str__(self):
    return self.value.name

  @property
  def name(self):
    # Overrides enum.Enum.name to expose the namedtuple's name field.
    return self.value.name

  @property
  def component(self):
    # The Component this subcomponent belongs to (None for UNSPECIFIED).
    return self.value.component

  # The subcomponent name is unspecified.
  UNSPECIFIED = SubComponentItem("UNSPECIFIED", None)

  # Valid the given input and parameters.
  VALIDATE_INPUTS = SubComponentItem("VALIDATE_INPUTS",
                                     Component.PREPARE_TF_MODEL)

  # Load GraphDef from SavedModel.
  LOAD_SAVED_MODEL = SubComponentItem("LOAD_SAVED_MODEL",
                                      Component.PREPARE_TF_MODEL)

  # Convert a SavedModel to frozen graph.
  FREEZE_SAVED_MODEL = SubComponentItem("FREEZE_SAVED_MODEL",
                                        Component.PREPARE_TF_MODEL)

  # Save a Keras model to SavedModel.
  CONVERT_KERAS_TO_SAVED_MODEL = SubComponentItem(
      "CONVERT_KERAS_TO_SAVED_MODEL", Component.PREPARE_TF_MODEL)

  # Convert a Keras model to a frozen graph.
  FREEZE_KERAS_MODEL = SubComponentItem("FREEZE_KERAS_MODEL",
                                        Component.PREPARE_TF_MODEL)

  # Replace all the variables with constants in a ConcreteFunction.
  FREEZE_CONCRETE_FUNCTION = SubComponentItem("FREEZE_CONCRETE_FUNCTION",
                                              Component.PREPARE_TF_MODEL)

  # Run grappler optimization.
  OPTIMIZE_TF_MODEL = SubComponentItem("OPTIMIZE_TF_MODEL",
                                       Component.PREPARE_TF_MODEL)

  # Convert using the old TOCO converter.
  CONVERT_GRAPHDEF_USING_DEPRECATED_CONVERTER = SubComponentItem(
      "CONVERT_GRAPHDEF_USING_DEPRECATED_CONVERTER",
      Component.CONVERT_TF_TO_TFLITE_MODEL)

  # Convert a GraphDef to TFLite model.
  CONVERT_GRAPHDEF = SubComponentItem("CONVERT_GRAPHDEF",
                                      Component.CONVERT_TF_TO_TFLITE_MODEL)

  # Convert a SavedModel to TFLite model.
  CONVERT_SAVED_MODEL = SubComponentItem("CONVERT_SAVED_MODEL",
                                         Component.CONVERT_TF_TO_TFLITE_MODEL)

  # Do quantization by the deprecated quantizer.
  QUANTIZE_USING_DEPRECATED_QUANTIZER = SubComponentItem(
      "QUANTIZE_USING_DEPRECATED_QUANTIZER", Component.OPTIMIZE_TFLITE_MODEL)

  # Do calibration.
  CALIBRATE = SubComponentItem("CALIBRATE", Component.OPTIMIZE_TFLITE_MODEL)

  # Do quantization by MLIR.
  QUANTIZE = SubComponentItem("QUANTIZE", Component.OPTIMIZE_TFLITE_MODEL)

  # Do sparsification by MLIR.
  SPARSIFY = SubComponentItem("SPARSIFY", Component.OPTIMIZE_TFLITE_MODEL)


class ConverterError(Exception):
  """Raised when an error occurs during model conversion."""

  def __init__(self, message):
    super(ConverterError, self).__init__(message)
    # Structured ConverterErrorData protos attached to this exception;
    # populated from the message below and/or via append_error().
    self.errors = []
    self._parse_error_message(message)

  def append_error(self,
                   error_data: converter_error_data_pb2.ConverterErrorData):
    """Attaches a structured ConverterErrorData proto to this exception."""
    self.errors.append(error_data)

  def _parse_error_message(self, message):
    """If the message matches a pattern, assigns the associated error code.

    It is difficult to assign an error code to some errrors in MLIR side, Ex:
    errors thrown by other components than TFLite or not using
    mlir::emitError. This function try to detect them by the error message
    and assign the corresponding error code.

    Args:
      message: The error message of this exception.
    """
    error_code_mapping = {
        "Failed to functionalize Control Flow V1 ops. Consider using Control "
        "Flow V2 ops instead. See https://www.tensorflow.org/api_docs/python/"
        "tf/compat/v1/enable_control_flow_v2.":
            converter_error_data_pb2.ConverterErrorData
            .ERROR_UNSUPPORTED_CONTROL_FLOW_V1,
    }
    for pattern, error_code in error_code_mapping.items():
      if pattern in message:
        error_data = converter_error_data_pb2.ConverterErrorData()
        error_data.error_message = message
        error_data.error_code = error_code
        self.append_error(error_data)
        # Only the first matching pattern is recorded.
        return


def convert_phase(component, subcomponent=SubComponent.UNSPECIFIED):
  """The decorator to identify converter component and subcomponent.

  Args:
    component: Converter component name.
    subcomponent: Converter subcomponent name.

  Returns:
    Forward the result from the wrapped function.

  Raises:
    ValueError: if component and subcomponent name is not valid.
  """
  if component not in Component:
    raise ValueError("Given component name not found")
  if subcomponent not in SubComponent:
    raise ValueError("Given subcomponent name not found")
  if (subcomponent != SubComponent.UNSPECIFIED and
      subcomponent.component != component):
    raise ValueError("component and subcomponent name don't match")

  def report_error(error_data: converter_error_data_pb2.ConverterErrorData):
    # Always overwrites the component information, but only overwrites the
    # subcomponent if it is not available.
    error_data.component = component.value
    if not error_data.subcomponent:
      error_data.subcomponent = subcomponent.name
    tflite_metrics = metrics.TFLiteConverterMetrics()
    tflite_metrics.set_converter_error(error_data)

  def report_error_message(error_message: Text):
    # Wraps a bare message into a ConverterErrorData proto before reporting.
    error_data = converter_error_data_pb2.ConverterErrorData()
    error_data.error_message = error_message
    report_error(error_data)

  def actual_decorator(func):

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
      try:
        return func(*args, **kwargs)
      except ConverterError as converter_error:
        # Prefer the structured errors already attached to the exception;
        # fall back to reporting its string form.
        if converter_error.errors:
          for error_data in converter_error.errors:
            report_error(error_data)
        else:
          report_error_message(str(converter_error))
        raise converter_error from None  # Re-throws the exception.
      except Exception as error:
        report_error_message(str(error))
        raise error from None  # Re-throws the exception.

    return wrapper

  return actual_decorator
apache-2.0
seanli9jan/tensorflow
tensorflow/contrib/learn/python/learn/estimators/state_saving_rnn_estimator_test.py
19
24449
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for learn.estimators.state_saving_rnn_estimator.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tempfile import numpy as np from tensorflow.contrib import lookup from tensorflow.contrib.layers.python.layers import feature_column from tensorflow.contrib.layers.python.layers import target_column as target_column_lib from tensorflow.contrib.learn.python.learn.estimators import constants from tensorflow.contrib.learn.python.learn.estimators import model_fn as model_fn_lib from tensorflow.contrib.learn.python.learn.estimators import prediction_key from tensorflow.contrib.learn.python.learn.estimators import rnn_common from tensorflow.contrib.learn.python.learn.estimators import run_config from tensorflow.contrib.learn.python.learn.estimators import state_saving_rnn_estimator as ssre from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import sparse_tensor from tensorflow.python.ops import array_ops from tensorflow.python.ops import init_ops from tensorflow.python.ops import lookup_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import variables from 
tensorflow.python.platform import test class PrepareInputsForRnnTest(test.TestCase): def _test_prepare_inputs_for_rnn(self, sequence_features, context_features, sequence_feature_columns, num_unroll, expected): features_by_time = ssre._prepare_inputs_for_rnn(sequence_features, context_features, sequence_feature_columns, num_unroll) with self.cached_session() as sess: sess.run(variables.global_variables_initializer()) sess.run(lookup_ops.tables_initializer()) features_val = sess.run(features_by_time) self.assertAllEqual(expected, features_val) def testPrepareInputsForRnnBatchSize1(self): num_unroll = 3 expected = [ np.array([[11., 31., 5., 7.]]), np.array([[12., 32., 5., 7.]]), np.array([[13., 33., 5., 7.]]) ] sequence_features = { 'seq_feature0': constant_op.constant([[11., 12., 13.]]), 'seq_feature1': constant_op.constant([[31., 32., 33.]]) } sequence_feature_columns = [ feature_column.real_valued_column( 'seq_feature0', dimension=1), feature_column.real_valued_column( 'seq_feature1', dimension=1), ] context_features = { 'ctx_feature0': constant_op.constant([[5.]]), 'ctx_feature1': constant_op.constant([[7.]]) } self._test_prepare_inputs_for_rnn(sequence_features, context_features, sequence_feature_columns, num_unroll, expected) def testPrepareInputsForRnnBatchSize2(self): num_unroll = 3 expected = [ np.array([[11., 31., 5., 7.], [21., 41., 6., 8.]]), np.array([[12., 32., 5., 7.], [22., 42., 6., 8.]]), np.array([[13., 33., 5., 7.], [23., 43., 6., 8.]]) ] sequence_features = { 'seq_feature0': constant_op.constant([[11., 12., 13.], [21., 22., 23.]]), 'seq_feature1': constant_op.constant([[31., 32., 33.], [41., 42., 43.]]) } sequence_feature_columns = [ feature_column.real_valued_column( 'seq_feature0', dimension=1), feature_column.real_valued_column( 'seq_feature1', dimension=1), ] context_features = { 'ctx_feature0': constant_op.constant([[5.], [6.]]), 'ctx_feature1': constant_op.constant([[7.], [8.]]) } self._test_prepare_inputs_for_rnn(sequence_features, 
context_features, sequence_feature_columns, num_unroll, expected) def testPrepareInputsForRnnNoContext(self): num_unroll = 3 expected = [ np.array([[11., 31.], [21., 41.]]), np.array([[12., 32.], [22., 42.]]), np.array([[13., 33.], [23., 43.]]) ] sequence_features = { 'seq_feature0': constant_op.constant([[11., 12., 13.], [21., 22., 23.]]), 'seq_feature1': constant_op.constant([[31., 32., 33.], [41., 42., 43.]]) } sequence_feature_columns = [ feature_column.real_valued_column( 'seq_feature0', dimension=1), feature_column.real_valued_column( 'seq_feature1', dimension=1), ] context_features = None self._test_prepare_inputs_for_rnn(sequence_features, context_features, sequence_feature_columns, num_unroll, expected) def testPrepareInputsForRnnSparse(self): num_unroll = 2 embedding_dimension = 8 expected = [ np.array([[1., 1., 1., 1., 1., 1., 1., 1.], [1., 1., 1., 1., 1., 1., 1., 1.], [1., 1., 1., 1., 1., 1., 1., 1.]]), np.array([[1., 1., 1., 1., 1., 1., 1., 1.], [2., 2., 2., 2., 2., 2., 2., 2.], [1., 1., 1., 1., 1., 1., 1., 1.]]) ] sequence_features = { 'wire_cast': sparse_tensor.SparseTensor( indices=[[0, 0, 0], [0, 1, 0], [1, 0, 0], [1, 1, 0], [1, 1, 1], [2, 0, 0], [2, 1, 1]], values=[ b'marlo', b'stringer', b'omar', b'stringer', b'marlo', b'marlo', b'omar' ], dense_shape=[3, 2, 2]) } wire_cast = feature_column.sparse_column_with_keys( 'wire_cast', ['marlo', 'omar', 'stringer']) sequence_feature_columns = [ feature_column.embedding_column( wire_cast, dimension=embedding_dimension, combiner='sum', initializer=init_ops.ones_initializer()) ] context_features = None self._test_prepare_inputs_for_rnn(sequence_features, context_features, sequence_feature_columns, num_unroll, expected) def testPrepareInputsForRnnSparseAndDense(self): num_unroll = 2 embedding_dimension = 8 dense_dimension = 2 expected = [ np.array([[1., 1., 1., 1., 1., 1., 1., 1., 111., 112.], [1., 1., 1., 1., 1., 1., 1., 1., 211., 212.], [1., 1., 1., 1., 1., 1., 1., 1., 311., 312.]]), np.array([[1., 1., 1., 
1., 1., 1., 1., 1., 121., 122.], [2., 2., 2., 2., 2., 2., 2., 2., 221., 222.], [1., 1., 1., 1., 1., 1., 1., 1., 321., 322.]]) ] sequence_features = { 'wire_cast': sparse_tensor.SparseTensor( indices=[[0, 0, 0], [0, 1, 0], [1, 0, 0], [1, 1, 0], [1, 1, 1], [2, 0, 0], [2, 1, 1]], values=[ b'marlo', b'stringer', b'omar', b'stringer', b'marlo', b'marlo', b'omar' ], dense_shape=[3, 2, 2]), 'seq_feature0': constant_op.constant([[[111., 112.], [121., 122.]], [[211., 212.], [221., 222.]], [[311., 312.], [321., 322.]]]) } wire_cast = feature_column.sparse_column_with_keys( 'wire_cast', ['marlo', 'omar', 'stringer']) wire_cast_embedded = feature_column.embedding_column( wire_cast, dimension=embedding_dimension, combiner='sum', initializer=init_ops.ones_initializer()) seq_feature0_column = feature_column.real_valued_column( 'seq_feature0', dimension=dense_dimension) sequence_feature_columns = [seq_feature0_column, wire_cast_embedded] context_features = None self._test_prepare_inputs_for_rnn(sequence_features, context_features, sequence_feature_columns, num_unroll, expected) class StateSavingRnnEstimatorTest(test.TestCase): def testPrepareFeaturesForSQSS(self): mode = model_fn_lib.ModeKeys.TRAIN seq_feature_name = 'seq_feature' sparse_seq_feature_name = 'wire_cast' ctx_feature_name = 'ctx_feature' sequence_length = 4 embedding_dimension = 8 features = { sparse_seq_feature_name: sparse_tensor.SparseTensor( indices=[[0, 0, 0], [0, 1, 0], [1, 0, 0], [1, 1, 0], [1, 1, 1], [2, 0, 0], [2, 1, 1]], values=[ b'marlo', b'stringer', b'omar', b'stringer', b'marlo', b'marlo', b'omar' ], dense_shape=[3, 2, 2]), seq_feature_name: constant_op.constant( 1.0, shape=[sequence_length]), ctx_feature_name: constant_op.constant(2.0) } labels = constant_op.constant(5.0, shape=[sequence_length]) wire_cast = feature_column.sparse_column_with_keys( 'wire_cast', ['marlo', 'omar', 'stringer']) sequence_feature_columns = [ feature_column.real_valued_column( seq_feature_name, dimension=1), 
feature_column.embedding_column( wire_cast, dimension=embedding_dimension, initializer=init_ops.ones_initializer()) ] context_feature_columns = [ feature_column.real_valued_column( ctx_feature_name, dimension=1) ] expected_sequence = { rnn_common.RNNKeys.LABELS_KEY: np.array([5., 5., 5., 5.]), seq_feature_name: np.array([1., 1., 1., 1.]), sparse_seq_feature_name: sparse_tensor.SparseTensor( indices=[[0, 0, 0], [0, 1, 0], [1, 0, 0], [1, 1, 0], [1, 1, 1], [2, 0, 0], [2, 1, 1]], values=[ b'marlo', b'stringer', b'omar', b'stringer', b'marlo', b'marlo', b'omar' ], dense_shape=[3, 2, 2]), } expected_context = {ctx_feature_name: 2.} sequence, context = ssre._prepare_features_for_sqss( features, labels, mode, sequence_feature_columns, context_feature_columns) def assert_equal(expected, got): self.assertEqual(sorted(expected), sorted(got)) for k, v in expected.items(): if isinstance(v, sparse_tensor.SparseTensor): self.assertAllEqual(v.values.eval(), got[k].values) self.assertAllEqual(v.indices.eval(), got[k].indices) self.assertAllEqual(v.dense_shape.eval(), got[k].dense_shape) else: self.assertAllEqual(v, got[k]) with self.cached_session() as sess: sess.run(variables.global_variables_initializer()) sess.run(lookup_ops.tables_initializer()) actual_sequence, actual_context = sess.run( [sequence, context]) assert_equal(expected_sequence, actual_sequence) assert_equal(expected_context, actual_context) def _getModelFnOpsForMode(self, mode): """Helper for testGetRnnModelFn{Train,Eval,Infer}().""" num_units = [4] seq_columns = [ feature_column.real_valued_column( 'inputs', dimension=1) ] features = { 'inputs': constant_op.constant([1., 2., 3.]), } labels = constant_op.constant([1., 0., 1.]) model_fn = ssre._get_rnn_model_fn( cell_type='basic_rnn', target_column=target_column_lib.multi_class_target(n_classes=2), optimizer='SGD', num_unroll=2, num_units=num_units, num_threads=1, queue_capacity=10, batch_size=1, # Only CLASSIFICATION yields eval metrics to test for. 
problem_type=constants.ProblemType.CLASSIFICATION, sequence_feature_columns=seq_columns, context_feature_columns=None, learning_rate=0.1) model_fn_ops = model_fn(features=features, labels=labels, mode=mode) return model_fn_ops # testGetRnnModelFn{Train,Eval,Infer}() test which fields # of ModelFnOps are set depending on mode. def testGetRnnModelFnTrain(self): model_fn_ops = self._getModelFnOpsForMode(model_fn_lib.ModeKeys.TRAIN) self.assertIsNotNone(model_fn_ops.predictions) self.assertIsNotNone(model_fn_ops.loss) self.assertIsNotNone(model_fn_ops.train_op) # None may get normalized to {}; we accept neither. self.assertNotEqual(len(model_fn_ops.eval_metric_ops), 0) def testGetRnnModelFnEval(self): model_fn_ops = self._getModelFnOpsForMode(model_fn_lib.ModeKeys.EVAL) self.assertIsNotNone(model_fn_ops.predictions) self.assertIsNotNone(model_fn_ops.loss) self.assertIsNone(model_fn_ops.train_op) # None may get normalized to {}; we accept neither. self.assertNotEqual(len(model_fn_ops.eval_metric_ops), 0) def testGetRnnModelFnInfer(self): model_fn_ops = self._getModelFnOpsForMode(model_fn_lib.ModeKeys.INFER) self.assertIsNotNone(model_fn_ops.predictions) self.assertIsNone(model_fn_ops.loss) self.assertIsNone(model_fn_ops.train_op) # None may get normalized to {}; we accept both. 
self.assertFalse(model_fn_ops.eval_metric_ops) def testExport(self): input_feature_key = 'magic_input_feature_key' batch_size = 8 num_units = [4] sequence_length = 10 num_unroll = 2 num_classes = 2 seq_columns = [ feature_column.real_valued_column( 'inputs', dimension=4) ] def get_input_fn(mode, seed): def input_fn(): features = {} random_sequence = random_ops.random_uniform( [sequence_length + 1], 0, 2, dtype=dtypes.int32, seed=seed) labels = array_ops.slice(random_sequence, [0], [sequence_length]) inputs = math_ops.to_float( array_ops.slice(random_sequence, [1], [sequence_length])) features = {'inputs': inputs} if mode == model_fn_lib.ModeKeys.INFER: input_examples = array_ops.placeholder(dtypes.string) features[input_feature_key] = input_examples labels = None return features, labels return input_fn model_dir = tempfile.mkdtemp() def estimator_fn(): return ssre.StateSavingRnnEstimator( constants.ProblemType.CLASSIFICATION, num_units=num_units, num_unroll=num_unroll, batch_size=batch_size, sequence_feature_columns=seq_columns, num_classes=num_classes, predict_probabilities=True, model_dir=model_dir, queue_capacity=2 + batch_size, seed=1234) # Train a bit to create an exportable checkpoint. estimator_fn().fit(input_fn=get_input_fn( model_fn_lib.ModeKeys.TRAIN, seed=1234), steps=100) # Now export, but from a fresh estimator instance, like you would # in an export binary. That means .export() has to work without # .fit() being called on the same object. export_dir = tempfile.mkdtemp() print('Exporting to', export_dir) estimator_fn().export( export_dir, input_fn=get_input_fn( model_fn_lib.ModeKeys.INFER, seed=4321), use_deprecated_input_fn=False, input_feature_key=input_feature_key) # Smoke tests to ensure deprecated constructor functions still work. 
class LegacyConstructorTest(test.TestCase): def _get_input_fn(self, sequence_length, seed=None): def input_fn(): random_sequence = random_ops.random_uniform( [sequence_length + 1], 0, 2, dtype=dtypes.int32, seed=seed) labels = array_ops.slice(random_sequence, [0], [sequence_length]) inputs = math_ops.to_float( array_ops.slice(random_sequence, [1], [sequence_length])) return {'inputs': inputs}, labels return input_fn # TODO(jtbates): move all tests below to a benchmark test. class StateSavingRNNEstimatorLearningTest(test.TestCase): """Learning tests for state saving RNN Estimators.""" def testLearnSineFunction(self): """Tests learning a sine function.""" batch_size = 8 num_unroll = 5 sequence_length = 64 train_steps = 250 eval_steps = 20 num_rnn_layers = 1 num_units = [4] * num_rnn_layers learning_rate = 0.3 loss_threshold = 0.035 def get_sin_input_fn(sequence_length, increment, seed=None): def input_fn(): start = random_ops.random_uniform( (), minval=0, maxval=(np.pi * 2.0), dtype=dtypes.float32, seed=seed) sin_curves = math_ops.sin( math_ops.linspace(start, (sequence_length - 1) * increment, sequence_length + 1)) inputs = array_ops.slice(sin_curves, [0], [sequence_length]) labels = array_ops.slice(sin_curves, [1], [sequence_length]) return {'inputs': inputs}, labels return input_fn seq_columns = [ feature_column.real_valued_column( 'inputs', dimension=1) ] config = run_config.RunConfig(tf_random_seed=1234) dropout_keep_probabilities = [0.9] * (num_rnn_layers + 1) sequence_estimator = ssre.StateSavingRnnEstimator( constants.ProblemType.LINEAR_REGRESSION, num_units=num_units, cell_type='lstm', num_unroll=num_unroll, batch_size=batch_size, sequence_feature_columns=seq_columns, learning_rate=learning_rate, dropout_keep_probabilities=dropout_keep_probabilities, config=config, queue_capacity=2 * batch_size, seed=1234) train_input_fn = get_sin_input_fn(sequence_length, np.pi / 32, seed=1234) eval_input_fn = get_sin_input_fn(sequence_length, np.pi / 32, seed=4321) 
sequence_estimator.fit(input_fn=train_input_fn, steps=train_steps) loss = sequence_estimator.evaluate( input_fn=eval_input_fn, steps=eval_steps)['loss'] self.assertLess(loss, loss_threshold, 'Loss should be less than {}; got {}'.format(loss_threshold, loss)) def testLearnShiftByOne(self): """Tests that learning a 'shift-by-one' example. Each label sequence consists of the input sequence 'shifted' by one place. The RNN must learn to 'remember' the previous input. """ batch_size = 16 num_classes = 2 num_unroll = 32 sequence_length = 32 train_steps = 300 eval_steps = 20 num_units = [4] learning_rate = 0.5 accuracy_threshold = 0.9 def get_shift_input_fn(sequence_length, seed=None): def input_fn(): random_sequence = random_ops.random_uniform( [sequence_length + 1], 0, 2, dtype=dtypes.int32, seed=seed) labels = array_ops.slice(random_sequence, [0], [sequence_length]) inputs = math_ops.to_float( array_ops.slice(random_sequence, [1], [sequence_length])) return {'inputs': inputs}, labels return input_fn seq_columns = [ feature_column.real_valued_column( 'inputs', dimension=1) ] config = run_config.RunConfig(tf_random_seed=21212) sequence_estimator = ssre.StateSavingRnnEstimator( constants.ProblemType.CLASSIFICATION, num_units=num_units, cell_type='lstm', num_unroll=num_unroll, batch_size=batch_size, sequence_feature_columns=seq_columns, num_classes=num_classes, learning_rate=learning_rate, config=config, predict_probabilities=True, queue_capacity=2 + batch_size, seed=1234) train_input_fn = get_shift_input_fn(sequence_length, seed=12321) eval_input_fn = get_shift_input_fn(sequence_length, seed=32123) sequence_estimator.fit(input_fn=train_input_fn, steps=train_steps) evaluation = sequence_estimator.evaluate( input_fn=eval_input_fn, steps=eval_steps) accuracy = evaluation['accuracy'] self.assertGreater(accuracy, accuracy_threshold, 'Accuracy should be higher than {}; got {}'.format( accuracy_threshold, accuracy)) # Testing `predict` when `predict_probabilities=True`. 
prediction_dict = sequence_estimator.predict( input_fn=eval_input_fn, as_iterable=False) self.assertListEqual( sorted(list(prediction_dict.keys())), sorted([ prediction_key.PredictionKey.CLASSES, prediction_key.PredictionKey.PROBABILITIES, ssre._get_state_name(0) ])) predictions = prediction_dict[prediction_key.PredictionKey.CLASSES] probabilities = prediction_dict[prediction_key.PredictionKey.PROBABILITIES] self.assertListEqual(list(predictions.shape), [batch_size, sequence_length]) self.assertListEqual( list(probabilities.shape), [batch_size, sequence_length, 2]) def testLearnLyrics(self): lyrics = 'if I go there will be trouble and if I stay it will be double' lyrics_list = lyrics.split() sequence_length = len(lyrics_list) vocab = set(lyrics_list) batch_size = 16 num_classes = len(vocab) num_unroll = 7 # not a divisor of sequence_length train_steps = 350 eval_steps = 30 num_units = [4] learning_rate = 0.4 accuracy_threshold = 0.65 def get_lyrics_input_fn(seed): def input_fn(): start = random_ops.random_uniform( (), minval=0, maxval=sequence_length, dtype=dtypes.int32, seed=seed) # Concatenate lyrics_list so inputs and labels wrap when start > 0. 
lyrics_list_concat = lyrics_list + lyrics_list inputs_dense = array_ops.slice(lyrics_list_concat, [start], [sequence_length]) indices = array_ops.constant( [[i, 0] for i in range(sequence_length)], dtype=dtypes.int64) dense_shape = [sequence_length, 1] inputs = sparse_tensor.SparseTensor( indices=indices, values=inputs_dense, dense_shape=dense_shape) table = lookup.string_to_index_table_from_tensor( mapping=list(vocab), default_value=-1, name='lookup') labels = table.lookup( array_ops.slice(lyrics_list_concat, [start + 1], [sequence_length])) return {'lyrics': inputs}, labels return input_fn sequence_feature_columns = [ feature_column.embedding_column( feature_column.sparse_column_with_keys('lyrics', vocab), dimension=8) ] config = run_config.RunConfig(tf_random_seed=21212) sequence_estimator = ssre.StateSavingRnnEstimator( constants.ProblemType.CLASSIFICATION, num_units=num_units, cell_type='basic_rnn', num_unroll=num_unroll, batch_size=batch_size, sequence_feature_columns=sequence_feature_columns, num_classes=num_classes, learning_rate=learning_rate, config=config, predict_probabilities=True, queue_capacity=2 + batch_size, seed=1234) train_input_fn = get_lyrics_input_fn(seed=12321) eval_input_fn = get_lyrics_input_fn(seed=32123) sequence_estimator.fit(input_fn=train_input_fn, steps=train_steps) evaluation = sequence_estimator.evaluate( input_fn=eval_input_fn, steps=eval_steps) accuracy = evaluation['accuracy'] self.assertGreater(accuracy, accuracy_threshold, 'Accuracy should be higher than {}; got {}'.format( accuracy_threshold, accuracy)) if __name__ == '__main__': test.main()
apache-2.0
basicthinker/ThyNVM
src/arch/sparc/SparcSystem.py
69
3713
# Copyright (c) 2007 The Regents of The University of Michigan # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # Authors: Nathan Binkert from m5.params import * from SimpleMemory import SimpleMemory from System import System class SparcSystem(System): type = 'SparcSystem' cxx_header = 'arch/sparc/system.hh' _rom_base = 0xfff0000000 _nvram_base = 0x1f11000000 _hypervisor_desc_base = 0x1f12080000 _partition_desc_base = 0x1f12000000 # ROM for OBP/Reset/Hypervisor rom = Param.SimpleMemory( SimpleMemory(range=AddrRange(_rom_base, size='8MB')), "Memory to hold the ROM data") # nvram nvram = Param.SimpleMemory( SimpleMemory(range=AddrRange(_nvram_base, size='8kB')), "Memory to hold the nvram data") # hypervisor description hypervisor_desc = Param.SimpleMemory( SimpleMemory(range=AddrRange(_hypervisor_desc_base, size='8kB')), "Memory to hold the hypervisor description") # partition description partition_desc = Param.SimpleMemory( SimpleMemory(range=AddrRange(_partition_desc_base, size='8kB')), "Memory to hold the partition description") reset_addr = Param.Addr(_rom_base, "Address to load ROM at") hypervisor_addr = Param.Addr(Addr('64kB') + _rom_base, "Address to load hypervisor at") openboot_addr = Param.Addr(Addr('512kB') + _rom_base, "Address to load openboot at") nvram_addr = Param.Addr(_nvram_base, "Address to put the nvram") hypervisor_desc_addr = Param.Addr(_hypervisor_desc_base, "Address for the hypervisor description") partition_desc_addr = Param.Addr(_partition_desc_base, "Address for the partition description") reset_bin = Param.String("file that contains the reset code") hypervisor_bin = Param.String("file that contains the hypervisor code") openboot_bin = Param.String("file that contains the openboot code") nvram_bin = Param.String("file that contains the contents of nvram") hypervisor_desc_bin = Param.String("file that contains the hypervisor description") partition_desc_bin = Param.String("file that contains the partition description") load_addr_mask = 0xffffffffff
bsd-3-clause
skakri/django-unstructured
wiki/core/permissions.py
1
3004
from wiki.conf import settings ############################### # TARGET PERMISSION HANDLING # ############################### # # All functions are: # can_something(target, user) # => True/False # # All functions can be replaced by pointing their relevant # settings variable in wiki.conf.settings to a callable(target, user) def can_read(target, user): if callable(settings.CAN_READ): return settings.CAN_READ(target, user) else: # Deny reading access to deleted entities if user has no delete access is_deleted = target.current_revision and target.deleted if is_deleted and not target.can_delete(user): return False # Check access for other users... if user.is_anonymous() and not settings.ANONYMOUS: return False elif target.other_read: return True elif user.is_anonymous(): return False if user == target.owner: return True if target.group_read: if target.group and user.groups.filter(id=target.group.id).exists(): return True if target.can_moderate(user): return True return False def can_write(target, user): if callable(settings.CAN_WRITE): return settings.CAN_WRITE(target, user) # Check access for other users... 
if user.is_anonymous() and not settings.ANONYMOUS_WRITE: return False elif target.other_write: return True elif user.is_anonymous(): return False if user == target.owner: return True if target.group_write: if target.group and user and user.groups.filter(id=target.group.id).exists(): return True if target.can_moderate(user): return True return False def can_assign(target, user): if callable(settings.CAN_ASSIGN): return settings.CAN_ASSIGN(target, user) return not user.is_anonymous() and user.has_perm('wiki.assign') def can_assign_owner(target, user): if callable(settings.CAN_ASSIGN_OWNER): return settings.CAN_ASSIGN_OWNER(target, user) return False def can_change_permissions(target, user): if callable(settings.CAN_CHANGE_PERMISSIONS): return settings.CAN_CHANGE_PERMISSIONS(target, user) return ( not user.is_anonymous() and ( target.owner == user or user.has_perm('wiki.assign') ) ) def can_delete(target, user): if callable(settings.CAN_DELETE): return settings.CAN_DELETE(target, user) return not user.is_anonymous() and target.can_write(user) def can_moderate(target, user): if callable(settings.CAN_MODERATE): return settings.CAN_MODERATE(target, user) return not user.is_anonymous() and user.has_perm('wiki.moderate') def can_admin(target, user): if callable(settings.CAN_ADMIN): return settings.CAN_ADMIN(target, user) return not user.is_anonymous() and user.has_perm('wiki.admin')
gpl-3.0
alephu5/Soundbyte
environment/lib/python3.3/site-packages/pygments/styles/autumn.py
364
2144
# -*- coding: utf-8 -*- """ pygments.styles.autumn ~~~~~~~~~~~~~~~~~~~~~~ A colorful style, inspired by the terminal highlighting style. :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Operator, Generic, Whitespace class AutumnStyle(Style): """ A colorful style, inspired by the terminal highlighting style. """ default_style = "" styles = { Whitespace: '#bbbbbb', Comment: 'italic #aaaaaa', Comment.Preproc: 'noitalic #4c8317', Comment.Special: 'italic #0000aa', Keyword: '#0000aa', Keyword.Type: '#00aaaa', Operator.Word: '#0000aa', Name.Builtin: '#00aaaa', Name.Function: '#00aa00', Name.Class: 'underline #00aa00', Name.Namespace: 'underline #00aaaa', Name.Variable: '#aa0000', Name.Constant: '#aa0000', Name.Entity: 'bold #800', Name.Attribute: '#1e90ff', Name.Tag: 'bold #1e90ff', Name.Decorator: '#888888', String: '#aa5500', String.Symbol: '#0000aa', String.Regex: '#009999', Number: '#009999', Generic.Heading: 'bold #000080', Generic.Subheading: 'bold #800080', Generic.Deleted: '#aa0000', Generic.Inserted: '#00aa00', Generic.Error: '#aa0000', Generic.Emph: 'italic', Generic.Strong: 'bold', Generic.Prompt: '#555555', Generic.Output: '#888888', Generic.Traceback: '#aa0000', Error: '#F00 bg:#FAA' }
gpl-3.0
ansible/ansible-modules-extras
cloud/amazon/s3_logging.py
48
5856
#!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library.  If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'status': ['stableinterface'],
                    'supported_by': 'committer',
                    'version': '1.0'}

DOCUMENTATION = '''
---
module: s3_logging
short_description: Manage logging facility of an s3 bucket in AWS
description:
    - Manage logging facility of an s3 bucket in AWS
version_added: "2.0"
author: Rob White (@wimnat)
options:
  name:
    description:
      - "Name of the s3 bucket."
    required: true
  state:
    description:
      - "Enable or disable logging."
    required: false
    default: present
    choices: [ 'present', 'absent' ]
  target_bucket:
    description:
      - "The bucket to log to. Required when state=present."
    required: false
    default: null
  target_prefix:
    description:
      - "The prefix that should be prepended to the generated log files written to the target_bucket."
    required: false
    default: ""
extends_documentation_fragment:
    - aws
    - ec2
'''

EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.

- name: Enable logging of s3 bucket mywebsite.com to s3 bucket mylogs
  s3_logging:
    name: mywebsite.com
    target_bucket: mylogs
    target_prefix: logs/mywebsite.com
    state: present

- name: Remove logging on an s3 bucket
  s3_logging:
    name: mywebsite.com
    state: absent
'''

try:
    import boto.ec2
    from boto.s3.connection import OrdinaryCallingFormat, Location
    from boto.exception import BotoServerError, S3CreateError, S3ResponseError
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, ec2_argument_spec, get_aws_connection_info


def compare_bucket_logging(bucket, target_bucket, target_prefix):
    """Return True if the bucket's current logging status already matches
    the requested target bucket and prefix.

    :param bucket: boto Bucket object to inspect.
    :param target_bucket: desired logging target bucket name (or None).
    :param target_prefix: desired log-file prefix (or None/'').
    """
    status = bucket.get_logging_status()
    # boto reports "no prefix" as either None or '' depending on the API
    # response, while the module's default is ''.  Normalise both sides so
    # the comparison is stable; without this the module reported "changed"
    # on every run (including for buckets that already had logging off).
    return (status.target == target_bucket and
            (status.prefix or '') == (target_prefix or ''))


def enable_bucket_logging(connection, module):
    """Enable (or update) server access logging on the bucket and exit the
    module with the resulting 'changed' status."""
    bucket_name = module.params.get("name")
    target_bucket = module.params.get("target_bucket")
    target_prefix = module.params.get("target_prefix")
    changed = False

    try:
        bucket = connection.get_bucket(bucket_name)
    except S3ResponseError as e:
        module.fail_json(msg=e.message)

    try:
        if not compare_bucket_logging(bucket, target_bucket, target_prefix):
            # Before we can enable logging we must give the log-delivery group
            # WRITE and READ_ACP permissions to the target bucket.
            try:
                target_bucket_obj = connection.get_bucket(target_bucket)
            except S3ResponseError as e:
                # A 301 means boto was redirected to another region's endpoint.
                if e.status == 301:
                    module.fail_json(msg="the logging target bucket must be in the same region as the bucket being logged")
                else:
                    module.fail_json(msg=e.message)
            target_bucket_obj.set_as_logging_target()

            bucket.enable_logging(target_bucket, target_prefix)
            changed = True

    except S3ResponseError as e:
        module.fail_json(msg=e.message)

    module.exit_json(changed=changed)


def disable_bucket_logging(connection, module):
    """Disable server access logging on the bucket and exit the module with
    the resulting 'changed' status."""
    bucket_name = module.params.get("name")
    changed = False

    try:
        bucket = connection.get_bucket(bucket_name)
        # Only issue the API call when logging is actually enabled, so an
        # already-disabled bucket reports changed=False (idempotent).
        if not compare_bucket_logging(bucket, None, None):
            bucket.disable_logging()
            changed = True
    except S3ResponseError as e:
        module.fail_json(msg=e.message)

    module.exit_json(changed=changed)


def main():
    """Module entry point: parse parameters, open the S3 connection and
    dispatch to the enable/disable handler."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name=dict(required=True),
            target_bucket=dict(required=False, default=None),
            target_prefix=dict(required=False, default=""),
            state=dict(required=False, default='present', choices=['present', 'absent'])
        )
    )

    # target_bucket is mandatory for state=present: without it the enable
    # path would call connection.get_bucket(None) and fail with an opaque
    # boto error instead of a clear parameter-validation message.
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_if=[['state', 'present', ['target_bucket']]],
    )

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module)

    if region in ('us-east-1', '', None):
        # S3ism for the US Standard region
        location = Location.DEFAULT
    else:
        # Boto uses symbolic names for locations but region strings will
        # actually work fine for everything except us-east-1 (US Standard)
        location = region
    try:
        connection = boto.s3.connect_to_region(location, is_secure=True, calling_format=OrdinaryCallingFormat(), **aws_connect_params)
        # use this as fallback because connect_to_region seems to fail in boto + non 'classic' aws accounts in some cases
        if connection is None:
            connection = boto.connect_s3(**aws_connect_params)
    except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
        module.fail_json(msg=str(e))

    state = module.params.get("state")

    if state == 'present':
        enable_bucket_logging(connection, module)
    elif state == 'absent':
        disable_bucket_logging(connection, module)


if __name__ == '__main__':
    main()
gpl-3.0
Yannig/ansible
lib/ansible/modules/cloud/ovirt/ovirt_host_networks.py
25
13992
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: ovirt_host_networks
short_description: Module to manage host networks in oVirt/RHV
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
    - "Module to manage host networks in oVirt/RHV."
options:
    name:
        description:
            - "Name of the host to manage networks for."
        required: true
    state:
        description:
            - "Should the host be present/absent."
        choices: ['present', 'absent']
        default: present
    bond:
        description:
            - "Dictionary describing network bond:"
            - "C(name) - Bond name."
            - "C(mode) - Bonding mode."
            - "C(interfaces) - List of interfaces to create a bond."
    interface:
        description:
            - "Name of the network interface where logical network should be attached."
    networks:
        description:
            - "List of dictionary describing networks to be attached to interface or bond:"
            - "C(name) - Name of the logical network to be assigned to bond or interface."
            - "C(boot_protocol) - Boot protocol one of the I(none), I(static) or I(dhcp)."
            - "C(address) - IP address in case of I(static) boot protocol is used."
            - "C(prefix) - Routing prefix in case of I(static) boot protocol is used."
            - "C(gateway) - Gateway in case of I(static) boot protocol is used."
            - "C(version) - IP version. Either v4 or v6. Default is v4."
    labels:
        description:
            - "List of names of the network label to be assigned to bond or interface."
    check:
        description:
            - "If I(true) verify connectivity between host and engine."
            - "Network configuration changes will be rolled back if connectivity between
               engine and the host is lost after changing network configuration."
    save:
        description:
            - "If I(true) network configuration will be persistent, by default they are temporary."
extends_documentation_fragment: ovirt
'''

EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:

# Create bond on eth0 and eth1 interface, and put 'myvlan' network on top of it:
- name: Bonds
  ovirt_host_networks:
    name: myhost
    bond:
      name: bond0
      mode: 2
      interfaces:
        - eth1
        - eth2
    networks:
      - name: myvlan
        boot_protocol: static
        address: 1.2.3.4
        prefix: 24
        gateway: 1.2.3.4
        version: v4

# Remove bond0 bond from host interfaces:
- ovirt_host_networks:
    state: absent
    name: myhost
    bond:
      name: bond0

# Assign myvlan1 and myvlan2 vlans to host eth0 interface:
- ovirt_host_networks:
    name: myhost
    interface: eth0
    networks:
      - name: myvlan1
      - name: myvlan2

# Remove myvlan2 vlan from host eth0 interface:
- ovirt_host_networks:
    state: absent
    name: myhost
    interface: eth0
    networks:
      - name: myvlan2

# Remove all networks/vlans from host eth0 interface:
- ovirt_host_networks:
    state: absent
    name: myhost
    interface: eth0
'''

RETURN = '''
id:
    description: ID of the host NIC which is managed
    returned: On success if host NIC is found.
    type: str
    sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
host_nic:
    description: "Dictionary of all the host NIC attributes. Host NIC attributes can be found on your oVirt/RHV instance
                  at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/host_nic."
    returned: On success if host NIC is found.
    type: dict
'''

import traceback

try:
    import ovirtsdk4.types as otypes
except ImportError:
    # check_sdk() in main() reports the missing SDK to the user.
    pass

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
    BaseModule,
    check_sdk,
    create_connection,
    equal,
    get_dict_of_struct,
    get_entity,
    get_link_name,
    ovirt_full_argument_spec,
    search_by_name,
)


class HostNetworksModule(BaseModule):
    """BaseModule subclass driving the oVirt 'setup_networks' host action.

    The heavy lifting (diffing and issuing API calls) happens in
    has_update()/update_address(); main() wires the module parameters into
    the SDK types.
    """

    def build_entity(self):
        # An empty Host entity is sufficient: the setup_networks action is
        # invoked on an existing host found by search_entity() in main().
        return otypes.Host()

    def update_address(self, attachments_service, attachment, network):
        """Update the IP assignment of an existing network attachment if the
        requested boot protocol/address/gateway/prefix differ.

        Only the assignment whose IP version matches the requested network
        (default 'v4') is considered; at most one assignment is updated.
        """
        # Check if there is any change in address assignments and
        # update it if needed:
        for ip in attachment.ip_address_assignments:
            if str(ip.ip.version) == network.get('version', 'v4'):
                changed = False
                if not equal(network.get('boot_protocol'), str(ip.assignment_method)):
                    ip.assignment_method = otypes.BootProtocol(network.get('boot_protocol'))
                    changed = True
                if not equal(network.get('address'), ip.ip.address):
                    ip.ip.address = network.get('address')
                    changed = True
                if not equal(network.get('gateway'), ip.ip.gateway):
                    ip.ip.gateway = network.get('gateway')
                    changed = True
                # The API stores the prefix in 'netmask'; compare as int but
                # write back as string, mirroring what the API returns.
                if not equal(network.get('prefix'), int(ip.ip.netmask) if ip.ip.netmask else None):
                    ip.ip.netmask = str(network.get('prefix'))
                    changed = True

                if changed:
                    # Honour Ansible check mode: report but don't apply.
                    if not self._module.check_mode:
                        attachments_service.service(attachment.id).update(attachment)
                    self.changed = True
                break

    def has_update(self, nic_service):
        """Return True when the NIC's bond or network attachments differ from
        the requested parameters (i.e. setup_networks must be invoked).

        Side effect: for attachments that already exist, the address
        assignment is updated in place via update_address().
        """
        update = False
        bond = self._module.params['bond']
        networks = self._module.params['networks']
        nic = get_entity(nic_service)

        if nic is None:
            return update

        # Check if bond configuration should be updated:
        if bond:
            # NOTE(review): this assumes the bonding 'mode' option is the
            # first entry in nic.bonding.options — confirm against the SDK.
            update = not (
                equal(str(bond.get('mode')), nic.bonding.options[0].value) and
                equal(
                    sorted(bond.get('interfaces')) if bond.get('interfaces') else None,
                    sorted(get_link_name(self._connection, s) for s in nic.bonding.slaves)
                )
            )

        if not networks:
            return update

        # Check if networks attachments configuration should be updated:
        attachments_service = nic_service.network_attachments_service()
        network_names = [network.get('name') for network in networks]

        # Map network name -> existing attachment for quick lookup below.
        attachments = {}
        for attachment in attachments_service.list():
            name = get_link_name(self._connection, attachment.network)
            if name in network_names:
                attachments[name] = attachment

        for network in networks:
            attachment = attachments.get(network.get('name'))
            # If the attachment doesn't exist, we need to create it:
            if attachment is None:
                return True

            self.update_address(attachments_service, attachment, network)

        return update

    def _action_save_configuration(self, entity):
        # Persist the (otherwise temporary) network configuration when the
        # 'save' parameter is true; respects check mode.
        if self._module.params['save']:
            if not self._module.check_mode:
                self._service.service(entity.id).commit_net_config()
            self.changed = True


def main():
    """Module entry point: look up the host and NIC, then create/update or
    remove bonds, labels and network attachments via setup_networks."""
    argument_spec = ovirt_full_argument_spec(
        state=dict(
            choices=['present', 'absent'],
            default='present',
        ),
        name=dict(default=None, aliases=['host'], required=True),
        bond=dict(default=None, type='dict'),
        interface=dict(default=None),
        networks=dict(default=None, type='list'),
        labels=dict(default=None, type='list'),
        check=dict(default=None, type='bool'),
        save=dict(default=None, type='bool'),
    )
    module = AnsibleModule(argument_spec=argument_spec)
    check_sdk(module)

    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        hosts_service = connection.system_service().hosts_service()
        host_networks_module = HostNetworksModule(
            connection=connection,
            module=module,
            service=hosts_service,
        )

        host = host_networks_module.search_entity()
        if host is None:
            raise Exception("Host '%s' was not found." % module.params['name'])

        bond = module.params['bond']
        interface = module.params['interface']
        networks = module.params['networks']
        labels = module.params['labels']
        # Changes are applied either to a bond (by its name) or to a plain
        # interface — the bond name wins when both are given.
        nic_name = bond.get('name') if bond else module.params['interface']

        nics_service = hosts_service.host_service(host.id).nics_service()
        nic = search_by_name(nics_service, nic_name)

        state = module.params['state']
        if (
            state == 'present' and
            (nic is None or host_networks_module.has_update(nics_service.service(nic.id)))
        ):
            # Build the SDK request describing the desired bond, labels and
            # network attachments, then run the setup_networks host action.
            host_networks_module.action(
                entity=host,
                action='setup_networks',
                post_action=host_networks_module._action_save_configuration,
                check_connectivity=module.params['check'],
                modified_bonds=[
                    otypes.HostNic(
                        name=bond.get('name'),
                        bonding=otypes.Bonding(
                            options=[
                                otypes.Option(
                                    name="mode",
                                    value=str(bond.get('mode')),
                                )
                            ],
                            slaves=[
                                otypes.HostNic(name=i) for i in bond.get('interfaces', [])
                            ],
                        ),
                    ),
                ] if bond else None,
                modified_labels=[
                    otypes.NetworkLabel(
                        name=str(name),
                        host_nic=otypes.HostNic(
                            name=bond.get('name') if bond else interface
                        ),
                    ) for name in labels
                ] if labels else None,
                modified_network_attachments=[
                    otypes.NetworkAttachment(
                        network=otypes.Network(
                            name=network['name']
                        ) if network['name'] else None,
                        host_nic=otypes.HostNic(
                            name=bond.get('name') if bond else interface
                        ),
                        ip_address_assignments=[
                            otypes.IpAddressAssignment(
                                assignment_method=otypes.BootProtocol(
                                    network.get('boot_protocol', 'none')
                                ),
                                ip=otypes.Ip(
                                    address=network.get('address'),
                                    gateway=network.get('gateway'),
                                    netmask=network.get('netmask'),
                                    version=otypes.IpVersion(
                                        network.get('version')
                                    ) if network.get('version') else None,
                                ),
                            ),
                        ],
                    ) for network in networks
                ] if networks else None,
            )
        elif state == 'absent' and nic:
            attachments_service = nics_service.nic_service(nic.id).network_attachments_service()
            attachments = attachments_service.list()
            if networks:
                # Restrict removal to attachments whose network name was
                # explicitly listed; otherwise remove them all.
                network_names = [network['name'] for network in networks]
                attachments = [
                    attachment for attachment in attachments
                    if get_link_name(connection, attachment.network) in network_names
                ]
            if labels or bond or attachments:
                host_networks_module.action(
                    entity=host,
                    action='setup_networks',
                    post_action=host_networks_module._action_save_configuration,
                    check_connectivity=module.params['check'],
                    removed_bonds=[
                        otypes.HostNic(
                            name=bond.get('name'),
                        ),
                    ] if bond else None,
                    removed_labels=[
                        otypes.NetworkLabel(
                            name=str(name),
                        ) for name in labels
                    ] if labels else None,
                    removed_network_attachments=list(attachments),
                )

        # Re-read the NIC so the returned id/host_nic reflect post-change state.
        nic = search_by_name(nics_service, nic_name)
        module.exit_json(**{
            'changed': host_networks_module.changed,
            'id': nic.id if nic else None,
            'host_nic': get_dict_of_struct(nic),
        })
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Only log out when the connection was authenticated via credentials
        # (a caller-supplied token is left valid for reuse).
        connection.close(logout=auth.get('token') is None)


if __name__ == "__main__":
    main()
gpl-3.0
vipul-sharma20/oh-mainline
vendor/packages/twisted/twisted/conch/checkers.py
18
9828
# -*- test-case-name: twisted.conch.test.test_checkers -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Provide L{ICredentialsChecker} implementations to be used in Conch protocols.
"""

import os, base64, binascii, errno
try:
    import pwd
except ImportError:
    pwd = None
else:
    import crypt

try:
    # get this from http://www.twistedmatrix.com/users/z3p/files/pyshadow-0.2.tar.gz
    import shadow
except:
    shadow = None

try:
    from twisted.cred import pamauth
except ImportError:
    pamauth = None

from zope.interface import implements, providedBy

from twisted.conch import error
from twisted.conch.ssh import keys
from twisted.cred.checkers import ICredentialsChecker
from twisted.cred.credentials import IUsernamePassword, ISSHPrivateKey
from twisted.cred.error import UnauthorizedLogin, UnhandledCredentials
from twisted.internet import defer
from twisted.python import failure, reflect, log
from twisted.python.util import runAsEffectiveUser
from twisted.python.filepath import FilePath


def verifyCryptedPassword(crypted, pw):
    """
    Return True if crypt()-hashing C{pw} with the salt extracted from
    C{crypted} reproduces C{crypted}.
    """
    if crypted[0] == '$': # md5_crypt encrypted
        # '$1$<salt>$...' format: the salt is the '$1$' marker plus the
        # second '$'-separated field.
        salt = '$1$' + crypted.split('$')[2]
    else:
        # Traditional DES crypt: the salt is the first two characters.
        salt = crypted[:2]
    return crypt.crypt(pw, salt) == crypted


class UNIXPasswordDatabase:
    """
    Checker that verifies username/password credentials against the local
    passwd database, falling back to the shadow database when available.
    """
    credentialInterfaces = IUsernamePassword,
    implements(ICredentialsChecker)

    def requestAvatarId(self, credentials):
        # First try the world-readable passwd entry.  Entries of '*'/'x'
        # mean the real hash lives in the shadow database.
        if pwd:
            try:
                cryptedPass = pwd.getpwnam(credentials.username)[1]
            except KeyError:
                return defer.fail(UnauthorizedLogin("invalid username"))
            else:
                if cryptedPass not in ['*', 'x'] and \
                    verifyCryptedPassword(cryptedPass, credentials.password):
                    return defer.succeed(credentials.username)
        if shadow:
            # Reading the shadow database requires root; temporarily raise
            # effective uid/gid and restore them afterwards.
            # NOTE(review): this toggling is process-global and not
            # thread-safe — confirm callers run it on the reactor thread.
            gid = os.getegid()
            uid = os.geteuid()
            os.setegid(0)
            os.seteuid(0)
            try:
                shadowPass = shadow.getspnam(credentials.username)[1]
            except KeyError:
                os.setegid(gid)
                os.seteuid(uid)
                return defer.fail(UnauthorizedLogin("invalid username"))
            os.setegid(gid)
            os.seteuid(uid)

            if verifyCryptedPassword(shadowPass, credentials.password):
                return defer.succeed(credentials.username)
            return defer.fail(UnauthorizedLogin("invalid password"))

        return defer.fail(UnauthorizedLogin("unable to verify password"))


class SSHPublicKeyDatabase:
    """
    Checker that authenticates SSH public keys, based on public keys listed in
    authorized_keys and authorized_keys2 files in user .ssh/ directories.
    """
    credentialInterfaces = ISSHPrivateKey,
    implements(ICredentialsChecker)

    def requestAvatarId(self, credentials):
        # checkKey may block on filesystem access, hence maybeDeferred.
        d = defer.maybeDeferred(self.checkKey, credentials)
        d.addCallback(self._cbRequestAvatarId, credentials)
        d.addErrback(self._ebRequestAvatarId)
        return d

    def _cbRequestAvatarId(self, validKey, credentials):
        """
        Check whether the credentials themselves are valid, now that we know
        if the key matches the user.

        @param validKey: A boolean indicating whether or not the public key
            matches a key in the user's authorized_keys file.

        @param credentials: The credentials offered by the user.
        @type credentials: L{ISSHPrivateKey} provider

        @raise UnauthorizedLogin: (as a failure) if the key does not match the
            user in C{credentials}. Also raised if the user provides an invalid
            signature.

        @raise ValidPublicKey: (as a failure) if the key matches the user but
            the credentials do not include a signature. See
            L{error.ValidPublicKey} for more information.

        @return: The user's username, if authentication was successful.
        """
        if not validKey:
            return failure.Failure(UnauthorizedLogin("invalid key"))
        if not credentials.signature:
            # Key matches but no signature was offered: the ValidPublicKey
            # failure tells the protocol the key would be accepted.
            return failure.Failure(error.ValidPublicKey())
        else:
            try:
                pubKey = keys.Key.fromString(credentials.blob)
                if pubKey.verify(credentials.signature, credentials.sigData):
                    return credentials.username
            except: # any error should be treated as a failed login
                log.err()
                return failure.Failure(UnauthorizedLogin('error while verifying key'))
        return failure.Failure(UnauthorizedLogin("unable to verify key"))

    def getAuthorizedKeysFiles(self, credentials):
        """
        Return a list of L{FilePath} instances for I{authorized_keys} files
        which might contain information about authorized keys for the given
        credentials.

        On OpenSSH servers, the default location of the file containing the
        list of authorized public keys is
        U{$HOME/.ssh/authorized_keys<http://www.openbsd.org/cgi-bin/man.cgi?query=sshd_config>}.

        I{$HOME/.ssh/authorized_keys2} is also returned, though it has been
        U{deprecated by OpenSSH since
        2001<http://marc.info/?m=100508718416162>}.

        @return: A list of L{FilePath} instances to files with the authorized
            keys.
        """
        pwent = pwd.getpwnam(credentials.username)
        root = FilePath(pwent.pw_dir).child('.ssh')
        files = ['authorized_keys', 'authorized_keys2']
        return [root.child(f) for f in files]

    def checkKey(self, credentials):
        """
        Retrieve files containing authorized keys and check against user
        credentials.

        Returns True as soon as any base64-decoded key entry equals the
        offered key blob; False when no file contains a match.
        """
        uid, gid = os.geteuid(), os.getegid()
        ouid, ogid = pwd.getpwnam(credentials.username)[2:4]
        for filepath in self.getAuthorizedKeysFiles(credentials):
            if not filepath.exists():
                continue
            try:
                lines = filepath.open()
            except IOError, e:
                if e.errno == errno.EACCES:
                    # The file may only be readable as the target user, e.g.
                    # on NFS with root-squash; retry under that identity.
                    lines = runAsEffectiveUser(ouid, ogid, filepath.open)
                else:
                    raise
            for l in lines:
                l2 = l.split()
                # Entries look like '<type> <base64-key> [comment]'; skip
                # anything shorter (blank lines, malformed entries).
                if len(l2) < 2:
                    continue
                try:
                    if base64.decodestring(l2[1]) == credentials.blob:
                        return True
                except binascii.Error:
                    # Not valid base64 — ignore the entry.
                    continue
        return False

    def _ebRequestAvatarId(self, f):
        # Log unexpected errors but present callers with a uniform
        # UnauthorizedLogin failure.
        if not f.check(UnauthorizedLogin):
            log.msg(f)
            return failure.Failure(UnauthorizedLogin("unable to get avatar id"))
        return f


class SSHProtocolChecker:
    """
    SSHProtocolChecker is a checker that requires multiple authentications
    to succeed.  To add a checker, call my registerChecker method with
    the checker and the interface.

    After each successful authenticate, I call my areDone method with the
    avatar id.  To get a list of the successful credentials for an avatar id,
    use C{SSHProcotolChecker.successfulCredentials[avatarId]}.  If L{areDone}
    returns True, the authentication has succeeded.
    """
    implements(ICredentialsChecker)

    def __init__(self):
        # Maps credential interface -> registered checker.
        self.checkers = {}
        # Maps avatarId -> list of credentials that already passed.
        self.successfulCredentials = {}

    def get_credentialInterfaces(self):
        return self.checkers.keys()

    credentialInterfaces = property(get_credentialInterfaces)

    def registerChecker(self, checker, *credentialInterfaces):
        # With no explicit interfaces, register for everything the checker
        # itself declares.
        if not credentialInterfaces:
            credentialInterfaces = checker.credentialInterfaces
        for credentialInterface in credentialInterfaces:
            self.checkers[credentialInterface] = checker

    def requestAvatarId(self, credentials):
        """
        Part of the L{ICredentialsChecker} interface.  Called by a portal with
        some credentials to check if they'll authenticate a user.  We check the
        interfaces that the credentials provide against our list of acceptable
        checkers.  If one of them matches, we ask that checker to verify the
        credentials.  If they're valid, we call our L{_cbGoodAuthentication}
        method to continue.

        @param credentials: the credentials the L{Portal} wants us to verify
        """
        ifac = providedBy(credentials)
        for i in ifac:
            c = self.checkers.get(i)
            if c is not None:
                d = defer.maybeDeferred(c.requestAvatarId, credentials)
                return d.addCallback(self._cbGoodAuthentication,
                        credentials)
        return defer.fail(UnhandledCredentials("No checker for %s" % \
            ', '.join(map(reflect.qual, ifac))))

    def _cbGoodAuthentication(self, avatarId, credentials):
        """
        Called if a checker has verified the credentials.  We call our
        L{areDone} method to see if the whole of the successful authentications
        are enough.  If they are, we return the avatar ID returned by the first
        checker.
        """
        if avatarId not in self.successfulCredentials:
            self.successfulCredentials[avatarId] = []
        self.successfulCredentials[avatarId].append(credentials)
        if self.areDone(avatarId):
            del self.successfulCredentials[avatarId]
            return avatarId
        else:
            # More authentication steps are required before login completes.
            raise error.NotEnoughAuthentication()

    def areDone(self, avatarId):
        """
        Override to determine if the authentication is finished for a given
        avatarId.

        @param avatarId: the avatar returned by the first checker.  For
            this checker to function correctly, all the checkers must
            return the same avatar ID.
        """
        return True
agpl-3.0
18padx08/PPTex
PPTexEnv_x86_64/lib/python2.7/site-packages/setuptools/depends.py
462
6370
import sys
import imp
import marshal

from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion

from setuptools import compat

__all__ = [
    'Require', 'find_module', 'get_module_constant', 'extract_constant'
]


class Require:
    """A prerequisite to building or installing a distribution"""

    def __init__(self, name, requested_version, module, homepage='',
            attribute=None, format=None):
        # Default the version parser to StrictVersion when a version was
        # requested, and the version attribute to '__version__' whenever a
        # format is in effect.
        if format is None and requested_version is not None:
            format = StrictVersion

        if format is not None:
            requested_version = format(requested_version)
            if attribute is None:
                attribute = '__version__'

        # Store all constructor arguments as instance attributes in one go.
        self.__dict__.update(locals())
        del self.self

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is not None:
            return '%s-%s' % (self.name, self.requested_version)
        return self.name

    def version_ok(self, version):
        """Is 'version' sufficiently up-to-date?"""
        return self.attribute is None or self.format is None or \
            str(version) != "unknown" and version >= self.requested_version

    def get_version(self, paths=None, default="unknown"):
        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module.  If not found, return 'None'.  If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module.  The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """
        if self.attribute is None:
            # No version attribute: mere presence of the module suffices.
            try:
                f, p, i = find_module(self.module, paths)
                if f:
                    f.close()
                return default
            except ImportError:
                return None

        v = get_module_constant(self.module, self.attribute, default, paths)

        if v is not None and v is not default and self.format is not None:
            return self.format(v)

        return v

    def is_present(self, paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self, paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        if version is None:
            return False
        return self.version_ok(version)


def _iter_code(code):
    """Yield '(op,arg)' pair for each operation in code object 'code'"""

    from array import array
    from dis import HAVE_ARGUMENT, EXTENDED_ARG

    # Use the *unsigned* 'B' typecode: with the signed 'b' typecode any
    # opcode or argument byte >= 128 (e.g. CALL_FUNCTION=131, EXTENDED_ARG)
    # came back negative, so the 'op >= HAVE_ARGUMENT' test failed and the
    # stream desynchronized.
    bytes = array('B', code.co_code)
    eof = len(code.co_code)

    ptr = 0
    extended_arg = 0

    while ptr < eof:

        op = bytes[ptr]

        if op >= HAVE_ARGUMENT:

            arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg
            # Reset after folding in, so a pending EXTENDED_ARG prefix only
            # affects the single instruction that follows it.
            extended_arg = 0
            ptr += 3

            if op == EXTENDED_ARG:
                extended_arg = arg * compat.long_type(65536)
                continue

        else:
            arg = None
            ptr += 1

        yield op, arg


def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support"""

    parts = module.split('.')

    while parts:
        part = parts.pop(0)
        f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)

        if kind == PKG_DIRECTORY:
            # Descend into the package; an empty remainder means the caller
            # wants the package itself, i.e. its __init__.
            parts = parts or ['__init__']
            paths = [path]

        elif parts:
            # A non-package was found but dotted components remain.
            raise ImportError("Can't find %r in %s" % (parts, module))

    return info


def get_module_constant(module, symbol, default=-1, paths=None):
    """Find 'module' by searching 'paths', and extract 'symbol'

    Return 'None' if 'module' does not exist on 'paths', or it does not define
    'symbol'.  If the module defines 'symbol' as a constant, return the
    constant.  Otherwise, return 'default'."""

    try:
        f, path, (suffix, mode, kind) = find_module(module, paths)
    except ImportError:
        # Module doesn't exist
        return None

    try:
        if kind == PY_COMPILED:
            f.read(8)   # skip magic & date
            code = marshal.load(f)
        elif kind == PY_FROZEN:
            code = imp.get_frozen_object(module)
        elif kind == PY_SOURCE:
            code = compile(f.read(), path, 'exec')
        else:
            # Not something we can parse; we'll have to import it.  :(
            if module not in sys.modules:
                imp.load_module(module, f, path, (suffix, mode, kind))
            return getattr(sys.modules[module], symbol, None)

    finally:
        if f:
            f.close()

    return extract_constant(code, symbol, default)


def extract_constant(code, symbol, default=-1):
    """Extract the constant value of 'symbol' from 'code'

    If the name 'symbol' is bound to a constant value by the Python code
    object 'code', return that value.  If 'symbol' is bound to an expression,
    return 'default'.  Otherwise, return 'None'.

    Return value is based on the first assignment to 'symbol'.  'symbol' must
    be a global, or at least a non-"fast" local in the code block.  That is,
    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
    must be present in 'code.co_names'.
    """

    if symbol not in code.co_names:
        # name's not there, can't possibly be an assignment
        return None

    name_idx = list(code.co_names).index(symbol)

    STORE_NAME = 90
    STORE_GLOBAL = 97
    LOAD_CONST = 100

    const = default

    for op, arg in _iter_code(code):

        if op == LOAD_CONST:
            const = code.co_consts[arg]
        elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
            return const
        else:
            # Any intervening instruction means the pending value is no
            # longer a plain constant.
            const = default


def _update_globals():
    """
    Patch the globals to remove the objects not available on some platforms.

    XXX it'd be better to test assertions about bytecode instead.
    """

    if not sys.platform.startswith('java') and sys.platform != 'cli':
        return
    incompatible = 'extract_constant', 'get_module_constant'
    for name in incompatible:
        del globals()[name]
        __all__.remove(name)


_update_globals()
mit
Allow2CEO/browser-ios
brave/node_modules/ad-block/vendor/depot_tools/win_toolchain/package_from_installed.py
9
12502
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ From a system-installed copy of the toolchain, packages all the required bits into a .zip file. It assumes default install locations for tools, in particular: - C:\Program Files (x86)\Microsoft Visual Studio 12.0\... - C:\Program Files (x86)\Windows Kits\10\... 1. Start from a fresh Win7 VM image. 2. Install VS Pro. Deselect everything except MFC. 3. Install Windows 10 SDK. Select only the Windows SDK and Debugging Tools for Windows. 4. Run this script, which will build a <sha1>.zip. Express is not yet supported by this script, but patches welcome (it's not too useful as the resulting zip can't be redistributed, and most will presumably have a Pro license anyway). """ import optparse import os import platform import shutil import sys import tempfile import zipfile import get_toolchain_if_necessary VS_VERSION = None WIN_VERSION = None def BuildFileList(): result = [] # Subset of VS corresponding roughly to VC. 
paths = [ 'DIA SDK/bin', 'DIA SDK/idl', 'DIA SDK/include', 'DIA SDK/lib', 'VC/atlmfc', 'VC/bin', 'VC/crt', 'VC/include', 'VC/lib', 'VC/redist', ] if VS_VERSION == '2013': paths += [ ('VC/redist/x86/Microsoft.VC120.CRT', 'sys32'), ('VC/redist/x86/Microsoft.VC120.MFC', 'sys32'), ('VC/redist/Debug_NonRedist/x86/Microsoft.VC120.DebugCRT', 'sys32'), ('VC/redist/Debug_NonRedist/x86/Microsoft.VC120.DebugMFC', 'sys32'), ('VC/redist/x64/Microsoft.VC120.CRT', 'sys64'), ('VC/redist/x64/Microsoft.VC120.MFC', 'sys64'), ('VC/redist/Debug_NonRedist/x64/Microsoft.VC120.DebugCRT', 'sys64'), ('VC/redist/Debug_NonRedist/x64/Microsoft.VC120.DebugMFC', 'sys64'), ] elif VS_VERSION == '2015': paths += [ ('VC/redist/x86/Microsoft.VC140.CRT', 'sys32'), ('VC/redist/x86/Microsoft.VC140.MFC', 'sys32'), ('VC/redist/debug_nonredist/x86/Microsoft.VC140.DebugCRT', 'sys32'), ('VC/redist/debug_nonredist/x86/Microsoft.VC140.DebugMFC', 'sys32'), ('VC/redist/x64/Microsoft.VC140.CRT', 'sys64'), ('VC/redist/x64/Microsoft.VC140.MFC', 'sys64'), ('VC/redist/debug_nonredist/x64/Microsoft.VC140.DebugCRT', 'sys64'), ('VC/redist/debug_nonredist/x64/Microsoft.VC140.DebugMFC', 'sys64'), ] else: raise ValueError('VS_VERSION %s' % VS_VERSION) if VS_VERSION == '2013': vs_path = r'C:\Program Files (x86)\Microsoft Visual Studio 12.0' else: vs_path = r'C:\Program Files (x86)\Microsoft Visual Studio 14.0' for path in paths: src = path[0] if isinstance(path, tuple) else path combined = os.path.join(vs_path, src) assert os.path.exists(combined) and os.path.isdir(combined) for root, _, files in os.walk(combined): for f in files: final_from = os.path.normpath(os.path.join(root, f)) if isinstance(path, tuple): result.append( (final_from, os.path.normpath(os.path.join(path[1], f)))) else: assert final_from.startswith(vs_path) dest = final_from[len(vs_path) + 1:] if VS_VERSION == '2013' and dest.lower().endswith('\\xtree'): # Patch for C4702 in xtree on VS2013. http://crbug.com/346399. 
(handle, patched) = tempfile.mkstemp() with open(final_from, 'rb') as unpatched_f: unpatched_contents = unpatched_f.read() os.write(handle, unpatched_contents.replace('warning(disable: 4127)', 'warning(disable: 4127 4702)')) result.append((patched, dest)) else: result.append((final_from, dest)) # Just copy the whole SDK. sdk_path = r'C:\Program Files (x86)\Windows Kits\10' for root, _, files in os.walk(sdk_path): for f in files: combined = os.path.normpath(os.path.join(root, f)) # Some of the files in this directory are exceedingly long (and exceed #_MAX_PATH for any moderately long root), so exclude them. We don't need # them anyway. Exclude the Windows Performance Toolkit just to save space. tail = combined[len(sdk_path) + 1:] if (tail.startswith('References\\') or tail.startswith('Windows Performance Toolkit\\')): continue if VS_VERSION == '2015': # There may be many Include\Lib\Source directories for many different # versions of Windows and packaging them all wastes ~450 MB # (uncompressed) per version and wastes time. Only copy the specified # version. 
if (tail.startswith('Include\\') or tail.startswith('Lib\\') or tail.startswith('Source\\')): if tail.count(WIN_VERSION) == 0: continue to = os.path.join('win_sdk', tail) result.append((combined, to)) if VS_VERSION == '2015': system_crt_files = [ 'api-ms-win-core-file-l1-2-0.dll', 'api-ms-win-core-file-l2-1-0.dll', 'api-ms-win-core-localization-l1-2-0.dll', 'api-ms-win-core-processthreads-l1-1-1.dll', 'api-ms-win-core-synch-l1-2-0.dll', 'api-ms-win-core-timezone-l1-1-0.dll', 'api-ms-win-core-xstate-l2-1-0.dll', 'api-ms-win-crt-conio-l1-1-0.dll', 'api-ms-win-crt-convert-l1-1-0.dll', 'api-ms-win-crt-environment-l1-1-0.dll', 'api-ms-win-crt-filesystem-l1-1-0.dll', 'api-ms-win-crt-heap-l1-1-0.dll', 'api-ms-win-crt-locale-l1-1-0.dll', 'api-ms-win-crt-math-l1-1-0.dll', 'api-ms-win-crt-multibyte-l1-1-0.dll', 'api-ms-win-crt-private-l1-1-0.dll', 'api-ms-win-crt-process-l1-1-0.dll', 'api-ms-win-crt-runtime-l1-1-0.dll', 'api-ms-win-crt-stdio-l1-1-0.dll', 'api-ms-win-crt-string-l1-1-0.dll', 'api-ms-win-crt-time-l1-1-0.dll', 'api-ms-win-crt-utility-l1-1-0.dll', 'api-ms-win-eventing-provider-l1-1-0.dll', 'ucrtbase.dll', 'ucrtbased.dll', ] bitness = platform.architecture()[0] # When running 64-bit python the x64 DLLs will be in System32 x64_path = 'System32' if bitness == '64bit' else 'Sysnative' x64_path = os.path.join(r'C:\Windows', x64_path) for system_crt_file in system_crt_files: result.append((os.path.join(r'C:\Windows\SysWOW64', system_crt_file), os.path.join('sys32', system_crt_file))) result.append((os.path.join(x64_path, system_crt_file), os.path.join('sys64', system_crt_file))) # Generically drop all arm stuff that we don't need, and # drop .msi files because we don't need installers. return [(f, t) for f, t in result if 'arm\\' not in f.lower() and 'arm64\\' not in f.lower() and not f.lower().endswith('.msi')] def GenerateSetEnvCmd(target_dir): """Generate a batch file that gyp expects to exist to set up the compiler environment. 
This is normally generated by a full install of the SDK, but we do it here manually since we do not do a full install.""" with open(os.path.join( target_dir, r'win_sdk\bin\SetEnv.cmd'), 'w') as f: f.write('@echo off\n' ':: Generated by win_toolchain\\package_from_installed.py.\n' # Common to x86 and x64 'set PATH=%~dp0..\\..\\Common7\\IDE;%PATH%\n' 'set INCLUDE=%~dp0..\\..\\win_sdk\\Include\\WINVERSION\\um;' '%~dp0..\\..\\win_sdk\\Include\\WINVERSION\\shared;' '%~dp0..\\..\\win_sdk\\Include\\WINVERSION\\winrt;' '%~dp0..\\..\\win_sdk\\Include\\WINVERSION\\ucrt;' # VS 2015 '%~dp0..\\..\\VC\\include;' '%~dp0..\\..\\VC\\atlmfc\\include\n' 'if "%1"=="/x64" goto x64\n'.replace('WINVERSION', WIN_VERSION)) # x86. Always use amd64_x86 cross, not x86 on x86. f.write('set PATH=%~dp0..\\..\\win_sdk\\bin\\x86;' '%~dp0..\\..\\VC\\bin\\amd64_x86;' '%~dp0..\\..\\VC\\bin\\amd64;' # Needed for mspdb1x0.dll. '%PATH%\n') f.write('set LIB=%~dp0..\\..\\VC\\lib;' '%~dp0..\\..\\win_sdk\\Lib\\WINVERSION\\um\\x86;' '%~dp0..\\..\\win_sdk\\Lib\\WINVERSION\\ucrt\\x86;' # VS 2015 '%~dp0..\\..\\VC\\atlmfc\\lib\n' 'goto :EOF\n'.replace('WINVERSION', WIN_VERSION)) # x64. 
f.write(':x64\n' 'set PATH=%~dp0..\\..\\win_sdk\\bin\\x64;' '%~dp0..\\..\\VC\\bin\\amd64;' '%PATH%\n') f.write('set LIB=%~dp0..\\..\\VC\\lib\\amd64;' '%~dp0..\\..\\win_sdk\\Lib\\WINVERSION\\um\\x64;' '%~dp0..\\..\\win_sdk\\Lib\\WINVERSION\\ucrt\\x64;' # VS 2015 '%~dp0..\\..\\VC\\atlmfc\\lib\\amd64\n' .replace('WINVERSION', WIN_VERSION)) def AddEnvSetup(files): """We need to generate this file in the same way that the "from pieces" script does, so pull that in here.""" tempdir = tempfile.mkdtemp() os.makedirs(os.path.join(tempdir, 'win_sdk', 'bin')) GenerateSetEnvCmd(tempdir) files.append((os.path.join(tempdir, 'win_sdk', 'bin', 'SetEnv.cmd'), 'win_sdk\\bin\\SetEnv.cmd')) vs_version_file = os.path.join(tempdir, 'VS_VERSION') with open(vs_version_file, 'wb') as version: print >>version, VS_VERSION files.append((vs_version_file, 'VS_VERSION')) def RenameToSha1(output): """Determine the hash in the same way that the unzipper does to rename the # .zip file.""" print 'Extracting to determine hash...' tempdir = tempfile.mkdtemp() old_dir = os.getcwd() os.chdir(tempdir) if VS_VERSION == '2013': rel_dir = 'vs2013_files' else: rel_dir = 'vs_files' with zipfile.ZipFile( os.path.join(old_dir, output), 'r', zipfile.ZIP_DEFLATED, True) as zf: zf.extractall(rel_dir) print 'Hashing...' sha1 = get_toolchain_if_necessary.CalculateHash(rel_dir) os.chdir(old_dir) shutil.rmtree(tempdir) final_name = sha1 + '.zip' os.rename(output, final_name) print 'Renamed %s to %s.' 
% (output, final_name) def main(): usage = 'usage: %prog [options] 2013|2015' parser = optparse.OptionParser(usage) parser.add_option('-w', '--winver', action='store', type='string', dest='winver', default='10.0.10586.0', help='Windows SDK version, such as 10.0.10586.0') parser.add_option('-d', '--dryrun', action='store_true', dest='dryrun', default=False, help='scan for file existence and prints statistics') (options, args) = parser.parse_args() if len(args) != 1 or args[0] not in ('2013', '2015'): print 'Must specify 2013 or 2015' parser.print_help(); return 1 global VS_VERSION VS_VERSION = args[0] global WIN_VERSION WIN_VERSION = options.winver print 'Building file list for VS %s Windows %s...' % (VS_VERSION, WIN_VERSION) files = BuildFileList() AddEnvSetup(files) if False: for f in files: print f[0], '->', f[1] return 0 output = 'out.zip' if os.path.exists(output): os.unlink(output) count = 0 version_match_count = 0 total_size = 0 missing_files = False with zipfile.ZipFile(output, 'w', zipfile.ZIP_DEFLATED, True) as zf: for disk_name, archive_name in files: sys.stdout.write('\r%d/%d ...%s' % (count, len(files), disk_name[-40:])) sys.stdout.flush() count += 1 if disk_name.count(WIN_VERSION) > 0: version_match_count += 1 if os.path.exists(disk_name): if options.dryrun: total_size += os.path.getsize(disk_name) else: zf.write(disk_name, archive_name) else: missing_files = True sys.stdout.write('\r%s does not exist.\n\n' % disk_name) sys.stdout.flush() if options.dryrun: sys.stdout.write('\r%1.3f GB of data in %d files, %d files for %s.%s\n' % (total_size / 1e9, count, version_match_count, WIN_VERSION, ' '*50)) return 0 if missing_files: raise 'One or more files were missing - aborting' if version_match_count == 0: raise 'No files found that match the specified winversion' sys.stdout.write('\rWrote to %s.%s\n' % (output, ' '*50)) sys.stdout.flush() RenameToSha1(output) return 0 if __name__ == '__main__': sys.exit(main())
mpl-2.0
andreaso/ansible
lib/ansible/modules/packaging/language/cpanm.py
70
7024
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2012, Franck Cuny <franck@lumberjaph.net> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: cpanm short_description: Manages Perl library dependencies. description: - Manage Perl library dependencies. version_added: "1.6" options: name: description: - The name of the Perl library to install. You may use the "full distribution path", e.g. 
MIYAGAWA/Plack-0.99_05.tar.gz required: false default: null aliases: ["pkg"] from_path: description: - The local directory from where to install required: false default: null notest: description: - Do not run unit tests required: false default: false locallib: description: - Specify the install base to install modules required: false default: false mirror: description: - Specifies the base URL for the CPAN mirror to use required: false default: false mirror_only: description: - Use the mirror's index file instead of the CPAN Meta DB required: false default: false installdeps: description: - Only install dependencies required: false default: false version_added: "2.0" version: description: - minimum version of perl module to consider acceptable required: false default: false version_added: "2.1" system_lib: description: - Use this if you want to install modules to the system perl include path. You must be root or have "passwordless" sudo for this to work. - This uses the cpanm commandline option '--sudo', which has nothing to do with ansible privilege escalation. required: false default: false version_added: "2.0" aliases: ['use_sudo'] executable: description: - Override the path to the cpanm executable required: false default: null version_added: "2.1" notes: - Please note that U(http://search.cpan.org/dist/App-cpanminus/bin/cpanm, cpanm) must be installed on the remote host. 
author: "Franck Cuny (@franckcuny)" ''' EXAMPLES = ''' # install Dancer perl package - cpanm: name: Dancer # install version 0.99_05 of the Plack perl package - cpanm: name: MIYAGAWA/Plack-0.99_05.tar.gz # install Dancer into the specified locallib - cpanm: name: Dancer locallib: /srv/webapps/my_app/extlib # install perl dependencies from local directory - cpanm: from_path: /srv/webapps/my_app/src/ # install Dancer perl package without running the unit tests in indicated locallib - cpanm: name: Dancer notest: True locallib: /srv/webapps/my_app/extlib # install Dancer perl package from a specific mirror - cpanm: name: Dancer mirror: 'http://cpan.cpantesters.org/' # install Dancer perl package into the system root path - cpanm: name: Dancer system_lib: yes # install Dancer if it's not already installed # OR the installed version is older than version 1.0 - cpanm: name: Dancer version: '1.0' ''' def _is_package_installed(module, name, locallib, cpanm, version): cmd = "" if locallib: os.environ["PERL5LIB"] = "%s/lib/perl5" % locallib cmd = "%s perl -e ' use %s" % (cmd, name) if version: cmd = "%s %s;'" % (cmd, version) else: cmd = "%s;'" % cmd res, stdout, stderr = module.run_command(cmd, check_rc=False) if res == 0: return True else: return False def _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo): # this code should use "%s" like everything else and just return early but not fixing all of it now. 
# don't copy stuff like this if from_path: cmd = cpanm + " " + from_path else: cmd = cpanm + " " + name if notest is True: cmd = cmd + " -n" if locallib is not None: cmd = cmd + " -l " + locallib if mirror is not None: cmd = cmd + " --mirror " + mirror if mirror_only is True: cmd = cmd + " --mirror-only" if installdeps is True: cmd = cmd + " --installdeps" if use_sudo is True: cmd = cmd + " --sudo" return cmd def _get_cpanm_path(module): if module.params['executable']: return module.params['executable'] else: return module.get_bin_path('cpanm', True) def main(): arg_spec = dict( name=dict(default=None, required=False, aliases=['pkg']), from_path=dict(default=None, required=False, type='path'), notest=dict(default=False, type='bool'), locallib=dict(default=None, required=False, type='path'), mirror=dict(default=None, required=False), mirror_only=dict(default=False, type='bool'), installdeps=dict(default=False, type='bool'), system_lib=dict(default=False, type='bool', aliases=['use_sudo']), version=dict(default=None, required=False), executable=dict(required=False, type='path'), ) module = AnsibleModule( argument_spec=arg_spec, required_one_of=[['name', 'from_path']], ) cpanm = _get_cpanm_path(module) name = module.params['name'] from_path = module.params['from_path'] notest = module.boolean(module.params.get('notest', False)) locallib = module.params['locallib'] mirror = module.params['mirror'] mirror_only = module.params['mirror_only'] installdeps = module.params['installdeps'] use_sudo = module.params['system_lib'] version = module.params['version'] changed = False installed = _is_package_installed(module, name, locallib, cpanm, version) if not installed: cmd = _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo) rc_cpanm, out_cpanm, err_cpanm = module.run_command(cmd, check_rc=False) if rc_cpanm != 0: module.fail_json(msg=err_cpanm, cmd=cmd) if (err_cpanm.find('is up to date') == -1 and out_cpanm.find('is up to 
date') == -1): changed = True module.exit_json(changed=changed, binary=cpanm, name=name) # import module snippets from ansible.module_utils.basic import * if __name__ == '__main__': main()
gpl-3.0
kenshay/ImageScript
ProgramData/SystemFiles/Python/Lib/compiler/ast.py
206
37508
"""Python abstract syntax node definitions This file is automatically generated by Tools/compiler/astgen.py """ from compiler.consts import CO_VARARGS, CO_VARKEYWORDS def flatten(seq): l = [] for elt in seq: t = type(elt) if t is tuple or t is list: for elt2 in flatten(elt): l.append(elt2) else: l.append(elt) return l def flatten_nodes(seq): return [n for n in flatten(seq) if isinstance(n, Node)] nodes = {} class Node: """Abstract base class for ast nodes.""" def getChildren(self): pass # implemented by subclasses def __iter__(self): for n in self.getChildren(): yield n def asList(self): # for backwards compatibility return self.getChildren() def getChildNodes(self): pass # implemented by subclasses class EmptyNode(Node): pass class Expression(Node): # Expression is an artificial node class to support "eval" nodes["expression"] = "Expression" def __init__(self, node): self.node = node def getChildren(self): return self.node, def getChildNodes(self): return self.node, def __repr__(self): return "Expression(%s)" % (repr(self.node)) class Add(Node): def __init__(self, leftright, lineno=None): self.left = leftright[0] self.right = leftright[1] self.lineno = lineno def getChildren(self): return self.left, self.right def getChildNodes(self): return self.left, self.right def __repr__(self): return "Add((%s, %s))" % (repr(self.left), repr(self.right)) class And(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "And(%s)" % (repr(self.nodes),) class AssAttr(Node): def __init__(self, expr, attrname, flags, lineno=None): self.expr = expr self.attrname = attrname self.flags = flags self.lineno = lineno def getChildren(self): return self.expr, self.attrname, self.flags def getChildNodes(self): return self.expr, def __repr__(self): return "AssAttr(%s, %s, 
%s)" % (repr(self.expr), repr(self.attrname), repr(self.flags)) class AssList(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "AssList(%s)" % (repr(self.nodes),) class AssName(Node): def __init__(self, name, flags, lineno=None): self.name = name self.flags = flags self.lineno = lineno def getChildren(self): return self.name, self.flags def getChildNodes(self): return () def __repr__(self): return "AssName(%s, %s)" % (repr(self.name), repr(self.flags)) class AssTuple(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "AssTuple(%s)" % (repr(self.nodes),) class Assert(Node): def __init__(self, test, fail, lineno=None): self.test = test self.fail = fail self.lineno = lineno def getChildren(self): children = [] children.append(self.test) children.append(self.fail) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.test) if self.fail is not None: nodelist.append(self.fail) return tuple(nodelist) def __repr__(self): return "Assert(%s, %s)" % (repr(self.test), repr(self.fail)) class Assign(Node): def __init__(self, nodes, expr, lineno=None): self.nodes = nodes self.expr = expr self.lineno = lineno def getChildren(self): children = [] children.extend(flatten(self.nodes)) children.append(self.expr) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) nodelist.append(self.expr) return tuple(nodelist) def __repr__(self): return "Assign(%s, %s)" % (repr(self.nodes), repr(self.expr)) class AugAssign(Node): def __init__(self, node, op, expr, 
lineno=None): self.node = node self.op = op self.expr = expr self.lineno = lineno def getChildren(self): return self.node, self.op, self.expr def getChildNodes(self): return self.node, self.expr def __repr__(self): return "AugAssign(%s, %s, %s)" % (repr(self.node), repr(self.op), repr(self.expr)) class Backquote(Node): def __init__(self, expr, lineno=None): self.expr = expr self.lineno = lineno def getChildren(self): return self.expr, def getChildNodes(self): return self.expr, def __repr__(self): return "Backquote(%s)" % (repr(self.expr),) class Bitand(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "Bitand(%s)" % (repr(self.nodes),) class Bitor(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "Bitor(%s)" % (repr(self.nodes),) class Bitxor(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "Bitxor(%s)" % (repr(self.nodes),) class Break(Node): def __init__(self, lineno=None): self.lineno = lineno def getChildren(self): return () def getChildNodes(self): return () def __repr__(self): return "Break()" class CallFunc(Node): def __init__(self, node, args, star_args = None, dstar_args = None, lineno=None): self.node = node self.args = args self.star_args = star_args self.dstar_args = dstar_args self.lineno = lineno def getChildren(self): children = [] children.append(self.node) 
children.extend(flatten(self.args)) children.append(self.star_args) children.append(self.dstar_args) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.node) nodelist.extend(flatten_nodes(self.args)) if self.star_args is not None: nodelist.append(self.star_args) if self.dstar_args is not None: nodelist.append(self.dstar_args) return tuple(nodelist) def __repr__(self): return "CallFunc(%s, %s, %s, %s)" % (repr(self.node), repr(self.args), repr(self.star_args), repr(self.dstar_args)) class Class(Node): def __init__(self, name, bases, doc, code, decorators = None, lineno=None): self.name = name self.bases = bases self.doc = doc self.code = code self.decorators = decorators self.lineno = lineno def getChildren(self): children = [] children.append(self.name) children.extend(flatten(self.bases)) children.append(self.doc) children.append(self.code) children.append(self.decorators) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.bases)) nodelist.append(self.code) if self.decorators is not None: nodelist.append(self.decorators) return tuple(nodelist) def __repr__(self): return "Class(%s, %s, %s, %s, %s)" % (repr(self.name), repr(self.bases), repr(self.doc), repr(self.code), repr(self.decorators)) class Compare(Node): def __init__(self, expr, ops, lineno=None): self.expr = expr self.ops = ops self.lineno = lineno def getChildren(self): children = [] children.append(self.expr) children.extend(flatten(self.ops)) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.expr) nodelist.extend(flatten_nodes(self.ops)) return tuple(nodelist) def __repr__(self): return "Compare(%s, %s)" % (repr(self.expr), repr(self.ops)) class Const(Node): def __init__(self, value, lineno=None): self.value = value self.lineno = lineno def getChildren(self): return self.value, def getChildNodes(self): return () def __repr__(self): return "Const(%s)" % (repr(self.value),) class Continue(Node): 
def __init__(self, lineno=None): self.lineno = lineno def getChildren(self): return () def getChildNodes(self): return () def __repr__(self): return "Continue()" class Decorators(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "Decorators(%s)" % (repr(self.nodes),) class Dict(Node): def __init__(self, items, lineno=None): self.items = items self.lineno = lineno def getChildren(self): return tuple(flatten(self.items)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.items)) return tuple(nodelist) def __repr__(self): return "Dict(%s)" % (repr(self.items),) class Discard(Node): def __init__(self, expr, lineno=None): self.expr = expr self.lineno = lineno def getChildren(self): return self.expr, def getChildNodes(self): return self.expr, def __repr__(self): return "Discard(%s)" % (repr(self.expr),) class Div(Node): def __init__(self, leftright, lineno=None): self.left = leftright[0] self.right = leftright[1] self.lineno = lineno def getChildren(self): return self.left, self.right def getChildNodes(self): return self.left, self.right def __repr__(self): return "Div((%s, %s))" % (repr(self.left), repr(self.right)) class Ellipsis(Node): def __init__(self, lineno=None): self.lineno = lineno def getChildren(self): return () def getChildNodes(self): return () def __repr__(self): return "Ellipsis()" class Exec(Node): def __init__(self, expr, locals, globals, lineno=None): self.expr = expr self.locals = locals self.globals = globals self.lineno = lineno def getChildren(self): children = [] children.append(self.expr) children.append(self.locals) children.append(self.globals) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.expr) if self.locals is not None: nodelist.append(self.locals) if 
self.globals is not None: nodelist.append(self.globals) return tuple(nodelist) def __repr__(self): return "Exec(%s, %s, %s)" % (repr(self.expr), repr(self.locals), repr(self.globals)) class FloorDiv(Node): def __init__(self, leftright, lineno=None): self.left = leftright[0] self.right = leftright[1] self.lineno = lineno def getChildren(self): return self.left, self.right def getChildNodes(self): return self.left, self.right def __repr__(self): return "FloorDiv((%s, %s))" % (repr(self.left), repr(self.right)) class For(Node): def __init__(self, assign, list, body, else_, lineno=None): self.assign = assign self.list = list self.body = body self.else_ = else_ self.lineno = lineno def getChildren(self): children = [] children.append(self.assign) children.append(self.list) children.append(self.body) children.append(self.else_) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.assign) nodelist.append(self.list) nodelist.append(self.body) if self.else_ is not None: nodelist.append(self.else_) return tuple(nodelist) def __repr__(self): return "For(%s, %s, %s, %s)" % (repr(self.assign), repr(self.list), repr(self.body), repr(self.else_)) class From(Node): def __init__(self, modname, names, level, lineno=None): self.modname = modname self.names = names self.level = level self.lineno = lineno def getChildren(self): return self.modname, self.names, self.level def getChildNodes(self): return () def __repr__(self): return "From(%s, %s, %s)" % (repr(self.modname), repr(self.names), repr(self.level)) class Function(Node): def __init__(self, decorators, name, argnames, defaults, flags, doc, code, lineno=None): self.decorators = decorators self.name = name self.argnames = argnames self.defaults = defaults self.flags = flags self.doc = doc self.code = code self.lineno = lineno self.varargs = self.kwargs = None if flags & CO_VARARGS: self.varargs = 1 if flags & CO_VARKEYWORDS: self.kwargs = 1 def getChildren(self): children = [] 
children.append(self.decorators) children.append(self.name) children.append(self.argnames) children.extend(flatten(self.defaults)) children.append(self.flags) children.append(self.doc) children.append(self.code) return tuple(children) def getChildNodes(self): nodelist = [] if self.decorators is not None: nodelist.append(self.decorators) nodelist.extend(flatten_nodes(self.defaults)) nodelist.append(self.code) return tuple(nodelist) def __repr__(self): return "Function(%s, %s, %s, %s, %s, %s, %s)" % (repr(self.decorators), repr(self.name), repr(self.argnames), repr(self.defaults), repr(self.flags), repr(self.doc), repr(self.code)) class GenExpr(Node): def __init__(self, code, lineno=None): self.code = code self.lineno = lineno self.argnames = ['.0'] self.varargs = self.kwargs = None def getChildren(self): return self.code, def getChildNodes(self): return self.code, def __repr__(self): return "GenExpr(%s)" % (repr(self.code),) class GenExprFor(Node): def __init__(self, assign, iter, ifs, lineno=None): self.assign = assign self.iter = iter self.ifs = ifs self.lineno = lineno self.is_outmost = False def getChildren(self): children = [] children.append(self.assign) children.append(self.iter) children.extend(flatten(self.ifs)) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.assign) nodelist.append(self.iter) nodelist.extend(flatten_nodes(self.ifs)) return tuple(nodelist) def __repr__(self): return "GenExprFor(%s, %s, %s)" % (repr(self.assign), repr(self.iter), repr(self.ifs)) class GenExprIf(Node): def __init__(self, test, lineno=None): self.test = test self.lineno = lineno def getChildren(self): return self.test, def getChildNodes(self): return self.test, def __repr__(self): return "GenExprIf(%s)" % (repr(self.test),) class GenExprInner(Node): def __init__(self, expr, quals, lineno=None): self.expr = expr self.quals = quals self.lineno = lineno def getChildren(self): children = [] children.append(self.expr) 
children.extend(flatten(self.quals)) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.expr) nodelist.extend(flatten_nodes(self.quals)) return tuple(nodelist) def __repr__(self): return "GenExprInner(%s, %s)" % (repr(self.expr), repr(self.quals)) class Getattr(Node): def __init__(self, expr, attrname, lineno=None): self.expr = expr self.attrname = attrname self.lineno = lineno def getChildren(self): return self.expr, self.attrname def getChildNodes(self): return self.expr, def __repr__(self): return "Getattr(%s, %s)" % (repr(self.expr), repr(self.attrname)) class Global(Node): def __init__(self, names, lineno=None): self.names = names self.lineno = lineno def getChildren(self): return self.names, def getChildNodes(self): return () def __repr__(self): return "Global(%s)" % (repr(self.names),) class If(Node): def __init__(self, tests, else_, lineno=None): self.tests = tests self.else_ = else_ self.lineno = lineno def getChildren(self): children = [] children.extend(flatten(self.tests)) children.append(self.else_) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.tests)) if self.else_ is not None: nodelist.append(self.else_) return tuple(nodelist) def __repr__(self): return "If(%s, %s)" % (repr(self.tests), repr(self.else_)) class IfExp(Node): def __init__(self, test, then, else_, lineno=None): self.test = test self.then = then self.else_ = else_ self.lineno = lineno def getChildren(self): return self.test, self.then, self.else_ def getChildNodes(self): return self.test, self.then, self.else_ def __repr__(self): return "IfExp(%s, %s, %s)" % (repr(self.test), repr(self.then), repr(self.else_)) class Import(Node): def __init__(self, names, lineno=None): self.names = names self.lineno = lineno def getChildren(self): return self.names, def getChildNodes(self): return () def __repr__(self): return "Import(%s)" % (repr(self.names),) class Invert(Node): def __init__(self, expr, lineno=None): 
self.expr = expr self.lineno = lineno def getChildren(self): return self.expr, def getChildNodes(self): return self.expr, def __repr__(self): return "Invert(%s)" % (repr(self.expr),) class Keyword(Node): def __init__(self, name, expr, lineno=None): self.name = name self.expr = expr self.lineno = lineno def getChildren(self): return self.name, self.expr def getChildNodes(self): return self.expr, def __repr__(self): return "Keyword(%s, %s)" % (repr(self.name), repr(self.expr)) class Lambda(Node): def __init__(self, argnames, defaults, flags, code, lineno=None): self.argnames = argnames self.defaults = defaults self.flags = flags self.code = code self.lineno = lineno self.varargs = self.kwargs = None if flags & CO_VARARGS: self.varargs = 1 if flags & CO_VARKEYWORDS: self.kwargs = 1 def getChildren(self): children = [] children.append(self.argnames) children.extend(flatten(self.defaults)) children.append(self.flags) children.append(self.code) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.defaults)) nodelist.append(self.code) return tuple(nodelist) def __repr__(self): return "Lambda(%s, %s, %s, %s)" % (repr(self.argnames), repr(self.defaults), repr(self.flags), repr(self.code)) class LeftShift(Node): def __init__(self, leftright, lineno=None): self.left = leftright[0] self.right = leftright[1] self.lineno = lineno def getChildren(self): return self.left, self.right def getChildNodes(self): return self.left, self.right def __repr__(self): return "LeftShift((%s, %s))" % (repr(self.left), repr(self.right)) class List(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "List(%s)" % (repr(self.nodes),) class ListComp(Node): def __init__(self, expr, quals, lineno=None): self.expr = expr self.quals = quals 
self.lineno = lineno def getChildren(self): children = [] children.append(self.expr) children.extend(flatten(self.quals)) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.expr) nodelist.extend(flatten_nodes(self.quals)) return tuple(nodelist) def __repr__(self): return "ListComp(%s, %s)" % (repr(self.expr), repr(self.quals)) class ListCompFor(Node): def __init__(self, assign, list, ifs, lineno=None): self.assign = assign self.list = list self.ifs = ifs self.lineno = lineno def getChildren(self): children = [] children.append(self.assign) children.append(self.list) children.extend(flatten(self.ifs)) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.assign) nodelist.append(self.list) nodelist.extend(flatten_nodes(self.ifs)) return tuple(nodelist) def __repr__(self): return "ListCompFor(%s, %s, %s)" % (repr(self.assign), repr(self.list), repr(self.ifs)) class ListCompIf(Node): def __init__(self, test, lineno=None): self.test = test self.lineno = lineno def getChildren(self): return self.test, def getChildNodes(self): return self.test, def __repr__(self): return "ListCompIf(%s)" % (repr(self.test),) class SetComp(Node): def __init__(self, expr, quals, lineno=None): self.expr = expr self.quals = quals self.lineno = lineno def getChildren(self): children = [] children.append(self.expr) children.extend(flatten(self.quals)) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.expr) nodelist.extend(flatten_nodes(self.quals)) return tuple(nodelist) def __repr__(self): return "SetComp(%s, %s)" % (repr(self.expr), repr(self.quals)) class DictComp(Node): def __init__(self, key, value, quals, lineno=None): self.key = key self.value = value self.quals = quals self.lineno = lineno def getChildren(self): children = [] children.append(self.key) children.append(self.value) children.extend(flatten(self.quals)) return tuple(children) def getChildNodes(self): nodelist = [] 
nodelist.append(self.key) nodelist.append(self.value) nodelist.extend(flatten_nodes(self.quals)) return tuple(nodelist) def __repr__(self): return "DictComp(%s, %s, %s)" % (repr(self.key), repr(self.value), repr(self.quals)) class Mod(Node): def __init__(self, leftright, lineno=None): self.left = leftright[0] self.right = leftright[1] self.lineno = lineno def getChildren(self): return self.left, self.right def getChildNodes(self): return self.left, self.right def __repr__(self): return "Mod((%s, %s))" % (repr(self.left), repr(self.right)) class Module(Node): def __init__(self, doc, node, lineno=None): self.doc = doc self.node = node self.lineno = lineno def getChildren(self): return self.doc, self.node def getChildNodes(self): return self.node, def __repr__(self): return "Module(%s, %s)" % (repr(self.doc), repr(self.node)) class Mul(Node): def __init__(self, leftright, lineno=None): self.left = leftright[0] self.right = leftright[1] self.lineno = lineno def getChildren(self): return self.left, self.right def getChildNodes(self): return self.left, self.right def __repr__(self): return "Mul((%s, %s))" % (repr(self.left), repr(self.right)) class Name(Node): def __init__(self, name, lineno=None): self.name = name self.lineno = lineno def getChildren(self): return self.name, def getChildNodes(self): return () def __repr__(self): return "Name(%s)" % (repr(self.name),) class Not(Node): def __init__(self, expr, lineno=None): self.expr = expr self.lineno = lineno def getChildren(self): return self.expr, def getChildNodes(self): return self.expr, def __repr__(self): return "Not(%s)" % (repr(self.expr),) class Or(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "Or(%s)" % (repr(self.nodes),) class Pass(Node): def __init__(self, lineno=None): self.lineno 
= lineno def getChildren(self): return () def getChildNodes(self): return () def __repr__(self): return "Pass()" class Power(Node): def __init__(self, leftright, lineno=None): self.left = leftright[0] self.right = leftright[1] self.lineno = lineno def getChildren(self): return self.left, self.right def getChildNodes(self): return self.left, self.right def __repr__(self): return "Power((%s, %s))" % (repr(self.left), repr(self.right)) class Print(Node): def __init__(self, nodes, dest, lineno=None): self.nodes = nodes self.dest = dest self.lineno = lineno def getChildren(self): children = [] children.extend(flatten(self.nodes)) children.append(self.dest) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) if self.dest is not None: nodelist.append(self.dest) return tuple(nodelist) def __repr__(self): return "Print(%s, %s)" % (repr(self.nodes), repr(self.dest)) class Printnl(Node): def __init__(self, nodes, dest, lineno=None): self.nodes = nodes self.dest = dest self.lineno = lineno def getChildren(self): children = [] children.extend(flatten(self.nodes)) children.append(self.dest) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) if self.dest is not None: nodelist.append(self.dest) return tuple(nodelist) def __repr__(self): return "Printnl(%s, %s)" % (repr(self.nodes), repr(self.dest)) class Raise(Node): def __init__(self, expr1, expr2, expr3, lineno=None): self.expr1 = expr1 self.expr2 = expr2 self.expr3 = expr3 self.lineno = lineno def getChildren(self): children = [] children.append(self.expr1) children.append(self.expr2) children.append(self.expr3) return tuple(children) def getChildNodes(self): nodelist = [] if self.expr1 is not None: nodelist.append(self.expr1) if self.expr2 is not None: nodelist.append(self.expr2) if self.expr3 is not None: nodelist.append(self.expr3) return tuple(nodelist) def __repr__(self): return "Raise(%s, %s, %s)" % 
(repr(self.expr1), repr(self.expr2), repr(self.expr3)) class Return(Node): def __init__(self, value, lineno=None): self.value = value self.lineno = lineno def getChildren(self): return self.value, def getChildNodes(self): return self.value, def __repr__(self): return "Return(%s)" % (repr(self.value),) class RightShift(Node): def __init__(self, leftright, lineno=None): self.left = leftright[0] self.right = leftright[1] self.lineno = lineno def getChildren(self): return self.left, self.right def getChildNodes(self): return self.left, self.right def __repr__(self): return "RightShift((%s, %s))" % (repr(self.left), repr(self.right)) class Set(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "Set(%s)" % (repr(self.nodes),) class Slice(Node): def __init__(self, expr, flags, lower, upper, lineno=None): self.expr = expr self.flags = flags self.lower = lower self.upper = upper self.lineno = lineno def getChildren(self): children = [] children.append(self.expr) children.append(self.flags) children.append(self.lower) children.append(self.upper) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.expr) if self.lower is not None: nodelist.append(self.lower) if self.upper is not None: nodelist.append(self.upper) return tuple(nodelist) def __repr__(self): return "Slice(%s, %s, %s, %s)" % (repr(self.expr), repr(self.flags), repr(self.lower), repr(self.upper)) class Sliceobj(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "Sliceobj(%s)" % (repr(self.nodes),) class Stmt(Node): def __init__(self, nodes, 
lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "Stmt(%s)" % (repr(self.nodes),) class Sub(Node): def __init__(self, leftright, lineno=None): self.left = leftright[0] self.right = leftright[1] self.lineno = lineno def getChildren(self): return self.left, self.right def getChildNodes(self): return self.left, self.right def __repr__(self): return "Sub((%s, %s))" % (repr(self.left), repr(self.right)) class Subscript(Node): def __init__(self, expr, flags, subs, lineno=None): self.expr = expr self.flags = flags self.subs = subs self.lineno = lineno def getChildren(self): children = [] children.append(self.expr) children.append(self.flags) children.extend(flatten(self.subs)) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.expr) nodelist.extend(flatten_nodes(self.subs)) return tuple(nodelist) def __repr__(self): return "Subscript(%s, %s, %s)" % (repr(self.expr), repr(self.flags), repr(self.subs)) class TryExcept(Node): def __init__(self, body, handlers, else_, lineno=None): self.body = body self.handlers = handlers self.else_ = else_ self.lineno = lineno def getChildren(self): children = [] children.append(self.body) children.extend(flatten(self.handlers)) children.append(self.else_) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.body) nodelist.extend(flatten_nodes(self.handlers)) if self.else_ is not None: nodelist.append(self.else_) return tuple(nodelist) def __repr__(self): return "TryExcept(%s, %s, %s)" % (repr(self.body), repr(self.handlers), repr(self.else_)) class TryFinally(Node): def __init__(self, body, final, lineno=None): self.body = body self.final = final self.lineno = lineno def getChildren(self): return self.body, self.final def getChildNodes(self): return self.body, self.final def 
__repr__(self): return "TryFinally(%s, %s)" % (repr(self.body), repr(self.final)) class Tuple(Node): def __init__(self, nodes, lineno=None): self.nodes = nodes self.lineno = lineno def getChildren(self): return tuple(flatten(self.nodes)) def getChildNodes(self): nodelist = [] nodelist.extend(flatten_nodes(self.nodes)) return tuple(nodelist) def __repr__(self): return "Tuple(%s)" % (repr(self.nodes),) class UnaryAdd(Node): def __init__(self, expr, lineno=None): self.expr = expr self.lineno = lineno def getChildren(self): return self.expr, def getChildNodes(self): return self.expr, def __repr__(self): return "UnaryAdd(%s)" % (repr(self.expr),) class UnarySub(Node): def __init__(self, expr, lineno=None): self.expr = expr self.lineno = lineno def getChildren(self): return self.expr, def getChildNodes(self): return self.expr, def __repr__(self): return "UnarySub(%s)" % (repr(self.expr),) class While(Node): def __init__(self, test, body, else_, lineno=None): self.test = test self.body = body self.else_ = else_ self.lineno = lineno def getChildren(self): children = [] children.append(self.test) children.append(self.body) children.append(self.else_) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.test) nodelist.append(self.body) if self.else_ is not None: nodelist.append(self.else_) return tuple(nodelist) def __repr__(self): return "While(%s, %s, %s)" % (repr(self.test), repr(self.body), repr(self.else_)) class With(Node): def __init__(self, expr, vars, body, lineno=None): self.expr = expr self.vars = vars self.body = body self.lineno = lineno def getChildren(self): children = [] children.append(self.expr) children.append(self.vars) children.append(self.body) return tuple(children) def getChildNodes(self): nodelist = [] nodelist.append(self.expr) if self.vars is not None: nodelist.append(self.vars) nodelist.append(self.body) return tuple(nodelist) def __repr__(self): return "With(%s, %s, %s)" % (repr(self.expr), repr(self.vars), 
repr(self.body)) class Yield(Node): def __init__(self, value, lineno=None): self.value = value self.lineno = lineno def getChildren(self): return self.value, def getChildNodes(self): return self.value, def __repr__(self): return "Yield(%s)" % (repr(self.value),) for name, obj in globals().items(): if isinstance(obj, type) and issubclass(obj, Node): nodes[name.lower()] = obj
gpl-3.0
jamesyli/solum
solum/objects/sqlalchemy/migration/alembic_migrations/env.py
7
2194
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # from logging import config as log_config from alembic import context from solum.objects.sqlalchemy import models import solum.openstack.common.db.sqlalchemy.session as sqlalchemy_session # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config # Interpret the config file for Python logging. # This line sets up loggers basically. log_config.fileConfig(config.config_file_name) # set the target for 'autogenerate' support target_metadata = models.Base.metadata def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ url = config.get_main_option("sqlalchemy.url") context.configure(url=url) with context.begin_transaction(): context.run_migrations() def run_migrations_online(): """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. 
""" engine = sqlalchemy_session.get_session().get_bind() with engine.connect() as connection: context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online()
apache-2.0
procangroup/edx-platform
common/test/acceptance/tests/video/test_studio_video_module.py
11
12257
# -*- coding: utf-8 -*- """ Acceptance tests for CMS Video Module. """ import os from unittest import skipIf from mock import patch from nose.plugins.attrib import attr from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc from common.test.acceptance.pages.common.auto_auth import AutoAuthPage from common.test.acceptance.pages.studio.overview import CourseOutlinePage from common.test.acceptance.pages.studio.video.video import VideoComponentPage from common.test.acceptance.tests.helpers import UniqueCourseTest, YouTubeStubConfig, is_youtube_available @skipIf(is_youtube_available() is False, 'YouTube is not available!') class CMSVideoBaseTest(UniqueCourseTest): """ CMS Video Module Base Test Class """ def setUp(self): """ Initialization of pages and course fixture for tests """ super(CMSVideoBaseTest, self).setUp() self.video = VideoComponentPage(self.browser) # This will be initialized later self.unit_page = None self.outline = CourseOutlinePage( self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run'] ) self.course_fixture = CourseFixture( self.course_info['org'], self.course_info['number'], self.course_info['run'], self.course_info['display_name'] ) self.assets = [] self.addCleanup(YouTubeStubConfig.reset) def _create_course_unit(self, youtube_stub_config=None, subtitles=False): """ Create a Studio Video Course Unit and Navigate to it. Arguments: youtube_stub_config (dict) subtitles (bool) """ if youtube_stub_config: YouTubeStubConfig.configure(youtube_stub_config) if subtitles: self.assets.append('subs_3_yD_cEKoCk.srt.sjson') self.navigate_to_course_unit() def _create_video(self): """ Create Xblock Video Component. """ self.video.create_video() video_xblocks = self.video.xblocks() # Total video xblock components count should be equals to 2 # Why 2? One video component is created by default for each test. 
Please see # test_studio_video_module.py:CMSVideoTest._create_course_unit # And we are creating second video component here. self.assertEqual(video_xblocks, 2) def _install_course_fixture(self): """ Prepare for tests by creating a course with a section, subsection, and unit. Performs the following: Create a course with a section, subsection, and unit Create a user and make that user a course author Log the user into studio """ if self.assets: self.course_fixture.add_asset(self.assets) # Create course with Video component self.course_fixture.add_children( XBlockFixtureDesc('chapter', 'Test Section').add_children( XBlockFixtureDesc('sequential', 'Test Subsection').add_children( XBlockFixtureDesc('vertical', 'Test Unit').add_children( XBlockFixtureDesc('video', 'Video') ) ) ) ).install() # Auto login and register the course AutoAuthPage( self.browser, staff=False, username=self.course_fixture.user.get('username'), email=self.course_fixture.user.get('email'), password=self.course_fixture.user.get('password') ).visit() def _navigate_to_course_unit_page(self): """ Open the course from the dashboard and expand the section and subsection and click on the Unit link The end result is the page where the user is editing the newly created unit """ # Visit Course Outline page self.outline.visit() # Visit Unit page self.unit_page = self.outline.section('Test Section').subsection('Test Subsection').expand_subsection().unit( 'Test Unit').go_to() self.video.wait_for_video_component_render() def navigate_to_course_unit(self): """ Install the course with required components and navigate to course unit page """ self._install_course_fixture() self._navigate_to_course_unit_page() def edit_component(self, xblock_index=1): """ Open component Edit Dialog for first component on page. Arguments: xblock_index: number starting from 1 (0th entry is the unit page itself) """ self.unit_page.xblocks[xblock_index].edit() def open_advanced_tab(self): """ Open components advanced tab. 
""" # The 0th entry is the unit page itself. self.unit_page.xblocks[1].open_advanced_tab() def open_basic_tab(self): """ Open components basic tab. """ # The 0th entry is the unit page itself. self.unit_page.xblocks[1].open_basic_tab() def save_unit_settings(self): """ Save component settings. """ # The 0th entry is the unit page itself. self.unit_page.xblocks[1].save_settings() @attr(shard=4) class CMSVideoTest(CMSVideoBaseTest): """ CMS Video Test Class """ def test_youtube_stub_proxy(self): """ Scenario: YouTube stub server proxies YouTube API correctly Given youtube stub server proxies YouTube API And I have created a Video component Then I can see video button "play" And I click video button "play" Then I can see video button "pause" """ self._create_course_unit(youtube_stub_config={'youtube_api_blocked': False}) self.assertTrue(self.video.is_button_shown('play')) self.video.click_player_button('play') self.video.wait_for_state('playing') self.assertTrue(self.video.is_button_shown('pause')) def test_youtube_stub_blocks_youtube_api(self): """ Scenario: YouTube stub server can block YouTube API Given youtube stub server blocks YouTube API And I have created a Video component Then I do not see video button "play" """ self._create_course_unit(youtube_stub_config={'youtube_api_blocked': True}) self.assertFalse(self.video.is_button_shown('play')) def test_autoplay_is_disabled(self): """ Scenario: Autoplay is disabled in Studio Given I have created a Video component Then when I view the video it does not have autoplay enabled """ self._create_course_unit() self.assertFalse(self.video.is_autoplay_enabled) def test_video_creation_takes_single_click(self): """ Scenario: Creating a video takes a single click And creating a video takes a single click """ self._create_course_unit() # This will create a video by doing a single click and then ensure that video is created self._create_video() def test_captions_hidden_correctly(self): """ Scenario: Captions are hidden 
correctly Given I have created a Video component with subtitles And I have hidden captions Then when I view the video it does not show the captions """ self._create_course_unit(subtitles=True) self.video.hide_captions() self.assertFalse(self.video.is_captions_visible()) def test_video_controls_shown_correctly(self): """ Scenario: Video controls for all videos show correctly Given I have created two Video components And first is private video When I reload the page Then video controls for all videos are visible And the error message isn't shown """ self._create_course_unit(youtube_stub_config={'youtube_api_private_video': True}) self.video.create_video() # change id of first default video self.edit_component(1) self.open_advanced_tab() self.video.set_field_value('YouTube ID', 'sampleid123') self.save_unit_settings() # again open unit page and check that video controls show for both videos self._navigate_to_course_unit_page() self.assertTrue(self.video.is_controls_visible()) # verify that the error message isn't shown by default self.assertFalse(self.video.is_error_message_shown) def test_captions_shown_correctly(self): """ Scenario: Captions are shown correctly Given I have created a Video component with subtitles Then when I view the video it does show the captions """ self._create_course_unit(subtitles=True) self.assertTrue(self.video.is_captions_visible()) def test_captions_toggling(self): """ Scenario: Captions are toggled correctly Given I have created a Video component with subtitles And I have toggled captions Then when I view the video it does show the captions """ self._create_course_unit(subtitles=True) self.video.click_player_button('transcript_button') self.assertFalse(self.video.is_captions_visible()) self.video.click_player_button('transcript_button') self.assertTrue(self.video.is_captions_visible()) def test_caption_line_focus(self): """ Scenario: When enter key is pressed on a caption, an outline shows around it Given I have created a Video component 
with subtitles And Make sure captions are opened Then I focus on first caption line And I see first caption line has focused """ self._create_course_unit(subtitles=True) self.video.show_captions() self.video.focus_caption_line(2) self.assertTrue(self.video.is_caption_line_focused(2)) def test_slider_range_works(self): """ Scenario: When start and end times are specified, a range on slider is shown Given I have created a Video component with subtitles And Make sure captions are closed And I edit the component And I open tab "Advanced" And I set value "00:00:12" to the field "Video Start Time" And I set value "00:00:24" to the field "Video Stop Time" And I save changes And I click video button "play" Then I see a range on slider """ self._create_course_unit(subtitles=True) self.video.hide_captions() self.edit_component() self.open_advanced_tab() self.video.set_field_value('Video Start Time', '00:00:12') self.video.set_field_value('Video Stop Time', '00:00:24') self.save_unit_settings() self.video.click_player_button('play') @attr('a11y') class CMSVideoA11yTest(CMSVideoBaseTest): """ CMS Video Accessibility Test Class """ def setUp(self): browser = os.environ.get('SELENIUM_BROWSER', 'firefox') # the a11y tests run in CI under phantomjs which doesn't # support html5 video or flash player, so the video tests # don't work in it. We still want to be able to run these # tests in CI, so override the browser setting if it is # phantomjs. if browser == 'phantomjs': browser = 'firefox' with patch.dict(os.environ, {'SELENIUM_BROWSER': browser}): super(CMSVideoA11yTest, self).setUp() def test_video_player_a11y(self): # we're loading a shorter transcript to ensure both skip links are available self._create_course_unit(subtitles=True) self.edit_component() self.video.upload_transcript('english_single_transcript.srt') self.save_unit_settings() self.video.wait_for_captions() self.assertTrue(self.video.is_captions_visible()) # limit the scope of the audit to the video player only. 
self.outline.a11y_audit.config.set_scope( include=["div.video"] ) self.outline.a11y_audit.check_for_accessibility_errors()
agpl-3.0
geopython/QGIS
python/plugins/processing/algs/grass7/ext/r_li_padsd_ascii.py
12
1538
# -*- coding: utf-8 -*- """ *************************************************************************** r_li_padsd_ascii.py ------------------- Date : February 2016 Copyright : (C) 2016 by Médéric Ribreux Email : medspx at medspx dot fr *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Médéric Ribreux' __date__ = 'February 2016' __copyright__ = '(C) 2016, Médéric Ribreux' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' from .r_li import checkMovingWindow, configFile, moveOutputTxtFile def checkParameterValuesBeforeExecuting(alg, parameters, context): return checkMovingWindow(alg, parameters, context, True) def processCommand(alg, parameters, context, feedback): configFile(alg, parameters, context, feedback, True) def processOutputs(alg, parameters, context, feedback): moveOutputTxtFile(alg, parameters, context)
gpl-2.0
ryfeus/lambda-packs
Sklearn_scipy_numpy/source/scipy/cluster/setup.py
71
1216
#!/usr/bin/env python from __future__ import division, print_function, absolute_import import sys if sys.version_info[0] >= 3: DEFINE_MACROS = [("SCIPY_PY3K", None)] else: DEFINE_MACROS = [] def configuration(parent_package='', top_path=None): from numpy.distutils.system_info import get_info from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs config = Configuration('cluster', parent_package, top_path) blas_opt = get_info('lapack_opt') config.add_data_dir('tests') config.add_extension('_vq', sources=[('_vq.c')], include_dirs=[get_numpy_include_dirs()], extra_info=blas_opt) config.add_extension('_hierarchy', sources=[('_hierarchy.c')], include_dirs=[get_numpy_include_dirs()]) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(maintainer="SciPy Developers", author="Eric Jones", maintainer_email="scipy-dev@scipy.org", description="Clustering Algorithms (Information Theory)", url="http://www.scipy.org", license="SciPy License (BSD Style)", **configuration(top_path='').todict() )
mit
kustodian/ansible
test/units/parsing/test_metadata.py
32
9992
# coding: utf-8
# (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import ast

import pytest

from ansible.parsing import metadata as md

# Byte-string building blocks that are concatenated in different orders to
# produce module sources for the parametrized tests below.  NOTE(review):
# the internal whitespace of these literals matters — the expected offsets
# in METADATA_EXAMPLES depend on the exact column positions.
LICENSE = b"""# some license text boilerplate
# That we have at the top of files
"""

FUTURE_IMPORTS = b"""
from __future__ import (absolute_import, division, print_function)
"""

REGULAR_IMPORTS = b"""
import test
from foo import bar
"""

STANDARD_METADATA = b"""
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}
"""

TEXT_STD_METADATA = b"""
ANSIBLE_METADATA = u'''
metadata_version: '1.1'
status:
  - 'stableinterface'
supported_by: 'core'
'''
"""

BYTES_STD_METADATA = b"""
ANSIBLE_METADATA = b'''
metadata_version: '1.1'
status:
  - 'stableinterface'
supported_by: 'core'
'''
"""

TRAILING_COMMENT_METADATA = b"""
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}  # { Testing }
"""

MULTIPLE_STATEMENTS_METADATA = b"""
DOCUMENTATION = "" ; ANSIBLE_METADATA = {'metadata_version': '1.1',
                                         'status': ['stableinterface'],
                                         'supported_by': 'core'} ; RETURNS = ""
"""

EMBEDDED_COMMENT_METADATA = b"""
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    # { Testing }
                    'supported_by': 'core'}
"""

HASH_SYMBOL_METADATA = b"""
ANSIBLE_METADATA = {'metadata_version': '1.1 # 4',
                    'status': ['stableinterface'],
                    'supported_by': 'core # Testing '}
"""

# NOTE(review): this is a byte-for-byte duplicate of the assignment above;
# it is harmless but one of the two definitions is redundant.
HASH_SYMBOL_METADATA = b"""
ANSIBLE_METADATA = {'metadata_version': '1.1 # 4',
                    'status': ['stableinterface'],
                    'supported_by': 'core # Testing '}
"""

HASH_COMBO_METADATA = b"""
ANSIBLE_METADATA = {'metadata_version': '1.1 # 4',
                    'status': ['stableinterface'],
                    # { Testing }
                    'supported_by': 'core'}  # { Testing }
"""

# Expected parsed metadata dicts.  Note the third rebinding of
# HASH_SYMBOL_METADATA below — from here on the name refers to a dict, not
# the byte fixture above.
METADATA = {'metadata_version': '1.1',
            'status': ['stableinterface'],
            'supported_by': 'core'}

HASH_SYMBOL_METADATA = {'metadata_version': '1.1 # 4',
                        'status': ['stableinterface'],
                        'supported_by': 'core'}

# Each entry is (module_source, expected_return_of_extract_metadata).
# The expected tuple appears to be (metadata_dict, start_line, start_col,
# end_line, end_col, target_names) — verify against md.extract_metadata.
METADATA_EXAMPLES = (
    # Standard import
    (LICENSE + FUTURE_IMPORTS + STANDARD_METADATA + REGULAR_IMPORTS,
     (METADATA, 5, 0, 7, 42, ['ANSIBLE_METADATA'])),
    # Metadata at end of file
    (LICENSE + FUTURE_IMPORTS + REGULAR_IMPORTS + STANDARD_METADATA.rstrip(),
     (METADATA, 8, 0, 10, 42, ['ANSIBLE_METADATA'])),
    # Metadata at beginning of file
    (STANDARD_METADATA + LICENSE + REGULAR_IMPORTS,
     (METADATA, 1, 0, 3, 42, ['ANSIBLE_METADATA'])),

    # Standard import with a trailing comment
    (LICENSE + FUTURE_IMPORTS + TRAILING_COMMENT_METADATA + REGULAR_IMPORTS,
     (METADATA, 5, 0, 7, 42, ['ANSIBLE_METADATA'])),
    # Metadata at end of file with a trailing comment
    (LICENSE + FUTURE_IMPORTS + REGULAR_IMPORTS + TRAILING_COMMENT_METADATA.rstrip(),
     (METADATA, 8, 0, 10, 42, ['ANSIBLE_METADATA'])),
    # Metadata at beginning of file with a trailing comment
    (TRAILING_COMMENT_METADATA + LICENSE + REGULAR_IMPORTS,
     (METADATA, 1, 0, 3, 42, ['ANSIBLE_METADATA'])),

    # FIXME: Current code cannot handle multiple statements on the same line.
    # This is bad style so we're just going to ignore it for now
    # Standard import with other statements on the same line
    # (LICENSE + FUTURE_IMPORTS + MULTIPLE_STATEMENTS_METADATA + REGULAR_IMPORTS,
    # (METADATA, 5, 0, 7, 42, ['ANSIBLE_METADATA'])),
    # Metadata at end of file with other statements on the same line
    # (LICENSE + FUTURE_IMPORTS + REGULAR_IMPORTS + MULTIPLE_STATEMENTS_METADATA.rstrip(),
    # (METADATA, 8, 0, 10, 42, ['ANSIBLE_METADATA'])),
    # Metadata at beginning of file with other statements on the same line
    # (MULTIPLE_STATEMENTS_METADATA + LICENSE + REGULAR_IMPORTS,
    # (METADATA, 1, 0, 3, 42, ['ANSIBLE_METADATA'])),

    # Standard import with comment inside the metadata
    (LICENSE + FUTURE_IMPORTS + EMBEDDED_COMMENT_METADATA + REGULAR_IMPORTS,
     (METADATA, 5, 0, 8, 42, ['ANSIBLE_METADATA'])),
    # Metadata at end of file with comment inside the metadata
    (LICENSE + FUTURE_IMPORTS + REGULAR_IMPORTS + EMBEDDED_COMMENT_METADATA.rstrip(),
     (METADATA, 8, 0, 11, 42, ['ANSIBLE_METADATA'])),
    # Metadata at beginning of file with comment inside the metadata
    (EMBEDDED_COMMENT_METADATA + LICENSE + REGULAR_IMPORTS,
     (METADATA, 1, 0, 4, 42, ['ANSIBLE_METADATA'])),

    # FIXME: Current code cannot handle hash symbols in the last element of
    # the metadata.  Fortunately, the metadata currently fully specifies all
    # the strings inside of metadata and none of them can contain a hash.
    # Need to fix this to future-proof it against strings containing hashes
    # Standard import with hash symbol in metadata
    # (LICENSE + FUTURE_IMPORTS + HASH_SYMBOL_METADATA + REGULAR_IMPORTS,
    # (HASH_SYMBOL_METADATA, 5, 0, 7, 53, ['ANSIBLE_METADATA'])),
    # Metadata at end of file with hash symbol in metadata
    # (LICENSE + FUTURE_IMPORTS + REGULAR_IMPORTS + HASH_SYMBOL_HASH_SYMBOL_METADATA.rstrip(),
    # (HASH_SYMBOL_METADATA, 8, 0, 10, 53, ['ANSIBLE_METADATA'])),
    # Metadata at beginning of file with hash symbol in metadata
    # (HASH_SYMBOL_HASH_SYMBOL_METADATA + LICENSE + REGULAR_IMPORTS,
    # (HASH_SYMBOL_METADATA, 1, 0, 3, 53, ['ANSIBLE_METADATA'])),

    # Standard import with a bunch of hashes everywhere
    (LICENSE + FUTURE_IMPORTS + HASH_COMBO_METADATA + REGULAR_IMPORTS,
     (HASH_SYMBOL_METADATA, 5, 0, 8, 42, ['ANSIBLE_METADATA'])),
    # Metadata at end of file with a bunch of hashes everywhere
    (LICENSE + FUTURE_IMPORTS + REGULAR_IMPORTS + HASH_COMBO_METADATA.rstrip(),
     (HASH_SYMBOL_METADATA, 8, 0, 11, 42, ['ANSIBLE_METADATA'])),
    # Metadata at beginning of file with a bunch of hashes everywhere
    (HASH_COMBO_METADATA + LICENSE + REGULAR_IMPORTS,
     (HASH_SYMBOL_METADATA, 1, 0, 4, 42, ['ANSIBLE_METADATA'])),

    # Standard import with a junk ANSIBLE_METADATA as well
    (LICENSE + FUTURE_IMPORTS + b"\nANSIBLE_METADATA = 10\n" + HASH_COMBO_METADATA + REGULAR_IMPORTS,
     (HASH_SYMBOL_METADATA, 7, 0, 10, 42, ['ANSIBLE_METADATA'])),
)

# FIXME: String/yaml metadata is not implemented yet.  Need more test cases once
# it is implemented
STRING_METADATA_EXAMPLES = (
    # Standard import
    (LICENSE + FUTURE_IMPORTS + TEXT_STD_METADATA + REGULAR_IMPORTS,
     (METADATA, 5, 0, 10, 3, ['ANSIBLE_METADATA'])),
    # Metadata at end of file
    (LICENSE + FUTURE_IMPORTS + REGULAR_IMPORTS + TEXT_STD_METADATA.rstrip(),
     (METADATA, 8, 0, 13, 3, ['ANSIBLE_METADATA'])),
    # Metadata at beginning of file
    (TEXT_STD_METADATA + LICENSE + REGULAR_IMPORTS,
     (METADATA, 1, 0, 6, 3, ['ANSIBLE_METADATA'])),

    # Standard import
    (LICENSE + FUTURE_IMPORTS + BYTES_STD_METADATA + REGULAR_IMPORTS,
     (METADATA, 5, 0, 10, 3, ['ANSIBLE_METADATA'])),
    # Metadata at end of file
    (LICENSE + FUTURE_IMPORTS + REGULAR_IMPORTS + BYTES_STD_METADATA.rstrip(),
     (METADATA, 8, 0, 13, 3, ['ANSIBLE_METADATA'])),
    # Metadata at beginning of file
    (BYTES_STD_METADATA + LICENSE + REGULAR_IMPORTS,
     (METADATA, 1, 0, 6, 3, ['ANSIBLE_METADATA'])),
)


@pytest.mark.parametrize("code, expected", METADATA_EXAMPLES)
def test_dict_metadata(code, expected):
    # Dict-literal metadata is the supported form; the parser returns the
    # metadata plus its source offsets.
    assert md.extract_metadata(module_data=code, offsets=True) == expected


@pytest.mark.parametrize("code, expected", STRING_METADATA_EXAMPLES)
def test_string_metadata(code, expected):
    # FIXME: String/yaml metadata is not implemented yet.  Need more test
    # cases once it is implemented
    with pytest.raises(NotImplementedError):
        assert md.extract_metadata(module_data=code, offsets=True) == expected


def test_required_params():
    # extract_metadata must be given either an AST or the raw source.
    with pytest.raises(TypeError, match='One of module_ast or module_data must be given'):
        assert md.extract_metadata()


def test_module_data_param_given_with_offset():
    # Offsets can only be computed from the raw source text.
    with pytest.raises(TypeError, match='If offsets is True then module_data must also be given'):
        assert md.extract_metadata(module_ast='something', offsets=True)


def test_invalid_dict_metadata():
    # Unterminated dict: a SyntaxError when the parser builds the AST itself...
    with pytest.raises(SyntaxError):
        assert md.extract_metadata(module_data=LICENSE + FUTURE_IMPORTS +
                                   b'ANSIBLE_METADATA={"metadata_version": "1.1",\n' +
                                   REGULAR_IMPORTS)

    # ...and a ParseError when a pre-parsed (inconsistent) AST is supplied.
    with pytest.raises(md.ParseError, match='Unable to find the end of dictionary'):
        assert md.extract_metadata(module_ast=ast.parse(LICENSE + FUTURE_IMPORTS +
                                   b'ANSIBLE_METADATA={"metadata_version": "1.1"}\n' +
                                   REGULAR_IMPORTS),
                                   module_data=LICENSE + FUTURE_IMPORTS +
                                   b'ANSIBLE_METADATA={"metadata_version": "1.1",\n' +
                                   REGULAR_IMPORTS,
                                   offsets=True)


def test_multiple_statements_limitation():
    # Known limitation: the offset finder assumes one statement per line.
    with pytest.raises(md.ParseError, match='Multiple statements per line confuses the module metadata parser.'):
        assert md.extract_metadata(module_data=LICENSE + FUTURE_IMPORTS +
                                   b'ANSIBLE_METADATA={"metadata_version": "1.1"}; a=b\n' +
                                   REGULAR_IMPORTS,
                                   offsets=True)
gpl-3.0
wrouesnel/ansible
lib/ansible/module_utils/crypto.py
70
4764
# -*- coding: utf-8 -*-
#
# (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

try:
    from OpenSSL import crypto
except ImportError:
    # An error will be raised in the calling class to let the end
    # user know that OpenSSL couldn't be found.
    pass

import abc
import errno
import hashlib
import os

from ansible.module_utils import six
from ansible.module_utils._text import to_bytes


class OpenSSLObjectError(Exception):
    """Error raised by the OpenSSL helpers and objects in this module."""
    pass


def get_fingerprint(path, passphrase=None):
    """Generate the fingerprint of the public key.

    :arg path: path to the PEM private key file.
    :kwarg passphrase: optional passphrase protecting the key.
    :returns: dict mapping each hashlib algorithm name to a colon-separated
        hex digest of the DER public key; empty when the public key cannot
        be dumped (PyOpenSSL < 16.0 lacks dump_publickey).
    """
    fingerprint = {}
    privatekey = load_privatekey(path, passphrase)
    try:
        publickey = crypto.dump_publickey(crypto.FILETYPE_ASN1, privatekey)
        # hashlib.algorithms is the Python 2 attribute name; on failure the
        # AttributeError handler below leaves the dict empty.
        for algo in hashlib.algorithms:
            f = getattr(hashlib, algo)
            pubkey_digest = f(publickey).hexdigest()
            fingerprint[algo] = ':'.join(pubkey_digest[i:i + 2] for i in range(0, len(pubkey_digest), 2))
    except AttributeError:
        # If PyOpenSSL < 16.0 crypto.dump_publickey() will fail.
        # By doing this we prevent the code from raising an error
        # yet we return no value in the fingerprint hash.
        pass

    return fingerprint


def load_privatekey(path, passphrase=None):
    """Load the specified OpenSSL private key.

    :arg path: path to the PEM private key file.
    :kwarg passphrase: optional passphrase protecting the key.
    :raises OpenSSLObjectError: when the file cannot be read.
    """
    try:
        # Read via a context manager so the file handle is always closed
        # (the previous implementation leaked the open handle).
        with open(path, 'rb') as key_file:
            key_content = key_file.read()

        if passphrase:
            privatekey = crypto.load_privatekey(crypto.FILETYPE_PEM,
                                                key_content,
                                                to_bytes(passphrase))
        else:
            privatekey = crypto.load_privatekey(crypto.FILETYPE_PEM,
                                                key_content)

        return privatekey
    except (IOError, OSError) as exc:
        raise OpenSSLObjectError(exc)


def load_certificate(path):
    """Load the specified certificate.

    :arg path: path to the PEM certificate file.
    :raises OpenSSLObjectError: when the file cannot be read.
    """
    try:
        # Context manager closes the handle (previously leaked).
        with open(path, 'rb') as cert_file:
            cert_content = cert_file.read()
        cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_content)
        return cert
    except (IOError, OSError) as exc:
        raise OpenSSLObjectError(exc)


def load_certificate_request(path):
    """Load the specified certificate signing request.

    :arg path: path to the PEM CSR file.
    :raises OpenSSLObjectError: when the file cannot be read.
    """
    try:
        # Context manager closes the handle (previously leaked).
        with open(path, 'rb') as csr_file:
            csr_content = csr_file.read()
        csr = crypto.load_certificate_request(crypto.FILETYPE_PEM, csr_content)
        return csr
    except (IOError, OSError) as exc:
        raise OpenSSLObjectError(exc)


def parse_name_field(input_dict):
    """Take a dict with key: value or key: list_of_values mappings and
    return a list of (key, value) tuples, one per value."""
    result = []
    for key in input_dict:
        if isinstance(input_dict[key], list):
            for entry in input_dict[key]:
                result.append((key, entry))
        else:
            result.append((key, input_dict[key]))
    return result


@six.add_metaclass(abc.ABCMeta)
class OpenSSLObject(object):
    """Abstract base for file-backed OpenSSL resources (keys, certs, CSRs)."""

    def __init__(self, path, state, force, check_mode):
        self.path = path
        self.state = state
        self.force = force
        self.name = os.path.basename(path)
        self.changed = False
        self.check_mode = check_mode

    def check(self, module, perms_required=True):
        """Ensure the resource is in its desired state.

        Returns True when the file exists and (if perms_required) its
        filesystem attributes already match the module parameters.
        """

        def _check_state():
            return os.path.exists(self.path)

        def _check_perms(module):
            file_args = module.load_file_common_arguments(module.params)
            # set_fs_attributes_if_different returns True when a change was
            # needed, so "no change needed" means permissions already match.
            return not module.set_fs_attributes_if_different(file_args, False)

        if not perms_required:
            return _check_state()

        return _check_state() and _check_perms(module)

    @abc.abstractmethod
    def dump(self):
        """Serialize the object into a dictionary."""
        pass

    @abc.abstractmethod
    def generate(self):
        """Generate the resource."""
        pass

    def remove(self):
        """Remove the resource from the filesystem.

        A missing file is not an error (idempotent delete); any other
        OSError is wrapped in OpenSSLObjectError.
        """
        try:
            os.remove(self.path)
            self.changed = True
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise OpenSSLObjectError(exc)
            else:
                pass
gpl-3.0
topxiaoke/myedx
cms/djangoapps/models/settings/course_grading.py
6
7865
from datetime import timedelta
from xmodule.modulestore.django import modulestore
from xblock.fields import Scope


class CourseGradingModel(object):
    """
    Basically a DAO and Model combo for CRUD operations pertaining to grading policy.
    """
    # Within this class, allow access to protected members of client classes.
    # This comes up when accessing kvs data and caches during kvs saves and modulestore writes.
    def __init__(self, course_descriptor):
        # Build the client-facing grader list from the descriptor's raw
        # graders; jsonize_grader scales fractional weights up to [0..100].
        self.graders = [
            CourseGradingModel.jsonize_grader(i, grader) for i, grader in enumerate(course_descriptor.raw_grader)
        ]  # weights transformed to ints [0..100]
        self.grade_cutoffs = course_descriptor.grade_cutoffs
        self.grace_period = CourseGradingModel.convert_set_grace_period(course_descriptor)

    @classmethod
    def fetch(cls, course_key):
        """
        Fetch the course grading policy for the given course from persistence and return a CourseGradingModel.
        """
        descriptor = modulestore('direct').get_course(course_key)
        model = cls(descriptor)
        return model

    @staticmethod
    def fetch_grader(course_key, index):
        """
        Fetch the course's nth grader
        Returns an empty dict if there's no such grader.
        """
        descriptor = modulestore('direct').get_course(course_key)
        index = int(index)
        if len(descriptor.raw_grader) > index:
            return CourseGradingModel.jsonize_grader(index, descriptor.raw_grader[index])

        # return empty model
        else:
            return {"id": index,
                    "type": "",
                    "min_count": 0,
                    "drop_count": 0,
                    "short_label": None,
                    "weight": 0
                    }

    @staticmethod
    def update_from_json(course_key, jsondict, user):
        """
        Decode the json into CourseGradingModel and save any changes. Returns the modified model.
        Probably not the usual path for updates as it's too coarse grained.
        """
        descriptor = modulestore('direct').get_course(course_key)

        graders_parsed = [CourseGradingModel.parse_grader(jsonele) for jsonele in jsondict['graders']]

        descriptor.raw_grader = graders_parsed
        descriptor.grade_cutoffs = jsondict['grade_cutoffs']

        modulestore('direct').update_item(descriptor, user.id)

        # Grace period is stored separately on the descriptor, so it gets
        # its own update (and its own modulestore write).
        CourseGradingModel.update_grace_period_from_json(course_key, jsondict['grace_period'], user)

        # Re-fetch so the returned model reflects exactly what was persisted.
        return CourseGradingModel.fetch(course_key)

    @staticmethod
    def update_grader_from_json(course_key, grader, user):
        """
        Create or update the grader of the given type (string key) for the given course. Returns the modified
        grader which is a full model on the client but not on the server (just a dict)
        """
        descriptor = modulestore('direct').get_course(course_key)

        # parse removes the id; so, grab it before parse
        index = int(grader.get('id', len(descriptor.raw_grader)))
        grader = CourseGradingModel.parse_grader(grader)

        if index < len(descriptor.raw_grader):
            descriptor.raw_grader[index] = grader
        else:
            descriptor.raw_grader.append(grader)

        modulestore('direct').update_item(descriptor, user.id)

        return CourseGradingModel.jsonize_grader(index, descriptor.raw_grader[index])

    @staticmethod
    def update_cutoffs_from_json(course_key, cutoffs, user):
        """
        Create or update the grade cutoffs for the given course. Returns sent in cutoffs (ie., no extra
        db fetch).
        """
        descriptor = modulestore('direct').get_course(course_key)
        descriptor.grade_cutoffs = cutoffs

        modulestore('direct').update_item(descriptor, user.id)

        return cutoffs

    @staticmethod
    def update_grace_period_from_json(course_key, graceperiodjson, user):
        """
        Update the course's default grace period. Incoming dict is {hours: h, minutes: m} possibly as a
        grace_period entry in an enclosing dict. It is also safe to call this method with a value of
        None for graceperiodjson.
        """
        descriptor = modulestore('direct').get_course(course_key)

        # Before a graceperiod has ever been created, it will be None (once it has been
        # created, it cannot be set back to None).
        if graceperiodjson is not None:
            if 'grace_period' in graceperiodjson:
                graceperiodjson = graceperiodjson['grace_period']

            # The hours/minutes/seconds keys map directly onto timedelta kwargs.
            grace_timedelta = timedelta(**graceperiodjson)
            descriptor.graceperiod = grace_timedelta

            modulestore('direct').update_item(descriptor, user.id)

    @staticmethod
    def delete_grader(course_key, index, user):
        """
        Delete the grader of the given type from the given course.
        """
        descriptor = modulestore('direct').get_course(course_key)

        index = int(index)
        if index < len(descriptor.raw_grader):
            del descriptor.raw_grader[index]
            # force propagation to definition
            descriptor.raw_grader = descriptor.raw_grader

        modulestore('direct').update_item(descriptor, user.id)

    @staticmethod
    def delete_grace_period(course_key, user):
        """
        Delete the course's grace period.
        """
        descriptor = modulestore('direct').get_course(course_key)

        del descriptor.graceperiod

        modulestore('direct').update_item(descriptor, user.id)

    @staticmethod
    def get_section_grader_type(location):
        # Returns the grader type ("format") of the xblock at `location`,
        # defaulting to 'notgraded' when no format is set.
        descriptor = modulestore('direct').get_item(location)
        return {
            "graderType": descriptor.format if descriptor.format is not None else 'notgraded',
            "location": unicode(location),
        }

    @staticmethod
    def update_section_grader_type(descriptor, grader_type, user):
        # Setting any real grader type marks the section graded; 'notgraded'
        # (or None) deletes both fields so they fall back to defaults.
        if grader_type is not None and grader_type != u'notgraded':
            descriptor.format = grader_type
            descriptor.graded = True
        else:
            del descriptor.format
            del descriptor.graded

        modulestore('direct').update_item(descriptor, user.id)
        return {'graderType': grader_type}

    @staticmethod
    def convert_set_grace_period(descriptor):
        # 5 hours 59 minutes 59 seconds => converted to iso format
        rawgrace = descriptor.graceperiod
        if rawgrace:
            # timedelta stores days + seconds; fold days into hours and then
            # decompose the remaining seconds into minutes/seconds.
            hours_from_days = rawgrace.days * 24
            seconds = rawgrace.seconds
            hours_from_seconds = int(seconds / 3600)
            hours = hours_from_days + hours_from_seconds
            seconds -= hours_from_seconds * 3600
            minutes = int(seconds / 60)
            seconds -= minutes * 60

            graceperiod = {'hours': 0, 'minutes': 0, 'seconds': 0}
            if hours > 0:
                graceperiod['hours'] = hours

            if minutes > 0:
                graceperiod['minutes'] = minutes

            if seconds > 0:
                graceperiod['seconds'] = seconds

            return graceperiod
        else:
            return None

    @staticmethod
    def parse_grader(json_grader):
        # manual to clear out kruft
        # NOTE: weight arrives as a percentage [0..100] and is stored as a
        # fraction [0..1] (the inverse of jsonize_grader's scaling).
        result = {"type": json_grader["type"],
                  "min_count": int(json_grader.get('min_count', 0)),
                  "drop_count": int(json_grader.get('drop_count', 0)),
                  "short_label": json_grader.get('short_label', None),
                  "weight": float(json_grader.get('weight', 0)) / 100.0
                  }

        return result

    @staticmethod
    def jsonize_grader(i, grader):
        # NOTE: mutates and returns the passed-in grader dict: adds 'id',
        # scales a truthy 'weight' by 100, and defaults 'short_label' to "".
        grader['id'] = i
        if grader['weight']:
            grader['weight'] *= 100
        if not 'short_label' in grader:
            grader['short_label'] = ""

        return grader
agpl-3.0
davekennewell/geonode
geonode/layers/views.py
5
22258
# -*- coding: utf-8 -*- ######################################################################### # # Copyright (C) 2012 OpenPlans # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ######################################################################### import os import sys import logging import shutil import traceback from guardian.shortcuts import get_perms from django.contrib import messages from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import render_to_response from django.conf import settings from django.template import RequestContext from django.utils.translation import ugettext as _ from django.utils import simplejson as json from django.utils.html import escape from django.template.defaultfilters import slugify from django.forms.models import inlineformset_factory from django.db.models import F from django.forms.util import ErrorList from geonode.tasks.deletion import delete_layer from geonode.services.models import Service from geonode.layers.forms import LayerForm, LayerUploadForm, NewLayerUploadForm, LayerAttributeForm from geonode.base.forms import CategoryForm from geonode.layers.models import Layer, Attribute, UploadSession from geonode.base.enumerations import CHARSETS from geonode.base.models import TopicCategory from geonode.utils import 
default_map_config from geonode.utils import GXPLayer from geonode.utils import GXPMap from geonode.layers.utils import file_upload, is_raster, is_vector from geonode.utils import resolve_object, llbbox_to_mercator from geonode.people.forms import ProfileForm, PocForm from geonode.security.views import _perms_info_json from geonode.documents.models import get_related_documents from geonode.utils import build_social_links from geonode.geoserver.helpers import cascading_delete, gs_catalog CONTEXT_LOG_FILE = None if 'geonode.geoserver' in settings.INSTALLED_APPS: from geonode.geoserver.helpers import _render_thumbnail from geonode.geoserver.helpers import ogc_server_settings CONTEXT_LOG_FILE = ogc_server_settings.LOG_FILE logger = logging.getLogger("geonode.layers.views") DEFAULT_SEARCH_BATCH_SIZE = 10 MAX_SEARCH_BATCH_SIZE = 25 GENERIC_UPLOAD_ERROR = _("There was an error while attempting to upload your data. \ Please try again, or contact and administrator if the problem continues.") _PERMISSION_MSG_DELETE = _("You are not permitted to delete this layer") _PERMISSION_MSG_GENERIC = _('You do not have permissions for this layer.') _PERMISSION_MSG_MODIFY = _("You are not permitted to modify this layer") _PERMISSION_MSG_METADATA = _( "You are not permitted to modify this layer's metadata") _PERMISSION_MSG_VIEW = _("You are not permitted to view this layer") def log_snippet(log_file): if not os.path.isfile(log_file): return "No log file at %s" % log_file with open(log_file, "r") as f: f.seek(0, 2) # Seek @ EOF fsize = f.tell() # Get Size f.seek(max(fsize - 10024, 0), 0) # Set pos @ last n chars return f.read() def _resolve_layer(request, typename, permission='base.view_resourcebase', msg=_PERMISSION_MSG_GENERIC, **kwargs): """ Resolve the layer by the provided typename (which may include service name) and check the optional permission. 
""" service_typename = typename.split(":", 1) if Service.objects.filter(name=service_typename[0]).exists(): service = Service.objects.filter(name=service_typename[0]) return resolve_object(request, Layer, {'service': service[0], 'typename': service_typename[1] if service[0].method != "C" else typename}, permission=permission, permission_msg=msg, **kwargs) else: return resolve_object(request, Layer, {'typename': typename, 'service': None}, permission=permission, permission_msg=msg, **kwargs) # Basic Layer Views # @login_required def layer_upload(request, template='upload/layer_upload.html'): if request.method == 'GET': ctx = { 'charsets': CHARSETS, 'is_layer': True, } return render_to_response(template, RequestContext(request, ctx)) elif request.method == 'POST': form = NewLayerUploadForm(request.POST, request.FILES) tempdir = None errormsgs = [] out = {'success': False} if form.is_valid(): title = form.cleaned_data["layer_title"] # Replace dots in filename - GeoServer REST API upload bug # and avoid any other invalid characters. # Use the title if possible, otherwise default to the filename if title is not None and len(title) > 0: name_base = title else: name_base, __ = os.path.splitext( form.cleaned_data["base_file"].name) name = slugify(name_base.replace(".", "_")) try: # Moved this inside the try/except block because it can raise # exceptions when unicode characters are present. # This should be followed up in upstream Django. tempdir, base_file = form.write_files() saved_layer = file_upload( base_file, name=name, user=request.user, overwrite=False, charset=form.cleaned_data["charset"], abstract=form.cleaned_data["abstract"], title=form.cleaned_data["layer_title"], ) except Exception as e: exception_type, error, tb = sys.exc_info() logger.exception(e) out['success'] = False out['errors'] = str(error) # Assign the error message to the latest UploadSession from that user. 
latest_uploads = UploadSession.objects.filter(user=request.user).order_by('-date') if latest_uploads.count() > 0: upload_session = latest_uploads[0] upload_session.error = str(error) upload_session.traceback = traceback.format_exc(tb) upload_session.context = log_snippet(CONTEXT_LOG_FILE) upload_session.save() out['traceback'] = upload_session.traceback out['context'] = upload_session.context out['upload_session'] = upload_session.id else: out['success'] = True if hasattr(saved_layer, 'info'): out['info'] = saved_layer.info out['url'] = reverse( 'layer_detail', args=[ saved_layer.service_typename]) upload_session = saved_layer.upload_session upload_session.processed = True upload_session.save() permissions = form.cleaned_data["permissions"] if permissions is not None and len(permissions.keys()) > 0: saved_layer.set_permissions(permissions) finally: if tempdir is not None: shutil.rmtree(tempdir) else: for e in form.errors.values(): errormsgs.extend([escape(v) for v in e]) out['errors'] = form.errors out['errormsgs'] = errormsgs if out['success']: status_code = 200 else: status_code = 400 return HttpResponse( json.dumps(out), mimetype='application/json', status=status_code) def layer_detail(request, layername, template='layers/layer_detail.html'): layer = _resolve_layer( request, layername, 'base.view_resourcebase', _PERMISSION_MSG_VIEW) # assert False, str(layer_bbox) config = layer.attribute_config() # Add required parameters for GXP lazy-loading layer_bbox = layer.bbox bbox = [float(coord) for coord in list(layer_bbox[0:4])] config["srs"] = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:900913') config["bbox"] = bbox if config["srs"] != 'EPSG:900913' \ else llbbox_to_mercator([float(coord) for coord in bbox]) config["title"] = layer.title config["queryable"] = True if layer.storeType == "remoteStore": service = layer.service source_params = { "ptype": service.ptype, "remote": True, "url": service.base_url, "name": service.name} maplayer = GXPLayer( 
name=layer.typename, ows_url=layer.ows_url, layer_params=json.dumps(config), source_params=json.dumps(source_params)) else: maplayer = GXPLayer( name=layer.typename, ows_url=layer.ows_url, layer_params=json.dumps(config)) # Update count for popularity ranking, # but do not includes admins or resource owners if request.user != layer.owner and not request.user.is_superuser: Layer.objects.filter( id=layer.id).update(popular_count=F('popular_count') + 1) # center/zoom don't matter; the viewer will center on the layer bounds map_obj = GXPMap(projection=getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:900913')) NON_WMS_BASE_LAYERS = [ la for la in default_map_config()[1] if la.ows_url is None] metadata = layer.link_set.metadata().filter( name__in=settings.DOWNLOAD_FORMATS_METADATA) context_dict = { "resource": layer, 'perms_list': get_perms(request.user, layer.get_self_resource()), "permissions_json": _perms_info_json(layer), "documents": get_related_documents(layer), "metadata": metadata, "is_layer": True, "wps_enabled": settings.OGC_SERVER['default']['WPS_ENABLED'], } context_dict["viewer"] = json.dumps( map_obj.viewer_json(request.user, * (NON_WMS_BASE_LAYERS + [maplayer]))) context_dict["preview"] = getattr( settings, 'LAYER_PREVIEW_LIBRARY', 'leaflet') if request.user.has_perm('download_resourcebase', layer.get_self_resource()): if layer.storeType == 'dataStore': links = layer.link_set.download().filter( name__in=settings.DOWNLOAD_FORMATS_VECTOR) else: links = layer.link_set.download().filter( name__in=settings.DOWNLOAD_FORMATS_RASTER) context_dict["links"] = links if settings.SOCIAL_ORIGINS: context_dict["social_links"] = build_social_links(request, layer) return render_to_response(template, RequestContext(request, context_dict)) @login_required def layer_metadata(request, layername, template='layers/layer_metadata.html'): layer = _resolve_layer( request, layername, 'base.change_resourcebase_metadata', _PERMISSION_MSG_METADATA) layer_attribute_set = 
inlineformset_factory( Layer, Attribute, extra=0, form=LayerAttributeForm, ) topic_category = layer.category poc = layer.poc metadata_author = layer.metadata_author if request.method == "POST": layer_form = LayerForm(request.POST, instance=layer, prefix="resource") attribute_form = layer_attribute_set( request.POST, instance=layer, prefix="layer_attribute_set", queryset=Attribute.objects.order_by('display_order')) category_form = CategoryForm( request.POST, prefix="category_choice_field", initial=int( request.POST["category_choice_field"]) if "category_choice_field" in request.POST else None) else: layer_form = LayerForm(instance=layer, prefix="resource") attribute_form = layer_attribute_set( instance=layer, prefix="layer_attribute_set", queryset=Attribute.objects.order_by('display_order')) category_form = CategoryForm( prefix="category_choice_field", initial=topic_category.id if topic_category else None) if request.method == "POST" and layer_form.is_valid( ) and attribute_form.is_valid() and category_form.is_valid(): new_poc = layer_form.cleaned_data['poc'] new_author = layer_form.cleaned_data['metadata_author'] new_keywords = layer_form.cleaned_data['keywords'] if new_poc is None: if poc is None: poc_form = ProfileForm( request.POST, prefix="poc", instance=poc) else: poc_form = ProfileForm(request.POST, prefix="poc") if poc_form.is_valid(): if len(poc_form.cleaned_data['profile']) == 0: # FIXME use form.add_error in django > 1.7 errors = poc_form._errors.setdefault('profile', ErrorList()) errors.append(_('You must set a point of contact for this resource')) poc = None if poc_form.has_changed and poc_form.is_valid(): new_poc = poc_form.save() if new_author is None: if metadata_author is None: author_form = ProfileForm(request.POST, prefix="author", instance=metadata_author) else: author_form = ProfileForm(request.POST, prefix="author") if author_form.is_valid(): if len(author_form.cleaned_data['profile']) == 0: # FIXME use form.add_error in django > 1.7 errors = 
author_form._errors.setdefault('profile', ErrorList()) errors.append(_('You must set an author for this resource')) metadata_author = None if author_form.has_changed and author_form.is_valid(): new_author = author_form.save() new_category = TopicCategory.objects.get( id=category_form.cleaned_data['category_choice_field']) for form in attribute_form.cleaned_data: la = Attribute.objects.get(id=int(form['id'].id)) la.description = form["description"] la.attribute_label = form["attribute_label"] la.visible = form["visible"] la.display_order = form["display_order"] la.save() if new_poc is not None and new_author is not None: new_keywords = layer_form.cleaned_data['keywords'] layer.keywords.clear() layer.keywords.add(*new_keywords) the_layer = layer_form.save() up_sessions = UploadSession.objects.filter(layer=the_layer.id) if up_sessions.count() > 0 and up_sessions[0].user != the_layer.owner: up_sessions.update(user=the_layer.owner) the_layer.poc = new_poc the_layer.metadata_author = new_author Layer.objects.filter(id=the_layer.id).update( category=new_category ) if getattr(settings, 'SLACK_ENABLED', False): try: from geonode.contrib.slack.utils import build_slack_message_layer, send_slack_messages send_slack_messages(build_slack_message_layer("layer_edit", the_layer)) except: print "Could not send slack message." 
return HttpResponseRedirect( reverse( 'layer_detail', args=( layer.service_typename, ))) if poc is not None: layer_form.fields['poc'].initial = poc.id poc_form = ProfileForm(prefix="poc") poc_form.hidden = True if metadata_author is not None: layer_form.fields['metadata_author'].initial = metadata_author.id author_form = ProfileForm(prefix="author") author_form.hidden = True return render_to_response(template, RequestContext(request, { "layer": layer, "layer_form": layer_form, "poc_form": poc_form, "author_form": author_form, "attribute_form": attribute_form, "category_form": category_form, })) @login_required def layer_change_poc(request, ids, template='layers/layer_change_poc.html'): layers = Layer.objects.filter(id__in=ids.split('_')) if request.method == 'POST': form = PocForm(request.POST) if form.is_valid(): for layer in layers: layer.poc = form.cleaned_data['contact'] layer.save() # Process the data in form.cleaned_data # ... # Redirect after POST return HttpResponseRedirect('/admin/maps/layer') else: form = PocForm() # An unbound form return render_to_response( template, RequestContext( request, { 'layers': layers, 'form': form})) @login_required def layer_replace(request, layername, template='layers/layer_replace.html'): layer = _resolve_layer( request, layername, 'base.change_resourcebase', _PERMISSION_MSG_MODIFY) if request.method == 'GET': ctx = { 'charsets': CHARSETS, 'layer': layer, 'is_featuretype': layer.is_vector(), 'is_layer': True, } return render_to_response(template, RequestContext(request, ctx)) elif request.method == 'POST': form = LayerUploadForm(request.POST, request.FILES) tempdir = None out = {} if form.is_valid(): try: tempdir, base_file = form.write_files() if layer.is_vector() and is_raster(base_file): out['success'] = False out['errors'] = _("You are attempting to replace a vector layer with a raster.") elif (not layer.is_vector()) and is_vector(base_file): out['success'] = False out['errors'] = _("You are attempting to replace a 
raster layer with a vector.") else: # delete geoserver's store before upload cat = gs_catalog cascading_delete(cat, layer.typename) saved_layer = file_upload( base_file, name=layer.name, user=request.user, overwrite=True, charset=form.cleaned_data["charset"], ) out['success'] = True out['url'] = reverse( 'layer_detail', args=[ saved_layer.service_typename]) except Exception as e: out['success'] = False out['errors'] = str(e) finally: if tempdir is not None: shutil.rmtree(tempdir) else: errormsgs = [] for e in form.errors.values(): errormsgs.append([escape(v) for v in e]) out['errors'] = form.errors out['errormsgs'] = errormsgs if out['success']: status_code = 200 else: status_code = 400 return HttpResponse( json.dumps(out), mimetype='application/json', status=status_code) @login_required def layer_remove(request, layername, template='layers/layer_remove.html'): layer = _resolve_layer( request, layername, 'base.delete_resourcebase', _PERMISSION_MSG_DELETE) if (request.method == 'GET'): return render_to_response(template, RequestContext(request, { "layer": layer })) if (request.method == 'POST'): try: delete_layer.delay(object_id=layer.id) except Exception as e: message = '{0}: {1}.'.format(_('Unable to delete layer'), layer.typename) if 'referenced by layer group' in getattr(e, 'message', ''): message = _('This layer is a member of a layer group, you must remove the layer from the group ' 'before deleting.') messages.error(request, message) return render_to_response(template, RequestContext(request, {"layer": layer})) return HttpResponseRedirect(reverse("layer_browse")) else: return HttpResponse("Not allowed", status=403) def layer_thumbnail(request, layername): if request.method == 'POST': layer_obj = _resolve_layer(request, layername) try: image = _render_thumbnail(request.body) if not image: return filename = "layer-%s-thumb.png" % layer_obj.uuid layer_obj.save_thumbnail(filename, image) return HttpResponse('Thumbnail saved') except: return HttpResponse( 
content='error saving thumbnail', status=500, mimetype='text/plain' )
gpl-3.0
h2educ/scikit-learn
sklearn/feature_extraction/image.py
263
17600
""" The :mod:`sklearn.feature_extraction.image` submodule gathers utilities to extract features from images. """ # Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org> # Gael Varoquaux <gael.varoquaux@normalesup.org> # Olivier Grisel # Vlad Niculae # License: BSD 3 clause from itertools import product import numbers import numpy as np from scipy import sparse from numpy.lib.stride_tricks import as_strided from ..utils import check_array, check_random_state from ..utils.fixes import astype from ..base import BaseEstimator __all__ = ['PatchExtractor', 'extract_patches_2d', 'grid_to_graph', 'img_to_graph', 'reconstruct_from_patches_2d'] ############################################################################### # From an image to a graph def _make_edges_3d(n_x, n_y, n_z=1): """Returns a list of edges for a 3D image. Parameters =========== n_x: integer The size of the grid in the x direction. n_y: integer The size of the grid in the y direction. n_z: integer, optional The size of the grid in the z direction, defaults to 1 """ vertices = np.arange(n_x * n_y * n_z).reshape((n_x, n_y, n_z)) edges_deep = np.vstack((vertices[:, :, :-1].ravel(), vertices[:, :, 1:].ravel())) edges_right = np.vstack((vertices[:, :-1].ravel(), vertices[:, 1:].ravel())) edges_down = np.vstack((vertices[:-1].ravel(), vertices[1:].ravel())) edges = np.hstack((edges_deep, edges_right, edges_down)) return edges def _compute_gradient_3d(edges, img): n_x, n_y, n_z = img.shape gradient = np.abs(img[edges[0] // (n_y * n_z), (edges[0] % (n_y * n_z)) // n_z, (edges[0] % (n_y * n_z)) % n_z] - img[edges[1] // (n_y * n_z), (edges[1] % (n_y * n_z)) // n_z, (edges[1] % (n_y * n_z)) % n_z]) return gradient # XXX: Why mask the image after computing the weights? 
def _mask_edges_weights(mask, edges, weights=None): """Apply a mask to edges (weighted or not)""" inds = np.arange(mask.size) inds = inds[mask.ravel()] ind_mask = np.logical_and(np.in1d(edges[0], inds), np.in1d(edges[1], inds)) edges = edges[:, ind_mask] if weights is not None: weights = weights[ind_mask] if len(edges.ravel()): maxval = edges.max() else: maxval = 0 order = np.searchsorted(np.unique(edges.ravel()), np.arange(maxval + 1)) edges = order[edges] if weights is None: return edges else: return edges, weights def _to_graph(n_x, n_y, n_z, mask=None, img=None, return_as=sparse.coo_matrix, dtype=None): """Auxiliary function for img_to_graph and grid_to_graph """ edges = _make_edges_3d(n_x, n_y, n_z) if dtype is None: if img is None: dtype = np.int else: dtype = img.dtype if img is not None: img = np.atleast_3d(img) weights = _compute_gradient_3d(edges, img) if mask is not None: edges, weights = _mask_edges_weights(mask, edges, weights) diag = img.squeeze()[mask] else: diag = img.ravel() n_voxels = diag.size else: if mask is not None: mask = astype(mask, dtype=np.bool, copy=False) mask = np.asarray(mask, dtype=np.bool) edges = _mask_edges_weights(mask, edges) n_voxels = np.sum(mask) else: n_voxels = n_x * n_y * n_z weights = np.ones(edges.shape[1], dtype=dtype) diag = np.ones(n_voxels, dtype=dtype) diag_idx = np.arange(n_voxels) i_idx = np.hstack((edges[0], edges[1])) j_idx = np.hstack((edges[1], edges[0])) graph = sparse.coo_matrix((np.hstack((weights, weights, diag)), (np.hstack((i_idx, diag_idx)), np.hstack((j_idx, diag_idx)))), (n_voxels, n_voxels), dtype=dtype) if return_as is np.ndarray: return graph.toarray() return return_as(graph) def img_to_graph(img, mask=None, return_as=sparse.coo_matrix, dtype=None): """Graph of the pixel-to-pixel gradient connections Edges are weighted with the gradient values. Read more in the :ref:`User Guide <image_feature_extraction>`. 
Parameters ---------- img : ndarray, 2D or 3D 2D or 3D image mask : ndarray of booleans, optional An optional mask of the image, to consider only part of the pixels. return_as : np.ndarray or a sparse matrix class, optional The class to use to build the returned adjacency matrix. dtype : None or dtype, optional The data of the returned sparse matrix. By default it is the dtype of img Notes ----- For sklearn versions 0.14.1 and prior, return_as=np.ndarray was handled by returning a dense np.matrix instance. Going forward, np.ndarray returns an np.ndarray, as expected. For compatibility, user code relying on this method should wrap its calls in ``np.asarray`` to avoid type issues. """ img = np.atleast_3d(img) n_x, n_y, n_z = img.shape return _to_graph(n_x, n_y, n_z, mask, img, return_as, dtype) def grid_to_graph(n_x, n_y, n_z=1, mask=None, return_as=sparse.coo_matrix, dtype=np.int): """Graph of the pixel-to-pixel connections Edges exist if 2 voxels are connected. Parameters ---------- n_x : int Dimension in x axis n_y : int Dimension in y axis n_z : int, optional, default 1 Dimension in z axis mask : ndarray of booleans, optional An optional mask of the image, to consider only part of the pixels. return_as : np.ndarray or a sparse matrix class, optional The class to use to build the returned adjacency matrix. dtype : dtype, optional, default int The data of the returned sparse matrix. By default it is int Notes ----- For sklearn versions 0.14.1 and prior, return_as=np.ndarray was handled by returning a dense np.matrix instance. Going forward, np.ndarray returns an np.ndarray, as expected. For compatibility, user code relying on this method should wrap its calls in ``np.asarray`` to avoid type issues. 
""" return _to_graph(n_x, n_y, n_z, mask=mask, return_as=return_as, dtype=dtype) ############################################################################### # From an image to a set of small image patches def _compute_n_patches(i_h, i_w, p_h, p_w, max_patches=None): """Compute the number of patches that will be extracted in an image. Read more in the :ref:`User Guide <image_feature_extraction>`. Parameters ---------- i_h : int The image height i_w : int The image with p_h : int The height of a patch p_w : int The width of a patch max_patches : integer or float, optional default is None The maximum number of patches to extract. If max_patches is a float between 0 and 1, it is taken to be a proportion of the total number of patches. """ n_h = i_h - p_h + 1 n_w = i_w - p_w + 1 all_patches = n_h * n_w if max_patches: if (isinstance(max_patches, (numbers.Integral)) and max_patches < all_patches): return max_patches elif (isinstance(max_patches, (numbers.Real)) and 0 < max_patches < 1): return int(max_patches * all_patches) else: raise ValueError("Invalid value for max_patches: %r" % max_patches) else: return all_patches def extract_patches(arr, patch_shape=8, extraction_step=1): """Extracts patches of any n-dimensional array in place using strides. Given an n-dimensional array it will return a 2n-dimensional array with the first n dimensions indexing patch position and the last n indexing the patch content. This operation is immediate (O(1)). A reshape performed on the first n dimensions will cause numpy to copy data, leading to a list of extracted patches. Read more in the :ref:`User Guide <image_feature_extraction>`. Parameters ---------- arr : ndarray n-dimensional array of which patches are to be extracted patch_shape : integer or tuple of length arr.ndim Indicates the shape of the patches to be extracted. If an integer is given, the shape will be a hypercube of sidelength given by its value. 
extraction_step : integer or tuple of length arr.ndim Indicates step size at which extraction shall be performed. If integer is given, then the step is uniform in all dimensions. Returns ------- patches : strided ndarray 2n-dimensional array indexing patches on first n dimensions and containing patches on the last n dimensions. These dimensions are fake, but this way no data is copied. A simple reshape invokes a copying operation to obtain a list of patches: result.reshape([-1] + list(patch_shape)) """ arr_ndim = arr.ndim if isinstance(patch_shape, numbers.Number): patch_shape = tuple([patch_shape] * arr_ndim) if isinstance(extraction_step, numbers.Number): extraction_step = tuple([extraction_step] * arr_ndim) patch_strides = arr.strides slices = [slice(None, None, st) for st in extraction_step] indexing_strides = arr[slices].strides patch_indices_shape = ((np.array(arr.shape) - np.array(patch_shape)) // np.array(extraction_step)) + 1 shape = tuple(list(patch_indices_shape) + list(patch_shape)) strides = tuple(list(indexing_strides) + list(patch_strides)) patches = as_strided(arr, shape=shape, strides=strides) return patches def extract_patches_2d(image, patch_size, max_patches=None, random_state=None): """Reshape a 2D image into a collection of patches The resulting patches are allocated in a dedicated array. Read more in the :ref:`User Guide <image_feature_extraction>`. Parameters ---------- image : array, shape = (image_height, image_width) or (image_height, image_width, n_channels) The original image data. For color images, the last dimension specifies the channel: a RGB image would have `n_channels=3`. patch_size : tuple of ints (patch_height, patch_width) the dimensions of one patch max_patches : integer or float, optional default is None The maximum number of patches to extract. If max_patches is a float between 0 and 1, it is taken to be a proportion of the total number of patches. 
random_state : int or RandomState Pseudo number generator state used for random sampling to use if `max_patches` is not None. Returns ------- patches : array, shape = (n_patches, patch_height, patch_width) or (n_patches, patch_height, patch_width, n_channels) The collection of patches extracted from the image, where `n_patches` is either `max_patches` or the total number of patches that can be extracted. Examples -------- >>> from sklearn.feature_extraction import image >>> one_image = np.arange(16).reshape((4, 4)) >>> one_image array([[ 0, 1, 2, 3], [ 4, 5, 6, 7], [ 8, 9, 10, 11], [12, 13, 14, 15]]) >>> patches = image.extract_patches_2d(one_image, (2, 2)) >>> print(patches.shape) (9, 2, 2) >>> patches[0] array([[0, 1], [4, 5]]) >>> patches[1] array([[1, 2], [5, 6]]) >>> patches[8] array([[10, 11], [14, 15]]) """ i_h, i_w = image.shape[:2] p_h, p_w = patch_size if p_h > i_h: raise ValueError("Height of the patch should be less than the height" " of the image.") if p_w > i_w: raise ValueError("Width of the patch should be less than the width" " of the image.") image = check_array(image, allow_nd=True) image = image.reshape((i_h, i_w, -1)) n_colors = image.shape[-1] extracted_patches = extract_patches(image, patch_shape=(p_h, p_w, n_colors), extraction_step=1) n_patches = _compute_n_patches(i_h, i_w, p_h, p_w, max_patches) if max_patches: rng = check_random_state(random_state) i_s = rng.randint(i_h - p_h + 1, size=n_patches) j_s = rng.randint(i_w - p_w + 1, size=n_patches) patches = extracted_patches[i_s, j_s, 0] else: patches = extracted_patches patches = patches.reshape(-1, p_h, p_w, n_colors) # remove the color dimension if useless if patches.shape[-1] == 1: return patches.reshape((n_patches, p_h, p_w)) else: return patches def reconstruct_from_patches_2d(patches, image_size): """Reconstruct the image from all of its patches. 
Patches are assumed to overlap and the image is constructed by filling in the patches from left to right, top to bottom, averaging the overlapping regions. Read more in the :ref:`User Guide <image_feature_extraction>`. Parameters ---------- patches : array, shape = (n_patches, patch_height, patch_width) or (n_patches, patch_height, patch_width, n_channels) The complete set of patches. If the patches contain colour information, channels are indexed along the last dimension: RGB patches would have `n_channels=3`. image_size : tuple of ints (image_height, image_width) or (image_height, image_width, n_channels) the size of the image that will be reconstructed Returns ------- image : array, shape = image_size the reconstructed image """ i_h, i_w = image_size[:2] p_h, p_w = patches.shape[1:3] img = np.zeros(image_size) # compute the dimensions of the patches array n_h = i_h - p_h + 1 n_w = i_w - p_w + 1 for p, (i, j) in zip(patches, product(range(n_h), range(n_w))): img[i:i + p_h, j:j + p_w] += p for i in range(i_h): for j in range(i_w): # divide by the amount of overlap # XXX: is this the most efficient way? memory-wise yes, cpu wise? img[i, j] /= float(min(i + 1, p_h, i_h - i) * min(j + 1, p_w, i_w - j)) return img class PatchExtractor(BaseEstimator): """Extracts patches from a collection of images Read more in the :ref:`User Guide <image_feature_extraction>`. Parameters ---------- patch_size : tuple of ints (patch_height, patch_width) the dimensions of one patch max_patches : integer or float, optional default is None The maximum number of patches per image to extract. If max_patches is a float in (0, 1), it is taken to mean a proportion of the total number of patches. random_state : int or RandomState Pseudo number generator state used for random sampling. 
""" def __init__(self, patch_size=None, max_patches=None, random_state=None): self.patch_size = patch_size self.max_patches = max_patches self.random_state = random_state def fit(self, X, y=None): """Do nothing and return the estimator unchanged This method is just there to implement the usual API and hence work in pipelines. """ return self def transform(self, X): """Transforms the image samples in X into a matrix of patch data. Parameters ---------- X : array, shape = (n_samples, image_height, image_width) or (n_samples, image_height, image_width, n_channels) Array of images from which to extract patches. For color images, the last dimension specifies the channel: a RGB image would have `n_channels=3`. Returns ------- patches: array, shape = (n_patches, patch_height, patch_width) or (n_patches, patch_height, patch_width, n_channels) The collection of patches extracted from the images, where `n_patches` is either `n_samples * max_patches` or the total number of patches that can be extracted. """ self.random_state = check_random_state(self.random_state) n_images, i_h, i_w = X.shape[:3] X = np.reshape(X, (n_images, i_h, i_w, -1)) n_channels = X.shape[-1] if self.patch_size is None: patch_size = i_h // 10, i_w // 10 else: patch_size = self.patch_size # compute the dimensions of the patches array p_h, p_w = patch_size n_patches = _compute_n_patches(i_h, i_w, p_h, p_w, self.max_patches) patches_shape = (n_images * n_patches,) + patch_size if n_channels > 1: patches_shape += (n_channels,) # extract the patches patches = np.empty(patches_shape) for ii, image in enumerate(X): patches[ii * n_patches:(ii + 1) * n_patches] = extract_patches_2d( image, patch_size, self.max_patches, self.random_state) return patches
bsd-3-clause
mobify/iterstuff
iterstuff/recipes.py
1
4131
from __future__ import absolute_import from iterstuff.lookahead import Lookahead def repeatable_takewhile(predicate, iterable): """ Return successive entries from an iterable as long as the predicate evaluates to true for each entry. Like itertools.takewhile, but does not consume the first element of the iterable that fails the predicate test. :param predicate: a single-element callable that returns True for elements that satisfy a condition, False for those that do not. :param iterable: must be a Lookahead """ # Assert that the iterable is a Lookahead. The act of wrapping # an iterable in a Lookahead consumes the first element, so we # cannot do the wrapping inside this function. if not isinstance(iterable, Lookahead): raise TypeError("The iterable parameter must be a Lookahead") # Use 'peek' to check if the next element will satisfy the # predicate, and yield while this is True, or until we reach # the end of the iterable. while (not iterable.atend) and predicate(iterable.peek): yield iterable.next() def batch(iterable, size): """ Yield iterables for successive slices of `iterable`, each containing up to `size` items, with the last being less than `size` if there are not sufficient items in `iterable`. Pass over the input iterable once only. Yield iterables, not lists. @note: each output iterable must be consumed in full before the next one is yielded. So list(batch(xrange(10), 3)) won't work as expected, because the iterables are not consumed. @param iterable: an input iterable. @param size: the maximum number of items yielded by any output iterable. """ # Wrap an enumeration of the iterable in a Lookahead so that it # yields (count, element) tuples it = Lookahead(enumerate(iterable)) while not it.atend: # Set the end_count using the count value # of the next element. end_count = it.peek[0] + size # Yield a generator that will then yield up to # 'size' elements from 'it'. 
yield ( element for counter, element in repeatable_takewhile( # t[0] is the count part of each element lambda t: t[0] < end_count, it ) ) def chunked(i, f=lambda _x: _x): """ Given an iterable i, apply f over it to extract a value from each element and yield successive iterables where the result of f for all elements is the same. In simpler language, if i is an iterable sorted on some key, yield chunks of that list where the key value is the same, each chunk being a separate iterable. Note that this function yields B{iterators}, not lists, and they refer back to the iterator passed in, so each B{must} be consumed completely before the next one is requested. @param i: an iterable. @param f: a function to be applied to each element of the iterable to extract the key. """ # Build a generator that return tuples of (element, key-of-element), # so that we only apply the key method to each element once. it = Lookahead((_x, f(_x)) for _x in i) def takechunk(): """ A generator closure that will yield values while the keys remain the same. Note that we cannot use L{itertools.takewhile} for this, because that takes elements and B{then} checks the predicate, so successive calls to itertools.takewhile for the same generator will skip elements. """ while True: # Always yield the first element: if we're at the end of the # generator, this will raise StopIteration and we're done. (_x, key) = it.next() yield _x # Check the lookahead's peek value to see if we should break now. # We also break when we're at the end of the generator. if it.atend or key != it.peek[1]: break # Yield successive instances of takechunk. while not it.atend: yield takechunk()
mit
mvpoland/django-smsgateway
smsgateway/views.py
1
1834
from django import forms from django.http import Http404 from django.conf import settings from django.shortcuts import render from django.contrib.admin.views.decorators import staff_member_required from smsgateway import send, __version__ from smsgateway.backends import get_backend accounts = getattr(settings, 'SMSGATEWAY_ACCOUNTS', {}) class BackendDebugForm(forms.Form): account = forms.ChoiceField(choices=[(k, k) for k in list(accounts.keys()) if k != '__default__']) recipients = forms.CharField(help_text='Separate multiple recipients with a semicolon (;).') message = forms.CharField(widget=forms.widgets.Textarea()) signature = forms.CharField() @staff_member_required def backend_debug(request): """ A form to let you send an SMS for debugging purposes. """ context = {} if request.method == 'POST': form = BackendDebugForm(request.POST) if form.is_valid(): success = send( form.cleaned_data['recipients'].split(';'), form.cleaned_data['message'], form.cleaned_data['signature'], form.cleaned_data['account'] ) if success: context.update({'message': 'Text message sent'}) else: context.update({'message': 'Sending failed'}) else: form = BackendDebugForm() context.update({ 'form': form, 'version': __version__, }) return render(request, 'smsgateway/backend_debug.html', context) def backend_handle_incoming(request, backend_name): """ Call the backend's handle_incoming method. """ if backend_name == 'debug': return backend_debug(request) b = get_backend(backend_name) if b is None: raise Http404 return b.handle_incoming(request)
bsd-3-clause
tarzan0820/odoo
addons/l10n_in_hr_payroll/__openerp__.py
374
2622
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Indian Payroll', 'category': 'Localization', 'author': 'OpenERP SA', 'website':'http://www.openerp.com', 'depends': ['hr_payroll'], 'version': '1.0', 'description': """ Indian Payroll Salary Rules. ============================ -Configuration of hr_payroll for India localization -All main contributions rules for India payslip. * New payslip report * Employee Contracts * Allow to configure Basic / Gross / Net Salary * Employee PaySlip * Allowance / Deduction * Integrated with Holiday Management * Medical Allowance, Travel Allowance, Child Allowance, ... 
- Payroll Advice and Report - Yearly Salary by Head and Yearly Salary by Employee Report """, 'active': False, 'data': [ 'l10n_in_hr_payroll_view.xml', 'data/l10n_in_hr_payroll_data.xml', 'data/hr.salary.rule.csv', 'security/ir.model.access.csv', 'l10n_in_hr_payroll_report.xml', 'l10n_in_hr_payroll_sequence.xml', 'views/report_payslipdetails.xml', 'views/report_hrsalarybymonth.xml', 'wizard/hr_salary_employee_bymonth_view.xml', 'wizard/hr_yearly_salary_detail_view.xml', 'report/payment_advice_report_view.xml', 'report/payslip_report_view.xml', 'views/report_hryearlysalary.xml', 'views/report_payrolladvice.xml', ], 'test': [ 'test/payment_advice.yml', 'test/payment_advice_batch.yml' ], 'demo': ['l10n_in_hr_payroll_demo.xml'], 'installable': True } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
wldcordeiro/servo
tests/wpt/css-tests/tools/pywebsocket/src/test/set_sys_path.py
496
1815
# Copyright 2009, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Configuration for testing. Test files should import this module before mod_pywebsocket. """ import os import sys # Add the parent directory to sys.path to enable importing mod_pywebsocket. sys.path.insert(0, os.path.join(os.path.split(__file__)[0], '..')) # vi:sts=4 sw=4 et
mpl-2.0
wcota/dynSIS-py
dynamics.py
1
8344
#!/usr/bin/env python
# ! ## File: dynamics.py
# ! ## See README.md for more information and use
# !-----------------------------------------------------------------------------
# ! SIS epidemic model algorithm based on the article
# !           Computer Physics Communications 219C (2017) pp. 303-312
# !           "Optimized Gillespie algorithms for the simulation of
# !            Markovian epidemic processes on large and heterogeneous networks"
# ! Copyright (C) 2017 Wesley Cota, Silvio C. Ferreira
# !
# ! Please cite the above cited paper (available at
# ! <http://dx.doi.org/10.1016/j.cpc.2017.06.007> ) as reference to our code.
# !
# ! This program is free software: you can redistribute it and/or modify
# ! it under the terms of the GNU General Public License as published by
# ! the Free Software Foundation, either version 3 of the License, or
# ! (at your option) any later version.
# !
# ! This program is distributed in the hope that it will be useful,
# ! but WITHOUT ANY WARRANTY; without even the implied warranty of
# ! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# ! GNU General Public License for more details.
# !
# ! You should have received a copy of the GNU General Public License
# ! along with this program.  If not, see <http://www.gnu.org/licenses/>.
# !-----------------------------------------------------------------------------
# ! Author    : Wesley Cota
# ! Email     : wesley.cota@ufv.br
# ! Date      : 27 Mar 2017
# ! Version   : 1.0
# !-----------------------------------------------------------------------------
# ! See README.md for more details
# ! This code is available at <https://github.com/wcota/dynSIS-py>
# ! For performance, see <https://github.com/wcota/dynSIS> (Fortran implementation)
# ! For NetworkX library, see <https://github.com/wcota/dynSIS-networkx> (NetworkX implementation)

from network import *
from tools import *

from math import log
import sys

print('################################################################################',
      '######### Optimized Gillespie algorithms for the simulation of Markovian ######',
      '####### epidemic processes on large and heterogeneous networks: SIS-OGA. #######',
      '##============ Copyright (C) 2017 Wesley Cota, Silvio C. Ferreira ============##',
      '##===== Paper available at <http://dx.doi.org/10.1016/j.cpc.2017.06.007> =====##',
      '##======= The codes are available at <https://github.com/wcota/dynSIS> =======##',
      '##======== Please cite the above cited paper as reference to our code ========##',
      '##=== This code is under GNU General Public License. Please see README.md. ===##',
      '################################################################################',
      '',
      sep='\n')

# READING PARAMETERS
if len(sys.argv) < 3:
    print_error('You must enter input and output names as arguments!')

fnInput = sys.argv[1]
fnOutput = sys.argv[2]

print_info('Reading dynamical parameters...')

dynp_sam = int(input('How much dynamics samples? '))
dynp_lb = float(input('Value of infection rate lambda (mu is defined as equal to 1): '))
dynp_tmax = int(input('Maximum time steps (it stops if the absorbing state is reached): '))
dynp_pINI = float(input('Fraction of infected vertices on the network as initial condition (is random \
for each sample): '))
# / READING PARAMETERS

# LOADING NETWORK
print_info('Loading network to memory...')
netw = readEdges(fnInput)
print_info('Everything ok!')
# / LOADING NETWORK

# PREPARING THE NECESSARY THINGS
net_kmax = max(netw.k)  # Used in the rejection probability

# BUGFIX: the deprecated alias ``np.int`` (removed in NumPy >= 1.24) has been
# replaced by the builtin ``int`` -- NumPy maps it to the same default
# integer dtype, so behavior is unchanged on older versions too.
avg_rho = np.zeros(dynp_tmax, np.float64)  # Average for rho at times t, averaged
avg_t = np.zeros(dynp_tmax, np.float64)
avg_sam = np.zeros(dynp_tmax, int)      # number of samples for each time t
avg_samSurv = np.zeros(dynp_tmax, int)  # and of survivng ones

dyn_VI = np.zeros(netw.size, int)   # list V^I (positions of the infected vertices)
dyn_sig = np.zeros(netw.size, int)  # sigma (0 = susceptible, 1 = infected)
# / PREPARING THE NECESSARY THINGS

# RUNNING DYNAMICS
print_info('Running dynamics...', True)

dyn_dt_pos_max = 0  # Auxiliar: largest integer time reached over all samples
for sam in range(1, dynp_sam + 1):
    print_info('Sample #' + str(sam), True)

    # Initial conditions
    print_info('Initial condition...')
    dyn_sig[:] = 0.0
    dyn_VI[:] = 0.0
    dyn_NI = 0  # N_I: number of infected vertices
    dyn_Nk = 0  # N_k: sum of degrees of the infected vertices

    # Sort vertices and apply the initial condition
    for i in range(0, int(netw.size * dynp_pINI)):
        # Rejection sampling: pick a vertex that is not yet infected
        while True:
            ver = np.random.randint(0, netw.size)
            if dyn_sig[ver] == 0:
                dyn_VI[dyn_NI] = ver
                dyn_NI += 1
                dyn_sig[ver] = 1
                dyn_Nk += netw.k[ver]
                break

    # Run dynamics
    dyn_t = 0
    dyn_dt = 0.0
    dyn_dt_pos = 1

    print_info('Running...')

    while dyn_t <= dynp_tmax and dyn_NI > 0:
        # SIS-OGA ALGORITHM
        # Calculate the total rate
        dyn_R = (dyn_NI + 1.0 * dynp_lb * dyn_Nk)

        # Select the time step (exponential waiting time)
        rnd = max(np.random.uniform(), 1e-12)  # Avoid u = 0
        dyn_dt = -log(rnd) / dyn_R

        # Update the time
        dyn_t += dyn_dt

        # Probability m to heal
        dyn_m = 1.0 * dyn_NI / dyn_R
        if np.random.uniform() < dyn_m:  # Select a random occupied vertex and heal.
            pos_inf = np.random.randint(0, dyn_NI)
            ver = dyn_VI[pos_inf]

            # Then, heal it
            dyn_sig[ver] = 0
            dyn_Nk -= netw.k[ver]
            dyn_NI -= 1
            dyn_VI[pos_inf] = dyn_VI[dyn_NI]
        else:  # If not, try to infect: w = 1 - m
            # Select the infected vertex i with prob. proportional to k_i
            while True:
                pos_inf = np.random.randint(0, dyn_NI)
                ver = dyn_VI[pos_inf]
                if np.random.uniform() < 1.0 * netw.k[ver] / (1.0 * net_kmax):
                    break

            # Select one of its neighbors
            pos_nei = np.random.randint(netw.ini[ver], netw.ini[ver] + netw.k[ver])
            ver = netw.con[pos_nei]

            if dyn_sig[ver] == 0:  # if not a phantom process, infect
                dyn_sig[ver] = 1
                dyn_Nk += netw.k[ver]
                dyn_VI[dyn_NI] = ver  # Add one element to list
                dyn_NI += 1           # Increase by 1 the list

        # Try to save the dynamics by time unit
        while (dyn_t >= dyn_dt_pos):
            # Save data
            avg_rho[dyn_dt_pos - 1] += 1.0 * dyn_NI / netw.size
            avg_t[dyn_dt_pos - 1] += dyn_t
            avg_sam[dyn_dt_pos - 1] += 1
            if dyn_NI != 0:
                avg_samSurv[dyn_dt_pos - 1] += 1
                dyn_dt_pos_max = max(dyn_dt_pos, dyn_dt_pos_max)  # The maximum t with non-null rho
            dyn_dt_pos += 1

    # if a absorbing state is reached, exit

    # Write output file (rewritten after every sample with the running average)
    flOutput = open(fnOutput, 'wt')
    print('## ***** Algorithm used: Optimized Gillespie Algorithm for SIS (SIS-OGA, Python) *****',
          '#@ Network file: ' + fnInput,
          '#@ Number of nodes: ' + str(netw.size),
          '#@ Number of edges: ' + str(netw.skk),
          '#@ Samples: ' + str(dynp_sam),
          '#! Infection rate (lambda): ' + str(dynp_lb),
          '#! Maximum time steps: ' + str(dynp_tmax),
          '#! Fraction of infected vertices (initial condition): ' + str(dynp_pINI),
          sep='\n', file=flOutput)

    for dt_pos in range(0, dyn_dt_pos_max):
        print(1.0 * avg_t[dt_pos] / avg_sam[dt_pos], 1.0 * avg_rho[dt_pos] / (1.0 * sam), file=flOutput)
        # If you use /avg_samSurv[dt_pos] instead of /(1.0*sam) to write avg_rho (2nd column), you have
        # QS analysis :)

    flOutput.close()
# / RUNNING DYNAMICS

print_info('')
print_info('Everything ok!', True)
print_info('Input file (edges list): ' + fnInput)
print_info('Output file: ' + fnOutput)
print_info('')
print_info('*****Algorithm used: Optimized Gillespie Algorithm for SIS (SIS-OGA, Python)*****')
print_info('Codes available at <https://github.com/wcota/dynSIS>.')
gpl-3.0
drewtalati/talaticoin
qa/rpc-tests/python-bitcoinrpc/bitcoinrpc/authproxy.py
305
5784
""" Copyright 2011 Jeff Garzik AuthServiceProxy has the following improvements over python-jsonrpc's ServiceProxy class: - HTTP connections persist for the life of the AuthServiceProxy object (if server supports HTTP/1.1) - sends protocol 'version', per JSON-RPC 1.1 - sends proper, incrementing 'id' - sends Basic HTTP authentication headers - parses all JSON numbers that look like floats as Decimal - uses standard Python json lib Previous copyright, from python-jsonrpc/jsonrpc/proxy.py: Copyright (c) 2007 Jan-Klaas Kollhof This file is part of jsonrpc. jsonrpc is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. 
You should have received a copy of the GNU Lesser General Public License along with this software; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA """ try: import http.client as httplib except ImportError: import httplib import base64 import decimal import json import logging try: import urllib.parse as urlparse except ImportError: import urlparse USER_AGENT = "AuthServiceProxy/0.1" HTTP_TIMEOUT = 30 log = logging.getLogger("BitcoinRPC") class JSONRPCException(Exception): def __init__(self, rpc_error): Exception.__init__(self) self.error = rpc_error def EncodeDecimal(o): if isinstance(o, decimal.Decimal): return round(o, 8) raise TypeError(repr(o) + " is not JSON serializable") class AuthServiceProxy(object): __id_count = 0 def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None): self.__service_url = service_url self.__service_name = service_name self.__url = urlparse.urlparse(service_url) if self.__url.port is None: port = 80 else: port = self.__url.port (user, passwd) = (self.__url.username, self.__url.password) try: user = user.encode('utf8') except AttributeError: pass try: passwd = passwd.encode('utf8') except AttributeError: pass authpair = user + b':' + passwd self.__auth_header = b'Basic ' + base64.b64encode(authpair) if connection: # Callables re-use the connection of the original proxy self.__conn = connection elif self.__url.scheme == 'https': self.__conn = httplib.HTTPSConnection(self.__url.hostname, port, None, None, False, timeout) else: self.__conn = httplib.HTTPConnection(self.__url.hostname, port, False, timeout) def __getattr__(self, name): if name.startswith('__') and name.endswith('__'): # Python internal stuff raise AttributeError if self.__service_name is not None: name = "%s.%s" % (self.__service_name, name) return AuthServiceProxy(self.__service_url, name, connection=self.__conn) def __call__(self, *args): AuthServiceProxy.__id_count += 1 
log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self.__service_name, json.dumps(args, default=EncodeDecimal))) postdata = json.dumps({'version': '1.1', 'method': self.__service_name, 'params': args, 'id': AuthServiceProxy.__id_count}, default=EncodeDecimal) self.__conn.request('POST', self.__url.path, postdata, {'Host': self.__url.hostname, 'User-Agent': USER_AGENT, 'Authorization': self.__auth_header, 'Content-type': 'application/json'}) response = self._get_response() if response['error'] is not None: raise JSONRPCException(response['error']) elif 'result' not in response: raise JSONRPCException({ 'code': -343, 'message': 'missing JSON-RPC result'}) else: return response['result'] def _batch(self, rpc_call_list): postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal) log.debug("--> "+postdata) self.__conn.request('POST', self.__url.path, postdata, {'Host': self.__url.hostname, 'User-Agent': USER_AGENT, 'Authorization': self.__auth_header, 'Content-type': 'application/json'}) return self._get_response() def _get_response(self): http_response = self.__conn.getresponse() if http_response is None: raise JSONRPCException({ 'code': -342, 'message': 'missing HTTP response from server'}) responsedata = http_response.read().decode('utf8') response = json.loads(responsedata, parse_float=decimal.Decimal) if "error" in response and response["error"] is None: log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal))) else: log.debug("<-- "+responsedata) return response
mit
NMTHydro/Recharge
utils/TAW_optimization_subroutine/create_geo_info_file.py
1
2526
# ===============================================================================
# Copyright 2018 gabe-parrish
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================

# ============= standard library imports ========================
import os
import gdal
import sys
import yaml
# ============= local library imports ===========================


def extract_geo_info(geotiff_path):
    """Read the georeferencing metadata of a GeoTIFF.

    :param geotiff_path: filepath to a GDAL-readable raster
    :return: dict with keys 'geotransform' (GDAL affine transform tuple),
        'dimensions' ((cols, rows) tuple) and 'projection' (WKT string)

    Exits the process if the raster cannot be opened.
    """
    gdal.AllRegister()

    # open the raster datasource
    datasource_obj = gdal.Open(geotiff_path)
    if datasource_obj is None:
        # BUGFIX: this was a Python 2 `print` statement, a SyntaxError under
        # Python 3.  The parenthesized call is identical on both versions.
        print("Can't open the datasource from {}".format(geotiff_path))
        sys.exit(1)

    # get the size of image (for reading)
    rows = datasource_obj.RasterYSize
    cols = datasource_obj.RasterXSize

    # x - cols, y - rows
    dimensions = (cols, rows)

    # get the projection
    proj = datasource_obj.GetProjection()

    # get georefference info to eventually calculate the offset:
    transform = datasource_obj.GetGeoTransform()

    geo_dict = {'geotransform': transform, 'dimensions': dimensions,
                'projection': proj}

    return geo_dict


def main(sample_file, output_path, filename):
    """Save the geo metadata of an ETRM domain raster to a YAML file.

    :param sample_file: filepath to a geotiff representing the ETRM model
        domain for the TAW optimization
    :param output_path: directory where the YAML file is written
    :param filename: name of the YAML file to create
    :return: None (writes ``output_path/filename``)
    """
    geo_dict = extract_geo_info(sample_file)

    yml_file = os.path.join(output_path, filename)
    with open(yml_file, 'w') as w_file:
        yaml.dump(geo_dict, w_file)


if __name__ == "__main__":
    sample_geotiff_file_path = '/Volumes/Seagate_Expansion_Drive/ETRM_espanola_aoi_inputs/statics/taw_reduced.tif'
    output_path = '/Volumes/Seagate_Expansion_Drive/taw_optimization_work_folder'
    main(sample_file=sample_geotiff_file_path, output_path=output_path,
         filename='geo_info_espanola.yml')
apache-2.0
odoocn/odoomrp-wip
product_secondary_unit/__openerp__.py
27
1577
# -*- encoding: utf-8 -*- ############################################################################## # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see http://www.gnu.org/licenses/. # ############################################################################## { "name": "Product Secondary Unit", "version": "1.0", "depends": [ "product", "sale_uos_price", "purchase_secondary_unit", ], "author": "OdooMRP team," "AvanzOSC," "Serv. Tecnol. Avanzados - Pedro M. Baeza", "website": "http://www.odoomrp.com", "contributors": [ "Oihane Crucelaegui <oihanecrucelaegi@avanzosc.es>", "Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>", "Ana Juaristi <ajuaristio@gmail.com>" ], "category": "Hidden/Dependency", "summary": "Secondary unit functionalities", "data": [ "views/pricelist_view.xml", ], "installable": True, "auto_install": True, }
agpl-3.0
demoforwork/public
SelfServiceProvisioning/lib/oauth2client/contrib/keyring_storage.py
39
3091
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""A keyring based Storage.

A Storage for Credentials that uses the keyring module.
"""

import threading

import keyring

from oauth2client import client


class Storage(client.Storage):
    """Store and retrieve a single credential to and from the keyring.

    To use this module you must have the keyring module installed. See
    <http://pypi.python.org/pypi/keyring/>. This is an optional module and is
    not installed with oauth2client by default because it does not work on
    all the platforms that oauth2client supports, such as Google App Engine.

    The keyring module <http://pypi.python.org/pypi/keyring/> is a
    cross-platform library for access the keyring capabilities of the local
    system. The user will be prompted for their keyring password when this
    module is used, and the manner in which the user is prompted will vary per
    platform.

    Usage::

        from oauth2client import keyring_storage

        s = keyring_storage.Storage('name_of_application', 'user1')
        credentials = s.get()

    """

    def __init__(self, service_name, user_name):
        """Constructor.

        Args:
            service_name: string, The name of the service under which the
                          credentials are stored.
            user_name: string, The name of the user to store credentials for.
        """
        # The base-class lock makes the locked_* methods safe across threads.
        super(Storage, self).__init__(lock=threading.Lock())
        self._service_name = service_name
        self._user_name = user_name

    def locked_get(self):
        """Retrieve Credentials from the keyring.

        Returns:
            oauth2client.client.Credentials, or None if no entry is stored
            or the stored entry cannot be parsed.
        """
        credentials = None
        content = keyring.get_password(self._service_name, self._user_name)

        if content is not None:
            try:
                credentials = client.Credentials.new_from_json(content)
                credentials.set_store(self)
            except ValueError:
                # Stored blob is not valid credentials JSON; treat as absent.
                pass
        return credentials

    def locked_put(self, credentials):
        """Write Credentials to the keyring.

        Args:
            credentials: Credentials, the credentials to store.
        """
        keyring.set_password(self._service_name, self._user_name,
                             credentials.to_json())

    def locked_delete(self):
        """Delete Credentials from the keyring.

        NOTE(review): this overwrites the stored entry with an empty string
        rather than removing it (keyring.delete_password would remove the
        entry entirely); confirm this is intentional before changing.
        """
        keyring.set_password(self._service_name, self._user_name, '')
apache-2.0
Gillingham/evething
thing/models/blueprintcomponent.py
2
2118
# ------------------------------------------------------------------------------ # Copyright (c) 2010-2013, EVEthing team # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY # OF SUCH DAMAGE. 
# ------------------------------------------------------------------------------ from django.db import models from thing.models.blueprint import Blueprint from thing.models.item import Item from thing.models.industryjob import IndustryJob class BlueprintComponent(models.Model): """Blueprint components""" blueprint = models.ForeignKey(Blueprint) activity = models.IntegerField(choices=IndustryJob.ACTIVITY_CHOICES) item = models.ForeignKey(Item) count = models.IntegerField() consumed = models.BooleanField(default=False) class Meta: app_label = 'thing' def __unicode__(self): return '%dx %s' % (self.count, self.item.name)
bsd-2-clause
qwertyjune/BethSaidaBible
venv/lib/python2.7/site-packages/django/db/backends/mysql/base.py
33
23719
""" MySQL database backend for Django. Requires mysqlclient: https://pypi.python.org/pypi/mysqlclient/ MySQLdb is supported for Python 2 only: http://sourceforge.net/projects/mysql-python """ from __future__ import unicode_literals import datetime import re import sys import warnings try: import MySQLdb as Database except ImportError as e: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e) # We want version (1, 2, 1, 'final', 2) or later. We can't just use # lexicographic ordering in this check because then (1, 2, 1, 'gamma') # inadvertently passes the version test. version = Database.version_info if (version < (1, 2, 1) or (version[:3] == (1, 2, 1) and (len(version) < 5 or version[3] != 'final' or version[4] < 2))): from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__) from MySQLdb.converters import conversions, Thing2Literal from MySQLdb.constants import FIELD_TYPE, CLIENT try: import pytz except ImportError: pytz = None from django.conf import settings from django.db import utils from django.db.backends import (utils as backend_utils, BaseDatabaseFeatures, BaseDatabaseOperations, BaseDatabaseWrapper) from django.db.backends.mysql.client import DatabaseClient from django.db.backends.mysql.creation import DatabaseCreation from django.db.backends.mysql.introspection import DatabaseIntrospection from django.db.backends.mysql.validation import DatabaseValidation from django.utils.encoding import force_str, force_text from django.db.backends.mysql.schema import DatabaseSchemaEditor from django.utils.functional import cached_property from django.utils.safestring import SafeBytes, SafeText from django.utils import six from django.utils import timezone # Raise exceptions for database warnings if DEBUG is on if settings.DEBUG: warnings.filterwarnings("error", category=Database.Warning) 
DatabaseError = Database.DatabaseError IntegrityError = Database.IntegrityError # It's impossible to import datetime_or_None directly from MySQLdb.times parse_datetime = conversions[FIELD_TYPE.DATETIME] def parse_datetime_with_timezone_support(value): dt = parse_datetime(value) # Confirm that dt is naive before overwriting its tzinfo. if dt is not None and settings.USE_TZ and timezone.is_naive(dt): dt = dt.replace(tzinfo=timezone.utc) return dt def adapt_datetime_with_timezone_support(value, conv): # Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL. if settings.USE_TZ: if timezone.is_naive(value): warnings.warn("MySQL received a naive datetime (%s)" " while time zone support is active." % value, RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) value = value.astimezone(timezone.utc).replace(tzinfo=None) return Thing2Literal(value.strftime("%Y-%m-%d %H:%M:%S"), conv) # MySQLdb-1.2.1 returns TIME columns as timedelta -- they are more like # timedelta in terms of actual behavior as they are signed and include days -- # and Django expects time, so we still need to override that. We also need to # add special handling for SafeText and SafeBytes as MySQLdb's type # checking is too tight to catch those (see Django ticket #6052). # Finally, MySQLdb always returns naive datetime objects. However, when # timezone support is active, Django expects timezone-aware datetime objects. django_conversions = conversions.copy() django_conversions.update({ FIELD_TYPE.TIME: backend_utils.typecast_time, FIELD_TYPE.DECIMAL: backend_utils.typecast_decimal, FIELD_TYPE.NEWDECIMAL: backend_utils.typecast_decimal, FIELD_TYPE.DATETIME: parse_datetime_with_timezone_support, datetime.datetime: adapt_datetime_with_timezone_support, }) # This should match the numerical portion of the version numbers (we can treat # versions like 5.0.24 and 5.0.24a as the same). 
Based on the list of version # at http://dev.mysql.com/doc/refman/4.1/en/news.html and # http://dev.mysql.com/doc/refman/5.0/en/news.html . server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})') # MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on # MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the # point is to raise Warnings as exceptions, this can be done with the Python # warning module, and this is setup when the connection is created, and the # standard backend_utils.CursorDebugWrapper can be used. Also, using sql_mode # TRADITIONAL will automatically cause most warnings to be treated as errors. class CursorWrapper(object): """ A thin wrapper around MySQLdb's normal cursor class so that we can catch particular exception instances and reraise them with the right types. Implemented as a wrapper, rather than a subclass, so that we aren't stuck to the particular underlying representation returned by Connection.cursor(). """ codes_for_integrityerror = (1048,) def __init__(self, cursor): self.cursor = cursor def execute(self, query, args=None): try: # args is None means no string interpolation return self.cursor.execute(query, args) except Database.OperationalError as e: # Map some error codes to IntegrityError, since they seem to be # misclassified and Django would prefer the more logical place. if e.args[0] in self.codes_for_integrityerror: six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) raise def executemany(self, query, args): try: return self.cursor.executemany(query, args) except Database.OperationalError as e: # Map some error codes to IntegrityError, since they seem to be # misclassified and Django would prefer the more logical place. 
if e.args[0] in self.codes_for_integrityerror: six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) raise def __getattr__(self, attr): if attr in self.__dict__: return self.__dict__[attr] else: return getattr(self.cursor, attr) def __iter__(self): return iter(self.cursor) def __enter__(self): return self def __exit__(self, type, value, traceback): # Ticket #17671 - Close instead of passing thru to avoid backend # specific behavior. self.close() class DatabaseFeatures(BaseDatabaseFeatures): empty_fetchmany_value = () update_can_self_select = False allows_group_by_pk = True related_fields_match_type = True allow_sliced_subqueries = False has_bulk_insert = True has_select_for_update = True has_select_for_update_nowait = False supports_forward_references = False supports_long_model_names = False # XXX MySQL DB-API drivers currently fail on binary data on Python 3. supports_binary_field = six.PY2 supports_microsecond_precision = False supports_regex_backreferencing = False supports_date_lookup_using_string = False can_introspect_binary_field = False can_introspect_boolean_field = False supports_timezones = False requires_explicit_null_ordering_when_grouping = True allows_auto_pk_0 = False uses_savepoints = True atomic_transactions = False supports_column_check_constraints = False def __init__(self, connection): super(DatabaseFeatures, self).__init__(connection) @cached_property def _mysql_storage_engine(self): "Internal method used in Django tests. Don't rely on this from your code" with self.connection.cursor() as cursor: cursor.execute('CREATE TABLE INTROSPECT_TEST (X INT)') # This command is MySQL specific; the second column # will tell you the default table type of the created # table. Since all Django's test tables will have the same # table type, that's enough to evaluate the feature. 
cursor.execute("SHOW TABLE STATUS WHERE Name='INTROSPECT_TEST'") result = cursor.fetchone() cursor.execute('DROP TABLE INTROSPECT_TEST') return result[1] @cached_property def can_introspect_foreign_keys(self): "Confirm support for introspected foreign keys" return self._mysql_storage_engine != 'MyISAM' @cached_property def has_zoneinfo_database(self): # MySQL accepts full time zones names (eg. Africa/Nairobi) but rejects # abbreviations (eg. EAT). When pytz isn't installed and the current # time zone is LocalTimezone (the only sensible value in this # context), the current time zone name will be an abbreviation. As a # consequence, MySQL cannot perform time zone conversions reliably. if pytz is None: return False # Test if the time zone definitions are installed. with self.connection.cursor() as cursor: cursor.execute("SELECT 1 FROM mysql.time_zone LIMIT 1") return cursor.fetchone() is not None class DatabaseOperations(BaseDatabaseOperations): compiler_module = "django.db.backends.mysql.compiler" # MySQL stores positive fields as UNSIGNED ints. integer_field_ranges = dict(BaseDatabaseOperations.integer_field_ranges, PositiveSmallIntegerField=(0, 4294967295), PositiveIntegerField=(0, 18446744073709551615), ) def date_extract_sql(self, lookup_type, field_name): # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html if lookup_type == 'week_day': # DAYOFWEEK() returns an integer, 1-7, Sunday=1. # Note: WEEKDAY() returns 0-6, Monday=0. return "DAYOFWEEK(%s)" % field_name else: return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) def date_trunc_sql(self, lookup_type, field_name): fields = ['year', 'month', 'day', 'hour', 'minute', 'second'] format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape. 
format_def = ('0000-', '01', '-01', ' 00:', '00', ':00') try: i = fields.index(lookup_type) + 1 except ValueError: sql = field_name else: format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]]) sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str) return sql def datetime_extract_sql(self, lookup_type, field_name, tzname): if settings.USE_TZ: field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name params = [tzname] else: params = [] # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html if lookup_type == 'week_day': # DAYOFWEEK() returns an integer, 1-7, Sunday=1. # Note: WEEKDAY() returns 0-6, Monday=0. sql = "DAYOFWEEK(%s)" % field_name else: sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) return sql, params def datetime_trunc_sql(self, lookup_type, field_name, tzname): if settings.USE_TZ: field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name params = [tzname] else: params = [] fields = ['year', 'month', 'day', 'hour', 'minute', 'second'] format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape. format_def = ('0000-', '01', '-01', ' 00:', '00', ':00') try: i = fields.index(lookup_type) + 1 except ValueError: sql = field_name else: format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]]) sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str) return sql, params def date_interval_sql(self, sql, connector, timedelta): return "(%s %s INTERVAL '%d 0:0:%d:%d' DAY_MICROSECOND)" % (sql, connector, timedelta.days, timedelta.seconds, timedelta.microseconds) def drop_foreignkey_sql(self): return "DROP FOREIGN KEY" def force_no_ordering(self): """ "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped columns. If no ordering would otherwise be applied, we don't want any implicit sorting going on. 
""" return ["NULL"] def fulltext_search_sql(self, field_name): return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name def last_executed_query(self, cursor, sql, params): # With MySQLdb, cursor objects have an (undocumented) "_last_executed" # attribute where the exact query sent to the database is saved. # See MySQLdb/cursors.py in the source distribution. return force_text(getattr(cursor, '_last_executed', None), errors='replace') def no_limit_value(self): # 2**64 - 1, as recommended by the MySQL documentation return 18446744073709551615 def quote_name(self, name): if name.startswith("`") and name.endswith("`"): return name # Quoting once is enough. return "`%s`" % name def random_function_sql(self): return 'RAND()' def sql_flush(self, style, tables, sequences, allow_cascade=False): # NB: The generated SQL below is specific to MySQL # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements # to clear all tables of all data if tables: sql = ['SET FOREIGN_KEY_CHECKS = 0;'] for table in tables: sql.append('%s %s;' % ( style.SQL_KEYWORD('TRUNCATE'), style.SQL_FIELD(self.quote_name(table)), )) sql.append('SET FOREIGN_KEY_CHECKS = 1;') sql.extend(self.sequence_reset_by_name_sql(style, sequences)) return sql else: return [] def sequence_reset_by_name_sql(self, style, sequences): # Truncate already resets the AUTO_INCREMENT field from # MySQL version 5.0.13 onwards. Refs #16961. if self.connection.mysql_version < (5, 0, 13): return [ "%s %s %s %s %s;" % ( style.SQL_KEYWORD('ALTER'), style.SQL_KEYWORD('TABLE'), style.SQL_TABLE(self.quote_name(sequence['table'])), style.SQL_KEYWORD('AUTO_INCREMENT'), style.SQL_FIELD('= 1'), ) for sequence in sequences ] else: return [] def validate_autopk_value(self, value): # MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653. 
if value == 0: raise ValueError('The database backend does not accept 0 as a ' 'value for AutoField.') return value def value_to_db_datetime(self, value): if value is None: return None # MySQL doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = value.astimezone(timezone.utc).replace(tzinfo=None) else: raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.") # MySQL doesn't support microseconds return six.text_type(value.replace(microsecond=0)) def value_to_db_time(self, value): if value is None: return None # MySQL doesn't support tz-aware times if timezone.is_aware(value): raise ValueError("MySQL backend does not support timezone-aware times.") # MySQL doesn't support microseconds return six.text_type(value.replace(microsecond=0)) def year_lookup_bounds_for_datetime_field(self, value): # Again, no microseconds first, second = super(DatabaseOperations, self).year_lookup_bounds_for_datetime_field(value) return [first.replace(microsecond=0), second.replace(microsecond=0)] def max_name_length(self): return 64 def bulk_insert_sql(self, fields, num_values): items_sql = "(%s)" % ", ".join(["%s"] * len(fields)) return "VALUES " + ", ".join([items_sql] * num_values) def combine_expression(self, connector, sub_expressions): """ MySQL requires special cases for ^ operators in query expressions """ if connector == '^': return 'POW(%s)' % ','.join(sub_expressions) return super(DatabaseOperations, self).combine_expression(connector, sub_expressions) class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'mysql' operators = { 'exact': '= %s', 'iexact': 'LIKE %s', 'contains': 'LIKE BINARY %s', 'icontains': 'LIKE %s', 'regex': 'REGEXP BINARY %s', 'iregex': 'REGEXP %s', 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': 'LIKE BINARY %s', 'endswith': 'LIKE BINARY %s', 'istartswith': 'LIKE %s', 'iendswith': 'LIKE %s', } Database = Database def __init__(self, *args, **kwargs): 
super(DatabaseWrapper, self).__init__(*args, **kwargs) self.features = DatabaseFeatures(self) self.ops = DatabaseOperations(self) self.client = DatabaseClient(self) self.creation = DatabaseCreation(self) self.introspection = DatabaseIntrospection(self) self.validation = DatabaseValidation(self) def get_connection_params(self): kwargs = { 'conv': django_conversions, 'charset': 'utf8', } if six.PY2: kwargs['use_unicode'] = True settings_dict = self.settings_dict if settings_dict['USER']: kwargs['user'] = settings_dict['USER'] if settings_dict['NAME']: kwargs['db'] = settings_dict['NAME'] if settings_dict['PASSWORD']: kwargs['passwd'] = force_str(settings_dict['PASSWORD']) if settings_dict['HOST'].startswith('/'): kwargs['unix_socket'] = settings_dict['HOST'] elif settings_dict['HOST']: kwargs['host'] = settings_dict['HOST'] if settings_dict['PORT']: kwargs['port'] = int(settings_dict['PORT']) # We need the number of potentially affected rows after an # "UPDATE", not the number of changed rows. kwargs['client_flag'] = CLIENT.FOUND_ROWS kwargs.update(settings_dict['OPTIONS']) return kwargs def get_new_connection(self, conn_params): conn = Database.connect(**conn_params) conn.encoders[SafeText] = conn.encoders[six.text_type] conn.encoders[SafeBytes] = conn.encoders[bytes] return conn def init_connection_state(self): with self.cursor() as cursor: # SQL_AUTO_IS_NULL in MySQL controls whether an AUTO_INCREMENT column # on a recently-inserted row will return when the field is tested for # NULL. Disabling this value brings this aspect of MySQL in line with # SQL standards. 
cursor.execute('SET SQL_AUTO_IS_NULL = 0') def create_cursor(self): cursor = self.connection.cursor() return CursorWrapper(cursor) def _rollback(self): try: BaseDatabaseWrapper._rollback(self) except Database.NotSupportedError: pass def _set_autocommit(self, autocommit): with self.wrap_database_errors: self.connection.autocommit(autocommit) def disable_constraint_checking(self): """ Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True, to indicate constraint checks need to be re-enabled. """ self.cursor().execute('SET foreign_key_checks=0') return True def enable_constraint_checking(self): """ Re-enable foreign key checks after they have been disabled. """ # Override needs_rollback in case constraint_checks_disabled is # nested inside transaction.atomic. self.needs_rollback, needs_rollback = False, self.needs_rollback try: self.cursor().execute('SET foreign_key_checks=1') finally: self.needs_rollback = needs_rollback def check_constraints(self, table_names=None): """ Checks each table name in `table_names` for rows with invalid foreign key references. This method is intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to determine if rows with invalid references were entered while constraint checks were off. Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides detailed information about the invalid reference in the error message. Backends can override this method if they can more directly apply constraint checking (e.g. 
via "SET CONSTRAINTS ALL IMMEDIATE") """ cursor = self.cursor() if table_names is None: table_names = self.introspection.table_names(cursor) for table_name in table_names: primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) if not primary_key_column_name: continue key_columns = self.introspection.get_key_columns(cursor, table_name) for column_name, referenced_table_name, referenced_column_name in key_columns: cursor.execute(""" SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING LEFT JOIN `%s` as REFERRED ON (REFERRING.`%s` = REFERRED.`%s`) WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL""" % (primary_key_column_name, column_name, table_name, referenced_table_name, column_name, referenced_column_name, column_name, referenced_column_name)) for bad_row in cursor.fetchall(): raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid " "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s." % (table_name, bad_row[0], table_name, column_name, bad_row[1], referenced_table_name, referenced_column_name)) def schema_editor(self, *args, **kwargs): "Returns a new instance of this backend's SchemaEditor" return DatabaseSchemaEditor(self, *args, **kwargs) def is_usable(self): try: self.connection.ping() except Database.Error: return False else: return True @cached_property def mysql_version(self): with self.temporary_connection(): server_info = self.connection.get_server_info() match = server_version_re.match(server_info) if not match: raise Exception('Unable to determine MySQL version from version string %r' % server_info) return tuple(int(x) for x in match.groups())
gpl-3.0
chiefspace/udemy-rest-api
udemy_rest_api_section5/code/env/lib/python3.4/site-packages/jinja2/_compat.py
214
2596
# -*- coding: utf-8 -*- """ jinja2._compat ~~~~~~~~~~~~~~ Some py2/py3 compatibility support based on a stripped down version of six so we don't have to depend on a specific version of it. :copyright: Copyright 2013 by the Jinja team, see AUTHORS. :license: BSD, see LICENSE for details. """ import sys PY2 = sys.version_info[0] == 2 PYPY = hasattr(sys, 'pypy_translation_info') _identity = lambda x: x if not PY2: unichr = chr range_type = range text_type = str string_types = (str,) integer_types = (int,) iterkeys = lambda d: iter(d.keys()) itervalues = lambda d: iter(d.values()) iteritems = lambda d: iter(d.items()) import pickle from io import BytesIO, StringIO NativeStringIO = StringIO def reraise(tp, value, tb=None): if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value ifilter = filter imap = map izip = zip intern = sys.intern implements_iterator = _identity implements_to_string = _identity encode_filename = _identity else: unichr = unichr text_type = unicode range_type = xrange string_types = (str, unicode) integer_types = (int, long) iterkeys = lambda d: d.iterkeys() itervalues = lambda d: d.itervalues() iteritems = lambda d: d.iteritems() import cPickle as pickle from cStringIO import StringIO as BytesIO, StringIO NativeStringIO = BytesIO exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') from itertools import imap, izip, ifilter intern = intern def implements_iterator(cls): cls.next = cls.__next__ del cls.__next__ return cls def implements_to_string(cls): cls.__unicode__ = cls.__str__ cls.__str__ = lambda x: x.__unicode__().encode('utf-8') return cls def encode_filename(filename): if isinstance(filename, unicode): return filename.encode('utf-8') return filename def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a # dummy metaclass for one level of class instantiation that replaces # itself with the actual metaclass. 
class metaclass(type): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, 'temporary_class', (), {}) try: from urllib.parse import quote_from_bytes as url_quote except ImportError: from urllib import quote as url_quote
gpl-2.0
andriibekker/biddingsbase
django/contrib/flatpages/views.py
295
2613
from django.contrib.flatpages.models import FlatPage from django.template import loader, RequestContext from django.shortcuts import get_object_or_404 from django.http import HttpResponse, HttpResponseRedirect from django.conf import settings from django.core.xheaders import populate_xheaders from django.utils.safestring import mark_safe from django.views.decorators.csrf import csrf_protect DEFAULT_TEMPLATE = 'flatpages/default.html' # This view is called from FlatpageFallbackMiddleware.process_response # when a 404 is raised, which often means CsrfViewMiddleware.process_view # has not been called even if CsrfViewMiddleware is installed. So we need # to use @csrf_protect, in case the template needs {% csrf_token %}. # However, we can't just wrap this view; if no matching flatpage exists, # or a redirect is required for authentication, the 404 needs to be returned # without any CSRF checks. Therefore, we only # CSRF protect the internal implementation. def flatpage(request, url): """ Public interface to the flat page view. Models: `flatpages.flatpages` Templates: Uses the template defined by the ``template_name`` field, or `flatpages/default.html` if template_name is not defined. Context: flatpage `flatpages.flatpages` object """ if not url.endswith('/') and settings.APPEND_SLASH: return HttpResponseRedirect("%s/" % request.path) if not url.startswith('/'): url = "/" + url f = get_object_or_404(FlatPage, url__exact=url, sites__id__exact=settings.SITE_ID) return render_flatpage(request, f) @csrf_protect def render_flatpage(request, f): """ Internal interface to the flat page view. """ # If registration is required for accessing this page, and the user isn't # logged in, redirect to the login page. 
if f.registration_required and not request.user.is_authenticated(): from django.contrib.auth.views import redirect_to_login return redirect_to_login(request.path) if f.template_name: t = loader.select_template((f.template_name, DEFAULT_TEMPLATE)) else: t = loader.get_template(DEFAULT_TEMPLATE) # To avoid having to always use the "|safe" filter in flatpage templates, # mark the title and content as already safe (since they are raw HTML # content in the first place). f.title = mark_safe(f.title) f.content = mark_safe(f.content) c = RequestContext(request, { 'flatpage': f, }) response = HttpResponse(t.render(c)) populate_xheaders(request, response, FlatPage, f.id) return response
bsd-3-clause
gurneyalex/odoo
addons/mrp/wizard/mrp_product_produce.py
3
8943
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from datetime import datetime from odoo import api, fields, models, _ from odoo.exceptions import UserError from odoo.tools import float_compare class MrpProductProduce(models.TransientModel): _name = "mrp.product.produce" _description = "Record Production" _inherit = ["mrp.abstract.workorder"] @api.model def default_get(self, fields): res = super(MrpProductProduce, self).default_get(fields) production = self.env['mrp.production'] production_id = self.env.context.get('default_production_id') or self.env.context.get('active_id') if production_id: production = self.env['mrp.production'].browse(production_id) if production.exists(): serial_finished = (production.product_id.tracking == 'serial') todo_uom = production.product_uom_id.id todo_quantity = self._get_todo(production) if serial_finished: todo_quantity = 1.0 if production.product_uom_id.uom_type != 'reference': todo_uom = self.env['uom.uom'].search([('category_id', '=', production.product_uom_id.category_id.id), ('uom_type', '=', 'reference')]).id if 'production_id' in fields: res['production_id'] = production.id if 'product_id' in fields: res['product_id'] = production.product_id.id if 'product_uom_id' in fields: res['product_uom_id'] = todo_uom if 'serial' in fields: res['serial'] = bool(serial_finished) if 'qty_producing' in fields: res['qty_producing'] = todo_quantity if 'consumption' in fields: res['consumption'] = production.bom_id.consumption return res serial = fields.Boolean('Requires Serial') product_tracking = fields.Selection(related="product_id.tracking") is_pending_production = fields.Boolean(compute='_compute_pending_production') move_raw_ids = fields.One2many(related='production_id.move_raw_ids', string="PO Components") move_finished_ids = fields.One2many(related='production_id.move_finished_ids') raw_workorder_line_ids = fields.One2many('mrp.product.produce.line', 'raw_product_produce_id', 
string='Components') finished_workorder_line_ids = fields.One2many('mrp.product.produce.line', 'finished_product_produce_id', string='By-products') production_id = fields.Many2one('mrp.production', 'Manufacturing Order', required=True, ondelete='cascade') @api.depends('qty_producing') def _compute_pending_production(self): """ Compute if it exits remaining quantity once the quantity on the current wizard will be processed. The purpose is to display or not button 'continue'. """ for product_produce in self: remaining_qty = product_produce._get_todo(product_produce.production_id) product_produce.is_pending_production = remaining_qty - product_produce.qty_producing > 0.0 def continue_production(self): """ Save current wizard and directly opens a new. """ self.ensure_one() self._record_production() action = self.production_id.open_produce_product() action['context'] = {'default_production_id': self.production_id.id} return action def action_generate_serial(self): self.ensure_one() product_produce_wiz = self.env.ref('mrp.view_mrp_product_produce_wizard', False) self.finished_lot_id = self.env['stock.production.lot'].create({ 'product_id': self.product_id.id, 'company_id': self.production_id.company_id.id }) return { 'name': _('Produce'), 'type': 'ir.actions.act_window', 'view_mode': 'form', 'res_model': 'mrp.product.produce', 'res_id': self.id, 'view_id': product_produce_wiz.id, 'target': 'new', } def do_produce(self): """ Save the current wizard and go back to the MO. """ self.ensure_one() self._record_production() self._check_company() return {'type': 'ir.actions.act_window_close'} def _get_todo(self, production): """ This method will return remaining todo quantity of production. 
""" main_product_moves = production.move_finished_ids.filtered(lambda x: x.product_id.id == production.product_id.id) todo_quantity = production.product_qty - sum(main_product_moves.mapped('quantity_done')) todo_quantity = todo_quantity if (todo_quantity > 0) else 0 return todo_quantity def _record_production(self): # Check all the product_produce line have a move id (the user can add product # to consume directly in the wizard) for line in self._workorder_line_ids(): if not line.move_id: # Find move_id that would match if line.raw_product_produce_id: moves = line.raw_product_produce_id.move_raw_ids else: moves = line.finished_product_produce_id.move_finished_ids move_id = moves.filtered(lambda m: m.product_id == line.product_id and m.state not in ('done', 'cancel')) if not move_id: # create a move to assign it to the line production = line._get_production() if line.raw_product_produce_id: values = { 'name': production.name, 'reference': production.name, 'product_id': line.product_id.id, 'product_uom': line.product_uom_id.id, 'location_id': production.location_src_id.id, 'location_dest_id': self.product_id.property_stock_production.id, 'raw_material_production_id': production.id, 'group_id': production.procurement_group_id.id, 'origin': production.name, 'state': 'confirmed', 'company_id': production.company_id.id, } else: values = production._get_finished_move_value(line.product_id.id, 0, line.product_uom_id.id) move_id = self.env['stock.move'].create(values) line.move_id = move_id.id # because of an ORM limitation (fields on transient models are not # recomputed by updates in non-transient models), the related fields on # this model are not recomputed by the creations above self.invalidate_cache(['move_raw_ids', 'move_finished_ids']) # Save product produce lines data into stock moves/move lines for wizard in self: quantity = wizard.qty_producing if float_compare(quantity, 0, precision_rounding=self.product_uom_id.rounding) <= 0: raise UserError(_("The production 
order for '%s' has no quantity specified.") % self.product_id.display_name) self._update_finished_move() self._update_moves() self.production_id.filtered(lambda mo: mo.state == 'confirmed').write({ 'date_start': datetime.now(), }) class MrpProductProduceLine(models.TransientModel): _name = 'mrp.product.produce.line' _inherit = ["mrp.abstract.workorder.line"] _description = "Record production line" raw_product_produce_id = fields.Many2one('mrp.product.produce', 'Component in Produce wizard') finished_product_produce_id = fields.Many2one('mrp.product.produce', 'Finished Product in Produce wizard') @api.model def _get_raw_workorder_inverse_name(self): return 'raw_product_produce_id' @api.model def _get_finished_workoder_inverse_name(self): return 'finished_product_produce_id' def _get_final_lots(self): product_produce_id = self.raw_product_produce_id or self.finished_product_produce_id return product_produce_id.finished_lot_id | product_produce_id.finished_workorder_line_ids.mapped('lot_id') def _get_production(self): product_produce_id = self.raw_product_produce_id or self.finished_product_produce_id return product_produce_id.production_id @api.onchange('lot_id') def _onchange_lot_id(self): """ When the user is encoding a produce line for a tracked product, we apply some logic to help him. This onchange will automatically switch `qty_done` to 1.0. """ if self.product_id.tracking == 'serial': if self.lot_id: self.qty_done = 1 else: self.qty_done = 0
agpl-3.0
puttarajubr/commcare-hq
corehq/apps/reports/tests/test_pillows_xforms.py
2
15578
import copy from django.test import TestCase from django.conf import settings from corehq.apps.api.es import report_term_filter from corehq.pillows.base import restore_property_dict, VALUE_TAG from corehq.pillows.mappings.reportxform_mapping import REPORT_XFORM_MAPPING from corehq.pillows.reportxform import ReportXFormPillow CONCEPT_XFORM = { "_id": "concept_xform", "domain": "test-domain", "form": { "@xmlns": "http://openrosa.org/formdesigner/test_concepts", "@uiVersion": "1", "@name": "Visit", "last_visit": "2013-09-01", "any_other_sick": { "#text": "no", "@concept_id": "1907" }, "cur_num_fp": "2", "#type": "data", "cur_counsel_topics": "bednet handwashing", "case": { "@xmlns": "http://commcarehq.org/case/transaction/v2", "@date_modified": "2013-09-01T11:02:34Z", "@user_id": "abcde", "@case_id": "test_case_123345", "update": { "location_code": "", "last_visit_counsel_topics": "bednet handwashing", "last_visit": "2013-10-09", "num_ec": "2" } }, "member_available": { "#text": "yes", "@concept_id": "1890" }, "modern_fp": [ { "fp_type": { "#text": "iud", "@concept_id": "374" } }, { "fp_type": { "#text": "ij", "@concept_id": "374" } } ], "meta": { "@xmlns": "http://openrosa.org/jr/xforms", "username": "airene", "instanceID": "some_form", "userID": "some_user", "timeEnd": "2013-09-09T11:02:34Z", "appVersion": { "@xmlns": "http://commcarehq.org/xforms", "#text": "some version" }, "timeStart": "2013-09-01T11:22:40Z", "deviceID": "unittests" }, "num_using_fp": { "#text": "2", "@concept_id": "1902" }, "location_code_1": "", "counseling": { "sanitation_counseling": { "handwashing_importance": "", "handwashing_instructions": "", "when_to_wash_hands": "" }, "counsel_type_ec": "bednet handwashing", "previous_counseling": "OK", "bednet_counseling": { "bednets_reduce_risk": "", "wash_bednet": "", "all_people_bednet": "" } }, "prev_location_code": "", "@version": "234", "num_ec": { "#text": "2", "@concept_id": "1901" }, "prev_counsel_topics": "handwashing" }, 
"initial_processing_complete": True, "computed_modified_on_": "2013-10-01T23:13:38Z", "app_id": "some_app", "auth_context": { "user_id": None, "domain": "some-domain", "authenticated": False, "doc_type": "AuthContext" }, "doc_type": "XFormInstance", "xmlns": "http://openrosa.org/formdesigner/something", "partial_submission": False, "#export_tag": [ "domain", "xmlns" ], "received_on": "2013-10-09T14:21:56Z", "submit_ip": "105.230.106.73", "computed_": {}, "openrosa_headers": { "HTTP_X_OPENROSA_VERSION": "1.0" }, "history": [ ], "__retrieved_case_ids": ["test_case_123345"], } class testReportXFormProcessing(TestCase): def testConvertAndRestoreReportXFormDicts(self): pillow = ReportXFormPillow(online=False) orig = CONCEPT_XFORM orig['domain'] = settings.ES_XFORM_FULL_INDEX_DOMAINS[0] for_indexing = pillow.change_transform(orig) restored = restore_property_dict(for_indexing) #appVersion might be munged in meta, so swapping it out orig_appversion = orig['form']['meta']['appVersion'] restored_appversion = restored['form']['meta']['appVersion'] if isinstance(orig_appversion, dict): self.assertEqual(restored_appversion, orig_appversion['#text']) else: self.assertEqual(restored_appversion, orig_appversion) del(orig['form']['meta']['appVersion']) del(restored['form']['meta']['appVersion']) self.assertNotEqual(for_indexing, orig) self.assertNotEqual(for_indexing, restored) self.assertEqual(orig, restored) def testSubCaseForm(self): """ Ensure that the dict format converter never touches any sub property that has a key of 'case' this is our way of handling case blocks. 
The properties in the case block ought not to be touched this current form only captures """ pillow = ReportXFormPillow(online=False) orig = { '_id': 'nested_case_blocks', 'form': { 'case': { "@xmlns": "http://commcarehq.org/case/transaction/v2", "@date_modified": "2013-10-14T10:59:44Z", "@user_id": "someuser", "@case_id": "mycase", }, 'subcase_0': { 'case': { "@xmlns": "http://commcarehq.org/case/transaction/v2", "index": { "parent": { "@case_type": "household", "#text": "some_parent" } }, "@date_modified": "2013-10-12T11:59:41Z", "create": { "case_type": "child", "owner_id": "some_owner", "case_name": "hello there" }, "@user_id": "someuser", "update": { "first_name": "asdlfjkasdf", "surname": "askljvlajskdlrwe", "dob": "2011-03-21", "sex": "male", "weight_date": "never", "household_head_health_id": "", "dob_known": "yes", "health_id": "", "length_date": "never", "dob_calc": "2011-03-21" }, "@case_id": "subcaseid" } }, 'really': { 'nested': { 'case': { "@xmlns": "http://commcarehq.org/case/transaction/v2", "index": { "parent": { "@case_type": "household", "#text": "some_parent" } }, "@date_modified": "2013-10-12T11:59:41Z", "create": { "case_type": "child", "owner_id": "some_owner", "case_name": "hello there" }, "@user_id": "someuser", "update": { "first_name": "asdlfjkasdf", "surname": "askljvlajskdlrwe", "dob": "2011-03-21", "sex": "male", "weight_date": "never", "household_head_health_id": "", "dob_known": "yes", "health_id": "", "length_date": "never", "dob_calc": "2011-03-21" }, "@case_id": "subcaseid2" } } }, 'array_cases': [ {'case': {'foo': 'bar'}}, {'case': {'boo': 'bar'}}, {'case': {'poo': 'bar'}}, ] } } orig['domain'] = settings.ES_XFORM_FULL_INDEX_DOMAINS[0] for_indexing = pillow.change_transform(orig) self.assertEqual(orig['form']['case'], for_indexing['form']['case']) self.assertEqual(orig['form']['subcase_0']['case'], for_indexing['form']['subcase_0']['case']) self.assertEqual(orig['form']['really']['nested']['case'], 
for_indexing['form']['really']['nested']['case']) def testBlanktoNulls(self): orig = { '_id': 'blank_strings', 'form': { 'case': { "@xmlns": "http://commcarehq.org/case/transaction/v2", "@date_modified": "2013-10-14T10:59:44Z", "@user_id": "someuser", "@case_id": "mycase", "index": "", "attachment": "", "create": "", "update": "", } } } dict_props = ['index', 'attachment', 'create', 'update'] pillow = ReportXFormPillow(online=False) all_blank = copy.deepcopy(orig) all_blank['domain'] = settings.ES_XFORM_FULL_INDEX_DOMAINS[0] for_indexing = pillow.change_transform(all_blank) for prop in dict_props: self.assertIsNone(for_indexing['form']['case'][prop]) all_dicts = copy.deepcopy(orig) all_dicts['domain'] = settings.ES_XFORM_FULL_INDEX_DOMAINS[0] for prop in dict_props: all_dicts['form']['case'][prop] = {} for_index2 = pillow.change_transform(all_dicts) for prop in dict_props: self.assertIsNotNone(for_index2['form']['case'][prop]) def testComputedConversion(self): """ Since we set dyanmic=True on reportxforms, need to do conversions on the computed_ properties so call conversion on computed_ dict as well, this test ensures that it's converted on change_transform :return: """ orig = { '_id': 'blank_strings', 'form': { 'case': { "@xmlns": "http://commcarehq.org/case/transaction/v2", "@date_modified": "2013-10-14T10:59:44Z", "@user_id": "someuser", "@case_id": "mycase", "index": "", "attachment": "", "create": "", "update": "", } }, 'computed_': { "mvp_indicators": { "last_muac": { "updated": "2013-02-04T21:54:28Z", "version": 1, "type": "FormDataAliasIndicatorDefinition", "multi_value": False, "value": None }, "muac": { "updated": "2013-02-04T21:54:28Z", "version": 1, "type": "FormDataAliasIndicatorDefinition", "multi_value": False, "value": { "#text": "", "@concept_id": "1343" } }, "vaccination_status": { "updated": "2013-02-04T21:54:28Z", "version": 1, "type": "FormDataAliasIndicatorDefinition", "multi_value": False, "value": "yes" }, } } } pillow = 
ReportXFormPillow(online=False) orig['domain'] = settings.ES_XFORM_FULL_INDEX_DOMAINS[0] for_indexing = pillow.change_transform(orig) restored = restore_property_dict(for_indexing) self.assertNotEqual(orig['computed_'], for_indexing['computed_']) self.assertEqual(orig['computed_'], restored['computed_']) def testReporXFormtQuery(self): unknown_terms = ['form.num_using_fp.#text', 'form.num_using_fp.@concept_id', 'form.counseling.sanitation_counseling.handwashing_importance', 'form.counseling.bednet_counseling.wash_bednet', 'form.prev_location_code', 'member_available.#text', 'location_code_1'] unknown_terms_query = report_term_filter(unknown_terms, REPORT_XFORM_MAPPING) manually_set = ['%s.%s' % (x, VALUE_TAG) for x in unknown_terms] self.assertEqual(manually_set, unknown_terms_query) known_terms = [ 'initial_processing_complete', 'doc_type', 'app_id', 'xmlns', '@uiVersion', '@version', 'form.#type', 'form.@name', 'form.meta.timeStart', 'form.meta.timeEnd', 'form.meta.appVersion', ] # shoot, TODO, cases are difficult to escape the VALUE_TAG term due to dynamic templates known_terms_query = report_term_filter(known_terms, REPORT_XFORM_MAPPING) self.assertEqual(known_terms_query, known_terms) def testConceptReportConversion(self): pillow = ReportXFormPillow(online=False) orig = CONCEPT_XFORM orig['domain'] = settings.ES_XFORM_FULL_INDEX_DOMAINS[0] for_indexing = pillow.change_transform(orig) self.assertTrue(isinstance(for_indexing['form']['last_visit'], dict)) self.assertTrue('#value' in for_indexing['form']['last_visit']) self.assertTrue(isinstance(for_indexing['form']['member_available'], dict)) self.assertTrue(isinstance(for_indexing['form']['member_available']['#text'], dict)) self.assertTrue(isinstance(for_indexing['form']['member_available']['@concept_id'], dict)) self.assertEqual(for_indexing['form']['member_available'], { "#text": { "#value": "yes" }, "@concept_id": { "#value": "1890" } } ) self.assertEqual(for_indexing['form']['modern_fp'], [ { "fp_type": { 
"#text": { "#value": "iud" }, "@concept_id": { "#value": "374" } } }, { "fp_type": { "#text": { "#value": "ij" }, "@concept_id": { "#value": "374" } } } ] )
bsd-3-clause
acbodine/koding
go/src/vendor/github.com/caglar10ur/lxc/src/python-lxc/setup.py
15
2493
#!/usr/bin/env python3 # # python-lxc: Python bindings for LXC # # (C) Copyright Canonical Ltd. 2012 # # Authors: # Stéphane Graber <stgraber@ubuntu.com> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 # USA import os import subprocess from setuptools import setup, Extension from setuptools.command.build_ext import build_ext as BuildExtCommand class LxcBuildExtCommand(BuildExtCommand): user_options = BuildExtCommand.user_options + [ ('no-pkg-config', None, "don't use pkg-config to detect include/library paths") ] def initialize_options(self): super(LxcBuildExtCommand, self).initialize_options() self.no_pkg_config = False def build_extensions(self): if not self.no_pkg_config: pkg_config_executable = os.environ.get('PKG_CONFIG_EXECUTABLE', 'pkg-config') def get_pkg_config_var(name): args = [pkg_config_executable, '--variable', name, 'lxc'] output = subprocess.check_output(args, universal_newlines=True) return output.rstrip('\n') try: includedir = get_pkg_config_var('includedir') libdir = get_pkg_config_var('libdir') self.compiler.add_include_dir(includedir) self.compiler.add_library_dir(libdir) except subprocess.CalledProcessError: pass super(LxcBuildExtCommand, self).build_extensions() setup(name='lxc', version='0.1', description='LXC', packages=['lxc'], package_dir={'lxc': 'lxc'}, 
ext_modules=[Extension('_lxc', sources=['lxc.c'], libraries=['lxc'])], cmdclass={'build_ext': LxcBuildExtCommand}, )
agpl-3.0
watspidererik/testenv
flask/lib/python2.7/site-packages/coverage/collector.py
209
13412
"""Raw data collector for Coverage.""" import os, sys, threading try: # Use the C extension code when we can, for speed. from coverage.tracer import CTracer # pylint: disable=F0401,E0611 except ImportError: # Couldn't import the C extension, maybe it isn't built. if os.getenv('COVERAGE_TEST_TRACER') == 'c': # During testing, we use the COVERAGE_TEST_TRACER env var to indicate # that we've fiddled with the environment to test this fallback code. # If we thought we had a C tracer, but couldn't import it, then exit # quickly and clearly instead of dribbling confusing errors. I'm using # sys.exit here instead of an exception because an exception here # causes all sorts of other noise in unittest. sys.stderr.write( "*** COVERAGE_TEST_TRACER is 'c' but can't import CTracer!\n" ) sys.exit(1) CTracer = None class PyTracer(object): """Python implementation of the raw data tracer.""" # Because of poor implementations of trace-function-manipulating tools, # the Python trace function must be kept very simple. In particular, there # must be only one function ever set as the trace function, both through # sys.settrace, and as the return value from the trace function. Put # another way, the trace function must always return itself. It cannot # swap in other functions, or return None to avoid tracing a particular # frame. # # The trace manipulator that introduced this restriction is DecoratorTools, # which sets a trace function, and then later restores the pre-existing one # by calling sys.settrace with a function it found in the current frame. # # Systems that use DecoratorTools (or similar trace manipulations) must use # PyTracer to get accurate results. The command-line --timid argument is # used to force the use of this tracer. 
def __init__(self): self.data = None self.should_trace = None self.should_trace_cache = None self.warn = None self.cur_file_data = None self.last_line = 0 self.data_stack = [] self.last_exc_back = None self.last_exc_firstlineno = 0 self.arcs = False self.thread = None self.stopped = False def _trace(self, frame, event, arg_unused): """The trace function passed to sys.settrace.""" if self.stopped: return if 0: sys.stderr.write("trace event: %s %r @%d\n" % ( event, frame.f_code.co_filename, frame.f_lineno )) if self.last_exc_back: if frame == self.last_exc_back: # Someone forgot a return event. if self.arcs and self.cur_file_data: pair = (self.last_line, -self.last_exc_firstlineno) self.cur_file_data[pair] = None self.cur_file_data, self.last_line = self.data_stack.pop() self.last_exc_back = None if event == 'call': # Entering a new function context. Decide if we should trace # in this file. self.data_stack.append((self.cur_file_data, self.last_line)) filename = frame.f_code.co_filename if filename not in self.should_trace_cache: tracename = self.should_trace(filename, frame) self.should_trace_cache[filename] = tracename else: tracename = self.should_trace_cache[filename] #print("called, stack is %d deep, tracename is %r" % ( # len(self.data_stack), tracename)) if tracename: if tracename not in self.data: self.data[tracename] = {} self.cur_file_data = self.data[tracename] else: self.cur_file_data = None # Set the last_line to -1 because the next arc will be entering a # code block, indicated by (-1, n). self.last_line = -1 elif event == 'line': # Record an executed line. 
if self.cur_file_data is not None: if self.arcs: #print("lin", self.last_line, frame.f_lineno) self.cur_file_data[(self.last_line, frame.f_lineno)] = None else: #print("lin", frame.f_lineno) self.cur_file_data[frame.f_lineno] = None self.last_line = frame.f_lineno elif event == 'return': if self.arcs and self.cur_file_data: first = frame.f_code.co_firstlineno self.cur_file_data[(self.last_line, -first)] = None # Leaving this function, pop the filename stack. self.cur_file_data, self.last_line = self.data_stack.pop() #print("returned, stack is %d deep" % (len(self.data_stack))) elif event == 'exception': #print("exc", self.last_line, frame.f_lineno) self.last_exc_back = frame.f_back self.last_exc_firstlineno = frame.f_code.co_firstlineno return self._trace def start(self): """Start this Tracer. Return a Python function suitable for use with sys.settrace(). """ self.thread = threading.currentThread() sys.settrace(self._trace) return self._trace def stop(self): """Stop this Tracer.""" self.stopped = True if self.thread != threading.currentThread(): # Called on a different thread than started us: we can't unhook # ourseves, but we've set the flag that we should stop, so we won't # do any more tracing. return if hasattr(sys, "gettrace") and self.warn: if sys.gettrace() != self._trace: msg = "Trace function changed, measurement is likely wrong: %r" self.warn(msg % (sys.gettrace(),)) #print("Stopping tracer on %s" % threading.current_thread().ident) sys.settrace(None) def get_stats(self): """Return a dictionary of statistics, or None.""" return None class Collector(object): """Collects trace data. Creates a Tracer object for each thread, since they track stack information. Each Tracer points to the same shared data, contributing traced data points. When the Collector is started, it creates a Tracer for the current thread, and installs a function to create Tracers for each new thread started. When the Collector is stopped, all active Tracers are stopped. 
Threads started while the Collector is stopped will never have Tracers associated with them. """ # The stack of active Collectors. Collectors are added here when started, # and popped when stopped. Collectors on the stack are paused when not # the top, and resumed when they become the top again. _collectors = [] def __init__(self, should_trace, timid, branch, warn): """Create a collector. `should_trace` is a function, taking a filename, and returning a canonicalized filename, or None depending on whether the file should be traced or not. If `timid` is true, then a slower simpler trace function will be used. This is important for some environments where manipulation of tracing functions make the faster more sophisticated trace function not operate properly. If `branch` is true, then branches will be measured. This involves collecting data on which statements followed each other (arcs). Use `get_arc_data` to get the arc data. `warn` is a warning function, taking a single string message argument, to be used if a warning needs to be issued. """ self.should_trace = should_trace self.warn = warn self.branch = branch self.reset() if timid: # Being timid: use the simple Python trace function. self._trace_class = PyTracer else: # Being fast: use the C Tracer if it is available, else the Python # trace function. self._trace_class = CTracer or PyTracer def __repr__(self): return "<Collector at 0x%x>" % id(self) def tracer_name(self): """Return the class name of the tracer we're using.""" return self._trace_class.__name__ def reset(self): """Clear collected data, and prepare to collect more.""" # A dictionary mapping filenames to dicts with linenumber keys, # or mapping filenames to dicts with linenumber pairs as keys. self.data = {} # A cache of the results from should_trace, the decision about whether # to trace execution in a file. A dict of filename to (filename or # None). self.should_trace_cache = {} # Our active Tracers. 
self.tracers = [] def _start_tracer(self): """Start a new Tracer object, and store it in self.tracers.""" tracer = self._trace_class() tracer.data = self.data tracer.arcs = self.branch tracer.should_trace = self.should_trace tracer.should_trace_cache = self.should_trace_cache tracer.warn = self.warn fn = tracer.start() self.tracers.append(tracer) return fn # The trace function has to be set individually on each thread before # execution begins. Ironically, the only support the threading module has # for running code before the thread main is the tracing function. So we # install this as a trace function, and the first time it's called, it does # the real trace installation. def _installation_trace(self, frame_unused, event_unused, arg_unused): """Called on new threads, installs the real tracer.""" # Remove ourselves as the trace function sys.settrace(None) # Install the real tracer. fn = self._start_tracer() # Invoke the real trace function with the current event, to be sure # not to lose an event. if fn: fn = fn(frame_unused, event_unused, arg_unused) # Return the new trace function to continue tracing in this scope. return fn def start(self): """Start collecting trace information.""" if self._collectors: self._collectors[-1].pause() self._collectors.append(self) #print("Started: %r" % self._collectors, file=sys.stderr) # Check to see whether we had a fullcoverage tracer installed. traces0 = [] if hasattr(sys, "gettrace"): fn0 = sys.gettrace() if fn0: tracer0 = getattr(fn0, '__self__', None) if tracer0: traces0 = getattr(tracer0, 'traces', []) # Install the tracer on this thread. fn = self._start_tracer() for args in traces0: (frame, event, arg), lineno = args try: fn(frame, event, arg, lineno=lineno) except TypeError: raise Exception( "fullcoverage must be run with the C trace function." ) # Install our installation tracer in threading, to jump start other # threads. 
threading.settrace(self._installation_trace) def stop(self): """Stop collecting trace information.""" #print >>sys.stderr, "Stopping: %r" % self._collectors assert self._collectors assert self._collectors[-1] is self self.pause() self.tracers = [] # Remove this Collector from the stack, and resume the one underneath # (if any). self._collectors.pop() if self._collectors: self._collectors[-1].resume() def pause(self): """Pause tracing, but be prepared to `resume`.""" for tracer in self.tracers: tracer.stop() stats = tracer.get_stats() if stats: print("\nCoverage.py tracer stats:") for k in sorted(stats.keys()): print("%16s: %s" % (k, stats[k])) threading.settrace(None) def resume(self): """Resume tracing after a `pause`.""" for tracer in self.tracers: tracer.start() threading.settrace(self._installation_trace) def get_line_data(self): """Return the line data collected. Data is { filename: { lineno: None, ...}, ...} """ if self.branch: # If we were measuring branches, then we have to re-build the dict # to show line data. line_data = {} for f, arcs in self.data.items(): line_data[f] = ldf = {} for l1, _ in list(arcs.keys()): if l1: ldf[l1] = None return line_data else: return self.data def get_arc_data(self): """Return the arc data collected. Data is { filename: { (l1, l2): None, ...}, ...} Note that no data is collected or returned if the Collector wasn't created with `branch` true. """ if self.branch: return self.data else: return {}
mit
makinacorpus/reportlab-ecomobile
src/reportlab/graphics/charts/doughnut.py
1
13260
#Copyright ReportLab Europe Ltd. 2000-2004 #see license.txt for license details #history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/charts/doughnut.py # doughnut chart __version__=''' $Id$ ''' __doc__="""Doughnut chart Produces a circular chart like the doughnut charts produced by Excel. Can handle multiple series (which produce concentric 'rings' in the chart). """ import copy from math import sin, cos, pi from types import ListType, TupleType from reportlab.lib import colors from reportlab.lib.validators import isColor, isNumber, isListOfNumbersOrNone,\ isListOfNumbers, isColorOrNone, isString,\ isListOfStringsOrNone, OneOf, SequenceOf,\ isBoolean, isListOfColors,\ isNoneOrListOfNoneOrStrings,\ isNoneOrListOfNoneOrNumbers,\ isNumberOrNone from reportlab.lib.attrmap import * from reportlab.pdfgen.canvas import Canvas from reportlab.graphics.shapes import Group, Drawing, Line, Rect, Polygon, Ellipse, \ Wedge, String, SolidShape, UserNode, STATE_DEFAULTS from reportlab.graphics.widgetbase import Widget, TypedPropertyCollection, PropHolder from reportlab.graphics.charts.piecharts import AbstractPieChart, WedgeProperties, _addWedgeLabel from reportlab.graphics.charts.textlabels import Label from reportlab.graphics.widgets.markers import Marker class SectorProperties(WedgeProperties): """This holds descriptive information about the sectors in a doughnut chart. It is not to be confused with the 'sector itself'; this just holds a recipe for how to format one, and does not allow you to hack the angles. It can format a genuine Sector object for you with its format method. """ _attrMap = AttrMap(BASE=WedgeProperties, ) class Doughnut(AbstractPieChart): _attrMap = AttrMap( x = AttrMapValue(isNumber, desc='X position of the chart within its container.'), y = AttrMapValue(isNumber, desc='Y position of the chart within its container.'), width = AttrMapValue(isNumber, desc='width of doughnut bounding box. 
Need not be same as width.'), height = AttrMapValue(isNumber, desc='height of doughnut bounding box. Need not be same as height.'), data = AttrMapValue(None, desc='list of numbers defining sector sizes; need not sum to 1'), labels = AttrMapValue(isListOfStringsOrNone, desc="optional list of labels to use for each data point"), startAngle = AttrMapValue(isNumber, desc="angle of first slice; like the compass, 0 is due North"), direction = AttrMapValue(OneOf('clockwise', 'anticlockwise'), desc="'clockwise' or 'anticlockwise'"), slices = AttrMapValue(None, desc="collection of sector descriptor objects"), simpleLabels = AttrMapValue(isBoolean, desc="If true(default) use String not super duper WedgeLabel"), ) def __init__(self): self.x = 0 self.y = 0 self.width = 100 self.height = 100 self.data = [1,1] self.labels = None # or list of strings self.startAngle = 90 self.direction = "clockwise" self.simpleLabels = 1 self.slices = TypedPropertyCollection(SectorProperties) self.slices[0].fillColor = colors.darkcyan self.slices[1].fillColor = colors.blueviolet self.slices[2].fillColor = colors.blue self.slices[3].fillColor = colors.cyan def demo(self): d = Drawing(200, 100) dn = Doughnut() dn.x = 50 dn.y = 10 dn.width = 100 dn.height = 80 dn.data = [10,20,30,40,50,60] dn.labels = ['a','b','c','d','e','f'] dn.slices.strokeWidth=0.5 dn.slices[3].popout = 10 dn.slices[3].strokeWidth = 2 dn.slices[3].strokeDashArray = [2,2] dn.slices[3].labelRadius = 1.75 dn.slices[3].fontColor = colors.red dn.slices[0].fillColor = colors.darkcyan dn.slices[1].fillColor = colors.blueviolet dn.slices[2].fillColor = colors.blue dn.slices[3].fillColor = colors.cyan dn.slices[4].fillColor = colors.aquamarine dn.slices[5].fillColor = colors.cadetblue dn.slices[6].fillColor = colors.lightcoral d.add(dn) return d def normalizeData(self, data=None): from operator import add sum = float(reduce(add,data,0)) return abs(sum)>=1e-8 and map(lambda x,f=360./sum: f*x, data) or len(data)*[0] def makeSectors(self): 
# normalize slice data if type(self.data) in (ListType, TupleType) and type(self.data[0]) in (ListType, TupleType): #it's a nested list, more than one sequence normData = [] n = [] for l in self.data: t = self.normalizeData(l) normData.append(t) n.append(len(t)) self._seriesCount = max(n) else: normData = self.normalizeData(self.data) n = len(normData) self._seriesCount = n #labels if self.labels is None: labels = [] if type(n) not in (ListType,TupleType): labels = [''] * n else: for m in n: labels = list(labels) + [''] * m else: labels = self.labels #there's no point in raising errors for less than enough labels if #we silently create all for the extreme case of no labels. if type(n) not in (ListType,TupleType): i = n-len(labels) if i>0: labels = list(labels) + [''] * i else: tlab = 0 for m in n: tlab += m i = tlab-len(labels) if i>0: labels = list(labels) + [''] * i xradius = self.width/2.0 yradius = self.height/2.0 centerx = self.x + xradius centery = self.y + yradius if self.direction == "anticlockwise": whichWay = 1 else: whichWay = -1 g = Group() sn = 0 startAngle = self.startAngle #% 360 styleCount = len(self.slices) if type(self.data[0]) in (ListType, TupleType): #multi-series doughnut iradius = (self.height/5.0)/len(self.data) for series in normData: i = 0 for angle in series: endAngle = (startAngle + (angle * whichWay)) #% 360 if abs(startAngle-endAngle)>=1e-5: if startAngle < endAngle: a1 = startAngle a2 = endAngle else: a1 = endAngle a2 = startAngle #if we didn't use %stylecount here we'd end up with the later sectors #all having the default style sectorStyle = self.slices[i%styleCount] # is it a popout? 
cx, cy = centerx, centery if sectorStyle.popout != 0: # pop out the sector averageAngle = (a1+a2)/2.0 aveAngleRadians = averageAngle * pi/180.0 popdistance = sectorStyle.popout cx = centerx + popdistance * cos(aveAngleRadians) cy = centery + popdistance * sin(aveAngleRadians) if type(n) in (ListType,TupleType): theSector = Wedge(cx, cy, xradius+(sn*iradius)-iradius, a1, a2, yradius=yradius+(sn*iradius)-iradius, radius1=yradius+(sn*iradius)-(2*iradius)) else: theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, radius1=iradius) theSector.fillColor = sectorStyle.fillColor theSector.strokeColor = sectorStyle.strokeColor theSector.strokeWidth = sectorStyle.strokeWidth theSector.strokeDashArray = sectorStyle.strokeDashArray g.add(theSector) startAngle = endAngle text = self.getSeriesName(i,'') if text: averageAngle = (a1+a2)/2.0 aveAngleRadians = averageAngle*pi/180.0 labelRadius = sectorStyle.labelRadius labelX = centerx + (0.5 * self.width * cos(aveAngleRadians) * labelRadius) labelY = centery + (0.5 * self.height * sin(aveAngleRadians) * labelRadius) g.add(_addWedgeLabel(self,text,averageAngle,labelX,labelY,sectorStyle)) i += 1 sn += 1 else: i = 0 #single series doughnut iradius = self.height/5.0 for angle in normData: endAngle = (startAngle + (angle * whichWay)) #% 360 if abs(startAngle-endAngle)>=1e-5: if startAngle < endAngle: a1 = startAngle a2 = endAngle else: a1 = endAngle a2 = startAngle #if we didn't use %stylecount here we'd end up with the later sectors #all having the default style sectorStyle = self.slices[i%styleCount] # is it a popout? 
cx, cy = centerx, centery if sectorStyle.popout != 0: # pop out the sector averageAngle = (a1+a2)/2.0 aveAngleRadians = averageAngle * pi/180.0 popdistance = sectorStyle.popout cx = centerx + popdistance * cos(aveAngleRadians) cy = centery + popdistance * sin(aveAngleRadians) if n > 1: theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, radius1=iradius) elif n==1: theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, iradius=iradius) theSector.fillColor = sectorStyle.fillColor theSector.strokeColor = sectorStyle.strokeColor theSector.strokeWidth = sectorStyle.strokeWidth theSector.strokeDashArray = sectorStyle.strokeDashArray g.add(theSector) # now draw a label if labels[i] != "": averageAngle = (a1+a2)/2.0 aveAngleRadians = averageAngle*pi/180.0 labelRadius = sectorStyle.labelRadius labelX = centerx + (0.5 * self.width * cos(aveAngleRadians) * labelRadius) labelY = centery + (0.5 * self.height * sin(aveAngleRadians) * labelRadius) theLabel = String(labelX, labelY, labels[i]) theLabel.textAnchor = "middle" theLabel.fontSize = sectorStyle.fontSize theLabel.fontName = sectorStyle.fontName theLabel.fillColor = sectorStyle.fontColor g.add(theLabel) startAngle = endAngle i += 1 return g def draw(self): g = Group() g.add(self.makeSectors()) return g def sample1(): "Make up something from the individual Sectors" d = Drawing(400, 400) g = Group() s1 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=0, endangledegrees=120, radius1=100) s1.fillColor=colors.red s1.strokeColor=None d.add(s1) s2 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=120, endangledegrees=240, radius1=100) s2.fillColor=colors.green s2.strokeColor=None d.add(s2) s3 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=240, endangledegrees=260, radius1=100) s3.fillColor=colors.blue s3.strokeColor=None d.add(s3) s4 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=260, endangledegrees=360, radius1=100) s4.fillColor=colors.gray 
s4.strokeColor=None d.add(s4) return d def sample2(): "Make a simple demo" d = Drawing(400, 400) dn = Doughnut() dn.x = 50 dn.y = 50 dn.width = 300 dn.height = 300 dn.data = [10,20,30,40,50,60] d.add(dn) return d def sample3(): "Make a more complex demo" d = Drawing(400, 400) dn = Doughnut() dn.x = 50 dn.y = 50 dn.width = 300 dn.height = 300 dn.data = [[10,20,30,40,50,60], [10,20,30,40]] dn.labels = ['a','b','c','d','e','f'] d.add(dn) return d if __name__=='__main__': from reportlab.graphics.renderPDF import drawToFile d = sample1() drawToFile(d, 'doughnut1.pdf') d = sample2() drawToFile(d, 'doughnut2.pdf') d = sample3() drawToFile(d, 'doughnut3.pdf')
bsd-3-clause
sarvex/django
django/views/i18n.py
264
12156
import gettext as gettext_module import importlib import json import os from django import http from django.apps import apps from django.conf import settings from django.core.urlresolvers import translate_url from django.template import Context, Engine from django.utils import six from django.utils._os import upath from django.utils.encoding import smart_text from django.utils.formats import get_format, get_format_modules from django.utils.http import is_safe_url from django.utils.translation import ( LANGUAGE_SESSION_KEY, check_for_language, get_language, to_locale, ) DEFAULT_PACKAGES = ['django.conf'] LANGUAGE_QUERY_PARAMETER = 'language' def set_language(request): """ Redirect to a given url while setting the chosen language in the session or cookie. The url and the language code need to be specified in the request parameters. Since this view changes how the user will see the rest of the site, it must only be accessed as a POST request. If called as a GET request, it will redirect to the page in the request (the 'next' parameter) without changing any state. 
""" next = request.POST.get('next', request.GET.get('next')) if not is_safe_url(url=next, host=request.get_host()): next = request.META.get('HTTP_REFERER') if not is_safe_url(url=next, host=request.get_host()): next = '/' response = http.HttpResponseRedirect(next) if request.method == 'POST': lang_code = request.POST.get(LANGUAGE_QUERY_PARAMETER) if lang_code and check_for_language(lang_code): next_trans = translate_url(next, lang_code) if next_trans != next: response = http.HttpResponseRedirect(next_trans) if hasattr(request, 'session'): request.session[LANGUAGE_SESSION_KEY] = lang_code else: response.set_cookie(settings.LANGUAGE_COOKIE_NAME, lang_code, max_age=settings.LANGUAGE_COOKIE_AGE, path=settings.LANGUAGE_COOKIE_PATH, domain=settings.LANGUAGE_COOKIE_DOMAIN) return response def get_formats(): """ Returns all formats strings required for i18n to work """ FORMAT_SETTINGS = ( 'DATE_FORMAT', 'DATETIME_FORMAT', 'TIME_FORMAT', 'YEAR_MONTH_FORMAT', 'MONTH_DAY_FORMAT', 'SHORT_DATE_FORMAT', 'SHORT_DATETIME_FORMAT', 'FIRST_DAY_OF_WEEK', 'DECIMAL_SEPARATOR', 'THOUSAND_SEPARATOR', 'NUMBER_GROUPING', 'DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS' ) result = {} for module in [settings] + get_format_modules(reverse=True): for attr in FORMAT_SETTINGS: result[attr] = get_format(attr) formats = {} for k, v in result.items(): if isinstance(v, (six.string_types, int)): formats[k] = smart_text(v) elif isinstance(v, (tuple, list)): formats[k] = [smart_text(value) for value in v] return formats js_catalog_template = r""" {% autoescape off %} (function(globals) { var django = globals.django || (globals.django = {}); {% if plural %} django.pluralidx = function(n) { var v={{ plural }}; if (typeof(v) == 'boolean') { return v ? 1 : 0; } else { return v; } }; {% else %} django.pluralidx = function(count) { return (count == 1) ? 
0 : 1; }; {% endif %} /* gettext library */ django.catalog = django.catalog || {}; {% if catalog_str %} var newcatalog = {{ catalog_str }}; for (var key in newcatalog) { django.catalog[key] = newcatalog[key]; } {% endif %} if (!django.jsi18n_initialized) { django.gettext = function(msgid) { var value = django.catalog[msgid]; if (typeof(value) == 'undefined') { return msgid; } else { return (typeof(value) == 'string') ? value : value[0]; } }; django.ngettext = function(singular, plural, count) { var value = django.catalog[singular]; if (typeof(value) == 'undefined') { return (count == 1) ? singular : plural; } else { return value[django.pluralidx(count)]; } }; django.gettext_noop = function(msgid) { return msgid; }; django.pgettext = function(context, msgid) { var value = django.gettext(context + '\x04' + msgid); if (value.indexOf('\x04') != -1) { value = msgid; } return value; }; django.npgettext = function(context, singular, plural, count) { var value = django.ngettext(context + '\x04' + singular, context + '\x04' + plural, count); if (value.indexOf('\x04') != -1) { value = django.ngettext(singular, plural, count); } return value; }; django.interpolate = function(fmt, obj, named) { if (named) { return fmt.replace(/%\(\w+\)s/g, function(match){return String(obj[match.slice(2,-2)])}); } else { return fmt.replace(/%s/g, function(match){return String(obj.shift())}); } }; /* formatting library */ django.formats = {{ formats_str }}; django.get_format = function(format_type) { var value = django.formats[format_type]; if (typeof(value) == 'undefined') { return format_type; } else { return value; } }; /* add to global namespace */ globals.pluralidx = django.pluralidx; globals.gettext = django.gettext; globals.ngettext = django.ngettext; globals.gettext_noop = django.gettext_noop; globals.pgettext = django.pgettext; globals.npgettext = django.npgettext; globals.interpolate = django.interpolate; globals.get_format = django.get_format; django.jsi18n_initialized = true; } 
}(this)); {% endautoescape %} """ def render_javascript_catalog(catalog=None, plural=None): template = Engine().from_string(js_catalog_template) indent = lambda s: s.replace('\n', '\n ') context = Context({ 'catalog_str': indent(json.dumps( catalog, sort_keys=True, indent=2)) if catalog else None, 'formats_str': indent(json.dumps( get_formats(), sort_keys=True, indent=2)), 'plural': plural, }) return http.HttpResponse(template.render(context), 'text/javascript') def get_javascript_catalog(locale, domain, packages): default_locale = to_locale(settings.LANGUAGE_CODE) app_configs = apps.get_app_configs() allowable_packages = set(app_config.name for app_config in app_configs) allowable_packages.update(DEFAULT_PACKAGES) packages = [p for p in packages if p in allowable_packages] t = {} paths = [] en_selected = locale.startswith('en') en_catalog_missing = True # paths of requested packages for package in packages: p = importlib.import_module(package) path = os.path.join(os.path.dirname(upath(p.__file__)), 'locale') paths.append(path) # add the filesystem paths listed in the LOCALE_PATHS setting paths.extend(reversed(settings.LOCALE_PATHS)) # first load all english languages files for defaults for path in paths: try: catalog = gettext_module.translation(domain, path, ['en']) t.update(catalog._catalog) except IOError: pass else: # 'en' is the selected language and at least one of the packages # listed in `packages` has an 'en' catalog if en_selected: en_catalog_missing = False # next load the settings.LANGUAGE_CODE translations if it isn't english if default_locale != 'en': for path in paths: try: catalog = gettext_module.translation(domain, path, [default_locale]) except IOError: catalog = None if catalog is not None: t.update(catalog._catalog) # last load the currently selected language, if it isn't identical to the default. 
if locale != default_locale: # If the currently selected language is English but it doesn't have a # translation catalog (presumably due to being the language translated # from) then a wrong language catalog might have been loaded in the # previous step. It needs to be discarded. if en_selected and en_catalog_missing: t = {} else: locale_t = {} for path in paths: try: catalog = gettext_module.translation(domain, path, [locale]) except IOError: catalog = None if catalog is not None: locale_t.update(catalog._catalog) if locale_t: t = locale_t plural = None if '' in t: for l in t[''].split('\n'): if l.startswith('Plural-Forms:'): plural = l.split(':', 1)[1].strip() if plural is not None: # this should actually be a compiled function of a typical plural-form: # Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : # n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2; plural = [el.strip() for el in plural.split(';') if el.strip().startswith('plural=')][0].split('=', 1)[1] pdict = {} maxcnts = {} catalog = {} for k, v in t.items(): if k == '': continue if isinstance(k, six.string_types): catalog[k] = v elif isinstance(k, tuple): msgid = k[0] cnt = k[1] maxcnts[msgid] = max(cnt, maxcnts.get(msgid, 0)) pdict.setdefault(msgid, {})[cnt] = v else: raise TypeError(k) for k, v in pdict.items(): catalog[k] = [v.get(i, '') for i in range(maxcnts[msgid] + 1)] return catalog, plural def _get_locale(request): language = request.GET.get(LANGUAGE_QUERY_PARAMETER) if not (language and check_for_language(language)): language = get_language() return to_locale(language) def _parse_packages(packages): if packages is None: packages = list(DEFAULT_PACKAGES) elif isinstance(packages, six.string_types): packages = packages.split('+') return packages def null_javascript_catalog(request, domain=None, packages=None): """ Returns "identity" versions of the JavaScript i18n functions -- i.e., versions that don't actually do anything. 
""" return render_javascript_catalog() def javascript_catalog(request, domain='djangojs', packages=None): """ Returns the selected language catalog as a javascript library. Receives the list of packages to check for translations in the packages parameter either from an infodict or as a +-delimited string from the request. Default is 'django.conf'. Additionally you can override the gettext domain for this view, but usually you don't want to do that, as JavaScript messages go to the djangojs domain. But this might be needed if you deliver your JavaScript source from Django templates. """ locale = _get_locale(request) packages = _parse_packages(packages) catalog, plural = get_javascript_catalog(locale, domain, packages) return render_javascript_catalog(catalog, plural) def json_catalog(request, domain='djangojs', packages=None): """ Return the selected language catalog as a JSON object. Receives the same parameters as javascript_catalog(), but returns a response with a JSON object of the following format: { "catalog": { # Translations catalog }, "formats": { # Language formats for date, time, etc. }, "plural": '...' # Expression for plural forms, or null. } """ locale = _get_locale(request) packages = _parse_packages(packages) catalog, plural = get_javascript_catalog(locale, domain, packages) data = { 'catalog': catalog, 'formats': get_formats(), 'plural': plural, } return http.JsonResponse(data)
bsd-3-clause
jsomara/tuskar-ui-sat6
doc/source/conf.py
3
7947
# -*- coding: utf-8 -*- # # Tuskar UI documentation build configuration file, created by # sphinx-quickstart on Thu Apr 24 09:19:32 2014. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode', 'oslosphinx'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Tuskar UI' copyright = u'2014, Tuskar Team' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = 'Juno' # The full version, including alpha/beta/rc tags. release = 'Juno' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
#language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'TuskarUIdoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). 
#'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'TuskarUI.tex', u'Tuskar UI Documentation', u'Tuskar Team', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'tuskarui', u'Tuskar UI Documentation', [u'Tuskar Team'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'TuskarUI', u'Tuskar UI Documentation', u'Tuskar Team', 'TuskarUI', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. 
#texinfo_show_urls = 'footnote' # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None}
apache-2.0
AkizukiRyoko/mtasa-blue
vendor/google-breakpad/src/third_party/protobuf/protobuf/python/google/protobuf/internal/cpp_message.py
259
21288
# Protocol Buffers - Google's data interchange format # Copyright 2008 Google Inc. All rights reserved. # http://code.google.com/p/protobuf/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Contains helper functions used to create protocol message classes from Descriptor objects at runtime backed by the protocol buffer C++ API. 
""" __author__ = 'petar@google.com (Petar Petrov)' import operator from google.protobuf.internal import _net_proto2___python from google.protobuf import message _LABEL_REPEATED = _net_proto2___python.LABEL_REPEATED _LABEL_OPTIONAL = _net_proto2___python.LABEL_OPTIONAL _CPPTYPE_MESSAGE = _net_proto2___python.CPPTYPE_MESSAGE _TYPE_MESSAGE = _net_proto2___python.TYPE_MESSAGE def GetDescriptorPool(): """Creates a new DescriptorPool C++ object.""" return _net_proto2___python.NewCDescriptorPool() _pool = GetDescriptorPool() def GetFieldDescriptor(full_field_name): """Searches for a field descriptor given a full field name.""" return _pool.FindFieldByName(full_field_name) def BuildFile(content): """Registers a new proto file in the underlying C++ descriptor pool.""" _net_proto2___python.BuildFile(content) def GetExtensionDescriptor(full_extension_name): """Searches for extension descriptor given a full field name.""" return _pool.FindExtensionByName(full_extension_name) def NewCMessage(full_message_name): """Creates a new C++ protocol message by its name.""" return _net_proto2___python.NewCMessage(full_message_name) def ScalarProperty(cdescriptor): """Returns a scalar property for the given descriptor.""" def Getter(self): return self._cmsg.GetScalar(cdescriptor) def Setter(self, value): self._cmsg.SetScalar(cdescriptor, value) return property(Getter, Setter) def CompositeProperty(cdescriptor, message_type): """Returns a Python property the given composite field.""" def Getter(self): sub_message = self._composite_fields.get(cdescriptor.name, None) if sub_message is None: cmessage = self._cmsg.NewSubMessage(cdescriptor) sub_message = message_type._concrete_class(__cmessage=cmessage) self._composite_fields[cdescriptor.name] = sub_message return sub_message return property(Getter) class RepeatedScalarContainer(object): """Container for repeated scalar fields.""" __slots__ = ['_message', '_cfield_descriptor', '_cmsg'] def __init__(self, msg, cfield_descriptor): self._message 
= msg self._cmsg = msg._cmsg self._cfield_descriptor = cfield_descriptor def append(self, value): self._cmsg.AddRepeatedScalar( self._cfield_descriptor, value) def extend(self, sequence): for element in sequence: self.append(element) def insert(self, key, value): values = self[slice(None, None, None)] values.insert(key, value) self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) def remove(self, value): values = self[slice(None, None, None)] values.remove(value) self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) def __setitem__(self, key, value): values = self[slice(None, None, None)] values[key] = value self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) def __getitem__(self, key): return self._cmsg.GetRepeatedScalar(self._cfield_descriptor, key) def __delitem__(self, key): self._cmsg.DeleteRepeatedField(self._cfield_descriptor, key) def __len__(self): return len(self[slice(None, None, None)]) def __eq__(self, other): if self is other: return True if not operator.isSequenceType(other): raise TypeError( 'Can only compare repeated scalar fields against sequences.') # We are presumably comparing against some other sequence type. return other == self[slice(None, None, None)] def __ne__(self, other): return not self == other def __hash__(self): raise TypeError('unhashable object') def sort(self, sort_function=cmp): values = self[slice(None, None, None)] values.sort(sort_function) self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) def RepeatedScalarProperty(cdescriptor): """Returns a Python property the given repeated scalar field.""" def Getter(self): container = self._composite_fields.get(cdescriptor.name, None) if container is None: container = RepeatedScalarContainer(self, cdescriptor) self._composite_fields[cdescriptor.name] = container return container def Setter(self, new_value): raise AttributeError('Assignment not allowed to repeated field ' '"%s" in protocol message object.' 
% cdescriptor.name) doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name return property(Getter, Setter, doc=doc) class RepeatedCompositeContainer(object): """Container for repeated composite fields.""" __slots__ = ['_message', '_subclass', '_cfield_descriptor', '_cmsg'] def __init__(self, msg, cfield_descriptor, subclass): self._message = msg self._cmsg = msg._cmsg self._subclass = subclass self._cfield_descriptor = cfield_descriptor def add(self, **kwargs): cmessage = self._cmsg.AddMessage(self._cfield_descriptor) return self._subclass(__cmessage=cmessage, __owner=self._message, **kwargs) def extend(self, elem_seq): """Extends by appending the given sequence of elements of the same type as this one, copying each individual message. """ for message in elem_seq: self.add().MergeFrom(message) def MergeFrom(self, other): for message in other[:]: self.add().MergeFrom(message) def __getitem__(self, key): cmessages = self._cmsg.GetRepeatedMessage( self._cfield_descriptor, key) subclass = self._subclass if not isinstance(cmessages, list): return subclass(__cmessage=cmessages, __owner=self._message) return [subclass(__cmessage=m, __owner=self._message) for m in cmessages] def __delitem__(self, key): self._cmsg.DeleteRepeatedField( self._cfield_descriptor, key) def __len__(self): return self._cmsg.FieldLength(self._cfield_descriptor) def __eq__(self, other): """Compares the current instance with another one.""" if self is other: return True if not isinstance(other, self.__class__): raise TypeError('Can only compare repeated composite fields against ' 'other repeated composite fields.') messages = self[slice(None, None, None)] other_messages = other[slice(None, None, None)] return messages == other_messages def __hash__(self): raise TypeError('unhashable object') def sort(self, sort_function=cmp): messages = [] for index in range(len(self)): # messages[i][0] is where the i-th element of the new array has to come # from. 
# messages[i][1] is where the i-th element of the old array has to go. messages.append([index, 0, self[index]]) messages.sort(lambda x,y: sort_function(x[2], y[2])) # Remember which position each elements has to move to. for i in range(len(messages)): messages[messages[i][0]][1] = i # Apply the transposition. for i in range(len(messages)): from_position = messages[i][0] if i == from_position: continue self._cmsg.SwapRepeatedFieldElements( self._cfield_descriptor, i, from_position) messages[messages[i][1]][0] = from_position def RepeatedCompositeProperty(cdescriptor, message_type): """Returns a Python property for the given repeated composite field.""" def Getter(self): container = self._composite_fields.get(cdescriptor.name, None) if container is None: container = RepeatedCompositeContainer( self, cdescriptor, message_type._concrete_class) self._composite_fields[cdescriptor.name] = container return container def Setter(self, new_value): raise AttributeError('Assignment not allowed to repeated field ' '"%s" in protocol message object.' % cdescriptor.name) doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name return property(Getter, Setter, doc=doc) class ExtensionDict(object): """Extension dictionary added to each protocol message.""" def __init__(self, msg): self._message = msg self._cmsg = msg._cmsg self._values = {} def __setitem__(self, extension, value): from google.protobuf import descriptor if not isinstance(extension, descriptor.FieldDescriptor): raise KeyError('Bad extension %r.' % (extension,)) cdescriptor = extension._cdescriptor if (cdescriptor.label != _LABEL_OPTIONAL or cdescriptor.cpp_type == _CPPTYPE_MESSAGE): raise TypeError('Extension %r is repeated and/or a composite type.' 
% ( extension.full_name,)) self._cmsg.SetScalar(cdescriptor, value) self._values[extension] = value def __getitem__(self, extension): from google.protobuf import descriptor if not isinstance(extension, descriptor.FieldDescriptor): raise KeyError('Bad extension %r.' % (extension,)) cdescriptor = extension._cdescriptor if (cdescriptor.label != _LABEL_REPEATED and cdescriptor.cpp_type != _CPPTYPE_MESSAGE): return self._cmsg.GetScalar(cdescriptor) ext = self._values.get(extension, None) if ext is not None: return ext ext = self._CreateNewHandle(extension) self._values[extension] = ext return ext def ClearExtension(self, extension): from google.protobuf import descriptor if not isinstance(extension, descriptor.FieldDescriptor): raise KeyError('Bad extension %r.' % (extension,)) self._cmsg.ClearFieldByDescriptor(extension._cdescriptor) if extension in self._values: del self._values[extension] def HasExtension(self, extension): from google.protobuf import descriptor if not isinstance(extension, descriptor.FieldDescriptor): raise KeyError('Bad extension %r.' % (extension,)) return self._cmsg.HasFieldByDescriptor(extension._cdescriptor) def _FindExtensionByName(self, name): """Tries to find a known extension with the specified name. Args: name: Extension full name. Returns: Extension field descriptor. """ return self._message._extensions_by_name.get(name, None) def _CreateNewHandle(self, extension): cdescriptor = extension._cdescriptor if (cdescriptor.label != _LABEL_REPEATED and cdescriptor.cpp_type == _CPPTYPE_MESSAGE): cmessage = self._cmsg.NewSubMessage(cdescriptor) return extension.message_type._concrete_class(__cmessage=cmessage) if cdescriptor.label == _LABEL_REPEATED: if cdescriptor.cpp_type == _CPPTYPE_MESSAGE: return RepeatedCompositeContainer( self._message, cdescriptor, extension.message_type._concrete_class) else: return RepeatedScalarContainer(self._message, cdescriptor) # This shouldn't happen! 
assert False return None def NewMessage(message_descriptor, dictionary): """Creates a new protocol message *class*.""" _AddClassAttributesForNestedExtensions(message_descriptor, dictionary) _AddEnumValues(message_descriptor, dictionary) _AddDescriptors(message_descriptor, dictionary) def InitMessage(message_descriptor, cls): """Constructs a new message instance (called before instance's __init__).""" cls._extensions_by_name = {} _AddInitMethod(message_descriptor, cls) _AddMessageMethods(message_descriptor, cls) _AddPropertiesForExtensions(message_descriptor, cls) def _AddDescriptors(message_descriptor, dictionary): """Sets up a new protocol message class dictionary. Args: message_descriptor: A Descriptor instance describing this message type. dictionary: Class dictionary to which we'll add a '__slots__' entry. """ dictionary['__descriptors'] = {} for field in message_descriptor.fields: dictionary['__descriptors'][field.name] = GetFieldDescriptor( field.full_name) dictionary['__slots__'] = list(dictionary['__descriptors'].iterkeys()) + [ '_cmsg', '_owner', '_composite_fields', 'Extensions'] def _AddEnumValues(message_descriptor, dictionary): """Sets class-level attributes for all enum fields defined in this message. Args: message_descriptor: Descriptor object for this message type. dictionary: Class dictionary that should be populated. """ for enum_type in message_descriptor.enum_types: for enum_value in enum_type.values: dictionary[enum_value.name] = enum_value.number def _AddClassAttributesForNestedExtensions(message_descriptor, dictionary): """Adds class attributes for the nested extensions.""" extension_dict = message_descriptor.extensions_by_name for extension_name, extension_field in extension_dict.iteritems(): assert extension_name not in dictionary dictionary[extension_name] = extension_field def _AddInitMethod(message_descriptor, cls): """Adds an __init__ method to cls.""" # Create and attach message field properties to the message class. 
# This can be done just once per message class, since property setters and # getters are passed the message instance. # This makes message instantiation extremely fast, and at the same time it # doesn't require the creation of property objects for each message instance, # which saves a lot of memory. for field in message_descriptor.fields: field_cdescriptor = cls.__descriptors[field.name] if field.label == _LABEL_REPEATED: if field.cpp_type == _CPPTYPE_MESSAGE: value = RepeatedCompositeProperty(field_cdescriptor, field.message_type) else: value = RepeatedScalarProperty(field_cdescriptor) elif field.cpp_type == _CPPTYPE_MESSAGE: value = CompositeProperty(field_cdescriptor, field.message_type) else: value = ScalarProperty(field_cdescriptor) setattr(cls, field.name, value) # Attach a constant with the field number. constant_name = field.name.upper() + '_FIELD_NUMBER' setattr(cls, constant_name, field.number) def Init(self, **kwargs): """Message constructor.""" cmessage = kwargs.pop('__cmessage', None) if cmessage is None: self._cmsg = NewCMessage(message_descriptor.full_name) else: self._cmsg = cmessage # Keep a reference to the owner, as the owner keeps a reference to the # underlying protocol buffer message. owner = kwargs.pop('__owner', None) if owner is not None: self._owner = owner self.Extensions = ExtensionDict(self) self._composite_fields = {} for field_name, field_value in kwargs.iteritems(): field_cdescriptor = self.__descriptors.get(field_name, None) if field_cdescriptor is None: raise ValueError('Protocol message has no "%s" field.' 
% field_name) if field_cdescriptor.label == _LABEL_REPEATED: if field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE: for val in field_value: getattr(self, field_name).add().MergeFrom(val) else: getattr(self, field_name).extend(field_value) elif field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE: getattr(self, field_name).MergeFrom(field_value) else: setattr(self, field_name, field_value) Init.__module__ = None Init.__doc__ = None cls.__init__ = Init def _IsMessageSetExtension(field): """Checks if a field is a message set extension.""" return (field.is_extension and field.containing_type.has_options and field.containing_type.GetOptions().message_set_wire_format and field.type == _TYPE_MESSAGE and field.message_type == field.extension_scope and field.label == _LABEL_OPTIONAL) def _AddMessageMethods(message_descriptor, cls): """Adds the methods to a protocol message class.""" if message_descriptor.is_extendable: def ClearExtension(self, extension): self.Extensions.ClearExtension(extension) def HasExtension(self, extension): return self.Extensions.HasExtension(extension) def HasField(self, field_name): return self._cmsg.HasField(field_name) def ClearField(self, field_name): if field_name in self._composite_fields: del self._composite_fields[field_name] self._cmsg.ClearField(field_name) def Clear(self): return self._cmsg.Clear() def IsInitialized(self, errors=None): if self._cmsg.IsInitialized(): return True if errors is not None: errors.extend(self.FindInitializationErrors()); return False def SerializeToString(self): if not self.IsInitialized(): raise message.EncodeError( 'Message is missing required fields: ' + ','.join(self.FindInitializationErrors())) return self._cmsg.SerializeToString() def SerializePartialToString(self): return self._cmsg.SerializePartialToString() def ParseFromString(self, serialized): self.Clear() self.MergeFromString(serialized) def MergeFromString(self, serialized): byte_size = self._cmsg.MergeFromString(serialized) if byte_size < 0: raise 
message.DecodeError('Unable to merge from string.') return byte_size def MergeFrom(self, msg): if not isinstance(msg, cls): raise TypeError( "Parameter to MergeFrom() must be instance of same class.") self._cmsg.MergeFrom(msg._cmsg) def CopyFrom(self, msg): self._cmsg.CopyFrom(msg._cmsg) def ByteSize(self): return self._cmsg.ByteSize() def SetInParent(self): return self._cmsg.SetInParent() def ListFields(self): all_fields = [] field_list = self._cmsg.ListFields() fields_by_name = cls.DESCRIPTOR.fields_by_name for is_extension, field_name in field_list: if is_extension: extension = cls._extensions_by_name[field_name] all_fields.append((extension, self.Extensions[extension])) else: field_descriptor = fields_by_name[field_name] all_fields.append( (field_descriptor, getattr(self, field_name))) all_fields.sort(key=lambda item: item[0].number) return all_fields def FindInitializationErrors(self): return self._cmsg.FindInitializationErrors() def __str__(self): return self._cmsg.DebugString() def __eq__(self, other): if self is other: return True if not isinstance(other, self.__class__): return False return self.ListFields() == other.ListFields() def __ne__(self, other): return not self == other def __hash__(self): raise TypeError('unhashable object') def __unicode__(self): return text_format.MessageToString(self, as_utf8=True).decode('utf-8') # Attach the local methods to the message class. for key, value in locals().copy().iteritems(): if key not in ('key', 'value', '__builtins__', '__name__', '__doc__'): setattr(cls, key, value) # Static methods: def RegisterExtension(extension_handle): extension_handle.containing_type = cls.DESCRIPTOR cls._extensions_by_name[extension_handle.full_name] = extension_handle if _IsMessageSetExtension(extension_handle): # MessageSet extension. Also register under type name. 
cls._extensions_by_name[ extension_handle.message_type.full_name] = extension_handle cls.RegisterExtension = staticmethod(RegisterExtension) def FromString(string): msg = cls() msg.MergeFromString(string) return msg cls.FromString = staticmethod(FromString) def _AddPropertiesForExtensions(message_descriptor, cls): """Adds properties for all fields in this protocol message type.""" extension_dict = message_descriptor.extensions_by_name for extension_name, extension_field in extension_dict.iteritems(): constant_name = extension_name.upper() + '_FIELD_NUMBER' setattr(cls, constant_name, extension_field.number)
gpl-3.0
hakabane/p2pool-altcoins
p2pool/util/fixargparse.py
283
1630
from __future__ import absolute_import import argparse import sys class FixedArgumentParser(argparse.ArgumentParser): ''' fixes argparse's handling of empty string arguments and changes @filename behaviour to accept multiple arguments on each line ''' def _read_args_from_files(self, arg_strings): # expand arguments referencing files new_arg_strings = [] for arg_string in arg_strings: # for regular arguments, just add them back into the list if not arg_string or arg_string[0] not in self.fromfile_prefix_chars: new_arg_strings.append(arg_string) # replace arguments referencing files with the file content else: try: args_file = open(arg_string[1:]) try: arg_strings = [] for arg_line in args_file.read().splitlines(): for arg in self.convert_arg_line_to_args(arg_line): arg_strings.append(arg) arg_strings = self._read_args_from_files(arg_strings) new_arg_strings.extend(arg_strings) finally: args_file.close() except IOError: err = sys.exc_info()[1] self.error(str(err)) # return the modified argument list return new_arg_strings def convert_arg_line_to_args(self, arg_line): return [arg for arg in arg_line.split() if arg.strip()]
gpl-3.0
roadmapper/ansible
test/units/modules/network/onyx/test_onyx_ospf.py
68
4494
# # Copyright: Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from units.compat.mock import patch from ansible.modules.network.onyx import onyx_ospf from units.modules.utils import set_module_args from .onyx_module import TestOnyxModule, load_fixture class TestOnyxOspfModule(TestOnyxModule): module = onyx_ospf def setUp(self): super(TestOnyxOspfModule, self).setUp() self._ospf_exists = True self.mock_get_config = patch.object( onyx_ospf.OnyxOspfModule, "_get_ospf_config") self.get_config = self.mock_get_config.start() self.mock_get_interfaces_config = patch.object( onyx_ospf.OnyxOspfModule, "_get_ospf_interfaces_config") self.get_interfaces_config = self.mock_get_interfaces_config.start() self.mock_load_config = patch( 'ansible.module_utils.network.onyx.onyx.load_config') self.load_config = self.mock_load_config.start() def tearDown(self): super(TestOnyxOspfModule, self).tearDown() self.mock_get_config.stop() self.mock_load_config.stop() def load_fixtures(self, commands=None, transport='cli'): if self._ospf_exists: config_file = 'onyx_ospf_show.cfg' self.get_config.return_value = load_fixture(config_file) config_file = 'onyx_ospf_interfaces_show.cfg' self.get_interfaces_config.return_value = load_fixture(config_file) else: self.get_config.return_value = None self.get_interfaces_config.return_value = None self.load_config.return_value = None def test_ospf_absent_no_change(self): set_module_args(dict(ospf=3, state='absent')) self.execute_module(changed=False) def test_ospf_present_no_change(self): interface = dict(name='Loopback 1', area='0.0.0.0') set_module_args(dict(ospf=2, router_id='10.2.3.4', interfaces=[interface])) self.execute_module(changed=False) def test_ospf_present_remove(self): set_module_args(dict(ospf=2, state='absent')) commands = ['no router ospf 2'] 
self.execute_module(changed=True, commands=commands) def test_ospf_change_router(self): interface = dict(name='Loopback 1', area='0.0.0.0') set_module_args(dict(ospf=2, router_id='10.2.3.5', interfaces=[interface])) commands = ['router ospf 2', 'router-id 10.2.3.5', 'exit'] self.execute_module(changed=True, commands=commands, sort=False) def test_ospf_remove_router(self): interface = dict(name='Loopback 1', area='0.0.0.0') set_module_args(dict(ospf=2, interfaces=[interface])) commands = ['router ospf 2', 'no router-id', 'exit'] self.execute_module(changed=True, commands=commands, sort=False) def test_ospf_add_interface(self): interfaces = [dict(name='Loopback 1', area='0.0.0.0'), dict(name='Loopback 2', area='0.0.0.0')] set_module_args(dict(ospf=2, router_id='10.2.3.4', interfaces=interfaces)) commands = ['interface loopback 2 ip ospf area 0.0.0.0'] self.execute_module(changed=True, commands=commands) def test_ospf_remove_interface(self): set_module_args(dict(ospf=2, router_id='10.2.3.4')) commands = ['interface loopback 1 no ip ospf area'] self.execute_module(changed=True, commands=commands) def test_ospf_add(self): self._ospf_exists = False interfaces = [dict(name='Loopback 1', area='0.0.0.0'), dict(name='Vlan 210', area='0.0.0.0'), dict(name='Eth1/1', area='0.0.0.0'), dict(name='Po1', area='0.0.0.0')] set_module_args(dict(ospf=2, router_id='10.2.3.4', interfaces=interfaces)) commands = ['router ospf 2', 'router-id 10.2.3.4', 'exit', 'interface loopback 1 ip ospf area 0.0.0.0', 'interface vlan 210 ip ospf area 0.0.0.0', 'interface ethernet 1/1 ip ospf area 0.0.0.0', 'interface port-channel 1 ip ospf area 0.0.0.0'] self.execute_module(changed=True, commands=commands)
gpl-3.0
goldeneye-source/ges-python
lib/subprocess.py
67
63971
# subprocess - Subprocesses with accessible I/O streams # # For more information about this module, see PEP 324. # # Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se> # # Licensed to PSF under a Contributor Agreement. # See http://www.python.org/2.4/license for licensing details. r"""subprocess - Subprocesses with accessible I/O streams This module allows you to spawn processes, connect to their input/output/error pipes, and obtain their return codes. This module intends to replace several older modules and functions: os.system os.spawn* Information about how the subprocess module can be used to replace these modules and functions can be found below. Using the subprocess module =========================== This module defines one class called Popen: class Popen(args, bufsize=-1, executable=None, stdin=None, stdout=None, stderr=None, preexec_fn=None, close_fds=True, shell=False, cwd=None, env=None, universal_newlines=False, startupinfo=None, creationflags=0, restore_signals=True, start_new_session=False, pass_fds=()): Arguments are: args should be a string, or a sequence of program arguments. The program to execute is normally the first item in the args sequence or string, but can be explicitly set by using the executable argument. On POSIX, with shell=False (default): In this case, the Popen class uses os.execvp() to execute the child program. args should normally be a sequence. A string will be treated as a sequence with the string as the only item (the program to execute). On POSIX, with shell=True: If args is a string, it specifies the command string to execute through the shell. If args is a sequence, the first item specifies the command string, and any additional items will be treated as additional shell arguments. On Windows: the Popen class uses CreateProcess() to execute the child program, which operates on strings. If args is a sequence, it will be converted to a string using the list2cmdline method. 
Please note that not all MS Windows applications interpret the command line the same way: The list2cmdline is designed for applications using the same rules as the MS C runtime. bufsize will be supplied as the corresponding argument to the io.open() function when creating the stdin/stdout/stderr pipe file objects: 0 means unbuffered (read & write are one system call and can return short), 1 means line buffered, any other positive value means use a buffer of approximately that size. A negative bufsize, the default, means the system default of io.DEFAULT_BUFFER_SIZE will be used. stdin, stdout and stderr specify the executed programs' standard input, standard output and standard error file handles, respectively. Valid values are PIPE, an existing file descriptor (a positive integer), an existing file object, and None. PIPE indicates that a new pipe to the child should be created. With None, no redirection will occur; the child's file handles will be inherited from the parent. Additionally, stderr can be STDOUT, which indicates that the stderr data from the applications should be captured into the same file handle as for stdout. On POSIX, if preexec_fn is set to a callable object, this object will be called in the child process just before the child is executed. The use of preexec_fn is not thread safe, using it in the presence of threads could lead to a deadlock in the child process before the new executable is executed. If close_fds is true, all file descriptors except 0, 1 and 2 will be closed before the child process is executed. The default for close_fds varies by platform: Always true on POSIX. True when stdin/stdout/stderr are None on Windows, false otherwise. pass_fds is an optional sequence of file descriptors to keep open between the parent and child. Providing any pass_fds implicitly sets close_fds to true. if shell is true, the specified command will be executed through the shell. 
If cwd is not None, the current directory will be changed to cwd before the child is executed. On POSIX, if restore_signals is True all signals that Python sets to SIG_IGN are restored to SIG_DFL in the child process before the exec. Currently this includes the SIGPIPE, SIGXFZ and SIGXFSZ signals. This parameter does nothing on Windows. On POSIX, if start_new_session is True, the setsid() system call will be made in the child process prior to executing the command. If env is not None, it defines the environment variables for the new process. If universal_newlines is false, the file objects stdin, stdout and stderr are opened as binary files, and no line ending conversion is done. If universal_newlines is true, the file objects stdout and stderr are opened as a text files, but lines may be terminated by any of '\n', the Unix end-of-line convention, '\r', the old Macintosh convention or '\r\n', the Windows convention. All of these external representations are seen as '\n' by the Python program. Also, the newlines attribute of the file objects stdout, stdin and stderr are not updated by the communicate() method. The startupinfo and creationflags, if given, will be passed to the underlying CreateProcess() function. They can specify things such as appearance of the main window and priority for the new process. (Windows only) This module also defines some shortcut functions: call(*popenargs, **kwargs): Run command with arguments. Wait for command to complete, then return the returncode attribute. The arguments are the same as for the Popen constructor. Example: >>> retcode = subprocess.call(["ls", "-l"]) check_call(*popenargs, **kwargs): Run command with arguments. Wait for command to complete. If the exit code was zero then return, otherwise raise CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute. The arguments are the same as for the Popen constructor. 
Example: >>> subprocess.check_call(["ls", "-l"]) 0 getstatusoutput(cmd): Return (status, output) of executing cmd in a shell. Execute the string 'cmd' in a shell with 'check_output' and return a 2-tuple (status, output). Universal newlines mode is used, meaning that the result with be decoded to a string. A trailing newline is stripped from the output. The exit status for the command can be interpreted according to the rules for the function 'wait'. Example: >>> subprocess.getstatusoutput('ls /bin/ls') (0, '/bin/ls') >>> subprocess.getstatusoutput('cat /bin/junk') (256, 'cat: /bin/junk: No such file or directory') >>> subprocess.getstatusoutput('/bin/junk') (256, 'sh: /bin/junk: not found') getoutput(cmd): Return output (stdout or stderr) of executing cmd in a shell. Like getstatusoutput(), except the exit status is ignored and the return value is a string containing the command's output. Example: >>> subprocess.getoutput('ls /bin/ls') '/bin/ls' check_output(*popenargs, **kwargs): Run command with arguments and return its output. If the exit code was non-zero it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute and output in the output attribute. The arguments are the same as for the Popen constructor. Example: >>> output = subprocess.check_output(["ls", "-l", "/dev/null"]) There is an additional optional argument, "input", allowing you to pass a string to the subprocess's stdin. If you use this argument you may not also use the Popen constructor's "stdin" argument. Exceptions ---------- Exceptions raised in the child process, before the new program has started to execute, will be re-raised in the parent. Additionally, the exception object will have one extra attribute called 'child_traceback', which is a string containing traceback information from the child's point of view. The most common exception raised is OSError. This occurs, for example, when trying to execute a non-existent file. 
Applications should prepare for OSErrors. A ValueError will be raised if Popen is called with invalid arguments. Exceptions defined within this module inherit from SubprocessError. check_call() and check_output() will raise CalledProcessError if the called process returns a non-zero return code. TimeoutExpired be raised if a timeout was specified and expired. Security -------- Unlike some other popen functions, this implementation will never call /bin/sh implicitly. This means that all characters, including shell metacharacters, can safely be passed to child processes. Popen objects ============= Instances of the Popen class have the following methods: poll() Check if child process has terminated. Returns returncode attribute. wait() Wait for child process to terminate. Returns returncode attribute. communicate(input=None) Interact with process: Send data to stdin. Read data from stdout and stderr, until end-of-file is reached. Wait for process to terminate. The optional input argument should be a string to be sent to the child process, or None, if no data should be sent to the child. communicate() returns a tuple (stdout, stderr). Note: The data read is buffered in memory, so do not use this method if the data size is large or unlimited. The following attributes are also available: stdin If the stdin argument is PIPE, this attribute is a file object that provides input to the child process. Otherwise, it is None. stdout If the stdout argument is PIPE, this attribute is a file object that provides output from the child process. Otherwise, it is None. stderr If the stderr argument is PIPE, this attribute is file object that provides error output from the child process. Otherwise, it is None. pid The process ID of the child process. returncode The child return code. A None value indicates that the process hasn't terminated yet. A negative value -N indicates that the child was terminated by signal N (POSIX only). 
Replacing older functions with the subprocess module ==================================================== In this section, "a ==> b" means that b can be used as a replacement for a. Note: All functions in this section fail (more or less) silently if the executed program cannot be found; this module raises an OSError exception. In the following examples, we assume that the subprocess module is imported with "from subprocess import *". Replacing /bin/sh shell backquote --------------------------------- output=`mycmd myarg` ==> output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0] Replacing shell pipe line ------------------------- output=`dmesg | grep hda` ==> p1 = Popen(["dmesg"], stdout=PIPE) p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) output = p2.communicate()[0] Replacing os.system() --------------------- sts = os.system("mycmd" + " myarg") ==> p = Popen("mycmd" + " myarg", shell=True) pid, sts = os.waitpid(p.pid, 0) Note: * Calling the program through the shell is usually not required. * It's easier to look at the returncode attribute than the exitstatus. 
A more real-world example would look like this: try: retcode = call("mycmd" + " myarg", shell=True) if retcode < 0: print("Child was terminated by signal", -retcode, file=sys.stderr) else: print("Child returned", retcode, file=sys.stderr) except OSError as e: print("Execution failed:", e, file=sys.stderr) Replacing os.spawn* ------------------- P_NOWAIT example: pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg") ==> pid = Popen(["/bin/mycmd", "myarg"]).pid P_WAIT example: retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg") ==> retcode = call(["/bin/mycmd", "myarg"]) Vector example: os.spawnvp(os.P_NOWAIT, path, args) ==> Popen([path] + args[1:]) Environment example: os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env) ==> Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"}) """ import sys mswindows = (sys.platform == "win32") import io import os import time import signal import builtins import warnings import errno try: from time import monotonic as _time except ImportError: from time import time as _time # Exception classes used by this module. class SubprocessError(Exception): pass class CalledProcessError(SubprocessError): """This exception is raised when a process run by check_call() or check_output() returns a non-zero exit status. The exit status will be stored in the returncode attribute; check_output() will also store the output in the output attribute. """ def __init__(self, returncode, cmd, output=None): self.returncode = returncode self.cmd = cmd self.output = output def __str__(self): return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode) class TimeoutExpired(SubprocessError): """This exception is raised when the timeout expires while waiting for a child process. 
""" def __init__(self, cmd, timeout, output=None): self.cmd = cmd self.timeout = timeout self.output = output def __str__(self): return ("Command '%s' timed out after %s seconds" % (self.cmd, self.timeout)) if mswindows: import threading import msvcrt import _winapi class STARTUPINFO: dwFlags = 0 hStdInput = None hStdOutput = None hStdError = None wShowWindow = 0 else: import _posixsubprocess import select import selectors try: import threading except ImportError: import dummy_threading as threading # When select or poll has indicated that the file is writable, # we can write up to _PIPE_BUF bytes without risk of blocking. # POSIX defines PIPE_BUF as >= 512. _PIPE_BUF = getattr(select, 'PIPE_BUF', 512) # poll/select have the advantage of not requiring any extra file # descriptor, contrarily to epoll/kqueue (also, they require a single # syscall). if hasattr(selectors, 'PollSelector'): _PopenSelector = selectors.PollSelector else: _PopenSelector = selectors.SelectSelector __all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput", "getoutput", "check_output", "CalledProcessError", "DEVNULL"] if mswindows: from _winapi import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP, STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, STD_ERROR_HANDLE, SW_HIDE, STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW) __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP", "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE", "STD_ERROR_HANDLE", "SW_HIDE", "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW"]) class Handle(int): closed = False def Close(self, CloseHandle=_winapi.CloseHandle): if not self.closed: self.closed = True CloseHandle(self) def Detach(self): if not self.closed: self.closed = True return int(self) raise ValueError("already closed") def __repr__(self): return "Handle(%d)" % int(self) __del__ = Close __str__ = __repr__ try: MAXFD = os.sysconf("SC_OPEN_MAX") except: MAXFD = 256 # This lists holds Popen instances for which the underlying process had not # exited at the time its 
__del__ method got called: those processes are wait()ed # for synchronously from _cleanup() when a new Popen object is created, to avoid # zombie processes. _active = [] def _cleanup(): for inst in _active[:]: res = inst._internal_poll(_deadstate=sys.maxsize) if res is not None: try: _active.remove(inst) except ValueError: # This can happen if two threads create a new Popen instance. # It's harmless that it was already removed, so ignore. pass PIPE = -1 STDOUT = -2 DEVNULL = -3 def _eintr_retry_call(func, *args): while True: try: return func(*args) except InterruptedError: continue # XXX This function is only used by multiprocessing and the test suite, # but it's here so that it can be imported when Python is compiled without # threads. def _args_from_interpreter_flags(): """Return a list of command-line arguments reproducing the current settings in sys.flags and sys.warnoptions.""" flag_opt_map = { 'debug': 'd', # 'inspect': 'i', # 'interactive': 'i', 'optimize': 'O', 'dont_write_bytecode': 'B', 'no_user_site': 's', 'no_site': 'S', 'ignore_environment': 'E', 'verbose': 'v', 'bytes_warning': 'b', 'quiet': 'q', 'hash_randomization': 'R', } args = [] for flag, opt in flag_opt_map.items(): v = getattr(sys.flags, flag) if v > 0: if flag == 'hash_randomization': v = 1 # Handle specification of an exact seed args.append('-' + opt * v) for opt in sys.warnoptions: args.append('-W' + opt) return args def call(*popenargs, timeout=None, **kwargs): """Run command with arguments. Wait for command to complete or timeout, then return the returncode attribute. The arguments are the same as for the Popen constructor. Example: retcode = call(["ls", "-l"]) """ with Popen(*popenargs, **kwargs) as p: try: return p.wait(timeout=timeout) except: p.kill() p.wait() raise def check_call(*popenargs, **kwargs): """Run command with arguments. Wait for command to complete. If the exit code was zero then return, otherwise raise CalledProcessError. 
The CalledProcessError object will have the return code in the returncode attribute. The arguments are the same as for the call function. Example: check_call(["ls", "-l"]) """ retcode = call(*popenargs, **kwargs) if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] raise CalledProcessError(retcode, cmd) return 0 def check_output(*popenargs, timeout=None, **kwargs): r"""Run command with arguments and return its output. If the exit code was non-zero it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute and output in the output attribute. The arguments are the same as for the Popen constructor. Example: >>> check_output(["ls", "-l", "/dev/null"]) b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' The stdout argument is not allowed as it is used internally. To capture standard error in the result, use stderr=STDOUT. >>> check_output(["/bin/sh", "-c", ... "ls -l non_existent_file ; exit 0"], ... stderr=STDOUT) b'ls: non_existent_file: No such file or directory\n' There is an additional optional argument, "input", allowing you to pass a string to the subprocess's stdin. If you use this argument you may not also use the Popen constructor's "stdin" argument, as it too will be used internally. Example: >>> check_output(["sed", "-e", "s/foo/bar/"], ... input=b"when in the course of fooman events\n") b'when in the course of barman events\n' If universal_newlines=True is passed, the return value will be a string rather than bytes. 
""" if 'stdout' in kwargs: raise ValueError('stdout argument not allowed, it will be overridden.') if 'input' in kwargs: if 'stdin' in kwargs: raise ValueError('stdin and input arguments may not both be used.') inputdata = kwargs['input'] del kwargs['input'] kwargs['stdin'] = PIPE else: inputdata = None with Popen(*popenargs, stdout=PIPE, **kwargs) as process: try: output, unused_err = process.communicate(inputdata, timeout=timeout) except TimeoutExpired: process.kill() output, unused_err = process.communicate() raise TimeoutExpired(process.args, timeout, output=output) except: process.kill() process.wait() raise retcode = process.poll() if retcode: raise CalledProcessError(retcode, process.args, output=output) return output def list2cmdline(seq): """ Translate a sequence of arguments into a command line string, using the same rules as the MS C runtime: 1) Arguments are delimited by white space, which is either a space or a tab. 2) A string surrounded by double quotation marks is interpreted as a single argument, regardless of white space contained within. A quoted string can be embedded in an argument. 3) A double quotation mark preceded by a backslash is interpreted as a literal double quotation mark. 4) Backslashes are interpreted literally, unless they immediately precede a double quotation mark. 5) If backslashes immediately precede a double quotation mark, every pair of backslashes is interpreted as a literal backslash. If the number of backslashes is odd, the last backslash escapes the next double quotation mark as described in rule 3. 
""" # See # http://msdn.microsoft.com/en-us/library/17w5ykft.aspx # or search http://msdn.microsoft.com for # "Parsing C++ Command-Line Arguments" result = [] needquote = False for arg in seq: bs_buf = [] # Add a space to separate this argument from the others if result: result.append(' ') needquote = (" " in arg) or ("\t" in arg) or not arg if needquote: result.append('"') for c in arg: if c == '\\': # Don't know if we need to double yet. bs_buf.append(c) elif c == '"': # Double backslashes. result.append('\\' * len(bs_buf)*2) bs_buf = [] result.append('\\"') else: # Normal char if bs_buf: result.extend(bs_buf) bs_buf = [] result.append(c) # Add remaining backslashes, if any. if bs_buf: result.extend(bs_buf) if needquote: result.extend(bs_buf) result.append('"') return ''.join(result) # Various tools for executing commands and looking at their output and status. # def getstatusoutput(cmd): """ Return (status, output) of executing cmd in a shell. Execute the string 'cmd' in a shell with 'check_output' and return a 2-tuple (status, output). Universal newlines mode is used, meaning that the result with be decoded to a string. A trailing newline is stripped from the output. The exit status for the command can be interpreted according to the rules for the function 'wait'. Example: >>> import subprocess >>> subprocess.getstatusoutput('ls /bin/ls') (0, '/bin/ls') >>> subprocess.getstatusoutput('cat /bin/junk') (256, 'cat: /bin/junk: No such file or directory') >>> subprocess.getstatusoutput('/bin/junk') (256, 'sh: /bin/junk: not found') """ try: data = check_output(cmd, shell=True, universal_newlines=True, stderr=STDOUT) status = 0 except CalledProcessError as ex: data = ex.output status = ex.returncode if data[-1:] == '\n': data = data[:-1] return status, data def getoutput(cmd): """Return output (stdout or stderr) of executing cmd in a shell. Like getstatusoutput(), except the exit status is ignored and the return value is a string containing the command's output. 
Example: >>> import subprocess >>> subprocess.getoutput('ls /bin/ls') '/bin/ls' """ return getstatusoutput(cmd)[1] _PLATFORM_DEFAULT_CLOSE_FDS = object() class Popen(object): _child_created = False # Set here since __del__ checks it def __init__(self, args, bufsize=-1, executable=None, stdin=None, stdout=None, stderr=None, preexec_fn=None, close_fds=_PLATFORM_DEFAULT_CLOSE_FDS, shell=False, cwd=None, env=None, universal_newlines=False, startupinfo=None, creationflags=0, restore_signals=True, start_new_session=False, pass_fds=()): """Create new Popen instance.""" _cleanup() # Held while anything is calling waitpid before returncode has been # updated to prevent clobbering returncode if wait() or poll() are # called from multiple threads at once. After acquiring the lock, # code must re-check self.returncode to see if another thread just # finished a waitpid() call. self._waitpid_lock = threading.Lock() self._input = None self._communication_started = False if bufsize is None: bufsize = -1 # Restore default if not isinstance(bufsize, int): raise TypeError("bufsize must be an integer") if mswindows: if preexec_fn is not None: raise ValueError("preexec_fn is not supported on Windows " "platforms") any_stdio_set = (stdin is not None or stdout is not None or stderr is not None) if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS: if any_stdio_set: close_fds = False else: close_fds = True elif close_fds and any_stdio_set: raise ValueError( "close_fds is not supported on Windows platforms" " if you redirect stdin/stdout/stderr") else: # POSIX if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS: close_fds = True if pass_fds and not close_fds: warnings.warn("pass_fds overriding close_fds.", RuntimeWarning) close_fds = True if startupinfo is not None: raise ValueError("startupinfo is only supported on Windows " "platforms") if creationflags != 0: raise ValueError("creationflags is only supported on Windows " "platforms") self.args = args self.stdin = None self.stdout = None self.stderr = 
None self.pid = None self.returncode = None self.universal_newlines = universal_newlines # Input and output objects. The general principle is like # this: # # Parent Child # ------ ----- # p2cwrite ---stdin---> p2cread # c2pread <--stdout--- c2pwrite # errread <--stderr--- errwrite # # On POSIX, the child objects are file descriptors. On # Windows, these are Windows file handles. The parent objects # are file descriptors on both platforms. The parent objects # are -1 when not using PIPEs. The child objects are -1 # when not redirecting. (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) = self._get_handles(stdin, stdout, stderr) # We wrap OS handles *before* launching the child, otherwise a # quickly terminating child could make our fds unwrappable # (see #8458). if mswindows: if p2cwrite != -1: p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0) if c2pread != -1: c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0) if errread != -1: errread = msvcrt.open_osfhandle(errread.Detach(), 0) if p2cwrite != -1: self.stdin = io.open(p2cwrite, 'wb', bufsize) if universal_newlines: self.stdin = io.TextIOWrapper(self.stdin, write_through=True) if c2pread != -1: self.stdout = io.open(c2pread, 'rb', bufsize) if universal_newlines: self.stdout = io.TextIOWrapper(self.stdout) if errread != -1: self.stderr = io.open(errread, 'rb', bufsize) if universal_newlines: self.stderr = io.TextIOWrapper(self.stderr) self._closed_child_pipe_fds = False try: self._execute_child(args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, start_new_session) except: # Cleanup if the child failed starting. for f in filter(None, (self.stdin, self.stdout, self.stderr)): try: f.close() except OSError: pass # Ignore EBADF or other errors. 
if not self._closed_child_pipe_fds: to_close = [] if stdin == PIPE: to_close.append(p2cread) if stdout == PIPE: to_close.append(c2pwrite) if stderr == PIPE: to_close.append(errwrite) if hasattr(self, '_devnull'): to_close.append(self._devnull) for fd in to_close: try: os.close(fd) except OSError: pass raise def _translate_newlines(self, data, encoding): data = data.decode(encoding) return data.replace("\r\n", "\n").replace("\r", "\n") def __enter__(self): return self def __exit__(self, type, value, traceback): if self.stdout: self.stdout.close() if self.stderr: self.stderr.close() if self.stdin: self.stdin.close() # Wait for the process to terminate, to avoid zombies. self.wait() def __del__(self, _maxsize=sys.maxsize): if not self._child_created: # We didn't get to successfully create a child process. return # In case the child hasn't been waited on, check if it's done. self._internal_poll(_deadstate=_maxsize) if self.returncode is None and _active is not None: # Child is still running, keep us alive until we can wait on it. _active.append(self) def _get_devnull(self): if not hasattr(self, '_devnull'): self._devnull = os.open(os.devnull, os.O_RDWR) return self._devnull def communicate(self, input=None, timeout=None): """Interact with process: Send data to stdin. Read data from stdout and stderr, until end-of-file is reached. Wait for process to terminate. The optional input argument should be bytes to be sent to the child process, or None, if no data should be sent to the child. communicate() returns a tuple (stdout, stderr).""" if self._communication_started and input: raise ValueError("Cannot send input after starting communication") # Optimization: If we are not worried about timeouts, we haven't # started communicating, and we have one or zero pipes, using select() # or threads is unnecessary. 
if (timeout is None and not self._communication_started and [self.stdin, self.stdout, self.stderr].count(None) >= 2): stdout = None stderr = None if self.stdin: if input: try: self.stdin.write(input) except OSError as e: if e.errno != errno.EPIPE and e.errno != errno.EINVAL: raise self.stdin.close() elif self.stdout: stdout = _eintr_retry_call(self.stdout.read) self.stdout.close() elif self.stderr: stderr = _eintr_retry_call(self.stderr.read) self.stderr.close() self.wait() else: if timeout is not None: endtime = _time() + timeout else: endtime = None try: stdout, stderr = self._communicate(input, endtime, timeout) finally: self._communication_started = True sts = self.wait(timeout=self._remaining_time(endtime)) return (stdout, stderr) def poll(self): return self._internal_poll() def _remaining_time(self, endtime): """Convenience for _communicate when computing timeouts.""" if endtime is None: return None else: return endtime - _time() def _check_timeout(self, endtime, orig_timeout): """Convenience for checking if a timeout has expired.""" if endtime is None: return if _time() > endtime: raise TimeoutExpired(self.args, orig_timeout) if mswindows: # # Windows methods # def _get_handles(self, stdin, stdout, stderr): """Construct and return tuple with IO objects: p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite """ if stdin is None and stdout is None and stderr is None: return (-1, -1, -1, -1, -1, -1) p2cread, p2cwrite = -1, -1 c2pread, c2pwrite = -1, -1 errread, errwrite = -1, -1 if stdin is None: p2cread = _winapi.GetStdHandle(_winapi.STD_INPUT_HANDLE) if p2cread is None: p2cread, _ = _winapi.CreatePipe(None, 0) p2cread = Handle(p2cread) _winapi.CloseHandle(_) elif stdin == PIPE: p2cread, p2cwrite = _winapi.CreatePipe(None, 0) p2cread, p2cwrite = Handle(p2cread), Handle(p2cwrite) elif stdin == DEVNULL: p2cread = msvcrt.get_osfhandle(self._get_devnull()) elif isinstance(stdin, int): p2cread = msvcrt.get_osfhandle(stdin) else: # Assuming file-like object 
p2cread = msvcrt.get_osfhandle(stdin.fileno()) p2cread = self._make_inheritable(p2cread) if stdout is None: c2pwrite = _winapi.GetStdHandle(_winapi.STD_OUTPUT_HANDLE) if c2pwrite is None: _, c2pwrite = _winapi.CreatePipe(None, 0) c2pwrite = Handle(c2pwrite) _winapi.CloseHandle(_) elif stdout == PIPE: c2pread, c2pwrite = _winapi.CreatePipe(None, 0) c2pread, c2pwrite = Handle(c2pread), Handle(c2pwrite) elif stdout == DEVNULL: c2pwrite = msvcrt.get_osfhandle(self._get_devnull()) elif isinstance(stdout, int): c2pwrite = msvcrt.get_osfhandle(stdout) else: # Assuming file-like object c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) c2pwrite = self._make_inheritable(c2pwrite) if stderr is None: errwrite = _winapi.GetStdHandle(_winapi.STD_ERROR_HANDLE) if errwrite is None: _, errwrite = _winapi.CreatePipe(None, 0) errwrite = Handle(errwrite) _winapi.CloseHandle(_) elif stderr == PIPE: errread, errwrite = _winapi.CreatePipe(None, 0) errread, errwrite = Handle(errread), Handle(errwrite) elif stderr == STDOUT: errwrite = c2pwrite elif stderr == DEVNULL: errwrite = msvcrt.get_osfhandle(self._get_devnull()) elif isinstance(stderr, int): errwrite = msvcrt.get_osfhandle(stderr) else: # Assuming file-like object errwrite = msvcrt.get_osfhandle(stderr.fileno()) errwrite = self._make_inheritable(errwrite) return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) def _make_inheritable(self, handle): """Return a duplicate of handle, which is inheritable""" h = _winapi.DuplicateHandle( _winapi.GetCurrentProcess(), handle, _winapi.GetCurrentProcess(), 0, 1, _winapi.DUPLICATE_SAME_ACCESS) return Handle(h) def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, unused_restore_signals, unused_start_new_session): """Execute program (MS Windows version)""" assert not pass_fds, "pass_fds not supported on Windows." 
            # CreateProcess takes a single command-line string, not an
            # argv list, so flatten list arguments first.
            if not isinstance(args, str):
                args = list2cmdline(args)

            # Process startup details
            if startupinfo is None:
                startupinfo = STARTUPINFO()
            if -1 not in (p2cread, c2pwrite, errwrite):
                startupinfo.dwFlags |= _winapi.STARTF_USESTDHANDLES
                startupinfo.hStdInput = p2cread
                startupinfo.hStdOutput = c2pwrite
                startupinfo.hStdError = errwrite

            if shell:
                startupinfo.dwFlags |= _winapi.STARTF_USESHOWWINDOW
                startupinfo.wShowWindow = _winapi.SW_HIDE
                comspec = os.environ.get("COMSPEC", "cmd.exe")
                args = '{} /c "{}"'.format (comspec, args)

            # Start the process
            try:
                hp, ht, pid, tid = _winapi.CreateProcess(executable, args,
                                         # no special security
                                         None, None,
                                         int(not close_fds),
                                         creationflags,
                                         env,
                                         cwd,
                                         startupinfo)
            finally:
                # Child is launched. Close the parent's copy of those pipe
                # handles that only the child should have open.  You need
                # to make sure that no handles to the write end of the
                # output pipe are maintained in this process or else the
                # pipe will not close when the child process exits and the
                # ReadFile will hang.
                if p2cread != -1:
                    p2cread.Close()
                if c2pwrite != -1:
                    c2pwrite.Close()
                if errwrite != -1:
                    errwrite.Close()
                if hasattr(self, '_devnull'):
                    os.close(self._devnull)

            # Retain the process handle, but close the thread handle
            self._child_created = True
            self._handle = Handle(hp)
            self.pid = pid
            _winapi.CloseHandle(ht)

        def _internal_poll(self, _deadstate=None,
                _WaitForSingleObject=_winapi.WaitForSingleObject,
                _WAIT_OBJECT_0=_winapi.WAIT_OBJECT_0,
                _GetExitCodeProcess=_winapi.GetExitCodeProcess):
            """Check if child process has terminated.  Returns returncode
            attribute.

            This method is called by __del__, so it can only refer to objects
            in its local scope.
            """
            # The _winapi callables are bound as default arguments so they
            # remain reachable during interpreter shutdown (__del__).
            if self.returncode is None:
                if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0:
                    self.returncode = _GetExitCodeProcess(self._handle)
            return self.returncode

        def wait(self, timeout=None, endtime=None):
            """Wait for child process to terminate.  Returns returncode
            attribute."""
            # endtime (absolute deadline) takes precedence over timeout.
            if endtime is not None:
                timeout = self._remaining_time(endtime)
            if timeout is None:
                timeout_millis = _winapi.INFINITE
            else:
                timeout_millis = int(timeout * 1000)
            if self.returncode is None:
                result = _winapi.WaitForSingleObject(self._handle,
                                                     timeout_millis)
                if result == _winapi.WAIT_TIMEOUT:
                    raise TimeoutExpired(self.args, timeout)
                self.returncode = _winapi.GetExitCodeProcess(self._handle)
            return self.returncode

        def _readerthread(self, fh, buffer):
            """Drain fh completely into buffer; used as a thread target."""
            buffer.append(fh.read())
            fh.close()

        def _communicate(self, input, endtime, orig_timeout):
            # Start reader threads feeding into a list hanging off of this
            # object, unless they've already been started.
            if self.stdout and not hasattr(self, "_stdout_buff"):
                self._stdout_buff = []
                self.stdout_thread = \
                        threading.Thread(target=self._readerthread,
                                         args=(self.stdout, self._stdout_buff))
                self.stdout_thread.daemon = True
                self.stdout_thread.start()
            if self.stderr and not hasattr(self, "_stderr_buff"):
                self._stderr_buff = []
                self.stderr_thread = \
                        threading.Thread(target=self._readerthread,
                                         args=(self.stderr, self._stderr_buff))
                self.stderr_thread.daemon = True
                self.stderr_thread.start()

            if self.stdin:
                if input is not None:
                    try:
                        self.stdin.write(input)
                    except OSError as e:
                        if e.errno == errno.EPIPE:
                            # communicate() should ignore pipe full error
                            pass
                        elif (e.errno == errno.EINVAL
                              and self.poll() is not None):
                            # Issue #19612: stdin.write() fails with EINVAL
                            # if the process already exited before the write
                            pass
                        else:
                            raise
                self.stdin.close()

            # Wait for the reader threads, or time out.  If we time out, the
            # threads remain reading and the fds left open in case the user
            # calls communicate again.
            if self.stdout is not None:
                self.stdout_thread.join(self._remaining_time(endtime))
                if self.stdout_thread.is_alive():
                    raise TimeoutExpired(self.args, orig_timeout)
            if self.stderr is not None:
                self.stderr_thread.join(self._remaining_time(endtime))
                if self.stderr_thread.is_alive():
                    raise TimeoutExpired(self.args, orig_timeout)

            # Collect the output from and close both pipes, now that we know
            # both have been read successfully.
            stdout = None
            stderr = None
            if self.stdout:
                stdout = self._stdout_buff
                self.stdout.close()
            if self.stderr:
                stderr = self._stderr_buff
                self.stderr.close()

            # All data exchanged.  Translate lists into strings.
            # Each reader thread appended exactly one element (fh.read()).
            if stdout is not None:
                stdout = stdout[0]
            if stderr is not None:
                stderr = stderr[0]

            return (stdout, stderr)

        def send_signal(self, sig):
            """Send a signal to the process
            """
            # Windows has no generic kill(2); only these three are mapped.
            if sig == signal.SIGTERM:
                self.terminate()
            elif sig == signal.CTRL_C_EVENT:
                os.kill(self.pid, signal.CTRL_C_EVENT)
            elif sig == signal.CTRL_BREAK_EVENT:
                os.kill(self.pid, signal.CTRL_BREAK_EVENT)
            else:
                raise ValueError("Unsupported signal: {}".format(sig))

        def terminate(self):
            """Terminates the process
            """
            try:
                _winapi.TerminateProcess(self._handle, 1)
            except PermissionError:
                # ERROR_ACCESS_DENIED (winerror 5) is received when the
                # process already died.
                rc = _winapi.GetExitCodeProcess(self._handle)
                if rc == _winapi.STILL_ACTIVE:
                    raise
                self.returncode = rc

        # On Windows, kill() is an alias for terminate().
        kill = terminate

    else:
        #
        # POSIX methods
        #
        def _get_handles(self, stdin, stdout, stderr):
            """Construct and return tuple with IO objects:
            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite

            -1 means "not redirected; inherit the parent's fd".
            """
            p2cread, p2cwrite = -1, -1
            c2pread, c2pwrite = -1, -1
            errread, errwrite = -1, -1

            if stdin is None:
                pass
            elif stdin == PIPE:
                p2cread, p2cwrite = os.pipe()
            elif stdin == DEVNULL:
                p2cread = self._get_devnull()
            elif isinstance(stdin, int):
                p2cread = stdin
            else:
                # Assuming file-like object
                p2cread = stdin.fileno()

            if stdout is None:
                pass
            elif stdout == PIPE:
                c2pread, c2pwrite = os.pipe()
            elif stdout == DEVNULL:
                c2pwrite = self._get_devnull()
            elif isinstance(stdout, int):
                c2pwrite = stdout
            else:
                # Assuming file-like object
                c2pwrite = stdout.fileno()

            if stderr is None:
                pass
            elif stderr == PIPE:
                errread, errwrite = os.pipe()
            elif stderr == STDOUT:
                errwrite = c2pwrite
            elif stderr == DEVNULL:
                errwrite = self._get_devnull()
            elif isinstance(stderr, int):
                errwrite = stderr
            else:
                # Assuming file-like object
                errwrite = stderr.fileno()

            return (p2cread, p2cwrite,
                    c2pread, c2pwrite,
                    errread, errwrite)

        def _close_fds(self, fds_to_keep):
            """Close every fd >= 3 up to MAXFD except those in fds_to_keep."""
            start_fd = 3
            for fd in sorted(fds_to_keep):
                if fd >= start_fd:
                    os.closerange(start_fd, fd)
                    start_fd = fd + 1
            if start_fd <= MAXFD:
                os.closerange(start_fd, MAXFD)

        def _execute_child(self, args, executable, preexec_fn, close_fds,
                           pass_fds, cwd, env,
                           startupinfo, creationflags, shell,
                           p2cread, p2cwrite,
                           c2pread, c2pwrite,
                           errread, errwrite,
                           restore_signals, start_new_session):
            """Execute program (POSIX version)"""

            if isinstance(args, (str, bytes)):
                args = [args]
            else:
                args = list(args)

            if shell:
                args = ["/bin/sh", "-c"] + args
                if executable:
                    args[0] = executable

            if executable is None:
                executable = args[0]
            orig_executable = executable

            # For transferring possible exec failure from child to parent.
            # Data format: "exception name:hex errno:description"
            # Pickle is not used; it is complex and involves memory allocation.
            errpipe_read, errpipe_write = os.pipe()
            # errpipe_write must not be in the standard io 0, 1, or 2 fd range.
            low_fds_to_close = []
            while errpipe_write < 3:
                low_fds_to_close.append(errpipe_write)
                errpipe_write = os.dup(errpipe_write)
            for low_fd in low_fds_to_close:
                os.close(low_fd)
            try:
                try:
                    # We must avoid complex work that could involve
                    # malloc or free in the child process to avoid
                    # potential deadlocks, thus we do all this here.
                    # and pass it to fork_exec()

                    if env is not None:
                        env_list = [os.fsencode(k) + b'=' + os.fsencode(v)
                                    for k, v in env.items()]
                    else:
                        env_list = None  # Use execv instead of execve.
                    executable = os.fsencode(executable)
                    if os.path.dirname(executable):
                        executable_list = (executable,)
                    else:
                        # This matches the behavior of os._execvpe().
                        executable_list = tuple(
                            os.path.join(os.fsencode(dir), executable)
                            for dir in os.get_exec_path(env))
                    fds_to_keep = set(pass_fds)
                    fds_to_keep.add(errpipe_write)
                    self.pid = _posixsubprocess.fork_exec(
                            args, executable_list,
                            close_fds, sorted(fds_to_keep), cwd, env_list,
                            p2cread, p2cwrite, c2pread, c2pwrite,
                            errread, errwrite,
                            errpipe_read, errpipe_write,
                            restore_signals, start_new_session, preexec_fn)
                    self._child_created = True
                finally:
                    # be sure the FD is closed no matter what
                    os.close(errpipe_write)

                # self._devnull is not always defined.
                devnull_fd = getattr(self, '_devnull', None)
                # Close the parent's copies of the child-side pipe ends.
                if p2cread != -1 and p2cwrite != -1 and p2cread != devnull_fd:
                    os.close(p2cread)
                if c2pwrite != -1 and c2pread != -1 and c2pwrite != devnull_fd:
                    os.close(c2pwrite)
                if errwrite != -1 and errread != -1 and errwrite != devnull_fd:
                    os.close(errwrite)
                if devnull_fd is not None:
                    os.close(devnull_fd)
                # Prevent a double close of these fds from __init__ on error.
                self._closed_child_pipe_fds = True

                # Wait for exec to fail or succeed; possibly raising an
                # exception (limited in size).  EOF on the pipe (empty read)
                # means exec succeeded, because the pipe is close-on-exec.
                errpipe_data = bytearray()
                while True:
                    part = _eintr_retry_call(os.read, errpipe_read, 50000)
                    errpipe_data += part
                    if not part or len(errpipe_data) > 50000:
                        break
            finally:
                # be sure the FD is closed no matter what
                os.close(errpipe_read)

            if errpipe_data:
                try:
                    _eintr_retry_call(os.waitpid, self.pid, 0)
                except OSError as e:
                    if e.errno != errno.ECHILD:
                        raise
                try:
                    exception_name, hex_errno, err_msg = (
                            errpipe_data.split(b':', 2))
                except ValueError:
                    exception_name = b'SubprocessError'
                    hex_errno = b'0'
                    err_msg = (b'Bad exception data from child: ' +
                               repr(errpipe_data))
                child_exception_type = getattr(
                        builtins, exception_name.decode('ascii'),
                        SubprocessError)
                err_msg = err_msg.decode(errors="surrogatepass")
                if issubclass(child_exception_type, OSError) and hex_errno:
                    errno_num = int(hex_errno, 16)
                    child_exec_never_called = (err_msg == "noexec")
                    if child_exec_never_called:
                        err_msg = ""
                    if errno_num != 0:
                        err_msg = os.strerror(errno_num)
                        if errno_num == errno.ENOENT:
                            if child_exec_never_called:
                                # The error must be from chdir(cwd).
                                err_msg += ': ' + repr(cwd)
                            else:
                                err_msg += ': ' + repr(orig_executable)
                    raise child_exception_type(errno_num, err_msg)
                raise child_exception_type(err_msg)

        def _handle_exitstatus(self, sts, _WIFSIGNALED=os.WIFSIGNALED,
                _WTERMSIG=os.WTERMSIG, _WIFEXITED=os.WIFEXITED,
                _WEXITSTATUS=os.WEXITSTATUS):
            """All callers to this function MUST hold self._waitpid_lock."""
            # This method is called (indirectly) by __del__, so it cannot
            # refer to anything outside of its local scope.
            if _WIFSIGNALED(sts):
                # Killed by a signal: negative signal number, by convention.
                self.returncode = -_WTERMSIG(sts)
            elif _WIFEXITED(sts):
                self.returncode = _WEXITSTATUS(sts)
            else:
                # Should never happen
                raise SubprocessError("Unknown child exit status!")

        def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
                _WNOHANG=os.WNOHANG, _ECHILD=errno.ECHILD):
            """Check if child process has terminated.  Returns returncode
            attribute.

            This method is called by __del__, so it cannot reference anything
            outside of the local scope (nor can any methods it calls).
            """
            if self.returncode is None:
                if not self._waitpid_lock.acquire(False):
                    # Something else is busy calling waitpid.  Don't allow two
                    # at once.  We know nothing yet.
                    return None
                try:
                    if self.returncode is not None:
                        return self.returncode  # Another thread waited.
                    pid, sts = _waitpid(self.pid, _WNOHANG)
                    if pid == self.pid:
                        self._handle_exitstatus(sts)
                except OSError as e:
                    if _deadstate is not None:
                        self.returncode = _deadstate
                    elif e.errno == _ECHILD:
                        # This happens if SIGCLD is set to be ignored or
                        # waiting for child processes has otherwise been
                        # disabled for our process.  This child is dead, we
                        # can't get the status.
                        # http://bugs.python.org/issue15756
                        self.returncode = 0
                finally:
                    self._waitpid_lock.release()
            return self.returncode

        def _try_wait(self, wait_flags):
            """All callers to this function MUST hold self._waitpid_lock."""
            try:
                (pid, sts) = _eintr_retry_call(os.waitpid, self.pid, wait_flags)
            except OSError as e:
                if e.errno != errno.ECHILD:
                    raise
                # This happens if SIGCLD is set to be ignored or waiting
                # for child processes has otherwise been disabled for our
                # process.  This child is dead, we can't get the status.
                pid = self.pid
                sts = 0
            return (pid, sts)

        def wait(self, timeout=None, endtime=None):
            """Wait for child process to terminate.  Returns returncode
            attribute."""
            if self.returncode is not None:
                return self.returncode

            # endtime is preferred to timeout.  timeout is only used for
            # printing.
            if endtime is not None or timeout is not None:
                if endtime is None:
                    endtime = _time() + timeout
                elif timeout is None:
                    timeout = self._remaining_time(endtime)

            if endtime is not None:
                # Enter a busy loop if we have a timeout.  This busy loop was
                # cribbed from Lib/threading.py in Thread.wait() at r71065.
                delay = 0.0005 # 500 us -> initial delay of 1 ms
                while True:
                    if self._waitpid_lock.acquire(False):
                        try:
                            if self.returncode is not None:
                                break  # Another thread waited.
                            (pid, sts) = self._try_wait(os.WNOHANG)
                            assert pid == self.pid or pid == 0
                            if pid == self.pid:
                                self._handle_exitstatus(sts)
                                break
                        finally:
                            self._waitpid_lock.release()
                    remaining = self._remaining_time(endtime)
                    if remaining <= 0:
                        raise TimeoutExpired(self.args, timeout)
                    # Exponential backoff capped at 50 ms.
                    delay = min(delay * 2, remaining, .05)
                    time.sleep(delay)
            else:
                while self.returncode is None:
                    with self._waitpid_lock:
                        if self.returncode is not None:
                            break  # Another thread waited.
                        (pid, sts) = self._try_wait(0)
                        # Check the pid and loop as waitpid has been known to
                        # return 0 even without WNOHANG in odd situations.
                        # http://bugs.python.org/issue14396.
                        if pid == self.pid:
                            self._handle_exitstatus(sts)
            return self.returncode

        def _communicate(self, input, endtime, orig_timeout):
            if self.stdin and not self._communication_started:
                # Flush stdio buffer.  This might block, if the user has
                # been writing to .stdin in an uncontrolled fashion.
                self.stdin.flush()
                if not input:
                    self.stdin.close()

            stdout = None
            stderr = None

            # Only create this mapping if we haven't already.
            if not self._communication_started:
                self._fileobj2output = {}
                if self.stdout:
                    self._fileobj2output[self.stdout] = []
                if self.stderr:
                    self._fileobj2output[self.stderr] = []

            if self.stdout:
                stdout = self._fileobj2output[self.stdout]
            if self.stderr:
                stderr = self._fileobj2output[self.stderr]

            self._save_input(input)

            if self._input:
                input_view = memoryview(self._input)

            with _PopenSelector() as selector:
                if self.stdin and input:
                    selector.register(self.stdin, selectors.EVENT_WRITE)
                if self.stdout:
                    selector.register(self.stdout, selectors.EVENT_READ)
                if self.stderr:
                    selector.register(self.stderr, selectors.EVENT_READ)

                while selector.get_map():
                    timeout = self._remaining_time(endtime)
                    if timeout is not None and timeout < 0:
                        raise TimeoutExpired(self.args, orig_timeout)

                    ready = selector.select(timeout)
                    self._check_timeout(endtime, orig_timeout)

                    # XXX Rewrite these to use non-blocking I/O on the file
                    # objects; they are no longer using C stdio!

                    for key, events in ready:
                        if key.fileobj is self.stdin:
                            # Write at most a pipe-buffer's worth so the
                            # write cannot block.
                            chunk = input_view[self._input_offset :
                                               self._input_offset + _PIPE_BUF]
                            try:
                                self._input_offset += os.write(key.fd, chunk)
                            except OSError as e:
                                if e.errno == errno.EPIPE:
                                    selector.unregister(key.fileobj)
                                    key.fileobj.close()
                                else:
                                    raise
                            else:
                                if self._input_offset >= len(self._input):
                                    selector.unregister(key.fileobj)
                                    key.fileobj.close()
                        elif key.fileobj in (self.stdout, self.stderr):
                            data = os.read(key.fd, 32768)
                            if not data:
                                selector.unregister(key.fileobj)
                                key.fileobj.close()
                            self._fileobj2output[key.fileobj].append(data)

            self.wait(timeout=self._remaining_time(endtime))

            # All data exchanged.  Translate lists into strings.
            if stdout is not None:
                stdout = b''.join(stdout)
            if stderr is not None:
                stderr = b''.join(stderr)

            # Translate newlines, if requested.
            # This also turns bytes into strings.
            if self.universal_newlines:
                if stdout is not None:
                    stdout = self._translate_newlines(stdout,
                                                      self.stdout.encoding)
                if stderr is not None:
                    stderr = self._translate_newlines(stderr,
                                                      self.stderr.encoding)

            return (stdout, stderr)

        def _save_input(self, input):
            # This method is called from the _communicate_with_*() methods
            # so that if we time out while communicating, we can continue
            # sending input if we retry.
            if self.stdin and self._input is None:
                self._input_offset = 0
                self._input = input
                if self.universal_newlines and input is not None:
                    self._input = self._input.encode(self.stdin.encoding)

        def send_signal(self, sig):
            """Send a signal to the process
            """
            os.kill(self.pid, sig)

        def terminate(self):
            """Terminate the process with SIGTERM
            """
            self.send_signal(signal.SIGTERM)

        def kill(self):
            """Kill the process with SIGKILL
            """
            self.send_signal(signal.SIGKILL)
gpl-3.0
sve-odoo/odoo
addons/account_check_writing/wizard/__init__.py
437
1082
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import account_check_batch_printing # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
annarev/tensorflow
tensorflow/python/compiler/tensorrt/test/testdata/gen_tftrt_model.py
12
4892
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Saves a SavedModel after TensorRT conversion.

The saved model is loaded and executed by tests to ensure backward
compatibility across TF versions.
The script may not work in TF1.x.

Instructions on how to use this script:
- Execute the script as follows:
    python gen_tftrt_model
- Rename tftrt_saved_model to what makes sense for your test.
- Delete directory tf_saved_model unless you want to use it.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# NOTE(review): most TF code imports Session via
# tensorflow.python.client.session; confirm this shorter alias resolves in
# the TF build this script targets.
from tensorflow.python import Session
from tensorflow.python.compiler.tensorrt import trt_convert
from tensorflow.python.eager import def_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.saved_model import builder
from tensorflow.python.saved_model import save
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import signature_def_utils
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.saved_model import utils
from tensorflow.python.training.tracking import tracking


def GetGraph(input1, input2, var):
  """Define graph.

  Builds the small arithmetic graph shared by the V1 and V2 model
  generators: ((input1 * (input1 + var)) + (input1 + var)) + input2,
  with the result exposed under the tensor name "output".
  """
  add = input1 + var
  mul = input1 * add
  add = mul + add
  add = add + input2
  out = array_ops.identity(add, name="output")
  return out


def GenerateModelV2(tf_saved_model_dir, tftrt_saved_model_dir):
  """Generate and convert a model using TFv2 API."""

  class SimpleModel(tracking.AutoTrackable):
    """Define model with a TF function."""

    def __init__(self):
      # The variable is created lazily on the first trace of run().
      self.v = None

    @def_function.function(input_signature=[
        tensor_spec.TensorSpec(shape=[None, 1, 1], dtype=dtypes.float32),
        tensor_spec.TensorSpec(shape=[None, 1, 1], dtype=dtypes.float32)
    ])
    def run(self, input1, input2):
      if self.v is None:
        self.v = variables.Variable([[[1.0]]], dtype=dtypes.float32)
      return GetGraph(input1, input2, self.v)

  root = SimpleModel()

  # Saved TF model
  save(root, tf_saved_model_dir,
       {signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: root.run})

  # Convert TF model to TensorRT
  converter = trt_convert.TrtGraphConverterV2(
      input_saved_model_dir=tf_saved_model_dir)
  converter.convert()
  converter.save(tftrt_saved_model_dir)


def GenerateModelV1(tf_saved_model_dir, tftrt_saved_model_dir):
  """Generate and convert a model using TFv1 API."""

  def SimpleModel():
    """Define model with a TF graph."""

    def GraphFn():
      input1 = array_ops.placeholder(
          dtype=dtypes.float32, shape=[None, 1, 1], name="input1")
      input2 = array_ops.placeholder(
          dtype=dtypes.float32, shape=[None, 1, 1], name="input2")
      var = variables.Variable([[[1.0]]], dtype=dtypes.float32, name="v1")
      out = GetGraph(input1, input2, var)
      # g is the enclosing Graph object, captured from SimpleModel's scope.
      return g, var, input1, input2, out

    g = ops.Graph()
    with g.as_default():
      return GraphFn()

  g, var, input1, input2, out = SimpleModel()
  signature_def = signature_def_utils.build_signature_def(
      inputs={
          "input1": utils.build_tensor_info(input1),
          "input2": utils.build_tensor_info(input2)
      },
      outputs={"output": utils.build_tensor_info(out)},
      method_name=signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY)
  saved_model_builder = builder.SavedModelBuilder(tf_saved_model_dir)
  with Session(graph=g) as sess:
    sess.run(var.initializer)
    saved_model_builder.add_meta_graph_and_variables(
        sess, [tag_constants.SERVING],
        signature_def_map={
            signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                signature_def
        })
  saved_model_builder.save()

  # Convert TF model to TensorRT
  converter = trt_convert.TrtGraphConverter(
      input_saved_model_dir=tf_saved_model_dir, is_dynamic_op=True)
  converter.convert()
  converter.save(tftrt_saved_model_dir)


if __name__ == "__main__":
  GenerateModelV2(
      tf_saved_model_dir="tf_saved_model",
      tftrt_saved_model_dir="tftrt_saved_model")
apache-2.0
PsychoTV/PsychoTeam.repository
plugin.video.SportsDevil/lib/utils/beta/t0mm0/common/addon.py
17
26782
''' common XBMC Module Copyright (C) 2011 t0mm0 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import re import os try: import cPickle as pickle except: import pickle import urllib import urlparse import xbmc import xbmcaddon import xbmcgui import xbmcplugin class Addon: ''' This class provides a lot of code that is used across many XBMC addons in the hope that it will simplify some of the common tasks an addon needs to perform. Mostly this is achieved by providing a wrapper around commonly used parts of :mod:`xbmc`, :mod:`xbmcaddon`, :mod:`xbmcgui` and :mod:`xbmcplugin`. You probably want to have exactly one instance of this class in your addon which you can call from anywhere in your code. Example:: import sys from t0mm0.common.addon import Addon addon = Addon('my.plugin.id', argv=sys.argv) ''' def __init__(self, addon_id, argv=None): ''' Args: addon_id (str): Your addon's id (eg. 'plugin.video.t0mm0.test'). Kwargs: argv (list): List of arguments passed to your addon if applicable (eg. sys.argv). 
''' self.addon = xbmcaddon.Addon(id=addon_id) if argv: self.url = argv[0] self.handle = int(argv[1]) self.queries = self.parse_query(argv[2][1:]) def get_author(self): '''Returns the addon author as defined in ``addon.xml``.''' return self.addon.getAddonInfo('author') def get_changelog(self): '''Returns the addon changelog.''' return self.addon.getAddonInfo('changelog') def get_description(self): '''Returns the addon description as defined in ``addon.xml``.''' return self.addon.getAddonInfo('description') def get_disclaimer(self): '''Returns the addon disclaimer as defined in ``addon.xml``.''' return self.addon.getAddonInfo('disclaimer') def get_fanart(self): '''Returns the full path to the addon fanart.''' return self.addon.getAddonInfo('fanart') def get_icon(self): '''Returns the full path to the addon icon.''' return self.addon.getAddonInfo('icon') def get_id(self): '''Returns the addon id as defined in ``addon.xml``.''' return self.addon.getAddonInfo('id') def get_name(self): '''Returns the addon name as defined in ``addon.xml``.''' return self.addon.getAddonInfo('name') def get_path(self): '''Returns the full path to the addon directory.''' return self.addon.getAddonInfo('path') def get_profile(self): ''' Returns the full path to the addon profile directory (useful for storing files needed by the addon such as cookies). ''' return xbmc.translatePath(self.addon.getAddonInfo('profile')) def get_stars(self): '''Returns the number of stars for this addon.''' return self.addon.getAddonInfo('stars') def get_summary(self): '''Returns the addon summary as defined in ``addon.xml``.''' return self.addon.getAddonInfo('summary') def get_type(self): ''' Returns the addon summary as defined in ``addon.xml`` (eg. xbmc.python.pluginsource). ''' return self.addon.getAddonInfo('type') def get_version(self): '''Returns the addon version as defined in ``addon.xml``.''' return self.addon.getAddonInfo('version') def get_setting(self, setting): ''' Returns an addon setting. 
Settings must be defined in your addon's ``resources/settings.xml`` file. Args: setting (str): Name of the setting to be retrieved. Returns: str containing the requested setting. ''' return self.addon.getSetting(setting) def get_string(self, string_id): ''' Returns a localized string. Strings must be defined in your addon's ``resources/language/[lang_name]/strings.xml`` file. Args: string_id (int): id of the translated string to retrieve. Returns: str containing the localized requested string. ''' return self.addon.getLocalizedString(string_id) def parse_query(self, query, defaults={'mode': 'main'}): ''' Parse a query string as used in a URL or passed to your addon by XBMC. Example: >>> addon.parse_query('name=test&type=basic') {'mode': 'main', 'name': 'test', 'type': 'basic'} Args: query (str): A query string. Kwargs: defaults (dict): A dictionary containing key/value pairs parsed from the query string. If a key is repeated in the query string its value will be a list containing all of that keys values. ''' queries = urlparse.parse_qs(query) q = defaults for key, value in queries.items(): if len(value) == 1: q[key] = value[0] else: q[key] = value return q def build_plugin_url(self, queries): ''' Returns a ``plugin://`` URL which can be used to call the addon with the specified queries. Example: >>> addon.build_plugin_url({'name': 'test', 'type': 'basic'}) 'plugin://your.plugin.id/?name=test&type=basic' Args: queries (dict): A dctionary of keys/values to be added to the ``plugin://`` URL. Retuns: A string containing a fully formed ``plugin://`` URL. ''' out_dict = {} for k, v in queries.iteritems(): if isinstance(v, unicode): v = v.encode('utf8') elif isinstance(v, str): # Must be encoded in UTF-8 v.decode('utf8') out_dict[k] = v return self.url + '?' + urllib.urlencode(out_dict) def log(self, msg, level=xbmc.LOGDEBUG): ''' Writes a string to the XBMC log file. 
The addon name is inserted into the beginning of the message automatically to help you find relevent messages in the log file. The available log levels are defined in the :mod:`xbmc` module and are currently as follows:: xbmc.LOGDEBUG = 0 xbmc.LOGERROR = 4 xbmc.LOGFATAL = 6 xbmc.LOGINFO = 1 xbmc.LOGNONE = 7 xbmc.LOGNOTICE = 2 xbmc.LOGSEVERE = 5 xbmc.LOGWARNING = 3 Args: msg (str or unicode): The message to be written to the log file. Kwargs: level (int): The XBMC log level to write at. ''' #msg = unicodedata.normalize('NFKD', unicode(msg)).encode('ascii', # 'ignore') xbmc.log('%s: %s' % (self.get_name(), msg), level) def log_error(self, msg): ''' Convenience method to write to the XBMC log file at the ``xbmc.LOGERROR`` error level. Use when something has gone wrong in your addon code. This will show up in the log prefixed with 'ERROR:' whether you have debugging switched on or not. ''' self.log(msg, xbmc.LOGERROR) def log_debug(self, msg): ''' Convenience method to write to the XBMC log file at the ``xbmc.LOGDEBUG`` error level. Use this when you want to print out lots of detailed information that is only usefull for debugging. This will show up in the log only when debugging is enabled in the XBMC settings, and will be prefixed with 'DEBUG:'. ''' self.log(msg, xbmc.LOGDEBUG) def log_notice(self, msg): ''' Convenience method to write to the XBMC log file at the ``xbmc.LOGNOTICE`` error level. Use for general log messages. This will show up in the log prefixed with 'NOTICE:' whether you have debugging switched on or not. ''' self.log(msg, xbmc.LOGNOTICE) def show_ok_dialog(self, msg, title=None, is_error=False): ''' Display an XBMC dialog with a message and a single 'OK' button. The message is also written to the XBMC log file at the appropriate log level. .. warning:: Don't forget that `msg` must be a list of strings and not just a string even if you only want to display a single line! 
Example:: addon.show_ok_dialog(['My message'], 'My Addon') Args: msg (list of strings): The message to be displayed in the dialog. Only the first 3 list items will be displayed. Kwargs: title (str): String to be displayed as the title of the dialog box. Defaults to the addon name. is_error (bool): If ``True``, the log message will be written at the ERROR log level, otherwise NOTICE will be used. ''' if not title: title = self.get_name() log_msg = ' '.join(msg) while len(msg) < 3: msg.append('') if is_error: self.log_error(log_msg) else: self.log_notice(log_msg) xbmcgui.Dialog().ok(title, msg[0], msg[1], msg[2]) def show_error_dialog(self, msg): ''' Convenience method to show an XBMC dialog box with a single OK button and also write the message to the log file at the ERROR log level. The title of the dialog will be the addon's name with the prefix 'Error: '. .. warning:: Don't forget that `msg` must be a list of strings and not just a string even if you only want to display a single line! Args: msg (list of strings): The message to be displayed in the dialog. Only the first 3 list items will be displayed. ''' self.show_ok_dialog(msg, 'Error: %s' % self.get_name(), True) def show_small_popup(self, title='', msg='', delay=5000, image=''): ''' Displays a small popup box in the lower right corner. The default delay is 5 seconds. Code inspired by anarchintosh and daledude's Icefilms addon. Example:: import os logo = os.path.join(addon.get_path(), 'art','logo.jpg') addon.show_small_popup('MyAddonName','Is now loaded enjoy', 5000, logo) Kwargs: title (str): title to be displayed at the top of the box msg (str): Main message body delay (int): delay in milliseconds until it disapears image (str): Path to the image you want to display ''' xbmc.executebuiltin('XBMC.Notification("%s","%s",%d,"%s")' % (title, msg, delay, image)) def show_countdown(self, time_to_wait, title='', text=''): ''' Show a countdown dialog with a progress bar for XBMC while delaying execution. 
Necessary for some filehosters eg. megaupload The original version of this code came from Anarchintosh. Args: time_to_wait (int): number of seconds to pause for. Kwargs: title (str): Displayed in the title of the countdown dialog. Default is blank. text (str): A line of text to be displayed in the dialog. Default is blank. Returns: ``True`` if countdown is allowed to complete, ``False`` if the user cancelled the countdown. ''' dialog = xbmcgui.DialogProgress() dialog.create(title) self.log_notice('waiting %d secs' % time_to_wait) secs = 0 increment = 100 / time_to_wait cancelled = False while secs <= time_to_wait: if (dialog.iscanceled()): cancelled = True break if secs != 0: xbmc.sleep(1000) secs_left = time_to_wait - secs if secs_left == 0: percent = 100 else: percent = increment * secs remaining_display = ('Wait %d seconds for the ' + 'video stream to activate...') % secs_left dialog.update(percent, text, remaining_display) secs += 1 if cancelled == True: self.log_notice('countdown cancelled') return False else: self.log_debug('countdown finished waiting') return True def show_settings(self): '''Shows the settings dialog for this addon.''' self.addon.openSettings() def resolve_url(self, stream_url): ''' Tell XBMC that you have resolved a URL (or not!). This method should be called as follows: #. The user selects a list item that has previously had ``isPlayable`` set (this is true for items added with :meth:`add_item`, :meth:`add_music_item` or :meth:`add_music_item`) #. Your code resolves the item requested by the user to a media URL #. Your addon calls this method with the resolved URL Args: stream_url (str or ``False``): If a string, tell XBMC that the media URL ha been successfully resolved to stream_url. If ``False`` or an empty string tell XBMC the resolving failed and pop up an error messsage. 
''' if stream_url: self.log_debug('resolved to: %s' % stream_url) xbmcplugin.setResolvedUrl(self.handle, True, xbmcgui.ListItem(path=stream_url)) else: self.show_error_dialog(['sorry, failed to resolve URL :(']) xbmcplugin.setResolvedUrl(self.handle, False, xbmcgui.ListItem()) def get_playlist(self, pl_type, new=False): ''' Return a :class:`xbmc.Playlist` object of the specified type. The available playlist types are defined in the :mod:`xbmc` module and are currently as follows:: xbmc.PLAYLIST_MUSIC = 0 xbmc.PLAYLIST_VIDEO = 1 .. seealso:: :meth:`get_music_playlist`, :meth:`get_video_playlist` Args: pl_type (int): The type of playlist to get. new (bool): If ``False`` (default), get the current :class:`xbmc.Playlist` object of the type specified. If ``True`` then return a new blank :class:`xbmc.Playlist`. Returns: A :class:`xbmc.Playlist` object. ''' pl = xbmc.PlayList(pl_type) if new: pl.clear() return pl def get_music_playlist(self, new=False): ''' Convenience method to return a music :class:`xbmc.Playlist` object. .. seealso:: :meth:`get_playlist` Kwargs: new (bool): If ``False`` (default), get the current music :class:`xbmc.Playlist` object. If ``True`` then return a new blank music :class:`xbmc.Playlist`. Returns: A :class:`xbmc.Playlist` object. ''' self.get_playlist(xbmc.PLAYLIST_MUSIC, new) def get_video_playlist(self, new=False): ''' Convenience method to return a video :class:`xbmc.Playlist` object. .. seealso:: :meth:`get_playlist` Kwargs: new (bool): If ``False`` (default), get the current video :class:`xbmc.Playlist` object. If ``True`` then return a new blank video :class:`xbmc.Playlist`. Returns: A :class:`xbmc.Playlist` object. ''' self.get_playlist(xbmc.PLAYLIST_VIDEO, new) def add_item(self, queries, infolabels, contextmenu_items='', context_replace=False, img='', fanart='', resolved=False, total_items=0, playlist=False, item_type='video', is_folder=False): ''' Adds an item to the list of entries to be displayed in XBMC or to a playlist. 
Use this method when you want users to be able to select this item to start playback of a media file. ``queries`` is a dict that will be sent back to the addon when this item is selected:: add_item({'host': 'youtube.com', 'media_id': 'ABC123XYZ'}, {'title': 'A youtube vid'}) will add a link to:: plugin://your.plugin.id/?host=youtube.com&media_id=ABC123XYZ .. seealso:: :meth:`add_music_item`, :meth:`add_video_item`, :meth:`add_directory` Args: queries (dict): A set of keys/values to be sent to the addon when the user selects this item. infolabels (dict): A dictionary of information about this media (see the `XBMC Wiki InfoLabels entry <http://wiki.xbmc.org/?title=InfoLabels>`_). Kwargs: contextmenu_items (list): A list of contextmenu items context_replace (bool): To replace the xbmc default contextmenu items img (str): A URL to an image file to be used as an icon for this entry. fanart (str): A URL to a fanart image for this entry. resolved (str): If not empty, ``queries`` will be ignored and instead the added item will be the exact contentes of ``resolved``. total_items (int): Total number of items to be added in this list. If supplied it enables XBMC to show a progress bar as the list of items is being built. playlist (playlist object): If ``False`` (default), the item will be added to the list of entries to be displayed in this directory. If a playlist object is passed (see :meth:`get_playlist`) then the item will be added to the playlist instead item_type (str): The type of item to add (eg. 
'music', 'video' or 'pictures') ''' infolabels = self.unescape_dict(infolabels) if not resolved: if not is_folder: queries['play'] = 'True' play = self.build_plugin_url(queries) else: play = resolved listitem = xbmcgui.ListItem(infolabels['title']) listitem.setInfo(item_type, infolabels) listitem.setProperty('IsPlayable', 'true') listitem.setProperty('fanart_image', fanart) try: listitem.setArt({'thumb': img}) except: listitem.setThumbnailImage(img) self.log_debug('t0mm0-addon.py: setThumbnailImage is deprecated') if contextmenu_items: listitem.addContextMenuItems(contextmenu_items, replaceItems=context_replace) if playlist is not False: self.log_debug('adding item: %s - %s to playlist' % \ (infolabels['title'], play)) playlist.add(play, listitem) else: self.log_debug('adding item: %s - %s' % (infolabels['title'], play)) xbmcplugin.addDirectoryItem(self.handle, play, listitem, isFolder=is_folder, totalItems=total_items) def add_video_item(self, queries, infolabels, contextmenu_items='', context_replace=False, img='', fanart='', resolved=False, total_items=0, playlist=False): ''' Convenience method to add a video item to the directory list or a playlist. See :meth:`add_item` for full infomation ''' self.add_item(queries, infolabels, contextmenu_items, context_replace, img, fanart, resolved, total_items, playlist, item_type='video') def add_music_item(self, queries, infolabels, contextmenu_items='', context_replace=False, img='', fanart='', resolved=False, total_items=0, playlist=False): ''' Convenience method to add a music item to the directory list or a playlist. See :meth:`add_item` for full infomation ''' self.add_item(queries, infolabels, contextmenu_items, img, context_replace, fanart, resolved, total_items, playlist, item_type='music') def add_directory(self, queries, infolabels, contextmenu_items='', context_replace=False, img='', fanart='', total_items=0, is_folder=True): ''' Convenience method to add a directory to the display list or a playlist. 
See :meth:`add_item` for full infomation ''' self.add_item(queries, infolabels, contextmenu_items, context_replace, img, fanart, total_items=total_items, resolved=self.build_plugin_url(queries), is_folder=is_folder) def end_of_directory(self): '''Tell XBMC that we have finished adding items to this directory.''' xbmcplugin.endOfDirectory(self.handle) def _decode_callback(self, matches): '''Callback method used by :meth:`decode`.''' _id = matches.group(1) try: return unichr(int(_id)) except: return _id def decode(self, data): ''' Regular expression to convert entities such as ``&#044`` to the correct characters. It is called by :meth:`unescape` and so it is not required to call it directly. This method was found `on the web <http://stackoverflow.com/questions/1208916/decoding-html-entities-with-python/1208931#1208931>`_ Args: data (str): String to be cleaned. Returns: Cleaned string. ''' return re.sub("&#(\d+)(;|(?=\s))", self._decode_callback, data).strip() def unescape(self, text): ''' Decodes HTML entities in a string. You can add more entities to the ``rep`` dictionary. Args: text (str): String to be cleaned. Returns: Cleaned string. ''' try: text = self.decode(text) rep = {'&lt;': '<', '&gt;': '>', '&quot': '"', '&rsquo;': '\'', '&acute;': '\'', } for s, r in rep.items(): text = text.replace(s, r) # this has to be last: text = text.replace("&amp;", "&") #we don't want to fiddle with non-string types except TypeError: pass return text def unescape_dict(self, d): ''' Calls :meth:`unescape` on all values in a dictionary. Args: d (dict): A dictionary containing string values Returns: A dictionary with HTML entities removed from the values. ''' out = {} for key, value in d.items(): out[key] = self.unescape(value) return out def save_data(self, filename, data): ''' Saves the data structure using pickle. If the addon data path does not exist it will be automatically created. This save function has the same restrictions as the pickle module. 
Args: filename (string): name of the file you want to save data to. This file will be saved in your addon's profile directory. data (data object/string): you want to save. Returns: True on success False on failure ''' profile_path = self.get_profile() try: os.makedirs(profile_path) except: pass save_path = os.path.join(profile_path, filename) try: pickle.dump(data, open(save_path, 'wb')) return True except pickle.PickleError: return False def load_data(self,filename): ''' Load the data that was saved with save_data() and returns the data structure. Args: filename (string): Name of the file you want to load data from. This file will be loaded from your addons profile directory. Returns: Data stucture on success False on failure ''' profile_path = self.get_profile() load_path = os.path.join(profile_path, filename) print profile_path if not os.path.isfile(load_path): self.log_debug('%s does not exist' % load_path) return False try: data = pickle.load(open(load_path)) except: return False return data
gpl-2.0
zentralopensource/zentral
zentral/utils/rison.py
1
2938
# from https://github.com/pifantastic/python-rison # encode a json payload in rison # used in kibana urls import re IDCHAR_PUNCTUATION = '_-./~' NOT_IDCHAR = ''.join([c for c in (chr(i) for i in range(127)) if not (c.isalnum() or c in IDCHAR_PUNCTUATION)]) # Additionally, we need to distinguish ids and numbers by first char. NOT_IDSTART = '-0123456789' # Regexp string matching a valid id. IDRX = ('[^' + NOT_IDSTART + NOT_IDCHAR + '][^' + NOT_IDCHAR + ']*') # Regexp to check for valid rison ids. ID_OK_RE = re.compile('^' + IDRX + '$', re.M) class Encoder(object): def __init__(self): pass @staticmethod def encoder(v): if isinstance(v, list): return Encoder.list elif isinstance(v, str): return Encoder.string elif isinstance(v, bool): return Encoder.bool elif isinstance(v, (float, int)): return Encoder.number elif isinstance(v, type(None)): return Encoder.none elif isinstance(v, dict): return Encoder.dict else: raise AssertionError('Unable to encode type: {0}'.format(type(v))) @staticmethod def encode(v): encoder = Encoder.encoder(v) return encoder(v) @staticmethod def list(x): a = ['!('] b = None for i in range(len(x)): v = x[i] f = Encoder.encoder(v) if f: v = f(v) if isinstance(v, str): if b: a.append(',') a.append(v) b = True a.append(')') return ''.join(a) @staticmethod def number(v): return str(v).replace('+', '') @staticmethod def none(_): return '!n' @staticmethod def bool(v): return '!t' if v else '!f' @staticmethod def string(v): if v == '': return "''" if ID_OK_RE.match(v): return v def replace(match): if match.group(0) in ["'", '!']: return '!' 
+ match.group(0) return match.group(0) v = re.sub(r'([\'!])', replace, v) return "'" + v + "'" @staticmethod def dict(x): a = ['('] b = None ks = sorted(x.keys()) for i in ks: v = x[i] f = Encoder.encoder(v) if f: v = f(v) if isinstance(v, str): if b: a.append(',') a.append(Encoder.string(i)) a.append(':') a.append(v) b = True a.append(')') return ''.join(a) def dumps(o): if not isinstance(o, (dict, list)) or o is None: raise TypeError('object must be a dict a list or None') return Encoder.encode(o)
apache-2.0
CUFCTL/DLBD
face-detection-code/object_detection/models/ssd_feature_extractor_test.py
2
4864
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Base test class SSDFeatureExtractors.""" from abc import abstractmethod import itertools import numpy as np import tensorflow as tf from object_detection.utils import test_case class SsdFeatureExtractorTestBase(test_case.TestCase): @abstractmethod def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, use_explicit_padding=False): """Constructs a new feature extractor. Args: depth_multiplier: float depth multiplier for feature extractor pad_to_multiple: the nearest multiple to zero pad the input height and width dimensions to. use_explicit_padding: use 'VALID' padding for convolutions, but prepad inputs so that the output dimensions are the same as if 'SAME' padding were used. Returns: an ssd_meta_arch.SSDFeatureExtractor object. 
""" pass def check_extract_features_returns_correct_shape( self, batch_size, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shapes, use_explicit_padding=False): def graph_fn(image_tensor): feature_extractor = self._create_feature_extractor(depth_multiplier, pad_to_multiple, use_explicit_padding) feature_maps = feature_extractor.extract_features(image_tensor) return feature_maps image_tensor = np.random.rand(batch_size, image_height, image_width, 3).astype(np.float32) feature_maps = self.execute(graph_fn, [image_tensor]) for feature_map, expected_shape in itertools.izip( feature_maps, expected_feature_map_shapes): self.assertAllEqual(feature_map.shape, expected_shape) def check_extract_features_returns_correct_shapes_with_dynamic_inputs( self, batch_size, image_height, image_width, depth_multiplier, pad_to_multiple, expected_feature_map_shapes, use_explicit_padding=False): def graph_fn(image_height, image_width): feature_extractor = self._create_feature_extractor(depth_multiplier, pad_to_multiple, use_explicit_padding) image_tensor = tf.random_uniform([batch_size, image_height, image_width, 3], dtype=tf.float32) feature_maps = feature_extractor.extract_features(image_tensor) return feature_maps feature_maps = self.execute_cpu(graph_fn, [ np.array(image_height, dtype=np.int32), np.array(image_width, dtype=np.int32) ]) for feature_map, expected_shape in itertools.izip( feature_maps, expected_feature_map_shapes): self.assertAllEqual(feature_map.shape, expected_shape) def check_extract_features_raises_error_with_invalid_image_size( self, image_height, image_width, depth_multiplier, pad_to_multiple): feature_extractor = self._create_feature_extractor(depth_multiplier, pad_to_multiple) preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3)) feature_maps = feature_extractor.extract_features(preprocessed_inputs) test_preprocessed_image = np.random.rand(4, image_height, image_width, 3) with self.test_session() as sess: 
sess.run(tf.global_variables_initializer()) with self.assertRaises(tf.errors.InvalidArgumentError): sess.run(feature_maps, feed_dict={preprocessed_inputs: test_preprocessed_image}) def check_feature_extractor_variables_under_scope( self, depth_multiplier, pad_to_multiple, scope_name): g = tf.Graph() with g.as_default(): feature_extractor = self._create_feature_extractor( depth_multiplier, pad_to_multiple) preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3)) feature_extractor.extract_features(preprocessed_inputs) variables = g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) for variable in variables: self.assertTrue(variable.name.startswith(scope_name))
mit
YYWen0o0/python-frame-django
django/contrib/gis/geos/tests/test_io.py
43
3920
from __future__ import unicode_literals import binascii import unittest from unittest import skipUnless from django.utils.six import memoryview from ..import HAS_GEOS if HAS_GEOS: from .. import GEOSGeometry, WKTReader, WKTWriter, WKBReader, WKBWriter @skipUnless(HAS_GEOS, "Geos is required.") class GEOSIOTest(unittest.TestCase): def test01_wktreader(self): # Creating a WKTReader instance wkt_r = WKTReader() wkt = 'POINT (5 23)' # read() should return a GEOSGeometry ref = GEOSGeometry(wkt) g1 = wkt_r.read(wkt.encode()) g2 = wkt_r.read(wkt) for geom in (g1, g2): self.assertEqual(ref, geom) # Should only accept six.string_types objects. self.assertRaises(TypeError, wkt_r.read, 1) self.assertRaises(TypeError, wkt_r.read, memoryview(b'foo')) def test02_wktwriter(self): # Creating a WKTWriter instance, testing its ptr property. wkt_w = WKTWriter() self.assertRaises(TypeError, wkt_w._set_ptr, WKTReader.ptr_type()) ref = GEOSGeometry('POINT (5 23)') ref_wkt = 'POINT (5.0000000000000000 23.0000000000000000)' self.assertEqual(ref_wkt, wkt_w.write(ref).decode()) def test03_wkbreader(self): # Creating a WKBReader instance wkb_r = WKBReader() hex = b'000000000140140000000000004037000000000000' wkb = memoryview(binascii.a2b_hex(hex)) ref = GEOSGeometry(hex) # read() should return a GEOSGeometry on either a hex string or # a WKB buffer. g1 = wkb_r.read(wkb) g2 = wkb_r.read(hex) for geom in (g1, g2): self.assertEqual(ref, geom) bad_input = (1, 5.23, None, False) for bad_wkb in bad_input: self.assertRaises(TypeError, wkb_r.read, bad_wkb) def test04_wkbwriter(self): wkb_w = WKBWriter() # Representations of 'POINT (5 23)' in hex -- one normal and # the other with the byte order changed. 
g = GEOSGeometry('POINT (5 23)') hex1 = b'010100000000000000000014400000000000003740' wkb1 = memoryview(binascii.a2b_hex(hex1)) hex2 = b'000000000140140000000000004037000000000000' wkb2 = memoryview(binascii.a2b_hex(hex2)) self.assertEqual(hex1, wkb_w.write_hex(g)) self.assertEqual(wkb1, wkb_w.write(g)) # Ensuring bad byteorders are not accepted. for bad_byteorder in (-1, 2, 523, 'foo', None): # Equivalent of `wkb_w.byteorder = bad_byteorder` self.assertRaises(ValueError, wkb_w._set_byteorder, bad_byteorder) # Setting the byteorder to 0 (for Big Endian) wkb_w.byteorder = 0 self.assertEqual(hex2, wkb_w.write_hex(g)) self.assertEqual(wkb2, wkb_w.write(g)) # Back to Little Endian wkb_w.byteorder = 1 # Now, trying out the 3D and SRID flags. g = GEOSGeometry('POINT (5 23 17)') g.srid = 4326 hex3d = b'0101000080000000000000144000000000000037400000000000003140' wkb3d = memoryview(binascii.a2b_hex(hex3d)) hex3d_srid = b'01010000A0E6100000000000000000144000000000000037400000000000003140' wkb3d_srid = memoryview(binascii.a2b_hex(hex3d_srid)) # Ensuring bad output dimensions are not accepted for bad_outdim in (-1, 0, 1, 4, 423, 'foo', None): # Equivalent of `wkb_w.outdim = bad_outdim` self.assertRaises(ValueError, wkb_w._set_outdim, bad_outdim) # Now setting the output dimensions to be 3 wkb_w.outdim = 3 self.assertEqual(hex3d, wkb_w.write_hex(g)) self.assertEqual(wkb3d, wkb_w.write(g)) # Telling the WKBWriter to include the srid in the representation. wkb_w.srid = True self.assertEqual(hex3d_srid, wkb_w.write_hex(g)) self.assertEqual(wkb3d_srid, wkb_w.write(g))
bsd-3-clause
stuntman723/rap-analyzer
rap_analyzer/lib/python2.7/site-packages/django/contrib/sessions/models.py
82
2229
from __future__ import unicode_literals from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ class SessionManager(models.Manager): use_in_migrations = True def encode(self, session_dict): """ Returns the given session dictionary serialized and encoded as a string. """ return SessionStore().encode(session_dict) def save(self, session_key, session_dict, expire_date): s = self.model(session_key, self.encode(session_dict), expire_date) if session_dict: s.save() else: s.delete() # Clear sessions with no data. return s @python_2_unicode_compatible class Session(models.Model): """ Django provides full support for anonymous sessions. The session framework lets you store and retrieve arbitrary data on a per-site-visitor basis. It stores data on the server side and abstracts the sending and receiving of cookies. Cookies contain a session ID -- not the data itself. The Django sessions framework is entirely cookie-based. It does not fall back to putting session IDs in URLs. This is an intentional design decision. Not only does that behavior make URLs ugly, it makes your site vulnerable to session-ID theft via the "Referer" header. For complete documentation on using Sessions in your code, consult the sessions documentation that is shipped with Django (also available on the Django Web site). """ session_key = models.CharField(_('session key'), max_length=40, primary_key=True) session_data = models.TextField(_('session data')) expire_date = models.DateTimeField(_('expire date'), db_index=True) objects = SessionManager() class Meta: db_table = 'django_session' verbose_name = _('session') verbose_name_plural = _('sessions') def __str__(self): return self.session_key def get_decoded(self): return SessionStore().decode(self.session_data) # At bottom to avoid circular import from django.contrib.sessions.backends.db import SessionStore # isort:skip
mit
quodlibetor/dedupe
setup.py
1
1174
from __future__ import with_statement import distribute_setup distribute_setup.use_setuptools() from setuptools import setup import os with open(os.path.join(os.path.dirname(__file__),"README.rst"), 'r') as fh: long_desc = fh.read() VERSION = "0.1.2" setup(name="dedupe", version=VERSION, description="A thing to detect duplicate music", long_description=long_desc, author="Brandon W Maister", author_email="quodlibetor@gmail.com", url="http://bitbucket.org/quodlibetor/dedupe", py_modules=['dedupe', 'distribute_setup', 'setup'], entry_points= {'console_scripts': [ 'dedupe = dedupe:main' ]}, install_requires=['mutagen', 'argparse'], classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.5", # min "Operating System :: OS Independent", # I think? "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", ] )
gpl-3.0
RITct/Rita
app/secret_sauce/seqtoseq_model.py
1
11243
import random import torch import torch.nn as nn from torch.autograd import Variable from torch import optim import torch.nn.functional as F import pickle as pk use_cuda = torch.cuda.is_available() SOS_token = 0 EOS_token = 1 MAX_LENGTH = 80 class Dataset: def __init__(self, name): self.name = name self.word2index = {} self.word2count = {} self.index2word = {0: "SOS", 1: "EOS"} self.n_words = 2 # Count SOS and EOS def addSentence(self, sentence): for word in sentence.split(' '): self.addWord(word) def addWord(self, word): if word not in self.word2index: self.word2index[word] = self.n_words self.word2count[word] = 1 self.index2word[self.n_words] = word self.n_words += 1 else: self.word2count[word] += 1 def dataclean(training_data): input_data = Dataset('input') output_data = Dataset('ouput') for pair in training_data: input_data.addSentence(pair[0]) output_data.addSentence(pair[1]) return input_data, output_data, input_data.n_words, output_data.n_words class EncoderRNN(nn.Module): def __init__(self, input_size, hidden_size, n_layers=1): super(EncoderRNN, self).__init__() self.n_layers = n_layers self.hidden_size = hidden_size if use_cuda: self.embedding = nn.Embedding(input_size, hidden_size).cuda() self.gru = nn.GRU(hidden_size, hidden_size).cuda() else: self.embedding = nn.Embedding(input_size, hidden_size) self.gru = nn.GRU(hidden_size, hidden_size) def forward(self, input, hidden): embedded = self.embedding(input).view(1, 1, -1) output = embedded for i in range(self.n_layers): output, hidden = self.gru(output, hidden) output = output.cuda() if use_cuda else output hidden = hidden.cuda() if use_cuda else output return output, hidden def initHidden(self): result = Variable(torch.zeros(1, 1, self.hidden_size)) if use_cuda: return result.cuda() return result class AttnDecoderRNN(nn.Module): def __init__(self, hidden_size, output_size, n_layers=1, dropout_p=0.1, max_length=MAX_LENGTH): super(AttnDecoderRNN, self).__init__() self.hidden_size = hidden_size 
self.output_size = output_size self.n_layers = n_layers self.dropout_p = dropout_p self.max_length = max_length if use_cuda: self.embedding = nn.Embedding(self.output_size, self.hidden_size).cuda() self.attn = nn.Linear(self.hidden_size * 2, self.max_length).cuda() self.attn_combine = nn.Linear(self.hidden_size * 2, self.hidden_size).cuda() self.dropout = nn.Dropout(self.dropout_p).cuda() self.gru = nn.GRU(self.hidden_size, self.hidden_size).cuda() self.out = nn.Linear(self.hidden_size, self.output_size).cuda() else: self.embedding = nn.Embedding(self.output_size, self.hidden_size) self.attn = nn.Linear(self.hidden_size * 2, self.max_length) self.attn_combine = nn.Linear(self.hidden_size * 2, self.hidden_size) self.dropout = nn.Dropout(self.dropout_p) self.gru = nn.GRU(self.hidden_size, self.hidden_size) self.out = nn.Linear(self.hidden_size, self.output_size) def forward(self, input, hidden, encoder_output, encoder_outputs): embedded = self.embedding(input).view(1, 1, -1) embedded = self.dropout(embedded) attn_weights = F.softmax( self.attn(torch.cat((embedded[0], hidden[0]), 1))) attn_weights = attn_weights.cuda() if use_cuda else attn_weights attn_applied = torch.bmm(attn_weights.unsqueeze(0), encoder_outputs.unsqueeze(0)) attn_applied = attn_applied.cuda() if use_cuda else attn_applied output = torch.cat((embedded[0], attn_applied[0]), 1) output = output.cuda() if use_cuda else output output = self.attn_combine(output).unsqueeze(0) for i in range(self.n_layers): output = F.relu(output) output = output.cuda() if use_cuda else output output, hidden = self.gru(output, hidden) output = F.log_softmax(self.out(output[0])) output = output.cuda() if use_cuda else output return output, hidden, attn_weights def initHidden(self): result = Variable(torch.zeros(1, 1, self.hidden_size)) if use_cuda: return result.cuda() return result def indexesFromSentence(lang, sentence): out = [] for word in sentence.split(' '): if word not in lang.word2index: continue k = 
lang.word2index[word] out.append(k) return out def variableFromSentence(lang, sentence): indexes = indexesFromSentence(lang, sentence) indexes.append(EOS_token) result = Variable(torch.LongTensor(indexes).view(-1, 1)) if use_cuda: return result.cuda() return result def variablesFromPair(pair, input_lang, output_lang): input_variable = variableFromSentence(input_lang, pair[0]) target_variable = variableFromSentence(output_lang, pair[1]) return (input_variable, target_variable) teacher_forcing_ratio = 0.5 def train(input_variable, target_variable, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, max_length=MAX_LENGTH): encoder_hidden = encoder.initHidden() encoder_optimizer.zero_grad() decoder_optimizer.zero_grad() input_length = input_variable.size()[0] target_length = target_variable.size()[0] encoder_outputs = Variable(torch.zeros(max_length, encoder.hidden_size)) encoder_outputs = encoder_outputs.cuda() if use_cuda else encoder_outputs loss = 0 for ei in range(input_length): encoder_output, encoder_hidden = encoder( input_variable[ei], encoder_hidden) encoder_outputs[ei] = encoder_output[0][0] decoder_input = Variable(torch.LongTensor([[SOS_token]])) decoder_input = decoder_input.cuda() if use_cuda else decoder_input decoder_hidden = encoder_hidden use_teacher_forcing = True if random.random() < teacher_forcing_ratio else False if use_teacher_forcing: # Teacher forcing: Feed the target as the next input for di in range(target_length): decoder_output, decoder_hidden, decoder_attention = decoder( decoder_input, decoder_hidden, encoder_output, encoder_outputs) loss += criterion(decoder_output[0], target_variable[di]) decoder_input = target_variable[di] # Teacher forcing else: # Without teacher forcing: use its own predictions as the next input for di in range(target_length): decoder_output, decoder_hidden, decoder_attention = decoder( decoder_input, decoder_hidden, encoder_output, encoder_outputs) topv, topi = decoder_output.data.topk(1) ni = 
topi[0][0] decoder_input = Variable(torch.LongTensor([[ni]])) loss += criterion(decoder_output[0], target_variable[di]) if ni == EOS_token: break loss.backward() encoder_optimizer.step() decoder_optimizer.step() return loss.data[0] / target_length def seqtoseq_train(n_iters, training_data,print_every=1000, learning_rate=0.01, tfl=False): print_loss_total = 0 hidden_size = 256 in_lang, out_lang, inwords, outwords = dataclean(training_data) metadata = open('app/brain/seqtoseq_meta.pkl', 'wb') pk.dump([in_lang, out_lang], metadata) if tfl == False: encoder = EncoderRNN(inwords, hidden_size) decoder = AttnDecoderRNN(hidden_size, outwords, dropout_p=0.1) else: encoder = torch.load('app/brain/encoder.pt') decoder = torch.load('app/brain/decoder.pt') if use_cuda: encoder = encoder.cuda() decoder = decoder.cuda() encoder_optimizer = optim.SGD(encoder.parameters(), lr=learning_rate) decoder_optimizer = optim.SGD(decoder.parameters(), lr=learning_rate) training_data = [variablesFromPair(random.choice(training_data),in_lang,out_lang) for i in range(n_iters)] criterion = nn.NLLLoss() if use_cuda: criterion = criterion.cuda() for iter in range(1, n_iters + 1): training_pair = training_data[iter - 1] input_variable = training_pair[0] target_variable = training_pair[1] loss = train(input_variable, target_variable, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion) print_loss_total += loss accuracy = 100-(loss*100) if accuracy < 0: accuracy = 0 if iter%1000 == 0: print(accuracy,"%") torch.save(encoder, 'app/brain/encoder.pt') torch.save(decoder, 'app/brain/decoder.pt') def evaluate(encoder, decoder, input_lang, output_lang, sentence, max_length=MAX_LENGTH): input_variable = variableFromSentence(input_lang, sentence) input_length = input_variable.size()[0] encoder_hidden = encoder.initHidden() encoder_outputs = Variable(torch.zeros(max_length, encoder.hidden_size)) encoder_outputs = encoder_outputs.cuda() if use_cuda else encoder_outputs for ei in 
range(input_length): encoder_output, encoder_hidden = encoder(input_variable[ei], encoder_hidden) encoder_outputs[ei] = encoder_outputs[ei] + encoder_output[0][0] decoder_input = Variable(torch.LongTensor([[SOS_token]])) # SOS decoder_input = decoder_input.cuda() if use_cuda else decoder_input decoder_hidden = encoder_hidden decoded_words = [] decoder_attentions = torch.zeros(max_length, max_length) for di in range(max_length): decoder_output, decoder_hidden, decoder_attention = decoder( decoder_input, decoder_hidden, encoder_output, encoder_outputs) decoder_attentions[di] = decoder_attention.data topv, topi = decoder_output.data.topk(1) ni = topi[0][0] if ni == EOS_token: decoded_words.append('<EOS>') break else: decoded_words.append(output_lang.index2word[ni]) decoder_input = Variable(torch.LongTensor([[ni]])) decoder_input = decoder_input.cuda() if use_cuda else decoder_input return decoded_words, decoder_attentions[:di + 1] def reply_predict(sentence): try: encoder = torch.load('app/brain/encoder.pt') decoder = torch.load('app/brain/decoder.pt') with open('app/brain/seqtoseq_meta.pkl','rb') as pickle_file: meta = pk.load(pickle_file) input_lang = meta[0] output_lang = meta[1] output_words, attentions = evaluate(encoder, decoder, input_lang, output_lang, sentence) output_sentence = ' '.join(output_words) return output_sentence.split("<EOS>")[0] except(KeyError): return random.choice(["sorry i didnt get that","no idea", "i may be malfunctioning", "sorry this is a prototype"])
bsd-3-clause
joergdietrich/astropy
astropy/stats/lombscargle/tests/test_lombscargle.py
2
16929
import numpy as np from numpy.testing import assert_allclose from .... import units from ....tests.helper import pytest, assert_quantity_allclose from .. import LombScargle ALL_METHODS = LombScargle.available_methods ALL_METHODS_NO_AUTO = [method for method in ALL_METHODS if method != 'auto'] FAST_METHODS = [method for method in ALL_METHODS if 'fast' in method] NTERMS_METHODS = [method for method in ALL_METHODS if 'chi2' in method] NORMALIZATIONS = ['standard', 'psd', 'log', 'model'] @pytest.fixture def data(N=100, period=1, theta=[10, 2, 3], dy=1, rseed=0): """Generate some data for testing""" rng = np.random.RandomState(rseed) t = 20 * period * rng.rand(N) omega = 2 * np.pi / period y = theta[0] + theta[1] * np.sin(omega * t) + theta[2] * np.cos(omega * t) dy = dy * (0.5 + rng.rand(N)) y += dy * rng.randn(N) return t, y, dy @pytest.mark.parametrize('minimum_frequency', [None, 1.0]) @pytest.mark.parametrize('maximum_frequency', [None, 5.0]) @pytest.mark.parametrize('nyquist_factor', [1, 10]) @pytest.mark.parametrize('samples_per_peak', [1, 5]) def test_autofrequency(data, minimum_frequency, maximum_frequency, nyquist_factor, samples_per_peak): t, y, dy = data baseline = t.max() - t.min() freq = LombScargle(t, y, dy).autofrequency(samples_per_peak, nyquist_factor, minimum_frequency, maximum_frequency) df = freq[1] - freq[0] # Check sample spacing assert_allclose(df, 1. 
/ baseline / samples_per_peak) # Check minimum frequency if minimum_frequency is None: assert_allclose(freq[0], 0.5 * df) else: assert_allclose(freq[0], minimum_frequency) if maximum_frequency is None: avg_nyquist = 0.5 * len(t) / baseline assert_allclose(freq[-1], avg_nyquist * nyquist_factor, atol=0.5*df) else: assert_allclose(freq[-1], maximum_frequency, atol=0.5*df) @pytest.mark.parametrize('method', ALL_METHODS_NO_AUTO) @pytest.mark.parametrize('center_data', [True, False]) @pytest.mark.parametrize('fit_mean', [True, False]) @pytest.mark.parametrize('with_errors', [True, False]) @pytest.mark.parametrize('with_units', [True, False]) @pytest.mark.parametrize('normalization', NORMALIZATIONS) def test_all_methods(data, method, center_data, fit_mean, with_errors, with_units, normalization): if method == 'scipy' and (fit_mean or with_errors): return t, y, dy = data frequency = 0.8 + 0.01 * np.arange(40) if with_units: t = t * units.day y = y * units.mag dy = dy * units.mag frequency = frequency / t.unit if not with_errors: dy = None kwds = dict(normalization=normalization) ls = LombScargle(t, y, dy, center_data=center_data, fit_mean=fit_mean) P_expected = ls.power(frequency, **kwds) # don't use the fft approximation here; we'll test this elsewhere if method in FAST_METHODS: kwds['method_kwds'] = dict(use_fft=False) P_method = ls.power(frequency, method=method, **kwds) if with_units: if normalization == 'psd' and not with_errors: assert P_method.unit == y.unit ** 2 else: assert P_method.unit == units.dimensionless_unscaled else: assert not hasattr(P_method, 'unit') assert_quantity_allclose(P_expected, P_method) @pytest.mark.parametrize('method', ALL_METHODS_NO_AUTO) @pytest.mark.parametrize('center_data', [True, False]) @pytest.mark.parametrize('fit_mean', [True, False]) @pytest.mark.parametrize('with_errors', [True, False]) @pytest.mark.parametrize('normalization', NORMALIZATIONS) def test_integer_inputs(data, method, center_data, fit_mean, with_errors, 
normalization): if method == 'scipy' and (fit_mean or with_errors): return t, y, dy = data t = np.floor(100 * t) t_int = t.astype(int) y = np.floor(100 * y) y_int = y.astype(int) dy = np.floor(100 * dy) dy_int = dy.astype('int32') frequency = 1E-2 * (0.8 + 0.01 * np.arange(40)) if not with_errors: dy = None dy_int = None kwds = dict(center_data=center_data, fit_mean=fit_mean) P_float = LombScargle(t, y, dy, **kwds).power(frequency, method=method, normalization=normalization) P_int = LombScargle(t_int, y_int, dy_int, **kwds).power(frequency, method=method, normalization=normalization) assert_allclose(P_float, P_int) @pytest.mark.parametrize('method', NTERMS_METHODS) @pytest.mark.parametrize('center_data', [True, False]) @pytest.mark.parametrize('fit_mean', [True, False]) @pytest.mark.parametrize('with_errors', [True, False]) @pytest.mark.parametrize('nterms', [0, 2, 4]) @pytest.mark.parametrize('normalization', NORMALIZATIONS) def test_nterms_methods(method, center_data, fit_mean, with_errors, nterms, normalization, data): t, y, dy = data frequency = 0.8 + 0.01 * np.arange(40) if not with_errors: dy = None ls = LombScargle(t, y, dy, center_data=center_data, fit_mean=fit_mean, nterms=nterms) kwds = dict(normalization=normalization) if nterms == 0 and not fit_mean: with pytest.raises(ValueError) as err: ls.power(frequency, method=method, **kwds) assert 'nterms' in str(err.value) and 'bias' in str(err.value) else: P_expected = ls.power(frequency, **kwds) # don't use fast fft approximations here if 'fast' in method: kwds['method_kwds'] = dict(use_fft=False) P_method = ls.power(frequency, method=method, **kwds) assert_allclose(P_expected, P_method, rtol=1E-7, atol=1E-25) @pytest.mark.parametrize('method', FAST_METHODS) @pytest.mark.parametrize('center_data', [True, False]) @pytest.mark.parametrize('fit_mean', [True, False]) @pytest.mark.parametrize('with_errors', [True, False]) @pytest.mark.parametrize('nterms', [0, 1, 2]) def test_fast_approximations(method, 
center_data, fit_mean, with_errors, nterms, data): t, y, dy = data frequency = 0.8 + 0.01 * np.arange(40) if not with_errors: dy = None ls = LombScargle(t, y, dy, center_data=center_data, fit_mean=fit_mean, nterms=nterms) # use only standard normalization because we compare via absolute tolerance kwds = dict(method=method, normalization='standard') if method == 'fast' and nterms != 1: with pytest.raises(ValueError) as err: ls.power(frequency, **kwds) assert 'nterms' in str(err.value) elif nterms == 0 and not fit_mean: with pytest.raises(ValueError) as err: ls.power(frequency, **kwds) assert 'nterms' in str(err.value) and 'bias' in str(err.value) else: P_fast = ls.power(frequency, **kwds) kwds['method_kwds'] = dict(use_fft=False) P_slow = ls.power(frequency, **kwds) assert_allclose(P_fast, P_slow, atol=0.008) @pytest.mark.parametrize('method', LombScargle.available_methods) @pytest.mark.parametrize('shape', [(), (1,), (2,), (3,), (2, 3)]) def test_output_shapes(method, shape, data): t, y, dy = data freq = np.asarray(np.zeros(shape)) freq.flat = np.arange(1, freq.size + 1) PLS = LombScargle(t, y, fit_mean=False).power(freq, method=method) assert PLS.shape == shape @pytest.mark.parametrize('method', LombScargle.available_methods) def test_errors_on_unit_mismatch(method, data): t, y, dy = data t = t * units.second y = y * units.mag frequency = np.linspace(0.5, 1.5, 10) # this should fail because frequency and 1/t units do not match with pytest.raises(ValueError) as err: LombScargle(t, y, fit_mean=False).power(frequency, method=method) assert str(err.value).startswith('Units of frequency not equivalent') # this should fail because dy and y units do not match with pytest.raises(ValueError) as err: LombScargle(t, y, dy, fit_mean=False).power(frequency / t.unit) assert str(err.value).startswith('Units of dy not equivalent') # we don't test all normalizations here because they are tested above # only test method='auto' because unit handling does not depend on method 
@pytest.mark.parametrize('fit_mean', [True, False])
@pytest.mark.parametrize('center_data', [True, False])
@pytest.mark.parametrize('normalization', ['standard', 'psd'])
@pytest.mark.parametrize('with_error', [True, False])
def test_unit_conversions(data, fit_mean, center_data,
                          normalization, with_error):
    """Results must be invariant under equivalent-unit rescaling of inputs."""
    t, y, dy = data

    t_day = t * units.day
    t_hour = units.Quantity(t_day, 'hour')

    y_meter = y * units.meter
    y_millimeter = units.Quantity(y_meter, 'millimeter')

    # sanity check on inputs
    assert_quantity_allclose(t_day, t_hour)
    assert_quantity_allclose(y_meter, y_millimeter)

    if with_error:
        dy = dy * units.meter
    else:
        dy = None

    freq_day, P1 = LombScargle(t_day, y_meter, dy).autopower()
    freq_hour, P2 = LombScargle(t_hour, y_millimeter, dy).autopower()

    # Check units of frequency
    assert freq_day.unit == 1. / units.day
    assert freq_hour.unit == 1. / units.hour

    # Check that results match
    assert_quantity_allclose(freq_day, freq_hour)
    assert_quantity_allclose(P1, P2)

    # Check that switching frequency units doesn't change things
    P3 = LombScargle(t_day, y_meter, dy).power(freq_hour)
    P4 = LombScargle(t_hour, y_meter, dy).power(freq_day)
    assert_quantity_allclose(P3, P4)


@pytest.mark.parametrize('fit_mean', [True, False])
@pytest.mark.parametrize('with_units', [True, False])
@pytest.mark.parametrize('freq', [1.0, 2.0])
def test_model(fit_mean, with_units, freq):
    """model() must exactly recover a noise-free sinusoid."""
    rand = np.random.RandomState(0)
    t = 10 * rand.rand(40)
    params = 10 * rand.rand(3)

    y = np.zeros_like(t)
    if fit_mean:
        y += params[0]
    y += params[1] * np.sin(2 * np.pi * freq * (t - params[2]))

    if with_units:
        t = t * units.day
        y = y * units.mag
        freq = freq / units.day

    ls = LombScargle(t, y, center_data=False, fit_mean=fit_mean)
    y_fit = ls.model(t, freq)
    assert_quantity_allclose(y_fit, y)


@pytest.mark.parametrize('t_unit', [units.second, units.day])
@pytest.mark.parametrize('frequency_unit', [units.Hz, 1. / units.second])
@pytest.mark.parametrize('y_unit', [units.mag, units.jansky])
def test_model_units_match(data, t_unit, frequency_unit, y_unit):
    """model() output must carry the unit of the input y."""
    t, y, dy = data
    t_fit = t[:5]
    frequency = 1.0

    t = t * t_unit
    t_fit = t_fit * t_unit
    y = y * y_unit
    dy = dy * y_unit
    frequency = frequency * frequency_unit

    ls = LombScargle(t, y, dy)
    y_fit = ls.model(t_fit, frequency)
    assert y_fit.unit == y_unit


def test_model_units_mismatch(data):
    """model() must raise on unit mismatches among t, t_fit, dy, frequency."""
    t, y, dy = data
    frequency = 1.0
    t_fit = t[:5]

    t = t * units.second
    t_fit = t_fit * units.second
    y = y * units.mag
    frequency = 1.0 / t.unit

    # this should fail because frequency and 1/t units do not match
    with pytest.raises(ValueError) as err:
        LombScargle(t, y).model(t_fit, frequency=1.0)
    assert str(err.value).startswith('Units of frequency not equivalent')

    # this should fail because t and t_fit units do not match
    with pytest.raises(ValueError) as err:
        LombScargle(t, y).model([1, 2], frequency)
    assert str(err.value).startswith('Units of t not equivalent')

    # this should fail because dy and y units do not match
    with pytest.raises(ValueError) as err:
        LombScargle(t, y, dy).model(t_fit, frequency)
    assert str(err.value).startswith('Units of dy not equivalent')


def test_autopower(data):
    """autopower() must equal autofrequency() followed by power()."""
    t, y, dy = data
    ls = LombScargle(t, y, dy)
    kwargs = dict(samples_per_peak=6, nyquist_factor=2,
                  minimum_frequency=2, maximum_frequency=None)
    freq1 = ls.autofrequency(**kwargs)
    power1 = ls.power(freq1)
    freq2, power2 = ls.autopower(**kwargs)

    assert_allclose(freq1, freq2)
    assert_allclose(power1, power2)


@pytest.fixture
def null_data(N=1000, dy=1, rseed=0):
    """Generate null hypothesis data"""
    rng = np.random.RandomState(rseed)
    t = 100 * rng.rand(N)
    dy = 0.5 * dy * (1 + rng.rand(N))
    y = dy * rng.randn(N)
    return t, y, dy


@pytest.mark.parametrize('normalization', NORMALIZATIONS)
def test_distribution(null_data, normalization):
    """Periodogram of pure noise must follow the theoretical distribution."""
    t, y, dy = null_data
    N = len(t)
    ls = LombScargle(t, y, dy)
    freq, power = ls.autopower(normalization=normalization,
                               maximum_frequency=40)
    z = np.linspace(0, power.max(), 1000)

    # Test that pdf and cdf are consistent
    dz = z[1] - z[0]
    z_mid = z[:-1] + 0.5 * dz
    pdf = _lombscargle_pdf(z_mid, N, normalization=normalization)
    cdf = _lombscargle_cdf(z, N, normalization=normalization)
    assert_allclose(pdf, np.diff(cdf) / dz, rtol=1E-5, atol=1E-8)

    # Test that observed power is distributed according to the theoretical pdf
    # FIX: np.histogram's `normed` keyword was deprecated and finally removed
    # in NumPy 1.24; `density=True` is the exact replacement for this usage
    # (values normalized to form a probability density).
    hist, bins = np.histogram(power, 30, density=True)
    midpoints = 0.5 * (bins[1:] + bins[:-1])
    pdf = _lombscargle_pdf(midpoints, N, normalization=normalization)
    assert_allclose(hist, pdf, rtol=0.05, atol=0.05 * pdf[0])


# The following are convenience functions used to compute statistics of the
# periodogram under various normalizations; they are used in the preceding
# test.


def _lombscargle_pdf(z, N, normalization, dH=1, dK=3):
    """Probability density function for Lomb-Scargle periodogram

    Compute the expected probability density function of the periodogram
    for the null hypothesis - i.e. data consisting of Gaussian noise.

    Parameters
    ----------
    z : array-like
        the periodogram value
    N : int
        the number of data points from which the periodogram was computed
    normalization : string
        The periodogram normalization. Must be one of
        ['standard', 'model', 'log', 'psd']
    dH, dK : integers (optional)
        The number of parameters in the null hypothesis and the model

    Returns
    -------
    pdf : np.ndarray
        The expected probability density function

    Notes
    -----
    For normalization='psd', the distribution can only be computed for
    periodograms constructed with errors specified.
    All expressions used here are adapted from Table 1 of Baluev 2008 [1]_.

    References
    ----------
    .. [1] Baluev, R.V. MNRAS 385, 1279 (2008)
    """
    if dK - dH != 2:
        raise NotImplementedError("Degrees of freedom != 2")
    Nk = N - dK

    if normalization == 'psd':
        return np.exp(-z)
    elif normalization == 'standard':
        return 0.5 * Nk * (1 + z) ** (-0.5 * Nk - 1)
    elif normalization == 'model':
        return 0.5 * Nk * (1 - z) ** (0.5 * Nk - 1)
    elif normalization == 'log':
        return 0.5 * Nk * np.exp(-0.5 * Nk * z)
    else:
        raise ValueError("normalization='{0}' is not recognized"
                         "".format(normalization))


def _lombscargle_cdf(z, N, normalization, dH=1, dK=3):
    """Cumulative distribution for the Lomb-Scargle periodogram

    Compute the expected cumulative distribution of the periodogram
    for the null hypothesis - i.e. data consisting of Gaussian noise.

    Parameters
    ----------
    z : array-like
        the periodogram value
    N : int
        the number of data points from which the periodogram was computed
    normalization : string
        The periodogram normalization. Must be one of
        ['standard', 'model', 'log', 'psd']
    dH, dK : integers (optional)
        The number of parameters in the null hypothesis and the model

    Returns
    -------
    cdf : np.ndarray
        The expected cumulative distribution function

    Notes
    -----
    For normalization='psd', the distribution can only be computed for
    periodograms constructed with errors specified.
    All expressions used here are adapted from Table 1 of Baluev 2008 [1]_.

    References
    ----------
    .. [1] Baluev, R.V. MNRAS 385, 1279 (2008)
    """
    if dK - dH != 2:
        raise NotImplementedError("Degrees of freedom != 2")
    Nk = N - dK

    if normalization == 'psd':
        return 1 - np.exp(-z)
    elif normalization == 'standard':
        return 1 - (1 + z) ** (-0.5 * Nk)
    elif normalization == 'model':
        return 1 - (1 - z) ** (0.5 * Nk)
    elif normalization == 'log':
        return 1 - np.exp(-0.5 * Nk * z)
    else:
        raise ValueError("normalization='{0}' is not recognized"
                         "".format(normalization))
bsd-3-clause
nonZero/demos-python
src/examples/short/object_oriented/static_method_6.py
1
1045
#!/usr/bin/python2 ''' An example for using class methods to keep per class properties. Once set, subclass properties shadows properties on the base class. ''' from __future__ import print_function class Book(object): num = 0 def __init__(self, title): self.title = title self.id = self.increment_num() print('Created:', self) @classmethod def increment_num(cls): cls.num += 1 return cls.num def __str__(self): return '<{} #{}: {}>'.format(self.__class__.__name__, self.id, self.title) b1 = Book('Guinness Book of Records') b2 = Book('The Bible') print('Book.num:', Book.num) print('b1.num:', b1.num) print() class FictionBook(Book): num = 0 # Removing me voids warranty print('Book.num:', Book.num) print('FictionBook.num:', FictionBook.num) print() b3 = FictionBook('Sherlock Holmes') b4 = FictionBook('Danny Din') b5 = FictionBook('Kofiko') print() print('Book.num:', Book.num) print('FictionBook.num:', FictionBook.num) print() b6 = Book('Britannica')
gpl-3.0
stevekuznetsov/ansible
lib/ansible/utils/display.py
66
11938
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type import fcntl import textwrap import os import random import subprocess import sys import time import locale import logging import getpass import errno from struct import unpack, pack from termios import TIOCGWINSZ from ansible import constants as C from ansible.errors import AnsibleError from ansible.utils.color import stringc from ansible.module_utils._text import to_bytes, to_text try: # Python 2 input = raw_input except NameError: # Python 3, we already have raw_input pass logger = None #TODO: make this a logging callback instead if C.DEFAULT_LOG_PATH: path = C.DEFAULT_LOG_PATH if (os.path.exists(path) and os.access(path, os.W_OK)) or os.access(os.path.dirname(path), os.W_OK): logging.basicConfig(filename=path, level=logging.DEBUG, format='%(asctime)s %(name)s %(message)s') mypid = str(os.getpid()) user = getpass.getuser() logger = logging.getLogger("p=%s u=%s | " % (mypid, user)) else: print("[WARNING]: log file at %s is not writeable and we cannot create it, aborting\n" % path, file=sys.stderr) b_COW_PATHS = (b"/usr/bin/cowsay", b"/usr/games/cowsay", b"/usr/local/bin/cowsay", # BSD path for cowsay b"/opt/local/bin/cowsay", # MacPorts path for cowsay ) class Display: def 
__init__(self, verbosity=0): self.columns = None self.verbosity = verbosity # list of all deprecation messages to prevent duplicate display self._deprecations = {} self._warns = {} self._errors = {} self.b_cowsay = None self.noncow = C.ANSIBLE_COW_SELECTION self.set_cowsay_info() if self.b_cowsay: try: cmd = subprocess.Popen([self.b_cowsay, "-l"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out, err) = cmd.communicate() self.cows_available = set([ to_text(c) for c in out.split() ]) if C.ANSIBLE_COW_WHITELIST: self.cows_available = set(C.ANSIBLE_COW_WHITELIST).intersection(self.cows_available) except: # could not execute cowsay for some reason self.b_cowsay = False self._set_column_width() def set_cowsay_info(self): if not C.ANSIBLE_NOCOWS: for b_cow_path in b_COW_PATHS: if os.path.exists(b_cow_path): self.b_cowsay = b_cow_path def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False): """ Display a message to the user Note: msg *must* be a unicode string to prevent UnicodeError tracebacks. """ nocolor = msg if color: msg = stringc(msg, color) if not log_only: if not msg.endswith(u'\n'): msg2 = msg + u'\n' else: msg2 = msg msg2 = to_bytes(msg2, encoding=self._output_encoding(stderr=stderr)) if sys.version_info >= (3,): # Convert back to text string on python3 # We first convert to a byte string so that we get rid of # characters that are invalid in the user's locale msg2 = to_text(msg2, self._output_encoding(stderr=stderr), errors='replace') if not stderr: fileobj = sys.stdout else: fileobj = sys.stderr fileobj.write(msg2) try: fileobj.flush() except IOError as e: # Ignore EPIPE in case fileobj has been prematurely closed, eg. 
# when piping to "head -n1" if e.errno != errno.EPIPE: raise if logger and not screen_only: msg2 = nocolor.lstrip(u'\n') msg2 = to_bytes(msg2) if sys.version_info >= (3,): # Convert back to text string on python3 # We first convert to a byte string so that we get rid of # characters that are invalid in the user's locale msg2 = to_text(msg2, self._output_encoding(stderr=stderr)) if color == C.COLOR_ERROR: logger.error(msg2) else: logger.info(msg2) def v(self, msg, host=None): return self.verbose(msg, host=host, caplevel=0) def vv(self, msg, host=None): return self.verbose(msg, host=host, caplevel=1) def vvv(self, msg, host=None): return self.verbose(msg, host=host, caplevel=2) def vvvv(self, msg, host=None): return self.verbose(msg, host=host, caplevel=3) def vvvvv(self, msg, host=None): return self.verbose(msg, host=host, caplevel=4) def vvvvvv(self, msg, host=None): return self.verbose(msg, host=host, caplevel=5) def debug(self, msg): if C.DEFAULT_DEBUG: self.display("%6d %0.5f: %s" % (os.getpid(), time.time(), msg), color=C.COLOR_DEBUG) def verbose(self, msg, host=None, caplevel=2): if self.verbosity > caplevel: if host is None: self.display(msg, color=C.COLOR_VERBOSE) else: self.display("<%s> %s" % (host, msg), color=C.COLOR_VERBOSE, screen_only=True) def deprecated(self, msg, version=None, removed=False): ''' used to print out a deprecation message.''' if not removed and not C.DEPRECATION_WARNINGS: return if not removed: if version: new_msg = "[DEPRECATION WARNING]: %s.\nThis feature will be removed in version %s." % (msg, version) else: new_msg = "[DEPRECATION WARNING]: %s.\nThis feature will be removed in a future release." % (msg) new_msg = new_msg + " Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.\n\n" else: raise AnsibleError("[DEPRECATED]: %s.\nPlease update your playbooks." 
% msg) wrapped = textwrap.wrap(new_msg, self.columns, replace_whitespace=False, drop_whitespace=False) new_msg = "\n".join(wrapped) + "\n" if new_msg not in self._deprecations: self.display(new_msg.strip(), color=C.COLOR_DEPRECATE, stderr=True) self._deprecations[new_msg] = 1 def warning(self, msg, formatted=False): if not formatted: new_msg = "\n[WARNING]: %s" % msg wrapped = textwrap.wrap(new_msg, self.columns) new_msg = "\n".join(wrapped) + "\n" else: new_msg = "\n[WARNING]: \n%s" % msg if new_msg not in self._warns: self.display(new_msg, color=C.COLOR_WARN, stderr=True) self._warns[new_msg] = 1 def system_warning(self, msg): if C.SYSTEM_WARNINGS: self.warning(msg) def banner(self, msg, color=None, cows=True): ''' Prints a header-looking line with cowsay or stars wit hlength depending on terminal width (3 minimum) ''' if self.b_cowsay and cows: try: self.banner_cowsay(msg) return except OSError: self.warning("somebody cleverly deleted cowsay or something during the PB run. heh.") msg = msg.strip() star_len = self.columns - len(msg) if star_len <= 3: star_len = 3 stars = u"*" * star_len self.display(u"\n%s %s" % (msg, stars), color=color) def banner_cowsay(self, msg, color=None): if u": [" in msg: msg = msg.replace(u"[", u"") if msg.endswith(u"]"): msg = msg[:-1] runcmd = [self.b_cowsay, b"-W", b"60"] if self.noncow: thecow = self.noncow if thecow == 'random': thecow = random.choice(list(self.cows_available)) runcmd.append(b'-f') runcmd.append(to_bytes(thecow)) runcmd.append(to_bytes(msg)) cmd = subprocess.Popen(runcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out, err) = cmd.communicate() self.display(u"%s\n" % to_text(out), color=color) def error(self, msg, wrap_text=True): if wrap_text: new_msg = u"\n[ERROR]: %s" % msg wrapped = textwrap.wrap(new_msg, self.columns) new_msg = u"\n".join(wrapped) + u"\n" else: new_msg = u"ERROR! 
%s" % msg if new_msg not in self._errors: self.display(new_msg, color=C.COLOR_ERROR, stderr=True) self._errors[new_msg] = 1 @staticmethod def prompt(msg, private=False): prompt_string = to_bytes(msg, encoding=Display._output_encoding()) if sys.version_info >= (3,): # Convert back into text on python3. We do this double conversion # to get rid of characters that are illegal in the user's locale prompt_string = to_text(prompt_string) if private: return getpass.getpass(msg) else: return input(prompt_string) def do_var_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None): result = None if sys.__stdin__.isatty(): do_prompt = self.prompt if prompt and default is not None: msg = "%s [%s]: " % (prompt, default) elif prompt: msg = "%s: " % prompt else: msg = 'input for %s: ' % varname if confirm: while True: result = do_prompt(msg, private) second = do_prompt("confirm " + msg, private) if result == second: break self.display("***** VALUES ENTERED DO NOT MATCH ****") else: result = do_prompt(msg, private) else: result = None self.warning("Not prompting as we are not in interactive mode") # if result is false and default is not None if not result and default is not None: result = default if encrypt: # Circular import because encrypt needs a display class from ansible.utils.encrypt import do_encrypt result = do_encrypt(result, encrypt, salt_size, salt) # handle utf-8 chars result = to_text(result, errors='surrogate_or_strict') return result @staticmethod def _output_encoding(stderr=False): encoding = locale.getpreferredencoding() # https://bugs.python.org/issue6202 # Python2 hardcodes an obsolete value on Mac. Use MacOSX defaults # instead. if encoding in ('mac-roman',): encoding = 'utf-8' return encoding def _set_column_width(self): if os.isatty(0): tty_size = unpack('HHHH', fcntl.ioctl(0, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[1] else: tty_size = 0 self.columns = max(79, tty_size - 1)
gpl-3.0
hubert-he/hyde
hydeengine/processor.py
47
6246
import sys import logging import fnmatch from media_processors import TemplateProcessor def load_processor(name): (module_name, _ , processor) = name.rpartition(".") __import__(module_name) module = sys.modules[module_name] return getattr(module, processor) class Processor(object): def __init__(self, settings): self.settings = settings self.processor_cache = {} self._logger = None @property def logger(self): if self._logger: return self._logger if hasattr(self.settings, "logger"): return self.settings.logger loglevel = logging.INFO if hasattr(self.settings, "LOG_LEVEL"): loglevel = self.settings.LOG_LEVEL logger = logging.getLogger("hyde_processor") logger.setLevel(loglevel) ch = logging.StreamHandler() ch.setLevel(loglevel) formatter = logging.Formatter("%(levelname)s:%(message)s[%(asctime)s]") ch.setFormatter(formatter) logger.addHandler(ch) self._logger = logger return logger def get_node_processors(self, node): if node.fragment in self.processor_cache: return self.processor_cache[node.fragment] processors = {} if node.type == "media": processors = self.settings.MEDIA_PROCESSORS elif node.type == "content": processors = self.settings.CONTENT_PROCESSORS else: return [] return self.extract_processors(node, processors, self.processor_cache) def extract_processors(self, node, processors, cache): current_processors = [] this_node = node while this_node: fragment = this_node.fragment self.logger.debug("Getting processors for: %s" % fragment) if fragment in processors: current_processors.append(processors[fragment]) this_node = this_node.parent # Add the default processors to the list if "*" in processors: current_processors.append(processors["*"]) cache[node.fragment] = current_processors current_processors.reverse() return current_processors def remove(self, item): if hasattr(item, "resources"): self.logger.info("Removing Node %s" % item.url) item.target_folder.delete() item.temp_folder.delete() else: self.logger.info("Removing Resource %s" % item.url) 
item.target_file.delete() item.temp_file.delete() def process(self, resource): if (resource.node.type not in ("content", "media") or resource.is_layout): self.logger.debug("Skipping resource: %s" % str(resource.file)) return False self.logger.info("Processing %s" % resource.url) processor_config = self.get_node_processors(resource.node) processors = [] for processer_map in processor_config: if resource.file.extension in processer_map: processors.extend(processer_map[resource.file.extension]) else: self.logger.debug("Extension %s" % resource.file.extension) # # Wildcard matching: # This should be the default matcher going forward # The above branch needs to be kept around until everyone # has had the chance to upgrade their settings file. # for wildcard, processor_list in processer_map.iteritems(): self.logger.debug(wildcard) if fnmatch.fnmatch(resource.file.name, wildcard): processors.extend(processor_list) resource.temp_file.parent.make() resource.source_file.copy_to(resource.temp_file) (original_source, resource.source_file) = ( resource.source_file, resource.temp_file) for processor_name in processors: processor = load_processor(processor_name) self.logger.debug(" Executing %s" % processor_name) processor.process(resource) if resource.node.type == "content" and not resource.prerendered: self.settings.CONTEXT['page'] = resource self.logger.debug(" Rendering Page") TemplateProcessor.process(resource) self.settings.CONTEXT['page'] = None resource.source_file = original_source self.logger.debug(" Processing Complete") return True def pre_process(self, node): self.logger.info("Pre processing %s" % str(node.folder)) self.__around_process__(node, self.settings.SITE_PRE_PROCESSORS) def post_process(self, node): self.logger.info("Post processing %s" % str(node.folder)) self.__around_process__(node, self.settings.SITE_POST_PROCESSORS) def __around_process__(self, node, processors): for child in node.walk(): if not child.type in ("content", "media"): continue fragment = 
child.temp_folder.get_fragment(node.site.temp_folder) if sys.platform == 'win32': fragment = fragment.rstrip("\\") else: fragment = fragment.rstrip("/") if not fragment: if sys.platform == 'win32': fragment = "\\" else: fragment = "/" if fragment in processors: processor_config = processors[fragment] for processor_name, params in processor_config.iteritems(): self.logger.debug(" Executing %s" % processor_name) processor = load_processor(processor_name) if not params: params = {} params.update( {'node': child}) processor.process(child.temp_folder, params)
mit
YtvwlD/yarfi
etc/yarfi/services/console_setup.py
1
1300
# YARFI - Yet Another Replacement For Init # Copyright (C) 2014 Niklas Sombert # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from subprocess import Popen from yarfi.ServicesAndTargets import Service as Srv class Service(Srv): def __init__(self): self.description = "set the console font and keyboard layout" self.depends = ["system", "udev"] self.conflicts = [] self.respawn = True self.status_ = "" self.process = None def start(self): self.process = Popen(["/bin/setupcon"]) #use --force? (and --save?) def status(self): if self.status_ == "stopped": return ("stopped") if self.process: if self.process.poll() is not None: self.status_ = "running" return ("running")
gpl-3.0
suyashphadtare/test
erpnext/stock/doctype/item/test_item.py
5
1803
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import unittest import frappe from frappe.test_runner import make_test_records test_ignore = ["BOM"] test_dependencies = ["Warehouse"] class TestItem(unittest.TestCase): def test_default_warehouse(self): from erpnext.stock.doctype.item.item import WarehouseNotSet item = frappe.copy_doc(test_records[0]) item.is_stock_item = "Yes" item.default_warehouse = None self.assertRaises(WarehouseNotSet, item.insert) def test_get_item_details(self): from erpnext.stock.get_item_details import get_item_details to_check = { "item_code": "_Test Item", "item_name": "_Test Item", "description": "_Test Item", "warehouse": "_Test Warehouse - _TC", "income_account": "Sales - _TC", "expense_account": "_Test Account Cost for Goods Sold - _TC", "cost_center": "_Test Cost Center 2 - _TC", "qty": 1.0, "price_list_rate": 100.0, "base_price_list_rate": 0.0, "discount_percentage": 0.0, "rate": 0.0, "base_rate": 0.0, "amount": 0.0, "base_amount": 0.0, "batch_no": None, "item_tax_rate": '{}', "uom": "_Test UOM", "conversion_factor": 1.0, } make_test_records("Item Price") details = get_item_details({ "item_code": "_Test Item", "company": "_Test Company", "price_list": "_Test Price List", "currency": "_Test Currency", "parenttype": "Sales Order", "conversion_rate": 1, "price_list_currency": "_Test Currency", "plc_conversion_rate": 1, "order_type": "Sales", "transaction_type": "selling" }) for key, value in to_check.iteritems(): self.assertEquals(value, details.get(key)) test_records = frappe.get_test_records('Item')
agpl-3.0
TEAM-Gummy/platform_external_chromium_org
chrome/common/extensions/docs/server2/github_file_system.py
23
7628
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import json import logging from StringIO import StringIO import appengine_blobstore as blobstore from appengine_url_fetcher import AppEngineUrlFetcher from appengine_wrappers import urlfetch from docs_server_utils import StringIdentity from file_system import FileSystem, StatInfo from future import Future import url_constants from zipfile import ZipFile, BadZipfile ZIP_KEY = 'zipball' USERNAME = None PASSWORD = None def _MakeBlobstoreKey(version): return ZIP_KEY + '.' + str(version) class _AsyncFetchFutureZip(object): def __init__(self, fetcher, username, password, blobstore, key_to_set, key_to_delete=None): self._fetcher = fetcher self._fetch = fetcher.FetchAsync(ZIP_KEY, username=username, password=password) self._blobstore = blobstore self._key_to_set = key_to_set self._key_to_delete = key_to_delete def Get(self): try: result = self._fetch.Get() # Check if Github authentication failed. if result.status_code == 401: logging.error('Github authentication failed for %s, falling back to ' 'unauthenticated.' 
% USERNAME) blob = self._fetcher.Fetch(ZIP_KEY).content else: blob = result.content except urlfetch.DownloadError as e: logging.error('Bad github zip file: %s' % e) return None if self._key_to_delete is not None: self._blobstore.Delete(_MakeBlobstoreKey(self._key_to_delete), blobstore.BLOBSTORE_GITHUB) try: return_zip = ZipFile(StringIO(blob)) except BadZipfile as e: logging.error('Bad github zip file: %s' % e) return None self._blobstore.Set(_MakeBlobstoreKey(self._key_to_set), blob, blobstore.BLOBSTORE_GITHUB) return return_zip class GithubFileSystem(FileSystem): @staticmethod def CreateChromeAppsSamples(object_store_creator): return GithubFileSystem( '%s/GoogleChrome/chrome-app-samples' % url_constants.GITHUB_REPOS, blobstore.AppEngineBlobstore(), object_store_creator) def __init__(self, url, blobstore, object_store_creator): # If we key the password store on the app version then the whole advantage # of having it in the first place is greatly lessened (likewise it should # always start populated). 
password_store = object_store_creator.Create( GithubFileSystem, app_version=None, category='password', start_empty=False) if USERNAME is None: password_data = password_store.GetMulti(('username', 'password')).Get() self._username, self._password = (password_data.get('username'), password_data.get('password')) else: password_store.SetMulti({'username': USERNAME, 'password': PASSWORD}) self._username, self._password = (USERNAME, PASSWORD) self._url = url self._fetcher = AppEngineUrlFetcher(url) self._blobstore = blobstore self._stat_object_store = object_store_creator.Create(GithubFileSystem) self._version = None self._GetZip(self.Stat(ZIP_KEY).version) def _GetZip(self, version): try: blob = self._blobstore.Get(_MakeBlobstoreKey(version), blobstore.BLOBSTORE_GITHUB) except: self._zip_file = Future(value=None) return if blob is not None: try: self._zip_file = Future(value=ZipFile(StringIO(blob))) except BadZipfile as e: self._blobstore.Delete(_MakeBlobstoreKey(version), blobstore.BLOBSTORE_GITHUB) logging.error('Bad github zip file: %s' % e) self._zip_file = Future(value=None) else: self._zip_file = Future( delegate=_AsyncFetchFutureZip(self._fetcher, self._username, self._password, self._blobstore, version, key_to_delete=self._version)) self._version = version def _ReadFile(self, path): try: zip_file = self._zip_file.Get() except Exception as e: logging.error('Github ReadFile error: %s' % e) return '' if zip_file is None: logging.error('Bad github zip file.') return '' prefix = zip_file.namelist()[0][:-1] return zip_file.read(prefix + path) def _ListDir(self, path): try: zip_file = self._zip_file.Get() except Exception as e: logging.error('Github ListDir error: %s' % e) return [] if zip_file is None: logging.error('Bad github zip file.') return [] filenames = zip_file.namelist() # Take out parent directory name (GoogleChrome-chrome-app-samples-c78a30f) filenames = [f[len(filenames[0]) - 1:] for f in filenames] # Remove the path of the directory we're listing from 
the filenames. filenames = [f[len(path):] for f in filenames if f != path and f.startswith(path)] # Remove all files not directly in this directory. return [f for f in filenames if f[:-1].count('/') == 0] def Read(self, paths): version = self.Stat(ZIP_KEY).version if version != self._version: self._GetZip(version) result = {} for path in paths: if path.endswith('/'): result[path] = self._ListDir(path) else: result[path] = self._ReadFile(path) return Future(value=result) def _DefaultStat(self, path): version = 0 # TODO(kalman): we should replace all of this by wrapping the # GithubFileSystem in a CachingFileSystem. A lot of work has been put into # CFS to be robust, and GFS is missing out. # For example: the following line is wrong, but it could be moot. self._stat_object_store.Set(path, version) return StatInfo(version) def Stat(self, path): version = self._stat_object_store.Get(path).Get() if version is not None: return StatInfo(version) try: result = self._fetcher.Fetch('commits/HEAD', username=USERNAME, password=PASSWORD) except urlfetch.DownloadError as e: logging.warning('GithubFileSystem Stat: %s' % e) return self._DefaultStat(path) # Check if Github authentication failed. if result.status_code == 401: logging.warning('Github authentication failed for %s, falling back to ' 'unauthenticated.' % USERNAME) try: result = self._fetcher.Fetch('commits/HEAD') except urlfetch.DownloadError as e: logging.warning('GithubFileSystem Stat: %s' % e) return self._DefaultStat(path) # Parse response JSON - but sometimes github gives us invalid JSON. try: version = json.loads(result.content)['sha'] self._stat_object_store.Set(path, version) return StatInfo(version) except StandardError as e: logging.warning( ('%s: got invalid or unexpected JSON from github. Response status ' + 'was %s, content %s') % (e, result.status_code, result.content)) return self._DefaultStat(path) def GetIdentity(self): return '%s@%s' % (self.__class__.__name__, StringIdentity(self._url))
bsd-3-clause
kenshay/ImageScript
ProgramData/SystemFiles/Python/Lib/site-packages/sphinx/websupport/storage/differ.py
7
2603
# -*- coding: utf-8 -*-
"""
    sphinx.websupport.storage.differ
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    A differ for creating an HTML representations of proposal diffs

    :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re
from difflib import Differ

from sphinx.util.pycompat import htmlescape


class CombinedHtmlDiff(object):
    """Create an HTML representation of the differences between two pieces
    of text.
    """
    # Matches the marker runs ('+', '-', '^') on ndiff '?' hint lines,
    # which indicate the changed character spans of the preceding line.
    highlight_regex = re.compile(r'([\+\-\^]+)')

    def __init__(self, source, proposal):
        # Escape the proposal only: the rendered diff inserts HTML tags
        # around proposal text, so it must not contain raw markup.
        proposal = htmlescape(proposal)
        differ = Differ()
        self.diff = list(differ.compare(source.splitlines(True),
                                        proposal.splitlines(True)))

    def make_text(self):
        """Return the raw ndiff-style text representation of the diff."""
        return '\n'.join(self.diff)

    def make_html(self):
        """Return the HTML representation of the differences between the
        ``source`` and ``proposal`` texts given to the constructor.
        """
        html = []
        diff = self.diff[:]
        # Guard the degenerate cases: previously a diff of fewer than two
        # lines raised IndexError from the unconditional pops below.
        if not diff:
            return ''
        if len(diff) == 1:
            return self._handle_line(diff[0]).rstrip()
        line = diff.pop(0)
        # Renamed from `next`, which shadowed the builtin.
        next_line = diff.pop(0)
        while True:
            # Each line is rendered with one line of lookahead, because a
            # following '?' hint line describes intraline changes in it.
            html.append(self._handle_line(line, next_line))
            line = next_line
            try:
                next_line = diff.pop(0)
            except IndexError:
                # `line` was the final line of the diff.
                html.append(self._handle_line(line))
                break
        return ''.join(html).rstrip()

    def _handle_line(self, line, next_line=None):
        """Render an individual ndiff line as HTML.

        :param line: the ndiff line ('  ', '+ ', '- ' or '? ' prefixed)
        :param next_line: the following diff line, if any, used to detect
                          a '?' hint line describing changes in `line`
        """
        prefix = line[0]
        text = line[2:]

        if prefix == ' ':
            # Unchanged line: emit verbatim.
            return text
        elif prefix == '?':
            # Hint lines are consumed via lookahead; emit nothing here.
            return ''

        if next_line is not None and next_line[0] == '?':
            tag = 'ins' if prefix == '+' else 'del'
            text = self._highlight_text(text, next_line, tag)
        css_class = 'prop-added' if prefix == '+' else 'prop-removed'

        return '<span class="%s">%s</span>\n' % (css_class, text.rstrip())

    def _highlight_text(self, text, next_line, tag):
        """Highlight the specific changes made to a line by adding
        <ins> and <del> tags around the spans marked on the '?' hint line.
        """
        next_line = next_line[2:]
        new_text = []
        start = 0
        for match in self.highlight_regex.finditer(next_line):
            new_text.append(text[start:match.start()])
            new_text.append('<%s>' % tag)
            new_text.append(text[match.start():match.end()])
            new_text.append('</%s>' % tag)
            start = match.end()
        new_text.append(text[start:])
        return ''.join(new_text)
gpl-3.0
tersmitten/ansible
lib/ansible/modules/network/f5/bigip_gtm_topology_record.py
25
32848
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright: (c) 2018, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'certified'} DOCUMENTATION = r''' --- module: bigip_gtm_topology_record short_description: Manages GTM Topology Records description: - Manages GTM Topology Records. Once created, only topology record C(weight) can be modified. version_added: 2.8 options: source: description: - Specifies the origination of an incoming DNS request. suboptions: negate: description: - When set to c(yes) the system selects this topology record, when the request source does not match. type: bool default: no subnet: description: - An IP address and network mask in the CIDR format. type: str region: description: - Specifies the name of region already defined in the configuration. type: str continent: description: - Specifies one of the seven continents, along with the C(Unknown) setting. - Specifying C(Unknown) forces the system to use a default resolution if the system cannot determine the location of the local DNS making the request. - Full continent names and their abbreviated versions are supported. type: str country: description: - Specifies a country. - In addition to the country full names, you may also specify their abbreviated form, such as C(US) instead of C(United States). - Valid country codes can be found here https://countrycode.org/. type: str state: description: - Specifies a state in a given country. - This parameter requires country option to be provided. type: str isp: description: - Specifies an Internet service provider. 
type: str choices: - AOL - BeijingCNC - CNC - ChinaEducationNetwork - ChinaMobilNetwork - ChinaRailwayTelcom - ChinaTelecom - ChinaUnicom - Comcast - Earthlink - ShanghaiCNC - ShanghaiTelecom geo_isp: description: - Specifies a geolocation ISP type: str type: dict required: True destination: description: - Specifies where the system directs the incoming DNS request. suboptions: negate: description: - When set to c(yes) the system selects this topology record, when the request destination does not match. type: bool default: no subnet: description: - An IP address and network mask in the CIDR format. type: str region: description: - Specifies the name of region already defined in the configuration. type: str continent: description: - Specifies one of the seven continents, along with the C(Unknown) setting. - Specifying C(Unknown) forces the system to use a default resolution if the system cannot determine the location of the local DNS making the request. - Full continent names and their abbreviated versions are supported. type: str country: description: - Specifies a country. - Full continent names and their abbreviated versions are supported. type: str state: description: - Specifies a state in a given country. - This parameter requires country option to be provided. type: str pool: description: - Specifies the name of GTM pool already defined in the configuration. type: str datacenter: description: - Specifies the name of GTM data center already defined in the configuration. type: str isp: description: - Specifies an Internet service provider. type: str choices: - AOL - BeijingCNC - CNC - ChinaEducationNetwork - ChinaMobilNetwork - ChinaRailwayTelcom - ChinaTelecom - ChinaUnicom - Comcast - Earthlink - ShanghaiCNC - ShanghaiTelecom geo_isp: description: - Specifies a geolocation ISP type: str type: dict required: True weight: description: - Specifies the weight of the topology record. 
- The system finds the weight of the first topology record that matches the server object (pool or pool member) and the local DNS. The system then assigns that weight as the topology score for that server object. - The system load balances to the server object with the highest topology score. - If the system finds no topology record that matches both the server object and the local DNS, then the system assigns that server object a zero score. - If the option is not specified when the record is created the system will set it at a default value of C(1) - Valid range is (0 - 4294967295) type: int partition: description: - Device partition to manage resources on. - Partition parameter is taken into account when used in conjunction with C(pool), C(data_center), and C(region) parameters, it is ignored otherwise. type: str default: Common state: description: - When C(state) is C(present), ensures that the record exists. - When C(state) is C(absent), ensures that the record is removed. type: str choices: - present - absent default: present extends_documentation_fragment: f5 author: - Wojciech Wypior (@wojtek0806) ''' EXAMPLES = r''' - name: Create an IP Subnet and an ISP based topology record bigip_gtm_topology_record: source: - subnet: 192.168.1.0/24 destination: - isp: AOL weight: 10 provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Create a region and a pool based topology record bigip_gtm_topology_record: source: - region: Foo destination: - pool: FooPool partition: FooBar provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost - name: Create a negative region and a negative data center based topology record bigip_gtm_topology_record: source: - region: Baz - negate: yes destination: - datacenter: Baz-DC - negate: yes provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost ''' RETURN = r''' weight: description: The weight of the topology record. 
returned: changed type: int sample: 20 ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import env_fallback from ansible.module_utils.six import iteritems try: from library.module_utils.network.f5.bigip import F5RestClient from library.module_utils.network.f5.common import F5ModuleError from library.module_utils.network.f5.common import AnsibleF5Parameters from library.module_utils.network.f5.common import fq_name from library.module_utils.network.f5.common import f5_argument_spec from library.module_utils.network.f5.common import transform_name from library.module_utils.network.f5.common import flatten_boolean from library.module_utils.network.f5.ipaddress import is_valid_ip_network except ImportError: from ansible.module_utils.network.f5.bigip import F5RestClient from ansible.module_utils.network.f5.common import F5ModuleError from ansible.module_utils.network.f5.common import AnsibleF5Parameters from ansible.module_utils.network.f5.common import fq_name from ansible.module_utils.network.f5.common import f5_argument_spec from ansible.module_utils.network.f5.common import transform_name from ansible.module_utils.network.f5.common import flatten_boolean from ansible.module_utils.network.f5.ipaddress import is_valid_ip_network class Parameters(AnsibleF5Parameters): api_map = { 'score': 'weight', } api_attributes = [ 'score', ] returnables = [ 'weight', 'name' ] updatables = [ 'weight', ] class ApiParameters(Parameters): pass class ModuleParameters(Parameters): countries = { 'Afghanistan': 'AF', 'Aland Islands': 'AX', 'Albania': 'AL', 'Algeria': 'DZ', 'American Samoa': 'AS', 'Andorra': 'AD', 'Angola': 'AO', 'Anguilla': 'AI', 'Antarctica': 'AQ', 'Antigua and Barbuda': 'AG', 'Argentina': 'AR', 'Armenia': 'AM', 'Aruba': 'AW', 'Australia': 'AU', 'Austria': 'AT', 'Azerbaijan': 'AZ', 'Bahamas': 'BS', 'Bahrain': 'BH', 'Bangladesh': 'BD', 'Barbados': 'BB', 'Belarus': 'BY', 'Belgium': 'BE', 'Belize': 'BZ', 'Benin': 'BJ', 'Bermuda': 
'BM', 'Bhutan': 'BT', 'Bolivia': 'BO', 'Bonaire, Sint Eustatius and Saba': 'BQ', 'Bosnia and Herzegovina': 'BA', 'Botswana': 'BW', 'Bouvet Island': 'BV', 'Brazil': 'BR', 'British Indian Ocean Territory': 'IO', 'Brunei Darussalam': 'BN', 'Bulgaria': 'BG', 'Burkina Faso': 'BF', 'Burundi': 'BI', 'Cape Verde': 'CV', 'Cambodia': 'KH', 'Cameroon': 'CM', 'Canada': 'CA', 'Cayman Islands': 'KY', 'Central African Republic': 'CF', 'Chad': 'TD', 'Chile': 'CL', 'China': 'CN', 'Christmas Island': 'CX', 'Cocos (Keeling) Islands': 'CC', 'Colombia': 'CO', 'Comoros': 'KM', 'Congo': 'CG', 'Congo, The Democratic Republic of the': 'CD', 'Cook Islands': 'CK', 'Costa Rica': 'CR', "Cote D'Ivoire": 'CI', 'Croatia': 'HR', 'Cuba': 'CU', 'Curaçao': 'CW', 'Cyprus': 'CY', 'Czech Republic': 'CZ', 'Denmark': 'DK', 'Djibouti': 'DJ', 'Dominica': 'DM', 'Dominican Republic': 'DO', 'Ecuador': 'EC', 'Egypt': 'EG', 'El Salvador': 'SV', 'Equatorial Guinea': 'GQ', 'Eritrea': 'ER', 'Estonia': 'EE', 'Ethiopia': 'ET', 'Falkland Islands (Malvinas)': 'FK', 'Faroe Islands': 'FO', 'Fiji': 'FJ', 'Finland': 'FI', 'France': 'FR', 'French Guiana': 'GF', 'French Polynesia': 'PF', 'French Southern Territories': 'TF', 'Gabon': 'GA', 'Gambia': 'GM', 'Georgia': 'GE', 'Germany': 'DE', 'Ghana': 'GH', 'Gibraltar': 'GI', 'Greece': 'GR', 'Greenland': 'GL', 'Grenada': 'GD', 'Guadeloupe': 'GP', 'Guam': 'GU', 'Guatemala': 'GT', 'Guernsey': 'GG', 'Guinea': 'GN', 'Guinea-Bissau': 'GW', 'Guyana': 'GY', 'Haiti': 'HT', 'Heard Island and McDonald Islands': 'HM', 'Holy See (Vatican City State)': 'VA', 'Honduras': 'HN', 'Hong Kong': 'HK', 'Hungary': 'HU', 'Iceland': 'IS', 'India': 'IN', 'Indonesia': 'ID', 'Iran, Islamic Republic of': 'IR', 'Iraq': 'IQ', 'Ireland': 'IE', 'Isle of Man': 'IM', 'Israel': 'IL', 'Italy': 'IT', 'Jamaica': 'JM', 'Japan': 'JP', 'Jersey': 'JE', 'Jordan': 'JO', 'Kazakhstan': 'KZ', 'Kenya': 'KE', 'Kiribati': 'KI', "Korea, Democratic People's Republic of": 'KP', 'Korea, Republic of': 'KR', 'Kuwait': 'KW', 
'Kyrgyzstan': 'KG', "Lao People's Democratic Republic": 'LA', 'Latvia': 'LV', 'Lebanon': 'LB', 'Lesotho': 'LS', 'Liberia': 'LR', 'Libyan Arab Jamahiriya': 'LY', 'Liechtenstein': 'LI', 'Lithuania': 'LT', 'Luxembourg': 'LU', 'Macau': 'MO', 'Macedonia': 'MK', 'Madagascar': 'MG', 'Malawi': 'MW', 'Malaysia': 'MY', 'Maldives': 'MV', 'Mali': 'ML', 'Malta': 'MT', 'Marshall Islands': 'MH', 'Martinique': 'MQ', 'Mauritania': 'MR', 'Mauritius': 'MU', 'Mayotte': 'YT', 'Mexico': 'MX', 'Micronesia, Federated States of': 'FM', 'Moldova, Republic of': 'MD', 'Monaco': 'MC', 'Mongolia': 'MN', 'Montenegro': 'ME', 'Montserrat': 'MS', 'Morocco': 'MA', 'Mozambique': 'MZ', 'Myanmar': 'MM', 'Namibia': 'NA', 'Nauru': 'NR', 'Nepal': 'NP', 'Netherlands': 'NL', 'New Caledonia': 'NC', 'New Zealand': 'NZ', 'Nicaragua': 'NI', 'Niger': 'NE', 'Nigeria': 'NG', 'Niue': 'NU', 'Norfolk Island': 'NF', 'Northern Mariana Islands': 'MP', 'Norway': 'NO', 'Oman': 'OM', 'Pakistan': 'PK', 'Palau': 'PW', 'Palestinian Territory': 'PS', 'Panama': 'PA', 'Papua New Guinea': 'PG', 'Paraguay': 'PY', 'Peru': 'PE', 'Philippines': 'PH', 'Pitcairn Islands': 'PN', 'Poland': 'PL', 'Portugal': 'PT', 'Puerto Rico': 'PR', 'Qatar': 'QA', 'Reunion': 'RE', 'Romania': 'RO', 'Russian Federation': 'RU', 'Rwanda': 'RW', 'Saint Barthelemy': 'BL', 'Saint Helena': 'SH', 'Saint Kitts and Nevis': 'KN', 'Saint Lucia': 'LC', 'Saint Martin': 'MF', 'Saint Pierre and Miquelon': 'PM', 'Saint Vincent and the Grenadines': 'VC', 'Samoa': 'WS', 'San Marino': 'SM', 'Sao Tome and Principe': 'ST', 'Saudi Arabia': 'SA', 'Senegal': 'SN', 'Serbia': 'RS', 'Seychelles': 'SC', 'Sierra Leone': 'SL', 'Singapore': 'SG', 'Sint Maarten (Dutch part)': 'SX', 'Slovakia': 'SK', 'Slovenia': 'SI', 'Solomon Islands': 'SB', 'Somalia': 'SO', 'South Africa': 'ZA', 'South Georgia and the South Sandwich Islands': 'GS', 'South Sudan': 'SS', 'Spain': 'ES', 'Sri Lanka': 'LK', 'Sudan': 'SD', 'Suriname': 'SR', 'Svalbard and Jan Mayen': 'SJ', 'Swaziland': 'SZ', 'Sweden': 'SE', 
'Switzerland': 'CH', 'Syrian Arab Republic': 'SY', 'Taiwan': 'TW', 'Tajikistan': 'TJ', 'Tanzania, United Republic of': 'TZ', 'Thailand': 'TH', 'Timor-Leste': 'TL', 'Togo': 'TG', 'Tokelau': 'TK', 'Tonga': 'TO', 'Trinidad and Tobago': 'TT', 'Tunisia': 'TN', 'Turkey': 'TR', 'Turkmenistan': 'TM', 'Turks and Caicos Islands': 'TC', 'Tuvalu': 'TV', 'Uganda': 'UG', 'Ukraine': 'UA', 'United Arab Emirates': 'AE', 'United Kingdom': 'GB', 'United States': 'US', 'United States Minor Outlying Islands': 'UM', 'Uruguay': 'UY', 'Uzbekistan': 'UZ', 'Vanuatu': 'VU', 'Venezuela': 'VE', 'Vietnam': 'VN', 'Virgin Islands, British': 'VG', 'Virgin Islands, U.S.': 'VI', 'Wallis and Futuna': 'WF', 'Western Sahara': 'EH', 'Yemen': 'YE', 'Zambia': 'ZM', 'Zimbabwe': 'ZW', 'Unrecognized': 'N/A', 'Asia/Pacific Region': 'AP', 'Europe': 'EU', 'Netherlands Antilles': 'AN', 'France, Metropolitan': 'FX', 'Anonymous Proxy': 'A1', 'Satellite Provider': 'A2', 'Other': 'O1', } continents = { 'Antarctica': 'AN', 'Asia': 'AS', 'Africa': 'AF', 'Europe': 'EU', 'North America': 'NA', 'South America': 'SA', 'Oceania': 'OC', 'Unknown': '--', } @property def src_negate(self): src_negate = self._values['source'].get('negate', None) result = flatten_boolean(src_negate) if result == 'yes': return 'not' return None @property def src_subnet(self): src_subnet = self._values['source'].get('subnet', None) if src_subnet is None: return None if is_valid_ip_network(src_subnet): return src_subnet raise F5ModuleError( "Specified 'subnet' is not a valid subnet." 
) @property def src_region(self): src_region = self._values['source'].get('region', None) if src_region is None: return None return fq_name(self.partition, src_region) @property def src_continent(self): src_continent = self._values['source'].get('continent', None) if src_continent is None: return None result = self.continents.get(src_continent, src_continent) return result @property def src_country(self): src_country = self._values['source'].get('country', None) if src_country is None: return None result = self.countries.get(src_country, src_country) return result @property def src_state(self): src_country = self._values['source'].get('country', None) src_state = self._values['source'].get('state', None) if src_state is None: return None if src_country is None: raise F5ModuleError( 'Country needs to be provided when specifying state' ) result = '{0}/{1}'.format(src_country, src_state) return result @property def src_isp(self): src_isp = self._values['source'].get('isp', None) if src_isp is None: return None return fq_name('Common', src_isp) @property def src_geo_isp(self): src_geo_isp = self._values['source'].get('geo_isp', None) return src_geo_isp @property def dst_negate(self): dst_negate = self._values['destination'].get('negate', None) result = flatten_boolean(dst_negate) if result == 'yes': return 'not' return None @property def dst_subnet(self): dst_subnet = self._values['destination'].get('subnet', None) if dst_subnet is None: return None if is_valid_ip_network(dst_subnet): return dst_subnet raise F5ModuleError( "Specified 'subnet' is not a valid subnet." 
) @property def dst_region(self): dst_region = self._values['destination'].get('region', None) if dst_region is None: return None return fq_name(self.partition, dst_region) @property def dst_continent(self): dst_continent = self._values['destination'].get('continent', None) if dst_continent is None: return None result = self.continents.get(dst_continent, dst_continent) return result @property def dst_country(self): dst_country = self._values['destination'].get('country', None) if dst_country is None: return None result = self.countries.get(dst_country, dst_country) return result @property def dst_state(self): dst_country = self.dst_country dst_state = self._values['destination'].get('state', None) if dst_state is None: return None if dst_country is None: raise F5ModuleError( 'Country needs to be provided when specifying state' ) result = '{0}/{1}'.format(dst_country, dst_state) return result @property def dst_isp(self): dst_isp = self._values['destination'].get('isp', None) if dst_isp is None: return None return fq_name('Common', dst_isp) @property def dst_geo_isp(self): dst_geo_isp = self._values['destination'].get('geo_isp', None) return dst_geo_isp @property def dst_pool(self): dst_pool = self._values['destination'].get('pool', None) if dst_pool is None: return None return fq_name(self.partition, dst_pool) @property def dst_datacenter(self): dst_datacenter = self._values['destination'].get('datacenter', None) if dst_datacenter is None: return None return fq_name(self.partition, dst_datacenter) @property def source(self): options = { 'negate': self.src_negate, 'subnet': self.src_subnet, 'region': self.src_region, 'continent': self.src_continent, 'country': self.src_country, 'state': self.src_state, 'isp': self.src_isp, 'geoip-isp': self.src_geo_isp, } result = 'ldns: {0}'.format(self._format_options(options)) return result @property def destination(self): options = { 'negate': self.dst_negate, 'subnet': self.dst_subnet, 'region': self.dst_region, 'continent': 
self.dst_continent, 'country': self.dst_country, 'state': self.dst_state, 'datacenter': self.dst_datacenter, 'pool': self.dst_pool, 'isp': self.dst_isp, 'geoip-isp': self.dst_geo_isp, } result = 'server: {0}'.format(self._format_options(options)) return result @property def name(self): result = '{0} {1}'.format(self.source, self.destination) return result def _format_options(self, options): negate = None cleaned = dict((k, v) for k, v in iteritems(options) if v is not None) if 'country' and 'state' in cleaned.keys(): del cleaned['country'] if 'negate' in cleaned.keys(): negate = cleaned['negate'] del cleaned['negate'] name, value = cleaned.popitem() if negate: result = '{0} {1} {2}'.format(negate, name, value) return result result = '{0} {1}'.format(name, value) return result @property def weight(self): weight = self._values['weight'] if weight is None: return None if 0 <= weight <= 4294967295: return weight raise F5ModuleError( "Valid weight must be in range 0 - 4294967295" ) class Changes(Parameters): def to_return(self): result = {} try: for returnable in self.returnables: result[returnable] = getattr(self, returnable) result = self._filter_params(result) except Exception: pass return result class UsableChanges(Changes): pass class ReportableChanges(Changes): pass class Difference(object): def __init__(self, want, have=None): self.want = want self.have = have def compare(self, param): try: result = getattr(self, param) return result except AttributeError: return self.__default(param) def __default(self, param): attr1 = getattr(self.want, param) try: attr2 = getattr(self.have, param) if attr1 != attr2: return attr1 except AttributeError: return attr1 class ModuleManager(object): def __init__(self, *args, **kwargs): self.module = kwargs.get('module', None) self.client = F5RestClient(**self.module.params) self.want = ModuleParameters(params=self.module.params) self.have = ApiParameters() self.changes = UsableChanges() def _set_changed_options(self): changed = {} 
for key in Parameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = UsableChanges(params=changed) def _update_changed_options(self): diff = Difference(self.want, self.have) updatables = Parameters.updatables changed = dict() for k in updatables: change = diff.compare(k) if change is None: continue else: if isinstance(change, dict): changed.update(change) else: changed[k] = change if changed: self.changes = UsableChanges(params=changed) return True return False def should_update(self): result = self._update_changed_options() if result: return True return False def exec_module(self): changed = False result = dict() state = self.want.state if state == "present": changed = self.present() elif state == "absent": changed = self.absent() reportable = ReportableChanges(params=self.changes.to_return()) changes = reportable.to_return() result.update(**changes) result.update(dict(changed=changed)) self._announce_deprecations(result) return result def _announce_deprecations(self, result): warnings = result.pop('__warnings', []) for warning in warnings: self.client.module.deprecate( msg=warning['msg'], version=warning['version'] ) def present(self): if self.exists(): return self.update() else: return self.create() def absent(self): if self.exists(): return self.remove() return False def update(self): self.have = self.read_current_from_device() if not self.should_update(): return False if self.module.check_mode: return True self.update_on_device() return True def remove(self): if self.module.check_mode: return True self.remove_from_device() if self.exists(): raise F5ModuleError("Failed to delete the resource.") return True def create(self): self._set_changed_options() if self.module.check_mode: return True self.create_on_device() return True def exists(self): name = self.want.name uri = "https://{0}:{1}/mgmt/tm/gtm/topology/{2}".format( self.client.provider['server'], self.client.provider['server_port'], 
name.replace(' ', '%20').replace('/', '~') ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError: return False if resp.status == 404 or 'code' in response and response['code'] == 404: return False return True def create_on_device(self): params = self.changes.api_params() params['name'] = self.want.name uri = "https://{0}:{1}/mgmt/tm/gtm/topology/".format( self.client.provider['server'], self.client.provider['server_port'], ) resp = self.client.api.post(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 403]: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return True def update_on_device(self): params = self.changes.api_params() name = self.want.name uri = "https://{0}:{1}/mgmt/tm/gtm/topology/{2}".format( self.client.provider['server'], self.client.provider['server_port'], name.replace(' ', '%20').replace('/', '~') ) resp = self.client.api.patch(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) def remove_from_device(self): name = self.want.name uri = "https://{0}:{1}/mgmt/tm/gtm/topology/{2}".format( self.client.provider['server'], self.client.provider['server_port'], name.replace(' ', '%20').replace('/', '~') ) response = self.client.api.delete(uri) if response.status == 200: return True raise F5ModuleError(response.content) def read_current_from_device(self): name = self.want.name uri = "https://{0}:{1}/mgmt/tm/gtm/topology/{2}".format( self.client.provider['server'], self.client.provider['server_port'], name.replace(' ', '%20').replace('/', '~') ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in 
response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return ApiParameters(params=response) class ArgumentSpec(object): def __init__(self): self.supports_check_mode = True self.choices = [ 'AOL', 'BeijingCNC', 'CNC', 'ChinaEducationNetwork', 'ChinaMobilNetwork', 'ChinaRailwayTelcom', 'ChinaTelecom', 'ChinaUnicom', 'Comcast', 'Earthlink', 'ShanghaiCNC', 'ShanghaiTelecom', ] argument_spec = dict( source=dict( required=True, type='dict', options=dict( subnet=dict(), region=dict(), continent=dict(), country=dict(), state=dict(), isp=dict( choices=self.choices ), geo_isp=dict(), negate=dict( type='bool', default='no' ), ), mutually_exclusive=[ ['subnet', 'region', 'continent', 'country', 'isp', 'geo_isp'] ] ), destination=dict( required=True, type='dict', options=dict( subnet=dict(), region=dict(), continent=dict(), country=dict(), state=dict(), pool=dict(), datacenter=dict(), isp=dict( choices=self.choices ), geo_isp=dict(), negate=dict( type='bool', default='no' ), ), mutually_exclusive=[ ['subnet', 'region', 'continent', 'country', 'pool', 'datacenter', 'isp', 'geo_isp'] ] ), weight=dict(type='int'), partition=dict( default='Common', fallback=(env_fallback, ['F5_PARTITION']) ), state=dict( default='present', choices=['present', 'absent'] ) ) self.argument_spec = {} self.argument_spec.update(f5_argument_spec) self.argument_spec.update(argument_spec) def main(): spec = ArgumentSpec() module = AnsibleModule( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode, ) try: mm = ModuleManager(module=module) results = mm.exec_module() module.exit_json(**results) except F5ModuleError as ex: module.fail_json(msg=str(ex)) if __name__ == '__main__': main()
gpl-3.0
jeffery-do/Vizdoombot
doom/lib/python3.5/site-packages/networkx/algorithms/shortest_paths/weighted.py
10
32872
# -*- coding: utf-8 -*- """ Shortest path algorithms for weighed graphs. """ __author__ = """\n""".join(['Aric Hagberg <hagberg@lanl.gov>', 'Loïc Séguin-C. <loicseguin@gmail.com>', 'Dan Schult <dschult@colgate.edu>']) # Copyright (C) 2004-2015 by # Aric Hagberg <hagberg@lanl.gov> # Dan Schult <dschult@colgate.edu> # Pieter Swart <swart@lanl.gov> # All rights reserved. # BSD license. __all__ = ['dijkstra_path', 'dijkstra_path_length', 'bidirectional_dijkstra', 'single_source_dijkstra', 'single_source_dijkstra_path', 'single_source_dijkstra_path_length', 'all_pairs_dijkstra_path', 'all_pairs_dijkstra_path_length', 'dijkstra_predecessor_and_distance', 'bellman_ford', 'negative_edge_cycle', 'goldberg_radzik', 'johnson'] from collections import deque from heapq import heappush, heappop from itertools import count import networkx as nx from networkx.utils import generate_unique_node def dijkstra_path(G, source, target, weight='weight'): """Returns the shortest path from source to target in a weighted graph G. Parameters ---------- G : NetworkX graph source : node Starting node target : node Ending node weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- path : list List of nodes in a shortest path. Raises ------ NetworkXNoPath If no path exists between source and target. Examples -------- >>> G=nx.path_graph(5) >>> print(nx.dijkstra_path(G,0,4)) [0, 1, 2, 3, 4] Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. See Also -------- bidirectional_dijkstra() """ (length, path) = single_source_dijkstra(G, source, target=target, weight=weight) try: return path[target] except KeyError: raise nx.NetworkXNoPath( "node %s not reachable from %s" % (source, target)) def dijkstra_path_length(G, source, target, weight='weight'): """Returns the shortest path length from source to target in a weighted graph. 
Parameters ---------- G : NetworkX graph source : node label starting node for path target : node label ending node for path weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- length : number Shortest path length. Raises ------ NetworkXNoPath If no path exists between source and target. Examples -------- >>> G=nx.path_graph(5) >>> print(nx.dijkstra_path_length(G,0,4)) 4 Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. See Also -------- bidirectional_dijkstra() """ length = single_source_dijkstra_path_length(G, source, weight=weight) try: return length[target] except KeyError: raise nx.NetworkXNoPath( "node %s not reachable from %s" % (source, target)) def single_source_dijkstra_path(G, source, cutoff=None, weight='weight'): """Compute shortest path between source and all other reachable nodes for a weighted graph. Parameters ---------- G : NetworkX graph source : node Starting node for path. weight: string, optional (default='weight') Edge data key corresponding to the edge weight cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- paths : dictionary Dictionary of shortest path lengths keyed by target. Examples -------- >>> G=nx.path_graph(5) >>> path=nx.single_source_dijkstra_path(G,0) >>> path[4] [0, 1, 2, 3, 4] Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. See Also -------- single_source_dijkstra() """ (length, path) = single_source_dijkstra( G, source, cutoff=cutoff, weight=weight) return path def single_source_dijkstra_path_length(G, source, cutoff=None, weight='weight'): """Compute the shortest path length between source and all other reachable nodes for a weighted graph. 
Parameters ---------- G : NetworkX graph source : node label Starting node for path weight: string, optional (default='weight') Edge data key corresponding to the edge weight. cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- length : dictionary Dictionary of shortest lengths keyed by target. Examples -------- >>> G=nx.path_graph(5) >>> length=nx.single_source_dijkstra_path_length(G,0) >>> length[4] 4 >>> print(length) {0: 0, 1: 1, 2: 2, 3: 3, 4: 4} Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. See Also -------- single_source_dijkstra() """ if G.is_multigraph(): get_weight = lambda u, v, data: min( eattr.get(weight, 1) for eattr in data.values()) else: get_weight = lambda u, v, data: data.get(weight, 1) return _dijkstra(G, source, get_weight, cutoff=cutoff) def single_source_dijkstra(G, source, target=None, cutoff=None, weight='weight'): """Compute shortest paths and lengths in a weighted graph G. Uses Dijkstra's algorithm for shortest paths. Parameters ---------- G : NetworkX graph source : node label Starting node for path target : node label, optional Ending node for path cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- distance,path : dictionaries Returns a tuple of two dictionaries keyed by node. The first dictionary stores distance from the source. The second stores the path from the source to that node. Examples -------- >>> G=nx.path_graph(5) >>> length,path=nx.single_source_dijkstra(G,0) >>> print(length[4]) 4 >>> print(length) {0: 0, 1: 1, 2: 2, 3: 3, 4: 4} >>> path[4] [0, 1, 2, 3, 4] Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. 
Based on the Python cookbook recipe (119466) at http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/119466 This algorithm is not guaranteed to work if edge weights are negative or are floating point numbers (overflows and roundoff errors can cause problems). See Also -------- single_source_dijkstra_path() single_source_dijkstra_path_length() """ if source == target: return ({source: 0}, {source: [source]}) if G.is_multigraph(): get_weight = lambda u, v, data: min( eattr.get(weight, 1) for eattr in data.values()) else: get_weight = lambda u, v, data: data.get(weight, 1) paths = {source: [source]} # dictionary of paths return _dijkstra(G, source, get_weight, paths=paths, cutoff=cutoff, target=target) def _dijkstra(G, source, get_weight, pred=None, paths=None, cutoff=None, target=None): """Implementation of Dijkstra's algorithm Parameters ---------- G : NetworkX graph source : node label Starting node for path get_weight: function Function for getting edge weight pred: list, optional(default=None) List of predecessors of a node paths: dict, optional (default=None) Path from the source to a target node. target : node label, optional Ending node for path cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- distance,path : dictionaries Returns a tuple of two dictionaries keyed by node. The first dictionary stores distance from the source. The second stores the path from the source to that node. pred,distance : dictionaries Returns two dictionaries representing a list of predecessors of a node and the distance to each node. distance : dictionary Dictionary of shortest lengths keyed by target. 
""" G_succ = G.succ if G.is_directed() else G.adj push = heappush pop = heappop dist = {} # dictionary of final distances seen = {source: 0} c = count() fringe = [] # use heapq with (distance,label) tuples push(fringe, (0, next(c), source)) while fringe: (d, _, v) = pop(fringe) if v in dist: continue # already searched this node. dist[v] = d if v == target: break for u, e in G_succ[v].items(): cost = get_weight(v, u, e) if cost is None: continue vu_dist = dist[v] + get_weight(v, u, e) if cutoff is not None: if vu_dist > cutoff: continue if u in dist: if vu_dist < dist[u]: raise ValueError('Contradictory paths found:', 'negative weights?') elif u not in seen or vu_dist < seen[u]: seen[u] = vu_dist push(fringe, (vu_dist, next(c), u)) if paths is not None: paths[u] = paths[v] + [u] if pred is not None: pred[u] = [v] elif vu_dist == seen[u]: if pred is not None: pred[u].append(v) if paths is not None: return (dist, paths) if pred is not None: return (pred, dist) return dist def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight='weight'): """Compute shortest path length and predecessors on shortest paths in weighted graphs. Parameters ---------- G : NetworkX graph source : node label Starting node for path weight: string, optional (default='weight') Edge data key corresponding to the edge weight cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- pred,distance : dictionaries Returns two dictionaries representing a list of predecessors of a node and the distance to each node. Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. The list of predecessors contains more than one element only when there are more than one shortest paths to the key node. 
""" if G.is_multigraph(): get_weight = lambda u, v, data: min( eattr.get(weight, 1) for eattr in data.values()) else: get_weight = lambda u, v, data: data.get(weight, 1) pred = {source: []} # dictionary of predecessors return _dijkstra(G, source, get_weight, pred=pred, cutoff=cutoff) def all_pairs_dijkstra_path_length(G, cutoff=None, weight='weight'): """ Compute shortest path lengths between all nodes in a weighted graph. Parameters ---------- G : NetworkX graph weight: string, optional (default='weight') Edge data key corresponding to the edge weight cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- distance : dictionary Dictionary, keyed by source and target, of shortest path lengths. Examples -------- >>> G=nx.path_graph(5) >>> length=nx.all_pairs_dijkstra_path_length(G) >>> print(length[1][4]) 3 >>> length[1] {0: 1, 1: 0, 2: 1, 3: 2, 4: 3} Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. The dictionary returned only has keys for reachable node pairs. """ length = single_source_dijkstra_path_length # TODO This can be trivially parallelized. return {n: length(G, n, cutoff=cutoff, weight=weight) for n in G} def all_pairs_dijkstra_path(G, cutoff=None, weight='weight'): """ Compute shortest paths between all nodes in a weighted graph. Parameters ---------- G : NetworkX graph weight: string, optional (default='weight') Edge data key corresponding to the edge weight cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- distance : dictionary Dictionary, keyed by source and target, of shortest paths. Examples -------- >>> G=nx.path_graph(5) >>> path=nx.all_pairs_dijkstra_path(G) >>> print(path[0][4]) [0, 1, 2, 3, 4] Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. 
See Also -------- floyd_warshall() """ path = single_source_dijkstra_path # TODO This can be trivially parallelized. return {n: path(G, n, cutoff=cutoff, weight=weight) for n in G} def bellman_ford(G, source, weight='weight'): """Compute shortest path lengths and predecessors on shortest paths in weighted graphs. The algorithm has a running time of O(mn) where n is the number of nodes and m is the number of edges. It is slower than Dijkstra but can handle negative edge weights. Parameters ---------- G : NetworkX graph The algorithm works for all types of graphs, including directed graphs and multigraphs. source: node label Starting node for path weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- pred, dist : dictionaries Returns two dictionaries keyed by node to predecessor in the path and to the distance from the source respectively. Raises ------ NetworkXUnbounded If the (di)graph contains a negative cost (di)cycle, the algorithm raises an exception to indicate the presence of the negative cost (di)cycle. Note: any negative weight edge in an undirected graph is a negative cost cycle. Examples -------- >>> import networkx as nx >>> G = nx.path_graph(5, create_using = nx.DiGraph()) >>> pred, dist = nx.bellman_ford(G, 0) >>> sorted(pred.items()) [(0, None), (1, 0), (2, 1), (3, 2), (4, 3)] >>> sorted(dist.items()) [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)] >>> from nose.tools import assert_raises >>> G = nx.cycle_graph(5, create_using = nx.DiGraph()) >>> G[1][2]['weight'] = -7 >>> assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, 0) Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. The dictionaries returned only have keys for nodes reachable from the source. In the case where the (di)graph is not connected, if a component not containing the source contains a negative cost (di)cycle, it will not be detected. 
""" if source not in G: raise KeyError("Node %s is not found in the graph" % source) for u, v, attr in G.selfloop_edges(data=True): if attr.get(weight, 1) < 0: raise nx.NetworkXUnbounded("Negative cost cycle detected.") dist = {source: 0} pred = {source: None} if len(G) == 1: return pred, dist return _bellman_ford_relaxation(G, pred, dist, [source], weight) def _bellman_ford_relaxation(G, pred, dist, source, weight): """Relaxation loop for Bellman–Ford algorithm Parameters ---------- G : NetworkX graph pred: dict Keyed by node to predecessor in the path dist: dict Keyed by node to the distance from the source source: list List of source nodes weight: string Edge data key corresponding to the edge weight Returns ------- Returns two dictionaries keyed by node to predecessor in the path and to the distance from the source respectively. Raises ------ NetworkXUnbounded If the (di)graph contains a negative cost (di)cycle, the algorithm raises an exception to indicate the presence of the negative cost (di)cycle. Note: any negative weight edge in an undirected graph is a negative cost cycle """ if G.is_multigraph(): def get_weight(edge_dict): return min(eattr.get(weight, 1) for eattr in edge_dict.values()) else: def get_weight(edge_dict): return edge_dict.get(weight, 1) G_succ = G.succ if G.is_directed() else G.adj inf = float('inf') n = len(G) count = {} q = deque(source) in_q = set(source) while q: u = q.popleft() in_q.remove(u) # Skip relaxations if the predecessor of u is in the queue. if pred[u] not in in_q: dist_u = dist[u] for v, e in G_succ[u].items(): dist_v = dist_u + get_weight(e) if dist_v < dist.get(v, inf): if v not in in_q: q.append(v) in_q.add(v) count_v = count.get(v, 0) + 1 if count_v == n: raise nx.NetworkXUnbounded( "Negative cost cycle detected.") count[v] = count_v dist[v] = dist_v pred[v] = u return pred, dist def goldberg_radzik(G, source, weight='weight'): """Compute shortest path lengths and predecessors on shortest paths in weighted graphs. 
The algorithm has a running time of O(mn) where n is the number of nodes and m is the number of edges. It is slower than Dijkstra but can handle negative edge weights. Parameters ---------- G : NetworkX graph The algorithm works for all types of graphs, including directed graphs and multigraphs. source: node label Starting node for path weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- pred, dist : dictionaries Returns two dictionaries keyed by node to predecessor in the path and to the distance from the source respectively. Raises ------ NetworkXUnbounded If the (di)graph contains a negative cost (di)cycle, the algorithm raises an exception to indicate the presence of the negative cost (di)cycle. Note: any negative weight edge in an undirected graph is a negative cost cycle. Examples -------- >>> import networkx as nx >>> G = nx.path_graph(5, create_using = nx.DiGraph()) >>> pred, dist = nx.goldberg_radzik(G, 0) >>> sorted(pred.items()) [(0, None), (1, 0), (2, 1), (3, 2), (4, 3)] >>> sorted(dist.items()) [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)] >>> from nose.tools import assert_raises >>> G = nx.cycle_graph(5, create_using = nx.DiGraph()) >>> G[1][2]['weight'] = -7 >>> assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 0) Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. The dictionaries returned only have keys for nodes reachable from the source. In the case where the (di)graph is not connected, if a component not containing the source contains a negative cost (di)cycle, it will not be detected. 
""" if source not in G: raise KeyError("Node %s is not found in the graph" % source) for u, v, attr in G.selfloop_edges(data=True): if attr.get(weight, 1) < 0: raise nx.NetworkXUnbounded("Negative cost cycle detected.") if len(G) == 1: return {source: None}, {source: 0} if G.is_multigraph(): def get_weight(edge_dict): return min(attr.get(weight, 1) for attr in edge_dict.values()) else: def get_weight(edge_dict): return edge_dict.get(weight, 1) if G.is_directed(): G_succ = G.succ else: G_succ = G.adj inf = float('inf') d = dict((u, inf) for u in G) d[source] = 0 pred = {source: None} def topo_sort(relabeled): """Topologically sort nodes relabeled in the previous round and detect negative cycles. """ # List of nodes to scan in this round. Denoted by A in Goldberg and # Radzik's paper. to_scan = [] # In the DFS in the loop below, neg_count records for each node the # number of edges of negative reduced costs on the path from a DFS root # to the node in the DFS forest. The reduced cost of an edge (u, v) is # defined as d[u] + weight[u][v] - d[v]. # # neg_count also doubles as the DFS visit marker array. neg_count = {} for u in relabeled: # Skip visited nodes. if u in neg_count: continue d_u = d[u] # Skip nodes without out-edges of negative reduced costs. if all(d_u + get_weight(e) >= d[v] for v, e in G_succ[u].items()): continue # Nonrecursive DFS that inserts nodes reachable from u via edges of # nonpositive reduced costs into to_scan in (reverse) topological # order. 
stack = [(u, iter(G_succ[u].items()))] in_stack = set([u]) neg_count[u] = 0 while stack: u, it = stack[-1] try: v, e = next(it) except StopIteration: to_scan.append(u) stack.pop() in_stack.remove(u) continue t = d[u] + get_weight(e) d_v = d[v] if t <= d_v: is_neg = t < d_v d[v] = t pred[v] = u if v not in neg_count: neg_count[v] = neg_count[u] + int(is_neg) stack.append((v, iter(G_succ[v].items()))) in_stack.add(v) elif (v in in_stack and neg_count[u] + int(is_neg) > neg_count[v]): # (u, v) is a back edge, and the cycle formed by the # path v to u and (u, v) contains at least one edge of # negative reduced cost. The cycle must be of negative # cost. raise nx.NetworkXUnbounded( 'Negative cost cycle detected.') to_scan.reverse() return to_scan def relax(to_scan): """Relax out-edges of relabeled nodes. """ relabeled = set() # Scan nodes in to_scan in topological order and relax incident # out-edges. Add the relabled nodes to labeled. for u in to_scan: d_u = d[u] for v, e in G_succ[u].items(): w_e = get_weight(e) if d_u + w_e < d[v]: d[v] = d_u + w_e pred[v] = u relabeled.add(v) return relabeled # Set of nodes relabled in the last round of scan operations. Denoted by B # in Goldberg and Radzik's paper. relabeled = set([source]) while relabeled: to_scan = topo_sort(relabeled) relabeled = relax(to_scan) d = dict((u, d[u]) for u in pred) return pred, d def negative_edge_cycle(G, weight='weight'): """Return True if there exists a negative edge cycle anywhere in G. Parameters ---------- G : NetworkX graph weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- negative_cycle : bool True if a negative edge cycle exists, otherwise False. Examples -------- >>> import networkx as nx >>> G = nx.cycle_graph(5, create_using = nx.DiGraph()) >>> print(nx.negative_edge_cycle(G)) False >>> G[1][2]['weight'] = -7 >>> print(nx.negative_edge_cycle(G)) True Notes ----- Edge weight attributes must be numerical. 
Distances are calculated as sums of weighted edges traversed. This algorithm uses bellman_ford() but finds negative cycles on any component by first adding a new node connected to every node, and starting bellman_ford on that node. It then removes that extra node. """ newnode = generate_unique_node() G.add_edges_from([(newnode, n) for n in G]) try: bellman_ford(G, newnode, weight) except nx.NetworkXUnbounded: return True finally: G.remove_node(newnode) return False def bidirectional_dijkstra(G, source, target, weight='weight'): """Dijkstra's algorithm for shortest paths using bidirectional search. Parameters ---------- G : NetworkX graph source : node Starting node. target : node Ending node. weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- length : number Shortest path length. Returns a tuple of two dictionaries keyed by node. The first dictionary stores distance from the source. The second stores the path from the source to that node. Raises ------ NetworkXNoPath If no path exists between source and target. Examples -------- >>> G=nx.path_graph(5) >>> length,path=nx.bidirectional_dijkstra(G,0,4) >>> print(length) 4 >>> print(path) [0, 1, 2, 3, 4] Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. In practice bidirectional Dijkstra is much more than twice as fast as ordinary Dijkstra. Ordinary Dijkstra expands nodes in a sphere-like manner from the source. The radius of this sphere will eventually be the length of the shortest path. Bidirectional Dijkstra will expand nodes from both the source and the target, making two spheres of half this radius. Volume of the first sphere is pi*r*r while the others are 2*pi*r/2*r/2, making up half the volume. This algorithm is not guaranteed to work if edge weights are negative or are floating point numbers (overflows and roundoff errors can cause problems). 
See Also -------- shortest_path shortest_path_length """ if source == target: return (0, [source]) push = heappush pop = heappop # Init: Forward Backward dists = [{}, {}] # dictionary of final distances paths = [{source: [source]}, {target: [target]}] # dictionary of paths fringe = [[], []] # heap of (distance, node) tuples for # extracting next node to expand seen = [{source: 0}, {target: 0}] # dictionary of distances to # nodes seen c = count() # initialize fringe heap push(fringe[0], (0, next(c), source)) push(fringe[1], (0, next(c), target)) # neighs for extracting correct neighbor information if G.is_directed(): neighs = [G.successors_iter, G.predecessors_iter] else: neighs = [G.neighbors_iter, G.neighbors_iter] # variables to hold shortest discovered path #finaldist = 1e30000 finalpath = [] dir = 1 while fringe[0] and fringe[1]: # choose direction # dir == 0 is forward direction and dir == 1 is back dir = 1 - dir # extract closest to expand (dist, _, v) = pop(fringe[dir]) if v in dists[dir]: # Shortest path to v has already been found continue # update distance dists[dir][v] = dist # equal to seen[dir][v] if v in dists[1 - dir]: # if we have scanned v in both directions we are done # we have now discovered the shortest path return (finaldist, finalpath) for w in neighs[dir](v): if(dir == 0): # forward if G.is_multigraph(): minweight = min((dd.get(weight, 1) for k, dd in G[v][w].items())) else: minweight = G[v][w].get(weight, 1) vwLength = dists[dir][v] + minweight # G[v][w].get(weight,1) else: # back, must remember to change v,w->w,v if G.is_multigraph(): minweight = min((dd.get(weight, 1) for k, dd in G[w][v].items())) else: minweight = G[w][v].get(weight, 1) vwLength = dists[dir][v] + minweight # G[w][v].get(weight,1) if w in dists[dir]: if vwLength < dists[dir][w]: raise ValueError( "Contradictory paths found: negative weights?") elif w not in seen[dir] or vwLength < seen[dir][w]: # relaxing seen[dir][w] = vwLength push(fringe[dir], (vwLength, next(c), w)) 
paths[dir][w] = paths[dir][v] + [w] if w in seen[0] and w in seen[1]: # see if this path is better than than the already # discovered shortest path totaldist = seen[0][w] + seen[1][w] if finalpath == [] or finaldist > totaldist: finaldist = totaldist revpath = paths[1][w][:] revpath.reverse() finalpath = paths[0][w] + revpath[1:] raise nx.NetworkXNoPath("No path between %s and %s." % (source, target)) def johnson(G, weight='weight'): """Compute shortest paths between all nodes in a weighted graph using Johnson's algorithm. Parameters ---------- G : NetworkX graph weight: string, optional (default='weight') Edge data key corresponding to the edge weight. Returns ------- distance : dictionary Dictionary, keyed by source and target, of shortest paths. Raises ------ NetworkXError If given graph is not weighted. Examples -------- >>> import networkx as nx >>> graph = nx.DiGraph() >>> graph.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), ... ('0', '2', 2), ('1', '2', 4), ('2', '3', 1)]) >>> paths = nx.johnson(graph, weight='weight') >>> paths['0']['2'] ['0', '1', '2'] Notes ----- Johnson's algorithm is suitable even for graphs with negative weights. It works by using the Bellman–Ford algorithm to compute a transformation of the input graph that removes all negative weights, allowing Dijkstra's algorithm to be used on the transformed graph. It may be faster than Floyd - Warshall algorithm in sparse graphs. 
Algorithm complexity: O(V^2 * logV + V * E) See Also -------- floyd_warshall_predecessor_and_distance floyd_warshall_numpy all_pairs_shortest_path all_pairs_shortest_path_length all_pairs_dijkstra_path bellman_ford """ if not nx.is_weighted(G, weight=weight): raise nx.NetworkXError('Graph is not weighted.') dist = {v: 0 for v in G} pred = {v: None for v in G} # Calculate distance of shortest paths dist_bellman = _bellman_ford_relaxation(G, pred, dist, G.nodes(), weight)[1] if G.is_multigraph(): get_weight = lambda u, v, data: ( min(eattr.get(weight, 1) for eattr in data.values()) + dist_bellman[u] - dist_bellman[v]) else: get_weight = lambda u, v, data: (data.get(weight, 1) + dist_bellman[u] - dist_bellman[v]) all_pairs = {v: _dijkstra(G, v, get_weight, paths={v: [v]})[1] for v in G} return all_pairs
mit
asm-products/cloudroutes-service
src/actions/actions/execute-shell-command/__init__.py
2
1386
"""Email notification reaction.""" from fabric.api import env, run, hide from ..utils import ShouldRun def __action(**kwargs): redata = kwargs['redata'] jdata = kwargs['jdata'] if ShouldRun(redata, jdata): env.gateway = redata['data']['gateway'] env.host_string = redata['data']['host_string'] env.user = redata['data']['username'] env.key = redata['data']['sshkey'] env.disable_known_hosts = True env.warn_only = True env.abort_on_prompts = True env.shell = "/bin/sh -c" try: results = run_cmd(redata['data']['cmd']) if results.succeeded: return True else: raise Exception( 'Command Execution Failed: {0} - {1}'.format(results.return_code, results)) except: raise Exception( 'Command failed to execute') def run_cmd(cmd): with hide('output', 'running', 'warnings'): return run(cmd, timeout=1200) def action(**kwargs): try: return __action(**kwargs) except Exception, e: #pylint: disable=broad-except redata = kwargs['redata'] logger = kwargs['logger'] logger.warning( 'execute-shell-command: Reaction {id} failed: {message}'.format( id=redata['id'], message=e.message)) return False
agpl-3.0