repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
Nick-OpusVL/odoo | addons/hr_timesheet_invoice/report/hr_timesheet_invoice_report.py | 318 | 9494 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp.tools.sql import drop_view_if_exists
class report_timesheet_line(osv.osv):
    """Read-only reporting model backed by a SQL view that aggregates
    analytic (timesheet) lines per date/user/product/account/invoice."""
    _name = "report.timesheet.line"
    _description = "Timesheet Line"
    # _auto = False: no table is created for this model; init() builds
    # the backing SQL view instead.
    _auto = False
    _columns = {
        # 'name' holds the 4-digit year string (to_char(l.date,'YYYY')).
        'name': fields.char('Year', required=False, readonly=True),
        'user_id': fields.many2one('res.users', 'User', readonly=True),
        'date': fields.date('Date', readonly=True),
        # 'day' is the full ISO date string, used for day-level grouping.
        'day': fields.char('Day', size=128, readonly=True),
        'quantity': fields.float('Time', readonly=True),
        'cost': fields.float('Cost', readonly=True),
        'product_id': fields.many2one('product.product', 'Product',readonly=True),
        'account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
        'general_account_id': fields.many2one('account.account', 'General Account', readonly=True),
        'invoice_id': fields.many2one('account.invoice', 'Invoiced', readonly=True),
        # Month stored as zero-padded string to match to_char(...,'MM').
        'month': fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
            ('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month', readonly=True),
    }
    _order = 'name desc,user_id desc'

    def init(self, cr):
        """(Re)create the report_timesheet_line view.

        Only lines with a user are included; quantities and costs are
        summed per (date, user, product, account, general account,
        invoice) combination.
        """
        drop_view_if_exists(cr, 'report_timesheet_line')
        cr.execute("""
            create or replace view report_timesheet_line as (
                select
                    min(l.id) as id,
                    l.date as date,
                    to_char(l.date,'YYYY') as name,
                    to_char(l.date,'MM') as month,
                    l.user_id,
                    to_char(l.date, 'YYYY-MM-DD') as day,
                    l.invoice_id,
                    l.product_id,
                    l.account_id,
                    l.general_account_id,
                    sum(l.unit_amount) as quantity,
                    sum(l.amount) as cost
                from
                    account_analytic_line l
                where
                    l.user_id is not null
                group by
                    l.date,
                    l.user_id,
                    l.product_id,
                    l.account_id,
                    l.general_account_id,
                    l.invoice_id
            )
        """)
class report_timesheet_user(osv.osv):
    """Read-only reporting model: timesheet time/cost aggregated per
    user per day (view groups by l.date as well as year/month)."""
    _name = "report_timesheet.user"
    _description = "Timesheet per day"
    # View-backed model; see init().
    _auto = False
    _columns = {
        # 'name' holds the 4-digit year string.
        'name': fields.char('Year', required=False, readonly=True),
        'user_id':fields.many2one('res.users', 'User', readonly=True),
        'quantity': fields.float('Time', readonly=True),
        'cost': fields.float('Cost', readonly=True),
        'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
            ('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month', readonly=True),
    }
    _order = 'name desc,user_id desc'

    def init(self, cr):
        """(Re)create the report_timesheet_user view.

        NOTE(review): the group by includes l.date, so rows are actually
        per user per day even though only year/month/user are selected —
        confirm this is the intended granularity.
        """
        drop_view_if_exists(cr, 'report_timesheet_user')
        cr.execute("""
            create or replace view report_timesheet_user as (
                select
                    min(l.id) as id,
                    to_char(l.date,'YYYY') as name,
                    to_char(l.date,'MM') as month,
                    l.user_id,
                    sum(l.unit_amount) as quantity,
                    sum(l.amount) as cost
                from
                    account_analytic_line l
                where
                    user_id is not null
                group by l.date, to_char(l.date,'YYYY'),to_char(l.date,'MM'), l.user_id
            )
        """)
class report_timesheet_account(osv.osv):
    """Read-only reporting model: timesheet time aggregated per analytic
    account, per user, per month."""
    _name = "report_timesheet.account"
    _description = "Timesheet per account"
    # View-backed model; see init().
    _auto = False
    _columns = {
        'name': fields.char('Year', required=False, readonly=True),
        'user_id':fields.many2one('res.users', 'User', readonly=True),
        'account_id':fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
        'quantity': fields.float('Time', readonly=True),
        'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
            ('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month', readonly=True),
    }
    _order = 'name desc,account_id desc,user_id desc'

    def init(self, cr):
        """(Re)create the report_timesheet_account view.

        NOTE(review): this view buckets by create_date, unlike the
        sibling views which use the line's date — presumably intentional
        (encoding date vs. work date), but worth confirming.
        """
        drop_view_if_exists(cr, 'report_timesheet_account')
        cr.execute("""
            create or replace view report_timesheet_account as (
                select
                    min(id) as id,
                    to_char(create_date, 'YYYY') as name,
                    to_char(create_date,'MM') as month,
                    user_id,
                    account_id,
                    sum(unit_amount) as quantity
                from
                    account_analytic_line
                group by
                    to_char(create_date, 'YYYY'),to_char(create_date, 'MM'), user_id, account_id
            )
        """)
class report_timesheet_account_date(osv.osv):
    """Read-only reporting model: timesheet time aggregated per analytic
    account, per user, per month of the line's own date."""
    _name = "report_timesheet.account.date"
    _description = "Daily timesheet per account"
    # View-backed model; see init().
    _auto = False
    _columns = {
        'name': fields.char('Year', required=False, readonly=True),
        'user_id':fields.many2one('res.users', 'User', readonly=True),
        'account_id':fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
        'quantity': fields.float('Time', readonly=True),
        'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
            ('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month', readonly=True),
    }
    _order = 'name desc,account_id desc,user_id desc'

    def init(self, cr):
        """(Re)create the report_timesheet_account_date view.

        Same shape as report_timesheet_account but keyed on the line's
        date rather than create_date.
        """
        drop_view_if_exists(cr, 'report_timesheet_account_date')
        cr.execute("""
            create or replace view report_timesheet_account_date as (
                select
                    min(id) as id,
                    to_char(date,'YYYY') as name,
                    to_char(date,'MM') as month,
                    user_id,
                    account_id,
                    sum(unit_amount) as quantity
                from
                    account_analytic_line
                group by
                    to_char(date,'YYYY'),to_char(date,'MM'), user_id, account_id
            )
        """)
class report_timesheet_invoice(osv.osv):
    """Costs-to-invoice report: read-only, SQL-view-backed aggregation of
    invoiceable-but-not-yet-invoiced analytic lines per user / analytic
    account / account manager."""
    _name = "report_timesheet.invoice"
    _description = "Costs to invoice"
    # View-backed model; see init().
    _auto = False
    _columns = {
        'user_id':fields.many2one('res.users', 'User', readonly=True),
        'account_id':fields.many2one('account.analytic.account', 'Project', readonly=True),
        'manager_id':fields.many2one('res.users', 'Manager', readonly=True),
        'quantity': fields.float('Time', readonly=True),
        # Estimated invoiceable amount: hours * product list price.
        'amount_invoice': fields.float('To invoice', readonly=True)
    }
    _rec_name = 'user_id'
    _order = 'user_id desc'

    def init(self, cr):
        """(Re)create the report_timesheet_invoice view.

        Fix: the product joins previously used the nonsensical condition
        (l.to_invoice=f.id) for BOTH product_product and product_template,
        which effectively cross-joined every analytic line with unrelated
        product rows and priced amount_invoice with arbitrary templates.
        Join through the line's own product instead:
        l.product_id -> product_product -> product_template.
        """
        drop_view_if_exists(cr, 'report_timesheet_invoice')
        cr.execute("""
            create or replace view report_timesheet_invoice as (
                select
                    min(l.id) as id,
                    l.user_id as user_id,
                    l.account_id as account_id,
                    a.user_id as manager_id,
                    sum(l.unit_amount) as quantity,
                    sum(l.unit_amount * t.list_price) as amount_invoice
                from account_analytic_line l
                    left join hr_timesheet_invoice_factor f on (l.to_invoice=f.id)
                    left join account_analytic_account a on (l.account_id=a.id)
                    left join product_product p on (l.product_id=p.id)
                    left join product_template t on (p.product_tmpl_id=t.id)
                where
                    l.to_invoice is not null and
                    l.invoice_id is null
                group by
                    l.user_id,
                    l.account_id,
                    a.user_id
            )
        """)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dbinetti/barberscore | project/apps/adjudication/responders.py | 3 | 1374 |
# Third-Party
from rest_framework.response import Response
class PDFResponse(Response):
    """DRF Response that serves a rendered PDF payload as a named file."""

    def __init__(self, pdf, file_name, *args, **kwargs):
        # Advertise the download file name and payload size up front.
        disposition = 'filename="{}.pdf"'.format(file_name)
        super().__init__(
            pdf,
            content_type='application/pdf',
            headers={
                'Content-Disposition': disposition,
                'Content-Length': len(pdf),
            },
            *args,
            **kwargs
        )
class XLSXResponse(Response):
    """DRF Response that serves an XLSX workbook payload as a named file."""

    def __init__(self, xlsx, file_name, *args, **kwargs):
        # Advertise the download file name and payload size up front.
        disposition = 'filename="{}.xlsx"'.format(file_name)
        super().__init__(
            xlsx,
            content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
            headers={
                'Content-Disposition': disposition,
                'Content-Length': len(xlsx),
            },
            *args,
            **kwargs
        )
class DOCXResponse(Response):
    """DRF Response that serves a DOCX document payload as a named file."""

    def __init__(self, docx, file_name, *args, **kwargs):
        headers = {
            'Content-Disposition': 'filename="{}.docx"'.format(file_name),
            # Fix: the original left Content-Length commented out because
            # of a copy-paste bug (it referenced `xlsx`); measure the
            # actual docx payload, matching the sibling response classes.
            'Content-Length': len(docx),
        }
        super().__init__(
            docx,
            content_type='application/vnd.openxmlformats-officedocument.wordprocessingml.document',
            headers=headers,
            *args,
            **kwargs
        )
| bsd-2-clause |
ProfessionalIT/maxigenios-website | sdk/google_appengine/google/appengine/tools/devappserver2/admin/mail_request_handler_test.py | 17 | 3829 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for devappserver2.admin.mail_request_handler."""
import email.message
import unittest
import google
import mox
import webapp2
from google.appengine.tools.devappserver2 import dispatcher
from google.appengine.tools.devappserver2.admin import mail_request_handler
class MailRequestHandlerTest(unittest.TestCase):
    """Unit tests for mail_request_handler.MailRequestHandler using mox
    record/replay mocks."""

    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_generate_email(self):
        # _generate_email should produce a multipart message whose
        # text/plain and text/html alternatives both carry the body.
        message = mail_request_handler.MailRequestHandler._generate_email(
            'to', 'from', 'cc', 'subject', 'body')
        self.assertEqual('from', message['From'])
        self.assertEqual('to', message['To'])
        self.assertEqual('cc', message['Cc'])
        self.assertEqual('subject', message['Subject'])
        text, html = message.get_payload()
        self.assertEqual('text/plain', text.get_content_type())
        self.assertEqual('utf-8', text.get_content_charset())
        content = text.get_payload()
        # The payload may be transfer-encoded (e.g. base64/quoted-printable);
        # decode before comparing against the original body.
        if text['content-transfer-encoding'] != '7bit':
            content = content.decode(text['content-transfer-encoding'])
        self.assertEqual('body', content)
        self.assertEqual('text/html', html.get_content_type())
        self.assertEqual('utf-8', html.get_content_charset())
        content = html.get_payload()
        if html['content-transfer-encoding'] != '7bit':
            content = content.decode(html['content-transfer-encoding'])
        self.assertEqual('body', content)

    def test_send_email(self):
        # A 500 from the dispatched request should propagate to the
        # handler's own HTTP response status.
        response = webapp2.Response()
        handler = mail_request_handler.MailRequestHandler(None, response)
        message = object()
        self.mox.StubOutWithMock(handler, '_send')
        self.mox.StubOutWithMock(handler, '_generate_email')
        handler._generate_email('to', 'from', 'cc', 'subject', 'body').AndReturn(
            message)
        handler._send('/_ah/mail/to', message).AndReturn(
            dispatcher.ResponseTuple('500 Internal Server Error', [], 'Response'))
        self.mox.ReplayAll()
        handler._send_email('to', 'from', 'cc', 'subject', 'body')
        self.mox.VerifyAll()
        self.assertEqual(500, response.status_int)

    def test_send(self):
        # _send should POST the serialized message to the dispatcher with
        # an rfc822 content type and the fixed mail source IP.
        self.mox.StubOutWithMock(mail_request_handler.MailRequestHandler,
                                 'dispatcher')
        handler = mail_request_handler.MailRequestHandler(None, None)
        handler.dispatcher = self.mox.CreateMock(dispatcher.Dispatcher)
        handler.dispatcher.add_request(
            method='POST',
            relative_url='URL',
            headers=[('Content-Type', 'message/rfc822')],
            body='mail message',
            source_ip='0.1.0.20')
        message = self.mox.CreateMock(email.message.Message)
        message.as_string().AndReturn('mail message')
        self.mox.ReplayAll()
        handler._send('URL', message)
        self.mox.VerifyAll()

    def test_post(self):
        # POST form fields should be forwarded verbatim to _send_email.
        request = webapp2.Request.blank('/mail', POST={
            'to': 'to', 'from': 'from', 'cc': 'cc', 'subject': 'subject',
            'body': 'body'})
        response = webapp2.Response()
        handler = mail_request_handler.MailRequestHandler(request, response)
        self.mox.StubOutWithMock(handler, '_send_email')
        handler._send_email('to', 'from', 'cc', 'subject', 'body')
        self.mox.ReplayAll()
        handler.post()
        self.mox.VerifyAll()
# Allow running this test module standalone.
if __name__ == '__main__':
    unittest.main()
| mit |
MarkWh1te/xueqiu_predict | python3_env/lib/python3.4/site-packages/sqlalchemy/testing/provision.py | 28 | 9388 | from sqlalchemy.engine import url as sa_url
from sqlalchemy import text
from sqlalchemy import exc
from sqlalchemy.util import compat
from . import config, engines
import os
import time
import logging
log = logging.getLogger(__name__)
FOLLOWER_IDENT = None
class register(object):
    """Dispatch table mapping database backend names to provisioning
    functions.

    An instance is callable: given a test config (or a URL object, or a
    URL string) it resolves the backend name and invokes the function
    registered for that backend, falling back to the wildcard "*" entry.
    """

    def __init__(self):
        self.fns = {}

    @classmethod
    def init(cls, fn):
        """Create a fresh registry whose wildcard default is *fn*."""
        return register().for_db("*")(fn)

    def for_db(self, dbname):
        """Decorator registering the wrapped function for *dbname*.

        Returns the registry itself, so the module-level name keeps
        pointing at the dispatcher after decoration.
        """
        def decorate(fn):
            self.fns[dbname] = fn
            return self
        return decorate

    def __call__(self, cfg, *arg):
        # Accept a URL string, a URL object, or a full test config.
        if isinstance(cfg, compat.string_types):
            url = sa_url.make_url(cfg)
        elif isinstance(cfg, sa_url.URL):
            url = cfg
        else:
            url = cfg.db.url

        backend = url.get_backend_name()
        key = backend if backend in self.fns else '*'
        return self.fns[key](cfg, *arg)
def create_follower_db(follower_ident):
    """Create the follower database named *follower_ident* on every
    distinct database target in the current test configuration."""
    for cfg in _configs_for_db_operation():
        _create_db(cfg, cfg.db, follower_ident)
def configure_follower(follower_ident):
    """Apply backend-specific follower settings to every registered
    test config (e.g. pointing test schemas at follower databases)."""
    for cfg in config.Config.all_configs():
        _configure_follower(cfg, follower_ident)
def setup_config(db_url, options, file_config, follower_ident):
    """Build, smoke-test and register a testing Config for *db_url*.

    If *follower_ident* is given, the URL is first rewritten to point at
    that follower's database.  Backend-specific engine options and
    post-configuration hooks are applied before registration.
    """
    if follower_ident:
        db_url = _follower_url_from_main(db_url, follower_ident)
    db_opts = {}
    # Backend-specific engine options (e.g. mssql legacy_schema_aliasing).
    _update_db_opts(db_url, db_opts)
    eng = engines.testing_engine(db_url, db_opts)
    _post_configure_engine(db_url, eng, follower_ident)
    # Smoke-test connectivity before registering the config.
    eng.connect().close()
    cfg = config.Config.register(eng, db_opts, options, file_config)
    if follower_ident:
        _configure_follower(cfg, follower_ident)
    return cfg
def drop_follower_db(follower_ident):
    """Drop the follower database named *follower_ident* from every
    distinct database target in the current test configuration."""
    for cfg in _configs_for_db_operation():
        _drop_db(cfg, cfg.db, follower_ident)
def _configs_for_db_operation():
    """Yield one config per distinct (backend, username, host, database)
    target.

    All engines are disposed before and after iteration so no pooled
    connections are held open while databases are created or dropped.
    """
    seen_targets = set()

    for cfg in config.Config.all_configs():
        cfg.db.dispose()

    for cfg in config.Config.all_configs():
        url = cfg.db.url
        target = (
            url.get_backend_name(),
            url.username,
            url.host,
            url.database,
        )
        if target in seen_targets:
            continue
        seen_targets.add(target)
        yield cfg

    for cfg in config.Config.all_configs():
        cfg.db.dispose()
@register.init
def _create_db(cfg, eng, ident):
    """Wildcard fallback: no backend-specific CREATE DATABASE routine."""
    raise NotImplementedError("no DB creation routine for cfg: %s" % eng.url)
@register.init
def _drop_db(cfg, eng, ident):
    """Wildcard fallback: no backend-specific DROP DATABASE routine."""
    raise NotImplementedError("no DB drop routine for cfg: %s" % eng.url)
@register.init
def _update_db_opts(db_url, db_opts):
    """Wildcard fallback: no backend-specific engine options to add."""
    pass
@register.init
def _configure_follower(cfg, ident):
    """Wildcard fallback: no backend-specific follower configuration."""
    pass
@register.init
def _post_configure_engine(url, engine, follower_ident):
    """Wildcard fallback: no backend-specific engine post-configuration."""
    pass
@register.init
def _follower_url_from_main(url, ident):
    """Default: the follower database is named *ident* on the same server."""
    url = sa_url.make_url(url)
    # Mutates the freshly-made URL object in place before returning it.
    url.database = ident
    return url
@_update_db_opts.for_db("mssql")
def _mssql_update_db_opts(db_url, db_opts):
    """MSSQL: disable legacy schema aliasing on the testing engine."""
    db_opts['legacy_schema_aliasing'] = False
@_follower_url_from_main.for_db("sqlite")
def _sqlite_follower_url_from_main(url, ident):
    """Derive the follower URL for sqlite.

    URLs with no database, or in-memory databases, are shared as-is;
    file-backed databases get a per-follower "<ident>.db" file.
    """
    url = sa_url.make_url(url)
    if url.database and url.database != ':memory:':
        return sa_url.make_url("sqlite:///%s.db" % ident)
    return url
@_post_configure_engine.for_db("sqlite")
def _sqlite_post_configure_engine(url, engine, follower_ident):
    """Attach the file-backed ``test_schema`` database on every new
    sqlite connection (per-follower file when running as a follower)."""
    from sqlalchemy import event

    @event.listens_for(engine, "connect")
    def connect(dbapi_connection, connection_record):
        # use file DBs in all cases, memory acts kind of strangely
        # as an attached
        if not follower_ident:
            dbapi_connection.execute(
                'ATTACH DATABASE "test_schema.db" AS test_schema')
        else:
            dbapi_connection.execute(
                'ATTACH DATABASE "%s_test_schema.db" AS test_schema'
                % follower_ident)
@_create_db.for_db("postgresql")
def _pg_create_db(cfg, eng, ident):
    """Create follower database *ident* as a copy of the current database.

    CREATE DATABASE cannot run inside a transaction, hence AUTOCOMMIT.
    Copying from a template fails while other sessions are connected to
    the source db, so that specific error is retried up to three times.
    """
    with eng.connect().execution_options(
            isolation_level="AUTOCOMMIT") as conn:
        try:
            # Best-effort cleanup; the database may simply not exist yet.
            _pg_drop_db(cfg, conn, ident)
        except Exception:
            pass
        currentdb = conn.scalar("select current_database()")
        for attempt in range(3):
            try:
                conn.execute(
                    "CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb))
            except exc.OperationalError as err:
                # Retry only the "source db in use" failure, and only on
                # the first two attempts; otherwise propagate.
                if attempt != 2 and "accessed by other users" in str(err):
                    time.sleep(.2)
                    continue
                else:
                    raise
            else:
                break
@_create_db.for_db("mysql")
def _mysql_create_db(cfg, eng, ident):
    """Create the follower database plus its two auxiliary test schemas."""
    with eng.connect() as conn:
        try:
            # Best-effort cleanup; the databases may simply not exist yet.
            _mysql_drop_db(cfg, conn, ident)
        except Exception:
            pass
        conn.execute("CREATE DATABASE %s" % ident)
        conn.execute("CREATE DATABASE %s_test_schema" % ident)
        conn.execute("CREATE DATABASE %s_test_schema_2" % ident)
@_configure_follower.for_db("mysql")
def _mysql_configure_follower(config, ident):
    """Point the test config's side schemas at the follower's databases."""
    config.test_schema = ident + "_test_schema"
    config.test_schema_2 = ident + "_test_schema_2"
@_create_db.for_db("sqlite")
def _sqlite_create_db(cfg, eng, ident):
    """sqlite: nothing to create up front; files are made on first use."""
    pass
@_drop_db.for_db("postgresql")
def _pg_drop_db(cfg, eng, ident):
    """Drop follower database *ident*, first terminating its sessions.

    DROP DATABASE fails while any session is connected to the target, so
    our own leftover backends on that database are killed first.
    """
    with eng.connect().execution_options(
            isolation_level="AUTOCOMMIT") as conn:
        conn.execute(
            text(
                "select pg_terminate_backend(pid) from pg_stat_activity "
                "where usename=current_user and pid != pg_backend_pid() "
                "and datname=:dname"
            ), dname=ident)
        conn.execute("DROP DATABASE %s" % ident)
@_drop_db.for_db("sqlite")
def _sqlite_drop_db(cfg, eng, ident):
    """Remove the sqlite database file(s) for this run.

    NOTE(review): for a follower only the attached "<ident>_test_schema.db"
    file is removed; the "<ident>.db" main file produced by
    _sqlite_follower_url_from_main appears to be left behind — confirm
    whether that is intentional.
    """
    if ident:
        os.remove("%s_test_schema.db" % ident)
    else:
        # With ident None this targets "None.db" — presumably never hit
        # in practice; verify against the callers.
        os.remove("%s.db" % ident)
@_drop_db.for_db("mysql")
def _mysql_drop_db(cfg, eng, ident):
    """Drop the follower database and its two auxiliary test schemas."""
    with eng.connect() as conn:
        conn.execute("DROP DATABASE %s_test_schema" % ident)
        conn.execute("DROP DATABASE %s_test_schema_2" % ident)
        conn.execute("DROP DATABASE %s" % ident)
@_create_db.for_db("oracle")
def _oracle_create_db(cfg, eng, ident):
    """Create the follower's Oracle user plus two tablespace-test users.

    On Oracle a "database" is modeled as a user/schema; the follower gets
    DBA rights and all three users get unlimited tablespace quota.
    """
    # NOTE: make sure you've run "ALTER DATABASE default tablespace users" or
    # similar, so that the default tablespace is not "system"; reflection will
    # fail otherwise
    with eng.connect() as conn:
        conn.execute("create user %s identified by xe" % ident)
        conn.execute("create user %s_ts1 identified by xe" % ident)
        conn.execute("create user %s_ts2 identified by xe" % ident)
        conn.execute("grant dba to %s" % (ident, ))
        conn.execute("grant unlimited tablespace to %s" % ident)
        conn.execute("grant unlimited tablespace to %s_ts1" % ident)
        conn.execute("grant unlimited tablespace to %s_ts2" % ident)
@_configure_follower.for_db("oracle")
def _oracle_configure_follower(config, ident):
    """Point the test config's side schemas at the follower's helper users."""
    config.test_schema = ident + "_ts1"
    config.test_schema_2 = ident + "_ts2"
def _ora_drop_ignore(conn, dbname):
    """Try to drop Oracle user *dbname*; return True on success.

    Failures (user still connected, user missing, etc.) are logged and
    swallowed so that reaping can continue with the next identifier.
    """
    try:
        conn.execute("drop user %s cascade" % dbname)
        log.info("Reaped db: %s", dbname)
        return True
    except exc.DatabaseError as err:
        log.warning("couldn't drop db: %s", err)
        return False
@_drop_db.for_db("oracle")
def _oracle_drop_db(cfg, eng, ident):
    """Best-effort drop of the follower user and its two helper users."""
    with eng.connect() as conn:
        # cx_Oracle seems to occasionally leak open connections when a large
        # suite it run, even if we confirm we have zero references to
        # connection objects.
        # while there is a "kill session" command in Oracle,
        # it unfortunately does not release the connection sufficiently.
        _ora_drop_ignore(conn, ident)
        _ora_drop_ignore(conn, "%s_ts1" % ident)
        _ora_drop_ignore(conn, "%s_ts2" % ident)
def reap_oracle_dbs(eng, idents_file):
    """Drop stale TEST_* Oracle users left behind by previous runs.

    *idents_file* lists one identifier per line; only users that appear
    in the file, are currently disconnected, and match TEST_%% are
    dropped (together with their _ts1/_ts2 helper users).
    """
    log.info("Reaping Oracle dbs...")
    with eng.connect() as conn:
        with open(idents_file) as file_:
            idents = set(line.strip() for line in file_)

        log.info("identifiers in file: %s", ", ".join(idents))

        # Candidates: TEST_* users with no active session.
        to_reap = conn.execute(
            "select u.username from all_users u where username "
            "like 'TEST_%' and not exists (select username "
            "from v$session where username=u.username)")
        all_names = set([username.lower() for (username, ) in to_reap])
        to_drop = set()
        for name in all_names:
            if name.endswith("_ts1") or name.endswith("_ts2"):
                # Helper users are dropped alongside their owner below.
                continue
            elif name in idents:
                to_drop.add(name)
                if "%s_ts1" % name in all_names:
                    to_drop.add("%s_ts1" % name)
                if "%s_ts2" % name in all_names:
                    to_drop.add("%s_ts2" % name)

        dropped = total = 0
        for total, username in enumerate(to_drop, 1):
            if _ora_drop_ignore(conn, username):
                dropped += 1
        log.info(
            "Dropped %d out of %d stale databases detected", dropped, total)
@_follower_url_from_main.for_db("oracle")
def _oracle_follower_url_from_main(url, ident):
    """Oracle followers connect as the per-ident user created by
    _oracle_create_db (fixed password 'xe')."""
    url = sa_url.make_url(url)
    url.username = ident
    url.password = 'xe'
    return url
| mit |
drkitty/cyder | vendor-local/src/django-extensions/build/lib/django_extensions/management/commands/dumpscript.py | 23 | 20723 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Title: Dumpscript management command
Project: Hardytools (queryset-refactor version)
Author: Will Hardy (http://willhardy.com.au)
Date: June 2008
Usage: python manage.py dumpscript appname > scripts/scriptname.py
$Revision: 217 $
Description:
Generates a Python script that will repopulate the database using objects.
The advantage of this approach is that it is easy to understand, and more
flexible than directly populating the database, or using XML.
* It also allows for new defaults to take effect and only transfers what is
needed.
* If a new database schema has a NEW ATTRIBUTE, it is simply not
populated (using a default value will make the transition smooth :)
* If a new database schema REMOVES AN ATTRIBUTE, it is simply ignored
and the data moves across safely (I'm assuming we don't want this
attribute anymore.
* Problems may only occur if there is a new model and is now a required
ForeignKey for an existing model. But this is easy to fix by editing the
populate script :)
Improvements:
See TODOs and FIXMEs scattered throughout :-)
"""
import sys
import django
from django.db import models
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand
from django.utils.encoding import smart_unicode, force_unicode
from django.contrib.contenttypes.models import ContentType
class Command(BaseCommand):
    """Management command entry point: writes a repopulation script for
    the requested apps/models to stdout."""
    help = 'Dumps the data as a customised python script.'
    args = '[appname ...]'

    def handle(self, *app_labels, **options):
        # Get the models we want to export
        models = get_models(app_labels)

        # A dictionary is created to keep track of all the processed objects,
        # so that foreign key references can be made using python variable names.
        # This variable "context" will be passed around like the town bicycle.
        context = {}

        # Create a dumpscript object and let it format itself as a string
        # (Python 2 print statement: Script.__str__ renders the script).
        print Script(models=models, context=context)
def get_models(app_labels):
    """ Gets a list of models for the given app labels, with some exceptions.
        TODO: If a required model is referenced, it should also be included.
        Or at least discovered with a get_or_create() call.
    """
    # Imports are local so this module can be imported without a
    # configured Django settings environment.
    from django.db.models import get_app, get_apps, get_model
    from django.db.models import get_models as get_all_models

    # These models are not to be output, e.g. because they can be generated automatically
    # TODO: This should be "appname.modelname" string
    from django.contrib.contenttypes.models import ContentType
    EXCLUDED_MODELS = (ContentType, )

    models = []

    # If no app labels are given, return all
    if not app_labels:
        for app in get_apps():
            models += [m for m in get_all_models(app) if m not in EXCLUDED_MODELS]

    # Get all relevant apps
    for app_label in app_labels:
        # If a specific model is mentioned, get only that model
        # ("app.Model" form); excluded models are NOT filtered here.
        if "." in app_label:
            app_label, model_name = app_label.split(".", 1)
            models.append(get_model(app_label, model_name))
        # Get all models for a given app
        else:
            models += [m for m in get_all_models(get_app(app_label)) if m not in EXCLUDED_MODELS]

    return models
class Code(object):
    """ A snippet of python script.
        This keeps track of import statements and can be output to a string.
        In the future, other features such as custom indentation might be included
        in this class.
    """

    def __init__(self):
        # Maps imported name -> module it comes from ("from mod import name").
        self.imports = {}
        self.indent = -1

    def __str__(self):
        """ Returns a string representation of this script.
        """
        if self.imports:
            # Debug aid: echo the computed import lines to stderr.
            sys.stderr.write(repr(self.import_lines))
            return flatten_blocks([""] + self.import_lines + [""] + self.lines, num_indents=self.indent)
        else:
            return flatten_blocks(self.lines, num_indents=self.indent)

    def get_import_lines(self):
        """ Takes the stored imports and converts them to lines
        """
        if self.imports:
            return ["from %s import %s" % (value, key) for key, value in self.imports.items()]
        else:
            return []
    import_lines = property(get_import_lines)
class ModelCode(Code):
    " Produces a python script that can recreate data for a given model class. "

    def __init__(self, model, context={}):
        # NOTE(review): mutable default for `context` — callers appear to
        # always pass a shared dict; verify before relying on the default.
        self.model = model
        self.context = context
        self.instances = []
        self.indent = 0

    def get_imports(self):
        """ Returns a dictionary of import statements, with the variable being
            defined as the key.
        """
        return {self.model.__name__: smart_unicode(self.model.__module__)}
    imports = property(get_imports)

    def get_lines(self):
        """ Returns a list of lists or strings, representing the code body.
            Each list is a block, each string is a statement.
        """
        code = []

        for counter, item in enumerate(self.model._default_manager.all()):
            instance = InstanceCode(instance=item, id=counter + 1, context=self.context)
            self.instances.append(instance)
            if instance.waiting_list:
                code += instance.lines

        # After each instance has been processed, try again.
        # This allows self referencing fields to work.
        for instance in self.instances:
            if instance.waiting_list:
                code += instance.lines

        return code
    lines = property(get_lines)
class InstanceCode(Code):
    " Produces a python script that can recreate data for a given model instance. "

    def __init__(self, instance, id, context={}):
        """ We need the instance in question and an id """
        # NOTE(review): mutable default for `context`; ModelCode always
        # passes its own dict — verify before relying on the default.
        self.instance = instance
        self.model = self.instance.__class__
        self.context = context
        # Variable name used in the generated script, e.g. "app_model_7".
        self.variable_name = "%s_%s" % (self.instance._meta.db_table, id)
        self.skip_me = None
        self.instantiated = False
        self.indent = 0
        self.imports = {}

        # Fields that still need to be emitted (removed as they resolve).
        self.waiting_list = list(self.model._meta.fields)

        # Maps m2m field -> related instances still to be .add()-ed.
        self.many_to_many_waiting_list = {}
        for field in self.model._meta.many_to_many:
            self.many_to_many_waiting_list[field] = list(getattr(self.instance, field.name).all())

    def get_lines(self, force=False):
        """ Returns a list of lists or strings, representing the code body.
            Each list is a block, each string is a statement.

            force (True or False): if an attribute object cannot be included,
            it is usually skipped to be processed later. With 'force' set, there
            will be no waiting: a get_or_create() call is written instead.
        """
        code_lines = []

        # Don't return anything if this is an instance that should be skipped
        if self.skip():
            return []

        # Initialise our new object
        # e.g. model_name_35 = Model()
        code_lines += self.instantiate()

        # Add each field
        # e.g. model_name_35.field_one = 1034.91
        #      model_name_35.field_two = "text"
        code_lines += self.get_waiting_list()

        if force:
            # TODO: Check that M2M are not affected
            code_lines += self.get_waiting_list(force=force)

        # Print the save command for our new object
        # e.g. model_name_35.save()
        if code_lines:
            code_lines.append("%s.save()\n" % (self.variable_name))

        code_lines += self.get_many_to_many_lines(force=force)

        return code_lines
    lines = property(get_lines)

    def skip(self):
        """ Determine whether or not this object should be skipped.
            If this model instance is a parent of a single subclassed
            instance, skip it. The subclassed instance will create this
            parent instance for us.

            TODO: Allow the user to force its creation?
        """
        if self.skip_me is not None:
            return self.skip_me

        def get_skip_version():
            """ Return which version of the skip code should be run

                Django's deletion code was refactored in r14507 which
                was just two days before 1.3 alpha 1 (r14519)
            """
            if not hasattr(self, '_SKIP_VERSION'):
                version = django.VERSION
                # no, it isn't lisp. I swear.
                self._SKIP_VERSION = (
                    version[0] > 1 or (  # django 2k... someday :)
                        version[0] == 1 and (  # 1.x
                            version[1] >= 4 or  # 1.4+
                            version[1] == 3 and not (  # 1.3.x
                                (version[3] == 'alpha' and version[1] == 0)
                            )
                        )
                    )
                ) and 2 or 1  # py2-era conditional expression idiom
            return self._SKIP_VERSION

        if get_skip_version() == 1:
            try:
                # Django trunk since r7722 uses CollectedObjects instead of dict
                from django.db.models.query import CollectedObjects
                sub_objects = CollectedObjects()
            except ImportError:
                # previous versions don't have CollectedObjects
                sub_objects = {}
            self.instance._collect_sub_objects(sub_objects)
            sub_objects = sub_objects.keys()

        elif get_skip_version() == 2:
            from django.db.models.deletion import Collector
            from django.db import router
            cls = self.instance.__class__
            using = router.db_for_write(cls, instance=self.instance)
            collector = Collector(using=using)
            collector.collect([self.instance])

            # collector stores its instances in two places. I *think* we
            # only need collector.data, but using the batches is needed
            # to perfectly emulate the old behaviour
            # TODO: check if batches are really needed. If not, remove them.
            sub_objects = sum([list(i) for i in collector.data.values()], [])

            for batch in collector.batches.values():
                # batch.values can be sets, which must be converted to lists
                sub_objects += sum([list(i) for i in batch.values()], [])

        sub_objects_parents = [so._meta.parents for so in sub_objects]
        if [self.model in p for p in sub_objects_parents].count(True) == 1:
            # since this instance isn't explicitly created, it's variable name
            # can't be referenced in the script, so record None in context dict
            pk_name = self.instance._meta.pk.name
            key = '%s_%s' % (self.model.__name__, getattr(self.instance, pk_name))
            self.context[key] = None
            self.skip_me = True
        else:
            self.skip_me = False

        return self.skip_me

    def instantiate(self):
        " Write lines for instantiation "
        # e.g. model_name_35 = Model()
        code_lines = []

        if not self.instantiated:
            code_lines.append("%s = %s()" % (self.variable_name, self.model.__name__))
            self.instantiated = True

            # Store our variable name for future foreign key references
            pk_name = self.instance._meta.pk.name
            key = '%s_%s' % (self.model.__name__, getattr(self.instance, pk_name))
            self.context[key] = self.variable_name

        return code_lines

    def get_waiting_list(self, force=False):
        " Add lines for any waiting fields that can be completed now. "

        code_lines = []

        # Process normal fields
        for field in list(self.waiting_list):
            try:
                # Find the value, add the line, remove from waiting list and move on
                value = get_attribute_value(self.instance, field, self.context, force=force)
                code_lines.append('%s.%s = %s' % (self.variable_name, field.name, value))
                self.waiting_list.remove(field)
            except SkipValue, e:
                # Remove from the waiting list and move on
                self.waiting_list.remove(field)
                continue
            except DoLater, e:
                # Move on, maybe next time
                continue

        return code_lines

    def get_many_to_many_lines(self, force=False):
        """ Generates lines that define many to many relations for this instance. """

        lines = []

        for field, rel_items in self.many_to_many_waiting_list.items():
            for rel_item in list(rel_items):
                try:
                    # Emit an .add() using the related object's script
                    # variable name, if it has been created already.
                    pk_name = rel_item._meta.pk.name
                    key = '%s_%s' % (rel_item.__class__.__name__, getattr(rel_item, pk_name))
                    value = "%s" % self.context[key]
                    lines.append('%s.%s.add(%s)' % (self.variable_name, field.name, value))
                    self.many_to_many_waiting_list[field].remove(rel_item)
                except KeyError:
                    if force:
                        # Fall back to a runtime lookup by primary key.
                        value = "%s.objects.get(%s=%s)" % (rel_item._meta.object_name, pk_name, getattr(rel_item, pk_name))
                        lines.append('%s.%s.add(%s)' % (self.variable_name, field.name, value))
                        self.many_to_many_waiting_list[field].remove(rel_item)

        if lines:
            lines.append("")

        return lines
class Script(Code):
    " Produces a complete python script that can recreate data for the given apps. "

    def __init__(self, models, context={}):
        # NOTE(review): mutable default for `context`; the management
        # command always passes its own dict.
        self.models = models
        self.context = context

        self.indent = -1
        self.imports = {}

    def get_lines(self):
        """ Returns a list of lists or strings, representing the code body.
            Each list is a block, each string is a statement.
        """
        code = [self.FILE_HEADER.strip()]

        # Queue and process the required models
        # (queue_models is defined elsewhere in this module: it orders
        # models so FK targets come first — presumably; confirm there.)
        for model_class in queue_models(self.models, context=self.context):
            sys.stderr.write('Processing model: %s\n' % model_class.model.__name__)
            code.append(model_class.import_lines)
            code.append("")
            code.append(model_class.lines)

        # Process left over foreign keys from cyclic models
        for model in self.models:
            sys.stderr.write('Re-processing model: %s\n' % model.model.__name__)
            for instance in model.instances:
                if instance.waiting_list or instance.many_to_many_waiting_list:
                    code.append(instance.get_lines(force=True))

        return code
    lines = property(get_lines)

    # A user-friendly file header
    FILE_HEADER = """
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# This file has been automatically generated, changes may be lost if you
# go and generate it again. It was generated with the following command:
# %s

import datetime
from decimal import Decimal
from django.contrib.contenttypes.models import ContentType

def run():
""" % " ".join(sys.argv)
# HELPER FUNCTIONS
#-------------------------------------------------------------------------------
def flatten_blocks(lines, num_indents=-1):
    """ Takes a list (block) or string (statement) and flattens it into a string
    with indentation.

    Nested lists indent one extra level per depth; the top-level call starts
    at -1 so top-level statements end up with zero indentation.
    """
    # Four spaces per indentation level.
    INDENTATION = " " * 4

    # Nothing to render for empty/None input.
    if not lines:
        return ""

    # A single statement: prefix it with the accumulated indentation.
    if isinstance(lines, basestring):
        return INDENTATION * num_indents + lines

    # A block: recurse one level deeper and join the rendered pieces.
    return "\n".join(flatten_blocks(chunk, num_indents + 1) for chunk in lines)
def get_attribute_value(item, field, context, force=False):
    """ Gets a string version of the given attribute's value, like repr() might.

    item    -- the model instance being serialised
    field   -- the django field object being read
    context -- maps "<ClassName>_<pk>" keys to generated variable names
    force   -- when True, unresolved foreign keys become ``.objects.get(...)``
               lookups instead of raising DoLater

    Raises SkipValue when the field must be left out of the generated script,
    and DoLater when a foreign key cannot be resolved yet.
    """
    # Find the value of the field, catching any database issues
    try:
        value = getattr(item, field.name)
    except ObjectDoesNotExist:
        raise SkipValue('Could not find object for %s.%s, ignoring.\n' % (item.__class__.__name__, field.name))

    # AutoField: We don't include the auto fields, they'll be automatically recreated
    if isinstance(field, models.AutoField):
        raise SkipValue()

    # Some databases (eg MySQL) might store boolean values as 0/1, this needs to be cast as a bool
    elif isinstance(field, models.BooleanField) and value is not None:
        return repr(bool(value))

    # Post file-storage-refactor, repr() on File/ImageFields no longer returns the path
    elif isinstance(field, models.FileField):
        return repr(force_unicode(value))

    # ForeignKey fields, link directly using our stored python variable name
    elif isinstance(field, models.ForeignKey) and value is not None:

        # Special case for contenttype foreign keys: no need to output any
        # content types in this script, as they can be generated again
        # automatically.
        # NB: Not sure if "is" will always work
        if field.rel.to is ContentType:
            return 'ContentType.objects.get(app_label="%s", model="%s")' % (value.app_label, value.model)

        # Generate an identifier (key) for this foreign object
        pk_name = value._meta.pk.name
        key = '%s_%s' % (value.__class__.__name__, getattr(value, pk_name))

        if key in context:
            variable_name = context[key]
            # If the context value is set to None, this should be skipped.
            # This identifies models that have been skipped (inheritance)
            if variable_name is None:
                raise SkipValue()
            # Return the variable name listed in the context
            return "%s" % variable_name
        elif force:
            # Caller asked us not to defer: emit a primary-key lookup.
            return "%s.objects.get(%s=%s)" % (value._meta.object_name, pk_name, getattr(value, pk_name))
        else:
            raise DoLater('(FK) %s.%s\n' % (item.__class__.__name__, field.name))

    # A normal field (e.g. a python built-in)
    else:
        return repr(value)
def queue_models(models, context):
    """ Works out an appropriate ordering for the models.

    This isn't essential, but makes the script look nicer because
    more instances can be defined on their first try.

    NOTE: ``models`` is mutated in place -- processed entries are removed
    and, when a dependency cycle is detected, the leftovers are replaced
    by their ModelCode wrappers.
    """
    # Max number of cycles allowed before we call it an infinite loop.
    MAX_CYCLES = 5

    model_queue = []
    number_remaining_models = len(models)
    allowed_cycles = MAX_CYCLES

    while number_remaining_models > 0:
        previous_number_remaining_models = number_remaining_models

        model = models.pop(0)

        # If the model is ready to be processed, add it to the list
        if check_dependencies(model, model_queue):
            model_class = ModelCode(model=model, context=context)
            model_queue.append(model_class)

        # Otherwise put the model back at the end of the list
        else:
            models.append(model)

        # Check for infinite loops.
        # This means there is a cyclic foreign key structure
        # That cannot be resolved by re-ordering
        number_remaining_models = len(models)
        if number_remaining_models == previous_number_remaining_models:
            # No progress this full pass; burn one of the allowed retries.
            allowed_cycles -= 1
            if allowed_cycles <= 0:
                # Add the remaining models, but do not remove them from the model list
                missing_models = [ModelCode(model=m, context=context) for m in models]
                model_queue += missing_models
                # Replace the models with the model class objects
                # (sure, this is a little bit of hackery)
                models[:] = missing_models
                break
        else:
            # Progress was made; reset the retry budget.
            allowed_cycles = MAX_CYCLES

    return model_queue
def check_dependencies(model, model_queue):
    """ Check that all the dependencies for this model are already in the queue. """
    # A list of allowed links: existing fields, itself and the special case ContentType
    allowed_links = [m.model.__name__ for m in model_queue] + [model.__name__, 'ContentType']

    # For each ForeignKey or ManyToMany field, check that a link is possible
    for field in model._meta.fields + model._meta.many_to_many:
        if field.rel and field.rel.to.__name__ not in allowed_links:
            # Relation points at a model not yet queued; not ready.
            return False

    return True
# EXCEPTIONS
#-------------------------------------------------------------------------------
# Raised by get_attribute_value() when a field must be left out of the script.
class SkipValue(Exception):
    """ Value could not be parsed or should simply be skipped. """
# Raised when a foreign-key target has no generated variable name yet
# (typically a cyclic model); the caller retries later with force=True.
class DoLater(Exception):
    """ Value cannot be resolved yet and should be handled in a later pass. """
| bsd-3-clause |
idncom/odoo | addons/base_report_designer/plugin/openerp_report_designer/bin/script/About.py | 293 | 3815 | #########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import uno
from com.sun.star.task import XJobExecutor
# Pull in the GUI helpers only when run as a macro/script, not when loaded
# as an extension package.  The py2-only ``<>`` operator is replaced by the
# equivalent ``!=`` (valid on every Python version).
if __name__ != 'package':
    from lib.gui import *
class About(unohelper.Base, XJobExecutor):
    """Modal "About" dialog for the Odoo (OpenERP) report designer plugin.

    The dialog is assembled widget-by-widget with the UNO toolkit and shown
    modally from __init__, so merely constructing an instance displays it.
    """

    def __init__(self, ctx):
        # UNO component context supplied by the office runtime
        # (None when launched directly from the command line).
        self.ctx = ctx
        self.module = "openerp_report"
        self.version = "0.1"

        # Dialog frame: x, y, width, height in dialog units, then the title.
        self.win = DBModalDialog(60, 50, 175, 115, "About Odoo Report Designer")

        # Font descriptor reused (and re-tweaked) for each label below.
        # NOTE(review): this assumes the struct is copied when assigned to a
        # control model -- confirm, otherwise earlier labels would pick up
        # the later size changes.
        fdBigFont = createUnoStruct("com.sun.star.awt.FontDescriptor")
        fdBigFont.Width = 20
        fdBigFont.Height = 25
        fdBigFont.Weight = 120
        fdBigFont.Family = 3

        # Two-part title: coloured "Open" followed by "ERP".
        oLabelTitle1 = self.win.addFixedText("lblTitle1", 1, 1, 35, 30)
        oLabelTitle1.Model.TextColor = 16056320
        oLabelTitle1.Model.FontDescriptor = fdBigFont
        oLabelTitle1.Model.FontRelief = 1
        oLabelTitle1.Text = "Open"

        oLabelTitle2 = self.win.addFixedText("lblTitle2", 35, 1, 30, 30)
        oLabelTitle2.Model.TextColor = 1
        oLabelTitle2.Model.FontDescriptor = fdBigFont
        oLabelTitle2.Model.FontRelief = 1
        oLabelTitle2.Text = "ERP"

        # Product description paragraph in a medium font.
        oLabelProdDesc = self.win.addFixedText("lblProdDesc", 1, 30, 173, 75)
        oLabelProdDesc.Model.TextColor = 1
        fdBigFont.Width = 10
        fdBigFont.Height = 11
        fdBigFont.Weight = 76
        oLabelProdDesc.Model.FontDescriptor = fdBigFont
        oLabelProdDesc.Model.Align = 1
        oLabelProdDesc.Model.FontRelief = 1
        oLabelProdDesc.Model.MultiLine = True
        oLabelProdDesc.Text = "This package helps you to create or modify\nreports in Odoo. Once connected to the\nserver, you can design your template of reports\nusing fields and expressions and browsing the\ncomplete structure of Odoo object database."

        # Footer: version / copyright / licence notice in a small bordered box.
        oLabelFooter = self.win.addFixedText("lblFooter", -1, -1, 173, 25)
        oLabelFooter.Model.TextColor = 255
        #oLabelFooter.Model.BackgroundColor = 1
        oLabelFooter.Model.Border = 2
        oLabelFooter.Model.BorderColor = 255
        fdBigFont.Width = 8
        fdBigFont.Height = 9
        fdBigFont.Weight = 100
        oLabelFooter.Model.FontDescriptor = fdBigFont
        oLabelFooter.Model.Align = 1
        oLabelFooter.Model.FontRelief = 1
        oLabelFooter.Model.MultiLine = True
        sMessage = "Odoo Report Designer v1.0 \nCopyright 2007-TODAY Tiny sprl \nThis product is free software, under the GNU Affero General Public License."
        oLabelFooter.Text = sMessage

        # Block until the user closes the dialog.
        self.win.doModalDialog("", None)
# Run directly: show the dialog immediately.  Loaded as an extension
# package: register the About job with the UNO implementation helper.
# The py2-only ``<>`` operator is replaced by the equivalent ``!=``
# (the sibling branch already uses ``==``, so this is also consistent).
if __name__ != "package" and __name__ == "__main__":
    About(None)
elif __name__ == "package":
    g_ImplementationHelper.addImplementation(
        About, "org.openoffice.openerp.report.about", ("com.sun.star.task.Job",),)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
asajeffrey/servo | tests/wpt/web-platform-tests/tools/ci/tc/github_checks_output.py | 7 | 1606 | import io
from six import ensure_text
MYPY = False
if MYPY:
# MYPY is set to True when run under Mypy.
from typing import AnyStr, Optional, Text
class GitHubChecksOutputter(object):
    """Provides a method to output data to be shown in the GitHub Checks UI.

    This can be useful to provide a summary of a given check (e.g. the lint)
    to enable developers to quickly understand what has gone wrong. The output
    supports markdown format.

    https://docs.taskcluster.net/docs/reference/integrations/github/checks#custom-text-output-in-checks
    """
    def __init__(self, path):
        # type: (Text) -> None
        # Path of the text file the GitHub Checks integration reads.
        self.path = path

    def output(self, line):
        # type: (AnyStr) -> None
        """Append one (markdown-capable) line to the output file."""
        text = ensure_text(line)
        # NOTE: mypy types the "text mode" of open() in Python 2 as BinaryIO,
        # which makes sense as we cannot specify its encoding (it's
        # platform-dependent), while io.open() is closer to open() in Python 3.
        # TODO: use the built-in open() when we are Py3-only.
        with io.open(self.path, mode="a") as f:
            f.write(text)
            f.write(u"\n")
# Module-wide singleton; created lazily by get_gh_checks_outputter().
__outputter = None

def get_gh_checks_outputter(filepath):
    # type: (Optional[Text]) -> Optional[GitHubChecksOutputter]
    """Return the outputter for GitHub Checks output, if enabled.

    :param filepath: The filepath to write GitHub Check output information to,
        or None if not enabled.
    """
    global __outputter
    # Create the singleton on the first call that supplies a path; later
    # calls reuse it, even if they pass a different path.
    if filepath and __outputter is None:
        __outputter = GitHubChecksOutputter(filepath)
    return __outputter
| mpl-2.0 |
DiptoDas8/Biponi | lib/python2.7/site-packages/pkg_resources/_vendor/six.py | 2715 | 30098 | """Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
    """Descriptor that resolves its value lazily on first attribute access.

    Subclasses implement ``_resolve()``; the resolved result is cached on
    the owning instance and the descriptor removes itself afterwards.
    """

    def __init__(self, name):
        self.name = name

    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result)  # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result
class MovedModule(_LazyDescr):
    """Lazy reference to a module that was renamed between Python 2 and 3."""

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                # The Python 3 module name defaults to the attribute name.
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)

    def __getattr__(self, attr):
        _module = self._resolve()
        value = getattr(_module, attr)
        # Cache the attribute so subsequent lookups bypass __getattr__.
        setattr(self, attr, value)
        return value
class _LazyModule(types.ModuleType):
    """Module subclass whose attributes are lazy descriptors listed in
    ``_moved_attributes`` and resolved only when first accessed."""

    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__

    def __dir__(self):
        # Advertise the lazy attributes so introspection/completion works.
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs

    # Subclasses should override this
    _moved_attributes = []
class MovedAttribute(_LazyDescr):
    """Lazy reference to an attribute that moved between Python 2 and 3
    (possibly changing both its module and its name)."""

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            if new_attr is None:
                # Default the Python 3 attribute name to the old name, or to
                # the moved name itself when no old name was supplied either.
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
class _SixMetaPathImporter(object):
    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        # Maps fully qualified names ("six.moves.x") to module objects.
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        """Register *mod* under one or more names relative to this package."""
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        """Look up a registered module by its name relative to this package."""
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        # PEP 302 finder: only claim modules we registered ourselves.
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            # Lazy placeholders resolve to the real module on first import.
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
    """Add an item to six.moves.

    *move* is a MovedAttribute or MovedModule; it becomes a lazy attribute
    on the _MovedItems class backing the ``six.moves`` pseudo-module.
    """
    setattr(_MovedItems, move.name, move)
def remove_move(name):
    """Remove item from six.moves.

    Raises AttributeError when no move with that *name* exists.
    """
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        # Not a class-level lazy attribute; fall back to the instance
        # namespace of the ``moves`` module object itself.
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
    """Call the unittest assertion under its name for this Python version."""
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    """Dispatch to assertRaisesRegex / assertRaisesRegexp as appropriate."""
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    """Dispatch to assertRegex / assertRegexpMatches as appropriate."""
    return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return
        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                isinstance(data, unicode) and
                fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        # want_unicode: whether any of sep/end/args is unicode, in which case
        # the separators are promoted to unicode too so joining cannot fail.
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            # Anything left over is unsupported (mirrors builtin print()).
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # print() grew the `flush` keyword only in 3.3; emulate it by wrapping.
    _print = print_
    def print_(*args, **kwargs):
        # `file` is read (not popped) so _print still receives it.
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
    # functools.wraps on older interpreters does not record __wrapped__,
    # so layer that behaviour on top of the stdlib implementation.
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def decorator(func):
            func = functools.wraps(wrapped, assigned, updated)(func)
            func.__wrapped__ = wrapped
            return func
        return decorator
else:
    # 3.4+ already sets __wrapped__; use the stdlib directly.
    wraps = functools.wraps
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # The trick: return a throwaway base whose metaclass intercepts the very
    # first real subclassing and rebuilds that subclass with the *actual*
    # metaclass and the requested bases, discarding the dummy level entirely.
    class _shim(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(_shim, 'temporary_class', (), {})
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        # Rebuild the class body from its namespace, minus the attributes the
        # new class creation will regenerate itself.
        body = dict(cls.__dict__)
        slots = body.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            # Slot descriptors belong to the old class; drop them so the new
            # class can recreate them.
            for slot_name in slots:
                body.pop(slot_name)
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if not PY2:
        # Nothing to do on Python 3: __str__ already returns text.
        return klass
    if '__str__' not in klass.__dict__:
        raise ValueError("@python_2_unicode_compatible cannot be applied "
                         "to %s because it doesn't define __str__()." %
                         klass.__name__)
    # Keep the text-returning method as __unicode__ and make __str__
    # return UTF-8 encoded bytes, as Python 2 expects.
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    # An empty search-location list marks this module as a namespace-style
    # package for PEP 451 finders.
    __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    # i/importer are guaranteed bound here: the guard ensures the loop ran.
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| mit |
fitermay/intellij-community | python/helpers/pydev/_pydev_runfiles/pydev_runfiles_parallel_client.py | 34 | 7760 | from _pydevd_bundle.pydevd_constants import * #@UnusedWildImport
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
Queue = _queue.Queue
import traceback
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support_from_params
#=======================================================================================================================
# ParallelNotification
#=======================================================================================================================
class ParallelNotification(object):
    """One queued xml-rpc notification: a method name plus its call arguments."""

    def __init__(self, method, args, kwargs):
        self.method, self.args, self.kwargs = method, args, kwargs

    def to_tuple(self):
        """Return the (method, args, kwargs) triple to send over the wire."""
        return (self.method, self.args, self.kwargs)
#=======================================================================================================================
# KillServer
#=======================================================================================================================
class KillServer(object):
    # Sentinel object: posting an instance on ServerComm's notifications queue
    # makes its run() loop flush pending notifications and terminate.
    pass
#=======================================================================================================================
# ServerComm
#=======================================================================================================================
class ServerComm(threading.Thread):
    """Background thread that batches queued notifications and forwards them
    to the coordinating xml-rpc server under the server's lock.

    Feed it ParallelNotification instances via ``notifications_queue``; post a
    KillServer instance to make it flush and exit.
    """

    def __init__(self, job_id, server):
        self.notifications_queue = Queue()
        threading.Thread.__init__(self)
        # Non-daemon: wait for all the notifications to be passed before exiting!
        # (`self.daemon = False` replaces the deprecated setDaemon() alias.)
        self.daemon = False
        assert job_id is not None
        # NOTE: the original also did `assert port is not None`, but `port` is
        # not a parameter of this method -- it only exists as a global when the
        # module runs as a script, and raised NameError when imported. Removed.
        self.job_id = job_id
        self.finished = False
        self.server = server

    def run(self):
        while True:
            kill_found = False
            commands = []
            # Block for the first item, then drain without blocking so several
            # notifications are sent in a single xml-rpc call.
            command = self.notifications_queue.get(block=True)
            if isinstance(command, KillServer):
                kill_found = True
            else:
                assert isinstance(command, ParallelNotification)
                commands.append(command.to_tuple())
            try:
                while True:
                    command = self.notifications_queue.get(block=False) #No block to create a batch.
                    if isinstance(command, KillServer):
                        kill_found = True
                    else:
                        assert isinstance(command, ParallelNotification)
                        commands.append(command.to_tuple())
            except:
                pass #That's OK, we're getting it until it becomes empty so that we notify multiple at once.
            if commands:
                try:
                    #Batch notification, serialized through the shared server lock.
                    self.server.lock.acquire()
                    try:
                        self.server.notifyCommands(self.job_id, commands)
                    finally:
                        self.server.lock.release()
                except:
                    traceback.print_exc()
            if kill_found:
                self.finished = True
                return
#=======================================================================================================================
# ServerFacade
#=======================================================================================================================
class ServerFacade(object):
    """Stand-in for the real server: queues notifications instead of making
    xml-rpc calls directly from the test-runner thread."""

    def __init__(self, notifications_queue):
        self.notifications_queue = notifications_queue

    def notifyTestsCollected(self, *args, **kwargs):
        pass  # This notification won't be passed

    def notifyTestRunFinished(self, *args, **kwargs):
        pass  # This notification won't be passed

    def _enqueue(self, method, args, kwargs):
        # Wrap the call for later batch delivery by ServerComm.
        self.notifications_queue.put_nowait(ParallelNotification(method, args, kwargs))

    def notifyStartTest(self, *args, **kwargs):
        self._enqueue('notifyStartTest', args, kwargs)

    def notifyTest(self, *args, **kwargs):
        self._enqueue('notifyTest', args, kwargs)
#=======================================================================================================================
# run_client
#=======================================================================================================================
def run_client(job_id, port, verbosity, coverage_output_file, coverage_include):
    """Worker-process entry point: repeatedly asks the coordinating xml-rpc
    server for batches of tests and runs them, forwarding notifications
    through a ServerComm thread until no tests remain."""
    job_id = int(job_id)
    from _pydev_bundle import pydev_localhost
    server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port))
    # The lock serializes all xml-rpc calls (shared with ServerComm).
    server.lock = threading.Lock()
    server_comm = ServerComm(job_id, server)
    server_comm.start()
    try:
        server_facade = ServerFacade(server_comm.notifications_queue)
        from _pydev_runfiles import pydev_runfiles
        from _pydev_runfiles import pydev_runfiles_xml_rpc
        pydev_runfiles_xml_rpc.set_server(server_facade)
        #Starts None and when the 1st test is gotten, it's started (because a server may be initiated and terminated
        #before receiving any test -- which would mean a different process got all the tests to run).
        coverage = None
        try:
            tests_to_run = [1]
            while tests_to_run:
                #Investigate: is it dangerous to use the same xmlrpclib server from different threads?
                #It seems it should be, as it creates a new connection for each request...
                server.lock.acquire()
                try:
                    tests_to_run = server.GetTestsToRun(job_id)
                finally:
                    server.lock.release()
                if not tests_to_run:
                    break
                if coverage is None:
                    # Lazily start coverage on the first non-empty batch.
                    _coverage_files, coverage = start_coverage_support_from_params(
                        None, coverage_output_file, 1, coverage_include)
                # Each entry is "filename|testname"; group test names by file.
                files_to_tests = {}
                for test in tests_to_run:
                    filename_and_test = test.split('|')
                    if len(filename_and_test) == 2:
                        files_to_tests.setdefault(filename_and_test[0], []).append(filename_and_test[1])
                configuration = pydev_runfiles.Configuration(
                    '',
                    verbosity,
                    None,
                    None,
                    None,
                    files_to_tests,
                    1, #Always single job here
                    None,
                    #The coverage is handled in this loop.
                    coverage_output_file=None,
                    coverage_include=None,
                )
                test_runner = pydev_runfiles.PydevTestRunner(configuration)
                sys.stdout.flush()
                test_runner.run_tests(handle_coverage=False)
        finally:
            if coverage is not None:
                coverage.stop()
                coverage.save()
    except:
        traceback.print_exc()
    # Tell ServerComm to flush its queue and shut down.
    server_comm.notifications_queue.put_nowait(KillServer())
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
    # Expected invocations:
    #   job_id port verbosity
    #   job_id port verbosity coverage_output_file coverage_include
    if len(sys.argv) - 1 == 3:
        job_id, port, verbosity = sys.argv[1:]
        coverage_output_file, coverage_include = None, None
    elif len(sys.argv) - 1 == 5:
        job_id, port, verbosity, coverage_output_file, coverage_include = sys.argv[1:]
    else:
        # Bug fix: the original concatenated a str with a list
        # ('...' + sys.argv[1:]), which raised TypeError instead of this
        # AssertionError. Format the list into the message instead.
        raise AssertionError('Could not find out how to handle the parameters: %s' % (sys.argv[1:],))
    job_id = int(job_id)
    port = int(port)
    verbosity = int(verbosity)
    run_client(job_id, port, verbosity, coverage_output_file, coverage_include)
| apache-2.0 |
valexandersaulys/prudential_insurance_kaggle | venv/lib/python2.7/site-packages/scipy/__config__.py | 1 | 1356 | # This file is generated by /tmp/pip-build-LbyYwr/scipy/-c
# It contains system_info results at the time of building this package.
__all__ = ["get_info","show"]
# system_info results recorded by numpy.distutils at build time.
# An empty dict means the corresponding BLAS/LAPACK backend was not found.
blas_info={'libraries': ['blas'], 'library_dirs': ['/usr/lib64']}
atlas_3_10_blas_info={}
lapack_info={'libraries': ['lapack'], 'library_dirs': ['/usr/lib64'], 'language': 'f77'}
atlas_3_10_blas_threads_info={}
atlas_threads_info={}
blas_opt_info={'libraries': ['blas'], 'library_dirs': ['/usr/lib64'], 'define_macros': [('NO_ATLAS_INFO', 1)]}
atlas_blas_threads_info={}
openblas_info={}
lapack_opt_info={'libraries': ['lapack', 'blas'], 'library_dirs': ['/usr/lib64'], 'define_macros': [('NO_ATLAS_INFO', 1)], 'language': 'f77'}
openblas_lapack_info={}
atlas_3_10_threads_info={}
atlas_info={}
atlas_3_10_info={}
lapack_mkl_info={}
blas_mkl_info={}
atlas_blas_info={}
mkl_info={}
def get_info(name):
    """Return the recorded build-configuration dict for *name*.

    Looks up the module global *name* first, then ``name + "_info"``,
    and finally falls back to an empty dict.
    """
    namespace = globals()
    if name in namespace:
        return namespace[name]
    return namespace.get(name + "_info", {})
def show():
    """Print every recorded configuration dictionary, one section per name."""
    for label, info in globals().items():
        # Skip private names and anything that is not a plain dict.
        if label.startswith("_") or type(info) is not dict:
            continue
        print(label + ":")
        if not info:
            print(" NOT AVAILABLE")
        for key, value in info.items():
            text = str(value)
            # Long source lists are elided to keep the output readable.
            if key == "sources" and len(text) > 200:
                text = text[:60] + " ...\n... " + text[-60:]
            print(" %s = %s" % (key, text))
| gpl-2.0 |
TeXitoi/navitia | source/jormungandr/tests/stats_tests.py | 1 | 9272 | # Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from tests_mechanism import AbstractTestFixture, dataset
from check_utils import *
from jormungandr import stat_manager
from jormungandr.stat_manager import StatManager
#from mock import patch
from jormungandr.utils import str_to_time_stamp
class MockWrapper:
    """Captures the stat object handed to StatManager.publish_request and
    asserts its content for the API under test (journeys/coverage/places).

    The mock_* methods are installed in place of publish_request; the
    check_* methods hold the per-API expectations against fixture data."""
    def __init__(self):
        self.called = False #TODO: use mock!
    def mock_journey_stat(self, stat):
        self.check_stat_journey_to_publish(stat)
        self.called = True
    def mock_coverage_stat(self, stat):
        self.check_stat_coverage_to_publish(stat)
        self.called = True
    def check_stat_coverage_to_publish(self, stat):
        #Verify request elements
        assert stat.user_id != ""
        assert stat.api == "v1.coverage"
        assert len(stat.parameters) == 0
        assert len(stat.coverages) == 1
        #Here we verify id of region in request.view_args of the request.
        assert stat.coverages[0].region_id == ""
        assert len(stat.journeys) == 0
        #Verify elements of request.error
        assert stat.error.id == ""
    def mock_places_stat(self, stat):
        # NOTE: self.called is set at the end of check_stat_places_to_publish,
        # not here, unlike the other mock_* methods.
        self.check_stat_places_to_publish(stat)
    def check_stat_journey_to_publish(self, stat):
        #Verify request elements
        assert stat.user_id != ""
        assert stat.api == "v1.journeys"
        epsilon = 1e-5
        #Verify elements of request.parameters
        assert len(stat.parameters) == 3
        assert {(param.key, param.value) for param in stat.parameters} >\
               {("to", "0.00188646;0.00071865"), ("from", "0.0000898312;0.0000898312")}
        #Verify elements of request.coverages
        assert len(stat.coverages) == 1
        assert stat.coverages[0].region_id == "main_routing_test"
        #Verify elements of request.error
        assert stat.error.id == ""
        #Verify elements of request.journeys
        assert len(stat.journeys) == 2
        assert stat.journeys[0].requested_date_time == str_to_time_stamp("20120614T080000") #1339653600
        assert stat.journeys[0].departure_date_time == str_to_time_stamp("20120614T080043") #1339653643
        assert stat.journeys[0].arrival_date_time == str_to_time_stamp("20120614T080222") #1339653742
        assert stat.journeys[0].duration == 99
        assert stat.journeys[0].type == "best"
        assert stat.journeys[0].nb_transfers == 0
        #Verify elements of request.journeys.sections
        assert len(stat.journeys[0].sections) == 3
        assert stat.journeys[0].sections[1].departure_date_time == str_to_time_stamp("20120614T080100") #1339653660
        assert stat.journeys[0].sections[1].arrival_date_time == str_to_time_stamp("20120614T080102") #1339653662
        assert stat.journeys[0].sections[1].duration == 2
        assert stat.journeys[0].sections[1].from_embedded_type == "stop_area"
        assert stat.journeys[0].sections[1].from_id == "stopB"
        assert stat.journeys[0].sections[1].from_name == "stopB"
        assert stat.journeys[0].sections[1].mode == ""
        assert stat.journeys[0].sections[1].network_id == "base_network"
        assert stat.journeys[0].sections[1].network_name == "base_network"
        # Coordinates are floats, so compare within epsilon.
        assert abs(stat.journeys[0].sections[1].from_coord.lat - 0.000269493594586) < epsilon
        assert abs(stat.journeys[0].sections[1].from_coord.lon - 8.98311981955e-05) < epsilon
        assert stat.journeys[0].sections[1].to_embedded_type == "stop_area"
        assert stat.journeys[0].sections[1].to_id == "stopA"
        assert stat.journeys[0].sections[1].to_name == "stopA"
        assert abs(stat.journeys[0].sections[1].to_coord.lat - 0.000718649585564) < epsilon
        assert abs(stat.journeys[0].sections[1].to_coord.lon - 0.00107797437835) < epsilon
        assert len(stat.journeys[1].sections) == 1
        assert stat.journeys[1].sections[0].departure_date_time == str_to_time_stamp("20120614T080000") #1339653600
        assert stat.journeys[1].sections[0].arrival_date_time == str_to_time_stamp("20120614T080435") #1339653875
        assert stat.journeys[1].sections[0].duration == 275
        assert stat.journeys[1].sections[0].from_embedded_type == "address"
        assert stat.journeys[1].sections[0].from_id == "8.98312e-05;8.98312e-05"
        assert stat.journeys[1].sections[0].from_name == "rue bs"
        assert stat.journeys[1].sections[0].mode == "walking"
        assert stat.journeys[1].sections[0].to_embedded_type == "address"
        assert stat.journeys[1].sections[0].to_id == "0.00188646;0.00071865"
        assert stat.journeys[1].sections[0].to_name == "rue ag"
        assert stat.journeys[1].sections[0].type == "street_network"
        eq_(stat.journey_request.requested_date_time, 1339653600)
        eq_(stat.journey_request.clockwise, True)
        eq_(stat.journey_request.departure_insee, '32107')
        eq_(stat.journey_request.departure_admin, 'admin:74435')
        eq_(stat.journey_request.arrival_insee, '32107')
        eq_(stat.journey_request.arrival_admin, 'admin:74435')
    def check_stat_places_to_publish(self, stat):
        assert stat.user_id != ""
        assert stat.api == "v1.place_uri"
        assert len(stat.parameters) == 0
        assert len(stat.coverages) == 1
        #Here we verify id of region in request.view_args of the request.
        assert stat.coverages[0].region_id == "main_ptref_test"
        assert stat.error.id == ""
        self.called = True
@dataset({"main_routing_test": {}})
class TestStatJourneys(AbstractTestFixture):
def setup(self):
"""
We save the original publish_method to be able to put it back after the tests
each test will override this method with it's own mock check method
"""
self.real_publish_request_method = StatManager.publish_request
self.old_save_val = stat_manager.save_stat
stat_manager.save_stat = True
def teardown(self):
"""
Here we put back the original method to stat manager.
"""
StatManager.publish_request = self.real_publish_request_method
stat_manager.save_stat = self.old_save_val
def test_simple_journey_query_with_stats(self):
"""
test on a valid journey (tested in routing_tests.py).
stat activated to test objects filled by stat manager
"""
# we override the stat method with a mock method to test the journeys
mock = MockWrapper()
StatManager.publish_request = mock.mock_journey_stat
response = self.query_region(journey_basic_query, display=False)
assert mock.called
def test_simple_test_coverage_with_stats(self):
"""
here we test stat objects for api coverage filled by stat manager
"""
# we override the stat method with a mock method to test the coverage
mock = MockWrapper()
StatManager.publish_request = mock.mock_coverage_stat
json_response = self.query("/v1/coverage", display=False)
assert mock.called
@dataset({"main_ptref_test": {}})
class TestStatPlaces(AbstractTestFixture):
def setup(self):
"""
We save the original publish_method to be able to put it back after the tests
each test will override this method with it's own mock check method
"""
self.real_publish_request_method = StatManager.publish_request
self.old_save_val = stat_manager.save_stat
stat_manager.save_stat = True
def teardown(self):
"""
Here we put back the original method to stat manager.
"""
StatManager.publish_request = self.real_publish_request_method
stat_manager.save_stat = self.old_save_val
def test_simple_test_places_with_stats(self):
"""
here we test stat objects for api place_uri filled by stat manager
"""
# we override the stat method with a mock method to test the place_uri
mock = MockWrapper()
StatManager.publish_request = mock.check_stat_places_to_publish
response = self.query_region("places/stop_area:stop1", display=False)
assert mock.called
| agpl-3.0 |
euri10/populus | tests/migrations/test_sorting_migration_graph.py | 1 | 2017 | import pytest
from populus.migrations import Migration
from populus.migrations.migration import (
sort_migrations,
)
def test_simple_migration_sorting():
    """
    Test a simple linear migration dependency chain.
    A -> B -> C
    """
    class Migration_A(Migration):
        migration_id = "A"

    class Migration_B(Migration):
        migration_id = "B"
        dependencies = ["A"]

    class Migration_C(Migration):
        migration_id = "C"
        dependencies = ["B"]

    execution_order = sort_migrations(
        [Migration_A, Migration_B, Migration_C])

    # Each stage is a set of migrations that may run concurrently.
    assert len(execution_order) == 3
    assert execution_order[0] == {Migration_A}
    assert execution_order[1] == {Migration_B}
    assert execution_order[2] == {Migration_C}
def test_multi_dependency_sorting():
    """
    Test a migration with multiple dependencies.

    A -> B -> C
    A, B -> D  (D depends on both A and B, so it sorts alongside C)
    """
    class Migration_A(Migration):
        migration_id = "A"
    class Migration_B(Migration):
        migration_id = "B"
        dependencies = ["A"]
    class Migration_C(Migration):
        migration_id = "C"
        dependencies = ["B"]
    class Migration_D(Migration):
        migration_id = "D"
        dependencies = ["B", "A"]
    migration_execution_order = sort_migrations([
        Migration_A, Migration_B, Migration_C, Migration_D,
    ])
    assert len(migration_execution_order) == 3
    assert migration_execution_order[0] == {Migration_A}
    assert migration_execution_order[1] == {Migration_B}
    assert migration_execution_order[2] == {Migration_C, Migration_D}
def test_circular_dependency_checking():
    """
    Test that a circular dependency chain (A <-> B) raises ValueError.
    """
    class Migration_A(Migration):
        migration_id = "A"
        dependencies = ["B"]
    class Migration_B(Migration):
        migration_id = "B"
        dependencies = ["A"]
    with pytest.raises(ValueError):
        sort_migrations([
            Migration_A, Migration_B,
        ])
| mit |
dimkarakostas/matasano-cryptochallenges | problem_11.py | 1 | 1330 | from crypto_library import cbc_aes_encrypt, ecb_aes_encrypt, BLOCKSIZE
from string import printable
import random
def generate_random_string(length):
    """Return *length* characters drawn uniformly from string.printable."""
    chars = [random.choice(printable) for _ in range(length)]
    return ''.join(chars)
def cbc_encryption(plaintext, key):
    # CBC needs an IV; use a fresh random one per message.
    iv = generate_random_string(BLOCKSIZE)
    return cbc_aes_encrypt(plaintext, iv, key)
def ecb_encryption(plaintext, key):
    # ECB takes no IV; encrypt directly under the key.
    return ecb_aes_encrypt(plaintext, key)
def encryption_oracle(plaintext):
    # Cryptopals challenge 11 oracle: encrypt under a random 16-byte key with
    # a coin-flip between CBC and ECB, padding both ends with 5-10 random
    # characters so the attacker must detect the mode from ciphertext alone.
    encryption_schemes = [cbc_encryption, ecb_encryption]
    key = generate_random_string(16)
    padding_before = generate_random_string(random.randrange(5, 11))
    padding_after = generate_random_string(random.randrange(5, 11))
    ciphertext = encryption_schemes[random.randrange(0, 2)](''.join((padding_before, plaintext, padding_after)), key)
    return ciphertext
def distinguish_encryption_mode(ciphertext):
    """Guess whether *ciphertext* was produced under ECB or CBC.

    With a long run of identical plaintext characters, ECB produces identical
    interior ciphertext blocks, while CBC does not.
    """
    # Bug fix: use floor division. Plain `/` yields a float on Python 3, which
    # breaks range(); `//` is identical to `/` for ints on Python 2.
    n_blocks = len(ciphertext) // BLOCKSIZE
    blocks = [ciphertext[i * BLOCKSIZE:(i + 1) * BLOCKSIZE] for i in range(n_blocks)]
    if len(blocks) < 4:
        return 'Insufficient input'
    # Blocks 1 and 2 sit past the 5-10 char random prefix, inside the
    # attacker-controlled constant region.
    if blocks[1] == blocks[2]:
        return 'ecb'
    return 'cbc'
def main():
    """Encrypt a long constant plaintext under a random mode and print the guess."""
    plaintext = '0'*100
    ciphertext = encryption_oracle(plaintext)
    adversary_output = distinguish_encryption_mode(ciphertext)
    # Call form works on both Python 2 and 3 (the original used a
    # Python-2-only print statement).
    print(adversary_output)
if __name__ == '__main__':
    main()
| mit |
mapr/hue | desktop/core/ext-py/Django-1.6.10/tests/admin_custom_urls/tests.py | 61 | 5610 | from __future__ import absolute_import, unicode_literals
import warnings
from django.contrib.admin.util import quote
from django.core.urlresolvers import reverse
from django.template.response import TemplateResponse
from django.test import TestCase
from django.test.utils import override_settings
from .models import Action, Person, Car
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminCustomUrlsTest(TestCase):
    """
    Remember that:
    * The Action model has a CharField PK.
    * The ModelAdmin for Action customizes the add_view URL, it's
      '<app name>/<model name>/!add/'
    """
    fixtures = ['users.json', 'actions.json']
    def setUp(self):
        # The fixture provides the 'super' superuser.
        self.client.login(username='super', password='secret')
    def tearDown(self):
        self.client.logout()
    def testBasicAddGet(self):
        """
        Ensure GET on the add_view works.
        """
        response = self.client.get('/custom_urls/admin/admin_custom_urls/action/!add/')
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def testAddWithGETArgs(self):
        """
        Ensure GET on the add_view plus specifying a field value in the query
        string works.
        """
        response = self.client.get('/custom_urls/admin/admin_custom_urls/action/!add/', {'name': 'My Action'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'value="My Action"')
    def testBasicAddPost(self):
        """
        Ensure POST on add_view works.
        """
        post_data = {
            '_popup': '1',
            "name": 'Action added through a popup',
            "description": "Description of added action",
        }
        response = self.client.post('/custom_urls/admin/admin_custom_urls/action/!add/', post_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissAddAnotherPopup')
        self.assertContains(response, 'Action added through a popup')
    def testAdminUrlsNoClash(self):
        """
        Test that some admin URLs work correctly.
        """
        # Should get the change_view for model instance with PK 'add', not show
        # the add_view
        response = self.client.get('/custom_urls/admin/admin_custom_urls/action/add/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Change action')
        # Ditto, but use reverse() to build the URL
        url = reverse('admin:%s_action_change' % Action._meta.app_label,
                args=(quote('add'),))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Change action')
        # Should correctly get the change_view for the model instance with the
        # funny-looking PK (the one with a 'path/to/html/document.html' value)
        url = reverse('admin:%s_action_change' % Action._meta.app_label,
                args=(quote("path/to/html/document.html"),))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Change action')
        self.assertContains(response, 'value="path/to/html/document.html"')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class CustomRedirects(TestCase):
    # Checks that the custom ModelAdmin post-save redirect hooks are honoured.
    fixtures = ['users.json', 'actions.json']
    def setUp(self):
        self.client.login(username='super', password='secret')
    def tearDown(self):
        self.client.logout()
    def test_post_save_add_redirect(self):
        """
        Ensures that ModelAdmin.response_post_save_add() controls the
        redirection after the 'Save' button has been pressed when adding a
        new object.
        Refs 8001, 18310, 19505.
        """
        post_data = { 'name': 'John Doe', }
        self.assertEqual(Person.objects.count(), 0)
        response = self.client.post(
            reverse('admin:admin_custom_urls_person_add'), post_data)
        persons = Person.objects.all()
        self.assertEqual(len(persons), 1)
        self.assertRedirects(
            response, reverse('admin:admin_custom_urls_person_history', args=[persons[0].pk]))
    def test_post_save_change_redirect(self):
        """
        Ensures that ModelAdmin.response_post_save_change() controls the
        redirection after the 'Save' button has been pressed when editing an
        existing object.
        Refs 8001, 18310, 19505.
        """
        Person.objects.create(name='John Doe')
        self.assertEqual(Person.objects.count(), 1)
        person = Person.objects.all()[0]
        post_data = { 'name': 'Jack Doe', }
        response = self.client.post(
            reverse('admin:admin_custom_urls_person_change', args=[person.pk]), post_data)
        self.assertRedirects(
            response, reverse('admin:admin_custom_urls_person_delete', args=[person.pk]))
    def test_post_url_continue(self):
        """
        Ensures that the ModelAdmin.response_add()'s parameter `post_url_continue`
        controls the redirection after an object has been created.
        """
        post_data = { 'name': 'SuperFast', '_continue': '1' }
        self.assertEqual(Car.objects.count(), 0)
        response = self.client.post(
            reverse('admin:admin_custom_urls_car_add'), post_data)
        cars = Car.objects.all()
        self.assertEqual(len(cars), 1)
        self.assertRedirects(
            response, reverse('admin:admin_custom_urls_car_history', args=[cars[0].pk]))
MRCIEU/melodi | browser/management/add_new_semmed_v3.py | 1 | 7714 | import sys,gzip,time,resource,os
from csv import reader
sys.path.append("/data/be15516/projects/melodi/")
import config
#4 steps
#1. Download new files
# Citations and Predication
#2. Convert each sql table to a pipe separated format
# for i in *sql.gz; do echo $i; python ~/scripts/bristol/mysql_to_csv.py <(gunzip -c $i) | gzip > ${i%%.*}.psv.gz; done
#3. Get rid of double quotest in citations
# gunzip -c semmedVER30_R_CITATIONS_to12312016.psv.gz | sed "s/'//g" | gzip > semmedVER30_R_CITATIONS_to12312016_edit.psv.gz
#4. Add new data - change file locations in script and run this script
# python browser/management/add_new_semmed_v3.py
#neo4j
from neo4j.v1 import GraphDatabase,basic_auth
auth_token = basic_auth(config.user, config.password)
driver = GraphDatabase.driver("bolt://"+config.server+":"+config.port,auth=auth_token)
#files
baseDir='/projects/MRC-IEU/research/data/nih/_devs/NIH_SemMedDB/data/SemMedDB/semmedVER40_R/'
#SemMed
semCitation = baseDir+'semmedVER40_R_CITATIONS_edit.psv.gz'
semPA = baseDir+'semmedVER40_R_PREDICATION.psv.gz'
old_pmids='data/old_pmids.txt.gz'
new_pmids='data/new_pmids.txt.gz'
#####################################################
#### Don't run locally, go to epif version!!!!! #####
#####################################################
#getData metrics
#memory: 2973Mb
#Time taken: 16 minutes
def getpubmedData():
	"""Dump the distinct PubMed IDs already present in the MELODI Neo4j
	graph to the gzip file `old_pmids` (skipped if either the old or
	new pmid file already exists). Python 2 only."""
	print "Getting PubMed data from MELODI graph..."
	session2 = driver.session()
	pDic = {}
	start = time.time()
	#check if new pmids already created
	if os.path.isfile(new_pmids):
		print "Already generated new pmids "+new_pmids
	else:
		#check if old pmids already downloaded
		if os.path.isfile(old_pmids):
			print "Pubmed data already downloaded "+old_pmids
		else:
			#com="match (p:Pubmed)-[:SEM]-(s:SDB_triple) return distinct(p.pmid) as pmid;"
			#com="match (p:Pubmed)-[:SEM|:HAS_MESH]-() return distinct(p.pmid) as pmid;"
			com="match (p:Pubmed) where p.dp is not NULL return distinct(p.pmid) as pmid;"
			print(com)
			s = session2.run(com)
			counter=0
			for res in s:
				# Progress/memory report every millionth row.
				if counter % 1000000 == 0:
					print counter
					print "\ttime:", round((time.time() - start)/60, 1), "minutes"
					print "\tmemory: "+str(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1000000)+" Mb"
				#print res['pmid']
				counter+=1
				pDic[res['pmid']]=''
			print "Time taken for download:", round((time.time() - start)/60, 1), "minutes"
			print "Writing to file..."
			with gzip.open(old_pmids, 'wb') as f:
				for i in pDic:
					f.write(str(i)+"\n")
	# NOTE(review): if either file already existed, pDic is empty here and
	# this prints "Found 0" -- confirm that is intended.
	print "Found "+str(len(pDic))+" PubMed IDs."
def getNewPmids():
	"""Compare SemMedDB predication PMIDs against the previously dumped
	graph PMIDs (`old_pmids`) and write those not yet in the graph to
	`new_pmids`. Python 2 only."""
	print "Finding new PubMed IDs..."
	newPDic = {}
	start = time.time()
	counter=0
	if os.path.isfile(new_pmids):
		print "New pubmed data already created "+new_pmids
	else:
		#get old pubmed ids in memory
		pDic = {}
		with gzip.open(old_pmids, 'rb') as f:
			for line in f:
				pDic[line.rstrip()]=''
		print "Time taken to read from file:", round(time.time() - start, 3), "seconds"
		print('Reading ',semPA)
		with gzip.open(semPA, 'r') as f:
			for line in f:
				counter+=1
				if counter % 1000000 == 0:
					print counter,pmid
					print str(len(newPDic))
				# assumes column 2 of the tab-separated predication row is
				# the PMID -- matches the header comments at the top of file
				l = line.split("\t")
				pmid = l[2]
				if pmid not in pDic:
					#print l
					newPDic[pmid]=''
		print "Time taken:", round((time.time() - start)/60, 1), "minutes"
		print "Writing to file..."
		with gzip.open(new_pmids, 'wb') as f:
			for i in newPDic:
				f.write(str(i)+"\n")
	print "Time taken for download:", round((time.time() - start)/60, 1), "minutes"
	print "Found "+str(len(newPDic))+" new PubMed IDs"
def addNewPmids():
	"""MERGE a Pubmed node (with issn/da/dcom/dp properties) into the graph
	for every citation row whose PMID appears in `new_pmids`. Python 2 only."""
	pNew={}
	start = time.time()
	with gzip.open(new_pmids, 'rb') as f:
		for line in f:
			pNew[line.rstrip()]=''
	print "Time taken to read from file:", round(time.time() - start, 3), "seconds"
	print "Adding new PubMed data to graph..."
	#pmid:ID(Pubmed)|issn|:IGNORE|dcom|:IGNORE
	counter=0
	countAdd=0
	session2 = driver.session()
	#1|0006-2944|1975 Jun|1976 01 16|1975
	with gzip.open(semCitation, 'rb') as f:
		for line in f:
			counter+=1
			if counter % 1000000 == 0:
				print counter
			l = line.rstrip().split("\t")
			pmid = l[0]
			issn = l[1]
			da = l[2]
			dcom = l[3]
			dp = l[4]
			if pmid in pNew:
				countAdd+=1
				if countAdd % 10000 == 0:
					print l
					print "Time taken:", round((time.time() - start)/60, 1), "minutes"
					print "Added "+str(countAdd)
					# Recycle the session periodically to avoid one
					# long-lived session for the whole load.
					session2.close()
					session2 = driver.session()
				statement = "MERGE (n:Pubmed {pmid:"+pmid+"}) ON MATCH SET n.issn='"+issn+"',n.da='"+da+"',n.dcom='"+dcom+"',n.dp='"+dp+"' on CREATE SET n.issn='"+issn+"',n.da='"+da+"',n.dcom='"+dcom+"',n.dp='"+dp+"'"
				session2.run(statement)
def addNewSemMed():
    """Extract PubMed-to-SemMed-triple link rows for the new PubMed IDs.

    Streams the predication file (semPA) through the csv `reader`
    (tab-delimited, so '|' characters inside quoted fields survive) and
    writes pmid/subject/predicate/object rows to data/new-semmed.tsv.gz
    for later bulk loading with LOAD CSV (see the comments in main()).
    """
    print "Adding new SemMed data to graph..."
    pNew={}  # dict used as a set of new PubMed IDs
    start = time.time()
    with gzip.open(new_pmids, 'rb') as f:
    #with gzip.open('data/new_pmids_test.txt.gz', 'rb') as f:
        for line in f:
            pNew[line.rstrip()]=''
    print "Time taken to read from file:", round(time.time() - start, 3), "seconds"
    counter=0
    countAdd=0
    session2 = driver.session()
    #117403|248343|13930367|ISA|C0008059|Child|inpr|1|C0018684|Health|idcn|1
    #don't split inside quoted sections, e.g. 49963|341414|1|21065029|COEXISTS_WITH|"708|925"|"C1QBP|CD8A"|gngm|1|C0771648|Poractant alfa|orch|1
    # NOTE(review): `o` is never closed or flushed -- the gzip output may be
    # truncated on interpreter exit; consider an explicit o.close().
    o=gzip.open('data/new-semmed.tsv.gz','w')
    with gzip.open(semPA, 'rb') as f:
        for line in reader(f, delimiter='\t'):
            counter+=1
            if counter % 1000000 == 0:
                print "------ "+str(counter)+" ------"
            #l = line.rstrip().split("|")
            l = line
            # NOTE(review): column layout assumed from the sample above --
            # 0=predication id, 2=pmid, 3=predicate, 5/6=subject name/type,
            # 9/10=object name/type.  Verify against the semPA schema.
            pid=l[0]
            pmid=l[2]
            s_name = l[5].replace('"','\\"')
            predicate = l[3]
            o_name = l[9].replace('"','\\"')
            s_type = l[6]
            o_type = l[10]
            if pmid in pNew:
                #print pmid
                countAdd+=1
                if countAdd % 10000 == 0:
                    print l
                    print "Time taken:", round((time.time() - start)/60, 1), "minutes"
                    print "Added "+str(countAdd)
                    session2.close()
                    session2 = driver.session()
                #check for dodgy pubmed ids with [2] in
                if pmid.isdigit():
                    #statement="match (p:Pubmed{pmid:"+pmid+"}),(s:SDB_triple{s_name:'"+s_name+"',s_type:'"+s_type+"',o_name:'"+o_name+"',o_type:'"+o_type+"',predicate:'"+predicate+"'}) merge (p)-[:SEM]-(s);"
                    o.write(pmid+'\t'+s_name+'\t'+predicate+'\t'+o_name+'\n')
                    #statement='match (p:Pubmed{pmid:'+pmid+'}),(s:SDB_triple{s_name:"'+s_name+'",o_name:"'+o_name+'",predicate:"'+predicate+'"}) merge (p)-[:SEM]-(s);'
                    #session2.run(statement)
def fix():
    """One-off cleanup: delete the SEM relationships attached to the
    Pubmed nodes listed in the test ID file (undo of a previous load)."""
    print "Fixing..."
    start = time.time()
    session2 = driver.session()
    counter=0
    with gzip.open('data/new_pmids_test.txt.gz', 'rb') as f:
        for line in f:
            pmid = line.rstrip()
            # Remove every SEM edge between this Pubmed node and any triple.
            com = "match (p:Pubmed{pmid:"+pmid+"})-[r:SEM]-(s:SDB_triple) delete r;"
            #print com
            session2.run(com)
            counter+=1
            if counter % 100000 == 0:
                print "------ "+str(counter)+" ------"
            #session2.close()
            #session2 = driver.session()
    print "Time taken to read from file:", round(time.time() - start, 3), "seconds"
def main():
    """Pipeline driver -- uncomment the step(s) that need (re-)running.
    Approximate runtimes are noted in the comments per step."""
    #get the existing set of PubMed IDs
    #~15 minutes
    #getpubmedData()
    #check for new ones using new SemMedDB data
    #~10 minutes
    #getNewPmids()
    #add/update PubMed nodes
    #~130 minutes
    #addNewPmids()
    #add new PubMed-SemMed relationships
    addNewSemMed()
    #load using load csv
    #using periodic commit 10000 LOAD CSV FROM 'file:///new-semmed.tsv.gz' as row FIELDTERMINATOR '\t' match (p:Pubmed{pmid:toInt(row[0])}) match (s:SDB_triple{pid:row[1]}) merge (p)-[:SEM]-(s);
    #13 hours for ~2 million relationships
    #fix()
# Script entry point.
if __name__ == "__main__":
    main()
#numbers
#before - match (p:Pubmed)-[:SEM]-(s:SDB_triple) return count(distinct(p.pmid));
#+-------------------------+
#| count(distinct(p.pmid)) |
#+-------------------------+
#| 15841558
| mit |
briandailey/alembic | alembic/ddl/mssql.py | 1 | 6703 | from alembic.ddl.impl import DefaultImpl
from alembic.ddl.base import alter_table, AddColumn, ColumnName, \
format_table_name, format_column_name, ColumnNullable, alter_column,\
format_server_default,ColumnDefault, format_type, ColumnType
from alembic import util
from sqlalchemy.ext.compiler import compiles
class MSSQLImpl(DefaultImpl):
    """Alembic migration-operations implementation for the SQL Server
    ("mssql") dialect."""
    __dialect__ = 'mssql'
    # DDL can run inside an explicit transaction on MSSQL.
    transactional_ddl = True
    # Batch terminator emitted after each statement in --sql (offline) mode.
    batch_separator = "GO"
    def __init__(self, *arg, **kw):
        super(MSSQLImpl, self).__init__(*arg, **kw)
        # Allow override via the "mssql_batch_separator" context option.
        self.batch_separator = self.context_opts.get(
            "mssql_batch_separator",
            self.batch_separator)
    def _exec(self, construct, *args, **kw):
        # Emit the batch separator ("GO") after each construct when
        # generating SQL to a script instead of executing it.
        super(MSSQLImpl, self)._exec(construct, *args, **kw)
        if self.as_sql and self.batch_separator:
            self.static_output(self.batch_separator)
    def emit_begin(self):
        # MSSQL spells this "BEGIN TRANSACTION" rather than plain "BEGIN".
        self.static_output("BEGIN TRANSACTION" + self.command_terminator)
    def alter_column(self, table_name, column_name,
                        nullable=None,
                        server_default=False,
                        name=None,
                        type_=None,
                        schema=None,
                        autoincrement=None,
                        existing_type=None,
                        existing_server_default=None,
                        existing_nullable=None,
                        existing_autoincrement=None
                    ):
        """MSSQL-specific ALTER COLUMN.

        MSSQL requires the column's type to be restated when changing
        nullability, and a server default is changed by dropping the
        existing default constraint and adding a new one.
        ``server_default=False`` is a sentinel meaning "leave the default
        untouched" (``None`` means "remove the default").
        """
        if nullable is not None and existing_type is None:
            if type_ is not None:
                existing_type = type_
                # the NULL/NOT NULL alter will handle
                # the type alteration
                type_ = None
            else:
                raise util.CommandError(
                        "MS-SQL ALTER COLUMN operations "
                        "with NULL or NOT NULL require the "
                        "existing_type or a new type_ be passed.")
        # First pass: nullability / type / autoincrement only (default and
        # rename are handled by the separate statements below).
        super(MSSQLImpl, self).alter_column(
            table_name, column_name,
            nullable=nullable,
            type_=type_,
            schema=schema,
            autoincrement=autoincrement,
            existing_type=existing_type,
            existing_nullable=existing_nullable,
            existing_autoincrement=existing_autoincrement
        )
        if server_default is not False:
            # Drop the old default constraint before adding the new one
            # (also when the default is being removed entirely).
            if existing_server_default is not False or \
                server_default is None:
                self._exec(
                    _exec_drop_col_constraint(self,
                            table_name, column_name,
                            'sys.default_constraints')
                )
            if server_default is not None:
                super(MSSQLImpl, self).alter_column(
                                table_name, column_name,
                                schema=schema,
                                server_default=server_default)
        if name is not None:
            # Rename last so the preceding statements target the old name.
            super(MSSQLImpl, self).alter_column(
                                table_name, column_name,
                                schema=schema,
                                name=name)
    def bulk_insert(self, table, rows):
        # In offline (--sql) mode, bracket the INSERTs with
        # SET IDENTITY_INSERT ON/OFF so explicit identity values are accepted.
        if self.as_sql:
            self._exec(
                "SET IDENTITY_INSERT %s ON" %
                    self.dialect.identifier_preparer.format_table(table)
            )
            super(MSSQLImpl, self).bulk_insert(table, rows)
            self._exec(
                "SET IDENTITY_INSERT %s OFF" %
                    self.dialect.identifier_preparer.format_table(table)
            )
        else:
            super(MSSQLImpl, self).bulk_insert(table, rows)
    def drop_column(self, table_name, column, **kw):
        """Drop a column, optionally first dropping its DEFAULT and/or
        CHECK constraints (mssql_drop_default / mssql_drop_check flags)."""
        drop_default = kw.pop('mssql_drop_default', False)
        if drop_default:
            self._exec(
                _exec_drop_col_constraint(self,
                        table_name, column,
                        'sys.default_constraints')
            )
        drop_check = kw.pop('mssql_drop_check', False)
        if drop_check:
            self._exec(
                _exec_drop_col_constraint(self,
                        table_name, column,
                        'sys.check_constraints')
            )
        super(MSSQLImpl, self).drop_column(table_name, column)
def _exec_drop_col_constraint(impl, tname, colname, type_):
# from http://www.mssqltips.com/sqlservertip/1425/working-with-default-constraints-in-sql-server/
# TODO: needs table formatting, etc.
return """declare @const_name varchar(256)
select @const_name = [name] from %(type)s
where parent_object_id = object_id('%(tname)s')
and col_name(parent_object_id, parent_column_id) = '%(colname)s'
exec('alter table %(tname)s drop constraint ' + @const_name)""" % {
'type':type_,
'tname':tname,
'colname':colname
}
@compiles(AddColumn, 'mssql')
def visit_add_column(element, compiler, **kw):
    """Render ``ALTER TABLE ... ADD <column spec>`` for MSSQL."""
    alter_clause = alter_table(compiler, element.table_name, element.schema)
    add_clause = mssql_add_column(compiler, element.column, **kw)
    return "%s %s" % (alter_clause, add_clause)
def mssql_add_column(compiler, column, **kw):
    """Render the ``ADD <column specification>`` clause for MSSQL."""
    spec = compiler.get_column_specification(column, **kw)
    return "ADD %s" % spec
@compiles(ColumnNullable, 'mssql')
def visit_column_nullable(element, compiler, **kw):
    """Render ``ALTER COLUMN ... <type> NULL|NOT NULL`` for MSSQL; the
    existing type must be restated alongside the nullability change."""
    nullability = "NULL" if element.nullable else "NOT NULL"
    return " ".join([
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        format_type(compiler, element.existing_type),
        nullability,
    ])
@compiles(ColumnDefault, 'mssql')
def visit_column_default(element, compiler, **kw):
    """Render ``ALTER TABLE ... ADD DEFAULT <expr> FOR <column>`` for MSSQL."""
    # TODO: there can also be a named constraint
    # with ADD CONSTRAINT here
    table_clause = alter_table(compiler, element.table_name, element.schema)
    default_sql = format_server_default(compiler, element.default)
    column_sql = format_column_name(compiler, element.column_name)
    return "%s ADD DEFAULT %s FOR %s" % (table_clause, default_sql, column_sql)
@compiles(ColumnName, 'mssql')
def visit_rename_column(element, compiler, **kw):
    """Render an ``EXEC sp_rename`` call to rename a column for MSSQL."""
    old_table = format_table_name(compiler, element.table_name, element.schema)
    old_column = format_column_name(compiler, element.column_name)
    new_column = format_column_name(compiler, element.newname)
    return "EXEC sp_rename '%s.%s', '%s', 'COLUMN'" % (
        old_table, old_column, new_column)
@compiles(ColumnType, 'mssql')
def visit_column_type(element, compiler, **kw):
    """Render ``ALTER COLUMN ... <new type>`` for MSSQL."""
    parts = [
        alter_table(compiler, element.table_name, element.schema),
        alter_column(compiler, element.column_name),
        format_type(compiler, element.type_),
    ]
    return " ".join(parts)
| mit |
Tanych/CodeTracking | 308-Range-Sum-Query-2D---Mutable/solution.py | 1 | 1989 | class NumMatrix(object):
def __init__(self, matrix):
"""
initialize your data structure here.
:type matrix: List[List[int]]
"""
self.row=len(matrix)
if self.row==0: return
self.col=len(matrix[0])
self.bintree2d=[[0 for _ in xrange(self.col+1)] for _ in xrange(self.row+1)]
self.nums=[[0 for _ in xrange(self.col)] for _ in xrange(self.row)]
for i in xrange(self.row):
for j in xrange(self.col):
self.update(i,j,matrix[i][j])
def update(self, row, col, val):
"""
update the element at matrix[row,col] to val.
:type row: int
:type col: int
:type val: int
:rtype: void
"""
if self.row==0 or self.col==0:
return
diff=val-self.nums[row][col]
self.nums[row][col]=val
i=row+1
while i<self.row+1:
j=col+1
while j<self.col+1:
self.bintree2d[i][j]+=diff
j+=j&(-j)
i+=i&(-i)
def sumrange(self,row,col):
sumres=0
i=row
while i>0:
j=col
while j>0:
sumres+=self.bintree2d[i][j]
j-=j&(-j)
i-=i&(-i)
return sumres
def sumRegion(self, row1, col1, row2, col2):
"""
sum of elements matrix[(row1,col1)..(row2,col2)], inclusive.
:type row1: int
:type col1: int
:type row2: int
:type col2: int
:rtype: int
"""
if self.row==0 or self.col==0:
return 0
return self.sumrange(row2+1,col2+1)+self.sumrange(row1,col1)-\
self.sumrange(row1,col2+1)-self.sumrange(row2+1,col1)
# Your NumMatrix object will be instantiated and called as such:
# numMatrix = NumMatrix(matrix)
# numMatrix.sumRegion(0, 1, 2, 3)
# numMatrix.update(1, 1, 10)
# numMatrix.sumRegion(1, 2, 3, 4) | mit |
darmaa/odoo | openerpcommand/read.py | 10 | 2122 | """
Read a record.
"""
import os
import sys
import textwrap
import common
# TODO provide a --rpc flag to use XML-RPC (with a specific username) instead
# of server-side library.
def run(args):
    """Read one record (by id) from the given model/database and print it.

    Uses the server-side ORM directly (no RPC) and reads as uid 1
    (superuser).  With --short only the name field is read and shown;
    with --field a single field is read; otherwise all fields are read
    and printed aligned on the longest field name.
    """
    assert args.database
    assert args.model
    import openerp
    config = openerp.tools.config
    # Silence everything below CRITICAL so only the record output appears.
    config['log_handler'] = [':CRITICAL']
    common.set_addons(args)
    openerp.netsvc.init_logger()
    registry = openerp.modules.registry.RegistryManager.get(
        args.database, update_module=False)
    model = registry[args.model]
    field_names = [args.field] if args.field else []
    if args.short:
        # ignore --field
        field_names = ['name']
    with registry.cursor() as cr:
        # uid 1 == superuser; empty context.
        xs = model.read(cr, 1, args.id, field_names, {})
    if xs:
        print "Records (model `%s`, database `%s`):" % (args.model, args.database)
        x = xs[0]
        if args.short:
            print str(x['id']) + '.', x['name']
        else:
            # Align the values on the longest field name.
            longest_k = 1
            for k, v in x.items():
                longest_k = len(k) if longest_k < len(k) else longest_k
            for k, v in sorted(x.items()):
                print (k + ':').ljust(longest_k + 2), v
    else:
        print "Record not found."
def add_parser(subparsers):
    """Register the `read` sub-command and its options on *subparsers*.

    The parsed namespace is dispatched to run() via set_defaults.
    """
    p = subparsers.add_parser(
        'read', description='Display a record.')
    p.add_argument(
        '-d', '--database', metavar='DATABASE',
        **common.required_or_default('DATABASE', 'the database to connect to'))
    common.add_addons_argument(p)
    p.add_argument(
        '-m', '--model', metavar='MODEL', required=True,
        help='the model for which a record should be read')
    p.add_argument(
        '-i', '--id', metavar='RECORDID', required=True,
        help='the record id')
    p.add_argument(
        '-v', '--verbose', action='store_true',
        help='display more information')
    p.add_argument(
        '--short', action='store_true',
        help='display less information')
    p.add_argument(
        '-f', '--field', metavar='FIELD',
        help='display information only for this particular field')
    p.set_defaults(run=run)
| agpl-3.0 |
wdv4758h/butter | runtests.py | 3 | 226786 | #! /usr/bin/env python
sources = """
eNrsvWuX20iWIFYeP8bLHc+sd8Zr79rrg4ZGC0DFhB5V/Zh0sWpUUqon3VWSjqTq1pzsXBaSBDPR
SQIUQCozu6f6+Cf4p/jf+Lv/gT/5g7/4viIQEQiATFV19/jY6q4kCcTjxo0bN27cuI//5c++e/dR
/PavPvroo/VNOi2qdDotymIznb77T97+e3gahmFQlOvt5n613cBHcJEv10V5HsCL0bs/e/sXUGa6
vtnkzSZdZfXlu//07TThaud5mdfFLFjls4usLJpVsKjqAAth/aycB02+zGcb/AUtXFRlsNiW8Lsq
m5TaL1brqt7Ay9FoNM8XAfczLbNV3qyzWR4nh6MA/tX5ZluXwe8ibDw6DL6Gj59j79mmquPkO7t6
Np9Xa+wlXmd1k9fSyHldbdfBJOCH6Xm+oSdxSOPIlmHSFkuNRugp/osOLqOx/pXRQCZhAyDk4TiY
Q9eT8DK/uarqOf1eZNvlZhJFY8DQJnuf1ZPw6O3LV0evXx+/eB62LSHGJ2FVLm+CelsGOIYmuLoo
ZheAzA383VzkwXnxPi+DZnvWbGpEaH69rvOmARAAlbop+hc+Lo3XQdEEmcJ//j5bbrNNdrbMzSJu
A1cXeZ0H2XIZ0FQEGfzSXR8QUPk8yM6zomw2BLAUdBtCIgDoixqxnpebYLbMmiaH6T+6zlbrZX4Y
HFwGEU0bIOmimgdAQtSg21QF7dQRYwRBAuConCYpojhpH9BXNTkB1WloVpUbBNzqNsJ++QF35MHq
fF5gR9DzTSDzzKhRCNlUun+EpYWsBwSex02dCfaKUpAV0dOpdDKVMUewnjbjLo5htDlgAz7bHpl8
LrL3uZpCmOnzEoCcFzWsSRgCQAu9rVJuLxntoP7wYBXuon5cnkhWBvmHoUH+Xz9+9QtcAruInwaM
2GGqx2Ytgu/gINfUtAqISTykGSirDVV+FPlG6R3kAY2hbgBsa4zTTb3NO3A3F9VVIDWC+GxbLIHb
jYP1cnsOk4kgrPP6YF1XvwGcB1WZN4kNiTAjAAXIIQ7bztVX4qk2ocPbaFmU+bJoNlFi877Zao6v
gGSKMgYyWxTnwv6KRcC/Ux50Kh0c6iHJ63k15W/bGhiwfru5Qt55k8IW8iavVwWsg1/VxSavjTII
K3aPlCytAZu1RpYcWlOHxDkG/g7jm1DVtFkvi00cHsIoHyZW2c1VeoU9xuHfGxtSerc5DIO70tJZ
tZxP3sBUdapi4zF25H+TeNAACPcgQjajByMPytNNfbMoahoNQuFMTrWk/bAqp6tqXixuYDCrJqa/
48CaLVn6SPTQlD1x8o7KMWvwlVJrUU0+rgWz1XZ9SAuHzgjpJ+/g23JTLKGHZ9myyVWDRmMnBw8P
T4PJJIBpa5ux6xI61Ct7eGZLhwcPT0ey6a+ER06Ck1N6BFyG2gQ2pp8hzQFiEYlIdoTMFgYbTnvQ
8iKW2mOzpEOmbcdptl7npa7UkkUOqLErQd8GCux3XSQ8B+7gVvfMjfFWDwSnuh2FrpR0a+05FvMf
7lVFubVh03PTaUJRR9tLh8FcVNVlKkuiLcbLYNI+aMGhNydIYW3HsLJoryVZ8GuAAp5xTyBYvqyr
98Ucd71gWc2yJeCECmgBFRkrCzlEN9lsBluLUF1TLd9DVdgekWRxLCh/cS0UpnAbAlGGJVhG6CJQ
UnUM0C80JTXGFKxusIkGxgC7uMMzofg4QNGMOKeqnDJ7SDpUVTQod2UlCMhUaUxIOC4XVYJijP/1
03xWkbzsIQuBDbehGHpPzCW8SKct6PJtZIwbGTzAqYeOfDhxeQlLYjA2q0E9h7/gEd9qGjWWqPTP
SVCAcrAnBtWChR5cC2tY8zeG3AzvYKHhB8xqo6QjmexU9dw3r1Q6OXTww21M+O0QbgAMBz04Ehs3
1EiXkXBNLIaf3SkUPFtcVp4x1+adyMsveKNINN7f1AXjhI8fVUk4lBUgmM02m0wJve35RBpNFQoF
Ajx2xKqfcfC778bmslWgpHrVJCa0e7DpLswMjpQ0jzpQQoOKfAbXC/bN0/arYrlsR43jIqxXi0D3
XpSz5XaOpKReN/i+ALGVzzhCkS9QpFWnFaMYLGmRzvHokBd42gDCPaSVcPjtE/z4FpdxRs2o589E
+vuWWzcPI2PdTVVaJ4mhcwQAQw3hzPLAjHMEN6JPCvrsgHNtiaL2gund1dM6Xy/xPB8eoGiLG5ec
tnFF53O9gpg10ps7OEYbx/hjtq3pIMl8G7ZzHIAcLgm5vAEpxQKCq8UEJSMoekNmMbtASTmxJAaE
z2CijB4ROY/lqcNFzXEQG6UOaLH7hwNl8AjdyAFQnyfxDClDrOruwEx+YQ6DWlGNmMPpADYEk7Om
FSBquuEhHJzOL4QyhC5WUETt+hdZA43U7VqNVN2oh+mpQahy6XQ6L2bAcXfg1xiG2hUmzj4Sm5US
BWII/6MJLk0KbXu7EzTrfFbAbjPLYH8AVMAag1MeAt0UeMqEU+IlzEzThESA+Oth+knosl0B68To
hSVWOLhZUmkKFFVvmitgBbGsDW7YC+nJp4en3e2VRMFOf1j2tMOIjQLMi6Wme5TUZx/7HNlZWp2T
JW61s00LpHlak2b1ccWS47ROT7N12LyQR98Q+hUvtMQZ4JVneLpuggPk81VDy4p5W/Dtt0bf334L
E1ieL/MNasVkHaeBVkUdjlqSazWT+pF16GyAJDQCArXjE94UJbuCG1IM93CFuwzy4CyIVEORNTqU
5dTAqIrsRt9+a3XxrSrDwqgteeBC7JPKCpaQTh7wqW0a2qDWWQGk/xgaKM62m/yorqs6Dr9uBYBg
tYVt4PmLNwHRboC0G2zLOcjVM1QIJWZPiikwJJFQQOSgh6UfYD+zS1ncDo1bs26ufxoxV/QOFs7k
HXnKkUddVYjnJEwr93qWrzcOYjyz7IjPcuCYBEuv9K80JoKBvfQmZ/l5UcrheIfa5BqK6OIw46po
TEXhgVV6Sdz1OnFJRXGiAYT5qeZuTVUzwOU5blQ1rE+ujlobmscxCnw4i/i+2SB1x/jH1v4rMsI3
IERMp3AiWE6nwix/rUER0m/LYRdQDr5/tsxWZ/Psc6jzo4nx0+RBmsRaHvQYSEweetRx1D090srt
4FcXeBpZA6Jz1tMUG171szrPNnnvWm/1/jcwZywA1vni8FvAAQi47wF3ZzcBHqBRo8sCjRYdPgPy
hMN6XS2by2L9uUiK1pg022PFdbXYoCRMMM2Bji5hv7soGpMTsjJ1YvHw54+/PrLXyR21NVq9WY08
8jRCs/9wQqfUBBoBcQcewEh/C+DYTYmwOoddmVhhCfhRGEZU6BlqulKqOZ6/J1hGH8C1eU578ImK
AaRHAAamel7hWQT49aJC2UGfaQMQFILjhSpJXDOTTYmph2cVJXmQu2EzEyEfZut8i4KWaLBaEDN9
ENCnHVUWTmkAiwh11CysXS3bUW9NBY/bxmDpNC1St5sKTgzFjC48WA4SOs/wimNRAcPnk0YmtJ9K
U4/6R6llyT0G2gJWVrcYaKbVIbKqZMxtcwoIUrqqmxgaHmBGtwpPYGXMbwK6AMChAvfi65+i3ROo
jkUUCgufpEwzii74GE3XS4RCgnqd18BSVgh1tAAJHiEC+WnLQjPVHRtiSQoCC9RiloiVyvyqjyQJ
8QhdVRfnqK9v27FqRA2hAZG+Xc+JFeiqLTJwIaijQdFucQShnJ6fV5v8MHgD1ertEifirHqfo77s
PbZtQ6n43aKuVoRgUs4gMfC51yUX1PISyQI7hD0Ex5c16m7RQz40AlkoHtKpvLUaGcgODdAYKzQT
1NiOg8sr/cNVDNG+yWoh+wXWgBf0AZDEjsKNm8Qj9JUq8rvvGLK/X9cVkMzmRsOH/Ax7IPi6ircW
kDu0d51lM2x1fjCrVmtYt/Gj9NP0IWyuc5pXXEW1KVdN4eiuxEij9TlJMigKyHEtnVXrG2MgMvZ5
uq7WcYS/oo5EF35m08Rd/P/nrVgQzC1IeL+XebiHmIGPe4wjAzSyaFgoem3ZK9Sme2812Ydqz8h5
d95c8EVjAYwjtfYAWrJXKNwAJXHP91XHUPyAdBuse5L6NTJOupoPQ1OSohrqEoIbsLcbpHehDFcy
g+rLvIxptCi18xVn7MpMgbHvDvyzpaloOj1Dpj6dRknivWzoL++/ZehW4c2fLDh66pD8CaPHCmlb
vLdsV1WzHJMGeKB9hWSjfbxLgopIV6e99brXOx3I1V0ILZXR7RvxgOWHqb+Zi2oJZzCoaQvBmgmM
6Zop6Z0xrg5sAIv1Q6q7URttPIgZo3vN/sYmq+utnYz62+wZIMP2IfjnmnT+8YOZ9GndEYj2bvNK
8Uau5XLGy6uUt9nYbVW4frtFfBxY2DFZOrBD3A6FH6bGtsRgy7Z0eZVYGhacrPZg87XYaomYpA4C
cMToUbXo40O63x6p4DAW5J1DrVPP1FFx97YJtXy7PC72cZCv1psb5gZwkib13UIdvHx7rtmqK0sG
uJ1hL/WN1fCG7FF62nY2bOf8D4/oWgrWc2wiRQgjOd293VobJkm2sFfSBMOnTLXaOkcftvSMrVat
gN45RNrBrdCQtLXEyLcreqOToVoboYUWxTUt1CSJZ/I+dmbPXGPOijIpE4T2LkJxBDdFvpy7Z4Um
yOEUAQhfZmS4iOceZdN4QLKuNRQUqUzAWxWJDM9mN3aX7tLVox+9+8/IOFMZXqIoU5NAUed0CgGZ
evPuP3/7f/75Rx+JprK5aQxzSpKqu7W3aIohpb5kY6XH6iWpa0ZbNvIRS6Z0uqnwakGzELt07G1D
sOzhC/ccMvJWT72VXCHKRusd1I++JBPHR+lPgqsc979WWppMHuFEHQaMieDBOIivxzdJfxs/5asl
Or9coSXkVXbT2I2SCOa0wEddlt2Cs1xdElJnwWa7XuZpv0j3efCwuzdtqjWcjDYu4+rfzOwKrgzZ
UYXqdbRqUJW4jaW+jRvRex7RB0zUjja8O2z42clZXV0Cb1BM7jS4i+eN4O6D6/nnobcSsDMBqN3w
4Ng812AmQwY4CybmWTXP02c1nZFumhR14wv69TBJ9sBOta1nuB8tuKHFdrnkZz4xWUqDBIWSab8U
5e2q0yV/QV0wbLubHPeneEH2amU1DvS69hi8OTN3XM7za4+22ttrxxJpP/GphRp1/Qz5HNbAHKFO
EroVWsdewoJJymntHz1/cfT8zcAkeGG7w0dX1AxmMDdBNZtt62C+JZ2CwTEz1vlcoVLG0gfpdmbV
dFHAkqTbgYbuaGADCD+rt+XnYWftMlSDi8HoXXAyDtAQ5KLaLufTRVYsPZPnx7PRavjZDOsTjc3z
DZlj5ubB01lOclDSW2lP4yLExKqnMWykOExYNO/zGs0nptgHcKr4k3HwQDh5h32vqvl2mU+nCKhs
JI0yB9HYmIKwjQXww50ivjB4D8ihAzvuseubT8JRi5ee/Q0a0y2pfa59kDUuAP7RAQYejYOfkC0V
cQw4328QtdZF7W8AwFAdlnvgKfOrveExRmc8tUmIYH73X7z9S3HnWGebi5RMo979+duzG3LLGLGp
FL4KSCePXIO1ESN8TaCSvu96syzOFFjyZJWV2Xlej1qpYhxUDVrooslPfl1sXClDwdE2tFpV5Uj0
epke9+vp8euvnv9iTF+eHr/iL6+Ofj7ishXg2WgmO2vwJxwmAPH8rWjgIQjl1yBWocq3mRc1fuCK
xU/gi5dAsDA/5Sd422BOHV1z0ouQplWdj3FsEd0QRWM2lKKNPSrJsrqX+B8YQs4qW0+zZorCnpxG
UeTsCu9UAAqbhRJj1o120NIuWyuh6zWAGrN0aslWO+54pVs1UpZKqwbnpM6b7RJdCZrNNIRTpnuZ
aktgjH2zpqvopImbUEFHyDdrQQnzZ59Cs7oq89qV18nqkebPd+f4vNocK1LP53Lx+Pbt24Bn3JAe
hXav2gMcVKlvWFDgPYhukvM5qRtjKInb7/pqW8zl3ATfOppMagSFrZ4xsaeR5wyiJol8k9S5GAm6
ozr8AYd/Xq/3HD6UZEepcz38853D55tcWJw9p1hhAB6KTNEfB4SsxGwIsTHQEnCQfVtCBuG2RItN
E3G6xCa6xv7CvIY74tX6smqK65fQVsycMMXvX2ZNbqzd2QUQuayuLWwWYyYAZLOzyQOHRNC57jzn
ZdFcFGvb4hKr02GFfVos6rDfyaVyq8NAPU8WlNvVGRRztMb8CugxJeIDuFAeIoNoFx7HMBxErwYg
W96k5ij0d1g9rJ5E4ECQx48W3efqLUEMr+nTOv1BD46UD/z8Wp+73xdNsYkRj3y5H1wfBtdM1DHO
P6DXr+B2CvnlXP8qga2LZnRMou91Mg6IW1grZZ+aRJpmZU0weA/po16DhbD5OGrVYM9anVXLYoaW
IZc2I5HCvdCojmSxA1S4LAxIVpf4elMJ9YJA4mw7CBKq6q5hSDcgvV1k9ZwAIVVKydefTFkmYL0Q
CTS4V0tfBrIswGDYFmxi/A5CQ7XcbvLJQ3dhsS2Ggy9Xk8V4jddVUWqFkDGKpHvHI/05dgR94xOo
eVBsCeEiv/9cgJcwin8xu5EmfIoJnpVWPGyC6H5E7qGs3shEdqNWnVW9bE93tvaZuwO6WQLesWLi
1pznLY/tK1aSnEtFYRxbODAyBvK1c0KHI0quW4PX6W9gXuI4SlOQ35J7JQgzsYYWniS3nATuoEP9
tIfbPMu2hhSBonsjxewdVRXtzBFfngRatgA64jZPHlnWmvhM921zRKt3tZ93e5dtxe5eOULrzR37
52YdAOjhaPTs9ZdMZ9w6y9e4r+i9jq7n7e1O7YdfIb3RfsjNtCb76tqhqgsSXPhotJC7VHUewH54
uTHlojgAQvkmXxlMbZlx/fbm1brHZ0iOCVtfFw3ZwbOYxM9MvaVmqyhazVEpuL5hPMeJOAPVOTtV
r6Qp5LpTUhxMp1GjLTGBPEnkRTo1wHhCtFc3Sj5Qv83bdRTCSRJx+H2v6si6GsKaM7SF8elahowX
BzVTTtuW2ORITb3qna9evHh5+9aXPc33DNpCo0cM7RVFuZvUkBtVKz4ZtFcO7W+Gj64DDZkN2FV9
u/8e8qtPht2kjmTsOe+JcUt+vYb1h3xowodje40cQ7UiWxa/zWmZSkfKW4sYg7azbodDz1FZB8Io
37O8z12XA/YwqGpDijxeiFKDb6fJzo6dzBu39lXF95r+VtpBKZkW+H4Bm88BvVFBE8QyK5+3ldHA
ic3drLHpHXSW1XCYQjWkyAAEsVOfVJTUCJqoIJhFizMaofBFNhNHHc6Na4iMg82vMyjU4JnRqJUG
36BXW34F3Ap9ncjKHI3E8LLRL5IXNmI9ZtmyEfYeE4HHQUezq7kpq9iOiqxE8B+J+vpizlJbagV/
u+aVFRmmkLNf13OynfouF5JeZd9J26KxLYV5IBVlVax0VVxjSHDjg/svUXiS8zouCDaGxE2Cl59D
ZeGwYU+IbjVVecC31owCWhp6Cw6tRX+RNRe9V834Mu45A0yn+TvNLWhvNg/UD7V45kxd88gwTKFq
pIGiQqFqaKfKg9t/mC6rKyv2QP8FDukCH3kr7LknWo6ULst+iArD5pGJnjLvRY/hsEMlsLKM3Ghg
udnVgEWBn+1Gq6k3vF3jn9+iceRGeq/sNm8cV5VvdSRxXbRZJxtOE1vDllBLHmFrkZ95sWh4O7pi
MRYWbewCaLaoFnW3vjVv3n3ccsPtVd4ZVEV3WCJYlpXG437n9g69Kh6mmrHPM91pq/NV9V5NGupO
HsKZ4bzEsCfUXePd/bkWbC80T8B89HYbxGgWYfze1Hkuyhs4hqcWasx+2g1Zfl9dYNvUk7mho68v
bo+j1h9GGpr37nF8SCYdDzSDQ/SqenwqJj5EgzQwy9WoK1bKEMemIwmeFGCS59VV41MveSnA2k5m
FyCWxZ9++jOZAnSIqGYblAoe/PTBg9F+uih42mzmaXOBth5pvULMO9Pvt/6zptv6tc/lZL9CaUW3
NPsqNIYwZWNpCD0D6i2cvF7lFtpEwy6g974xbYObm3U+CVfzHwMzmV1sy8sGBN7Jjx99+uhnP/Mz
t4v8el6c5xyKAJtgJRL5DuFNdVfB39m5vFuZnPexRby6A9YIyPBtaMaxts+al4ZF12LNRfYw7LEn
1eWoWFcAgf7hHRwgqEPYV1QNm2jwqcGkoVaL2sTYkwX82N6Qx4H/mO4XpJ5WZbQJLsvqKsDoTOhQ
zdOKNnPYaRjc1WCODSltoQSXap2XcVSfGRbrnelgxuSx2DnbLshYBLlDrMkl6QvgAsX9qDfksFTT
k+cQTAXE/A0aM4ZTkK+KoyxPZ8uqyWOD6PGsIKZW9y6v+nSkFLAILw3kMpT98fGYZJ1w1OFEOz51
nXW4qZvgfVYX1ZYbQM+vjeWnJRaxh/ebapXfxzL3N9X97D4tHTRJswteXw8IxvMaTpidCs4/q0JR
W9anff+Munjq2LuOYlPbOt+7nqpMq2Tjl4XOfoMHGD4HohR6pcyDtb62IwddXjnSz9lvjIONV443
qBNKjywsjxXuxhojY2ucYwBea5GBH5zd4H2UI8KE3JZqStd1G7L9jCP1KiKX+avOzhKZ9aUQxf2D
prx1/AymKIGdF3N1kTZjm5W75M+K9tb9u9v6jGyE0ybfiOoitmGycbXH4YaxeXl1QmM49e0Fv8hv
ejYCy1e5f2+nc/NGu63Ad9MQJkr73DkYNngffBy4KxYh1qPGwE6AGi5lzmgcCQmoyWpdZ070q1On
e+sduiBZq787RGc+VFVNyZblvlmyydfROOheVNhLSOsErE7Du7FqvrkbY3X40PPeuKRkLLf2INeu
HokuBF8d9t3kpAsiyeMKVU4RFopE++ZeDTZtVDWJTbhpHCYgzuPtoF3nBeDPGBOQ9RfYmdSIxlFi
uHOw0Tfd+jRiAe4ND9K1VlP+6GilQ1MbdalPTMppACcPTl0B1mpCpru3EfumiZvEIHXJPmKxmlEO
Ows1MbqdTxJQ4Og14V9SApOfQQybaBYYBkDqpTUIB3Naun2uQAWCc/Cw39zT4sWy1tvfUfQBFqRD
TZ4cFqftnnJSHJ72Qa5xaXHW/l4VsfQyXj/hIL/d2SgU+hBPpP49B6nIu/F4Q2bUZjgyot99nCe1
Gh85B3ExtAahASk/6QirRXgAsSQDFLRh4aMjrboKnzzEM/0Wr9Iw+CaCT+Z+dYVKmAW6RLeMc6F8
jkkR0K9Ht51J1sZ9sGHXUwtHOlGnDvhtKYbgd6IcOBSvOTUbGBKCMIQPu55A83EEgLrbIGBPQXDa
MViRLtE1ugbcxAKu3zSFdVUIbo/6oFVE1+d9JQQWgcpb6gyE/a63pdRIixLNWuMHjLiRZzTSiw0k
viPAxAC8c6c/dOS/I8GXtqW2J2HLBXQpNzQuaZd7O91G9z28ri2kApRBMdzSHRANWpBvHxPdfWwh
/Fbid4+Q0FGR+HZ+Op/ySsabvElUA9B52QAH61HU8YIuqSasRFLYOXYu2JJxSYf3ZNRiq5KTs6DE
JDNVccgX83w+oHzjtjzXPdAMDTxJuQjr5pI9DZZgOK5u073bXGTNxmB9jrXSDI7x8/1njYr3nI+I
JpgLiUmtOwiqvcsgUcoaL32KKiPakXNq7znXKUvN9ga6q44TdyRojpybOnJXCHMT9p6T2osNkMCp
6GnwHw1D0G5/qlkkx33bpbLcsL5oSPtWTbNdgyyqr7+YVBJRAhPeGKVowI6xPWYXWa3CEIb3vjhB
22itS1dMgW/jm40a1xiXk9yXN1W9cUNB4Aoko+5WHS7hNmALXldNU5yxDhkgwJ22XZLwzHYtVjFw
dC3sb2DVYQPqupzyMZADgu+aVwVgHDDDoxH3aXINpJv28LyO1BJU8VPtmxF9lQvQ7n+Tq/1ZrLlL
yYSo4SDb2KBnx1RL3oaNCvuPvrRirJWfDKrsTqjsaR8lu9IARceWi/Fnz7+mGJ21Dc73nZ3O0c48
YnkCzvagqCsaOzSG8bGKJeMq8S3lRseLpiKOswCSJ5FNjRFm8dewRIuK1QGLWHytZEqKTbe8oeDB
dFvFJhYe01jBIJnwYAuGhemmWA31B5vFRvSkIppjBQUBMZFdHVIX5n3E+kZ4jDJMpA3ft8ljWZbW
0RiGSqf9N2CIBMsgVopIReOazDOP2gpTSuvtNfUfoMTtFkv8aMJ1bHID0ElTns+t0XZoDQfvWdBQ
Hd/01O0etfjKcxavk97tZm1bg+9lcF5b6hfDOFGEhWsxf/XHBwHB49pBqVT1l90pOfXZtj/s4V7U
qGlFfa2twePkFrHh9VH5eojWfHN/PSYgkuHm+qhSj2EnOvxkZJGSBsW4I9dRljSNuWyAWKq+FTEW
on3Xp5aPOpiqtW2cTV0i3m23bwR50LKwakUPYr5drZVtxtlvxsFZUXbM4tfF7LLlkLChVjwaNFBD
rmaFP7CvzK4Gr8wGL6y51xQBFNgWBN7t77NWl61kds+j3pBjzH9QOEXm3IpmuPB00LpO5AqtaqD1
6YQm6J0cgmisp3ttzfQ66c5yOxhOeMKDmWebTJ34roZPfFSNKrQTmPpOdYKNNlJjwSaJ3UPeD3+0
oysb0ux3F6RIeKY0WDRn6F8b46iSfe5pWou6usL8DgHVD3eYH3S7Rb/Yvl5vDyfNLE4MO8pTqVuo
caVqN0ZHzOSB/qrn+pIiL2cVKu3i3mtuPi73rtmFpNyxwdx3KU554mHeO0bPQmATh1Rso1QuwmV6
7aW7VYyN4kEnnCcVMAFLBoyEHuxje8/u8sR1dkQ3OHp7/PqNT8tVo9MD7KYcp/CQZNT70KAZkkl5
HW0ucFu+L0SdeqMYrNbLDJPhYMIBFYyC7LG8MSuGx2wtMJ/w2s63rPN9lMzChsgOOmPd64HwXtpt
+NgVxDBwoWUV1VtLlYSlJMUh3FRb0Yqj32InphPe8lCchcg1VCi1bI5oIgsvinGJwWWNXaFf/Ty4
IzhKYmgvauMmdCROP1F6bpENzuqvY3KmXULYWu3dUaIXsA8849ilHFPGRBPAXjiGhT3Br1qlYyPH
GwmB424g0ggudGzH72HR9tnvxAVlnBPwAGfrNqzg7GmhvU4/q7J6foyahnq73njskdw6PYFzBASK
adKqlnx+Px2ELvsxeguMLb0oayc637Rn3jGff33aLQyT0j33srWb0mWJBIJ7PrUTKaWBbikmQYzX
etKuTSTcqOHOjZgsytUDHxtq69ZDA+tSMqctgYTQGD3ruBDzHPURuCf0yTe6567WbJdUvsWqHQU1
PtxNmLdrND54qFruUGy7/xw//+Xjr36I3jhcDNFG0vZrqKc9bleGL7thGFyxNbXhPoQRYTpuQMPG
kpWhrdKeqQN48EQZ4iYsghxw2R7UtalRjCQahBNaRSEpa6azq/mAPknqBVJRQsMzCq1EMSgoSLQj
M6wR15tj3BM4aajm0uAFhuBCj8t6OjWDSVOo6CVlCO2xtW/RzDM8QMUc6Q6xMCg0QgHdluntvuTb
sF7cGJfTghaV1RWt501/7ibt07m1pKJ0/FbHPo6Y7aMC5Gxyt1f+Zbbyb0csyIgcxuK7dRIFd+07
aqOJZtMT/FD1v5EgWZLVjvm3QP1yB9ROb+ub9eW5Rh9sO5fnqJTw7RjG0ZtD7gGGZpdArdreYFlV
lyrVW2aLP9xsyh6B/ENroLGa3nlwNkaOXaewk+0ag2CrdAvNhnIzKBLKSu0SmZoZYOT2hXMLa9Me
3E1oDAdsBI0xq2yp/FW7z5PJQyB4IjMsMgLO2zBePbc5qorDoxZGkqhS+YHWmyYWywJXMlPnJBtt
3nOtdZbaocPrUQO2DukaM7E0qzXFh7cwRWihImk7MmYpUtd6fZE7NP64hdsNRO2S3Mh+NpPOQFG8
FbQP3h454Dk9t0sbFixKLnDeN5ecfTnViJKhc83bYP4Tio8Fb04enDrhNDmEZChdSNQLVTyEnc9W
9sgLw0KksYQB9NhozRUGo9yscRFQpr7GjnND5gm44vELWwfZRA37GW6SxabNW0faD3Eoq5oDZTZF
TTSpx+tHa8XE93d3OJtuyAUeoh1xodVPvblZK/UUDeRuzamU8LxZqkFQGF9qJ/mw0Df+m4gBeYYc
MlTwGlsltE8tCVzjmF7ouAl67vV2oJSXQr2+86NeDqiIJZ7MraGCgLcMjjOYei6JTN42tvcEI+GH
bDkd/q4zcJBiYjaraloC3B2HrrHqYMILeUku2zCX6+3mPnYLwG7XNEGwRrhMM0hIhi7GSz+ODOvc
JamFC9KYLFrGWTi2NWU9m0kXeYcjPw/ljUbv94lnf1E6lf44qFp3LFQwFBjC4XbSfqsI6apQDZ8k
VbrfvBNKSMBAmaQTt073Or/NWYlBnPMrNPacRJGKhbMHiFbe2I5ldY9pq+SPwG9oTcz522Qf7PHi
4vYxEUfic+ASA+UwDVN9zb+X098dFLaV4NbMqjUeFDecQvWSAjSIoUr+Q869rTAeGhFTaZ+TjUEg
UsVui13bTJqQYqcjnwLV3OdDQzAJe++YsYM7GF56Th5rtOrRHpZDzVM61mBVnF9s+uyEqTIFj6Ap
W6MSWU1Hmqaw/oEBzC7plE2tu+MjU4gJfktV/BoPN8UXJwefHp5iX3EEY5pF4wA/K5/rh9Uu1T10
rd7pPlfepji17EnytxisGY9k+zb7d6fBxwRI1AN227hhxgV1rAnyX+vYk/qjnZPaO/RHp6M9PDub
xqBZ7UYvzXxY0GTdph23wBJbGn8yadprCBRf0CRZB2M1SrGrG/lpfECTPRjKaNfC2+neRMvjIiMX
9RkIWdUq0JDPMaz/usm380qObT2urXI9jb6iTfo1wYMynJ9lSDW9mFQwbK/Bh2+ATDn7x2QyrgHz
63zGFpGWQKYTNPVFZ+q5ZsyXu2fAr/P2UECrUb5pEE7jhug93hCt8QpiWq03TZ+WYl5tNwFpw8h7
ABvZUjg+jN6HwcDEZk1uxcaOr25OV025RJIQl1OM6EenDG6te25HwU7dHRdGPuyifE+infJGU2mR
NSzNRY5J0XxyHsdF3p7pdtml+yWbMr88fnlkelS950jArT3lpibz+/eGfK5xdxIxntifzn4MnIIe
Wx0gDPiMLoBONN2csl27OzOGZwT2gls9Not9QTsUfnBb4g1AbsdPVQWusmLjXPB6bs258U6QK5p/
77W3hmbnxTedoDbIyh90FSJ+UGB8nt2hHbgXHHi3DziWUK9uc2erOVJjyoy2Rqsg+tMu6v0chX0z
Ne7aR9N7a3mSY9hs2agsPnzqq+XURs4XAwq9zApgxRkiDWWeXMvwKrGVCIaB5svHb/7B9nGigz+d
3hga81xhz+RGH8HUIoX1LbbVtIPxwQ9GwfrDDG2MstpW8s2yUrRyNIKxaO4aGWVuM2iV9bAgJgET
eZahaTa2oOKQoX6TLuQ5dKJv/GfbDQXoxSpYfnZzDoV7z4rsEuRRoK2ZLNvI8H6r3Z3Xwvrad9BY
hp1xhrSIg3FPuLreZEFGy8v3RV2VJxHqniOVqzj6n/odFaNIpSbm1pApo8+x+XDA5ZBIQYySe/0h
h7N10GySPxtFD9NDeP2Pr98cff3qxYs30elQco5+CabXUXtPf0pB70mdp7DlxNHd1wTrK4D1bjQ2
IBfN4W7ewvpmikrGzd8mO93QdMOab6f7MOq4vmO6KRhJFJ3u7TwmtT62ZwV7Onr7RncmBwPbmdlS
rvWQEN1MzOcok2ASXOqsZ9ydNXmdtEdtilDMGjDceKnF21PhPmvaKS/M9HDnrMtpQcp7zaZvpU0f
5DODJ53HT54cvd5znZjWFbJOcXNDZws8Y67yzQXqpflpYvvVX1QrtKnBjXAokfy1s+L/4cXXRwaB
Dq5v72w6DYbY4NNXx788Ck/ZFcnqihfN7Q5FLlZM97RloxK/Gzhw8GW8EZzdUVvTHdqzs6XEudVq
VIzjRXnunfi9vLDsNiTY8RRer5ExOTPg3B1yOxEI8TmK0bCPt1d81shjOQJAMQ7WhdnPt82Wkjkr
GzvT2NgfM91YwepcKC2irIff6SabmzGH4SDRfGVbLuNTFr/wHZqtDQlbLw1hywpQDseMvLmgO/I9
UINXoFe5xkO1rdlPzi988IFFDdySrhnkHlsZeYmYING4nePER4n+eweNb8EVRemYoHQs7auMYPLe
Qbw8NXGeXeZTzoEAfciaH2PS6kVxPYHzIl1GHUT2hIyDyzxfTz4ZksaBTi6neKXPR5eHP330swcP
kkNSTGyuqmCe3TS+aYVD1LutaSMjCdQlUcM5zRJeZmSlGfTW1u1l18VquwJBEu/J8RwrtfHirGm2
KxaM2R9fn2uzBTbMQ+9cq+CAsfqmNoDjOHwmeEsyfUDYYgACHh5gRXs7VyI6R2Prd1/8cHqyQiev
M6BnnOPY47tHHg9YgHCpkmFsN5xJgUSdmM1o6fggt4mMo8QCmKT4UsWt9aqcoaGz0gyeI+3cIgq3
cg8uN/FZeYLuuqqN0974261RfJ9c1jQtyu7oeGk4UkVJghkhNlKCYFQH7UVuVGeA9AO0woBW3Msm
DqDWNfhkqgkw+MioT/KUuVduj747f27DmXrvnQoU3JmuTwMFX4Sux1jTERnv8MUHURKwXj/D9U7t
lklZDYytyRmR6FeOMfOo14/dnIUfbmPutyfXu4C4xXgaKpjvrPKsJOtLYDDk07zl/Sc7h5OuD9Oa
ECaCz8NbaBNbKuK6o72kTTYjMWlzi4bwuIdfsiOoDMaaLjYgdbhUkG0sGmdsUUY0c4wG07eHh2/k
cgAnWyxYCBLntLO6WVMaHI6qjTkfOsd3lVpdNToOIsNzz3d9okpaHn5EV9jbXsGMdBPilNRTGVku
0PiU8TZVtXyL9A7fXsHSvoyToCk2W0nRSG47yoRLI5uzJvpIm2OyYgVCqES4vSqErTOhSzPAwutt
XwLIumgukfU3ucreCuvSEqHgvwb1BVkNhP+MQm5f+a8eXKgUrYm9Fo0xLlJYQJhilnZlb2pL0ZqR
grumsABlAcLdWQ54y40Wk9S36gwqQtUnzdjQ1rL/VqTpQcIH75H/YcceZFliULZBaPscPbDq2ENS
ibWyYVK1/aoSTkaOAHP4p9hS+J4Ann02UUJRcEDg9ByklwslRfQzib2UAhuMlb5cKG8BcS/vLY2B
0vWk7ldHVAOWrIt6kbMm3jw62DxMgs8GeGIfD6cJbS6LtSVosmkBtpbP91MX7NaqUU+89niloaAH
G1uj152OyR0tF9Htp0DsimmBcFjnHarAIeeS3dukca+n0uKSCQEZJVl4GQe/4thJ9AvtBYbVKiNH
yKHcYkalDhYwdYIYYpiKi29eH72KTk0WBy1tr8cBZqpYfg/dyUB/zx+jXgb78kXo3qkzMVqORACO
Wnw09SyQi94taUXajZCTs9Szk0P4o4LuHUR0wwaf8Fc1PeDb0KTbkpzwsb2OU8OL1x6gLWbqa1FE
gBjAGgfedmNpeBy4sZg92SYTT/fumX6rhMnOids9o7vvJf9WG+ZBA83d2iHP22J4ZVbPJF0clTda
0vEmnMZ0xGaAikN8A8FfZOiiAozhHGUDuh3ktL849zTDbsxoC+kLoQRKb7aPp/yu4NJETd8zvrTf
wppAFQFvj2jSnLDN8BrsFCNYtWMwh70yrMGNGyyxb+tEsqIyJw9OU5C7luuLLKbEzPKQQjFOo6Q/
s4IVRIdt7XS0t3AaYqjKxJedg5OxSqpC7Bq3/GT07r98+zcfffSRyvU8zepzcrfNN/m7f/H2f/vL
jz7CLMscte4say4O5K3yQYPyrG5QEQZUbbrKQCPi5RK2Nwyn1pivP588SH+c/kSSYZP96yfpo/uf
pJ8EmBgRxFsJzN0EmFV8RNee5VzO8OfFTFw9MK5OMH386udPXnz98qujN0cBcMoUg3HTNodeA+hi
N0ahZUT7x7zKWZZBmAKt4+VwceYAdMFmu5YU0QTno/THQZwt4TsaSbAoyz5MbJQLBytUx66KEjgJ
tPlsW87YjRBEPqODmBBXJ3IOwC1tRthqNVgsE4+oD4xTCVh4LE7AL6lyyuIb+uTG2Ncb8TBsVCeE
BgqViz7FGJePdMRX1CMqPCoR4quGztGwZbWRza1LazQCBwLCqZFETKEEm70fiqqNHnz25vGXn4cj
nHtY0sr3Rjk9qzpByMHRq/WG0ZBm8/lUdR0/qcpFcZ6y3VFVT8lBrcRhTqJ70a7rmiTVw5/Y2BiN
XnBCFTjyLJV7pxrmONDl1OEsOK/UMYe0w/NKjlGBCS5NDmoi8c0NExY7XbfJaeFlC9UoU5tQw4aR
y2UaxHh/by6gRi59ZRGNGQxNTXDg080Y1j90zpf7foxTimTx+uXR0dNvXo4m/I/I5Dwv8xrWkdll
M6uL9Ybo2Fnso/h+vpndx6fT9mk6vy++QkYrsHMFyWiLB9NMrRk4q57X2Qrh055BrK7drlWvBJCK
DrAu1unoH4Fk0acIsMiW5wbzwTj6RNdQmNYfTwx61HJzaC1ezpbbObLMO8HLf3zzDy+em5xi+uIX
o4Z9MmgFdAZyQKr6gzxrbg5kFg6kccUbRsZcy+I1ORVno6a5UQsBGQ0Hr+MUMiylN6NVVqN5B/NR
P7Cj4+ev3zz+6qv7T4++/ObnPz9+/nM1oe2/0Rs9bMGGMC/UTIiGzKR96v4iIz/k7Tpd37TTMOKx
NoejA6R21l8USqXYh1EMWpSxb8KmWqtRr9BaRGjBQAq0rIA1VhlhUqK2aObGvZK52dRaJXJ/Y/PW
scMFsfJYeKhdMvgN+6uQ8sHscMypOtfMFfE6YU7HNIe9ildPyz/bzBCRxSRgsyWrH8piNq+0QU1Q
04Emu8puENOoSMcUnKW1MOf52facSJzoI6akexf5cr3YLtEesAyYM7Fd6CgwOJlgLr8mPMF0PZkS
/UweQm/1tlRC5y2o/2/lkg0IqbXGyWcXVfC3X4zkPoR+PpAMo7JlI/UQsZ3lqPdhY6SHIhjh2GFN
48G6wWMrjq7RqwloCqYAqYcTKW0xxAKDblLg5KE1QgXhKFCekczV0cLxCriTPiMFEbVw/PwoUjeI
bPy0RisuctLnUJbQ0vmyOuNpdvgmcFpKliok2dw06mvVjIhysarOcgvfVVLZZ1mzeYZ09URNGw8t
wheyJjTFsUl5X7ZLQ8PguiEpTwOtgZhYuiWjRVwobf5MUtv3RT4htZ+wvgpFzVLWjMGsG+cizDRe
R75AHZw8PDyFQ7598qI3cqjCyyFVU6QIdVnki2vaE2zEbPGBFelbNhYrsCRO0lk+tx+ilda91iKL
bzXITusL52nXwIyHKo4uBsyHomRFybGiS9NrU+tbVrQhdwQfgQ8zyKDFF/5sr1mi9F7kZoyWd5PA
FKP6W0nswNjsnEdea7FU6jrjqlFxqFzfbSADcD9yLpskiQIFuF4rOUsgjpfFJTkpX4xZQ4c/cdZg
u0qc6ItaOhE7uOuTdtbNa0Udl0ALOSMGX5QtHOXGZC/qYHbHWv18khFxDZ/C1nqmIlK3h4b0J4cB
ObTdxxf3ObezOiqifa2ceKaY9/nk8NFp8FkQPxoHPzGXMBRDPXL80HMuF8ZiimLtNjAylCHedFq6
6YOH6uxrbXQTD5cyoxz0nG4OTftvDVe23VRuyVG7Wofaa7VBHQDpQvbd6O2/gBPt+iblfG/v/uXb
P/uXH32ELEhC5cHjsmrdZ4mfyunt+AWekauGJXqQnLFay88xqM6Y6yvWzZbNR0wxuM+bGqQ94h6E
d5v0bhPchT0IPjCXlx3MAb6x08J0utNWulMTeeSH1JtXsz2qhYF42K2yNZvycxwV3CN2G3bfQbTE
bY2dNnjJzkAQuKOFJ3ebU8SlF5U7B/bBY+oao49G0ysQeJBYoDEA7XdU5NEhE5D4OXGbn/gePvyp
fkq34PK0LfvlN6//cQzMiDPkzuYqZxUaX0NDXx89Pf7mazxorEBgKrP3WbEsMuW++MgE5M3T41fc
/KMH/sc/+an3+Y/1U7JlHBNvVD5/Z2hEVn8x+s5aLF9naHXJZl4yeRRTPPttwcEN3xfo5a6DTFkL
NM9mF9oNM3j54vXxW1mP2gAM5EugEQ69lvNJJKIikbg1J2kQPMZc1NuZkY64paLtmUDrLOp0ZBr1
8D3DhD9ZEKVeHnFlmO7vrCy1HC1TS1WOrbtoC0UFGLqur+xYYauuHUv4HGSENgYrgTJ20qjMlo2Z
q4zKELQxVDajV3MbLaBjrOoza7PNTI0GuS42e7gzNBQDZCDvBOrtaXYKMMhVBpMhsXxS5uHmnSMO
wm9KzJ5YHmGBu3PkC/jctRGgioQgdI6LmY3H0v444AfjHh7wu6hl09FhpK6moh6OEQknig45SltN
pWkWvvNk5u7ghodL8HqzIqh3doIFZXQoUf635WxXVEM6MGetzonCWhEpZqVso7lephgKhjULZ24M
t+l0g+Y6Z9ns8gIWN3kSWjmF+0gDe45dGF3KIBx5aMMbu669COrZsw3KGgeU6HQcbM7EdRoaIfHM
H5xQmbFILeE6fcFdHe/tLoa6fq7UILyhDlKb7/X74nZiegh85oXpUJDM4DmsgJbNOQjVL3s8VvFE
g/0DhUiHY5Sx4M05RfHkQ7a6+OvaQfXzLPy6j6FP74V2f9vmzn1CX/rNEYf9XRiDhHEPR0fLYJAA
OedVjNTeSm0BCx07ycRaSPhvNMrNvYn322T07i/oqgcFY+Wep3QH7/6rt/FHJCMHF8X5xcEyf48O
s9uzA2WrdAFLn3RyuKzf/eXbfy0tEavlgPjAA9791dv//S8++qirAFnfjEZfsmvhY1VYbaCG16F8
NqldCCQpFBbpaFXnIuijtdWC73aUaZuoITC4LIfor3NKoLEm0YGrFw1pKBq+vJvyKWyaX6+XWUkq
0dj43oon5A/HhSl4i1GIuexzdbWCLDNfwcl4DqCU+RUqvdhAHw4r2Rr1+BfVFV1MsTkGvOKz3AVm
Fdf8tDkMfl3+bgx/viO+++vy92L7vCjqZkM219gqjrCciwnJrMJ2oUNUoxswYv6JPLcZeXt5YBYc
qbN+fp3hyaoJYo7EM32Ce0cCPJF+aVkkThKBi4zb2LVQt0K6SXxm9BHwbSAabK8BlastzDn0ROpB
PF6l5ynV492raKDiDWdLXywaWwirs6spI3gSmBNHaVCjRDlg/bqUS9E7MgntxCBr4uy+TDmAccb3
76mCavxE9wQi2qke3pJsrfQrVGFZ5pNWZtHfRXTVaz/8zvfw9+6mQc0rfcZySMHFgKCCCXUsv4aR
Bx8HS6bROm/wDm4ihQ4ljAX0PLvEMT4wfs8omrV6RGNF7S0Mt2eo8NQdbUc1pFpG+Dx+yGiQgqgH
AWkPtk7F2REmctyroReNrBwTT+C4k6RbSmBBXD30vlXNPHDDQWCL2tEy+BixHAHc96hDqp0cPETF
ZIP4B+QgxsyJ8+HsOxdnwsd8BT1D7obE0cPovuIhnFAJxAEhQQE6QGB9IP0+GkYR4KZFTQcnqtW2
xERNiQoQCaSs8ojQdI7UqdLeLGLvPmMpZCyFuZtTwVs99VYy6b+bKXAguHpzLhZWWKub0VXb/Wlv
fAo0sY8c2RcEw+g3/OzkrK4u81Krpk5R9ZRtgrsPruef4/nIn6yVYW31KeOgmOsRDPGkBe/yJCs8
qyntCMjSKHIt6JfrBODHXLWtZznloqeGFtvlkp95w91w6Z2eEMOu0KpL/oJHSrQiJVOxeJEiBeMB
U4s/fOEyZBB5DDv19Q47Tt1rJxzAfm7SLdR4hmfI5yAIzekSMpEEkh8Wg2cYNrHQxCt7uqCsZrOt
DuWrZLE634g9yZXExPaG/CfTDxW7TYJkh5/V2/LzsOPLxVANEr3Ru+CEMrLwfeAUTYw8k9ez75hL
aUb3iUhjrV0DHhBRUIRBfh72JnrrMgzduOQ2jVVP44RuJtw7guDzIP6kDcDf4VhKIYGAKslaxKYW
G1OOghzihztFnGcB1YZ4pBdbrU9CQ1FPt5r6HDDVKwFbE9FfN4mWLk7H/lFN5NoD5SLiFCq6ubnf
hL8BwMIkGYYD/WB2wGGMxnhqkwzBOnr3r97+K8NWbgsbAn5591+/ffZv+fAEwipJ4DesKdmWFJgR
/S8284NlcUZOF6GqGEoKWPwuiXn0cQmfYaR78zJ5dIctzOqsbBY5LSqyGqnldlkBJldO2jKCi09V
WWW2OFVwoC62PekcN5T5NWv1oAC/Kpq+gT9Psib/Qkng6o0dW46vztQ7kQyKtp2uD6Ts8hSE76ya
3xAXoWhX8xy12vMuDN0bMJ1sVMEeU+qlTs1ktJ+Reru/uvuqZZLJGGXswyTM4DiRzzZTtPwtYLeI
5UFVK21qi/CibyaMTr6BtwpyCbHAHjIT3XArDFmFhSCe4Btpt6zwECgchoLDkgU8kgZR16K4hk7z
FVo01jiN8sCj9bI5P1p2AQgqCQrw2NbFR8xdlC2SgqC3NTNlw3btXvMYahvMI2wwV6X9Jt1dNNV4
naLzxXQajQM3u5RNelhsbXhjUPeGVp3bhaffrAmnEcfScVLuYJ1ewYNHZBhL50B61VXZ6QVfPIUX
vR3pmgN94WYyn5MVc/Fb9DqSOoayn3LKmlSj0soqSI30jURtnVs3DvFJjuVosanoB4P8s33qIhMb
E5Ge1bQ4W+6yyuZ53QZUsZbtV/TSQJyEqBVCQJ8wWhOK5r+Wqx6TQnoIhwyRyPaIbq1cxauZXJRB
xN4UtlD9KQmI7cWr4l+cG7cpKbGjbgQjXBCcbluVvgYKKFZTfAFzf+3KxzZLj6X+mJXWcsvlyV+g
gFYmyTY36cb162CGdI2uRbtdyllb21J2hy4eIniJEHWIW/nLS91BIX7r23nCzRV62c1TEHWzpSak
0NeR2pk2ZtpX1fWPJvBCbxupAOyXvXtQbIzSwrPm2J0aQsHqt9DUFLYgko8mToYdL49kpTa2M4Om
dfBYjuEO2OdpsCmR69h0aLcjt+F2NcN/1y0dEWxT9oRxdRt24dQsGmtQersZByGcXt9tcVq9DctL
siFfCldqLIMV4YUu6obGo+rsNySntDkqM/tUTcxEpeKUylZuuca8aKWQSkQKOmvVlTwwKt0BLntV
Z+tgXWFubvRxMK7qkI5i1AjLQglooegN+ixfVlfGfa/uwKCM9iFute2vyAKo/4avbVGpBvSN/zGO
zdeISGw66vweCpd+d18jGqq6U0n1dYBAEt/zgWE1Y6QJCJ+/eHN0CId8wwisNWN4ZSUlgdK9J/jQ
UHqjPhvj9R/+uvx1GSY9YVOJYeApNg7FfmWZ4PGBRiYpNfvur2KjejsH465cnNwmwMFQNFYL3qNX
r168Ogzkkr4PeQPIspO9AJ7oNs2kz2FU7O+r2qduC7wYPPThZJDkXVYsoW6RNYr/TBxN22V2cpoo
9aom0Ta1z5x9GR220sMtuD+Ds7hkL00+gxG2KRG/T6Nmq69B7O42mWeN3vy8K1mwjkJ7LKVH/onB
In0T04HSvud3UHq9Bgk43wMPagCTMNxjDNe0FijuEw+kbyTXQzR2q6F8U+YymNdbyvDUMwP2ALiL
ra7bcF1Sl2Bpe169DffubM2mWt9mJxQhbVj0kdsCJXTpXZTiK2jGr+yVOjTsyGLpcD1jnqUAWurw
S3OXoQCutpZ9ma3O5llwfYii7nVKivF0MWVnA9GntAfayI4DrJv2brGpCY3+Phr9vQwOTxFQ5obu
lU1VhiCYFBnAZTH3Ceoz2JHGVGBoyxJ+7Uq0tnCFZVLFxhybG2g3bbmkVZKusB7scVGAsb6pIZ55
H7+2Dek8qWWIzu74pSMfhtZ1hQbNS0HAdEqX2uxG8cGIivynmMhJwOKEKXDOQye6FVYL0iYFrMsJ
tCgMTV1zBXTukYf68iuVE6k9Z2happcbJqFgX/pmSmBPmXByN5kw5de9nsGa12wUORUZyqicOvAb
bfHMn2f8o2smlK1RwP9lVnfiORsY1T30x/g1DhCm1Dtg/DV8IyO94jiGOxXy5JJihKjhTfaFM1ZN
GOhUqEv2F44dcnAnqJ0b1Xa/abQ5M8b3XnG0R7j/YMgSZ3l0qJpZNBHi3tkcrAWecsB9N2KQN8RA
PwzOwCS0w76Law50NQGBUPOauUe73l6V/LZa5yldbywwjJpcFZA+QgfDaNriNiMqmB1KpeNXAgMb
INlNxC6HG7flRbDEmI4ixT5MRu/+9dt/Z9yx6Iuc1lTtr9/+8q/ZnUOrkyuK8ohHADITggNqsaQA
qPoeDlkmMj7ORCNtNunIunMx71qsC5VVVV7mN2sMmKhGbTyyy+rWVUmM4mFdEIBgxB5ytovMeV2R
rlnc4WHDpydxSL6fM3KIl/MfvUjbdqKDA+4WDgUZoXkSEj2HHAtkEvJrjCMS+lfq7KIqQFabxHCc
onAZUDVUN2D4nQK+h332z3I8mRi1VzAN77N6En794ulRT6/o4wnyZYiBkupq2U6N4fC6qSr0xIkI
gEilsqMQRp7iUFBBHZk3eI2yMNEX+I0ELSCX/QVTzKZS/gdG2yDq1qzdNimKOqKxRk6qZ3ns6xGd
nFGtKHtzoKlk1Om5r1u8ouulgrLqIYQphjklauB5omPw2ArtMwnLiitDOZ6Yp0cvXx09efzm6GmQ
v9sWwGApRXUVKHqbMFUMwEOOv5heiYBTv3ywjVxaYhhvDYprFISJw3N9u0m/dPa8dpLXyy2QD+XW
7TMU4iUoaQGd4wYlP6RkN7n9gsRtjAjEAS3oZ4rB2VhPrQEIE4tHcHE8YfI36U+6kcagBdrzYnN5
66vWTiE9vXSx7nlNs2Me9qQ7WfuqYX46CfRaHzjeKpdEoHm62X2Xuedary+i2blmQgMGRneCL6vN
RfA/88U33r0/ean8Ln+SPuB75Mev3wTAJSQUmops6vqgaorg0SEHusCEi6tsqaxnO/YnvUYKERop
kIGnN12S7fD5Cbn+kOkDGnX4c4Z5UKKm5Ed6ptqaU7ywEkOQJrYzYVL813YbM6QXoY3pbJlnJa5p
3p5XcASZG4fcVao8hTwm3LDkbYMUry+MwaEdm+/EIhiaAsPqaJgMpbS8aBt+pab0H6BAnHRIko5N
MKSpk/4V2+NhX2V1Oc3Oqi0cijGva3ne2poY6FUI5He0AQA8NkuKLX7SVy2VseBHbxniKnGomcac
XY8JQ2I6ddfOwmryGrw79HIb6boPKjURVMx7R2fjU6IGMjJtZkf3yohCuUbWbtZkT6Y6MEMNsO2Y
uZei5YG0JDf4dzA2lGHKrnzzOSBFs2ljqKKJCwNzcD0vJMv2HRUJQgeCkuatfhMTVeoW/MNRhm9w
WU2lKY2RkU/twFd/qEsw7FsRQ2dFSe611XocLPMFpl3CKCOJ3dFUG4STpqWgzpWUSqCTfaj4XcSO
mz0OcUIVFSVX64nqcGL0OuG+R/aNen5FXhc4IQYsHZNxVc7nXfSUEk2qCDLr7DxHdkezOc82WbAt
KXy7OCGhXZanEXh8hgnQ44P37703w3GzXZFV9zrhowaCrMAiC+bg8+BnD+79rPcIjBuSgaiUBaRU
9fxZ8KjnasXsBa3xPQm6ejuNnvrOQSBrlZjTbD6m6F8hDDokT4yL6ipKTj3n3cafGCz6dfn7SKV9
ETC96DMH7pdYeri4PU3oYCLkqtx9FuixgVpNHWbqrrjoII/vaQedzbT9KWWKwNZQ/pdYNeyRAzwD
rSzyatv0NBTdjRrFTTgWXRocsXMJPFoF2FLaE5iY9eVNG7jvLh6x7t713O+JwqEWB2WKCel4QxkL
3ssl9EW3wSh87bBZgdGA8B4O0Ozw5h+a4/FJxwTCI7toMf4rdhbX4mUrtC0pghhdrikTRQnYIxQ2
tow/26BSfed30yjUFBiGRZBdbVJ53R6Nd7dscdi1eJTDYmuxpDTcljDl3KB0hejhwdBermIBoo8b
iKnt0aWRhaQcPd1DbjgY2SHUCe7OSzgMzsMhRwIHiFCpdMiClvLNsUIMTXxxpPAuHFniCKcOlIig
4a8ev3p+/Pznh0GIjjpm4x/33zcDsLMMmadHoyAoEIXgfOjamuNJWYkCxW5XkRtGVOyvH2NnGNSR
QmEpDnjw4ouE7ARGvfs46fZh9XffjN79zdv/0at9q5ZzDda7/+bt//VUO3laajOMEYlo3LBODJuF
HahWZE+qLfSXINNmXKPnedmjPksRRlWx66U59vvpwLChBUlgml87Wxd50bQxt0jf2usXVHqO+bC4
xARKlM18xh/wDmd7KT634Yfq/ZdFftWJgYEPMcZRzmrUlEn3eBE8oUQ3lkU2lqWIkGXwJL5OJM9F
ww7V1zcqjVJWo3Ge+Jfm6um1eGxms81WJ7iSUH1cXVj1E1zNHBFwyee3M8o4cU+Bcg+rPWmdH+YS
Jy2oae2TIRF2VgXZ+6qYk9i4RVbPxymJa/oeB85QUIzELjyxPfonFPdV0CAeZBitj4fnaelakPnY
CLwJFdgwq2nNGHVSA+oVVwuGKkLhiflCTTFNy0ASClE00VamaDguFYjlnDrE6AyjC7LqkgOl6U5w
PQgNmjgsgJ20aJH5UtNH5ND6ZZCKXrUn7nTUwBSNzabTFhDBArZl4ByIlM9flBa4M5XTKZaFZuCw
sRTEKc2Jio8ohS7zGyjHWAWYv7xR2jwOXsodQctG50WjG1tVsG3ouEzWfMMeg6KyBgX5LSHcnWVZ
MWXF4V10I+S+TQEHGZCsxkAyePxDX2SV9kdMNHloBjHljWTHkKPkGBYdpzhk8ZE8sjEtLAdF1t1y
QwQ/9qDBhw00TdNxcFZVyzEGq0+YfvGEy+mZ8NCJpxuKfpiVPK26BzRT9bcIbJobHKt5KgN6wcMZ
qxj3gCP0QZdIPbiRmrh8guQDrAvRhonvijlGL5dAp4a2W60qtGdQlmqMYS95oU8HNEBhikry01ch
SXnXEGyZnIq2FDOScDvZY2wB3W3qApMRmyTIY3ydi1O6UgBI6hGMMketYToBklJar3K2tcWGZtns
Iuc4Phx6ocW0FYoKTz9TytdGoBGmrcAq88bZRzCkPw0v7dTumIDxAqYK7itdiQroX7oYYEKHt2hR
Ew/YZNrxsnRljY0TaGHPwDw9TTEtaPccaK8bOAsKDkVOwh+WoavBZjt8nCNeBxcFcGhY8TeEJubA
uHWYrQCHxvXFkYalOk9ThA5jap90wbUtpXm+BEhzFC3+/XHgzOot3swWKGFb2wTm7IO1plCZN46V
DQtiadG0mLZnCJ7gOVQVhHGs6sqeka4ELpVsQkCOhMQGSMP3hx4/cKzzhAqSSNFCzalsqO6TVK2x
TlJcn3LIPh2blj7y9OTUQJ9DeBqD5tJsgUJP+AbjR21X7dO4u+6c+JjO2A5Hw3F42gGPeW/1qIHa
Msh4jBqGup7vbtu1blCKSyOu74bUHYjLZQ7XCV+Ql7HUZ3d9bytSQkWS6LvAgbPCGTrPknJWRQSR
ur5myT4m+iISzGlAxsCvrYU3GH0xutvEd+skQt14d7hGLB5zeSbi8OdQR5vaFUmBXpCCCPM5Tqdt
QWTBhxbZwLtWTinddrklhzTY2cSoOGqfUuQvOWcca4deTE9B0rI+bjxWmSIx7gydUST35QK2btJ/
KflYq1LMECuuH0rRTOWkpSKM1XaMO9tnUhuBwVGqp8IdkEuWywPS8SGetmIjblyNI9jNkJPBGh3K
QAZIj3Sl2J5Mt3yqPc8nQfQZgvd55NvamFXvKjxjc2857BpQPIEnP+dY6xj6DZ4nyIPxsTdSB0Y+
IKNQQtis6xVhnXx3R0LzBRg0zZodSUQD4fBaORebZNF5hG6ltSQqkY1Sx/AwjHlvSQVa73ILImCv
wJgNCvHB681qE5+YM3qa7CIJDEIwOMncy/4TLPN6nc+mf5SJ1UgvgWVasa/8XNITJMud49YBGR0i
Y4vtKAe2dh9DzGM5i3lwImo/Dei4E9Hdmv0woWwTJ0pPIWbSMXFy7aO2O7xgzdlwyAQlNpYX95f8
wFPh5X/QO0O/59D/3z3U4R3CGqvokvVU43G2fWyhIe7yZ/Zs9bgr/nNG0MBeyH7az/WoEgaOi418
AUb0knKbGmLbREuykp/Ive8ei1mK7jUSYce2ZKLgqROrnHeQpABpr/Gw3CMSvMmzeN0Vu22/0Yj3
nsibwLW5qOrNbMv6OxTeLjJShbUiR9MXYNMmIHOX2zdjGY/ERQ097e5Jj7y46ZnbEEOMNxwVO4g9
pteIalO77uLX5vXekEDhFHeIGhXgU7z9xw6NR2QEgN3rpveMluUTfySEz86Y4UbgTAu2CY2YW/6w
VtiygXGjhBmfZ2A/k7hdAK8hAc1YVQSQtoEs5/ssXii278LVYSLtjBhsZLOu9SokicwrhXVp2yNu
9VG2CUHr+mjTrmctaAoyZn20cwUbhT0r2F69niUXxRwTkBKDsJ2ECT6GwIuSSE3Vi3qfmXpR//8T
9QeZJAwXOjBHozuo3/imxPBNxmXPZDLCtMfZsnifM55J+98oRTB8W2cYAp/8D34nNzMg+gKtwb/D
IEKqM5gKfhE7SSx3XL7HC2YoF//eKZVIse9SNOqg/Fit/wVD+rhGE1QfVXUpi1UII8s5y6EvcziT
9muyB/F4NvedFOSZrLZTbTgT+ZF3u3/DhHm7jamF8cO2Ffxibk5//E1lxGpeIWu1eFuKSmQ1fFl4
lsN+9P94Phf6j12Z4ePOHpsYC+L19qyv4sFgxa+3y76K9wYrPi3e91W8P9xj1TvGu4MVX1ZXed0D
aj+sfj7Ac/QnYQQEsJcR4JukU7aXEXB6PG9LYlfaKX0bpmKs2J0L1st2EPhoLAPuZyN7t0cjiMZq
JEZ7f0q+pG16fwChmUf2z4u/GSulVWVhxHN0XNzrBCxlbW1HVe1W6xgXQgaqxMAIW0ii76u8uN2u
6EIxMc+yf2I1iJhSeZhBaTm34C8vG+iXjd9nNaXtMRfjoowOuS0e/nee+bOKx5Ela2da0O6Ge7UD
AmSsj/5FfnNV1XOPLHvJb5DcbI2fNgWlVwjLHkFs29Y6kXyzDn4ze5VmvfwVBqm05AZW7s5RR4e3
he87jt/45ESqndIA/FK/grfXhV7m4+OJBgJk93HkU3V0TiZZP9vuCQGsO4vuNpO7zZiUkALjWEGQ
7NU5t+A00MP3KbwkRlvcZPW0S1H6sX+F6NeJv9YtpxXrRYOT2bbsmVQDh/fwENY/bV6sUR0DdN8E
KnTNe/A134GweQ/G5h+KMrQFGkbZfG+cfRDSqNJ8B9r8+sP4bpN0tYfMZ03NITr7eo7S9qxwVHKA
Sfwkx51Ienbo9ZPDg4enIw8ahvbGXdpDkKdthvSHvkgVNRPhzLgLYfJBHYSpuyfZwae6r/Vgutep
O4Rd4DW/Ln93F8kdv31HXKcGWXMceC70WAj6udg37SEDSdE/zi2AdwOm0sxNedcFcIavx3YSyV6H
8z/KFXxnLmWkcVd9bw0+Ma7SJR93m/tnVmG+bW1CrOSRMRsgq1g5ZPyfN5ZflZ6AOFIXLA6uMEan
xLAL+f4u8giicq/pzqKq2ZnLgas8HMZUDUFPpxKLO33fcrZ/2Ol2YbXSeJGe03j/J+IApOh5lR84
aQEqgG6LCTRbz5fJxA68sNe9A5XcxwREQgh7mAW+SaxyXmZxhzMvs2cCL/asAdITF57PjGEbph02
b6DGd1u/qDZxp2V4jFRhgxcORgfJHpFNmV/43WD+6IdqPffFebnn3EPJfeb++28UO28WfLOYpil+
wCS63NVnmHQA8Cvi2ihL9YzGuFL+BOKuZyGgPbqyupSsYrpa7cQKez5sywRdnNrlh6yX9rBcghZ8
hkse5mtaMf2JN04hyKdFM8vqvW5Bpeg/X5Ls0KGMkaZ9jwFiuX1GRxapUHbojpDedzAAD5NOMQya
rsbPdrPsozmVQISx6tsZLXWbdizUdHwb46H35hNPLeg8ws6d7vq1z/VONbZ1FXfRartR7qJ3G0ws
jvsmHYSQ2M18nXmqQ9CZh/c2kFvcCLbNEIXqSrOr2uDH7FpoJsvqFLBzaxm0JoRKZr7YeYTEGFkZ
1wwVFItstq1sMurVuhE9tlo3ix941W/Da93NtWI7aNLzjpvliXEo7FCV3825u+0a6j33ENpxGG49
uTli2GGgqKGHWpHm73z4P5DAHr88Du4HRyXgN1jDsXrTwMMPb5CT/3RzYfHNjpkPy0yVopNt2CQg
hCVtRMj7o8SgiTssdYWY6ZebCFV6Ck9MVIFBBWW/WecNkzSGhEwO9yd7ixTFwcvgQt+HxpTLj0tm
tyJtgyB1jiYJytsJIJwSCjEIbsv6jFnq2OrGYewS6ZicdiliTgGflP6KBBbk9F0P85B6ZPs3dAXN
5wXGLyLehh7em2BecMozikUdBK+35+d4NqSctZ720A0cj5rdwHZnOYCQG6HYUAmCrvV2VLck9C1m
GXAbMAI9mJrz2A7ibJOYDs7tutvsCs1uRXi+I5RKGVEpI8LmDL/HKtronZ0kekelqtIrERtQmzFv
0+RX3onhfKI0YVaev81Zqk9jdoq/a/Lxctc7lPcs+TYfT9dZ6XqMHSUOVbfhWuOwjZPBg8P8x1V5
wMFRVASq67EIp4KAaz1331MeQCzIDmwAqfL3GlU6SYF1uZMHlAQ1DILPPlO2kmpTT3qEhTYtKTUh
DCi/3rDW9LBtxxEWXM0rZXS63ljh45woHmqRRNbR+JoPp9ebk4c/kcytykcKHorIhdLeH1n4GN4z
fNvFH5Bvu7IBJVRUs4Fajgjd5opyOo1U7DFxGm4DRCzirmvEj1uJ+tzz+pP29UV87fE/K9EJOxKb
DZIRQ+gmuIetIVg/FtYn74jrxkn3YbwQC3msB0z0gVNmwc2d67oFIOxTs0SB7ztt460dPKTKD+xX
Bm949PEnH38K5LWssg02wEQIMxcS97HrXatxtaWErmV0QBpVtW4iqcYlYBMbBzNo7eE4eOR/w8Cb
Xa2y6/gEW4Rxn9IYPrVhiS7y5bKKTvA9UcGF1Wt0vr3k28sLwgK8e/dv3v6VESJlXs0oB+R/+/Z/
/Rs7B6RKARlIEXLt08Hq0DUfVQV4NGf3Ok/ux4Gsjs84odErTnI0DtD0QJ7hfjWyE2LiH1XzDUV3
yJaYjmZMSWmeAQxfVTM5VH6PSMUSH68nHmx4cCCoOBA0GEFfbxOtliPCuqjFOCKApKDbNke3lbLq
9U4Y0cGkD0ADthAL30s31xsz5vE624QuwBpYnPOAfHQpcI+Y5agWDwOjRf8YCDRvDEXKmhtjuEWV
bUzmjONz8aRhCjD+rfYAis9IGwlsWoDE0Doy2VHrbDx6zTmechFRUFnQjLT3cKw7hcmLIxwvXR3W
mN2CFoiAqkKMIb8p0IrpgqomxOd+bUS2guZ4WS9Kwu6kE3XOwl+XGwvcbwCoLh6VuoVWDGwkUjg2
11N/lnFU1C9ljbHg6AaFSM0i5GfZ/rQLKpmGPtt0ZtVGIFH5U64cpc6SkxMbjTha5it6SoJM0g9b
avQEfWhVG+PjGBOeCss6tgNTugGWjNyDY0qWm9eS6mFO2SGdPA7sAWV0oxyhdLtmi+4AqHlEK32x
X87lfoE+98svw82kSovGTZjurOh1vrCyBHWTywg5tonI5IG5+JT8j1FO2g03lpIpoAND7Is8tI+d
l6rZ5v/RJx035rAUlWEYhxFaTs5lAUdYmTiVUnlhE5qg2y5pjZxotlVbc3ot+e3aBtFLTsbebx8k
7z32Tn4TH13ebP1jNc72wUM7aDXwquzcRBWqMbTY7zBLa727O3GshjsOVKJ5aT3p5l8x06aqCX6x
3cCx+wm/djyOXx29fPHqzfSbp8fPnnVrmm87M6K4D9Qhlis+hgJskmKELCoTz+qJk69H1T3pBPho
p68/1SeKrCjRGTMypgcPFIqCg+DhA9gY7gRv3779onvfazBBPZST4pArn/bkCcRCyoImvPvgk7k6
0hUfP+SOE7+pb4GHyYd70lprpHP09vHXL786Cr568eTxm+MXz4Nvnv/i+YtfPR+zechFdUWxxUDc
YXGCbpsyHeQo8kRhLefoJg/i7eeffx4NokXRt1jo0FGZZzPZAz3RF198wX6GdK/M/Q7jSIOWpmnk
UoWf+fl5X9KDV5wEWSNpRUtiOi8Wixw2CVIIcTyv0Q52qZjUeQXjMddHIuqEsJOd0T/XBe4b0z3V
Tcxo9aE26R/hSfjN86O3L4+eYIaEo7dPjl4i6Yj2YXBXIBMCCypRopz297ZH3sx9RiBilytR+WSl
gdgxKlpNn3hgbcXd/IOsG9DpVHsiMy0aFgZZPglPhCxOhRd0fKddUdKCSyWVK7QKckDc2Ckk3Aka
QFWzuAm+tU+F31LoNjhLYfZtFfdNSzBGGvjftWbNi5XyBO/J6G25hmAbLmNYmzGyOLCGkUIej+kr
N608aoAmchkJLU7wz20duWbLRoRHNTA316f0qBIDY8olC12OyZVTvJNHWJcTHTtIxa1MQ8IMTbxz
HyrWOkxOiboMmVLc4uJ97h512xCLi2UGQ1LNH3311fHL18evx44QBisSDzdQsJht4hbNE3c0+Irx
xIt93L1Rn1blNEe9I6mFxypQOu7pNpGrc56XqnuSxu8j+hqYSjEqKsuCk4ASHYhOJOwkDTGSwus8
Fqo8q0DUT2sqBhiM3agAtL7hxuLkD7MQfSvHUfLEqCo7OeWIVx9I5YJ2lWcSKfiWlGNmnKZ4biXF
glzqKRb0obYIBZdVPrvIyqJZGSBjDGRj8aiNnZ4bCNaHOF0Q0z69oqexJs/x4Iqxw0hwfvlSQEjx
I2Z41dJsVfY7GZK5/Gy0JofeM4sIH80hks5lsQ7y1Xpzo7VanQ45vJd50KZm2lAn6hjNyVOT0bv/
7u1ff/TRR6gHlEwX6Ww1R737u3/79t/8uQ7ijBnP1NftmRQ1EqORNrF9o1SJLyuQ+sbBy+OXR2Ll
wI3H8NmGGVP76LYsSA/JQhiKrRwmG4kighqU1zKDcQCl4IWjglj0xHTbJB2kdB9C9VVGcwkOWq1W
qDpippzquMdt6nOK16pTbZVBBCwusuxhy01WlMrQlSpjZwcCtraUla5SfXmDYWVbFAnV69whukv8
BpJKAOJnhWFL7yu88D1Zo21PznXWZ1U0TnTuA1hqsOcUIvrrXvLrArXB0Tdvnh38LGqjmtLGLJBN
DDBTmkKcL7wIzJd8BWiTOsCHiYqyJcYqJWnMU4gNZSZmy0AVeGWI5hPuc16EUGGMyMVNQFEnIBXx
kelsirgPuKmSHpwGnwWf4KrBzF03n2AcacBW4GByzK8fIWb6zdYEx1OFZLlI8mL/TvAb1sJjqHnE
NyWWKjauOdtQftme/sTSSD/FiAQ8i7p2RbldZZAxYU/hzVPb7cfY4Qjlqh34ZbWD1i6728HL5m1j
zNwa9nnzMhZfu3fFR7zeq/IZLc+YS40D9UlkqKjCuiWGh0rocBsB5kYLlHlCv7pRd3bToKe0t8sW
YH0cS61mHK2ixgJ/cV4aHfEk6592QYABs3is5o71H00T/LUfMxUQQtqRNpv+GJehgzE4FM4Dslui
s0tsjGSsARprGMgMPL9mOxWLlVLqAo6yi5JZhBohe5fhOYlGNtueuHM40ovSeYGxTFlARiNGTwdR
Ty11rxtEAoEszNhemePWfEHwRqeXd/+uJw0CMMA2DcJ///b/uM+JSFFQsY1l0PRtK9bybFrGd1Sd
jBFiLRSILxAmqUtHbzAwNdtlBOIiHpgZGPCaK38P9bcYk56zmI66W3mGymxnEx9OU9qTisFMgSLF
e7IvjIRd+5LhhZgML9SpvTA498Vmsz68fx+z8qXMWdOqPr9fNM02f/jp3/2UzVjRYKihnCNfVtXy
BaUj/bIo+QsFpuCvX1EKdfx2vDi6pkdPQZjt5B4Nv4JNEiOWYQlt5yw1/hFlLPwiIc3oK6C52wqq
LfDt8+0KP15v6JcmMHq2PWtmdbHeUDmgST8s+PYNGiSJBmmKdrM8YpU392m+IEiQBcr3V7S8aZT5
MucO2aq828vj7bl6FYQv0b4LvzyrCORfXRRkmAdoo58wWdQ+8uxuU2/qG2aOBHV984zTHEvvnD40
5KyO7bdnQFjdpnDd0hyQlIzf0PCUQIRh0jRjAB+eDTRJ22oMIU1MyTCZzmV0YiEXJEouQppoTjNG
xi2lEJGB3ltVpvlIWn5bNFMoSm3GTp4QYbpkrNSGP1cQTNtQpWZD2P7+DbXgj9rD6p5wGWpULMDm
vq3V+p5AeVth828nvUrcXi7pAwHvAhym/Qppr7VPQ5kDWeBQHlbDcmoShu7FHKfmGc7H0rGrb03q
P9g8VozsTYtY7vU9IGtDux6dVo/b7D69JqtSJaVP8r8YtH1q7+duY7kqn/+fNirVtnHVYpGXKN1O
DTPM2xnK2XZwrtmcJc229OXtN9nTJJKyOHli8+Zdt2525EQrSlofijLEbPIHN5r8oUwmGdgHp38S
u0mObFXV01WG1k5t+Kovi82LOgCq/KdwbD58W9HT/2g/fQxcDp7+B+PpV68vigXGdQs/+8x4/Eo/
/vxz4zFGy4JnH4d2HCx4dBBaEa6o6r3QDl4Fj+4bj54tq6pWz80XGK8Knt01Hh29wyeTifHoebXh
pz8yn37FY7GeHNEjs9TPeWjWEyr1uVnqZXVFwzDHcdzgo6KxHmEQPXqKXMN8U9Lj0oaan3Ls4HD0
3Wi0RbmxM7XSKJa7a3WnIvGFv7eef6Nmwn6qpgyeYl9KXe7yf+5xnv+S+X27Q+pClLSaJZVNFZwv
82yFrGyxXbYp7zvp4Xt3zk4MIlI/C/+iT8OaFeXiYjblPcibEu0Oxu1ZUlx22geuUN2FOYtIK5Jx
JqeMkrgt6biT2dmzi2GJxd5YjfD9dqQf7cXIGnPJOdfumsM+tMoHr0VFN76+sfdqaWaXW2xvOEy7
L7p89bluCyeynPodF0GyohsS1rzuoidY6HQf9IHUjUfscN+wY31R7L8/+gxPj7Ftv+O5DaZ27A0g
DmcZ02U5B2mT1R6lSoWtF4sau+iLRPTDvJiTEIki7ArCuooUDj8zDtaW0/znocr0rTukpTVFwm4l
27rn/pmXIbncybWL04xMLzeExO936ER34LNq7guCISudpXi7cfJKbq32/KFmx85qsjmIG4gCj/+i
NS9FriJLorQbXyLcEfOfhIWSXPrmZlqcLlWbcrmXmKmPHexgmJbvMPcDbg2UsMWBoRcNUJ+3H4uY
0QyKnouMaUWEsX0WqVzitYNQiOhJiuTjKM5sq7jvPOGIO1M25LiO+JQiOVov7Ayd9MSmCaYFOwIk
h3MHygKpa1pR6q3fFuuYeqjWDUOQcopSlMfc2zSqZ3VMT3wdSxeJc78+bW5WZxXOhynznVTr9uB9
OsDP7eDqXTzoDva3aXDH9D1DrsctCDs5v7tGPmTvHHtDR06/f+hIY2a/3wZzu3C/ZloFeyHeOsK/
Spl3VmX1nOS8etsbZWOveBzdXnYtxcEVMzL5UTdNeK+FJmc171QwSM+Ewuoy8aR7OxwKnsdpiD6A
tbECWTgbRtiqTN4Gk0qKGkO9xWVS5E8cgD3ZM8o6cRLOQyhtsK7jsI+RuNsncyxpZq/o6n6C1tnz
0HSGRriLhMTmmrGBYwhxJ5I/9LsvcEKMoRZoA/LGUg+T8APmTDT8Mml0bkvMhOYcYHoS6BPdCX1L
/exbEDrtZruAh74ZUI0582CPXEFBrLbbwaBQYtQNnZgp4TgZTFyzHwOm6Cv2EPeVZvZgmrdYfHhn
o9ZeUVauWLGn9EBVnSjStDvY9flRfwNO8Gi/CMBF/ZTk0r6SApIeMeB2MkBnRJ7Id3tv/529/0Nk
4z/wfu8PE/2npFe89lNCMHw1T1TbcmZPLj6xqQyrkG1f23VWn0/7dw36/TtzTrF2GBxS49+ZrWwl
6Jq76cArSlqEXXfDnTrdU5xa7wKB551oplPZFUz4tMRbxsDgHdGypLimUx3YV/fltsyD0bF4pU4y
BLpV2jvLkpKWYwYLRuT3h2JFqqeO2eMfFj/S6dQ4DDeTu9Kwgsg3XzZWnWagrup7nOyHaX8L1iZn
BToljKNO/nvQodXGXhjHwuEPQYbhPcHxbfFkVdyBnsur74scbmEv1GDs4R8INR+Omz2QgwPid5R4
jaJ4hSxPuu32SWNmtOIuq7Y7sDt2u+OR79h6yQtC9Udp3/+AG+29e2XzA+6GrfgcOpFBTVF9vYfW
ulcghtLk/+y5u9t3P5620ZKJ4Py6SHMs3TyRTv5IpU0Mu3NrwK6VafLl+06s4VD0IcpFf7oElla0
4ZEghzLQd0KfmRkHoEA621zzyfarKpsn/eDaylzObm8jzhF2+ZlXusB+nU20s35TYRS+tqkBHwjO
upRg0LwsVZ0/khBMZy4LCT/gou1gq124Ka9dnc+8u2Bv61j1QzWz30TeCchh2YxTjSonMkzMSh2e
uv+WQJlydUJRRzqgNDnMeNa9Idr8QNTixnj+gxGN25F7r2C/N+4XPCpNq2yHJaz3vMDcZ2P5Q28b
whg5ILVwxaapN05kaZso6YmXb2FVJ+C0sxLFzkR4Tqdtf57ebhDo6R8sCPT304/7cG7A60G8CnE8
lmDIxpBoB7BRz496cA+1d+waHLLZNEHqwb5IxzreclftprfEeIDRcdiAiQmbCkvQW2dWLafVYtHk
G7te+9wAM7+aciE7ZrRUhKMOMLtGBVKxodkFRz88Pkg8VgQattNBFum1I/AnYdgVP9qkjj+wPsjs
avTuf7Aifs2yNfpDvvv3b//vYzb/J3aX1wfkTMieOPfZkSbgwpbbY8OT8u238K65ab79lgRT+rmY
w6+FWHwbBy8y8L+DHCM/RDvDopKGczK8vLrIa/RHWxdwXkLuDWXXN8viLHiYfpo+epDO8/ePkKG+
Dx5+Mv+7bPF3P/7xpz+FddR1Gaj0t02+WqMDs+lBYEUoG7VeG7Q7A1Dy/jW5Xx2/UE4XbAJuuEJR
eVWqU6u33S+x34FmJechl4pVe6bTMbAmDuTLPGmebbJkKCkXFhgrT6mkj2caITCRqWfB2Y0ybT1U
YS+pJeeYrCBMOzBpPwrT7S34LIg/GQcPEnO0GGDg+wzWORPsGnBOWk4lY0k+de0aFmnPw26kCmzZ
8DrjjqAi5WMnF5dob+y0Vj88fGi3pZ7RGmN/AfJQ2kON6oPDIILlWJQA3kP+DisUfjziH+gA+t3o
ewW+o9MRGggZQeWMRvS4ooMDWbjQfV9EOTJzalmvCikHXy4q9EyZXVTFLG8mJ9FijhiHoeJHWUWn
bsg5zZVMPoTNHAYk/4FgOAfa5I6Tf4KW/qms0j2G0bjwT8nDHaHDzwksBBoRfpURd+LhNRfoi48u
tSCHasRMGIDR6O+F366y+jIFlrAoamA6xiQtYXsm+8YiW2oX/ybOs3p5M2XX/7FMFSt9cEtZwUAL
VJ1NpzKppHaXGaWP6WVZXZWUHytutWuMN7w8b9QWYAbKFbSa08bFw8U8VAXVQ35KfJ+dSDl7TgUQ
hvPt2rQza9uByeGGoPdVhiLBEwbjaw7MEXNRcRUwkJCul9vzopT4HSCxnxcNWp9yO9ClDEcKqBCa
d4JVdokuzWRLnykaUsXwEMGezMsbFLxzYHwYzhI9/5uLLayxq7KNTQfw4AMzVkhHCGB4UmprqgnW
DLUhjP+XyFk9nrVrnbp2YPhAzlMFX6zg8gx5WbFxrXatZl6vvR8bHC75Q5ZzPc4Go2DB4U0qT7NN
25etiwmlSEgB+YDTe+MpGi9OdI3TlEA5aiGZiEboNkMfAFVjY8GuXTBVxTlOuVpkAS49RA67gKAH
T8tgYvLe+7tPkpE9qSDLCAXFHpeXPl2ZtWJTDhhgmocKiHZdw7Fc+m+2DWqgbQBcg5pOwgf4lox6
kwFoH+XWXIc3EeSYWzTs4TMsf5fpJ1vMfMYiO8YawOhXYvF2EkJRVD9D8fDUqdjG4sQqnVN5Co1a
uvY4FPYwR5FURSmyO05ad63nlRRvvcnIoUJdgVpqVrXMOHwSzuzOMvlmuIhMUJ8LM2IlNPLymoyv
1+Nb9jXmi51oknRM4XePlDTticBiNYKhR8zfhoYaOYeSmd1RLNowN6pI+iZH8TVD/8mlSdBwsMLi
sA1QS4uxDhQxCUm2Ms5Ri3S2rBqztrIBhUYc2DgmQtEX5ErkSNMcGsQbtV7Eqpg3GIse7S3Nu4Rf
b+YyYc+ednOwUdQIngsTg55jN8WR2Fkyse3jDfhwA90B4C7wNBJ3gtdXsh88kJgO/VbvTyyuNWT2
3u6McbgtSY7xiH58JFHigjXh/FBb/rIMJRGSBmICz0ze0RFdvGUHxuJVdLmttUGdUAsSh9z6VAub
FA944gZ3sm0wPTKEX3bbX36LYOFGySHFj0/PU4kX4sdLK9IZ0+0ylo445PKWirQMOq6vw9LE6tAN
0gZvWdCKE5eVS8VpgfGFmBCKNo6vNQDusCUbLJcqqqEfLv5FWlYYKygcXsQbnZsClhrgV5RkEg85
UYKYvcrh8DHbgmSAPqRSpOIEbFjL60FhiyAd6rdfmwzP3TlgDAp+dQDW0+OOobMsPY6sWUniJfUP
B5UlBlC9MZYtujTymu1UjkXZpso6RgYyUd6YuM4k2tuaQSa6hENXuOjME0e7Y7aQTwKnPTJ8WPsB
8vVywj9PWZAzKc6dR5e4tfAyxDRlFaD8knREEZsUkKy8IsQ8x3PweyDEqQrkFicfRCvOjOhyLsi3
mREb6f2hdfUyI9yJLOYkvc6Xg5Ch5gyQNNB2bKxo8hoO9PchRbVZ66FR6+Gp10uM3/pmYJDbKNfs
trM+IVTRSXfeeYrXN/jA4ZlGkD+lPWvLQbvqfcgObhYF6bq9U6iCVaCh4lqKU1g7qZliL94QvsiQ
JcpFHLFGGvksKaOjHrcGI1tHL3ErpET+sL8OIanSTIDyw1uxfZ2SsZe1gfnWorNd+rp01ZndQXgm
pVOocwTVC8ouh0Ed4cxa/DbfvcCmPlTYdK4HL1oxFD51NgiObSsDs47SYxWYs6r32dl1Yb29t0/c
Pb4/xXZ3+91Ts3MHBj3b1k3xPlcdjzHENzxeZAhE4OdJd8gPb7msrmiJrDAuxBLNiq9y/Gu26q3P
qB5SUiA3/lAdBRMIh9/y6SdorXl1CS5rgndMCH6lqUMiEmh4CuLfdu0X8bpTxvIgFfuArmatKfWt
erJKdRf3h4Kj9ZEfOnijzUvx5ZoWypmrN9/DbaQBP1ncfqjID4bYAO8/Hvt2NLa0iXRY1NlvLQyS
vT3n7c6n4Ojd/1oRw5Qu7NoDUoYpXDiVTCFjx1qUsw0q41C3lc/RvgZ/UXAEPDQqsgv7oJczzsh/
BPKLJbT2KQLilPfvxTyXMHvqbIHBb/gdnVlpc1fq6wZ3/02xykP7CkzhQRqNJfxvG6MiLzGdW2Mc
VCq5cKRYFa0y9X6rOaXuST0yajdjRBTHVs3eZ8USWw3eF5m+IaeMEoL45Ntv1SaFJMbNXF0UswuF
DqwX64COUJwyPdqRUFH/Tkgg7Xsb4FuI2g1bqQrQL8Jt7MG3FeFHKbw4dnLcapiSfiwv5rdGMmWS
muccY66qm+AhYfjRPy88o5pjF6JvgWJHh2BoXVrdMjNIjKkc80yrdR2UeT5vgqpJ6aJtrzl79jRx
Vc9SqFf1LBinOh2lc6ttNotZulcWbX2Jh1Rt84Tb1tOipVO1d+fJ3X3HZtDODqA7V7qjIfk130dq
NWjOAXnQetoApKXZ20ThtRpzgWKtuhf7huyuI909ta5OMOK1PAiOX8Aqvc/Gq8E5yJmAgOZgmb8H
VOHabZfucOAeNvVaYJYB1npLqgNl6mAFo1NANBSHQmoaXGJM3AHjPgNQZX7lpGaSawnmLRs0LUqD
4BjXnOoc9mFbVME4Pws0PsoC636D9fM6ZjV2mc9tpQEH26LIKKkJvY12PYqJHpBJ2i1cbNdK2RVV
lR9NggfOaZnifnnvYaKrs4+dI6tqfM87Gf+9DI9CtyTfHNrCCaMxMneKraFbcoaad58GrVrOcaM3
jrU3TUspaBRzoto8NbVePo7TWYMA2QKj18YmxJ2F9+K1b8XtVoVic3NnYRCvfw9rDu0Iu1UwauBN
taVdSsaQBJurYpZ/ESadZBItHQFRaP29OTkOpchsUJB0nJX8fbldLsf47MX01dMXz7/6x8RFEEzd
oxjX6oPOKyaLRVdB5zJnnkSfAqbpndX/h7p363LjutIE3S+zVmOtnpmneZwJgcVChIQMZlKW7cIS
6JJJyuaURGlEqqyaVA6IBCIzwwQQEAJgZtrt/za/aB5mrXmfs2/n7HMJAKTKrm5Xt4iMOLc4l332
9dveF14Ms2fNavudIY+AvfpiZe79vNinDJWR69mglGtgxMo6NuPfcPx6IFrJY054uE+B3O1W2Bpx
uItqusp26EAOdL4lVlcfQsxYljWXf6pmysivKY83IbzVO+fBLm76YEQbMKZK3rwbJuhtfrpXj5ye
4vecXm6lSFpMhEr1PHfCZnOd9iiERcASAP1GOb8NiwoT32zqa3QMCvjV5LQjZV53k+hiL3VKrBg1
aLdyEd135iV5dJB34V61DZcX+k7MvLoYEC2MLufLHWQZg0vidIjUD35Sfh93fyiTgkqVgfcyXdFq
m6Ib1sDIcNDhApMkoAtfT/tp4y1o38+FW0bmw2wbO6gBOoFsmoW7y+ld1hr2xjoF8iqay9/WpEi6
AX/RQDucIm6oEXeBbYBwHYcVbwaFF8CcoAIxlAcRYrb1pboI9deCgxnJ0aCcBhgT6hFe8DTC5vIu
Uqm6hS1gGQ9JR1940om3fZDSX9lN4s/omP4B0GJwx6p6RzCZbryJazA8blc9cf1Q17/Qj5Qr8JOx
5wvMxWzahk5Fifqckt1u8/4lGjb8c2FX25SGbB/hiIERmOO1iKOmja/2vNviQ2Knr+aYNUQSAOvb
54q+eW97xft9J4/yORSs5l8GXFsRz7/NLqFq8OXQKeRB1oVqulTthtICFQC1GP0KYKhdBhD5GdBa
7sgMxF9o5bFt3nU5bJtXMLOXf6Ke+G6KJ0GPVTzfTI/BWCgLJA0Ifi/qVkcosX/3QCH7Ygm/F01n
1bQyq+qlBo59lLSdprSTT2ARgXwer13SB41ILviuanWVU1UJymOoP2klxUzpXV2JWxpbP+gV0Icw
DrH8Y5V+uDw0zOAwmvEAxUIdDIsWpKlFHmkq+eQ89at4RrI5U42dW7N8Mf9Nl908UhOQRhJLwMAD
bYBX2beIy2ChH/n90Tjqnl8lu7e5Ubq69yrHDnIyDwcdENWlPNttILF01q6m6/amsQ7RLTiKLA2P
Y65N4VmCjeFrUZI6i71LmRo/jp53vediJ5pGx1VgPhEqicYw1B3aCCKrTfzyGakPv3z22Om8UZF4
D8zCCm7Wafby+6++YuUAVDnNcszVvsGEJGqc0zmH7fLRqlcFKRKmq3vKWVtlp8Oz4eNIi6nw2MzV
j7IfYszDbQ4b47KyJ9LXNyZoNPjvUfpC8M+jX/Vqwr+s/gSvppB6N9a9+i/e3kI33RHuIf85+O2O
oKPguenQPDf/DZ5L9+al/HQl/prQDvg6QngQ6vlWE6e6o+GfD8zDwUWQRSooAgExF0GCqKAI+Exc
aKwFHG9cTt6ownBjrSajI3GE6RusVi1PGm1OA1VYspBdXvmRdARMaiush7JPH0cdqqEoqbi6CpDW
DPCYpQRi1wjkdDpuimgND0zRmZsi/veDZik6D7xbZOXBXT7UZ33I7Jr9NeqaQsxRdswUavtYWs1x
YN2655wOxYE5f/w3m3M8fjLnEITwwXN+0ME/ZmOAgqTs0+Z5KV443bVhu6Rqw845XDvhq2XnIPIA
SkcjMJsHaMyBW6XVTMDFnSVubkmOFplarBpqiIr28AZRfEMeXu0kQLfbZu2GE/ELbkOntmrHJPsq
TXUyu3g4zWlA8YDb6FgNvxu1Gbt4Nd0NFD/Yzb4NF1eGBUipwvnKjHknbzmOMUap5bTsH+VUaa66
Qr5jHtBelsgdcTs5LV3y8vXL4dwf5mT9WkmBKprw4L1I5b5nHBaK6E5nQvbBoKciVUKNJsEFX4W8
ux9gfAXBUCyvpjwjzbsUCPH6vmS0qHKybcDAlCObjuHGfdZ7eHE02w2Yh41oelrsHTN7XLQJzvvD
+W4ne/5X3kOGYzxBmeK+73HiyAJzICJQ+Ba93jc1tQNTGbDcZfbCyHGrxb1WaIGtro17/a9Yw6XC
JWcRiX90WtdmN7uh3I+LeyCkoCvJniknhBz8TbRIQqmWb6bb4mfx6zGDvpjzqbLfEpfg82R9UOIS
9coVqFcBxxdHeSA7B0cgYToHAV0iqRIcmx5gB8ujuzrA9tCXpfvrSB5KvLX571FcgIw8DkqlyVY3
R1Qt+jblBeRugqhaJCfYdZFa9AkJ49Z/Q3ezXfLoPk5dkPGE8s4+cFsn9S0Jy5HdENG1vWc0ngy4
mEeMdXSpJ9UvXaM5vMx0MN/n7v57cX5/By5BNHSHD6N/CiEGBDsMtGGOHUzccz4/3r19Uho4u5P2
dYwM4r6OkZVP37WOvZEkWuHBt9etNROaqd5d0r0LGqd5hUHsaHeiLWZu5ekM0ghDGsPtTdXzIgMk
8+5llZHZyKyeoSS4a9ZN29aXpjVwN0CVt2iiKCt2k11NIcB7tRXbp7kWq8rcw5lO6ntZcRjjbgNu
eDAIyAJMsuJlRTks4S7ebZslmvEw7N7MYUv5ZEwhAEpDfmIzbW/gdqXDYv6F0A80xoHW/j6+cpHl
IvL4MQJchA6PL75htTSUhBkhpgLnjsL9WRFet/b7rS/ddA66f8zQMJ17z1r9cAI+1RvM0YzP7OjY
BBeGKUcqc9NnvalmMDmvZFXXbbWbN0QhzCUKZkux6PXVCa7her3viIQOwDWTvmCUJfp//eF/VrhQ
kNUSCMpP/9sP6z8gMFQmjzJywGb3TTRa7hYLXFN4Jjmte5T4GXZndt00cwEkBO5p0TRvoSx77b6b
bupm16qGb0wBRoryYZoi+CaAe/o5YDfxV/WJq7Z/TK9MmfEhVJzByTtwdz45eVdtLs0kK0iZWbNb
xXmbGU+GiyvEnNMhQ8rUK3PapmajUJl6e1/2OQA70f1P1P1Pu7raHts5Fk51Pa/ep+vNQLUfAwFB
TzShsxsz/SE4kAMF4teuLQutc5sZvnAzJcJhLsAlEEa0HYP1ynrsXd5n2EaWXxWc1zX0surnz4uK
sqXnbQE+tWsolt8Vkgk2/6EgS33ZvdYLmmwYGUHvds4A4iDaaXAV1CyQ+T/51VQWHQs3RsqD7ITm
4+bmyBtCi98rcEd7BntCk3/sInWvj2kxsTz5vFob0gWEfUiZaTfFnsFsLweqyXZ7v4jTcneMb3vJ
xe0AB4sGgLrC6g5Uit5nA4RnYmAp818g4PjXdFu/qwZxLAV9mp30DBP+ksNBDm0+wgYfQTuPqJFH
q2bvIgCRxPZo68CfJ/z33q93Gyi9W/R4KWHlDFn1e3/LUG0gx3DL7vZvmFmzaDZ6mfDBscvEhd0i
wcW/b5Huq9YtDRbuWhBs2t1EfHHnpgEz/Y+gKn6XvhCENaly+iWAcx6MA1Ph7CTEd0AKyfc+HI6K
hFTq/zt+lEsYoZN9Ct1JB2SVtGg+Wr5IHg2ss4w/HEyTCq47/mNcaXrkQ+Yt71He3uTbKSCGBfwR
alBa8NjoC0r8crrO2y1DjBVhSDINjvweMMdz3j9XfV9kkDHJtKhBNHGcWKjcNA3w5FtmEYDvoUUa
UTFIKskDdlmjqVfzof762cetky3MX5Ia2p8fKixTKsV0hIVVseFBn+T9Z8+//e750y9eP382YqKG
cM2glDJcj9DUjDsIjgaaTBgjx0mgnhwSD8IBKm05tbgtxPmq+8N+2gd1S343/KsEd451XqTCo21h
AOygy6+fjotWEwwJsdvYXRkxX3SDd3SFHtfgXV+tI93a6WXDd7J26lGAGmIeUVhOooCcJSjDiPlu
NKlcgHqkUKvnh6jxW4/O0OMJwvZtp9tdy+fbpVynvzmcTieL2hJZ6Zf9np1XLsxrlCrdxqVp/lOF
v/Skby6OQuRHGFm3WASr5qpe9eOvB+l61iBQL5YbBs/LnRk1OIT0BNjTp5jdoUaC0iO28FBNSgXs
VvFfWm5VhPkklQ+UIobPujHHurIaANfKk3F2GpemEPFjSwO3QLLRVZOoE1aZrHZLDgrHfCGnvUiH
gyQvhPZC3Y6RIe2X7zb0lyFDMNwSWUnf0qTDTEYRAXMwX50K6e2tdEdUm6rUTSnr/Ue6fcjpPdTC
BEhKeLUbMoLsQMoAtr0tjRgMccIoynm+Y4faNLzFEU064EG338hPyq65pxxlByhNwVI3ll9edxiP
wsnq5lFjw6+heQ38AJTMbIIBk9zBKBvcIZ9L9AL+bgd/RRQVKMsNRAmfmWiG3xE4S07o6xlCWtKv
4zOEqff17DxVH40TE5hYg3CC6YcPfOCfONnq5SUAkxm+0LzLU9hYdnaRVfHfPeCKH94oeX/y0D4B
FqrodRSCOQrntFoBMufEiE5X9Z2AreAfQxJ2x+DT+PHHnBQpNmX4E2emm2qPjp6C5PRTI/u+mEr4
lksY72hfJSyhPufwQE4euxnjuVpVt/gZHbadQzvtfaYBj3nSfZnRJuFb6MRGhjz70XHRcBPYzyEv
5GSjrIA0uxL9kEPTmD81iS2Iz4PWtf5ff9wHDsJ9cv/HDcgAnd3RZ7eVIHW0gD2wrbc2CLRrVg9/
JzUqzSV7Z1hRiUOlcqbOuD/ms5ZazLhhU841qtbQiD7dzWA5VyKC3kDEDXNnUsC4Rd2AnCQBQh4u
BpBtsKpyCRERflz1U64mWmR78fL18+9efvHV8+++++a7J5msV3Q7nEVDJEF2IhJsJYp3ep46kofF
VCWEfvvV979/8TJzzY8k/wV1EOQcfICgOVvQMqO7Lrr23iBibDZn4weAwjwSpApEhgiacFAAO8Og
bba71RTsDNYdOmt2G9EzQPhwZ/Xr6QbBDVR5Mwu3FUqPwRrsXR+Un8OZn1emGPKECuUlCsZH5hAk
bFa95ANXz3AH5xdFaQgxOIZT9V4XzsqiuXYhtEMjM80rCFkFHhLzTYxUiiM6mYSqDdp8pKCIpw07
GsxDVw2+r1Sls3ZLylwjTE8zQoUm430LhqGVxi6yRJmGITt9NOoX56cX0Z4Lme5ArmFChqdSvgfn
XRiaj+1Xdi2Tf3UTrdMhRywYe+JC55n0eSsZRL+/d3E8DBxPznTgNUrp4ZyJ9KkEI0t5QHodm3+U
Nme6dRIfZiP1U5enNyHWgr3HYM5ek0weod/JBoHgQ74eBHaWRsXkD12HmFrrTXM5BWMmh9n5R38f
EJYslpPKPo/iWQNfFoTMGcB+ZP/Bvau9d8UBU0dWnT6zOMLpN7h2w4pxJT/OCiZwSCAnCY0SvbVC
Cvx5xJBIrxCpNzw6L03+ZXANhuTBCBb7r2nNEk5MoNDoaGwDEs+BpiJNSkdb9xU4t6Wa6yYdLW0J
AY8zNAT+StORPTtp765JER1aKMfohNUfBNtkX/k9+6yT90p4YO7/nhTHeP6wBcXxQ1wlqFhemyv4
dnrvZajsqH1oBsLOEtzO0VKINSigdkb4yDgrZSDlW/KQNjk8GWdnXZdD3pfODC8DCcEMl3rZLCT4
tGNk+y+Hbm1hNwXvI0/aD6k4NNwLD1pSa7mnadFC720c2RXIEH6HDPCd0+CWnGDNp293Q0FNegEo
bBf7dGyfjDPIac0cUXgp2HUM7pt99Bwnl0n64PmgSOFH4nXLg/D9sbw3rGsBXVoEi5NmsmVffR5e
YXzradfsBlVW8PVqcUBnNMCXxDKqu5qWqqOKqJ6CSqhnNMNPsmF9twz9PZdXUBpOQ98XTT8h2TRe
3gJVM7i4/cBbtQlNBNJSP3uUPZxzEaBM9Mub+NQOD+rLzoZcFfTz2N2VnjO/CyPopfYV8wN4PShS
0T23ofhBxLGb6E3Q5/qeMHhTqFLBDlakKoR6XExjpMe2agEYQEsf/CiOR1dFQbJTeSnMX/ifPJ2k
0un8R/sYQzU1oANAJUGfXFCoa5ZY+smpfldtWOffL0PragiBcD76VJ8ZlofXi+nWkLulkYKzk5Ps
W0TfF5EYmpACQ+ksHYUC6CmD9f36fqL7DO/nQ+ONGggGLQOH3XkOhU8etub/XVACS2q8q6VPL/yP
xx1uPhkboR8w7yeiDrgvJ7aBiaX16lmxh7t/kp3aZLEd5vUf9Vd5kfpeBZhWs4erS0zHFpirwm8B
zqOl5WAoUXDiSUEKKD2APvdHyG+0qf1oPio9VjWHmWi7x57uO+iuhCjojRdwrrVPVwtIb0vgCG3x
IUTFH7Q2AGrvRFT98M5MOlSIWmpek8DvYYvZ2gGpBmbiIrK9QxOizoJv7KqOACeUdxWqlEb4BIQG
TOfehUpO09yC5634PZz0O3hkbhv+Of/16CK+BoRRIryIRLxSn4beigoNSBeDWeyl7uBo2950Ut5O
OxvCZ68WKbDsCWqC7I0sNwc2XQZsl6fIcbwFW7mKPYIwUuiP4DPd12RXlCu17cfs/hXh2Vrz11G9
sXalhDgssnDmMoAi4UbAatTEw9Ofcy89gLHXlK2YsfErw/NUmykr8mV5AsvRg6Q6GC80FHceisI1
rz85s80XOkw/vZQphSSilZPbHLj1EwePWIY3Vb0BR1xQ2Vrg9VZVvAWkx7cQ5LBBXIZFBeCP0MoL
ZvBNS2+nWa4zrYujOWRR2WR8EZhyponrhoGfzOpmm3rOfsuQg/3eyJkfylAfKn5yFm8gdMcN7Pd6
R8I0wXridO2lDYim7GlEB6MRJAX1lKJx3+dQH3OQ4J+44ynbwinwymfJYUmitR3ld2vBiQNw9LEF
TsdwSNyn3fYQKBLyxLlqtDhGF3Hs/OCc+DO0vyDPooWQGo3yYjQCUgI65iO+ihoo9vKSW3Aj9S4d
iGtYbf3QmAPfCHi/1XziTo18JqATzW6mhrwX52ejC3Me0AksA5c7/D6vGQp7wCEl9Xg82HHc3/kI
RT94X1yk5/9yU03fxnQaqpTrJvRLQ9+tZgHfG3fm+holOqMm+SI0tRL6L3SgXJR0ZMxE50XCOe0B
c0mGedglbm9ZpdyNJjsxxyz72LB0CRTPaMMz00oNYa6JIr6C+T70rt/qrmZtfDrZK0We7IeoD0bT
D+z20gGi0p0OAWbj8TD7ZRq+H93uJyQMp4zoUkJu3X1lbszFkCyQYG7lrp1w5Tx0l0U2tuvLzLo/
TnElzH1KxgOX8KALCDoqOYGo5l7y85S9rkiXQH4j/1kZGPYNC+LuqEJJLkmbXIDKO7SH5q11lT4M
uJ3uNQlzsG+mdZx/R6FgLGzwtor4A6tCojRXAhK/wRCzcmnOm5EeOmV94CQ9CQzg3/+F+3ohfSES
vCl2DGNgAwASsc88vINsZfpijL8u2ZCba994wXIXJdkRvaU1SZkiKyO3z5vltPYNv36VDPw+GSQL
Ta9zNoxD6No06z/qn6yazRLxxueZ5+PlnMaAP6Sf7jb+8Ue4iR/1C/Bx8vsMnBZFaW9obvb5iShK
0l9WxL2nDcUA3JBwA6uveGr2pyLjMp+ME4yV1eWNRDVApaOOaO73KANHVrnAy3SELvVcNsFFlLWR
2gWPNj+fJtP1G46AdBaGNKkgU5B0E5KG9AYju7U1naVEJyp4GLHD09T1yVvkt0FGWDgsnZ/SidnP
1j5wMvDP3XuB9ifBmTq6KM5HnwVc/RE96MngpXzAybDpFgLkDAo5UMyHKueFYLRpdJVAk2LNM4FQ
Tcz++UWx165+B1fLen4JgvAqJYBbxcdd5K600PCHmsnochc8zjcJPgjasd+U1kFhV6NDzCCUEo0M
/C7iMVvm6bhhU0gcevEncu7iuslVqVbSqjhSi8GFuvZSl6LPqci//OLFV99/9/xVP2b1WePS2cX+
rwQWHiayI8LEM9B7x9tzOTmsqIydCFLXrmMvbI8hhTymX5y3SchuJMDTdrB8lgfGpqPdw8z533Tv
kG3wb7N10Cdxz8ZxVOWcx5EQDD9kUZJuGRAg06UGtPzPnn1Bn5NpsyUFyHW7qdj0Wui/1B8dbn26
JV8n0God33xH9q6uHqwL5Z5Ojt7TR+zn8G3qfj6oiKWrbSZ6JnTGZupTsitw2+Enjh9w0h9K/TgJ
LTV3fgLaFpjQH1NTKZ2ObfnRydnFHtd0LpY42SQvRgZXvLgn890G+aWkCTQ76TCYOl8AI0RhHCVd
C9aFju39TpjNON4ku6MifVY9+nNuWgs4AGg/j69+KMjxeFAinj94Kje++V0ozAoiTgHnwT3HjVFn
I+B8fJdgoXM3oGIGkP/lensPZYdZfb1ix9VlNLB304WKHGEeB2v5sG6q/3cpe779FvnK/sO5CC6g
bzKViiEMpyhCX3xn0cEGIuvxQ1TqPCwfo997s5pzq+gaEG6cQqVdcL5wYNUejDLfFc58irAP/iqD
4ZSJsv8isIkGfnsZ4aKlO4tv3tiHMNnAPvtvOltJcBWxp37oWBY3Zw7/ydnokEt3Oh4h1hbFN7f2
4d43rwmT5tt0HKTZTpFPJxyQPex2/+Qt7cq3gcE/3QN8aXW33oRdLPd2saTE8NkyFrVHB28bZB8e
ErZQq0nW5T03mwa59V2ZzvVsX5ij13dm08J3vmNvrQ1A5tw0K7Y45e902Coc+zjC8h0j9GkXiDiJ
h0huD9sS/x+5U8CHvNMpO/LX92sS/oYKUShGAgKR8h0P25ruuZgV2hZdiRmMYJYv0DhOTsNFLCTd
a6eAuwTXdF9Xi3l2v+dsU4m7Xu+n7If/6Re/+AX4eSya6/J2uoGIgJ8++uH/+c+/+IXFABoS+g9D
aTEKiZnQP1LxPH7Unf4Cj6jWSIR6VqKqmu/BxxI2p/VZ+II1P2NuTvWLm2bSAdgEBjJnh7VdDHWj
YJyVMRWq4Xbb3a6Upw0wma5rmNQceQHehzwJaOLArH7jx0PMNIkgH2pXg7EVslCbjb1s2u3iPnv6
zbcvnj8j5R8dh8flLx/xsoHDz6Cl8C/os+BGfl9tiS262yKWz2bpdD7+7nNDguF54VAbsobCcUJe
H/7Oz9RliekJtfrtqtmtMD8OFC2vJpBMxJCvCWJr4aMaMifYLlHveOLqk/EMa48CR5ug1Zhn3N9P
4iYRBSWMucO+ahtAv0zz75FWuS4jK82OF4XhJjJSZMoCyHcD3sw+b0M9WmrJsSu5WfFZJbZ7cwQK
OAOyKe0+pcXFrRRuWPcTzojdu/KDyV+y6ll6r3PeZkn7FGt6ZTkd62227dpcIMASUiIt23ugFcYa
uP1AvJ7eYlobW5r2ysacL0dA/AmNNIhgBaw20XFQe+zYHPXXi+YSkKe4qQkkXZtMegnVtt4Q8Xq7
dmhsZofwo3RbtpT6aOGDJuheNZkgF8StRAmhdghcwG/PXZ2LfSylVOt/TplwnvRTy8uNko/QBM+x
adgi9UhJtTVWsMvteTcks9oECAyrRWl4H3YKM3Ry1i8wM1f4vAk9xdS45KcvW6LI77fzD7Dd4LI8
ojFrfxnYWoAvAKQ8lfUKeDueRTONkwmo6CeT/hEab9U3bLXp5vrd+Yepuh8Y4e2yms8xx9q22pjL
3/y3zQgVCwU86QHuzyoDZ88Hv/n0n84+O9s3rIF8ziB02oqXPKhKc9IjKY8YBQ+OQ4o6kkZIPglm
JmZQegxdBDTHXrXwY2JY70U9M3I5P4YUztvqutncj7m5YbTBx8AgcvmSwpgCejWWt2yyUSwGBOya
xsPBCN3QAR9+0pjJRApLIxNQG/3lr/R1hmD/1P/hf1TAlBAcWm1+evDD//eEYCkvp209ExUb5bmi
OEmICNw26FuyNCzrslptp5QCB5MTR0iSPsIkcJZkywTPcn6IShNmckphHOb2tUK52XjYQDC/7RpO
k0WvQqbMZeJBiXpg52WEnotuhjG4Qr+Gv9VrGkCzoWKjTP+tioFXhG4F/qbXRmB+0HvA4xUHWETh
/I9D1/Qg6URR0e6DMdwaUWhcg5NNB4Lhyw50yZdZC9S53Vr1kPlpu8zyl+NTlHHM7VQgEqaelIPu
KSJ12PbGWVgkdESXonK5uLqRNKlU69tNomnqG4Q3pyBgjTo+RHBJ1mIh2pinqfONNlChM68N6cxl
pCm9OY4iCgNmYjr3G1efRdXA6RBUbOPFdHk5n2Z3o+yudPorV9B3W5fWZQI1+xTpD2QX+OvfL5LM
V2dtw8oGDRih3f7hGABZFPz3fGRLXPR6wdQHc2M9FyVk8gjvxa2P2/fw9HH5+KrNHp78piVh01st
tD7LgIbWdCBQAIWPsRgE7WincfHznuDBgl0G9+Ir+OMV/GFWaY8X3IGWSlHjThjVnFQBL80YX1if
/YSwn8AxIPGan4MkJR4IvUQEvtwsGDRo5sRIsPBLRAZwyKy96IwQViFYybFyVXWjo6fy15AvRDwb
1F7HMMZ2PPquQYGOlFbJ2mZsnKHCtqPEIHPGN9VPuwr3qxx4qtif8Ju+PXCqtITs47dIyZEX2Fni
4vCrXAa9ZmlgYlpg5zDpkExjOOSi5xsrz83Pi96ecHQuat1G0z0gIB514PVwoJo1qmFVd9XEKyM6
ELz+EH/dWiTw1iV29bKqViQSzbn87XS1zcSRj7N60kRjhAz4uDfZVJzavaUIJ92tpw97hm9tFwrx
zEMAbJMHA1cmD89C6uQa/hZCAKhssi347qgpfpmnq8gc7jmXe6lIdWc4irDyh6E8oqEVnGrtbnW7
4+ho7AcWdgOZ92GGqMMCwMGRBieRLl9UiRK13X9uTev94+O1pRUXoN2HSMf+q3958e23z5/1j/DA
wqSsmJiV2MsXHiNOE5s+SnTZOJoEkElzCS+BKkIdZOnh1Hh1uQZRL/NSPOWRMKszDfQsQaqX07eV
GtGYmoYux/AfS+rMEN23d9B8bschqtjYdgQlrWZvydkX2Nl31cTmGUcykwjh193YwrYJjEYIxyut
eL17B5pX4/1GY1MKLxYluxtbih+bWxSjaJow++J22qJBGZUesfFctQpZNbRC2OzJ8rmM7H1rs5h6
Ob8sfze//D929Vb4maO21MjuKZby+8GSg4ME1KAl5s0nUaRuEw5tG95yPDVjAO4lJz53RC3oXcjj
oGGN4T+FyxhNlV3aru8QluDRc5e8HhdJaTVX75jFcMlbHoyQ7IMqylaEZjklfSkGKA7LtFdEgteC
fmik2nl4ZDX/zZUawSjDXq/cHc9Xsa6pr9k+OHuz8B9kuyFXF7cQGoMCj30QIB5pp7qzZzLmxBg/
Lk/mYY1cxRM+UZAFJFG3C4XFTrdojb2l0T4YIP4lQzExFZT98oM2KZsvh7qOVPkYXmHuGRmHuDZa
KAY5fyhXFvvtAtwYze1IbLZqvoeRy2f/c9nvdBQebswAnrh6dGypbYco3i4MbwXDsrFb/Mkp8ysC
0kxsldl0xjhmB5SSEldW2qoaGIAMHmTXReHwfOCF5l9AbL0nJIRjAHUpunEDek6ADyAOJH4cy/mg
ng8uhvCjvW8FOgCevANuwjx2UemDi2iuraH1d9O2Ihxnl5e+6/AnsRBFKbhbz0EA9LAQA38uTKGn
nY3ZH1q8DuTvPZEqKSgiyA2Fix/tBmwVt0QSSkH6A8h+O9KwdesXrj7GpgRIb/8kqbGjtF7gKULz
/aoGYvB8hf9Na8ZtS/3PdyuMyYVFtgN9ImBz7PcFKLDNutps73Ota2Fsb9Tws/sXjYh9wo6oxgDx
VM3ByRysJzyoBRLhCi7PEkYT7LN9p8EDU1JEwPoRF8Wt8qWCzIW9WZT3Hb4AKnvCv6YMwcc+Ny0i
Bhgm7Ty/G54VzvMC2QIpxKfPUXzNzGitln4+0mnXaObsOqUOkL20I0sOyuuOXeL2h+l7Jz5a0jcv
dvJsuPA0XyDpYthQ9x9wfZ0dy27xZRIVETfhkDh0EA/ubzVCwnVjY0i2EbuH+cXRY/uO9IGJqK/c
JsItnsDgD4ZDAjBc1OzRLCuTiBEDY5ji+MRDeJMlARFTHU10T5P1vd0GSfeCrv+hhzkpspLO576K
yizyd+7gWYVYQg8W/0/OzjBzuQkstUZGoKOinN6x8oWUrAV2PO7SUxlxf4emJ0pmx+lA8ELM8umi
bQD4dS4ZPRgHzip5YLVbswmI77ip7pGIHs4oG+oI/Y86PAcCgdyOc7Ol7aefwlWNGN0+k64i9hSq
xgptXnP/Wg+i6Xu6mWmGlk7DdkB+ykTYF+5ici2DgOsaPg4gGzRm7AjMPbvpwn47Ycfi9GPI/Alk
E1zW1zdbSEk4r6+uKsSYIDOe35TDHQOBoyqvSziRU7AU3TRzMLLUq5sK9OWchHiqGiSrbnmE7tib
A5QWT55wEk24BUwZSGQGohBOwjVqAu1VYXaMbgGdKjfmcds2sxoTKWIaR5kDJ8f5I1NXj/z0RkY4
vLJhpovb6X1r5TC+QIaWig0dfS2jrMBM8fiX14vIk1MBZUHnyaoVbVA0nZYcWSZPN8fjG+DZAs8E
OFEDTKVER2wAylDeS5WzCRtOLlw5LSjqLtA6Y/rIbQwB5MgsOBWd8ljjQFfAL4D0WsGatTdmZMBv
BVIoH0T0EGzBbDJj85C3Nmh9hvybzVvMir1bZX/atYS0jPZqr1HFicjP3n7umw5972j3xM8dUTQi
mhW7eMXNzyex6KHowzDTgpnaNYro0vohK9tFfTsuV15JpyDoNgAJVYzJXuf261q+84vjplixmsBi
CtCFKD4t7oWv5bTqIPu+nCilR6ACofs0xeV9CMcYcYZ7lGpueEAenNqw/QA+TX1qyIXoEQw5GrBw
Ebj/EawcqtktQJmM3GPU9KiLA0nSG0bK6zvxsZ/Ol257fUq9ypFRzXTytraEMrdxM98F+9Ee3Zip
6IUYeaQjHpDCRjDxrAmtVJp7jEKuKhvRbwpOLFCN00zbVMn+4CKKsIdXSvBCrFDyuaEOUhAyNgfu
vPcjI1aTyD/MBRmSk+6L4mjCHsvLwqJ0kHnSg0iIPjnRqEfvc1946wZXxqJa0ceOH7b7743o7rCx
GtRA0XGFRMdBO2btDQHYcw2I5/8RWiqrbvHagF1ng0dooMoNwqrSRLgwbMMGYvDQY0LnBeToNfhJ
SS2EAW4pMaSgJ5XdckQib0QITkV7CjXJhv3fMGSZbW46n9t3GqQY/kYwEWV3lS8yBGI6A3bVlpS0
4tf1OzB0U63Su3DomXg9DSndx21t6IcZgt/WzdRxQ5jLG+zmkmO6WhHsm+cwUurx2d9wAW62tmvr
weDddaQZomCYqCYzfLkdnAKqC1pXAV1mCYAmAjnZreqtRDKh9yqJkFRo9Khzlby0CzJIhautBqQw
/Zo1GmCt9TzYIjLUsRppgOQlWRx4ckEskVERmZEdobqFWXIjdzA0wc7xtmD0vTAM+6F+6KW5bsN8
bRy6YauHfrsrdID2Gi+O8WBepW1AVlWV8lWG3DXfrBZG8Acff6Wtwc19CTljzJm4vAc1wmaYBi0z
QngFlh4UG9ubZreAdCuSqHVepkJJYFqSOf/8eSOU2hmaI/IAWmvmV1TZU81lgFat3FylM73Ye7ZG
18LTWge7xO6jhHLe7oOkp4XaydbhI4hbDs/kpGunhGe4tdJt+lj/6E80YPx1ExN3y2haFZxJRvCz
XSRhV8NznYzSZp7C/9wkjNeeNfRis3l2LKqpajfxceRz4zBEI7edLtxDKUYEmv/QQIgpZsp9xLa5
Nb/aPGo6uW2lNHOWUZ1jV0Zh2+5HWLSFUjCLcTBX54o7lDd2uOo6bshzkJYYWZHWEJWGdHCOqchw
auXaRghj0pj5x0NrOk2/7+pm1xoq5TVf6is3tb5yWNWKfuhaMl4H+LbACsDYAXs+0mkCVBnSrzlC
AQD/2EsAVe5ZXEWLEMqIJh2k4ZQ/dJJyAmClrlj0jsfKdLsmBMxUV3cXYmbycoPRkFff+15x4Xd0
Xyq+QwQ54gRSYaQdEbcaJ6U6xy7P95YLkN+tLT127SkB2Ku4V0cT1mfBFmRcK4gB+yCepI41YS0K
HpKjvK20P8Yxnl92ZB3uX0UsYT8wlOhn/M/UB61g9g3JYXZPkIMqhFkA94a71xAu0ewbRl/koLBi
Hhr5gDiFhbDx2tCpdndJzdQWHdrqfbUKndXs7AB1CXjSW3FRVpSApcn2oC3GSIQcX2JWBODDSLhz
y2WHWiYkzeOiy6lh9PnBX8f77ESolAoHwA8e98T1h24an9gI9MkEJ9jsaYmnZF8cjjk345PhWNGW
dHt5uGo2hBz43xtDjur2BrT52advAff7CnC/AfLbHAaMTLmBMD0kkS1XNE+bzZyER/SS5q4UC40u
Dpj/8QZDYB6t6hkHwkwmZL3BQQ+k6YEM+0vU73WNGrYh3w9kFDKXBIfa0aEGyHOyVYPmz7Agsn0O
dPv8rt7mkctYoldgHZfLag7mH3CbuN5MlwR2m5nTn+EegRDs9pFF+CsObOH+bvV2RVA407bxeOaO
rRkNNLm/e2CCXjcwbCIBbFxjz3EYdO5ULvCR8IiwQ83SmY8zJxmsXbCloy6zW/Niu6mvryFRaKkm
2s7BjZGOcL5tLDvOIk02DRF6dB4Fpii8o/GBcjrH+elrrYx5SssOrlJIyYCEoB3OaTJYp1xmhu5v
q5GhUoPWyHE7kNWwqUvKZ7hFIzFlzVSpcaATCKhFex8Umldmm2wqsfSZJ5eV50KzW8GBMHscCRyA
/dREgBe1OU/Lul1ODQ+F08p+X634zM4r4Aaq1cxsFTPgV5UezwQ7oHoYEohJMirTyyKgj0fMu5AE
mFScft+fA8KCOE0SgZTACaIFSFJY2jF0+iBjK5SHpeEUbov7k/2L9DUvEpHUESRS4F5GGJoNQRFY
HA6AtVW2+AiwfK0dM8Su8P9H6iNGxXAH1Ir5vrD+HtPJpIpn000R/1v0YHze9FKNHk2vDhHNDXEi
C+eyFiAbHf2DFIhuCKpmthyHewNQx5YwfiFNJ3DVfddIH4/vtuVvsul5QBdIbpNmy70AE4jqGeRp
QFCor+oK00WYfQ+j7LFiD9KMuKMvDD2PZzZdoeR5Wdmh0szCSaQktdKNofCGN61buPOhydtm87al
XdJiyLAtSagA9BV4ovpn5ePy0765mkjOpb/LefXurA9iZ2NGtHmfEwJjAawIuxADsGVX78C7kFJp
zOAEGw7a8DB3Kh+Zx7QbMozfbAgxN1RoX9UX+DLwEcT1789Qd4RqziUr6hF7g4fDJhzzJ3PxNNvt
ORewIVf+KqYCVaGRHrvBwi5QrvHmDQhNbpsMNOOMm3YFICCQTYqWJDY4nJsi+Z1yrLNlxfWv7Bd2
uDIGpcFxXcCbIvtcnrhPK8LZ4733cIMHQe/zh6j2/2lX405t2a05MHPYRedO9TEsPHYd5u6nf/AC
4rfL9bze/PTwh//7P1NAfLtb4wrCDJhL9F09JwoI6wreJqY0igcA/guCPJBGCQaAGwD2qxcSD6SF
ot6lz2Wzelvdr3FHclH1yFnycWh/MBRwIRltEjxIlPFJ4YnZlE/+S2GK2aUM/0RYjj7Nhk6eTMlL
4fMlqRHuhiHMw/gsUIDkcwZegMSBPOfxzN0TmANQCl+AtncL9QHED/UkhmAv8BI1MigQPDAmePyA
VexEfMGAvmjg4jXMRbWDGCryOvE1sbjckB1HgAx3q/qnXXViuK8T1NY4JyH3NZHuxgzuejfdTA2v
BQQYuA1qLm0vebC+LwGly4JK9Z+clWfgKIQfgeOPh98vkm66ZhUvpy2tV8GZZ3O9ZJinzK7u8q1a
WahIB2m1W15W1tzm1liaVrC2rrfQBVYaCbTdpq7UKZdvzXBy6XefD8E6xP4oUb8gfUygHcrtO7af
kYDDrNbgqwdZXWAuZBzoD/h2As5CRnod+3YQd2IMocLpAn4qmv11T0N+21kJpEvFCUCR1OEovc3R
CaKOcqV08z646eEa+n6lUYu8oPI8oUx17fngLPIiieZptwDqcPIOkGBbalMtm3ep1MnBVjom3UPX
gLt31IDTzaVS0U5Uc1u9t404uECn+kQl3k6r6taWB/iNZCa6rdtYKrtNSO+5RSrR972lXF4S767A
Iat7hwcKH+FfPVKNOS3cLpOZYVZXu3VZ3W1BG3puvthIUY35ipIGwU74S8AKZucWzp44mBBRu6Hm
DZdi7khgyg2R4iy9YVW5SQfuOoJqpfszaqEn6VLNCpqJrHBGqOOcY7cde85zPe2+rUjtzm5y4K4I
VxDeDkDhK7DMe6yA8iclQoS6O9CMQzqPdnepOqB8agFFoFV3VCF7fVPxngC3W/KUrqGxN942fjNx
eEk63FCoCgdd8hRQxnCbd9FHYttUpRlo3j//8Y8XcB8BtJAj1F9/8cO/fvGVKf3pqY3mrVaUUjF7
wq+VDUrlZBzRS+Ij79RoZHd5G6TkWyp1N2nu7q730z/+8D8g7iUsxU+DH/7f//SLXzzIvv2313/4
5uXki+9+//Sbr7/96vnr55Nv/qUH80EFR+QvgMlTePWmC6tIQYGGYsNKrDSZYLIo8PcYgPP14KLX
q4EtI9UeWKUc5BVEEtSIGcELP9vUa/RTuryHpLIo1Z4s+agOBKagEgGNNgbCRfaBnPWddgJaQGG6
WZjXMFTiJWkxPFAmpURAH3JU4ViVCZiGSN8EIrvpG3QlIv31fNaV+UrhWk3dYfY96AEYQhUsFotm
Ouf0mYY9XSJqvNeKVFecfq8X1MxZbls3690CtXU0gI8ziwaVtQ2qxvhLCpJAe72f8h/+F0Y/ZS1Y
aZj5t9UcVven4od//i+/+EVPjsOX+OZL80axf9nt9F7chb1QX/gLK4h+rSdMH+RGnG5Roc7bEf3U
gV+eg6oc09hv50b0BA35Gp4YYblaENyk+d3W1yuA+oOCLv3WrmUFC+h7wd8PxHWizhkeD1x3U3tq
5jgNzNW0GpTLrhw6Vlu9rZ0F35Pq+Q8vXr96/cXr719Nnv/w9Pm3r1988xJO/J5o5WGGSbJIs//2
Vv0BWuQJoUWe+hp3hMpIelUwiIZO+3fFraYr8DsvQyS53u1WFHHsP+bS8E/g9i8N0Y/guuUlGGfu
V8BMvJ0HbDFW/O75ayKaXE9Y9IHZNKit8Iu/ev3sm+9fJ4rTrkoUf/7dd+niZucNlD13jT6ZDR0N
n3k3r0ZkKOAso3qv+/YVbGStvDolegrUqDf1Yt5dd4Lvc7cpPOcmfMfeAq6ETp+1wUiVlSGMhmKt
WNIi06A9sUPUMJGHkCulGllO1+j88+Ib0m1R+MsNmni9Ym8NtQFlJShv+ESvmxr1mQ1iAAJkNtJ2
UF8pnylVYZy5H25xDedtbs3BrV5LW6e8Wuy8ELuredBYiQCEgY+RLfXROAsQcc2Kz3frx7kUgYyn
YcdApMaZ++G2VmKw2JCu0T0iKPVRlE7QG5EpMswee9KVORZXMgo6PjSK/u2lEoIdpTJlT7vlJxBO
7YZKhPq2Jbz2tuVBIz8NUsZoNl/+saUt4Bet6EnRUZmx2HOmy2ZGlmsAx4F3RXEUysHxAAcuuhDX
i3q2YQl3M1Dn5kVc3ptj/KjU/XAAR4F27WzRtKE8x8NJvZJJCt+Z5aJHZ4lnj71nE7SYuQErcnM7
rbde5lB4UG3GDeCB1ltD33RCdYzCQTbKoi5Q+VxoIuQC9gzKqnS0G00nf3zx5asXv3/5xVfPn+W6
bCpsQSXmxKqvn3/3tans14NExI9/c1Tqw6A5Nz9+iz6ukhgC0T7UrNiUIrlWdUVkAIcpVKbwf4MB
JQoAX/Ch/ctsiGKfXsj7AHfsiZkiZbtbqH/MTu9+fRXqqVQT4qtM1Ue97kOuCdFgczk4mkpMIAgO
JR74C7QE8zClb+rYuAb2nA+Q55l+ACct9AO7LPbjTVFtz9s3eVkF403eEWEZTmIKGpyAJWJOmTB2
cp2vl1ZgyEMTLnrIHfqJT23DHa4cwgwlVU1eCatmUvztvFp0uP3738ScNH+Lx0UnGOX3+NagV2/H
uz+CiAS7//FHGDTDy00/oogcy1sEHas7vvfTxzbDBDK97bvVbrP46ZMf/vxPaEbpsU3HfDpH5oLO
Q2xbSjmBWg8II6YwgztDR2HAs2a5hNM4MA0Pyuz1DSZLQKNZNq2XyHaBuMf2xner7Kz8lPylDAtX
bXrA+BE3h8Hk4uOFIhdVqqabRa1y3Zee/NQQIRtiCCmsjRKsGMLYCtkY5SOMrsU3xo/kIvA5zSr9
zgz+doYS4LtVqlyJgDWG0C6kxu92hjueNe32ixl0+RTeD7MvjNh8jb97vWfPf/f970kTZj2K3q2e
0qR+i1Af0llpXsATCAlT9moZoQKcIxa5gTBuswjYNSxDvm7atr5c3GdwHrZVoVbaAsW1NZnMSLtN
Bt1Fu6neCRpP6psMN3RnOod6Y3OfFVIN8krZiu6zveKnp6dDQw3vOHfT+Fen5al3rlfVrTmNs0Ur
sOCmTRZUp7vtjbbfyxGAO5I0aqp60ZHyhRqFAiGxfccJTwBp1xd2dzYZSgm/fQuIy5RiePSNnzHF
LibSuMvpfHZjLoLcz7urW0Cj7jofPApFSG46mZOFhq5HTWwuDdsbsTJTHO9zh+3HSTy4rQHRmPzh
phgASJYa7WEsRVd5mHEDuW5haEegNwkcTUMqzFMxthpmJvsYGXnfqMOZ48FyCMSIyZfZSugsnDU7
IxXerjIzBEYdfVcjJdS2Hj0NSd2GZ/uBsSHjrsa0Zx5EkTI4AYW79HNhOEWMYIhlk8Od+fOEr//9
ZklPDKbmghXMHrawcqZ9pw+yH9Z/2PbtxpgYaXy6ZnxU8vvjuBIo72ovxIDhT4DNqhW2ijBRc9hJ
ii19YHi2O3bLxdDQBehsZ9MdqVNnUwhBQ5+WW+CbEeZdsVAbSlqgUmXpLYGkPDCGgn9m3ofZhKoW
NA48OnQUpY34AvqwnEN5OKR5WJSXGm58b029OrKyRy1pNgDHJbNYehlNZRiRpVVm0miychBYzNvo
JCBl6UwQDu19Ms4G5v8+ceXRjGdekUGzzdN28gl/G2xoT/HEj1OfG3H1NMWiY/ZaDAyyoBv0i5UJ
02wV+fifB9kuzcTkVUlKFtidX4Lm6fmdOcHtgBMtpcA8vU7C6gB1P53fAy+pmik6ABfhU9B/qnxu
ZMVXr4mW97qBGbs21xpkpw/YWsPMGuvW9bpimPcpeukY5rRe/be75/CTwx1HD1NzIDOHkZRxRUWL
9uC2CSHUl51uBXkpEHKAc5JEsVjItccXvJseNGZ0fjxYt3xJEgei+bbSdojIEowrAa0VanBqZpYQ
mjHYDNK+FGaHQFnw8IYdHboTgy4svGnhoSRPzfsI87z5vj8MmTXc8i7LE/qqoacaqgnIVayaY+w9
vBkWURwfOVmg6AmDG5+BYQv383TL41cORzh+e1V8OGvgTtdA+pKbLD8Sgax7+wyzojjMcu0bywkg
jX7YiMwUDPeMTe8fI9JbWENmSAhTM8GaiLfFTRVZ/sk6z37R9Azu6cqP2iMsqdU9bzosJs5abdrz
S+XA3RBfjy6WDFvhZ01dYL7J7PPsl6kd6ojyi5f/+sVX/MV99ChgWmYk/XreL3y8QG7VMN2/7F5D
ZErC2Tty/Y2YZP27xoNB0dEY273NEaqv7oGSc6xDlpOYtwTMJvBJXuFnoIOhcmIDn45jFpl8MLJ/
TK82LSovpKlcKskJIimywbK9HggImHMkNDJwW2050nO5BBQ18Y1PrTqVmVBcCA2TcqJB64BHBJ8D
xv31veHNL5UjiVo9+hKW1IJVSpVSLPwAOwD/6JPlYKiGE6rsgqtBNSYHq4guOlfIHcNZs5aLZWvG
V20pcGZgnteUYDb60pDOQxN0pMDlFhuhdULCWtuQEWi4DCi9uEdL3wPzVeIZ3X3Rp8+VtNEXAa5l
Z4klAf/2wzkMuGgigPAxJ0skf+4/A87V2Hs/olwExJBGWBQHFpOK6YW0y7XBw5pYsD69mXfszZCe
osMP4o0cu3BZx8oxFPHPWDemhz9n1UDVa1ft5MSwnbPKX739K0cJM/+dlo/Q6uJT2FnYW19UctOU
GNoHqTvxPNKL4w4klUUAmQX6/TsymiNKoft7uyG3FeyqILfvI46t5nf+zku9WXrHc2CzqEar977r
5Nzq7zAgmg9ZEzDcHJuFOfMg2QCKLJmjqlYixYoCXcLGLEzdVzcb8YcgxUTdDh23GeKPOL8VG72a
vLq4v9DTl8ffrZLJE/u+iCldR3luflhcvN/+CFReErgLOj7Qe8VePcnNwOtAmqfcCH500+JFO4wv
QBpsGD9xFHdCRUnLPiVF1QnzJDjtJKBnuTmfDFJkZAz/i/E4FiXEhN03Ow4DuzfPI6ZlAA7w4EI5
CGMwiC0i/T+Gnc9sFMM04I3TDM5/DOXmy2UPN0RuWprjIj7o1IeIwLAnvh8h4gTMqJgRL4yG4G+9
6/Zh94FGEpxy8Mkvv3n+8rVjMiAaCuYKpArke9FY9lHfEwzugHunKqMUNKIMT4I/uvQ6MDsg1+vR
vH724juIBoNQKLcur+hpiue/U6SCmW0ZnDl3i22j64lcAEW4dCABgXOSzmZ7vLNdnI3E94iTDlVQ
TKjmGaPRQGxuKX6tQ3Dt80FmvaxPyqjbIs3m+WV5xLiVbYxMR827oFiXqrqT1nlay8777O5AlpTI
kB5SR9wKIu4xqFwk9Snl2KZZw0llT0D4Mq3marWGWyw1A6400M7qHmZHi6meQQ0DRjAXX2aEVfRV
9vu3CE9Hdg3lB36y8pYQKnl3D35cJd6f35Vkk1PBl/ju/Gx0cRHp9qxG81vT5Vemy2e1xYHCaorZ
QF1ydySSuWswB4zpfjeDUAlrVD9hl+i5DtL22XR2EeXJsKyO5/YPlSdt9ROluDXFywlMkik8kdeq
xuVKWpPd7JFnigJATzcA8lHNx+RvSvkvLldJZxqMx9YpFNLkmNlntymisXdPbQtxDYabgm0OxU9w
3O4SZXM1vEYDs55Z6AuQLuaQ9Xx0jH9Rek8uMPP4ybtBEoHoCLPEcaYJgbdT5oVVszpBpgV2SM3+
A3KTi6EhHfnVvRRD6iL2TqQEGK77/vPHp+Z//zTq/617IjsKZjRvdqv53/zLBtAVsCYE4W/9K+4H
xUfjv2W/368w8Y1hBkGl+7frDRIBm03PvXJSAeSEINSlmr9Pz188ffr81f6ewyqo6U+UPUTKE/Tu
3D8kQMAWLbRjyfsodYqoTPoD2ckXSLbz6cmpRpGuQVdAiRQL9uagHIwyNkOclZ8BEZjvAPjJvADq
1HZrofT3iZE8d60TZS6654TyUCeZR13s3832dJwmQVssnFsSCAnNBmlw3uEtwkar98vqApccEfaj
zSie8ax7WB82GDUch3BNl5wFJ16wbxQsX+gbBRceZntwFxumJ1xtwblKvMNMGxCcz1FrZCUxzzBY
I4qht7XaAIo1EDSVw5VpC6X8IoSlRch0Ypa+fPk1hLoa4mIedzIpPOXdLIoKdZRqaEQJSJJiVcZe
H1qmCXgVj/EhOAE4XG8NJcTIQhDwBwlAXJb2lLUxBGjoJs0o5vl2fNhOyJU6aZoCO4GC5eqkh0yY
p6Mxgx1FTmzE7Doml55RaSNghIijsAGQBcdytAHj6Ht4p3i0RXNtFZzvJgheyPsX/jY0C5SenACY
A4bTrLHNYvJVc/0cRCCFkgdfYPXbZc/2BHQGtjj+AXKF9fbKWW+DKjkCAaXgJDhP1EJFABIWY7Or
7h+ef/HMVCEQFvgMqEXJfq0OJzFmdIZ9i1FTGMWMeYUAlnR1DacsgeOtdlSRPcBIqHrLCD8bnoMK
XTHUoshMjDNvVhAkBvZ0H4aPUD/2vVcbZmGcqfnoqGne9vxk0LbnsRRlfztqMzqj8MbcHuB/tT8l
sy2GtusR54i0HdqdVehElbAwXE+WCT/i5B1+gurybrlAd5Zx1mk4N5s6OzkxBSlHpWghjqT2OX/C
UI9rmPnGcydpgQey6aqcm38lZrRe1eZPpWOpYLz8uES3tpy/Q59QTtGgWCFkg5rrijcnXw05Q9Sb
Zks8Fk8hCrDEWMCXzbxqY92bNIJh7K/v10hC7cPnXz3/2rCdk5ffPHuelJfMwISHcQK1nJpc2ilS
KgRTtdd7cHr2+NNffvarX//mn4749atf9yD25fHjz35Frd2s30rDZ7/6zOzxd9njX2Znvx599pmN
FyjX9z1Ce2zXzXZL5pnf72qAWnj1ry/B7b08Rdwkl2BruqivV4j4ggrIlk3T8+qjjz7CIZx9evY4
+1Nzs1rdqwk5+9XjX2dfT++z08+ys1+OPn2MMfCTeTVrNlNMiQBjYXdyn/0UVEqE3Dn97YCkEwag
XNbz+QLp03Rl05NR/rHbmwp8XbCYmdRmB0a1WqAqAZWlwpxHFeniINrb8Maksl5ALg0IF0BKUQoE
mFurwf+VfZz/9tvPzcZ/8uP8kyL7BP6C89RsnpSf/BYenP6WyrT1nyssVPw28zXiA3wPLgdPfrz9
JPvkx/lfHv81++T8x/noQtoEKvqk/Lj4h0HRmTKp9limB3RzIH7D9n5dg+kFxQI8eHTc2+xmu12P
Hj0qy9KN6cEE1+rMrBX+70+7pbw6zf733cIsbnb22ejxb8ziG5p/47IdIIihTf0qs1fi49yXHjhI
CV+V15tmt8asm5G1i7S3UPqcOJPYRIOFzkHfBuzLo0EKGp3bUeVBQRcXJC4IbReDfdmQdVkclps9
fGeOC6r2MCkMAg2fIv82+HYQwGgRaMeEPN/B65a+FQwaF8FswHU9od1l54T+HIQpacxms0Xgj8EF
c3rSPj1EaeY0gBPFlGZjciJGyCLCk1/WLaCfTO6r6YYbgT0bjZKrq7Y+ziBYwvxPO+pXP9mdi95o
Xd6FDNYMk4fl7BOARZ58wP8MgXkggKcWVu0Dm0IMnH3zBI+dxIBJVlbTxf2fCTsMZwcJGacwhIqQ
97eWJOt0Ss1lLnkoUaJmfId5U7WYe+TGcFvwDrrEUPKWocmod9py0+Vlfd3sAtBDCR+aGnFjPiXH
3u0E2DfKQl5eL3Uy8i3aq/idaZq1FII+uEJRGuvBjY81qQqfgWE2eHg5sKq9ubkLDpafm/KPqTwy
rOPMK2IoHX632ck3hryPDLew21Yx9qIhF/1RH7UippUDXphuT2Pb4RihBRjavw086xH0T/zKDu1S
pxrc0bU+8ioMXfl0J38YPfza9PPp6LOLaFSwUpgm2bJME8sO5VBoSKsCqRHvh15/w+x0iP/nSZ22
/hNq3J8n7PbEsLi9n9WXuHLxpkOScb3MpT0b6ghRY8gpdUY5rmEnxNmSbIQRAvopVg+4OFyIkqxX
+eD711+e/CaMUZrOGJGVGriuthbwLB/Qy0HR2YR19OZWDNn/InUrgZsWHPyJN1q/MylzAmX29Kn7
9dodpTN22TLu4tnbPdxH4F7y09ADtyR8oZ9OfvgvH/8nBLfkHPcIwjOv21ljZIH7ocqwC4g3mOfJ
R7X0QC3ll5Eq14CJFaPLMOyQD3o53bwVeva1+f1M2EoX7YigBTPMjkvldOIxG5nJDfZmu23CFpxL
f5MJ3PyTSaGMUj2QMEhMAK6rygnq7+U3r189f22j+zCYl3NQUfOSZUKeEryaWYurljIhQryvhWa/
u7vLFtU2c4xzVm1nyhkDwHoIhN1cKD2X9USyX5jGAG1rcrsB4WQ+mWjnOvOSRlqqAr0gfwa2MFgv
prNqMm0HyerylnLQ84co9Ijw83pKKaDVblzGXBqgEoLgfVNaExIpYWetXsLWyuE/PDLvKpBq5jWs
ovmHP/AAumEXViI2VC8nnqLxCKREVd2mWviSwOS+5KPxNWKMd9I/c8DWkN9gupkuUzp1wx6aK581
UMPsvq4W89n2Tv6u520qDLXEZoGJg3+DjAfYFfGH5kcUqAndUawm/PJfS/fmvfz0C9SYBLmet5pT
nCEamsN/mgXJ5dH4gISiNAQHJjFPlEpe82nvCloIRvVr/UCMLF8anuXS5j642kGhQnnMSN8l0wlu
x2J8EWuc2ouUDZiw1alSjgsw7svbviy1iyPWyxssJwLmip6xcNQCtHtILKfST3Y1Ja26HTwtAEbF
u3qA3W0+HfOm52jCb5Dvgp8IWARjq7aQn4pQ8a9qNpSonqQHwhtEwYwDIMIi2B9As20ETt9MIygg
ZoLFBHsNANEUau70ppqi04e5aFYCLd+OXC5mCfKH74GFBu2k6SeF82+eyS7I+Ydz+CD0f/gM7yrD
xshMscDccPazEFYO8dZXa4Y+sOEbsJ1IXzqb8kCkmtBBB80o+dKlSE9vO4tbX6/+hAnUNX4+bqcR
yRF4xEFPRv2itsD22XJ6yyHn8Y4OituSVm9s6DIdHQh1olnGZORVi0jzHo4/buIRhVUxHKLowd0m
4pHhB9FaL0E9vVaZup3JUjZA27mbgPsAKC5+j/siamjXIt7dVo+WTxlmHbAKcK99VLNAgiAE4IRJ
xWQjmB/SS3tp3b8g5BC8lreEr48HOMsxXIRnk7TsKoNJQr/IpwHtjphwCr15eSB6lPpzDJVwudM5
OBgIL0KMmqNudvsaICCdCcEn9W1DfhbmzT06S4qjgMyqaa3kpAHQLhZhN5+e8wK9tzv1uloBrjpA
l5qZBh+gGSZNtPuc+udvsJLrlcvjiZu5ILse3U9ao29vpTHNs1aS0UkVGqczRDDhTN7H+Z7zYG9j
6bfg4cmo3QjFVpxKYSqN5LBSkntQo0TJPcymK/jLk67SA/cZBhkiXR3m/zOuL97Ok7/XJYS9nXRd
RdhK/vyHb59/9wKU7V98VejbaWvVHS3aMy1FNYR2tL4fQTOjNz5S7xtA7WOGb02p9GJywRjv0+zN
GxzgmzdIivl2gcf0VW/eUKYXyAMs0Vq83V2zlLgFC7LKlUfUbK4fVatHcMu120fYkVS52S4XSEuW
DScxKf/7PACK70QHZ30efN3qh2zd46xT3giC7Q7kFkZv9g7hcXNin7FlwyzctRE8JpKrO1feoix3
hI56mLTQQdmTb2J+HWBOk9utjxo2rw46kyCwskhC8wby9/UZ7xcSdxP4EV7r2BzkY+IsasJD5MzQ
4XEtw/zeMCxxlBp6pMXOAHqRExWg/5phFD0PG3w6Z9+ZHBWlogpFlb9omTcwOfgk79NlsGCGGh+W
ro3BidAJ8jk84XUzf4X7gLQv435rqEk12ZplNztzboZkHt00t9IMPsQtwAQsbAdUxVQlm76b1iS0
S+0hu49gSBNlYAKBGXIzzCR2gD/RfIQR3fK+YioBkv0eSCt8gRrI+YUbBXUv17WEZVjhhMImkCPn
oHRwF9g0sPjp/mmLoO4iHID2DMWB5ANcw4/L9T1M98cTVvoMinCA14vm8qTd3uPcLDC6A2w/5MOg
lUIC2yW6ob2DZAa9a57yPvDe/WE0GjrGVdw7iXbHdW5Z+u7ut+/ZvceLstehHowHqk+41ojz7ePq
Y9p1nVNB72clyaunHsK+Osmn/mEVTgzXuc3NCKddOhSikcCASKnSinbMwm/qP1fHqFVoLD2x2zP1
heA96kN1Kl2pHvKPWRKT0Cf+00Z+H05UwLNJ64+o2hOY+rzPA1Ano697Nh2gUIeI1FivLUYCEewv
uEgumXPI6KMNBUO/kQVfaQO7FgxhdkrXBfPLwCGrxoD/b20UldALrkEI2CSASuYuztMFPAIK4aot
jK+UJpDzbA82Zj8QX5S6ued3U/DlGWX/rCdvYBo5M1Tl/Gz4+AKg1SHCcAHsFUgdtxRaaKU51Ryz
KiDs6CkmUVVi68/GZ8T6rNBqqJ4/LlVbIIPFDBlzY2q0CWaM/GxcUxV9ZcvNF++9szq0DWfDTP31
eJiVZWl2GfLOJGFOSfqDTaTGo0Rdl6xNOiizvV+uWdAHalyZfJs+VCzgUwJf/oMPFv9ViiixnK6m
18hZMYv3NT2w1VxSDVTAGIoDVjDdmwXnl5AFVOq1Xh49leLTPdcMzF/sNA14ZIORzI1bkYEnBpkS
3t+qHA1hkI14MOoVc1ymsurza7wDzTP6YW7Wp3AvmQf4r/n7BYuB5pH8VI0KY2zefml3/+D3RLyb
jXlsf6tacOUvLL8E32v+5IUlKPu/mukPhD7R3hRqDWhTxzlOlP6MN75woXT6LKCjYHiRnXutco1b
9yedrcO/o9b3qLAH6pBPJkh2SEMMadDglcqbzj6rXrGSUZ90yAvqaZ2C1rVj+WuR3d2bSd3ifqCs
D8idhkE0qtX8Y131EMCJrJI/FnkadRInJJC7FPU9K2/YvMAoTwodj42E0uy5+c8FpTWY2b/3fOXH
UjG8eClNN/CGDFxJwhBPWXW3FeBI81PWDp8aqcZwnP1RiKrMgV5CYkB1Um/RDBfEustcrKdbl3JA
EsGAscjwfAPNFacywaPexozuakXeTOaPrpRSXkhuJzK2YnqwfU6XHnzVNfCIzVvD0N/dB2CbEl+m
U46v72WiwQuEWG2sNtZzPuapD45VR9VgpbjbrxMF9rQHWy88rSoPOrl7OxMnRZp1HNuedcVt0zKy
86HsddiHXE/i2i52TZeuniyckLgHE+d4rnWuE0ylY5o03BplEZ5mS4BqZEkDzyBaCJ6CXh8Msu8Q
jNiLH3GdYi2YDXZYDQJS4X94RWDAgVQybPt2tjNy7pK+ro9F+skQGHzF7vW8G2xDopHxRgRHOxwQ
wSB7lmAx0w3o5Y+2bzDyyuWPahyY+xhiDCMtRP5oNsEKqQ+wN9vej9gb8IIaU2/6VlbWy91W5P3s
DeuARVlZtjfTW/TUwBrm6eRqMb0GhPhPH5tNY1v01ZxpQcmUZ/8AZWbCzOorw+6b4wYdqd3J29Hc
xku4bTnbru2yjdRtGsX5W9BxPYXgmrsAZJwl93GoBusTLeD9Nlu0iSJ6R1qDVlxMeJ1+4Q3nazhR
uR6ZUruZh/riRetpQhW3DzXfeRvq6/5II75zMrWeD9bTFOKoYHhdnerucOCtDUVHYSEFJc/d4Pto
/8lMXqFmDpoTzVwfU5AvMAqetYS0urwqdvAeOD1/QBr+UWNwlHxxITUgYDa0lGrtqNlACi2vRSAT
2lSilV3NFrs5Xz77omb4A9BvalMZocVQVTFWQWDYlCDhqSE/nHx2AxnYxUfZUAJ8oJYI/y4ZkSPX
AUvk+RhEGZCqdkXVovABZdCBgkPLzSeYBxh4vdpVUYQXxHZ5SfT2dUCHMdE8O9agVrdazVvgyXNk
reLt6aXSO/k0GYet12/UHRWqVnTUqaonf04OlPAxFfazVnsq0qt4HWEN+yUjIWOhGEzVVEPnJTM/
52DDPu8Hh8L6LXWeDK9Eie5NCiALnElSkf0UiAFBmGb//jbwqZKW9KXp38KYMR247Qn6PNIIQnb2
gdlNbUXZ1bf1Zb2ot36K4atWYP9UInHyB6O/JpML6/0Wbkiq7O+wWT+xH20v9CN2yLf+YtB18sv2
SVAtsaa2kaSzGV9rGuVQ0akIaVUdNmqJ/NEiXx6v76H04K60p8J75O56G4rWwr40c+YSs/pcWBKt
hPKJg64G6JFOvctCTqzYLpKIFiWqcmj5qMGi89oEwq2AGAP29eeNU7T//+6jpBmOz97L35XIzr9r
6nm2MXxss5RLjlL3rKvqrXh+2cgEtgqrdnI4xM2mviYEG9gkFP8+F0f/b++/vUelHdyD4AYB9qnf
aiSpGV023h0rrDZ1PBgaib/wbyOI6wUOrF5ZmQc29MaRIXNwcV4nk2C6sEshpdyODbbQ/tHVytc1
LOJL0VQifbZpMhbELYvNZUqQDts8LXXjVYT7pFqlL5DkvSlDPYf6oMDALbAH0cRKqU4ASA2Hhc7Y
KHtwNKy2UDQE7YfAgnRoEmh05+af+AK2aQU2lZdVAMEP3lb348V0eTmfYlKCEf63VLdOcT56fBHR
rYVK2y6z4U6wls9xm+vwXdJCCEmXPHWtgzJIBvfs0V1g5z5iltDFsRvT2A5s7N+wvkynPwKe+h9i
5ZrATM+clHfjmjNhxhGXRfmm0MVAQqfCQDjoJzQV4UcYSXHVXlWbCZsEch4hpnYZ8uiUe+nSYjqn
lesKP8sq+uCWXZZOFLesh50PSiQTGdpMxa/5pxuXamSo6fd+VwzT/lh90Tj8MAiFNfuA9WD6WtC7
xLdQKmdrM41wWKhVL0gQ6WGXLwVWFKpnSmooM04UrFfbpg7+hGqmRl+uUTdtExTLXI6tOdXW+tJ3
Co41OVKgX4TqT2sKRcVLQGVRMW1b9zUlPgxDzDk9yKbzudX3mk+dWr9Qc7I4HamnQn6sXvs4G/P5
hBuarNtqN29kr05MWQHe5w8x+2pZ9KLLAr/OXGRoD973zUjxd5csuvQftucP24t+9pC/Xtop63lM
2BPzNea2vHk76GwkvYzlB3UMBEqOz6EmGEizHf9FDXkE19dfWSN1YFqVXlXNrUcjChdODV6/nusw
0aNms0QfVucxROZYRIt1YZPUSn1FDp27VtDOnQUjr0sjgfNTZ1FFV7OC66tUNrfT1ZZ9vtGOVEkK
SDFtMzyUkfrBds0NoJdsTenU2bL4rLpSeHPikhvg5ZO4RRl0QHlyOQVEGOL/cGqmi2vDx21vlnju
TEGCEsyemsZemaV5LPPYOJ/v2ZQ7xNheydqjjxDwrMChGRaPAQnLXvfRPj+9sKafSNIMlxCdDyAw
FFtCJWF67nngvL5uvcC3Ef2tae8BK0kIwZ5HgXivP7ZuqMwQwjOJHuFHR59iNljRqRN3B9StyEGQ
UaY5RmZIuAnJvWErBwEr+rjyu3OuekH5un0Vm//FCN5DxfiJuY+37BmU2684vyjSHSJKQeX1CIA/
QQfFvt7l1pKRRsGAwUTYpUnQTXi82gFrYUc4seXNC0Szla96f/QnrxMrf8Z02Hap5wUftOdS3d3t
8baYLSDo2mZ72FTXgA0Iu2pbX9WzerpQ5GHQWld2Ijjg0+7RHDncEKbNRILpDuQGQl2rJjfmnBA6
Nmxk8SfGyMfSng1NBpTr1CR1jXrnAZcYNx0vJe+66CQ8YEqFZ3/KcR74lYZOUO5K+HOlyD4lLtfQ
EY3KSG5eo6cQaNIkwQWgR89m5poC8ubGnTU6qbmZo2pzQgMwAmXdltkfYSi7NkOnTOKMMf2GTyE5
WWdPa6zmFWl6KcbBRrA43C6hOvEeCt1xI/g2rPnRWPkwBzBbVM+cgQmWncADfc8qh+aEF29aWBTx
iYY9ltgZwmlwQqLq5euAY3bTg7Ex+PYEU4o7YYmoL7TMC5GT0vo2ofSlr7RVI4dk9KdimlKz/zH6
4zwmLsQtYASTrve2Ptvnexqx5S726fncphln6jDmPsMD+Z6H2TEEDBaZbswJnjDLWiEfta9mvPH2
FrcHephx5AQZPI6eOvf8omvnpWUeWexD8+4c4M3Tng2u7Z6ewCUocObB8jbM1RTn1QKgn84AV+di
if9MIDc1OF/hn3pqwhBW66I4tm34Bbz22Nhh/46LBpQ7fESzc1iWt5w3MJ7IunFuBSPU3mCgYzZj
ixIiG4GXHzuWMxAaPOI2+AW3sKw21yo1mdjIsExpL5SbBnAYMSlZMKakhZTEbhzGmOuW7tn7BDtH
irEgm61tNKkTE8dg8maSovG+hqcy6/ukXNeGK81bU1xbSB0PyLtDrbNX/m5PPSrrHMwr0hYH8AbH
GVVJD7Cslk3952qOmoQBmHwlp+qEQApw9HwfHFJs79MWEagaxVzVVRuANMqYduYiz8VZEYuU0pdW
uoVDizXszlkGfm2Ys4CvRK+YWQUsA19l3ZvSvM88HaPZktR3Tsj47T0+pnCjcJO+ul9tp3ep7Jh3
M1aWiR+HdR594cNzumByHATvA4b1pVbQB6Rab3IMisAAks22X0Sj0V/xAr/h67pFH7MPyt/ZNao4
XovBLjBaY8ldjn5c9btKmgViTenDDcLBIfUSM2A2laOfbiPLHrbJFxTjy3eVC+NrXTQpZVxk1QB6
j+J3vWc/f3jx8vVIsgpNQNsMjKcZ+KMMLJSMU2mO7SNzlCnwL9HKblWby8wlloBzfw+oP26kbDCN
K2cPs6qM/DdVkl/MuqumW8XL9JFQB9BhpB0lAi/H2Txs6zncQHQCgbyHxxlasocW5ZOYYpiHE6sY
vwMvOFeQ7FB98+D7tef7Y0FoVO0k/3ts+7bQ/g7SbM4Dc8WOtK3b7NrZ2yHl2UK/KECpxdkLFKBw
M205snSaIfY5xaib1Rui2sbpisThmWKftGRBbRkR87ai/H46ySMPnlxwIqcOZ7qD/bLGdMHukwFz
KuWD5Eoop4ODjqh+RS/9Bsk8of1xa2TsZ83tir3YOe2Oj8q7Sq/73jbnpk0exp5G/95rPfgPWGzz
qek1xpQohi+ACAoy8IEb/+q9llu3cRVMB3HO87ktk9tfhUWoIRfSozki0XB6PFAHp0IOLSAAuE9K
8W1l+7Ze5wzU8rAtH5JO1QoP2W0DwHiQrskqsPvJdDrKLC6+LcN9hYDqh0Vi+NbztDXHuTeSiaGf
F33fLnNxFGGesQtwkm4OVJlBgmxS5c6T5Pfgjqp9anpgqKPBUJcu3rsZgV46sp3Qk4o3c6KTmKzQ
bKSpyqEZSfVhn3mzYZ8W79WAmoeOFuJzqU5/ODtWkJHtljire+QQ5WvmfDrjREtgFf87CR42Aqpb
9FhVt1aSTA1ir1Oulrzws5yKAuEHnPcx/lTkjlz9BaIcBXfCeIY750sd7Gvd1h2Gw/6TDs2bj0dD
HKHnWTR4L+wxymkqHoGcGW+yNuJQtXlXzWk1Q29APTNBUT+Ts5LV1ebo9GiNt5Feo8T23rtE8VWW
3KZpTDC6gbpuFKIwbDZm4kk1gsSFQkukqONZgtIpvOC4D1nAY3uJy7t2Js2GR+HfCuyNLZ0nboOg
9oFrwS+dYDt8Bg+7977offm6vYTPI3mWqq03uxWEX8yqS8MCSl4eEsojVsM7KaBlKSwWj5axOM7+
ardYYMuhZmteKaXBUzAOpDky8odSHqyobzd16AX+TPm3sp8QfZKpIzoG+8w3V+ii9nc52211LJjq
Z6x+x1YM1ZpqLqFS1t2uOvpNOtUd3cWhbsy8gLs59kbonUVKw1N2NMFeq3o3gQpnAoHEoOO2Wca5
nWHGyp1Fs7ru+5tLuqo2m0jLGUQmF8m0mtIA2glA5xpvPH15bZ3mKbAGe3epd6nJhRp9pXyffxbx
W/G/vi+7rRrOTrMz07kJESbZ8EUvRU408j46zdLDHeZsRuxBcyfNwOrXTytK32fk8YHeXsrn8J3v
osuCiUqzUJ2exHhFu4grVHWi56wD4YHHlzuBUVu0jbbgrt41b9l2/EgIKFhF1816t5huxKaifY7r
FXkYc/oj5kP6BODSB4MF2GoJ1o+kpRUkXGSYuqKDacMhILZVaVYCbtqAU3uASALebJUO7ZcBAqSN
Sd06JmNy9ukvA7j/gAHZc8EHTsexVzJwS/WQEgCB2wB6DVqynBdFKpMSKKKHktZZnP4QZAtc4e6K
1Dm1GFzwozMbEmACkya2/3CTceC4qb+y9WnPPNyAy5oTYXBERZFM5tRpWhYfuPOHc/CAy+oj1AK2
zuBhO8BaqXCj/d7YEb4qfKtD9yO0Ct56qxPWpWKT7rMjOKzDntwLm2IFpi3werRmxHZMADHij4fL
tc8HWi89JyrwmMfYoRtXLEfAFDZiJZliLKdPECE8bvmMdhjITKXz04uhRouegB6aEBwT25kiZ8JM
YDbOD7K+7mFZuQXPV8I7HjKesCmVa08fKLYfi3rHTSX8GfKMA1HkeCI7MvX1NkD+NbU/EmDtUuol
434pCkQAdh34RQgXDKIXvM84lYNzjqNw7g75KwgIRgLNyAmEB6yJ9gGDqbYPA3jLdEFcpiRNJ4cc
BC8khxiOjtemNjEEN9Wf2rdmltgSnGUvtmDgaK2tEktG4BIPkIDfssMOehOZfm4bhGy8rNEUg9ky
cZil76BuP/qwk3rSN11DSaGUaR/g/vCFCDfDgct7fcxyODCa7+hBHvuniQvDxNs2+gij1AHsi79Z
/tT6fupGBlbYHsF1JcT1qu4A5lC1LTW03+O/iSdH9avaSYIwpj+215sYeocebq0PZU9qYXGJ9aPG
Ez4dAayYyzzjgg7kp18gPXVsFp+zR+b5RQdgeVQFMOsw6f04c1/WUWZimYI4SRGVwX72NaX9KXE4
7LlFgMFtQ6n8FOjU3GasCpoSF/HER7FfKb1RfKoVXRKTP4Pibvm8tVAxLa3zs9yt55abkodeSXZO
1OU0RC2Xku/wyslDryR/lleQn3nlfKdVXdp749exW8cmYpdnfjm1X/zNkSw10Wyk/zRRXnZPtKPC
e3WmsfExayLQimbLYe/V3DlO+bwKQwHpM+EAgryVPACa35/vgF0BN1Bi28BSVvih/thBOvyzK4mC
GlhAwviu/JfqPuESYbOA6XkExwR7DI/gU+OQc0/I1MuBpf5ZgBbsN9fzDn+e/gmHmi+n69zwakM/
F6G32/QkGkKD6DcODgIwKLVD3DtwF9wClu3QBo0MrZd1wveQHPbMsanuisjrfmiag53w53qdh30k
YSiSew92XS9wp1Q6Nv6KQlClzN9xy8o1/kLyTNCoE6PQp1pqyN/hPsEv3L8zqNFquTbDxK1okfMC
uUOvm4gfOrLII2VCiBxp08sMLl4uFo+EFFhTHECwUCjOps/pnlW5K9L3qIwrJuHAWVttcNeEHZoD
wTo/1I4OJO0+xomufdLpk0uFfrZvvvbtn4lKyuFMQnI4nqKfATDwTmrgtHJUWIILvrZBWJ5FyGc6
GcQZrNbgi48sPeDxUw4NgsC8Ur33ffwG537lY4THhEo3krI5mUtq2rrmcB7fvFEIlO2bNxLCf/K4
/NQfh7ZFaRKq61s/SwksTc9qN//oED+9SFQOQsVAUyKuHGmaynDD+IRjrhUwouKYFDgi2sPDMZse
dpMroL7VuVCjZnaPj/Ekjg4JaybjLOlrJN74Pb6Doq8SDPOEsvAoPQMxvXY5NIpr6KVtUWqHmIgV
HMXDxABDP2Qjlev9i/kchBQvrQdHfSgYIx9hlzJ3YFrrWmlGmyuFnCuWWsoNL6M2IvG3fnAcUB42
vqq4mPluI12wDheKrm+mbUVZPO6bnT29pAMFEX3VAjpRipWHcEYQGU3ZLUEA84zBVYMxhQ04J7Fr
uIQnUcuYK9A5QlCiEpEajYSOGJsnLahsUfnGiT7mlSGo8APzujCcrURFdoUr8NpaWOBHqOLiqcWa
3HybGBFN/Qiz/UgQoVuROWR6WRoS12aAsN5cbWFhIoBJWBBSirPuOoZK5shEWgmBNg6DKVDEBiR8
d7G7sWCuXJsYBfOPe42+TLXWZrfVRkHsqiOQLXft1sdjfnlCcMo+d3Z7A00gvcfXJ9WCUkk4eOUp
jYTU2kb+hiQtsAHfJT8xXAbZWDaFDXVmYyyjrC/TlZuteuEzaZfk4QZT1/rBHVDTH5xtRacqClUT
9qastxSIy24PuDfdCSIzgjqFsYGaDiPl58Btw0cH492mW31sU+fn/TPjuLVuA2vth2bIcalxwkk/
mCbHDib8Lk77BKKS3fb1lmiBJE/B4DAeokvBVHqDeG0LmqNiNReYo8Fm2sJMCFLfYv0FDb3YUqw5
mL0yAL/fYLaU1b3skxO7N4gBmkumO9Q4aiU5TjaSW7Pm83tQoc94kHwjMBwxwAZi0i/GgJ9SnjN1
7fQ8C9y8NuuxMyKDp5FxBK/atpIQjbMkYmSmeQH+TKqp5sqC5ftZvm7hNoO6OOm8jzDIbr1e3PvB
PqyR3ToeElgirVdMK4WoA0jh6LZqBNdTC9HyjWUK1D4gVzX6XVvTBBUc+lk3E6YwM0qOJPJKcn2E
/H6/AGOuiGpPwA+T77Dg/x0jYCWolc9U6HaNqEqdFc/5hx3sBTXlLZQ/PBtMTuMMYc4SK+RHdisY
MrWEiWq9PaYpx6Hts0+poLnzO8ijW6/zwgmelmGirM/9Yb+ArmzJKA6RQ9qxEuY/Pxt1TBLvyxz2
EfXI29G+vgi/zvEWve72nPxomv3YH9BFL44CTocrcGixiw/uJSLnKU69RE1FHkQEy4LIPRw5r0sQ
H4RFAVFEOd7yq80aHdMrpBu/7UL57kbzZvUfAwFEUkoH/FTKmouGo5WFgGEN4HEH1hOfSEPpZohV
Q5iLmBErMMECqWH6QLlFAG77vp6CiCYsLaR0AmsgL3OSekWfNXgopml3OyKPaZ6bBgcHv8/vbWhH
UsTrPw/mWhKeQrT9JplPRJP4hMQWgmto0c68OvfV+sVFChbd6tdE33fcNaBUN/DFrCqEU+6dshQp
tSNWeBPWRrG3Rmn1ons1ou25+qyLIy8UfSMA3oU3M+ZS2Ktc7YA1tQrXzo+y6KXuI4sOSV2KOFF8
OqcQUV9/yOqPej5W5I9Go54EMnfufK2GGACn+J4CJfIpyuSIMBMJ414qBcfZ7xGZNZe+A7P0y2Zb
+dkyRbiQr0TccA4WVS0zhxc0v97UQBpXTn6HhYCpNuwbsFeYKXPHDlAO9MOM5N+MHM/0GAVJDAeU
Megz7Qn8nH3bJsJxOXzclwb5dS2HblWUjsHkrYggihCRtLnnNF+QFhRl4YgNthGTHb3U85Fj2wUO
8JoQJ2BWACOF9B/r3cbcOCL4ms/0oSJRTwk5kSEoapW9qedvUGQUuSRj3556Hif5DAeF2ww0Fk5q
UHlfL1ECbFpObcpILWGyRp+mumTA2xsjm1yTGwTka1Sy6ps3oc5U600VbbNGZ8moCAyzyk5udfaw
UL5ns6rZEVqirf5JLbX2u3q/61u5muZyc0LoLrSCmwAhBhR2VClOX0dmJlFShjUXJJIVRlyEIUs4
j8jDgThXzfsJy0OXtQDtr3DDKNqoqqN1drsJDRH13N2O0S28z8RpKiKPk0XWHWgHSBN2tc+Y7lmR
2eCTMvWkqL6FJpSEKvt4BXaoCjwhmJl+zxsetsYEZzMEPDe9urb8hpLmwlgYQWExv1o00y0iKIMf
9WaYXTbNgnx7wFuySLAbPCjr77d183Bux3VRfAIv5JOLY4IiEw2D5KX3lbZyWaMu1yu89JhUlpYr
lYSPuke1zcRmu4IoCIrIB7lOkmrpZvXzXIwdE90B5grkfRK/sHXSObpY6zZx0VW4Y3cbw42TOQAx
ChaGti5cEhabfgdgqPzEp+wood4Dkh29SbgdoY7MVUCTT43WvW2rnRht0/3RKDStn9Nwy0vwiqwW
nA1oszX1i4vsE+wDnBXVwlJzFvBdumdEhrZaD7P+I4F/397STNRN+RrV1tPFHzc1xQygEFNtLiHS
WMxKwEbiycr7/EpaYhjXtHMcoYXZZKPeSVYATFfLyGo06sXASG3oYQfPHEQdTUCEda7rq/srlGiS
fSfhXjykJo1ppBUiC0xja4a64Cs8dyVLMqLSEqvxyjzJ4czhclDVwEnWfOA+nj3oZG8wsPrEeLOZ
YR/bkV0A90z8P9wnIZyzPYlg77A2PbsagvsNF8mSMVp4TA7ySk8/qAqkB89FyO/iI7EbJsk559ZQ
IOx9IWYptP/tbYmJD4vUG3PKgKSak2YTDYraF6nkwxaUDPxtRdxEODWBuRNcP/hgfj7OTjWOmSEM
iL026R8GK5I2nmSnaZ6IBNr+wzY7OeEx2+mXBTmGt6J2uGovnEFVaphdb6pqFcDcfMAZouzR8Skw
zycTVL14KhfzOOZjMZkfALo3M9EP/rjatxX6BNjyCdYUFeK++fEqPmwB7wF6Y0ON3dIw7+bTh4mD
aD6OJ0vAzNwcWb0UTw4zU3QnczApnbdVdB0qDAfMcGc9iOlzbKyf9t2H8AJJEIVly0RyC5AiIBnN
Ytvk3rjsQMLXmmMwW3H0EIAP8quVpKP45Mx8vEsoxUeW8lay/wpNDj3Kn7OTuY1WG2YfS6Zd1rI7
/xe+O9D4NSWP9UszQ28f+fi03Ns/R20T9VvNmSOfZhzZxS6A0ACmc701b1mOfA0iDPvdrCnLAlhL
37zpwHAych/HClB9I96SnYjzND8uP0ND92XzDhLXGwF/OSUNgA+0C04ykuIVTUt8eY9GThZ48uQJ
aRJ5Lv/PatM8q9/VcOmjoKEWsyxL+Ofs0SnV/waBhdDCJJqFqYsVQiMbBSdMjWB8clmdsF6E42OD
UXQNYGhhREzH7ux97k0ajO0JtdckRgWK58t6uwENhR2g5NslDUg4HHREyu+KkezUs0d3eiaOHPvV
MDsw6KPbuRsf8/lfwCbYzAEmphXPqxpdUwgxikkRQVVzSMb8+LXoX+WnRf+IcXxLvDpmUQMxmbbR
Sfg/KvyqXtYQMwjQWdPd9c1WnyY8CmiYpP0/5GCkGpGsAU4XPgipHFMWdLuYUep5Wj2z6zqOmz1s
ZCqFY4q6GOqLw38M+7zeoN7HbCnT1m6NTgTXZldBFKhT7PGJfSqjMm20lR4R5oeC02+fZbN78xFZ
/uZNOK6TE9od0ZSYF4brABbb8Dazt7hcomn0Swm8D5YmWQprwDucMSMgb2o4Di0+iwZRsKZriehD
TIDeVtUa9CKSlQ1nz37QXMz94M1A30Y5tpAUGNoadlw7KF1vvAV4+r0DFe62XuDHrYCKQZv1LDH5
pKR9VVW2ueaKIZV54G/ebDf3ZmYx0hNVmOZyxvNPyjibPXtebQ1ZZ8cq8G7bLKdqfUUHN5nYyOyb
eg5QbcoH11yM0e0BR/ELvIHkSAXQgxZBDrDuzDbxC6uyLA1mgOzW7i7JX9EGVMndqcWhBzahdjt6
9MiQw8vd7G1FSbVv1m9/+ZizbD+q23ZXPTr79a/4AVEDd6drrcBUxlfuYJGYF/kdde8PHu6j+Drt
+ebSGD7+O+xd0j1GDaiMoL4tucXQv3arb7AZ5p8bZ16EQ5w5i4pBVfXRFGwI0IaYXQ3+zs9ClhYf
l1cT3OItGYy8MtbPOrD+C7Ie8x18uskphpR7ftPdsQoB1MMrdOnDTyowRVm9CEUcxfLBhTDh75cO
yYUZ8gA1syJAEwOz8Lyerwavs2UFgN1SGkQWGWvWNoDGgbHdFCn426AZ0W9RPL2Z+ObekBF0OdNX
LUdZRHsgGayxDyDTZ9w5oIv3TPfMoicwcpagoFHc5b//AD3N+LMXz7KX37zOvvvixavnLu+ofzAO
RbTFBydwN45p1ThLc7+2ikMjRUlfdQ2pZjbcd9iRq2UjYlfVrSmcnJA0BhK34XV5p7724+1adbuX
TDP92a5B0E7aB/YuRurTbIxvboYRZcNuhVznXh2yFKeXAuQhIxERaZ+25vZjlD/namXuM1MmwIUK
ISL4S16YwsNsr087ikqueagBk0PuhYaxXJCfp7gFM5YCWyjhnicmkK7dwOLn3aDX1QpVpVZXlNi7
OskNB8qT+ZhUmIHLdhYmX5DCHFVvVu/V89cuIGksYU6o6Yxc8T1MEgtHIgP0h8bjOSbXlOd0Qj2L
0jv2um/DOws0YfQ5VvOJXzXqAq3i0r3ITmJNJHkg+cN6gb3Q4QSBaPWXvxbpzCZeNJEN/dXRUzxm
WJGuhHs8FOtJYX0dOtPvHdWtvD/Q7c/pzXXXUqYmiZPkv6u57P8oNjeMCD+QPC2MQ08PUnakg+mQ
KAzlg0E2LxxfcSBApCMuRK3pAYSshD9NaKs0J4rt3z5kt3qeuk867b2dC5CM7aJkRBpuqy8D7RfD
BC5nXyBSXMZz5aBSgDhpExxZt7B9ysMI70sNIJGL2U2oN8GdWZUZCMZLR9MRXShEWaX3Spn8ZbQu
kxj5wvU7skW6+Fndth8UJ5hJHcAHZKzeEziGKxwGjcWJxQcWq05yjO9NHU6QHxpUNMqY3IFVySAl
NkmD26CcosGcNTDXnvcLwGhIZMAS9wCwdpycJSFQKAt2fXF0+nOV+TyeRiPbwWfa5Jp6JnOKpQOf
I/KBRXGDYD54es0ynTwuHzMCsawkqwmSsXf6sEen9XBid7Pt8JLsM+qGZwx0KGA7TH+YCihkNdje
4C0fxDKVGxSRBymvop2/sdqvnTiaD9iVtr4K19+cMDQYsq5ylWHYrx89FokpHjVIRgq/T04M0OHu
TWKtElhbZEybxrroRka+BgA5/BznVYWukVZ//RCCPDNrIOjtddZ1wMdqTMpWk+bjaD00OKTCE+G1
66EY/hiOCdwxzBMRywbWIPX6vM9Y8MAW5H2y0IPR0ELEe4UpnZKN543a+CQDAGmzr4N6oqiIq0qL
UNOWGmZBfesjHtVXLUMTtqD5BGF7+myZwnpCOwgsmhjpeTPTTDSUnVMcRYV18hBYBT1WHSEDNpON
e7SizgYIkZ0UhBSeIeXPZuuhO5X6OuKq4MFFJ0ghyvixiy8Ze/xzkoMZFdVW1g2ihk1LWazmYnXp
APWWj+a7hJzxI2HRzi9/9pCRCOm/2gTqWdW8+Xah4f5VelD++8ISbwKVsi6NlCAcyWMTezqWYojw
ST9GtLaQk61qbagsFwmDoKyRDZJ4GLJHWIiGS3/zJvKMJIGhpYtVJWnDAFkdFyVO+4v7BMBxIHE6
Ch5ym+pyRC2b/OGo+cjNSCOOoqjckvkwPy8rvCyj+N2Q+Q7hzUzjrzDSg8w4Q4qRvcq8k4okw9Ic
TFOB1Cq4vvaFjJBgIox1CpnId97RMFGeOOMmyBN0flZgeahWdW2gN11itB8Y7+4LXv7X7HWMinmq
VRPTLDhjit1QhgxMjpaTKzggqVm7iGwhIhddUAZwBWMJy8Ixfen57CqYU9y8+oHyRZefVnLFvOSb
KQBlz1MrkRyDki1twNyDal/Jt4t+6Spskl37g8qsrhAnURlJwSit7KxtaMlqasYBIalBA0DG364A
wXSKaWjBnPYoCMXtSOrn5qRTTkdvtt6BKEF1GSgaY5HJAxe4zrOTTo2ngz310Clbp1mU5XK3xSuM
8oOAMyYgs8DMV5BQs+Xgf4VwI4ctOH7+VjgtspPs7MBegFskP6H2nmS+b1xbJIOj+C77ynDguzVd
x94cdlIHPT/+9wgHoOaSnHW7zjSp+VKnmpPb8Y0GKB58B5pruJnVGCHNtjp3L/gShnee1X5gMBAe
k8+EFYclYRicF/7CA6P82Ja+4L2LuVFdBDzmMj1qjFbRGIyP2VM3TMhmnxghsSs5hy6TTEcABFH8
CJ5qm0DFH55p3bxxqQPcAAn+y5wrQbt+6ie0qK+4ctJwQ6/UV8ZQWx2JHuDj5F3wfY3EXR/9eQ8g
hGVLeBu7NdpdKaMQmHnFPitFytfmP08Nm/RlGI+yF+xMT9oE2rGc1pHio4KiCUR2xbH494Vi5cVk
3p0k1NnStO66XkInCiVz/2lJpwLE9BakLfKyI33IVuya02gjcgLdziOkjjgKl13UB/O8tfctsKmI
yKqhHfhzOM5fRuwydxw3dhpA1xEQcTE1QHn3SEAHVACZxBMrxggTwXYOCd9Ki12jkZiE9BITZA2W
EJvkUTMgERA61tGlgeCAR5udyuvWlHSvHH663Uvkn8UOc9OrLSKEuJNnyBVD7cA1wplYO4Qp6Kdu
bzy1jw0k9AkKmPN37Q4xO3A3uJwb7c10DnFQ4NenHLVZpzQDQSRCE01m6eIQ1bHHllqudN8MShJr
7c+xYEEsxff68e1dq+cByHvdhxmS6PFYjYf7H/O/6kMQH4O2t6Ca4h8hihTFJ/JLih8EVmsR0hYX
MejcIV77K2TY5avdQmI+KQBS3Ik40zofEz53DoBqhX5Zic4inBaqCbGQo/X9CC/q0RsXIrR5W3q4
GW9SGeUoQTvupKmN1QX3PNfIyy++fp6XZVm8eZOOfExrPT1acE5j9WAwgg8/4gKLAu94HZViGUpU
+I5Xur0Olll8hWOu1blsEeCXEUZBZAnoj8uYGgoZCT7YoXbSUNTpitWbKrhCH6SEXiOQoeU2r9GX
oK/f9oex1F2EqIcOSborSpQQIXRYKV073UGo+rXm8yVNQlJGVBDEkG51PnE6ek5M5BIEiasAEjCn
YDFEdjM12y6FE+eZBL+ji4R0Z0CnBXKNXbPxTLx5g72+eZP9o23pzRsZgnlMUZfwEAcCyrAV+LfK
MMwDi1lGsdj66vCacrwLRzLzjSNwV+3usoVbZcW2Qy16u3HeUnrwig4230X0YWaYf4Qdbu1CnyJ1
QhA1FduBK4xh6m/eeMsAzvCGrRKPbtHggVS6AEyqKS2GR8oijST561fXRtitSJEp+SU5+lp9kwro
dzcBkiGfEMokjlwvZmhV/Y4c3s2av6ubXWuGSrBfdkIC3Ct4CeR01ZzYuHoXigATSg121ScXQevX
TWBpgNhnGL43b6SlN2+GMLNArukn7d03b/zECxtcVLwWzbwjyi31j+gIZl3Q07q+qsivt7ny19of
mmzHEbBFFByPntSgx5W2zGtoJVeQn3LLp+k+e4pqO3kpQaMD2jO4fwYp9xSOeFTFCOScGJ/bavp2
U139VuWSMCVghOMsDwnasJvjcPSg8JuylvpoGN3XGoEzYaFzGc+RsNMHvd40qpCk11bawpjd8u8j
VER7mcDzPsBSmI3IUAik6p9CSD/bRPpJLVjfkkGyc6oKvg2hE8gnlxErW0pxcDYQ9TMx3aFV0K6g
W4IEMjSoUISP3yuzUnIPS1vSvhLzatG57D70JHWZezBc78WI+3wyCzvWqYmj0Ly7s0tTDBfeM9LU
ogyxAdtLhawnlJwnKDPTPEVev0A0EndhQIpjmFBQQlYIl2JllMtq+/+z925NbhzJmqBe1sYWa7sz
uw+zZvuUQh1OIkkgWUXdujGC1BRFddMORclIasQzxTpoFJBVlU0ACSKBunS3+nV/zD7uj9jfsv9i
wy8R4XHJBIqk+kzbjsy6icqM9Lh5eHh4uH++YbWZeA1Dr3yVRePWqb0VtHy0ZG4Sulkql6utAOjk
k5QPBWmdADSGKoKu4I6kzj8zjZJKYVqTJTbDVM+oqG5SRDQpt0VvUewKDV+J4X1YT2PbktNqdhMX
mv7dwHiCyJfiDsdoQznvvmpFbucQlO7eGES+bOKFdrtSzGK9n1gL6AaR26af3v2G0fgcsAWqK9Sj
myDK6FpuBEILRVM3vn7JhvljXWxnFRP/tjgbNt4COOOud5x+ArAAcegzGk2/gris9uaBq7DuGnJt
i1jvyG1DTL0mgrHCu64lYvwh3rqspw8uG50zUxoC+H6MOtWUgo1QIY+GnUbW0acapgeeRfZ9o6VS
xK035Tub50ohLNZ1LLzc5PVyP2CsgKb5sF3mX+rUhqZEk8TJGb6I/BaUh06eIEyiBzrl9tS7wz8t
ziA8CqQ7Zp1qAjk6SHrzCcpmcbCJ9cQsqAjugCPidh1P9W22IOOeDAK5oR0R7dWK9pky2UECicHS
oudaDPrCPpDFKyH37E4D9JoqcNjiZEVYrxA+uCghEhBUi1NEGwb2q+azAfna6Yw4Ao0MbNIApdRS
c9h/ncrGu8nz23RmEbHAw0KiWhHWLeNVMpKuBtH1yCC07wYhuai8xlUwlpFWKC3ybLMQl2Fv3IQ7
ok/BQnZptepwmt0IUfPY+VKAdooVNEpebE+lG2tfAwbjx30X5VCulZij4CS5UKxQrAdzJVbmWls2
GgdqQxhlDuBV64RKeeAoBk403s1f7/xwAIuRFBy8trLxYRB+e7adh+IT4ddHYjzV7oKJ2W9sHuXs
PQ8qwGs7jyoWVXTXYaXfRALWyfwSpkunln+9JCyO1rON6yPADwHAghCGcIiy/Y89zYLxgI2yu5Ye
HZaEwGU/2h6Pw8jOlpN+G1T3YShMgrPBGYbggvwgsYdGbG3GKcT9SSTxyk7jvkQSwfuR/SGdRVWW
HcH03H6IChnWU2I0k3toZi4sjlXiHX3IW2VcVYiY0uy8K912+XPPezd2k+6BUUmgoTMPWJVjNDQQ
CcVUM0xM767AJ9HPdCtcMjhKBuUMvYOHNMB36oZVtNLd7OuBEd7CGMEZJlmFWlwNyvWoCqWbkagj
6VM3DLa7Um/jiKKMuRcg1+yZE2s3K+oVoCkDzIB1dQzy3TbcTDrQYTS3Y2zb2PihyeK6L66VbWag
pvHMbftGXhx+x87UssZ85UqgnsmwROiDVMpFuxrbbWJWhLqgs7bCM+G3iem1x02pzb50nV5xQd1Z
f2UyGNOy1R6yYod2v3OiItWqdzxjL9TwoDp8ztGQvqtsJ0Cdvc8F7D1PxAmV69hXTwgcVZ3TAJ7T
3IzcEWfTFiXcT5XVGHAYT3ZlXxnFM5Y2zfFMdRV4/0wZP0liKSnpvV6JN22OvUbLiB01Au9Yq5P4
cVUxZ1mRvbQhWVLEIdZ85L1q8YJ1PokMs3ejp4s7ibB2OMT6XXG8YPdan3bJgSJll2ezyV34ZtuF
G6p4fqA7ebQHSgYogQIexybtCUqiu0yH7QjTC/aRI/2aOLZX5Oe5evZC+5IsC742mwiHO21BQP9z
Oj+AF7Z2QGHnHOqX2UYYr4wDOWTmNx/W3249mY3TMOo52wpYoiyLK7mL6d1EkjNFkq/cF5JS5oUY
yOto8VvMxBSvDEGOrPWdKPNSYfLOJT2d3RrwjJeAljrLWiWmUZGN6FzU59Hkatbu4Ms5fL2f0CQs
HbEKQrXMy3ZWQz439f92gZAaBIvEXyKb04hCqOQXotiN9GvWhGw93KZj2Q/fO/PELa4U9w1oUwBS
SmBbyfUwuWYkvqDHQluiDum+yb0C3jRHBHMj8V9Mi8KGAPRr0iuPvSaWScE4YMUmrcE8VbckAhNH
ik1CCY6kY1QtDRBnHGnETi/UHD++DXTZJjU5yMJtFNR+Mha6LmiPeMGF7xq+46lVCxislHdYNyeU
xqTnNuTekacVc44cjXBo8Y7pWDiMnqgxn/CyyNeMbBiGDusmWRTELGYWRypzpiIBN1PF4mnDDRdC
YXUiPBMBITemv6YNx2l1CEsbxvy7p6oITm0zCgApVjbJcmyROW774XXFZCrzOUPtKJ6NQUAcT/um
viw69kwrPsYB8mxo96IlLO0aCJpbbZczuw1rwRp+em8ENggx5lpSDBlfFAKQyEJhymTNdGAXTlc3
OWrLg4EBWz2GB7ACTlLSuQGnDeHz1WCLtFi8m4T70HOQsO4i6msZittE39kYWnY0pKSxleGPljSg
aiVrdjOomaJaFGw6lCu8QsMG2W2Af3mQM5aEKiL+8nesudXx6adfwDQQy7ggny37pun7ptrwoHDv
N1fyXsFAo0au2phVnAnoCyhUuXxpFIwphAclDsNKL418cwk5ZMTANUPDOuiuMWjYWJtDWGFDRoc2
G113LhVdMQco7l2G/J6xPB00HfZldiGKS8KmwBQaFskOfXqVsAH3YvWGr2ks7DvaKbBGvpr/lrKZ
iKCoMxRzdWGllsx+YpKMwEU3athYuCMv5t0UetzQcqnK2htNAzBIHSAHImK6yQb9nrZT7G1jO8FN
SR6+Yb3zTclgUUzV8b2c1hyXsYHgHAo/RfQjbgeCWxr3f9D+saNwZQtRMmiUQATZAeW2NO65DTTU
G0oToq/48YLMwC0BuVocNUDKKZ6A+NkKzifWwkCHN5EmhKsummq+qOaz2mEEcl0wPCMSP3+rz5fr
Yl5cgisxBQIDRn053QJip/CMeEgJ/wBuUSeoNERLogNmncUpYsCWb8jhgZFHB/DtQF/9gCszf8pv
IW+LejpA79uZaO28CpP3qClV63q7snknHfv1QNI3iQ8nOi7iPnK9xQPGuxP+0HXKNqd9AjE1vpKc
GRFey1DT+JQhJ1WLFeCw8iBRfygkVTvZmfaCl6TM3wsv9bjrQmTLLvOkyIMP7UWsAK4RDIZXv6gs
cHs4VNtpPWaaoG7/tKJtGMdmIHKc1uRU7nQawrLtOpv1Lf8jLfQIn+PGwP0u1z4je8HboLapI4Gq
BrQYBuPgiRmPSSxG3K5HkYcNZWGLj5aHF40R5G4GC/9ewmZWaMj5re3qsdzfEetQEJ5dF8VyNd+e
q9Emh7YgXhoEQbFWUgGKNpShiuB2JFYHKbdjJTDGvDxNZsZeVzuB2ywTik6vK1ZeN8tkWm/Z35ya
rp3c9Z2PDB2jd11xO2obJQPGOQ5dNd7xZhMAWowxR5iXhD3voGThdi4dJLRpHDPee1HbXRSR2EJ7
+8c+pLaKbE8vqVhnjvf3AHWwW2S2J5Dx0o2WWtZdVhanKvDFhwTu7VficOzTtsejPTLaOB88a0iF
zbEGwkGYzs998XkbqJYgI9hbbgDW64foOjwaBLOTpNUkNXU5ypJ244B5dMQ3mIJV6ZWyRMc5CGOU
SxP+gESGMrgGeLh40JJvhcv1ZKXvciHq3/FkDbAQnkwXqaLcdu/ZBK932R4y4Zi9eGOgFK4nmgUa
PTggT3HUjQH8Safgs4ETeDYtQbEup4na1yC3Huo6vDPp1M6FIahDLa4KjtnCa+4N5I0gvM9iXl0F
n2P1RqJpDCqUmmN7Ra7RTPC5e1VJz+yNstgwYgIpzG3kLNjQuSpIQEHE8zEEIRUar+bDgFBBdzCH
DirfMt8LjBdGla1uYqB0pjOrfFau8dY641wVjhcB2Q2i5pcVpIYBaLbu/QZPUFOJl8dpJVI3+bdk
8lBFAxdmQQrmDTO98VzrQFgRU6fmCI+UPtcYnmJucVKA4egbPSK6O4O3HJXouQdsy2JRxqK+NnGu
0w99BS/VDLFvq1Hx4u1MnBUkkhYnERIvHF0LfnFuoJ2vxriOGDqDkea02q6eBlUokpYIWUCwKKd+
inob07s4V5WcYpC/j3Necb1RZ6G14b3jcljeO4oHEkDrdHnUD/QfHHzXHTYxuE4x4mQjig2pvmcw
AyjwzzHDQIInHmcW4MgAKabo5CIdy8BH7xDNGJ4m7VSJ6amcVkGgnr3liIOoXJ9Azq9c1xO4h8gq
Yoql3k8dR1ve4cici7ujgwFIiqR/9hNnqDoxSs+5Fzcawrvw6NXO+VDp+DkGWKjNRm8zkN5jY7I5
aF8Och9y7Uc3V5Obvkn/bdZY04U0TOlqhZcuvMVRFJnq5hpDJ9YFWHtLyqI9q4o60XipggZ6CQ5m
pZqKy2LtoMdMzuGYDyuzmEwvZMelH7L+FBqCRnbhuFyzQYNib+HL+1q5B0jLZa2eixOGMPzbSfRt
/HL8tR4jne0dGWZM+w7G3aJYnxc9DCKBg3Y2bEqnborsyqcea1R8McdK6ssDSIdur/6wkfEY39YD
KtzxzfHIOTjyXPP1K86PHmuLNxiWWOMHscGLRR83j4sI/KAc9g8acwrulIcR4KimtIchc8RhlBqu
fm6HC+Xcx8ip1QkQ8zBduqsnt584Or7aobMtI6yM3s21XVaij7kzZnzcmwPH7e2aKdyGVxbGc/D3
jcEcenIBtqzUycJiViaIoDbBXgEfeleQTfd4zmSTC9WOkzjNIXddeOj3IqkWmeAtDnsaRxENIzqI
0A+7uA29WS0/V39mnVBH1VYWSHtZnt3Q5SwxDf529tIaIlggFN0BgCRXYAfwoD4eUgRTtZ4V6zFR
JXqiDa4+bvXOcbUeE5wybgEms4AGXRqhp7V77uLTQIsTnTHMsdrGtbScfxq+yOVXwUlEF/K2L4D1
NuSMfusYC1vPh5HyeC4xT7I9tW294BWziY/duqnL2nxjSumMEdFoLY2DDhfRvZBko/A+iJjNGVbF
tzonbJDuqX8R8gDDc/z4GZRrRdLVhzQm36UbCddpgPFhRkLFZHAZZ0S1HwWVbpRfPAr+admowtT8
7JY7m2mjWjOT7Vyb7IiYB7wSwy+3CdNFE048O2KYqZso97X4NRlPfLgdqZaeQ/4wTlREmvR4zFyk
Zk9EG5xjJno1xVfrSlyphzxhrnP2Y6a4QVQg9e+am+YQRnt355yQ9ZqA5bGXbGb0HD5x8V+32SwQ
yH26uR7xt/rv/b42MlT/6ONGwbRwk4jv+XF8UiX7mS05ZcvxSUscZ34xqcc67+qwyX1nV4RmfJJd
AE5kvmWV6Mro+o2OSmeK5zaR7/E4VeKNtWIXvi3UifMsK4SoYZZEcV2SP5xsC96WwhEdITq01Ucx
dYQAB4KKi0EkwTDVMwRlZttbnXeajBfHZ6QICf3HZO125iASBK0ngUahV/aTpnkwEjHnHWfYbqWI
ukrBGaDRstNqBDJJv3n7VR1O075DLctiRgQ8DXiOrYG5K8xSthut95YR96zmOwbfkLe5FFrcWFw1
+rTVpiPSwOcV94OU60jvI2p007koan0Lc7CHvEH5fGRYOvpVQ3SavnxB1HjxW2KANQdZ2fKcg1nL
gBGlRdaOnzIvMnIbYaUEOf4CgjJxGfrYJd3h6+XrJbgt1YCeQ7A9cIe23PSyDArQW92UUFavKH6N
VVsaClCn9FCMw7EwztiULq9vdgadwPrMPBn6QD9lrQ+K1drpoH/ka5mMUFOpz0eUvWdsfI1VG0sw
CNFs2wShpQVlFHb6ciMDSygxYJANELOVOkhpkEAXst1db8bjbgQnG7+Iqm1xWvC0K8/jsOThYc81
KBmQmGwPLA3veyEeXmyq1ZMNTEZ0W3RuaZo3wNvNFZr03bky/jcQgbGgbWhC0RYplkydMYngeenx
iGWDxESivxbTuaq+OElwsigwiP7xj9iJP/4xbwogfqJ032IyU0ophAgSuBkZY+G2ZVlYt6z7ph9N
tODrecG2WdUS8Bun+F/0t8PZLjB2X/2RB8zWvAK4SeDC5XouGhgUisgB/XhSYqMpy0Rge24N+XCv
v/vmksaaOshZQITQxfRXo5x6h3fSOmMRJGEwVMNNPJamZqlC/AOVALeI1HdsQZnLyHXYpaylTlDv
PqEtLcF1AkbBCfWhT7RfiycKbWBfrWP66k7UxcPAVQNnHQG9sFe38PoYxRxAkIieTEVE/3QLmJbY
RnmpnWYYqjwLA+BsVOHxya2Bfz0i9gChi4q4IHS78mOCyEzukQkRt0VKAl3VqvIDPHDNtngiAQIZ
55IS0ES3vrY3PdLoAE5wq2O2UzOuN+KZweSqXfRGyHeNSMk0PvIGUV+JlWs9+G4SNZJOXrYS17rs
8GALTpAT8dWMphXBXULJNMYhHZNfmNBB4yNt274DdUncU3zcClDVEBCr3dBhZMW11+LGaa/ueyx8
10ex8jDfndcxBcj9vtHIzdmyjYzvfgfhKolPvnk4AwKiM91+NDJZTqEcjuZK4NzpDN3IfhixC1CT
IlG4qnmPHj76w+PkD09edmHlrNY9SbgRUaq54221fP/kxYtumJ1F2zoZ2BTsAjZadn7jmtpNDia2
0nUCC4aiiHiv4F2stDG4BkbkRI0wZZFPl8VV4jsHCkb1pFpbNk/JHVknSBWqt4JhC1waCk09erp3
0VTgYao8qypJahE4pEDRhZHSXiunyOrslK1Ez9aDoTxI/OaZ+3fM1XhVrd8YB4IuNKOryMLs1h6d
Sc2JHYtZ7jN2UEfrhZTbdUzQYBKt5a5DbGiPw68+FiPWenGt1SL7V462XaUw+m3O4vN8y9Ps/lme
hWbiKSwB+J8WNPFl7i7vfWPtewYMA6JqMGGfhi6CH6Rvwi8Iv++EKEQyDtDBIBIKbsYWgZjuxnkX
rf4mtGryq8R7Z2JMSECUr27SOrmcrK0P7IFY3jq4tqwpm7obVavkx1yEyDsBIKRFafEgS+L5/GoN
etlMHdGdQXToyT9z8Yk2aAjFNzjPO0qx7QUh+dtuWDX5sGNWnWjEx7ZXUdr3yMYqnZu5iyt2gwH3
5uMTGRYX72DHc6T2sHp62g6l/l5PruCn7UUGGKLMoWB+9/2tqUmYjnesi6SNiyry5dh8Nh6nrFJg
qnLeGNR5RlQNI6L/zPSwiiLBqiF7pu78sR3f4UB8duKceZu/UeU6SrDOzyt1dL9YoPoJHmoIcrDk
lEvOTmqgkcmRQC2zsu4cMCSucDrWZ7fR6DDR6UkziuNERuJcg7WuTtHA/ZY3EgFjoYpspxfoOlYl
C7XHLgDcT5E+BfX7LOkC8hwV7XYO+FYd69BmlZqEQOxWfah5SQm0emwxuDtaGbfwe0rATZbnRe/Q
+vsZ0AuxMB1axwaDD+4hQ1Wf8IgIl8jT74EIh8HAdiEnwewGUKYnEJPANTC4a4BCEdDnY/gOmgXv
XQuJGKbxhBA1aLj6FJXTdzuJWIzhCPuflufLih1x5Lem4cb4aob8q1E40rCSYNHQ9CVfJp8ESwRf
dYxLxXhWLUXofQC6SoVIz2PnjXGNmwr9Rqc2xCahHihSr8PYkXk5LSRnNfTXMkQWbYDv/GEGsXVb
97oQHWhdsbbaa9ui6W9TJkRAj0bM/Uo3jWCpwdPVsdkIdVeMnCYbu5wR83PP6YQ3NlBCO+vKYt4Y
wlZgu3NPtsIW1HNo5pPvT1pmkOAUx854GkVBHRxWJcCMoaIM4g0XNQXKbgn00j9+MK53x9GPC41p
BqKLU0TSMQeiMDAq7gb9ZX8uqLHsSrviC9EDHgS6tVxi7h4d+EojsazwSksyOVQmvxajpVei1/Eh
lJ05Byb4S2nxcE+sNoCvxaTQjKy1C5O8c15jYLjq6gR7w9kXOh6AiBaQFkCk9P1PuXXaSOg1Fz3r
CJwpNE3gd60nBUucf+X6jDlVTUH+yBr8M1HuYl/xhBTvpi9D1Htuv6gytJU1iA52IStPWgrXZIsl
8d9SjhZvgFgi5cKV4EOwM4NOsdYBwk5S+AAaxux7zS787zSvDXOMgIHu8JLnrHn7ulXAet/q/D84
/8M9RDMMurautjc6fkfVMD27Scpdi3742YqjpdFFzu10K+x777abqKOfsriVqcRxx9BHt/2VH3uR
Y3AzSQDiPKNxdT53TUPWrksyF0WkSd7N+eS0bLZXP7zRGV3ly4iqAmtfHlkck/ZUb1cGGbqzhxnb
3Gm6fHKgyBFM9qYwSEEg2ZTKOqsWCW3b1Zk9NUk3GrTyLDl+QSDOmdtArW1BlATgU5wXhqIgYkey
rsh+VtSwdS0YeR20+NW6Op2czm9iBm/HzokeO7WB/o6CH4GBtnbxtK1F+mOrPO7pUSlVz9EoORxq
5103nMeLoo41Pgu9BCXlI9wWCN3hNmT7iUh0uaOOB1gHWiLeuQr+Yzr3nNvIJwG2MVqi12CppCQ1
xtuVpwqMCOKEHXWGLc9CN1L4Lgs1Ul7V8NYadgL/00AKOEX0RR9pv4ZL0QUN3qH3/EwsRsw7qNe9
s4aZvNO7LmcfFEa6IK2sdW86rdQR9QmgyKy3K7EZ2jQZXNbARzp+5dWiMHg05PVPwEEU46tRA3vo
SXemeOFNUi4Q4jzAwT6wAb9kZEUMDrbxBv4BnZgvFmNBQmAzXbCr4y4WTPHPdJhgWmMypqTE/uoh
pZjlp7oG9Zx9EwB+uN/5xewFAjeYYAIkYiSAAuh1b9qBqoJAFLbgAS5zOYDJvOq7UacyGfHuZUUU
mRm72yXlcCfbhLPlLdk+xdl2cYm9zV/9Lx999NHqBrByNxd5fbm8mr69/+rr/+ujj4D91N+DR9Vi
AeoKmDdnyZPFao5uQRMNLDQB2BdI7AF//0ym9Udqw09+BIpgm31xufz5EZOBh4mGjgD9zSAsIZ4J
80oF5/4b9X+UIgg2eLVlKb1jstZFVjf6V709VTIe8Os7yHW6N5rxpqpmSFF7MHj3/zoHySOd/G2C
9+YA3F+X6KcB90G1TbK3qgZQPdioehoPGTkbjq8QyW4CF7XLaukMqpqXgw6bgyEidLEZ1JOz4v3a
zw4odJ4i17h1tWKHYTYRWaAQlAJLAoY1D+ADxhEhYs9VVx8vjXiKuKZs13OYvUuaSHUSXax8XwBV
BHwQ1nMffvQSDROXHhScJgNgcPq3tP/XmxaoXZgbQFr7z3fqRAKuYTN1tX2vpkx295EdPxD2gDIF
j8mKR24vkMuZ4i2LmeQS4glU5MBQiJf0mnNqSqtlMlxB5QBJpZMOklzGSCtAVDBWVjwFMfPpVIRV
tckdSDRNTO3Qh6hcKFVkVrNztDn1K63oHJ2gCIgBxIjFI9WVNGD9+HOKvfb8Q6bzYrLeu/Rqu2lm
Id8ZCa8FLuPGJC82BzLUGRaKfuBzGDJCT9yVYrQtcL3xlsAOBHoiMvaIigKHhSohvYqzdLxsuCq8
W+1uXZBogpys8EknjjnafPajro3s4u7FFnF4FY2DoE+HSCSLXbt3NbCW20rp+20nXswxsKM7Ie8w
H9Lv2cxL1hAddKUOWP4M3bOyQa2o+KFZd5Q4D1PtLRbFDNC+kseb9fKmcWqEqDKt69uZ7+wuK8KJ
+aV+3DGSiGS6EWbA2nCHcakGb7viDRPyCzx8+vSHnx9/O370h4fPX4BD9jgZ3H/9evRP+d/u3ekm
B5C/HdARJuoAuMY4+2UBmzCksKeUu2hh7ICOAxYltW/C9QuqOlfl8pMHrOi49QAI6rDrVj7+ww8v
XqoWeCWT9HfDlHRyTE5xuWQtpKf+HR2f8MTG1GdVQF/KsbL7BI4i3sn3khFcSLnIp4sZOHD1ujBW
g7fJYMD1CVfAy9wHEgYiaZ4S/uslI2uqB9nx8IGwyF9a08plmESFe1nU04nSRAH5FV14r7VhckO+
0eDRjk+10qlaX29muTP+6pSa4vinIoTC+94gtKT/lPaT9PXrf0rd6z1ViNuE+ZjGivL4dDIbA0PU
SKMPdo/qquBnI2fydGCAWrpTgp12uqNV5rysJ3N1uvRP4cFZGgqjeU1WueMb7orJ56Rxcyk1EG5Z
0DXVK+qUFRVKr7tYvcnQJvd2Cw7IBNpxqrfs4lotCrWHrdWSU6rr+bYEe3X+tVajNhWIt5L0Hg22
Orx/X0mGIzt3UA692AAuSJwKLirwvVDfo5ILNErNVvdTQ4EjLDFJJ6bMIvwWWrO1TvMJaVewommF
RqqNg+PVi84u1R+u0MyzaKr6ot9zyz0SWbYj63xXNx7afmeNkSEo3TKUYOP3+8+kUbhcwhHlG3XY
6dHBIdd/i0M+/Olp7Ti+/LWv0FsbHiAlqfV3P3VRfVQ/GnRW8jLZIDITviN9GXTxYr256fHXrimz
C4VQH4QjhzrD4eGum+1UlB3kIIZUBo8axJunbvbK5XS+ndGbywFdimeNyVZ100XNF5P6olFHh5c9
+aVo9LK4Yt0AnM28ZnPm9UmC0fFgPtVnUj0QOAjJwyRV7U6N6bDjB/ZDovqane603us6f+G9WwWi
hu9Wrqq1tOpSfD647aCbIC5TXFvDYcfbxC82m5Va+LCkcsVu92GXvg8f3IdFDxeQ3gd/bTjg/VXc
s5frIEtS239/1RQ0KNFtvqR6lagrbv21oSB7KfmIYtvJ2ATpqoorZhv1B65WiaEGgfykHr+5QvML
TrM91rklJ1sUnboo/KnLwm8RO0SDaSGv+k5nMdO5xLY5vcHEXe4gdDUV85lDww1hUnI31a9SkHVv
rgJdNpXfcyEMqFCkot/EJSpnFNFXDhx4QbJVLbCWU8LqlEZPBBW7bXKHyY+l2oRfQ8txNINbUyiu
4TrVbye5Qh5LrUD01TulI/or6M3VsR1d8EVRPaFSwkneaRjPnWoc/8qaSirhrplI/XRm1K02gHDQ
wg7jHnu6nhrMEyv4x4xjTVPTMjPN1LxPWeACQoazFzEHM4av+ukJWrAS2DsqlGva9kASNkmVKEzh
dK33EKeBaP2CrXViQEpmQkbmaFQhshMMS4RmemAQcD0POhbDTJu7JnF/RsycuwJX5xqnMjeeJCcZ
7X5xSwH9wcXzuwrn9xXNrYJ57aOV0HSNnC2fddJwOUhoE0xPy8pr31/LAhojj+VnKc8SnYrcrNKI
2LPx57otdBjDRmPqnSxys2Uom2XbSpqIAa094j81SYoWo++iCAEG0XMNfiso7mLGihIaOjiK2yNi
25X9W4Tg7fYHaCMFPiB2lzwuhydRs4oeVWezGDaYQuzoNu4l4XzBRrKTYLDPtHects5nqgreOb/F
ays0ksIM8p7pNk+Gf1rtt3hrrOPooRDXvjfqVEr5BiHBLl4rXFqElZrg/tBvIKZ592qO+chgROAP
qsq4XgVe7ag5jahF+Af4ocZeOHAFSiVr7Iy5ddogIUPEfo6LkPV5jBxpOIeAIo6nqp65OEGPIq2c
ZyIfRbWAI7fbSb0vQCuQDkl8gtOEvapOaZzx6yWo8HlU7HFAPnzRiFjekV65mLIGPaHBSYBlmBGI
EsYQCgvRoeWpcxJSJ12lqji5FdXQ8EbvqsUxSZcFVxyupqsnjEgKU7w6alqrrJefR7IshU9v4EKL
h5xAJfVZ1QWavQQ91CZA5UqiKwiLug3CpFGRwyteaKHDDVdKcLdu1bqVsnIkGNTMJW3dNWDIRep9
bscAitiUFfNgUPBatPG8jHj8WQ5EbK0LNHm3V4v3NHT2ZT0ePpIN2VkpVsM430l9MYELLMwyRXEZ
UoyRJVLDJS9m4cL3jJVQpsO02W5YzmeLybViR9mzA4+rVIlysV3Yay7rhY8U6qQnRRUuUX5jjRIH
1K5Lm67s0j63CdHw/tMAvgFHKK0CsyTTAIkWWoKqgT1cUpjghKwQEMfEB84DZye4lCOg+J3uknXf
2sZB3DoPzFhgMihr2nC6S3aiwHx0QOIlJjOMGYlbbssfGNdbyE0CkXOTOWU5LukqE28dB9A4vVmV
xlHhIHCBPwgCrg9caeqMK/bNYZcsCz8yQZmjxO2rFanYZ6RmkM7dYnwV0Hvyw2NKt+hZ/nOZwvMg
fql20HQheeDqEqaOrs0DOisofRiM9ZWe7cSZbULEMcmko9xFhsxHYHkt1rW2Yuq/xUoFLgRsQU+w
NMKiSJGB48hy4w1wASvhaQw1ZXVD2cXySIS8T1ivO3DvruW9K64Z5+W+7bYN5s/j2C62lc9+ePzs
ZWMz6YZgt+IoLxicXoAw/nCDDtTS9++P00L0TKvft43v06g4Lxgs0vFktR7jrkiXs3pVluYSfe2f
mWLnpNAkZlcj1sMLTlfSsR4Ca1uTcX/HT2a47/JdNYhe2Uy13u/8y+DOYnBn9vLOH4Z3vh/eeeG6
aeFnizf4kaVnnFB+VLrKUzUq35bTTQ+cb5287PBUiQq6ggWd+KxQR4SiJhVK7ZWQauTF5VL7dOno
ELVXzid/Luc3LTAzpIK+KW4Ir02IkRLNs07h49417yUottC3Vn96EiCnGMkszxYQI70pFoYkOIQP
A/WRK48Vlno7FY86fDiKKHKvVkYDF3SurA4r6xOZJtWVVv01pgEpeiLubNfFLJNJnz4aP3z6dPQo
SSWvqMN7h10yl0r9g5u+7fIN6kYQ5w4J0qr5ZWFPkaAUKHVU34zAo7fbCj3UyrpWHNJ58vTp498/
fGpu/dO7yV+T18n9ZJh8mXyVfJ283iSvl8nr68NT+L9p8nqdagNOolaa6lRVY0IQNeMOMeqU80gp
YovqsujRF1nnyYufnzz79oefoWLfZ4CHpgMZMsZ4zzuelTXlngbL3AqE6jr9V3XUGvz55PXw9evs
6+N/HZ7cgxtsVeRJJu+rcfvH6yWei/m8OJ+AxuQ08JitGPVKqw5Sl1J9NS0WF9dESvctHaaZf4L0
+pBTYvJ6tesKNMWJBAMmQvgSbJRa9tNqDjdzw4yrwnt1vimtV+6dushnwck+0V8FE9eKz7gXuxqk
x804gkD2XEh3ohqawg0tvOhnRnRvLsabaky+3j2SKJPZbLKRKcaCKWqfAvyeAnzhLSheH7tSHj9N
79S/u1Njm+pV35TV+GeaUOSrPzx++K3+zhHV9Yq6pVbVGDxPA66ifnK7g44zfiAUhEXIcIbgr6EI
zsvTHJ+2cBrZf0YN7OT77JvG0A/r4vH6Nfh43HfZFGnk5+tqu+odZbGM4HiPfafmMXXLR4jvRonA
7nKzEQrZpZkNpZ02Ai6iWyXpxODtWgqyxS3GRLbTlpF0zILPTNHaHFbiLx12Qk1ueP++SzwTngkP
t4p56D5UbPsefBzcbMrkpuiXYD20W3b4bV2sdXhGXcOltsb5AaIMdo/5Zwnvyc8LSL60TIRSuq1D
9DVNG5cF/YxAgYz5htb+4cGT2WYgHIP5S1hNJm8KdXKrKJzCV2a33Ein0ZZvu+T31E0zJxDqamY1
BWp76ydqZt2LEvBDhIoC+6E2TKeDgW7MqKuUT2SFLWUedxYotaaNjm6hpUPfeIS0hVaMexvVZTWA
IgMsncYpieloJ7UciKJpoD0pjYIMmOrvbG8v7y95pRj+G92pkzzPv7L+3prRM/CLvB6fzokXHE3i
dX2393p2L8N/X9zLkl5+FzZYuxydoIYWb6FV6BJESYwR3WkKJob7ruWuQn9MjVM0rVZlIWzSTzaE
+0lWuaQuFyUkItZZak0MHaTHqXOj/LnlhDUU+2AucaHm6bxULXXdyMl1iVQ19w4A3DIwYejVFCob
kRMSyowIbGTo0uFEeyH4qIG2J4qYjzSMxKOX1oKF8GLzuvFMSeXdy+gpS2emhRq6U6ps8HGEWxX6
KLtNmsiYG+R+Lm/cVMZE1gOTeYcp04Eg+yA31gObJCnryNfgNsO6Exi3M5E9TI3Dta028zzR2IkM
Xplz7mWc0jWZDdTiRNkyOszorsKhp61hrjNby6WatCeIwREBghQnnJqXGkJnp4seRREKgLV0rErp
z+OnTY6xEWb3CIw62W7RoAw/D9itsKefsLHZ8WrSMBh27RhafeCE020530D2SbhHpVnJooMEX3Te
zdXw50cYvunMzTveFBj75Hax0jPrZCpBwVpO38wLk1EeAChwdjWGbNN9h5nqHMljAhF7xXO5jMy1
MMXrqC3t7Gms8D8/GmD0qntp2DzhTE8vU65YT3K2N6xZF8P2enfWGYoGx3syOUj6dl733kT1BbJY
2A5M/ZsCREaDaqXRBEkVbY7Kkc4Hcnrgu9zT3mT18F512fIUAs8EGMV8AWvSPeMVH140w0FXKGXw
zHERCTqYNSwVaAXWTxfYoo1e+/wLblhYxylstaTtq0InfsOPqQ5eQAi44CBZ4J2xpajRalwQxrnR
t7QW6FB1eEU6jkM8zVl5zbGLCVmuklMlmgGg+wrC0sADAoXnFWxbaKsUIGwo9kfS6gXY+kmXFLp5
C+pu1MRMAeRgB/v+8YsXD3//+EXouALZj0hFKZaX5VopY1ErHvoEmDI6C3L6KCRIUXOREBBffhL6
4lKdwW+i0UTQsrhnSdgQKHsL1xTAKPaIdEKbe/Qmywv8YqOb+mhMgurY80qCm8o12L0LsOHnCJgW
umRRqZyM75jHBMBo0sw/ULtaj3cvQCIlgjcgiHcfPzhU//122H1v2hDr4LQbL+7pFkS3XAmNSP52
/5vJfF1MZje3/fbq6DPVlQfDfT/o0vJjp3fKRletb/RIZLuH4vGrJy9iQ2FhAKST6Fb6QFyVaK6M
ROnBLkmv4ZRB7h8/PX/q7oickIWFeErlldZ0rGidCBmKWnclA0H5ZKH0Hl/Wox2Ey1MWH9BLNpRV
aK61ezfTMUtZ93CuvoruWOI6XcYquze9iOVADlM+NwKHtUdjxZyz8D8vuA19utKj/JOY6zM0E0Lo
0NLUbU8Y3ki3heydGagYfjBiQy4vsa2mg3XKKVfIiSxWCBWUBi6ZVint8oJBtqvZRKCpX45StK+1
2qiAUeg7eby+oU3V8KyJ+0h6OK+DrxIgnbkcpLQD5CDoHDbgJMjd0mQIgU91t7uBJaTbOAxb8DCH
j8UwYA4Mm7kcfI364AlUjNKr1Os65cvQHkmoMdMKAeGlGBNx5KaR9WpVZqrOqUjq6+xasdOvD7QK
I7fIL48NEFq/bneU8nzdhErPLaC62Q4h/N3E0WQJcXuqfKDBEkaGXvCuu4ixv0GRHIUO+Nir46oP
/Y8FZC1ZoCI3f46vF29cb4XgaI6X9UuRaPsu5RcyaNzu2FNRCvKfkAY5AH1Me+DTrpX0Tm80WK06
V7rQd8gpaiFAyqJqazzpJ0EcFLqD0PTiIkz9GKqliRkA+amk4VQ1g+FfJoI3cFajDpkrPUOoU951
NV+YIx7f0HSk37DwK2ZBolJTBwKTZbHjy8qN8EBRhuFE4AcDEKXRQ8+qgSUMz60iDNfKTCtek92u
fOY22zTZmsffaN+f+Dpl3eI/xRcscw165GLHfU/ZVi9ZOWEBk4e7iT1spVg4zdrdbzkfSewMD1Hr
1XaNhkg2s4a6CdakSqZCWvB1rt5spqOjPvHs6CgQcFCSVwrmOBXjpnS4IocQxGmqWgG3F6W7wAis
boYnVshGVM4QSGQyv5rc1OQX3tPHMIGFRlcaquz8BvPNQzg/5OHZlNMGb2Y2GKmW9NGCoBMwcfNh
S1LvqJHzm278ysBbRN5ma9BcYdgJuBMq6k2WN5C06mslnf+0rXWVrvR0bJc4kfpGPWsD+DibTyJq
HU6Ud10IBcVlBBZJsxgnUL1qRd/Fj6SKCsCGxBKAA15s/DUEqgVKODC6Y4ncEWAN6AL43Z1aY9Ro
b/4+BcpTTZnDnHCd0dYSKmG9RUWDkn1ahDzBo3Sblqn5exNbh4BTMsG3SU+dOOdbYLMMEMAJ1IYh
vx11ayuSm2qFCCikWRAexEwagHQcJEpMIygHoIaQByajmGm0sx0meY2whR+jF4UaHewHX6RI6+d2
2dT/7ZJGQPutzm9wSJDQzk4T2Xi31QdOhKT6YJhmH2IUDB5cT9Vx/OnQOavNi8lyu4pbTUkcLm+w
d5xktXGWCfiqvkA/PAKCA+0EjdDzm48//rjZcMQpTHDIM88I0paWCVIdbnW2TToc1KNDkvKHfZMP
tnZ0NKHK9nRuduDgF0hM26SNbTh0wD/QsYVqhk6r6o0Sb7PBqRpGjDPEJxebxfwA4venF4NPBrUi
OPg0/yQ/EjTkfw8eHB7Rj6PfPtAP/7RdIMBHvXGHuONG2FIPd91HwdTwNsFo+nrwsobsejYGulra
zLoEG31TyLjncNs/OMofaFCaemhbCda6wYA2yoF56vvAisKpe16f+nrJ1CkTS+w6pTqdTTHteEwL
uI4odhCaukI4EkCn064X/K/d7w9YTEXG/yDoRKzHjumCGNczW9BD/H7b1kVRUJANlpjaE6AITXoy
uFRbwvVinqBbAKP+4ubAZu04hjzX1Sfdw3TH3ddjgg+vht7JuBlp99+/xchKVbXhVoySnx+9sKIn
y0EwkmUZJCxd27StD4fWq++f3oqcjhowNOQZ/uxMWFUipjYTmwdF/XM7ORwAsq9AjkO7mM474oeE
6wwcEMIElTUorDGDHVvf4nDq0rjUHawTY73K2vdX6JUxN7WZQtFxpBVERaNBYxgg+1ioI4CaTXQG
8ZZrTw9XH7UJynELqY+zOECGaDw2HNvj+NFwOiUxaFgGaMJgorYA3rv4wDkvKuHFhbwLSm0EBYV8
MvM/ZBT9nqmGO+BC6ger7M9Kj7Et69v6fVygBfrpsM8OuzqaD8P4cMUXi32gMNRArNRoAhFwYa3w
HpjGysp1Drjd7e7R1VnDEvLDw3tZGozMP75canjiMUQ9kbtj3Ytl5jJp1MvlBs2o+kvsemtyrDEM
GbGU7+eU332EzzeFDdxKyPMpZ9/pb394+fDp00wce+ADFhGQ1TdN+UwcnH+wRrQSaHQ5jLeT+yiX
qiNqYJmcb5VkT/C2Es+1Ri/EFB6nBeJ8X6gj8tcf2ywQLO259sECkol39ellMK/OyWW1Po857/WD
U0SgMQD9e6qCZPAs7ewt/qPpRSmzANy34XXvnvnonTzTpPSHq4SaYieeF4sqGzWfAFMtrLetG29b
OwHAHO0QtxvBMcaJvgXxTkEW1TIWroj1+O5KYBVSSuKqLT0rmP9mZEpKsQLPLqTtYl5ohne65+6l
umtgC4CQKjrSrjJzud960Tuz1on9I6fPg6FqHiHC3PDafW7aHVv9iG0BobxwSlTDNynnsIaWxRUI
DLediheb26leFpvi/ZqqaHygpprYbz6hNW29CyUu8Yx75kWDA0OaJxQZlXee4MkAdAnyc0YDtdBz
TGCVJqsUe8J5RrPWFvEY8I0i5tlCI0eOyAihqjB4HjdugkfK2IhMDFmHNplxaM272VidV5fe8AEo
g2N6Xi8byhxfa+uDjfDCd8dHw5OTWBec0DVqN+3w0o6lzkjtkwsFrEsK54aoPZC30pvMGURCdYQ5
MzBXx6ZIJD3VhkCs3Zmj6Gg3fJnuTGD5jw9x998R7m4BpIT3Ri73SAde72YTmAPZooX5mj/fcYtq
yrXcmPqILY33jP+g2C17zIG9uIqlPH73oUWPKxOGQw7QRw2jukwgjBf2yu0Uk2SQfn2JUK6XJdy0
iACgqDuqroOumYwOmmt1JQs9Gc6qPdz0+BjlyD74NG0KBt/DdrOfc5o2ZebWqepHvlrGiXfdPcyF
XLNfWrv/WJ9qagCjEq3ocgPwcs4IbHCfWFfz7oeu3fXesgepF//lWXKUf4JxIzxHFXj5zsChDww1
6iSPh97NDM4xPcLr4Fw1QXJtZMPDj+HWB1L+napyGH/cx5ToVwXcLW4hKLnSlZW6Wj/Xd8WNyPMg
wXaPvjBqBrgnpTHHOMt42idReB9OEnM9aS4c0v3d5OSYUx1ZzJ+f4+pNRFBPl/X6+wjd9taKSSAl
FUotADmEzCmqxdVVjWsZpoDigmCA0D1MHX8DH4Y9wb1lZA2scRRsH/uS7fYs2G0a3mE3ude6R3bB
3qpawLEsplV9r02ercQ/K3OMRJCMFZ7ZC0iG/KA2K52DnQghCXCrabOGfEWQObOiracWWJJMlDJy
aBj+EtMXTsjUR/f+2vpnc39Qs8CMwwmPvbiw6UWF2ceaNjER36H6gmdUPzq3BE9Fjkb77tn3cOhX
a0I9zjzrynaJnjvaX0epNtAm3EyewhT8KCBTHHgQNfEg2UWks+9mAl8alENgSjAeiosFOjg5dkk4
RdiNtwxCNfREUuVZaArYf9cNjYTYHco33kOfZTWIqj3qH/U45hWEDKHRCQOvAcVbeErFYsRroSsO
8oThU8gfpS1q6Lu3Thu3fygLtkWQ0UavIs9F+DbqNGfoWzg5z3VvF/hrPIJGfgXloxhErmmnAYoo
gjcS84nDYBeJGRKF+tFgAaH6C1UZmbU/hpAB09LVt4Dc9DyB2ff9rrPbgAv9XXSlqJqUZh+PGpWT
pvY6xG+3H79DTaHCsycYEzmnnNt7rjF6TVgX87Fa2uDkpZp7qs4lgYNg9JrnaXX+mHPRMLKOB9LW
MTXpJGj4B8Pps/HdXpOZoF5MXkx3Y9w2/b2ToMn/lryWAZWFu+EZuIBApM2qqxWhmzKMEHgHs61l
5pq3dFZS6w6W4QaDqcuUYljjBrfmYYDM1SsZXqAHY5Q4A4Ne1yCNu+jIzo719N75GgZilIghafiy
EM71dG2oKx7pkoxcRyRHSSycRL2tVtDYbrfVAGSKwaVjPWQ9x1Rq+MvJ/wLTw9/pycJ+DC6xF6JK
iqhSBC7dCCs/T+OpUgtBJzGqvp01tQ9yXQ0Rb5YWbLvLPh8N+vrQQIFWK9hWPumJBt1rcE6J/5eS
99Q5+xHom/jEQBbcipoe+b4czr7pbP92xBpi4+hySfb4Fr3Yo5FJJPsFqhww1DBfJt1h/iNu6hBg
GM/ujTM2kh88+fFxY1k1q3uWvSjmc4IDMe+FCuTyyYgaDra/hVI4wfTY8wvT5Y+JUN5UkAuoZwjd
oFs1CzalklegO8uYTKW3lrNq0X98rcYMd0U4GmD2RzUfvdZYwwK2SyaQYxDjC/KZoOoDfxNbxy5s
pCVfC2jZvONenvTfc5PKTMlcyGNP+xG6DeMm8AiAMHOEw3ym9LcILIImkkPGz5c3lF3UPHz89PH3
SiUZP/vh28dRRHNx0ax3hp7+OttpwP7/C0DuvqlsPJXbPaNIHGZAxyWtWbvxEEGAF6Bbq1HSS7Xl
P+2n6FINt9Zq+M7m5RRuAtPtkjdp+EP7KaXhMk7pSg+LwWXQ2BIGIujiij/R8Wk8uZyUc4AIi5Eq
l2DGAHLwBeBSLsoa75rhb/ZnTwlh4Q394mv3WRhym3Wa0Ik04oV2ScLzi/0DN691DHAk90A+9ko0
SrRBNOCPEDUDZQz96MRTG2BJPXv+QUbCRtC987H0p53M5yKMCm0VpLV510Izm571NvVrvHwGgqPk
MW+ujjGPeSgVgKw+lZ8HTc8aApOP4RMw0hw5Ye+z/E1x48dCqQ569xg5PAsDWOYanxoMGGR6rKdw
LauUXbY6gspTpLUOmXigzrETUGpPi81VobZQg1ClAy4PGNvyQh1WLiEnKhyp0YpGCeXwtpdolPS5
vkeGmnQ6b42bXZgc1nBRp97XFeTYUSJ1XQFq/7BnPXKM956HPHQP/G/+Osjw14t7+G9+72v1718e
9H/RQESaWYSjn1qtkz469b3TcgnubrQsMv7M4LsNlSidJ43nBok6OHot0o3R7bDTzAKH1h7m+Hb2
R3DPUnMALZA31MOY3xcU1sbjkEWDfIA4fZy7k3UgmHjK7IBeCEH4CF68wzYOr4+HvzmhG+3j33jJ
Lw74/Dat5tuF61o/PexPj/rTB/3pJ/3pp/3pZ/3rz/vTL0CvhxpcMpD56W6qb9p9n37QEan5+Gm3
j6nbehSzgtA59UY/XGMmdn/UpodAO/361ZOI+fhsyR3lgSc+OmoyLihaYLD/uiEXh5HJljPobu1M
HTUmp/XoKIsbAwx75bxNaWXFxzdyLmS4Na9u0RprSWy0ZYvS3g2h7UUzOBRaJQWJ0DYZ6bTe02/T
6ye/3hzw7u63pnm1uTyrWwlc97ePUwQg/RTb/CKNsDenYak2Jgt9MWP/zXUxLcpLMIoqdudFOz30
WrIQIikXApg942hR7OdBCu3+Alt6t2F0cb0AyWjuog+5DjwdbRdrNEo/fX6ANe4a7nxv7mGbZdAR
4eSqWm+0tFZfKoGhh+SDNI53GxFfkmbJV43mRFIdMIQR784hFlrt17MK3UjzPIfQlovJqoaLzKvJ
Et42EKo3tL8v0Iq3KeRNKgY2ck/UPtKHBMnr8vxi00ALjG3lBs1mZNfbVKvBXOkjcxs2A/6CHEl5
VU6LBkq9Cm6tVHX6u36in6gz6Xqhxicx5wQMxckaKNk4U2yRUqfwIpnzgdZePM/t5vIgeVMU4Op3
40cDxB20fWB29tTWm3O2lw04UDz6tEwb3K5vuzgP2BjKRdkc2onvjN9H5EbseziZwj4CWSRncHtM
vuVOVDHl1OMZ1cdpYOfQV10IDn3maxMYch95SAL6E/zjXpoM24gjn+5L+du0lRYfVvel9qidmj4v
70vub+3k5IF3X5Ift5O0J+p9CT5vJ6jP2zvJIa74YbPW7Khf+j6glWh0Ib7nPg79PmpcRKKNjmmj
rZ06gA+RzSo4BELsHsGgmrg9ijMIWvIAW/KUFsdn+Mc/tzeLDCFt7WlXL26x+ccxU4GylWk7WMe3
j8QlSdRaEpMLnu0kssdbBWK4p+5Dlds/dp/2wvA31NnMSRoM7eCSMSWU3Z59YxCAZcSd/sFL5sOf
ymnTS4FSmvRU1Rpizzh0bcgpEpEfNhlh+0yS2hzfo6d14ns4Wpugxr4Dy1JfQLA7qhtDVCPEp7jr
WB3Auhz28eIKlQ4sc7ad03tobXkmYQYvCoJeupqgQzKqJxgeZA46SiGT0YWghFSSxKyYzI3fCl60
YioLaLwaDjygYH6LTTKg1xjOBXqWIGIjbWH9TNZSfeJo5QkohKofQo2SF0pWo6qWZCjiy11hPakr
3cDkTNWBxpQS2v/rW0/0FUly+zuSWTVtuCIBbtz7gmS3W0Kg9EEAjgxs24IbPUZDqzbBndBjjqL8
5ubl5BzSc5qjiotMzh82hc96YoQKQ1JWqOOhzrqJXvz+TQ4uHbgaKeZomGpsFxZKA4woVC+ZgFcb
YhF7YUvY3GLufhOp7Wo6oLLquHXojjJys2YxTdCrGsqk4eYhPx3BPYFx5YhsUo3KM/tkxc+2t7fv
RE8YqMx4zTWnw/3ausP+02z7kf2LW3/2s/y8g9Vn77HQtzK/wrQ1mITevan2eunXaO1eqnazCYuz
SsaXUURexFcS3eUlDO4rXqidMJ54GmpND8P7M6OJpV+HL43uFXuJmIgjPyF0ZEa6+jTRjflF1vUe
BwC2s2sZtqncLcgOOcbA7xZ3VDCUdVqVZjpxddIMmS623yyGg5q2vB/TviWriG1eWLahIgBVM7SG
jWZlTP6E+NCmsBoW4TEwbPXN5u7cU21FBwLcjuMXfFGGCZqxo8tABSfudWuz9mo/tshreef24qIT
McmIJQDuzOa6HNZs3FzTi1y6l7PXtzTgBLuwaoe+1IW737AS1k2HjjNAWMzZvMWdf1hS20uooPU2
iFUN0kqLM7fALxhbZIexL4bUH/INZlfUN81WWved7mfBR62HX4zz3qX4YKFQXMtv+c68R73tmwHP
3t0S8Q93SJe2OZIC8I8f7KKYbzYvMIttzeqoxj0BZ8hFhRbzs8oLeNZTU+8U+5JyOGmWUGTshCZt
y8W2j7WjL6/bFeaIyJbfI9sYvx0jQ7Lh+17x+CGgb/YZPcKQC7zHimWPKWTvYMT6kAYWP6hq2OQa
xMFWEj0LnMiGPuzZT8+fDnVAMmTIrNVR/02+LDaAwXYfgqkwMHmzVtLw/qysN+KZS+k5cF6Jovun
n558O0zOZoezL07PHgxmZ6efDw4/OToc/Gb2ydHg9Itielb89vPJZDZxvueLtOTB0WcSzw12uOSf
S9VZuzuI1y/UJjPbzoshm0rEq6fg3/aIt5CHuG5VZ1dvmoqoJkDth4dNBb5VLKdKHB5+MlC9efCF
+jn89JPh0afJvUP1WdL7Hiw96vkPajODYtL/+EfCVyiLmoj+hBw80/SO1BAlR58OP/1i+OlvHHrq
+bPqkum1+TlpXxAdJfjhvUFsXlfX8yEdpuD44JdVhdT/m8tJAy2TwGL3Fpqmiv9GL4hnOh/EpaOA
NYAeIjr97DiF/EN7YsiQtcW5Y3vWEJ/R9YzlvqGmnzR+yib80O+O8ldDm0FXg7/SE51EnENz0YqI
YMqgZTkld4yHvXtWXxn9/STbb2QECbShxdMVOwC1qho01/i5jdHXVeYWRv9YxzaVgmMqK2oI2wBm
pEiDCP1hNnb65n170kiZTxZNxKHk2Oz6LmH+9KSJNGrwTYQXnA2bsnZfTWG/R2ddtw6kcRLB6OHP
Ba27ydEh/vcOCcDGYwBNoUxxWM48kbnFRSvd7OLWo7hW9JTMwOx7YOZW28FUHSB+evnIOhGDVXkC
toV3EKKEcqb9UlJwBxzw/xL1vyH/L0t6x/cGJ/grv6vkjJOoPPReCa/V+QPydPOQzpoyn1M1f4ZA
m+Dq/AAu0YACK3+mJALFA25S38mNLRC91ODdPot6Es+iDsEZy9lkjfxzvnAzqevkoDE8naspaCzt
Gf1ox2kvsy6uXbfOrtgRq2WSohPnsJsFrOWiDXHw8OAriZ5jkYYMs1lYHgvHE+6MwBLXnKseKrG7
KgX8IxWRpapc6q0PQ316hyLmgnD4mEVdF4y429Runw6GtBqgd2PUuYOFr2U6qbILP2+YxBZDIFSk
nepFCIYYBM4YBnhGhycOoLI65/pWfKbmDVV0Wzc1m+BhfhBk8jMlFbcvwJnoYnJZUDIljV6leOlj
Ad0NM3pMgwCKg4O3pK+PDFVnueCnHVoZ9k6IUEiOT2y+enwSiFZ8atT7RH2az+BmCwnpiyP3Pc73
Ggzbqlm6pL056thwf85qdhy5wDrxljy0go8OOnKl8chgIlqGnQbNwUTMNFkD3Uug+QpDF724HUOk
NWAHPnWjdfBJe6iO8+EzZD+wJNNmHT9RusEL9LUIKmo296EVOFLboj5vqMqUt/Sb7Xa0u9fnt2tU
s3k5QjdipWzqFOoiDb6DuJEffjF48NuXaiM//Gx4dJR/9tvffP7JF/81+gFvWLfvGCWeIdsKaSWT
1Xrs6CR7dwiRBtpYgsOTPGkYRIDEORzra2Rv35AWsPpqD1ZvbLAWonDap0g1JJdl+6bOTL98qkPu
wAtD6RPsgnGnRpOW+verMIJTS4q+XFF9O2cQy/X28NV//Oijj9QxYQymhnxMYl1JjmVx9fbo1R8P
P/oINu/v4ByEuZ8WxayECS9AMWI4EAqvQ8uRDrQGR1MCYdtcqE3w/CLhwMnk4YuXeeflBXltwM7K
R9DE1l0pxlvdqCoUgS1oYxTM3WEJXN/U+qfSzc3j1U0HJbfpjKGnRfeYfD/GSp7PJ0t2AvmG2vVQ
FybR3oFodAlaI3fX7p8ml5MuAXFAGT5QosNA8mXSe9BPPlPqMk/kQfKiKLRB53Sr9KE/3aipWObV
+vw+eoEcffrbLyjcRrVsjTF03W+qav7Dqqu21W/KJf34CXQt+vkU8xHDrydnaquBH7CBdn11rwtb
6yOl3EGJ3xdLwDWu1vzFv5TFfAY/HhHAD/5Uwx1Sea64E94+2y7gnxcb/MvsN/hse1pPlSaxwXJq
UcfbAm9fAvQMa8LjerPYUI+/45Tg3xZn2BLYIvk36ZzYS3SyxNrrujxfhrU83J7rV4lSZxXTwo/v
Kmzyz6A70bDhn2o2kT4ofSGpl+sbSieBrV7ffEeYqly7YhekhLxlf32neDAk9fi6mOIczKvTyRx+
qUnAJv2ouonTrE7Ob2g2yMqjRwh4YgzCiNA9Nj195aCYn2PbnJA6YiIxvLf6GOdDSKayHsOpG2j2
gE4onZYoOEx6c90CqjUgBPT3J2Sb37Gb6J7tEiBEUAB82zegyq2z2zQqSgXK21DY7yblHHKbmdQj
9rRELkykDjNaGQjRlSIOh1mQhSjZGvQ/IahG3a6vBZLbu++v5JYRFMDUbv/SerRuTo8sYX2l/WKg
Jwnn8ZnqmwMqsqjAIAaDgBs+f0a1gs65Qa3t20IJuieaNgA7rc3pwTGG8Sc5/ttTxKkMW754ZG3x
M3rQ6qTFE6cYnUv3+F9jbxB9C+a71+3ZfQNKFDMGjAMkEoAJhxCJYrDeLnHhhL7rXXQbQG8+uWES
hG8f4epKOk8C+TxJXmzPz5WuBM67wxg9OJxvN4XeW4GFIexPte+0OKswU555qVoETIGJI+hR1mVj
gmpxrzo7I1CvMeG847RIT05IQ7VGWeDnF6XHaFLE3fW7NWYVUNMA4Er4FhG55doxzBWtl9sVmagh
M4Hk3cjmrctLLmdOZMu1LoBLRTMJZehlwG3XjlC7iySedUDY6r1LOzpY61wOwiCtz+iYnzT58ksN
KkcI5dLiK7vcfb3UmagJghiXjzprwrs7ilmYitvLwxMlREGeQ+h+MDKq8UDBUWdc3WeoeUkKHK4V
/jk++pwjR3XOPQCO7XTgsA/axXgxWZlLfxAU35SbH9aJYu2/8r7ID19V+PRf3acPlahUT/+TePr0
xUV5toGnX34pHj83j7/6Sjx+OEMC98QjpZzAo4F49D1AW6hnd8Wzb8tLeHRfPPpuXlVr/Vy++L7C
Wu6IR4/fwpPRSDx6Vm3o6cfy6VPqi/PkMT6SpX5PXXOeYKmvZKkfqyvshuzHkxoelbXzCO4+8Ckw
r3yzxMdLt9X0lLwIup1fOp0tKJ/B1DJRKHfHqW6JGNTq1d+c5z/pmXCf6ilTT6Eu3leDTYRqVAdS
2jTsNmsKwY6akLoDaXLnxWQB8vBsO1fbq6J2TmLZg51t2n5JSnm7rpaD+K/99ByU63I6po2McQdc
jeIA0MHmp5PpG9pMrjA/RMo2s4k2Pntgi7mUO21qj7s7Pza7BJVz47krA6mkLb9m6228s3IgYexQ
5HAS7E2rKMacUYmawmu07uR/LcWgqEuHhatGRAHonZMdo744hoc2jc8dQLXAt2pcjqHQyT7D14eE
KkrL32MY5eipT8YfePh4MMj04TjYR2DAkI67A+jE7Xw7QlkUUfuVGTdM3zX8B+mPiAvZBabohtq0
+YQLd78Uh3S9iHH6lNzzoR9xaY2Bsa163HSHRssQCuScWMMjw9NrsCwFHVDrgD/gBKLe5KfVLHZ9
yyudjgIu8Wc2z6WHPRJhUM1LrgRxre1kQygpwnXJ+hnhVFrpRFfZoBpgykd8DTnvGDXxHI+e6gHq
DEAkL2dC94hwtVTuo8xM6Q3axUE7Lx+Q9LPAKCVA62mkEr8eF15RK2LwMorwqjvpi4IWaeHNJJtG
TOLyxUpqjMXZhtxlVzn8dl64ViZ84s43zTO8cG301UpxjVKpxtUKphFyTmEN1aqmFuQEyAy6lm/I
xO+civFJrGKuwssOtxrXN4vTCsZa6nPH1cqezE9aZHVXw8pRMqlgHEwF/b3zE/h98u7fDdePcVeE
zX2ME6OaIR5hVDs2yjZhp1T3+f9d9sV+4jVsJHhh/1EI+jISM/t+m0fj2Ea7atJNQcVxK/gpmLJi
Tfrn4ua0mqxnqK+tt6vNcB+o1Ehq1Hgtu5Zd6+oQToEw/eKki8Z5BkF34DnhWSyHDb4QfCab4dQZ
eoPGHMfdFbYWUPf7yzEyJ7MYO1V/VFKQKaWIIu2ssYvK5CCMQBv6YR09pEbQsVFs4G0B10MbsNev
ln2QxBOTadQLzTG7kXtpOJHwiHq4i4f4epxGA1FNEdKU/g//bjqo97rJPdpt8Lgu25mpN92s+w5z
xvZ+nXAD/hJztppAVjmo3RzNjvFXHpfVPKC+OKaHsRnQxLx5cHuuW4FyNaygVbsQ30rhXaBhPGvR
OvaVtvBj5HZxX7VkDwl5i8UHNzh67ZXLytch9lQV8NPcVRhwK3C/p0fNBPC9MBNH93sqGuckn/f1
lp817Pm32/CDHmWdd9/rg43+XZTcX3lzDzZ2OYH/JvwKl4Ba41U/5dFou5y6kwtPXC6DT3J4nMmk
uOPmXQP//oucU/i6mwyR+C+SCif2CjYd9QqjuKDqMHuSVz3mzI0uEPU8C77kXUG2z6i3y54S8J4e
uayP9Wfg4GTr8ilTZ/Q+pr/J2prulI7OMoyGzoSmR4T/ftdR4c/zSIbKX298uNKxONXWoztMWLco
Nl/uqHpk1Le67n6230jHKTibnHM1gCMO1vX34EOHxl4jDoW7H4INu3d5jG87Ts6HO4aHMpK9z+D4
Oc1ahubN1az+QEPz7mOzx+BAh+hdCbEJqhP9hK9/fLqNF0Z36p7eh0NR7VaQ+SnLneqo5zu2XqjC
1IfZ5n7FjfbuXbff77kbWvVZDdrr5V/uwBDAr1+kqr7aw/zcqBCr0ghkGrmF23c/BnM1O04hw8WN
irIvax36acx9eHKxz7VZsBvOrWi7sZzxj/edWGuZfScroZNdxdNWrNsrDQ64t8izLacGtEdbDMKd
bq7pZPu0msyy5ua6Vlmk7Q2cp+zSs6h2AfV6m2iwfnMWFDHaFD0caYK3LtHsrQWO/ubvpATjmcsZ
hA+4aIPRsgs3p7WLQxdfsP1bZURNkg9FZr+JPKA8VNrGj9HlgMAEHo2Tpcl602zu145d3lz0k1RH
D6X95C+/xNa9UG0+ELdAs8e6zb8m0/gVYeoksfSd9z5Hr/a8XNxnr/i1dwKWdXhbpgVdXa83jo9G
7Z288UlUFMGn+caBEkbPEJeC4yWCwlf1Hv9NRuz/GgJiBESUBsQkXi+7fhozrzD6mLiPjo8+Gw4e
NJof2FmFxV0wBoHbjhiTPTKqkVCK+wh/cJt7jA9EcyPMUJ4vLTOoP8To4kbjsgM9auAH9fWOzQlA
7kaO51LDSLMSju4aoLSE1j2z8/Za5Ck4PS2rkWxbTs+av5lW83F1dlYXG/c7+1w0s7gaUyFuLA8o
f6hYX8nUWme+cFuzqx3N7Ym1JOJ1YNp20iqJo34H8XBj198glL+SO35ls5OsqvP2wav/8NFHH41X
N8Dr+UoNQ3FaLt9+8uo/dDD6AJJJLcqNcf6EHG2Q0aBmL0jhz4P3yIkmoUqsL8spp8wxYQKYrEk7
GW3XfHUMiUCBy7WjPhIB2AWS8deL+XoFli4sdy/p3qcn9+k1IFDKl/C3ekXxutix8WQ2o7xlFOep
dVSMEdTxM2uI48InvS74joLDOUKTYPI9nmJ8nwt66WCgu6z2+IVSBC4n61EX3CeEJ/oEvetHKeKB
q3Iz1ahRV3/YhQeYu4pQPi3TUa7X0XFK3rCAE6BO4OmJLXJRzFejbg0p6ajMXycaShNyjJiRDCdG
+6byEAFsT3kOrDQeLyA/IJz1wY2RXmgfVZrITbFYAY4RObrL8sjkW5PCC7G14PucxsuwGOKEQcDD
UCwxLDjWRTD108jUlb8soPIJxALMAcHwXiqXHToDIIHVfHteLheK489pUulBL9WzSpNaSBitaj67
AiAWqG+djzdXOf7p5ATdFMUYn/biee0lnV40+3tTL3P+BrLmOJcvtiU4EFy/M2/bpZ05d6rOIMEU
6qX0HNRRWauMUo22qi6KNzKilxc95LEbxb9YF7CttFOdzqtaek3Ninm0oIzFvb4pZwT4hH/0MsXS
Vz9C4V53hRE93b5onjQ1EEHCv1AqJG+h6u8c5ERfU5eXMnXOKcdpWrpi6WANA1XFEJVQQq4wVXw4
foRB4enXR4jjFJuTnhjXbR4LM92xeKhP+smhvETg+FolPeflqV7OL5RIKNY/ArlO6KomvskZ8Lrh
O95vxGMcaPo2o7VfO9yrh2BcbxcLtbR7/pjY3vlv8gbB8rGaZxKFXd8/rWOmxiemK9GCFuF516CD
b2qp5XtNwBkZ14XaMkYYicVpu71tETkVh6V0sFKgV34/0FfWV9Sj1YIe1OvS4MJWys6CdsSlhbPG
OUGOcRaSc3kBxzLouF8fDgPCs+nx2ScjMsXVKiL5vFqewwkPj3mb9WRagCcu/gWhlWVBWLrwN6z8
eTWNKfjG2NSQ1pjqg1VjwxoulEDCcVK0PQebK3IzKSu1sVB/f4ZhRVBhNYdlhQnVfO/OVQ7JJ6l8
b3PlOURB/Ve5JgAjhuqW5yPKxya4oqmD4H8WdWK+orLOPyXfWszxBiNZSZ2tB4OvmI3UaPYd2ZZ1
3n766n/liFUGwCWX7befvUo+InURMsaXOvO4kkeA3D1ZlagEvv38VYc/V49Wb87ffvHq//yPFORK
D4YIs7Su5miHUWorsN+McqlBejUAu5gkNAiqbdM3SqLmnU5tYzxXN6syX9kYT/j7PhHvdHrTLLmo
5koKJ2/WxZti3gdAld8mg+T7Jy/VIWNaLOuiI/TVqpZxrygHNzcrAO+mp3RigEDlTmc81qJ3DNHk
R/knailfpiTtlII9npzWmLFPpC3X2Bza+5wpYDepNEksyEBzVSq9bl6AwyaFYmNzKJw1+ZNSYJPJ
XLsba7olYYM46BRj1RZUv+HNeJyGTrymXncb0Gnia8IpcrqjA9rKjRronvofndtoBtWrmuxuI0RQ
kYgm8InqOYAgUXp6nlXqneUCUMCwLNzsmP4pbYuOb7DzLXAySExxCzja0IKYnAnUSPoYbXTArWij
w2gsE4wltkAIghJzeMaUj+3HcJt15iteog7DHXLEkYB9AzTok1w8biE6V8f4Yh2haV44JPXTFooh
VyA9fgzUjh1mphBW3DZMNVTYBHyBEbSaqq/1NYhlCzRjRSaEyvN8eE2hd06/8BHNCCMhituCSD+7
WqeSJjFLDV9pQiwyic8erkq2EkR5HKJH1JZ2Vl6PzHvieyIi2PSYC5wkhLcpoVWq0z+BcYGBaTSY
qYkB3eKxSAlfLDwe27I26qCfiKFywJ40vWBhE+WOVuQu0embH2rLEmxwmgDDBnVzVmgMqpCXTtTQ
MnCt+KCfXDsBi/RUmwbsQFsZm0msFtU1WOgeaEPr7Rd+1LmFUuHwbmmoLCar8TjmhyyrwnTp0FtD
IXNbXxfaluwHQ9rGOuB+TAe0FgL3uenpYegbks3ZcYkbiWdgv3G4lZOLAKvG8uPqmEbw44XIK+8l
Hvw9aClbEUJKISbyeAyaLiCkTiF5JrCmTwrH1oVy5De2tVjA/plwCL3UqqEn3mUFl8LhRrQMuG5B
6NUY3KPO86mGFK0/fXcg+pZWJMAoTN2rymWdsClykOSQxdsFOOv2JlXOJEisSCfq7Smbg1O8pEvv
9CK9CGFilL7kizsm1cRAIQ0p6vhj9PdA0nuNGRXdJ1sUILrUJlkJDqBNJhgDwNcIjfCdD1sm4aJ5
9Ig+bFX4C2KJwS25G/PPZ+pQBpB18rQRUp4bIRj5nn7cafL8D4X3h5hvd87n5aT2Z53bFf/0dlOt
BWtu1IHmDt2GNdrBnqR4OdYtjGyynZ24QHPXA1JoF9TCJlUPvzVOSVxkBL7rePfJDTSfup5bQRWs
dDbRT1F9Tj3i9JFLOR68lX5plj64r2iMI4+butppaZ5lnVYKsc+dsTbbZdTJRun788mfy/lNovEY
CGTAoKeA7YKCKZ3tHDP7cJIYJ6kVS3dda1y8W60Cb6P8hEmkHfg7WrueYCjJIiiBIqS8kAqjGt4l
KllbSqaAs706aQ3sh3YLVipqN0czwjCgOETC8Z09sqUCa6xvINeoKJoswOOaAhHrgiRc3aRbRVLL
xwJqHR7wojkjUbj6TrZN+e7sEkkx34WoVQwsuy0zsXM22NxTq/EELSYxEKUDbaooN1uyPE6SWbU9
nRcDNLFPlmqUQTzEFry85R6PeZxR0bJrU65XOiP5svEAViQkkp2TBUfnqeD0sHx2gXupIsGz6Iyi
8Q3qbgn4LhO7acDjWVFPwWRrDwLCJG78a06cbwCiWH+aY390Y+XyxZKCp92B9mQuJPH1lqQEb5Kq
cDhhUU5w9iUWf/ENek8DqGYNXxYjdicfIRjd2DlD4LOMUYnk/q8mzNn/7YKQxwTSG9SeKAAZNuph
L/QEVE/dZl/H1521w3hKvY6/jk+ZpqjlxHXTCoaDPm+V11nkIAygsQQxSqdRMSbOedS5H2zSGHSr
tHLX0K1YVwCyJgcdQRdq2Kxt+2C7Br7Ue66ZwuvMby2PEnKUc0yMu2TqQdWTqwWwR7XWssM/AG19
unWUoC7r01UyLKDrEeQi8RZyLxz1VpXl8QHr9m9e/R/CFwKOWTM0qcIFtCJcvP3tq//333300cHH
yf1tvb5/Wi7vF8tLNkd3OuRDh97cStX43Ysffnr+6PGL3zWgJ4K3wuef6r/+PC9PDfSKWo4EZ1es
e5QXMmtEqeVK/QO7bQv/smsTUqKPFxKBAZJv0sgDe/oGADCEcgG6s1+fo7tENw7B+NUIMBi/yHyk
AUiVXqwLwmoBmHrKaPQljd6D/Iukd3VRTi+SlaqM4Cy5Ij8PLwKCqT1GEVtvl0u4boPU6Ljf1ZsZ
XGsSpQvIJbqBMUJcLrCi501YHdHuarHOY9hoWYp+fE8dcfRkpbcnJdtm5g72zPjcOZswa7ei9FBp
uKase7HbdKHR7NCkVUnuy7Gm62WRrMerN+cBbkR7XoUm0u5oNlSE/qeuw5ZxmpVcnoKLSup4gJFJ
Vd4iKOHEPjk9+6UdHvMw8+mYcxZdxN3nmzj9VExZ+KG2z/MwdBzLD/QyUPLpOzK4wyZs58KKT/Ax
K6bod2a+4D1fMcuyejsJYFzEid6jyNs7h2TFZfG68CcSL2XcTlOIeIh1t4OAywnB3g0R+gAfay2W
6J6/mJTqTN1t9pQAseW5SuCgdaG/cgTn1TQbJs6f4FnbcY9pyaqcvhEuLIE8zosl4Bv0upN6WpZd
mIhiWYOH3SlsP5EPiWIOLFL3YL/IZwWwNyBN9Wg3wSezAin09M7ADOqee7iV0x+RKGQs9hq8q/Ow
vCigxen9uzSXU2CL9gpPswIRLeWOqMvZCxWlRtB9dLlED+3DvvmYaRF+s+KE3z1+9vL5v/yOLAG6
Z/i2b4JzzKJ4O3z1v8nrbsSP327K+dv//Op/+J/p3hrBlFmAwGkHUgsn6A63uYCTzKCenBWEPO9h
i9V5p/NQnZMewbsaD8DUWMXp1RoCjzD+sY/gzbPCIKfOsbnwSnWOUmORbVIdPdQ2qJ3B6ARIwJlo
Mt6UEwTGxHMZtgfVDYJ3pp0AEmpoBz/4rdjiHOzfC4Oa/82kLqfY4p2ayWJyzb4do6MHv/EVFPsW
FGPzh5dIR+3vxXK7YLx98c1AfHP/Nx7u6tgmrjCNm86LydpXzG3pnN5Lk5TqOY00dQfGuyFlDGZV
gFxM4mswUrlf95NoOoExdhJSQtWbqwJsIb1Yd3SuJ6Rh6zHsIKoBI4iaaAc5NrqRW0uE7cG+BiGu
pAlDRm4VmplFEzVLtzROL1lqoJkMZPu9GukSaFpP3lxsbTV6vgJnaszWcGlTh9J8B7Po8Ro4O2Ah
SLqSaD5GHZg+S+g769yAd64YC8rLBP2FzHy5wRW23Fcjf4X5qnhxCetjMp1Wa3RW21TUqbTmNoTZ
wEG1IDmZUxFaEK0pZu0YWvsIrjW+3IokHVe7/XrjOUup4wpmyhFdHLjiIbgkwy++Sg5D7oR2iQ7g
rRtUfDzEj04akjpAue9fWstaTZ4pnKmSLJin6mwjDxIBd0nR7a9SI1+BN6dVvXmIxlGStFboCseZ
h1T2pRLO96nwYFptCfs4tt3Ykw80ncaA3KnIlK62Mbatz0DuQ6HVWu1rUyjl3kepIR+Q8baoB9XZ
YDIgEndx1xhsqgEusYGiMbh0U+y+vNBgzMT6UI2SvBBQsFUnNmpWjWc75NJi5vhKmq1LbAWwz9YX
1XwGXai3q0JpB7NiZvv7HdwDOGORnM2L6/K0nJebG7V5I4rzZJmAVZt2NauZ66nl1iQTSGKHKacc
R6sD2jDPGEYflhQcOYHdFqp7W0yfxaMCyTmE5NhDOKL7Fpm1ML9HJgONIPKBvhDPi+UsWp5l2M84
zsXsEXMMZcZApwz15QDrsxjr8dK79n+CV4ZBpVkNNj4nw6H7jtlTC29DA3x+9G8RkYUulrG9nb+6
FxCKb+S2JbptxkqqbVJsDwQs25bFiVohcSoKqRqFPj2wfAu8BLxXVeB75DLUToUK/+YUc6Ojw1ye
XHAV9Gwj6XYrM8cmoUllgVqm09aNRAX7qkXunhtqFWoN0yaidITrVbkuZr0sEi3lKrnhBY/e8N33
cn++3fpqXkXUHRxJsUbMulKHUW/Ympoj+OZWyweHCYUWpqbz+HufJeRSsANjp8D3oWqTHBujX/BC
evtlUxYbxdNvR6/+n8cffWSTxFg7aB+OJZDlnM4ebKhZm/MzWP4wVLGvvgPK58VyV46ZaKhoP4Ze
3+nAPQmnx0H1QlFlsId8rJpXXHsZJhonygMy5swP076YIf69Od2RMAJIgaaj/tG1/5eyuHJ5RUkI
eEjhbnyEwxdPzpJHsAdOwA+Evlb7O5Ttw865TB71rrOEkoRCKYoFINpKEEGejI7WD/jpdU5b9mQK
uYASQ5S1A/U5X8U8oih4MO0y2BhYaSfJXd2Uu/DZIzV+5DuvdtLTGySzBiOT2mGVEgyVVcnksiox
w/3ZFswzlHuwQOya5BI6Tq3Y6AOs256e2/tHaP3lYaDRhqMydy9C6ZoH01y10dlan7MxcwRmFIEw
CVMriPHJdlNBxAc09AZvMcEkfU1i/QdIJcoob6g/zss3eN+N+tpEVKYoqVLAsQUSMJXAYmAelGNY
KnXJDgvPl54+ZAedEty6VK9MwA5ayAG/TX0xHtuG8CigumbHXDFpQfqNohtO5XgMZRWZDSQZooHb
cN4/Cm5RPESFlCBW5WhUVZu/udEBkMiqXJGiLCova0NsUYHCCD7zZ+XUne/k6qKqRVNA5zMKu5xl
XjHLijRPQwRwH2CCdUMma7iTWFfTopgJpRXwSJekRkhmKmAGQOHkxK59tegwioTc99HvQZ33qjeE
nGaqJULYfqjBNH+U9PI8J9TQfqJ+UuB/tVQsBiedDSZkBbxwuNZR72haTQ2g0sYplssNEezreVom
+IK601e/9RgVl8X6ZnMBnApRNXIsHwH7KNFFx4dlXaoNVE0b3fBDMZ5WvaogLRxI4MsC0rxumtgL
fBUUgdkaQwIUe02WmIxX8etKq+e87DWr436ywVV35k12HyhUqhPrEs+7ggWpj4DvDamG9ayBlo8H
vCVTMzY+q6TRfToQIrXOuKzKkXZcnCCsfNyDvKPYNBzpfnIXsZswVnTmX+OBuovdy4OvfRsRL2D8
wH9lPsIC5q+OgHuz3kFmaHq7Ytpttij9sRmNW9iSGkgRL+jlEVX08IakE1xnm43ZwyI6kGI2kONL
UBHmyUWpJLRa8Tc4TCSBYeuQVNYFri/1crvSn9M0pei9wvtkp8V3Sc8XN1L2wo5/PB+h/NyOm+tK
t5leWBKs9eqhdO6K0ViCS0pp5Hakhz4aIDrXckG4XF9XniEucKrij1xGYKsQDBq8b3DDOn6EBVGl
sK2GtvK3j3K9xk7ioMONvvlxKHrpueIzXuyUIxoF+XzrTQ+sUPZpL1x3XipYr2+BddUDzbId7tPe
GvH1tmVA8IgvMh9NQHoCGk7xecR33OFv24IrRHfdvB9gt+TvM7iPO4pS4RIyXXPIVgdwUDiFpNPo
Twb3LCCv+dsYWbje7qVfpzxypiGIx9TZOwsogPutM+nFKror/Ebl8tQeVR53TOd6CWKyC3iB/g7q
H/WlLQgi2EuMsT21esrSp0uUPNa4gTyT8sOOfapKGxuPyeIDmmIPtWVz3HjIybc366LAwwmpFJPk
TG3dxfVmPdH6sXGyddFrXCcGC6vXkOonDjiH2FXxDw6UXjKfDzC7D4wTu/zKbG02IWL8tkGVDJP3
uJPpl8/hQl/7238JzfsqjW1tJKp3FUZ3BXPSFa14pJ6Y7KGYVSUDGQyPg9QzLEHbYf6ck28EUqbT
Bv7vgs64HpWc9JYb4cnaSEbE4BGka7RoYIKcWKiQSe92XGDy4d6CCdhBjMOiIIsZ5Cw6ljN6ku1i
CdXU9kmmWvaf4KasRb/WxJpBX5Yuil6DlIylCfQn2ZqVEZ7KkTtMUWxkGsbKkR54Ud+UJkzj6qU7
EvGAKCeji+Pp3xA/SCOv6t6ow1xPrC9G4/vAcxEVgKp2av2eXf/H7mr7FuH01QNX9TBXnWGIZOkm
zy7zM/uHGKCWzZCM/89MrzJqnEgCK9Q67LZZUj6pNrmNvMQrWSdq2mMxc9G9esLy2FVNdHvcnCjr
aCdNShRCPoNyD8xddLUK9W7P5Z82nzQWHgqgY+vNdLvhZLNKBSXoaKtz1E1BfC4DyW1u/5Q+0BN/
aPBpuCk9iI5Nw9y6OaRi6XDWeZjFRIxv/uGyRRnSeyb1i+k/nOzunVI9YU4J7PH7J4yisWnJGNUi
JEJB0SQsdmloYlVhg8y91HK2z+JVxfZduC1JirABehWiShZVw0LejuhbTZzdkKxo3cKdDgeJWe/s
XMGicLYrzVFkyaU9iNNMcdtK4zmM0izVU/XDep+Z+mH93yfqV5kkNSxtc9Q5AAMHJo+Stz2jUedN
Uawm8/KyoHFG83+tLcHql0YsVuPtpvhV/w2TFLjOAw1NIyl/097fvFIZF/vFYgqIwCZs6cM1AAPF
uCrkLLIhOKE5Pn/J7ozsz2wP5ols7js5KDJZtlI4Sc4n0wLCa2ODd7v/2hnzdhuTbeO7bSsIGyuZ
+u++qXTIzstsrRev5aiMV8M3ZWQ57Mf/kJua+L/n6wz3gj02S92k4w0fDlo//H47b/rwbuuHkKe8
4cP77TVWjX280/rhj9VVsW5oanNb43KA5ujfRBBwIteIIHCzremyjYJgzQ4vEUpe2jVT+jZCRazY
nQs2Knag8WmfO9wsRvamhz1I+7ongt6/pVwyidg+gNJMPftvS76JlWJNWZC07TtIyLPPCZjLutaO
qtpt1hE3QmKo2MMIKGTp+xovbrcr+q0YybPsv7EZhH2pIsIAHbacclEx0KwbXwLY4ij5i1yMZ8t0
SLSo+79E5s8p3kvdJHpG0Q6zc7m4TxMySP8zpUaL6LI6A51itzwIGcfNA19BW/bAVrLUnvne7ZNg
fCcezmmjfFWd1GZymbpuBjY6uC6EIXa/gCfH/BmiCzVo/Sb/Xt2AD8DzcW9kGqF0937alvnXZv5r
Ftvx0bOVpZBCr+6nIoVeX7cg26tyouARaJD7EOKI0ZObyXoccpR5HF8h5nUW/+qW0wrfpa2TaSlH
JlWM4V04hDVPW3TU8BvR9NgE6uGaNYzXbMeAzRpGbPauQwbOQO1DNtt7zN5p0PCj2Y5hi9sPTVY6
N2gE5ay0HEIyg8hR2p0V7IfNYqMa79un3URKx8PB0UknMgxte+Mu66HSp12B9GvfpLKZCcdM3IUQ
+yDeobDdo+4QM92vTWfC+9Qdym7q5VpCqbNWumY/iVzokRL0e3Zw2kMH4qJ/n1uA6AbMadrM1ViA
r3J7JtnrcP53uYMP5pJ72gvN907nZUY0kQsNfebA8aaorQ+x1kf65IGsY8hKmKaihluRcAJ6qb5g
CRKlCUReMoRGFFG+1/Rn0UBA+XN5ixxprlochg3ebrY/7HS/T5q1v5cEQEPP82JQmlW6YRxzgI+w
Lhho+9GXDpRPbZ97Byy5jw8I5iaKCgs33xoi/MSExUFSQ6xgeXaTpAzTTznXri4AvYbzr4GndCrn
oEcE49nbUi97m9qMgMeDXGx5NHFbCoOfRVK35WHqtrwldZuGhcGgCxJjkzoxvfxSfCq8Vlyph3Xs
duzRNEGH8JvVab1KERVkHz5D3K9rLjBcDXnM9uNqVXIfrn7/LXDnnUlsFinnHEyiv2/EfK4Gqv2a
uQziH6VRW+hQCTJd1m7KN3u91Ldp6wJ7fdaSsc1301JVnLjl2xyz9nDKArDZiE9WZFtpT/P2d1UJ
DJxYPZ2s97rf5aL/7bJkwIfcR5z2PToI5fbpHTrbqrJtt5/4PhgB9TALiuVQE/efXIIps874TCfv
47q93mK1eeB8Z+D8xcNhU+JIiIsZJl1axM76dS0W3mddA69Tb2bVdqNTYd2pIc2QTn3VA2afCvfn
Irdxq8IsgX2eQo5dROJaewCS+rI2NNrQY4qaxKw+8HfvKAsKrG5yjAn9DgsIXmNGRQ9mqDwFZmTc
KN6mhHGNlFHXDZjKRu2JyI/WnujIg6hhsX2t23XO5ZzYU3weRJAei+NuwFUx5KfYtisMl/7xOkiG
ZqNuKRWUTYTWwK3A8wfv/p/SLR/++CS5n2DcdLKqlBJTq4fvTpBz1fBEGo2e76zqi2o7n+EgOhA6
BiXOZQFmLKbB0HKCJzTU9DmhnyoS3b7MquEad7kNipvVoOcIzUcsLfJf7MX2Dity7JqQQu/DYzqa
yWezW7F2AEVmsoKJ3VXnCcMhVEw1taJPzFLghtzr9nwm7RNqJKJhqX8nEKiFCgtI+hDHv4s1kmcf
RLkWs3KiQcchcn2TzMoZrSNFPk+SF9vzczj1VkslHyP0NGw5SxwRmHBanBEAiXkJrutqMx8MltVi
cl5Os27WnI2JQis05m59rkTSFCbNSlZHuuG7MIiIXwiGMjlnnwAHaKLE0cykkDOMoQJOMX/Y5lQW
aOPOA269XYRAQO/DtENjtDxXnBteONbmPWv326xVzbk5YmaYfm3FrHJN2GHeUlflI6vdpnsLQ7Cu
+1BR5jF0AsuSoCi6phaeGshbVi0HhG6X3FnjfnmtU/LxAFybuXtPVQBGgTdfmaugXGK8l3MwpcwY
r5cCPBHLQcoKwBtOki+/1A6gej9vyksNZDgFAJBg2VNcb8gUPLR0PD3BNydjfsfrjXNsdg90Q70+
Uue8f03n0uvN8dHnwxM5tvCQtS1Q9P7Oekf7dhHbKX5Fke2rBQFEZqohMhm3/0CHQlvYi7NeGPDx
mcV5i7z9xLy96F1HYuqWEFhO5zDSDbuqjuQu0II2fcZyj9+htO1l4cPeGfv8w3dKeB56Zc6I3Ln5
tlSj9aksUcL7gDbcQ54BHKR6eei+EoLhwb1P7n2qeGteTTZAgDhQTVsXRY/73bXuly3FTM29U3xR
Vas65c+ohNq8+gnkgjrqJw/ib6jxsqrF5Lp3DBRVv0+wD5+6bUkvivm8So/hPbLAhVNrer59Q/ex
FzgK6t3br179e4Z6KascgC3Bqvj261f/9/8Yx3jpdPjmAoMrKRfoutOB/weUZhd0RbE/fdlLoUCK
/vdYMDVn2ReqzudgyeQ3OfwhcJckFATmG4X3hLQ0LxdgqLbWL5DKNeQd5LLiXD/BuPDpfDuDuGOR
yrVa8hJEpwDcAmbJjNC9CJxsPg9xnIztFUDYGwK64UtI5V24fUO7LVlcLYK7pjneqv0ALBL8Wm3J
akXNnZC4F2r7mW7mNwCXMXkD7bxaV+r/ISRyosTqFcEe1Y4jFzZ4G2af6qZdHVWzbYyxdua0AiGs
mOkOsLTipa13Zw45VLrpOxFN1f7STWNEYxfxezXOuFmpjvaTdfd1KqFtmX9718dDA/qEm/uJm1AH
s7omXyVuIW8wCV4LoGp7brnBJ9n9+14Ux59sab/woAwz0OpWlmo/T66PoUHX2eBPw5OwaM2lwNaH
9+TH1HwoHTCqx31GBDayXxOHS6ngsrUsRWThARK/62VbD4xvB8nLNQHRXE6W5Xw+wWYSfM8E0Ha2
BPTnmMAJLwkHMxgcqLpnag7sdT5g1G1tdtO5UgNQGW2GheJvx2y61BgS+GmqN3AQlRzsmu7h+qer
Yd25iHvWtffuduZyUaNpaDyRxfGdGmxMajmycC2XLJxPIEpIg1VtksPrO9dfpQ0xSjRefVOv4h8L
lSG00BKyf7ShWESXs9ozwi5GF7QqGVvOTUsai/sL+lYr1V2tsKD0Lt1DsBmuZfTg00Oxa/IXE9wO
B7hRqpFH4Gr4VB9h4KgMK4fy9DIwDRIBfZSAdQhEweBg4kasDrNrc11cExDSKZEuanVgY4xPxhTA
uDq7y+L2SzfCuiupBhpTu9pGLWzau6YEpZigUsmnM7FXE0wRwmMBwpPG6cTlf7UGtxe6MSIcNOwn
7L/3ta6iv5ggZijfblCKkPUseZB/nsCW7WGCrgtEN7KduVDl51rU1KwUGaUms4+tiGccRjUz3ltQ
XRreEVaS0hc/P5QHotqkYAduUPrc70w6b4Lu0+m8H77KOZ03p9pm7BfVhftQUifxgC6BbQ6Te3/z
6n9iYvPqPFf/e/vo1b/73wkl/RRAMhP17BwZyRlLwLGbUfYOApwt1vcBSmq7UH2olbIKWKudV69e
WRBXQmRSNQ+T3mJyo5hptd2AVUadwaukhnqKdb66+VokkVEP1Yg8xVe9eFJCxWkj1X5o/ouX3/7w
08u4N+ysON2e71NwWi0WasJHbjIj+AqlfBcVcKVodK+q9XzWdYvwx5FS8S5h6/k3pKpCvK4RAnzF
fXr37obXD5EefQWYMaDmk9L9fVHXk/PdyPDsvVij/2OQuka/VV3TP90C7IYH/wh8d8o01gBMYBIV
LiYryPrTt5QyoX1QGs4mGmpvOknQmtEdMjmnwZkDJ1Nv1u0YTlyZwRDVPbBe5T/ycgjwH5PeDITn
FNLoZMAAZuXgcqTsNzV4ECQLmpPaJF1TDGI9D9TDSZLq1ZZq7DemgJhwaAnFc9MCitNtUF9AE8Pd
gDr/lnOwjBabaZ78VGMctOpNjUBvAL/2482PN4Oj/MgDUGOWgexu/AuglCvOZQBJl9bJdFtvqkX5
Z3tzpUf8gZETLth/I7/xrwXKe056DMhrLiSWdlKy36VoNEtDYCzNqYQtpB+wjS3zQdraWNu6LlPr
4g7MbpGRhtcbO8/jtZpv3LL7IiB1v2RRobkSkoLCanAXg+lj1gbN5uWLwUyaaTwV7B4pHQ3zjzw0
Oa9JaqXhl31nYsJUjppegDinX8iuTTHjH3/vn1FgreK1WTLRKxG4G439K8CWXuO9gmZjUDQl+j9s
lKZTzrSBM4L9yht46RsGFBrT5cFb+lqLbpcUC2hjfuGYg++xCQ3ivUmcN61V1RE03beJyoCGEpar
m55NqF5LIn1yN9vZEJPzY0ex7WqGpJGo03BnBqyc8RjAqNdmBBB3TduMSJ02oqPjHeBKkt0I70k5
NBEsv0RzsiWpROzVZA4Gnj7BxaOita7c5AhAAG5RCHoToOtAlbXTDU8JozRs6yQmiChhJ1939MAX
CIHuAYJJNZolBgupTI6K42pR4jFvslQMCIcaM479RB2PBkd7eYK1scuxfgJnqFumWoVtuVwKj8t2
xoQ0VSl3WJ3LtXzW7wX/1I380zej73ES5se1U8MQsoUziW7+kAMGtoTDAggeBObtxAOLbPXuTVd0
q1P7PSRjoi002PcyFwAwsqEaGdOyqRqy42C3RLLeFZ1g4whGYnBfB2CSd9ZaOGojEFwg4OddEWdT
992oBDMDQT7kyrGcwN6Atz6R+RTXjHp4dCk1PLhpxIBn/G5ENfau6hddbILOR3Ok24Iga0pZGwAC
NXbSVOvFjQqB/V0pO7FDYJrVdoIJ+OghWSP85dBb1OdCrRWAwYkdDKuByj2UFNGkt4UsKkmKhVK7
fQZ+K5jrU9V2r4sJyzpRzUk12Nnjeuy6JNdq4yqNUsx3fGro8/4ldq5GerJox9lD+Uud9jdKwBbu
dMC55hE3pVZ/ach7mO7g0AHHTcMTaDPBbQG5qYfslGkDXUsWC7nH892X5v8zy/hWgIU+Wmd02jBx
MPLzalVIGyipM1Aa7sga1TbLhTuUNhgCKV4tfY/LlKrZddMCuuBS9JFq8Nl8W1+kMcwtIovve0YD
+3GyudgxM2jhhnEgAHZsV00nPfi6dW7YSbafUBwY+T9FrAKQaPemmI0pmQAXS063Z2fFOpZ6jDpE
ROHojj8io6gT1cNs8p/+ZJpKMGMH//a929z2xQYXBggZV55TzENPF12Q75/TCzRJTggQ5krE8gIE
98ok6rIjunCijn9txvR2FqLWxQq72Y7xeCfmNgFpdoJ28zT0mCxP3kbgMrNlYeuP6gt666IaEfRQ
yePnz29Xido69t9N+Hr6BmyPu2tA2zGWTWaTYlEtrUEksirV1latwVJKIdjuudm8jJoLxKc4/k9/
+P34ybPvfvDTLJpS+ueH50h2aaRu8z89p3rUO3FU1YCCbjuGN3A06D7+/vHz3ycPnz5+/jJ59PzJ
y0TNZvLzw+fPnjz7ffLsh5dPHj1OoF/Jt4+/+en3Xa2Hsg84khklXeg9OELjg9DlRpsCaBb7VKxv
7tycDvDbzHHLcY0UljC4RHbefmucJyiZaLVQE//28atfDslGLuyq1f9H3bs2uXFlCWL65gjYa+8H
x4Y/ObLB4CZSBFIsqmdnBhbUqxbFae7oZYqaZru6BkQBWVXZRCHBTIBV1RqNf4n/mn+Cwz/B53nf
iQLZrVmvQsFKZN73Pffc8z7d2E0VvjX5g75E0w24qmViqwpR0bzZzKtb4OUq9g/L6UeHWkHgxt7g
31VN9hR4+vJ+OINhhTgb35H19O7KSTpZR5k0mSz9tsE0rSyjr1bshuPkHY8R6xHVVs0uWdOVpOJ6
ohoBMa1nQVbmDnpHZb9kBmgvi8giGl6GdaehpzZ5xYg7df9QyCaNBNRu57gnHzB/nVdy6L2rEPWP
H3h279eQuyRYxbUBWO+a4xrjon71iw1JFo5rQAv7TRgXuUNtKMFF0G296vyW5minv7cCJE8ijKiI
73Dxct3AdxtFyVchURy2MOc50RAVnaQoiEZSmiGFLyPhZFJX34d3XACHuqfTT9nQC6A9n/aowGT8
no/pvcO9d9inn4bGJ+85BQ+VBnOjXnvDfRzHMG8alAUsGb26KZNg78V014kTTLB0hKUF3s9iHwpr
0i5uyFoLx1vAWwysQCk7s8/DlAiuZyniJqwiKQiL7J9lm/pXyQuWE6140j5KeqNYAtwR9EO/qO+/
rFvZZLxCK9yE8qtvv/vq25fk52BevHz6/IX75rc//vCHZORd+pJdVDARQtyY4alFNdOyaVtUXyXq
nJN+uMve1JzSZ1Nh+lIMSwd8Jdya0P83Xz19/uM3iboqIaKMpzVCRb0RxXgyvyzhX5vxNr6je1df
a7656V9k1BPdkJhxE0d8eC9AICclcYlElADXWjJc0PsNDpfqLxygvCQcpLHi0arjBZkEjYxrgrRN
OMHhj38LV1fMI3dXC8xMFSaCFWkYUFuAz4nOERmGzWWhRBeskz66NNSqfmdIKDK86LnIWEcGaJEL
FeKfiP5X1AQ0L42hq/bdp296aICA/DY3/hZzxAJo04Xpkd6eGR5szPkdWnOMcq2aF5p0xZIPNvOp
vhuLIJH/NfQBjLlZUq4ThzzsG6x8+oCxSk07VG3KGam88gcqLxPj3O7b6tDK4nfOskgdaZpBHPIx
I3abt8N237pjd9/7E3C/JGahVG0weFG/cwwGHXc2YqtmVT0hjXvUXKA1OwV2uTAjh5/+gNGNIwkV
OARXUcq694TKVB04UBxcI0pvAFGT8Epzu+1bTL/H7/BgVX40BbYl29yJco3mDv1QBvGuTOqh3Klv
qhsD9rMc1ojObjhmlxReaIZZFPPM8jaPZrRgkZFRBp7fYayDVtV0VEL3JT4W6OFz15WABHFP2Zjy
s2z0ZPxpgGARod0ims73yPH9mCepEiqBQ01jZ5F3XbNXKJtE30JrjmT1QkUKJOzyhVu9sQdLWiin
kXqDVnzBDMrluuk8wRxWI/8itQluZyf3LrBqN9kHyua+pLVFJQ6pb0R0iZklYdfJpc0aKF437+DK
MFUlHWC1WF5Rq2VK5LYMqFcx69HlohXI2x/zIpkDiwsnfbRius3fg7jRQ4pSHgkvapBkJrUpPRuz
bharFPbx7JOkR1EN7DfbevlmretqFyUwfQjmdp7fD1+GdmQ6fkWSK5XbcK8ljhiVEO8NgwgLAn7A
EQNuDKaM39kukbANSsGoWAQk8tqyz0lFpaWDn3/7T198PeJa44Rn6WJDHAJ2Tz2TDSRlFrW4s7kQ
A9hhcW+AK7oOZYoJm3cZ1aunX/3TlKjjDbALmLC3bbpusqre1Uu0LNi8idsmo42wZadnWuIjcrL5
NsuMQ401sQk8ZPYiSWCwT44wqa7lnkQp4FvKNw0jkS5+RlNVXGoOkq+SN8Q+civWVWjI8Xu4kVBs
bW6hsUvmarQtapVFRUJM3nBcDzj7QYOy6ioQnA7i8wp07ezE8DCsqEuXeuxzOvSSTGuZwcI7lEYV
8/AAY9yHdgIvokIIDt5IIvhgGMN5Y7kH8uzP6Q/NnhLDcuLfOzf7H7PtsH0o0ER777HsDfswJhaH
ZbC017xOYxllQTk+6QfHGqBLuQmXL4H5Q8bozQ0FiRVpbDbNTn5OUhvKVDAollYWBcDXJy+LQ2nD
gJRscgBKqZhPMAwX0ZzUSsXqdEyNNMmlrdwBMAYuDcm+BNZpsdwlwOxjJRikUZsp2Cv2m6AYUmao
tqe9k7a9Cqdd9fYsqGBKMm/91q/wK64RVlDPMixvKjw3h42nB5fuot50cqonXQXsCYb9YQNYsonC
IGYkBDd73qFfw4rATBuyplKoH6RopYiCWpIAcFZkzWAtIEgpCgbe7eAMTFM7ywCT48vCkOTQI2WH
5pWAcVzi3HYeNBykgJ99+w1VbUcKZGE2OFfyC8+B3iCFndmeuFb3yzVlgDZcoGI+1yzZWsbl0kVe
Hjp8jn2AlEdHHLT0VpFAcZ9B0MN2GpkCOQKBYfbQNO0akO5aeSuKa10Rj3a3hRA27E8MZ5D9Skgd
2NgwxJCp9siWiWwjguZmRxZ94vXsTkhyT5vrMWRDgCPYoVMsdjW8qTefPhniYqnctyEDL+J7mScs
WOwcSp2htaZjAT9aqS1RbiO9F67eIUTeqUq66kW/aSC3zG5TtsL0zDcqk2Ju/27pwQe1rDbUQ1d5
gf6Ic9WlpW2HuZAlDZjVFfYW3SD0ymU6r1+Uwg1ZFvanIVQaTon4cNPgniNPyuOXYa0wbuKRp3vh
nm2kj5TC91YNg7MADaRZ4p3xF/gKOwTqlfwPFruYTiRe3BknARdVIrdtMUWA+oPwvjdp5SuM6UIr
aSylul0avURUsoSK1k57wQ2AdLlvQ2qIxEkzJYdRAzxaVSFdJXPAskRiy7DZwRzqA30Lh27V3HQH
YD3RLvb6xB0BI3J8E3Kj6xUH0lxxPMZ0MVkKajJeB80USJ9LUnSMFNmgCcnJEV7b2MZjPwMIhuIn
BIBrewYUyCbOAb5O78xaw1tLiYBbJsZE5wyDlLgmSUadCw/eQ4sV7cx9qmHn9DKdWrgZI1I6+KgG
EedetftU6S5F7IpKHcVxi0ReV0U2Xh5mwOTYE7YkVzkMcNkZeYAi5aJHvg6Sdm/X+86iO3HaTB5M
FQHOfARD8IFvLPIF9ASEZqBhWy8IvqUVX2JjWpanUmWXEfhzK6bgfXnFPCCUOpEVlaxvMNxSXo8i
p5S16wRHGCWtiwgEq1yWaTDRjdC1gt/Yf9QjyejebdnSmcnUtmJ+0W0pvVWy1p7qm+SU4+wOifM/
11vV0SOYFTJ2/RkbTd8i4dIv28L+Upt9O0gVdMQNi9WqV5XjLN+munEJQ163nCrkGbRhSH1DcB8l
aKY3+uuRqylyjbGut8cMsUO7ITZXGE1OyI/i0UlRHr7aVAh5vdU0EC0T0rwd8rP4AKxHTSo1yXZe
MjcMuKwqdbvvlFvHTnq9651z3yTcCWSf+TP4QLRNg4e27OjtGN8B/7GzVrTiWgg4Tp7qSyBWq5mj
xhxn5xdqOIu7lcSndzXcVB2hThx4B2QA+kW6KhCK/xYIdIy4B7nlEXL5hqeEY6eeALARgnJcf6SY
aiJ5OBEkFQ8IBy+25SIcIbQwstw/DCCxitWqGASvl0cOlS0okRFum3WHgfxwA4JsNBSiE9qD+3VJ
HHdqmP4IeF9oEJvMKJYlKRr1iTHtqdCKogcuxcUJrgS82IIrAb3YN3cuT9tlo/M7HcaYdtKJSoAx
Czg6V7Q25xe4OyQcpA1YLjAo4IJulNXuiu/Wrlq0SHoD64v6D+7Xz0Ai7hQr6JIrldlTfjd1sykZ
GhERh9cxvQFII080RHWAsUkFYsXLFPel7HXvIm3TP+EJaciumw6Gngk8B3wCMMLyJiURJ6DDUHkG
I2BxWn4lSTppYuonDwmtVVwrfPhIMVTYvDXpa1N1JRajssVRQYakguvmBcjcUF0HcbYKgvn2basL
tDlQa2EU3aHbhgmB0jGK6+X4FPfNZh7CYsiWzZgecATo26Rpj/cYVYkdxxAbzRyZEvwuelqg/uIW
hFNZes3A7yJ2BtO9pZacrRXF4LK34fXi+nzF4rWpb/sWb7RTDf71zWV5tvCv/1qwzExW0//oHegZ
LLT/udnu6ATO+CCS+IjwBeLHkQz8dupFUdJzFNn0Rrelnms28S3xTCFnEd3xzjRS9+PAxeiGubSL
A212VqFHExqdbll4ishBhtFnerT1+Blag5HcHXTzNHhclqNtUZylnRLcNY715awqw3HgOKdJo66t
seiixaUaPbZNjK62g3R1nb5MuUhKF0q5xHV2+g6mGPcZ9qdt/FeZtHqQ6UmdHrA2DzQYSgfS5T8z
gu0+T4QAtG1Ftx3PVULk7ehKPSJwJ0kEIMhJwByqcY5rCH0AIzheSyyQesBVFQU/yDB6Tfdu85uU
mEelmXW3OO9G6Ryp/gQ5pZnOgB+9yVp9OGq+RXNkjKglyIF0NBi8ffbqP3z00UfzLVmklCZ0crnf
1eu3//CqyMlB4Uf4VaNmk/2OTYBliiODcX3Ih8W6LBhcQ1LAZbNeVxITSsr8UL3do+J0IDjmOb12
aHAtgAi6Rk4tmejDDyMpf7syyAayD6NNNhwe9kH2Eq5XUjRjjGYkyGzKo4ZZZVwHjeeqep9zjjSH
rKBZisGDLMh/Q8GSSBtklqutyGGmZVHnDu9y1lSv97CIGJoamkErhWolcSgzZqGQdMQur5rmTce6
TihCfPiic3UpUP8p7okJPAucz8CbofDi7BvF0S29qMJudF5jQcmBs6gwUcteGgYs8y15mq/vmMi9
Pq+IExarGqZ1gTBfbJEuv2puKE4XyxEWcifurtrKjdo1zf64+WkM//xMS/HHzb9SAC+NUbC7aahV
XHQk9zkOWIPtQoc7CkJjE8uh7XZlI3/RrpiddgsOFP2J+hhI+JL4uzmmbkXxBP8ybOOoKGRcJGgg
EYlthdgGfOdGTt5U1YqdzbawlEaTTSuFWRQuS6onlo4dVLxjruPiovOjjPnxmOd4zhbbrppfIG0/
iuIsa0ToOZkY9e66uBFLdo65wAibCzmRngXR7NlUiQWmmt6EoevQgAxofSllcDILslqkwHEOe/JD
w6eEQnvDR7SkAEZr6KZoAliJkjQB6PAyna+b5Ztg6bi0SphZTIjEn3MloRIISmhJN442XSPDMLXT
cMyFPTpEqscXjS8eJDbKk3hTcAyM8Es0EroSol2Am+SJRjg9i8mIJSnnfhomxODUzaNZJIE3tX7u
rTUJazkXGZVIUwf+POMbVIL8eCh7NNxvzhdrZA0A3WL8dwwnzr4Ybgo7N9gEeRfzpj3KakcCLNvn
OB376VNOseokY+0pAvO0L0cmprFzK06puzM/vwpvy6OTv5lCu6iTfXTICj4Yx6OT0FmGx49r/7fu
sfMwMJ+2w6faHLgXoZEkwHL9rl7tF2vBEDVffRGSp1uA5AI9hpY0Arw5YRV/QhNUyh1H3tKAv0WJ
/wUKSYjNNdeDWl3KFcH4ebutFq2OhYwzF50RcazxGoUB43Y5Nir+GW8XN3PFee5aUJaFUZ4D6g6N
LbX4qal7+liixuB5XHOwGv0Em+WxxGvPifCnnMIU+C9/Tr3811ACQc2rsmB9yA6UB0JghsCLy/Eo
W7uQQiUURhLI3ADGM/omCR59kIj3nwyD2ZCcdlr3+V/d7UDIaPZtdg3Uv64hdEEiOdiMyz36DBvN
A9vPZTlDPIY6hXaxUYp6Sq3LWaIXOzI+33Fga4oAyrBs7AAX60Zsqm8Wd2JpEwG/jAiAjtfcByC9
BHmVp5p+YocxjgBIHju/l6Q10lcELAiztWxADCtoRByAS8wPSsu4wdNkaFiAY1yZLMuPSknNNXiF
8yjD8/KNgbnKXOaJUjKi+B7Rr9rM41CBii3qRxhI9ihHdTA+wXA+HrEVBbRRTE4oOCICMyzUqYcW
6baK1u/nyGeX6YNUwcTEt812lJ5q4hNP5JRK4ErQUuhAD5zWviH9a37fQuHy2NWJlkUbtiVmujfG
6JUSrCHX833bwOGuxC+45VQ34tX2qeXdnM8z54eybJiqz2HYvNId5vcljMMDI1te4UAwltBFfTnO
mi1GDb+AQ9zWl1e76IYiW8/Fuu4CYp5FGA265Ww5R1/3CT1tVp0h7+oVGWX93WO82f8G/sGlaYDl
n2RP0K4C3iGe6lyMMc5OkCZHc16OBNywYAUHqVkMgI908h+MePwaWhmYkRH1/MkTTfWCM+upS99s
Zao4wWGa/qSRbn99je6KdHYfdhn9T9HvbVFeTtudSARrWH7LSBuB4cjPbjNCbCihs8aG9S6Kg+57
GvPKb8qCgVRHC2lyGrK999aQCpjS90AF/FyY2R0qOYLPmPql+XO1gUfVpgKTeN4Qxc6giA6LDXGd
o1y+5UWCwTJKbE+UilL7LZGOs1ls5MbTp31i4aW8cEG+n9qcI9M3Ny3IBo91BkVMxNsN9/o0L4/s
V87qvHo799o7cgS7oPPdB/X7HpNGmPD6pBfv36lpp79X6lF2nC2M/+q7Ds3Wm/u2XdCwUVwen9Gr
L6HTqTcuwDUjFgvOrQSLJVXThMvFqtot6nUnOdfKLMrincO1c744RxmRcQi6WpAGGDWSd8bloyzy
IG713iQhOxu4ObqSOTC95GjOi1NBowG7ppTx4ZPmaejtFRWwSSZMPjYGDM3upqo4FxXS/OT1x0P6
cbOuui6bTBQVqWhxZ0js7k29hZuHHQMRhHYtR9qXPD6GxmYLTJKF4U1WL4Fo3zXZm6ra2rEgBX69
WAsVLEbw9WZLXikVDw3f3RleDK8p8qsnx38xa0H3zgMSqNOzROI5Xk5XTIuduYnKoADdbeZGLYCw
AaqqMA6ILXJoxE/mRaIL2asDfVAJ2wmfw2N6QTCTTXJQPgpqkIr4074jFwMyzaR5oDqK+kLQ/nPV
Nkh4XDp2J0HM02s0M5Q7vxgTlcKDS5g+YZnTmiy3qQw832d1huuUfZ79+kmYggYFCo+zOOCB0oZE
X5FL4u3ugDjkFLDEDwCpFA4Q6BILggq6+UEiIssdSCZ91MXFmFMgviMR6VVzQ5RO7YduFZjhFQmT
YMheyxo5XyWlB6Pl2cxZ7VhHFsSlpTpF0qGWBjFx92WS2pi0SWDPBoWbdMTGhJvz6NDuKDK5b3v+
kj0K92k6SRTwN8sUCSciHBPmdvQ9Y8NtU6Zb9F84VkzOscEH2kQW+Yjz6/iI2aciV+BgvYaKs3uE
c8cSVN5Fk8SuH4JAjkEeCdj5Ykfs0S3CDi4gymCxgYdtfpg3AM6kHmuXY9NhcdYjAfcO5ufJc5kY
3tcIXMaD65qMuTAy01h0RHBC2gW9mhLDNOgfboKtO7WjOBvLyFnyYEb62VEjDamqFwTy7zHu7KFv
mJdgI0/NoMxYY1hENPLIpXTUPZoC6JZbloUJJPUu1oMsGEyiCYHAJPDvPhDuadgOPgFOnpqwHz1c
Qiz/xU7By9RO7pMjafnKLL4RhFNvMPBp7rDEdAwxboxgxKDxng6CvcMGisIbIw3+AwbJk753lEHz
7z3MozBcLxd1/y6LQTlacggklqRX79iAYGR5SZGNsP0F9Th6IzjnzVlBM39DtBm3qCjwDal5+NC8
ObM5dLEdFWlEBF8C+Xx3XbMg27tX5RzHl2PvcSKRHQZLcjBM52XCSHT+pcwJe4P9Pusrmz6d1J13
fZkzJAv/5tDy/SqxfHQ7HBrx01pD98aDNn35raRaCgDzpzdTHdbPKBTNcXD5YY1blmhD5/NzYcLc
tAtFNlacMcks+A2MIlGL3ndo+64r78C6VSmOz6oHS/uWvv3gb8dX+HteODNVzGmnJ3Olev5Uj0JQ
vXfcX3OyBgrD2QqCSEy3H305Mhf0EByLfimFuJgm4szTbAqANbiDK7TFlqTUUyrIEIXCEX1Prx/h
wad6U8V8FMFuLuJSaugR1XNPp3N7uxWC4fJYNtWN1DplwXHd6ZaI9dCU6PbgQPD0VSz96ydFrNLy
D2pCG+MwHnnINkVZNdL1J9mH1nwU15SlCDQqqkZ54muXIhs/vzJW8aBJvg/e/o7S4Kkx3XZ1/vb5
q//r33E2PbrFgIlCB1ljNsfKz++f/pbTr3zPGffIgguz2GW+WR22Og6zLJOLnojpVisRY1OmcTXX
vGyb/ZZMGvElCrvpzWgoqRQlLgy9LJ1G8skEppBHlOCCbuLZEJ35KophN2RX4dkQ7j2oMjS+Fz1B
5DHJK9ZHWw0WRNm1kTVYyRpQ2mXUMnWlBvSW2aIpI2lrgswHP+WAuGBY8CWfSmFY4VFRmvc/ew2x
EmBvNFM2QbxVD5A71Ujn5wCXFGG56DXgFNy3trpEgW87+n51/nzzrnlTUXZvqFrTL41M3KxXPKDM
XtWrcztOJ37yxg03myqLsee1ucGBQgKcfh8yizlmRdoAFAikQ7fGbcAspA2r+H1X7VcNgi+7y6w8
twVg7tYZ9m6EhUSIOmoUsQslM04rrpWcGDy6VPgfXkPq1k7MHCmAvbaR9BSbu+z5d9lysd1Rmm7f
YeKiFdPdu47i3dFvgJKLOaYwsiiGB81G8PisJtlm9F4MDya2+5JuwVAARtAZAIqVSdiBkfCL0ZAH
XsmXYeButAdcgHHqZ9Jq2e073DWpFTusqocE9o3pMvYAoW3K94UGx5/Rcsb5efoYpefUtfv2hGTq
bRs4Vd8YnedL8rlfrH9P5q+jsFyJKDV+C4dtNPwc01/6ez16/t3EbGomCaiai4vCWSIH+uEQuod/
RButuASOJEyEORwJKaqAXsrW45+B4Ip2v6G/0BrjVP81GepGb3fVol01N2TYzv0NBv9ZzuH1on1T
7to74vxdrIRpq+cCY3P2ExvN52QtyqbwY3t+ZOSeWtJOwz1mFmgDdYnXNBBj1ZKMWwfJMGnptvkI
O1PQ+eNUZAZ9a8x1JY6rIk3rPyANGuvgud4YGme6WVWaLpx7in3c5xhhvZ3DDoxs8VIUS6baIOyT
eoL50yUk3UEl1rmrWsoPGK5UEmXY5qJqaUYpIwKfojC/xPD5ty+/evEtZjx48d2Lz4HBhErZQ0t2
JGpqWg1zejCv+HzbdLtrdBi6ZtBHxGZUaXY3G9wjKjbanRuhTbhc7krJFNi54QZx/QRZXkT7etRf
sGtjm8OFIK/Y+F1qnlfs1cimvRVKMjDS1rumXrGMu9osmxWHd8NcoKgAa/bnmsLsAYdbxqCLDRpq
70hZhUn98OLpmo2YchESwgnci211lOKTCfi2nO9uBjGG8jCTWdehkoTbEv0LuK3R7iZRh1YWZwYH
QKodu1/hXmmXuE8oati48dttuDvd04Md6J5+ka2aJSGnHzfAKMHxrlbWPVbuNti6i/2aPYXsmEpp
4kcBhj1wOu2azNfdrN7ksTpNKPJkKCWRW2MjxO8fTnzMvSZKzQJ/+kTFKy5xH1SZywMWVntJ4NXm
aNkP/OFmflWvVtVmzpQC21nJLGxGd8cEa5KdFI55eS3xn9BmrD6DqxRIjXUDWKijiNnDud0T6Kia
zwFWQsfw2lphy+BrGaoHGYpmV+ceZclUlyLZkVyOTvOSUGxOg1R/UOBNQpS1ozySNV9o2EaZrBVS
IBf9ofprcjW4b7XT7vk8GJ4xNEP3Pf8q0UhhZ34ZPgO4G/bNhzG+/S+v/r3DtZlE6P/46n+B1w8G
87lG3sUw3fmT8m/KJ/ng7ddeLXLMAdh8+82r/+c/MsOHryReIVyr1nKNa4glRTcWclZCol4Db15u
7whSRJetJUtJnfwgm/y1/oO2nmPQRMwRuRAvpr9e4wFrSks08tCvtafAuxgNv3VdyAsq29XXlc0I
DW0gIkG3K/aDelcvMCUmOUdxTp80G8Y9sxMXx7w+zEjacIpkidZw/PvJ52K0wqGrrhcrDqLIbk8m
a0S9EXemSnfatF+KCTVBBspieNLn1QWKxyTTOlMPzILzVYcGGsS7r5wE2i7/ek2xfOdUxl9gL8u6
nVatmw6987Vosn9jIxqKWoAX9wVaYe4p1WdJxKsxlubY/akBthWPUS1A/cF5gYNhtzDOp7coJn6q
Lhmvko65jFamRyBCC8GggW3St0m3u1ubdRfvu1reyhrRlaLBXdklTj3KrtEQBKCitrl8MoraOG2r
i+lrAerP+G/Twq34+WsxDGOkzKDQUCxZCdgIQ9xsKBbhBTREoAQUtXQ/xYxEPKtp9rLBw5GEICZz
Dd6cbu+mOGgYEkuD7BKVZVlknylCE2rp+6DU569tqAfpFZeJYsby8vB5TPUDBakT00B/Z1AUe6KS
38mWYCQ0TgqPgeMoIQcZTAFVeHllHEOwY7rhpq9l18JevqQ/sPwK8gC4SFVQunqJcTpxJhDZiFJq
i55mbTHoQJAS3VRtXb1jmogj89CeJjasPDQAXMH7eqelQxylnQ40RAp3TMHS6a5ZbBwQp+R8CtmV
jxUokumCllsNozHg6eYuU+ZI82YQSsZJvn4tI3v9mnGY9attNaadDnAljj1QiSelNWFQtyjKwDq6
4WzAC7142EJrmk4PY0q8ZX1hH4V35jNId3TVouZBESGWD1ARHfuS1kpknUwI+ilGyJhO5FU7s6w4
Jb7BKDaF4ZLrzbpptkk8S3TBPWgWr8y5oHYSauLbbsRhH4ExWt/NFfGG6NCOu6Obl10CqKXMtNSz
pmnpqbOgiws8tCnklF0t3iGiqzZywQ1c43KKfMS0j+Jjb0CcvIWrs9N0eoRwiO4do1woTJm1DcIa
Qn11CxzrqozblE3D/QKCt+s8D7v7IEkqZ1h7wFExZYnUTx5FLnV3BaDtQYPTaxIY/rqkoTMcgtZf
jDi0HSXWUhZQjo8ZEWzSroHfxuSWAwZLff8MmVr3Eiqm5JzpD070Jo2OMz0+9Lof0O2yLQQ8ZRLo
/Y6B1SXkFYUumRA1YGhGtemYkElqGZ82M0IGkX7oO2I0dKxopjVCedgZx3zRPkccpi06QW4EI7h2
thiscMM5vIGLZRGtSY2Au+Xsh6WkDE2MaVPXOzk9iAL4lONIncCRnKwU+5OgXAPOXdIaqnXJQGtR
QnJSR0LE3MS7klVAS5fNrh+PtAtiGilHto2V5c+eZ5YCVdvdseOjKFM9QzP5RUzH32JoCo3o6B8g
TpqAPrrIZ6HgTFTJErOAMLbPHLx+zV3Chb3gQOoUOFOY1XXDmk6+8PwVSMyElIKjULLNCXVV1Mza
XtNOivLHYyTfq5Ur3aestRUbbpsCSl6TTiPrOXMiwfbEy/64FGkfHNmq6ipnWF362jDDoYh2UgED
j0g+97jdlL4gtYyKT1+/Nl9Vij8qXr/28+h8yR9YlutBaqK7f4MrSZTDfPsr76Xy4V/2ktre6Wxx
6ixMuOfELbJvOHCMBZEDhy7AheqH4QAFRd6z8RpxESQyjdtAlcIN3KY5xdAuuZvUF9lds89uFpud
ciucNt1tvbOZ8kTP29jIkXzkndIRwk0t3H1oza9DGjqzhmNJyAosRwLJwbkG1gZrfOIcTLxKxMHV
8DS4PzyasW5LeWDghBvuHTYCpaYewucA8xBv7AjFPUAu/c5NU/f1ymYcu2rO9P51tVtgXadbLZGN
NNpMgUALywgcGAmBoBvWvwRD+gUkjjQYuJ6VCv7ljm9wH/DVwtpvvNhmTkb1IHEVW3AGaky4yaiJ
ouxjB+ZKlWov1e0ugACH0SIjCVef/AluwydGW+ySuIY+ZIRhrgcUH2tWP6sSd6OMblYeicSo2lzI
rjSJDc5N0xLAqF5euSO1o0KSTglJt5kNMU3YElIJmIcEjthqsmsm59UEV8TpYuSFla5TQQAlcQjG
aLkGCgqIv82qUEmqSgQdCQVeD02iHUcildbOq7xpKqgEE+BWi83UxObcNHAuWopcwtSqx+t3aOq7
X69ICsgi/QgVhmBy38kOIY80sTVAJ6qN/KguWQeE7YKjhLFhVXOBe7ZhGlQy7hygcjxIHCXwlqVu
X79OmD1wldev+1u2paKGAfOIyQFNgIb5+jWWPdSg7lz/afNYoeSwX7/+YNhVwLVwkQA7Wx7V3Npi
DMAiE2Mn0BT8Kt1W3S5QByFTR/1RCfeWhiYmurbakEiUzDYAvbepY9U1fJfronHiIEMg0EVFoDDR
u6BH/pMw8uDtwCZSNJEjoVUhJsJp1ZYv4ZlJTRXOGsNTi/qc6lL7OYIJIrpQABw0jxHc0P/5de+5
tHN4j5PZxxeoGEmu1BSG5zqSOIXCVBs8b4XS2faKEn9cyBHhA00T+yUUgjJYEir8G5HW0hujuFie
IVhHRTskvC2s/izGD1LSF5CMSZiHYZT3Ls91c9UoXoQ9VRZOuPK/9to6PDDRqbVRtUn8Fsy33qx+
uZX+kGgsCT8y5uKxft1RUM8Nuf0TZPJEoHQru+pHXjLcDyaPYGLdDejmJD70YoyxueyFZB3qJOSi
/DK8kqRIOr+zocXOJVqgvqCGPq43HzspjPzoYyVGs6SLCNAjokFsYmcCT5o0sRz1ppIAMtBtt64v
d1fru7Gxad6oxYyiMKcJTYAkUQkc5PpLwVy9uVij46mkzBQycOTZHwjKnKsBU/GLgSKPYI5uDlVr
oJBwgI00nMo4xQsngTFZecSNZBT0gqUnPHo7zUiQKt0Ty0Q4IUbgXuCkCfIFl00Luwt0XrtbVztS
HwNZqjEcUC5D3fu99nV43xWjk5gLhIxCAzVnqKiHRbUpwtsCL+6lWFrZpdAgQnZwvwi4ia3WL3xj
SC9iUYCCKDptI/kbb6JcxmzvQDmZWSbANIU05+xWTwf/BiIusjPNrgCw1hr5w2hZrd/ILyzrIq3b
XI0kqpVYlaD2IjDcwdQbmBLHNWrKLimVkVZOSGc9m9oea9pAImoWQbxBokbfVHfnDRBW3Hq73+5G
BxvU8pkpH7eZMDjuNTUmEQIaKLY1MYXrMa0MLMeT8teF9mwi+1IUZTWJvAEkRnFQV8JxIzuwbegG
Ifui80o8z4yjDLwkIDHMt6dGIWpeugQWpaczoTY50wmlv9w462wqrOs3VTbEgDQ2kNDQubDefvvq
f/zoo4+2d+W8bkrxP3j73avbyUcfiatS07lOSzZQuDwBBb+lFLtB5PC6MQHDCfk//643YDiV11JR
rYFkr1RTProqPstGn44fy/axWeRLYIeefzfSeoFpJNtnS27ExW4Rh2gJAqJhoXG239QYhSnhaYHf
MSoNF5DimkqT+8nnagiNoWl+fPls8nc5OhNJ0JogqoEOvIyGOrCmrzxJ6NcuT9+y/xbPwYFV51WT
Uh++bO+xZFHqVk7bigF/2MJFAgVzQ8XRyzPYojgbYESiz/30eJrlmBEDQwKd8HOz38GPJ/wDnWd+
NjkPnn7JYG9xgbxAD6Rd8wlb3Mh903QTDqmMMI8Zi9p6i3oYPk49SRM47eDFCp74sEh6qU1zM8OL
iML60wyS6aQ6UtxJG5ntdExXTLOtNozKfSecCjccyQZSIlDE9pLiRwH5LsMI8z05VoPmXANrzw/A
uCm5jV06Fh+M3jhY1jXa6ydTCpHnlZnFzEzIS6NsxsXcBgXP0iq/mmWPg2A/5JbrjJMm/AwHm9+c
PwoOmDY+y1Z7nhH6g8gJnQ3pfAaeWhflct142ft4FqYlefI/z3HDaI5NV0JfI2/qRZDcgvY9katm
3qxX8AVaMVjlrrOQgrB+qm2G+VNuEu0xj+6mOBKm3fPSi1LNwBQukOoduVOLEs1890MqCRud93/C
ky0HHmuvgqPDYRswc/k4G0Y4Y7iqV6ToYrEWzyHb3dTL6jfDIkqeYuELw3xpUAh30wIIkl1CiB6t
qneb/VqyUMPL7+Yvnn737dd/KMIFgT19MsLj/Dj6xPDirlCYwEquBt7dlA9f17vd3hTPxtlTIG1f
AAP1DDDUc4wINyoOOUPryN3lwPQ01aZBJ9ceKP0Fx+8OxAHMFRz9lPfofrNlvSUqRlDAn+0xcCqx
Caxq8E4n5UMVaWSRREneggho966D2dz0QYggMEZX3rp3VfXGDbJ89BK/5/JKK0Uy25iir4F33zft
ZfrKJ6kblvBYeko92taXxKvSmtvDnU7OjCh724+7i4PYKLFj3KAB5SK6COEjkw1MMNgsZ4F3pFNe
ET8zFc6Nca36x3F2vpfIKLPHY8J2+CjJIc3FEqgqjUwEOQy6ux0wJYo+p+Tg++0afU9Ienxh/ZSv
NVmifje5s4k+AbAxg8pN4kVTm79lGB0CdaZukkWgCkxNDnyay4xylyLkoP+cx9HnTWqHD0KKBPkR
kjTu6nPnhnRna1V/QD6MMAc094gfZBlZ9ObU16qUY9hQJDZTpQtsHvgQqr8wQOKv6Iz/oMFBea2W
U/fkGbXjTVx74XG70LAeLl2g+CPF4Xw+c1kcDeOijpW9burObExwzCGGdRgGpI3ZbNft0E+EfrGi
a5EGzXDvgLyFcGCp8axcrIiWLRLOehc85YPtFe83TxnlV1iwWj0LqLkiXn6h9t0KcjX05jqbo7xo
ce00Ow0JPSpAeXnoyf9sZjEzLQSYVrNLGnuYmKmCb308FXzChT3/E/ckN1O8BhJo2W8yztiIR/EQ
9UC9UVJKGG5yIQTJ0nd/ohzJh2eLz6gYcFlNZqbzXJK3awm/FxeFO3sm5LHZNnKziDxFXda8NDu7
4XgCDBvC9sVwQerv5Rqj/O03y3H2MRvmf/zxmxvHDyrC8CNKcKrRHgoO0s5ZYcJ4EIRwMWiDic7M
1+p+B1TdJyxY5B9+PCU2GjH2dKJmVPNcG7tCkpAxD2uKUwgbnMEnPJFBGCReBEL+OKRTwMCp/jyW
DwNZw7odutFbStqCQxqFy3r4nnaiaHSN5y3qp06wO2DctWhUsOOssxuRuttCFDeVoD/pg7ruQ6Of
sCjBtbuUBVvJilG+V9V7uSuToPao9ZDYi1koFAGiImGxuuMBDcOzyMMMsDqMB28+WgfhXak009pA
R4nkYZBY3iFdH17UfG7NMFjym4mmVbRPiKWkBA58FIVDsZVH/j0gg5XTQc+/mkXdy6dk9zwFLZHo
3qscQ5ABHss3c7CWNISIKSMn+e42i213hYEOGCwAFq+r6wYN9wztGwCGid0iyVgXK34zKo7bytT4
afSC437YrQTNPXs6kieHOiVJNJckz12xKGEsQPiX3jx7ekKL/+zpE8/vBz0kgOjcUKrj7Nsfv/5a
pE9Y5XE2wjLiZeCMc7HqWHgpR6veFCypcpJyPx6fjJ+E3IVFWJjue0NCBMBLXupzPZF+8PPEbQ8L
JcI4WC95uq5vq1UYCavezEOpHf9UcV5EJqjP1Cz7yQO9IfQ5nBKI+e9hBPAe/g3e03jgC/0NvsGw
4Av8G7zXQcJHfQxKwLjhI/xr3/+ckGiNjhUyUbpFN+03Vg5OC4xT4VaX5zSHl/nZwEccQRGU4zox
M+1RsUVQumuL0FLFhei1U0yXJi6pX3IvGDkMNWAbw4vNppGmqRoxM0aaiMXA6Whr3tBm+hDgrwMS
uIisY9w97ZGLmvBeiWuK8GBOKCAl9LGNNAGN0r8wvMF2YU7swsjfg0G+7bLdt0TRSRQ4Mrmt97sy
lOR+yNIC5EX6ESXuCOAOrdvhkaydrUC8cNRWKM1zOLPYgf3v3zs+eXbvnvwb7h0dcF0xDKH0wXvn
y8IRb4VoKqLTEEelUt3DeyNi76+NYJeqjft+f+1E0DezBpF4H00drqseOnZ/XTlG58QHGBEeUiZZ
gjTpRfBGXjsmVVV4BzqE0SikXQoJrNNs+2L9Ka03c8gdD057FtkX/jvnqo9IDY9hQE717IbfjQOM
fcSo2w0Wv7ebBMAFSANBL24GtyKlVgqucksmehvjwpGhQfspXkPpmhAJPotkNvc+PohIoiLFQShz
A9SidDYKt/ag8oMaOdg1Ul1Finvo69rZ7kNdKxcVrvmp8hZnDrEU9TLO3DW/iBQHwkCHrE21WTqq
Q5qeCIaAmyOZdCBwS7Hlcep1lmlUKAUZinzRYUAvyl0LnDxmO35cHByzcucJxuTD2RLLmv+LwB1Q
xRNiue6GHqPCqVCJuWajJHI+Ifdkir+hgTYMR1Jmz3dkmuPKjVFX3sW9/gvVsIEzxAW6qUiYaUUh
zX555bqUoVAye2q5m2yErkwuxybWqItd8UHsjOVcXMbmAN+yXsnZM1OMS8gRMQEN4xKwFraEBFkK
qCPEoD6liYcqTyICMfQJxB5GzHAfQdZDiPE0QuKV6C53cPfQXtxKeoRGEjFIsAfw74dxVwcoF12X
ZNBKB6kGi6aD7Yt16dxeUbWILTK7njm3FT4ndNf/P6IoDEhFVETqWo+XVI7OPTRGUgyWUAwb8ImI
jQOj8Xjj9SriBSJSJCkV6xvNgY2euSf/feiMfyt69RelaBgIoiu/9zj659AGLC/STELqfvW5iH7w
6SVtEJIOdUx0zqGOiQFJ3/FWfsr3d3TuzS1vjABgpffnfN2jHHBVkR0taZUZwtD7ToJKjU34MAse
1pvTBpgeM9Bsm66rKZEl3P+kdVL54EAUFhcLDBJOTvXiSthW1Uad/mzTGpgIIwfiINBvbyyOJRwe
F0mA/a655gi44qbUoXyyxmgu5ISAZEy76NBPd8FnBf5ekmMK+jNhaNz4pidKj7Hjx4H+iUma59+J
sqCVTH5My9DacUBTUU+QJxbP3wTBXVDYKszFBI/eu859OUfH7JYCa9I7J+A9KdiDkxUrMqBPCpUC
i/OD7uqWwtIzgsCYN+ifKfp6GR/2UONVfBf1wPBGNI3V3BmLHTcGP7H+RrbhmH2Jap5fDA7o/5F0
6coNhaHHZLK73LWbddvLv/3x6zyhEw9KfQK/P8EX+eDt96/+Byc+6QZm8PZ/f7UYcGxSdHVjR789
kb1IiuyqjThmddUxmSdcfyE1F95vsJ1ud5+HaBDIPHIYxdwh6A3q2y2WEo+CwuXmOM5cuBCgprRK
7jImsML63iNq3NDfxkVeXyAichTd2kBAqD3I1hVa2XT7825X79R1WRtlrajJtoDJXEO5GPb5hML3
Q9/zOaEqoLw5mfV00J++yDQ5QkW6N3TCvkUhwc0R2xVRn6XND0xD0N+DOCo9xuI9ykmcbAPmxmNh
jpuDJd0ysVkCeaJz9JSxzusfOFZFExoWXFYmbwLXSJneK4F9SUZMNIpLSv2936YIbYI/lggu1lip
JKuGXCqkkn06o8fyweCfy7civX1xfxIr6XC3WNCbS6iXldn7zXOCBrfhEIBJiy0RZNgenVunYAxV
W7HhjBpLcYkUEAd9HpzTA4wYT/HAMHwCcr43lfVWMX7tclHTcG6a9o1jKE59qDMwQyKaFlcYN9S4
vI/0GJkWaQElkx1jHEkBkSjw3jB9cPW1g3AD7ls6W8+uXpDGw8BfP6xTusYHib68Gj3d8R2zdo+d
X++9Q1Adg9n1FnkP7J628bIRrhBbzp2wKI9mmUHs4+KXunP+CqPyUY4TBOkAuvQ323bnn0jeunsh
/IGkI11eVUuOIIWuzjUGKoaTK6Gg6E+HDtkYS5ZDHSg8SSuY04lINUMkSGm698QxW+NT+1eEmewz
sTli9vyPA6sI2sV1tBsKK/HloqtMbZm6v0y0OI7VF5v1OMBgCnib3WmM2pk5mFxznA2FOJISajA1
NBsrXThAYnhz2y5ZO1gJKg4auQXpxTvozwVvo+3v5g59XqHgtUg7lToRQ1sUUBI1DcCDVriGdeCm
OiSYd4CVKVyiRCmwloli8xTxaXQrvX3x6t+pOyCaHCKL8/aHV8//Z6Y9z6HfzWSF0tGOQuhINFrx
8nQDEnflYPRlkb1oNjCd7y8Wm023vLquV4DwftesMTvYP7bVG8BPk0n2zfOX2bpeIl5aDZB69eP2
Dx+XT5AmfwJk7XxOdCfmxcufb2qOnYzedRSAmgj0/Gww+PK7b7756tuXX/7uixc/YAsP/rehSVFj
Co7CVBQJKSr7giDzs2nG2XXnmoCa6uW9lXyZhND8+Mf/wFUwSS49+B+hHTTs7S5d28fO2D3GnNDw
YTeF/7OHHfrUmZ7Hbl+PTsamdWMH+QP7oP++xVxi7b1mshoAILC7lExbFIx6JoX8j8w+UT07JxxX
c3HQlNNpuXSKl3zCxaTTyYkhjb2p7mx2O7a8gEYA0nYz3xg20Y22In1QU1pZ/tq25W9opIq4zSwa
tHC4RwkE0J2abs9OodKZ52RIHHi4/RpCYJZsL5zKqeM75qy/P76jFx+reQmVaI8xoRLGPUCpxZKD
hUDBGVcPurmrq/UqgAmN5etN02sdPvU1wwB+SitotE0Gd9wL3XxkUFTkenKEZxm5OXwskDZHeNjU
mCfeJTRJ2pRMokLm8WgZb1os7rfC2DVvqo01V+LsDJSi7SJwgElaz6a9G2PdZbIbnIqboZXE88Ug
tBlr9u2ShEY/BeZkCov8KUrAhVjTAApvIQth0cmUxhMpdUzYjL4kNaLCxXtzpH0ATWgqShwS9nBZ
5UVKcc7zOfWGdhZja2UsCDZ7k+ZcuKE+vFVJM6DJCVgvIW3sYZs/tKes6LGlcZALPZz5m9RvK2Tm
telp6IMGT03SyHm3jx72qW4BwYerRaeurL9BfHszCeVQA84F6V3dtlEGftso4V8Pi0twkdOzBDL2
VEwuqGvOuWqzv6ZYn6NU40Qs0+qIs4RzICnDGf6jQy8CcQH6mvES9QCpR8Uqpkp73tiZaq7RUc+h
JU+N1FCkXELTemg4vQcJS2+8vOv3wl51vd3dmc3BynkK5nTv6Dp630VgEoMujXARJBvy4vAqvMeG
yCrwkN5jHfYmL5ugWHdkiSVBqSJJ/Gnq22Y7SpeZMwTQo8ArPp9OnkzPUoM3dfo3+oPn0NsfDizd
j/ojYf7Gh12ePcxGpoZGnTgMDjTb6YTSq0bxLLzwVCGCocNskYw50i67eL5ebN5kNiqaETdgqg7k
/hQhBAhENDuOXjJG81KGknNjxKR6Gxp+cxi0JHVoYd7lSIPjLqm/MSMtKk9O87Apioipg/BpGQxf
h8jS5fDiPXTnwNk7lwVm7Yvm4w4HNwvGMzwbphAeTY/KneC2jmNjWW8JdBJl2OODABEbCxNZkrLu
vIxmB4lAlzjyppvPMPRJUgw+nGIe5gMIM1LVJe3N7fcEbZoa68HxwqCS4z2is2PwAsErUhhwkuNk
rJoLHgan2yWDTOxeEnXHp8gDBg8Q0pyud3WGnIaRC3iEKHFyI69iUYSHDKrd50MrxR5lJymuObzT
j+CfEz5+ksaCSo8O0XJRZJF/rO5SoUUkyZ/kbzrAaAciBL1eSVJ3kPDmY6Ddc/KuEIMHkhKnx/vZ
9CR37IyGeWQjq+lhlI8bAQLtot50ZlUW7WUsfqBI4sGaqKi552JZ6jFeC3ifTk/OXHlwhKwHb19S
yDE/C9rbH1/9p/+ThYzpVHgmVR2pfCWNFKbblPCANmGHjVD2IHv63be50XCLvpvUY8ZGA5U/8NBp
oeXdEv3VNF+xVZpjRJik4lzzhwJuYDntDhhUynPU1JtdJoYctJZudZh4W2kb31PwvW84Qh+GJZQu
yHpJVTUUPRftPeTwSYKtngATGDI3W5KPPcfQdtJZUWyGiUY5TqUnxG6mFL4VLZ/SqRy9dIY8FltF
wglqcjkJmbrfNRMbXVA8iVOR473Upr4FjBEpaoRHfiHJ0mQgxcDHOyitwo0q+9PJzSQ9koqIpMiU
kT7HsajhrrqzWUFZ27BgmzeMJb8U6Tq+Mw392MGupiXOuF/sVI3JYnnP91g8o1fvJFY8wTVOF9eF
+cuBoL65plmbZaOBOAku2jc8KhtMlcJoaxoNzC1sNDlqRY0KrS0Ch9CGw931dlW32XWzATzEgXdg
rW8WGJITiNvqvEapyXor+0FKJA4hjMCPWkayYzbt/Qm7vL1eC9m7bi5N8E5NsV5oruu2wqxwMjWl
hCQ+MWJuXQu2RJIfSnpjPmMJgykxLzEkzEATJIefjMZY24nTz2sHyO+IhdsDsh6p3u45Arzk4KBV
J9tyycWyyC5guleZKrjiVLeY9Zg23sMBmvfYS4prIF9El35CXknpjnos0fX9xsRgJ/xUm/tSgcYx
ifI6YqwkpUZY2TtPXmG7ZcFZ7MdSGNUSCsSsHr1lS3F4fAeUtqb+9nWpctTvSFxTUiLuImrlFGWy
ZB7nNBKoG7mhEQVmGBPuKmLrgKh8HDojtm7j0IYSJKlpbbYA7ETiHGbcnkO0ydAl7Xd3ta5u5WjY
gAgh9KRgWpc52mfKa8io2aanDsKAJBMxC7D1w0ESvbLErL/5WSLl8Yynquq6xNnwfvVL7vVi7man
+njmiu/3SDAl2meFQWG1erYh8xRo9MwqUU3PwaErq827uoVbjMwQvv/Dy69+eDl/+tVvf/yHPO1s
FHtSiD+PBpNRtTb7IrgxRve7i7/Lj1AecE8AZnVTamgras1ErYqjyDg0ubnG7nGJtUHPWsyI2DYN
2ifvyHuChs8hXRxSNcpM6qoYXTsJvtI53zCxanMiTenyq9puGLi4wypQ/OdpRrfjIpUGxsZF6faU
OUeyOQ8Df3o+PtcLzOVVtXeM76EDk3yj3nG0+E8WHSdvxTAgYnlclY671AfMAuVK/1aTwL5wDmu6
yfwpeOr01lIz31uDgJ4E4Jqf20k8rfHMew4yUUR6nTDRCnxBUh03X2yazd11s0c0yumn/wFo+e1o
KBkLhKEYaiLdmX9iuZFLrNKFgnwSt5vu8fyYH0EpJuFmPPDgG8xMw9T+HH8i0jLRMezSTt7bs2K6
5/Sr5PXlUpeu+b8z8thFna0YVp6pbd+8R9KFJyugBXOYz7EJ0gFFZxg/hniQcMs4wdmOckkxwVRg
YLwF5jeVBPG0e6WV4U5JbsWMs4QGkZKXXNIUdEYAbEkDmNN5QwA6mSD9KrbwTk0aq+2D8lpw82PO
8nNB6QAlQ2rQis2xfmXD0nFtNcdEy3ZgwV6/NrnZX79WiyIyPia/A1flQBmi2KaoO5AI/oj07CbD
OabG0Oh53RXabSLDcOXGgTNpbHj0/F1dB2BhTOpuWYPXr9NhoHC5TBPuCYsgkfdQzerTIkqN54VF
zUdu3j/yESD2nPka6j32BluPocGtr5Nzx10kBm6HzeATjfscAPxNEtGUQIOhXKxGaxx6FQmaeLbm
yJmNVDcQ+EWRs5AuiOORMXFyMGs9AT62MtXzRKhorNmtKHMGErJ0kgSLumcGO54S1LqOrzYJHUG7
2OxRtlJzUfExdtfrNdwWtFUw0HP0CHI/fna1222nn3wCzGNXMkdbNu3lJ08+kcKfaO3yane9/vz1
3K2NfkNbI7vA/76Ic5xj7dpOEy4vmNS7RU3me+QpbFJ1hoZNcsQxzT2maaXTJwv97RfffAXzpvhv
8FF+AhO07/bk4QSkkY3UeUekFGeMev0acTNmeNWVHXNW0dsFXv6mkoNRRsPJBLdqSCdgNxtid/Cj
LMui75wGN6hFF6MAvpw7yFGJB65InrBrDh9RiLKudkboBdSG+zpw593uxLZQpd5wUfAbP5qc28TI
r5qOeWs+l/QH63cjYhZvC1rVW/LcgrdnrgzVdh/Kce+bJ5lD6ktL1Lsz/OaOEdf3Tg8+brMmOPLz
UXYa7NdZGuEeRLUC4Yn0BjBkZDWolHtxatNlUicO01dUbNcZD77Sfzz8EbYYa3voXt2qUcw9IyR9
EveDp4vQVUIbTaOyxQiA+4rJ4L0Bjz7eEOC7OqBz9Nij1Boo0ZStVapiVaPnPsui8PjmnzgcWXpR
Rs+ef/3V/LsX86fPXyABhVxw/nF+KIAO/leUBqpmPUCmTpjabXBy0QfDu0hYwBFRkVRYkIECIh/9
IFCj6DQcgy23KrDUSPvO5yVb9PVEtm62kelX0RfO021+nDnrGCKp+ZsNeorQaU/iq+MwjitcivBF
77KX0RCo89PHZ96dDgyAR0QjUTXOdndb5X5c7VvvLb+x7FV8xcfkMxZ+t2jrhRuleYqdTqlrLadl
DEmAyeN3VyjPev16jHcPTAnuqAbTdWoEV/eqmcropzoNBRJKSRGMGfUX3Y6TwMEV+XYPxLbJl6Pk
tXHNdufQ6fhaTJFcYcIBvI0X4qVk7fQsHW2NfHEHSHsWkdBfumU+77lBRURNy4YhrXnXhrpOGEcT
Vwn/6gpF4TuFQTSmbSMLBQYAih7OUUXgogAUHv0LQTO9OgjO8sfBuznl0BcmO6+IPslVmFg04VE0
3KLEeMdY35sOIRAqOSoi7aGPt7/uLhP4JjB4DykDCpK90mjEIT/aYxVvPPC1flKjPBTwM4bz2civ
5drL36v+16LRVpil59ckprmur+sl5zHYVMhtoyv8eXW1eFc3+5aUaiwXVrRSMtFgdnAOMDK/Xmy9
OJN5jc7JmBvKCpNylkvDa3gIgz4+AKaH6FxWG3bNNTKACwABzI/VrFwZMkvyWOD38g/ffzX//Rcv
vsW4cOSE3bfnHwtrkWRXuh3qRWF+15ScctvW7xaUH6jlVIjqWe9cnwkaFtuFcdDf0CwZORlkRJIC
HqSZ+z6u2MWMGmVpLr7J41gLZimOMOEh4YE0eprjrzxpIZc/lLOfZ6J4S5NEAiOop6PgFPgw6qUk
cuFi4GLKSA1ueKdsqB0ObdSDfpIkz5YYuYATQg8fanKBAo5Q3h8J8NkehRq/57H2FwMUtHxDTqyz
T5MC6bSRSCSPjjYADoxde8SwbmzRQ03fEyv9AfvJXmM6VlEOLdCcg3W9C/ScxSR0q2aT77KbBUeb
WJKOFVHBgRxbMMixF5YLaNGKj3ORNAulKc6yfHnV1MsqTwPNPYD74fBFoEHXoVGAwUwtY0nJgI2e
7GFbHiZ68+wZIh8EULQ6YGfkLlNPQA7tRe7H+y26KK+y+9qzAT0WdMXeO4CRMiRwPRRkGUpbQiit
OBjz8lhoPwzxFsSMkITN3DqZARu47BZvyLoFudOe+pSu3SHwOMNtxcqENIPkHhTM3wIPI3nJ8NXl
Z0DUJuz4kt4E/62D3n+zwBNupJIlpZIQp/A3voYesPi75ggkrN1FVcjkc+fFXFD/JUW4SesTaFv6
EG8/vLh14U8/Xn9gJWXMSFRCwuIweQZCz88TShznwmc+xYxUr+APuSWE+qiI9phLVoARb31ocmDI
jYSjk0el9KyRUCpBYTiZp0+mZyYvTD5BnfE8UBj3BAnuM+mN+7MEFnZ4kjD415SUm1V1e8Cqly9O
n3vpJ0DQ5gVTjjdscCECazpvuaj0LUXK0qK032xEJj4KF9Jh2kmcFLQDx2S7Ip3p5s4VhmNahvO7
lNpQ6VUFMJLZEpGXG9OoEAhi8GDCVFhAU8aXzpB5kASn7I6NYM709Cn+e+bGWGAxCjwmibK+vEnJ
02FA26Guaf4BVWM9n/opZiyzsFBuCGcAdocwDcD+QVgLtumSq+BTVD4xCGwgDUzMhzgR8QMsYJS3
ATPEYRfWdyxkVr7PyQZGBKZkOrBM46IzpJHk18Kbz+hUFsjXsVVhIHlFyIjRCpqSVxu8u4rss+xJ
X2LXIw7q0EgNmHM1l+80kfqRjRn3HU1ugVFs0Plod9Mgq4GplIHzpOOOXDo0qIc88qcygwc6+Emv
7xQWEZeU4WQoqVV3pydnmHEE3hR/BSRl5u9ip/tXwVsJpdaa9jqb3I6z0S3hmU2zmaxQLo2LU+iS
9LNU0VKlOGRFJrh6R3qHmpWcPpG1dBbzyS+ymI5K8n3WkoyrWZg1maBCDWNtwFk5t4v5Fy2juS68
VXSEIpjrKRm9AaaFl4GuwyiP0aTdo0jwhLUfQXW3DNCejyi31CisXsAHRHODY4kMtwdbJOzAfIna
d+rTFRbUpCsrMaYgKjrRjEXvyFh87beM745oWW+L/saNHDscOd8siS64Mnk/EjMOn8mufLG+Wdwx
EsuqDYaQpLCTePmPhmb7h1mRGspMm51OnpyllrfIY49D/GikkY61RK8VZdqcRwwpUiZYblSPkEg0
WsSZ22JKvpuSvxn1TKjLSplDHJIxYgWUcjcmoA5dl5eBrRGKwhlTY7rrTGLSIAJeeDgHOUGTOVvS
D1lFfkXCiLbSPJeIvJFeKJ1h55MJ3Gs3TbvqEDbo14R/FoF7eafiP7LaIQWjW5wCso1JbWLbRIrN
klpsAgGDgbXZrD7+mEYkkUrrJZGe9WahMxPdVqltpRPp6q4amE1sQSij1dnPbWJCUePRmu/RoUmz
klnofK+9/uXGxakuE8OytVyLvMScIjtzW2I6SGjFhTbj0Hk92N8a9Klz7SR3KJmy7vCKa0fFkZ6f
Qyq+XHRiAKRnE1PMte9M0Fj32tg6BqGJ01v6VouqvElhAOfalNBfjLUCYwmRChnBtW41fz4U2MX5
7lg6BkgmED71dFMmTVZlymwP6hMRqL/AU6wx+JGcW6DAhIOJzp62zfYHQj3t14BqfgdFn2kRB+64
2rza1utGkzabppy5qf2oO0c2KvVuQnwRbB26i/9xM+TEnKdDLAKkFdxxt1b5TfXO8OLDosn6yESn
viubFAWvGob6+0Bzr7mz1BcroRPn4GnEP1OWY9SgNV2tsmJVUnoKXLJ/QOcTz8wh1Nw7/XonACvG
h1f8KuOPthYdUvhrj236hLJONITFOQZeROeby03952plp4Zyxrzol9uQPmKEjT4EYkayr+I4g4gV
yvHz3F0ri5L8HVdcKeFQ2pm43wcAOjrDPrgbZancxXz9XQTGiKxEXdWU4ZrCkqL9JgVAvLrbXlUb
YcAnFMVxsQWc9vHH2ADcgF4TaO2nFrgqy2BrO7ctborufQQmNmW0UsfrxdbQwPSF05Z0+5ZIAbLr
W60EFmXd3Nlo+zjjJdpAaGBXHB3NDhlhY6CIKH6jvOG1SLaZx5QoBTDtxZJMkilIDQ5JzC/ZYtGR
GaChBEKRstxPsl19LZEmvIwjgn24obn1V9RjugzU+g3Aw65NALC33+WR7XpuNtwyrQM94kH6FR8k
imAJC2a1RHo8UpiIqwfZdQz8iy98Plcku4pHmSfy60AzSQOB1gngYQlfHoOGcoDufLf+Su/NjkUa
NO2R8xpVMPwFz4jz/kTeR/GEFxbSENTzydVYjLrzjH5PFudLIk0Xvo6DZ18eWg8zoaOWG9/OyYvY
YwCEuGLrhng/guMGI/3pZx9AbP1khBmvfdczgygtOpnee99UMWmk6G2V3Q/+fcrSEUC6idjWHGKi
SoXJFgqQZBcdyi6Qd0gFyE6JUnIHwzjngGQgmIcCZTXT7PRhd3av7SH991AVWkVK9nZ7a8hvnXOg
C4A5aSEVc+O5ldgMdksSYmnDlc1sE75eIx0E3FbUCBB2g2Fz/piaN+6iLXZqmjhDcSh+1BH0kNXJ
us7AHcUB2mE4I0JPkfMKo/vXhJJxkyQvBX5HOQHJr1BW5WCSB4a1u6myN1W1pRotCYsbNeMV9xWS
LLvWNPeAePbgrwPkzkkPCf2ke9KpyE7s6pCSoGfriw/q8Dhkhj0wpeQ0GdM+97dlOJovJZ6FH4wS
BRVEh8A2YahdnCDTk0uJeCHuc+JSB1cnduEEVNg1Nwtk/zVehkSMl2zLYvojIRruTyi2YXt2G/rl
Aq6LVZ2WAs3R5vEJFoI+UzEg59oeogd5DErosKmpa4roHLVihyFBZvmHkx+LDDYxmkTa7JdWmf0e
L2okwsUOAlj4K1L4w/FiUrN5g0u+32aX6+Yc0Ccvvk/t47rqsLuSc8YYRxfH1UNIrPl5BW1X80Fg
hs5oWdxQysxuIJl5U5ptFGGlQqX4TaE9DFKz+w5wyTsMe5OpkTg6nCwuquxmQUm2VxVHiwgDcrAH
zxavG1U9OL6cSJAudzyntlpzqpumfyzUGNYiU9/urtsBtYprjD6gv6+yP6FgHreDjVgavOm9Vs6b
ZgcEAx7gFTqzJMVRmn995scIGHl2+HiSJxMLMrnvAoY4r0WLNiKsCGiKiGQ4ERtU/Hxan6VQ14l7
V6MGoE9ic4LYLXLPDPAsjSP7PKsPWMngQeAFMPwcDO4RxixbnHciuYoNdNLDRZ591qeuibrCBk5V
3fboZCpdnvSpSLzTu73HZe6i2W9WcixnTp6fmM2OCTGV8cNHjKNvpigJXOG1O2+kg4oj6TIzoHDN
k1PHnIRk2D7iegVyJzcVz41tbdkInpByz6qh1xPGRzEIUhtLONS4S5ZKPOKUSCWLjbuSabqyz7iQ
YFqZ4qGb4RQvD5PMGU67tiFXVTQ1Td4D24+4bNPVGLcXa3yMIVnQ+8aTyFD1klIgjPCyOiliAQ17
cHBJvMyh3GhITQ7Tx/X2QIPeZBMTuvVdp8PPNur0oYhqS+GQUiuKv440UMIQIXjB9QbdjCXC1zWT
HmqpYmJxDYvkCE/PouXmbCTsHAS4me6WUdETIzlAE5TcMXgJ1N9614y42b78PamzqzEYlMhgqa85
xEMnztgwyU24lQUkKAV6H0wsXeqT8ygS4WNPl9NgkTI4SWw14h5sN6RA+aXNGmdj+rFODyuHtNs1
Zr/Q+Iwc2gTjyyBWnEuvI1s7InqDMMxx3SP6JwK1Hx/4/WqKl7ZC7ZwbyhwBDWlGjHLBhY6wdpJp
0CqojIF++NH/3sPIKYK7ZFhBG+neBwcb/cQAxTR0NkrAYOHXuTcwY5rcPnVfHGvx+OZSDpI3rO2d
fIhDr2qNXhQ0rzbdHmjktrpu3gEbBUQjb+jI7wLKoC18vFPHzA4djYjBCIbN+xGMGlBOapLyOp6i
ljfRD7xDPE26wXIgcuKd0tXSLpQhYqcUNDg4RpEykiJTzgZITnnXT1AKnOH6iMwE++8tToMTFAfP
SSTWdDtZ2aiEPYJuYGRbQdzGvPw55qJQRjJFyug3v0u3O+qqF9rgjxNL1DtKmGfLSTh1KkUl7ljf
eeE0lCIBuF6xUrOLZQBXzXplwhFakX/nmJaWB/j2jz9+cxPwvLwg6ovLxqojKXWMfRIp6D6zY84e
tp+bHDPa7hjQmqMpQnYL78Z5086J27KijzALB1loLDk2ZWMTrRIPzUz3OIiCRmDOLmkUouuegEJ+
4DzHcHfq9Rsx18Giu/VGK1isCrMZrAqKEuO4l14aJ+tDbqUmMgsaEGiW1UAnjrH/LKRYKJ6j8aqz
2H06bVGZe6eDVdXDh2rnmp0KiJ2RJFj+AVb7jLwh54sxdBc4KzhRmcJ4QbZg4a1zsINegERrMxDE
uvMjD8YhxozS1otM50QfQxpoyEsfOd4aIcvMEcip0MvHIVo0aILicSWHgF9in13WyKVkY9vdE7Gp
jyVe62qxoQgLkX1UOnIfw/yQ4S49d78mWidr7TlKqs4XQFRIjG1978YD9NeQAretEl6gQV3vMEaB
minFm5zp5JI6BVKVRtquTBwtdAg3DNPWnD/lXvPoGxv1+XNivzXkoYRgtNFtZQAS4nB8lEJFr1CJ
jhiPwMnVVC8lC2zfhI2JwpC0YXjnO8BEdMkyYAfdQMlhaAZAOJpNGCoWx645IxMcQs5oSFdWsNLP
sQmRA0WJtJs9XZk61BX/kq5+dgjsVRMEF4wJauOA44zjEJh7YpUILmxvEmY48ESBAe039w3pfYbj
C8ai8bideSM6gBCEJuqugN1ubjajRMBGL+Bn4paN5O7OzJAKTUW6HWeplkqN0xxblwXRSBzuJhpw
vAzO2DhHutzQinCnYVItk9XOFEkkRIFy7nIBbNUXdzZxqExQ0jirtWDIFRslmNryoWbpYr9e080W
ELuUgnGWDckz4oCPsimItp7vqqHL4CHlB580NTXcmmTmjOQM2V4hyPsYDY1RScjezS6DMC65/ZaP
GTwD8oGGMqN/U+RCawUCLiSTxmuzWLMRk4x6Jn/fC99mOtOZ/I0c8hyP68XmbgQjSogRNZETagRk
GBrZGu3eEqyajfXKkVBHw+ffvvzqxbdffP3Vixffvfg8wwQ0QHbFOSuC2hfrfYecJ0PafybamiMK
WlPBtrlGypyM2JZrE+aHyPWkWgylq7t2jwlokaKloBAUTXp/LvQdhu50FS9HxyZWyRuFs04d7zBG
qgSY7jQGM98sndoHB6XDcEPC4jgT9gVFJAD26yZjI0uRaMAkhJaZwvhQxhsK43hWlhgKSclA1sOs
ZMiz0XlHq9wFpvL1y3JM6ZH/0g0unaKhdOiRAFL6d3lwGwLUWDb3pLOAL4EDJb3pcSjxSV4T9war
4ItByleD3L2sq4bixbTPrJte3mGmxqZG6mQ6Po5ReX2K0nwCvF9jFId6c6nB68UmtwuFd+u10RMb
Ob6RN0fSqpL5Xuf02Lj/p44lbmKPMYqVBtLvRYqo01Dpjju0s6i/R9FgE0JamW54CKSN6DIm5ZFo
6U27nWenHSGoYBCOmt8JlX6o+VKiKSsFZ3eRippQXKl+kXe7kFDc8CA46VRoUfyMIZ52za0+Ul7t
EooOz4okcy5BoEe5IDY0MpHs3QklP37FPvN7GruuN+K1gjVQu38tqvq3+xrJRbEB0UL+ea/C6I4W
7ZISeRpztLRuPhVWq39OcOxJLT11VX9Ydyhlh2hNTvbDwYajgFuGOzoOt3k3x3FVaLt2TbPu5tUG
4IRSuXRH9ldt3o0OMIo9wE6BwucORT6OVmsmeQ9sEFvdcHffvBUKY2AlGTyGEt0Ihm32wPbgJzJq
5a/+rnJD/EVpn1A1eH3HpTRmps323VcDO6NKn2XvUrF1Waki7Tn5VtI+xZgXezXVQ9BJvcnDbizG
LPZNjldqv3pX1krJAptdWz5Iii93HXvCfHCJcWJNiuPCrT7gj9kl0PIbIy22pjoAwOLmJnQXi1wd
ISZs8K7SiPE1h9Ejw+nzSu2Vmg2HASN9O2qBhSgoUzx04FfJ+Gqcxcs5ND3xFJzBo7w1u27QTE27
/tIdfiTEa9r6UmI0JrBHDyrgzClWgktC5OkBnl65XW3OPbZuEoskciZvGfWbSCCSORWIQk56bYTR
OyUxiSVWIg4mNoshtz5R1GhkOYq8sLxZjUK60V3TIGSlk7jAVe8G6SeN2yVfZBiQIA5USVEdhdUl
H20yWkMLWEfXAswNXl3o4mjjH9xV5ByPITYwLh3ZmZHfGtnKYo/nFdykVefFmrZhj5R3q+GocOo/
j7G5Da4pX8soQO+E6bqVbDK2AZMYFJcFIA5IJXQY5Xh/JAhecfKsekNZL10bET8uaBT402EwPDWJ
2PZusmlbXUxfQytsXPQZPJEh4eevy+y5H33bOrISMQ9nGEOUkQ2yk7hrd9WSs/IC4/M1bSKkpkeI
ZJ/1haOnWJqeKDEbqY0kj16IRkAcC8cOBdXFhPRX/j4d1nCryP3Uaufu1WinanLCaTdObAwTvlWE
zyP07mc0fC96vRcCNMQIftzQI7MoRtY9+w15uqWgSZIWsflHUfSP2TMf4YvwfQaFgmmZ4j05qL0U
kHHYuwO5hKVunqe2y1GraEMYx8IEcQ12aBuZmfaQBkWPhcA6bSXVVmu1Gkjkg+KoyInMt4pnVlu1
UV8zXWJsP5MwuvYT0ppZU7ja5JHqHVRPSybgbbK1U3Y8YvOCxXa0XlyfrxbZ7RQwZ5iT1UrUCtqh
swNSTi8ib9ebtDfMH+/wtHPd+HuNl6KTQKZDsAGoekxw+ceZVh2TB9WbFjSkxmweWKIYYT4nHfw8
BYsBHAoM4tjJFlqmkZSuODeflOtPz2aHzm3P9Mk4cQw/GQbVgb3fFgdakCkbiOcRpK2dzQmRcpGU
Yp2Gg/T2jzPit1I+/fQhdZnQB9NsMrFsAlI84HCpAy8cwiECIbZn4APlhAdHG+q+DDtllv2h2bPz
ARrfMqlw51vVEbWFnijr7PXryeS7719iSHD1IiLDBm11iKK0oZtuJG3NLwFFfDFhaVfEX8A+CuAy
JdDzqx5h0pdK9OfOYKWZ/fSWdDeKVvteLPLX2bJEVVvHNVZBSL1BChTpPPKAcXIq0T6j54uTaofi
U4ugd5GpuNN3UUlv5C+8Ne7Jcc+tXWr/sMZ707SYBfWoHfKW0M6Dt5gjyGNSq4UTCUZoYOxCYt4z
pcQOAT4dO5+TqB8NL67qFSDsUM+corocHsWOI7xOBDlh8XusvntSXweE7XFmoXelmTsdmYetsijo
f+CSlWSFJz4SvGf15aZpq9lXnEbReLOmDPGUNHFMYV0bPG4pKi52KmqQb+S5QMKpRalKWjT9w62f
+cGEcKGsEL4ryVDyjyb5cG0v8FM6Gxx0asGX4e2OGb29uxuHLlXFxF6MpEPhrXF5stMlTtiZfSJ4
IwwAP/OV65RN2snrfvZZqwKtbJSBd6X5WT7f1Jrrtuizc5UQ5rmMWWqmc7onYNhUOdWGvPzlP/08
YMhgC5jm/E9s/iKZbCN5aiIDChSPjXX9qN5UlhoOdVFslWxU6tg9N+9LY1VIigbvfadarFJR5S+o
iBMcJBP5cL9C4yP13JXfkHLnJcZ8VlvYpEo6tIJl+izVA5CjaNEzjxkgVwXnTDqyFoZv2oif+yI2
XrKuDYcSNyQ6Deoc3e0DNNs4/xOGoeD1DoMu2DvhmClwuhuxwPJG9vafXv1PH330EWzWHBO+l12z
b5fV29+/+t3ff/QRp6eaX1D4aFhqAdnLaoN57eDGc3Lc6yPAJUYUH2e75k2FIWUGBtA5hz0BhDZl
4WKw3KIK43pbk2WJPA0GBkdLjflCTwsNDpVw8uX7uy+fzb/79us/zL/44SWa2+Lf+bOvv/iHgQYN
poLOHWNKmDR+c9bqEVOkBtY/0Jok7Iyh5+v9jhO7SXrGZr3ivGu8kBkuKgx1cYl3jxXfSzrUOyAh
gGyqNiQQr3e+I7UuxxLuuR1pPR73GmoTlu432EZSDi8L/escmRUuN9fSxBQ8pHyc+RxQS8x0UJpf
RmXFV0s8tdoe3g+/pIL7trvEQGPUhwXHsj3F4XbwmVchjvEatdiX1Ds5wNuSl4ClCvZux0JnR3R2
dEKGaDoS4+WPm3S4Ch5W+hJjKzXT4vSQct+UOp2cnKlAZXrQRsp3vu2dSMLmLR0c1p06xiPkA8nr
p+A6W1VFYodJJHi4RW1hZF4GXD+NVZQuThnnMFZvbawDjLV0SLLjMhsyhBnXCoZ/BOPiAxQ1ksox
73YrzFJhek0NLkrCg5JnoEPMLN9Ud70UCXxDAnhX3DPvUyh4SP4GrUIJgLhqq/YlkhXrpDdojY0L
P0LVIBHW63qJWRYYT3DQE8zdUG2HRT8XiDOmmjBlHgVBGz82W2/319Wm19eFAsCYKRfBmmoPvKjS
B4CZ086mupG7ZKY3URF/NBjeWV5qbgqtRcnlTDUn1gWd62AOjjwBquidJncdpxuGZV+HcR9IeFAt
6CY8Xy82b+TeEceongyiZvJ40Y3NwhWBaS2HXv6MCmJHxuTamXQPmuKqj2bZSdAmtvW5fE60CZ97
UR9Wnbgt9m+Wu1OO9cp9mxXu1BZIjhZZX4EajgMyy3PNlI1PghPzLM8+zn6d3lK0D9reqdQn3lxv
njl3wyFBc+ooz24kmAaPxxAw4dZyVbsk/NvRgV4weSOf6Wdx3AEw13A24ilnj1h77FvVmnU+O3B0
eFjy81Gmf3l4AW7uP0hyl/De/NW3AbWcPDBummOOY0U23uC3EyYiemSi74dQTmVhHx23rP0Lg+gO
OBFKESTLg3U3zVg0LWi9HsrCnXUS/G3aUE19s9kt6k0XJoKWBaEu9tfnAGAjJqRXzDo8Lo7AQyoT
M51KWJdw3EkhvHumk6vAjX3AUoycmyIjYlUWo9sueCUyMdgL8Z8sHerjm42/goitLSDxeNJLpLH6
H2efzaSgRDMjhF2k5N7uxSxVGk4FT6swdC0idmN/F6I1Qy7NbIJ/SRgyR+s77jKVdzabiwtgjQ+I
0BeJS0/b4HPHTZTedJ9fyGuVpuCibrLLPfqGLvSELiR9ORWMEm1Z2xLMJmCv0DLLftifd9XbPW4i
n1DeQzJ2WfhkK8ZfTnWHMcbJMKvZ1hyb/By+X8PCh4O44yhNXbe/5uBM58w04LZd74FnYdI4CSRs
GHW9uIM6ezGmUZlAi21t22ZbtcADX8GVAhBsGhx4Yg5Jy9Bl1W5Zbre/+SA8xtetBwD8QcGgOAaz
d5zCC/h9ASHDewRiUQeI8At5p/FQKa2KNIJJevct8Jg1yvrvAuMNVy6AsCbGCkeqR1To4gei7pOD
WBEcgMotKxnpRraqdJFFoHVSvuhWbGNcLfODArGwObE36fbAS3i2+TrdoL5urzItpdnB4mC/Yb0D
Kr4HOjGu9AjZ6XE2vEXLa5xgwCP4U3IrpbTu582iXT1H4U27D6NzEVoM65gEu9N+rqx/3g7I3ZvS
Vl3xTXzuJJNiFoc9EeARpYeivb5uVkB2EhTEhqkX68XlzEoKS2mpneOHuPgK7qB5vQF2tN7NgPoH
5mhz0WpfvSpObHLFQjaxSUVo0nEmrTbQWBbzDKN2Y1df1BSD2dRgbLqCo75e3EX4UCDrEyKANB64
UZuinWub0agP3pumM5SReYoY/VK4AYEeFxFbbFbnQOgTU2RGknXkNun36KSQBI48YF+XsxnL5Tyz
LZQXLBteNnMd4Nj7ylX8WHuoQZplw88eriZYGUpnqDBQ4wRXsJkwc+BI9vN5WNRn3hJrmoiIoqvN
SifMsEJG3J9TDkWYsFAwx8ihUm21mducWZ9UwwYvJU8dtgZ45EDgLtwVR0wu2MeeSz6SYzp7ETr6
gTBXAulXtwIiUHCOHoyj4vTkLNA3tNUEfRKrDs8OI8GMnCc1ICjHm0SlxJVJpeSfn+vuMiGkmFa3
Ja/TWRztruTLOaGPkrZU6zHMhh+b4riQ/xzIRKMKI0IaN4vOohEgSM4X52o9ilEkKdcmgq05mF6j
QCPQ4jlLS+JY3lrtskhU0pHO7CQThcxRNM+JQrvqVtrBpwTlTcUOi9kQYLL/aNUhvdLDZRNLRolH
vJUEDUXgTxZwh+qqD9CmTgoIcRi5+fNZ9uk0kTYUqcjt3ad5p6ql0kiAcVdGBRtmcIbvhQTsSrRD
sRqzbbX99PETlNmhHyTcixgIFI1YOJ8yk9EHGtnxZSMwM8Hg9nCssQHKIYmRYCV7LsJPDLewWI4u
dDjf3mF70tx821X7VTPn7hNh40Snqgsh2tLzO1yIU4XRM9Vf9dR29awaQTHO83tdOmsUIl1pCTeX
TJtLNnB2RzA6YT0zY8Fu3HOELFwNHgweZNv9+bpeZl98/9wJKi8xcDsoMXCIknmE/xJ0CYF2Nwsy
UvRQJQEV4qj6VDFp+WPB5eiLAOfsxqVBLHWDMHcNrDWy18z7sdu55FOQkLac3oFSK7rkWVtXaGnj
y4VYn4jWrleV26fXJeb0a6vlHg7YO4zVu2wrck1xygcxm5GmIFWqtZrmyNBWqq+LjeVKQBSe/xAe
UslorGKa/ZY4EDhReC9Ahd9Efs1HXmWdvzEMQO9H33SWR+SeuBG6UQ0UjHo6t7SoeSoGAfCqBdBF
x4gatcUO/LxwRbnZWgOCi5WS0AlmXxnCXCer5z4QsGIDGGuNvAybq6IhTLE2kdCxur0eKUFQQPYZ
RNV9Cf/QaF1rJ8ROAakQkSMXGMViFKAkHzdjs1nTJjSBcTVTIaJcEoNxORiar+Um2X8YhnbBIS8C
CntDxnUefjTX7OTEJdW5oSMiP87Htgm0guyMvUAy7OPz7+5Lauvf0WZCtF1eUEbTLX0iiZFDJ8Su
RSoyQ+WcC8HchU6DUTEm8EB2JsS+wQwTlnT8VdC+GWKw46mFkgVyNeJwtKvb6SBmmrlsxM/yDSAb
mRYPhfJtEqyJDj+Qb9uJeIZdiqx0uVSua6YTGWKgLY89chgTZXGz9A6ev4K7tmsxRlN0rKLFem7l
ewFYmUaGfxz+dn95eafEuSYjNBGQ99vLlrR1Y0Ut0J/M8o+CQmJg4vZZ3eyujuJZ+WxWgtfJSHA8
8ZvL5dehANUPFKGbkzBcoE8skQZmZgunf7c4D0MXqBlMuZYtj4jT+GQaah1l3KQHmZC0u7gn7HhM
bJuWHgdzncGrWHlci5xbegJSPjCMoVC51YJ9Sg1hI3WOXri7ulqLvXfAhLJ+FCVKqRp5zni3JU+M
nlGkTG25KNYqN8q13GPu4FWaz7HafD6IG8epAkaeYwQoLA+P6Dlc2ec5pdIUUXQpw63m9AYtXbmf
WOpCzWWfGyDoElYItP9Af3y1IV+FerPdo7aFfDQPtac7m3LLslDdnXKdSXZy1g/h0rwH5Jka9J9O
GeAwoSNFeOuz5XG6PRU2/Awm9lTOb1RHZ+A5qw5iS2GbHEG2p0qsNlOVT4knU4yh8ZCRnORT0Fwk
Khk1Am9xl43qsiqd1yKbKI6agU3NIOA5ChGutXQsX+KDa2Du3eoPsi9WTJuL4gbIt3qFM+wqGOBX
5WXJUeczE+KL5BPiq+ohALVG4iH6AHTmYWN9b26puVGlzUlJps0gXbCBK4mM3qbWxPK8RntoVSjw
rznlA5JZyQ3PhuvIIT+Wod7uzOVv6C3GJyfTgIQxWbtU0DE5sYIEHRWCHzEpqEqEBRR7jZ3DnMLl
BLfUnEqgkM4Z7cjpKRbNoQWTW/dzxvXBgiS78aw/OGm8qkRN/VO3xpkKduAYF5KAx+3avQgO9+Zc
zNIkjPuITh012e1uw/xAutagn/73NjhZ+9GJ22ukhTIa4cDhQw8MgQdsuLbtdGMnYL8BbOuvkc4r
CmWgJRKGQapt1roGEsfOVBP2RBSyxGqj8V2R2iExXDIac069V22M2X0mlzSBDqr2fRnsFtDWLhvy
uQYMNnS15ElDNkcHLoffLhAuTtqw2vHl1D6x9JDRwzgbXuDd38nvcs4/x5mo9Yc2/L2UN+8fFwYO
JBacOoxQxgyKewS8Hv49b1Z3+Jd1wy32NmxapKeGNILNYk1FHMeW3a513B24b+kiEYCGivc6ifsG
jVhWrIFTXtOaxnS3a4vY4Nekj6FGdDEOuV/bhqI9N/ALq7Ae+I6nSesS31JC+JbAxgRfEGQEVLi8
jclwvIg5RxIpWB2BSo98F1Hhk/HfFlFEPGrm0czP7RthGh2IcQsYSVWMjkW9G23tODt5/OTXBd5L
+EBw9sUPL0PcY70wDyCCeDGb9ap/MYs+7+ronIa9HLqS3UMr6yASkgeu1HnZtC1e0yglgupTKTHB
tZuocoZoQVJ42BSmWAaPlTUO6rJVI0F4SBjeYaAa+JY5GBBrcU5ZDqNyDr1eb3dC2igAObjNLjPP
mtgnhxEvBp41pk8fIKC5xhxijxkxcNDlCAfOjquMTD2HugfDIvme+LRhJKqKyhHeWd9B0fusQGPO
z3KGal7EuzlO4en3gTvPUOwvMxL7SwzEWNkE1Ku918VVtdtjp96MKEDUubhw+qJoFiZTFKl1c1kv
EYIowNO+oyhSIsV4Uv6aLtDzat3cSMWT0noh822rwnPt3JK1KH1ptnr5qUpBXJ/tFbUTHxbf7g9p
gsmJbwBv9iMpk/Dgl22UAw0r2Xw1qAuwDA5aX5F2e49BbfCOrFboiExLSPbXy0Ub5XrNu/0WrXOF
12eLXdTGBa/Uh8i8Psaofl2/QZXCAq9WUtLH4daGvBRDNdo3PQ6pOe/9EVlvMNai6o2jVZzyljxy
FNYPsrf7evkG0Bf8Q3ZmiMAqo6I2NniViDw8I9sHISwA7zziYyD6bgBBuZCHaMKIIjQUw3RFNOKS
ieHRY443mt3e3gK3nXsFjYwy/+NGc4Nq/SKwhDP//XNGrLuvY0zYC/gTMb1FEvaRo8EeZ9/B1X0B
cCg/7V2ZuL9pp5xhPnGOYLVZpQ+gni66H/yz9ehk7NwNxaPQ08NdMX5IWc5r/B0tXLqme4d8Y7xr
moHfVe4P2fG+XqlomZGZmL8TMD1ckZ2Logr0Sh+8ffXqv2MnSzzCb//w6v+GX67rJFppIZ2KqtYZ
HkxUGsLB7PMUZmuNeic+nSUWR5M4e+vY1sdhRdX95jKe/MzEUxwMHkjyKzgvqISlCJYZh7BEMQVt
Zk5RJ3MEeopF2WygGuyj4pNyezd4+3+8+u/Fr1Tevj199f/+h48+QjQvHYt6Zl2fT2Eht7CqGhoQ
P6yqd4Dat3Qb7Xf1uhsM6FKQiMVwGwB+3NbbN5fQHws4F3++m6DOmmid/bkm7xpgc4SrMOY0xYnD
IUHNCXpNY0JrmCTfJCaIWTfADZl8zq7Y10CakxHqDRE9/FLusMWG3KgykzNh4LSKNgkba425uyoH
g9GyyH7XrDHM9T+2FaBTDkGMjcBuPXn8+NeTJ49PPh3gUjnBHBFHnJS/Lp88zgcDycJrmTdaCdy/
ruE4jLDIhLDKj02CFL5QtoAc13gSXO/dEDCoKkGGNF1+sa4XHdsujIZaYkhJFMq5/si5HgC9VpOV
HimAj4nzmv2US4F8qj38jLHscUCITGc/yZ1uk+qg6wPOY7Vo8azZ7SX3lG63gqbyEijo1RR/jJMN
oNEeWS5tmpwSIQlccCM8emqGHqf8YswYLp9v7zD9UEYF5tcVEkUXzZTfcn+5xuqcYnCBqbN9+RgG
g0E5MK8y4ebFrj6v1/XubiBDldChT8rHlL17gQronYWrMRL8aEq9wK3wpv8A50fZuoFUBHR2AVBK
G679iDVLzuEOpibywdi+LpdAfQAP0fuZI1f6n6V3yl4jYUJR8LqdrPH4evvVwnng5iTyJ7b0k8HF
gPVWzRKpEY1Og6ssRaf60dpW5DAesvFIlS/l41QLOfXe1Ot17iB/rx5+xOcplXJqPWvaN9Xq2X4j
HXq1LugjRS90ynHtnxV6BN79ScvxyLVFLjTV184AvtjWfPpyr6R9HXRnglMEPZq4GLkzfVN4aj87
fVPQRboC81Qd53MwCMR4R20zatoTe9y929ws83CvEI3Sl+kP7za///JLpm++x778uvvW2WmvLnzB
yj1VyVYg2S19mX6N/4aVoLkv9jjd/rHSd3+JHqhWVlS9gCs+Qd5kInop5MLgJublxA/3LKd2jEWT
p4aJwmR5if4wVcLRmx19cqvF9aSMU+tLGnCWrkVDpBLuOUPzmvxQDS7hVDFG888BG+epKn4Jp+pL
jZSU9/VmSzjVHEOfvG8x3DJ+VdH95z09OiWcelsMljUXu4guT9QLSjh195uodlA3KuHUnn+hnK3B
ALa25XqDUm4DbUWGb8CI7VCUkacbCEv1tJCHi5ZsIai9bekibKsDtb1ibnXA7teL3RyIk/ViQycy
TzeQKBgedoQkpHE6NtRbrWrOggh8kq49H3X5dQzylKKp046uj6TdzhPl7UenhgR6WuWpHsxHF83C
bGzxoIJ8dIovNncxEtHi+NEt61/UQVn/fu4MZKSG4QMEEM9/rjadD0ta1n50avx2ARecIpE8qOF/
dGr9g1pCfAW8WljL/+jCG9k89SyofHQRA6oI5j3F5aN/GIivTe6v+ehWAA6QGYU8UcF+dKEOKK55
3rMX/NHtwEY9yeMOnI/eoBq0du85B/LRLV9350iypmetH/0KBzqQj255DJBxzUK2uLz9GFRBkhG5
xTxVxXwMKrmXR1QpvDe8GyOskEL3uD8XDoUQbR59dCkKOInITiYrmI/ukHp3ItoGdw+8ks76W+RK
1jaTZr9Doxu0X9TAFHnd3E82KVnbpBDpar+9CMgmU75cLrYYIWuqhVwCA8b5/LsUCeTUk0IuvsGF
COuF1bSQSz09/ZI/5gfq2UIufbdbxVXDmk6hZNVnT/P7q0Ihb4HQfnmx/j3mMGtzv/JOPlKCs3Ya
lPVula6eE7JLjD5oxSnr02VzLTi/qVdEyPe0kCjr3kQL5L63bZ7aO/04NaVCIO4wzQ57DlSLTXZ7
vf7kane9ziw/wCANH46AaeoXikLtFFhjywFwelXou7tbi8uwuFcev7vExOLmYHH87hT/ViUdebq4
/e7iqw5ALHUwpZJ8DxjTdRPwxQ9Q23RJrgrfP89GKKNY7ZdA7UjEEzSWRXIJfsPjBjg1dJ2vFxgR
6KZpV46wvn8joIvULiAvf7NoN3mifIkfYFhTU8jly2WQyYrYmSngU0o6mTxZyS3g0SWil8x7OjPf
g/vqYKXLRCXirFNgY6YVst4vn37348u8v4IU8Kt89eLF4SpYwK1y1xHY9FfhAhbUfi4Gb//46t9/
9NFHmrr7T/tNvQPIfHv2qvtfSRqeARZAKSxJxNuq268p4072X36EkpNX33ydMVsxJhk3iv9I8fe7
/aqTpJ54J67It+hSBAbAkSChXg4GSJuS1lSyRmlAwuY6e9F0Xfb14mZd3ZUka3YiNcpTY8I6tlUi
1iN6v6HI+XuWYDwpX9F4PoW/vnzTqKZg9surdvSf/qbQmIx4ph1DDC6Alh5XrV8NreXzL/JDFdlG
BKhFWxPzBo5O+irhV7Ky35lgj/8FNwi9WmCTSoNwRGdEpmk4499byws0lScVFwbmMBrnesN6Ih0W
KYo6FEYDriBFegPN4O6elI/VEYw1CzUpxUjXIaIh2uAyy36HMl2OuL1e7tFhbPCAC67uNotrDiPh
hkq/WrQr6F6CS7LqA6VMordqLrg/aGUJRQHsumn2JTxl0+kse3D799m/wL9f0L9P4d/TB7dPHk/g
+W+fPTvj3189foxvnj179vRs8CBlR0zFTh5zuZPHUPLZ2WC+ri4xhRn1OstGj28f//04g3+/oH9X
hZaQdZtJ6ico+OQxFvnbryQZFrz5O3qDg7LvcFz4Fgdm39Iw8DWPAz6YjmC75xSj6lSBbfiwmzzE
ZHIPs5HA7bq5KcYKxFf15VWRCi2M5xSLjjMqQqoxdzaDZJhFVD59xgq6xa2M4Sw9uiuMFG3tY93F
PMseZV6dQb0Ommgr4/BmztXpPz/sznKY6sGwjqZ4XrCi2usJ1mJVrb3RuC9k7s4bGSDZvJzXG/rN
tzVFpJ46Dg7b9YiixVuPI1o1TPXZrsyn8rJt9ls3Ngja8aHJJQJC0nvMTOnB7cPHT17hEmT1/SE2
vGq/dqvZDCeIQIDAGfkbUAKeGOGExlrGmbLGMA9ToI8kVR+PhWZpo6kAAuJ5D5UmlSsFbUQLW6O0
zeWTiV5EGMhFfk3454KE0LNht2swI/rAi0rT7WZDKIZi7eE4QxXYu0U7G8pviVHDfrhuRWTGZkP2
Q81MZ5JHWi5Ayi+12Il5EQnO7xn+tq0u6lt3BubNgUmYQcNFcf+Y0XoE7RS4ZTK1w3uClHJ4+Oxk
mO8c+lvYkylcltCmAZc0GfJeNQMob9vAnQIjQBwOX4G+wNu8W6NtHpqPddnodmXShWEuVmla4xiq
/anmRc+pLrHKeTGNciXjTGbZ180l3E0jaWscjNJZ/Ch9sp+PTvN+jZzm/QXqTabOA1FzXjP6OQOp
teflKU+deMZrdzKHx7ffmBHyyGho8PVyXc1xfLTPIzeMP+88B/7VpDPlloyV3egHAiCYG/dXsywf
FRIbnt5jvIOZefST1+Rl7jtwYCmlTmRfvFDUiXDQACV4msYCtc42yxdEnB37xbIbEdCV7ch/BWek
G0n5MKVf3MwGiFRKCqQvFufkVNrXgpynmQwxyFUtqY2d6M9cCYXtJi4fZoTY0s/HfrGLBcXzkGJi
toWlnBTaWzQA63aY508WjdGQG3lVdjoCBy5Z4vGrV5pTaopWoRbaLJLQjZ5OTrzsYM4yBHZVpq5j
QOaULhKLpbbjRL6WZmbpdmfDUsLP2K6C3KJULLiUeRowi6AssgEzPai8NmjwJhngcrT7N+XdtDZz
EqfMRUK0mjMC7duNrkJfVpSH6vprnoYimUh8lEN7OZuk5D2W5wghS4pjPhTh1ArupJXNbRDZNv5/
3L3dlxvJkS+m5/a9x9d+8j1+KVWbC4BEY0iOrlYLDUaa5XAkemdIHpJaUaenF4MGqrsholEgCmB3
a3f94kc/+A/2i+MzM/KjADSltX2sczRsVGXlR2RkZGRkxC/ky9SFbDO5NOKKpgFl7R3IluuTzqOW
lAeUHpNnDiroRgSXxnqWaFJaIAwDdchzAyF1S0kKGI4+H+NCaie0X2kB3BJDM+UnTWpJGsKliKK9
pa3j6+ZSwghkUvFIhnYxFIprl8hmQPmsOaTQzIbubcp05c2kucUm4/gKS+hkFnjNiDTpAo9jEmBQ
c7Cik+sJ+nPw6ZyI0oDyBwKSDECLu7LXSxtSwfRot0c41g+D5+aVUK55TojETyMfW3yqo4lYJqJi
rnfyIVaSeSuy86+eeHEm+o9kADuakIh7iNLfecpRLVHmQQaCZhU3G1EfZIchmrXRQdnr/xE6aGP/
QXSQ6hM6BHwfE4KItFPmBEPg4n/1AMLlRLnZP2sa90zf58qiQ6ZL6zlouoIxq5gqQlmyCzrTQ+C4
eBPoHJzb0Ng4KqIJSCLaXdkgbOYtj29Y5MLegvojCvh3p/+gcc2fIdIpa3uDYG64r0dj2EdVjEgc
lSYBW7nvC6W/733ywc6V81liV45T6y2ptWiS5iI7OFYIwVt9Ev+o3IfgxZiDFY2L5RBvWSkTGq+l
LzbVZD2rb5Z51SZUs7XPO7QgVkzighRHKf3h/SmXGnNvW9Ggfu4GtbNHLIhy9eUxIu23uh/tGpBM
/WeNyLalzN7GGSK/93IF2gx2cMa9aZ2bsLjvsbLwedSOt9o2SigKm92Rqtsp7SjD9i3BGRgmm0kq
qrSCHWezo1RC5Q9qYRnuJrba96LdIcmx533L3mBOfDz2FtnFRbTOspfSDPRedGMnqR5DCbMEQ0xn
Dl8dEyriiM6FA/xPt7U+DCBpruIKMemQxyqMgSK/inEivTFCviJIxWaATNq15go4kN10ML6H70RG
ne3m4uRXOyGsjaHj4No8YYQm9SpHkrAUPhzPqgXxVvzhSZ7G3lKxvVaTSXB6syr9UWwDGtDO0u18
9Rs0tQmVR+WTwePSD6qkQZW/+dpQKfzeMzJ1r5tKC3qXMS/keZKX3Mgsv35y+AEZISV4bGEJXP/y
WkRBZK/A/oyUZhlbRvlg8OUFoRVHU+PL9gZ6BSAI8I97KYGmi7rJMb/zHWm213C+vBMZpI9ZboVJ
pKJXTHtYRSvMEgrKpqIYzchuiq2ruhPwLHTy47+8/0/mQnoKe/nH8fv/67GGZpGu+JqspD+wlbSP
YMcYiMtXyPO/8CUzGpjxXEKJRvDb1mSA5mL5WIBWg5g2Cs75VM9nxXS+xovNtbxvjo4EPIMCyVws
jdrcBmXvdPiUsGNPO0/Q2POLzlm/6JYU8bRYcEcR13dRXYP6yYHGD/CiE9qsMVimL1mJMFKLcdAq
DG/pEOnCVlEsshn23eSS/MLXLRbYWDiON5PLpxgsAgv0X/89fMc+RTFmK70S3KPAconGqmy6bgV/
16693Z5LwTShtDgqy26i7Edgl1H2wAgIA16ibSmbbpROptBZLRRCOHCyWnoXGEJTkav1YPrU1GBX
FmK6vJ6sEGNOemzMQkq0Eqa6eGgpeZSmDQpaBs4A3jh90Jz9uMS8wvxVv/BoE0NpHGll2jQwOZkk
rjSgNI2r64fDpIbG4X98UqEeFK4LNmlnmnWeAZ6clOGopMNm1/JgjP4aZg62dGPZG3KRaSHdA7iF
bkcinGNg7CR2OVw2p1j3WbaF3bmjyV9D6QInFukGk4R/xGvVrUf+I/hcKOu0Ru6Qe9q6brhcmpcP
n6NWgOAMVMhdJuw+padgjNwCVdQ72kVEunGR/ibiDGRG653Suq43POKYYvgGbQPwT6T9yvAmggTP
tU4pX60mLs1lK8WaBhkuHsQMINNyfRdPTG5KfNXJTA7iiTxY3IbpCrqujb6hwCMjgZngwfa69zYP
gytxH23iXBjcONb9lK81M9sL4Z2hYYohpzMF+NPcddv4iuK+0xdkjsS08so43R7fotjb1bKXbWiM
t9UECpRpsLnabtCckHlHIaaj4vfwz5tqMbnrOrLg1n0Ke9LqesTJXnzGp9pcwDMxgztmH5OTCcqN
Lo6DunqBWBjrJTLxNlanjclP05wsXXszz2VLV4t+lRAwLoDuA/LnkXGWkQttWQlEdGZfwUcXl4Yk
wZjuBpajaFoNhBaagk6e5HxigmtTufTEmdErMe1IRyPBOyQRu/OZvOqFfju8bzc6nGrWtUPJ5hjz
SBXdkpdYgSrzZHZX+Gpwcx3h/krqcXePLY+Hri0n5DFdPvYro1tqcwzTZivpta/eU8XO559G1Nir
xVZuyYCc4ZetGGfwMiRpbAUSPsltzCIzVHOJh5bLjW4/89fa8ZfZFEbGS8MytbDzMpMlCC0IzADZ
BDlOWmp+P35AjD4KaRF2nM8KyXgzSh8BdqXLSbTAFFyWPxzp5KeWLvRraWEZc1sxmzkZqkmbtstp
smdoGXcZiIV8NdWywYzvQU2mDknEGtQUXSNAfUpe31ia7hnLdXuHb0iWlNNFNVknHwfbnStjcrgZ
gdIiITMiMWSR3q5F5fnXcgdmctCcKMEoaN6Tc4HnX+AIKNIKWRMuFJh+2hU1nzDsjuoDpNaWcRkz
A34xGPtP7UfqcGL1IZmetnzHnu+loN0rP8xX84vreYNATHm9SmQP92zSWKoP4xwk7kpGFY/iAR1i
pAHvxOE64GvcpdSd1/Wim5v3HCHah9Gan8tgAx6WJjyz5A86/7TVwcqamoBK0FAtSqaiFb9+QawL
1NjARs0IOPzOa6rV8pObcZR+64gOyPfOLls3Ayg/X9fcASmd2bdotexZYlBGTkudfifZQLpBEjvG
ucfOxlxLIG/oEe8Wp46ofP2nd8/fvhu//v4Pv3vx8m32FpetVGNlBKgn1yzdlG2Ap5ox4SkxnFLc
kzS/BkH8fLiEnZ4hBRq1muG5lKsac1394tt5w7Dp83r5st58h4BUB6d9FJox8h6snS3CPkrcUQjY
tUI6Jc13BQ/myZPY0Up0wWo1wL+SxG2YPs5eFGdEVEarPP37YZorjFvIytjCeIUlLw8AoMvOTqBG
QOuIO9XN5vvITc2BrWbPTbpnd6HVGYFZJ7qoP+6q3hRscpFQdIzKSHnOITI6miMD1KvNkz785ynS
8i8geLEMlzx9MjxLdzH8gK6PT1Zt96y+eeoj1NXFFrIddCVcDyMQxvVlwFDLeljmWRJKnn6Z8tH9
dns3BqOhGqU0dfpr1/lPnuxO0RZ1DEfenvwxI5zwgwxF8UiLa86cjvEnY0tlFKHAoduXlAGHDwdj
yhc5HudEp+sBl43qy3VVCnJH4UevFfsZXhIuGQkT0ULgeBQFhmDRYS6XZIL83GXMeLauZWSTNM+l
e0nKA91f0gYP20iilzr+aKdNbYJSxmRoyfOS1rVvy23fqqAGDKOjfkaV/i2y4rZuXDAWaefQbcRq
RCFdpSLcXHp7uxTdWHgVZ3ceTWEwNfyICot+wqDEdnot+KsppnjEpVU/0IYHDozkoBqNqc/dRKg/
klOr5Tax65mqGlw3l73e3kO/kxi0KhO2aBUIuML9GtBjnQp++NNaVZpWA0CzywLgrjJUSrgyH6o7
XdFsuwUdmc307lSzT3M3J6HgUHoKdZ/d5wLDXb6EJ2JMjRA+QWGjZo+lDvGABGbXFYUixZY6HXw2
u4zyMH4LDAz1UvqxTls2GcwaKNyFn6R1BmD5vlb8sLVSY0hqqzTnNMTftvZmTyKiVo3NZQLAPu+0
SxALUHIdROdP7/PM7oeXJYHk19nBfvMSCDKNmWp+wJQ3z6ACGmM9E58It5R8DbqITvmPs4w7ETcy
4n8wTyNmTKNo9RHaIHoDhIYBYnU1iCrYDeQ6H/c1H0Xltgx8niZy4k0lOaW69+OxCPAxy4pYxBtE
U/j37GjfhmLqM93dVa0vdqY3O47mHo5dCANHKpxMRsmkbESIyC4R3i7L3hcyVQOHh565EJIyOvPh
bMRWfZpz+UJZTn5GPMq1QRn+I7o4E2SC1ChnGqcEiO6XzUGPvnEZ5whgxC3WWT6YKfZBHyGUsYPs
vbJQryt53RN8aDOq9EK8/MrNA+xbSqfRg/XXuIdxq307aGv7FD5ut3lKs5EKRA91P9FZTC2ejsiq
hPEDmZI4hwSFn9Gbrs+jl/hZNgecUIR8KvXWVZMV6/GM5oWnkHltYtmt3a71e6t9SYcCi5rQwnK5
mQJHOnOFSUmf15d6vBeTkiPnAZsf13oqlZzpzESPcxtEfsv2J1GWcSjCxrTe+eK7zJM0342YsWVl
koB1YzUWfpP+AQpuCTybusH5HzhFtgobJXGf/UTRTKJwXQOelhfLAr1TEShiwoX6xRyEQKkCqVS+
nzfuoKa5yr5DIHGROwiJUeKASoxahw4Zd5P5crrYzirEylggUvNdvWWc6glZcTn7JIbcnxPKujRI
ICgMPPKlAvQjaDLlm1gxxCIBpgSfNWFeXL0TV80HKYN3a2N0b4LR9YHVsvkLOe8vbeXlWCeiDPaY
lDucMsfJxiix57whsnajW5rUGMgXKviPQ1Q/uofC4s2ijkcbJ7LcbUWoLulBw3VVGYd76yLMfQER
VdFgTP9DQnfUGaQj11S5zuVrF4D+myiZscms6osFxMSQYykCn7i5O9Xqh+YlLKEpMM/m7JD8lFhx
N9NgyiVQ8HZPsmXHKjJvt6pfOPeH/d4i6BCx8/YnPUiabzPZrrzDBSgB7kfiM2JLxS9X1zhZ13lv
ktU1/8lOOnRFgZWV4T2r1o/i3rWVu2x2N1payoXDG8UkujGzxItD56Ph6U7qHvhuQqUUfk6qWOaS
WGUm7VeNqSHvJJixlCed8/FbVhlzBjGR8x3zGtYcq4pJHVdT8bN5RgnUzQVXqGmav3NBPfa8zh9f
TdNySqvgxtT5VHD+sy5sHRtKDLGsbmiOyhaXBaptn2vIbM5yixKVYI0PhKsKMnVkvUKY4JZBKLOH
X5RMq52rco3rNkPI3IUslQWyuL/DIuY25FBlPLvkwvb48X613WnZfuCF6tbau75dZZHP38PkxOrP
J1GPQGIMAtvPIJz34IZzhrDs7rTrDsbGM5be23N087fojj9AJ8ah+/VPS2SPeUmkP7kamU7E5wNf
iiWqeEYHwUzXuNYTQ0H+ZBkz2E4TGJ1cpsYqkJ5aWk4q4kLFoUFlvzAjLE9Ovi7RY99iPkhOtqN8
RcHQgxRt5hBz9PGn9/9Z0tU4UPSPk/f/x5XL0+NB8TC+ge5mRTPRW9lnr8b//M2bb9787m1f/v6n
53/646s33749OhLYWlpPHEa7PjrC/2Iek1GQ8d4hKnc7jA9adKQghhZlkitiTNTXIw2Kop65cHHt
m0MmF8PwGDOqmXRBkk5GRj9e3T09sAqRgM8kl733Y8WTyA06DVRrzY8kMIGksnHZZpeVQxSxVGFx
kdZcANgCdDUNXC4Tz8AdOqI2sccPXesOVUd9misPa8Slxg0+8S9Oojy+Vq6bT4IWDlH0abtzKiVe
gW7w8OaJPpQ9TskXxyLDc98FK8Srj25uKFlRizc0fT/iIvjDVrGs9lWhRgRXBX/+29W6BlYSt1yS
5QguFG1M9vhLBWTInMGKYRc1dRdRBNNFVpwvae63SM5pSS6Q+B2FJy0RUmxLAIvV7ZzTVFFElbfU
kZ7AjOZzhXSVJpZlbGK368ndeSVRUZhsi/Ac4cgw/WC9L9Qlc0BvYg2RPTe4JqwDcR8rzHP5gTN2
3VRzzMGD6RSj7whpsgrzKp0ih7L/3HY1m2z47zPoY9X8Jrye0B0yGl88qau2ScTEhjwfu6dSli4n
GNFJRYWasiZuMTMUv6JgTcHgJP419R2FnjUi7VyqNKrAl4FK+8XYJx1EbdF0FYecbPP4kfF2+2sH
ZhZtp9ERktjNjerYrUJfUKrCdGCSDjMgigcrNL0ww7O2OWOZQ/S82MwcDEuyjaKJBscRmKKa3PCO
khyW0EYHLUSIqYpngwLaq7UF/JrrkqmG0nO0WxWTKBW2oDO7LlAGNwwJrBqbjTCkIyadZqa6As4A
/YW6fClnkMA2zsJSKXZkwt3IcsCC1NoSQje3SHi7zx657y4WGELyd0a7uN8XTgeJ1R4u7OwfQ2/s
kD2d8uwkm/ofwz19orv6BZaG48qC0GYvxiT8OMvJxfhyUZ/DLzZ+aVbZ6hZngaLKNZVehd6wGMU6
IDziFsWAmoq1Ybfd0tsBdCDdZnlz1t6YsrZ/pqCMwdRJD3LbJZU4ii60A7WDVCStiLWOFqHokmXm
RIdP0UsbBZMd/oAlbplYTfnUtJeyO/1RIzFQlvljTFTlwKYr7pq5sHcqnyYL52GEGtvDh2j5iIam
s695rmRvYtIGHUbpgMDs66pgSMrJwuXSgT9pmpxMaI4yg2WTtByURYhI+3OD1xLsGp4fAv4Agqzu
zAHHPefNs0sjjSlJFGFahFzZd99Hd1LjAwgIxT6HeL/eRbz/OFKYUw8PcDc9ip4NclqtPQZeFcQi
BzsRZWPooJ63PFnDURSOTZ+gDVgCJwSiVXBuGaqQpHxH9tvsuP1CMakcVETaIAM4xW2rtg5KLXbn
+4w9Fs0QxLm41zYSYigRwnzRg2kkdNs7dIOVy5hkU9WNNN58Dt1UodvsCh65q0AVzi1XT2dEBWj8
kNs7rte5xMKn/ZAl8UbtrNe73/0dmt9FF2B1HLanegrcwwPdUh7p183dtE6tCdgfF1irh+jn6C2d
HpAnBdZEKe9RbZ+T7uQO3m4PxJ1SIiYpZmDHiZkqSozW+oLVEfqzZffU14PNud1B29R3UhHy1kHd
Tb5zZXw/sHbexdtqXlcLbL6lbttnwUbhnZX3Jn/KvoctM5wrjMJ/MPMmTVM9IyKbLjx68nlb+R7l
f7te4zbvN3y7nnwS7HjwbmNOiHbQdn34QZvO15Lx1Z6pMwIzR8JA8PFa3TXbGRXM7UHa5a6riWNK
9L+llKwxlBAOxovJHSmpa7IX2V3F5Q/MDPiNf8tb62S+oEzTboJQTqp6MYEaN4yliUsXbWfzEAhu
cl7DyeIGD2fY/QknAqSb72qNX4gaHKt0ThpkdTnPGGqhdv3rDXCzix1Sbo2OOg4oQDX1Dfn7eCDa
LhjXjN2+9vhP3+ZQD7JmKk9bpmCFdzTLE9meyWJVQHW5IAcjHm+zvKMFApZzUiJ/TD9GmPX/lc82
rGkb250cV06eIDzkBJepOZNLy9eT2+4OyUSoyLY/phv9YoKwYNOrJB7RaACO/fzSG0TnWEkXjcwq
iFC4w0jVuHQF6w1jPvD5Zl0Zhe+YDvCEXyImX+Lkgu7kgsigwHisBw22qlhpQxkgxpNm81lSbK4G
lcNj6tiJFxpc8lEseIk+OkqKVvcpriErviLGx5I7vbB8N7RZuuKG7+IAOLrnNR5iMafuMFdLI/1i
jMBkM74CTifAyv2+kPVQdNHsqEbyb+LW/PZuuZncZtQs7p3dyB8Ve0JgDiFx4F4rncpvhKdE5iH0
Q+KnHTPa7YQfBrr91Xw2q5Y7zHqkTc8vgl1czCOU1R1UW1BHnJ4H9VXGmUeuz+rFJ7FX+xBf7xkL
/MC2PYcDuG3yts6dcaXhpnraSXrVOTDQtE03T1qSU93upg7S0aV29rEwah7sGKmWl/pTLe0WiQs6
YV2QaIQ/N8h0gD7v/OY3v+nEsBexrAjM4Uk3kPVb9upFvFl/fgBNts0Sep8oamVRfIcW9Adr0H7J
HaL5cVkoHFX3YhkpvurNwIDUbhaW7ceCmD5ih3RkEjK6NegjhJ02J8VjdQ6rK3vJ0atrPJlwkboX
7g6yWk4nq4byaZEKV1/AwZ5yN+EdSvWp8klEJlMECOJVqdWgYJ1XzSDwLHwux63wVNF2cNucR5LE
Qe1JrjbJiSPqSDSCQWz50wvSzTnGUJyPl5gsNVLC6PjWBfGUUc7Y1xmPuzvFLf7vbl4tZjypfMbF
GvNYpFtK67ZdD6RHEVM2m0Fs54UB7Iwvyn0En5joCZdOAle44IUw78oPo5HJE1gyaAFxH7TZXzwD
qLbL993IinrZQoqjKxgK8fM7ZPJPZC6fLO8QfOh8zlnD8VM+ZcrWSFZ0qzvC1nKUuH3JJoNXnKRB
bEjJOzmvTpxKbW71GzygEKykxQYlKYW9niwW9U2DFOR7UzzwUCM6tiwFQKcZJPGPfPrZsJkN4bCb
oktAKqy+Qpe3S9rHKr5WPUcXY7q0mlWTRVAdXSnh9Qypvmq3Vf30Cze8Xt5sqcl5kCfijCCkmt0a
0ZS8dxmbWlTAbpdKiHpK7tTuqxFNaC+5n8L/dQ3L2a/D7E38nrxBFpu604MS6TrDT7TogAraynst
7QuXmaZvnUVoJDzY8qk9FQXf5089WJ/52evtADZR+X3r7UbZCwm7+WOibdhAnTQARTABo2i2sLN0
Xf28o/UG9mP8rAXKj/R1xITvNos5/H7ciwchrYTAcPAw6TwZCp0sni88vtHFcrSYXJ/PJsXtkOb0
duD0zt59BBIulynsoxOGbbhuBO00WvEXhJPkIwN49aH6602Uauzt26ZeQJ3hMpCm+ySz2FwQ6MVk
QKUMmfCpFoCzNHluhmKITUtmokjX4xqAKIk4lUxuLL94nCTHwmpeEBl8xk0ma1AXzELlFf0eWmo+
Vb1dNwKeW2UeVVOKIGOm60lzRay84/xAgZ1o/OAOGKHNRlyanEU18eQSUvmDOslnnzSkVRrOURrS
sbB7AsrciYZW9ejXkxjhVZQaLHE6z4Y2s3nX0651fYdGaLOcCTrWmOTozqZeogff7Q6icdJVKKO7
AgmgL4JpR9ZZV77OoG/1eo7YEhvO2gYbOWig6zn8Zr2TB+i/5fuAteFZS9sM2CKRu+8t/dUSEyxP
0kgzccyZBX4SdM2M2lhVzTjv7E29/iC38NGn7NDDKV4x2SGM5BKIcU1pYJlSs2paQ9v1mhx+xICz
mkcV8SJRaOkm9NwjLrzCNISwR119wfhl1cftJABPpIrQOQk7jtIEoxAyJhbmm8TKPp91kzfomSJ0
lF3qKOMzXqoLFZHRT9tkU9K2ECqShKSIJpmm2ogYYUl/epaYODNgXBfhCJL3C0J+vkg9CAJsFfR2
w5KIHpfXtqF1xETk28WLgVwij4nq7fYbvPqQ4dMgpRPjJyP44/6fPR1pT3stcdlmRYcs5S7q7KT6
++6cLe9Ix0eaF6et7bSOCPWL1n53smPt/IaSxAEpO+7w6AAUXiCMQ9avtSlCqAe3LtwJ0s8yZhKV
8I9P88uJU6gjAa0CZEwn/w0ckdDI0Gk9NG5X7sjC9u0MCALmO8raSzeUQjYcQwKqFN000Ngs6PB2
dYpg4N8Q9AiQiywlGaYwFnoNSdFvO9fNZSe2gO7oQ57jTAONq3x3fTgW/WFDazoCpNIGOiCYhnaK
wvEPi050/QoUls5Bx7x9fRht2sR7kfc+KJr82enjs/YvdUbCjwVSsmBqtH+NW4tjxaj9c/n+6Y7v
qZNpKAo+tkYx/D1Q8NcdtTltp6vXUqlmmwtyEETc5CNBeD7v3eMyOBAAxQPY7c5BNRrxjbCkVNDx
mYB0f/4MnHem6Iqpq3Z9R16Fu5w7QoKQzZjvwUKVlxThjlbYEatx1ajRmHXwiFOgshhGka/+Qibu
s/soeW+S37OpQywAkc0g4KhKfEEm2qizQHSbOrNgoEhkpKZTikQzQN3nFShniEeXV8MVlz4bpOCn
q28WhrlFUUlrsenjt/jPYB3bZFHCCv2T6wr6wgiWSHBkxItp6tTxlP3iLOZUfGw4bb020tklNtp/
SxHyh5cnqEvDu9yJJ2qIacrNmXWGsRbAC3TsiRebrAuXpleWiVsg4f2yPXv4dTYIz1TGt8yazzLq
ZeSw3qJPyljfwBDQLP49KBKSjt3XjkZw6Xt41jM+ajCgG9ZCeNn3OfP5qMQkdFH+G8mbrAU353yi
HPGKxhM6YVnsEB+4A/bIcStah2qRDC90Q4Od7+qQ/lYvC/T5jQ6i4Xc4oGGBA/o3mr9/W9b/hubM
T0bP4VKh4JDxDfEgXqkxHSP68aiW3BJR5hZijMjs4IAKmLIjbtrTT+0RMjishxPZzfKn47nUhBpl
hyvrZC/SkDlCfRGfuN3nJX2aCShlvDDPybHo6rZrHyTK2l/zUtzxubaJucedba2fSVKoiOfBMu4Z
2l9co8D6jg281ew5Kzpdw+/+T2V6+m+e5+Vfw/X6h+F8/SONvrjeULJU1be6UTaAlrvBvGQJMtjw
+S0RAywzuMdWfuBVdj+UailGDa5SPEPZ/klOqf/P9FFzXHE/NS4gnu3kfCRCh+0NzvdJHJ7Ub+U7
hTyilfk7NjLUa4N8dMwWkPiijQMWF/XNmHIz461S+TV/gXWbp8/bYwj2SGTHkSx1DxXCfIfrhczI
tBMVYtkSCUTRVEVijVy7UWCCNC94IClWk3QePSD4r+i1ur34HEu6WXOcrNKIfR+sTxSnvyhwHiWk
liJj6HYy8tdJQyn1ojvjh0jaDjd38qT3t7/zzvonhB2ixPYtLgi7Gm+Bacx0wiysx/FK+wVmiSEq
9IoTPlA4D4BepFHRMg7cxRKvX3Vl4RUfepe5+e/lfavyF77eVYunb1YJr/Ty7jWmy85RPpe0vtWl
PvL2Hxvf+Hi4DrdKVsMwl3rs9Cy5mBSwJcIR9tm6jKponNFZHc34HMF7634uiKusuetwtP5er9em
QqA4/AbbiiABoAvjYK7VLRPF+Jgs3qMTVkGdCahf7D1iXqgQJ+krSe63K51mzkuWPWIZOu5xyfMO
VVFgEXqe9BKfFB4MZbuz1lb/5qvi8bDtq0ejwsiQtjsOUyTFWJnrZSdXOWzBAHE+PmYHeoSOM0V5
QL5aJFl6WmyFlEyaLoukIfHnkSKP1L0tup8JMs2F+ePJKjZVI1t++busek4h1K0iorhvS8Aw1Ytx
rGqGtNWXOkea4g5pOVmIX/G+HHe2QnfQDar9ha8xXgc5VvZZAzFpoOQL9BvfpTh9r6vAFGLVG9gU
zR6kbUi3Wo63msHgx6VJYcAakqW31WseSecQbz3gc+IDcteltnckGjQNPCpCXjBXeUK9zG2H7V9E
qfxskQA3Pv59OVVmZLg5f2YzD0Zi/EN1R09R+yUiyP2JHPIu8C8EyMBk378t028HDSJtpJjdCiiI
ZVIKqGVV/CZThEAD20QHyPFYIt2a8bjTAkgbpIK0H0BDX+mvr8v7gNweF+/Idd074Ezpvh0vxc8r
dqQBuX9+lzgU+RrIVNrtGdRAvhaEesl8I+AkA9zEivmypZbZvLnczknPJinzqVrfFZpPjREHs+dV
OLAJZkq0pUY2vKA1nHaU9PJxD3aOv3+sHjXGcLXjoHy8y+nZ4sFLKtTeIZP64OTJY+RWhtJ22Ozc
yZax7JrcwoMLLytX/Y8/ko1aUw62fkqgEe2vxUCxoutU+Ucohp2uJtcjPT2igJNkmm3qzfe8+MW2
GgoGd8Ibe88IUfJCvSY4imS2LDZnzjm9LwhrcqTYnEfWqjDoIFaoIz+j/3AVJ53fY6+1J57/PvKk
b3xnHmOsyp/JObS9yeDwf0LhKe3t+OiUHaqfP8eCoOyWtK2VFG0FR+g45wO+VJo7w5p8EzHNai0q
exCvYUFm8xqU+VIQZvXYEE2qO/ypaqIKdqJaG7WCd2367y5tJ9lKWzoreU1R4yA3a5QIPB8p5Hpe
BvgadCnwlSbrG+72DcReWUag8mQbYPa/nnwg30BjKIp2w/mign1OpHCLKdwyFxrEpW8RoUgGSEX3
nNvgWzO9olRI+5EfQWylJelC09V3zNI3VfftYGXGc2ry8K/Qdn+RqrZJ36y3O/9XkBr9mdpNm3dv
bkmiJiagA+whyxS8qDc4r1DYL6itlDnERPLq7e4gjZy3LO7QyxXuzbJXc/W9XDQ28etydZSrtmUv
CU8FQSiIvyt2iyW5/Vb55q4nIvL5OowFn13/7Pxa57VMAJo7HAQXd2+2cAq8rnLOFEmNvvXIU64X
54nIW431viUQ7pSwXI6VQZc54ojlYurH5m+/I7dBcX7ock9kK+pZcYVSKpEIdHtlDd9ZbUH1BJrm
xF1xXhnIc6kuEdFCzFFE3pwfiydM+fOf/xyWrvqVoTP9dEMX3Ch15QDyd8WqbghZo1cmtZ2DEvUh
Jwy864IMoe9bdrcobiON1Sl7+5FbAFjIMnCGtCq1esFHdP1jVk10LXS092IsaLnv6/RBOxSPMFng
p8N99zY+eYu5Nwk1twxS4lcKlBhUM6iWdNHR2W4uTn6VphNsDrqlOS6++9MLp4Eikqe6MDD68skt
5mtDKK9qTdE97lrWWRSOLDSbsy34w9G8Zhk9rwfvgCdevLLRoDfmHRPyj5zvHre10byOvWbqjRTr
blIsM6jlksAstqAPusjxw11oHjTejWCyKR48vvVoCs4vnjw81U9amCDlG8sW7WnSW7krTgEf8X/w
Oy2qXO/+ji5yKrnFIglqM7HKmwABGQ18ZOxqqtWoPElysWptzqKcfmZvA8wMinfRzc7Rtk25h6a2
LTlgFu1VtAnewIsVNLyKwZjlLZ1KpcKenV0v3zIzy4exakW5X4r2GzwV51nxmKOC3+rMr6MIScDL
d/f3rou7g2diRcnrU0Bwt43G3cwnAPQnJzpk5e3L2FjeZOCmDMcqdE4tBDp1ZRmDLfAoEqhuwXTJ
85aHrRB6DrN8EpbJs4v4UgTP2uXB5pytDcP8BJbs1FHuYpRTp5pL01rnWTvfiNeyF2Xm88MEWUvH
9RQurw/lPSAw2WZgOtm/zVfW6yUdzXWRdZkDlqQ5WulVHP9SB5DM8aplkDzEZGrM1bf9mRZ0F/D+
R6Y27g4BpbuOJZf5bLfAfw8lubFlx2fZuO22VRMb9T1xcoesmVAsyIL4HG+K2lc3S8vzejETrwao
ZgT/D784bhMGUUpInx/Uz0rbyOX1rs1o37APH/Lhw7VDyN1AHFvp6dZEvyjZttnSbipEgyb2SU3D
KsP71d/CYPu0atXfMOSf/0/W5PLHZSo+9kKGRIM9vLx0PpBPgdnpMElq/TS9USoSOWoOUwtECzJa
5Boqab7UBCd/HSohjguHKY6nYGmk3m5WAjZbTRAhNfTLOxbgucnSlIQDAodiIShIUc3m6ENVbNFX
hUCdPah1c+mzk3FnHbPhAJpLgjimmQ6DX/Hy7ORJlO6MaoP/ng7NRbNjSkRPazRnqKNyAB7RLyh/
qJ1huSQ4bG537pH32iEPEThWjIRcqX4bh/U6l8hBNjTO7HW/HSYVs2zFv5iQA1yZvdcUlEg33LQS
sgZyXr74vqrl3k0Neg0mSsW/XD/wwdPia6QgojrdzGexMTLy9qCv2gPL7ExwA+03b0KHYtnc49L0
sG74+h8BmWAXwPSUTT44qbWpuKNRBbt7socQdoOA/8HGp3fI4vcn2H7TK3fV3J1oXIbskRJwZ32f
yL9zu9FXJL7CcI7CtbOpBR2607iH3qmIALjkw+FubHZXLsiFa4Zk4zaj6JJOGMTpQhAN+qyrJQpk
yyHVtpQltCl5EL3SwQrFhgeNQQrfp/Pyye5e67T5G0t5wvywXX4eR3C8zkFMQRMsoOGHMsVB9Dek
PI154Mxk4sxNxW5SBTXrzGiVRxruITlF6vM/U5TX1PkyJakZTRy18Z9VBwtPjFzSQfgMzWe1wwzb
kxMEypuoS+pcZ349xsY67Iu5syiWo9YOKnxwSR1BXJbjqShPrmYXVAQU/jBMotJJvfjC4i7BoM9W
KH4NcQLG0CtV/3fLPTdzO9Aq/SS3pFEznbnd2/Nolm8z03509PH8/X/52c9+phmR+Z9q/XH6/n8a
/+xnyGDd2bxBb5qZwcPoaUJMdqcXkB3+mKgif0ruKma/I5f2qC/vTfojmLRGfyJdpu7XutK/8AaL
0yRdLK9Rprg8QvyT3+lQrgmghAu85aQA/eL5+xfvxq/+6Ui3BFmDWo48UsZhPdN67TYNlyrxyK3R
8Wp7vphPx5w3deFX5iv2ZOIMNJykgjI8bSpBSSAoFXR7nxDyBlKQsMcabNHBtEoNp7ceamiBfHCr
Lmjj8kx6I/kYJ7MZg7B3CYVTNcfLdU3x2fwQDfH0pFuu7hbzc9HP6dHA19A5OVnWJ5u6XjQn9fIE
Ne4w5H1C8mVUNjCkikAEQHGZQTdGmCUIv6tR/l/RU+IdCY6wlWAw+wiT6MHQP2FQGH6HDiivv3n3
ezL5A33wTuOy5jh36OXlFW031zM6sUv3ewEh1BeMLMbwlxBCPNg4I895pZcqLn82shmwLM3fZiLp
bmid8Z0J4w0qf4xRA3WX0YF4zZYQ2fpych2L1bg47hIN35zLhXtXWHJMQYbjsQaPlvW0ND4AmYri
R3giXlBOtP8FqYjiChVOWP9lSEOxrIzHUkN3XX3cVg79TrjzNb2DM4t7recZ92bHHYut0BzZ6TGZ
2+ivwFGX0+fBQpk5gCN8ZCqxJST35Bv9AN9h3r1etjSbf8b8E/hNimeSctqO4qKhvHHzv0gLrj5O
O+crTO7FbGlHOFymM5/2PUM5iRSP3V4VbIBEUkfS6RY5g6UmJdZcCrGzS1gIdZMOPurEJWzqyL23
eTN/xyoVRxkeZtWimJ1K98/SW0BPmeLBuvvw4YN1z1wDMlFmvTC7LdO23ahD+9T1ZDm5DBKISYZZ
+xYlqP0dJWiBXmWz1Or0NlGEVMxrUc7YQ1PrWvfR3j1z7B6YAVfYypUUvoqGFyLciUkD3+F8ZUwC
Y7NGg+zkoXWhJb1tS3Zb4yrtd8k26A3WIU0ndWF5ZzacVd42NQl8bLh2YzD1dDNWVE8oNxNnJOJk
8acfBMwGwuZyjvpZV7/opXlenbQDPaUbpG4eZaoMDahJameLJhdKsXhdCyYSEyJlDA4hziUizo1z
3uhIq5kfa0t2z+jb7XIflUyvpqB6rbuxG5yZcJs31XRARB4/TyFI+J4KeNX98eHGaUB8UrYnBgkK
g7+VwZIsxGjQ4/iyRnkau1amgWlYK6zU7mM4IUTi+oJWbtI823y1Bzv73kux4ClF0DnQDH/As5sJ
5zckIC+gLqpRd6vK+lmj4kMK3s26poheNP+41C99NkqbDx5iV77QrLq4Q0zuGgEVhdMGHnYbqdU2
A3omxfwbLzToajcyU3TLyKiJbMC2yV7zoEs06zXt1raQpXgLUKPKztsJv5F1pbm1buddG9aHp7AH
uRcaGSNvJGgkAAckQeTVhWhDMZsJvc2lVuCzC+/ujQQXpW4euOKpxDCR4NNFk5EHrRLbCflpoNW0
e7m120PTPN1ID2rkAUduXFDGufYc3XYn8EIDI3iCichkISKDg0tDlG5+aDKCp6I85akX2OewdDr1
Bpi4XiImpw1onQfznQJfRH4X8zCKwLksoG6hboO+B1iPT5fCAgW949CFj5P4POml6HSMA511SBFv
IcTYk9gC9LRErfNxeok9X876jsYh7iFT+3Q+PMurBYbsI2brPJexMaBbvvzmh+c/fPPu2e9L53IV
TEWMBkYIejiKvqFRX5oVnXcHyp02++z3z5/90/M32jJFKFG1PUxajTmr27ux+3rADezV7jZ2NrEz
tpS56RGF8gVJGHav4rBzKd1LtJhke5UfsByXMbgQlj6FttCix16BAGBWkzwwhvusOxywn0mfvgyz
RO5dUQLQuYtPe7vEQit/4hnJ8vpZm5s8vk+DbmJiTPkwhREFQxPpdXaUDUg4LQu+v6ML7tteCILN
XTqzdwZmFtydv/MWinarPKUXJrSG97TQPVAOJ3RL2EOyPenrlGaEJgLf5a0clKYY1IK8meN7ePsM
37bYSPD9D2iJJNWzpQIpkK8Bf83m8deb65W8QAa7Xr2LStkmfFnMBn87rrebac0ZwI3G0/1x9qhX
dH+8edQDHV/OadvlG0peucNUA2dMqFCcskDhkr9mW841lRpx2ICzCR9rFWjhkj8j5zepGfcB+TP2
uJuh5XRkKdrVunpJWagkKqvVRmV1IFBa/zSyAPU1oWeTO/7ojf+6QhfxSkJ3XLeSlQ7qOYjQWUOp
NVM/RrrpkOaIjG42ya0CF0saaS1V63f5XWAW4ggnZ+7tNQotOt/vrogqO4WiZxS3venCp71dtxAz
55fv2PivtwwGr5+Rrde/HbDx1zuIx9Ydvqrw5VF/4fXInuqlFCnt2Qfxh+WGcrYFnZh9iCQ/9fUK
lyqfSo2+1FTwJtMxLs+R0+tBtWy2QKwKxEwpK9m0LOdOrcJd8CUn0Aim4snjx4fkvpSuj7S3g+sP
KGio1Uck7uf5xJc+s83z9y/evktrzuoKoQrAEQTaA/4j8qvi+6ScgQ1UTzRsy5ykBaZXOJAeTN1k
cYOnR3pwgCmX0RLk532iFTyHw9ZqQT5pYP3QtEK152SKd5kPhzg8ykC/of4t5n0s0131cjlceHV1
xvViBh2Jrw25LX6nVDOMD5oHMGdxXc+2mCPWQ3afVxUeRSczh7EZTqAx5rElj44V2GOpS215qZyE
AhnIiKW5AoYSdDXL9yK7wH/hM0p9sgxwNDVZEve5LfSetC7f4cx5G1iiXm+im4nwHji5UGYxEE8D
X3Hj7TC/b/8eTXztX+PbWE2KLrSdjbDTo84GEVqVQMDBsNz9SXC/zoUG0X2Lla2D+MKm7XuNowhr
C4fgkuJiOXP6JUaN1g8wsQO4o4lN+Fmsa/dfGezCEZkNkftcjNymL1ZANlpZDqB0EXKu5reO94Nc
gqnDs8EGUOeSMbr13W7YyZJ16zSue7Lqpp+I6yB607XVF0pr2BMs+vL+TciN1tgku1qNQ4MLFPRc
fGjkYUg1RvIvmmciAx0RBsvqpgvjGSHa7cHEjLAXBGkrm+xUopE8jJnE/ZUU91fijvOoQGKGnt/+
M/nLIQHniq8k4MLFUpU352USdIrka8U0Z9quMtmljzwb57j4IbOxWl9b8q/5BZCyflg/yjO6Sm6B
BLS1Ukz6HVWrn42S/IlYCBTHQ+uj2jrAkXPYJzb1bVohQVHNpxeXbVU6dnNNR5BVBoIOIztJz3ux
nLNuCtvyaYdFY+csHIaMlZu9L+E7g9VdZyfpN7ebv6p++L69AVFPZKVHOdLCDdCmpRq2AklECkSs
43iR4iP9dymFakg1pT1xPuj2kZgeLC1EunilOKhidddaiWcY86lf3KzzI0QTH4DYDyNmJm5LfJmg
QvnLsi3mJpUeMNfRbAUnqOhr9Y1J3AeGQ+c7gBTDaoLhhKgOwWsGtKPqBxc0AyHmgy8pS4M8LMQW
Ih+SdtaVHyP519KkMY1IduvxtF4squmme3p71sckM7RTiDOdzXbQ3izfcsbtoiScE7bothmJD1kv
laQhmByO1cxHhKcx1WOqhDKsyRYg/kpx0Mi+OdtH8KC6/x/TnBt3C2BBP7Pcry8Rr7ox60jGybe0
8ZUOf0PI4PJ5HEKGH3rcFiaU65Z81MuNAr4zmAtb+qIdM3fb6CELii5B4VYVbLIgb8wm0DWN7ZSr
jTYrTu9EVhC2zTBQ+k1Fycws5gBe7ZIPPqaZRGx1wi+kLpjkBs2GqnGh+pG6mGQWkTGM0k+xy/w2
xYKQ50RRk0t5SUhL8DKLcPpQHAaj/S7Yz3UPjqi0UI3dVgG62unqLLtX+J50Hy529PCJ3Y2tGxeH
8FCTITa5U1zp5OC90O89kvwQ5MyXG4QpBOfBxp/X3NG36arvyBoDxuDfRX3Jr8qsuV4+I6P9l31X
8zF65mxXXyD3fbEBYs/qm2XmEIiFT212N+lxJAse7lBw5kvgZszBS8MMCdM5ORH5hshLnbPsPLa0
Qe3jiAw1bUtZwPngUBuW99Fe6rDWxKoVt2Ux1eSoaR7xEn/Go0oT+Ca+s7eqSmQT702dViUSJrKA
yKe9tnawg0JhsZf3qdO5q1s6SEpzvOyjY7xQJdIoczZDdUyX2oQa3eQoanzJDWd4+su/vXT5wH/t
7ifrJL0QCZbClO+uEtwU5aTQ5mGUht0y5DS+p8PnZ2kO29TgQCkr8NsgndXJCZmHq+tVLuYgNOqm
N7MW3rr0VY3kStHo2fB/0l86vjkz5+yS7930AyuZU6/C94MxkBDopmTj1WRty5GD06xedjbFh2V9
A7viZON3ypv5YgFvxeKPnkhXk8XFCUmtIurMsUbmcPKkbYM1o4AuUO2H6eW8qLCH66qbSbbyBQF5
TJZ3pi7KStKlhHk36N8ynayASzwSa7OZMxYr+uZjvhIK/r2dXK9sJBcl96Fjx9i0222PfhEChs6D
RSmSouxlTf7JF4NZHbQXa7Kt30lfm6vtBreCrsGSbLfdpwNMBK5MVW5RYf/y66pVee8+DKW6DAUG
bUh8QMc5Ve8wU08wWXtH41Q+GoZTg1BdxDPoqHTXz2XGQGm1xiYPhC76cEYZRvcyeDpQRwZtsxXR
b24T/4pYhMN9+UCaCfy12AQ/ZEABbl1iijOWe23a64F8MEjo1ISEMiSB9mCbMjThDsCzNtuPFBQ1
5JQrOAvbNJWEE8TzSAlBEDkVhQxaBhLY8Q+Iz/Wvu62xwxhJVjqs1sx/P8oZX4z+iAaiYK/V7mTM
4+YzNWZgzPdxGXG9W0F7z8GotDtqmItQzq6u00EHbTV5hOddjcHC/OvGW452/PH5HVFJ3XxpkhLr
TXT441KD8XV1XevpN+PGwx8MdjvyuHVLhQO/o0pVPjhwyD5FXg/0b7Ves+UuyOjziWOG4I/5Gjgg
CqyAx6ed13969/tXLzGwqnPmI4yaasU2cwGcZIjQ0ygd17oLxdED52bW7VHC3E/4s2sr7RedTu/M
SKYPN6cdKEitwb9+I5OkuvCs7AdvvPGU7H3bc9GCB6+JKEqOfcgfvt9ItVFIvFFAw+DorTJf2mkz
i9KZSMuYi6CdNciTnE4mr4yO+iR3qVHyOMx6Wj1tKwcDtOXEsw4PznPKSOQYq5xu1/DlqOzHYK1G
3D+h1YqhmQNmTrSjPkFPwRuaQHgH1Y7w3uNXptmLp7nvnu79LvFPQB1sRNF5A/xPdA1DC8aJkYBR
lAMunrjZv3iass90UTfV+GIGIpdszIvJBg1c5Gp/M19++bRMU63gEQHbGtxMQrdo3MkX0QAungyo
jTg3wNPk8Q5arz+T1ut70Zqdq6DDoMvPupJTgryYos6zZxUMYVfBv5IavN5m2+vVmGvmdYyZFmhi
d5SkJS4l4e8UUFR93rrq20ZubX3LZifAeHaBm+oDjLKLVQAJfDdsRd5qAd0y95CyVhlhiqA2L1ZJ
Cqs/MGjoc7pvzKAlM74++3riKYYF7oaSz2q8BgKFCleQh8rFqt8LxeHq7nyuEq2ZYt5QVqVinRhD
Sy1UOvqB07ecZcl9iXaVAGDNSlUSqlpTlKPKVBZ1pgWmOvKzctCjUWRyL8bKR9+uG8zLAqeveint
wM8LxFTG+OOajoHTer3GQ53sttc2Z4k7P04nS+wPkrrsmhssjETZbjCuvs/1R2C+x36G0KtsCWce
TkAgvcGUm4PLQTFBrqEDLtSYQylvaTSJCO7jRfWy/jjZcXKXo3TzAWOOZWQwY+hLTcfgNFicTvR+
niLGQsCOYDL7GICGhoE4NRbeGfKbYR5yHxkEb/u4UJp3PLPcuEm5WG/k1h293Pg5C7ReK5vGBJVp
DHnWdCnyTfFobJ/DpASk5NETfp1eij5Y936tLtqIfUOXk86iYvPhJdPchDhPKV6xm7zx1N21XMNB
fRYckelJ2/wAnaVEK4FbGLc8mZa+wbhT3q/gYcbGHyg3dFYZL7fX5xhlOMZ7WQ1/dHWdRJl0P1TV
SvKhresafftG1m8sNrxZU9noQdOB+Vj1i0QCHrdY4I7VTxit0MsLIAF6JemN7G0vKpka36JUL5yE
xWaEP44665sZDTr9pKPGlnobef3rqzCq7JYjyoIkH1krre/DquPMq6ePz/btFrw7lSIES51245Ww
mtwsxwFncOJzvF/E6Mcx7vKoHj55PHj8N1uggahM5SJKs08Y61iINl6suDcn1F++xCt7ofNwdR27
ObE3QSnfWoy95af6Q8W5x/g8gC5gMPB+bnN25LMxbaA9C1pbwMMS+dHlFvquZ0rWvMCkUaFCnlA9
/BlPnD8B75suoYG32bN0rNc8BUoikB9fDh6Xeb9YhKHprO5Wd2ML8S5AP51f/oKWniYxkuMBSJHp
FYKi7WIArPPkl78ozucbVk8YPaealQm4vDt2ICIznF9g7y+zNd9yQI2Oe1ZXnLXopl5/QN1lDuKS
9Beu5Dc/b28rADy9WFfVeTNrYeiDW3XVmGzClxcMfpU5rOJkD+hcyweX0L8NKEyunNKsZSepdST/
HmRc5aJ8zogaGQh7EVq35bfE0oqlHQoQnLhAB5/hyWTXIUCRl7abAX/hvARbdPtvqxbdXr3MX7x8
9/zNy2++x0k4wVPdCVfMeyVenk8nmEKCF+fQga/mIx6jjKkwmL5Fpox8gH2G5kzwRgTHYtEJAn9e
s3l4hA0FaNlRykMhRCm5d8aRWZkU+BjrV5GaujP8r7Uy/SqqbE88tkFzw6tGPJoZqBPdUpuCsK4Y
B6kSnDfaS7C+ML/qvtFyFJ3ulsfGi2S+uXPQXnrNnnF0D4Yyar3595fKZCxVd4CEkqe3A/kg1C2i
7gY+k0AL/kYvj7HMCsTZqDS622d1ENMSVsv4jt1M04ToI1XcXIE0AYFIGXAVTwy3euojzFuQ+Tb1
Llqb7AiGwNzzmFdaA3jk/Isdp10LqhnQD7RiIRFL9xgVwWrWlgiQ3S/U84K0RuQ59c/AhNKYdmR+
uUR8tA0O/vD8s3sABv19RJOgl7qRaejHGgGzO/hYwz5wrPj7wCAkBdZRxikovFx/UDjfaoC3B/OZ
wB+Uw2HuvnNh0gIlvgmLTH7xEJogDMk1K8+v+AfrIZQIViUmQoFp/TlI9aRDXTeMfhhZIHGyXxdP
9vWKL9yueY4RJXpZWaYPetZ6/2aJ7XpULHotkblGzKC2sV1XoZzp3GsZdzJyBtfavjXXY4/6FQUu
V7O0X9VM2kHPyZYoMK3ZjaOT76KvHEV/W4wpr9gouA0UzFX8kLt3sIwJpqzM9bBod+oaplEI7bJl
fhGu2xLXaanrdqQyqgWQgSrNLjI2ZEjjQpKMxYdf7K+B6ZdPU9zyvcsfh33sa1N9+cRePAIJ2yZY
+RPXZxJXH3CG3QT5rlxeOW9j7MfosesJ/sl9GT0OWpwstM/4t+s3/nBslGk/vqlX3hyJK5NWmxR0
DOtLyqOkqLbvS/ITs1wYvylO45UoPAyTMDyLUioZqKhcFa24UqxytjeXYGU5LVohBFr05jSzDKrt
uQxd0eyPFf4lvZl4GikvQlyK3OQC7OfLFWC6OXZvXdz13GXFk0Hu9DFxiqrLVtOwi8bAtng4Xsan
ycIndOWRm4xhedIM9JBjYWLCEk1VfbBveUjQElSsW3oQ7ZbCNTBeBSLaMNaskFkIbGdXPmg9GN0r
y4AGpmbii3PZM0Lj805OCO10/DoH/yQckqQj1pHvri/8qK1q/kP9UQYh3oNN//7UxEzbeRivQSOs
r9vG6hpytq9w9lrv6J6md3i3+1LnCMI+Sq1bVCWlo2gKJbSG3WBDfFiYDQuF46Fven9jGCzqYAyB
xQlACGqMM1IYfUfIPLnYuKjmi2U0HAG7UQJGYDctEBhcC4l4/NfQS+4+ly1EswoW1X06f/RkaBxu
7ztoaamFw9pYixbpVX2DnqRPyDv7aWI4i291fVG52zU3HfE99aGs60SaIYihxhIEdC77qpHbgbJ4
ELLRrqWyrIl4NGJSIcI8dR4KjDueyRXieyw7UAYJLFpv+k0L1CdOU1ndTqZybhl+1hpziqIyqra6
c31z4/LJIQ3zB0im+cZ/oG3ds7c78pLgaggnawfSIfVJSh8yiCwnJLn+kiHj+f6QIXvuVZXcFb4v
Oti6Qh9/9MF2EvgBXZZU4jJEVlOaZujIx9n7/9Gg5iN+OV8Pfaze/5//M+PmyxVCwam6sYhmdGr6
lDmZPbbZ1ZFTsrK25GDy/V8WLL9u0MhFwJ3k2tKOVv9psqaT7F+DDt+ZVefbS4WoaMOIl7FibHAG
Dz5EI0asd4R6Xy0md5onYDE/d/RC244MwKarDQDqzSjKE0SEK09OsOKyvwuQnktkeoOMF8wQdSKY
G+pLWx/40lJbZlZ0rcpdZmnhX0/PEvezazgWw4SNSpyyMnnNHSWt/ARBVsTwy5UXXQKvP4FDTn0D
x6NBUWYXWPmuLiaf6vmMcFqQ/2p3HdsnHxU0Kf+0rIc/CRJwn71MWqrDkrN6ilP406A9gwBtJcJC
/UIfnOiTuO6WGWxLHuCpQ9XitDWwZfG0NThCRQ4YrO5ov22gr/22zhK/H9wpnGD64sAu0ucIlFWt
KVZTxABVYdkdt1UxFdG7waK+7Awo7ZIN0uJIxDEt2u54TFzAqURkWfswGPtS/Ckqm72GLreZDANq
chi7hcfJCMqof9YPEYvjNR47cHduOklIxJi+kwtBD/t9oZlFRRw0muf6AUa93ck/6Htygp7/05vZ
CP8lT3n8A114IhukS5qgl7pjOpcEv8tBck0eJfruRakJjSd0342pXoubejJc4s0B+oxgTqYbyaHN
gzXHVqcBKhnwH59bJeUT8uajQRuv9ygY5Lcuim39ATpyB0fVjWWjTDzO8Cgb/YPYOG7eOklAjJnU
1IkzM7R6Ux0wsFxskGkoxFLYRW7GhMquIArwi8ceLgphhh0ORXF6jl2DfycZ3WUD1M0P2Zs3Pw25
zhGhm2Vpw3Qs05GcjJHFv6UwPk7irpxfDr5t91xtK5MdJH3EbnWJaYVxhkmPDsmMcnJ4aPwU7NtY
PhlNS7hU2okjPb/ZagR088YY3Tht4h+Zj3ry3mPycGQhq04wDvmLmXlMVffu+VG1moNIZZZ1n1Ly
vdwvzSNblOjCX4rUJ+EK7I8CRJ2JXAq/U+ahM5jV+QnTnmC6UQNALTra80v1qlnO/21T39K/dEU4
mF5wS8My7tmRxW9i1IRg0GgXi1CvkVZ9gou36lLmO4RRFvw1w8L4YR5Xht4gZj7ZBE2GbgbERzen
7oOmZ6CA8YvwfE3uVMWDk6e/aDQNKXzNun0vSViK/zkd2hSVGvzlKFT8umUeYRqj6QJxiGcRUBHn
C/TtKFCYYzoR7OZQnYCLkxN5vvd7UO42eCOVVKAv4hq6uExAPZ+KTRslNI5/fjFHWYUSr16jFyUa
cabbNeY8L/BnxEjqoa2f9kq7aYUUKq3qJkya8pmueMpDpqGLJELjZTbWYsg+Ieaaxxmjg4BwK20S
ujoeha0MM8wB3PHkvzVFkN70Huyxe2WjmTnu00iCtNzFoqjzqGircV4OCQzkD7OGyn8H1Jkz577U
uj9ItvbQQBTsLtlgXekFcANmhruoj3ZsMcH5vSSZwvm/fAYQ5zJahrbaGSW9keHt2J04IbtePHKp
EC6SLA38otcG7wYyAnROvG8XHypsfrBa1+gIpXmH8JHQkXI59FKLtxtrQRlepYFAc5NbAKOo8G0r
7CUTg6qQnZ28Nk+LMnhsTmVtk0FpOIZeAy8oVdwJDX+vQhKq2D0Trw0P76mayBeZ6wMF25ES0kpC
gwMGy9HyOVZTmZJndZz4p/xMJUoGvNAzaT5UWvXslDlztm+XhMrpmodcAwhci/L5Ts6kfe6xC6rm
Qax7OS49Oi5Gn/M/+O7TZDHHzGNKnuZuuZnckvWFQEk/u2q7dEQWmYRCDr1AKMG04sw7PNHk+mAm
hMdNffLQ2XHiLGWJKF9cmJOu0WRrthGXz0r4t/3aV+8cXlNsVwI5kwUN1WtQOXokHuF8YjVqdfQB
6e9H6v8Q2PLZhi+UC7MjuaRdmiWJoIoIjaCXBAFT3gUOSqUscsGt4mW1hNU9RSJ1IxfMrFOXJpIR
9Doi7jDn8uXQg6l/sOhYWkwWBKnbL+Joe/0fE198o7ZLRAfhZobS/17WcyS2fafLNEMO7ttRZCqE
R2MLEKWm3nx+MgwhCQxA5MhuqkmHaF4qmHRURS9JMieOu02kl+tr6a/rKnkIJydNKLW3c8zmdn7z
5cxcaXIpvQB0ijBq9Hgj1y9a7jC0gulmO1ng2sPbfDQEiGjkAxs8d7TfXY/TwIlkLumVVLfTe45S
dNLa1TG3tJXjtt33RMeyJPDLOB5Sr6eL0rn7KbMng7eLMK1L0IMzmhhDVbjQn17GLfE1vfln3jI0
DXcXtDLv1S4gH6nrSM8nJM3W8pwcUn0GC0pJz3uSOCF9cl9INmMC6GqTTSbvL3m1hcp5idrYj0cR
YYIojDgxiKZltbuGxxX3W5fZmiaruR68MUooQSGX97lmY68+qf9UPjmLk2/L80A7kG+4255PcEQh
kWAG46AELOTwS/omqimXr+5hJoseteJyAn+8eP/fm+u6dTW9ASH08fL9//6f+K7OOWoV+ALNJZj+
gw65eChWv/iC7eaeB5KrOryPyyZ2kTajxC5QyRt1pfqjtPzG51AU/ETy1BLERQoCbsyWS/U8LH76
Ca/H0Q3qsl7fsc/6Tz8Nnao2QcgqbiEMINBPBq4iSYmIX9OfzslLaaM4tCDCNpvV8IsvZvW0GbBV
flCvL7+AQ8J6sr77Qj8YXG2uF5K2y1l4qRIMbpZeSUcw59eRdRyTWCVriy++HhXdp/2/t/zM9Hdd
NBDwDGmCi8P1Rx5ZDwn3rpkjUJbgoHTECNUJgbPWMAWbsdQSR35lWmG0ePltrJCZ4KSgam71hhHl
Ygbp6nLL1IKfmKQVZrXdEIScYVHajFeYCzpclv8KY0e8OQwfovz0nWERPfl3rih6SouvDV06dkDE
D3D2f/oJv+rGH/30k6TVnl9e4hxOim+lMeAFIUjILkJ9RlbymekCxjDRFzAt+Gpc3WISl/nGSLag
JrzEsuU6XhUPnnfbQbVdeEAIZBT3IKnB9nNP96JefXZn0j6ktB3EdAt+t5SXYmmudMZSyfLAURZC
5L592d+fNFBD1PHgs8NdpFhr+YaY3agraLbiOA8Q6bMtyPeUpUmvxFUUHrgZq99mVa5m8kWru6l4
NPSdoGefM8mNBif4ZZ04LjGSl7hCjLSKONM1V4fHZfkzLKCt4LTKn6nL67IW/65lnb6UVzriWP7t
d6E+RBaYFpsI81nvhqR892BSjh7nModgA7rqotnLZOw4eNp6+4PCOA3ZYjbePZr8USI/woyTFcd0
vbtbtYVzHRdfyd3908EvP7eTSYcip++oBr/ry6o371rkQvi1LW/DQjUyfdGMZBIjN3soQwrWxXxN
0Vo84dpWX8RDpQcPFT/VLbBJM7Bu8+JYe5P1qg3yozmTSbM9p0XTvRl4yuUT6oaOtGjRhsHNe4fL
uRCqMZNVd7pQjFXqrkSbOj2KDXPrO7t6j9t0uuNAw6pBPdbPXSrvtNh4LH9q2fE4TfzdHkmCvU5j
R9qSdkVCJmanDI8efbx6/z+Y8wmcKRlM5uP8/YcjPqHQORMtpZMCjfKL6kRudxcXJz5yQ90IatXu
8g6F7pSCkYbjTQ367qdqYc+vyHKSPMNgHijKzDDApUjcIORDe3l6Linlvoite/BqQNauLmKOPOll
o9yhkAXilLoEOuABHj3w+Bga3jhtmNRNXtYtlV+LdY3X4/dwkN+ueK+W8G1a8B8uCSZSacXwL+zd
ZBI7YGAjpUNGYtCPbo+CubS0PuzJyUP60Bl0eDSn2sAZoo/fDlbbdeXSLrnwA6rkzHcNpnB8DTsL
SkwziVoX7cDJRB+p3eFps556ywHeoku5iHiwqCIVSL92vQ76iwvGvWA0pSNn9oTK4MzxwaoVlO2O
oEPJD0G//DRvQCnuPMTkNrFr/QdcUjo3cm3vZojr6iUGKOoxfHRGjEs+RKZ3xnKDBdXqcg0qWtM4
Ksu/Cn8w2Uy8erGaTz9AnQgT12jBfiG++mHRvxD7S+VdfNcv/iFXEGn6y19Iaim2b1HxoCz+o7gS
nUkznc87wZjw/VE0IJkEjXkVcxJ3OjQnteQpl7J6q5HjxsgprYWUehnNcm7MEpDSjt/1lVeakODp
KFw5zZu7SKST887yYFi89jpobplNFvWSAG0W0AviOWba24pzUoasTE/hv4hbsMAzdOe3b1/94c2z
529/2+kXfoZyBZ+/fPfmT1iMxhgQCbHc/hqv7dJ4bbf45ZYnJ26fid2mjTMrYV0ZjRgdXu13zm2Z
QJD6R6G76xRotUEPeiWsbkuCYjfZiB8zGu4qxksYlL0DXfRcP/zdrEdn8p10Tq7ukUFo2ePkxYNe
oUwvMGcXulXINt3pFz6JVwSsExirvoptVVqpv3peX9IMRhndnBeMLoqZ0o3A5xF5r146jfq/nfBf
Xw6+fPSo3IXv5ur94zdvXr54+bthkW9A4f1MI79scwUHAXFeLeqbYrat0Meko2Pq4FBhlCC67gbF
H5pqf13wuVF33Pi++CUteDgn4+2J48B1JWiBRyGwH5lKQmESNNcJoit+Lfv/27sGFv/z2/lGfRqE
/zgHRK8XuYbjPPonvg+WN0P5495kCisYYfQ+wwi6jvzKEtYW5xRXZXqqOwf1M6KY84f8+Of3/x1o
oui7IUFrHz+8/+m//uxnVnN0Z0zNc02OBfznkRwDX4LgN8dAVvZdVuyPW2CHCFQlfV2cfK0fodcE
DBy4rIbfnAqSfPsJElM+8B4mzqgef54NSw5S43JVdCelPUqdMNyZiUsPbNld2UVN5YI4n6mb+NAd
o83VJawLR6Recb2F+T+nA4HUW2ayyUivdPyeuEwstqykxRy6U2AMiUxLUFfOzgGP6aDzMX3FUVas
WiwI3/xjL2kC+pCacqKTavgR+qvmPpj71FPSbnJSLr4uHmcumIsTm4bFOgK5+PTq4+n8LG/FsMOc
H5TuwE/8W1BgX2wqm3ve0wZodrm5Gl9hGOk+EplB+xU7WaLBDvOJtK9TeNslRgAxG4X33ooDE71L
zA63rcaF0C7Kz9iJxHRtgboa/HdX1xaLz+saLsP27oWxqbbPvnvsUOo8S1s6KUITlBZUy/C/x8Cd
8B2sXYyvwieTpvhQ3d3U6xne/ptYf3ZZ1TEg6qFijUJ5RR3lpRpYGKFiaTCI7YVhw3foDo2jT12i
0UkOvmpNGEsAGVw1Bq/ftUS2CrnMyLu3p4/P4AOE7zzAL+1+7eyqn+51ujDobrWoUGRijgL5i7Pg
8A/LKmc7FCVPoTwFQ+LDf5PekHOO41g7Nfpda+WLATJEV8rt0uekZFC98ElcUB6HZXdyiULSKOmQ
jN2xoSsGgCcZ8BZ25SCy+sW6/guoJHTLEz5oWUkU/NJgWiRVPcKvjo4JcgYUADjYXF4dOcMopZmQ
v00v/hFO0M7LA8oEv1v6EH/jy/t6f8cqWb1GnRHKBL9b6uVdNyiZeKCoGZkChYyVGOYJ55B3ixlm
ssFnG8rYCEKGskrBs1XdNKQnb/zFCimXlB7AKtKE3P3sNWnZBajZBCZIs1rfQDvyxjrvUlxeYKF2
QG3BmEAtYoMR2Yw7bhBN5+gIU/NWmLa0+0/V3Xk9Wc9eYFDkeosg4F4J7xc/VNf1+o6I1w+r75lZ
wGtkcqoauT93bCNaJPKJEd5VdY08W8rxmP3d8FTa5orwZb9QeYweIkW3ZKQH1P3pBvHX9Hw8wv/q
eZS5WsRmoxCMTX1dFWhrvwGSuGmaEyAjnqnVf2CMFiE2BOGK2qz9ZZikMKfuu4wPvHtUSIcmk5DZ
QLfQd+d4vEgvFvj79i0DvkVXh/M/qxlK2z9kH2j9WPttZaCC9kX9TvFruNZG3KXiuYZnBjNn3tC4
A2Rt1dYCTGsmj/2QSL73O+yeRLhgiJWfOKoQ7dr471EI4zO/RnWuq05IaRvqEaAl6HyB35BDnUQ9
2hqxIEbA3KtOWAn4DayEbJ00Y/esD7/J1YfLhFJIXUC5y0V93gj/wgk6x71UJM+VuJSQzrBsaeRr
tBQ/CdmRPx9R2cHFGH+CQhhHkJgSdJBvIjvAgt6GLEqfZfslFVLTXo9dadw0Drxc78rQ4pLAXqys
RTLrK8F1JyG501psqD5JLALuQMskp/qY/HADUg72lvkmshiQPOtOa5kfnhqYQr+0NT+jGhJgEwDN
18k7EmIsqqAXW4Y7zog2/8OuG1te146uJeUeNrjSn617xP2XPi3hey986W/vr1/Z82ta2v/vL+w2
cOtdKz2Bud6FANteH/5JVaY9Foildscn1Kmu4Vh8PeEFy5gqaG3+8oPs1phznJTbEE25qXB5doqO
FT0deEre9NxQbA7BL/gNXatTaZuQDQ+UnR+XYZXwuLVK/sRWiaXt2icG71CAvq+EQmrsVxzAX/A1
r+RRCs4EmbbFgd97oSc5zPirs95ug9Z8Sc7U9hQc+MAXoU/aZDMmF+UcBFV72lpy8pcvM0CeeOsn
xtYYBdS+o/GFr013QruB+a4KMpOwrCT9RPYWv53pj0M5lPTLFgbd66khWyW0md+UaHc8ZLfEGkZc
vGXD5M2vtSXZAsN6om1Vd9DWSlwFcQeQSE8Dgiutdyrxjy2a/boivmXnFeDXfrE5N5N0qFeMHiao
ho75Koig4SUCZcz+KScJmN99/dnZF67afnWkPNlCogMqpb6hZg2rL/r8CHosF3hQwx1rAd2H9nrZ
IKh7B6lmO8UcjxgNfechMMQly7G5UJVhrfbcTCcb1HjM9Y7H3dBXRDajF/Q2sxPBewlY5WREU+Ka
7i6bjBpioTyPwUZFucl8KDX3jj4u3v8X4wJEiLig83y8fv+7v2MPIMkZwq458PoLSsXhYhNYxEv0
fxye0I8wxOIwhfteL/M91KId8mm9XVL3wtuyHSBK+sFOHCW+VMYrScqPI5YKugS6IwxbGv6s4Jqi
a+RWiJswUlg7MrQ+0T6rDL1LYG/Qd2yJNOALXcnejCYrjnOib2Hb1CFClb0womAJm3K70pKA4UPx
gbVMvf+OYaa95tHSMnyoVz/cd5g22GvoQm3Ml4yKIOFJXyK/zS+QcrM5q4HEgiYDRRgng6YCqNJ8
YO56yzVlXCS0kQnJFL7PGxTFc/yD8X+uiJaN5BqmlkwV4tLLsodD6G9hN3zOWc65d9DhTpxR50fO
5Plj2Ql6hMUbn9sd0wxUxFA1t0wfFS5dDkFpmM8lDEZYhCJgJvjnF7qMKQCmPPJXxPchPqfccaQk
aO6mXuqFwHYpyfKI+3kQOhA3IaazE5QUktwGgTsYmT43ZVdYNJggU80rCZUFac1QHnd03Yo9IxF1
Xm021VqyFOBRjtJOQGdpWWNzprK7eouHzc6Gl/TNhG87BaYsHc+Aoo8+ewYOCX+hYxSRfkT/dQGE
stS6Bj5ykAsfZAMt27GZ4BycgGdPHFyQUKnnYwr5gcxxWZoaWeBPXBiahqfhhoCwGWYvIKOuX51c
mzPZHqAw6Cj5y97RbThOI3KELD8A88nyrdvBr/HSM8mjQ7eVcrca3IrSC4khIMdRevPb1bpegRZw
5xq5AnlarXelYKBM3HLeIOC2rqtbj5C+x+d1vUivV/Weq5Zs7QNutScUXdbLv1TrmmipVRyZaJyG
zjwtlYZeAkgotJbbtBDotrxNfI3bzuAMFizfJHpOYnHPOCBE32Su+eFFu0KUmpX1m9MnZ2jgR5wH
0rQyt33XDRp+TsuiLB4W9sNBfXGBV0iPil9gXsDyX8r+We5r51hl2hkWeuxkkInyEJM090R8MN2a
cRhduhrG9XJx133oejp8Gh2BA6hZ6g4JW9zqKHcKSgd0ZIS6flyWrTHgjMvR7CwCJ+jWlw8s10tv
V+u+wZKHAcdAh9E4kAgswyM7kZwDYhZF3elfO3XTGSodEcAWN2b/hBPvCR7o0KxYfvTvQVywOmBQ
AbTfty6GmTqlSjix9NAC8x1ghgprIdOTVJTcJsyCu4TcgtWkkiw7ooYsyrdSM01k6z8f5FEPxPmj
Yey0FHdbj0049w7MfmeNZs2v1pwZb92GJ22WPDOXwT83lt1eO8q06/p0gkDMY1JLYp6gdJBQ+ywP
gNAOeW3JiPtAyTtc2Ws3VIQxTJgdWpSbczhYo4s8bPMYyuqyAVH28J3VBAIBtR91ElFdSvb/t+/e
vHj5u6LcnVTJrn/Kj0XATbRVwbmkQbu7U+uaQdnbT3ra5ZDCvbZ55qI7sMIDLmxFojiYtfKwFfl5
3tF2ZFSWzZbLOsSXIPWBlMHioVt0ayZAWklibMUKgk/DylErnCzpxBOLCnylEgFr6TBLdKy+Ymyd
WDxrBE30itW60x4GVpaHIElJWcdUQ0Iy02Qlg5BxnE/7anGUgtGS9SdAOpMUU6TSou/MtT+qR1e+
rE1Khd+1XXmwxeoTpxkCigaaqtTR4QNjx/mKa/mBKlE2wMtndnXFormU0z0JLBJgtwLJkm2cTSVy
MkSBMqYnoFMiNZgGIaTVHV+BoD8c/23olKnBlJH4j5ZWQkp7cdtuHbGDi4ZrWdQ9zBDUcrErxiy/
RX6PcsFH8yAZaU9fvnr35g8vz4gTg2qiecnxGnrNSBbJAORIDyz462/Bhcd4EbjZsOs2Sno0XsCe
KIkGGKoVbf3j7VJP6GIYjQGZhXXSgmG+u9Ue7G8nzSVNZEsKtmM0iOgeBTrgssHzNQKZ1wWC54s7
usQnFbdJ5kZqYNIop8iuy3KDQOmYd5LR9JJaJAUZVsLQPGXq9byyU9XF8QzUhmszztsXAzgKmUm1
zGWO97084+t2opPi3rVKay2QO745w2TU78/o7zBu2K5pHyzjrK+JO/Eh/LNvfoNusyUJzhm7JlYS
wpW5s63O7v6+7RNNQpJsysD7iMBExLnTPmcO3yf7MtPd5gK7n0oeVhur27WmD1xU+1XqaGJ2sEW7
XD7aU6n5eXRMYwNZdH5XbCRqi6U36OSrNaampKAGsTxmYFTp+gCW7Lbp8pMU8J6fY4ZK6X4ZLiZ+
n881qfrRrdAS7e+lM8KHF5xSTy7jpKuGMyliDe/pP6+/efu2jOhAJtKIFiodvmDcMUsILTtutteg
it114481hhiV8vidXT2bteRsnl5N1ubseGxv5pQSLvmj/NSRmc/CqnGSWIlmo4V1ez/2B3++dhcT
+u9fvHw3JG/7zsm6I7DRhQBgU8ogc546jo5PQo6CZe45ZoInZjGHpuP8ccTiV0eQvEgKJA/FNbfQ
C1csFsHFehutVMQKGAsJk4mSpGSRc6Sr6322Lqb7ferCWbz4LlcZA2a11QXbIXcctsLyu29efI8+
Hm0NNG+zDTDT3Hfkzz+rs+Rqin19/ubNqze+s5JmLLhfHow9Xjt8AQ2smV0Ky0bljgSDcUZvqtHh
g/d8foK9HceVoniMCuKg6Vvjj8yywr/cAGMhZFIG5+TTqm4YUliyY7cDCnO3igfdFRr/enZc+9ia
m7w9ZCxO3roB3e4Y0e29hyS3W6NgV9uBovxe2R2xDXHkKSyrqEmpb0wGKLyhXYmuYwIC7lnLQkCX
O3onAUUeewLGiZwtAXNJnv+2BMSNDggoSKvEPCEFrIkwNgwG57U0jw1+Q5/n8ti4lx4DIYYr8xl2
TPFTbPzsKLxsiYzKx8YJI0Ia+/prn0SlX3RLqvPket6gaxyloAtNK/iL4jl8elb152UqXMOjUYnd
K3vtg+ReF5iwlHW0rlYcGFhl0LdHium5mAHt6e68qymc5eYvhI7AS10Kukq0JQw9GvH7waJGgB5j
/hNnHwwUhXKUHvxLoAqVppg440KBaX7EwoaF+7D19pSP6INejPyHHaOS9Jq8EWYK4J7ByetiN7ho
r4+3T659H/LkhfWeHWufbuWTugc6UEeed2h4Dh0zJuqx15+A7UH/8R/G6lOoOqUqTvnA62ykB/Wd
hfpk3RANrzmjGexzBr3YVUBRvsIbkQplDrs0H+yEMcrzVYArTK8iA4UbCe3HY9qPTcd1Ry5TxOnl
Fs7sFw2D+SjImYgq3PkyrQXSOwDQfSuTUZStYYxOBuIfp/8wPNsj/d/+04vXxemD2RmIweGD2S6g
6O6OsWDaymWAM8VW70V9+bF+/9v/zF5m8Ku4niA8bHVCbl/kMc8U5AtKvSSIstVN0D0GzhjkoE+u
sEcRRO5f43Cmq8Sf6nCC3QAUq1myouzwS9MvOBEh/zzh3xEEypEBP7YQJy3QKOybRoBZtCqEhhia
QSQUqkFTg8M80/zgRrGLmr4Rm+JqzQIWI0fQfui/hKlpFpNPCIWPZrnu7YwA2IIrHCyHVorsLklf
z5er7caKDvhE/MwpWMXVQ5n++sWTHnSK1Nzz7cUFZlxIth47uDf09/f1pWtW6k92rDDzhiZ06CaV
hvRtTW9nO6EWPK8fWFbxNjxLNqsOyKOBdD2JtcH4oKSfu8e3XboRxiMT1PExwRcFpn9Ygmrtwthj
eiDduJQ8jqf08PTx2cB7pLLIQE7Td/zEvYz83UluouKBeZLokyfDM5P/obpxNWIxW5sqZb7ISJpP
k4NTw6kcveQ8lCwfO8NOJt1C2uXddqywykFnl29vWPaLqHnXdHDvqF5MSIwYAw9/A8kp52f3tJNr
DGM2DF6M7UDoyuzJrvQNcOYEZu6S8fkclKwuQAf70eK+FS/QyIfLZRuVzJMhiqITGvoXSa4Lio2/
i+SFd5xi7aS+HBNKlvQD17XscXQTz6qqKpAhGGOUfL3U40TRtd9qjT1JyW673HJ+l9YGzWoxl6zs
e1O/l9K2JLdPW3IDxyGLVVYGrRbJPUPWkRihpl92+EyWu5RwX+Wz5/k61V4ZLmgaRDxXe2bJBG2F
t3ELtODSZZwdeNb8Svbtn6t9m31M+U3ucBppnGsOG/Scy5l79piKR9IdI7zpAAa1nT4JkMD4xajo
3HbiwBUmgQQmS489YUJ7ltbyvr2WTidrVM7RwHxVHm6KvlePd5nGd1d0ah3ndGG4ZaBsvI+PxBFn
Lw/J5U8bpXZTxMx8+V2ZCW46jGDtO5KcfkNitnbh7c4ulPbkUESdySBVfwbRNac12U+F6NXtNBJO
+HO6njRWOkkpcsORt4mESgUaF4RDgGjliURrl2ZSWTm9mQ2ZIiFCW2+3WJOc1j/vUDCwdh+PV6v3
/9UcryaK/A6aKtXx8eP7F1M6Zr3hB4UrUnzz9h05hQsk/HIO/yFSKhJ4Y09TEx/KA4WWtf5ApBlK
nugeXK/0Tzj9NleThf6sXZl15cKCNuvtNAgSkj8xR2hjz3Lk1Z4MVGO+t5v5gvLCiVsAGZnwRg5H
vcG09XdTMriMoQp6OR4Pjqxzw/UKZhZme7yZXOpJ+vWf3j1/+2787pvfoYZ7TT43+L5LvsAn/Lq0
EegG1w09TMvV3epubAP/yjCDCN1cY6HSZyeOI0jKP8NhsEw/+zPBpJRH6YLWEtOVKYIQLqMkEDEd
JyedhP/I8FBvwQr7WAPBIOG/T87Uf32BvymHNMzA6z89Gz9//w6rIYzkR0V3zJmzMRgf9spySqmI
SryjwMLv0GCNpbGs6Qf+oKqOjt48/+ObF++ej18+/+P3L14+f5sZBfo/46bcfdov/p6vwHOgmF8S
KO43b5+9eDF+8Xb87fPvvvnD9+/Gz18+e/UtOj1mKgYl+aviS9VbXWoFXk5w9P09pmEKlFhYNa+f
v/7y8VOJ7uOsWzdX8+mV8GPVyDJkSJu9SQUE/Y1tIRHkFidO4Hg/bwJ1b1wOwjEai8aIlQtaAwEv
NBaKHDPASAsOao9+xd24WIIwYiWG3hvPDug8pXMCjiPAW3vpFg1B/gqgzWcC3SPtSxJLkHwZ+I2g
uqzEleNHRKum8WB63AP3bsNATvhcxzSWWcJXYUH2zO+Wptt6M36Bjv8s48NzEifD5bCSwZpU+C6w
PRovvJkDxKyJPmlOT4x+t6I9JAZc000nj6V1XLxFUyHlmcIAHQFd+nLwZd99OSnGLzUk6TWhE/eZ
XaOappMtUAc3jPMFpicVfQaIBStmw45aBOef2/oIlizEaNc7zmrJL1DWPck43dH3WAIWYzDy1rND
LiPFxYyRRmZVMxVpbplOKS/g6rk8E63Bum38pjaF2W48t4tZYruhUaBZEnsLKmpcJb7jMbz+0/jZ
qx9ev/j++bdZg3G4v/HKH+NeOqZdsM1mfLEUGiVfdC+WvXt46lBFF8vToeVkt9XBOH7uxsEw1rkk
Ht9SoB4mvQTGnGAepzkG6A8OmoW0b9SnmhMLsHVihfoVL0w0pcwZr3pQ9oD2uNXjXmbO5EtQIoSv
Qz3O0ua4eNFwTycSSggy8TfxaYDkDXrVzengijXEK/iPFcziJ8JaO69ws72DpbiuLraLgjDYzita
maIKkZZDzTLs2mQZVSeZraBX07spGs0zcKrZraZ9aSGM/mTjIjt4jxi2+fE78kFBSgyHy39HdEYg
cekDvWEhmw7fUiJ+Ua93Dz/+gxzMkvFngY5aNrgdHLhnSN2bSWMy3deIQTnjjEk83NZxRmQ4Lt5d
VZqvzYEbwJkKswov5h8qy5uw59eqoYBeP2AgwLm5Miafr+u6QdX+Ej0gKS+EgiLDxoD+s0PK9s4w
yYLwpJXCM1OZ7K59UtfcN15l568HyP+TBXSNb8eDMqY23YXFRDxx3s6g71MdDr34hsKspc8EWshg
d/WFqQ52tel2vcZLDjlSyPDcqUIjjLHn+DeeMIBmdTH5VBsvBFpw0w93BU62CYrpo38zwuViIhVG
pagXi/oGq8K4wfV8stwMcf5sr8j5AJuijXtxM7lD8YI38ItqU/U5ZygP+dVKIH0bZKD5RglgZ2BT
X8+h6OtXb1+87zTyG8ZHOggexkmaXMEw7wYRqOOIxRfoy4TZxUdWRLRyKFns74xHrdl8HUtcLwTg
JW+8pTmblcHBmio/YI+HFq4/YOZR12x2I3/1tmUTr5LwUgwB5fNuRpCRkYzeDp4/f//i7bu8LDku
ns8RLZMm2YwRc6TCsQEzmuFl/GR2J6mYiq5dKJESVl9fozyYNBVhKc03MG/nsPt8YK/MCUzL8gQJ
Tob/4sWyaK8MDpnotMnxADdVZ7Eo5pdLul0naS6ziuyUrSHxuXcbO63BU6HNy1fPX77rF+7Xu29f
vDlro9WrpV+cuMUib4MURn8JlIoEsuQp1ycptrhrqUy3RhjbWsTCX+YruSzOfaK8nY9hlKH5Wf/m
2bPnb1uiF62Ix9ktMGTX91zda5KF0PusjrVuZEFoNXOe5shzLWv+HNDSvjyTozcezM2lxjRewG6J
9U2tdud5WW/mmkpWYVPMPCBJTkKS9IsXnevisrao6ceo9lDsEGcYNiJQNqjryR1xBYh2vK2+niwG
g0GIODjGxpCNvdjB8JlpL7Sd508T0USq6G9VO7RFXju4dZDLIwEdSuPJGaitbabAazhx0Y3VxIax
F7JtkEHsPueRjDQlQAEMSRq7rZXIFXecqIaW2d6hyg0nZpNJC8xxNJ9ZKlqbxqnmdULgR/gqFwBp
r7AmGTNC4G3XF2a2jdBlZ8y/L+DE3FzBP9N6u5gVf942gqSBxxVqSBzfZqSG94vzLWJPcqofQjIx
QMgoitDIUjCSxOKOJLM7jT991JejAtR/Q+2dV7TvYvXTeo0rxEi5YxT1QR8GUbqyBAZKznPL6kYJ
FA442VAxl5kbDtJ/MK3HSV5NXevra9SLdRQa3I5cjaMbZGoWR0qCLDbz6gvg+DAxg0xW2z2nxwXl
zxhZsrcTTgKtlnmgrLzcbMXWOrJ4GZoUKs96CYH3WCTuBxiWzSuwx/iQMTxkjQ6KnOaNDv/0uzHs
4c+fvXv15k8CkUIGUs4RbiHt2q2PYW5Iiyr4fNkgPpBQs3Cf4GEW1wUxhuDcEL8xpwyKH2ArOFdL
nF8QGK61Qp+y+TX6rU8Yd6lW/RgJNJ9VQNkNOoVmu5RMnl5M2CFFi2JC4YSInmPWA6tuqMKgH8Ia
hiHIc2vsBkJyQB/ilHO+CZei/p6McWREx9s5pniXUw3DmuGJSE4TaMjAo6IeSijnI6Gj96PzW/Et
u6W95jzxNIDJCgUcnF02RsEKR+B4QtY33vQwxmBYLqq9Jz7AXifV3QmXvpip/M6uST3fVdOrJQXr
3dGJizGf1J7C/+JJFVUi2ZQwbmCiuT+7z3ospfuYBZP0GZgivLOZNiW63qKsZnx8kvCwt4FoFtkk
dfAMD4rf1zcV3ZUQdn4HT+CbDTnqkYPmrPo0pwHhqfJFcQUMaiDakXnFBMQOkpzeI92M0Aorh+nr
QdF9W2ktOE48PohntNkrMezoUzVgMXBNLY0w0rZryDqg5xpmHqwHDyJN2kF5c172rEP6i1cRXwLj
H3DSCvQIvplU5YtmCjkTQzyaIavjI/HZxz7Ag8AMAjMvJxvMR6MIZEAqZ5gQ84LDbnY0W1cC9LdU
jAxkVfQOJWPPyaxazjEtgj3ZnVdkHTAVUW+rjTlIJRI7oqkAu+q1I1lcumZQrgTfoQ5QXHbLrxBy
j6bKFJWLWEoQSTvlxaolx3mE2R0kpnnzEi/qflwjdA+HXXfL7ebi5Fcw2/wq8+JoWtcf5qhXksOK
CxZYl/9yWvy4+fHi7OHx4CGj7p8OR2f48Ozh6cmPN4MzzCn3j69+GP/h3Xe/ItTi2+rix9vzc/j/
RedIQUpzera/insHp2vKYgUrgn11eMYfXiwfygMHC0q2I77Lc8BlwaR4BPQlI6CX64jVny8/zdf1
EtdYxPOxQg57c+tFpHWZp0KScEMdgV2SBVzHLo9G8UfYZDF8EA1Cs/AQNatgf+btru++4Jx7ZDxC
GxTHHiKbM7652QFMTVCmAXlMr7hv68nyshoUbyczpwCeVyBZ57BgZ3XVCFjIBoGkA5VYeYEMGRPM
Az2hTDFy/SS+ejh95OoBPzfULHdvYg2TC8S1OnkC8u4bDN+YNBuG6FR1W3Rpg4nH9GPubAa94p3t
Ge3Da9obMBcKyR4dE20jfCaAzWJa2Z6rVCbCBKZO6ribtnOM06OrZBQTYWcGxR/Qz32zXQIzM0Un
G2voZCPkdsV+gcvt9Tnhr7672rJRUTc3PjUDy4I6+qmi+zqYXMS2tJwB42uZUmUAUliWtWG6xtEx
OJMwk8FZpPhOozQEJ5PRJjBJeFUcP/3lPwyKP4GqhgdAPQJFN47H6Hgj1yfr+eWVUcuAjZ5QUiyy
DqPG3EXJE2CcP80U6POXj+x9K5CkS2ZMLmvCKlTu9AL4Bj5CikAb8OUFf3v6eIjVnzlQp0O/007h
50/95ylYiXMsYSFXwr7JWUvK3s7rSCSmTl3jcY63sIIuYdkbrUhkY+Yw79rKAStlTbHZh16GasKV
kpIGZ/ChRJ7+gbMYfEulW6y2zC/fV+iMRFs7Mi6I54UqXn+bGx46M4bUOFKTpbKxpioSAYZ6A7LX
08Hf81VBRemj4ESAIWoftxOC5fxy8MSr1scSiSJwqU3xcDm/RddqjtDRrSP2icnsUUJozf775mW/
eIl+Sy/TfW2zrvALL8j422Bns8CGRyHZaZWu6hXKJOIplRzZSQjNQxPVWanhYd4gFM+Vbvpyg9TF
/mf00iDbCJZB1QAW+ZqPCaXJjbR7kC/QyO7zW7kraN4Y4A9OHUaJlLaXMvrkposjmUSFZXPRC1l5
dLVdg3wG/oWtCw5ihHpTLRZ76SbjO5hyEkUq6tMum5839sXaFOqRxphHggEeP4TyD53mFHmyBTFr
N/NlGQQG/hGWVH3TFJew82wo+9hi28xhH5NDNNSOBwxSRhh3HDULd5TjiytTH19h0Y0mGe7kUhPB
g1inwciu8Bogf7AUMvQyXnbQa/QFxwUOM3vDI0DzQrO9rvQSbd64W7ZvkWqRnmk3YuzcRA7cfPc7
JS6g1cWXjzRyro9wkGVx422tP21D0THbOPG/j8S3jvJRUVDnCmFigj1wx9ClsjjFFR7jcXxdLSBU
UqVczO16AexrQO84Xkh3opS7+1XoLKoLksc0ZK8BGzDeKCZLwHrIaQ7LH0UrbXvsYDE4sqacncfW
9b5ja7yYIhka20+Tc3R4hHb7CmeW16RKv0psfu1nirZ97Lh45m5MFIMWExlv6a4eoUXdCXoQ6ELo
i0Up5NEt51f4Df46Hf7iTP10zFG0qFPcRDmLbpfmNEpV/GJ41kNnSqiGT6f7R0HiW4+uaK3oXqwS
WCsDSIU8S27Dg2fAFZj9JXGleQeiu9PwcQJ4DlG67WVRW0+81Nx3aOYFEO9P1/XMs/8b7wyhHvcu
uwodutCMrQwbOZxilniEg6IajzCn4QSjl8jjnnPK64MWf+kBOkqr8dKBCjtUJDyojDOPj4HWHyey
utnohdEqa8GlJZukSW2YwZjW1IZsvSx7LgDUWPKDJInSmHQG0UHrVdf0CSTtvBk7NGdFLuqSzzG9
YX9j8jbm/9DvHl9f2prIm7snYOicfJHc/TEV4hp6syIQCMogQFgPaKusJtOrsR8xrTQqxSVwHv8y
X3XpIEXLCmrp9eJ6dl0PzFCIjSRkpEmEggKuZT+Kc/DM0ix1ziEKM8MCUwzsmPN3BlPYSWuETE/K
4+hO52eeLNEPPOCchbetXNe+JLBc7CgF9YTT8foOJBHK8H+VRC2bwct6MyxKrPGBJjTAxy/IQxXe
/G/B4z+83Z7Dw5Pw4TczjKV/hKjW/350dD5f1quknX+cb16todS/mQ/h2Xv0/UWMcPvwmyXW93fm
4fdvr+YX2J2vvjJP3+jTr782T6U35ol02jz5AWFxi/KhefTt/BM8+cI8+W5R12t5bJ//UGMDDx6A
oIZzYjOdrNiJuZB8eLwAN2o0wE+ef4QvRiNTCdCdHv7cPvyehhg8eI5PbJnf0YCDB1jma1vmdX2D
o7PDe9HAk3kwxQ3PPTNUMPf4dBl2lh7S9rGkWT5SH3i6/UDnzyUHHwmewbRejBkJ3svxt3DyoYBk
/SbAJ1hX0+0aNdjFnRPmLNDmt/sqlyVScoGSEg2hDjR290iRcyW95dLkao1/BDX5Jg6tzX9BZyj9
EcSlTq/mC7ooQqri5jSmJ2MK/adBRhswDZ7KZEd/5Mq0EijcXuAE3hKZARslT/Ws+mdQETa0T7hJ
wB2U72j9ziyz+t0cNezFIr81P4TyDwM7Mt2y2JtC0UtwEz+vZ3fD5KacDHB0iqoHu2JF4di55C7g
uVK9GdXJd8Km0A2cusVbSq5y8Fg5ucD7xMkyMM7VU17RfIgYjy+20BxeVkiVvjOgLyJqOgbuIxHp
Z9fs05pR011Gws7+29Wdpt1syjyev6+rbA1YK7kqRuxYl70zCzwE56WxG0a8yzEM1OMji2hCy+Fx
wLOU9EOUrHR+UAdOGkJyJeC7JAcRGSmx87m6wk8YhpQ/fLtZ9zJGO2hSkRQYs7TJeR2WEiX17auX
78ZiB6JVDZ+3Gcfeef5Ax5TZvEE9a5YzTOyylmVhY4DIj0YUxQEd6BUnxZOMMS+ZuxxKglpjs8Tm
C+7vQKsVv0fEm6hAvhZfF49zRxHBwpZhY8S25/mcydQxjEBZBEK03Y8c+Q7G/+Qo9Axwq4f73T0l
zj9ToTZKZdvocdbzj7ze8FsStLwyz6yzD7HxKXRjCP8XNx/sgD0Ncniyizo7sqKeegr1+EpvrtAG
Qe/SnUEgJZoIIMJBdJDZ5WJeRTsDPcluCsLWZtKpbJ9ChlrCApbVjYfVylEMdHG3O1XL7TW6ekjF
vZ1ZGux5kjcq0sKIcL19iRreIUw1hW0ypQc7y8MgBnwtxhj2n3Cj4lZ3RDLszjOh9QpSB1e2s3T7
mN++2zNgx0EHNKfp4XhnZy6BjvbyPrbdlBu0S63y1gcKse4l5j6f56aRazgMn8FiYmxqrSxZLfHI
N0W+m4SY18a5ll7Mj14vIa//80WSwwX0i99VeK+LfK+FAq3juPgDeRUhFOlkyki8bPeZo7MlXU7M
Z6DGYETJmuypFKZQoPZ0FYh98Q6mHZ1JoObDJSaeZlaVPoynNSJorGPnTX3f5CFjVIFzCbZw8NDW
/HLp4viDIH4kAFqDH+Lzh0gIjNWyBJBeB63HYfVel9NuydrGdklYYxyk4P7yPl2v0V53Jh1KRpD5
5ns0VPXMpMJmu1pM7loH9gy1zdB4o0kAPe8GYw1yf1SLFSi7pX5aShO+fSlho2ofphmqqRcTKexC
yCQZqm0cuEJo7QZvNbaQCE712xDyIJFbTx1dqQnNQshhKQkzxMZuSnYTiiilgSA2IcMxJMm5RSXN
e2SSMc55f0hRjeOl/LEPg6FrWvnc+L3yu3cMngK2wn6xg4eMtWq8ghV+necmDRlbM6rA/detbcdV
1py6P8/ifDlqdntAdjffPhrUGhPavto2V2rLE5+AbJR7vgOZwHZYy9MPuox3fGphQ+pVrgtsihtH
pNTgr5FtLlR5XBw6vsumq2sZi/8qiO/+UN05rRFOCIxAKpih5DjKXcKkgk3XnIxkTOj3LFz5LWaS
xGKySvRLOlXAt70AX+pavvrH+fIVm1jHDOUq5iF0yDFt9LI7BRe4P8chVgB7491fJmObnyeTFc2O
VS4RDqFqCgv/985nyGkQoQepGDHpEgo6EwgLsR8YDYFVH6cg5EChZA+krqQ5Rv7v5r5tuY0kS6zt
F0cg7AhH+PJmuxayFoBUhEhJMz3LaWitVkvd8nRLHaJ6R7NsLgQCBbJM3FRVEMmZad8f/Bd+89/4
B/w1PrfMPHkpkOoZXyZ2WyCQeTLzZObJcz91Hh8sRw2G5qGBhnqttMYxc7CmshT9MfbVTfIlFsqI
zSbsr2rC1CYmRcxQOC78kxMMlE3SnHIs0E/aXmSfnTcZsmZjk1+w38ZhJDqGZ45lOk5TM6TvEpzB
9CIByZ3OxI8pmqZ9Tv2UO8a+e7pdTc9RZ6O4EWcEABKlyu5qE4zhbejEmr3CV1cP+Qz2B0Y0cza7
5F4wEBUNJLc2lUTxzAxGqnDUsQNJEKUp0gFvgrdir17O+wZsTuNTJmEvobRaY1eQRSWV3C8q1Rw6
Kzbm+T2ibEi2mfJ4pTKavE0x2TdQFBlcWlJvOKrYDIaetfXZQLsxx7yAVUSSrbaFG+BKppahMYAA
NTCRk5if0U9OIkPNk1H2KFFRdSxjvKHK7zBmCC5RRH5Hv7A3bqbZae7nHcVFMalov9YV1iVwF/b0
2tadQu0pyiIIeBg9rLaLP0fv6u94IdplV10oxB/J1yhMaQ2jTD1JtmWu9hyn7+/0IJFVIboeBF7j
7EV55Tt1eqrLulm6HAsOWsh5KCMG9rDUfmhUTubvUK0eihZuiPAxoVUnWWoQwfkVEv8vlGVYPc34
nnPA8oSMJgu7lVp/d6fdwjxAqWR1UQuQKZpASRRWeYKntb5ZtBAjBHRpUFRP0wVrvWQrDUUMnIba
kOBZzhAEW5f6yIkcYw9l4JytTSY+T0ZLL6ubB5mCyB1GeDJ4s4QCv66QAB/z7HIZ4sTPzC+9Xs6f
X20oGaZwDIY1oHGsDtouJimZtzIbgXzJR0ImyoeCrfVeEr56/HFS7RDQie1EZjVghehOIQ+Lu5W8
YHCZItHSQgPhki9bCyFmRwF20lGsGi9guN5wr9eabTVvKDKAo+im2AuLdQ51m8RzS+prPGioPJch
mUX31ei34ja+Fw8xNLpzbNtqlmdUIGZvWlZSvZ3SIBczHS8q6tKPvqrUn05kJSkTWWBwxeVqRfxW
QjVLOTUoGhGNipy/vcIYgcV6vSEGjZmA02KxvkznV0kLC8BhIORczYB5DOO2cgMsdL6yPWO6vRvx
crpzr3gqM2kf/Vb6uUzKOHJJIjGH+fnAnrKQKe3gcjSDFMaoo7ZHkUmYFqBMgd1BHm/YDTSKUf3D
uPRMmX0hZz6V2HQ145S2aePAuM3zWzq28Kv4a8wI7DiluzlZPmfqiN3UnwobE73wT1fcDGlF54YT
6rWh3VL8cIp55VvczTNFEAk72yX7SAWM+I7T5A3XLnR7h1io7a6XZBG9JGYX+SkhPx+dXptSlqCo
YD2AjukTEGsMKkdngLESM4USjumOmj/iu2pA0O+7RHN9zkynXI8ySAvuZt53s76eRR4/pKSFkXcU
nY+0iux6ebrGmVuvpGP61LL2RTFvRJVjPgbL5t74o5o1httIN/s52Y9+bZXk+pT2HP5vIKnPZQa5
LENDvwnjjBS1HrNsBaRqwbwWIwNcWxbRq7crNrQxFjCGRwXLHscYoFqX+JtiRKozahioJBAUqqnj
b7lgd/w9espzPn1uEiZiI5NwdZaJsnqIUA5v8yJBQ59imblZMa4Iyt/aNdnKRt5uiXbycl3N7Gzk
79vNSBoznxHPTUqaKyIsHWxH+D31QkbzVu1ROT0yCgbfKRHnj9iPMWr2JbUIr9sN8+jeax+ZV3uY
xkLrwNzrxmET4wrA7t26b26pPe15hvVq2FfWQlNIxnmFXIS7NOZc5WoX85uM1oJf22OnhlSG99p4
1/RWSlL7SmzRR8Hh48fVH+7ikPjpJ0KMAZ9n7lNIuRzNcfAiPayVUUQVC38naiY7UQQbDKfNlXtR
067ztDpfr02wO05tQv5H9E+CoNE4wTVME2O3BOPShF3ZKJeU0P6krdkQj+vtyZC3JTzH2LJlp9yi
1Wzbn4vonRAlgClJstyEdqvdQgI9XAS95R1GkOEzzIyU03QFkjPLbRTqaH3MCQw5mismANM8mNzD
Ttj46O0VAfvIcZNqWKXwRnGkHYyRVm6EU+Jm+o7xbtKM2iH529Mn9i6fAPDap1qh1v96GXwjbvb4
pUH+SSjxIveAhqqxMGQ47bIJye+V7Bx9indOA4ikQpyC9YAQSIEgdb302Lk0I2fWqB/CI8oxv0xI
hnxdMl11RrNeailx32iIhPAo16oKNGTukMMqUH40Kz65lTJUi8TqtB2XJyf2JlfBTNL3KrFngZ9M
MiomCICYU5wnqhk/gujlaxlJFpLXzxO7wtiRbn6jAxWu++0WC+3iDvty0yd0dtf8Z4OQyJaf2dsG
tiSfAMnzbe969iRM9c1vTaDzfLoi67Nb3E7bBUFwbXW68NvaUNsEX1FofKjedT777LPN9XB8tVzA
a/uhfve//hUV2OiIoykSF3qGJzZDLJyycrIof49/Qzf67rxZLiRabotKj9PrbFtjA666bHy2OEq0
HnY6/ekgO18vMHPzRVVcFHCZ5c8J2l+rRbnaXmXFdpg93N//K6qE6Kpq4A3qdBLWIzQd9R/l+6oA
17ZfD6KYx9r9zBH5/aucw5/rRHp+E/R1hVXtxtJjPO6lOZeroWrTj20RCGigimy0TtJMTU6J/Emq
AvpkgwVsuvvvimZCX/UbFy7IpRik8IstRBYYOkwRtcODEyqVNO4lDGmZZZXIGBh4wYmZC7vbCaWA
/A0yLwygaxtmvBIMLz6tMSi8UUkUmskZOpwY6jQeyxcqbRjSN2mlYrA4Kg8tNRwcY9p88tpoduL1
pVx3jGVvDLw+JkXm8IHDwOgmXY97cFmwRe9E6iEo8HjVKXMYmyr6bqHyeubIThhQ2lLNfqjaHQ4z
0UWxnovanJa3k7O+8kWWeBMJanR1P9zR8QrY3bsnAlrCQ8gkKxxuNxiU2zeiXFsxvHssUyVA1lt8
f2CmTN4GQ9vTFw6Zl+d9oc+0DgtQj+yuZOCopQUOc+UwI8bKurDj9nLiRgWmE5EQXpR0fairffrM
3BGRREkFIuE8fVMm2QAYDJ0zT7Sb237XBH4uvEWSLS5aoXYmtexYnHrPOL99cbfCiyLEOrs7e0Js
Fx+ukt3T0CPHXd5RigT17HcgesvZyjM4xhx32FPXuHeYuVqwPX3s4Rc8BeYHfc3gJ4q6yDs/2UKM
38AzhOcb/l8/AZ+4QQilbV/EBlUsN831iMb/lJ26g7wWJ6zhJFX0bq4MomQV+GXfYk/mqbEC05WV
2vPp8KIjiRSOMREm+tEdwzN2wJLNFZJEV7O6N8knp6dVPplW6xUw1pPZDB3FckDAomhy4P8m+Wl+
Olvnp+VZfgov9EXuyub1KGv3h+26KXLyiwFIQE6b9SqfTii0N58W6L6UT0toQtXQpuuFhgB/UsXh
XNL65bNZPgPOYDZf5ZiZeVZ+zGfwZ5MXS/i/02Kme3NIBkx0DpxOjlxPPq8QifDV+UF+/jA/f5Sf
P87Pf5Gf/zI/LyazHBGtQZR5SV3ycnmWU9Fe+G+dX5zOcmCZYSaL4gzPwqLMafVIRjH5iQIBUk++
nFQftkWRwxq2ORY/zilnJ652tQa0rNY8+dWaJ6j7r9b1tCo3TS4XBvqsN1zEOefqxfkm31RF/iGv
c2mqusMhxF41pq/N4fiscgxRuijwnzXMtG6uF/DH9hT+f5OTt4Hu3tDONbMcWUXa8Ga+Xjd5cw7/
hxhrygZ6NFXeNPk23y7yq+XGOwSYex3/w5tAyDyvclQAzYqrnFjTvJ5AJ5Cmud+A63/2e3lvgC/q
1YmQNL4MRzjjWz9NwbNEpzzPyN/04jIuTm/+dwW348rmFgLuB4tb91LmMP3cIuSB5cKqyWVUOYuS
tUyy0/UVp9nBjK8SpwFfG45OosioBeWwk6oG5Wq62GJqWE4pvrg2iYjW2wbOpheH66MCILtgfvtQ
8rfMQMIHM/HkexSuxEUAsxJpXbl0k7KOYbZjRtTSEt/9HN1/3R+KpiLtCidussX7+bzYq5LSXMYO
1TTJYka/YL5KzP1RNAVVKEcfHlmOLpmF3XhKZN+c6cy1PJaZMsYQm8+B4mxStbh/uiXiU+z+wHyo
51SHkP5Eep1RbegmUNu1+vVSSAYVXaFIC2wAcj7t7YNTinwlxxgvATBznBw4bTiC9twNND4nQ7b1
XhHvxwDmJEzl8JviOpHXhTy+tqfC5hNDCiMvq3XIL8fjnXmXzgBR0bqxQV+Ycw1nZ82qdCxkOjYt
gYzxOEh/5B9PQhO0VT07CWjiJ63S25r8mHOg5qfo0stpHylHFzwyWCNjU5UfTT23O0EcVCFOzBRZ
DQwd8YFqpnxN+QvBrk817rjbx0lM4lvZ5xwLfcNjYTPtro1UMQXahyykSVdvRiepVEfjfjyjoYxg
p+98khAcSwfchwNPdQovJoVQw68JmYavHrZRk0MWU+poW0bTnxt+v3Nu0R2DHkI9rNcC0xIvqMLk
csLXNHHLPCBxYUWFC5ygjwv4RgQFc+EalL+s1EqLTcgPcQSJo5MmKw7fEuQC0GuzL0OlZEg+Epjg
MrsP3HMvA6bgXgB2EIj9CTBuCvdHmrK3DQgjfYFVQp/AcCDryARzJ2ByZglCW8obVvbKaH+xWcyI
iAaipdoWn7SrHcDjEOVgBQ/MAgyCdyFmL42YmOghJJGxBe79FFpC9yiz56gGBPlhvWrZ9uROPJCN
MGOHVUnSpDkC88ShxIGy2FE3Wq0ldbHvWKVnoZp68YDi7InlmvCe+Goa02BYY/D8oE3w1I7cpktK
KUzrJFnfTsYOGnrfym5gx51P4CLpNLHwQroxiS2x43QNbqmVCObp7MR1uxaSy9/2xqEakgyfikJp
MOGKiemBqXOOxl6krlWaESkWdBAVplSGazEOw3uWCvjn1CYjnqC8Zrc4sLZf8Iqy4XqQUi/3srv1
qHu37vaUUobAeAFhslGpw8zcPPu52UoOdHzrbUkiofjnXy8wknp34QgaRmL18AjS31YTGL5bvpI1
qT5X1yFViwegy007vrrfOwR03M+uRc4j+chOyEh7J8lR8GmhpoxLpBDw1a/hueETbEeC/o5Z0tTM
ojYOw0Uxr49vH0pzAyrl9LGoqnI2KyiZrOFhuSZ5MljBCgje6PJ+/p8amrOYKF2akQZTIqLJQMQY
0eqlTOmXoiPf751WpF8h9QIrBFAzcl6xqoQUK6RG6CXZ9B7rZUi10NO6A7HLMoo+YTqTDLVemWi9
stPMqC+y09k6Oy3PQDLIUGfFgZGz+QpEtYwaJGbYKzNYXEaTzC5OZxkpjrIPWY1FOTYZK2gyUtBk
qKDJWEGThMVKG9wz1IhnRimTNU22xWCTnlk+HNvByZ9Ec8nqw6zdn0BzuW1r7a8g7Y458KTsV8fN
KP2DVXgDf9qdNFURhMW1UUfU8VNuWAsg7mVVQWMm54e3K4XOjW1CPvM/IEdIlw7xw1+iXvXXvUGO
f3xhv13Y757Y787ouxDSX9rf4RBKp26va7/crOuoW6BRwVwcxXxcFVd+6QsA9Efz7qv1SFy3ZrLG
omAzojwljm+xxDCQY2oyJPVnf98jxZzY0iq8WIcWvHIguuwJdlEGPiuxbp+vdbvhedvaDE/maTYj
dT4xtXsrJBCxqLpIz1qge4KnXjqQzW3EECiDmAoNah0qOh17sOQ8AuP5oXn3j8TiP0Zyi0bzD9t3
n8N3pgTUdWdzzRVO/WLQ1Ifrpw2wuCPVdwaIH9/9M4FYrocNFpJZTRactO7D5bvjfXYn6HzDqTjQ
oZKruM5VARzKDs49OZ8bJcYedsju3/Es/+u6o2f66EJK76SCSDlrrEv1ZjLFcg0HqrvR6VyWq0cP
x+jBPqV0uzabF7xKa3hlJkaUDjKAI8XsUufuYZw/WUYyMLxEzKnaVm0VuXhO3i+JCXup424qoUVl
TiU7LBD1WbksVpQOwGjGLZZgR9Z1Pp+umkXOzh0dFW5D3w/L9bRZ9A9yaT18+/L1s69/+/LV0d/m
3R/39/e79yQF9HmBru35ZTmjSuN+auWsew7/67LjejbIUATQyRGlc0a9ZfKYu9mcmjH9YBbgIdPr
ipchWLPeGpUVcAx7XVwp4mQr6IUV+O5kL55+++2XT5/9xqUck7EwDg9wUnDKa5Sf+r1nr7/94btX
R3Ddf7U/SCdif/fuHRGsS5NGXs+YKoQCX3e6PtuSqxh6ddWTVTnHTAanZeOVGqaJfJE93j8MzhBP
8Ff7GsuCXR+p/PRHmEZjJs5zSwOTnnFs863CJZfANxKuYeLjTYXI4JsH7XK64iOOyJJ6EKT7hx8W
2/pcbKuHJqcv5ZGPGAv6lu9/3cyAfPL2wyAojsA/wwqpoVGbYLnbemq1Ukq6oxk16PmlnhAuZEi/
Rf09SvAXlhJY4jUs0VPjWhubZFr9nvihH/d+vDo4Pb5bL1GUgyeBs5CuOctbPT0ZZNn9uBIJQom/
Zlj7S3jn+Qw9fXX0kskP861drAHU2FoGgvJgdvexLNWq1wlXG5GcHcuEbgeygiBH83oxSxVcYTQT
8vvHV05Vht+jPRDRq9Mtx2KhQPaTMEJ3FmaBseojVg6zF6/fPP/6zesfXn01/u03L98+zxNp0lZU
+zMpevQfHeQDD8qb51/lyWRrVTFrAfEwAPH1m+fPX6UmclYVxaoFyKMUkD9GE7uTXReLVPgpQ3kc
QPny2x8SKMHqrItt0QLjFwkY8USwDsy22izaoPzyBiiCpDvZ9HrShpPPAxitO3x5HlYFckD+6rZA
6DYlgfwUpgkzB5HIPxGacABPoY2H2T/eBOePI93t5au3z+GCv/2d8yp6+9X49Q9vv//h7fibp6++
+vY5jLx3cOD9/vzNm9dv9M8PvdqqQmIdNfWnITXHRtnXRXPUzDidUD+Eu+uetkPwZq4CWRYzImE1
93kGz996UbzECosMazC8fJrWxyLC+q7/X2b7V/tzlZP0yIJ7C5TPBYEwXIlkCXKIos8v0kn09330
8PNf/iqwcSJBZPUztjo+pDaBv7h6nI4ZxolnzoDvd0K9/Qrs4lNMRgTVPrT4+gbt6Lu+1DCQtA+z
9RhNkCCXYRP3UIfszve/GwPH8/rNEbxMqGM46EUCn30RbtF9P+7uyL5hFsVfFycG0hU/Ub1B/Ghl
P2pM48/h8G+fv/muR+VDerPt8rQX90BGot9WGaj37yYfJzIygB5bTcuq6VmntbfCWf2WE317nhdj
9YqRQusUpLyL0aN9dIGejeBB4ndiBO+KEPsRvA5pR3ck4yOg+kKLR0C8iaCOgP4yVRwBFU33/ZLG
fQzjvoFxH8O4X9O4j2Hc3/G4jx+19oVxH8O43/O4j2HcZzjuYxj3tzTu47ZxkRqODjDxGzDxIxiM
/M5Gv0ClN1ZBGH1uLebIjc6KTVVgxV8R78s1Bg9ul8bClfBMUXyo6cPMZ27rzCV8wtP8qDF9CZzD
tnh6mdrIMLCcLxOv8cvX/dtYulw/oNRDodjrrTbjwTxSxUjotrqT60P2b06Xc5hz6+AnIgndMC2s
TItO8otSRlN4NB8GKfd6gcyXs5u6rviNkaiTA5sfh09B5Hi7/i2yrbxiRHoxWQaaN1MCcuSqQVK8
Q6Pph/kJJDZbB9b3e5bRvdKi9Mt8u1jsFKK81oAIJqnsIZSgssG4QD4aeJMoK7w73iKFGR2mkbj8
82vlGG/gw9RT9WcRWJx8EulS4Xc3e1mtnnvg18fiQovV9OLyMGUH9FwxLE1NKKijCIXtCovZrWRe
UpaOzW5xRpCLy2P85SQGDEN62T3tHLhDWtFnd5LxwBIS7qTSqdaFQRZ8Qs0gtEMfTaFo9gDK3wEu
kZDZI5qkZvoE+0faLyudna2xnEPtlSEnSwxW+MY67Bg95MoOqt7bFVZb50pykoqPfSGR3UIQg+yL
URaPe5M+zo2AipAiIw2EcaHEq4PkYrtcqXJ32AzD5bB0VAADc1XzeraN1NEme7qpbgiPUTm/frAq
tk3FjgKYcjgA0r8sSH2Dx5GKJHJa5Mm0Ydi0wEHQqV6LkueUPMJQDcQVcdnVi/0GqZ6m7x/aso97
Xnb/OR+Xdt8AKjZ+OQHMPczuZw/v4abMybZ4n/lh7N6yQ4L9YTHET66/HNXBvVe3BmL+FwHQfbI9
DW0ve9gChHr127sNsgcPsr4/lL8rWCD8TwKAKKQrRT9m96SOqfW7YBumH/iKfeR6E9uy2CVruX3b
gbCWnQqwCmvRMFITdevot/YbeNUhpQ5M2Wy5apm7U9V6veSkIFgeC26ThS5VYihHSu5D2wDPXU63
C2jFtx1oCVaBroxbNczHAVqgxbU7zrpBZVMYrFxQ2CLFtcilYH04olImgKW/JQ8g2f2CsFCmWff1
4q0e0j/khzEu71tkOt2l/+yTgRL/I+TcPQYsz4lxrT5LkfsoHW6cooF69sk6wk9wKvk/ZySliFIv
iah2/nL8DHIa4dNMUOhnld1Unn83+VvwwhoI/NfXc1PhQqCHfcep5a6H53Bqzb71qNeLcKd8y+pw
bgybCpTjtezXSU9Z1Kx6Nno1pNrOKHvGLhYtZlimkxV2wrLtUoRoT56W5hwrZhoWtJucoz5Wbauj
451eYNVrZ1HtzVCsqkKZRYMO8ijn84f0Uo8CcHsKnEKX6/Ak22/3LoV7f8+1tYI4iQuiUhFh3JfN
VUzZ/ydXTSvIotrft7+Hn6yY+xMVdCqKBx3cx6e4wkBZt/9iP2ovGlXXLeX1f3FJnrI9VB3APdaG
pCS8Ns3qLthV8TNAv3n+Vbo+jJ0xXONPB4vK8t1wSUX06YBJ674bMqucfiboP96Im7baQAZieGT2
P4937dMUvTsejcQzZ8c//ORhlXbWkB+rNfFUgG3BVBw00q6katNwBARawTIRVhKikqR1VE75MHL5
N8APU7Wg8R9uUfS91lilmv1WIpWKmgiXcHbTYW10Moq7Q1khQmslt/JJOWqpInLfSTtekBeJGD7l
VxiCfxfdozGL2vLuyBuaKvBTHqNzk5kmMtEoG413358++w0tesSHfp9MdBgfRLqUqPkPzzPd/AAZ
XVTKGOswxSPWpB4ehr3poureD1t6E42JusO1zrzBH7d0r0wO39A2pjt/HrawxNq0+JUPvuTi2CAJ
zEszADpq7MQkIBIdzoyukVEbd/WxepDsmsCtghHi9uFuGArDCkiI4ce7gVQJNIR4/nw/bBHi+VfJ
QUJs86H+5vWbt6ia5Vrm0zG5NqsA32evX7/5qi8/H5kUMupqS7E9jGA/7vfewWNDMFvS+fR7v7Mt
TnQc8XdPv/0WsPXs7e3H+raYNzcO93a9ubHNG5RYb2z15bpp1svk7J+9fnX0+tvn46NneGbGX/7w
4sXzN7AtL17ffjWzy6Py98hfEMZbZzG7fLat6nX1/bomXdyNHRSD18stZRz+dlefumLiiIu1G7Nj
St9NrsrldsmdvGWIT+hYc67uuKFej5J4Vqti8ejhULeK+2E6TeNSd2wX8hWu5CTRuipqbIHPpmnL
hNs+VR47fQFzib1dx3EbuThpBqJ9bS0ddgFLL5gXEWzlyU44CVR8+fr1t25vpNfRFInYl9v5vKhQ
SoAOzqLavmctvW+CvnN5NyYDk+l8/xqp35t++xUcnNw4kTb8qIOSkJ0Un8W42kEGHAO1Yx6W+5S1
nV5XxbyPwONwLvxWiepJT9GfJTvKWtJLVsq4o62kgsowlRMxYewUOWvOgX9GfeElaj/IckAkCzjV
yUZXikV5VxR2yAT/iDl9tzX5rym9OVWv5mydwxQWhkw5h7/LvT/fUVWDjuWQSWIAWYFFhUNllUZV
Z1Nissku/NTFhxLVh3ggRPZ3LYh7uTwvp+eGtVwvN4sSbdqkI3Uq0yFcR7ISorqzqcgKUWDN9qlN
yfDXsTvsHTRubK4ffk7ImZyuPxbWDZv0Q+vMmZNFb7qKhYc7YqHMtvV2slhcZ5fr6oK0tsa/Xqo1
1Vm/nMsmiCxS6i2aTjZcMgVrqQy034loa6zc5TvaP18lHO3N4n6JmTmX+OiSxoXmdlrQ3meyMi7i
XIsXuG/ZbxFnIh/taKJGytEwopSWN65CKbbNKuoG7QWYNT+LsUlrFDxGQMjb+1YOBdbN54awnTs2
H8GhxC1ShXmz77Zmax2Ur1L46Upjic7oDoazgn+Y1NOy7O48B3qqH67e/WOJPlisz2yasA/X7/7t
ZxR4kMHXZ4imp9+/BE5iU61n2ynMrUfHv4eiGdYIgb/h4wpORDHLPpYTkwa8HlB8SOfD79/9QwBo
Cr0vgZf+8Id3/3LMY0zXaLnAEDl0NJmYHCPYFIfecC2Twwwnh4Zaqr6XZxWWc6sbqhjCcSgqtMF+
4ro3MrL5eo00A8Mh4O+OPZcsqHJ1bLKpSOvvtmRr/m6y2ZRshpWP35VX5arTFjBA4H6oiwoLMBpY
+Jm6RWA42sKgCBaHVSlcHAROagyUfMyFA48uys2mmHU6SOILPE3TbZOKOzEAU8EnlFCrbKZU9xuJ
D1uRl0vcXDLNPH/38u349W/IRYI+Y7H5oxdPX34LchNaQulLeuHf/PD9W/ryofry1dNvSSTPMJyE
vv7h6OnXz813jzsdyoJQKW8cFRvV/bvjyd7vn+797fjkx8t7/7orLnSmvuxsximd+ptJVVv1Mv8x
hF/hvPS7q7UUgIZlYznWLmcDWgPBlyoMqv4x4sCmtlHpWvGtGXUxKR5AEPXpqN8b3sNMR8/+BqMR
euPZpJoiC9/7A3w4/6knlOtOMCFGPw0sI5BltWtnw9VBr+noi7k/M0W8anMxOMRHA+A54q5h3K2a
53H33r0HhLF7w+aq0X1stJdrsblGJMHf98Z0buBv5s/u8HIomIzya9OqgK/h8LIupzldYG88vC7d
6cKkDaLNqrsKzlBtYm/vivJG7eGRnMNmocQ3oa6jLucABhmtUAuT/HWRUrxuRl0LJE66i+l5uUHG
lgRMDQXzo+YcRIZuTPhaADUjJHRFtkpMem85ucKmMFmMCPs4qUbd1XYZD+stBVZB+1ViSjyescBR
69vfNfXJHNVwPGcYz1R5pOxHHAg3bMf0HppGp20ITo8Kmyo3j+PEEQCWhAZidDmpcL/RxWKKZmk3
/u4jI2QNj4wju96svUlLm/Vqcc0nRb7Yk292roUXgS0NNaWtpdppWLS0uIJ73xA7vAz225vE5hrJ
wO1GE64StTrVpsKa7VhWWxwqqQC9JPbFIC5k9oYti+/u7ZVnKz43ZlxbycweO6Tq0Ry4H9K682y2
pRhKh+usT6n19iZohihmAzM+evkXK+RFVmecW41dkxDlq/keIGoPMwrioz8vz4BGZCCLzOmq1MCI
wztdShDIHaowVmfbzVk1mcnzeVksFu2bvJrzU9YzN8N9pS6Hy3hJLJLBATVKbTpmgqb5ClXroTJv
AUSVnYFMSWbKFnfzyZ0Vp9szXr0arku3QxiUjNqcGTrIjbdc1qz9jGP0PGYusIs3X4RLb1uyt3Ls
rKpuuqePH3vcVJwxXG1asn5dbVZNI6yaWyOe1C8B7Aj/k8Pe872sRvZTnqElZvSCTD9HjJCR/DvQ
zuYES0CP5F//mTeIK/r8yTzzzNbwd+S7Sh9Y4AMkn6Lj7rXxi5eDynge2sdBVfj2GggtJg5HJNTJ
ZiwbK/NAwlGqFH9HFwVF0XsMFDKvcPCXJkranI2RQYpZlP6V5oc58beoeREWTLIz8A8FMWSRUBVJ
WLKq2VphMbDCKYgHqb7n6/XFULZC5kce+n35Y1TrXU2B9d3IEGkWg1FfG0hKm/sDSsAJ6U4KJaGg
DR0oZrk/OD44odpHUeIoFJ+Q346q6RDKbbSO8W4g3vSQir2gryr6GEQpq9q2yTG3ibR+p+tJNXuJ
b0G13QRRKtBK9DrABVM2yOfUD4Yh9c7gpq25EPjj0gzQF5gj+fe2a1DMfLCIP8OUQdQG1nZcmJZm
krl/AT9prkbGCK3LAnqI+Vucz8jRdQ1E6/lV2QxucxpQVEUB8xBVX6hKONrAI7qGKThAf/HjqrvL
l5CsvLwE4qlrZi1T4Q1tC1VilwjxK1QbaXM1d6W7iU/Y9BxzaXnuPe5SPoFbeXjL2w5DlfV5v22y
5v7HCmG3ilG8sPaJHbSRsO0qRcQMC7LYwlMLdBcoRjUsVjW0GtfnwDWsL1d978mJJ+O/N8sZUu4x
7rz/5Jgw79RbQM3l4dJdHY1zuWBNXsrwoWjW8AByPRrmoMrfS6V0D8dGuqKjRLxFVaCWwEvHkdhP
x/alaXeii6hY8ALEffxHOoTt48zgfFNU6Gxu2vd9KInhXGScAeG/1EomSAfG6aReWAEG/SL6pZec
jhghrJ1g02s4jQAz6A9V64uiIDWQfn2oiC2zzHAtSa24QhC0R6qvQCMYtQ+BNJSof6HUTDgdrGLT
/rQHiKVsXcfl/dsnuxQA5PbWMW9lmWPt3aVf/dcbwyuLyOjk6ByFXFXgCYSp9nM1Noe+jw1HJXGT
Bs7IfAgyZAqdI/U5yF5Jj07Tyr62FM8T9vSoAh8WdRhZcLInFQUo80oZFpTLjJ0rjRrNkjviN7Vl
TIdoATLiB/1GKVqN3Du24h3+NNokIXl/wmZJEA/sM+a5hgvhRoPBKJlUX+S/rsvAYFsr85gCbDIW
HvtqxCuV896BOPFQSDImpolR4Gw6NNj976v11XWrV9a8TqDYhYLRrxglRh+C3Eq+ENC5feEWPJTJ
TMn61HpJswxoyp9E+ZXguo+xWd1vqe7Bb5PNPKkOhOkvLfpqqcnshW66ww1OoC8dVRWQqHRJPE8+
5iwnwQ0EmtxIMXi3OXOM3w2cxO64+NBZtkc+0sKoe+VYZGCDTaOCVzg058WMjYNZP75XwEr+RswH
fa0hb3foC5J6ExZXXOwH/0nlGDfpu/mvwHuOkkOTU+4fqBFO/ZAoxE/B2UL6ZKcBPPjg8BYUWw1x
DH1umQLc+ArK/NuzxNlMO6mBubcty8rjqzXV8ZqkYGKIYG8JJnenBjUrFjvQ0+p6P0MxusDWFJEo
EyVFQdcvhoNVNRM54chdvi6M+6lMM86z7BCZDt1CSKYWTwp3scm/odxsxcqb5qJYtVbN4RAXSoEu
q9HBiZQCrqWjye99i/o1tjCNvlkc9ol35G7NBWrsrckzV5XhlSQl94sZkFqJGyAYq/UhZhR1Qpwz
vagbrWtsqkKc+FwHl0+Twx6BSi1mgOWcwNQUTWhCLmi69ZAMfW2EgPK58j6JmoyprYlQFx428PW9
c8jlJD5sC453Rbu9BAPWUyBSJlxJjkxMVFQm3I4GqzpN7aKJprQQJaFHEYxrwSY9dEG6/NQbiPyB
nFf9KCqQRhVF3C+dBPgX48BgsUHtBau0Mp8ccPnGg06mKZKOmSW4pNA0N87lpDbYwGxXaPbsY+LV
0yKoReC//PZBoZHzrMe/9EwFAz0Fc0cfGFIejEfpQ3Fna38wS25G/jukrpo5MKgyz8iod4bHg0Pt
bH/g/vF9JP2TPL6Bazm2H0t7fqAlOXWjQ9buIJiZZBRek/oeQ/Xm5RTkQ8D0Ffoswk3gm8gK3ozS
whZNJfJjQLfxkD7c1MV2tnb9/Rogd9wU8X6ZCf0b82Lb+1ciQxESHCQRsms1ejyRjh0b4ot/dc0r
AgRRsjo52cSYtKbzFdb9DFmf9QVB8XkltjXgjNyk1azuyLS6vEu+eC2mtFdyBeyPxWyYoVWf3RIk
IxVBOr2W8EtLvcT/AH/8jis/jkIGq8s/CCf+jKhn3Ii+lzYvRWeVaGZ+kpYvjNwatzQ/mZZlcm74
tRmVJbpoRPjae39RntnCmVxyvbIke811Us6iKnt+tXNo9BejNL8YMNLXQ3JTmWxKNDn2uw+H+2g8
xJu2njNdvUuivGNU4eeIReo61nW8uTYi2HJyUZAAOxwOB3Q+qwJ1UrzKFBj1uNHbhO+ovzxdP/DW
7zSGLFf4KPdbas7tdL70sd2T4hNEIm2uF5YhmGYA+SmIrtASWu45/mZq1/kXfZIdHu7VBVVmhmst
aVpnBWp6UQ/S1CEPkElltBtyx3rsMo//KfnQdT8sT2VjIGmbZ/2kfHWldkna7Sh3KE+gjHE/6x4e
YtkNyxDoLT+f1OetW44/Ov6rnHkJKhrJ56G6cQpS06QpJhVpOXe2Gi+L5RqJGsmNLqU1s0xG8aUT
hVyZnzGSflxcjXFx5jvnYcZ+B9E1d93DCkPoJCidWnlvnUm0KkhW6Eun4/2T3AA4PlCfH54k8+L4
S00XnfWnbtvuOpxI2AzW+gkDVmsm1FhGS1lXHFpVkiJn7AqOehviBcpgx+B1culUZHuQKJjslxia
nk/KVYoqKDEFKTM99AEbjD4BZHnHgYMyBRPSZjOrVq3XBCOUJPyCXDgRP6+NaCYRfJCHjjWcbUeP
QJlsM77+UYElzWagPOCeVYG5lIu4dDH9rGqmzGYim9p83vhHWJwMy1KWU/IRhh5AbrmZKS8qsXIs
Utie799zq/fvM2GsJ4YyI+tuvDCri+G9zFjGhnpcd721UyL2sE6R8BmzdQPZX1eeMUfFZ9Mkcu82
UEm1MNu4yUNAgYUeZIEx8DNzhXHgMo7Xc3BTqL+cgHbMfEdgw9hKqzrhUYnSo/6D//TsDf7+xtUm
sE20o+Kpb4SldcW1mEt3is2GZ7N1UaPvEonOFo4ULZs0icJ1Hye2Sr1ZB2Xqa6HTHzkpUfqi7Dwa
L8momz4k8UGBcfKs77XOLZhUUL9cKejnEyTi7y88qc3H+Bup3EIWG6FMgegmia7YwX91nZn6URqP
nvTmxLZdtIfMK2JbkUQkhoAOQsu6Bm5UUERxklJjRGl8AD6GyK+nhQ85vmJFhtX0h/M88Y631lq3
sEi31XVrqki2beBUKpthcBW9Lme411VgsQMyh4QSuFAyxXmKDQN0pqjkW2wg2n8kk+QntgsIut59
VPlq5BzBO0b4ybkg3tUEZUbjbCcCLHJxgzSJ9UTcMTUlY/jQw8W8XElhD38P6HAKprB0eUxgiFxg
WIzSXmV9ro7KYSuVX3yZwmdovSv7OhgNGBeBIIHEe3+3laq9F7y35sdE4nD5KZy6D1I+hc+teVb5
V8XpbqrtqrB+8ZYtYi+YVv6YhDJhmICq+d1yqXYUKA7n5pa7/ROVingidG70R5kvhy+4y7fr9cV2
Q49Tmks1ALhKFtrRJw3Oup+oIegZyTFhEuEjUQ5q1MXcct1dVaNIbAqQGjkVSagWRzWZmcLxwwmi
Tz1QIiK2lNSAAsvWRrHb11Ak2gNI5V97q8K5tmcMjdfcnHIXTGtHMdPdhL8NtzANblMiz/TwkRZs
kCy7j8QJjXKcmf+miEmYxCUZW9FfJlqP+/VmQLSv9F9rcHNnG2s4BGfdaPutXr7/ylnUkIw4hb05
t7XVjojGHqhDtd6enWfOtUPn+W3OtzUZSMjjFuksMITIzjqrgNWg3RH9dMN6VXEVqwNX6jrrYhAM
BzWer2FblqxgE8pr4/+M2MAultBlrGCOsr6E0uRe3WxZMbOK1rEt1H2sJFwgcqzOXcqAiVEgSUSf
UhWyGZbR1SpEIQQjR1lkEzdQm4yuIlDFNTPp9Qq0VOlHiNnjV+tGKTyFT55IHGk3rJAlB6iVwPIa
NpjrHam/noh09ZnTVhpJ10HvSMQwYU5TjzwmVUkg5kIj8hI93m/LGhq+A8ELILRiEGhWdmwjnVm0
X51zDR/hOEgBjtW3AT/bRUORPcZ7yN4hH0G+BVnrc3pja+Do5dlisjydTQ6dlXBoAXoFpW75UEbK
FGN5GYQBngJJbYb9zq+Jqpvaz8Pplv1sRq1+ECj7qb6qc6I6pR5k5Y8iLiw0WBBBN0h5uQ5bYM1B
qi3wARYK+uLI0VD7aXCjz8snWyytUcz30hGUAdm7urpifpbIAPlc0TFD/fnm2j2tot/jjkMUzj3t
HmbBvsFMXhULBwD+aNZioWvbQGjTWkoUfuskviZBVnIdYUWgutjkWfeBFs2pCp5Cv6mCZzGubcMG
yQkP9KS30U1aYWFB8EBqbjDJxXWH+kIvZCCPiTS7gYoSbpH2DOKE/4AMKqHzoBttjACXT7sxaHV8
C16yHGiMw1RobfcCMNTdeqayoZVzkPObx43RkhQyGBOEVk4zCS/7KOGOrGbDZ6lgjQblNTU1M43s
1xGlCPA2HO4NMhzFNaEYloJYO4Yj8GN03pDyypFf7Xy9S8Y1l5gvrHnbYxuKieEMYe22fJhen2L7
MH1UrWOzjjA7sjpj7AL7YYu1Oms0bnv4tWIZvVmBK5vUq0U5V5yb+92xAk2dunn2h58Gt0gJYC4f
9To2i4EX+ySVCiDtLeUBuulyqSHi8P14FrEXYoD0vjkSthfaKZD9sF88PEnVQR8rINHGa/cf85t1
0/kOtSsRo0ocGHJ2M8qoKRqYTDytCAorDuUHUg55l9WEKcUkQC6z8rCNYlwCRqguz6RCYVa6bi4E
zeN1xmNm58eYfqkn6uO6h+kTSFH7qNUJKHIgVXMfBm3953Pgv8S75Sx5poOHZOSrUvQt8UMUjA5L
KYeBt2Qg6AYMd7ZYzUhyDJP7qRgSGwfmFEnWvTkq6UW/E+/iHIXpzyHaWEgPHMfhaluqRNLbSHXt
bpwItg8AmNCUiGtRLKkfTmACzVJv7Y0e4NDEd//2+sRjGp904JCYXsrO8B/+BPi7oeyB0WsZJpl/
BTb5clJzbPggpVHR23g/jL5zEYj6CDm3bhMtHnFXpqOtTuGN82RkGrQVV9HHB2ObJecFucjcndkw
c3IZTF6QaNCBitc0O8soopOQRr7bHpQDy8YyQewhHz/FxvmchxcnHnFBV/pS33/IcOAxOOuvbmm5
2gd9eIKAFjGjotKH+YGzYkUsB99k3/M82F38akdFU5L4jRdDOC6O6AbzT0U6CAjpK6aZoDLvmnrF
dE9InZorh4nUo2C0KCxtx/ASV5amm86jdql1tO3YVmv3biqFiibVhiaIVG2AFz7qqGW/G4zb5dPA
Iw8SPZiglqsZSk7e3TaktJlT5kfPYh0f3ciwFDSpmjoBwpwU829Q7AUmnY6HNfDkhGHMOzTCo5Vi
VfQkjLmc/tjZGCNMJjNpGShhNlyCyMt000+8p2XiRAnJ5n9G8M+N2xIW8vB3JlAwUUILH5UKnaQ0
ccQnDUPXpeiv1iza3q1oK1bXqP25Ww2IrBqIsaLKn4/BehcfHx6Rwpal3gWBoRzvMYA77969O2TO
K6qzwg9oFILdv8ejDsIE++biJYUY2Ikhax52B8hiOzTIpMJirat6qeEdtr8+LONJIBN9Y+bYp1CR
pO9XQGtaFHpzdn1u6sRbA99GJAZ3+/CwyxWglpNNH6SAnCGkBABDb9gIwt6+nC6kS9SmrUs7wWkX
szwjr35IWvKQX2M21OwqJYOJ/JHKwSYlgUoqJ0KyxgXme6Lnn97Aep3uZcJByKV5cU3mOmtk4MI/
IuykORl7Dc014QsRJQ0YDDq3QSqRC1V6IXyBdFwkSlE30FGRTFngwlzw+wNf6QbcMZV36AMCRgfB
luBFqTg5H0HAum8r4AzLGZ05rrKFy41NAoZPohE+AnNFSRdHrDLOrg6zKxmXCvsdDG5O9ElXaDoy
2nPi//PsdC71qGskyAHzs/sM4sj9gC3bdRBjgiL4UViU1XTaRychmHxg+y2zYSmtTszKzCg6H9Q5
ybaWmgH0GNNIx2ermChWOIGeUtzxh+0BrCMdYqn5uXYXm9iPLfX8ji2WRp5GOwxRkuPxc1Diomz9
3AF/piUHWRrlquhHPZB+I+Tr07ai04Rjt2PWF85vfR5S2LeJfVq3YGdo/BgeCEAKFgSl9FayPVdq
7kuKkPVqpAgJO17XLMxPTklS6veGvcEJ+kFf8w/JSoNXw3qzANqDjaN6c5yyCrii5pyzT02wfhs7
67FaleGgyjfoi7SA08FSV6pUZZJ2ZZcYY49mWHQ9hZHDSnElOrqUy80QZJOZ6J+402TacJ5Wl2nW
Zmwtm1CnuoVzgzOut5vNWoJdTl1NLVQZhEFSYbk8XiunBUuVZISbxBSLA3d3hIMaz/abGYJZjtvM
melQ6xZgwlpQQptOWwLMG6eR9rFA//BZu73JTXc4XazJ0TZEEC3g+OEJmkVwEd//5uvxVy8xR/rr
N787TOR49Q8y3CZcah+WPTi5xYRNf2h/El60pa4E4niBFNug/Kat0oEfHK5dKUoYojB8LQou3Fpc
lXUThFMw64kqZ4/tSPvK8PVI8a6izAnIQ8DJiNQoY8nFRq53kDB0GcHPGqO6D4C1ZYtUxB4p+wpz
0AIHUHdaB7pReSgVFR7cxk8otXaXdLdLCKa3gK5ipkSt2/gLaSitXdsELux8P4uRDdiTzBOxEgx+
1tVgPZYmt4r+mJPR0ofr1o27fJK2g4CoyBs7nVaomMqSi51Rw8ENE8xYi4T5CJ5kMFvoDrwwz/2T
NQA09iisfGY8Y9jCEkz8+PAg9iPhQCptM771NsghtgYZknyFyU/K11FwqbKY40HphCKDtu6j4dMI
K9zh4FBTMBSzzRaGFYNFHjfjH+7w7uagcjnfaP0dJIl+y0IT0zESHQnzKW1lU64CFyBZftusnHEr
QHJSJxWPe4MG43xSi7P2LLLKRNLITSoOGEv8tEe8bYeddrnM4UyrQ9SJPL46yd1BSKiK0msgBjKW
29EHFKs4ITvwVc02e86vSjEK3f6gy84CZDy0zoRtx0GPSZmyCnwCDHoGiIAD+sEhBIdII8QuEu4/
Hob+fhiM+udEHbnafaKm0oVZmaG17WJlVPVtKU+EOpqWXWnV+Xm3Ml4AQrVuZpyyKb4JLAp7GRJa
5fP/R7dxzoknbqtS9NUEbh8YwqD99qUUZp9yLDqdD39890903vz16qK43uAJ/PDTu//xLzh9vvrW
JJtdrplFNB4cE+DIroeZlyRfcuBz2nkXoWWCeFSglpcWS3ya0Txvh4VZf9jCj87r4C35NeJphpv7
/r1q+/59JiBQlPtYzrguhxBEz0WSPH7ZSiXBUZitueQVVSXnugbOsVh9LKv16vCwo8RVO+DQRDgC
CJMhhIyoOT3tWPrPZyR13xmmCPb73txJcuigundjX3mTRadtGOrxqcPAyvveiozbANeTbBsr6naL
wa6JCx/LAIFSQrfkxJMqrOcpCt24i6XJYU8i9SmWWZlhaBvLNVSams+R4Wc40b2J8IHHwLJ7wwxP
2Pv3MvH3703qf8x7AcBmBQcXYMnBOUjSxkkIT4zvOUUdzYScr4x4oD+gbEScvb7g3M00j9U6a+AI
F43vSbYkFGAwoLocJkcaLc0P6uH2Q8SDl0mNv+9IRmaMz/9YjPlikoqFP46V/syrd9FRT6iisapX
ru/3ALHSHXa5vaEA1C7M1/QWJGtxC19ua/K0A0lsvcDM6WYKlPeDAOcEkbTRrUTSm9XAYIFqFgvL
yYhmfYRp2XHRRfzzDnc6uFQY8yD4G4/73MM47Inb3ng8W0/H424Uyt2q1gAEK6SF03B+TNZb+ByI
HbkzJZknofHoM4FuONvFTFVPJxMhvDgk4R1yRfudinl9QnzVjyxe5AQ5xZUI7UNgGg7CVxdPrTDc
HrvU+q5XOutlq76Jt49ZOng7sidh6lcLlcsW00TQVJJm2HibzzSxj2vTBuC0gBTmz2iBcAv3y5+/
7RICfLcyRGZi69bRLcrUrJJGTPwlR1Qg7YDjU9PFYbDOS11RJ+cnKGOb9KYTE7S1npscAQ/wjXoA
j8cDeQ24QEChVE+RO16UNU5gpTwf5N1M/TS9nEXeuX7mAn2u5V0bMQKC5y3wScRQIotk6oa1QAw0
DLKoyt8bddsaLiVH31kgX17bfL5MIv2TYd4SN8as5ItNWjsVjfqC64x+BP6yQD309XpLUalSDsbF
qcPJeP+eJwjMlKt1ph8yGyeFghO69c7WDX5WNDo3geJwB7HkO1IWC+e0MEt2E+do8uw5x7ce6mQD
CW6ru6bIcti4rglIQesiupzLa03zJTpXS5Tq+/fcQ3hEWwiKflrX8DVror0YXo8NkB30n395y+nY
SYAiXjctF/krNZkBFlUxmV1nJoKpLxtNfKl3mTlidllMsGhUw7tAp6El5PdG6iD7BE83llnrBDH5
WzPFOnxx4kffnOT2xAvJ151KvclWJq4WswwpOtwNe0l7Pmz8lPPpsocyDUdQwFMO/BwZByNfm2W0
X7njGGLOiX9SJgO40pwU4Sy5WkayJ0PLYSNBS3q37EaKIPS7rdTdG3rgJYHjZNRMvUFimlblhlKo
kH0Lo8XLKctMD6iNlCDXgj+fpGFZc7ouwUPAKRlcCFdg/fpdiogQHx5RV4qVAI0MeTCI0s6kzpYj
8Ub8ikn8bWi7ZBJ1WH7/HrsidUGZ1xHRHDO6eVTcwgmpedNCzYFGfMSyAIvriLC/xDvpxi5rVWwG
z5Ai5hSBIUK38/v8FKpuibrtb4l7O1H35mqON86zoAQzc6qzFdNIqRqETQAFzkZRFXuCg59H++Zi
Jf6/QOLMAfNuXitt8wmU3/fTKdztyd2nUzWnP42XF/ve8Y63oSygXzfJALejCLfxKsp20+SAA05u
pcctkoqFSQnQNZ/mxFyhVTTBjV5hJS97g+G8MxMYp/EwPKxetxoLPsa0VK0Dfj82+YSCfMpGRRTN
/2YC6JM9XFcuW2vVIvhCNDafEN/eIDSb7qQIuwDjU0+RGer25+dPwKOcB4dP7xyg+kvnCw50Z0Go
rTkPomVEX0zoUZVUdlefiffvCdD790PCx/v3AlCxu0TfbYVXjC2ZzEwBZoqyTQ7iB7TjkyGQhTOW
YbPJDG3kmLnGMPZqDm7YYZAQijgptN4HCkp0aZJxKMJFPHeUwjXJk/G/923f+6qHbEaQ+YYvpmu1
gxsIt+5TD7/Cb3wHGieX+dmeaJ5WRxvN00xC/tVEJ9CaJt3ecLIGWbBh8KM5T8aDCv42GSDkmbD1
QKMMCprsS7T+uJ58LGQq6VAk1UDqauHHY2UMNt+pC1k3ouZVC2blb+syWV/gnXh0o6KMobY2naS0
c0yS5+nAeoc4qd3EDyoLc2QHWHJahaTjlFY6WDm2n0wvyaJ0lxbeDfPVsi+KX4gppngwQqQ0R2Si
WjiVUAK/twyn08FwPitKqix1mWuPEaRuwDJNL6DtM1OnhhLgFdABqEmJPnUsmhTzOWqDtqtFoQpg
o04CXgkQwzHWOrA7WS0+DYTz8C4R5ZoNTDDOvVL4hehV0dJukh3UwkQIfnALD53IwtMi28BliL0g
HI/QviwkGjuXlVxSUmOqnjUZNiCotw6NNvmv0NUeJQnRdDDNnKHTNrrgk80edxREzrKi7L6TprgN
UoMJWnYm+chHqI0TjuwkYY5cWVcj1TxiCuIWrZQh7YFo76ttO2ilHqSy/PDv3/1zKbNOhfLGm+uH
Vgj68B/e/c/OZ59hpqJNafLTc6GovYfDz4ePerVKkbK57tzJnn3z9NXXz48O4eMeeaSOgQ7ifMWn
Dn04Kyog7bJYm/ryTDHrzh1jB0ZnSSkJwynFXHqkMWYQ6RfYwueT4V6/oLZ+IIYpH0BpncyMGbuo
p1OVb6si6IljEJvBN6PeTs9RxOV0c6ecRAE3GqXZsW1svxF+3Jmd3Q2z2ZMYBi8/z4oJjABPLhfr
glar4hKDpWS+r3B5i8U1i70EQWV0kgQUDOzX2TmIwR+Rn5uLPfHoGppy0S2VwAqDJh2UGrvAQ7Ug
NSV5yvQx5SBbMiuU9WcDuEk1bCnezIYjeDFuBTFPietP19vGlleA+dCwHRshlq2n9MzO1CZIFiqA
MyN7LCyftZduL0wvlvEvJ9e11RIrHGZSIGPhtB0qg9dTlTpQdK7MmXCm0mKCJezm24Xsk7Xc5ug9
LTCcrzUKJhw1VU63i0kleW1kIhrDcnAEwpMnT0w8ODU9yOXDQ7+aETeXTs8WxaRin4QM8cs1tk+h
JWoMyrrenuIB7FuQardt+WRaMUYrXcJBXWNyEoVgctGY1BdsU6jKMypvsqnWwO0vh8Z621cqDrmD
X07qwuV4yHTi3agt3euhKRSAeg+vA7Uy3A8yULQm+pa8yWF1iQShYyERZMJGQjHG4+uTCBG2ahpg
xAPZ1PUd3zZtKigYCBqVtx69Tox+J3vZ8NMmt4JvxK8pyx7m6KSYMnd1BPnUhaPGOiZgU3k/eq8y
VobNKLCIn35su1rnQGnmpJk8nVAhyYF5/Fx4sTzPdieDnJTxu2oGwcwj5iMqqb7gM/jEcVk0YWOu
7Um9h+5ddAaj4NO7sx9XPTQkhvP2o2pk7u1vYDhORpGnCFm6DnFqm/4g8sxi9Oz275/CA9FQ9WlY
sQFYMcQeDDM4PmQwJ8OFjJNwRlzBC8pAgMI1XM0666PKHAja5LQeZMaH4QK3g4J5F+UZSYe7p9Tv
T4dweLk8Nmfhw/3oZT0uk0fBwK5HanKUnAsIRIGSzgQTKmLByOkUzh5PhRd4gD6Nm3Wt0ny0bsHd
+u94C3o99pZXMxhEEj8c304ngnObK+a5qlBvqXt6C+Jg+QebiNnmiDevKh28vT3YPnyCAY1SRyfJ
nRgJg6DXZFceG45IKzQ0z20oHp4/0++wE8ZoSwFopl+JO+na2WLRmbfW2rgOKEwJooIZHsbERfoI
16ZX0k48WhNJxbqddiAeFRIAvS+2K+JISB11txbh+gmeNXo2GLoj8h/+47t/KgyvhDIPL8rFAj9/
+E/v/tvf/+wz668UuiN2yjlz9CCW4Y6TT+9luXr0sIt7Zr0zapQKpLhJr0euwL1V00ugUgaYMqN7
k2cPKZrWONv+ppzFBWjMeqbLWXFVTPvdBh5ybJ49eJE9+P7lV0Bi0ZYHnY07k39udg7w/ZvXz54f
HY3fPn/z3ctXT98+j8qyg7Q/4xI+/MADamYgaV8U1apYPHo4fA3X+HueY79VyR4Nk7NTOvBYZSDM
tAzzlnP3NoUZi+eVZ3sHt+r/DEO3vqE+0nXQcXhqxdGaDxJhNzv4hZQPDhqijoR2xOQSPr3Oyply
UnGQOx/+87t/ICe1bmYf/su7//737OGk4yhpsJpZXAYPQ9mRbG/2FsDLL0RoEx8F4Dw+AsdON2ZC
Oa+s7SwPjqZY5ZboeMLMJrrQ13WQFvdm3xox14oKT+bxCdVQIwKy9B3nAtX9Df5xaes3lzM5zCIP
t7vt5ZOWnQ70gbngNsCe/dft8H8Dtb1Mag==
"""
import sys
import base64
import zlib
class DictImporter(object):
    """PEP 302 meta-path importer that serves modules from an in-memory
    mapping of dotted module name -> source string.

    Packages are represented by a ``"<name>.__init__"`` key.
    """

    def __init__(self, sources):
        self.sources = sources

    def find_module(self, fullname, path=None):
        """Return this importer when *fullname* can be served, else None."""
        # we were generated with <python2.7 (which pulls in argparse)
        # but we are running now on a stdlib which has it, so use that.
        if fullname == "argparse" and sys.version_info >= (2, 7):
            return None
        known = (fullname in self.sources
                 or fullname + '.__init__' in self.sources)
        return self if known else None

    def load_module(self, fullname):
        """Compile and execute the stored source, returning the module."""
        from types import ModuleType
        try:
            source = self.sources[fullname]
            is_pkg = False
        except KeyError:
            # Fall back to the package form; a missing key raises here,
            # which is the correct ImportError-adjacent behaviour.
            source = self.sources[fullname + '.__init__']
            is_pkg = True

        code = compile(source, fullname, 'exec')
        module = sys.modules.setdefault(fullname, ModuleType(fullname))
        module.__file__ = "%s/%s" % (__file__, fullname)
        module.__loader__ = self
        if is_pkg:
            module.__path__ = [fullname]

        # do_exec is defined at runtime (Python 2/3 exec differences).
        do_exec(code, module.__dict__)  # noqa
        return sys.modules[fullname]

    def get_source(self, name):
        """Return the source for *name* (module or package), or None."""
        source = self.sources.get(name)
        if source is None:
            source = self.sources.get(name + '.__init__')
        return source
if __name__ == "__main__":
    # Bootstrap: `sources` (the large base64 string above) is a
    # zlib-compressed pickle mapping module names to source text.
    # Decode it, install a DictImporter on sys.meta_path, then run the
    # bundled pytest entirely from memory.
    if sys.version_info >= (3, 0):
        # Python 3: exec() is a function; wrap it so both branches share
        # the same do_exec(code, namespace) call signature.
        exec("def do_exec(co, loc): exec(co, loc)\n")
        import pickle
        sources = sources.encode("ascii") # ensure bytes
        sources = pickle.loads(zlib.decompress(base64.decodebytes(sources)))
    else:
        # Python 2: exec is a statement, and cPickle is the fast pickler.
        import cPickle as pickle
        exec("def do_exec(co, loc): exec co in loc\n")
        sources = pickle.loads(zlib.decompress(base64.decodestring(sources)))

    importer = DictImporter(sources)
    sys.meta_path.insert(0, importer)

    # Hand control to the bundled pytest; propagate its exit status.
    entry = "import pytest; raise SystemExit(pytest.cmdline.main())"
    do_exec(entry, locals()) # noqa
| bsd-3-clause |
derekstavis/python-sysfs-gpio | sysfs/gpio.py | 1 | 12336 | """
Linux SysFS-based native GPIO implementation.
The MIT License (MIT)
Copyright (c) 2014 Derek Willian Stavis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
__all__ = ('DIRECTIONS', 'INPUT', 'OUTPUT',
'EDGES', 'RISING', 'FALLING', 'BOTH',
'Controller')
import errno
import os
import select
from twisted.internet import reactor
import logging
# Module logger; logs to stderr at DEBUG level.
Logger = logging.getLogger('sysfs.gpio')
Logger.addHandler(logging.StreamHandler())
Logger.setLevel(logging.DEBUG)

# Sysfs constants
# Paths of the kernel GPIO sysfs interface; %d placeholders take the
# pin number.
SYSFS_BASE_PATH = '/sys/class/gpio'

SYSFS_EXPORT_PATH = SYSFS_BASE_PATH + '/export'
SYSFS_UNEXPORT_PATH = SYSFS_BASE_PATH + '/unexport'

SYSFS_GPIO_PATH = SYSFS_BASE_PATH + '/gpio%d'
SYSFS_GPIO_DIRECTION_PATH = SYSFS_GPIO_PATH + '/direction'
SYSFS_GPIO_EDGE_PATH = SYSFS_GPIO_PATH + '/edge'
SYSFS_GPIO_VALUE_PATH = SYSFS_GPIO_PATH + '/value'
SYSFS_GPIO_ACTIVE_LOW_PATH = SYSFS_GPIO_PATH + '/active_low'

# Strings written to/read from the sysfs value file.
SYSFS_GPIO_VALUE_LOW = '0'
SYSFS_GPIO_VALUE_HIGH = '1'

EPOLL_TIMEOUT = 1 # second

# Public interface
# Values written verbatim to the sysfs 'direction' and 'edge' files.
INPUT = 'in'
OUTPUT = 'out'

RISING = 'rising'
FALLING = 'falling'
BOTH = 'both'

ACTIVE_LOW_ON = 1
ACTIVE_LOW_OFF = 0

DIRECTIONS = (INPUT, OUTPUT)
EDGES = (RISING, FALLING, BOTH)
ACTIVE_LOW_MODES = (ACTIVE_LOW_ON, ACTIVE_LOW_OFF)
class Pin(object):
    """
    Represent a pin in SysFS
    """

    def __init__(self, number, direction, callback=None, edge=None, active_low=0):
        """
        @type number: int
        @param number: The pin number
        @type direction: str
        @param direction: Pin direction, enumerated by C{Direction}
        @type callback: callable
        @param callback: Method be called when pin changes state
        @type edge: str
        @param edge: The edge transition that triggers callback,
                     enumerated by C{Edge}
        @type active_low: int
        @param active_low: Indicator of whether this pin uses inverted
                           logic for HIGH-LOW transitions.
        """
        self._number = number
        self._direction = direction
        self._callback = callback
        self._active_low = active_low

        # The value file is kept open for the pin's lifetime: it is both
        # read/written for get/set and polled (via fileno()) for edge
        # events.  NOTE(review): assumes the pin is already exported so
        # that /sys/class/gpio/gpioN/value exists — the Controller
        # exports before constructing Pin.
        self._fd = open(self._sysfs_gpio_value_path(), 'r+')

        if callback and not edge:
            raise Exception('You must supply a edge to trigger callback on')

        with open(self._sysfs_gpio_direction_path(), 'w') as fsdir:
            fsdir.write(direction)

        if edge:
            with open(self._sysfs_gpio_edge_path(), 'w') as fsedge:
                fsedge.write(edge)

        if active_low:
            if active_low not in ACTIVE_LOW_MODES:
                raise Exception('You must supply a value for active_low which is either 0 or 1.')

            with open(self._sysfs_gpio_active_low_path(), 'w') as fsactive_low:
                fsactive_low.write(str(active_low))

    @property
    def callback(self):
        """
        Gets this pin callback
        """
        return self._callback

    @callback.setter
    def callback(self, value):
        """
        Sets this pin callback
        """
        self._callback = value

    @property
    def direction(self):
        """
        Pin direction
        """
        return self._direction

    @property
    def number(self):
        """
        Pin number
        """
        return self._number

    @property
    def active_low(self):
        """
        Pin active_low setting (1 when inverted logic is enabled)
        """
        return self._active_low

    def set(self):
        """
        Set pin to HIGH logic setLevel
        """
        # Rewind after writing so the shared descriptor is positioned at
        # the start of the value file for the next read/write.
        self._fd.write(SYSFS_GPIO_VALUE_HIGH)
        self._fd.seek(0)

    def reset(self):
        """
        Set pin to LOW logic setLevel
        """
        self._fd.write(SYSFS_GPIO_VALUE_LOW)
        self._fd.seek(0)

    def read(self):
        """
        Read pin value

        @rtype: int
        @return: I{0} when LOW, I{1} when HIGH
        """
        val = self._fd.read()
        self._fd.seek(0)
        return int(val)

    def fileno(self):
        """
        Get the file descriptor associated with this pin.

        @rtype: int
        @return: File descriptor
        """
        return self._fd.fileno()

    def changed(self, state):
        # Dispatch an edge event to the registered callback, if any.
        if callable(self._callback):
            self._callback(self.number, state)

    def _sysfs_gpio_value_path(self):
        """
        Get the file that represent the value of this pin.

        @rtype: str
        @return: the path to sysfs value file
        """
        return SYSFS_GPIO_VALUE_PATH % self.number

    def _sysfs_gpio_direction_path(self):
        """
        Get the file that represent the direction of this pin.

        @rtype: str
        @return: the path to sysfs direction file
        """
        return SYSFS_GPIO_DIRECTION_PATH % self.number

    def _sysfs_gpio_edge_path(self):
        """
        Get the file that represent the edge that will trigger an interrupt.

        @rtype: str
        @return: the path to sysfs edge file
        """
        return SYSFS_GPIO_EDGE_PATH % self.number

    def _sysfs_gpio_active_low_path(self):
        """
        Get the file that represents the active_low setting for this pin.

        @rtype: str
        @return: the path to sysfs active_low file
        """
        return SYSFS_GPIO_ACTIVE_LOW_PATH % self.number
class Controller(object):
    '''
    A singleton class to provide access to SysFS GPIO pins
    '''

    def __new__(cls, *args, **kw):
        # Lazily construct the singleton; every later call returns it.
        if not hasattr(cls, '_instance'):
            instance = super(Controller, cls).__new__(cls)
            instance._allocated_pins = {}
            instance._poll_queue = select.epoll()

            instance._available_pins = []
            instance._running = True

            # Cleanup before stopping reactor
            reactor.addSystemEventTrigger('before', 'shutdown', instance.stop)

            # Run the EPoll in a Thread, as it blocks.
            reactor.callInThread(instance._poll_queue_loop)

            cls._instance = instance
        return cls._instance

    def __init__(self):
        pass

    def _poll_queue_loop(self):
        """Blocking epoll loop, run in a reactor worker thread.

        Events are marshalled back to the reactor thread through
        reactor.callFromThread.
        """
        while self._running:
            try:
                events = self._poll_queue.poll(EPOLL_TIMEOUT)
            except IOError as error:
                # BUG FIX: the original fell through to len(events) after
                # an exception, where `events` is unbound, raising
                # UnboundLocalError.  EINTR only means a signal
                # interrupted the poll, so retry; any other error is
                # logged and stops the reactor (whose shutdown trigger
                # calls stop(), ending this loop).
                if error.errno != errno.EINTR:
                    Logger.error(repr(error))
                    reactor.stop()
                continue
            if len(events) > 0:
                reactor.callFromThread(self._poll_queue_event, events)

    @property
    def available_pins(self):
        """Pin numbers that may be allocated on this platform."""
        return self._available_pins

    @available_pins.setter
    def available_pins(self, value):
        self._available_pins = value

    def stop(self):
        """Stop the poll loop and deallocate every allocated pin."""
        self._running = False

        # Python 2 dicts expose itervalues(); Python 3 only values().
        # Iterate over a copy because dealloc_pin mutates the dict.
        try:
            values = self._allocated_pins.copy().itervalues()
        except AttributeError:
            values = self._allocated_pins.copy().values()
        for pin in values:
            self.dealloc_pin(pin.number)

    def alloc_pin(self, number, direction, callback=None, edge=None, active_low=0):
        """Export and configure a pin, returning its Pin object.

        @raise Exception: when the pin number is invalid or already
            allocated, or direction/edge arguments are out of range.
        """
        Logger.debug('SysfsGPIO: alloc_pin(%d, %s, %s, %s, %s)'
                     % (number, direction, callback, edge, active_low))

        self._check_pin_validity(number)

        if direction not in DIRECTIONS:
            raise Exception("Pin direction %s not in %s"
                            % (direction, DIRECTIONS))

        if callback and edge not in EDGES:
            raise Exception("Pin edge %s not in %s" % (edge, EDGES))

        if not self._check_pin_already_exported(number):
            with open(SYSFS_EXPORT_PATH, 'w') as export:
                export.write('%d' % number)
        else:
            Logger.debug("SysfsGPIO: Pin %d already exported" % number)

        pin = Pin(number, direction, callback, edge, active_low)

        # Only input pins generate epoll edge events.
        if direction is INPUT:
            self._poll_queue_register_pin(pin)

        self._allocated_pins[number] = pin
        return pin

    def _poll_queue_register_pin(self, pin):
        ''' Pin responds to fileno(), so it's pollable. '''
        self._poll_queue.register(pin, (select.EPOLLPRI | select.EPOLLET))

    def _poll_queue_unregister_pin(self, pin):
        self._poll_queue.unregister(pin)

    def dealloc_pin(self, number):
        """Unexport a pin in sysfs and forget about it."""
        Logger.debug('SysfsGPIO: dealloc_pin(%d)' % number)

        if number not in self._allocated_pins:
            raise Exception('Pin %d not allocated' % number)

        with open(SYSFS_UNEXPORT_PATH, 'w') as unexport:
            unexport.write('%d' % number)

        pin = self._allocated_pins[number]
        if pin.direction is INPUT:
            self._poll_queue_unregister_pin(pin)

        del pin, self._allocated_pins[number]

    def get_pin(self, number):
        """Return the allocated Pin for *number* (KeyError when absent)."""
        Logger.debug('SysfsGPIO: get_pin(%d)' % number)
        return self._allocated_pins[number]

    def set_pin(self, number):
        """Drive an allocated pin to HIGH."""
        Logger.debug('SysfsGPIO: set_pin(%d)' % number)

        if number not in self._allocated_pins:
            raise Exception('Pin %d not allocated' % number)

        return self._allocated_pins[number].set()

    def reset_pin(self, number):
        """Drive an allocated pin to LOW."""
        Logger.debug('SysfsGPIO: reset_pin(%d)' % number)

        if number not in self._allocated_pins:
            raise Exception('Pin %d not allocated' % number)

        return self._allocated_pins[number].reset()

    def get_pin_state(self, number):
        """Read an allocated pin; True when HIGH, False when LOW."""
        Logger.debug('SysfsGPIO: get_pin_state(%d)' % number)

        if number not in self._allocated_pins:
            raise Exception('Pin %d not allocated' % number)

        pin = self._allocated_pins[number]

        # Temporarily unregister input pins so our own read does not
        # surface as an epoll edge event.
        if pin.direction == INPUT:
            self._poll_queue_unregister_pin(pin)

        val = pin.read()

        if pin.direction == INPUT:
            self._poll_queue_register_pin(pin)

        return val > 0

    ''' Private Methods '''

    def _poll_queue_event(self, events):
        """
        EPoll event callback

        Runs on the reactor thread; maps each ready file descriptor back
        to its Pin and fires the pin's changed() callback.
        """
        for fd, event in events:
            if not (event & (select.EPOLLPRI | select.EPOLLET)):
                continue

            try:
                values = self._allocated_pins.itervalues()
            except AttributeError:
                values = self._allocated_pins.values()
            for pin in values:
                if pin.fileno() == fd:
                    pin.changed(pin.read())

    def _check_pin_already_exported(self, number):
        """
        Check if this pin was already exported on sysfs.

        @type number: int
        @param number: Pin number

        @rtype: bool
        @return: C{True} when it's already exported, otherwise C{False}
        """
        gpio_path = SYSFS_GPIO_PATH % number
        return os.path.isdir(gpio_path)

    def _check_pin_validity(self, number):
        """
        Ensure this pin number is available and not already allocated.

        @type number: int
        @param number: Pin number

        @raise Exception: when the pin is out of range or already
            allocated (this method returns nothing on success)
        """
        if number not in self._available_pins:
            raise Exception("Pin number out of range")
        if number in self._allocated_pins:
            raise Exception("Pin already allocated")
# Create controller instance
# The class name is deliberately shadowed by its singleton instance, so
# importers use `Controller.alloc_pin(...)` etc. directly.
Controller = Controller()

if __name__ == '__main__':
    print("This module isn't intended to be run directly.")
| mit |
samdutton/shaka-player | third_party/gjslint/closure_linter-2.3.13/closure_linter/statetracker.py | 14 | 38739 | #!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Light weight EcmaScript state tracker that reads tokens and tracks state."""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
import re
from closure_linter import javascripttokenizer
from closure_linter import javascripttokens
from closure_linter import tokenutil
# Shorthand
Type = javascripttokens.JavaScriptTokenType
class DocFlag(object):
  """Generic doc flag object.

  Attribute:
    flag_type: param, return, define, type, etc.
    flag_token: The flag token.
    type_start_token: The first token specifying the flag type,
      including braces.
    type_end_token: The last token specifying the flag type,
      including braces.
    type: The type spec.
    name_token: The token specifying the flag name.
    name: The flag name
    description_start_token: The first token in the description.
    description_end_token: The end token in the description.
    description: The description.
  """

  # Please keep these lists alphabetized.

  # The list of standard jsdoc tags is from
  STANDARD_DOC = frozenset([
      'author',
      'bug',
      'classTemplate',
      'consistentIdGenerator',
      'const',
      'constructor',
      'define',
      'deprecated',
      'dict',
      'enum',
      'export',
      'expose',
      'extends',
      'externs',
      'fileoverview',
      'idGenerator',
      'implements',
      'implicitCast',
      'interface',
      'lends',
      'license',
      'ngInject',  # This annotation is specific to AngularJS.
      'noalias',
      'nocompile',
      'nosideeffects',
      'override',
      'owner',
      'param',
      'preserve',
      'private',
      'protected',
      'public',
      'return',
      'see',
      'stableIdGenerator',
      'struct',
      'supported',
      'template',
      'this',
      'type',
      'typedef',
      'wizaction',  # This annotation is specific to Wiz.
      'wizmodule',  # This annotation is specific to Wiz.
      ])

  ANNOTATION = frozenset(['preserveTry', 'suppress'])

  # All flag names the linter accepts without complaint.
  LEGAL_DOC = STANDARD_DOC | ANNOTATION

  # Includes all Closure Compiler @suppress types.
  # Not all of these annotations are interpreted by Closure Linter.
  #
  # Specific cases:
  # - accessControls is supported by the compiler at the expression
  #   and method level to suppress warnings about private/protected
  #   access (method level applies to all references in the method).
  #   The linter mimics the compiler behavior.
  SUPPRESS_TYPES = frozenset([
      'accessControls',
      'ambiguousFunctionDecl',
      'checkRegExp',
      'checkStructDictInheritance',
      'checkTypes',
      'checkVars',
      'const',
      'constantProperty',
      'deprecated',
      'duplicate',
      'es5Strict',
      'externsValidation',
      'extraProvide',
      'extraRequire',
      'fileoverviewTags',
      'globalThis',
      'internetExplorerChecks',
      'invalidCasts',
      'missingProperties',
      'missingProvide',
      'missingRequire',
      'missingReturn',
      'nonStandardJsDocs',
      'strictModuleDepCheck',
      'tweakValidation',
      'typeInvalidation',
      'undefinedNames',
      'undefinedVars',
      'underscore',
      'unknownDefines',
      'unusedPrivateMembers',
      'uselessCode',
      'visibility',
      'with'])

  # Flags whose trailing text is parsed as a free-form description.
  HAS_DESCRIPTION = frozenset([
      'define', 'deprecated', 'desc', 'fileoverview', 'license', 'param',
      'preserve', 'return', 'supported'])

  # Flags that may carry a {type} expression.
  HAS_TYPE = frozenset([
      'define', 'enum', 'extends', 'implements', 'param', 'return', 'type',
      'suppress', 'const'])

  # Flags whose type may appear without braces (type-only flags).
  TYPE_ONLY = frozenset(['enum', 'extends', 'implements', 'suppress', 'type',
                         'const'])

  # Flags followed by a name (e.g. @param {string} name).
  HAS_NAME = frozenset(['param'])

  EMPTY_COMMENT_LINE = re.compile(r'^\s*\*?\s*$')
  EMPTY_STRING = re.compile(r'^\s*$')

  def __init__(self, flag_token):
    """Creates the DocFlag object and attaches it to the given start token.

    Args:
      flag_token: The starting token of the flag.
    """
    self.flag_token = flag_token
    # '@param' -> 'param'
    self.flag_type = flag_token.string.strip().lstrip('@')

    # Extract type, if applicable.
    self.type = None
    self.type_start_token = None
    self.type_end_token = None
    if self.flag_type in self.HAS_TYPE:
      # Look for a braced {type} before the flag ends.
      brace = tokenutil.SearchUntil(flag_token, [Type.DOC_START_BRACE],
                                    Type.FLAG_ENDING_TYPES)
      if brace:
        end_token, contents = _GetMatchingEndBraceAndContents(brace)
        self.type = contents
        self.type_start_token = brace
        self.type_end_token = end_token
      elif (self.flag_type in self.TYPE_ONLY and
            flag_token.next.type not in Type.FLAG_ENDING_TYPES and
            flag_token.line_number == flag_token.next.line_number):
        # b/10407058. If the flag is expected to be followed by a type then
        # search for type in same line only. If no token after flag in same
        # line then conclude that no type is specified.
        self.type_start_token = flag_token.next
        self.type_end_token, self.type = _GetEndTokenAndContents(
            self.type_start_token)
        if self.type is not None:
          self.type = self.type.strip()

    # Extract name, if applicable.
    self.name_token = None
    self.name = None
    if self.flag_type in self.HAS_NAME:
      # Handle bad case, name could be immediately after flag token.
      self.name_token = _GetNextPartialIdentifierToken(flag_token)

      # Handle good case, if found token is after type start, look for
      # a identifier (substring to cover cases like [cnt] b/4197272) after
      # type end, since types contain identifiers.
      if (self.type and self.name_token and
          tokenutil.Compare(self.name_token, self.type_start_token) > 0):
        self.name_token = _GetNextPartialIdentifierToken(self.type_end_token)

      if self.name_token:
        self.name = self.name_token.string

    # Extract description, if applicable.
    self.description_start_token = None
    self.description_end_token = None
    self.description = None
    if self.flag_type in self.HAS_DESCRIPTION:
      # The description starts after whichever of flag/type/name comes
      # last in the token stream.
      search_start_token = flag_token
      if self.name_token and self.type_end_token:
        if tokenutil.Compare(self.type_end_token, self.name_token) > 0:
          search_start_token = self.type_end_token
        else:
          search_start_token = self.name_token
      elif self.name_token:
        search_start_token = self.name_token
      elif self.type:
        search_start_token = self.type_end_token

      interesting_token = tokenutil.Search(search_start_token,
          Type.FLAG_DESCRIPTION_TYPES | Type.FLAG_ENDING_TYPES)
      if interesting_token.type in Type.FLAG_DESCRIPTION_TYPES:
        self.description_start_token = interesting_token
        self.description_end_token, self.description = (
            _GetEndTokenAndContents(interesting_token))
class DocComment(object):
"""JavaScript doc comment object.
Attributes:
ordered_params: Ordered list of parameters documented.
start_token: The token that starts the doc comment.
end_token: The token that ends the doc comment.
suppressions: Map of suppression type to the token that added it.
"""
def __init__(self, start_token):
  """Create the doc comment object.

  Args:
    start_token: The first token in the doc comment.
  """
  self.__flags = []  # DocFlag objects, in the order they were added.
  self.start_token = start_token
  self.end_token = None  # Set externally when the comment is fully parsed.
  self.suppressions = {}
  self.invalidated = False
@property
def ordered_params(self):
"""Gives the list of parameter names as a list of strings."""
params = []
for flag in self.__flags:
if flag.flag_type == 'param' and flag.name:
params.append(flag.name)
return params
def Invalidate(self):
  """Indicate that the JSDoc is well-formed but we had problems parsing it.

  This is a short-circuiting mechanism so that we don't emit false
  positives about well-formed doc comments just because we don't support
  hot new syntaxes.
  """
  self.invalidated = True
def IsInvalidated(self):
  """Test whether Invalidate() has been called."""
  return self.invalidated
def AddSuppression(self, token):
  """Add a new error suppression flag.

  Args:
    token: The suppression flag token.
  """
  #TODO(user): Error if no braces
  brace = tokenutil.SearchUntil(token, [Type.DOC_START_BRACE],
                                [Type.DOC_FLAG])
  if brace:
    end_token, contents = _GetMatchingEndBraceAndContents(brace)
    # A single @suppress may list several pipe-separated types; each one
    # maps back to the same flag token.
    for suppression in contents.split('|'):
      self.suppressions[suppression] = token
def SuppressionOnly(self):
"""Returns whether this comment contains only suppression flags."""
if not self.__flags:
return False
for flag in self.__flags:
if flag.flag_type != 'suppress':
return False
return True
def AddFlag(self, flag):
  """Add a new document flag.

  Args:
    flag: DocFlag object.
  """
  self.__flags.append(flag)
def InheritsDocumentation(self):
"""Test if the jsdoc implies documentation inheritance.
Returns:
True if documentation may be pulled off the superclass.
"""
return self.HasFlag('inheritDoc') or self.HasFlag('override')
def HasFlag(self, flag_type):
"""Test if the given flag has been set.
Args:
flag_type: The type of the flag to check.
Returns:
True if the flag is set.
"""
for flag in self.__flags:
if flag.flag_type == flag_type:
return True
return False
def GetFlag(self, flag_type):
"""Gets the last flag of the given type.
Args:
flag_type: The type of the flag to get.
Returns:
The last instance of the given flag type in this doc comment.
"""
for flag in reversed(self.__flags):
if flag.flag_type == flag_type:
return flag
def GetDocFlags(self):
"""Return the doc flags for this comment."""
return list(self.__flags)
def _YieldDescriptionTokens(self):
for token in self.start_token:
if (token is self.end_token or
token.type is javascripttokens.JavaScriptTokenType.DOC_FLAG or
token.type not in javascripttokens.JavaScriptTokenType.COMMENT_TYPES):
return
if token.type not in [
javascripttokens.JavaScriptTokenType.START_DOC_COMMENT,
javascripttokens.JavaScriptTokenType.END_DOC_COMMENT,
javascripttokens.JavaScriptTokenType.DOC_PREFIX]:
yield token
@property
def description(self):
return tokenutil.TokensToString(
self._YieldDescriptionTokens())
def GetTargetIdentifier(self):
"""Returns the identifier (as a string) that this is a comment for.
Note that this uses method uses GetIdentifierForToken to get the full
identifier, even if broken up by whitespace, newlines, or comments,
and thus could be longer than GetTargetToken().string.
Returns:
The identifier for the token this comment is for.
"""
token = self.GetTargetToken()
if token:
return tokenutil.GetIdentifierForToken(token)
  def GetTargetToken(self):
    """Get this comment's target token.

    Scans forward from the comment's end token, looking for the first token
    of a target type; 'var' declarations and function declarations are
    special-cased so the comment attaches to the assigned lvalue or the
    function name.  All exits without an explicit value return None.

    Returns:
      The token that is the target of this comment, or None if there isn't one.
    """
    # File overviews describe the file, not a token.
    if self.HasFlag('fileoverview'):
      return
    # Token types that may appear between the comment and its target.
    skip_types = frozenset([
        Type.WHITESPACE,
        Type.BLANK_LINE,
        Type.START_PAREN])
    # Token types that can be a comment's target.
    target_types = frozenset([
        Type.FUNCTION_NAME,
        Type.IDENTIFIER,
        Type.SIMPLE_LVALUE])
    token = self.end_token.next
    while token:
      if token.type in target_types:
        return token
      # Handles the case of a comment on "var foo = ..."
      if token.IsKeyword('var'):
        next_code_token = tokenutil.CustomSearch(
            token,
            lambda t: t.type not in Type.NON_CODE_TYPES)
        if (next_code_token and
            next_code_token.IsType(Type.SIMPLE_LVALUE)):
          return next_code_token
        return
      # Handles the case of a comment on "function foo () {}"
      if token.type is Type.FUNCTION_DECLARATION:
        next_code_token = tokenutil.CustomSearch(
            token,
            lambda t: t.type not in Type.NON_CODE_TYPES)
        if next_code_token.IsType(Type.FUNCTION_NAME):
          return next_code_token
        return
      # Skip types will end the search.
      if token.type not in skip_types:
        return
      token = token.next
def CompareParameters(self, params):
"""Computes the edit distance and list from the function params to the docs.
Uses the Levenshtein edit distance algorithm, with code modified from
http://en.wikibooks.org/wiki/Algorithm_implementation/Strings/Levenshtein_distance#Python
Args:
params: The parameter list for the function declaration.
Returns:
The edit distance, the edit list.
"""
source_len, target_len = len(self.ordered_params), len(params)
edit_lists = [[]]
distance = [[]]
for i in range(target_len+1):
edit_lists[0].append(['I'] * i)
distance[0].append(i)
for j in range(1, source_len+1):
edit_lists.append([['D'] * j])
distance.append([j])
for i in range(source_len):
for j in range(target_len):
cost = 1
if self.ordered_params[i] == params[j]:
cost = 0
deletion = distance[i][j+1] + 1
insertion = distance[i+1][j] + 1
substitution = distance[i][j] + cost
edit_list = None
best = None
if deletion <= insertion and deletion <= substitution:
# Deletion is best.
best = deletion
edit_list = list(edit_lists[i][j+1])
edit_list.append('D')
elif insertion <= substitution:
# Insertion is best.
best = insertion
edit_list = list(edit_lists[i+1][j])
edit_list.append('I')
edit_lists[i+1].append(edit_list)
else:
# Substitution is best.
best = substitution
edit_list = list(edit_lists[i][j])
if cost:
edit_list.append('S')
else:
edit_list.append('=')
edit_lists[i+1].append(edit_list)
distance[i+1].append(best)
return distance[source_len][target_len], edit_lists[source_len][target_len]
def __repr__(self):
"""Returns a string representation of this object.
Returns:
A string representation of this object.
"""
return '<DocComment: %s, %s>' % (
str(self.ordered_params), str(self.__flags))
#
# Helper methods used by DocFlag and DocComment to parse out flag information.
#
def _GetMatchingEndBraceAndContents(start_brace):
  """Returns the matching end brace and contents between the two braces.

  If any FLAG_ENDING_TYPE token is encountered before a matching end brace,
  then that token is used as the matching ending token. Contents will have all
  comment prefixes stripped out of them, and all comment prefixes in between
  the start and end tokens will be split out into separate DOC_PREFIX tokens.

  Args:
    start_brace: The DOC_START_BRACE token immediately before desired contents.

  Returns:
    The matching ending token (DOC_END_BRACE or FLAG_ENDING_TYPE) and a string
    of the contents between the matching tokens, minus any comment prefixes.
  """
  # Walk forward counting nested braces until the counts balance or a
  # flag-ending token forces an early stop.
  open_count = 1
  close_count = 0
  contents = []
  # We don't consider the start brace part of the type string.
  token = start_brace.next
  while open_count != close_count:
    if token.type == Type.DOC_START_BRACE:
      open_count += 1
    elif token.type == Type.DOC_END_BRACE:
      close_count += 1
    # DOC_PREFIX tokens are comment decoration, not content.
    if token.type != Type.DOC_PREFIX:
      contents.append(token.string)
    if token.type in Type.FLAG_ENDING_TYPES:
      break
    token = token.next
  # Don't include the end token (end brace, end doc comment, etc.) in type.
  token = token.previous
  contents = contents[:-1]
  return token, ''.join(contents)
def _GetNextPartialIdentifierToken(start_token):
  """Returns the first comment token after start_token containing an identifier.

  Each token after the start is checked for an identifier substring.  The
  search is abandoned when a FLAG_ENDING_TYPE token is found.

  Args:
    start_token: The token to start searching after.

  Returns:
    The first COMMENT token whose text contains an identifier, or None.
  """
  current = start_token.next
  while current and current.type not in Type.FLAG_ENDING_TYPES:
    found = javascripttokenizer.JavaScriptTokenizer.IDENTIFIER.search(
        current.string)
    if found is not None and current.type == Type.COMMENT:
      return current
    current = current.next
  return None
def _GetEndTokenAndContents(start_token):
  """Returns last content token and all contents before FLAG_ENDING_TYPE token.

  Comment prefixes are split into DOC_PREFIX tokens and stripped from the
  returned contents.

  Args:
    start_token: The token immediately before the first content token.

  Returns:
    The last content token and a string of all contents including start and
    end tokens, with comment prefixes stripped.  Contents is None when the
    accumulated text is empty/whitespace-only.
  """
  iterator = start_token
  last_line = iterator.line_number
  last_token = None
  contents = ''
  # Depth of nested inline doc flags ({@...}) currently open.
  doc_depth = 0
  while not iterator.type in Type.FLAG_ENDING_TYPES or doc_depth > 0:
    if (iterator.IsFirstInLine() and
        DocFlag.EMPTY_COMMENT_LINE.match(iterator.line)):
      # If we have a blank comment line, consider that an implicit
      # ending of the description. This handles a case like:
      #
      # * @return {boolean} True
      # *
      # * Note: This is a sentence.
      #
      # The note is not part of the @return description, but there was
      # no definitive ending token. Rather there was a line containing
      # only a doc comment prefix or whitespace.
      break
    # b/2983692
    # don't prematurely match against a @flag if inside a doc flag
    # need to think about what is the correct behavior for unterminated
    # inline doc flags
    if (iterator.type == Type.DOC_START_BRACE and
        iterator.next.type == Type.DOC_INLINE_FLAG):
      doc_depth += 1
    elif (iterator.type == Type.DOC_END_BRACE and
        doc_depth > 0):
      doc_depth -= 1
    if iterator.type in Type.FLAG_DESCRIPTION_TYPES:
      contents += iterator.string
      last_token = iterator
    iterator = iterator.next
    # Preserve line breaks in the accumulated description.
    if iterator.line_number != last_line:
      contents += '\n'
      last_line = iterator.line_number
  end_token = last_token
  if DocFlag.EMPTY_STRING.match(contents):
    contents = None
  else:
    # Strip trailing newline.
    contents = contents[:-1]
  return end_token, contents
class Function(object):
  """Data about a JavaScript function.

  Attributes:
    block_depth: Block depth the function began at.
    doc: The DocComment associated with the function.
    has_return: If the function has a return value.
    has_this: If the function references the 'this' object.
    has_throw: If the function contains a throw statement.
    is_assigned: If the function is part of an assignment.
    is_constructor: If the function is a constructor.
    is_interface: If the function is an interface declaration.
    name: The name of the function, whether given in the function keyword or
        as the lvalue the function is assigned to.
    start_token: First token of the function (the function' keyword token).
    end_token: Last token of the function (the closing '}' token).
    parameters: List of parameter names.
  """

  def __init__(self, block_depth, is_assigned, doc, name):
    # Structural facts supplied by the tracker at declaration time.
    self.block_depth = block_depth
    self.is_assigned = is_assigned
    self.name = name
    self.doc = doc
    # Derived from the doc comment.  These deliberately keep the original
    # truthiness (e.g. None when doc is None) rather than coercing to bool.
    self.is_constructor = doc and doc.HasFlag('constructor')
    self.is_interface = doc and doc.HasFlag('interface')
    # Filled in incrementally while the function body is processed.
    self.has_return = False
    self.has_throw = False
    self.has_this = False
    self.start_token = None
    self.end_token = None
    self.parameters = None
class StateTracker(object):
  """EcmaScript state tracker.

  Tracks block depth, function names, etc. within an EcmaScript token stream.
  """

  # Block-type markers pushed onto self._block_types for each START_BLOCK.
  OBJECT_LITERAL = 'o'
  CODE = 'c'

  # Matches inline "gjslint: disable=1,2,3" suppression comments.
  SUPPRESS_COMMENT_LINE = re.compile(r' *gjslint: *disable=([0-9,]+) *')

  def __init__(self, doc_flag=DocFlag):
    """Initializes a JavaScript token stream state tracker.

    Args:
      doc_flag: An optional custom DocFlag used for validating
          documentation flags.
    """
    self._doc_flag = doc_flag
    # All mutable state lives in Reset() so a tracker can be reused per file.
    self.Reset()
def Reset(self):
"""Resets the state tracker to prepare for processing a new page."""
self._block_depth = 0
self._is_block_close = False
self._paren_depth = 0
self._function_stack = []
self._functions_by_name = {}
self._last_comment = None
self._suppressions = []
self._doc_comment = None
self._cumulative_params = None
self._block_types = []
self._last_non_space_token = None
self._last_line = None
self._first_token = None
self._documented_identifiers = set()
self._variables_in_scope = []
def InFunction(self):
"""Returns true if the current token is within a function.
Returns:
True if the current token is within a function.
"""
return bool(self._function_stack)
def InConstructor(self):
"""Returns true if the current token is within a constructor.
Returns:
True if the current token is within a constructor.
"""
return self.InFunction() and self._function_stack[-1].is_constructor
def InInterfaceMethod(self):
"""Returns true if the current token is within an interface method.
Returns:
True if the current token is within an interface method.
"""
if self.InFunction():
if self._function_stack[-1].is_interface:
return True
else:
name = self._function_stack[-1].name
prototype_index = name.find('.prototype.')
if prototype_index != -1:
class_function_name = name[0:prototype_index]
if (class_function_name in self._functions_by_name and
self._functions_by_name[class_function_name].is_interface):
return True
return False
def InTopLevelFunction(self):
"""Returns true if the current token is within a top level function.
Returns:
True if the current token is within a top level function.
"""
return len(self._function_stack) == 1 and self.InTopLevel()
def InAssignedFunction(self):
"""Returns true if the current token is within a function variable.
Returns:
True if if the current token is within a function variable
"""
return self.InFunction() and self._function_stack[-1].is_assigned
def IsFunctionOpen(self):
"""Returns true if the current token is a function block open.
Returns:
True if the current token is a function block open.
"""
return (self._function_stack and
self._function_stack[-1].block_depth == self._block_depth - 1)
def IsFunctionClose(self):
"""Returns true if the current token is a function block close.
Returns:
True if the current token is a function block close.
"""
return (self._function_stack and
self._function_stack[-1].block_depth == self._block_depth)
def InBlock(self):
"""Returns true if the current token is within a block.
Returns:
True if the current token is within a block.
"""
return bool(self._block_depth)
def IsBlockClose(self):
"""Returns true if the current token is a block close.
Returns:
True if the current token is a block close.
"""
return self._is_block_close
def InObjectLiteral(self):
"""Returns true if the current token is within an object literal.
Returns:
True if the current token is within an object literal.
"""
return self._block_depth and self._block_types[-1] == self.OBJECT_LITERAL
def InObjectLiteralDescendant(self):
"""Returns true if the current token has an object literal ancestor.
Returns:
True if the current token has an object literal ancestor.
"""
return self.OBJECT_LITERAL in self._block_types
def InParentheses(self):
"""Returns true if the current token is within parentheses.
Returns:
True if the current token is within parentheses.
"""
return bool(self._paren_depth)
def ParenthesesDepth(self):
"""Returns the number of parens surrounding the token.
Returns:
The number of parenthesis surrounding the token.
"""
return self._paren_depth
def BlockDepth(self):
"""Returns the number of blocks in which the token is nested.
Returns:
The number of blocks in which the token is nested.
"""
return self._block_depth
def FunctionDepth(self):
"""Returns the number of functions in which the token is nested.
Returns:
The number of functions in which the token is nested.
"""
return len(self._function_stack)
def InTopLevel(self):
"""Whether we are at the top level in the class.
This function call is language specific. In some languages like
JavaScript, a function is top level if it is not inside any parenthesis.
In languages such as ActionScript, a function is top level if it is directly
within a class.
"""
raise TypeError('Abstract method InTopLevel not implemented')
def GetBlockType(self, token):
"""Determine the block type given a START_BLOCK token.
Code blocks come after parameters, keywords like else, and closing parens.
Args:
token: The current token. Can be assumed to be type START_BLOCK.
Returns:
Code block type for current token.
"""
raise TypeError('Abstract method GetBlockType not implemented')
def GetParams(self):
"""Returns the accumulated input params as an array.
In some EcmasSript languages, input params are specified like
(param:Type, param2:Type2, ...)
in other they are specified just as
(param, param2)
We handle both formats for specifying parameters here and leave
it to the compilers for each language to detect compile errors.
This allows more code to be reused between lint checkers for various
EcmaScript languages.
Returns:
The accumulated input params as an array.
"""
params = []
if self._cumulative_params:
params = re.compile(r'\s+').sub('', self._cumulative_params).split(',')
# Strip out the type from parameters of the form name:Type.
params = map(lambda param: param.split(':')[0], params)
return params
def GetLastComment(self):
"""Return the last plain comment that could be used as documentation.
Returns:
The last plain comment that could be used as documentation.
"""
return self._last_comment
def GetSuppressions(self):
"""Return a list of suppressed error codes for the current position.
Returns:
A list of suppressed error codes for the current position.
"""
return self._suppressions
def GetDocComment(self):
"""Return the most recent applicable documentation comment.
Returns:
The last applicable documentation comment.
"""
return self._doc_comment
def HasDocComment(self, identifier):
"""Returns whether the identifier has been documented yet.
Args:
identifier: The identifier.
Returns:
Whether the identifier has been documented yet.
"""
return identifier in self._documented_identifiers
def InDocComment(self):
"""Returns whether the current token is in a doc comment.
Returns:
Whether the current token is in a doc comment.
"""
return self._doc_comment and self._doc_comment.end_token is None
def GetDocFlag(self):
"""Returns the current documentation flags.
Returns:
The current documentation flags.
"""
return self._doc_flag
  def IsTypeToken(self, t):
    """Returns True if token t lies inside a doc flag's {type} annotation.

    Looks backwards from t for the owning DOC_FLAG (stopping at the doc
    comment start) and checks whether t falls strictly between that flag's
    recorded type start/end tokens.
    """
    if self.InDocComment() and t.type not in (Type.START_DOC_COMMENT,
        Type.DOC_FLAG, Type.DOC_INLINE_FLAG, Type.DOC_PREFIX):
      f = tokenutil.SearchUntil(t, [Type.DOC_FLAG], [Type.START_DOC_COMMENT],
                                None, True)
      if (f and f.attached_object.type_start_token is not None and
          f.attached_object.type_end_token is not None):
        # Strict inequalities: the delimiting tokens themselves don't count.
        return (tokenutil.Compare(t, f.attached_object.type_start_token) > 0 and
                tokenutil.Compare(t, f.attached_object.type_end_token) < 0)
    return False
def GetFunction(self):
"""Return the function the current code block is a part of.
Returns:
The current Function object.
"""
if self._function_stack:
return self._function_stack[-1]
def GetBlockDepth(self):
"""Return the block depth.
Returns:
The current block depth.
"""
return self._block_depth
def GetLastNonSpaceToken(self):
"""Return the last non whitespace token."""
return self._last_non_space_token
def GetLastLine(self):
"""Return the last line."""
return self._last_line
def GetFirstToken(self):
"""Return the very first token in the file."""
return self._first_token
def IsVariableInScope(self, token_string):
"""Checks if string is variable in current scope.
For given string it checks whether the string is a defined variable
(including function param) in current state.
E.g. if variables defined (variables in current scope) is docs
then docs, docs.length etc will be considered as variable in current
scope. This will help in avoding extra goog.require for variables.
Args:
token_string: String to check if its is a variable in current scope.
Returns:
true if given string is a variable in current scope.
"""
for variable in self._variables_in_scope:
if (token_string == variable
or token_string.startswith(variable + '.')):
return True
return False
  def HandleToken(self, token, last_non_space_token):
    """Handles the given token and updates state.

    Central dispatch over token.type: maintains block/paren depth, the
    comment and doc-comment state, the function stack, parameter
    accumulation, and the in-scope variable list.

    Args:
      token: The token to handle.
      last_non_space_token: The previous non-whitespace token (not referenced
          by this implementation; kept for interface compatibility).
    """
    self._is_block_close = False
    if not self._first_token:
      self._first_token = token
    # NOTE: local name 'type' shadows the builtin for the rest of this method.
    # Track block depth.
    type = token.type
    if type == Type.START_BLOCK:
      self._block_depth += 1
      # Subclasses need to handle block start very differently because
      # whether a block is a CODE or OBJECT_LITERAL block varies significantly
      # by language.
      self._block_types.append(self.GetBlockType(token))
      # When entering a function body, record its parameters.
      if self.InFunction():
        function = self._function_stack[-1]
        if self._block_depth == function.block_depth + 1:
          function.parameters = self.GetParams()
    # Track block depth.
    elif type == Type.END_BLOCK:
      self._is_block_close = not self.InObjectLiteral()
      self._block_depth -= 1
      self._block_types.pop()
    # Track parentheses depth.
    elif type == Type.START_PAREN:
      self._paren_depth += 1
    # Track parentheses depth.
    elif type == Type.END_PAREN:
      self._paren_depth -= 1
    elif type == Type.COMMENT:
      # A plain comment may carry inline gjslint suppressions.
      self._last_comment = token.string
      self._suppressions = self._ParseSuppressions(self._last_comment)
    elif type == Type.START_DOC_COMMENT:
      # A doc comment supersedes any pending plain comment/suppressions.
      self._last_comment = None
      self._suppressions = []
      self._doc_comment = DocComment(token)
    elif type == Type.END_DOC_COMMENT:
      self._doc_comment.end_token = token
    elif type in (Type.DOC_FLAG, Type.DOC_INLINE_FLAG):
      flag = self._doc_flag(token)
      token.attached_object = flag
      self._doc_comment.AddFlag(flag)
      if flag.flag_type == 'suppress':
        self._doc_comment.AddSuppression(token)
    elif type == Type.FUNCTION_DECLARATION:
      last_code = tokenutil.SearchExcept(token, Type.NON_CODE_TYPES, None,
                                         True)
      doc = None
      # Only functions outside of parens are eligible for documentation.
      if not self._paren_depth:
        doc = self._doc_comment
      name = ''
      is_assigned = last_code and (last_code.IsOperator('=') or
          last_code.IsOperator('||') or last_code.IsOperator('&&') or
          (last_code.IsOperator(':') and not self.InObjectLiteral()))
      if is_assigned:
        # TODO(robbyw): This breaks for x[2] = ...
        # Must use loop to find full function name in the case of line-wrapped
        # declarations (bug 1220601) like:
        # my.function.foo.
        #     bar = function() ...
        identifier = tokenutil.Search(last_code, Type.SIMPLE_LVALUE, None, True)
        while identifier and identifier.type in (
            Type.IDENTIFIER, Type.SIMPLE_LVALUE):
          name = identifier.string + name
          # Traverse behind us, skipping whitespace and comments.
          while True:
            identifier = identifier.previous
            if not identifier or not identifier.type in Type.NON_CODE_TYPES:
              break
      else:
        # Unassigned declaration: collect the (possibly dotted) function name
        # from the FUNCTION_NAME tokens following the keyword.
        next_token = tokenutil.SearchExcept(token, Type.NON_CODE_TYPES)
        while next_token and next_token.IsType(Type.FUNCTION_NAME):
          name += next_token.string
          next_token = tokenutil.Search(next_token, Type.FUNCTION_NAME, 2)
      function = Function(self._block_depth, is_assigned, doc, name)
      function.start_token = token
      self._function_stack.append(function)
      self._functions_by_name[name] = function
      # Add a delimiter in stack for scope variables to define start of
      # function. This helps in popping variables of this function when
      # function declaration ends.
      self._variables_in_scope.append('')
    elif type == Type.START_PARAMETERS:
      self._cumulative_params = ''
    elif type == Type.PARAMETERS:
      self._cumulative_params += token.string
      self._variables_in_scope.extend(self.GetParams())
    elif type == Type.KEYWORD and token.string == 'return':
      next_token = tokenutil.SearchExcept(token, Type.NON_CODE_TYPES)
      # A bare "return;" does not count as returning a value.
      if not next_token.IsType(Type.SEMICOLON):
        function = self.GetFunction()
        if function:
          function.has_return = True
    elif type == Type.KEYWORD and token.string == 'throw':
      function = self.GetFunction()
      if function:
        function.has_throw = True
    elif type == Type.KEYWORD and token.string == 'var':
      function = self.GetFunction()
      next_token = tokenutil.Search(token, [Type.IDENTIFIER,
                                            Type.SIMPLE_LVALUE])
      if next_token:
        if next_token.type == Type.SIMPLE_LVALUE:
          self._variables_in_scope.append(next_token.values['identifier'])
        else:
          self._variables_in_scope.append(next_token.string)
    elif type == Type.SIMPLE_LVALUE:
      identifier = token.values['identifier']
      jsdoc = self.GetDocComment()
      if jsdoc:
        self._documented_identifiers.add(identifier)
      self._HandleIdentifier(identifier, True)
    elif type == Type.IDENTIFIER:
      self._HandleIdentifier(token.string, False)
      # Detect documented non-assignments.
      next_token = tokenutil.SearchExcept(token, Type.NON_CODE_TYPES)
      if next_token and next_token.IsType(Type.SEMICOLON):
        if (self._last_non_space_token and
            self._last_non_space_token.IsType(Type.END_DOC_COMMENT)):
          self._documented_identifiers.add(token.string)
def _HandleIdentifier(self, identifier, is_assignment):
"""Process the given identifier.
Currently checks if it references 'this' and annotates the function
accordingly.
Args:
identifier: The identifer to process.
is_assignment: Whether the identifer is being written to.
"""
if identifier == 'this' or identifier.startswith('this.'):
function = self.GetFunction()
if function:
function.has_this = True
def _ParseSuppressions(self, comment):
"""Parse a comment string for error suppressions.
Args:
comment: The comment string to parse.
Returns:
An array of error codes which should be suppressed.
"""
m = StateTracker.SUPPRESS_COMMENT_LINE.match(comment)
if not m:
return []
suppressions = []
error_list = m.group(1).split(',')
for error in error_list:
suppressions.append(int(error))
return suppressions
  def HandleAfterToken(self, token):
    """Handle updating state after a token has been checked.

    This function should be used for destructive state changes such as
    deleting a tracked object.

    Args:
      token: The token to handle.
    """
    # NOTE: local name 'type' shadows the builtin for the rest of this method.
    type = token.type
    if type == Type.SEMICOLON or type == Type.END_PAREN or (
        type == Type.END_BRACKET and
        self._last_non_space_token.type not in (
            Type.SINGLE_QUOTE_STRING_END, Type.DOUBLE_QUOTE_STRING_END)):
      # We end on any numeric array index, but keep going for string based
      # array indices so that we pick up manually exported identifiers.
      self._doc_comment = None
      self._last_comment = None
      self._suppressions = []
    elif type == Type.END_BLOCK:
      self._doc_comment = None
      self._last_comment = None
      self._suppressions = []
      if self.InFunction() and self.IsFunctionClose():
        # TODO(robbyw): Detect the function's name for better errors.
        function = self._function_stack.pop()
        function.end_token = token
        # Pop all variables till delimiter ('') those were defined in the
        # function being closed so make them out of scope.
        while self._variables_in_scope and self._variables_in_scope[-1]:
          self._variables_in_scope.pop()
        # Pop delimiter
        if self._variables_in_scope:
          self._variables_in_scope.pop()
    elif type == Type.END_PARAMETERS and self._doc_comment:
      self._doc_comment = None
      self._last_comment = None
      self._suppressions = []
    if not token.IsAnyType(Type.WHITESPACE, Type.BLANK_LINE):
      self._last_non_space_token = token
    self._last_line = token.line
| apache-2.0 |
Ayub-Khan/edx-platform | lms/djangoapps/verify_student/tests/test_integration.py | 125 | 2388 | """
Integration tests of the payment flow, including course mode selection.
"""
from django.core.urlresolvers import reverse
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from student.tests.factories import UserFactory
from student.models import CourseEnrollment
from course_modes.tests.factories import CourseModeFactory
class TestProfEdVerification(ModuleStoreTestCase):
    """
    Integration test for professional ed verification, including course mode selection.
    """
    # Choose an uncommon number for the price so we can search for it on the page
    MIN_PRICE = 1438
    def setUp(self):
        # Create a logged-in user and a course that offers only the
        # "professional" mode at MIN_PRICE, then precompute the URLs
        # the test navigates between.
        super(TestProfEdVerification, self).setUp()
        self.user = UserFactory.create(username="rusty", password="test")
        self.client.login(username="rusty", password="test")
        course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
        self.course_key = course.id
        CourseModeFactory(
            mode_slug="professional",
            course_id=self.course_key,
            min_price=self.MIN_PRICE,
            suggested_prices=''
        )
        # URL names take the serialized course key as their only argument.
        self.urls = {
            'course_modes_choose': reverse(
                'course_modes_choose',
                args=[unicode(self.course_key)]
            ),
            'verify_student_start_flow': reverse(
                'verify_student_start_flow',
                args=[unicode(self.course_key)]
            ),
        }
    def test_start_flow(self):
        # Go to the course mode page, expecting a redirect to the intro step of the
        # payment flow (since this is a professional ed course). Otherwise, the student
        # would have the option to choose their track.
        resp = self.client.get(self.urls['course_modes_choose'], follow=True)
        self.assertRedirects(resp, self.urls['verify_student_start_flow'])
        # For professional ed courses, expect that the student is NOT enrolled
        # automatically in the course.
        self.assertFalse(CourseEnrollment.is_enrolled(self.user, self.course_key))
        # On the first page of the flow, verify that there's a button allowing the user
        # to proceed to the payment processor; this is the only action the user is allowed to take.
        self.assertContains(resp, 'payment-button')
| agpl-3.0 |
rtucker/sycamore | Sycamore/support/pytz/zoneinfo/Australia/ACT.py | 9 | 6196 | '''tzinfo timezone information for Australia/ACT.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class ACT(DstTzInfo):
    '''Australia/ACT timezone definition. See datetime.tzinfo for details'''

    zone = 'Australia/ACT'

    # UTC instants at which the zone's UTC offset changes, in order.
    _utc_transition_times = [
        d(1,1,1,0,0,0), d(1916,12,31,14,1,0), d(1917,3,24,15,0,0), d(1941,12,31,16,0,0),
        d(1942,3,28,15,0,0), d(1942,9,26,16,0,0), d(1943,3,27,15,0,0), d(1943,10,2,16,0,0),
        d(1944,3,25,15,0,0), d(1971,10,30,16,0,0), d(1972,2,26,16,0,0), d(1972,10,28,16,0,0),
        d(1973,3,3,16,0,0), d(1973,10,27,16,0,0), d(1974,3,2,16,0,0), d(1974,10,26,16,0,0),
        d(1975,3,1,16,0,0), d(1975,10,25,16,0,0), d(1976,3,6,16,0,0), d(1976,10,30,16,0,0),
        d(1977,3,5,16,0,0), d(1977,10,29,16,0,0), d(1978,3,4,16,0,0), d(1978,10,28,16,0,0),
        d(1979,3,3,16,0,0), d(1979,10,27,16,0,0), d(1980,3,1,16,0,0), d(1980,10,25,16,0,0),
        d(1981,2,28,16,0,0), d(1981,10,24,16,0,0), d(1982,4,3,16,0,0), d(1982,10,30,16,0,0),
        d(1983,3,5,16,0,0), d(1983,10,29,16,0,0), d(1984,3,3,16,0,0), d(1984,10,27,16,0,0),
        d(1985,3,2,16,0,0), d(1985,10,26,16,0,0), d(1986,3,15,16,0,0), d(1986,10,18,16,0,0),
        d(1987,3,14,16,0,0), d(1987,10,24,16,0,0), d(1988,3,19,16,0,0), d(1988,10,29,16,0,0),
        d(1989,3,18,16,0,0), d(1989,10,28,16,0,0), d(1990,3,3,16,0,0), d(1990,10,27,16,0,0),
        d(1991,3,2,16,0,0), d(1991,10,26,16,0,0), d(1992,2,29,16,0,0), d(1992,10,24,16,0,0),
        d(1993,3,6,16,0,0), d(1993,10,30,16,0,0), d(1994,3,5,16,0,0), d(1994,10,29,16,0,0),
        d(1995,3,4,16,0,0), d(1995,10,28,16,0,0), d(1996,3,30,16,0,0), d(1996,10,26,16,0,0),
        d(1997,3,29,16,0,0), d(1997,10,25,16,0,0), d(1998,3,28,16,0,0), d(1998,10,24,16,0,0),
        d(1999,3,27,16,0,0), d(1999,10,30,16,0,0), d(2000,3,25,16,0,0), d(2000,8,26,16,0,0),
        d(2001,3,24,16,0,0), d(2001,10,27,16,0,0), d(2002,3,30,16,0,0), d(2002,10,26,16,0,0),
        d(2003,3,29,16,0,0), d(2003,10,25,16,0,0), d(2004,3,27,16,0,0), d(2004,10,30,16,0,0),
        d(2005,3,26,16,0,0), d(2005,10,29,16,0,0), d(2006,4,1,16,0,0), d(2006,10,28,16,0,0),
        d(2007,3,24,16,0,0), d(2007,10,27,16,0,0), d(2008,3,29,16,0,0), d(2008,10,25,16,0,0),
        d(2009,3,28,16,0,0), d(2009,10,24,16,0,0), d(2010,3,27,16,0,0), d(2010,10,30,16,0,0),
        d(2011,3,26,16,0,0), d(2011,10,29,16,0,0), d(2012,3,24,16,0,0), d(2012,10,27,16,0,0),
        d(2013,3,30,16,0,0), d(2013,10,26,16,0,0), d(2014,3,29,16,0,0), d(2014,10,25,16,0,0),
        d(2015,3,28,16,0,0), d(2015,10,24,16,0,0), d(2016,3,26,16,0,0), d(2016,10,29,16,0,0),
        d(2017,3,25,16,0,0), d(2017,10,28,16,0,0), d(2018,3,24,16,0,0), d(2018,10,27,16,0,0),
        d(2019,3,30,16,0,0), d(2019,10,26,16,0,0), d(2020,3,28,16,0,0), d(2020,10,24,16,0,0),
        d(2021,3,27,16,0,0), d(2021,10,30,16,0,0), d(2022,3,26,16,0,0), d(2022,10,29,16,0,0),
        d(2023,3,25,16,0,0), d(2023,10,28,16,0,0), d(2024,3,30,16,0,0), d(2024,10,26,16,0,0),
        d(2025,3,29,16,0,0), d(2025,10,25,16,0,0), d(2026,3,28,16,0,0), d(2026,10,24,16,0,0),
        d(2027,3,27,16,0,0), d(2027,10,30,16,0,0), d(2028,3,25,16,0,0), d(2028,10,28,16,0,0),
        d(2029,3,24,16,0,0), d(2029,10,27,16,0,0), d(2030,3,30,16,0,0), d(2030,10,26,16,0,0),
        d(2031,3,29,16,0,0), d(2031,10,25,16,0,0), d(2032,3,27,16,0,0), d(2032,10,30,16,0,0),
        d(2033,3,26,16,0,0), d(2033,10,29,16,0,0), d(2034,3,25,16,0,0), d(2034,10,28,16,0,0),
        d(2035,3,24,16,0,0), d(2035,10,27,16,0,0), d(2036,3,29,16,0,0), d(2036,10,25,16,0,0),
        d(2037,3,28,16,0,0), d(2037,10,24,16,0,0),
    ]

    # The per-transition offsets strictly alternate between standard time
    # (UTC+10, no DST) and daylight-saving time (UTC+11, one hour of DST),
    # starting with standard time, so the 142 entries are generated rather
    # than listed out.  memorized_ttinfo interns equal tuples, so the
    # resulting objects are identical to an explicit listing.
    _transition_info = [
        i(36000,0,'EST') if index % 2 == 0 else i(39600,3600,'EST')
        for index in range(142)
    ]
| gpl-2.0 |
cognitiveclass/edx-platform | common/lib/xmodule/xmodule/modulestore/tests/test_xml_importer.py | 100 | 11947 | """
Tests for XML importer.
"""
import mock
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from xblock.fields import String, Scope, ScopeIds, List
from xblock.runtime import Runtime, KvsFieldData, DictKeyValueStore
from xmodule.x_module import XModuleMixin
from opaque_keys.edx.locations import Location
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.modulestore.xml_importer import _update_and_import_module, _update_module_location
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xmodule.tests import DATA_DIR
from uuid import uuid4
import unittest
import importlib
class ModuleStoreNoSettings(unittest.TestCase):
    """
    A mixin to create a mongo modulestore that avoids settings
    """
    HOST = MONGO_HOST
    PORT = MONGO_PORT_NUM
    # Random suffix so concurrent test runs do not collide on the same DB.
    DB = 'test_mongo_%s' % uuid4().hex[:5]
    COLLECTION = 'modulestore'
    FS_ROOT = DATA_DIR
    DEFAULT_CLASS = 'xmodule.modulestore.tests.test_xml_importer.StubXBlock'
    # Renders every template to the empty string; tests never inspect markup.
    RENDER_TEMPLATE = lambda t_n, d, ctx=None, nsp='main': ''
    modulestore_options = {
        'default_class': DEFAULT_CLASS,
        'fs_root': DATA_DIR,
        'render_template': RENDER_TEMPLATE,
    }
    DOC_STORE_CONFIG = {
        'host': HOST,
        'port': PORT,
        'db': DB,
        'collection': COLLECTION,
    }
    # Mirrors the shape of Django's MODULESTORE setting, minus Django itself.
    MODULESTORE = {
        'ENGINE': 'xmodule.modulestore.mongo.DraftMongoModuleStore',
        'DOC_STORE_CONFIG': DOC_STORE_CONFIG,
        'OPTIONS': modulestore_options
    }
    # Class-level cache; populated lazily by the module-level modulestore().
    modulestore = None
    def cleanup_modulestore(self):
        """
        cleanup
        """
        if self.modulestore:
            self.modulestore._drop_database()  # pylint: disable=protected-access
    def setUp(self):
        """
        Add cleanups
        """
        self.addCleanup(self.cleanup_modulestore)
        super(ModuleStoreNoSettings, self).setUp()
#===========================================
def modulestore():
    """
    Mock the django dependent global modulestore function to disentangle
    tests from django.  Lazily builds (and then caches on
    ModuleStoreNoSettings) a single modulestore instance.
    """
    def _resolve(dotted_path):
        """Import the module part of ``dotted_path`` and return its attribute."""
        module_path, _, attr_name = dotted_path.rpartition('.')
        return getattr(importlib.import_module(module_path), attr_name)

    if ModuleStoreNoSettings.modulestore is None:
        engine_class = _resolve(ModuleStoreNoSettings.MODULESTORE['ENGINE'])
        options = dict(ModuleStoreNoSettings.MODULESTORE['OPTIONS'])
        options['render_template'] = render_to_template_mock
        # pylint: disable=star-args
        ModuleStoreNoSettings.modulestore = engine_class(
            None,  # contentstore
            ModuleStoreNoSettings.MODULESTORE['DOC_STORE_CONFIG'],
            branch_setting_func=lambda: ModuleStoreEnum.Branch.draft_preferred,
            **options
        )
    return ModuleStoreNoSettings.modulestore
# pylint: disable=unused-argument
def render_to_template_mock(*args):
    """No-op template renderer stand-in: accepts anything, returns None."""
    return None
class StubXBlock(XModuleMixin, InheritanceMixin):
    """
    Stub XBlock used for testing.

    Declares one field per scope (content / settings) so tests can verify
    that explicitly-set values survive namespace remapping while unset
    fields keep their defaults.
    """
    # Scope.content field with a default, used to detect explicit sets.
    test_content_field = String(
        help="A content field that will be explicitly set",
        scope=Scope.content,
        default="default value"
    )
    # Scope.settings counterpart of the field above.
    test_settings_field = String(
        help="A settings field that will be explicitly set",
        scope=Scope.settings,
        default="default value"
    )
class RemapNamespaceTest(ModuleStoreNoSettings):
    """
    Test that remapping the namespace from import to the actual course location.

    Exercises _update_and_import_module: explicitly-set field values must be
    preserved (and remain marked explicitly set), while untouched fields must
    keep defaults and stay out of the explicitly-set lists.
    """
    def setUp(self):
        """
        Create a stub XBlock backed by in-memory storage.
        """
        self.runtime = mock.MagicMock(Runtime)
        self.field_data = KvsFieldData(kvs=DictKeyValueStore())
        self.scope_ids = ScopeIds('Bob', 'stubxblock', '123', 'import')
        self.xblock = StubXBlock(self.runtime, self.field_data, self.scope_ids)
        super(RemapNamespaceTest, self).setUp()
    def test_remap_namespace_native_xblock(self):
        # Set the XBlock's location
        self.xblock.location = Location("org", "import", "run", "category", "stubxblock")
        # Explicitly set the content and settings fields
        self.xblock.test_content_field = "Explicitly set"
        self.xblock.test_settings_field = "Explicitly set"
        self.xblock.save()
        # Move to different runtime w/ different course id
        target_location_namespace = SlashSeparatedCourseKey("org", "course", "run")
        # NOTE(review): 999 appears to be the id of the user performing the
        # import — confirm against _update_and_import_module's signature.
        new_version = _update_and_import_module(
            self.xblock,
            modulestore(),
            999,
            self.xblock.location.course_key,
            target_location_namespace,
            do_import_static=False
        )
        # Check the XBlock's location
        self.assertEqual(new_version.location.course_key, target_location_namespace)
        # Check the values of the fields.
        # The content and settings fields should be preserved
        self.assertEqual(new_version.test_content_field, 'Explicitly set')
        self.assertEqual(new_version.test_settings_field, 'Explicitly set')
        # Expect that these fields are marked explicitly set
        self.assertIn(
            'test_content_field',
            new_version.get_explicitly_set_fields_by_scope(scope=Scope.content)
        )
        self.assertIn(
            'test_settings_field',
            new_version.get_explicitly_set_fields_by_scope(scope=Scope.settings)
        )
    def test_remap_namespace_native_xblock_default_values(self):
        # Set the XBlock's location
        self.xblock.location = Location("org", "import", "run", "category", "stubxblock")
        # Do NOT set any values, so the fields should use the defaults
        self.xblock.save()
        # Remap the namespace
        target_location_namespace = Location("org", "course", "run", "category", "stubxblock")
        new_version = _update_and_import_module(
            self.xblock,
            modulestore(),
            999,
            self.xblock.location.course_key,
            target_location_namespace.course_key,
            do_import_static=False
        )
        # Check the values of the fields.
        # The content and settings fields should be the default values
        self.assertEqual(new_version.test_content_field, 'default value')
        self.assertEqual(new_version.test_settings_field, 'default value')
        # The fields should NOT appear in the explicitly set fields
        self.assertNotIn(
            'test_content_field',
            new_version.get_explicitly_set_fields_by_scope(scope=Scope.content)
        )
        self.assertNotIn(
            'test_settings_field',
            new_version.get_explicitly_set_fields_by_scope(scope=Scope.settings)
        )
    def test_remap_namespace_native_xblock_inherited_values(self):
        # Set the XBlock's location
        self.xblock.location = Location("org", "import", "run", "category", "stubxblock")
        self.xblock.save()
        # Remap the namespace
        target_location_namespace = Location("org", "course", "run", "category", "stubxblock")
        new_version = _update_and_import_module(
            self.xblock,
            modulestore(),
            999,
            self.xblock.location.course_key,
            target_location_namespace.course_key,
            do_import_static=False
        )
        # Inherited fields should NOT be explicitly set
        self.assertNotIn(
            'start', new_version.get_explicitly_set_fields_by_scope(scope=Scope.settings)
        )
        self.assertNotIn(
            'graded', new_version.get_explicitly_set_fields_by_scope(scope=Scope.settings)
        )
class StubXBlockWithMutableFields(StubXBlock):
    """
    Stub XBlock used for testing mutable fields and children.

    Extends StubXBlock with two mutable (List) fields and child support so
    location-update tests can cover mutable values and the children field.
    """
    has_children = True
    # Mutable Scope.content field (no default: unset unless assigned).
    test_mutable_content_field = List(
        help="A mutable content field that will be explicitly set",
        scope=Scope.content,
    )
    # Mutable Scope.settings counterpart.
    test_mutable_settings_field = List(
        help="A mutable settings field that will be explicitly set",
        scope=Scope.settings,
    )
class UpdateLocationTest(ModuleStoreNoSettings):
    """
    Test that updating location preserves "is_set_on" status on fields
    """
    # Field names grouped by scope, used by _check_explicitly_set below.
    CONTENT_FIELDS = ['test_content_field', 'test_mutable_content_field']
    SETTINGS_FIELDS = ['test_settings_field', 'test_mutable_settings_field']
    CHILDREN_FIELDS = ['children']
    def setUp(self):
        """
        Create a stub XBlock backed by in-memory storage.
        """
        self.runtime = mock.MagicMock(Runtime)
        self.field_data = KvsFieldData(kvs=DictKeyValueStore())
        self.scope_ids = ScopeIds('Bob', 'mutablestubxblock', '123', 'import')
        self.xblock = StubXBlockWithMutableFields(self.runtime, self.field_data, self.scope_ids)
        self.fake_children_locations = [
            BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'mutablestubxblock', 'child1'),
            BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'mutablestubxblock', 'child2'),
        ]
        super(UpdateLocationTest, self).setUp()
    def _check_explicitly_set(self, block, scope, expected_explicitly_set_fields, should_be_set=False):
        """ Gets fields that are explicitly set on block and checks if they are marked as explicitly set or not """
        actual_explicitly_set_fields = block.get_explicitly_set_fields_by_scope(scope=scope)
        # Pick the assertion direction once, then apply it to every field.
        assertion = self.assertIn if should_be_set else self.assertNotIn
        for field in expected_explicitly_set_fields:
            assertion(field, actual_explicitly_set_fields)
    def test_update_locations_native_xblock(self):
        """ Update locations updates location and keeps values and "is_set_on" status """
        # Set the XBlock's location
        self.xblock.location = Location("org", "import", "run", "category", "stubxblock")
        # Explicitly set the content, settings and children fields
        self.xblock.test_content_field = 'Explicitly set'
        self.xblock.test_settings_field = 'Explicitly set'
        self.xblock.test_mutable_content_field = [1, 2, 3]
        self.xblock.test_mutable_settings_field = ["a", "s", "d"]
        self.xblock.children = self.fake_children_locations # pylint:disable=attribute-defined-outside-init
        self.xblock.save()
        # Update location
        target_location = self.xblock.location.replace(revision='draft')
        _update_module_location(self.xblock, target_location)
        new_version = self.xblock # _update_module_location updates in-place
        # Check the XBlock's location
        self.assertEqual(new_version.location, target_location)
        # Check the values of the fields.
        # The content, settings and children fields should be preserved
        self.assertEqual(new_version.test_content_field, 'Explicitly set')
        self.assertEqual(new_version.test_settings_field, 'Explicitly set')
        self.assertEqual(new_version.test_mutable_content_field, [1, 2, 3])
        self.assertEqual(new_version.test_mutable_settings_field, ["a", "s", "d"])
        self.assertEqual(new_version.children, self.fake_children_locations)
        # Expect that these fields are marked explicitly set
        self._check_explicitly_set(new_version, Scope.content, self.CONTENT_FIELDS, should_be_set=True)
        self._check_explicitly_set(new_version, Scope.settings, self.SETTINGS_FIELDS, should_be_set=True)
        self._check_explicitly_set(new_version, Scope.children, self.CHILDREN_FIELDS, should_be_set=True)
        # Expect these fields pass "is_set_on" test
        for field in self.CONTENT_FIELDS + self.SETTINGS_FIELDS + self.CHILDREN_FIELDS:
            self.assertTrue(new_version.fields[field].is_set_on(new_version))
| agpl-3.0 |
faspl/pig | test/e2e/pig/udfs/cpython/scriptingudf.py | 28 | 2464 | ############################################################################
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pig_util import outputSchema
@outputSchema("as:int")
def square(num):
    """Return num squared as an int, or None for a null input."""
    # PEP 8: compare to None with `is`, not `==`.
    if num is None:
        return None
    return num * num
@outputSchema("as:double")
def squareDouble(num):
    """Return num squared as a double, or None for a null input."""
    # PEP 8: compare to None with `is`, not `==`.
    if num is None:
        return None
    return num * num
@outputSchema("as:int")
def redirect(num):
    """Delegate to square(); exercises one UDF calling another."""
    result = square(num)
    return result
@outputSchema("word:chararray")
def concat(word):
return word + word
@outputSchema("word:bytearray")
def byteconcat(word):
return word + word
@outputSchema("t:tuple(outm:map[], outt:tuple(gpa:double, age:int, name:chararray), outb:{t:(gpa:double, age:int, name:chararray)})")
def complexTypes(m, t, b):
    """Transform a (map, tuple, bag) triple, propagating null for null inputs.

    - map: each value is replaced by its length
    - tuple and each bag record: fields are reversed (index 2, 1, 0)
    """
    if m is None:
        outm = None
    else:
        outm = {}
        # items() works on both Python 2 and 3; the original used the
        # Python-2-only iteritems().
        for k, v in m.items():
            outm[k] = len(v)
    if b is None:
        outb = None
    else:
        outb = []
        for r in b:
            outb.append((r[2], r[1], r[0]))
    if t is None:
        outt = None
    else:
        outt = (t[2], t[1], t[0])
    return (outm, outt, outb)
@outputSchema("cnt:long")
def count(bag):
cnt = 0
for r in bag:
cnt += 1
return cnt
@outputSchema("gpa:double")
def adjustgpa(gpa, instate):
    """Return gpa for in-state students, gpa + 1 otherwise; None if instate is null."""
    # PEP 8: compare to None with `is`, not `==`.
    if instate is None:
        return None
    elif instate:
        return gpa
    else:
        return gpa + 1
@outputSchema("retired:boolean")
def isretired(age):
    """Return True for age >= 60, False otherwise; None for a null age."""
    # PEP 8: compare to None with `is`, not `==`.
    if age is None:
        return None
    # Comparison already yields the required boolean.
    return age >= 60
@outputSchema("words:{(word:chararray)}")
def tokenize(sentence):
return [ (w,) for w in sentence.split(" ") ]
| apache-2.0 |
mattt416/neutron | neutron/db/migration/alembic_migrations/versions/liberty/expand/1c844d1677f7_dns_nameservers_order.py | 36 | 1056 | # Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""add order to dnsnameservers
Revision ID: 1c844d1677f7
Revises: 26c371498592
Create Date: 2015-07-21 22:59:03.383850
"""
# revision identifiers, used by Alembic.
revision = '1c844d1677f7'
down_revision = '26c371498592'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Add an 'order' column recording DNS nameserver priority.
    # server_default='0' backfills existing rows so NOT NULL is satisfied.
    op.add_column('dnsnameservers',
                  sa.Column('order', sa.Integer(),
                            server_default='0', nullable=False))
| apache-2.0 |
svn2github/chromium-depot-tools | third_party/boto/core/dictresponse.py | 63 | 6158 | # Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import xml.sax
def pythonize_name(name, sep='_'):
    """Convert a CamelCase name into a lower-case, sep-delimited name.

    E.g. 'CamelCase' -> 'camel_case', 'DNSName' -> 'd_n_s_name'.

    Bug fix: the original tested ``name[0].isupper`` (the bound method,
    which is always truthy) instead of calling it — the net effect was
    that the first character was always lower-cased, so we now do that
    unconditionally. Also returns '' for an empty name instead of
    raising IndexError.
    """
    if not name:
        return ''
    s = name[0].lower()
    for c in name[1:]:
        if c.isupper():
            s += sep + c.lower()
        else:
            s += c
    return s
class XmlHandler(xml.sax.ContentHandler):
    """SAX handler that routes parse events into a tree of node objects.

    Keeps a stack of (element_name, node) pairs. Each node's
    ``startElement`` may return a child node (or a (name, node) tuple)
    which is pushed and receives subsequent events until its matching
    end tag pops it. The 'root' sentinel entry is never popped.
    """

    def __init__(self, root_node, connection):
        self.connection = connection
        # Stack of (element_name, node) pairs; the root sentinel stays put.
        self.nodes = [('root', root_node)]
        self.current_text = ''

    def startElement(self, name, attrs):
        self.current_text = ''
        t = self.nodes[-1][1].startElement(name, attrs, self.connection)
        # PEP 8: compare to None with `is not`, not `!=`.
        if t is not None:
            if isinstance(t, tuple):
                # Node supplied its own (name, node) pair.
                self.nodes.append(t)
            else:
                self.nodes.append((name, t))

    def endElement(self, name):
        self.nodes[-1][1].endElement(name, self.current_text, self.connection)
        # Only pop if the closing tag matches the element that was pushed.
        if self.nodes[-1][0] == name:
            self.nodes.pop()
        self.current_text = ''

    def characters(self, content):
        # Text may arrive in multiple chunks; accumulate until endElement.
        self.current_text += content

    def parse(self, s):
        xml.sax.parseString(s, self)
class Element(dict):
    """A dict-backed tree node built from XML parse events.

    Child elements and text values are stored as dict entries keyed by
    (optionally pythonized) tag name. Attribute access falls through to
    dict lookup and then recursively searches child Elements.
    """
    def __init__(self, connection=None, element_name=None,
                 stack=None, parent=None, list_marker=None,
                 item_marker=None, pythonize_name=False):
        dict.__init__(self)
        self.connection = connection
        self.element_name = element_name
        # Tag-name suffixes that denote a list container / list item.
        self.list_marker = list_marker or ['Set']
        self.item_marker = item_marker or ['member', 'item']
        # The stack object is shared with child Elements (passed through
        # in startElement), so pushes/pops are visible tree-wide.
        if stack is None:
            self.stack = []
        else:
            self.stack = stack
        self.pythonize_name = pythonize_name
        self.parent = parent
    def __getattr__(self, key):
        # Dict entry first, then depth-first search through child Elements.
        if key in self:
            return self[key]
        for k in self:
            e = self[k]
            if isinstance(e, Element):
                try:
                    return getattr(e, key)
                except AttributeError:
                    pass
        raise AttributeError
    def get_name(self, name):
        # Optionally convert CamelCase tag names to snake_case keys.
        if self.pythonize_name:
            name = pythonize_name(name)
        return name
    def startElement(self, name, attrs, connection):
        self.stack.append(name)
        # Tags ending in a list marker become ListElement containers.
        for lm in self.list_marker:
            if name.endswith(lm):
                l = ListElement(self.connection, name, self.list_marker,
                                self.item_marker, self.pythonize_name)
                self[self.get_name(name)] = l
                return l
        # NOTE(review): the stack was appended to just above, so this
        # condition is always true here.
        if len(self.stack) > 0:
            element_name = self.stack[-1]
            e = Element(self.connection, element_name, self.stack, self,
                        self.list_marker, self.item_marker,
                        self.pythonize_name)
            self[self.get_name(element_name)] = e
            return (element_name, e)
        else:
            return None
    def endElement(self, name, value, connection):
        if len(self.stack) > 0:
            self.stack.pop()
        value = value.strip()
        # Non-empty text is recorded on the parent, keyed by this tag.
        if value:
            if isinstance(self.parent, Element):
                self.parent[self.get_name(name)] = value
            elif isinstance(self.parent, ListElement):
                self.parent.append(value)
class ListElement(list):
    """A list node in the parsed-response tree.

    Collects repeated child elements (item markers) as Elements appended
    to itself; nested list markers become child ListElements stored as
    attributes.
    """
    def __init__(self, connection=None, element_name=None,
                 list_marker=None, item_marker=None,
                 pythonize_name=False):
        list.__init__(self)
        self.connection = connection
        self.element_name = element_name
        # Avoid mutable default arguments; these defaults match the
        # original ['Set'] / ('member', 'item') values.
        self.list_marker = ['Set'] if list_marker is None else list_marker
        self.item_marker = ('member', 'item') if item_marker is None else item_marker
        self.pythonize_name = pythonize_name
    def get_name(self, name):
        if self.pythonize_name:
            # Bug fix: the original called utils.pythonize_name(), but no
            # `utils` module is imported here (NameError). Use the
            # module-level pythonize_name(), as Element.get_name does.
            name = pythonize_name(name)
        return name
    def startElement(self, name, attrs, connection):
        # Nested list containers become child ListElements.
        for lm in self.list_marker:
            if name.endswith(lm):
                l = ListElement(self.connection, name,
                                self.list_marker, self.item_marker,
                                self.pythonize_name)
                setattr(self, self.get_name(name), l)
                return l
        # Item markers open a new Element that is appended to this list.
        if name in self.item_marker:
            e = Element(self.connection, name, parent=self,
                        list_marker=self.list_marker,
                        item_marker=self.item_marker,
                        pythonize_name=self.pythonize_name)
            self.append(e)
            return e
        else:
            return None
    def endElement(self, name, value, connection):
        if name == self.element_name:
            # Closing the list itself: drop any empty Element items.
            if len(self) > 0:
                empty = []
                for e in self:
                    if isinstance(e, Element):
                        if len(e) == 0:
                            empty.append(e)
                for e in empty:
                    self.remove(e)
        else:
            # Plain scalar child: record it as an attribute of the list.
            setattr(self, self.get_name(name), value)
| bsd-3-clause |
n0max/servo | tests/wpt/web-platform-tests/webdriver/tests/actions/special_keys.py | 3 | 1605 | # META: timeout=long
import pytest
from tests.actions.support.keys import ALL_EVENTS, Keys
from tests.actions.support.refine import filter_dict, get_keys, get_events
# One test case per special key defined in ALL_EVENTS.
@pytest.mark.parametrize("name,expected", ALL_EVENTS.items())
def test_webdriver_special_key_sends_keydown(session,
                                             key_reporter,
                                             key_chain,
                                             name,
                                             expected):
    """Pressing a special key dispatches a keydown with the expected properties."""
    if name.startswith("F"):
        # Prevent default behavior for F1, etc., but only after keydown
        # bubbles up to body. (Otherwise activated browser menus/functions
        # may interfere with subsequent tests.)
        session.execute_script("""
            document.body.addEventListener("keydown",
                    (e) => e.preventDefault());
        """)
    key_chain.key_down(getattr(Keys, name)).perform()
    # only interested in keydown
    first_event = get_events(session)[0]
    # make a copy so we can throw out irrelevant keys and compare to events
    expected = dict(expected)
    del expected["value"]
    # check and remove keys that aren't in expected
    assert first_event["type"] == "keydown"
    assert first_event["repeat"] == False
    first_event = filter_dict(first_event, expected)
    assert first_event == expected
    # only printable characters should be recorded in input field
    entered_keys = get_keys(key_reporter)
    if len(expected["key"]) == 1:
        assert entered_keys == expected["key"]
    else:
        assert len(entered_keys) == 0
| mpl-2.0 |
rockneurotiko/django | tests/test_runner/tests.py | 139 | 15094 | """
Tests for django test runner
"""
from __future__ import unicode_literals
import unittest
from admin_scripts.tests import AdminScriptTestCase
from django import db
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command
from django.test import (
TestCase, TransactionTestCase, mock, skipUnlessDBFeature, testcases,
)
from django.test.runner import DiscoverRunner, dependency_ordered
from django.test.testcases import connections_support_transactions
from django.utils import six
from django.utils.encoding import force_text
from .models import Person
class DependencyOrderingTests(unittest.TestCase):
    """Tests for dependency_ordered(), which orders test-database signatures
    so that every database appears after the databases it depends on."""
    def test_simple_dependencies(self):
        raw = [
            ('s1', ('s1_db', ['alpha'])),
            ('s2', ('s2_db', ['bravo'])),
            ('s3', ('s3_db', ['charlie'])),
        ]
        dependencies = {
            'alpha': ['charlie'],
            'bravo': ['charlie'],
        }
        ordered = dependency_ordered(raw, dependencies=dependencies)
        ordered_sigs = [sig for sig, value in ordered]
        self.assertIn('s1', ordered_sigs)
        self.assertIn('s2', ordered_sigs)
        self.assertIn('s3', ordered_sigs)
        # charlie (s3) must come before both databases that depend on it.
        self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1'))
        self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2'))
    def test_chained_dependencies(self):
        raw = [
            ('s1', ('s1_db', ['alpha'])),
            ('s2', ('s2_db', ['bravo'])),
            ('s3', ('s3_db', ['charlie'])),
        ]
        dependencies = {
            'alpha': ['bravo'],
            'bravo': ['charlie'],
        }
        ordered = dependency_ordered(raw, dependencies=dependencies)
        ordered_sigs = [sig for sig, value in ordered]
        self.assertIn('s1', ordered_sigs)
        self.assertIn('s2', ordered_sigs)
        self.assertIn('s3', ordered_sigs)
        # Explicit dependencies
        self.assertLess(ordered_sigs.index('s2'), ordered_sigs.index('s1'))
        self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2'))
        # Implied dependencies
        self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1'))
    def test_multiple_dependencies(self):
        raw = [
            ('s1', ('s1_db', ['alpha'])),
            ('s2', ('s2_db', ['bravo'])),
            ('s3', ('s3_db', ['charlie'])),
            ('s4', ('s4_db', ['delta'])),
        ]
        dependencies = {
            'alpha': ['bravo', 'delta'],
            'bravo': ['charlie'],
            'delta': ['charlie'],
        }
        ordered = dependency_ordered(raw, dependencies=dependencies)
        ordered_sigs = [sig for sig, aliases in ordered]
        self.assertIn('s1', ordered_sigs)
        self.assertIn('s2', ordered_sigs)
        self.assertIn('s3', ordered_sigs)
        self.assertIn('s4', ordered_sigs)
        # Explicit dependencies
        self.assertLess(ordered_sigs.index('s2'), ordered_sigs.index('s1'))
        self.assertLess(ordered_sigs.index('s4'), ordered_sigs.index('s1'))
        self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2'))
        self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s4'))
        # Implicit dependencies
        self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1'))
    def test_circular_dependencies(self):
        # A dependency cycle cannot be ordered and must be rejected.
        raw = [
            ('s1', ('s1_db', ['alpha'])),
            ('s2', ('s2_db', ['bravo'])),
        ]
        dependencies = {
            'bravo': ['alpha'],
            'alpha': ['bravo'],
        }
        self.assertRaises(ImproperlyConfigured, dependency_ordered, raw, dependencies=dependencies)
    def test_own_alias_dependency(self):
        # An alias must not depend on another alias of the same database.
        raw = [
            ('s1', ('s1_db', ['alpha', 'bravo']))
        ]
        dependencies = {
            'alpha': ['bravo']
        }
        with self.assertRaises(ImproperlyConfigured):
            dependency_ordered(raw, dependencies=dependencies)
        # reordering aliases shouldn't matter
        raw = [
            ('s1', ('s1_db', ['bravo', 'alpha']))
        ]
        with self.assertRaises(ImproperlyConfigured):
            dependency_ordered(raw, dependencies=dependencies)
class MockTestRunner(object):
    """Minimal test-runner stand-in; accepts any constructor arguments."""
    def __init__(self, *args, **kwargs):
        pass
# Attach run_tests as a Mock so tests can assert how it was invoked.
MockTestRunner.run_tests = mock.Mock(return_value=[])
class ManageCommandTests(unittest.TestCase):
    """Tests for the --testrunner option of `manage.py test`."""
    def test_custom_test_runner(self):
        call_command('test', 'sites',
                     testrunner='test_runner.tests.MockTestRunner')
        # The custom runner must receive the requested test labels.
        MockTestRunner.run_tests.assert_called_with(('sites',))
    def test_bad_test_runner(self):
        # A dotted path to a nonexistent runner class raises AttributeError.
        with self.assertRaises(AttributeError):
            call_command('test', 'sites',
                         testrunner='test_runner.NonExistentRunner')
class CustomTestRunnerOptionsTests(AdminScriptTestCase):
    """Verify that a custom test runner's extra command-line options are
    parsed and forwarded (the runner prints them as 'a:b:c')."""
    def setUp(self):
        # NOTE: this local `settings` dict shadows the imported django
        # settings module within this method; it is only the sdict payload.
        settings = {
            'TEST_RUNNER': '\'test_runner.runner.CustomOptionsTestRunner\'',
        }
        self.write_settings('settings.py', sdict=settings)
    def tearDown(self):
        self.remove_settings('settings.py')
    def test_default_options(self):
        args = ['test', '--settings=test_project.settings']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertOutput(out, '1:2:3')
    def test_default_and_given_options(self):
        args = ['test', '--settings=test_project.settings', '--option_b=foo']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertOutput(out, '1:foo:3')
    def test_option_name_and_value_separated(self):
        # '--option_b foo' must behave the same as '--option_b=foo'.
        args = ['test', '--settings=test_project.settings', '--option_b', 'foo']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertOutput(out, '1:foo:3')
    def test_all_options_given(self):
        args = ['test', '--settings=test_project.settings', '--option_a=bar',
                '--option_b=foo', '--option_c=31337']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertOutput(out, 'bar:foo:31337')
class Ticket17477RegressionTests(AdminScriptTestCase):
    """Regression test for #17477: `manage.py help test` must not error."""
    def setUp(self):
        self.write_settings('settings.py')
    def tearDown(self):
        self.remove_settings('settings.py')
    def test_ticket_17477(self):
        """'manage.py help test' works after r16352."""
        args = ['help', 'test']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
class Sqlite3InMemoryTestDbs(TestCase):
    """Regression tests for sqlite ':memory:' test databases (#16329)."""
    available_apps = []
    @unittest.skipUnless(all(db.connections[conn].vendor == 'sqlite' for conn in db.connections),
                         "This is an sqlite-specific issue")
    def test_transaction_support(self):
        """Ticket #16329: sqlite3 in-memory test databases"""
        # Exercise both spellings of an in-memory DB: NAME directly, and
        # the TEST sub-dict.
        for option_key, option_value in (
                ('NAME', ':memory:'), ('TEST', {'NAME': ':memory:'})):
            tested_connections = db.ConnectionHandler({
                'default': {
                    'ENGINE': 'django.db.backends.sqlite3',
                    option_key: option_value,
                },
                'other': {
                    'ENGINE': 'django.db.backends.sqlite3',
                    option_key: option_value,
                },
            })
            with mock.patch('django.db.connections', new=tested_connections):
                with mock.patch('django.test.testcases.connections', new=tested_connections):
                    other = tested_connections['other']
                    DiscoverRunner(verbosity=0).setup_databases()
                    msg = ("DATABASES setting '%s' option set to sqlite3's ':memory:' value "
                           "shouldn't interfere with transaction support detection." % option_key)
                    # Transaction support should be properly initialized for the 'other' DB
                    self.assertTrue(other.features.supports_transactions, msg)
                    # And all the DBs should report that they support transactions
                    self.assertTrue(connections_support_transactions(), msg)
class DummyBackendTest(unittest.TestCase):
    """setup_databases() must work with an empty/dummy database config."""
    def test_setup_databases(self):
        """
        Test that setup_databases() doesn't fail with dummy database backend.
        """
        tested_connections = db.ConnectionHandler({})
        with mock.patch('django.db.connections', new=tested_connections):
            runner_instance = DiscoverRunner(verbosity=0)
            try:
                old_config = runner_instance.setup_databases()
                runner_instance.teardown_databases(old_config)
            except Exception as e:
                self.fail("setup_databases/teardown_databases unexpectedly raised "
                          "an error: %s" % e)
class AliasedDefaultTestSetupTest(unittest.TestCase):
    """setup_databases() must tolerate an alias of the default database."""
    def test_setup_aliased_default_database(self):
        """
        Test that setup_databases() doesn't fail when 'default' is aliased
        """
        # Two connection entries pointing at the same database name.
        tested_connections = db.ConnectionHandler({
            'default': {
                'NAME': 'dummy'
            },
            'aliased': {
                'NAME': 'dummy'
            }
        })
        with mock.patch('django.db.connections', new=tested_connections):
            runner_instance = DiscoverRunner(verbosity=0)
            try:
                old_config = runner_instance.setup_databases()
                runner_instance.teardown_databases(old_config)
            except Exception as e:
                self.fail("setup_databases/teardown_databases unexpectedly raised "
                          "an error: %s" % e)
class SetupDatabasesTests(unittest.TestCase):
    """Behavioral tests for DiscoverRunner.setup_databases(): alias handling,
    name restoration, and serialization flags passed to create_test_db()."""
    def setUp(self):
        self.runner_instance = DiscoverRunner(verbosity=0)
    def test_setup_aliased_databases(self):
        tested_connections = db.ConnectionHandler({
            'default': {
                'ENGINE': 'django.db.backends.dummy',
                'NAME': 'dbname',
            },
            'other': {
                'ENGINE': 'django.db.backends.dummy',
                'NAME': 'dbname',
            }
        })
        with mock.patch('django.db.backends.dummy.base.DatabaseCreation') as mocked_db_creation:
            with mock.patch('django.db.connections', new=tested_connections):
                old_config = self.runner_instance.setup_databases()
                self.runner_instance.teardown_databases(old_config)
        # Two aliases of one database must be torn down exactly once.
        mocked_db_creation.return_value.destroy_test_db.assert_called_once_with('dbname', 0, False)
    def test_destroy_test_db_restores_db_name(self):
        tested_connections = db.ConnectionHandler({
            'default': {
                'ENGINE': settings.DATABASES[db.DEFAULT_DB_ALIAS]["ENGINE"],
                'NAME': 'xxx_test_database',
            },
        })
        # Using the real current name as old_name to not mess with the test suite.
        old_name = settings.DATABASES[db.DEFAULT_DB_ALIAS]["NAME"]
        with mock.patch('django.db.connections', new=tested_connections):
            tested_connections['default'].creation.destroy_test_db(old_name, verbosity=0, keepdb=True)
        self.assertEqual(tested_connections['default'].settings_dict["NAME"], old_name)
    def test_serialization(self):
        tested_connections = db.ConnectionHandler({
            'default': {
                'ENGINE': 'django.db.backends.dummy',
            },
        })
        with mock.patch('django.db.backends.dummy.base.DatabaseCreation') as mocked_db_creation:
            with mock.patch('django.db.connections', new=tested_connections):
                self.runner_instance.setup_databases()
        # serialize defaults to True when TEST['SERIALIZE'] is unset.
        mocked_db_creation.return_value.create_test_db.assert_called_once_with(
            0, autoclobber=False, serialize=True, keepdb=False
        )
    def test_serialized_off(self):
        tested_connections = db.ConnectionHandler({
            'default': {
                'ENGINE': 'django.db.backends.dummy',
                'TEST': {'SERIALIZE': False},
            },
        })
        with mock.patch('django.db.backends.dummy.base.DatabaseCreation') as mocked_db_creation:
            with mock.patch('django.db.connections', new=tested_connections):
                self.runner_instance.setup_databases()
        # TEST['SERIALIZE'] = False must propagate to create_test_db().
        mocked_db_creation.return_value.create_test_db.assert_called_once_with(
            0, autoclobber=False, serialize=False, keepdb=False
        )
class DeprecationDisplayTest(AdminScriptTestCase):
    # tests for 19546
    """Deprecation warnings are shown at default verbosity, hidden at 0."""
    def setUp(self):
        settings = {
            'DATABASES': '{"default": {"ENGINE":"django.db.backends.sqlite3", "NAME":":memory:"}}'
        }
        self.write_settings('settings.py', sdict=settings)
    def tearDown(self):
        self.remove_settings('settings.py')
    def test_runner_deprecation_verbosity_default(self):
        args = ['test', '--settings=test_project.settings', 'test_runner_deprecation_app']
        out, err = self.run_django_admin(args)
        self.assertIn("Ran 1 test", force_text(err))
        # Both the in-test warning and the module-level import warning show up.
        six.assertRegex(self, err, r"RemovedInDjango\d+Warning: warning from test")
        six.assertRegex(self, err, r"RemovedInDjango\d+Warning: module-level warning from deprecation_app")
    def test_runner_deprecation_verbosity_zero(self):
        args = ['test', '--settings=test_project.settings', '--verbosity=0', 'test_runner_deprecation_app']
        out, err = self.run_django_admin(args)
        self.assertIn("Ran 1 test", err)
        self.assertNotIn("warning from test", err)
class AutoIncrementResetTest(TransactionTestCase):
    """
    Here we test creating the same model two times in different test methods,
    and check that both times they get "1" as their PK value. That is, we test
    that AutoField values start from 1 for each transactional test case.
    """
    available_apps = ['test_runner']
    # Ask the test framework to reset DB sequences between test methods.
    reset_sequences = True
    @skipUnlessDBFeature('supports_sequence_reset')
    def test_autoincrement_reset1(self):
        p = Person.objects.create(first_name='Jack', last_name='Smith')
        self.assertEqual(p.pk, 1)
    @skipUnlessDBFeature('supports_sequence_reset')
    def test_autoincrement_reset2(self):
        # Same expectation as reset1 — proves the sequence restarted.
        p = Person.objects.create(first_name='Jack', last_name='Smith')
        self.assertEqual(p.pk, 1)
class EmptyDefaultDatabaseTest(unittest.TestCase):
    """An empty 'default' database must fall back to the dummy backend."""
    def test_empty_default_database(self):
        """
        Test that an empty default database in settings does not raise an ImproperlyConfigured
        error when running a unit test that does not use a database.
        """
        testcases.connections = db.ConnectionHandler({'default': {}})
        connection = testcases.connections[db.utils.DEFAULT_DB_ALIAS]
        self.assertEqual(connection.settings_dict['ENGINE'], 'django.db.backends.dummy')
        try:
            connections_support_transactions()
        except Exception as e:
            self.fail("connections_support_transactions() unexpectedly raised an error: %s" % e)
treejames/viewfinder | marketing/tornado/template.py | 31 | 30850 | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A simple template system that compiles templates to Python code.
Basic usage looks like::
t = template.Template("<html>{{ myvalue }}</html>")
print t.generate(myvalue="XXX")
Loader is a class that loads templates from a root directory and caches
the compiled templates::
loader = template.Loader("/home/btaylor")
print loader.load("test.html").generate(myvalue="XXX")
We compile all templates to raw Python. Error-reporting is currently... uh,
interesting. Syntax for the templates::
### base.html
<html>
<head>
<title>{% block title %}Default title{% end %}</title>
</head>
<body>
<ul>
{% for student in students %}
{% block student %}
<li>{{ escape(student.name) }}</li>
{% end %}
{% end %}
</ul>
</body>
</html>
### bold.html
{% extends "base.html" %}
{% block title %}A bolder title{% end %}
{% block student %}
<li><span style="bold">{{ escape(student.name) }}</span></li>
{% end %}
Unlike most other template systems, we do not put any restrictions on the
expressions you can include in your statements. if and for blocks get
translated exactly into Python, you can do complex expressions like::
{% for student in [p for p in people if p.student and p.age > 23] %}
<li>{{ escape(student.name) }}</li>
{% end %}
Translating directly to Python means you can apply functions to expressions
easily, like the escape() function in the examples above. You can pass
functions in to your template just like any other variable::
### Python code
def add(x, y):
return x + y
template.execute(add=add)
### The template
{{ add(1, 2) }}
We provide the functions escape(), url_escape(), json_encode(), and squeeze()
to all templates by default.
Typical applications do not create `Template` or `Loader` instances by
hand, but instead use the `~.RequestHandler.render` and
`~.RequestHandler.render_string` methods of
`tornado.web.RequestHandler`, which load templates automatically based
on the ``template_path`` `.Application` setting.
Variable names beginning with ``_tt_`` are reserved by the template
system and should not be used by application code.
Syntax Reference
----------------
Template expressions are surrounded by double curly braces: ``{{ ... }}``.
The contents may be any python expression, which will be escaped according
to the current autoescape setting and inserted into the output. Other
template directives use ``{% %}``. These tags may be escaped as ``{{!``
and ``{%!`` if you need to include a literal ``{{`` or ``{%`` in the output.
To comment out a section so that it is omitted from the output, surround it
with ``{# ... #}``.
``{% apply *function* %}...{% end %}``
Applies a function to the output of all template code between ``apply``
and ``end``::
{% apply linkify %}{{name}} said: {{message}}{% end %}
Note that as an implementation detail apply blocks are implemented
as nested functions and thus may interact strangely with variables
set via ``{% set %}``, or the use of ``{% break %}`` or ``{% continue %}``
within loops.
``{% autoescape *function* %}``
Sets the autoescape mode for the current file. This does not affect
other files, even those referenced by ``{% include %}``. Note that
autoescaping can also be configured globally, at the `.Application`
or `Loader`.::
{% autoescape xhtml_escape %}
{% autoescape None %}
``{% block *name* %}...{% end %}``
Indicates a named, replaceable block for use with ``{% extends %}``.
Blocks in the parent template will be replaced with the contents of
the same-named block in a child template.::
<!-- base.html -->
<title>{% block title %}Default title{% end %}</title>
<!-- mypage.html -->
{% extends "base.html" %}
{% block title %}My page title{% end %}
``{% comment ... %}``
A comment which will be removed from the template output. Note that
there is no ``{% end %}`` tag; the comment goes from the word ``comment``
to the closing ``%}`` tag.
``{% extends *filename* %}``
Inherit from another template. Templates that use ``extends`` should
contain one or more ``block`` tags to replace content from the parent
template. Anything in the child template not contained in a ``block``
tag will be ignored. For an example, see the ``{% block %}`` tag.
``{% for *var* in *expr* %}...{% end %}``
Same as the python ``for`` statement. ``{% break %}`` and
``{% continue %}`` may be used inside the loop.
``{% from *x* import *y* %}``
Same as the python ``import`` statement.
``{% if *condition* %}...{% elif *condition* %}...{% else %}...{% end %}``
Conditional statement - outputs the first section whose condition is
true. (The ``elif`` and ``else`` sections are optional)
``{% import *module* %}``
Same as the python ``import`` statement.
``{% include *filename* %}``
Includes another template file. The included file can see all the local
variables as if it were copied directly to the point of the ``include``
directive (the ``{% autoescape %}`` directive is an exception).
Alternately, ``{% module Template(filename, **kwargs) %}`` may be used
to include another template with an isolated namespace.
``{% module *expr* %}``
Renders a `~tornado.web.UIModule`. The output of the ``UIModule`` is
not escaped::
{% module Template("foo.html", arg=42) %}
``{% raw *expr* %}``
Outputs the result of the given expression without autoescaping.
``{% set *x* = *y* %}``
Sets a local variable.
``{% try %}...{% except %}...{% finally %}...{% else %}...{% end %}``
Same as the python ``try`` statement.
``{% while *condition* %}... {% end %}``
Same as the python ``while`` statement. ``{% break %}`` and
``{% continue %}`` may be used inside the loop.
"""
from __future__ import absolute_import, division, print_function, with_statement
import datetime
import linecache
import os.path
import posixpath
import re
import threading
from tornado import escape
from tornado.log import app_log
from tornado.util import bytes_type, ObjectDict, exec_in, unicode_type
try:
    from cStringIO import StringIO  # py2
except ImportError:
    from io import StringIO  # py3
# Name of the escaping function applied to expressions by default.
_DEFAULT_AUTOESCAPE = "xhtml_escape"
# Sentinel distinguishing "autoescape argument not given" from None
# (None is a meaningful value: it disables autoescaping).
_UNSET = object()
class Template(object):
    """A compiled template.
    We compile into Python from the given template_string. You can generate
    the template from variables with generate().
    """
    # note that the constructor's signature is not extracted with
    # autodoc because _UNSET looks like garbage. When changing
    # this signature update website/sphinx/template.rst too.
    def __init__(self, template_string, name="<string>", loader=None,
                 compress_whitespace=None, autoescape=_UNSET):
        self.name = name
        # Whitespace compression defaults on for .html/.js files only.
        if compress_whitespace is None:
            compress_whitespace = name.endswith(".html") or \
                name.endswith(".js")
        # Precedence for the autoescape setting: explicit argument,
        # then the loader's default, then the module default.
        if autoescape is not _UNSET:
            self.autoescape = autoescape
        elif loader:
            self.autoescape = loader.autoescape
        else:
            self.autoescape = _DEFAULT_AUTOESCAPE
        self.namespace = loader.namespace if loader else {}
        # Parse the source into a node tree, emit Python source for it,
        # then compile that source to a code object ready for generate().
        reader = _TemplateReader(name, escape.native_str(template_string))
        self.file = _File(self, _parse(reader, self))
        self.code = self._generate_python(loader, compress_whitespace)
        self.loader = loader
        try:
            # Under python2.5, the fake filename used here must match
            # the module name used in __name__ below.
            # The dont_inherit flag prevents template.py's future imports
            # from being applied to the generated code.
            self.compiled = compile(
                escape.to_unicode(self.code),
                "%s.generated.py" % self.name.replace('.', '_'),
                "exec", dont_inherit=True)
        except Exception:
            # Log the generated source with line numbers to make syntax
            # errors in templates debuggable, then re-raise.
            formatted_code = _format_code(self.code).rstrip()
            app_log.error("%s code:\n%s", self.name, formatted_code)
            raise
    def generate(self, **kwargs):
        """Generate this template with the given arguments."""
        # Build the execution namespace: helper functions first, then the
        # loader-provided namespace, then the caller's kwargs (which win).
        namespace = {
            "escape": escape.xhtml_escape,
            "xhtml_escape": escape.xhtml_escape,
            "url_escape": escape.url_escape,
            "json_encode": escape.json_encode,
            "squeeze": escape.squeeze,
            "linkify": escape.linkify,
            "datetime": datetime,
            "_tt_utf8": escape.utf8,  # for internal use
            "_tt_string_types": (unicode_type, bytes_type),
            # __name__ and __loader__ allow the traceback mechanism to find
            # the generated source code.
            "__name__": self.name.replace('.', '_'),
            "__loader__": ObjectDict(get_source=lambda name: self.code),
        }
        namespace.update(self.namespace)
        namespace.update(kwargs)
        exec_in(self.compiled, namespace)
        execute = namespace["_tt_execute"]
        # Clear the traceback module's cache of source data now that
        # we've generated a new template (mainly for this module's
        # unittests, where different tests reuse the same name).
        linecache.clearcache()
        return execute()
    def _generate_python(self, loader, compress_whitespace):
        """Return the Python source text implementing this template."""
        buffer = StringIO()
        try:
            # named_blocks maps from names to _NamedBlock objects
            named_blocks = {}
            ancestors = self._get_ancestors(loader)
            ancestors.reverse()
            # Collect {% block %} overrides, child templates last so they
            # replace same-named blocks from their parents.
            for ancestor in ancestors:
                ancestor.find_named_blocks(loader, named_blocks)
            writer = _CodeWriter(buffer, named_blocks, loader, ancestors[0].template,
                                 compress_whitespace)
            ancestors[0].generate(writer)
            return buffer.getvalue()
        finally:
            buffer.close()
    def _get_ancestors(self, loader):
        """Return [self.file] plus the _File nodes of any {% extends %} chain."""
        ancestors = [self.file]
        for chunk in self.file.body.chunks:
            if isinstance(chunk, _ExtendsBlock):
                if not loader:
                    raise ParseError("{% extends %} block found, but no "
                                     "template loader")
                template = loader.load(chunk.name, self.name)
                ancestors.extend(template._get_ancestors(loader))
        return ancestors
class BaseLoader(object):
    """Base class for template loaders.
    You must use a template loader to use template constructs like
    ``{% extends %}`` and ``{% include %}``. The loader caches all
    templates after they are loaded the first time.
    """
    def __init__(self, autoescape=_DEFAULT_AUTOESCAPE, namespace=None):
        """``autoescape`` must be either None or a string naming a function
        in the template namespace, such as "xhtml_escape".
        """
        self.autoescape = autoescape
        self.namespace = namespace or {}
        self.templates = {}
        # self.lock protects self.templates. It's a reentrant lock
        # because templates may load other templates via `include` or
        # `extends`. Note that thanks to the GIL this code would be safe
        # even without the lock, but could lead to wasted work as multiple
        # threads tried to compile the same template simultaneously.
        self.lock = threading.RLock()
    def reset(self):
        """Resets the cache of compiled templates."""
        with self.lock:
            self.templates = {}
    def resolve_path(self, name, parent_path=None):
        """Converts a possibly-relative path to absolute (used internally)."""
        raise NotImplementedError()
    def load(self, name, parent_path=None):
        """Loads a template."""
        name = self.resolve_path(name, parent_path=parent_path)
        with self.lock:
            # Compile on first use; subsequent loads hit the cache.
            if name not in self.templates:
                self.templates[name] = self._create_template(name)
            return self.templates[name]
    def _create_template(self, name):
        # Subclass hook: build a Template for the (already resolved) name.
        raise NotImplementedError()
class Loader(BaseLoader):
    """A template loader that loads from a single root directory.
    """
    def __init__(self, root_directory, **kwargs):
        super(Loader, self).__init__(**kwargs)
        self.root = os.path.abspath(root_directory)
    def resolve_path(self, name, parent_path=None):
        """Resolve *name* relative to *parent_path* within the root directory.

        Absolute names, absolute parents, and synthetic "<string>"-style
        parent names are left untouched.
        """
        if parent_path and not parent_path.startswith("<") and \
                not parent_path.startswith("/") and \
                not name.startswith("/"):
            current_path = os.path.join(self.root, parent_path)
            file_dir = os.path.dirname(os.path.abspath(current_path))
            relative_path = os.path.abspath(os.path.join(file_dir, name))
            # Only rewrite the name if the result stays under the root.
            if relative_path.startswith(self.root):
                name = relative_path[len(self.root) + 1:]
        return name
    def _create_template(self, name):
        """Read the template file and compile it.

        Uses a context manager so the file handle is closed even if
        reading or compilation raises; the previous open()/close() pair
        leaked the handle on error.
        """
        path = os.path.join(self.root, name)
        with open(path, "rb") as f:
            template = Template(f.read(), name=name, loader=self)
        return template
class DictLoader(BaseLoader):
    """A template loader whose templates live in an in-memory dictionary,
    keyed by template name."""
    def __init__(self, dict, **kwargs):
        super(DictLoader, self).__init__(**kwargs)
        self.dict = dict
    def resolve_path(self, name, parent_path=None):
        # A name is treated as relative only when it is not absolute, the
        # parent exists, and the parent is neither absolute nor a synthetic
        # "<...>"-style name.
        is_relative = (parent_path
                       and not parent_path.startswith("<")
                       and not parent_path.startswith("/")
                       and not name.startswith("/"))
        if is_relative:
            parent_dir = posixpath.dirname(parent_path)
            name = posixpath.normpath(posixpath.join(parent_dir, name))
        return name
    def _create_template(self, name):
        return Template(self.dict[name], name=name, loader=self)
class _Node(object):
    """Base class for parse-tree nodes produced by _parse()."""
    def each_child(self):
        # Child nodes of this node; leaf nodes have none.
        return ()
    def generate(self, writer):
        # Emit Python source implementing this node via the _CodeWriter.
        raise NotImplementedError()
    def find_named_blocks(self, loader, named_blocks):
        # Recursively collect {% block %} nodes into named_blocks.
        for child in self.each_child():
            child.find_named_blocks(loader, named_blocks)
class _File(_Node):
    """Root node: wraps a whole template body in the generated
    _tt_execute() function that renders it to a byte string."""
    def __init__(self, template, body):
        self.template = template
        self.body = body
        self.line = 0
    def generate(self, writer):
        writer.write_line("def _tt_execute():", self.line)
        with writer.indent():
            # _tt_append is bound once as a micro-optimization for the
            # many append calls emitted by child nodes.
            writer.write_line("_tt_buffer = []", self.line)
            writer.write_line("_tt_append = _tt_buffer.append", self.line)
            self.body.generate(writer)
            writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
    def each_child(self):
        return (self.body,)
class _ChunkList(_Node):
    """A sequence of sibling nodes, rendered one after another in order."""
    def __init__(self, chunks):
        self.chunks = chunks
    def generate(self, writer):
        # Emit each child in document order.
        for node in self.chunks:
            node.generate(writer)
    def each_child(self):
        return self.chunks
class _NamedBlock(_Node):
    """A {% block name %}...{% end %} node, replaceable via {% extends %}."""
    def __init__(self, name, body, template, line):
        self.name = name
        self.body = body
        self.template = template
        self.line = line
    def each_child(self):
        return (self.body,)
    def generate(self, writer):
        # Emit the *overriding* block (from the most-derived template that
        # defines this name), not necessarily our own body.
        block = writer.named_blocks[self.name]
        with writer.include(block.template, self.line):
            block.body.generate(writer)
    def find_named_blocks(self, loader, named_blocks):
        # Register (or override) this block, then recurse into its body.
        named_blocks[self.name] = self
        _Node.find_named_blocks(self, loader, named_blocks)
class _ExtendsBlock(_Node):
    """Marker node for {% extends %}; consumed by Template._get_ancestors
    rather than generating any code itself."""
    def __init__(self, name):
        self.name = name
class _IncludeBlock(_Node):
    """A {% include filename %} node: inlines another template's body,
    sharing the including template's local variables."""
    def __init__(self, name, reader, line):
        self.name = name
        # Remember the including template's name so relative include
        # paths resolve against it.
        self.template_name = reader.name
        self.line = line
    def find_named_blocks(self, loader, named_blocks):
        included = loader.load(self.name, self.template_name)
        included.file.find_named_blocks(loader, named_blocks)
    def generate(self, writer):
        included = writer.loader.load(self.name, self.template_name)
        with writer.include(included, self.line):
            included.file.body.generate(writer)
class _ApplyBlock(_Node):
    """A {% apply fn %}...{% end %} node: renders its body in a nested
    function and passes the result through *fn*."""
    def __init__(self, method, line, body=None):
        self.method = method
        self.line = line
        self.body = body
    def each_child(self):
        return (self.body,)
    def generate(self, writer):
        # Each apply block gets a uniquely numbered helper function.
        method_name = "_tt_apply%d" % writer.apply_counter
        writer.apply_counter += 1
        writer.write_line("def %s():" % method_name, self.line)
        with writer.indent():
            writer.write_line("_tt_buffer = []", self.line)
            writer.write_line("_tt_append = _tt_buffer.append", self.line)
            self.body.generate(writer)
            writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
        # Call the helper, apply the user's function, append the result.
        writer.write_line("_tt_append(_tt_utf8(%s(%s())))" % (
            self.method, method_name), self.line)
class _ControlBlock(_Node):
    """A block statement with a body ({% if %}, {% for %}, {% while %},
    {% try %}): emits the statement verbatim plus an indented body."""
    def __init__(self, statement, line, body=None):
        self.statement = statement
        self.line = line
        self.body = body
    def each_child(self):
        return (self.body,)
    def generate(self, writer):
        writer.write_line("%s:" % self.statement, self.line)
        with writer.indent():
            self.body.generate(writer)
            # Just in case the body was empty
            writer.write_line("pass", self.line)
class _IntermediateControlBlock(_Node):
    """An {% else %}/{% elif %}/{% except %}/{% finally %} clause inside a
    control block; emitted one indent level shallower than the body."""
    def __init__(self, statement, line):
        self.statement = statement
        self.line = line
    def generate(self, writer):
        # In case the previous block was empty
        writer.write_line("pass", self.line)
        writer.write_line("%s:" % self.statement, self.line, writer.indent_size() - 1)
class _Statement(_Node):
    """A bare Python statement emitted verbatim ({% set %}, {% import %},
    {% break %}, {% continue %}, ...)."""
    def __init__(self, statement, line):
        self.statement = statement
        self.line = line
    def generate(self, writer):
        writer.write_line(self.statement, self.line)
class _Expression(_Node):
    """A {{ expr }} (or {% raw expr %}) node: evaluates the expression,
    coerces the result to utf-8 bytes, optionally autoescapes, appends."""
    def __init__(self, expression, line, raw=False):
        self.expression = expression
        self.line = line
        # raw=True skips autoescaping ({% raw %} and {% module %}).
        self.raw = raw
    def generate(self, writer):
        writer.write_line("_tt_tmp = %s" % self.expression, self.line)
        # Strings are utf-8-encoded directly; everything else goes
        # through str() first.
        writer.write_line("if isinstance(_tt_tmp, _tt_string_types):"
                          " _tt_tmp = _tt_utf8(_tt_tmp)", self.line)
        writer.write_line("else: _tt_tmp = _tt_utf8(str(_tt_tmp))", self.line)
        if not self.raw and writer.current_template.autoescape is not None:
            # In python3 functions like xhtml_escape return unicode,
            # so we have to convert to utf8 again.
            writer.write_line("_tt_tmp = _tt_utf8(%s(_tt_tmp))" %
                              writer.current_template.autoescape, self.line)
        writer.write_line("_tt_append(_tt_tmp)", self.line)
class _Module(_Expression):
    """A {% module Expr %} node: a raw (never autoescaped) expression
    dispatched through the _tt_modules namespace supplied by tornado.web."""
    def __init__(self, expression, line):
        super(_Module, self).__init__("_tt_modules." + expression, line,
                                      raw=True)
class _Text(_Node):
    """A literal text run between template directives."""
    def __init__(self, value, line):
        self.value = value
        self.line = line
    def generate(self, writer):
        value = self.value
        # Compress lots of white space to a single character. If the whitespace
        # breaks a line, have it continue to break a line, but just with a
        # single \n character
        if writer.compress_whitespace and "<pre>" not in value:
            value = re.sub(r"([\t ]+)", " ", value)
            value = re.sub(r"(\s*\n\s*)", "\n", value)
        # Empty runs emit nothing.
        if value:
            writer.write_line('_tt_append(%r)' % escape.utf8(value), self.line)
class ParseError(Exception):
    """Raised for template syntax errors (unterminated tags, misplaced
    operators, empty expressions, ...)."""
    pass
class _CodeWriter(object):
    """Accumulates the generated Python source, tracking indentation and
    which template each emitted line originated from (for the trailing
    `# name:line` comments that make tracebacks readable)."""
    def __init__(self, file, named_blocks, loader, current_template,
                 compress_whitespace):
        self.file = file
        self.named_blocks = named_blocks
        self.loader = loader
        self.current_template = current_template
        self.compress_whitespace = compress_whitespace
        self.apply_counter = 0
        self.include_stack = []
        self._indent = 0
    def indent_size(self):
        return self._indent
    def indent(self):
        # Context manager that bumps the indent level for the duration
        # of a `with` block.  (`_` stands in for the inner self so the
        # closure can reach the writer's self.)
        class Indenter(object):
            def __enter__(_):
                self._indent += 1
                return self
            def __exit__(_, *args):
                assert self._indent > 0
                self._indent -= 1
        return Indenter()
    def include(self, template, line):
        # Context manager that switches current_template while emitting an
        # included/extended template, restoring it afterwards.
        self.include_stack.append((self.current_template, line))
        self.current_template = template
        class IncludeTemplate(object):
            def __enter__(_):
                return self
            def __exit__(_, *args):
                self.current_template = self.include_stack.pop()[0]
        return IncludeTemplate()
    def write_line(self, line, line_number, indent=None):
        # Write one line of generated source, annotated with its template
        # origin (and the include chain, if any).
        if indent is None:
            indent = self._indent
        line_comment = ' # %s:%d' % (self.current_template.name, line_number)
        if self.include_stack:
            ancestors = ["%s:%d" % (tmpl.name, lineno)
                         for (tmpl, lineno) in self.include_stack]
            line_comment += ' (via %s)' % ', '.join(reversed(ancestors))
        print(" " * indent + line + line_comment, file=self.file)
class _TemplateReader(object):
    """A cursor over the template source: find/consume operate relative to
    the current position and keep a running source line count."""
    def __init__(self, name, text):
        self.name = name
        self.text = text
        self.line = 1
        self.pos = 0
    def find(self, needle, start=0, end=None):
        # Find *needle* at or after the relative offset *start*; returns a
        # position relative to the cursor, or -1 if not found.
        assert start >= 0, start
        pos = self.pos
        start += pos
        if end is None:
            index = self.text.find(needle, start)
        else:
            end += pos
            assert end >= start
            index = self.text.find(needle, start, end)
        if index != -1:
            index -= pos
        return index
    def consume(self, count=None):
        # Advance the cursor *count* characters (all remaining if None),
        # updating the line counter, and return the consumed text.
        if count is None:
            count = len(self.text) - self.pos
        newpos = self.pos + count
        self.line += self.text.count("\n", self.pos, newpos)
        s = self.text[self.pos:newpos]
        self.pos = newpos
        return s
    def remaining(self):
        return len(self.text) - self.pos
    def __len__(self):
        return self.remaining()
    def __getitem__(self, key):
        # Index/slice relative to the cursor (negative indexes address the
        # absolute end of the text, matching the original semantics).
        if type(key) is slice:
            size = len(self)
            start, stop, step = key.indices(size)
            # NOTE(review): slice.indices() always returns concrete ints,
            # so the `start is None` branch below appears unreachable —
            # kept as-is to preserve behavior.
            if start is None:
                start = self.pos
            else:
                start += self.pos
            if stop is not None:
                stop += self.pos
            return self.text[slice(start, stop, step)]
        elif key < 0:
            return self.text[key]
        else:
            return self.text[self.pos + key]
    def __str__(self):
        return self.text[self.pos:]
def _format_code(code):
lines = code.splitlines()
format = "%%%dd %%s\n" % len(repr(len(lines) + 1))
return "".join([format % (i + 1, line) for (i, line) in enumerate(lines)])
def _parse(reader, template, in_block=None, in_loop=None):
    """Parse *reader*'s template source into a _ChunkList parse tree.

    ``in_block`` names the enclosing block operator (e.g. "if", "for")
    during recursive calls and ``in_loop`` the nearest enclosing loop
    operator; both are used only to validate intermediate tags,
    {% end %}, {% break %} and {% continue %}.
    """
    body = _ChunkList([])
    while True:
        # Find next template directive
        curly = 0
        while True:
            curly = reader.find("{", curly)
            if curly == -1 or curly + 1 == reader.remaining():
                # EOF
                if in_block:
                    raise ParseError("Missing {%% end %%} block for %s" %
                                     in_block)
                body.chunks.append(_Text(reader.consume(), reader.line))
                return body
            # If the first curly brace is not the start of a special token,
            # start searching from the character after it
            if reader[curly + 1] not in ("{", "%", "#"):
                curly += 1
                continue
            # When there are more than 2 curlies in a row, use the
            # innermost ones. This is useful when generating languages
            # like latex where curlies are also meaningful
            if (curly + 2 < reader.remaining() and
                    reader[curly + 1] == '{' and reader[curly + 2] == '{'):
                curly += 1
                continue
            break
        # Append any text before the special token
        if curly > 0:
            cons = reader.consume(curly)
            body.chunks.append(_Text(cons, reader.line))
        start_brace = reader.consume(2)
        line = reader.line
        # Template directives may be escaped as "{{!" or "{%!".
        # In this case output the braces and consume the "!".
        # This is especially useful in conjunction with jquery templates,
        # which also use double braces.
        if reader.remaining() and reader[0] == "!":
            reader.consume(1)
            body.chunks.append(_Text(start_brace, line))
            continue
        # Comment ({# ... #}): consumed and discarded.
        if start_brace == "{#":
            end = reader.find("#}")
            if end == -1:
                raise ParseError("Missing end expression #} on line %d" % line)
            contents = reader.consume(end).strip()
            reader.consume(2)
            continue
        # Expression
        if start_brace == "{{":
            end = reader.find("}}")
            if end == -1:
                raise ParseError("Missing end expression }} on line %d" % line)
            contents = reader.consume(end).strip()
            reader.consume(2)
            if not contents:
                raise ParseError("Empty expression on line %d" % line)
            body.chunks.append(_Expression(contents, line))
            continue
        # Block
        assert start_brace == "{%", start_brace
        end = reader.find("%}")
        if end == -1:
            raise ParseError("Missing end block %%} on line %d" % line)
        contents = reader.consume(end).strip()
        reader.consume(2)
        if not contents:
            raise ParseError("Empty block tag ({%% %%}) on line %d" % line)
        # Split "operator rest-of-tag"; e.g. "for x in xs" -> ("for", ...).
        operator, space, suffix = contents.partition(" ")
        suffix = suffix.strip()
        # Intermediate ("else", "elif", etc) blocks
        intermediate_blocks = {
            "else": set(["if", "for", "while", "try"]),
            "elif": set(["if"]),
            "except": set(["try"]),
            "finally": set(["try"]),
        }
        allowed_parents = intermediate_blocks.get(operator)
        if allowed_parents is not None:
            if not in_block:
                raise ParseError("%s outside %s block" %
                                 (operator, allowed_parents))
            if in_block not in allowed_parents:
                raise ParseError("%s block cannot be attached to %s block" % (operator, in_block))
            body.chunks.append(_IntermediateControlBlock(contents, line))
            continue
        # End tag
        elif operator == "end":
            if not in_block:
                raise ParseError("Extra {%% end %%} block on line %d" % line)
            return body
        # Self-contained (bodyless) directives.
        elif operator in ("extends", "include", "set", "import", "from",
                          "comment", "autoescape", "raw", "module"):
            if operator == "comment":
                continue
            if operator == "extends":
                suffix = suffix.strip('"').strip("'")
                if not suffix:
                    raise ParseError("extends missing file path on line %d" % line)
                block = _ExtendsBlock(suffix)
            elif operator in ("import", "from"):
                if not suffix:
                    raise ParseError("import missing statement on line %d" % line)
                block = _Statement(contents, line)
            elif operator == "include":
                suffix = suffix.strip('"').strip("'")
                if not suffix:
                    raise ParseError("include missing file path on line %d" % line)
                block = _IncludeBlock(suffix, reader, line)
            elif operator == "set":
                if not suffix:
                    raise ParseError("set missing statement on line %d" % line)
                block = _Statement(suffix, line)
            elif operator == "autoescape":
                # Takes effect immediately for the rest of this file;
                # produces no node.
                fn = suffix.strip()
                if fn == "None":
                    fn = None
                template.autoescape = fn
                continue
            elif operator == "raw":
                block = _Expression(suffix, line, raw=True)
            elif operator == "module":
                block = _Module(suffix, line)
            body.chunks.append(block)
            continue
        # Directives with a body: parse recursively up to {% end %}.
        elif operator in ("apply", "block", "try", "if", "for", "while"):
            # parse inner body recursively
            if operator in ("for", "while"):
                block_body = _parse(reader, template, operator, operator)
            elif operator == "apply":
                # apply creates a nested function so syntactically it's not
                # in the loop.
                block_body = _parse(reader, template, operator, None)
            else:
                block_body = _parse(reader, template, operator, in_loop)
            if operator == "apply":
                if not suffix:
                    raise ParseError("apply missing method name on line %d" % line)
                block = _ApplyBlock(suffix, line, block_body)
            elif operator == "block":
                if not suffix:
                    raise ParseError("block missing name on line %d" % line)
                block = _NamedBlock(suffix, block_body, template, line)
            else:
                block = _ControlBlock(contents, line, block_body)
            body.chunks.append(block)
            continue
        elif operator in ("break", "continue"):
            if not in_loop:
                raise ParseError("%s outside %s block" % (operator, set(["for", "while"])))
            body.chunks.append(_Statement(contents, line))
            continue
        else:
            raise ParseError("unknown operator: %r" % operator)
| apache-2.0 |
brandond/ansible | lib/ansible/module_utils/database.py | 50 | 5860 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class SQLParseError(Exception):
    """Raised when an SQL identifier is malformed or exceeds the number of
    dotted levels allowed for its type."""
    pass
class UnclosedQuoteError(SQLParseError):
    """Raised when a quoted identifier is missing its closing quote."""
    pass
# maps a type of identifier to the maximum number of dot levels that are
# allowed to specify that identifier. For example, a database column can be
# specified by up to 4 levels: database.schema.table.column
_PG_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, schema=2, table=3, column=4, role=1)
# MySQL has no schema level, so its maxima are one lower for tables/columns.
_MYSQL_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, table=2, column=3, role=1, vars=1)
def _find_end_quote(identifier, quote_char):
accumulate = 0
while True:
try:
quote = identifier.index(quote_char)
except ValueError:
raise UnclosedQuoteError
accumulate = accumulate + quote
try:
next_char = identifier[quote + 1]
except IndexError:
return accumulate
if next_char == quote_char:
try:
identifier = identifier[quote + 2:]
accumulate = accumulate + 2
except IndexError:
raise UnclosedQuoteError
else:
return accumulate
def _identifier_parse(identifier, quote_char):
    """Split a dotted SQL identifier into a list of quoted fragments.

    Each returned fragment is wrapped in *quote_char* with any embedded
    quote characters doubled.  Fragments that arrive already (validly)
    quoted are passed through unchanged.

    :raises SQLParseError: on empty fragments or bad user quoting.
    :raises UnclosedQuoteError: (indirectly) never escapes; an unclosed
        quote makes the fragment be treated as unquoted text.
    """
    if not identifier:
        raise SQLParseError('Identifier name unspecified or unquoted trailing dot')
    already_quoted = False
    if identifier.startswith(quote_char):
        already_quoted = True
        try:
            # +1 converts the index (relative to identifier[1:]) back to
            # an index into the full string.
            end_quote = _find_end_quote(identifier[1:], quote_char=quote_char) + 1
        except UnclosedQuoteError:
            # Not really quoted after all; fall through to the unquoted path.
            already_quoted = False
        else:
            if end_quote < len(identifier) - 1:
                if identifier[end_quote + 1] == '.':
                    # "quoted".rest -- recurse on the remainder.
                    dot = end_quote + 1
                    first_identifier = identifier[:dot]
                    next_identifier = identifier[dot + 1:]
                    further_identifiers = _identifier_parse(next_identifier, quote_char)
                    further_identifiers.insert(0, first_identifier)
                else:
                    raise SQLParseError('User escaped identifiers must escape extra quotes')
            else:
                further_identifiers = [identifier]
    if not already_quoted:
        try:
            dot = identifier.index('.')
        except ValueError:
            # No dot: quote the whole thing (doubling embedded quotes).
            identifier = identifier.replace(quote_char, quote_char * 2)
            identifier = ''.join((quote_char, identifier, quote_char))
            further_identifiers = [identifier]
        else:
            if dot == 0 or dot >= len(identifier) - 1:
                # Leading/trailing dot: treat the dot as part of the name.
                identifier = identifier.replace(quote_char, quote_char * 2)
                identifier = ''.join((quote_char, identifier, quote_char))
                further_identifiers = [identifier]
            else:
                # name.rest -- quote the head, recurse on the tail.
                first_identifier = identifier[:dot]
                next_identifier = identifier[dot + 1:]
                further_identifiers = _identifier_parse(next_identifier, quote_char)
                first_identifier = first_identifier.replace(quote_char, quote_char * 2)
                first_identifier = ''.join((quote_char, first_identifier, quote_char))
                further_identifiers.insert(0, first_identifier)
    return further_identifiers
def pg_quote_identifier(identifier, id_type):
    """Quote a dotted PostgreSQL identifier of the given *id_type*
    ('database', 'schema', 'table', 'column' or 'role'), enforcing the
    maximum number of dotted levels that type allows."""
    fragments = _identifier_parse(identifier, quote_char='"')
    max_dot_level = _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]
    if len(fragments) > max_dot_level:
        raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, max_dot_level))
    return '.'.join(fragments)
def mysql_quote_identifier(identifier, id_type):
    """Quote a dotted MySQL identifier of the given *id_type*, enforcing
    the maximum number of dotted levels that type allows."""
    fragments = _identifier_parse(identifier, quote_char='`')
    max_dot_level = _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]
    if len(fragments) > max_dot_level:
        raise SQLParseError('MySQL does not support %s with more than %i dots' % (id_type, max_dot_level))
    # A quoted '*' fragment means "all columns"; MySQL wants it unquoted.
    return '.'.join('*' if fragment == '`*`' else fragment
                    for fragment in fragments)
| gpl-3.0 |
jschneier/flask-admin | examples/geo_alchemy/app.py | 39 | 2044 | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import flask_admin as admin
from geoalchemy2.types import Geometry
from flask_admin.contrib.geoa import ModelView
# Create application
app = Flask(__name__)
# config.py must define SQLALCHEMY_DATABASE_URI etc. (loaded at import time).
app.config.from_pyfile('config.py')
db = SQLAlchemy(app)
class Point(db.Model):
    """Demo model with a single-point geometry column."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("POINT"))
class MultiPoint(db.Model):
    """Demo model with a multi-point geometry column."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("MULTIPOINT"))
class Polygon(db.Model):
    """Demo model with a polygon geometry column."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("POLYGON"))
class MultiPolygon(db.Model):
    """Demo model with a multi-polygon geometry column."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("MULTIPOLYGON"))
class LineString(db.Model):
    """Demo model with a line-string geometry column."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("LINESTRING"))
class MultiLineString(db.Model):
    """Demo model with a multi-line-string geometry column."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("MULTILINESTRING"))
# Flask views
@app.route('/')
def index():
    """Landing page: a single link into the Flask-Admin UI."""
    return '<a href="/admin/">Click me to get to Admin!</a>'
# Create admin
# NOTE(review): this rebinds the name `admin` (previously the imported
# flask_admin module) to the Admin instance; works because the module is
# not referenced afterwards, but it shadows the import.
admin = admin.Admin(app, name='Example: GeoAlchemy', template_mode='bootstrap3')

# Add views, grouped by geometry kind in the admin menu.
admin.add_view(ModelView(Point, db.session, category='Points'))
admin.add_view(ModelView(MultiPoint, db.session, category='Points'))
admin.add_view(ModelView(Polygon, db.session, category='Polygons'))
admin.add_view(ModelView(MultiPolygon, db.session, category='Polygons'))
admin.add_view(ModelView(LineString, db.session, category='Lines'))
admin.add_view(ModelView(MultiLineString, db.session, category='Lines'))

if __name__ == '__main__':
    # Create all tables on first run (no migrations in this example).
    db.create_all()

    # Start app
    app.run(debug=True)
| bsd-3-clause |
hyphaltip/GEN220_2015 | Examples/HW3_SNPCount_solution.py | 1 | 2177 |
# I ran this first
# bedtools intersect -wa -a data/Oryza_sativa.IRGSP-1.0.27.chromosome.6.gff3 -b data/rice_chr6_3kSNPs_filt.bed | grep "\tgene\t" | cut -f4,5,9 | sort | uniq -c > genes_with_snps.count
# I don't care about what the specific SNPs are so I use
# -wa option to write out the original gene feature
# I only want the gene features, so I use grep "\tgene\t" to get those
# -- its possible this may not work on some unix - you can try
# grep -P "\tgene\t" to support extended pattern search
# I use cut -f4,5,9 to get the start,end, and group name for the gene
# I use sort and uniq -c to collapse redundant ones into a single line with
# a count

# print our report header
# some code to print the list with "\t" separating the input
print ("\t".join([str(x) for x in ("gene_name","length","SNP","SNPs_per_kb")]))
# also could write as
#print ("gene_name"+"\t"+"length"+"\t"+"SNP"+"\t"+"SNPs_per_kb");

# Use a context manager so the file is closed even if a line fails to parse.
# (The previous version never closed the handle and also created an unused
# gene_count_dict dictionary, which has been removed.)
with open("genes_with_snps.count", "r") as counts:
    for geneline in counts:
        row = geneline.split()
        # most of the info we need is in the BEDtools output:
        # "<count> <start> <end> <group>" per line
        snp_count = int(row[0])   # uniq -c count of SNPs in this gene
        gene_start = int(row[1])  # going to use this as a number later
        gene_end = int(row[2])    # going to use this as a number later
        gene_group = row[3]
        # compute the gene length from the start and end columns
        # add one because features start at 1 and are inclusive in GFF
        # e.g. a feature going from 10..11 is length 2 so
        # 11-10 = 1; need to add 1 to make it 2
        gene_len = gene_end - gene_start + 1
        # use these bits to cut out the gene name (regular expression would
        # work too); group column looks like "ID:name;..."
        ids = gene_group.split(":")
        genestuff = ids[1].split(";")
        gene_name = genestuff[0]
        # print it all out
        SNP_ratio = (1000.0 * snp_count / gene_len) # mult by 1000 to get it in SNPs / kb
        # some code to print the list with "\t" separating the input
        print ("\t".join([str(x) for x in (gene_name, gene_len, snp_count,
                                           SNP_ratio)]))

# one more option, if you wanted these data to be sorted by the SNP ratio
# see HW3_SNPCount_solution_sorted.py
| gpl-2.0 |
chuijiaolianying/flask | flask/templating.py | 141 | 4529 | # -*- coding: utf-8 -*-
"""
flask.templating
~~~~~~~~~~~~~~~~
Implements the bridge to Jinja2.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from jinja2 import BaseLoader, Environment as BaseEnvironment, \
TemplateNotFound
from .globals import _request_ctx_stack, _app_ctx_stack
from .signals import template_rendered, before_render_template
def _default_template_ctx_processor():
    """Default template context processor injecting ``g``, ``request``
    and ``session`` into the template context when the corresponding
    application / request contexts are active.
    """
    context = {}
    app_ctx = _app_ctx_stack.top
    if app_ctx is not None:
        context['g'] = app_ctx.g
    req_ctx = _request_ctx_stack.top
    if req_ctx is not None:
        context.update(request=req_ctx.request, session=req_ctx.session)
    return context
class Environment(BaseEnvironment):
    """Works like a regular Jinja2 environment but has some additional
    knowledge of how Flask's blueprint works so that it can prepend the
    name of the blueprint to referenced templates if necessary.
    """

    def __init__(self, app, **options):
        # Only build the global loader when the caller did not supply one;
        # an explicitly passed 'loader' key (even None) is left untouched.
        if 'loader' not in options:
            options['loader'] = app.create_global_jinja_loader()
        super(Environment, self).__init__(**options)
        self.app = app
class DispatchingJinjaLoader(BaseLoader):
    """A loader that looks for templates in the application and all
    the blueprint folders.
    """

    def __init__(self, app):
        self.app = app

    def get_source(self, environment, template):
        """Return the first matching template source.

        With ``EXPLAIN_TEMPLATE_LOADING`` enabled, every loader is probed
        (even after a hit) so that the full list of attempts can be
        reported by the debug helper.
        """
        explain = self.app.config['EXPLAIN_TEMPLATE_LOADING']
        attempts = []
        tmplrv = None
        for srcobj, loader in self._iter_loaders(template):
            try:
                rv = loader.get_source(environment, template)
                if tmplrv is None:
                    # First successful lookup wins.
                    tmplrv = rv
                if not explain:
                    # NOTE: breaking here skips the append below, so the
                    # winning attempt is only recorded in explain mode.
                    break
            except TemplateNotFound:
                rv = None
            attempts.append((loader, srcobj, rv))
        if explain:
            # Imported lazily: debughelpers is only needed in explain mode.
            from .debughelpers import explain_template_loading_attempts
            explain_template_loading_attempts(self.app, template, attempts)
        if tmplrv is not None:
            return tmplrv
        raise TemplateNotFound(template)

    def _iter_loaders(self, template):
        # Yield (source_object, loader) pairs: the application's own loader
        # first, then each registered blueprint's loader.
        loader = self.app.jinja_loader
        if loader is not None:
            yield self.app, loader
        for blueprint in self.app.iter_blueprints():
            loader = blueprint.jinja_loader
            if loader is not None:
                yield blueprint, loader

    def list_templates(self):
        # Union of template names known to the app and to all blueprints;
        # a set de-duplicates names visible from several loaders.
        result = set()
        loader = self.app.jinja_loader
        if loader is not None:
            result.update(loader.list_templates())
        for blueprint in self.app.iter_blueprints():
            loader = blueprint.jinja_loader
            if loader is not None:
                for template in loader.list_templates():
                    result.add(template)
        return list(result)
def _render(template, context, app):
    """Render *template* with *context* and fire the before/after signals."""
    before_render_template.send(app, template=template, context=context)
    rendered = template.render(context)
    template_rendered.send(app, template=template, context=context)
    return rendered
def render_template(template_name_or_list, **context):
    """Renders a template from the template folder with the given
    context.

    :param template_name_or_list: the name of the template to be
                                  rendered, or an iterable with template names
                                  the first one existing will be rendered
    :param context: the variables that should be available in the
                    context of the template.
    """
    ctx = _app_ctx_stack.top
    ctx.app.update_template_context(context)
    template = ctx.app.jinja_env.get_or_select_template(template_name_or_list)
    return _render(template, context, ctx.app)
def render_template_string(source, **context):
    """Renders a template from the given template source string
    with the given context. Template variables will be autoescaped.

    :param source: the source code of the template to be
                   rendered
    :param context: the variables that should be available in the
                    context of the template.
    """
    ctx = _app_ctx_stack.top
    ctx.app.update_template_context(context)
    template = ctx.app.jinja_env.from_string(source)
    return _render(template, context, ctx.app)
| bsd-3-clause |
Zopieux/django-blog-zinnia | zinnia/settings.py | 2 | 3641 | """Settings of Zinnia"""
from django.conf import settings
from mots_vides import stop_words
# Every setting below is overridable from the Django project settings via a
# ZINNIA_* name; the second getattr argument is the fallback default.

# -- Pinging / syndication directories ------------------------------------
PING_DIRECTORIES = getattr(settings, 'ZINNIA_PING_DIRECTORIES',
                           ('http://django-blog-zinnia.com/xmlrpc/',))
SAVE_PING_DIRECTORIES = getattr(settings, 'ZINNIA_SAVE_PING_DIRECTORIES',
                                bool(PING_DIRECTORIES))
SAVE_PING_EXTERNAL_URLS = getattr(settings, 'ZINNIA_PING_EXTERNAL_URLS', True)

# -- URLs / general blog behaviour ----------------------------------------
TRANSLATED_URLS = getattr(settings, 'ZINNIA_TRANSLATED_URLS', False)
COPYRIGHT = getattr(settings, 'ZINNIA_COPYRIGHT', 'Zinnia')
PAGINATION = getattr(settings, 'ZINNIA_PAGINATION', 10)
ALLOW_EMPTY = getattr(settings, 'ZINNIA_ALLOW_EMPTY', True)
ALLOW_FUTURE = getattr(settings, 'ZINNIA_ALLOW_FUTURE', True)

# -- Entry model and templates --------------------------------------------
ENTRY_BASE_MODEL = getattr(settings, 'ZINNIA_ENTRY_BASE_MODEL',
                           'zinnia.models_bases.entry.AbstractEntry')
ENTRY_DETAIL_TEMPLATES = getattr(
    settings, 'ZINNIA_ENTRY_DETAIL_TEMPLATES', [])
ENTRY_CONTENT_TEMPLATES = getattr(
    settings, 'ZINNIA_ENTRY_CONTENT_TEMPLATES', [])
ENTRY_LOOP_TEMPLATES = getattr(
    settings, 'ZINNIA_ENTRY_LOOP_TEMPLATES', {})
# Guarantee the 'default' key exists so template lookups never KeyError.
ENTRY_LOOP_TEMPLATES.setdefault('default', {})

# -- Markup rendering ------------------------------------------------------
MARKUP_LANGUAGE = getattr(settings, 'ZINNIA_MARKUP_LANGUAGE', 'html')
MARKDOWN_EXTENSIONS = getattr(settings, 'ZINNIA_MARKDOWN_EXTENSIONS', [])
RESTRUCTUREDTEXT_SETTINGS = getattr(
    settings, 'ZINNIA_RESTRUCTUREDTEXT_SETTINGS', {})

# -- Entry preview / truncation -------------------------------------------
PREVIEW_SPLITTERS = getattr(settings, 'ZINNIA_PREVIEW_SPLITTERS',
                            ['<!-- more -->', '<!--more-->'])
PREVIEW_MAX_WORDS = getattr(settings, 'ZINNIA_PREVIEW_MAX_WORDS', 55)
PREVIEW_MORE_STRING = getattr(settings, 'ZINNIA_PREVIEW_MORE_STRING', ' ...')

# -- Discussion (comments / pingbacks / trackbacks) ------------------------
# The *_AFTER settings are numbers of days; None disables auto-closing.
AUTO_CLOSE_PINGBACKS_AFTER = getattr(
    settings, 'ZINNIA_AUTO_CLOSE_PINGBACKS_AFTER', None)
AUTO_CLOSE_TRACKBACKS_AFTER = getattr(
    settings, 'ZINNIA_AUTO_CLOSE_TRACKBACKS_AFTER', None)
AUTO_CLOSE_COMMENTS_AFTER = getattr(
    settings, 'ZINNIA_AUTO_CLOSE_COMMENTS_AFTER', None)
AUTO_MODERATE_COMMENTS = getattr(settings, 'ZINNIA_AUTO_MODERATE_COMMENTS',
                                 False)
MAIL_COMMENT_REPLY = getattr(settings, 'ZINNIA_MAIL_COMMENT_REPLY', False)
MAIL_COMMENT_AUTHORS = getattr(settings, 'ZINNIA_MAIL_COMMENT_AUTHORS', True)
MAIL_COMMENT_NOTIFICATION_RECIPIENTS = getattr(
    settings, 'ZINNIA_MAIL_COMMENT_NOTIFICATION_RECIPIENTS',
    [manager_tuple[1] for manager_tuple in settings.MANAGERS])
COMMENT_MIN_WORDS = getattr(settings, 'ZINNIA_COMMENT_MIN_WORDS', 4)
COMMENT_FLAG_USER_ID = getattr(settings, 'ZINNIA_COMMENT_FLAG_USER_ID', 1)

# -- Media / feeds / misc ---------------------------------------------------
UPLOAD_TO = getattr(settings, 'ZINNIA_UPLOAD_TO', 'uploads/zinnia')
PROTOCOL = getattr(settings, 'ZINNIA_PROTOCOL', 'http')
FEEDS_FORMAT = getattr(settings, 'ZINNIA_FEEDS_FORMAT', 'rss')
FEEDS_MAX_ITEMS = getattr(settings, 'ZINNIA_FEEDS_MAX_ITEMS', 15)
PINGBACK_CONTENT_LENGTH = getattr(settings,
                                  'ZINNIA_PINGBACK_CONTENT_LENGTH', 300)

# -- Search / comparison / spam / URL shortening ----------------------------
SEARCH_FIELDS = getattr(settings, 'ZINNIA_SEARCH_FIELDS',
                        ['title', 'lead', 'content',
                         'excerpt', 'image_caption', 'tags'])
COMPARISON_FIELDS = getattr(settings, 'ZINNIA_COMPARISON_FIELDS',
                            ['title', 'lead', 'content',
                             'excerpt', 'image_caption', 'tags'])
SPAM_CHECKER_BACKENDS = getattr(settings, 'ZINNIA_SPAM_CHECKER_BACKENDS',
                                [])
URL_SHORTENER_BACKEND = getattr(settings, 'ZINNIA_URL_SHORTENER_BACKEND',
                                'zinnia.url_shortener.backends.default')
# Stop-word list derived from the language part of LANGUAGE_CODE ("en-us" -> "en").
STOP_WORDS = stop_words(settings.LANGUAGE_CODE.split('-')[0])
| bsd-3-clause |
stackforge/cloudbase-init | cloudbaseinit/conf/default.py | 4 | 15944 | # Copyright 2016 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Config options available all across the project."""
from oslo_config import cfg
from cloudbaseinit.conf import base as conf_base
from cloudbaseinit import constant
class GlobalOptions(conf_base.Options):
    """Config options available all across the project."""

    def __init__(self, config):
        # All options live in the oslo.config DEFAULT group.
        super(GlobalOptions, self).__init__(config, group="DEFAULT")
        # Regular (config-file) options.
        self._options = [
            cfg.BoolOpt(
                'allow_reboot', default=True,
                help='Allows OS reboots requested by plugins'),
            cfg.BoolOpt(
                'stop_service_on_exit', default=True,
                help='In case of execution as a service, specifies if the '
                     'service must be gracefully stopped before exiting'),
            cfg.BoolOpt(
                'check_latest_version', default=False,
                help='Check if there is a newer version of cloudbase-init '
                     'available.  If this option is activated, a log '
                     'message will be  emitted if there is a newer version '
                     'available.'),
            cfg.IntOpt(
                'retry_count', default=5,
                help='Max. number of attempts for fetching metadata in '
                     'case of transient errors'),
            cfg.FloatOpt(
                'retry_count_interval', default=4,
                help='Interval between attempts in case of transient errors, '
                     'expressed in seconds'),
            cfg.StrOpt(
                'mtools_path', default=None,
                help='Path to "mtools" program suite, used for interacting '
                     'with VFAT filesystems'),
            cfg.StrOpt(
                'bsdtar_path', default='bsdtar.exe',
                help='Path to "bsdtar", used to extract ISO ConfigDrive '
                     'files'),
            cfg.BoolOpt(
                'netbios_host_name_compatibility', default=True,
                help='Truncates the hostname to 15 characters for Netbios '
                     'compatibility'),
            cfg.StrOpt(
                'logging_serial_port_settings', default=None,
                help='Serial port logging settings. Format: '
                     '"port,baudrate,parity,bytesize", e.g.: '
                     '"COM1,115200,N,8". Set to None (default) to disable.'),
            # -- Windows licensing / activation -----------------------------
            cfg.BoolOpt(
                'activate_windows', default=False,
                help='Activates Windows automatically'),
            cfg.BoolOpt(
                'set_kms_product_key', default=False,
                help='Sets the KMS product key for this operating system'),
            cfg.BoolOpt(
                'set_avma_product_key', default=False,
                help='Sets the AVMA product key for this operating system'),
            cfg.StrOpt(
                'kms_host', default=None,
                help='The KMS host address in form <host>[:<port>], '
                     'e.g: "kmshost:1688"'),
            cfg.BoolOpt(
                'log_licensing_info', default=True,
                help='Logs the operating system licensing information'),
            # -- WinRM listeners --------------------------------------------
            cfg.BoolOpt(
                'winrm_enable_basic_auth', default=True,
                help='Enables basic authentication for the WinRM '
                     'HTTPS listener'),
            cfg.BoolOpt(
                'winrm_configure_http_listener', default=False,
                help='Configures the WinRM HTTP listener'),
            cfg.BoolOpt(
                'winrm_configure_https_listener', default=True,
                help='Configures the WinRM HTTPS listener'),
            # -- Storage ----------------------------------------------------
            cfg.ListOpt(
                'volumes_to_extend', default=None,
                help='List of volumes that need to be extended '
                     'if contiguous space is available on the disk. '
                     'By default all the available volumes can be extended. '
                     'Volumes must be specified using a comma separated list '
                     'of volume indexes, e.g.: "1,2"'),
            cfg.StrOpt(
                'san_policy', default=None,
                choices=[constant.SAN_POLICY_ONLINE_STR,
                         constant.SAN_POLICY_OFFLINE_STR,
                         constant.SAN_POLICY_OFFLINE_SHARED_STR],
                help='If not None, the SAN policy is set to the given value'),
            cfg.StrOpt(
                'local_scripts_path', default=None,
                help='Path location containing scripts to be executed when '
                     'the plugin runs'),
            cfg.BoolOpt(
                'mtu_use_dhcp_config', default=True,
                help='Configures the network interfaces MTU based on the '
                     'values provided via DHCP'),
            # -- User / group creation --------------------------------------
            cfg.StrOpt(
                'username', default='Admin', help='User to be added to the '
                'system or updated if already existing'),
            cfg.ListOpt(
                'groups', default=['Administrators'],
                help='List of local groups to which the user specified in '
                     '"username" will be added'),
            cfg.BoolOpt(
                'rename_admin_user', default=False,
                help='Renames the builtin admin user instead of creating a '
                     'new user'),
            cfg.StrOpt(
                'heat_config_dir', default='C:\\cfn',
                help='The directory where the Heat configuration files must '
                     'be saved'),
            # -- Time synchronization ---------------------------------------
            cfg.BoolOpt(
                'ntp_enable_service', default=True,
                help='Enables the NTP client service'),
            cfg.BoolOpt(
                'ntp_use_dhcp_config', default=False,
                help='Configures NTP client time synchronization using '
                     'the NTP servers provided via DHCP'),
            cfg.BoolOpt(
                'real_time_clock_utc', default=False,
                help='Sets the real time clock to use universal time (True) '
                     'or local time (False)'),
            # -- Password handling ------------------------------------------
            cfg.BoolOpt(
                'inject_user_password', default=True,
                help='Set the password provided in the configuration. '
                     'If False or no password is provided, a random one '
                     'will be set'),
            cfg.StrOpt(
                'first_logon_behaviour',
                default=constant.CLEAR_TEXT_INJECTED_ONLY,
                choices=constant.LOGON_PASSWORD_CHANGE_OPTIONS,
                help='Control the behaviour of what happens at '
                     'next logon. If this option is set to `always`, '
                     'then the user will be forced to change the password '
                     'at next logon. If it is set to '
                     '`clear_text_injected_only`, '
                     'then the user will have to change the password only if '
                     'the password is a clear text password, coming from the '
                     'metadata. The last option is `no`, when the user is '
                     'never forced to change the password.'),
            # -- Plugin / service selection (dotted class paths) ------------
            cfg.ListOpt(
                'metadata_services',
                default=[
                    'cloudbaseinit.metadata.services.httpservice.HttpService',
                    'cloudbaseinit.metadata.services'
                    '.configdrive.ConfigDriveService',
                    'cloudbaseinit.metadata.services.ec2service.EC2Service',
                    'cloudbaseinit.metadata.services'
                    '.maasservice.MaaSHttpService',
                    'cloudbaseinit.metadata.services.cloudstack.CloudStack',
                    'cloudbaseinit.metadata.services'
                    '.opennebulaservice.OpenNebulaService',
                ],
                help='List of enabled metadata service classes, '
                     'to be tested for availability in the provided order. '
                     'The first available service will be used to retrieve '
                     'metadata'),
            cfg.ListOpt(
                'plugins',
                default=[
                    'cloudbaseinit.plugins.common.mtu.MTUPlugin',
                    'cloudbaseinit.plugins.windows.ntpclient'
                    '.NTPClientPlugin',
                    'cloudbaseinit.plugins.common.sethostname'
                    '.SetHostNamePlugin',
                    'cloudbaseinit.plugins.windows.createuser'
                    '.CreateUserPlugin',
                    'cloudbaseinit.plugins.common.networkconfig'
                    '.NetworkConfigPlugin',
                    'cloudbaseinit.plugins.windows.licensing'
                    '.WindowsLicensingPlugin',
                    'cloudbaseinit.plugins.common.sshpublickeys'
                    '.SetUserSSHPublicKeysPlugin',
                    'cloudbaseinit.plugins.windows.extendvolumes'
                    '.ExtendVolumesPlugin',
                    'cloudbaseinit.plugins.common.userdata.UserDataPlugin',
                    'cloudbaseinit.plugins.common.setuserpassword.'
                    'SetUserPasswordPlugin',
                    'cloudbaseinit.plugins.windows.winrmlistener.'
                    'ConfigWinRMListenerPlugin',
                    'cloudbaseinit.plugins.windows.winrmcertificateauth.'
                    'ConfigWinRMCertificateAuthPlugin',
                    'cloudbaseinit.plugins.common.localscripts'
                    '.LocalScriptsPlugin',
                ],
                help='List of enabled plugin classes, '
                     'to be executed in the provided order'),
            cfg.ListOpt(
                'user_data_plugins',
                default=[
                    'cloudbaseinit.plugins.common.userdataplugins.parthandler.'
                    'PartHandlerPlugin',
                    'cloudbaseinit.plugins.common.userdataplugins.cloudconfig.'
                    'CloudConfigPlugin',
                    'cloudbaseinit.plugins.common.userdataplugins'
                    '.cloudboothook.CloudBootHookPlugin',
                    'cloudbaseinit.plugins.common.userdataplugins.shellscript.'
                    'ShellScriptPlugin',
                    'cloudbaseinit.plugins.common.userdataplugins'
                    '.multipartmixed.MultipartMixedPlugin',
                    'cloudbaseinit.plugins.common.userdataplugins.heat.'
                    'HeatPlugin',
                ],
                help='List of enabled userdata content plugins'),
            cfg.ListOpt(
                'cloud_config_plugins', default=[],
                help='List which contains the name of the cloud config '
                     'plugins ordered by priority.'),
            # -- RDP / boot configuration -----------------------------------
            cfg.BoolOpt(
                'rdp_set_keepalive', default=True,
                help='Sets the RDP KeepAlive policy'),
            cfg.StrOpt(
                'bcd_boot_status_policy',
                default=None,
                choices=[constant.POLICY_IGNORE_ALL_FAILURES],
                help='Sets the Windows BCD boot status policy'),
            cfg.BoolOpt(
                'bcd_enable_auto_recovery', default=False,
                help='Enables or disables the BCD auto recovery'),
            cfg.BoolOpt(
                'set_unique_boot_disk_id', default=True,
                help='Sets a new random unique id on the boot disk to avoid '
                     'collisions'),
            cfg.IntOpt(
                'display_idle_timeout', default=0,
                help='The idle timeout, in seconds, before powering off '
                     'the display. Set 0 to leave the display always on'),
            # -- Page file / trim -------------------------------------------
            cfg.ListOpt(
                'page_file_volume_labels', default=[],
                help='Labels of volumes on which a Windows page file needs to '
                     'be created. E.g.: "Temporary Storage"'),
            cfg.ListOpt(
                'page_file_volume_mount_points', default=[],
                help='Volume mount points on which a Windows page file needs '
                     'to be created. E.g.: '
                     '"\\\\?\\GLOBALROOT\\device\\Harddisk1\\Partition1\\"'),
            cfg.BoolOpt(
                'trim_enabled', default=False,
                help='Enables or disables TRIM delete notifications for '
                     'the underlying storage device.'),
            # -- Userdata processing ----------------------------------------
            cfg.BoolOpt(
                'process_userdata', default=True,
                help='Processes the userdata content based on the type, e.g. '
                     'executing a PowerShell script'),
            cfg.StrOpt(
                'userdata_save_path',
                default=None,
                help='Copies the userdata to the given file path. The path '
                     'can include environment variables that will be expanded,'
                     ' e.g. "%%SYSTEMDRIVE%%\\CloudbaseInit\\UserData.bin"'),
            cfg.BoolOpt(
                'enable_automatic_updates', default=None,
                help='If set, enables or disables automatic operating '
                     'system updates.'),
            # -- Provisioning status reporting ------------------------------
            cfg.BoolOpt(
                'metadata_report_provisioning_started', default=False,
                help='Reports to the metadata service that provisioning has '
                     'started'),
            cfg.BoolOpt(
                'metadata_report_provisioning_completed', default=False,
                help='Reports to the metadata service that provisioning '
                     'completed successfully or failed'),
            # -- Ephemeral disk ---------------------------------------------
            cfg.StrOpt(
                'ephemeral_disk_volume_label', default=None,
                help='Ephemeral disk volume label, e.g.: "Temporary Storage"'),
            cfg.StrOpt(
                'ephemeral_disk_volume_mount_point', default=None,
                help='Ephemeral disk volume mount point, e.g.:'
                     '"\\\\?\\GLOBALROOT\\device\\Harddisk1\\Partition1\\"'),
            cfg.StrOpt(
                'ephemeral_disk_data_loss_warning_path', default=None,
                help='Ephemeral disk data loss warning path, relative to the '
                     'ephemeral disk volume path. E.g.: '
                     'DATALOSS_WARNING_README.txt'),
            cfg.IntOpt(
                'user_password_length', default=20,
                help='The length of the generated password for the user '
                     'defined by the `username` config option.'),
        ]
        # Options also exposed on the command line.
        self._cli_options = [
            cfg.BoolOpt(
                'reset_service_password', default=True,
                help='If set to True, the service user password will be '
                     'reset at each execution with a new random value of '
                     'appropriate length and complexity, unless the user is '
                     'a built-in or domain account.'
                     'This is needed to avoid "pass the hash" attacks on '
                     'Windows cloned instances.'),
        ]

    def register(self):
        """Register the current options to the global ConfigOpts object."""
        self._config.register_cli_opts(self._cli_options)
        # NOTE(review): the CLI options are passed to register_opts as well,
        # after already being registered above — presumably relying on
        # oslo.config treating re-registration of the same Opt object as a
        # no-op; confirm against the oslo.config version in use.
        self._config.register_opts(self._options + self._cli_options)

    def list(self):
        """Return a list which contains all the available options."""
        # NOTE(review): this omits self._cli_options — verify that is
        # intentional (e.g. for config-file generation) rather than an
        # oversight.
        return self._options
| apache-2.0 |
projectcalico/calico-nova | nova/tests/unit/scheduler/filters/test_compute_filters.py | 68 | 2286 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.scheduler.filters import compute_filter
from nova import test
from nova.tests.unit.scheduler import fakes
@mock.patch('nova.servicegroup.API.service_is_up')
class TestComputeFilter(test.NoDBTestCase):
def test_compute_filter_manual_disable(self, service_up_mock):
filt_cls = compute_filter.ComputeFilter()
filter_properties = {'instance_type': {'memory_mb': 1024}}
service = {'disabled': True}
host = fakes.FakeHostState('host1', 'node1',
{'free_ram_mb': 1024, 'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
self.assertFalse(service_up_mock.called)
def test_compute_filter_sgapi_passes(self, service_up_mock):
filt_cls = compute_filter.ComputeFilter()
filter_properties = {'instance_type': {'memory_mb': 1024}}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'node1',
{'free_ram_mb': 1024, 'service': service})
service_up_mock.return_value = True
self.assertTrue(filt_cls.host_passes(host, filter_properties))
service_up_mock.assert_called_once_with(service)
def test_compute_filter_sgapi_fails(self, service_up_mock):
filt_cls = compute_filter.ComputeFilter()
filter_properties = {'instance_type': {'memory_mb': 1024}}
service = {'disabled': False, 'updated_at': 'now'}
host = fakes.FakeHostState('host1', 'node1',
{'free_ram_mb': 1024, 'service': service})
service_up_mock.return_value = False
self.assertFalse(filt_cls.host_passes(host, filter_properties))
service_up_mock.assert_called_once_with(service)
| apache-2.0 |
martinpitt/systemd | test/networkd-test.py | 1 | 41354 | #!/usr/bin/env python3
# SPDX-License-Identifier: LGPL-2.1+
#
# networkd integration test
# This uses temporary configuration in /run and temporary veth devices, and
# does not write anything on disk or change any system configuration;
# but it assumes (and checks at the beginning) that networkd is not currently
# running.
#
# This can be run on a normal installation, in QEMU, nspawn (with
# --private-network), LXD (with "--config raw.lxc=lxc.aa_profile=unconfined"),
# or LXC system containers. You need at least the "ip" tool from the iproute
# package; it is recommended to install dnsmasq too to get full test coverage.
#
# ATTENTION: This uses the *installed* networkd, not the one from the built
# source tree.
#
# © 2015 Canonical Ltd.
# Author: Martin Pitt <martin.pitt@ubuntu.com>
import errno
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import time
import unittest
# True when the optional dnsmasq binary is available (extends test coverage).
HAVE_DNSMASQ = shutil.which('dnsmasq') is not None
# Runtime directory networkd reads .network/.netdev unit files from.
NETWORK_UNITDIR = '/run/systemd/network'
# Installed wait-online helper; its location differs between distributions.
NETWORKD_WAIT_ONLINE = shutil.which('systemd-networkd-wait-online',
                                    path='/usr/lib/systemd:/lib/systemd')
RESOLV_CONF = '/run/systemd/resolve/resolv.conf'
def setUpModule():
    """Initialize the environment, and perform sanity checks on it."""
    if NETWORKD_WAIT_ONLINE is None:
        raise OSError(errno.ENOENT, 'systemd-networkd-wait-online not found')

    # Do not run any tests if the system is using networkd already.
    networkd_active = subprocess.call(
        ['systemctl', 'is-active', '--quiet',
         'systemd-networkd.service']) == 0
    if networkd_active:
        raise unittest.SkipTest('networkd is already active')

    # Avoid "Failed to open /dev/tty" errors in containers.
    os.environ['SYSTEMD_LOG_TARGET'] = 'journal'

    # Ensure the unit directory exists so tests can dump files into it.
    os.makedirs(NETWORK_UNITDIR, exist_ok=True)
class NetworkdTestingUtilities:
    """Provide a set of utility functions to facilitate networkd tests.

    This class must be inherited along with unittest.TestCase to define
    some required methods (assert_*, fail, addCleanup).
    """

    def add_veth_pair(self, veth, peer, veth_options=(), peer_options=()):
        """Add a veth interface pair, and queue them to be removed."""
        subprocess.check_call(['ip', 'link', 'add', 'name', veth] +
                              list(veth_options) +
                              ['type', 'veth', 'peer', 'name', peer] +
                              list(peer_options))
        # Deleting one end of the pair removes both interfaces.
        self.addCleanup(subprocess.call, ['ip', 'link', 'del', 'dev', peer])

    def write_network(self, unit_name, contents):
        """Write a network unit file, and queue it to be removed."""
        unit_path = os.path.join(NETWORK_UNITDIR, unit_name)
        with open(unit_path, 'w') as unit:
            unit.write(contents)
        self.addCleanup(os.remove, unit_path)

    def write_network_dropin(self, unit_name, dropin_name, contents):
        """Write a network unit drop-in, and queue it to be removed."""
        dropin_dir = os.path.join(NETWORK_UNITDIR, "{}.d".format(unit_name))
        dropin_path = os.path.join(dropin_dir, "{}.conf".format(dropin_name))
        os.makedirs(dropin_dir, exist_ok=True)
        # addCleanup runs in reverse order, so the .conf file registered
        # below is removed before the directory is rmdir'ed.
        self.addCleanup(os.rmdir, dropin_dir)
        with open(dropin_path, 'w') as dropin:
            dropin.write(contents)
        self.addCleanup(os.remove, dropin_path)

    def read_attr(self, link, attribute):
        """Read a link attribute from the sysfs."""
        # Note we don't want to check if interface `link' is managed, we
        # want to evaluate link variable and pass the value of the link to
        # assert_link_states e.g. eth0=managed.
        self.assert_link_states(**{link: 'managed'})
        with open(os.path.join('/sys/class/net', link, attribute)) as f:
            return f.readline().strip()

    def assert_link_states(self, **kwargs):
        """Match networkctl link states to the given ones.

        Each keyword argument should be the name of a network interface
        with its expected value of the "SETUP" column in output from
        networkctl.  The interfaces have five seconds to come online
        before the check is performed.  Every specified interface must
        be present in the output, and any other interfaces found in the
        output are ignored.

        A special interface state "managed" is supported, which matches
        any value in the "SETUP" column other than "unmanaged".
        """
        if not kwargs:
            return
        interfaces = set(kwargs)

        # Wait for the requested interfaces, but don't fail for them.
        subprocess.call([NETWORKD_WAIT_ONLINE, '--timeout=5'] +
                        ['--interface={}'.format(iface) for iface in kwargs])

        # Validate each link state found in the networkctl output.
        out = subprocess.check_output(['networkctl', '--no-legend']).rstrip()
        for line in out.decode('utf-8').split('\n'):
            fields = line.split()
            if len(fields) >= 5 and fields[1] in kwargs:
                iface = fields[1]
                expected = kwargs[iface]
                actual = fields[-1]
                # "managed" accepts any state except the literal "unmanaged".
                if (actual != expected and
                        not (expected == 'managed' and actual != 'unmanaged')):
                    self.fail("Link {} expects state {}, found {}".format(iface, expected, actual))
                interfaces.remove(iface)

        # Ensure that all requested interfaces have been covered.
        if interfaces:
            self.fail("Missing links in status output: {}".format(interfaces))
class BridgeTest(NetworkdTestingUtilities, unittest.TestCase):
    """Tests for networkd bridge configuration over two dummy ports."""

    def setUp(self):
        # Two dummy ports enslaved to a bridge with a static address;
        # unit file contents start at column 0 on purpose.
        self.write_network('port1.netdev', '''\
[NetDev]
Name=port1
Kind=dummy
MACAddress=12:34:56:78:9a:bc''')
        self.write_network('port2.netdev', '''\
[NetDev]
Name=port2
Kind=dummy
MACAddress=12:34:56:78:9a:bd''')
        self.write_network('mybridge.netdev', '''\
[NetDev]
Name=mybridge
Kind=bridge''')
        self.write_network('port1.network', '''\
[Match]
Name=port1
[Network]
Bridge=mybridge''')
        self.write_network('port2.network', '''\
[Match]
Name=port2
[Network]
Bridge=mybridge''')
        self.write_network('mybridge.network', '''\
[Match]
Name=mybridge
[Network]
DNS=192.168.250.1
Address=192.168.250.33/24
Gateway=192.168.250.1''')
        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])

    def tearDown(self):
        # Stop networkd first so it does not recreate the links.
        subprocess.check_call(['systemctl', 'stop', 'systemd-networkd'])
        subprocess.check_call(['ip', 'link', 'del', 'mybridge'])
        subprocess.check_call(['ip', 'link', 'del', 'port1'])
        subprocess.check_call(['ip', 'link', 'del', 'port2'])

    def test_bridge_init(self):
        # All three links should end up managed by networkd.
        self.assert_link_states(
            port1='managed',
            port2='managed',
            mybridge='managed')

    def test_bridge_port_priority(self):
        # Kernel default bridge port priority is 32.
        self.assertEqual(self.read_attr('port1', 'brport/priority'), '32')
        self.write_network_dropin('port1.network', 'priority', '''\
[Bridge]
Priority=28
''')
        subprocess.check_call(['systemctl', 'restart', 'systemd-networkd'])
        self.assertEqual(self.read_attr('port1', 'brport/priority'), '28')

    def test_bridge_port_priority_set_zero(self):
        """It should be possible to set the bridge port priority to 0"""
        self.assertEqual(self.read_attr('port2', 'brport/priority'), '32')
        self.write_network_dropin('port2.network', 'priority', '''\
[Bridge]
Priority=0
''')
        subprocess.check_call(['systemctl', 'restart', 'systemd-networkd'])
        self.assertEqual(self.read_attr('port2', 'brport/priority'), '0')

    def test_bridge_port_property(self):
        """Test the "[Bridge]" section keys"""
        self.assertEqual(self.read_attr('port2', 'brport/priority'), '32')
        self.write_network_dropin('port2.network', 'property', '''\
[Bridge]
UnicastFlood=true
HairPin=true
UseBPDU=true
FastLeave=true
AllowPortToBeRoot=true
Cost=555
Priority=23
''')
        subprocess.check_call(['systemctl', 'restart', 'systemd-networkd'])
        # Each key should be reflected in the corresponding sysfs attribute.
        self.assertEqual(self.read_attr('port2', 'brport/priority'), '23')
        self.assertEqual(self.read_attr('port2', 'brport/hairpin_mode'), '1')
        self.assertEqual(self.read_attr('port2', 'brport/path_cost'), '555')
        self.assertEqual(self.read_attr('port2', 'brport/multicast_fast_leave'), '1')
        self.assertEqual(self.read_attr('port2', 'brport/unicast_flood'), '1')
        self.assertEqual(self.read_attr('port2', 'brport/bpdu_guard'), '1')
        self.assertEqual(self.read_attr('port2', 'brport/root_block'), '1')
class ClientTestBase(NetworkdTestingUtilities):
    """Provide common methods for testing networkd against servers.

    Subclasses must implement create_iface(), shutdown_iface(), and
    print_server_log() for their particular DHCP server backend; do_test()
    drives the common client-side checks.
    """

    @classmethod
    def setUpClass(klass):
        # remember the previous log level so tearDownClass() can restore it,
        # then crank systemd logging up to debug for the whole class
        klass.orig_log_level = subprocess.check_output(
            ['systemctl', 'show', '--value', '--property', 'LogLevel'],
            universal_newlines=True).strip()
        subprocess.check_call(['systemd-analyze', 'set-log-level', 'debug'])

    @classmethod
    def tearDownClass(klass):
        # restore the log level saved in setUpClass()
        subprocess.check_call(['systemd-analyze', 'set-log-level', klass.orig_log_level])

    def setUp(self):
        # fixed names for the client/router veth pair and the client config
        self.iface = 'test_eth42'
        self.if_router = 'router_eth42'
        self.workdir_obj = tempfile.TemporaryDirectory()
        self.workdir = self.workdir_obj.name
        self.config = 'test_eth42.network'

        # get current journal cursor so show_journal() only prints messages
        # produced during this test
        subprocess.check_output(['journalctl', '--sync'])
        out = subprocess.check_output(['journalctl', '-b', '--quiet',
                                       '--no-pager', '-n0', '--show-cursor'],
                                      universal_newlines=True)
        self.assertTrue(out.startswith('-- cursor:'))
        self.journal_cursor = out.split()[-1]

    def tearDown(self):
        self.shutdown_iface()
        subprocess.call(['systemctl', 'stop', 'systemd-networkd'])
        # best-effort cleanup of the dummy device some tests create
        subprocess.call(['ip', 'link', 'del', 'dummy0'],
                        stderr=subprocess.DEVNULL)

    def show_journal(self, unit):
        '''Show journal of given unit since start of the test'''
        print('---- {} ----'.format(unit))
        subprocess.check_output(['journalctl', '--sync'])
        sys.stdout.flush()
        subprocess.call(['journalctl', '-b', '--no-pager', '--quiet',
                         '--cursor', self.journal_cursor, '-u', unit])

    def create_iface(self, ipv6=False):
        '''Create test interface with DHCP server behind it'''

        raise NotImplementedError('must be implemented by a subclass')

    def shutdown_iface(self):
        '''Remove test interface and stop DHCP server'''

        raise NotImplementedError('must be implemented by a subclass')

    def print_server_log(self):
        '''Print DHCP server log for debugging failures'''

        raise NotImplementedError('must be implemented by a subclass')

    def do_test(self, coldplug=True, ipv6=False, extra_opts='',
                online_timeout=10, dhcp_mode='yes'):
        """Run networkd against the server and verify addresses/DNS.

        coldplug=True creates the interface before starting networkd,
        coldplug=False starts networkd first, and coldplug=None means the
        caller sets up the interface itself.
        """
        try:
            subprocess.check_call(['systemctl', 'start', 'systemd-resolved'])
        except subprocess.CalledProcessError:
            self.show_journal('systemd-resolved.service')
            raise
        self.write_network(self.config, '''\
[Match]
Name={}
[Network]
DHCP={}
{}'''.format(self.iface, dhcp_mode, extra_opts))

        if coldplug:
            # create interface first, then start networkd
            self.create_iface(ipv6=ipv6)
            subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        elif coldplug is not None:
            # start networkd first, then create interface
            subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
            self.create_iface(ipv6=ipv6)
        else:
            # "None" means test sets up interface by itself
            subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])

        try:
            subprocess.check_call([NETWORKD_WAIT_ONLINE, '--interface',
                                   self.iface, '--timeout=%i' % online_timeout])

            if ipv6:
                # check iface state and IP 6 address; FIXME: we need to wait a bit
                # longer, as the iface is "configured" already with IPv4 *or*
                # IPv6, but we want to wait for both
                for _ in range(10):
                    out = subprocess.check_output(['ip', 'a', 'show', 'dev', self.iface])
                    if b'state UP' in out and b'inet6 2600' in out and b'inet 192.168' in out:
                        break
                    time.sleep(1)
                else:
                    self.fail('timed out waiting for IPv6 configuration')

                self.assertRegex(out, b'inet6 2600::.* scope global .*dynamic')
                self.assertRegex(out, b'inet6 fe80::.* scope link')
            else:
                # should have link-local address on IPv6 only
                out = subprocess.check_output(['ip', '-6', 'a', 'show', 'dev', self.iface])
                self.assertRegex(out, br'inet6 fe80::.* scope link')
                self.assertNotIn(b'scope global', out)

                # should have IPv4 address
                out = subprocess.check_output(['ip', '-4', 'a', 'show', 'dev', self.iface])
                self.assertIn(b'state UP', out)
                self.assertRegex(out, br'inet 192.168.5.\d+/.* scope global dynamic')

            # check networkctl state
            out = subprocess.check_output(['networkctl'])
            self.assertRegex(out, (r'{}\s+ether\s+[a-z-]+\s+unmanaged'.format(self.if_router)).encode())
            self.assertRegex(out, (r'{}\s+ether\s+routable\s+configured'.format(self.iface)).encode())

            out = subprocess.check_output(['networkctl', 'status', self.iface])
            self.assertRegex(out, br'Type:\s+ether')
            self.assertRegex(out, br'State:\s+routable.*configured')
            self.assertRegex(out, br'Address:\s+192.168.5.\d+')
            if ipv6:
                self.assertRegex(out, br'2600::')
            else:
                self.assertNotIn(br'2600::', out)
            self.assertRegex(out, br'fe80::')
            self.assertRegex(out, br'Gateway:\s+192.168.5.1')
            self.assertRegex(out, br'DNS:\s+192.168.5.1')
        except (AssertionError, subprocess.CalledProcessError):
            # show networkd status, journal, and DHCP server log on failure
            with open(os.path.join(NETWORK_UNITDIR, self.config)) as f:
                print('\n---- {} ----\n{}'.format(self.config, f.read()))
            print('---- interface status ----')
            sys.stdout.flush()
            subprocess.call(['ip', 'a', 'show', 'dev', self.iface])
            print('---- networkctl status {} ----'.format(self.iface))
            sys.stdout.flush()
            subprocess.call(['networkctl', 'status', self.iface])
            self.show_journal('systemd-networkd.service')
            self.print_server_log()
            raise

        # wait up to 5 s for the DHCP-provided nameserver to reach resolv.conf
        for timeout in range(50):
            with open(RESOLV_CONF) as f:
                contents = f.read()
            if 'nameserver 192.168.5.1\n' in contents:
                break
            time.sleep(0.1)
        else:
            self.fail('nameserver 192.168.5.1 not found in ' + RESOLV_CONF)

        if coldplug is False:
            # check post-down.d hook
            self.shutdown_iface()

    def test_coldplug_dhcp_yes_ip4(self):
        # we have a 12s timeout on RA, so we need to wait longer
        self.do_test(coldplug=True, ipv6=False, online_timeout=15)

    def test_coldplug_dhcp_yes_ip4_no_ra(self):
        # with disabling RA explicitly things should be fast
        self.do_test(coldplug=True, ipv6=False,
                     extra_opts='IPv6AcceptRA=False')

    def test_coldplug_dhcp_ip4_only(self):
        # we have a 12s timeout on RA, so we need to wait longer
        self.do_test(coldplug=True, ipv6=False, dhcp_mode='ipv4',
                     online_timeout=15)

    def test_coldplug_dhcp_ip4_only_no_ra(self):
        # with disabling RA explicitly things should be fast
        self.do_test(coldplug=True, ipv6=False, dhcp_mode='ipv4',
                     extra_opts='IPv6AcceptRA=False')

    def test_coldplug_dhcp_ip6(self):
        self.do_test(coldplug=True, ipv6=True)

    def test_hotplug_dhcp_ip4(self):
        # With IPv4 only we have a 12s timeout on RA, so we need to wait longer
        self.do_test(coldplug=False, ipv6=False, online_timeout=15)

    def test_hotplug_dhcp_ip6(self):
        self.do_test(coldplug=False, ipv6=True)

    def test_route_only_dns(self):
        """A '~domain' routing domain must not become a search domain or
        promote its DNS server to a global one."""
        self.write_network('myvpn.netdev', '''\
[NetDev]
Name=dummy0
Kind=dummy
MACAddress=12:34:56:78:9a:bc''')
        self.write_network('myvpn.network', '''\
[Match]
Name=dummy0
[Network]
Address=192.168.42.100
DNS=192.168.42.1
Domains= ~company''')

        self.do_test(coldplug=True, ipv6=False,
                     extra_opts='IPv6AcceptRouterAdvertisements=False')

        with open(RESOLV_CONF) as f:
            contents = f.read()
        # ~company is not a search domain, only a routing domain
        self.assertNotRegex(contents, 'search.*company')
        # our global server should appear
        self.assertIn('nameserver 192.168.5.1\n', contents)
        # should not have domain-restricted server as global server
        self.assertNotIn('nameserver 192.168.42.1\n', contents)

    def test_route_only_dns_all_domains(self):
        """With '~.' the otherwise domain-restricted server becomes global."""
        self.write_network('myvpn.netdev', '''[NetDev]
Name=dummy0
Kind=dummy
MACAddress=12:34:56:78:9a:bc''')
        self.write_network('myvpn.network', '''[Match]
Name=dummy0
[Network]
Address=192.168.42.100
DNS=192.168.42.1
Domains= ~company ~.''')

        self.do_test(coldplug=True, ipv6=False,
                     extra_opts='IPv6AcceptRouterAdvertisements=False')

        with open(RESOLV_CONF) as f:
            contents = f.read()

        # ~company is not a search domain, only a routing domain
        self.assertNotRegex(contents, 'search.*company')

        # our global server should appear
        self.assertIn('nameserver 192.168.5.1\n', contents)
        # should have company server as global server due to ~.
        self.assertIn('nameserver 192.168.42.1\n', contents)
@unittest.skipUnless(HAVE_DNSMASQ, 'dnsmasq not installed')
class DnsmasqClientTest(ClientTestBase, unittest.TestCase):
    '''Test networkd client against dnsmasq'''

    def setUp(self):
        super().setUp()
        # Popen handle of the running dnsmasq, or None when not started
        self.dnsmasq = None
        # fixed client MAC so dnsmasq --dhcp-host rules can target it
        self.iface_mac = 'de:ad:be:ef:47:11'

    def create_iface(self, ipv6=False, dnsmasq_opts=None):
        '''Create test interface with DHCP server behind it'''

        # add veth pair
        subprocess.check_call(['ip', 'link', 'add', 'name', self.iface,
                               'address', self.iface_mac,
                               'type', 'veth', 'peer', 'name', self.if_router])

        # give our router an IP
        subprocess.check_call(['ip', 'a', 'flush', 'dev', self.if_router])
        subprocess.check_call(['ip', 'a', 'add', '192.168.5.1/24', 'dev', self.if_router])
        if ipv6:
            subprocess.check_call(['ip', 'a', 'add', '2600::1/64', 'dev', self.if_router])
        subprocess.check_call(['ip', 'link', 'set', self.if_router, 'up'])

        # add DHCP server
        self.dnsmasq_log = os.path.join(self.workdir, 'dnsmasq.log')
        lease_file = os.path.join(self.workdir, 'dnsmasq.leases')
        if ipv6:
            extra_opts = ['--enable-ra', '--dhcp-range=2600::10,2600::20']
        else:
            extra_opts = []
        if dnsmasq_opts:
            extra_opts += dnsmasq_opts
        self.dnsmasq = subprocess.Popen(
            ['dnsmasq', '--keep-in-foreground', '--log-queries',
             '--log-facility=' + self.dnsmasq_log, '--conf-file=/dev/null',
             '--dhcp-leasefile=' + lease_file, '--bind-interfaces',
             '--interface=' + self.if_router, '--except-interface=lo',
             '--dhcp-range=192.168.5.10,192.168.5.200'] + extra_opts)

    def shutdown_iface(self):
        '''Remove test interface and stop DHCP server'''

        if self.if_router:
            subprocess.check_call(['ip', 'link', 'del', 'dev', self.if_router])
            self.if_router = None
        if self.dnsmasq:
            self.dnsmasq.kill()
            self.dnsmasq.wait()
            self.dnsmasq = None

    def print_server_log(self):
        '''Print DHCP server log for debugging failures'''

        with open(self.dnsmasq_log) as f:
            sys.stdout.write('\n\n---- dnsmasq log ----\n{}\n------\n\n'.format(f.read()))

    def test_resolved_domain_restricted_dns(self):
        '''resolved: domain-restricted DNS servers'''

        # create interface for generic connections; this will map all DNS names
        # to 192.168.42.1
        self.create_iface(dnsmasq_opts=['--address=/#/192.168.42.1'])
        self.write_network('general.network', '''\
[Match]
Name={}
[Network]
DHCP=ipv4
IPv6AcceptRA=False'''.format(self.iface))

        # create second device/dnsmasq for a .company/.lab VPN interface
        # static IPs for simplicity
        self.add_veth_pair('testvpnclient', 'testvpnrouter')
        subprocess.check_call(['ip', 'a', 'flush', 'dev', 'testvpnrouter'])
        subprocess.check_call(['ip', 'a', 'add', '10.241.3.1/24', 'dev', 'testvpnrouter'])
        subprocess.check_call(['ip', 'link', 'set', 'testvpnrouter', 'up'])

        vpn_dnsmasq_log = os.path.join(self.workdir, 'dnsmasq-vpn.log')
        vpn_dnsmasq = subprocess.Popen(
            ['dnsmasq', '--keep-in-foreground', '--log-queries',
             '--log-facility=' + vpn_dnsmasq_log, '--conf-file=/dev/null',
             '--dhcp-leasefile=/dev/null', '--bind-interfaces',
             '--interface=testvpnrouter', '--except-interface=lo',
             '--address=/math.lab/10.241.3.3', '--address=/cantina.company/10.241.4.4'])
        # register kill before wait so cleanup runs kill first (LIFO order)
        self.addCleanup(vpn_dnsmasq.wait)
        self.addCleanup(vpn_dnsmasq.kill)

        self.write_network('vpn.network', '''\
[Match]
Name=testvpnclient
[Network]
IPv6AcceptRA=False
Address=10.241.3.2/24
DNS=10.241.3.1
Domains= ~company ~lab''')

        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        subprocess.check_call([NETWORKD_WAIT_ONLINE, '--interface', self.iface,
                               '--interface=testvpnclient', '--timeout=20'])

        # ensure we start fresh with every test
        subprocess.check_call(['systemctl', 'restart', 'systemd-resolved'])

        # test vpnclient specific domains; these should *not* be answered by
        # the general DNS
        out = subprocess.check_output(['systemd-resolve', 'math.lab'])
        self.assertIn(b'math.lab: 10.241.3.3', out)

        out = subprocess.check_output(['systemd-resolve', 'kettle.cantina.company'])
        self.assertIn(b'kettle.cantina.company: 10.241.4.4', out)

        # test general domains
        out = subprocess.check_output(['systemd-resolve', 'megasearch.net'])
        self.assertIn(b'megasearch.net: 192.168.42.1', out)

        with open(self.dnsmasq_log) as f:
            general_log = f.read()
        with open(vpn_dnsmasq_log) as f:
            vpn_log = f.read()

        # VPN domains should only be sent to VPN DNS
        self.assertRegex(vpn_log, 'query.*math.lab')
        self.assertRegex(vpn_log, 'query.*cantina.company')
        self.assertNotIn('.lab', general_log)
        self.assertNotIn('.company', general_log)

        # general domains should not be sent to the VPN DNS
        self.assertRegex(general_log, 'query.*megasearch.net')
        self.assertNotIn('megasearch.net', vpn_log)

    def test_resolved_etc_hosts(self):
        '''resolved queries to /etc/hosts'''

        # FIXME: -t MX query fails with enabled DNSSEC (even when using
        # the known negative trust anchor .internal instead of .example)
        conf = '/run/systemd/resolved.conf.d/test-disable-dnssec.conf'
        os.makedirs(os.path.dirname(conf), exist_ok=True)
        with open(conf, 'w') as f:
            f.write('[Resolve]\nDNSSEC=no')
        self.addCleanup(os.remove, conf)

        # create /etc/hosts bind mount which resolves my.example for IPv4
        hosts = os.path.join(self.workdir, 'hosts')
        with open(hosts, 'w') as f:
            f.write('172.16.99.99 my.example\n')
        subprocess.check_call(['mount', '--bind', hosts, '/etc/hosts'])
        self.addCleanup(subprocess.call, ['umount', '/etc/hosts'])
        subprocess.check_call(['systemctl', 'stop', 'systemd-resolved.service'])

        # note: different IPv4 address here, so that it's easy to tell apart
        # what resolved the query
        self.create_iface(dnsmasq_opts=['--host-record=my.example,172.16.99.1,2600::99:99',
                                        '--host-record=other.example,172.16.0.42,2600::42',
                                        '--mx-host=example,mail.example'],
                          ipv6=True)
        self.do_test(coldplug=None, ipv6=True)

        try:
            # family specific queries
            out = subprocess.check_output(['systemd-resolve', '-4', 'my.example'])
            self.assertIn(b'my.example: 172.16.99.99', out)
            # we don't expect an IPv6 answer; if /etc/hosts has any IP address,
            # it's considered a sufficient source
            self.assertNotEqual(subprocess.call(['systemd-resolve', '-6', 'my.example']), 0)
            # "any family" query; IPv4 should come from /etc/hosts
            out = subprocess.check_output(['systemd-resolve', 'my.example'])
            self.assertIn(b'my.example: 172.16.99.99', out)
            # IP → name lookup; again, takes the /etc/hosts one
            out = subprocess.check_output(['systemd-resolve', '172.16.99.99'])
            self.assertIn(b'172.16.99.99: my.example', out)

            # non-address RRs should fall back to DNS
            out = subprocess.check_output(['systemd-resolve', '--type=MX', 'example'])
            self.assertIn(b'example IN MX 1 mail.example', out)

            # other domains query DNS
            out = subprocess.check_output(['systemd-resolve', 'other.example'])
            self.assertIn(b'172.16.0.42', out)
            out = subprocess.check_output(['systemd-resolve', '172.16.0.42'])
            self.assertIn(b'172.16.0.42: other.example', out)
        except (AssertionError, subprocess.CalledProcessError):
            self.show_journal('systemd-resolved.service')
            self.print_server_log()
            raise

    def test_transient_hostname(self):
        '''networkd sets transient hostname from DHCP'''

        orig_hostname = socket.gethostname()
        self.addCleanup(socket.sethostname, orig_hostname)
        # temporarily move /etc/hostname away; restart hostnamed to pick it up
        if os.path.exists('/etc/hostname'):
            subprocess.check_call(['mount', '--bind', '/dev/null', '/etc/hostname'])
            self.addCleanup(subprocess.call, ['umount', '/etc/hostname'])
        subprocess.check_call(['systemctl', 'stop', 'systemd-hostnamed.service'])

        self.create_iface(dnsmasq_opts=['--dhcp-host={},192.168.5.210,testgreen'.format(self.iface_mac)])
        self.do_test(coldplug=None, extra_opts='IPv6AcceptRA=False', dhcp_mode='ipv4')

        try:
            # should have received the fixed IP above
            out = subprocess.check_output(['ip', '-4', 'a', 'show', 'dev', self.iface])
            self.assertRegex(out, b'inet 192.168.5.210/24 .* scope global dynamic')
            # should have set transient hostname in hostnamed; this is
            # sometimes a bit lagging (issue #4753), so retry a few times
            for retry in range(1, 6):
                out = subprocess.check_output(['hostnamectl'])
                if b'testgreen' in out:
                    break
                time.sleep(5)
                sys.stdout.write('[retry %i] ' % retry)
                sys.stdout.flush()
            else:
                self.fail('Transient hostname not found in hostnamectl:\n{}'.format(out.decode()))
            # and also applied to the system
            self.assertEqual(socket.gethostname(), 'testgreen')
        except AssertionError:
            self.show_journal('systemd-networkd.service')
            self.show_journal('systemd-hostnamed.service')
            self.print_server_log()
            raise

    def test_transient_hostname_with_static(self):
        '''transient hostname is not applied if static hostname exists'''

        orig_hostname = socket.gethostname()
        self.addCleanup(socket.sethostname, orig_hostname)

        if not os.path.exists('/etc/hostname'):
            self.writeConfig('/etc/hostname', orig_hostname)
        subprocess.check_call(['systemctl', 'stop', 'systemd-hostnamed.service'])

        self.create_iface(dnsmasq_opts=['--dhcp-host={},192.168.5.210,testgreen'.format(self.iface_mac)])
        self.do_test(coldplug=None, extra_opts='IPv6AcceptRA=False', dhcp_mode='ipv4')

        try:
            # should have received the fixed IP above
            out = subprocess.check_output(['ip', '-4', 'a', 'show', 'dev', self.iface])
            self.assertRegex(out, b'inet 192.168.5.210/24 .* scope global dynamic')
            # static hostname wins over transient one, thus *not* applied
            self.assertEqual(socket.gethostname(), orig_hostname)
        except AssertionError:
            self.show_journal('systemd-networkd.service')
            self.show_journal('systemd-hostnamed.service')
            self.print_server_log()
            raise
class NetworkdClientTest(ClientTestBase, unittest.TestCase):
    '''Test networkd client against networkd server'''

    def setUp(self):
        super().setUp()
        self.dnsmasq = None

    def create_iface(self, ipv6=False, dhcpserver_opts=None):
        '''Create test interface with DHCP server behind it'''

        # run "router-side" networkd in own mount namespace to shield it from
        # "client-side" configuration and networkd
        (fd, script) = tempfile.mkstemp(prefix='networkd-router.sh')
        self.addCleanup(os.remove, script)
        with os.fdopen(fd, 'w+') as f:
            f.write('''\
#!/bin/sh
set -eu
mkdir -p /run/systemd/network
mkdir -p /run/systemd/netif
mount -t tmpfs none /run/systemd/network
mount -t tmpfs none /run/systemd/netif
[ ! -e /run/dbus ] || mount -t tmpfs none /run/dbus
# create router/client veth pair
cat << EOF > /run/systemd/network/test.netdev
[NetDev]
Name=%(ifr)s
Kind=veth
[Peer]
Name=%(ifc)s
EOF
cat << EOF > /run/systemd/network/test.network
[Match]
Name=%(ifr)s
[Network]
Address=192.168.5.1/24
%(addr6)s
DHCPServer=yes
[DHCPServer]
PoolOffset=10
PoolSize=50
DNS=192.168.5.1
%(dhopts)s
EOF
# run networkd as in systemd-networkd.service
exec $(systemctl cat systemd-networkd.service | sed -n '/^ExecStart=/ { s/^.*=//; s/^[@+-]//; s/^!*//; p}')
''' % {'ifr': self.if_router, 'ifc': self.iface, 'addr6': ipv6 and 'Address=2600::1/64' or '',
       'dhopts': dhcpserver_opts or ''})

            # chmod while the fd is still open; the script is executed below
            os.fchmod(fd, 0o755)

        subprocess.check_call(['systemd-run', '--unit=networkd-test-router.service',
                               '-p', 'InaccessibleDirectories=-/etc/systemd/network',
                               '-p', 'InaccessibleDirectories=-/run/systemd/network',
                               '-p', 'InaccessibleDirectories=-/run/systemd/netif',
                               '--service-type=notify', script])

        # wait until devices got created
        for _ in range(50):
            out = subprocess.check_output(['ip', 'a', 'show', 'dev', self.if_router])
            if b'state UP' in out and b'scope global' in out:
                break
            time.sleep(0.1)

    def shutdown_iface(self):
        '''Remove test interface and stop DHCP server'''

        if self.if_router:
            subprocess.check_call(['systemctl', 'stop', 'networkd-test-router.service'])
            # ensure failed transient unit does not stay around
            subprocess.call(['systemctl', 'reset-failed', 'networkd-test-router.service'])
            subprocess.call(['ip', 'link', 'del', 'dev', self.if_router])
            self.if_router = None

    def print_server_log(self):
        '''Print DHCP server log for debugging failures'''

        self.show_journal('networkd-test-router.service')

    @unittest.skip('networkd does not have DHCPv6 server support')
    def test_hotplug_dhcp_ip6(self):
        pass

    @unittest.skip('networkd does not have DHCPv6 server support')
    def test_coldplug_dhcp_ip6(self):
        pass

    def test_search_domains(self):
        """Excess search domains get cut off with an explanatory comment."""

        # we don't use this interface for this test
        self.if_router = None

        self.write_network('test.netdev', '''\
[NetDev]
Name=dummy0
Kind=dummy
MACAddress=12:34:56:78:9a:bc''')
        self.write_network('test.network', '''\
[Match]
Name=dummy0
[Network]
Address=192.168.42.100
DNS=192.168.42.1
Domains= one two three four five six seven eight nine ten''')

        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])

        # wait until our configuration shows up in resolv.conf
        for timeout in range(50):
            with open(RESOLV_CONF) as f:
                contents = f.read()
            if ' one' in contents:
                break
            time.sleep(0.1)
        self.assertRegex(contents, 'search .*one two three four')
        self.assertNotIn('seven\n', contents)
        self.assertIn('# Too many search domains configured, remaining ones ignored.\n', contents)

    def test_search_domains_too_long(self):
        """Over-long total search domain list gets cut off with a comment."""

        # we don't use this interface for this test
        self.if_router = None

        name_prefix = 'a' * 60

        self.write_network('test.netdev', '''\
[NetDev]
Name=dummy0
Kind=dummy
MACAddress=12:34:56:78:9a:bc''')
        self.write_network('test.network', '''\
[Match]
Name=dummy0
[Network]
Address=192.168.42.100
DNS=192.168.42.1
Domains={p}0 {p}1 {p}2 {p}3 {p}4'''.format(p=name_prefix))

        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])

        # wait until our configuration shows up in resolv.conf
        # BUGFIX: this previously checked "' one' in contents" (copied from
        # test_search_domains), which never matches the generated
        # 'aaa…0 aaa…1 …' domains, so the loop always ran for the full 5 s
        # and could assert on not-yet-updated contents.
        for timeout in range(50):
            with open(RESOLV_CONF) as f:
                contents = f.read()
            if name_prefix in contents:
                break
            time.sleep(0.1)
        self.assertRegex(contents, 'search .*{p}0 {p}1 {p}2'.format(p=name_prefix))
        self.assertIn('# Total length of all search domains is too long, remaining ones ignored.', contents)

    def test_dropin(self):
        """DNS servers from a drop-in are merged with the main .network file."""

        # we don't use this interface for this test
        self.if_router = None

        self.write_network('test.netdev', '''\
[NetDev]
Name=dummy0
Kind=dummy
MACAddress=12:34:56:78:9a:bc''')
        self.write_network('test.network', '''\
[Match]
Name=dummy0
[Network]
Address=192.168.42.100
DNS=192.168.42.1''')
        self.write_network_dropin('test.network', 'dns', '''\
[Network]
DNS=127.0.0.1''')

        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])

        # wait until the drop-in DNS server shows up in resolv.conf
        for timeout in range(50):
            with open(RESOLV_CONF) as f:
                contents = f.read()
            if ' 127.0.0.1' in contents:
                break
            time.sleep(0.1)
        self.assertIn('nameserver 192.168.42.1\n', contents)
        self.assertIn('nameserver 127.0.0.1\n', contents)

    def test_dhcp_timezone(self):
        '''networkd sets time zone from DHCP'''

        def get_tz():
            # query hostnamed's peer timedated for the current timezone;
            # busctl prints 's "<tz>"', strip the wrapping
            out = subprocess.check_output(['busctl', 'get-property', 'org.freedesktop.timedate1',
                                           '/org/freedesktop/timedate1', 'org.freedesktop.timedate1', 'Timezone'])
            assert out.startswith(b's "')
            out = out.strip()
            assert out.endswith(b'"')
            return out[3:-1].decode()

        orig_timezone = get_tz()
        self.addCleanup(subprocess.call, ['timedatectl', 'set-timezone', orig_timezone])

        self.create_iface(dhcpserver_opts='EmitTimezone=yes\nTimezone=Pacific/Honolulu')
        self.do_test(coldplug=None, extra_opts='IPv6AcceptRA=false\n[DHCP]\nUseTimezone=true', dhcp_mode='ipv4')

        # should have applied the received timezone
        try:
            self.assertEqual(get_tz(), 'Pacific/Honolulu')
        except AssertionError:
            self.show_journal('systemd-networkd.service')
            self.show_journal('systemd-hostnamed.service')
            raise
class MatchClientTest(unittest.TestCase, NetworkdTestingUtilities):
    """Test [Match] sections in .network files.

    Be aware that matching the test host's interfaces will wipe their
    configuration, so as a precaution, all network files should have a
    restrictive [Match] section to only ever interfere with the
    temporary veth interfaces created here.
    """

    def tearDown(self):
        """Stop networkd."""
        subprocess.call(['systemctl', 'stop', 'systemd-networkd'])

    def test_basic_matching(self):
        """Verify the Name= line works throughout this class."""
        self.add_veth_pair('test_if1', 'fake_if2')
        # glob should manage only the 'test_*' side of the pair
        self.write_network('test.network', "[Match]\nName=test_*\n[Network]")
        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        self.assert_link_states(test_if1='managed', fake_if2='unmanaged')

    def test_inverted_matching(self):
        """Verify that a '!'-prefixed value inverts the match."""
        # Use a MAC address as the interfaces' common matching attribute
        # to avoid depending on udev, to support testing in containers.
        mac = '00:01:02:03:98:99'
        self.add_veth_pair('test_veth', 'test_peer',
                           ['addr', mac], ['addr', mac])
        # both devices share the MAC; the negated Name= list excludes the peer
        self.write_network('no-veth.network', """\
[Match]
MACAddress={}
Name=!nonexistent *peer*
[Network]""".format(mac))
        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        self.assert_link_states(test_veth='managed', test_peer='unmanaged')
class UnmanagedClientTest(unittest.TestCase, NetworkdTestingUtilities):
    """Test if networkd manages the correct interfaces."""

    def setUp(self):
        """Write .network files to match the named veth devices."""
        # veth+peer names to create; the pairing itself is irrelevant,
        # only the device names matter for matching.
        self.veths = {'m1def': 'm0unm', 'm1man': 'm1unm'}
        # .network file contents, applied in the numbered order below:
        # explicit match, Unmanaged=yes override, and a wildcard Unmanaged=no.
        self.configs = (
            "[Match]\nName=m1def\n",
            "[Match]\nName=m1unm\n[Link]\nUnmanaged=yes\n",
            "[Match]\nName=m1*\n[Link]\nUnmanaged=no\n",
        )
        # Emit the numbered .network files; the helper registers cleanup.
        for index, content in enumerate(self.configs):
            self.write_network("%02d-test.network" % index, content)

    def tearDown(self):
        """Stop networkd."""
        subprocess.call(['systemctl', 'stop', 'systemd-networkd'])

    def create_iface(self):
        """Create temporary veth pairs for interface matching."""
        for device, peer_device in self.veths.items():
            self.add_veth_pair(device, peer_device)

    def test_unmanaged_setting(self):
        """Verify link states with Unmanaged= settings, hot-plug."""
        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        self.create_iface()
        expected = dict(m1def='managed',
                        m1man='managed',
                        m1unm='unmanaged',
                        m0unm='unmanaged')
        self.assert_link_states(**expected)

    def test_unmanaged_setting_coldplug(self):
        """Verify link states with Unmanaged= settings, cold-plug."""
        self.create_iface()
        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        expected = dict(m1def='managed',
                        m1man='managed',
                        m1unm='unmanaged',
                        m0unm='unmanaged')
        self.assert_link_states(**expected)

    def test_catchall_config(self):
        """Verify link states with a catch-all config, hot-plug."""
        # Don't actually catch ALL interfaces. It messes up the host.
        self.write_network('all.network', "[Match]\nName=m[01]???\n")
        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        self.create_iface()
        expected = dict(m1def='managed',
                        m1man='managed',
                        m1unm='unmanaged',
                        m0unm='managed')
        self.assert_link_states(**expected)

    def test_catchall_config_coldplug(self):
        """Verify link states with a catch-all config, cold-plug."""
        # Don't actually catch ALL interfaces. It messes up the host.
        self.write_network('all.network', "[Match]\nName=m[01]???\n")
        self.create_iface()
        subprocess.check_call(['systemctl', 'start', 'systemd-networkd'])
        expected = dict(m1def='managed',
                        m1man='managed',
                        m1unm='unmanaged',
                        m0unm='managed')
        self.assert_link_states(**expected)
if __name__ == '__main__':
    # Verbose runner on stdout so unittest output interleaves with the
    # diagnostics the tests print.
    runner = unittest.TextTestRunner(stream=sys.stdout, verbosity=2)
    unittest.main(testRunner=runner)
| gpl-2.0 |
fspaolo/scikit-learn | sklearn/covariance/robust_covariance.py | 1 | 28701 | """
Robust location and covariance estimators.
Here are implemented estimators that are resistant to outliers.
"""
# Author: Virgile Fritsch <virgile.fritsch@inria.fr>
#
# License: BSD 3 clause
import warnings
import numbers
import numpy as np
from scipy import linalg
from scipy.stats import chi2
from . import empirical_covariance, EmpiricalCovariance
from ..utils.extmath import fast_logdet, pinvh
from ..utils import check_random_state
###############################################################################
### Minimum Covariance Determinant
# Implementing of an algorithm by Rousseeuw & Van Driessen described in
# (A Fast Algorithm for the Minimum Covariance Determinant Estimator,
# 1999, American Statistical Association and the American Society
# for Quality, TECHNOMETRICS)
###############################################################################
def c_step(X, n_support, remaining_iterations=30, initial_estimates=None,
           verbose=False, cov_computation_method=empirical_covariance,
           random_state=None):
    """C_step procedure described in [Rouseeuw1984]_ aiming at computing MCD.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
      Data set in which we look for the n_support observations whose
      scatter matrix has minimum determinant.

    n_support : int, > n_samples / 2
      Number of observations to compute the robust estimates of location
      and covariance from.

    remaining_iterations : int, optional
      Number of iterations to perform.
      According to [Rouseeuw1999]_, two iterations are sufficient to get
      close to the minimum, and we never need more than 30 to reach
      convergence.

    initial_estimates : 2-tuple, optional
      Initial estimates of location and shape from which to run the c_step
      procedure:
      - initial_estimates[0]: an initial location estimate
      - initial_estimates[1]: an initial covariance estimate

    verbose : boolean, optional
      Verbose mode.

    cov_computation_method : callable, optional
      Function computing the covariance matrix of an observations array
      (defaults to `empirical_covariance`).

    random_state : integer or numpy.RandomState, optional
      The random generator used. If an integer is given, it fixes the
      seed. Defaults to the global numpy random number generator.

    Returns
    -------
    location : array-like, shape (n_features,)
      Robust location estimates.

    covariance : array-like, shape (n_features, n_features)
      Robust covariance estimates.

    det : float
      Log-determinant (`fast_logdet`) of the robust covariance estimate.

    support : array-like, shape (n_samples,)
      A mask for the `n_support` observations whose scatter matrix has
      minimum determinant.

    dist : array-like, shape (n_samples,)
      Squared Mahalanobis distances of all observations with respect to
      the returned location and (inverse) covariance estimates.

    References
    ----------
    .. [Rouseeuw1999] A Fast Algorithm for the Minimum Covariance Determinant
        Estimator, 1999, American Statistical Association and the American
        Society for Quality, TECHNOMETRICS

    """
    random_state = check_random_state(random_state)
    n_samples, n_features = X.shape

    # Initialisation
    if initial_estimates is None:
        # compute initial robust estimates from a random subset
        support = np.zeros(n_samples).astype(bool)
        support[random_state.permutation(n_samples)[:n_support]] = True
        location = X[support].mean(0)
        covariance = cov_computation_method(X[support])
    else:
        # get initial robust estimates from the function parameters
        location = initial_estimates[0]
        covariance = initial_estimates[1]
        # run a special iteration for that case (to get an initial support)
        precision = pinvh(covariance)
        X_centered = X - location
        dist = (np.dot(X_centered, precision) * X_centered).sum(1)
        # compute new estimates
        support = np.zeros(n_samples).astype(bool)
        support[np.argsort(dist)[:n_support]] = True
        location = X[support].mean(0)
        covariance = cov_computation_method(X[support])
    previous_det = np.inf

    # Iterative procedure for Minimum Covariance Determinant computation
    det = fast_logdet(covariance)
    while (det < previous_det) and (remaining_iterations > 0):
        # save old estimates values
        previous_location = location
        previous_covariance = covariance
        previous_det = det
        previous_support = support
        # compute a new support from the full data set mahalanobis distances
        precision = pinvh(covariance)
        X_centered = X - location
        dist = (np.dot(X_centered, precision) * X_centered).sum(axis=1)
        # compute new estimates
        support = np.zeros(n_samples).astype(bool)
        support[np.argsort(dist)[:n_support]] = True
        location = X[support].mean(axis=0)
        covariance = cov_computation_method(X[support])
        det = fast_logdet(covariance)
        # update remaining iterations for early stopping
        remaining_iterations -= 1

    # NOTE(review): if the loop body above never executed (e.g. the caller
    # passed remaining_iterations <= 0 with initial_estimates=None), `dist`
    # and `precision` are undefined here and the next two lines would raise
    # NameError — confirm callers always pass a positive iteration count.
    previous_dist = dist
    dist = (np.dot(X - location, precision) * (X - location)).sum(axis=1)
    # Catch computation errors
    if np.isinf(det):
        raise ValueError(
            "Singular covariance matrix. "
            "Please check that the covariance matrix corresponding "
            "to the dataset is full rank and that MinCovDet is used with "
            "Gaussian-distributed data (or at least data drawn from a "
            "unimodal, symmetric distribution.")
    # Check convergence
    if np.allclose(det, previous_det):
        # c_step procedure converged
        if verbose:
            print("Optimal couple (location, covariance) found before"
                  " ending iterations (%d left)" % (remaining_iterations))
        results = location, covariance, det, support, dist
    elif det > previous_det:
        # determinant has increased (should not happen)
        warnings.warn("Warning! det > previous_det (%.15f > %.15f)"
                      % (det, previous_det), RuntimeWarning)
        results = previous_location, previous_covariance, \
            previous_det, previous_support, previous_dist

    # Check early stopping
    if remaining_iterations == 0:
        if verbose:
            print('Maximum number of iterations reached')
        det = fast_logdet(covariance)
        results = location, covariance, det, support, dist

    return results
def select_candidates(X, n_support, n_trials, select=1, n_iter=30,
                      verbose=False,
                      cov_computation_method=empirical_covariance,
                      random_state=None):
    """Finds the best pure subset of observations to compute MCD from it.
    The purpose of this function is to find the best sets of n_support
    observations with respect to a minimization of their covariance
    matrix determinant. Equivalently, it removes n_samples-n_support
    observations to construct what we call a pure data set (i.e. not
    containing outliers). The list of the observations of the pure
    data set is referred to as the `support`.
    Starting from a random support, the pure data set is found by the
    c_step procedure introduced by Rousseeuw and Van Driessen in
    [Rouseeuw1999]_.
    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
        Data (sub)set in which we look for the n_support purest observations.
    n_support : int, [(n + p + 1)/2] < n_support < n
        The number of samples the pure data set must contain.
    n_trials : int, nb_trials > 0 or 2-tuple
        Number of different initial sets of observations from which to
        run the algorithm.
        Instead of giving a number of trials to perform, one can provide a
        list of initial estimates that will be used to iteratively run
        c_step procedures. In this case:
        - n_trials[0]: array-like, shape (n_trials, n_features)
          is the list of `n_trials` initial location estimates
        - n_trials[1]: array-like, shape (n_trials, n_features, n_features)
          is the list of `n_trials` initial covariances estimates
    select : int, int > 0
        Number of best candidates results to return.
    n_iter : int, nb_iter > 0
        Maximum number of iterations for the c_step procedure.
        (2 is enough to be close to the final solution. "Never" exceeds 20).
    verbose : bool
        If True, print convergence messages from the underlying c_step runs.
    cov_computation_method : callable
        Function computing a covariance matrix from a data subset
        (defaults to `empirical_covariance`).
    random_state : integer or numpy.RandomState, optional
        The random generator used. If an integer is given, it fixes the
        seed. Defaults to the global numpy random number generator.
    Raises
    ------
    TypeError
        If `n_trials` is neither an integer nor a 2-tuple of initial
        estimates.
    See Also
    ---------
    `c_step` function
    Returns
    -------
    best_locations : array-like, shape (select, n_features)
        The `select` location estimates computed from the `select` best
        supports found in the data set (`X`).
    best_covariances : array-like, shape (select, n_features, n_features)
        The `select` covariance estimates computed from the `select`
        best supports found in the data set (`X`).
    best_supports : array-like, shape (select, n_samples)
        The `select` best supports found in the data set (`X`).
    best_ds : array-like, shape (select, n_samples)
        The Mahalanobis distances of all observations for the `select`
        best estimates.
    References
    ----------
    .. [Rouseeuw1999] A Fast Algorithm for the Minimum Covariance Determinant
        Estimator, 1999, American Statistical Association and the American
        Society for Quality, TECHNOMETRICS
    """
    random_state = check_random_state(random_state)
    if isinstance(n_trials, numbers.Integral):
        run_from_estimates = False
    elif isinstance(n_trials, tuple):
        run_from_estimates = True
        estimates_list = n_trials
        n_trials = estimates_list[0].shape[0]
    else:
        # NOTE: message previously read "tuple or  integer" (double space).
        raise TypeError("Invalid 'n_trials' parameter, expected tuple or "
                        "integer, got %s (%s)" % (n_trials, type(n_trials)))
    # compute `n_trials` location and shape estimates candidates in the subset
    all_estimates = []
    if not run_from_estimates:
        # perform `n_trials` computations from random initial supports
        for j in range(n_trials):
            all_estimates.append(
                c_step(
                    X, n_support, remaining_iterations=n_iter, verbose=verbose,
                    cov_computation_method=cov_computation_method,
                    random_state=random_state))
    else:
        # perform computations from every given initial estimates
        for j in range(n_trials):
            initial_estimates = (estimates_list[0][j], estimates_list[1][j])
            all_estimates.append(c_step(
                X, n_support, remaining_iterations=n_iter,
                initial_estimates=initial_estimates, verbose=verbose,
                cov_computation_method=cov_computation_method,
                random_state=random_state))
    all_locs_sub, all_covs_sub, all_dets_sub, all_supports_sub, all_ds_sub = \
        zip(*all_estimates)
    # Keep the `select` best results among the `n_trials` ones, ranked by
    # the (log-)determinant of the covariance: the smaller, the purer.
    index_best = np.argsort(all_dets_sub)[:select]
    best_locations = np.asarray(all_locs_sub)[index_best]
    best_covariances = np.asarray(all_covs_sub)[index_best]
    best_supports = np.asarray(all_supports_sub)[index_best]
    best_ds = np.asarray(all_ds_sub)[index_best]
    return best_locations, best_covariances, best_supports, best_ds
def fast_mcd(X, support_fraction=None,
             cov_computation_method=empirical_covariance,
             random_state=None):
    """Estimates the Minimum Covariance Determinant matrix.
    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
        The data matrix, with p features and n samples.
    support_fraction : float, 0 < support_fraction < 1
        The proportion of points to be included in the support of the raw
        MCD estimate. Default is None, which implies that the minimum
        value of support_fraction will be used within the algorithm:
        `[n_sample + n_features + 1] / 2`.
    cov_computation_method : callable
        Function computing a covariance matrix from a data subset
        (defaults to `empirical_covariance`).
    random_state : integer or numpy.RandomState, optional
        The generator used to randomly subsample. If an integer is
        given, it fixes the seed. Defaults to the global numpy random
        number generator.
    Notes
    -----
    The FastMCD algorithm has been introduced by Rousseuw and Van Driessen
    in "A Fast Algorithm for the Minimum Covariance Determinant Estimator,
    1999, American Statistical Association and the American Society
    for Quality, TECHNOMETRICS".
    The principle is to compute robust estimates and random subsets before
    pooling them into a larger subsets, and finally into the full data set.
    Depending on the size of the initial sample, we have one, two or three
    such computation levels.
    Note that only raw estimates are returned. If one is interested in
    the correction and reweighting steps described in [Rouseeuw1999]_,
    see the MinCovDet object.
    References
    ----------
    .. [Rouseeuw1999] A Fast Algorithm for the Minimum Covariance
        Determinant Estimator, 1999, American Statistical Association
        and the American Society for Quality, TECHNOMETRICS
    .. [Butler1993] R. W. Butler, P. L. Davies and M. Jhun,
        Asymptotics For The Minimum Covariance Determinant Estimator,
        The Annals of Statistics, 1993, Vol. 21, No. 3, 1385-1400
    Returns
    -------
    location : array-like, shape (n_features,)
        Robust location of the data.
    covariance : array-like, shape (n_features, n_features)
        Robust covariance of the features.
    support : array-like, type boolean, shape (n_samples,)
        A mask of the observations that have been used to compute
        the robust location and covariance estimates of the data set.
    dist : array-like, shape (n_samples,)
        The Mahalanobis distances of all observations from the raw
        estimates.
    """
    random_state = check_random_state(random_state)
    X = np.asarray(X)
    if X.ndim == 1:
        X = np.reshape(X, (1, -1))
        warnings.warn("Only one sample available. "
                      "You may want to reshape your data array")
    n_samples, n_features = X.shape
    # minimum breakdown value
    if support_fraction is None:
        n_support = int(np.ceil(0.5 * (n_samples + n_features + 1)))
    else:
        n_support = int(support_fraction * n_samples)
    # 1-dimensional case quick computation
    # (Rousseeuw, P. J. and Leroy, A. M. (2005) References, in Robust
    #  Regression and Outlier Detection, John Wiley & Sons, chapter 4)
    if n_features == 1:
        if n_support < n_samples:
            # find the sample shortest halves
            X_sorted = np.sort(np.ravel(X))
            diff = X_sorted[n_support:] - X_sorted[:(n_samples - n_support)]
            halves_start = np.where(diff == np.min(diff))[0]
            # take the middle points' mean to get the robust location estimate
            location = 0.5 * (X_sorted[n_support + halves_start]
                              + X_sorted[halves_start]).mean()
            support = np.zeros(n_samples, dtype=bool)
            X_centered = X - location
            support[np.argsort(np.abs(X - location), 0)[:n_support]] = True
            covariance = np.asarray([[np.var(X[support])]])
            location = np.array([location])
            # get precision matrix in an optimized way
            precision = pinvh(covariance)
            dist = (np.dot(X_centered, precision) * (X_centered)).sum(axis=1)
        else:
            # support covers the whole sample: plain mean/variance
            support = np.ones(n_samples, dtype=bool)
            covariance = np.asarray([[np.var(X)]])
            location = np.asarray([np.mean(X)])
            X_centered = X - location
            # get precision matrix in an optimized way
            precision = pinvh(covariance)
            dist = (np.dot(X_centered, precision) * (X_centered)).sum(axis=1)
    ### Starting FastMCD algorithm for p-dimensional case
    if (n_samples > 500) and (n_features > 1):
        ## 1. Find candidate supports on subsets
        # a. split the set in subsets of size ~ 300
        n_subsets = n_samples // 300
        n_samples_subsets = n_samples // n_subsets
        samples_shuffle = random_state.permutation(n_samples)
        h_subset = int(np.ceil(n_samples_subsets *
                       (n_support / float(n_samples))))
        # b. perform a total of 500 trials
        n_trials_tot = 500
        # c. select 10 best (location, covariance) for each subset
        n_best_sub = 10
        n_trials = max(10, n_trials_tot // n_subsets)
        n_best_tot = n_subsets * n_best_sub
        all_best_locations = np.zeros((n_best_tot, n_features))
        try:
            all_best_covariances = np.zeros((n_best_tot, n_features,
                                             n_features))
        except MemoryError:
            # The above is too big. Let's try with something much smaller
            # (and less optimal). BUGFIX: shrink `n_best_tot` *before*
            # re-allocating -- the previous code allocated an array of the
            # exact same oversized shape again, which would raise the same
            # MemoryError it was trying to recover from.
            n_best_tot = 10
            all_best_covariances = np.zeros((n_best_tot, n_features,
                                             n_features))
            n_best_sub = 2
        for i in range(n_subsets):
            low_bound = i * n_samples_subsets
            high_bound = low_bound + n_samples_subsets
            current_subset = X[samples_shuffle[low_bound:high_bound]]
            best_locations_sub, best_covariances_sub, _, _ = select_candidates(
                current_subset, h_subset, n_trials,
                select=n_best_sub, n_iter=2,
                cov_computation_method=cov_computation_method,
                random_state=random_state)
            subset_slice = np.arange(i * n_best_sub, (i + 1) * n_best_sub)
            all_best_locations[subset_slice] = best_locations_sub
            all_best_covariances[subset_slice] = best_covariances_sub
        ## 2. Pool the candidate supports into a merged set
        ## (possibly the full dataset)
        n_samples_merged = min(1500, n_samples)
        h_merged = int(np.ceil(n_samples_merged *
                       (n_support / float(n_samples))))
        if n_samples > 1500:
            n_best_merged = 10
        else:
            n_best_merged = 1
        # find the best couples (location, covariance) on the merged set
        selection = random_state.permutation(n_samples)[:n_samples_merged]
        locations_merged, covariances_merged, supports_merged, d = \
            select_candidates(
                X[selection], h_merged,
                n_trials=(all_best_locations, all_best_covariances),
                select=n_best_merged,
                cov_computation_method=cov_computation_method,
                random_state=random_state)
        ## 3. Finally get the overall best (locations, covariance) couple
        if n_samples < 1500:
            # directly get the best couple (location, covariance)
            location = locations_merged[0]
            covariance = covariances_merged[0]
            # map the merged-set support/distances back to the full sample
            support = np.zeros(n_samples, dtype=bool)
            dist = np.zeros(n_samples)
            support[selection] = supports_merged[0]
            dist[selection] = d[0]
        else:
            # select the best couple on the full dataset
            locations_full, covariances_full, supports_full, d = \
                select_candidates(
                    X, n_support,
                    n_trials=(locations_merged, covariances_merged),
                    select=1,
                    cov_computation_method=cov_computation_method,
                    random_state=random_state)
            location = locations_full[0]
            covariance = covariances_full[0]
            support = supports_full[0]
            dist = d[0]
    elif n_features > 1:
        ## 1. Find the 10 best couples (location, covariance)
        ## considering two iterations
        n_trials = 30
        n_best = 10
        locations_best, covariances_best, _, _ = select_candidates(
            X, n_support, n_trials=n_trials, select=n_best, n_iter=2,
            cov_computation_method=cov_computation_method,
            random_state=random_state)
        ## 2. Select the best couple on the full dataset amongst the 10
        locations_full, covariances_full, supports_full, d = select_candidates(
            X, n_support, n_trials=(locations_best, covariances_best),
            select=1, cov_computation_method=cov_computation_method,
            random_state=random_state)
        location = locations_full[0]
        covariance = covariances_full[0]
        support = supports_full[0]
        dist = d[0]
    return location, covariance, support, dist
class MinCovDet(EmpiricalCovariance):
    """Minimum Covariance Determinant (MCD): robust estimator of covariance.
    The Minimum Covariance Determinant covariance estimator is to be applied
    on Gaussian-distributed data, but could still be relevant on data
    drawn from a unimodal, symmetric distribution. It is not meant to be used
    with multi-modal data (the algorithm used to fit a MinCovDet object is
    likely to fail in such a case).
    One should consider projection pursuit methods to deal with multi-modal
    datasets.
    Parameters
    ----------
    store_precision : bool
        Specify if the estimated precision is stored.
    assume_centered : Boolean
        If True, the support of the robust location and the covariance
        estimates is computed, and a covariance estimate is recomputed from
        it, without centering the data.
        Useful to work with data whose mean is significantly equal to
        zero but is not exactly zero.
        If False, the robust location and covariance are directly computed
        with the FastMCD algorithm without additional treatment.
    support_fraction : float, 0 < support_fraction < 1
        The proportion of points to be included in the support of the raw
        MCD estimate. Default is None, which implies that the minimum
        value of support_fraction will be used within the algorithm:
        [n_sample + n_features + 1] / 2
    random_state : integer or numpy.RandomState, optional
        The random generator used. If an integer is given, it fixes the
        seed. Defaults to the global numpy random number generator.
    Attributes
    ----------
    `raw_location_` : array-like, shape (n_features,)
        The raw robust estimated location before correction and re-weighting.
    `raw_covariance_` : array-like, shape (n_features, n_features)
        The raw robust estimated covariance before correction and re-weighting.
    `raw_support_` : array-like, shape (n_samples,)
        A mask of the observations that have been used to compute
        the raw robust estimates of location and shape, before correction
        and re-weighting.
    `location_` : array-like, shape (n_features,)
        Estimated robust location
    `covariance_` : array-like, shape (n_features, n_features)
        Estimated robust covariance matrix
    `precision_` : array-like, shape (n_features, n_features)
        Estimated pseudo inverse matrix.
        (stored only if store_precision is True)
    `support_` : array-like, shape (n_samples,)
        A mask of the observations that have been used to compute
        the robust estimates of location and shape.
    `dist_` : array-like, shape (n_samples,)
        Mahalanobis distances of the training set (on which `fit` is called)
        observations.
    References
    ----------
    .. [Rouseeuw1984] `P. J. Rousseeuw. Least median of squares regression.
        J. Am Stat Ass, 79:871, 1984.`
    .. [Rouseeuw1999] `A Fast Algorithm for the Minimum Covariance Determinant
        Estimator, 1999, American Statistical Association and the American
        Society for Quality, TECHNOMETRICS`
    .. [Butler1993] `R. W. Butler, P. L. Davies and M. Jhun,
        Asymptotics For The Minimum Covariance Determinant Estimator,
        The Annals of Statistics, 1993, Vol. 21, No. 3, 1385-1400`
    """
    # Estimator applied to the (assumed outlier-free) support set; kept as a
    # staticmethod so subclasses can substitute another non-robust estimator.
    _nonrobust_covariance = staticmethod(empirical_covariance)
    def __init__(self, store_precision=True, assume_centered=False,
                 support_fraction=None, random_state=None):
        self.store_precision = store_precision
        self.assume_centered = assume_centered
        self.support_fraction = support_fraction
        self.random_state = random_state
    def fit(self, X, y=None):
        """Fits a Minimum Covariance Determinant with the FastMCD algorithm.
        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            Training data, where n_samples is the number of samples
            and n_features is the number of features.
        y : not used, present for API consistence purpose.
        Returns
        -------
        self : object
            Returns self.
        """
        random_state = check_random_state(self.random_state)
        n_samples, n_features = X.shape
        # check that the empirical covariance is full rank
        # (a rank-deficient X.T*X makes the MCD estimate unreliable)
        if (linalg.svdvals(np.dot(X.T, X)) > 1e-8).sum() != n_features:
            warnings.warn("The covariance matrix associated to your dataset "
                          "is not full rank")
        # compute and store raw estimates
        raw_location, raw_covariance, raw_support, raw_dist = fast_mcd(
            X, support_fraction=self.support_fraction,
            cov_computation_method=self._nonrobust_covariance,
            random_state=random_state)
        if self.assume_centered:
            # Data is declared centered: force a zero location and recompute
            # covariance and distances on the raw support without centering.
            raw_location = np.zeros(n_features)
            raw_covariance = self._nonrobust_covariance(X[raw_support],
                                                        assume_centered=True)
            # get precision matrix in an optimized way
            precision = pinvh(raw_covariance)
            raw_dist = np.sum(np.dot(X, precision) * X, 1)
        self.raw_location_ = raw_location
        self.raw_covariance_ = raw_covariance
        self.raw_support_ = raw_support
        self.location_ = raw_location
        self.support_ = raw_support
        self.dist_ = raw_dist
        # obtain consistency at normal models
        self.correct_covariance(X)
        # re-weight estimator
        self.reweight_covariance(X)
        return self
    def correct_covariance(self, data):
        """Apply a correction to raw Minimum Covariance Determinant estimates.
        Correction using the empirical correction factor suggested
        by Rousseeuw and Van Driessen in [Rouseeuw1984]_.
        Parameters
        ----------
        data : array-like, shape (n_samples, n_features)
            The data matrix, with p features and n samples.
            The data set must be the one which was used to compute
            the raw estimates.
        Returns
        -------
        covariance_corrected : array-like, shape (n_features, n_features)
            Corrected robust covariance estimate.
        """
        # Scale by median(dist) / chi2(p).isf(0.5) so the estimate is
        # consistent at the Gaussian model; distances are rescaled to match.
        correction = np.median(self.dist_) / chi2(data.shape[1]).isf(0.5)
        covariance_corrected = self.raw_covariance_ * correction
        self.dist_ /= correction
        return covariance_corrected
    def reweight_covariance(self, data):
        """Re-weight raw Minimum Covariance Determinant estimates.
        Re-weight observations using Rousseeuw's method (equivalent to
        deleting outlying observations from the data set before
        computing location and covariance estimates). [Rouseeuw1984]_
        Parameters
        ----------
        data : array-like, shape (n_samples, n_features)
            The data matrix, with p features and n samples.
            The data set must be the one which was used to compute
            the raw estimates.
        Returns
        -------
        location_reweighted : array-like, shape (n_features, )
            Re-weighted robust location estimate.
        covariance_reweighted : array-like, shape (n_features, n_features)
            Re-weighted robust covariance estimate.
        support_reweighted : array-like, type boolean, shape (n_samples,)
            A mask of the observations that have been used to compute
            the re-weighted robust location and covariance estimates.
        """
        n_samples, n_features = data.shape
        # Keep observations whose Mahalanobis distance falls under the
        # chi2 97.5% quantile; the rest are treated as outliers.
        mask = self.dist_ < chi2(n_features).isf(0.025)
        if self.assume_centered:
            location_reweighted = np.zeros(n_features)
        else:
            location_reweighted = data[mask].mean(0)
        covariance_reweighted = self._nonrobust_covariance(
            data[mask], assume_centered=self.assume_centered)
        support_reweighted = np.zeros(n_samples).astype(bool)
        support_reweighted[mask] = True
        self._set_covariance(covariance_reweighted)
        self.location_ = location_reweighted
        self.support_ = support_reweighted
        # Recompute distances w.r.t. the re-weighted estimates.
        X_centered = data - self.location_
        self.dist_ = np.sum(
            np.dot(X_centered, self.get_precision()) * X_centered, 1)
        return location_reweighted, covariance_reweighted, support_reweighted
| bsd-3-clause |
phenoxim/cinder | cinder/volume/drivers/huawei/fc_zone_helper.py | 13 | 11359 | # Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from oslo_log import log as logging
from cinder import exception
from cinder.i18n import _
from cinder.volume.drivers.huawei import constants
LOG = logging.getLogger(__name__)
class FCZoneHelper(object):
    """FC zone helper for Huawei driver.

    Selects the "best" Fibre Channel target ports on the array for a given
    LUN/host, balancing LUN count against port bandwidth, and maintains the
    corresponding port groups on the array.
    """
    def __init__(self, fcsan_lookup_service, client):
        # fcsan_lookup_service: FC SAN lookup service used to map
        # initiators/targets per fabric; client: Huawei array REST client.
        self.fcsan = fcsan_lookup_service
        self.client = client
    def _get_fc_ports_info(self):
        """Return a WWN-keyed dict of connected FC ports (id/contr/bandwidth)."""
        ports_info = {}
        data = self.client.get_fc_ports_on_array()
        for item in data:
            # Only ports that are actually connected are usable candidates.
            if item['RUNNINGSTATUS'] == constants.FC_PORT_CONNECTED:
                # PARENTID looks like "<controller>.<...>"; the first segment
                # identifies the owning controller.
                location = item['PARENTID'].split('.')
                port_info = {}
                port_info['id'] = item['ID']
                port_info['contr'] = location[0]
                port_info['bandwidth'] = item['RUNSPEED']
                ports_info[item['WWN']] = port_info
        return ports_info
    def _count_port_weight(self, port, ports_info):
        """Compute a load weight for a port; lower weight means less loaded.

        Returns a (load, inverse-bandwidth) tuple so that ties on load are
        broken in favour of the faster port.
        """
        LOG.debug("Count weight for port: %s.", port)
        portgs = self.client.get_portgs_by_portid(ports_info[port]['id'])
        LOG.debug("Port %(port)s belongs to PortGroup %(portgs)s.",
                  {"port": port, "portgs": portgs})
        weight = 0
        for portg in portgs:
            views = self.client.get_views_by_portg(portg)
            if not views:
                LOG.debug("PortGroup %s doesn't belong to any view.", portg)
                continue
            LOG.debug("PortGroup %(portg)s belongs to view %(views)s.",
                      {"portg": portg, "views": views[0]})
            # In fact, there is just one view for one port group.
            lungroup = self.client.get_lungroup_by_view(views[0])
            lun_num = self.client.get_obj_count_from_lungroup(lungroup)
            ports_in_portg = self.client.get_ports_by_portg(portg)
            LOG.debug("PortGroup %(portg)s contains ports: %(ports)s.",
                      {"portg": portg, "ports": ports_in_portg})
            total_bandwidth = 0
            for port_pg in ports_in_portg:
                if port_pg in ports_info:
                    total_bandwidth += int(ports_info[port_pg]['bandwidth'])
            LOG.debug("Total bandwidth for PortGroup %(portg)s is %(bindw)s.",
                      {"portg": portg, "bindw": total_bandwidth})
            if total_bandwidth:
                # LUNs-per-bandwidth ratio of this port group adds to the
                # port's load.
                weight += float(lun_num) / float(total_bandwidth)
        bandwidth = float(ports_info[port]['bandwidth'])
        return (weight, 10000 / bandwidth)
    def _get_weighted_ports_per_contr(self, ports, ports_info):
        """Pick up to PORT_NUM_PER_CONTR least-loaded ports of one controller."""
        port_weight_map = {}
        for port in ports:
            port_weight_map[port] = self._count_port_weight(port, ports_info)
        LOG.debug("port_weight_map: %s", port_weight_map)
        # Sort ascending by (load, inverse-bandwidth): best candidates first.
        sorted_ports = sorted(port_weight_map.items(), key=lambda d: d[1])
        weighted_ports = []
        count = 0
        for port in sorted_ports:
            if count >= constants.PORT_NUM_PER_CONTR:
                break
            weighted_ports.append(port[0])
            count += 1
        return weighted_ports
    def _get_weighted_ports(self, contr_port_map, ports_info, contrs):
        """Collect the best ports from each of the given controllers."""
        LOG.debug("_get_weighted_ports, we only select ports from "
                  "controllers: %s", contrs)
        weighted_ports = []
        for contr in contrs:
            if contr in contr_port_map:
                weighted_ports_per_contr = self._get_weighted_ports_per_contr(
                    contr_port_map[contr], ports_info)
                LOG.debug("Selected ports %(ports)s on controller %(contr)s.",
                          {"ports": weighted_ports_per_contr,
                           "contr": contr})
                weighted_ports.extend(weighted_ports_per_contr)
        return weighted_ports
    def _filter_by_fabric(self, wwns, ports):
        """Filter FC ports and initiators connected to fabrics."""
        ini_tgt_map = self.fcsan.get_device_mapping_from_network(wwns, ports)
        fabric_connected_ports = []
        fabric_connected_initiators = []
        for fabric in ini_tgt_map:
            fabric_connected_ports.extend(
                ini_tgt_map[fabric]['target_port_wwn_list'])
            fabric_connected_initiators.extend(
                ini_tgt_map[fabric]['initiator_port_wwn_list'])
        if not fabric_connected_ports:
            msg = _("No FC port connected to fabric.")
            raise exception.VolumeBackendAPIException(data=msg)
        if not fabric_connected_initiators:
            msg = _("No initiator connected to fabric.")
            raise exception.VolumeBackendAPIException(data=msg)
        LOG.debug("Fabric connected ports: %(ports)s, "
                  "Fabric connected initiators: %(initiators)s.",
                  {'ports': fabric_connected_ports,
                   'initiators': fabric_connected_initiators})
        return fabric_connected_ports, fabric_connected_initiators
    def _get_lun_engine_contrs(self, engines, lun_id,
                               lun_type=constants.LUN_TYPE):
        """Find the engine owning the LUN and that engine's controller list.

        NOTE(review): if no engine contains the LUN's owning controller, the
        last engine iterated is returned — presumably the array guarantees a
        match; verify against caller expectations.
        """
        contrs = []
        engine_id = None
        lun_info = self.client.get_lun_info(lun_id, lun_type)
        lun_contr_id = lun_info['OWNINGCONTROLLER']
        for engine in engines:
            # NODELIST is a JSON-encoded list of controller IDs.
            contrs = json.loads(engine['NODELIST'])
            engine_id = engine['ID']
            if lun_contr_id in contrs:
                break
        LOG.debug("LUN %(lun_id)s belongs to engine %(engine_id)s. Engine "
                  "%(engine_id)s has controllers: %(contrs)s.",
                  {"lun_id": lun_id, "engine_id": engine_id, "contrs": contrs})
        return contrs, engine_id
    def _build_contr_port_map(self, fabric_connected_ports, ports_info):
        """Group fabric-connected port WWNs by their owning controller."""
        contr_port_map = {}
        for port in fabric_connected_ports:
            contr = ports_info[port]['contr']
            if not contr_port_map.get(contr):
                contr_port_map[contr] = []
            contr_port_map[contr].append(port)
        LOG.debug("Controller port map: %s.", contr_port_map)
        return contr_port_map
    def _create_new_portg(self, portg_name, engine_id):
        """Create a fresh port group, removing any stale one of the same name."""
        portg_id = self.client.get_tgt_port_group(portg_name)
        if portg_id:
            # A leftover group (not attached to a view) must be emptied and
            # deleted before a group with the same name can be recreated.
            LOG.debug("Found port group %s not belonged to any view, "
                      "deleting it.", portg_name)
            ports = self.client.get_fc_ports_by_portgroup(portg_id)
            for port_id in ports.values():
                self.client.remove_port_from_portgroup(portg_id, port_id)
            self.client.delete_portgroup(portg_id)
        # The description records which engines this group serves.
        description = constants.PORTGROUP_DESCRIP_PREFIX + engine_id
        new_portg_id = self.client.create_portg(portg_name, description)
        return new_portg_id
    def build_ini_targ_map(self, wwns, host_id, lun_id,
                           lun_type=constants.LUN_TYPE):
        """Build the initiator->targets map (and port group) for a LUN.

        Returns (target_port_wwns, portg_id, init_targ_map).
        """
        engines = self.client.get_all_engines()
        LOG.debug("Get array engines: %s", engines)
        contrs, engine_id = self._get_lun_engine_contrs(engines, lun_id,
                                                        lun_type)
        # Check if there is already a port group in the view.
        # If yes and have already considered the engine,
        # we won't change anything about the port group and zone.
        view_name = constants.MAPPING_VIEW_PREFIX + host_id
        portg_name = constants.PORTGROUP_PREFIX + host_id
        view_id = self.client.find_mapping_view(view_name)
        portg_info = self.client.get_portgroup_by_view(view_id)
        portg_id = portg_info[0]['ID'] if portg_info else None
        init_targ_map = {}
        if portg_id:
            # The engines already covered by this group are encoded in its
            # description as a comma-separated list.
            description = portg_info[0].get("DESCRIPTION", '')
            engines = description.replace(constants.PORTGROUP_DESCRIP_PREFIX,
                                          "")
            engines = engines.split(',')
            ports = self.client.get_fc_ports_by_portgroup(portg_id)
            if engine_id in engines:
                LOG.debug("Have already selected ports for engine %s, just "
                          "use them.", engine_id)
                return (list(ports.keys()), portg_id, init_targ_map)
        # Filter initiators and ports that connected to fabrics.
        ports_info = self._get_fc_ports_info()
        (fabric_connected_ports, fabric_connected_initiators) = (
            self._filter_by_fabric(wwns, ports_info.keys()))
        # Build a controller->ports map for convenience.
        contr_port_map = self._build_contr_port_map(fabric_connected_ports,
                                                    ports_info)
        # Get the 'best' ports for the given controllers.
        weighted_ports = self._get_weighted_ports(contr_port_map, ports_info,
                                                  contrs)
        if not weighted_ports:
            msg = _("No FC port can be used for LUN %s.") % lun_id
            LOG.error(msg)
            raise exception.VolumeBackendAPIException(data=msg)
        # Handle port group.
        port_list = [ports_info[port]['id'] for port in weighted_ports]
        if portg_id:
            # Add engine ID to the description of the port group.
            self.client.append_portg_desc(portg_id, engine_id)
            # Extend the weighted_ports to include the ports already in the
            # port group.
            weighted_ports.extend(list(ports.keys()))
        else:
            portg_id = self._create_new_portg(portg_name, engine_id)
        for port in port_list:
            self.client.add_port_to_portg(portg_id, port)
        for ini in fabric_connected_initiators:
            init_targ_map[ini] = weighted_ports
        LOG.debug("build_ini_targ_map: Port group name: %(portg_name)s, "
                  "init_targ_map: %(map)s.",
                  {"portg_name": portg_name,
                   "map": init_targ_map})
        return weighted_ports, portg_id, init_targ_map
    def get_init_targ_map(self, wwns, host_id):
        """Tear-down variant: empty the host's port group and return its map.

        Removes every port from the host's port group and returns
        (target_port_wwns, portg_id, init_targ_map) for zone removal.
        """
        error_ret = ([], None, {})
        if not host_id:
            return error_ret
        view_name = constants.MAPPING_VIEW_PREFIX + host_id
        view_id = self.client.find_mapping_view(view_name)
        if not view_id:
            return error_ret
        port_group = self.client.get_portgroup_by_view(view_id)
        portg_id = port_group[0]['ID'] if port_group else None
        ports = self.client.get_fc_ports_by_portgroup(portg_id)
        for port_id in ports.values():
            self.client.remove_port_from_portgroup(portg_id, port_id)
        init_targ_map = {}
        for wwn in wwns:
            init_targ_map[wwn] = list(ports.keys())
        return list(ports.keys()), portg_id, init_targ_map
| apache-2.0 |
devigned/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyDate/autorestdatetestservice/auto_rest_date_test_service.py | 5 | 2091 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Configuration, Serializer, Deserializer
from .version import VERSION
from .operations.date_model_operations import DateModelOperations
from . import models
class AutoRestDateTestServiceConfiguration(Configuration):
    """Configuration for AutoRestDateTestService.

    All parameters used to create this instance are saved as instance
    attributes.

    :param str base_url: Service URL
    """

    def __init__(
            self, base_url=None):
        # Fall back to the default local endpoint when no URL is given.
        super(AutoRestDateTestServiceConfiguration, self).__init__(
            base_url or 'https://localhost')
        self.add_user_agent('autorestdatetestservice/{}'.format(VERSION))
class AutoRestDateTestService(object):
    """Test Infrastructure for AutoRest.

    :ivar config: Configuration for client.
    :vartype config: AutoRestDateTestServiceConfiguration
    :ivar date_model: DateModel operations
    :vartype date_model: .operations.DateModelOperations
    :param str base_url: Service URL
    """

    def __init__(
            self, base_url=None):
        self.config = AutoRestDateTestServiceConfiguration(base_url)
        self._client = ServiceClient(None, self.config)
        # Register every class exported by the generated models package with
        # the serializer/deserializer pair.
        model_classes = {
            name: cls for name, cls in models.__dict__.items()
            if isinstance(cls, type)
        }
        self.api_version = '1.0.0'
        self._serialize = Serializer(model_classes)
        self._deserialize = Deserializer(model_classes)
        self.date_model = DateModelOperations(
            self._client, self.config, self._serialize, self._deserialize)
nanolearningllc/edx-platform-cypress | cms/djangoapps/contentstore/features/component_settings_editor_helpers.py | 38 | 9441 | # disable missing docstring
# pylint: disable=missing-docstring
from lettuce import world
from nose.tools import assert_equal, assert_in # pylint: disable=no-name-in-module
from terrain.steps import reload_the_page
from common import type_in_codemirror
from selenium.webdriver.common.keys import Keys
@world.absorb
def create_component_instance(step, category, component_type=None, is_advanced=False, advanced_component=None):
    """
    Create a new component in a Unit.
    Parameters
    ----------
    category: component type (discussion, html, problem, video, advanced)
    component_type: for components with multiple templates, the link text in the menu
    is_advanced: for problems, is the desired component under the advanced menu?
    advanced_component: for advanced components, the related value of policy key 'advanced_modules'
    """
    assert_in(category, ['advanced', 'problem', 'html', 'video', 'discussion'])
    component_button_css = 'span.large-{}-icon'.format(category.lower())
    # Problems render as CapaModule; advanced components render under the
    # module named by 'advanced_modules'; other categories use their own name.
    if category == 'problem':
        module_css = 'div.xmodule_CapaModule'
    elif category == 'advanced':
        module_css = 'div.xmodule_{}Module'.format(advanced_component.title())
    else:
        module_css = 'div.xmodule_{}Module'.format(category.title())
    # Count how many of that module is on the page. Later we will
    # assert that one more was added.
    # We need to use world.browser.find_by_css instead of world.css_find
    # because it's ok if there are currently zero of them.
    module_count_before = len(world.browser.find_by_css(module_css))
    # Disable the jquery animation for the transition to the menus.
    world.disable_jquery_animations()
    world.css_click(component_button_css)
    if category in ('problem', 'html', 'advanced'):
        # These categories open a template menu: wait for the button to hide,
        # then pick the requested template from the menu.
        world.wait_for_invisible(component_button_css)
        click_component_from_menu(category, component_type, is_advanced)
    # Verify exactly one new module of this type appeared on the page.
    expected_count = module_count_before + 1
    world.wait_for(
        lambda _: len(world.css_find(module_css)) == expected_count,
        timeout=20
    )
@world.absorb
def click_new_component_button(step, component_button_css):
    """Create a fresh unit, then press the requested 'new component' button."""
    step.given('I have clicked the new unit button')
    world.css_click(component_button_css)
def _click_advanced():
    """Switch the problem editor to its Advanced tab and wait for it to show."""
    tab_link_css = 'ul.problem-type-tabs a[href="#tab2"]'
    world.css_click(tab_link_css)
    # The tab panel is revealed asynchronously; block until it is on screen.
    world.wait_for_visible('div.ui-tabs-panel#tab2')
def _find_matching_button(category, component_type):
    """
    Find the button with the specified text. There should be one and only one.
    """
    # All candidate buttons shown on the tab for this category.
    candidates = world.css_find('div.new-component-{} button'.format(category))
    # Keep only candidates whose label matches the requested template text.
    matches = [candidate for candidate in candidates
               if candidate.text == component_type]
    # Exactly one button must match.
    assert_equal(len(matches), 1)
    return matches[0]
def click_component_from_menu(category, component_type, is_advanced):
    """
    Creates a component for a category with more
    than one template, i.e. HTML and Problem.
    For some problem types, it is necessary to click to
    the Advanced tab.
    The component_type is the link text, e.g. "Blank Common Problem"
    """
    if is_advanced:
        # Sometimes this click does not work if you go too fast.
        world.retry_on_exception(
            _click_advanced,
            ignored_exceptions=AssertionError,
        )
    # Retry this in case the list is empty because you tried too fast.
    link = world.retry_on_exception(
        lambda: _find_matching_button(category, component_type),
        ignored_exceptions=AssertionError
    )
    # Wait for the link to be clickable. If you go too fast it is not.
    world.retry_on_exception(lambda: link.click())
@world.absorb
def edit_component_and_select_settings():
    # Open the component editor, then make sure its Settings tab is showing.
    world.edit_component()
    world.ensure_settings_visible()
@world.absorb
def ensure_settings_visible():
    # The 'Settings' tab only exists when the editor offers more than one
    # tab; click it when present so the settings panel becomes visible.
    if world.browser.find_by_css('.settings-button'):
        world.css_click('.settings-button')
@world.absorb
def edit_component(index=0):
    # Open the editor for the index-th component on the page (default: first).
    # Verify that the "loading" indication has been hidden.
    world.wait_for_loading()
    # Verify that the "edit" button is present.
    world.wait_for(lambda _driver: world.css_visible('a.edit-button'))
    world.css_click('a.edit-button', index)
    world.wait_for_ajax_complete()
@world.absorb
def select_editor_tab(tab_name):
    """
    Click the editor tab whose label matches tab_name (case-insensitive,
    surrounding whitespace ignored) and wait for the switch to complete.
    """
    editor_tabs = world.browser.find_by_css('.editor-tabs a')
    expected_tab_text = tab_name.strip().upper()
    matching_tabs = [tab for tab in editor_tabs if tab.text.upper() == expected_tab_text]
    # Use assert_equal, as the rest of this module does, instead of a bare
    # assert statement (which is stripped when Python runs with -O).
    assert_equal(len(matching_tabs), 1)
    tab = matching_tabs[0]
    tab.click()
    world.wait_for_ajax_complete()
def enter_xml_in_advanced_problem(step, text):
    """
    Edits an advanced problem (assumes only on page),
    types the provided XML, and saves the component.
    """
    world.edit_component()
    # The XML editor is the first (index 0) CodeMirror instance on the page.
    type_in_codemirror(0, text)
    world.save_component()
@world.absorb
def verify_setting_entry(setting, display_name, value, explicitly_set):
    """
    Verify the capa module fields are set as expected in the
    Advanced Settings editor.
    Parameters
    ----------
    setting: the WebDriverElement object found in the browser
    display_name: the string expected as the label
    value: the expected field value (the docstring previously documented a
        nonexistent parameter named 'html')
    explicitly_set: True if the value is expected to have been explicitly set
    for the problem, rather than derived from the defaults. This is verified
    by the existence of a "Clear" button next to the field value.
    """
    assert_equal(display_name, setting.find_by_css('.setting-label')[0].html.strip())
    # Check if the web object is a list type
    # If so, we use a slightly different mechanism for determining its value
    if setting.has_class('metadata-list-enum') or setting.has_class('metadata-dict') or setting.has_class('metadata-video-translations'):
        list_value = ', '.join(ele.value for ele in setting.find_by_css('.list-settings-item'))
        assert_equal(value, list_value)
    elif setting.has_class('metadata-videolist-enum'):
        list_value = ', '.join(ele.find_by_css('input')[0].value for ele in setting.find_by_css('.videolist-settings-item'))
        assert_equal(value, list_value)
    else:
        assert_equal(value, setting.find_by_css('.setting-input')[0].value)
    # VideoList doesn't have clear button
    if not setting.has_class('metadata-videolist-enum'):
        # Renamed from camelCase (settingClearButton) to match PEP 8 and the
        # naming used throughout this module.
        setting_clear_button = setting.find_by_css('.setting-clear')[0]
        assert_equal(explicitly_set, setting_clear_button.has_class('active'))
        assert_equal(not explicitly_set, setting_clear_button.has_class('inactive'))
@world.absorb
def verify_all_setting_entries(expected_entries):
    # Check every rendered setting row against the expected
    # (label, value, explicitly_set) triples, in order.
    settings = world.browser.find_by_css('.wrapper-comp-setting')
    assert_equal(len(expected_entries), len(settings))
    for setting, (label, value, explicitly_set) in zip(settings, expected_entries):
        world.verify_setting_entry(setting, label, value, explicitly_set)
@world.absorb
def save_component():
    # Persist the open component editor and wait for the save round-trip.
    world.css_click("a.action-save")
    world.wait_for_ajax_complete()
@world.absorb
def save_component_and_reopen(step):
    # Save the editor, reload the page, and reopen the settings editor.
    save_component()
    # We have a known issue that modifications are still shown within the edit window after cancel (though)
    # they are not persisted. Refresh the browser to make sure the changes WERE persisted after Save.
    reload_the_page(step)
    edit_component_and_select_settings()
@world.absorb
def cancel_component(step):
    # Discard the open editor's changes, then reload to verify nothing stuck.
    world.css_click("a.action-cancel")
    # We have a known issue that modifications are still shown within the edit window after cancel (though)
    # they are not persisted. Refresh the browser to make sure the changes were not persisted.
    reload_the_page(step)
@world.absorb
def revert_setting_entry(label):
    # Click the "Clear" control next to the labelled setting to restore its default.
    get_setting_entry(label).find_by_css('.setting-clear')[0].click()
@world.absorb
def get_setting_entry(label):
    # Return the setting row whose label matches `label`, retrying while
    # the page is still rendering; None if no row matches.
    def find_row():
        for row in world.css_find('.wrapper-comp-setting'):
            if row.find_by_css('.setting-label')[0].value == label:
                return row
        return None
    return world.retry_on_exception(find_row)
@world.absorb
def get_setting_entry_index(label):
    # Return the position of the setting row whose label matches `label`,
    # retrying while the page is still rendering; None if no row matches.
    def find_index():
        rows = world.css_find('.metadata_edit .wrapper-comp-setting')
        return next(
            (position for position, row in enumerate(rows)
             if row.find_by_css('.setting-label')[0].value == label),
            None
        )
    return world.retry_on_exception(find_index)
@world.absorb
def set_field_value(index, value):
    """
    Set the field to the specified value.
    Note: we cannot use css_fill here because the value is not set
    until after you move away from that field.
    Instead we will find the element, set its value, then hit the Tab key
    to get to the next field.
    """
    elem = world.css_find('.metadata_edit div.wrapper-comp-setting input.setting-input')[index]
    elem.value = value
    # Tabbing away blurs the field, which makes the browser commit the value.
    elem.type(Keys.TAB)
| agpl-3.0 |
johankaito/fufuka | microblog/venv/lib/python2.7/site-packages/pip/_vendor/progress/bar.py | 404 | 2707 | # -*- coding: utf-8 -*-
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import unicode_literals
from . import Progress
from .helpers import WritelnMixin
class Bar(WritelnMixin, Progress):
    """Classic text progress bar: message, |####    | and a suffix."""
    width = 32
    message = ''
    suffix = '%(index)d/%(max)d'
    bar_prefix = ' |'
    bar_suffix = '| '
    empty_fill = ' '
    fill = '#'
    hide_cursor = True
    def update(self):
        # Split the fixed-width bar into filled and empty portions.
        filled = int(self.width * self.progress)
        self.writeln(''.join([
            self.message % self,
            self.bar_prefix,
            self.fill * filled,
            self.empty_fill * (self.width - filled),
            self.bar_suffix,
            self.suffix % self,
        ]))
class ChargingBar(Bar):
    # "Charging" style bar: percentage suffix, no pipe delimiters, and
    # Unicode dot/full-block fill characters.
    suffix = '%(percent)d%%'
    bar_prefix = ' '
    bar_suffix = ' '
    empty_fill = '∙'
    fill = '█'
class FillingSquaresBar(ChargingBar):
    # ChargingBar variant drawn with empty/filled square glyphs.
    empty_fill = '▢'
    fill = '▣'
class FillingCirclesBar(ChargingBar):
    # ChargingBar variant drawn with empty/filled circle glyphs.
    empty_fill = '◯'
    fill = '◉'
class IncrementalBar(Bar):
    """Bar that shows sub-cell progress with a sequence of partial-fill glyphs."""
    phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█')
    def update(self):
        nphases = len(self.phases)
        # Progress measured in "phase" units: width cells * phases per cell.
        expanded = int(nphases * self.width * self.progress)
        filled = int(self.width * self.progress)
        # Which partial glyph to draw in the cell currently being filled.
        phase = expanded - (filled * nphases)
        current = self.phases[phase] if phase > 0 else ''
        empty = self.empty_fill * max(0, self.width - filled - len(current))
        self.writeln(''.join([
            self.message % self,
            self.bar_prefix,
            self.phases[-1] * filled,
            current,
            empty,
            self.bar_suffix,
            self.suffix % self,
        ]))
class ShadyBar(IncrementalBar):
    # IncrementalBar variant that uses shade characters for partial fill.
    phases = (' ', '░', '▒', '▓', '█')
| apache-2.0 |
yoer/hue | desktop/core/ext-py/django-openid-auth-0.5/django_openid_auth/forms.py | 45 | 3582 | # django-openid-auth - OpenID integration for django.contrib.auth
#
# Copyright (C) 2007 Simon Willison
# Copyright (C) 2008-2013 Canonical Ltd.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from django import forms
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserChangeForm
from django.contrib.auth.models import Group
from django.utils.translation import ugettext as _
from django.conf import settings
from openid.yadis import xri
def teams_new_unicode(self):
    """
    Replacement for Group.__unicode__()
    Calls original method to chain results
    """
    base_name = self.unicode_before_teams()
    teams_mapping = getattr(settings, 'OPENID_LAUNCHPAD_TEAMS_MAPPING', {})
    # Teams whose mapping points at this group's name.
    matching_teams = [team for team, group_name in teams_mapping.items()
                      if group_name == self.name]
    if matching_teams:
        return "%s -> %s" % (base_name, ", ".join(matching_teams))
    return base_name
# Preserve the original Group.__unicode__ under a new name, then swap in the
# team-aware version so admin listings show externally-mapped teams.
Group.unicode_before_teams = Group.__unicode__
Group.__unicode__ = teams_new_unicode
class UserChangeFormWithTeamRestriction(UserChangeForm):
    """
    Extends UserChangeForm to add teams awareness to the user admin form
    """
    def clean_groups(self):
        """Reject manual assignment of groups mapped to external teams.

        Raises forms.ValidationError if a newly-assigned group (one the user
        is not already in) is mapped to an external team.
        """
        data = self.cleaned_data['groups']
        teams_mapping = getattr(settings, 'OPENID_LAUNCHPAD_TEAMS_MAPPING', {})
        known_teams = teams_mapping.values()
        user_groups = self.instance.groups.all()
        for group in data:
            if group.name in known_teams and group not in user_groups:
                # Single-line message: the previous triple-quoted literal
                # embedded a newline and source indentation in the text
                # shown to admins.
                raise forms.ValidationError(
                    "The group %s is mapped to an external team. "
                    "You cannot assign it manually." % group.name)
        return data
# Install the team-aware change form on Django's stock UserAdmin.
UserAdmin.form = UserChangeFormWithTeamRestriction
class OpenIDLoginForm(forms.Form):
    """Login form accepting an OpenID identifier (XRI i-names optional)."""
    openid_identifier = forms.CharField(
        max_length=255,
        widget=forms.TextInput(attrs={'class': 'required openid'}))
    def clean_openid_identifier(self):
        # Reject XRI i-names when the deployment disables them via
        # settings.OPENID_DISALLOW_INAMES.
        if 'openid_identifier' in self.cleaned_data:
            identifier = self.cleaned_data['openid_identifier']
            is_iname = xri.identifierScheme(identifier) == 'XRI'
            if is_iname and getattr(settings, 'OPENID_DISALLOW_INAMES', False):
                raise forms.ValidationError(_('i-names are not supported'))
            return self.cleaned_data['openid_identifier']
| apache-2.0 |
woltage/ansible | lib/ansible/plugins/action/set_fact.py | 51 | 1392 | # Copyright 2013 Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
class ActionModule(ActionBase):
    """Action plugin backing ``set_fact``: templates each argument key and
    returns the arguments as host facts without touching the remote node."""
    TRANSFERS_FILES = False
    def run(self, tmp=None, task_vars=None):
        # Default to None rather than dict(): a mutable default argument is
        # shared across every call of run(). (task_vars is accepted for
        # interface compatibility; it is not read here.)
        if task_vars is None:
            task_vars = dict()
        facts = dict()
        if self._task.args:
            for (k, v) in self._task.args.iteritems():
                k = self._templar.template(k)
                # Coerce boolean-looking strings so `foo: 'yes'` becomes True.
                if isinstance(v, basestring) and v.lower() in ('true', 'false', 'yes', 'no'):
                    v = boolean(v)
                facts[k] = v
        return dict(changed=False, ansible_facts=facts)
| gpl-3.0 |
pongem/python-bot-project | appengine/standard/botapp/env/lib/python2.7/site-packages/django/utils/dateparse.py | 458 | 3995 | """Functions to parse datetime objects."""
# We're using regular expressions rather than time.strptime because:
# - They provide both validation and parsing.
# - They're more flexible for datetimes.
# - The date/datetime/time constructors produce friendlier error messages.
import datetime
import re
from django.utils import six
from django.utils.timezone import get_fixed_timezone, utc
# ISO-style date: YYYY-MM-DD (month and day may be 1 or 2 digits).
date_re = re.compile(
    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})$'
)
# HH:MM[:SS[.ffffff]]; fractional digits beyond six are matched but dropped.
time_re = re.compile(
    r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
)
# Date and time separated by 'T' or a space, with an optional trailing
# 'Z' or numeric UTC offset (+HH, +HHMM or +HH:MM).
datetime_re = re.compile(
    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
    r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
)
# Django's own duration format: '[[-]D [days[,]] ][HH:]MM:SS[.ffffff]'.
standard_duration_re = re.compile(
    r'^'
    r'(?:(?P<days>-?\d+) (days?, )?)?'
    r'((?:(?P<hours>\d+):)(?=\d+:\d+))?'
    r'(?:(?P<minutes>\d+):)?'
    r'(?P<seconds>\d+)'
    r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?'
    r'$'
)
# Support the sections of ISO 8601 date representation that are accepted by
# timedelta
iso8601_duration_re = re.compile(
    r'^P'
    r'(?:(?P<days>\d+(.\d+)?)D)?'
    r'(?:T'
    r'(?:(?P<hours>\d+(.\d+)?)H)?'
    r'(?:(?P<minutes>\d+(.\d+)?)M)?'
    r'(?:(?P<seconds>\d+(.\d+)?)S)?'
    r')?'
    r'$'
)
def parse_date(value):
    """Parses a string and return a datetime.date.
    Raises ValueError if the input is well formatted but not a valid date.
    Returns None if the input isn't well formatted.
    """
    match = date_re.match(value)
    if not match:
        return None
    parts = {name: int(text) for name, text in six.iteritems(match.groupdict())}
    return datetime.date(**parts)
def parse_time(value):
    """Parses a string and return a datetime.time.
    This function doesn't support time zone offsets.
    Raises ValueError if the input is well formatted but not a valid time.
    Returns None if the input isn't well formatted, in particular if it
    contains an offset.
    """
    match = time_re.match(value)
    if not match:
        return None
    parts = match.groupdict()
    if parts['microsecond']:
        # Right-pad so '.5' means 500000 microseconds, not 5.
        parts['microsecond'] = parts['microsecond'].ljust(6, '0')
    cleaned = {name: int(text)
               for name, text in six.iteritems(parts) if text is not None}
    return datetime.time(**cleaned)
def parse_datetime(value):
    """Parses a string and return a datetime.datetime.
    This function supports time zone offsets. When the input contains one,
    the output uses a timezone with a fixed offset from UTC.
    Raises ValueError if the input is well formatted but not a valid datetime.
    Returns None if the input isn't well formatted.
    """
    match = datetime_re.match(value)
    if not match:
        return None
    parts = match.groupdict()
    if parts['microsecond']:
        # Right-pad so '.5' means 500000 microseconds, not 5.
        parts['microsecond'] = parts['microsecond'].ljust(6, '0')
    tzinfo = parts.pop('tzinfo')
    if tzinfo == 'Z':
        tzinfo = utc
    elif tzinfo is not None:
        # Numeric offset such as '+05', '-0530' or '+05:30'; the minutes
        # portion is present only when the string is longer than '+HH'.
        offset_mins = int(tzinfo[-2:]) if len(tzinfo) > 3 else 0
        offset = 60 * int(tzinfo[1:3]) + offset_mins
        if tzinfo[0] == '-':
            offset = -offset
        tzinfo = get_fixed_timezone(offset)
    cleaned = {name: int(text)
               for name, text in six.iteritems(parts) if text is not None}
    cleaned['tzinfo'] = tzinfo
    return datetime.datetime(**cleaned)
def parse_duration(value):
    """Parses a duration string and returns a datetime.timedelta.
    The preferred format for durations in Django is '%d %H:%M:%S.%f'.
    Also supports ISO 8601 representation.
    """
    # Try Django's own format first, then fall back to ISO 8601.
    match = standard_duration_re.match(value) or iso8601_duration_re.match(value)
    if not match:
        return None
    parts = match.groupdict()
    if parts.get('microseconds'):
        # Right-pad so '.5' means 500000 microseconds, not 5.
        parts['microseconds'] = parts['microseconds'].ljust(6, '0')
    cleaned = {name: float(text)
               for name, text in six.iteritems(parts) if text is not None}
    return datetime.timedelta(**cleaned)
| apache-2.0 |
nikhilprathapani/python-for-android | python-build/python-libs/gdata/src/atom/mock_http_core.py | 135 | 11149 | #!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import StringIO
import pickle
import os.path
import tempfile
import atom.http_core
class MockHttpClient(object):
  """Replays recorded HTTP responses, or records them from a real client.

  When real_client is set, requests are forwarded to it and the (scrubbed)
  request/response pairs are recorded for later replay. When real_client is
  None, responses are served from the stored recordings.
  """
  debug = None
  real_client = None
  # The following members are used to construct the session cache temp file
  # name.
  # These are combined to form the file name
  # /tmp/cache_prefix.cache_case_name.cache_test_name
  cache_name_prefix = 'gdata_live_test'
  cache_case_name = ''
  cache_test_name = ''
  def __init__(self, recordings=None, real_client=None):
    self._recordings = recordings or []
    if real_client is not None:
      self.real_client = real_client
  def add_response(self, http_request, status, reason, headers=None,
                   body=None):
    """Stores a canned response to be replayed for matching requests."""
    response = MockHttpResponse(status, reason, headers, body)
    # TODO Scrub the request and the response.
    self._recordings.append((http_request._copy(), response))
  AddResponse = add_response
  def request(self, http_request):
    """Provide a recorded response, or record a response for replay.
    If the real_client is set, the request will be made using the
    real_client, and the response from the server will be recorded.
    If the real_client is None (the default), this method will examine
    the recordings and find the first which matches.
    """
    request = http_request._copy()
    _scrub_request(request)
    if self.real_client is None:
      # Replay mode: return the first recording that matches.
      for recording in self._recordings:
        if _match_request(recording[0], request):
          return recording[1]
    else:
      # Pass along the debug settings to the real client.
      self.real_client.debug = self.debug
      # Make an actual request since we can use the real HTTP client.
      response = self.real_client.request(http_request)
      _scrub_response(response)
      self.add_response(request, response.status, response.reason,
                        dict(response.getheaders()), response.read())
      # Return the recording which we just added.
      return self._recordings[-1][1]
    return None
  Request = request
  def _save_recordings(self, filename):
    """Pickles the recordings to a file in the system temp directory."""
    # Use a context manager so the handle is closed even if pickling
    # raises; the previous code leaked the open file object.
    with open(os.path.join(tempfile.gettempdir(), filename),
              'wb') as recording_file:
      pickle.dump(self._recordings, recording_file)
  def _load_recordings(self, filename):
    """Loads previously pickled recordings from the temp directory."""
    with open(os.path.join(tempfile.gettempdir(), filename),
              'rb') as recording_file:
      self._recordings = pickle.load(recording_file)
  def _delete_recordings(self, filename):
    """Removes the recordings file if it exists; a no-op otherwise."""
    full_path = os.path.join(tempfile.gettempdir(), filename)
    if os.path.exists(full_path):
      os.remove(full_path)
  def _load_or_use_client(self, filename, http_client):
    """Loads cached recordings if present, else records via http_client."""
    if os.path.exists(os.path.join(tempfile.gettempdir(), filename)):
      self._load_recordings(filename)
    else:
      self.real_client = http_client
  def use_cached_session(self, name=None, real_http_client=None):
    """Attempts to load recordings from a previous live request.
    If a temp file with the recordings exists, then it is used to fulfill
    requests. If the file does not exist, then a real client is used to
    actually make the desired HTTP requests. Requests and responses are
    recorded and will be written to the desired temporary cache file when
    close_session is called.
    Args:
      name: str (optional) The file name of session file to be used. The file
            is loaded from the temporary directory of this machine. If no name
            is passed in, a default name will be constructed using the
            cache_name_prefix, cache_case_name, and cache_test_name of this
            object.
      real_http_client: atom.http_core.HttpClient the real client to be used
                        if the cached recordings are not found. If the default
                        value is used, this will be an
                        atom.http_core.HttpClient.
    """
    if real_http_client is None:
      real_http_client = atom.http_core.HttpClient()
    if name is None:
      self._recordings_cache_name = self.get_cache_file_name()
    else:
      self._recordings_cache_name = name
    self._load_or_use_client(self._recordings_cache_name, real_http_client)
  def close_session(self):
    """Saves recordings in the temporary file named in use_cached_session."""
    if self.real_client is not None:
      self._save_recordings(self._recordings_cache_name)
  def delete_session(self, name=None):
    """Removes recordings from a previous live request."""
    if name is None:
      self._delete_recordings(self._recordings_cache_name)
    else:
      self._delete_recordings(name)
  def get_cache_file_name(self):
    """Builds the cache file name from the prefix, case and test names."""
    return '%s.%s.%s' % (self.cache_name_prefix, self.cache_case_name,
                         self.cache_test_name)
def _match_request(http_request, stored_request):
"""Determines whether a request is similar enough to a stored request
to cause the stored response to be returned."""
# Check to see if the host names match.
if (http_request.uri.host is not None
and http_request.uri.host != stored_request.uri.host):
return False
# Check the request path in the URL (/feeds/private/full/x)
elif http_request.uri.path != stored_request.uri.path:
return False
# Check the method used in the request (GET, POST, etc.)
elif http_request.method != stored_request.method:
return False
# If there is a gsession ID in either request, make sure that it is matched
# exactly.
elif ('gsessionid' in http_request.uri.query
or 'gsessionid' in stored_request.uri.query):
if 'gsessionid' not in stored_request.uri.query:
return False
elif 'gsessionid' not in http_request.uri.query:
return False
elif (http_request.uri.query['gsessionid']
!= stored_request.uri.query['gsessionid']):
return False
# Ignores differences in the query params (?start-index=5&max-results=20),
# the body of the request, the port number, HTTP headers, just to name a
# few.
return True
def _scrub_request(http_request):
""" Removes email address and password from a client login request.
Since the mock server saves the request and response in plantext, sensitive
information like the password should be removed before saving the
recordings. At the moment only requests sent to a ClientLogin url are
scrubbed.
"""
if (http_request and http_request.uri and http_request.uri.path and
http_request.uri.path.endswith('ClientLogin')):
# Remove the email and password from a ClientLogin request.
http_request._body_parts = []
http_request.add_form_inputs(
{'form_data': 'client login request has been scrubbed'})
else:
# We can remove the body of the post from the recorded request, since
# the request body is not used when finding a matching recording.
http_request._body_parts = []
return http_request
def _scrub_response(http_response):
  # Hook kept for symmetry with _scrub_request; responses are currently
  # recorded without modification.
  return http_response
class EchoHttpClient(object):
  """Sends the request data back in the response.
  Always responds 200 OK and mirrors request details into Echo-* response
  headers so tests can check how a request was formatted:
  'Echo-Host': "host:port" of the connection (port may be the string None),
  'Echo-Uri': the relative path portion of the URL, e.g. /example?x=1&y=2,
  'Echo-Scheme': the URL scheme, usually 'http' or 'https',
  'Echo-Method': the HTTP method, 'GET', 'POST', 'PUT', etc.
  The request body parts, if any, are echoed as the response body.
  """
  def request(self, http_request):
    return self._http_request(http_request.uri, http_request.method,
                              http_request.headers, http_request._body_parts)
  def _http_request(self, uri, method, headers=None, body_parts=None):
    body = StringIO.StringIO()
    response = atom.http_core.HttpResponse(status=200, reason='OK', body=body)
    if headers is None:
      response._headers = {}
    else:
      # Server response headers always arrive as strings, so stringify the
      # echoed values (e.g. an int Content-Length).
      response._headers = dict(
          (name, str(value)) for name, value in headers.iteritems())
    response._headers.update({
        'Echo-Host': '%s:%s' % (uri.host, str(uri.port)),
        'Echo-Uri': uri._get_relative_path(),
        'Echo-Scheme': uri.scheme,
        'Echo-Method': method,
    })
    # Echo the request body: strings are written directly, file-like parts
    # are read and written.
    for part in body_parts:
      if isinstance(part, str):
        body.write(part)
      elif hasattr(part, 'read'):
        body.write(part.read())
    body.seek(0)
    return response
class SettableHttpClient(object):
  """An HTTP Client which responds with the data given in set_response."""
  def __init__(self, status, reason, body, headers):
    """Configures the response for the server.
    See set_response for details on the arguments to the constructor.
    """
    self.set_response(status, reason, body, headers)
    # Holds the most recent request so tests can inspect what was sent.
    self.last_request = None
  def set_response(self, status, reason, body, headers):
    """Determines the response which will be sent for each request.
    Args:
      status: An int for the HTTP status code, example: 200, 404, etc.
      reason: String for the HTTP reason, example: OK, NOT FOUND, etc.
      body: The body of the HTTP response as a string or a file-like
            object (something with a read method).
      headers: dict of strings containing the HTTP headers in the response.
    """
    self.response = atom.http_core.HttpResponse(status=status, reason=reason,
        body=body)
    self.response._headers = headers.copy()
  def request(self, http_request):
    # Record the request and return the preconfigured canned response.
    self.last_request = http_request
    return self.response
class MockHttpResponse(atom.http_core.HttpResponse):
  """Recorded HTTP response whose body can be read any number of times."""
  def __init__(self, status=None, reason=None, headers=None, body=None):
    self._headers = headers or {}
    if status is not None:
      self.status = status
    if reason is not None:
      self.reason = reason
    if body is not None:
      # Keep the body as a string rather than a file-like object so that
      # read() can be called repeatedly.
      self._body = body.read() if hasattr(body, 'read') else body
  def read(self):
    return self._body
| apache-2.0 |
Lektorium-LLC/edx-platform | lms/djangoapps/mobile_api/tests/test_mobile_platform.py | 10 | 2348 | """
Tests for Platform against Mobile App Request
"""
import ddt
from django.test import TestCase
from mobile_api.mobile_platform import MobilePlatform
@ddt.ddt
class TestMobilePlatform(TestCase):
"""
Tests for platform against mobile app request
"""
@ddt.data(
("edX/org.edx.mobile (0.1.5; OS Version 9.2 (Build 13C75))", "iOS", "0.1.5"),
("edX/org.edx.mobile (1.01.1; OS Version 9.2 (Build 13C75))", "iOS", "1.01.1"),
("edX/org.edx.mobile (2.2.2; OS Version 9.2 (Build 13C75))", "iOS", "2.2.2"),
("edX/org.edx.mobile (3.3.3; OS Version 9.2 (Build 13C75))", "iOS", "3.3.3"),
("edX/org.edx.mobile (3.3.3.test; OS Version 9.2 (Build 13C75))", "iOS", "3.3.3.test"),
("edX/org.test-domain.mobile (0.1.5; OS Version 9.2 (Build 13C75))", "iOS", "0.1.5"),
("Dalvik/2.1.0 (Linux; U; Android 5.1; Nexus 5 Build/LMY47I) edX/org.edx.mobile/1.1.1", "Android", "1.1.1"),
("Dalvik/2.1.0 (Linux; U; Android 5.1; Nexus 5 Build/LMY47I) edX/org.edx.mobile/2.2.2", "Android", "2.2.2"),
("Dalvik/2.1.0 (Linux; U; Android 5.1; Nexus 5 Build/LMY47I) edX/org.edx.mobile/3.3.3", "Android", "3.3.3"),
("Dalvik/2.1.0 (Linux; U; Android 5.1; Nexus 5 Build/LMY47I) edX/org.edx.mobile/3.3.3.X", "Android", "3.3.3.X"),
)
@ddt.unpack
def test_platform_instance(self, user_agent, platform_name, version):
platform = MobilePlatform.get_instance(user_agent)
self.assertEqual(platform_name, platform.NAME)
self.assertEqual(version, platform.version)
@ddt.data(
("Mozilla/5.0 (Linux; Android 5.1; Nexus 5 Build/LMY47I; wv) AppleWebKit/537.36 (KHTML, like Gecko) "
"Version/4.0 Chrome/47.0.2526.100 Mobile Safari/537.36 edX/org.edx.mobile/2.0.0"),
("Mozilla/5.0 (iPhone; CPU iPhone OS 9_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) "
"Mobile/13C75 edX/org.edx.mobile/2.2.1"),
("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 "
"Safari/537.36"),
"edX/org.edx.mobile (0.1.5.2.; OS Version 9.2 (Build 13C75))",
"edX/org.edx.mobile (0.1.5.2.5.1; OS Version 9.2 (Build 13C75))",
)
def test_non_mobile_app_requests(self, user_agent):
self.assertIsNone(MobilePlatform.get_instance(user_agent))
| agpl-3.0 |
firebase/firebase-ios-sdk | FirebaseInAppMessaging/ProtoSupport/nanopb_build_protos.py | 4 | 8858 | #! /usr/bin/env python
# Copyright 2019 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Builds nanopb protos by calling a proto generator wrapper.
Example usage:
python Firebase/Core/nanopb_build_protos.py \
--nanopb \
--protos_dir=Firebase/Core/Protos/ \
--pythonpath=Firebase/Core/nanopb_temp/generator/ \
--output_dir=Firebase/Core/Protos/
"""
from __future__ import print_function
import sys
import argparse
import os
import os.path
import re
import subprocess
# Plugin script (located next to this file) passed to protoc via
# --plugin=protoc-gen-nanopb.
OBJC_GENERATOR='nanopb_proto_generator.py'
COPYRIGHT_NOTICE = '''
/*
* Copyright 2019 Google
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'''.lstrip()
def main():
  """Parses command-line arguments and runs the requested generators."""
  parser = argparse.ArgumentParser(
      description='Generates proto messages.')
  parser.add_argument(
      '--nanopb', action='store_true',
      help='Generates nanopb messages.')
  parser.add_argument(
      '--objc', action='store_true',
      help='Generates Objective-C messages.')
  parser.add_argument(
      '--protos_dir',
      help='Source directory containing .proto files.')
  parser.add_argument(
      '--output_dir', '-d',
      help='Directory to write files; subdirectories will be created.')
  parser.add_argument(
      '--protoc', default='protoc',
      help='Location of the protoc executable')
  parser.add_argument(
      '--pythonpath',
      help='Location of the protoc python library.')
  parser.add_argument(
      '--include', '-I', action='append', default=[],
      help='Adds INCLUDE to the proto path.')
  args = parser.parse_args()
  # store_true flags default to False, never None, so the original
  # `args.nanopb is None and args.objc is None` check could never fire.
  # Require at least one generator to be selected.
  if not (args.nanopb or args.objc):
    parser.print_help()
    sys.exit(1)
  if args.protos_dir is None:
    root_dir = os.path.abspath(os.path.dirname(__file__))
    args.protos_dir = os.path.join(root_dir, 'protos')
  if args.output_dir is None:
    root_dir = os.path.abspath(os.path.dirname(__file__))
    args.output_dir = os.path.join(root_dir, 'protogen-please-supply-an-outputdir')
  all_proto_files = collect_files(args.protos_dir, '.proto')
  if args.nanopb:
    NanopbGenerator(args, all_proto_files).run()
  proto_files = remove_well_known_protos(all_proto_files)
  if args.objc:
    ObjcProtobufGenerator(args, proto_files).run()
class NanopbGenerator(object):
    """Builds and runs the nanopb plugin to protoc."""

    def __init__(self, args, proto_files):
        # args: parsed command-line namespace (protoc, include, output_dir, ...)
        # proto_files: list of .proto paths to compile (well-known types included).
        self.args = args
        self.proto_files = proto_files

    def run(self):
        """Performs the action of the generator."""
        nanopb_out = os.path.join(self.args.output_dir, 'nanopb')
        mkdir(nanopb_out)

        self.__run_generator(nanopb_out)

        # Fix up the generated sources in place: stamp the license header,
        # strip the C++ guards nanopb emits, rename `delete` identifiers,
        # and qualify the pb.h include.
        sources = collect_files(nanopb_out, '.nanopb.h', '.nanopb.c')
        post_process_files(
            sources,
            add_copyright,
            nanopb_remove_extern_c,
            nanopb_rename_delete,
            nanopb_use_module_import
        )

    def __run_generator(self, out_dir):
        """Invokes protoc using the nanopb plugin."""
        cmd = protoc_command(self.args)

        # NOTE(review): OBJC_GENERATOR is used as the *nanopb* plugin here
        # despite its name -- confirm the constant names the right script.
        gen = os.path.join(os.path.dirname(__file__), OBJC_GENERATOR)
        cmd.append('--plugin=protoc-gen-nanopb=%s' % gen)

        nanopb_flags = [
            '--extension=.nanopb',
            '--source-extension=.c',
            '--no-timestamp'
        ]
        nanopb_flags.extend(['-I%s' % path for path in self.args.include])
        # protoc's <flags>:<dir> syntax: plugin options before the colon.
        cmd.append('--nanopb_out=%s:%s' % (' '.join(nanopb_flags), out_dir))

        cmd.extend(self.proto_files)
        run_protoc(self.args, cmd)
def protoc_command(args):
    """Builds the base protoc invocation including its include-path flags."""
    command = [args.protoc]
    if args.include is not None:
        for include_path in args.include:
            command.append('-I=%s' % include_path)
    return command
def run_protoc(args, cmd):
    """Actually runs the given protoc command.

    Args:
      args: The command-line args (including pythonpath)
      cmd: The command to run expressed as a list of strings
    """
    kwargs = {}
    if args.pythonpath:
        # Prepend the user-supplied path so protoc's python plugin finds its
        # library first, keeping any pre-existing PYTHONPATH behind it.
        env = os.environ.copy()
        old_path = env.get('PYTHONPATH')
        env['PYTHONPATH'] = os.path.expanduser(args.pythonpath)
        if old_path is not None:
            env['PYTHONPATH'] += os.pathsep + old_path
        kwargs['env'] = env

    try:
        print(subprocess.check_output(cmd, stderr=subprocess.STDOUT, **kwargs))
    except subprocess.CalledProcessError as error:
        # Best effort: report the failure and continue rather than aborting
        # the whole generation run.
        print('command failed: ', ' '.join(cmd), '\nerror: ', error.output)
def remove_well_known_protos(filenames):
    """Drops the "well-known" protobuf definitions from the list.

    Objective-C builds get these for free from the protobuf runtime; only
    nanopb needs them generated.

    Args:
      filenames: A list of filenames, each naming a .proto file.

    Returns:
      The filenames with members of google/protobuf removed.
    """
    kept = []
    for name in filenames:
        if 'protos/google/protobuf/' not in name:
            kept.append(name)
    return kept
def post_process_files(filenames, *processors):
    """Rewrites each named file by piping its lines through the processors.

    Args:
      filenames: Files to rewrite in place.
      *processors: Callables taking and returning a list of lines, applied
        in the order given.
    """
    for filename in filenames:
        lines = []
        with open(filename, 'r') as fd:
            lines = fd.readlines()

        for processor in processors:
            lines = processor(lines)

        write_file(filename, lines)
def write_file(filename, lines):
    """Writes the given lines to filename, creating parent dirs as needed."""
    mkdir(os.path.dirname(filename))
    with open(filename, 'w') as fd:
        fd.write(''.join(lines))
def add_copyright(lines):
    """Prepends the license header (plus a blank line) to the given lines."""
    return [COPYRIGHT_NOTICE, '\n'] + list(lines)
def nanopb_remove_extern_c(lines):
    """Strips the ``extern "C"`` guard blocks from nanopb output.

    Every span from a ``#ifdef __cplusplus`` line through the matching
    ``#endif`` line (both inclusive) is dropped; everything else is kept.

    Args:
      lines: A nanopb-generated source file, split into lines.

    Returns:
      A list of strings, similar to the input but with the guard blocks
      removed.
    """
    kept = []
    inside_guard = False
    for current in lines:
        if inside_guard:
            # Swallow everything up to and including the closing #endif.
            if '#endif' in current:
                inside_guard = False
        elif '#ifdef __cplusplus' in current:
            inside_guard = True
        else:
            kept.append(current)
    return kept
def nanopb_rename_delete(lines):
    """Renames the identifier ``delete`` to ``delete_``.

    nanopb happily emits a field named 'delete' into the message struct;
    that is legal C but a keyword clash in C++.

    Args:
      lines: The lines to fix.

    Returns:
      The lines, fixed.
    """
    keyword = re.compile(r'\bdelete\b')
    fixed = []
    for line in lines:
        fixed.append(keyword.sub('delete_', line))
    return fixed
def nanopb_use_module_import(lines):
    """Rewrites ``#include <pb.h>`` to the module-qualified ``<nanopb/pb.h>``."""
    flat_include = '#include <pb.h>'
    # The replacement is assembled from pieces so automated sync tooling
    # (Copybara) does not rewrite the literal.
    module_include = '{}include <nanopb/pb.h>'.format("#")
    return [line.replace(flat_include, module_include) for line in lines]
def strip_trailing_whitespace(lines):
    """Removes trailing whitespace from each line, normalizing to one ``\\n``."""
    cleaned = []
    for line in lines:
        cleaned.append(line.rstrip() + '\n')
    return cleaned
def objc_flatten_imports(lines):
    """Flattens directory-qualified #import paths for CocoaPods layouts."""
    directory_prefix = re.compile(r'#import ".*/')
    flattened = []
    for line in lines:
        flattened.append(directory_prefix.sub('#import "', line))
    return flattened
def objc_strip_extension_registry(lines):
    """Removes extensionRegistry method bodies from generated classes.

    The span from the ``+ (GPBExtensionRegistry*)extensionRegistry {`` line
    through the next bare ``}`` line (both inclusive) is dropped.
    """
    kept = []
    in_method = False
    for current in lines:
        if '+ (GPBExtensionRegistry*)extensionRegistry {' in current:
            in_method = True
        if in_method:
            if current == '}\n':
                in_method = False
        else:
            kept.append(current)
    return kept
def collect_files(root_dir, *extensions):
    """Finds files with the given extensions under root_dir.

    Args:
      root_dir: The directory from which to start traversing.
      *extensions: Filename extensions (including the leading dot) to find.

    Returns:
      A list of filenames, all starting with root_dir, that have one of the
      given extensions.
    """
    matches = []
    for dirpath, _, filenames in os.walk(root_dir):
        for name in filenames:
            # One append per matching extension, mirroring the caller's
            # expectation that extension lists are disjoint.
            for ext in extensions:
                if name.endswith(ext):
                    matches.append(os.path.join(dirpath, name))
    return matches
def mkdir(dirname):
    """Creates dirname (and any missing parents) if it does not exist.

    Uses EAFP rather than an isdir() pre-check so that a concurrent
    creation of the same directory (e.g. parallel generator runs) does
    not raise; any other failure is re-raised.
    """
    try:
        os.makedirs(dirname)
    except OSError:
        # Only swallow the error when the directory is actually there.
        if not os.path.isdir(dirname):
            raise
if __name__ == '__main__':
main() | apache-2.0 |
dakcarto/QGIS | python/plugins/processing/tests/GdalTest.py | 12 | 6501 | # -*- coding: utf-8 -*-
"""
***************************************************************************
GdalTest.py
---------------------
Date : April 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'April 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import unittest
from osgeo import gdal
from osgeo.gdalconst import GA_ReadOnly
import processing
from processing.tools import dataobjects
from processing.tools.system import getTempFilename
from processing.tests.TestData import raster, union
class GdalTest(unittest.TestCase):
    """Smoke tests for the GDAL/OGR Processing algorithm wrappers.

    Each test drives an algorithm through processing.runalg() and compares
    either a hash of the resulting raster array or the schema/attributes/
    geometry of the resulting vector layer against known-good values.

    NOTE(review): the expected strhash constants come from hash() of a
    unicode repr, which is interpreter-build dependent -- presumably only
    stable on the interpreter the QGIS test rig runs. This is Python-2-only
    code (unicode, iterator .next()).
    """

    def test_gdalogrsieve(self):
        # Sieve filter with a default (temporary) output location.
        outputs = processing.runalg('gdalogr:sieve', raster(), 2, 0, None)
        output = outputs['dst_filename']
        self.assertTrue(os.path.isfile(output))
        dataset = gdal.Open(output, GA_ReadOnly)
        strhash = hash(unicode(dataset.ReadAsArray(0).tolist()))
        self.assertEqual(strhash, -1353696889)

    def test_gdalogrsieveWithUnsupportedOutputFormat(self):
        # An .img target is not directly supported; the wrapper is expected
        # to fall back transparently and still produce the same pixels.
        outputs = processing.runalg('gdalogr:sieve', raster(), 2, 0,
                                    getTempFilename('img'))
        output = outputs['dst_filename']
        self.assertTrue(os.path.isfile(output))
        dataset = gdal.Open(output, GA_ReadOnly)
        strhash = hash(unicode(dataset.ReadAsArray(0).tolist()))
        self.assertEqual(strhash, -1353696889)

    def test_gdalogrwarpreproject(self):
        # Reproject the raster from EPSG:23030 to EPSG:4326.
        outputs = processing.runalg(
            'gdalogr:warpreproject',
            raster(),
            'EPSG:23030',
            'EPSG:4326',
            0,
            0,
            '',
            None,
        )
        output = outputs['OUTPUT']
        self.assertTrue(os.path.isfile(output))
        dataset = gdal.Open(output, GA_ReadOnly)
        strhash = hash(unicode(dataset.ReadAsArray(0).tolist()))
        self.assertEqual(strhash, -2021328784)

    def test_gdalogrmerge(self):
        # Merging a single input should reproduce the input pixels.
        outputs = processing.runalg('gdalogr:merge', raster(), False, False,
                                    None)
        output = outputs['OUTPUT']
        self.assertTrue(os.path.isfile(output))
        dataset = gdal.Open(output, GA_ReadOnly)
        strhash = hash(unicode(dataset.ReadAsArray(0).tolist()))
        self.assertEqual(strhash, -1353696889)

    def test_gdalogrogr2ogr(self):
        # Convert the union layer and verify schema, first feature
        # attributes, and geometry WKT.
        outputs = processing.runalg('gdalogr:ogr2ogr', union(), 3, '', None)
        output = outputs['OUTPUT_LAYER']
        layer = dataobjects.getObjectFromUri(output, True)
        fields = layer.pendingFields()
        expectednames = [
            'id',
            'poly_num_a',
            'poly_st_a',
            'id_2',
            'poly_num_b',
            'poly_st_b',
        ]
        expectedtypes = [
            'Integer',
            'Real',
            'String',
            'Integer',
            'Real',
            'String',
        ]
        names = [unicode(f.name()) for f in fields]
        types = [unicode(f.typeName()) for f in fields]
        self.assertEqual(expectednames, names)
        self.assertEqual(expectedtypes, types)
        features = processing.features(layer)
        self.assertEqual(8, len(features))
        # Python 2 iterator protocol: take the first feature.
        feature = features.next()
        attrs = feature.attributes()
        expectedvalues = [
            '1',
            '1.1',
            'string a',
            '2',
            '1',
            'string a',
        ]
        values = [unicode(attr) for attr in attrs]
        self.assertEqual(expectedvalues, values)
        wkt = 'POLYGON((270807.08580285 4458940.1594565,270798.42294527 4458914.62661676,270780.81854858 4458914.21983449,270763.52289518 4458920.715993,270760.3449542 4458926.6570575,270763.78234766 4458958.22561242,270794.30290024 4458942.16424502,270807.08580285 4458940.1594565))'
        self.assertEqual(wkt, unicode(feature.geometry().exportToWkt()))

    def test_gdalogrogr2ogrWrongExtension(self):
        # Same conversion but with an unrecognised output extension; the
        # wrapper must still produce an equivalent layer.
        outputs = processing.runalg('gdalogr:ogr2ogr', union(), 3, '',
                                    getTempFilename('wrongext'))
        output = outputs['OUTPUT_LAYER']
        layer = dataobjects.getObjectFromUri(output, True)
        fields = layer.pendingFields()
        expectednames = [
            'id',
            'poly_num_a',
            'poly_st_a',
            'id_2',
            'poly_num_b',
            'poly_st_b',
        ]
        expectedtypes = [
            'Integer',
            'Real',
            'String',
            'Integer',
            'Real',
            'String',
        ]
        names = [unicode(f.name()) for f in fields]
        types = [unicode(f.typeName()) for f in fields]
        self.assertEqual(expectednames, names)
        self.assertEqual(expectedtypes, types)
        features = processing.features(layer)
        self.assertEqual(8, len(features))
        feature = features.next()
        attrs = feature.attributes()
        expectedvalues = [
            '1',
            '1.1',
            'string a',
            '2',
            '1',
            'string a',
        ]
        values = [unicode(attr) for attr in attrs]
        self.assertEqual(expectedvalues, values)
        wkt = 'POLYGON((270807.08580285 4458940.1594565,270798.42294527 4458914.62661676,270780.81854858 4458914.21983449,270763.52289518 4458920.715993,270760.3449542 4458926.6570575,270763.78234766 4458958.22561242,270794.30290024 4458942.16424502,270807.08580285 4458940.1594565))'
        self.assertEqual(wkt, unicode(feature.geometry().exportToWkt()))
def suite():
    """Collects every test_* method of GdalTest into a TestSuite."""
    return unittest.makeSuite(GdalTest, 'test')
def runtests():
    """Runs the GDAL/OGR test suite and returns the populated TestResult."""
    outcome = unittest.TestResult()
    suite().run(outcome)
    return outcome
| gpl-2.0 |
PaloAltoNetworks-BD/SplunkforPaloAltoNetworks | SplunkforPaloAltoNetworks/bin/lib/pandevice/docs/moduleref.py | 2 | 3252 | #!/usr/bin/env python
# Copyright (c) 2016, Palo Alto Networks
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Author: Brian Torres-Gil <btorres-gil@paloaltonetworks.com>
"""Generate API reference page for each module"""
import os
import sys
import pkgutil
import errno
# pandevice modules that ship a configuration-tree diagram
# (_diagrams/pandevice.<name>.dot) to embed in their reference page.
tree_exists = [
    'device',
    'firewall',
    'ha',
    'network',
    'panorama',
    'policies',
]

# Modules documented without a tree diagram.
tree_not_exists = [
    'base',
    'errors',
    'objects',
    'updater',
    'userid',
]
# reST page template; format args: {0} module name, {1} '=' underline
# padding, {2} optional tree-diagram section (template_tree or '').
template_main = """Module: {0}
========{1}
Inheritance diagram
-------------------
.. inheritance-diagram:: pandevice.{0}
:parts: 1{2}
Class Reference
---------------
.. automodule:: pandevice.{0}
"""

# Section appended when a .dot diagram exists for the module.
template_tree = """
Configuration tree diagram
--------------------------
.. graphviz:: _diagrams/pandevice.{0}.dot """
def mkdir_p(path):
    """Creates path like ``mkdir -p``: parents included, no error if present."""
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        already_there = exc.errno == errno.EEXIST and os.path.isdir(path)
        if not already_there:
            raise
def create_module_references(directory=None):
    """Generates one module-<name>.rst reference page per pandevice module.

    Args:
        directory: Output directory for the .rst files (created if missing).
            Writes into the current directory when None.
    """
    # Set paths to package and modules; prepend so the in-tree pandevice
    # wins over any installed copy.
    curdir = os.path.dirname(os.path.abspath(__file__))
    rootpath = [os.path.join(curdir, os.pardir)]
    libpath = [os.path.join(curdir, os.pardir, 'pandevice')]
    sys.path[:0] = rootpath
    sys.path[:0] = libpath
    #print "Looking for pandevice in path: %s" % libpath

    # Import all modules in package
    modules = []
    for importer, modname, ispkg in pkgutil.iter_modules(path=libpath,
                                                         prefix="pandevice."):
        modules.append(__import__(modname, fromlist="dummy"))

    output = {}

    # Create output for each module: fill the page template, attaching the
    # tree-diagram section only for modules listed in tree_exists.
    for module in modules:
        module_name = module.__name__.split(".")[-1]
        header_pad = "="*len(module_name)
        if module_name in tree_exists:
            config_tree = template_tree.format(module_name)
        else:
            config_tree = ""
        module_string = template_main.format(module_name, header_pad, config_tree)
        output[module_name] = module_string

    # Write output to file or stdout
    path = ""
    if directory is not None:
        mkdir_p(directory)
        path = directory + "/"
    # NOTE(review): iteritems() makes this Python-2 only.
    for module, lines in output.iteritems():
        # 'interface' is deliberately excluded from the generated docs.
        if module == "interface":
            continue
        if not lines:
            continue
        # NB: `file` shadows the builtin; harmless within this scope.
        with open("{0}module-{1}.rst".format(path, module), 'w') as file:
            file.write(lines)
if __name__ == "__main__":
create_module_references()
| isc |
google/or-tools | examples/contrib/lectures.py | 1 | 3038 | # Copyright 2010 Hakan Kjellerstrand hakank@gmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Lectures problem in Google CP Solver.
Biggs: Discrete Mathematics (2nd ed), page 187.
'''
Suppose we wish to schedule six one-hour lectures, v1, v2, v3, v4, v5, v6.
Among the the potential audience there are people who wish to hear both
- v1 and v2
- v1 and v4
- v3 and v5
- v2 and v6
- v4 and v5
- v5 and v6
- v1 and v6
How many hours are necessary in order that the lectures can be given
without clashes?
'''
Compare with the following models:
* MiniZinc: http://www.hakank.org/minizinc/lectures.mzn
* SICstus: http://hakank.org/sicstus/lectures.pl
* ECLiPSe: http://hakank.org/eclipse/lectures.ecl
* Gecode: http://hakank.org/gecode/lectures.cpp
This model was created by Hakan Kjellerstrand (hakank@gmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
import sys
from ortools.constraint_solver import pywrapcp
def main():
    """Builds and solves the lecture-scheduling model as graph colouring.

    Minimises the number of distinct time slots (colours) so that no two
    clashing lectures share a slot, printing each improving solution and
    the final search statistics.
    """
    # Create the solver.
    solver = pywrapcp.Solver('Lectures')

    #
    # data
    #

    #
    # The schedule requirements:
    # lecture a cannot be held at the same time as b
    # Note: 1-based
    g = [[1, 2], [1, 4], [3, 5], [2, 6], [4, 5], [5, 6], [1, 6]]

    # number of nodes
    n = 6

    # number of edges
    edges = len(g)

    #
    # declare variables
    #
    # v[i] is the time slot (colour) assigned to lecture i.
    v = [solver.IntVar(0, n - 1, 'v[%i]' % i) for i in range(n)]

    # maximum color, to minimize
    # Note: since Python is 0-based, the
    # number of colors is +1
    max_c = solver.IntVar(0, n - 1, 'max_c')

    #
    # constraints
    #
    solver.Add(max_c == solver.Max(v))

    # ensure that there are no clashes
    # also, adjust to 0-base
    for i in range(edges):
        solver.Add(v[g[i][0] - 1] != v[g[i][1] - 1])

    # symmetry breaking:
    # - v0 has the color 0,
    # - v1 has either color 0 or 1
    solver.Add(v[0] == 0)
    solver.Add(v[1] <= 1)

    # objective
    objective = solver.Minimize(max_c, 1)

    #
    # solution and search
    #
    db = solver.Phase(v, solver.CHOOSE_MIN_SIZE_LOWEST_MIN,
                      solver.ASSIGN_CENTER_VALUE)

    solver.NewSearch(db, [objective])
    num_solutions = 0
    while solver.NextSolution():
        num_solutions += 1
        print('max_c:', max_c.Value() + 1, 'colors')
        print('v:', [v[i].Value() for i in range(n)])
        print()

    print('num_solutions:', num_solutions)
    print('failures:', solver.Failures())
    print('branches:', solver.Branches())
    print('WallTime:', solver.WallTime(), 'ms')
if __name__ == '__main__':
main()
| apache-2.0 |
lafayette/JBTT | framework/python/Lib/encodings/cp737.py | 593 | 34937 | """ Python Character Mapping Codec cp737 generated from 'VENDORS/MICSFT/PC/CP737.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp737 codec backed by the module-level charmap tables."""

    def encode(self, input, errors='strict'):
        # unicode -> cp737 bytes via the module-level encoding_map.
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        # cp737 bytes -> unicode via the module-level decoding_table.
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental cp737 encoder (stateless charmap, so `final` is unused)."""

    def encode(self, input, final=False):
        return codecs.charmap_encode(input, self.errors, encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental cp737 decoder (stateless charmap, so `final` is unused)."""

    def decode(self, input, final=False):
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer: inherits encode() from Codec, buffering from StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader: inherits decode() from Codec, buffering from StreamReader.
    pass
### encodings module API
def getregentry():
    """Returns the CodecInfo entry the ``encodings`` package registers."""
    return codecs.CodecInfo(
        name='cp737',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
# Generated from VENDORS/MICSFT/PC/CP737.TXT: positions 0x00-0x7f are
# ASCII-identical; only the high half (0x80-0xff) is overridden below.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
    0x0080: 0x0391, # GREEK CAPITAL LETTER ALPHA
    0x0081: 0x0392, # GREEK CAPITAL LETTER BETA
    0x0082: 0x0393, # GREEK CAPITAL LETTER GAMMA
    0x0083: 0x0394, # GREEK CAPITAL LETTER DELTA
    0x0084: 0x0395, # GREEK CAPITAL LETTER EPSILON
    0x0085: 0x0396, # GREEK CAPITAL LETTER ZETA
    0x0086: 0x0397, # GREEK CAPITAL LETTER ETA
    0x0087: 0x0398, # GREEK CAPITAL LETTER THETA
    0x0088: 0x0399, # GREEK CAPITAL LETTER IOTA
    0x0089: 0x039a, # GREEK CAPITAL LETTER KAPPA
    0x008a: 0x039b, # GREEK CAPITAL LETTER LAMDA
    0x008b: 0x039c, # GREEK CAPITAL LETTER MU
    0x008c: 0x039d, # GREEK CAPITAL LETTER NU
    0x008d: 0x039e, # GREEK CAPITAL LETTER XI
    0x008e: 0x039f, # GREEK CAPITAL LETTER OMICRON
    0x008f: 0x03a0, # GREEK CAPITAL LETTER PI
    0x0090: 0x03a1, # GREEK CAPITAL LETTER RHO
    0x0091: 0x03a3, # GREEK CAPITAL LETTER SIGMA
    0x0092: 0x03a4, # GREEK CAPITAL LETTER TAU
    0x0093: 0x03a5, # GREEK CAPITAL LETTER UPSILON
    0x0094: 0x03a6, # GREEK CAPITAL LETTER PHI
    0x0095: 0x03a7, # GREEK CAPITAL LETTER CHI
    0x0096: 0x03a8, # GREEK CAPITAL LETTER PSI
    0x0097: 0x03a9, # GREEK CAPITAL LETTER OMEGA
    0x0098: 0x03b1, # GREEK SMALL LETTER ALPHA
    0x0099: 0x03b2, # GREEK SMALL LETTER BETA
    0x009a: 0x03b3, # GREEK SMALL LETTER GAMMA
    0x009b: 0x03b4, # GREEK SMALL LETTER DELTA
    0x009c: 0x03b5, # GREEK SMALL LETTER EPSILON
    0x009d: 0x03b6, # GREEK SMALL LETTER ZETA
    0x009e: 0x03b7, # GREEK SMALL LETTER ETA
    0x009f: 0x03b8, # GREEK SMALL LETTER THETA
    0x00a0: 0x03b9, # GREEK SMALL LETTER IOTA
    0x00a1: 0x03ba, # GREEK SMALL LETTER KAPPA
    0x00a2: 0x03bb, # GREEK SMALL LETTER LAMDA
    0x00a3: 0x03bc, # GREEK SMALL LETTER MU
    0x00a4: 0x03bd, # GREEK SMALL LETTER NU
    0x00a5: 0x03be, # GREEK SMALL LETTER XI
    0x00a6: 0x03bf, # GREEK SMALL LETTER OMICRON
    0x00a7: 0x03c0, # GREEK SMALL LETTER PI
    0x00a8: 0x03c1, # GREEK SMALL LETTER RHO
    0x00a9: 0x03c3, # GREEK SMALL LETTER SIGMA
    0x00aa: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA
    0x00ab: 0x03c4, # GREEK SMALL LETTER TAU
    0x00ac: 0x03c5, # GREEK SMALL LETTER UPSILON
    0x00ad: 0x03c6, # GREEK SMALL LETTER PHI
    0x00ae: 0x03c7, # GREEK SMALL LETTER CHI
    0x00af: 0x03c8, # GREEK SMALL LETTER PSI
    0x00b0: 0x2591, # LIGHT SHADE
    0x00b1: 0x2592, # MEDIUM SHADE
    0x00b2: 0x2593, # DARK SHADE
    0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
    0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
    0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
    0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
    0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
    0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
    0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
    0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
    0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
    0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
    0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
    0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
    0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
    0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
    0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
    0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
    0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
    0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
    0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
    0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
    0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
    0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
    0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
    0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
    0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
    0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x00db: 0x2588, # FULL BLOCK
    0x00dc: 0x2584, # LOWER HALF BLOCK
    0x00dd: 0x258c, # LEFT HALF BLOCK
    0x00de: 0x2590, # RIGHT HALF BLOCK
    0x00df: 0x2580, # UPPER HALF BLOCK
    0x00e0: 0x03c9, # GREEK SMALL LETTER OMEGA
    0x00e1: 0x03ac, # GREEK SMALL LETTER ALPHA WITH TONOS
    0x00e2: 0x03ad, # GREEK SMALL LETTER EPSILON WITH TONOS
    0x00e3: 0x03ae, # GREEK SMALL LETTER ETA WITH TONOS
    0x00e4: 0x03ca, # GREEK SMALL LETTER IOTA WITH DIALYTIKA
    0x00e5: 0x03af, # GREEK SMALL LETTER IOTA WITH TONOS
    0x00e6: 0x03cc, # GREEK SMALL LETTER OMICRON WITH TONOS
    0x00e7: 0x03cd, # GREEK SMALL LETTER UPSILON WITH TONOS
    0x00e8: 0x03cb, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA
    0x00e9: 0x03ce, # GREEK SMALL LETTER OMEGA WITH TONOS
    0x00ea: 0x0386, # GREEK CAPITAL LETTER ALPHA WITH TONOS
    0x00eb: 0x0388, # GREEK CAPITAL LETTER EPSILON WITH TONOS
    0x00ec: 0x0389, # GREEK CAPITAL LETTER ETA WITH TONOS
    0x00ed: 0x038a, # GREEK CAPITAL LETTER IOTA WITH TONOS
    0x00ee: 0x038c, # GREEK CAPITAL LETTER OMICRON WITH TONOS
    0x00ef: 0x038e, # GREEK CAPITAL LETTER UPSILON WITH TONOS
    0x00f0: 0x038f, # GREEK CAPITAL LETTER OMEGA WITH TONOS
    0x00f1: 0x00b1, # PLUS-MINUS SIGN
    0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
    0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
    0x00f4: 0x03aa, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
    0x00f5: 0x03ab, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
    0x00f6: 0x00f7, # DIVISION SIGN
    0x00f7: 0x2248, # ALMOST EQUAL TO
    0x00f8: 0x00b0, # DEGREE SIGN
    0x00f9: 0x2219, # BULLET OPERATOR
    0x00fa: 0x00b7, # MIDDLE DOT
    0x00fb: 0x221a, # SQUARE ROOT
    0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
    0x00fd: 0x00b2, # SUPERSCRIPT TWO
    0x00fe: 0x25a0, # BLACK SQUARE
    0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\u0391' # 0x0080 -> GREEK CAPITAL LETTER ALPHA
u'\u0392' # 0x0081 -> GREEK CAPITAL LETTER BETA
u'\u0393' # 0x0082 -> GREEK CAPITAL LETTER GAMMA
u'\u0394' # 0x0083 -> GREEK CAPITAL LETTER DELTA
u'\u0395' # 0x0084 -> GREEK CAPITAL LETTER EPSILON
u'\u0396' # 0x0085 -> GREEK CAPITAL LETTER ZETA
u'\u0397' # 0x0086 -> GREEK CAPITAL LETTER ETA
u'\u0398' # 0x0087 -> GREEK CAPITAL LETTER THETA
u'\u0399' # 0x0088 -> GREEK CAPITAL LETTER IOTA
u'\u039a' # 0x0089 -> GREEK CAPITAL LETTER KAPPA
u'\u039b' # 0x008a -> GREEK CAPITAL LETTER LAMDA
u'\u039c' # 0x008b -> GREEK CAPITAL LETTER MU
u'\u039d' # 0x008c -> GREEK CAPITAL LETTER NU
u'\u039e' # 0x008d -> GREEK CAPITAL LETTER XI
u'\u039f' # 0x008e -> GREEK CAPITAL LETTER OMICRON
u'\u03a0' # 0x008f -> GREEK CAPITAL LETTER PI
u'\u03a1' # 0x0090 -> GREEK CAPITAL LETTER RHO
u'\u03a3' # 0x0091 -> GREEK CAPITAL LETTER SIGMA
u'\u03a4' # 0x0092 -> GREEK CAPITAL LETTER TAU
u'\u03a5' # 0x0093 -> GREEK CAPITAL LETTER UPSILON
u'\u03a6' # 0x0094 -> GREEK CAPITAL LETTER PHI
u'\u03a7' # 0x0095 -> GREEK CAPITAL LETTER CHI
u'\u03a8' # 0x0096 -> GREEK CAPITAL LETTER PSI
u'\u03a9' # 0x0097 -> GREEK CAPITAL LETTER OMEGA
u'\u03b1' # 0x0098 -> GREEK SMALL LETTER ALPHA
u'\u03b2' # 0x0099 -> GREEK SMALL LETTER BETA
u'\u03b3' # 0x009a -> GREEK SMALL LETTER GAMMA
u'\u03b4' # 0x009b -> GREEK SMALL LETTER DELTA
u'\u03b5' # 0x009c -> GREEK SMALL LETTER EPSILON
u'\u03b6' # 0x009d -> GREEK SMALL LETTER ZETA
u'\u03b7' # 0x009e -> GREEK SMALL LETTER ETA
u'\u03b8' # 0x009f -> GREEK SMALL LETTER THETA
u'\u03b9' # 0x00a0 -> GREEK SMALL LETTER IOTA
u'\u03ba' # 0x00a1 -> GREEK SMALL LETTER KAPPA
u'\u03bb' # 0x00a2 -> GREEK SMALL LETTER LAMDA
u'\u03bc' # 0x00a3 -> GREEK SMALL LETTER MU
u'\u03bd' # 0x00a4 -> GREEK SMALL LETTER NU
u'\u03be' # 0x00a5 -> GREEK SMALL LETTER XI
u'\u03bf' # 0x00a6 -> GREEK SMALL LETTER OMICRON
u'\u03c0' # 0x00a7 -> GREEK SMALL LETTER PI
u'\u03c1' # 0x00a8 -> GREEK SMALL LETTER RHO
u'\u03c3' # 0x00a9 -> GREEK SMALL LETTER SIGMA
u'\u03c2' # 0x00aa -> GREEK SMALL LETTER FINAL SIGMA
u'\u03c4' # 0x00ab -> GREEK SMALL LETTER TAU
u'\u03c5' # 0x00ac -> GREEK SMALL LETTER UPSILON
u'\u03c6' # 0x00ad -> GREEK SMALL LETTER PHI
u'\u03c7' # 0x00ae -> GREEK SMALL LETTER CHI
u'\u03c8' # 0x00af -> GREEK SMALL LETTER PSI
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\u2561' # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
u'\u2562' # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
u'\u2556' # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
u'\u2555' # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\u255c' # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
u'\u255b' # 0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\u255e' # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
u'\u255f' # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\u2567' # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
u'\u2568' # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
u'\u2564' # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
u'\u2565' # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
u'\u2559' # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
u'\u2558' # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
u'\u2552' # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
u'\u2553' # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
u'\u256b' # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
u'\u256a' # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\u258c' # 0x00dd -> LEFT HALF BLOCK
u'\u2590' # 0x00de -> RIGHT HALF BLOCK
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\u03c9' # 0x00e0 -> GREEK SMALL LETTER OMEGA
u'\u03ac' # 0x00e1 -> GREEK SMALL LETTER ALPHA WITH TONOS
u'\u03ad' # 0x00e2 -> GREEK SMALL LETTER EPSILON WITH TONOS
u'\u03ae' # 0x00e3 -> GREEK SMALL LETTER ETA WITH TONOS
u'\u03ca' # 0x00e4 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
u'\u03af' # 0x00e5 -> GREEK SMALL LETTER IOTA WITH TONOS
u'\u03cc' # 0x00e6 -> GREEK SMALL LETTER OMICRON WITH TONOS
u'\u03cd' # 0x00e7 -> GREEK SMALL LETTER UPSILON WITH TONOS
u'\u03cb' # 0x00e8 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
u'\u03ce' # 0x00e9 -> GREEK SMALL LETTER OMEGA WITH TONOS
u'\u0386' # 0x00ea -> GREEK CAPITAL LETTER ALPHA WITH TONOS
u'\u0388' # 0x00eb -> GREEK CAPITAL LETTER EPSILON WITH TONOS
u'\u0389' # 0x00ec -> GREEK CAPITAL LETTER ETA WITH TONOS
u'\u038a' # 0x00ed -> GREEK CAPITAL LETTER IOTA WITH TONOS
u'\u038c' # 0x00ee -> GREEK CAPITAL LETTER OMICRON WITH TONOS
u'\u038e' # 0x00ef -> GREEK CAPITAL LETTER UPSILON WITH TONOS
u'\u038f' # 0x00f0 -> GREEK CAPITAL LETTER OMEGA WITH TONOS
u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
u'\u2265' # 0x00f2 -> GREATER-THAN OR EQUAL TO
u'\u2264' # 0x00f3 -> LESS-THAN OR EQUAL TO
u'\u03aa' # 0x00f4 -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
u'\u03ab' # 0x00f5 -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\u2248' # 0x00f7 -> ALMOST EQUAL TO
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\u2219' # 0x00f9 -> BULLET OPERATOR
u'\xb7' # 0x00fa -> MIDDLE DOT
u'\u221a' # 0x00fb -> SQUARE ROOT
u'\u207f' # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N
u'\xb2' # 0x00fd -> SUPERSCRIPT TWO
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00b0: 0x00f8, # DEGREE SIGN
0x00b1: 0x00f1, # PLUS-MINUS SIGN
0x00b2: 0x00fd, # SUPERSCRIPT TWO
0x00b7: 0x00fa, # MIDDLE DOT
0x00f7: 0x00f6, # DIVISION SIGN
0x0386: 0x00ea, # GREEK CAPITAL LETTER ALPHA WITH TONOS
0x0388: 0x00eb, # GREEK CAPITAL LETTER EPSILON WITH TONOS
0x0389: 0x00ec, # GREEK CAPITAL LETTER ETA WITH TONOS
0x038a: 0x00ed, # GREEK CAPITAL LETTER IOTA WITH TONOS
0x038c: 0x00ee, # GREEK CAPITAL LETTER OMICRON WITH TONOS
0x038e: 0x00ef, # GREEK CAPITAL LETTER UPSILON WITH TONOS
0x038f: 0x00f0, # GREEK CAPITAL LETTER OMEGA WITH TONOS
0x0391: 0x0080, # GREEK CAPITAL LETTER ALPHA
0x0392: 0x0081, # GREEK CAPITAL LETTER BETA
0x0393: 0x0082, # GREEK CAPITAL LETTER GAMMA
0x0394: 0x0083, # GREEK CAPITAL LETTER DELTA
0x0395: 0x0084, # GREEK CAPITAL LETTER EPSILON
0x0396: 0x0085, # GREEK CAPITAL LETTER ZETA
0x0397: 0x0086, # GREEK CAPITAL LETTER ETA
0x0398: 0x0087, # GREEK CAPITAL LETTER THETA
0x0399: 0x0088, # GREEK CAPITAL LETTER IOTA
0x039a: 0x0089, # GREEK CAPITAL LETTER KAPPA
0x039b: 0x008a, # GREEK CAPITAL LETTER LAMDA
0x039c: 0x008b, # GREEK CAPITAL LETTER MU
0x039d: 0x008c, # GREEK CAPITAL LETTER NU
0x039e: 0x008d, # GREEK CAPITAL LETTER XI
0x039f: 0x008e, # GREEK CAPITAL LETTER OMICRON
0x03a0: 0x008f, # GREEK CAPITAL LETTER PI
0x03a1: 0x0090, # GREEK CAPITAL LETTER RHO
0x03a3: 0x0091, # GREEK CAPITAL LETTER SIGMA
0x03a4: 0x0092, # GREEK CAPITAL LETTER TAU
0x03a5: 0x0093, # GREEK CAPITAL LETTER UPSILON
0x03a6: 0x0094, # GREEK CAPITAL LETTER PHI
0x03a7: 0x0095, # GREEK CAPITAL LETTER CHI
0x03a8: 0x0096, # GREEK CAPITAL LETTER PSI
0x03a9: 0x0097, # GREEK CAPITAL LETTER OMEGA
0x03aa: 0x00f4, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
0x03ab: 0x00f5, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
0x03ac: 0x00e1, # GREEK SMALL LETTER ALPHA WITH TONOS
0x03ad: 0x00e2, # GREEK SMALL LETTER EPSILON WITH TONOS
0x03ae: 0x00e3, # GREEK SMALL LETTER ETA WITH TONOS
0x03af: 0x00e5, # GREEK SMALL LETTER IOTA WITH TONOS
0x03b1: 0x0098, # GREEK SMALL LETTER ALPHA
0x03b2: 0x0099, # GREEK SMALL LETTER BETA
0x03b3: 0x009a, # GREEK SMALL LETTER GAMMA
0x03b4: 0x009b, # GREEK SMALL LETTER DELTA
0x03b5: 0x009c, # GREEK SMALL LETTER EPSILON
0x03b6: 0x009d, # GREEK SMALL LETTER ZETA
0x03b7: 0x009e, # GREEK SMALL LETTER ETA
0x03b8: 0x009f, # GREEK SMALL LETTER THETA
0x03b9: 0x00a0, # GREEK SMALL LETTER IOTA
0x03ba: 0x00a1, # GREEK SMALL LETTER KAPPA
0x03bb: 0x00a2, # GREEK SMALL LETTER LAMDA
0x03bc: 0x00a3, # GREEK SMALL LETTER MU
0x03bd: 0x00a4, # GREEK SMALL LETTER NU
0x03be: 0x00a5, # GREEK SMALL LETTER XI
0x03bf: 0x00a6, # GREEK SMALL LETTER OMICRON
0x03c0: 0x00a7, # GREEK SMALL LETTER PI
0x03c1: 0x00a8, # GREEK SMALL LETTER RHO
0x03c2: 0x00aa, # GREEK SMALL LETTER FINAL SIGMA
0x03c3: 0x00a9, # GREEK SMALL LETTER SIGMA
0x03c4: 0x00ab, # GREEK SMALL LETTER TAU
0x03c5: 0x00ac, # GREEK SMALL LETTER UPSILON
0x03c6: 0x00ad, # GREEK SMALL LETTER PHI
0x03c7: 0x00ae, # GREEK SMALL LETTER CHI
0x03c8: 0x00af, # GREEK SMALL LETTER PSI
0x03c9: 0x00e0, # GREEK SMALL LETTER OMEGA
0x03ca: 0x00e4, # GREEK SMALL LETTER IOTA WITH DIALYTIKA
0x03cb: 0x00e8, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA
0x03cc: 0x00e6, # GREEK SMALL LETTER OMICRON WITH TONOS
0x03cd: 0x00e7, # GREEK SMALL LETTER UPSILON WITH TONOS
0x03ce: 0x00e9, # GREEK SMALL LETTER OMEGA WITH TONOS
0x207f: 0x00fc, # SUPERSCRIPT LATIN SMALL LETTER N
0x2219: 0x00f9, # BULLET OPERATOR
0x221a: 0x00fb, # SQUARE ROOT
0x2248: 0x00f7, # ALMOST EQUAL TO
0x2264: 0x00f3, # LESS-THAN OR EQUAL TO
0x2265: 0x00f2, # GREATER-THAN OR EQUAL TO
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2552: 0x00d5, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x2553: 0x00d6, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2555: 0x00b8, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x2556: 0x00b7, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x2558: 0x00d4, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x2559: 0x00d3, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255b: 0x00be, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x255c: 0x00bd, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x255e: 0x00c6, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x255f: 0x00c7, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2561: 0x00b5, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x2562: 0x00b6, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2564: 0x00d1, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x2565: 0x00d2, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2567: 0x00cf, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x2568: 0x00d0, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256a: 0x00d8, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x256b: 0x00d7, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x258c: 0x00dd, # LEFT HALF BLOCK
0x2590: 0x00de, # RIGHT HALF BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
| mit |
deapubhi/myblog | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/styles/pastie.py | 364 | 2473 | # -*- coding: utf-8 -*-
"""
pygments.styles.pastie
~~~~~~~~~~~~~~~~~~~~~~
Style similar to the `pastie`_ default style.
.. _pastie: http://pastie.caboo.se/
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class PastieStyle(Style):
    """
    Style similar to the pastie default style.
    """

    # No extra CSS applied to the highlighted block as a whole.
    default_style = ''

    # Token -> style string in the pygments style format:
    # optional 'bold'/'italic'/'nobold' attributes, '#rrggbb' foreground,
    # and 'bg:#rrggbb' background.
    styles = {
        Whitespace:             '#bbbbbb',
        Comment:                '#888888',
        Comment.Preproc:        'bold #cc0000',
        Comment.Special:        'bg:#fff0f0 bold #cc0000',

        String:                 'bg:#fff0f0 #dd2200',
        String.Regex:           'bg:#fff0ff #008800',
        String.Other:           'bg:#f0fff0 #22bb22',
        String.Symbol:          '#aa6600',
        String.Interpol:        '#3333bb',
        String.Escape:          '#0044dd',

        Operator.Word:          '#008800',

        Keyword:                'bold #008800',
        Keyword.Pseudo:         'nobold',
        Keyword.Type:           '#888888',

        Name.Class:             'bold #bb0066',
        Name.Exception:         'bold #bb0066',
        Name.Function:          'bold #0066bb',
        Name.Property:          'bold #336699',
        Name.Namespace:         'bold #bb0066',
        Name.Builtin:           '#003388',
        Name.Variable:          '#336699',
        Name.Variable.Class:    '#336699',
        Name.Variable.Instance: '#3333bb',
        Name.Variable.Global:   '#dd7700',
        Name.Constant:          'bold #003366',
        Name.Tag:               'bold #bb0066',
        Name.Attribute:         '#336699',
        Name.Decorator:         '#555555',
        Name.Label:             'italic #336699',

        Number:                 'bold #0000DD',

        Generic.Heading:        '#333',
        Generic.Subheading:     '#666',
        Generic.Deleted:        'bg:#ffdddd #000000',
        Generic.Inserted:       'bg:#ddffdd #000000',
        Generic.Error:          '#aa0000',

        Generic.Emph:           'italic',
        Generic.Strong:         'bold',
        Generic.Prompt:         '#555555',
        Generic.Output:         '#888888',
        Generic.Traceback:      '#aa0000',

        Error:                  'bg:#e3d2d2 #a61717'
    }
| mit |
ithemal/Ithemal | timing_tools/timing/gettiming.py | 1 | 10504 | from os import listdir
from os.path import isfile, join
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import sys
import common_libs.utilities as ut
from tqdm import tqdm
import subprocess
import os
import re
import time
import argparse
def wait_timeout(proc, seconds):
    """Poll *proc* until it exits or *seconds* elapse.

    Returns the process's exit status, or None if the deadline passed
    (in which case the process is killed first).
    """
    deadline = time.time() + seconds
    poll_delay = min(seconds / 1000.0, .25)
    while True:
        status = proc.poll()
        if status is not None:
            return status
        if time.time() >= deadline:
            proc.kill()
            return None
        time.sleep(poll_delay)
def fix_reg_names(line):
    """Rewrite r8l..r15l register spellings to the r8d..r15d forms nasm accepts."""
    # nasm recognizes, for instance, r14d rather than r14l
    for num in range(8, 16):
        line = line.replace('r%dl' % num, 'r%dd' % num)
    return line
def remove_unrecog_words(line):
    """Strip tokens that nasm does not understand from an intel-syntax line."""
    for token in ('ptr', '<rel>'):
        line = line.replace(token, '')
    return line
def add_memory_prefix(line):
    """Relocate a memory operand into the harness's UserData scratch buffer.

    If the line has a bracketed memory operand whose expression does not
    use a stack register (rsp/rbp/esp/ebp), and the instruction is not a
    lea (which only computes an address), prepend 'UserData + ' inside the
    brackets so the access lands in mapped memory.
    """
    # Raw string literals: the old plain strings relied on the invalid
    # escape sequence '\[', which is deprecated in Python 3.
    mem = re.search(r'.*\[(.*)\].*', line)
    if (mem is not None and
        re.match(r'.*(rsp|rbp|esp|ebp)', mem.group(1)) is None and
        not line.strip().startswith('lea')):
        # Insert right after the opening bracket of the operand.
        index = mem.span(1)[0]
        line = line[:index] + 'UserData + ' + line[index:]
    return line
def insert_time_value(cnx, code_id, time, arch, ttable):
    """Insert one measured 'actual' timing row for a basic block and commit."""
    sql = ('INSERT INTO %s (code_id, arch, kind, time) '
           'VALUES(%s,%s,\'actual\',%s)') % (ttable, code_id, arch, time)
    ut.execute_query(cnx, sql, False)
    cnx.commit()
def insert_col_values(cnx, cols, values, code_id, arch, ttable):
    """Insert one 'actual' measurement row per sample into *ttable*.

    cols   -- column names, parallel to *values*
    values -- values[j][i] is the i-th sample of column cols[j]

    Each inserted row is committed individually.
    """
    # The column list never changes between rows: build it once with join
    # instead of re-concatenating it (quadratically) on every iteration.
    colstr = ', '.join(cols)
    for i in range(len(values[0])):
        valuestr = ', '.join(str(values[j][i]) for j in range(len(cols)))
        sql = ('INSERT INTO ' + ttable + ' (code_id, arch, kind,' + colstr +
               ') VALUES(' + str(code_id) + ',' + str(arch) +
               ',\'actual\',' + valuestr + ')')
        # Parenthesized single-argument print behaves identically under
        # Python 2 and Python 3 (the old bare print statement was py2-only).
        print(sql)
        ut.execute_query(cnx, sql, False)
        cnx.commit()
class PMCValue:
    """A cluster of nearby performance-counter readings: a running mean
    plus how many raw samples have been folded into it (see PMC.add_value)."""
    def __init__(self, value):
        self.value = value  # running mean of the samples in this cluster
        self.count = 1      # number of samples merged so far
class PMC:
    """One named hardware performance counter.

    Raw readings are clustered: a new sample joins the first existing
    cluster whose mean lies within `percentage` percent of it, otherwise
    it starts a new cluster.  set_mode() then picks the mean of the most
    populated cluster as the counter's representative value.
    """
    def __init__(self, name):
        self.name = name
        self.values = []        # every raw sample, in arrival order
        self.mod_values = []    # PMCValue clusters of mutually-close samples
        self.mode = None        # mean of the largest cluster (set by set_mode)
        self.percentage = 5     # closeness threshold, in percent
    def add_value(self, nvalue):
        """Record *nvalue*, merging it into the first sufficiently-close cluster."""
        self.values.append(nvalue)
        added = False
        for val in self.mod_values:
            if val.value == 0:
                # Avoid division by zero below.
                # NOTE(review): this permanently rewrites the stored cluster
                # mean to 1e-3 -- confirm that is intended.
                val.value = 1e-3
            if (abs(val.value - nvalue) * 100.0 / val.value) < self.percentage:
                # Fold the sample into the cluster's running mean.
                val.value = (val.value * val.count + nvalue) / (val.count + 1)
                val.count += 1
                added = True
                break
        if not added:
            val = PMCValue(nvalue)
            self.mod_values.append(val)
    def set_mode(self):
        """Select the mean of the most-populated cluster as this counter's mode."""
        max_count = 0
        for val in self.mod_values:
            if val.count > max_count:
                self.mode = val.value
                max_count = val.count
class PMCCounters:
    """A bundle of PMC objects parsed from a whitespace-delimited header line."""

    def __init__(self, line):
        # One PMC per column heading.
        self.counters = [PMC(name) for name in line.split()]

    def add_to_counters(self, line):
        """Feed one data row; rows whose column count mismatches are dropped."""
        fields = line.split()
        if len(fields) != len(self.counters):
            return
        for counter, field in zip(self.counters, fields):
            counter.add_value(int(field))

    def set_modes(self):
        """Finalize the mode of every counter."""
        for counter in self.counters:
            counter.set_mode()

    def _find(self, name):
        # First counter with a matching heading, else None.
        for counter in self.counters:
            if name == counter.name:
                return counter
        return None

    def get_value(self, name):
        """All raw samples for counter *name*, or None if unknown."""
        counter = self._find(name)
        return counter.values if counter is not None else None

    def get_mode(self, name):
        """The mode for counter *name*, or None if unknown."""
        counter = self._find(name)
        return counter.mode if counter is not None else None
def check_error(line):
    """Return True if *line* reports a real error (and is not just a warning).

    The old nested-loop version only behaved correctly because the warning
    list had a single entry: with several warnings it would have returned
    True whenever ANY one warning word was absent.  This version implements
    the evident intent: an error word present and no warning word present.
    """
    errors = ['error', 'fault']
    warnings = ['warning']
    if any(w in line for w in warnings):
        return False
    return any(e in line for e in errors)
if __name__ == '__main__':
#command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('--arch',action='store',type=int,required=True)
parser.add_argument('--database',action='store',type=str,required=True)
parser.add_argument('--user',action='store', type=str, required=True)
parser.add_argument('--password',action='store', type=str, required=True)
parser.add_argument('--port',action='store', type=int, required=True)
parser.add_argument('--ctable',action='store',type=str, required=True)
parser.add_argument('--ttable',action='store',type=str, required=True)
parser.add_argument('--limit',action='store',type=int, default=None)
parser.add_argument('--tp',action='store',type=bool,default=False)
args = parser.parse_args(sys.argv[1:])
cnx = ut.create_connection(database=args.database, user=args.user, password=args.password, port=args.port)
sql = 'SELECT code_intel, code_id from ' + args.ctable
rows = ut.execute_query(cnx, sql, True)
print len(rows)
harness_dir = os.environ['ITHEMAL_HOME'] + '/timing_tools/harness'
os.chdir(harness_dir)
total = 0
errors = 0
except_errors = 0
success = 0
not_finished = 0
total_time = 0.0
total_bbs = 0
# do a dry run to figure out measurement overhead
with open('bb.nasm', 'w') as f:
f.close()
proc = subprocess.Popen('./a64-out.sh', stdout=subprocess.PIPE, stderr=subprocess.PIPE)
result = wait_timeout(proc, 10)
startHeading = False
startTimes = False
counters = None
for i, line in enumerate(iter(proc.stdout.readline, '')):
if 'Clock' in line and startTimes == False and startHeading == False: #still didn't start collecting the actual timing data
startHeading = True
if startHeading == True:
counters = PMCCounters(line)
startTimes = True
startHeading = False
elif startTimes == True:
counters.add_to_counters(line)
assert counters is not None
counters.set_modes()
overhead = counters.get_mode('Core_cyc')
print 'OVERHEAD =', overhead
for row in rows:
if row[0] == None:
continue
splitted = row[0].split('\n')
written = 0
final_bb = []
for i, line in enumerate(splitted):
if line != '':
line = remove_unrecog_words(line + '\n')
line = fix_reg_names(line)
final_bb.append(line)
written += 1
if written > 0:
total += 1
with open('bb.nasm','w+') as f:
f.writelines(final_bb)
proc = subprocess.Popen('./a64-out.sh', stdout=subprocess.PIPE, stderr=subprocess.PIPE)
start_time = time.time()
result = wait_timeout(proc, 10)
end_time = time.time()
if result != None:
print final_bb
try:
error_lines = False
for line in iter(proc.stderr.readline, ''):
if check_error(line):
print 'error ' + line
error_lines = True
break
if error_lines == False:
startHeading = False
startTimes = False
counters = None
for i, line in enumerate(iter(proc.stdout.readline, '')):
print line
if 'Clock' in line and startTimes == False and startHeading == False: #still didn't start collecting the actual timing data
startHeading = True
if startHeading == True:
#print 'headings ' + line
counters = PMCCounters(line)
startTimes = True
startHeading = False
elif startTimes == True:
#print 'values ' + line
counters.add_to_counters(line)
if counters != None:
names = ['Core_cyc', 'L1_read_misses', 'L1_write_misses', 'iCache_misses', 'Context_switches']
columns = ['time', 'l1drmisses', 'l1dwmisses', 'l1imisses', 'conswitch']
values = []
aval_cols = []
for i, name in enumerate(names):
vs = counters.get_value(name)
if vs != None:
values.append(vs)
aval_cols.append(columns[i])
if name == 'Core_cyc':
for j, v in enumerate(values[-1]):
values[-1][j] -= overhead
print aval_cols, values
if not args.tp:
insert_col_values(cnx, aval_cols, values, row[1], args.arch, args.ttable)
total_time += end_time - start_time
total_bbs += 1
print float(total_bbs)/total_time
success += 1
else:
for line in final_bb:
print line[:-1]
errors += 1
except Exception as e:
print e
print 'exception occurred'
except_errors += 1
else:
print 'error not completed'
not_finished += 1
if args.limit != None:
if success == args.limit:
break
print total, success, errors, not_finished, except_errors
print overhead
cnx.close()
| mit |
garverp/gnuradio | gr-analog/python/analog/qa_dpll.py | 47 | 2019 | #!/usr/bin/env python
#
# Copyright 2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, analog, blocks
class test_dpll_bb(gr_unittest.TestCase):
    """QA for the analog.dpll_bb (digital phase-locked loop) block."""

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def test_dpll_bb_001(self):
        # Exercise the setters/getters only; no samples flow here.
        period = 1.0
        gain = 0.1
        op = analog.dpll_bb(period, gain)

        op.set_gain(0.2)
        self.assertAlmostEqual(op.gain(), 0.2)

        freq = op.freq()
        self.assertEqual(1 / period, freq)

        # Decision threshold is derived from the frequency.
        self.assertAlmostEqual(1.0 - 0.5 * freq, op.decision_threshold())
        self.assertEqual(0, op.phase())

    def test_dpll_bb_002(self):
        # A pulse train matching the DPLL period should pass through unchanged.
        period = 4
        gain = 0.1
        pulses = 10 * ((period - 1) * [0, ] + [1, ])

        src = blocks.vector_source_b(pulses)
        op = analog.dpll_bb(period, gain)
        dst = blocks.vector_sink_b()
        self.tb.connect(src, op)
        self.tb.connect(op, dst)
        self.tb.run()

        self.assertComplexTuplesAlmostEqual(pulses, dst.data(), 4)
if __name__ == '__main__':
    # Run the QA suite through GNU Radio's test harness (writes XML results).
    gr_unittest.run(test_dpll_bb, "test_dpll_bb.xml")
| gpl-3.0 |
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/fuzzbunch/iohandler.py | 1 | 27412 | """
IO Handling classes
The IO Handler works as a wrapper around all user input and output.
Theoretically, to change the way Fuzzbunch looks should only require changing
this file. Specialized print functions provide a template-like facility.
"""
import sys
import exception
from util import variable_replace
MAX_OUTPUT_ROWS = 5
MAX_PROMPT_ECHO_LEN = 50
try:
try:
import readline
except ImportError:
import pyreadline as readline
HAVE_READLINE = True
except:
HAVE_READLINE = False
mswindows = (sys.platform == "win32")
__all__ = ["IOhandler"]
"""
Valid Attributes : none, bold,
faint, italic,
underline, blink,
fast, reverse,
concealed
Valid Colors : grey red
green yellow
blue magenta
cyan white
"""
COLORIZEMAP = {"[-]" : {"fg" : "red", "attr" : "bold"},
"[+]" : {"fg" : "green", "attr" : "bold"},
"[!]" : {"fg" : "red", "attr" : "bold"},
"[*]" : {"fg" : "green", "attr" : "bold"},
"[?]" : {"fg" : "blue", "attr" : "bold"}}
VMAP = {"[-]" : {"fg" : "magenta", "attr" : "bold"},
"[+]" : {"fg" : "magenta", "attr" : "bold"},
"[!]" : {"fg" : "magenta", "attr" : "bold"},
"[*]" : {"fg" : "magenta", "attr" : "bold"},
"[?]" : {"fg" : "magenta", "attr" : "bold"}}
class DevNull:
    """File-like sink that silently discards everything written to it.

    Used as the log stream whenever no real logfile is configured.
    """

    def write(self, *ignore):
        # Accept and drop any arguments.
        return None

    def close(self):
        return None

    def flush(self):
        return None
def truncate(string, length=MAX_PROMPT_ECHO_LEN):
    """Shorten *string* for prompt echoing, noting how many characters were cut."""
    if len(string) <= length:
        return string
    return "%s... (plus %d characters)" % (string[:length], len(string) - length)
class IOhandler:
"""Handle I/O for fuzzbunch commands"""
def __init__(self, stdin=None,
             stdout=None,
             use_raw=1,
             noprompt=False,
             completekey='tab',
             enablecolor=True,
             history=4096):
    """
    @param stdin        input stream (defaults to sys.stdin)
    @param stdout       output stream (defaults to sys.stdout)
    @param use_raw      use raw_input()/readline instead of reading stdin
    @param noprompt     Do we want to prompt for values upon plugin execution?
    @param completekey  key bound to command completion
    @param enablecolor  master switch for colorized output
    @param history      maximum readline history size (see Bug #3129)
    """
    # sys is already imported at module scope; the redundant local
    # "import sys" that used to live here has been removed.
    self.stdin = stdin if stdin is not None else sys.stdin
    self.stdout = stdout if stdout is not None else sys.stdout
    self.stderr = self.stdout
    self.logout = DevNull()          # discards output until setlogfile()
    self.noprompt = noprompt
    self.raw_input = use_raw
    self.completekey = completekey
    self.havecolor = False           # are we currently in color mode?
    self.enablecolor = enablecolor   # set False to disable color permanently
    self.colormode = "ansi"
    self.colormap = COLORIZEMAP
    self.historysize = history
def setlogfile(self, logfile):
    """Redirect logging to *logfile*; fall back to a DevNull sink on failure."""
    self.logout.close()
    try:
        self.logout = open(logfile, "w")
    except (IOError, OSError):
        # Previously a bare "except:" -- narrowed so unrelated exceptions
        # (e.g. KeyboardInterrupt) are no longer silently swallowed.
        self.logout = DevNull()
def setcolormode(self, isEnabled):
    """Switch to enable or disable color output"""
    # Master switch consulted by write()/prompt_user() before colorizing.
    self.enablecolor = isEnabled

def switch(self):
    # Toggle between the normal tag colormap and the alternate VMAP
    # (which renders every tag in magenta -- see module constants).
    if self.colormap == COLORIZEMAP:
        self.colormap = VMAP
    else:
        self.colormap = COLORIZEMAP
def pre_input(self, completefn):
    """Prepare for interactive input.

    Installs *completefn* as the readline completer, binds the completion
    key, caps and loads/persists command history, and selects the
    color-capable write function for this platform.
    """
    if self.raw_input:
        if HAVE_READLINE:
            import atexit
            self.old_completer = readline.get_completer()
            # Fix Bug #3129: Limit the history size to consume less memory
            readline.set_history_length(self.historysize)
            readline.set_completer(completefn)
            readline.parse_and_bind(self.completekey+": complete")
            try:
                readline.read_history_file()
            except IOError:
                # No history file yet -- fine on first run.
                pass
            atexit.register(readline.write_history_file)
        self.havecolor = True
        if mswindows and self.enablecolor:
            # pyreadline's console writer understands color attributes.
            self.cwrite = readline.GetOutputFile().write_color
        else:
            self.cwrite = self.stdout.write

def post_input(self):
    """Undo pre_input(): restore the previous completer, leave color mode."""
    if self.raw_input:
        if HAVE_READLINE:
            readline.set_completer(self.old_completer)
        self.havecolor = False
        self.cwrite = None

"""
User input handling
"""
def get_input_lines(self, prompt):
    """Collect input lines until the user signals EOF; return them joined."""
    collected = []
    while True:
        entry = self.get_input(prompt)
        if entry == 'EOF':
            break
        collected.append(entry)
    # NOTE(review): entries are concatenated with no separator -- confirm
    # callers do not expect newlines between lines.
    return ''.join(collected)
def get_input_line(self, prompt):
    """Read a single stripped line; EOF maps to the empty string."""
    entry = self.get_input(prompt)
    return '' if entry == 'EOF' else entry.strip()
def get_input(self, prompt):
    """Lowest-level line reader.

    In raw mode this uses raw_input() (readline-aware); Ctrl-C/Ctrl-D is
    reported as the sentinel string 'EOF'.  Otherwise the prompt is written
    and a line is read from self.stdin, with a zero-length read (stream
    closed) also mapped to 'EOF'.
    """
    if self.raw_input:
        try:
            line = raw_input(prompt)
        except (EOFError, KeyboardInterrupt):
            line = 'EOF'
    else:
        self.write(prompt)
        self.flush()
        line = self.stdin.readline()
        if not len(line):
            # Empty read: the input stream is exhausted.
            line = 'EOF'
        else:
            # Drop only the trailing newline.
            line = line[:-1]
    return line
def prompt_user(self, msg, default=None, params=None, gvars=None):
    """Prompt the user for a value.

    @param msg      question text
    @param default  default answer; '$'-variables are expanded via gvars
    @param params   optional choice list; the user then enters an index
                    into it and params[index][0] is returned
    @param gvars    variable dictionary used for '$' substitution
    Raises exception.PromptErr on quit/EOF, exception.PromptHelp on '?',
    and exception.CmdErr on an invalid choice index.
    """
    if self.noprompt:
        # Non-interactive mode: take the (expanded) default.
        return variable_replace(default, gvars) # Fix a small bug in 3.2.0
    if default is not None:
        # If we pass a default, interpret any variables (marked with '$')
        interpreted_default = variable_replace(default, gvars)
        p = "[?] %s [%s] : " % (msg, truncate(interpreted_default))
    else:
        # No default, so empty string
        p = "[?] %s : " % msg
        interpreted_default = ""
    if self.havecolor and self.enablecolor:
        p = self.colorize(p)
    line = self.get_input(p)
    # Check the user input
    if line.upper() in ("EOF",):
        self.newline()
    if line.upper() in ("EOF", "Q", "QUIT"):
        raise exception.PromptErr, "Aborted by user"
    if line.upper() in ("?", "HELP"):
        raise exception.PromptHelp, "No help available"
    # Retrieve the line, and replace any '$' vars with their values
    line = variable_replace(line, gvars)
    if not len(line.strip()):
        # If line is empty, just use the default
        line = interpreted_default
    # If it's a choice, figure out which value they chose
    if params:
        try:
            index = int(line)
            line = params[index][0]
        except (IndexError, ValueError):
            raise exception.CmdErr, "Invalid input"
    return line
def prompt_yn(self, msg, default="Yes", params=None, gvars=None):
    """Ask a yes/no question; returns True only for a 'yes'/'y' answer."""
    if default.lower() not in ('yes', 'y', 'no', 'n'):
        # Unrecognized default -- fail safe to "No".
        default = 'No'
    answer = self.prompt_user(msg, default, params=params, gvars=gvars).lower()
    return answer in ('yes', 'y')
def prompt_continue(self):
    """Confirm plugin execution; raises CmdErr unless the answer is yes."""
    line = self.prompt_user("Execute Plugin?", "Yes")
    if line.lower() not in ("yes", "y"):
        raise exception.CmdErr, "Execution Aborted"
    return

def prompt_confirm_redir(self):
    # Pause until the user acknowledges; the reply itself is discarded.
    line = self.prompt_user("Press Any Key To Continue")
    del line
    return

def prompt_runsubmode(self, text):
    """Yes/no confirmation for entering a submode; defaults to Yes."""
    line = self.prompt_user(text, "Yes")
    if line.lower() not in ("yes", "y"):
        return False
    else:
        return True

"""
Basic user output handling
"""
def flush(self):
    # Push pending output on both the log stream and stdout.
    self.logout.flush()
    self.stdout.flush()

def log(self, line):
    """Append *line* (newline-terminated) to the logfile only."""
    line += "\n"
    self.logout.write(line)

def write(self, line):
    """Write *line* to the log and to the user, colorizing tags if enabled."""
    self.log(line)
    line += "\n"
    # Fix bug #2910
    if self.havecolor and self.enablecolor:
        try:
            self.cwrite(self.colorize(line))
        except LookupError:
            # We failed to print in color. This is a problem looking up the encoding
            # Permanently disable color and continue
            self.havecolor = False
            self.enablecolor = False
            self.stdout.write(line)
    else:
        self.stdout.write(line)
    self.flush()

def newline(self):
    self.write("")

def print_error(self, line):
    # "[-]" renders red when color is enabled (see COLORIZEMAP).
    self.write("[-] " + line)

def print_success(self, line):
    self.write("[+] " + line)

def print_warning(self, line):
    self.write("[!] " + line)

def print_msg(self, line):
    self.write("[*] " + line)

"""
Output formatting help
"""
def get_single_col_max_width(self, data, colnum):
    """Widest entry (minimum 6) found in column *colnum* across *data*."""
    # Min len = 6
    maxlen = 6
    for row in data:
        for i, col in enumerate(row):
            if i == colnum:
                maxlen = max(maxlen, len(col))
    return maxlen

def get_column_max_width(self, data):
    """Per-column maximum widths (minimum 1), sized from the first row."""
    maxlens = list(0 for i in range(0, len(data[0])))
    for row in data:
        for i, col in enumerate(row):
            maxlens[i] = max(maxlens[i], len(col), 1)
    return maxlens

def print_headingline(self, msg):
    # Heading underlined with '=' and padded with blank lines.
    self.newline()
    self.write(msg)
    self.write("=" * len(msg))
    self.newline()

def print_vheading(self, vector):
    # Print each heading line as-is, padded with blank lines.
    self.newline()
    for v in vector:
        self.write(v)
    self.newline()
def vprint(self, fmt, vector, max_str_len=None):
    """Write each item of *vector* through format string *fmt*.

    If max_str_len is given, strings are truncated to that length
    (tuples are truncated field by field) before formatting.
    """
    for v in vector:
        if max_str_len:
            if isinstance(v, tuple):
                v = tuple(truncate(field, max_str_len) for field in v)
            else:
                # BUGFIX: this branch used to call truncate(field, ...),
                # referencing an undefined name and raising NameError for
                # any non-tuple item when max_str_len was set.
                v = truncate(v, max_str_len)
        self.write(fmt % v)
    self.newline()
def makeplural(self, name, count):
    """Naive English pluralization of *name* when count is 2 or more."""
    if count < 2:
        return name
    if name.endswith("s"):
        # Already looks plural; leave it alone.
        return name
    suffix = "es" if name.endswith("h") else "s"
    return name + suffix
"""
Color support
"""
def colorize(self, line):
    """Return *line* with the first matching colormap pattern wrapped in
    ANSI color escapes; lines with no match pass through unchanged."""
    for pattern, attrs in self.colormap.items():
        plen = len(pattern)
        index = line.find(pattern)
        # For the true/false value map the replacement token is the literal
        # "[TF]" marker rather than the matched text itself.
        if self.colormap == VMAP:
            pattern = "[TF]"
        if index != -1:
            r = index + plen
            # Splice: text before match + color-on + token + color-reset +
            # text after the original (uncolored) match.
            line = (line[:index] +
                    self.color(**attrs) +
                    pattern +
                    self.color() +
                    line[r:])
            return line
    return line
def color(self, fg=None, bg=None, attr=None):
    """Build an ANSI SGR escape sequence; empty string unless in ansi mode.

    *fg*/*bg* are color names, *attr* a space-separated attribute list;
    unrecognized names are silently skipped.
    """
    if self.colormode != "ansi":
        return ""
    attr_names = 'none bold faint italic underline blink fast reverse concealed'.split()
    color_names = 'grey red green yellow blue magenta cyan white'.split()
    # "0" always resets before applying the requested codes.
    codes = ["0"]
    if fg in color_names:
        codes.append(str(30 + color_names.index(fg)))
    if bg in color_names:
        codes.append(str(40 + color_names.index(bg)))
    if attr:
        for name in attr.split():
            if name in attr_names:
                codes.append(str(attr_names.index(name)))
    return "\033[" + ";".join(codes) + "m"
"""
Specialized output routines
"""
# OK because it's readline
def print_history(self, args):
    """Print numbered readline history entries from args['items']."""
    self.newline()
    for index,item in args['items']:
        self.write("%4d %s" % (index, item))
    self.newline()

def print_cmd_list(self, args):
    """Print a titled Command/Description table.

    NOTE: prepends the header rows onto the caller's args['commands']
    list in place.
    """
    self.print_headingline(args['title'])
    cmds = args['commands']
    cmds.insert(0, ("Command", "Description"))
    cmds.insert(1, ("-------", "-----------"))
    self.vprint(" %-15s %s", cmds)
def print_banner(self, args):
    """Print the startup banner, version line and per-type plugin counts."""
    self.write(args['banner'])
    self.write("--[ Version %s" % args['version'])
    # 'type' shadows the builtin here; kept unchanged for compatibility.
    for count, type in args['stats']:
        self.write(" * %d %s" % (count, self.makeplural(type, count)))
    self.newline()

def print_opensessions(self, args):
    """Warn about each session that still has an open contract.

    enumerate() yields (index, name) tuples which feed both format fields.
    """
    self.newline()
    for session in enumerate(args['sessions']):
        self.print_warning("Session item %d (%s) has open contract" % session)
    self.newline()

def print_usage(self, arg):
    """Print 'Usage: <arg[0]>' followed by the remaining detail lines."""
    self.write("Usage: %s\n" % arg[0])
    self.write("\n".join(arg[1:]))
def print_module_lists(self, args):
    """Print one plugin category as a sorted Name/Version table.

    NOTE: sorts args['plugins'] and prepends header rows in place.
    """
    category = args['module']
    modules = args['plugins']
    self.print_headingline("Plugin Category: %s" % category)
    modules.sort()
    modules.insert(0, ("Name", "Version"))
    modules.insert(1, ("----", "-------"))
    widths = self.get_column_max_width(modules)
    self.vprint(" %%-%ds %%s" % (widths[0] + 4), modules)

def print_module_types(self, args):
    """Print the Category/Active-Plugin table (mutates args['modules'])."""
    self.print_headingline("Plugin Categories")
    modules = args['modules']
    modules.sort()
    modules.insert(0, ("Category", "Active Plugin"))
    modules.insert(1, ("--------", "-------------"))
    widths = self.get_column_max_width(modules)
    self.vprint(" %%-%ds %%s" % (widths[0] + 4), modules)
def print_session_item(self, args):
    """Print one session's name/status plus a parameter table per info type."""
    self.print_vheading(("Name: %s" % args['name'],
                         "Status: %s" % args['status']))
    for type,info in args['info']:
        self.write("%s:\n" % type)
        if info:
            # Keep only the (name, value) pair from each parameter record.
            params = [x[:2] for x in dict(info.get_paramlist()).values()]
            params.insert(0, ("Name", "Value"))
            params.insert(1, ("----", "-----"))
            widths = self.get_column_max_width(params)
            # Values are truncated to the prompt echo limit.
            self.vprint(" %%-%ds %%s" % (widths[0] + 4), params, MAX_PROMPT_ECHO_LEN)

def print_session_items(self, args):
    """Print the session history as Index/Name/Status rows."""
    items = args['items']
    self.print_headingline("Session History")
    if not items:
        self.write(" *Empty*\n")
        self.newline()
    else:
        _list = [("Index", "Name", "Status"),
                 ("-----", "----", "------")]
        for index,item in enumerate(items):
            _list.append((str(index), item[0], item[1]))
        widths = self.get_column_max_width(_list)
        fmt = "%%7s %%-%ds %%s" % (widths[1] + 4)
        self.vprint(fmt, _list, MAX_PROMPT_ECHO_LEN)
# Move to table
def is_table_row_empty(self, v):
    """True when no column of row *v* still holds a non-empty first chunk."""
    return not any(len(col) > 0 and len(col[0]) > 0 for col in v)
# Move to table
def get_table_row(self, vector):
    """Yield output lines, consuming one chunk from every column per line.

    Columns that run out early contribute empty strings until all are
    drained.
    """
    while not self.is_table_row_empty(vector):
        row = []
        for col in vector:
            row.append(col.pop(0) if col else "")
        yield row
# Move to Table
def print_row(self, widths, data):
    """Print one logical table row, wrapping each cell to its column width.

    Oversized cells are split into continuation lines, capped at
    MAX_OUTPUT_ROWS wrapped lines per cell with a truncation notice.
    """
    fmt = "%%-%ds " * len(widths) % tuple(widths)
    vectors = []
    for i,width in enumerate(widths):
        vector = []
        col = data[i]
        printed_rows = 0
        while col:
            if printed_rows > MAX_OUTPUT_ROWS:
                # '/' is integer division here (Python 2 semantics).
                vector.append("... (plus %d more lines)" % (len(col) / widths[i]))
                break
            vector.append(col[:widths[i]])
            col = col[widths[i]:]
            printed_rows += 1
        vectors.append(vector)
    for line in self.get_table_row(vectors):
        self.write(fmt % tuple(line))

# Move to Table
def print_table(self, widths, heading, params):
    """Print *heading* rows, a dashed separator, then *params* rows.

    NOTE: appends the separator and *params* onto the caller's *heading*
    list in place.
    """
    sep = tuple(['-' * len(word) for word in heading[0]])
    heading.append(sep)
    heading.extend(params)
    for d in heading:
        self.print_row(widths, d)
    self.newline()
def print_set_names(self, args):
    """Print a module's variables as a Name/Value table."""
    self.print_headingline("Module: %s" % args['title'])
    params = args['vars']
    if not params:
        # NOTE(review): " *Empty" lacks the trailing '*' used elsewhere --
        # confirm whether that is intentional.
        self.write(" *Empty")
        self.newline()
    else:
        widths = [self.get_single_col_max_width(params, 0), 50]
        self.print_table(widths, [("Name", "Value")], params)

def print_exe_set_names(self, args):
    """Print module variables including session-redirected values.

    Falls back to print_set_names when no session data is attached.
    """
    if not args['session']:
        return self.print_set_names(args)
    self.print_headingline("Module: %s" % args['title'])
    params = args['vars']
    session = args['session']['params']
    print_params = []
    for param in params:
        if session.get(param.name):
            # Session overrides the parameter: show both values.
            realval = truncate(session.get(param.name))
            redirval = truncate(param.value)
        else:
            realval = truncate(param.value)
            redirval = ""
        print_params.append((param.name, realval, redirval))
    widths = self.get_column_max_width(print_params)
    # Columns must be at least as wide as their headers.
    widths[1] = max(widths[1], len("Set Value"))
    widths[2] = max(widths[2], len("Redirected Value"))
    self.print_table(widths, [("Name", "Set Value", "Redirected Value")], print_params)

def print_set_attributes(self, title, param, attribs):
    """Print one parameter's attribute/value table (mutates *attribs*)."""
    self.print_vheading(("Module: %s" % title,
                         "Parameter: %s" % param))
    attribs.insert(0, ("Parameter Attribute", "Value"))
    attribs.insert(1, ("-------------------", "-----"))
    widths = self.get_column_max_width(attribs)
    fmt = " %%-%ds %%s" % (widths[0] + 4)
    self.vprint(fmt, attribs)

def print_set_choices(self, choices):
    """Print the valid options for a choice parameter (mutates *choices*)."""
    choices.insert(0, ("Parameter Options", "Description"))
    choices.insert(1, ("-----------------", "-----------"))
    widths = self.get_column_max_width(choices)
    fmt = " %%-%ds %%s" % (widths[0] + 4)
    self.vprint(fmt, choices)
def print_sorted_vals(self, sorted_vals, vals):
    """Print candidate values grouped by category; return (count, default).

    The default index is 0 when any 'Contract' entries exist, otherwise
    the last printed index.
    """
    i = 0
    for cat in ("Contract", "History", "Other"):
        self.write("\n---[ %s\n" % cat)
        for index in sorted_vals[cat]:
            # label/info are unpacked but unused here.
            (val, name, label, info) = vals[index]
            self.write(" %d) %s (%s)" % (i, val, name))
            i += 1
    if len(sorted_vals["Contract"]):
        default = 0
    else:
        default = i - 1
    return i,default

def print_param_list(self, param_list):
    """Print a Name/Value/Description table for *param_list*."""
    self.write("")
    widths = self.get_column_max_width(param_list)
    widths[0] = max(widths[0], len('Name'))
    widths[1] = max(widths[1], len('Value'))
    # Description column is fixed-width.
    widths[2] = 50
    self.print_table(widths, [('Name', 'Value', 'Description')], param_list)

def print_touch_info(self, args):
    """Print the numbered touch list, or an *Empty* marker when none."""
    touchlist = args['touchlist']
    self.print_headingline("Touch List")
    if not touchlist:
        self.write(" *Empty*\n")
        self.newline()
    else:
        hdrlist = []
        hdrlist.append(("Index", "Name", "Description"))
        hdrlist.append(("-----", "----", "-----------"))
        widths = self.get_column_max_width(touchlist)
        fmt = "%%7s %%-%ds %%s" % (widths[0] + 4)
        self.vprint(fmt, hdrlist)
        # Data rows use a slightly different format with an extra field.
        fmt = "%%7d %%-%ds %%s (%%s)" % (widths[0] + 4)
        for i,touch in enumerate(touchlist):
            self.write(fmt % ((i,) + touch))
        self.newline()
"""
Apply command
"""
def print_apply_prompt_list(self, args):
    """Print the deconflict table for a list-valued variable."""
    if args['default']:
        default = args['default']
    else:
        default = '[NOT SET]'
    vals = args['vals']
    vals = [(str(i), args['contract'], v) for i,v in enumerate(args['vals'])]
    # NOTE(review): relies on Python 2 leaking 'i' out of the list
    # comprehension above; raises NameError when args['vals'] is empty.
    vals.append((str(i+1), "Current Value", default))
    widths = self.get_column_max_width(vals)
    widths[0] = len("Index")
    widths[2] = 50
    self.newline()
    self.print_msg("%s :: Deconflict" % args['variable'])
    self.newline()
    self.print_table(widths, [("Index", "Session ID", "Value")], vals)

def print_apply_prompt(self, args):
    """Print the deconflict table for (param, contract) value pairs."""
    if args['default']:
        default = args['default']
    else:
        default = '[NOT SET]'
    vals = [(str(i), contract.get_item_info(), param.value.value)
            for i,(param,contract) in enumerate(args['vals'])]
    # NOTE(review): same Python 2 comprehension-variable leak dependency
    # as print_apply_prompt_list above.
    vals.append((str(i+1), "Current Value", default))
    widths = self.get_column_max_width(vals)
    widths[0] = len("Index")
    widths[2] = 50
    self.newline()
    self.print_msg("%s :: Deconflict" % args['variable'])
    self.newline()
    self.print_table(widths, [("Index", "Session ID", "Value")], vals)
"""
Prompt Command
"""
def print_prompt_param(self, args, default):
    """Print one prompt parameter and, for choice types, its options.

    The entry whose index equals *default* is flagged with a leading '*'.
    """
    self.newline()
    self.print_msg(" %s :: %s" % (args['name'], args['description']))
    fmt = []
    if args['attribs']:
        fmt.append("")
        widths = self.get_column_max_width(args['attribs'])
        choice_fmt = " %%s%%d) %%-%ds %%s" % (widths[0] + 4)
        for i,(attr,val) in enumerate(args['attribs']):
            if default and i == int(default):
                markdef = "*"
            else:
                markdef = " "
            fmt.append(choice_fmt % (markdef,i,attr,val))
    self.vprint("%s", fmt)

def print_prompt_param_help(self, args):
    """Print the detail sheet for one prompt parameter."""
    self.newline()
    fmt = [ " Name : %s" % args['name'],
            " Desc : %s" % args['description'],
            " Type : %s" % args['type'],
            " Required : %s" % args['required'],
            " Is Valid : %s" % args['valid']]
    # Choice parameters show their value through the options list instead.
    if args['type'] != "Choice":
        fmt.append(" Value : %s" % args['value'])
    self.vprint("%s", fmt)
"""
Redirection
"""
def print_localiplist(self, iplist):
    """List the machine's local IP addresses under a heading."""
    self.print_headingline("Local IP Addresses")
    for address in iplist:
        self.write(" * %s " % address)
    self.newline()
def print_redir_info(self, redir, paramList):
    """Dump local and remote redirection entries, best-effort.

    The broad except below is deliberate: with group visibility some redir
    attributes may be missing, and output should not abort.
    """
    #params = iDict(paramList)
    try:
        self.print_headingline("Local")
        if not redir or not redir['local']:
            self.write(" *Empty* ")
        else:
            for l in redir['local']:
                self.write("Name: %s" % l.name)
                self.write("Protocol: %s" % l.protocol)
                self.write("Listen Address: %s" % l.listenaddr)
                self.write("Listen Port: %s" % l.listenport)
                self.write("Destination Address: %s" % l.destaddr)
                self.write("Destination Port: %s" % l.destport)
                self.write("Source Port : %s" % l.srcport)
                self.write("")
        self.print_headingline("Remote")
        if not redir or not redir['remote']:
            self.write(" *Empty* ")
        else:
            for r in redir['remote']:
                self.write("Name: %s" % r.name)
                self.write("Protocol: %s" % r.protocol)
                self.write("Listen Address: %s" % r.listenaddr)
                self.write("Listen Port: %s" % r.listenport)
                self.write("Destination Address: %s" % r.destaddr)
                self.write("Destination Port: %s" % r.destport)
    except:
        # XXX - ESKE. Due to group visibility some of the redir params
        # being looked up might not be in the params list
        pass
    self.write("")

def print_local_tunnels(self, tunnels):
    """Print the table of local (listener-side) tunnels."""
    data = [("Proto", "Listen IP", "Source IP", "Destination IP")]
    for tunnel in tunnels:
        if tunnel.srcport is None:
            srcport = 'ANY'
        else:
            # NOTE(review): prints the literal '0' rather than the actual
            # tunnel.srcport -- confirm this is intended.
            srcport = '0'
        data.append((tunnel.protocol,
                     tunnel.listenaddr + ":" + tunnel.listenport,
                     "Redirector" + ":" + srcport,
                     tunnel.destaddr + ":" + tunnel.destport))
    maxlens = self.get_column_max_width(data)
    # Insert the dashed separator after the header row.
    data.insert(1, tuple(["-" * length for length in maxlens]))
    fmt = "%%-%ds %%-%ds %%-%ds %%-%ds" % tuple(maxlens)
    for line in data:
        self.write(fmt % line)

def print_remote_tunnels(self, tunnels):
    """Print the table of remote (target-side) tunnels."""
    data = [("Proto", "Destination IP", "Listen IP", "Target")]
    for tunnel in tunnels:
        data.append((tunnel.protocol,
                     tunnel.destaddr + ":" + tunnel.destport,
                     tunnel.listenaddr + ":" + tunnel.listenport,
                     "TARGET"))
    maxlens = self.get_column_max_width(data)
    data.insert(1, tuple(["-" * length for length in maxlens]))
    fmt = "%%-%ds %%-%ds %%-%ds %%-%ds" % tuple(maxlens)
    for line in data:
        self.write(fmt % line)
def print_global_redir(self, tunnels):
    """Print both the local and remote redirection tables."""
    for label, printer in (("Local", self.print_local_tunnels),
                           ("Remote", self.print_remote_tunnels)):
        self.print_headingline(label)
        entries = tunnels[label.lower()]
        if entries:
            printer(entries)
        else:
            self.write(" *empty* ")
    self.write("")
"""
Autorun
"""
def print_autoruncmds(self, status, auto):
    """Show the autorun state and, when enabled, each category's commands."""
    if not status:
        self.print_msg("Autorun OFF")
        self.newline()
        return
    self.print_msg("Autorun ON")
    for category, cmds in auto.items():
        self.print_headingline(category + " Autorun List")
        for idx, cmd in enumerate(cmds):
            self.write(" %d) %s" % (idx, cmd[0]))
        self.newline()
"""
General Help
"""
def print_standardop(self):
    """Print the canned standard-operating-procedure walkthrough text."""
    standardop = """
Fuzzbunch2 Standard OP Usage Help
---------------------------------
=== Summary ===
Run the following commands. Answer questions along the way.
Abort on any failures.
use PcConfig
use Explodingcan
use Pclauncher
=== Detail ===
use PcConfig will run the Peddlecheap configuration plugin and will
generate a configured Peddlecheap DLL.
use Explodingcan will run the Explodingcan exploit. It will first run
through the Explodingcan touch plugin then try to run the exploit. This
plugin will generate an open socket connection that MUST be consumed by the
Pclauncher plugin before exiting.
use Pclauncher will upload the configured Peddlecheap DLL to target over
the open connection from Explodingcan and run it from memory. A new window
will be opened for the LP to communicate with target.
"""
    self.write(standardop)
| unlicense |
mikebz/sentry-trends | main.py | 1 | 3112 | """small utility for dumping out Sentry event trends"""
import ConfigParser
from datetime import datetime
import sys
from dateutil import parser
import pytz
from sentry_stats import SentryStats
def deep_get(dictionary, *keys):
    """
    Deep get for nested dictionaries; handy for values from REST payloads.
    Returns None as soon as any intermediate level is missing or falsy.
    Courtesy of:
    http://stackoverflow.com/questions/25833613/python-safe-method-to-get-value-of-nested-dictionary # noqa
    """
    current = dictionary
    for key in keys:
        current = current.get(key) if current else None
    return current
def process_issues(sentry_key, organization, project):
    """Fetch the project's issues from Sentry and print them as CSV rows."""
    stats = SentryStats(sentry_key, organization)
    issues = stats.retrieve_issues(project)
    # Python 2 print statements: header line, then one row per issue.
    print "\n\nTitle, Links, hits, percent gain"
    for issue in issues:
        row = [
            issue["title"],
            issue["status"],
            issue["jiraLink"],
            str(issue["hitsPerIssue"]),
            str(issue["percentGain"])
        ]
        print ", ".join(row)
def process_events(sentry_key, organization, project, days):
    """Print the project's events as CSV plus a per-day occurrence count."""
    stats = SentryStats(sentry_key, organization)
    events = stats.retrieve_events(project, days)
    end_date = pytz.utc.localize(datetime.utcnow())
    # One bucket per day in the window; slot (days - 1) is today.
    day_breakdown = dict.fromkeys(range(0, days), 0)
    print "\n\nDate Created, Date Received, User ID, Type, Message"
    for event in events:
        row = [
            event["dateCreated"],
            event["dateReceived"],
            deep_get(event, "user", "id") or "",
            event["type"],
            event["message"].strip()
        ]
        print ", ".join(row)
        date_created = parser.parse(event["dateCreated"])
        delta = end_date - date_created
        # NOTE(review): assumes every event falls within the last *days*
        # days; an older event yields a slot key not in day_breakdown and
        # raises KeyError -- verify the API query enforces the window.
        day_slot = days - delta.days - 1
        day_breakdown[day_slot] += 1
    print "\n\nTotal: " + str(len(events))
    print "\n\nDay,Events"
    # iteritems(): Python 2 dict iterator.
    for day, occurances in day_breakdown.iteritems():
        print str(day) + ", " + str(occurances)
def main(argv):
    """Command-line entry point: dispatch to the issues or events report."""
    # Show usage on no args, help flags, or an unknown command.
    if len(argv) == 0 \
            or argv[0] in ['/?', '-?', 'help', '-h', '/h'] \
            or not argv[0] in ['issues', 'events']:
        print "main.py help - for help"
        print "main.py events - for event report"
        print "main.py issues - for issues"
        sys.exit()
    # Credentials and filters come from config.ini in the working directory.
    config_parser = ConfigParser.ConfigParser()
    config_parser.read("config.ini")
    sentry_key = config_parser.get("api_keys", "sentry")
    organization = config_parser.get("common_filters", "organization")
    project = config_parser.get("common_filters", "project")
    # Only the first few characters of the API key are echoed.
    print "Sentry Key: " + sentry_key[0:5] + "..."
    print "Organization: " + organization
    print "Project: " + project
    command = argv[0]
    if command == 'issues':
        process_issues(sentry_key, organization, project)
    elif command == 'events':
        days = config_parser.getint("event_filters", "days")
        print "Days of data: " + str(days)
        process_events(sentry_key, organization, project, days)
# Script entry point: forward CLI arguments (without the program name).
if __name__ == "__main__":
    main(sys.argv[1:])
| mit |
mattkretz/root | interpreter/llvm/src/utils/sort_includes.py | 53 | 2599 | #!/usr/bin/env python
"""Script to sort the top-most block of #include lines.
Assumes the LLVM coding conventions.
Currently, this script only bothers sorting the llvm/... headers. Patches
welcome for more functionality, and sorting other header groups.
"""
import argparse
import os
def sort_includes(f):
    """Sort the #include lines of a specific file.

    Rewrites *f* in place. Only the first contiguous include block is
    touched; includes are de-duplicated and regrouped in the order: main
    module header, local headers, LLVM/Clang project headers, system
    headers.
    """
    # Skip files which are under INPUTS trees or test trees.
    if 'INPUTS/' in f.name or 'test/' in f.name:
        return
    ext = os.path.splitext(f.name)[1]
    if ext not in ['.cpp', '.c', '.h', '.inc', '.def']:
        return
    lines = f.readlines()
    # In a .cpp/.c file the first quoted include is the module's own API
    # header and must stay first.
    look_for_api_header = ext in ['.cpp', '.c']
    found_headers = False
    headers_begin = 0
    headers_end = 0
    api_headers = []
    local_headers = []
    project_headers = []
    system_headers = []
    for (i, l) in enumerate(lines):
        if l.strip() == '':
            continue
        if l.startswith('#include'):
            if not found_headers:
                headers_begin = i
                found_headers = True
            headers_end = i
            header = l[len('#include'):].lstrip()
            if look_for_api_header and header.startswith('"'):
                api_headers.append(header)
                look_for_api_header = False
                continue
            # gtest headers are treated like system headers by convention.
            if header.startswith('<') or header.startswith('"gtest/'):
                system_headers.append(header)
                continue
            if (header.startswith('"llvm/') or header.startswith('"llvm-c/') or
                header.startswith('"clang/') or header.startswith('"clang-c/')):
                project_headers.append(header)
                continue
            local_headers.append(header)
            continue
        # Only allow comments and #defines prior to any includes. If either are
        # mixed with includes, the order might be sensitive.
        if found_headers:
            break
        if l.startswith('//') or l.startswith('#define') or l.startswith('#ifndef'):
            continue
        break
    if not found_headers:
        return
    # sorted(set(...)) both de-duplicates and orders each group.
    local_headers = sorted(set(local_headers))
    project_headers = sorted(set(project_headers))
    system_headers = sorted(set(system_headers))
    headers = api_headers + local_headers + project_headers + system_headers
    header_lines = ['#include ' + h for h in headers]
    lines = lines[:headers_begin] + header_lines + lines[headers_end + 1:]
    # Rewrite the file in place.
    f.seek(0)
    f.truncate()
    f.writelines(lines)
def main():
    """CLI entry point: sort includes in every file named on the command line."""
    arg_parser = argparse.ArgumentParser(description=__doc__)
    arg_parser.add_argument('files', nargs='+', type=argparse.FileType('r+'),
                            help='the source files to sort includes within')
    options = arg_parser.parse_args()
    for source_file in options.files:
        sort_includes(source_file)
# Script entry point.
if __name__ == '__main__':
    main()
| lgpl-2.1 |
paulojamorim/invesalius3 | invesalius/gui/default_viewers.py | 5 | 24363 | #--------------------------------------------------------------------------
# Software: InVesalius - Software de Reconstrucao 3D de Imagens Medicas
# Copyright: (C) 2001 Centro de Pesquisas Renato Archer
# Homepage: http://www.softwarepublico.gov.br
# Contact: invesalius@cti.gov.br
# License: GNU - GPL 2 (LICENSE.txt/LICENCA.txt)
#--------------------------------------------------------------------------
# Este programa e software livre; voce pode redistribui-lo e/ou
# modifica-lo sob os termos da Licenca Publica Geral GNU, conforme
# publicada pela Free Software Foundation; de acordo com a versao 2
# da Licenca.
#
# Este programa eh distribuido na expectativa de ser util, mas SEM
# QUALQUER GARANTIA; sem mesmo a garantia implicita de
# COMERCIALIZACAO ou de ADEQUACAO A QUALQUER PROPOSITO EM
# PARTICULAR. Consulte a Licenca Publica Geral GNU para obter mais
# detalhes.
#--------------------------------------------------------------------------
import sys
import os
import wx
import wx.lib.agw.fourwaysplitter as fws
from pubsub import pub as Publisher
import invesalius.data.viewer_slice as slice_viewer
import invesalius.data.viewer_volume as volume_viewer
import invesalius.project as project
import invesalius.gui.widgets.slice_menu as slice_menu_
from invesalius.gui.widgets.clut_raycasting import CLUTRaycastingWidget, \
EVT_CLUT_POINT_RELEASE, EVT_CLUT_CURVE_SELECT, \
EVT_CLUT_CURVE_WL_CHANGE
from invesalius.constants import ID_TO_BMP
from invesalius import inv_paths
import invesalius.session as ses
import invesalius.constants as const
class Panel(wx.Panel):
    """Main four-view panel: axial, coronal and sagittal slices plus volume."""

    def __init__(self, parent):
        wx.Panel.__init__(self, parent, pos=wx.Point(0, 50),
                          size=wx.Size(744, 656))
        self.__init_aui_manager()
        self.__bind_events_wx()
        self.__bind_events()
        #self.__init_four_way_splitter()
        #self.__init_mix()

    def __init_aui_manager(self):
        """Create the AUI layout holding the three slice viewers and volume."""
        self.aui_manager = wx.aui.AuiManager()
        self.aui_manager.SetManagedWindow(self)
        # TODO: Test further and determine the best option
        # Position
        # volume | pos = 0
        # sagittal | pos = 1
        # coronal | pos = 2
        # axial | pos = 3
        # Automatic: as each pane is inserted it takes the topmost free spot
        # in the window (the lowest position number)
        # Layer
        # Layer 0 | Layer 1 | Layer 2 | ...
        # Automatic: all panes are placed on the same layer
        # What does Dockable mean?
        # Row
        # Row 0 | Row 1
        # Same as Layer
        # How does Direction work?
        # First alternative:
        # mode: 2 on Layer 0, 2 on Layer 1, for example - automatic position
        # (Row could be used instead of Layer)
        # problem: the sash control only appears in the vertical direction
        # trying Fixed could work around it, but then maximizing a pane does
        # not maximize it fully
        p1 = slice_viewer.Viewer(self, "AXIAL")
        s1 = wx.aui.AuiPaneInfo().Centre().Row(0).\
            Name("Axial Slice").Caption(_("Axial slice")).\
            MaximizeButton(True).CloseButton(False)
        p2 = slice_viewer.Viewer(self, "CORONAL")
        s2 = wx.aui.AuiPaneInfo().Centre().Row(0).\
            Name("Coronal Slice").Caption(_("Coronal slice")).\
            MaximizeButton(True).CloseButton(False)
        p3 = slice_viewer.Viewer(self, "SAGITAL")
        s3 = wx.aui.AuiPaneInfo().Centre().Row(1).\
            Name("Sagittal Slice").Caption(_("Sagittal slice")).\
            MaximizeButton(True).CloseButton(False)
        p4 = VolumeViewerCover(self)
        #p4 = volume_viewer.Viewer(self)
        s4 = wx.aui.AuiPaneInfo().Row(1).Name("Volume").\
            Bottom().Centre().Caption(_("Volume")).\
            MaximizeButton(True).CloseButton(False)
        self.s4 = s4
        self.p4 = p4
        # All three slice viewers share the same popup menu.
        menu = slice_menu_.SliceMenu()
        p1.SetPopupMenu(menu)
        p2.SetPopupMenu(menu)
        p3.SetPopupMenu(menu)
        # Pane insertion order differs per platform / wx version to obtain
        # the same final layout.
        if sys.platform == 'win32' or wx.VERSION >= (4, 1):
            self.aui_manager.AddPane(p1, s1)
            self.aui_manager.AddPane(p2, s2)
            self.aui_manager.AddPane(p3, s3)
            self.aui_manager.AddPane(p4, s4)
        else:
            self.aui_manager.AddPane(p4, s4)
            self.aui_manager.AddPane(p3, s3)
            self.aui_manager.AddPane(p2, s2)
            self.aui_manager.AddPane(p1, s1)
        self.aui_manager.Update()
        # Outside navigator mode the target button is not available.
        if int(ses.Session().mode) != const.MODE_NAVIGATOR:
            Publisher.sendMessage('Deactive target button')

    def __bind_events_wx(self):
        self.aui_manager.Bind(wx.aui.EVT_AUI_PANE_MAXIMIZE, self.OnMaximize)
        self.aui_manager.Bind(wx.aui.EVT_AUI_PANE_RESTORE, self.OnRestore)

    def __bind_events(self):
        Publisher.subscribe(self._Exit, 'Exit')

    def OnMaximize(self, evt):
        # The raycasting widget is only shown while the volume pane is
        # maximized.
        if evt.GetPane().name == self.s4.name:
            Publisher.sendMessage('Show raycasting widget')

    def OnRestore(self, evt):
        if evt.GetPane().name == self.s4.name:
            Publisher.sendMessage('Hide raycasting widget')

    def __init_four_way_splitter(self):
        """Alternative (unused) layout: fixed 2x2 four-way splitter."""
        splitter = fws.FourWaySplitter(self, style=wx.SP_LIVE_UPDATE)
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(splitter, 1, wx.EXPAND)
        self.SetSizer(sizer)
        p1 = slice_viewer.Viewer(self, "AXIAL")
        splitter.AppendWindow(p1)
        p2 = slice_viewer.Viewer(self, "CORONAL")
        splitter.AppendWindow(p2)
        p3 = slice_viewer.Viewer(self, "SAGITAL")
        splitter.AppendWindow(p3)
        p4 = volume_viewer.Viewer(self)
        splitter.AppendWindow(p4)

    def _Exit(self):
        self.aui_manager.UnInit()

    def __init_mix(self):
        """Experimental (unused) AUI + splitter hybrid layout.

        NOTE(review): AddPane receives the VolumeViewerCover *class*
        instead of an instance, and 'p4' below is undefined (its
        assignment is commented out), so invoking this would raise.
        """
        aui_manager = wx.aui.AuiManager()
        aui_manager.SetManagedWindow(self)
        splitter = fws.FourWaySplitter(self, style=wx.SP_LIVE_UPDATE)
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(splitter, 1, wx.EXPAND)
        self.SetSizer(sizer)
        p1 = slice_viewer.Viewer(self, "AXIAL")
        aui_manager.AddPane(p1,
                            wx.aui.AuiPaneInfo().
                            Name("Axial Slice").Caption(_("Axial slice")).
                            MaximizeButton(True).CloseButton(False))
        p2 = slice_viewer.Viewer(self, "CORONAL")
        aui_manager.AddPane(p2,
                            wx.aui.AuiPaneInfo().
                            Name("Coronal Slice").Caption(_("Coronal slice")).
                            MaximizeButton(True).CloseButton(False))
        p3 = slice_viewer.Viewer(self, "SAGITAL")
        aui_manager.AddPane(p3,
                            wx.aui.AuiPaneInfo().
                            Name("Sagittal Slice").Caption(_("Sagittal slice")).
                            MaximizeButton(True).CloseButton(False))
        #p4 = volume_viewer.Viewer(self)
        aui_manager.AddPane(VolumeViewerCover,
                            wx.aui.AuiPaneInfo().
                            Name("Volume").Caption(_("Volume")).
                            MaximizeButton(True).CloseButton(False))
        splitter.AppendWindow(p1)
        splitter.AppendWindow(p2)
        splitter.AppendWindow(p3)
        splitter.AppendWindow(p4)
        aui_manager.Update()
class VolumeInteraction(wx.Panel):
    """Hosts the 3D volume viewer plus the raycasting CLUT editor pane."""

    def __init__(self, parent, id):
        super(VolumeInteraction, self).__init__(parent, id)
        # Gate: the CLUT pane may only be shown while this flag is set.
        self.can_show_raycasting_widget = 0
        self.__init_aui_manager()
        #sizer = wx.BoxSizer(wx.HORIZONTAL)
        #sizer.Add(volume_viewer.Viewer(self), 1, wx.EXPAND|wx.GROW)
        #self.SetSizer(sizer)
        self.__bind_events()
        self.__bind_events_wx()
        #sizer.Fit(self)

    def __init_aui_manager(self):
        """Lay out the volume viewer and the (initially hidden) CLUT pane."""
        self.aui_manager = wx.aui.AuiManager()
        self.aui_manager.SetManagedWindow(self)
        p1 = volume_viewer.Viewer(self)
        s1 = wx.aui.AuiPaneInfo().Centre().\
            CloseButton(False).MaximizeButton(False).CaptionVisible(0)
        self.s1 = s1
        self.clut_raycasting = CLUTRaycastingWidget(self, -1)
        self.s2 = wx.aui.AuiPaneInfo().Bottom().BestSize((200, 200)).\
            CloseButton(False).MaximizeButton(False).CaptionVisible(0).\
            Hide()
        self.aui_manager.AddPane(p1, s1)
        self.aui_manager.AddPane(self.clut_raycasting, self.s2)
        self.aui_manager.Update()

    def __bind_events_wx(self):
        self.clut_raycasting.Bind(EVT_CLUT_POINT_RELEASE, self.OnPointChanged)
        self.clut_raycasting.Bind(EVT_CLUT_CURVE_SELECT, self.OnCurveSelected)
        self.clut_raycasting.Bind(EVT_CLUT_CURVE_WL_CHANGE,
                                  self.OnChangeCurveWL)
        #self.Bind(wx.EVT_SIZE, self.OnSize)
        #self.Bind(wx.EVT_MAXIMIZE, self.OnMaximize)

    def __bind_events(self):
        Publisher.subscribe(self.ShowRaycastingWidget,
                            'Show raycasting widget')
        Publisher.subscribe(self.HideRaycastingWidget,
                            'Hide raycasting widget')
        Publisher.subscribe(self.OnSetRaycastPreset,
                            'Update raycasting preset')
        Publisher.subscribe(self.RefreshPoints,
                            'Refresh raycasting widget points')
        Publisher.subscribe(self.LoadHistogram,
                            'Load histogram')
        Publisher.subscribe(self._Exit, 'Exit')

    def __update_curve_wwwl_text(self, curve):
        # Broadcast the selected curve's window-width / window-level pair.
        ww, wl = self.clut_raycasting.GetCurveWWWl(curve)
        Publisher.sendMessage('Set raycasting wwwl', ww=ww, wl=wl, curve=curve)

    def ShowRaycastingWidget(self):
        """Allow the CLUT pane and show it if the preset has curve points."""
        self.can_show_raycasting_widget = 1
        if self.clut_raycasting.to_draw_points:
            p = self.aui_manager.GetPane(self.clut_raycasting)
            p.Show()
            self.aui_manager.Update()

    def HideRaycastingWidget(self):
        """Disallow and hide the CLUT pane."""
        self.can_show_raycasting_widget = 0
        p = self.aui_manager.GetPane(self.clut_raycasting)
        p.Hide()
        self.aui_manager.Update()

    def OnPointChanged(self, evt):
        Publisher.sendMessage('Set raycasting refresh')
        Publisher.sendMessage('Set raycasting curve', curve=evt.GetCurve())
        Publisher.sendMessage('Render volume viewer')

    def OnCurveSelected(self, evt):
        Publisher.sendMessage('Set raycasting curve', curve=evt.GetCurve())
        Publisher.sendMessage('Render volume viewer')

    def OnChangeCurveWL(self, evt):
        curve = evt.GetCurve()
        self.__update_curve_wwwl_text(curve)
        Publisher.sendMessage('Render volume viewer')

    def OnSetRaycastPreset(self):
        """Apply the project's raycasting preset and toggle the CLUT pane."""
        preset = project.Project().raycasting_preset
        p = self.aui_manager.GetPane(self.clut_raycasting)
        self.clut_raycasting.SetRaycastPreset(preset)
        if self.clut_raycasting.to_draw_points and \
           self.can_show_raycasting_widget:
            p.Show()
        else:
            p.Hide()
        self.aui_manager.Update()

    def LoadHistogram(self, histogram, init, end):
        # Feed the CLUT widget its value range and histogram data.
        self.clut_raycasting.SetRange((init, end))
        self.clut_raycasting.SetHistogramArray(histogram, (init, end))

    def RefreshPoints(self):
        self.clut_raycasting.CalculatePixelPoints()
        self.clut_raycasting.Refresh()

    def _Exit(self):
        self.aui_manager.UnInit()
import wx.lib.platebtn as pbtn
import wx.lib.buttons as btn
from pubsub import pub as Publisher
import wx.lib.colourselect as csel

# Shared lookup tables used by VolumeToolPanel's popup menus.
RAYCASTING_TOOLS = wx.NewId()

ID_TO_NAME = {}            # menu id -> raycasting preset name
ID_TO_TOOL = {}            # menu id -> raycasting tool name
ID_TO_TOOL_ITEM = {}       # menu id -> wx.MenuItem for the tool
TOOL_STATE = {}            # menu id -> checked state (bool)
ID_TO_ITEMSLICEMENU = {}   # menu id -> slice-plane menu item
ID_TO_ITEM_3DSTEREO = {}   # menu id -> stereo-mode menu item
ID_TO_STEREO_NAME = {}     # menu id -> stereo mode name

ICON_SIZE = (32, 32)       # default toolbar icon size in pixels
class VolumeViewerCover(wx.Panel):
    """Container pairing the 3D volume view with its side tool panel."""

    def __init__(self, parent):
        wx.Panel.__init__(self, parent)
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        # Volume view grows with the panel; the tool strip keeps its width.
        sizer.Add(VolumeInteraction(self, -1), 1, wx.EXPAND|wx.GROW)
        sizer.Add(VolumeToolPanel(self), 0, wx.EXPAND|wx.GROW)
        sizer.Fit(self)
        self.SetSizer(sizer)
        self.Update()
        self.SetAutoLayout(1)
class VolumeToolPanel(wx.Panel):
def __init__(self, parent):
    """Build the vertical tool strip of volume-view buttons and menus."""
    wx.Panel.__init__(self, parent)
    # VOLUME RAYCASTING BUTTON
    BMP_RAYCASTING = wx.Bitmap(str(inv_paths.ICON_DIR.joinpath("volume_raycasting.png")), wx.BITMAP_TYPE_PNG)
    BMP_SLICE_PLANE = wx.Bitmap(str(inv_paths.ICON_DIR.joinpath("slice_plane.png")), wx.BITMAP_TYPE_PNG)
    BMP_3D_STEREO = wx.Bitmap(str(inv_paths.ICON_DIR.joinpath("3D_glasses.png")), wx.BITMAP_TYPE_PNG)
    BMP_TARGET = wx.Bitmap(str(inv_paths.ICON_DIR.joinpath("target.png")), wx.BITMAP_TYPE_PNG)
    BMP_3D_MASK = wx.Bitmap(str(inv_paths.ICON_DIR.joinpath("file_from_internet.png")), wx.BITMAP_TYPE_PNG)
    self.button_raycasting = pbtn.PlateButton(self, -1,"", BMP_RAYCASTING, style=pbtn.PB_STYLE_SQUARE, size=ICON_SIZE)
    self.button_stereo = pbtn.PlateButton(self, -1,"", BMP_3D_STEREO, style=pbtn.PB_STYLE_SQUARE, size=ICON_SIZE)
    self.button_slice_plane = pbtn.PlateButton(self, -1, "", BMP_SLICE_PLANE, style=pbtn.PB_STYLE_SQUARE, size=ICON_SIZE)
    self.button_target = pbtn.PlateButton(self, -1,"", BMP_TARGET, style=pbtn.PB_STYLE_SQUARE|pbtn.PB_STYLE_TOGGLE, size=ICON_SIZE)
    # Target mode stays disabled until navigation prerequisites are met.
    self.button_target.Enable(0)
    # self.button_3d_mask = pbtn.PlateButton(self, -1, "", BMP_3D_MASK, style=pbtn.PB_STYLE_SQUARE|pbtn.PB_STYLE_TOGGLE, size=ICON_SIZE)
    # VOLUME VIEW ANGLE BUTTON
    BMP_FRONT = wx.Bitmap(ID_TO_BMP[const.VOL_FRONT][1], wx.BITMAP_TYPE_PNG)
    self.button_view = pbtn.PlateButton(self, -1, "", BMP_FRONT, size=(32,32), style=pbtn.PB_STYLE_SQUARE)
    # VOLUME COLOUR BUTTON
    # Platform-specific size/spacing for the colour selector.
    if sys.platform.startswith('linux'):
        size = (32,32)
        sp = 2
    else:
        size = (24,24)
        sp = 5
    self.button_colour= csel.ColourSelect(self, -1, colour=(0,0,0), size=size)
    # SIZER TO ORGANIZE ALL
    sizer = wx.BoxSizer(wx.VERTICAL)
    sizer.Add(self.button_colour, 0, wx.ALL, sp)
    sizer.Add(self.button_raycasting, 0, wx.TOP|wx.BOTTOM, 1)
    sizer.Add(self.button_view, 0, wx.TOP|wx.BOTTOM, 1)
    sizer.Add(self.button_slice_plane, 0, wx.TOP|wx.BOTTOM, 1)
    sizer.Add(self.button_stereo, 0, wx.TOP|wx.BOTTOM, 1)
    sizer.Add(self.button_target, 0, wx.TOP | wx.BOTTOM, 1)
    # sizer.Add(self.button_3d_mask, 0, wx.TOP | wx.BOTTOM, 1)
    # Navigation state flags driving the target button availability.
    self.navigation_status = False
    self.status_target_select = False
    self.status_obj_tracker = False
    sizer.Fit(self)
    self.SetSizer(sizer)
    self.SetAutoLayout(1)
    self.Update()
    self.Refresh()
    self.__init_menus()
    self.__bind_events()
    self.__bind_events_wx()
def __bind_events(self):
    Publisher.subscribe(self.ChangeButtonColour,
                        'Change volume viewer gui colour')
    Publisher.subscribe(self.DisablePreset, 'Close project data')
    Publisher.subscribe(self.Uncheck, 'Uncheck image plane menu')
    Publisher.subscribe(self.DisableVolumeCutMenu, 'Disable volume cut menu')
    Publisher.subscribe(self.StatusTargetSelect, 'Disable or enable coil tracker')
    Publisher.subscribe(self.StatusObjTracker, 'Status target button')
    Publisher.subscribe(self.ActiveTarget, 'Active target button')
    Publisher.subscribe(self.DeactiveTarget, 'Deactive target button')

def DisablePreset(self):
    """Reset the raycasting menu to its 'Off' entry (project closed)."""
    self.off_item.Check(1)

def __bind_events_wx(self):
    self.button_slice_plane.Bind(wx.EVT_LEFT_DOWN, self.OnButtonSlicePlane)
    self.button_raycasting.Bind(wx.EVT_LEFT_DOWN, self.OnButtonRaycasting)
    self.button_view.Bind(wx.EVT_LEFT_DOWN, self.OnButtonView)
    self.button_colour.Bind(csel.EVT_COLOURSELECT, self.OnSelectColour)
    self.button_stereo.Bind(wx.EVT_LEFT_DOWN, self.OnButtonStereo)
    self.button_target.Bind(wx.EVT_LEFT_DOWN, self.OnButtonTarget)

def OnButtonRaycasting(self, evt):
    # MENU RELATED TO RAYCASTING TYPES
    self.button_raycasting.PopupMenu(self.menu_raycasting)

def OnButtonStereo(self, evt):
    self.button_stereo.PopupMenu(self.stereo_menu)

def OnButtonView(self, evt):
    self.button_view.PopupMenu(self.menu_view)

def OnButtonSlicePlane(self, evt):
    self.button_slice_plane.PopupMenu(self.slice_plane_menu)

def StatusObjTracker(self, status):
    # Object-tracker readiness changed; re-evaluate the target button.
    self.status_obj_tracker = status
    self.StatusNavigation()

def StatusTargetSelect(self, status):
    # Target-selection readiness changed; re-evaluate the target button.
    self.status_target_select = status
    self.StatusNavigation()

def ActiveTarget(self):
    self.button_target.Show()

def DeactiveTarget(self):
    self.button_target.Hide()

def StatusNavigation(self):
    """Enable the target button only when both prerequisites hold."""
    if self.status_target_select and self.status_obj_tracker:
        self.button_target.Enable(1)
    else:
        # Force target mode off before disabling the button.
        self.OnButtonTarget(False)
        self.button_target.Enable(0)
def OnButtonTarget(self, evt):
if not self.button_target.IsPressed() and evt is not False:
self.button_target._pressed = True
Publisher.sendMessage('Target navigation mode', target_mode=self.button_target._pressed)
Publisher.sendMessage('Change camera checkbox', status=self.button_target._pressed)
elif self.button_target.IsPressed() or evt is False:
self.button_target._pressed = False
Publisher.sendMessage('Target navigation mode', target_mode=self.button_target._pressed)
Publisher.sendMessage('Change camera checkbox', status=self.button_target._pressed)
    def OnSavePreset(self, evt):
        # Ask the user for a name and broadcast a save request for the
        # current raycasting preset.
        d = wx.TextEntryDialog(self, _("Preset name"))
        if d.ShowModal() == wx.ID_OK:
            preset_name = d.GetValue()
            Publisher.sendMessage("Save raycasting preset", preset_name=preset_name)
    def __init_menus(self):
        """Build every popup menu used by this panel (raycasting presets and
        tools, view angles, slice planes, 3D stereo modes) and bind their
        handlers.  Populates the module-level ID_TO_* lookup tables."""
        # MENU RELATED TO RAYCASTING TYPES
        menu = self.menu = wx.Menu()
        #print "\n\n"
        #print ">>>>", const.RAYCASTING_TYPES.sort()
        #print "\n\n"
        for name in const.RAYCASTING_TYPES:
            id = wx.NewId()
            item = wx.MenuItem(menu, id, name, kind=wx.ITEM_RADIO)
            menu.Append(item)
            if name == const.RAYCASTING_OFF_LABEL:
                # Remember the "off" entry so DisablePreset can re-check it.
                self.off_item = item
                item.Check(1)
            ID_TO_NAME[id] = name
        menu.AppendSeparator()
        # MENU RELATED TO RAYCASTING TOOLS
        self.id_cutplane = None
        submenu = wx.Menu()
        for name in const.RAYCASTING_TOOLS:
            id = wx.NewId()
            if not(self.id_cutplane):
                # First tool id is treated as the cut plane -- presumably
                # RAYCASTING_TOOLS lists it first; TODO confirm.
                self.id_cutplane = id
            item = wx.MenuItem(submenu, id, name, kind=wx.ITEM_CHECK)
            submenu.Append(item)
            ID_TO_TOOL[id] = name
            ID_TO_TOOL_ITEM[id] = item
            TOOL_STATE[id] = False
        self.submenu_raycasting_tools = submenu
        menu.Append(RAYCASTING_TOOLS, _("Tools"), submenu)
        # Tools stay disabled until a raycasting preset is active
        # (see OnMenuRaycasting).
        menu.Enable(RAYCASTING_TOOLS, 0)
        self.menu_raycasting = menu
        # In MacOS X and Windows, binding parent menu is enough. But
        # not in GNU Linux - in the last it is necessary to bind the
        # submenu
        if sys.platform != 'win32':
            submenu.Bind(wx.EVT_MENU, self.OnMenuRaycasting)
        menu.Bind(wx.EVT_MENU, self.OnMenuRaycasting)
        # VOLUME VIEW ANGLE BUTTON
        menu = wx.Menu()
        for id in ID_TO_BMP:
            bmp = wx.Bitmap(ID_TO_BMP[id][1], wx.BITMAP_TYPE_PNG)
            item = wx.MenuItem(menu, id, ID_TO_BMP[id][0])
            item.SetBitmap(bmp)
            menu.Append(item)
        menu.Bind(wx.EVT_MENU, self.OnMenuView)
        self.menu_view = menu
        #SLICE PLANES BUTTON
        self.slice_plane_menu = slice_plane_menu = wx.Menu()
        itens = ["Axial", "Coronal", "Sagital"]
        for value in itens:
            new_id = wx.NewId()
            item = wx.MenuItem(slice_plane_menu, new_id, value,
                                kind = wx.ITEM_CHECK)
            ID_TO_ITEMSLICEMENU[new_id] = item
            slice_plane_menu.Append(item)
        slice_plane_menu.Bind(wx.EVT_MENU, self.OnMenuPlaneSlice)
        #3D Stereo Buttons
        self.stereo_menu = stereo_menu = wx.Menu()
        itens = [const.STEREO_OFF, const.STEREO_RED_BLUE,const.STEREO_ANAGLYPH, const.STEREO_CRISTAL,
                 const.STEREO_INTERLACED, const.STEREO_LEFT, const.STEREO_RIGHT, const.STEREO_DRESDEN,
                 const.STEREO_CHECKBOARD]
        for value in itens:
            new_id = wx.NewId()
            item = wx.MenuItem(stereo_menu, new_id, value,
                                kind = wx.ITEM_RADIO)
            ID_TO_ITEM_3DSTEREO[new_id] = item
            ID_TO_STEREO_NAME[new_id] = value
            stereo_menu.Append(item)
        stereo_menu.Bind(wx.EVT_MENU, self.OnMenuStereo)
        self.Fit()
        self.Update()
    def DisableVolumeCutMenu(self):
        # Pubsub handler: grey out the raycasting tools submenu and
        # uncheck the cut-plane tool entry.
        self.menu.Enable(RAYCASTING_TOOLS, 0)
        item = ID_TO_TOOL_ITEM[self.id_cutplane]
        item.Check(0)
def BuildRaycastingMenu(self):
presets = []
for folder in const.RAYCASTING_PRESETS_FOLDERS:
presets += [filename.split(".")[0] for filename in
os.listdir(folder) if
os.path.isfile(os.path.join(folder,filename))]
def OnMenuPlaneSlice(self, evt):
id = evt.GetId()
item = ID_TO_ITEMSLICEMENU[id]
checked = item.IsChecked()
label = item.GetItemLabelText()
if not (checked):
Publisher.sendMessage('Disable plane', plane_label=label)
else:
Publisher.sendMessage('Enable plane', plane_label=label)
    def OnMenuStereo(self, evt):
        # Broadcast the stereo mode matching the chosen menu entry.
        id = evt.GetId()
        mode = ID_TO_STEREO_NAME[id]
        Publisher.sendMessage('Set stereo mode', mode=mode)
def Uncheck(self):
for item in self.slice_plane_menu.GetMenuItems():
if (item.IsChecked()):
item.Check(0)
    def ChangeButtonColour(self, colour):
        # Pubsub handler: `colour` arrives as 0..1 components; scale to the
        # 0..255 range the colour-select button expects.
        # NOTE(review): i*255 yields floats, not ints -- confirm wx accepts
        # float components here.
        colour = [i*255 for i in colour]
        self.button_colour.SetColour(colour)
    def OnMenuRaycasting(self, evt):
        """Events from raycasting menu.

        Handles both halves of the menu: preset (radio) entries load a
        raycasting preset; tool (check) entries toggle a raycasting tool,
        mirroring the on/off state in the module-level TOOL_STATE table.
        """
        id = evt.GetId()
        if id in ID_TO_NAME.keys():
            # Raycassting type was selected
            name = ID_TO_NAME[id]
            Publisher.sendMessage('Load raycasting preset',
                                  preset_name=ID_TO_NAME[id])
            # Enable or disable tools
            # (tools only make sense while a preset other than "off" is active)
            if name != const.RAYCASTING_OFF_LABEL:
                self.menu_raycasting.Enable(RAYCASTING_TOOLS, 1)
            else:
                self.menu_raycasting.Enable(RAYCASTING_TOOLS, 0)
        else:
            # Raycasting tool
            # TODO: In future, when more tools are available
            item = ID_TO_TOOL_ITEM[id]
            #if not item.IsChecked():
            #    for i in ID_TO_TOOL_ITEM.values():
            #        if i is not item:
            #            i.Check(0)
            if not TOOL_STATE[id]:
                # Tool was off: turn it on and record the new state.
                Publisher.sendMessage('Enable raycasting tool',
                                      tool_name=ID_TO_TOOL[id], flag=1)
                TOOL_STATE[id] = True
                item.Check(1)
            else:
                # Tool was on: turn it off.
                Publisher.sendMessage('Enable raycasting tool',
                                      tool_name=ID_TO_TOOL[id], flag=0)
                TOOL_STATE[id] = False
                item.Check(0)
    def OnMenuView(self, evt):
        """Events from button menus."""
        # Show the chosen view angle's bitmap on the button, then broadcast
        # the selected angle id.
        bmp = wx.Bitmap(ID_TO_BMP[evt.GetId()][1], wx.BITMAP_TYPE_PNG)
        self.button_view.SetBitmapSelected(bmp)
        Publisher.sendMessage('Set volume view angle',
                              view=evt.GetId())
        self.Refresh()
def OnSelectColour(self, evt):
colour = c = [i/255.0 for i in evt.GetValue()]
Publisher.sendMessage('Change volume viewer background colour', colour=colour)
| gpl-2.0 |
xuxiao19910803/edx | openedx/core/djangoapps/user_api/migrations/0004_auto__add_userorgtag__add_unique_userorgtag_user_org_key__chg_field_us.py | 114 | 7274 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the ``user_api`` app.

    Adds the ``UserOrgTag`` model with a (user, org, key) unique
    constraint, and converts ``UserCourseTag.course_id`` from a plain
    CharField to a ``CourseKeyField``.
    """

    def forwards(self, orm):
        # Adding model 'UserOrgTag'
        db.create_table('user_api_userorgtag', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now)),
            ('modified', self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['auth.User'])),
            ('key', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
            ('org', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
            ('value', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal('user_api', ['UserOrgTag'])
        # Adding unique constraint on 'UserOrgTag', fields ['user', 'org', 'key']
        db.create_unique('user_api_userorgtag', ['user_id', 'org', 'key'])
        # Create a composite index of user_id, org, and key.
        # NOTE(review): most backends already create an index to back the
        # unique constraint above, so this extra composite index may be
        # redundant -- confirm it is deliberate before touching it.
        db.create_index('user_api_userorgtag', ['user_id', 'org', 'key'])
        # Changing field 'UserCourseTag.course_id'
        db.alter_column('user_api_usercoursetag', 'course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255))

    def backwards(self, orm):
        # Exact mirror of forwards(): drop the index and constraint before
        # the table, then restore course_id to a plain CharField.
        # Delete the composite index of user_id, org, and key.
        db.delete_index('user_api_userorgtag', ['user_id', 'org', 'key'])
        # Removing unique constraint on 'UserOrgTag', fields ['user', 'org', 'key']
        db.delete_unique('user_api_userorgtag', ['user_id', 'org', 'key'])
        # Deleting model 'UserOrgTag'
        db.delete_table('user_api_userorgtag')
        # Changing field 'UserCourseTag.course_id'
        db.alter_column('user_api_usercoursetag', 'course_id', self.gf('django.db.models.fields.CharField')(max_length=255))

    # Frozen ORM definitions South uses to build the ``orm`` object passed
    # to forwards()/backwards().  Auto-generated; do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'user_api.usercoursetag': {
            'Meta': {'unique_together': "(('user', 'course_id', 'key'),)", 'object_name': 'UserCourseTag'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['auth.User']"}),
            'value': ('django.db.models.fields.TextField', [], {})
        },
        'user_api.userorgtag': {
            'Meta': {'unique_together': "(('user', 'org', 'key'),)", 'object_name': 'UserOrgTag'},
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'org': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['auth.User']"}),
            'value': ('django.db.models.fields.TextField', [], {})
        },
        'user_api.userpreference': {
            'Meta': {'unique_together': "(('user', 'key'),)", 'object_name': 'UserPreference'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'preferences'", 'to': "orm['auth.User']"}),
            'value': ('django.db.models.fields.TextField', [], {})
        }
    }

    complete_apps = ['user_api']
| agpl-3.0 |
google-code/android-scripting | python/src/Demo/pdist/client.py | 47 | 4708 | """RPC Client module."""
import sys
import socket
import pickle
import __builtin__
import os
# Default verbosity (0 = silent, 1 = print connections, 2 = print requests too)
VERBOSE = 1
class Client:
"""RPC Client class. No need to derive a class -- it's fully generic."""
def __init__(self, address, verbose = VERBOSE):
self._pre_init(address, verbose)
self._post_init()
def _pre_init(self, address, verbose = VERBOSE):
if type(address) == type(0):
address = ('', address)
self._address = address
self._verbose = verbose
if self._verbose: print "Connecting to %s ..." % repr(address)
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.connect(address)
if self._verbose: print "Connected."
self._lastid = 0 # Last id for which a reply has been received
self._nextid = 1 # Id of next request
self._replies = {} # Unprocessed replies
self._rf = self._socket.makefile('r')
self._wf = self._socket.makefile('w')
def _post_init(self):
self._methods = self._call('.methods')
def __del__(self):
self._close()
def _close(self):
if self._rf: self._rf.close()
self._rf = None
if self._wf: self._wf.close()
self._wf = None
if self._socket: self._socket.close()
self._socket = None
def __getattr__(self, name):
if name in self._methods:
method = _stub(self, name)
setattr(self, name, method) # XXX circular reference
return method
raise AttributeError, name
def _setverbose(self, verbose):
self._verbose = verbose
def _call(self, name, *args):
return self._vcall(name, args)
def _vcall(self, name, args):
return self._recv(self._vsend(name, args))
def _send(self, name, *args):
return self._vsend(name, args)
def _send_noreply(self, name, *args):
return self._vsend(name, args, 0)
def _vsend_noreply(self, name, args):
return self._vsend(name, args, 0)
def _vsend(self, name, args, wantreply = 1):
id = self._nextid
self._nextid = id+1
if not wantreply: id = -id
request = (name, args, id)
if self._verbose > 1: print "sending request: %s" % repr(request)
wp = pickle.Pickler(self._wf)
wp.dump(request)
return id
def _recv(self, id):
exception, value, rid = self._vrecv(id)
if rid != id:
raise RuntimeError, "request/reply id mismatch: %d/%d" % (id, rid)
if exception is None:
return value
x = exception
if hasattr(__builtin__, exception):
x = getattr(__builtin__, exception)
elif exception in ('posix.error', 'mac.error'):
x = os.error
if x == exception:
exception = x
raise exception, value
def _vrecv(self, id):
self._flush()
if self._replies.has_key(id):
if self._verbose > 1: print "retrieving previous reply, id = %d" % id
reply = self._replies[id]
del self._replies[id]
return reply
aid = abs(id)
while 1:
if self._verbose > 1: print "waiting for reply, id = %d" % id
rp = pickle.Unpickler(self._rf)
reply = rp.load()
del rp
if self._verbose > 1: print "got reply: %s" % repr(reply)
rid = reply[2]
arid = abs(rid)
if arid == aid:
if self._verbose > 1: print "got it"
return reply
self._replies[rid] = reply
if arid > aid:
if self._verbose > 1: print "got higher id, assume all ok"
return (None, None, id)
def _flush(self):
self._wf.flush()
from security import Security
class SecureClient(Client, Security):
    """Client that completes the server's challenge/response handshake
    before fetching the remote method list."""

    def __init__(self, *args):
        import string
        apply(self._pre_init, args)
        Security.__init__(self)
        self._wf.flush()
        # The server opens with a numeric challenge on a line of its own.
        line = self._rf.readline()
        challenge = string.atoi(string.strip(line))
        # Reply with the encoded response; strip the 'L' suffix Python 2
        # appends to long-integer reprs before sending.
        response = self._encode_challenge(challenge)
        line = repr(long(response))
        if line[-1] in 'Ll': line = line[:-1]
        self._wf.write(line + '\n')
        self._wf.flush()
        # Handshake done: fetch the method list as a normal Client would.
        self._post_init()
class _stub:
"""Helper class for Client -- each instance serves as a method of the client."""
def __init__(self, client, name):
self._client = client
self._name = name
def __call__(self, *args):
return self._client._vcall(self._name, args)
| apache-2.0 |
admcrae/tensorflow | tensorflow/python/layers/pooling.py | 12 | 24692 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# pylint: disable=unused-import,g-bad-import-order
"""Contains the pooling layer classes and their functional aliases.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from six.moves import xrange # pylint: disable=redefined-builtin
import numpy as np
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import standard_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.layers import base
from tensorflow.python.layers import utils
class _Pooling1D(base._Layer):  # pylint: disable=protected-access
  """Pooling layer for arbitrary pooling functions, for 1D inputs.
  This class only exists for code reuse. It will never be an exposed API.
  Arguments:
    pool_function: The pooling function to apply, e.g. `tf.nn.max_pool`.
    pool_size: An integer or tuple/list of a single integer,
      representing the size of the pooling window.
    strides: An integer or tuple/list of a single integer, specifying the
      strides of the pooling operation.
    padding: A string. The padding method, either 'valid' or 'same'.
      Case-insensitive.
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, length, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, length)`.
    name: A string, the name of the layer.
  """
  def __init__(self, pool_function, pool_size, strides,
               padding='valid', data_format='channels_last',
               name=None, **kwargs):
    super(_Pooling1D, self).__init__(name=name, **kwargs)
    self.pool_function = pool_function
    # Normalize scalar arguments to canonical 1-tuples / lowercase strings.
    self.pool_size = utils.normalize_tuple(pool_size, 1, 'pool_size')
    self.strides = utils.normalize_tuple(strides, 1, 'strides')
    self.padding = utils.normalize_padding(padding)
    self.data_format = utils.normalize_data_format(data_format)
  def build(self, input_shape):
    # Inputs must be rank 3: (batch, length, channels) or
    # (batch, channels, length) depending on data_format.
    if len(input_shape) != 3:
      raise ValueError('Inputs should have rank 3. '
                       'Received input shape:', str(input_shape))
  def call(self, inputs):
    # There is no TF op for 1D pooling, hence we make the inputs 4D.
    if self.data_format == 'channels_last':
      # Dummy width axis: (batch, length, 1, channels); ksize pools over
      # the H (length) dimension.
      inputs = array_ops.expand_dims(inputs, 2)
      pool_shape = (1,) + self.pool_size + (1, 1)
      strides = (1,) + self.strides + (1, 1)
      data_format = 'NHWC'
    else:
      # NOTE(review): expand_dims at axis 1 yields (batch, 1, channels,
      # length); with NCHW ksize (1, 1, pool, 1) the window then slides
      # over the axis holding the original channels, not the length axis.
      # This looks wrong for channels_first input -- verify against the
      # upstream history before relying on this path.
      inputs = array_ops.expand_dims(inputs, 1)
      pool_shape = (1, 1) + self.pool_size + (1,)
      strides = (1, 1) + self.strides + (1,)
      data_format = 'NCHW'
    outputs = self.pool_function(
        inputs,
        ksize=pool_shape,
        strides=strides,
        padding=self.padding.upper(),
        data_format=data_format)
    if self.data_format == 'channels_last':
      # Drop the dummy axis inserted above.
      return array_ops.squeeze(outputs, 2)
    else:
      return array_ops.squeeze(outputs, 1)
class AveragePooling1D(_Pooling1D):
  """Average Pooling layer for 1D inputs.
  Arguments:
    pool_size: An integer or tuple/list of a single integer,
      representing the size of the pooling window.
    strides: An integer or tuple/list of a single integer, specifying the
      strides of the pooling operation.
    padding: A string. The padding method, either 'valid' or 'same'.
      Case-insensitive.
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, length, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, length)`.
    name: A string, the name of the layer.
  """
  def __init__(self, pool_size, strides,
               padding='valid', data_format='channels_last',
               name=None, **kwargs):
    # Specializes the generic 1D pooling base with the avg_pool op.
    super(AveragePooling1D, self).__init__(
        nn.avg_pool,
        pool_size=pool_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        name=name,
        **kwargs)
def average_pooling1d(inputs, pool_size, strides,
                      padding='valid', data_format='channels_last',
                      name=None):
  """Functional interface for 1D average pooling.

  Constructs an `AveragePooling1D` layer and applies it to `inputs`.

  Arguments:
    inputs: A rank-3 tensor to pool over.
    pool_size: Integer or single-element tuple/list: pooling window size.
    strides: Integer or single-element tuple/list: pooling stride.
    padding: 'valid' or 'same' (case-insensitive).
    data_format: 'channels_last' for `(batch, length, channels)` inputs or
      'channels_first' for `(batch, channels, length)` inputs.
    name: Optional name for the layer.

  Returns:
    A rank-3 output tensor.
  """
  return AveragePooling1D(pool_size=pool_size,
                          strides=strides,
                          padding=padding,
                          data_format=data_format,
                          name=name).apply(inputs)
class MaxPooling1D(_Pooling1D):
  """Max Pooling layer for 1D inputs.
  Arguments:
    pool_size: An integer or tuple/list of a single integer,
      representing the size of the pooling window.
    strides: An integer or tuple/list of a single integer, specifying the
      strides of the pooling operation.
    padding: A string. The padding method, either 'valid' or 'same'.
      Case-insensitive.
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, length, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, length)`.
    name: A string, the name of the layer.
  """
  def __init__(self, pool_size, strides,
               padding='valid', data_format='channels_last',
               name=None, **kwargs):
    # Specializes the generic 1D pooling base with the max_pool op.
    super(MaxPooling1D, self).__init__(
        nn.max_pool,
        pool_size=pool_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        name=name,
        **kwargs)
def max_pooling1d(inputs, pool_size, strides,
                  padding='valid', data_format='channels_last',
                  name=None):
  """Functional interface for 1D max pooling.

  Constructs a `MaxPooling1D` layer and applies it to `inputs`.

  Arguments:
    inputs: A rank-3 tensor to pool over.
    pool_size: Integer or single-element tuple/list: pooling window size.
    strides: Integer or single-element tuple/list: pooling stride.
    padding: 'valid' or 'same' (case-insensitive).
    data_format: 'channels_last' for `(batch, length, channels)` inputs or
      'channels_first' for `(batch, channels, length)` inputs.
    name: Optional name for the layer.

  Returns:
    A rank-3 output tensor.
  """
  return MaxPooling1D(pool_size=pool_size,
                      strides=strides,
                      padding=padding,
                      data_format=data_format,
                      name=name).apply(inputs)
class _Pooling2D(base._Layer):  # pylint: disable=protected-access
  """Pooling layer for arbitrary pooling functions, for 2D inputs (e.g. images).
  This class only exists for code reuse. It will never be an exposed API.
  Arguments:
    pool_function: The pooling function to apply, e.g. `tf.nn.max_pool`.
    pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
      specifying the size of the pooling window.
      Can be a single integer to specify the same value for
      all spatial dimensions.
    strides: An integer or tuple/list of 2 integers,
      specifying the strides of the pooling operation.
      Can be a single integer to specify the same value for
      all spatial dimensions.
    padding: A string. The padding method, either 'valid' or 'same'.
      Case-insensitive.
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, height, width, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, height, width)`.
    name: A string, the name of the layer.
  """
  def __init__(self, pool_function, pool_size, strides,
               padding='valid', data_format='channels_last',
               name=None, **kwargs):
    super(_Pooling2D, self).__init__(name=name, **kwargs)
    self.pool_function = pool_function
    # Normalize scalar arguments to canonical 2-tuples / lowercase strings.
    self.pool_size = utils.normalize_tuple(pool_size, 2, 'pool_size')
    self.strides = utils.normalize_tuple(strides, 2, 'strides')
    self.padding = utils.normalize_padding(padding)
    self.data_format = utils.normalize_data_format(data_format)
  def build(self, input_shape):
    if len(input_shape) != 4:
      raise ValueError('Inputs should have rank 4. '
                       'Received input shape:', str(input_shape))
  def call(self, inputs):
    # ksize/strides ordering must follow the data format (NHWC vs NCHW).
    if self.data_format == 'channels_last':
      pool_shape = (1,) + self.pool_size + (1,)
      strides = (1,) + self.strides + (1,)
    else:
      pool_shape = (1, 1) + self.pool_size
      strides = (1, 1) + self.strides
    return self.pool_function(
        inputs,
        ksize=pool_shape,
        strides=strides,
        padding=self.padding.upper(),
        data_format=utils.convert_data_format(self.data_format, 4))
class AveragePooling2D(_Pooling2D):
  """Average pooling layer for 2D inputs (e.g. images).
  Arguments:
    pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
      specifying the size of the pooling window.
      Can be a single integer to specify the same value for
      all spatial dimensions.
    strides: An integer or tuple/list of 2 integers,
      specifying the strides of the pooling operation.
      Can be a single integer to specify the same value for
      all spatial dimensions.
    padding: A string. The padding method, either 'valid' or 'same'.
      Case-insensitive.
    data_format: A string. The ordering of the dimensions in the inputs.
      `channels_last` (default) and `channels_first` are supported.
      `channels_last` corresponds to inputs with shape
      `(batch, height, width, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, height, width)`.
    name: A string, the name of the layer.
  """
  def __init__(self, pool_size, strides,
               padding='valid', data_format='channels_last',
               name=None, **kwargs):
    # Specializes the generic 2D pooling base with the avg_pool op.
    super(AveragePooling2D, self).__init__(
        nn.avg_pool,
        pool_size=pool_size, strides=strides,
        padding=padding, data_format=data_format, name=name, **kwargs)
def average_pooling2d(inputs,
                      pool_size, strides,
                      padding='valid', data_format='channels_last',
                      name=None):
  """Functional interface for 2D average pooling.

  Constructs an `AveragePooling2D` layer and applies it to `inputs`.

  Arguments:
    inputs: A rank-4 tensor to pool over.
    pool_size: Integer or 2-element tuple/list (pool_height, pool_width);
      a single integer applies to both spatial dimensions.
    strides: Integer or 2-element tuple/list of pooling strides; a single
      integer applies to both spatial dimensions.
    padding: 'valid' or 'same' (case-insensitive).
    data_format: 'channels_last' for `(batch, height, width, channels)`
      inputs or 'channels_first' for `(batch, channels, height, width)`.
    name: Optional name for the layer.

  Returns:
    Output tensor.
  """
  return AveragePooling2D(pool_size=pool_size, strides=strides,
                          padding=padding, data_format=data_format,
                          name=name).apply(inputs)
class MaxPooling2D(_Pooling2D):
  """Max pooling layer for 2D inputs (e.g. images).
  Arguments:
    pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
      specifying the size of the pooling window.
      Can be a single integer to specify the same value for
      all spatial dimensions.
    strides: An integer or tuple/list of 2 integers,
      specifying the strides of the pooling operation.
      Can be a single integer to specify the same value for
      all spatial dimensions.
    padding: A string. The padding method, either 'valid' or 'same'.
      Case-insensitive.
    data_format: A string. The ordering of the dimensions in the inputs.
      `channels_last` (default) and `channels_first` are supported.
      `channels_last` corresponds to inputs with shape
      `(batch, height, width, channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, height, width)`.
    name: A string, the name of the layer.
  """
  def __init__(self, pool_size, strides,
               padding='valid', data_format='channels_last',
               name=None, **kwargs):
    # Specializes the generic 2D pooling base with the max_pool op.
    super(MaxPooling2D, self).__init__(
        nn.max_pool,
        pool_size=pool_size, strides=strides,
        padding=padding, data_format=data_format, name=name, **kwargs)
def max_pooling2d(inputs,
                  pool_size, strides,
                  padding='valid', data_format='channels_last',
                  name=None):
  """Functional interface for 2D max pooling.

  Constructs a `MaxPooling2D` layer and applies it to `inputs`.

  Arguments:
    inputs: A rank-4 tensor to pool over.
    pool_size: Integer or 2-element tuple/list (pool_height, pool_width);
      a single integer applies to both spatial dimensions.
    strides: Integer or 2-element tuple/list of pooling strides; a single
      integer applies to both spatial dimensions.
    padding: 'valid' or 'same' (case-insensitive).
    data_format: 'channels_last' for `(batch, height, width, channels)`
      inputs or 'channels_first' for `(batch, channels, height, width)`.
    name: Optional name for the layer.

  Returns:
    Output tensor.
  """
  return MaxPooling2D(pool_size=pool_size, strides=strides,
                      padding=padding, data_format=data_format,
                      name=name).apply(inputs)
class _Pooling3D(base._Layer):  # pylint: disable=protected-access
  """Pooling layer for arbitrary pooling functions, for 3D inputs.
  This class only exists for code reuse. It will never be an exposed API.
  Arguments:
    pool_function: The pooling function to apply, e.g. `tf.nn.max_pool`.
    pool_size: An integer or tuple/list of 3 integers:
      (pool_depth, pool_height, pool_width)
      specifying the size of the pooling window.
      Can be a single integer to specify the same value for
      all spatial dimensions.
    strides: An integer or tuple/list of 3 integers,
      specifying the strides of the pooling operation.
      Can be a single integer to specify the same value for
      all spatial dimensions.
    padding: A string. The padding method, either 'valid' or 'same'.
      Case-insensitive.
    data_format: A string, one of `channels_last` (default) or `channels_first`.
      The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, depth, height, width, channels)`
      while `channels_first` corresponds to
      inputs with shape `(batch, channels, depth, height, width)`.
    name: A string, the name of the layer.
  """
  def __init__(self, pool_function, pool_size, strides,
               padding='valid', data_format='channels_last',
               name=None, **kwargs):
    super(_Pooling3D, self).__init__(name=name, **kwargs)
    self.pool_function = pool_function
    # Normalize scalar arguments to canonical 3-tuples / lowercase strings.
    self.pool_size = utils.normalize_tuple(pool_size, 3, 'pool_size')
    self.strides = utils.normalize_tuple(strides, 3, 'strides')
    self.padding = utils.normalize_padding(padding)
    self.data_format = utils.normalize_data_format(data_format)
  def build(self, input_shape):
    if len(input_shape) != 5:
      raise ValueError('Inputs should have rank 5. '
                       'Received input shape:', str(input_shape))
  def call(self, inputs):
    # ksize/strides are built in NDHWC order; the channels_first case is
    # converted below rather than via a data_format argument.
    pool_shape = (1,) + self.pool_size + (1,)
    strides = (1,) + self.strides + (1,)
    if self.data_format == 'channels_first':
      # TF does not support channels first with 3D pooling operations,
      # so we must handle this case manually.
      inputs = array_ops.transpose(inputs, (0, 2, 3, 4, 1))
    outputs = self.pool_function(
        inputs,
        ksize=pool_shape,
        strides=strides,
        padding=self.padding.upper())
    if self.data_format == 'channels_first':
      # Transpose back to the caller's channels_first layout.
      outputs = array_ops.transpose(outputs, (0, 4, 1, 2, 3))
    return outputs
class AveragePooling3D(_Pooling3D):
  """Average pooling layer for 3D inputs (e.g. volumes).

  Arguments:
    pool_size: Integer or tuple/list of 3 integers
      (pool_depth, pool_height, pool_width) giving the pooling window size;
      a single integer is broadcast to all three spatial dimensions.
    strides: Integer or tuple/list of 3 integers giving the pooling strides;
      a single integer is broadcast to all three spatial dimensions.
    padding: 'valid' or 'same' (case-insensitive).
    data_format: 'channels_last' (default, inputs shaped
      `(batch, depth, height, width, channels)`) or 'channels_first'
      (inputs shaped `(batch, channels, depth, height, width)`).
    name: Optional string name for the layer.
  """

  def __init__(self, pool_size, strides,
               padding='valid', data_format='channels_last',
               name=None, **kwargs):
    # Delegate to the shared 3D pooling base, bound to the average-pool op.
    super(AveragePooling3D, self).__init__(
        pool_function=nn.avg_pool3d,
        pool_size=pool_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        name=name,
        **kwargs)
def average_pooling3d(inputs,
                      pool_size, strides,
                      padding='valid', data_format='channels_last',
                      name=None):
  """Functional interface for 3D average pooling (e.g. over volumes).

  Arguments:
    inputs: The tensor over which to pool; must have rank 5.
    pool_size: Integer or tuple/list of 3 integers
      (pool_depth, pool_height, pool_width) giving the pooling window size;
      a single integer is broadcast to all three spatial dimensions.
    strides: Integer or tuple/list of 3 integers giving the pooling strides;
      a single integer is broadcast to all three spatial dimensions.
    padding: 'valid' or 'same' (case-insensitive).
    data_format: 'channels_last' (default) or 'channels_first'.
    name: Optional string name for the layer.

  Returns:
    Output tensor.
  """
  # Build the layer object and immediately apply it to the input tensor.
  pooling_layer = AveragePooling3D(pool_size=pool_size,
                                   strides=strides,
                                   padding=padding,
                                   data_format=data_format,
                                   name=name)
  return pooling_layer.apply(inputs)
class MaxPooling3D(_Pooling3D):
  """Max pooling layer for 3D inputs (e.g. volumes).

  Arguments:
    pool_size: Integer or tuple/list of 3 integers
      (pool_depth, pool_height, pool_width) giving the pooling window size;
      a single integer is broadcast to all three spatial dimensions.
    strides: Integer or tuple/list of 3 integers giving the pooling strides;
      a single integer is broadcast to all three spatial dimensions.
    padding: 'valid' or 'same' (case-insensitive).
    data_format: 'channels_last' (default, inputs shaped
      `(batch, depth, height, width, channels)`) or 'channels_first'
      (inputs shaped `(batch, channels, depth, height, width)`).
    name: Optional string name for the layer.
  """

  def __init__(self, pool_size, strides,
               padding='valid', data_format='channels_last',
               name=None, **kwargs):
    # Delegate to the shared 3D pooling base, bound to the max-pool op.
    super(MaxPooling3D, self).__init__(
        pool_function=nn.max_pool3d,
        pool_size=pool_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        name=name,
        **kwargs)
def max_pooling3d(inputs,
                  pool_size, strides,
                  padding='valid', data_format='channels_last',
                  name=None):
  """Functional interface for 3D max pooling (e.g. over volumes).

  Arguments:
    inputs: The tensor over which to pool; must have rank 5.
    pool_size: Integer or tuple/list of 3 integers
      (pool_depth, pool_height, pool_width) giving the pooling window size;
      a single integer is broadcast to all three spatial dimensions.
    strides: Integer or tuple/list of 3 integers giving the pooling strides;
      a single integer is broadcast to all three spatial dimensions.
    padding: 'valid' or 'same' (case-insensitive).
    data_format: 'channels_last' (default) or 'channels_first'.
    name: Optional string name for the layer.

  Returns:
    Output tensor.
  """
  # Build the layer object and immediately apply it to the input tensor.
  pooling_layer = MaxPooling3D(pool_size=pool_size,
                               strides=strides,
                               padding=padding,
                               data_format=data_format,
                               name=name)
  return pooling_layer.apply(inputs)
# Aliases

# Short-form names mirroring the Keras-style class names (`AvgPool2D`) and
# the functional names (`avg_pool2d`) of the 2D pooling APIs defined above.
AvgPool2D = AveragePooling2D
MaxPool2D = MaxPooling2D
max_pool2d = max_pooling2d
avg_pool2d = average_pooling2d
| apache-2.0 |
admcrae/tensorflow | tensorflow/examples/tutorials/mnist/mnist_softmax.py | 73 | 2742 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A very simple MNIST classifier.
See extensive documentation at
https://www.tensorflow.org/get_started/mnist/beginners
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
FLAGS = None
def main(_):
  """Train a softmax-regression MNIST model and print its test accuracy."""
  # Import data (downloaded into FLAGS.data_dir if not already present).
  dataset = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)

  # Model: one affine layer mapping 784 pixels to 10 class logits.
  images = tf.placeholder(tf.float32, [None, 784])
  weights = tf.Variable(tf.zeros([784, 10]))
  biases = tf.Variable(tf.zeros([10]))
  logits = tf.matmul(images, weights) + biases

  # Ground-truth one-hot labels.
  labels = tf.placeholder(tf.float32, [None, 10])

  # The raw cross-entropy formulation,
  #   tf.reduce_mean(-tf.reduce_sum(labels * tf.log(tf.nn.softmax(logits)),
  #                                 reduction_indices=[1]))
  # can be numerically unstable, so softmax_cross_entropy_with_logits is
  # applied to the raw logits and averaged across the batch instead.
  loss = tf.reduce_mean(
      tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits))
  train_step = tf.train.GradientDescentOptimizer(0.5).minimize(loss)

  sess = tf.InteractiveSession()
  tf.global_variables_initializer().run()

  # Train: 1000 SGD steps on mini-batches of 100 examples.
  for _ in range(1000):
    batch_images, batch_labels = dataset.train.next_batch(100)
    sess.run(train_step, feed_dict={images: batch_images,
                                    labels: batch_labels})

  # Evaluate: fraction of test examples whose argmax prediction is correct.
  hits = tf.equal(tf.argmax(logits, 1), tf.argmax(labels, 1))
  accuracy = tf.reduce_mean(tf.cast(hits, tf.float32))
  print(sess.run(accuracy, feed_dict={images: dataset.test.images,
                                      labels: dataset.test.labels}))
if __name__ == '__main__':
  parser = argparse.ArgumentParser()
  parser.add_argument('--data_dir', type=str, default='/tmp/tensorflow/mnist/input_data',
                      help='Directory for storing input data')
  # Unrecognized flags are forwarded untouched to tf.app.run.
  FLAGS, unparsed = parser.parse_known_args()
  tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
| apache-2.0 |
bmcage/stickproject | stick/fiberfabric/fiberfabricprog.py | 1 | 3751 | #!/usr/bin/env python
# Copyright (C) 2012 B. Malengier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Main program that reads in ini file for fiberfabric simulation and decides
how to handle it
"""
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
from __future__ import division
import sys, os, shutil
import getopt
import time
#-------------------------------------------------------------------------
#
# local modules
#
#-------------------------------------------------------------------------
from stick.lib.utils.utils import set_outputdir
import stick.const as const
import stick.fiberfabric.config as conf
#-------------------------------------------------------------------------
#
# Initialization
#
#-------------------------------------------------------------------------
#-------------------------------------------------------------------------
#
# the main prog
#
#-------------------------------------------------------------------------
def main(argv=None):
    """Parse the command line, prepare the output directory and run the
    fiber-fabric model configured by the ini file.

    NOTE: this module is Python 2 only (print statements and the
    ``except E, msg`` syntax below).
    """
    if argv is None:
        argv = sys.argv
    try:
        # Drop the script name when invoked as "python fiberfabricprog.py ...".
        if argv and argv[0].endswith(".py"):
            argv = argv[1:]
        options, leftargs = getopt.getopt(argv,
                        conf.SHORTOPTS, conf.LONGOPTS)
    except getopt.GetoptError, msg:
        print msg
        # return without filling anything if we could not parse the args
        print "Error parsing the arguments: %s " % argv[1:]
        sys.exit(0)
    if leftargs:
        # Positional arguments are not supported; only the options below.
        print 'fiberfabricprog.py does not understand argument %s' % leftargs
        sys.exit(0)

    inifile = conf.INIFILE_DEFAULT
    outputdir = const.DATA_DIR
    writeini = False
    for opt_ix in range(len(options)):
        option, value = options[opt_ix]
        if option in ( '-i', '--inifile'):
            inifile = value
        elif option in ('-o', '--outputdir'):
            outputdir = value
        elif option in ('--write-ini',):
            writeini = True
    #Parse ini file to obtain parameters.
    cfg = conf.FiberFabricConfigManager.get_instance(inifile)
    #create outputdir if not existing
    if not os.path.isdir(outputdir):
        os.mkdir(outputdir)
    #create outputdir for this run, remove if existing
    outputdir = outputdir + os.sep + os.path.basename(inifile)
    if not os.path.isdir(outputdir):
        os.mkdir(outputdir)
    else:
        shutil.rmtree(outputdir)
        os.mkdir(outputdir)
    set_outputdir(outputdir)
    #store the ini file in the outputdir so the experiment can be repeated
    shutil.copy(inifile, outputdir)
    #create the correct model, and run it
    # Imported lazily so the heavy model module loads only when needed.
    from stick.fiberfabric.fiberfabricmodel import FiberFabricModel
    model = FiberFabricModel(cfg)
    if writeini:
        # --write-ini only dumps the normalized configuration and exits.
        print "Writing out ini file cleaned.ini to outputdir %s" % outputdir
        cfg.save(outputdir + os.sep + 'cleaned.ini')
        sys.exit()
    #pass further execution to the model
    model.run()
if __name__ == "__main__":
    # Command-line entry point.
    main()
| gpl-2.0 |
38elements/feedhoos | feedhoos/tests/entry.py | 1 | 3528 | # coding: utf-8
from django.test import TestCase
from feedhoos.worker.models.entry import EntryModel
from feedhoos.worker.models.entry import MAX_CONTENT_LENGTH
import datetime
from feedparser import FeedParserDict
class EntryModelTestCase(TestCase):
    """Unit tests for EntryModel storage, serialization and query helpers.

    The assertions below depend on the rows preloaded from the fixture
    (pk values, feed ids and `updated` timestamps).
    """
    fixtures = [
        'feedhoos/fixtures/entry.json',
    ]

    def setUp(self):
        super(EntryModelTestCase, self).setUp()

    def tearDown(self):
        super(EntryModelTestCase, self).tearDown()

    def test_add(self):
        """EntryModel.add() persists exactly one new row for the feed."""
        before_count = EntryModel.objects.all().count()
        feed_id = 1
        updated_parsed = datetime.date(2013, 11, 23).timetuple()
        entry = FeedParserDict({
            "link": "http://example.con/1",
            "title": "test1 title",
            "updated_parsed": updated_parsed,
            "content": [{"value": "content"}],
            "summary": "summary"
        })
        EntryModel.add(feed_id, entry)
        after_count = EntryModel.objects.all().count()
        self.assertEqual(before_count + 1, after_count)

    def test_dict(self):
        """The `dict` property serializes a row into a plain dictionary."""
        entry_model = EntryModel.objects.get(pk=1)
        result = {
            'id': 1,
            'updated': '2014-02-21 00:03',
            'title': 'title1',
            'url': 'http://example.com/110001',
            'content': 'content1',
            'feed_id': 1
        }
        self.assertEqual(result, entry_model.dict)

    def test_updated_stftime(self):
        """`updated_stftime` formats the timestamp as 'YYYY-MM-DD HH:MM'."""
        entry_model = EntryModel.objects.get(pk=1)
        self.assertEqual('2014-02-21 00:03', entry_model.updated_stftime)

    def test_count(self):
        """count() honors the optional min_updated lower bound."""
        feed_id = 1
        c = EntryModel.count(feed_id)
        self.assertEqual(2, c)
        min_updated = 1392942999
        c = EntryModel.count(feed_id, min_updated)
        self.assertEqual(1, c)

    def test_get_entries(self):
        """get_entries() filters by feed id and optional min_updated."""
        empty = EntryModel.get_entries(1000, 1)
        self.assertEqual(len([]), len(empty))
        entries = EntryModel.get_entries(1, 1)
        self.assertEqual(len(entries), 2)
        self.assertTrue(isinstance(entries[0], EntryModel))
        entries = EntryModel.get_entries(1, 1, 1392942999)
        self.assertEqual(len(entries), 1)

    def test_get_timeline(self):
        """get_timeline() returns EntryModel rows for the given feed."""
        empty = EntryModel.get_timeline(1000, 1)
        self.assertEqual(len([]), len(empty))
        entries = EntryModel.get_timeline(1, 1)
        self.assertEqual(len(entries), 2)
        self.assertTrue(isinstance(entries[0], EntryModel))

    def test_get_folder(self):
        """get_folder() aggregates entries across multiple feed ids."""
        empty = EntryModel.get_folder([1000, 1001], 1)
        self.assertEqual(len([]), len(empty))
        entries = EntryModel.get_folder([1, 2], 1)
        self.assertEqual(len(entries), 3)
        self.assertTrue(isinstance(entries[0], EntryModel))

    def test_get_content(self):
        """get_content() prefers the entry content, falling back to the
        summary when the content exceeds MAX_CONTENT_LENGTH."""
        updated_parsed = datetime.date(2013, 11, 23).timetuple()
        entry = FeedParserDict({
            "link": "http://example.con/1",
            "title": "test1 title",
            "updated_parsed": updated_parsed,
            "content": [{"value": "content"}],
            "summary": "summary"
        })
        content = EntryModel.get_content(entry)
        self.assertEqual(content, "content")
        # Content one character over the limit must fall back to summary.
        entry = FeedParserDict({
            "link": "http://example.con/1",
            "title": "test1 title",
            "updated_parsed": updated_parsed,
            "content": [{"value": "t" * (MAX_CONTENT_LENGTH + 1)}],
            "summary": "summary"
        })
        content = EntryModel.get_content(entry)
        self.assertEqual(content, "summary")
| mit |
cjhak/b2share | invenio/modules/upgrader/upgrades/invenio_2014_01_24_seqSTORE_larger_value.py | 15 | 1180 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql
depends_on = ['invenio_2013_12_04_seqSTORE_larger_value']
def info():
    """Return the one-line, human-readable description of this upgrade."""
    return "Larger values allowed for seqSTORE.seq_value"
def do_upgrade():
    """Widen seqSTORE.seq_value from its previous size to varchar(255)."""
    statement = "ALTER TABLE seqSTORE MODIFY COLUMN seq_value varchar(255);"
    run_sql(statement)
def estimate():
    """Estimate the running time of the upgrade in seconds (optional)."""
    # A single-column ALTER; effectively instantaneous.
    return 1
| gpl-2.0 |
adieu/allbuttonspressed | docutils/transforms/misc.py | 183 | 4882 | # $Id: misc.py 6314 2010-04-26 10:04:17Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
Miscellaneous transforms.
"""
__docformat__ = 'reStructuredText'
from docutils import nodes
from docutils.transforms import Transform, TransformError
class CallBack(Transform):

    """
    Invoke a stored callback when this transform is applied.

    Works on `nodes.pending` elements: the ``details['callback']`` entry
    must be a function or bound method taking the pending node as its only
    parameter.  Extra data may be kept in ``details`` or on the object that
    owns the callback.  The pending node is removed after the call.  When
    the callback fires is determined by this transform's priority.
    """

    default_priority = 990

    def apply(self):
        node = self.startnode
        run_callback = node.details['callback']
        run_callback(node)
        node.parent.remove(node)
class ClassAttribute(Transform):

    """
    Move the "class" attribute specified in the "pending" node into the
    immediately following non-comment element.
    """

    default_priority = 210

    def apply(self):
        pending = self.startnode
        parent = pending.parent
        child = pending
        # Walk up the tree: first look at siblings following the pending
        # node, then (via the for/else) at siblings of each ancestor.
        while parent:
            # Check for appropriate following siblings:
            for index in range(parent.index(child) + 1, len(parent)):
                element = parent[index]
                # Skip invisible nodes and system messages.
                if (isinstance(element, nodes.Invisible) or
                    isinstance(element, nodes.system_message)):
                    continue
                element['classes'] += pending.details['class']
                pending.parent.remove(pending)
                return
            else:
                # At end of section or container; apply to sibling
                child = parent
                parent = parent.parent
        # Reached the tree root without finding a target element.
        error = self.document.reporter.error(
            'No suitable element following "%s" directive'
            % pending.details['directive'],
            nodes.literal_block(pending.rawsource, pending.rawsource),
            line=pending.line)
        pending.replace_self(error)
class Transitions(Transform):

    """
    Move transitions at the end of sections up the tree.  Complain
    on transitions after a title, at the beginning or end of the
    document, and after another transition.

    For example, transform this::

        <section>
            ...
            <transition>
        <section>
            ...

    into this::

        <section>
            ...
        <transition>
        <section>
            ...
    """

    default_priority = 830

    def apply(self):
        for node in self.document.traverse(nodes.transition):
            self.visit_transition(node)

    def visit_transition(self, node):
        index = node.parent.index(node)
        error = None
        # A transition may not open a document or section; a title (and an
        # optional subtitle) are the only elements allowed before it.
        if (index == 0 or
            isinstance(node.parent[0], nodes.title) and
            (index == 1 or
             isinstance(node.parent[1], nodes.subtitle) and
             index == 2)):
            assert (isinstance(node.parent, nodes.document) or
                    isinstance(node.parent, nodes.section))
            error = self.document.reporter.error(
                'Document or section may not begin with a transition.',
                source=node.source, line=node.line)
        elif isinstance(node.parent[index - 1], nodes.transition):
            error = self.document.reporter.error(
                'At least one body element must separate transitions; '
                'adjacent transitions are not allowed.',
                source=node.source, line=node.line)
        if error:
            # Insert before node and update index.
            node.parent.insert(index, error)
            index += 1
        assert index < len(node.parent)
        if index != len(node.parent) - 1:
            # No need to move the node.
            return
        # Node behind which the transition is to be moved.
        sibling = node
        # While sibling is the last node of its parent.
        while index == len(sibling.parent) - 1:
            sibling = sibling.parent
            # If sibling is the whole document (i.e. it has no parent).
            if sibling.parent is None:
                # Transition at the end of document. Do not move the
                # transition up, and place an error behind.
                error = self.document.reporter.error(
                    'Document may not end with a transition.',
                    line=node.line)
                node.parent.insert(node.parent.index(node) + 1, error)
                return
            index = sibling.parent.index(sibling)
        # Remove the original transition node.
        node.parent.remove(node)
        # Insert the transition after the sibling.
        sibling.parent.insert(index + 1, node)
| bsd-3-clause |
AnhellO/DAS_Sistemas | Ene-Jun-2019/juanalmaguer/Extraordinario/Ejercicio 2.py | 1 | 1238 | from abc import ABC, abstractmethod
class Figura(ABC):
    """Abstract base for figures that expose an area."""

    @abstractmethod
    def calcular_area(self):
        """Return the numeric area of the figure."""
        pass


class Rectangulo(Figura):
    """A rectangle defined by its base and height."""

    def __init__(self, base, altura):
        # BUG FIX: was `_init_` (single underscores), so Python never called
        # it as the constructor and instances were left uninitialized.
        self.base = base
        self.altura = altura

    def calcular_area(self):
        """Area of a rectangle: base * height."""
        return self.base * self.altura
class CalculadorArea:
    """Sums the areas of a list of figures."""

    def __init__(self, figuras):
        # BUG FIX: was `_init_` (single underscores); the constructor was
        # never invoked, so `self.figuras` was never assigned.
        assert isinstance(figuras, list)
        self.figuras = figuras

    def suma_areas(self):
        """Return the sum of `calcular_area()` over all stored figures."""
        total = 0
        for figura in self.figuras:
            total += figura.calcular_area()
        return total
# ¿Que sucederia si quisieramos sumar las areas de otros tipos de figuras?
# Se tendría que hacer una clase para esa figura, que herede de la clase 'Figura'.
# y la misma tendría que implementar el metodo 'calcular_area(self)' que devuelva un valor numerico.
# ¿Que tendriamos que hacer para poder sumar volumenes ademas de areas?
# Crearíamos una interfaz 'Cuerpo' o 'Figura3d', que contenga un metodo 'calcular_volumen(self)'.
# y se tendrían que crear clases que hereden de ella
if __name__ == '__main__':
    # BUG FIX: the guard compared against '_main_' (single underscores),
    # which never matches __name__, so this demo code was unreachable.
    # (Also removed trailing non-Python junk fused onto the last line.)
    r = Rectangulo(3, 4)
    r2 = Rectangulo(5, 6)
    r3 = Rectangulo(2, 3.5)
    calc = CalculadorArea([r, r2, r3])
    print(calc.suma_areas())
Kongsea/tensorflow | tensorflow/python/kernel_tests/benchmark_test.py | 116 | 7664 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.importer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import random
from tensorflow.core.util import test_log_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
# Used by SomeRandomBenchmark class below.
# Single-element lists so the benchmark methods can flip the flag in place;
# the tests reset and inspect these to see which methods actually ran.
_ran_somebenchmark_1 = [False]
_ran_somebenchmark_2 = [False]
_ran_somebenchmark_but_shouldnt = [False]
class SomeRandomBenchmark(test.Benchmark):
  """This Benchmark should automatically be registered in the registry."""

  def _dontRunThisBenchmark(self):
    # Private and not "benchmark"-prefixed; the runner must never call it.
    _ran_somebenchmark_but_shouldnt[0] = True

  def notBenchmarkMethod(self):
    # Public but not "benchmark"-prefixed; must also be skipped.
    _ran_somebenchmark_but_shouldnt[0] = True

  def benchmark1(self):
    _ran_somebenchmark_1[0] = True

  def benchmark2(self):
    _ran_somebenchmark_2[0] = True
class TestReportingBenchmark(test.Benchmark):
  """This benchmark (maybe) reports some stuff."""

  def benchmarkReport1(self):
    # Minimal report: iterations only.
    self.report_benchmark(iters=1)

  def benchmarkReport2(self):
    # Report under an explicit name with typed "extras" values.
    self.report_benchmark(
        iters=2,
        name="custom_benchmark_name",
        extras={"number_key": 3,
                "other_key": "string"})

  def benchmark_times_an_op(self):
    # Times a trivial add op and stores a full execution trace.
    with session.Session() as sess:
      a = constant_op.constant(0.0)
      a_plus_a = a + a
      self.run_op_benchmark(
          sess, a_plus_a, min_iters=1000, store_trace=True, name="op_benchmark")
class BenchmarkTest(test.TestCase):
  """Tests for the benchmark registry, runner and report-file machinery.

  Uses ``assertEqual`` throughout; the deprecated ``assertEquals`` alias
  previously used in a few places has been replaced (behavior unchanged).
  """

  def testGlobalBenchmarkRegistry(self):
    # Defining a test.Benchmark subclass registers it globally.
    registry = list(benchmark.GLOBAL_BENCHMARK_REGISTRY)
    self.assertEqual(len(registry), 2)
    self.assertTrue(SomeRandomBenchmark in registry)
    self.assertTrue(TestReportingBenchmark in registry)

  def testRunSomeRandomBenchmark(self):
    # Validate that SomeBenchmark has not run yet
    self.assertFalse(_ran_somebenchmark_1[0])
    self.assertFalse(_ran_somebenchmark_2[0])
    self.assertFalse(_ran_somebenchmark_but_shouldnt[0])

    # Run other benchmarks, but this wont run the one we care about
    benchmark._run_benchmarks("unrelated")

    # Validate that SomeBenchmark has not run yet
    self.assertFalse(_ran_somebenchmark_1[0])
    self.assertFalse(_ran_somebenchmark_2[0])
    self.assertFalse(_ran_somebenchmark_but_shouldnt[0])

    # Run all the benchmarks, avoid generating any reports
    if benchmark.TEST_REPORTER_TEST_ENV in os.environ:
      del os.environ[benchmark.TEST_REPORTER_TEST_ENV]
    benchmark._run_benchmarks("SomeRandom")

    # Validate that SomeRandomBenchmark ran correctly
    self.assertTrue(_ran_somebenchmark_1[0])
    self.assertTrue(_ran_somebenchmark_2[0])
    self.assertFalse(_ran_somebenchmark_but_shouldnt[0])

    # Reset the flags before exercising regex-based selection.
    _ran_somebenchmark_1[0] = False
    _ran_somebenchmark_2[0] = False
    _ran_somebenchmark_but_shouldnt[0] = False

    # Test running a specific method of SomeRandomBenchmark
    if benchmark.TEST_REPORTER_TEST_ENV in os.environ:
      del os.environ[benchmark.TEST_REPORTER_TEST_ENV]
    benchmark._run_benchmarks("SomeRandom.*1$")

    self.assertTrue(_ran_somebenchmark_1[0])
    self.assertFalse(_ran_somebenchmark_2[0])
    self.assertFalse(_ran_somebenchmark_but_shouldnt[0])

  def testReportingBenchmark(self):
    tempdir = test.get_temp_dir()
    try:
      gfile.MakeDirs(tempdir)
    except OSError as e:
      # It's OK if the directory already exists.
      if " exists:" not in str(e):
        raise e

    prefix = os.path.join(tempdir,
                          "reporting_bench_%016x_" % random.getrandbits(64))
    # Report files are named "<prefix><BenchmarkClass>.<entry name>".
    expected_output_file = "%s%s" % (prefix,
                                     "TestReportingBenchmark.benchmarkReport1")
    expected_output_file_2 = "%s%s" % (
        prefix, "TestReportingBenchmark.custom_benchmark_name")
    expected_output_file_3 = "%s%s" % (prefix,
                                       "TestReportingBenchmark.op_benchmark")
    try:
      self.assertFalse(gfile.Exists(expected_output_file))
      # Run benchmark but without env, shouldn't write anything
      if benchmark.TEST_REPORTER_TEST_ENV in os.environ:
        del os.environ[benchmark.TEST_REPORTER_TEST_ENV]
      reporting = TestReportingBenchmark()
      reporting.benchmarkReport1()  # This should run without writing anything
      self.assertFalse(gfile.Exists(expected_output_file))

      # Runbenchmark with env, should write
      os.environ[benchmark.TEST_REPORTER_TEST_ENV] = prefix

      reporting = TestReportingBenchmark()
      reporting.benchmarkReport1()  # This should write
      reporting.benchmarkReport2()  # This should write
      reporting.benchmark_times_an_op()  # This should write

      # Check the files were written
      self.assertTrue(gfile.Exists(expected_output_file))
      self.assertTrue(gfile.Exists(expected_output_file_2))
      self.assertTrue(gfile.Exists(expected_output_file_3))

      # Check the contents are correct
      expected_1 = test_log_pb2.BenchmarkEntry()
      expected_1.name = "TestReportingBenchmark.benchmarkReport1"
      expected_1.iters = 1

      expected_2 = test_log_pb2.BenchmarkEntry()
      expected_2.name = "TestReportingBenchmark.custom_benchmark_name"
      expected_2.iters = 2
      expected_2.extras["number_key"].double_value = 3
      expected_2.extras["other_key"].string_value = "string"

      expected_3 = test_log_pb2.BenchmarkEntry()
      expected_3.name = "TestReportingBenchmark.op_benchmark"
      expected_3.iters = 1000

      def read_benchmark_entry(f):
        # Each report file holds exactly one serialized BenchmarkEntries.
        s = gfile.GFile(f, "rb").read()
        entries = test_log_pb2.BenchmarkEntries.FromString(s)
        self.assertEqual(1, len(entries.entry))
        return entries.entry[0]

      read_benchmark_1 = read_benchmark_entry(expected_output_file)
      self.assertProtoEquals(expected_1, read_benchmark_1)

      read_benchmark_2 = read_benchmark_entry(expected_output_file_2)
      self.assertProtoEquals(expected_2, read_benchmark_2)

      # Wall time is nondeterministic, so compare this entry field by field
      # instead of with assertProtoEquals.
      read_benchmark_3 = read_benchmark_entry(expected_output_file_3)
      self.assertEqual(expected_3.name, read_benchmark_3.name)
      self.assertEqual(expected_3.iters, read_benchmark_3.iters)
      self.assertGreater(read_benchmark_3.wall_time, 0)
      full_trace = read_benchmark_3.extras["full_trace_chrome_format"]
      json_trace = json.loads(full_trace.string_value)

      self.assertTrue(isinstance(json_trace, dict))
      self.assertTrue("traceEvents" in json_trace.keys())
      allocator_keys = [k for k in read_benchmark_3.extras.keys()
                        if k.startswith("allocator_maximum_num_bytes_")]
      self.assertGreater(len(allocator_keys), 0)
      for k in allocator_keys:
        self.assertGreater(read_benchmark_3.extras[k].double_value, 0)
    finally:
      gfile.DeleteRecursively(tempdir)
if __name__ == "__main__":
  # Standard TensorFlow test entry point.
  test.main()
| apache-2.0 |
WeblateOrg/weblate | weblate/accounts/templatetags/site_url.py | 2 | 1680 | #
# Copyright © 2012 - 2021 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
"""Provide user friendly names for social authentication methods."""
from io import StringIO
from django import template
from django.utils.safestring import mark_safe
from lxml import etree
from weblate.utils.site import get_site_url
register = template.Library()
@register.filter
def add_site_url(content):
    """Automatically add site URL to any relative links or images.

    Parses ``content`` as HTML, prefixes every root-relative ``href``/``src``
    (values starting with ``/``) with the configured site URL, and returns
    the re-serialized markup marked safe for template output.
    """
    parser = etree.HTMLParser(collect_ids=False)
    tree = etree.parse(StringIO(content), parser)
    # (tag, attribute) pairs whose URLs should be made absolute.
    for tag, attribute in (("a", "href"), ("img", "src")):
        for element in tree.findall("//{}".format(tag)):
            url = element.get(attribute)
            # BUG FIX: elements lacking the attribute (e.g. <a name="...">)
            # returned None, and None.startswith() raised AttributeError.
            if url and url.startswith("/"):
                element.set(attribute, get_site_url(url))
    return mark_safe(
        etree.tostring(
            tree.getroot(), pretty_print=True, method="html", encoding="unicode"
        )
    )
| gpl-3.0 |
appliedx/edx-platform | common/lib/xmodule/xmodule/tests/__init__.py | 44 | 28179 | """
unittests for xmodule
Run like this:
paver test_lib -l common/lib/xmodule
"""
import inspect
import json
import os
import pprint
import sys
import traceback
import unittest
from contextlib import contextmanager, nested
from functools import wraps
from lazy import lazy
from mock import Mock, patch
from operator import attrgetter
from path import Path as path
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds, Scope, Reference, ReferenceList, ReferenceValueDict
from xmodule.assetstore import AssetMetadata
from xmodule.error_module import ErrorDescriptor
from xmodule.mako_module import MakoDescriptorSystem
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.draft_and_published import DIRECT_ONLY_CATEGORIES, ModuleStoreDraftAndPublished
from xmodule.modulestore.inheritance import InheritanceMixin, own_metadata
from xmodule.modulestore.mongo.draft import DraftModuleStore
from xmodule.modulestore.xml import CourseLocationManager
from xmodule.x_module import ModuleSystem, XModuleDescriptor, XModuleMixin
# Directory containing this test module.
MODULE_DIR = path(__file__).dirname()

# Location of common test DATA directory
# '../../../../edx-platform/common/test/data/'
DATA_DIR = MODULE_DIR.parent.parent.parent.parent / "test" / "data"


# Placeholder open-ended-grading service settings for tests; the
# 'incorrect_user'/'incorrect_pass' values are dummy credentials
# (presumably never expected to authenticate — confirm with consumers).
open_ended_grading_interface = {
    'url': 'blah/',
    'username': 'incorrect_user',
    'password': 'incorrect_pass',
    'staff_grading': 'staff_grading',
    'peer_grading': 'peer_grading',
    'grading_controller': 'grading_controller',
}
class TestModuleSystem(ModuleSystem):  # pylint: disable=abstract-method
    """
    ModuleSystem for testing

    Supplies deterministic fake URLs and default id/field-data services so
    XModules can be exercised without a real runtime.
    """
    def __init__(self, **kwargs):  # pylint: disable=unused-argument
        # One CourseLocationManager serves as both id_reader and
        # id_generator unless the caller provided its own; requires
        # kwargs['course_id'] to be present.
        id_manager = CourseLocationManager(kwargs['course_id'])
        kwargs.setdefault('id_reader', id_manager)
        kwargs.setdefault('id_generator', id_manager)
        kwargs.setdefault('services', {}).setdefault('field-data', DictFieldData({}))
        super(TestModuleSystem, self).__init__(**kwargs)

    def handler_url(self, block, handler, suffix='', query='', thirdparty=False):
        # Deterministic fake URL: "<usage_id>/<handler><suffix>?<query>".
        return '{usage_id}/{handler}{suffix}?{query}'.format(
            usage_id=unicode(block.scope_ids.usage_id),
            handler=handler,
            suffix=suffix,
            query=query,
        )

    def local_resource_url(self, block, uri):
        # Deterministic fake URL: "resource/<usage_id>/<uri>".
        return 'resource/{usage_id}/{uri}'.format(
            usage_id=unicode(block.scope_ids.usage_id),
            uri=uri,
        )

    # Disable XBlockAsides in most tests
    def get_asides(self, block):
        return []

    def __repr__(self):
        """
        Custom hacky repr.
        XBlock.Runtime.render() replaces the _view_name attribute while rendering, which
        causes rendered comparisons of blocks to fail as unequal. So make the _view_name
        attribute None during the base repr - and set it back to original value afterward.
        """
        orig_view_name = None
        if hasattr(self, '_view_name'):
            orig_view_name = self._view_name
        self._view_name = None
        rt_repr = super(TestModuleSystem, self).__repr__()
        self._view_name = orig_view_name
        return rt_repr
def get_test_system(course_id=SlashSeparatedCourseKey('org', 'course', 'run')):
    """
    Construct a test ModuleSystem instance.
    By default, the render_template() method simply returns the repr of the
    context it is passed. You can override this behavior by monkey patching::
        system = get_test_system()
        system.render_template = my_render_func
    where `my_render_func` is a function of the form my_render_func(template, context).
    """
    # NOTE(review): the default course_id is evaluated once at import time;
    # this is fine as long as course keys are immutable.
    user = Mock(name='get_test_system.user', is_staff=False)
    descriptor_system = get_test_descriptor_system()

    def get_module(descriptor):
        """Mocks module_system get_module function"""
        # pylint: disable=protected-access
        # Unlike XBlock Runtimes or DescriptorSystems,
        # each XModule is provided with a new ModuleSystem.
        # Construct one for the new XModule.
        module_system = get_test_system()
        # Descriptors can all share a single DescriptorSystem.
        # So, bind to the same one as the current descriptor.
        module_system.descriptor_runtime = descriptor._runtime  # pylint: disable=protected-access
        descriptor.bind_for_student(module_system, user.id)
        return descriptor

    return TestModuleSystem(
        static_url='/static',
        track_function=Mock(name='get_test_system.track_function'),
        get_module=get_module,
        render_template=mock_render_template,
        replace_urls=str,
        user=user,
        get_real_user=lambda __: user,
        filestore=Mock(name='get_test_system.filestore'),
        debug=True,
        hostname="edx.org",
        xqueue={
            'interface': None,
            'callback_url': '/',
            'default_queuename': 'testqueue',
            'waittime': 10,
            'construct_callback': Mock(name='get_test_system.xqueue.construct_callback', side_effect="/"),
        },
        node_path=os.environ.get("NODE_PATH", "/usr/local/lib/node_modules"),
        anonymous_student_id='student',
        open_ended_grading_interface=open_ended_grading_interface,
        course_id=course_id,
        error_descriptor_class=ErrorDescriptor,
        get_user_role=Mock(name='get_test_system.get_user_role', is_staff=False),
        user_location=Mock(name='get_test_system.user_location'),
        descriptor_runtime=descriptor_system,
    )
def get_test_descriptor_system():
    """
    Construct a test DescriptorSystem instance.

    Returns a MakoDescriptorSystem whose collaborators are all Mocks, except
    for render_template (mock_render_template) and the field data store.
    """
    # One DictFieldData serves as both the runtime's field_data and the
    # 'field-data' service so both access paths see the same storage.
    field_data = DictFieldData({})
    descriptor_system = MakoDescriptorSystem(
        load_item=Mock(name='get_test_descriptor_system.load_item'),
        resources_fs=Mock(name='get_test_descriptor_system.resources_fs'),
        error_tracker=Mock(name='get_test_descriptor_system.error_tracker'),
        render_template=mock_render_template,
        mixins=(InheritanceMixin, XModuleMixin),
        field_data=field_data,
        services={'field-data': field_data},
    )
    # Disable XBlockAsides, same as TestModuleSystem does.
    descriptor_system.get_asides = lambda block: []
    return descriptor_system
def mock_render_template(*args, **kwargs):
    """
    Pretty-print the args and kwargs.
    Allows us to not depend on any actual template rendering mechanism,
    while still returning a unicode object
    """
    rendered = pprint.pformat((args, kwargs))
    # .decode() promotes the Python 2 byte string to unicode.
    return rendered.decode()
class ModelsTest(unittest.TestCase):
    """Sanity-check that XModuleDescriptor.load_class resolves categories."""

    def test_load_class(self):
        # The 'video' category must resolve to the VideoDescriptor class.
        video_class = XModuleDescriptor.load_class('video')
        expected = "<class 'xmodule.video_module.video_module.VideoDescriptor'>"
        self.assertEqual(str(video_class), expected)
class LogicTest(unittest.TestCase):
    """Base class for testing xmodule logic."""
    # Subclasses set this to the descriptor class whose module is under test.
    descriptor_class = None
    # Initial field values for the module, as a plain dict.
    raw_field_data = {}

    def setUp(self):
        super(LogicTest, self).setUp()
        self.system = get_test_system()
        self.descriptor = Mock(name="descriptor", url_name='', category='test')
        self.xmodule_class = self.descriptor_class.module_class
        usage_key = self.system.course_id.make_usage_key(self.descriptor.category, 'test_loc')
        # ScopeIds has 4 fields: user_id, block_type, def_id, usage_id
        scope_ids = ScopeIds(1, self.descriptor.category, usage_key, usage_key)
        # Instantiate the XModule under test with a fresh field-data store.
        self.xmodule = self.xmodule_class(
            self.descriptor, self.system, DictFieldData(self.raw_field_data), scope_ids
        )

    def ajax_request(self, dispatch, data):
        """Call Xmodule.handle_ajax."""
        # handle_ajax returns a JSON string; decode it for the caller.
        return json.loads(self.xmodule.handle_ajax(dispatch, data))
def map_references(value, field, actual_course_key):
    """
    Map the references in value to actual_course_key and return value
    """
    # Falsey values (None, empty containers, 0) carry no references to remap.
    if not value:
        return value

    def _remap(ref):
        """Move a single usage reference into the target course."""
        return ref.map_into_course(actual_course_key)

    if isinstance(field, Reference):
        return _remap(value)
    if isinstance(field, ReferenceList):
        return [_remap(entry) for entry in value]
    if isinstance(field, ReferenceValueDict):
        return {name: _remap(entry) for name, entry in value.iteritems()}
    # Non-reference fields pass through untouched.
    return value
class BulkAssertionError(AssertionError):
    """
    An AssertionError that contains many sub-assertions.
    """

    def __init__(self, assertion_errors):
        # Keep the raw list around so callers can inspect individual failures.
        self.errors = assertion_errors
        combined = "\n\n".join(self.errors)
        message = "The following assertions were raised:\n{}".format(combined)
        super(BulkAssertionError, self).__init__(message)
class _BulkAssertionManager(object):
    """
    This provides a facility for making a large number of assertions, and seeing all of
    the failures at once, rather than only seeing single failures.
    """

    def __init__(self, test_case):
        # Pre-formatted tracebacks captured so far.
        self._assertion_errors = []
        self._test_case = test_case

    def log_error(self, formatted_exc):
        """
        Record ``formatted_exc`` in the set of exceptions captured by this assertion manager.
        """
        self._assertion_errors.append(formatted_exc)

    def raise_assertion_errors(self):
        """
        Raise a BulkAssertionError containing all of the captured AssertionErrors,
        if there were any.
        """
        if not self._assertion_errors:
            return
        raise BulkAssertionError(self._assertion_errors)
class BulkAssertionTest(unittest.TestCase):
    """
    This context manager provides a _BulkAssertionManager to assert with,
    and then calls `raise_assertion_errors` at the end of the block to validate all
    of the assertions.
    """

    def setUp(self, *args, **kwargs):
        super(BulkAssertionTest, self).setUp(*args, **kwargs)
        # Use __ to not pollute the namespace of subclasses with what could be a fairly generic name.
        self.__manager = None

    @contextmanager
    def bulk_assertions(self):
        """
        A context manager that will capture all assertion failures made by self.assert*
        methods within its context, and raise a single combined assertion error at
        the end of the context.
        """
        if self.__manager:
            # Already inside a bulk_assertions block; nesting is a no-op.
            yield
        else:
            try:
                self.__manager = _BulkAssertionManager(self)
                yield
            except Exception:
                # Non-assertion exceptions propagate immediately, unrecorded.
                raise
            else:
                manager = self.__manager
                self.__manager = None
                manager.raise_assertion_errors()

    @contextmanager
    def _capture_assertion_errors(self):
        """
        A context manager that captures any AssertionError raised within it,
        and, if within a ``bulk_assertions`` context, records the captured
        assertion to the bulk assertion manager. If not within a ``bulk_assertions``
        context, just raises the original exception.
        """
        try:
            # Only wrap the first layer of assert functions by stashing away the manager
            # before executing the assertion.
            manager = self.__manager
            self.__manager = None
            yield
        except AssertionError:  # pylint: disable=broad-except
            if manager is not None:
                # Reconstruct the stack in which the error was thrown (so that the traceback)
                # isn't cut off at `assertion(*args, **kwargs)`.
                exc_type, exc_value, exc_tb = sys.exc_info()
                # Count the number of stack frames before you get to a
                # unittest context (walking up the stack from here).
                relevant_frames = 0
                for frame_record in inspect.stack():
                    # This is the same criterion used by unittest to decide if a
                    # stack frame is relevant to exception printing.
                    frame = frame_record[0]
                    if '__unittest' in frame.f_globals:
                        break
                    relevant_frames += 1
                stack_above = traceback.extract_stack()[-relevant_frames:-1]
                stack_below = traceback.extract_tb(exc_tb)
                formatted_stack = traceback.format_list(stack_above + stack_below)
                formatted_exc = traceback.format_exception_only(exc_type, exc_value)
                manager.log_error(
                    "".join(formatted_stack + formatted_exc)
                )
            else:
                raise
        finally:
            # Restore the stashed manager for the next assertion.
            self.__manager = manager

    def _wrap_assertion(self, assertion):
        """
        Wraps an assert* method to capture an immediate exception,
        or to generate a new assertion capturing context (in the case of assertRaises
        and assertRaisesRegexp).
        """
        @wraps(assertion)
        def assert_(*args, **kwargs):
            """
            Execute a captured assertion, and catch any assertion errors raised.
            """
            context = None
            # Run the assertion, and capture any raised assertionErrors
            with self._capture_assertion_errors():
                context = assertion(*args, **kwargs)
            # Handle the assertRaises family of functions by returning
            # a context manager that surrounds the assertRaises
            # with our assertion capturing context manager.
            if context is not None:
                # NOTE(review): contextlib.nested is Python 2 only; this code
                # predates a Python 3 port.
                return nested(self._capture_assertion_errors(), context)
        return assert_

    def __getattribute__(self, name):
        """
        Wrap all assert* methods of this class using self._wrap_assertion,
        to capture all assertion errors in bulk.
        """
        base_attr = super(BulkAssertionTest, self).__getattribute__(name)
        if name.startswith('assert'):
            return self._wrap_assertion(base_attr)
        else:
            return base_attr
class LazyFormat(object):
    """
    A string-like object that defers ``str.format`` work until the value is
    actually rendered in a string context. Useful for assertion messages
    that are expensive to build but rarely shown.
    """
    __slots__ = ('template', 'args', 'kwargs', '_message')

    def __init__(self, template, *args, **kwargs):
        self.template = template
        self.args = args
        self.kwargs = kwargs
        self._message = None

    def __unicode__(self):
        # Format on first use and cache the result.
        message = self._message
        if message is None:
            message = self.template.format(*self.args, **self.kwargs)
            self._message = message
        return message

    def __repr__(self):
        return unicode(self)

    def __len__(self):
        return len(unicode(self))

    def __getitem__(self, index):
        return unicode(self)[index]
class CourseComparisonTest(BulkAssertionTest):
    """
    Mixin that has methods for comparing courses for equality.
    """

    def setUp(self):
        super(CourseComparisonTest, self).setUp()
        # (usage_id, field_name) pairs to skip during field comparison.
        self.field_exclusions = set()
        # Asset-dict keys to drop before comparing assets.
        self.ignored_asset_keys = set()

    def exclude_field(self, usage_id, field_name):
        """
        Mark field ``field_name`` of expected block usage ``usage_id`` as ignored
        Args:
            usage_id (:class:`opaque_keys.edx.UsageKey` or ``None``). If ``None``, skip, this field in all blocks
            field_name (string): The name of the field to skip
        """
        self.field_exclusions.add((usage_id, field_name))

    def ignore_asset_key(self, key_name):
        """
        Add an asset key to the list of keys to be ignored when comparing assets.
        Args:
            key_name: The name of the key to ignore.
        """
        self.ignored_asset_keys.add(key_name)

    def assertReferenceRelativelyEqual(self, reference_field, expected_block, actual_block):
        """
        Assert that the supplied reference field is identical on the expected_block and actual_block,
        assuming that the references are only relative (that is, comparing only on block_type and block_id,
        not course_key).
        """
        def extract_key(usage_key):
            # Reduce a usage key to its course-independent parts.
            if usage_key is None:
                return None
            else:
                return (usage_key.block_type, usage_key.block_id)
        expected = reference_field.read_from(expected_block)
        actual = reference_field.read_from(actual_block)
        if isinstance(reference_field, Reference):
            expected = extract_key(expected)
            actual = extract_key(actual)
        elif isinstance(reference_field, ReferenceList):
            expected = [extract_key(key) for key in expected]
            actual = [extract_key(key) for key in actual]
        elif isinstance(reference_field, ReferenceValueDict):
            expected = {key: extract_key(val) for (key, val) in expected.iteritems()}
            actual = {key: extract_key(val) for (key, val) in actual.iteritems()}
        self.assertEqual(
            expected,
            actual,
            # LazyFormat defers message construction until a failure occurs.
            LazyFormat(
                "Field {} doesn't match between usages {} and {}: {!r} != {!r}",
                reference_field.name,
                expected_block.scope_ids.usage_id,
                actual_block.scope_ids.usage_id,
                expected,
                actual
            )
        )

    def assertBlocksEqualByFields(self, expected_block, actual_block):
        """
        Compare block fields to check for equivalence.
        """
        self.assertEqual(expected_block.fields, actual_block.fields)
        for field in expected_block.fields.values():
            self.assertFieldEqual(field, expected_block, actual_block)

    def assertFieldEqual(self, field, expected_block, actual_block):
        """
        Compare a single block field for equivalence.
        """
        # Reference-type fields embed course keys, so compare them relatively.
        if isinstance(field, (Reference, ReferenceList, ReferenceValueDict)):
            self.assertReferenceRelativelyEqual(field, expected_block, actual_block)
        else:
            expected = field.read_from(expected_block)
            actual = field.read_from(actual_block)
            self.assertEqual(
                expected,
                actual,
                LazyFormat(
                    "Field {} doesn't match between usages {} and {}: {!r} != {!r}",
                    field.name,
                    expected_block.scope_ids.usage_id,
                    actual_block.scope_ids.usage_id,
                    expected,
                    actual
                )
            )

    def assertCoursesEqual(self, expected_store, expected_course_key, actual_store, actual_course_key):
        """
        Assert that the courses identified by ``expected_course_key`` in ``expected_store`` and
        ``actual_course_key`` in ``actual_store`` are identical (ignore differences related
        owing to the course_keys being different).
        Any field value mentioned in ``self.field_exclusions`` by the key (usage_id, field_name)
        will be ignored for the purpose of equality checking.
        """
        # compare published
        with expected_store.branch_setting(ModuleStoreEnum.Branch.published_only, expected_course_key):
            with actual_store.branch_setting(ModuleStoreEnum.Branch.published_only, actual_course_key):
                expected_items = expected_store.get_items(expected_course_key, revision=ModuleStoreEnum.RevisionOption.published_only)
                actual_items = actual_store.get_items(actual_course_key, revision=ModuleStoreEnum.RevisionOption.published_only)
                # Guard against vacuous success: an empty course would "match" anything.
                self.assertGreater(len(expected_items), 0)
                self._assertCoursesEqual(expected_items, actual_items, actual_course_key)
        # if the modulestore supports having a draft branch
        if isinstance(expected_store, ModuleStoreDraftAndPublished):
            with expected_store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, expected_course_key):
                with actual_store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, actual_course_key):
                    # compare draft
                    # Split stores need an explicit draft revision; others use the default.
                    if expected_store.get_modulestore_type(None) == ModuleStoreEnum.Type.split:
                        revision = ModuleStoreEnum.RevisionOption.draft_only
                    else:
                        revision = None
                    expected_items = expected_store.get_items(expected_course_key, revision=revision)
                    if actual_store.get_modulestore_type(None) == ModuleStoreEnum.Type.split:
                        revision = ModuleStoreEnum.RevisionOption.draft_only
                    else:
                        revision = None
                    actual_items = actual_store.get_items(actual_course_key, revision=revision)
                    self._assertCoursesEqual(expected_items, actual_items, actual_course_key, expect_drafts=True)

    def _assertCoursesEqual(self, expected_items, actual_items, actual_course_key, expect_drafts=False):
        """
        Actual algorithm to compare courses.
        """
        # NOTE(review): ``expect_drafts`` is currently unused in this method.
        with self.bulk_assertions():
            self.assertEqual(len(expected_items), len(actual_items))

            def map_key(usage_key):
                # Course-independent identity for a block.
                return (usage_key.block_type, usage_key.block_id)
            actual_item_map = {
                map_key(item.location): item
                for item in actual_items
            }
            # Split Mongo and Old-Mongo disagree about what the block_id of courses is, so skip those in
            # this comparison
            self.assertItemsEqual(
                [map_key(item.location) for item in expected_items if item.scope_ids.block_type != 'course'],
                [key for key in actual_item_map.keys() if key[0] != 'course'],
            )
            for expected_item in expected_items:
                actual_item_location = actual_course_key.make_usage_key(expected_item.category, expected_item.location.block_id)
                # split and old mongo use different names for the course root but we don't know which
                # modulestore actual's come from here; so, assume old mongo and if that fails, assume split
                if expected_item.location.category == 'course':
                    actual_item_location = actual_item_location.replace(name=actual_item_location.run)
                actual_item = actual_item_map.get(map_key(actual_item_location))
                # must be split
                if actual_item is None and expected_item.location.category == 'course':
                    actual_item_location = actual_item_location.replace(name='course')
                    actual_item = actual_item_map.get(map_key(actual_item_location))
                # Formatting the message slows down tests of large courses significantly, so only do it if it would be used
                self.assertIn(map_key(actual_item_location), actual_item_map.keys())
                if actual_item is None:
                    continue
                # compare fields
                self.assertEqual(expected_item.fields, actual_item.fields)
                for field_name, field in expected_item.fields.iteritems():
                    if (expected_item.scope_ids.usage_id, field_name) in self.field_exclusions:
                        continue
                    if (None, field_name) in self.field_exclusions:
                        continue
                    # Children are handled specially
                    if field_name == 'children':
                        continue
                    self.assertFieldEqual(field, expected_item, actual_item)
                # compare children
                self.assertEqual(expected_item.has_children, actual_item.has_children)
                if expected_item.has_children:
                    expected_children = [
                        (expected_item_child.location.block_type, expected_item_child.location.block_id)
                        # get_children() rather than children to strip privates from public parents
                        for expected_item_child in expected_item.get_children()
                    ]
                    actual_children = [
                        (item_child.location.block_type, item_child.location.block_id)
                        # get_children() rather than children to strip privates from public parents
                        for item_child in actual_item.get_children()
                    ]
                    self.assertEqual(expected_children, actual_children)

    def assertAssetEqual(self, expected_course_key, expected_asset, actual_course_key, actual_asset):
        """
        Assert that two assets are equal, allowing for differences related to their being from different courses.

        NOTE: mutates both asset dicts (pops keys, removes ignored keys).
        """
        for key in self.ignored_asset_keys:
            if key in expected_asset:
                del expected_asset[key]
            if key in actual_asset:
                del actual_asset[key]
        expected_key = expected_asset.pop('asset_key')
        actual_key = actual_asset.pop('asset_key')
        # The keys must map onto each other in both directions.
        self.assertEqual(expected_key.map_into_course(actual_course_key), actual_key)
        self.assertEqual(expected_key, actual_key.map_into_course(expected_course_key))
        expected_filename = expected_asset.pop('filename')
        actual_filename = actual_asset.pop('filename')
        self.assertEqual(expected_key.to_deprecated_string(), expected_filename)
        self.assertEqual(actual_key.to_deprecated_string(), actual_filename)
        # Whatever remains after the pops must match exactly.
        self.assertEqual(expected_asset, actual_asset)

    def _assertAssetsEqual(self, expected_course_key, expected_assets, actual_course_key, actual_assets):  # pylint: disable=invalid-name
        """
        Private helper method for assertAssetsEqual
        """
        self.assertEqual(len(expected_assets), len(actual_assets))
        # Index the actual assets by key so lookup doesn't depend on ordering.
        actual_assets_map = {asset['asset_key']: asset for asset in actual_assets}
        for expected_item in expected_assets:
            actual_item = actual_assets_map[expected_item['asset_key'].map_into_course(actual_course_key)]
            self.assertAssetEqual(expected_course_key, expected_item, actual_course_key, actual_item)

    def assertAssetsEqual(self, expected_store, expected_course_key, actual_store, actual_course_key):
        """
        Assert that the course assets identified by ``expected_course_key`` in ``expected_store`` and
        ``actual_course_key`` in ``actual_store`` are identical, allowing for differences related
        to their being from different course keys.
        """
        expected_content, expected_count = expected_store.get_all_content_for_course(expected_course_key)
        actual_content, actual_count = actual_store.get_all_content_for_course(actual_course_key)
        with self.bulk_assertions():
            self.assertEqual(expected_count, actual_count)
            self._assertAssetsEqual(expected_course_key, expected_content, actual_course_key, actual_content)
            # Thumbnails are stored separately from regular content.
            expected_thumbs = expected_store.get_all_content_thumbnails_for_course(expected_course_key)
            actual_thumbs = actual_store.get_all_content_thumbnails_for_course(actual_course_key)
            self._assertAssetsEqual(expected_course_key, expected_thumbs, actual_course_key, actual_thumbs)

    def assertAssetsMetadataEqual(self, expected_modulestore, expected_course_key, actual_modulestore, actual_course_key):
        """
        Assert that the modulestore asset metdata for the ``expected_course_key`` and the ``actual_course_key``
        are equivalent.
        """
        # Sort both sides the same way so positional comparison is valid.
        expected_course_assets = expected_modulestore.get_all_asset_metadata(
            expected_course_key, None, sort=('displayname', ModuleStoreEnum.SortOrder.descending)
        )
        actual_course_assets = actual_modulestore.get_all_asset_metadata(
            actual_course_key, None, sort=('displayname', ModuleStoreEnum.SortOrder.descending)
        )
        self.assertEquals(len(expected_course_assets), len(actual_course_assets))
        for idx, __ in enumerate(expected_course_assets):
            for attr in AssetMetadata.ATTRS_ALLOWED_TO_UPDATE:
                if attr in ('edited_on',):
                    # edited_on is updated upon import.
                    continue
                self.assertEquals(getattr(expected_course_assets[idx], attr), getattr(actual_course_assets[idx], attr))
| agpl-3.0 |
phborba/dsgtoolsop | auxiliar/geographiclib/geodesiccapability.py | 2 | 1406 | """geodesiccapability.py: capability constants for geodesic{,line}.py"""
# geodesiccapability.py
#
# This gathers the capability constants need by geodesic.py and
# geodesicline.py. See the documentation for the GeographicLib::Geodesic class
# for more information at
#
# https://geographiclib.sourceforge.io/html/annotated.html
#
# Copyright (c) Charles Karney (2011-2014) <charles@karney.com> and licensed
# under the MIT/X11 License. For more information, see
# https://geographiclib.sourceforge.io/
######################################################################
class GeodesicCapability(object):
    """
    Capability constants shared between Geodesic and GeodesicLine.

    The low bits (CAP_*) select which internal series coefficients a
    geodesic line must compute and carry; the high bits (LATITUDE,
    LONGITUDE, ...) select which output quantities are requested. Each
    output flag is OR'ed with the capability bits it depends on.
    """
    # Bits 0-4: internal series-coefficient capabilities.
    CAP_NONE = 0
    CAP_C1 = 1 << 0
    CAP_C1p = 1 << 1
    CAP_C2 = 1 << 2
    CAP_C3 = 1 << 3
    CAP_C4 = 1 << 4
    CAP_ALL = 0x1F      # union of all CAP_* bits (bits 0-4)
    CAP_MASK = CAP_ALL
    # Bits 7-14: output selectors; bit 15 is LONG_UNROLL.
    OUT_ALL = 0x7F80
    OUT_MASK = 0xFF80  # Includes LONG_UNROLL
    EMPTY = 0
    LATITUDE = 1 << 7 | CAP_NONE
    LONGITUDE = 1 << 8 | CAP_C3
    AZIMUTH = 1 << 9 | CAP_NONE
    DISTANCE = 1 << 10 | CAP_C1
    # The commonly requested set of outputs.
    STANDARD = LATITUDE | LONGITUDE | AZIMUTH | DISTANCE
    DISTANCE_IN = 1 << 11 | CAP_C1 | CAP_C1p
    REDUCEDLENGTH = 1 << 12 | CAP_C1 | CAP_C2
    GEODESICSCALE = 1 << 13 | CAP_C1 | CAP_C2
    AREA = 1 << 14 | CAP_C4
    LONG_UNROLL = 1 << 15
    ALL = OUT_ALL | CAP_ALL  # Does not include LONG_UNROLL
| gpl-2.0 |
pudo/aleph | aleph/settings.py | 1 | 5037 | # Configuration defaults.
#
# You should never edit this file directly for deployment or in the developer
# setup. Wherever possible use environment variables to override the
# defaults.
import os
import uuid
from servicelayer import env
from flask_babel import lazy_gettext
# Aleph configuration defaults. Every value can be overridden through the
# corresponding ALEPH_* environment variable via ``servicelayer.env``.

# Show error messages to the user.
DEBUG = env.to_bool('ALEPH_DEBUG', False)
# Propose HTTP caching to the user agents.
CACHE = env.to_bool('ALEPH_CACHE', not DEBUG)
# Puts the system into read-only mode and displays a warning.
MAINTENANCE = env.to_bool('ALEPH_MAINTENANCE', False)
# Unit test context.
TESTING = False

###############################################################################
# General instance information
APP_TITLE = env.get('ALEPH_APP_TITLE', lazy_gettext('Aleph'))
APP_DESCRIPTION = env.get('ALEPH_APP_DESCRIPTION', '')
APP_NAME = env.get('ALEPH_APP_NAME', 'aleph')
APP_UI_URL = env.get('ALEPH_UI_URL', 'http://localhost:8080/')
APP_LOGO = env.get('ALEPH_LOGO', '/static/logo.png')
APP_FAVICON = env.get('ALEPH_FAVICON', '/static/favicon.png')
# Show a system-wide banner in the user interface.
APP_BANNER = env.get('ALEPH_APP_BANNER')
# Shown on the home page as a few sample queries:
SAMPLE_SEARCHES = [lazy_gettext('TeliaSonera'), lazy_gettext('Vladimir Putin')]
SAMPLE_SEARCHES = env.to_list('ALEPH_SAMPLE_SEARCHES', SAMPLE_SEARCHES)
# Force HTTPS here:
FORCE_HTTPS = env.to_bool('ALEPH_FORCE_HTTPS', False)
# Content security policy:
CONTENT_POLICY = 'default-src: \'self\' \'unsafe-inline\' \'unsafe-eval\' data: *'  # noqa
CONTENT_POLICY = env.get('ALEPH_CONTENT_POLICY', CONTENT_POLICY)
# Cross-origin resource sharing
# (pipe-separated so origins containing commas are representable)
CORS_ORIGINS = env.to_list('ALEPH_CORS_ORIGINS', ['*'], separator='|')

##############################################################################
# Security and authentication.
# Required: set a secret key
SECRET_KEY = env.get('ALEPH_SECRET_KEY')
# A process identifier (unique per worker process, regenerated on restart)
PROCESS_ID = uuid.uuid4().hex
# Designate users with the given email as admins automatically:
# Assumes a comma-separated list.
ADMINS = env.to_list('ALEPH_ADMINS')
# Set the foreign ID of the default system user.
SYSTEM_USER = env.get('ALEPH_SYSTEM_USER', 'system:aleph')
# Configure your OAUTH login provider, providing the details as described in
# https://flask-oauthlib.readthedocs.io/en/latest/client.html
#
OAUTH = env.to_bool('ALEPH_OAUTH', False)
OAUTH_NAME = env.get('ALEPH_OAUTH_NAME', 'google')
OAUTH_KEY = env.get('ALEPH_OAUTH_KEY')
OAUTH_SECRET = env.get('ALEPH_OAUTH_SECRET')
OAUTH_SCOPE = env.get('ALEPH_OAUTH_SCOPE')
OAUTH_BASE_URL = env.get('ALEPH_OAUTH_BASE_URL')
OAUTH_REQUEST_TOKEN_URL = env.get('ALEPH_OAUTH_REQUEST_TOKEN_URL')
OAUTH_TOKEN_METHOD = env.get('ALEPH_OAUTH_TOKEN_METHOD', 'POST')
OAUTH_TOKEN_URL = env.get('ALEPH_OAUTH_TOKEN_URL')
OAUTH_AUTHORIZE_URL = env.get('ALEPH_OAUTH_AUTHORIZE_URL')
OAUTH_UI_CALLBACK = env.get('ALEPH_OAUTH_UI_CALLBACK', '/oauth')
# Disable password-based authentication for SSO settings:
PASSWORD_LOGIN = env.to_bool('ALEPH_PASSWORD_LOGIN', not OAUTH)

###############################################################################
# Content processing options
DEFAULT_LANGUAGE = env.get('ALEPH_DEFAULT_LANGUAGE', 'en')
# User interface
UI_LANGUAGES = ['ru', 'es', 'de', 'bs', 'en']
UI_LANGUAGES = env.to_list('ALEPH_UI_LANGUAGES', UI_LANGUAGES)
# Normalise language codes to bare lowercase form.
UI_LANGUAGES = [l.lower().strip() for l in UI_LANGUAGES]
# Geonames data file
GEONAMES_DATA = env.get('ALEPH_GEONAMES_DATA')
# FastText lid model file (language identification)
LID_MODEL_PATH = env.get('ALEPH_LID_MODEL_PATH', '/aleph/contrib/lid.176.ftz')
# Disable entity extraction
TAG_ENTITIES = env.to_bool('ALEPH_TAG_ENTITIES', True)
# List available NER models
NER_MODELS = set(env.to_list('ALEPH_NER_MODELS', ['eng']))
NER_DEFAULT_MODEL = 'xx'
# Result high-lighting
RESULT_HIGHLIGHT = env.to_bool('ALEPH_RESULT_HIGHLIGHT', True)
# Minimum update date for sitemap.xml
SITEMAP_FLOOR = '2019-06-22'

##############################################################################
# E-mail settings
MAIL_FROM = env.get('ALEPH_MAIL_FROM', 'aleph@domain.com')
MAIL_SERVER = env.get('ALEPH_MAIL_HOST', 'localhost')
MAIL_USERNAME = env.get('ALEPH_MAIL_USERNAME')
MAIL_PASSWORD = env.get('ALEPH_MAIL_PASSWORD')
MAIL_USE_SSL = env.to_bool('ALEPH_MAIL_SSL', False)
MAIL_USE_TLS = env.to_bool('ALEPH_MAIL_TLS', True)
MAIL_PORT = env.to_int('ALEPH_MAIL_PORT', 465)
MAIL_DEBUG = env.to_bool('ALEPH_MAIL_DEBUG', DEBUG)

###############################################################################
# Database and search index
DATABASE_URI = env.get('ALEPH_DATABASE_URI')
SQLALCHEMY_TRACK_MODIFICATIONS = False
ALEMBIC_DIR = os.path.join(os.path.dirname(__file__), 'migrate')
ALEMBIC_DIR = os.path.abspath(ALEMBIC_DIR)
ELASTICSEARCH_URL = env.get('ALEPH_ELASTICSEARCH_URI', 'http://localhost:9200')
ELASTICSEARCH_TIMEOUT = env.to_int('ELASTICSEARCH_TIMEOUT', 30)
INDEX_PREFIX = env.get('ALEPH_INDEX_PREFIX', APP_NAME)
INDEX_WRITE = env.get('ALEPH_INDEX_WRITE', 'v1')
INDEX_READ = env.to_list('ALEPH_INDEX_READ', [INDEX_WRITE])
| mit |
karmix/anaconda | pyanaconda/bootloader.py | 2 | 89401 | # bootloader.py
# Anaconda's bootloader configuration module.
#
# Copyright (C) 2011 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Lehman <dlehman@redhat.com>
# Matthew Miller <mattdm@redhat.com> (extlinux portion)
#
import collections
import os
import re
import blivet
from parted import PARTITION_BIOS_GRUB
from glob import glob
from itertools import chain
from pyanaconda import iutil
from pyanaconda.iutil import open # pylint: disable=redefined-builtin
from blivet.devicelibs import raid
from pyanaconda.product import productName
from pyanaconda.flags import flags, can_touch_runtime_system
from blivet.fcoe import fcoe
import pyanaconda.network
from pyanaconda.errors import errorHandler, ERROR_RAISE, ZIPLError
from pyanaconda.packaging.rpmostreepayload import RPMOSTreePayload
from pyanaconda.nm import nm_device_hwaddress
from blivet import platform
from blivet.size import Size
from pyanaconda.i18n import _, N_
from pyanaconda.orderedset import OrderedSet
import logging
# Shared "anaconda" logger used throughout this module.
log = logging.getLogger("anaconda")
class serial_opts(object):
    """Plain holder for the parsed pieces of a kernel serial console spec."""

    def __init__(self):
        # All fields start unset; parse_serial_opt fills in whatever the
        # option string actually specifies.
        for attr in ("speed", "parity", "word", "stop", "flow"):
            setattr(self, attr, None)


def parse_serial_opt(arg):
    """
    Parse and split serial console options.
    .. NOTE::
        Documentation/kernel-parameters.txt says:
            ttyS<n>[,options]
                Use the specified serial port. The options are of
                the form "bbbbpnf", where "bbbb" is the baud rate,
                "p" is parity ("n", "o", or "e"), "n" is number of
                bits, and "f" is flow control ("r" for RTS or
                omit it). Default is "9600n8".
        but note that everything after the baud rate is optional, so these are
        all valid: 9600, 19200n, 38400n8, 9600e7r.
        Also note that the kernel assumes 1 stop bit; this can't be changed.
    """
    opts = serial_opts()
    match = re.match(r'\d+', arg)
    if match is None:
        # No leading baud rate -> nothing we can parse; all fields stay None.
        return opts
    opts.speed = match.group()
    # Everything after the baud rate is optional and positional:
    # parity character, then word size, then flow control.
    trailing = arg[len(opts.speed):]
    if len(trailing) > 0:
        opts.parity = trailing[0]
    if len(trailing) > 1:
        opts.word = trailing[1]
    if len(trailing) > 2:
        opts.flow = trailing[2]
    return opts
def _is_on_iscsi(device):
    """Tells whether a given device is on an iSCSI disk or not."""
    # True only when every backing disk is an iSCSI disk (vacuously true
    # for a device with no disks, matching all()'s semantics).
    for disk in device.disks:
        if not isinstance(disk, blivet.devices.iScsiDiskDevice):
            return False
    return True
def _is_on_ibft(device):
    """Tells whether a given device is ibft disk or not."""
    # Every backing disk must advertise a truthy "ibft" attribute; disks
    # without the attribute count as non-iBFT.
    for disk in device.disks:
        if not getattr(disk, "ibft", False):
            return False
    return True
class BootLoaderError(Exception):
    """Generic error raised by the bootloader configuration code."""
class Arguments(OrderedSet):
    """Ordered, de-duplicated collection of kernel boot arguments."""

    def _merge_ip(self):
        """
        Find ip= arguments targeting the same interface and merge them.
        """
        # partition the input
        def partition_p(arg):
            # we are only interested in ip= parameters that use some kind of
            # automatic network setup (exactly one colon, i.e. "ip=<nic>:<cfg>"):
            return arg.startswith("ip=") and arg.count(":") == 1
        ip_params = filter(partition_p, self)
        rest = OrderedSet(filter(lambda p: not partition_p(p), self))
        # split at the colon:
        ip_params = map(lambda p: p.split(":"), ip_params)
        # create mapping from nics to their configurations
        config = collections.defaultdict(list)
        for (nic, cfg) in ip_params:
            config[nic].append(cfg)
        # generate the new parameters: one "nic:cfg1,cfg2,..." per interface,
        # with the configurations sorted for determinism
        ip_params = set()
        for nic in config:
            ip_params.add("%s:%s" % (nic, ",".join(sorted(config[nic]))))
        # update the set
        self.clear()
        self.update(rest)
        self.update(ip_params)
        return self

    def __str__(self):
        # Merging happens lazily, only when the arguments are rendered.
        self._merge_ip()
        return " ".join(list(self))

    def add(self, key):
        # Re-adding an existing key moves it to the end (discard first, then add).
        self.discard(key)
        super(Arguments, self).add(key)

    def update(self, other):
        # Same move-to-end semantics as add(), applied element-wise.
        for key in other:
            self.discard(key)
            self.add(key)
class BootLoaderImage(object):
    """ Base class for bootloader images. Suitable for non-linux OS images. """

    def __init__(self, device=None, label=None, short=None):
        self.device = device       # device the image lives on
        self.label = label         # full boot-menu label
        self.short_label = short   # abbreviated label
class LinuxBootLoaderImage(BootLoaderImage):
    """A bootable Linux image: a kernel plus initramfs on some device."""

    def __init__(self, device=None, label=None, short=None, version=None):
        # Forward label/short/device handling to the base class. The original
        # dropped ``short`` from the super() call and compensated with
        # redundant reassignments of label/short_label/device afterwards.
        super(LinuxBootLoaderImage, self).__init__(device=device,
                                                   label=label,
                                                   short=short)
        self.version = version  # kernel version string
        self._kernel = None     # explicit kernel filename override
        self._initrd = None     # explicit initrd filename override

    @property
    def kernel(self):
        """Kernel filename; derived from the version unless set explicitly."""
        filename = self._kernel
        if self.version and not filename:
            filename = "vmlinuz-%s" % self.version
        return filename

    @property
    def initrd(self):
        """Initramfs filename; derived from the version unless set explicitly."""
        filename = self._initrd
        if self.version and not filename:
            filename = "initramfs-%s.img" % self.version
        return filename
class TbootLinuxBootLoaderImage(LinuxBootLoaderImage):
    """A Linux image booted through tboot (trusted boot) via multiboot."""
    _multiboot = "tboot.gz"                     # multiboot kernel filename
    _mbargs = ["logging=vga,serial,memory"]     # multiboot argument list
    _args = ["intel_iommu=on"]                  # extra kernel arguments
    def __init__(self, device=None, label=None, short=None, version=None):
        super(TbootLinuxBootLoaderImage, self).__init__(device=device,
                                                        label=label,
                                                        short=short,
                                                        version=version)
    @property
    def multiboot(self):
        """Filename of the multiboot (tboot) kernel."""
        return self._multiboot
    @property
    def mbargs(self):
        """Arguments passed to the multiboot kernel."""
        return self._mbargs
    @property
    def args(self):
        """Extra arguments appended to the Linux kernel command line."""
        return self._args
class BootLoader(object):
    """Abstract base class for boot loader configuration/install drivers.

    Subclasses (GRUB, GRUB2, ...) override the class attributes below and
    the write/install hooks to describe one concrete boot loader.
    """
    name = "Generic Bootloader"
    # packages required on the installed system for this boot loader
    packages = []
    # path to the config file; None means "no config file" (write_config raises)
    config_file = None
    config_file_mode = 0o600
    can_dual_boot = False
    can_update = False
    # name of the image attribute used as its menu label (see image_label)
    image_label_attr = "label"
    # whether stage1/stage2 may live on encrypted block devices
    encryption_support = False
    # whether the stage2 device may double as the stage1 target
    stage2_is_valid_stage1 = False
    # requirements for stage2 devices
    stage2_device = None
    stage2_device_types = []
    stage2_raid_levels = []
    stage2_raid_metadata = []
    stage2_raid_member_types = []
    stage2_mountpoints = ["/boot", "/"]
    stage2_bootable = False
    stage2_must_be_primary = True
    stage2_description = N_("/boot file system")
    # stage2 must end within this distance from the start of the disk
    stage2_max_end = Size("2 TiB")
    @property
    def stage2_format_types(self):
        # filesystem types acceptable for the stage2 (/boot) device
        return ["ext4", "ext3", "ext2"]
    # this is so stupid...
    global_preserve_args = ["speakup_synth", "apic", "noapic", "apm", "ide",
                            "noht", "acpi", "video", "pci", "nodmraid",
                            "nompath", "nomodeset", "noiswmd", "fips",
                            "selinux", "biosdevname", "ipv6.disable",
                            "net.ifnames"]
    # per-subclass additions to global_preserve_args
    preserve_args = []
    _trusted_boot = False
    def __init__(self):
        """Initialize generic boot loader state; subclasses call via super()."""
        # kernel command line argument sets (Arguments de-duplicates/orders)
        self.boot_args = Arguments()
        self.dracut_args = Arguments()
        self.disks = []
        self._disk_order = []
        # timeout in seconds
        self._timeout = None
        self.password = None
        # console/serial stuff
        self.console = ""
        self.console_options = ""
        self._set_console()
        # list of BootLoaderImage instances representing bootable OSs
        self.linux_images = []
        self.chain_images = []
        # default image
        self._default_image = None
        self._update_only = False
        self.skip_bootloader = False
        self.errors = []
        self.warnings = []
        # initialize stage1/stage2 selection state
        self.reset()
def reset(self):
""" Reset stage1 and stage2 values """
# the device the bootloader will be installed on
self.stage1_device = None
# the "boot disk", meaning the disk stage1 _will_ go on
self.stage1_disk = None
self.stage2_device = None
self.stage2_is_preferred_stage1 = False
self.errors = []
self.problems = []
self.warnings = []
#
# disk list access
#
@property
def disk_order(self):
"""Potentially partial order for disks."""
return self._disk_order
@disk_order.setter
def disk_order(self, order):
log.debug("new disk order: %s", order)
self._disk_order = order
if self.disks:
self._sort_disks()
def _sort_disks(self):
"""Sort the internal disk list. """
for name in reversed(self.disk_order):
try:
idx = [d.name for d in self.disks].index(name)
except ValueError:
log.error("bios order specified unknown disk %s", name)
continue
self.disks.insert(0, self.disks.pop(idx))
def set_disk_list(self, disks):
self.disks = disks[:]
self._sort_disks()
#
# image list access
#
@property
def default(self):
"""The default image."""
if not self._default_image and self.linux_images:
self._default_image = self.linux_images[0]
return self._default_image
@default.setter
def default(self, image):
if image not in self.images:
raise ValueError("new default image not in image list")
log.debug("new default image: %s", image)
self._default_image = image
@property
def images(self):
""" List of OS images that will be included in the configuration. """
all_images = self.linux_images
all_images.extend(i for i in self.chain_images if i.label)
return all_images
def clear_images(self):
"""Empty out the image list."""
self.linux_images = []
self.chain_images = []
def add_image(self, image):
"""Add a BootLoaderImage instance to the image list."""
if isinstance(image, LinuxBootLoaderImage):
self.linux_images.append(image)
else:
self.chain_images.append(image)
def image_label(self, image):
"""Return the appropriate image label for this bootloader."""
return getattr(image, self.image_label_attr)
#
# platform-specific data access
#
    @property
    def disklabel_types(self):
        # disklabel types supported by the current platform
        return platform.platform._disklabel_types
    @property
    def device_descriptions(self):
        # mapping of stage1 device types to human-readable descriptions,
        # taken from the platform's stage1 constraint dict
        return platform.platform.bootStage1ConstraintDict["descriptions"]
#
# constraint checking for target devices
#
    def _is_valid_md(self, device, raid_levels=None,
                     metadata=None, member_types=None, desc=""):
        """ Validate an md array against raid level/metadata/member-type
            constraints.  Non-md devices pass trivially.  Appends messages
            to self.errors and returns False on any violation.
        """
        ret = True
        if device.type != "mdarray":
            return ret
        if raid_levels and device.level not in raid_levels:
            levels_str = ",".join("%s" % l for l in raid_levels)
            self.errors.append(_("RAID sets that contain '%(desc)s' must have one "
                                 "of the following raid levels: %(raid_level)s.")
                               % {"desc" : desc, "raid_level" : levels_str})
            ret = False
        # new arrays will be created with an appropriate metadata format
        if device.exists and \
           metadata and device.metadataVersion not in metadata:
            self.errors.append(_("RAID sets that contain '%(desc)s' must have one "
                                 "of the following metadata versions: %(metadata_versions)s.")
                               % {"desc": desc, "metadata_versions": ",".join(metadata)})
            ret = False
        if member_types:
            # every member of the array must be an acceptable device type
            for member in device.members:
                if not self._device_type_match(member, member_types):
                    self.errors.append(_("RAID sets that contain '%(desc)s' must "
                                         "have one of the following device "
                                         "types: %(types)s.")
                                       % {"desc" : desc, "types" : ",".join(member_types)})
                    ret = False
        log.debug("_is_valid_md(%s) returning %s", device.name, ret)
        return ret
def _is_valid_disklabel(self, device, disklabel_types=None):
ret = True
if self.disklabel_types:
for disk in device.disks:
label_type = getattr(disk.format, "labelType", None)
if not label_type or label_type not in self.disklabel_types:
types_str = ",".join(disklabel_types)
self.errors.append(_("%(name)s must have one of the following "
"disklabel types: %(types)s.")
% {"name" : device.name, "types" : types_str})
ret = False
log.debug("_is_valid_disklabel(%s) returning %s", device.name, ret)
return ret
def _is_valid_format(self, device, format_types=None, mountpoints=None,
desc=""):
ret = True
if format_types and device.format.type not in format_types:
self.errors.append(_("%(desc)s cannot be of type %(type)s.")
% {"desc" : desc, "type" : device.format.type})
ret = False
if mountpoints and hasattr(device.format, "mountpoint") \
and device.format.mountpoint not in mountpoints:
self.errors.append(_("%(desc)s must be mounted on one of %(mountpoints)s.")
% {"desc" : desc, "mountpoints" : ", ".join(mountpoints)})
ret = False
log.debug("_is_valid_format(%s) returning %s", device.name, ret)
return ret
def _is_valid_size(self, device, desc=""):
ret = True
msg = None
errors = []
if device.format.minSize and device.format.maxSize:
msg = (_("%(desc)s must be between %(min)d and %(max)d MB in size")
% {"desc" : desc, "min" : device.format.minSize,
"max" : device.format.maxSize})
if device.format.minSize and device.size < device.format.minSize:
if msg is None:
errors.append(_("%(desc)s must not be smaller than %(min)dMB.")
% {"desc" : desc, "min" : device.format.minSize})
else:
errors.append(msg)
ret = False
if device.format.maxSize and device.size > device.format.maxSize:
if msg is None:
errors.append(_("%(desc)s must not be larger than %(max)dMB.")
% {"desc" : desc, "max" : device.format.maxSize})
elif msg not in errors:
# don't add the same error string twice
errors.append(msg)
ret = False
log.debug("_is_valid_size(%s) returning %s", device.name, ret)
return ret
def _is_valid_location(self, device, max_end=None, desc=""):
ret = True
if max_end and device.type == "partition" and device.partedPartition:
end_sector = device.partedPartition.geometry.end
sector_size = device.partedPartition.disk.device.sectorSize
end = Size(sector_size * end_sector)
if end > max_end:
self.errors.append(_("%(desc)s must be within the first %(max_end)s of "
"the disk.") % {"desc": desc, "max_end": max_end})
ret = False
log.debug("_is_valid_location(%s) returning %s", device.name, ret)
return ret
def _is_valid_partition(self, device, primary=None, desc=""):
ret = True
if device.type == "partition" and primary and not device.isPrimary:
self.errors.append(_("%s must be on a primary partition.") % desc)
ret = False
log.debug("_is_valid_partition(%s) returning %s", device.name, ret)
return ret
#
# target/stage1 device access
#
def _device_type_index(self, device, types):
""" Return the index of the matching type in types to device's type.
Return None if no match is found. """
index = None
try:
index = types.index(device.type)
except ValueError:
if "disk" in types and device.isDisk:
index = types.index("disk")
return index
def _device_type_match(self, device, types):
""" Return True if device is of one of the types in the list types. """
return self._device_type_index(device, types) is not None
def device_description(self, device):
device_types = list(self.device_descriptions.keys())
idx = self._device_type_index(device, device_types)
if idx is None:
raise ValueError("No description available for %s" % device.type)
# this looks unnecessarily complicated, but it handles the various
# device types that we treat as disks
return self.device_descriptions[device_types[idx]]
def set_preferred_stage1_type(self, preferred):
""" Set a preferred type of stage1 device. """
if not self.stage2_is_valid_stage1:
# "partition" means first sector of stage2 and is only meaningful
# for bootloaders that can use stage2 as stage1
return
if preferred == "mbr":
# "mbr" is already the default
return
# partition means "use the stage2 device for a stage1 device"
self.stage2_is_preferred_stage1 = True
    def is_valid_stage1_device(self, device, early=False):
        """ Return True if the device is a valid stage1 target device.

        Also collect lists of errors and warnings.

        The criteria for being a valid stage1 target device vary from
        platform to platform. On some platforms a disk with an msdos
        disklabel is a valid stage1 target, while some platforms require
        a special device. Some examples of these special devices are EFI
        system partitions on EFI machines, PReP boot partitions on
        iSeries, and Apple bootstrap partitions on Mac.

        The 'early' keyword argument is a boolean flag indicating whether
        or not this check is being performed at a point where the mountpoint
        cannot be expected to be set for things like EFI system partitions.
        """
        self.errors = []
        self.warnings = []
        valid = True
        constraint = platform.platform.bootStage1ConstraintDict
        if device is None:
            return False
        if not self._device_type_match(device, constraint["device_types"]):
            log.debug("stage1 device cannot be of type %s", device.type)
            return False
        if _is_on_iscsi(device) and not _is_on_ibft(device):
            log.debug("stage1 device cannot be on an iSCSI disk")
            return False
        description = self.device_description(device)
        if self.stage2_is_valid_stage1 and device == self.stage2_device:
            # special case
            valid = (self.stage2_is_preferred_stage1 and
                     self.is_valid_stage2_device(device))
            # we'll be checking stage2 separately so don't duplicate messages
            # NOTE(review): this clears self.problems rather than self.errors,
            # so errors appended by the stage2 check above persist -- confirm
            # that is intended
            self.problems = []
            self.warnings = []
            return valid
        if device.protected:
            valid = False
        if not self._is_valid_disklabel(device,
                                        disklabel_types=self.disklabel_types):
            valid = False
        if not self._is_valid_size(device, desc=description):
            valid = False
        if not self._is_valid_location(device,
                                       max_end=constraint["max_end"],
                                       desc=description):
            valid = False
        if not self._is_valid_md(device,
                                 raid_levels=constraint["raid_levels"],
                                 metadata=constraint["raid_metadata"],
                                 member_types=constraint["raid_member_types"],
                                 desc=description):
            valid = False
        # a non-bootable device only warns; it does not invalidate the choice
        if not self.stage2_bootable and not getattr(device, "bootable", True):
            log.warning("%s not bootable", device.name)
        # XXX does this need to be here?
        if getattr(device.format, "label", None) in ("ANACONDA", "LIVE"):
            log.info("ignoring anaconda boot disk")
            valid = False
        if early:
            mountpoints = []
        else:
            mountpoints = constraint["mountpoints"]
        if not self._is_valid_format(device,
                                     format_types=constraint["format_types"],
                                     mountpoints=mountpoints,
                                     desc=description):
            valid = False
        if not self.encryption_support and device.encrypted:
            self.errors.append(_("%s cannot be on an encrypted block "
                                 "device.") % description)
            valid = False
        log.debug("is_valid_stage1_device(%s) returning %s", device.name, valid)
        return valid
def set_stage1_device(self, devices):
self.stage1_device = None
if not self.stage1_disk:
self.reset()
raise BootLoaderError("need stage1 disk to set stage1 device")
if self.stage2_is_preferred_stage1:
self.stage1_device = self.stage2_device
return
for device in devices:
if self.stage1_disk not in device.disks:
continue
if self.is_valid_stage1_device(device):
if flags.imageInstall and device.isDisk:
# GRUB2 will install to /dev/loop0 but not to
# /dev/mapper/<image_name>
self.stage1_device = device.parents[0]
else:
self.stage1_device = device
break
if not self.stage1_device:
self.reset()
raise BootLoaderError("failed to find a suitable stage1 device")
#
# boot/stage2 device access
#
    def is_valid_stage2_device(self, device, linux=True, non_linux=False):
        """ Return True if the device is suitable as a stage2 target device.

        Also collect lists of errors and warnings.

        Resets self.errors and self.warnings as a side effect.

        :param device: the candidate stage2 device (None -> False)
        :param bool linux: apply the linux format/mountpoint constraints
        :param bool non_linux: apply the platform's non-linux format types
        """
        self.errors = []
        self.warnings = []
        valid = True
        if device is None:
            return False
        if device.protected:
            # protected devices (e.g. the install source) must not be touched
            valid = False
        if _is_on_iscsi(device) and not _is_on_ibft(device):
            self.errors.append(_("%s cannot be on an iSCSI disk") % self.stage2_description)
            valid = False
        if not self._device_type_match(device, self.stage2_device_types):
            self.errors.append(_("%(desc)s cannot be of type %(type)s")
                               % {"desc" : _(self.stage2_description), "type" : device.type})
            valid = False
        if not self._is_valid_disklabel(device,
                                        disklabel_types=self.disklabel_types):
            valid = False
        if not self._is_valid_size(device, desc=_(self.stage2_description)):
            valid = False
        if self.stage2_max_end and not self._is_valid_location(device,
                                                               max_end=self.stage2_max_end,
                                                               desc=_(self.stage2_description)):
            valid = False
        if not self._is_valid_partition(device,
                                        primary=self.stage2_must_be_primary):
            valid = False
        if not self._is_valid_md(device,
                                 raid_levels=self.stage2_raid_levels,
                                 metadata=self.stage2_raid_metadata,
                                 member_types=self.stage2_raid_member_types,
                                 desc=_(self.stage2_description)):
            valid = False
        if linux and \
           not self._is_valid_format(device,
                                     format_types=self.stage2_format_types,
                                     mountpoints=self.stage2_mountpoints,
                                     desc=_(self.stage2_description)):
            valid = False
        non_linux_format_types = platform.platform._non_linux_format_types
        if non_linux and \
           not self._is_valid_format(device,
                                     format_types=non_linux_format_types):
            valid = False
        if not self.encryption_support and device.encrypted:
            self.errors.append(_("%s cannot be on an encrypted block "
                                 "device.") % _(self.stage2_description))
            valid = False
        log.debug("is_valid_stage2_device(%s) returning %s", device.name, valid)
        return valid
#
# miscellaneous
#
def has_windows(self, devices):
return False
@property
def timeout(self):
"""Bootloader timeout in seconds."""
if self._timeout is not None:
t = self._timeout
else:
t = 5
return t
def check(self):
""" Run additional bootloader checks """
return True
@timeout.setter
def timeout(self, seconds):
self._timeout = seconds
    @property
    def update_only(self):
        # whether write() should only update an existing config (no install)
        return self._update_only
    @update_only.setter
    def update_only(self, value):
        # NOTE(review): when can_update is False, a falsy assignment is
        # silently ignored instead of raising -- confirm that is intended
        if value and not self.can_update:
            raise ValueError("this boot loader does not support updates")
        elif self.can_update:
            self._update_only = value
def set_boot_args(self, *args, **kwargs):
""" Set up the boot command line.
Keyword Arguments:
storage - a blivet.Storage instance
All other arguments are expected to have a dracutSetupArgs()
method.
"""
storage = kwargs.pop("storage", None)
#
# FIPS
#
boot_device = storage.mountpoints.get("/boot")
if flags.cmdline.get("fips") == "1" and boot_device:
self.boot_args.add("boot=%s" % self.stage2_device.fstabSpec)
#
# dracut
#
# storage
from blivet.devices import NetworkStorageDevice
dracut_devices = [storage.rootDevice]
if self.stage2_device != storage.rootDevice:
dracut_devices.append(self.stage2_device)
dracut_devices.extend(storage.fsset.swapDevices)
# Does /usr have its own device? If so, we need to tell dracut
usr_device = storage.mountpoints.get("/usr")
if usr_device:
dracut_devices.extend([usr_device])
netdevs = storage.devicetree.getDevicesByInstance(NetworkStorageDevice)
rootdev = storage.rootDevice
if any(rootdev.dependsOn(netdev) for netdev in netdevs):
dracut_devices = set(dracut_devices)
# By this time this thread should be the only one running, and also
# mountpoints is a property function that returns a new dict every
# time, so iterating over the values is safe.
for dev in storage.mountpoints.values():
if any(dev.dependsOn(netdev) for netdev in netdevs):
dracut_devices.add(dev)
done = []
for device in dracut_devices:
for dep in storage.devices:
if dep in done:
continue
if device != dep and not device.dependsOn(dep):
continue
setup_args = dep.dracutSetupArgs()
if not setup_args:
continue
self.boot_args.update(setup_args)
self.dracut_args.update(setup_args)
done.append(dep)
# network storage
# XXX this is nothing to be proud of
if isinstance(dep, NetworkStorageDevice):
setup_args = pyanaconda.network.dracutSetupArgs(dep)
self.boot_args.update(setup_args)
self.dracut_args.update(setup_args)
# passed-in objects
for cfg_obj in chain(args, kwargs.values()):
if hasattr(cfg_obj, "dracutSetupArgs"):
setup_args = cfg_obj.dracutSetupArgs()
self.boot_args.update(setup_args)
self.dracut_args.update(setup_args)
else:
setup_string = cfg_obj.dracutSetupString()
self.boot_args.add(setup_string)
self.dracut_args.add(setup_string)
# This is needed for FCoE, bug #743784. The case:
# We discover LUN on an iface which is part of multipath setup.
# If the iface is disconnected after discovery anaconda doesn't
# write dracut ifname argument for the disconnected iface path
# (in Network.dracutSetupArgs).
# Dracut needs the explicit ifname= because biosdevname
# fails to rename the iface (because of BFS booting from it).
for nic, _dcb, _auto_vlan in fcoe().nics:
try:
hwaddr = nm_device_hwaddress(nic)
except ValueError:
continue
self.boot_args.add("ifname=%s:%s" % (nic, hwaddr.lower()))
# Add iscsi_firmware to trigger dracut running iscsistart
# See rhbz#1099603 and rhbz#1185792
if len(glob("/sys/firmware/iscsi_boot*")) > 0:
self.boot_args.add("iscsi_firmware")
#
# preservation of some of our boot args
# FIXME: this is stupid.
#
for opt in self.global_preserve_args + self.preserve_args:
if opt not in flags.cmdline:
continue
arg = flags.cmdline.get(opt)
new_arg = opt
if arg:
new_arg += "=%s" % arg
self.boot_args.add(new_arg)
# passed-in objects
for cfg_obj in chain(args, kwargs.values()):
if hasattr(cfg_obj, "dracutSetupArgs"):
setup_args = cfg_obj.dracutSetupArgs()
self.boot_args.update(setup_args)
self.dracut_args.update(setup_args)
else:
setup_string = cfg_obj.dracutSetupString()
self.boot_args.add(setup_string)
self.dracut_args.add(setup_string)
#
# configuration
#
@property
def boot_prefix(self):
""" Prefix, if any, to paths in /boot. """
if self.stage2_device.format.mountpoint == "/":
prefix = "/boot"
else:
prefix = ""
return prefix
def _set_console(self):
""" Set console options based on boot arguments. """
console = flags.cmdline.get("console", "")
console = os.path.basename(console)
self.console, _x, self.console_options = console.partition(",")
def write_config_console(self, config):
"""Write console-related configuration lines."""
pass
def write_config_password(self, config):
"""Write password-related configuration lines."""
pass
def write_config_header(self, config):
"""Write global configuration lines."""
self.write_config_console(config)
self.write_config_password(config)
def write_config_images(self, config):
"""Write image configuration entries."""
raise NotImplementedError()
def write_config_post(self):
pass
def write_config(self):
""" Write the bootloader configuration. """
if not self.config_file:
raise BootLoaderError("no config file defined for this boot loader")
config_path = os.path.normpath(iutil.getSysroot() + self.config_file)
if os.access(config_path, os.R_OK):
os.rename(config_path, config_path + ".anacbak")
config = iutil.open_with_perm(config_path, "w", self.config_file_mode)
self.write_config_header(config)
self.write_config_images(config)
config.close()
self.write_config_post()
@property
def trusted_boot(self):
return self._trusted_boot
@trusted_boot.setter
def trusted_boot(self, trusted_boot):
self._trusted_boot = trusted_boot
#
# installation
#
def write(self):
""" Write the bootloader configuration and install the bootloader. """
if self.skip_bootloader:
return
if self.update_only:
self.update()
return
self.write_config()
os.sync()
self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())
self.install()
def install(self, args=None):
raise NotImplementedError()
def update(self):
""" Update an existing bootloader configuration. """
pass
class GRUB(BootLoader):
    """Legacy GRUB (0.9x) driver: writes grub.conf/device.map and installs
    stage1/stage2 via the interactive grub shell."""
    name = "GRUB"
    _config_dir = "grub"
    _config_file = "grub.conf"
    _device_map_file = "device.map"
    can_dual_boot = True
    can_update = True
    stage2_is_valid_stage1 = True
    stage2_bootable = True
    stage2_must_be_primary = False
    # list of strings representing options for boot device types
    stage2_device_types = ["partition", "mdarray"]
    stage2_raid_levels = [raid.RAID1]
    stage2_raid_member_types = ["partition"]
    stage2_raid_metadata = ["0", "0.90", "1.0"]
    packages = ["grub"]
    # console name prefixes treated as serial consoles
    _serial_consoles = ["ttyS"]
    def __init__(self):
        super(GRUB, self).__init__()
        # cached crypted form of self.password (see _encrypt_password)
        self.encrypted_password = ""
#
# grub-related conveniences
#
def grub_device_name(self, device):
""" Return a grub-friendly representation of device. """
disk = getattr(device, "disk", device)
name = "(hd%d" % self.disks.index(disk)
if hasattr(device, "disk"):
name += ",%d" % (device.partedPartition.number - 1,)
name += ")"
return name
    @property
    def grub_config_dir(self):
        """ Config dir, adjusted for grub's view of the world. """
        # NOTE(review): boot_prefix is "/boot" or "" and _config_dir is
        # "grub", so this yields "/bootgrub" or "grub"; the consumers in
        # install() prepend their own "/" -- confirm the resulting grub
        # paths are intended before changing this concatenation
        return self.boot_prefix + self._config_dir
#
# configuration
#
@property
def config_dir(self):
""" Full path to configuration directory. """
return "/boot/" + self._config_dir
@property
def config_file(self):
""" Full path to configuration file. """
return "%s/%s" % (self.config_dir, self._config_file)
@property
def device_map_file(self):
""" Full path to device.map file. """
return "%s/%s" % (self.config_dir, self._device_map_file)
@property
def grub_conf_device_line(self):
return ""
@property
def splash_dir(self):
""" relative path to splash image directory."""
return GRUB._config_dir
@property
def has_serial_console(self):
""" true if the console is a serial console. """
return any(self.console.startswith(sconsole) for sconsole in self._serial_consoles)
@property
def serial_command(self):
command = ""
if self.console and self.has_serial_console:
unit = self.console[-1]
command = ["serial"]
s = parse_serial_opt(self.console_options)
if unit and unit != '0':
command.append("--unit=%s" % unit)
if s.speed and s.speed != '9600':
command.append("--speed=%s" % s.speed)
if s.parity:
if s.parity == 'o':
command.append("--parity=odd")
elif s.parity == 'e':
command.append("--parity=even")
if s.word and s.word != '8':
command.append("--word=%s" % s.word)
if s.stop and s.stop != '1':
command.append("--stop=%s" % s.stop)
command = " ".join(command)
return command
def write_config_console(self, config):
""" Write console-related configuration. """
if not self.console:
return
if self.has_serial_console:
config.write("%s\n" % self.serial_command)
config.write("terminal --timeout=%s serial console\n"
% self.timeout)
console_arg = "console=%s" % self.console
if self.console_options:
console_arg += ",%s" % self.console_options
self.boot_args.add(console_arg)
def _encrypt_password(self):
""" Make sure self.encrypted_password is set up correctly. """
if self.encrypted_password:
return
if not self.password:
raise BootLoaderError("cannot encrypt empty password")
# Used for ascii_letters and digits constants
import string # pylint: disable=deprecated-module
import crypt
import random
salt = "$6$"
salt_len = 16
salt_chars = string.ascii_letters + string.digits + './'
rand_gen = random.SystemRandom()
salt += "".join(rand_gen.choice(salt_chars) for i in range(salt_len))
self.encrypted_password = crypt.crypt(self.password, salt)
def write_config_password(self, config):
""" Write password-related configuration. """
if not self.password and not self.encrypted_password:
return
self._encrypt_password()
password_line = "--encrypted " + self.encrypted_password
config.write("password %s\n" % password_line)
    def write_config_header(self, config):
        """Write global configuration information. """
        # the banner reads "do not have a /boot partition" when /boot is on
        # the root filesystem (boot_prefix == "/boot")
        if self.boot_prefix:
            have_boot = "do not "
        else:
            have_boot = ""
        s = """# grub.conf generated by anaconda
# Note that you do not have to rerun grub after making changes to this file.
# NOTICE: You %(do)shave a /boot partition. This means that all kernel and
# initrd paths are relative to %(boot)s, eg.
# root %(grub_target)s
# kernel %(prefix)s/vmlinuz-version ro root=%(root_device)s
# initrd %(prefix)s/initrd-[generic-]version.img
""" % {"do": have_boot, "boot": self.stage2_device.format.mountpoint,
       "root_device": self.stage2_device.path,
       "grub_target": self.grub_device_name(self.stage1_device),
       "prefix": self.boot_prefix}
        config.write(s)
        config.write("boot=%s\n" % self.stage1_device.path)
        config.write(self.grub_conf_device_line)
        # find the index of the default image
        try:
            default_index = self.images.index(self.default)
        except ValueError:
            e = "Failed to find default image (%s)" % self.default.label
            raise BootLoaderError(e)
        config.write("default=%d\n" % default_index)
        config.write("timeout=%d\n" % self.timeout)
        self.write_config_console(config)
        # only show a splash image (and hide the menu) on virtual terminals
        if iutil.isConsoleOnVirtualTerminal(self.console):
            splash = "splash.xpm.gz"
            splash_path = os.path.normpath("%s/boot/%s/%s" % (iutil.getSysroot(),
                                                              self.splash_dir,
                                                              splash))
            if os.access(splash_path, os.R_OK):
                grub_root_grub_name = self.grub_device_name(self.stage2_device)
                config.write("splashimage=%s/%s/%s\n" % (grub_root_grub_name,
                                                         self.splash_dir,
                                                         splash))
                config.write("hiddenmenu\n")
        self.write_config_password(config)
    def write_config_images(self, config):
        """ Write image entries into configuration file.

            Linux images get kernel/initrd stanzas (tboot images boot via
            multiboot with kernel and initrd as modules); anything else is
            chainloaded.
        """
        for image in self.images:
            args = Arguments()
            if isinstance(image, LinuxBootLoaderImage):
                grub_root = self.grub_device_name(self.stage2_device)
                args.update(["ro", "root=%s" % image.device.fstabSpec])
                args.update(self.boot_args)
                if isinstance(image, TbootLinuxBootLoaderImage):
                    # tboot is the multiboot kernel; the real kernel and the
                    # initrd are passed to it as modules
                    args.update(image.args)
                    snippet = ("\tkernel %(prefix)s/%(multiboot)s %(mbargs)s\n"
                               "\tmodule %(prefix)s/%(kernel)s %(args)s\n"
                               "\tmodule %(prefix)s/%(initrd)s\n"
                               % {"prefix": self.boot_prefix,
                                  "multiboot": image.multiboot,
                                  "mbargs": image.mbargs,
                                  "kernel": image.kernel, "args": args,
                                  "initrd": image.initrd})
                else:
                    snippet = ("\tkernel %(prefix)s/%(kernel)s %(args)s\n"
                               "\tinitrd %(prefix)s/%(initrd)s\n"
                               % {"prefix": self.boot_prefix,
                                  "kernel": image.kernel, "args": args,
                                  "initrd": image.initrd})
                stanza = ("title %(label)s (%(version)s)\n"
                          "\troot %(grub_root)s\n"
                          "%(snippet)s"
                          % {"label": image.label, "version": image.version,
                             "grub_root": grub_root, "snippet": snippet})
            else:
                # non-linux images are chainloaded from their own device
                stanza = ("title %(label)s\n"
                          "\trootnoverify %(grub_root)s\n"
                          "\tchainloader +1\n"
                          % {"label": image.label,
                             "grub_root": self.grub_device_name(image.device)})
            log.info("bootloader.py: used boot args: %s ", args)
            config.write(stanza)
def write_device_map(self):
""" Write out a device map containing all supported devices. """
map_path = os.path.normpath(iutil.getSysroot() + self.device_map_file)
if os.access(map_path, os.R_OK):
os.rename(map_path, map_path + ".anacbak")
dev_map = open(map_path, "w")
dev_map.write("# this device map was generated by anaconda\n")
for disk in self.disks:
dev_map.write("%s %s\n" % (self.grub_device_name(disk),
disk.path))
dev_map.close()
    def write_config_post(self):
        """ Perform additional configuration after writing config file(s). """
        super(GRUB, self).write_config_post()
        # make symlink for menu.lst (grub's default config file name)
        menu_lst = "%s%s/menu.lst" % (iutil.getSysroot(), self.config_dir)
        if os.access(menu_lst, os.R_OK):
            try:
                os.rename(menu_lst, menu_lst + '.anacbak')
            except OSError as e:
                # best effort -- a failed backup must not abort the install
                log.error("failed to back up %s: %s", menu_lst, e)
        try:
            os.symlink(self._config_file, menu_lst)
        except OSError as e:
            log.error("failed to create grub menu.lst symlink: %s", e)
        # make symlink to grub.conf in /etc since that's where configs belong
        etc_grub = "%s/etc/%s" % (iutil.getSysroot(), self._config_file)
        if os.access(etc_grub, os.R_OK):
            try:
                os.unlink(etc_grub)
            except OSError as e:
                log.error("failed to remove %s: %s", etc_grub, e)
        try:
            os.symlink("..%s" % self.config_file, etc_grub)
        except OSError as e:
            log.error("failed to create /etc/grub.conf symlink: %s", e)
def write_config(self):
""" Write bootloader configuration to disk. """
# write device.map
self.write_device_map()
# this writes the actual configuration file
super(GRUB, self).write_config()
#
# installation
#
    @property
    def install_targets(self):
        """ List of (stage1, stage2) tuples representing install targets.

            For a RAID1 /boot, one tuple is produced per member disk so
            the system remains bootable after losing a member.
        """
        targets = []
        # make sure we have stage1 and stage2 installed with redundancy
        # so that boot can succeed even in the event of failure or removal
        # of some of the disks containing the member partitions of the
        # /boot array. If the stage1 is not a disk, it probably needs to
        # be a partition on a particular disk (biosboot, prepboot), so only
        # add the redundant targets if installing stage1 to a disk that is
        # a member of the stage2 array.
        # Look for both mdraid and btrfs raid
        if self.stage2_device.type == "mdarray" and \
           self.stage2_device.level == raid.RAID1:
            stage2_raid = True
            # Set parents to the list of partitions in the RAID
            stage2_parents = self.stage2_device.parents
        elif self.stage2_device.type == "btrfs subvolume" and \
             self.stage2_device.parents[0].dataLevel == raid.RAID1:
            stage2_raid = True
            # Set parents to the list of partitions in the parent volume
            stage2_parents = self.stage2_device.parents[0].parents
        else:
            stage2_raid = False
        if stage2_raid and \
           self.stage1_device.isDisk and \
           self.stage2_device.dependsOn(self.stage1_device):
            for stage2dev in stage2_parents:
                # if target disk contains any of /boot array's member
                # partitions, set up stage1 on each member's disk
                stage1dev = stage2dev.disk
                targets.append((stage1dev, self.stage2_device))
        else:
            targets.append((self.stage1_device, self.stage2_device))
        return targets
    def install(self, args=None):
        """ Install stage1/stage2 by driving the interactive grub shell.

            :param args: unused on entry; kept for interface compatibility.
                NOTE(review): the name is shadowed below by the grub
                command-line argument list -- confirm the parameter is
                intentionally ignored.
            :raises BootLoaderError: when grub-install or grub fails
        """
        rc = iutil.execInSysroot("grub-install", ["--just-copy"])
        if rc:
            raise BootLoaderError("boot loader install failed")
        for (stage1dev, stage2dev) in self.install_targets:
            # feed the install commands to the grub shell through a pipe
            cmd = ("root %(stage2dev)s\n"
                   "install --stage2=%(config_dir)s/stage2"
                   " /%(grub_config_dir)s/stage1 d %(stage1dev)s"
                   " /%(grub_config_dir)s/stage2 p"
                   " %(stage2dev)s/%(grub_config_dir)s/%(config_basename)s\n"
                   % {"grub_config_dir": self.grub_config_dir,
                      "config_dir": self.config_dir,
                      "config_basename": self._config_file,
                      "stage1dev": self.grub_device_name(stage1dev),
                      "stage2dev": self.grub_device_name(stage2dev)})
            (pread, pwrite) = os.pipe()
            iutil.eintr_retry_call(os.write, pwrite, cmd.encode("utf-8"))
            iutil.eintr_ignore(os.close, pwrite)
            args = ["--batch", "--no-floppy",
                    "--device-map=%s" % self.device_map_file]
            rc = iutil.execInSysroot("grub", args, stdin=pread)
            iutil.eintr_ignore(os.close, pread)
            if rc:
                raise BootLoaderError("boot loader install failed")
def update(self):
self.install()
#
# miscellaneous
#
def has_windows(self, devices):
""" Potential boot devices containing non-linux operating systems. """
# make sure we don't clobber error/warning lists
errors = self.errors[:]
warnings = self.warnings[:]
ret = [d for d in devices if self.is_valid_stage2_device(d, linux=False, non_linux=True)]
self.errors = errors
self.warnings = warnings
return bool(ret)
# Add a warning about certain RAID situations to is_valid_stage2_device
def is_valid_stage2_device(self, device, linux=True, non_linux=False):
    """Validate device as a stage2 target, warning on risky RAID layouts.

    Extends the superclass check: when stage2 lives on a RAID1 md array
    but stage1 is not equally redundant, append an explanatory warning
    (validity itself is unchanged).
    """
    valid = super(GRUB, self).is_valid_stage2_device(device, linux, non_linux)

    # If the stage2 device is on a raid1, check that the stage1 device is also redundant,
    # either by also being part of an array or by being a disk (which is expanded
    # to every disk in the array by install_targets).
    if self.stage1_device and self.stage2_device and \
       self.stage2_device.type == "mdarray" and \
       self.stage2_device.level == raid.RAID1 and \
       self.stage1_device.type != "mdarray":
        if not self.stage1_device.isDisk:
            # stage1 on a single partition: losing that one drive breaks boot
            msg = _("boot loader stage2 device %(stage2dev)s is on a multi-disk array, but boot loader stage1 device %(stage1dev)s is not. " \
                    "A drive failure in %(stage2dev)s could render the system unbootable.") % \
                    {"stage1dev" : self.stage1_device.name,
                     "stage2dev" : self.stage2_device.name}
            self.warnings.append(msg)
        elif not self.stage2_device.dependsOn(self.stage1_device):
            # stage1 disk is not a member of the stage2 array, so stage1
            # ends up on only one drive
            msg = _("boot loader stage2 device %(stage2dev)s is on a multi-disk array, but boot loader stage1 device %(stage1dev)s is not part of this array. " \
                    "The stage1 boot loader will only be installed to a single drive.") % \
                    {"stage1dev" : self.stage1_device.name,
                     "stage2dev" : self.stage2_device.name}
            self.warnings.append(msg)

    return valid
class GRUB2(GRUB):
    """ GRUBv2

        - configuration
            - password (insecure), password_pbkdf2
                - http://www.gnu.org/software/grub/manual/grub.html#Invoking-grub_002dmkpasswd_002dpbkdf2
            - --users per-entry specifies which users can access, otherwise
              entry is unrestricted
            - /etc/grub/custom.cfg
        - how does grub resolve names of md arrays?
        - disable automatic use of grub-mkconfig?
            - on upgrades?
        - BIOS boot partition (GPT)
            - parted /dev/sda set <partition_number> bios_grub on
            - can't contain a file system
            - 31KiB min, 1MiB recommended
    """
    name = "GRUB2"
    packages = ["grub2"]
    _config_file = "grub.cfg"
    _config_dir = "grub2"
    defaults_file = "/etc/default/grub"
    terminal_type = "console"
    stage2_max_end = None

    # requirements for boot devices
    stage2_device_types = ["partition", "mdarray", "lvmlv"]
    stage2_raid_levels = [raid.RAID0, raid.RAID1, raid.RAID4,
                          raid.RAID5, raid.RAID6, raid.RAID10]
    stage2_raid_metadata = ["0", "0.90", "1.0", "1.2"]

    @property
    def stage2_format_types(self):
        # Filesystem preference order differs by product branding.
        if productName.startswith("Red Hat "):
            return ["xfs", "ext4", "ext3", "ext2", "btrfs"]
        else:
            return ["ext4", "ext3", "ext2", "btrfs", "xfs"]

    def __init__(self):
        super(GRUB2, self).__init__()

    # XXX we probably need special handling for raid stage1 w/ gpt disklabel
    #     since it's unlikely there'll be a bios boot partition on each disk

    #
    # grub-related conveniences
    #

    def grub_device_name(self, device):
        """ Return a grub-friendly representation of device.

            Disks and partitions use the (hdX,Y) notation, while lvm and
            md devices just use their names.
        """
        disk = None
        name = "(%s)" % device.name

        if device.isDisk:
            disk = device
        elif hasattr(device, "disk"):
            disk = device.disk

        if disk is not None:
            name = "(hd%d" % self.disks.index(disk)
            if hasattr(device, "disk"):
                # partition: append disklabel type and partition number,
                # e.g. "(hd0,gpt2)"
                lt = device.disk.format.labelType
                name += ",%s%d" % (lt, device.partedPartition.number)
            name += ")"
        return name

    def write_config_console(self, config):
        # NOTE(review): despite the name, this writes nothing to config;
        # it only folds the console= argument into self.boot_args.
        if not self.console:
            return

        console_arg = "console=%s" % self.console
        if self.console_options:
            console_arg += ",%s" % self.console_options
        self.boot_args.add(console_arg)

    def write_device_map(self):
        """ Write out a device map containing all supported devices. """
        map_path = os.path.normpath(iutil.getSysroot() + self.device_map_file)
        if os.access(map_path, os.R_OK):
            # keep a backup of any pre-existing map
            os.rename(map_path, map_path + ".anacbak")

        # NOTE(review): appends directly to the object returned by
        # self.disks; assumes that property returns a fresh list on each
        # access -- confirm it does not alias shared state.
        devices = self.disks
        if self.stage1_device not in devices:
            devices.append(self.stage1_device)

        for disk in self.stage2_device.disks:
            if disk not in devices:
                devices.append(disk)

        # the device map only lists whole disks
        devices = [d for d in devices if d.isDisk]

        if len(devices) == 0:
            return

        dev_map = open(map_path, "w")
        dev_map.write("# this device map was generated by anaconda\n")
        for drive in devices:
            dev_map.write("%s %s\n" % (self.grub_device_name(drive),
                                       drive.path))
        dev_map.close()

    def write_defaults(self):
        """Write /etc/default/grub, consumed later by grub2-mkconfig."""
        defaults_file = "%s%s" % (iutil.getSysroot(), self.defaults_file)
        defaults = open(defaults_file, "w+")
        defaults.write("GRUB_TIMEOUT=%d\n" % self.timeout)
        defaults.write("GRUB_DISTRIBUTOR=\"$(sed 's, release .*$,,g' /etc/system-release)\"\n")
        defaults.write("GRUB_DEFAULT=saved\n")
        defaults.write("GRUB_DISABLE_SUBMENU=true\n")
        if self.console and self.has_serial_console:
            defaults.write("GRUB_TERMINAL=\"serial console\"\n")
            defaults.write("GRUB_SERIAL_COMMAND=\"%s\"\n" % self.serial_command)
        else:
            defaults.write("GRUB_TERMINAL_OUTPUT=\"%s\"\n" % self.terminal_type)

        # this is going to cause problems for systems containing multiple
        # linux installations or even multiple boot entries with different
        # boot arguments
        log.info("bootloader.py: used boot args: %s ", self.boot_args)
        defaults.write("GRUB_CMDLINE_LINUX=\"%s\"\n" % self.boot_args)
        defaults.write("GRUB_DISABLE_RECOVERY=\"true\"\n")
        #defaults.write("GRUB_THEME=\"/boot/grub2/themes/system/theme.txt\"\n")
        defaults.close()

    def _encrypt_password(self):
        """ Make sure self.encrypted_password is set up properly. """
        if self.encrypted_password:
            return

        if not self.password:
            raise RuntimeError("cannot encrypt empty password")

        # grub2-mkpasswd-pbkdf2 prompts for the password twice; feed both
        # copies through a pipe wired to its stdin
        (pread, pwrite) = os.pipe()
        passwords = "%s\n%s\n" % (self.password, self.password)
        iutil.eintr_retry_call(os.write, pwrite, passwords.encode("utf-8"))
        iutil.eintr_ignore(os.close, pwrite)

        buf = iutil.execWithCapture("grub2-mkpasswd-pbkdf2", [],
                                    stdin=pread,
                                    root=iutil.getSysroot())
        iutil.eintr_ignore(os.close, pread)

        # the hash is the last whitespace-separated token of the output
        self.encrypted_password = buf.split()[-1].strip()
        if not self.encrypted_password.startswith("grub.pbkdf2."):
            raise BootLoaderError("failed to encrypt boot loader password")

    def write_password_config(self):
        """Write /etc/grub.d/01_users with the superuser password hash."""
        if not self.password and not self.encrypted_password:
            return

        users_file = iutil.getSysroot() + "/etc/grub.d/01_users"
        # mode 0700: the file embeds the password hash
        header = iutil.open_with_perm(users_file, "w", 0o700)
        header.write("#!/bin/sh -e\n\n")
        header.write("cat << \"EOF\"\n")
        # XXX FIXME: document somewhere that the username is "root"
        header.write("set superusers=\"root\"\n")
        header.write("export superusers\n")
        self._encrypt_password()
        password_line = "password_pbkdf2 root " + self.encrypted_password
        header.write("%s\n" % password_line)
        header.write("EOF\n")
        header.close()

    def write_config(self):
        """Write defaults + password config, then run grub2-mkconfig."""
        self.write_config_console(None)
        # See if we have a password and if so update the boot args before we
        # write out the defaults file.
        if self.password or self.encrypted_password:
            self.boot_args.add("rd.shell=0")
        self.write_defaults()

        # if we fail to setup password auth we should complete the
        # installation so the system is at least bootable
        try:
            self.write_password_config()
        except (BootLoaderError, OSError, RuntimeError) as e:
            log.error("boot loader password setup failed: %s", e)

        # make sure the default entry is the OS we are installing
        if self.default is not None:
            # find the index of the default image
            try:
                default_index = self.images.index(self.default)
            except ValueError:
                log.warn("Failed to find default image (%s), defaulting to 0", self.default.label)
                default_index = 0

            rc = iutil.execInSysroot("grub2-set-default", [str(default_index)])
            if rc:
                log.error("failed to set default menu entry to %s", productName)

        # now tell grub2 to generate the main configuration file
        rc = iutil.execInSysroot("grub2-mkconfig",
                                 ["-o", self.config_file])
        if rc:
            raise BootLoaderError("failed to write boot loader configuration")

    #
    # installation
    #

    def install(self, args=None):
        """Run grub2-install for every (stage1, stage2) install target.

        :param args: extra arguments prepended to each grub2-install call.
        :raises BootLoaderError: if any grub2-install run fails.
        """
        if args is None:
            args = []

        # XXX will installing to multiple drives work as expected with GRUBv2?
        for (stage1dev, stage2dev) in self.install_targets:
            grub_args = args + ["--no-floppy", stage1dev.path]
            if stage1dev == stage2dev:
                # This is hopefully a temporary hack. GRUB2 currently refuses
                # to install to a partition's boot block without --force.
                grub_args.insert(0, '--force')
            else:
                if flags.nombr:
                    grub_args.insert(0, '--grub-setup=/bin/true')
                    log.info("bootloader.py: mbr update by grub2 disabled")
                else:
                    log.info("bootloader.py: mbr will be updated for grub2")

            rc = iutil.execWithRedirect("grub2-install", grub_args,
                                        root=iutil.getSysroot(),
                                        env_prune=['MALLOC_PERTURB_'])
            if rc:
                raise BootLoaderError("boot loader install failed")

    def write(self):
        """ Write the bootloader configuration and install the bootloader. """
        if self.skip_bootloader:
            return

        if self.update_only:
            self.update()
            return

        try:
            self.write_device_map()
            self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())
            os.sync()
            self.install()
            os.sync()
            self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())
        finally:
            # always write the config, even if install failed, so the
            # system has the best chance of being bootable
            self.write_config()
            os.sync()
            self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())

    def check(self):
        """ When installing to the mbr of a disk grub2 needs enough space
        before the first partition in order to embed its core.img

        Until we have a way to ask grub2 what the size is we check to make
        sure it starts >= 512K, otherwise return an error.
        """
        ret = True
        base_gap_bytes = 32256        # 31.5KiB
        advanced_gap_bytes = 524288   # 512KiB
        self.errors = []
        self.warnings = []

        if self.stage1_device == self.stage2_device:
            return ret

        # These are small enough to fit
        if self.stage2_device.type == "partition":
            min_start = base_gap_bytes
        else:
            min_start = advanced_gap_bytes

        if not self.stage1_disk:
            return False

        # If the first partition starts too low and there is no biosboot partition show an error.
        error_msg = None
        biosboot = False
        parts = self.stage1_disk.format.partedDisk.partitions
        for p in parts:
            if p.getFlag(PARTITION_BIOS_GRUB):
                # a bios_grub partition gives grub2 its embedding area
                biosboot = True
                break

            start = p.geometry.start * p.disk.device.sectorSize
            if start < min_start:
                error_msg = _("%(deviceName)s may not have enough space for grub2 to embed "
                              "core.img when using the %(fsType)s file system on %(deviceType)s") \
                              % {"deviceName": self.stage1_device.name, "fsType": self.stage2_device.format.type,
                                 "deviceType": self.stage2_device.type}

        if error_msg and not biosboot:
            log.error(error_msg)
            self.errors.append(error_msg)
            ret = False

        return ret
class EFIGRUB(GRUB2):
    """GRUB2 variant that installs to the EFI system partition and manages
    firmware boot entries via efibootmgr."""

    packages = ["grub2-efi", "efibootmgr", "shim"]
    can_dual_boot = False
    stage2_is_valid_stage1 = False
    stage2_bootable = False

    # binary referenced from the firmware boot entry; shim provides the
    # Secure Boot chain of trust
    _efi_binary = "\\shim.efi"

    @property
    def _config_dir(self):
        return "efi/EFI/%s" % (self.efi_dir,)

    def __init__(self):
        super(EFIGRUB, self).__init__()
        self.efi_dir = 'BOOT'

    def efibootmgr(self, *args, **kwargs):
        """Run efibootmgr with args.

        Returns "" (and does nothing) for image/directory installs and for
        'noefi' boots. Pass capture=True to get the command's stdout
        instead of its exit code.
        """
        if flags.imageInstall or flags.dirInstall:
            log.info("Skipping efibootmgr for image/directory install.")
            return ""

        if "noefi" in flags.cmdline:
            log.info("Skipping efibootmgr for noefi")
            return ""

        if kwargs.pop("capture", False):
            exec_func = iutil.execWithCapture
        else:
            exec_func = iutil.execWithRedirect
        if "root" not in kwargs:
            kwargs["root"] = iutil.getSysroot()

        return exec_func("efibootmgr", list(args), **kwargs)

    #
    # installation
    #

    def remove_efi_boot_target(self):
        """Delete any existing firmware boot entries for this product."""
        buf = self.efibootmgr(capture=True)
        for line in buf.splitlines():
            try:
                (slot, _product) = line.split(None, 1)
            except ValueError:
                # line has no product description; skip it
                continue

            if _product == productName.split("-")[0]:
                # entry names look like "BootNNNN*"; NNNN is the slot id
                slot_id = slot[4:8]
                # slot_id is hex, we can't use .isint and use this regex:
                if not re.match("^[0-9a-fA-F]+$", slot_id):
                    log.warning("failed to parse efi boot slot (%s)", slot)
                    continue

                rc = self.efibootmgr("-b", slot_id, "-B")
                if rc:
                    raise BootLoaderError("failed to remove old efi boot entry. This is most likely a kernel or firmware bug.")

    @property
    def efi_dir_as_efifs_dir(self):
        # convert e.g. "efi/EFI/BOOT" into the "\EFI\BOOT" form that
        # efibootmgr -l expects
        ret = self._config_dir.replace('efi/', '')
        return "\\" + ret.replace('/', '\\')

    def _add_single_efi_boot_target(self, partition):
        boot_disk = partition.disk
        boot_part_num = str(partition.partedPartition.number)

        rc = self.efibootmgr("-c", "-w", "-L", productName.split("-")[0],
                             "-d", boot_disk.path, "-p", boot_part_num,
                             "-l",
                             self.efi_dir_as_efifs_dir + self._efi_binary,
                             root=iutil.getSysroot())
        if rc:
            raise BootLoaderError("failed to set new efi boot target. This is most likely a kernel or firmware bug.")

    def add_efi_boot_target(self):
        if self.stage1_device.type == "partition":
            self._add_single_efi_boot_target(self.stage1_device)
        elif self.stage1_device.type == "mdarray":
            # mirrored ESP: one firmware entry per member partition
            for parent in self.stage1_device.parents:
                self._add_single_efi_boot_target(parent)

    def install(self, args=None):
        if not flags.leavebootorder:
            # Remove old entries first so the firmware boot list does not
            # accumulate duplicates.
            self.remove_efi_boot_target()
        self.add_efi_boot_target()

    def update(self):
        self.install()

    #
    # installation
    #

    def write(self):
        """ Write the bootloader configuration and install the bootloader. """
        if self.skip_bootloader:
            return

        if self.update_only:
            self.update()
            return

        try:
            os.sync()
            self.stage2_device.format.sync(root=iutil.getTargetPhysicalRoot())
            self.install()
        finally:
            self.write_config()

    def check(self):
        return True
class Aarch64EFIGRUB(EFIGRUB):
    # serial console device-name prefixes seen on aarch64 hardware
    _serial_consoles = ["ttyAMA", "ttyS"]
class MacEFIGRUB(EFIGRUB):
    """EFI GRUB variant for Intel Macs.

    Runs the mactel-boot-setup helper after the regular EFI install and
    restricts stage1 selection to the "Linux HFS+ ESP" partition so the
    OS X root volume is never chosen.
    """

    def mactel_config(self):
        """Run mactel-boot-setup in the sysroot, if the helper exists."""
        if os.path.exists(iutil.getSysroot() + "/usr/libexec/mactel-boot-setup"):
            rc = iutil.execInSysroot("/usr/libexec/mactel-boot-setup", [])
            if rc:
                log.error("failed to configure Mac boot loader")

    def install(self, args=None):
        # Fix: forward args to the superclass instead of silently dropping
        # it (the parameter was previously accepted but ignored; EFIGRUB's
        # install() treats args the same way, so behavior is compatible).
        super(MacEFIGRUB, self).install(args=args)
        self.mactel_config()

    def is_valid_stage1_device(self, device, early=False):
        """Accept only the dedicated Linux HFS+ ESP as a stage1 device."""
        valid = super(MacEFIGRUB, self).is_valid_stage1_device(device, early)

        # Make sure we don't pick the OSX root partition
        if valid and getattr(device.format, "name", "") != "Linux HFS+ ESP":
            valid = False
            if hasattr(device.format, "name"):
                log.debug("device.format.name is '%s'", device.format.name)

        log.debug("MacEFIGRUB.is_valid_stage1_device(%s) returning %s", device.name, valid)
        return valid
# Inherit abstract methods from BootLoader
# pylint: disable=abstract-method
class YabootBase(BootLoader):
    """Shared config-writing logic for the yaboot-family boot loaders."""

    def write_config_password(self, config):
        # "restricted" makes the password apply to editing boot entries
        if self.password:
            config.write("password=%s\n" % self.password)
            config.write("restricted\n")

    def write_config_images(self, config):
        """Write one yaboot stanza per Linux boot loader image."""
        for image in self.images:
            if not isinstance(image, LinuxBootLoaderImage):
                # mac os images are handled specially in the header on mac
                continue

            args = Arguments()
            if self.password:
                args.add("rd.shell=0")
            if image.initrd:
                initrd_line = "\tinitrd=%s/%s\n" % (self.boot_prefix,
                                                    image.initrd)
            else:
                initrd_line = ""

            # device-path roots go on their own "root=" line; other specs
            # (UUID=..., LABEL=...) are passed on the kernel command line
            root_device_spec = image.device.fstabSpec
            if root_device_spec.startswith("/"):
                root_line = "\troot=%s\n" % root_device_spec
            else:
                args.add("root=%s" % root_device_spec)
                root_line = ""

            args.update(self.boot_args)
            log.info("bootloader.py: used boot args: %s ", args)

            stanza = ("image=%(boot_prefix)s%(kernel)s\n"
                      "\tlabel=%(label)s\n"
                      "\tread-only\n"
                      "%(initrd_line)s"
                      "%(root_line)s"
                      "\tappend=\"%(args)s\"\n\n"
                      % {"kernel": image.kernel, "initrd_line": initrd_line,
                         "label": self.image_label(image),
                         "root_line": root_line, "args": args,
                         "boot_prefix": self.boot_prefix})
            config.write(stanza)
class Yaboot(YabootBase):
    name = "Yaboot"
    _config_file = "yaboot.conf"
    prog = "ybin"
    image_label_attr = "short_label"
    packages = ["yaboot"]

    # stage2 device requirements
    stage2_device_types = ["partition", "mdarray"]
    stage2_device_raid_levels = [raid.RAID1]

    #
    # configuration
    #

    @property
    def config_dir(self):
        # the config lives under /boot when /boot is a separate filesystem
        conf_dir = "/etc"
        if self.stage2_device.format.mountpoint == "/boot":
            conf_dir = "/boot/etc"
        return conf_dir

    @property
    def config_file(self):
        return "%s/%s" % (self.config_dir, self._config_file)

    def write_config_header(self, config):
        """Write the global yaboot.conf header, then the variant header
        and any password lines."""
        if self.stage2_device.type == "mdarray":
            boot_part_num = self.stage2_device.parents[0].partedPartition.number
        else:
            boot_part_num = self.stage2_device.partedPartition.number

        # yaboot.conf timeout is in tenths of a second. Brilliant.
        header = ("# yaboot.conf generated by anaconda\n\n"
                  "boot=%(stage1dev)s\n"
                  "init-message=\"Welcome to %(product)s!\\nHit <TAB> for "
                  "boot options\"\n\n"
                  "partition=%(part_num)d\n"
                  "timeout=%(timeout)d\n"
                  "install=/usr/lib/yaboot/yaboot\n"
                  "delay=5\n"
                  "enablecdboot\n"
                  "enableofboot\n"
                  "enablenetboot\n"
                  % {"stage1dev": self.stage1_device.path,
                     "product": productName, "part_num": boot_part_num,
                     "timeout": self.timeout * 10})
        config.write(header)
        self.write_config_variant_header(config)
        self.write_config_password(config)
        config.write("\n")

    def write_config_variant_header(self, config):
        # overridden by subclasses for platform-specific header lines
        config.write("nonvram\n")
        config.write("mntpoint=/boot/yaboot\n")
        config.write("usemount\n")

    def write_config_post(self):
        super(Yaboot, self).write_config_post()

        # make symlink in /etc to yaboot.conf if config is in /boot/etc
        etc_yaboot_conf = iutil.getSysroot() + "/etc/yaboot.conf"
        if not os.access(etc_yaboot_conf, os.R_OK):
            try:
                os.symlink("../boot/etc/yaboot.conf", etc_yaboot_conf)
            except OSError as e:
                log.error("failed to create /etc/yaboot.conf symlink: %s", e)

    def write_config(self):
        if not os.path.isdir(iutil.getSysroot() + self.config_dir):
            os.mkdir(iutil.getSysroot() + self.config_dir)

        # this writes the config
        super(Yaboot, self).write_config()

    #
    # installation
    #

    def install(self, args=None):
        # NOTE(review): the args parameter is shadowed here; the installer
        # always runs with -f -C <config>.
        args = ["-f", "-C", self.config_file]
        rc = iutil.execInSysroot(self.prog, args)
        if rc:
            raise BootLoaderError("boot loader installation failed")
class IPSeriesYaboot(Yaboot):
    """Yaboot for IBM pSeries: uses mkofboot and updates the Open
    Firmware NVRAM boot-device list."""

    prog = "mkofboot"

    #
    # configuration
    #

    def write_config_variant_header(self, config):
        config.write("nonvram\n")   # only on pSeries?
        config.write("fstype=raw\n")

    #
    # installation
    #

    def install(self, args=None):
        self.updatePowerPCBootList()

        super(IPSeriesYaboot, self).install()

    def updatePowerPCBootList(self):
        """Reorder the OF boot-device list so the disk holding the PReP
        partition comes first."""
        if not can_touch_runtime_system("updatePowerPCBootList", touch_live=True):
            return

        log.debug("updatePowerPCBootList: self.stage1_device.path = %s", self.stage1_device.path)

        buf = iutil.execWithCapture("nvram",
                                    ["--print-config=boot-device"])
        if len(buf) == 0:
            log.error("FAIL: nvram --print-config=boot-device")
            return

        boot_list = buf.strip().split()
        log.debug("updatePowerPCBootList: boot_list = %s", boot_list)

        # translate the Linux device path to an Open Firmware path
        buf = iutil.execWithCapture("ofpathname",
                                    [self.stage1_device.path])
        if len(buf) > 0:
            boot_disk = buf.strip()
            log.debug("updatePowerPCBootList: boot_disk = %s", boot_disk)
        else:
            log.error("FAIL: ofpathname %s", self.stage1_device.path)
            return

        # Place the disk containing the PReP partition first.
        # Remove all other occurrences of it.
        boot_list = [boot_disk] + [x for x in boot_list if x != boot_disk]
        log.debug("updatePowerPCBootList: updated boot_list = %s", boot_list)
        update_value = "boot-device=%s" % " ".join(boot_list)

        rc = iutil.execWithRedirect("nvram", ["--update-config", update_value])
        if rc:
            log.error("FAIL: nvram --update-config %s", update_value)
        else:
            log.info("Updated PPC boot list with the command: nvram --update-config %s", update_value)
class IPSeriesGRUB2(GRUB2):
    """GRUB2 for IBM pSeries (PReP-based boot)."""

    # GRUB2 sets /boot bootable and not the PReP partition. This causes the Open Firmware BIOS not
    # to present the disk as a bootable target. If stage2_bootable is False, then the PReP partition
    # will be marked bootable. Confusing.
    stage2_bootable = False
    terminal_type = "ofconsole"

    #
    # installation
    #

    def install(self, args=None):
        if flags.leavebootorder:
            log.info("leavebootorder passed as an option. Will not update the NVRAM boot list.")
        else:
            self.updateNVRAMBootList()

        # --no-nvram: we manage the NVRAM boot list ourselves above
        super(IPSeriesGRUB2, self).install(args=["--no-nvram"])

    # This will update the PowerPC's (ppc) bios boot device order list
    def updateNVRAMBootList(self):
        if not can_touch_runtime_system("updateNVRAMBootList", touch_live=True):
            return

        log.debug("updateNVRAMBootList: self.stage1_device.path = %s", self.stage1_device.path)

        buf = iutil.execWithCapture("nvram",
                                    ["--print-config=boot-device"])
        if len(buf) == 0:
            log.error("Failed to determine nvram boot device")
            return

        boot_list = buf.strip().replace("\"", "").split()
        log.debug("updateNVRAMBootList: boot_list = %s", boot_list)

        # translate the Linux device path to an Open Firmware path
        buf = iutil.execWithCapture("ofpathname",
                                    [self.stage1_device.path])
        if len(buf) > 0:
            boot_disk = buf.strip()
        else:
            log.error("Failed to translate boot path into device name")
            return

        # Place the disk containing the PReP partition first.
        # Remove all other occurrences of it.
        boot_list = [boot_disk] + [x for x in boot_list if x != boot_disk]
        update_value = "boot-device=%s" % " ".join(boot_list)

        rc = iutil.execWithRedirect("nvram", ["--update-config", update_value])
        if rc:
            log.error("Failed to update new boot device order")

    #
    # In addition to the normal grub configuration variable, add one more to set the size of the
    # console's window to a standard 80x24
    #
    def write_defaults(self):
        super(IPSeriesGRUB2, self).write_defaults()

        defaults_file = "%s%s" % (iutil.getSysroot(), self.defaults_file)
        defaults = open(defaults_file, "a+")
        # The terminfo's X and Y size, and output location could change in the future
        defaults.write("GRUB_TERMINFO=\"terminfo -g 80x24 console\"\n")
        # Disable OS Prober on pSeries systems
        # TODO: This will disable across all POWER platforms. Need to get
        #       into blivet and rework how it segments the POWER systems
        #       to allow for differentiation between PowerNV and
        #       PowerVM / POWER on qemu/kvm
        defaults.write("GRUB_DISABLE_OS_PROBER=true\n")
        defaults.close()
class MacYaboot(Yaboot):
    """Yaboot variant for New World PowerPC Macs (dual boot capable)."""

    prog = "mkofboot"
    can_dual_boot = True

    #
    # configuration
    #

    def write_config_variant_header(self, config):
        """Write the Mac-specific header lines into the yaboot config."""
        labeled_images = [img for img in self.chain_images if img.label]
        if labeled_images:
            # point yaboot at the first labeled chain image (Mac OS X)
            config.write("macosx=%s\n" % labeled_images[0].device.path)

        config.write("magicboot=/usr/lib/yaboot/ofboot\n")
class ZIPL(BootLoader):
    """s390/s390x boot loader driven by the zipl utility."""

    name = "ZIPL"
    config_file = "/etc/zipl.conf"
    packages = ["s390utils-base"]

    # stage2 device requirements
    stage2_device_types = ["partition"]

    @property
    def stage2_format_types(self):
        # Filesystem preference order differs by product branding.
        if productName.startswith("Red Hat "):
            return ["xfs", "ext4", "ext3", "ext2"]
        else:
            return ["ext4", "ext3", "ext2", "xfs"]

    image_label_attr = "short_label"
    preserve_args = ["cio_ignore", "rd.znet", "rd_ZNET"]

    def __init__(self):
        super(ZIPL, self).__init__()
        # IPL device name parsed from zipl output during install()
        self.stage1_name = None

    #
    # configuration
    #

    @property
    def boot_dir(self):
        return "/boot"

    def write_config_images(self, config):
        """Write one zipl.conf section per installed kernel image."""
        for image in self.images:
            if "kdump" in (image.initrd or image.kernel):
                # no need to create bootloader entries for kdump
                continue

            args = Arguments()
            if image.initrd:
                initrd_line = "\tramdisk=%s/%s\n" % (self.boot_dir,
                                                     image.initrd)
            else:
                initrd_line = ""
            args.add("root=%s" % image.device.fstabSpec)
            args.update(self.boot_args)
            if image.device.type == "btrfs subvolume":
                args.update(["rootflags=subvol=%s" % image.device.name])
            log.info("bootloader.py: used boot args: %s ", args)
            stanza = ("[%(label)s]\n"
                      "\timage=%(boot_dir)s/%(kernel)s\n"
                      "%(initrd_line)s"
                      "\tparameters=\"%(args)s\"\n"
                      % {"label": self.image_label(image),
                         "kernel": image.kernel, "initrd_line": initrd_line,
                         "args": args,
                         "boot_dir": self.boot_dir})
            config.write(stanza)

    def write_config_header(self, config):
        header = ("[defaultboot]\n"
                  "defaultauto\n"
                  "prompt=1\n"
                  "timeout=%(timeout)d\n"
                  "default=%(default)s\n"
                  "target=/boot\n"
                  % {"timeout": self.timeout,
                     "default": self.image_label(self.default)})
        config.write(header)

    #
    # installation
    #

    def install(self, args=None):
        """Run zipl, parse the IPL device from its output, then re-IPL.

        :raises BootLoaderError: if no IPL device could be determined.
        """
        buf = iutil.execWithCapture("zipl", [], root=iutil.getSysroot())
        for line in buf.splitlines():
            if line.startswith("Preparing boot device: "):
                # Output here may look like:
                # Preparing boot device: dasdb (0200).
                # Preparing boot device: dasdl.
                # We want to extract the device name and pass that.
                name = re.sub(r".+?: ", "", line)
                self.stage1_name = re.sub(r"(\s\(.+\))?\.$", "", name)
            # a limitation of s390x is that the kernel parameter list must not
            # exceed 896 bytes; there is nothing we can do about this, so just
            # catch the error and show it to the user instead of crashing
            elif line.startswith("Error: The length of the parameters "):
                errorHandler.cb(ZIPLError(line))

        if not self.stage1_name:
            raise BootLoaderError("could not find IPL device")

        # do the reipl
        iutil.reIPL(self.stage1_name)
class EXTLINUX(BootLoader):
    """syslinux/extlinux boot loader (used on ARM platforms)."""

    name = "EXTLINUX"
    _config_file = "extlinux.conf"
    _config_dir = "/boot/extlinux"

    # stage1 device requirements
    stage1_device_types = ["disk"]

    # stage2 device requirements
    stage2_format_types = ["ext4", "ext3", "ext2"]
    stage2_device_types = ["partition"]
    stage2_bootable = True

    packages = ["syslinux-extlinux"]

    @property
    def config_file(self):
        return "%s/%s" % (self._config_dir, self._config_file)

    @property
    def boot_prefix(self):
        """ Prefix, if any, to paths in /boot. """
        if self.stage2_device.format.mountpoint == "/":
            prefix = "/boot"
        else:
            prefix = ""

        return prefix

    def write_config_console(self, config):
        # folds the console= argument into self.boot_args; writes nothing
        if not self.console:
            return

        console_arg = "console=%s" % self.console
        if self.console_options:
            console_arg += ",%s" % self.console_options
        self.boot_args.add(console_arg)

    def write_config_images(self, config):
        """Write one extlinux label stanza per installed kernel image."""
        self.write_config_console(config)
        for image in self.images:
            args = Arguments()
            args.update(["root=%s" % image.device.fstabSpec, "ro"])
            if image.device.type == "btrfs subvolume":
                args.update(["rootflags=subvol=%s" % image.device.name])
            args.update(self.boot_args)
            log.info("bootloader.py: used boot args: %s ", args)

            stanza = ("label %(label)s (%(version)s)\n"
                      "\tkernel %(boot_prefix)s/%(kernel)s\n"
                      "\tinitrd %(boot_prefix)s/%(initrd)s\n"
                      "\tappend %(args)s\n\n"
                      % {"label": self.image_label(image),
                         "version": image.version,
                         "kernel": image.kernel,
                         "initrd": image.initrd,
                         "args": args,
                         "boot_prefix": self.boot_prefix})
            config.write(stanza)

    def write_config_header(self, config):
        # "# second{,s}" is syslinux menu placeholder syntax, not a typo
        header = ("# extlinux.conf generated by anaconda\n\n"
                  "ui menu.c32\n\n"
                  "menu autoboot Welcome to %(productName)s. Automatic boot in # second{,s}. Press a key for options.\n"
                  "menu title %(productName)s Boot Options.\n"
                  "menu hidden\n\n"
                  "timeout %(timeout)d\n"
                  "#totaltimeout 9000\n\n"
                  % {"productName": productName, "timeout": self.timeout * 10})
        config.write(header)
        if self.default is not None:
            config.write("default %(default)s\n\n" % {"default": self.image_label(self.default)})
        self.write_config_password(config)

    def write_config_password(self, config):
        if self.password:
            config.write("menu master passwd %s\n" % self.password)
            config.write("menu notabmsg Press [Tab] and enter the password to edit options")

    def write_config_post(self):
        # symlink /etc/extlinux.conf to the real config under /boot
        etc_extlinux = os.path.normpath(iutil.getSysroot() + "/etc/" + self._config_file)
        if not os.access(etc_extlinux, os.R_OK):
            try:
                os.symlink("../boot/%s" % self._config_file, etc_extlinux)
            except OSError as e:
                log.warning("failed to create /etc/extlinux.conf symlink: %s", e)

    def write_config(self):
        super(EXTLINUX, self).write_config()

    #
    # installation
    #

    def install(self, args=None):
        # NOTE(review): the args parameter is shadowed; extlinux always
        # runs with --install <config dir>.
        args = ["--install", self._config_dir]
        rc = iutil.execInSysroot("extlinux", args)
        if rc:
            raise BootLoaderError("boot loader install failed")
# every platform that wants a bootloader needs to be in this dict
bootloader_by_platform = {platform.X86: GRUB2,
                          platform.EFI: EFIGRUB,
                          platform.MacEFI: MacEFIGRUB,
                          platform.PPC: GRUB2,
                          platform.IPSeriesPPC: IPSeriesGRUB2,
                          platform.NewWorldPPC: MacYaboot,
                          platform.S390: ZIPL,
                          platform.Aarch64EFI: Aarch64EFIGRUB,
                          platform.ARM: EXTLINUX,
                          platform.omapARM: EXTLINUX}
def get_bootloader():
    """Instantiate the boot loader class appropriate for this platform.

    The extlinux boot flag overrides the per-platform table; unknown
    platforms fall back to the abstract BootLoader base.
    """
    current_platform = platform.platform.__class__
    if flags.extlinux:
        chosen = EXTLINUX
    else:
        chosen = bootloader_by_platform.get(current_platform, BootLoader)
    log.info("bootloader %s on %s platform", chosen.__name__, current_platform.__name__)
    return chosen()
# anaconda-specific functions

def writeSysconfigKernel(storage, version, instClass):
    """Write /etc/sysconfig/kernel for the installed default kernel.

    Locates the default kernel image (in /boot, or on the ESP for EFI
    layouts), queries the rpm database for the owning package name, and
    records UPDATEDEFAULT/DEFAULTKERNEL (plus tboot hypervisor settings
    when trusted boot is enabled). Logs and returns early on any failure.
    """
    # get the name of the default kernel package based on the version
    kernel_basename = "vmlinuz-" + version
    kernel_file = "/boot/%s" % kernel_basename
    if not os.path.isfile(iutil.getSysroot() + kernel_file):
        # EFI layouts may keep the kernel on the EFI system partition
        kernel_file = "/boot/efi/EFI/%s/%s" % (instClass.efi_dir, kernel_basename)
        if not os.path.isfile(iutil.getSysroot() + kernel_file):
            log.error("failed to recreate path to default kernel image")
            return

    try:
        import rpm
    except ImportError:
        log.error("failed to import rpm python module")
        return

    ts = rpm.TransactionSet(iutil.getSysroot())
    mi = ts.dbMatch('basenames', kernel_file)
    try:
        h = next(mi)
    except StopIteration:
        log.error("failed to get package name for default kernel")
        return

    kernel = h.name

    f = open(iutil.getSysroot() + "/etc/sysconfig/kernel", "w+")
    f.write("# UPDATEDEFAULT specifies if new-kernel-pkg should make\n"
            "# new kernels the default\n")
    # only update the default if we're setting the default to linux (#156678)
    if storage.bootloader.default.device == storage.rootDevice:
        f.write("UPDATEDEFAULT=yes\n")
    else:
        f.write("UPDATEDEFAULT=no\n")
    f.write("\n")
    f.write("# DEFAULTKERNEL specifies the default kernel package type\n")
    f.write("DEFAULTKERNEL=%s\n" % kernel)
    if storage.bootloader.trusted_boot:
        f.write("# HYPERVISOR specifies the default multiboot kernel\n")
        f.write("HYPERVISOR=/boot/tboot.gz\n")
        f.write("HYPERVISOR_ARGS=logging=vga,serial,memory\n")
    f.close()
def writeBootLoaderFinal(storage, payload, instClass, ksdata):
    """ Do the final write of the bootloader. """

    # set up dracut/fips boot args
    # XXX FIXME: do this from elsewhere?
    storage.bootloader.set_boot_args(storage=storage,
                                     payload=payload)
    try:
        storage.bootloader.write()
    except BootLoaderError as e:
        # let the error handler decide whether the failure is fatal
        log.error("bootloader.write failed: %s", e)
        if errorHandler.cb(e) == ERROR_RAISE:
            raise
def writeBootLoader(storage, payload, instClass, ksdata):
    """ Write bootloader configuration to disk.

        When we get here, the bootloader will already have a default linux
        image. We only have to add images for the non-default kernels and
        adjust the default to reflect whatever the default variant is.
    """
    if not storage.bootloader.skip_bootloader:
        stage1_device = storage.bootloader.stage1_device
        log.info("boot loader stage1 target device is %s", stage1_device.name)
        stage2_device = storage.bootloader.stage2_device
        log.info("boot loader stage2 target device is %s", stage2_device.name)

    # Bridge storage EFI configuration to bootloader
    if hasattr(storage.bootloader, 'efi_dir'):
        storage.bootloader.efi_dir = instClass.efi_dir

    # OSTree payloads manage their own boot entries; go straight to the
    # final write.
    if isinstance(payload, RPMOSTreePayload):
        if storage.bootloader.skip_bootloader:
            log.info("skipping boot loader install per user request")
            return
        writeBootLoaderFinal(storage, payload, instClass, ksdata)
        return

    # get a list of installed kernel packages
    # add whatever rescue kernels we can find to the end
    kernel_versions = list(payload.kernelVersionList)

    rescue_versions = glob(iutil.getSysroot() + "/boot/vmlinuz-*-rescue-*")
    rescue_versions += glob(iutil.getSysroot() + "/boot/efi/EFI/%s/vmlinuz-*-rescue-*" % instClass.efi_dir)
    # strip the leading "vmlinuz-" (8 chars) from each rescue image name
    kernel_versions += (f.split("/")[-1][8:] for f in rescue_versions)

    if not kernel_versions:
        log.warning("no kernel was installed -- boot loader config unchanged")
        return

    # all the linux images' labels are based on the default image's
    base_label = productName
    base_short_label = "linux"

    # The first one is the default kernel. Update the bootloader's default
    # entry to reflect the details of the default kernel.
    version = kernel_versions.pop(0)
    default_image = LinuxBootLoaderImage(device=storage.rootDevice,
                                         version=version,
                                         label=base_label,
                                         short=base_short_label)
    storage.bootloader.add_image(default_image)
    storage.bootloader.default = default_image

    # write out /etc/sysconfig/kernel
    writeSysconfigKernel(storage, version, instClass)

    if storage.bootloader.skip_bootloader:
        log.info("skipping boot loader install per user request")
        return

    # now add an image for each of the other kernels
    for version in kernel_versions:
        label = "%s-%s" % (base_label, version)
        short = "%s-%s" % (base_short_label, version)
        if storage.bootloader.trusted_boot:
            image = TbootLinuxBootLoaderImage(
                device=storage.rootDevice,
                version=version,
                label=label, short=short)
        else:
            image = LinuxBootLoaderImage(device=storage.rootDevice,
                                         version=version,
                                         label=label, short=short)
        storage.bootloader.add_image(image)

    writeBootLoaderFinal(storage, payload, instClass, ksdata)
| gpl-2.0 |
endlessm/chromium-browser | components/policy/tools/generate_policy_source.py | 1 | 65169 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''python %prog [options]
Pass at least:
--chrome-version-file <path to src/chrome/VERSION> or --all-chrome-versions
--target-platform <which platform the target code will be generated for and can
be one of (win, mac, linux, chromeos, ios)>
--policy_templates <path to the policy_templates.json input file>.'''
from __future__ import with_statement
from argparse import ArgumentParser
from collections import namedtuple
from collections import OrderedDict
from functools import partial
import json
import re
import sys
import textwrap
from xml.sax.saxutils import escape as xml_escape
# Python 2/3 compatibility: choose the broadest string type so isinstance()
# checks on schema values work under either interpreter.
if sys.version_info.major == 2:
  string_type = basestring
else:
  string_type = str

# Windows registry paths where branded/unbranded Chrome reads policy.
# Backslashes are double-escaped because these are emitted into C++ literals.
CHROME_POLICY_KEY = 'SOFTWARE\\\\Policies\\\\Google\\\\Chrome'
CHROMIUM_POLICY_KEY = 'SOFTWARE\\\\Policies\\\\Chromium'

# Maps each product-platform string used in policy_templates.json to the
# build-config platform names it covers.
PLATFORM_STRINGS = {
    'chrome_frame': ['win'],
    'chrome_os': ['chrome_os'],
    'android': ['android'],
    'webview_android': ['android'],
    'ios': ['ios'],
    'chrome.win': ['win'],
    'chrome.linux': ['linux'],
    'chrome.mac': ['mac'],
    'chrome.*': ['win', 'mac', 'linux'],
    'chrome.win7': ['win']
}
class PolicyDetails:
  """Parses a policy template and caches all its details."""

  # Maps policy types to a tuple with 4 other types:
  # - the equivalent base::Value::Type or 'TYPE_EXTERNAL' if the policy
  #   references external data
  # - the equivalent Protobuf field type
  # - the name of one of the protobufs for shared policy types
  # - the equivalent type in Android's App Restriction Schema
  # TODO(joaodasilva): refactor the 'dict' type into a more generic 'json' type
  # that can also be used to represent lists of other JSON objects.
  TYPE_MAP = {
      'dict': ('Type::DICTIONARY', 'string', 'String', 'string'),
      'external': ('TYPE_EXTERNAL', 'string', 'String', 'invalid'),
      'int': ('Type::INTEGER', 'int64', 'Integer', 'integer'),
      'int-enum': ('Type::INTEGER', 'int64', 'Integer', 'choice'),
      'list': ('Type::LIST', 'StringList', 'StringList', 'string'),
      'main': ('Type::BOOLEAN', 'bool', 'Boolean', 'bool'),
      'string': ('Type::STRING', 'string', 'String', 'string'),
      'string-enum': ('Type::STRING', 'string', 'String', 'choice'),
      'string-enum-list': ('Type::LIST', 'StringList', 'StringList',
                           'multi-select'),
  }

  class EnumItem:
    """One <caption, value> entry from an enum policy's 'items' list."""

    def __init__(self, item):
      self.caption = PolicyDetails._RemovePlaceholders(item['caption'])
      self.value = item['value']

  def _ConvertPlatform(self, platform):
    '''Converts product platform string in policy_templates.json to platform
    string that is defined in build config.'''
    if platform not in PLATFORM_STRINGS:
      raise RuntimeError('Platform "%s" is not supported' % platform)
    return PLATFORM_STRINGS[platform]

  def __init__(self, policy, chrome_major_version, target_platform, valid_tags):
    """Caches the details of one policy_templates.json policy entry.

    |policy|: the policy definition dict.
    |chrome_major_version|: if not None, policies whose supported_on range
        excludes this version do not contribute platforms.
    |target_platform|: build-config platform the generated code targets.
    |valid_tags|: allowed risk tags; unknown tags raise RuntimeError.
    """
    self.id = policy['id']
    self.name = policy['name']
    self.tags = policy.get('tags', None)
    self._CheckTagsValidity(valid_tags)
    features = policy.get('features', {})
    self.can_be_recommended = features.get('can_be_recommended', False)
    self.can_be_mandatory = features.get('can_be_mandatory', True)
    self.is_deprecated = policy.get('deprecated', False)
    self.is_device_only = policy.get('device_only', False)
    self.is_future = policy.get('future', False)
    self.supported_chrome_os_management = policy.get(
        'supported_chrome_os_management', ['active_directory', 'google_cloud'])
    self.schema = policy['schema']
    self.validation_schema = policy.get('validation_schema')
    self.has_enterprise_default = 'default_for_enterprise_users' in policy
    if self.has_enterprise_default:
      self.enterprise_default = policy['default_for_enterprise_users']

    self.platforms = set()
    self.future_on = set()
    for platform, version_range in map(lambda s: s.split(':'),
                                       policy.get('supported_on', [])):
      split_result = version_range.split('-')
      if len(split_result) != 2:
        # BUG FIX: these two messages previously interpolated an undefined
        # name |p|, which raised NameError instead of the intended error.
        raise RuntimeError('supported_on must have exactly one dash: "%s"' %
                           version_range)
      (version_min, version_max) = split_result
      if version_min == '':
        raise RuntimeError('supported_on must define a start version: "%s"' %
                           version_range)

      # Skip if filtering by Chromium version and the current Chromium version
      # does not support the policy.
      if chrome_major_version:
        if (int(version_min) > chrome_major_version or
            version_max != '' and int(version_max) < chrome_major_version):
          continue
      self.platforms.update(self._ConvertPlatform(platform))

    for platform in policy.get('future_on', []):
      self.future_on.update(self._ConvertPlatform(platform))

    if self.is_device_only and self.platforms.union(self.future_on) > set(
        ['chrome_os']):
      raise RuntimeError('device_only is only allowed for Chrome OS: "%s"' %
                         self.name)

    # Supported if the target platform appears now or in a future release.
    self.is_supported = (target_platform in self.platforms
                         or target_platform in self.future_on)
    self.is_future = self.is_future or target_platform in self.future_on

    if policy['type'] not in PolicyDetails.TYPE_MAP:
      raise NotImplementedError(
          'Unknown policy type for %s: %s' % (policy['name'], policy['type']))
    self.policy_type, self.protobuf_type, self.policy_protobuf_type, \
        self.restriction_type = PolicyDetails.TYPE_MAP[policy['type']]

    # Strip grit placeholders and per-line whitespace from the description.
    self.desc = '\n'.join(
        map(str.strip,
            PolicyDetails._RemovePlaceholders(policy['desc']).splitlines()))
    self.caption = PolicyDetails._RemovePlaceholders(policy['caption'])
    self.max_size = policy.get('max_size', 0)

    items = policy.get('items')
    if items is None:
      self.items = None
    else:
      self.items = [PolicyDetails.EnumItem(entry) for entry in items]

  # Matches a grit <ph> placeholder, capturing its <ex> example when present.
  PH_PATTERN = re.compile('<ph[^>]*>([^<]*|[^<]*<ex>([^<]*)</ex>[^<]*)</ph>')

  def _CheckTagsValidity(self, valid_tags):
    """Raises RuntimeError unless |self.tags| is a list of known risk tags."""
    if self.tags is None:
      raise RuntimeError('Policy ' + self.name + ' has to contain a list of '
                         'tags!\n An empty list is also valid but means '
                         'setting this policy can never harm the user\'s '
                         'privacy or security.\n')
    for tag in self.tags:
      if not tag in valid_tags:
        raise RuntimeError('Invalid Tag:' + tag + '!\n'
                           'Chose a valid tag from \'risk_tag_definitions\' (a '
                           'subproperty of root in policy_templates.json)!')

  # Simplistic grit placeholder stripper.
  @staticmethod
  def _RemovePlaceholders(text):
    result = ''
    pos = 0
    for m in PolicyDetails.PH_PATTERN.finditer(text):
      result += text[pos:m.start(0)]
      # Prefer the <ex> example text when the placeholder provides one.
      result += m.group(2) or m.group(1)
      pos = m.end(0)
    result += text[pos:]
    return result
class PolicyAtomicGroup:
  """Parses a policy atomic group and caches its name and policy names."""

  def __init__(self, policy_group, available_policies,
               policies_already_in_group):
    """Caches one policy_atomic_group_definitions entry.

    |policy_group|: the group definition dict (id, name, policies).
    |available_policies|: names of all known policies, for validation.
    |policies_already_in_group|: mutable set of names already claimed by
        another group; updated in place so each policy joins one group only.
    Raises RuntimeError on an empty group, an unknown policy, or a policy
    that already belongs to another group.
    """
    self.id = policy_group['id']
    self.name = policy_group['name']
    self.policies = policy_group.get('policies', None)
    self._CheckPoliciesValidity(available_policies, policies_already_in_group)

  def _CheckPoliciesValidity(self, available_policies,
                             policies_already_in_group):
    """Validates the group's policy list; see __init__ for raised errors."""
    # Covers both a missing 'policies' key (None) and an empty list.
    if not self.policies:
      raise RuntimeError('Atomic policy group ' + self.name +
                         ' has to contain a list of '
                         'policies!\n')
    for policy in self.policies:
      if policy in policies_already_in_group:
        raise RuntimeError('Policy: ' + policy +
                           ' cannot be in more than one atomic group '
                           'in policy_templates.json)!')
      policies_already_in_group.add(policy)
      if not policy in available_policies:
        raise RuntimeError('Invalid policy: ' + policy + ' in atomic group ' +
                           self.name + '.\n')
def ParseVersionFile(version_path):
  """Returns the major Chrome version from a src/chrome/VERSION file.

  |version_path|: path to a file of KEY=VALUE lines.
  Raises RuntimeError if no MAJOR line is present.
  """
  # Use a context manager so the handle is closed even on error; the
  # original left the file object dangling until GC.
  with open(version_path, 'r') as f:
    for line in f:
      key, val = line.rstrip('\r\n').split('=', 1)
      if key == 'MAJOR':
        return int(val)
  raise RuntimeError('VERSION file does not contain major version.')
def main():
  """Command-line entry point.

  Parses flags, loads policy_templates.json, builds PolicyDetails /
  PolicyAtomicGroup lists, then emits each requested generated file.
  Returns 0 on success, 2 on argument errors (suitable for sys.exit).
  """
  parser = ArgumentParser(usage=__doc__)
  parser.add_argument(
      '--pch',
      '--policy-constants-header',
      dest='header_path',
      help='generate header file of policy constants',
      metavar='FILE')
  parser.add_argument(
      '--pcc',
      '--policy-constants-source',
      dest='source_path',
      help='generate source file of policy constants',
      metavar='FILE')
  parser.add_argument(
      '--cpp',
      '--cloud-policy-protobuf',
      dest='cloud_policy_proto_path',
      help='generate cloud policy protobuf file',
      metavar='FILE')
  parser.add_argument(
      '--cpfrp',
      '--cloud-policy-full-runtime-protobuf',
      dest='cloud_policy_full_runtime_proto_path',
      help='generate cloud policy full runtime protobuf',
      metavar='FILE')
  parser.add_argument(
      '--csp',
      '--chrome-settings-protobuf',
      dest='chrome_settings_proto_path',
      help='generate chrome settings protobuf file',
      metavar='FILE')
  parser.add_argument(
      '--policy-common-definitions-protobuf',
      dest='policy_common_definitions_proto_path',
      help='policy common definitions protobuf file path',
      metavar='FILE')
  parser.add_argument(
      '--policy-common-definitions-full-runtime-protobuf',
      dest='policy_common_definitions_full_runtime_proto_path',
      help='generate policy common definitions full runtime protobuf file',
      metavar='FILE')
  parser.add_argument(
      '--csfrp',
      '--chrome-settings-full-runtime-protobuf',
      dest='chrome_settings_full_runtime_proto_path',
      help='generate chrome settings full runtime protobuf',
      metavar='FILE')
  parser.add_argument(
      '--ard',
      '--app-restrictions-definition',
      dest='app_restrictions_path',
      help='generate an XML file as specified by '
      'Android\'s App Restriction Schema',
      metavar='FILE')
  parser.add_argument(
      '--rth',
      '--risk-tag-header',
      dest='risk_header_path',
      help='generate header file for policy risk tags',
      metavar='FILE')
  parser.add_argument(
      '--crospch',
      '--cros-policy-constants-header',
      dest='cros_constants_header_path',
      help='generate header file of policy constants for use in '
      'Chrome OS',
      metavar='FILE')
  parser.add_argument(
      '--crospcc',
      '--cros-policy-constants-source',
      dest='cros_constants_source_path',
      help='generate source file of policy constants for use in '
      'Chrome OS',
      metavar='FILE')
  parser.add_argument(
      '--chrome-version-file',
      dest='chrome_version_file',
      help='path to src/chrome/VERSION',
      metavar='FILE')
  parser.add_argument(
      '--all-chrome-versions',
      action='store_true',
      dest='all_chrome_versions',
      default=False,
      help='do not restrict generated policies by chrome version')
  # NOTE(review): this help text lists "fuchsia" while the module docstring
  # lists "ios" — confirm which platform set is actually supported.
  parser.add_argument(
      '--target-platform',
      dest='target_platform',
      help='the platform the generated code should run on - can be one of'
      '(win, mac, linux, chromeos, fuchsia)',
      metavar='PLATFORM')
  parser.add_argument(
      '--policy-templates-file',
      dest='policy_templates_file',
      help='path to the policy_templates.json input file',
      metavar='FILE')
  args = parser.parse_args()

  # Validate the required flags, reporting all missing ones at once.
  has_arg_error = False
  if not args.target_platform:
    print('Error: Missing --target-platform=<platform>')
    has_arg_error = True
  if not args.policy_templates_file:
    print('Error: Missing'
          ' --policy-templates-file=<path to policy_templates.json>')
    has_arg_error = True
  if not args.chrome_version_file and not args.all_chrome_versions:
    print('Error: Missing'
          ' --chrome-version-file=<path to src/chrome/VERSION>\n'
          '       or --all-chrome-versions')
    has_arg_error = True
  if has_arg_error:
    print('')
    parser.print_help()
    return 2
  version_path = args.chrome_version_file
  target_platform = args.target_platform
  template_file_name = args.policy_templates_file
  # --target-platform accepts "chromeos" as its input because that's what is
  # used within GN. Within policy templates, "chrome_os" is used instead.
  if target_platform == 'chromeos':
    target_platform = 'chrome_os'
  if args.all_chrome_versions:
    chrome_major_version = None
  else:
    chrome_major_version = ParseVersionFile(version_path)
  template_file_contents = _LoadJSONFile(template_file_name)
  risk_tags = RiskTags(template_file_contents)
  # Group entries only organize the UI; they are not policies themselves.
  policy_details = [
      PolicyDetails(policy, chrome_major_version, target_platform,
                    risk_tags.GetValidTags())
      for policy in template_file_contents['policy_definitions']
      if policy['type'] != 'group'
  ]
  risk_tags.ComputeMaxTags(policy_details)
  sorted_policy_details = sorted(policy_details, key=lambda policy: policy.name)
  policy_details_set = list(map((lambda x: x.name), policy_details))
  policies_already_in_group = set()
  policy_atomic_groups = [
      PolicyAtomicGroup(group, policy_details_set, policies_already_in_group)
      for group in template_file_contents['policy_atomic_group_definitions']
  ]
  sorted_policy_atomic_groups = sorted(
      policy_atomic_groups, key=lambda group: group.name)

  def GenerateFile(path, writer, sorted=False, xml=False):
    # Helper: open |path|, emit the do-not-edit banner, then run |writer|.
    # |sorted| (deliberately shadowing the builtin here) selects the
    # name-sorted policy/group lists.
    if path:
      with open(path, 'w') as f:
        _OutputGeneratedWarningHeader(f, template_file_name, xml)
        writer(sorted and sorted_policy_details or policy_details,
               sorted and sorted_policy_atomic_groups or policy_atomic_groups,
               target_platform, f, risk_tags)

  if args.header_path:
    GenerateFile(args.header_path, _WritePolicyConstantHeader, sorted=True)
  if args.source_path:
    GenerateFile(args.source_path, _WritePolicyConstantSource, sorted=True)
  if args.risk_header_path:
    GenerateFile(args.risk_header_path, _WritePolicyRiskTagHeader)
  if args.cloud_policy_proto_path:
    GenerateFile(args.cloud_policy_proto_path, _WriteCloudPolicyProtobuf)
  if (args.policy_common_definitions_full_runtime_proto_path and
      args.policy_common_definitions_proto_path):
    GenerateFile(
        args.policy_common_definitions_full_runtime_proto_path,
        partial(_WritePolicyCommonDefinitionsFullRuntimeProtobuf,
                args.policy_common_definitions_proto_path))
  if args.cloud_policy_full_runtime_proto_path:
    GenerateFile(args.cloud_policy_full_runtime_proto_path,
                 _WriteCloudPolicyFullRuntimeProtobuf)
  if args.chrome_settings_proto_path:
    GenerateFile(args.chrome_settings_proto_path, _WriteChromeSettingsProtobuf)
  if args.chrome_settings_full_runtime_proto_path:
    GenerateFile(args.chrome_settings_full_runtime_proto_path,
                 _WriteChromeSettingsFullRuntimeProtobuf)
  if target_platform == 'android' and args.app_restrictions_path:
    GenerateFile(args.app_restrictions_path, _WriteAppRestrictions, xml=True)
  # Generated code for Chrome OS (unused in Chromium).
  if args.cros_constants_header_path:
    GenerateFile(
        args.cros_constants_header_path,
        _WriteChromeOSPolicyConstantsHeader,
        sorted=True)
  if args.cros_constants_source_path:
    GenerateFile(
        args.cros_constants_source_path,
        _WriteChromeOSPolicyConstantsSource,
        sorted=True)
  return 0
#------------------ shared helpers ---------------------------------#
def _OutputGeneratedWarningHeader(f, template_file_path, xml_style):
left_margin = '//'
if xml_style:
left_margin = ' '
f.write('<?xml version="1.0" encoding="utf-8"?>\n' '<!--\n')
else:
f.write('//\n')
f.write(left_margin + ' DO NOT MODIFY THIS FILE DIRECTLY!\n')
f.write(left_margin + ' IT IS GENERATED BY generate_policy_source.py\n')
f.write(left_margin + ' FROM ' + template_file_path + '\n')
if xml_style:
f.write('-->\n\n')
else:
f.write(left_margin + '\n\n')
COMMENT_WRAPPER = textwrap.TextWrapper()
COMMENT_WRAPPER.width = 80
COMMENT_WRAPPER.initial_indent = '// '
COMMENT_WRAPPER.subsequent_indent = '// '
COMMENT_WRAPPER.replace_whitespace = False
# Writes a comment, each line prefixed by // and wrapped to 80 spaces.
def _OutputComment(f, comment):
for line in comment.splitlines():
if len(line) == 0:
f.write('//')
else:
f.write(COMMENT_WRAPPER.fill(line))
f.write('\n')
def _LoadJSONFile(json_file):
  """Reads |json_file| and returns the parsed object.

  SECURITY NOTE(review): despite the name, the file is parsed with eval(),
  not json.loads() — presumably because policy_templates.json uses Python
  literal syntax. eval() executes arbitrary code, so this must only ever be
  run on trusted, in-tree input; consider ast.literal_eval() if the file is
  literal-only — TODO confirm.
  """
  with open(json_file, 'r') as f:
    text = f.read()
  return eval(text)
#------------------ policy constants header ------------------------#
def _WritePolicyConstantHeader(policies, policy_atomic_groups, target_platform,
                               f, risk_tags):
  """Writes the C++ header declaring policy key names, atomic group tables,
  schema accessors and per-protobuf-type policy access structs."""
  f.write('#ifndef CHROME_COMMON_POLICY_CONSTANTS_H_\n'
          '#define CHROME_COMMON_POLICY_CONSTANTS_H_\n'
          '\n'
          '#include <cstdint>\n'
          '#include <string>\n'
          '\n'
          '#include "base/values.h"\n'
          '#include "components/policy/core/common/policy_details.h"\n'
          '#include "components/policy/core/common/policy_map.h"\n'
          '#include "components/policy/proto/cloud_policy.pb.h"\n'
          '\n'
          'namespace policy {\n'
          '\n'
          'namespace internal {\n'
          'struct SchemaData;\n'
          '}\n\n')
  # The registry key declaration only exists in the Windows build.
  if target_platform == 'win':
    f.write('// The windows registry path where Chrome policy '
            'configuration resides.\n'
            'extern const wchar_t kRegistryChromePolicyKey[];\n')
  f.write('#if defined (OS_CHROMEOS)\n'
          '// Sets default values for enterprise users.\n'
          'void SetEnterpriseUsersDefaults(PolicyMap* policy_map);\n'
          '#endif\n'
          '\n'
          '// Returns the PolicyDetails for |policy| if |policy| is a known\n'
          '// Chrome policy, otherwise returns NULL.\n'
          'const PolicyDetails* GetChromePolicyDetails('
          'const std::string& policy);\n'
          '\n'
          '// Returns the schema data of the Chrome policy schema.\n'
          'const internal::SchemaData* GetChromeSchemaData();\n'
          '\n')
  f.write('// Key names for the policy settings.\n' 'namespace key {\n\n')
  for policy in policies:
    # TODO(joaodasilva): Include only supported policies in
    # configuration_policy_handler.cc and configuration_policy_handler_list.cc
    # so that these names can be conditional on 'policy.is_supported'.
    # http://crbug.com/223616
    f.write('extern const char k' + policy.name + '[];\n')
  f.write('\n} // namespace key\n\n')
  f.write('// Group names for the policy settings.\n' 'namespace group {\n\n')
  for group in policy_atomic_groups:
    f.write('extern const char k' + group.name + '[];\n')
  f.write('\n} // namespace group\n\n')
  f.write('struct AtomicGroup {\n'
          ' const short id;\n'
          ' const char* policy_group;\n'
          ' const char* const* policies;\n'
          '};\n\n')
  f.write('extern const AtomicGroup kPolicyAtomicGroupMappings[];\n\n')
  f.write('extern const size_t kPolicyAtomicGroupMappingsLength;\n\n')
  f.write('enum class StringPolicyType {\n'
          ' STRING,\n'
          ' JSON,\n'
          ' EXTERNAL,\n'
          '};\n\n')
  # User policy proto pointers, one struct for each protobuf type.
  protobuf_types = _GetProtobufTypes(policies)
  for protobuf_type in protobuf_types:
    _WriteChromePolicyAccessHeader(f, protobuf_type)
  f.write('constexpr int64_t kDevicePolicyExternalDataResourceCacheSize = %d;\n'
          % _ComputeTotalDevicePolicyExternalDataMaxSize(policies))
  f.write('\n} // namespace policy\n\n'
          '#endif // CHROME_COMMON_POLICY_CONSTANTS_H_\n')
def _WriteChromePolicyAccessHeader(f, protobuf_type):
f.write('// Read access to the protobufs of all supported %s user policies.\n'
% protobuf_type.lower())
f.write('struct %sPolicyAccess {\n' % protobuf_type)
f.write(' const char* policy_key;\n'
' bool (enterprise_management::CloudPolicySettings::'
'*has_proto)() const;\n'
' const enterprise_management::%sPolicyProto&\n'
' (enterprise_management::CloudPolicySettings::'
'*get_proto)() const;\n' % protobuf_type)
if protobuf_type == 'String':
f.write(' const StringPolicyType type;\n')
f.write('};\n')
f.write('extern const %sPolicyAccess k%sPolicyAccess[];\n\n' %
(protobuf_type, protobuf_type))
def _ComputeTotalDevicePolicyExternalDataMaxSize(policies):
total_device_policy_external_data_max_size = 0
for policy in policies:
if policy.is_device_only and policy.policy_type == 'TYPE_EXTERNAL':
total_device_policy_external_data_max_size += policy.max_size
return total_device_policy_external_data_max_size
#------------------ policy constants source ------------------------#
# Dedup key for schema nodes that share the same structure.
SchemaNodeKey = namedtuple('SchemaNodeKey',
                           'schema_type extra is_sensitive_value')
# One row of the generated internal::SchemaNode table; |comments| collects
# the human-readable annotations emitted next to the row.
SchemaNode = namedtuple(
    'SchemaNode',
    'schema_type extra is_sensitive_value has_sensitive_children comments')
# One row of the PropertyNode table: property name -> schema index.
PropertyNode = namedtuple('PropertyNode', 'key schema')
# One row of the PropertiesNode table: index ranges into the property and
# required-property tables, plus the additionalProperties schema index.
PropertiesNode = namedtuple(
    'PropertiesNode',
    'begin end pattern_end required_begin required_end additional name')
# A (first, second) value-restriction pair (range bounds or enum offsets).
RestrictionNode = namedtuple('RestrictionNode', 'first second')

# A mapping of the simple schema types to base::Value::Types.
SIMPLE_SCHEMA_NAME_MAP = {
    'boolean': 'Type::BOOLEAN',
    'integer': 'Type::INTEGER',
    'null': 'Type::NONE',
    'number': 'Type::DOUBLE',
    'string': 'Type::STRING',
}

# Sentinel and range limits enforced when emitting the C++ tables, matching
# the field widths of the generated structs.
INVALID_INDEX = -1
MIN_INDEX = -1
MAX_INDEX = (1 << 15) - 1  # signed short in c++
MIN_POLICY_ID = 0
MAX_POLICY_ID = (1 << 16) - 1  # unsigned short
MIN_EXTERNAL_DATA_SIZE = 0
MAX_EXTERNAL_DATA_SIZE = (1 << 32) - 1  # unsigned int32
class SchemaNodesGenerator:
"""Builds the internal structs to represent a JSON schema."""
def __init__(self, shared_strings):
"""Creates a new generator.
|shared_strings| is a map of strings to a C expression that evaluates to
that string at runtime. This mapping can be used to reuse existing string
constants."""
self.shared_strings = shared_strings
self.key_index_map = {} # |SchemaNodeKey| -> index in |schema_nodes|
self.schema_nodes = [] # List of |SchemaNode|s
self.property_nodes = [] # List of |PropertyNode|s
self.properties_nodes = [] # List of |PropertiesNode|s
self.restriction_nodes = [] # List of |RestrictionNode|s
self.required_properties = []
self.int_enums = []
self.string_enums = []
self.ranges = {}
self.id_map = {}
def GetString(self, s):
if s in self.shared_strings:
return self.shared_strings[s]
# Generate JSON escaped string, which is slightly different from desired
# C/C++ escaped string. Known differences includes unicode escaping format.
return json.dumps(s)
def AppendSchema(self, schema_type, extra, is_sensitive_value, comment=''):
# Find existing schema node with same structure.
key_node = SchemaNodeKey(schema_type, extra, is_sensitive_value)
if key_node in self.key_index_map:
index = self.key_index_map[key_node]
if comment:
self.schema_nodes[index].comments.add(comment)
return index
# Create new schema node.
index = len(self.schema_nodes)
comments = {comment} if comment else set()
schema_node = SchemaNode(schema_type, extra, is_sensitive_value, False,
comments)
self.schema_nodes.append(schema_node)
self.key_index_map[key_node] = index
return index
def AppendRestriction(self, first, second):
r = RestrictionNode(str(first), str(second))
if not r in self.ranges:
self.ranges[r] = len(self.restriction_nodes)
self.restriction_nodes.append(r)
return self.ranges[r]
def GetSimpleType(self, name, is_sensitive_value):
return self.AppendSchema(SIMPLE_SCHEMA_NAME_MAP[name], INVALID_INDEX,
is_sensitive_value, 'simple type: ' + name)
def SchemaHaveRestriction(self, schema):
return any(keyword in schema
for keyword in ['minimum', 'maximum', 'enum', 'pattern'])
def IsConsecutiveInterval(self, seq):
sortedSeq = sorted(seq)
return all(
sortedSeq[i] + 1 == sortedSeq[i + 1] for i in range(len(sortedSeq) - 1))
def GetEnumIntegerType(self, schema, is_sensitive_value, name):
assert all(type(x) == int for x in schema['enum'])
possible_values = schema['enum']
if self.IsConsecutiveInterval(possible_values):
index = self.AppendRestriction(max(possible_values), min(possible_values))
return self.AppendSchema(
'Type::INTEGER', index, is_sensitive_value,
'integer with enumeration restriction (use range instead): %s' % name)
offset_begin = len(self.int_enums)
self.int_enums += possible_values
offset_end = len(self.int_enums)
return self.AppendSchema('Type::INTEGER',
self.AppendRestriction(offset_begin, offset_end),
is_sensitive_value,
'integer with enumeration restriction: %s' % name)
def GetEnumStringType(self, schema, is_sensitive_value, name):
assert all(type(x) == str for x in schema['enum'])
offset_begin = len(self.string_enums)
self.string_enums += schema['enum']
offset_end = len(self.string_enums)
return self.AppendSchema('Type::STRING',
self.AppendRestriction(offset_begin, offset_end),
is_sensitive_value,
'string with enumeration restriction: %s' % name)
def GetEnumType(self, schema, is_sensitive_value, name):
if len(schema['enum']) == 0:
raise RuntimeError('Empty enumeration in %s' % name)
elif schema['type'] == 'integer':
return self.GetEnumIntegerType(schema, is_sensitive_value, name)
elif schema['type'] == 'string':
return self.GetEnumStringType(schema, is_sensitive_value, name)
else:
raise RuntimeError('Unknown enumeration type in %s' % name)
def GetPatternType(self, schema, is_sensitive_value, name):
if schema['type'] != 'string':
raise RuntimeError('Unknown pattern type in %s' % name)
pattern = schema['pattern']
# Try to compile the pattern to validate it, note that the syntax used
# here might be slightly different from re2.
# TODO(binjin): Add a python wrapper of re2 and use it here.
re.compile(pattern)
index = len(self.string_enums)
self.string_enums.append(pattern)
return self.AppendSchema('Type::STRING', self.AppendRestriction(
index, index), is_sensitive_value,
'string with pattern restriction: %s' % name)
def GetRangedType(self, schema, is_sensitive_value, name):
if schema['type'] != 'integer':
raise RuntimeError('Unknown ranged type in %s' % name)
min_value_set, max_value_set = False, False
if 'minimum' in schema:
min_value = int(schema['minimum'])
min_value_set = True
if 'maximum' in schema:
max_value = int(schema['maximum'])
max_value_set = True
if min_value_set and max_value_set and min_value > max_value:
raise RuntimeError('Invalid ranged type in %s' % name)
index = self.AppendRestriction(
str(max_value) if max_value_set else 'INT_MAX',
str(min_value) if min_value_set else 'INT_MIN')
return self.AppendSchema('Type::INTEGER', index, is_sensitive_value,
'integer with ranged restriction: %s' % name)
def Generate(self, schema, name):
"""Generates the structs for the given schema.
|schema|: a valid JSON schema in a dictionary.
|name|: the name of the current node, for the generated comments."""
if '$ref' in schema:
if 'id' in schema:
raise RuntimeError("Schemas with a $ref can't have an id")
if not isinstance(schema['$ref'], string_type):
raise RuntimeError("$ref attribute must be a string")
return schema['$ref']
is_sensitive_value = schema.get('sensitiveValue', False)
assert type(is_sensitive_value) is bool
if schema['type'] in SIMPLE_SCHEMA_NAME_MAP:
if not self.SchemaHaveRestriction(schema):
# Simple types use shared nodes.
return self.GetSimpleType(schema['type'], is_sensitive_value)
elif 'enum' in schema:
return self.GetEnumType(schema, is_sensitive_value, name)
elif 'pattern' in schema:
return self.GetPatternType(schema, is_sensitive_value, name)
else:
return self.GetRangedType(schema, is_sensitive_value, name)
if schema['type'] == 'array':
return self.AppendSchema(
'Type::LIST',
self.GenerateAndCollectID(schema['items'], 'items of ' + name),
is_sensitive_value)
elif schema['type'] == 'object':
# Reserve an index first, so that dictionaries come before their
# properties. This makes sure that the root node is the first in the
# SchemaNodes array.
# This however, prevents de-duplication for object schemas since we could
# only determine duplicates after all child schema nodes are generated as
# well and then we couldn't remove the newly created schema node without
# invalidating all child schema indices.
index = len(self.schema_nodes)
self.schema_nodes.append(
SchemaNode('Type::DICTIONARY', INVALID_INDEX, is_sensitive_value,
False, {name}))
if 'additionalProperties' in schema:
additionalProperties = self.GenerateAndCollectID(
schema['additionalProperties'], 'additionalProperties of ' + name)
else:
additionalProperties = INVALID_INDEX
# Properties must be sorted by name, for the binary search lookup.
# Note that |properties| must be evaluated immediately, so that all the
# recursive calls to Generate() append the necessary child nodes; if
# |properties| were a generator then this wouldn't work.
sorted_properties = sorted(schema.get('properties', {}).items())
properties = [
PropertyNode(
self.GetString(key), self.GenerateAndCollectID(subschema, key))
for key, subschema in sorted_properties
]
pattern_properties = []
for pattern, subschema in schema.get('patternProperties', {}).items():
pattern_properties.append(
PropertyNode(
self.GetString(pattern),
self.GenerateAndCollectID(subschema, pattern)))
begin = len(self.property_nodes)
self.property_nodes += properties
end = len(self.property_nodes)
self.property_nodes += pattern_properties
pattern_end = len(self.property_nodes)
if index == 0:
self.root_properties_begin = begin
self.root_properties_end = end
required_begin = len(self.required_properties)
required_properties = schema.get('required', [])
assert type(required_properties) is list
assert all(type(x) == str for x in required_properties)
self.required_properties += required_properties
required_end = len(self.required_properties)
# Check that each string in |required_properties| is in |properties|.
properties = schema.get('properties', {})
for name in required_properties:
assert name in properties
extra = len(self.properties_nodes)
self.properties_nodes.append(
PropertiesNode(begin, end, pattern_end, required_begin, required_end,
additionalProperties, name))
# Update index at |extra| now, since that was filled with a dummy value
# when the schema node was created.
self.schema_nodes[index] = self.schema_nodes[index]._replace(extra=extra)
return index
else:
assert False
def GenerateAndCollectID(self, schema, name):
"""A wrapper of Generate(), will take the return value, check and add 'id'
attribute to self.id_map. The wrapper needs to be used for every call to
Generate().
"""
index = self.Generate(schema, name)
if 'id' not in schema:
return index
id_str = schema['id']
if id_str in self.id_map:
raise RuntimeError('Duplicated id: ' + id_str)
self.id_map[id_str] = index
return index
def Write(self, f):
"""Writes the generated structs to the given file.
|f| an open file to write to."""
f.write('const internal::SchemaNode kSchemas[] = {\n'
'// Type' + ' ' * 27 +
'Extra IsSensitiveValue HasSensitiveChildren\n')
for schema_node in self.schema_nodes:
assert schema_node.extra >= MIN_INDEX and schema_node.extra <= MAX_INDEX
comment = ('\n' + ' ' * 69 + '// ').join(schema_node.comments)
f.write(' { base::Value::%-19s %4s %-16s %-5s }, // %s\n' %
(schema_node.schema_type + ',', str(schema_node.extra) + ',',
str(schema_node.is_sensitive_value).lower() + ',',
str(schema_node.has_sensitive_children).lower(), comment))
f.write('};\n\n')
if self.property_nodes:
f.write('const internal::PropertyNode kPropertyNodes[] = {\n'
'// Property' + ' ' * 61 + 'Schema\n')
for property_node in self.property_nodes:
f.write(' { %-64s %6d },\n' % (property_node.key + ',',
property_node.schema))
f.write('};\n\n')
if self.properties_nodes:
f.write('const internal::PropertiesNode kProperties[] = {\n'
'// Begin End PatternEnd RequiredBegin RequiredEnd'
' Additional Properties\n')
for properties_node in self.properties_nodes:
for i in range(0, len(properties_node) - 1):
assert (properties_node[i] >= MIN_INDEX and
properties_node[i] <= MAX_INDEX)
f.write(
' { %5d, %5d, %5d, %5d, %10d, %5d }, // %s\n' % properties_node)
f.write('};\n\n')
if self.restriction_nodes:
f.write('const internal::RestrictionNode kRestrictionNodes[] = {\n')
f.write('// FIRST, SECOND\n')
for restriction_node in self.restriction_nodes:
f.write(' {{ %-8s %4s}},\n' % (restriction_node.first + ',',
restriction_node.second))
f.write('};\n\n')
if self.required_properties:
f.write('const char* const kRequiredProperties[] = {\n')
for required_property in self.required_properties:
f.write(' %s,\n' % self.GetString(required_property))
f.write('};\n\n')
if self.int_enums:
f.write('const int kIntegerEnumerations[] = {\n')
for possible_values in self.int_enums:
f.write(' %d,\n' % possible_values)
f.write('};\n\n')
if self.string_enums:
f.write('const char* const kStringEnumerations[] = {\n')
for possible_values in self.string_enums:
f.write(' %s,\n' % self.GetString(possible_values))
f.write('};\n\n')
f.write('const internal::SchemaData kChromeSchemaData = {\n'
' kSchemas,\n')
f.write(' kPropertyNodes,\n' if self.property_nodes else ' NULL,\n')
f.write(' kProperties,\n' if self.properties_nodes else ' NULL,\n')
f.write(' kRestrictionNodes,\n' if self.restriction_nodes else ' NULL,\n')
f.write(' kRequiredProperties,\n' if self
.required_properties else ' NULL,\n')
f.write(' kIntegerEnumerations,\n' if self.int_enums else ' NULL,\n')
f.write(' kStringEnumerations,\n' if self.string_enums else ' NULL,\n')
f.write(' %d, // validation_schema root index\n' %
self.validation_schema_root_index)
f.write('};\n\n')
def GetByID(self, id_str):
if not isinstance(id_str, string_type):
return id_str
if id_str not in self.id_map:
raise RuntimeError('Invalid $ref: ' + id_str)
return self.id_map[id_str]
def ResolveID(self, index, tuple_type, params):
simple_tuple = params[:index] + (self.GetByID(
params[index]),) + params[index + 1:]
return tuple_type(*simple_tuple)
def ResolveReferences(self):
"""Resolve reference mapping, required to be called after Generate()
After calling Generate(), the type of indices used in schema structures
might be either int or string. An int type suggests that it's a resolved
index, but for string type it's unresolved. Resolving a reference is as
simple as looking up for corresponding ID in self.id_map, and replace the
old index with the mapped index.
"""
self.schema_nodes = list(
map(partial(self.ResolveID, 1, SchemaNode), self.schema_nodes))
self.property_nodes = list(
map(partial(self.ResolveID, 1, PropertyNode), self.property_nodes))
self.properties_nodes = list(
map(partial(self.ResolveID, 3, PropertiesNode), self.properties_nodes))
def FindSensitiveChildren(self):
  """Wrapper that runs FindSensitiveChildrenRecursive() from the root node
  (index 0), if any schema nodes were generated."""
  if not self.schema_nodes:
    return
  self.FindSensitiveChildrenRecursive(0, set())
def FindSensitiveChildrenRecursive(self, index, handled_schema_nodes):
  """Recursively compute |has_sensitive_children| for the schema node at
  |index| and all its child elements. A schema has sensitive children if any
  of its children has |is_sensitive_value|==True or has sensitive children
  itself.

  |handled_schema_nodes| is the set of indices already visited, so shared
  subtrees (resolved $refs) and cycles are processed at most once.
  """
  node = self.schema_nodes[index]
  if index in handled_schema_nodes:
    # Already visited: reuse the result computed (or being computed) for it.
    return node.has_sensitive_children or node.is_sensitive_value
  handled_schema_nodes.add(index)
  has_sensitive_children = False
  if node.schema_type == 'Type::DICTIONARY':
    properties_node = self.properties_nodes[node.extra]
    # Iterate through properties and patternProperties.
    # NOTE(review): the upper bound |pattern_end - 1| excludes the last
    # property node of the [begin, pattern_end) range -- presumably a
    # sentinel entry appended by Generate(); confirm against the layout
    # that Generate() actually writes.
    for property_index in range(properties_node.begin,
                                properties_node.pattern_end - 1):
      sub_index = self.property_nodes[property_index].schema
      has_sensitive_children |= self.FindSensitiveChildrenRecursive(
          sub_index, handled_schema_nodes)
    # AdditionalProperties
    if properties_node.additional != INVALID_INDEX:
      sub_index = properties_node.additional
      has_sensitive_children |= self.FindSensitiveChildrenRecursive(
          sub_index, handled_schema_nodes)
  elif node.schema_type == 'Type::LIST':
    # Lists carry their single item schema index in |extra|.
    sub_index = node.extra
    has_sensitive_children |= self.FindSensitiveChildrenRecursive(
        sub_index, handled_schema_nodes)
  if has_sensitive_children:
    # Nodes are immutable tuples (note the _replace), so write back a copy.
    self.schema_nodes[index] = self.schema_nodes[index]._replace(
        has_sensitive_children=True)
  return has_sensitive_children or node.is_sensitive_value
def _GenerateDefaultValue(value):
"""Converts a JSON object into a base::Value entry. Returns a tuple, the first
entry being a list of declaration statements to define the variable, the
second entry being a way to access the variable.
If no definition is needed, the first return value will be an empty list. If
any error occurs, the second return value will be None (ie, no way to fetch
the value).
|value|: The deserialized value to convert to base::Value."""
if type(value) == bool or type(value) == int:
return [], 'std::make_unique<base::Value>(%s)' % json.dumps(value)
elif type(value) == str:
return [], 'std::make_unique<base::Value>("%s")' % value
elif type(value) == list:
setup = ['auto default_value = std::make_unique<base::ListValue>();']
for entry in value:
decl, fetch = _GenerateDefaultValue(entry)
# Nested lists are not supported.
if decl:
return [], None
setup.append('default_value->Append(%s);' % fetch)
return setup, 'std::move(default_value)'
return [], None
def _WritePolicyConstantSource(policies, policy_atomic_groups, target_platform,
                               f, risk_tags):
  """Writes policy_constants.cc to |f|: the Chrome schema data, the
  kChromePolicyDetails table, registry keys (Windows only), enterprise
  defaults (Chrome OS only), policy name constants and atomic group tables.
  """
  # Fixed preamble of the generated C++ file.
  f.write('''#include "components/policy/policy_constants.h"
#include <algorithm>
#include <climits>
#include <memory>
#include "base/logging.h"
#include "base/stl_util.h" // base::size()
#include "build/branding_buildflags.h"
#include "components/policy/core/common/policy_types.h"
#include "components/policy/core/common/schema_internal.h"
#include "components/policy/proto/cloud_policy.pb.h"
#include "components/policy/risk_tag.h"
namespace em = enterprise_management;
namespace policy {
''')
  # Generate the Chrome schema.
  chrome_schema = {
      'type': 'object',
      'properties': {},
  }
  chrome_validation_schema = {
      'type': 'object',
      'properties': {},
  }
  # |shared_strings| maps each policy name onto the C++ constant holding it,
  # so the schema generator can reuse the key:: pointers instead of
  # duplicating the name strings.
  shared_strings = {}
  for policy in policies:
    shared_strings[policy.name] = "key::k%s" % policy.name
    if policy.is_supported:
      chrome_schema['properties'][policy.name] = policy.schema
      if policy.validation_schema is not None:
        (chrome_validation_schema['properties'][policy.name]
        ) = policy.validation_schema
  # Note: this list must be kept in sync with the known property list of the
  # Chrome schema, so that binary searching in the PropertyNode array gets the
  # right index on this array as well. See the implementation of
  # GetChromePolicyDetails() below.
  # TODO(crbug.com/1074336): kChromePolicyDetails shouldn't be declare if there
  # is no policy.
  f.write(
      '''const __attribute__((unused)) PolicyDetails kChromePolicyDetails[] = {
// is_deprecated is_future is_device_policy id max_external_data_size, risk tags
''')
  for policy in policies:
    if policy.is_supported:
      # Generator-time sanity checks on the template data (build-time script,
      # so assert is acceptable here).
      assert policy.id >= MIN_POLICY_ID and policy.id <= MAX_POLICY_ID
      assert (policy.max_size >= MIN_EXTERNAL_DATA_SIZE and
              policy.max_size <= MAX_EXTERNAL_DATA_SIZE)
      f.write(' // %s\n' % policy.name)
      f.write(' { %-14s%-10s%-17s%4s,%22s, %s },\n' %
              ('true,' if policy.is_deprecated else 'false,',
               'true,' if policy.is_future else 'false, ',
               'true,' if policy.is_device_only else 'false,', policy.id,
               policy.max_size, risk_tags.ToInitString(policy.tags)))
  f.write('};\n\n')
  # Emit the schema node tables (and optional validation schema).
  schema_generator = SchemaNodesGenerator(shared_strings)
  schema_generator.GenerateAndCollectID(chrome_schema, 'root node')
  if chrome_validation_schema['properties']:
    schema_generator.validation_schema_root_index = \
        schema_generator.GenerateAndCollectID(chrome_validation_schema,
                                              'validation_schema root node')
  else:
    schema_generator.validation_schema_root_index = INVALID_INDEX
  schema_generator.ResolveReferences()
  schema_generator.FindSensitiveChildren()
  schema_generator.Write(f)
  f.write('\n')
  # Comparator used by the binary search in GetChromePolicyDetails().
  if schema_generator.property_nodes:
    f.write('namespace {\n')
    f.write('bool CompareKeys(const internal::PropertyNode& node,\n'
            ' const std::string& key) {\n'
            ' return node.key < key;\n'
            '}\n\n')
    f.write('} // namespace\n\n')
  # Windows registry key, branded vs. Chromium.
  if target_platform == 'win':
    f.write('#if BUILDFLAG(GOOGLE_CHROME_BRANDING)\n'
            'const wchar_t kRegistryChromePolicyKey[] = '
            'L"' + CHROME_POLICY_KEY + '";\n'
            '#else\n'
            'const wchar_t kRegistryChromePolicyKey[] = '
            'L"' + CHROMIUM_POLICY_KEY + '";\n'
            '#endif\n\n')
  f.write('const internal::SchemaData* GetChromeSchemaData() {\n'
          ' return &kChromeSchemaData;\n'
          '}\n\n')
  # Chrome OS enterprise defaults: seed unset policies in the PolicyMap.
  f.write('#if defined (OS_CHROMEOS)\n'
          'void SetEnterpriseUsersDefaults(PolicyMap* policy_map) {\n')
  for policy in policies:
    if policy.has_enterprise_default and policy.is_supported:
      declare_default_stmts, fetch_default = _GenerateDefaultValue(
          policy.enterprise_default)
      if not fetch_default:
        raise RuntimeError(
            'Type %s of policy %s is not supported at '
            'enterprise defaults' % (policy.policy_type, policy.name))
      # Convert declare_default_stmts to a string with the correct indentation.
      if declare_default_stmts:
        declare_default = ' %s\n' % '\n '.join(declare_default_stmts)
      else:
        declare_default = ''
      f.write(
          ' if (!policy_map->Get(key::k%s)) {\n'
          '%s'
          ' policy_map->Set(key::k%s,\n'
          ' POLICY_LEVEL_MANDATORY,\n'
          ' POLICY_SCOPE_USER,\n'
          ' POLICY_SOURCE_ENTERPRISE_DEFAULT,\n'
          ' %s,\n'
          ' nullptr);\n'
          ' }\n' % (policy.name, declare_default, policy.name, fetch_default))
  f.write('}\n' '#endif\n\n')
  # Name -> PolicyDetails lookup via binary search on kPropertyNodes.
  f.write('const PolicyDetails* GetChromePolicyDetails('
          'const std::string& policy) {\n')
  if schema_generator.property_nodes:
    f.write(' // First index in kPropertyNodes of the Chrome policies.\n'
            ' static const int begin_index = %s;\n'
            ' // One-past-the-end of the Chrome policies in kPropertyNodes.\n'
            ' static const int end_index = %s;\n' %
            (schema_generator.root_properties_begin,
             schema_generator.root_properties_end))
    f.write(''' const internal::PropertyNode* begin =
kPropertyNodes + begin_index;
const internal::PropertyNode* end = kPropertyNodes + end_index;
const internal::PropertyNode* it =
std::lower_bound(begin, end, policy, CompareKeys);
if (it == end || it->key != policy)
return nullptr;
// This relies on kPropertyNodes from begin_index to end_index
// having exactly the same policies (and in the same order) as
// kChromePolicyDetails, so that binary searching on the first
// gets the same results as a binary search on the second would.
// However, kPropertyNodes has the policy names and
// kChromePolicyDetails doesn't, so we obtain the index into
// the second array by searching the first to avoid duplicating
// the policy name pointers.
// Offsetting |it| from |begin| here obtains the index we're
// looking for.
size_t index = it - begin;
CHECK_LT(index, base::size(kChromePolicyDetails));
return kChromePolicyDetails + index;
''')
  else:
    f.write('return nullptr;')
  f.write('}\n\n')
  # Policy name constants.
  f.write('namespace key {\n\n')
  for policy in policies:
    # TODO(joaodasilva): Include only supported policies in
    # configuration_policy_handler.cc and configuration_policy_handler_list.cc
    # so that these names can be conditional on 'policy.is_supported'.
    # http://crbug.com/223616
    f.write('const char k{name}[] = "{name}";\n'.format(name=policy.name))
  f.write('\n} // namespace key\n\n')
  # Atomic group name constants plus the per-group policy-pointer arrays.
  f.write('namespace group {\n\n')
  for group in policy_atomic_groups:
    f.write('const char k{name}[] = "{name}";\n'.format(name=group.name))
  f.write('\n')
  f.write('namespace {\n\n')
  for group in policy_atomic_groups:
    f.write('const char* const %s[] = {' % (group.name))
    for policy in group.policies:
      f.write('key::k%s, ' % (policy))
    f.write('nullptr};\n')
  f.write('\n} // namespace\n')
  f.write('\n} // namespace group\n\n')
  atomic_groups_length = 0
  f.write('const AtomicGroup kPolicyAtomicGroupMappings[] = {\n')
  for group in policy_atomic_groups:
    atomic_groups_length += 1
    f.write(' {')
    f.write(' {id}, group::k{name}, group::{name}'.format(
        id=group.id, name=group.name))
    f.write(' },\n')
  f.write('};\n\n')
  f.write('const size_t kPolicyAtomicGroupMappingsLength = %s;\n\n' %
          (atomic_groups_length))
  # Per-protobuf-type accessor tables for supported user policies.
  supported_user_policies = [
      p for p in policies if p.is_supported and not p.is_device_only
  ]
  protobuf_types = _GetProtobufTypes(supported_user_policies)
  for protobuf_type in protobuf_types:
    _WriteChromePolicyAccessSource(supported_user_policies, f, protobuf_type)
  f.write('\n} // namespace policy\n')
# Return the StringPolicyType enum value for a particular policy type.
def _GetStringPolicyType(policy_type):
if policy_type == 'Type::STRING':
return 'StringPolicyType::STRING'
elif policy_type == 'Type::DICTIONARY':
return 'StringPolicyType::JSON'
elif policy_type == 'TYPE_EXTERNAL':
return 'StringPolicyType::EXTERNAL'
raise RuntimeError('Invalid string type: ' + policy_type + '!\n')
# Writes an array that contains the pointers to the proto field for each policy
# in |policies| of the given |protobuf_type|.
def _WriteChromePolicyAccessSource(policies, f, protobuf_type):
f.write('const %sPolicyAccess k%sPolicyAccess[] = {\n' % (protobuf_type,
protobuf_type))
extra_args = ''
for policy in policies:
if policy.policy_protobuf_type == protobuf_type:
name = policy.name
if protobuf_type == 'String':
extra_args = ',\n ' + _GetStringPolicyType(policy.policy_type)
f.write(' {key::k%s,\n'
' &em::CloudPolicySettings::has_%s,\n'
' &em::CloudPolicySettings::%s%s},\n' %
(name, name.lower(), name.lower(), extra_args))
# The list is nullptr-terminated.
f.write(' {nullptr, nullptr, nullptr},\n' '};\n\n')
#------------------ policy risk tag header -------------------------#
class RiskTags(object):
  '''Generates files and strings to translate the parsed risk tags.'''
  # TODO(fhorschig|tnagel): Add, Check & Generate translation descriptions.

  def __init__(self, template_file_contents):
    # Highest number of risk tags on any single policy; only meaningful after
    # ComputeMaxTags() has been called.
    self.max_tags = None
    self.enum_for_tag = OrderedDict()  # Ordered by severity as stated in JSON.
    self._ReadRiskTagMetaData(template_file_contents)

  def GenerateEnum(self):
    """Returns the C++ text of the RiskTag enum: one value per known tag
    plus the RISK_TAG_COUNT / RISK_TAG_NONE sentinels."""
    values = [' ' + self.enum_for_tag[tag] for tag in self.enum_for_tag]
    values.append(' RISK_TAG_COUNT')
    values.append(' RISK_TAG_NONE')
    enum_text = 'enum RiskTag {\n'
    enum_text += ',\n'.join(values) + '\n};\n'
    return enum_text

  def GetMaxTags(self):
    """Returns ComputeMaxTags()'s result as a string for code generation."""
    return str(self.max_tags)

  def GetValidTags(self):
    """Returns all known tag names, ordered by severity."""
    return [tag for tag in self.enum_for_tag]

  def ToInitString(self, tags):
    """Renders |tags| as a C++ aggregate initializer, padded with
    RISK_TAG_NONE up to self.max_tags entries and wrapped to fit the
    generated table layout."""
    all_tags = [self._ToEnum(tag) for tag in tags]
    all_tags += ["RISK_TAG_NONE" for missing in range(len(tags), self.max_tags)]
    str_tags = "{ " + ", ".join(all_tags) + " }"
    return "\n ".join(textwrap.wrap(str_tags, 69))

  def ComputeMaxTags(self, policies):
    """Computes the maximum number of risk tags used by any supported policy."""
    self.max_tags = 0
    for policy in policies:
      # `is None` instead of `== None`: identity check is the correct idiom.
      if not policy.is_supported or policy.tags is None:
        continue
      self.max_tags = max(len(policy.tags), self.max_tags)

  def _ToEnum(self, tag):
    # Translate a JSON tag name into its generated enum constant.
    if tag in self.enum_for_tag:
      return self.enum_for_tag[tag]
    raise RuntimeError('Invalid Tag:' + tag + '!\n'
                       'Chose a valid tag from \'risk_tag_definitions\' (a '
                       'subproperty of root in policy_templates.json)!')

  def _ReadRiskTagMetaData(self, template_file_contents):
    # Validate each tag definition and build the name -> enum mapping.
    for tag in template_file_contents['risk_tag_definitions']:
      if tag.get('name', None) is None:
        # Bug fix: this check guards a missing *name*; the old message
        # claimed a missing description.
        raise RuntimeError('Tag in \'risk_tag_definitions\' without '
                           'name found!')
      if tag.get('description', None) is None:
        raise RuntimeError('Tag ' + tag['name'] + ' has no description!')
      if tag.get('user-description', None) is None:
        raise RuntimeError('Tag ' + tag['name'] + ' has no user-description!')
      self.enum_for_tag[tag['name']] = "RISK_TAG_" + tag['name'].replace(
          "-", "_").upper()
def _WritePolicyRiskTagHeader(policies, policy_atomic_groups, target_platform,
f, risk_tags):
f.write('''#ifndef CHROME_COMMON_POLICY_RISK_TAG_H_
#define CHROME_COMMON_POLICY_RISK_TAG_H_
#include <stddef.h>
namespace policy {
// The tag of a policy indicates which impact a policy can have on
// a user's privacy and/or security. Ordered descending by
// impact.
// The explanation of the single tags is stated in
// policy_templates.json within the 'risk_tag_definitions' tag.
''')
f.write(risk_tags.GenerateEnum() + '\n')
f.write('// This constant describes how many risk tags were used by the\n'
'// policy which uses the most risk tags.\n'
'const size_t kMaxRiskTagCount = ' + risk_tags.GetMaxTags() + ';\n'
'\n'
'} // namespace policy\n\n'
'\n'
'#endif // CHROME_COMMON_POLICY_RISK_TAG_H_')
#------------------ policy protobufs -------------------------------#
# This code applies to both Active Directory and Google cloud management.

# Shared .proto preamble for the generated chrome_settings.proto. The
# LITE_RUNTIME option and the common-definitions import are rewritten by the
# "full runtime" writer variants below via str.replace().
CHROME_SETTINGS_PROTO_HEAD = '''
syntax = "proto2";
option optimize_for = LITE_RUNTIME;
package enterprise_management;
// For StringList and PolicyOptions.
import "policy_common_definitions.proto";
'''

# Preamble for the generated cloud_policy.proto.
CLOUD_POLICY_PROTO_HEAD = '''
syntax = "proto2";
option optimize_for = LITE_RUNTIME;
package enterprise_management;
import "policy_common_definitions.proto";
'''

# Field IDs [1..RESERVED_IDS] will not be used in the wrapping protobuf.
RESERVED_IDS = 2
def _WritePolicyProto(f, policy, fields):
  """Writes the <Name>Proto message for one policy to |f| and appends the
  corresponding wrapper-message field declaration to |fields| (mutated in
  place for the caller)."""
  _OutputComment(f, policy.caption + '\n\n' + policy.desc)
  if policy.items is not None:
    _OutputComment(f, '\nValid values:')
    for item in policy.items:
      _OutputComment(f, ' %s: %s' % (str(item.value), item.caption))
  if policy.policy_type == 'Type::DICTIONARY':
    schema_json = json.dumps(policy.schema, sort_keys=True, indent=4,
                             separators=(',', ': '))
    _OutputComment(f, '\nValue schema:\n%s' % schema_json)
  platform_list = ', '.join(sorted(list(policy.platforms.union(
      policy.future_on))))
  _OutputComment(f, '\nSupported on: %s' % platform_list)
  if policy.can_be_recommended and not policy.can_be_mandatory:
    _OutputComment(f, '\nNote: this policy must have a RECOMMENDED ' +
                   'PolicyMode set in PolicyOptions.')
  f.write('message %sProto {\n' % policy.name)
  f.write(' optional PolicyOptions policy_options = 1;\n')
  f.write(' optional %s %s = 2;\n' % (policy.protobuf_type, policy.name))
  f.write('}\n\n')
  fields.append(' optional %sProto %s = %s;\n' %
                (policy.name, policy.name, policy.id + RESERVED_IDS))
def _WriteChromeSettingsProtobuf(policies, policy_atomic_groups,
                                 target_platform, f, risk_tags):
  """Writes chrome_settings.proto to |f|: one message per non-device policy
  plus the wrapping ChromeSettingsProto message."""
  f.write(CHROME_SETTINGS_PROTO_HEAD)
  wrapper_fields = []
  f.write('// PBs for individual settings.\n\n')
  for policy in policies:
    # Note: This protobuf also gets the unsupported policies, since it's an
    # exhaustive list of all the supported user policies on any platform.
    if policy.is_device_only:
      continue
    _WritePolicyProto(f, policy, wrapper_fields)
  f.write('// --------------------------------------------------\n'
          '// Big wrapper PB containing the above groups.\n\n'
          'message ChromeSettingsProto {\n')
  f.write(''.join(wrapper_fields))
  f.write('}\n\n')
def _WriteChromeSettingsFullRuntimeProtobuf(policies, policy_atomic_groups,
                                            target_platform, f, risk_tags):
  """Writes the full-runtime variant of chrome_settings.proto: LITE_RUNTIME
  is commented out and imports point at the *_full_runtime counterparts."""
  # NOTE(review): the cloud_policy.proto replacement below is a no-op against
  # the current CHROME_SETTINGS_PROTO_HEAD, which no longer contains that
  # import -- confirm whether it can be dropped.
  head = (CHROME_SETTINGS_PROTO_HEAD
          .replace('option optimize_for = LITE_RUNTIME;',
                   '//option optimize_for = LITE_RUNTIME;')
          .replace('import "cloud_policy.proto";',
                   'import "cloud_policy_full_runtime.proto";')
          .replace('import "policy_common_definitions.proto";',
                   'import "policy_common_definitions_full_runtime.proto";'))
  f.write(head)
  wrapper_fields = []
  f.write('// PBs for individual settings.\n\n')
  for policy in policies:
    # Note: This protobuf also gets the unsupported policies, since it's an
    # exhaustive list of all the supported user policies on any platform.
    if policy.is_device_only:
      continue
    _WritePolicyProto(f, policy, wrapper_fields)
  f.write('// --------------------------------------------------\n'
          '// Big wrapper PB containing the above groups.\n\n'
          'message ChromeSettingsProto {\n')
  f.write(''.join(wrapper_fields))
  f.write('}\n\n')
def _WriteCloudPolicyProtobuf(policies, policy_atomic_groups, target_platform,
                              f, risk_tags):
  """Writes cloud_policy.proto: the CloudPolicySettings message with one
  optional field per supported, non-device-only policy."""
  f.write(CLOUD_POLICY_PROTO_HEAD)
  f.write('message CloudPolicySettings {\n')
  for policy in policies:
    if not policy.is_supported or policy.is_device_only:
      continue
    f.write(' optional %sPolicyProto %s = %s;\n' %
            (policy.policy_protobuf_type, policy.name,
             policy.id + RESERVED_IDS))
  f.write('}\n\n')
def _WriteCloudPolicyFullRuntimeProtobuf(policies, policy_atomic_groups,
                                         target_platform, f, risk_tags):
  """Writes the full-runtime variant of cloud_policy.proto (LITE_RUNTIME
  disabled, full-runtime common definitions imported)."""
  head = (CLOUD_POLICY_PROTO_HEAD
          .replace('option optimize_for = LITE_RUNTIME;',
                   '//option optimize_for = LITE_RUNTIME;')
          .replace('import "policy_common_definitions.proto";',
                   'import "policy_common_definitions_full_runtime.proto";'))
  f.write(head)
  f.write('message CloudPolicySettings {\n')
  for policy in policies:
    if not policy.is_supported or policy.is_device_only:
      continue
    f.write(' optional %sPolicyProto %s = %s;\n' %
            (policy.policy_protobuf_type, policy.name,
             policy.id + RESERVED_IDS))
  f.write('}\n\n')
def _WritePolicyCommonDefinitionsFullRuntimeProtobuf(
policy_common_definitions_proto_path, policies, policy_atomic_groups,
target_platform, f, risk_tags):
# For full runtime, disable LITE_RUNTIME switch
with open(policy_common_definitions_proto_path, 'r') as proto_file:
policy_common_definitions_proto_code = proto_file.read()
f.write(
policy_common_definitions_proto_code.replace(
"option optimize_for = LITE_RUNTIME;",
"//option optimize_for = LITE_RUNTIME;"))
#------------------ Chrome OS policy constants header --------------#
# This code applies to Active Directory management only.
# Filter for _GetSupportedChromeOSPolicies().
def _IsSupportedChromeOSPolicy(type, policy):
# Filter out unsupported policies.
if not policy.is_supported:
return False
# Filter out device policies if user policies are requested.
if type == 'user' and policy.is_device_only:
return False
# Filter out user policies if device policies are requested.
if type == 'device' and not policy.is_device_only:
return False
# Filter out non-Active-Directory policies.
if 'active_directory' not in policy.supported_chrome_os_management:
return False
return True
# Returns a list of supported user and/or device policies `by filtering
# |policies|. |type| may be 'user', 'device' or 'both'.
def _GetSupportedChromeOSPolicies(policies, type):
if (type not in ['user', 'device', 'both']):
raise RuntimeError('Unsupported type "%s"' % type)
return filter(partial(_IsSupportedChromeOSPolicy, type), policies)
# Returns the set of all policy.policy_protobuf_type strings from |policies|.
def _GetProtobufTypes(policies):
return set(['Integer', 'Boolean', 'String', 'StringList'])
# Writes the definition of an array that contains the pointers to the mutable
# proto field for each policy in |policies| of the given |protobuf_type|.
def _WriteChromeOSPolicyAccessHeader(f, protobuf_type):
f.write('// Access to the mutable protobuf function of all supported '
'%s user\n// policies.\n' % protobuf_type.lower())
f.write('struct %sPolicyAccess {\n'
' const char* policy_key;\n'
' enterprise_management::%sPolicyProto*\n'
' (enterprise_management::CloudPolicySettings::'
'*mutable_proto_ptr)();\n'
'};\n' % (protobuf_type, protobuf_type))
f.write('extern const %sPolicyAccess k%sPolicyAccess[];\n\n' %
(protobuf_type, protobuf_type))
# Writes policy_constants.h for use in Chrome OS.
def _WriteChromeOSPolicyConstantsHeader(policies, policy_atomic_groups,
                                        target_platform, f, risk_tags):
  """Writes the Chrome OS (Active Directory) policy_constants.h to |f|:
  forward declarations, policy key name declarations, the device policy key
  list and the per-protobuf-type access structs."""
  f.write('#ifndef __BINDINGS_POLICY_CONSTANTS_H_\n'
          '#define __BINDINGS_POLICY_CONSTANTS_H_\n\n')
  # Forward declarations.
  supported_user_policies = _GetSupportedChromeOSPolicies(policies, 'user')
  protobuf_types = _GetProtobufTypes(supported_user_policies)
  f.write('namespace enterprise_management {\n' 'class CloudPolicySettings;\n')
  for protobuf_type in protobuf_types:
    f.write('class %sPolicyProto;\n' % protobuf_type)
  f.write('} // namespace enterprise_management\n\n')
  f.write('namespace policy {\n\n')
  # Policy keys.
  all_supported_policies = _GetSupportedChromeOSPolicies(policies, 'both')
  f.write('// Registry key names for user and device policies.\n'
          'namespace key {\n\n')
  for policy in all_supported_policies:
    f.write('extern const char k' + policy.name + '[];\n')
  f.write('\n} // namespace key\n\n')
  # Device policy keys.
  f.write('// NULL-terminated list of device policy registry key names.\n')
  f.write('extern const char* kDevicePolicyKeys[];\n\n')
  # User policy proto pointers, one struct for each protobuf type.
  for protobuf_type in protobuf_types:
    _WriteChromeOSPolicyAccessHeader(f, protobuf_type)
  f.write('} // namespace policy\n\n'
          '#endif // __BINDINGS_POLICY_CONSTANTS_H_\n')
#------------------ Chrome OS policy constants source --------------#
# Writes an array that contains the pointers to the mutable proto field for each
# policy in |policies| of the given |protobuf_type|.
def _WriteChromeOSPolicyAccessSource(policies, f, protobuf_type):
f.write('constexpr %sPolicyAccess k%sPolicyAccess[] = {\n' % (protobuf_type,
protobuf_type))
for policy in policies:
if policy.policy_protobuf_type == protobuf_type:
f.write(' {key::k%s,\n'
' &em::CloudPolicySettings::mutable_%s},\n' %
(policy.name, policy.name.lower()))
# The list is nullptr-terminated.
f.write(' {nullptr, nullptr},\n' '};\n\n')
# Writes policy_constants.cc for use in Chrome OS.
def _WriteChromeOSPolicyConstantsSource(policies, policy_atomic_groups,
                                        target_platform, f, risk_tags):
  """Writes the Chrome OS (Active Directory) policy_constants.cc to |f|:
  policy key definitions, the device policy key list and the
  per-protobuf-type accessor tables."""
  f.write('#include "bindings/cloud_policy.pb.h"\n'
          '#include "bindings/policy_constants.h"\n\n'
          'namespace em = enterprise_management;\n\n'
          'namespace policy {\n\n')
  # Policy keys.
  all_supported_policies = _GetSupportedChromeOSPolicies(policies, 'both')
  f.write('namespace key {\n\n')
  for policy in all_supported_policies:
    f.write('const char k{name}[] = "{name}";\n'.format(name=policy.name))
  f.write('\n} // namespace key\n\n')
  # Device policy keys (nullptr-terminated, see the header declaration).
  supported_device_policies = _GetSupportedChromeOSPolicies(policies, 'device')
  f.write('const char* kDevicePolicyKeys[] = {\n\n')
  for policy in supported_device_policies:
    f.write(' key::k%s,\n' % policy.name)
  f.write(' nullptr};\n\n')
  # User policy proto pointers, one struct for each protobuf type.
  supported_user_policies = _GetSupportedChromeOSPolicies(policies, 'user')
  protobuf_types = _GetProtobufTypes(supported_user_policies)
  for protobuf_type in protobuf_types:
    _WriteChromeOSPolicyAccessSource(supported_user_policies, f, protobuf_type)
  f.write('} // namespace policy\n')
#------------------ app restrictions -------------------------------#
def _WriteAppRestrictions(policies, policy_atomic_groups, target_platform, f,
risk_tags):
def WriteRestrictionCommon(key):
f.write(' <restriction\n' ' android:key="%s"\n' % key)
f.write(' android:title="@string/%sTitle"\n' % key)
f.write(' android:description="@string/%sDesc"\n' % key)
def WriteItemsDefinition(key):
f.write(' android:entries="@array/%sEntries"\n' % key)
f.write(' android:entryValues="@array/%sValues"\n' % key)
def WriteAppRestriction(policy):
policy_name = policy.name
WriteRestrictionCommon(policy_name)
if policy.items is not None:
WriteItemsDefinition(policy_name)
f.write(' android:restrictionType="%s"/>' % policy.restriction_type)
f.write('\n\n')
# _WriteAppRestrictions body
f.write('<restrictions xmlns:android="'
'http://schemas.android.com/apk/res/android">\n\n')
for policy in policies:
if (policy.is_supported and policy.restriction_type != 'invalid' and
not policy.is_deprecated and not policy.is_future):
WriteAppRestriction(policy)
f.write('</restrictions>')
if __name__ == '__main__':
  # Command-line entry point: delegate to main() (defined earlier in this
  # file) and propagate its return code to the shell.
  sys.exit(main())
| bsd-3-clause |
kurdd/Oauth | social_auth/backends/contrib/live.py | 6 | 2704 | """
MSN Live Connect oAuth 2.0
Settings:
LIVE_CLIENT_ID
LIVE_CLIENT_SECRET
LIVE_EXTENDED_PERMISSIONS (defaults are: wl.basic, wl.emails)
References:
* oAuth http://msdn.microsoft.com/en-us/library/live/hh243649.aspx
* Scopes http://msdn.microsoft.com/en-us/library/live/hh243646.aspx
* REST http://msdn.microsoft.com/en-us/library/live/hh243648.aspx
Throws:
AuthUnknownError - if user data retrieval fails
"""
from urllib import urlencode
from django.utils import simplejson
from social_auth.utils import setting, dsa_urlopen
from social_auth.backends import BaseOAuth2, OAuthBackend, USERNAME
from social_auth.backends.exceptions import AuthUnknownError
# Live Connect configuration
# OAuth 2.0 endpoints and the REST profile endpoint for MSN Live Connect.
LIVE_AUTHORIZATION_URL = 'https://login.live.com/oauth20_authorize.srf'
LIVE_ACCESS_TOKEN_URL = 'https://login.live.com/oauth20_token.srf'
LIVE_USER_DATA_URL = 'https://apis.live.net/v5.0/me'
LIVE_SERVER = 'live.com'
# Scopes requested when LIVE_EXTENDED_PERMISSIONS is not configured.
LIVE_DEFAULT_PERMISSIONS = ['wl.basic', 'wl.emails']
class LiveBackend(OAuthBackend):
    """Backend that maps the Live Connect /me REST payload onto
    social_auth's user fields."""
    name = 'live'
    # Extra values persisted on the social-auth association.
    EXTRA_DATA = [
        ('id', 'id'),
        ('access_token', 'access_token'),
        ('reset_token', 'reset_token'),
        ('expires', setting('SOCIAL_AUTH_EXPIRATION', 'expires')),
        ('email', 'email'),
        ('first_name', 'first_name'),
        ('last_name', 'last_name'),
    ]

    def get_user_id(self, details, response):
        """The Live Connect account id is the stable unique identifier."""
        return response['id']

    def get_user_details(self, response):
        """Return user details from Live Connect account"""
        try:
            email = response['emails']['account']
        except KeyError:
            email = ''
        details = {'email': email}
        details[USERNAME] = response.get('name')
        details['first_name'] = response.get('first_name')
        details['last_name'] = response.get('last_name')
        return details
class LiveAuth(BaseOAuth2):
    """OAuth2 workflow implementation for MSN Live Connect."""
    AUTHORIZATION_URL = LIVE_AUTHORIZATION_URL
    ACCESS_TOKEN_URL = LIVE_ACCESS_TOKEN_URL
    SERVER_URL = LIVE_SERVER
    AUTH_BACKEND = LiveBackend
    SETTINGS_KEY_NAME = 'LIVE_CLIENT_ID'
    SETTINGS_SECRET_NAME = 'LIVE_CLIENT_SECRET'
    SCOPE_SEPARATOR = ','
    SCOPE_VAR_NAME = 'LIVE_EXTENDED_PERMISSIONS'
    DEFAULT_SCOPE = LIVE_DEFAULT_PERMISSIONS

    def user_data(self, access_token, *args, **kwargs):
        """Loads user data from service"""
        query_string = urlencode({'access_token': access_token})
        profile_url = LIVE_USER_DATA_URL + '?' + query_string
        try:
            return simplejson.load(dsa_urlopen(profile_url))
        except (ValueError, IOError):
            raise AuthUnknownError('Error during profile retrieval, '
                                   'please, try again later')
# Backend definition
# Registry consumed by django-social-auth to expose this provider under the
# 'live' backend name.
BACKENDS = {
    'live': LiveAuth,
}
| apache-2.0 |
PatKayongo/patkayongo.github.io | node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/special.py | 95 | 3062 | # -*- coding: utf-8 -*-
"""
pygments.lexers.special
~~~~~~~~~~~~~~~~~~~~~~~
Special lexers.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import io
from pygments.lexer import Lexer
from pygments.token import Token, Error, Text
from pygments.util import get_choice_opt, b
__all__ = ['TextLexer', 'RawTokenLexer']
class TextLexer(Lexer):
    """
    "Null" lexer, doesn't highlight anything.
    """
    name = 'Text only'
    aliases = ['text']
    filenames = ['*.txt']
    mimetypes = ['text/plain']

    def get_tokens_unprocessed(self, text):
        # Emit the whole input unchanged as a single Text token at offset 0.
        yield 0, Text, text
# Cache of raw token-type names -> Token objects, shared by all
# RawTokenLexer instances.
_ttype_cache = {}

# Matches one line (including its trailing newline) of the raw byte stream.
line_re = re.compile(b('.*?\n'))
class RawTokenLexer(Lexer):
    """
    Recreate a token stream formatted with the `RawTokenFormatter`. This
    lexer raises exceptions during parsing if the token stream in the
    file is malformed.

    Additional options accepted:

    `compress`
        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
        the given compression algorithm before lexing (default: ``""``).
    """
    name = 'Raw token data'
    aliases = ['raw']
    filenames = []
    mimetypes = ['application/x-pygments-tokens']

    def __init__(self, **options):
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        Lexer.__init__(self, **options)

    def get_tokens(self, text):
        """Yield (tokentype, value) pairs from the (possibly compressed)
        raw token stream."""
        if isinstance(text, str):
            # raw token stream never has any non-ASCII characters
            text = text.encode('ascii')
        if self.compress == 'gz':
            import gzip
            # Bug fix: |text| is bytes at this point, so GzipFile needs a
            # *binary* buffer. io.StringIO(text) raised TypeError and broke
            # the 'gz' option entirely; io.BytesIO is the correct wrapper.
            gzipfile = gzip.GzipFile('', 'rb', 9, io.BytesIO(text))
            text = gzipfile.read()
        elif self.compress == 'bz2':
            import bz2
            text = bz2.decompress(text)
        # do not call Lexer.get_tokens() because we do not want Unicode
        # decoding to occur, and stripping is not optional.
        text = text.strip(b('\n')) + b('\n')
        for i, t, v in self.get_tokens_unprocessed(text):
            yield t, v

    def get_tokens_unprocessed(self, text):
        """Parse "Token.Type<TAB>repr(value)" lines from the bytes |text|,
        yielding (offset, tokentype, value) triples."""
        length = 0
        for match in line_re.finditer(text):
            try:
                ttypestr, val = match.group().split(b('\t'), 1)
            except ValueError:
                # Malformed line: emit it verbatim as an Error token.
                val = match.group().decode(self.encoding)
                ttype = Error
            else:
                ttype = _ttype_cache.get(ttypestr)
                if not ttype:
                    ttype = Token
                    # Bug fix: |ttypestr| is bytes (line_re is a bytes
                    # pattern), and bytes.split(str) raises TypeError on
                    # Python 3. Decode first; the stream is ASCII by
                    # construction (see get_tokens()).
                    ttypes = ttypestr.decode('ascii').split('.')[1:]
                    for ttype_ in ttypes:
                        if not ttype_ or not ttype_[0].isupper():
                            raise ValueError('malformed token name')
                        ttype = getattr(ttype, ttype_)
                    _ttype_cache[ttypestr] = ttype
                # Strip the repr quoting (e.g. u'...') and unescape.
                val = val[2:-2].decode('unicode-escape')
            yield length, ttype, val
            length += len(val)
| mit |
boyko/django-guardian | guardian/forms.py | 12 | 6350 | from django import forms
from django.utils.translation import ugettext as _
from guardian.shortcuts import assign
from guardian.shortcuts import remove_perm
from guardian.shortcuts import get_perms
from guardian.shortcuts import get_perms_for_model
class BaseObjectPermissionsForm(forms.Form):
    """
    Base form for object permissions management. Needs to be extended for usage
    with users and/or groups.
    """

    def __init__(self, obj, *args, **kwargs):
        """
        :param obj: Any instance which form would use to manage object
          permissions
        """
        self.obj = obj
        super(BaseObjectPermissionsForm, self).__init__(*args, **kwargs)
        # Build the permissions field dynamically so subclasses can override
        # any of the get_obj_perms_field* hooks below.
        field_name = self.get_obj_perms_field_name()
        self.fields[field_name] = self.get_obj_perms_field()

    def get_obj_perms_field(self):
        """
        Returns field instance for object permissions management. May be
        replaced entirely.
        """
        field_class = self.get_obj_perms_field_class()
        field = field_class(
            label=self.get_obj_perms_field_label(),
            choices=self.get_obj_perms_field_choices(),
            initial=self.get_obj_perms_field_initial(),
            widget=self.get_obj_perms_field_widget(),
            required=self.are_obj_perms_required(),
        )
        return field

    def get_obj_perms_field_name(self):
        """
        Returns name of the object permissions management field. Default:
        ``permissions``.
        """
        return 'permissions'

    def get_obj_perms_field_label(self):
        """
        Returns label of the object permissions management field. Default:
        ``_("Permissions")`` (marked to be translated).
        """
        return _("Permissions")

    def get_obj_perms_field_choices(self):
        """
        Returns choices for object permissions management field. Default:
        list of tuples ``(codename, name)`` for each ``Permission`` instance
        for the managed object.
        """
        choices = [(p.codename, p.name) for p in get_perms_for_model(self.obj)]
        return choices

    def get_obj_perms_field_initial(self):
        """
        Returns initial object permissions management field choices. Default:
        ``[]`` (empty list).
        """
        return []

    def get_obj_perms_field_class(self):
        """
        Returns object permissions management field's base class. Default:
        ``django.forms.MultipleChoiceField``.
        """
        return forms.MultipleChoiceField

    def get_obj_perms_field_widget(self):
        """
        Returns object permissions management field's widget base class.
        Default: ``django.forms.SelectMultiple``.
        """
        return forms.SelectMultiple

    def are_obj_perms_required(self):
        """
        Indicates if at least one object permission should be required.
        Default: ``False``.
        """
        return False

    def save_obj_perms(self):
        """
        Must be implemented in concrete form class. This method should store
        selected object permissions.
        """
        raise NotImplementedError
class UserObjectPermissionsForm(BaseObjectPermissionsForm):
    """
    Object level permissions management form for usage with ``User`` instances.

    Example usage::

        from django.shortcuts import get_object_or_404
        from myapp.models import Post
        from guardian.forms import UserObjectPermissionsForm
        from guardian.models import User

        def my_view(request, post_slug, user_id):
            user = get_object_or_404(User, id=user_id)
            post = get_object_or_404(Post, slug=post_slug)
            form = UserObjectPermissionsForm(user, post, request.POST or None)
            if request.method == 'POST' and form.is_valid():
                form.save_obj_perms()
            ...
    """

    def __init__(self, user, *args, **kwargs):
        # The user whose object-level permissions are being managed.
        self.user = user
        super(UserObjectPermissionsForm, self).__init__(*args, **kwargs)

    def get_obj_perms_field_initial(self):
        # Pre-select the permissions the user already has on the object.
        perms = get_perms(self.user, self.obj)
        return perms

    def save_obj_perms(self):
        """
        Saves selected object permissions by creating new ones and removing
        those which were not selected but already exists.

        Should be called *after* form is validated.
        """
        perms = self.cleaned_data[self.get_obj_perms_field_name()]
        model_perms = [c[0] for c in self.get_obj_perms_field_choices()]
        # Revoke every model permission that is no longer selected, then
        # (re-)assign each selected one; assigning an existing permission is
        # handled by guardian's shortcuts.
        to_remove = set(model_perms) - set(perms)
        for perm in to_remove:
            remove_perm(perm, self.user, self.obj)
        for perm in perms:
            assign(perm, self.user, self.obj)
class GroupObjectPermissionsForm(BaseObjectPermissionsForm):
    """
    Object level permissions management form for a single ``Group``
    instance.

    Example usage::

        from django.shortcuts import get_object_or_404
        from myapp.models import Post
        from guardian.forms import GroupObjectPermissionsForm
        from guardian.models import Group

        def my_view(request, post_slug, group_id):
            group = get_object_or_404(Group, id=group_id)
            post = get_object_or_404(Post, slug=post_slug)
            form = GroupObjectPermissionsForm(group, post, request.POST or None)
            if request.method == 'POST' and form.is_valid():
                form.save_obj_perms()
            ...
    """

    def __init__(self, group, *args, **kwargs):
        self.group = group
        super(GroupObjectPermissionsForm, self).__init__(*args, **kwargs)

    def get_obj_perms_field_initial(self):
        # Preselect the permissions the group already holds on the object.
        return get_perms(self.group, self.obj)

    def save_obj_perms(self):
        """
        Persist the selected object permissions: grant every selected
        permission and revoke any model permission left unselected.
        Should be called *after* the form is validated.
        """
        selected = self.cleaned_data[self.get_obj_perms_field_name()]
        available = [codename for codename, label in self.get_obj_perms_field_choices()]
        for codename in set(available) - set(selected):
            remove_perm(codename, self.group, self.obj)
        for codename in selected:
            assign(codename, self.group, self.obj)
| bsd-2-clause |
BtXin/VirtualIRLab | api/instructorAPI.py | 1 | 5246 | from flask import make_response, render_template, flash, redirect, url_for, current_app
from flask_login import login_required, current_user
from flask_restful import Resource, reqparse
from schema.DataSet import DataSet
from schema.User import User
from schema.Query import Query
from schema.Assignment import Assignment
from schema.Document import Document
from schema.Score import Score
from schema.Annotation import Annotation
from util.util import check_role
from mongoengine.errors import NotUniqueError, ValidationError
import os
from search.searcher import Searcher
# Shared request parser for InstructorAPI handlers. Names suffixed with
# '[]' arrive as repeated form fields and are collected into lists via
# action='append'.
parser = reqparse.RequestParser()
parser.add_argument('ds_name', type=str)
parser.add_argument('assignment_name', type=str)
parser.add_argument('query[]', type=str, action='append')
parser.add_argument('ranker', type=str)
parser.add_argument('param[]', type=str, action='append')
parser.add_argument('num_results', type=int)
class InstructorAPI(Resource):
    """Flask-RESTful resource backing the instructor dashboard:
    lists datasets/assignments (GET) and creates assignments (POST)."""
    @login_required
    def get(self):
        """Render the instructor page with all datasets and this
        instructor's assignments plus their judgement scores."""
        check_role('instructor')
        datasets = [(d.name, d.author.name) for d in DataSet.objects()]
        instructor = User.objects(email=current_user.email).first()
        assignments = {}
        # Key each assignment by (name, instructor name); value is the
        # per-query/per-document judgement summary.
        for assignment in Assignment.objects(instructor=instructor):
            assignments[(assignment.name, assignment.instructor.name)] = (self.get_judgement_score(assignment))
        return make_response(render_template('instructor.html',
                                             datasets=datasets, assignments=assignments))
    @login_required
    def post(self):
        """Create a new assignment from form data, then pre-compute and
        store retrieval scores for each query."""
        check_role('instructor')
        args = parser.parse_args()
        # Drop blank query inputs coming from empty form rows.
        queries = [q for q in args['query[]'] if q != '']
        if not queries:
            flash('Empty Query!')
            return redirect(url_for('instructorapi'))
        ranker = args['ranker']
        assignment_name = args['assignment_name']
        dataset_name = args['ds_name']
        params = self.generate_params_dict(ranker, args['param[]'])
        num_results = args['num_results']
        instructor = User.objects(email=current_user.email).first()
        dataset = DataSet.objects(name=dataset_name).first()
        assignment = Assignment(name=assignment_name, instructor=instructor, data_set=dataset,
                                ranker=ranker, params=params, num_results=num_results)
        try:
            assignment.save()
        except (NotUniqueError, ValidationError):
            # Duplicate assignment name or schema violation.
            flash('Invalid Input!')
            return redirect(url_for('instructorapi'))
        q = self.generate_queries(queries)
        assignment.update(queries=q)
        assignment.update(queries_need_to_show=q)
        try:
            self.search(assignment, dataset_name, queries, ranker, params, num_results)
        except Exception as e:
            # NOTE(review): search failures are only printed, not surfaced
            # to the user; the assignment is still reported as created.
            print(e)
        flash('Assignment Created!')
        return redirect(url_for('instructorapi'))
    @staticmethod
    def search(assignment, dataset_name, queries, ranker, params, num_results):
        """Run the ranker for every query and persist one Score row per
        (assignment, query, document) result."""
        author = User.objects(email=current_user.email).first()
        path = os.path.join(current_app.root_path, 'data', author.name)
        searcher = Searcher(dataset_name, path)
        for query in queries:
            results = searcher.search(query, ranker, params, num_results)['results']
            for result in results:
                # NOTE(review): encode('utf8')[2:] looks like it strips a
                # leading './' from the result path — confirm against the
                # Searcher output format.
                doc_path = str(os.path.join(path, result['path'].encode('utf8')[2:]))
                doc_score = result['score']
                document = Document.objects(path=doc_path).first()
                q = Query.objects(content=query).first()
                Score(result=doc_score, assignment=assignment, query=q, document=document).save()
    @staticmethod
    def generate_queries(queries):
        """Return Query documents for the given strings, creating and
        saving any that do not already exist."""
        result = []
        for q in queries:
            query = Query.objects(content=q).first()
            if not query:
                query = Query(q)
                query.save()
            result.append(query)
        return result
    @staticmethod
    def generate_params_dict(ranker, params):
        """Map positional parameter strings to the named-parameter dict
        expected by the chosen ranker; values are coerced to float.
        Raises KeyError for an unknown ranker name."""
        rankers = {'OkapiBM25': ['k1', 'b', 'k3'],
                   'JelinekMercer': ['lambda'],
                   'DirichletPrior': ['mu'],
                   'AbsoluteDiscount': ['delta'],
                   'PivotedLength': ['s']}
        keys = rankers[ranker]
        result = {}
        for i in range(len(keys)):
            result[keys[i]] = float(params[i])
        return result
    @staticmethod
    def get_judgement_score(assignment):
        """Build {query content -> {doc name -> {'score', 'relevant',
        'irrelevant'}}} by counting Annotation judgements per document."""
        queries = assignment.queries
        scores = Score.objects(assignment=assignment)
        result = {}
        for query in queries:
            query_result = {}
            for doc_score in scores.filter(query=query):
                score = doc_score.result
                doc = doc_score.document
                relevant = Annotation.objects(doc=doc, query=query, assignment=assignment, judgement=True).count()
                irrelevant = Annotation.objects(doc=doc, query=query, assignment=assignment, judgement=False).count()
                doc_result = {'score': score, 'relevant': relevant, 'irrelevant': irrelevant}
                query_result[doc.name] = doc_result
            result[query.content] = query_result
        return result
| mit |
Groovy-Dragon/tcRIP | UL_HMM.py | 1 | 24366 | # -*- coding: utf-8 -*-
"""
Created on Fri Aug 4 10:44:38 2017
@author: lewismoffat
"""
"""
Python module for creating, training and applying hidden
Markov models to discrete or continuous observations.
Author: Michael Hamilton, hamiltom@cs.colostate.edu
Theoretical concepts obtained from Rabiner, 1989.
"""
import numpy, pylab, time, copy
from numpy import random as rand
from numpy import linalg
from matplotlib import rc
# Render all matplotlib text through LaTeX; the axis labels below use raw
# TeX strings (e.g. r"$\log( P ( O | \lambda ) )$").
rc('text', usetex=True)
class HMM_Classifier:
    """
    A binary hmm classifier that utilizes two hmms: one corresponding
    to the positive activity and one corresponding to the negative
    activity.
    """
    def __init__( self, **args ):
        """
        :Keywords:
          - `neg_hmm` - hmm corresponding to negative activity
          - `pos_hmm` - hmm corresponding to positive activity
        """
        self.neg_hmm = None
        self.pos_hmm = None
        if 'neg_hmm' in args:
            self.neg_hmm = args[ 'neg_hmm' ]
        if 'pos_hmm' in args:
            self.pos_hmm = args[ 'pos_hmm' ]

    def classify( self, sample ):
        """
        Return the log odds of the positive activity for `sample`.

        Since the hmms return a log-likelihood (due to scaling) of the
        corresponding activity, the difference of the two
        log-likelihoods is the log odds.

        :Raises: ValueError if either hmm has not been set.
        """
        # Scream if an hmm is missing.  (Raising a plain string, as the
        # original code did, is a TypeError in Python 3.)
        if self.pos_hmm is None or self.neg_hmm is None:
            raise ValueError( "pos/neg hmm(s) missing" )
        pos_ll = forward( self.pos_hmm, sample, scaling=1 )[ 0 ]
        neg_ll = forward( self.neg_hmm, sample, scaling=1 )[ 0 ]
        # log odds by difference of log-likelihoods
        return pos_ll - neg_ll

    def add_pos_hmm( self, pos_hmm ):
        """
        Add the hmm corresponding to positive activity. Replaces the
        current positive hmm, if it exists.
        """
        self.pos_hmm = pos_hmm

    def add_neg_hmm( self, neg_hmm ):
        """
        Add the hmm corresponding to negative activity. Replaces the
        current negative hmm, if it exists.
        """
        self.neg_hmm = neg_hmm
class HMM:
    """
    Creates and maintains a hidden Markov model. This version assumes the every state can be
    reached DIRECTLY from any other state (ergodic). This, of course, excludes the start state.
    Hence the state transition matrix, A, must be N X N . The observable symbol probability
    distributions are represented by an N X M matrix where M is the number of observation
    symbols.

                  |a_11 a_12 ... a_1N|                   |b_11 b_12 ... b_1M|
                  |a_21 a_22 ... a_2N|                   |b_21 b_22 ... b_2M|
              A = | .    .        .  |               B = | .    .        .  |
                  | .         .   .  |                   | .         .   .  |
                  |a_N1 a_N2 ... a_NN|                   |b_N1 b_N2 ... b_NM|

           a_ij = P(q_t = S_j|q_t-1 = S_i)       b_ik = P(v_k at t|q_t = S_i)
           where q_t is state at time t and v_k is k_th symbol of observation sequence
    """
    def __init__( self, n_states=1, **args ):
        """
        :Keywords:
          - `n_states` - number of hidden states
          - `V` - list of all observable symbols (required)
          - `A` - transition matrix (N x N); random row-stochastic if omitted
          - `B` - observable symbol probability distribution (N x M);
            random row-stochastic if omitted
          - `D` - dimensionality of continuous observations
          - `F` - Fixed emission probabilities for the given state ( dict: i -> numpy.array( [n_states] ),
            where i is the state to hold fixed.
        """
        self.N = n_states # Number of hidden states
        # Initialize observable symbol set parameters
        self.V = args[ 'V' ]
        self.M = len( self.V )
        # symbol -> column index of B, used by symbol_index()
        self.symbol_map = dict( zip ( self.V, range( len( self.V ) )) )
        # Initialize transition probability matrix
        if 'A' in args:
            self.A = args[ 'A' ]
            assert numpy.shape( self.A ) == ( self.N, self.N )
        else:
            # Randomly initialize matrix and normalize so sum over a row = 1
            raw_A = rand.uniform( size = self.N * self.N ).reshape( ( self.N, self.N ) )
            self.A = ( raw_A.T / raw_A.T.sum( 0 ) ).T
            if n_states == 1:
                # NOTE(review): numpy.reshape returns a new array; this call
                # discards its result and leaves self.A unchanged — confirm
                # whether an assignment was intended.
                self.A.reshape( (1,1) )
        # Initialize observable symbol probability distributions
        if 'B' in args:
            self.B = args[ 'B' ]
            if n_states > 1:
                assert numpy.shape( self.B ) == ( self.N, self.M )
            else:
                self.B = numpy.reshape(self.B, (1,self.M) )
            # Apply any fixed (clamped) emission rows
            if 'F' in args:
                self.F = args[ 'F' ]
                for i in self.F.keys():
                    self.B[ i,: ] = self.F[ i ]
            else:
                self.F = {}
        else:
            # initialize distribution randomly, rows normalized to sum to 1
            B_raw = rand.uniform( 0, 1, self.N * self.M ).reshape( ( self.N, self.M ) )
            self.B = ( B_raw.T / B_raw.T.sum( 0 ) ).T
            if 'F' in args:
                self.F = args[ 'F' ]
                for i in self.F.keys():
                    self.B[ i,: ] = self.F[ i ]
            else:
                self.F = {}
        # Initialize the initial state distribution
        if 'Pi' in args:
            self.Pi = args[ 'Pi' ]
            assert len( self.Pi ) == self.N
        else:
            # initialize to uniform distribution
            self.Pi = numpy.array ( 1.0 / self.N ).repeat( self.N )
        if 'Labels' in args:
            self.Labels = args[ 'Labels' ]
        else:
            self.Labels = range( self.N )
        # NOTE(review): this re-applies F a second time (it was already
        # handled in both branches above); redundant but harmless.
        if 'F' in args:
            self.F = args[ 'F' ]
            for i in self.F.keys():
                self.B[ i,: ] = self.F[ i ]
        else:
            self.F = {}
    def __repr__( self ):
        # NOTE(review): printing inside __repr__ is a side effect; A is
        # both printed here and included in the returned string.
        print(self.A)
        retn = ""
        retn += "num hiddens: %d\n" % ( self.N ) + \
                "symbols: %s\n" % ( self.V ) + \
                "\nA:\n %s\n" % ( str( self.A ) ) + \
                "Pi:\n %s" % ( str( self.Pi ) )
        return retn
def symbol_index( hmm, Obs ):
    """
    Translate an observation symbol sequence into the corresponding
    sequence of column indices used to address the model's distribution
    matrices (via ``hmm.symbol_map``).
    """
    return [ hmm.symbol_map[ symbol ] for symbol in Obs ]
def forward( hmm, Obs, scaling=True ):
    """
    Calculate the probability of an observation sequence, Obs,
    given the model, P(Obs|hmm).

    :Parameters:
        - `hmm` - the model
        - `Obs` - observation sequence (symbols drawn from hmm.V)
        - `scaling` - if true, rescale each Alpha column to avoid
          numerical underflow and return the log-likelihood

    :Return:
        ``( log_Prob_Obs, Alpha, c )`` when scaling, otherwise
        ``( prob_Obs, Alpha )``.
    """
    T = len( Obs ) # Number of states in observation sequence
    # Get index sequence of observation sequence to access
    # the observable symbol probabilty distribution matrix
    Obs = symbol_index( hmm, Obs )
    # create scaling vector
    if scaling:
        c = numpy.zeros( [ T ], float )
    # Base Case:
    Alpha = numpy.zeros( [ hmm.N, T ], float )
    Alpha[ :,0 ] = hmm.Pi * hmm.B[ :,Obs[ 0 ] ]
    if scaling:
        c[ 0 ] = 1.0 / numpy.sum( Alpha[ :,0 ] )
        Alpha[ :,0 ] = c[ 0 ] * Alpha[ :,0 ]
    # Induction Step.  (Was ``xrange``, which does not exist in Python 3;
    # the rest of this module already uses Python 3 print functions.)
    for t in range( 1,T ):
        Alpha[ :,t ] = numpy.dot( Alpha[ :,t-1 ], hmm.A) * hmm.B[ :,Obs[ t ] ]
        if scaling:
            c[ t ] = 1.0 / numpy.sum( Alpha[ :,t ] )
            Alpha[ :,t] = Alpha[ :,t] * c[ t ]
    if scaling:
        # Log-likelihood recovered from the scaling coefficients.
        log_Prob_Obs = -( numpy.sum( numpy.log( c ) ) )
        return ( log_Prob_Obs, Alpha, c )
    else:
        prob_Obs = numpy.sum( Alpha[ :,T-1 ] )
        return ( prob_Obs, Alpha )
def backward( hmm, Obs, c=None ):
    """
    Calculate the probability of a partial observation sequence
    from t+1 to T, given some state t.

    :Parameters:
        - `hmm` - the model
        - `Obs` - observation sequence
        - `c` - scaling coefficients from the forward algorithm; when
          given, each Beta column is scaled by the matching c[t]

    :Return: Beta, an N x T matrix of backward probabilities B_t(i)
    """
    T = len( Obs ) # Number of states in observation sequence
    # Get index sequence of observation sequence to access
    # the observable symbol probabilty distribution matrix
    Obs = symbol_index( hmm, Obs )
    # Base Case:
    Beta = numpy.zeros( [ hmm.N, T ], float )
    Beta[ :, T-1 ] = 1.0
    if c is not None:
        Beta [ :,T-1 ] = Beta [ :,T-1 ] * c[ T-1 ]
    # Inductive Step, walking backwards from T-2 to 0.  (Was ``xrange``,
    # which does not exist in Python 3.)
    for t in reversed( range( T-1 ) ):
        Beta[ :,t ] = numpy.dot( hmm.A, ( hmm.B[ :,Obs[ t+1 ] ] * Beta[ :,t+1 ] ) )
        if c is not None:
            Beta[ :,t ] = Beta[ :,t ] * c[ t ]
    return Beta
def viterbi( hmm, Obs, scaling=True ):
    """
    Calculate P(Q|Obs, hmm) and yield the state sequence Q* that
    maximizes this probability.

    :Parameters:
        - `hmm` - the model
        - `Obs` - observation sequence
        - `scaling` - if true, work in log space to avoid underflow

    :Return: ``( Q_star, Delta, Psi )``
    """
    T = len( Obs ) # Number of states in observation sequence
    # Get index sequence of observation sequence to access
    # the observable symbol probabilty distribution matrix
    Obs = symbol_index( hmm, Obs )
    # Initialization
    # Delta[ i,j ] = max_q1,q2,...,qt P( q1, q2,...,qt = i, O_1, O_2,...,O_t|hmm )
    # this is the highest prob along a single path at time t ending in state S_i
    Delta = numpy.zeros( [ hmm.N,T ], float)
    if scaling:
        Delta[ :,0 ] = numpy.log( hmm.Pi ) + numpy.log( hmm.B[ :,Obs[ 0] ] )
    else:
        Delta[ :,0 ] = hmm.Pi * hmm.B[ :,Obs[ 0] ]
    # Track Maximal States
    Psi = numpy.zeros( [ hmm.N, T ], int )
    # Inductive Step.  (All loops below used ``xrange``, which does not
    # exist in Python 3; replaced with ``range``.)
    if scaling:
        for t in range( 1,T ):
            nus = Delta[ :,t-1 ] + numpy.log( hmm.A )
            Delta[ :,t ] = nus.max(1) + numpy.log( hmm.B[ :,Obs[ t ] ] )
            Psi[ :,t ] = nus.argmax( 1 )
    else:
        for t in range( 1,T ):
            nus = Delta[ :,t-1 ] * hmm.A
            Delta[ :,t ] = nus.max( 1 ) * hmm.B[ :,Obs[ t ] ]
            Psi[ :,t ] = nus.argmax(1)
    # Calculate State Sequence, Q*, by backtracking through Psi:
    Q_star = [ numpy.argmax( Delta[ :,T-1 ] ) ]
    for t in reversed( range( T-1 ) ) :
        Q_star.insert( 0, Psi[ Q_star[ 0 ],t+1 ] )
    return ( Q_star, Delta, Psi )
def baum_welch( hmm, Obs_seqs, **args ):
    """
    EM algorithm to update Pi, A, and B for the HMM
    :Parameters:
      - `hmm` - hmm model to train
      - `Obs_seqs` - list of observation sequences to train over
    :Return:
      a trained hmm

    :Keywords:
      - `epochs` - number of iterations to perform EM, default is 20
      - `val_set` - validation data set, not required but recommended to prevent over-fitting
      - `updatePi` - flag to update initial state probabilities
      - `updateA` - flag to update transition probabilities, default is True
      - `updateB` - flag to update observation emission probabilites for discrete types, default is True
      - `scaling` - flag to scale probabilities (log scale), default is True
      - `graph` - flag to plot log-likelihoods of the training epochs, default is False
      - `normUpdate` - flag to use 1 / -(normed log-likelihood) contribution for each observation
        sequence when updating model parameters, default if False
      - `fname` - file name to save plot figure, default is ll.eps
      - `verbose` - flag to print training times and log likelihoods for each training epoch, default is false

    All counting loops use ``range`` (the original ``xrange`` is Python
    2-only while this module otherwise targets Python 3).
    """
    # Setup keywords
    if 'epochs' in args: epochs = args[ 'epochs' ]
    else: epochs = 20
    updatePi=updateA=updateB=scaling=graph = 1
    normUpdate=verbose=validating = 0
    if 'updatePi' in args: updatePi = args[ 'updatePi' ]
    if 'updateA' in args: updateA = args[ 'updateA' ]
    if 'updateB' in args: updateB = args[ 'updateB' ]
    if 'scaling' in args: scaling = args[ 'scaling' ]
    if 'graph' in args: graph = args[ 'graph' ]
    if 'normUpdate' in args: normUpdate = args[ 'normUpdate' ]
    if 'fname' in args: fname = args[ 'fname' ]
    else: fname = 'll.eps'
    if 'verbose' in args: verbose = args[ 'verbose' ]
    if 'val_set' in args:
        validating = 1
        val_set = args[ 'val_set' ]
    K = len( Obs_seqs )         # number of observation sequences
    start = time.time()         # start training timer
    LLs = []                    # keep track of log likelihoods for each epoch
    val_LLs = []                # keep track of validation log-likelihoods for each epoch
    # store best parameters seen so far (returned when validating)
    best_A = copy.deepcopy( hmm.A )
    best_B = copy.deepcopy( hmm.B )
    best_Pi = copy.deepcopy( hmm.Pi )
    best_epoch = 'N/A'
    best_val_LL = None
    # Iterate over specified number of EM epochs
    for epoch in range( epochs ):
        start_epoch = time.time() # start epoch timer
        LL_epoch = 0              # intialize log-likelihood of all seqs given the model
        Expect_si_all = numpy.zeros( [ hmm.N ], float )           # Expectation of being in state i over all seqs
        Expect_si_all_TM1 = numpy.zeros( [ hmm.N ], float )       # Expectation of being in state i over all seqs until T-1
        Expect_si_sj_all = numpy.zeros( [ hmm.N, hmm.N ], float ) # Expectation of transitioning from state i to state j over all seqs
        Expect_si_sj_all_TM1 = numpy.zeros( [ hmm.N, hmm.N ], float )
        Expect_si_t0_all = numpy.zeros( [ hmm.N ] )               # Expectation of initially being in state i over all seqs
        Expect_si_vk_all = numpy.zeros( [ hmm.N, hmm.M ], float ) # Expectation of being in state i and seeing symbol vk
        ow = 0
        for Obs in Obs_seqs:
            if ow > 0 and ow % 100 == 0:
                print("epoch %d: %d seqs processed" % ( epoch+1, ow ))
            ow += 1
            Obs = list( Obs )
            log_Prob_Obs, Alpha, c = forward( hmm=hmm, Obs=Obs, scaling=1 ) # Calculate forward probs, log-likelihood, and scaling vals
            Beta = backward( hmm=hmm, Obs=Obs, c=c )                        # Calculate backward probs
            LL_epoch += log_Prob_Obs # Update overall epoch log-likelihood
            T = len( Obs )           # Number of states in observation sequence
            # Determine update weight of the observation for contribution
            # to model parameter maximization
            if normUpdate:
                w_k = 1.0 / -( log_Prob_Obs + numpy.log( len( Obs ) ) )
            else:
                w_k = 1.0
            # Get index sequence of observation sequence to access
            # the observable symbol probabilty distribution matrix
            Obs_symbols = Obs[ : ]
            Obs = symbol_index( hmm, Obs )
            # Calculate gammas
            # Gamma[ i,t ] = P( q_t = S_i|Obs, hmm)
            Gamma_raw = Alpha * Beta
            Gamma = Gamma_raw / Gamma_raw.sum( 0 )
            Expect_si_t0_all += w_k * Gamma[ :,0 ]
            # Expect_si_all[ i ] = expected number of transitions from state i over all
            # training sequences.
            Expect_si_all += w_k * Gamma.sum( 1 )
            Expect_si_all_TM1 += w_k * Gamma[ :,:T-1 ].sum( 1 )
            # Calculate Xis
            # Xi is an N X N X T-1 matrix corresponding to
            # Xi[ i,j,t ] = P(q_t = S_i, q_t+1 = S_j|Obs, hmm )
            Xi = numpy.zeros( [ hmm.N, hmm.N, T-1 ], float )
            for t in range( T-1 ):
                for i in range( hmm.N ):
                    Xi[ i,:,t ] = Alpha[ i,t ] * hmm.A[ i,: ] * hmm.B[ :, Obs[ t+1 ] ] * Beta[ :,t+1 ]
                if not scaling:
                    Xi[ :,:,t ] = Xi[ :,:,t ] / Xi[ :,:,t ].sum()
            # Expect_si_sj_all = expected number of transitions from state s_i to state s_j
            Expect_si_sj_all += w_k * Xi.sum( 2 )
            Expect_si_sj_all_TM1 += w_k * Xi[ :,:,:T-1].sum( 2 )
            if updateB:
                # accumulate emission counts for each symbol column
                B_bar = numpy.zeros( [ hmm.N, hmm.M ], float )
                for k in range( hmm.M ):
                    which = numpy.array( [ hmm.V[ k ] == x for x in Obs_symbols ] )
                    B_bar[ :,k ] = Gamma.T[ which,: ].sum( 0 )
                Expect_si_vk_all += w_k * B_bar
        ############## Reestimate model parameters ###############
        # reestimate initial state probabilites
        if updatePi:
            Expect_si_t0_all = Expect_si_t0_all / numpy.sum( Expect_si_t0_all )
            hmm.Pi = Expect_si_t0_all
        # reestimate transition probabilites
        if updateA:
            A_bar = numpy.zeros( [ hmm.N, hmm.N ], float )
            for i in range( hmm.N ):
                A_bar[ i,: ] = Expect_si_sj_all_TM1[ i,: ] / Expect_si_all_TM1[ i ]
            hmm.A = A_bar
        if updateB:
            # reestimate emission probabilites
            # ( observable symbol probability distribution )
            for i in range( hmm.N ):
                Expect_si_vk_all[ i,: ] = Expect_si_vk_all [ i,: ] / Expect_si_all[ i ]
            hmm.B = Expect_si_vk_all
            # re-clamp any fixed emission rows
            for i in hmm.F.keys():
                hmm.B[ i,: ] = hmm.F[ i ]
        LLs.append( LL_epoch )
        # Quit if log_likelihoods have plateaued
        if epoch > 1:
            if LLs[ epoch - 1 ] == LL_epoch:
                print("Log-likelihoods have plateaued--terminating training")
                break
        # if validating, then calculate log-likelihood of validation set
        # to determine if training should be terminated.
        if validating:
            val_LL_epoch = 0
            for val_Obs in val_set:
                val_Obs = list( val_Obs )
                val_LL_epoch += forward( hmm=hmm, Obs=val_Obs, scaling=1 )[ 0 ]
            val_LLs.append( val_LL_epoch )
            # Remember the parameters of the best-scoring epoch so far
            if epoch > 0:
                if val_LL_epoch > best_val_LL:
                    best_A = copy.deepcopy( hmm.A )
                    best_B = copy.deepcopy( hmm.B )
                    best_Pi = copy.deepcopy( hmm.Pi )
                    best_epoch = epoch
                    best_val_LL = val_LL_epoch
            else:
                best_val_LL = val_LL_epoch
                best_epoch = 0
        if verbose:
            print("Finished epoch %d in %d secs" % ( epoch+1, int( time.time() - start_epoch ) ), LL_epoch)
            if validating:
                print("Validation LL: ", val_LLs[ epoch ])
    if graph:
        if validating:
            pylab.figure()
            pylab.subplot( 211 )
            pylab.title( "Training Reestimation Performance" )
            pylab.xlabel( "Epochs" )
            pylab.ylabel( r"$\log( P ( O | \lambda ) )$" )
            pylab.plot( LLs, label="Training data", color='red' )
            pylab.subplots_adjust( hspace=0.4 )
            pylab.subplot( 212 )
            pylab.title( "Validation Reestimation Performance" )
            pylab.plot( val_LLs, label="Validation LL", color='blue' )
            pylab.xlabel( "Epochs" )
            pylab.ylabel( r"$\log( P ( O | \lambda ) )$" )
            pylab.axvline( best_epoch, color="black", label="Lowest validation LL", linewidth=2 )
            pylab.legend( labelsep=0.01, shadow=1 , loc='lower right' )
            pylab.savefig( fname )
        else:
            pylab.figure()
            pylab.title( "Training Reestimation Performance" )
            pylab.xlabel( "Epochs" )
            pylab.ylabel( r"$\log( P ( O | \lambda ) )$" )
            pylab.plot( LLs, label="Training data", color='red' )
            pylab.savefig( fname )
    print("Total training time: %d secs" % ( int( time.time() - start ) ), "Best epoch: ", best_epoch)
    # When validating, return the parameters from the best epoch rather
    # than the last one.
    if validating:
        hmm.A = best_A
        hmm.B = best_B
        hmm.Pi = best_Pi
    return hmm
###################################################################################
################################ Example ########################################
###################################################################################
def dishonest_casino_test( graph = True ):
    """Demo: run forward/backward/Viterbi/Baum-Welch on the classic
    dishonest-casino two-state die model and print the results."""
    # create transition probability matrix
    A = numpy.array( [ [ 0.95, 0.05],[ 0.05, 0.95 ] ] )
    # create observable probability distribution matrix
    B = numpy.array( [ [ 1.0/6, 1.0/6, 1.0/6, 1.0/6, 1.0/6, 1.0/6, ], \
                       [ 1.0/10, 1.0/10, 1.0/10, 1.0/10, 1.0/10, 1.0/2 ] ] )
    # NOTE(review): the assignment below overwrites the A defined above —
    # looks like a leftover from an edit; the 0.99/0.01 matrix is the one
    # actually used.
    # create set of all observabB = [ (-1,.1), (1,.1) ]
    A = numpy.array( [ [ 0.99, 0.01 ], \
                       [ 0.01, 0.99 ] ] )
    #le symbols
    V =[1, 2, 3, 4, 5, 6]
    # instantiate an hmm, note Pi is uniform probability distribution
    # by default
    hmm = HMM( 2, A=A, B=B, V=V )
    # adjust the precision of printing float values
    numpy.set_printoptions( precision=4 )
    print("\nDishonest Casino Example:\n ")
    Obs = [ 1,2,1,6,6 ]
    print(hmm)
    print()
    print('*'*80)
    print('*'*80)
    print("\nWithout Scaling\n")
    print("\nObservation Sequence: %s\n" % ( Obs ))
    # Unscaled forward pass: returns the raw probability.
    prob_Obs, Alpha = forward( hmm, Obs, scaling=0 )
    print('*'*29)
    print("* Forward Algorithm Results *")
    print('*'*29 + '\n')
    print("p(Obs|hmm) ~ %.7f" % ( prob_Obs ))
    print("Alpha's:\n %s\n" % ( Alpha ))
    print( '*'*80 + '\n')
    Beta = backward( hmm, Obs )
    print('*'*30)
    print("* Backward Algorithm Results *")
    print('*'*30 + '\n')
    print("Beta's:\n %s\n" % ( str( Beta ) ))
    print('*'*80 + '\n')
    Q_star, Delta, Psi = viterbi( hmm, Obs, scaling=0 )
    print('*'*29)
    print( "* Viterbi Algorithm Results *")#Xi[ i,:,t ] = Xi[ i,:,t ] / Xi[ i,:,: ].sum( 1 )
    print( '*'*29 + '\n')
    print( "Q* = %s\n" % ( Q_star ))
    print( "Delta's:\n %s\n" % ( Delta ))
    print( "Psi:\n %s\n" % ( Psi ))
    print( '*'*80 + '\n')
    print( '*'*80)
    print( '*'*80)
    print( "\nWith Scaling\n")
    print( "\nObservation Sequence: %s\n" % ( Obs ))
    # Scaled forward pass: returns the log-likelihood, so exponentiate
    # before printing the probability.
    log_prob_Obs, Alpha, c = forward( hmm, Obs, scaling=1 )
    print( '*'*29)
    print( "* Forward Algorithm Results *")
    print( '*'*29 + '\n')
    print( "p(Obs|hmm) ~ %.7f" % ( numpy.exp( log_prob_Obs ) ))
    print( "Alpha's:\n %s\n" % ( Alpha ))
    print( '*'*80 + '\n')
    Beta = backward( hmm, Obs, c )
    print( '*'*30)
    print( "* Backward Algorithm Results *")
    print( '*'*30 + '\n')
    print( "Beta's:\n %s\n" % ( str( Beta ) ))
    print( '*'*80 + '\n')
    Q_star, Delta, Psi = viterbi( hmm, Obs, scaling=1 )
    print( '*'*29)
    print( "* Viterbi Algorithm Results *")
    print( '*'*29 + '\n')
    print( "Q* = %s\n" % ( Q_star ))
    print( "Delta's:\n %s\n" % ( Delta ))
    print( "Psi:\n %s\n" % ( Psi ))
    print( '*'*80 + '\n')
    # Train on the single observation sequence.
    c = []
    c.append( Obs )
    baum_welch( hmm, c, epochs=15, graph=graph )
###################################################################################
###################################################################################
###################################################################################
#if __name__ == "__main__":
# ## # run the example, you can turn off graphing by setting it to 0
### X = rand.uniform(0,1,10).reshape( (5,2) )
### print norm_df(X)
# dishonest_casino_test( graph = 1 )
#
#def runme():
#
# #based on Mike's DC example
# #transition probabilities
# #A = numpy.array( [ [ 0.95, 0.05],[ 0.1, 0.90 ] ] )
# A = numpy.array( [ [.5,.5],[.5,.5]])
# #emission probabilities
# B = numpy.array( [ [ 1.0/6, 1.0/6, 1.0/6, 1.0/6, 1.0/6, 1.0/6, ], \
# [ 1.0/10, 1.0/10, 1.0/10, 1.0/10, 1.0/10, 1.0/2 ] ] )
# #symbols
# V = [1,2,3,4,5,6]
#
# model = HMM(2,A=A,B=B,V=V)
# numpy.set_printoptions(precision=5) | mit |
BorisJeremic/Real-ESSI-Examples | analytic_solution/test_cases/4NodeANDES/cantilever_different_Poisson/NumberOfDivision4/PoissonRatio0.05/compare_HDF5_ALL.py | 424 | 3382 | #!/usr/bin/python
import h5py
import sys
import numpy as np
import os
import re
import random
# find the path to my own python function:
cur_dir=os.getcwd()
sep='test_cases'
test_DIR=cur_dir.split(sep,1)[0]
scriptDIR=test_DIR+'compare_function'
sys.path.append(scriptDIR)
# import my own function for color and comparator
from mycomparator import *
from mycolor_fun import *
# the real essi hdf5 results
# NOTE(review): this script is Python 2 (print statements below).
# Compare a freshly generated ESSI HDF5 result against a reference file.
h5_result_new = sys.argv[1]
h5_result_ori = sys.argv[2]
disp_pass_or_fail=h5diff_disp(h5_result_ori,h5_result_new)
# Gauss/element outputs may be absent from a result file; a KeyError
# while reading them is treated as "nothing to compare" (pass).
Gauss_pass_or_fail = 1
try:
    Gauss_pass_or_fail=h5diff_Gauss_output(h5_result_ori,h5_result_new)
except KeyError:
    pass
Element_Output_pass_or_fail = 1
try:
    Element_Output_pass_or_fail=h5diff_Element_output(h5_result_ori,h5_result_new)
except KeyError:
    pass
# Report an overall verdict, then one line per failing section.
if disp_pass_or_fail and Gauss_pass_or_fail and Element_Output_pass_or_fail:
    print headOK(), "All hdf5 results are the same."
    print headOKCASE(),"-----------Done this case!-----------------"
else:
    if disp_pass_or_fail==0:
        print headFailed(),"-----------Displacement has mismatches!-----------------"
    if Gauss_pass_or_fail==0:
        print headFailed(),"-----------StressStrain has mismatches!-----------------"
    if Element_Output_pass_or_fail==0:
        print headFailed(),"-----------Element output has mismatches!-----------------"
# # The allowable tolerance between the ori_vals and new_vals values.
# tolerance=1e-5
# machine_epsilon=1e-16
# ori_vals=[]
# new_vals=[]
# ori_vals.append(find_max_disp(h5_result_ori,0))
# new_vals.append(find_max_disp(h5_result_new,0))
# # if multiple steps, compare the max_disp of random steps
# Nstep = find_disp_Nstep(h5_result_ori)
# if Nstep>5 :
# for i in xrange(1,4):
# test_step=random.randint(1,Nstep-1)
# ori_vals.append(find_max_disp(h5_result_ori,test_step))
# new_vals.append(find_max_disp(h5_result_new,test_step))
# # calculate the errors
# errors=[]
# for index, x in enumerate(ori_vals):
# if(abs(x))>machine_epsilon:
# errors.append(abs((new_vals[index]-x)/x))
# else:
# errors.append(machine_epsilon)
# # compare and form the flags
# flags=[]
# for item in errors:
# if abs(item)<tolerance:
# flags.append('pass')
# else:
# flags.append('failed')
# # print the results
# case_flag=1
# print headrun() , "-----------Testing results-----------------"
# print headstep() ,'{0} {1} {2} {3}'.format('back_value ','new_value ','error ','flag')
# for index, x in enumerate(errors):
# if(abs(x)<tolerance):
# print headOK() ,'{0:e} {1:e} {2:0.2f} {3}'.format(ori_vals[index],new_vals[index], x, flags[index] )
# else:
# case_flag=0
# print headFailed() ,'{0:e} {1:e} {2:0.2f} {3}'.format(ori_vals[index],new_vals[index], x, flags[index] )
# if(case_flag==1):
# print headOKCASE(),"-----------Done this case!-----------------"
# legacy backup
# automatically find the script directory.
# sys.path.append("/home/yuan/Dropbox/3essi_self_verification/test_suite/scripts" )
# script_dir=sys.argv[1]
# print headstart() , "Running test cases..."
# print headlocation(), os.path.dirname(os.path.abspath(__file__))
# file_in=open("ori_vals_values.txt","r")
# Input the 1st line, which is the ori_vals value.
# ori_vals= float(file_in.readline())
# Input the 2nd line, which is the HDF5 output filename.
# new_vals=find_max_disp(file_in.readline());
# file_in.close() | cc0-1.0 |
chen0510566/MissionPlanner | Lib/quopri.py | 72 | 7206 | #! /usr/bin/env python
"""Conversions to/from quoted-printable transport encoding as per RFC 1521."""
# (Dec 1991 version).
__all__ = ["encode", "decode", "encodestring", "decodestring"]
ESCAPE = '='        # character that introduces a quoted-printable escape
MAXLINESIZE = 76    # maximum encoded line length per RFC 1521
HEX = '0123456789ABCDEF'        # digits used to build "=XX" escapes
EMPTYSTRING = ''
# Prefer the C implementations from binascii when available; the pure
# Python code paths in encode()/decode() below are only a fallback.
try:
    from binascii import a2b_qp, b2a_qp
except ImportError:
    a2b_qp = None
    b2a_qp = None
def needsquoting(c, quotetabs, header):
    """Decide whether a particular character needs to be quoted.

    The 'quotetabs' flag indicates whether embedded tabs and spaces should be
    quoted.  Note that line-ending tabs and spaces are always encoded, as per
    RFC 1521.
    """
    if c in ' \t':
        # Embedded whitespace is quoted only on request; trailing
        # whitespace is handled separately by the caller.
        return quotetabs
    if c == '_':
        # In header mode '_' represents an encoded space (RFC 1522), so a
        # literal underscore must itself be escaped.
        return header
    # The escape character and anything outside printable ASCII.
    return c == ESCAPE or not (' ' <= c <= '~')
def quote(c):
    """Return the quoted-printable escape (e.g. '=41') for character c."""
    hi, lo = divmod(ord(c), 16)
    return ESCAPE + HEX[hi] + HEX[lo]
def encode(input, output, quotetabs, header = 0):
    """Read 'input', apply quoted-printable encoding, and write to 'output'.

    'input' and 'output' are files with readline() and write() methods.
    The 'quotetabs' flag indicates whether embedded tabs and spaces should be
    quoted.  Note that line-ending tabs and spaces are always encoded, as per
    RFC 1521.
    The 'header' flag indicates whether we are encoding spaces as _ as per
    RFC 1522.
    """
    # Fast path: delegate to the C implementation when binascii has it.
    if b2a_qp is not None:
        data = input.read()
        odata = b2a_qp(data, quotetabs = quotetabs, header = header)
        output.write(odata)
        return

    # 'output' and the default line ending are bound as default arguments
    # so the helper can be called with a custom lineEnd for soft breaks.
    def write(s, output=output, lineEnd='\n'):
        # RFC 1521 requires that the line ending in a space or tab must have
        # that trailing character encoded.
        if s and s[-1:] in ' \t':
            output.write(s[:-1] + quote(s[-1]) + lineEnd)
        elif s == '.':
            # A lone '.' is escaped so the line cannot be mistaken for an
            # end-of-message marker by mail transports.
            output.write(quote(s) + lineEnd)
        else:
            output.write(s + lineEnd)

    # One line of lookahead: the previous line is emitted only once we know
    # whether the current read succeeded, so the final line can be written
    # without a trailing newline when the input had none.
    prevline = None
    while 1:
        line = input.readline()
        if not line:
            break
        outline = []
        # Strip off any readline induced trailing newline
        stripped = ''
        if line[-1:] == '\n':
            line = line[:-1]
            stripped = '\n'
        # Calculate the un-length-limited encoded line
        for c in line:
            if needsquoting(c, quotetabs, header):
                c = quote(c)
            if header and c == ' ':
                c = '_'
            # Arguments taken from RFC 1522, end of section 4.
            outline.append(c)
        # First, write out the previous line
        if prevline is not None:
            write(prevline)
        # Now see if we need any soft line breaks because of RFC-imposed
        # length limitations.  Then do the thisline->prevline dance.
        thisline = EMPTYSTRING.join(outline)
        while len(thisline) > MAXLINESIZE:
            # Don't forget to include the soft line break `=' sign in the
            # length calculation!
            write(thisline[:MAXLINESIZE-1], lineEnd='=\n')
            thisline = thisline[MAXLINESIZE-1:]
        # Write out the current line
        prevline = thisline
    # Write out the last line, without a trailing newline
    if prevline is not None:
        write(prevline, lineEnd=stripped)
def encodestring(s, quotetabs = 0, header = 0):
    """Encode the string 's' as quoted-printable and return the result.

    Convenience wrapper around encode() operating entirely in memory.
    (Python 2 code path: falls back to cStringIO when binascii.b2a_qp is
    unavailable.)
    """
    if b2a_qp is not None:
        return b2a_qp(s, quotetabs = quotetabs, header = header)
    from cStringIO import StringIO
    infp = StringIO(s)
    outfp = StringIO()
    encode(infp, outfp, quotetabs, header)
    return outfp.getvalue()
def decode(input, output, header = 0):
    """Read 'input', apply quoted-printable decoding, and write to 'output'.
    'input' and 'output' are files with readline() and write() methods.
    If 'header' is true, decode underscore as space (per RFC 1522)."""
    # Fast path: delegate to the C implementation (binascii.a2b_qp) when it
    # was importable at module load time.
    if a2b_qp is not None:
        data = input.read()
        odata = a2b_qp(data, header = header)
        output.write(odata)
        return

    # 'new' accumulates decoded text across soft line breaks ('=' at end of
    # line), which join physical lines into one logical line ('partial').
    new = ''
    while 1:
        line = input.readline()
        if not line: break
        i, n = 0, len(line)
        if n > 0 and line[n-1] == '\n':
            partial = 0; n = n-1
            # Strip trailing whitespace
            while n > 0 and line[n-1] in " \t\r":
                n = n-1
        else:
            # No newline: input ended mid-line, treat as a partial line.
            partial = 1
        while i < n:
            c = line[i]
            if c == '_' and header:
                new = new + ' '; i = i+1
            elif c != ESCAPE:
                new = new + c; i = i+1
            elif i+1 == n and not partial:
                # '=' at end of line is a soft line break.
                partial = 1; break
            elif i+1 < n and line[i+1] == ESCAPE:
                new = new + ESCAPE; i = i+2
            elif i+2 < n and ishex(line[i+1]) and ishex(line[i+2]):
                new = new + chr(unhex(line[i+1:i+3])); i = i+3
            else: # Bad escape sequence -- leave it in
                new = new + c; i = i+1
        if not partial:
            output.write(new + '\n')
            new = ''
    # Flush any trailing partial line (input ended without a newline).
    if new:
        output.write(new)
def decodestring(s, header = 0):
    """Decode the quoted-printable string 's' and return the result.

    Convenience wrapper around decode() operating entirely in memory.
    (Python 2 code path: falls back to cStringIO when binascii.a2b_qp is
    unavailable.)
    """
    if a2b_qp is not None:
        return a2b_qp(s, header = header)
    from cStringIO import StringIO
    infp = StringIO(s)
    outfp = StringIO()
    decode(infp, outfp, header = header)
    return outfp.getvalue()
# Other helper functions
def ishex(c):
    """Return true if the character 'c' is a hexadecimal digit."""
    if '0' <= c <= '9':
        return True
    return 'a' <= c <= 'f' or 'A' <= c <= 'F'
def unhex(s):
    """Get the integer value of a hexadecimal number.

    Accumulation stops silently at the first non-hex character, matching
    the historical behavior (e.g. '1g2' -> 1, '' -> 0).
    """
    value = 0
    for ch in s:
        if '0' <= ch <= '9':
            base = ord('0')
        elif 'a' <= ch <= 'f':
            base = ord('a') - 10
        elif 'A' <= ch <= 'F':
            base = ord('A') - 10
        else:
            break
        value = value * 16 + (ord(ch) - base)
    return value
def main():
    # Command-line driver (Python 2 syntax: print statements and
    # 'except Err, var'): encodes by default, -d decodes, -t also quotes
    # embedded tabs. '-' or no argument reads stdin.
    import sys
    import getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'td')
    except getopt.error, msg:
        sys.stdout = sys.stderr
        print msg
        print "usage: quopri [-t | -d] [file] ..."
        print "-t: quote tabs"
        print "-d: decode; default encode"
        sys.exit(2)
    deco = 0
    tabs = 0
    for o, a in opts:
        if o == '-t': tabs = 1
        if o == '-d': deco = 1
    if tabs and deco:
        sys.stdout = sys.stderr
        print "-t and -d are mutually exclusive"
        sys.exit(2)
    if not args: args = ['-']
    # sts becomes nonzero if any input file could not be opened; processing
    # continues with the remaining files and the exit status reflects failure.
    sts = 0
    for file in args:
        if file == '-':
            fp = sys.stdin
        else:
            try:
                fp = open(file)
            except IOError, msg:
                sys.stderr.write("%s: can't open (%s)\n" % (file, msg))
                sts = 1
                continue
        if deco:
            decode(fp, sys.stdout)
        else:
            encode(fp, sys.stdout, tabs)
        if fp is not sys.stdin:
            fp.close()
    if sts:
        sys.exit(sts)
if __name__ == '__main__':
    # Allow use as a command-line filter: quopri [-t | -d] [file] ...
    main()
| gpl-3.0 |
40223148/finaltest | static/Brython3.1.0-20150301-090019/Lib/unittest/test/support.py | 770 | 3379 | import unittest
class TestEquality(object):
"""Used as a mixin for TestCase"""
# Check for a valid __eq__ implementation
def test_eq(self):
for obj_1, obj_2 in self.eq_pairs:
self.assertEqual(obj_1, obj_2)
self.assertEqual(obj_2, obj_1)
# Check for a valid __ne__ implementation
def test_ne(self):
for obj_1, obj_2 in self.ne_pairs:
self.assertNotEqual(obj_1, obj_2)
self.assertNotEqual(obj_2, obj_1)
class TestHashing(object):
"""Used as a mixin for TestCase"""
# Check for a valid __hash__ implementation
def test_hash(self):
for obj_1, obj_2 in self.eq_pairs:
try:
if not hash(obj_1) == hash(obj_2):
self.fail("%r and %r do not hash equal" % (obj_1, obj_2))
except KeyboardInterrupt:
raise
except Exception as e:
self.fail("Problem hashing %r and %r: %s" % (obj_1, obj_2, e))
for obj_1, obj_2 in self.ne_pairs:
try:
if hash(obj_1) == hash(obj_2):
self.fail("%s and %s hash equal, but shouldn't" %
(obj_1, obj_2))
except KeyboardInterrupt:
raise
except Exception as e:
self.fail("Problem hashing %s and %s: %s" % (obj_1, obj_2, e))
class LoggingResult(unittest.TestResult):
    """TestResult that appends the name of every callback to a shared list.

    The list is supplied by the caller, so tests can assert on the exact
    sequence of result events.  Consistency fix: the original mixed bare
    ``super()`` with the long ``super(LoggingResult, self)`` form; all
    delegations now use the modern zero-argument form uniformly.
    """

    def __init__(self, log):
        self._events = log  # caller-owned event log (aliased, not copied)
        super().__init__()

    def startTest(self, test):
        self._events.append('startTest')
        super().startTest(test)

    def startTestRun(self):
        self._events.append('startTestRun')
        super().startTestRun()

    def stopTest(self, test):
        self._events.append('stopTest')
        super().stopTest(test)

    def stopTestRun(self):
        self._events.append('stopTestRun')
        super().stopTestRun()

    def addFailure(self, *args):
        self._events.append('addFailure')
        super().addFailure(*args)

    def addSuccess(self, *args):
        self._events.append('addSuccess')
        super().addSuccess(*args)

    def addError(self, *args):
        self._events.append('addError')
        super().addError(*args)

    def addSkip(self, *args):
        self._events.append('addSkip')
        super().addSkip(*args)

    def addExpectedFailure(self, *args):
        self._events.append('addExpectedFailure')
        super().addExpectedFailure(*args)

    def addUnexpectedSuccess(self, *args):
        self._events.append('addUnexpectedSuccess')
        super().addUnexpectedSuccess(*args)
class ResultWithNoStartTestRunStopTestRun(object):
    """An object honouring TestResult before startTestRun/stopTestRun.

    Mimics the pre-2.7 TestResult surface: all the collection attributes
    exist, every callback is a no-op, and the run always counts as a success.
    """

    def __init__(self):
        for attr in ('failures', 'errors', 'skipped',
                     'expectedFailures', 'unexpectedSuccesses'):
            setattr(self, attr, [])
        self.testsRun = 0
        self.shouldStop = False

    def startTest(self, test):
        pass

    def stopTest(self, test):
        pass

    def addError(self, test):
        pass

    def addFailure(self, test):
        pass

    def addSuccess(self, test):
        pass

    def wasSuccessful(self):
        return True
| gpl-3.0 |
asimshankar/tensorflow | tensorflow/python/keras/optimizers_test.py | 1 | 10342 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras optimizers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gc
import os
import weakref
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python import tf2
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test
from tensorflow.python.training.adam import AdamOptimizer
def _get_model(input_dim, num_hidden, output_dim):
  """Build a two-layer MLP: ReLU hidden layer followed by a softmax output."""
  layers = [
      keras.layers.Dense(num_hidden,
                         activation='relu',
                         input_shape=(input_dim,)),
      keras.layers.Dense(output_dim, activation='softmax'),
  ]
  model = keras.models.Sequential()
  for layer in layers:
    model.add(layer)
  return model
def _test_optimizer(optimizer, target=0.75):
  """Train a tiny 2-class model with `optimizer` and sanity-check it.

  Verifies: (1) final training accuracy reaches `target`; (2) the
  optimizer's `iterations` counter advances exactly once per batch;
  (3) serialize/deserialize round-trips losslessly (modulo renamed or
  defaulted config keys); (4) kernel/bias constraints are applied.
  """
  np.random.seed(1337)
  (x_train, y_train), _ = testing_utils.get_test_data(train_samples=1000,
                                                      test_samples=200,
                                                      input_shape=(10,),
                                                      num_classes=2)
  y_train = keras.utils.to_categorical(y_train)
  model = _get_model(x_train.shape[1], 20, y_train.shape[1])
  model.compile(loss='categorical_crossentropy',
                optimizer=optimizer,
                metrics=['accuracy'])
  np.testing.assert_equal(keras.backend.get_value(model.optimizer.iterations),
                          0)
  history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
  # 1000 samples / batch 16 -> 63 steps per epoch, 2 epochs -> 126 iterations.
  np.testing.assert_equal(keras.backend.get_value(model.optimizer.iterations),
                          126)  # 63 steps per epoch
  assert history.history['acc'][-1] >= target
  # Round-trip the optimizer config; normalize keys that legitimately differ
  # between the old and new serialized forms before comparing.
  config = keras.optimizers.serialize(optimizer)
  optim = keras.optimizers.deserialize(config)
  new_config = keras.optimizers.serialize(optim)
  new_config['class_name'] = new_config['class_name'].lower()
  new_config['config'].pop('name', None)
  if 'amsgrad' not in config['config']:
    new_config['config'].pop('amsgrad', None)
  if 'decay' in new_config['config'] and 'schedule_decay' in config['config']:
    new_config['config']['schedule_decay'] = new_config['config'].pop('decay')
  if 'momentum' not in config['config']:
    new_config['config'].pop('momentum', None)
  if 'centered' not in config['config']:
    new_config['config'].pop('centered', None)
  assert config == new_config
  # Test constraints.
  model = keras.models.Sequential()
  # Constraints clamp the kernel to all-ones and the bias to all-twos, so a
  # single training step must leave the weights at those values.
  dense = keras.layers.Dense(10,
                             input_shape=(x_train.shape[1],),
                             kernel_constraint=lambda x: 0. * x + 1.,
                             bias_constraint=lambda x: 0. * x + 2.,
                             activation='relu')
  model.add(dense)
  model.add(keras.layers.Dense(y_train.shape[1], activation='softmax'))
  model.compile(loss='categorical_crossentropy',
                optimizer=optimizer,
                metrics=['accuracy'])
  np.testing.assert_equal(keras.backend.get_value(model.optimizer.iterations),
                          126)  # Using same optimizer from before
  model.train_on_batch(x_train[:10], y_train[:10])
  np.testing.assert_equal(keras.backend.get_value(model.optimizer.iterations),
                          127)
  kernel, bias = dense.get_weights()
  np.testing.assert_allclose(kernel, 1., atol=1e-3)
  np.testing.assert_allclose(bias, 2., atol=1e-3)
class KerasOptimizersTest(test.TestCase):
  """End-to-end checks for the built-in v1 Keras optimizers.

  Each test trains a small model via _test_optimizer, which also verifies
  iteration counting, (de)serialization round-trips and weight constraints.
  """

  def test_sgd(self):
    with self.cached_session():
      _test_optimizer(keras.optimizers.SGD(lr=0.01,
                                           momentum=0.9,
                                           nesterov=True))

  def test_rmsprop(self):
    with self.cached_session():
      _test_optimizer(keras.optimizers.RMSprop())
      _test_optimizer(keras.optimizers.RMSprop(decay=1e-3))

  def test_adagrad(self):
    with self.cached_session():
      _test_optimizer(keras.optimizers.Adagrad())
      _test_optimizer(keras.optimizers.Adagrad(decay=1e-3))

  def test_adadelta(self):
    with self.cached_session():
      _test_optimizer(keras.optimizers.Adadelta(), target=0.6)
      # Accuracy seems dependent on the initialization. Even adding tf.Print
      # nodes in the graph seemed to affect the initialization seed, and hence
      # the accuracy.
      _test_optimizer(keras.optimizers.Adadelta(decay=1e-3), target=0.4)

  def test_adam(self):
    with self.cached_session():
      _test_optimizer(keras.optimizers.Adam())
      _test_optimizer(keras.optimizers.Adam(decay=1e-3))
      _test_optimizer(keras.optimizers.Adam(amsgrad=True))

  def test_adamax(self):
    with self.cached_session():
      _test_optimizer(keras.optimizers.Adamax())
      _test_optimizer(keras.optimizers.Adamax(decay=1e-3))

  def test_nadam(self):
    with self.cached_session():
      _test_optimizer(keras.optimizers.Nadam())

  def test_clipnorm(self):
    with self.cached_session():
      _test_optimizer(keras.optimizers.SGD(lr=0.01,
                                           momentum=0.9,
                                           clipnorm=0.5))

  def test_clipvalue(self):
    with self.cached_session():
      _test_optimizer(keras.optimizers.SGD(lr=0.01,
                                           momentum=0.9,
                                           clipvalue=0.5))

  def test_tfoptimizer(self):
    # Wrapping a raw tf.train optimizer in TFOptimizer supports training,
    # but introspection (weights, config round-trips) is unsupported.
    optimizer = keras.optimizers.TFOptimizer(AdamOptimizer(0.01))
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(
        2, input_shape=(3,), kernel_constraint=keras.constraints.MaxNorm(1)))
    # This is possible
    model.compile(loss='mean_squared_error', optimizer=optimizer)
    keras.backend.track_tf_optimizer(optimizer)
    model.fit(np.random.random((5, 3)),
              np.random.random((5, 2)),
              epochs=1,
              batch_size=5,
              verbose=0)
    # not supported
    with self.assertRaises(NotImplementedError):
      _ = optimizer.weights
    with self.assertRaises(NotImplementedError):
      optimizer.get_config()
    with self.assertRaises(NotImplementedError):
      optimizer.from_config(None)

  def test_optimizer_garbage_collection(self):
    # Tracking a TFOptimizer must not create reference cycles that keep the
    # graph or the optimizer alive after both are deleted.
    graph = ops.Graph()
    with graph.as_default():
      optimizer = keras.optimizers.TFOptimizer(AdamOptimizer(0.01))
      keras.backend.track_tf_optimizer(optimizer)
      optimizer_weak = weakref.ref(optimizer)
    graph_weak = weakref.ref(graph)
    del graph, optimizer
    gc.collect()
    # Check that the weak references are dead now.
    self.assertIs(graph_weak(), None)
    self.assertIs(optimizer_weak(), None)

  @test_util.run_in_graph_and_eager_modes
  def test_tfoptimizer_iterations(self):
    with self.cached_session():
      optimizer = keras.optimizers.TFOptimizer(AdamOptimizer(0.01))
      model = keras.models.Sequential()
      model.add(keras.layers.Dense(
          2, input_shape=(3,), kernel_constraint=keras.constraints.MaxNorm(1)))
      model.compile(loss='mean_squared_error', optimizer=optimizer)
      keras.backend.track_tf_optimizer(optimizer)
      self.assertEqual(keras.backend.get_value(model.optimizer.iterations), 0)
      # 55 samples / batch 5 -> 11 iterations after one epoch.
      model.fit(np.random.random((55, 3)),
                np.random.random((55, 2)),
                epochs=1,
                batch_size=5,
                verbose=0)
      self.assertEqual(keras.backend.get_value(model.optimizer.iterations), 11)
      if not context.executing_eagerly():
        # TODO(kathywu): investigate why training with an array input and
        # setting the argument steps_per_epoch does not work in eager mode.
        model.fit(np.random.random((20, 3)),
                  np.random.random((20, 2)),
                  steps_per_epoch=8,
                  verbose=0)
        self.assertEqual(
            keras.backend.get_value(model.optimizer.iterations), 19)

  def test_negative_clipvalue_or_clipnorm(self):
    with self.assertRaises(ValueError):
      _ = keras.optimizers.SGD(lr=0.01, clipvalue=-0.5)
    with self.assertRaises(ValueError):
      _ = keras.optimizers.Adam(clipnorm=-2.0)
@test_util.run_all_in_graph_and_eager_modes
class KerasV2OptimizersTest(test.TestCase, parameterized.TestCase):
  """Checks string optimizer names resolve correctly in both TF1/TF2 modes."""

  @parameterized.named_parameters(
      ('adadelta_tf2', 'adadelta', True), ('adadelta_tf1', 'adadelta', False),
      ('adagrad_tf2', 'adagrad', True), ('adagrad_tf1', 'adagrad', False),
      ('adam_tf2', 'adam', True), ('adam_tf1', 'adam', False),
      ('adamax_tf2', 'adamax', True), ('adamax_tf1', 'adamax', False),
      ('sgd_tf2', 'sgd', True), ('sgd_tf1', 'sgd', False),
      ('nadam_tf2', 'nadam', True), ('nadam_tf1', 'nadam', False),
      ('rmsprop_tf2', 'rmsprop', True), ('rmsprop_tf1', 'rmsprop', False))
  def test_load_from_string(self, optimizer_string, tf2mode):
    # Toggle TF2 behavior via the environment variable; the previous value is
    # restored at the end so later tests are unaffected.
    old_mode = os.environ.get('TF2_BEHAVIOR', None)
    if tf2mode:
      os.environ['TF2_BEHAVIOR'] = 'enabled'
    else:
      if 'TF2_BEHAVIOR' in os.environ:
        del os.environ['TF2_BEHAVIOR']
    # Sanity check.
    self.assertEqual(tf2.enabled(), tf2mode)
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(1, input_shape=(10,)))
    model.compile(optimizer_string, 'binary_crossentropy')
    # The string name must resolve to the matching optimizer class.
    self.assertEqual(optimizer_string,
                     model.optimizer.__class__.__name__.lower())
    model.fit(np.ones((10, 10), 'float32'), np.ones((10, 1), 'float32'))
    if old_mode is not None:
      os.environ['TF2_BEHAVIOR'] = old_mode
if __name__ == '__main__':
  # Run all test cases in this module via the TensorFlow test runner.
  test.main()
| apache-2.0 |
jheld/pylint | test/input/func_method_could_be_function.py | 3 | 1333 | # pylint: disable=R0903,R0922,W0232
"""test detection of method which could be a function"""
__revision__ = None
class Toto(object):
    """lint fixture: one real method (uses self) and one that does not"""
    def __init__(self):
        self.aaa = 2

    def regular_method(self):
        """this method is a real method since it accesses self"""
        self.function_method()

    def function_method(self):
        """this method isn't a real method since it doesn't need self"""
        print 'hello'
class Base:
    """an abstract class (old-style on purpose, see the pylint disables)"""
    def __init__(self):
        self.aaa = 2

    def check(self, arg):
        """an abstract method, could not be a function"""
        raise NotImplementedError
class Sub(Base):
    """a concrete class"""
    def check(self, arg):
        """a concrete method, could not be a function since it needs
        polymorphism benefits
        """
        return arg == 0
class Super:
    """same as before without abstract"""
    x = 1  # class attribute read by method() below
    def method(self):
        """regular"""
        print self.x
class Sub1(Super):
    """overrides a method that needs self; dunders are exempt anyway"""
    def method(self):
        """no i can not be a function"""
        print 42

    def __len__(self):
        """no i can not be a function"""
        print 42

    def __cmp__(self, other):
        """no i can not be a function"""
        print 42
| gpl-2.0 |
kawamon/hue | desktop/core/src/desktop/lib/python_util.py | 2 | 7214 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import object
from six import string_types
import datetime
import logging
import select
import socket
import sys
from desktop import conf
from desktop.lib.i18n import smart_str
from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE
if sys.version_info[0] > 2:
from django.utils.translation import gettext as _
else:
from django.utils.translation import ugettext as _
BOMS = (
(BOM_UTF8, "UTF-8"),
(BOM_UTF32_BE, "UTF-32-BE"),
(BOM_UTF32_LE, "UTF-32-LE"),
(BOM_UTF16_BE, "UTF-16-BE"),
(BOM_UTF16_LE, "UTF-16-LE"),
)
__all__ = ['CaseInsensitiveDict', 'create_synchronous_io_multiplexer']
LOG = logging.getLogger(__name__)
class CaseInsensitiveDict(dict):
  """dict whose string keys are normalized to lower case on access.

  Bug fix: only __setitem__/__getitem__/__contains__ used to lower-case the
  key, so d.get('KEY'), d.pop('KEY') and `del d['KEY']` silently did
  case-sensitive lookups.  Those methods are now normalized as well.
  NOTE: constructing via CaseInsensitiveDict(mapping) bypasses __setitem__
  (plain dict behavior); use from_dict() for that.
  """

  def __setitem__(self, key, value):
    super(CaseInsensitiveDict, self).__setitem__(key.lower(), value)

  def __getitem__(self, key):
    return super(CaseInsensitiveDict, self).__getitem__(key.lower())

  def __delitem__(self, key):
    super(CaseInsensitiveDict, self).__delitem__(key.lower())

  def __contains__(self, key):
    return super(CaseInsensitiveDict, self).__contains__(key.lower())

  def get(self, key, default=None):
    return super(CaseInsensitiveDict, self).get(key.lower(), default)

  def pop(self, key, *args):
    return super(CaseInsensitiveDict, self).pop(key.lower(), *args)

  @classmethod
  def from_dict(cls, _dict):
    # Lower-case string keys up front since dict.__init__ skips __setitem__.
    return CaseInsensitiveDict([(isinstance(key, string_types) and key.lower() or key, _dict[key]) for key in _dict])
class SynchronousIOMultiplexer(object):
  """Abstract interface for synchronous readiness checks on file descriptors.

  Concrete subclasses implement read()/write()/error() to return the subset
  of the given descriptors that are ready for that operation.
  """

  def read(self, rd):
    raise NotImplementedError(_('"read" method is not implemented'))

  def write(self, rd):
    raise NotImplementedError(_('"write" method is not implemented'))

  def error(self, rd):
    raise NotImplementedError(_('"error" method is not implemented'))
class SelectSynchronousIOMultiplexer(SynchronousIOMultiplexer):
  """select()-based multiplexer; works on all platforms."""

  def __init__(self, timeout=0):
    # Bug fix: the timeout argument was previously discarded
    # (self.timeout = 0), so every caller-supplied timeout was ignored.
    self.timeout = timeout

  def read(self, fds):
    """Return the subset of `fds` readable within self.timeout seconds."""
    rlist, wlist, xlist = select.select(fds, [], [], self.timeout)
    return rlist
class PollSynchronousIOMultiplexer(SynchronousIOMultiplexer):
  """poll()-based multiplexer; not available on all platforms (e.g. Windows).

  NOTE(review): select.poll() expects its timeout in milliseconds while
  select.select() takes seconds, so the two implementations interpret the
  same `timeout` value differently — confirm which unit callers intend.
  """

  def __init__(self, timeout=0):
    # Bug fix: the timeout argument was previously discarded
    # (self.timeout = 0), so every caller-supplied timeout was ignored.
    self.timeout = timeout

  def read(self, fds):
    """Return the fds (subset of `fds`) with pending input, per poll()."""
    poll_obj = select.poll()
    for fd in fds:
      poll_obj.register(fd, select.POLLIN)

    event_list = poll_obj.poll(self.timeout)
    return [fd_event_tuple[0] for fd_event_tuple in event_list]
def create_synchronous_io_multiplexer(timeout=0):
  """Return a poll-based multiplexer when enabled and available, else select-based."""
  if conf.POLL_ENABLED.get():
    try:
      from select import poll  # availability probe; poll() is platform-dependent
    except ImportError:
      pass
    else:
      return PollSynchronousIOMultiplexer(timeout)
  return SelectSynchronousIOMultiplexer(timeout)
def find_unused_port():
  """
  Finds a port that's available.
  Unfortunately, this port may not be available by the time
  the subprocess uses it, but this generally works.
  """
  probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  try:
    # bind() to port 0 makes the OS pick a free ephemeral port.
    probe.bind(('127.0.0.1', 0))
    probe.listen(socket.SOMAXCONN)
    port = probe.getsockname()[1]
  finally:
    probe.close()
  return port
def force_list_to_strings(lst):
  """Return a new list with string elements passed through smart_str.

  Nested dicts and lists are converted recursively; all other elements are
  kept as-is.  Empty or falsy input is returned unchanged.
  """
  if not lst:
    return lst

  converted = []
  for element in lst:
    if isinstance(element, string_types):
      # Strings should not be unicode.
      converted.append(smart_str(element))
    elif isinstance(element, dict):
      # Recursively force dicts to strings.
      converted.append(force_dict_to_strings(element))
    elif isinstance(element, list):
      converted.append(force_list_to_strings(element))
    else:
      # Normal objects, or other literals, should not be converted.
      converted.append(element)
  return converted
def force_dict_to_strings(dictionary):
  """Return a new dict with keys and string values passed through smart_str.

  Nested dicts and lists are converted recursively (mirrors
  force_list_to_strings); other values are kept as-is.  Falsy input is
  returned unchanged.
  """
  if not dictionary:
    return dictionary

  new_dict = {}
  for k in dictionary:
    new_key = smart_str(k)
    if isinstance(dictionary[k], string_types):
      # Strings should not be unicode.
      new_dict[new_key] = smart_str(dictionary[k])
    elif isinstance(dictionary[k], dict):
      # Recursively force dicts to strings.
      new_dict[new_key] = force_dict_to_strings(dictionary[k])
    elif isinstance(dictionary[k], list):
      new_dict[new_key] = force_list_to_strings(dictionary[k])
    else:
      # Normal objects, or other literals, should not be converted.
      new_dict[new_key] = dictionary[k]

  return new_dict
def from_string_to_bits(str_value):
  """Return the concatenated 8-bit binary representation of each character."""
  bits = []
  for ch in str_value:
    bits.append(format(ord(ch), '08b'))
  return ''.join(bits)
def get_bytes_from_bits(bit_string):
  """
  Convert a bit string into a list of byte values (Python 3 or above).

  The string is right-padded with zero bits up to a multiple of 8.
  Bug fix: the old padding expression ('0' * (8 - len % 8)) appended a
  spurious all-zero byte whenever the length was already a multiple of 8;
  it also crashed on empty input, which now returns [].
  """
  if not bit_string:
    return []
  padded_bits = bit_string + '0' * (-len(bit_string) % 8)
  return list(int(padded_bits, 2).to_bytes(len(padded_bits) // 8, 'big'))
def _decodes_as(data, encoding):
  """Return True if *data* (bytes) decodes cleanly with *encoding*.

  Shared helper consolidating eight copy-pasted try/decode functions.
  """
  try:
    data.decode(encoding)
  except UnicodeDecodeError:
    return False
  else:
    return True


def isASCII(data):
  return _decodes_as(data, 'ASCII')


def isUTF8(data):
  return _decodes_as(data, 'UTF-8')


def isGB2312(data):
  return _decodes_as(data, 'gb2312')


def is_big5(data):
  return _decodes_as(data, 'big5')


def is_shift_jis(data):
  return _decodes_as(data, 'shift-jis')


def isEUCKR(data):
  return _decodes_as(data, 'EUC-KR')


def isISO8859_1(data):
  # iso-8859-1 maps every byte value, so this effectively always succeeds.
  return _decodes_as(data, 'iso-8859-1')


def isCP1252(data):
  return _decodes_as(data, 'cp1252')
def isUTF8Strict(data):
  """True if *data* is valid UTF-8 and contains no surrogate code points.

  The surrogate scan matters mainly on Python 2, where decoding does not
  reject surrogates by itself.
  """
  try:
    decoded = data.decode('UTF-8')
  except UnicodeDecodeError:
    return False
  return all(not 0xD800 <= ord(ch) <= 0xDFFF for ch in decoded)
def check_bom(data):
  """Return the encodings (usually zero or one) whose BOM prefixes *data*."""
  matches = []
  for marker, encoding_name in BOMS:
    if data.startswith(marker):
      matches.append(encoding_name)
  return matches
def check_encoding(data):
  """
  this is a simplified alternative to GPL chardet

  Tries candidate encodings from most to least restrictive and returns the
  name of the first one that decodes *data* cleanly; a BOM, when present,
  pins the answer exactly.  Falls back to 'cp1252'.
  """
  LOG.debug("checking data encoding: %s" % data)  # NOTE(review): logs the raw payload; may be noisy/sensitive
  if isASCII(data):
    return 'ASCII'
  elif sys.version_info[0] == 2 and isUTF8(data):
    return 'utf-8'
  elif sys.version_info[0] > 2 and isUTF8Strict(data):
    # Python 3: additionally reject surrogate code points.
    return 'utf-8'
  else:
    encoding = check_bom(data)
    if encoding:
      return encoding[0]
    elif isEUCKR(data):
      return 'EUC-KR'
    elif isGB2312(data):
      return 'gb2312'
    elif is_big5(data):
      return 'big5'
    elif is_shift_jis(data):
      return 'shift-jis'
    elif isISO8859_1(data):
      # iso-8859-1 accepts every byte, so this branch effectively always hits.
      return 'iso-8859-1'
    else:
      return 'cp1252'
def current_ms_from_utc():
  """Milliseconds elapsed since the Unix epoch, as a float (UTC)."""
  epoch = datetime.datetime.utcfromtimestamp(0)
  elapsed = datetime.datetime.utcnow() - epoch
  return elapsed.total_seconds() * 1000
| apache-2.0 |
stephen144/odoo | addons/auth_oauth/res_users.py | 21 | 5273 | import logging
import werkzeug.urls
import urlparse
import urllib2
import json
import openerp
from openerp.addons.auth_signup.res_users import SignupError
from openerp.osv import osv, fields
from openerp import SUPERUSER_ID
_logger = logging.getLogger(__name__)
class res_users(osv.Model):
    """Extends res.users with OAuth-based sign-in (Python 2 / old OpenERP API).

    Stores the provider, the provider-side user id and the last access
    token, and accepts the access token in place of a password in
    check_credentials().
    """
    _inherit = 'res.users'

    _columns = {
        'oauth_provider_id': fields.many2one('auth.oauth.provider', 'OAuth Provider'),
        'oauth_uid': fields.char('OAuth User ID', help="Oauth Provider user_id", copy=False),
        'oauth_access_token': fields.char('OAuth Access Token', readonly=True, copy=False),
    }

    _sql_constraints = [
        # One provider-side identity maps to at most one local user.
        ('uniq_users_oauth_provider_oauth_uid', 'unique(oauth_provider_id, oauth_uid)', 'OAuth UID must be unique per provider'),
    ]

    def _auth_oauth_rpc(self, cr, uid, endpoint, access_token, context=None):
        # GET the endpoint with the access token appended to the query
        # string ('&' when the endpoint already has one) and parse the JSON
        # response.  NOTE(review): werkzeug.url_encode relies on a top-level
        # re-export from werkzeug.urls in old werkzeug versions — confirm.
        params = werkzeug.url_encode({'access_token': access_token})
        if urlparse.urlparse(endpoint)[4]:
            url = endpoint + '&' + params
        else:
            url = endpoint + '?' + params
        f = urllib2.urlopen(url)
        response = f.read()
        return json.loads(response)

    def _auth_oauth_validate(self, cr, uid, provider, access_token, context=None):
        """ return the validation data corresponding to the access token """
        p = self.pool.get('auth.oauth.provider').browse(cr, uid, provider, context=context)
        validation = self._auth_oauth_rpc(cr, uid, p.validation_endpoint, access_token, context=context)
        if validation.get("error"):
            raise Exception(validation['error'])
        # Optionally enrich the validation dict from the provider's data
        # endpoint (e.g. name/email).
        if p.data_endpoint:
            data = self._auth_oauth_rpc(cr, uid, p.data_endpoint, access_token, context=context)
            validation.update(data)
        return validation

    def _generate_signup_values(self, cr, uid, provider, validation, params, context=None):
        # Build the res.users values for auto-signup; falls back to a
        # synthetic login when the provider did not return an email.
        oauth_uid = validation['user_id']
        email = validation.get('email', 'provider_%s_user_%s' % (provider, oauth_uid))
        name = validation.get('name', email)
        return {
            'name': name,
            'login': email,
            'email': email,
            'oauth_provider_id': provider,
            'oauth_uid': oauth_uid,
            'oauth_access_token': params['access_token'],
            'active': True,
        }

    def _auth_oauth_signin(self, cr, uid, provider, validation, params, context=None):
        """ retrieve and sign in the user corresponding to provider and validated access token
            :param provider: oauth provider id (int)
            :param validation: result of validation of access token (dict)
            :param params: oauth parameters (dict)
            :return: user login (str)
            :raise: openerp.exceptions.AccessDenied if signin failed

            This method can be overridden to add alternative signin methods.
        """
        try:
            oauth_uid = validation['user_id']
            user_ids = self.search(cr, uid, [("oauth_uid", "=", oauth_uid), ('oauth_provider_id', '=', provider)])
            if not user_ids:
                raise openerp.exceptions.AccessDenied()
            # The SQL unique constraint guarantees at most one match.
            assert len(user_ids) == 1
            user = self.browse(cr, uid, user_ids[0], context=context)
            user.write({'oauth_access_token': params['access_token']})
            return user.login
        except openerp.exceptions.AccessDenied, access_denied_exception:
            # Unknown user: fall back to signup with the token carried in the
            # OAuth 'state' payload, unless user creation is disabled.
            if context and context.get('no_user_creation'):
                return None
            state = json.loads(params['state'])
            token = state.get('t')
            values = self._generate_signup_values(cr, uid, provider, validation, params, context=context)
            try:
                _, login, _ = self.signup(cr, uid, values, token, context=context)
                return login
            except SignupError:
                # Re-raise the original AccessDenied, not the SignupError.
                raise access_denied_exception

    def auth_oauth(self, cr, uid, provider, params, context=None):
        # Advice by Google (to avoid Confused Deputy Problem)
        # if validation.audience != OUR_CLIENT_ID:
        #     abort()
        # else:
        #     continue with the process
        access_token = params.get('access_token')
        validation = self._auth_oauth_validate(cr, uid, provider, access_token, context=context)
        # required check
        if not validation.get('user_id'):
            # Workaround: facebook does not send 'user_id' in Open Graph Api
            if validation.get('id'):
                validation['user_id'] = validation['id']
            else:
                raise openerp.exceptions.AccessDenied()
        # retrieve and sign in user
        login = self._auth_oauth_signin(cr, uid, provider, validation, params, context=context)
        if not login:
            raise openerp.exceptions.AccessDenied()
        # return user credentials
        return (cr.dbname, login, access_token)

    def check_credentials(self, cr, uid, password):
        # Accept the stored OAuth access token as an alternative "password"
        # when the regular password check is denied.
        try:
            return super(res_users, self).check_credentials(cr, uid, password)
        except openerp.exceptions.AccessDenied:
            res = self.search(cr, SUPERUSER_ID, [('id', '=', uid), ('oauth_access_token', '=', password)])
            if not res:
                raise
| agpl-3.0 |
heytcass/homeassistant-config | deps/google/protobuf/internal/any_test_pb2.py | 7 | 2672 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/any_test.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/any_test.proto',
package='google.protobuf.internal',
syntax='proto3',
serialized_pb=_b('\n\'google/protobuf/internal/any_test.proto\x12\x18google.protobuf.internal\x1a\x19google/protobuf/any.proto\"A\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x11\n\tint_value\x18\x02 \x01(\x05\x62\x06proto3')
,
dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_TESTANY = _descriptor.Descriptor(
name='TestAny',
full_name='google.protobuf.internal.TestAny',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.protobuf.internal.TestAny.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='int_value', full_name='google.protobuf.internal.TestAny.int_value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=96,
serialized_end=161,
)
_TESTANY.fields_by_name['value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
DESCRIPTOR.message_types_by_name['TestAny'] = _TESTANY
TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), dict(
DESCRIPTOR = _TESTANY,
__module__ = 'google.protobuf.internal.any_test_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.TestAny)
))
_sym_db.RegisterMessage(TestAny)
# @@protoc_insertion_point(module_scope)
| mit |
siavooshpayandehazad/SoCDep2 | src/main/python/Scheduler/Scheduling_Functions.py | 2 | 1355 | # Copyright (C) 2015 Siavoosh Payandeh Azad
def find_schedule_make_span(ag):
    """
    Calculates the makespan of the Scheduling
    :param ag: Architecture Graph
    :return: MakeSpan of the Scheduling (0 when nothing is scheduled)
    """
    # scheduling[task] is indexed [1] for the finish time; the makespan is
    # the latest finish time over every task mapped on every PE.
    finish_times = (ag.node[node]['PE'].scheduling[task][1]
                    for node in ag.nodes()
                    for task in ag.node[node]['PE'].mapped_tasks)
    return max(finish_times, default=0)
################################################################
def clear_scheduling(ag):
    """
    Clears scheduling from PEs, Routers and Links
    :param ag: Architecture Graph
    :return: None
    """
    for node in ag.nodes():
        node_data = ag.node[node]
        node_data['PE'].scheduling = {}
        node_data['Router'].scheduling = {}
    for link in ag.edges():
        ag.edges[link]['Scheduling'] = {}
    return None
def check_if_all_deadlines_are_met(tg, ag):
    # Returns False as soon as any hard-criticality ('H') task finishes
    # after its deadline; the final 'return True' follows this block.
    for node in ag:
        for task in ag.node[node]['PE'].scheduling.keys():
            if tg.node[task]['task'].criticality == 'H':
                # scheduling[task] is a (start, finish) pair; index [1] below
                # is the finish time.
                scheduling_time = ag.node[node]['PE'].scheduling[task]
                # print(task, scheduling_time)
                if tg.node[task]['task'].deadline < scheduling_time[1]:
                    # print(tg.node[task]['task'].deadline, scheduling_time[1])
                    return False
    # print("-"*35)
return True | gpl-2.0 |
zcbenz/cefode-chromium | chrome/browser/nacl_host/test/debug_stub_browser_tests.py | 97 | 3312 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import sys
import xml.etree.ElementTree
import gdb_rsp
def GetTargetArch(connection):
  """Get the CPU architecture of the NaCl application.

  Reads the gdb-remote target description XML and returns the text of its
  <architecture> element.
  """
  reply = connection.RspRequest('qXfer:features:read:target.xml:0,fff')
  # 'l' marks the last (here: only) chunk of the qXfer transfer.
  assert reply[0] == 'l', reply
  features = xml.etree.ElementTree.fromstring(reply[1:])
  arch_node = features.find('architecture')
  assert arch_node is not None, reply
  return arch_node.text.strip()
def ReverseBytes(byte_string):
  """Reverse bytes in the hex string: '09ab' -> 'ab09'. This converts
  little-endian number in the hex string to its normal string representation.
  """
  assert len(byte_string) % 2 == 0, byte_string
  pairs = [byte_string[i:i + 2] for i in range(0, len(byte_string), 2)]
  return ''.join(reversed(pairs))
def GetProgCtrString(connection, arch):
  """Get current execution point.

  Reads the full register dump ('g' packet, hex string, registers
  little-endian and fixed-width) and returns the program counter as a
  big-endian hex string.
  """
  registers = connection.RspRequest('g')
  # PC register indices can be found in
  # native_client/src/trusted/debug_stub/abi.cc in AbiInit function.
  if arch == 'i386':
    # eip index is 8; 4-byte registers -> 8 hex chars each.
    return ReverseBytes(registers[8 * 8 : 8 * 8 + 8])
  if arch == 'i386:x86-64':
    # rip index is 16; 8-byte registers -> 16 hex chars each.
    # NOTE(review): only 8 hex chars (the low 32 bits of rip) are taken
    # here — presumably sufficient for NaCl's 32-bit sandbox addresses,
    # but confirm against the debug stub's expectations.
    return ReverseBytes(registers[16 * 16 : 16 * 16 + 8])
  if arch == 'iwmmxt':
    # pc index is 15; 4-byte registers -> 8 hex chars each.
    return ReverseBytes(registers[15 * 8 : 15 * 8 + 8])
  raise AssertionError('Unknown architecture: %s' % arch)
def TestContinue(connection):
  """Resume execution and expect the module to be killed on success."""
  # Once the NaCl test module reports that the test passed, the NaCl <embed>
  # element is removed from the page and so the NaCl module is killed by
  # the browser what is reported as exit due to SIGKILL (X09).
  reply = connection.RspRequest('vCont;c')
  assert reply == 'X09', reply
def TestBreakpoint(connection):
  """Exercise breakpoints and single-stepping over the GDB remote protocol.

  Breakpoints and single-stepping might interfere with Chrome sandbox. So we
  check that they work properly in this test.
  """
  arch = GetTargetArch(connection)
  # Snapshot all registers so we can verify they are untouched by the stop.
  registers = connection.RspRequest('g')
  pc = GetProgCtrString(connection, arch)
  # Set breakpoint
  result = connection.RspRequest('Z0,%s,1' % pc)
  assert result == 'OK', result
  # Check that we stopped at breakpoint
  result = connection.RspRequest('vCont;c')
  stop_reply = re.compile(r'T05thread:(\d+);')
  assert stop_reply.match(result), result
  thread = stop_reply.match(result).group(1)
  # Check that registers haven't changed
  result = connection.RspRequest('g')
  assert result == registers, (result, registers)
  # Remove breakpoint
  result = connection.RspRequest('z0,%s,1' % pc)
  assert result == 'OK', result
  # Check single stepping: after one step the PC must have advanced.
  result = connection.RspRequest('vCont;s:%s' % thread)
  assert result == 'T05thread:%s;' % thread, result
  assert pc != GetProgCtrString(connection, arch)
  # Check that we terminate normally
  result = connection.RspRequest('vCont;c')
  assert result == 'X09', result
def Main(args):
  """Run the test named in args[1] against a debug stub on port args[0]."""
  port = int(args[0])
  name = args[1]
  connection = gdb_rsp.GdbRspConnection(('localhost', port))
  tests = {
      'continue': TestContinue,
      'breakpoint': TestBreakpoint,
  }
  if name not in tests:
    raise AssertionError('Unknown test name: %r' % name)
  tests[name](connection)


if __name__ == '__main__':
  Main(sys.argv[1:])
| bsd-3-clause |
misli/python3-openid | examples/server.py | 2 | 25007 | #!/usr/bin/env python
__copyright__ = 'Copyright 2005-2008, Janrain, Inc.'
import cgi
import cgitb
import html
import http.cookies
import sys
import time
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib.parse import urlparse
def quoteattr(s):
    """Return *s* HTML-escaped and wrapped in double quotes, suitable for
    use as an HTML attribute value.
    """
    # html.escape replaces cgi.escape(s, 1): cgi.escape was deprecated in
    # Python 3.2 and removed in 3.8. html.escape additionally escapes "'",
    # which is harmless inside a double-quoted attribute.
    qs = html.escape(s, quote=True)
    return '"%s"' % (qs,)
try:
import openid
except ImportError:
sys.stderr.write("""
Failed to import the OpenID library. In order to use this example, you
must either install the library (see INSTALL in the root of the
distribution) or else add the library to python's import path (the
PYTHONPATH environment variable).
For more information, see the README in the root of the library
distribution.""")
sys.exit(1)
from openid.extensions import sreg
from openid.server import server
from openid.store.filestore import FileOpenIDStore
from openid.consumer import discover
class OpenIDHTTPServer(HTTPServer):
    """HTTP server that holds a reference to an OpenID protocol server
    and knows the base URL it is being served from.
    """

    def __init__(self, *args, **kwargs):
        HTTPServer.__init__(self, *args, **kwargs)

        # Omit the port from the base URL only for the default HTTP port.
        if self.server_port == 80:
            self.base_url = 'http://%s/' % (self.server_name,)
        else:
            self.base_url = ('http://%s:%s/' %
                             (self.server_name, self.server_port))

        self.openid = None            # openid.server.server.Server, set later
        self.approved = {}            # (identity, trust_root) -> 'always'
        self.lastCheckIDRequest = {}  # user -> pending CheckIDRequest

    def setOpenIDServer(self, oidserver):
        """Attach the OpenID protocol server used by request handlers."""
        self.openid = oidserver
class ServerHandler(BaseHTTPRequestHandler):
    """Request handler implementing both the OpenID provider endpoint
    (/openidserver) and the human-facing demo pages: login, identity
    pages, and Yadis discovery documents.

    NOTE(review): "login" state is a plain 'user' cookie with no
    authentication at all -- this is example code only.
    """

    def __init__(self, *args, **kwargs):
        # Name of the currently "logged in" user, refreshed from the
        # 'user' cookie by setUser() on each request.
        self.user = None
        BaseHTTPRequestHandler.__init__(self, *args, **kwargs)

    def do_GET(self):
        """Dispatch GET requests by path; render a cgitb traceback page
        on any unexpected exception."""
        try:
            self.parsed_uri = urlparse(self.path)
            self.query = {}
            for k, v in cgi.parse_qsl(self.parsed_uri[4]):
                self.query[k] = v

            self.setUser()

            path = self.parsed_uri[2].lower()

            if path == '/':
                self.showMainPage()
            elif path == '/openidserver':
                self.serverEndPoint(self.query)
            elif path == '/login':
                self.showLoginPage('/', '/')
            elif path == '/loginsubmit':
                self.doLogin()
            elif path.startswith('/id/'):
                self.showIdPage(path)
            elif path.startswith('/yadis/'):
                self.showYadis(path[7:])
            elif path == '/serveryadis':
                self.showServerYadis()
            else:
                self.send_response(404)
                self.end_headers()

        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.send_response(500)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            self.wfile.write(bytes(cgitb.html(sys.exc_info(), context=10), 'utf-8'))

    def do_POST(self):
        """Dispatch POST requests (protocol endpoint and the /allow form)."""
        try:
            self.parsed_uri = urlparse(self.path)

            self.setUser()
            content_length = int(self.headers['Content-Length'])
            post_data = self.rfile.read(content_length)

            # post_data is bytes, so these keys/values are bytes too;
            # binaryToUTF8() is applied by the individual handlers.
            self.query = {}
            for k, v in cgi.parse_qsl(post_data):
                self.query[k] = v

            path = self.parsed_uri[2]
            if path == '/openidserver':
                self.serverEndPoint(self.query)
            elif path == '/allow':
                self.handleAllow(self.query)
            else:
                self.send_response(404)
                self.end_headers()

        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.send_response(500)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            self.wfile.write(bytes(cgitb.html(sys.exc_info(), context=10), 'utf-8'))

    def handleAllow(self, query):
        """Process the approve/deny form posted from the decide page."""
        # pretend this next bit is keying off the user's session or something,
        # right?
        request = self.server.lastCheckIDRequest.get(self.user)

        query = self.binaryToUTF8(query)

        if 'yes' in query:
            if 'login_as' in query:
                self.user = query['login_as']

            if request.idSelect():
                identity = self.server.base_url + 'id/' + query['identifier']
            else:
                identity = request.identity

            trust_root = request.trust_root
            if query.get('remember', 'no') == 'yes':
                # Record a standing approval for this (identity, trust_root).
                self.server.approved[(identity, trust_root)] = 'always'

            response = self.approved(request, identity)
        elif 'no' in query:
            response = request.answer(False)
        else:
            assert False, 'strange allow post. %r' % (query,)

        self.displayResponse(response)

    def setUser(self):
        """Populate self.user from the 'user' cookie, if one was sent."""
        cookies = self.headers.get('Cookie')
        if cookies:
            morsel = http.cookies.BaseCookie(cookies).get('user')
            if morsel:
                self.user = morsel.value

    def isAuthorized(self, identity_url, trust_root):
        """Return True if the logged-in user owns identity_url and has a
        recorded approval for trust_root."""
        if self.user is None:
            return False

        if identity_url != self.server.base_url + 'id/' + self.user:
            return False

        key = (identity_url, trust_root)
        return self.server.approved.get(key) is not None

    def serverEndPoint(self, query):
        """The OpenID protocol endpoint: decode the request and act on it."""
        try:
            query = self.binaryToUTF8(query)
            request = self.server.openid.decodeRequest(query)
        except server.ProtocolError as why:
            self.displayResponse(why)
            return

        if request is None:
            # Display text indicating that this is an endpoint.
            self.showAboutPage()
            return

        if request.mode in ["checkid_immediate", "checkid_setup"]:
            self.handleCheckIDRequest(request)
        else:
            response = self.server.openid.handleRequest(request)
            self.displayResponse(response)

    def addSRegResponse(self, request, response):
        """Attach a Simple Registration response to *response*."""
        sreg_req = sreg.SRegRequest.fromOpenIDRequest(request)

        # In a real application, this data would be user-specific,
        # and the user should be asked for permission to release
        # it.
        sreg_data = {
            'nickname':self.user
            }

        sreg_resp = sreg.SRegResponse.extractResponse(sreg_req, sreg_data)
        response.addExtension(sreg_resp)

    def approved(self, request, identifier=None):
        """Build a positive answer (with SReg data) for *request*."""
        response = request.answer(True, identity=identifier)
        self.addSRegResponse(request, response)
        return response

    def handleCheckIDRequest(self, request):
        """Answer a checkid_* request, asking the user when necessary."""
        is_authorized = self.isAuthorized(request.identity, request.trust_root)
        if is_authorized:
            response = self.approved(request)
            self.displayResponse(response)
        elif request.immediate:
            # checkid_immediate cannot show UI, so an unapproved request fails.
            response = request.answer(False)
            self.displayResponse(response)
        else:
            self.server.lastCheckIDRequest[self.user] = request
            self.showDecidePage(request)

    def displayResponse(self, response):
        """Encode an OpenID library response and write it out over HTTP."""
        try:
            webresponse = self.server.openid.encodeResponse(response)
        except server.EncodingError as why:
            text = why.response.encodeToKVForm()
            self.showErrorPage('<pre>%s</pre>' % cgi.escape(text))
            return

        self.send_response(webresponse.code)
        for header, value in webresponse.headers.items():
            self.send_header(header, value)
        self.writeUserHeader()
        self.end_headers()

        if webresponse.body:
            self.wfile.write(bytes(webresponse.body, 'utf-8'))

    def doLogin(self):
        """Handle the login form submission: set or clear the user cookie
        and redirect."""
        if 'submit' in self.query:
            if 'user' in self.query:
                self.user = self.query['user']
            else:
                self.user = None
            self.redirect(self.query['success_to'])
        elif 'cancel' in self.query:
            self.redirect(self.query['fail_to'])
        else:
            assert 0, 'strange login %r' % (self.query,)

    def redirect(self, url):
        """Send a 302 redirect to *url*, refreshing the user cookie."""
        self.send_response(302)
        self.send_header('Location', url)
        self.writeUserHeader()
        self.end_headers()

    def writeUserHeader(self):
        """Emit a Set-Cookie header that persists or clears the user."""
        if self.user is None:
            # Expire the cookie by dating it to the epoch.
            t1970 = time.gmtime(0)
            expires = time.strftime(
                'Expires=%a, %d-%b-%y %H:%M:%S GMT', t1970)
            self.send_header('Set-Cookie', 'user=;%s' % expires)
        else:
            self.send_header('Set-Cookie', 'user=%s' % self.user)

    def showAboutPage(self):
        """Explain that this URL is an OpenID endpoint (shown for plain
        GETs of /openidserver)."""
        endpoint_url = self.server.base_url + 'openidserver'

        def link(url):
            url_attr = quoteattr(url)
            url_text = cgi.escape(url)
            return '<a href=%s><code>%s</code></a>' % (url_attr, url_text)

        def term(url, text):
            return '<dt>%s</dt><dd>%s</dd>' % (link(url), text)

        resources = [
            (self.server.base_url, "This example server's home page"),
            ('http://www.openidenabled.com/',
             'An OpenID community Web site, home of this library'),
            ('http://www.openid.net/', 'the official OpenID Web site'),
            ]

        resource_markup = ''.join([term(url, text) for url, text in resources])

        self.showPage(200, 'This is an OpenID server', msg="""\
        <p>%s is an OpenID server endpoint.<p>
        <p>For more information about OpenID, see:</p>
        <dl>
        %s
        </dl>
        """ % (link(endpoint_url), resource_markup,))

    def showErrorPage(self, error_message):
        """Render a 400 page; the filler comment works around IE's
        "friendly" replacement of small error responses."""
        self.showPage(400, 'Error Processing Request', err='''\
        <p>%s</p>
        <!--

        This is a large comment.  It exists to make this page larger.
        That is unfortunately necessary because of the "smart"
        handling of pages returned with an error code in IE.

        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************
        *************************************************************

        -->
        ''' % error_message)

    def showDecidePage(self, request):
        """Ask the user whether to approve the pending checkid request.

        Three variants: identifier selection, confirmation for the
        logged-in user's own identity, or a login-as prompt when the
        requested identity belongs to someone else.
        """
        id_url_base = self.server.base_url+'id/'
        # XXX: This may break if there are any synonyms for id_url_base,
        # such as referring to it by IP address or a CNAME.
        assert (request.identity.startswith(id_url_base) or
                request.idSelect()), repr((request.identity, id_url_base))
        expected_user = request.identity[len(id_url_base):]

        if request.idSelect(): # We are being asked to select an ID
            msg = '''\
            <p>A site has asked for your identity.  You may select an
            identifier by which you would like this site to know you.
            On a production site this would likely be a drop down list
            of pre-created accounts or have the facility to generate
            a random anonymous identifier.
            </p>
            '''
            fdata = {
                'id_url_base': id_url_base,
                'trust_root': request.trust_root,
                }
            form = '''\
            <form method="POST" action="/allow">
            <table>
              <tr><td>Identity:</td>
                 <td>%(id_url_base)s<input type='text' name='identifier'></td></tr>
              <tr><td>Trust Root:</td><td>%(trust_root)s</td></tr>
            </table>
            <p>Allow this authentication to proceed?</p>
            <input type="checkbox" id="remember" name="remember" value="yes"
                /><label for="remember">Remember this
                decision</label><br />
            <input type="submit" name="yes" value="yes" />
            <input type="submit" name="no" value="no" />
            </form>
            '''%fdata
        elif expected_user == self.user:
            msg = '''\
            <p>A new site has asked to confirm your identity.  If you
            approve, the site represented by the trust root below will
            be told that you control identity URL listed below. (If
            you are using a delegated identity, the site will take
            care of reversing the delegation on its own.)</p>'''

            fdata = {
                'identity': request.identity,
                'trust_root': request.trust_root,
                }
            form = '''\
            <table>
              <tr><td>Identity:</td><td>%(identity)s</td></tr>
              <tr><td>Trust Root:</td><td>%(trust_root)s</td></tr>
            </table>
            <p>Allow this authentication to proceed?</p>
            <form method="POST" action="/allow">
              <input type="checkbox" id="remember" name="remember" value="yes"
                  /><label for="remember">Remember this
                  decision</label><br />
              <input type="submit" name="yes" value="yes" />
              <input type="submit" name="no" value="no" />
            </form>''' % fdata
        else:
            mdata = {
                'expected_user': expected_user,
                'user': self.user,
                }
            msg = '''\
            <p>A site has asked for an identity belonging to
            %(expected_user)s, but you are logged in as %(user)s.  To
            log in as %(expected_user)s and approve the login request,
            hit OK below.  The "Remember this decision" checkbox
            applies only to the trust root decision.</p>''' % mdata

            fdata = {
                'identity': request.identity,
                'trust_root': request.trust_root,
                'expected_user': expected_user,
                }
            form = '''\
            <table>
              <tr><td>Identity:</td><td>%(identity)s</td></tr>
              <tr><td>Trust Root:</td><td>%(trust_root)s</td></tr>
            </table>
            <p>Allow this authentication to proceed?</p>
            <form method="POST" action="/allow">
              <input type="checkbox" id="remember" name="remember" value="yes"
                  /><label for="remember">Remember this
                  decision</label><br />
              <input type="hidden" name="login_as" value="%(expected_user)s"/>
              <input type="submit" name="yes" value="yes" />
              <input type="submit" name="no" value="no" />
            </form>''' % fdata

        self.showPage(200, 'Approve OpenID request?', msg=msg, form=form)

    def showIdPage(self, path):
        """Render an identity page with OpenID 1 link-rel discovery and a
        Yadis location header, plus the identity's approved trust roots."""
        link_tag = '<link rel="openid.server" href="%sopenidserver">' %\
              self.server.base_url
        yadis_loc_tag = '<meta http-equiv="x-xrds-location" content="%s">'%\
            (self.server.base_url+'yadis/'+path[4:])
        disco_tags = link_tag + yadis_loc_tag
        ident = self.server.base_url + path[1:]

        approved_trust_roots = []
        for (aident, trust_root) in list(self.server.approved.keys()):
            if aident == ident:
                trs = '<li><tt>%s</tt></li>\n' % cgi.escape(trust_root)
                approved_trust_roots.append(trs)

        if approved_trust_roots:
            prepend = '<p>Approved trust roots:</p>\n<ul>\n'
            approved_trust_roots.insert(0, prepend)
            approved_trust_roots.append('</ul>\n')
            msg = ''.join(approved_trust_roots)
        else:
            msg = ''

        self.showPage(200, 'An Identity Page', head_extras=disco_tags, msg='''\
        <p>This is an identity page for %s.</p>
        %s
        ''' % (ident, msg))

    def showYadis(self, user):
        """Serve the per-user Yadis (XRDS) discovery document."""
        self.send_response(200)
        self.send_header('Content-type', 'application/xrds+xml')
        self.end_headers()

        endpoint_url = self.server.base_url + 'openidserver'
        user_url = self.server.base_url + 'id/' + user
        self.wfile.write(bytes("""\
<?xml version="1.0" encoding="UTF-8"?>
<xrds:XRDS
    xmlns:xrds="xri://$xrds"
    xmlns="xri://$xrd*($v*2.0)">
  <XRD>

    <Service priority="0">
      <Type>%s</Type>
      <Type>%s</Type>
      <URI>%s</URI>
      <LocalID>%s</LocalID>
    </Service>

  </XRD>
</xrds:XRDS>
"""%(discover.OPENID_2_0_TYPE, discover.OPENID_1_0_TYPE,
     endpoint_url, user_url), 'utf-8'))

    def showServerYadis(self):
        """Serve the server-wide Yadis (XRDS) document for IdP-driven
        identifier selection."""
        self.send_response(200)
        self.send_header('Content-type', 'application/xrds+xml')
        self.end_headers()

        endpoint_url = self.server.base_url + 'openidserver'
        self.wfile.write(bytes("""\
<?xml version="1.0" encoding="UTF-8"?>
<xrds:XRDS
    xmlns:xrds="xri://$xrds"
    xmlns="xri://$xrd*($v*2.0)">
  <XRD>

    <Service priority="0">
      <Type>%s</Type>
      <URI>%s</URI>
    </Service>

  </XRD>
</xrds:XRDS>
"""%(discover.OPENID_IDP_2_0_TYPE, endpoint_url,), 'utf-8'))

    def showMainPage(self):
        """Render the home page with the server Yadis location header."""
        yadis_tag = '<meta http-equiv="x-xrds-location" content="%s">'%\
            (self.server.base_url + 'serveryadis')
        if self.user:
            openid_url = self.server.base_url + 'id/' + self.user
            user_message = """\
            <p>You are logged in as %s. Your OpenID identity URL is
            <tt><a href=%s>%s</a></tt>. Enter that URL at an OpenID
            consumer to test this server.</p>
            """ % (self.user, quoteattr(openid_url), openid_url)
        else:
            user_message = """\
            <p>This server uses a cookie to remember who you are in
            order to simulate a standard Web user experience. You are
            not <a href='/login'>logged in</a>.</p>"""

        self.showPage(200, 'Main Page', head_extras = yadis_tag, msg='''\
        <p>This is a simple OpenID server implemented using the <a
        href="http://openid.schtuff.com/">Python OpenID
        library</a>.</p>
        %s
        <p>To use this server with a consumer, the consumer must be
        able to fetch HTTP pages from this web server. If this
        computer is behind a firewall, you will not be able to use
        OpenID consumers outside of the firewall with it.</p>
        <p>The URL for this server is <a href=%s><tt>%s</tt></a>.</p>
        ''' % (user_message, quoteattr(self.server.base_url), self.server.base_url))

    def showLoginPage(self, success_to, fail_to):
        """Render the (password-less) demo login form."""
        self.showPage(200, 'Login Page', form='''\
        <h2>Login</h2>
        <p>You may log in with any name. This server does not use
        passwords because it is just a sample of how to use the OpenID
        library.</p>
        <form method="GET" action="/loginsubmit">
          <input type="hidden" name="success_to" value="%s" />
          <input type="hidden" name="fail_to" value="%s" />
          <input type="text" name="user" value="" />
          <input type="submit" name="submit" value="Log In" />
          <input type="submit" name="cancel" value="Cancel" />
        </form>
        ''' % (success_to, fail_to))

    def showPage(self, response_code, title,
                 head_extras='', msg=None, err=None, form=None):
        """Assemble and send a full HTML page.

        Args:
            response_code: HTTP status code to send.
            title: Page title (suffix of the <title> element).
            head_extras: Raw markup for the <head> (discovery tags).
            msg, err, form: Optional HTML fragments rendered in styled
                divs, in the order err, msg, form.
        """
        if self.user is None:
            user_link = '<a href="/login">not logged in</a>.'
        else:
            user_link = 'logged in as <a href="/id/%s">%s</a>.<br /><a href="/loginsubmit?submit=true&success_to=/login">Log out</a>' % \
                (self.user, self.user)

        body = ''

        if err is not None:
            body += '''\
            <div class="error">
              %s
            </div>
            ''' % err

        if msg is not None:
            body += '''\
            <div class="message">
              %s
            </div>
            ''' % msg

        if form is not None:
            body += '''\
            <div class="form">
              %s
            </div>
            ''' % form

        contents = {
            'title': 'Python OpenID Server Example - ' + title,
            'head_extras': head_extras,
            'body': body,
            'user_link': user_link,
            }

        self.send_response(response_code)
        self.writeUserHeader()
        self.send_header('Content-type', 'text/html')
        self.end_headers()

        self.wfile.write(bytes('''<html>
  <head>
    <title>%(title)s</title>
    %(head_extras)s
  </head>
  <style type="text/css">
      h1 a:link {
          color: black;
          text-decoration: none;
      }
      h1 a:visited {
          color: black;
          text-decoration: none;
      }
      h1 a:hover {
          text-decoration: underline;
      }
      body {
        font-family: verdana,sans-serif;
        width: 50em;
        margin: 1em;
      }
      div {
        padding: .5em;
      }
      table {
        margin: none;
        padding: none;
      }
      .banner {
        padding: none 1em 1em 1em;
        width: 100%%;
      }
      .leftbanner {
        text-align: left;
      }
      .rightbanner {
        text-align: right;
        font-size: smaller;
      }
      .error {
        border: 1px solid #ff0000;
        background: #ffaaaa;
        margin: .5em;
      }
      .message {
        border: 1px solid #2233ff;
        background: #eeeeff;
        margin: .5em;
      }
      .form {
        border: 1px solid #777777;
        background: #ddddcc;
        margin: .5em;
        margin-top: 1em;
        padding-bottom: 0em;
      }
      dd {
        margin-bottom: 0.5em;
      }
  </style>
  <body>
    <table class="banner">
      <tr>
        <td class="leftbanner">
          <h1><a href="/">Python OpenID Server Example</a></h1>
        </td>
        <td class="rightbanner">
          You are %(user_link)s
        </td>
      </tr>
    </table>
%(body)s
  </body>
</html>
''' % contents, 'UTF-8'))

    def binaryToUTF8(self, data):
        """Decode a dict whose keys and values are bytes (as produced by
        parsing a POST body) into str keys and values."""
        args = {}
        for key, value in data.items():
            key = key.decode('utf-8')
            value = value.decode('utf-8')
            args[key] = value
        return args
def main(host, port, data_path):
    """Serve the example OpenID provider on (host, port) until killed.

    Protocol state (associations, nonces) is stored on disk under
    *data_path*.
    """
    httpserver = OpenIDHTTPServer((host, port), ServerHandler)

    # Instantiate OpenID consumer store and OpenID consumer.  If you
    # were connecting to a database, you would create the database
    # connection and instantiate an appropriate store here.
    store = FileOpenIDStore(data_path)
    oidserver = server.Server(store, httpserver.base_url + 'openidserver')

    httpserver.setOpenIDServer(oidserver)

    print('Server running at:')
    print(httpserver.base_url)
    httpserver.serve_forever()
if __name__ == '__main__':
    # Defaults; may be overridden by the command-line options below.
    host = 'localhost'
    data_path = 'sstore'
    port = 8000

    try:
        import optparse
    except ImportError:
        pass # Use defaults (for Python 2.2)
    else:
        parser = optparse.OptionParser('Usage:\n %prog [options]')
        parser.add_option(
            '-d', '--data-path', dest='data_path', default=data_path,
            help='Data directory for storing OpenID consumer state. '
            'Defaults to "%default" in the current directory.')
        parser.add_option(
            '-p', '--port', dest='port', type='int', default=port,
            help='Port on which to listen for HTTP requests. '
            'Defaults to port %default.')
        parser.add_option(
            '-s', '--host', dest='host', default=host,
            help='Host on which to listen for HTTP requests. '
            'Also used for generating URLs. Defaults to %default.')

        options, args = parser.parse_args()
        if args:
            parser.error('Expected no arguments. Got %r' % args)

        host = options.host
        port = options.port
        data_path = options.data_path

    main(host, port, data_path)
| apache-2.0 |
SummerLW/Perf-Insight-Report | third_party/closure_linter/closure_linter/common/matcher.py | 284 | 2158 | #!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regular expression based JavaScript matcher classes."""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
from closure_linter.common import position
from closure_linter.common import tokens
# Shorthand
Token = tokens.Token
Position = position.Position
class Matcher(object):
  """A token matcher.

  Pairs a regular expression with the token type it produces, the lexer
  mode it applies in, and the mode to switch to after a successful match.

  Modes allow more advanced grammars to be incorporated, and are also
  necessary to tokenize line by line.  We can have different patterns
  apply to different modes - i.e. looking for documentation while in
  comment mode.

  Attributes:
    regex: The regular expression representing this matcher.
    type: The type of token indicated by a successful match.
    result_mode: The mode to move to after a successful match.
    line_start: Whether a match may only start at the beginning of a line.
  """

  def __init__(self, regex, token_type, result_mode=None, line_start=False):
    """Create a new matcher template.

    Args:
      regex: The regular expression to match.
      token_type: The type of token a successful match indicates.
      result_mode: What mode to change to after a successful match.
          Defaults to None, which means to not change the current mode.
      line_start: Whether this matcher should only match strings at the
          start of a line.
    """
    self.line_start = line_start
    self.result_mode = result_mode
    self.type = token_type
    self.regex = regex
| bsd-3-clause |
toddberreth/gensim | gensim/corpora/sharded_corpus.py | 63 | 35097 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Original author: Jan Hajic jr.
# Copyright (C) 2015 Radim Rehurek and gensim team.
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
This module implements a corpus class that stores its data in separate files called
"shards". This is a compromise between speed (keeping the whole dataset
in memory) and memory footprint (keeping the data on disk and reading from it
on demand).
The corpus is intended for situations where you need to use your data
as numpy arrays for some iterative processing (like training something
using SGD, which usually involves heavy matrix multiplication).
"""
from __future__ import print_function
import logging
import os
import math
import numpy
import scipy.sparse as sparse
import time
logger = logging.getLogger(__name__)
#: Specifies which dtype should be used for serializing the shards.
_default_dtype = float
try:
import theano
_default_dtype = theano.config.floatX
except ImportError:
logger.info('Could not import Theano, will use standard float for default ShardedCorpus dtype.')
from six.moves import xrange
import gensim
from gensim.corpora import IndexedCorpus
from gensim.interfaces import TransformedCorpus
class ShardedCorpus(IndexedCorpus):
"""
This corpus is designed for situations where you need to train a model
on matrices, with a large number of iterations. (It should be faster than
gensim's other IndexedCorpus implementations for this use case; check the
`benchmark_datasets.py` script. It should also serialize faster.)
The corpus stores its data in separate files called
"shards". This is a compromise between speed (keeping the whole dataset
in memory) and memory footprint (keeping the data on disk and reading from
it on demand). Persistence is done using the standard gensim load/save methods.
.. note::
The dataset is **read-only**, there is - as opposed to gensim's Similarity
class, which works similarly - no way of adding documents to the dataset
(for now).
You can use ShardedCorpus to serialize your data just like any other gensim
corpus that implements serialization. However, because the data is saved
as numpy 2-dimensional ndarrays (or scipy sparse matrices), you need to
supply the dimension of your data to the corpus. (The dimension of word
frequency vectors will typically be the size of the vocabulary, etc.)
>>> corpus = gensim.utils.mock_data()
>>> output_prefix = 'mydata.shdat'
>>> ShardedCorpus.serialize(output_prefix, corpus, dim=1000)
The `output_prefix` tells the ShardedCorpus where to put the data.
Shards are saved as `output_prefix.0`, `output_prefix.1`, etc.
All shards must be of the same size. The shards can be re-sized (which
is essentially a re-serialization into new-size shards), but note that
this operation will temporarily take twice as much disk space, because
the old shards are not deleted until the new shards are safely in place.
After serializing the data, the corpus will then save itself to the file
`output_prefix`.
On further initialization with the same `output_prefix`, the corpus
will load the already built dataset unless the `overwrite` option is
given. (A new object is "cloned" from the one saved to `output_prefix`
previously.)
To retrieve data, you can load the corpus and use it like a list:
>>> sh_corpus = ShardedCorpus.load(output_prefix)
>>> batch = sh_corpus[100:150]
This will retrieve a numpy 2-dimensional array of 50 rows and 1000
columns (1000 was the dimension of the data we supplied to the corpus).
To retrieve gensim-style sparse vectors, set the `gensim` property:
>>> sh_corpus.gensim = True
>>> batch = sh_corpus[100:150]
The batch now will be a generator of gensim vectors.
Since the corpus needs the data serialized in order to be able to operate,
it will serialize data right away on initialization. Instead of calling
`ShardedCorpus.serialize()`, you can just initialize and use the corpus
right away:
>>> corpus = ShardedCorpus(output_prefix, corpus, dim=1000)
>>> batch = corpus[100:150]
ShardedCorpus also supports working with scipy sparse matrices, both
during retrieval and during serialization. If you want to serialize your
data as sparse matrices, set the `sparse_serialization` flag. For
retrieving your data as sparse matrices, use the `sparse_retrieval`
flag. (You can also retrieve densely serialized data as sparse matrices,
for the sake of completeness, and vice versa.) By default, the corpus
will retrieve numpy ndarrays even if it was serialized into sparse
matrices.
>>> sparse_prefix = 'mydata.sparse.shdat'
>>> ShardedCorpus.serialize(sparse_prefix, corpus, dim=1000, sparse_serialization=True)
>>> sparse_corpus = ShardedCorpus.load(sparse_prefix)
>>> batch = sparse_corpus[100:150]
>>> type(batch)
<type 'numpy.ndarray'>
>>> sparse_corpus.sparse_retrieval = True
>>> batch = sparse_corpus[100:150]
<class 'scipy.sparse.csr.csr_matrix'>
While you *can* touch the `sparse_retrieval` attribute during the life
of a ShardedCorpus object, you should definitely not touch `
`sharded_serialization`! Changing the attribute will not miraculously
re-serialize the data in the requested format.
The CSR format is used for sparse data throughout.
Internally, to retrieve data, the dataset keeps track of which shard is
currently open and on a `__getitem__` request, either returns an item from
the current shard, or opens a new one. The shard size is constant, except
for the last shard.
"""
    def __init__(self, output_prefix, corpus, dim=None,
                 shardsize=4096, overwrite=False, sparse_serialization=False,
                 sparse_retrieval=False, gensim=False):
        """Initializes the dataset. If `output_prefix` is not found,
        builds the shards.

        :type output_prefix: str
        :param output_prefix: The absolute path to the file from which shard
            filenames should be derived. The individual shards will be saved
            as `output_prefix.0`, `output_prefix.1`, etc.

            The `output_prefix` path then works as the filename to which
            the ShardedCorpus object itself will be automatically saved.
            Normally, gensim corpora do not do this, but ShardedCorpus needs
            to remember several serialization settings: namely the shard
            size and whether it was serialized in dense or sparse format. By
            saving automatically, any new ShardedCorpus with the same
            `output_prefix` will be able to find the information about the
            data serialized with the given prefix.

            If you want to *overwrite* your data serialized with some output
            prefix, set the `overwrite` flag to True.

            Of course, you can save your corpus separately as well using
            the `save()` method.

        :type corpus: gensim.interfaces.CorpusABC
        :param corpus: The source corpus from which to build the dataset.

        :type dim: int
        :param dim: Specify beforehand what the dimension of a dataset item
            should be. This is useful when initializing from a corpus that
            doesn't advertise its dimension, or when it does and you want to
            check that the corpus matches the expected dimension. **If `dim`
            is left unused and `corpus` does not provide its dimension in
            an expected manner, initialization will fail.**

        :type shardsize: int
        :param shardsize: How many data points should be in one shard. More
            data per shard means less shard reloading but higher memory usage
            and vice versa.

        :type overwrite: bool
        :param overwrite: If set, will build dataset from given corpus even
            if `output_prefix` already exists.

        :type sparse_serialization: bool
        :param sparse_serialization: If set, will save the data in a sparse
            form (as csr matrices). This is to speed up retrieval when you
            know you will be using sparse matrices.

            ..note::

                This property **should not change** during the lifetime of
                the dataset. (If you find out you need to change from a sparse
                to a dense representation, the best practice is to create
                another ShardedCorpus object.)

        :type sparse_retrieval: bool
        :param sparse_retrieval: If set, will retrieve data as sparse vectors
            (numpy csr matrices). If unset, will return ndarrays.

            Note that retrieval speed for this option depends on how the dataset
            was serialized. If `sparse_serialization` was set, then setting
            `sparse_retrieval` will be faster. However, if the two settings
            do not correspond, the conversion on the fly will slow the dataset
            down.

        :type gensim: bool
        :param gensim: If set, will convert the output to gensim
            sparse vectors (list of tuples (id, value)) to make it behave like
            any other gensim corpus. This **will** slow the dataset down.

        """
        self.output_prefix = output_prefix
        self.shardsize = shardsize

        self.n_docs = 0

        self.offsets = []
        self.n_shards = 0

        self.dim = dim  # This number may change during initialization/loading.

        # Sparse vs. dense serialization and retrieval.
        self.sparse_serialization = sparse_serialization
        self.sparse_retrieval = sparse_retrieval
        self.gensim = gensim

        # The "state" of the dataset.
        self.current_shard = None    # The current shard itself (numpy ndarray)
        self.current_shard_n = None  # Current shard is the current_shard_n-th
        self.current_offset = None   # The index into the dataset which
                                     # corresponds to index 0 of current shard

        logger.info('Initializing sharded corpus with prefix '
                    '{0}'.format(output_prefix))
        # Either serialize the given corpus into new shards, or attach to
        # shards previously serialized under the same prefix.
        if (not os.path.isfile(output_prefix)) or overwrite:
            logger.info('Building from corpus...')
            self.init_shards(output_prefix, corpus, shardsize)

            # Save automatically, to facilitate re-loading
            # and retain information about how the corpus
            # was serialized.
            logger.info('Saving ShardedCorpus object to '
                        '{0}'.format(self.output_prefix))
            self.save()
        else:
            logger.info('Cloning existing...')
            self.init_by_clone()
    def init_shards(self, output_prefix, corpus, shardsize=4096, dtype=_default_dtype):
        """Initialize shards from the corpus.

        Iterates over `corpus` in chunks of `shardsize` documents, densifies
        each chunk into a numpy array (optionally converting to csr if sparse
        serialization was requested) and pickles it to disk via save_shard().

        :param output_prefix: Unused here beyond the signature; shard file
            names come from self._shard_name().
        :param corpus: Any gensim-style corpus (checked with is_corpus).
        :param shardsize: Number of documents per shard.
        :param dtype: numpy dtype of the dense shard arrays.
        :raises ValueError: If `corpus` does not look like a gensim corpus.
        """
        if not gensim.utils.is_corpus(corpus):
            raise ValueError('Cannot initialize shards without a corpus to read'
                             ' from! (Got corpus type: {0})'.format(type(corpus)))
        # The corpus' own idea of its dimension wins over the init argument.
        proposed_dim = self._guess_n_features(corpus)
        if proposed_dim != self.dim:
            if self.dim is None:
                logger.info('Deriving dataset dimension from corpus: '
                            '{0}'.format(proposed_dim))
            else:
                logger.warn('Dataset dimension derived from input corpus diffe'
                            'rs from initialization argument, using corpus.'
                            '(corpus {0}, init arg {1})'.format(proposed_dim,
                                                                self.dim))
            self.dim = proposed_dim
        self.offsets = [0]
        # NOTE(review): time.clock() was removed in Python 3.8; fine for this
        # Python 2-era file, but would need time.perf_counter() on 3.x.
        start_time = time.clock()
        logger.info('Running init from corpus.')
        for n, doc_chunk in enumerate(gensim.utils.grouper(corpus, chunksize=shardsize)):
            logger.info('Chunk no. {0} at {1} s'.format(n, time.clock() - start_time))
            current_shard = numpy.zeros((len(doc_chunk), self.dim), dtype=dtype)
            logger.debug('Current chunk dimension: '
                         '{0} x {1}'.format(len(doc_chunk), self.dim))
            for i, doc in enumerate(doc_chunk):
                doc = dict(doc)
                # Scatter the sparse (id, value) pairs into row i.
                current_shard[i][list(doc)] = list(gensim.matutils.itervalues(doc))
            # Handles the updating as well.
            if self.sparse_serialization:
                current_shard = sparse.csr_matrix(current_shard)
            # save_shard() also updates offsets, n_docs and n_shards.
            self.save_shard(current_shard)
        end_time = time.clock()
        logger.info('Built {0} shards in {1} s.'.format(self.n_shards, end_time - start_time))
def init_by_clone(self):
"""
Initialize by copying over attributes of another ShardedCorpus
instance saved to the output_prefix given at __init__().
"""
temp = self.__class__.load(self.output_prefix)
self.n_shards = temp.n_shards
self.n_docs = temp.n_docs
self.offsets = temp.offsets
if temp.dim != self.dim:
if self.dim is None:
logger.info('Loaded dataset dimension: {0}'.format(temp.dim))
else:
logger.warn('Loaded dataset dimension differs from init arg '
'dimension, using loaded dim. '
'(loaded {0}, init {1})'.format(temp.dim, self.dim))
self.dim = temp.dim # To be consistent with the loaded data!
def save_shard(self, shard, n=None, filename=None):
"""
Pickle the given shard. If `n` is not given, will consider the shard
a new one.
If `filename` is given, will use that file name instead of generating
one.
"""
new_shard = False
if n is None:
n = self.n_shards # Saving the *next* one by default.
new_shard = True
if not filename:
filename = self._shard_name(n)
gensim.utils.pickle(shard, filename)
if new_shard:
self.offsets.append(self.offsets[-1] + shard.shape[0])
self.n_docs += shard.shape[0]
self.n_shards += 1
def load_shard(self, n):
"""
Load (unpickle) the n-th shard as the "live" part of the dataset
into the Dataset object."""
#logger.debug('ShardedCorpus loading shard {0}, '
# 'current shard: {1}'.format(n, self.current_shard_n))
# No-op if the shard is already open.
if self.current_shard_n == n:
return
filename = self._shard_name(n)
if not os.path.isfile(filename):
raise ValueError('Attempting to load nonexistent shard no. {0}'.format(n))
shard = gensim.utils.unpickle(filename)
self.current_shard = shard
self.current_shard_n = n
self.current_offset = self.offsets[n]
def reset(self):
"""
Reset to no shard at all. Used for saving.
"""
self.current_shard = None
self.current_shard_n = None
self.current_offset = None
def shard_by_offset(self, offset):
"""
Determine which shard the given offset belongs to. If the offset
is greater than the number of available documents, raises a
`ValueError`.
Assumes that all shards have the same size.
"""
k = int(offset / self.shardsize)
if offset >= self.n_docs:
raise ValueError('Too high offset specified ({0}), available '
'docs: {1}'.format(offset, self.n_docs))
if offset < 0:
raise ValueError('Negative offset {0} currently not'
' supported.'.format(offset))
return k
k = -1
for i, o in enumerate(self.offsets):
if o > offset: # Condition should fire for every valid offset,
# since the last offset is n_docs (one-past-end).
k = i - 1 # First offset is always 0, so i is at least 1.
break
return k
def in_current(self, offset):
"""
Determine whether the given offset falls within the current shard.
"""
return (self.current_offset <= offset) \
and (offset < self.offsets[self.current_shard_n + 1])
def in_next(self, offset):
"""
Determine whether the given offset falls within the next shard.
This is a very small speedup: typically, we will be iterating through
the data forward. Could save considerable time with a very large number
of smaller shards.
"""
if self.current_shard_n == self.n_shards:
return False # There's no next shard.
return (self.offsets[self.current_shard_n + 1] <= offset) \
and (offset < self.offsets[self.current_shard_n + 2])
    def resize_shards(self, shardsize):
        """
        Re-process the dataset to new shard size. This may take pretty long.
        Also, note that you need some space on disk for this one (we're
        assuming there is enough disk space for double the size of the dataset
        and that there is enough memory for old + new shardsize).

        :type shardsize: int
        :param shardsize: The new shard size.
        """
        # Determine how many new shards there will be
        n_new_shards = int(math.floor(self.n_docs / float(shardsize)))
        if self.n_docs % shardsize != 0:
            n_new_shards += 1
        new_shard_names = []
        new_offsets = [0]
        # Phase 1: write every re-sliced shard to a temporary file.
        for new_shard_idx in xrange(n_new_shards):
            new_start = shardsize * new_shard_idx
            new_stop = new_start + shardsize
            # Last shard?
            if new_stop > self.n_docs:
                # Sanity check
                assert new_shard_idx == n_new_shards - 1, \
                    'Shard no. {0} that ends at {1} over last document' \
                    ' ({2}) is not the last projected shard ({3})???' \
                    ''.format(new_shard_idx, new_stop, self.n_docs, n_new_shards)
                new_stop = self.n_docs
            # Slicing through __getitem__ reads across the *old* shards.
            new_shard = self[new_start:new_stop]
            new_shard_name = self._resized_shard_name(new_shard_idx)
            new_shard_names.append(new_shard_name)
            try:
                self.save_shard(new_shard, new_shard_idx, new_shard_name)
            except Exception:
                # Clean up on unsuccessful resize.
                for new_shard_name in new_shard_names:
                    os.remove(new_shard_name)
                raise
            new_offsets.append(new_stop)
        # Phase 2: move old shard files out, new ones in. Complicated due to
        # possibility of exceptions.
        old_shard_names = [self._shard_name(n) for n in xrange(self.n_shards)]
        try:
            for old_shard_n, old_shard_name in enumerate(old_shard_names):
                os.remove(old_shard_name)
        except Exception as e:
            # Removal failure is logged but NOT re-raised; we still try to
            # move the new shards into place below.
            logger.error('Exception occurred during old shard no. {0} '
                         'removal: {1}.\nAttempting to at least move '
                         'new shards in.'.format(old_shard_n, str(e)))
        finally:
            # If something happens with cleaning up - try to at least get the
            # new guys in.
            try:
                for shard_n, new_shard_name in enumerate(new_shard_names):
                    os.rename(new_shard_name, self._shard_name(shard_n))
            # If something happens when we're in this stage, we're screwed.
            except Exception as e:
                print(e)
                raise RuntimeError('Resizing completely failed for some reason.'
                                   ' Sorry, dataset is probably ruined...')
            finally:
                # Sets the new shard stats (runs even on failure above, so the
                # in-memory bookkeeping matches whatever ended up on disk).
                self.n_shards = n_new_shards
                self.offsets = new_offsets
                self.shardsize = shardsize
                self.reset()
def _shard_name(self, n):
"""Generate the name for the n-th shard."""
return self.output_prefix + '.' + str(n)
def _resized_shard_name(self, n):
"""
Generate the name for the n-th new shard temporary file when
resizing dataset. The file will then be re-named to standard shard name.
"""
return self.output_prefix + '.resize-temp.' + str(n)
def _guess_n_features(self, corpus):
"""Attempt to guess number of features in `corpus`."""
n_features = None
if hasattr(corpus, 'dim'):
# print 'Guessing from \'dim\' attribute.'
n_features = corpus.dim
elif hasattr(corpus, 'dictionary'):
# print 'GUessing from dictionary.'
n_features = len(corpus.dictionary)
elif hasattr(corpus, 'n_out'):
# print 'Guessing from \'n_out\' attribute.'
n_features = corpus.n_out
elif hasattr(corpus, 'num_terms'):
# print 'Guessing from \'num_terms\' attribute.'
n_features = corpus.num_terms
elif isinstance(corpus, TransformedCorpus):
# TransformedCorpus: first check if the transformer object
# defines some output dimension; if it doesn't, relegate guessing
# to the corpus that is being transformed. This may easily fail!
try:
return self._guess_n_features(corpus.obj)
except TypeError:
return self._guess_n_features(corpus.corpus)
else:
if not self.dim:
raise TypeError('Couldn\'t find number of features, '
'refusing to guess (dimension set to {0},'
'type of corpus: {1}).'.format(self.dim, type(corpus)))
else:
logger.warn('Couldn\'t find number of features, trusting '
'supplied dimension ({0})'.format(self.dim))
n_features = self.dim
if self.dim and n_features != self.dim:
logger.warn('Discovered inconsistent dataset dim ({0}) and '
'feature count from corpus ({1}). Coercing to dimension'
' given by argument.'.format(self.dim, n_features))
return n_features
    def __len__(self):
        """Total number of documents across all shards."""
        return self.n_docs
def _ensure_shard(self, offset):
# No shard loaded
if self.current_shard is None:
shard_n = self.shard_by_offset(offset)
self.load_shard(shard_n)
# Find appropriate shard, if necessary
elif not self.in_current(offset):
if self.in_next(offset):
self.load_shard(self.current_shard_n + 1)
else:
shard_n = self.shard_by_offset(offset)
self.load_shard(shard_n)
def get_by_offset(self, offset):
"""As opposed to getitem, this one only accepts ints as offsets."""
self._ensure_shard(offset)
result = self.current_shard[offset - self.current_offset]
return result
    def __getitem__(self, offset):
        """
        Retrieve the given row(s) of the dataset.

        Accepts an int (single row), a list of ints (row gather), or a slice
        (possibly spanning several shards). The raw result is converted to the
        configured output format (dense / sparse / gensim vectors).
        """
        if isinstance(offset, list):
            # Handle all serialization & retrieval options.
            if self.sparse_serialization:
                l_result = sparse.vstack([self.get_by_offset(i)
                                          for i in offset])
                if self.gensim:
                    l_result = self._getitem_sparse2gensim(l_result)
                elif not self.sparse_retrieval:
                    l_result = numpy.array(l_result.todense())
            else:
                l_result = numpy.array([self.get_by_offset(i) for i in offset])
                if self.gensim:
                    l_result = self._getitem_dense2gensim(l_result)
                elif self.sparse_retrieval:
                    l_result = sparse.csr_matrix(l_result)
            return l_result
        elif isinstance(offset, slice):
            start = offset.start
            stop = offset.stop
            if stop > self.n_docs:
                raise IndexError('Requested slice offset {0} out of range'
                                 ' ({1} docs)'.format(stop, self.n_docs))
            # - get range of shards over which to iterate
            first_shard = self.shard_by_offset(start)
            last_shard = self.n_shards - 1
            if not stop == self.n_docs:
                last_shard = self.shard_by_offset(stop)
            # shard_by_offset fails on one-past-end (stop == n_docs)
            # slice indexing; that's why there's a code branch here.
            self.load_shard(first_shard)
            # The easy case: both in one shard.
            if first_shard == last_shard:
                s_result = self.current_shard[start - self.current_offset:
                                              stop - self.current_offset]
                # Handle different sparsity settings:
                s_result = self._getitem_format(s_result)
                return s_result
            # The hard case: the slice is distributed across multiple shards
            # - initialize numpy.zeros()
            s_result = numpy.zeros((stop - start, self.dim),
                                   dtype=self.current_shard.dtype)
            if self.sparse_serialization:
                # Sparse results are built by vstacking, so start with 0 rows.
                s_result = sparse.csr_matrix((0, self.dim),
                                             dtype=self.current_shard.dtype)
            # - gradually build it up. We will be using three sets of
            #   start:stop indexes:
            #    - into the dataset (these are the indexes the caller works with)
            #    - into the current shard
            #    - into the result
            # Indexes into current result rows. These are always smaller than
            # the dataset indexes by `start` (as we move over the shards,
            # we're moving by the same number of rows through the result).
            result_start = 0
            result_stop = self.offsets[self.current_shard_n + 1] - start
            # Indexes into current shard. These are trickiest:
            #  - if in starting shard, these are from (start - current_offset)
            #    to self.shardsize
            #  - if in intermediate shard, these are from 0 to self.shardsize
            #  - if in ending shard, these are from 0
            #    to (stop - current_offset)
            shard_start = start - self.current_offset
            shard_stop = self.offsets[self.current_shard_n + 1] - \
                self.current_offset
            # First shard.
            s_result = self.__add_to_slice(s_result, result_start, result_stop,
                                           shard_start, shard_stop)
            # First and last get special treatment, these are in between
            for shard_n in xrange(first_shard+1, last_shard):
                self.load_shard(shard_n)
                result_start = result_stop
                result_stop += self.shardsize
                shard_start = 0
                shard_stop = self.shardsize
                s_result = self.__add_to_slice(s_result, result_start,
                                               result_stop, shard_start,
                                               shard_stop)
            # Last shard
            self.load_shard(last_shard)
            result_start = result_stop
            result_stop += stop - self.current_offset
            shard_start = 0
            shard_stop = stop - self.current_offset
            s_result = self.__add_to_slice(s_result, result_start, result_stop,
                                           shard_start, shard_stop)
            s_result = self._getitem_format(s_result)
            return s_result
        else:
            # Plain integer offset.
            s_result = self.get_by_offset(offset)
            s_result = self._getitem_format(s_result)
            return s_result
def __add_to_slice(self, s_result, result_start, result_stop, start, stop):
"""
Add the rows of the current shard from `start` to `stop`
into rows `result_start` to `result_stop` of `s_result`.
Operation is based on the self.sparse_serialize setting. If the shard
contents are dense, then s_result is assumed to be an ndarray that
already supports row indices `result_start:result_stop`. If the shard
contents are sparse, assumes that s_result has `result_start` rows
and we should add them up to `result_stop`.
Returns the resulting s_result.
"""
if (result_stop - result_start) != (stop - start):
raise ValueError('Result start/stop range different than stop/start'
'range (%d - %d vs. %d - %d)'.format(result_start,
result_stop,
start, stop))
# Dense data: just copy using numpy's slice notation
if not self.sparse_serialization:
s_result[result_start:result_stop] = self.current_shard[start:stop]
return s_result
# A bit more difficult, we're using a different structure to build the
# result.
else:
if s_result.shape != (result_start, self.dim):
raise ValueError('Assuption about sparse s_result shape '
'invalid: {0} expected rows, {1} real '
'rows.'.format(result_start,
s_result.shape[0]))
tmp_matrix = self.current_shard[start:stop]
s_result = sparse.vstack([s_result, tmp_matrix])
return s_result
def _getitem_format(self, s_result):
if self.sparse_serialization:
if self.gensim:
s_result = self._getitem_sparse2gensim(s_result)
elif not self.sparse_retrieval:
s_result = numpy.array(s_result.todense())
else:
if self.gensim:
s_result = self._getitem_dense2gensim(s_result)
elif self.sparse_retrieval:
s_result = sparse.csr_matrix(s_result)
return s_result
def _getitem_sparse2gensim(self, result):
"""
Change given sparse result matrix to gensim sparse vectors.
Uses the internals of the sparse matrix to make this fast.
"""
def row_sparse2gensim(row_idx, csr_matrix):
indices = csr_matrix.indices[csr_matrix.indptr[row_idx]:csr_matrix.indptr[row_idx+1]]
g_row = [(col_idx, csr_matrix[row_idx, col_idx]) for col_idx in indices]
return g_row
output = (row_sparse2gensim(i, result) for i in xrange(result.shape[0]))
return output
def _getitem_dense2gensim(self, result):
"""Change given dense result matrix to gensim sparse vectors."""
if len(result.shape) == 1:
output = gensim.matutils.full2sparse(result)
else:
output = (gensim.matutils.full2sparse(result[i])
for i in xrange(result.shape[0]))
return output
# Overriding the IndexedCorpus and other corpus superclass methods
def __iter__(self):
"""
Yield dataset items one by one (generator).
"""
for i in xrange(len(self)):
yield self[i]
def save(self, *args, **kwargs):
"""
Save itself (the wrapper) in clean state (after calling `reset()`)
to the output_prefix file. If you wish to save to a different file,
use the `fname` argument as the first positional arg.
"""
# Can we save to a different file than output_prefix? Well, why not?
if len(args) == 0:
args = tuple([self.output_prefix])
attrs_to_ignore = ['current_shard',
'current_shard_n',
'current_offset']
if 'ignore' not in kwargs:
kwargs['ignore'] = frozenset(attrs_to_ignore)
else:
kwargs['ignore'] = frozenset([v for v in kwargs['ignore']]
+ attrs_to_ignore)
super(ShardedCorpus, self).save(*args, **kwargs)
#
# self.reset()
# with smart_open(self.output_prefix, 'wb') as pickle_handle:
# cPickle.dump(self, pickle_handle)
    @classmethod
    def load(cls, fname, mmap=None):
        """
        Load itself in clean state. `mmap` has no effect here.
        """
        # Delegates to the superclass loader; `mmap` exists only for
        # signature compatibility with that interface.
        return super(ShardedCorpus, cls).load(fname, mmap)
    @staticmethod
    def save_corpus(fname, corpus, id2word=None, progress_cnt=1000,
                    metadata=False, **kwargs):
        """
        Implement a serialization interface. Do not call directly;
        use the `serialize` method instead.

        Note that you might need some ShardedCorpus init parameters, most
        likely the dimension (`dim`). Again, pass these as `kwargs` to the
        `serialize` method.

        All this thing does is initialize a ShardedCorpus from a corpus
        with the `output_prefix` argument set to the `fname` parameter
        of this method. The initialization of a ShardedCorpus takes care of
        serializing the data (in dense form) to shards.

        Ignore the parameters id2word, progress_cnt and metadata. They
        currently do nothing and are here only to provide a compatible
        method signature with superclass.
        """
        # Construction serializes the corpus as a side effect of __init__;
        # the instance itself is deliberately discarded.
        ShardedCorpus(fname, corpus, **kwargs)
    @classmethod
    def serialize(serializer, fname, corpus, id2word=None,
                  index_fname=None, progress_cnt=None, labels=None,
                  metadata=False, **kwargs):
        """
        Iterate through the document stream `corpus`, saving the documents
        as a ShardedCorpus to `fname`.

        Use this method instead of calling `save_corpus` directly.
        You may need to supply some kwargs that are used upon dataset creation
        (namely: `dim`, unless the dataset can infer the dimension from the
        given corpus).

        Ignore the parameters id2word, index_fname, progress_cnt, labels
        and metadata. They currently do nothing and are here only to
        provide a compatible method signature with superclass."""
        # NOTE: the first argument is named `serializer` instead of the
        # conventional `cls`; it is still the implicit class argument.
        serializer.save_corpus(fname, corpus, id2word=id2word,
                               progress_cnt=progress_cnt, metadata=metadata,
                               **kwargs)
| gpl-3.0 |
remcoboerma/pyfpdf | tests/html_unicode.py | 18 | 1418 | # -*- coding: utf-8 -*-
"HTML Renderer for FPDF.py (unicode)"
__author__ = "Mariano Reingart <reingart@gmail.com>"
__copyright__ = "Copyright (C) 2010 Mariano Reingart"
__license__ = "LGPL 3.0"
# Inspired by tuto5.py and several examples from fpdf.org, html2fpdf, etc.
from fpdf import FPDF, HTMLMixin
import os.path
if __name__ == '__main__':
    # Smoke test for HTMLMixin: render basic + unicode fonts to a PDF.
    class MyFPDF(FPDF, HTMLMixin): pass
    pdf = MyFPDF()
    # load the unicode font
    font_dir = os.path.dirname(__file__)  # renamed from `dir` (shadowed builtin)
    font = os.path.join(font_dir, 'font', 'DejaVuSansCondensed.ttf')
    pdf.add_font('DejaVu', '', font, uni=True)
    pdf.add_page()
    # test the basic fonts
    pdf.write_html("""<p><font face="Arial"><B>hello</B> <I>world</I></font></p>""")
    pdf.write_html("""<p><font face="Times"><B>hello</B> <I>world</I></font></p>""")
    pdf.write_html("""<p><font face="Courier"><B>hello</B> <I>world</I></font></p>""")
    pdf.write_html("""<p><font face="zapfdingbats"><B>hello</B> <I>world</I></font></p>""")
    # test the unicode (utf8) font:
    # greek
    pdf.write_html(u"""<p><font face="DejaVu">Γειά σου κόσμος</font></p>""")
    # russian
    pdf.write_html(u"""<p><font face="DejaVu">Здравствуй, Мир</font></p>""")
    fn = 'html_unicode.pdf'
    pdf.output(fn, 'F')
    # Open the generated file for visual inspection. os.startfile exists only
    # on Windows (AttributeError elsewhere); fall back to xdg-open. The
    # original bare `except:` also swallowed KeyboardInterrupt/SystemExit,
    # and the redundant `import os` (already bound by `import os.path`) is
    # removed.
    try:
        os.startfile(fn)
    except (AttributeError, OSError):
        os.system("xdg-open \"%s\"" % fn)
| lgpl-3.0 |
SanketDG/coala | tests/bearlib/abstractions/external_bear_wrap_testfiles/test_external_bear.py | 4 | 1253 | import sys
import json
from coalib.results.Result import Result
from coalib.results.SourceRange import SourceRange
from coalib.output.JSONEncoder import create_json_encoder
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
if __name__ == '__main__':
    # Read one JSON request from stdin, produce coala Results on stdout.
    request = json.loads(sys.stdin.read())
    settings = request['settings']
    affected_file = request['filename']
    results = [
        Result(origin='TestBear',
               message='This is wrong',
               affected_code=(SourceRange.from_values(affected_file, 1),),
               severity=RESULT_SEVERITY.MAJOR),
        Result(origin='TestBear',
               message='This is wrong too',
               affected_code=(SourceRange.from_values(affected_file, 3),),
               severity=RESULT_SEVERITY.INFO)]
    # Apply the setting-driven tweaks the tests flip on and off.
    if settings['set_normal_severity']:
        for result in results:
            result.severity = RESULT_SEVERITY.NORMAL
    if settings['set_sample_dbg_msg']:
        results[0].debug_msg = 'Sample debug message'
    if not settings['not_set_different_msg']:
        results[1].message = 'Different message'
    encoder = create_json_encoder()
    sys.stdout.write(json.dumps({'results': results}, cls=encoder))
| agpl-3.0 |
piquadrat/django | tests/model_meta/models.py | 106 | 5416 | from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Relation(models.Model):
    # Bare target model for the FK/M2M/generic relations declared on the
    # *Person models below; it needs no fields of its own.
    pass
class InstanceOnlyDescriptor:
    """Descriptor that permits only instance-level access (class-level
    access raises AttributeError) and always yields 1."""
    def __get__(self, instance, cls=None):
        if instance is not None:
            return 1
        raise AttributeError('Instance only')
class AbstractPerson(models.Model):
    """Abstract base declaring one field of every relation kind, so the
    meta-API tests can check how abstract fields propagate to subclasses."""
    # DATA fields
    data_abstract = models.CharField(max_length=10)
    fk_abstract = models.ForeignKey(Relation, models.CASCADE, related_name='fk_abstract_rel')

    # M2M fields
    m2m_abstract = models.ManyToManyField(Relation, related_name='m2m_abstract_rel')
    friends_abstract = models.ManyToManyField('self', related_name='friends_abstract', symmetrical=True)
    following_abstract = models.ManyToManyField('self', related_name='followers_abstract', symmetrical=False)

    # VIRTUAL fields (ForeignObject has no concrete column of its own)
    data_not_concrete_abstract = models.ForeignObject(
        Relation,
        on_delete=models.CASCADE,
        from_fields=['abstract_non_concrete_id'],
        to_fields=['id'],
        related_name='fo_abstract_rel',
    )

    # GFK fields (generic foreign key triple)
    content_type_abstract = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    object_id_abstract = models.PositiveIntegerField()
    content_object_abstract = GenericForeignKey('content_type_abstract', 'object_id_abstract')

    # GR fields
    generic_relation_abstract = GenericRelation(Relation)

    class Meta:
        abstract = True

    @property
    def test_property(self):
        return 1

    # Non-field class attribute; the meta tests check it is not mistaken
    # for a model field.
    test_instance_only_descriptor = InstanceOnlyDescriptor()
class BasePerson(AbstractPerson):
    """First concrete model in the hierarchy; mirrors AbstractPerson's field
    lineup with `_base` names so inherited vs. locally-declared fields can be
    told apart."""
    # DATA fields
    data_base = models.CharField(max_length=10)
    fk_base = models.ForeignKey(Relation, models.CASCADE, related_name='fk_base_rel')

    # M2M fields
    m2m_base = models.ManyToManyField(Relation, related_name='m2m_base_rel')
    friends_base = models.ManyToManyField('self', related_name='friends_base', symmetrical=True)
    following_base = models.ManyToManyField('self', related_name='followers_base', symmetrical=False)

    # VIRTUAL fields
    data_not_concrete_base = models.ForeignObject(
        Relation,
        on_delete=models.CASCADE,
        from_fields=['base_non_concrete_id'],
        to_fields=['id'],
        related_name='fo_base_rel',
    )

    # GFK fields
    content_type_base = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    object_id_base = models.PositiveIntegerField()
    content_object_base = GenericForeignKey('content_type_base', 'object_id_base')

    # GR fields
    generic_relation_base = GenericRelation(Relation)
class Person(BasePerson):
    """Concrete child of BasePerson (multi-table inheritance); adds the same
    field lineup again with `_inherited`/`_concrete` names."""
    # DATA fields
    data_inherited = models.CharField(max_length=10)
    fk_inherited = models.ForeignKey(Relation, models.CASCADE, related_name='fk_concrete_rel')

    # M2M Fields
    m2m_inherited = models.ManyToManyField(Relation, related_name='m2m_concrete_rel')
    friends_inherited = models.ManyToManyField('self', related_name='friends_concrete', symmetrical=True)
    following_inherited = models.ManyToManyField('self', related_name='followers_concrete', symmetrical=False)

    # VIRTUAL fields
    data_not_concrete_inherited = models.ForeignObject(
        Relation,
        on_delete=models.CASCADE,
        from_fields=['model_non_concrete_id'],
        to_fields=['id'],
        related_name='fo_concrete_rel',
    )

    # GFK fields
    content_type_concrete = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    object_id_concrete = models.PositiveIntegerField()
    content_object_concrete = GenericForeignKey('content_type_concrete', 'object_id_concrete')

    # GR fields
    generic_relation_concrete = GenericRelation(Relation)
class ProxyPerson(Person):
    # Proxy of Person: same table, used to test proxy-model meta behavior.
    class Meta:
        proxy = True
class PersonThroughProxySubclass(ProxyPerson):
    # Concrete subclass *via* a proxy parent.
    pass
class Relating(models.Model):
    """Holds forward relations (FK and M2M, both named and hidden via '+')
    to the *Person models, exercising reverse-relation discovery."""
    # ForeignKey to BasePerson
    baseperson = models.ForeignKey(BasePerson, models.CASCADE, related_name='relating_baseperson')
    baseperson_hidden = models.ForeignKey(BasePerson, models.CASCADE, related_name='+')

    # ForeignKey to Person
    person = models.ForeignKey(Person, models.CASCADE, related_name='relating_person')
    person_hidden = models.ForeignKey(Person, models.CASCADE, related_name='+')

    # ForeignKey to ProxyPerson
    proxyperson = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='relating_proxyperson')
    proxyperson_hidden = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='relating_proxyperson_hidden+')

    # ManyToManyField to BasePerson
    basepeople = models.ManyToManyField(BasePerson, related_name='relating_basepeople')
    basepeople_hidden = models.ManyToManyField(BasePerson, related_name='+')

    # ManyToManyField to Person
    people = models.ManyToManyField(Person, related_name='relating_people')
    people_hidden = models.ManyToManyField(Person, related_name='+')
# ParentListTests models
class CommonAncestor(models.Model):
    # Shared root for the diamond-inheritance ParentListTests below.
    pass
class FirstParent(CommonAncestor):
    # Explicit parent link sharing the ancestor's primary key.
    first_ancestor = models.OneToOneField(CommonAncestor, models.CASCADE, primary_key=True, parent_link=True)
class SecondParent(CommonAncestor):
    # Second branch of the diamond, with its own parent link.
    second_ancestor = models.OneToOneField(CommonAncestor, models.CASCADE, primary_key=True, parent_link=True)
class Child(FirstParent, SecondParent):
    # Diamond tip: inherits CommonAncestor through both parents.
    pass
| bsd-3-clause |
bhargav2408/python-for-android | python3-alpha/python3-src/Lib/unittest/suite.py | 45 | 9726 | """TestSuite"""
import sys
from . import case
from . import util
__unittest = True
def _call_if_exists(parent, attr):
func = getattr(parent, attr, lambda: None)
func()
class BaseTestSuite(object):
    """A simple test suite that doesn't provide class or module shared fixtures.

    Behaves as a flat, ordered container of callable tests.
    """
    def __init__(self, tests=()):
        self._tests = []
        self.addTests(tests)

    def __repr__(self):
        return "<%s tests=%s>" % (util.strclass(self.__class__), list(self))

    def __eq__(self, other):
        # Suites compare equal when they hold equal tests in the same order.
        if not isinstance(other, self.__class__):
            return NotImplemented
        return list(self) == list(other)

    def __ne__(self, other):
        return not self == other

    def __iter__(self):
        return iter(self._tests)

    def countTestCases(self):
        """Total number of test cases, summed over all contained tests."""
        return sum(single_test.countTestCases() for single_test in self)

    def addTest(self, test):
        """Append a single test to the suite after sanity-checking it."""
        # Must be callable, and must not be a TestCase/TestSuite *class*
        # (only instances may be added).
        if not hasattr(test, '__call__'):
            raise TypeError("{} is not callable".format(repr(test)))
        if isinstance(test, type) and issubclass(test,
                                                 (case.TestCase, TestSuite)):
            raise TypeError("TestCases and TestSuites must be instantiated "
                            "before passing them to addTest()")
        self._tests.append(test)

    def addTests(self, tests):
        """Append every test from the iterable `tests`."""
        if isinstance(tests, str):
            raise TypeError("tests must be an iterable of tests, not a string")
        for single_test in tests:
            self.addTest(single_test)

    def run(self, result):
        """Run every contained test, honouring result.shouldStop."""
        for single_test in self:
            if result.shouldStop:
                break
            single_test(result)
        return result

    def __call__(self, *args, **kwds):
        return self.run(*args, **kwds)

    def debug(self):
        """Run the tests without collecting errors in a TestResult"""
        for single_test in self:
            single_test.debug()
class TestSuite(BaseTestSuite):
    """A test suite is a composite test consisting of a number of TestCases.

    For use, create an instance of TestSuite, then add test case instances.
    When all tests have been added, the suite can be passed to a test
    runner, such as TextTestRunner. It will run the individual test cases
    in the order in which they were added, aggregating the results. When
    subclassing, do not forget to call the base class constructor.
    """
    def run(self, result, debug=False):
        # Runs class- and module-level fixtures around plain test cases;
        # nested suites recurse through test(result) below.
        topLevel = False
        if getattr(result, '_testRunEntered', False) is False:
            # Only the outermost suite performs the final teardown pass.
            result._testRunEntered = topLevel = True
        for test in self:
            if result.shouldStop:
                break
            if _isnotsuite(test):
                self._tearDownPreviousClass(test, result)
                self._handleModuleFixture(test, result)
                self._handleClassSetUp(test, result)
                result._previousTestClass = test.__class__
                if (getattr(test.__class__, '_classSetupFailed', False) or
                    getattr(result, '_moduleSetUpFailed', False)):
                    # A fixture failed: skip the tests it guards.
                    continue
            if not debug:
                test(result)
            else:
                test.debug()
        if topLevel:
            self._tearDownPreviousClass(None, result)
            self._handleModuleTearDown(result)
            result._testRunEntered = False
        return result

    def debug(self):
        """Run the tests without collecting errors in a TestResult"""
        debug = _DebugResult()
        self.run(debug, True)

    ################################
    # Private methods handling class/module fixtures.

    def _handleClassSetUp(self, test, result):
        # Call setUpClass exactly once when crossing into a new test class.
        previousClass = getattr(result, '_previousTestClass', None)
        currentClass = test.__class__
        if currentClass == previousClass:
            return
        if result._moduleSetUpFailed:
            return
        if getattr(currentClass, "__unittest_skip__", False):
            return
        try:
            currentClass._classSetupFailed = False
        except TypeError:
            # test may actually be a function
            # so its class will be a builtin-type
            pass
        setUpClass = getattr(currentClass, 'setUpClass', None)
        if setUpClass is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                setUpClass()
            except Exception as e:
                # In debug mode fixtures must fail loudly.
                if isinstance(result, _DebugResult):
                    raise
                currentClass._classSetupFailed = True
                className = util.strclass(currentClass)
                errorName = 'setUpClass (%s)' % className
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')

    def _get_previous_module(self, result):
        # Module name of the previously-run test class, or None.
        previousModule = None
        previousClass = getattr(result, '_previousTestClass', None)
        if previousClass is not None:
            previousModule = previousClass.__module__
        return previousModule

    def _handleModuleFixture(self, test, result):
        # Tear down the previous module and run setUpModule for the new one
        # when crossing a module boundary.
        previousModule = self._get_previous_module(result)
        currentModule = test.__class__.__module__
        if currentModule == previousModule:
            return
        self._handleModuleTearDown(result)
        result._moduleSetUpFailed = False
        try:
            module = sys.modules[currentModule]
        except KeyError:
            return
        setUpModule = getattr(module, 'setUpModule', None)
        if setUpModule is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                setUpModule()
            except Exception as e:
                if isinstance(result, _DebugResult):
                    raise
                result._moduleSetUpFailed = True
                errorName = 'setUpModule (%s)' % currentModule
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')

    def _addClassOrModuleLevelException(self, result, exception, errorName):
        # Record a fixture failure against a synthetic _ErrorHolder "test";
        # SkipTest exceptions become skips rather than errors.
        error = _ErrorHolder(errorName)
        addSkip = getattr(result, 'addSkip', None)
        if addSkip is not None and isinstance(exception, case.SkipTest):
            addSkip(error, str(exception))
        else:
            result.addError(error, sys.exc_info())

    def _handleModuleTearDown(self, result):
        # Run tearDownModule for the module we are leaving, if any.
        previousModule = self._get_previous_module(result)
        if previousModule is None:
            return
        if result._moduleSetUpFailed:
            return
        try:
            module = sys.modules[previousModule]
        except KeyError:
            return
        tearDownModule = getattr(module, 'tearDownModule', None)
        if tearDownModule is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                tearDownModule()
            except Exception as e:
                if isinstance(result, _DebugResult):
                    raise
                errorName = 'tearDownModule (%s)' % previousModule
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')

    def _tearDownPreviousClass(self, test, result):
        # Run tearDownClass for the class we are leaving, unless its setup
        # failed or it was skipped.
        previousClass = getattr(result, '_previousTestClass', None)
        currentClass = test.__class__
        if currentClass == previousClass:
            return
        if getattr(previousClass, '_classSetupFailed', False):
            return
        if getattr(result, '_moduleSetUpFailed', False):
            return
        if getattr(previousClass, "__unittest_skip__", False):
            return
        tearDownClass = getattr(previousClass, 'tearDownClass', None)
        if tearDownClass is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                tearDownClass()
            except Exception as e:
                if isinstance(result, _DebugResult):
                    raise
                className = util.strclass(previousClass)
                errorName = 'tearDownClass (%s)' % className
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')
class _ErrorHolder(object):
"""
Placeholder for a TestCase inside a result. As far as a TestResult
is concerned, this looks exactly like a unit test. Used to insert
arbitrary errors into a test suite run.
"""
# Inspired by the ErrorHolder from Twisted:
# http://twistedmatrix.com/trac/browser/trunk/twisted/trial/runner.py
# attribute used by TestResult._exc_info_to_string
failureException = None
def __init__(self, description):
self.description = description
def id(self):
return self.description
def shortDescription(self):
return None
def __repr__(self):
return "<ErrorHolder description=%r>" % (self.description,)
def __str__(self):
return self.id()
def run(self, result):
# could call result.addError(...) - but this test-like object
# shouldn't be run anyway
pass
def __call__(self, result):
return self.run(result)
def countTestCases(self):
return 0
def _isnotsuite(test):
"A crude way to tell apart testcases and suites with duck-typing"
try:
iter(test)
except TypeError:
return True
return False
class _DebugResult(object):
    "Used by the TestSuite to hold previous class when running in debug."
    # Last test class seen, so teardown of the previous class can run.
    _previousTestClass = None
    # True once setUpModule has failed, suppressing dependent teardowns.
    _moduleSetUpFailed = False
    # Debug runs never request early termination.
    shouldStop = False
| apache-2.0 |
Maikflow/django_test | lib/python2.7/site-packages/setuptools/tests/test_upload_docs.py | 522 | 2139 | """upload_docs tests
"""
import sys, os, shutil, tempfile, unittest, site, zipfile
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
# Minimal setup.py written into the temporary project directory by setUp.
SETUP_PY = """\
from setuptools import setup
setup(name='foo')
"""
class TestUploadDocsTest(unittest.TestCase):
    """Tests for the setuptools ``upload_docs`` command."""

    def setUp(self):
        # Build a throwaway project directory containing a minimal
        # setup.py and chdir into it so the command operates on it.
        self.dir = tempfile.mkdtemp()
        setup = os.path.join(self.dir, 'setup.py')
        f = open(setup, 'w')
        f.write(SETUP_PY)
        f.close()
        self.old_cwd = os.getcwd()
        os.chdir(self.dir)
        # Directory whose contents will be zipped by the command.
        self.upload_dir = os.path.join(self.dir, 'build')
        os.mkdir(self.upload_dir)
        # A test document.
        f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
        f.write("Hello world.")
        f.close()
        # An empty folder.
        os.mkdir(os.path.join(self.upload_dir, 'empty'))
        # Redirect the per-user site directories (2.6+) to fresh temp
        # dirs so the test cannot touch the real user site-packages.
        if sys.version >= "2.6":
            self.old_base = site.USER_BASE
            site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
            self.old_site = site.USER_SITE
            site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()

    def tearDown(self):
        # Restore the working directory and remove everything created
        # in setUp, including the redirected user-site directories.
        os.chdir(self.old_cwd)
        shutil.rmtree(self.dir)
        if sys.version >= "2.6":
            shutil.rmtree(site.USER_BASE)
            shutil.rmtree(site.USER_SITE)
            site.USER_BASE = self.old_base
            site.USER_SITE = self.old_site

    def test_create_zipfile(self):
        # Test to make sure zipfile creation handles common cases.
        # This explicitly includes a folder containing an empty folder.
        dist = Distribution()
        cmd = upload_docs(dist)
        cmd.upload_dir = self.upload_dir
        cmd.target_dir = self.upload_dir
        tmp_dir = tempfile.mkdtemp()
        tmp_file = os.path.join(tmp_dir, 'foo.zip')
        try:
            zip_file = cmd.create_zipfile(tmp_file)
            assert zipfile.is_zipfile(tmp_file)
            zip_file = zipfile.ZipFile(tmp_file)  # woh...
            # The empty folder must not produce an archive entry; only
            # the document is expected.
            assert zip_file.namelist() == ['index.html']
            zip_file.close()
        finally:
            shutil.rmtree(tmp_dir)
| gpl-2.0 |
nolanamy/zxing | cpp/scons/scons-local-2.0.0.final.0/SCons/Tool/mwcc.py | 34 | 6892 | """SCons.Tool.mwcc
Tool-specific initialization for the Metrowerks CodeWarrior compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/mwcc.py 5023 2010/06/14 22:05:46 scons"
import os
import os.path
import SCons.Util
def set_vars(env):
    """Set MWCW_VERSION, MWCW_VERSIONS and CodeWarrior environment vars.

    MWCW_VERSIONS is set to a list of objects representing installed
    versions; MWCW_VERSION to the version object used for building.
    A string MWCW_VERSION supplied at Environment construction selects
    a specific version, otherwise the latest detected one is used.

    Returns 1 when at least one usable version was configured, 0
    otherwise.
    """
    desired = env.get('MWCW_VERSION', '')

    # Already resolved to a version object: nothing to do.
    if isinstance(desired, MWVersion):
        return 1
    # Explicitly disabled with None.
    if desired is None:
        return 0

    versions = find_versions()
    version = None
    if not desired:
        if versions:
            version = versions[-1]
    else:
        # No break: if duplicate version strings exist, the last
        # matching entry wins.
        for candidate in versions:
            if str(candidate) == desired:
                version = candidate

    env['MWCW_VERSIONS'] = versions
    env['MWCW_VERSION'] = version
    if version is None:
        return 0

    # Make the command-line tools and the DLLs they need reachable.
    env.PrependENVPath('PATH', version.clpath)
    env.PrependENVPath('PATH', version.dllpath)

    shell_env = env['ENV']
    shell_env['CWFolder'] = version.path
    shell_env['LM_LICENSE_FILE'] = version.license
    # Each search path is prefixed with '+' before being joined.
    shell_env['MWCIncludes'] = os.pathsep.join(
        '+%s' % inc for inc in version.includes)
    shell_env['MWLibraries'] = os.pathsep.join(
        '+%s' % lib for lib in version.libs)
    return 1
def find_versions():
    """Return a list of MWVersion objects representing installed versions"""
    versions = []

    ### This function finds CodeWarrior by reading from the registry on
    ### Windows. Some other method needs to be implemented for other
    ### platforms, maybe something that calls env.WhereIs('mwcc')

    if SCons.Util.can_read_reg:
        try:
            HLM = SCons.Util.HKEY_LOCAL_MACHINE
            product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions'
            product_key = SCons.Util.RegOpenKeyEx(HLM, product)

            i = 0
            while True:
                # RegEnumKey raises RegError once the index runs past
                # the last subkey; that terminates this loop via the
                # outer except below.
                name = product + '\\' + SCons.Util.RegEnumKey(product_key, i)
                name_key = SCons.Util.RegOpenKeyEx(HLM, name)

                try:
                    version = SCons.Util.RegQueryValueEx(name_key, 'VERSION')
                    path = SCons.Util.RegQueryValueEx(name_key, 'PATH')
                    mwv = MWVersion(version[0], path[0], 'Win32-X86')
                    versions.append(mwv)
                except SCons.Util.RegError:
                    # Entry lacks VERSION or PATH values: skip it but
                    # keep enumerating the remaining subkeys.
                    pass

                i = i + 1

        except SCons.Util.RegError:
            # Product key absent or enumeration exhausted: fall through
            # with whatever was collected so far.
            pass

    return versions
class MWVersion(object):
    """One installed CodeWarrior version and the paths derived from it.

    The Metrowerks tools don't store any configuration data, so they
    are totally dumb when it comes to locating standard headers,
    libraries and other files, expecting all the information to be
    handed to them in environment variables.  The attributes computed
    here control what information scons injects into the environment.
    """

    def __init__(self, version, path, platform):
        self.version = version
        self.path = path
        self.platform = platform
        join = os.path.join
        # Location of the command-line compiler/linker and of the DLLs
        # those tools load at runtime.
        self.clpath = join(path, 'Other Metrowerks Tools',
                           'Command Line Tools')
        self.dllpath = join(path, 'Bin')
        ### The paths below give a normal build environment in
        ### CodeWarrior for Windows; other versions of CodeWarrior
        ### might need different paths.
        msl = join(path, 'MSL')
        support = join(path, '%s Support' % platform)
        self.license = join(path, 'license.dat')
        self.includes = [msl, support]
        self.libs = [msl, support]

    def __str__(self):
        # A version prints as its bare version string so it can be
        # compared against the MWCW_VERSION setting.
        return self.version
# Source suffixes dispatched to the C compiler actions in generate().
CSuffixes = ['.c', '.C']
# Source suffixes dispatched to the C++ compiler actions in generate().
CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++']
def generate(env):
    """Add Builders and construction variables for the mwcc to an Environment."""
    import SCons.Defaults
    import SCons.Tool

    set_vars(env)

    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # Register the compile actions for every recognised suffix.
    for suffix in CSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.CAction)
        shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
    for suffix in CXXSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.CXXAction)
        shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)

    # Construction variables, applied in a fixed order via plain
    # env[key] = value assignments.
    for key, value in (
        ('CCCOMFLAGS',
         '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES'),
        ('CC', 'mwcc'),
        ('CCCOM', '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS'),
        ('CXX', 'mwcc'),
        ('CXXCOM', '$CXX $CXXFLAGS $CCCOMFLAGS'),
        ('SHCC', '$CC'),
        ('SHCCFLAGS', '$CCFLAGS'),
        ('SHCFLAGS', '$CFLAGS'),
        ('SHCCCOM', '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS'),
        ('SHCXX', '$CXX'),
        ('SHCXXFLAGS', '$CXXFLAGS'),
        ('SHCXXCOM', '$SHCXX $SHCXXFLAGS $CCCOMFLAGS'),
        ('CFILESUFFIX', '.c'),
        ('CXXFILESUFFIX', '.cpp'),
        ('CPPDEFPREFIX', '-D'),
        ('CPPDEFSUFFIX', ''),
        ('INCPREFIX', '-I'),
        ('INCSUFFIX', ''),
    ):
        env[key] = value

    #env['PCH'] = ?
    #env['PCHSTOP'] = ?
def exists(env):
    # The tool is usable exactly when a CodeWarrior version can be
    # located and configured by set_vars().
    return set_vars(env)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.