code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
"""
Build IDE required files from python folder structure.
"""
import os.path
from fnmatch import fnmatch
from .ideprocesses import vstudio_read, vstudio_write, none_read, none_write
# Dispatch table mapping an IDE identifier to its [read_process, write_process]
# pair. The None key is the no-op fallback used when no IDE is selected.
PROCESSES = {
    "vstudio": [vstudio_read, vstudio_write],
    None: [none_read, none_write]
    }
def read_gitignore(source_path):
    """Read the .gitignore file in *source_path* and return its patterns.

    Blank lines and comment lines (starting with '#') are skipped.

    :param source_path: folder expected to contain a .gitignore file
    :return: list of stripped pattern strings; empty list when no
        .gitignore exists (previously this raised IOError, aborting the
        whole build for projects without one).
    """
    path = os.path.join(source_path, ".gitignore")
    if not os.path.exists(path):
        # No ignore file simply means nothing is ignored.
        return []
    with open(path, 'r') as fgit:
        stripped = (line.strip() for line in fgit)
        return [line for line in stripped if line and not line.startswith("#")]
def is_ignored(item, patterns):
    """Return True if *item* matches any of the glob *patterns*.

    :param item: file or directory name (directories carry a trailing '/')
    :param patterns: iterable of fnmatch-style patterns
    """
    # Generator instead of list: avoids materializing all results and
    # lets any() short-circuit on the first match.
    return any(fnmatch(item, pattern) for pattern in patterns)
def traverse(source_path, process):
    """Walk the folder tree below *source_path*, applying *process* at
    each directory while honouring the project's .gitignore patterns.

    Parameters
    ----------
    source_path: path to traverse
    process: function (level, root, dirs, files) returning a list of actions

    Returns
    -------
    actions: concatenated list of all actions produced by *process*
    """
    patterns = read_gitignore(source_path)

    def depth(path):
        # Depth is approximated by counting both flavours of separator,
        # so Windows and POSIX paths are handled uniformly.
        return path.count("\\") + path.count("/")

    base_depth = depth(source_path)
    actions = []
    for root, dirs, files in os.walk(source_path):
        # Prune ignored entries in place so os.walk skips ignored subtrees.
        remove_ignored(dirs, patterns, is_dir=True)
        remove_ignored(files, patterns)
        actions += process(depth(root) - base_depth, root, dirs, files)
    return actions
def remove_ignored(alist, patterns, is_dir=False):
    """Remove, in place, the items of *alist* that match *patterns*.

    Mutation must happen in place because os.walk prunes its traversal
    based on the same list object. Directory names are tested with a
    trailing '/' so directory-only patterns such as 'build/' match.

    :param alist: list of file or directory names (mutated in place)
    :param patterns: .gitignore-style glob patterns
    :param is_dir: True when *alist* contains directory names
    """
    suffix = "/" if is_dir else ""
    # Slice assignment keeps the original list object (required by os.walk)
    # and replaces the O(n^2) collect-then-remove loop with one O(n) pass.
    alist[:] = [itm for itm in alist if not is_ignored(itm + suffix, patterns)]
def build(source_path, overwrite=True, ide=None):
    """
    Traverse source_path folder structure and write required IDE files.

    :param source_path: relative or full path of python code
    :param overwrite: it will overwrite existing solution and project files
    :param ide: {"vstudio", None}
    :raises IOError: when source_path does not exist
    The resulting files are written to disk.
    """
    if not os.path.exists(source_path):
        # Typo fix: message previously read "so not skeleton can be built".
        raise IOError("source_path does not exist so no skeleton can be built")
    read_process, write_process = PROCESSES[ide]
    actions = traverse(source_path, read_process)
    return write_process(actions, source_path, overwrite)
|
jruizaranguren/ideskeleton
|
ideskeleton/builder.py
|
Python
|
mit
| 2,386
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2011-2013 Therp BV (<http://therp.nl>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import inspect
import logging
from openerp import release, tools, SUPERUSER_ID
from openerp.osv import orm
import openupgrade_tools
# The server log level has not been set at this point
# so to log at loglevel debug we need to set it
# manually here. As a consequence, DEBUG messages from
# this file are always logged
logger = logging.getLogger('OpenUpgrade')
# Force DEBUG regardless of server configuration (see comment above).
logger.setLevel(logging.DEBUG)

# Public API: the helpers below are meant to be called from OpenUpgrade
# migration scripts (pre/post).
__all__ = [
    'migrate',
    'load_data',
    'rename_columns',
    'rename_tables',
    'rename_models',
    'rename_xmlids',
    'drop_columns',
    'delete_model_workflow',
    'warn_possible_dataloss',
    'set_defaults',
    'logged_query',
    'column_exists',
    'table_exists',
    'update_module_names',
    'add_ir_model_fields',
    'get_legacy_name',
    'm2o_to_m2m',
    'message',
    'check_values_selection_field',
]
def check_values_selection_field(cr, table_name, field_name, allowed_values):
    """
    Check that the selection column *field_name* of table *table_name*
    only contains values from *allowed_values*.

    Logs an error for every unexpected value found.
    :return: True when every stored value is allowed, False otherwise.
    """
    # Identifiers cannot be passed as query parameters; they are
    # interpolated directly (trusted, migration-internal input only).
    query = "SELECT %s, count(*) FROM %s GROUP BY %s;" % (
        field_name, table_name, field_name)
    cr.execute(query)
    ok = True
    for value, row_count in cr.fetchall():
        if value in allowed_values:
            continue
        logger.error(
            "Invalid value '%s' in the table '%s' "
            "for the field '%s'. (%s rows).",
            value, table_name, field_name, row_count)
        ok = False
    return ok
def load_data(cr, module_name, filename, idref=None, mode='init'):
    """
    Load an xml or csv data file from your post script. The usual case for
    this is the
    occurrence of newly added essential or useful data in the module that is
    marked with "noupdate='1'" and without "forcecreate='1'" so that it will
    not be loaded by the usual upgrade mechanism. Leaving the 'mode' argument
    to its default 'init' will load the data from your migration script.

    Theoretically, you could simply load a stock file from the module, but be
    careful not to reinitialize any data that could have been customized.
    Preferably, select only the newly added items. Copy these to a file
    in your migrations directory and load that file.
    Leave it to the user to actually delete existing resources that are
    marked with 'noupdate' (other named items will be deleted
    automatically).

    :param module_name: the name of the module
    :param filename: the path to the filename, relative to the module \
    directory.
    :param idref: optional hash with ?id mapping cache?
    :param mode: one of 'init', 'update', 'demo'. Always use 'init' \
    for adding new items from files that are marked with 'noupdate'. Defaults \
    to 'init'.
    """
    if idref is None:
        # Fresh mapping cache per call (avoids a mutable default argument).
        idref = {}
    logger.info('%s: loading %s' % (module_name, filename))
    _, ext = os.path.splitext(filename)
    pathname = os.path.join(module_name, filename)
    fp = tools.file_open(pathname)
    try:
        if ext == '.csv':
            # Records loaded here are flagged noupdate so subsequent
            # regular upgrades will not overwrite them.
            noupdate = True
            tools.convert_csv_import(
                cr, module_name, pathname, fp.read(), idref, mode, noupdate)
        else:
            # Anything that is not csv is assumed to be xml.
            tools.convert_xml_import(cr, module_name, fp, idref, mode=mode)
    finally:
        fp.close()

# for backwards compatibility
load_xml = load_data

# Re-exported helper; implementation lives in openupgrade_tools.
table_exists = openupgrade_tools.table_exists
def rename_columns(cr, column_spec):
    """
    Rename table columns. Typically called in the pre script.

    :param column_spec: a hash with table keys, with lists of tuples as \
    values. Tuples consist of (old_name, new_name). Use None for new_name \
    to trigger a conversion of old_name using get_legacy_name()
    """
    for table, renames in column_spec.items():
        for old, new in renames:
            if new is None:
                # Derive a versioned legacy name when none was given.
                new = get_legacy_name(old)
            logger.info(
                "table %s, column %s: renaming to %s", table, old, new)
            cr.execute(
                'ALTER TABLE "%s" RENAME "%s" TO "%s"' % (table, old, new,))
            # The single-column index on the old name no longer applies.
            cr.execute('DROP INDEX IF EXISTS "%s_%s_index"' % (table, old))
def rename_tables(cr, table_spec):
    """
    Rename tables. Typically called in the pre script.
    This function also renames the id sequence if it exists and if it is
    not modified in the same run.

    :param table_spec: a list of tuples (old table name, new table name).
        Note: the list is extended in place with matching
        '<old>_id_seq' sequence renames.
    """
    # Append id sequences
    to_rename = [x[0] for x in table_spec]
    # Iterate over a copy: table_spec may grow inside the loop.
    for old, new in list(table_spec):
        if (table_exists(cr, old + '_id_seq') and
                old + '_id_seq' not in to_rename):
            table_spec.append((old + '_id_seq', new + '_id_seq'))
    for (old, new) in table_spec:
        logger.info("table %s: renaming to %s",
                    old, new)
        cr.execute('ALTER TABLE "%s" RENAME TO "%s"' % (old, new,))
def rename_models(cr, model_spec):
    """
    Rename models. Typically called in the pre script.

    :param model_spec: a list of tuples (old model name, new model name).

    Use case: if a model changes name, but still implements equivalent
    functionality you will want to update references in for instance
    relation fields.
    """
    for old, new in model_spec:
        logger.info("model %s: renaming to %s", old, new)
        # Update the model registry entry itself, then relation fields
        # pointing at it.
        cr.execute(
            'UPDATE ir_model SET model = %s WHERE model = %s', (new, old,))
        cr.execute(
            'UPDATE ir_model_fields SET relation = %s WHERE relation = %s',
            (new, old,))
        # TODO: signal where the model occurs in references to ir_model
def rename_xmlids(cr, xmlids_spec):
    """
    Rename XML IDs. Typically called in the pre script.
    One usage example is when an ID changes module. In OpenERP 6 for example,
    a number of res_groups IDs moved to module base from other modules (
    although they were still being defined in their respective module).

    :param xmlids_spec: list of (old, new) tuples, each ID fully
        qualified as 'module.name'.
    """
    for (old, new) in xmlids_spec:
        # Bug fix: str.split('.') always returns a non-empty list, so the
        # previous `not old.split('.')` check could never trigger and an
        # unqualified ID produced a wrong-arity parameter tuple below.
        # Check for the separator itself instead.
        if '.' not in old or '.' not in new:
            logger.error(
                'Cannot rename XMLID %s to %s: need the module '
                'reference to be specified in the IDs' % (old, new))
        else:
            query = ("UPDATE ir_model_data SET module = %s, name = %s "
                     "WHERE module = %s and name = %s")
            logged_query(cr, query, tuple(new.split('.') + old.split('.')))
def drop_columns(cr, column_spec):
    """
    Drop columns but perform an additional check if a column exists.
    This covers the case of function fields that may or may not be stored.
    Consider that this may not be obvious: an additional module can govern
    a function fields' store properties.

    :param column_spec: a list of (table, column) tuples
    """
    for table, column in column_spec:
        logger.info("table %s: drop column %s", table, column)
        if not column_exists(cr, table, column):
            # Function fields may never have been stored at all.
            logger.warn("table %s: column %s did not exist", table, column)
            continue
        cr.execute('ALTER TABLE "%s" DROP COLUMN "%s"' %
                   (table, column))
def delete_model_workflow(cr, model):
    """
    Forcefully remove active workflows for obsolete models,
    to prevent foreign key issues when the orm deletes the model.

    :param model: name of the obsolete model (matched on the wkf.osv column)
    """
    # Work items must be deleted first: they reference wkf activities.
    logged_query(
        cr,
        "DELETE FROM wkf_workitem WHERE act_id in "
        "( SELECT wkf_activity.id "
        " FROM wkf_activity, wkf "
        " WHERE wkf_id = wkf.id AND "
        " wkf.osv = %s"
        ")", (model,))
    logged_query(
        cr,
        "DELETE FROM wkf WHERE osv = %s", (model,))
def warn_possible_dataloss(cr, pool, old_module, fields):
    """
    Use that function in the following case:
    if a field of a model was moved from a 'A' module to a 'B' module.
    ('B' depend on 'A'),
    This function will test if 'B' is installed.
    If not, count the number of different value and possibly warn the user.
    Use orm, so call from the post script.

    :param old_module: name of the old module
    :param fields: list of dictionary with the following keys:
        'table' : name of the table where the field is.
        'field' : name of the field that are moving.
        'new_module' : name of the new module

    .. versionadded:: 7.0
    """
    module_obj = pool.get('ir.module.module')
    for field in fields:
        # Is the field's new home installed or about to be?
        module_ids = module_obj.search(
            cr, SUPERUSER_ID, [
                ('name', '=', field['new_module']),
                ('state', 'in', ['installed', 'to upgrade', 'to install'])
            ])
        if not module_ids:
            # Count distinct values currently stored in the moved field.
            cr.execute(
                "SELECT count(*) FROM (SELECT %s from %s group by %s) "
                "as tmp" % (
                    field['field'], field['table'], field['field']))
            row = cr.fetchone()
            if row[0] == 1:
                # not a problem, that field wasn't used.
                # Just a loss of functionality
                logger.info(
                    "Field '%s' from module '%s' was moved to module "
                    "'%s' which is not installed: "
                    "No dataloss detected, only loss of functionality"
                    % (field['field'], old_module, field['new_module']))
            else:
                # there is data loss after the migration.
                # Bug fix: message() takes (cr, module, table, column,
                # message, *args); the table and column arguments were
                # missing, so the format string was consumed as 'table'
                # and %-formatting failed at log time.
                message(
                    cr, old_module, False, False,
                    "Field '%s' was moved to module "
                    "'%s' which is not installed: "
                    "There were %s distinct values in this field.",
                    field['field'], field['new_module'], row[0])
def set_defaults(cr, pool, default_spec, force=False):
    """
    Set default value. Useful for fields that are newly required. Uses orm, so
    call from the post script.

    :param default_spec: a hash with model names as keys. Values are lists \
    of tuples (field, value). None as a value has a special meaning: it \
    assigns the default value. If this value is provided by a function, the \
    function is called as the user that created the resource.
    :param force: overwrite existing values. To be used for assigning a non- \
    default value (presumably in the case of a new column). The ORM assigns \
    the default value as declared in the model in an earlier stage of the \
    process. Beware of issues with resources loaded from new data that \
    actually do require the model's default, in combination with the post \
    script possible being run multiple times.
    """

    def write_value(ids, field, value):
        # Write `value` into `field` for every resource id, one at a time.
        logger.debug(
            "model %s, field %s: setting default value of resources %s to %s",
            model, field, ids, unicode(value))
        for res_id in ids:
            # Iterating over ids here as a workaround for lp:1131653
            obj.write(cr, SUPERUSER_ID, [res_id], {field: value})

    for model in default_spec.keys():
        obj = pool.get(model)
        if not obj:
            raise orm.except_orm(
                "Error",
                "Migration: error setting default, no such model: %s" % model)
        for field, value in default_spec[model]:
            # Unless forced, only touch resources where the field is unset.
            domain = not force and [(field, '=', False)] or []
            ids = obj.search(cr, SUPERUSER_ID, domain)
            if not ids:
                continue
            if value is None:
                # Set the value by calling the _defaults of the object.
                # Typically used for company_id on various models, and in that
                # case the result depends on the user associated with the
                # object. We retrieve create_uid for this purpose and need to
                # call the defaults function per resource. Otherwise, write
                # all resources at once.
                if field in obj._defaults:
                    if not callable(obj._defaults[field]):
                        write_value(ids, field, obj._defaults[field])
                    else:
                        cr.execute(
                            "SELECT id, COALESCE(create_uid, 1) FROM %s " %
                            obj._table + "WHERE id in %s", (tuple(ids),))
                        # Execute the function once per user_id
                        user_id_map = {}
                        for row in cr.fetchall():
                            user_id_map.setdefault(row[1], []).append(row[0])
                        for user_id in user_id_map:
                            write_value(
                                user_id_map[user_id], field,
                                obj._defaults[field](obj, cr, user_id, None))
                else:
                    error = (
                        "OpenUpgrade: error setting default, field %s with "
                        "None default value not in %s' _defaults" % (
                            field, model))
                    logger.error(error)
                    # this exeption seems to get lost in a higher up try block
                    # NOTE(review): the exception object is constructed but
                    # never raised, so this line has no effect as written --
                    # confirm whether `raise` was intended.
                    orm.except_orm("OpenUpgrade", error)
            else:
                write_value(ids, field, value)
def logged_query(cr, query, args=None):
    """
    Execute *query* with *args* and log the query and the number of
    affected rows at level DEBUG.

    :param args: sequence (or dict) of query parameters, or None
    :return: number of affected rows (cr.rowcount)
    """
    if args is None:
        args = []
    cr.execute(query, args)
    try:
        # %-interpolation here is only for logging. It raises TypeError
        # when args is a dict or when the query contains literal '%'
        # characters -- that must not mask a successful execute().
        logger.debug('Running %s', query % tuple(args))
    except TypeError:
        logger.debug('Running %s with arguments %r', query, args)
    logger.debug('%s rows affected', cr.rowcount)
    return cr.rowcount
def column_exists(cr, table, column):
    """Return True when *column* exists on *table* (pg_attribute lookup)."""
    query = (
        'SELECT count(attname) FROM pg_attribute '
        'WHERE attrelid = '
        '( SELECT oid FROM pg_class WHERE relname = %s ) '
        'AND attname = %s')
    cr.execute(query, (table, column))
    # Exactly one matching attribute means the column exists.
    count = cr.fetchone()[0]
    return count == 1
def update_module_names(cr, namespec):
    """
    Deal with changed module names of certified modules
    in order to prevent 'certificate not unique' error,
    as well as updating the module reference in the
    XML id.

    :param namespec: tuple of (old name, new name)
    """
    # Every table holding a module reference that must follow the rename.
    queries = (
        "UPDATE ir_module_module SET name = %s WHERE name = %s",
        "UPDATE ir_model_data SET module = %s WHERE module = %s ",
        "UPDATE ir_module_module_dependency SET name = %s WHERE name = %s",
    )
    for old_name, new_name in namespec:
        for query in queries:
            logged_query(cr, query, (new_name, old_name))
def add_ir_model_fields(cr, columnspec):
    """
    Typically, new columns on ir_model_fields need to be added in a very
    early stage in the upgrade process of the base module, in raw sql
    as they need to be in place before any model gets initialized.
    Do not use for fields with additional SQL constraints, such as a
    reference to another table or the cascade constraint, but craft your
    own statement taking them into account.

    :param columnspec: tuple of (column name, column type)
    """
    for column in columnspec:
        # `column` is itself a (name, type) tuple, so `% (column)` feeds
        # both elements to the two %s placeholders.
        query = 'ALTER TABLE ir_model_fields ADD COLUMN %s %s' % (
            column)
        logged_query(cr, query, [])
def get_legacy_name(original_name):
    """
    Returns a versioned name for legacy tables/columns/etc
    Use this function instead of some custom name to avoid
    collisions with future or past legacy tables/columns/etc

    :param original_name: the original name of the column
    """
    # Embed the major.minor server version, e.g. 'openupgrade_legacy_7_0_x'.
    version_parts = [str(part) for part in release.version_info[0:2]]
    return '_'.join(['openupgrade_legacy'] + version_parts + [original_name])
def m2o_to_m2m(cr, model, table, field, source_field):
    """
    Recreate relations in many2many fields that were formerly
    many2one fields. Use rename_columns in your pre-migrate
    script to retain the column's old value, then call m2o_to_m2m
    in your post-migrate script.

    :param model: The target model pool object
    :param table: The source table
    :param field: The field name of the target model
    :param source_field: the many2one column on the source table.

    .. versionadded:: 7.0
    """
    # Collect every row that still has a value in the old many2one column.
    cr.execute('SELECT id, %(field)s '
               'FROM %(table)s '
               'WHERE %(field)s is not null' % {
                   'table': table,
                   'field': source_field,
               })
    for row in cr.fetchall():
        # (4, id) is the ORM 'link' command: add row[1] to the m2m set.
        model.write(cr, SUPERUSER_ID, row[0], {field: [(4, row[1])]})
def message(cr, module, table, column,
            message, *args, **kwargs):
    """
    Log handler for non-critical notifications about the upgrade.
    To be extended with logging to a table for reporting purposes.

    :param module: the module name that the message concerns
    :param table: the model that this message concerns (may be False, \
    but preferably not if 'column' is defined)
    :param column: the column that this message concerns (may be False)

    .. versionadded:: 7.0
    """
    # Build 'Module %s[, table %s[, column %s]]: ' left to right, with the
    # matching argument list in the same order.
    parts = ['Module %s']
    values = [module]
    if table:
        parts.append('table %s')
        values.append(table)
    if column:
        parts.append('column %s')
        values.append(column)
    prefix = ', '.join(parts) + ': '
    logger.warn(prefix + message, *(values + list(args)), **kwargs)
def migrate():
    """
    This is the decorator for the migrate() function
    in migration scripts.
    Return when the 'version' argument is not defined,
    and log exceptions.
    Retrieve debug context data from the frame above for
    logging purposes.
    """
    def wrap(func):
        def wrapped_function(cr, version):
            stage = 'unknown'
            module = 'unknown'
            filename = 'unknown'
            try:
                # The calling frame (OpenERP's migration manager) holds
                # 'stage', 'pkg' and 'fp' locals; peek at them purely for
                # log context -- never let this inspection break migration.
                frame = inspect.getargvalues(inspect.stack()[1][0])
                stage = frame.locals['stage']
                module = frame.locals['pkg'].name
                filename = frame.locals['fp'].name
            except Exception, e:
                logger.error(
                    "'migrate' decorator: failed to inspect "
                    "the frame above: %s" % e)
                pass
            if not version:
                # No version argument means a fresh install, not a migration.
                return
            logger.info(
                "%s: %s-migration script called with version %s" %
                (module, stage, version))
            try:
                # The actual function is called here
                func(cr, version)
            except Exception, e:
                # Log with full traceback, then re-raise so the upgrade
                # machinery still sees the failure.
                logger.error(
                    "%s: error in migration script %s: %s" %
                    (module, filename, str(e).decode('utf8')))
                logger.exception(e)
                raise
        return wrapped_function
    return wrap
|
bealdav/OpenUpgrade
|
openerp/openupgrade/openupgrade.py
|
Python
|
agpl-3.0
| 19,938
|
import Base
import fixers
import Director
import VS
def AssignMission ():
    # Replace whatever buttons are active in the fixer room with the
    # accept/decline pair for the current pirate mission offer.
    # Accepting runs bases/fixers/pirates_mission.py; declining runs no.py.
    fixers.DestroyActiveButtons ()
    fixers.CreateChoiceButtons(Base.GetCurRoom(),[
        fixers.Choice("bases/fixers/yes.spr","bases/fixers/pirates_mission.py","Accept This Agreement"),
        fixers.Choice("bases/fixers/no.spr","bases/fixers/no.py","Decline This Agreement")])
# Pirate fixer mission chain. Save values "pirate_missionN" appear to encode
# progress: 0 = not reached, 1 = completed, -1 = failed, 2 = paid out
# (inferred from the checks below -- TODO confirm against the mission
# scripts).
# Bug fix: 'faction_ships' and 'launch' were used below without being
# imported, raising a NameError on the mission-failure branch.
import faction_ships
import launch

playa = VS.getPlayer()
playernum = playa.isPlayerStarship()
if VS.numActiveMissions() > 1:
    Base.Message("I can't give your cargo and money until your current mission is complete.")
elif fixers.checkSaveValue(playernum, "pirate_mission1", 0):
    # First contact: offer mission 1.
    print("START1")
    AssignMission()
    Base.Message("We are in need of a pilot who can safely transport this cargo to one of our bases in the #55ffffrigel#000000 system. It is highly important that no confed or militia know of these goods. Will you accept this unavoidable offer?")
# And then if you fail.......
elif (fixers.checkSaveValue(playernum, "pirate_mission1", -1)
        or fixers.checkSaveValue(playernum, "pirate_mission2", -1)
        or fixers.checkSaveValue(playernum, "pirate_mission3", -1)
        or fixers.checkSaveValue(playernum, "pirate_mission4", -1)):
    Base.Message("How could you let our precious cargo be destroyed. It has cost us millions of credits and you trashed it like it was nothing. Prepare to be nailed,pal!")
    # Punish the player with two waves of pirate fighters.
    # ('type' renamed to avoid shadowing the builtin.)
    fighter_type = faction_ships.getRandomFighter("pirates")
    fgname = "shadow"
    launch.launch_wave_around_unit(fgname, "pirates", fighter_type, "default", 1, 80, 300, playa).SetTarget(playa)
    launch.launch_wave_around_unit(fgname, "pirates", fighter_type, "default", 1, 80, 300, playa).SetTarget(playa)
elif (fixers.checkSaveValue(playernum, "pirate_mission1", 1)
        and fixers.checkSaveValue(playernum, "pirate_mission2", 0)):
    # Mission 1 done: pay out and offer mission 2.
    Base.Message("Thanks for the cargo that we needed, pal. We now need some cargo delivered to the #55fffftingvallir#000000 system. Can you do that without geting caught?")#assign mis 2
    fixers.payCheck(playernum, 'paidpiratemission', 0, 20000)
    AssignMission()
elif (fixers.checkSaveValue(playernum, "pirate_mission2", 1)
        and fixers.checkSaveValue(playernum, "pirate_mission3", 0)):
    # Mission 2 done: pay out and offer mission 3 (requires tractor beam).
    Base.Message("Hey, pal. You got back here all right. Now I have a special mission for you that requires a tractor beam. This will require you to destroy a merchant transport in this system. You must have a tractor beam equipped to your ship...we want the ship to blow, but the cargo...")#assign mis 3:
    fixers.payCheck(playernum, 'paidpiratemission', 1, 20000)
    AssignMission()
elif (fixers.checkSaveValue(playernum, "pirate_mission3", 1)
        and fixers.checkSaveValue(playernum, "pirate_mission4", 0)):
    # Mission 3 done: mark it paid and send the player to Rigel.
    fixers.setSaveValue(playernum, "pirate_mission3", 2)
    fixers.payCheck(playernum, 'paidpiratemission', 2, 25000)
    Base.Message("It looks like you got some nice cargo from that guy. Go to the #55ffffrigel#000000 system immediately. There is something very important that must be done. Meet my friend who will tell you what to do.")
elif ((fixers.checkSaveValue(playernum, "pirate_mission3", 2)
        or fixers.checkSaveValue(playernum, "pirate_mission3", 1))
        and VS.getSystemFile() == 'enigma_sector/rigel'
        and fixers.checkSaveValue(playernum, "pirate_mission4", 0)):
    # Player arrived in Rigel: offer the base-defense mission.
    Base.Message("Our base is under attack! Defend it from the militia! They have heard about illegal contraband transferring and are not happy.")#assign mis 4
    AssignMission()
elif fixers.checkSaveValue(playernum, "pirate_mission4", 1):
    # Chain complete: final payout.
    fixers.setSaveValue(playernum, "pirate_mission4", 2)
    Base.Message("Thanks, pal. You've earned our trust. You're a recognized friend of the pirates now. I don't know if you fully grok this yet, but you have saved many of our lives here in the Rigel system. Here are your 80000 credits that we promised. Go out, take what's yours, and don't let anyone give you flak.")
    fixers.payCheck(playernum, 'paidpiratemission', 3, 80000)
else:
    pass
|
vinni-au/vega-strike
|
data/bases/fixers/pirates.py
|
Python
|
gpl-2.0
| 4,098
|
#Py-Infinote is a python port of JInfinote, and was developed for the HWIOS project
'''
Copyright (c) 2009 Simon Veith <simon@jinfinote.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.'''
'''
Copyright (c) OS-Networks, http://os-networks.net
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the HWIOS Project nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.'''
import sys
import re
from hwios.core.application import HWIOS
class InfinotePool(object):
def __init__(self, ws_handler):
self.ws_handler = ws_handler
def subscribe(self, client, item_id, initial_text, app_pool, remote_callback):
'''
General pool subscriber for infinote clients
'''
online = []
if app_pool not in HWIOS.ws_realm.pool.subscription:
HWIOS.ws_realm.pool.subscription[app_pool] = {}
#if cid doesnt exist yet, it means we have to add structure and the first diff from the db
if item_id not in HWIOS.ws_realm.pool.subscription[app_pool]:
HWIOS.ws_realm.pool.subscription[app_pool][item_id] = {'clients':[],'editor':InfinoteEditor()}
HWIOS.ws_realm.pool.subscription[app_pool][item_id]['editor'].try_insert([1, '', 0, initial_text])
#before we add the user to the subscription pool, let's make sure there are no other references for this user left
for _slug in HWIOS.ws_realm.pool.subscription[app_pool]:
for _client in HWIOS.ws_realm.pool.subscription[app_pool][_slug]['clients']:
if _client.profile.uuid == client.profile.uuid:
HWIOS.ws_realm.pool.subscription[app_pool][_slug]['clients'].remove(_client)
HWIOS.ws_realm.pool.subscription[app_pool][item_id]['clients'].append(client)
#inform every editor about this editing client's arrival
for _client in HWIOS.ws_realm.pool.subscription[app_pool][item_id]['clients']:
if _client.role == 'edit':
online.append({'id':_client.profile.pk,'name':_client.profile.username})
for _client in HWIOS.ws_realm.pool.subscription[app_pool][item_id]['clients']:
if _client.role == 'edit' and _client != client:
remote_callback(_client, online, app_pool, item_id)
state = HWIOS.ws_realm.pool.subscription[app_pool][item_id]['editor'].get_state()
log_history = HWIOS.ws_realm.pool.subscription[app_pool][item_id]['editor'].get_log(limit=100)
return {'state':state,'log':log_history,'online':online}
def unsubscribe(self, client, app_pool, remote_callback):
'''
General pool unsubscriber for infinote clients
'''
online = []
if app_pool not in HWIOS.ws_realm.pool.subscription:
HWIOS.ws_realm.pool.subscription[app_pool] = {}
return
for _item_id in HWIOS.ws_realm.pool.subscription[app_pool]:
for _client in HWIOS.ws_realm.pool.subscription[app_pool][_item_id]['clients']:
#match found. Remove client, notify other clients
if _client.profile.uuid == client.profile.uuid:
HWIOS.ws_realm.pool.subscription[app_pool][_item_id]['clients'].remove(_client)
#notify left-over editors that this client has left the building
for _client in HWIOS.ws_realm.pool.subscription[app_pool][_item_id]['clients']:
if _client.role == 'edit':
online.append({'id':_client.profile.pk,'name':_client.profile.username})
for _client in HWIOS.ws_realm.pool.subscription[app_pool][_item_id]['clients']:
if _client.role == 'edit':
remote_callback(_client, online, app_pool, _item_id)
def request_insert(self, client, app_pool, item_id, params, remote_callback):
'''Infinote client wants to perform an insert operation on the subscription log'''
HWIOS.ws_realm.pool.subscription[app_pool][item_id]['editor'].try_insert(params)
state = HWIOS.ws_realm.pool.subscription[app_pool][item_id]['editor'].get_state()
print state
clients = HWIOS.ws_realm.pool.get_clients()
for target_client in HWIOS.ws_realm.pool.subscription[app_pool][item_id]['clients']:
if client != target_client:
remote_callback(target_client, app_pool, item_id, 'insert', params)
def request_delete(self, client, app_pool, item_id, params, remote_callback):
'''Infinote client wants to perform a delete operation on the subscription log'''
HWIOS.ws_realm.pool.subscription[app_pool][item_id]['editor'].try_delete(params)
state = HWIOS.ws_realm.pool.subscription[app_pool][item_id]['editor'].get_state()
print state
clients = HWIOS.ws_realm.pool.get_clients()
for target_client in HWIOS.ws_realm.pool.subscription[app_pool][item_id]['clients']:
if client != target_client:
remote_callback(target_client, app_pool, item_id, 'delete', params)
def request_undo(self, client, app_pool, item_id, params, remote_callback):
'''Infinote client wants to perform an undo operation on the subscription log'''
HWIOS.ws_realm.pool.subscription[app_pool][item_id]['editor'].try_undo(params)
clients = HWIOS.ws_realm.pool.get_clients()
for target_client in HWIOS.ws_realm.pool.subscription[app_pool][item_id]['clients']:
if client != target_client:
remote_callback(target_client, app_pool, item_id, 'undo', params)
def update_caret(self, client, app_pool, item_id, params, remote_callback):
for target_client in HWIOS.ws_realm.pool.subscription[app_pool][item_id]['clients']:
if client != target_client:
remote_callback(target_client, app_pool, item_id, params)
def get_other_clients(self, client, app_pool, item_id):
_clientlist = []
for _client in HWIOS.ws_realm.pool.subscription[app_pool][item_id]['clients']:
if client != _client:
_clientlist.append(_client)
return _clientlist
def get_state(self, app_pool, item_id):
return HWIOS.ws_realm.pool.subscription[app_pool][item_id]['editor'].get_state()
class InfinoteEditor(object):
    """Server-side Infinote editor: wraps a State machine and keeps a
    chronological log of executed insert/delete/undo operations."""

    def __init__(self):
        # Log entries are (kind, params) pairs with kind in {'i','d','u'}.
        self.log = []
        self._state = State()

    def try_insert(self, params):
        # params: [user, vector, position, text] (per the usages below).
        #user, text
        segment = Segment(params[0], params[3])
        buffer = Buffer([segment])
        #position, buffer
        operation = Insert(params[2], buffer)
        #user, vector
        request = DoRequest(params[0], Vector(params[1]), operation)
        if self._state.canExecute(request):
            executedRequest = self._state.execute(request)
            self.log.append(["i",tuple(params)])

    def try_delete(self, params):
        # params[2]: position, params[3]: length (as passed to Delete).
        operation = Delete(params[2], params[3])
        #user, vector, operation
        request = DoRequest(params[0], Vector(params[1]), operation)
        if self._state.canExecute(request):
            executedRequest = self._state.execute(request)
            self.log.append(["d",tuple(params)])

    def try_undo(self, params):
        # Undo is built against the current state vector, not params[1].
        request = UndoRequest(params[0], self._state.vector)
        if self._state.canExecute(request):
            executedRequest = self._state.execute(request)
            self.log.append(["u",tuple(params)])

    def sync(self):
        # NOTE(review): try_insert/try_delete/try_undo append to self.log
        # while this loop iterates it, so replaying a non-empty log never
        # terminates and duplicates entries -- confirm intended usage
        # (possibly meant to replay a snapshot onto a fresh State).
        for log in self.log:
            if log[0] == 'i':
                self.try_insert(log[1])
            elif log[0] =='d':
                self.try_delete(log[1])
            elif log[0] == 'u':
                self.try_undo(log[1])

    def get_state(self):
        # (state vector string, document text) tuple.
        return (self._state.vector.toString(), self._state.buffer.toString())

    def get_log(self, limit = None):
        # Returns (limit, entries); note the first element is the requested
        # limit, not the number of entries actually returned.
        if limit != None:
            if len(self.log) >=limit:
                return (limit, self.log[-limit:])
            else:
                return (limit, self.log)
        else:
            return (limit, self.log)
class BufferSpliceError(Exception):
    """Raised when a Buffer splice operation fails.

    The optional *Errors* payload carries details about the underlying
    problem(s); it defaults to None.
    """

    def __init__(self, message, Errors = None):
        super(BufferSpliceError, self).__init__(message)
        self.Errors = Errors
class NoOp(object):
    '''An operation that does nothing at all.

    Serves as the identity element of the operation algebra: applying,
    transforming or mirroring it always yields a no-op.
    '''
    # A NoOp can never conflict, so no concurrency ID is required.
    requiresCID = False

    def toString(self):
        return "NoOp()"

    def toHTML(self):
        return "NoOp()"

    def apply(self, buffer):
        '''Apply this NoOp to *buffer*: by definition, nothing happens.'''

    def transform(self, other):
        '''Transform this NoOp against any other operation.

        @type Operations.NoOp
        '''
        return NoOp()

    def mirror(self):
        '''Mirror this NoOp; the result is again a NoOp.

        @type Operations.NoOp
        '''
        return NoOp()
class Insert(object):
    '''Instantiates a new Insert operation object.
    @class An operation that inserts a Buffer at a certain offset.
    @param {Number} position The offset at which the text is to be inserted.
    @param {Buffer} text The Buffer to insert.
    '''
    # Two concurrent Inserts at the same position conflict, so a concurrency
    # ID (cid) is needed to decide which of the two gets transformed.
    requiresCID = True
    def __init__(self, position, text):
        self.position = position
        # Keep a private copy so later mutation of the caller's buffer
        # cannot retroactively change this operation.
        self.text = text.copy()
    def __repr__(self):
        return self.toString()
    def toString(self):
        return "Insert(%s, %s)" % (self.position, self.text)
    def toHTML(self):
        return "Insert(%s, %s)" % (self.position, self.text.toHTML())
    def apply(self, buffer):
        '''Applies the insert operation to the given Buffer.
        @param {Buffer} buffer The buffer in which the insert operation is to be performed.
        '''
        buffer.splice(self.position, 0, self.text)
    def cid(self, other):
        '''Computes the concurrency ID against another Insert operation.
        @param {Operations.Insert} other
        @returns The operation that is to be transformed, or None (implicit)
        when both operations insert at the same position and the tie cannot
        be decided here -- the caller (State.translate) then resolves it.
        @type Operations.Insert
        '''
        if self.position < other.position:
            return other
        if self.position > other.position:
            return self
    def getLength(self):
        '''Returns the total length of data to be inserted by this insert operation,
        in characters.
        @type Number
        '''
        return self.text.getLength()
    def transform(self, other, cid = None):
        '''Transforms this Insert operation against another operation, returning the
        resulting operation as a new object.
        @param {Operation} other The operation to transform against.
        @param {Operation} [cid] The cid to take into account in the case of
        conflicts.
        @type Operation
        '''
        if isinstance(other, NoOp):
            # NoOp changes nothing; return an equivalent copy of ourselves.
            return Insert(self.position, self.text)
        if isinstance(other, Split):
            #We transform against the first component of the split operation first.
            if cid == self:
                transformFirst = self.transform(other.first, self)
            else:
                transformFirst = self.transform(other.first, other.first)
            #The second part of the split operation is transformed against its first part.
            newSecond = other.second.transform(other.first)
            if cid == self:
                transformSecond = transformFirst.transform(newSecond, transformFirst)
            else:
                transformSecond = transformFirst.transform(newSecond, newSecond)
            return transformSecond
        pos1 = self.position
        str1 = self.text
        pos2 = other.position
        if isinstance(other, Insert):
            str2 = other.text
            # Equal positions are decided by the concurrency ID; if cid is
            # neither request, this method implicitly returns None.
            if pos1 < pos2 or (pos1 == pos2 and cid == other):
                return Insert(pos1, str1)
            if pos1 > pos2 or (pos1 == pos2 and cid == self):
                return Insert(pos1 + str2.getLength(), str1)
        elif isinstance(other, Delete):
            len2 = other.getLength()
            if pos1 >= pos2 + len2:
                # We insert entirely after the deleted range: shift left.
                return Insert(pos1 - len2, str1)
            if pos1 < pos2:
                # We insert entirely before the deleted range: unchanged.
                return Insert(pos1, str1)
            if pos1 >= pos2 and pos1 < pos2 + len2:
                # We insert inside the deleted range: snap to its start.
                return Insert(pos2, str1)
    def mirror(self):
        '''Returns the inversion of this Insert operation.
        @type Operations.Delete
        '''
        return Delete(self.position, self.text.copy())
class Delete(object):
    '''Instantiates a new Delete operation object.
    Delete operations can be reversible or not, depending on how they are
    constructed. Delete operations constructed with a Buffer object know which
    text they are removing from the buffer and can therefore be mirrored,
    whereas Delete operations knowing only the amount of characters to be
    removed are non-reversible.
    @class An operation that removes a range of characters in the target
    buffer.
    @param {Number} position The offset of the first character to remove.
    @param what The data to be removed. This can be either a numeric value
    or a Buffer object.
    '''
    # Delete/Delete conflicts are always resolvable without a concurrency ID.
    requiresCID = False
    def __init__(self, position, what, recon = None):
        self.position = position
        if isinstance(what, Buffer):
            # Reversible form: keep a private copy of the removed text.
            self.what = what.copy()
        else:
            # Non-reversible form: 'what' is just a character count.
            self.what = what
        # Recon data collects text "transformed away" from this operation so
        # that a non-reversible Delete can later be reconstructed.
        if recon != None:
            self.recon = recon
        else:
            self.recon = Recon()
    def __repr__(self):
        return self.toString()
    def toString(self):
        return 'Delete(%s, %s)' % (self.position, self.what)
    def toHTML(self):
        if isinstance(self.what, Buffer):
            return 'Delete(%s, %s)' % (self.position, self.what.toHTML())
        else:
            return 'Delete(%s, %s)' % (self.position, self.what)
    def isReversible(self):
        '''Determines whether this Delete operation is reversible.
        @type Boolean
        '''
        return isinstance(self.what, Buffer)
    def apply(self, buffer):
        '''Applies this Delete operation to a buffer.
        @param {Buffer} buffer The buffer to which the operation is to be applied.
        '''
        buffer.splice(self.position, self.getLength())
    def cid(self, other):
        # Deletes never need a concurrency ID (requiresCID is False).
        pass
    def getLength(self):
        '''Returns the number of characters that this Delete operation removes.
        @type Number
        '''
        if(self.isReversible()):
            return self.what.getLength()
        else:
            return self.what
    def split(self, at):
        '''Splits this Delete operation into two Delete operations at the given
        offset. The resulting Split operation will consist of two Delete
        operations which, when combined, affect the same range of text as the
        original Delete operation.
        @param {Number} at Offset at which to split the Delete operation.
        @type Operations.Split
        '''
        if self.isReversible():
            #This is a reversible Delete operation. No need to do any processing for recon data.
            return Split(
                Delete(self.position, self.what.slice(0, at)),
                Delete(self.position + at, self.what.slice(at))
            )
        else:
            '''This is a non-reversible Delete operation that might carry recon
            data. We need to split that data accordingly between the two new components.
            '''
            recon1 = Recon()
            recon2 = Recon()
            # Bug fix: the original iterated the segment *objects* but then
            # indexed the list with them and used the JS list method .push();
            # iterate the segments directly and use list.append instead.
            for segment in self.recon.segments:
                if segment.offset < at:
                    recon1.segments.append(segment)
                else:
                    recon2.segments.append(ReconSegment(segment.offset - at, segment.buffer))
            return Split(Delete(self.position, at, recon1), Delete(self.position + at, self.what - at, recon2))
    @classmethod
    def getAffectedString(cls, operation, buffer):
        '''Returns the range of text in a buffer that this Delete or Split-Delete operation removes.
        @param operation A Split-Delete or Delete operation
        @param {Buffer} buffer
        @type Buffer
        '''
        if isinstance(operation, Split):
            #The other operation is a Split operation. We call this function again recursively for each component.
            part1 = Delete.getAffectedString(operation.first, buffer)
            part2 = Delete.getAffectedString(operation.second, buffer)
            part2.splice(0, 0, part1)
            return part2
        elif isinstance(operation, Delete):
            '''In the process of determining the affected string, we also
            have to take into account the data that has been "transformed away"
            from the Delete operation and which is stored in the Recon object.
            '''
            reconBuffer = buffer.slice(operation.position, operation.position + operation.getLength())
            operation.recon.restore(reconBuffer)
            return reconBuffer
    def makeReversible(self, transformed, state):
        '''Makes this Delete operation reversible, given a transformed version of
        this operation in a buffer matching its state. If this Delete operation is
        already reversible, this function simply returns a copy of it.
        @param {Operations.Delete} transformed A transformed version of this operation.
        @param {State} state The state in which the transformed operation could be applied.
        '''
        if isinstance(self.what, Buffer):
            return Delete(self.position, self.what)
        else:
            return Delete(self.position, Delete.getAffectedString(transformed, state.buffer))
    def merge(self, other):
        '''Merges a Delete operation with another one. The resulting Delete operation
        removes the same range of text as the two separate Delete operations would
        when executed sequentially.
        @param {Operations.Delete} other
        @type Operations.Delete
        '''
        if self.isReversible():
            if not other.isReversible():
                raise Exception('Cannot merge reversible operations with non-reversible ones')
            newBuffer = self.what.copy()
            newBuffer.splice(newBuffer.getLength(), 0, other.what)
            return Delete(self.position, newBuffer)
        else:
            newLength = self.getLength() + other.getLength()
            return Delete(self.position, newLength)
    def transform(self, other, cid = None):
        '''Transforms this Delete operation against another operation.
        @param {Operation} other
        @param {Operation} [cid]
        '''
        if isinstance(other, NoOp):
            return Delete(self.position, self.what, self.recon)
        if isinstance(other, Split):
            #We transform against the first component of the split operation first.
            if cid == self:
                transformFirst = self.transform(other.first, self)
            else:
                transformFirst = self.transform(other.first, other.first)
            #The second part of the split operation is transformed against its first part.
            newSecond = other.second.transform(other.first)
            if cid == self:
                transformSecond = transformFirst.transform(newSecond, transformFirst)
            else:
                transformSecond = transformFirst.transform(newSecond, newSecond)
            return transformSecond
        pos1 = self.position
        len1 = self.getLength()
        pos2 = other.position
        len2 = other.getLength()
        if isinstance(other, Insert):
            if pos2 >= pos1 + len1:
                # Insert is after our range: unchanged.
                return Delete(pos1, self.what, self.recon)
            if pos2 <= pos1:
                # Insert is before our range: shift right.
                return Delete(pos1 + len2, self.what, self.recon)
            if pos2 > pos1 and pos2 < pos1 + len1:
                # Insert lands inside our range: split around it.
                result = self.split(pos2 - pos1)
                result.second.position += len2
                return result
        elif isinstance(other, Delete):
            if pos1 + len1 <= pos2:
                return Delete(pos1, self.what, self.recon)
            if pos1 >= pos2 + len2:
                return Delete(pos1 - len2, self.what, self.recon)
            if pos2 <= pos1 and pos2 + len2 >= pos1 + len1:
                ''' 1XXXXX|
                    2-------------|
                This operation falls completely within the range of another,
                i.e. all data has already been removed. The resulting
                operation removes nothing.
                '''
                if self.isReversible():
                    newData = Buffer()
                else:
                    newData = 0
                newRecon = self.recon.update(0, other.what.slice(pos1 - pos2, pos1 - pos2 + len1))
                return Delete(pos2, newData, newRecon)
            if pos2 <= pos1 and pos2 + len2 < pos1 + len1:
                ''' 1XXXX----|
                    2--------|
                The first part of this operation falls within the range of another.
                '''
                result = self.split(pos2 + len2 - pos1)
                result.second.position = pos2
                result.second.recon = self.recon.update(0, other.what.slice(pos1 - pos2))
                return result.second
            if pos2 > pos1 and pos2 + len2 >= pos1 + len1:
                ''' 1----XXXXX|
                    2--------|
                The second part of this operation falls within the range of another.
                '''
                result = self.split(pos2 - pos1)
                result.first.recon = self.recon.update(result.first.getLength(), other.what.slice(0, pos1 + len1 - pos2))
                return result.first
            if pos2 > pos1 and pos2 + len2 < pos1 + len1:
                '''1-----XXXXXX---|
                   2------|
                Another operation falls completely within the range of this operation. We remove that part.
                '''
                #We split this operation two times: first at the beginning of the second operation, then at the end of the second operation.
                r1 = self.split(pos2 - pos1)
                r2 = r1.second.split(len2)
                #The resulting Delete operation consists of the first and the last part, which are merged back into a single operation.
                result = r1.first.merge(r2.second)
                result.recon = self.recon.update(pos2 - pos1, other.what)
                return result
    def mirror(self):
        '''Mirrors this Delete operation. Returns an operation which inserts the text
        that this Delete operation would remove. If this Delete operation is not
        reversible, the return value is undefined (None).
        @type Operations.Insert
        '''
        if self.isReversible():
            return Insert(self.position, self.what.copy())
class Split(object):
    '''An operation which wraps two different operations into a single
    object. This is necessary for example in order to transform a Delete
    operation against an Insert operation which falls into the range that
    is to be deleted.
    @param {Operation} first
    @param {Operation} second
    '''
    # Conflict resolution must know which side to transform, hence a CID.
    requiresCID = True

    def __init__(self, first, second):
        self.first = first
        self.second = second

    def __repr__(self):
        return self.toString()

    def toString(self):
        return 'Split(%s, %s)' % (self.first, self.second)

    def toHTML(self):
        return 'Split(%s, %s)' % (self.first.toHTML(), self.second.toHTML())

    def apply(self, buffer):
        '''Applies both components to *buffer* sequentially. The second
        component is implicitly transformed against the first so that its
        offsets remain valid after the first has been applied.
        @param {Buffer} buffer The buffer to which this operation is to be applied.
        '''
        self.first.apply(buffer)
        self.second.transform(self.first).apply(buffer)

    def cid(self, foo):
        # A Split does not compute a concurrency ID itself.
        return None

    def transform(self, other, cid=None):
        '''Transforms this Split operation against another operation by
        transforming both components individually.
        @param {Operation} other
        @param {Operation} [cid]
        '''
        if cid == self:
            return Split(self.first.transform(other, self.first),
                         self.second.transform(other, self.second))
        if cid == other:
            return Split(self.first.transform(other, other),
                         self.second.transform(other, other))
        # No CID supplied -- operations are not expected to take this path.
        return Split(self.first.transform(other), self.second.transform(other))

    def mirror(self):
        '''Mirrors this Split: transform the second component against the
        first, then mirror both components individually.
        @type Operations.Split
        '''
        shifted = self.second.transform(self.first)
        return Split(self.first.mirror(), shifted.mirror())
class Recon(object):
    '''Creates a new Recon object.
    @class The Recon class is a helper class which collects the parts of a
    Delete operation that are lost during transformation. This is used to
    reconstruct the text of a remote Delete operation that was issued in a
    previous state, and thus to make such a Delete operation reversible.
    @param {Recon} [recon] Pre-initialize the Recon object with data from another object.
    '''
    def __init__(self, recon = None):
        if recon != None:
            # Bug fix: the original used the JS idiom recon.segments.slice(0),
            # which raises AttributeError on a Python list. Take a shallow
            # copy so the two Recon objects do not share one segment list.
            self.segments = list(recon.segments)
        else:
            self.segments = []
    def __repr__(self):
        return self.toString()
    def toString(self):
        return 'Recon(%s)' % self.segments
    def update(self, offset, buffer):
        '''Creates a new Recon object with an additional piece of text to be restored later.
        @param {Number} offset
        @param {Buffer} buffer
        @type {Recon}
        '''
        newRecon = Recon(self)
        if isinstance(buffer, Buffer):
            # Bug fix: list.append, not the JS method .push().
            newRecon.segments.append(ReconSegment(offset, buffer))
        return newRecon
    def restore(self, buffer):
        '''Restores the recon data in the given buffer.
        @param {Buffer} buffer
        '''
        for segment in self.segments:
            buffer.splice(segment.offset, 0, segment.buffer)
class ReconSegment(object):
    '''A chunk of removed text plus the offset at which it must be
    re-inserted when a Recon object restores it.
    @param {Number} offset
    @param {Buffer} buffer
    '''
    def __init__(self, offset, buffer):
        self.offset = offset
        # Keep our own copy so later edits to the source buffer do not
        # retroactively change the recon data.
        self.buffer = buffer.copy()

    def toString(self):
        return '(%s,%s)' % (self.offset, self.buffer)
class DoRequest(object):
    '''Represents a request made by a user at a certain time.
    @param {Number} user The user that issued the request
    @param {Vector} vector The time at which the request was issued
    @param {Operation} operation
    '''
    def __init__(self, user, vector, operation):
        self.user = user
        self.vector = vector
        self.operation = operation

    def __repr__(self):
        return self.toString()

    def toString(self):
        return 'DoRequest(%s, %s, %s)' % (
            self.user, self.vector.toString(), self.operation.toString())

    def toHTML(self):
        return 'DoRequest(%s, %s, %s)' % (
            self.user, self.vector.toHTML(), self.operation.toHTML())

    def copy(self):
        return DoRequest(self.user, self.vector, self.operation)

    def execute(self, state):
        '''Applies the request to a State: perform the operation on the
        state's buffer, then advance the issuer's vector component.
        @param {State} state The state to which the request should be applied.
        '''
        self.operation.apply(state.buffer)
        state.vector = state.vector.incr(self.user, 1)
        return self

    def transform(self, other, cid):
        '''Transforms this request against another request.
        @param {DoRequest} other
        @param {DoRequest} [cid] The concurrency ID of the two requests. This is
        the request that is to be transformed in case of conflicting operations.
        @type DoRequest
        '''
        if isinstance(self.operation, NoOp):
            newOperation = NoOp()
        else:
            # Map the request-level CID down to the operation level.
            op_cid = None
            if cid == self:
                op_cid = self.operation
            if cid == other:
                op_cid = other.operation
            newOperation = self.operation.transform(other.operation, op_cid)
        return DoRequest(self.user, self.vector.incr(other.user), newOperation)

    def mirror(self, amount):
        '''Mirrors the request. This inverts the operation and increases the
        issuer's component of the request time by the given amount.
        @param {Number} [amount] The amount by which the request time is
        increased. Defaults to 1 when not an integer.
        @type DoRequest
        '''
        if not isinstance(amount, int):
            amount = 1
        return DoRequest(self.user,
                         self.vector.incr(self.user, amount),
                         self.operation.mirror())

    def fold(self, user, amount):
        '''Folds the request along another user's axis. This increases that
        user's component by the given amount, which must be a multiple of 2.
        @type DoRequest
        '''
        if amount % 2 == 1:
            raise Exception('Fold amounts must be multiples of 2.')
        return DoRequest(self.user, self.vector.incr(user, amount), self.operation)

    def makeReversible(self, translated, state):
        '''Makes a request reversible, given a translated version of this request
        and a State object. This only applies to requests carrying a Delete
        operation; for all others, this does nothing.
        @param {DoRequest} translated This request translated to the given state
        @param {State} state The state which is used to make the request reversible.
        @type DoRequest
        '''
        result = self.copy()
        if isinstance(self.operation, Delete):
            result.operation = self.operation.makeReversible(translated.operation, state)
        return result
class UndoRequest(object):
    '''Instantiates a new undo request.
    @class Represents an undo request made by an user at a certain time.
    @param {Number} user
    @param {Vector} vector The time at which the request was issued.
    '''
    def __init__(self, user, vector):
        self.user = user
        self.vector = vector
    def __repr__(self):
        return self.toString()
    def toString(self):
        return 'UndoRequest(%s, %s)' % (self.user, self.vector)
    def toHTML(self):
        return 'UndoRequest(%s, %s)' % (self.user, self.vector.toHTML())
    def copy(self):
        return UndoRequest(self.user, self.vector)
    def associatedRequest(self, log):
        '''Finds the corresponding DoRequest to this UndoRequest.
        Walks the log backwards from this request's position, using a
        nesting counter so that intervening undo/redo pairs by the same
        user are skipped correctly. Returns None if no match is found.
        @param {Array} log The log to search
        @type DoRequest
        '''
        sequence = 1
        try:
            index = log.index(self)
        except ValueError:
            index = -1
        if index == -1:
            index = len(log) - 1
        # Bug fix: iterate down to and including index 0. The original
        # range(index, 0, -1) stopped before the first log entry, so an
        # associated request at the head of the log was never found.
        for i in range(index, -1, -1):
            # Skip ourselves and requests from other users.
            if log[i] == self or log[i].user != self.user:
                continue
            # Skip requests issued after the one we are undoing.
            if log[i].vector.get(self.user) > self.vector.get(self.user):
                continue
            if isinstance(log[i], UndoRequest):
                sequence += 1
            else:
                sequence -= 1
            if sequence == 0:
                return log[i]
class RedoRequest(object):
    '''Instantiates a new redo request.
    @class Represents an redo request made by an user at a certain time.
    @param {Number} user
    @param {Vector} vector The time at which the request was issued.
    '''
    def __init__(self, user, vector):
        self.user = user
        # NOTE(review): the original also called Operations.set_user(user)
        # here, but no 'Operations' name is defined anywhere in this module,
        # which made every RedoRequest construction raise NameError; the
        # call was removed to match UndoRequest.__init__.
        self.vector = vector
    def __repr__(self):
        return self.toString()
    def toString(self):
        return 'RedoRequest(%s, %s)' % (self.user, self.vector)
    def toHTML(self):
        return 'RedoRequest(%s, %s)' % (self.user, self.vector.toHTML())
    def copy(self):
        return RedoRequest(self.user, self.vector)
    def associatedRequest(self, log):
        '''Finds the corresponding UndoRequest to this RedoRequest.
        Walks the log backwards, mirroring UndoRequest.associatedRequest.
        Bug fixes versus the original: 'log.length' (a JS-ism) raised
        AttributeError, and the while-loop never decremented its index when
        a 'continue' fired, looping forever. Returns None if no match.
        @param {Array} log The log to search
        @type UndoRequest
        '''
        sequence = 1
        try:
            index = log.index(self)
        except ValueError:
            index = -1
        if index == -1:
            index = len(log) - 1
        for i in range(index, -1, -1):
            # Skip ourselves and requests from other users.
            if log[i] == self or log[i].user != self.user:
                continue
            # Skip requests issued after the one we are redoing.
            if log[i].vector.get(self.user) > self.vector.get(self.user):
                continue
            if isinstance(log[i], RedoRequest):
                sequence += 1
            else:
                sequence -= 1
            if sequence == 0:
                return log[i]
class Vector(object):
    '''@class Stores state vectors.
    Components are kept as a list of {'id': int, 'op': int} dicts.
    @param [value] Pre-initialize the vector with existing values. This can be
    a Vector object or a string of the form "1:2;3:4;5:6".
    '''
    # Matches one "userid:opcount" component of the string representation.
    vector_time = re.compile(r'^(?P<id>\d+):(?P<op>\d+)$')
    def __init__(self, value = None):
        self.users = []
        if type(value).__name__ == 'Vector':
            for _user in value.users:
                # NOTE(review): components with id <= 0 are silently dropped
                # here -- presumably user ids start at 1; confirm.
                if _user['id'] > 0:
                    self.users.append({'id': _user['id'], 'op': _user['op']})
        elif isinstance(value, str):
            if value != '':
                for pair in value.split(';'):
                    match = self.vector_time.match(pair)
                    if match == None:
                        continue
                    user_op = match.groupdict()
                    # Bug fix: the regex groups are strings; convert them once
                    # so stored ids/ops are always ints. The original compared
                    # int ids against string ids (never equal), so duplicate
                    # components were appended instead of merged.
                    uid = int(user_op['id'])
                    op = int(user_op['op'])
                    found = False
                    for index, _user in enumerate(self.users):
                        if _user['id'] == uid:
                            found = True
                            self.users[index]['op'] = op
                    if not found:
                        self.users.append({'id': uid, 'op': op})
    def __repr__(self):
        return self.toString()
    def eachUser(self, callback):
        '''Helper function to easily iterate over all users in this vector.
        @param {function} callback Callback function which is called with the user
        and the value of each component. If this callback function returns false,
        iteration is stopped at that point and false is returned.
        @type Boolean
        @returns True if the callback function has never returned false; returns False otherwise.
        '''
        for index, _user in enumerate(self.users):
            if callback(int(_user['id']), int(_user['op']), index) == False:
                return False
        return True
    def toString(self):
        '''Returns this vector as a string of the form "1:2;3:4;5:6"
        @type String
        '''
        components = []
        def Func(uid, op, index):
            # Zero components are omitted from the string form.
            if(op > 0):
                components.append("%s:%s" % (uid, op))
        self.eachUser(Func)
        components.sort()
        return ';'.join(components)
    def toHTML(self):
        # Bug fix: the original was declared without 'self' and raised
        # TypeError whenever it was called on an instance.
        return self.toString()
    def add(self, other):
        '''Returns the sum of two vectors.
        @param {Vector} other
        '''
        result = Vector(self)
        def Func(uid, op, index):
            # Bug fix: locate the matching component by user id. The original
            # wrote result.users[index] using *other*'s component index, which
            # corrupts the sum (or raises IndexError) when the two vectors do
            # not list the same users in the same order.
            found = False
            for i, _user in enumerate(result.users):
                if _user['id'] == uid:
                    found = True
                    result.users[i] = {'id': uid, 'op': result.get(uid) + op}
            if not found:
                result.users.append({'id': uid, 'op': op})
        other.eachUser(Func)
        return result
    def copy(self):
        '''Returns a copy of this vector.'''
        return Vector(self)
    def get(self, user):
        '''Returns a specific component of this vector, or 0 if it is not defined.
        @param {Number} user Index of the component to be returned
        '''
        for _user in self.users:
            if _user['id'] == user and _user['op'] != None:
                return _user['op']
        return 0
    def causallyBefore(self, other):
        '''Calculates whether this vector is smaller than or equal to another vector.
        This means that all components of this vector are less than or equal to
        their corresponding components in the other vector.
        @param {Vector} other The vector to compare to
        @type Boolean
        '''
        def Func(uid, op, index):
            return op <= other.get(uid)
        return self.eachUser(Func)
    def equals(self, other):
        '''Determines whether this vector is equal to another vector. This is true if
        all components of this vector are present in the other vector and match
        their values, and vice-versa.
        @param {Vector} other The vector to compare to
        @type Boolean
        '''
        def Func1(uid, op, index):
            return other.get(uid) == op
        eq1 = self.eachUser(Func1)
        def Func2(uid, op, index):
            return self.get(uid) == op
        eq2 = other.eachUser(Func2)
        return eq1 and eq2
    def incr(self, user, by = None):
        '''Returns a new vector with a specific component increased by a given
        amount.
        @param {Number} user Component to increase
        @param {Number} [by] Amount by which to increase the component (default 1)
        @type Vector
        '''
        result = Vector(self)
        if by == None:
            by = 1
        found = False
        for index, _user in enumerate(result.users):
            if _user['id'] == user:
                found = True
                result.users[index]['op'] = result.get(user) + by
        if not found:
            result.users.append({'id': user, 'op': result.get(user) + by})
        return result
    @classmethod
    def leastCommonSuccessor(cls, v1, v2):
        '''Calculates the least common successor of two vectors.
        @param {Vector} v1
        @param {Vector} v2
        @type Vector
        '''
        result = v1.copy()
        def Func(uid, op, index):
            val1 = v1.get(uid)
            val2 = v2.get(uid)
            if val1 < val2:
                # Bug fix: update/append by user id instead of writing
                # result.users[index] with v2's component index (which is
                # wrong or out of range when the vectors are not aligned).
                found = False
                for i, _user in enumerate(result.users):
                    if _user['id'] == uid:
                        found = True
                        result.users[i] = {'id': uid, 'op': val2}
                if not found:
                    result.users.append({'id': uid, 'op': val2})
        v2.eachUser(Func)
        return result
class State(object):
'''Instantiates a new state object.
@class Stores and manipulates the state of a document by keeping track of
its state vector, content and history of executed requests.
@param {Buffer} [buffer] Pre-initialize the buffer
@param {Vector} [vector] Set the initial state vector
'''
def __init__(self, buffer = None, vector = None):
if isinstance(buffer, Buffer):
self.buffer = buffer.copy()
else:
self.buffer = Buffer()
self.vector = Vector(vector)
self.request_queue = []
self.log = []
self.cache = {}
def translate(self, request, targetVector, noCache = False):
'''Translates a request to the given state vector.
@param {Request} request The request to translate
@param {Vector} targetVector The target state vector
@param {Boolean} [nocache] Set to true to bypass the translation cache.
'''
if isinstance(request, DoRequest) and request.vector.equals(targetVector):
#If the request vector is not an undo/redo request and is already at the desired state,
#simply return the original request since there is nothing to do.
return request.copy()
#Before we attempt to translate the request, we check whether it is cached already.
#[DoRequest(3, , Insert(3, bc)), 2:1]
#DoRequest(3, , Insert(3, bc)),2:1
cache_key = str([request, targetVector])
if self.cache != None and not noCache:
if not cache_key in self.cache:
self.cache[cache_key] = self.translate(request, targetVector, True)
#FIXME: translated requests are not cleared from the cache, so this might fill up considerably.
return self.cache[cache_key]
if isinstance(request, UndoRequest) or isinstance(request, RedoRequest):
'''If we're dealing with an undo or redo request, we first try to see
whether a late mirror is possible. For this, we retrieve the
associated request to this undo/redo and see whether it can be
translated and then mirrored to the desired state.
'''
assocReq = request.associatedRequest(self.log)
'''The state we're trying to mirror at corresponds to the target
vector, except the component of the issuing user is changed to
match the one from the associated request.
'''
mirrorAt = targetVector.copy()
#usermod mirrorAt[request.user]
#mirrorAt.users[str(request.user)] = assocReq.vector.get(request.user)
#users
found = False
for index, _user in enumerate(mirrorAt.users):
if _user['id'] == request.user:
found = True
mirrorAt.users[index]['op'] = assocReq.vector.get(request.user)
if not found:
mirrorAt.users.append({'id':_user['id'],'op': assocReq.vector.get(request.user)})
if self.reachable(mirrorAt):
translated = self.translate(assocReq, mirrorAt)
mirrorBy = targetVector.get(request.user) - mirrorAt.get(request.user)
mirrored = translated.mirror(mirrorBy)
return mirrored
#If mirrorAt is not reachable, we need to mirror earlier and then
#perform a translation afterwards, which is attempted next.
for index, _user in enumerate(self.vector.users):
#We now iterate through all users to see how we can translate the request to the desired state.
#The request's issuing user is left out since it is not possible to transform or fold a request along its own user
if _user['id'] == request.user:
continue
#We can only transform against requests that have been issued
#between the translated request's vector and the target vector.
#PROBLABLY HERE
if targetVector.get(_user['id']) <= request.vector.get(_user['id']):
continue
#Fetch the last request by this user that contributed to the current state vector.
lastRequest = self.requestByUser(_user['id'], targetVector.get(_user['id']) - 1)
if isinstance(lastRequest, UndoRequest) or isinstance(lastRequest, RedoRequest):
#When the last request was an undo/redo request, we can try to
#"fold" over it. By just skipping the do/undo or undo/redo pair,
#we pretend that nothing has changed and increase the state vector.
foldBy = targetVector.get(_user['id']) - lastRequest.associatedRequest(self.log).vector.get(_user['id'])
if(targetVector.get(_user['id']) >= foldBy):
foldAt = targetVector.incr(_user['id'], -foldBy)
#We need to make sure that the state we're trying to fold at is reachable and that the request
#we're translating was issued before it.
if self.reachable(foldAt) and request.vector.causallyBefore(foldAt):
translated = self.translate(request, foldAt)
folded = translated.fold(_user['id'], foldBy)
return folded
#If folding and mirroring is not possible, we can transform this
#request against other users' requests that have contributed to
#the current state vector.
transformAt = targetVector.incr(_user['id'], -1)
if transformAt.get(_user['id']) >= 0 and self.reachable(transformAt):
lastRequest = self.requestByUser(_user['id'], transformAt.get(_user['id']))
r1 = self.translate(request, transformAt)
r2 = self.translate(lastRequest, transformAt)
cid_req = None
if r1.operation.requiresCID:
#For the Insert operation, we need to check whether it is
#possible to determine which operation is to be transformed.
cid = r1.operation.cid(r2.operation)
if not cid:
#When two requests insert text at the same position,
#the transformation result is undefined. We therefore
#need to perform some tricks to decide which request
#has to be transformed against which.
#The first try is to transform both requests to a
#common successor before the transformation vector.
lcs = Vector.leastCommonSuccessor(request.vector, lastRequest.vector)
if self.reachable(lcs):
r1t = self.translate(request, lcs)
r2t = self.translate(lastRequest, lcs)
#We try to determine the CID at this vector, which
#hopefully yields a result.
cidt = r1t.operation.cid(r2t.operation)
if cidt == r1t.operation:
cid = r1.operation
elif cidt == r2t.operation:
cid = r2.operation
if not cid:
#If we arrived here, we couldn't decide for a CID,
#so we take the last resort: use the user ID of the
#requests to decide which request is to be
#transformed. This behavior is specified in the
#Infinote protocol.
if r1.user < r2.user:
cid = r1.operation
if r1.user > r2.user:
cid = r2.operation
if cid == r1.operation:
cid_req = r1
if cid == r2.operation:
cid_req = r2
return r1.transform(r2, cid_req)
raise Exception('Could not find a translation path')
def queue(self, request):
'''Adds a request to the request queue.
@param {Request} request The request to be queued.
'''
self.request_queue.append(request)
def canExecute(self, request = None):
'''Checks whether a given request can be executed in the current state.
@type Boolean
'''
if request == None:
return False
if isinstance(request, UndoRequest) or isinstance(request, RedoRequest):
return request.associatedRequest(self.log) != None
else:
return request.vector.causallyBefore(self.vector)
def execute(self, request = None):
    '''Executes a request that is executable.

    @param {Request} [request] The request to be executed. If omitted, an
    executable request is picked from the request queue instead.
    @returns The request that has been executed, or None if no request
    has been executed.
    '''
    if request is None:
        # Pick the first executable request from the queue.
        for index, pending in enumerate(self.request_queue):
            if self.canExecute(pending):
                request = pending
                # BUG FIX: Python lists have no splice(); remove the picked
                # request in place instead.
                del self.request_queue[index]
                break
        else:
            # No queued request is executable yet. (The previous code
            # re-appended the last queued request here, duplicating it.)
            return None
    if not self.canExecute(request):
        # Not executable yet - put it (back) in the queue.
        self.queue(request)
        return None
    request = request.copy()
    if isinstance(request, (UndoRequest, RedoRequest)):
        # For undo and redo requests, we change their vector to the vector
        # of the original request, but leave the issuing user's component
        # untouched.
        assocReq = request.associatedRequest(self.log)
        newVector = Vector(assocReq.vector)
        found = False
        for index, _user in enumerate(newVector.users):
            if _user['id'] == request.user:
                found = True
                newVector.users[index]['op'] = request.vector.get(request.user)
        if not found:
            newVector.users.append({'id':request.user,'op':request.vector.get(request.user)})
        request.vector = newVector
    translated = self.translate(request, self.vector)
    if isinstance(request, DoRequest) and isinstance(request.operation, Delete):
        # Since each request might have to be mirrored at some point, it
        # needs to be reversible. Delete requests are not reversible by
        # default, but we can make them reversible.
        self.log.append(request.makeReversible(translated, self))
    else:
        self.log.append(request)
    translated.execute(self)
    # BUG FIX: the original try/getattr pattern also swallowed
    # AttributeErrors raised *inside* the onexecute callback; look the
    # callback up explicitly instead.
    callback = getattr(self, 'onexecute', None)
    if callback is not None:
        callback(translated)
    return translated
def executeAll(self):
    '''Executes all queued requests that are ready for execution.

    Repeatedly calls execute() until it reports that nothing was executed.
    '''
    while self.execute():
        pass
def reachable(self, vector):
    '''Determines whether a given state is reachable by translation.

    Checks reachability per user component of this state's vector.

    @param {Vector} vector
    @type Boolean
    '''
    check = lambda uid, op, index: self.reachableUser(vector, uid)
    return self.vector.eachUser(check)
def reachableUser(self, vector, user):
    '''Checks whether the given user's component of a vector is reachable
    by walking this user's portion of the request log backwards.

    @param {Vector} vector
    @param {Number} user
    @type Boolean
    '''
    component = vector.get(user)
    while True:
        if component == 0:
            return True
        candidate = self.requestByUser(user, component - 1)
        if candidate == None:
            return False
        if isinstance(candidate, DoRequest):
            return candidate.vector.causallyBefore(vector)
        # Undo/redo: continue from the request it is associated with.
        component = candidate.associatedRequest(self.log).vector.get(user)
def requestByUser(self, user, getIndex):
    '''Retrieve an user's request by its index.

    Scans the log in order and returns the (getIndex+1)-th request issued
    by the given user, or None when that user issued fewer requests.

    @param {Number} user
    @param {Number} getIndex The number of the request to be returned
    '''
    seen = 0
    for entry in self.log:
        if entry.user != user:
            continue
        if seen == getIndex:
            return entry
        seen += 1
class Segment(object):
    '''Stores a chunk of text together with the user it was written by.

    @param {Number} user User ID
    @param {String} text Text
    '''
    def __init__(self, user, text):
        self.user = user
        self.text = text

    def toString(self):
        '''Returns the plain text of this segment.'''
        return self.text

    def toHTML(self):
        '''Returns this segment as an HTML span with its text escaped.

        BUG FIX: '&' must be escaped first. Escaping it last re-escaped the
        ampersands produced by the '<' and '>' replacements, turning '<'
        into '&amp;lt;'.
        '''
        text = self.text.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
        return '<span class="segment user-' + str(self.user) + '">' + text + '</span>'

    def copy(self):
        '''Creates a copy of this segment.

        @returns {Segment} A copy of this segment.
        '''
        return Segment(self.user, self.text)
class Buffer(object):
    '''Holds multiple Segments and provides methods for modifying them at a
    character level.

    @param {Array} [segments] The segments that this buffer should be
    pre-filled with (they are deep-copied).
    '''
    def __init__(self, segments = None):
        self.segments = []
        if segments != None:
            for segment in segments:
                self.segments.append(segment.copy())

    def __repr__(self):
        return self.toString()

    def toString(self):
        '''Returns the concatenated plain text of all segments.'''
        output = ''
        for segment in self.segments:
            output += segment.toString()
        return output

    def toHTML(self):
        '''Returns the buffer as HTML, one span per segment.

        BUG FIX: the previous version iterated "for index in enumerate(...)"
        which yields (index, segment) tuples and crashed on
        self.segments[index]; iterate the segments directly.
        '''
        result = '<span class="buffer">'
        for segment in self.segments:
            result += segment.toHTML()
        result += '</span>'
        return result

    def copy(self):
        '''Creates a deep copy of this buffer.

        @type Buffer
        '''
        return self.slice(0)

    def compact(self):
        '''Cleans up the buffer by removing empty segments and combining
        adjacent segments by the same user.
        '''
        segmentIndex = 0
        while segmentIndex < len(self.segments):
            if len(self.segments[segmentIndex].text) == 0:
                # This segment is empty, remove it.
                # BUG FIX: Python lists have no splice(); use pop().
                self.segments.pop(segmentIndex)
                continue
            elif segmentIndex < len(self.segments) - 1 and self.segments[segmentIndex].user == self.segments[segmentIndex+1].user:
                # Two consecutive segments are from the same user; merge them
                # into one.
                self.segments[segmentIndex].text += self.segments[segmentIndex+1].text
                self.segments.pop(segmentIndex+1)
                continue
            segmentIndex += 1

    def getLength(self):
        '''Calculates the total number of characters contained in this buffer.

        @returns Total character count in this buffer
        @type Number
        '''
        length = 0
        for segment in self.segments:
            length += len(segment.text)
        return length

    def slice(self, begin, end = None):
        '''Extracts a deep copy of a range of characters in this buffer and
        returns it as a new Buffer object.

        @param {Number} begin Index of first character to return
        @param {Number} [end] Index of last character (exclusive). If not
        provided, defaults to the total length of the buffer.
        @returns New buffer containing the specified character range.
        @type Buffer
        '''
        result = Buffer()
        segmentIndex = 0
        segmentOffset = 0
        sliceBegin = begin
        sliceEnd = end
        if sliceEnd == None:
            # PORTABILITY FIX: sys.maxint does not exist on Python 3; the
            # buffer's total length is an equivalent upper bound.
            sliceEnd = self.getLength()
        while segmentIndex < len(self.segments) and sliceEnd >= segmentOffset:
            segment = self.segments[segmentIndex]
            if sliceBegin - segmentOffset < len(segment.text) and sliceEnd - segmentOffset > 0:
                newText = segment.text[sliceBegin - segmentOffset:sliceEnd - segmentOffset]
                newSegment = Segment(segment.user, newText)
                result.segments.append(newSegment)
                sliceBegin += len(newText)
            segmentOffset += len(segment.text)
            segmentIndex += 1
        result.compact()
        return result

    def splice(self, index, remove, insert = None):
        '''Like the Array "splice" method, this method allows for removing and
        inserting text in a buffer at a character level.

        @param {Number} index The offset at which to begin inserting/removing
        @param {Number} [remove] Number of characters to remove
        @param {Buffer} [insert] Buffer to insert
        '''
        if index > self.getLength():
            raise BufferSpliceError('Buffer splice operation out of bounds')
        segmentIndex = 0
        segmentOffset = 0
        spliceIndex = index
        spliceCount = remove
        spliceInsertOffset = None
        while segmentIndex < len(self.segments):
            segment = self.segments[segmentIndex]
            if spliceIndex >= 0 and spliceIndex < len(segment.text):
                # This segment is part of the region to splice.
                # Store the text that this splice operation removes to adjust
                # the splice offset correctly later on.
                removedText = segment.text[spliceIndex:(spliceIndex + spliceCount)]
                if spliceIndex == 0:
                    # We're splicing at the beginning of a segment.
                    if spliceIndex + spliceCount < len(segment.text):
                        # Remove a part at the beginning.
                        if spliceInsertOffset == None:
                            spliceInsertOffset = segmentIndex
                        segment.text = segment.text[(spliceIndex + spliceCount):]
                    else:
                        # Remove the entire segment.
                        if spliceInsertOffset == None:
                            spliceInsertOffset = segmentIndex
                        segment.text = ""
                        # BUG-PRONE spot in the original JS port: use pop(),
                        # Python lists have no splice().
                        self.segments.pop(segmentIndex)
                        segmentIndex -= 1
                else:
                    # We're splicing inside a segment.
                    if spliceInsertOffset == None:
                        spliceInsertOffset = segmentIndex + 1
                    if spliceIndex + spliceCount < len(segment.text):
                        # Remove a part in between. Note that if
                        # spliceCount == 0, this only splits the segment in
                        # two, which is necessary in case we want to insert
                        # new segments later.
                        splicePost = Segment(segment.user, segment.text[(spliceIndex + spliceCount):])
                        segment.text = segment.text[0:spliceIndex]
                        self.segments.insert(segmentIndex + 1, splicePost)
                    else:
                        # Remove a part at the end.
                        segment.text = segment.text[0:spliceIndex]
                spliceCount -= len(removedText)
            if spliceIndex < len(segment.text) and spliceCount == 0:
                # We have removed the specified amount of characters. No need
                # to continue this loop since nothing remains to be done.
                if spliceInsertOffset == None:
                    spliceInsertOffset = spliceIndex
                break
            spliceIndex -= len(segment.text)
            segmentIndex += 1
        if isinstance(insert, Buffer):
            # If a buffer has been given, we insert copies of its segments at
            # the specified position.
            if spliceInsertOffset == None:
                spliceInsertOffset = len(self.segments)
            for insertIndex, segment in enumerate(insert.segments):
                self.segments.insert(spliceInsertOffset + insertIndex, segment.copy())
        # Clean up since the splice operation might have fragmented segments.
        self.compact()
|
Knygar/hwios
|
services/web_ui/models/infinote.py
|
Python
|
bsd-3-clause
| 65,205
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class BatchAccount(Resource):
    """Contains information about an Azure Batch account.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: The ID of the resource.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource.
    :vartype type: str
    :ivar location: The location of the resource.
    :vartype location: str
    :ivar tags: The tags of the resource.
    :vartype tags: dict
    :ivar account_endpoint: The account endpoint used to interact with the
     Batch service.
    :vartype account_endpoint: str
    :ivar provisioning_state: The provisioned state of the resource.
     Possible values include: 'Invalid', 'Creating', 'Deleting',
     'Succeeded', 'Failed', 'Cancelled'
    :vartype provisioning_state: str or :class:`ProvisioningState
     <azure.mgmt.batch.models.ProvisioningState>`
    :ivar pool_allocation_mode: The allocation mode to use for creating
     pools in the Batch account. Possible values include: 'BatchService',
     'UserSubscription'
    :vartype pool_allocation_mode: str or :class:`PoolAllocationMode
     <azure.mgmt.batch.models.PoolAllocationMode>`
    :ivar key_vault_reference: A reference to the Azure key vault associated
     with the Batch account.
    :vartype key_vault_reference: :class:`KeyVaultReference
     <azure.mgmt.batch.models.KeyVaultReference>`
    :ivar auto_storage: The properties and status of any auto-storage
     account associated with the Batch account.
    :vartype auto_storage: :class:`AutoStorageProperties
     <azure.mgmt.batch.models.AutoStorageProperties>`
    :ivar dedicated_core_quota: The dedicated core quota for this Batch
     account.
    :vartype dedicated_core_quota: int
    :ivar low_priority_core_quota: The low-priority core quota for this
     Batch account.
    :vartype low_priority_core_quota: int
    :ivar pool_quota: The pool quota for this Batch account.
    :vartype pool_quota: int
    :ivar active_job_and_job_schedule_quota: The active job and job schedule
     quota for this Batch account.
    :vartype active_job_and_job_schedule_quota: int
    """

    # Every exposed attribute is server-populated, hence marked read-only.
    _validation = {
        key: {'readonly': True}
        for key in (
            'id', 'name', 'type', 'location', 'tags', 'account_endpoint',
            'provisioning_state', 'pool_allocation_mode',
            'key_vault_reference', 'auto_storage', 'dedicated_core_quota',
            'low_priority_core_quota', 'pool_quota',
            'active_job_and_job_schedule_quota',
        )
    }

    # Maps Python attribute names to wire-format keys and msrest types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'account_endpoint': {'key': 'properties.accountEndpoint', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
        'pool_allocation_mode': {'key': 'properties.poolAllocationMode', 'type': 'PoolAllocationMode'},
        'key_vault_reference': {'key': 'properties.keyVaultReference', 'type': 'KeyVaultReference'},
        'auto_storage': {'key': 'properties.autoStorage', 'type': 'AutoStorageProperties'},
        'dedicated_core_quota': {'key': 'properties.dedicatedCoreQuota', 'type': 'int'},
        'low_priority_core_quota': {'key': 'properties.lowPriorityCoreQuota', 'type': 'int'},
        'pool_quota': {'key': 'properties.poolQuota', 'type': 'int'},
        'active_job_and_job_schedule_quota': {'key': 'properties.activeJobAndJobScheduleQuota', 'type': 'int'},
    }

    def __init__(self):
        super(BatchAccount, self).__init__()
        # Server-populated properties start out unset on the client side.
        for prop in (
            'account_endpoint', 'provisioning_state', 'pool_allocation_mode',
            'key_vault_reference', 'auto_storage', 'dedicated_core_quota',
            'low_priority_core_quota', 'pool_quota',
            'active_job_and_job_schedule_quota',
        ):
            setattr(self, prop, None)
|
SUSE/azure-sdk-for-python
|
azure-mgmt-batch/azure/mgmt/batch/models/batch_account.py
|
Python
|
mit
| 4,919
|
"""Provide info to system health."""
import os
from homeassistant.components import system_health
from homeassistant.core import HomeAssistant, callback
# Endpoints derived from the HASSIO environment variable (the address of the
# Supervisor as provided to this container).
SUPERVISOR_PING = f"http://{os.environ['HASSIO']}/supervisor/ping"
# Observer listens on port 4357 — assumes HASSIO holds a bare host without a
# port, since a port is appended here; TODO confirm.
OBSERVER_URL = f"http://{os.environ['HASSIO']}:4357"
@callback
def async_register(
    hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
    """Register system health callbacks.

    Registers system_health_info as this integration's info provider, with
    /hassio as the associated info page path.
    """
    register.async_register_info(system_health_info, "/hassio")
async def system_health_info(hass: HomeAssistant):
    """Get info for the info page.

    Collects supervisor/host details and reachability checks into a dict
    consumed by the system health panel.
    """
    info = hass.components.hassio.get_info()
    host_info = hass.components.hassio.get_host_info()
    supervisor_info = hass.components.hassio.get_supervisor_info()

    # A truthy flag maps to True; otherwise report a failure entry that
    # links to the hassio system page.
    healthy = (
        True
        if supervisor_info.get("healthy")
        else {
            "type": "failed",
            "error": "Unhealthy",
            "more_info": "/hassio/system",
        }
    )
    supported = (
        True
        if supervisor_info.get("supported")
        else {
            "type": "failed",
            "error": "Unsupported",
            "more_info": "/hassio/system",
        }
    )

    information = {
        "host_os": host_info.get("operating_system"),
        "update_channel": info.get("channel"),
        "supervisor_version": info.get("supervisor"),
        "docker_version": info.get("docker"),
        "disk_total": f"{host_info.get('disk_total')} GB",
        "disk_used": f"{host_info.get('disk_used')} GB",
        "healthy": healthy,
        "supported": supported,
    }

    if info.get("hassos") is not None:
        # Board info is only available on Home Assistant OS installs.
        information["board"] = hass.components.hassio.get_os_info().get("board")

    information["supervisor_api"] = system_health.async_check_can_reach_url(
        hass, SUPERVISOR_PING, OBSERVER_URL
    )
    information["version_api"] = system_health.async_check_can_reach_url(
        hass,
        f"https://version.home-assistant.io/{info.get('channel')}.json",
        "/hassio/system",
    )
    information["installed_addons"] = ", ".join(
        f"{addon['name']} ({addon['version']})"
        for addon in supervisor_info.get("addons", [])
    )
    return information
|
tboyce1/home-assistant
|
homeassistant/components/hassio/system_health.py
|
Python
|
apache-2.0
| 2,241
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import errno
import httplib
import json
import socket
import sys
from telemetry.core import exceptions
class DevToolsClientConnectionError(exceptions.Error):
  """Raised when the HTTP connection to Chrome DevTools fails."""
  pass
class DevToolsClientUrlError(DevToolsClientConnectionError):
  """Raised when the DevTools endpoint refuses the connection
  (ECONNREFUSED), e.g. the port is wrong or Chrome is not listening."""
  pass
class DevToolsHttp(object):
  """A helper class to send and parse DevTools HTTP requests.

  This class maintains a persistent http connection to Chrome devtools.
  Ideally, owners of instances of this class should call Disconnect() before
  disposing of the instance. Otherwise, the connection will not be closed
  until the instance is garbage collected.

  NOTE: this is Python 2 code (httplib and the three-expression
  "raise Class, args, traceback" form, which re-raises while preserving the
  original traceback).
  """

  def __init__(self, devtools_port):
    # Port on 127.0.0.1 where Chrome's DevTools HTTP endpoint listens.
    self._devtools_port = devtools_port
    # Lazily created httplib.HTTPConnection; None while disconnected.
    self._conn = None

  def __del__(self):
    # Best-effort cleanup if the owner forgot to call Disconnect().
    self.Disconnect()

  def _Connect(self, timeout):
    """Attempts to establish a connection to Chrome devtools."""
    assert not self._conn
    try:
      host_port = '127.0.0.1:%i' % self._devtools_port
      self._conn = httplib.HTTPConnection(host_port, timeout=timeout)
    except (socket.error, httplib.HTTPException) as e:
      # Re-raise as our error type, keeping the original traceback (py2).
      raise DevToolsClientConnectionError, (e,), sys.exc_info()[2]

  def Disconnect(self):
    """Closes the HTTP connection."""
    if not self._conn:
      return

    try:
      self._conn.close()
    except (socket.error, httplib.HTTPException) as e:
      raise DevToolsClientConnectionError, (e,), sys.exc_info()[2]
    finally:
      # Always drop the handle so a later Request() reconnects cleanly.
      self._conn = None

  def Request(self, path, timeout=30):
    """Sends a request to Chrome devtools.

    This method lazily creates an HTTP connection, if one does not already
    exist.

    Args:
      path: The DevTools URL path, without the /json/ prefix.
      timeout: Timeout defaults to 30 seconds.

    Raises:
      DevToolsClientConnectionError: If the connection fails.
    """
    if not self._conn:
      self._Connect(timeout)

    endpoint = '/json'
    if path:
      endpoint += '/' + path

    # Apply the timeout to the live socket when one exists; otherwise it is
    # picked up when the connection is (re)established.
    if self._conn.sock:
      self._conn.sock.settimeout(timeout)
    else:
      self._conn.timeout = timeout

    try:
      # By default, httplib avoids going through the default system proxy.
      self._conn.request('GET', endpoint)
      response = self._conn.getresponse()
      return response.read()
    except (socket.error, httplib.HTTPException) as e:
      # Drop the broken connection before translating the error.
      self.Disconnect()
      if isinstance(e, socket.error) and e.errno == errno.ECONNREFUSED:
        raise DevToolsClientUrlError, (e,), sys.exc_info()[2]
      raise DevToolsClientConnectionError, (e,), sys.exc_info()[2]

  def RequestJson(self, path, timeout=30):
    """Sends a request and parse the response as JSON.

    Args:
      path: The DevTools URL path, without the /json/ prefix.
      timeout: Timeout defaults to 30 seconds.

    Raises:
      DevToolsClientConnectionError: If the connection fails.
      ValueError: If the response is not a valid JSON.
    """
    return json.loads(self.Request(path, timeout))
|
sgraham/nope
|
tools/telemetry/telemetry/core/backends/chrome_inspector/devtools_http.py
|
Python
|
bsd-3-clause
| 3,070
|
"""Constants used in modbus integration."""
from enum import Enum
from homeassistant.const import (
CONF_ADDRESS,
CONF_BINARY_SENSORS,
CONF_COVERS,
CONF_LIGHTS,
CONF_SENSORS,
CONF_SWITCHES,
Platform,
)
# configuration names
# Keys accepted in the modbus YAML configuration schema.
CONF_BAUDRATE = "baudrate"
CONF_BYTESIZE = "bytesize"
CONF_CLIMATES = "climates"
CONF_CLOSE_COMM_ON_ERROR = "close_comm_on_error"
CONF_COILS = "coils"
CONF_CURRENT_TEMP = "current_temp_register"
CONF_CURRENT_TEMP_REGISTER_TYPE = "current_temp_register_type"
CONF_DATA_TYPE = "data_type"
CONF_FANS = "fans"
CONF_HUB = "hub"
CONF_INPUTS = "inputs"
CONF_INPUT_TYPE = "input_type"
CONF_LAZY_ERROR = "lazy_error_count"
CONF_MAX_TEMP = "max_temp"
CONF_MIN_TEMP = "min_temp"
CONF_MSG_WAIT = "message_wait_milliseconds"
CONF_PARITY = "parity"
CONF_REGISTER = "register"
CONF_REGISTER_TYPE = "register_type"
CONF_REGISTERS = "registers"
CONF_RETRIES = "retries"
CONF_RETRY_ON_EMPTY = "retry_on_empty"
CONF_PRECISION = "precision"
CONF_SCALE = "scale"
CONF_SLAVE_COUNT = "slave_count"
CONF_STATE_CLOSED = "state_closed"
CONF_STATE_CLOSING = "state_closing"
CONF_STATE_OFF = "state_off"
CONF_STATE_ON = "state_on"
CONF_STATE_OPEN = "state_open"
CONF_STATE_OPENING = "state_opening"
CONF_STATUS_REGISTER = "status_register"
CONF_STATUS_REGISTER_TYPE = "status_register_type"
CONF_STEP = "temp_step"
CONF_STOPBITS = "stopbits"
CONF_SWAP = "swap"
CONF_SWAP_BYTE = "byte"
CONF_SWAP_NONE = "none"
CONF_SWAP_WORD = "word"
CONF_SWAP_WORD_BYTE = "word_byte"
CONF_TARGET_TEMP = "target_temp_register"
CONF_VERIFY = "verify"
CONF_VERIFY_REGISTER = "verify_register"
CONF_VERIFY_STATE = "verify_state"
CONF_WRITE_TYPE = "write_type"

# supported connection/transport types
RTUOVERTCP = "rtuovertcp"
SERIAL = "serial"
TCP = "tcp"
UDP = "udp"

# service call attributes
ATTR_ADDRESS = CONF_ADDRESS
ATTR_HUB = CONF_HUB
ATTR_UNIT = "unit"
ATTR_SLAVE = "slave"
ATTR_VALUE = "value"
class DataType(str, Enum):
    """Data types used by sensor etc.

    Inherits from str so members compare equal to (and serialize as) the
    plain strings written in YAML configuration.
    """

    CUSTOM = "custom"
    STRING = "string"
    INT8 = "int8"
    INT16 = "int16"
    INT32 = "int32"
    INT64 = "int64"
    UINT8 = "uint8"
    UINT16 = "uint16"
    UINT32 = "uint32"
    UINT64 = "uint64"
    FLOAT16 = "float16"
    FLOAT32 = "float32"
    FLOAT64 = "float64"
# call types
# Identify which modbus function is used for a read/write operation.
CALL_TYPE_COIL = "coil"
CALL_TYPE_DISCRETE = "discrete_input"
CALL_TYPE_REGISTER_HOLDING = "holding"
CALL_TYPE_REGISTER_INPUT = "input"
CALL_TYPE_WRITE_COIL = "write_coil"
CALL_TYPE_WRITE_COILS = "write_coils"
CALL_TYPE_WRITE_REGISTER = "write_register"
CALL_TYPE_WRITE_REGISTERS = "write_registers"
CALL_TYPE_X_COILS = "coils"
CALL_TYPE_X_REGISTER_HOLDINGS = "holdings"

# service calls
SERVICE_WRITE_COIL = "write_coil"
SERVICE_WRITE_REGISTER = "write_register"
SERVICE_STOP = "stop"
SERVICE_RESTART = "restart"

# dispatcher signals
SIGNAL_STOP_ENTITY = "modbus.stop"
SIGNAL_START_ENTITY = "modbus.start"

# integration names
DEFAULT_HUB = "modbus_hub"
DEFAULT_SCAN_INTERVAL = 15  # seconds
DEFAULT_SLAVE = 1
DEFAULT_STRUCTURE_PREFIX = ">f"
DEFAULT_TEMP_UNIT = "C"
MODBUS_DOMAIN = "modbus"

ACTIVE_SCAN_INTERVAL = 2  # limit to force an extra update

# (platform, config key) pairs this integration can set up.
PLATFORMS = (
    (Platform.BINARY_SENSOR, CONF_BINARY_SENSORS),
    (Platform.CLIMATE, CONF_CLIMATES),
    (Platform.COVER, CONF_COVERS),
    (Platform.LIGHT, CONF_LIGHTS),
    (Platform.FAN, CONF_FANS),
    (Platform.SENSOR, CONF_SENSORS),
    (Platform.SWITCH, CONF_SWITCHES),
)
|
rohitranjan1991/home-assistant
|
homeassistant/components/modbus/const.py
|
Python
|
mit
| 3,373
|
"""General utilities."""
import scipy.constants as sc
# Useful equations -----------------------------------------------------------
def get_beam_radius(z, w0, wavelength):
    """Get beam radius (w) at a given distance from the beam waist.

    Note
    ----
    All parameters should be defined in the same units, including the
    output!

    Parameters
    ----------
    z : float
        distance from beam waist
    w0 : float
        beam waist
    wavelength : float
        wavelength

    Returns
    -------
    float
        beam radius, i.e., the radial distance at which the E-field drops
        off to 1/e of its on-axis value
    """
    expansion_ratio = (wavelength * z) / (sc.pi * w0 ** 2)
    return w0 * (1 + expansion_ratio ** 2) ** 0.5
def get_radius_of_curvature(z, w0, wavelength):
    """Get radius of curvature (R) at a given distance (z).

    Note
    ----
    All parameters should be defined in the same units, including the
    output!

    Parameters
    ----------
    z : float
        distance from beam waist
    w0 : float
        beam waist
    wavelength : float
        wavelength

    Returns
    -------
    float
        radius of curvature (R)
    """
    confocal_ratio = (sc.pi * w0 ** 2) / (wavelength * z)
    return z * (1 + confocal_ratio ** 2)
def get_confocal_distance(w0, wavelength):
    """Get confocal distance (z_c).

    This value generally defines the separation between near- and
    far-field.

    Note
    ----
    Beam waist and wavelength should be defined in the same units!

    Parameters
    ----------
    w0 : float
        beam waist
    wavelength : float
        wavelength

    Returns
    -------
    float
        confocal distance (z_c)
    """
    waist_area = w0 ** 2
    return sc.pi * waist_area / wavelength
def get_far_field_angle(w0, wavelength):
    """Get far-field divergence angle (theta_0).

    Note
    ----
    Beam waist and wavelength should be defined in the same units!

    Parameters
    ----------
    w0 : float
        beam waist
    wavelength : float
        wavelength

    Returns
    -------
    float
        far-field divergence angle (theta_0), i.e., the angle at which the
        beam waist grows in the far-field
    """
    normalized = wavelength / sc.pi
    return normalized / w0
def get_fwhm(w0, wavelength):
    """Get full width of half-maximum angle (theta_FWHM).

    Note
    ----
    Beam waist and wavelength should be defined in the same units!

    Parameters
    ----------
    w0 : float
        beam waist
    wavelength : float
        wavelength

    Returns
    -------
    float
        full width of half-maximum angle (theta_FWHM)
    """
    theta0 = get_far_field_angle(w0, wavelength)
    return 1.18 * theta0
# Set units ------------------------------------------------------------------
def set_d_units(units):
    """Read distance units.

    Parameters
    ----------
    units : str
        distance units: 'um', 'mm', 'cm', 'dm', 'm' or 'km'
        (case-insensitive)

    Returns
    -------
    float
        multiplier

    Raises
    ------
    ValueError
        if the unit string is not recognized
    """
    multipliers = {
        'km': sc.kilo,
        'm': 1,
        'dm': sc.deci,
        'cm': sc.centi,
        'mm': sc.milli,
        'um': sc.micro,
    }
    key = units.lower()
    if key not in multipliers:
        raise ValueError("Distance units not recognized.")
    return multipliers[key]
def set_f_units(units):
    """Read frequency units.

    Parameters
    ----------
    units : str
        frequency units: 'Hz', 'kHz', 'MHz', 'GHz' or 'THz'
        (case-insensitive)

    Returns
    -------
    float
        multiplier

    Raises
    ------
    ValueError
        if the unit string is not recognized
    """
    multipliers = {
        'thz': sc.tera,
        'ghz': sc.giga,
        'mhz': sc.mega,
        'khz': sc.kilo,
        'hz': 1,
    }
    key = units.lower()
    if key not in multipliers:
        raise ValueError("Frequency units not recognized.")
    return multipliers[key]
|
garrettj403/GaussOpt
|
gaussopt/util.py
|
Python
|
mit
| 3,966
|
"""
Density Plot
============
_thumb: .5, .5
"""
import arviz as az

# Load the two eight-schools example datasets that ship with ArviZ.
centered_data = az.load_arviz_data("centered_eight")
non_centered_data = az.load_arviz_data("non_centered_eight")

# Overlay the posterior densities of "theta" from both parametrisations,
# rendered with the Bokeh backend (shade adds a light fill under each
# curve — see az.plot_density for its exact semantics).
ax = az.plot_density(
    [centered_data, non_centered_data],
    data_labels=["Centered", "Non Centered"],
    var_names=["theta"],
    shade=0.1,
    backend="bokeh",
)
|
arviz-devs/arviz
|
examples/bokeh/bokeh_plot_density.py
|
Python
|
apache-2.0
| 355
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-06-19 18:29
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.10).

    Adds per-user and per-team edit/delete permissions plus title ordering
    to Project, and makes Project.user a required FK to the auth user model
    (existing rows are backfilled with user id 4).
    """

    dependencies = [
        ('organization_projects', '0085_auto_20190619_2023'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='project',
            options={'ordering': ['title'], 'permissions': (('user_edit', 'Mezzo - User can edit its own content'), ('user_delete', 'Mezzo - User can delete its own content'), ('team_edit', "Mezzo - User can edit his team's content"), ('team_delete', "Mezzo - User can delete his team's content")), 'verbose_name': 'project', 'verbose_name_plural': 'projects'},
        ),
        migrations.AlterField(
            model_name='project',
            name='user',
            # default=4 only backfills existing rows; preserve_default=False
            # drops it from the final field definition.
            field=models.ForeignKey(default=4, on_delete=django.db.models.deletion.CASCADE, related_name='projects', to=settings.AUTH_USER_MODEL, verbose_name='Author'),
            preserve_default=False,
        ),
    ]
|
Ircam-Web/mezzanine-organization
|
organization/projects/migrations/0086_auto_20190619_2029.py
|
Python
|
agpl-3.0
| 1,124
|
"""
Given a linked list, reverse the nodes of a linked list k at a time and return its modified list.
If the number of nodes is not a multiple of k then left-out nodes in the end should remain as it is.
You may not alter the values in the nodes, only nodes itself may be changed.
Only constant memory is allowed.
For example,
Given this linked list: 1->2->3->4->5
For k = 2, you should return: 2->1->4->3->5
For k = 3, you should return: 3->2->1->4->5
"""
# Definition for singly-linked list.
class ListNode(object):
    # Singly-linked list node: payload value plus a next pointer.
    def __init__(self, x):
        """Create a detached node holding value x (next starts as None)."""
        self.val = x
        self.next = None
class Solution(object):
    def reverseKGroup(self, head, k):
        """
        Reverse the list's nodes k at a time using a stack of size k;
        a trailing group shorter than k is left in its original order.

        :type head: ListNode
        :type k: int
        :rtype: ListNode
        """
        # rhead: head of the result (set after the first full group is
        # reversed). n counts the nodes collected for the current group.
        rhead, n, stack = None, 0, []
        node = head
        pre_head = ListNode(-1)
        pre_head.next = head
        # Loop while the previous round consumed a full group (n == 0).
        while node and n == 0:
            # Collect up to k consecutive nodes on the stack.
            while node and n < k:
                stack.append(node)
                node = node.next
                n += 1
            if k > 0 and len(stack) == k:
                n = 0
                # Popping reverses the group: its last node becomes head.
                head = stack.pop()
                node = head
                next = node.next  # NOTE: shadows builtin next(); kept as-is.
                if not rhead:
                    rhead = head
                while stack:
                    pre = stack.pop()
                    node.next = pre
                    node = pre
                # Reattach the reversed group to the remainder of the list
                # and to the previous group's tail.
                node.next = next
                pre_head.next = head
                pre_head = node
                node = next
        # rhead is None when no full group existed; return the input head.
        return rhead if rhead else head
# Manual check: build the list 1->2->3->4->5 and reverse it in groups of
# two (expected result: 2->1->4->3->5).
s = Solution()
h = ListNode(1)
h.next = ListNode(2)
h.next.next = ListNode(3)
h.next.next.next = ListNode(4)
h.next.next.next.next = ListNode(5)
# BUG FIX: the demo previously passed a fresh single-node list
# (ListNode(1)) instead of the list h it just built.
r = s.reverseKGroup(h, 2)
|
dichen001/Go4Jobs
|
JackChen/linked_list/25. Reverse Nodes in k-Group.py
|
Python
|
gpl-3.0
| 1,744
|
'''
Qrcode example application
==========================
Author: Mathieu Virbel <mat@meltingrocks.com>
License:
Copyright (c) 2013 Mathieu Virbel <mat@meltingrocks.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Featuring:
- Android camera initialization
- Show the android camera into a Android surface that act as an overlay
- New AndroidWidgetHolder that control any android view as an overlay
- New ZbarQrcodeDetector that use AndroidCamera / PreviewFrame + zbar to
detect Qrcode.
'''
__all__ = ('ScannerAndroid', )

from kivy.utils import platform

# This module only works on Android; fail the import early elsewhere so
# callers can fall back to a different scanner implementation.
if platform != 'android':
    raise ImportError
from electrum_gui.kivy.qr_scanner import ScannerBase
from kivy.properties import ObjectProperty, NumericProperty
from kivy.uix.widget import Widget
from kivy.uix.anchorlayout import AnchorLayout
from kivy.graphics import Color, Line
from jnius import autoclass, PythonJavaClass, java_method, cast
from android.runnable import run_on_ui_thread
# preload java classes
System = autoclass('java.lang.System')
# zbar's JNI bindings need libiconv loaded before the scanner classes.
System.loadLibrary('iconv')
from android.config import JAVA_NAMESPACE
PythonActivity = autoclass(JAVA_NAMESPACE + '.PythonActivity')
Camera = autoclass('android.hardware.Camera')
ImageScanner = autoclass('net.sourceforge.zbar.ImageScanner')
Image = autoclass('net.sourceforge.zbar.Image')
Symbol = autoclass('net.sourceforge.zbar.Symbol')
Config = autoclass('net.sourceforge.zbar.Config')
SurfaceView = autoclass('android.view.SurfaceView')
LayoutParams = autoclass('android.view.ViewGroup$LayoutParams')
ImageFormat = autoclass('android.graphics.ImageFormat')
LinearLayout = autoclass('android.widget.LinearLayout')
class PreviewCallback(PythonJavaClass):
    '''Interface used to get back the preview frame of the Android Camera.

    jnius proxy implementing android.hardware.Camera$PreviewCallback; the
    Java-side signature declared by @java_method is binding-critical and
    must not change.
    '''
    __javainterfaces__ = ('android.hardware.Camera$PreviewCallback', )

    def __init__(self, callback):
        super(PreviewCallback, self).__init__()
        # Python callable invoked as callback(camera, data) for each frame.
        self.callback = callback

    @java_method('([BLandroid/hardware/Camera;)V')
    def onPreviewFrame(self, data, camera):
        # Called from Java with the raw preview bytes; forward to Python.
        self.callback(camera, data)
class SurfaceHolderCallback(PythonJavaClass):
    '''Interface used to know exactly when the Surface used for the Android
    Camera will be created and changed.

    jnius proxy for android.view.SurfaceHolder$Callback; only the
    "changed" event is forwarded to Python.
    '''
    __javainterfaces__ = ('android.view.SurfaceHolder$Callback', )

    def __init__(self, callback):
        super(SurfaceHolderCallback, self).__init__()
        # Python callable invoked as callback(fmt, width, height).
        self.callback = callback

    @java_method('(Landroid/view/SurfaceHolder;III)V')
    def surfaceChanged(self, surface, fmt, width, height):
        self.callback(fmt, width, height)

    @java_method('(Landroid/view/SurfaceHolder;)V')
    def surfaceCreated(self, surface):
        # Creation and destruction are intentionally ignored.
        pass

    @java_method('(Landroid/view/SurfaceHolder;)V')
    def surfaceDestroyed(self, surface):
        pass
class AndroidWidgetHolder(Widget):
    '''Act as a placeholder for an Android widget.

    It will automatically add / remove the android view depending if the widget
    view is set or not. The android view will act as an overlay, so any graphics
    instruction in this area will be covered by the overlay.
    '''

    view = ObjectProperty(allownone=True)
    '''Must be an Android View
    '''

    def __init__(self, **kwargs):
        self._old_view = None
        from kivy.core.window import Window
        self._window = Window
        # Size/position are managed explicitly (see on_size/on_x/on_y),
        # not by a Kivy layout.
        kwargs['size_hint'] = (None, None)
        super(AndroidWidgetHolder, self).__init__(**kwargs)

    def on_view(self, instance, view):
        '''Swap the overlayed Android view: detach the previous one from
        its parent layout, then attach the new one on top of the activity.'''
        if self._old_view is not None:
            layout = cast(LinearLayout, self._old_view.getParent())
            layout.removeView(self._old_view)
            self._old_view = None
        if view is None:
            return
        activity = PythonActivity.mActivity
        activity.addContentView(view, LayoutParams(*self.size))
        view.setZOrderOnTop(True)
        view.setX(self.x)
        # Convert from Kivy's bottom-left origin to Android's top-left
        # origin.
        view.setY(self._window.height - self.y - self.height)
        self._old_view = view

    def on_size(self, instance, size):
        '''Keep the Android view's layout params in sync with our size.'''
        if self.view:
            params = self.view.getLayoutParams()
            params.width = self.width
            params.height = self.height
            self.view.setLayoutParams(params)
            self.view.setY(self._window.height - self.y - self.height)

    def on_x(self, instance, x):
        if self.view:
            self.view.setX(x)

    def on_y(self, instance, y):
        if self.view:
            # Same coordinate flip as in on_view.
            self.view.setY(self._window.height - self.y - self.height)
class AndroidCamera(Widget):
    '''Widget for controlling an Android Camera.

    Dispatches `on_preview_frame(camera, data)` for every preview frame
    (raw buffer, NV21 by Android default).
    '''

    # Index of the hardware camera to open (0 is usually the back camera).
    index = NumericProperty(0)

    __events__ = ('on_preview_frame', )

    # NOTE(review): `running` is assigned in start()/stop() but never declared
    # as a property nor initialized in __init__ -- confirm no caller reads it
    # before start()/stop() has run.

    def __init__(self, **kwargs):
        self._holder = None
        self._android_camera = None
        super(AndroidCamera, self).__init__(**kwargs)
        # The holder overlays the Android SurfaceView on top of this widget.
        self._holder = AndroidWidgetHolder(size=self.size, pos=self.pos)
        self.add_widget(self._holder)

    @run_on_ui_thread
    def stop(self):
        self.running = False
        if self._android_camera is None:
            return
        # Unregister the callback before releasing so the camera cannot
        # deliver frames after teardown.
        self._android_camera.setPreviewCallback(None)
        self._android_camera.release()
        self._android_camera = None
        self._holder.view = None

    @run_on_ui_thread
    def start(self):
        self.running = True
        if self._android_camera is not None:
            return
        self._android_camera = Camera.open(self.index)
        # create a fake surfaceview to get the previewCallback working.
        self._android_surface = SurfaceView(PythonActivity.mActivity)
        surface_holder = self._android_surface.getHolder()
        # create our own surface holder to correctly call the next method when
        # the surface is ready
        self._android_surface_cb = SurfaceHolderCallback(self._on_surface_changed)
        surface_holder.addCallback(self._android_surface_cb)
        # attach the android surfaceview to our android widget holder
        self._holder.view = self._android_surface
        # set orientation (90 degrees -> portrait preview)
        self._android_camera.setDisplayOrientation(90)

    def _on_surface_changed(self, fmt, width, height):
        # internal, called when the android SurfaceView is ready
        # FIXME: if this size is not supported by the camera, setParameters
        # will fail.
        params = self._android_camera.getParameters()
        params.setPreviewSize(width, height)
        self._android_camera.setParameters(params)
        # now that we know the camera size, we'll create 2 buffers for faster
        # result (using the callback-buffer approach, as described in the
        # Android Camera documentation); it also reduces GC pressure.
        bpp = ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8.
        buf = '\x00' * int(width * height * bpp)
        self._android_camera.addCallbackBuffer(buf)
        self._android_camera.addCallbackBuffer(buf)
        # create a PreviewCallback to get back the onPreviewFrame into python
        self._previewCallback = PreviewCallback(self._on_preview_frame)
        # connect everything and start the preview
        self._android_camera.setPreviewCallbackWithBuffer(self._previewCallback);
        self._android_camera.setPreviewDisplay(self._android_surface.getHolder())
        self._android_camera.startPreview();

    def _on_preview_frame(self, camera, data):
        # internal, called by the PreviewCallback when onPreviewFrame is
        # received
        self.dispatch('on_preview_frame', camera, data)
        # reintroduce the data buffer into the queue
        self._android_camera.addCallbackBuffer(data)

    def on_preview_frame(self, camera, data):
        # Default event handler; subscribers bind to `on_preview_frame`.
        pass

    def on_size(self, instance, size):
        if self._holder:
            self._holder.size = size

    def on_pos(self, instance, pos):
        if self._holder:
            self._holder.pos = pos
class ScannerAndroid(ScannerBase):
    '''Widget that uses the AndroidCamera and zbar to detect qrcodes.
    When found, the `symbols` property will be updated.
    '''

    def __init__(self, **kwargs):
        super(ScannerAndroid, self).__init__(**kwargs)
        self._camera = AndroidCamera(
            size=self.camera_size,
            size_hint=(None, None))
        self._camera.bind(on_preview_frame=self._detect_qrcode_frame)
        self.add_widget(self._camera)
        # create a scanner used for detecting qrcode
        self._scanner = ImageScanner()
        # Disable every symbology, then re-enable QR codes only.
        self._scanner.setConfig(0, Config.ENABLE, 0)
        self._scanner.setConfig(Symbol.QRCODE, Config.ENABLE, 1)
        # Scan every 3rd row/column -- trades some robustness for speed.
        self._scanner.setConfig(0, Config.X_DENSITY, 3)
        self._scanner.setConfig(0, Config.Y_DENSITY, 3)

    def start(self):
        '''Start the camera, and therefore the detection.'''
        self._camera.start()

    def stop(self):
        '''Stop the camera and the detection.'''
        self._camera.stop()

    def _detect_qrcode_frame(self, instance, camera, data):
        # the image we got by default from a camera is using the NV21 format
        # zbar only allow Y800/GREY image, so we first need to convert,
        # then start the detection on the image
        if not self.get_root_window():
            # Widget is no longer attached to a window: shut the camera down.
            self.stop()
            return
        parameters = camera.getParameters()
        size = parameters.getPreviewSize()
        barcode = Image(size.width, size.height, 'NV21')
        barcode.setData(data)
        barcode = barcode.convert('Y800')
        result = self._scanner.scanImage(barcode)
        if result == 0:
            # Nothing detected on this frame.
            self.symbols = []
            return
        # we detected qrcode! extract and dispatch them
        # NOTE(review): `Qrcode` is expected to be provided by ScannerBase --
        # confirm against the base class.
        symbols = []
        it = barcode.getSymbols().iterator()
        while it.hasNext():
            symbol = it.next()
            qrcode = ScannerAndroid.Qrcode(
                type=symbol.getType(),
                data=symbol.getData(),
                quality=symbol.getQuality(),
                count=symbol.getCount(),
                bounds=symbol.getBounds())
            symbols.append(qrcode)
        self.symbols = symbols
'''
# can't work, due to the overlay.
def on_symbols(self, instance, value):
if self.show_bounds:
self.update_bounds()
def update_bounds(self):
self.canvas.after.remove_group('bounds')
if not self.symbols:
return
with self.canvas.after:
Color(1, 0, 0, group='bounds')
for symbol in self.symbols:
x, y, w, h = symbol.bounds
x = self._camera.right - x - w
y = self._camera.top - y - h
Line(rectangle=[x, y, w, h], group='bounds')
'''
if __name__ == '__main__':
    # Standalone demo: a detector widget plus start/stop buttons.
    from kivy.lang import Builder
    from kivy.app import App

    # NOTE(review): the indentation inside this kv string appears to have been
    # lost in this copy of the file; kv is indentation-sensitive, so verify
    # against the original before running.
    qrcode_kv = '''
BoxLayout:
orientation: 'vertical'
ZbarQrcodeDetector:
id: detector
Label:
text: '\\n'.join(map(repr, detector.symbols))
size_hint_y: None
height: '100dp'
BoxLayout:
size_hint_y: None
height: '48dp'
Button:
text: 'Scan a qrcode'
on_release: detector.start()
Button:
text: 'Stop detection'
on_release: detector.stop()
'''

    class QrcodeExample(App):
        # Build the UI entirely from the kv string above.
        def build(self):
            return Builder.load_string(qrcode_kv)

    QrcodeExample().run()
|
akshayaurora/electrum
|
gui/kivy/qr_scanner/scanner_android.py
|
Python
|
gpl-3.0
| 12,202
|
import os
from itertools import chain
from dvc.exceptions import PathMissingError
def ls(url, path=None, rev=None, recursive=None, dvc_only=False):
    """List files and outputs of a repo.

    Args:
        url (str): the repo url
        path (str, optional): relative path into the repo
        rev (str, optional): SHA commit, branch or tag name
        recursive (bool, optional): recursively walk the repo
        dvc_only (bool, optional): show only DVC-artifacts

    Returns:
        list of `entry` dicts, sorted by path, each with keys
        "path", "isout", "isdir" and "isexec".

    Raises:
        PathMissingError: if `path` is given but nothing exists there.
    """
    from . import Repo

    with Repo.open(url, rev=rev, subrepos=True, uninitialized=True) as repo:
        target = repo.root_dir
        if path:
            target = os.path.abspath(repo.fs.path.join(target, path))

        entries = _ls(repo.repo_fs, target, recursive, dvc_only)
        if path and not entries:
            raise PathMissingError(path, repo, dvc_only=dvc_only)

        listing = []
        for entry_path, info in entries.items():
            info["path"] = entry_path
            listing.append(info)
        listing.sort(key=lambda entry: entry["path"])
        return listing
def _ls(fs, fs_path, recursive=None, dvc_only=False):
def onerror(exc):
raise exc
infos = []
try:
for root, dirs, files in fs.walk(
fs_path, onerror=onerror, dvcfiles=True
):
entries = chain(files, dirs) if not recursive else files
infos.extend(fs.path.join(root, entry) for entry in entries)
if not recursive:
break
except NotADirectoryError:
infos.append(fs_path)
except FileNotFoundError:
return {}
ret = {}
for info in infos:
metadata = fs.metadata(info)
if metadata.output_exists or not dvc_only:
path = (
fs.path.name(fs_path)
if fs_path == info
else fs.path.relpath(info, fs_path)
)
ret[path] = {
"isout": metadata.is_output,
"isdir": metadata.isdir,
"isexec": metadata.is_exec,
}
return ret
|
dmpetrov/dataversioncontrol
|
dvc/repo/ls.py
|
Python
|
apache-2.0
| 2,303
|
# The contents of this file are subject to the Mozilla Public License
# (MPL) Version 1.1 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License
# at http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and
# limitations under the License.
#
# The Original Code is LEPL (http://www.acooke.org/lepl)
# The Initial Developer of the Original Code is Andrew Cooke.
# Portions created by the Initial Developer are Copyright (C) 2009-2010
# Andrew Cooke (andrew@acooke.org). All Rights Reserved.
#
# Alternatively, the contents of this file may be used under the terms
# of the LGPL license (the GNU Lesser General Public License,
# http://www.gnu.org/licenses/lgpl.html), in which case the provisions
# of the LGPL License are applicable instead of those above.
#
# If you wish to allow use of your version of this file only under the
# terms of the LGPL License and not to allow others to use your version
# of this file under the MPL, indicate your decision by deleting the
# provisions above and replace them with the notice and other provisions
# required by the LGPL License. If you do not delete the provisions
# above, a recipient may use your version of this file under either the
# MPL or the LGPL License.
'''
Default implementations of the stream classes.
A stream is a tuple (state, helper), where `state` will vary from location to
location, while `helper` is an "unchanging" instance of `StreamHelper`,
defined below.
For simple streams state can be a simple integer and this approach avoids the
repeated creation of objects. More complex streams may choose to not use
the state at all, simply creating a new helper at each point.
'''
from abc import ABCMeta
from lepl.support.lib import fmt
#class _SimpleStream(metaclass=ABCMeta):
# Python 2.6
# pylint: disable-msg=W0105, C0103
# Built via the three-argument ABCMeta call so the same line works on both
# Python 2.6 and Python 3 (no `metaclass=` keyword syntax required).
_StreamHelper = ABCMeta('_StreamHelper', (object, ), {})
'''ABC used to identify streams.'''

DUMMY_HELPER = object()
'''Allows tests to specify an arbitrary helper in results.'''

OFFSET, LINENO, CHAR = range(3)
'''Indices into delta.'''
class StreamHelper(_StreamHelper):
    '''
    The interface that all helpers should implement.

    A stream is the tuple (state, helper): the helper instance is shared
    across locations while the state varies, so every method below receives
    the state explicitly.
    '''

    def __init__(self, id=None, factory=None, max=None, global_kargs=None,
                 cache_level=None):
        # `id` and `max` deliberately shadow builtins to match the keyword
        # names used by callers throughout the library.
        from lepl.stream.factory import DEFAULT_STREAM_FACTORY
        self.id = id if id is not None else hash(self)
        self.factory = factory if factory else DEFAULT_STREAM_FACTORY
        self.max = max if max else MutableMaxDepth()
        self.global_kargs = global_kargs if global_kargs else {}
        # cache_level <= 0 disables caching; default is cacheable (1).
        self.cache_level = 1 if cache_level is None else cache_level

    def __repr__(self):
        '''Simplify for comparison in tests'''
        return '<helper>'

    def __eq__(self, other):
        # DUMMY_HELPER compares equal to any helper so tests can supply it
        # where the concrete helper instance is irrelevant.
        return other is DUMMY_HELPER or super(StreamHelper, self).__eq__(other)

    def __hash__(self):
        # Kept explicit: defining __eq__ would otherwise suppress the
        # inherited identity hash.
        return super(StreamHelper, self).__hash__()

    def key(self, state, other):
        '''
        Generate an object that can be hashed (implements __hash__ and __eq__).
        See `HashKey`.
        '''
        raise NotImplementedError

    def kargs(self, state, prefix='', kargs=None):
        '''
        Generate a dictionary of values that describe the stream.  These
        may be extended by subclasses.  They are provided to
        `syntax_error_kargs`, for example.

        `prefix` modifies the property names

        `kargs` allows values to be provided.  These are *not* overwritten,
        so if there is a name clash the provided value remains.

        Note: Calculating this can be expensive; use only for error messages,
        not debug messages (that may be discarded).

        The following names will be defined (at a minimum).

        For these value the "global" prefix indicates the underlying stream
        when, for example, tokens are used (other values will be relative to
        the token).  If tokens etc are not in use then global and non-global
        values will agree.
        - data: a line representing the data, highlighting the current offset
        - global_data: as data, but for the entire sequence
        - text: as data, but without a "[...]" at the end
        - global_text: as text, but for the entire sequence
        - type: the type of the sequence
        - global_type: the type of the entire sequence
        - global_offset: a 0-based index into the underlying sequence

        These values are always local:
        - offset: a 0-based index into the sequence
        - rest: the data following the current point
        - repr: the current value, or <EOS>
        - str: the current value, or an empty string

        These values are always global:
        - filename: a filename, if available, or the type
        - lineno: a 1-based line number for the current offset
        - char: a 1-based character count within the line for the current offset
        - location: a summary of the current location
        '''
        raise NotImplementedError

    def fmt(self, state, template, prefix='', kargs=None):
        '''fmt a message using the expensive kargs function.'''
        return fmt(template, **self.kargs(state, prefix=prefix, kargs=kargs))

    def debug(self, state):
        '''Generate an inexpensive debug message.'''
        raise NotImplementedError

    def next(self, state, count=1):
        '''
        Return (value, stream) where `value` is the next value (or
        values if count > 1) from the stream and `stream` is advanced to the
        next character.  Note that `value` is always a sequence (so if the
        stream is a list of integers, and `count`=1, then it will be a
        unitary list, for example).

        Should raise StopIteration when no more data are available.
        '''
        # Default raises StopIteration (not NotImplementedError): an
        # unimplemented stream behaves as an exhausted one.
        raise StopIteration

    def join(self, state, *values):
        '''
        Join sequences of values into a single sequence.
        '''
        raise NotImplementedError

    def empty(self, state):
        '''
        Return true if no more data available.
        '''
        raise NotImplementedError

    def line(self, state, empty_ok):
        '''
        Return (values, stream) where `values` correspond to something
        like "the rest of the line" from the current point and `stream`
        is advanced to the point after the line ends.

        If `empty_ok` is true and we are at the end of a line, return an
        empty line, otherwise advance (and maybe raise a StopIteration).
        '''
        raise NotImplementedError

    def len(self, state):
        '''
        Return the remaining length of the stream.  Streams of unknown
        length (iterables) should raise a TypeError.
        '''
        raise NotImplementedError

    def stream(self, state, value, id_=None, max=None):
        '''
        Return a new stream that encapsulates the value given, starting at
        `state`.  IMPORTANT: the stream used is the one that corresponds to
        the start of the value.

        For example:
            (line, next_stream) = s_line(stream, False)
            token_stream = s_stream(stream, line) # uses stream, not next_stream

        This is used when processing Tokens, for example, or columns (where
        fragments in the correct column area are parsed separately).
        '''
        raise NotImplementedError

    def deepest(self):
        '''
        Return a stream that represents the deepest match.  The stream may be
        incomplete in some sense (it may not be possible to use it for
        parsing more data), but it will have usable fmt and kargs methods.
        '''
        raise NotImplementedError

    def delta(self, state):
        '''
        Return the offset, lineno and char of the current point, relative to
        the entire stream, as a tuple.
        '''
        raise NotImplementedError

    def eq(self, state1, state2):
        '''
        Are the two states equal?
        '''
        # Default: plain state comparison; helpers with richer state override.
        return state1 == state2

    def new_max(self, state):
        '''
        Return (old max, new stream), where new stream uses a new max.
        This is used when we want to read from the stream without
        affecting the max (eg when looking ahead to generate tokens).
        '''
        raise NotImplementedError

    def cacheable(self):
        '''
        Is this stream cacheable?
        '''
        return self.cache_level > 0
# The following are convenience wrappers that allow the StreamHelper methods
# above to be called directly on (state, helper) tuples: each unpacks the
# tuple and forwards state to the helper.  The bare strings after each lambda
# are module-level "docstrings" picked up by documentation tools.
s_key = lambda stream, other=None: stream[1].key(stream[0], other)
'''Invoke helper.key(state, other)'''
s_kargs = lambda stream, prefix='', kargs=None: stream[1].kargs(stream[0], prefix=prefix, kargs=kargs)
'''Invoke helper.kargs(state, prefix, kargs)'''
s_fmt = lambda stream, template, prefix='', kargs=None: stream[1].fmt(stream[0], template, prefix=prefix, kargs=kargs)
'''Invoke helper.fmt(state, template, prefix, kargs)'''
s_debug = lambda stream: stream[1].debug(stream[0])
'''Invoke helper.debug()'''
s_next = lambda stream, count=1: stream[1].next(stream[0], count=count)
'''Invoke helper.next(state, count)'''
s_join = lambda stream, *values: stream[1].join(stream[0], *values)
'''Invoke helper.join(*values)'''
s_empty = lambda stream: stream[1].empty(stream[0])
'''Invoke helper.empty(state)'''
s_line = lambda stream, empty_ok: stream[1].line(stream[0], empty_ok)
'''Invoke helper.line(state, empty_ok)'''
s_len = lambda stream: stream[1].len(stream[0])
'''Invoke helper.len(state)'''
s_stream = lambda stream, value, id_=None, max=None: stream[1].stream(stream[0], value, id_=id_, max=max)
'''Invoke helper.stream(state, value)'''
s_deepest = lambda stream: stream[1].deepest()
'''Invoke helper.deepest()'''
s_delta = lambda stream: stream[1].delta(stream[0])
'''Invoke helper.delta(state)'''
s_eq = lambda stream1, stream2: stream1[1].eq(stream1[0], stream2[0])
'''Compare two streams (which should have identical helpers)'''
s_id = lambda stream: stream[1].id
'''Access the ID attribute.'''
s_factory = lambda stream: stream[1].factory
'''Access the factory attribute.'''
s_max = lambda stream: stream[1].max
'''Access the max attribute.'''
s_new_max = lambda stream: stream[1].new_max(stream[0])
'''Invoke helper.new_max(state).'''
s_global_kargs = lambda stream: stream[1].global_kargs
'''Access the global_kargs attribute.'''
s_cache_level = lambda stream: stream[1].cache_level
'''Access the cache_level attribute.'''
s_cacheable = lambda stream: stream[1].cacheable()
'''Is the stream cacheable?'''
class MutableMaxDepth(object):
    '''
    Track the maximum depth (offset) reached and the stream associated with
    it.  Used to generate error messages for incomplete matches.
    '''

    def __init__(self):
        self.depth = 0
        self.stream = None

    def update(self, depth, stream):
        # '>=' (rather than '>') lets a token nudge on to the next stream
        # without changing the offset (when count=0 in s_next).
        if not self.stream or depth >= self.depth:
            self.depth = depth
            self.stream = stream

    def get(self):
        '''Return the stream recorded at the deepest point so far.'''
        return self.stream
class HashKey(object):
    '''
    Carry a value together with a pre-computed hash so that arbitrary data
    can be used as a dictionary key.
    '''

    __slots__ = ['hash', 'eq']

    def __init__(self, hash, eq=None):
        self.hash = hash
        self.eq = eq

    def __hash__(self):
        return self.hash

    def __eq__(self, other):
        try:
            return self.hash == other.hash and self.eq == other.eq
        except AttributeError:
            # `other` is not HashKey-like; never equal.
            return False
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/lepl/stream/core.py
|
Python
|
agpl-3.0
| 12,016
|
"""The main form for the application"""
from PythonCard import model
# Allow importing of our custom controls
import PythonCard.resource
PythonCard.resource.APP_COMPONENTS_PACKAGE = "vb2py.targets.pythoncard.vbcontrols"
class Background(model.Background):
    """Base form class that delegates unknown attribute lookups to components.

    PythonCard stores widgets on ``self.components``; VB-generated code
    accesses them as plain attributes, so ``__getattr__`` bridges the two.
    """

    def __getattr__(self, name):
        """If a name was not found then look for it in components."""
        # Guard against infinite recursion: if `components` itself is not set
        # yet (e.g. during early construction or unpickling), delegating
        # would re-enter __getattr__('components') forever. Raise the normal
        # AttributeError instead of a RecursionError.
        if name == "components":
            raise AttributeError(name)
        return getattr(self.components, name)

    def __init__(self, *args, **kw):
        """Initialize the form and invoke the VB Form_Load handler, if any."""
        model.Background.__init__(self, *args, **kw)
        # Call the VB Form_Load
        # TODO: This is brittle - depends on how the private indicator is set
        if hasattr(self, "_MAINFORM__Form_Load"):
            self._MAINFORM__Form_Load()
        elif hasattr(self, "Form_Load"):
            self.Form_Load()
from vb2py.vbfunctions import *
from vb2py.vbdebug import *
import Globals
class MAINFORM(Background):
    """VB2Py-generated form exercising CheckBox (Check1) events/properties.

    Each handler logs through Globals.Log; vbGetEventArgs maps the
    PythonCard event object onto VB-style (Button, Shift, X, Y) arguments.
    """

    def on_Check1_mouseClick(self, *args):
        Globals.Log('Click ' + Str(self.Check1.Value))

    def on_Check1_gainFocus(self, *args):
        Globals.Log('Got focus')

    def on_Check1_loseFocus(self, *args):
        Globals.Log('Lost focus')

    def on_Check1_mouseDown(self, *args):
        Button, Shift, X, Y = vbGetEventArgs(["ButtonDown()", "ShiftDown()", "x", "y"], args)
        Globals.Log('MouseDown' + Str(Button) + ', ' + Str(Shift) + ', ' + Str(X) + ', ' + Str(Y))

    def on_Check1_mouseMove(self, *args):
        Button, Shift, X, Y = vbGetEventArgs(["ButtonDown()", "ShiftDown()", "x", "y"], args)
        Globals.Log('MouseMove' + Str(Button) + ', ' + Str(Shift) + ', ' + Str(X) + ', ' + Str(Y))

    def on_Check1_mouseUp(self, *args):
        Button, Shift, X, Y = vbGetEventArgs(["ButtonDown()", "ShiftDown()", "x", "y"], args)
        Globals.Log('MouseUp' + Str(Button) + ', ' + Str(Shift) + ', ' + Str(X) + ', ' + Str(Y))

    def on_Command1_mouseClick(self, *args):
        # Show the checkbox's current VB Value in its caption.
        self.Check1.Caption = 'Value ' + Str(self.Check1.Value)

    def on_Command4_mouseClick(self, *args):
        self.Check1.Visible = not self.Check1.Visible

    def on_Command5_mouseClick(self, *args):
        self.Check1.Left = self.Check1.Left + 20
        self.Check1.Top = self.Check1.Top + 20

    def on_Command6_mouseClick(self, *args):
        self.Check1.Width = self.Check1.Width + 20
        self.Check1.Height = self.Check1.Height + 50

    def on_Command7_mouseClick(self, *args):
        self.Check1.Enabled = not self.Check1.Enabled

# Markers preserved by the VB -> Python translator; do not remove.
# VB2PY (UntranslatedCode) Attribute VB_Name = "frmCheckBox"
# VB2PY (UntranslatedCode) Attribute VB_GlobalNameSpace = False
# VB2PY (UntranslatedCode) Attribute VB_Creatable = False
# VB2PY (UntranslatedCode) Attribute VB_PredeclaredId = True
# VB2PY (UntranslatedCode) Attribute VB_Exposed = False
if __name__ == '__main__':
    # Run the form standalone under the PythonCard application shell.
    app = model.Application(MAINFORM)
    app.MainLoop()
|
mvz/vb2py
|
vb/test3/test/frmCheckBox.py
|
Python
|
bsd-3-clause
| 2,877
|
from numpy import sum
from numpy import zeros
from gwlfe.Input.LandUse.NLU import NLU
from gwlfe.Input.WaterBudget.Water import Water
from gwlfe.Memoization import memoize
from gwlfe.MultiUse_Fxns.Discharge.AdjUrbanQTotal import AdjUrbanQTotal
from gwlfe.Output.Loading.SurfaceLoad_1 import SurfaceLoad_1
from gwlfe.Output.Loading.SurfaceLoad_1 import SurfaceLoad_1_f
@memoize
def LuLoad(NYrs, DaysMonth, Temp, InitSnow_0, Prec, NRur, NUrb, Area, CNI_0, AntMoist_0,
           Grow_0, CNP_0, Imper, ISRR, ISRA, Qretention, PctAreaInfil, Nqual, LoadRateImp,
           LoadRatePerv, Storm, UrbBMPRed, FilterWidth, PctStrmBuf):
    """Accumulate yearly urban land-use pollutant loads.

    Sums SurfaceLoad_1 daily values over the urban land uses (indices
    NRur..NLU-1) and each quality constituent, counting only days that are
    above freezing, have available water, and produce adjusted urban runoff.

    Returns a (NYrs, 16, 3) array indexed [year][land use][constituent].
    """
    result = zeros((NYrs, 16, 3))
    water = Water(NYrs, DaysMonth, InitSnow_0, Temp, Prec)
    num_lu = NLU(NRur, NUrb)
    runoff = AdjUrbanQTotal(NYrs, DaysMonth, Temp, InitSnow_0, Prec, NRur, NUrb, Area,
                            CNI_0, AntMoist_0, Grow_0, CNP_0, Imper, ISRR, ISRA,
                            Qretention, PctAreaInfil)
    daily_load = SurfaceLoad_1(NYrs, DaysMonth, InitSnow_0, Temp, Prec, NRur, NUrb, Area,
                               CNI_0, AntMoist_0, Grow_0, CNP_0, Imper, ISRR, ISRA,
                               Qretention, PctAreaInfil, Nqual, LoadRateImp, LoadRatePerv,
                               Storm, UrbBMPRed, FilterWidth, PctStrmBuf)
    for Y in range(NYrs):
        for i in range(12):
            for j in range(DaysMonth[Y][i]):
                # Skip frozen or dry days, and days with no adjusted runoff.
                if not (Temp[Y][i][j] > 0 and water[Y][i][j] > 0.01):
                    continue
                if not (runoff[Y][i][j] > 0.001):
                    continue
                for l in range(NRur, num_lu):
                    for q in range(Nqual):
                        result[Y][l][q] += daily_load[Y][i][j][l][q]
    return result
@memoize
def LuLoad_f(NYrs, DaysMonth, Temp, InitSnow_0, Prec, NRur, NUrb, Area, CNI_0, AntMoist_0,
             Grow_0, CNP_0, Imper, ISRR, ISRA, Qretention, PctAreaInfil, Nqual, LoadRateImp,
             LoadRatePerv, Storm, UrbBMPRed, FilterWidth, PctStrmBuf):
    """Vectorized counterpart of LuLoad, built on SurfaceLoad_1_f."""
    daily = SurfaceLoad_1_f(NYrs, DaysMonth, InitSnow_0, Temp, Prec, NRur, NUrb, Area,
                            CNI_0, AntMoist_0, Grow_0, CNP_0, Imper, ISRR, ISRA,
                            Qretention, PctAreaInfil, Nqual, LoadRateImp, LoadRatePerv,
                            Storm, UrbBMPRed, FilterWidth, PctStrmBuf)
    # Collapse axes 1 and 2 -- presumably the month and day axes of the
    # SurfaceLoad_1_f result; confirm against that function's output shape.
    return sum(daily, axis=(1, 2))
|
WikiWatershed/gwlf-e
|
gwlfe/Output/Loading/LuLoad.py
|
Python
|
apache-2.0
| 2,418
|
# Copyright (c) 2015 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import xrange
from kmip.core.enums import Tags
from kmip.core.messages.contents import ProtocolVersion
from kmip.core.primitives import Struct
from kmip.core.utils import BytearrayStream
class DiscoverVersionsRequestPayload(Struct):
    """Request payload for the KMIP DiscoverVersions operation.

    Wraps an optional list of ProtocolVersion structs that the client
    supports, encoded under the REQUEST_PAYLOAD tag.
    """

    def __init__(self, protocol_versions=None):
        super(DiscoverVersionsRequestPayload, self).__init__(
            Tags.REQUEST_PAYLOAD)
        # Default to a fresh list; never share a mutable default.
        self.protocol_versions = (
            list() if protocol_versions is None else protocol_versions)
        self.validate()

    def read(self, istream):
        """Decode the payload (and any ProtocolVersion structs) from istream."""
        super(DiscoverVersionsRequestPayload, self).read(istream)
        tstream = BytearrayStream(istream.read(self.length))
        while self.is_tag_next(Tags.PROTOCOL_VERSION, tstream):
            version = ProtocolVersion()
            version.read(tstream)
            self.protocol_versions.append(version)
        self.is_oversized(tstream)
        self.validate()

    def write(self, ostream):
        """Encode the payload into ostream, computing the length first."""
        tstream = BytearrayStream()
        for version in self.protocol_versions:
            version.write(tstream)
        self.length = tstream.length()
        super(DiscoverVersionsRequestPayload, self).write(ostream)
        ostream.write(tstream.buffer)

    def validate(self):
        """Raise TypeError unless protocol_versions is a list of ProtocolVersion."""
        if not isinstance(self.protocol_versions, list):
            msg = "invalid protocol versions list"
            msg += "; expected {0}, received {1}".format(
                list, self.protocol_versions)
            raise TypeError(msg)
        for index, version in enumerate(self.protocol_versions):
            if not isinstance(version, ProtocolVersion):
                msg = "invalid protocol version ({0} in list)".format(index)
                msg += "; expected {0}, received {1}".format(
                    ProtocolVersion, version)
                raise TypeError(msg)
class DiscoverVersionsResponsePayload(Struct):
    """Response payload for the KMIP DiscoverVersions operation.

    Wraps the list of ProtocolVersion structs supported by the server,
    encoded under the RESPONSE_PAYLOAD tag.
    """

    def __init__(self, protocol_versions=None):
        super(DiscoverVersionsResponsePayload, self).__init__(
            Tags.RESPONSE_PAYLOAD)
        # Default to a fresh list; never share a mutable default.
        self.protocol_versions = (
            list() if protocol_versions is None else protocol_versions)
        self.validate()

    def read(self, istream):
        """Decode the payload (and any ProtocolVersion structs) from istream."""
        super(DiscoverVersionsResponsePayload, self).read(istream)
        tstream = BytearrayStream(istream.read(self.length))
        while self.is_tag_next(Tags.PROTOCOL_VERSION, tstream):
            version = ProtocolVersion()
            version.read(tstream)
            self.protocol_versions.append(version)
        self.is_oversized(tstream)
        self.validate()

    def write(self, ostream):
        """Encode the payload into ostream, computing the length first."""
        tstream = BytearrayStream()
        for version in self.protocol_versions:
            version.write(tstream)
        self.length = tstream.length()
        super(DiscoverVersionsResponsePayload, self).write(ostream)
        ostream.write(tstream.buffer)

    def validate(self):
        """Raise TypeError unless protocol_versions is a list of ProtocolVersion."""
        if not isinstance(self.protocol_versions, list):
            msg = "invalid protocol versions list"
            msg += "; expected {0}, received {1}".format(
                list, self.protocol_versions)
            raise TypeError(msg)
        for index, version in enumerate(self.protocol_versions):
            if not isinstance(version, ProtocolVersion):
                msg = "invalid protocol version ({0} in list)".format(index)
                msg += "; expected {0}, received {1}".format(
                    ProtocolVersion, version)
                raise TypeError(msg)
|
viktorTarasov/PyKMIP
|
kmip/core/messages/payloads/discover_versions.py
|
Python
|
apache-2.0
| 4,623
|
# -*- coding: utf-8 -*-
"""
.. _tut-set-eeg-ref:
=========================
Setting the EEG reference
=========================
This tutorial describes how to set or change the EEG reference in MNE-Python.
As usual we'll start by importing the modules we need, loading some
:ref:`example data <sample-dataset>`, and cropping it to save memory. Since
this tutorial deals specifically with EEG, we'll also restrict the dataset to
just a few EEG channels so the plots are easier to see:
"""
# %%
import os
import mne
# Locate (downloading on first use) the MNE sample dataset on disk.
sample_data_folder = mne.datasets.sample.data_path()
sample_data_raw_file = os.path.join(sample_data_folder, 'MEG', 'sample',
                                    'sample_audvis_raw.fif')
# verbose=False silences the file-loading log output.
raw = mne.io.read_raw_fif(sample_data_raw_file, verbose=False)
# Keep only the first 60 s and load into memory so channels can be picked.
raw.crop(tmax=60).load_data()
# Restrict to EEG channels 041-059 so the plots stay readable.
raw.pick(['EEG 0{:02}'.format(n) for n in range(41, 60)])
# %%
# Background
# ^^^^^^^^^^
#
# EEG measures a voltage (difference in electric potential) between each
# electrode and a reference electrode. This means that whatever signal is
# present at the reference electrode is effectively subtracted from all the
# measurement electrodes. Therefore, an ideal reference signal is one that
# captures *none* of the brain-specific fluctuations in electric potential,
# while capturing *all* of the environmental noise/interference that is being
# picked up by the measurement electrodes.
#
# In practice, this means that the reference electrode is often placed in a
# location on the subject's body and close to their head (so that any
# environmental interference affects the reference and measurement electrodes
# similarly) but as far away from the neural sources as possible (so that the
# reference signal doesn't pick up brain-based fluctuations). Typical reference
# locations are the subject's earlobe, nose, mastoid process, or collarbone.
# Each of these has advantages and disadvantages regarding how much brain
# signal it picks up (e.g., the mastoids pick up a fair amount compared to the
# others), and regarding the environmental noise it picks up (e.g., earlobe
# electrodes may shift easily, and have signals more similar to electrodes on
# the same side of the head).
#
# Even in cases where no electrode is specifically designated as the reference,
# EEG recording hardware will still treat one of the scalp electrodes as the
# reference, and the recording software may or may not display it to you (it
# might appear as a completely flat channel, or the software might subtract out
# the average of all signals before displaying, making it *look like* there is
# no reference).
#
#
# Setting or changing the reference channel
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# If you want to recompute your data with a different reference than was used
# when the raw data were recorded and/or saved, MNE-Python provides the
# :meth:`~mne.io.Raw.set_eeg_reference` method on :class:`~mne.io.Raw` objects
# as well as the :func:`mne.add_reference_channels` function. To use an
# existing channel as the new reference, use the
# :meth:`~mne.io.Raw.set_eeg_reference` method; you can also designate multiple
# existing electrodes as reference channels, as is sometimes done with mastoid
# references:
# code lines below are commented out because the sample data doesn't have
# earlobe or mastoid channels, so this is just for demonstration purposes:
# use a single channel reference (left earlobe)
# raw.set_eeg_reference(ref_channels=['A1'])
# use average of mastoid channels as reference
# raw.set_eeg_reference(ref_channels=['M1', 'M2'])
# use a bipolar reference (contralateral)
# raw.set_bipolar_reference(anode=['F3'], cathode=['F4'])
# %%
# If a scalp electrode was used as reference but was not saved alongside the
# raw data (reference channels often aren't), you may wish to add it back to
# the dataset before re-referencing. For example, if your EEG system recorded
# with channel ``Fp1`` as the reference but did not include ``Fp1`` in the data
# file, using :meth:`~mne.io.Raw.set_eeg_reference` to set (say) ``Cz`` as the
# new reference will then subtract out the signal at ``Cz`` *without restoring
# the signal at* ``Fp1``. In this situation, you can add back ``Fp1`` as a flat
# channel prior to re-referencing using :func:`~mne.add_reference_channels`.
# (Since our example data doesn't use the `10-20 electrode naming system`_, the
# example below adds ``EEG 999`` as the missing reference, then sets the
# reference to ``EEG 050``.) Here's how the data looks in its original state:
raw.plot()  # data as recorded, before any re-referencing
# %%
# By default, :func:`~mne.add_reference_channels` returns a copy, so we can go
# back to our original ``raw`` object later. If you wanted to alter the
# existing :class:`~mne.io.Raw` object in-place you could specify
# ``copy=False``.
# add new reference channel (all zero)
raw_new_ref = mne.add_reference_channels(raw, ref_channels=['EEG 999'])
raw_new_ref.plot()
# %%
# .. KEEP THESE BLOCKS SEPARATE SO FIGURES ARE BIG ENOUGH TO READ
# set reference to `EEG 050` (operates in place on raw_new_ref)
raw_new_ref.set_eeg_reference(ref_channels=['EEG 050'])
raw_new_ref.plot()
# %%
# Notice that the new reference (``EEG 050``) is now flat, while the original
# reference channel that we added back to the data (``EEG 999``) has a non-zero
# signal. Notice also that ``EEG 053`` (which is marked as "bad" in
# ``raw.info['bads']``) is not affected by the re-referencing.
#
#
# Setting average reference
# ^^^^^^^^^^^^^^^^^^^^^^^^^
#
# To set a "virtual reference" that is the average of all channels, you can use
# :meth:`~mne.io.Raw.set_eeg_reference` with ``ref_channels='average'``. Just
# as above, this will not affect any channels marked as "bad", nor will it
# include bad channels when computing the average. However, it does modify the
# :class:`~mne.io.Raw` object in-place, so we'll make a copy first so we can
# still go back to the unmodified :class:`~mne.io.Raw` object later:
# sphinx_gallery_thumbnail_number = 4
# use the average of all channels as reference
# copy() keeps the original ``raw`` untouched; set_eeg_reference works in place
raw_avg_ref = raw.copy().set_eeg_reference(ref_channels='average')
raw_avg_ref.plot()
# %%
# .. _section-avg-ref-proj:
#
# Creating the average reference as a projector
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# If using an average reference, it is possible to create the reference as a
# :term:`projector` rather than subtracting the reference from the data
# immediately by specifying ``projection=True``:
raw.set_eeg_reference('average', projection=True)
print(raw.info['projs'])  # the reference now appears as an unapplied projector
# %%
# Creating the average reference as a projector has a few advantages:
#
# 1. It is possible to turn projectors on or off when plotting, so it is easy
# to visualize the effect that the average reference has on the data.
#
# 2. If additional channels are marked as "bad" or if a subset of channels are
# later selected, the projector will be re-computed to take these changes
# into account (thus guaranteeing that the signal is zero-mean).
#
# 3. If there are other unapplied projectors affecting the EEG channels (such
# as SSP projectors for removing heartbeat or blink artifacts), EEG
# re-referencing cannot be performed until those projectors are either
# applied or removed; adding the EEG reference as a projector is not subject
# to that constraint. (The reason this wasn't a problem when we applied the
# non-projector average reference to ``raw_avg_ref`` above is that the
# empty-room projectors included in the sample data :file:`.fif` file were
# only computed for the magnetometers.)
# Plot with the average-reference projector off, then on, to compare.
for title, proj in zip(['Original', 'Average'], [False, True]):
    with mne.viz.use_browser_backend('matplotlib'):
        fig = raw.plot(proj=proj, n_channels=len(raw))
    # make room for title
    fig.subplots_adjust(top=0.9)
    fig.suptitle('{} reference'.format(title), size='xx-large', weight='bold')
# %%
# Using an infinite reference (REST)
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# To use the "point at infinity" reference technique described in
# :footcite:`Yao2001` requires a forward model, which we can create in a few
# steps. Here we use a fairly large spacing of vertices (``pos`` = 15 mm) to
# reduce computation time; a 5 mm spacing is more typical for real data
# analysis:
raw.del_proj()  # remove our average reference projector first
# Spherical head model and coarse volume source space for the forward model.
sphere = mne.make_sphere_model('auto', 'auto', raw.info)
src = mne.setup_volume_source_space(sphere=sphere, exclude=30., pos=15.)
forward = mne.make_forward_solution(raw.info, trans=None, src=src, bem=sphere)
raw_rest = raw.copy().set_eeg_reference('REST', forward=forward)
for title, _raw in zip(['Original', 'REST (∞)'], [raw, raw_rest]):
    with mne.viz.use_browser_backend('matplotlib'):
        fig = _raw.plot(n_channels=len(raw), scalings=dict(eeg=5e-5))
    # make room for title
    fig.subplots_adjust(top=0.9)
    fig.suptitle('{} reference'.format(title), size='xx-large', weight='bold')
# %%
# Using a bipolar reference
# ^^^^^^^^^^^^^^^^^^^^^^^^^
#
# To create a bipolar reference, you can use :meth:`~mne.set_bipolar_reference`
# along with the respective channel names for ``anode`` and ``cathode`` which
# creates a new virtual channel that takes the difference between two
# specified channels (anode and cathode) and drops the original channels by
# default. The new virtual channel will be annotated with the channel info of
# the anode with location set to ``(0, 0, 0)`` and coil type set to
# ``EEG_BIPOLAR`` by default. Here we use a contralateral/transverse bipolar
# reference between channels ``EEG 054`` and ``EEG 055`` as described in
# :footcite:`YaoEtAl2019` which creates a new virtual channel
# named ``EEG 054-EEG 055``.
# Virtual channel 'EEG 054-EEG 055' = anode minus cathode; originals dropped.
raw_bip_ref = mne.set_bipolar_reference(raw, anode=['EEG 054'],
                                        cathode=['EEG 055'])
raw_bip_ref.plot()
# %%
# EEG reference and source modeling
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# If you plan to perform source modeling (either with EEG or combined EEG/MEG
# data), it is **strongly recommended** to use the
# average-reference-as-projection approach. It is important to use an average
# reference because using a specific
# reference sensor (or even an average of a few sensors) spreads the forward
# model error from the reference sensor(s) into all sensors, effectively
# amplifying the importance of the reference sensor(s) when computing source
# estimates. In contrast, using the average of all EEG channels as reference
# spreads the forward modeling error evenly across channels, so no one channel
# is weighted more strongly during source estimation. See also this `FieldTrip
# FAQ on average referencing`_ for more information.
#
# The main reason for specifying the average reference as a projector was
# mentioned in the previous section: an average reference projector adapts if
# channels are dropped, ensuring that the signal will always be zero-mean when
# the source modeling is performed. In contrast, applying an average reference
# by the traditional subtraction method offers no such guarantee.
#
# For these reasons, when performing inverse imaging, *MNE-Python will raise
# a ``ValueError`` if there are EEG channels present and something other than
# an average reference strategy has been specified*.
#
# .. LINKS
#
# .. _`FieldTrip FAQ on average referencing`:
# http://www.fieldtriptoolbox.org/faq/why_should_i_use_an_average_reference_for_eeg_source_reconstruction/
# .. _`10-20 electrode naming system`:
# https://en.wikipedia.org/wiki/10%E2%80%9320_system_(EEG)
#
#
# References
# ^^^^^^^^^^
# .. footbibliography::
|
mne-tools/mne-python
|
tutorials/preprocessing/55_setting_eeg_reference.py
|
Python
|
bsd-3-clause
| 11,528
|
from django import forms
class PersonalPageForm(forms.Form):
    """Form to reflect the data stored in the PersonalPage model."""
    # Short free-text biography; compact two-row textarea widget.
    bio = forms.CharField(max_length=300, required=False,
                          widget=forms.Textarea(attrs={'rows': '2'}))
    # NOTE(review): ``initial`` pre-fills the field and is submitted back
    # unless the user clears it; a widget ``placeholder`` attr is probably
    # what was intended here.
    location = forms.CharField(max_length=50, required=False,
                               initial="eg: Richmond, VA, USA.")
    # EmailField validates the address format; the previous CharField
    # accepted any text as an email.
    email = forms.EmailField(max_length=50, required=False)
    facebook = forms.CharField(max_length=50, required=False)
    twitter = forms.CharField(max_length=50, required=False)
    tumblr = forms.CharField(max_length=80, required=False)
    linkedin = forms.CharField(max_length=80, required=False)
    personal_site = forms.CharField(max_length=50, required=False)
|
alonsebastian/SocialID
|
personal_page/forms.py
|
Python
|
gpl-2.0
| 718
|
from __future__ import unicode_literals, absolute_import
from .base import Command
from utils.colorize import colorize, Colors
class Volume(Command):
    """Console command that reports or sets the player volume (percent)."""
    name = 'volume'
    pattern = 'volume {n}'
    example = ('volume 45', 'v 45', 'vol 45',)
    description = 'Set volume level in percentage.'
    # NOTE(review): declared @staticmethod yet takes ``self`` -- the
    # dispatcher apparently invokes it as ``Volume.handle(command, *args)``;
    # confirm against the Command base class before changing.
    @staticmethod
    def handle(self, *args):
        """Return a colorized status line; set volume when an arg is given."""
        if self.player:
            # First positional argument is the requested volume, if any.
            arg = args[0] if args else ''
            if not arg:
                # No argument: report the current level instead of setting it.
                return self.INDENT + colorize(Colors.GREEN, 'Current volume is ') + str(self.player.get_volume())
            try:
                self.player.set_volume(int(arg))
            except ValueError:
                # Non-numeric argument, e.g. ``volume loud``.
                return self.INDENT + colorize(Colors.RED, 'Volume value ') + arg + \
                    colorize(Colors.RED, ' isn\'t valid.')
            return self.INDENT + colorize(Colors.GREEN, 'Set volume to ') + arg
        return self.INDENT + colorize(Colors.RED, 'No active players found.')
class V(Volume):
    """Shorthand alias for :class:`Volume`; hidden from the main help."""
    name = 'v'
    pattern = 'v {n}'
    example = ('v 45', 'volume 45', 'vol 45',)
    show_in_main_help = False
class Vol(Volume):
    """Shorthand alias for :class:`Volume`; hidden from the main help."""
    name = 'vol'
    pattern = 'vol {n}'
    example = ('vol 45', 'v 45', 'volume 45',)
    show_in_main_help = False
|
roman-kachanovsky/cmd.fm-python
|
commands/volume.py
|
Python
|
bsd-3-clause
| 1,227
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that xctest targets are correctly configured.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
  test = TestGyp.TestGyp(formats=['xcode'])

  # Ignore this test if Xcode 5 is not installed
  import subprocess
  job = subprocess.Popen(['xcodebuild', '-version'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
  out, err = job.communicate()
  if job.returncode != 0:
    raise Exception('Error %d running xcodebuild' % job.returncode)
  xcode_version, build_number = out.splitlines()
  # Convert the version string from 'Xcode 5.0' to ['5','0'].
  xcode_version = xcode_version.split()[-1].split('.')
  # Compare the major version numerically: the previous list-of-strings
  # comparison (xcode_version < ['5']) is lexicographic, so '10' < '5'
  # and the test was wrongly skipped on Xcode 10+.
  if int(xcode_version[0]) < 5:
    test.pass_test()

  CHDIR = 'xctest'
  test.run_gyp('test.gyp', chdir=CHDIR)
  test.build('test.gyp', chdir=CHDIR, arguments=['-scheme', 'classes', 'test'])
  test.built_file_must_match('tests.xctest/Contents/Resources/resource.txt',
                             'foo\n', chdir=CHDIR)
  test.pass_test()
|
Jet-Streaming/gyp
|
test/mac/gyptest-xctest.py
|
Python
|
bsd-3-clause
| 1,234
|
#!/usr/bin/env python
import numpy,sys,matplotlib.pyplot as plt,math,operator
## THIS PLOTS Standard Dev, NOT Variance
#print plt.style.available
#plt.style.use('ggplot')
if len(sys.argv) < 3:
    # Parenthesized print works identically on Python 2 and 3 here.
    print("Specify at least one input and one output.")
    sys.exit()

filesin = sys.argv[1:-1]   # one or more data files
fileout = sys.argv[-1]     # output image path

for filename in filesin:
    if filename.endswith('.txt'):
        data = []
        err_min = []
        err_plus = []
        # Total number of primaries; scales the statistical error estimate.
        # nr = 113*57600000
        nr = 600*28800000
        # ``with`` closes the file promptly (the old code leaked the handle).
        with open(filename, 'r') as header:
            for line in header:
                value = float(line.strip().split()[-1])
                data.append(value)
                err = 0
                try:
                    # Poisson-style error: sqrt(v)/sqrt(nr).
                    err = math.sqrt(value) * 1. / math.sqrt(nr)
                except ValueError:
                    # Negative values have no real sqrt; keep err = 0
                    # (previously a bare except hid all failures).
                    pass
                err_min.append(value - err)
                err_plus.append(value + err)
        plt.plot(data, label=filename)
        plt.fill_between(range(len(data)), err_min, err_plus, alpha=0.25)
    if filename.endswith('.raw'):
        data = numpy.fromfile(filename, dtype='<f4')
        plt.plot(data, label=filename)

# The original set ylabel twice ('Counts' then 'PG energy'); only the
# final call takes effect, so the dead first call was dropped.
plt.ylabel('PG energy')
plt.legend(prop={'size': 10})
plt.savefig(fileout)
|
brenthuisman/phd_tools
|
plot.1d.pluserror.py
|
Python
|
lgpl-3.0
| 1,406
|
# Extract ground state eigenvalues and put into table
# Takes File Name as Input
# Tom Morrell 2015
import sys
name = sys.argv[1]

top = []    # triad labels, one [a, b, c] entry per calculation
data = []   # eigenvalue strings, one list per triad
cur = 0     # index of curent triad
eigs = 0    # eigenvalues collected so far for the current triad
index = 0   # 1-based index of the most recent triad label
maxv = 0    # longest eigenvalue list seen (number of output rows)

# ``with`` guarantees the input file is closed (previously leaked).
with open(name, 'r') as infile:
    inline = infile.readline()
    # Skip ahead to the ground-state section of the log.
    while inline[1:46] != 'CALCULATIONS for ACTOR:HAMILTONIAN GROUND':
        inline = infile.readline()
    # now at correct section; parse until the next section header
    while inline[1:9] != 'H H A':
        split = inline.split()
        if len(split) > 0:
            if split[0] == 'CALCULATING':
                # If we have a triad label
                top.append([split[6], split[7], split[8]])
                index = len(top)  # current triad
            if split[0] == 'EIGVAL':
                if cur == index:
                    # Continuation line for the current triad.
                    data[cur-1] = data[cur-1] + split[1:]
                    eigs = eigs + len(split[1:])
                else:  # new value
                    data.append(split[1:])
                    eigs = len(split[1:])
                    cur = index
                # Update the row count in BOTH branches: the original only
                # did this for continuation lines, so triads whose
                # eigenvalues fit on a single line could be truncated.
                if eigs > maxv:
                    maxv = eigs
        inline = infile.readline()

# Build a tab-separated table: header row of triad labels, then one row
# per eigenvalue index with short columns padded by empty cells.
outstring = ''
for j in top:
    outstring = outstring + str(j) + '\t'
outstring = outstring + '\n'
for i in range(maxv):  # for each row
    for j in range(len(data)):
        if len(data[j]) <= i:
            outstring = outstring + '\t'
        else:
            outstring = outstring + data[j][i] + '\t'
    outstring = outstring + '\n'
with open(name.split('.')[0] + '.out', 'w') as outfile:
    outfile.write(outstring)
|
tmorrell/ground_ctm4xas
|
ground_ctm4xas.py
|
Python
|
gpl-2.0
| 1,396
|
import unittest
'''This test file is intended to test the database and the gui to a good extent.
This should:
- create random entries with somewhat random timestamps.
- Save entries to a text file for comparison.
- Enter each entry into the database
- search for some of the random terms.
- Remove database after testing
- Collect and show statistics throughout
- Push buttons to see if there is a place where things break down.'''
class GUITester(object):
    """Placeholder driver for exercising the note-taking GUI.
    Methods map to the GUI actions listed in the module docstring; all of
    them are still to be implemented.
    """
    def __init__(self):
        pass
    def randp(self): #new project
        pass
    def archivep(self): #'x' project
        pass
    def openp(self): #open project
        pass
    def move(self):
        pass
#    def test_empty_note(self):
#        """ Tests that empty notes or ones with only space
#        charachters don't get put into the database.
#        Uses Projects, dates and times to enable checking.
#        """
#        pass
class TimeTester(object):
    """Placeholder driver for the timing features; not yet implemented."""
    def __init__(self):
        pass
    def start(self):
        pass
    def stop(self):
        pass
    def replace(self):
        pass
if __name__ == "__main__":
    # A bare help() at module level dropped every importer into the
    # interactive help prompt; only do that when run directly.
    help()
|
Ghalko/noteTaker
|
test/test.py
|
Python
|
mit
| 1,092
|
from setuptools import setup
# Read the long description; ``with`` closes the handle promptly (the
# previous bare open(...).read() leaked it until garbage collection).
with open('README.rst') as readme_file:
    readme = readme_file.read()

setup(name='marbaloo_mako',
      version='0.1.1',
      description='Mako template support for cherrypy.',
      long_description=readme,
      url='http://github.com/marbaloo/marbaloo_mako',
      author='Mahdi Ghane.g',
      license='MIT',
      keywords='mako cherrypy marbaloo marbaloo_mako',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Intended Audience :: Developers',
          'Framework :: CherryPy',
          'License :: OSI Approved :: MIT License',
          'Operating System :: POSIX :: Linux',
          'Operating System :: Unix',
          'Programming Language :: Python :: 3 :: Only',
          'Topic :: Software Development :: Libraries'
      ],
      install_requires=[
          'cherrypy>=8.1.2',
          'mako>=1.0.4'
      ],
      packages=['marbaloo_mako'],
      )
|
marbaloo/marbaloo_mako
|
setup.py
|
Python
|
mit
| 897
|
#!/usr/bin/env python3
"""Various utilities."""
from __future__ import annotations
import json
from os import environ
from pathlib import Path
import click
import requests
from beancount.query import query_env
from beancount.query import query_parser
from fava import LOCALES
BASE_PATH = Path(__file__).parent.parent
FAVA_PATH = BASE_PATH / "src" / "fava"
# Root command group; the subcommands below attach via @cli.command().
@click.group()
def cli():
    """Various utilities."""
def _env_to_list(attributes):
for name in attributes.keys():
if isinstance(name, tuple):
name = name[0]
yield name
@cli.command()
def generate_bql_grammar_json():
    """Generate a JSON file with BQL grammar attributes.
    The online code editor needs to have the list of available columns,
    functions, and keywords for syntax highlighting and completion.
    Should be run whenever the BQL changes."""
    target_env = query_env.TargetsEnvironment()
    # Deduplicate and sort so the generated file is deterministic.
    data = {
        "columns": sorted(set(_env_to_list(target_env.columns))),
        "functions": sorted(set(_env_to_list(target_env.functions))),
        "keywords": sorted({kw.lower() for kw in query_parser.Lexer.keywords}),
    }
    path = BASE_PATH / "frontend" / "src" / "codemirror" / "bql-grammar.ts"
    # Emit as a TypeScript module so the frontend can import it directly.
    path.write_text("export default " + json.dumps(data))
@cli.command()
def download_translations():
    """Fetch updated translations from POEditor.com."""
    token = environ.get("POEDITOR_TOKEN")
    if not token:
        raise click.UsageError(
            "The POEDITOR_TOKEN environment variable needs to be set."
        )
    # Fetch every locale that Fava ships translations for.
    for language in LOCALES:
        download_from_poeditor(language, token)
@cli.command()
def upload_translations():
    """Upload .pot message catalog to POEditor.com."""
    token = environ.get("POEDITOR_TOKEN")
    if not token:
        raise click.UsageError(
            "The POEDITOR_TOKEN environment variable needs to be set."
        )
    path = FAVA_PATH / "translations" / "messages.pot"
    click.echo(f"Uploading message catalog: {path}")
    data = {
        "api_token": token,
        "id": 90283,
        "updating": "terms",
        # "sync_terms": 1,
    }
    with open(path, "rb") as file:
        files = {"file": file}
        # ``timeout`` prevents a stalled connection from hanging the script
        # forever; requests does not time out by default.  Also renamed the
        # misleading ``request`` -- post() returns a Response.
        response = requests.post(
            "https://api.poeditor.com/v2/projects/upload",
            data=data,
            files=files,
            timeout=60,
        )
    click.echo("Done: " + str(response.json()["result"]["terms"]))
# For these languages, the name on POEDITOR is off.
POEDITOR_LANGUAGE_NAME = {"zh": "zh-CN", "zh_Hant_TW": "zh-TW"}
def download_from_poeditor(language, token):
    """Download .po-file from POEditor and save to disk.

    Args:
        language: Fava locale name; mapped via POEDITOR_LANGUAGE_NAME.
        token: POEditor API token.
    """
    click.echo(f'Downloading .po-file for language "{language}"')
    poeditor_name = POEDITOR_LANGUAGE_NAME.get(language, language)
    data = {
        "api_token": token,
        "id": 90283,
        "language": poeditor_name,
        "type": "po",
    }
    # timeouts keep a stalled connection from hanging the script forever.
    response = requests.post(
        "https://api.poeditor.com/v2/projects/export", data=data, timeout=60
    )
    url = response.json()["result"]["url"]
    content = requests.get(url, timeout=60).content
    folder = FAVA_PATH / "translations" / language / "LC_MESSAGES"
    # exist_ok avoids the race between an exists() check and mkdir().
    folder.mkdir(parents=True, exist_ok=True)
    path = folder / "messages.po"
    path.write_bytes(content)
    click.echo(f'Downloaded to "{path}"')
if __name__ == "__main__":
cli()
|
beancount/fava
|
contrib/scripts.py
|
Python
|
mit
| 3,362
|
#!/usr/bin/env python
# Thanks to:
# https://github.com/ros-visualization/rqt_common_plugins/blob/groovy-devel/rqt_topic/src/rqt_topic/topic_widget.py
import numpy
import random
import roslib
import roslib.message
import rospy
from opcua import ua, uamethod
import ros_actions
import ros_server
import rostopic
class OpcUaROSTopic:
    """Mirror one ROS topic as a tree of OPC UA nodes (two-way bridge)."""
    def __init__(self, server, parent, idx, topic_name, topic_type):
        """Build the UA node tree for *topic_name* and wire up pub/sub."""
        self.server = server
        # Intermediate UA objects for each namespace level of the topic name.
        self.parent = self.recursive_create_objects(topic_name, idx, parent)
        self.type_name = topic_type
        self.name = topic_name
        # Maps topic/slot paths (incl. 'arr[0]' style) to their UA nodes.
        self._nodes = {}
        self.idx = idx
        self.message_class = None
        try:
            self.message_class = roslib.message.get_message_class(topic_type)
            self.message_instance = self.message_class()
        except rospy.ROSException:
            self.message_class = None
            rospy.logfatal("Couldn't find message class for type " + topic_type)
        self._recursive_create_items(self.parent, idx, topic_name, topic_type, self.message_instance, True)
        # Subscribe to push ROS updates into UA; publish for UA-side edits.
        self._subscriber = rospy.Subscriber(self.name, self.message_class, self.message_callback)
        self._publisher = rospy.Publisher(self.name, self.message_class, queue_size=1)
        rospy.loginfo("Created ROS Topic with name: " + str(self.name))
    def _recursive_create_items(self, parent, idx, topic_name, type_name, message, top_level=False):
        """Recursively create UA nodes for *message*, its slots and arrays."""
        topic_text = topic_name.split('/')[-1]
        if '[' in topic_text:
            topic_text = topic_text[topic_text.index('['):]
        # This here are 'complex datatypes'
        if hasattr(message, '__slots__') and hasattr(message, '_slot_types'):
            # NOTE(review): complex_type is assigned but never used.
            complex_type = True
            new_node = parent.add_object(ua.NodeId(topic_name, parent.nodeid.NamespaceIndex, ua.NodeIdType.String),
                                         ua.QualifiedName(topic_name, parent.nodeid.NamespaceIndex))
            new_node.add_property(ua.NodeId(topic_name + ".Type", idx),
                                  ua.QualifiedName("Type", parent.nodeid.NamespaceIndex), type_name)
            if top_level:
                # 'Update' method lets UA clients push edits back to ROS.
                new_node.add_method(ua.NodeId(topic_name + ".Update", parent.nodeid.NamespaceIndex),
                                    ua.QualifiedName("Update", parent.nodeid.NamespaceIndex),
                                    self.opcua_update_callback, [], [])
            for slot_name, type_name_child in zip(message.__slots__, message._slot_types):
                self._recursive_create_items(new_node, idx, topic_name + '/' + slot_name, type_name_child,
                                             getattr(message, slot_name))
            self._nodes[topic_name] = new_node
        else:
            # This are arrays
            base_type_str, array_size = _extract_array_info(type_name)
            try:
                base_instance = roslib.message.get_message_class(base_type_str)()
            except (ValueError, TypeError):
                base_instance = None
            if array_size is not None and hasattr(base_instance, '__slots__'):
                for index in range(array_size):
                    self._recursive_create_items(parent, idx, topic_name + '[%d]' % index, base_type_str, base_instance)
            else:
                # Primitive leaf: plain UA variable node.
                new_node = _create_node_with_type(parent, idx, topic_name, topic_text, type_name, array_size)
                self._nodes[topic_name] = new_node
        if topic_name in self._nodes and self._nodes[topic_name].get_node_class() == ua.NodeClass.Variable:
            self._nodes[topic_name].set_writable(True)
        return
    def message_callback(self, message):
        """ROS subscriber callback: mirror the new message into UA."""
        self.update_value(self.name, message)
    @uamethod
    def opcua_update_callback(self, parent):
        """UA 'Update' method: read node values and publish them to ROS."""
        try:
            for nodeName in self._nodes:
                child = self._nodes[nodeName]
                name = child.get_display_name().Text
                if hasattr(self.message_instance, name):
                    if child.get_node_class() == ua.NodeClass.Variable:
                        setattr(self.message_instance, name,
                                correct_type(child, type(getattr(self.message_instance, name))))
                    # NOTE(review): get_node_class is a method -- the missing
                    # call parentheses make this comparison always False, so
                    # nested (Object) members are never refreshed.
                    elif child.get_node_class == ua.NodeClass.Object:
                        setattr(self.message_instance, name, self.create_message_instance(child))
            self._publisher.publish(self.message_instance)
        except rospy.ROSException as e:
            rospy.logerr("Error when updating node " + self.name, e)
            self.server.server.delete_nodes([self.parent])
    def update_value(self, topic_name, message):
        """Write *message* (recursively) into the matching UA nodes."""
        if hasattr(message, '__slots__') and hasattr(message, '_slot_types'):
            for slot_name in message.__slots__:
                self.update_value(topic_name + '/' + slot_name, getattr(message, slot_name))
        elif type(message) in (list, tuple):
            if (len(message) > 0) and hasattr(message[0], '__slots__'):
                for index, slot in enumerate(message):
                    if topic_name + '[%d]' % index in self._nodes:
                        self.update_value(topic_name + '[%d]' % index, slot)
                    else:
                        if topic_name in self._nodes:
                            # NOTE(review): UA nodes have no .text() method,
                            # and the _recursive_create_items call below omits
                            # ``idx`` and shifts the remaining arguments; this
                            # branch will raise if ever reached -- confirm and
                            # fix the argument list.
                            base_type_str, _ = _extract_array_info(
                                self._nodes[topic_name].text(self.type_name))
                            self._recursive_create_items(self._nodes[topic_name], topic_name + '[%d]' % index,
                                                         base_type_str,
                                                         slot, None)
            # remove obsolete children
            if topic_name in self._nodes:
                if len(message) < len(self._nodes[topic_name].get_children()):
                    # NOTE(review): childCount() looks like a Qt leftover; UA
                    # nodes expose len(get_children()) instead.
                    for i in range(len(message), self._nodes[topic_name].childCount()):
                        item_topic_name = topic_name + '[%d]' % i
                        self.recursive_delete_items(self._nodes[item_topic_name])
                        del self._nodes[item_topic_name]
        else:
            if topic_name in self._nodes and self._nodes[topic_name] is not None:
                self._nodes[topic_name].set_value(repr(message))
    def recursive_delete_items(self, item):
        """Delete *item* and all of its children from the UA server."""
        # NOTE(review): unregister() runs on EVERY recursive call, not just
        # the top-level one; repeated unregistration is at best wasteful.
        self._publisher.unregister()
        self._subscriber.unregister()
        for child in item.get_children():
            self.recursive_delete_items(child)
            # NOTE(review): _nodes is keyed by topic-name strings, but
            # ``child`` is a node object -- this membership test can never
            # be true, so stale dict entries are left behind.
            if child in self._nodes:
                del self._nodes[child]
            self.server.server.delete_nodes([child])
        self.server.server.delete_nodes([item])
        if len(self.parent.get_children()) == 0:
            self.server.server.delete_nodes([self.parent])
    def create_message_instance(self, node):
        """Fill self.message_instance from the UA child values of *node*."""
        for child in node.get_children():
            name = child.get_display_name().Text
            if hasattr(self.message_instance, name):
                if child.get_node_class() == ua.NodeClass.Variable:
                    setattr(self.message_instance, name,
                            correct_type(child, type(getattr(self.message_instance, name))))
                # NOTE(review): same missing-parentheses bug as in
                # opcua_update_callback -- this branch is unreachable.
                elif child.get_node_class == ua.NodeClass.Object:
                    setattr(self.message_instance, name, self.create_message_instance(child))
        return self.message_instance  # Converts the value of the node to that specified in the ros message we are trying to fill. Casts python ints
    def recursive_create_objects(self, topic_name, idx, parent):
        """Create (or reuse) a UA object for every level of the topic path."""
        hierachy = topic_name.split('/')
        if len(hierachy) == 0 or len(hierachy) == 1:
            return parent
        for name in hierachy:
            if name != '':
                try:
                    nodewithsamename = self.server.find_topics_node_with_same_name(name, idx)
                    if nodewithsamename is not None:
                        return self.recursive_create_objects(ros_server.nextname(hierachy, hierachy.index(name)), idx,
                                                             nodewithsamename)
                    else:
                        # if for some reason 2 services with exactly same name are created use hack>: add random int, prob to hit two
                        # same ints 1/10000, should be sufficient
                        newparent = parent.add_object(
                            ua.NodeId(name, parent.nodeid.NamespaceIndex, ua.NodeIdType.String),
                            ua.QualifiedName(name, parent.nodeid.NamespaceIndex))
                        return self.recursive_create_objects(ros_server.nextname(hierachy, hierachy.index(name)), idx,
                                                             newparent)
                # thrown when node with parent name is not existent in server
                # NOTE(review): ``common`` is never imported in this module;
                # reaching this handler raises NameError instead of catching
                # the UA error -- confirm the intended import (opcua.common?).
                except (IndexError, common.UaError) as e:
                    newparent = parent.add_object(
                        ua.NodeId(name + str(random.randint(0, 10000)), parent.nodeid.NamespaceIndex,
                                  ua.NodeIdType.String),
                        ua.QualifiedName(name, parent.nodeid.NamespaceIndex))
                    return self.recursive_create_objects(ros_server.nextname(hierachy, hierachy.index(name)), idx,
                                                         newparent)
        return parent
# to unsigned integers as to fulfill ros specification. Currently only uses a few different types,
# no other types encountered so far.
def correct_type(node, typemessage):
    """Convert *node*'s value to the Python type expected by the ROS message.

    Returns the converted value, or None (after logging) when the node does
    not hold a ua.DataValue.
    """
    data_value = node.get_data_value()
    result = node.get_value()
    if isinstance(data_value, ua.DataValue):
        if typemessage.__name__ == "float":
            # numpy.float was an alias of builtin float and was removed in
            # NumPy 1.20; use the builtin directly.
            result = float(result)
        if typemessage.__name__ == "double":
            result = numpy.double(result)
        if typemessage.__name__ == "int":
            # Mask to unsigned 8-bit as the ROS uint8 convention used here
            # requires; NOTE(review): this truncates wider int fields.
            result = int(result) & 0xff
    else:
        # get_data_value is a method -- the original was missing the call
        # parentheses and would raise AttributeError on this error path.
        rospy.logerr("can't convert: " + str(node.get_data_value().Value))
        return None
    return result
def _extract_array_info(type_str):
array_size = None
if '[' in type_str and type_str[-1] == ']':
type_str, array_size_str = type_str.split('[', 1)
array_size_str = array_size_str[:-1]
if len(array_size_str) > 0:
array_size = int(array_size_str)
else:
array_size = 0
return type_str, array_size
def _create_node_with_type(parent, idx, topic_name, topic_text, type_name, array_size):
    """Create an OPC UA variable node for a primitive ROS type.

    Returns the new variable node, or None (after logging an error) when
    *type_name* is not a known primitive.
    """
    if '[' in type_name:
        type_name = type_name[:type_name.index('[')]
    # Default (value, VariantType) per ROS primitive; a mapping replaces the
    # original 17-branch if/elif chain.
    variants = {
        'bool': ua.Variant(False, ua.VariantType.Boolean),
        'byte': ua.Variant(0, ua.VariantType.Byte),
        'int': ua.Variant(0, ua.VariantType.Int32),
        'int8': ua.Variant(0, ua.VariantType.SByte),
        'uint8': ua.Variant(0, ua.VariantType.Byte),
        'int16': ua.Variant(0, ua.VariantType.Int16),
        'uint16': ua.Variant(0, ua.VariantType.UInt16),
        'int32': ua.Variant(0, ua.VariantType.Int32),
        'uint32': ua.Variant(0, ua.VariantType.UInt32),
        'int64': ua.Variant(0, ua.VariantType.Int64),
        'uint64': ua.Variant(0, ua.VariantType.UInt64),
        # NOTE(review): float64 kept on 32-bit Float to preserve existing
        # behaviour; consider VariantType.Double to avoid precision loss.
        'float': ua.Variant(0.0, ua.VariantType.Float),
        'float32': ua.Variant(0.0, ua.VariantType.Float),
        'float64': ua.Variant(0.0, ua.VariantType.Float),
        'double': ua.Variant(0.0, ua.VariantType.Double),
        'string': ua.Variant('', ua.VariantType.String),
    }
    dv = variants.get(type_name)
    if dv is None:
        rospy.logerr("can't create node with type" + str(type_name))
        return None
    # The original also built a throwaway list for array types; that dead
    # code was removed (array_size is otherwise unused here).
    return parent.add_variable(ua.NodeId(topic_name, parent.nodeid.NamespaceIndex),
                               ua.QualifiedName(topic_text, parent.nodeid.NamespaceIndex), dv.Value)
# Used to delete obsolete topics
def numberofsubscribers(nametolookfor, topicsDict):
    """Return the subscriber count for a topic.

    /rosout always has exactly one subscriber/publisher pair, so a constant
    is reported to keep the caller from garbage-collecting it.
    """
    if nametolookfor == "/rosout":
        return 2
    return topicsDict[nametolookfor]._subscriber.get_num_connections()
def refresh_topics_and_actions(namespace_ros, server, topicsdict, actionsdict, idx_topics, idx_actions, topics, actions):
    """Synchronise the UA address space with the currently published ROS topics.

    New topics/actions get UA trees created; topics that lost their
    subscribers or disappeared from ROS are deleted again.
    """
    ros_topics = rospy.get_published_topics(namespace_ros)
    rospy.logdebug(str(ros_topics))
    rospy.logdebug(str(rospy.get_published_topics('/move_base_simple')))
    for topic_name, topic_type in ros_topics:
        if topic_name not in topicsdict or topicsdict[topic_name] is None:
            splits = topic_name.split('/')
            # Actionlib topics are recognised by their conventional suffixes.
            if "cancel" in splits[-1] or "result" in splits[-1] or "feedback" in splits[-1] or "goal" in splits[-1] or "status" in splits[-1]:
                rospy.logdebug("Found an action: " + str(topic_name))
                correct_name = ros_actions.get_correct_name(topic_name)
                if correct_name not in actionsdict:
                    try:
                        rospy.loginfo("Creating Action with name: " + correct_name)
                        actionsdict[correct_name] = ros_actions.OpcUaROSAction(server,
                                                                              actions,
                                                                              idx_actions,
                                                                              correct_name,
                                                                              get_goal_type(correct_name),
                                                                              get_feedback_type(correct_name))
                    except (ValueError, TypeError, AttributeError) as e:
                        print(e)
                        rospy.logerr("Error while creating Action Objects for Action " + topic_name)
            else:
                # rospy.loginfo("Ignoring normal topics for debugging...")
                topic = OpcUaROSTopic(server, topics, idx_topics, topic_name, topic_type)
                topicsdict[topic_name] = topic
        elif numberofsubscribers(topic_name, topicsdict) <= 1 and "rosout" not in topic_name:
            # Only our own bridge subscriber is left -> topic is abandoned.
            topicsdict[topic_name].recursive_delete_items(server.server.get_node(ua.NodeId(topic_name, idx_topics)))
            del topicsdict[topic_name]
            ros_server.own_rosnode_cleanup()
    ros_topics = rospy.get_published_topics(namespace_ros)
    # use to not get dict changed during iteration errors
    tobedeleted = []
    for topic_nameOPC in topicsdict:
        found = False
        for topicROS, topic_type in ros_topics:
            if topic_nameOPC == topicROS:
                found = True
        if not found:
            # NOTE(review): this calls server.get_node while the deletion
            # above uses server.server.get_node -- one of the two is almost
            # certainly wrong; confirm which object exposes get_node.
            topicsdict[topic_nameOPC].recursive_delete_items(server.get_node(ua.NodeId(topic_nameOPC, idx_topics)))
            tobedeleted.append(topic_nameOPC)
    for name in tobedeleted:
        del topicsdict[name]
    ros_actions.refresh_dict(namespace_ros, actionsdict, topicsdict, server, idx_actions)
def get_feedback_type(action_name):
    """Return the message type name of an action's feedback topic.

    Tries ``<action_name>/feedback`` first and falls back to the
    capitalized ``<action_name>/Feedback`` spelling.

    :param action_name: fully-qualified action namespace (e.g. "/move_base")
    :returns: the topic type string, or None when it cannot be determined
    """
    try:
        # rostopic.get_topic_type returns a (type, name, fn) tuple.
        topic_type, _name, _fn = rostopic.get_topic_type(action_name + "/feedback")
        return topic_type
    except rospy.ROSException:
        try:
            # BUG FIX: the original passed the caught exception as a second
            # positional argument, which rostopic.get_topic_type interprets
            # as the truthy 'blocking' flag and would block indefinitely.
            topic_type, _name, _fn = rostopic.get_topic_type(action_name + "/Feedback")
            return topic_type
        except rospy.ROSException as e2:
            # BUG FIX: rospy.logerr applies printf-style formatting, so the
            # exception must be consumed by a placeholder, not appended as a
            # bare extra argument (which raises TypeError).
            rospy.logerr("Couldn't find feedback type for action %s: %s",
                         action_name, e2)
            return None
def get_goal_type(action_name):
    """Return the message type name of an action's goal topic.

    Tries ``<action_name>/goal`` first and falls back to the
    capitalized ``<action_name>/Goal`` spelling.

    :param action_name: fully-qualified action namespace (e.g. "/move_base")
    :returns: the topic type string, or None when it cannot be determined
    """
    try:
        # rostopic.get_topic_type returns a (type, name, fn) tuple.
        topic_type, _name, _fn = rostopic.get_topic_type(action_name + "/goal")
        return topic_type
    except rospy.ROSException:
        try:
            # BUG FIX: the original passed the caught exception as a second
            # positional argument, which rostopic.get_topic_type interprets
            # as the truthy 'blocking' flag and would block indefinitely.
            topic_type, _name, _fn = rostopic.get_topic_type(action_name + "/Goal")
            return topic_type
        except rospy.ROSException as e2:
            # BUG FIX: rospy.logerr applies printf-style formatting, so the
            # exception must be consumed by a placeholder, not appended as a
            # bare extra argument (which raises TypeError).
            rospy.logerr("Couldn't find goal type for action %s: %s",
                         action_name, e2)
            return None
|
iirob/ros_opcua_communication
|
ros_opcua_impl_python_opcua/scripts/ros_topics.py
|
Python
|
lgpl-3.0
| 16,231
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Quotas for shares."""
import datetime
from oslo_config import cfg
from oslo_log import log
from oslo_utils import importutils
from oslo_utils import timeutils
import six
from manila import db
from manila import exception
from manila.i18n import _LE
LOG = log.getLogger(__name__)
# Quota limits and reservation tuning knobs; every option below can be
# overridden in the service configuration file.
quota_opts = [
    cfg.IntOpt('quota_shares',
               default=50,
               help='Number of shares allowed per project.'),
    cfg.IntOpt('quota_snapshots',
               default=50,
               help='Number of share snapshots allowed per project.'),
    cfg.IntOpt('quota_gigabytes',
               default=1000,
               help='Number of share gigabytes allowed per project.'),
    cfg.IntOpt('quota_snapshot_gigabytes',
               default=1000,
               help='Number of snapshot gigabytes allowed per project.'),
    cfg.IntOpt('quota_share_networks',
               default=10,
               help='Number of share-networks allowed per project.'),
    cfg.IntOpt('reservation_expire',
               default=86400,
               help='Number of seconds until a reservation expires.'),
    cfg.IntOpt('until_refresh',
               default=0,
               help='Count of reservations until usage is refreshed.'),
    cfg.IntOpt('max_age',
               default=0,
               help='Number of seconds between subsequent usage refreshes.'),
    # Dotted path of the driver class; QuotaEngine._driver imports it lazily.
    cfg.StrOpt('quota_driver',
               default='manila.quota.DbQuotaDriver',
               help='Default driver to use for quota checks.'), ]
CONF = cfg.CONF
CONF.register_opts(quota_opts)
class DbQuotaDriver(object):
    """Database Quota driver.
    Driver to perform necessary checks to enforce quotas and obtain
    quota information. The default driver utilizes the local
    database.
    """
    def get_by_project_and_user(self, context, project_id, user_id, resource):
        """Get a specific quota by project and user."""
        # NOTE(review): db.quota_get is called here with four positional
        # arguments but with three in get_by_project below — confirm the db
        # API accepts user_id positionally in this slot.
        return db.quota_get(context, project_id, user_id, resource)
    def get_by_project(self, context, project_id, resource):
        """Get a specific quota by project."""
        return db.quota_get(context, project_id, resource)
    def get_by_class(self, context, quota_class, resource):
        """Get a specific quota by quota class."""
        return db.quota_class_get(context, quota_class, resource)
    def get_defaults(self, context, resources):
        """Given a list of resources, retrieve the default quotas.
        :param context: The request context, for access checks.
        :param resources: A dictionary of the registered resources.
        """
        quotas = {}
        # Values stored under the special 'default' quota class override the
        # per-resource configured defaults.
        default_quotas = db.quota_class_get_default(context)
        for resource in resources.values():
            quotas[resource.name] = default_quotas.get(resource.name,
                                                       resource.default)
        return quotas
    def get_class_quotas(self, context, resources, quota_class,
                         defaults=True):
        """Retrieve quotas for a quota class.
        Given a list of resources, retrieve the quotas for the given
        quota class.
        :param context: The request context, for access checks.
        :param resources: A dictionary of the registered resources.
        :param quota_class: The name of the quota class to return
                            quotas for.
        :param defaults: If True, the default value will be reported
                         if there is no specific value for the
                         resource.
        """
        quotas = {}
        class_quotas = db.quota_class_get_all_by_name(context, quota_class)
        for resource in resources.values():
            if defaults or resource.name in class_quotas:
                quotas[resource.name] = class_quotas.get(resource.name,
                                                         resource.default)
        return quotas
    def _process_quotas(self, context, resources, project_id, quotas,
                        quota_class=None, defaults=True, usages=None,
                        remains=False):
        # Merge raw project quotas with class and default values, and
        # optionally attach usage counts and remaining-headroom information.
        modified_quotas = {}
        # Get the quotas for the appropriate class. If the project ID
        # matches the one in the context, we use the quota_class from
        # the context, otherwise, we use the provided quota_class (if
        # any)
        if project_id == context.project_id:
            quota_class = context.quota_class
        if quota_class:
            class_quotas = db.quota_class_get_all_by_name(context, quota_class)
        else:
            class_quotas = {}
        default_quotas = self.get_defaults(context, resources)
        for resource in resources.values():
            # Omit default/quota class values
            if not defaults and resource.name not in quotas:
                continue
            # Resolution order: explicit project quota, then class quota,
            # then the default.
            limit = quotas.get(
                resource.name,
                class_quotas.get(resource.name, default_quotas[resource.name]))
            modified_quotas[resource.name] = dict(limit=limit)
            # Include usages if desired. This is optional because one
            # internal consumer of this interface wants to access the
            # usages directly from inside a transaction.
            if usages:
                usage = usages.get(resource.name, {})
                modified_quotas[resource.name].update(
                    in_use=usage.get('in_use', 0),
                    reserved=usage.get('reserved', 0),
                )
            # Initialize remains quotas.
            if remains:
                modified_quotas[resource.name].update(remains=limit)
        if remains:
            # NOTE(review): each row returned by quota_get_all appears to be a
            # per-user quota whose hard_limit is subtracted from the project
            # limit to compute the still-assignable headroom — confirm against
            # the db API.
            all_quotas = db.quota_get_all(context, project_id)
            for quota in all_quotas:
                if quota.resource in modified_quotas:
                    modified_quotas[quota.resource]['remains'] -= (
                        quota.hard_limit)
        return modified_quotas
    def get_project_quotas(self, context, resources, project_id,
                           quota_class=None, defaults=True,
                           usages=True, remains=False):
        """Retrieve quotas for project.
        Given a list of resources, retrieve the quotas for the given
        project.
        :param context: The request context, for access checks.
        :param resources: A dictionary of the registered resources.
        :param project_id: The ID of the project to return quotas for.
        :param quota_class: If project_id != context.project_id, the
                            quota class cannot be determined. This
                            parameter allows it to be specified. It
                            will be ignored if project_id ==
                            context.project_id.
        :param defaults: If True, the quota class value (or the
                         default value, if there is no value from the
                         quota class) will be reported if there is no
                         specific value for the resource.
        :param usages: If True, the current in_use and reserved counts
                       will also be returned.
        :param remains: If True, the current remains of the project
                        will be returned.
        """
        project_quotas = db.quota_get_all_by_project(context, project_id)
        project_usages = None
        if usages:
            project_usages = db.quota_usage_get_all_by_project(context,
                                                               project_id)
        return self._process_quotas(context, resources, project_id,
                                    project_quotas, quota_class,
                                    defaults=defaults, usages=project_usages,
                                    remains=remains)
    def get_user_quotas(self, context, resources, project_id, user_id,
                        quota_class=None, defaults=True,
                        usages=True):
        """Retrieve quotas for user and project.
        Given a list of resources, retrieve the quotas for the given
        user and project.
        :param context: The request context, for access checks.
        :param resources: A dictionary of the registered resources.
        :param project_id: The ID of the project to return quotas for.
        :param user_id: The ID of the user to return quotas for.
        :param quota_class: If project_id != context.project_id, the
                            quota class cannot be determined. This
                            parameter allows it to be specified. It
                            will be ignored if project_id ==
                            context.project_id.
        :param defaults: If True, the quota class value (or the
                         default value, if there is no value from the
                         quota class) will be reported if there is no
                         specific value for the resource.
        :param usages: If True, the current in_use and reserved counts
                       will also be returned.
        """
        user_quotas = db.quota_get_all_by_project_and_user(context,
                                                           project_id, user_id)
        # Use the project quota for default user quota.
        proj_quotas = db.quota_get_all_by_project(context, project_id)
        # six.iteritems keeps this lazy on Python 2; behaves like .items() on 3.
        for key, value in six.iteritems(proj_quotas):
            if key not in user_quotas.keys():
                user_quotas[key] = value
        user_usages = None
        if usages:
            user_usages = db.quota_usage_get_all_by_project_and_user(
                context, project_id, user_id)
        return self._process_quotas(context, resources, project_id,
                                    user_quotas, quota_class,
                                    defaults=defaults, usages=user_usages)
    def get_settable_quotas(self, context, resources, project_id,
                            user_id=None):
        """Retrieve range of settable quotas.
        Given a list of resources, retrieve the range of settable quotas for
        the given user or project.
        :param context: The request context, for access checks.
        :param resources: A dictionary of the registered resources.
        :param project_id: The ID of the project to return quotas for.
        :param user_id: The ID of the user to return quotas for.
        """
        settable_quotas = {}
        project_quotas = self.get_project_quotas(context, resources,
                                                 project_id, remains=True)
        if user_id:
            user_quotas = self.get_user_quotas(context, resources,
                                               project_id, user_id)
            setted_quotas = db.quota_get_all_by_project_and_user(
                context, project_id, user_id)
            for key, value in user_quotas.items():
                # A user quota may be raised up to the project's remaining
                # headroom plus whatever is already explicitly set for them.
                maximum = (project_quotas[key]['remains'] +
                           setted_quotas.get(key, 0))
                settable_quotas[key] = dict(
                    minimum=value['in_use'] + value['reserved'],
                    maximum=maximum)
        else:
            for key, value in project_quotas.items():
                # The project limit cannot be lowered below either what is
                # already delegated to users or what is currently consumed.
                minimum = max(int(value['limit'] - value['remains']),
                              int(value['in_use'] + value['reserved']))
                settable_quotas[key] = dict(minimum=minimum, maximum=-1)
        return settable_quotas
    def _get_quotas(self, context, resources, keys, has_sync, project_id=None,
                    user_id=None):
        """Retrieve quotas for a resource.
        A helper method which retrieves the quotas for the specific
        resources identified by keys, and which apply to the current
        context.
        :param context: The request context, for access checks.
        :param resources: A dictionary of the registered resources.
        :param keys: A list of the desired quotas to retrieve.
        :param has_sync: If True, indicates that the resource must
                         have a sync attribute; if False, indicates
                         that the resource must NOT have a sync
                         attribute.
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        :param user_id: Specify the user_id if current context
                        is admin and admin wants to impact on
                        common user.
        """
        # Filter resources
        if has_sync:
            sync_filt = lambda x: hasattr(x, 'sync')
        else:
            sync_filt = lambda x: not hasattr(x, 'sync')
        desired = set(keys)
        sub_resources = dict((k, v) for k, v in resources.items()
                             if k in desired and sync_filt(v))
        # Make sure we accounted for all of them...
        if len(keys) != len(sub_resources):
            unknown = desired - set(sub_resources.keys())
            raise exception.QuotaResourceUnknown(unknown=sorted(unknown))
        if user_id:
            # Grab and return the quotas (without usages)
            quotas = self.get_user_quotas(context, sub_resources,
                                          project_id, user_id,
                                          context.quota_class, usages=False)
        else:
            # Grab and return the quotas (without usages)
            quotas = self.get_project_quotas(context, sub_resources,
                                             project_id,
                                             context.quota_class,
                                             usages=False)
        return dict((k, v['limit']) for k, v in quotas.items())
    def limit_check(self, context, resources, values, project_id=None,
                    user_id=None):
        """Check simple quota limits.
        For limits--those quotas for which there is no usage
        synchronization function--this method checks that a set of
        proposed values are permitted by the limit restriction.
        This method will raise a QuotaResourceUnknown exception if a
        given resource is unknown or if it is not a simple limit
        resource.
        If any of the proposed values is over the defined quota, an
        OverQuota exception will be raised with the sorted list of the
        resources which are too high. Otherwise, the method returns
        nothing.
        :param context: The request context, for access checks.
        :param resources: A dictionary of the registered resources.
        :param values: A dictionary of the values to check against the
                       quota.
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        :param user_id: Specify the user_id if current context
                        is admin and admin wants to impact on
                        common user.
        """
        # Ensure no value is less than zero
        unders = [key for key, val in values.items() if val < 0]
        if unders:
            raise exception.InvalidQuotaValue(unders=sorted(unders))
        # If project_id is None, then we use the project_id in context
        if project_id is None:
            project_id = context.project_id
        # If user id is None, then we use the user_id in context
        if user_id is None:
            user_id = context.user_id
        # Get the applicable quotas
        quotas = self._get_quotas(context, resources, values.keys(),
                                  has_sync=False, project_id=project_id)
        user_quotas = self._get_quotas(context, resources, values.keys(),
                                       has_sync=False, project_id=project_id,
                                       user_id=user_id)
        # Check the quotas and construct a list of the resources that
        # would be put over limit by the desired values.
        # A negative limit means "unlimited", so only non-negative limits
        # are enforced; both the project-level and user-level limits must
        # admit the proposed value.
        overs = [key for key, val in values.items()
                 if (quotas[key] >= 0 and quotas[key] < val) or
                 (user_quotas[key] >= 0 and user_quotas[key] < val)]
        if overs:
            raise exception.OverQuota(overs=sorted(overs), quotas=quotas,
                                      usages={})
    def reserve(self, context, resources, deltas, expire=None,
                project_id=None, user_id=None):
        """Check quotas and reserve resources.
        For counting quotas--those quotas for which there is a usage
        synchronization function--this method checks quotas against
        current usage and the desired deltas.
        This method will raise a QuotaResourceUnknown exception if a
        given resource is unknown or if it does not have a usage
        synchronization function.
        If any of the proposed values is over the defined quota, an
        OverQuota exception will be raised with the sorted list of the
        resources which are too high. Otherwise, the method returns a
        list of reservation UUIDs which were created.
        :param context: The request context, for access checks.
        :param resources: A dictionary of the registered resources.
        :param deltas: A dictionary of the proposed delta changes.
        :param expire: An optional parameter specifying an expiration
                       time for the reservations. If it is a simple
                       number, it is interpreted as a number of
                       seconds and added to the current time; if it is
                       a datetime.timedelta object, it will also be
                       added to the current time. A datetime.datetime
                       object will be interpreted as the absolute
                       expiration time. If None is specified, the
                       default expiration time set by
                       --default-reservation-expire will be used (this
                       value will be treated as a number of seconds).
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        :param user_id: Specify the user_id if current context
                        is admin and admin wants to impact on
                        common user.
        """
        # Set up the reservation expiration: normalize int -> timedelta ->
        # absolute datetime, rejecting anything else.
        if expire is None:
            expire = CONF.reservation_expire
        if isinstance(expire, six.integer_types):
            expire = datetime.timedelta(seconds=expire)
        if isinstance(expire, datetime.timedelta):
            expire = timeutils.utcnow() + expire
        if not isinstance(expire, datetime.datetime):
            raise exception.InvalidReservationExpiration(expire=expire)
        # If project_id is None, then we use the project_id in context
        if project_id is None:
            project_id = context.project_id
        # If user_id is None, then we use the project_id in context
        if user_id is None:
            user_id = context.user_id
        # Get the applicable quotas.
        # NOTE(Vek): We're not worried about races at this point.
        #            Yes, the admin may be in the process of reducing
        #            quotas, but that's a pretty rare thing.
        quotas = self._get_quotas(context, resources, deltas.keys(),
                                  has_sync=True, project_id=project_id)
        user_quotas = self._get_quotas(context, resources, deltas.keys(),
                                       has_sync=True, project_id=project_id,
                                       user_id=user_id)
        # NOTE(Vek): Most of the work here has to be done in the DB
        #            API, because we have to do it in a transaction,
        #            which means access to the session. Since the
        #            session isn't available outside the DBAPI, we
        #            have to do the work there.
        return db.quota_reserve(context, resources, quotas, user_quotas,
                                deltas, expire,
                                CONF.until_refresh, CONF.max_age,
                                project_id=project_id, user_id=user_id)
    def commit(self, context, reservations, project_id=None, user_id=None):
        """Commit reservations.
        :param context: The request context, for access checks.
        :param reservations: A list of the reservation UUIDs, as
                             returned by the reserve() method.
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        :param user_id: Specify the user_id if current context
                        is admin and admin wants to impact on
                        common user.
        """
        # If project_id is None, then we use the project_id in context
        if project_id is None:
            project_id = context.project_id
        # If user_id is None, then we use the user_id in context
        if user_id is None:
            user_id = context.user_id
        db.reservation_commit(context, reservations, project_id=project_id,
                              user_id=user_id)
    def rollback(self, context, reservations, project_id=None, user_id=None):
        """Roll back reservations.
        :param context: The request context, for access checks.
        :param reservations: A list of the reservation UUIDs, as
                             returned by the reserve() method.
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        :param user_id: Specify the user_id if current context
                        is admin and admin wants to impact on
                        common user.
        """
        # If project_id is None, then we use the project_id in context
        if project_id is None:
            project_id = context.project_id
        # If user_id is None, then we use the user_id in context
        if user_id is None:
            user_id = context.user_id
        db.reservation_rollback(context, reservations, project_id=project_id,
                                user_id=user_id)
    def usage_reset(self, context, resources):
        """Reset usage records.
        Reset the usage records for a particular user on a list of
        resources. This will force that user's usage records to be
        refreshed the next time a reservation is made.
        Note: this does not affect the currently outstanding
        reservations the user has; those reservations must be
        committed or rolled back (or expired).
        :param context: The request context, for access checks.
        :param resources: A list of the resource names for which the
                          usage must be reset.
        """
        # We need an elevated context for the calls to
        # quota_usage_update()
        elevated = context.elevated()
        for resource in resources:
            try:
                # Reset the usage to -1, which will force it to be
                # refreshed
                db.quota_usage_update(elevated, context.project_id,
                                      context.user_id,
                                      resource, in_use=-1)
            except exception.QuotaUsageNotFound:
                # That means it'll be refreshed anyway
                pass
    def destroy_all_by_project(self, context, project_id):
        """Destroy metadata associated with a project.
        Destroy all quotas, usages, and reservations associated with a
        project.
        :param context: The request context, for access checks.
        :param project_id: The ID of the project being deleted.
        """
        db.quota_destroy_all_by_project(context, project_id)
    def destroy_all_by_project_and_user(self, context, project_id, user_id):
        """Destroy metadata associated with a project and user.
        Destroy all quotas, usages, and reservations associated with a
        project and user.
        :param context: The request context, for access checks.
        :param project_id: The ID of the project being deleted.
        :param user_id: The ID of the user being deleted.
        """
        db.quota_destroy_all_by_project_and_user(context, project_id, user_id)
    def expire(self, context):
        """Expire reservations.
        Explores all currently existing reservations and rolls back
        any that have expired.
        :param context: The request context, for access checks.
        """
        db.reservation_expire(context)
class BaseResource(object):
    """A single named resource that can be limited by quota."""

    def __init__(self, name, flag=None):
        """Create the resource.

        :param name: The name of the resource, i.e., "shares".
        :param flag: Name of the configuration option that supplies the
                     default quota value for this resource, if any.
        """
        self.name = name
        self.flag = flag

    def quota(self, driver, context, **kwargs):
        """Look up the effective quota value for this resource.

        Resolution order: a project-specific quota (when a project ID is
        available), then a quota-class quota (when a quota class is
        available), and finally the configured default.

        :param driver: A quota driver.
        :param context: The request context.
        :param project_id: Project to look up; taken from the context
                           when not supplied. Pass None to skip the
                           project-specific lookup.
        :param quota_class: Quota class to look up; taken from the
                            context when not supplied. Pass None to
                            skip the class-specific lookup.
        """
        project_id = kwargs.get('project_id', context.project_id)
        quota_class = kwargs.get('quota_class', context.quota_class)
        lookups = (
            (project_id, driver.get_by_project,
             exception.ProjectQuotaNotFound),
            (quota_class, driver.get_by_class,
             exception.QuotaClassNotFound),
        )
        for key, getter, not_found in lookups:
            if not key:
                continue
            try:
                return getter(context, key, self.name)
            except not_found:
                pass
        # No project- or class-specific quota exists; fall back.
        return self.default

    @property
    def default(self):
        """Default quota: the configured flag's value, or unlimited (-1)."""
        return -1 if not self.flag else CONF[self.flag]
class ReservableResource(BaseResource):
    """A resource whose usage is tracked through reservations.

    Reservable resources map directly to objects in the database
    (shares, gigabytes, ...) and carry a usage synchronization
    callable. That callable receives an admin context, the project ID,
    and an opaque session object (to be forwarded to the underlying
    database function) and returns a dict mapping resource names to
    their current in_use counts; it may report counts for several
    resources at once, and the same callable may be shared by multiple
    ReservableResource instances.
    """

    def __init__(self, name, sync, flag=None):
        """Create a reservable resource.

        :param name: The name of the resource, i.e., "shares".
        :param sync: Callable returning a dict that resynchronizes the
                     in_use count for one or more resources (see class
                     docstring).
        :param flag: Name of the configuration option that supplies
                     the default quota value for this resource.
        """
        self.sync = sync
        super(ReservableResource, self).__init__(name, flag=flag)
class AbsoluteResource(BaseResource):
    """Describe a non-reservable resource.

    Absolute resources have no usage synchronization callable, so they
    are enforced via limit_check() rather than reserve().
    """
    pass
class CountableResource(AbsoluteResource):
    """A resource counted by an arbitrary callable.

    Useful when the count is not based solely on the project ID: the
    resource still maps to database objects, but a simple per-project
    count is inappropriate, so the caller supplies the counting logic.
    """

    def __init__(self, name, count, flag=None):
        """Create a countable resource.

        The counting callable receives the context plus whatever extra
        positional and keyword arguments are passed to Quota.count()
        and returns an integer count. Note that this counting is not
        performed in a transaction-safe manner; this class is a
        temporary measure until a better approach evolves.

        :param name: The name of the resource, i.e., "shares".
        :param count: Callable returning the current count of the
                      resource, as described above.
        :param flag: Name of the configuration option that supplies
                     the default quota value for this resource.
        """
        self.count = count
        super(CountableResource, self).__init__(name, flag=flag)
class QuotaEngine(object):
"""Represent the set of recognized quotas."""
def __init__(self, quota_driver_class=None):
"""Initialize a Quota object."""
self._resources = {}
self._driver_cls = quota_driver_class
self.__driver = None
@property
def _driver(self):
if self.__driver:
return self.__driver
if not self._driver_cls:
self._driver_cls = CONF.quota_driver
if isinstance(self._driver_cls, six.string_types):
self._driver_cls = importutils.import_object(self._driver_cls)
self.__driver = self._driver_cls
return self.__driver
def __contains__(self, resource):
return resource in self._resources
def register_resource(self, resource):
"""Register a resource."""
self._resources[resource.name] = resource
def register_resources(self, resources):
"""Register a list of resources."""
for resource in resources:
self.register_resource(resource)
def get_by_project_and_user(self, context, project_id, user_id, resource):
"""Get a specific quota by project and user."""
return self._driver.get_by_project_and_user(context, project_id,
user_id, resource)
def get_by_project(self, context, project_id, resource):
"""Get a specific quota by project."""
return self._driver.get_by_project(context, project_id, resource)
def get_by_class(self, context, quota_class, resource):
"""Get a specific quota by quota class."""
return self._driver.get_by_class(context, quota_class, resource)
def get_defaults(self, context):
"""Retrieve the default quotas.
:param context: The request context, for access checks.
"""
return self._driver.get_defaults(context, self._resources)
def get_class_quotas(self, context, quota_class, defaults=True):
"""Retrieve the quotas for the given quota class.
:param context: The request context, for access checks.
:param quota_class: The name of the quota class to return
quotas for.
:param defaults: If True, the default value will be reported
if there is no specific value for the
resource.
"""
return self._driver.get_class_quotas(context, self._resources,
quota_class, defaults=defaults)
def get_user_quotas(self, context, project_id, user_id, quota_class=None,
defaults=True, usages=True):
"""Retrieve the quotas for the given user and project.
:param context: The request context, for access checks.
:param project_id: The ID of the project to return quotas for.
:param user_id: The ID of the user to return quotas for.
:param quota_class: If project_id != context.project_id, the
quota class cannot be determined. This
parameter allows it to be specified.
:param defaults: If True, the quota class value (or the
default value, if there is no value from the
quota class) will be reported if there is no
specific value for the resource.
:param usages: If True, the current in_use and reserved counts
will also be returned.
"""
return self._driver.get_user_quotas(context, self._resources,
project_id, user_id,
quota_class=quota_class,
defaults=defaults,
usages=usages)
def get_project_quotas(self, context, project_id, quota_class=None,
defaults=True, usages=True, remains=False):
"""Retrieve the quotas for the given project.
:param context: The request context, for access checks.
:param project_id: The ID of the project to return quotas for.
:param quota_class: If project_id != context.project_id, the
quota class cannot be determined. This
parameter allows it to be specified.
:param defaults: If True, the quota class value (or the
default value, if there is no value from the
quota class) will be reported if there is no
specific value for the resource.
:param usages: If True, the current in_use and reserved counts
will also be returned.
:param remains: If True, the current remains of the project will
will be returned.
"""
return self._driver.get_project_quotas(context, self._resources,
project_id,
quota_class=quota_class,
defaults=defaults,
usages=usages,
remains=remains)
def get_settable_quotas(self, context, project_id, user_id=None):
"""Get settable quotas.
Given a list of resources, retrieve the range of settable quotas for
the given user or project.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param project_id: The ID of the project to return quotas for.
:param user_id: The ID of the user to return quotas for.
"""
return self._driver.get_settable_quotas(context, self._resources,
project_id,
user_id=user_id)
def count(self, context, resource, *args, **kwargs):
"""Count a resource.
For countable resources, invokes the count() function and
returns its result. Arguments following the context and
resource are passed directly to the count function declared by
the resource.
:param context: The request context, for access checks.
:param resource: The name of the resource, as a string.
"""
# Get the resource
res = self._resources.get(resource)
if not res or not hasattr(res, 'count'):
raise exception.QuotaResourceUnknown(unknown=[resource])
return res.count(context, *args, **kwargs)
def limit_check(self, context, project_id=None, user_id=None, **values):
"""Check simple quota limits.
For limits--those quotas for which there is no usage
synchronization function--this method checks that a set of
proposed values are permitted by the limit restriction. The
values to check are given as keyword arguments, where the key
identifies the specific quota limit to check, and the value is
the proposed value.
This method will raise a QuotaResourceUnknown exception if a
given resource is unknown or if it is not a simple limit
resource.
If any of the proposed values is over the defined quota, an
OverQuota exception will be raised with the sorted list of the
resources which are too high. Otherwise, the method returns
nothing.
:param context: The request context, for access checks.
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
:param user_id: Specify the user_id if current context
is admin and admin wants to impact on
common user.
"""
return self._driver.limit_check(context, self._resources, values,
project_id=project_id, user_id=user_id)
def reserve(self, context, expire=None, project_id=None, user_id=None,
**deltas):
"""Check quotas and reserve resources.
For counting quotas--those quotas for which there is a usage
synchronization function--this method checks quotas against
current usage and the desired deltas. The deltas are given as
keyword arguments, and current usage and other reservations
are factored into the quota check.
This method will raise a QuotaResourceUnknown exception if a
given resource is unknown or if it does not have a usage
synchronization function.
If any of the proposed values is over the defined quota, an
OverQuota exception will be raised with the sorted list of the
resources which are too high. Otherwise, the method returns a
list of reservation UUIDs which were created.
:param context: The request context, for access checks.
:param expire: An optional parameter specifying an expiration
time for the reservations. If it is a simple
number, it is interpreted as a number of
seconds and added to the current time; if it is
a datetime.timedelta object, it will also be
added to the current time. A datetime.datetime
object will be interpreted as the absolute
expiration time. If None is specified, the
default expiration time set by
--default-reservation-expire will be used (this
value will be treated as a number of seconds).
:param project_id: Specify the project_id if current context
is admin and admin wants to impact on
common user's tenant.
"""
reservations = self._driver.reserve(context, self._resources, deltas,
expire=expire,
project_id=project_id,
user_id=user_id)
LOG.debug("Created reservations %s", reservations)
return reservations
    def commit(self, context, reservations, project_id=None, user_id=None):
        """Commit reservations, applying them to the recorded usage.

        :param context: The request context, for access checks.
        :param reservations: A list of the reservation UUIDs, as
                             returned by the reserve() method.
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        :param user_id: Specify the user_id if current context
                        is admin and admin wants to impact on
                        common user.
        """
        try:
            self._driver.commit(context, reservations, project_id=project_id,
                                user_id=user_id)
        except Exception:
            # NOTE(Vek): Ignoring exceptions here is safe, because the
            # usage resynchronization and the reservation expiration
            # mechanisms will resolve the issue. The exception is
            # logged, however, because this is less than optimal.
            LOG.exception(_LE("Failed to commit reservations %s"),
                          reservations)
            return
        LOG.debug("Committed reservations %s", reservations)
    def rollback(self, context, reservations, project_id=None, user_id=None):
        """Roll back reservations, releasing the reserved amounts.

        :param context: The request context, for access checks.
        :param reservations: A list of the reservation UUIDs, as
                             returned by the reserve() method.
        :param project_id: Specify the project_id if current context
                           is admin and admin wants to impact on
                           common user's tenant.
        :param user_id: Specify the user_id if current context
                        is admin and admin wants to impact on
                        common user.
        """
        try:
            self._driver.rollback(context, reservations, project_id=project_id,
                                  user_id=user_id)
        except Exception:
            # NOTE(Vek): Ignoring exceptions here is safe, because the
            # usage resynchronization and the reservation expiration
            # mechanisms will resolve the issue. The exception is
            # logged, however, because this is less than optimal.
            LOG.exception(_LE("Failed to roll back reservations %s"),
                          reservations)
            return
        LOG.debug("Rolled back reservations %s", reservations)
def usage_reset(self, context, resources):
"""Reset usage records.
Reset the usage records for a particular user on a list of
resources. This will force that user's usage records to be
refreshed the next time a reservation is made.
Note: this does not affect the currently outstanding
reservations the user has; those reservations must be
committed or rolled back (or expired).
:param context: The request context, for access checks.
:param resources: A list of the resource names for which the
usage must be reset.
"""
self._driver.usage_reset(context, resources)
    def destroy_all_by_project_and_user(self, context, project_id, user_id):
        """Destroy metadata associated with a project and user.

        Destroy all quotas, usages, and reservations associated with a
        project and user.

        :param context: The request context, for access checks.
        :param project_id: The ID of the project being deleted.
        :param user_id: The ID of the user being deleted.
        """
        # Deletion is delegated entirely to the configured quota driver.
        self._driver.destroy_all_by_project_and_user(context,
                                                     project_id, user_id)
def destroy_all_by_project(self, context, project_id):
"""Destroy metadata associated with a project.
Destroy all quotas, usages, and reservations associated with a
project.
:param context: The request context, for access checks.
:param project_id: The ID of the project being deleted.
"""
self._driver.destroy_all_by_project(context, project_id)
def expire(self, context):
"""Expire reservations.
Explores all currently existing reservations and rolls back
any that have expired.
:param context: The request context, for access checks.
"""
self._driver.expire(context)
@property
def resources(self):
return sorted(self._resources.keys())
# Global quota engine singleton used throughout the service.
QUOTAS = QuotaEngine()
# Reservable resources tracked by the engine.  Each entry names the
# resource, its usage synchronization function, and the configuration
# option that holds the default quota for it.
resources = [
    ReservableResource('shares', '_sync_shares', 'quota_shares'),
    ReservableResource('snapshots', '_sync_snapshots', 'quota_snapshots'),
    ReservableResource('gigabytes', '_sync_gigabytes', 'quota_gigabytes'),
    ReservableResource('snapshot_gigabytes', '_sync_snapshot_gigabytes',
                       'quota_snapshot_gigabytes'),
    ReservableResource('share_networks', '_sync_share_networks',
                       'quota_share_networks'),
]
QUOTAS.register_resources(resources)
|
jcsp/manila
|
manila/quota.py
|
Python
|
apache-2.0
| 46,765
|
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
# NOTE: these are placeholders -- substitute real credentials before running.
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
# Get an object from its sid. If you do not have a sid,
# check out the list resource examples on this page
domain = client.sip \
               .domains("SD27f0288630a668bdfbf177f8e22f5ccc") \
               .fetch()
# Print the HTTP method Twilio uses for the domain's voice URL callbacks.
print(domain.voice_method)
|
teoreteetik/api-snippets
|
rest/sip-in/get-domain-instance/get-domain-instance.6.x.py
|
Python
|
mit
| 516
|
#!/usr/bin/env python
# example images.py
import pygtk
pygtk.require('2.0')
import gtk
class ImagesExample:
    """Demo window that composites a patch image onto a base image at
    several (x, y) offsets and shows each result inside a button."""

    # when invoked (via signal delete_event), terminates the application.
    def close_application(self, widget, event, data=None):
        gtk.main_quit()
        return False

    def __init__(self):
        # create the main window, and attach delete_event signal to terminating
        # the application
        window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        window.connect("delete_event", self.close_application)
        window.set_border_width(10)
        window.show()
        # a horizontal box to hold the buttons
        hbox = gtk.HBox()
        hbox.show()
        window.add(hbox)
        # create several images with data from files and load images into
        # buttons
        baseimage = gtk.Image()
        baseimage.set_from_file("../../patches/wall40_1.gif")
        pixbuf = baseimage.get_pixbuf()
        # BUG FIX: `scale` was previously assigned only inside the
        # `if pixbuf:` branch but is used unconditionally below, which
        # raised NameError whenever the base image failed to load.
        scale = 1
        if pixbuf:
            baseimage.set_from_pixbuf(pixbuf.scale_simple(
                pixbuf.get_width() * scale,
                pixbuf.get_height() * scale,
                gtk.gdk.INTERP_NEAREST
            ))
        baseimage.show()
        basepatch = gtk.Image()
        basepatch.set_from_file("../../patches/ps20a0.gif")
        pb = basepatch.get_pixbuf()
        basepatch.set_from_pixbuf(pb.scale_simple(
            pb.get_width() * scale,
            pb.get_height() * scale,
            gtk.gdk.INTERP_NEAREST
        ))
        # Re-fetch the scaled pixbuf and add an alpha channel so cyan
        # (0, 255, 255) pixels become transparent when composited.
        pb = basepatch.get_pixbuf()
        pb = pb.add_alpha(True, chr(0), chr(255), chr(255))
        for x, y in [(0, 0), (24, 24), (104, 16), (24, 104), (-16, 0), (0, -16)]:
            x *= scale
            y *= scale
            image = gtk.Image()
            image.set_from_pixbuf(baseimage.get_pixbuf().copy())
            image.show()
            # Clip the destination rectangle so the patch never writes
            # outside the base image, even for negative offsets.
            dest_x = max(0, x)
            dest_y = max(0, y)
            dest_height = min(pb.get_height(), image.get_pixbuf().get_height() - dest_y)
            dest_width = min(pb.get_width(), image.get_pixbuf().get_width() - dest_x)
            if x < 0:
                dest_width += x
            if y < 0:
                dest_height += y
            offset_x = x
            offset_y = y
            pb.composite(
                image.get_pixbuf(),
                dest_x, dest_y, dest_width, dest_height,
                offset_x, offset_y, 1, 1,  # scale
                gtk.gdk.INTERP_NEAREST, 255
            )
            # a button to contain the image widget
            button = gtk.Button()
            button.add(image)
            button.show()
            hbox.pack_start(button)
def main():
    # Hand control to the GTK main loop; returns after gtk.main_quit()
    # is called (see ImagesExample.close_application).
    gtk.main()
    return 0
if __name__ == "__main__":
    ImagesExample()
    main()
|
jmtd/freedoom
|
tools/cleanroom/images.py
|
Python
|
bsd-3-clause
| 2,748
|
#!/usr/bin/env python
# Testing the fastest way to check a dictionary for more
# than one key
#
# foo = {'foo':1,'zip':2,'zam':3,'bar':4}
#
# if ("foo","bar") in foo:
#     #do stuff
#
from timeit import Timer

# Setup executed by each Timer run: builds a large haystack dict `d`
# (D random entries) and a small query dict `q` (Q random entries).
setup = '''
from random import randint as R
d=dict((str(R(0,1000000)),R(0,1000000)) for i in range(D))
q=dict((str(R(0,1000000)),R(0,1000000)) for i in range(Q))
print("looking for %s items in %s"%(len(q),len(d)))
'''

# FIX: print is called as a function (single parenthesized argument),
# which is valid in both Python 2 and Python 3; the original
# `print Timer(...)` statements were Python-2-only syntax.
print(Timer('set(q) <= set(d)', 'D=1000000;Q=100;' + setup).timeit(1))
print(Timer('set(q) <= set(d.keys())', 'D=1000000;Q=100;' + setup).timeit(1))
print(Timer('all(k in d for k in q)', 'D=1000000;Q=100;' + setup).timeit(1))
|
caronc/newsreap
|
tests/timer_testing/multi_dict_key_check.py
|
Python
|
gpl-3.0
| 645
|
# Copyright 2014 Google Inc. All Rights Reserved.
"""Command for listing target VPN gateways."""
from googlecloudsdk.compute.lib import base_classes
class List(base_classes.RegionalLister):
    """List target VPN gateways."""
    # Placeholder to indicate that a detailed_help field exists and should
    # be set outside the class definition.
    detailed_help = None
    @property
    def service(self):
        # Compute API service the regional lister should query.
        return self.compute.targetVpnGateways
    @property
    def resource_type(self):
        # Resource collection name within the Compute API.
        return 'targetVpnGateways'
List.detailed_help = {
'brief': 'List target VPN Gateways',
'DESCRIPTION': """\
*{command}* lists summary information for the target VPN Gateways
in a project. The ``--uri'' option can be used to display the
URIs for the target VPN Gateways.
""",
}
|
ychen820/microblog
|
y/google-cloud-sdk/lib/googlecloudsdk/compute/subcommands/target_vpn_gateways/list.py
|
Python
|
bsd-3-clause
| 803
|
# -*- coding: utf-8 -*-
from __future__ import division
from datetime import timedelta
import textwrap
import warnings
import numpy as np
from pandas._libs import lib, tslibs
from pandas._libs.tslibs import NaT, Timedelta, Timestamp, iNaT
from pandas._libs.tslibs.fields import get_timedelta_field
from pandas._libs.tslibs.timedeltas import (
array_to_timedelta64, parse_timedelta_unit)
import pandas.compat as compat
from pandas.util._decorators import Appender
from pandas.core.dtypes.common import (
_NS_DTYPE, _TD_DTYPE, ensure_int64, is_datetime64_dtype, is_dtype_equal,
is_float_dtype, is_integer_dtype, is_list_like, is_object_dtype, is_scalar,
is_string_dtype, is_timedelta64_dtype, is_timedelta64_ns_dtype,
pandas_dtype)
from pandas.core.dtypes.dtypes import DatetimeTZDtype
from pandas.core.dtypes.generic import (
ABCDataFrame, ABCIndexClass, ABCSeries, ABCTimedeltaIndex)
from pandas.core.dtypes.missing import isna
from pandas.core import ops
from pandas.core.algorithms import checked_add_with_arr
import pandas.core.common as com
from pandas.tseries.frequencies import to_offset
from pandas.tseries.offsets import Tick
from . import datetimelike as dtl
_BAD_DTYPE = "dtype {dtype} cannot be converted to timedelta64[ns]"


def _is_convertible_to_td(key):
    """Return True if `key` is a scalar convertible to a Timedelta."""
    timedelta_like = (Tick, timedelta, np.timedelta64, compat.string_types)
    return isinstance(key, timedelta_like)
def _field_accessor(name, alias, docstring=None):
def f(self):
values = self.asi8
result = get_timedelta_field(values, alias)
if self._hasnans:
result = self._maybe_mask_results(result, fill_value=None,
convert='float64')
return result
f.__name__ = name
f.__doc__ = "\n{}\n".format(docstring)
return property(f)
def _td_array_cmp(cls, op):
    """
    Wrap comparison operations to convert timedelta-like to timedelta64.

    Returns a bound comparison method for `cls` that handles scalar
    timedelta-likes, NaT, and array-likes, deferring to pandas
    containers (Series/DataFrame/Index) via NotImplemented.
    """
    opname = '__{name}__'.format(name=op.__name__)
    # NaT compares unequal to everything, so only __ne__ is True for NaT.
    nat_result = True if opname == '__ne__' else False
    def wrapper(self, other):
        if isinstance(other, (ABCDataFrame, ABCSeries, ABCIndexClass)):
            # Let the higher-level container implement the comparison.
            return NotImplemented
        if _is_convertible_to_td(other) or other is NaT:
            try:
                other = Timedelta(other)
            except ValueError:
                # failed to parse as timedelta
                return ops.invalid_comparison(self, other, op)
            # Compare on the raw int64 (nanosecond) representation.
            result = op(self.view('i8'), other.value)
            if isna(other):
                result.fill(nat_result)
        elif not is_list_like(other):
            return ops.invalid_comparison(self, other, op)
        elif len(other) != len(self):
            raise ValueError("Lengths must match")
        else:
            try:
                other = type(self)._from_sequence(other)._data
            except (ValueError, TypeError):
                return ops.invalid_comparison(self, other, op)
            result = op(self.view('i8'), other.view('i8'))
            result = com.values_from_object(result)
            # Positions that are NaT on either side get the NaT result.
            o_mask = np.array(isna(other))
            if o_mask.any():
                result[o_mask] = nat_result
        if self._hasnans:
            result[self._isnan] = nat_result
        return result
    return compat.set_function_name(wrapper, opname, cls)
class TimedeltaArray(dtl.DatetimeLikeArrayMixin, dtl.TimelikeOps):
    """
    Pandas ExtensionArray for timedelta data.

    .. versionadded:: 0.24.0

    .. warning::
       TimedeltaArray is currently experimental, and its API may change
       without warning. In particular, :attr:`TimedeltaArray.dtype` is
       expected to change to be an instance of an ``ExtensionDtype``
       subclass.

    Parameters
    ----------
    values : array-like
        The timedelta data.
    dtype : numpy.dtype
        Currently, only ``numpy.dtype("timedelta64[ns]")`` is accepted.
    freq : Offset, optional
    copy : bool, default False
        Whether to copy the underlying array of data.
    """
    _typ = "timedeltaarray"
    # Scalar type produced when indexing a single element.
    _scalar_type = Timedelta
    # Ensure numpy defers binary ops to this class.
    __array_priority__ = 1000
    # define my properties & methods for delegation
    _other_ops = []
    _bool_ops = []
    _object_ops = ['freq']
    _field_ops = ['days', 'seconds', 'microseconds', 'nanoseconds']
    _datetimelike_ops = _field_ops + _object_ops + _bool_ops
    _datetimelike_methods = ["to_pytimedelta", "total_seconds",
                             "round", "floor", "ceil"]
    # Needed so that NaT.__richcmp__(DateTimeArray) operates pointwise
    ndim = 1
    @property
    def _box_func(self):
        # Converts a raw int64 (nanoseconds) into a Timedelta scalar.
        return lambda x: Timedelta(x, unit='ns')
    @property
    def dtype(self):
        """
        The dtype for the TimedeltaArray (always ``timedelta64[ns]``).

        .. warning::
           A future version of pandas will change dtype to be an instance
           of a :class:`pandas.api.extensions.ExtensionDtype` subclass,
           not a ``numpy.dtype``.

        Returns
        -------
        numpy.dtype
        """
        return _TD_DTYPE
    # ----------------------------------------------------------------
    # Constructors
    _attributes = ["freq"]
    def __init__(self, values, dtype=_TD_DTYPE, freq=None, copy=False):
        """
        Validate `values` and store the ndarray, dtype, and frequency.

        Accepts an existing TimedeltaArray, a Series/Index wrapping one,
        or a timedelta64[ns]/int64 ndarray; anything else raises
        ValueError.  ``freq="infer"`` is rejected here (use pd.array()).
        """
        if isinstance(values, (ABCSeries, ABCIndexClass)):
            values = values._values
        inferred_freq = getattr(values, "_freq", None)
        if isinstance(values, type(self)):
            if freq is None:
                freq = values.freq
            elif freq and values.freq:
                # Reconcile the explicit freq with the one already attached.
                freq = to_offset(freq)
                freq, _ = dtl.validate_inferred_freq(freq, values.freq, False)
            values = values._data
        if not isinstance(values, np.ndarray):
            msg = (
                "Unexpected type '{}'. 'values' must be a TimedeltaArray "
                "ndarray, or Series or Index containing one of those."
            )
            raise ValueError(msg.format(type(values).__name__))
        if values.dtype == 'i8':
            # for compat with datetime/timedelta/period shared methods,
            # we can sometimes get here with int64 values. These represent
            # nanosecond UTC (or tz-naive) unix timestamps
            values = values.view(_TD_DTYPE)
        _validate_td64_dtype(values.dtype)
        dtype = _validate_td64_dtype(dtype)
        if freq == "infer":
            msg = (
                "Frequency inference not allowed in TimedeltaArray.__init__. "
                "Use 'pd.array()' instead."
            )
            raise ValueError(msg)
        if copy:
            values = values.copy()
        if freq:
            freq = to_offset(freq)
        self._data = values
        self._dtype = dtype
        self._freq = freq
        if inferred_freq is None and freq is not None:
            # Only validate a freq the caller asserted; an inferred one
            # is trusted as already consistent with the data.
            type(self)._validate_frequency(self, freq)
    @classmethod
    def _simple_new(cls, values, freq=None, dtype=_TD_DTYPE):
        """Fast-path constructor: trusts `values` and skips validation."""
        assert dtype == _TD_DTYPE, dtype
        assert isinstance(values, np.ndarray), type(values)
        # Bypass __init__ entirely; attributes are set directly.
        result = object.__new__(cls)
        result._data = values.view(_TD_DTYPE)
        result._freq = to_offset(freq)
        result._dtype = _TD_DTYPE
        return result
    @classmethod
    def _from_sequence(cls, data, dtype=_TD_DTYPE, copy=False,
                       freq=None, unit=None):
        """Construct from a generic sequence, validating/inferring freq."""
        if dtype:
            _validate_td64_dtype(dtype)
        freq, freq_infer = dtl.maybe_infer_freq(freq)
        data, inferred_freq = sequence_to_td64ns(data, copy=copy, unit=unit)
        freq, freq_infer = dtl.validate_inferred_freq(freq, inferred_freq,
                                                      freq_infer)
        result = cls._simple_new(data, freq=freq)
        if inferred_freq is None and freq is not None:
            # this condition precludes `freq_infer`
            cls._validate_frequency(result, freq)
        elif freq_infer:
            # Set _freq directly to bypass duplicative _validate_frequency
            # check.
            result._freq = to_offset(result.inferred_freq)
        return result
@classmethod
def _generate_range(cls, start, end, periods, freq, closed=None):
periods = dtl.validate_periods(periods)
if freq is None and any(x is None for x in [periods, start, end]):
raise ValueError('Must provide freq argument if no data is '
'supplied')
if com.count_not_none(start, end, periods, freq) != 3:
raise ValueError('Of the four parameters: start, end, periods, '
'and freq, exactly three must be specified')
if start is not None:
start = Timedelta(start)
if end is not None:
end = Timedelta(end)
if start is None and end is None:
if closed is not None:
raise ValueError("Closed has to be None if not both of start"
"and end are defined")
left_closed, right_closed = dtl.validate_endpoints(closed)
if freq is not None:
index = _generate_regular_range(start, end, periods, freq)
else:
index = np.linspace(start.value, end.value, periods).astype('i8')
if not left_closed:
index = index[1:]
if not right_closed:
index = index[:-1]
return cls._simple_new(index, freq=freq)
    # ----------------------------------------------------------------
    # DatetimeLike Interface
    def _unbox_scalar(self, value):
        # Convert a Timedelta (or NaT) scalar to its raw int64 value.
        if not isinstance(value, self._scalar_type) and value is not NaT:
            raise ValueError("'value' should be a Timedelta.")
        self._check_compatible_with(value)
        return value.value
    def _scalar_from_string(self, value):
        # Parse a string such as "2 days" into a Timedelta scalar.
        return Timedelta(value)
    def _check_compatible_with(self, other):
        # we don't have anything to validate.
        pass
    def _maybe_clear_freq(self):
        # Drop the cached frequency after an op that may invalidate it.
        self._freq = None
    # ----------------------------------------------------------------
    # Array-Like / EA-Interface Methods
    @Appender(dtl.DatetimeLikeArrayMixin._validate_fill_value.__doc__)
    def _validate_fill_value(self, fill_value):
        # Missing values are stored as iNaT in the int64 representation.
        if isna(fill_value):
            fill_value = iNaT
        elif isinstance(fill_value, (timedelta, np.timedelta64, Tick)):
            fill_value = Timedelta(fill_value).value
        else:
            raise ValueError("'fill_value' should be a Timedelta. "
                             "Got '{got}'.".format(got=fill_value))
        return fill_value
    def astype(self, dtype, copy=True):
        """
        Cast to another dtype.

        Non-nanosecond timedelta64 targets are returned as int64 arrays
        of multiples of the target unit (NaT positions become NaN via
        float64 when present); timedelta64[ns] is a no-op or copy; all
        other dtypes defer to DatetimeLikeArrayMixin.astype.
        """
        # We handle
        #   --> timedelta64[ns]
        #   --> timedelta64
        # DatetimeLikeArrayMixin super call handles other cases
        dtype = pandas_dtype(dtype)
        if is_timedelta64_dtype(dtype) and not is_timedelta64_ns_dtype(dtype):
            # by pandas convention, converting to non-nano timedelta64
            # returns an int64-dtyped array with ints representing multiples
            # of the desired timedelta unit.  This is essentially division
            if self._hasnans:
                # avoid double-copying
                result = self._data.astype(dtype, copy=False)
                values = self._maybe_mask_results(result,
                                                  fill_value=None,
                                                  convert='float64')
                return values
            result = self._data.astype(dtype, copy=copy)
            return result.astype('i8')
        elif is_timedelta64_ns_dtype(dtype):
            if copy:
                return self.copy()
            return self
        return dtl.DatetimeLikeArrayMixin.astype(self, dtype, copy=copy)
    # ----------------------------------------------------------------
    # Rendering Methods
    def _formatter(self, boxed=False):
        # NOTE(review): `boxed` is accepted for interface compatibility
        # but ignored here -- box=True is always passed; confirm intended.
        from pandas.io.formats.format import _get_format_timedelta64
        return _get_format_timedelta64(self, box=True)
    def _format_native_types(self, na_rep='NaT', date_format=None):
        # NOTE(review): `date_format` appears unused for timedeltas.
        from pandas.io.formats.format import _get_format_timedelta64
        formatter = _get_format_timedelta64(self._data, na_rep)
        return np.array([formatter(x) for x in self._data])
    # ----------------------------------------------------------------
    # Arithmetic Methods
    _create_comparison_method = classmethod(_td_array_cmp)
    def _add_offset(self, other):
        # Tick offsets are timedelta-like and handled elsewhere; any
        # other DateOffset cannot be added to a TimedeltaArray.
        assert not isinstance(other, Tick)
        raise TypeError("cannot add the type {typ} to a {cls}"
                        .format(typ=type(other).__name__,
                                cls=type(self).__name__))
    def _add_delta(self, delta):
        """
        Add a timedelta-like, Tick, or TimedeltaIndex-like object
        to self, yielding a new TimedeltaArray.

        Parameters
        ----------
        delta : {timedelta, np.timedelta64, Tick,
                 TimedeltaIndex, ndarray[timedelta64]}

        Returns
        -------
        result : TimedeltaArray
        """
        new_values = super(TimedeltaArray, self)._add_delta(delta)
        # Frequency of the result is re-inferred from the new values.
        return type(self)._from_sequence(new_values, freq='infer')
    def _add_datetime_arraylike(self, other):
        """
        Add DatetimeArray/Index or ndarray[datetime64] to TimedeltaArray.
        """
        if isinstance(other, np.ndarray):
            # At this point we have already checked that dtype is datetime64
            from pandas.core.arrays import DatetimeArray
            other = DatetimeArray(other)
        # defer to implementation in DatetimeArray
        return other + self
    def _add_datetimelike_scalar(self, other):
        # adding a timedeltaindex to a datetimelike scalar (Timestamp-like)
        from pandas.core.arrays import DatetimeArray
        assert other is not NaT
        other = Timestamp(other)
        if other is NaT:
            # In this case we specifically interpret NaT as a datetime, not
            # the timedelta interpretation we would get by returning self + NaT
            # NOTE(review): 'm8[ms]' (milliseconds) looks inconsistent with
            # the nanosecond-based asi8 values -- confirm intended.
            result = self.asi8.view('m8[ms]') + NaT.to_datetime64()
            return DatetimeArray(result)
        i8 = self.asi8
        # Overflow-checked addition; NaT slots are masked out of the check.
        result = checked_add_with_arr(i8, other.value,
                                      arr_mask=self._isnan)
        result = self._maybe_mask_results(result)
        dtype = DatetimeTZDtype(tz=other.tz) if other.tz else _NS_DTYPE
        return DatetimeArray(result, dtype=dtype, freq=self.freq)
    def _addsub_offset_array(self, other, op):
        # Add or subtract Array-like of DateOffset objects
        try:
            # TimedeltaIndex can only operate with a subset of DateOffset
            # subclasses.  Incompatible classes will raise AttributeError,
            # which we re-raise as TypeError
            return super(TimedeltaArray, self)._addsub_offset_array(
                other, op
            )
        except AttributeError:
            raise TypeError("Cannot add/subtract non-tick DateOffset to {cls}"
                            .format(cls=type(self).__name__))
    def __mul__(self, other):
        """Multiply by a numeric scalar or array, returning TimedeltaArray."""
        other = lib.item_from_zerodim(other)
        if isinstance(other, (ABCDataFrame, ABCSeries, ABCIndexClass)):
            # Defer to the higher-level pandas container.
            return NotImplemented
        if is_scalar(other):
            # numpy will accept float and int, raise TypeError for others
            result = self._data * other
            freq = None
            if self.freq is not None and not isna(other):
                # A regular frequency scales along with the values.
                freq = self.freq * other
            return type(self)(result, freq=freq)
        if not hasattr(other, "dtype"):
            # list, tuple
            other = np.array(other)
        if len(other) != len(self) and not is_timedelta64_dtype(other):
            # Exclude timedelta64 here so we correctly raise TypeError
            # for that instead of ValueError
            raise ValueError("Cannot multiply with unequal lengths")
        if is_object_dtype(other):
            # this multiplication will succeed only if all elements of other
            # are int or float scalars, so we will end up with
            # timedelta64[ns]-dtyped result
            result = [self[n] * other[n] for n in range(len(self))]
            result = np.array(result)
            return type(self)(result)
        # numpy will accept float or int dtype, raise TypeError for others
        result = self._data * other
        return type(self)(result)
    __rmul__ = __mul__
    def __truediv__(self, other):
        # timedelta / X is well-defined for timedelta-like or numeric X
        other = lib.item_from_zerodim(other)
        if isinstance(other, (ABCSeries, ABCDataFrame, ABCIndexClass)):
            return NotImplemented
        if isinstance(other, (timedelta, np.timedelta64, Tick)):
            other = Timedelta(other)
            if other is NaT:
                # specifically timedelta64-NaT: result is all-NaN float
                result = np.empty(self.shape, dtype=np.float64)
                result.fill(np.nan)
                return result
            # otherwise, dispatch to Timedelta implementation
            return self._data / other
        elif lib.is_scalar(other):
            # assume it is numeric
            result = self._data / other
            freq = None
            if self.freq is not None:
                # Tick division is not implemented, so operate on Timedelta
                freq = self.freq.delta / other
            return type(self)(result, freq=freq)
        if not hasattr(other, "dtype"):
            # e.g. list, tuple
            other = np.array(other)
        if len(other) != len(self):
            raise ValueError("Cannot divide vectors with unequal lengths")
        elif is_timedelta64_dtype(other):
            # let numpy handle it
            return self._data / other
        elif is_object_dtype(other):
            # Note: we do not do type inference on the result, so either
            # an object array or numeric-dtyped (if numpy does inference)
            # will be returned.  GH#23829
            result = [self[n] / other[n] for n in range(len(self))]
            result = np.array(result)
            return result
        else:
            # numeric array: elementwise division, result stays timedelta
            result = self._data / other
            return type(self)(result)
    def __rtruediv__(self, other):
        # X / timedelta is defined only for timedelta-like X
        other = lib.item_from_zerodim(other)
        if isinstance(other, (ABCSeries, ABCDataFrame, ABCIndexClass)):
            return NotImplemented
        if isinstance(other, (timedelta, np.timedelta64, Tick)):
            other = Timedelta(other)
            if other is NaT:
                # specifically timedelta64-NaT: result is all-NaN float
                result = np.empty(self.shape, dtype=np.float64)
                result.fill(np.nan)
                return result
            # otherwise, dispatch to Timedelta implementation
            return other / self._data
        elif lib.is_scalar(other):
            # numeric / timedelta is undefined
            raise TypeError("Cannot divide {typ} by {cls}"
                            .format(typ=type(other).__name__,
                                    cls=type(self).__name__))
        if not hasattr(other, "dtype"):
            # e.g. list, tuple
            other = np.array(other)
        if len(other) != len(self):
            raise ValueError("Cannot divide vectors with unequal lengths")
        elif is_timedelta64_dtype(other):
            # let numpy handle it
            return other / self._data
        elif is_object_dtype(other):
            # Note: unlike in __truediv__, we do not _need_ to do type#
            # inference on the result.  It does not raise, a numeric array
            # is returned.  GH#23829
            result = [other[n] / self[n] for n in range(len(self))]
            return np.array(result)
        else:
            raise TypeError("Cannot divide {dtype} data by {cls}"
                            .format(dtype=other.dtype,
                                    cls=type(self).__name__))
    if compat.PY2:
        # Python 2 maps the `/` operator to __div__/__rdiv__.
        __div__ = __truediv__
        __rdiv__ = __rtruediv__
def __floordiv__(self, other):
if isinstance(other, (ABCSeries, ABCDataFrame, ABCIndexClass)):
return NotImplemented
other = lib.item_from_zerodim(other)
if is_scalar(other):
if isinstance(other, (timedelta, np.timedelta64, Tick)):
other = Timedelta(other)
if other is NaT:
# treat this specifically as timedelta-NaT
result = np.empty(self.shape, dtype=np.float64)
result.fill(np.nan)
return result
# dispatch to Timedelta implementation
result = other.__rfloordiv__(self._data)
return result
# at this point we should only have numeric scalars; anything
# else will raise
result = self.asi8 // other
result[self._isnan] = iNaT
freq = None
if self.freq is not None:
# Note: freq gets division, not floor-division
freq = self.freq / other
return type(self)(result.view('m8[ns]'), freq=freq)
if not hasattr(other, "dtype"):
# list, tuple
other = np.array(other)
if len(other) != len(self):
raise ValueError("Cannot divide with unequal lengths")
elif is_timedelta64_dtype(other):
other = type(self)(other)
# numpy timedelta64 does not natively support floordiv, so operate
# on the i8 values
result = self.asi8 // other.asi8
mask = self._isnan | other._isnan
if mask.any():
result = result.astype(np.int64)
result[mask] = np.nan
return result
elif is_object_dtype(other):
result = [self[n] // other[n] for n in range(len(self))]
result = np.array(result)
if lib.infer_dtype(result, skipna=False) == 'timedelta':
result, _ = sequence_to_td64ns(result)
return type(self)(result)
return result
elif is_integer_dtype(other) or is_float_dtype(other):
result = self._data // other
return type(self)(result)
else:
dtype = getattr(other, "dtype", type(other).__name__)
raise TypeError("Cannot divide {typ} by {cls}"
.format(typ=dtype, cls=type(self).__name__))
def __rfloordiv__(self, other):
if isinstance(other, (ABCSeries, ABCDataFrame, ABCIndexClass)):
return NotImplemented
other = lib.item_from_zerodim(other)
if is_scalar(other):
if isinstance(other, (timedelta, np.timedelta64, Tick)):
other = Timedelta(other)
if other is NaT:
# treat this specifically as timedelta-NaT
result = np.empty(self.shape, dtype=np.float64)
result.fill(np.nan)
return result
# dispatch to Timedelta implementation
result = other.__floordiv__(self._data)
return result
raise TypeError("Cannot divide {typ} by {cls}"
.format(typ=type(other).__name__,
cls=type(self).__name__))
if not hasattr(other, "dtype"):
# list, tuple
other = np.array(other)
if len(other) != len(self):
raise ValueError("Cannot divide with unequal lengths")
elif is_timedelta64_dtype(other):
other = type(self)(other)
# numpy timedelta64 does not natively support floordiv, so operate
# on the i8 values
result = other.asi8 // self.asi8
mask = self._isnan | other._isnan
if mask.any():
result = result.astype(np.int64)
result[mask] = np.nan
return result
elif is_object_dtype(other):
result = [other[n] // self[n] for n in range(len(self))]
result = np.array(result)
return result
else:
dtype = getattr(other, "dtype", type(other).__name__)
raise TypeError("Cannot divide {typ} by {cls}"
.format(typ=dtype, cls=type(self).__name__))
    def __mod__(self, other):
        # Note: This is a naive implementation, can likely be optimized
        if isinstance(other, (ABCSeries, ABCDataFrame, ABCIndexClass)):
            return NotImplemented
        other = lib.item_from_zerodim(other)
        if isinstance(other, (timedelta, np.timedelta64, Tick)):
            other = Timedelta(other)
        # a % b == a - (a // b) * b
        return self - (self // other) * other
    def __rmod__(self, other):
        # Note: This is a naive implementation, can likely be optimized
        if isinstance(other, (ABCSeries, ABCDataFrame, ABCIndexClass)):
            return NotImplemented
        other = lib.item_from_zerodim(other)
        if isinstance(other, (timedelta, np.timedelta64, Tick)):
            other = Timedelta(other)
        return other - (other // self) * self
    def __divmod__(self, other):
        # Note: This is a naive implementation, can likely be optimized
        if isinstance(other, (ABCSeries, ABCDataFrame, ABCIndexClass)):
            return NotImplemented
        other = lib.item_from_zerodim(other)
        if isinstance(other, (timedelta, np.timedelta64, Tick)):
            other = Timedelta(other)
        # divmod(a, b) == (a // b, a - (a // b) * b)
        res1 = self // other
        res2 = self - res1 * other
        return res1, res2
    def __rdivmod__(self, other):
        # Note: This is a naive implementation, can likely be optimized
        if isinstance(other, (ABCSeries, ABCDataFrame, ABCIndexClass)):
            return NotImplemented
        other = lib.item_from_zerodim(other)
        if isinstance(other, (timedelta, np.timedelta64, Tick)):
            other = Timedelta(other)
        res1 = other // self
        res2 = other - res1 * self
        return res1, res2
    # Note: TimedeltaIndex overrides this in call to cls._add_numeric_methods
    def __neg__(self):
        if self.freq is not None:
            # Negating the values also negates the step between them.
            return type(self)(-self._data, freq=-self.freq)
        return type(self)(-self._data)
    def __abs__(self):
        # Note: freq is not preserved
        return type(self)(np.abs(self._data))
    # ----------------------------------------------------------------
    # Conversion Methods - Vectorized analogues of Timedelta methods
    def total_seconds(self):
        """
        Return total duration of each element expressed in seconds.

        This method is available directly on TimedeltaArray, TimedeltaIndex
        and on Series containing timedelta values under the ``.dt`` namespace.

        Returns
        -------
        seconds : [ndarray, Float64Index, Series]
            When the calling object is a TimedeltaArray, the return type
            is ndarray.  When the calling object is a TimedeltaIndex,
            the return type is a Float64Index.  When the calling object
            is a Series, the return type is Series of type `float64` whose
            index is the same as the original.

        See Also
        --------
        datetime.timedelta.total_seconds : Standard library version
            of this method.
        TimedeltaIndex.components : Return a DataFrame with components of
            each Timedelta.

        Examples
        --------
        **Series**

        >>> s = pd.Series(pd.to_timedelta(np.arange(5), unit='d'))
        >>> s
        0   0 days
        1   1 days
        2   2 days
        3   3 days
        4   4 days
        dtype: timedelta64[ns]

        >>> s.dt.total_seconds()
        0         0.0
        1     86400.0
        2    172800.0
        3    259200.0
        4    345600.0
        dtype: float64

        **TimedeltaIndex**

        >>> idx = pd.to_timedelta(np.arange(5), unit='d')
        >>> idx
        TimedeltaIndex(['0 days', '1 days', '2 days', '3 days', '4 days'],
                       dtype='timedelta64[ns]', freq=None)

        >>> idx.total_seconds()
        Float64Index([0.0, 86400.0, 172800.0, 259200.00000000003, 345600.0],
                     dtype='float64')
        """
        # asi8 is nanoseconds, so multiply by 1e-9 to get seconds;
        # NaT positions become NaN via the mask helper.
        return self._maybe_mask_results(1e-9 * self.asi8, fill_value=None)
    def to_pytimedelta(self):
        """
        Return Timedelta Array/Index as object ndarray of datetime.timedelta
        objects.

        Returns
        -------
        datetimes : ndarray
        """
        return tslibs.ints_to_pytimedelta(self.asi8)
days = _field_accessor("days", "days",
"Number of days for each element.")
seconds = _field_accessor("seconds", "seconds",
"Number of seconds (>= 0 and less than 1 day) "
"for each element.")
microseconds = _field_accessor("microseconds", "microseconds",
"Number of microseconds (>= 0 and less "
"than 1 second) for each element.")
nanoseconds = _field_accessor("nanoseconds", "nanoseconds",
"Number of nanoseconds (>= 0 and less "
"than 1 microsecond) for each element.")
@property
def components(self):
"""
Return a dataframe of the components (days, hours, minutes,
seconds, milliseconds, microseconds, nanoseconds) of the Timedeltas.
Returns
-------
a DataFrame
"""
from pandas import DataFrame
columns = ['days', 'hours', 'minutes', 'seconds',
'milliseconds', 'microseconds', 'nanoseconds']
hasnans = self._hasnans
if hasnans:
def f(x):
if isna(x):
return [np.nan] * len(columns)
return x.components
else:
def f(x):
return x.components
result = DataFrame([f(x) for x in self], columns=columns)
if not hasnans:
result = result.astype('int64')
return result
TimedeltaArray._add_comparison_ops()
# ---------------------------------------------------------------------
# Constructor Helpers
def sequence_to_td64ns(data, copy=False, unit="ns", errors="raise"):
    """
    Convert a list-like sequence into a timedelta64[ns]-dtype ndarray.

    Parameters
    ----------
    array : list-like
    copy : bool, default False
    unit : str, default "ns"
        The timedelta unit to treat integers as multiples of.
    errors : {"raise", "coerce", "ignore"}, default "raise"
        How to handle elements that cannot be converted to timedelta64[ns].
        See ``pandas.to_timedelta`` for details.

    Returns
    -------
    converted : numpy.ndarray
        The sequence converted to a numpy array with dtype ``timedelta64[ns]``.
    inferred_freq : Tick or None
        The inferred frequency of the sequence.

    Raises
    ------
    ValueError : Data cannot be converted to timedelta64[ns].

    Notes
    -----
    Unlike `pandas.to_timedelta`, if setting ``errors=ignore`` will not cause
    errors to be ignored; they are caught and subsequently ignored at a
    higher level.
    """
    inferred_freq = None
    unit = parse_timedelta_unit(unit)

    # Unwrap whatever we have into a np.ndarray
    if not hasattr(data, 'dtype'):
        # e.g. list, tuple
        if np.ndim(data) == 0:
            # i.e. generator
            data = list(data)
        data = np.array(data, copy=False)
    elif isinstance(data, ABCSeries):
        data = data._values
    elif isinstance(data, (ABCTimedeltaIndex, TimedeltaArray)):
        # Preserve any frequency information the input already carries.
        inferred_freq = data.freq
        data = data._data

    # Convert whatever we have into timedelta64[ns] dtype
    if is_object_dtype(data.dtype) or is_string_dtype(data.dtype):
        # no need to make a copy, need to convert if string-dtyped
        data = objects_to_td64ns(data, unit=unit, errors=errors)
        copy = False

    elif is_integer_dtype(data.dtype):
        # treat as multiples of the given unit
        data, copy_made = ints_to_td64ns(data, unit=unit)
        # If the helper already copied, a second copy below is unnecessary.
        copy = copy and not copy_made

    elif is_float_dtype(data.dtype):
        # treat as multiples of the given unit.  If after converting to nanos,
        # there are fractional components left, these are truncated
        # (i.e. NOT rounded)
        mask = np.isnan(data)
        coeff = np.timedelta64(1, unit) / np.timedelta64(1, 'ns')
        data = (coeff * data).astype(np.int64).view('timedelta64[ns]')
        # Restore missing values after the int cast clobbered the NaNs.
        data[mask] = iNaT
        copy = False

    elif is_timedelta64_dtype(data.dtype):
        if data.dtype != _TD_DTYPE:
            # non-nano unit
            # TODO: watch out for overflows
            data = data.astype(_TD_DTYPE)
            copy = False

    elif is_datetime64_dtype(data):
        # GH#23539
        warnings.warn("Passing datetime64-dtype data to TimedeltaIndex is "
                      "deprecated, will raise a TypeError in a future "
                      "version",
                      FutureWarning, stacklevel=4)
        data = ensure_int64(data).view(_TD_DTYPE)

    else:
        raise TypeError("dtype {dtype} cannot be converted to timedelta64[ns]"
                        .format(dtype=data.dtype))

    data = np.array(data, copy=copy)

    assert data.dtype == 'm8[ns]', data
    return data, inferred_freq
def ints_to_td64ns(data, unit="ns"):
    """
    Convert an integer-dtype ndarray to timedelta64[ns], interpreting the
    integers as multiples of ``unit``.

    Parameters
    ----------
    data : numpy.ndarray with integer-dtype
    unit : str, default "ns"
        The timedelta unit to treat integers as multiples of.

    Returns
    -------
    numpy.ndarray : timedelta64[ns] array converted from data
    bool : whether a copy was made
    """
    made_copy = False
    if unit is None:
        unit = "ns"

    if data.dtype != np.int64:
        # Normalizing to int64 copies, which lets callers skip a later copy.
        data = data.astype(np.int64)
        made_copy = True

    if unit == "ns":
        # Already in nanos: reinterpret the buffer in place.
        data = data.view("timedelta64[ns]")
    else:
        # View as the requested unit, then upcast to nanoseconds.
        # TODO: watch out for overflows when converting from lower-resolution
        data = data.view("timedelta64[{unit}]".format(unit=unit))
        # astype allocates a fresh array, so downstream need not copy again.
        data = data.astype("timedelta64[ns]")
        made_copy = True

    return data, made_copy
def objects_to_td64ns(data, unit="ns", errors="raise"):
    """
    Convert an object- or string-dtyped array-like into a timedelta64[ns]
    ndarray.

    Parameters
    ----------
    data : ndarray or Index
    unit : str, default "ns"
        The timedelta unit to treat integers as multiples of.
    errors : {"raise", "coerce", "ignore"}, default "raise"
        How to handle elements that cannot be converted to timedelta64[ns].
        See ``pandas.to_timedelta`` for details.

    Returns
    -------
    numpy.ndarray : timedelta64[ns] array converted from data

    Raises
    ------
    ValueError : Data cannot be converted to timedelta64[ns].

    Notes
    -----
    Unlike `pandas.to_timedelta`, if setting `errors=ignore` will not cause
    errors to be ignored; they are caught and subsequently ignored at a
    higher level.
    """
    # Coerce to an object-dtype ndarray first; this converts string-dtyped
    # input while avoiding a copy when the input is already object-dtype.
    values = np.array(data, dtype=np.object_, copy=False)

    converted = array_to_timedelta64(values, unit=unit, errors=errors)
    return converted.view('timedelta64[ns]')
def _validate_td64_dtype(dtype):
    """
    Normalize ``dtype`` to timedelta64[ns], warning on unit-less input and
    raising ``ValueError`` for anything that is not timedelta64[ns].
    """
    dtype = pandas_dtype(dtype)

    if is_dtype_equal(dtype, np.dtype("timedelta64")):
        # A bare 'timedelta64' has no precision; coerce to nanoseconds but
        # warn, since this spelling is slated for removal.
        dtype = _TD_DTYPE
        deprecation_msg = textwrap.dedent("""\
            Passing in 'timedelta' dtype with no precision is deprecated
            and will raise in a future version. Please pass in
            'timedelta64[ns]' instead.""")
        warnings.warn(deprecation_msg, FutureWarning, stacklevel=4)

    if not is_dtype_equal(dtype, _TD_DTYPE):
        raise ValueError(_BAD_DTYPE.format(dtype=dtype))

    return dtype
def _generate_regular_range(start, end, periods, offset):
    """
    Build the int64 nanosecond values for an evenly spaced timedelta range.

    Parameters
    ----------
    start, end : timedelta-like or None
        Range endpoints; at least one must be given when ``periods`` is set.
    periods : int or None
        Number of steps; when None both endpoints are required.
    offset : Tick
        Step size between consecutive values (``offset.nanos`` per step).
    """
    stride = offset.nanos
    if periods is None:
        b = Timedelta(start).value
        e = Timedelta(end).value
        # Round the end up to the next stride boundary so `end` is included
        # by the half-open np.arange below.
        e += stride - e % stride
    elif start is not None:
        b = Timedelta(start).value
        e = b + periods * stride
    elif end is not None:
        # Anchor on the (inclusive) end and count backwards.
        e = Timedelta(end).value + stride
        b = e - periods * stride
    else:
        raise ValueError("at least 'start' or 'end' should be specified "
                         "if a 'period' is given.")

    data = np.arange(b, e, stride, dtype=np.int64)
    return data
|
GuessWhoSamFoo/pandas
|
pandas/core/arrays/timedeltas.py
|
Python
|
bsd-3-clause
| 37,024
|
# -*- coding: utf-8 -*-
"""
flaskbb.forum.views
~~~~~~~~~~~~~~~~~~~~
This module handles the forum logic like creating and viewing
topics and posts.
:copyright: (c) 2014 by the FlaskBB Team.
:license: BSD, see LICENSE for more details.
"""
import datetime
from flask import (Blueprint, redirect, url_for, current_app,
request, flash)
from flask.ext.login import login_required, current_user
from flaskbb.extensions import db
from flaskbb.utils.settings import flaskbb_config
from flaskbb.utils.helpers import get_online_users, time_diff, render_template
from flaskbb.utils.permissions import (can_post_reply, can_post_topic,
can_delete_topic, can_delete_post,
can_edit_post, can_moderate)
from flaskbb.forum.models import (Category, Forum, Topic, Post, ForumsRead,
TopicsRead)
from flaskbb.forum.forms import (QuickreplyForm, ReplyForm, NewTopicForm,
ReportForm, UserSearchForm, SearchPageForm)
from flaskbb.user.models import User
forum = Blueprint("forum", __name__)
@forum.route("/")
def index():
    """Render the forum overview with all categories and board statistics."""
    categories = Category.get_all(user=current_user)

    # A few global stats shown on the front page.
    stats = {
        "user_count": User.query.count(),
        "topic_count": Topic.query.count(),
        "post_count": Post.query.count(),
        "newest_user": User.query.order_by(User.id.desc()).first(),
    }

    if current_app.config["REDIS_ENABLED"]:
        online_users = len(get_online_users())
        online_guests = len(get_online_users(guest=True))
    else:
        # Without server side sessions we cannot count online guests.
        online_users = User.query.filter(User.lastseen >= time_diff()).count()
        online_guests = None

    return render_template("forum/index.html",
                           categories=categories,
                           online_users=online_users,
                           online_guests=online_guests,
                           **stats)
@forum.route("/category/<int:category_id>")
@forum.route("/category/<int:category_id>-<slug>")
def view_category(category_id, slug=None):
    """Show every forum that belongs to the given category."""
    category, forums = Category.get_forums(category_id=category_id,
                                           user=current_user)

    return render_template("forum/category.html",
                           forums=forums, category=category)
@forum.route("/forum/<int:forum_id>")
@forum.route("/forum/<int:forum_id>-<slug>")
def view_forum(forum_id, slug=None):
    """List the topics of a forum, or redirect for external forums."""
    page = request.args.get('page', 1, type=int)

    forum, forumsread = Forum.get_forum(forum_id=forum_id, user=current_user)

    # An "external" forum is just a link to another site.
    if forum.external:
        return redirect(forum.external)

    topics = Forum.get_topics(
        forum_id=forum.id, user=current_user, page=page,
        per_page=flaskbb_config["TOPICS_PER_PAGE"])

    return render_template("forum/forum.html", forum=forum, topics=topics,
                           forumsread=forumsread)
@forum.route("/topic/<int:topic_id>", methods=["POST", "GET"])
@forum.route("/topic/<int:topic_id>-<slug>", methods=["POST", "GET"])
def view_topic(topic_id, slug=None):
    """Display a topic with its paginated posts and a quick-reply form.

    Also bumps the view counter and updates the read tracking for
    authenticated users.
    """
    page = request.args.get('page', 1, type=int)

    # Bug fix: `first()` returned None for an unknown id, which crashed with
    # AttributeError on `topic.id` below. `first_or_404()` gives a clean 404
    # and matches every other view in this module.
    topic = Topic.query.filter_by(id=topic_id).first_or_404()
    posts = Post.query.filter_by(topic_id=topic.id).\
        order_by(Post.id.asc()).\
        paginate(page, flaskbb_config['POSTS_PER_PAGE'], False)

    # Count the topic views
    topic.views += 1

    # Update the topicsread status if the user hasn't read it
    forumsread = None
    if current_user.is_authenticated():
        forumsread = ForumsRead.query.\
            filter_by(user_id=current_user.id,
                      forum_id=topic.forum.id).first()

    topic.update_read(current_user, topic.forum, forumsread)
    topic.save()

    # Only offer the quick-reply form when the user may actually reply.
    form = None
    if not topic.locked \
            and not topic.forum.locked \
            and can_post_reply(user=current_user, forum=topic.forum):

        form = QuickreplyForm()
        if form.validate_on_submit():
            post = form.save(current_user, topic)
            return view_post(post.id)

    return render_template("forum/topic.html", topic=topic, posts=posts,
                           last_seen=time_diff(), form=form)
@forum.route("/post/<int:post_id>")
def view_post(post_id):
    """Redirect to the topic page that should contain the given post.

    The page number is derived from the topic's total post count, on the
    assumption that the post of interest is near the end of the topic.
    """
    post = Post.query.filter_by(id=post_id).first_or_404()
    count = post.topic.post_count
    per_page = flaskbb_config["POSTS_PER_PAGE"]

    if count > per_page:
        # Ceiling division. Using `//` keeps this correct on Python 3 (where
        # `/` is float division) and fixes an off-by-one: with e.g. 20 posts
        # and 10 per page the old code redirected to page 3 of 2.
        page = (count - 1) // per_page + 1
    else:
        page = 1

    return redirect(post.topic.url + "?page=%d#pid%s" % (page, post.id))
@forum.route("/<int:forum_id>/topic/new", methods=["POST", "GET"])
@forum.route("/<int:forum_id>-<slug>/topic/new", methods=["POST", "GET"])
@login_required
def new_topic(forum_id, slug=None):
    """Create a new topic in a forum, with an optional preview step."""
    forum = Forum.query.filter_by(id=forum_id).first_or_404()

    # Guard clauses: locked forum first, then permissions.
    if forum.locked:
        flash("This forum is locked; you cannot submit new topics or posts.",
              "danger")
        return redirect(forum.url)

    if not can_post_topic(user=current_user, forum=forum):
        flash("You do not have the permissions to create a new topic.",
              "danger")
        return redirect(forum.url)

    form = NewTopicForm()
    if form.validate_on_submit():
        if request.form['button'] != 'preview':
            topic = form.save(current_user, forum)
            # redirect to the new topic
            return redirect(url_for('forum.view_topic', topic_id=topic.id))
        return render_template("forum/new_topic.html", forum=forum,
                               form=form, preview=form.content.data)

    return render_template("forum/new_topic.html", forum=forum, form=form)
@forum.route("/topic/<int:topic_id>/delete")
@forum.route("/topic/<int:topic_id>-<slug>/delete")
@login_required
def delete_topic(topic_id, slug=None):
    """Delete a whole topic after checking delete permissions."""
    topic = Topic.query.filter_by(id=topic_id).first_or_404()

    if not can_delete_topic(user=current_user, forum=topic.forum,
                            post_user_id=topic.first_post.user_id):
        flash("You do not have the permissions to delete the topic", "danger")
        return redirect(topic.forum.url)

    # Everybody who posted in this topic is passed along so their post
    # counters can be updated by the model.
    involved_users = User.query.filter(Post.topic_id == topic.id,
                                       User.id == Post.user_id).all()
    topic.delete(users=involved_users)
    return redirect(url_for("forum.view_forum", forum_id=topic.forum_id))
@forum.route("/topic/<int:topic_id>/lock")
@forum.route("/topic/<int:topic_id>-<slug>/lock")
@login_required
def lock_topic(topic_id, slug=None):
    """Lock a topic so that no further replies can be posted."""
    topic = Topic.query.filter_by(id=topic_id).first_or_404()

    # TODO: Bulk lock
    if not can_moderate(user=current_user, forum=topic.forum):
        flash("You do not have the permissions to lock this topic", "danger")
        return redirect(topic.url)

    topic.locked = True
    topic.save()
    return redirect(topic.url)
@forum.route("/topic/<int:topic_id>/unlock")
@forum.route("/topic/<int:topic_id>-<slug>/unlock")
@login_required
def unlock_topic(topic_id, slug=None):
    """Unlock a previously locked topic (moderators only)."""
    topic = Topic.query.filter_by(id=topic_id).first_or_404()

    # TODO: Bulk unlock
    # Unlock is basically the same as lock
    if not can_moderate(user=current_user, forum=topic.forum):
        # Fixed user-facing typo: "Yo do not" -> "You do not".
        flash("You do not have the permissions to unlock this topic", "danger")
        return redirect(topic.url)

    topic.locked = False
    topic.save()
    return redirect(topic.url)
@forum.route("/topic/<int:topic_id>/move/<int:forum_id>")
@forum.route("/topic/<int:topic_id>-<topic_slug>/move/<int:forum_id>-<forum_slug>")
@login_required
def move_topic(topic_id, forum_id, topic_slug=None, forum_slug=None):
    """Move a topic into another forum (moderators only)."""
    forum = Forum.query.filter_by(id=forum_id).first_or_404()
    topic = Topic.query.filter_by(id=topic_id).first_or_404()

    # TODO: Bulk move
    if not can_moderate(user=current_user, forum=topic.forum):
        # Fixed user-facing typo: "Yo do not" -> "You do not".
        flash("You do not have the permissions to move this topic", "danger")
        return redirect(forum.url)

    if not topic.move(forum):
        flash("Could not move the topic to forum %s" % forum.title, "danger")
        return redirect(topic.url)

    flash("Topic was moved to forum %s" % forum.title, "success")
    return redirect(topic.url)
@forum.route("/topic/<int:old_id>/merge/<int:new_id>")
@forum.route("/topic/<int:old_id>-<old_slug>/merge/<int:new_id>-<new_slug>")
@login_required
def merge_topic(old_id, new_id, old_slug=None, new_slug=None):
    """Merge ``old_topic`` into ``new_topic`` (moderators only)."""
    old_topic = Topic.query.filter_by(id=old_id).first_or_404()
    new_topic = Topic.query.filter_by(id=new_id).first_or_404()

    # TODO: Bulk merge
    # Bug fix: the permission check referenced an undefined name `topic`,
    # raising NameError for every request; it must use `old_topic`.
    # Also fixed the "Yo do not" and "succesfully" typos in the messages.
    if not can_moderate(user=current_user, forum=old_topic.forum):
        flash("You do not have the permissions to merge this topic", "danger")
        return redirect(old_topic.url)

    if not old_topic.merge(new_topic):
        flash("Could not merge the topic.", "danger")
        return redirect(old_topic.url)

    flash("Topic successfully merged.", "success")
    return redirect(new_topic.url)
@forum.route("/topic/<int:topic_id>/post/new", methods=["POST", "GET"])
@forum.route("/topic/<int:topic_id>-<slug>/post/new", methods=["POST", "GET"])
@login_required
def new_post(topic_id, slug=None):
    """Create a new reply in a topic, with an optional preview step."""
    topic = Topic.query.filter_by(id=topic_id).first_or_404()

    if topic.forum.locked:
        flash("This forum is locked; you cannot submit new topics or posts.",
              "danger")
        return redirect(topic.forum.url)

    if topic.locked:
        flash("The topic is locked.", "danger")
        return redirect(topic.forum.url)

    if not can_post_reply(user=current_user, forum=topic.forum):
        # Fixed copy-paste error: the old message talked about deleting a
        # topic; this check is about posting (matches reply_post below).
        flash("You do not have the permissions to post in this topic",
              "danger")
        return redirect(topic.forum.url)

    form = ReplyForm()
    if form.validate_on_submit():
        if request.form['button'] == 'preview':
            return render_template("forum/new_post.html", topic=topic,
                                   form=form, preview=form.content.data)
        post = form.save(current_user, topic)
        return view_post(post.id)

    return render_template("forum/new_post.html", topic=topic, form=form)
@forum.route("/topic/<int:topic_id>/post/<int:post_id>/reply", methods=["POST", "GET"])
@login_required
def reply_post(topic_id, post_id):
    """Reply to a specific post, pre-quoting its content."""
    topic = Topic.query.filter_by(id=topic_id).first_or_404()
    post = Post.query.filter_by(id=post_id).first_or_404()

    # Guard clauses: locked forum, locked topic, then permissions.
    if post.topic.forum.locked:
        flash("This forum is locked; you cannot submit new topics or posts.",
              "danger")
        return redirect(post.topic.forum.url)

    if post.topic.locked:
        flash("The topic is locked.", "danger")
        return redirect(post.topic.forum.url)

    if not can_post_reply(user=current_user, forum=topic.forum):
        flash("You do not have the permissions to post in this topic", "danger")
        return redirect(topic.forum.url)

    form = ReplyForm()
    if form.validate_on_submit():
        if request.form['button'] == 'preview':
            return render_template("forum/new_post.html", topic=topic,
                                   form=form, preview=form.content.data)
        form.save(current_user, topic)
        return redirect(post.topic.url)
    else:
        # Pre-fill the textarea with the quoted post.
        form.content.data = '[quote]{}[/quote]'.format(post.content)

    return render_template("forum/new_post.html", topic=post.topic, form=form)
@forum.route("/post/<int:post_id>/edit", methods=["POST", "GET"])
@login_required
def edit_post(post_id):
    """Edit an existing post, recording who modified it and when."""
    post = Post.query.filter_by(id=post_id).first_or_404()

    # Guard clauses: locked forum, locked topic, then permissions.
    if post.topic.forum.locked:
        flash("This forum is locked; you cannot submit new topics or posts.",
              "danger")
        return redirect(post.topic.forum.url)

    if post.topic.locked:
        flash("The topic is locked.", "danger")
        return redirect(post.topic.forum.url)

    if not can_edit_post(user=current_user, forum=post.topic.forum,
                         post_user_id=post.user_id):
        flash("You do not have the permissions to edit this post", "danger")
        return redirect(post.topic.url)

    form = ReplyForm()
    if form.validate_on_submit():
        if request.form['button'] == 'preview':
            return render_template("forum/new_post.html", topic=post.topic,
                                   form=form, preview=form.content.data)
        # Copy the form fields onto the post and stamp the modification.
        form.populate_obj(post)
        post.date_modified = datetime.datetime.utcnow()
        post.modified_by = current_user.username
        post.save()
        return redirect(post.topic.url)
    else:
        # Show the current content in the edit box.
        form.content.data = post.content

    return render_template("forum/new_post.html", topic=post.topic, form=form)
@forum.route("/post/<int:post_id>/delete")
@login_required
def delete_post(post_id, slug=None):
    """Delete a single post; redirect to the forum if it was the first post."""
    post = Post.query.filter_by(id=post_id).first_or_404()

    # TODO: Bulk delete
    if not can_delete_post(user=current_user, forum=post.topic.forum,
                           post_user_id=post.user_id):
        # Fixed copy-paste error: this view deletes a post, it does not edit.
        flash("You do not have the permissions to delete this post", "danger")
        return redirect(post.topic.url)

    post.delete()

    # If the post was the first post in the topic, redirect to the forums
    if post.first_post:
        return redirect(post.topic.forum.url)
    return redirect(post.topic.url)
@forum.route("/post/<int:post_id>/report", methods=["GET", "POST"])
@login_required
def report_post(post_id):
    """Let a user file a report about a post."""
    post = Post.query.filter_by(id=post_id).first_or_404()

    form = ReportForm()
    if form.validate_on_submit():
        form.save(current_user, post)
        flash("Thanks for reporting!", "success")

    # Re-render the form either way (with a success flash after saving).
    return render_template("forum/report_post.html", form=form)
@forum.route("/markread")
@forum.route("/<int:forum_id>/markread")
@forum.route("/<int:forum_id>-<slug>/markread")
@login_required
def markread(forum_id=None, slug=None):
    """Mark one forum (or, without an id, every forum) as read."""
    # Single-forum case.
    if forum_id:
        forum = Forum.query.filter_by(id=forum_id).first_or_404()

        forumsread = ForumsRead.query.filter_by(
            user_id=current_user.id, forum_id=forum.id).first()

        # Per-topic read tracking for this forum is no longer needed.
        TopicsRead.query.filter_by(user_id=current_user.id,
                                   forum_id=forum.id).delete()

        if forumsread is None:
            forumsread = ForumsRead()
            forumsread.user_id = current_user.id
            forumsread.forum_id = forum.id

        forumsread.last_read = datetime.datetime.utcnow()
        forumsread.cleared = datetime.datetime.utcnow()

        db.session.add(forumsread)
        db.session.commit()

        return redirect(forum.url)

    # No forum given: reset the tracking tables and rebuild a fresh
    # ForumsRead record for every forum.
    ForumsRead.query.filter_by(user_id=current_user.id).delete()
    TopicsRead.query.filter_by(user_id=current_user.id).delete()

    fresh_records = []
    for forum in Forum.query.all():
        forumsread = ForumsRead()
        forumsread.user_id = current_user.id
        forumsread.forum_id = forum.id
        forumsread.last_read = datetime.datetime.utcnow()
        forumsread.cleared = datetime.datetime.utcnow()
        fresh_records.append(forumsread)

    db.session.add_all(fresh_records)
    db.session.commit()

    return redirect(url_for("forum.index"))
@forum.route("/who_is_online")
def who_is_online():
    """Show currently online users (via redis if enabled, else lastseen)."""
    if current_app.config['REDIS_ENABLED']:
        online_users = get_online_users()
    else:
        # Fall back to the lastseen timestamp stored on each user.
        online_users = User.query.filter(User.lastseen >= time_diff()).all()
    return render_template("forum/online_users.html",
                           online_users=online_users)
@forum.route("/memberlist", methods=['GET', 'POST'])
def memberlist():
    """List all members, optionally filtered by the user search form."""
    page = request.args.get('page', 1, type=int)

    search_form = UserSearchForm()

    # De-duplicated the paginate/render code: pick the query first, then
    # paginate and render once (behavior is unchanged).
    if search_form.validate():
        query = search_form.get_results()
    else:
        query = User.query

    users = query.paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
    return render_template("forum/memberlist.html", users=users,
                           search_form=search_form)
@forum.route("/topictracker")
@login_required
def topictracker():
    """Show the topics the current user is tracking, newest activity first."""
    page = request.args.get("page", 1, type=int)

    # Join in the user's read status so the template can mark unread topics.
    topics = current_user.tracked_topics.\
        outerjoin(TopicsRead,
                  db.and_(TopicsRead.topic_id == Topic.id,
                          TopicsRead.user_id == current_user.id)).\
        add_entity(TopicsRead).\
        order_by(Post.id.desc()).\
        paginate(page, flaskbb_config['TOPICS_PER_PAGE'], True)

    return render_template("forum/topictracker.html", topics=topics)
@forum.route("/topictracker/<int:topic_id>/add")
@forum.route("/topictracker/<int:topic_id>-<slug>/add")
@login_required
def track_topic(topic_id, slug=None):
    """Add a topic to the current user's topic tracker."""
    topic = Topic.query.filter_by(id=topic_id).first_or_404()
    current_user.track_topic(topic)
    current_user.save()
    return redirect(topic.url)
@forum.route("/topictracker/<int:topic_id>/delete")
@forum.route("/topictracker/<int:topic_id>-<slug>/delete")
@login_required
def untrack_topic(topic_id, slug=None):
    """Remove a topic from the current user's topic tracker."""
    topic = Topic.query.filter_by(id=topic_id).first_or_404()
    current_user.untrack_topic(topic)
    current_user.save()
    return redirect(topic.url)
@forum.route("/search", methods=['GET', 'POST'])
def search():
    """Show the search form and, after submission, the search results."""
    form = SearchPageForm()

    if form.validate_on_submit():
        result = form.get_results()
        return render_template('forum/search_result.html', form=form,
                               result=result)

    return render_template('forum/search_form.html', form=form)
|
joyhuang-web/flaskbb
|
flaskbb/forum/views.py
|
Python
|
bsd-3-clause
| 18,011
|
#!/usr/bin/env python
"""
Copyright 2018-present Open Networking Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from zipfile import ZipFile, ZipInfo
# Utility to write out the ONOS OAR file bundle containing the artifacts
# required to install and activate an ONOS application.
def generateOar(output, files=()):
    """Write an ONOS OAR (zip) bundle containing the given artifacts.

    :param output: path of the .oar file to create
    :param files: iterable of (file_path, mvn_coords) pairs. Coordinates of
        'NONE' store the file at the archive root (with 'app-xml.xml'
        renamed to 'app.xml'); otherwise the file is placed in an
        m2-repository layout derived from the coordinates.
    """
    with ZipFile(output, 'w') as zip:
        for file, mvnCoords in files:
            mvnCoords = mvnCoords.replace("mvn:", "")
            filename = file.split('/')[-1]
            if mvnCoords == 'NONE':
                if 'app-xml.xml' in filename:
                    dest = 'app.xml'
                else:
                    dest = filename
            else:
                parts = mvnCoords.split(':')
                if len(parts) > 3:
                    parts.insert(2, parts.pop())  # move version to the 3rd position
                groupId, artifactId, version = parts[0:3]
                groupId = groupId.replace('.', '/')
                extension = filename.split('.')[-1]
                if extension == 'jar':
                    filename = '%s-%s.jar' % (artifactId, version)
                elif 'feature-xml' in filename:
                    filename = '%s-%s-features.xml' % (artifactId, version)
                dest = 'm2/%s/%s/%s/%s' % (groupId, artifactId, version, filename)
            # Bug fix: the source file was opened with bare open()/close(),
            # leaking the handle if writestr raised. `with` guarantees the
            # close. The fixed 1980 timestamp keeps the archive reproducible.
            with open(file, 'rb') as f:
                zip.writestr(ZipInfo(dest, date_time=(1980, 1, 1, 0, 0, 0)),
                             f.read())
if __name__ == '__main__':
    import sys

    # Usage: onos_app_bundler.py <output.oar> [<file> <mvn-coords>]...
    if len(sys.argv) < 2:
        print('USAGE')
        sys.exit(1)

    output = sys.argv[1]
    args = sys.argv[2:]
    if len(args) % 2 != 0:
        print('There must be an even number of args: file mvn_coords')
        sys.exit(2)

    # Pair up (file, mvnCoords) tuples from the flat argument list.
    files = zip(*[iter(args)]*2)
    generateOar(output, files)
|
opennetworkinglab/onos
|
tools/build/bazel/onos_app_bundler.py
|
Python
|
apache-2.0
| 2,313
|
import numpy as np
import pickle
import os

# Synthetic linear-regression demo: fit weights `w` to random data with
# plain batch gradient descent and save the weights of every step as a
# studio artifact.
no_samples = 100      # number of training rows
dim_samples = 5       # number of features
learning_rate = 0.01  # gradient-descent step size

no_steps = 10         # gradient-descent iterations

X = np.random.random((no_samples, dim_samples))
y = np.random.random((no_samples,))
w = np.random.random((dim_samples,))

for step in range(no_steps):
    yhat = X.dot(w)
    err = (yhat - y)
    # Gradient of the squared-error loss 0.5 * ||Xw - y||^2 w.r.t. w.
    dw = err.dot(X)
    w -= learning_rate * dw
    loss = 0.5 * err.dot(err)
    print("step = {}, loss = {}, L2 norm = {}".format(step, loss, w.dot(w)))

    from studio import fs_tracker
    # Bug fix: pickle.dumps returns bytes, so the file must be opened in
    # binary mode ('wb'); text mode raised TypeError under Python 3.
    with open(os.path.join(fs_tracker.get_artifact('weights'),
                           'lr_w_{}_{}.pck'.format(step, loss)),
              'wb') as f:
        f.write(pickle.dumps(w))
|
studioml/studio
|
examples/general/train_linreg.py
|
Python
|
apache-2.0
| 847
|
# -*- coding: utf-8 -*-
"""
oa
~~~~~
An oa interface written in Python.
:copyright: (c) 2015 by CC.
:license: no commercial use, all rights reserved.
"""
__version__ = 'prototype'  # the First Blade

# the actions
import actions
# the constances associated with actions
import const
import models
|
lastcc/OAHelper
|
oa/__init__.py
|
Python
|
mit
| 341
|
import re
from collections import namedtuple
from typing import Optional
from esteid import settings
from esteid.constants import Languages
from esteid.exceptions import InvalidIdCode, InvalidParameter
from esteid.signing.types import InterimSessionData
from esteid.types import PredictableDict
from esteid.validators import id_code_ee_is_valid
# Regexp used to validate user-supplied phone numbers (may be empty/None,
# in which case no pattern check is performed).
PHONE_NUMBER_REGEXP = settings.MOBILE_ID_PHONE_NUMBER_REGEXP

# Result of starting an authentication session.
AuthenticateResult = namedtuple(
    "AuthenticateResult",
    [
        "session_id",
        "hash_type",
        "hash_value",
        "verification_code",
        "hash_value_b64",
    ],
)

# Result of polling an authentication session's status.
AuthenticateStatusResult = namedtuple(
    "AuthenticateStatusResult",
    [
        "certificate",  # DER-encoded certificate
        "certificate_b64",  # Base64-encoded DER-encoded certificate
    ],
)

# Result of starting a signing session.
SignResult = namedtuple(
    "SignResult",
    [
        "session_id",
        "digest",
        "verification_code",
    ],
)

# Note: MobileID doesn't return a certificate for SignStatus. It is set from a previous call to `/certificate`
SignStatusResult = namedtuple(
    "SignStatusResult",
    [
        "signature",
        "signature_algorithm",
        "certificate",
    ],
)
class UserInput(PredictableDict):
    """User-supplied parameters for a MobileID session.

    Validates the phone number and the Estonian ID code, and normalizes
    the language to a supported one (falling back to the default).
    """

    phone_number: str
    id_code: str
    language: Optional[str]

    def is_valid(self, raise_exception=True):
        result = super().is_valid(raise_exception=raise_exception)
        if result:
            # Phone number must be present and, when a pattern is
            # configured, must match it.
            if not self.phone_number or (
                PHONE_NUMBER_REGEXP
                and not re.match(PHONE_NUMBER_REGEXP, self.phone_number)
            ):
                if raise_exception:
                    raise InvalidParameter(param="phone_number")
                return False

            if not id_code_ee_is_valid(self.id_code):
                if raise_exception:
                    raise InvalidIdCode
                return False

            # Silently fall back to the default language when the given
            # one is missing or unsupported.
            if not (self.get("language") and self.language in Languages.ALL):
                self.language = settings.MOBILE_ID_DEFAULT_LANGUAGE
        return result
class MobileIdSessionData(InterimSessionData):
    """Session data stored between MobileID requests."""
    # Identifier of the in-progress MobileID session.
    session_id: str
|
thorgate/django-esteid
|
esteid/mobileid/types.py
|
Python
|
bsd-3-clause
| 2,083
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
"""
Unit tests for ``django-envelope`` views.
"""
import unittest
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import TestCase
try:
import honeypot
except ImportError:
honeypot = None
from envelope import signals
class ContactViewTestCase(TestCase):
    """
    Unit tests for contact form view.
    """

    def setUp(self):
        # URLs are resolved from the test project's urlconf.
        self.url = reverse('envelope-contact')
        self.customized_url = reverse('customized_class_contact')
        self.subclassed_url = reverse('subclassed_class_contact')
        # Honeypot field name (django-honeypot); legitimate submissions
        # leave it empty.
        self.honeypot = getattr(settings, 'HONEYPOT_FIELD_NAME', 'email2')
        self.form_data = {
            'sender': 'zbyszek',
            'email': 'test@example.com',
            'subject': 'A subject',
            'message': 'Hello there!',
            self.honeypot: '',
        }

    def test_response_data(self):
        """
        A GET request displays the contact form.
        """
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "envelope/contact.html")
        form = response.context['form']
        self.assertFalse(form.is_bound)

    def test_prefilled_form(self):
        """
        When an authenticated user hits the form view, his username, full name
        and email address are automatically filled in.
        """
        user = User.objects.create_user('test', 'test@example.org', 'password')
        user.first_name = 'John'
        user.last_name = 'Doe'
        user.save()
        logged_in = self.client.login(username='test', password='password')
        self.assertTrue(logged_in)
        response = self.client.get(self.url)
        self.assertContains(response, 'value="test (John Doe)"')
        self.assertContains(response, 'value="test@example.org"')
        # After logging out, the fields must not be prefilled any more.
        self.client.logout()
        response = self.client.get(self.url)
        self.assertNotContains(response, 'value="test (John Doe)"')
        self.assertNotContains(response, 'value="test@example.org"')

    def test_prefilled_form_no_full_name(self):
        """
        In case the user is authenticated, but doesn't have his first and last
        name set (depends on the registration process), only his username is
        prefilled in the "From" field.
        """
        User.objects.create_user('test', 'test@example.org', 'password')
        logged_in = self.client.login(username='test', password='password')
        self.assertTrue(logged_in)
        response = self.client.get(self.url)
        self.assertContains(response, 'value="test"')

    @unittest.skipIf(honeypot is None, "django-honeypot is not installed")
    def test_honeypot(self):
        """
        If the honeypot field is not empty, keep the spammer off the page.
        """
        # A filled honeypot marks the sender as a bot -> 400.
        self.form_data.update({self.honeypot: 'some value'})
        response = self.client.post(self.url, self.form_data)
        self.assertEqual(response.status_code, 400)
        # An empty honeypot passes through normally.
        self.form_data.update({self.honeypot: ''})
        response = self.client.post(self.url, self.form_data, follow=True)
        self.assertEqual(response.status_code, 200)

    def test_form_invalid(self):
        """
        If the POST data is incorrect, the form is invalid.
        """
        self.form_data.update({'sender': ''})
        response = self.client.post(self.url, self.form_data)
        # Invalid form re-renders the page instead of redirecting.
        self.assertEqual(response.status_code, 200)

    def test_form_successful(self):
        """
        If the data is correct, a message is sent and the user is redirected.
        """
        response = self.client.post(self.url, self.form_data, follow=True)
        self.assertRedirects(response, self.url)
        self.assertEqual(len(response.redirect_chain), 1)

    def test_signal_before_send(self):
        """
        A ``before_send`` signal is emitted before sending the message.
        """
        # ugly trick to access the variable from inner scope
        params = {}

        def handle_before_send(sender, request, form, **kwargs):
            params['form'] = form

        signals.before_send.connect(handle_before_send)
        self.client.post(self.url, self.form_data, follow=True)
        self.assertEqual(params['form'].cleaned_data['email'], self.form_data['email'])

    def test_signal_after_send(self):
        """
        An ``after_send`` signal is sent after succesfully sending the message.
        """
        params = {}

        def handle_after_send(sender, message, form, **kwargs):
            params['message'] = message

        signals.after_send.connect(handle_after_send)
        self.client.post(self.url, self.form_data, follow=True)
        self.assertIn(self.form_data['subject'], params['message'].subject)

    def test_custom_template(self):
        """
        You can change the default template used to render the form.
        """
        response = self.client.get(self.customized_url)
        self.assertTemplateUsed(response, "customized_contact.html")

    def test_custom_success_url(self):
        """
        The view redirects to a custom success_url when the form is valid.
        """
        response = self.client.post(self.customized_url, self.form_data)
        self.assertRedirects(response, self.customized_url)

    def test_issue_18(self):
        """
        ContactView subclasses should also trigger spam filtering.
        See: https://github.com/zsiciarz/django-envelope/issues/18
        """
        self.form_data.update({self.honeypot: 'some value'})
        response = self.client.post(self.subclassed_url, self.form_data, follow=True)
        self.assertEqual(response.status_code, 400)
|
r4ts0n/django-envelope
|
tests/test_views.py
|
Python
|
mit
| 5,807
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply the migration: add the ``cookie`` column."""
    # Adding field 'InstanceApplication.cookie'
    db.add_column('apply_instanceapplication', 'cookie', self.gf('django.db.models.fields.CharField')(default='xXtUWTZNa9', max_length=255), keep_default=False)
def backwards(self, orm):
# Deleting field 'InstanceApplication.cookie'
db.delete_column('apply_instanceapplication', 'cookie')
models = {
'apply.instanceapplication': {
'Meta': {'object_name': 'InstanceApplication'},
'admin_contact_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'admin_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'admin_contact_phone': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'applicant': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'backend_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'cluster': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ganeti.Cluster']", 'null': 'True', 'blank': 'True'}),
'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'cookie': ('django.db.models.fields.CharField', [], {'default': "'p2HuLKFKv5'", 'max_length': '255'}),
'disk_size': ('django.db.models.fields.IntegerField', [], {}),
'filed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'hosts_mail_server': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'memory': ('django.db.models.fields.IntegerField', [], {}),
'operating_system': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['apply.Organization']"}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'vcpus': ('django.db.models.fields.IntegerField', [], {})
},
'apply.organization': {
'Meta': {'object_name': 'Organization'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'apply.sshpublickey': {
'Meta': {'object_name': 'SshPublicKey'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'fingerprint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.TextField', [], {}),
'key_type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'ganeti.cluster': {
'Meta': {'object_name': 'Cluster'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'fast_create': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'port': ('django.db.models.fields.PositiveIntegerField', [], {'default': '5080'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['apply']
|
irregulator/ganetimgr
|
apply/migrations/0004_add_application_cookie.py
|
Python
|
gpl-3.0
| 8,199
|
from unittest import TestCase
from mock import MagicMock
from random import choice
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from brain.models.sqlobjects import Probe
from irma.common.base.exceptions import IrmaDatabaseError, \
IrmaDatabaseResultNotFound
class TestModelsProbe(TestCase):
    """Unit tests for ``Probe`` backed by a fully mocked SQLAlchemy session."""

    def setUp(self):
        self.name = "name"
        self.display_name = "display_name"
        self.category = "category"
        self.mimetype_regexp = "mimetype_regexp"
        self.online = choice([True, False])
        self.session = MagicMock()

    def test001___init__(self):
        probe = Probe(self.name, self.display_name, self.category,
                      self.mimetype_regexp, self.online)
        # Every constructor argument must land on the same-named attribute.
        for attr in ("name", "display_name", "category",
                     "mimetype_regexp", "online"):
            self.assertEqual(getattr(probe, attr), getattr(self, attr))

    def test002_get_by_name(self):
        Probe.get_by_name("whatever", self.session)
        self.session.query.assert_called_once_with(Probe)
        mocked_filter = self.session.query().filter
        mocked_filter.assert_called_once()
        mocked_filter().one.assert_called_once()

    def test003_get_by_name_not_found(self):
        self.session.query.side_effect = NoResultFound
        with self.assertRaises(IrmaDatabaseResultNotFound):
            Probe.get_by_name("whatever", self.session)

    def test004_get_by_name_multiple_found(self):
        self.session.query.side_effect = MultipleResultsFound
        with self.assertRaises(IrmaDatabaseError):
            Probe.get_by_name("whatever", self.session)

    def test005_all(self):
        Probe.all(self.session)
        self.session.query.assert_called_once_with(Probe)
        self.session.query().all.assert_called_once()
|
quarkslab/irma
|
brain/tests/models/test_sqlobjects_Probe.py
|
Python
|
apache-2.0
| 1,905
|
'''
Integration test for testing power off mini hosts.
#1.operations & power off random hosts
#2.start hosts
#3.duplicated operation
@author: zhaohao.chen
'''
import apibinding.inventory as inventory
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.zstack_test.zstack_test_vm as test_vm_header
import zstackwoodpecker.zstack_test.zstack_test_volume as test_volume_header
import zstackwoodpecker.operations.vm_operations as vm_ops
import zstackwoodpecker.operations.volume_operations as vol_ops
import zstackwoodpecker.operations.host_operations as host_ops
import zstackwoodpecker.operations.scenario_operations as sce_ops
import time
import os
import random
import threading
import hashlib
import random
# Management-node IP, taken from the first MANAGEMENT_NODE resource.
MN_IP = res_ops.query_resource(res_ops.MANAGEMENT_NODE)[0].hostName
# NOTE(review): sha512 over a bare str suggests this file targets Python 2;
# on Python 3 hashlib requires bytes — confirm the interpreter version.
admin_password = hashlib.sha512('password').hexdigest()
zstack_management_ip = os.environ.get('zstackManagementIp')
test_stub = test_lib.lib_get_test_stub()
# Registry of created test resources so cleanup hooks can tear them down.
test_obj_dict = test_state.TestStateDict()
def recover_hosts(host_uuids, host_ips, wait_time):
    """Power the (previously powered-off) hosts back on, then reconnect them."""
    for host_ip in host_ips:
        cond = res_ops.gen_query_conditions('vmNics.ip', '=', host_ip)
        host_vm = sce_ops.query_resource(zstack_management_ip, res_ops.VM_INSTANCE, cond).inventories[0]
        # The host VM must actually be powered off before we restart it.
        if host_vm.state != 'Stopped':
            test_util.test_fail("Fail to power off host:{}".format(host_vm.uuid))
        sce_ops.start_vm(zstack_management_ip, host_vm.uuid)
    time.sleep(wait_time)  # wait MN
    for host_uuid in host_uuids:
        host_ops.reconnect_host(host_uuid)
def operations_shutdown(shutdown_thread, host_uuids, host_ips, wait_time, operation_thread=None):
    """Run an optional operation concurrently with a host shutdown, then recover the hosts."""
    if operation_thread is not None:
        operation_thread.start()
    shutdown_thread.start()
    shutdown_thread.join()
    # Give the shutdown time to fully take effect before recovery.
    time.sleep(180)
    recover_hosts(host_uuids, host_ips, wait_time)
def test():
    """Power off the hosts of a random cluster while resizing the VM's root
    volume, for two rounds, verifying the environment recovers each time."""
    global test_obj_dict
    wait_time = 120
    # FIX: renamed from `round`, which shadowed the builtin of the same name.
    num_rounds = 2
    test_util.test_logger("@@:mnip:{}".format(zstack_management_ip))
    cond = res_ops.gen_query_conditions('managementIp', '=', MN_IP)
    MN_HOST = res_ops.query_resource(res_ops.HOST, cond)[0]
    cluster_list = res_ops.get_resource(res_ops.CLUSTER)
    vm = test_stub.create_vm()
    test_obj_dict.add_vm(vm)
    for i in range(num_rounds):
        host_uuids = []
        host_ips = []
        mn_flag = None  # if candidate hosts including MN node
        # operations & power off random hosts
        test_util.test_logger("round {}".format(i))
        cluster_uuid = random.choice(cluster_list).uuid
        cond = res_ops.gen_query_conditions('cluster.uuid', '=', cluster_uuid)
        cluster_hosts = res_ops.query_resource(res_ops.HOST, cond)
        for host in cluster_hosts:
            if host.uuid == MN_HOST.uuid:
                mn_flag = 1
                wait_time = 900  # wait mn up
            host_uuids.append(host.uuid)
            host_ips.append(host.managementIp)
        # Grow the root volume by 1 GiB concurrently with the power-off.
        resize_root_vol_thread = threading.Thread(target=vol_ops.resize_volume, args=(vm.get_vm().rootVolumeUuid, vm.get_vm().allVolumes[0].size + 1073741824))
        power_off_thread = threading.Thread(target=host_ops.poweroff_host, args=(host_uuids, admin_password, mn_flag))
        operations_shutdown(power_off_thread, host_uuids, host_ips, wait_time, resize_root_vol_thread)
    test_util.test_pass("pass")
def error_cleanup():
    """Tear down any resources registered during the test after a failure."""
    # `global` is unnecessary for a read-only reference to the module dict.
    test_lib.lib_error_cleanup(test_obj_dict)
def env_recover():
    """Restore the environment by cleaning up all registered test resources."""
    # `global` is unnecessary for a read-only reference to the module dict.
    test_lib.lib_error_cleanup(test_obj_dict)
|
zstackio/zstack-woodpecker
|
integrationtest/vm/mini/poweroff/test_poweroff_host_resize_root_volume.py
|
Python
|
apache-2.0
| 3,609
|
from theia.watcher import (FileSource,
DirectoryEventHandler,
SourcesDaemon)
import tempfile
from unittest import mock
from watchdog.observers import Observer
from theia.comm import Client
import os
def test_file_source_modified():
    """FileSource.modified() reads appended data and invokes the callback."""
    mock_callback = mock.MagicMock()
    file_path = None
    with tempfile.NamedTemporaryFile() as tmpfile:
        fs = FileSource(file_path=tmpfile.name, callback=mock_callback, tags=['a', 'b'])
        file_path = tmpfile.name
        tmpfile.write('test content'.encode('utf-8'))
        tmpfile.flush()
        fs.modified()
        # Reading the new data must advance the source's file position.
        assert fs.position > 0
        # BUG FIX: the original `assert mock_callback.called_once_with(...)`
        # always passed — `called_once_with` is not a Mock assertion method;
        # the attribute access merely creates a truthy child mock.
        # NOTE(review): expected args were (b'test content', file_path,
        # ['a', 'b']) per the original — confirm the callback signature
        # before tightening this to assert_called_once_with.
        mock_callback.assert_called_once()
def test_file_source_moved():
    """moved() updates the tracked path but keeps the read position."""
    with tempfile.NamedTemporaryFile() as tmpfile:
        fs = FileSource(file_path=tmpfile.name, callback=None)
        assert fs.path == tmpfile.name
        assert fs.position == 0
        # change the positon
        payload = 'test content'.encode('utf-8')
        tmpfile.write(payload)
        tmpfile.flush()
        fs.position = len(payload)
        fs.moved('/another/location')
        assert fs.path == '/another/location'
        assert fs.position == len(payload)  # must not reset the position
def test_file_source_created():
    """created() leaves a fresh source at position zero."""
    source = FileSource(file_path='/fictitious', callback=None)
    source.created()
    assert source.position == 0
class _FakeEvent:
def __init__(self, src=None, dest=None):
self.src_path = src
self.dest_path = dest
def test_directory_event_handler_on_moved():
    """on_moved dispatches to the registered 'moved' handler."""
    mock_handler = mock.MagicMock()
    dir_handler = DirectoryEventHandler({
        'moved': mock_handler
    })
    dir_handler.on_moved(_FakeEvent(src='a', dest='b'))
    # BUG FIX: `assert mock_handler.called_once_with('a', 'b')` always
    # passed — accessing an undefined attribute on a Mock returns a new
    # truthy child mock. Use a real Mock assertion instead.
    mock_handler.assert_called_once()
def test_directory_event_handler_on_created():
    """on_created dispatches to the registered 'created' handler."""
    mock_handler = mock.MagicMock()
    dir_handler = DirectoryEventHandler({
        'created': mock_handler
    })
    # BUG FIX: the original called on_moved() (copy-paste), so the
    # 'created' path was never exercised.
    dir_handler.on_created(_FakeEvent(src='a'))
    # BUG FIX: `assert mock_handler.called_once_with('a')` was always true;
    # use a real Mock assertion.
    mock_handler.assert_called_once()
def test_directory_event_handler_on_deleted():
    """on_deleted dispatches to the registered 'deleted' handler."""
    mock_handler = mock.MagicMock()
    dir_handler = DirectoryEventHandler({
        'deleted': mock_handler
    })
    # BUG FIX: the original called on_moved() (copy-paste), so the
    # 'deleted' path was never exercised.
    dir_handler.on_deleted(_FakeEvent(src='a'))
    # BUG FIX: `assert mock_handler.called_once_with('a')` was always true;
    # use a real Mock assertion.
    mock_handler.assert_called_once()
def test_directory_event_handler_on_modified():
    """on_modified dispatches to the registered 'modified' handler."""
    mock_handler = mock.MagicMock()
    dir_handler = DirectoryEventHandler({
        'modified': mock_handler
    })
    # BUG FIX: the original called on_moved() (copy-paste), so the
    # 'modified' path was never exercised.
    dir_handler.on_modified(_FakeEvent(src='a'))
    # BUG FIX: `assert mock_handler.called_once_with('a')` was always true;
    # use a real Mock assertion.
    mock_handler.assert_called_once()
@mock.patch.object(Observer, 'start')
@mock.patch.object(Observer, 'schedule')
def test_sources_daemon_add_source(m_schedule, m_start):
    """add_source registers the file with the daemon and schedules a watch
    on its parent directory."""
    with tempfile.TemporaryDirectory() as tmpdir:
        def fake_schedule(dir_handler, pdir, recursive):
            # The daemon must watch the file's parent dir, non-recursively.
            assert isinstance(dir_handler, DirectoryEventHandler)
            assert pdir == tmpdir
            assert recursive is False
        m_schedule.side_effect = fake_schedule
        sd = SourcesDaemon(observer=Observer(), client=None, tags=['a','b'])
        # The observer is started once, at daemon construction.
        assert m_start.call_count == 1
        sd.add_source(fpath=os.path.join(tmpdir, 'test_source'), tags=['c'])
        assert m_schedule.call_count == 1
        # sources is keyed by parent directory, then by file name.
        assert len(sd.sources) == 1
        assert len(sd.sources.get(tmpdir, {})) == 1
        assert sd.sources[tmpdir].get('test_source') is not None
        # Daemon-level tags come first, then the per-source tags.
        assert sd.sources[tmpdir].get('test_source').tags == ['a', 'b', 'c']
@mock.patch.object(Observer, 'start')
@mock.patch.object(Observer, 'schedule')
def test_sources_daemon_add_source_then_remove_source(m_schedule, m_start):
    """remove_source drops the file entry and prunes the now-empty
    parent-directory bucket."""
    with tempfile.TemporaryDirectory() as tmpdir:
        def fake_schedule(dir_handler, pdir, recursive):
            assert isinstance(dir_handler, DirectoryEventHandler)
            assert pdir == tmpdir
            assert recursive is False
        m_schedule.side_effect = fake_schedule
        sd = SourcesDaemon(observer=Observer(), client=None, tags=['a','b'])
        assert m_start.call_count == 1
        sd.add_source(fpath=os.path.join(tmpdir, 'test_source'), tags=['c'])
        assert m_schedule.call_count == 1
        # Registration must look exactly like in test_sources_daemon_add_source.
        assert len(sd.sources) == 1
        assert len(sd.sources.get(tmpdir, {})) == 1
        assert sd.sources[tmpdir].get('test_source') is not None
        assert sd.sources[tmpdir].get('test_source').tags == ['a', 'b', 'c']
        sd.remove_source(fpath=os.path.join(tmpdir, 'test_source'))
        assert len(sd.sources) == 0 # remove the directory as well
@mock.patch.object(Observer, 'start')
@mock.patch.object(Observer, 'schedule')
@mock.patch.object(Client, 'send_event')
def test_sources_daemon_modified_file(m_send_event, m_schedule, m_start):
    """Each 'modified' event makes the daemon read the newly appended data
    and send it to the client as an event carrying the source's tags."""
    with tempfile.TemporaryDirectory() as tmpdir:
        # Shared mutable state lets the fakes communicate with the test body.
        state = {}
        def fake_schedule(dir_handler, pdir, recursive):
            assert isinstance(dir_handler, DirectoryEventHandler)
            assert pdir == tmpdir
            assert recursive is False
            # Capture the handler so we can fire events on it directly.
            state[pdir] = dir_handler
        m_schedule.side_effect = fake_schedule
        def fake_send(event):
            # The event must carry exactly the bytes written since the last
            # read (decoded), plus the combined tags.
            assert event.content is not None
            assert event.content == state['content'].decode('utf-8')
            assert event.tags == state['tags']
        m_send_event.side_effect = fake_send
        client = Client(loop=None, host=None, port=None) # just create a reference, don't connect
        sd = SourcesDaemon(observer=Observer(), client=client, tags=['a','b'])
        assert m_start.call_count == 1
        with open(os.path.join(tmpdir, 'test_source'), 'w+b') as test_source:
            sd.add_source(fpath=os.path.join(tmpdir, 'test_source'), tags=['c'])
            assert m_schedule.call_count == 1
            assert len(sd.sources) == 1
            assert len(sd.sources.get(tmpdir, {})) == 1
            assert sd.sources[tmpdir].get('test_source') is not None
            assert sd.sources[tmpdir].get('test_source').tags == ['a', 'b', 'c']
            content = 'test content'.encode('utf-8')
            state['content'] = content
            state['tags'] = ['a', 'b', 'c']
            test_source.write(content)
            test_source.flush()
            # notify the daemon by calling the handler directly
            fh = state[tmpdir]
            fh.on_modified(_FakeEvent(src=os.path.join(tmpdir, 'test_source')))
            assert m_send_event.call_count == 1
            # A second append must produce a second event with only the
            # newly written bytes.
            content = 'another change'.encode('utf-8')
            state['content'] = content
            test_source.write(content)
            test_source.flush()
            fh.on_modified(_FakeEvent(src=os.path.join(tmpdir, 'test_source')))
            assert m_send_event.call_count == 2
@mock.patch.object(Observer, 'start')
@mock.patch.object(Observer, 'schedule')
@mock.patch.object(FileSource, 'moved')
def test_sources_daemon_moved(m_moved, m_schedule, m_start):
    """A filesystem 'moved' event must be forwarded to FileSource.moved."""
    state = {}
    with tempfile.TemporaryDirectory() as tmpdir:
        def fake_schedule(dir_handler, pdir, recursive):
            assert isinstance(dir_handler, DirectoryEventHandler)
            assert pdir == tmpdir
            assert recursive is False
            # Capture the handler so we can fire events on it directly.
            state[pdir] = dir_handler
        m_schedule.side_effect = fake_schedule
        sd = SourcesDaemon(observer=Observer(), client=None, tags=['a','b'])
        assert m_start.call_count == 1
        sd.add_source(fpath=os.path.join(tmpdir, 'test_source'), tags=['c'])
        assert m_schedule.call_count == 1
        assert len(sd.sources) == 1
        assert len(sd.sources.get(tmpdir, {})) == 1
        assert sd.sources[tmpdir].get('test_source') is not None
        assert sd.sources[tmpdir].get('test_source').tags == ['a', 'b', 'c']
        fh = state[tmpdir]
        with tempfile.TemporaryDirectory() as otherdir:
            fh.on_moved(_FakeEvent(src=os.path.join(tmpdir, 'test_source'), dest=os.path.join(otherdir, 'source_moved')))
            # BUG FIX: `assert m_moved.called_once_with(...)` always passed —
            # `called_once_with` is not a Mock assertion method; the attribute
            # access just creates a truthy child mock.
            m_moved.assert_called_once()
@mock.patch.object(Observer, 'start')
@mock.patch.object(Observer, 'schedule')
@mock.patch.object(FileSource, 'created')
def test_sources_daemon_created(m_created, m_schedule, m_start):
    """A filesystem 'created' event must be forwarded to FileSource.created."""
    state = {}
    with tempfile.TemporaryDirectory() as tmpdir:
        def fake_schedule(dir_handler, pdir, recursive):
            assert isinstance(dir_handler, DirectoryEventHandler)
            assert pdir == tmpdir
            assert recursive is False
            # Capture the handler so we can fire events on it directly.
            state[pdir] = dir_handler
        m_schedule.side_effect = fake_schedule
        sd = SourcesDaemon(observer=Observer(), client=None, tags=['a','b'])
        assert m_start.call_count == 1
        sd.add_source(fpath=os.path.join(tmpdir, 'test_source'), tags=['c'])
        assert m_schedule.call_count == 1
        assert len(sd.sources) == 1
        assert len(sd.sources.get(tmpdir, {})) == 1
        assert sd.sources[tmpdir].get('test_source') is not None
        assert sd.sources[tmpdir].get('test_source').tags == ['a', 'b', 'c']
        fh = state[tmpdir]
        fh.on_created(_FakeEvent(src=os.path.join(tmpdir, 'test_source')))
        # BUG FIX: `assert m_created.called_once_with(...)` always passed —
        # use a real Mock assertion.
        m_created.assert_called_once()
@mock.patch.object(Observer, 'start')
@mock.patch.object(Observer, 'schedule')
@mock.patch.object(FileSource, 'removed')
def test_sources_daemon_deleted(m_removed, m_schedule, m_start):
    """A filesystem 'deleted' event must be forwarded to FileSource.removed."""
    state = {}
    with tempfile.TemporaryDirectory() as tmpdir:
        def fake_schedule(dir_handler, pdir, recursive):
            assert isinstance(dir_handler, DirectoryEventHandler)
            assert pdir == tmpdir
            assert recursive is False
            # Capture the handler so we can fire events on it directly.
            state[pdir] = dir_handler
        m_schedule.side_effect = fake_schedule
        sd = SourcesDaemon(observer=Observer(), client=None, tags=['a','b'])
        assert m_start.call_count == 1
        sd.add_source(fpath=os.path.join(tmpdir, 'test_source'), tags=['c'])
        assert m_schedule.call_count == 1
        assert len(sd.sources) == 1
        assert len(sd.sources.get(tmpdir, {})) == 1
        assert sd.sources[tmpdir].get('test_source') is not None
        assert sd.sources[tmpdir].get('test_source').tags == ['a', 'b', 'c']
        fh = state[tmpdir]
        fh.on_deleted(_FakeEvent(src=os.path.join(tmpdir, 'test_source')))
        # BUG FIX: `assert m_removed.called_once_with(...)` always passed —
        # use a real Mock assertion.
        m_removed.assert_called_once()
|
theia-log/theia
|
tests/test_watcher.py
|
Python
|
apache-2.0
| 11,177
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
import os
import sys
from binascii import hexlify, unhexlify
from base64 import b64encode
from decimal import Decimal, ROUND_DOWN
import json
import http.client
import random
import shutil
import subprocess
import time
import re
import errno
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
# Directory for RPC coverage logs; set via enable_coverage(), None disables.
COVERAGE_DIR = None
# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
# Seconds stop_node() waits for a bitcoind process to exit.
BITCOIND_PROC_WAIT_TIMEOUT = 60
class PortSeed:
    """Per-process seed used to spread port assignments across test runs."""
    # Must be initialized with a unique integer for each process
    n = None
# Set Mocktime default to OFF.
# MOCKTIME is only needed for scripts that use the
# cached version of the blockchain. If the cached
# version of the blockchain is used without MOCKTIME
# then the mempools will not sync due to IBD.
MOCKTIME = 0
def enable_mocktime():
    """Pin MOCKTIME to the fixed timestamp the block cache was built with."""
    # For backward compatibility of the python scripts
    # with previous versions of the cache, set MOCKTIME
    # to Jan 1, 2014 + (201 * 10 * 60)
    global MOCKTIME
    MOCKTIME = 1388534400 + (201 * 10 * 60)
def disable_mocktime():
    """Reset MOCKTIME to 0 (mock time disabled)."""
    global MOCKTIME
    MOCKTIME = 0
def get_mocktime():
    """Return the current MOCKTIME value (0 means disabled)."""
    return MOCKTIME
def enable_coverage(dirname):
    """Maintain a log of which RPC calls are made during testing."""
    # get_rpc_proxy() checks COVERAGE_DIR to decide whether to log calls.
    global COVERAGE_DIR
    COVERAGE_DIR = dirname
def get_rpc_proxy(url, node_number, timeout=None):
    """
    Args:
        url (str): URL of the RPC server to call
        node_number (int): the node number (or id) that this calls to
    Kwargs:
        timeout (int): HTTP timeout in seconds
    Returns:
        AuthServiceProxy. convenience object for making RPC calls.
    """
    kwargs = {}
    if timeout is not None:
        kwargs['timeout'] = timeout
    proxy = AuthServiceProxy(url, **kwargs)
    proxy.url = url  # store URL on proxy for info
    if COVERAGE_DIR:
        logfile = coverage.get_filename(COVERAGE_DIR, node_number)
    else:
        logfile = None
    return coverage.AuthServiceProxyWrapper(proxy, logfile)
def p2p_port(n):
    """Return the p2p listen port for node *n*, offset by the process seed."""
    assert(n <= MAX_NODES)
    seed_offset = (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
    return PORT_MIN + n + seed_offset
def rpc_port(n):
    """Return the RPC port for node *n* (p2p range shifted by PORT_RANGE)."""
    seed_offset = (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
    return PORT_MIN + PORT_RANGE + n + seed_offset
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    value = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(value)))
    # 2000000000000003 is the exact satoshi count of the test value.
    if int(round_tripped * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
    """Return the number of bytes encoded by *hex_string*."""
    return len(bytes.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
    """Encode a bytes-like object as a lowercase ASCII hex string."""
    hex_bytes = hexlify(byte_str)
    return hex_bytes.decode('ascii')
def hex_str_to_bytes(hex_str):
    """Decode an ASCII hex string into the bytes it represents."""
    ascii_bytes = hex_str.encode('ascii')
    return unhexlify(ascii_bytes)
def str_to_b64str(string):
    """Return the base64 encoding of *string* (UTF-8) as an ASCII str."""
    utf8_bytes = string.encode('utf-8')
    return b64encode(utf8_bytes).decode('ascii')
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same tip.

    sync_blocks needs to be called with an rpc_connections set that has least
    one node already synced to the latest, stable tip, otherwise there's a
    chance it might return before all nodes are stably synced.
    """
    # Use getblockcount() instead of waitforblockheight() to determine the
    # initial max height because the two RPCs look at different internal global
    # variables (chainActive vs latestBlock) and the former gets updated
    # earlier.
    target_height = max(node.getblockcount() for node in rpc_connections)
    deadline = time.time() + timeout
    while True:
        tips = [node.waitforblockheight(target_height, int(wait * 1000))
                for node in rpc_connections]
        if all(tip["height"] == target_height for tip in tips):
            if all(tip["hash"] == tips[0]["hash"] for tip in tips):
                return
            raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
                "".join("\n {!r}".format(tip) for tip in tips)))
        if time.time() > deadline:
            break
    raise AssertionError("Block sync to height {} timed out:{}".format(
        target_height, "".join("\n {!r}".format(tip) for tip in tips)))
def sync_chain(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same best block
    """
    remaining = timeout
    while remaining > 0:
        hashes = [node.getbestblockhash() for node in rpc_connections]
        # All best-block hashes must agree before we return.
        if all(h == hashes[0] for h in hashes):
            return
        time.sleep(wait)
        remaining -= wait
    raise AssertionError("Chain sync failed: Best block hashes don't match")
def sync_mempools(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    while timeout > 0:
        # Compare everyone against node 0's mempool.
        reference = set(rpc_connections[0].getrawmempool())
        matches = 1 + sum(1 for node in rpc_connections[1:]
                          if set(node.getrawmempool()) == reference)
        if matches == len(rpc_connections):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Mempool sync failed")
bitcoind_processes = {}
def initialize_datadir(dirname, n):
    """Create node *n*'s datadir with a regtest bitcoin.conf; return its path."""
    datadir = os.path.join(dirname, "node" + str(n))
    if not os.path.isdir(datadir):
        os.makedirs(datadir)
    rpc_u, rpc_p = rpc_auth_pair(n)
    config_lines = [
        "regtest=1\n",
        "rpcuser=" + rpc_u + "\n",
        "rpcpassword=" + rpc_p + "\n",
        "port=" + str(p2p_port(n)) + "\n",
        "rpcport=" + str(rpc_port(n)) + "\n",
        "listenonion=0\n",
    ]
    with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
        f.writelines(config_lines)
    return datadir
def rpc_auth_pair(n):
    """Return the (rpcuser, rpcpassword) pair for node *n*.

    The non-ASCII characters deliberately exercise unicode handling in the
    RPC credential path.
    """
    suffix = str(n)
    return 'rpcuser💻' + suffix, 'rpcpass🔑' + suffix
def rpc_url(i, rpchost=None):
    """Build the authenticated RPC URL for node *i*, honoring an optional
    host or host:port override."""
    user, password = rpc_auth_pair(i)
    host = '127.0.0.1'
    port = rpc_port(i)
    if rpchost:
        pieces = rpchost.split(':')
        # Exactly "host:port" overrides both; anything else is host only.
        if len(pieces) == 2:
            host, port = pieces
        else:
            host = rpchost
    return "http://%s:%s@%s:%d" % (user, password, host, int(port))
def wait_for_bitcoind_start(process, url, i):
    '''
    Wait for bitcoind to start. This means that RPC is accessible and fully initialized.
    Raise an exception if bitcoind exits during initialization.

    Polls every 0.25s; only ECONNREFUSED (port not open yet) and JSON-RPC
    error -28 (server still warming up) are treated as "keep waiting".
    '''
    while True:
        # Process died before RPC ever came up.
        if process.poll() is not None:
            raise Exception('bitcoind exited with status %i during initialization' % process.returncode)
        try:
            rpc = get_rpc_proxy(url, i)
            blocks = rpc.getblockcount()
            break # break out of loop on success
        except IOError as e:
            if e.errno != errno.ECONNREFUSED: # Port not yet open?
                raise # unknown IO error
        except JSONRPCException as e: # Initialization phase
            if e.error['code'] != -28: # RPC in warmup?
                raise # unknown JSON RPC exception
        time.sleep(0.25)
def initialize_chain(test_dir, num_nodes, cachedir):
    """
    Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
    Afterward, create num_nodes copies from the cache
    """
    assert num_nodes <= MAX_NODES
    create_cache = False
    for i in range(MAX_NODES):
        if not os.path.isdir(os.path.join(cachedir, 'node' + str(i))):
            create_cache = True
            break
    if create_cache:
        # find and delete old cache directories if any exist
        for i in range(MAX_NODES):
            if os.path.isdir(os.path.join(cachedir, "node" + str(i))):
                shutil.rmtree(os.path.join(cachedir, "node" + str(i)))
        # Create cache directories, run bitcoinds:
        for i in range(MAX_NODES):
            datadir = initialize_datadir(cachedir, i)
            args = [os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir=" + datadir, "-discover=0"]
            if i > 0:
                args.append("-connect=127.0.0.1:" + str(p2p_port(0)))
            bitcoind_processes[i] = subprocess.Popen(args)
            if os.getenv("PYTHON_DEBUG", ""):
                print("initialize_chain: bitcoind started, waiting for RPC to come up")
            wait_for_bitcoind_start(bitcoind_processes[i], rpc_url(i), i)
            if os.getenv("PYTHON_DEBUG", ""):
                print("initialize_chain: RPC successfully started")
        rpcs = []
        for i in range(MAX_NODES):
            try:
                rpcs.append(get_rpc_proxy(rpc_url(i), i))
            except Exception:
                # BUG FIX: the original referenced an undefined name `url`
                # here (NameError instead of an error report), and used a
                # bare `except:` that also swallowed SystemExit and
                # KeyboardInterrupt.
                sys.stderr.write("Error connecting to " + rpc_url(i) + "\n")
                sys.exit(1)
        # Create a 200-block-long chain; each of the 4 first nodes
        # gets 25 mature blocks and 25 immature.
        # Note: To preserve compatibility with older versions of
        # initialize_chain, only 4 nodes will generate coins.
        #
        # blocks are created with timestamps 10 minutes apart
        # starting from 2010 minutes in the past
        enable_mocktime()
        block_time = get_mocktime() - (201 * 10 * 60)
        for i in range(2):
            for peer in range(4):
                for j in range(25):
                    set_node_times(rpcs, block_time)
                    rpcs[peer].generate(1)
                    block_time += 10 * 60
                # Must sync before next peer starts generating blocks
                sync_blocks(rpcs)
        # Shut them down, and clean up cache directories:
        stop_nodes(rpcs)
        disable_mocktime()
        for i in range(MAX_NODES):
            os.remove(log_filename(cachedir, i, "debug.log"))
            os.remove(log_filename(cachedir, i, "db.log"))
            os.remove(log_filename(cachedir, i, "peers.dat"))
            os.remove(log_filename(cachedir, i, "fee_estimates.dat"))
    # Copy the cached datadirs for the nodes this test actually uses.
    for i in range(num_nodes):
        from_dir = os.path.join(cachedir, "node" + str(i))
        to_dir = os.path.join(test_dir, "node" + str(i))
        shutil.copytree(from_dir, to_dir)
        initialize_datadir(test_dir, i)  # Overwrite port/rpcport in bitcoin.conf
def initialize_chain_clean(test_dir, num_nodes):
    """
    Create an empty blockchain and num_nodes wallets.
    Useful if a test case wants complete control over initialization.
    """
    for i in range(num_nodes):
        # FIX: dropped the unused `datadir` local; initialize_datadir creates
        # the directory and writes bitcoin.conf as a side effect.
        initialize_datadir(test_dir, i)
def _rpchost_to_args(rpchost):
'''Convert optional IP:port spec to rpcconnect/rpcport args'''
if rpchost is None:
return []
match = re.match('(\[[0-9a-fA-f:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
if not match:
raise ValueError('Invalid RPC host spec ' + rpchost)
rpcconnect = match.group(1)
rpcport = match.group(2)
if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
rpcconnect = rpcconnect[1:-1]
rv = ['-rpcconnect=' + rpcconnect]
if rpcport:
rv += ['-rpcport=' + rpcport]
return rv
def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
    """
    Start a bitcoind and return RPC connection to it

    Args:
        i (int): node index; selects the datadir and ports.
        dirname (str): test directory containing the node datadirs.
        extra_args (list): additional bitcoind command-line arguments.
        rpchost (str): optional host[:port] override for the RPC endpoint.
        timewait (int): RPC timeout in seconds for the returned proxy.
        binary (str): bitcoind binary path; defaults to $BITCOIND.
    """
    datadir = os.path.join(dirname, "node"+str(i))
    if binary is None:
        binary = os.getenv("BITCOIND", "bitcoind")
    args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-mocktime="+str(get_mocktime()) ]
    if extra_args is not None: args.extend(extra_args)
    # The process handle is kept so stop_node() can wait on it later.
    bitcoind_processes[i] = subprocess.Popen(args)
    if os.getenv("PYTHON_DEBUG", ""):
        print("start_node: bitcoind started, waiting for RPC to come up")
    url = rpc_url(i, rpchost)
    wait_for_bitcoind_start(bitcoind_processes[i], url, i)
    if os.getenv("PYTHON_DEBUG", ""):
        print("start_node: RPC successfully started")
    proxy = get_rpc_proxy(url, i, timeout=timewait)
    if COVERAGE_DIR:
        coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)
    return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
    """
    Start multiple bitcoinds, return RPC connections to them
    """
    # Expand per-node argument lists when the caller passed nothing.
    extra_args = extra_args if extra_args is not None else [None] * num_nodes
    binary = binary if binary is not None else [None] * num_nodes
    rpcs = []
    try:
        for i in range(num_nodes):
            rpcs.append(start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i]))
    except:
        # If one node failed to start, stop the others
        stop_nodes(rpcs)
        raise
    return rpcs
def log_filename(dirname, n_node, logname):
    """Return the path of *logname* inside node *n_node*'s regtest directory."""
    node_dir = "node" + str(n_node)
    return os.path.join(dirname, node_dir, "regtest", logname)
def stop_node(node, i):
    """Ask node *i* to shut down via RPC and wait for its process to exit."""
    try:
        node.stop()
    except http.client.CannotSendRequest as e:
        # The HTTP connection may already be unusable; warn and still wait
        # for the process below.
        print("WARN: Unable to stop node: " + repr(e))
    return_code = bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
    # A clean shutdown must exit with status 0.
    assert_equal(return_code, 0)
    del bitcoind_processes[i]
def stop_nodes(nodes):
    """Stop every node in *nodes*; afterwards no tracked process may remain."""
    for i, node in enumerate(nodes):
        stop_node(node, i)
    assert not bitcoind_processes.values() # All connections must be gone now
def set_node_times(nodes, t):
    """Set the mock time of every RPC node in *nodes* to timestamp *t*."""
    for rpc_node in nodes:
        rpc_node.setmocktime(t)
def connect_nodes(from_connection, node_num):
    """One-way connect: make *from_connection* add node *node_num* as a peer."""
    ip_port = "127.0.0.1:"+str(p2p_port(node_num))
    from_connection.addnode(ip_port, "onetry")
    # poll until version handshake complete to avoid race conditions
    # with transaction relaying
    while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
        time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
    """Connect nodes[a] and nodes[b] to each other (both directions)."""
    for src, dst in ((a, b), (b, a)):
        connect_nodes(nodes[src], dst)
def find_output(node, txid, amount):
    """
    Return the index of the output of *txid* whose value equals *amount*.

    Raises RuntimeError when no such output exists.
    """
    txdata = node.getrawtransaction(txid, 1)
    for index, vout in enumerate(txdata["vout"]):
        if vout["value"] == amount:
            return index
    raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
    """
    Return (total_in, inputs): a random selection of unspent txouts whose
    combined value covers *amount_needed*.

    Raises RuntimeError when the wallet cannot cover the requested amount.
    """
    assert confirmations_required >= 0
    utxo = from_node.listunspent(confirmations_required)
    random.shuffle(utxo)
    selected = []
    total_in = Decimal("0.00000000")
    while utxo and total_in < amount_needed:
        candidate = utxo.pop()
        total_in += candidate["amount"]
        selected.append({"txid": candidate["txid"],
                         "vout": candidate["vout"],
                         "address": candidate["address"]})
    if total_in < amount_needed:
        raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
    return (total_in, selected)
def make_change(from_node, amount_in, amount_out, fee):
    """
    Create change output(s) for a transaction; returns {address: value}.
    """
    outputs = {}
    spent = amount_out + fee
    change = amount_in - spent
    if change > spent * 2:
        # Break up a big input into two change outputs, splitting the
        # change in half while being careful of rounding.
        change_address = from_node.getnewaddress()
        half = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
        outputs[change_address] = half
        change = amount_in - spent - half
    if change > 0:
        outputs[from_node.getnewaddress()] = change
    return outputs
def send_zeropri_transaction(from_node, to_node, amount, fee):
    """
    Create&broadcast a zero-priority transaction.
    Returns (txid, hex-encoded-txdata)
    Ensures transaction is zero-priority by first creating a send-to-self,
    then using its output
    """
    # Create a send-to-self with confirmed inputs:
    self_address = from_node.getnewaddress()
    # Gather amount+fee for the final send plus a second fee for the
    # intermediate self-send.
    (total_in, inputs) = gather_inputs(from_node, amount+fee*2)
    outputs = make_change(from_node, total_in, amount+fee, fee)
    outputs[self_address] = float(amount+fee)
    self_rawtx = from_node.createrawtransaction(inputs, outputs)
    self_signresult = from_node.signrawtransaction(self_rawtx)
    self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)
    # Locate the self-send output so it can be spent immediately.
    vout = find_output(from_node, self_txid, amount+fee)
    # Now immediately spend the output to create a 1-input, 1-output
    # zero-priority transaction:
    inputs = [ { "txid" : self_txid, "vout" : vout } ]
    outputs = { to_node.getnewaddress() : float(amount) }
    rawtx = from_node.createrawtransaction(inputs, outputs)
    signresult = from_node.signrawtransaction(rawtx)
    txid = from_node.sendrawtransaction(signresult["hex"], True)
    return (txid, signresult["hex"])
def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a zero-priority transaction between two randomly chosen nodes.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    sender = random.choice(nodes)
    receiver = random.choice(nodes)
    fee = min_fee + fee_increment * random.randint(0, fee_variants)
    (txid, txhex) = send_zeropri_transaction(sender, receiver, amount, fee)
    return (txid, txhex, fee)
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random transaction between two randomly chosen nodes.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    sender = random.choice(nodes)
    receiver = random.choice(nodes)
    fee = min_fee + fee_increment * random.randint(0, fee_variants)
    (total_in, inputs) = gather_inputs(sender, amount + fee)
    outputs = make_change(sender, total_in, amount, fee)
    outputs[receiver.getnewaddress()] = float(amount)
    rawtx = sender.createrawtransaction(inputs, outputs)
    signresult = sender.signrawtransaction(rawtx)
    txid = sender.sendrawtransaction(signresult["hex"], True)
    return (txid, signresult["hex"], fee)
def assert_fee_amount(fee, tx_size, fee_per_kB):
    """Assert the fee was in range for a transaction of *tx_size* bytes."""
    target_fee = tx_size * fee_per_kB / 1000
    if fee < target_fee:
        raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee)))
    # allow the wallet's estimation to be at most 2 bytes off
    upper_bound = (tx_size + 2) * fee_per_kB / 1000
    if fee > upper_bound:
        raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)"%(str(fee), str(target_fee)))
def assert_equal(thing1, thing2, *args):
    """Raise AssertionError unless every given value compares equal to the first."""
    values = (thing1, thing2) + args
    if thing1 != thing2 or any(thing1 != extra for extra in args):
        raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in values))
def assert_greater_than(thing1, thing2):
    """Raise AssertionError unless thing1 is strictly greater than thing2."""
    if thing1 <= thing2:
        msg = "%s <= %s" % (str(thing1), str(thing2))
        raise AssertionError(msg)
def assert_greater_than_or_equal(thing1, thing2):
    """Raise AssertionError unless thing1 >= thing2."""
    if thing1 < thing2:
        msg = "%s < %s" % (str(thing1), str(thing2))
        raise AssertionError(msg)
def assert_raises(exc, fun, *args, **kwds):
    """Assert that fun(*args, **kwds) raises *exc* (no message check)."""
    assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
    """
    Assert that fun(*args, **kwds) raises *exc*.

    When *message* is not None, additionally require it to occur as a
    substring of the error message carried on the exception's .error dict.
    """
    try:
        fun(*args, **kwds)
    except exc as e:
        if message is not None:
            if message not in e.error['message']:
                raise AssertionError("Expected substring not found:"+e.error['message'])
    except Exception as e:
        raise AssertionError("Unexpected exception raised: "+type(e).__name__)
    else:
        raise AssertionError("No exception raised")
def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
    """Run an RPC and verify that a specific JSONRPC exception code and message is raised.

    Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
    and verifies that the error code and message are as expected. Throws AssertionError if
    no JSONRPCException was returned or if the error code/message are not as expected.

    Args:
        code (int), optional: the error code returned by the RPC call (defined
            in src/rpc/protocol.h). Set to None if checking the error code is not required.
        message (string), optional: [a substring of] the error string returned by the
            RPC call. Set to None if checking the error string is not required
        fun (function): the function to call. This should be the name of an RPC.
        args*: positional arguments for the function.
        kwds**: named arguments for the function.
    """
    try:
        fun(*args, **kwds)
    except JSONRPCException as e:
        # JSONRPCException was thrown as expected. Check the code and message values are correct.
        if (code is not None) and (code != e.error["code"]):
            raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
        if (message is not None) and (message not in e.error['message']):
            raise AssertionError("Expected substring not found:"+e.error['message'])
    except Exception as e:
        # Any other exception type means the call failed in an unexpected way.
        raise AssertionError("Unexpected exception raised: "+type(e).__name__)
    else:
        raise AssertionError("No exception raised")
def assert_is_hex_string(string):
    """Raise AssertionError unless *string* parses as a base-16 number."""
    try:
        int(string, 16)
    except Exception as err:
        msg = "Couldn't interpret %r as hexadecimal; raised: %s" % (string, err)
        raise AssertionError(msg)
def assert_is_hash_string(string, length=64):
    """
    Raise AssertionError unless *string* looks like a lowercase-hex hash.

    length: required character count, or a falsy value to skip that check.
    """
    if not isinstance(string, str):
        raise AssertionError("Expected a string, got type %r" % type(string))
    if length and len(string) != length:
        raise AssertionError(
            "String of length %d expected; got %d" % (length, len(string)))
    if not re.match('[abcdef0-9]+$', string):
        raise AssertionError(
            "String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find = False):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    If the should_not_find flag is true, to_match should not be found
    in object_array
    """
    if should_not_find == True:
        assert_equal(expected, { })
    num_matched = 0
    for item in object_array:
        # Does this item carry every key/value pair listed in to_match?
        all_match = True
        for key, value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        if should_not_find == True:
            num_matched += 1
        for key, value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
        num_matched += 1
    if num_matched == 0 and should_not_find != True:
        raise AssertionError("No objects matched %s"%(str(to_match)))
    if num_matched > 0 and should_not_find == True:
        raise AssertionError("Objects were found %s"%(str(to_match)))
def satoshi_round(amount):
    """Round *amount* down to the nearest satoshi (8 decimal places)."""
    one_satoshi = Decimal('0.00000001')
    return Decimal(amount).quantize(one_satoshi, rounding=ROUND_DOWN)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
    """Split the node's coins until it owns at least *count* confirmed UTXOs."""
    # Mine blocks: 101 lets the first coinbase mature, plus extra spendables.
    node.generate(int(0.5*count)+101)
    utxos = node.listunspent()
    iterations = count - len(utxos)
    addr1 = node.getnewaddress()
    addr2 = node.getnewaddress()
    if iterations <= 0:
        return utxos
    for i in range(iterations):
        # Spend one utxo into two halves, netting one extra utxo per pass.
        t = utxos.pop()
        inputs = []
        inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
        outputs = {}
        send_value = t['amount'] - fee
        outputs[addr1] = satoshi_round(send_value/2)
        outputs[addr2] = satoshi_round(send_value/2)
        raw_tx = node.createrawtransaction(inputs, outputs)
        signed_tx = node.signrawtransaction(raw_tx)["hex"]
        txid = node.sendrawtransaction(signed_tx)
    # Mine until the mempool is empty so every returned utxo is confirmed.
    while (node.getmempoolinfo()['size'] > 0):
        node.generate(1)
    utxos = node.listunspent()
    assert(len(utxos) >= count)
    return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
    """
    Return hex for 128 identical large OP_RETURN txouts, prefixed by the
    one-byte output count 0x81 (129), ready to splice into a raw
    transaction before its change output.
    """
    # OP_RETURN OP_PUSHDATA2 512, followed by 512 bytes of 0x01.
    script_pubkey = "6a4d0200" + "01" * 512
    # Each txout: 8-byte zero value, compact-size script length
    # (fd 04 02 = 516 bytes), then the script itself.
    single_txout = "0000000000000000" + "fd0402" + script_pubkey
    return "81" + single_txout * 128
def create_tx(node, coinbase, to_address, amount):
    """Create and sign a 1-in/1-out tx spending output 0 of *coinbase*; return its hex."""
    inputs = [{"txid": coinbase, "vout": 0}]
    outputs = {to_address: amount}
    raw = node.createrawtransaction(inputs, outputs)
    signed = node.signrawtransaction(raw)
    assert_equal(signed["complete"], True)
    return signed["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
    """Broadcast *num* oversized transactions; returns the list of their txids."""
    addr = node.getnewaddress()
    txids = []
    for _ in range(num):
        t = utxos.pop()
        inputs=[{ "txid" : t["txid"], "vout" : t["vout"]}]
        outputs = {}
        change = t['amount'] - fee
        outputs[addr] = satoshi_round(change)
        rawtx = node.createrawtransaction(inputs, outputs)
        # Splice the big OP_RETURN outputs into the serialized hex.
        # NOTE(review): fixed offsets 92/94 appear to replace the one-byte
        # output counter of a 1-input tx — confirm against the tx layout.
        newtx = rawtx[0:92]
        newtx = newtx + txouts
        newtx = newtx + rawtx[94:]
        # SIGHASH "NONE": outputs are not covered by the signature, so the
        # spliced outputs do not invalidate it.
        signresult = node.signrawtransaction(newtx, None, None, "NONE")
        txid = node.sendrawtransaction(signresult["hex"], True)
        txids.append(txid)
    return txids
def mine_large_block(node, utxos=None):
    """Fill a block with large transactions and mine it."""
    # generate a 66k transaction,
    # and 14 of them is close to the 1MB block limit
    num = 14
    txouts = gen_return_txouts()
    utxos = utxos if utxos is not None else []
    if len(utxos) < num:
        # Not enough caller-supplied utxos: fall back to the whole wallet.
        # (The caller's list is mutated in place.)
        utxos.clear()
        utxos.extend(node.listunspent())
    fee = 100 * node.getnetworkinfo()["relayfee"]
    create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
    node.generate(1)
def get_bip9_status(node, key):
    """Return the BIP9 softfork status object for deployment *key*."""
    blockchain_info = node.getblockchaininfo()
    return blockchain_info['bip9_softforks'][key]
|
segsignal/bitcoin
|
qa/rpc-tests/test_framework/util.py
|
Python
|
mit
| 26,752
|
#!/usr/bin/python2.7
#Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__author__ = 'elsigh@google.com (Lindsey Simon)'
import logging
import os
import datetime
from datetime import timedelta
import re
import urllib2
from django import template
register = template.Library()
import settings
@register.filter
def by_key(array, key):
    """Template filter: return array[key], or '' when the lookup fails.

    Works for both sequence indexing and mapping lookup; any of the usual
    lookup failures (bad index, unsubscriptable value, missing key) yields
    the empty string so templates never crash.
    """
    try:
        # Merged the three previously-identical except clauses into one.
        return array[key]
    except (IndexError, TypeError, KeyError):
        return ''
@register.filter
def is_in(value, array):
    """Template filter: True when *value* is contained in *array*."""
    return value in array
@register.filter
def less_than(value1, value2):
    """Template filter: value1 < value2 (logs each comparison)."""
    result = value1 < value2
    logging.info('%s less than %s = %s' % (value1, value2, result))
    return result
@register.filter
def greater_than(value1, value2):
    """Template filter: value1 > value2 (logs each comparison)."""
    result = value1 > value2
    logging.info('%s greater than %s = %s' % (value1, value2, result))
    return result
@register.filter
def urlquote(url):
    """Template filter: percent-encode *url* for safe inclusion in a URL."""
    return urllib2.quote(url)
@register.filter
def urlunquote(url):
    """Template filter: decode a percent-encoded *url*."""
    return urllib2.unquote(url)
@register.filter
def resource_path(resource, category=None):
    """Template filter: build the /static/ URL for *resource*, plus a
    cache-busting version query parameter.

    category: optional leading URL path segment.
    """
    if category:
        path = '/%s/static/%s' % (category, resource)
    else:
        path = '/static/%s' % resource
    # Add on a version bit so we can use far future expires.
    # In dev mode add random bits to prevent annoying, cough, browsers from
    # caching stuff in frames.
    if settings.BUILD == 'development':
        version = str(datetime.datetime.now())
    else:
        # CURRENT_VERSION_ID — presumably set by the hosting environment
        # (App Engine style); verify in deployment config.
        version = os.environ['CURRENT_VERSION_ID']
    path += '?v=%s' % version
    return path
@register.filter
def as_range(end_range):
    """Template filter: 1-based range up to and including *end_range*."""
    return range(1, int(end_range) + 1)
@register.filter
def utc_to_pst(utc_dt):
    """Template filter: shift a UTC datetime 7 hours earlier.

    NOTE(review): -7h corresponds to PDT (daylight time); standard PST is
    UTC-8 — confirm whether DST handling is intentional here.
    """
    return utc_dt - timedelta(hours=7)
|
elsigh/browserscope
|
third_party/uaparser/tags/custom_filters.py
|
Python
|
apache-2.0
| 2,222
|
# -*- encoding: utf-8 -*-
import os
import json
import base64
import subprocess
from Crypto.Cipher import AES
from urllib2 import urlopen
# AES operates on 16-byte blocks (CBC mode below).
BLOCK_SIZE = 16

def pad(data):
    """PKCS#7-style pad: extend *data* to a multiple of BLOCK_SIZE."""
    padding = BLOCK_SIZE - len(data) % BLOCK_SIZE
    return data + chr(padding) * padding
def unpad(padded):
    """Strip the PKCS#7-style padding added by pad()."""
    amount = ord(padded[-1])
    return padded[:-amount]
def encrypt(data, password):
    """AES-CBC encrypt *data* with *password*; returns urlsafe base64 text.

    The first 16 bytes of the password double as the IV, so the password
    must be at least 16 bytes long (and a valid AES key length).
    """
    data = pad(data)
    aes = AES.new(password, AES.MODE_CBC, password[:16])
    encrypted = aes.encrypt(data)
    return base64.urlsafe_b64encode(encrypted)
def decrypt(edata, password):
    """Inverse of encrypt(): base64-decode, AES-CBC decrypt, strip padding."""
    edata = base64.urlsafe_b64decode(edata)
    aes = AES.new(password, AES.MODE_CBC, password[:16])
    return unpad(aes.decrypt(edata))
def request(groups):
    """Fetch encrypted mailing-list data for *groups* and sync local lists.

    Reads the shared password and server address from settings.json next to
    this script, issues an HTTP GET with the encrypted request embedded in
    the URL, decrypts the response, writes each returned list to /tmp, and
    feeds it to Mailman's sync_members.
    """
    dir = os.path.dirname(os.path.abspath(__file__))
    with open(dir + '/settings.json') as settings_file:
        settings = json.load(settings_file)
        password = settings['password']
        server_address = settings['server_address']
    data = {'prefix': "nidarholm-", 'groups': groups}
    data = json.dumps(data)
    # The encrypted JSON request travels as a urlsafe-base64 path segment.
    encoded = encrypt(data, password)
    url = (server_address + "/organization/updated_email_lists.json/" +
           encoded)
    contents = urlopen(url).read()
    decoded = decrypt(contents, password)
    data = json.loads(decoded)
    for listname, group in data.items():
        # Write the fetched addresses to /tmp/<listname>, one per line.
        new_list = file("/tmp/" + listname, "w")
        for email in group:
            new_list.write(email + "\n")
        new_list.close()
        # next level
        command = '/usr/sbin/sync_members -f /tmp/' + listname + ' ' + listname
        print command
        process = subprocess.Popen(command.split(), shell=False,
                                   stdout=subprocess.PIPE)
        output = process.communicate()[0]
        # sync_members only produces output when something changed/failed.
        if output:
            print "==================================="
            print group
            print output
def main():
    """Sync the full set of mailing lists: committees first, then sections."""
    request(["Medlemmer", "Styret", "Plankom", "Jubileum"])
    request(["Fløyte", "Obo", "Fagott", "Klarinett", "Saksofon", "Horn",
             "Småmessing", "Trombone", "Euph", "Tuba", "Slagverk"])

if __name__ == "__main__":
    main()
|
strekmann/nidarholmjs
|
scripts/nidarholm_lists.py
|
Python
|
agpl-3.0
| 2,132
|
import re
import unittest
from unittest import mock
import jsonschema
import pytest
from tests.utils import QuiltTestCase
from quilt3 import Package, workflows
class WorkflowConfigConfigDataVersionSupportTest(unittest.TestCase):
    """Verify which config-data versions WorkflowConfig accepts.

    CONFIG_DATA_VERSION is patched to `version` for each test; every entry
    of supported_versions must be accepted and every entry of
    not_supported_versions rejected.
    """

    # Patched in as WorkflowConfig.CONFIG_DATA_VERSION for every test.
    version = (1, 1, 1)
    supported_versions = [
        (1, 0, 0),
        (1, 0, 1),
        (1, 1, 0),
        (1, 1, 1),
    ]
    not_supported_versions = [
        (2, 0, 0),
        (2, 0, 1),
        (2, 1, 0),
        (2, 1, 1),
    ]

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Convert the raw tuples above into ConfigDataVersion named tuples.
        cls.version = workflows.ConfigDataVersion._make(cls.version)
        cls.supported_versions = list(map(workflows.ConfigDataVersion._make, cls.supported_versions))
        cls.not_supported_versions = list(map(workflows.ConfigDataVersion._make, cls.not_supported_versions))

    def setUp(self):
        super().setUp()
        patcher = mock.patch.object(workflows.WorkflowConfig, 'CONFIG_DATA_VERSION', self.version)
        patcher.start()
        self.addCleanup(patcher.stop)

    def test_supported(self):
        # Guard: the data set must not be empty, or the loop proves nothing.
        assert self.supported_versions
        for v in self.supported_versions:
            with self.subTest(version=v):
                assert workflows.WorkflowConfig.is_supported_config_data_version(v) is True

    def test_not_supported(self):
        assert self.not_supported_versions
        for v in self.not_supported_versions:
            with self.subTest(version=v):
                assert workflows.WorkflowConfig.is_supported_config_data_version(v) is False
class WorkflowConfigGetVersionStrTest(unittest.TestCase):
    """get_config_data_version_str: extract the version string from config data."""

    def test_version_str(self):
        # Plain string form.
        data = {
            'version': '41.0.0',
        }
        assert workflows.WorkflowConfig.get_config_data_version_str(data) == '41.0.0'

    def test_version_object(self):
        # Object form: the 'base' key carries the effective version.
        data = {
            'version': {
                'ext1': '40.0.0',
                'base': '41.0.0',
                'ext2': '42.0.0',
            }
        }
        assert workflows.WorkflowConfig.get_config_data_version_str(data) == '41.0.0'
class ConfigDataVersionParseTest(unittest.TestCase):
    """ConfigDataVersion.parse: omitted minor/patch components default to 0."""

    # version string -> expected (major, minor, patch) tuple
    versions = {
        '1': (1, 0, 0),
        '1.0': (1, 0, 0),
        '1.0.0': (1, 0, 0),
        '1.0.1': (1, 0, 1),
        '1.1': (1, 1, 0),
        '1.1.1': (1, 1, 1),
    }

    def test(self):
        for version_string, expected_version in self.versions.items():
            expected_version = workflows.ConfigDataVersion._make(expected_version)
            with self.subTest(version_string=version_string, expected_version=expected_version):
                assert workflows.ConfigDataVersion.parse(version_string) == expected_version
class WorkflowValidatorTestMixin:
    """Shared helper for building a WorkflowValidator with all-default arguments."""

    def get_workflow_validator(self, **kwargs):
        # Every constructor argument defaults to disabled/None; tests
        # override only the pieces they exercise.
        return workflows.WorkflowValidator(
            **{
                'data_to_store': None,
                'is_message_required': False,
                'pkg_name_pattern': None,
                'metadata_validator': None,
                'entries_validator': None,
                **kwargs,
            }
        )
class WorkflowValidatorTest(unittest.TestCase, WorkflowValidatorTestMixin):
    """Unit tests for each WorkflowValidator.validate_* method in isolation."""

    # jsonschema validators accept a boolean schema: True accepts anything,
    # False rejects everything — handy for pass/fail fixtures below.
    JSON_SCHEMA_VALIDATOR_CLS = jsonschema.Draft7Validator

    def test_validate(self):
        # validate() must call each sub-validator once and return data_to_store.
        pkg_name = 'test/name'
        msg = 'test message'
        meta = {'some': 'meta'}
        pkg = Package()
        pkg.set_meta(meta)
        workflow_validator = self.get_workflow_validator(data_to_store=mock.sentinel.data_to_store)
        methods_to_mock = (
            'validate_name',
            'validate_message',
            'validate_metadata',
            'validate_entries',
        )
        with mock.patch.multiple(workflows.WorkflowValidator, **dict.fromkeys(methods_to_mock, mock.DEFAULT)) as mocks:
            assert workflow_validator.validate(
                name=pkg_name,
                pkg=pkg,
                message=msg,
            ) is mock.sentinel.data_to_store
            mocks['validate_name'].assert_called_once_with(pkg_name)
            mocks['validate_message'].assert_called_once_with(msg)
            mocks['validate_metadata'].assert_called_once_with(meta)
            mocks['validate_entries'].assert_called_once_with(pkg)

    def test_validate_name_noop(self):
        # No pattern configured: any name passes.
        workflow_validator = self.get_workflow_validator(pkg_name_pattern=None)
        workflow_validator.validate_name('foobar')

    def test_validate_name(self):
        # Pattern is a *search*-style match anywhere in the name.
        workflow_validator = self.get_workflow_validator(pkg_name_pattern=re.compile(r'oob'))
        workflow_validator.validate_name('foobar')

    def test_validate_name_fail(self):
        workflow_validator = self.get_workflow_validator(pkg_name_pattern=re.compile(r'^oob'))
        with pytest.raises(workflows.WorkflowValidationError):
            workflow_validator.validate_name('foobar')

    def test_validate_message_not_required(self):
        workflow_validator = self.get_workflow_validator(is_message_required=False)
        for msg in (
            None,
            '',
            'message',
        ):
            with self.subTest(message=msg):
                workflow_validator.validate_message(msg)

    def test_validate_message_required(self):
        workflow_validator = self.get_workflow_validator(is_message_required=True)
        workflow_validator.validate_message('message')

    def test_validate_message_required_fail(self):
        # Both None and the empty string count as "no message".
        workflow_validator = self.get_workflow_validator(is_message_required=True)
        for msg in (
            None,
            '',
        ):
            with self.subTest(message=msg):
                with pytest.raises(workflows.WorkflowValidationError):
                    workflow_validator.validate_message(msg)

    def test_validate_metadata_noop(self):
        workflow_validator = self.get_workflow_validator()
        workflow_validator.validate_metadata({})

    def test_validate_metadata(self):
        workflow_validator = self.get_workflow_validator(metadata_validator=self.JSON_SCHEMA_VALIDATOR_CLS(True))
        workflow_validator.validate_metadata({})

    def test_validate_metadata_fail(self):
        workflow_validator = self.get_workflow_validator(metadata_validator=self.JSON_SCHEMA_VALIDATOR_CLS(False))
        with pytest.raises(workflows.WorkflowValidationError):
            workflow_validator.validate_metadata({})

    @mock.patch.object(workflows.WorkflowValidator, 'get_pkg_entries_for_validation')
    def test_validate_pkg_entries_noop(self, get_pkg_entries_for_validation_mock):
        # Without an entries validator the package must not even be walked.
        workflow_validator = self.get_workflow_validator()
        workflow_validator.validate_entries(Package())
        get_pkg_entries_for_validation_mock.assert_not_called()

    @mock.patch.object(workflows.WorkflowValidator, 'get_pkg_entries_for_validation')
    def test_validate_pkg_entries(self, get_pkg_entries_for_validation_mock):
        pkg = Package()
        workflow_validator = self.get_workflow_validator(entries_validator=self.JSON_SCHEMA_VALIDATOR_CLS(True))
        workflow_validator.validate_entries(pkg)
        get_pkg_entries_for_validation_mock.assert_called_once_with(pkg)

    @mock.patch.object(workflows.WorkflowValidator, 'get_pkg_entries_for_validation')
    def test_validate_pkg_entries_fail(self, get_pkg_entries_for_validation_mock):
        pkg = Package()
        workflow_validator = self.get_workflow_validator(entries_validator=self.JSON_SCHEMA_VALIDATOR_CLS(False))
        with pytest.raises(workflows.WorkflowValidationError):
            workflow_validator.validate_entries(pkg)
        get_pkg_entries_for_validation_mock.assert_called_once_with(pkg)
class GetPkgEntriesForValidationTest(QuiltTestCase, WorkflowValidatorTestMixin):
    """get_pkg_entries_for_validation: entries come back sorted by logical key."""

    def test(self):
        # Inserted deliberately out of order to verify the sorted output.
        entries_data = {
            'b/a': bytes(1),
            'a/b': bytes(2),
            'c': bytes(3),
        }
        pkg = Package()
        for lk, data in entries_data.items():
            pkg.set(lk, data)
        workflow_validator = self.get_workflow_validator()
        assert workflow_validator.get_pkg_entries_for_validation(pkg) == [
            {
                'logical_key': 'a/b',
                'size': 2,
            },
            {
                'logical_key': 'b/a',
                'size': 1,
            },
            {
                'logical_key': 'c',
                'size': 3,
            },
        ]
|
quiltdata/quilt
|
api/python/tests/test_workflows.py
|
Python
|
apache-2.0
| 8,375
|
from flask import render_template, session, redirect, url_for
from .. import db
from ..models import User
from . import main
from .forms import NameForm
@main.route('/', methods=['GET', 'POST'])
def index():
    """Landing page: ask for a name and remember whether it was seen before.

    POST: look the name up in the User table, create it if new, stash the
    name and a 'known' flag in the session, then redirect (post/redirect/get).
    GET: render the form with any previously stored session values.
    """
    form = NameForm()
    if form.validate_on_submit():
        user = User.query.filter_by(username=form.name.data).first()
        if user is None:
            user = User(username=form.name.data)
            db.session.add(user)
            session['known'] = False
        else:
            session['known'] = True
        session['name'] = user.username
        # Redirect so a browser refresh does not re-submit the form.
        return redirect(url_for('.index'))
    return render_template('index.html', form=form, name=session.get('name'), known=session.get('known', False))
|
mfwarren/FreeCoding
|
2014/12/fc_29/app/main/views.py
|
Python
|
mit
| 725
|
"""
Copyright 2015 SYSTRAN Software, Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
# import models into model package
from .translation_output import TranslationOutput
from .translation_response import TranslationResponse
from .translation_file_response import TranslationFileResponse
from .translation_status import TranslationStatus
from .translation_cancel import TranslationCancel
from .batch_request import BatchRequest
from .batch_create import BatchCreate
from .batch_cancel import BatchCancel
from .batch_close import BatchClose
from .batch_status import BatchStatus
from .profile_id import ProfileId
from .language_pair import LanguagePair
from .supported_language_response import SupportedLanguageResponse
from .profile import Profile
from .profiles_response import ProfilesResponse
from .error_response import ErrorResponse
|
SYSTRAN/translation-api-python-client
|
systran_translation_api/models/__init__.py
|
Python
|
apache-2.0
| 1,422
|
import pcwg.configuration.preferences_configuration as pref
import pcwg.gui.root as gui
if __name__ == "__main__":
    # Launch the PCWG GUI with the stored user preferences; preferences are
    # saved back once the UI returns.
    preferences = pref.Preferences.get()
    user_interface = gui.UserInterface(preferences)
    preferences.save()
    print "Done"
|
clembou/PCWG
|
pcwg_tool.py
|
Python
|
mit
| 249
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Romanian accounting localization for OpenERP V7
# @author - Fekete Mihai, Tatár Attila <atta@nvm.ro>
# Copyright (C) 2011-2013 TOTAL PC SYSTEMS (http://www.www.erpsystems.ro).
# Copyright (C) 2013 Tatár Attila
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import datetime
import time
from string import maketrans
import requests
from stdnum.eu.vat import check_vies
from lxml import html
from openerp import models, fields, api, _
from openerp.exceptions import Warning
CEDILLATRANS = maketrans(u'\u015f\u0163\u015e\u0162'.encode(
'utf8'), u'\u0219\u021b\u0218\u021a'.encode('utf8'))
def getMfinante(cod):
    """Scrape mfinante.ro for company data identified by fiscal code *cod*.

    Returns a dict mapping the (Romanian) row labels of the first result
    table to their values, with cedilla diacritics normalized via
    CEDILLATRANS. Raises on HTTP errors (raise_for_status).
    """
    headers = {
        "User-Agent": "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)",
        "Content-Type": "multipart/form-data;"
    }
    params = {'cod': cod}
    res = requests.get(
        'http://www.mfinante.ro/infocodfiscal.html',
        params=params,
        headers=headers
    )
    res.raise_for_status()
    htm = html.fromstring(res.text)
    # there are 2 tables; the first one is the important one
    table = htm.xpath("//div[@id='main']//center/table")[0]
    result = dict()
    for tr in table.iterchildren():
        # Collapse each cell's multi-line text into one space-joined string.
        key = ' '.join([x.strip() for x in tr.getchildren()[
            0].text_content().split('\n') if x.strip() != ''])
        val = ' '.join([x.strip() for x in tr.getchildren()[
            1].text_content().split('\n') if x.strip() != ''])
        result[key] = val.encode('utf8').translate(CEDILLATRANS).decode('utf8')
    return result
class res_partner(models.Model):
    """Partner extension: auto-fill Romanian company data from the VAT number."""
    _name = "res.partner"
    _inherit = "res.partner"

    name = fields.Char('Name', required=True, select=True, default=' ')

    @api.one
    def button_get_partner_data(self):
        """Normalize the partner's VAT and fetch company data for it.

        Romanian VAT numbers are looked up on mfinante.ro (falling back to
        the ANAF web service); other EU numbers go through VIES.
        """
        def _check_vat_ro(vat):
            # NOTE(review): inspects part.name, not the *vat* argument —
            # looks like a latent bug; confirm before relying on it.
            return bool(len(part.name.strip()) > 2 and
                        part.name.strip().upper()[:2] == 'RO' and
                        part.name.strip()[2:].isdigit())
        part = self[0]
        vat = part.vat
        # Normalize the VAT (uppercase, no spaces); when empty, fall back to
        # the name field if it looks like / validates as a VAT number.
        if vat:
            self.write({'vat': part.vat.upper().replace(" ","")})
        elif part.name and len(part.name.strip())>2 and part.name.strip().upper()[:2]=='RO' and part.name.strip()[2:].isdigit():
            self.write( {'vat': part.name.upper().replace(" ","")})
        if not part.vat and part.name:
            try:
                vat_country, vat_number = self._split_vat(part.name.upper().replace(" ",""))
                valid = self.vies_vat_check(vat_country, vat_number)
                if valid:
                    self.write( {'vat': part.name.upper().replace(" ","")})
            except:
                raise Warning(_("No VAT number found"))
        vat_country, vat_number = self._split_vat(part.vat)
        if part.vat_subjected:
            # Reset; re-established below from the authoritative source.
            self.write({'vat_subjected': False})
        if vat_number and vat_country:
            self.write({
                'is_company': True,
                'country_id': self.env['res.country'].search(
                    [('code', 'ilike', vat_country)])[0].id
            })
            if vat_country == 'ro':
                try:
                    # Primary source: scrape the Ministry of Finance site.
                    nrc_key = 'Numar de inmatriculare la Registrul Comertului:'
                    tva_key = 'Taxa pe valoarea adaugata (data luarii in evidenta):'
                    result = getMfinante(vat_number)
                    name = nrc = adresa = tel = fax = False
                    zip1 = vat_s = state = False
                    if 'Denumire platitor:' in result.keys():
                        name = result['Denumire platitor:'].upper()
                    if 'Adresa:' in result.keys():
                        adresa = result['Adresa:'].title() or ''
                    if nrc_key in result.keys():
                        nrc = result[nrc_key].replace(' ', '')
                        if nrc == '-/-/-':
                            nrc = ''
                    if 'Codul postal:' in result.keys():
                        zip1 = result['Codul postal:'] or ''
                    if 'Judetul:' in result.keys():
                        jud = result['Judetul:'].title() or ''
                        # Strip a leading "Municipiul"-style prefix before
                        # matching against res.country.state names.
                        if jud.lower().startswith('municip'):
                            jud = ' '.join(jud.split(' ')[1:])
                        if jud != '':
                            state = self.env['res.country.state'].search(
                                [('name', 'ilike', jud)])
                            if state:
                                state = state[0].id
                    if 'Telefon:' in result.keys():
                        tel = result['Telefon:'].replace('.', '') or ''
                    if 'Fax:' in result.keys():
                        fax = result['Fax:'].replace('.', '') or ''
                    if tva_key in result.keys():
                        vat_s = bool(
                            result[tva_key] != 'NU')
                    self.write({
                        'name': name or '',
                        'nrc': nrc or '',
                        'street': adresa or '',
                        'phone': tel or '',
                        'fax': fax or '',
                        'zip': zip1 or '',
                        'vat_subjected': vat_s or False,
                        'state_id': state,
                    })
                except:
                    # Fallback: query the official ANAF VAT web service.
                    headers = {
                        "User-Agent": "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)",
                        "Content-Type": "application/json;"
                    }
                    res = requests.post(
                        'https://webservicesp.anaf.ro:/PlatitorTvaRest/api/v1/ws/tva',
                        json=[{'cui': vat_number, 'data': fields.Date.today()}],
                        headers = headers)
                    if res.status_code == 200:
                        res = res.json()
                        if res['found'] and res['found'][0]:
                            datas = res['found'][0]
                            # Re-query with the registration end date when
                            # the record is no longer current.
                            if datas['data_sfarsit'] and datas['data_sfarsit'] != ' ':
                                res = requests.post(
                                    'https://webservicesp.anaf.ro:/PlatitorTvaRest/api/v1/ws/tva',
                                    json=[{'cui': vat_number, 'data': datas['data_sfarsit']}],
                                    headers = headers)
                                if res.status_code == 200:
                                    res = res.json()
                                    if res['found'] and res['found'][0]:
                                        datas = res['found'][0]
                                    if res['notfound'] and res['notfound'][0]:
                                        datas = res['notfound'][0]
                        if res['notfound'] and res['notfound'][0]:
                            datas = res['notfound'][0]
                            if datas['data_sfarsit'] and datas['data_sfarsit'] != ' ':
                                res = requests.post(
                                    'https://webservicesp.anaf.ro:/PlatitorTvaRest/api/v1/ws/tva',
                                    json=[{'cui': vat_number, 'data': datas['data_sfarsit']}],
                                    headers = headers)
                                if res.status_code == 200:
                                    res = res.json()
                                    if res['found'] and res['found'][0]:
                                        datas = res['found'][0]
                                    if res['notfound'] and res['notfound'][0]:
                                        datas = res['notfound'][0]
                        self.write({
                            'name': datas['denumire'].upper(),
                            'street': datas['adresa'].title(),
                            'vat_subjected': bool(datas['tva'])
                        })
            else:
                # Non-Romanian EU VAT: use the VIES service.
                try:
                    result = check_vies(part.vat)
                    if result.name and result.name != '---':
                        self.write({
                            'name': unicode(result.name).upper(),
                            'is_company': True,
                            'vat_subjected': True
                        })
                    if (not part.street and
                            result.address and
                            result.address != '---'):
                        self.write({
                            'street': unicode(result.address).title()
                        })
                    self.write({'vat_subjected': result.valid})
                except:
                    # VIES unreachable/failed: keep only the validity flag.
                    self.write({
                        'vat_subjected': self.vies_vat_check(vat_country, vat_number)
                    })
|
yoyo2k/l10n-romania
|
partner_create_by_vat/res_partner.py
|
Python
|
agpl-3.0
| 9,585
|
# -*- coding: utf-8 -*-
# Copyright © 2008-2011, Red Hat, Inc.
#
# This software may be freely redistributed under the terms of the GNU
# general public license.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# This file is taken from the Nushus project <http://fedorahosted.org/nushus/>
"""
Functions for handling remote package resources
"""
import fnmatch
import os.path
import BeautifulSoup
import logging
import re
import urllib2
import juicer.types
REMOTE_PKG_TYPE = 1
REMOTE_INDEX_TYPE = 2
REMOTE_INPUT_FILE_TYPE = 3
output = logging.getLogger('juicer')
def filter_items(repo_items, item_type):
    """
    Split each repo's item list into remote URLs and local file paths.

    ``repo_items`` is an iterable of sequences whose first element is a
    repo name and whose remaining elements are item specifiers.  Returns
    a dict mapping repo name -> de-duplicated list combining expanded
    remote URLs and absolute local paths.
    """
    # Pick the filename pattern for the requested item type; RPM is the
    # fallback for unrecognized types.
    type_map = {
        'rpm': juicer.types.RPM,
        'docker': juicer.types.Docker,
        'iso': juicer.types.Iso,
    }
    pattern = type_map.get(item_type, juicer.types.RPM)(None).pattern
    repo_hash = {}
    for entry in repo_items:
        repo, items = entry[0], entry[1:]
        # Anything that does not exist locally might be a remote resource.
        possible_remotes = [itm for itm in items if not os.path.exists(itm)]
        output.debug("Considering {number} possible remotes".format(number=len(possible_remotes)))
        remote_items = []
        for itm in possible_remotes:
            remote_items.extend(assemble_remotes(itm, pattern))
        output.debug("Remote packages: {remote_items}".format(remote_items=str(remote_items)))
        # Store absolute path for local items
        local_items = [os.path.abspath(itm) for itm in items if os.path.exists(itm)]
        repo_hash[repo] = list(set(remote_items + local_items))
    return repo_hash
def assemble_remotes(resource, pattern):
    """
    Using the specified input resource, assemble a list of resource URLs.

    A resource may be a direct remote package URL, a remote directory
    index, or a local input file listing either of those; each case is
    expanded into zero or more remote package URLs.
    """
    kind = classify_resource_type(resource, pattern)
    if kind == REMOTE_PKG_TYPE:
        # Already a direct package URL; nothing to expand.
        return [resource]
    if kind == REMOTE_INDEX_TYPE:
        # Scrape the directory listing for matching packages.
        return parse_directory_index(resource, pattern)
    if kind == REMOTE_INPUT_FILE_TYPE:
        # Later on this could examine the excluded data for directory
        # indexes and iterate over those too.
        packages, _excluded = parse_input_file(resource, pattern)
        return packages
    # Unclassifiable resource: nothing to assemble.
    return []
def classify_resource_type(resource, pattern):
    """Determine what kind of resource was supplied.

    Three kinds are recognized, checked in this order:
    - directory indexes: http[s]://anything.anything/anything
    - remote packages:   http[s]://anything/anything.anything
    - input files:       anything else that exists on the local filesystem

    Returns one of the REMOTE_*_TYPE constants, or None when the resource
    matches none of the above.
    """
    if is_directory_index(resource):
        return REMOTE_INDEX_TYPE
    if is_remote_package(resource, pattern):
        return REMOTE_PKG_TYPE
    if os.path.exists(os.path.expanduser(resource)):
        return REMOTE_INPUT_FILE_TYPE
    return None
def is_remote_package(resource, pattern):
    """
    Return True when ``resource`` looks like a remote package URL whose
    filename portion matches ``pattern``.
    """
    # Anchor the type-specific filename pattern at the end of an
    # http(s) URL path; scheme match is case-insensitive.
    remote_regexp = re.compile(r"^https?://(.+)\/{pattern}$".format(pattern=pattern), re.I)
    return remote_regexp.match(resource) is not None
def is_directory_index(resource):
    """
    Return True when ``resource`` looks like a remote directory index.

    A URL ending in '/' is always an index.  A slash-less URL is treated
    as an index unless it is a plain name (no fnmatch special characters)
    whose basename contains a dot, which looks like a file instead.
    """
    if not re.match(r"^https?://(.+)$", resource, re.I):
        # Not an http(s) URL at all.
        return False
    if resource.endswith('/'):
        return True
    if re.match(r"^[^\[\]\*\?\!]+$", resource, re.I):
        # No glob characters: a dotted basename means "file", not index.
        return not re.match(r"(.+)[.](.+)", os.path.basename(resource))
    # Contains glob characters; treat it as an index to be expanded.
    return True
def parse_input_file(resource, pattern):
    """
    Parse input file into remote packages and excluded data.

    In addition to garbage, excluded data includes directory indexes
    for the time being. This will be revisited after basic
    functionality has been fully implemented.

    Returns a tuple ``(remote_packages, excluded_data)``; packages found
    by expanding any directory indexes present in the excluded data are
    appended to the directly-listed remote packages.
    """
    # Bug fix: the original used open(resource, 'r').read() and never
    # closed the file handle; use a context manager instead.
    with open(resource, 'r') as input_file:
        remotes_list = input_file.read().split()
    remote_packages = [pkg for pkg in remotes_list if is_remote_package(pkg, pattern)]
    excluded_data = [datum for datum in remotes_list if datum not in remote_packages]
    http_indexes = [index for index in excluded_data if is_directory_index(index)]
    # Expand each directory index into its matching remote packages.
    remotes_from_indexes = []
    for index in http_indexes:
        remotes_from_indexes.extend(parse_directory_index(index, pattern))
    return (remote_packages + remotes_from_indexes, excluded_data)
def parse_directory_index(directory_index, pattern):
    """
    Retrieve a directory index and make a list of the resources listed.

    Fetches the index over HTTP (urllib2), scrapes every <a href> whose
    target matches ``pattern``, and returns the full URLs of the matches.
    Entries ending in '/' (sub-directories) are dropped.
    """
    # Use the tail of the remote path to determine what we're dealing with.
    head, tail = os.path.split(directory_index)
    # Is this an fnmatch or a directory without a trailing slash?
    if re.compile(r"^[^\[\]\*\?\!\.]+$", re.I).match(tail):
        # Plain directory name: normalize to a trailing slash and fetch it.
        if not directory_index.endswith('/'):
            directory_index = directory_index + '/'
        site_index = urllib2.urlopen(directory_index)
    else:
        # The tail is a glob (or dotted name): fetch the parent directory
        # instead, and turn the glob into a regex to match hrefs against.
        pattern = fnmatch.translate(tail)
        directory_index = head + '/'
        site_index = urllib2.urlopen(directory_index)
    parsed_site_index = BeautifulSoup.BeautifulSoup(site_index)
    link_tags = parsed_site_index.findAll('a', href=re.compile(pattern))
    # Only save the HREF attribute values from the links found
    names = [str(link['href']) for link in link_tags]
    # Remove items ending in /
    names[:] = [name for name in names if not name.endswith('/')]
    # Join the index path with the discovered names so we only return complete paths
    remote_list = map(lambda end: "".join([directory_index, end]), names)
    return remote_list
|
abutcher/juicer
|
juicer/remotes.py
|
Python
|
gpl-3.0
| 6,519
|
"""Timing benchmark for AlexNet inference.
To run, use:
bazel run -c opt --config=cuda \
third_party/tensorflow/models/image/alexnet:alexnet_benchmark
Across 100 steps on batch size = 128.
Forward pass:
Run on Tesla K40c: 145 +/- 1.5 ms / batch
Run on Titan X: 70 +/- 0.1 ms / batch
Forward-backward pass:
Run on Tesla K40c: 480 +/- 48 ms / batch
Run on Titan X: 244 +/- 30 ms / batch
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import math
from six.moves import xrange # pylint: disable=redefined-builtin
import time
import tensorflow.python.platform
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_integer('batch_size', 128,
"""Batch size.""")
tf.app.flags.DEFINE_integer('num_batches', 100,
"""Number of batches to run.""")
def print_activations(t):
  """Print a tensor's operation name and its static shape as a list."""
  shape = t.get_shape().as_list()
  print(t.op.name, ' ', shape)
def inference(images):
  """Build the AlexNet model.
  Args:
    images: Images Tensor
  Returns:
    pool5: the last Tensor in the convolutional component of AlexNet.
    parameters: a list of Tensors corresponding to the weights and biases of the
        AlexNet model.
  """
  # NOTE: images is assumed NHWC (batch, height, width, 3) — consistent
  # with the 3-input-channel conv1 kernel and run_benchmark's input.
  parameters = []
  # conv1
  with tf.name_scope('conv1') as scope:
    # 11x11, stride-4 conv; weights ~ truncated normal (stddev 0.1),
    # biases initialized to zero.  Same init scheme for all layers below.
    kernel = tf.Variable(tf.truncated_normal([11, 11, 3, 64], dtype=tf.float32,
                                             stddev=1e-1), name='weights')
    conv = tf.nn.conv2d(images, kernel, [1, 4, 4, 1], padding='VALID')
    biases = tf.Variable(tf.constant(0.0, shape=[64], dtype=tf.float32),
                         trainable=True, name='biases')
    bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
    conv1 = tf.nn.relu(bias, name=scope)
    print_activations(conv1)
    # Accumulate trainable tensors so the caller can build gradients.
    parameters += [kernel, biases]
  # lrn1
  # TODO(shlens, jiayq): Add a GPU version of local response normalization.
  # pool1
  pool1 = tf.nn.max_pool(conv1,
                         ksize=[1, 3, 3, 1],
                         strides=[1, 2, 2, 1],
                         padding='VALID',
                         name='pool1')
  print_activations(pool1)
  # conv2
  with tf.name_scope('conv2') as scope:
    kernel = tf.Variable(tf.truncated_normal([5, 5, 64, 192], dtype=tf.float32,
                                             stddev=1e-1), name='weights')
    conv = tf.nn.conv2d(pool1, kernel, [1, 1, 1, 1], padding='SAME')
    biases = tf.Variable(tf.constant(0.0, shape=[192], dtype=tf.float32),
                         trainable=True, name='biases')
    bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
    conv2 = tf.nn.relu(bias, name=scope)
    parameters += [kernel, biases]
  print_activations(conv2)
  # pool2
  pool2 = tf.nn.max_pool(conv2,
                         ksize=[1, 3, 3, 1],
                         strides=[1, 2, 2, 1],
                         padding='VALID',
                         name='pool2')
  print_activations(pool2)
  # conv3
  with tf.name_scope('conv3') as scope:
    kernel = tf.Variable(tf.truncated_normal([3, 3, 192, 384],
                                             dtype=tf.float32,
                                             stddev=1e-1), name='weights')
    conv = tf.nn.conv2d(pool2, kernel, [1, 1, 1, 1], padding='SAME')
    biases = tf.Variable(tf.constant(0.0, shape=[384], dtype=tf.float32),
                         trainable=True, name='biases')
    bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
    conv3 = tf.nn.relu(bias, name=scope)
    parameters += [kernel, biases]
  print_activations(conv3)
  # conv4
  with tf.name_scope('conv4') as scope:
    kernel = tf.Variable(tf.truncated_normal([3, 3, 384, 256],
                                             dtype=tf.float32,
                                             stddev=1e-1), name='weights')
    conv = tf.nn.conv2d(conv3, kernel, [1, 1, 1, 1], padding='SAME')
    biases = tf.Variable(tf.constant(0.0, shape=[256], dtype=tf.float32),
                         trainable=True, name='biases')
    bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
    conv4 = tf.nn.relu(bias, name=scope)
    parameters += [kernel, biases]
  print_activations(conv4)
  # conv5
  with tf.name_scope('conv5') as scope:
    kernel = tf.Variable(tf.truncated_normal([3, 3, 256, 256],
                                             dtype=tf.float32,
                                             stddev=1e-1), name='weights')
    conv = tf.nn.conv2d(conv4, kernel, [1, 1, 1, 1], padding='SAME')
    biases = tf.Variable(tf.constant(0.0, shape=[256], dtype=tf.float32),
                         trainable=True, name='biases')
    bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
    conv5 = tf.nn.relu(bias, name=scope)
    parameters += [kernel, biases]
  print_activations(conv5)
  # pool5
  pool5 = tf.nn.max_pool(conv5,
                         ksize=[1, 3, 3, 1],
                         strides=[1, 2, 2, 1],
                         padding='VALID',
                         name='pool5')
  print_activations(pool5)
  return pool5, parameters
def time_tensorflow_run(session, target, info_string):
  """Run the computation to obtain the target tensor and print timing stats.
  Args:
    session: the TensorFlow session to run the computation under.
    target: the target Tensor that is passed to the session's run() function.
    info_string: a string summarizing this run, to be printed with the stats.
  Returns:
    None
  """
  num_steps_burn_in = 10
  total_duration = 0.0
  total_duration_squared = 0.0
  for i in xrange(FLAGS.num_batches + num_steps_burn_in):
    start_time = time.time()
    _ = session.run(target)
    duration = time.time() - start_time
    # Bug fix: use >= so that exactly FLAGS.num_batches steps are timed.
    # The previous "i > num_steps_burn_in" dropped one timed step while the
    # mean/variance below still divide by num_batches, biasing the stats.
    if i >= num_steps_burn_in:
      if not i % 10:
        print ('%s: step %d, duration = %.3f' %
               (datetime.now(), i - num_steps_burn_in, duration))
      total_duration += duration
      total_duration_squared += duration * duration
  # Mean and population standard deviation over the timed steps.
  mn = total_duration / FLAGS.num_batches
  vr = total_duration_squared / FLAGS.num_batches - mn * mn
  sd = math.sqrt(vr)
  print ('%s: %s across %d steps, %.3f +/- %.3f sec / batch' %
         (datetime.now(), info_string, FLAGS.num_batches, mn, sd))
def run_benchmark():
  """Run the benchmark on AlexNet."""
  with tf.Graph().as_default():
    # Generate some dummy images.
    image_size = 224
    # Note that our padding definition is slightly different the cuda-convnet.
    # In order to force the model to start with the same activations sizes,
    # we add 3 to the image_size and employ VALID padding above.
    images = tf.Variable(tf.random_normal([FLAGS.batch_size,
                                           image_size + 3,
                                           image_size + 3, 3],
                                          dtype=tf.float32,
                                          stddev=1e-1))
    # Build a Graph that computes the logits predictions from the
    # inference model.
    pool5, parameters = inference(images)
    # Build an initialization operation.
    init = tf.initialize_all_variables()
    # Start running operations on the Graph.
    config = tf.ConfigProto()
    # Best-fit-with-coalescing GPU allocator for more stable timings.
    config.gpu_options.allocator_type = 'BFC'
    # NOTE(review): the session is never closed; acceptable for a one-shot
    # benchmark process, but worth a `with tf.Session(...)` if reused.
    sess = tf.Session(config=config)
    sess.run(init)
    # Run the forward benchmark.
    time_tensorflow_run(sess, pool5, "Forward")
    # Add a simple objective so we can calculate the backward pass.
    objective = tf.nn.l2_loss(pool5)
    # Compute the gradient with respect to all the parameters.
    grad = tf.gradients(objective, parameters)
    # Run the backward benchmark.
    time_tensorflow_run(sess, grad, "Forward-backward")
def main(_):
  # Entry point invoked by tf.app.run(); the unused argument is argv.
  run_benchmark()
if __name__ == '__main__':
  # tf.app.run() parses the FLAGS defined above, then dispatches to main().
  tf.app.run()
|
arunhotra/tensorflow
|
tensorflow/models/image/alexnet/alexnet_benchmark.py
|
Python
|
apache-2.0
| 7,840
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for architecture output directory fixups."""
from __future__ import print_function
import cr
class _ArchInitHookHelper(cr.InitHook):
  """Base class helper for CR_ARCH value fixups.

  Subclasses override _VersionTest to say which old versions need fixing
  and _ArchConvert to map a stored architecture value to its replacement.
  """

  def _VersionTest(self, old_version):
    # By default every old version is eligible for conversion.
    _ = old_version
    return True

  def _ArchConvert(self, old_arch):
    # Identity conversion; subclasses substitute the corrected value.
    return old_arch

  def Run(self, old_version, config):
    if old_version is None:
      return
    if not self._VersionTest(old_version):
      return
    current = config.OVERRIDES.Find(cr.Arch.SELECTOR)
    converted = self._ArchConvert(current)
    if converted == current:
      return
    print('** Fixing architecture from {0} to {1}'.format(current, converted))
    config.OVERRIDES[cr.Arch.SELECTOR] = converted
class WrongArchDefaultInitHook(_ArchInitHookHelper):
  """Fixes bad initial defaults.

  In the initial versions of cr, before output directories were
  versioned, invalid architecture defaults were being written out.
  This detects that case and sets the architecture to the current
  default instead.
  """

  def _ArchConvert(self, _):
    # Whatever was stored is untrustworthy; fall back to the default arch.
    return cr.Arch.default.name

  def _VersionTest(self, old_version):
    # Only pre-versioning output directories (version <= 0.0) are affected.
    return old_version <= 0.0
class MipsAndArmRenameInitHook(_ArchInitHookHelper):
  """Fixes rename of Mips and Arm to Mips32 and Arm32."""

  def _ArchConvert(self, old_arch):
    # Map the old short names onto their renamed 32-bit arch classes;
    # anything else passes through unchanged.
    renames = {
        'mips': cr.Mips32Arch,
        'arm': cr.Arm32Arch,
    }
    arch_class = renames.get(old_arch)
    if arch_class is None:
      return old_arch
    return arch_class.GetInstance().name
|
endlessm/chromium-browser
|
tools/cr/cr/fixups/arch.py
|
Python
|
bsd-3-clause
| 1,620
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VpnConnectionsOperations:
    """VpnConnectionsOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_02_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # Alias to the generated model classes for this API version.
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        """Store the shared pipeline client, config and (de)serializers."""
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def get(
        self,
        resource_group_name: str,
        gateway_name: str,
        connection_name: str,
        **kwargs: Any
    ) -> "_models.VpnConnection":
        """Retrieves the details of a vpn connection.
        :param resource_group_name: The resource group name of the VpnGateway.
        :type resource_group_name: str
        :param gateway_name: The name of the gateway.
        :type gateway_name: str
        :param connection_name: The name of the vpn connection.
        :type connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VpnConnection, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_02_01.models.VpnConnection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VpnConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VpnConnection', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'}  # type: ignore
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        gateway_name: str,
        connection_name: str,
        vpn_connection_parameters: "_models.VpnConnection",
        **kwargs: Any
    ) -> "_models.VpnConnection":
        """Issue the initial PUT request backing the create_or_update LRO."""
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VpnConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(vpn_connection_parameters, 'VpnConnection')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        # 200 = updated existing connection, 201 = created a new one; the
        # payload shape is the same in both cases.
        if response.status_code == 200:
            deserialized = self._deserialize('VpnConnection', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('VpnConnection', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        gateway_name: str,
        connection_name: str,
        vpn_connection_parameters: "_models.VpnConnection",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.VpnConnection"]:
        """Creates a vpn connection to a scalable vpn gateway if it doesn't exist else updates the
        existing connection.
        :param resource_group_name: The resource group name of the VpnGateway.
        :type resource_group_name: str
        :param gateway_name: The name of the gateway.
        :type gateway_name: str
        :param connection_name: The name of the connection.
        :type connection_name: str
        :param vpn_connection_parameters: Parameters supplied to create or Update a VPN Connection.
        :type vpn_connection_parameters: ~azure.mgmt.network.v2019_02_01.models.VpnConnection
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either VpnConnection or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_02_01.models.VpnConnection]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VpnConnection"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial request when not resuming from a token.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                gateway_name=gateway_name,
                connection_name=connection_name,
                vpn_connection_parameters=vpn_connection_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final polled response into the model type.
            deserialized = self._deserialize('VpnConnection', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
        }
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'},  path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'}  # type: ignore
    async def _delete_initial(
        self,
        resource_group_name: str,
        gateway_name: str,
        connection_name: str,
        **kwargs: Any
    ) -> None:
        """Issue the initial DELETE request backing the delete LRO."""
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"
        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        gateway_name: str,
        connection_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Deletes a vpn connection.
        :param resource_group_name: The resource group name of the VpnGateway.
        :type resource_group_name: str
        :param gateway_name: The name of the gateway.
        :type gateway_name: str
        :param connection_name: The name of the connection.
        :type connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial request when not resuming from a token.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                gateway_name=gateway_name,
                connection_name=connection_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Delete has no body; only invoke the optional custom callback.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
        }
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'},  path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'}  # type: ignore
    def list_by_vpn_gateway(
        self,
        resource_group_name: str,
        gateway_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.ListVpnConnectionsResult"]:
        """Retrieves all vpn connections for a particular virtual wan vpn gateway.
        :param resource_group_name: The resource group name of the VpnGateway.
        :type resource_group_name: str
        :param gateway_name: The name of the gateway.
        :type gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListVpnConnectionsResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_02_01.models.ListVpnConnectionsResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ListVpnConnectionsResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build either the first-page request or a follow-up request
            # from the service-provided next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_by_vpn_gateway.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Pull the page's items and the link to the next page.
            deserialized = self._deserialize('ListVpnConnectionsResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page, raising a mapped error on non-200 responses.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.Error, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_vpn_gateway.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections'}  # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_02_01/aio/operations/_vpn_connections_operations.py
|
Python
|
mit
| 22,180
|
# Time O(m*n)  Space O(m+n)
def multiply(num1, num2):
    """Multiply two non-negative integers given as decimal strings.

    Grade-school multiplication on reversed digit arrays: digit products
    are accumulated into ``res`` and the carry is propagated immediately.

    Args:
        num1, num2: decimal digit strings (e.g. '4532').
    Returns:
        The product as a decimal string with no leading zeros ('0' for zero).
    """
    num1, num2 = num1[::-1], num2[::-1]
    res = [0] * (len(num1) + len(num2))
    for i, d1 in enumerate(num1):
        for j, d2 in enumerate(num2):
            res[i + j] += int(d1) * int(d2)
            # Integer divmod instead of int(x / 10): float division can lose
            # precision for large intermediates and is slower.
            carry, res[i + j] = divmod(res[i + j], 10)
            res[i + j + 1] += carry
    # Skip leading zeros (stop at index 0 so '0' is still produced).
    i = len(res) - 1
    while i > 0 and res[i] == 0:
        i -= 1
    return ''.join(map(str, res[i::-1]))
if __name__ == '__main__':
    # Manual smoke run: 4532 * 2 -> 9064.
    print(multiply('4532', '2'))
|
LeonardCohen/coding
|
py/multiply_strings.py
|
Python
|
gpl-2.0
| 522
|
from ..sourcefile import SourceFile
def create(filename, contents=b""):
    """Construct a SourceFile for *filename* with the given raw *contents* (must be bytes)."""
    assert isinstance(contents, bytes)
    return SourceFile("/", filename, "/", contents=contents)
def items(s):
    """Return the (item_type, url) pair for every manifest item of *s*."""
    return [(entry.item_type, entry.url) for entry in s.manifest_items()]
def test_name_is_non_test():
    """Support/tooling paths are classified as non-tests and yield no manifest items."""
    non_tests = [
        ".gitignore",
        ".travis.yml",
        "MANIFEST.json",
        "tools/test.html",
        "resources/test.html",
        "common/test.html",
        "conformance-checkers/test.html",
    ]
    for rel_path in non_tests:
        s = create(rel_path)
        assert s.name_is_non_test
        assert not s.content_is_testharness
        assert items(s) == []
def test_name_is_manual():
    """Files named *-manual.* are classified as manual tests with a single manual item."""
    manual_tests = [
        "html/test-manual.html",
        "html/test-manual.xhtml",
    ]
    for rel_path in manual_tests:
        s = create(rel_path)
        assert not s.name_is_non_test
        assert s.name_is_manual
        assert not s.content_is_testharness
        assert items(s) == [("manual", "/" + rel_path)]
def test_worker():
    """*.worker.js files become a single testharness item with the .js suffix dropped."""
    s = create("html/test.worker.js")
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_multi_global
    assert s.name_is_worker
    assert not s.name_is_reference
    assert not s.content_is_testharness
    assert items(s) == [("testharness", "/html/test.worker")]
def test_multi_global():
    """*.any.js files expand to both a window (.any.html) and a worker (.any.worker) item."""
    s = create("html/test.any.js")
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference
    assert not s.content_is_testharness
    assert items(s) == [
        ("testharness", "/html/test.any.html"),
        ("testharness", "/html/test.any.worker"),
    ]
def test_testharness():
    """An absolute /resources/testharness.js script marks htm/html content as testharness."""
    content = b"<script src=/resources/testharness.js></script>"
    for ext in ["htm", "html"]:
        filename = "html/test." + ext
        s = create(filename, content)
        assert not s.name_is_non_test
        assert not s.name_is_manual
        assert not s.name_is_multi_global
        assert not s.name_is_worker
        assert not s.name_is_reference
        assert s.content_is_testharness
        assert items(s) == [("testharness", "/" + filename)]
def test_relative_testharness():
    """A relative ../resources/testharness.js path is NOT detected as testharness content."""
    content = b"<script src=../resources/testharness.js></script>"
    for ext in ["htm", "html"]:
        filename = "html/test." + ext
        s = create(filename, content)
        assert not s.name_is_non_test
        assert not s.name_is_manual
        assert not s.name_is_multi_global
        assert not s.name_is_worker
        assert not s.name_is_reference
        assert not s.content_is_testharness
        assert items(s) == []
def test_testharness_xhtml():
    """Absolute testharness script refs are detected in XML-parsed (xhtml/xht/xml) files."""
    content = b"""
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body/>
</html>
"""
    for ext in ["xhtml", "xht", "xml"]:
        filename = "html/test." + ext
        s = create(filename, content)
        assert not s.name_is_non_test
        assert not s.name_is_manual
        assert not s.name_is_multi_global
        assert not s.name_is_worker
        assert not s.name_is_reference
        assert s.content_is_testharness
        assert items(s) == [("testharness", "/" + filename)]
def test_relative_testharness_xhtml():
    """Relative testharness script refs in XML-parsed files are NOT detected as testharness."""
    content = b"""
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
</head>
<body/>
</html>
"""
    for ext in ["xhtml", "xht", "xml"]:
        filename = "html/test." + ext
        s = create(filename, content)
        assert not s.name_is_non_test
        assert not s.name_is_manual
        assert not s.name_is_multi_global
        assert not s.name_is_worker
        assert not s.name_is_reference
        assert not s.content_is_testharness
        assert items(s) == []
def test_testharness_svg():
    """Testharness refs via the xhtml namespace inside SVG are detected (root is parsed)."""
    content = b"""\
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg"
     xmlns:h="http://www.w3.org/1999/xhtml"
     version="1.1"
     width="100%" height="100%" viewBox="0 0 400 400">
<title>Null test</title>
<h:script src="/resources/testharness.js"/>
<h:script src="/resources/testharnessreport.js"/>
</svg>
"""
    filename = "html/test.svg"
    s = create(filename, content)
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference
    # The SVG document parsed successfully (a root element is available).
    assert s.root
    assert s.content_is_testharness
    assert items(s) == [("testharness", "/" + filename)]
def test_relative_testharness_svg():
    """Relative testharness refs inside SVG are NOT detected even though parsing succeeds."""
    content = b"""\
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg"
     xmlns:h="http://www.w3.org/1999/xhtml"
     version="1.1"
     width="100%" height="100%" viewBox="0 0 400 400">
<title>Null test</title>
<h:script src="../resources/testharness.js"/>
<h:script src="../resources/testharnessreport.js"/>
</svg>
"""
    filename = "html/test.svg"
    s = create(filename, content)
    assert not s.name_is_non_test
    assert not s.name_is_manual
    assert not s.name_is_multi_global
    assert not s.name_is_worker
    assert not s.name_is_reference
    # Parsing still succeeds; only the testharness classification differs.
    assert s.root
    assert not s.content_is_testharness
    assert items(s) == []
def test_testharness_ext():
    """Files without a markup extension are not parsed, so no testharness is detected."""
    content = b"<script src=/resources/testharness.js></script>"
    for filename in ["test", "test.test"]:
        s = create("html/" + filename, content)
        assert not s.name_is_non_test
        assert not s.name_is_manual
        assert not s.name_is_multi_global
        assert not s.name_is_worker
        assert not s.name_is_reference
        # No root element: the content was never parsed as markup.
        assert not s.root
        assert not s.content_is_testharness
        assert items(s) == []
|
wldcordeiro/servo
|
tests/wpt/web-platform-tests/tools/manifest/tests/test_sourcefile.py
|
Python
|
mpl-2.0
| 5,974
|
"""UniFi Network sensor platform tests."""
from datetime import datetime
from unittest.mock import patch
from aiounifi.controller import MESSAGE_CLIENT, MESSAGE_CLIENT_REMOVED
import pytest
from homeassistant.components.device_tracker import DOMAIN as TRACKER_DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.unifi.const import (
CONF_ALLOW_BANDWIDTH_SENSORS,
CONF_ALLOW_UPTIME_SENSORS,
CONF_TRACK_CLIENTS,
CONF_TRACK_DEVICES,
DOMAIN as UNIFI_DOMAIN,
)
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import EntityCategory
import homeassistant.util.dt as dt_util
from .test_controller import setup_unifi_integration
async def test_no_clients(hass, aioclient_mock):
    """No sensor entities are created when the controller reports no clients."""
    await setup_unifi_integration(
        hass,
        aioclient_mock,
        options={
            CONF_ALLOW_BANDWIDTH_SENSORS: True,
            CONF_ALLOW_UPTIME_SENSORS: True,
        },
    )
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 0
async def test_bandwidth_sensors(hass, aioclient_mock, mock_unifi_websocket):
    """Verify that bandwidth sensors are working as expected.

    Covers initial creation for wired and wireless clients, state updates via
    websocket, removal/re-creation when the option is toggled, and that a
    controller update signal does not duplicate entities.
    """
    wired_client = {
        "hostname": "Wired client",
        "is_wired": True,
        "mac": "00:00:00:00:00:01",
        "oui": "Producer",
        "wired-rx_bytes-r": 1234000000,
        "wired-tx_bytes-r": 5678000000,
    }
    wireless_client = {
        "is_wired": False,
        "mac": "00:00:00:00:00:02",
        "name": "Wireless client",
        "oui": "Producer",
        "rx_bytes-r": 2345000000,
        "tx_bytes-r": 6789000000,
    }
    options = {
        CONF_ALLOW_BANDWIDTH_SENSORS: True,
        CONF_ALLOW_UPTIME_SENSORS: False,
        CONF_TRACK_CLIENTS: False,
        CONF_TRACK_DEVICES: False,
    }
    config_entry = await setup_unifi_integration(
        hass,
        aioclient_mock,
        options=options,
        clients_response=[wired_client, wireless_client],
    )
    # 4 bandwidth sensors (rx/tx per client) plus one non-sensor entity.
    assert len(hass.states.async_all()) == 5
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 4
    # States show the raw byte rates scaled by 1e-6.
    assert hass.states.get("sensor.wired_client_rx").state == "1234.0"
    assert hass.states.get("sensor.wired_client_tx").state == "5678.0"
    assert hass.states.get("sensor.wireless_client_rx").state == "2345.0"
    assert hass.states.get("sensor.wireless_client_tx").state == "6789.0"
    ent_reg = er.async_get(hass)
    assert (
        ent_reg.async_get("sensor.wired_client_rx").entity_category
        is EntityCategory.DIAGNOSTIC
    )
    # Verify state update
    wireless_client["rx_bytes-r"] = 3456000000
    wireless_client["tx_bytes-r"] = 7891000000
    mock_unifi_websocket(
        data={
            "meta": {"message": MESSAGE_CLIENT},
            "data": [wireless_client],
        }
    )
    await hass.async_block_till_done()
    assert hass.states.get("sensor.wireless_client_rx").state == "3456.0"
    assert hass.states.get("sensor.wireless_client_tx").state == "7891.0"
    # Disable option
    options[CONF_ALLOW_BANDWIDTH_SENSORS] = False
    hass.config_entries.async_update_entry(config_entry, options=options.copy())
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 1
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 0
    assert hass.states.get("sensor.wireless_client_rx") is None
    assert hass.states.get("sensor.wireless_client_tx") is None
    assert hass.states.get("sensor.wired_client_rx") is None
    assert hass.states.get("sensor.wired_client_tx") is None
    # Enable option
    options[CONF_ALLOW_BANDWIDTH_SENSORS] = True
    hass.config_entries.async_update_entry(config_entry, options=options.copy())
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 5
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 4
    assert hass.states.get("sensor.wireless_client_rx")
    assert hass.states.get("sensor.wireless_client_tx")
    assert hass.states.get("sensor.wired_client_rx")
    assert hass.states.get("sensor.wired_client_tx")
    # Try to add the sensors again, using a signal
    clients_connected = {wired_client["mac"], wireless_client["mac"]}
    devices_connected = set()
    controller = hass.data[UNIFI_DOMAIN][config_entry.entry_id]
    async_dispatcher_send(
        hass,
        controller.signal_update,
        clients_connected,
        devices_connected,
    )
    await hass.async_block_till_done()
    # Entity counts are unchanged: no duplicates were created.
    assert len(hass.states.async_all()) == 5
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 4
@pytest.mark.parametrize(
    "initial_uptime,event_uptime,new_uptime",
    [
        # Uptime listed in epoch time should never change
        (1609462800, 1609462800, 1612141200),
        # Uptime counted in seconds increases with every event
        (60, 64, 60),
    ],
)
async def test_uptime_sensors(
    hass,
    aioclient_mock,
    mock_unifi_websocket,
    initial_uptime,
    event_uptime,
    new_uptime,
):
    """Verify that uptime sensors are working as expected.

    The sensor state is a UTC timestamp; small uptime drifts between events
    must not change it, while a genuinely new uptime must.
    """
    uptime_client = {
        "mac": "00:00:00:00:00:01",
        "name": "client1",
        "oui": "Producer",
        "uptime": initial_uptime,
    }
    options = {
        CONF_ALLOW_BANDWIDTH_SENSORS: False,
        CONF_ALLOW_UPTIME_SENSORS: True,
        CONF_TRACK_CLIENTS: False,
        CONF_TRACK_DEVICES: False,
    }
    # Freeze "now" so the computed uptime timestamp is deterministic.
    now = datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC)
    with patch("homeassistant.util.dt.now", return_value=now):
        config_entry = await setup_unifi_integration(
            hass,
            aioclient_mock,
            options=options,
            clients_response=[uptime_client],
        )
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1
    assert hass.states.get("sensor.client1_uptime").state == "2021-01-01T01:00:00+00:00"
    ent_reg = er.async_get(hass)
    assert (
        ent_reg.async_get("sensor.client1_uptime").entity_category
        is EntityCategory.DIAGNOSTIC
    )
    # Verify normal new event doesn't change uptime
    # 4 seconds has passed
    uptime_client["uptime"] = event_uptime
    now = datetime(2021, 1, 1, 1, 1, 4, tzinfo=dt_util.UTC)
    with patch("homeassistant.util.dt.now", return_value=now):
        mock_unifi_websocket(
            data={
                "meta": {"message": MESSAGE_CLIENT},
                "data": [uptime_client],
            }
        )
        await hass.async_block_till_done()
    assert hass.states.get("sensor.client1_uptime").state == "2021-01-01T01:00:00+00:00"
    # Verify new event change uptime
    # 1 month has passed
    uptime_client["uptime"] = new_uptime
    now = datetime(2021, 2, 1, 1, 1, 0, tzinfo=dt_util.UTC)
    with patch("homeassistant.util.dt.now", return_value=now):
        mock_unifi_websocket(
            data={
                "meta": {"message": MESSAGE_CLIENT},
                "data": [uptime_client],
            }
        )
        await hass.async_block_till_done()
    assert hass.states.get("sensor.client1_uptime").state == "2021-02-01T01:00:00+00:00"
    # Disable option
    options[CONF_ALLOW_UPTIME_SENSORS] = False
    hass.config_entries.async_update_entry(config_entry, options=options.copy())
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 1
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 0
    assert hass.states.get("sensor.client1_uptime") is None
    # Enable option
    options[CONF_ALLOW_UPTIME_SENSORS] = True
    with patch("homeassistant.util.dt.now", return_value=now):
        hass.config_entries.async_update_entry(config_entry, options=options.copy())
        await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1
    assert hass.states.get("sensor.client1_uptime")
    # Try to add the sensors again, using a signal
    clients_connected = {uptime_client["mac"]}
    devices_connected = set()
    controller = hass.data[UNIFI_DOMAIN][config_entry.entry_id]
    async_dispatcher_send(
        hass,
        controller.signal_update,
        clients_connected,
        devices_connected,
    )
    await hass.async_block_till_done()
    # Entity counts unchanged: the signal must not create duplicates.
    assert len(hass.states.async_all()) == 2
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1
async def test_remove_sensors(hass, aioclient_mock, mock_unifi_websocket):
    """Verify removing of clients work as expected.

    A MESSAGE_CLIENT_REMOVED websocket event for the wired client must remove
    its three sensors and its tracker while leaving the wireless client intact.
    """
    wired_client = {
        "hostname": "Wired client",
        "is_wired": True,
        "mac": "00:00:00:00:00:01",
        "oui": "Producer",
        "wired-rx_bytes": 1234000000,
        "wired-tx_bytes": 5678000000,
        "uptime": 1600094505,
    }
    wireless_client = {
        "is_wired": False,
        "mac": "00:00:00:00:00:02",
        "name": "Wireless client",
        "oui": "Producer",
        "rx_bytes": 2345000000,
        "tx_bytes": 6789000000,
        "uptime": 60,
    }
    await setup_unifi_integration(
        hass,
        aioclient_mock,
        options={
            CONF_ALLOW_BANDWIDTH_SENSORS: True,
            CONF_ALLOW_UPTIME_SENSORS: True,
        },
        clients_response=[wired_client, wireless_client],
    )
    # Per client: rx, tx and uptime sensors plus a device tracker.
    assert len(hass.states.async_all()) == 9
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6
    assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2
    assert hass.states.get("sensor.wired_client_rx")
    assert hass.states.get("sensor.wired_client_tx")
    assert hass.states.get("sensor.wired_client_uptime")
    assert hass.states.get("sensor.wireless_client_rx")
    assert hass.states.get("sensor.wireless_client_tx")
    assert hass.states.get("sensor.wireless_client_uptime")
    # Remove wired client
    mock_unifi_websocket(
        data={
            "meta": {"message": MESSAGE_CLIENT_REMOVED},
            "data": [wired_client],
        }
    )
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 5
    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3
    assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1
    assert hass.states.get("sensor.wired_client_rx") is None
    assert hass.states.get("sensor.wired_client_tx") is None
    assert hass.states.get("sensor.wired_client_uptime") is None
    assert hass.states.get("sensor.wireless_client_rx")
    assert hass.states.get("sensor.wireless_client_tx")
    assert hass.states.get("sensor.wireless_client_uptime")
|
rohitranjan1991/home-assistant
|
tests/components/unifi/test_sensor.py
|
Python
|
mit
| 10,717
|
"""circuits.web websockets"""
from .client import WebSocketClient
from .dispatcher import WebSocketsDispatcher
# flake8: noqa
# pylama: skip=1
|
treemo/circuits
|
circuits/web/websockets/__init__.py
|
Python
|
mit
| 145
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Add node.raid_config and node.target_raid_config
Revision ID: 789acc877671
Revises: 2fb93ffd2af1
Create Date: 2015-06-26 01:21:46.062311
"""
# revision identifiers, used by Alembic.
# This migration applies on top of 2fb93ffd2af1 in the migration chain.
revision = '789acc877671'
down_revision = '2fb93ffd2af1'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the nullable Text columns raid_config and target_raid_config to nodes."""
    for column_name in ('raid_config', 'target_raid_config'):
        op.add_column('nodes', sa.Column(column_name, sa.Text(), nullable=True))
def downgrade():
    """Drop the raid_config and target_raid_config columns from nodes."""
    for column_name in ('raid_config', 'target_raid_config'):
        op.drop_column('nodes', column_name)
|
redhat-openstack/ironic
|
ironic/db/sqlalchemy/alembic/versions/789acc877671_add_raid_config.py
|
Python
|
apache-2.0
| 1,196
|
from itertools import izip
import requests
import grequests
import urls
import json_parsers as parse
def simple_request(id):
    """Synchronously GET the URL built for *id* and return the requests Response."""
    return requests.get(urls.make_url(id))
def category_search(search_category):
    """Query the MediaWiki API for subcategories of *search_category*.

    *search_category* may be a category title or a numeric page id
    (digit-only strings are sent as ``cmpageid``, otherwise ``cmtitle``).

    Returns the decoded JSON payload as a dict, or an empty dict when the
    response body is not valid JSON (best-effort, never raises for that).
    """
    data = {}
    params = {
        'action': 'query',
        'list': 'categorymembers',
        'cmtype': 'subcat',
        'prop': 'info',
        'format': 'json',
        'cmlimit': '500',
    }
    search_category = str(search_category)
    if search_category.isdigit():
        params['cmpageid'] = search_category
    else:
        params['cmtitle'] = search_category
    # NOTE(review): the endpoint name 'wikipedia_api.php' is unusual
    # (MediaWiki serves api.php) -- confirm this URL is intentional.
    r = requests.get('https://en.wikipedia.org/w/wikipedia_api.php', params=params)
    try:
        data = r.json()
    except ValueError:
        # requests raises ValueError when the body is not valid JSON;
        # a bare except here would also swallow KeyboardInterrupt etc.
        pass
    return data
def async_response(id):
    """Fetch the html, expanded and plain-text variants for *id* concurrently.

    Returns a dict with keys 'wiki_html', 'wiki_expanded_html' and
    'wiki_text' mapped to the corresponding grequests responses.
    """
    response = {}
    # Order matters: responses are matched back to keys by position.
    url_list = [
        urls.make_url(id, is_html=True),
        urls.make_url(id, is_expanded=True),
        urls.make_url(id),
    ]
    rs = (grequests.get(u) for u in url_list)
    response_list = grequests.map(rs)
    response['wiki_html'] = response_list[0]
    response['wiki_expanded_html'] = response_list[1]
    response['wiki_text'] = response_list[2]
    return response
def async_related(add_call_list):
    """Concurrently fetch the text and html URLs for every related id.

    Produces responses in pairs: (text, html) per entry, in input order.
    """
    urls_to_fetch = [
        urls.make_url(entry[1], **extra)
        for entry in add_call_list
        for extra in ({}, {'is_html': True})
    ]
    pending = (grequests.get(u) for u in urls_to_fetch)
    return grequests.map(pending)
def related_data(add_call_list):
    """Fetch and parse text/html for each related entry.

    Returns {'add_texts': {key: parsed_text}, 'add_htmls': {key: parsed_html}}
    where key is entry[0] of each item in *add_call_list*.  Entries whose
    responses fail to parse are silently skipped (best-effort).
    """
    related = {}
    add_texts = {}
    add_htmls = {}
    # enumerate replaces the hand-rolled counter; the index still advances on
    # parse failures so responses stay aligned with add_call_list.
    for count, (r_text, r_html) in enumerate(pairwise(async_related(add_call_list))):
        try:
            add_texts[add_call_list[count][0]] = parse.wiki_text(r_text)
            add_htmls[add_call_list[count][0]] = parse.wiki_html(r_html)
        except Exception:
            # Parsing is best-effort; a bare except would also swallow
            # KeyboardInterrupt/SystemExit.
            pass
    related['add_texts'] = add_texts
    related['add_htmls'] = add_htmls
    return related
def pairwise(iterable):
    "s -> (s0,s1), (s2,s3), (s4, s5), ..."
    # Both izip arguments share one iterator, so items are consumed in pairs.
    it = iter(iterable)
    return izip(it, it)
|
ojones/wikipedia_parser
|
wikipedia_parser/wikipedia_api/http_calls.py
|
Python
|
mit
| 2,215
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Helper functions common to native, java and host-driven test runners."""
import logging
import sys
import time
class CustomFormatter(logging.Formatter):
    """Formatter prefixing the level initial and seconds since creation."""

    # override
    def __init__(self, fmt='%(threadName)-4s %(message)s'):
        # logging.Formatter is not a new-style class on older Pythons,
        # so super() cannot be used here.
        logging.Formatter.__init__(self, fmt=fmt)
        self._creation_time = time.time()

    # override
    def format(self, record):
        # Same old-style-class caveat as in __init__.
        formatted = logging.Formatter.format(self, record)
        if 'MainThread' in formatted[:19]:
            formatted = formatted.replace('MainThread', 'Main', 1)
        elapsed = time.time() - self._creation_time
        return '%s %8.3fs %s' % (record.levelname[0], elapsed, formatted)
def SetLogLevel(verbose_count):
    """Sets log level as |verbose_count|.

    0 -> WARNING (default), 1 -> INFO, >= 2 -> DEBUG.  Also attaches a
    stdout handler using CustomFormatter to the root logger.
    """
    log_level = logging.WARNING  # Default.
    if verbose_count == 1:
        log_level = logging.INFO
    elif verbose_count >= 2:
        log_level = logging.DEBUG
    logger = logging.getLogger()
    logger.setLevel(log_level)
    custom_handler = logging.StreamHandler(sys.stdout)
    custom_handler.setFormatter(CustomFormatter())
    # Reuse the root logger fetched above (the original looked it up twice).
    # NOTE(review): every call appends another handler; repeated calls would
    # duplicate log output -- confirm callers invoke this only once.
    logger.addHandler(custom_handler)
|
mrtnrdl/.macdots
|
scripts/bin/platform-tools/systrace/catapult/devil/devil/utils/run_tests_helper.py
|
Python
|
unlicense
| 1,463
|
from more.chameleon import ChameleonApp
class App(ChameleonApp):
    """Fixture application that renders templates through ChameleonApp."""
    pass
@App.path(path="persons/{name}")
class Person:
    """Model published at persons/{name}; keeps the name path segment."""
    def __init__(self, name):
        self.name = name
@App.template_directory()
def get_template_dir():
    """Register "templates" as App's template directory."""
    return "templates"
@App.html(model=Person, template="person.pt")
def person_default(self, request):
    """Render a Person with person.pt, exposing its name to the template."""
    return {"name": self.name}
class SubApp(App):
    """App subclass that registers its own, overriding template directory."""
    pass
@SubApp.template_directory()
def get_override_template_dir():
    """Register "templates_override" as SubApp's template directory."""
    return "templates_override"
|
morepath/more.chameleon
|
more/chameleon/tests/fixtures/override_template.py
|
Python
|
bsd-3-clause
| 495
|
import unittest
from autosklearn.pipeline.components.classification.qda import QDA
from autosklearn.pipeline.util import _test_classifier, _test_classifier_predict_proba
import numpy as np
import sklearn.metrics
import sklearn.qda
class QDAComponentTest(unittest.TestCase):
    """Tests for the auto-sklearn QDA classification component."""

    def test_default_configuration_iris(self):
        """Default-configured QDA reaches perfect accuracy on iris."""
        for i in range(10):
            predictions, targets = \
                _test_classifier(QDA)
            self.assertAlmostEqual(1.0,
                sklearn.metrics.accuracy_score(predictions,
                                               targets))

    #@unittest.skip("QDA fails on this one")
    def test_default_configuration_digits(self):
        """Default-configured QDA on digits pins a (low) reference accuracy."""
        for i in range(10):
            predictions, targets = \
                _test_classifier(classifier=QDA, dataset='digits')
            self.assertAlmostEqual(0.18882817243472982,
                sklearn.metrics.accuracy_score(predictions,
                                               targets))

    def test_default_configuration_binary(self):
        """Binarized iris is classified perfectly."""
        for i in range(10):
            predictions, targets = \
                _test_classifier(QDA, make_binary=True)
            self.assertAlmostEqual(1.0,
                sklearn.metrics.accuracy_score(predictions,
                                               targets))

    def test_produce_zero_scaling(self):
        """A pipeline config known to break QDA raises the scalings_ ValueError."""
        from autosklearn.pipeline.classification import SimpleClassificationPipeline
        from autosklearn.pipeline import util as putil
        p = SimpleClassificationPipeline(configuration={
            'balancing:strategy': 'weighting',
            'classifier:__choice__': 'qda',
            'classifier:qda:reg_param': 2.992955287687101,
            'imputation:strategy': 'most_frequent',
            'one_hot_encoding:use_minimum_fraction': 'False',
            'preprocessor:__choice__': 'gem',
            'preprocessor:gem:N': 18,
            'preprocessor:gem:precond': 0.12360249797270745,
            'rescaling:__choice__': 'none'})
        X_train, Y_train, X_test, Y_test = putil.get_dataset('iris')
        self.assertRaisesRegexp(ValueError, 'Numerical problems in '
                                            'QDA. QDA.scalings_ contains '
                                            'values <= 0.0',
                                p.fit, X_train, Y_train)
        # p.fit(X_train, Y_train)
        # print(p.pipeline_.steps[-1][1].estimator.scalings_)
        # print(p.predict_proba(X_test))

    def test_default_configuration_multilabel(self):
        """Multilabel iris hits a pinned average-precision reference score."""
        for i in range(10):
            predictions, targets = \
                _test_classifier(QDA, make_multilabel=True)
            self.assertAlmostEqual(0.99456140350877187,
                sklearn.metrics.average_precision_score(
                    predictions, targets))

    def test_default_configuration_predict_proba_multilabel(self):
        """predict_proba returns (50, 3) probabilities with perfect AP."""
        for i in range(10):
            predictions, targets = \
                _test_classifier_predict_proba(QDA,
                                               make_multilabel=True)
            self.assertEqual(predictions.shape, ((50, 3)))
            self.assertAlmostEqual(1.0,
                sklearn.metrics.average_precision_score(
                    targets, predictions))

    def test_target_algorithm_multioutput_multiclass_support(self):
        """Plain sklearn QDA rejects multioutput-multiclass targets."""
        cls = sklearn.qda.QDA()
        X = np.random.random((10, 10))
        y = np.random.randint(0, 1, size=(10, 10))
        self.assertRaisesRegexp(ValueError, 'bad input shape \(10, 10\)',
                                cls.fit, X, y)
|
hmendozap/auto-sklearn
|
test/test_pipeline/components/classification/test_qda.py
|
Python
|
bsd-3-clause
| 3,764
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from datetime import datetime
from sqlalchemy import (Column, ForeignKey, Integer, Unicode, Boolean,
DateTime, TEXT)
from sqlalchemy.sql import and_
from sqlalchemy.orm import relationship
from turbogears.database import session
from bkr.server import identity
from bkr.server.bexceptions import BX
from .base import DeclarativeMappedObject
class ConfigItem(DeclarativeMappedObject):
    """A named configuration setting whose values are versioned by time.

    Values are stored in ConfigValueInt or ConfigValueString rows (chosen by
    the ``numeric`` flag) keyed by a ``valid_from`` timestamp, so a setting
    can be scheduled to take effect in the future.
    """

    __tablename__ = 'config_item'
    __table_args__ = {'mysql_engine': 'InnoDB'}
    id = Column(Integer, primary_key=True)
    name = Column(Unicode(255), unique=True)
    description = Column(Unicode(255))
    # True -> values stored as ConfigValueInt, False -> ConfigValueString.
    numeric = Column(Boolean, default=False)
    readonly = Column(Boolean, default=False)

    @classmethod
    def lazy_create(cls, name, description, numeric):
        """Get-or-create by name, filling description/numeric on creation."""
        return super(ConfigItem, cls).lazy_create(name=name,
                _extra_attrs=dict(description=description, numeric=numeric))

    @classmethod
    def by_name(cls, name):
        """Return the single item with this exact name (raises if absent)."""
        return cls.query.filter_by(name=name).one()

    @classmethod
    def list_by_name(cls, name, find_anywhere=False):
        """Return a query matching names by prefix, or substring if find_anywhere."""
        if find_anywhere:
            q = cls.query.filter(ConfigItem.name.like('%%%s%%' % name))
        else:
            q = cls.query.filter(ConfigItem.name.like('%s%%' % name))
        return q

    def _value_class(self):
        # Pick the value table matching this item's type flag.
        if self.numeric:
            return ConfigValueInt
        else:
            return ConfigValueString
    value_class = property(_value_class)

    def values(self):
        """Query over all value rows belonging to this item."""
        return self.value_class.query.filter(self.value_class.config_item_id == self.id)

    def current_value(self, default=None):
        """Return the most recent value already in effect, or *default*."""
        v = self.values().\
            filter(and_(self.value_class.valid_from <= datetime.utcnow(), self.value_class.config_item_id == self.id)).\
            order_by(self.value_class.valid_from.desc()).first()
        if v:
            return v.value
        else:
            return default

    def next_value(self):
        """Return the earliest value scheduled for the future, if any."""
        return self.values().filter(self.value_class.valid_from > datetime.utcnow()).\
            order_by(self.value_class.valid_from.asc()).first()

    def set(self, value, valid_from=None, user=None):
        """Record a new value, effective from *valid_from* (must not be past).

        Raises BX when no user can be determined or valid_from is in the past.
        """
        if user is None:
            try:
                user = identity.current.user
            except AttributeError:
                # NOTE(review): _ appears to be a globally installed i18n
                # helper (not imported in this module) -- confirm.
                raise BX(_('Settings may not be changed anonymously'))
        if valid_from:
            if valid_from < datetime.utcnow():
                raise BX(_('%s is in the past') % valid_from)
        self.value_class(self, value, user, valid_from)
class ConfigValueString(DeclarativeMappedObject):
    """A time-versioned string value belonging to a ConfigItem."""

    __tablename__ = 'config_value_string'
    __table_args__ = {'mysql_engine': 'InnoDB'}
    id = Column(Integer, autoincrement=True, primary_key=True)
    config_item_id = Column(Integer, ForeignKey('config_item.id',
            onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
    config_item = relationship(ConfigItem)
    modified = Column(DateTime, default=datetime.utcnow)
    user_id = Column(Integer, ForeignKey('tg_user.user_id'), nullable=False)
    user = relationship('User', back_populates='config_values_string')
    # Timestamp from which this value takes effect (defaults to "now").
    valid_from = Column(DateTime, default=datetime.utcnow)
    value = Column(TEXT, nullable=True)

    def __init__(self, config_item, value, user, valid_from=None):
        super(ConfigValueString, self).__init__()
        self.config_item = config_item
        self.value = value
        self.user = user
        if valid_from:
            self.valid_from = valid_from

    def __json__(self):
        """Serializable representation used by the JSON layer."""
        return {
            'id': self.id,
            'value': self.value,
            'modified': self.modified,
            'valid_from': self.valid_from,
        }
class ConfigValueInt(DeclarativeMappedObject):
    """A time-versioned integer value belonging to a ConfigItem."""

    __tablename__ = 'config_value_int'
    __table_args__ = {'mysql_engine': 'InnoDB'}
    id = Column(Integer, autoincrement=True, primary_key=True)
    config_item_id = Column(Integer, ForeignKey('config_item.id',
            onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
    config_item = relationship(ConfigItem)
    modified = Column(DateTime, default=datetime.utcnow)
    user_id = Column(Integer, ForeignKey('tg_user.user_id'), nullable=False)
    user = relationship('User', back_populates='config_values_int')
    # Timestamp from which this value takes effect (defaults to "now").
    valid_from = Column(DateTime, default=datetime.utcnow)
    value = Column(Integer, nullable=True)

    def __init__(self, config_item, value, user, valid_from=None):
        super(ConfigValueInt, self).__init__()
        self.config_item = config_item
        self.value = value
        self.user = user
        if valid_from:
            self.valid_from = valid_from

    def __json__(self):
        """Serializable representation used by the JSON layer."""
        return {
            'id': self.id,
            'value': self.value,
            'modified': self.modified,
            'valid_from': self.valid_from,
        }
|
jtoppins/beaker
|
Server/bkr/server/model/config.py
|
Python
|
gpl-2.0
| 5,072
|
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
import tensorflow_federated as tff
from utils.datasets import infinite_emnist
def _compute_dataset_length(dataset):
return dataset.reduce(0, lambda x, _: x + 1)
class InfiniteEmnistTest(tf.test.TestCase):
    """Tests for infinite_emnist.get_infinite on the synthetic EMNIST data."""

    def test_element_type_structure_preserved(self):
        """Expansion keeps the per-element type structure unchanged."""
        raw_client_data = tff.simulation.datasets.emnist.get_synthetic()
        inf_client_data = infinite_emnist.get_infinite(raw_client_data, 5)
        self.assertEqual(raw_client_data.element_type_structure,
                         inf_client_data.element_type_structure)

    def test_pseudo_client_count(self):
        """One synthetic client expands to the requested number of pseudo-clients."""
        raw_client_data = tff.simulation.datasets.emnist.get_synthetic()
        self.assertLen(raw_client_data.client_ids, 1)
        inf_client_data = infinite_emnist.get_infinite(raw_client_data, 10)
        self.assertLen(inf_client_data.client_ids, 10)

    def test_first_pseudo_client_preserves_original(self):
        """The first pseudo-client's data matches the original client's data."""
        raw_client_data = tff.simulation.datasets.emnist.get_synthetic()
        inf_client_data = infinite_emnist.get_infinite(raw_client_data, 5)
        raw_dataset = raw_client_data.dataset_computation(
            raw_client_data.client_ids[0])
        inf_dataset = inf_client_data.dataset_computation(
            inf_client_data.client_ids[0])
        length1 = _compute_dataset_length(raw_dataset)
        length2 = _compute_dataset_length(inf_dataset)
        self.assertEqual(length1, length2)
        for raw_batch, inf_batch in zip(raw_dataset, inf_dataset):
            self.assertAllClose(raw_batch, inf_batch)

    def test_transform_modifies_data(self):
        """Different pseudo-clients have equal lengths but different batch values."""
        data = infinite_emnist.get_infinite(
            tff.simulation.datasets.emnist.get_synthetic(), 3)
        datasets = [data.dataset_computation(id) for id in data.client_ids]
        lengths = [_compute_dataset_length(datasets[i]) for i in [0, 1, 2]]
        self.assertEqual(lengths[0], lengths[1])
        self.assertEqual(lengths[1], lengths[2])
        for batch0, batch1, batch2 in zip(datasets[0], datasets[1], datasets[2]):
            self.assertNotAllClose(batch0, batch1)
            self.assertNotAllClose(batch1, batch2)

    def test_dataset_computation_equals_create_tf_dataset(self):
        """dataset_computation and create_tf_dataset_for_client agree per client."""
        synth_data = tff.simulation.datasets.emnist.get_synthetic()
        data = infinite_emnist.get_infinite(synth_data, 3)
        for client_id in data.client_ids:
            comp_dataset = data.dataset_computation(client_id)
            create_tf_dataset = data.create_tf_dataset_for_client(client_id)
            for batch1, batch2 in zip(comp_dataset, create_tf_dataset):
                # For some reason it appears tf.quantization.quantize_and_dequantize
                # sometimes (very rarely-- on one pixel for this test) gives results
                # that differ by a single bit between the serialized and the
                # non-serialized versions. Hence we use atol just larger than 1 bit.
                self.assertAllClose(batch1['pixels'], batch2['pixels'], atol=1.5 / 255)
if __name__ == '__main__':
    # Delegate to TensorFlow's test runner.
    tf.test.main()
|
google-research/public-data-in-dpfl
|
utils/datasets/infinite_emnist_test.py
|
Python
|
apache-2.0
| 3,434
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
from spack.main import SpackCommand
# Command under test: callable wrapper around `spack versions`.
versions = SpackCommand('versions')
def test_safe_only_versions():
    """Only test the safe versions of a package.

    (Using the deprecated command line argument)
    """
    versions('--safe-only', 'zlib')
def test_safe_versions():
    """Only test the safe versions of a package (no network marker needed)."""
    versions('--safe', 'zlib')
@pytest.mark.network
def test_remote_versions():
    """Test a package for which remote versions should be available."""
    versions('zlib')
@pytest.mark.network
def test_remote_versions_only():
    """Test a package for which remote versions should be available (--remote flag)."""
    versions('--remote', 'zlib')
@pytest.mark.network
@pytest.mark.usefixtures('mock_packages')
def test_new_versions_only():
    """Test a package for which new versions should be available."""
    versions('--new', 'brillig')
@pytest.mark.network
def test_no_versions():
    """Test a package for which no remote versions are available."""
    versions('converge')
@pytest.mark.network
def test_no_unchecksummed_versions():
    """Test a package for which no unchecksummed versions are available."""
    versions('bzip2')
@pytest.mark.network
def test_versions_no_url():
    """Test a package with versions but without a ``url`` attribute."""
    versions('graphviz')
@pytest.mark.network
def test_no_versions_no_url():
    """Test a package without versions or a ``url`` attribute."""
    versions('opengl')
|
iulian787/spack
|
lib/spack/spack/test/cmd/versions.py
|
Python
|
lgpl-2.1
| 1,677
|
#!/usr/bin/python
import sys
import os
import re
import copy
import operator
import math
import kpcommon as kpc
import mdb_common_lib as mdbcl
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
import scipy.sparse
import numpy
from nltk.tokenize import TreebankWordTokenizer as Tokenizer
from nltk.tag.perceptron import PerceptronTagger
# NOTE(review): this is Python 2 code (`print >> stream` statements).
# Keyphrase classifier: builds one TF-IDF "document" per class from the
# training corpus, then assigns each test annotation the class whose
# aggregate document it is most cosine-similar to.
if __name__ == "__main__":
    try:
        # Trailing "debug" argument limits processing to `debug_tests` files.
        debug = True if sys.argv[-1] == "debug" else False
        debug_tests = 3
        file_count = 0
        try:
            dir_corpus = sys.argv[1]
            dir_test = sys.argv[2]
            dir_output = sys.argv[3]
            min_similarity = float(sys.argv[4])
        except:
            # NOTE(review): bare except logs and falls through with the
            # directory variables undefined; the outer handler then prints
            # usage. Confirm this best-effort flow is intended.
            print >> sys.stderr, "E) Directories: ", sys.exc_info()
        tokenizer = Tokenizer()
        tagger = PerceptronTagger()
        # The four target classes plus a rejection class.
        process_class = "Process"
        task_class = "Task"
        material_class = "Material"
        none_class = "None"
        # class name -> list of tokenized training projections.
        train_projection_classes = {}
        train_projection_classes[process_class] = []
        train_projection_classes[task_class] = []
        train_projection_classes[material_class] = []
        train_projection_classes[none_class] = []
        # --- Training pass: collect tokenized projections per class. ---
        for (dirname, _, filenames) in os.walk(dir_corpus):
            for f in filenames:
                ext = f[-4:]
                current_filename = f[:-4]
                if ext == '.ann':
                    file_count += 1 #debug
                    if debug and file_count > debug_tests: #debug
                        break #debug
                    print >> sys.stderr, file_count, f[:-4]
                    try:
                        train_projection = kpc.get_document_content_ann(dirname, current_filename + ".clsse")
                        #train_projection_extension = kpc.get_document_content_ann(dirname, current_filename + ".ann")
                        for projection in train_projection:
                            # projection[1] is "<class> <offsets...>"; keep the class.
                            projection_type = projection[1].split(" ")[0]
                            tokenized_projection = kpc.my_features_as_string(projection[2], projection[3], tokenizer)
                            train_projection_classes[projection_type].append(tokenized_projection)
                    except:
                        # NOTE(review): bare except skips unreadable files.
                        print >> sys.stderr, "E) Open files: ", sys.exc_info()
                else:
                    continue
        # One aggregate "document" per class for the TF-IDF vectorizer.
        corpus = []
        Dnames = []
        for tpc in train_projection_classes:
            Dnames.append(tpc)
            corpus.append(" ".join(train_projection_classes[tpc]))
        file_count = 0
        # --- Test pass: classify each annotation by cosine similarity. ---
        for (dirname, _, filenames) in os.walk(dir_test):
            for f in filenames:
                ext = f[-4:]
                current_filename = f
                if ext == '.ann':
                    file_count += 1 #debug
                    if debug and file_count > debug_tests: #debug
                        break #debug
                    print >> sys.stderr, "Test: ", file_count, f[:-4], min_similarity
                    ann_content = kpc.get_document_content_ann(dirname, current_filename)
                    #print ann_content
                    keyphrase_extractions = []
                    # Cache: annotation text -> similarity dict (avoids
                    # re-vectorizing repeated surface forms).
                    ann_tested = {}
                    for annotation in ann_content:
                        if annotation[2] in ann_tested:
                            similarity = ann_tested[annotation[2]]
                        else:
                            # Vectorize the class corpus plus this annotation,
                            # then compare the annotation against each class.
                            tmp_corpus = copy.copy(corpus)
                            tmp_corpus.append( kpc.my_features_as_string(annotation[2],
                                kpc.pos_tagger(annotation[2], tagger, tokenizer),
                                tokenizer))
                            #print >> sys.stderr, "Corpus", len(tmp_corpus)
                            vectorizer = TfidfVectorizer(min_df=0, analyzer=kpc.my_features)
                            tfidf = vectorizer.fit_transform(tmp_corpus)
                            tfidf_test = tfidf[-1:]
                            #print >> sys.stderr, "TF-IDF", tfidf.shape
                            similarity = dict(zip(Dnames, cosine_similarity(tfidf_test, tfidf[:-1]).flatten()))
                            #print >> sys.stderr, "Cosine similarity"
                            ann_tested[annotation[2]] = similarity
                        # Most similar class wins, if above threshold and not "None".
                        kp_type = max(similarity.items(), key=operator.itemgetter(1))
                        if kp_type[0] != none_class and kp_type[1] > min_similarity:
                            #print >> sys.stderr, annotation[2], similarity
                            keyphrase_extractions.append([annotation[0:1] + [" ".join([kp_type[0]] + annotation[1].split(" ")[1:])] + annotation[2:] ,
                                similarity[none_class]])
                    # Sort by similarity to the "None" class (ascending).
                    keyphrase_extractions = sorted(keyphrase_extractions, key = operator.itemgetter(1))
                    file_output = os.path.join(dir_output, current_filename)
                    stream_output = open(file_output, "w")
                    kp_count = 0
                    for kpe in keyphrase_extractions:
                        # Re-number annotations T1, T2, ... in output order.
                        kp_count += 1
                        kpe_string = "\t".join(["T" + str(kp_count)] + kpe[0][1:])
                        print >> stream_output, kpe_string.encode("utf-8")
                    stream_output.close()
                else:
                    continue
    except:
        # Any failure falls through to a usage message.
        print >> sys.stderr
        print >> sys.stderr, "usage: python", sys.argv[0], "<corpus_dir_path> <output_dir_path>"
        print >> sys.stderr, "example:"
        print >> sys.stderr, " python", sys.argv[0], "some/path/to/corpus/ some/path/to/output/"
        print >> sys.stderr, "Error: ", sys.exc_info()
|
snovd/test-scripts
|
kpext/kp_svm_with_projection_four_classes_more_features.py
|
Python
|
mit
| 5,810
|
from Numberjack import *
# Job Shop Scheduling
# Given a set of N job of various sizes, the job shop scheduling problem is to
# schedule these on M machies such that the overall makespan is minimized. The
# makespan is the total length of the schedule.
###############################################
####### Class JSP: problem instance #######
###############################################
class JSP:
    """A Job Shop Scheduling problem instance parsed from a data file.

    Expected file format: a header line ``<nJobs> <nMachines> ...``, a
    separator line, ``nJobs`` lines of task durations, another separator
    line, and ``nJobs`` lines of 1-based machine indices (one per task).
    """

    def __init__(self, data_file):
        # Fix: use a context manager so the data file is always closed
        # (the original leaked the handle). Also parse with plain .split()
        # instead of readline()[:-1], which chopped the last character when
        # the final line lacked a trailing newline.
        with open(data_file) as stream:
            n, m = stream.readline().split()[:2]
            self.nJobs = int(n)
            self.nMachines = int(m)
            stream.readline()  # skip separator line
            ## matrix of job's durations
            self.job = []
            ## indices of the jobs organized by machines
            self.machine = [[] for i in range(self.nMachines)]
            ## m[i][j]: 0-based machine running task j of job i
            self.m = [[None] * self.nMachines for i in range(self.nJobs)]
            for i in range(self.nJobs):
                self.job.append([int(elt) for elt in stream.readline().split()])
            stream.readline()  # skip separator line
            for i in range(self.nJobs):
                machines = stream.readline().split()
                for j in range(len(machines)):
                    # Machine indices in the file are 1-based.
                    self.machine[int(machines[j]) - 1].append((i, j))
                    self.m[i][j] = (int(machines[j]) - 1)

    def __str__(self):
        """Human-readable dump: durations, machine groups, job->machine map."""
        return '\n'+str(self.job)+'\n\n'+str(self.machine)+'\n\n'+str(self.m)+'\n'

    def lower_bound(self):
        """Lower bound on the makespan: the heaviest single job or machine load."""
        longest_job = max([sum(job) for job in self.job])
        longest_machine = max([sum([self.job[i][j] for i, j in mac]) for mac in self.machine])
        return max([longest_job, longest_machine])

    def upper_bound(self):
        """Upper bound on the makespan from a greedy non-delay schedule."""
        M_job = [0]*self.nJobs          # finish time of each job so far
        M_machine = [0]*self.nMachines  # finish time of each machine so far
        for i in range(self.nMachines):
            for j in range(self.nJobs):
                # Schedule task i of job j as early as both its job and
                # its machine allow.
                start_time = max(M_job[j], M_machine[self.m[j][i]])
                M_job[j] = start_time+self.job[j][i]
                M_machine[self.m[j][i]] = start_time+self.job[j][i]
        return max(max(M_job), max(M_machine))
def get_model(jsp):
    """Build the Numberjack constraint model for a JSP instance.

    Returns ``(C_max, Jobs, model)``: the makespan variable, the Matrix of
    Task variables (one row per job), and the Model minimizing C_max.
    """
    lb = jsp.lower_bound()
    ub = jsp.upper_bound()
    C_max = Variable(lb, ub, 'C_max')
    Jobs = Matrix([[Task(ub, p) for p in job] for job in jsp.job])
    model = Model(
        # Tasks sharing a machine must not overlap.
        [UnaryResource([Jobs[m] for m in machine]) for machine in jsp.machine],
        # Tasks within a job run in their given order.
        [[job[i] < job[i+1] for i in range(jsp.nMachines-1)] for job in Jobs],
        # Every job's last task finishes before the makespan.
        [job[-1] < C_max for job in Jobs],
        Minimise(C_max)
    )
    return C_max, Jobs, model
def solve(param):
    """Solve the JSP instance described by ``param`` and return a report string.

    ``param`` keys: 'data' (instance file), 'solver', 'verbose', 'tcutoff',
    'print' ('yes' triggers a matplotlib visualization).
    """
    jsp = JSP(param['data'])
    C_max, Jobs, model = get_model(jsp)
    solver = model.load(param['solver'])
    solver.setVerbosity(param['verbose'])
    solver.setTimeLimit(param['tcutoff'])
    # NOTE(review): `sys` is not imported in this file; presumably it (and
    # GEOMETRIC) arrive via `from Numberjack import *` -- confirm.
    if sys.argv[-1] == 'scheduling':
        solver.setHeuristic('Scheduling', 'Promise', 2)
        solver.solveAndRestart(GEOMETRIC, 256, 1.3)
    else:
        solver.solve()
    # Build a job x time grid: each cell holds the 1-based index of the
    # machine running that job at that time step (-1 = idle).
    schedule = [[-1]*C_max.get_value() for job in jsp.job]
    index = 0
    for machine in jsp.machine:
        index += 1
        for m in machine:
            for i in range(Jobs[m].duration):
                start = Jobs[m].get_value()
                schedule[m[0]][start+i] = index
    # Optional visualization of the schedule as a colored grid.
    if param['print'] == 'yes':
        print('\n display schedule')
        width = 60  # pixel rows per job in the rendered image
        print_schedule = []
        for row in schedule:
            print_schedule.extend([row]*width)
        import pylab
        pylab.yticks(pylab.arange(int(width / 2), width * (len(jsp.job) + 1), width), ['job' + str(i + 1) for i in range(len(jsp.job))])
        cmap = pylab.cm.get_cmap('jet', len(jsp.machine) + 1)
        cmap.set_under(color='w')
        pylab.imshow(print_schedule, cmap=cmap, interpolation='nearest', vmin=0)
        #pylab.colorbar()
        pylab.show()
    out = ''
    if solver.is_sat():
        out = str(schedule)
    out += ('\nNodes: ' + str(solver.getNodes()))
    return out
# Default command-line parameters for this example.
default = {'solver': 'Mistral', 'data': 'data/tiny_jsp.txt', 'print': 'no', 'verbose': 1, 'tcutoff': 3}
if __name__ == '__main__':
    # NOTE(review): `input` here is presumably Numberjack's parameter parser
    # (brought in by `from Numberjack import *`), shadowing the builtin --
    # confirm.
    param = input(default)
    print(solve(param))
|
eomahony/Numberjack
|
examples/JobshopSimple.py
|
Python
|
lgpl-2.1
| 4,522
|
# -*- coding: utf-8 -*-
# Empty - see:
#
# http://groups.google.com/group/google-appengine/msg/88bfebe39815fbf5
# Copyright 2011 Felix E. Klee <felix.klee@inka.de>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
|
feklee/realitybuilder
|
deployed/demo/__init__.py
|
Python
|
apache-2.0
| 710
|
# Copyright 2013 CentRin Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_variables # noqa
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.vgpu \
import utils as instance_utils
from openstack_dashboard.dashboards.project.vgpu.workflows \
import create_instance
class SetFlavorChoiceAction(workflows.Action):
    """Workflow action letting the user pick a new flavor for a resize."""

    # Hidden field carrying the instance's current flavor id.
    old_flavor_id = forms.CharField(required=False, widget=forms.HiddenInput())
    # Read-only display of the current flavor's name.
    old_flavor_name = forms.CharField(
        label=_("Old Flavor"),
        widget=forms.TextInput(attrs={'readonly': 'readonly'}),
        required=False,
    )
    flavor = forms.ThemableChoiceField(
        label=_("New Flavor"),
        help_text=_("Choose the flavor to launch."))

    class Meta(object):
        name = _("Flavor Choice")
        slug = 'flavor_choice'
        help_text_template = ("admin/vgpu/"
                              "_flavors_and_quotas.html")

    def populate_flavor_choices(self, request, context):
        """Build the flavor dropdown, excluding the instance's current flavor."""
        old_flavor_id = context.get('old_flavor_id')
        flavors = context.get('flavors').values()
        # Remove current flavor from the list of flavor choices
        flavors = [flavor for flavor in flavors if flavor.id != old_flavor_id]
        if len(flavors) > 1:
            flavors = instance_utils.sort_flavor_list(request, flavors)
        if flavors:
            flavors.insert(0, ("", _("Select a New Flavor")))
        else:
            flavors.insert(0, ("", _("No flavors available")))
        return flavors

    def get_help_text(self, extra_context=None):
        """Add quota usage and flavor data (as JSON) to the help template context."""
        extra = {} if extra_context is None else dict(extra_context)
        try:
            extra['usages'] = api.nova.tenant_absolute_limits(self.request,
                                                              reserved=True)
            extra['usages_json'] = json.dumps(extra['usages'])
            flavors = json.dumps([f._info for f in
                                  instance_utils.flavor_list(self.request)])
            extra['flavors'] = flavors
            extra['resize_instance'] = True
        except Exception:
            # Best effort: quota info is auxiliary help text only.
            exceptions.handle(self.request,
                              _("Unable to retrieve quota information."))
        return super(SetFlavorChoiceAction, self).get_help_text(extra)
class SetFlavorChoice(workflows.Step):
    """Workflow step wiring SetFlavorChoiceAction into the resize workflow."""
    action_class = SetFlavorChoiceAction
    depends_on = ("instance_id",)
    contributes = ("old_flavor_id", "old_flavor_name", "flavors", "flavor")
class ResizeInstance(workflows.Workflow):
    """Horizon workflow that resizes a vGPU instance to a new flavor."""
    slug = "resize_instance"
    name = _("Resize VGPUInstance")
    finalize_button_name = _("Resize")
    success_message = _('Request for resizing of instance "%s" '
                        'has been submitted.')
    failure_message = _('Unable to resize instance "%s".')
    success_url = "horizon:admin:vgpu:index"
    default_steps = (SetFlavorChoice, create_instance.SetAdvanced)

    def format_status_message(self, message):
        # Substitute the instance name (or a placeholder) into the
        # success/failure message templates above.
        return message % self.context.get('name', 'unknown instance')

    @sensitive_variables('context')
    def handle(self, request, context):
        """Issue the resize request; True on success, False on failure."""
        instance_id = context.get('instance_id', None)
        flavor = context.get('flavor', None)
        disk_config = context.get('disk_config', None)
        try:
            api.nova.server_resize(request, instance_id, flavor, disk_config)
            return True
        except Exception:
            exceptions.handle(request)
            return False
|
xuweiliang/Codelibrary
|
openstack_dashboard/dashboards/admin/vgpu/workflows/resize_instance.py
|
Python
|
apache-2.0
| 4,197
|
"""
id custom locale file.
"""
# Indonesian ("id") locale overrides consumed by pendulum's formatter.
translations = {
    # Unit names that differ from the generated locale data.
    "units": {"few_second": "beberapa detik"},
    # Relative-time phrasings; "{}" is replaced with the humanized delta.
    "ago": "{} yang lalu",
    "from_now": "dalam {}",
    "after": "{0} kemudian",
    "before": "{0} yang lalu",
    # Named date/time format patterns (moment.js-style tokens).
    "date_formats": {
        "LTS": "HH:mm:ss",
        "LT": "HH:mm",
        "LLLL": "dddd [d.] D. MMMM YYYY HH:mm",
        "LLL": "D. MMMM YYYY HH:mm",
        "LL": "D. MMMM YYYY",
        "L": "DD/MM/YYYY",
    },
}
|
sdispater/pendulum
|
pendulum/locales/id/custom.py
|
Python
|
mit
| 434
|
import math
class Solution:
    def mirrorReflection(self, p: int, q: int) -> int:
        """Return the receptor (0, 1, or 2) the laser hits first.

        In a p x p mirrored square with a ray of slope q/p, reduce the
        ratio and decide by parity of the reduced sides (LeetCode 858).
        """
        common = math.gcd(p, q)
        side = p // common
        climb = q // common
        if side % 2 == 0:
            # Reduced width even (so reduced height odd): receptor 2.
            return 2
        # Reduced width odd: receptor 1 if the height is odd, else 0.
        return 1 if climb % 2 == 1 else 0
|
jiadaizhao/LeetCode
|
0801-0900/0858-Mirror Reflection/0858-Mirror Reflection.py
|
Python
|
mit
| 220
|
#------------------------------------------------------------------------------
# Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
#------------------------------------------------------------------------------
"""Module for testing Simple Oracle Document Access (SODA) Database"""
import TestEnv
import cx_Oracle
import json
class TestCase(TestEnv.BaseTestCase):
    """Tests for cx_Oracle's SODA database object (requires a live DB)."""

    def __dropExistingCollections(self, sodaDatabase):
        # Helper: drop every collection so listing tests start from scratch.
        for name in sodaDatabase.getCollectionNames():
            sodaDatabase.openCollection(name).drop()

    def __verifyDocument(self, doc, rawContent, strContent=None, content=None,
            key=None, mediaType='application/json'):
        # Helper: assert the document's byte/str/object views, key and
        # media type all match expectations.
        self.assertEqual(doc.getContentAsBytes(), rawContent)
        if strContent is not None:
            self.assertEqual(doc.getContentAsString(), strContent)
        if content is not None:
            self.assertEqual(doc.getContent(), content)
        self.assertEqual(doc.key, key)
        self.assertEqual(doc.mediaType, mediaType)

    def testCreateDocumentWithJson(self):
        "test creating documents with JSON data"
        sodaDatabase = self.connection.getSodaDatabase()
        val = {"testKey1" : "testValue1", "testKey2" : "testValue2" }
        strVal = json.dumps(val)
        bytesVal = strVal.encode("UTF-8")
        key = "MyKey"
        mediaType = "text/plain"
        # dict, str and bytes payloads should all round-trip identically.
        doc = sodaDatabase.createDocument(val)
        self.__verifyDocument(doc, bytesVal, strVal, val)
        doc = sodaDatabase.createDocument(strVal, key)
        self.__verifyDocument(doc, bytesVal, strVal, val, key)
        doc = sodaDatabase.createDocument(bytesVal, key, mediaType)
        self.__verifyDocument(doc, bytesVal, strVal, val, key, mediaType)

    def testCreateDocumentWithRaw(self):
        "test creating documents with raw data"
        sodaDatabase = self.connection.getSodaDatabase()
        val = b"<html/>"
        key = "MyRawKey"
        mediaType = "text/html"
        doc = sodaDatabase.createDocument(val)
        self.__verifyDocument(doc, val)
        doc = sodaDatabase.createDocument(val, key)
        self.__verifyDocument(doc, val, key=key)
        doc = sodaDatabase.createDocument(val, key, mediaType)
        self.__verifyDocument(doc, val, key=key, mediaType=mediaType)

    def testGetCollectionNames(self):
        "test getting collection names from the database"
        sodaDatabase = self.connection.getSodaDatabase()
        self.__dropExistingCollections(sodaDatabase)
        self.assertEqual(sodaDatabase.getCollectionNames(), [])
        names = ["zCol", "dCol", "sCol", "aCol", "gCol"]
        sortedNames = list(sorted(names))
        for name in names:
            sodaDatabase.createCollection(name)
        # Names come back sorted; the string argument appears to act as a
        # start key and `limit` caps the count -- confirm against the
        # cx_Oracle SODA documentation.
        self.assertEqual(sodaDatabase.getCollectionNames(), sortedNames)
        self.assertEqual(sodaDatabase.getCollectionNames(limit=2),
                sortedNames[:2])
        self.assertEqual(sodaDatabase.getCollectionNames("a"), sortedNames)
        self.assertEqual(sodaDatabase.getCollectionNames("C"), sortedNames)
        self.assertEqual(sodaDatabase.getCollectionNames("b", limit=3),
                sortedNames[1:4])
        self.assertEqual(sodaDatabase.getCollectionNames("z"),
                sortedNames[-1:])

    def testOpenCollection(self):
        "test opening a collection"
        sodaDatabase = self.connection.getSodaDatabase()
        self.__dropExistingCollections(sodaDatabase)
        # Opening a missing collection yields None, not an error.
        coll = sodaDatabase.openCollection("CollectionThatDoesNotExist")
        self.assertEqual(coll, None)
        createdColl = sodaDatabase.createCollection("cxoTestOpenCollection")
        coll = sodaDatabase.openCollection(createdColl.name)
        self.assertEqual(coll.name, createdColl.name)
        coll.drop()

    def testRepr(self):
        "test SodaDatabase representation"
        con1 = self.connection
        con2 = TestEnv.GetConnection()
        sodaDatabase1 = con1.getSodaDatabase()
        sodaDatabase2 = con1.getSodaDatabase()
        sodaDatabase3 = con2.getSodaDatabase()
        self.assertEqual(str(sodaDatabase1), str(sodaDatabase2))
        self.assertEqual(str(sodaDatabase2), str(sodaDatabase3))

    def testNegative(self):
        "test negative cases for SODA database methods"
        sodaDatabase = self.connection.getSodaDatabase()
        self.assertRaises(TypeError, sodaDatabase.createCollection)
        self.assertRaises(TypeError, sodaDatabase.createCollection, 1)
        self.assertRaises(cx_Oracle.DatabaseError,
                sodaDatabase.createCollection, None)
        self.assertRaises(TypeError, sodaDatabase.getCollectionNames, 1)
if __name__ == "__main__":
    # Run through the shared harness, which handles DB connection setup.
    TestEnv.RunTestCases()
|
cmsdaq/hltd
|
lib/cx_Oracle-7.1/test/SodaDatabase.py
|
Python
|
lgpl-3.0
| 4,698
|
# -*- coding: utf-8 -*-
"""
:mod:`gygax.irc` --- Internet Relay Chat client.
================================================
:mod:`gygax.irc` implements all functionality needed to communicate with an IRC
server. It does so using :mod:`asynchat` from the Python Standard Library,
which handles all asynchronous network complexities and leaves :mod:`gygax.irc`
only with the task of handling the IRC protocol.
:mod:`gygax.irc` defines the abstract class :class:`Client`, which can be
subclassed and provided an implementation for the :func:`Client.handle` method
to build IRC bots or custom clients.
"""
import asynchat
import asyncore
import logging
import time
log = logging.getLogger(__name__)
class Client(asynchat.async_chat):
    """An abstract class which implements a minimal, but functional subset of
    the IRC client protocol.

    :param str nick: The client's nickname to use. Also used as the username.
    :param str real: The client's realname to use.

    Handles most IRC messages itself, but on private messages (messages sent to
    the client directly or to a channel the client is on) calls the
    :meth:`handle` abstract method. This method can be overridden by subclasses
    to create IRC bots or custom clients.
    """

    @property
    def nick(self):
        """The client's nickname. Also used as the username."""
        return self._nick

    @property
    def real(self):
        """The client's realname."""
        return self._real

    @property
    def channels(self):
        """A :func:`set` containing the channels the client is connected to."""
        return self._channels

    def __init__(self, nick, real):
        """Creates a new IRC client and initializes its attributes."""
        super().__init__()
        self.set_terminator(b"\r\n")  # IRC messages are CRLF-terminated
        self._incoming = []  # buffered byte chunks of the current line
        self._nick = nick
        self._real = real
        self._channels = set()
        self._password = None
        self._autosend = list()

    def run(self, address, channels=None, password=None, autosend=None):
        """Connect to an IRC server and start the client's main loop.

        :param tuple address: A tuple ``(host, port)`` with the address of the
            IRC server to connect to.
        :param iter channels: The list of channels to join on startup.
        :param str password: The optional connection password to use.
        :param iter autosend: The list of messages to send after successful
            registration with the server, but before joining any channels.
        """
        self._channels = channels or set()
        self._password = password
        self._autosend = autosend or list()
        log.info("connecting to {}:{}...".format(*address))
        self.create_socket()
        self.connect(address)
        asyncore.loop()  # blocks until the connection closes

    def handle_connect(self):
        # Register with the server: optional PASS, then NICK and USER.
        log.info("connected")
        log.info("registering as {}...".format(self.nick))
        if self._password:
            self._command("PASS", self._password)
        self._command("NICK", self.nick)
        self._command("USER", self.nick, "0", "*", self.real)

    def _command(self, command, *args):
        """Send a command to the IRC server."""
        message_parts = [command]
        if args:
            args = list(args)
            # A last argument containing spaces must be sent as a single
            # ":"-prefixed trailing parameter.
            if " " in args[-1]:
                args[-1] = ":" + args[-1]
            message_parts += args
        self._push(" ".join(message_parts))

    def _push(self, message):
        # Encode and send one raw line, truncating to the 510-byte payload
        # limit without splitting a multi-byte UTF-8 sequence.
        message = message.encode("utf-8")
        if len(message) > 510:
            newlen = 510
            while message[newlen] & 0xc0 == 0x80: # UTF-8 continuation byte
                newlen -= 1
            log.warning("truncating message from {} to {} bytes".format(
                len(message), newlen))
            message = message[:newlen]
        log.debug("pushing {}".format(message.decode("utf-8")))
        self.push(message + b"\r\n")

    def message(self, recipient, text):
        """Send a private message to the IRC network.

        :param str recipient: The recipient of the message. Can be a user or a
            channel name.
        :param str text: The text of the private message to send.
        """
        self._command("PRIVMSG", recipient, text)

    def collect_incoming_data(self, data):
        # asynchat callback: accumulate bytes until the CRLF terminator.
        self._incoming.append(data)

    def found_terminator(self):
        # A complete line arrived: decode, parse, and dispatch to the
        # matching _on_<COMMAND> handler (or ignore if none exists).
        message = b"".join(self._incoming).decode("utf-8")
        self._incoming = []
        log.debug("received {}".format(message))
        prefix, command, params = _parse_message(message)
        def _ignore(*args):
            log.debug("ignoring unhandled command {}".format(command))
        getattr(self, "_on_" + command, _ignore)(prefix, params)

    def join(self, *channels):
        """Join IRC channels.

        :param str \*channels: Positional :func:`str` arguments containing
            the channels to join.
        """
        for channel in channels:
            log.info("joining channel {}...".format(channel))
            self._command("JOIN", channel)

    def quit(self, message="Quit"):
        """Terminate the session with the IRC network.

        :param str message: The quit message to send to the IRC network.
        """
        self._command("QUIT", message)

    def handle(self, sender, recipient, text):
        """Handles a private message received from the IRC network.

        An abstract method called when the client received a private message
        from the IRC network. This can be overridden to respond or take some
        action when a private message is received.

        Note that messages sent to a channel are also considered private
        messages.

        Default implementation raises a
        :class:`exceptions.NotImplementedError`.

        :param str sender: The sender of the private message.
        :param str recipient: The recipient of the private message. Either only
            the IRC client or a channel the IRC client is on.
        :param str text: The text contained in the private message.
        """
        raise NotImplementedError("must be implemented in subclass")

    def tick(self):
        """Called on every PING message from the server.

        Default implementation does nothing. Can be overridden to perform
        periodic tasks.
        """
        pass

    def handle_close(self):
        log.info("connection closed")
        self.close()

    # The following functions are invoked when the corresponding command is
    # received from the IRC server.

    def _on_004(self, prefix, params):
        # The server sends Replies 001 to 004 upon successful registration.
        log.info("registered")
        for message in self._autosend:
            self._push(message)
            time.sleep(1) # give the server time to handle the message
        self.join(*self.channels)
        self._channels = set() # Will be filled by _on_JOIN with channels
                               # successfully joined.

    def _on_JOIN(self, prefix, params):
        # Track our own successful joins; ignore other users joining.
        nick, _, _ = split_name(prefix)
        if nick == self.nick:
            log.info("joined channel {}".format(params[0]))
            self.channels.add(params[0])

    def _on_PING(self, prefix, params):
        # Answer keep-alives and drive the periodic tick() hook.
        self._command("PONG", ":" + params[0])
        self.tick()

    def _on_INVITE(self, prefix, params):
        # Auto-accept channel invitations.
        nick, channel = params
        log.info("invited to join channel {} by {}".format(channel, prefix))
        self.join(channel)

    def _on_KICK(self, prefix, params):
        # Drop the channel from our set if we were the one kicked.
        channel, nick, message = params
        if nick == self.nick:
            log.info("kicked from channel {} by {}".format(channel, prefix))
            self.channels.remove(channel)

    def _on_PRIVMSG(self, prefix, params):
        # Forward private/channel messages to the subclass handler.
        self.handle(prefix, params[0], "".join(params[1:]).lstrip(":"))
def _parse_message(message):
"""Parses the message into a ``(prefix, command, params)`` tuple."""
# From http://tools.ietf.org/html/rfc2812#section-2.3.1:
# message = [ ":" prefix SPACE ] command [ params ]
# params = *14( SPACE middle ) [ SPACE ":" trailing ]
# =/ 14( SPACE middle ) [ SPACE [ ":" ] trailing ]
prefix = None
if message.startswith(":"):
prefix, message = _pop_message(message)
prefix = prefix[1:]
command, message = _pop_message(message)
params = list()
while message and not message.startswith(":") and len(params) < 14:
middle, message = _pop_message(message)
params.append(middle)
if message: # trailing
if message.startswith(":"):
message = message[1:]
params.append(message)
params = tuple(params) # make params read-only
return prefix, command, params or None
def _pop_message(message):
"""Pops the top-most space-separated component from the message."""
if " " in message:
component, message = message.split(" ", 1)
return component, message.lstrip(" ")
return message, ""
def split_name(name):
    """Split an IRC ``nick!user@host`` prefix into ``(nick, user, host)``.

    Raises ValueError when either separator is missing, exactly like a
    required-unpack ``str.split``.
    """
    nickname, remainder = name.split("!", 1)
    username, hostname = remainder.split("@", 1)
    return nickname, username, hostname
|
thsnr/gygax
|
gygax/irc.py
|
Python
|
mit
| 9,091
|
# These are the instance-dependent settings. Copy this file to
# secrets.py and apply the desired settings.
#
# Only one variable is required here, SECRET_KEY. Fill this using:
# http://www.miniwebtool.com/django-secret-key-generator/
SECRET_KEY = ''
# In your development setup, you can leave the following variables
# unset:
#STATIC_ROOT =
#MEDIA_ROOT =
#DEBUG =
#DATABASES =
#EMAIL_BACKEND =
#EMAIL_USE_TLS =
#EMAIL_HOST =
#EMAIL_PORT =
#EMAIL_HOST_USER =
#EMAIL_HOST_PASSWORD =
#SESSION_COOKIE_DOMAIN =
#CSRF_COOKIE_DOMAIN =
#SECURE_HSTS_SECONDS =
|
SAlkhairy/trabd
|
trabd/secrets.template.py
|
Python
|
agpl-3.0
| 557
|
# Copyright 2006-2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from portage import os
from portage.tests import TestCase
from portage.env.config import PortageModulesFile
from tempfile import mkstemp
class PortageModulesFileTestCase(TestCase):
    """Round-trip test for PortageModulesFile key=value parsing."""

    keys = ['foo.bar', 'baz', 'bob', 'extra_key']
    # NOTE(review): both entries are the empty string, so the second zip
    # pairing overwrites the first in self.items -- presumably intended to
    # exercise rejection of empty keys; confirm.
    invalid_keys = ['', ""]
    modules = ['spanky', 'zmedico', 'antarus', 'ricer', '5', '6']

    def setUp(self):
        # Pair each key (valid and invalid) with a module name.
        self.items = {}
        for k, v in zip(self.keys + self.invalid_keys, self.modules):
            self.items[k] = v

    def testPortageModulesFile(self):
        # Write the file, load it back, and check that valid keys round-trip
        # while invalid (empty) keys are dropped by the parser.
        self.BuildFile()
        f = PortageModulesFile(self.fname)
        f.load()
        for k in self.keys:
            self.assertEqual(f[k], self.items[k])
        for ik in self.invalid_keys:
            self.assertEqual(False, ik in f)
        self.NukeFile()

    def BuildFile(self):
        # Helper: write self.items as key=value lines to a temp file.
        fd, self.fname = mkstemp()
        f = os.fdopen(fd, 'w')
        for k, v in self.items.items():
            f.write('%s=%s\n' % (k, v))
        f.close()

    def NukeFile(self):
        # Helper: remove the temp file created by BuildFile.
        os.unlink(self.fname)
|
dol-sen/portage
|
pym/portage/tests/env/config/test_PortageModulesFile.py
|
Python
|
gpl-2.0
| 999
|
from timeit import default_timer as timer
import socket
import sys
# NOTE(review): this is Python 2 code (`print` statements below).
class IRCMessage(object):
    """Parses one raw IRC line and renders it for console display."""

    # type -> lambda producing the console line (None suppresses output).
    # NOTE(review): these lambdas read the module-global `irc` connection,
    # which is not defined in this excerpt -- confirm it exists at runtime.
    consoleText = {
        'INVITE': lambda msg: "# * %s invited %s to %s" % (msg.user, msg.target, msg.channel),
        'JOIN': lambda msg: "%s * %s has joined" % (msg.channel.ljust(15), msg.user),
        'MSG ACTION': lambda msg: "%s * %s %s" % (msg.channel.ljust(15), msg.user, msg.content),
        'MSG': lambda msg: "%s [%s] %s" % (msg.channel.ljust(15), msg.user, msg.content),
        'NICK': lambda msg: "# * %s is now known as %s" % (msg.user, msg.content),
        'NOTICE': lambda msg: '# %s %s' % (msg.subject.rjust(13), msg.content),
        'NUMERIC': lambda msg: "# %03d * %s" % (msg.numeric, msg.content),
        'PART': lambda msg: "%s * %s has left" % (msg.channel.ljust(15), msg.user),
        'KICK': lambda msg: "%s * %s has been kicked" % (msg.channel.ljust(15), msg.user), # see what Hexchat displays for this!
        'TOPIC': lambda msg: "%s * TOPIC: %s" % (msg.channel.ljust(15), msg.content),
        'PING': lambda msg: None,
        'PM ACTION': lambda msg: "[%s > %s] * %s %s" % (msg.user, irc.nick, msg.user, msg.content),
        'PM': lambda msg: "[%s > %s] %s" % (msg.user, irc.nick, msg.content) if msg.user != 'DM' else None,
        'QUIT': lambda msg: "# * %s has quit" % (msg.user) + (" (%s)" % (msg.content) if msg.content else ""),
        'UNKNOWN': lambda msg: "Error parsing message: " + msg.raw
    }

    def __init__(self, rawMsg):
        # Parse `rawMsg`; any IndexError during parsing resets the message
        # to type UNKNOWN with the raw text as content.
        self.type = 'UNKNOWN'
        self.channel = None
        self.user = None
        self.subject = None
        self.numeric = None
        self.raw = rawMsg

        def getContent(ignoredParts, rawMsg=rawMsg):
            # Everything after `ignoredParts` space-separated tokens, with
            # a leading ':' stripped. (The rawMsg default arg is unused.)
            return self.raw.split(' ', ignoredParts)[-1].lstrip(':')
        try:
            splitLine = self.raw.split()
            self.type = splitLine[0]
            if self.type == 'PING':
                # Side effect: server PINGs are answered right here.
                self.target = splitLine[1]
                self.content = self.target
                irc.pong(self.target)
                return
            if self.type == 'NOTICE':
                self.subject = splitLine[1]
                self.content = getContent(2)
                return
            if self.type == ':' + irc.server:
                # Numeric server reply, e.g. ":server 001 nick :welcome".
                # NOTE(review): `irc_numerics` is a module-global mapping
                # not visible in this excerpt.
                self.type = 'NUMERIC'
                self.numeric = int(splitLine[1])
                self.subject = irc_numerics[self.numeric]
                self.user = splitLine[2]
                self.content = getContent(3)
                return
            # Remaining forms: ":nick!user@host COMMAND ..."
            self.type = splitLine[1]
            self.user = splitLine[0].partition( '!' )[0][1:]
            if self.type == 'PRIVMSG':
                if splitLine[2][0] == '#': # message on a channel
                    self.type = 'MSG'
                    self.channel = splitLine[2]
                else:
                    self.type = 'PM'
                    self.channel = self.user
                # CTCP ACTION ("/me") and CTCP PING are wrapped in \x01.
                if splitLine[3] == ':\x01ACTION' and splitLine[-1][-1] == '\x01':
                    self.type += ' ACTION'
                    self.content = self.raw.split(' ', 4)[-1][:-1]
                elif splitLine[3] == ':\x01PING' and splitLine[-1][-1] == '\x01' and self.type == 'PM':
                    self.type = 'PING'
                    self.target = self.user
                    self.content = self.raw.split(' ', 4)[-1][:-1]
                    irc.msg(self.target, '\x01PONG %s\x01' % (self.content))
                else:
                    self.content = getContent(3)
                return
            if self.type == 'NICK':
                self.content = getContent(2)
                # Track our own nick changes on the shared connection.
                if self.user == irc.nick:
                    irc.nick = splitLine[2]
                return
            if self.type == 'QUIT':
                self.content = getContent(2)
                return
            self.channel = splitLine[2].lstrip(':')
            if self.type == 'PART':
                self.content = getContent(3)
                return
            if self.type == 'JOIN':
                return
            if self.type == 'TOPIC':
                self.content = getContent(3)
                return
            if self.type == 'INVITE':
                self.target = splitLine[2]
                self.channel = splitLine[3]
                return
            if self.type == 'KICK':
                self.content = getContent(4)
                return
            # Unrecognized command: force the UNKNOWN fallback below.
            raise IndexError
        except IndexError:
            self.type = 'UNKNOWN'
            self.channel = None
            self.user = None
            self.subject = None
            self.numeric = None
            self.content = self.raw

    def printToConsole(self):
        # Render via consoleText. NOTE(review): the bare except exits the
        # whole program on any rendering error -- confirm this is intended.
        try:
            text = IRCMessage.consoleText[self.type](self)
            if text:
                print text
        except:
            print "ERROR: Unable to parse message:"
            print ' '.join([
                "type: " + self.type, "channel: " + self.channel if self.channel else 'no channel',
                "user: " + self.user if self.user else 'no user',
                "subject: " + self.subject if self.subject else 'no subject',
                "numeric: " + self.numeric if self.numeric else 'no numeric',
                "content: '%s'" % (self.content) if self.content else 'no content',
                "raw: '%s'" % (self.raw)])
            sys.exit()
class IRCConnection(object):
def __init__(self):
self.s = socket.socket(socket.AF_INET)
self.queuedMsg = ''
def send(self, message):
self.s.send(message + '\r\n')
def getMsg(self):
# self.queuedMsg += self.s.recv(2040)
self.queuedMsg += self.s.recv(65536)
# print self.queuedMsg, '\r\n' in self.queuedMsg
if not self.queuedMsg:
return None
queuedMsg = self.queuedMsg.rpartition('\r\n')
self.queuedMsg = queuedMsg[2]
return [IRCMessage(msg) for msg in filter(None, queuedMsg[0].split('\r\n'))]
def waitForMsg(self):
receivedMsgs = None
while not receivedMsgs:
receivedMsgs = self.getMsg()
return receivedMsgs
def grabMsg(self, returnCondition, excludeCondition=None, abortCondition=lambda msg: False, waitForAbortCondition=False):
if not excludeCondition:
excludeCondition = returnCondition
discardedMsgs = ''
output = []
looping = True
while looping:
for msg in self.waitForMsg():
# print "raw: " + msg.raw, "type: " + msg.type, "return:", returnCondition(msg), "exclude:", excludeCondition(msg), "abort:", abortCondition(msg)
# print "raw: " + msg.raw, "subject: " + msg.subject, "something:", msg.content.partition(' ')[0]
if returnCondition(msg):
output.append(msg)
if not waitForAbortCondition:
looping = False
if not excludeCondition(msg):
discardedMsgs += msg.raw + '\r\n'
if abortCondition(msg):
looping = False
self.queuedMsg = discardedMsgs + self.queuedMsg
return output
def connect(self, server, username, realname):
self.server = server
self.channels = []
self.s.connect((server, 6667))
self.send('USER %s - - :%s' % (username, realname))
self.nick = ''
self.changeNick(username)
def changeNick(self, nickname):
self.send('NICK ' + nickname)
self.lastNickTried = nickname
def join(self, channel):
self.channels.append(channel)
self.send('JOIN ' + channel)
def part(self, channel):
self.channels.remove(channel)
self.send('PART' + channel)
def msg(self, target, message):
self.send('PRIVMSG %s :%s' % (target, message))
if target[0] == '#':
print "%s [%s] %s" % (target.ljust(15), irc.nick, message)
else:
print "[%s > %s] %s" % (irc.nick, target, message)
def action(self, target, message):
self.msg(target, '\x01%s %s\x01' % ('ACTION', message))
if target[0] == '#':
print "%s * %s %s" % (target.ljust(15), irc.nick, message)
else:
print "[%s > %s] * %s %s" % (irc.nick, target, irc.nick, message)
def ping(self, target=None):
if target:
self.send('PING ' + target)
else:
self.send('PING ' + self.server)
start = timer()
self.waitForMsg()
return timer() - start
def pong(self, message):
self.send('PONG ' + message)
def quit(self, reason=None):
if reason:
self.send('QUIT :' + reason)
else:
self.send('QUIT')
return
def __del__(self):
# import socket # WTF???
try:
self.quit()
self.s.close()
except socket.error:
pass
irc = IRCConnection()
# Taken from https://www.alien.net.au/irc/
# todo: reformat list from original and remove "obselete" entries
# Mapping of IRC numeric reply codes to their mnemonic names (RFC 2812 plus
# common IRCd extensions).  Commented-out entries record alternate names
# other server implementations use for the same numeric.
irc_numerics = {
    1: 'RPL_WELCOME',
    2: 'RPL_YOURHOST',
    3: 'RPL_CREATED',
    4: 'RPL_MYINFO',
    # 5: 'RPL_BOUNCE',
    5: 'RPL_ISUPPORT',
    6: 'RPL_MAP',
    7: 'RPL_MAPEND',
    8: 'RPL_SNOMASK',
    9: 'RPL_STATMEMTOT',
    10: 'RPL_BOUNCE',
    # 10: 'RPL_STATMEM',
    14: 'RPL_YOURCOOKIE',
    15: 'RPL_MAP',
    16: 'RPL_MAPMORE',
    17: 'RPL_MAPEND',
    42: 'RPL_YOURID',
    43: 'RPL_SAVENICK',
    50: 'RPL_ATTEMPTINGJUNC',
    51: 'RPL_ATTEMPTINGREROUTE',
    200: 'RPL_TRACELINK',
    201: 'RPL_TRACECONNECTING',
    202: 'RPL_TRACEHANDSHAKE',
    203: 'RPL_TRACEUNKNOWN',
    204: 'RPL_TRACEOPERATOR',
    205: 'RPL_TRACEUSER',
    206: 'RPL_TRACESERVER',
    207: 'RPL_TRACESERVICE',
    208: 'RPL_TRACENEWTYPE',
    209: 'RPL_TRACECLASS',
    210: 'RPL_TRACERECONNECT',
    # 210: 'RPL_STATS',
    211: 'RPL_STATSLINKINFO',
    212: 'RPL_STATSCOMMANDS',
    213: 'RPL_STATSCLINE',
    214: 'RPL_STATSNLINE',
    215: 'RPL_STATSILINE',
    216: 'RPL_STATSKLINE',
    217: 'RPL_STATSQLINE',
    # 217: 'RPL_STATSPLINE',
    218: 'RPL_STATSYLINE',
    219: 'RPL_ENDOFSTATS',
    220: 'RPL_STATSPLINE',
    # 220: 'RPL_STATSBLINE',
    221: 'RPL_UMODEIS',
    222: 'RPL_MODLIST',
    # 222: 'RPL_SQLINE_NICK',
    # 222: 'RPL_STATSBLINE',
    223: 'RPL_STATSELINE',
    # 223: 'RPL_STATSGLINE',
    224: 'RPL_STATSFLINE',
    # 224: 'RPL_STATSTLINE',
    225: 'RPL_STATSDLINE',
    # 225: 'RPL_STATSZLINE',
    # 225: 'RPL_STATSELINE',
    226: 'RPL_STATSCOUNT',
    # 226: 'RPL_STATSNLINE',
    227: 'RPL_STATSGLINE',
    # 227: 'RPL_STATSVLINE',
    228: 'RPL_STATSQLINE',
    231: 'RPL_SERVICEINFO',
    232: 'RPL_ENDOFSERVICES',
    # 232: 'RPL_RULES',
    233: 'RPL_SERVICE',
    234: 'RPL_SERVLIST',
    235: 'RPL_SERVLISTEND',
    236: 'RPL_STATSVERBOSE',
    237: 'RPL_STATSENGINE',
    238: 'RPL_STATSFLINE',
    239: 'RPL_STATSIAUTH',
    240: 'RPL_STATSVLINE',
    # 240: 'RPL_STATSXLINE',
    241: 'RPL_STATSLLINE',
    242: 'RPL_STATSUPTIME',
    243: 'RPL_STATSOLINE',
    244: 'RPL_STATSHLINE',
    245: 'RPL_STATSSLINE',
    246: 'RPL_STATSPING',
    # 246: 'RPL_STATSTLINE',
    # 246: 'RPL_STATSULINE',
    247: 'RPL_STATSBLINE',
    # 247: 'RPL_STATSXLINE',
    # 247: 'RPL_STATSGLINE',
    248: 'RPL_STATSULINE',
    # 248: 'RPL_STATSDEFINE',
    249: 'RPL_STATSULINE',
    # 249: 'RPL_STATSDEBUG',
    250: 'RPL_STATSDLINE',
    # 250: 'RPL_STATSCONN',
    251: 'RPL_LUSERCLIENT',
    252: 'RPL_LUSEROP',
    253: 'RPL_LUSERUNKNOWN',
    254: 'RPL_LUSERCHANNELS',
    255: 'RPL_LUSERME',
    256: 'RPL_ADMINME',
    257: 'RPL_ADMINLOC1',
    258: 'RPL_ADMINLOC2',
    259: 'RPL_ADMINEMAIL',
    261: 'RPL_TRACELOG',
    262: 'RPL_TRACEPING',
    # 262: 'RPL_TRACEEND',
    263: 'RPL_TRYAGAIN',
    265: 'RPL_LOCALUSERS',
    266: 'RPL_GLOBALUSERS',
    267: 'RPL_START_NETSTAT',
    268: 'RPL_NETSTAT',
    269: 'RPL_END_NETSTAT',
    270: 'RPL_PRIVS',
    271: 'RPL_SILELIST',
    272: 'RPL_ENDOFSILELIST',
    273: 'RPL_NOTIFY',
    274: 'RPL_ENDNOTIFY',
    # 274: 'RPL_STATSDELTA',
    275: 'RPL_STATSDLINE',
    276: 'RPL_VCHANEXIST',
    277: 'RPL_VCHANLIST',
    278: 'RPL_VCHANHELP',
    280: 'RPL_GLIST',
    281: 'RPL_ENDOFGLIST',
    # 281: 'RPL_ACCEPTLIST',
    282: 'RPL_ENDOFACCEPT',
    # 282: 'RPL_JUPELIST',
    283: 'RPL_ALIST',
    # 283: 'RPL_ENDOFJUPELIST',
    284: 'RPL_ENDOFALIST',
    # 284: 'RPL_FEATURE',
    285: 'RPL_GLIST_HASH',
    # 285: 'RPL_CHANINFO_HANDLE',
    # 285: 'RPL_NEWHOSTIS',
    286: 'RPL_CHANINFO_USERS',
    # 286: 'RPL_CHKHEAD',
    287: 'RPL_CHANINFO_CHOPS',
    # 287: 'RPL_CHANUSER',
    288: 'RPL_CHANINFO_VOICES',
    # 288: 'RPL_PATCHHEAD',
    289: 'RPL_CHANINFO_AWAY',
    # 289: 'RPL_PATCHCON',
    290: 'RPL_CHANINFO_OPERS',
    # 290: 'RPL_HELPHDR',
    # 290: 'RPL_DATASTR',
    291: 'RPL_CHANINFO_BANNED',
    # 291: 'RPL_HELPOP',
    # 291: 'RPL_ENDOFCHECK',
    292: 'RPL_CHANINFO_BANS',
    # 292: 'RPL_HELPTLR',
    293: 'RPL_CHANINFO_INVITE',
    # 293: 'RPL_HELPHLP',
    294: 'RPL_CHANINFO_INVITES',
    # 294: 'RPL_HELPFWD',
    295: 'RPL_CHANINFO_KICK',
    # 295: 'RPL_HELPIGN',
    296: 'RPL_CHANINFO_KICKS',
    299: 'RPL_END_CHANINFO',
    300: 'RPL_NONE',
    301: 'RPL_AWAY',
    # 301: 'RPL_AWAY',
    302: 'RPL_USERHOST',
    303: 'RPL_ISON',
    304: 'RPL_TEXT',
    305: 'RPL_UNAWAY',
    306: 'RPL_NOWAWAY',
    307: 'RPL_USERIP',
    # 307: 'RPL_WHOISREGNICK',
    # 307: 'RPL_SUSERHOST',
    308: 'RPL_NOTIFYACTION',
    # 308: 'RPL_WHOISADMIN',
    # 308: 'RPL_RULESSTART',
    309: 'RPL_NICKTRACE',
    # 309: 'RPL_WHOISSADMIN',
    # 309: 'RPL_ENDOFRULES',
    # 309: 'RPL_WHOISHELPER',
    310: 'RPL_WHOISSVCMSG',
    # 310: 'RPL_WHOISHELPOP',
    # 310: 'RPL_WHOISSERVICE',
    311: 'RPL_WHOISUSER',
    312: 'RPL_WHOISSERVER',
    313: 'RPL_WHOISOPERATOR',
    314: 'RPL_WHOWASUSER',
    315: 'RPL_ENDOFWHO',
    316: 'RPL_WHOISCHANOP',
    317: 'RPL_WHOISIDLE',
    318: 'RPL_ENDOFWHOIS',
    319: 'RPL_WHOISCHANNELS',
    320: 'RPL_WHOISVIRT',
    # 320: 'RPL_WHOIS_HIDDEN',
    # 320: 'RPL_WHOISSPECIAL',
    321: 'RPL_LISTSTART',
    322: 'RPL_LIST',
    323: 'RPL_LISTEND',
    324: 'RPL_CHANNELMODEIS',
    325: 'RPL_UNIQOPIS',
    # 325: 'RPL_CHANNELPASSIS',
    326: 'RPL_NOCHANPASS',
    327: 'RPL_CHPASSUNKNOWN',
    328: 'RPL_CHANNEL_URL',
    329: 'RPL_CREATIONTIME',
    330: 'RPL_WHOWAS_TIME',
    # 330: 'RPL_WHOISACCOUNT',
    331: 'RPL_NOTOPIC',
    332: 'RPL_TOPIC',
    333: 'RPL_TOPICWHOTIME',
    334: 'RPL_LISTUSAGE',
    # 334: 'RPL_COMMANDSYNTAX',
    # 334: 'RPL_LISTSYNTAX',
    335: 'RPL_WHOISBOT',
    # 338: 'RPL_CHANPASSOK',
    338: 'RPL_WHOISACTUALLY',
    339: 'RPL_BADCHANPASS',
    340: 'RPL_USERIP',
    341: 'RPL_INVITING',
    342: 'RPL_SUMMONING',
    345: 'RPL_INVITED',
    346: 'RPL_INVITELIST',
    347: 'RPL_ENDOFINVITELIST',
    348: 'RPL_EXCEPTLIST',
    349: 'RPL_ENDOFEXCEPTLIST',
    351: 'RPL_VERSION',
    352: 'RPL_WHOREPLY',
    353: 'RPL_NAMREPLY',
    354: 'RPL_WHOSPCRPL',
    355: 'RPL_NAMREPLY_',
    357: 'RPL_MAP',
    358: 'RPL_MAPMORE',
    359: 'RPL_MAPEND',
    361: 'RPL_KILLDONE',
    362: 'RPL_CLOSING',
    363: 'RPL_CLOSEEND',
    364: 'RPL_LINKS',
    365: 'RPL_ENDOFLINKS',
    366: 'RPL_ENDOFNAMES',
    367: 'RPL_BANLIST',
    368: 'RPL_ENDOFBANLIST',
    369: 'RPL_ENDOFWHOWAS',
    371: 'RPL_INFO',
    372: 'RPL_MOTD',
    373: 'RPL_INFOSTART',
    374: 'RPL_ENDOFINFO',
    375: 'RPL_MOTDSTART',
    376: 'RPL_ENDOFMOTD',
    377: 'RPL_KICKEXPIRED',
    # 377: 'RPL_SPAM',
    378: 'RPL_BANEXPIRED',
    # 378: 'RPL_WHOISHOST',
    # 378: 'RPL_MOTD',
    379: 'RPL_KICKLINKED',
    # 379: 'RPL_WHOISMODES',
    380: 'RPL_BANLINKED',
    # 380: 'RPL_YOURHELPER',
    381: 'RPL_YOUREOPER',
    382: 'RPL_REHASHING',
    383: 'RPL_YOURESERVICE',
    384: 'RPL_MYPORTIS',
    385: 'RPL_NOTOPERANYMORE',
    386: 'RPL_QLIST',
    # 386: 'RPL_IRCOPS',
    387: 'RPL_ENDOFQLIST',
    # 387: 'RPL_ENDOFIRCOPS',
    388: 'RPL_ALIST',
    389: 'RPL_ENDOFALIST',
    391: 'RPL_TIME',
    # 391: 'RPL_TIME',
    # 391: 'RPL_TIME',
    # 391: 'RPL_TIME',
    392: 'RPL_USERSSTART',
    393: 'RPL_USERS',
    394: 'RPL_ENDOFUSERS',
    395: 'RPL_NOUSERS',
    396: 'RPL_HOSTHIDDEN',
    400: 'ERR_UNKNOWNERROR',
    401: 'ERR_NOSUCHNICK',
    402: 'ERR_NOSUCHSERVER',
    403: 'ERR_NOSUCHCHANNEL',
    404: 'ERR_CANNOTSENDTOCHAN',
    405: 'ERR_TOOMANYCHANNELS',
    406: 'ERR_WASNOSUCHNICK',
    407: 'ERR_TOOMANYTARGETS',
    408: 'ERR_NOSUCHSERVICE',
    # 408: 'ERR_NOCOLORSONCHAN',
    409: 'ERR_NOORIGIN',
    411: 'ERR_NORECIPIENT',
    412: 'ERR_NOTEXTTOSEND',
    413: 'ERR_NOTOPLEVEL',
    414: 'ERR_WILDTOPLEVEL',
    415: 'ERR_BADMASK',
    416: 'ERR_TOOMANYMATCHES',
    # 416: 'ERR_QUERYTOOLONG',
    419: 'ERR_LENGTHTRUNCATED',
    421: 'ERR_UNKNOWNCOMMAND',
    422: 'ERR_NOMOTD',
    423: 'ERR_NOADMININFO',
    424: 'ERR_FILEERROR',
    425: 'ERR_NOOPERMOTD',
    429: 'ERR_TOOMANYAWAY',
    430: 'ERR_EVENTNICKCHANGE',
    431: 'ERR_NONICKNAMEGIVEN',
    432: 'ERR_ERRONEUSNICKNAME',
    433: 'ERR_NICKNAMEINUSE',
    434: 'ERR_SERVICENAMEINUSE',
    # 434: 'ERR_NORULES',
    435: 'ERR_SERVICECONFUSED',
    # 435: 'ERR_BANONCHAN',
    436: 'ERR_NICKCOLLISION',
    437: 'ERR_UNAVAILRESOURCE',
    # 437: 'ERR_BANNICKCHANGE',
    438: 'ERR_NICKTOOFAST',
    # 438: 'ERR_DEAD',
    439: 'ERR_TARGETTOOFAST',
    440: 'ERR_SERVICESDOWN',
    441: 'ERR_USERNOTINCHANNEL',
    442: 'ERR_NOTONCHANNEL',
    443: 'ERR_USERONCHANNEL',
    444: 'ERR_NOLOGIN',
    445: 'ERR_SUMMONDISABLED',
    446: 'ERR_USERSDISABLED',
    447: 'ERR_NONICKCHANGE',
    449: 'ERR_NOTIMPLEMENTED',
    451: 'ERR_NOTREGISTERED',
    452: 'ERR_IDCOLLISION',
    453: 'ERR_NICKLOST',
    455: 'ERR_HOSTILENAME',
    456: 'ERR_ACCEPTFULL',
    457: 'ERR_ACCEPTEXIST',
    458: 'ERR_ACCEPTNOT',
    459: 'ERR_NOHIDING',
    460: 'ERR_NOTFORHALFOPS',
    461: 'ERR_NEEDMOREPARAMS',
    462: 'ERR_ALREADYREGISTERED',
    463: 'ERR_NOPERMFORHOST',
    464: 'ERR_PASSWDMISMATCH',
    465: 'ERR_YOUREBANNEDCREEP',
    466: 'ERR_YOUWILLBEBANNED',
    467: 'ERR_KEYSET',
    468: 'ERR_INVALIDUSERNAME',
    # 468: 'ERR_ONLYSERVERSCANCHANGE',
    469: 'ERR_LINKSET',
    470: 'ERR_LINKCHANNEL',
    # 470: 'ERR_KICKEDFROMCHAN',
    471: 'ERR_CHANNELISFULL',
    472: 'ERR_UNKNOWNMODE',
    473: 'ERR_INVITEONLYCHAN',
    474: 'ERR_BANNEDFROMCHAN',
    475: 'ERR_BADCHANNELKEY',
    476: 'ERR_BADCHANMASK',
    477: 'ERR_NOCHANMODES',
    # 477: 'ERR_NEEDREGGEDNICK',
    478: 'ERR_BANLISTFULL',
    479: 'ERR_BADCHANNAME',
    # 479: 'ERR_LINKFAIL',
    480: 'ERR_NOULINE',
    # 480: 'ERR_CANNOTKNOCK',
    481: 'ERR_NOPRIVILEGES',
    482: 'ERR_CHANOPRIVSNEEDED',
    483: 'ERR_CANTKILLSERVER',
    484: 'ERR_RESTRICTED',
    # 484: 'ERR_ISCHANSERVICE',
    # 484: 'ERR_DESYNC',
    # 484: 'ERR_ATTACKDENY',
    485: 'ERR_UNIQOPRIVSNEEDED',
    # 485: 'ERR_KILLDENY',
    # 485: 'ERR_CANTKICKADMIN',
    # 485: 'ERR_ISREALSERVICE',
    486: 'ERR_NONONREG',
    # 486: 'ERR_HTMDISABLED',
    # 486: 'ERR_ACCOUNTONLY',
    487: 'ERR_CHANTOORECENT',
    # 487: 'ERR_MSGSERVICES',
    488: 'ERR_TSLESSCHAN',
    489: 'ERR_VOICENEEDED',
    # 489: 'ERR_SECUREONLYCHAN',
    491: 'ERR_NOOPERHOST',
    492: 'ERR_NOSERVICEHOST',
    493: 'ERR_NOFEATURE',
    494: 'ERR_BADFEATURE',
    495: 'ERR_BADLOGTYPE',
    496: 'ERR_BADLOGSYS',
    497: 'ERR_BADLOGVALUE',
    498: 'ERR_ISOPERLCHAN',
    499: 'ERR_CHANOWNPRIVNEEDED',
    501: 'ERR_UMODEUNKNOWNFLAG',
    502: 'ERR_USERSDONTMATCH',
    503: 'ERR_GHOSTEDCLIENT',
    # 503: 'ERR_VWORLDWARN',
    504: 'ERR_USERNOTONSERV',
    511: 'ERR_SILELISTFULL',
    512: 'ERR_TOOMANYWATCH',
    513: 'ERR_BADPING',
    514: 'ERR_INVALID_ERROR',
    # 514: 'ERR_TOOMANYDCC',
    515: 'ERR_BADEXPIRE',
    516: 'ERR_DONTCHEAT',
    517: 'ERR_DISABLED',
    518: 'ERR_NOINVITE',
    # 518: 'ERR_LONGMASK',
    519: 'ERR_ADMONLY',
    # 519: 'ERR_TOOMANYUSERS',
    520: 'ERR_OPERONLY',
    # 520: 'ERR_MASKTOOWIDE',
    # 520: 'ERR_WHOTRUNC',
    521: 'ERR_LISTSYNTAX',
    522: 'ERR_WHOSYNTAX',
    523: 'ERR_WHOLIMEXCEED',
    524: 'ERR_QUARANTINED',
    # 524: 'ERR_OPERSPVERIFY',
    525: 'ERR_REMOTEPFX',
    526: 'ERR_PFXUNROUTABLE',
    550: 'ERR_BADHOSTMASK',
    551: 'ERR_HOSTUNAVAIL',
    552: 'ERR_USINGSLINE',
    553: 'ERR_STATSSLINE',
    600: 'RPL_LOGON',
    601: 'RPL_LOGOFF',
    602: 'RPL_WATCHOFF',
    603: 'RPL_WATCHSTAT',
    604: 'RPL_NOWON',
    605: 'RPL_NOWOFF',
    606: 'RPL_WATCHLIST',
    607: 'RPL_ENDOFWATCHLIST',
    608: 'RPL_WATCHCLEAR',
    610: 'RPL_MAPMORE',
    # 610: 'RPL_ISOPER',
    611: 'RPL_ISLOCOP',
    612: 'RPL_ISNOTOPER',
    613: 'RPL_ENDOFISOPER',
    615: 'RPL_MAPMORE',
    # 615: 'RPL_WHOISMODES',
    616: 'RPL_WHOISHOST',
    617: 'RPL_DCCSTATUS',
    # 617: 'RPL_WHOISBOT',
    618: 'RPL_DCCLIST',
    619: 'RPL_ENDOFDCCLIST',
    # 619: 'RPL_WHOWASHOST',
    620: 'RPL_DCCINFO',
    # 620: 'RPL_RULESSTART',
    621: 'RPL_RULES',
    622: 'RPL_ENDOFRULES',
    623: 'RPL_MAPMORE',
    624: 'RPL_OMOTDSTART',
    625: 'RPL_OMOTD',
    # BUG FIX: was 'RPL_ENDOFO<MOTD' (stray '<' in the mnemonic).
    626: 'RPL_ENDOFOMOTD',
    630: 'RPL_SETTINGS',
    631: 'RPL_ENDOFSETTINGS',
    640: 'RPL_DUMPING',
    641: 'RPL_DUMPRPL',
    642: 'RPL_EODUMP',
    660: 'RPL_TRACEROUTE_HOP',
    661: 'RPL_TRACEROUTE_START',
    662: 'RPL_MODECHANGEWARN',
    663: 'RPL_CHANREDIR',
    664: 'RPL_SERVMODEIS',
    665: 'RPL_OTHERUMODEIS',
    666: 'RPL_ENDOF_GENERIC',
    670: 'RPL_WHOWASDETAILS',
    671: 'RPL_WHOISSECURE',
    672: 'RPL_UNKNOWNMODES',
    673: 'RPL_CANNOTSETMODES',
    678: 'RPL_LUSERSTAFF',
    679: 'RPL_TIMEONSERVERIS',
    682: 'RPL_NETWORKS',
    687: 'RPL_YOURLANGUAGEIS',
    688: 'RPL_LANGUAGE',
    689: 'RPL_WHOISSTAFF',
    690: 'RPL_WHOISLANGUAGE',
    702: 'RPL_MODLIST',
    703: 'RPL_ENDOFMODLIST',
    704: 'RPL_HELPSTART',
    705: 'RPL_HELPTXT',
    706: 'RPL_ENDOFHELP',
    708: 'RPL_ETRACEFULL',
    709: 'RPL_ETRACE',
    710: 'RPL_KNOCK',
    711: 'RPL_KNOCKDLVR',
    712: 'ERR_TOOMANYKNOCK',
    713: 'ERR_CHANOPEN',
    714: 'ERR_KNOCKONCHAN',
    715: 'ERR_KNOCKDISABLED',
    716: 'RPL_TARGUMODEG',
    717: 'RPL_TARGNOTIFY',
    718: 'RPL_UMODEGMSG',
    720: 'RPL_OMOTDSTART',
    721: 'RPL_OMOTD',
    722: 'RPL_ENDOFOMOTD',
    723: 'ERR_NOPRIVS',
    724: 'RPL_TESTMARK',
    725: 'RPL_TESTLINE',
    726: 'RPL_NOTESTLINE',
    740: 'RPL_CHALLENGE_TEXT',
    741: 'RPL_CHALLENGE_END',
    771: 'RPL_XINFO',
    773: 'RPL_XINFOSTART',
    774: 'RPL_XINFOEND',
    903: 'RPL_SASL',
    904: 'ERR_SASL',
    972: 'ERR_CANNOTDOCOMMAND',
    973: 'ERR_CANNOTCHANGEUMODE',
    974: 'ERR_CANNOTCHANGECHANMODE',
    975: 'ERR_CANNOTCHANGESERVERMODE',
    976: 'ERR_CANNOTSENDTONICK',
    977: 'ERR_UNKNOWNSERVERMODE',
    979: 'ERR_SERVERMODELOCK',
    980: 'ERR_BADCHARENCODING',
    981: 'ERR_TOOMANYLANGUAGES',
    982: 'ERR_NOLANGUAGE',
    983: 'ERR_TEXTTOOSHORT',
    999: 'ERR_NUMERIC_ERR'
}
|
HactarCE/XenoBot
|
irc.py
|
Python
|
cc0-1.0
| 19,641
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# 'j F Y': day, full month name, four-digit year.
DATE_FORMAT = 'j F Y'
# 'g:i A': 12-hour clock with AM/PM.
TIME_FORMAT = 'g:i A'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
MONTH_DAY_FORMAT = 'j F'
# 'd-m-Y': day-month-year with hyphens.
SHORT_DATE_FORMAT = 'd-m-Y'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
# NUMBER_GROUPING =
|
BitWriters/Zenith_project
|
zango/lib/python3.5/site-packages/django/conf/locale/hi/formats.py
|
Python
|
mit
| 749
|
#!/usr/bin/env python
"""
Copyright (c) 2009 Barry Schwartz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
# Build the 'glyph_name' extension module from the Cython source plus the
# generated C lookup table; Cython's build_ext is hooked in via cmdclass so
# the .pyx file is compiled during the build step.
setup (name = 'glyph_name',
       version = '1.2',
       description = 'Glyph name processing based on the Adobe Glyph List',
       cmdclass = {'build_ext': build_ext},
       ext_modules = [
           Extension('glyph_name',
                     sources = ['glyphname.pyx', 'agl_lookup.c'],
                     )
           ],
       )
|
chemoelectric/glyph_name
|
setup.py
|
Python
|
mit
| 1,535
|
import twentyc.database
import os
import re
def update_views(couch_engine, config, path):
if couch_engine == "couchbase":
raise Exception("Couchbase is currently not supported by this script.")
t = re.sub("\.ddoc$", "", os.path.basename(path)).split("-")
if len(t) != 2:
return
target = t[0]
design_name = t[1]
if not config.get("db_%s" % target):
print "Skipping %s because '%s' is an unknown target" % (path, target)
return
client = twentyc.database.ClientFromConfig(couch_engine, config, target)
view_f = open(path, "r")
view_d = view_f.read().replace("\n"," ")
view_f.close()
print "Updating design from '%s' to '%s:%s/_design/%s'" % (
path,
couch_engine,
client.database_name,
design_name
)
client.put_design(
design_name, view_d, verbose=True
)
|
20c/twentyc.database
|
twentyc/database/tools.py
|
Python
|
apache-2.0
| 828
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration 0001: create the djangofeeds tables.

    Auto-generated; creates Category, Feed, Enclosure and Post plus their
    M2M join tables.  Do not hand-edit the DDL ordering.
    """
    def forwards(self, orm):
        """Apply the migration: create all tables and constraints."""
        # Adding model 'Category'
        db.create_table(u'djangofeeds_category', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=128)),
            ('domain', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
        ))
        db.send_create_signal(u'djangofeeds', ['Category'])
        # Adding unique constraint on 'Category', fields ['name', 'domain']
        db.create_unique(u'djangofeeds_category', ['name', 'domain'])
        # Adding model 'Feed'
        db.create_table(u'djangofeeds_feed', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
            ('feed_url', self.gf('django.db.models.fields.URLField')(unique=True, max_length=200)),
            ('description', self.gf('django.db.models.fields.TextField')()),
            ('link', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('http_etag', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('http_last_modified', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('date_last_refresh', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('last_error', self.gf('django.db.models.fields.CharField')(default='', max_length=32, blank=True)),
            ('ratio', self.gf('django.db.models.fields.FloatField')(default=0.0)),
            ('sort', self.gf('django.db.models.fields.SmallIntegerField')(default=0)),
            ('date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('date_changed', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('date_last_requested', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('freq', self.gf('django.db.models.fields.IntegerField')(default=10800)),
        ))
        db.send_create_signal(u'djangofeeds', ['Feed'])
        # Adding M2M table for field categories on 'Feed'
        m2m_table_name = db.shorten_name(u'djangofeeds_feed_categories')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('feed', models.ForeignKey(orm[u'djangofeeds.feed'], null=False)),
            ('category', models.ForeignKey(orm[u'djangofeeds.category'], null=False))
        ))
        db.create_unique(m2m_table_name, ['feed_id', 'category_id'])
        # Adding model 'Enclosure'
        db.create_table(u'djangofeeds_enclosure', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('url', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('type', self.gf('django.db.models.fields.CharField')(max_length=200)),
            ('length', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
        ))
        db.send_create_signal(u'djangofeeds', ['Enclosure'])
        # Adding model 'Post'
        db.create_table(u'djangofeeds_post', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('feed', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['djangofeeds.Feed'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
            ('link', self.gf('django.db.models.fields.URLField')(max_length=2048)),
            ('content', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('guid', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
            ('author', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
            ('date_published', self.gf('django.db.models.fields.DateField')()),
            ('date_updated', self.gf('django.db.models.fields.DateTimeField')()),
        ))
        db.send_create_signal(u'djangofeeds', ['Post'])
        # Adding M2M table for field enclosures on 'Post'
        m2m_table_name = db.shorten_name(u'djangofeeds_post_enclosures')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('post', models.ForeignKey(orm[u'djangofeeds.post'], null=False)),
            ('enclosure', models.ForeignKey(orm[u'djangofeeds.enclosure'], null=False))
        ))
        db.create_unique(m2m_table_name, ['post_id', 'enclosure_id'])
        # Adding M2M table for field categories on 'Post'
        m2m_table_name = db.shorten_name(u'djangofeeds_post_categories')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('post', models.ForeignKey(orm[u'djangofeeds.post'], null=False)),
            ('category', models.ForeignKey(orm[u'djangofeeds.category'], null=False))
        ))
        db.create_unique(m2m_table_name, ['post_id', 'category_id'])
    def backwards(self, orm):
        """Revert the migration: drop everything forwards() created."""
        # Removing unique constraint on 'Category', fields ['name', 'domain']
        db.delete_unique(u'djangofeeds_category', ['name', 'domain'])
        # Deleting model 'Category'
        db.delete_table(u'djangofeeds_category')
        # Deleting model 'Feed'
        db.delete_table(u'djangofeeds_feed')
        # Removing M2M table for field categories on 'Feed'
        db.delete_table(db.shorten_name(u'djangofeeds_feed_categories'))
        # Deleting model 'Enclosure'
        db.delete_table(u'djangofeeds_enclosure')
        # Deleting model 'Post'
        db.delete_table(u'djangofeeds_post')
        # Removing M2M table for field enclosures on 'Post'
        db.delete_table(db.shorten_name(u'djangofeeds_post_enclosures'))
        # Removing M2M table for field categories on 'Post'
        db.delete_table(db.shorten_name(u'djangofeeds_post_categories'))
    # Frozen ORM snapshot used by South to reconstruct model state.
    models = {
        u'djangofeeds.category': {
            'Meta': {'unique_together': "(('name', 'domain'),)", 'object_name': 'Category'},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        },
        u'djangofeeds.enclosure': {
            'Meta': {'object_name': 'Enclosure'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'length': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        u'djangofeeds.feed': {
            'Meta': {'ordering': "('id',)", 'object_name': 'Feed'},
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['djangofeeds.Category']", 'symmetrical': 'False'}),
            'date_changed': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_last_refresh': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'date_last_requested': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'feed_url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
            'freq': ('django.db.models.fields.IntegerField', [], {'default': '10800'}),
            'http_etag': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'http_last_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'last_error': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'ratio': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
            'sort': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
        },
        u'djangofeeds.post': {
            'Meta': {'object_name': 'Post'},
            'author': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['djangofeeds.Category']", 'symmetrical': 'False'}),
            'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'date_published': ('django.db.models.fields.DateField', [], {}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
            'enclosures': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['djangofeeds.Enclosure']", 'symmetrical': 'False', 'blank': 'True'}),
            'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['djangofeeds.Feed']"}),
            'guid': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '2048'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        }
    }
    complete_apps = ['djangofeeds']
|
chrisspen/django-feeds
|
djangofeeds/migrations/0001_initial.py
|
Python
|
bsd-2-clause
| 10,467
|
from django.db import models
from artists.models import Artist
ALBUM_CDN_PATH = 'cdn/images/albums/'
class Album(models.Model):
    """A music album belonging to a single artist."""
    # Album title; unique across the whole table.
    title = models.CharField(max_length=260, unique=True)
    # Deleting the artist cascades to their albums.
    artist = models.ForeignKey(Artist, on_delete=models.CASCADE)
    # Optional release year.
    release_year = models.IntegerField(null=True, blank=True)
    # Cover image, uploaded under ALBUM_CDN_PATH ('cdn/images/albums/').
    cover = models.ImageField(upload_to=ALBUM_CDN_PATH)
    # Play/scrobble counter; presumably updated elsewhere — not shown here.
    scrobble_count = models.IntegerField(default=0)
    def __str__(self):
        return self.title
|
ModalSeoul/Weeb.FM
|
albums/models.py
|
Python
|
mit
| 475
|
"""Python 3 compatibility shims
"""
import sys
if sys.version_info[0] < 3:
PY3 = False
def b(s):
return s
def u(s):
return unicode(s, 'unicode_escape')
import cStringIO as StringIO
StringIO = BytesIO = StringIO.StringIO
text_type = unicode
binary_type = str
string_types = basestring
integer_types = (int, long)
unichr = unichr
reload_module = reload
def fromhex(s):
return s.decode('hex')
else:
PY3 = True
if sys.version_info[3] == 'zippy':
reload_module = reload
elif sys.version_info[:2] >= (3, 4):
from importlib import reload as reload_module
else:
from imp import reload as reload_module
if sys.version_info[3] != 'zippy':
import codecs
def b(s):
return codecs.latin_1_encode(s)[0]
def u(s):
return s
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
text_type = str
binary_type = bytes
string_types = str
integer_types = int
def unichr(s):
return u(chr(s))
def fromhex(s):
return bytes.fromhex(s)
# long_type = integer_types[-1]
|
wdv4758h/ZipPy
|
edu.uci.python.benchmark/src/benchmarks/simplejson/simplejson/compat.py
|
Python
|
bsd-3-clause
| 1,145
|
class AlertException(Exception):
    """Exception type raised by the alert library."""
|
redbox-mint/redbox
|
config/src/main/config/home/lib/jython/alertlib/AlertException.py
|
Python
|
gpl-2.0
| 45
|
#########################################################################
#
# Astronomy Club Membership
# file: membership/admin.py
#
# Copyright (C) 2017 Teruo Utsumi, San Jose Astronomical Association
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# Contributors:
# 2017-06-01 Teruo Utsumi, initial code
#
#########################################################################
import pdb
import datetime
from django.contrib import admin
from sched_core.sched_log import sched_log
from sched_core.filters import AdminDateTimeYearFilter
from .models import User
from .views import renew
################
# For Membership
################
# action from admin event page
def renew_memberships(modeladmin, request, queryset):
    """Admin action: renew the memberships of the selected users.

    BUG FIX: the original called views.renew(queryset), but this module
    only imports `renew` (from .views import renew), so `views` was an
    undefined name and the action raised NameError.
    """
    # NOTE(review): assumes renew() accepts just the queryset, matching the
    # original call's arguments — confirm against .views.renew's signature.
    renew(queryset)
renew_memberships.short_description = "Renew memberships"
class PostUsers(admin.ModelAdmin):
    """Admin configuration for membership User records."""
    # Columns shown on the changelist page.
    list_display = ('username', 'first_name', 'last_name', 'modified',
                    'status', 'notices',
                    'date_start', 'date_end', 'date_since', 'email', 'notes')
    list_filter = ('status', 'date_end', 'volunteer')
    search_fields = ['last_name', 'email']
    ordering = ('status',)
    # Edit-form layout; tuples group fields onto one row.
    fields = ('status',
              ('username', 'is_staff'),
              ('first_name', 'last_name', 'email'),
              ('date_start', 'date_end', 'date_since'),
              'addr1', 'addr2', ('city', 'state', 'zip_code'),
              'phone1', 'phone2',
              'notices', 'associate', 'notes', 'volunteer', 'groups', 'coordinator')
    # NOTE(review): this registers the imported view `renew` directly rather
    # than the renew_memberships wrapper defined above — confirm intended.
    actions = [renew]
admin.site.register(User, PostUsers)
|
sjaa/scheduler
|
membership/admin.py
|
Python
|
gpl-3.0
| 2,153
|
from rest_framework.response import Response
from allauth.socialaccount import providers
from allauth.socialaccount.adapter import get_adapter
from allauth_api.account.rest_framework.views import CloseableSignupMixin
from .serializers import ProviderSerializer
from allauth import app_settings
from allauth.utils import get_form_class
from allauth.socialaccount.models import SocialLogin
from allauth.socialaccount.forms import SignupForm
from allauth.socialaccount.helpers import complete_social_signup
from rest_framework.status import HTTP_400_BAD_REQUEST
from allauth_api.settings import allauth_api_settings
APIView = allauth_api_settings.DRF_API_VIEW
class RegisterView(CloseableSignupMixin, APIView):
    """
    Register users who use 3rd-party authentication (e.g. Facebook, Google, Twitter, etc.)
    """
    permission_classes = allauth_api_settings.DRF_REGISTER_VIEW_PERMISSIONS
    form_class = SignupForm
    def get_form_class(self):
        # Allow the signup form to be overridden via allauth's FORMS setting.
        return get_form_class(app_settings.FORMS, 'signup', self.form_class)
    def dispatch(self, request, *args, **kwargs):
        # Restore any pending social login stashed in the session by the
        # earlier OAuth handshake, before the request is routed.
        self.sociallogin = None
        data = request.session.get('socialaccount_sociallogin')
        if data:
            self.sociallogin = SocialLogin.deserialize(data)
        return super(RegisterView, self).dispatch(request, *args, **kwargs)
    def post(self, request, format=None):
        # Validate the signup form and finish the social signup flow.
        # NOTE(review): request.DATA/request.FILES is the pre-3.0 DRF API
        # (now request.data); presumably the project pins an old DRF.
        fc = self.get_form_class()
        form = fc(data=request.DATA, files=request.FILES, sociallogin=self.sociallogin)
        if form.is_valid():
            user = form.save(request)
            return complete_social_signup(self.request, user, app_settings.EMAIL_VERIFICATION)
        return Response(form.errors, HTTP_400_BAD_REQUEST)
    def is_open(self):
        # Hook used by CloseableSignupMixin to gate signups.
        return get_adapter().is_open_for_signup(self.request,
                                                self.sociallogin)
register = RegisterView.as_view()
class ProviderListView(APIView):
    """
    List the social account providers available
    """
    permission_classes = allauth_api_settings.DRF_PROVIDERS_VIEW_PERMISSIONS
    def get(self, request, format=None):
        # Serialize every registered provider and return them in one response.
        provider_list = providers.registry.get_list()
        serialized = ProviderSerializer(provider_list, many=True)
        return Response(serialized.data)
list_providers = ProviderListView.as_view()
|
jannon/django-allauth-api
|
src/allauth_api/socialaccount/rest_framework/views.py
|
Python
|
bsd-2-clause
| 2,310
|
def get_num(a, b):
    """Count positions where at least one of two equal-length '0'/'1' strings is '1'.

    This is the number of topics known by a two-person team whose
    knowledge bitstrings are ``a`` and ``b``.
    """
    # zip pairs characters positionally; the generator counts positions
    # where either string carries a '1' (i.e. the bitwise OR popcount).
    return sum(1 for x, y in zip(a, b) if x == '1' or y == '1')
# Read "n m": n = number of people, m = number of topics (m is unused
# below because each knowledge string already has length m).
s = [(int)(x) for x in (raw_input()).split(' ')]
n = s[0]
m = s[1]
# One binary knowledge string per person.
arr = []
for i in range(n):
    arr.append(raw_input())
# Track the best topic count over all 2-person teams, and how many
# teams achieve it.  NOTE(review): despite its name, max_index is a
# *count* of maximal teams, not an index.
max_num = 0
max_index = 0
for j in range(n):
    for k in range(j + 1, n):
        num = get_num(arr[j], arr[k])
        if max_num < num:
            # New best: reset the count of maximal teams to this one.
            max_num = num
            max_index = 1
        elif max_num == num:
            max_index += 1
print max_num
print max_index
|
xbfool/hackerrank_xbfool
|
src/algorithms/warmup/acm_icpc_team.py
|
Python
|
mit
| 460
|
import abc
__author__ = 'cenk'
class ModelError(abc.ABC):
    """Abstract interface for model error calculations.

    Bug fix: ``@abc.abstractmethod`` has no effect unless the class uses
    ``abc.ABCMeta`` as its metaclass.  Previously ``ModelError(object)``
    could be instantiated and subclasses could silently omit
    ``calculate``; inheriting from ``abc.ABC`` (Python 3.4+) enforces
    the contract.
    """
    @abc.abstractmethod
    def calculate(self, Ws, Xs, Bs, h, f, y, s, *args, **kwargs):
        """Compute the model error; must be overridden by concrete subclasses."""
        pass
|
cenkbircanoglu/cnn-example
|
model_error/abstract.py
|
Python
|
mit
| 163
|
from itertools import combinations, permutations
def check_leading_zeros(*numbers):
    """Return True when at least one of the digit strings has a leading zero."""
    for digits in numbers:
        if digits[0] == '0':
            return True
    return False
def test_equation(puzzle, substitutions):
    """Check one candidate digit assignment against the alphametics puzzle.

    Returns False when any number would gain a leading zero, otherwise
    True iff the left-hand addends sum to the right-hand side.
    """
    digits = ''.join(substitutions.get(ch) or ch for ch in puzzle)
    lhs, rhs = digits.split(' == ')
    addends = lhs.split(' + ')
    if check_leading_zeros(rhs, *addends):
        return False
    total = sum(int(term) for term in addends)
    return total == int(rhs)
def solve(puzzle):
    """Solve an alphametics *puzzle* like ``"SEND + MORE == MONEY"``.

    Returns a letter -> int mapping, or an empty dict when no assignment
    of distinct digits satisfies the equation.
    """
    letters = {ch for ch in puzzle if ch.isupper()}
    digit_pool = [str(d) for d in range(10)]
    # Try every subset of digits of the right size, in every order.
    for chosen in combinations(digit_pool, len(letters)):
        for ordering in permutations(chosen):
            mapping = dict(zip(letters, ordering))
            if test_equation(puzzle, mapping):
                return {letter: int(digit) for letter, digit in mapping.items()}
    return {}  # exhausted every assignment without a match
|
mweb/python
|
exercises/alphametics/example.py
|
Python
|
mit
| 863
|
import os
# Load the SBML model document that ships next to this module so that
# consumers can use `sbmlString` without knowing the install location.
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'BIOMD0000000413.xml')
with open(sbmlFilePath,'r') as f:
    sbmlString = f.read()
def module_exists(module_name):
    """Report whether *module_name* is importable in this environment."""
    try:
        __import__(module_name)
        return True
    except ImportError:
        return False
# Parse the model only when libsbml is installed; otherwise callers can
# still use the raw `sbmlString` loaded above (no `sbml` object exists).
if module_exists('libsbml'):
    import libsbml
    sbml = libsbml.readSBMLFromString(sbmlString)
|
biomodels/BIOMD0000000413
|
BIOMD0000000413/model.py
|
Python
|
cc0-1.0
| 427
|
#!/usr/bin/env python2
#
# This file is part of the dune-hdd project:
# https://github.com/pymor/dune-hdd
# Copyright Holders: Felix Schindler
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
from __future__ import division, print_function
import numpy as np
from functools import partial
from itertools import izip
from scipy.sparse import coo_matrix, bmat
from pymor.algorithms.basisextension import pod_basis_extension
from pymor.algorithms.greedy import greedy
from pymor.algorithms.timestepping import ImplicitEulerTimeStepper
from pymor.discretizations.basic import InstationaryDiscretization
from pymor.operators.constructions import FixedParameterOperator, LincombOperator
from pymor.operators.numpy import NumpyMatrixOperator
from pymor.playground.algorithms.blockbasisextension import pod_block_basis_extension
from pymor.operators.block import BlockOperator
from pymor.playground.reductors.block import GenericBlockRBReconstructor
from pymor.reductors.basic import reduce_generic_rb
from pymor.vectorarrays.block import BlockVectorArray
from pymor.vectorarrays.numpy import NumpyVectorArray
from morepas3__estimate import ReducedAgainstWeak
class Reconstructor(object):
    """Maps reduced coefficient vectors back to the detailed (global) space.

    Accepts either a monolithic ``NumpyVectorArray`` (which is sliced
    into per-subdomain pieces) or an already-blocked ``BlockVectorArray``.

    Bug fix: ``restricted_to_subbasis`` referenced ``self.RB``, but
    ``__init__`` only ever sets ``self._RB`` — every call raised
    ``AttributeError``.  All references now use ``self._RB``.
    """
    def __init__(self, disc, RB):
        self._disc = disc
        # Copy so later basis extensions cannot mutate this reconstructor.
        self._RB = [rb.copy() for rb in RB]
    def reconstruct(self, U):
        """Reconstruct *U* in the detailed space."""
        if U.dim == 0:
            return self._disc.globalize_vectors(self._disc.solution_space.zeros(len(U)))
        else:
            if isinstance(U, NumpyVectorArray):
                assert U.dim == np.sum(len(RB) for RB in self._RB)
                # Slice the monolithic coefficient array into the blocks
                # belonging to each subdomain's local basis.
                local_lens = [len(RB) for RB in self._RB]
                local_starts = np.insert(np.cumsum(local_lens), 0, 0)[:-1]
                localized_U = [NumpyVectorArray(U._array[:, local_starts[ii]:(local_starts[ii] + local_lens[ii])])
                               for ii in np.arange(len(self._RB))]
                U = BlockVectorArray(localized_U)
            if not isinstance(U, BlockVectorArray):
                raise NotImplementedError
            U = GenericBlockRBReconstructor(self._RB).reconstruct(U)
            return self._disc.globalize_vectors(U)
    def restricted_to_subbasis(self, dim):
        """Return a Reconstructor using only the first ``dim`` vectors per block.

        ``dim`` may be a single int (applied to every block) or a tuple
        with one entry per block.
        """
        if not isinstance(dim, tuple):
            dim = len(self._RB)*[dim]
        assert all([dd <= len(rb) for dd, rb in izip(dim, self._RB)])
        return Reconstructor(self._disc, [rb.copy(ind=range(dd)) for rb, dd in izip(self._RB, dim)])
def reductor(config, detailed_data, discretization, RB, vector_product=None, disable_caching=True, extends=None):
    """Reduce the blocked (LRBMS) discretization and return an unblocked
    monolithic InstationaryDiscretization plus reconstructor and data.

    NOTE(review): the ``discretization`` argument and ``elliptic_disc``
    below are never used; ``reduced_rhs`` is computed but the return
    statement recomputes ``unblock_op(rd.rhs)`` instead.
    """
    elliptic_disc = detailed_data['elliptic_disc']
    elliptic_LRBMS_disc = detailed_data['elliptic_LRBMS_disc']
    T = config['end_time']
    nt = config['nt']
    if RB is None:
        # No basis yet: start the greedy loop from empty local bases.
        RB = [elliptic_LRBMS_disc.local_operator(ss).source.empty() for ss in np.arange(elliptic_LRBMS_disc.num_subdomains)]
    rd, rc, reduction_data = reduce_generic_rb(elliptic_LRBMS_disc, RB, vector_product, disable_caching, extends)
    # The generic reconstructor is replaced by our blocked-aware one.
    rc = Reconstructor(elliptic_LRBMS_disc, RB)
    def unblock_op(op, sparse=False):
        # Flatten a BlockOperator (possibly of LincombOperators) into a
        # single NumpyMatrixOperator.  NOTE(review): the ``sparse``
        # parameter is accepted but never used.
        assert op._blocks[0][0] is not None
        if isinstance(op._blocks[0][0], LincombOperator):
            # Unblock each affine component separately, keeping the
            # shared coefficient functionals.
            coefficients = op._blocks[0][0].coefficients
            operators = [None for kk in np.arange(len(op._blocks[0][0].operators))]
            for kk in np.arange(len(op._blocks[0][0].operators)):
                ops = [[op._blocks[ii][jj].operators[kk]
                        if op._blocks[ii][jj] is not None else None
                        for jj in np.arange(op.num_source_blocks)]
                       for ii in np.arange(op.num_range_blocks)]
                operators[kk] = unblock_op(BlockOperator(ops))
            return LincombOperator(operators=operators, coefficients=coefficients)
        else:
            assert all(all([isinstance(block, NumpyMatrixOperator) if block is not None else True
                            for block in row])
                       for row in op._blocks)
            if op.source.dim == 0 and op.range.dim == 0:
                return NumpyMatrixOperator(np.zeros((0, 0)))
            elif op.source.dim == 1:
                # Column-vector-like operator (e.g. a functional's adjoint).
                # NOTE(review): axis=1 here looks suspicious for stacking
                # range blocks of a dim-1-source operator — confirm
                # against the matrix orientation pymor uses.
                mat = np.concatenate([op._blocks[ii][0]._matrix
                                      for ii in np.arange(op.num_range_blocks)],
                                     axis=1)
            elif op.range.dim == 1:
                # Row-vector-like operator (e.g. a functional).
                mat = np.concatenate([op._blocks[0][jj]._matrix
                                      for jj in np.arange(op.num_source_blocks)],
                                     axis=1)
            else:
                # General case: assemble a sparse block matrix, padding
                # missing blocks with zeros of the right local size.
                mat = bmat([[coo_matrix(op._blocks[ii][jj]._matrix)
                             if op._blocks[ii][jj] is not None else coo_matrix((op._range_dims[ii], op._source_dims[jj]))
                             for jj in np.arange(op.num_source_blocks)]
                            for ii in np.arange(op.num_range_blocks)])
                mat = mat.toarray()
            return NumpyMatrixOperator(mat)
    reduced_op = unblock_op(rd.operator, True)
    reduced_rhs = unblock_op(rd.rhs)
    estimator = ReducedAgainstWeak(rc, detailed_data['example'], detailed_data['wrapper'],
                                   detailed_data['bochner_norms']['elliptic_penalty'], detailed_data['space_products']['l2'],
                                   T, detailed_data['mu_min'], detailed_data['mu_max'], detailed_data['mu_hat'],
                                   detailed_data['mu_bar'], detailed_data['mu_tilde'])
    return (InstationaryDiscretization(T=T,
                                       initial_data=reduced_op.source.zeros(1),
                                       operator=reduced_op,
                                       rhs=unblock_op(rd.rhs),
                                       mass=unblock_op(rd.products['l2']),
                                       time_stepper=ImplicitEulerTimeStepper(nt),
                                       products={kk: unblock_op(rd.products[kk]) for kk in rd.products.keys()},
                                       operators={kk: unblock_op(rd.operators[kk])
                                                  for kk in rd.operators.keys() if kk != 'operator'},
                                       functionals={kk: unblock_op(rd.functionals[kk])
                                                    for kk in rd.functionals.keys() if kk != 'rhs'},
                                       vector_operators={kk: unblock_op(rd.vector_operators[kk])
                                                         for kk in rd.vector_operators.keys()},
                                       parameter_space=rd.parameter_space,
                                       estimator=estimator,
                                       cache_region='disk',
                                       name='reduced discretization ({} DoFs)'.format(reduced_op.source.dim)),
            rc,
            reduction_data)
def extension(elliptic_LRBMS_disc, extension_product, basis, U):
    """Extend the blockwise reduced basis by (the localization of) snapshot *U*.

    Monolithic snapshots are first split into per-subdomain blocks; the
    POD extension then uses each subdomain's local product.
    """
    if not isinstance(U, BlockVectorArray):
        U = BlockVectorArray([elliptic_LRBMS_disc.localize_vector(U, ii)
                              for ii in np.arange(elliptic_LRBMS_disc.num_subdomains)])
    return pod_block_basis_extension(basis,
                                     U,
                                     count=1,
                                     product=[elliptic_LRBMS_disc.local_product(ss, extension_product)
                                              for ss in np.arange(elliptic_LRBMS_disc.num_subdomains)])
def reduce_pod_greedy(config, detailed_data, training_samples):
    """Run the estimator-driven greedy loop with POD-based basis extension.

    Stops when ``max_rb_size`` extensions or the ``target_error``
    tolerance is reached; returns pymor's greedy result dict.
    """
    greedy_data = greedy(detailed_data['parabolic_disc'],
                         partial(reductor, config, detailed_data),
                         training_samples,
                         initial_basis=detailed_data['initial_basis'] if 'initial_basis' in detailed_data else None,
                         use_estimator=True,
                         error_norm=None,
                         extension_algorithm=partial(extension, detailed_data['elliptic_LRBMS_disc'], config['extension_product']),
                         max_extensions=config['max_rb_size'],
                         atol=config['target_error'])
    return greedy_data
|
pymor/dune-hdd
|
examples/linearparabolic/morepas3__reduce.py
|
Python
|
bsd-2-clause
| 8,260
|
from ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ActionMap import NumberActionMap
from Components.Harddisk import harddiskmanager
from Components.Input import Input
from Components.Label import Label
from Components.MovieList import AUDIO_EXTENSIONS, MOVIE_EXTENSIONS, DVD_EXTENSIONS
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.config import config, ConfigBoolean, ConfigClock, ConfigText
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, ConfigSelection
from Components.VolumeControl import VolumeControl
from Components.Sources.StaticText import StaticText
from EpgSelection import EPGSelection
from Plugins.Plugin import PluginDescriptor
from Screen import Screen
from Screens import ScreenSaver
from Screens import Standby
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
import Screens.Standby
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.TimeDateInput import TimeDateInput
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from Tools import Notifications, ASCIItranslit
from Tools.Directories import fileExists, getRecordingFilename, moveFiles
from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, \
iPlayableService, eServiceReference, eEPGCache, eActionMap
from time import time, localtime, strftime
import os
from bisect import insort
from sys import maxint
####key debug
# from keyids import KEYIDS
# from datetime import datetime
from RecordTimer import RecordTimerEntry, RecordTimer, findSafeRecordPath
# hack alert!
from Menu import MainMenu, mdom
def isStandardInfoBar(self):
	# Only the screen class literally named "InfoBar" is the standard
	# TV infobar (subclasses such as the movie player do not count).
	screen_name = self.__class__.__name__
	return screen_name == "InfoBar"
def setResumePoint(session):
	"""Remember the current play position of the running service.

	Stores [last-use timestamp, position, length] keyed by the service
	reference string, evicts when the cache exceeds 50 entries, and
	persists to disk at most once an hour.
	"""
	global resumePointCache, resumePointCacheLast
	service = session.nav.getCurrentService()
	ref = session.nav.getCurrentlyPlayingServiceOrGroup()
	if (service is not None) and (ref is not None): # and (ref.type != 1):
		# ref type 1 has its own memory...
		seek = service.seek()
		if seek:
			pos = seek.getPlayPosition()
			if not pos[0]:
				key = ref.toString()
				lru = int(time())
				l = seek.getLength()
				if l:
					l = l[1]
				else:
					l = None
				resumePointCache[key] = [lru, pos[1], l]
				if len(resumePointCache) > 50:
					# NOTE(review): this picks the *last* entry older than
					# `lru`, not the oldest one — eviction is approximate.
					candidate = key
					for k,v in resumePointCache.items():
						if v[0] < lru:
							candidate = k
					del resumePointCache[candidate]
				if lru - resumePointCacheLast > 3600:
					saveResumePoints()
def delResumePoint(ref):
	"""Forget the stored resume position for *ref* (if any), persisting
	to disk when the last save is more than an hour old."""
	global resumePointCache, resumePointCacheLast
	key = ref.toString()
	if key in resumePointCache:
		del resumePointCache[key]
	if int(time()) - resumePointCacheLast > 3600:
		saveResumePoints()
def getResumePoint(session):
	"""Return the saved play position for the current service, or None.

	Skips ref type 1 services (they keep their own position memory) and
	refreshes the entry's LRU timestamp on a hit.
	"""
	global resumePointCache
	ref = session.nav.getCurrentlyPlayingServiceOrGroup()
	if (ref is not None) and (ref.type != 1):
		try:
			entry = resumePointCache[ref.toString()]
			entry[0] = int(time()) # update LRU timestamp
			return entry[1]
		except KeyError:
			return None
def saveResumePoints():
global resumePointCache, resumePointCacheLast
import cPickle
try:
f = open('/home/root/resumepoints.pkl', 'wb')
cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
except Exception, ex:
print "[InfoBar] Failed to write resumepoints:", ex
resumePointCacheLast = int(time())
def loadResumePoints():
import cPickle
try:
return cPickle.load(open('/home/root/resumepoints.pkl', 'rb'))
except Exception, ex:
print "[InfoBar] Failed to load resumepoints:", ex
return {}
# Warm the in-memory resume-point cache at import time and remember when
# it was last persisted (used to throttle disk writes to once an hour).
resumePointCache = loadResumePoints()
resumePointCacheLast = int(time())
class InfoBarDish:
	"""Mixin owning the dish-movement dialog (shown while a rotor turns)."""
	def __init__(self):
		self.dishDialog = self.session.instantiateDialog(Dish)
class InfoBarUnhandledKey:
	"""Mixin that briefly shows an on-screen symbol when a key press is
	not consumed by any action map.

	Works by observing every key event at highest priority (actionA) and
	the leftovers at lowest priority (actionB): if the flag masks match,
	nobody in between handled the key.
	"""
	def __init__(self):
		self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
		self.hideUnhandledKeySymbolTimer = eTimer()
		self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
		self.checkUnusedTimer = eTimer()
		self.checkUnusedTimer.callback.append(self.checkUnused)
		self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
		eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
		eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
		self.flags = (1<<1)
		self.uflags = 0
	#this function is called on every keypress!
	def actionA(self, key, flag):
		####key debug
		#try:
		#	print 'KEY: %s %s %s' % (key,(key_name for key_name,value in KEYIDS.items() if value==key).next(),getKeyDescription(key)[0])
		#except:
		#	try:
		#		print 'KEY: %s %s' % (key,(key_name for key_name,value in KEYIDS.items() if value==key).next()) # inverse dictionary lookup in KEYIDS
		#	except:
		#		print 'KEY: %s' % (key)
		self.unhandledKeyDialog.hide()
		if flag != 4:
			# flag 4 is a repeat/long event we ignore; a fresh press
			# (bit 1 set from last break) resets both masks.
			if self.flags & (1<<1):
				self.flags = self.uflags = 0
			self.flags |= (1<<flag)
			if flag == 1: # break
				# Defer the comparison until after all handlers ran.
				self.checkUnusedTimer.start(0, True)
		return 0
	#this function is only called when no other action has handled this key
	def actionB(self, key, flag):
		if flag != 4:
			self.uflags |= (1<<flag)
	def checkUnused(self):
		# Equal masks means every event phase reached the lowest-priority
		# handler unconsumed: show the "unhandled key" symbol for 2s.
		if self.flags == self.uflags:
			self.unhandledKeyDialog.show()
			self.hideUnhandledKeySymbolTimer.start(2000, True)
class InfoBarScreenSaver:
	"""Mixin driving the screensaver for audio-like playback.

	Arms a timer while an audio service (or radio) plays and no PiP is
	shown; the first key press after the saver appears dismisses it.
	"""
	def __init__(self):
		self.onExecBegin.append(self.__onExecBegin)
		self.onExecEnd.append(self.__onExecEnd)
		self.screenSaverTimer = eTimer()
		self.screenSaverTimer.callback.append(self.screensaverTimeout)
		self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver)
		self.onLayoutFinish.append(self.__layoutFinished)
	def __layoutFinished(self):
		self.screensaver.hide()
	def __onExecBegin(self):
		self.ScreenSaverTimerStart()
	def __onExecEnd(self):
		if self.screensaver.shown:
			self.screensaver.hide()
			eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
		self.screenSaverTimer.stop()
	def ScreenSaverTimerStart(self):
		# (Re)arm the saver timer only when configured and the current
		# service is audio (radio flag "2" or an audio file extension).
		time = int(config.usage.screen_saver.value)
		flag = self.seekstate[0]
		if not flag:
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			if ref and not (hasattr(self.session, "pipshown") and self.session.pipshown):
				ref = ref.toString().split(":")
				flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS
		if time and flag:
			self.screenSaverTimer.startLongTimer(time)
		else:
			self.screenSaverTimer.stop()
	def screensaverTimeout(self):
		# Do nothing while in (or entering) standby; otherwise hide the
		# infobar/PVR state and grab all keys at highest priority.
		if self.execing and not Standby.inStandby and not Standby.inTryQuitMainloop:
			self.hide()
			if hasattr(self, "pvrStateDialog"):
				self.pvrStateDialog.hide()
			self.screensaver.show()
			eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver)
	def keypressScreenSaver(self, key, flag):
		# Any key press dismisses the saver and re-arms the timer.
		if flag:
			self.screensaver.hide()
			self.show()
			self.ScreenSaverTimerStart()
			eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
class SecondInfoBar(Screen):
	"""Skin-defined second (extended) infobar; content comes entirely
	from the skin, hence no widgets are created here."""
	def __init__(self, session):
		Screen.__init__(self, session)
		self.skin = None
class InfoBarShowHide(InfoBarScreenSaver):
	""" InfoBar show/hide control, accepts toggleShow and hide actions, might start
	fancy animations.

	State machine over STATE_* plus an auto-hide timer; hiding is done
	gradually by dimming the OSD alpha before the actual hide().
	"""
	STATE_HIDDEN = 0
	STATE_HIDING = 1
	STATE_SHOWING = 2
	STATE_SHOWN = 3
	def __init__(self):
		self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
			{
				"toggleShow": self.okButtonCheck,
				"hide": self.keyHide,
			}, 1) # lower prio to make it possible to override ok and cancel..
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.serviceStarted,
			})
		InfoBarScreenSaver.__init__(self)
		self.__state = self.STATE_SHOWN
		self.__locked = 0
		self.hideTimer = eTimer()
		self.hideTimer.callback.append(self.doTimerHide)
		self.hideTimer.start(5000, True)
		self.onShow.append(self.__onShow)
		self.onHide.append(self.__onHide)
		self.onShowHideNotifiers = []
		# Only the standard infobar owns a second (extended) infobar.
		self.secondInfoBarScreen = ""
		if isStandardInfoBar(self):
			self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar)
			self.secondInfoBarScreen.show()
		self.onLayoutFinish.append(self.__layoutFinished)
	def __layoutFinished(self):
		if self.secondInfoBarScreen:
			self.secondInfoBarScreen.hide()
	def __onShow(self):
		self.__state = self.STATE_SHOWN
		for x in self.onShowHideNotifiers:
			x(True)
		self.startHideTimer()
	def doDimming(self):
		# Called repeatedly by DimmingTimer; counts down the dim level.
		if config.usage.show_infobar_do_dimming.value:
			self.dimmed = self.dimmed-1
		else:
			self.dimmed = 0
		self.DimmingTimer.stop()
		self.doHide()
	def unDimming(self):
		self.unDimmingTimer.stop()
		self.doWriteAlpha(config.av.osd_alpha.value)
	def doWriteAlpha(self, value):
		# Direct OSD transparency control via the STB proc interface.
		if fileExists("/proc/stb/video/alpha"):
			f=open("/proc/stb/video/alpha","w")
			f.write("%i" % (value))
			f.close()
	def __onHide(self):
		# Restore full alpha shortly after hiding completes.
		self.unDimmingTimer = eTimer()
		self.unDimmingTimer.callback.append(self.unDimming)
		self.unDimmingTimer.start(100, True)
		self.__state = self.STATE_HIDDEN
		if self.secondInfoBarScreen:
			self.secondInfoBarScreen.hide()
		for x in self.onShowHideNotifiers:
			x(False)
	def keyHide(self):
		if self.__state == self.STATE_HIDDEN and self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
			# Exit while hidden with PiP: ask (or immediately) close PiP.
			if config.usage.pip_hideOnExit.value == "popup":
				self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
			else:
				self.hidePipOnExitCallback(True)
		elif config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
			self.toggleShow()
		elif self.__state == self.STATE_SHOWN:
			self.hide()
	def hidePipOnExitCallback(self, answer):
		if answer == True:
			self.showPiP()
	def connectShowHideNotifier(self, fnc):
		if not fnc in self.onShowHideNotifiers:
			self.onShowHideNotifiers.append(fnc)
	def disconnectShowHideNotifier(self, fnc):
		if fnc in self.onShowHideNotifiers:
			self.onShowHideNotifiers.remove(fnc)
	def serviceStarted(self):
		if self.execing:
			if config.usage.show_infobar_on_zap.value:
				self.doShow()
	def startHideTimer(self):
		# Re-arm the auto-hide timer unless lockShow() is in effect.
		if self.__state == self.STATE_SHOWN and not self.__locked:
			self.hideTimer.stop()
			if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
				idx = config.usage.show_second_infobar.index - 1
			else:
				idx = config.usage.infobar_timeout.index
			if idx:
				self.hideTimer.startLongTimer(idx)
	def doShow(self):
		self.show()
		self.startHideTimer()
	def doTimerHide(self):
		self.hideTimer.stop()
		#if self.__state == self.STATE_SHOWN:
		#	self.hide()
		# Start the dimming sequence instead of hiding immediately.
		self.DimmingTimer = eTimer()
		self.DimmingTimer.callback.append(self.doDimming)
		self.DimmingTimer.start(70, True)
		self.dimmed = config.usage.show_infobar_dimming_speed.value
	def doHide(self):
		# One dimming step: lower alpha proportionally, then either keep
		# dimming or finally hide whichever infobar is visible.
		if self.__state != self.STATE_HIDDEN:
			self.doWriteAlpha((config.av.osd_alpha.value*self.dimmed/config.usage.show_infobar_dimming_speed.value))
		if self.dimmed > 0:
			self.DimmingTimer.start(70, True)
		else:
			self.DimmingTimer.stop()
			if self.__state == self.STATE_SHOWN:
				self.hide()
				if hasattr(self, "pvrStateDialog"):
					try:
						self.pvrStateDialog.hide()
					except:
						pass
			elif self.__state == self.STATE_HIDDEN and self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
				self.secondInfoBarScreen.hide()
				self.secondInfoBarWasShown = False
	def okButtonCheck(self):
		if config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
			self.openServiceList()
		else:
			self.toggleShow()
	def toggleShow(self):
		if self.__state == self.STATE_HIDDEN:
			self.showFirstInfoBar()
		else:
			self.showSecondInfoBar()
	def showSecondInfoBar(self):
		# Either open the EPG (configured), show the skinned second
		# infobar, or hide everything if already on the second infobar.
		if isStandardInfoBar(self) and config.usage.show_second_infobar.value == "EPG":
			if not(hasattr(self, "hotkeyGlobal") and self.hotkeyGlobal("info") != 0):
				self.showDefaultEPG()
		elif self.secondInfoBarScreen and config.usage.show_second_infobar.value and not self.secondInfoBarScreen.shown:
			self.show()
			self.secondInfoBarScreen.show()
			self.startHideTimer()
		else:
			self.hide()
			self.hideTimer.stop()
	def showFirstInfoBar(self):
		if self.__state == self.STATE_HIDDEN or self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
			self.secondInfoBarScreen and self.secondInfoBarScreen.hide()
			self.show()
		else:
			self.hide()
			self.hideTimer.stop()
	def lockShow(self):
		# Nested lock counter: while > 0 the infobar never auto-hides.
		self.__locked = self.__locked + 1
		if self.execing:
			self.show()
			self.hideTimer.stop()
	def unlockShow(self):
		self.__locked = self.__locked - 1
		if self.execing:
			self.startHideTimer()
class BufferIndicator(Screen):
	"""Small screen showing "Buffering N%" for streamed services.

	Shows itself only between service start and the gstreamer play-started
	event, and only while the reported buffer level is below 100%.
	"""
	def __init__(self, session):
		Screen.__init__(self, session)
		self["status"] = Label()
		self.mayShow = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evBuffering: self.bufferChanged,
				iPlayableService.evStart: self.__evStart,
				iPlayableService.evGstreamerPlayStarted: self.__evGstreamerPlayStarted,
			})
	def bufferChanged(self):
		if self.mayShow:
			service = self.session.nav.getCurrentService()
			info = service and service.info()
			if info:
				value = info.getInfo(iServiceInformation.sBuffer)
				if value and value != 100:
					self["status"].setText(_("Buffering %d%%") % value)
					if not self.shown:
						self.show()
	def __evStart(self):
		self.mayShow = True
		self.hide()
	def __evGstreamerPlayStarted(self):
		# Playback started: buffering display no longer relevant.
		self.mayShow = False
		self.hide()
class InfoBarBuffer():
	"""Mixin owning a (initially hidden) BufferIndicator dialog."""
	def __init__(self):
		self.bufferScreen = self.session.instantiateDialog(BufferIndicator)
		self.bufferScreen.hide()
class NumberZap(Screen):
	"""Dialog collecting a channel number typed on the remote.

	Closes (via timeout, OK, or a 5-digit number) with the matched
	(service, bouquet) pair, using the supplied search callback to
	resolve numbers as they are typed.
	"""
	def quit(self):
		self.Timer.stop()
		self.close()
	def keyOK(self):
		self.Timer.stop()
		self.close(self.service, self.bouquet)
	def handleServiceName(self):
		# Resolve the currently typed number and preview its service name.
		if self.searchNumber:
			self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
			self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
			if not self.startBouquet:
				self.startBouquet = self.bouquet
	def keyBlue(self):
		# Blue toggles between searching all bouquets and only the first.
		self.Timer.start(3000, True)
		if self.searchNumber:
			if self.startBouquet == self.bouquet:
				self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True)
			else:
				self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
			self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
	def keyNumberGlobal(self, number):
		# Each digit restarts a short confirm timer; 5 digits auto-zap.
		self.Timer.start(1000, True)
		self.numberString = self.numberString + str(number)
		self["number"].text = self["number_summary"].text = self.numberString
		self.field = self.numberString
		self.handleServiceName()
		if len(self.numberString) >= 5:
			self.keyOK()
	def __init__(self, session, number, searchNumberFunction = None):
		Screen.__init__(self, session)
		self.numberString = str(number)
		self.field = str(number)
		self.searchNumber = searchNumberFunction
		self.startBouquet = None
		self["channel"] = Label(_("Channel:"))
		self["number"] = Label(self.numberString)
		self["servicename"] = Label()
		self["channel_summary"] = StaticText(_("Channel:"))
		self["number_summary"] = StaticText(self.numberString)
		self["servicename_summary"] = StaticText()
		self.handleServiceName()
		self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ],
			{
				"cancel": self.quit,
				"ok": self.keyOK,
				"blue": self.keyBlue,
				"1": self.keyNumberGlobal,
				"2": self.keyNumberGlobal,
				"3": self.keyNumberGlobal,
				"4": self.keyNumberGlobal,
				"5": self.keyNumberGlobal,
				"6": self.keyNumberGlobal,
				"7": self.keyNumberGlobal,
				"8": self.keyNumberGlobal,
				"9": self.keyNumberGlobal,
				"0": self.keyNumberGlobal
			})
		self.Timer = eTimer()
		self.Timer.callback.append(self.keyOK)
		self.Timer.start(3000, True)
class InfoBarNumberZap:
	""" Handles an initial number for NumberZapping """
	def __init__(self):
		self["NumberActions"] = NumberActionMap( [ "NumberActions"],
			{
				"1": self.keyNumberGlobal,
				"2": self.keyNumberGlobal,
				"3": self.keyNumberGlobal,
				"4": self.keyNumberGlobal,
				"5": self.keyNumberGlobal,
				"6": self.keyNumberGlobal,
				"7": self.keyNumberGlobal,
				"8": self.keyNumberGlobal,
				"9": self.keyNumberGlobal,
				"0": self.keyNumberGlobal,
			})
	def keyNumberGlobal(self, number):
		# 0 is special: PiP swap if available, otherwise "previous service".
		if number == 0:
			if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
				self.pipDoHandle0Action()
			elif len(self.servicelist.history) > 1:
				self.checkTimeshiftRunning(self.recallPrevService)
		else:
			# Don't open the number dialog while timeshift is active.
			if self.has_key("TimeshiftActions") and self.timeshiftEnabled():
				ts = self.getTimeshift()
				if ts and ts.isTimeshiftActive():
					return
			self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)
	def recallPrevService(self, reply):
		if reply:
			self.servicelist.history_tv = []
			self.servicelist.history_radio = []
			self.servicelist.recallPrevService()
	def numberEntered(self, service = None, bouquet = None):
		# Callback from the NumberZap dialog.
		if service:
			self.selectAndStartService(service, bouquet)
	def searchNumberHelper(self, serviceHandler, num, bouquet):
		# Linear scan of one bouquet for a matching channel number.
		servicelist = serviceHandler.list(bouquet)
		if servicelist:
			serviceIterator = servicelist.getNext()
			while serviceIterator.valid():
				if num == serviceIterator.getChannelNum():
					return serviceIterator
				serviceIterator = servicelist.getNext()
		return None
	def searchNumber(self, number, firstBouquetOnly=False, bouquet=None):
		"""Resolve *number* to (service, bouquet), optionally searching
		across all bouquets when multi-bouquet mode is enabled."""
		bouquet = bouquet or self.servicelist.getRoot()
		service = None
		serviceHandler = eServiceCenter.getInstance()
		if not firstBouquetOnly:
			service = self.searchNumberHelper(serviceHandler, number, bouquet)
		if config.usage.multibouquet.value and not service:
			bouquet = self.servicelist.bouquet_root
			bouquetlist = serviceHandler.list(bouquet)
			if bouquetlist:
				bouquet = bouquetlist.getNext()
				while bouquet.valid():
					if bouquet.flags & eServiceReference.isDirectory:
						service = self.searchNumberHelper(serviceHandler, number, bouquet)
						if service:
							# Only zap to really playable entries (numbered
							# markers are allowed, plain markers/dirs not).
							playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
							if not playable:
								service = None
							break
						if config.usage.alternative_number_mode.value or firstBouquetOnly:
							break
					bouquet = bouquetlist.getNext()
		return service, bouquet
	def selectAndStartService(self, service, bouquet):
		if service and not service.flags & eServiceReference.isMarker:
			if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
				self.servicelist.clearPath()
				if self.servicelist.bouquet_root != bouquet:
					self.servicelist.enterPath(self.servicelist.bouquet_root)
				self.servicelist.enterPath(bouquet)
			self.servicelist.setCurrentSelection(service) #select the service in servicelist
			self.servicelist.zap(enable_pipzap = True)
			self.servicelist.correctChannelNumber()
			self.servicelist.startRoot = None
	def zapToNumber(self, number):
		service, bouquet = self.searchNumber(number)
		self.selectAndStartService(service, bouquet)
# One-shot flag: True until the user has been through the first-run
# channel selection (cleared and saved in InfoBarChannelSelection.firstRun).
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
""" ChannelSelection - handles the channelSelection dialog and the initial
channelChange actions which open the channelSelection dialog """
def __init__(self):
#instantiate forever
self.servicelist = self.session.instantiateDialog(ChannelSelection)
if config.misc.initialchannelselection.value:
self.onShown.append(self.firstRun)
self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
{
"keyUp": (self.keyUpCheck, self.getKeyUpHelptext),
"keyDown": (self.keyDownCheck, self.getKeyDownHelpText),
"keyLeft": (self.keyLeftCheck, self.getKeyLeftHelptext),
"keyRight": (self.keyRightCheck, self.getKeyRightHelptext),
"historyBack": (self.historyBack, _("Switch to previous channel in history")),
"historyNext": (self.historyNext, _("Switch to next channel in history")),
"openServiceList": (self.openServiceList, _("Open service list")),
"openhistorybrowser": (self.openHistoryBrowser, _("open history browser")),
#"opendevicemanager": (self.openDeviceManager, _("open device manager")),
#"openaroraplugins": (self.openAroraPlugins, _("open Arora Browser")),
"showPluginBrowser": (self.showPluginBrowser, _("Show the plugin browser..")),
"openBouquetList": (self.openBouquetList, _("open bouquetlist")),
"keyChannelUp": (self.keyChannelUpCheck, self.getKeyChannelUpHelptext),
"keyChannelDown": (self.keyChannelDownCheck, self.getKeyChannelDownHelptext),
})
def openHistoryBrowser(self):
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/ZapHistoryBrowser/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Zap-Historie Browser") or plugin.name == _("Zap-History Browser"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The Zap-History Browser plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openDeviceManager(self):
if fileExists("/usr/lib/enigma2/python/Plugins/SystemPlugins/DeviceManager/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Device Manager - Fast Mounted Remove"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The Device Manager plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openAroraPlugins(self):
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/WebBrowser/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Web Browser"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The WebBrowser is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def showPluginBrowser(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
from Screens.PluginBrowser import PluginBrowser
self.session.open(PluginBrowser)
def showTvChannelList(self, zap=False):
self.servicelist.setModeTv()
if zap:
self.servicelist.zap()
def showRadioChannelList(self, zap=False):
self.servicelist.setModeRadio()
if zap:
self.servicelist.zap()
def firstRun(self):
self.onShown.remove(self.firstRun)
config.misc.initialchannelselection.value = False
config.misc.initialchannelselection.save()
self.switchChannelDown()
def historyBack(self):
self.checkTimeshiftRunning(self.historyBackCheckTimeshiftCallback)
def historyBackCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyBack()
def historyNext(self):
self.checkTimeshiftRunning(self.historyNextCheckTimeshiftCallback)
def historyNextCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyNext()
def openBouquetList(self):
self.servicelist.showFavourites()
self.session.execDialog(self.servicelist)
def keyUpCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapDown()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelUp()
def keyDownCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapUp()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelDown()
def keyLeftCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelUp()
else:
self.zapUp()
def keyRightCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelDown()
else:
self.zapDown()
def keyChannelUpCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapDown()
else:
self.openServiceList()
def keyChannelDownCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapUp()
else:
self.openServiceList()
def getKeyUpHelptext(self):
if config.usage.oldstyle_zap_controls.value:
value = _("Switch to next channel")
else:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume up")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select previous channel")
return value
def getKeyDownHelpText(self):
if config.usage.oldstyle_zap_controls.value:
value = _("Switch to previous channel")
else:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume down")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select next channel")
return value
def getKeyLeftHelptext(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume down")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select previous channel")
else:
value = _("Switch to previous channel")
return value
def getKeyRightHelptext(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume up")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select next channel")
else:
value = _("Switch to next channel")
return value
def getKeyChannelUpHelptext(self):
return config.usage.zap_with_ch_buttons.value and _("Switch to next channel") or _("Open service list")
def getKeyChannelDownHelptext(self):
return config.usage.zap_with_ch_buttons.value and _("Switch to previous channel") or _("Open service list")
	def switchChannelUp(self):
		# Open the service list; unless configured to keep the cursor,
		# pre-move it one entry up first.
		if "keep" not in config.usage.servicelist_cursor_behavior.value:
			self.servicelist.moveUp()
		self.session.execDialog(self.servicelist)
	def switchChannelDown(self):
		# Open the service list; unless configured to keep the cursor,
		# pre-move it one entry down first.
		if "keep" not in config.usage.servicelist_cursor_behavior.value:
			self.servicelist.moveDown()
		self.session.execDialog(self.servicelist)
	def zapUp(self):
		# Zap to the previous service, skipping unplayable entries. When
		# quickzap_bouquet_change is set and the top of the bouquet is reached,
		# wrap into the previous bouquet.
		if self.servicelist.inBouquet():
			prev = self.servicelist.getCurrentSelection()
			if prev:
				prev = prev.toString()
				while True:
					if config.usage.quickzap_bouquet_change.value:
						if self.servicelist.atBegin():
							self.servicelist.prevBouquet()
					self.servicelist.moveUp()
					cur = self.servicelist.getCurrentSelection()
					if cur:
						if self.servicelist.dopipzap:
							# In PiP zap mode playability is decided by the PiP service.
							isPlayable = self.session.pip.isPlayableForPipService(cur)
						else:
							isPlayable = isPlayableForCur(cur)
					# Stop once we wrapped back to the start or found a playable service.
					if cur and (cur.toString() == prev or isPlayable):
						break
		else:
			self.servicelist.moveUp()
		self.servicelist.zap(enable_pipzap = True)
	def zapDown(self):
		# Zap to the next service, skipping unplayable entries. When
		# quickzap_bouquet_change is set and the end of the bouquet is reached,
		# wrap into the next bouquet.
		if self.servicelist.inBouquet():
			prev = self.servicelist.getCurrentSelection()
			if prev:
				prev = prev.toString()
				while True:
					if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
						self.servicelist.nextBouquet()
					else:
						self.servicelist.moveDown()
					cur = self.servicelist.getCurrentSelection()
					if cur:
						if self.servicelist.dopipzap:
							# In PiP zap mode playability is decided by the PiP service.
							isPlayable = self.session.pip.isPlayableForPipService(cur)
						else:
							isPlayable = isPlayableForCur(cur)
					# Stop once we wrapped back to the start or found a playable service.
					if cur and (cur.toString() == prev or isPlayable):
						break
		else:
			self.servicelist.moveDown()
		self.servicelist.zap(enable_pipzap = True)
	def openFavouritesList(self):
		# Jump to the favourites view, then open the service list.
		self.servicelist.showFavourites()
		self.openServiceList()
	def openServiceList(self):
		# Show the (persistent) service list dialog.
		self.session.execDialog(self.servicelist)
class InfoBarMenu:
	""" Handles a menu action, to open the (main) menu """
	def __init__(self):
		self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
			{
				"mainMenu": (self.mainMenu, _("Enter main menu...")),
			})
		self.session.infobar = None
	def mainMenu(self):
		# Build the main menu from the parsed menu XML (mdom) and open it.
		print "loading mainmenu XML..."
		menu = mdom.getroot()
		assert menu.tag == "menu", "root element in menu must be 'menu'!"
		self.session.infobar = self
		# so we can access the currently active infobar from screens opened from within the mainmenu
		# at the moment used from the SubserviceSelection
		self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu)
	def mainMenuClosed(self, *val):
		# Drop the back-reference once the menu is closed again.
		self.session.infobar = None
class InfoBarSimpleEventView:
	""" Opens the Eventview for now/next """
	def __init__(self):
		self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
			{
				"showEventInfo": (self.openEventView, _("Show event details")),
				"showEventInfoSingleEPG": (self.openEventView, _("Show event details")),
				"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
			})
	def showEventInfoWhenNotVisible(self):
		# With the infobar visible open the event view, otherwise just show the infobar.
		if self.shown:
			self.openEventView()
		else:
			self.toggleShow()
			return 1
	def openEventView(self):
		# Collect the "now" (index 0) and "next" (index 1) events of the
		# running service and show them in a simple event view.
		epglist = [ ]
		self.epglist = epglist
		service = self.session.nav.getCurrentService()
		ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		info = service.info()
		ptr=info.getEvent(0)
		if ptr:
			epglist.append(ptr)
		ptr=info.getEvent(1)
		if ptr:
			epglist.append(ptr)
		if epglist:
			self.session.open(EventViewSimple, epglist[0], ServiceReference(ref), self.eventViewCallback)
	def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
		# Swap now/next so repeated presses toggle the displayed event.
		epglist = self.epglist
		if len(epglist) > 1:
			tmp = epglist[0]
			epglist[0] = epglist[1]
			epglist[1] = tmp
			setEvent(epglist[0])
class SimpleServicelist:
	"""Minimal wrap-around cursor over a flat list of service entries.

	Each entry is expected to expose a ``ref`` attribute; the cursor index is
	kept in ``self.current``.
	"""
	def __init__(self, services):
		self.services = services
		self.length = len(services)
		self.current = 0
	def selectService(self, service):
		"""Place the cursor on the entry whose ref equals *service*.

		Returns True on success; on failure (empty list or no match) returns
		False and leaves the cursor out of range.
		"""
		if not self.length:
			self.current = -1
			return False
		for index, entry in enumerate(self.services):
			if entry.ref == service:
				self.current = index
				return True
		self.current = self.length  # cursor parked past the end when not found
		return False
	def nextService(self):
		"""Advance the cursor by one, wrapping to the first entry."""
		if not self.length:
			return
		advanced = self.current + 1
		self.current = advanced if advanced < self.length else 0
	def prevService(self):
		"""Move the cursor back by one, wrapping to the last entry."""
		if not self.length:
			return
		receded = self.current - 1
		self.current = receded if receded >= 0 else self.length - 1
	def currentService(self):
		"""Return the entry under the cursor, or None if out of range."""
		if self.length and self.current < self.length:
			return self.services[self.current]
		return None
class InfoBarEPG:
	""" EPG - Opens an EPG list when the showEPGList action fires """
	def __init__(self):
		# is_now_next: True while the event view shows live now/next data
		# (refreshed on evUpdatedEventInfo) rather than cached EPG data.
		self.is_now_next = False
		# Stack of EPG-related dialogs currently open (bouquet selector,
		# EPG selections, event view); used by closed() to unwind them.
		self.dlg_stack = [ ]
		self.bouquetSel = None
		self.eventView = None
		self.epglist = []
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
			})
		self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
			{
				"showEventInfo": (self.showDefaultEPG, _("Show EPG...")),
				"showEventInfoSingleEPG": (self.showSingleEPG, _("Show single service EPG")),
				"showEventInfoMultiEPG": (self.showMultiEPG, _("Show multi channel EPG")),
				#"showCurrentEvent": (self.openEventView, _("Show Current Info...")),
				#"showSingleCurrentEPG": (self.openSingleServiceEPG, _("Show single channel EPG...")),
				#"showBouquetEPG": (self.openMultiServiceEPG, _("Show Bouquet EPG...")),
				##"showEventInfoPlugin": (self.showEventInfoPlugins, _("List EPG functions...")),
				##"showEventGuidePlugin": (self.showEventGuidePlugins, _("List EPG functions...")),
				"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
			})
	def getEPGPluginList(self, getAll=False):
		# EVENTINFO plugins that do not expect a preselected event
		# ('selectedevent' parameter); optionally extended with the built-in views.
		pluginlist = [(p.name, boundFunction(self.runPlugin, p), p.path) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO) \
				if 'selectedevent' not in p.__call__.func_code.co_varnames] or []
		from Components.ServiceEventTracker import InfoBarCount
		if getAll or InfoBarCount == 1:
			pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG, "current_channel"))
			pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG, "multi_epg"))
			pluginlist.append((_("Current event EPG"), self.openEventView, "event_epg"))
		return pluginlist
	def showEventInfoWhenNotVisible(self):
		# With the infobar visible open the event view, otherwise just show the infobar.
		if self.shown:
			self.openEventView()
		else:
			self.toggleShow()
			return 1
	def zapToService(self, service, preview = False, zapback = False):
		# Zap (or preview-zap) to a service selected from an EPG screen,
		# navigating the service list into the EPG's bouquet first.
		if self.servicelist.startServiceRef is None:
			self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if service is not None:
			if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet?
				self.servicelist.clearPath()
				if self.servicelist.bouquet_root != self.epg_bouquet:
					self.servicelist.enterPath(self.servicelist.bouquet_root)
				self.servicelist.enterPath(self.epg_bouquet)
			self.servicelist.setCurrentSelection(service) #select the service in servicelist
		if not zapback or preview:
			self.servicelist.zap(enable_pipzap = True)
		if (self.servicelist.dopipzap or zapback) and not preview:
			self.servicelist.zapBack()
		if not preview:
			self.servicelist.startServiceRef = None
			self.servicelist.startRoot = None
	def getBouquetServices(self, bouquet):
		# Return the playable services of a bouquet as ServiceReference objects
		# (directories and markers are skipped).
		services = [ ]
		servicelist = eServiceCenter.getInstance().list(bouquet)
		if not servicelist is None:
			while True:
				service = servicelist.getNext()
				if not service.valid(): #check if end of list
					break
				if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
					continue
				services.append(ServiceReference(service))
		return services
	def openBouquetEPG(self, bouquet, withCallback=True):
		# Open the multi-service EPG for one bouquet; with a callback the
		# dialog takes part in the dlg_stack unwinding.
		services = self.getBouquetServices(bouquet)
		if services:
			self.epg_bouquet = bouquet
			if withCallback:
				self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, self.zapToService, None, self.changeBouquetCB))
			else:
				self.session.open(EPGSelection, services, self.zapToService, None, self.changeBouquetCB)
	def changeBouquetCB(self, direction, epg):
		# Bouquet+/- inside an open EPG: move the bouquet selector and feed
		# the new bouquet's services into the existing EPG screen.
		if self.bouquetSel:
			if direction > 0:
				self.bouquetSel.down()
			else:
				self.bouquetSel.up()
			bouquet = self.bouquetSel.getCurrent()
			services = self.getBouquetServices(bouquet)
			if services:
				self.epg_bouquet = bouquet
				epg.setServices(services)
	def closed(self, ret=False):
		# Callback when the topmost EPG dialog closes; on ret, propagate the
		# close down the remaining dialog stack.
		closedScreen = self.dlg_stack.pop()
		if self.bouquetSel and closedScreen == self.bouquetSel:
			self.bouquetSel = None
		elif self.eventView and closedScreen == self.eventView:
			self.eventView = None
		if ret:
			dlgs=len(self.dlg_stack)
			if dlgs > 0:
				self.dlg_stack[dlgs-1].close(dlgs > 1)
	def openMultiServiceEPG(self, withCallback=True):
		bouquets = self.servicelist.getBouquetList()
		if bouquets is None:
			cnt = 0
		else:
			cnt = len(bouquets)
		if config.usage.multiepg_ask_bouquet.value:
			self.openMultiServiceEPGAskBouquet(bouquets, cnt, withCallback)
		else:
			self.openMultiServiceEPGSilent(bouquets, cnt, withCallback)
	def openMultiServiceEPGAskBouquet(self, bouquets, cnt, withCallback):
		# Let the user choose a bouquet first (when more than one exists).
		if cnt > 1: # show bouquet list
			if withCallback:
				self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
				self.dlg_stack.append(self.bouquetSel)
			else:
				self.bouquetSel = self.session.open(BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
		elif cnt == 1:
			self.openBouquetEPG(bouquets[0][1], withCallback)
	def openMultiServiceEPGSilent(self, bouquets, cnt, withCallback):
		# Open the EPG directly on the bouquet currently shown in the service
		# list; bouquet+/- is still possible via a silent selector.
		root = self.servicelist.getRoot()
		rootstr = root.toCompareString()
		current = 0
		for bouquet in bouquets:
			if bouquet[1].toCompareString() == rootstr:
				break
			current += 1
		if current >= cnt:
			current = 0
		if cnt > 1: # create bouquet list for bouq+/-
			self.bouquetSel = SilentBouquetSelector(bouquets, True, self.servicelist.getBouquetNumOffset(root))
		if cnt >= 1:
			self.openBouquetEPG(root, withCallback)
	def changeServiceCB(self, direction, epg):
		# Channel+/- inside a single-service EPG: step through the bouquet.
		if self.serviceSel:
			if direction > 0:
				self.serviceSel.nextService()
			else:
				self.serviceSel.prevService()
			epg.setService(self.serviceSel.currentService())
	def SingleServiceEPGClosed(self, ret=False):
		self.serviceSel = None
	def openSingleServiceEPG(self):
		# Open the EPG for the currently selected service; when inside a
		# bouquet, enable channel+/- stepping through its services.
		ref = self.servicelist.getCurrentSelection()
		if ref:
			if self.servicelist.getMutableList(): # bouquet in channellist
				current_path = self.servicelist.getRoot()
				services = self.getBouquetServices(current_path)
				self.serviceSel = SimpleServicelist(services)
				if self.serviceSel.selectService(ref):
					self.epg_bouquet = current_path
					self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, self.zapToService, serviceChangeCB=self.changeServiceCB)
				else:
					self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
			else:
				self.session.open(EPGSelection, ref)
	def runPlugin(self, plugin):
		plugin(session = self.session, servicelist = self.servicelist)
	def showEventInfoPlugins(self):
		# Offer the EVENTINFO plugin list; with nothing installed fall back
		# to the single-service EPG.
		pluginlist = self.getEPGPluginList()
		if pluginlist:
			self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list=pluginlist, skin_name="EPGExtensionsList", reorderConfig="eventinfo_order")
		else:
			self.openSingleServiceEPG()
	def EventInfoPluginChosen(self, answer):
		if answer is not None:
			answer[1]()
	def openSimilarList(self, eventid, refstr):
		self.session.open(EPGSelection, refstr, None, eventid)
	def getNowNext(self):
		# Refresh self.epglist with the now (0) and next (1) events of the
		# currently running service.
		epglist = [ ]
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		ptr = info and info.getEvent(0)
		if ptr:
			epglist.append(ptr)
		ptr = info and info.getEvent(1)
		if ptr:
			epglist.append(ptr)
		self.epglist = epglist
	def __evEventInfoChanged(self):
		# Live update of an open now/next event view when event info changes.
		if self.is_now_next and len(self.dlg_stack) == 1:
			self.getNowNext()
			if self.eventView and self.epglist:
				self.eventView.setEvent(self.epglist[0])
	def showDefaultEPG(self):
		self.openEventView()
	def showSingleEPG(self):
		self.openSingleServiceEPG()
	def showMultiEPG(self):
		self.openMultiServiceEPG()
	def openEventView(self):
		# Open the event view; with several infobars a plain view is used,
		# otherwise fall back from live now/next to cached EPG data, and to
		# the multi EPG when no event data exists at all.
		from Components.ServiceEventTracker import InfoBarCount
		if InfoBarCount > 1:
			epglist = [ ]
			self.epglist = epglist
			service = self.session.nav.getCurrentService()
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			info = service.info()
			ptr=info.getEvent(0)
			if ptr:
				epglist.append(ptr)
			ptr=info.getEvent(1)
			if ptr:
				epglist.append(ptr)
			if epglist:
				self.session.open(EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
		else:
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			self.getNowNext()
			epglist = self.epglist
			if not epglist:
				# No live event info: query the EPG cache for current/next.
				self.is_now_next = False
				epg = eEPGCache.getInstance()
				ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
				if ptr:
					epglist.append(ptr)
					ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
					if ptr:
						epglist.append(ptr)
			else:
				self.is_now_next = True
			if epglist:
				self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
				self.dlg_stack.append(self.eventView)
		if not epglist:
			print "no epg for the service avail.. so we show multiepg instead of eventinfo"
			self.openMultiServiceEPG(False)
	def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
		# Swap now/next so repeated presses toggle the displayed event.
		epglist = self.epglist
		if len(epglist) > 1:
			tmp = epglist[0]
			epglist[0]=epglist[1]
			epglist[1]=tmp
			setEvent(epglist[0])
class InfoBarRdsDecoder:
	"""provides RDS and Rass support/display"""
	def __init__(self):
		self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
		self.session.instantiateSummaryDialog(self.rds_display)
		self.rass_interactive = None
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evEnd: self.__serviceStopped,
				iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
			})
		self["RdsActions"] = ActionMap(["InfobarRdsActions"],
		{
			"startRassInteractive": self.startRassInteractive
		},-1)
		# Action stays disabled until the RDS display reports that Rass
		# interactive mode is available.
		self["RdsActions"].setEnabled(False)
		self.onLayoutFinish.append(self.rds_display.show)
		self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)
	def RassInteractivePossibilityChanged(self, state):
		self["RdsActions"].setEnabled(state)
	def RassSlidePicChanged(self):
		# Show the new Rass slide picture, unless the interactive screen is open.
		if not self.rass_interactive:
			service = self.session.nav.getCurrentService()
			decoder = service and service.rdsDecoder()
			if decoder:
				decoder.showRassSlidePicture()
	def __serviceStopped(self):
		# Close the interactive screen when the service ends; clear the
		# reference first so RassInteractiveClosed sees it as already gone.
		if self.rass_interactive is not None:
			rass_interactive = self.rass_interactive
			self.rass_interactive = None
			rass_interactive.close()
	def startRassInteractive(self):
		self.rds_display.hide()
		self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)
	def RassInteractiveClosed(self, *val):
		# Restore the slide picture and the RDS display after interactive mode.
		if self.rass_interactive is not None:
			self.rass_interactive = None
			self.RassSlidePicChanged()
		self.rds_display.show()
class InfoBarSeek:
	"""handles actions like seeking, pause"""
	# Seek states are tuples: (paused, fast-forward/backward speed,
	# slow-motion divisor, display string).
	SEEK_STATE_PLAY = (0, 0, 0, ">")
	SEEK_STATE_PAUSE = (1, 0, 0, "||")
	SEEK_STATE_EOF = (1, 0, 0, "END")
	def __init__(self, actionmap = "InfobarSeekActions"):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evEOF: self.__evEOF,
				iPlayableService.evSOF: self.__evSOF,
			})
		self.fast_winding_hint_message_showed = False
		class InfoBarSeekActionMap(HelpableActionMap):
			# Intercepts dynamic "seek:<seconds>" and "seekdef:<key>" actions
			# before falling back to normal action dispatch.
			def __init__(self, screen, *args, **kwargs):
				HelpableActionMap.__init__(self, screen, *args, **kwargs)
				self.screen = screen
			def action(self, contexts, action):
				print "action:", action
				if action[:5] == "seek:":
					time = int(action[5:])
					self.screen.doSeekRelative(time * 90000)
					return 1
				elif action[:8] == "seekdef:":
					key = int(action[8:])
					# Map number keys 1-9 to the three user-defined skip widths
					# (left/ignored/right for each row of the keypad).
					time = (-config.seek.selfdefined_13.value, False, config.seek.selfdefined_13.value,
						-config.seek.selfdefined_46.value, False, config.seek.selfdefined_46.value,
						-config.seek.selfdefined_79.value, False, config.seek.selfdefined_79.value)[key-1]
					self.screen.doSeekRelative(time * 90000)
					return 1
				else:
					return HelpableActionMap.action(self, contexts, action)
		self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
			{
				"playpauseService": (self.playpauseService, _("Pauze/Continue playback")),
				"pauseService": (self.pauseService, _("Pause playback")),
				"unPauseService": (self.unPauseService, _("Continue playback")),
				"okButton": (self.okButton, _("Continue playback")),
				"seekFwd": (self.seekFwd, _("Seek forward")),
				"seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
				"seekBack": (self.seekBack, _("Seek backward")),
				"seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
				"jumpPreviousMark": (self.seekPreviousMark, _("Jump to previous marked position")),
				"jumpNextMark": (self.seekNextMark, _("Jump to next marked position")),
			}, prio=-1)
			# give them a little more priority to win over color buttons
		self["SeekActions"].setEnabled(False)
		self.seekstate = self.SEEK_STATE_PLAY
		self.lastseekstate = self.SEEK_STATE_PLAY
		self.onPlayStateChanged = [ ]
		self.lockedBecauseOfSkipping = False
		self.__seekableStatusChanged()
	def makeStateForward(self, n):
		return (0, n, 0, ">> %dx" % n)
	def makeStateBackward(self, n):
		return (0, -n, 0, "<< %dx" % n)
	def makeStateSlowMotion(self, n):
		return (0, 0, n, "/%d" % n)
	def isStateForward(self, state):
		return state[1] > 1
	def isStateBackward(self, state):
		return state[1] < 0
	def isStateSlowMotion(self, state):
		return state[1] == 0 and state[2] > 1
	def getHigher(self, n, lst):
		# Smallest entry of lst greater than n, or False if none.
		for x in lst:
			if x > n:
				return x
		return False
	def getLower(self, n, lst):
		# Largest entry of lst smaller than n, or False if none.
		lst = lst[:]
		lst.reverse()
		for x in lst:
			if x < n:
				return x
		return False
	def showAfterSeek(self):
		if isinstance(self, InfoBarShowHide):
			self.doShow()
	def up(self):
		pass
	def down(self):
		pass
	def getSeek(self):
		# Return the service's seek interface when currently seekable, else None.
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		seek = service.seek()
		if seek is None or not seek.isCurrentlySeekable():
			return None
		return seek
	def isSeekable(self):
		# The standard infobar is only "seekable" while timeshift is enabled.
		if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
			return False
		return True
	def __seekableStatusChanged(self):
		# print "seekable status changed!"
		if not self.isSeekable():
			self["SeekActions"].setEnabled(False)
			# print "not seekable, return to play"
			self.setSeekState(self.SEEK_STATE_PLAY)
		else:
			self["SeekActions"].setEnabled(True)
			# print "seekable"
	def __serviceStarted(self):
		self.fast_winding_hint_message_showed = False
		self.setSeekState(self.SEEK_STATE_PLAY)
		self.__seekableStatusChanged()
	def setSeekState(self, state):
		# Apply the requested seek state to the service, downgrading it when
		# the service cannot pause / fast-forward / slow-motion; notifies
		# onPlayStateChanged listeners with the state actually reached.
		service = self.session.nav.getCurrentService()
		if service is None:
			return False
		if not self.isSeekable():
			if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
				state = self.SEEK_STATE_PLAY
		pauseable = service.pause()
		if pauseable is None:
			print "not pauseable."
			state = self.SEEK_STATE_PLAY
		self.seekstate = state
		if pauseable is not None:
			if self.seekstate[0]:
				print "resolved to PAUSE"
				pauseable.pause()
			elif self.seekstate[1]:
				if not pauseable.setFastForward(self.seekstate[1]):
					print "resolved to FAST FORWARD"
				else:
					self.seekstate = self.SEEK_STATE_PLAY
					print "FAST FORWARD not possible: resolved to PLAY"
			elif self.seekstate[2]:
				if not pauseable.setSlowMotion(self.seekstate[2]):
					print "resolved to SLOW MOTION"
				else:
					self.seekstate = self.SEEK_STATE_PAUSE
					print "SLOW MOTION not possible: resolved to PAUSE"
			else:
				print "resolved to PLAY"
				pauseable.unpause()
		for c in self.onPlayStateChanged:
			c(self.seekstate)
		self.checkSkipShowHideLock()
		if hasattr(self, "ScreenSaverTimerStart"):
			self.ScreenSaverTimerStart()
		return True
	def playpauseService(self):
		# Toggle between pause and play.
		if self.seekstate != self.SEEK_STATE_PLAY:
			self.unPauseService()
		else:
			self.pauseService()
	def okButton(self):
		# OK while seeking/paused resumes or re-pauses; unhandled (0) in play.
		if self.seekstate == self.SEEK_STATE_PLAY:
			return 0
		elif self.seekstate == self.SEEK_STATE_PAUSE:
			self.pauseService()
		else:
			self.unPauseService()
	def pauseService(self):
		# A second "pause" press behaves per config.seek.on_pause:
		# resume, single-frame step, or restore the last seek state.
		if self.seekstate == self.SEEK_STATE_PAUSE:
			if config.seek.on_pause.value == "play":
				self.unPauseService()
			elif config.seek.on_pause.value == "step":
				self.doSeekRelative(1)
			elif config.seek.on_pause.value == "last":
				self.setSeekState(self.lastseekstate)
				self.lastseekstate = self.SEEK_STATE_PLAY
		else:
			if self.seekstate != self.SEEK_STATE_EOF:
				self.lastseekstate = self.seekstate
			self.setSeekState(self.SEEK_STATE_PAUSE)
	def unPauseService(self):
		print "unpause"
		if self.seekstate == self.SEEK_STATE_PLAY:
			return 0
		self.setSeekState(self.SEEK_STATE_PLAY)
	def doSeek(self, pts):
		# Absolute seek to a position in PTS (90kHz ticks).
		seekable = self.getSeek()
		if seekable is None:
			return
		seekable.seekTo(pts)
	def doSeekRelative(self, pts):
		# Relative seek by pts; leaving EOF restores pause or play first.
		seekable = self.getSeek()
		if seekable is None:
			return
		prevstate = self.seekstate
		if self.seekstate == self.SEEK_STATE_EOF:
			if prevstate == self.SEEK_STATE_PAUSE:
				self.setSeekState(self.SEEK_STATE_PAUSE)
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
		if abs(pts) > 100 and config.usage.show_infobar_on_skip.value:
			self.showAfterSeek()
	def seekFwd(self):
		# Step up through forward speeds / reduce backward or slow-motion speed.
		seek = self.getSeek()
		if seek and not (seek.isCurrentlySeekable() & 2):
			if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
				self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
				self.fast_winding_hint_message_showed = True
				return
			return 0 # trade as unhandled action
		if self.seekstate == self.SEEK_STATE_PLAY:
			self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
		elif self.seekstate == self.SEEK_STATE_PAUSE:
			if len(config.seek.speeds_slowmotion.value):
				self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.value[-1]))
			else:
				self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
		elif self.seekstate == self.SEEK_STATE_EOF:
			pass
		elif self.isStateForward(self.seekstate):
			speed = self.seekstate[1]
			if self.seekstate[2]:
				speed /= self.seekstate[2]
			speed = self.getHigher(speed, config.seek.speeds_forward.value) or config.seek.speeds_forward.value[-1]
			self.setSeekState(self.makeStateForward(speed))
		elif self.isStateBackward(self.seekstate):
			speed = -self.seekstate[1]
			if self.seekstate[2]:
				speed /= self.seekstate[2]
			speed = self.getLower(speed, config.seek.speeds_backward.value)
			if speed:
				self.setSeekState(self.makeStateBackward(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		elif self.isStateSlowMotion(self.seekstate):
			speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.value) or config.seek.speeds_slowmotion.value[0]
			self.setSeekState(self.makeStateSlowMotion(speed))
	def seekBack(self):
		# Step up through backward speeds / reduce forward or increase
		# slow-motion divisor; single-frame step back while paused.
		seek = self.getSeek()
		if seek and not (seek.isCurrentlySeekable() & 2):
			if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
				self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
				self.fast_winding_hint_message_showed = True
				return
			return 0 # trade as unhandled action
		seekstate = self.seekstate
		if seekstate == self.SEEK_STATE_PLAY:
			self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
		elif seekstate == self.SEEK_STATE_EOF:
			self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
			self.doSeekRelative(-6)
		elif seekstate == self.SEEK_STATE_PAUSE:
			self.doSeekRelative(-1)
		elif self.isStateForward(seekstate):
			speed = seekstate[1]
			if seekstate[2]:
				speed /= seekstate[2]
			speed = self.getLower(speed, config.seek.speeds_forward.value)
			if speed:
				self.setSeekState(self.makeStateForward(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		elif self.isStateBackward(seekstate):
			speed = -seekstate[1]
			if seekstate[2]:
				speed /= seekstate[2]
			speed = self.getHigher(speed, config.seek.speeds_backward.value) or config.seek.speeds_backward.value[-1]
			self.setSeekState(self.makeStateBackward(speed))
		elif self.isStateSlowMotion(seekstate):
			speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.value)
			if speed:
				self.setSeekState(self.makeStateSlowMotion(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PAUSE)
	def seekFwdManual(self):
		self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
	def fwdSeekTo(self, minutes):
		print "Seek", minutes, "minutes forward"
		self.doSeekRelative(minutes * 60 * 90000)
	def seekBackManual(self):
		self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
	def rwdSeekTo(self, minutes):
		print "rwdSeekTo"
		self.doSeekRelative(-minutes * 60 * 90000)
	def checkSkipShowHideLock(self):
		# Keep the infobar locked visible while not in plain play (if configured).
		wantlock = self.seekstate != self.SEEK_STATE_PLAY
		if config.usage.show_infobar_on_skip.value:
			if self.lockedBecauseOfSkipping and not wantlock:
				self.unlockShow()
				self.lockedBecauseOfSkipping = False
			if wantlock and not self.lockedBecauseOfSkipping:
				self.lockShow()
				self.lockedBecauseOfSkipping = True
	def calcRemainingTime(self):
		# Remaining time (ms) at the current playback speed, or False.
		# NOTE: the local "len" deliberately mirrors the seek API's
		# (error, value) tuples and shadows the builtin within this method.
		seekable = self.getSeek()
		if seekable is not None:
			len = seekable.getLength()
			try:
				tmp = self.cueGetEndCutPosition()
				if tmp:
					len = (False, tmp)
			except:
				pass
			pos = seekable.getPlayPosition()
			speednom = self.seekstate[1] or 1
			speedden = self.seekstate[2] or 1
			if not len[0] and not pos[0]:
				if len[1] <= pos[1]:
					return 0
				time = (len[1] - pos[1])*speedden/(90*speednom)
				return time
		return False
	def __evEOF(self):
		if self.seekstate == self.SEEK_STATE_EOF:
			return
		# if we are seeking forward, we try to end up ~1s before the end, and pause there.
		seekstate = self.seekstate
		if self.seekstate != self.SEEK_STATE_PAUSE:
			self.setSeekState(self.SEEK_STATE_EOF)
		if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
			seekable = self.getSeek()
			if seekable is not None:
				seekable.seekTo(-1)
		if seekstate == self.SEEK_STATE_PLAY: # regular EOF
			self.doEofInternal(True)
		else:
			self.doEofInternal(False)
	def doEofInternal(self, playing):
		pass # Defined in subclasses
	def __evSOF(self):
		# Start-of-file reached while rewinding: restart playback from 0.
		self.setSeekState(self.SEEK_STATE_PLAY)
		self.doSeek(0)
	# This is needed, because some Mediaplayer use InfoBarSeek but not InfoBarCueSheetSupport
	def seekPreviousMark(self):
		if isinstance(self, InfoBarCueSheetSupport):
			self.jumpPreviousMark()
	def seekNextMark(self):
		if isinstance(self, InfoBarCueSheetSupport):
			self.jumpNextMark()
from Screens.PVRState import PVRState, TimeshiftState
class InfoBarPVRState:
	# Mixin: shows a small PVR state dialog (the seek-state display string)
	# on top of the infobar; expects InfoBarSeek attributes on the host.
	def __init__(self, screen=PVRState, force_show = False):
		self.onPlayStateChanged.append(self.__playStateChanged)
		self.pvrStateDialog = self.session.instantiateDialog(screen)
		self.onShow.append(self._mayShow)
		self.onHide.append(self.pvrStateDialog.hide)
		self.force_show = force_show
	def _mayShow(self):
		# Only show while the infobar is up and we are not in plain play.
		if self.shown and self.seekstate != self.SEEK_STATE_PLAY:
			self.pvrStateDialog.show()
	def __playStateChanged(self, state):
		playstateString = state[3]
		self.pvrStateDialog["state"].setText(playstateString)
		# if we return into "PLAY" state, ensure that the dialog gets hidden if there will be no infobar displayed
		if not config.usage.show_infobar_on_skip.value and self.seekstate == self.SEEK_STATE_PLAY and not self.force_show:
			self.pvrStateDialog.hide()
		else:
			self._mayShow()
class TimeshiftLive(Screen):
	# Marker overlay shown while timeshift records but live TV is watched.
	def __init__(self, session):
		Screen.__init__(self, session)
class InfoBarTimeshiftState(InfoBarPVRState):
	def __init__(self):
		InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show = True)
		self.timeshiftLiveScreen = self.session.instantiateDialog(TimeshiftLive)
		self.onHide.append(self.timeshiftLiveScreen.hide)
		self.secondInfoBarScreen and self.secondInfoBarScreen.onShow.append(self.timeshiftLiveScreen.hide)
		self.timeshiftLiveScreen.hide()
		self.__hideTimer = eTimer()
		self.__hideTimer.callback.append(self.__hideTimeshiftState)
		self.onFirstExecBegin.append(self.pvrStateDialog.show)
	def _mayShow(self):
		# While timeshifting, show either the PVR state dialog (when playing
		# time-shifted) or the "timeshift live" hint; auto-hide both after
		# the configured infobar timeout.
		if self.timeshiftEnabled():
			if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
				self.secondInfoBarScreen.hide()
			if self.timeshiftActivated():
				self.pvrStateDialog.show()
				self.timeshiftLiveScreen.hide()
			elif self.showTimeshiftState:
				self.pvrStateDialog.hide()
				self.timeshiftLiveScreen.show()
				self.showTimeshiftState = False
			if self.seekstate == self.SEEK_STATE_PLAY and config.usage.infobar_timeout.index and (self.pvrStateDialog.shown or self.timeshiftLiveScreen.shown):
				self.__hideTimer.startLongTimer(config.usage.infobar_timeout.index)
		else:
			self.__hideTimeshiftState()
	def __hideTimeshiftState(self):
		self.pvrStateDialog.hide()
		self.timeshiftLiveScreen.hide()
class InfoBarShowMovies:
	# Thin action map: binds the movieList/up/down keys to the host's
	# showMovies/up/down handlers. The actual "movie list" behaviour is
	# defined by the class using this mixin.
	def __init__(self):
		self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions",
			{
				"movieList": (self.showMovies, _("Open the movie list")),
				"up": (self.up, _("Open the movie list")),
				"down": (self.down, _("Open the movie list"))
			})
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presses pause again          FILE    record      PLAY               enable                  disable              enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
# the service loses its "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
class InfoBarTimeshift:
	"""Timeshift mixin.

	Handles enabling/disabling the timeshift recording ("enabled"),
	jumping into playback from the timeshift buffer ("activated"), and
	optionally saving the buffer as a regular movie file.  See the state
	table in the comment block above this class.
	"""
	# Global kill switch: when True, getTimeshift() always returns None,
	# effectively disabling all timeshift functionality.
	ts_disabled = False
	def __init__(self):
		self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
			{
				"timeshiftStart": (self.startTimeshift, _("Start timeshift")), # the "yellow key"
				"timeshiftStop": (self.stopTimeshift, _("Stop timeshift")) # currently undefined :), probably 'TV'
			}, prio=1)
		self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
			{
				"timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
				"timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
			}, prio=-1) # priority over record
		self["TimeshiftActivateActions"].setEnabled(False)
		# one-shot timer used to start rewinding shortly after activation
		self.ts_rewind_timer = eTimer()
		self.ts_rewind_timer.callback.append(self.rewindService)
		# timer for the configurable automatic (delayed) timeshift start
		self.ts_start_delay_timer = eTimer()
		self.ts_start_delay_timer.callback.append(self.startTimeshiftWithoutPause)
		# fires at the end of the current event when "save only current event" is set
		self.ts_current_event_timer = eTimer()
		self.ts_current_event_timer.callback.append(self.saveTimeshiftFileForEvent)
		self.save_timeshift_file = False
		self.timeshift_was_activated = False
		self.showTimeshiftState = False
		self.save_timeshift_only_current_event = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
				iPlayableService.evEnd: self.__serviceEnd
			})
	def getTimeshift(self):
		"""Return the timeshift interface of the current service, or None."""
		if self.ts_disabled:
			return None
		service = self.session.nav.getCurrentService()
		return service and service.timeshift()
	def timeshiftEnabled(self):
		# "enabled" == the timeshift recording is running (buffer is filling)
		ts = self.getTimeshift()
		return ts and ts.isTimeshiftEnabled()
	def timeshiftActivated(self):
		# "activated" == playback currently comes from the timeshift buffer
		ts = self.getTimeshift()
		return ts and ts.isTimeshiftActive()
	def startTimeshift(self, pauseService = True):
		"""Enable the timeshift recording; optionally pause live playback.

		Returns 0 when no timeshift interface is available.
		"""
		print "enable timeshift"
		ts = self.getTimeshift()
		if ts is None:
			# only show the error when triggered manually, not by the delay timer
			if not pauseService and not int(config.usage.timeshift_start_delay.value):
				self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, simple = True)
			print "no ts interface"
			return 0
		if ts.isTimeshiftEnabled():
			print "hu, timeshift already enabled?"
		else:
			if not ts.startTimeshift():
				# we remove the "relative time" for now.
				#self.pvrStateDialog["timeshift"].setRelative(time.time())
				if pauseService:
					# PAUSE.
					#self.setSeekState(self.SEEK_STATE_PAUSE)
					self.activateTimeshiftEnd(False)
					self.showTimeshiftState = True
				else:
					self.showTimeshiftState = False
				# enable the "TimeshiftEnableActions", which will override
				# the startTimeshift actions
				self.__seekableStatusChanged()
				# get current timeshift filename and calculate new
				self.save_timeshift_file = False
				self.save_timeshift_in_movie_dir = False
				self.setCurrentEventTimer()
				self.current_timeshift_filename = ts.getTimeshiftFilename()
				self.new_timeshift_filename = self.generateNewTimeshiftFileName()
			else:
				print "timeshift failed"
	def startTimeshiftWithoutPause(self):
		# callback of ts_start_delay_timer (automatic timeshift start)
		self.startTimeshift(False)
	def stopTimeshift(self):
		"""Stop timeshift, asking for confirmation/saving when appropriate."""
		ts = self.getTimeshift()
		if ts and ts.isTimeshiftEnabled():
			if int(config.usage.timeshift_start_delay.value):
				# automatic timeshift: just drop back to live, keep recording
				ts.switchToLive()
			else:
				self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
		else:
			return 0
	def stopTimeshiftcheckTimeshiftRunningCallback(self, answer):
		# answer is truthy when the user confirmed stopping timeshift
		ts = self.getTimeshift()
		if answer and ts:
			ts.stopTimeshift()
			self.pvrStateDialog.hide()
			self.setCurrentEventTimer()
			# disable actions
			self.__seekableStatusChanged()
	# activates timeshift, and seeks to (almost) the end
	def activateTimeshiftEnd(self, back = True):
		self.showTimeshiftState = True
		ts = self.getTimeshift()
		print "activateTimeshiftEnd"
		if ts is None:
			return
		if ts.isTimeshiftActive():
			print "!! activate timeshift called - but shouldn't this be a normal pause?"
			self.pauseService()
		else:
			print "play, ..."
			ts.activateTimeshift() # activate timeshift will automatically pause
			self.setSeekState(self.SEEK_STATE_PAUSE)
			seekable = self.getSeek()
			if seekable is not None:
				seekable.seekTo(-90000) # seek approx. 1 sec before end
			self.timeshift_was_activated = True
		if back:
			# start rewinding shortly after activation (one-shot, 200 ms)
			self.ts_rewind_timer.start(200, 1)
	def rewindService(self):
		# ts_rewind_timer callback: enter fast-backward at the configured speed
		self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
	# generates only filename without path
	def generateNewTimeshiftFileName(self):
		"""Build the target movie filename (no path) for a saved timeshift,
		honouring the configured filename composition and ASCII option."""
		name = "timeshift record"
		info = { }
		self.getProgramInfoAndEvent(info, name)
		serviceref = info["serviceref"]
		service_name = ""
		if isinstance(serviceref, eServiceReference):
			service_name = ServiceReference(serviceref).getServiceName()
		begin_date = strftime("%Y%m%d %H%M", localtime(time()))
		filename = begin_date + " - " + service_name
		if config.recording.filename_composition.value == "veryshort":
			filename = service_name + " - " + begin_date
		elif config.recording.filename_composition.value == "short":
			filename = strftime("%Y%m%d", localtime(time())) + " - " + info["name"]
		elif config.recording.filename_composition.value == "long":
			filename += " - " + info["name"] + " - " + info["description"]
		else:
			filename += " - " + info["name"] # standard
		if config.recording.ascii_filenames.value:
			filename = ASCIItranslit.legacyEncode(filename)
		print "New timeshift filename: ", filename
		return filename
	# same as activateTimeshiftEnd, but pauses afterwards.
	def activateTimeshiftEndAndPause(self):
		print "activateTimeshiftEndAndPause"
		#state = self.seekstate
		self.activateTimeshiftEnd(False)
	def callServiceStarted(self):
		# public wrapper so other mixins can trigger the (name-mangled) handler
		self.__serviceStarted()
	def __seekableStatusChanged(self):
		# TimeshiftActivateActions are only active while timeshift is enabled
		# but playback is live (not seekable); SeekActions need a seekable service.
		self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled())
		state = self.getSeek() is not None and self.timeshiftEnabled()
		self["SeekActions"].setEnabled(state)
		if not state:
			self.setSeekState(self.SEEK_STATE_PLAY)
		self.restartSubtitle()
	def __serviceStarted(self):
		self.pvrStateDialog.hide()
		self.__seekableStatusChanged()
		if self.ts_start_delay_timer.isActive():
			self.ts_start_delay_timer.stop()
		# re-arm the automatic timeshift start for the new service, if configured
		if int(config.usage.timeshift_start_delay.value):
			self.ts_start_delay_timer.start(int(config.usage.timeshift_start_delay.value) * 1000, True)
	def checkTimeshiftRunning(self, returnFunction):
		"""Ask the user whether to stop (and possibly save) a running
		timeshift, then invoke returnFunction(bool)."""
		if self.timeshiftEnabled() and config.usage.check_timeshift.value and self.timeshift_was_activated:
			message = _("Stop timeshift?")
			if not self.save_timeshift_file:
				choice = [(_("Yes"), "stop"), (_("No"), "continue"), (_("Yes and save"), "save"), (_("Yes and save in movie dir"), "save_movie")]
			else:
				choice = [(_("Yes"), "stop"), (_("No"), "continue")]
				message += "\n" + _("Reminder, you have chosen to save timeshift file.")
				if self.save_timeshift_only_current_event:
					remaining = self.currentEventTime()
					if remaining > 0:
						message += "\n" + _("The %d min remaining before the end of the event.") % abs(remaining / 60)
			self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice)
		else:
			returnFunction(True)
	def checkTimeshiftRunningCallback(self, returnFunction, answer):
		# answer is one of "stop"/"continue"/"save"/"save_movie" (or None)
		if answer:
			if "movie" in answer:
				self.save_timeshift_in_movie_dir = True
			if "save" in answer:
				self.save_timeshift_file = True
				ts = self.getTimeshift()
				if ts:
					ts.saveTimeshiftFile()
					del ts
			if "continue" not in answer:
				self.saveTimeshiftFiles()
		returnFunction(answer and answer != "continue")
	# renames/moves timeshift files if requested
	def __serviceEnd(self):
		self.saveTimeshiftFiles()
		self.setCurrentEventTimer()
		self.timeshift_was_activated = False
	def saveTimeshiftFiles(self):
		"""Move the timeshift file (plus .sc/.cuts side files) to its final
		recording name/location, when saving was requested."""
		if self.save_timeshift_file and self.current_timeshift_filename and self.new_timeshift_filename:
			if config.usage.timeshift_path.value and not self.save_timeshift_in_movie_dir:
				dirname = config.usage.timeshift_path.value
			else:
				dirname = defaultMoviePath()
			filename = getRecordingFilename(self.new_timeshift_filename, dirname) + ".ts"
			fileList = []
			fileList.append((self.current_timeshift_filename, filename))
			if fileExists(self.current_timeshift_filename + ".sc"):
				fileList.append((self.current_timeshift_filename + ".sc", filename + ".sc"))
			if fileExists(self.current_timeshift_filename + ".cuts"):
				fileList.append((self.current_timeshift_filename + ".cuts", filename + ".cuts"))
			moveFiles(fileList)
		self.save_timeshift_file = False
		self.setCurrentEventTimer()
	def currentEventTime(self):
		"""Return the remaining seconds of the currently playing EPG event
		(0 when no reference/event is available)."""
		remaining = 0
		ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if ref:
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(ref, -1, 0)
			if event:
				now = int(time())
				start = event.getBeginTime()
				duration = event.getDuration()
				end = start + duration
				remaining = end - now
		return remaining
	def saveTimeshiftFileForEvent(self):
		# ts_current_event_timer callback: the watched event just ended
		if self.timeshiftEnabled() and self.save_timeshift_only_current_event and self.timeshift_was_activated and self.save_timeshift_file:
			message = _("Current event is over.\nSelect an option to save the timeshift file.")
			choice = [(_("Save and stop timeshift"), "save"), (_("Save and restart timeshift"), "restart"), (_("Don't save and stop timeshift"), "stop"), (_("Do nothing"), "continue")]
			self.session.openWithCallback(self.saveTimeshiftFileForEventCallback, MessageBox, message, simple = True, list = choice, timeout=15)
	def saveTimeshiftFileForEventCallback(self, answer):
		# answer is one of "save"/"restart"/"stop"/"continue" (or None on timeout)
		self.save_timeshift_only_current_event = False
		if answer:
			ts = self.getTimeshift()
			if ts and answer in ("save", "restart", "stop"):
				self.stopTimeshiftcheckTimeshiftRunningCallback(True)
				if answer in ("save", "restart"):
					ts.saveTimeshiftFile()
					del ts
					self.saveTimeshiftFiles()
				if answer == "restart":
					# kick off a fresh timeshift shortly after stopping
					self.ts_start_delay_timer.start(1000, True)
			self.save_timeshift_file = False
			self.save_timeshift_in_movie_dir = False
	def setCurrentEventTimer(self, duration=0):
		"""Arm (duration > 0) or disarm the end-of-event save timer."""
		self.ts_current_event_timer.stop()
		self.save_timeshift_only_current_event = False
		if duration > 0:
			self.save_timeshift_only_current_event = True
			self.ts_current_event_timer.startLongTimer(duration)
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
	"""Mixin providing the "extensions" menu.

	Other mixins register entries via addExtension(); an entry is a tuple
	(title_fnc, action_fnc, available_fnc).  showExtensionSelection()
	builds a ChoiceBox from all currently available entries, assigning
	number/colour keys where requested.
	"""
	EXTENSION_SINGLE = 0
	EXTENSION_LIST = 1
	def __init__(self):
		self.list = []
		self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
			{
				"extensions": (self.showExtensionSelection, _("Show extensions...")),
			}, 1) # lower priority
	def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
		# extension: (title_fnc, action_fnc, available_fnc) for EXTENSION_SINGLE,
		# or a callable returning a list of such entries for EXTENSION_LIST
		self.list.append((type, extension, key))
	def updateExtension(self, extension, key = None):
		"""Append one extension entry, assigning the requested key or the
		first free key from availableKeys when taken/unspecified."""
		self.extensionsList.append(extension)
		if key is not None:
			if key in self.extensionKeys:
				key = None # requested key already taken, fall back to auto-assign
		if key is None:
			for x in self.availableKeys:
				if x not in self.extensionKeys:
					key = x
					break
		if key is not None:
			self.extensionKeys[key] = len(self.extensionsList) - 1
	def updateExtensions(self):
		"""Rebuild extensionsList/extensionKeys from the registered entries."""
		self.extensionsList = []
		self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
		self.extensionKeys = {}
		for x in self.list:
			if x[0] == self.EXTENSION_SINGLE:
				self.updateExtension(x[1], x[2])
			else:
				# EXTENSION_LIST: x[1] is a callable producing (entry, key, ...) tuples
				for y in x[1]():
					self.updateExtension(y[0], y[1])
	def showExtensionSelection(self):
		"""Open the ChoiceBox with all available extensions: keyed entries
		first (in availableKeys order), then the remaining ones keyless."""
		self.updateExtensions()
		extensionsList = self.extensionsList[:]
		keys = []
		choices = []
		for x in self.availableKeys:
			if x in self.extensionKeys:
				entry = self.extensionKeys[x]
				extension = self.extensionsList[entry]
				if extension[2](): # availability check
					choices.append((extension[0](), extension))
					keys.append(x)
				# keyed entries are consumed here whether shown or unavailable
				extensionsList.remove(extension)
		choices.extend([(x[0](), x) for x in extensionsList])
		keys += [""] * len(extensionsList)
		self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list=choices, keys=keys, skin_name="ExtensionsList", reorderConfig="extension_order")
	def extensionCallback(self, answer):
		# answer is (title, extension) or None; run the extension's action
		if answer is not None:
			answer[1][1]()
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
	"""Adds all EXTENSIONSMENU plugins to the extensions menu
	(depends on InfoBarExtensions)."""
	def __init__(self):
		self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)
	def getPluginName(self, name):
		# identity helper, bound per plugin as a lazy title callback
		return name
	def getPluginList(self):
		entries = []
		in_channelselection = isinstance(self, InfoBarChannelSelection)
		for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
			argcount = len(inspect.getargspec(p.__call__)[0])
			# plugins taking (session) always fit; (session, servicelist)
			# only when we actually have a channel selection
			if argcount == 1 or (argcount == 2 and in_channelselection):
				entry = (boundFunction(self.getPluginName, p.name), boundFunction(self.runPlugin, p), lambda: True)
				entries.append((entry, None, p.name))
		return sorted(entries, key = lambda e: e[2]) # sort by name
	def runPlugin(self, plugin):
		kwargs = {"session": self.session}
		if isinstance(self, InfoBarChannelSelection):
			kwargs["servicelist"] = self.servicelist
		plugin(**kwargs)
from Components.Task import job_manager
class InfoBarJobman:
	"""Adds pending background jobs (Components.Task.job_manager) to the
	extensions menu (depends on InfoBarExtensions)."""
	def __init__(self):
		self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)
	def getJobList(self):
		entries = []
		for job in job_manager.getPendingJobs():
			entry = (boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True)
			entries.append((entry, None))
		return entries
	def getJobName(self, job):
		# e.g. "Running: backup (50%)"
		percent = int(100*job.progress/float(job.end))
		return "%s: %s (%d%%)" % (job.getStatustext(), job.name, percent)
	def showJobView(self, job):
		from Screens.TaskView import JobView
		# bring the job to the foreground while the view is open
		job_manager.in_background = False
		self.session.openWithCallback(self.JobViewCB, JobView, job)
	def JobViewCB(self, in_background):
		job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
	"""Picture-in-Picture mixin: shows/hides the PiP window, swaps it with
	the main service, moves it, and handles "pipzap" focus switching.
	Depends on InfoBarExtensions for the green/yellow/blue/red entries."""
	def __init__(self):
		try:
			self.session.pipshown
		except:
			# first InfoBar in this session: initialise the shared flag
			self.session.pipshown = False
		self.lastPiPService = None
		if SystemInfo["PIPAvailable"]:
			self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
				{
					"activatePiP": (self.activePiP, self.activePiPName),
				})
			if (self.allowPiP):
				self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
				self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
				self.addExtension((self.getTogglePipzapName, self.togglePipzap, lambda: True), "red")
			else:
				self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
		# remembers the last PiP service for a configurable time after closing
		self.lastPiPServiceTimeoutTimer = eTimer()
		self.lastPiPServiceTimeoutTimer.callback.append(self.clearLastPiPService)
	def pipShown(self):
		return self.session.pipshown
	def pipHandles0Action(self):
		# True when the "0" key is redirected to a PiP action (see pipDoHandle0Action)
		return self.pipShown() and config.usage.pip_zero_button.value != "standard"
	def getShowHideName(self):
		if self.session.pipshown:
			return _("Disable Picture in Picture")
		else:
			return _("Activate Picture in Picture")
	def getSwapName(self):
		return _("Swap services")
	def getMoveName(self):
		return _("Picture in Picture Setup")
	def getTogglePipzapName(self):
		slist = self.servicelist
		if slist and slist.dopipzap:
			return _("Zap focus to main screen")
		return _("Zap focus to Picture in Picture")
	def togglePipzap(self):
		"""Move zap focus between the main screen and the PiP window,
		opening PiP first when it is not shown yet."""
		if not self.session.pipshown:
			self.showPiP()
		slist = self.servicelist
		if slist and self.session.pipshown:
			slist.togglePipzap()
			if slist.dopipzap:
				# swap the remembered service paths so each window keeps its bouquet position
				currentServicePath = slist.getCurrentServicePath()
				slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
				self.session.pip.servicePath = currentServicePath
	def showPiP(self):
		"""Toggle the PiP window (close when shown, open otherwise)."""
		self.lastPiPServiceTimeoutTimer.stop()
		slist = self.servicelist
		if self.session.pipshown:
			# give focus back to the main screen before closing
			if slist and slist.dopipzap:
				self.togglePipzap()
			if self.session.pipshown:
				lastPiPServiceTimeout = int(config.usage.pip_last_service_timeout.value)
				if lastPiPServiceTimeout >= 0:
					self.lastPiPService = self.session.pip.getCurrentServiceReference()
					if lastPiPServiceTimeout:
						self.lastPiPServiceTimeoutTimer.startLongTimer(lastPiPServiceTimeout)
				del self.session.pip
				if SystemInfo["LCDMiniTV"]:
					if config.lcd.modepip.value >= "1":
						# restore the MiniTV LCD mode now that PiP is gone
						f = open("/proc/stb/lcd/mode", "w")
						f.write(config.lcd.modeminitv.value)
						f.close()
				self.session.pipshown = False
			if hasattr(self, "ScreenSaverTimerStart"):
				self.ScreenSaverTimerStart()
		else:
			self.session.pip = self.session.instantiateDialog(PictureInPicture)
			self.session.pip.show()
			# prefer the remembered PiP service, then the live service, then the list selection
			newservice = self.lastPiPService or self.session.nav.getCurrentlyPlayingServiceReference() or (slist and slist.servicelist.getCurrent())
			if self.session.pip.playService(newservice):
				self.session.pipshown = True
				self.session.pip.servicePath = slist and slist.getCurrentServicePath()
				if SystemInfo["LCDMiniTV"]:
					if config.lcd.modepip.value >= "1":
						# switch the MiniTV LCD to PiP mode and reset the scaler
						f = open("/proc/stb/lcd/mode", "w")
						f.write(config.lcd.modepip.value)
						f.close()
						f = open("/proc/stb/vmpeg/1/dst_width", "w")
						f.write("0")
						f.close()
						f = open("/proc/stb/vmpeg/1/dst_height", "w")
						f.write("0")
						f.close()
						f = open("/proc/stb/vmpeg/1/dst_apply", "w")
						f.write("1")
						f.close()
			else:
				# remembered service failed: retry with the live service / list selection
				newservice = self.session.nav.getCurrentlyPlayingServiceReference() or (slist and slist.servicelist.getCurrent())
				if self.session.pip.playService(newservice):
					self.session.pipshown = True
					self.session.pip.servicePath = slist and slist.getCurrentServicePath()
				else:
					self.session.pipshown = False
					del self.session.pip
			if self.session.pipshown and hasattr(self, "screenSaverTimer"):
				self.screenSaverTimer.stop()
			self.lastPiPService = None
	def clearLastPiPService(self):
		# lastPiPServiceTimeoutTimer callback
		self.lastPiPService = None
	def activePiP(self):
		# "activatePiP" key: open PiP / toggle focus depending on current state
		if self.servicelist and self.servicelist.dopipzap or not self.session.pipshown:
			self.showPiP()
		else:
			self.togglePipzap()
	def activePiPName(self):
		# dynamic help text for the "activatePiP" key
		if self.servicelist and self.servicelist.dopipzap:
			return _("Disable Picture in Picture")
		if self.session.pipshown:
			return _("Zap focus to Picture in Picture")
		else:
			return _("Activate Picture in Picture")
	def swapPiP(self):
		"""Exchange the PiP service with the main (live) service."""
		if self.pipShown():
			swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			pipref = self.session.pip.getCurrentService()
			if swapservice and pipref and pipref.toString() != swapservice.toString():
				slist = self.servicelist
				if slist:
					currentServicePath = slist.getCurrentServicePath()
					currentBouquet = slist.getRoot()
					slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
				self.session.pip.playService(swapservice)
				self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
				if slist:
					self.session.pip.servicePath = currentServicePath
					self.session.pip.servicePath[1] = currentBouquet
				if slist and slist.dopipzap:
					# This unfortunately won't work with subservices
					slist.setCurrentSelection(self.session.pip.getCurrentService())
	def movePiP(self):
		if self.pipShown():
			self.session.open(PiPSetup, pip = self.session.pip)
	def pipDoHandle0Action(self):
		# dispatch the configured "0"-key behaviour while PiP is shown
		use = config.usage.pip_zero_button.value
		if "swap" == use:
			self.swapPiP()
		elif "swapstop" == use:
			self.swapPiP()
			self.showPiP()
		elif "stop" == use:
			self.showPiP()
from RecordTimer import parseEvent, RecordTimerEntry
class InfoBarInstantRecord:
	"""Instant Record - handles the instantRecord action in order to
	start/stop instant records"""
	def __init__(self):
		self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
			{
				"instantRecord": (self.instantRecord, _("Instant recording...")),
			})
		self.SelectedInstantServiceRef = None
		if isStandardInfoBar(self):
			self.recording = []
		else:
			# secondary infobars share the recording list of the main InfoBar
			from Screens.InfoBar import InfoBar
			InfoBarInstance = InfoBar.instance
			if InfoBarInstance:
				self.recording = InfoBarInstance.recording
	def moveToTrash(self, entry):
		"""Move the files of a stopped recording entry into the trash folder."""
		print "instantRecord stop and delete recording: ", entry.name
		import Tools.Trashcan
		trash = Tools.Trashcan.createTrashFolder(entry.Filename)
		from MovieSelection import moveServiceFiles
		moveServiceFiles(entry.Filename, trash, entry.name, allowCopy=False)
	def stopCurrentRecording(self, entry = -1):
		"""Ask for confirmation, then stop (and optionally trash) one
		recording given by its index into self.recording."""
		def confirm(answer=False):
			if answer:
				self.session.nav.RecordTimer.removeEntry(self.recording[entry])
				if self.deleteRecording:
					self.moveToTrash(self.recording[entry])
				self.recording.remove(self.recording[entry])
		if entry is not None and entry != -1:
			msg = _("Stop recording:")
			if self.deleteRecording:
				msg = _("Stop and delete recording:")
			msg += "\n"
			msg += " - " + self.recording[entry].name + "\n"
			self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
	def stopAllCurrentRecordings(self, list):
		"""Ask for confirmation, then stop (and optionally trash) every
		recording in list (items are (timer_entry, flag) tuples)."""
		def confirm(answer=False):
			if answer:
				for entry in list:
					self.session.nav.RecordTimer.removeEntry(entry[0])
					self.recording.remove(entry[0])
					if self.deleteRecording:
						self.moveToTrash(entry[0])
		msg = _("Stop recordings:")
		if self.deleteRecording:
			msg = _("Stop and delete recordings:")
		msg += "\n"
		for entry in list:
			msg += " - " + entry[0].name + "\n"
		self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
	def getProgramInfoAndEvent(self, info, name):
		"""Fill the dict `info` with serviceref/event/name/description/eventid
		(and "end" when an EPG event is found) for the selected or current service."""
		info["serviceref"] = hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef or self.session.nav.getCurrentlyPlayingServiceOrGroup()
		# try to get event info
		event = None
		try:
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(info["serviceref"], -1, 0)
			if event is None:
				if hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef:
					service_info = eServiceCenter.getInstance().info(self.SelectedInstantServiceRef)
					event = service_info and service_info.getEvent(self.SelectedInstantServiceRef)
				else:
					service = self.session.nav.getCurrentService()
					event = service and service.info().getEvent(0)
		except:
			pass
		info["event"] = event
		info["name"] = name
		info["description"] = ""
		info["eventid"] = None
		if event is not None:
			curEvent = parseEvent(event)
			info["name"] = curEvent[2]
			info["description"] = curEvent[3]
			info["eventid"] = curEvent[4]
			info["end"] = curEvent[1]
	def startInstantRecording(self, limitEvent = False):
		"""Create and register a RecordTimerEntry for the current service;
		when limitEvent is True, stop at the end of the current EPG event."""
		begin = int(time())
		end = begin + 3600 # dummy
		name = "instant record"
		info = { }
		self.getProgramInfoAndEvent(info, name)
		serviceref = info["serviceref"]
		event = info["event"]
		if event is not None:
			if limitEvent:
				end = info["end"]
		else:
			if limitEvent:
				self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
		if isinstance(serviceref, eServiceReference):
			serviceref = ServiceReference(serviceref)
		recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
		recording.dontSave = True
		if event is None or limitEvent == False:
			# open-ended recording: keep extending the end time automatically
			recording.autoincrease = True
			recording.setAutoincreaseEnd()
		simulTimerList = self.session.nav.RecordTimer.record(recording)
		if simulTimerList is None: # no conflict
			recording.autoincrease = False
			self.recording.append(recording)
		else:
			if len(simulTimerList) > 1: # with other recording
				name = simulTimerList[1].name
				name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
				print "[TIMER] conflicts with", name_date
				recording.autoincrease = True # start with max available length, then increment
				if recording.setAutoincreaseEnd():
					self.session.nav.RecordTimer.record(recording)
					self.recording.append(recording)
					self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
				else:
					self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
			else:
				self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
			recording.autoincrease = False
	def isInstantRecordRunning(self):
		# True when at least one of our instant recordings is currently running
		print "self.recording:", self.recording
		if self.recording:
			for x in self.recording:
				if x.isRunning():
					return True
		return False
	def recordQuestionCallback(self, answer):
		"""ChoiceBox callback from instantRecord(); answer is (title, id)."""
		print "pre:\n", self.recording
		if answer is None or answer[1] == "no":
			return
		list = []
		recording = self.recording[:]
		for x in recording:
			# drop entries that were removed from the timer list elsewhere
			if not x in self.session.nav.RecordTimer.timer_list:
				self.recording.remove(x)
			elif x.dontSave and x.isRunning():
				list.append((x, False))
		self.deleteRecording = False
		if answer[1] == "changeduration":
			if len(self.recording) == 1:
				self.changeDuration(0)
			else:
				self.session.openWithCallback(self.changeDuration, TimerSelection, list)
		elif answer[1] == "addrecordingtime":
			if len(self.recording) == 1:
				self.addRecordingTime(0)
			else:
				self.session.openWithCallback(self.addRecordingTime, TimerSelection, list)
		elif answer[1] == "changeendtime":
			if len(self.recording) == 1:
				self.setEndtime(0)
			else:
				self.session.openWithCallback(self.setEndtime, TimerSelection, list)
		elif answer[1] == "timer":
			import TimerEdit
			self.session.open(TimerEdit.TimerEditList)
		elif answer[1] == "stop":
			if len(self.recording) == 1:
				self.stopCurrentRecording(0)
			else:
				self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] == "stopdelete":
			self.deleteRecording = True
			if len(self.recording) == 1:
				self.stopCurrentRecording(0)
			else:
				self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] == "stopall":
			self.stopAllCurrentRecordings(list)
		elif answer[1] == "stopdeleteall":
			self.deleteRecording = True
			self.stopAllCurrentRecordings(list)
		elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
			self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
			if answer[1] == "manualduration":
				self.changeDuration(len(self.recording)-1)
			elif answer[1] == "manualendtime":
				self.setEndtime(len(self.recording)-1)
		elif "timeshift" in answer[1]:
			# "timeshift", "timeshift_movie", "timeshift_event" (needs InfoBarTimeshift)
			ts = self.getTimeshift()
			if ts:
				ts.saveTimeshiftFile()
				self.save_timeshift_file = True
				if "movie" in answer[1]:
					self.save_timeshift_in_movie_dir = True
				if "event" in answer[1]:
					remaining = self.currentEventTime()
					if remaining > 0:
						self.setCurrentEventTimer(remaining-15)
		print "after:\n", self.recording
	def setEndtime(self, entry):
		# open a time/date input to change the end time of recording #entry
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
			dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
			dlg.setTitle(_("Please change recording endtime"))
	def TimeDateInputClosed(self, ret):
		# ret is (confirmed, timestamp) from TimeDateInput
		if len(ret) > 1:
			if ret[0]:
				print "stopping recording at", strftime("%F %T", localtime(ret[1]))
				if self.recording[self.selectedEntry].end != ret[1]:
					self.recording[self.selectedEntry].autoincrease = False
				self.recording[self.selectedEntry].end = ret[1]
				self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
	def changeDuration(self, entry):
		# ask for a new total duration (minutes) for recording #entry
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5", maxSize=False, type=Input.NUMBER)
	def addRecordingTime(self, entry):
		# ask for additional minutes to append to recording #entry
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputAddRecordingTime, InputBox, title=_("How many minutes do you want add to record?"), text="5", maxSize=False, type=Input.NUMBER)
	def inputAddRecordingTime(self, value):
		# InputBox callback: extend the end time by `value` minutes
		if value:
			print "added", int(value), "minutes for recording."
			entry = self.recording[self.selectedEntry]
			if int(value) != 0:
				entry.autoincrease = False
			entry.end += 60 * int(value)
			self.session.nav.RecordTimer.timeChanged(entry)
	def inputCallback(self, value):
		# InputBox callback: record for `value` minutes from now
		if value:
			print "stopping recording after", int(value), "minutes."
			entry = self.recording[self.selectedEntry]
			if int(value) != 0:
				entry.autoincrease = False
			entry.end = int(time()) + 60 * int(value)
			self.session.nav.RecordTimer.timeChanged(entry)
	def isTimerRecordRunning(self):
		# True when a scheduled (non-instant) timer recording is running
		identical = timers = 0
		for timer in self.session.nav.RecordTimer.timer_list:
			if timer.isRunning() and not timer.justplay:
				timers += 1
				if self.recording:
					for x in self.recording:
						if x.isRunning() and x == timer:
							identical += 1
		return timers > identical
	def instantRecord(self, serviceRef=None):
		"""Entry point for the instantRecord key: build and show the
		ChoiceBox of recording actions for the current situation."""
		self.SelectedInstantServiceRef = serviceRef
		pirr = preferredInstantRecordPath()
		if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
			if not pirr:
				pirr = ""
			self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
				"\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
			return
		if isStandardInfoBar(self):
			common = ((_("Add recording (stop after current event)"), "event"),
				(_("Add recording (indefinitely)"), "indefinitely"),
				(_("Add recording (enter recording duration)"), "manualduration"),
				(_("Add recording (enter recording endtime)"), "manualendtime"),)
		else:
			common = ()
		if self.isInstantRecordRunning():
			title =_("A recording is currently running.\nWhat do you want to do?")
			list = common + \
				((_("Change recording (duration)"), "changeduration"),
				(_("Change recording (add time)"), "addrecordingtime"),
				(_("Change recording (endtime)"), "changeendtime"),)
			list += ((_("Stop recording"), "stop"),)
			if config.usage.movielist_trashcan.value:
				list += ((_("Stop and delete recording"), "stopdelete"),)
			if len(self.recording) > 1:
				list += ((_("Stop all current recordings"), "stopall"),)
				if config.usage.movielist_trashcan.value:
					list += ((_("Stop and delete all current recordings"), "stopdeleteall"),)
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			list += ((_("Do nothing"), "no"),)
		else:
			title=_("Start recording?")
			list = common
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			if isStandardInfoBar(self):
				list += ((_("Do not record"), "no"),)
		if isStandardInfoBar(self) and self.timeshiftEnabled():
			list = list + ((_("Save timeshift file"), "timeshift"),
				(_("Save timeshift file in movie directory"), "timeshift_movie"))
			if self.currentEventTime() > 0:
				list += ((_("Save timeshift only for current event"), "timeshift_event"),)
		if list:
			self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=title, list=list)
		else:
			return 0
from Tools.ISO639 import LanguageCodes
class InfoBarAudioSelection:
def __init__(self):
self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
{
"audioSelection": (self.audioSelection, _("Audio options...")),
})
def audioSelection(self):
from Screens.AudioSelection import AudioSelection
self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
def audioSelected(self, ret=None):
print "[infobar::audioSelected]", ret
class InfoBarSubserviceSelection:
def __init__(self):
self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
{
"subserviceSelection": (self.subserviceSelection, _("Subservice list...")),
})
self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
{
"nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
"prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
}, -1)
self["SubserviceQuickzapAction"].setEnabled(False)
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
})
self.onClose.append(self.__removeNotifications)
self.bsel = None
def __removeNotifications(self):
self.session.nav.event.remove(self.checkSubservicesAvail)
def checkSubservicesAvail(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
if not subservices or subservices.getNumberOfSubservices() == 0:
self["SubserviceQuickzapAction"].setEnabled(False)
def nextSubservice(self):
self.changeSubservice(+1)
def prevSubservice(self):
self.changeSubservice(-1)
def changeSubservice(self, direction):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
n = subservices and subservices.getNumberOfSubservices()
if n and n > 0:
selection = -1
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
idx = 0
while idx < n:
if subservices.getSubservice(idx).toString() == ref.toString():
selection = idx
break
idx += 1
if selection != -1:
selection += direction
if selection >= n:
selection=0
elif selection < 0:
selection=n-1
newservice = subservices.getSubservice(selection)
if newservice.valid():
del subservices
del service
self.session.nav.playService(newservice, False)
def subserviceSelection(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
self.bouquets = self.servicelist.getBouquetList()
n = subservices and subservices.getNumberOfSubservices()
selection = 0
if n and n > 0:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
tlist = []
idx = 0
while idx < n:
i = subservices.getSubservice(idx)
if i.toString() == ref.toString():
selection = idx
tlist.append((i.getName(), i))
idx += 1
if self.bouquets and len(self.bouquets):
keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
if config.usage.multibouquet.value:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
selection += 3
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
selection += 2
self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")
def subserviceSelected(self, service):
    """Callback from the sub service ChoiceBox.

    service: the selected (name, value[, extra]) tuple, or None when the
    box was cancelled. A string value of "quickzap" opens the quickzap
    screen; any other (service reference) value is played directly.
    """
    del self.bouquets
    # Idiom fix: use "is not None" instead of "not ... is None".
    if service is not None:
        if isinstance(service[1], str):
            if service[1] == "quickzap":
                from Screens.SubservicesQuickzap import SubservicesQuickzap
                self.session.open(SubservicesQuickzap, service[2])
        else:
            self["SubserviceQuickzapAction"].setEnabled(True)
            self.session.nav.playService(service[1], False)
def addSubserviceToBouquetCallback(self, service):
    """CALLFUNC handler from the ChoiceBox: add the chosen sub service.

    With several bouquets a BouquetSelector is opened first; with exactly
    one the service is added to it directly.
    """
    if len(service) > 1 and isinstance(service[1], eServiceReference):
        self.selectedSubservice = service
        if self.bouquets is None:
            cnt = 0
        else:
            cnt = len(self.bouquets)
        if cnt > 1: # show bouquet list
            self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
        elif cnt == 1: # add to only one existing bouquet
            self.addSubserviceToBouquet(self.bouquets[0][1])
            self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO)
def bouquetSelClosed(self, confirmed):
    """Callback when the BouquetSelector closes; confirmed is truthy on OK."""
    self.bsel = None
    del self.selectedSubservice
    if confirmed:
        self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO)
def addSubserviceToBouquet(self, dest):
    """Append the remembered sub service to the bouquet *dest*.

    Closes the bouquet selector when one is open; otherwise drops the
    remembered selection directly.
    """
    self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
    if not self.bsel:
        del self.selectedSubservice
    else:
        self.bsel.close(True)
class InfoBarRedButton:
    """Red button handling, currently used to activate HbbTV."""
    def __init__(self):
        self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
            {
                "activateRedButton": (self.activateRedButton, _("Red button...")),
            })
        # Callback lists that other components (e.g. HbbTV plugin) append to.
        self.onHBBTVActivation = [ ]
        self.onRedButtonActivation = [ ]

    def activateRedButton(self):
        """Fire the HbbTV callbacks when the service carries an HbbTV URL."""
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
            for x in self.onHBBTVActivation:
                x()
        elif False: # TODO: other red button services
            # Intentionally dead branch, kept as a placeholder (see TODO).
            for x in self.onRedButtonActivation:
                x()
class InfoBarTimerButton:
    """Maps the timer button to the timer edit list screen."""
    def __init__(self):
        self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions",
            {
                "timerSelection": (self.timerSelection, _("Timer selection...")),
            })

    def timerSelection(self):
        """Open the timer list editor."""
        from Screens.TimerEdit import TimerEditList
        self.session.open(TimerEditList)
class InfoBarVmodeButton:
    """Maps the video-mode button to the letterbox zoom selector."""
    def __init__(self):
        self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions",
            {
                "vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
            })

    def vmodeSelection(self):
        """Open the VideoMode screen to cycle the display policy."""
        self.session.open(VideoMode)
class VideoMode(Screen):
    """On-screen selector that cycles the A/V aspect policy.

    Each invocation of the mapped key advances to the next policy choice
    (4:3 or 16:9 policy depending on the running service) and shows its
    name; the screen closes itself after one second of inactivity.
    """
    def __init__(self,session):
        Screen.__init__(self, session)
        self["videomode"] = Label()

        self["actions"] = NumberActionMap( [ "InfobarVmodeButtonActions" ],
            {
                "vmodeSelection": self.selectVMode
            })

        self.Timer = eTimer()
        self.Timer.callback.append(self.quit)
        self.selectVMode()

    def selectVMode(self):
        """Advance to the next policy choice and (re)start the close timer."""
        policy = config.av.policy_43
        if self.isWideScreen():
            policy = config.av.policy_169
        # Cycle through the configured choices.
        idx = policy.choices.index(policy.value)
        idx = (idx + 1) % len(policy.choices)
        policy.value = policy.choices[idx]
        self["videomode"].setText(policy.value)
        self.Timer.start(1000, True)

    def isWideScreen(self):
        """Return True when the running service has a widescreen aspect."""
        from Components.Converter.ServiceInfo import WIDESCREEN
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        # Bug fix: guard against no running service / no info interface —
        # the original called info.getInfo() on None and raised
        # AttributeError. Treat that case as "not widescreen".
        return bool(info) and info.getInfo(iServiceInformation.sAspect) in WIDESCREEN

    def quit(self):
        """Timer callback: stop the timer and close the screen."""
        self.Timer.stop()
        self.close()
class InfoBarAdditionalInfo:
    """Exposes capability flags to the skin as Boolean sources."""
    def __init__(self):
        # Recording/timeshift require at least one hard disk.
        self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
        self["TimeshiftPossible"] = self["RecordingPossible"]
        self["ExtensionsAvailable"] = Boolean(fixed=1)
        # TODO: these properties should be queried from the input device keymap
        self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
        self["ShowAudioOnYellow"] = Boolean(fixed=0)
        self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
    """Pops queued Notifications while this screen is executing.

    A queued notification is a 5-tuple: (callback, screen_class, args,
    kwargs, id) — see the indexing in checkNotifications().
    """
    def __init__(self):
        self.onExecBegin.append(self.checkNotifications)
        Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
        self.onClose.append(self.__removeNotification)

    def __removeNotification(self):
        # Unregister on close so a dead screen is never called back.
        Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)

    def checkNotificationsIfExecing(self):
        # Only pop notifications while this screen is actually executing.
        if self.execing:
            self.checkNotifications()

    def checkNotifications(self):
        """Pop and display the oldest queued notification, if any."""
        notifications = Notifications.notifications
        if notifications:
            n = notifications[0]

            del notifications[0]
            cb = n[0]

            # One-shot hook invoked (and removed) just before opening.
            if n[3].has_key("onSessionOpenCallback"):
                n[3]["onSessionOpenCallback"]()
                del n[3]["onSessionOpenCallback"]

            if cb:
                dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
            elif not Notifications.current_notifications and n[4] == "ZapError":
                # Zap errors are shown as an instantiated dialog that any
                # keypress dismisses (see keypressNotification).
                if n[3].has_key("timeout"):
                    del n[3]["timeout"]
                n[3]["enable_input"] = False
                dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
                self.hide()
                dlg.show()
                self.notificationDialog = dlg
                # Highest-priority key binding so any key reaches us first.
                eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
            else:
                dlg = self.session.open(n[1], *n[2], **n[3])

            # remember that this notification is currently active
            d = (n[4], dlg)
            Notifications.current_notifications.append(d)
            dlg.onClose.append(boundFunction(self.__notificationClosed, d))

    def closeNotificationInstantiateDialog(self):
        """Tear down the keypress-dismissable zap-error dialog, if shown."""
        if hasattr(self, "notificationDialog"):
            self.session.deleteDialog(self.notificationDialog)
            del self.notificationDialog
            eActionMap.getInstance().unbindAction('', self.keypressNotification)

    def keypressNotification(self, key, flag):
        # Any key event with a non-zero flag dismisses the dialog.
        if flag:
            self.closeNotificationInstantiateDialog()

    def __notificationClosed(self, d):
        Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
    """Returns to normal play state when the running service ends."""
    def __init__(self):
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evEnd: self.serviceHasEnded
            })

    def serviceHasEnded(self):
        print "service end!"

        try:
            # Leave any pause/trick-mode state; ignore failures from a
            # service that no longer supports seeking.
            self.setSeekState(self.SEEK_STATE_PLAY)
        except:
            pass
class InfoBarCueSheetSupport:
    """Cue sheet (cut list) support: marks, in/out cuts and resume points.

    The cut list is a sorted list of (pts, type) tuples; positions are in
    PTS units (90000 per second). Types are the CUT_TYPE_* constants below.
    """
    # Cut list entry types.
    CUT_TYPE_IN = 0
    CUT_TYPE_OUT = 1
    CUT_TYPE_MARK = 2
    CUT_TYPE_LAST = 3

    # Subclasses set this to True to resume playback at the saved position.
    ENABLE_RESUME_SUPPORT = False

    def __init__(self, actionmap = "InfobarCueSheetActions"):
        self["CueSheetActions"] = HelpableActionMap(self, actionmap,
            {
                "jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
                "jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
                "toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
            }, prio=1)

        self.cut_list = [ ]
        self.is_closing = False
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.__serviceStarted,
                iPlayableService.evCuesheetChanged: self.downloadCuesheet,
            })

    def __serviceStarted(self):
        """Load the cut list of the new service and optionally resume."""
        if self.is_closing:
            return
        print "new service started! trying to download cuts!"
        self.downloadCuesheet()

        if self.ENABLE_RESUME_SUPPORT:
            # Prefer the explicit LAST cut entry; otherwise fall back to
            # the stored resume point for this service.
            for (pts, what) in self.cut_list:
                if what == self.CUT_TYPE_LAST:
                    last = pts
                    break
            else:
                last = getResumePoint(self.session)
            if last is None:
                return
        # only resume if at least 10 seconds ahead, or <10 seconds before the end.
            seekable = self.__getSeekable()
            if seekable is None:
                return # Should not happen?
            length = seekable.getLength() or (None,0)
            print "seekable.getLength() returns:", length
            # Hmm, this implies we don't resume if the length is unknown...
            if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
                self.resume_point = last
                # Position in whole seconds (integer division under Python 2).
                l = last / 90000
                if "ask" in config.usage.on_movie_start.value or not length[1]:
                    Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10, default="yes" in config.usage.on_movie_start.value)
                elif config.usage.on_movie_start.value == "resume":
# TRANSLATORS: The string "Resuming playback" flashes for a moment
# TRANSLATORS: at the start of a movie, when the user has selected
# TRANSLATORS: "Resume from last position" as start behavior.
# TRANSLATORS: The purpose is to notify the user that the movie starts
# TRANSLATORS: in the middle somewhere and not from the beginning.
# TRANSLATORS: (Some translators seem to have interpreted it as a
# TRANSLATORS: question or a choice, but it is a statement.)
                    Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)

    def playLastCB(self, answer):
        """Callback from the resume question: seek when confirmed."""
        if answer == True:
            self.doSeek(self.resume_point)
        self.hideAfterResume()

    def hideAfterResume(self):
        if isinstance(self, InfoBarShowHide):
            self.hide()

    def __getSeekable(self):
        # Seek interface of the running service, or None.
        service = self.session.nav.getCurrentService()
        if service is None:
            return None
        return service.seek()

    def cueGetCurrentPosition(self):
        """Return the current play position in PTS, or None if unknown."""
        seek = self.__getSeekable()
        if seek is None:
            return None
        r = seek.getPlayPosition()
        # r[0] is the error flag; r[1] the position.
        if r[0]:
            return None
        return long(r[1])

    def cueGetEndCutPosition(self):
        """Return the PTS of the last effective OUT cut, or False if none."""
        ret = False
        isin = True
        for cp in self.cut_list:
            if cp[1] == self.CUT_TYPE_OUT:
                if isin:
                    isin = False
                    ret = cp[0]
            elif cp[1] == self.CUT_TYPE_IN:
                isin = True
        return ret

    def jumpPreviousNextMark(self, cmp, start=False):
        """Seek to the nearest mark selected by *cmp*; True on success."""
        current_pos = self.cueGetCurrentPosition()
        if current_pos is None:
            return False
        mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
        if mark is not None:
            pts = mark[0]
        else:
            return False

        self.doSeek(pts)
        return True

    def jumpPreviousMark(self):
        # we add 5 seconds, so if the play position is <5s after
        # the mark, the mark before will be used
        self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)

    def jumpNextMark(self):
        # No next mark: jump (close) to the end instead.
        if not self.jumpPreviousNextMark(lambda x: x-90000):
            self.doSeek(-1)

    def getNearestCutPoint(self, pts, cmp=abs, start=False):
        """Return the cut-list entry nearest to *pts* per *cmp*, or None.

        cmp maps a signed distance (cutpoint - pts) to a score; entries
        with a negative score are rejected, the smallest score wins.
        With start=True the stream start (position 0) is also considered.
        """
        # can be optimized
        beforecut = True
        nearest = None
        bestdiff = -1
        instate = True
        if start:
            bestdiff = cmp(0 - pts)
            if bestdiff >= 0:
                nearest = [0, False]
        for cp in self.cut_list:
            if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
                beforecut = False
                if cp[1] == self.CUT_TYPE_IN:  # Start is here, disregard previous marks
                    diff = cmp(cp[0] - pts)
                    if start and diff >= 0:
                        nearest = cp
                        bestdiff = diff
                    else:
                        nearest = None
                        bestdiff = -1
            if cp[1] == self.CUT_TYPE_IN:
                instate = True
            elif cp[1] == self.CUT_TYPE_OUT:
                instate = False
            elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
                # Only marks inside an IN region are candidates.
                diff = cmp(cp[0] - pts)
                if instate and diff >= 0 and (nearest is None or bestdiff > diff):
                    nearest = cp
                    bestdiff = diff
        return nearest

    def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
        """Toggle a MARK at the current position.

        A mark within *tolerance* PTS of the position is removed (or
        returned with onlyreturn=True); otherwise a new mark is added.
        """
        current_pos = self.cueGetCurrentPosition()
        if current_pos is None:
            print "not seekable"
            return

        nearest_cutpoint = self.getNearestCutPoint(current_pos)

        if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
            if onlyreturn:
                return nearest_cutpoint
            if not onlyadd:
                self.removeMark(nearest_cutpoint)
        elif not onlyremove and not onlyreturn:
            self.addMark((current_pos, self.CUT_TYPE_MARK))

        if onlyreturn:
            return None

    def addMark(self, point):
        # Keep the cut list sorted by position.
        insort(self.cut_list, point)
        self.uploadCuesheet()
        self.showAfterCuesheetOperation()

    def removeMark(self, point):
        self.cut_list.remove(point)
        self.uploadCuesheet()
        self.showAfterCuesheetOperation()

    def showAfterCuesheetOperation(self):
        if isinstance(self, InfoBarShowHide):
            self.doShow()

    def __getCuesheet(self):
        # Cue sheet interface of the running service, or None.
        service = self.session.nav.getCurrentService()
        if service is None:
            return None
        return service.cueSheet()

    def uploadCuesheet(self):
        """Push the in-memory cut list to the service."""
        cue = self.__getCuesheet()

        if cue is None:
            print "upload failed, no cuesheet interface"
            return
        cue.setCutList(self.cut_list)

    def downloadCuesheet(self):
        """Replace the in-memory cut list with the service's cut list."""
        cue = self.__getCuesheet()

        if cue is None:
            print "download failed, no cuesheet interface"
            self.cut_list = [ ]
        else:
            self.cut_list = cue.getCutList()
class InfoBarSummary(Screen):
    """LCD/front-display summary screen shown while the InfoBar runs."""
    # Skin markup is consumed by the skin engine at runtime — do not edit
    # casually; geometry is for the 132x64 display.
    skin = """
    <screen position="0,0" size="132,64">
        <widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
            <convert type="ClockToText">WithSeconds</convert>
        </widget>
        <widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
            <convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
            <convert type="ConditionalShowHide">Blink</convert>
        </widget>
        <widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
            <convert type="ServiceName">Name</convert>
        </widget>
        <widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
            <convert type="EventTime">Progress</convert>
        </widget>
    </screen>"""

# for picon:  (path="piconlcd" will use LCD picons)
#        <widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
#            <convert type="ServiceName">Reference</convert>
#        </widget>
class InfoBarSummarySupport:
    """Mixin providing the summary (LCD) screen class for the InfoBar."""
    def __init__(self):
        pass

    def createSummary(self):
        # Called by the Screen framework to pick the summary screen.
        return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
    """LCD/front-display summary screen for the movie player."""
    # Skin markup is consumed by the skin engine at runtime.
    skin = """
    <screen position="0,0" size="132,64">
        <widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
            <convert type="ClockToText">WithSeconds</convert>
        </widget>
        <widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
            <convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
            <convert type="ConditionalShowHide">Blink</convert>
        </widget>
        <widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
            <convert type="ServiceName">Name</convert>
        </widget>
        <widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
            <convert type="ServicePosition">Position</convert>
        </widget>
    </screen>"""
class InfoBarMoviePlayerSummarySupport:
    """Mixin providing the summary (LCD) screen class for the movie player."""
    def __init__(self):
        pass

    def createSummary(self):
        # Called by the Screen framework to pick the summary screen.
        return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
    """Binds the teletext key to an installed teletext plugin, if any."""
    def __init__(self):
        self.teletext_plugin = None

        # Take the last registered WHERE_TELETEXT plugin.
        for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
            self.teletext_plugin = p

        if self.teletext_plugin is not None:
            self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
                {
                    "startTeletext": (self.startTeletext, _("View teletext..."))
                })
        else:
            print "no teletext plugin found!"

    def startTeletext(self):
        """Invoke the plugin with the running session and service."""
        self.teletext_plugin and self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
    """Subtitle selection and on-screen display support.

    Tracks the selected subtitle track across service changes and shows
    or hides the shared subtitle window accordingly.
    """
    def __init__(self):
        object.__init__(self)
        self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
            {
                "subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
            })

        self.selected_subtitle = None

        if isStandardInfoBar(self):
            self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
        else:
            # Secondary infobars (e.g. the movie player) share the main
            # InfoBar's subtitle window.
            from Screens.InfoBar import InfoBar
            self.subtitle_window = InfoBar.instance.subtitle_window

        self.subtitle_window.hide()

        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.__serviceChanged,
                iPlayableService.evEnd: self.__serviceChanged,
                iPlayableService.evUpdatedInfo: self.__updatedInfo
            })

    def getCurrentServiceSubtitle(self):
        """Return the subtitle interface of the running service, or None."""
        service = self.session.nav.getCurrentService()
        return service and service.subtitle()

    def subtitleSelection(self):
        """Open the subtitle selection screen when tracks are available."""
        subtitle = self.getCurrentServiceSubtitle()
        subtitlelist = subtitle and subtitle.getSubtitleList()
        if self.selected_subtitle or subtitlelist and len(subtitlelist)>0:
            from Screens.AudioSelection import SubtitleSelection
            self.session.open(SubtitleSelection, self)
        else:
            return 0

    def __serviceChanged(self):
        # A new/ended service invalidates the previous track selection.
        if self.selected_subtitle:
            self.selected_subtitle = None
            self.subtitle_window.hide()

    def __updatedInfo(self):
        if not self.selected_subtitle:
            subtitle = self.getCurrentServiceSubtitle()
            # Bug fix: the service may provide no subtitle interface, in
            # which case getCurrentServiceSubtitle() returns None and the
            # original getCachedSubtitle() call raised AttributeError.
            cachedsubtitle = subtitle and subtitle.getCachedSubtitle()
            if cachedsubtitle:
                self.enableSubtitle(cachedsubtitle)

    def enableSubtitle(self, selectedSubtitle):
        """Enable *selectedSubtitle* (or disable subtitles when falsy)."""
        subtitle = self.getCurrentServiceSubtitle()
        self.selected_subtitle = selectedSubtitle
        if subtitle and self.selected_subtitle:
            subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
            self.subtitle_window.show()
        else:
            if subtitle:
                subtitle.disableSubtitles(self.subtitle_window.instance)
            self.subtitle_window.hide()

    def restartSubtitle(self):
        """Re-apply the current track, e.g. after a display change."""
        if self.selected_subtitle:
            self.enableSubtitle(self.selected_subtitle)
class InfoBarServiceErrorPopupSupport:
    """Shows a popup when tuning the current service fails."""
    def __init__(self):
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evTuneFailed: self.__tuneFailed,
                iPlayableService.evTunedIn: self.__serviceStarted,
                iPlayableService.evStart: self.__serviceStarted
            })

        self.__serviceStarted()

    def __serviceStarted(self):
        # A (re)started or tuned service clears any pending error popup.
        self.closeNotificationInstantiateDialog()
        self.last_error = None
        Notifications.RemovePopup(id = "ZapError")

    def __tuneFailed(self):
        """Map the DVB state to a user message and show it as a popup."""
        if not config.usage.hide_zap_errors.value or not config.usage.remote_fallback_enabled.value:
            service = self.session.nav.getCurrentService()
            info = service and service.info()
            error = info and info.getInfo(iServiceInformation.sDVBState)

            if not config.usage.remote_fallback_enabled.value and (error == eDVBServicePMTHandler.eventMisconfiguration or error == eDVBServicePMTHandler.eventNoResources):
                # Unrecoverable without fallback: forget the playing service.
                self.session.nav.currentlyPlayingServiceReference = None
                self.session.nav.currentlyPlayingServiceOrGroup = None

            # Report each distinct error only once in a row.
            if error == self.last_error:
                error = None
            else:
                self.last_error = error

            error = {
                eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
                eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
                eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
                eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
                eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
                eDVBServicePMTHandler.eventNewProgramInfo: None,
                eDVBServicePMTHandler.eventTuned: None,
                eDVBServicePMTHandler.eventSOF: None,
                eDVBServicePMTHandler.eventEOF: None,
                eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"),
            }.get(error) #this returns None when the key not exist in the dict

            if error and not config.usage.hide_zap_errors.value:
                self.closeNotificationInstantiateDialog()
                # Suppress the popup while the dish-movement dialog is shown.
                if hasattr(self, "dishDialog") and not self.dishDialog.dishState():
                    Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError")
class InfoBarPowersaver:
    """Inactivity standby and sleep-timer handling."""
    def __init__(self):
        self.inactivityTimer = eTimer()
        self.inactivityTimer.callback.append(self.inactivityTimeout)
        self.restartInactiveTimer()
        self.sleepTimer = eTimer()
        # Absolute time (epoch seconds) when the sleep timer will fire.
        self.sleepStartTime = 0
        self.sleepTimer.callback.append(self.sleepTimerTimeout)
        # Highest-priority binding: any keypress restarts the idle timer.
        eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress)

    def keypress(self, key, flag):
        if flag:
            self.restartInactiveTimer()

    def restartInactiveTimer(self):
        """(Re)arm the inactivity timer from configuration, 0 disables it."""
        time = abs(int(config.usage.inactivity_timer.value))
        if time:
            self.inactivityTimer.startLongTimer(time)
        else:
            self.inactivityTimer.stop()

    def inactivityTimeout(self):
        """Ask (or go) to standby unless inside a configured block window."""
        if config.usage.inactivity_timer_blocktime.value:
            curtime = localtime(time())
            if curtime.tm_year > 1970: #check if the current time is valid
                curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec)
                begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value)
                endtime = tuple(config.usage.inactivity_timer_blocktime_end.value)
                begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin.value)
                endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end.value)
                # Inside a block window (handles windows crossing midnight):
                # postpone the timeout until the window ends.
                if begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime) or config.usage.inactivity_timer_blocktime_extra.value and\
                (begintime_extra <= endtime_extra and (curtime >= begintime_extra and curtime < endtime_extra) or begintime_extra > endtime_extra and (curtime >= begintime_extra or curtime < endtime_extra)):
                    duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
                    if duration:
                        if duration < 0:
                            duration += 24*3600
                        self.inactivityTimer.startLongTimer(duration)
                        return
        if Screens.Standby.inStandby:
            self.inactivityTimeoutCallback(True)
        else:
            # NOTE(review): "will got to" is a typo in this translation key;
            # fixing it requires updating the .po catalogs in lockstep.
            message = _("Your receiver will got to standby due to inactivity.") + "\n" + _("Do you want this?")
            self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple=True, default=False, timeout_default=True)

    def inactivityTimeoutCallback(self, answer):
        if answer:
            self.goStandby()
        else:
            print "[InfoBarPowersaver] abort"

    def sleepTimerState(self):
        """Minutes until the sleep timer fires, or 0 when inactive."""
        if self.sleepTimer.isActive():
            return (self.sleepStartTime - time()) / 60
        return 0

    def setSleepTimer(self, sleepTime):
        """Arm the sleep timer for *sleepTime* seconds; 0 disables it."""
        print "[InfoBarPowersaver] set sleeptimer", sleepTime
        if sleepTime:
            m = abs(sleepTime / 60)
            message = _("The sleep timer has been activated.") + "\n" + _("And will put your receiver in standby over ") + ngettext("%d minute", "%d minutes", m) % m
            self.sleepTimer.startLongTimer(sleepTime)
            self.sleepStartTime = time() + sleepTime
        else:
            message = _("The sleep timer has been disabled.")
            self.sleepTimer.stop()
        Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5)

    def sleepTimerTimeout(self):
        """Confirm standby, offering a 15-minute extension."""
        if not Screens.Standby.inStandby:
            list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ]
            # NOTE(review): "will got to stand by" is a typo in this
            # translation key; see inactivityTimeout.
            message = _("Your receiver will got to stand by due to the sleeptimer.")
            message += "\n" + _("Do you want this?")
            self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple=True, list=list, default=False, timeout_default=True)

    def sleepTimerTimeoutCallback(self, answer):
        if answer == "extend":
            print "[InfoBarPowersaver] extend sleeptimer"
            self.setSleepTimer(900)
        elif answer:
            self.goStandby()
        else:
            print "[InfoBarPowersaver] abort"
            self.setSleepTimer(0)

    def goStandby(self):
        if not Screens.Standby.inStandby:
            print "[InfoBarPowersaver] goto standby"
            self.session.open(Screens.Standby.Standby)
class InfoBarHDMI:
    """Toggle between the HDMI input and the normally selected service."""
    def HDMIIn(self):
        slist = self.servicelist
        if slist.dopipzap:
            # PiP has the zap focus: toggle HDMI-in inside the PiP window.
            curref = self.session.pip.getCurrentService()
            # Service type 8192 appears to denote the HDMI input — the
            # reference played below uses that type. TODO confirm.
            if curref and curref.type != 8192:
                self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
            else:
                self.session.pip.playService(slist.servicelist.getCurrent())
        else:
            curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            if curref and curref.type != 8192:
                # Save the resume position when leaving media playback.
                if curref and curref.type != -1 and os.path.splitext(curref.toString().split(":")[10])[1].lower() in AUDIO_EXTENSIONS.union(MOVIE_EXTENSIONS, DVD_EXTENSIONS):
                    setResumePoint(self.session)
                self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
            elif isStandardInfoBar(self):
                self.session.nav.playService(slist.servicelist.getCurrent())
            else:
                self.session.nav.playService(self.cur_service)
|
ssh1/stbgui
|
lib/python/Screens/InfoBarGenerics.py
|
Python
|
gpl-2.0
| 119,400
|
# -*- coding: utf-8 -*-
from Public_lib import *
# Module: report each host's OS version (reads the first line of /etc/issue via SaltStack)
class Modulehandle():
    """Run a remote command via SaltStack to report each host's OS version.

    Executes ``head -n 1 /etc/issue`` on every target host through the
    salt LocalClient and returns the per-host output mapping.
    """
    def __init__(self, moduleid, hosts, sys_param_row):
        self.Runresult = ""
        self.moduleid = moduleid
        self.sys_param_array = sys_param_row
        # Resolve the host expression into a list of hostnames.
        # (The original also pre-initialised self.hosts to "" — a dead
        # store, removed.)
        self.hosts = target_host(hosts, "HN").split(";")

    def run(self):
        """Execute the command on all hosts; return the result mapping.

        Returns an error string when no host matched or salt raised.
        """
        try:
            client = salt.client.LocalClient()
            self.Runresult = client.cmd(self.hosts, 'cmd.run', ["/usr/bin/head -n 1 /etc/issue"], expr_form='list')
            if not self.Runresult:
                # (zh) "please confirm the hosts were added to the saltstack environment"
                return "No hosts found,请确认主机已经添加saltstack环境!"
        except Exception as e:  # py2.6+/py3-compatible exception syntax
            return str(e)
        return self.Runresult
|
zhengjue/mytornado
|
omserver/OMServer/modules/saltstack/Mid_1003.py
|
Python
|
gpl-3.0
| 741
|
# -*- encoding: UTF-8 -*-
import thread, gtk
import webbrowser
from bottle import route, run, template, static_file, request, redirect
import controller
from lib.simulador import Sistema
# Keeps the simulator instance and the template context as module globals.
# NOTE(review): the name ``sys`` shadows the stdlib module — confirm the
# stdlib ``sys`` is never needed in this file.
sys = None
contexto = {}
@route("/")
def inicio ():
    '''
    Load the start page: create a fresh simulator and reset the
    template context to its defaults.
    '''
    global contexto, sys
    sys = Sistema()
    contexto = {
        "registradores":sys.getBanco().getRegistradores(),
        "variaveis":None,
        'instrucoes': None,
        'UC': None,
        'ULA' : None,
        "codigo":"Digite o código no campo ao lado",
        "erro":None,
        'simulando': False,
        'proxima_instrucao': None,
        'decimal': False,
        'sys':sys
    }
    return template("template/index.html", contexto)
@route("/", method="POST")
def simular ():
    '''
    Handle the POST form: either load the submitted code into the
    machine ("simular") or execute a single instruction ("passo").
    '''
    global contexto
    if request.POST.get("simular",''):
        cod = request.POST.get("cod",'').strip()
        # Initialise and configure the machine with the submitted code.
        contexto['sys'] = controller.inicializa(cod, sys)
        contexto['variaveis'] = contexto['sys'].getMemoriaDados().getDados()
        contexto['codigo'] = cod
        contexto['simulando'] = True
        contexto['ULA'] = contexto['sys'].getULA()
        contexto['proxima_instrucao'] = contexto['sys'].getProximaInstrucao()
        return template("template/index.html", contexto)
    elif request.POST.get('passo',''):
        # Execute a single instruction from the queue.
        contexto['sys'] = controller.executa(contexto["sys"])
        contexto['UC'] = contexto['sys'].getUC()
        contexto['ULA'] = contexto['sys'].getULA()
        contexto['proxima_instrucao'] = contexto['sys'].getProximaInstrucao()
        return template('template/index.html', contexto)
    # NOTE(review): returns None when neither button was submitted —
    # bottle renders an empty response; confirm this is intended.
@route("/<tipo>")
def trocar_notacao(tipo):
    """Switch the register display notation and return to the start page.

    The context's 'decimal' flag is set exactly when *tipo* is "decimal";
    every other value selects the non-decimal notation.
    """
    global contexto
    contexto['decimal'] = (tipo == "decimal")
    redirect("/")
@route("/reinicia/")
def reinicia ():
    # Restart: redirecting to "/" re-creates the simulator in inicio().
    redirect("/")
@route('/static/:path#.+#', name='static')
def static(path):
    '''
    Serve files from the template directory to the browser.
    '''
    return static_file(path, root='template')
# iniciando o programa
if __name__ == '__main__':
try:
thread.start_new_thread(run, ())
thread.start_new_thread(webbrowser.open_new_tab, ("http://localhost:8080",))
except:
print "Ocorreu um erro ao iniciar a aplicação"
try:
message = gtk.MessageDialog(type=gtk.MESSAGE_ERROR, buttons=gtk.BUTTONS_OK)
message.set_markup("Ocorreu um erro ao iniciar a aplicação")
message.run()
except:
pass
while 1:
pass
|
diogocs1/simuladormips
|
main.py
|
Python
|
gpl-2.0
| 2,902
|
from plenum.common.constants import TARGET_NYM, TXN_TYPE, NYM, ROLE, VERKEY
from plenum.common.signer_did import DidIdentity
from stp_core.types import Identifier
from sovrin_common.auth import Authoriser
from sovrin_common.generates_request import GeneratesRequest
from sovrin_common.constants import GET_NYM, NULL
from sovrin_common.types import Request
class Identity(GeneratesRequest):
    """A NYM on the ledger: identifier, verkey and role plus sync metadata."""
    def __init__(self,
                 identifier: Identifier,
                 trust_anchor: Identifier=None,
                 verkey=None,
                 role=None,
                 last_synced=None,
                 seq_no=None):
        """
        :param identifier:
        :param trust_anchor:
        :param verkey:
        :param role: If role is explicitly passed as `null` then in the request
         to ledger, `role` key would be sent as None which would stop the
         Identity's ability to do any privileged actions. If role is not passed,
         `role` key will not be included in the request to the ledger
        :param last_synced:
        :param seq_no:
        """
        self.identity = DidIdentity(identifier, verkey=verkey)
        self.trustAnchor = trust_anchor

        # if role and role not in (TRUST_ANCHOR, STEWARD):
        if not Authoriser.isValidRole(self.correctRole(role)):
            raise AttributeError("Invalid role {}".format(role))
        # Raw role as passed in; NULL is normalised to None on read only.
        self._role = role

        # timestamp for when the ledger was last checked for key replacement or
        # revocation
        self.last_synced = last_synced

        # sequence number of the latest key management transaction for this
        # identifier
        self.seqNo = seq_no

    @property
    def identifier(self):
        return self.identity.identifier

    @property
    def verkey(self):
        return self.identity.verkey

    @verkey.setter
    def verkey(self, new_val):
        # DidIdentity is immutable for our purposes; rebuild it on change.
        identifier = self.identifier
        self.identity = DidIdentity(identifier, verkey=new_val)

    @staticmethod
    def correctRole(role):
        # An explicit NULL role reads back as None.
        return None if role == NULL else role

    @property
    def role(self):
        return self.correctRole(self._role)

    @role.setter
    def role(self, role):
        if not Authoriser.isValidRole(self.correctRole(role)):
            raise AttributeError("Invalid role {}".format(role))
        self._role = role

    def _op(self):
        """Build the NYM write operation payload."""
        op = {
            TXN_TYPE: NYM,
            TARGET_NYM: self.identity.identifier
        }
        if self.identity.verkey is not None:
            op[VERKEY] = self.identity.verkey
        # When role was explicitly NULL, _role is truthy but self.role is
        # None, so ROLE: None is sent — matching the __init__ docstring.
        if self._role:
            op[ROLE] = self.role
        return op

    def ledgerRequest(self):
        # Only identities not yet on the ledger (no seqNo) produce a write
        # request; otherwise this implicitly returns None.
        if not self.seqNo:
            assert self.identity.identifier is not None
            return Request(identifier=self.trustAnchor, operation=self._op())

    def _opForGet(self):
        """Build the GET_NYM read operation payload."""
        return {
            TARGET_NYM: self.identity.identifier,
            TXN_TYPE: GET_NYM,
        }

    def getRequest(self, requestAuthor: Identifier):
        # As above: only produce a read request when not yet synced.
        if not self.seqNo:
            return Request(identifier=requestAuthor,
                           operation=self._opForGet())
|
keenondrums/sovrin-node
|
sovrin_common/identity.py
|
Python
|
apache-2.0
| 3,150
|
# GNU Enterprise Forms - GF Object Hierarchy - Box
#
# Copyright 2001-2007 Free Software Foundation
#
# This file is part of GNU Enterprise
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# $Id: GFBox.py,v 1.8 2009/07/24 14:59:58 oleg Exp $
"""
Logical box support
"""
from GFContainer import GFContainer
# =============================================================================
# Box widget
# =============================================================================
class GFBox(GFContainer):
    """A logical box container for form widgets."""

    # Optional box label; set from the form definition.
    label = None

    # -------------------------------------------------------------------------
    # Constructor
    # -------------------------------------------------------------------------

    def __init__(self, parent=None, name="GFBox"):
        GFContainer.__init__(self, parent, name)

    def _event_push_default_button(self):
        """
        some child GFObject notifies about user requested to push default button
        """
        # search default button in this box and push if found
        return self.push_default_button()

    def push_default_button(self):
        """
        if have some children can push_default_button do it
        """
        # Delegate to the first child that implements the protocol.
        for i in self._children:
            if hasattr(i, 'push_default_button'):
                return i.push_default_button()
        return False

    def hasTitledBorder(self):
        """
        returns True if the box has TitledBorder
        """
        # A titled border is drawn when a label exists but is not rendered
        # as a separate label widget.
        return self.label is not None and not self.hasLabel()
|
onoga/wm
|
src/gnue/forms/GFObjects/GFBox.py
|
Python
|
gpl-2.0
| 2,024
|
from jsonrpc import ServiceProxy
from getpass import getpass

# Unlock the local wallet via its JSON-RPC interface.
access = ServiceProxy("http://127.0.0.1:19118")
# Security fix: getpass does not echo the passphrase to the terminal
# (raw_input displayed it in clear text and left it in scrollback).
pwd = getpass("Enter wallet passphrase: ")
# Keep the wallet unlocked for 60 seconds.
access.walletpassphrase(pwd, 60)
|
CHANCOIN/CHANCOIN
|
contrib/wallettools/walletunlock.py
|
Python
|
mit
| 159
|
#Make a list of functions to increment their arguments by 0 to 9.
def make_incrementers():
    """Return ten functions where the i-th adds i to its argument.

    Bug fix: the loop variable is bound as a default argument so each
    closure captures the value of ``i`` at creation time. Closures bind
    names late, so the original version made every function add 9 (the
    final value of ``i``).
    """
    result = []
    for i in range(10):
        def incrementer(x, i=i):
            return x + i
        result.append(incrementer)
    return result
#This will fail
def test():
    # Exercises every incrementer with every argument; this fails while
    # make_incrementers suffers from the loop-variable-capture bug
    # (each closure sees the final value i == 9).
    incs = make_incrementers()
    for x in range(10):
        for y in range(10):
            assert incs[x](y) == x+y

test()
|
github/codeql
|
python/ql/src/Variables/LoopVariableCapture.py
|
Python
|
mit
| 394
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.