| code (string, 3–1.05M chars) | repo_name (string, 5–104) | path (string, 4–251) | language (1 class) | license (15 classes) | size (int64, 3–1.05M) |
|---|---|---|---|---|---|
class MsgpackError(Exception):
pass
class ConnectionError(MsgpackError):
pass
class ResponseError(MsgpackError):
pass
class InvalidRequest(MsgpackError):
pass
class InvalidResponse(MsgpackError):
pass
class InvalidData(MsgpackError):
pass
class TimeoutError(MsgpackError):
pass
class SerializationError(MsgpackError):
pass
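# Illustrative usage sketch (not part of the original module): since every
# exception above derives from MsgpackError, callers can catch the whole
# hierarchy at once or target a specific failure mode, e.g.:
#
#     try:
#         result = do_rpc_call()   # hypothetical helper that raises these errors
#     except TimeoutError:
#         result = None            # handle timeouts specifically
#     except MsgpackError:
#         raise                    # any other msgpack-rpc failure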
| jakm/txmsgpackrpc | txmsgpackrpc/error.py | Python | mit | 369 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2018-02-08 10:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0013_auto_20180206_0826'),
]
operations = [
migrations.AddField(
model_name='blogtag',
name='seo_description',
field=models.CharField(blank=True, max_length=255),
),
migrations.AddField(
model_name='blogtag',
name='seo_description_en',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='blogtag',
name='seo_description_ru',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='blogtag',
name='seo_description_uk',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='blogtag',
name='seo_keywords',
field=models.CharField(blank=True, max_length=255),
),
migrations.AddField(
model_name='blogtag',
name='seo_keywords_en',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='blogtag',
name='seo_keywords_ru',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='blogtag',
name='seo_keywords_uk',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='blogtag',
name='seo_title',
field=models.CharField(blank=True, max_length=255),
),
migrations.AddField(
model_name='blogtag',
name='seo_title_en',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='blogtag',
name='seo_title_ru',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='blogtag',
name='seo_title_uk',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
| mcmaxwell/idea_digital_agency | idea/blog/migrations/0014_auto_20180208_1025.py | Python | mit | 2,473 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
import uuid
from .. import models
class GroupOperations(object):
"""GroupOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def get_sample_resource_group(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Provides a resouce group with name 'testgroup101' and location 'West
US'.
:param resource_group_name: Resource Group name 'testgroup101'.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`SampleResourceGroup
<fixtures.acceptancetestssubscriptionidapiversion.models.SampleResourceGroup>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SampleResourceGroup', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
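# Illustrative usage sketch (client wiring is hypothetical, not from this file):
#
#     ops = GroupOperations(client, config, serializer, deserializer)
#     group = ops.get_sample_resource_group('testgroup101')           # deserialized model
#     raw = ops.get_sample_resource_group('testgroup101', raw=True)   # ClientRawResponse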
| yaqiyang/autorest | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/SubscriptionIdApiVersion/microsoftazuretesturl/operations/group_operations.py | Python | mit | 3,678 |
# -*- coding: utf-8 -*-
from django.views.generic import TemplateView, View
from django.template import RequestContext, Template
from django.shortcuts import render_to_response
from django.http import HttpResponse
from test_app.models import EditliveBaseFieldsTest
TEST_TEMPLATE = """
{%% extends "test_app/base.html" %%}
{%% load editlive_tags %%}
{%% block content %%}
<div class="span12">
<h1>{{ field|capfirst }}Field</h1>
<h3>Test: {{ options }}</h3>
{%% editlive "object.%(fieldname)s" %(options)s as test %%}{{ test }}
</div>
{%% endblock %%}
"""
class TestView(View):
def get(self, request, field):
options = []
for k in request.GET.keys():
options.append('%s="%s"' % (k, request.GET.get(k)))
tpl = TEST_TEMPLATE % {
'fieldname': field + '_test',
'options': ' '.join(options),
}
t = Template(tpl)
c = RequestContext(request, {
'field': field,
'object': EditliveBaseFieldsTest.objects.all()[0],
})
return HttpResponse(t.render(c))
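# Illustrative request flow (field and query names are hypothetical): a GET for
# field 'char' with query string ?maxlength=10 builds options 'maxlength="10"'
# and renders the template with
#     {% editlive "object.char_test" maxlength="10" as test %}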
class TestSoupView(View):
def get(self, request):
return render_to_response('test_app/soup.html',{
'object': EditliveBaseFieldsTest.objects.all()[0]},
context_instance=RequestContext(request))
class HomeView(View):
def get(self, request):
return render_to_response('test_app/home.html',{},
context_instance=RequestContext(request))
| h3/django-editlive | example_project/test_app/views.py | Python | bsd-3-clause | 1,504 |
def extractInsouciantetranslationsWordpressCom(item):
'''
Parser for 'insouciantetranslations.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
if item['tags'] == ['Uncategorized']:
titlemap = [
('RTTSITTF Chapter ', 'Rather Than The Son, I’ll Take The Father', 'translated'),
('WTCIDGM Chapter ', 'When The Count’s Illegitimate Daughter Gets Married', 'translated'),
('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
('Master of Dungeon', 'Master of Dungeon', 'oel'),
]
for titlecomponent, name, tl_type in titlemap:
if titlecomponent.lower() in item['title'].lower():
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
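# Illustrative flow (sketch): an item titled 'RTTSITTF Chapter 12' tagged
# ['Uncategorized'] matches the first titlemap entry and yields a 'translated'
# release for 'Rather Than The Son, I'll Take The Father'; the tagmap is then
# consulted by tag, and unmatched items fall through to False.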
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractInsouciantetranslationsWordpressCom.py | Python | bsd-3-clause | 1,213 |
# -*- coding: utf-8 -*-
##############################################################################
#
# licence AGPL version 3 or later
# see licence in __openerp__.py or http://www.gnu.org/licenses/agpl-3.0.txt
# Copyright (C) 2014 Akretion (http://www.akretion.com).
# @author David BEAL <david.beal@akretion.com>
#
##############################################################################
from openerp.osv import orm, fields
dropoff_mapping = {
# merchants (shops)
'6M': ('A2P',),
# Cityssimo pick-up point
'6J': ('CIT',),
# Post office distribution centre, Coliposte agency,
# or post office
'6H': ('CDI', 'ACP', 'BPR'),
}
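# Illustrative lookup (sketch): the carrier code chosen on the picking maps to
# the dropoff-site subtypes it accepts, e.g.
#     dropoff_mapping['6H']   # -> ('CDI', 'ACP', 'BPR')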
class AbstractColipostePicking(orm.AbstractModel):
_inherit = 'abstract.coliposte.picking'
_columns = {
'colipostefr_barcode_routage': fields.char('Barcode Routage', size=64),
}
class StockPicking(orm.Model):
_inherit = "stock.picking"
def action_done(self, cr, uid, ids, context=None):
"""
:return: see original method
"""
if context is None:
context = {}
for picking in self.browse(cr, uid, ids, context=context):
if picking.carrier_type == 'so_colissimo':
# TODO FIX this constraint
#self._check_dropoff_site_according_to_carrier(
#cr, uid, ids, context=context)
self.generate_labels(cr, uid, [picking.id], context=context)
return super(StockPicking, self).action_done(
cr, uid, ids, context=context)
def carrier_id_change(self, cr, uid, ids, carrier_id, context=None):
res = super(StockPicking, self).carrier_id_change(
cr, uid, ids, carrier_id, context=context)
carrier = self.pool['delivery.carrier'].browse(
cr, uid, carrier_id, context=context)
if carrier.type == 'so_colissimo':
if carrier.code in ['6H', '6M', '6J']:
res['value'].update({'has_final_recipient': True})
res['domain'].update({
'partner_id': [
('dropoff_site_id', '!=', False),
('dropoff_site_id.subtype', 'in',
dropoff_mapping[carrier.code])
],
})
else:
res['value'].update({'has_final_recipient': False})
res['domain'].update({'partner_id': [
('customer', '=', True)]})
return res
def _check_dropoff_site_according_to_carrier(
self, cr, uid, ids, context=None):
super(StockPicking, self)._check_dropoff_site_according_to_carrier(
cr, uid, ids, context=context)
for pick in self.browse(cr, uid, ids, context=context):
if pick.carrier_id.type == 'so_colissimo':
if pick.carrier_id.code in ['6M', '6H', '6J']:
if (not pick.partner_id.dropoff_site_id
or pick.partner_id.dropoff_site_id.subtype
not in dropoff_mapping[pick.carrier_id.code]):
raise orm.except_orm(
u"Point Relais",
u"Le champ '%s' sélectionné n'est pas compatible "
u"avec le transporteur choisi '%s'.\n\n"
u"Merci de changer l'un des deux."
% (pick.partner_id.name, pick.carrier_id.name))
return True
def _partner_data_postefr(
self, cr, uid, partner_id, max_street_size, context=None):
"used by partner_id and final_partner_id"
address = {}
for field in ['name', 'city', 'zip', 'phone', 'mobile']:
address[field] = partner_id[field]
if partner_id.name[0:15].lower() == 'bureau de poste':
address['name2'] = partner_id.name[16:]
address['name'] = 'BUREAU DE POSTE'
elif partner_id.name[0:16].lower() == 'espace cityssimo':
address['name2'] = partner_id.name[17:]
address['name'] = 'ESPACE CITYSSIMO'
if 'name2' in address:
# name2 max length is 30
address['name2'] = address['name2'][:30]
streets = self.pool['res.partner']._get_split_address(
cr, uid, partner_id, 4, max_street_size, context=context)
address.update({
'street': streets[0],
'street2': streets[1],
'street3': streets[2],
'street4': streets[3],
})
if partner_id.dropoff_site_id:
dropoff = partner_id.dropoff_site_id
if dropoff.lot_routing and dropoff.distri_sort:
address.update({
'lot_routing': dropoff.lot_routing,
'distri_sort': dropoff.distri_sort,
})
return address
def _prepare_address_postefr(self, cr, uid, pick, context=None):
# to be sure to override parent method
super(StockPicking, self)._prepare_address_postefr(
cr, uid, pick, context=context)
address = self._partner_data_postefr(
cr, uid, pick.partner_id, 35, context=context)
max_street_size = 35
if pick.carrier_code in ['6H', '6M', '6J']:
max_street_size = 20
if pick.has_final_recipient:
final_address = self._partner_data_postefr(
cr, uid, pick.final_partner_id, max_street_size,
context=context)
# TODO define if it needs to check
address['final_address'] = final_address
else:
address['final_address'] = address
for field in ['email', 'door_code', 'door_code2', 'intercom']:
address[field] = pick.partner_id[field]
return address
def _prepare_sender_postefr(self, cr, uid, pick, context=None):
sender = super(StockPicking, self)._prepare_sender_postefr(
cr, uid, pick, context=context)
if pick.carrier_type == 'so_colissimo' and pick.carrier_code == '6J':
sender['chargeur'] = \
pick.company_id.colipostefr_account_chargeur
return sender
def _prepare_delivery_postefr(self, cr, uid, pick, context=None):
delivery = super(StockPicking, self)._prepare_delivery_postefr(
cr, uid, pick, context=context)
return delivery
| akretion/carrier-delivery-colipostefr | __unported__/delivery_carrier_label_so_colissimo/stock.py | Python | agpl-3.0 | 6,422 |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import warnings
# External imports
# Bokeh imports
from ..core.has_props import abstract
from ..core.properties import Any, Bool, ColumnData, Dict, Enum, Instance, Int, JSON, List, PandasDataFrame, PandasGroupBy, Seq, String
from ..model import Model
from ..util.dependencies import import_optional
from ..util.serialization import convert_datetime_array
from ..util.warnings import BokehUserWarning
from .callbacks import Callback, CustomJS
from .filters import Filter
from .selections import Selection, SelectionPolicy, UnionRenderers
pd = import_optional('pandas')
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'ServerSentDataSource',
'AjaxDataSource',
'CDSView',
'ColumnarDataSource',
'ColumnDataSource',
'DataSource',
'GeoJSONDataSource',
'RemoteSource',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
@abstract
class DataSource(Model):
''' A base class for data source types.
'''
selected = Instance(Selection, default=lambda: Selection(), help="""
A Selection that indicates selected indices on this ``DataSource``.
""")
callback = Instance(Callback, help="""
A callback to run in the browser whenever the selection is changed.
.. note::
This property is left for backwards compatibility, but may be deprecated
in the future. Prefer ``source.selected.js_on_change(...)`` for new code.
""")
@abstract
class ColumnarDataSource(DataSource):
''' A base class for data source types, which can be mapped onto
a columnar format.
'''
selection_policy = Instance(SelectionPolicy, default=lambda: UnionRenderers(), help="""
An instance of a ``SelectionPolicy`` that determines how selections are set.
""")
class ColumnDataSource(ColumnarDataSource):
''' Maps names of columns to sequences or arrays.
The ``ColumnDataSource`` is a fundamental data structure of Bokeh. Most
plots, data tables, etc. will be driven by a ``ColumnDataSource``.
If the ``ColumnDataSource`` initializer is called with a single argument, it
can be any of the following:
* A Python ``dict`` that maps string names to sequences of values, e.g.
lists, arrays, etc.
.. code-block:: python
data = {'x': [1,2,3,4], 'y': np.array([10.0, 20.0, 30.0, 40.0])}
source = ColumnDataSource(data)
.. note::
``ColumnDataSource`` only creates a shallow copy of ``data``. Use e.g.
``ColumnDataSource(copy.deepcopy(data))`` if initializing from another
``ColumnDataSource.data`` object that you want to keep independent.
* A Pandas ``DataFrame`` object
.. code-block:: python
source = ColumnDataSource(df)
In this case the CDS will have columns corresponding to the columns of
the ``DataFrame``. If the ``DataFrame`` columns have multiple levels,
they will be flattened using an underscore (e.g. level_0_col_level_1_col).
The index of the ``DataFrame`` will be flattened to an ``Index`` of tuples
if it's a ``MultiIndex``, and then reset using ``reset_index``. The result
will be a column with the same name if the index was named, or
level_0_name_level_1_name if it was a named ``MultiIndex``. If the
``Index`` did not have a name or the ``MultiIndex`` name could not be
flattened/determined, the ``reset_index`` function will name the index column
``index``, or ``level_0`` if the name ``index`` is not available.
* A Pandas ``GroupBy`` object
.. code-block:: python
group = df.groupby(('colA', 'ColB'))
In this case the CDS will have columns corresponding to the result of
calling ``group.describe()``. The ``describe`` method generates columns
for statistical measures such as ``mean`` and ``count`` for all the
non-grouped original columns. The CDS columns are formed by joining
original column names with the computed measure. For example, if a
``DataFrame`` has columns ``'year'`` and ``'mpg'``, then passing
``df.groupby('year')`` to a CDS will result in columns such as
``'mpg_mean'``.
If the ``GroupBy.describe`` result has a named index column, then
CDS will also have a column with this name. However, if the index name
(or any subname of a ``MultiIndex``) is ``None``, then the CDS will have
a column generically named ``index`` for the index.
Note this capability to adapt ``GroupBy`` objects may only work with
Pandas ``>=0.20.0``.
.. note::
There is an implicit assumption that all the columns in a given
``ColumnDataSource`` all have the same length at all times. For this
reason, it is usually preferable to update the ``.data`` property
of a data source "all at once".
'''
data = ColumnData(String, Seq(Any), help="""
Mapping of column names to sequences of data. The columns can be, e.g.,
Python lists or tuples, NumPy arrays, etc.
The .data attribute can also be set from Pandas DataFrames or GroupBy
objects. In these cases, the behaviour is identical to passing the objects
to the ``ColumnDataSource`` initializer.
""").accepts(
PandasDataFrame, lambda x: ColumnDataSource._data_from_df(x)
).accepts(
PandasGroupBy, lambda x: ColumnDataSource._data_from_groupby(x)
).asserts(lambda _, data: len(set(len(x) for x in data.values())) <= 1,
lambda obj, name, data: warnings.warn(
"ColumnDataSource's columns must be of the same length. " +
"Current lengths: %s" % ", ".join(sorted(str((k, len(v))) for k, v in data.items())), BokehUserWarning))
def __init__(self, *args, **kw):
''' If called with a single argument that is a dict or
``pandas.DataFrame``, treat that implicitly as the "data" attribute.
'''
if len(args) == 1 and "data" not in kw:
kw["data"] = args[0]
# TODO (bev) invalid to pass args and "data", check and raise exception
raw_data = kw.pop("data", {})
if not isinstance(raw_data, dict):
if pd and isinstance(raw_data, pd.DataFrame):
raw_data = self._data_from_df(raw_data)
elif pd and isinstance(raw_data, pd.core.groupby.GroupBy):
raw_data = self._data_from_groupby(raw_data)
else:
raise ValueError("expected a dict or pandas.DataFrame, got %s" % raw_data)
super(ColumnDataSource, self).__init__(**kw)
self.data.update(raw_data)
@property
def column_names(self):
''' A list of the column names in this data source.
'''
return list(self.data)
@staticmethod
def _data_from_df(df):
''' Create a ``dict`` of columns from a Pandas ``DataFrame``,
suitable for creating a ColumnDataSource.
Args:
df (DataFrame) : data to convert
Returns:
dict[str, np.array]
'''
_df = df.copy()
# Flatten columns
if isinstance(df.columns, pd.MultiIndex):
try:
_df.columns = ['_'.join(col) for col in _df.columns.values]
except TypeError:
raise TypeError('Could not flatten MultiIndex columns. '
'use string column names or flatten manually')
# Transform columns CategoricalIndex in list
if isinstance(df.columns, pd.CategoricalIndex):
_df.columns = df.columns.tolist()
# Flatten index
index_name = ColumnDataSource._df_index_name(df)
if index_name == 'index':
_df.index = pd.Index(_df.index.values)
else:
_df.index = pd.Index(_df.index.values, name=index_name)
_df.reset_index(inplace=True)
tmp_data = {c: v.values for c, v in _df.iteritems()}
new_data = {}
for k, v in tmp_data.items():
new_data[k] = v
return new_data
@staticmethod
def _data_from_groupby(group):
''' Create a ``dict`` of columns from a Pandas ``GroupBy``,
suitable for creating a ``ColumnDataSource``.
The data generated is the result of running ``describe``
on the group.
Args:
group (GroupBy) : data to convert
Returns:
dict[str, np.array]
'''
return ColumnDataSource._data_from_df(group.describe())
@staticmethod
def _df_index_name(df):
''' Return the Bokeh-appropriate column name for a ``DataFrame`` index
If there is no named index, then ``"index"`` is returned.
If there is a single named index, then ``df.index.name`` is returned.
If there is a multi-index, and the index names are all strings, then
the names are joined with '_' and the result is returned, e.g. for a
multi-index ``['ind1', 'ind2']`` the result will be "ind1_ind2".
Otherwise if any index name is not a string, the fallback name "index"
is returned.
Args:
df (DataFrame) : the ``DataFrame`` to find an index name for
Returns:
str
'''
if df.index.name:
return df.index.name
elif df.index.names:
try:
return "_".join(df.index.names)
except TypeError:
return "index"
else:
return "index"
@classmethod
def from_df(cls, data):
''' Create a ``dict`` of columns from a Pandas ``DataFrame``,
suitable for creating a ``ColumnDataSource``.
Args:
data (DataFrame) : data to convert
Returns:
dict[str, np.array]
'''
return cls._data_from_df(data)
@classmethod
def from_groupby(cls, data):
''' Create a ``dict`` of columns from a Pandas ``GroupBy``,
suitable for creating a ``ColumnDataSource``.
The data generated is the result of running ``describe``
on the group.
Args:
data (Groupby) : data to convert
Returns:
dict[str, np.array]
'''
return cls._data_from_df(data.describe())
def to_df(self):
''' Convert this data source to pandas ``DataFrame``.
Returns:
DataFrame
'''
if not pd:
raise RuntimeError('Pandas must be installed to convert to a Pandas Dataframe')
return pd.DataFrame(self.data)
def add(self, data, name=None):
''' Appends a new column of data to the data source.
Args:
data (seq) : new data to add
name (str, optional) : column name to use.
If not supplied, generate a name of the form "Series ####"
Returns:
str: the column name used
'''
if name is None:
n = len(self.data)
while "Series %d"%n in self.data:
n += 1
name = "Series %d"%n
self.data[name] = data
return name
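# Illustrative sketch of the auto-naming behaviour above:
#
#     source = ColumnDataSource(data=dict(x=[1, 2, 3]))
#     source.add([10, 20, 30])           # -> "Series 1" (generated name)
#     source.add([4, 5, 6], name="y")    # -> "y"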
def remove(self, name):
''' Remove a column of data.
Args:
name (str) : name of the column to remove
Returns:
None
.. note::
If the column name does not exist, a warning is issued.
'''
try:
del self.data[name]
except (ValueError, KeyError):
import warnings
warnings.warn("Unable to find column '%s' in data source" % name)
def stream(self, new_data, rollover=None):
''' Efficiently update data source columns with new append-only data.
In cases where it is only necessary to append new data to existing columns, this method
can efficiently send only the new data, instead of requiring the
entire data set to be re-sent.
Args:
new_data (dict[str, seq]) : a mapping of column names to sequences of
new data to append to each column.
All columns of the data source must be present in ``new_data``,
with identical-length append data.
rollover (int, optional) : A maximum column size, above which data
from the start of the column begins to be discarded. If None,
then columns will continue to grow unbounded (default: None)
Returns:
None
Raises:
ValueError
Example:
.. code-block:: python
source = ColumnDataSource(data=dict(foo=[], bar=[]))
# has new, identical-length updates for all columns in source
new_data = {
'foo' : [10, 20],
'bar' : [100, 200],
}
source.stream(new_data)
'''
# calls internal implementation
self._stream(new_data, rollover)
def _stream(self, new_data, rollover=None, setter=None):
''' Internal implementation to efficiently update data source columns
with new append-only data. The internal implementation adds the setter
attribute. [https://github.com/bokeh/bokeh/issues/6577]
In cases where it is only necessary to append new data to existing columns, this method
can efficiently send only the new data, instead of requiring the
entire data set to be re-sent.
Args:
new_data (dict[str, seq] or DataFrame or Series) : a mapping of
column names to sequences of new data to append to each column,
a pandas DataFrame, or a pandas Series in case of a single row -
in this case the Series index is used as column names
All columns of the data source must be present in ``new_data``,
with identical-length append data.
rollover (int, optional) : A maximum column size, above which data
from the start of the column begins to be discarded. If None,
then columns will continue to grow unbounded (default: None)
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
In the context of a Bokeh server application, incoming updates
to properties will be annotated with the session that is
doing the updating. This value is propagated through any
subsequent change notifications that the update triggers.
The session can compare the event setter to itself, and
suppress any updates that originate from itself.
Returns:
None
Raises:
ValueError
Example:
.. code-block:: python
source = ColumnDataSource(data=dict(foo=[], bar=[]))
# has new, identical-length updates for all columns in source
new_data = {
'foo' : [10, 20],
'bar' : [100, 200],
}
source.stream(new_data)
'''
needs_length_check = True
if pd and isinstance(new_data, pd.Series):
new_data = new_data.to_frame().T
if pd and isinstance(new_data, pd.DataFrame):
needs_length_check = False # DataFrame lengths equal by definition
_df = new_data
newkeys = set(_df.columns)
index_name = ColumnDataSource._df_index_name(_df)
newkeys.add(index_name)
new_data = dict(_df.iteritems())
new_data[index_name] = _df.index.values
else:
newkeys = set(new_data.keys())
oldkeys = set(self.data.keys())
if newkeys != oldkeys:
missing = oldkeys - newkeys
extra = newkeys - oldkeys
if missing and extra:
raise ValueError(
"Must stream updates to all existing columns (missing: %s, extra: %s)" % (", ".join(sorted(missing)), ", ".join(sorted(extra)))
)
elif missing:
raise ValueError("Must stream updates to all existing columns (missing: %s)" % ", ".join(sorted(missing)))
else:
raise ValueError("Must stream updates to all existing columns (extra: %s)" % ", ".join(sorted(extra)))
import numpy as np
if needs_length_check:
lengths = set()
arr_types = (np.ndarray, pd.Series) if pd else np.ndarray
for k, x in new_data.items():
if isinstance(x, arr_types):
if len(x.shape) != 1:
raise ValueError("stream(...) only supports 1d sequences, got ndarray with size %r" % (x.shape,))
lengths.add(x.shape[0])
else:
lengths.add(len(x))
if len(lengths) > 1:
raise ValueError("All streaming column updates must be the same length")
# slightly awkward that we have to call convert_datetime_array here ourselves
# but the downstream code expects things to already be ms-since-epoch
for key, values in new_data.items():
if pd and isinstance(values, (pd.Series, pd.Index)):
values = values.values
old_values = self.data[key]
# Apply the transformation if the new data contains datetimes
# but the current data has already been transformed
if (isinstance(values, np.ndarray) and values.dtype.kind.lower() == 'm' and
isinstance(old_values, np.ndarray) and old_values.dtype.kind.lower() != 'm'):
new_data[key] = convert_datetime_array(values)
else:
new_data[key] = values
self.data._stream(self.document, self, new_data, rollover, setter)
def patch(self, patches, setter=None):
''' Efficiently update data source columns at specific locations
If it is only necessary to update a small subset of data in a
``ColumnDataSource``, this method can be used to efficiently update only
the subset, instead of requiring the entire data set to be sent.
This method should be passed a dictionary that maps column names to
lists of tuples that describe a patch change to apply. To replace
individual items in columns entirely, the tuples should be of the
form:
.. code-block:: python
(index, new_value) # replace a single column value
# or
(slice, new_values) # replace several column values
Values at an index or slice will be replaced with the corresponding
new values.
In the case of columns whose values are other arrays or lists (e.g.
image or patches glyphs), it is also possible to patch "subregions".
In this case the first item of the tuple should be a list or tuple whose first
element is the index of the array item in the CDS patch, and whose
subsequent elements are integer indices or slices into the array item:
.. code-block:: python
# replace the entire 10th column of the 2nd array:
+----------------- index of item in column data source
|
| +--------- row subindex into array item
| |
| | +- column subindex into array item
V V V
([2, slice(None), 10], new_values)
Imagining a list of 2d NumPy arrays, the patch above is roughly
equivalent to:
.. code-block:: python
data = [arr1, arr2, ...] # list of 2d arrays
data[2][:, 10] = new_data
There are some limitations to the kinds of slices and data that can
be accepted.
* Negative ``start``, ``stop``, or ``step`` values for slices will
result in a ``ValueError``.
* In a slice, ``start > stop`` will result in a ``ValueError``
* When patching 1d or 2d subitems, the subitems must be NumPy arrays.
* New values must be supplied as a **flattened one-dimensional array**
of the appropriate size.
Args:
patches (dict[str, list[tuple]]) : lists of patches for each column
Returns:
None
Raises:
ValueError
Example:
The following example shows how to patch entire column elements:
.. code-block:: python
source = ColumnDataSource(data=dict(foo=[10, 20, 30], bar=[100, 200, 300]))
patches = {
'foo' : [ (slice(2), [11, 12]) ],
'bar' : [ (0, 101), (2, 301) ],
}
source.patch(patches)
After this operation, the value of the ``source.data`` will be:
.. code-block:: python
dict(foo=[11, 12, 30], bar=[101, 200, 301])
For a more comprehensive example, see :bokeh-tree:`examples/howto/patch_app.py`.
'''
import numpy as np
extra = set(patches.keys()) - set(self.data.keys())
if extra:
raise ValueError("Can only patch existing columns (extra: %s)" % ", ".join(sorted(extra)))
for name, patch in patches.items():
col_len = len(self.data[name])
for ind, value in patch:
# integer index, patch single value of 1d column
if isinstance(ind, int):
if ind > col_len or ind < 0:
raise ValueError("Out-of bounds index (%d) in patch for column: %s" % (ind, name))
# slice index, patch multiple values of 1d column
elif isinstance(ind, slice):
_check_slice(ind)
if ind.stop is not None and ind.stop > col_len:
raise ValueError("Out-of bounds slice index stop (%d) in patch for column: %s" % (ind.stop, name))
# multi-index, patch sub-regions of "n-d" column
elif isinstance(ind, (list, tuple)):
if len(ind) == 0:
raise ValueError("Empty (length zero) patch multi-index")
if len(ind) == 1:
raise ValueError("Patch multi-index must contain more than one subindex")
if not isinstance(ind[0], int):
raise ValueError("Initial patch sub-index may only be integer, got: %s" % ind[0])
if ind[0] > col_len or ind[0] < 0:
raise ValueError("Out-of bounds initial sub-index (%d) in patch for column: %s" % (ind, name))
if not isinstance(self.data[name][ind[0]], np.ndarray):
raise ValueError("Can only sub-patch into columns with NumPy array items")
if len(self.data[name][ind[0]].shape) != (len(ind)-1):
raise ValueError("Shape mismatch between patch slice and sliced data")
elif isinstance(ind[0], slice):
_check_slice(ind[0])
if ind[0].stop is not None and ind[0].stop > col_len:
raise ValueError("Out-of bounds initial slice sub-index stop (%d) in patch for column: %s" % (ind.stop, name))
# Note: bounds of sub-indices after the first are not checked!
for subind in ind[1:]:
if not isinstance(subind, (int, slice)):
raise ValueError("Invalid patch sub-index: %s" % subind)
if isinstance(subind, slice):
_check_slice(subind)
else:
raise ValueError("Invalid patch index: %s" % ind)
self.data._patch(self.document, self, patches, setter)
class CDSView(Model):
''' A view into a ``ColumnDataSource`` that represents a row-wise subset.
'''
filters = List(Instance(Filter), default=[], help="""
List of filters that the view comprises.
""")
source = Instance(ColumnarDataSource, help="""
The ``ColumnDataSource`` associated with this view. Used to determine
the length of the columns.
""")
class GeoJSONDataSource(ColumnarDataSource):
'''
'''
geojson = JSON(help="""
GeoJSON that contains features for plotting. Currently
``GeoJSONDataSource`` can only process a ``FeatureCollection`` or
``GeometryCollection``.
""")
@abstract
class WebSource(ColumnDataSource):
''' Base class for web column data sources that can update from data
URLs.
.. note::
This base class is typically not useful to instantiate on its own.
'''
adapter = Instance(CustomJS, help="""
A JavaScript callback to adapt raw JSON responses to Bokeh ``ColumnDataSource``
format.
If provided, this callback is executed immediately after the JSON data is
received, but before appending or replacing data in the data source. The
``CustomJS`` callback will receive the ``AjaxDataSource`` as ``cb_obj`` and
will receive the raw JSON response as ``cb_data.response``. The callback
code should return a ``data`` object suitable for a Bokeh ``ColumnDataSource``
(i.e. a mapping of string column names to arrays of data).
""")
max_size = Int(help="""
Maximum size of the data columns. If a new fetch would result in columns
larger than ``max_size``, then earlier data is dropped to make room.
""")
mode = Enum("replace", "append", help="""
Whether to append new data to existing data (up to ``max_size``), or to
replace existing data entirely.
""")
data_url = String(help="""
A URL to fetch data from.
""")
@abstract
class RemoteSource(WebSource):
''' Base class for remote column data sources that can update from data
URLs at prescribed time intervals.
.. note::
This base class is typically not useful to instantiate on its own.
'''
polling_interval = Int(help="""
A polling interval (in milliseconds) for updating data source.
""")
class ServerSentDataSource(WebSource):
''' A data source that can populate columns from server-sent
event endpoints.
'''
class AjaxDataSource(RemoteSource):
''' A data source that can populate columns by making Ajax calls to REST
endpoints.
The ``AjaxDataSource`` can be especially useful if you want to make a
standalone document (i.e. not backed by the Bokeh server) that can still
dynamically update using an existing REST API.
The response from the REST API should match the ``.data`` property of a
standard ``ColumnDataSource``, i.e. a JSON dict that maps names to arrays
of values:
.. code-block:: python
{
'x' : [1, 2, 3, ...],
'y' : [9, 3, 2, ...]
}
Alternatively, if the REST API returns a different format, a ``CustomJS``
callback can be provided to convert the REST response into Bokeh format,
via the ``adapter`` property of this data source.
A full example can be seen at :bokeh-tree:`examples/howto/ajax_source.py`
'''
method = Enum('POST', 'GET', help="""
Specify the HTTP method to use for the Ajax request (GET or POST)
""")
if_modified = Bool(False, help="""
Whether to include an ``If-Modified-Since`` header in Ajax requests
to the server. If this header is supported by the server, then only
new data since the last request will be returned.
""")
content_type = String(default='application/json', help="""
Set the "contentType" parameter for the Ajax request.
""")
http_headers = Dict(String, String, help="""
Specify HTTP headers to set for the Ajax request.
Example:
.. code-block:: python
ajax_source.http_headers = { 'x-my-custom-header': 'some value' }
""")
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
def _check_slice(s):
if (s.start is not None and s.stop is not None and s.start > s.stop):
raise ValueError("Patch slices must have start < end, got %s" % s)
if (s.start is not None and s.start < 0) or \
(s.stop is not None and s.stop < 0) or \
(s.step is not None and s.step < 0):
raise ValueError("Patch slices must have non-negative (start, stop, step) values, got %s" % s)
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| timsnyder/bokeh | bokeh/models/sources.py | Python | bsd-3-clause | 29,941 |
#!/usr/bin/env python
import unittest
from pycoin.ecdsa import sign, verify, public_pair_for_secret_exponent, possible_public_pairs_for_signature, generator_secp256k1
class SigningTest(unittest.TestCase):
def test_sign(self):
for se in ["47f7616ea6f9b923076625b4488115de1ef1187f760e65f89eb6f4f7ff04b012"] + [x * 64 for x in "123456789abcde"]:
secret_exponent = int(se, 16)
val = 28832970699858290 #int.from_bytes(b"foo bar", byteorder="big")
sig = sign(generator_secp256k1, secret_exponent, val)
public_pair = public_pair_for_secret_exponent(generator_secp256k1, secret_exponent)
v = verify(generator_secp256k1, public_pair, val, sig)
self.assertTrue(v)
sig1 = (sig[0] + 1, sig[1])
v = verify(generator_secp256k1, public_pair, val, sig1)
self.assertFalse(v)
public_pairs = possible_public_pairs_for_signature(generator_secp256k1, val, sig)
self.assertIn(public_pair, public_pairs)
print(se)
def main():
unittest.main()
if __name__ == "__main__":
main()
| gitonio/pycoin | tests/signature_test.py | Python | mit | 1,126 |
#!/usr/bin/env python
import time
import threading
import pprint
import copy
import Bybop_NetworkAL
from Bybop_Network import *
from Bybop_Commands import *
from Bybop_Discovery import *
from Bybop_Connection import *
import ARCommandsParser
class State(object):
"""
Three-level dictionary to save the internal state of a Device.
The first level key is the project of the command.
The second level key is the class of the command.
The third level key is the command.
The content for each command depends on the command type. For normal commands,
the content is a dictionary of arguments in the form { 'name':value ... }. If
the command is a list command, then the content is a list of argument dictionaries.
If the command is a map command, then the content is a dictionary of argument
dictionaries, indexed by their first argument.
This class uses internal locks to allow proper multithreaded access.
This class also implements a wait_for function to do a non-busy wait for command
reception (i.e. wait for an answer from the device), with an optional timeout.
"""
def __init__(self):
"""
Create a new, empty, state.
Creating a new state should only be done from an Device __init__ function.
"""
self._dict = {}
self._waitlist = {}
self._lock = threading.Lock()
self._waitid = 0
def _getcldic(self, pr, cl, create=True):
if not pr in self._dict:
if create:
self._dict[pr] = {}
else:
return None
pr_d = self._dict[pr]
if not cl in pr_d:
if create:
pr_d[cl] = {}
else:
return None
return pr_d[cl]
def wait_for(self, name, timeout=None):
"""
Wait for a change on the given key.
Return True if the key changed, False if a timeout occurred
Arguments:
- name : The command to watch, in 'project.class.command' notation
Keyword arguments:
- timeout : Timeout, in floating point seconds, for the wait
"""
with self._lock:
event = threading.Event()
wid = self._waitid
self._waitid += 1
if not name in self._waitlist:
self._waitlist[name] = {}
self._waitlist[name][wid] = event
res = event.wait(timeout)
with self._lock:
if res:
event.clear()
del self._waitlist[name][wid]
if not self._waitlist[name]:
del self._waitlist[name]
return res
def _signal_waiting(self, pr, cl, cmd):
waitname = '%s.%s.%s' % (pr, cl, cmd)
if waitname in self._waitlist:
for k, v in self._waitlist[waitname].iteritems():
v.set()
def put(self, pr, cl, cmd, args):
"""
Put a new command in the dictionary.
This function only handles normal commands. For list or map commands,
see put_list or put_map functions.
Arguments:
- pr : Project name of the command
- cl : Class name of the command
- cmd : Name of the commands
- args : Argument dictionary of the command
"""
with self._lock:
pr_cl = self._getcldic(pr, cl)
if cmd in pr_cl:
del pr_cl[cmd]
pr_cl[cmd] = copy.deepcopy(args)
self._signal_waiting(pr, cl, cmd)
def put_list(self, pr, cl, cmd, args):
"""
Put a new list-command in the dictionary.
This function handles list-commands by appending the argument dictionary
to the command list.
Arguments:
- pr : Project name of the command
- cl : Class name of the command
- cmd : Name of the commands
- args : Argument dictionary of the command
"""
with self._lock:
pr_cl = self._getcldic(pr, cl)
if not cmd in pr_cl:
pr_cl[cmd] = []
pr_cl[cmd].append(copy.deepcopy(args))
self._signal_waiting(pr, cl, cmd)
def put_map(self, pr, cl, cmd, args, key):
"""
Put a new map-command in the dictionary.
This function saves the argument dictionary, indexed by its first element, in
the command dictionary.
Arguments:
- pr : Project name of the command
- cl : Class name of the command
- cmd : Name of the commands
- args : Argument dictionary of the command
- key : Value of the first argument of the command
"""
with self._lock:
pr_cl = self._getcldic(pr, cl)
if not cmd in pr_cl:
pr_cl[cmd] = {}
pr_cl[cmd][key] = copy.deepcopy(args)
self._signal_waiting(pr, cl, cmd)
def get_value(self, name):
"""
Get the current value of a command.
For commands that were never received, None is returned.
For normal commands, an argument dictionary in the { 'name':value ... } format is
returned. For list-commands, a list of such dictionaries is returned. For map-commands,
a dictionary of such dictionaries is returned.
Arguments:
- name : The command to get, in 'project.class.command' notation
"""
try:
[pr, cl, cmd] = name.split('.')
except ValueError:
return None
with self._lock:
pr_cl = self._getcldic(pr, cl)
if pr_cl is None:
ret = None
elif not cmd in pr_cl:
ret = None
else:
ret = copy.deepcopy(pr_cl[cmd])
return ret
def duplicate(self):
"""
Return a new, non-synchronized (i.e. pure dict) copy of the internal dictionary.
"""
with self._lock:
ret = copy.deepcopy(self._dict)
return ret
def dump(self):
"""
Dump the current state using a pretty printer.
This is useful for debugging purposes, to see the whole product state.
"""
with self._lock:
pprint.pprint(self._dict)
class Device(object):
"""
Simple wrapper around ARNetwork + ARCommands.
This class is subclassed for each device to add convenience functions, and proper
initialization. It should not be used directly.
"""
def __init__(self, ip, c2d_port, d2c_port, ackBuffer=-1, nackBuffer=-1, urgBuffer=-1, cmdBuffers=[]):
"""
Create and start a new Device.
The connection must have been started beforehand by Connection.connect().
Arguments:
- ip : The product ip address
- c2d_port : The remote port (on which we will send data)
- d2c_port : The local port (on which we will read data)
- ackBuffer : The buffer for acknowledged data (-1 means no buffer)
- nackBuffer : The buffer for non acknowledged data (-1 means no buffer)
- urgBuffer : The buffer for high priority data (-1 means no buffer)
- cmdBuffers : The buffers from the device which contains ARCommands
"""
inb = [i for i in (ackBuffer, nackBuffer, urgBuffer) if i > 0]
outb = cmdBuffers
self._network = Network(ip, c2d_port, d2c_port, inb, outb, self)
self._ackBuffer = ackBuffer
self._nackBuffer = nackBuffer
self._urgBuffer = urgBuffer
self._cmdBuffers = cmdBuffers
self._state = State()
self._common_init_product()
self._init_product()
def data_received(self, buf, data):
"""
Save the received data in the state.
This function is called by the internal Network, and should not be called
directly by the application.
"""
if buf in self._cmdBuffers:
dico, ok = unpack_command(data)
if not ok:
return
pr, cl, cmd = dico['proj'], dico['class'], dico['cmd']
try:
args = dico['args']
key = dico['arg0']
except:
args = {}
key = 'no_arg'
type = dico['listtype']
if type == ARCommandListType.NONE:
self._state.put(pr, cl, cmd, args)
elif type == ARCommandListType.LIST:
self._state.put_list(pr, cl, cmd, args)
elif type == ARCommandListType.MAP:
self._state.put_map(pr, cl, cmd, args, key)
def did_disconnect(self):
"""
Called when the product is disconnected.
The application should not call this function directly.
"""
print('Product disconnected !')
self.stop()
def get_state(self, copy=True):
"""
Get the product state.
Arguments:
- copy : if True, this function will return a pure dictionary copy of the state
if False, this function will return a reference to the internal state
(default True)
When requesting a non-copy state, the application should NEVER try to modify it.
To get a value from the internal state, use its 'get_value' function.
"""
if copy:
return self._state.duplicate()
else:
return self._state
def get_battery(self):
"""
Get the current battery percentage.
"""
try:
return self._state.get_value('common.CommonState.BatteryStateChanged')['percent']
except:
return 0
def send_data(self, pr, cl, cm, *args, **kwargs):
"""
Send some command to the product.
Return a NetworkStatus value.
Arguments:
- pr : Project name of the command
- cl : Class name of the command
- cm : Command name
- *args : arguments to the command
Keyword arguments:
- retries : number of retries (default 5)
- timeout : timeout (seconds) per try for acknowledgment (default 0.15)
"""
try:
cmd, buf, to = pack_command(pr, cl, cm, *args)
except CommandError as ce:
print(str(ce))
return NetworkStatus.ERROR
bufno=-1
if buf == ARCommandBuffer.NON_ACK:
bufno = self._nackBuffer
datatype = Bybop_NetworkAL.DataType.DATA
elif buf == ARCommandBuffer.ACK:
bufno = self._ackBuffer
datatype = Bybop_NetworkAL.DataType.DATA_WITH_ACK
elif buf == ARCommandBuffer.HIGH_PRIO:
bufno = self._urgBuffer
datatype = Bybop_NetworkAL.DataType.DATA_LOW_LATENCY
if bufno == -1:
print('No suitable buffer')
return NetworkStatus.ERROR
retries = kwargs['retries'] if 'retries' in kwargs else 5
timeout = kwargs['timeout'] if 'timeout' in kwargs else 0.15
status = self._network.send_data(bufno, cmd, datatype, timeout=timeout, tries=retries+1)
return status
def wait_answer(self, name, timeout=5.0):
"""
Wait for an answer from the product.
This function will block until the product sends the requested command, or the timeout
is expired.
Return True if the command was received, False if a timeout occurred.
Arguments:
- name : The command to wait, in 'project.class.command' notation
Keyword arguments:
- timeout : Maximum time (floating point seconds) to wait (default 5.0)
"""
status = self._state.wait_for(name, timeout=timeout)
return status
def _init_product(self):
raise NotImplementedError('Do not use Device directly !')
def _common_init_product(self):
self.send_data('common', 'Settings', 'AllSettings', toto=42)
self.wait_answer('common.SettingsState.AllSettingsChanged')
print( 'first answer correct')
self.send_data('common', 'Common', 'AllStates')
self.wait_answer('common.CommonState.AllStatesChanged')
print( 'second answer correct')
now = time.gmtime()
dateStr = time.strftime('%Y-%m-%d', now)
timeStr = time.strftime('T%H%M%S+0000', now)
self.send_data('common', 'Common', 'CurrentDate', dateStr)
self.send_data('common', 'Common', 'CurrentTime', timeStr)
print('Dates sent')
def dump_state(self):
print( 'Internal state :')
self._state.dump()
def stop(self):
self._network.stop()
class BebopDrone(Device):
def __init__(self, ip, c2d_port, d2c_port):
"""
Create and start a new BebopDrone device.
The connection must have been started beforehand by Connection.connect().
Arguments:
- ip : The product ip address
- c2d_port : The remote port (on which we will send data)
- d2c_port : The local port (on which we will read data)
"""
super(BebopDrone, self).__init__(ip, c2d_port, d2c_port, ackBuffer=11, nackBuffer=10, urgBuffer=12, cmdBuffers=[127, 126])
def _init_product(self):
# Deactivate video streaming
self.send_data('ARDrone3', 'MediaStreaming', 'VideoEnable', 0)
def take_off(self):
"""
Send a take off request to the Bebop Drone.
"""
self.send_data('ARDrone3', 'Piloting', 'TakeOff')
def land(self):
"""
Send a landing request to the Bebop Drone.
"""
self.send_data('ARDrone3', 'Piloting', 'Landing')
def emergency(self):
"""
Send an emergency request to the Bebop Drone.
An emergency request shuts down the motors.
"""
self.send_data('ARDrone3', 'Piloting', 'Emergency')
class JumpingSumo(Device):
def __init__(self, ip, c2d_port, d2c_port):
"""
Create and start a new JumpingSumo device.
The connection must have been started beforehand by Connection.connect().
Arguments:
- ip : The product ip address
- c2d_port : The remote port (on which we will send data)
- d2c_port : The local port (on which we will read data)
"""
super(JumpingSumo, self).__init__(ip, c2d_port, d2c_port, ackBuffer=11, nackBuffer=10, cmdBuffers=[127, 126])
def _init_product(self):
# Deactivate video streaming
self.send_data('JumpingSumo', 'MediaStreaming', 'VideoEnable', 0)
def change_posture(self, posture):
"""
Change the posture of the JumpingSumo.
Arguments:
- posture : integer value corresponding to the posture requested
Possible values are found in the ARCommands xml file (values start at 0 and grow)
Currently known values:
- 0 : standing
- 1 : jumper
- 2 : kicker
"""
return self.send_data('JumpingSumo', 'Piloting', 'Posture', posture)
def move_forward(self, speed):
return self.send_data('JumpingSumo', 'Piloting', 'PCMD', 1, speed, 0)
def move(self, speed, angle):
return self.send_data('JumpingSumo', 'Piloting', 'PCMD', 1, speed, angle)
def spin(self):
return self.send_data('JumpingSumo', 'Animations', 'SimpleAnimation', 1 )
def simpleAnimation(self, id):
"""
Currently known values:
- 0 : stop
- 1 : spin
- 2 : tap
- 3 : slowshake
- 4 : metronome
- 5 : ondulation
- 6 : spinjump
- 7 : spintoposture
- 8 : spinjump
- 9 : spiral
- 10 : slalom
"""
return self.send_data('JumpingSumo', 'Animations', 'SimpleAnimation', id )
def change_volume(self, volume):
"""
Change the volume of the JumpingSumo.
Arguments:
- volume : integer value [0; 100] : percentage of maximum volume.
"""
return self.send_data('JumpingSumo', 'AudioSettings', 'MasterVolume', volume)
def jump(self, jump_type):
"""
Make the JumpingSumo jump.
Arguments:
- jump_type : integer value corresponding to the type of jump requested
Possible values are found in the ARCommands xml file (values start at 0 and grow)
Currently known values:
- 0 : long
- 1 : high
"""
return self.send_data('JumpingSumo', 'Animations', 'Jump', jump_type)
class Airborn(object):
def __init__(self):
"""
Create a new Airborn device.
Unlike Device, this stub takes no arguments: the buffer numbers are fixed
below and the network connection setup is left commented out.
"""
inb = [i for i in (11, 10, -1) if i > 0]
outb = [127, 126]
#self._network = Network(ip, c2d_port, d2c_port, inb, outb, self)
self._ackBuffer = 11
self._nackBuffer = 10
self._urgBuffer = -1
self._cmdBuffers = [127, 126]
self._state = State()
#super(Airborn, self).__init__(ip, c2d_port, d2c_port, ackBuffer=11, nackBuffer=10, cmdBuffers=[127, 126])
def flip(self, direction):
"""
Flip the MiniDrone in the given direction.
Arguments:
- direction : integer value corresponding to the flip direction requested
Possible values are found in the ARCommands xml file (values start at 0 and grow)
Currently known values:
- 0 : front
- 1 : back
- 2 : right
- 3 : left
"""
return self.send_data('MiniDrone', 'Animations', 'Flip', direction)
def move(self, speed, angle):
return self.send_data('MiniDrone', 'Piloting', 'PCMD', 3, angle)
def takeOff(self):
return self.send_data('MiniDrone', 'Piloting', 'TakeOff')
def landing(self):
return self.send_data('MiniDrone', 'Piloting', 'Landing')
def data_received(self, buf, data):
"""
Save the received data in the state.
This function is called by the internal Network, and should not be called
directly by the application.
"""
if buf in self._cmdBuffers:
dico, ok = unpack_command(data)
if not ok:
return
pr, cl, cmd = dico['proj'], dico['class'], dico['cmd']
try:
args = dico['args']
key = dico['arg0']
except:
args = {}
key = 'no_arg'
type = dico['listtype']
if type == ARCommandListType.NONE:
self._state.put(pr, cl, cmd, args)
elif type == ARCommandListType.LIST:
self._state.put_list(pr, cl, cmd, args)
elif type == ARCommandListType.MAP:
self._state.put_map(pr, cl, cmd, args, key)
def did_disconnect(self):
"""
Called when the product is disconnected.
The application should not call this function directly.
"""
print( 'Product disconnected !')
self.stop()
def get_state(self, copy=True):
"""
Get the product state.
Arguments:
- copy : if True, this function will return a pure dictionary copy of the state
if False, this function will return a reference to the internal state
(default True)
When requesting a non-copy state, the application should NEVER try to modify it.
To get a value from the internal state, use its 'get_value' function.
"""
if copy:
return self._state.duplicate()
else:
return self._state
def get_battery(self):
"""
Get the current battery percentage.
"""
try:
return self._state.get_value('common.CommonState.BatteryStateChanged')['percent']
except:
return 0
def send_data(self, pr, cl, cm, *args, **kwargs):
"""
Send some command to the product.
Return a NetworkStatus value.
Arguments:
- pr : Project name of the command
- cl : Class name of the command
- cm : Command name
- *args : arguments to the command
Keyword arguments:
- retries : number of retries (default 5)
- timeout : timeout (seconds) per try for acknowledgment (default 0.15)
"""
try:
cmd, buf, to = pack_command(pr, cl, cm, *args)
except CommandError as ce:
print(str(ce))
return NetworkStatus.ERROR
bufno=-1
if buf == ARCommandBuffer.NON_ACK:
bufno = self._nackBuffer
datatype = Bybop_NetworkAL.DataType.DATA
elif buf == ARCommandBuffer.ACK:
bufno = self._ackBuffer
datatype = Bybop_NetworkAL.DataType.DATA_WITH_ACK
elif buf == ARCommandBuffer.HIGH_PRIO:
bufno = self._urgBuffer
datatype = Bybop_NetworkAL.DataType.DATA_LOW_LATENCY
if bufno == -1:
print( 'No suitable buffer')
return NetworkStatus.ERROR
retries = kwargs['retries'] if 'retries' in kwargs else 5
timeout = kwargs['timeout'] if 'timeout' in kwargs else 0.15
status = self._network.send_data(bufno, cmd, datatype, timeout=timeout, tries=retries+1)
return status
def wait_answer(self, name, timeout=5.0):
"""
Wait for an answer from the product.
This function will block until the product sends the requested command, or the timeout
is expired.
Return True if the command was received, False if a timeout occurred.
Arguments:
- name : The command to wait, in 'project.class.command' notation
Keyword arguments:
- timeout : Maximum time (floating point seconds) to wait (default 5.0)
"""
status = self._state.wait_for(name, timeout=timeout)
return status
    def _common_init_product(self):
        self.send_data('common', 'Settings', 'AllSettings')
        self.wait_answer('common.SettingsState.AllSettingsChanged')
        print('first answer correct')
        self.send_data('common', 'Common', 'AllStates')
        self.wait_answer('common.CommonState.AllStatesChanged')
        print('second answer correct')
        now = time.gmtime()
        dateStr = time.strftime('%Y-%m-%d', now)
        timeStr = time.strftime('T%H%M%S+0000', now)
        self.send_data('common', 'Common', 'CurrentDate', dateStr)
        self.send_data('common', 'Common', 'CurrentTime', timeStr)
        print('Dates sent')
def dump_state(self):
        print('Internal state:')
self._state.dump()
def stop(self):
self._network.stop()
def create_and_connect(device, d2c_port, controller_type, controller_name):
device_id = get_device_id(device)
ip = get_ip(device)
port = get_port(device)
if device_id not in (DeviceID.BEBOP_DRONE, DeviceID.JUMPING_SUMO, DeviceID.JUMPING_NIGHT, DeviceID.AIRBORNE_NIGHT):
print('Unknown product ' + device_id)
return None
connection = Connection(ip, port)
answer = connection.connect(d2c_port, controller_type, controller_name)
if not answer:
        print('Unable to connect')
return None
if answer['status'] != 0:
        print('Connection refused')
return None
c2d_port = answer['c2d_port']
    if device_id == DeviceID.BEBOP_DRONE:
        return BebopDrone(ip, c2d_port, d2c_port)
    elif device_id in (DeviceID.JUMPING_SUMO, DeviceID.JUMPING_NIGHT):
        return JumpingSumo(ip, c2d_port, d2c_port)
    return None
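# Hedged usage sketch (assumes `device` is a service dictionary obtained from
# Bybop's discovery module; the port and controller strings are illustrative):
#
#   drone = create_and_connect(device, d2c_port=54321,
#                              controller_type='PC',
#                              controller_name='bybop example')
#   if drone is not None:
#       print(drone.get_battery())
#       drone.stop()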
|
Tala/bybop
|
src/Bybop_Device.py
|
Python
|
bsd-3-clause
| 23,839
|
'''
working with gitlab's groups
'''
from crud import Crud
import members
class Groups(Crud, members.Members):
def __init__(self):
Crud.__init__(self, 'groups')
'''
add a new group
'''
def add(self, sysNam, grpNam):
return Crud.add(self, sysNam, {'name': grpNam, 'path': grpNam})
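# Hedged usage sketch (assumes the Crud base class has been configured with a
# GitLab endpoint and credentials; 'mysys' and 'mygroup' are illustrative):
#
#   groups = Groups()
#   groups.add('mysys', 'mygroup')  # POSTs {'name': 'mygroup', 'path': 'mygroup'}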
|
alces/gitlab-rest-client
|
groups.py
|
Python
|
bsd-2-clause
| 292
|
#!/usr/bin/env python3
from bisect import insort
from collections import defaultdict, namedtuple
from operator import attrgetter
from intervaltree import Interval, IntervalTree
from ck2parser import (rootpath, vanilladir, is_codename, TopLevel, Number,
Pair, Obj, Date as ASTDate, Comment, SimpleParser,
FullParser)
from print_time import print_time
CHECK_LIEGE_CONSISTENCY = True
LANDED_TITLES_ORDER = True # if false, date order
PRUNE_UNEXECUTED_HISTORY = True # prune all after last playable start
PRUNE_IMPOSSIBLE_STARTS = True # implies prev
PRUNE_NONBOOKMARK_STARTS = False # implies prev
PRUNE_NONERA_STARTS = False # implies prev
PRUNE_ALL_BUT_DATES = [] # overrides above
PRUNE_ALL_BUT_REGIONS = []
FORMAT_TITLE_HISTORY = False
CLEANUP_TITLE_HISTORY = False # implies previous, overrides date pruning
class Date(namedtuple('Date', ['y', 'm', 'd'])):
def __str__(self):
return '{}.{}.{}'.format(*self)
def get_next_day(self):
y, m, d = self.y, self.m, self.d + 1
if (d == 29 and m == 2 or
d == 31 and m in (4, 6, 9, 11) or
d == 32 and m in (1, 3, 5, 7, 8, 10, 12)):
m, d = m + 1, 1
if m == 13:
y, m = y + 1, 1
return Date(y, m, d)
Date.EARLIEST = Date(float('-inf'), float('-inf'), float('-inf'))
Date.LATEST = Date(float('inf'), float('inf'), float('inf'))
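# Minimal sanity check of the simplified calendar above (no leap years: 2.28
# always rolls straight over to 3.1):
#
#   assert Date(1066, 2, 28).get_next_day() == Date(1066, 3, 1)
#   assert Date(1066, 12, 31).get_next_day() == Date(1067, 1, 1)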
class TitleHistory:
keys = [
'de_jure_liege', 'historical_nomad', 'holding_dynasty',
'liege', 'holder', 'pentarch', 'law', 'vice_royalty', 'active',
'clear_tribute_suzerain', 'set_tribute_suzerain', 'conquest_culture',
'name', 'reset_name', 'adjective', 'reset_adjective',
'set_global_flag', 'clr_global_flag', 'effect', 'capital', 'government'
]
keys_sort_key = lambda cls, x: (
x.key.val != 'active' or x.value.val != 'yes',
cls.keys.index(x.key.val))
    def __init__(self, name, djl, cap=0):
self.name = name
self.has_file = False
self.attr = {k: [(Date.EARLIEST, v)] for k, v in [
('holder', 0),
('liege', djl if name.startswith('b') else 0),
('de_jure_liege', djl),
('vice_royalty', 'no'),
('historical_nomad', 'no'),
('holding_dynasty', 0),
('active', 'yes'),
('pentarch', 0),
('conquest_culture', 0),
('name', ''),
('adjective', ''),
('suzerain', 0),
('capital', cap),
('government', '')
]}
self.date_comments = defaultdict(list)
self.date_ker_comments = defaultdict(list)
self.attr_comment = {}
self.post_comments = None
self.history = defaultdict(list)
self.tree = None
def compile(self):
for k, vs in self.attr.items():
for i, (date, v) in enumerate(vs):
if date != Date.EARLIEST:
if k == 'suzerain':
if v[1] == 0:
item = 'clear_tribute_suzerain', v[0]
else:
if (i > 0 and vs[i - 1][1] != 0 and
vs[i - 1][1][0] != v[0]):
self.history[date].append(
Pair('clear_tribute_suzerain',
vs[i - 1][1][0]))
v = Obj([Pair('who', v[0]),
Pair('percentage', Number(str(v[1])))])
item = 'set_tribute_suzerain', v
elif k in ('name', 'adjective') and v == '':
item = 'reset_{}'.format(k), 'yes'
else:
item = k, v
if isinstance(item[1], int):
item = item[0], Number(str(item[1]))
pair = Pair(item[0], item[1])
if isinstance(item[1], Number):
item = item[0], item[1].val
if (date, item) in self.attr_comment:
pre, post = self.attr_comment[date, item]
pair.pre_comments = pre
pair.post_comment = post
self.history[date].append(pair)
contents = []
for date, items in sorted(self.history.items()):
items.sort(key=self.keys_sort_key)
obj = Obj(items)
obj.pre_comments = self.date_ker_comments[date]
date = ASTDate(self.date_comments[date], str(date), None)
date_pair = Pair(date, obj)
contents.append(date_pair)
self.tree = TopLevel(contents)
self.tree.post_comments = self.post_comments
def remove_dead_holders(self, parser, dead_holders):
print(self.name)
if not dead_holders:
return
if not self.tree:
self.compile()
prevprev_holder = 0
prev = -1, None, None
i = 0
while True:
last_iter = i == len(self.tree)
if not last_iter:
date_pair = self.tree.contents[i]
date = date_pair.key.val
obj = date_pair.value
try:
j, holder_pair = next((j, e) for j, e in enumerate(obj)
if e.key.val == 'holder')
except StopIteration:
i += 1
continue
holder = holder_pair.value.val
else:
date = Date.LATEST
prev_i, prev_date_pair, prev_holder_pair = prev
if prev_date_pair:
prev_date = prev_date_pair.key.val
prev_holder = prev_holder_pair.value.val
else:
prev = i, date_pair, holder_pair
i += 1
continue
#if self.name == 'c_godwad' and prev_date[0] == 1321:
# import pdb; pdb.set_trace()
if not last_iter and holder == 0:
if prev_holder == 0:
if len(obj) == 1:
# remove whole date
del self.tree.contents[i]
else:
del obj.contents[j]
else:
i += 1
continue
try:
begin, end = next((b, e)
for b, e in dead_holders if e > prev_date)
except StopIteration:
break
if begin <= prev_date:
obj = prev_date_pair.value
if prevprev_holder == 0:
if len(obj) == 1:
# remove whole date
self.tree.contents.remove(prev_date_pair)
pair_is, _ = prev_date_pair.inline_str(0, parser, 0)
comments = [Comment(s)
for s in pair_is.split('\n') if s]
if prev_i < len(self.tree):
# TODO fix how this stacks up comment level
next_thing = self.tree.contents[prev_i]
next_thing.pre_comments[:0] = comments
i -= 1
else:
self.post_comments[:0] = comments
else:
j = obj.contents.index(prev_holder_pair)
pair_is, _ = prev_holder_pair.inline_str(0, parser, 0)
comments = [Comment(s)
for s in pair_is.split('\n') if s]
next_thing = (obj.contents[j + 1]
if j + 1 < len(obj) else obj)
next_thing.pre_comments[:0] = comments
else:
obj.contents.remove(prev_holder_pair)
no_holder_pair = Pair('holder', Number(0))
pair_is, _ = prev_holder_pair.inline_str(0, parser, 0)
comments = [Comment(s)
for s in pair_is.split('\n') if s]
no_holder_pair.pre_comments[:0] = comments
obj.contents.append(no_holder_pair)
obj.contents.sort(key=self.keys_sort_key)
# possible redundant holder = 0 from that
# will be removed next iteration
if end < date:
# re-add holder when he's born
if end in self.tree.dictionary:
obj = self.tree[end]
obj.contents.append(prev_holder_pair)
obj.contents.sort(key=self.keys_sort_key)
else:
self.tree.contents.append(Pair(
ASTDate(str(end)), Obj([prev_holder_pair])))
self.tree.contents.sort(key=lambda x: x.key.val)
next_begin = next((b for b, e in dead_holders if b > begin),
Date.LATEST)
if next_begin < date:
no_holder_pair = Pair('holder', Number(0))
if end in self.tree.dictionary:
obj = self.tree[end]
obj.contents.append(no_holder_pair)
obj.contents.sort(key=self.keys_sort_key)
else:
self.tree.contents.append(Pair(
ASTDate(str(end)), Obj([no_holder_pair])))
self.tree.contents.sort(key=lambda x: x.key.val)
elif begin < date:
# no holder when he's dead
if holder == 0:
if len(date_pair.value) == 1:
date_pair.key = ASTDate(str(begin))
else:
date_pair.value.contents.remove(holder_pair)
date_pair = Pair(ASTDate(str(begin)),
Obj([holder_pair]))
self.tree.contents.append(date_pair)
self.tree.contents.sort(key=lambda x: x.key.val)
i = self.tree.contents.index(date_pair)
else:
no_holder_pair = Pair('holder', Number(0))
if begin in self.tree.dictionary:
obj = self.tree[begin]
obj.contents.append(no_holder_pair)
obj.contents.sort(key=self.keys_sort_key)
else:
self.tree.contents.append(Pair(
ASTDate(str(begin)), Obj([no_holder_pair])))
self.tree.contents.sort(key=lambda x: x.key.val)
i += 1
date = end
if last_iter:
break
if end == date:
prevprev_holder = 0
else:
prevprev_holder = prev_holder
prev = i, date_pair, holder_pair
i += 1
def write(self, parser, folder):
if not self.tree:
self.compile()
path = folder / '{}.txt'.format(self.name)
parser.write(self.tree, path)
# for monkey patching Node.pop_greatest_child to fix issue 41
# https://github.com/chaimleib/intervaltree/issues/41
def intervaltree_patch_issue_41():
from intervaltree.node import Node
def pop_greatest_child(self):
if self.right_node:
greatest_child, self[1] = self[1].pop_greatest_child()
new_self = self.rotate()
for iv in set(new_self.s_center):
if iv.contains_point(greatest_child.x_center):
new_self.s_center.remove(iv)
greatest_child.add(iv)
return (greatest_child,
new_self if new_self.s_center else new_self.prune())
x_centers = set(iv.end for iv in self.s_center)
x_centers.remove(max(x_centers))
x_centers.add(self.x_center)
new_x_center = max(x_centers)
child = Node(new_x_center, (iv for iv in self.s_center
if iv.contains_point(new_x_center)))
self.s_center -= child.s_center
return child, self if self.s_center else self[0]
Node.pop_greatest_child = pop_greatest_child
def iv_to_str(iv, end=None):
if end is not None:
iv = iv, end
if iv[0] == Date.EARLIEST and iv[1] == Date.LATEST:
s = 'always'
elif iv[0] == Date.EARLIEST:
s = 'till {}'.format(iv[1])
elif iv[1] == Date.LATEST:
s = '{} on'.format(iv[0])
elif iv[1] == iv[0].get_next_day():
s = str(iv[0])
else:
s = '{} to {}'.format(iv[0], iv[1])
if len(iv) > 2 and iv[2] is not None:
s += ' ({})'.format(iv[2])
return s
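# Hedged examples of the formatting above:
#
#   iv_to_str((Date.EARLIEST, Date.LATEST))          # -> 'always'
#   iv_to_str((Date(1066, 1, 1), Date(1066, 1, 2)))  # -> '1066.1.1' (one day)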
def title_tier(title):
return 'bcdke'.index(title[0])
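# Quick check of the tier ordering: baronies lowest, empires highest.
#
#   assert title_tier('b_x') == 0 and title_tier('e_x') == 4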
def prune_tree(ivt, date_filter, pred=None):
for filter_iv in date_filter:
if pred is None or pred(filter_iv):
ivt.chop(filter_iv.begin, filter_iv.end)
@print_time
def main():
intervaltree_patch_issue_41()
simple_parser = SimpleParser(rootpath / 'SWMH-BETA/SWMH')
if FORMAT_TITLE_HISTORY or CLEANUP_TITLE_HISTORY:
history_parser = FullParser(rootpath / 'SWMH-BETA/SWMH')
else:
history_parser = simple_parser
history_parser.no_fold_to_depth = 0
landed_titles_index = {0: -1}
title_djls = {}
histories = {}
current_index = 0
def recurse(tree, stack=[]):
nonlocal current_index
for n, v in tree:
if is_codename(n.val):
cap = v.get('capital', Number(0)).val
histories[n.val] = TitleHistory(n.val,
stack[-1] if stack else 0,
cap)
landed_titles_index[n.val] = current_index
current_index += 1
stack.append(n.val)
title_djls[n.val] = stack.copy()
recurse(v, stack=stack)
stack.pop()
for _, tree in simple_parser.parse_files('common/landed_titles/*.txt'):
recurse(tree)
date_filter = IntervalTree()
if not CLEANUP_TITLE_HISTORY:
if PRUNE_ALL_BUT_DATES:
dates = [Date(*d) for d in PRUNE_ALL_BUT_DATES]
dates.append(Date.LATEST)
date_filter.addi(Date.EARLIEST, dates[0])
for i in range(len(dates) - 1):
date_filter.addi(dates[i].get_next_day(), dates[i + 1])
elif (PRUNE_UNEXECUTED_HISTORY or PRUNE_IMPOSSIBLE_STARTS or
PRUNE_NONBOOKMARK_STARTS or PRUNE_NONERA_STARTS):
date_filter.addi(Date.EARLIEST, Date.LATEST)
last_start_date = Date.EARLIEST
for _, tree in simple_parser.parse_files('common/bookmarks/*'):
for _, v in tree:
date = Date(*v['date'].val)
if not PRUNE_NONERA_STARTS or v.has_pair('era', 'yes'):
date_filter.chop(date, date.get_next_day())
last_start_date = max(date, last_start_date)
if not PRUNE_NONBOOKMARK_STARTS and not PRUNE_NONERA_STARTS:
defines = simple_parser.parse_file('common/defines.txt')
first = Date(*defines['start_date'].val)
last = Date(*defines['last_start_date'].val)
date_filter.chop(first, last.get_next_day())
last_start_date = max(last, last_start_date)
if not PRUNE_IMPOSSIBLE_STARTS:
date_filter.clear()
date_filter.addi(last_start_date.get_next_day(),
Date.LATEST)
title_holders = defaultdict(IntervalTree)
title_unheld = defaultdict(
lambda: IntervalTree.from_tuples([(Date.EARLIEST, Date.LATEST)]))
title_lieges = defaultdict(IntervalTree)
title_lte_tier = []
char_titles = defaultdict(IntervalTree)
char_life = {}
title_dead_holders = []
title_county_unheld = []
for _, tree in simple_parser.parse_files('history/characters/*'):
for n, v in tree:
birth = next((Date(*n2.val) for n2, v2 in v
if (isinstance(n2, ASTDate) and v2.get('birth'))),
Date.LATEST)
death = next((Date(*n2.val) for n2, v2 in v
if (isinstance(n2, ASTDate) and v2.get('death'))),
Date.LATEST)
if birth <= death:
char_life[n.val] = birth, death
for path, tree in history_parser.parse_files('history/titles/*'):
title = path.stem
tier = title_tier(title)
        if len(tree) == 0 or title not in landed_titles_index:
if (title in landed_titles_index and
not (vanilladir / 'history/titles' / path.name).exists()):
if FORMAT_TITLE_HISTORY or CLEANUP_TITLE_HISTORY:
path.unlink()
else:
print('unnecessary blank? {}'.format(path.name))
continue
histories[title].has_file = True
histories[title].post_comments = tree.post_comments
if FORMAT_TITLE_HISTORY and not CLEANUP_TITLE_HISTORY:
history_parser.write(tree, path)
try:
for p in sorted(tree, key=attrgetter('key.val')):
n, v = p
date = Date(*n.val)
date_comments = histories[title].date_comments[date]
date_comments.extend(str(c) for c in n.pre_comments)
potentials = [x.post_comment for x in (p.op, v.kel, v.ker)
if x.post_comment]
if not(len(potentials) == 1 and len(v) == 1 and
not v.contents[0].post_comment):
histories[title].date_comments[date].extend(str(c) for c in
potentials)
histories[title].date_ker_comments[date].extend(
v.ker.pre_comments)
for p2 in v:
n2, v2 = p2
if n2.val in ['law', 'set_global_flag', 'clr_global_flag',
'effect']:
histories[title].history[date].append(p2)
continue
attr_vals, value = None, None
if n2.val in ['holder', 'liege']:
if v2.val in ['0', '-', title]:
value = 0
elif n2.val == 'set_tribute_suzerain':
attr_vals = histories[title].attr['suzerain']
try:
value = v2['who'].val, v2['percentage'].val
except KeyError:
continue
elif n2.val == 'clear_tribute_suzerain':
attr_vals = histories[title].attr['suzerain']
value = v2.val, 0
if attr_vals[-1][1] == 0 or attr_vals[-1][1][0] != v2.val:
continue
elif n2.val in ['reset_adjective', 'reset_name']:
if v2.val != 'yes':
continue
attr_vals = histories[title].attr[n2.val[6:]]
value = ''
if attr_vals is None:
try:
attr_vals = histories[title].attr[n2.val]
                    except KeyError:
print(title)
raise
if value is None:
value = v2.val
if attr_vals[-1][0] == date:
attr_vals[-1] = date, value
elif attr_vals[-1][1] != value:
attr_vals.append((date, value))
if (len(potentials) == 1 and len(v) == 1 and
not v2.post_comment):
if isinstance(v2, Obj):
v2.kel.post_comment = potentials[0]
else:
v2.post_comment = potentials[0]
if n2.pre_comments or v2.post_comment:
histories[title].attr_comment[date, (n2.val, value)] = (
n2.pre_comments, v2.post_comment)
except TypeError:
print(path)
raise
dead_holders = []
county_unheld = []
holders = histories[title].attr['holder']
for i, (begin, holder) in enumerate(holders):
try:
end = holders[i + 1][0]
except IndexError:
end = Date.LATEST
if holder != 0:
birth, death = char_life.get(holder,
(Date.LATEST, Date.LATEST))
if begin < birth and death < end:
if dead_holders and dead_holders[-1][1] == begin:
dead_holders[-1] = dead_holders[-1][0], birth
else:
dead_holders.append((begin, birth))
if dead_holders[-1][1] == death:
dead_holders[-1] = dead_holders[-1][0], end
else:
dead_holders.append((death, end))
elif begin < birth or death < end:
error_begin = death if birth <= begin < death else begin
error_end = birth if begin < birth <= end else end
if dead_holders and dead_holders[-1][1] == error_begin:
dead_holders[-1] = dead_holders[-1][0], error_end
else:
dead_holders.append((error_begin, error_end))
elif title.startswith('c'):
if county_unheld and county_unheld[-1][1] == begin:
county_unheld[-1] = county_unheld[-1][0], end
else:
county_unheld.append((begin, end))
title_holders[title][begin:end] = holder
if holder != 0:
char_titles[holder][begin:end] = title
title_unheld[title].chop(begin, end)
lte_tier = IntervalTree()
lieges = histories[title].attr['liege']
for i, (begin, liege) in enumerate(lieges):
try:
end = lieges[i + 1][0]
except IndexError:
end = Date.LATEST
if liege != 0 and title_tier(liege) <= tier:
lte_tier[begin:end] = liege
title_lieges[title][begin:end] = liege
if lte_tier:
title_lte_tier.append((title, lte_tier))
if dead_holders:
dead_holders = IntervalTree.from_tuples(dead_holders)
title_dead_holders.append((title, dead_holders))
if county_unheld:
county_unheld = IntervalTree.from_tuples(county_unheld)
title_county_unheld.append((title, county_unheld))
# counties without title histories
for history in histories.values():
if not history.has_file and history.name.startswith('c'):
always = IntervalTree.from_tuples([(Date.EARLIEST, Date.LATEST)])
title_county_unheld.append((history.name, always))
# possible todo: look for dead lieges,
# even though redundant with dead holders
title_liege_errors = []
for title, lieges in title_lieges.items():
errors = []
for liege_begin, liege_end, liege in sorted(lieges):
# counties are always held by someone
if liege == 0 or liege.startswith('c'):
continue
liege_unhelds = IntervalTree(title_unheld[liege])
if not title.startswith('c'):
# don't care if liege is unheld when this title is also unheld
prune_tree(liege_unhelds, title_unheld[title])
for liege_unheld in liege_unhelds[liege_begin:liege_end]:
begin = max(liege_begin, liege_unheld.begin)
end = min(liege_end, liege_unheld.end)
if errors and errors[-1][1] == begin:
errors[-1] = errors[-1][0], end
else:
errors.append((begin, end))
if errors:
errors = IntervalTree.from_tuples(errors)
title_liege_errors.append((title, errors))
if CHECK_LIEGE_CONSISTENCY:
liege_consistency_unamb = defaultdict(dict)
liege_consistency_amb = defaultdict(dict)
for char, titles in char_titles.items():
liege_chars = IntervalTree()
for holder_begin, holder_end, title in titles:
# if char == 71823 and title == 'c_roma':
# import pdb; pdb.set_trace()
lieges = title_lieges[title][holder_begin:holder_end]
for liege_begin, liege_end, liege in lieges:
liege_begin = max(liege_begin, holder_begin)
liege_end = min(liege_end, holder_end)
if liege not in title_holders:
liege_chars[liege_begin:liege_end] = 0, liege, title
continue
liege_holders = title_holders[liege][liege_begin:liege_end]
for begin, end, liege_holder in liege_holders:
begin = max(begin, liege_begin)
end = min(end, liege_end)
if liege == title:
liege_holder = 0
elif liege_holder == char:
continue
liege_chars[begin:end] = liege_holder, liege, title
prune_tree(liege_chars, date_filter)
if liege_chars:
liege_chars.split_overlaps()
items = defaultdict(
lambda: defaultdict(lambda: defaultdict(list)))
for begin, end, (liege_holder, liege, title) in liege_chars:
items[begin, end][liege_holder][liege].append(title)
for iv, liege_holders in items.items():
if len(liege_holders) > 1:
if (PRUNE_ALL_BUT_REGIONS and
all(region not in title_djls.get(title, ())
for _, ls in liege_holders.items()
for l in ls
for region in PRUNE_ALL_BUT_REGIONS) and
all(region not in title_djls.get(title, ())
for _, ls in liege_holders.items()
for _, ts in ls.items() for t in ts
for region in PRUNE_ALL_BUT_REGIONS)):
continue
tiers = [max(title_tier(title)
for _, titles in lieges.items()
for title in titles)
for _, lieges in liege_holders.items()]
if tiers.count(max(tiers)) == 1:
which_dict = liege_consistency_unamb
else:
which_dict = liege_consistency_amb
which_dict[char][iv] = liege_holders
if date_filter:
for title, errors in reversed(title_liege_errors):
prune_tree(errors, date_filter)
if not errors:
title_liege_errors.remove((title, errors))
for title, errors in reversed(title_county_unheld):
prune_tree(errors, date_filter)
if not errors:
title_county_unheld.remove((title, errors))
for title, dead_holders in reversed(title_dead_holders):
prune_tree(dead_holders, date_filter)
if not dead_holders:
title_dead_holders.remove((title, dead_holders))
if LANDED_TITLES_ORDER:
sort_key = lambda x: landed_titles_index[x[0]]
else:
sort_key = lambda x: (x[1].begin(), landed_titles_index[x[0]])
title_liege_errors.sort(key=sort_key)
title_county_unheld.sort(key=sort_key)
title_lte_tier.sort(key=sort_key)
title_dead_holders.sort(key=sort_key)
if CLEANUP_TITLE_HISTORY:
history_folder = history_parser.moddirs[0] / 'history/titles'
for history in sorted(histories.values(), key=lambda x: x.name):
if history.has_file:
dead_holders = next((l for title, l in title_dead_holders
if title == history.name), [])
dead_holders = [(x[0], x[1]) for x in sorted(dead_holders)]
history.remove_dead_holders(history_parser, dead_holders)
history.write(history_parser, history_folder)
def title_region(title):
try:
region = title_djls[title][0]
except KeyError:
return 'undefined'
if region.startswith('e'):
if region in ('e_null', 'e_placeholder'):
try:
return title_djls[title][1]
            except IndexError:
pass
return region
return 'titular'
with (rootpath / 'check_title_history.txt').open('w') as fp:
print('Liege has no holder:', file=fp)
if not title_liege_errors:
print('\t(none)', file=fp)
prev_region = None
for title, errors in title_liege_errors:
if (PRUNE_ALL_BUT_REGIONS and
all(region not in title_djls[title]
for region in PRUNE_ALL_BUT_REGIONS)):
continue
region = title_region(title)
if (not PRUNE_ALL_BUT_REGIONS and LANDED_TITLES_ORDER and
region != prev_region):
print('\t# {}'.format(region), file=fp)
line = '\t{}: '.format(title)
line += ', '.join(iv_to_str(iv) for iv in sorted(errors))
print(line, file=fp)
prev_region = region
print('County has no holder:', file=fp)
if not title_county_unheld:
print('\t(none)', file=fp)
prev_region = None
for title, errors in title_county_unheld:
if (PRUNE_ALL_BUT_REGIONS and
all(region not in title_djls[title]
for region in PRUNE_ALL_BUT_REGIONS)):
continue
region = title_region(title)
if (not PRUNE_ALL_BUT_REGIONS and LANDED_TITLES_ORDER and
region != prev_region):
print('\t# {}'.format(region), file=fp)
line = '\t{}: '.format(title)
line += ', '.join(iv_to_str(iv) for iv in sorted(errors))
print(line, file=fp)
prev_region = region
print('Liege not of higher tier:', file=fp)
if not title_lte_tier:
print('\t(none)', file=fp)
for title, lte_tier in title_lte_tier:
line = '\t{}: '.format(title)
line += ', '.join(iv_to_str(iv) for iv in sorted(lte_tier))
print(line, file=fp)
print('Holder not alive:', file=fp)
if not title_dead_holders:
print('\t(none)', file=fp)
prev_region = None
for title, dead_holders in title_dead_holders:
if (PRUNE_ALL_BUT_REGIONS and
all(region not in title_djls[title]
for region in PRUNE_ALL_BUT_REGIONS)):
continue
region = title_region(title)
if (not PRUNE_ALL_BUT_REGIONS and LANDED_TITLES_ORDER and
region != prev_region):
print('\t# {}'.format(region), file=fp)
line = '\t{}: '.format(title)
line += ', '.join(iv_to_str(iv) for iv in sorted(dead_holders))
print(line, file=fp)
prev_region = region
if CHECK_LIEGE_CONSISTENCY:
print('Liege inconsistency (unambiguous):', file=fp)
if not liege_consistency_unamb:
print('\t(none)', file=fp)
for char, ivs in sorted(liege_consistency_unamb.items()):
for iv, liege_holders in sorted(ivs.items()):
print('\t{}, {}:'.format(char, iv_to_str(iv)), file=fp)
for liege_holder, lieges in sorted(liege_holders.items()):
for liege, titles in sorted(lieges.items(),
key=lambda x: landed_titles_index[x[0]]):
print('\t\t{} ({}) <= {}'.format(
liege, liege_holder, ', '.join(sorted(titles,
key=lambda x: landed_titles_index[x]))),
file=fp)
print('Liege inconsistency (ambiguous):', file=fp)
if not liege_consistency_amb:
print('\t(none)', file=fp)
for char, ivs in sorted(liege_consistency_amb.items()):
for iv, liege_holders in sorted(ivs.items()):
print('\t{}, {}:'.format(char, iv_to_str(iv)), file=fp)
for liege_holder, lieges in sorted(liege_holders.items()):
for liege, titles in sorted(lieges.items(),
key=lambda x: landed_titles_index[x[0]]):
print('\t\t{} ({}) <= {}'.format(
liege, liege_holder, ', '.join(sorted(titles,
key=lambda x: landed_titles_index[x]))),
file=fp)
if __name__ == '__main__':
main()
|
zijistark/ck2utils
|
esc/check_title_history.py
|
Python
|
gpl-2.0
| 34,052
|
# CTCI 1.3
# URLify
import unittest
# My Solution
#-------------------------------------------------------------------------------
# CTCI Solution
def urlify(string, length):
    '''Replace spaces with '%20' in place; length is the string's true length (excluding trailing pad cells).'''
new_index = len(string)
for i in reversed(range(length)):
if string[i] == ' ':
# Replace spaces
string[new_index - 3:new_index] = '%20'
new_index -= 3
else:
# Move characters
string[new_index - 1] = string[i]
new_index -= 1
return string
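# Hedged quick check of the in-place algorithm above (the list must be padded
# with exactly two extra cells per space):
#
#   assert urlify(list('a b  '), 3) == list('a%20b')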
#-------------------------------------------------------------------------------
#Testing
class Test(unittest.TestCase):
'''Test Cases'''
# Using lists because Python strings are immutable
data = [
(list('much ado about nothing '), 22,
list('much%20ado%20about%20nothing')),
(list('Mr John Smith '), 13, list('Mr%20John%20Smith'))]
def test_urlify(self):
for [test_string, length, expected] in self.data:
actual = urlify(test_string, length)
self.assertEqual(actual, expected)
if __name__ == "__main__":
unittest.main()
|
kyle8998/Practice-Coding-Questions
|
CTCI/Chapter1/1.3-URLify.py
|
Python
|
unlicense
| 1,208
|
from flask import render_template
import erc_config
from erc_server import app
@app.route('/')
def root():
return render_template('index.html', resources=erc_config.ERC_SERVER_STATIC_PATH)
|
veasy/easy-remote-control
|
erc-server/erc_server/routes/default_routes.py
|
Python
|
mit
| 194
|
import elaspic.elaspic_model
import pytest
@pytest.mark.parametrize("alignment, scores", [
[('AAAAA', 'AAAAA'), (1.0, 1.0, None, None)],
])
def test_analyze_alignment(alignment, scores):
assert elaspic.elaspic_model.analyze_alignment(alignment) == scores
|
ostrokach/elaspic
|
tests/test_elaspic_model.py
|
Python
|
mit
| 265
|
from __future__ import unicode_literals
import base64
import logging
import threading
import spotify
from spotify import ffi, lib, serialized, utils
__all__ = ['Image', 'ImageFormat', 'ImageSize']
logger = logging.getLogger(__name__)
class Image(object):
"""A Spotify image.
You can get images from :meth:`Album.cover`, :meth:`Artist.portrait`,
:meth:`Playlist.image`, or you can create an :class:`Image` yourself from a
Spotify URI::
>>> session = spotify.Session()
# ...
>>> image = session.get_image(
... 'spotify:image:a0bdcbe11b5cd126968e519b5ed1050b0e8183d0')
>>> image.load().data_uri[:50]
u'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQEBLAEsAAD'
If ``callback`` isn't :class:`None`, it is expected to be a callable
that accepts a single argument, an :class:`Image` instance, when
the image is done loading.
"""
def __init__(
self, session, uri=None, sp_image=None, add_ref=True, callback=None
):
assert uri or sp_image, 'uri or sp_image is required'
self._session = session
if uri is not None:
image = spotify.Link(self._session, uri=uri).as_image()
if image is None:
raise ValueError(
'Failed to get image from Spotify URI: %r' % uri
)
sp_image = image._sp_image
add_ref = True
if add_ref:
lib.sp_image_add_ref(sp_image)
self._sp_image = ffi.gc(sp_image, lib.sp_image_release)
self.loaded_event = threading.Event()
handle = ffi.new_handle((self._session, self, callback))
self._session._callback_handles.add(handle)
spotify.Error.maybe_raise(
lib.sp_image_add_load_callback(
self._sp_image, _image_load_callback, handle
)
)
def __repr__(self):
return 'Image(%r)' % self.link.uri
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._sp_image == other._sp_image
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self._sp_image)
loaded_event = None
""":class:`threading.Event` that is set when the image is loaded."""
@property
def is_loaded(self):
"""Whether the image's data is loaded."""
return bool(lib.sp_image_is_loaded(self._sp_image))
@property
def error(self):
"""An :class:`ErrorType` associated with the image.
Check to see if there was problems loading the image.
"""
return spotify.ErrorType(lib.sp_image_error(self._sp_image))
def load(self, timeout=None):
"""Block until the image's data is loaded.
After ``timeout`` seconds with no results :exc:`~spotify.Timeout` is
raised. If ``timeout`` is :class:`None` the default timeout is used.
The method returns ``self`` to allow for chaining of calls.
"""
return utils.load(self._session, self, timeout=timeout)
@property
def format(self):
"""The :class:`ImageFormat` of the image.
Will always return :class:`None` if the image isn't loaded.
"""
if not self.is_loaded:
return None
return ImageFormat(lib.sp_image_format(self._sp_image))
@property
@serialized
def data(self):
"""The raw image data as a bytestring.
Will always return :class:`None` if the image isn't loaded.
"""
if not self.is_loaded:
return None
data_size_ptr = ffi.new('size_t *')
data = lib.sp_image_data(self._sp_image, data_size_ptr)
buffer_ = ffi.buffer(data, data_size_ptr[0])
data_bytes = buffer_[:]
assert len(data_bytes) == data_size_ptr[0], '%r == %r' % (
len(data_bytes),
data_size_ptr[0],
)
return data_bytes
@property
def data_uri(self):
"""The raw image data as a data: URI.
Will always return :class:`None` if the image isn't loaded.
"""
if not self.is_loaded:
return None
if self.format is not ImageFormat.JPEG:
raise ValueError('Unknown image format: %r' % self.format)
return 'data:image/jpeg;base64,%s' % (
base64.b64encode(self.data).decode('ascii')
)
@property
def link(self):
"""A :class:`Link` to the image."""
return spotify.Link(
self._session,
sp_link=lib.sp_link_create_from_image(self._sp_image),
add_ref=False,
)
@ffi.callback('void(sp_image *, void *)')
@serialized
def _image_load_callback(sp_image, handle):
logger.debug('image_load_callback called')
if handle == ffi.NULL:
logger.warning('pyspotify image_load_callback called without userdata')
return
(session, image, callback) = ffi.from_handle(handle)
session._callback_handles.remove(handle)
image.loaded_event.set()
if callback is not None:
callback(image)
# Load callbacks are by nature only called once per image, so we clean up
# and remove the load callback the first time it is called.
lib.sp_image_remove_load_callback(sp_image, _image_load_callback, handle)
@utils.make_enum('SP_IMAGE_FORMAT_')
class ImageFormat(utils.IntEnum):
pass
@utils.make_enum('SP_IMAGE_SIZE_')
class ImageSize(utils.IntEnum):
pass
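# Hedged usage sketch (assumes a logged-in spotify.Session named `session`; the
# URI is the one from the class docstring above):
#
#   def on_loaded(image):
#       print(image.format, len(image.data or b''))
#
#   img = Image(session,
#               uri='spotify:image:a0bdcbe11b5cd126968e519b5ed1050b0e8183d0',
#               callback=on_loaded)
#   img.load(timeout=10)  # blocks until loaded_event is set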
|
mopidy/pyspotify
|
spotify/image.py
|
Python
|
apache-2.0
| 5,521
|
__author__ = 'bryson'
"""
The Factory Method Pattern defines an interface for creating an object, but lets subclasses decide which class to
instantiate. Factory Method lets a class defer instantiation to subclasses.
"""
class PizzaStore(object):
def __init__(self):
pass
def order_pizza(self, type):
pizza = self.create_pizza(type)
pizza.prepare()
pizza.bake()
pizza.cut()
pizza.box()
return pizza
def create_pizza(self, type):
raise NotImplementedError()
class NYStylePizzaStore(PizzaStore):
def __init__(self):
super(NYStylePizzaStore, self).__init__()
def create_pizza(self, type):
        if type == "cheese":
            pizza = NYStyleCheesePizza()
        elif type == "pepperoni":
            pizza = NYStylePepperoniPizza()
        elif type == "clam":
            pizza = NYStyleClamPizza()
        elif type == "veggie":
            pizza = NYStyleVeggiePizza()
        else:
            pizza = None
return pizza
class Pizza(object):
def __init__(self):
pass
def prepare(self):
raise NotImplementedError()
def bake(self):
raise NotImplementedError()
def cut(self):
raise NotImplementedError()
def box(self):
raise NotImplementedError()
class NYStyleCheesePizza(Pizza):
def __init__(self):
super(NYStyleCheesePizza, self).__init__()
def prepare(self):
pass
def bake(self):
pass
def cut(self):
pass
def box(self):
pass
class NYStylePepperoniPizza(Pizza):
def __init__(self):
super(NYStylePepperoniPizza, self).__init__()
def prepare(self):
pass
def bake(self):
pass
def cut(self):
pass
def box(self):
pass
class NYStyleClamPizza(Pizza):
def __init__(self):
super(NYStyleClamPizza, self).__init__()
def prepare(self):
pass
def bake(self):
pass
def cut(self):
pass
def box(self):
pass
class NYStyleVeggiePizza(Pizza):
def __init__(self):
super(NYStyleVeggiePizza, self).__init__()
def prepare(self):
pass
def bake(self):
pass
def cut(self):
pass
def box(self):
pass
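# Hedged usage sketch of the factory method above:
#
#   ny_store = NYStylePizzaStore()
#   pizza = ny_store.order_pizza("cheese")
#   assert isinstance(pizza, NYStyleCheesePizza)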
|
bpeeters/DesignPatterns
|
Factory_Pattern/PizzaFactoryMethod.py
|
Python
|
gpl-2.0
| 2,318
|
#!/usr/bin/env python
#########################################################################################
# Spinal Cord Registration module
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2020 NeuroPoly, Polytechnique Montreal <www.neuro.polymtl.ca>
#
# License: see the LICENSE.TXT
#########################################################################################
import logging
import os # FIXME
import shutil
from math import asin, cos, sin, acos
import numpy as np
from scipy import ndimage
from nibabel import load, Nifti1Image, save
from scipy.signal import argrelmax, medfilt
from sklearn.decomposition import PCA
from scipy.io import loadmat
import spinalcordtoolbox.image as image
from spinalcordtoolbox.math import laplacian
from spinalcordtoolbox.registration.landmarks import register_landmarks
from spinalcordtoolbox.utils import sct_progress_bar, copy_helper, run_proc, tmp_create
# TODO [AJ]
# introduce potential cleanup functions in case exceptions occur and
# filesystem is left with temp artefacts everywhere?
logger = logging.getLogger(__name__)
class Paramreg(object):
def __init__(self, step=None, type=None, algo='syn', metric='MeanSquares', samplingStrategy='None',
samplingPercentage='0.2', iter='10', shrink='1', smooth='0', gradStep='0.5', deformation='1x1x0',
init='', filter_size=5, poly='5', slicewise='0', laplacian='0', dof='Tx_Ty_Tz_Rx_Ry_Rz',
smoothWarpXY='2', pca_eigenratio_th='1.6', rot_method='pca'):
"""
Class to define registration method.
:param step: int: Step number (starts at 1, except for type=label which corresponds to step=0).
:param type: {im, seg, imseg, label} Type of data used for registration. Use type=label only at step=0.
:param algo:
:param metric:
:param samplingStrategy: {'Regular', 'Random', 'None'}
:param samplingPercentage: [0, 1]
:param iter:
:param shrink:
:param smooth:
:param gradStep:
:param deformation:
:param init:
:param filter_size: int: Size of the Gaussian kernel when filtering the cord rotation estimate across z.
:param poly:
:param slicewise: {'0', '1'}: Slice-by-slice 2d transformation.
:param laplacian:
:param dof:
:param smoothWarpXY:
:param pca_eigenratio_th:
:param rot_method: {'pca', 'hog', 'pcahog'}: Rotation method to be used with algo=centermassrot.
            pca: approximates the cord segmentation by an ellipse and finds its orientation using PCA's
            eigenvectors; hog: finds the orientation using the symmetry of the image; pcahog: tries method pca and if it
            fails, uses method hog. If using hog or pcahog, type should be set to 'imseg'.
"""
self.step = step
self.type = type
self.algo = algo
self.metric = metric
self.samplingStrategy = samplingStrategy
self.samplingPercentage = samplingPercentage
self.iter = iter
self.shrink = shrink
self.smooth = smooth
self.laplacian = laplacian
self.gradStep = gradStep
self.deformation = deformation
self.slicewise = slicewise
self.init = init
self.poly = poly # only for algo=slicereg
self.filter_size = filter_size # only for algo=centermassrot
self.dof = dof # only for type=label
self.smoothWarpXY = smoothWarpXY # only for algo=columnwise
self.pca_eigenratio_th = pca_eigenratio_th # only for algo=centermassrot
self.rot_method = rot_method # only for algo=centermassrot
self.rot_src = None # this variable is used to set the angle of the cord on the src image if it is known
self.rot_dest = None # same as above for the destination image (e.g., if template, should be set to 0)
# list of possible values for self.type
self.type_list = ['im', 'seg', 'imseg', 'label']
# update constructor with user's parameters
def update(self, paramreg_user):
list_objects = paramreg_user.split(',')
for object in list_objects:
if len(object) < 2:
raise ValueError("Invalid use of -param! Check usage (usage changed from previous version)")
obj = object.split('=')
setattr(self, obj[0], obj[1])
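    # Hedged example of the '-param' string format update() expects:
    #
    #   p = Paramreg()
    #   p.update('step=1,type=seg,algo=centermassrot,metric=MeanSquares')
    #   assert p.algo == 'centermassrot' and p.step == '1'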
class ParamregMultiStep:
"""
Class to aggregate multiple Paramreg() classes into a dictionary. The method addStep() is used to build this class.
"""
    def __init__(self, listParam=None):
        # use None instead of a mutable default argument
        self.steps = dict()
        for stepParam in (listParam or []):
if isinstance(stepParam, Paramreg):
self.steps[stepParam.step] = stepParam
else:
self.addStep(stepParam)
def addStep(self, stepParam):
"""
Checks if the step is already present.
If it exists: update it.
If not: add it.
"""
param_reg = Paramreg()
param_reg.update(stepParam)
if param_reg.step is None:
raise ValueError("Parameters must contain 'step'!")
else:
if param_reg.step in self.steps:
self.steps[param_reg.step].update(stepParam)
else:
self.steps[param_reg.step] = param_reg
if int(param_reg.step) != 0 and param_reg.type not in param_reg.type_list:
raise ValueError("Parameters must contain a type, either 'im' or 'seg'")
def register_step_ants_slice_regularized_registration(src, dest, step, metricSize, fname_mask='', verbose=1):
"""
"""
# NB: If the mask is soft, fname_mask will be returned as None, so that it won't be further applied via '-x'
if fname_mask:
dest, fname_mask = image.apply_mask_if_soft(dest, fname_mask)
# Find the min (and max) z-slice index below which (and above which) slices only have voxels below a given
# threshold.
list_fname = [src, dest]
if fname_mask:
list_fname.append(fname_mask)
mask_options = ['-x', fname_mask]
else:
mask_options = []
    zmin_global, zmax_global = 0, 99999  # this is assuming that a typical image has fewer than 99999 slices
for fname in list_fname:
im = image.Image(fname)
zmin, zmax = image.find_zmin_zmax(im, threshold=0.1)
if zmin > zmin_global:
zmin_global = zmin
if zmax < zmax_global:
zmax_global = zmax
# crop images (see issue #293)
src_crop = image.add_suffix(src, '_crop')
image.spatial_crop(image.Image(src), dict(((2, (zmin_global, zmax_global)),))).save(src_crop)
dest_crop = image.add_suffix(dest, '_crop')
image.spatial_crop(image.Image(dest), dict(((2, (zmin_global, zmax_global)),))).save(dest_crop)
# update variables
src = src_crop
dest = dest_crop
scr_regStep = image.add_suffix(src, '_regStep' + str(step.step))
# estimate transfo
cmd = ['isct_antsSliceRegularizedRegistration',
'-t', 'Translation[' + step.gradStep + ']',
'-m', step.metric + '['
+ ','.join([dest, src, '1', metricSize, step.samplingStrategy, step.samplingPercentage]) + ']',
'-p', step.poly,
'-i', step.iter,
'-f', step.shrink,
'-s', step.smooth,
'-v', '1', # verbose (verbose=2 does not exist, so we force it to 1)
           '-o', '[step' + str(step.step) + ',' + scr_regStep + ']',
           # the forward warp ends up named 'step<N>Warp.nii.gz' because
           # antsSliceRegularizedRegistration appends "Warp" to the output prefix
] + mask_options
# Filepaths for output files generated by isct_antsSliceRegularizedRegistration
warp_forward_out = 'step' + str(step.step) + 'Warp.nii.gz'
warp_inverse_out = 'step' + str(step.step) + 'InverseWarp.nii.gz'
txty_csv_out = 'step' + str(step.step) + 'TxTy_poly.csv'
# FIXME: Allow these filepaths be specified as input arguments (to provide control over where files are output to)
# run command
status, output = run_proc(cmd, verbose, is_sct_binary=True)
return warp_forward_out, warp_inverse_out, txty_csv_out
def register_step_ants_registration(src, dest, step, masking, ants_registration_params, padding, metricSize, verbose=1):
"""
"""
# Pad the destination image (because ants doesn't deform the extremities)
# N.B. no need to pad if iter = 0
if not step.iter == '0':
dest_pad = image.add_suffix(dest, '_pad')
run_proc(['sct_image', '-i', dest, '-o', dest_pad, '-pad', '0,0,' + str(padding)])
dest = dest_pad
# apply Laplacian filter
if not step.laplacian == '0':
logger.info(f"\nApply Laplacian filter")
sigmas = [step.laplacian, step.laplacian, 0]
src_img = image.Image(src)
src_out = src_img.copy()
src = image.add_suffix(src, '_laplacian')
dest = image.add_suffix(dest, '_laplacian')
sigmas = [sigmas[i] / src_img.dim[i + 4] for i in range(3)]
src_out.data = laplacian(src_out.data, sigmas)
src_out.save(path=src)
dest_img = image.Image(dest)
dest_out = dest_img.copy()
dest_out.data = laplacian(dest_out.data, sigmas)
dest_out.save(path=dest)
# Estimate transformation
logger.info(f"\nEstimate transformation")
scr_regStep = image.add_suffix(src, '_regStep' + str(step.step))
cmd = ['isct_antsRegistration',
'--dimensionality', '3',
'--transform', step.algo + '[' + step.gradStep
+ ants_registration_params[step.algo.lower()] + ']',
'--metric', step.metric + '[' + dest + ',' + src + ',1,' + metricSize + ']',
'--convergence', step.iter,
'--shrink-factors', step.shrink,
'--smoothing-sigmas', step.smooth + 'mm',
'--restrict-deformation', step.deformation,
'--output', '[step' + str(step.step) + ',' + scr_regStep + ']',
'--interpolation', 'BSpline[3]',
'--verbose', '1',
] + masking
# add init translation
if step.init:
init_dict = {'geometric': '0', 'centermass': '1', 'origin': '2'}
cmd += ['-r', '[' + dest + ',' + src + ',' + init_dict[step.init] + ']']
# run command
status, output = run_proc(cmd, verbose, is_sct_binary=True)
# get appropriate file name for transformation
if step.algo in ['rigid', 'affine', 'translation']:
warp_forward_out = 'step' + str(step.step) + '0GenericAffine.mat'
warp_inverse_out = '-step' + str(step.step) + '0GenericAffine.mat'
else:
warp_forward_out = 'step' + str(step.step) + '0Warp.nii.gz'
warp_inverse_out = 'step' + str(step.step) + '0InverseWarp.nii.gz'
return warp_forward_out, warp_inverse_out
def register_step_slicewise_ants(src, dest, step, ants_registration_params, fname_mask, remove_temp_files, verbose=1):
"""
"""
# if shrink!=1, force it to be 1 (otherwise, it generates a wrong 3d warping field). TODO: fix that!
if not step.shrink == '1':
logger.warning(f"\nWhen using slicewise with SyN or BSplineSyN, shrink factor needs to be one. Forcing shrink=1")
step.shrink = '1'
warp_forward_out = 'step' + str(step.step) + 'Warp.nii.gz'
warp_inverse_out = 'step' + str(step.step) + 'InverseWarp.nii.gz'
register_slicewise(
fname_src=src,
fname_dest=dest,
paramreg=step,
fname_mask=fname_mask,
warp_forward_out=warp_forward_out,
warp_inverse_out=warp_inverse_out,
ants_registration_params=ants_registration_params,
remove_temp_files=remove_temp_files,
verbose=verbose
)
return warp_forward_out, warp_inverse_out
def register_step_slicewise(src, dest, step, ants_registration_params, remove_temp_files, verbose=1):
"""
"""
# smooth data
if not step.smooth == '0':
logger.warning(f"\nAlgo {step.algo} will ignore the parameter smoothing.\n")
warp_forward_out = 'step' + str(step.step) + 'Warp.nii.gz'
warp_inverse_out = 'step' + str(step.step) + 'InverseWarp.nii.gz'
register_slicewise(
fname_src=src,
fname_dest=dest,
paramreg=step,
fname_mask='',
warp_forward_out=warp_forward_out,
warp_inverse_out=warp_inverse_out,
ants_registration_params=ants_registration_params,
remove_temp_files=remove_temp_files,
verbose=verbose
)
return warp_forward_out, warp_inverse_out
def register_step_label(src, dest, step, verbose=1):
"""
"""
warp_forward_out = 'step' + step.step + '0GenericAffine.txt'
warp_inverse_out = '-step' + step.step + '0GenericAffine.txt'
register_landmarks(src,
dest,
step.dof,
fname_affine=warp_forward_out,
verbose=verbose)
return warp_forward_out, warp_inverse_out
def register_slicewise(fname_src, fname_dest, paramreg=None, fname_mask='', warp_forward_out='step0Warp.nii.gz',
warp_inverse_out='step0InverseWarp.nii.gz', ants_registration_params=None,
path_qc='./', remove_temp_files=0, verbose=0):
"""
Main function that calls various methods for slicewise registration.
:param fname_src: Str or List: If List, first element is image, second element is segmentation.
:param fname_dest: Str or List: If List, first element is image, second element is segmentation.
:param paramreg: Class Paramreg()
:param fname_mask:
:param warp_forward_out:
:param warp_inverse_out:
:param ants_registration_params:
:param path_qc:
:param remove_temp_files:
:param verbose:
:return:
"""
# create temporary folder
path_tmp = tmp_create(basename="register")
# copy data to temp folder
logger.info(f"\nCopy input data to temp folder...")
if isinstance(fname_src, list):
# TODO: swap 0 and 1 (to be consistent with the child function below)
src_img = image.convert(image.Image(fname_src[0]))
src_img.save(os.path.join(path_tmp, "src.nii"), mutable=True, verbose=verbose)
src_seg = image.convert(image.Image(fname_src[1]))
src_seg.save(os.path.join(path_tmp, "src_seg.nii"), mutable=True, verbose=verbose)
dest_img = image.convert(image.Image(fname_dest[0]))
dest_img.save(os.path.join(path_tmp, "dest.nii"), mutable=True, verbose=verbose)
dest_seg = image.convert(image.Image(fname_dest[1]))
dest_seg.save(os.path.join(path_tmp, "dest_seg.nii"), mutable=True, verbose=verbose)
else:
src_img = image.convert(image.Image(fname_src))
src_img.save(os.path.join(path_tmp, "src.nii"), mutable=True, verbose=verbose)
dest_image = image.convert(image.Image(fname_dest))
dest_image.save(os.path.join(path_tmp, "dest.nii"), mutable=True, verbose=verbose)
if fname_mask != '':
mask_img = image.convert(image.Image(fname_mask))
mask_img.save(os.path.join(path_tmp, "mask.nii.gz"), mutable=True, verbose=verbose)
# go to temporary folder
curdir = os.getcwd()
os.chdir(path_tmp)
# Calculate displacement
if paramreg.algo in ['centermass', 'centermassrot']:
# translation of center of mass between source and destination in voxel space
        if paramreg.algo == 'centermass':
rot_method = 'none'
else:
rot_method = paramreg.rot_method
if rot_method in ['hog', 'pcahog']:
src_input = ['src_seg.nii', 'src.nii']
dest_input = ['dest_seg.nii', 'dest.nii']
else:
src_input = ['src.nii']
dest_input = ['dest.nii']
register2d_centermassrot(
src_input, dest_input, paramreg=paramreg, fname_warp=warp_forward_out, fname_warp_inv=warp_inverse_out,
rot_method=rot_method, filter_size=paramreg.filter_size, path_qc=path_qc, verbose=verbose,
pca_eigenratio_th=float(paramreg.pca_eigenratio_th), )
elif paramreg.algo == 'columnwise':
# scaling R-L, then column-wise center of mass alignment and scaling
register2d_columnwise('src.nii',
'dest.nii',
fname_warp=warp_forward_out,
fname_warp_inv=warp_inverse_out,
verbose=verbose,
path_qc=path_qc,
smoothWarpXY=int(paramreg.smoothWarpXY),
)
# ANTs registration
else:
# convert SCT flags into ANTs-compatible flags
algo_dic = {'translation': 'Translation', 'rigid': 'Rigid', 'affine': 'Affine', 'syn': 'SyN', 'bsplinesyn': 'BSplineSyN', 'centermass': 'centermass'}
paramreg.algo = algo_dic[paramreg.algo]
# run slicewise registration
register2d('src.nii',
'dest.nii',
fname_mask=fname_mask,
fname_warp=warp_forward_out,
fname_warp_inv=warp_inverse_out,
paramreg=paramreg,
ants_registration_params=ants_registration_params,
verbose=verbose,
)
logger.info(f"\nMove warping fields...")
copy_helper(warp_forward_out, curdir)
copy_helper(warp_inverse_out, curdir)
# go back
os.chdir(curdir)
if remove_temp_files:
logger.info(f"rm -rf {path_tmp}")
shutil.rmtree(path_tmp)
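# Hedged usage sketch (assumes the segmentation files exist on disk; the
# filenames are illustrative):
#
#   param = Paramreg(algo='centermass', type='seg')
#   register_slicewise('src_seg.nii.gz', 'dest_seg.nii.gz', paramreg=param,
#                      warp_forward_out='fwd.nii.gz',
#                      warp_inverse_out='inv.nii.gz')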
def register2d_centermassrot(fname_src, fname_dest, paramreg=None, fname_warp='warp_forward.nii.gz',
fname_warp_inv='warp_inverse.nii.gz', rot_method='pca', filter_size=0, path_qc='./',
verbose=0, pca_eigenratio_th=1.6, th_max_angle=40):
"""
Rotate the source image to match the orientation of the destination image, using the first and second eigenvector
of the PCA. This function should be used on segmentations (not images).
This works for 2D and 3D images. If 3D, it splits the image and performs the rotation slice-by-slice.
:param fname_src: List: Name of moving image. If rot=0 or 1, only the first element is used (should be a
segmentation). If rot=2 or 3, the first element is a segmentation and the second is an image.
:param fname_dest: List: Name of fixed image. If rot=0 or 1, only the first element is used (should be a
segmentation). If rot=2 or 3, the first element is a segmentation and the second is an image.
:param paramreg: Class Paramreg()
:param fname_warp: name of output 3d forward warping field
:param fname_warp_inv: name of output 3d inverse warping field
:param rot_method: {'none', 'pca', 'hog', 'pcahog'}. Depending on the rotation method, input might be segmentation
only or segmentation and image.
:param filter_size: size of the gaussian filter for regularization along z for rotation angle (type: float).
0: no regularization
:param path_qc:
:param verbose:
:param pca_eigenratio_th: threshold for the ratio between the first and second eigenvector of the estimated ellipse
for the PCA rotation detection method. If below this threshold, the estimation will be discarded (poorly robust)
:param th_max_angle: threshold of the absolute value of the estimated rotation using the PCA method, above
which the estimation will be discarded (unlikely to happen genuinely and hence considered outlier)
:return:
"""
# TODO: no need to split the src or dest if it is the template (we know its centerline and orientation already)
if verbose == 2:
import matplotlib.pyplot as plt
# Get image dimensions and retrieve nz
logger.info(f"\nGet image dimensions of destination image...")
nx, ny, nz, nt, px, py, pz, pt = image.Image(fname_dest[0]).dim
logger.info(f" matrix size: {str(nx)} x {str(ny)} x {str(nz)}")
logger.info(f" voxel size: {str(px)}mm x {str(py)}mm x {str(nz)}mm")
# Split source volume along z
logger.info(f"\nSplit input segmentation...")
im_src = image.Image(fname_src[0])
split_source_list = image.split_img_data(im_src, 2)
for im in split_source_list:
im.save()
# Split destination volume along z
logger.info(f"\nSplit destination segmentation...")
im_dest = image.Image(fname_dest[0])
split_dest_list = image.split_img_data(im_dest, 2)
for im in split_dest_list:
im.save()
data_src = im_src.data
data_dest = im_dest.data
# if input data is 2D, reshape into pseudo 3D (only one slice)
if len(data_src.shape) == 2:
new_shape = list(data_src.shape)
new_shape.append(1)
new_shape = tuple(new_shape)
data_src = data_src.reshape(new_shape)
data_dest = data_dest.reshape(new_shape)
# Deal with cases where both an image and segmentation are input
if len(fname_src) > 1:
# Split source volume along z
logger.info(f"\nSplit input image...")
im_src_im = image.Image(fname_src[1])
split_source_list = image.split_img_data(im_src_im, 2)
for im in split_source_list:
im.save()
# Split destination volume along z
logger.info(f"\nSplit destination image...")
im_dest_im = image.Image(fname_dest[1])
split_dest_list = image.split_img_data(im_dest_im, 2)
for im in split_dest_list:
im.save()
data_src_im = im_src_im.data
data_dest_im = im_dest_im.data
# initialize displacement and rotation
coord_src = [None] * nz
pca_src = [None] * nz
coord_dest = [None] * nz
pca_dest = [None] * nz
centermass_src = np.zeros([nz, 2])
centermass_dest = np.zeros([nz, 2])
# displacement_forward = np.zeros([nz, 2])
# displacement_inverse = np.zeros([nz, 2])
angle_src_dest = np.zeros(nz)
z_nonzero = []
th_max_angle *= np.pi / 180
# Loop across slices
for iz in sct_progress_bar(range(0, nz), unit='iter', unit_scale=False, desc="Estimate cord angle for each slice",
ascii=False, ncols=100):
try:
# compute PCA and get center or mass based on segmentation
coord_src[iz], pca_src[iz], centermass_src[iz, :] = compute_pca(data_src[:, :, iz])
coord_dest[iz], pca_dest[iz], centermass_dest[iz, :] = compute_pca(data_dest[:, :, iz])
# detect rotation using the HOG method
if rot_method in ['hog', 'pcahog']:
angle_src_hog, conf_score_src = find_angle_hog(data_src_im[:, :, iz], centermass_src[iz, :],
px, py, angle_range=th_max_angle)
                angle_dest_hog, conf_score_dest = find_angle_hog(data_dest_im[:, :, iz], centermass_dest[iz, :],
px, py, angle_range=th_max_angle)
# In case no maxima is found (it should never happen)
if (angle_src_hog is None) or (angle_dest_hog is None):
logger.warning(f"Slice #{str(iz)} not angle found in dest or src. It will be ignored.")
continue
if rot_method == 'hog':
angle_src = -angle_src_hog # flip sign to be consistent with PCA output
angle_dest = angle_dest_hog
# Detect rotation using the PCA or PCA-HOG method
if rot_method in ['pca', 'pcahog']:
eigenv_src = pca_src[iz].components_.T[0][0], pca_src[iz].components_.T[1][0]
eigenv_dest = pca_dest[iz].components_.T[0][0], pca_dest[iz].components_.T[1][0]
# Make sure first element is always positive (to prevent sign flipping)
if eigenv_src[0] <= 0:
eigenv_src = tuple([i * (-1) for i in eigenv_src])
if eigenv_dest[0] <= 0:
eigenv_dest = tuple([i * (-1) for i in eigenv_dest])
angle_src = angle_between(eigenv_src, [1, 0])
angle_dest = angle_between([1, 0], eigenv_dest)
# compute ratio between axis of PCA
pca_eigenratio_src = pca_src[iz].explained_variance_ratio_[0] / pca_src[iz].explained_variance_ratio_[1]
pca_eigenratio_dest = pca_dest[iz].explained_variance_ratio_[0] / pca_dest[iz].explained_variance_ratio_[1]
# angle is set to 0 if either ratio between axis is too low or outside angle range
if pca_eigenratio_src < pca_eigenratio_th or angle_src > th_max_angle or angle_src < -th_max_angle:
if rot_method == 'pca':
angle_src = 0
elif rot_method == 'pcahog':
logger.info("Switched to method 'hog' for slice: {}".format(iz))
angle_src = -angle_src_hog # flip sign to be consistent with PCA output
if pca_eigenratio_dest < pca_eigenratio_th or angle_dest > th_max_angle or angle_dest < -th_max_angle:
if rot_method == 'pca':
angle_dest = 0
elif rot_method == 'pcahog':
logger.info("Switched to method 'hog' for slice: {}".format(iz))
angle_dest = angle_dest_hog
if not rot_method == 'none':
                # bypass estimation if the source or destination angle is known a priori
if paramreg.rot_src is not None:
angle_src = paramreg.rot_src
if paramreg.rot_dest is not None:
angle_dest = paramreg.rot_dest
# the angle between (src, dest) is the angle between (src, origin) + angle between (origin, dest)
angle_src_dest[iz] = angle_src + angle_dest
# append to list of z_nonzero
z_nonzero.append(iz)
# if one of the slice is empty, ignore it
except ValueError:
logger.warning(f"Slice #{str(iz)} is empty. It will be ignored.")
# regularize rotation
if not filter_size == 0 and (rot_method in ['pca', 'hog', 'pcahog']):
# Filtering the angles by gaussian filter
angle_src_dest_regularized = ndimage.filters.gaussian_filter1d(angle_src_dest[z_nonzero], filter_size)
if verbose == 2:
plt.plot(180 * angle_src_dest[z_nonzero] / np.pi, 'ob')
plt.plot(180 * angle_src_dest_regularized / np.pi, 'r', linewidth=2)
plt.grid()
plt.xlabel('z')
plt.ylabel('Angle (deg)')
plt.title("Regularized cord angle estimation (filter_size: {})".format(filter_size))
plt.savefig(os.path.join(path_qc, 'register2d_centermassrot_regularize_rotation.png'))
plt.close()
# update variable
angle_src_dest[z_nonzero] = angle_src_dest_regularized
warp_x = np.zeros(data_dest.shape)
warp_y = np.zeros(data_dest.shape)
warp_inv_x = np.zeros(data_src.shape)
warp_inv_y = np.zeros(data_src.shape)
# construct 3D warping matrix
for iz in sct_progress_bar(z_nonzero, unit='iter', unit_scale=False, desc="Build 3D deformation field",
ascii=False, ncols=100):
# get indices of x and y coordinates
row, col = np.indices((nx, ny))
# build 2xn array of coordinates in pixel space
coord_init_pix = np.array([row.ravel(), col.ravel(), np.array(np.ones(len(row.ravel())) * iz)]).T
# convert coordinates to physical space
coord_init_phy = np.array(im_src.transfo_pix2phys(coord_init_pix))
# get centermass coordinates in physical space
centermass_src_phy = im_src.transfo_pix2phys([[centermass_src[iz, :].T[0], centermass_src[iz, :].T[1], iz]])[0]
centermass_dest_phy = im_src.transfo_pix2phys([[centermass_dest[iz, :].T[0], centermass_dest[iz, :].T[1], iz]])[0]
# build rotation matrix
R = np.matrix(((cos(angle_src_dest[iz]), sin(angle_src_dest[iz])), (-sin(angle_src_dest[iz]), cos(angle_src_dest[iz]))))
# build 3D rotation matrix
R3d = np.eye(3)
R3d[0:2, 0:2] = R
# apply forward transformation (in physical space)
coord_forward_phy = np.array(np.dot((coord_init_phy - np.transpose(centermass_dest_phy)), R3d) + np.transpose(centermass_src_phy))
# apply inverse transformation (in physical space)
coord_inverse_phy = np.array(np.dot((coord_init_phy - np.transpose(centermass_src_phy)), R3d.T) + np.transpose(centermass_dest_phy))
# display rotations
if verbose == 2 and not angle_src_dest[iz] == 0 and not rot_method == 'hog':
# compute new coordinates
coord_src_rot = coord_src[iz] * R
coord_dest_rot = coord_dest[iz] * R.T
# generate figure
plt.figure(figsize=(9, 9))
# plt.ion() # enables interactive mode (allows keyboard interruption)
for isub in [221, 222, 223, 224]:
# plt.figure
plt.subplot(isub)
# ax = matplotlib.pyplot.axis()
try:
if isub == 221:
plt.scatter(coord_src[iz][:, 0], coord_src[iz][:, 1], s=5, marker='o', zorder=10, color='steelblue',
alpha=0.5)
pcaaxis = pca_src[iz].components_.T
pca_eigenratio = pca_src[iz].explained_variance_ratio_
plt.title('src')
elif isub == 222:
plt.scatter([coord_src_rot[i, 0] for i in range(len(coord_src_rot))], [coord_src_rot[i, 1] for i in range(len(coord_src_rot))], s=5, marker='o', zorder=10, color='steelblue', alpha=0.5)
pcaaxis = pca_dest[iz].components_.T
pca_eigenratio = pca_dest[iz].explained_variance_ratio_
plt.title('src_rot')
elif isub == 223:
plt.scatter(coord_dest[iz][:, 0], coord_dest[iz][:, 1], s=5, marker='o', zorder=10, color='red',
alpha=0.5)
pcaaxis = pca_dest[iz].components_.T
pca_eigenratio = pca_dest[iz].explained_variance_ratio_
plt.title('dest')
elif isub == 224:
plt.scatter([coord_dest_rot[i, 0] for i in range(len(coord_dest_rot))], [coord_dest_rot[i, 1] for i in range(len(coord_dest_rot))], s=5, marker='o', zorder=10, color='red', alpha=0.5)
pcaaxis = pca_src[iz].components_.T
pca_eigenratio = pca_src[iz].explained_variance_ratio_
plt.title('dest_rot')
plt.text(-2.5, -2, 'eigenvectors:', horizontalalignment='left', verticalalignment='bottom')
plt.text(-2.5, -2.8, str(pcaaxis), horizontalalignment='left', verticalalignment='bottom')
plt.text(-2.5, 2.5, 'eigenval_ratio:', horizontalalignment='left', verticalalignment='bottom')
plt.text(-2.5, 2, str(pca_eigenratio), horizontalalignment='left', verticalalignment='bottom')
plt.plot([0, pcaaxis[0, 0]], [0, pcaaxis[1, 0]], linewidth=2, color='red')
plt.plot([0, pcaaxis[0, 1]], [0, pcaaxis[1, 1]], linewidth=2, color='orange')
plt.axis([-3, 3, -3, 3])
plt.gca().set_aspect('equal', adjustable='box')
                except Exception:
                    # re-raise the original exception instead of masking it with a bare Exception
                    raise
plt.savefig(os.path.join(path_qc, 'register2d_centermassrot_pca_z' + str(iz) + '.png'))
plt.close()
# construct 3D warping matrix
warp_x[:, :, iz] = np.array([coord_forward_phy[i, 0] - coord_init_phy[i, 0] for i in range(nx * ny)]).reshape((nx, ny))
warp_y[:, :, iz] = np.array([coord_forward_phy[i, 1] - coord_init_phy[i, 1] for i in range(nx * ny)]).reshape((nx, ny))
warp_inv_x[:, :, iz] = np.array([coord_inverse_phy[i, 0] - coord_init_phy[i, 0] for i in range(nx * ny)]).reshape((nx, ny))
warp_inv_y[:, :, iz] = np.array([coord_inverse_phy[i, 1] - coord_init_phy[i, 1] for i in range(nx * ny)]).reshape((nx, ny))
# Generate forward warping field (defined in destination space)
generate_warping_field(fname_dest[0], warp_x, warp_y, fname_warp, verbose)
generate_warping_field(fname_src[0], warp_inv_x, warp_inv_y, fname_warp_inv, verbose)
def register2d_columnwise(fname_src, fname_dest, fname_warp='warp_forward.nii.gz', fname_warp_inv='warp_inverse.nii.gz', verbose=0, path_qc='./', smoothWarpXY=1):
"""
Column-wise non-linear registration of segmentations. Based on an idea from Allan Martin.
- Assumes src/dest are segmentations (not necessarily binary), and already registered by center of mass
- Assumes src/dest are in RPI orientation.
- Split along Z, then for each slice:
- scale in R-L direction to match src/dest
- loop across R-L columns and register by (i) matching center of mass and (ii) scaling.
:param fname_src:
:param fname_dest:
:param fname_warp:
:param fname_warp_inv:
:param verbose:
:return:
"""
# initialization
th_nonzero = 0.5 # values below are considered zero
# for display stuff
if verbose == 2:
import matplotlib.pyplot as plt
# Get image dimensions and retrieve nz
logger.info(f"\nGet image dimensions of destination image...")
nx, ny, nz, nt, px, py, pz, pt = image.Image(fname_dest).dim
logger.info(f" matrix size: {str(nx)} x {str(ny)} x {str(nz)}")
logger.info(f" voxel size: {str(px)}mm x {str(py)}mm x {str(nz)}mm")
# Split source volume along z
logger.info(f"\nSplit input volume...")
im_src = image.Image('src.nii')
split_source_list = image.split_img_data(im_src, 2)
for im in split_source_list:
im.save()
# Split destination volume along z
logger.info(f"\nSplit destination volume...")
im_dest = image.Image('dest.nii')
split_dest_list = image.split_img_data(im_dest, 2)
for im in split_dest_list:
im.save()
# open image
data_src = im_src.data
data_dest = im_dest.data
if len(data_src.shape) == 2:
# reshape 2D data into pseudo 3D (only one slice)
new_shape = list(data_src.shape)
new_shape.append(1)
new_shape = tuple(new_shape)
data_src = data_src.reshape(new_shape)
data_dest = data_dest.reshape(new_shape)
# initialize forward warping field (defined in destination space)
warp_x = np.zeros(data_dest.shape)
warp_y = np.zeros(data_dest.shape)
# initialize inverse warping field (defined in source space)
warp_inv_x = np.zeros(data_src.shape)
warp_inv_y = np.zeros(data_src.shape)
# Loop across slices
logger.info(f"\nEstimate columnwise transformation...")
for iz in range(0, nz):
logger.info(f"{str(iz)}/{str(nz)}..")
# PREPARE COORDINATES
# ============================================================
# get indices of x and y coordinates
row, col = np.indices((nx, ny))
# build 2xn array of coordinates in pixel space
# ordering of indices is as follows:
# coord_init_pix[:, 0] = 0, 0, 0, ..., 1, 1, 1..., nx, nx, nx
# coord_init_pix[:, 1] = 0, 1, 2, ..., 0, 1, 2..., 0, 1, 2
coord_init_pix = np.array([row.ravel(), col.ravel(), np.array(np.ones(len(row.ravel())) * iz)]).T
# convert coordinates to physical space
coord_init_phy = np.array(im_src.transfo_pix2phys(coord_init_pix))
# get 2d data from the selected slice
src2d = data_src[:, :, iz]
dest2d = data_dest[:, :, iz]
# julien 20161105
#<<<
# threshold at 0.5
src2d[src2d < th_nonzero] = 0
dest2d[dest2d < th_nonzero] = 0
# get non-zero coordinates, and transpose to obtain nx2 dimensions
coord_src2d = np.array(np.where(src2d > 0)).T
coord_dest2d = np.array(np.where(dest2d > 0)).T
# here we use 0.5 as threshold for non-zero value
# coord_src2d = np.array(np.where(src2d > th_nonzero)).T
# coord_dest2d = np.array(np.where(dest2d > th_nonzero)).T
#>>>
# SCALING R-L (X dimension)
# ============================================================
# sum data across Y to obtain 1D signal: src_y and dest_y
src1d = np.sum(src2d, 1)
dest1d = np.sum(dest2d, 1)
# make sure there are non-zero data in src or dest
if np.any(src1d > th_nonzero) and np.any(dest1d > th_nonzero):
# retrieve min/max of non-zeros elements (edge of the segmentation)
# julien 20161105
# <<<
src1d_min, src1d_max = min(np.where(src1d != 0)[0]), max(np.where(src1d != 0)[0])
dest1d_min, dest1d_max = min(np.where(dest1d != 0)[0]), max(np.where(dest1d != 0)[0])
# for i in range(len(src1d)):
# if src1d[i] > 0.5:
# found index above 0.5, exit loop
# break
# get indices (in continuous space) at half-maximum of upward and downward slope
# src1d_min, src1d_max = find_index_halfmax(src1d)
# dest1d_min, dest1d_max = find_index_halfmax(dest1d)
# >>>
# 1D matching between src_y and dest_y
mean_dest_x = (dest1d_max + dest1d_min) / 2
mean_src_x = (src1d_max + src1d_min) / 2
# compute x-scaling factor
Sx = (dest1d_max - dest1d_min + 1) / float(src1d_max - src1d_min + 1)
# apply transformation to coordinates
coord_src2d_scaleX = np.copy(coord_src2d) # need to use np.copy to avoid copying pointer
coord_src2d_scaleX[:, 0] = (coord_src2d[:, 0] - mean_src_x) * Sx + mean_dest_x
coord_init_pix_scaleX = np.copy(coord_init_pix)
coord_init_pix_scaleX[:, 0] = (coord_init_pix[:, 0] - mean_src_x) * Sx + mean_dest_x
coord_init_pix_scaleXinv = np.copy(coord_init_pix)
coord_init_pix_scaleXinv[:, 0] = (coord_init_pix[:, 0] - mean_dest_x) / float(Sx) + mean_src_x
# apply transformation to image
from skimage.transform import warp
row_scaleXinv = np.reshape(coord_init_pix_scaleXinv[:, 0], [nx, ny])
src2d_scaleX = warp(src2d, np.array([row_scaleXinv, col]), order=1)
# ============================================================
# COLUMN-WISE REGISTRATION (Y dimension for each Xi)
# ============================================================
coord_init_pix_scaleY = np.copy(coord_init_pix) # need to use np.copy to avoid copying pointer
coord_init_pix_scaleYinv = np.copy(coord_init_pix) # need to use np.copy to avoid copying pointer
# coord_src2d_scaleXY = np.copy(coord_src2d_scaleX) # need to use np.copy to avoid copying pointer
# loop across columns (X dimension)
for ix in range(nx):
# retrieve 1D signal along Y
src1d = src2d_scaleX[ix, :]
dest1d = dest2d[ix, :]
# make sure there are non-zero data in src or dest
if np.any(src1d > th_nonzero) and np.any(dest1d > th_nonzero):
# retrieve min/max of non-zeros elements (edge of the segmentation)
# src1d_min, src1d_max = min(np.nonzero(src1d)[0]), max(np.nonzero(src1d)[0])
# dest1d_min, dest1d_max = min(np.nonzero(dest1d)[0]), max(np.nonzero(dest1d)[0])
# 1D matching between src_y and dest_y
# Ty = (dest1d_max + dest1d_min)/2 - (src1d_max + src1d_min)/2
# Sy = (dest1d_max - dest1d_min) / float(src1d_max - src1d_min)
# apply translation and scaling to coordinates in column
# get indices (in continuous space) at half-maximum of upward and downward slope
# src1d_min, src1d_max = find_index_halfmax(src1d)
# dest1d_min, dest1d_max = find_index_halfmax(dest1d)
src1d_min, src1d_max = np.min(np.where(src1d > th_nonzero)), np.max(np.where(src1d > th_nonzero))
dest1d_min, dest1d_max = np.min(np.where(dest1d > th_nonzero)), np.max(np.where(dest1d > th_nonzero))
# 1D matching between src_y and dest_y
mean_dest_y = (dest1d_max + dest1d_min) / 2
mean_src_y = (src1d_max + src1d_min) / 2
# Tx = (dest1d_max + dest1d_min)/2 - (src1d_max + src1d_min)/2
Sy = (dest1d_max - dest1d_min + 1) / float(src1d_max - src1d_min + 1)
# apply forward transformation (in pixel space)
# below: only for debugging purpose
# coord_src2d_scaleX = np.copy(coord_src2d) # need to use np.copy to avoid copying pointer
# coord_src2d_scaleX[:, 0] = (coord_src2d[:, 0] - mean_src) * Sx + mean_dest
# coord_init_pix_scaleY = np.copy(coord_init_pix) # need to use np.copy to avoid copying pointer
# coord_init_pix_scaleY[:, 0] = (coord_init_pix[:, 0] - mean_src ) * Sx + mean_dest
                    # indices of column ix in the raveled (nx*ny)-long coordinate arrays (see layout comment above); ny entries per column
                    range_x = list(range(ix * ny, (ix + 1) * ny))
coord_init_pix_scaleY[range_x, 1] = (coord_init_pix[range_x, 1] - mean_src_y) * Sy + mean_dest_y
coord_init_pix_scaleYinv[range_x, 1] = (coord_init_pix[range_x, 1] - mean_dest_y) / float(Sy) + mean_src_y
# apply transformation to image
col_scaleYinv = np.reshape(coord_init_pix_scaleYinv[:, 1], [nx, ny])
src2d_scaleXY = warp(src2d, np.array([row_scaleXinv, col_scaleYinv]), order=1)
# regularize Y warping fields
from skimage.filters import gaussian
col_scaleY = np.reshape(coord_init_pix_scaleY[:, 1], [nx, ny])
col_scaleYsmooth = gaussian(col_scaleY, smoothWarpXY)
col_scaleYinvsmooth = gaussian(col_scaleYinv, smoothWarpXY)
# apply smoothed transformation to image
src2d_scaleXYsmooth = warp(src2d, np.array([row_scaleXinv, col_scaleYinvsmooth]), order=1)
# reshape warping field as 1d
coord_init_pix_scaleY[:, 1] = col_scaleYsmooth.ravel()
coord_init_pix_scaleYinv[:, 1] = col_scaleYinvsmooth.ravel()
# display
if verbose == 2:
# FIG 1
plt.figure(figsize=(15, 3))
# plot #1
ax = plt.subplot(141)
plt.imshow(np.swapaxes(src2d, 1, 0), cmap=plt.cm.gray, interpolation='none')
                # overlay next layer on the same axes (plt.hold was removed from modern matplotlib; overlaying is the default)
plt.imshow(np.swapaxes(dest2d, 1, 0), cmap=plt.cm.copper, interpolation='none', alpha=0.5)
plt.title('src')
plt.xlabel('x')
plt.ylabel('y')
plt.xlim(mean_dest_x - 15, mean_dest_x + 15)
plt.ylim(mean_dest_y - 15, mean_dest_y + 15)
ax.grid(True, color='w')
# plot #2
ax = plt.subplot(142)
plt.imshow(np.swapaxes(src2d_scaleX, 1, 0), cmap=plt.cm.gray, interpolation='none')
                # overlay next layer (overlaying is the default in modern matplotlib)
plt.imshow(np.swapaxes(dest2d, 1, 0), cmap=plt.cm.copper, interpolation='none', alpha=0.5)
plt.title('src_scaleX')
plt.xlabel('x')
plt.ylabel('y')
plt.xlim(mean_dest_x - 15, mean_dest_x + 15)
plt.ylim(mean_dest_y - 15, mean_dest_y + 15)
ax.grid(True, color='w')
# plot #3
ax = plt.subplot(143)
plt.imshow(np.swapaxes(src2d_scaleXY, 1, 0), cmap=plt.cm.gray, interpolation='none')
                # overlay next layer (overlaying is the default in modern matplotlib)
plt.imshow(np.swapaxes(dest2d, 1, 0), cmap=plt.cm.copper, interpolation='none', alpha=0.5)
plt.title('src_scaleXY')
plt.xlabel('x')
plt.ylabel('y')
plt.xlim(mean_dest_x - 15, mean_dest_x + 15)
plt.ylim(mean_dest_y - 15, mean_dest_y + 15)
ax.grid(True, color='w')
# plot #4
ax = plt.subplot(144)
plt.imshow(np.swapaxes(src2d_scaleXYsmooth, 1, 0), cmap=plt.cm.gray, interpolation='none')
                # overlay next layer (overlaying is the default in modern matplotlib)
plt.imshow(np.swapaxes(dest2d, 1, 0), cmap=plt.cm.copper, interpolation='none', alpha=0.5)
plt.title('src_scaleXYsmooth (s=' + str(smoothWarpXY) + ')')
plt.xlabel('x')
plt.ylabel('y')
plt.xlim(mean_dest_x - 15, mean_dest_x + 15)
plt.ylim(mean_dest_y - 15, mean_dest_y + 15)
ax.grid(True, color='w')
# save figure
plt.savefig(os.path.join(path_qc, 'register2d_columnwise_image_z' + str(iz) + '.png'))
plt.close()
# ============================================================
# CALCULATE TRANSFORMATIONS
# ============================================================
# calculate forward transformation (in physical space)
coord_init_phy_scaleX = np.array(im_dest.transfo_pix2phys(coord_init_pix_scaleX))
coord_init_phy_scaleY = np.array(im_dest.transfo_pix2phys(coord_init_pix_scaleY))
# calculate inverse transformation (in physical space)
coord_init_phy_scaleXinv = np.array(im_src.transfo_pix2phys(coord_init_pix_scaleXinv))
coord_init_phy_scaleYinv = np.array(im_src.transfo_pix2phys(coord_init_pix_scaleYinv))
# compute displacement per pixel in destination space (for forward warping field)
warp_x[:, :, iz] = np.array([coord_init_phy_scaleXinv[i, 0] - coord_init_phy[i, 0] for i in range(nx * ny)]).reshape((nx, ny))
warp_y[:, :, iz] = np.array([coord_init_phy_scaleYinv[i, 1] - coord_init_phy[i, 1] for i in range(nx * ny)]).reshape((nx, ny))
# compute displacement per pixel in source space (for inverse warping field)
warp_inv_x[:, :, iz] = np.array([coord_init_phy_scaleX[i, 0] - coord_init_phy[i, 0] for i in range(nx * ny)]).reshape((nx, ny))
warp_inv_y[:, :, iz] = np.array([coord_init_phy_scaleY[i, 1] - coord_init_phy[i, 1] for i in range(nx * ny)]).reshape((nx, ny))
# Generate forward warping field (defined in destination space)
generate_warping_field(fname_dest, warp_x, warp_y, fname_warp, verbose)
# Generate inverse warping field (defined in source space)
generate_warping_field(fname_src, warp_inv_x, warp_inv_y, fname_warp_inv, verbose)
def register2d(fname_src, fname_dest, fname_mask='', fname_warp='warp_forward.nii.gz',
fname_warp_inv='warp_inverse.nii.gz',
paramreg=Paramreg(step='0', type='im', algo='Translation', metric='MI', iter='5', shrink='1', smooth='0',
gradStep='0.5'),
ants_registration_params={'rigid': '', 'affine': '', 'compositeaffine': '', 'similarity': '',
'translation': '', 'bspline': ',10', 'gaussiandisplacementfield': ',3,0',
'bsplinedisplacementfield': ',5,10', 'syn': ',3,0', 'bsplinesyn': ',1,3'},
verbose=0):
"""
Slice-by-slice registration of two images.
:param fname_src: name of moving image (type: string)
:param fname_dest: name of fixed image (type: string)
:param fname_mask: name of mask file (type: string) (parameter -x of antsRegistration)
:param fname_warp: name of output 3d forward warping field
:param fname_warp_inv: name of output 3d inverse warping field
:param paramreg: Class Paramreg()
:param ants_registration_params: dict: specific algorithm's parameters for antsRegistration
:param verbose:
:return:
if algo==translation:
x_displacement: list of translation along x axis for each slice (type: list)
y_displacement: list of translation along y axis for each slice (type: list)
if algo==rigid:
x_displacement: list of translation along x axis for each slice (type: list)
y_displacement: list of translation along y axis for each slice (type: list)
theta_rotation: list of rotation angle in radian (and in ITK's coordinate system) for each slice (type: list)
if algo==affine or algo==syn or algo==bsplinesyn:
creation of two 3D warping fields (forward and inverse) that are the concatenations of the slice-by-slice
warps.
"""
# set metricSize
# TODO: create internal function get_metricSize()
if paramreg.metric == 'MI':
metricSize = '32' # corresponds to number of bins
else:
metricSize = '4' # corresponds to radius (for CC, MeanSquares...)
# Get image dimensions and retrieve nz
logger.info(f"\nGet image dimensions of destination image...")
nx, ny, nz, nt, px, py, pz, pt = image.Image(fname_dest).dim
logger.info(f" matrix size: {str(nx)} x {str(ny)} x {str(nz)}")
logger.info(f" voxel size: {str(px)}mm x {str(py)}mm x {str(nz)}mm")
# Split input volume along z
logger.info(f"\nSplit input volume...")
im_src = image.Image(fname_src)
split_source_list = image.split_img_data(im_src, 2)
for im in split_source_list:
im.save()
# Split destination volume along z
logger.info(f"\nSplit destination volume...")
im_dest = image.Image(fname_dest)
split_dest_list = image.split_img_data(im_dest, 2)
for im in split_dest_list:
im.save()
# Split mask volume along z
if fname_mask != '':
logger.info(f"\nSplit mask volume...")
im_mask = image.Image('mask.nii.gz')
split_mask_list = image.split_img_data(im_mask, 2)
for im in split_mask_list:
im.save()
# initialization
if paramreg.algo in ['Translation']:
x_displacement = [0 for i in range(nz)]
y_displacement = [0 for i in range(nz)]
theta_rotation = [0 for i in range(nz)]
if paramreg.algo in ['Rigid', 'Affine', 'BSplineSyN', 'SyN']:
list_warp = []
list_warp_inv = []
# loop across slices
for i in range(nz):
# set masking
logger.info(f"Registering slice {str(i)}/{str(nz-1)}...")
num = numerotation(i)
prefix_warp2d = 'warp2d_' + num
# if mask is used, prepare command for ANTs
if fname_mask != '':
masking = ['-x', 'mask_Z' + num + '.nii.gz']
else:
masking = []
# main command for registration
# TODO fixup isct_ants* parsers
cmd = ['isct_antsRegistration',
'--dimensionality', '2',
'--transform', paramreg.algo + '[' + str(paramreg.gradStep) + ants_registration_params[paramreg.algo.lower()] + ']',
               '--metric', paramreg.metric + '[dest_Z' + num + '.nii' + ',src_Z' + num + '.nii' + ',1,' + metricSize + ']',  # [fixedImage,movingImage,metricWeight,nb_of_bins (MI) or radius (other)]
'--convergence', str(paramreg.iter),
'--shrink-factors', str(paramreg.shrink),
'--smoothing-sigmas', str(paramreg.smooth) + 'mm',
'--output', '[' + prefix_warp2d + ',src_Z' + num + '_reg.nii]', #--> file.mat (contains Tx,Ty, theta)
'--interpolation', 'BSpline[3]',
'--verbose', '1',
] + masking
# add init translation
        if paramreg.init != '':
init_dict = {'geometric': '0', 'centermass': '1', 'origin': '2'}
cmd += ['-r', '[dest_Z' + num + '.nii' + ',src_Z' + num + '.nii,' + init_dict[paramreg.init] + ']']
try:
# run registration
run_proc(cmd, is_sct_binary=True)
if paramreg.algo in ['Translation']:
file_mat = prefix_warp2d + '0GenericAffine.mat'
matfile = loadmat(file_mat, struct_as_record=True)
array_transfo = matfile['AffineTransform_double_2_2']
x_displacement[i] = array_transfo[4][0] # Tx in ITK'S coordinate system
y_displacement[i] = array_transfo[5][0] # Ty in ITK'S and fslview's coordinate systems
theta_rotation[i] = asin(array_transfo[2]) # angle of rotation theta in ITK'S coordinate system (minus theta for fslview)
if paramreg.algo in ['Rigid', 'Affine', 'BSplineSyN', 'SyN']:
# List names of 2d warping fields for subsequent merge along Z
file_warp2d = prefix_warp2d + '0Warp.nii.gz'
file_warp2d_inv = prefix_warp2d + '0InverseWarp.nii.gz'
list_warp.append(file_warp2d)
list_warp_inv.append(file_warp2d_inv)
if paramreg.algo in ['Rigid', 'Affine']:
# Generating null 2d warping field (for subsequent concatenation with affine transformation)
# TODO fixup isct_ants* parsers
run_proc(['isct_antsRegistration',
'-d', '2',
'-t', 'SyN[1,1,1]',
'-c', '0',
'-m', 'MI[dest_Z' + num + '.nii,src_Z' + num + '.nii,1,32]',
'-o', 'warp2d_null',
'-f', '1',
'-s', '0',
], is_sct_binary=True)
# --> outputs: warp2d_null0Warp.nii.gz, warp2d_null0InverseWarp.nii.gz
file_mat = prefix_warp2d + '0GenericAffine.mat'
# Concatenating mat transfo and null 2d warping field to obtain 2d warping field of affine transformation
run_proc(['isct_ComposeMultiTransform', '2', file_warp2d, '-R', 'dest_Z' + num + '.nii', 'warp2d_null0Warp.nii.gz', file_mat], is_sct_binary=True)
run_proc(['isct_ComposeMultiTransform', '2', file_warp2d_inv, '-R', 'src_Z' + num + '.nii', 'warp2d_null0InverseWarp.nii.gz', '-i', file_mat], is_sct_binary=True)
# if an exception occurs with ants, take the last value for the transformation
# TODO: DO WE NEED TO DO THAT??? (julien 2016-03-01)
except Exception as e:
# TODO [AJ] is it desired to completely ignore exception??
logger.error(f"Exception occurred. \n {e}")
# Merge warping field along z
logger.info(f"\nMerge warping fields along z...")
if paramreg.algo in ['Translation']:
# convert to array
x_disp_a = np.asarray(x_displacement)
y_disp_a = np.asarray(y_displacement)
theta_rot_a = np.asarray(theta_rotation)
# Generate warping field
generate_warping_field(fname_dest, x_disp_a, y_disp_a, fname_warp=fname_warp) #name_warp= 'step'+str(paramreg.step)
# Inverse warping field
generate_warping_field(fname_src, -x_disp_a, -y_disp_a, fname_warp=fname_warp_inv)
if paramreg.algo in ['Rigid', 'Affine', 'BSplineSyN', 'SyN']:
# concatenate 2d warping fields along z
image.concat_warp2d(list_warp, fname_warp, fname_dest)
image.concat_warp2d(list_warp_inv, fname_warp_inv, fname_src)
def numerotation(nb):
    """Format a slice number to match fslsplit's indexing.
    Given a slice number, this function returns the corresponding zero-padded 4-digit index used by fslsplit.
    param nb: the number of the slice (type: int)
    return nb_output: the zero-padded slice number for fslsplit (type: string)
    """
    if nb < 0 or nb > 9999:
        raise ValueError("Number must be between 0 and 9999")
    return str(nb).zfill(4)
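# Hedged sanity check (illustrative, not part of the original API): numerotation
# reproduces fslsplit's 4-digit zero padding.
def _example_numerotation():
    assert numerotation(3) == '0003'
    assert numerotation(1234) == '1234'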
def generate_warping_field(fname_dest, warp_x, warp_y, fname_warp='warping_field.nii.gz', verbose=1):
"""
Generate an ITK warping field
:param fname_dest:
:param warp_x:
:param warp_y:
:param fname_warp:
:param verbose:
:return:
"""
logger.info(f"\nGenerate warping field...")
# Get image dimensions
nx, ny, nz, nt, px, py, pz, pt = image.Image(fname_dest).dim
# initialize
data_warp = np.zeros((nx, ny, nz, 1, 3))
# fill matrix
data_warp[:, :, :, 0, 0] = -warp_x # need to invert due to ITK conventions
data_warp[:, :, :, 0, 1] = -warp_y # need to invert due to ITK conventions
# save warping field
im_dest = load(fname_dest)
hdr_dest = im_dest.get_header()
hdr_warp = hdr_dest.copy()
hdr_warp.set_intent('vector', (), '')
hdr_warp.set_data_dtype('float32')
img = Nifti1Image(data_warp, None, hdr_warp)
save(img, fname_warp)
logger.info(f" --> {fname_warp}")
def angle_between(a, b):
"""
    Compute the signed angle in radians between a and b. Returns nan (rather than raising) if a or b has zero magnitude.
:param a: Coordinates of first point
:param b: Coordinates of second point
:return: angle in rads
"""
arccosInput = np.dot(a, b) / np.linalg.norm(a) / np.linalg.norm(b)
arccosInput = 1.0 if arccosInput > 1.0 else arccosInput
arccosInput = -1.0 if arccosInput < -1.0 else arccosInput
sign_angle = np.sign(np.cross(a, b))
return sign_angle * acos(arccosInput)
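# Hedged sanity check (illustrative, not part of the original API): the angle is
# signed, so swapping the arguments flips its sign.
def _example_angle_between():
    a, b = np.array([1.0, 0.0]), np.array([0.0, 1.0])
    assert np.isclose(angle_between(a, b), np.pi / 2)
    assert np.isclose(angle_between(b, a), -np.pi / 2)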
def compute_pca(data2d):
"""
Compute PCA using sklearn
:param data2d: 2d array. PCA will be computed on non-zeros values.
:return: coordsrc: 2d array: centered non-zero coordinates\
pca: object: PCA result.\
centermass: 2x1 array: 2d coordinates of the center of mass
"""
    # round and cast to int (otherwise we end up with tiny residuals like 1e-7)
data2d = data2d.round().astype(int)
# get non-zero coordinates, and transpose to obtain nx2 dimensions
coordsrc = np.array(data2d.nonzero()).T
# get center of mass
centermass = coordsrc.mean(0)
# center data
coordsrc = coordsrc - centermass
# normalize data
coordsrc /= coordsrc.std()
# Performs PCA
pca = PCA(n_components=2, copy=False, whiten=False)
pca.fit(coordsrc)
return coordsrc, pca, centermass
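# Hedged sanity check (illustrative, not part of the original API): for an
# elongated synthetic blob, the first PCA axis aligns with the long dimension
# and dominates the explained variance.
def _example_compute_pca():
    data2d = np.zeros((40, 40))
    data2d[10:30, 18:22] = 1  # blob elongated along the first (x) axis
    _, pca, centermass = compute_pca(data2d)
    assert abs(pca.components_[0][0]) > abs(pca.components_[0][1])
    assert pca.explained_variance_ratio_[0] > 0.8
    assert np.allclose(centermass, [19.5, 19.5])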
def find_index_halfmax(data1d):
"""
    Find the two continuous indices at half maximum for a bell-type curve (non-parametric), using linear interpolation between neighboring samples.
:param data1d:
:return: xmin, xmax
"""
# normalize data between 0 and 1
data1d = data1d / float(np.max(data1d))
# loop across elements and stops when found 0.5
for i in range(len(data1d)):
if data1d[i] > 0.5:
break
    # linearly interpolate to get the continuous coordinate at 0.5
xmin = i - 1 + (0.5 - data1d[i - 1]) / float(data1d[i] - data1d[i - 1])
# continue for the descending slope
for i in range(i, len(data1d)):
if data1d[i] < 0.5:
break
    # linearly interpolate to get the continuous coordinate at 0.5
xmax = i - 1 + (0.5 - data1d[i - 1]) / float(data1d[i] - data1d[i - 1])
return xmin, xmax
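# Hedged sanity check (illustrative, not part of the original API): for a
# symmetric triangle peaking at index 5, the half-maximum crossings fall at 2.5
# and 7.5 in continuous index space.
def _example_find_index_halfmax():
    data1d = np.array([0., .2, .4, .6, .8, 1., .8, .6, .4, .2, 0.])
    xmin, xmax = find_index_halfmax(data1d)
    assert np.isclose(xmin, 2.5) and np.isclose(xmax, 7.5)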
def find_angle_hog(image, centermass, px, py, angle_range=10):
"""
Finds the angle of an image based on the method described by Sun, "Symmetry Detection Using Gradient Information."
Pattern Recognition Letters 16, no. 9 (September 1, 1995): 987–96, and improved by N. Pinon
:param: image : 2D numpy array to find symmetry axis on
:param: centermass: tuple of floats indicating the center of mass of the image
    :param: px, py: dimensions of the pixels in the x and y directions
    :param: angle_range : float or None, in deg; the angle will be searched in the range [-angle_range, angle_range]. If None, any angle may be returned
:return: angle found and confidence score
"""
    # parameters that can be tweaked to influence method performance:
    sigma = 10  # vote spread: high values let far-away pixels contribute more to the orientation histogram, low values favor the closest pixels
    nb_bin = 360  # number of angle bins for the histogram (can be more or less than 360); higher values give finer angular precision at the risk of a sparser, noisier histogram
kmedian_size = 5
# Normalization of sigma relative to pixdim :
sigmax = sigma / px
sigmay = sigma / py
if nb_bin % 2 != 0: # necessary to have even number of bins
nb_bin = nb_bin - 1
if angle_range is None:
angle_range = 90
# Constructing mask based on center of mass that will influence the weighting of the orientation histogram
nx, ny = image.shape
xx, yy = np.mgrid[:nx, :ny]
seg_weighted_mask = np.exp(
-(((xx - centermass[0]) ** 2) / (2 * (sigmax ** 2)) + ((yy - centermass[1]) ** 2) / (2 * (sigmay ** 2))))
# Acquiring the orientation histogram :
grad_orient_histo = gradient_orientation_histogram(image, nb_bin=nb_bin, seg_weighted_mask=seg_weighted_mask)
# Bins of the histogram :
repr_hist = np.linspace(-(np.pi - 2 * np.pi / nb_bin), (np.pi - 2 * np.pi / nb_bin), nb_bin - 1)
# Smoothing of the histogram, necessary to avoid digitization effects that will favor angles 0, 45, 90, -45, -90:
    grad_orient_histo_smooth = circular_filter_1d(grad_orient_histo, kmedian_size, kernel='median')  # circular median filtering of the histogram
# Computing the circular autoconvolution of the histogram to obtain the axis of symmetry of the histogram :
grad_orient_histo_conv = circular_conv(grad_orient_histo_smooth, grad_orient_histo_smooth)
# Restraining angle search to the angle range :
index_restrain = int(np.ceil(np.true_divide(angle_range, 180) * nb_bin))
center = (nb_bin - 1) // 2
grad_orient_histo_conv_restrained = grad_orient_histo_conv[center - index_restrain + 1:center + index_restrain + 1]
# Finding the symmetry axis by searching for the maximum in the autoconvolution of the histogram :
index_angle_found = np.argmax(grad_orient_histo_conv_restrained) + (nb_bin // 2 - index_restrain)
angle_found = repr_hist[index_angle_found] / 2
angle_found_score = np.amax(grad_orient_histo_conv_restrained)
# Finding other maxima to compute confidence score
arg_maxs = argrelmax(grad_orient_histo_conv_restrained, order=kmedian_size, mode='wrap')[0]
# Confidence score is the ratio of the 2 first maxima :
if len(arg_maxs) > 1:
conf_score = angle_found_score / grad_orient_histo_conv_restrained[arg_maxs[1]]
else:
conf_score = angle_found_score / np.mean(grad_orient_histo_conv) # if no other maxima in the region ratio of the maximum to the mean
return angle_found, conf_score
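# Hedged usage sketch (illustrative; variable names are assumptions): on a 2D
# slice `img` with pixel sizes px, py and a center of mass (e.g. from
# compute_pca), the symmetry angle in radians and a confidence ratio are
# obtained with:
#     angle, conf = find_angle_hog(img, centermass, px, py, angle_range=10)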
def gradient_orientation_histogram(image, nb_bin, seg_weighted_mask=None):
"""
    This function takes an image as an input and returns its orientation histogram
    :param image: the image to compute the orientation histogram from, a 2D numpy array
    :param nb_bin: the number of bins of the histogram, an int, for instance 360 for 1-degree-wide bins (can be more or less than 360)
    :param seg_weighted_mask: optional mask weighting the histogram counts, based on the segmentation; 2D numpy array with values between 0 and 1
    :return grad_orient_histo: the histogram of the orientations of the image, a 1D numpy array of length nb_bin - 1"""
h_kernel = np.array([[1, 2, 1],
[0, 0, 0],
[-1, -2, -1]]) / 4.0
v_kernel = h_kernel.T
# Normalization by median, to resolve scaling problems
median = np.median(image)
if median != 0:
image = image / median
# x and y gradients of the image
gradx = ndimage.convolve(image, v_kernel)
grady = ndimage.convolve(image, h_kernel)
# orientation gradient
orient = np.arctan2(grady, gradx) # results are in the range -pi pi
    # weight the votes by gradient magnitude, so that strong gradients contribute more to the histogram
    grad_mag = ((np.abs(gradx.astype(object)) ** 2 + np.abs(grady.astype(object)) ** 2) ** 0.5)  # note: astype(object) is a workaround for a numpy dtype issue observed without it
if np.max(grad_mag) != 0:
grad_mag = grad_mag / np.max(grad_mag) # to have map between 0 and 1 (and keep consistency with the seg_weihting map if provided)
if seg_weighted_mask is not None:
weighting_map = np.multiply(seg_weighted_mask, grad_mag) # include weightning by segmentation
else:
weighting_map = grad_mag
# compute histogram :
grad_orient_histo = np.histogram(np.concatenate(orient), bins=nb_bin - 1, range=(-(np.pi - np.pi / nb_bin), (np.pi - np.pi / nb_bin)),
weights=np.concatenate(weighting_map))
return grad_orient_histo[0].astype(float) # return only the values of the bins, not the bins (we know them)
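# Hedged usage sketch (illustrative; the slice name `img` is an assumption):
#     histo = gradient_orientation_histogram(img, nb_bin=360)
# returns 359 weighted counts over (-pi, pi), which find_angle_hog() then
# smooths and circularly auto-convolves to locate the symmetry axis.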
def circular_conv(signal1, signal2):
"""
    Takes two 1D numpy arrays and performs a circular convolution with them
:param signal1: 1D numpy array
:param signal2: 1D numpy array, same length as signal1
:return: signal_conv : 1D numpy array, result of circular convolution of signal1 and signal2"""
if signal1.shape != signal2.shape:
raise Exception("The two signals for circular convolution do not have the same shape")
signal2_extended = np.concatenate((signal2, signal2, signal2)) # replicate signal at both ends
    signal_conv_extended = np.convolve(signal1, signal2_extended, mode="same")  # linear convolution against the tripled signal
signal_conv = signal_conv_extended[len(signal1):2*len(signal1)] # truncate back the signal
return signal_conv
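# Hedged sanity check (illustrative, not part of the original API): circular
# convolution preserves total mass, regardless of the rotation of the output.
def _example_circular_conv():
    rng = np.random.RandomState(0)
    s1, s2 = rng.rand(8), rng.rand(8)
    assert np.isclose(circular_conv(s1, s2).sum(), s1.sum() * s2.sum())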
def circular_filter_1d(signal, window_size, kernel='gaussian'):
    """
    This function filters the inputted signal circularly, i.e. the signal is wrapped around before being\
    smoothed with a gaussian or median kernel of the inputted size
    :param signal: 1D numpy array
    :param window_size: size of the kernel, an int (gaussian sigma, or window length for the median filter)
    :param kernel: 'gaussian' or 'median'
    :return: signal_smoothed: 1D numpy array, same size as signal"""
signal_extended = np.concatenate((signal, signal, signal)) # replicate signal at both ends
if kernel == 'gaussian':
signal_extended_smooth = ndimage.gaussian_filter(signal_extended, window_size) # gaussian
elif kernel == 'median':
signal_extended_smooth = medfilt(signal_extended, window_size) # median filtering
else:
raise Exception("Unknow type of kernel")
signal_smoothed = signal_extended_smooth[len(signal):2*len(signal)] # truncate back the signal
return signal_smoothed
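# Hedged sanity check (illustrative, not part of the original API): filtering a
# constant signal returns the same constant for both kernels.
def _example_circular_filter_1d():
    sig = np.ones(16)
    assert np.allclose(circular_filter_1d(sig, 3, kernel='median'), 1.0)
    assert np.allclose(circular_filter_1d(sig, 2, kernel='gaussian'), 1.0)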
|
neuropoly/spinalcordtoolbox
|
spinalcordtoolbox/registration/register.py
|
Python
|
mit
| 66,785
|
# -*- coding: utf-8 -*-
"""
@package Sekator
@brief Contain an iterator function to read fastq files and return FastqSeq objects
@copyright [GNU General Public License v2](http://www.gnu.org/licenses/gpl-2.0.html)
@author Adrien Leger - 2014
* <adrien.leger@gmail.com>
* <adrien.leger@inserm.fr>
* <adrien.leger@univ-nantes.fr>
* [Github](https://github.com/a-slide)
* [Atlantic Gene Therapies - INSERM 1089] (http://www.atlantic-gene-therapies.fr/)
"""
# Standard library imports
from gzip import open as gopen
import os
# Local imports
from FastqSeq import FastqSeq
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
class FastqReader(object):
"""Generator"""
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
#~~~~~~~FUNDAMENTAL METHODS~~~~~~~#
def __init__(self, fastq_file):
"""
@param fastq_file
Check if fastq_file is readable
"""
assert os.path.exists(fastq_file), '{}: file not found'.format(fastq_file)
assert os.access(fastq_file, os.R_OK), '{}: file not readable'.format(fastq_file)
self.n_seq = 0
self.fastq_file = fastq_file
def __str__(self):
return ("Fastq File: {}\nNumber of Sequence read: {}".format(self.fastq_file, self.n_seq))
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
# FUNCTIONS
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
def __call__(self):
""" Simple fastq reader returning a generator over a fastq file """
try:
# Open the file depending of the compression status
fastq = gopen(self.fastq_file, "rb") if self.fastq_file[-2:] == "gz" else open(self.fastq_file, "rb")
# Iterate on the file until the end
while True:
                # Extract information from the fastq file
name, seq, sep, qual = next(fastq), next(fastq), next(fastq), next(fastq)
split_name = name.split(":")
# Try to generate a valid FastqSeq object
try:
yield FastqSeq(
sampleName = ":".join(split_name[0:-2])[1:],
seq = seq.rstrip(),
qual = qual.rstrip(),
sampleIndex = split_name[-2].rstrip(),
molecularIndex = split_name[-1].rstrip())
self.n_seq += 1
except AssertionError as E:
print(E)
print ("Skipping the sequence")
except IOError as E:
print(E)
print ("Error while reading {} file".format(self.fastq_file))
exit()
        except StopIteration:
            fastq.close()  # close the handle before signalling the end of iteration
            raise StopIteration("\t{} sequences parsed".format(self.n_seq))
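# Hedged usage sketch (illustrative; file and function names are assumptions):
#     reader = FastqReader("reads.fastq.gz")
#     for fastq_seq in reader():
#         process(fastq_seq)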
|
a-slide/py_NGS_tools
|
FastqReader.py
|
Python
|
gpl-2.0
| 3,073
|
#!/usr/bin/env python
from urlparse import urlparse
import re
def url_split(url):
"""split url and return only strings in a list"""
url_split = urlparse(url)
url_path = url_split.path
clean_url = re.sub('[^A-Za-z]+', ',', url_path).split(",")
    wordList = []
    # remove any empty strings while keeping the remaining words in the order
    # in which they appear in the url
    for word in clean_url:
        if word:
            wordList.append(word)
return wordList
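# Hedged sanity check (illustrative, not part of the original module):
# alphabetic runs of the URL path are returned in their original order.
def _example_url_split():
    assert url_split("http://example.com/foo/bar-1.html") == ["foo", "bar", "html"]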
|
mad01/hermit
|
src/lib/wsplit.py
|
Python
|
mit
| 499
|
#!/usr/bin/env python
# test --flatten : turn deep into composited non-deep
command += oiiotool("src/deepalpha.exr --flatten -o flat.exr")
# test --ch on deep files (and --chnames)
command += oiiotool("src/deepalpha.exr --ch =0.0,A,=0.5,A,Z --chnames R,G,B,A,Z --flatten -d half -o ch.exr")
# To add more tests, just append more lines like the above and also add
# the new 'feature.tif' (or whatever you call it) to the outputs list,
# below.
# Outputs to check against references
outputs = [ "flat.exr",
"ch.exr",
"out.txt" ]
#print "Running this command:\n" + command + "\n"
|
scott-wilson/oiio
|
testsuite/oiiotool-deep/run.py
|
Python
|
bsd-3-clause
| 609
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2016-11-04 16:36
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('pootle_app', '0014_set_directory_tp_path'),
]
operations = [
migrations.AlterIndexTogether(
name='directory',
index_together=set([('obsolete', 'tp', 'tp_path'), ('obsolete', 'pootle_path')]),
),
]
|
claudep/pootle
|
pootle/apps/pootle_app/migrations/0015_add_tp_path_idx.py
|
Python
|
gpl-3.0
| 470
|
#!/usr/bin/env python2
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transperf singleserver/local container mode test setup."""
from __future__ import print_function
import argparse
import binascii
import datetime
import exceptions
import inspect
import itertools
import logging
import os
import socket
import struct
import subprocess
import sys
import time
import transperf
from transperf import log
from transperf import path
from transperf import shell
from transperf.recv import Receiver
LOG = logging.getLogger('transperf/containermgr')
"""
Lifecycle for containers:
1. transperf launcher is invoked in singleserver mode, specifying a physical
host that all containers will be run on and a bridge name on that server.
2. If a bridge already exists by the given name, it is removed.
3. If a netns namespace already exists by the given name, it is removed.
The name of the netns is identical to the container node name.
4. A new bridge and containers are created. The containers are not hooked up to
the bridge yet (unless running in demo/standalone container creation mode).
5. The transperf orchestrator (orch.py) runs the provided config files (>=1) and
all experiments within each config file. For each file:
i. Assuming a clean slate of zero enabled bond or 'physical' interfaces in
a container, and no enabled 'physical' interfaces on the bridge
(*except* the one providing connectivity to/from the root namespace),
orch.py invokes container.setup_all_container_interfaces() to connect
each container to the bridge as specified in the config.
ii. All experiments for the config are executed.
iii. orch.py cleans up the interfaces for this config file
before moving onto the next.
6. After the config files are all done, transperf leaves the bridge and all
containers in place for manual inspection if needed. Subsequent invocations
of transperf will clean up the bridge and containers as in step #1.
"""
def traced(func):
"""Decorator to debug-trace function calls."""
def wrap(*args, **kwargs):
bound = inspect.getcallargs(func, *args, **kwargs)
keys = sorted(bound)
fmt = ' Traced:\t%s:%d:\t%s(' + '%s=%s, '*len(bound) + ')'
args_arr = list(itertools.chain(*([(kw, bound[kw]) for kw in keys])))
rewind = 2
LOG.debug(fmt, *([Utils.__file__(rewind), Utils.__line__(rewind),
func.__name__] + list(args_arr)))
return func(*args, **kwargs)
return wrap
class Constants(object):
"""Contains constants used by the transperf container-management module."""
# Filenames and resources.
HOSTS_PATH = '/etc/hosts'
BRDEV_LIST_PATH = '/sys/devices/virtual/net/'
BOND_MASTERS = '/sys/class/net/bonding_masters'
BOND_PATH_TEMPLATE = '/sys/class/net/{bond}/bonding'
NET_NS_PFX = '/var/run/netns'
DEFAULT_OUTDIR_BASE = '/transperf'
VIRTUALIZED_PATHS = ['/home', '/home/tmp', '/tmp']
# Network configuration.
DEFAULT_BRIDGE = 'br-xperf'
DEFAULT_NODE_CFG = {
'bond': 'eth0',
'ifaces': ['eth1', 'regex:%s' % transperf.InterfaceConfig.ETHX_REGEX,],
'root_nic_offloads_enabled': True,
'container_nic_offloads_enabled': True,
}
    # Transperf subnet and root addresses (IPv4 and IPv6).
TRANSPERF_SUBNET_ADDR = {socket.AF_INET: '10.255.0.0',
socket.AF_INET6: 'fd42:7850:5c06:1::'}
ROOT_TRANSPERF_ADDR = {socket.AF_INET: '10.255.0.1',
socket.AF_INET6: 'fd42:7850:5c06:1::1'}
TRANSPERF_SUBNET = {socket.AF_INET: 24, socket.AF_INET6: 64}
    # Root namespace transperf interface configuration.
ROOT_TRANSPERF_IFACE = {'name': 'xperf0',
'address': {
socket.AF_INET: '%s/%d' % (
ROOT_TRANSPERF_ADDR[socket.AF_INET],
TRANSPERF_SUBNET[socket.AF_INET]),
socket.AF_INET6: '%s/%d' % (
ROOT_TRANSPERF_ADDR[socket.AF_INET6],
TRANSPERF_SUBNET[socket.AF_INET6])},
'br-pair': 'br-xperf0'}
# Bond configuration within containers.
DEFAULT_BOND_MODE = 2 # balance-xor
DEFAULT_BOND_XMIT_HASH_POLICY = 'layer3+4'
# Limiting constants.
IPV4_BITLEN = 32
IPV6_BITLEN = 128
MAX_NODES = {
socket.AF_INET: 2 ** (IPV4_BITLEN - TRANSPERF_SUBNET[socket.AF_INET]),
socket.AF_INET6: 2 ** (IPV6_BITLEN - TRANSPERF_SUBNET[socket.AF_INET6]),
}
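    # e.g. IPv4 with a /24 transperf subnet yields 2 ** (32 - 24) = 256 addressable nodes.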
# Misc.
UNSHARE_DELAY_SECONDS = 1
class Utils(object):
"""Contains util functions used by the transperf container module."""
@staticmethod
def ip_numeric(ip, ip_mode):
"""Returns numerical value from IP address."""
if ip_mode == socket.AF_INET:
# Returns numerical value from IPv4 address (dotted string).
return struct.unpack('!I', socket.inet_pton(socket.AF_INET, ip))[0]
elif ip_mode == socket.AF_INET6:
return int(binascii.hexlify(socket.inet_pton(socket.AF_INET6, ip)),
16)
else:
raise exceptions.RuntimeError('Invalid IP mode %s' % ip_mode)
@staticmethod
def numeric_ip(numeric, ip_mode):
"""Returns IP address from numerical value."""
if ip_mode == socket.AF_INET:
# Returns IPv4 address (dotted string) from numerical value.
return socket.inet_ntop(socket.AF_INET, struct.pack('!I', numeric))
elif ip_mode == socket.AF_INET6:
# pad=32 is to 0-pad all 128 bits.
hex_form = '{value:0{pad}x}'.format(value=numeric, pad=32)
return socket.inet_ntop(socket.AF_INET6,
binascii.unhexlify(hex_form))
else:
raise exceptions.RuntimeError('Invalid IP mode %s' % ip_mode)
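    # Hedged round-trip sketch (illustrative): ip_numeric and numeric_ip are
    # inverses, e.g.
    #     Utils.numeric_ip(Utils.ip_numeric('10.255.0.1', socket.AF_INET),
    #                      socket.AF_INET) == '10.255.0.1'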
@staticmethod
def timestamp_dirname():
"""Builds a directory name based on current time."""
return datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
@staticmethod
def __func__(rewind=1):
"""Returns function executed by caller for more debug info."""
frame = inspect.currentframe()
while rewind > 0:
upframe = frame.f_back
if upframe is None:
break
frame = upframe
rewind -= 1
return frame.f_code.co_name
@staticmethod
def __file__(rewind=1):
"""Returns line number executed by caller for more debug info."""
frame = inspect.currentframe()
while rewind > 0:
upframe = frame.f_back
if upframe is None:
break
frame = upframe
rewind -= 1
return frame.f_code.co_filename
@staticmethod
def __line__(rewind=1):
"""Returns line number executed by caller for more debug info."""
frame = inspect.currentframe()
while rewind > 0:
upframe = frame.f_back
if upframe is None:
break
frame = upframe
rewind -= 1
return frame.f_lineno
@staticmethod
def debug(fmt, *args):
"""Debug logs with more descriptive debug info."""
rewind = 2
fmt = '%s:%d:%s: ' + fmt
LOG.debug(fmt, *([Utils.__file__(rewind), Utils.__line__(rewind),
Utils.__func__(rewind)] + list(args)))
@staticmethod
def run(cmd):
"""Wraps transerf::shell::run() with more descriptive debug info."""
rewind = 2
fmt = ' Shell:\t%s:%d:\t%s: Executing [%s]'
LOG.debug(fmt, *([Utils.__file__(rewind), Utils.__line__(rewind),
Utils.__func__(rewind), cmd]))
return shell.run(cmd)
class IfUtils(object):
"""Contains interface management convenience functions."""
@staticmethod
def ensure_bonding_available():
modules = subprocess.check_output('lsmod | awk \'{print $1}\'',
shell=True)
modules = set(modules.splitlines())
if 'bonding' not in modules:
Utils.run('modprobe bonding')
@staticmethod
@traced
def br_exists(brdev):
"""Does this bridge exist?"""
cmd = '{ctl} show {dev}'.format(ctl=path.brctl(), dev=brdev)
_, err, _ = Utils.run(cmd)
errlines = err.splitlines()
# Format:
# bridge_name bridge_id STP_enabled interfaces (line 1)
# ... <per bridge data> on subsequent lines
if errlines and ('No such device' in errlines[0]
or 'can\'t get info' in errlines[0]
or 'does not exist' in errlines[0]):
return False
return True
@staticmethod
@traced
def create_bridge(brdev):
"""Creates bridge."""
cmd = '{ctl} addbr {dev}'.format(ctl=path.brctl(), dev=brdev)
Utils.run(cmd)
cmd = 'ip link set {dev} up'.format(dev=brdev)
Utils.run(cmd)
@staticmethod
@traced
def delete_bridge(brdev):
"""Deletes bridge."""
# Unhook and delete each interface on bridge if it exists.
br_ifaces = IfUtils.get_bridge_ifaces(brdev, must_exist=False)
for iface in br_ifaces:
cmd = 'ip link set {iface} down'.format(iface=iface)
Utils.run(cmd)
IfUtils.unhook_br_iface(brdev, iface)
IfUtils.del_iface(iface)
# Disable bridge
cmd = 'ip link set {dev} down'.format(dev=brdev)
Utils.run(cmd)
# Delete bridge and verify.
cmd = '{ctl} delbr {dev}'.format(ctl=path.brctl(), dev=brdev)
Utils.run(cmd)
assert not IfUtils.br_exists(brdev), ('Cannot delete '
'bridge %s' % brdev)
# Delete orphaned root transperf interface.
IfUtils.del_iface(Constants.ROOT_TRANSPERF_IFACE['name'])
Utils.debug('Return: Delete bridge %s', brdev)
@staticmethod
@traced
def get_bridge_ifaces(brdev, must_exist=True):
"""Gets bridge interfaces.
If must_exist is False, return an empty list. If must_exist is True,
return an error if the bridge does not exist.
Args:
brdev: The bridge device.
must_exist: If true, raise an exception if no bridge found.
Returns:
The list of bridge interfaces.
"""
pathstr = '{br_base}/{dev}/brif'
pathstr = pathstr.format(br_base=Constants.BRDEV_LIST_PATH, dev=brdev)
cmd = 'ls {path}'.format(path=pathstr)
out, err, _ = Utils.run(cmd)
lines = out.splitlines()
errlines = err.splitlines()
assert not errlines or not must_exist, ('Bridge %s '
'does not exist' % brdev)
return lines
@staticmethod
@traced
def unhook_br_iface(brdev, iface):
"""Removes interface from bridge."""
cmd = '{ctl} delif {dev} {iface}'.format(ctl=path.brctl(),
dev=brdev, iface=iface)
Utils.run(cmd)
assert iface not in IfUtils.get_bridge_ifaces(brdev), ('Cannot unhook '
'iface %s from '
'bridge %s' %
(iface, brdev))
@staticmethod
@traced
def hook_br_iface(brdev, iface):
"""Adds interface to bridge."""
cmd = '{ctl} addif {dev} {iface}'.format(ctl=path.brctl(),
dev=brdev, iface=iface)
Utils.run(cmd)
assert iface in IfUtils.get_bridge_ifaces(brdev), ('Unable to add %s '
'to bridge %s' %
(iface, brdev))
@staticmethod
@traced
def verify_iface_on_br(brdev, iface):
"""Ensures interface added to bridge."""
assert iface in IfUtils.get_bridge_ifaces(brdev), ('iface %s not in '
'bridge %s' %
(iface, brdev))
@staticmethod
@traced
def verify_iface_cfg(iface_cfg, ip_mode):
"""Verifies interface exists with configured address."""
iface = iface_cfg['name']
cmd = 'ip addr show dev {iface}'.format(iface=iface)
out, err, _ = Utils.run(cmd)
lines = out.splitlines()
errlines = err.splitlines()
# Verify iface exists.
assert lines, 'No output for %s' % cmd
assert (not errlines or
'does not exist' not in errlines[0]), ('Device %s '
'does not exist.' %
iface)
# Verify address.
matcher = 'inet6' if ip_mode == socket.AF_INET6 else 'inet'
for line in lines:
splits = line.split()
if not splits:
continue
if splits[0] != matcher:
continue
addr = splits[1]
if addr == iface_cfg['address'][ip_mode]:
return
error = 'Unable to verify interface: %s' % iface_cfg
LOG.error(error)
raise RuntimeError(error)
@staticmethod
@traced
def setup_root_veth(brdev, ip_mode):
"""Setup root netns transperf interface with reserved address."""
# Root veths are reserved constants.
root_iface_cfg = Constants.ROOT_TRANSPERF_IFACE
root_iface = root_iface_cfg['name']
root_addr = root_iface_cfg['address'][ip_mode]
peer = root_iface_cfg['br-pair']
IfUtils.setup_iface(brdev, root_iface, peer, ip_mode, addr=root_addr)
# Give the root interface route a specific route MTU
# to not break other tests.
if ip_mode == socket.AF_INET:
cmd = 'ip route change %s/%d dev %s mtu 1500' % (
Constants.TRANSPERF_SUBNET_ADDR[ip_mode],
Constants.TRANSPERF_SUBNET[ip_mode],
Constants.ROOT_TRANSPERF_IFACE['name'])
Utils.run(cmd)
else:
cmd = 'ip -6 route del %s/%d' % (
Constants.TRANSPERF_SUBNET_ADDR[ip_mode],
Constants.TRANSPERF_SUBNET[ip_mode],
)
Utils.run(cmd)
cmd = 'ip -6 route add %s/%d dev %s mtu 1500' % (
Constants.TRANSPERF_SUBNET_ADDR[ip_mode],
Constants.TRANSPERF_SUBNET[ip_mode],
Constants.ROOT_TRANSPERF_IFACE['name'])
Utils.run(cmd)
@staticmethod
@traced
def setup_iface(brdev, iface, peer, ip_mode, root_offload=True,
container_offload=True, netns=None, addr=None):
"""Setup interface with given address.
If netns is specified, the non-bridge half of the interface is moved
into the target netns and renamed by stripping "<netns>-" from the name.
Args:
brdev: The bridge device the peer is hooked onto.
iface: The interface we are setting up.
peer: The peer interface.
ip_mode: Whether we are using IPv4 or IPv6.
root_offload: Whether ethtool offloads are enabled or not for
root-facing veth device.
container_offload: Whether ethtool offloads are enabled or not for
container-facing veth device.
netns: The netns for the interface.
addr: The address for the interface.
Returns:
Nothing.
"""
# Ensure old devices gone.
IfUtils.del_iface(iface)
IfUtils.del_iface(peer)
# Create devices.
cmd = 'ip link add dev {iface} type veth peer name {peer}'
cmd = cmd.format(iface=iface, peer=peer)
Utils.run(cmd)
# Disable offloads.
if not container_offload:
cmd = 'ethtool -K {dev} tso off gso off gro off'
cmd = cmd.format(dev=iface)
LOG.debug('Container offload disable: executing %s', cmd)
Utils.run(cmd)
if not root_offload:
cmd = 'ethtool -K {dev} tso off gso off gro off'
cmd = cmd.format(dev=peer)
Utils.run(cmd)
# Enable devices.
cmd = 'ip link set {iface} up'.format(iface=iface)
Utils.run(cmd)
cmd = 'ip link set {peer} up'.format(peer=peer)
Utils.run(cmd)
# Add peer to bridge.
IfUtils.hook_br_iface(brdev, peer)
# Configure addr if netns is None (ie. root device) and addr specified.
if netns is None: # Root netns.
if addr is None:
return # Nothing else to do.
# Set address for root device.
if ip_mode == socket.AF_INET:
cmd = 'ifconfig {iface} {addr} up'
cmd = cmd.format(iface=iface,
addr=addr)
Utils.run(cmd)
else:
cmd = 'ifconfig {iface} up'
cmd = cmd.format(iface=iface)
Utils.run(cmd)
cmd = 'ifconfig {iface} inet6 add {addr}'
cmd = cmd.format(iface=iface,
addr=addr)
Utils.run(cmd)
else: # Container netns.
cmd = 'ip link set {iface} netns {netns}'
cmd = cmd.format(iface=iface,
netns=netns)
Utils.run(cmd)
# Now rename it within the node netns.
netns_exec = 'ip netns exec {netns}'.format(netns=netns)
# Ifdown.
cmd = '{netexec} ip link set {iface} down'
cmd = cmd.format(netexec=netns_exec,
iface=iface)
Utils.run(cmd)
# Strip the node_ prefix from the name to rename.
newname = iface[len('%s-' % netns):]
cmd = '{netexec} ip link set {iface} name {new}'
cmd = cmd.format(netexec=netns_exec, iface=iface, new=newname)
Utils.run(cmd)
# Ifup.
cmd = '{netexec} ip link set {iface} up'
cmd = cmd.format(netexec=netns_exec, iface=newname)
Utils.run(cmd)
@staticmethod
@traced
def del_iface(iface):
"""Delete interface."""
# Delete interface.
cmd = 'ip link del dev {iface}'.format(iface=iface)
Utils.run(cmd)
# Verify it's gone.
cmd = 'ip addr show dev {iface}'.format(iface=iface)
out, err, _ = Utils.run(cmd)
lines = out.splitlines()
errlines = err.splitlines()
assert not lines, 'Got output [%s] for %s' % (lines, cmd)
assert 'does not exist' in errlines[0], ('Device %s '
'still exists.' % iface)
class ContainerCtx(object):
"""Transperf singleserver container management object.
Contains methods for setting up, managing and tearing down container
environments for single server transperf.
"""
def __init__(self, brdev, nodes, out_dir, ip_mode):
"""Initialize ContainerContext object.
Args:
brdev: The name of the bridge used by transperf.
nodes: A list of node names.
out_dir: Output directory for transperf.
ip_mode: Whether we are using IPv4 or IPv6.
Returns:
An initialized ContainerCtx.
Raises:
Nothing.
"""
self.brdev = brdev
self.nodes = nodes
self.out_dir = out_dir
self.ip_mode = ip_mode
self.uts_ns_pfx = ContainerCtx.get_uts_ns_pfx(self.out_dir)
self.pid_ns_pfx = ContainerCtx.get_pid_ns_pfx(self.out_dir)
@staticmethod
def get_pid_ns_pfx(out_dir):
return os.path.join(out_dir, 'pid')
@staticmethod
def get_node_pidfile(out_dir, node):
return os.path.join(ContainerCtx.get_pid_ns_pfx(out_dir),
'%s_init' % node)
@staticmethod
def get_uts_ns_pfx(out_dir):
return os.path.join(out_dir, 'uts')
@staticmethod
def get_node_uts_ns_path(out_dir, node):
return os.path.join(ContainerCtx.get_uts_ns_pfx(out_dir), node)
@staticmethod
def get_node_pid_ns_path(out_dir, node):
return os.path.join(ContainerCtx.get_pid_ns_pfx(out_dir), node)
@staticmethod
def get_node_net_ns_path(node):
return os.path.join(Constants.NET_NS_PFX, node)
@traced
def setup_container_environment(self):
"""Setup container environment for experiment.
Performs the following actions:
1. Delete existing node containers and bridge device, as well as all
interfaces connected to bridge device. Unmounts /etc/hosts if needed.
(We temporarily bind mount per transperf run for node addresses).
2. Creates virtual bridge and rootns veth pair for contacting nodes in
namespace (ROOT_TRANSPERF_ADDR/TRANSPERF_SUBNET) with stp off.
3. Remounts (--make-private, --bind) mount namespace dir.
4. Creates a container per node with these persistent namespaces:
uts: <outdir>/uts/<container>
mount: <outdir>/mntns/<container>
netns: /var/run/netns/<container>
with a running 'screen' session as the initial process.
Creates per-node directories and performs necessary mount ops.
NB: This method does *not* connect the containers to the bridge
(see: container.setup_all_container_interfaces() instead).
It also does not create a custom /etc/hosts file (only orch.py can do
that since it can vary from config file to config file).
It also does not handle custom /etc/hosts file bind-mounting
(see: initialization code in recv.py/send.py/orch.py instead).
Raises:
RuntimeError if an operation fails during container setup.
"""
# Ensures bonding module is loaded.
IfUtils.ensure_bonding_available()
# Delete existing bridge.
IfUtils.delete_bridge(self.brdev)
# Prepare to create namespaces.
self.__prepare_ns_dirs()
# Cleanup existing nodes as necessary.
for node in self.nodes:
net_ns = node
cmd = 'ip netns del {ns}'.format(ns=net_ns)
Utils.run(cmd)
# NB: We do not clean up the node processes, however.
# Create bridge.
IfUtils.create_bridge(self.brdev)
assert IfUtils.br_exists(self.brdev), ('Cannot create '
'bridge %s' % self.brdev)
# Create root veth pair and attach to bridge.
IfUtils.setup_root_veth(self.brdev, self.ip_mode)
IfUtils.verify_iface_cfg(Constants.ROOT_TRANSPERF_IFACE, self.ip_mode)
IfUtils.verify_iface_on_br(self.brdev,
Constants.ROOT_TRANSPERF_IFACE['br-pair'])
# Create node containers.
for node in self.nodes:
self.__create_node_container(node)
@traced
def __prepare_ns_dirs(self):
"""Creates and prepares namespace directories."""
# Ensure UTS namespace directory exists.
try:
os.makedirs(self.uts_ns_pfx)
except OSError:
assert os.path.isdir(self.uts_ns_pfx), ('UTS path %s '
'does not exist, '
'cannot be created.' %
self.uts_ns_pfx)
# Ensure PID namespace directory exists.
try:
os.makedirs(self.pid_ns_pfx)
except OSError:
assert os.path.isdir(self.pid_ns_pfx), ('PID path %s '
'does not exist, '
'cannot be created.' %
self.pid_ns_pfx)
@traced
def __create_node_container(self, node):
"""Create a container for a node."""
# Get persistent namespace paths.
uts_ns = os.path.join(self.uts_ns_pfx, node)
pid_ns = os.path.join(self.pid_ns_pfx, node)
# Create the netns.
cmd = 'ip netns add {netns}'.format(netns=node)
Utils.run(cmd)
# Create the persistent non-net namespaces.
open(uts_ns, 'a').close()
open(pid_ns, 'a').close()
# Start node init process.
cmd = transperf.path.nodeinit()
node_pidfile = ContainerCtx.get_node_pidfile(self.out_dir, node)
unshare(node_pidfile, uts_ns, pid_ns,
'python2', cmd, node)
# Make directories and mount.
node_root = ContainerCtx.get_node_root(self.out_dir, node)
for dirname in Constants.VIRTUALIZED_PATHS:
# We can't use os.path.join because it discards paths that occur
# before any path with a leading slash.
custom_dir = os.path.normpath(os.path.sep.join([node_root,
dirname]))
Utils.debug('Create dir %s for node %s (node_root %s, dirname %s)',
custom_dir, node, node_root, dirname)
os.makedirs(custom_dir)
@staticmethod
def get_node_root(out_dir, node):
"""Get the node filesystem root directory."""
return os.path.join(out_dir, 'fs', node)
def __get_path_pfxs(self, paths):
"""Gets the subset of paths that aren't prefixed by others."""
# For each path, False means we think no other path is a prefix.
pfxs = {os.path.normpath(path): False for path in paths}
for pfx in pfxs:
other_pfxs = [other for other in pfxs.keys() if other != pfx]
for other in other_pfxs:
# Test if we're contained in other, vice versa, or disjoint.
common = os.path.commonprefix([pfx, other])
if pfx == common:
pfxs[other] = True # Other is contained by us
elif other == common:
pfxs[pfx] = True # We are contained by other
else:
pass # Neither of us contain the other
# Every path has been examined by every other path. Get the list of
# uncontained paths as our mount points.
return [pathstr for pathstr in pfxs if not pfxs[pathstr]]
@traced
def setup_all_container_interfaces(self, node_cfg_dict):
"""Assigns addresses and creates interfaces for all nodes.
Performs the following actions:
1. For each node, assign an address, >= 1 + ROOT_TRANSPERF_ADDR.
2. For each node, setup the container interfaces with the assigned
address.
Args:
node_cfg_dict: A per-node configuration for interfaces.
Return:
Nothing.
Raises:
RuntimeError: if an operation fails.
"""
base = Utils.ip_numeric(Constants.ROOT_TRANSPERF_ADDR[self.ip_mode],
self.ip_mode)
nextval = base + 1
mask = Constants.TRANSPERF_SUBNET[self.ip_mode]
node_dns = []
# Check if we have too many nodes.
max_nodes = Constants.MAX_NODES[self.ip_mode]
if len(self.nodes) >= max_nodes:
raise RuntimeError('Too many nodes (%d given, max %d)' %
(len(self.nodes), max_nodes))
# IFB device module is not virtualized by netns. Need to setup IFBs in
# root namespace and move into the per-node namespaces.
self.__setup_node_ifbs(node_cfg_dict)
# Assign subsequent nodes the next available IP address.
for node in self.nodes:
val = nextval
nextval += 1
node_addr = '{ip}/{mask}'.format(ip=Utils.numeric_ip(val,
self.ip_mode),
mask=mask)
# Get per-node cfg and setup interfaces for node.
node_cfg = self.__get_node_cfg(node, node_cfg_dict)
self.__setup_container_interfaces(node, node_addr, node_cfg)
# DNS entry for augmented /etc/hosts file.
dns = '{addr} {node}'.format(addr=node_addr.split('/')[0],
node=node)
node_dns.append(dns)
# Add nodes to hosts file and bind-mount it on top of regular file.
new_hosts = os.path.join(self.out_dir, 'hosts')
with open(new_hosts, 'w') as new_file:
for dns in node_dns:
new_file.write('%s\n' % dns)
@traced
def __setup_container_interfaces(self, node, node_addr, node_cfg):
"""Creates (bond and 'physical') interfaces for the given node.
Creates interfaces for the given node, connects them to the provided
bridge, and configures their addresses.
Precondition: no enabled bond or other interfaces exist in the
container.
Performs the following actions:
1. For each 'physical' interface, creates a veth-pair in the root
namespace and moves it to the target node netns.
2. Attaches the root-end of the veth-pair to the transperf bridge.
3. Renames the node-end of the veth-pair to the config-provided value.
4. Creates a bond-device within the node netns and assigns all
node-ends of all veth-pairs to the bond-device.
5. Assigns the node-end of the veth-pair an IP address (either from the
config file, or otherwise chosen by transperf from 10/24).
6. Clears the ARP cache within the node.
Args:
node: The current node being configured.
node_addr: The IP address for the current node.
node_cfg: A dict with the interface settings for the node.
Returns:
Nothing.
Raises:
            RuntimeError: if an operation fails.
"""
phys_ifaces = [iface for iface in node_cfg['ifaces']
if not iface.startswith('regex:')]
# Create veth pairs, one for each 'physical' interface.
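        # Naming example (illustrative): for node 'n0' and iface 'eth0', the
        # pair is created as 'n0-eth0' (node end, later renamed to 'eth0'
        # inside the netns) and 'br-n0-eth0' (root end, attached to the bridge).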
for iface in phys_ifaces:
# rootname is used when we first create the interface; it is renamed
# within the container after it is moved there by setup_iface.
rootname = '{node}-{iface}'.format(node=node, iface=iface)
peer = 'br-{node}-{iface}'.format(node=node, iface=iface)
IfUtils.setup_iface(
self.brdev, rootname, peer, self.ip_mode,
root_offload=node_cfg['root_nic_offloads_enabled'],
container_offload=node_cfg['container_nic_offloads_enabled'],
netns=node)
# Get bond device parameters.
bond_iface = node_cfg['bond']
bond_path = Constants.BOND_PATH_TEMPLATE.format(bond=bond_iface)
netns_exec = 'ip netns exec {node}'.format(node=node)
echo_exec = '{netexec} sh -c'.format(netexec=netns_exec)
# Create bond device within the node (per config).
cmd = '{echoexec} \'echo +{bond} > {bondmasters}\''
cmd = cmd.format(echoexec=echo_exec, bond=bond_iface,
bondmasters=Constants.BOND_MASTERS)
Utils.run(cmd)
# Set bond mode and hash function within the node.
bond_mode_path = os.path.join(bond_path, 'mode')
cmd = '{echoexec} \'echo {mode} > {modepath}\''
cmd = cmd.format(echoexec=echo_exec, mode=Constants.DEFAULT_BOND_MODE,
modepath=bond_mode_path)
Utils.run(cmd)
bond_policy_path = os.path.join(bond_path, 'xmit_hash_policy')
cmd = '{echoexec} \'echo {policy} > {xmitpath}\''
cmd = cmd.format(echoexec=echo_exec,
policy=Constants.DEFAULT_BOND_XMIT_HASH_POLICY,
xmitpath=bond_policy_path)
Utils.run(cmd)
# Add all 'physical' interfaces to the bond device.
bond_slaves = os.path.join(bond_path, 'slaves')
for iface in phys_ifaces:
# ifdown
cmd = '{netexec} ip link set {iface} down'
cmd = cmd.format(netexec=netns_exec,
iface=iface)
Utils.run(cmd)
# Add to bond
cmd = '{netexec} sh -c \'echo +{iface} > {slaves}\''
cmd = cmd.format(netexec=netns_exec,
iface=iface,
slaves=bond_slaves)
Utils.run(cmd)
# ifup
cmd = '{netexec} ip link set {iface} up'
cmd = cmd.format(netexec=netns_exec,
iface=iface)
Utils.run(cmd)
# Assign address to bond device.
if self.ip_mode == socket.AF_INET:
cmd = '{netexec} ifconfig {bond} {addr} up'
cmd = cmd.format(netexec=netns_exec, bond=bond_iface,
addr=node_addr)
Utils.run(cmd)
else:
cmd = '{netexec} ifconfig {bond} up'
cmd = cmd.format(netexec=netns_exec, bond=bond_iface)
Utils.run(cmd)
cmd = '{netexec} ifconfig {bond} inet6 add {addr}'
cmd = cmd.format(netexec=netns_exec, bond=bond_iface,
addr=node_addr)
Utils.run(cmd)
# Clear node ARP cache.
cmd = '{netexec} ip -s -s neigh flush all'
        cmd = cmd.format(netexec=netns_exec)
Utils.run(cmd)
# Enable loopback.
cmd = '{netexec} ifconfig lo up'
cmd = cmd.format(netexec=netns_exec)
Utils.run(cmd)
@traced
def __setup_node_ifbs(self, node_cfg_dict):
"""Setup node IFBs some basic rules.
Rules:
1. No one else on the system is using an IFB.
2. Every node has 1+N interfaces, where N is the number of 'physical'
(non-bond) interfaces on the node.
Args:
node_cfg_dict: A dictionary of per-node interface configs.
Returns:
Nothing.
"""
# Compute the number of IFBs we need.
next_ifb_idx = 0
ifb_node_mappings = []
for node in self.nodes:
node_cfg = self.__get_node_cfg(node, node_cfg_dict)
physical_ifaces = [iface for iface in node_cfg['ifaces']
if not iface.startswith('regex:')]
LOG.debug('Node %s phys ifaces %s', node, physical_ifaces)
all_ifaces = [node_cfg['bond']] + physical_ifaces
LOG.debug('Node %s all ifaces %s', node, all_ifaces)
# Assign mapping from root-ns ifb to the corresponding node iface.
for iface in all_ifaces:
LOG.debug('Handle iface %s from %s', iface, all_ifaces)
ifb_node_mappings.append((node, 'ifb%d' % next_ifb_idx, iface))
next_ifb_idx += 1
LOG.debug('Mappings: %s', ifb_node_mappings)
# Setup the module.
Utils.run('rmmod ifb')
Utils.run('modprobe ifb numifbs=%s' % len(ifb_node_mappings))
# Now we move the ifbs into the destination node namespaces.
for node, ifb, node_iface in ifb_node_mappings:
LOG.debug('Handle node %s ifb %s node_iface %s', node, ifb,
node_iface)
# Disable ifb.
cmd = 'ip link set {ifb} down'.format(ifb=ifb)
Utils.run(cmd)
# Move to node.
cmd = 'ip link set {ifb} netns {node}'
cmd = cmd.format(ifb=ifb, node=node)
Utils.run(cmd)
# Rename.
newname = Receiver.get_ifb_for_iface(node_iface)
netns_exec = 'ip netns exec {node}'.format(node=node)
cmd = '{netexec} ip link set {ifb} name {new}'
cmd = cmd.format(netexec=netns_exec, ifb=ifb, new=newname)
Utils.run(cmd)
# Enable ifb.
cmd = '{netexec} ip link set {ifb} up'
cmd = cmd.format(netexec=netns_exec, ifb=newname)
Utils.run(cmd)
def __get_node_cfg(self, node, node_cfg_dict):
"""Gets the interface configuration for this node."""
if node not in node_cfg_dict:
LOG.debug('Node %s not in cfg dict (%s), '
'using default: %s',
node,
node_cfg_dict,
Constants.DEFAULT_NODE_CFG)
return Constants.DEFAULT_NODE_CFG
LOG.debug('Node %s using iface cfg %s', node, node_cfg_dict[node])
return node_cfg_dict[node]
@traced
def cleanup_all_container_interfaces(self, node_cfg_dict):
"""Clears (bond and 'physical') interfaces for the given node.
Clears interfaces for the given node.
Args:
node_cfg_dict: A dict with the interface settings for the node.
Returns:
Nothing.
Raises:
            RuntimeError: if an operation fails.
"""
for node in self.nodes:
self.__cleanup_container_interfaces(node, node_cfg_dict[node])
@traced
def __cleanup_container_interfaces(self, node, node_cfg):
"""Cleans up interfaces between subsequent transperf cfgs.
A single invocation of transperf may run experiments in >1 config file.
Each config may have its own setup for per-node interfaces. Transperf
uses freshly created interfaces for each experiment run in singleserver
mode to provide a clean slate for each experiment.
Args:
node: The node being cleaned up.
node_cfg: A dict with the interface settings for the node.
Returns:
Nothing.
        Raises:
            RuntimeError: if an operation fails.
        """
netns_exec = 'ip netns exec {node}'.format(node=node)
        # Remove the bond interface.
bond_iface = node_cfg['bond']
cmd = '{netexec} ip link del dev {bond}'.format(netexec=netns_exec,
bond=bond_iface)
Utils.run(cmd)
        # Remove the 'physical' interfaces (node end and root-ns bridge end).
phys_ifaces = [iface for iface in node_cfg['ifaces']
if not iface.startswith('regex:')]
for iface in phys_ifaces:
peer = 'br-{node}-{iface}'.format(node=node, iface=iface)
# Delete iface in netns.
cmd = '{netexec} ip link del dev {iface}'
cmd = cmd.format(netexec=netns_exec, iface=iface)
Utils.run(cmd)
# Delete bridge side in root ns.
cmd = 'ip link del dev {iface}'.format(iface=peer)
Utils.run(cmd)
    def default_node_iface_setup(self, nodes):
        """Sets up all nodes with the default interface config and returns it."""
node_cfg = {node: Constants.DEFAULT_NODE_CFG for node in nodes}
self.setup_all_container_interfaces(node_cfg)
return node_cfg
def get_init_pid_from_unshare(unshare_pid):
    """Returns the pid of unshare's only child (the node init process)."""
    unshare_pid = str(unshare_pid)
output = shell.run('pgrep -P %s' % unshare_pid)[0].splitlines()
return output[0]
def unshare(pidfile, uts, pidns, cmd, *args):
"""Wrapper function for running given command with unshare."""
# Run unshare, invoking the node init process, and get the pid.
build = ['setsid',
'unshare',
'--fork',
'--mount-proc',
'--uts=%s' % uts,
'--pid=%s' % pidns,
cmd,]
build += args
proc = subprocess.Popen(build)
unshare_pid = proc.pid
# Wait for unshare to do its thing.
time.sleep(Constants.UNSHARE_DELAY_SECONDS)
# Get pid for node init process - it's the only child of unshare.
init_pid = get_init_pid_from_unshare(unshare_pid)
# Write it to the node pidfile.
with open(pidfile, 'w') as fd:
fd.write('%s\n' % init_pid)
LOG.info('Node init pid is: %s:%s\n', pidfile, init_pid)
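# Minimal usage sketch for unshare() (illustrative; all paths are assumptions):
#   unshare('/tmp/transperf/out/node0.pid',
#           '/tmp/transperf/uts/node0', '/tmp/transperf/pid/node0',
#           'python2', '/path/to/nodeinit.py', 'node0')
# launches the node init process under setsid+unshare in fresh UTS/PID/mount
# namespaces and records the init pid in the given pidfile.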
def main():
"""main() initializes a container environment for singleserver transperf.
launch.py will call main() over ssh to set up a singleserver
transperf environment if flagged to do so. main() can also be
    invoked manually to provide a quick, flat-L2, container-based environment to
play around in.
Returns:
0 upon successful exit.
"""
# Setup CLI args
parser = argparse.ArgumentParser(description='transperf container setup.')
parser.add_argument('rcvnode', nargs=1, help='Receiver node (1)')
parser.add_argument('sndnodes', nargs='+', help='Sender nodes (>=1)')
parser.add_argument('-b', nargs='?',
const=Constants.DEFAULT_BRIDGE,
default=Constants.DEFAULT_BRIDGE,
dest='brdev', help='Specify bridge device')
parser.add_argument('-d', nargs='?', const=None, default=None,
dest='out_dir', help='Specify out directory')
parser.add_argument('--demo', action='store_true', default=False,
dest='demo', help='Setup basic NICs')
parser.add_argument('-v', action='store_true', default=False,
                        dest='debug', help='Enable debug output')
parser.add_argument('--ifacecfg', nargs='?', dest='ifacecfg',
help='Node interfaces config.')
parser.add_argument('--ip_mode', nargs='?', dest='ip_mode',
help='IP mode (4 or 6 (default)).', default=6)
# Get args
args = parser.parse_args()
# Setup logging.
log.setup_logging([(arg, getattr(args, arg)) for arg in vars(args)])
ip_mode = transperf.ip_modes[int(args.ip_mode)]
brdev = args.brdev
demo_mode = args.demo
nodes = args.rcvnode + args.sndnodes
if args.ifacecfg is not None:
iface_cfgname = args.ifacecfg
ifacecfg = os.path.abspath(os.path.expanduser(iface_cfgname))
node_cfg = transperf.InterfaceConfig.validate_config(ifacecfg, LOG)
else:
iface_cfgname = 'default'
node_cfg = {node: dict(Constants.DEFAULT_NODE_CFG) for node in nodes}
# For receiver node, we need to disable NIC offloads.
for rcvr in args.rcvnode:
LOG.debug('Disabling root/container offloads for receiver %s', rcvr)
node_cfg[rcvr]['root_nic_offloads_enabled'] = False
node_cfg[rcvr]['container_nic_offloads_enabled'] = False
# Create output directory.
out_dir = args.out_dir
if out_dir is not None:
out_dir = os.path.abspath(os.path.expanduser(args.out_dir))
else:
out_dir = os.path.join(Constants.DEFAULT_OUTDIR_BASE,
Utils.timestamp_dirname())
try:
os.makedirs(out_dir)
except OSError:
assert os.path.isdir(out_dir), ('Output dir %s does not exist '
'and cannot be created.' % out_dir)
# Setup container environment.
ctx = ContainerCtx(brdev, nodes, out_dir, ip_mode)
ctx.setup_container_environment()
# Setup basic connectivity in demo mode.
if demo_mode:
demo_cfg = ctx.default_node_iface_setup(nodes)
raw_input('Press enter to remove all interfaces for test.')
ctx.cleanup_all_container_interfaces(demo_cfg)
raw_input('Press enter to re-add all interfaces for test.')
ctx.setup_all_container_interfaces(demo_cfg)
print('\nDemo mode complete; leaving containers running.')
else:
# Setup requested connectivity.
ctx.setup_all_container_interfaces(node_cfg)
return 0
if __name__ == '__main__':
sys.exit(main())
|
google/transperf
|
virtsetup.py
|
Python
|
apache-2.0
| 44,507
|
import sys
import pprint
import bucky3.module as module
class DebugOutput(module.MetricsPushProcess):
    """Debug destination that pretty-prints received metrics to stderr."""
    pprinter = pprint.PrettyPrinter(stream=sys.stderr, indent=1, width=120, depth=5, compact=False)
def process_values(self, *args, **kwargs):
if args:
self.pprinter.pprint(args)
if kwargs:
self.pprinter.pprint(kwargs)
|
jsiembida/bucky3
|
bucky3/debug.py
|
Python
|
apache-2.0
| 370
|
#!/usr/bin/env python3
# Copyright (c) 2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importdescriptors RPC.
Test importdescriptors by generating keys on node0, importing the corresponding
descriptors on node1 and then testing the address info for the different address
variants.
- `get_generate_key()` is called to generate keys and return the privkeys,
pubkeys and all variants of scriptPubKey and address.
- `test_importdesc()` is called to send an importdescriptors call to node1, test
success, and (if unsuccessful) test the error code and error message returned.
- `test_address()` is called to call getaddressinfo for an address on node1
and test the values returned."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
find_vout_for_address,
)
from test_framework.wallet_util import (
get_generate_key,
test_address,
)
class ImportDescriptorsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [["-addresstype=legacy"],
["-addresstype=bech32", "-keypool=5"]
]
self.setup_clean_chain = True
self.wallet_names = []
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_sqlite()
def test_importdesc(self, req, success, error_code=None, error_message=None, warnings=None, wallet=None):
"""Run importdescriptors and assert success"""
if warnings is None:
warnings = []
wrpc = self.nodes[1].get_wallet_rpc('w1')
if wallet is not None:
wrpc = wallet
result = wrpc.importdescriptors([req])
observed_warnings = []
if 'warnings' in result[0]:
observed_warnings = result[0]['warnings']
assert_equal("\n".join(sorted(warnings)), "\n".join(sorted(observed_warnings)))
assert_equal(result[0]['success'], success)
if error_code is not None:
assert_equal(result[0]['error']['code'], error_code)
assert_equal(result[0]['error']['message'], error_message)
def run_test(self):
self.log.info('Setting up wallets')
self.nodes[0].createwallet(wallet_name='w0', disable_private_keys=False, descriptors=True)
w0 = self.nodes[0].get_wallet_rpc('w0')
self.nodes[1].createwallet(wallet_name='w1', disable_private_keys=True, blank=True, descriptors=True)
w1 = self.nodes[1].get_wallet_rpc('w1')
assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
self.nodes[1].createwallet(wallet_name="wpriv", disable_private_keys=False, blank=True, descriptors=True)
wpriv = self.nodes[1].get_wallet_rpc("wpriv")
assert_equal(wpriv.getwalletinfo()['keypoolsize'], 0)
self.log.info('Mining coins')
w0.generatetoaddress(101, w0.getnewaddress())
# RPC importdescriptors -----------------------------------------------
# # Test import fails if no descriptor present
key = get_generate_key()
self.log.info("Import should fail if a descriptor is not provided")
self.test_importdesc({"timestamp": "now"},
success=False,
error_code=-8,
error_message='Descriptor not found.')
# # Test importing of a P2PKH descriptor
key = get_generate_key()
self.log.info("Should import a p2pkh descriptor")
self.test_importdesc({"desc": descsum_create("pkh(" + key.pubkey + ")"),
"timestamp": "now",
"label": "Descriptor import test"},
success=True)
test_address(w1,
key.p2pkh_addr,
solvable=True,
ismine=True,
labels=["Descriptor import test"])
assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
self.log.info("Internal addresses cannot have labels")
self.test_importdesc({"desc": descsum_create("pkh(" + key.pubkey + ")"),
"timestamp": "now",
"internal": True,
"label": "Descriptor import test"},
success=False,
error_code=-8,
error_message="Internal addresses should not have a label")
# # Test importing of a P2SH-P2WPKH descriptor
key = get_generate_key()
self.log.info("Should not import a p2sh-p2wpkh descriptor without checksum")
self.test_importdesc({"desc": "sh(wpkh(" + key.pubkey + "))",
"timestamp": "now"
},
success=False,
error_code=-5,
error_message="Missing checksum")
self.log.info("Should not import a p2sh-p2wpkh descriptor that has range specified")
self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
"timestamp": "now",
"range": 1,
},
success=False,
error_code=-8,
error_message="Range should not be specified for an un-ranged descriptor")
self.log.info("Should not import a p2sh-p2wpkh descriptor and have it set to active")
self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
"timestamp": "now",
"active": True,
},
success=False,
error_code=-8,
error_message="Active descriptors must be ranged")
self.log.info("Should import a (non-active) p2sh-p2wpkh descriptor")
self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
"timestamp": "now",
"active": False,
},
success=True)
assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
test_address(w1,
key.p2sh_p2wpkh_addr,
ismine=True,
solvable=True)
# Check persistence of data and that loading works correctly
w1.unloadwallet()
self.nodes[1].loadwallet('w1')
test_address(w1,
key.p2sh_p2wpkh_addr,
ismine=True,
solvable=True)
# # Test importing of a multisig descriptor
key1 = get_generate_key()
key2 = get_generate_key()
self.log.info("Should import a 1-of-2 bare multisig from descriptor")
self.test_importdesc({"desc": descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey + ")"),
"timestamp": "now"},
success=True)
self.log.info("Should not treat individual keys from the imported bare multisig as watchonly")
test_address(w1,
key1.p2pkh_addr,
ismine=False)
# # Test ranged descriptors
xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
xpub = "tpubD6NzVbkrYhZ4YNXVQbNhMK1WqguFsUXceaVJKbmno2aZ3B6QfbMeraaYvnBSGpV3vxLyTTK9DYT1yoEck4XUScMzXoQ2U2oSmE2JyMedq3H"
addresses = ["2N7yv4p8G8yEaPddJxY41kPihnWvs39qCMf", "2MsHxyb2JS3pAySeNUsJ7mNnurtpeenDzLA"] # hdkeypath=m/0'/0'/0' and 1'
addresses += ["bcrt1qrd3n235cj2czsfmsuvqqpr3lu6lg0ju7scl8gn", "bcrt1qfqeppuvj0ww98r6qghmdkj70tv8qpchehegrg8"] # wpkh subscripts corresponding to the above addresses
desc = "sh(wpkh(" + xpub + "/0/0/*" + "))"
self.log.info("Ranged descriptors cannot have labels")
self.test_importdesc({"desc":descsum_create(desc),
"timestamp": "now",
"range": [0, 100],
"label": "test"},
success=False,
error_code=-8,
error_message='Ranged descriptors should not have a label')
self.log.info("Private keys required for private keys enabled wallet")
self.test_importdesc({"desc":descsum_create(desc),
"timestamp": "now",
"range": [0, 100]},
success=False,
error_code=-4,
error_message='Cannot import descriptor without private keys to a wallet with private keys enabled',
wallet=wpriv)
self.log.info("Ranged descriptor import should warn without a specified range")
self.test_importdesc({"desc": descsum_create(desc),
"timestamp": "now"},
success=True,
warnings=['Range not given, using default keypool range'])
assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
# # Test importing of a ranged descriptor with xpriv
self.log.info("Should not import a ranged descriptor that includes xpriv into a watch-only wallet")
desc = "sh(wpkh(" + xpriv + "/0'/0'/*'" + "))"
self.test_importdesc({"desc": descsum_create(desc),
"timestamp": "now",
"range": 1},
success=False,
error_code=-4,
error_message='Cannot import private keys to a wallet with private keys disabled')
for address in addresses:
test_address(w1,
address,
ismine=False,
solvable=False)
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": -1},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [-1, 10]},
success=False, error_code=-8, error_message='Range should be greater or equal than 0')
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [2, 1]},
success=False, error_code=-8, error_message='Range specified as [begin,end] must not have begin after end')
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [0, 1000001]},
success=False, error_code=-8, error_message='Range is too large')
# Make sure ranged imports import keys in order
w1 = self.nodes[1].get_wallet_rpc('w1')
self.log.info('Key ranges should be imported in order')
xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
addresses = [
'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv', # m/0'/0'/0
'bcrt1q8vprchan07gzagd5e6v9wd7azyucksq2xc76k8', # m/0'/0'/1
'bcrt1qtuqdtha7zmqgcrr26n2rqxztv5y8rafjp9lulu', # m/0'/0'/2
'bcrt1qau64272ymawq26t90md6an0ps99qkrse58m640', # m/0'/0'/3
'bcrt1qsg97266hrh6cpmutqen8s4s962aryy77jp0fg0', # m/0'/0'/4
]
self.test_importdesc({'desc': descsum_create('wpkh([80002067/0h/0h]' + xpub + '/*)'),
'active': True,
'range' : [0, 2],
'timestamp': 'now'
},
success=True)
self.test_importdesc({'desc': descsum_create('sh(wpkh([abcdef12/0h/0h]' + xpub + '/*))'),
'active': True,
'range' : [0, 2],
'timestamp': 'now'
},
success=True)
self.test_importdesc({'desc': descsum_create('pkh([12345678/0h/0h]' + xpub + '/*)'),
'active': True,
'range' : [0, 2],
'timestamp': 'now'
},
success=True)
assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)
for i, expected_addr in enumerate(addresses):
received_addr = w1.getnewaddress('', 'bech32')
assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'bech32')
assert_equal(received_addr, expected_addr)
bech32_addr_info = w1.getaddressinfo(received_addr)
assert_equal(bech32_addr_info['desc'][:23], 'wpkh([80002067/0\'/0\'/{}]'.format(i))
shwpkh_addr = w1.getnewaddress('', 'p2sh-segwit')
shwpkh_addr_info = w1.getaddressinfo(shwpkh_addr)
assert_equal(shwpkh_addr_info['desc'][:26], 'sh(wpkh([abcdef12/0\'/0\'/{}]'.format(i))
pkh_addr = w1.getnewaddress('', 'legacy')
pkh_addr_info = w1.getaddressinfo(pkh_addr)
assert_equal(pkh_addr_info['desc'][:22], 'pkh([12345678/0\'/0\'/{}]'.format(i))
assert_equal(w1.getwalletinfo()['keypoolsize'], 4 * 3) # After retrieving a key, we don't refill the keypool again, so it's one less for each address type
w1.keypoolrefill()
assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)
# Check active=False default
self.log.info('Check imported descriptors are not active by default')
self.test_importdesc({'desc': descsum_create('pkh([12345678/0h/0h]' + xpub + '/*)'),
'range' : [0, 2],
'timestamp': 'now',
'internal': True
},
success=True)
assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'legacy')
# # Test importing a descriptor containing a WIF private key
wif_priv = "cTe1f5rdT8A8DFgVWTjyPwACsDPJM9ff4QngFxUixCSvvbg1x6sh"
address = "2MuhcG52uHPknxDgmGPsV18jSHFBnnRgjPg"
desc = "sh(wpkh(" + wif_priv + "))"
self.log.info("Should import a descriptor with a WIF private key as spendable")
self.test_importdesc({"desc": descsum_create(desc),
"timestamp": "now"},
success=True,
wallet=wpriv)
test_address(wpriv,
address,
solvable=True,
ismine=True)
txid = w0.sendtoaddress(address, 49.99995540)
w0.generatetoaddress(6, w0.getnewaddress())
self.sync_blocks()
tx = wpriv.createrawtransaction([{"txid": txid, "vout": 0}], {w0.getnewaddress(): 49.999})
signed_tx = wpriv.signrawtransactionwithwallet(tx)
w1.sendrawtransaction(signed_tx['hex'])
# Make sure that we can use import and use multisig as addresses
self.log.info('Test that multisigs can be imported, signed for, and getnewaddress\'d')
self.nodes[1].createwallet(wallet_name="wmulti_priv", disable_private_keys=False, blank=True, descriptors=True)
wmulti_priv = self.nodes[1].get_wallet_rpc("wmulti_priv")
assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 0)
self.test_importdesc({"desc":"wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/0h/0h/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/0h/0h/*,tprv8ZgxMBicQKsPeonDt8Ka2mrQmHa61hQ5FQCsvWBTpSNzBFgM58cV2EuXNAHF14VawVpznnme3SuTbA62sGriwWyKifJmXntfNeK7zeqMCj1/84h/0h/0h/*))#m2sr93jn",
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"},
success=True,
wallet=wmulti_priv)
self.test_importdesc({"desc":"wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/1h/0h/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/1h/0h/*,tprv8ZgxMBicQKsPeonDt8Ka2mrQmHa61hQ5FQCsvWBTpSNzBFgM58cV2EuXNAHF14VawVpznnme3SuTbA62sGriwWyKifJmXntfNeK7zeqMCj1/84h/1h/0h/*))#q3sztvx5",
"active": True,
"internal" : True,
"range": 1000,
"next_index": 0,
"timestamp": "now"},
success=True,
wallet=wmulti_priv)
assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1001) # Range end (1000) is inclusive, so 1001 addresses generated
addr = wmulti_priv.getnewaddress('', 'bech32')
assert_equal(addr, 'bcrt1qdt0qy5p7dzhxzmegnn4ulzhard33s2809arjqgjndx87rv5vd0fq2czhy8') # Derived at m/84'/0'/0'/0
change_addr = wmulti_priv.getrawchangeaddress('bech32')
assert_equal(change_addr, 'bcrt1qt9uhe3a9hnq7vajl7a094z4s3crm9ttf8zw3f5v9gr2nyd7e3lnsy44n8e')
assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1000)
txid = w0.sendtoaddress(addr, 10)
self.nodes[0].generate(6)
self.sync_all()
send_txid = wmulti_priv.sendtoaddress(w0.getnewaddress(), 8)
decoded = wmulti_priv.decoderawtransaction(wmulti_priv.gettransaction(send_txid)['hex'])
assert_equal(len(decoded['vin'][0]['txinwitness']), 4)
self.nodes[0].generate(6)
self.sync_all()
self.nodes[1].createwallet(wallet_name="wmulti_pub", disable_private_keys=True, blank=True, descriptors=True)
wmulti_pub = self.nodes[1].get_wallet_rpc("wmulti_pub")
assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 0)
self.test_importdesc({"desc":"wsh(multi(2,[7b2d0242/84h/0h/0h]tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*,[59b09cd6/84h/0h/0h]tpubDDBF2BTR6s8drwrfDei8WxtckGuSm1cyoKxYY1QaKSBFbHBYQArWhHPA6eJrzZej6nfHGLSURYSLHr7GuYch8aY5n61tGqgn8b4cXrMuoPH/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))#tsry0s5e",
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"},
success=True,
wallet=wmulti_pub)
self.test_importdesc({"desc":"wsh(multi(2,[7b2d0242/84h/1h/0h]tpubDCXqdwWZcszwqYJSnZp8eARkxGJfHAk23KDxbztV4BbschfaTfYLTcSkSJ3TN64dRqwa1rnFUScsYormKkGqNbbPwkorQimVevXjxzUV9Gf/*,[59b09cd6/84h/1h/0h]tpubDCYfZY2ceyHzYzMMVPt9MNeiqtQ2T7Uyp9QSFwYXh8Vi9iJFYXcuphJaGXfF3jUQJi5Y3GMNXvM11gaL4txzZgNGK22BFAwMXynnzv4z2Jh/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))#c08a2rzv",
"active": True,
"internal" : True,
"range": 1000,
"next_index": 0,
"timestamp": "now"},
success=True,
wallet=wmulti_pub)
assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 1000) # The first one was already consumed by previous import and is detected as used
addr = wmulti_pub.getnewaddress('', 'bech32')
assert_equal(addr, 'bcrt1qp8s25ckjl7gr6x2q3dx3tn2pytwp05upkjztk6ey857tt50r5aeqn6mvr9') # Derived at m/84'/0'/0'/1
change_addr = wmulti_pub.getrawchangeaddress('bech32')
assert_equal(change_addr, 'bcrt1qt9uhe3a9hnq7vajl7a094z4s3crm9ttf8zw3f5v9gr2nyd7e3lnsy44n8e')
assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 999)
txid = w0.sendtoaddress(addr, 10)
vout = find_vout_for_address(self.nodes[0], txid, addr)
self.nodes[0].generate(6)
self.sync_all()
assert_equal(wmulti_pub.getbalance(), wmulti_priv.getbalance())
# Make sure that descriptor wallets containing multiple xpubs in a single descriptor load correctly
wmulti_pub.unloadwallet()
self.nodes[1].loadwallet('wmulti_pub')
self.log.info("Multisig with distributed keys")
self.nodes[1].createwallet(wallet_name="wmulti_priv1", descriptors=True)
wmulti_priv1 = self.nodes[1].get_wallet_rpc("wmulti_priv1")
res = wmulti_priv1.importdescriptors([
{
"desc": descsum_create("wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/0h/0h/*,[59b09cd6/84h/0h/0h]tpubDDBF2BTR6s8drwrfDei8WxtckGuSm1cyoKxYY1QaKSBFbHBYQArWhHPA6eJrzZej6nfHGLSURYSLHr7GuYch8aY5n61tGqgn8b4cXrMuoPH/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))"),
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
},
{
"desc": descsum_create("wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/1h/0h/*,[59b09cd6/84h/1h/0h]tpubDCYfZY2ceyHzYzMMVPt9MNeiqtQ2T7Uyp9QSFwYXh8Vi9iJFYXcuphJaGXfF3jUQJi5Y3GMNXvM11gaL4txzZgNGK22BFAwMXynnzv4z2Jh/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))"),
"active": True,
"internal" : True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
}])
assert_equal(res[0]['success'], True)
assert_equal(res[0]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
assert_equal(res[1]['success'], True)
assert_equal(res[1]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
self.nodes[1].createwallet(wallet_name='wmulti_priv2', blank=True, descriptors=True)
wmulti_priv2 = self.nodes[1].get_wallet_rpc('wmulti_priv2')
res = wmulti_priv2.importdescriptors([
{
"desc": descsum_create("wsh(multi(2,[7b2d0242/84h/0h/0h]tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/0h/0h/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))"),
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
},
{
"desc": descsum_create("wsh(multi(2,[7b2d0242/84h/1h/0h]tpubDCXqdwWZcszwqYJSnZp8eARkxGJfHAk23KDxbztV4BbschfaTfYLTcSkSJ3TN64dRqwa1rnFUScsYormKkGqNbbPwkorQimVevXjxzUV9Gf/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/1h/0h/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))"),
"active": True,
"internal" : True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
}])
assert_equal(res[0]['success'], True)
assert_equal(res[0]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
assert_equal(res[1]['success'], True)
assert_equal(res[1]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
rawtx = self.nodes[1].createrawtransaction([{'txid': txid, 'vout': vout}], {w0.getnewaddress(): 9.999})
tx_signed_1 = wmulti_priv1.signrawtransactionwithwallet(rawtx)
assert_equal(tx_signed_1['complete'], False)
tx_signed_2 = wmulti_priv2.signrawtransactionwithwallet(tx_signed_1['hex'])
assert_equal(tx_signed_2['complete'], True)
self.nodes[1].sendrawtransaction(tx_signed_2['hex'])
self.log.info("Combo descriptors cannot be active")
self.test_importdesc({"desc": descsum_create("combo(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"),
"active": True,
"range": 1,
"timestamp": "now"},
success=False,
error_code=-4,
error_message="Combo descriptors cannot be set to active")
self.log.info("Descriptors with no type cannot be active")
self.test_importdesc({"desc": descsum_create("pk(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"),
"active": True,
"range": 1,
"timestamp": "now"},
success=True,
warnings=["Unknown output type, cannot set descriptor to active."])
if __name__ == '__main__':
ImportDescriptorsTest().main()
|
jonasschnelli/bitcoin
|
test/functional/wallet_importdescriptors.py
|
Python
|
mit
| 26,479
|
# (C) Copyright 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import urlparse
import jira
import yaml
from jinja2 import Template
from monasca_notification.monitoring import client
from monasca_notification.monitoring.metrics import NOTIFICATION_SEND_TIMER
from monasca_notification.plugins.abstract_notifier import AbstractNotifier
"""
Note:
This plugin doesn't support multi-tenancy. Multi-tenancy requires support for
multiple JIRA server URLs. JIRA doesn't support OAuth2 tokens, so we would need
to pass the user credentials in query params and store them in the monasca DB,
which we don't want to do. That is the reason for not supporting true
multi-tenancy.
Multi-tenancy can be achieved by creating issues in different projects for
different tenants on the same JIRA server.
notification.address = https://<jira_url>/?project=<project_name>
Dependencies for Jira
1) The Jira plugin requires the jira library. Consumers need to install
   it via pip
2) e.g. pip install jira
Jira Configuration
1) jira:
username: username
password: password
Sample notification:
monasca notification-create MyIssuer JIRA https://jira.hpcloud.net/?project=MyProject
monasca notification-create MyIssuer1 JIRA https://jira.hpcloud.net/?project=MyProject&
component=MyComponent
"""
STATSD_CLIENT = client.get_client()
STATSD_TIMER = STATSD_CLIENT.get_timer()
class JiraNotifier(AbstractNotifier):
    _search_query = "project={} and reporter='{}' and summary ~ '{}'"
def __init__(self, log):
super(JiraNotifier, self).__init__("jira")
self._log = log
self.jira_fields_format = None
def config(self, config_dict):
super(JiraNotifier, self).config(config_dict)
if not config_dict.get("user") and not config_dict.get("password"):
message = "Missing user and password settings in JIRA plugin configuration"
self._log.exception(message)
raise Exception(message)
self.jira_fields_format = self._get_jira_custom_format_fields()
def _get_jira_custom_format_fields(self):
jira_fields_format = None
if (not self.jira_fields_format and self._config.get("custom_formatter")):
try:
with open(self._config.get("custom_formatter")) as f:
jira_fields_format = yaml.safe_load(f)
except Exception:
self._log.exception("Unable to read custom_formatter file. Check file location")
raise
# Remove the top element
jira_fields_format = jira_fields_format["jira_format"]
return jira_fields_format
def _build_custom_jira_message(self, notification, jira_fields_format):
jira_fields = {}
# Templatize the message object
jira_field_summary_field = jira_fields_format.get("summary", None)
if jira_field_summary_field:
template = Template(jira_field_summary_field)
jira_fields["summary"] = template.render(notification=notification)
jira_field_comments_field = jira_fields_format.get("comments", None)
if jira_field_comments_field:
template = Template(jira_field_comments_field)
jira_fields["comments"] = template.render(notification=notification)
jira_field_description_field = jira_fields_format.get("description", None)
if jira_field_description_field:
template = Template(jira_field_description_field)
jira_fields["description"] = template.render(notification=notification)
return jira_fields
def _build_default_jira_message(self, notification):
"""Builds jira message body
"""
body = {'alarm_id': notification.alarm_id,
'alarm_definition_id': notification.raw_alarm['alarmDefinitionId'],
'alarm_name': notification.alarm_name,
'alarm_description': notification.raw_alarm['alarmDescription'],
'alarm_timestamp': notification.alarm_timestamp,
'state': notification.state,
'old_state': notification.raw_alarm['oldState'],
'message': notification.message,
'tenant_id': notification.tenant_id,
'metrics': notification.metrics}
jira_fields = {}
summary_format_string = "Monasca alarm for alarm_defintion {0} status changed to {1} for the alarm_id {2}"
jira_fields["summary"] = summary_format_string.format(notification.alarm_name,
notification.state,
notification.alarm_id)
jira_fields["comments"] = "{code}%s{code}" % (json.dumps(body, indent=3))
return jira_fields
def _build_jira_message(self, notification):
if self._config.get("custom_formatter"):
return self._build_custom_jira_message(notification, self.jira_fields_format)
return self._build_default_jira_message(notification)
    @STATSD_TIMER.timed(NOTIFICATION_SEND_TIMER, dimensions={'notification_type': 'jira'})
def send_notification(self, notification):
"""Creates or Updates an issue in Jira
"""
jira_fields = self._build_jira_message(notification)
parsed_url = urlparse.urlsplit(notification.address)
query_params = urlparse.parse_qs(parsed_url.query)
# URL without query params
url = urlparse.urljoin(notification.address, urlparse.urlparse(notification.address).path)
jira_fields["project"] = query_params["project"][0]
if query_params.get("component"):
jira_fields["component"] = query_params["component"][0]
auth = (self._config["user"], self._config["password"])
proxyDict = None
if (self._config.get("proxy")):
proxyDict = {"https": self._config.get("proxy")}
try:
jira_obj = jira.JIRA(url, basic_auth=auth, proxies=proxyDict)
self.jira_workflow(jira_fields, jira_obj, notification)
except Exception:
self._log.exception("Error creating issue in Jira at URL {}".format(url))
return False
return True
def jira_workflow(self, jira_fields, jira_obj, notification):
"""How does Jira plugin work?
1) Check whether the issue with same description exists?
2) If issue exists, and if it is closed state, open it
3) if the issue doesn't exist, then create the issue
4) Add current alarm details in comments
"""
issue_dict = {'project': {'key': jira_fields["project"]},
'summary': jira_fields["summary"],
                      'description': 'Monasca alarm',
'issuetype': {'name': 'Bug'}, }
# If the JIRA workflow is created with mandatory components
if jira_fields.get("component"):
issue_dict["components"] = [{"name": jira_fields.get("component")}]
search_term = self._search_query.format(issue_dict["project"]["key"],
self._config["user"], notification.alarm_id)
issue_list = jira_obj.search_issues(search_term)
if not issue_list:
self._log.debug("Creating an issue with the data {}".format(issue_dict))
issue = jira_obj.create_issue(fields=issue_dict)
else:
issue = issue_list[0]
self._log.debug("Found an existing issue {} for this notification".format(issue))
current_state = issue.fields.status.name
if current_state.lower() in ["resolved", "closed"]:
                # The issue is resolved/closed, so reopen it if possible.
                transitions = jira_obj.transitions(issue)
                allowed_transitions = [(t['id'], t['name']) for t in transitions if "reopen" in t['name'].lower()]
                if allowed_transitions:
                    # Reopen the issue
                    jira_obj.transition_issue(issue, allowed_transitions[0][0])
jira_comment_message = jira_fields.get("comments")
if jira_comment_message:
jira_obj.add_comment(issue, jira_comment_message)
|
sapcc/monasca-notification
|
monasca_notification/plugins/jira_notifier.py
|
Python
|
apache-2.0
| 8,896
|
# -*- coding: utf-8 -*-
"""Toolbox to handle date intervals
A period is a triple (unit, start, size), where unit is either "month" or "year", where start format is a
(year, month, day) triple, and where size is an integer > 1.
Since a period is a triple it can be used as a dictionary key.
"""
import calendar
import collections
import datetime
import re
from . import conv
YEAR = u'year'
MONTH = u'month'
N_ = lambda message: message
# Note: weak references are not used, because Python 2.7 can't create weak reference to 'datetime.date' objects.
date_by_instant_cache = {}
str_by_instant_cache = {}
year_or_month_or_day_re = re.compile(ur'(18|19|20)\d{2}(-(0?[1-9]|1[0-2])(-([0-2]?\d|3[0-1]))?)?$')
class Instant(tuple):
def __repr__(self):
"""Transform instant to to its Python representation as a string.
>>> repr(instant(2014))
'Instant((2014, 1, 1))'
>>> repr(instant('2014-2'))
'Instant((2014, 2, 1))'
>>> repr(instant('2014-2-3'))
'Instant((2014, 2, 3))'
"""
return '{}({})'.format(self.__class__.__name__, super(Instant, self).__repr__())
def __str__(self):
"""Transform instant to a string.
>>> str(instant(2014))
'2014-01-01'
>>> str(instant('2014-2'))
'2014-02-01'
>>> str(instant('2014-2-3'))
'2014-02-03'
>>> unicode(instant(2014))
u'2014-01-01'
>>> unicode(instant('2014-2'))
u'2014-02-01'
>>> unicode(instant('2014-2-3'))
u'2014-02-03'
"""
instant_str = str_by_instant_cache.get(self)
if instant_str is None:
str_by_instant_cache[self] = instant_str = self.date.isoformat()
return instant_str
@property
def date(self):
"""Convert instant to a date.
>>> instant(2014).date
datetime.date(2014, 1, 1)
>>> instant('2014-2').date
datetime.date(2014, 2, 1)
>>> instant('2014-2-3').date
datetime.date(2014, 2, 3)
"""
instant_date = date_by_instant_cache.get(self)
if instant_date is None:
date_by_instant_cache[self] = instant_date = datetime.date(*self)
return instant_date
@property
def day(self):
"""Extract day from instant.
>>> instant(2014).day
1
>>> instant('2014-2').day
1
>>> instant('2014-2-3').day
3
"""
return self[2]
@property
def month(self):
"""Extract month from instant.
>>> instant(2014).month
1
>>> instant('2014-2').month
2
>>> instant('2014-2-3').month
2
"""
return self[1]
def period(self, unit, size = 1):
"""Create a new period starting at instant.
>>> instant(2014).period('month')
Period((u'month', Instant((2014, 1, 1)), 1))
>>> instant('2014-2').period('year', 2)
Period((u'year', Instant((2014, 2, 1)), 2))
>>> instant('2014-2-3').period('day', size = 2)
Period((u'day', Instant((2014, 2, 3)), 2))
"""
assert unit in (u'day', u'month', u'year'), 'Invalid unit: {} of type {}'.format(unit, type(unit))
assert isinstance(size, int) and size >= 1, 'Invalid size: {} of type {}'.format(size, type(size))
return Period((unicode(unit), self, size))
def offset(self, offset, unit):
"""Increment (or decrement) the given instant with offset units.
>>> instant(2014).offset(1, 'day')
Instant((2014, 1, 2))
>>> instant(2014).offset(1, 'month')
Instant((2014, 2, 1))
>>> instant(2014).offset(1, 'year')
Instant((2015, 1, 1))
>>> instant('2014-1-31').offset(1, 'day')
Instant((2014, 2, 1))
>>> instant('2014-1-31').offset(1, 'month')
Instant((2014, 2, 28))
>>> instant('2014-1-31').offset(1, 'year')
Instant((2015, 1, 31))
>>> instant('2011-2-28').offset(1, 'day')
Instant((2011, 3, 1))
>>> instant('2011-2-28').offset(1, 'month')
Instant((2011, 3, 28))
>>> instant('2012-2-29').offset(1, 'year')
Instant((2013, 2, 28))
>>> instant(2014).offset(-1, 'day')
Instant((2013, 12, 31))
>>> instant(2014).offset(-1, 'month')
Instant((2013, 12, 1))
>>> instant(2014).offset(-1, 'year')
Instant((2013, 1, 1))
>>> instant('2011-3-1').offset(-1, 'day')
Instant((2011, 2, 28))
>>> instant('2011-3-31').offset(-1, 'month')
Instant((2011, 2, 28))
>>> instant('2012-2-29').offset(-1, 'year')
Instant((2011, 2, 28))
>>> instant('2014-1-30').offset(3, 'day')
Instant((2014, 2, 2))
>>> instant('2014-10-2').offset(3, 'month')
Instant((2015, 1, 2))
>>> instant('2014-1-1').offset(3, 'year')
Instant((2017, 1, 1))
>>> instant(2014).offset(-3, 'day')
Instant((2013, 12, 29))
>>> instant(2014).offset(-3, 'month')
Instant((2013, 10, 1))
>>> instant(2014).offset(-3, 'year')
Instant((2011, 1, 1))
>>> instant(2014).offset('first-of', 'month')
Instant((2014, 1, 1))
>>> instant('2014-2').offset('first-of', 'month')
Instant((2014, 2, 1))
>>> instant('2014-2-3').offset('first-of', 'month')
Instant((2014, 2, 1))
>>> instant(2014).offset('first-of', 'year')
Instant((2014, 1, 1))
>>> instant('2014-2').offset('first-of', 'year')
Instant((2014, 1, 1))
>>> instant('2014-2-3').offset('first-of', 'year')
Instant((2014, 1, 1))
>>> instant(2014).offset('last-of', 'month')
Instant((2014, 1, 31))
>>> instant('2014-2').offset('last-of', 'month')
Instant((2014, 2, 28))
>>> instant('2012-2-3').offset('last-of', 'month')
Instant((2012, 2, 29))
>>> instant(2014).offset('last-of', 'year')
Instant((2014, 12, 31))
>>> instant('2014-2').offset('last-of', 'year')
Instant((2014, 12, 31))
>>> instant('2014-2-3').offset('last-of', 'year')
Instant((2014, 12, 31))
"""
year, month, day = self
if offset == 'first-of':
if unit == u'month':
day = 1
else:
assert unit == u'year', 'Invalid unit: {} of type {}'.format(unit, type(unit))
month = 1
day = 1
elif offset == 'last-of':
if unit == u'month':
day = calendar.monthrange(year, month)[1]
else:
assert unit == u'year', 'Invalid unit: {} of type {}'.format(unit, type(unit))
month = 12
day = 31
else:
assert isinstance(offset, int), 'Invalid offset: {} of type {}'.format(offset, type(offset))
if unit == u'day':
day += offset
if offset < 0:
while day < 1:
month -= 1
if month == 0:
year -= 1
month = 12
day += calendar.monthrange(year, month)[1]
elif offset > 0:
month_last_day = calendar.monthrange(year, month)[1]
while day > month_last_day:
month += 1
if month == 13:
year += 1
month = 1
day -= month_last_day
month_last_day = calendar.monthrange(year, month)[1]
elif unit == u'month':
month += offset
if offset < 0:
while month < 1:
year -= 1
month += 12
elif offset > 0:
while month > 12:
year += 1
month -= 12
month_last_day = calendar.monthrange(year, month)[1]
if day > month_last_day:
day = month_last_day
else:
assert unit == u'year', 'Invalid unit: {} of type {}'.format(unit, type(unit))
year += offset
# Handle february month of leap year.
month_last_day = calendar.monthrange(year, month)[1]
if day > month_last_day:
day = month_last_day
return self.__class__((year, month, day))
@property
def year(self):
"""Extract year from instant.
>>> instant(2014).year
2014
>>> instant('2014-2').year
2014
>>> instant('2014-2-3').year
2014
"""
return self[0]
class Period(tuple):
def __repr__(self):
"""Transform period to to its Python representation as a string.
>>> repr(period('year', 2014))
"Period((u'year', Instant((2014, 1, 1)), 1))"
>>> repr(period('month', '2014-2'))
"Period((u'month', Instant((2014, 2, 1)), 1))"
>>> repr(period('day', '2014-2-3'))
"Period((u'day', Instant((2014, 2, 3)), 1))"
"""
return '{}({})'.format(self.__class__.__name__, super(Period, self).__repr__())
def __str__(self):
"""Transform period to a string.
>>> unicode(period(u'year', 2014))
u'2014'
>>> unicode(period(u'month', 2014))
u'month:2014'
>>> unicode(period(u'day', 2014))
u'day:2014'
>>> unicode(period(u'year', '2014-2'))
u'year:2014-02'
>>> unicode(period(u'month', '2014-2'))
u'2014-02'
>>> unicode(period(u'day', '2014-2'))
u'day:2014-02'
>>> unicode(period(u'year', '2014-3-2'))
u'year:2014-03-02'
>>> unicode(period(u'month', '2014-3-2'))
u'month:2014-03-02'
>>> unicode(period(u'day', '2014-3-2'))
u'2014-03-02'
>>> unicode(period(u'year', 2012, size = 2))
u'2012:2'
>>> unicode(period(u'month', 2012, size = 2))
u'2012-01:2'
>>> unicode(period(u'day', 2012, size = 2))
u'2012-01-01:2'
>>> unicode(period(u'year', '2012-3', size = 2))
u'year:2012-03:2'
>>> unicode(period(u'month', '2012-3', size = 2))
u'2012-03:2'
>>> unicode(period(u'day', '2012-3', size = 2))
u'2012-03-01:2'
>>> unicode(period(u'year', '2012-3-3', size = 2))
u'year:2012-03-03:2'
>>> unicode(period(u'month', '2012-3-3', size = 2))
u'month:2012-03-03:2'
>>> unicode(period(u'day', '2012-3-3', size = 2))
u'2012-03-03:2'
"""
unit, start_instant, size = self
year, month, day = start_instant
if day == 1:
if month == 1 and (unit == u'day' and size == (366 if calendar.isleap(year) else 365)
or unit == u'month' and size == 12
or unit == u'year'):
start_instant = start_instant[:1]
if unit != u'year':
size = None
elif unit == u'day' and size == calendar.monthrange(year, month)[1] or unit in (u'month', u'year'):
start_instant = start_instant[:2]
if unit not in (u'month', u'year'):
size = None
if unit == u'day' and len(start_instant) == 3 \
or unit == u'month' and len(start_instant) == 2 \
or unit == u'year' and len(start_instant) == 1:
unit = None
start_str = u'-'.join(
unicode(fragment) if index == 0 else u'{:02d}'.format(fragment)
for index, fragment in enumerate(start_instant)
)
size_str = unicode(size) if size is not None and size > 1 else None
return u':'.join(
fragment
for fragment in (unit, start_str, size_str)
if fragment is not None
)
@property
def date(self):
assert self.size == 1, '"date" is undefined for a period of size > 1: {}'.format(self)
return self.start.date
@property
def days(self):
"""Count the number of days in period.
>>> period('day', 2014).days
365
>>> period('month', 2014).days
365
>>> period('year', 2014).days
365
>>> period('day', '2014-2').days
28
>>> period('month', '2014-2').days
28
>>> period('year', '2014-2').days
365
>>> period('day', '2014-2-3').days
1
>>> period('month', '2014-2-3').days
28
>>> period('year', '2014-2-3').days
365
"""
return (self.stop.date - self.start.date).days + 1
def intersection(self, start, stop):
if start is None and stop is None:
return self
period_start = self[1]
period_stop = self.stop
if start is None:
start = period_start
if stop is None:
stop = period_stop
if stop < period_start or period_stop < start:
return None
intersection_start = max(period_start, start)
intersection_stop = min(period_stop, stop)
if intersection_start == period_start and intersection_stop == period_stop:
return self
if intersection_start.day == 1 and intersection_start.month == 1 \
and intersection_stop.day == 31 and intersection_stop.month == 12:
return self.__class__((
u'year',
intersection_start,
intersection_stop.year - intersection_start.year + 1,
))
if intersection_start.day == 1 and intersection_stop.day == calendar.monthrange(intersection_stop.year,
intersection_stop.month)[1]:
return self.__class__((
u'month',
intersection_start,
((intersection_stop.year - intersection_start.year) * 12 + intersection_stop.month
- intersection_start.month + 1),
))
return self.__class__((
u'day',
intersection_start,
(intersection_stop.date - intersection_start.date).days + 1,
))
def offset(self, offset, unit = None):
"""Increment (or decrement) the given period with offset units.
>>> period('day', 2014).offset(1)
Period((u'day', Instant((2014, 1, 2)), 365))
>>> period('day', 2014).offset(1, 'day')
Period((u'day', Instant((2014, 1, 2)), 365))
>>> period('day', 2014).offset(1, 'month')
Period((u'day', Instant((2014, 2, 1)), 365))
>>> period('day', 2014).offset(1, 'year')
Period((u'day', Instant((2015, 1, 1)), 365))
>>> period('month', 2014).offset(1)
Period((u'month', Instant((2014, 2, 1)), 12))
>>> period('month', 2014).offset(1, 'day')
Period((u'month', Instant((2014, 1, 2)), 12))
>>> period('month', 2014).offset(1, 'month')
Period((u'month', Instant((2014, 2, 1)), 12))
>>> period('month', 2014).offset(1, 'year')
Period((u'month', Instant((2015, 1, 1)), 12))
>>> period('year', 2014).offset(1)
Period((u'year', Instant((2015, 1, 1)), 1))
>>> period('year', 2014).offset(1, 'day')
Period((u'year', Instant((2014, 1, 2)), 1))
>>> period('year', 2014).offset(1, 'month')
Period((u'year', Instant((2014, 2, 1)), 1))
>>> period('year', 2014).offset(1, 'year')
Period((u'year', Instant((2015, 1, 1)), 1))
>>> period('day', '2011-2-28').offset(1)
Period((u'day', Instant((2011, 3, 1)), 1))
>>> period('month', '2011-2-28').offset(1)
Period((u'month', Instant((2011, 3, 28)), 1))
>>> period('year', '2011-2-28').offset(1)
Period((u'year', Instant((2012, 2, 28)), 1))
>>> period('day', '2011-3-1').offset(-1)
Period((u'day', Instant((2011, 2, 28)), 1))
>>> period('month', '2011-3-1').offset(-1)
Period((u'month', Instant((2011, 2, 1)), 1))
>>> period('year', '2011-3-1').offset(-1)
Period((u'year', Instant((2010, 3, 1)), 1))
>>> period('day', '2014-1-30').offset(3)
Period((u'day', Instant((2014, 2, 2)), 1))
>>> period('month', '2014-1-30').offset(3)
Period((u'month', Instant((2014, 4, 30)), 1))
>>> period('year', '2014-1-30').offset(3)
Period((u'year', Instant((2017, 1, 30)), 1))
>>> period('day', 2014).offset(-3)
Period((u'day', Instant((2013, 12, 29)), 365))
>>> period('month', 2014).offset(-3)
Period((u'month', Instant((2013, 10, 1)), 12))
>>> period('year', 2014).offset(-3)
Period((u'year', Instant((2011, 1, 1)), 1))
>>> period('day', '2014-2-3').offset('first-of', 'month')
Period((u'day', Instant((2014, 2, 1)), 1))
>>> period('day', '2014-2-3').offset('first-of', 'year')
Period((u'day', Instant((2014, 1, 1)), 1))
>>> period('day', '2014-2-3', 4).offset('first-of', 'month')
Period((u'day', Instant((2014, 2, 1)), 4))
>>> period('day', '2014-2-3', 4).offset('first-of', 'year')
Period((u'day', Instant((2014, 1, 1)), 4))
>>> period('month', '2014-2-3').offset('first-of')
Period((u'month', Instant((2014, 2, 1)), 1))
>>> period('month', '2014-2-3').offset('first-of', 'month')
Period((u'month', Instant((2014, 2, 1)), 1))
>>> period('month', '2014-2-3').offset('first-of', 'year')
Period((u'month', Instant((2014, 1, 1)), 1))
>>> period('month', '2014-2-3', 4).offset('first-of')
Period((u'month', Instant((2014, 2, 1)), 4))
>>> period('month', '2014-2-3', 4).offset('first-of', 'month')
Period((u'month', Instant((2014, 2, 1)), 4))
>>> period('month', '2014-2-3', 4).offset('first-of', 'year')
Period((u'month', Instant((2014, 1, 1)), 4))
>>> period('year', 2014).offset('first-of')
Period((u'year', Instant((2014, 1, 1)), 1))
>>> period('year', 2014).offset('first-of', 'month')
Period((u'year', Instant((2014, 1, 1)), 1))
>>> period('year', 2014).offset('first-of', 'year')
Period((u'year', Instant((2014, 1, 1)), 1))
>>> period('year', '2014-2-3').offset('first-of')
Period((u'year', Instant((2014, 1, 1)), 1))
>>> period('year', '2014-2-3').offset('first-of', 'month')
Period((u'year', Instant((2014, 2, 1)), 1))
>>> period('year', '2014-2-3').offset('first-of', 'year')
Period((u'year', Instant((2014, 1, 1)), 1))
>>> period('day', '2014-2-3').offset('last-of', 'month')
Period((u'day', Instant((2014, 2, 28)), 1))
>>> period('day', '2014-2-3').offset('last-of', 'year')
Period((u'day', Instant((2014, 12, 31)), 1))
>>> period('day', '2014-2-3', 4).offset('last-of', 'month')
Period((u'day', Instant((2014, 2, 28)), 4))
>>> period('day', '2014-2-3', 4).offset('last-of', 'year')
Period((u'day', Instant((2014, 12, 31)), 4))
>>> period('month', '2014-2-3').offset('last-of')
Period((u'month', Instant((2014, 2, 28)), 1))
>>> period('month', '2014-2-3').offset('last-of', 'month')
Period((u'month', Instant((2014, 2, 28)), 1))
>>> period('month', '2014-2-3').offset('last-of', 'year')
Period((u'month', Instant((2014, 12, 31)), 1))
>>> period('month', '2014-2-3', 4).offset('last-of')
Period((u'month', Instant((2014, 2, 28)), 4))
>>> period('month', '2014-2-3', 4).offset('last-of', 'month')
Period((u'month', Instant((2014, 2, 28)), 4))
>>> period('month', '2014-2-3', 4).offset('last-of', 'year')
Period((u'month', Instant((2014, 12, 31)), 4))
>>> period('year', 2014).offset('last-of')
Period((u'year', Instant((2014, 12, 31)), 1))
>>> period('year', 2014).offset('last-of', 'month')
Period((u'year', Instant((2014, 1, 31)), 1))
>>> period('year', 2014).offset('last-of', 'year')
Period((u'year', Instant((2014, 12, 31)), 1))
>>> period('year', '2014-2-3').offset('last-of')
Period((u'year', Instant((2014, 12, 31)), 1))
>>> period('year', '2014-2-3').offset('last-of', 'month')
Period((u'year', Instant((2014, 2, 28)), 1))
>>> period('year', '2014-2-3').offset('last-of', 'year')
Period((u'year', Instant((2014, 12, 31)), 1))
"""
return self.__class__((self[0], self[1].offset(offset, self[0] if unit is None else unit), self[2]))
@property
def size(self):
"""Return the size of the period.
>>> period('month', '2012-2-29', 4).size
4
"""
return self[2]
@property
def size_in_months(self):
"""Return the size of the period in months.
>>> period('month', '2012-2-29', 4).size_in_months
4
>>> period('year', '2012', 1).size_in_months
12
"""
if (self[0] == MONTH):
return self[2]
else:
return self[2] * 12
@property
def start(self):
"""Return the first day of the period as an Instant instance.
>>> period('month', '2012-2-29', 4).start
Instant((2012, 2, 29))
"""
return self[1]
@property
def stop(self):
"""Return the last day of the period as an Instant instance.
>>> period('year', 2014).stop
Instant((2014, 12, 31))
>>> period('month', 2014).stop
Instant((2014, 12, 31))
>>> period('day', 2014).stop
Instant((2014, 12, 31))
>>> period('year', '2012-2-29').stop
Instant((2013, 2, 28))
>>> period('month', '2012-2-29').stop
Instant((2012, 3, 28))
>>> period('day', '2012-2-29').stop
Instant((2012, 2, 29))
>>> period('year', '2012-2-29', 2).stop
Instant((2014, 2, 28))
>>> period('month', '2012-2-29', 2).stop
Instant((2012, 4, 28))
>>> period('day', '2012-2-29', 2).stop
Instant((2012, 3, 1))
"""
unit, start_instant, size = self
year, month, day = start_instant
if unit == u'day':
if size > 1:
day += size - 1
month_last_day = calendar.monthrange(year, month)[1]
while day > month_last_day:
month += 1
if month == 13:
year += 1
month = 1
day -= month_last_day
month_last_day = calendar.monthrange(year, month)[1]
else:
if unit == u'month':
month += size
while month > 12:
year += 1
month -= 12
else:
assert unit == u'year', 'Invalid unit: {} of type {}'.format(unit, type(unit))
year += size
day -= 1
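# Stepping back one day can underflow into the previous month, or (for a
# start on Feb 29) land past the end of a shorter target month; both
# cases are normalized below.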
if day < 1:
month -= 1
if month == 0:
year -= 1
month = 12
day += calendar.monthrange(year, month)[1]
else:
month_last_day = calendar.monthrange(year, month)[1]
if day > month_last_day:
month += 1
if month == 13:
year += 1
month = 1
day -= month_last_day
return Instant((year, month, day))
def to_json_dict(self):
return collections.OrderedDict((
('unit', self[0]),
('start', unicode(self[1])),
('size', self[2]),
))
@property
def unit(self):
return self[0]
# Reference periods
@property
def last_3_months(self):
return self.this_month.start.period('month', 3).offset(-3)
@property
def last_month(self):
return self.this_month.offset(-1)
@property
def last_year(self):
return self.start.offset('first-of', 'year').period('year').offset(-1)
@property
def n_2(self):
return self.start.offset('first-of', 'year').period('year').offset(-2)
@property
def this_year(self):
return self.start.offset('first-of', 'year').period('year')
@property
def this_month(self):
return self.start.offset('first-of', 'month').period('month')
def instant(instant):
"""Return a new instant, aka a triple of integers (year, month, day).
>>> instant(2014)
Instant((2014, 1, 1))
>>> instant(u'2014')
Instant((2014, 1, 1))
>>> instant(u'2014-02')
Instant((2014, 2, 1))
>>> instant(u'2014-3-2')
Instant((2014, 3, 2))
>>> instant(instant(u'2014-3-2'))
Instant((2014, 3, 2))
>>> instant(period('month', u'2014-3-2'))
Instant((2014, 3, 2))
>>> instant(None)
"""
if instant is None:
return None
if isinstance(instant, basestring):
instant = Instant(
int(fragment)
for fragment in instant.split(u'-', 2)[:3]
)
elif isinstance(instant, datetime.date):
instant = Instant((instant.year, instant.month, instant.day))
elif isinstance(instant, int):
instant = (instant,)
elif isinstance(instant, list):
assert 1 <= len(instant) <= 3
instant = tuple(instant)
elif isinstance(instant, Period):
instant = instant.start
else:
assert isinstance(instant, tuple), instant
assert 1 <= len(instant) <= 3
if len(instant) == 1:
return Instant((instant[0], 1, 1))
if len(instant) == 2:
return Instant((instant[0], instant[1], 1))
return Instant(instant)
def instant_date(instant):
if instant is None:
return None
instant_date = date_by_instant_cache.get(instant)
if instant_date is None:
date_by_instant_cache[instant] = instant_date = datetime.date(*instant)
return instant_date
def period(value, start = None, size = None):
"""Return a new period, aka a triple (unit, start_instant, size).
>>> period(u'2014')
Period((u'year', Instant((2014, 1, 1)), 1))
>>> period(u'2014:2')
Period((u'year', Instant((2014, 1, 1)), 2))
>>> period(u'2014-2')
Period((u'month', Instant((2014, 2, 1)), 1))
>>> period(u'2014-2:2')
Period((u'month', Instant((2014, 2, 1)), 2))
>>> period(u'2014-2-3')
Period((u'day', Instant((2014, 2, 3)), 1))
>>> period(u'2014-2-3:2')
Period((u'day', Instant((2014, 2, 3)), 2))
>>> period(u'year:2014')
Period((u'year', Instant((2014, 1, 1)), 1))
>>> period(u'month:2014')
Period((u'month', Instant((2014, 1, 1)), 12))
>>> period(u'day:2014')
Period((u'day', Instant((2014, 1, 1)), 365))
>>> period(u'year:2014-2')
Period((u'year', Instant((2014, 2, 1)), 1))
>>> period(u'month:2014-2')
Period((u'month', Instant((2014, 2, 1)), 1))
>>> period(u'day:2014-2')
Period((u'day', Instant((2014, 2, 1)), 28))
>>> period(u'year:2014-2-3')
Period((u'year', Instant((2014, 2, 3)), 1))
>>> period(u'month:2014-2-3')
Period((u'month', Instant((2014, 2, 3)), 1))
>>> period(u'day:2014-2-3')
Period((u'day', Instant((2014, 2, 3)), 1))
>>> period(u'year:2014-2-3:2')
Period((u'year', Instant((2014, 2, 3)), 2))
>>> period(u'month:2014-2-3:2')
Period((u'month', Instant((2014, 2, 3)), 2))
>>> period(u'day:2014-2-3:2')
Period((u'day', Instant((2014, 2, 3)), 2))
>>> period('year', 2014)
Period((u'year', Instant((2014, 1, 1)), 1))
>>> period('month', 2014)
Period((u'month', Instant((2014, 1, 1)), 12))
>>> period('day', 2014)
Period((u'day', Instant((2014, 1, 1)), 365))
>>> period('year', u'2014')
Period((u'year', Instant((2014, 1, 1)), 1))
>>> period('month', u'2014')
Period((u'month', Instant((2014, 1, 1)), 12))
>>> period('day', u'2014')
Period((u'day', Instant((2014, 1, 1)), 365))
>>> period('year', u'2014-02')
Period((u'year', Instant((2014, 2, 1)), 1))
>>> period('month', u'2014-02')
Period((u'month', Instant((2014, 2, 1)), 1))
>>> period('day', u'2014-02')
Period((u'day', Instant((2014, 2, 1)), 28))
>>> period('year', u'2014-3-2')
Period((u'year', Instant((2014, 3, 2)), 1))
>>> period('month', u'2014-3-2')
Period((u'month', Instant((2014, 3, 2)), 1))
>>> period('day', u'2014-3-2')
Period((u'day', Instant((2014, 3, 2)), 1))
>>> period('year', u'2014-3-2', size = 2)
Period((u'year', Instant((2014, 3, 2)), 2))
>>> period('month', u'2014-3-2', size = 2)
Period((u'month', Instant((2014, 3, 2)), 2))
>>> period('day', u'2014-3-2', size = 2)
Period((u'day', Instant((2014, 3, 2)), 2))
>>> period('month', instant(u'2014-3-2'), size = 2)
Period((u'month', Instant((2014, 3, 2)), 2))
>>> period('month', period(u'year', u'2014-3-2'), size = 2)
Period((u'month', Instant((2014, 3, 2)), 2))
"""
if not isinstance(value, basestring) or value not in (u'day', u'month', u'year'):
assert start is None, start
assert size is None, size
return conv.check(json_or_python_to_period)(value)
unit = unicode(value)
assert size is None or isinstance(size, int) and size > 0, size
if isinstance(start, basestring):
start = tuple(
int(fragment)
for fragment in start.split(u'-', 2)[:3]
)
elif isinstance(start, datetime.date):
start = (start.year, start.month, start.day)
elif isinstance(start, int):
start = (start,)
elif isinstance(start, list):
assert 1 <= len(start) <= 3
start = tuple(start)
elif isinstance(start, Period):
start = start.start
else:
assert isinstance(start, tuple)
assert 1 <= len(start) <= 3
if len(start) == 1:
start = Instant((start[0], 1, 1))
if size is None:
if unit == u'day':
size = 366 if calendar.isleap(start[0]) else 365
elif unit == u'month':
size = 12
else:
size = 1
elif len(start) == 2:
start = Instant((start[0], start[1], 1))
if size is None:
if unit == u'day':
size = calendar.monthrange(start[0], start[1])[1]
else:
size = 1
else:
start = Instant(start)
if size is None:
size = 1
return Period((unit, start, size))
# Level-1 converters
def input_to_period_tuple(value, state = None):
"""Convert an input string to a period tuple.
.. note:: This function doesn't return a period, but a tuple that can be used to construct a period.
>>> input_to_period_tuple(u'2014')
((u'year', 2014), None)
>>> input_to_period_tuple(u'2014:2')
((u'year', 2014, 2), None)
>>> input_to_period_tuple(u'2014-2')
((u'month', (2014, 2)), None)
>>> input_to_period_tuple(u'2014-3:12')
((u'month', (2014, 3), 12), None)
>>> input_to_period_tuple(u'2014-2-3')
((u'day', (2014, 2, 3)), None)
>>> input_to_period_tuple(u'2014-3-4:2')
((u'day', (2014, 3, 4), 2), None)
>>> input_to_period_tuple(u'year:2014')
((u'year', u'2014'), None)
>>> input_to_period_tuple(u'year:2014:2')
((u'year', u'2014', u'2'), None)
>>> input_to_period_tuple(u'year:2014-2:2')
((u'year', u'2014-2', u'2'), None)
"""
if value is None:
return value, None
if state is None:
state = conv.default_state
split_value = tuple(
clean_fragment
for clean_fragment in (
fragment.strip()
for fragment in value.split(u':')
)
if clean_fragment
)
if not split_value:
return None, None
if len(split_value) == 1:
split_value = tuple(
clean_fragment
for clean_fragment in (
fragment.strip()
for fragment in split_value[0].split(u'-')
)
if clean_fragment
)
if len(split_value) == 1:
return conv.pipe(
conv.input_to_strict_int,
conv.test_greater_or_equal(0),
conv.function(lambda year: (u'year', year)),
)(split_value[0], state = state)
if len(split_value) == 2:
return conv.pipe(
conv.struct(
(
conv.pipe(
conv.input_to_strict_int,
conv.test_greater_or_equal(0),
),
conv.pipe(
conv.input_to_strict_int,
conv.test_between(1, 12),
),
),
),
conv.function(lambda month_tuple: (u'month', month_tuple)),
)(split_value, state = state)
if len(split_value) == 3:
return conv.pipe(
conv.struct(
(
conv.pipe(
conv.input_to_strict_int,
conv.test_greater_or_equal(0),
),
conv.pipe(
conv.input_to_strict_int,
conv.test_between(1, 12),
),
conv.pipe(
conv.input_to_strict_int,
conv.test_between(1, 31),
),
),
),
conv.function(lambda day_tuple: (u'day', day_tuple)),
)(split_value, state = state)
return split_value, state._(u'Instant string contains too many "-" for a year, a month or a day')
if len(split_value) == 2:
split_start = tuple(
clean_fragment
for clean_fragment in (
fragment.strip()
for fragment in split_value[0].split(u'-')
)
if clean_fragment
)
size, error = conv.input_to_int(split_value[1], state = state)
if error is None:
if len(split_start) == 1:
start, error = conv.pipe(
conv.input_to_strict_int,
conv.test_greater_or_equal(0),
)(split_start[0], state = state)
if error is None:
return (u'year', start, size), None
elif len(split_start) == 2:
start, error = conv.struct(
(
conv.pipe(
conv.input_to_strict_int,
conv.test_greater_or_equal(0),
),
conv.pipe(
conv.input_to_strict_int,
conv.test_between(1, 12),
),
),
)(split_start, state = state)
if error is None:
return (u'month', start, size), None
elif len(split_start) == 3:
start, error = conv.struct(
(
conv.pipe(
conv.input_to_strict_int,
conv.test_greater_or_equal(0),
),
conv.pipe(
conv.input_to_strict_int,
conv.test_between(1, 12),
),
conv.pipe(
conv.input_to_strict_int,
conv.test_between(1, 31),
),
),
)(split_start, state = state)
if error is None:
return (u'day', start, size), None
return split_value, None
def json_or_python_to_instant_tuple(value, state = None):
"""Convert a JSON or Python object to an instant tuple.
.. note:: This function doesn't return an instant, but a tuple that can be used to construct an instant.
>>> json_or_python_to_instant_tuple('2014')
((2014,), None)
>>> json_or_python_to_instant_tuple('2014-2')
((2014, 2), None)
>>> json_or_python_to_instant_tuple('2014-2-3')
((2014, 2, 3), None)
>>> json_or_python_to_instant_tuple(datetime.date(2014, 2, 3))
((2014, 2, 3), None)
>>> json_or_python_to_instant_tuple([2014])
((2014,), None)
>>> json_or_python_to_instant_tuple([2014, 2])
((2014, 2), None)
>>> json_or_python_to_instant_tuple([2014, 2, 3])
((2014, 2, 3), None)
>>> json_or_python_to_instant_tuple(2014)
((2014,), None)
>>> json_or_python_to_instant_tuple((2014,))
((2014,), None)
>>> json_or_python_to_instant_tuple((2014, 2))
((2014, 2), None)
>>> json_or_python_to_instant_tuple((2014, 2, 3))
((2014, 2, 3), None)
"""
if value is None:
return value, None
if state is None:
state = conv.default_state
if isinstance(value, basestring):
if year_or_month_or_day_re.match(value) is None:
return value, state._(u'Invalid date string')
instant = tuple(
int(fragment)
for fragment in value.split(u'-', 2)
)
elif isinstance(value, datetime.date):
instant = (value.year, value.month, value.day)
elif isinstance(value, int):
instant = (value,)
elif isinstance(value, list):
if not (1 <= len(value) <= 3):
return value, state._(u'Invalid size for date list')
instant = tuple(value)
else:
if not isinstance(value, tuple):
return value, state._(u'Invalid type')
if not (1 <= len(value) <= 3):
return value, state._(u'Invalid size for date tuple')
instant = value
return instant, None
def make_json_or_python_to_period(min_date = None, max_date = None):
"""Return a converter that creates a period from a JSON or Python object.
>>> json_or_python_to_period(u'2014')
(Period((u'year', Instant((2014, 1, 1)), 1)), None)
>>> json_or_python_to_period(u'2014:2')
(Period((u'year', Instant((2014, 1, 1)), 2)), None)
>>> json_or_python_to_period(u'2014-2')
(Period((u'month', Instant((2014, 2, 1)), 1)), None)
>>> json_or_python_to_period(u'2014-2:2')
(Period((u'month', Instant((2014, 2, 1)), 2)), None)
>>> json_or_python_to_period(u'2014-2-3')
(Period((u'day', Instant((2014, 2, 3)), 1)), None)
>>> json_or_python_to_period(u'2014-2-3:2')
(Period((u'day', Instant((2014, 2, 3)), 2)), None)
>>> json_or_python_to_period(u'year:2014')
(Period((u'year', Instant((2014, 1, 1)), 1)), None)
>>> json_or_python_to_period(u'month:2014')
(Period((u'month', Instant((2014, 1, 1)), 12)), None)
>>> json_or_python_to_period(u'day:2014')
(Period((u'day', Instant((2014, 1, 1)), 365)), None)
>>> json_or_python_to_period(u'year:2014-2')
(Period((u'year', Instant((2014, 2, 1)), 1)), None)
>>> json_or_python_to_period(u'month:2014-2')
(Period((u'month', Instant((2014, 2, 1)), 1)), None)
>>> json_or_python_to_period(u'day:2014-2')
(Period((u'day', Instant((2014, 2, 1)), 28)), None)
>>> json_or_python_to_period(u'year:2014-2-3')
(Period((u'year', Instant((2014, 2, 3)), 1)), None)
>>> json_or_python_to_period(u'month:2014-2-3')
(Period((u'month', Instant((2014, 2, 3)), 1)), None)
>>> json_or_python_to_period(u'day:2014-2-3')
(Period((u'day', Instant((2014, 2, 3)), 1)), None)
>>> json_or_python_to_period(u'year:2014-2-3:2')
(Period((u'year', Instant((2014, 2, 3)), 2)), None)
>>> json_or_python_to_period(u'month:2014-2-3:2')
(Period((u'month', Instant((2014, 2, 3)), 2)), None)
>>> json_or_python_to_period(u'day:2014-2-3:2')
(Period((u'day', Instant((2014, 2, 3)), 2)), None)
"""
min_instant = (1870, 1, 1) if min_date is None else (min_date.year, min_date.month, min_date.day)
max_instant = (2099, 12, 31) if max_date is None else (max_date.year, max_date.month, max_date.day)
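# The pipeline below accepts period strings (parsed via input_to_period_tuple),
# bare non-negative ints (read as years), dicts with 'unit'/'start'/'size'
# keys, and (unit, start[, size]) lists or tuples, then range-checks the
# resulting start instant against [min_instant, max_instant].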
return conv.pipe(
conv.condition(
conv.test_isinstance(basestring),
input_to_period_tuple,
conv.condition(
conv.test_isinstance(int),
conv.pipe(
conv.test_greater_or_equal(0),
conv.function(lambda year: (u'year', year)),
),
),
),
conv.condition(
conv.test_isinstance(dict),
conv.pipe(
conv.struct(
dict(
size = conv.pipe(
conv.test_isinstance((basestring, int)),
conv.anything_to_int,
conv.test_greater_or_equal(1),
),
start = conv.pipe(
json_or_python_to_instant_tuple,
conv.not_none,
),
unit = conv.pipe(
conv.test_isinstance(basestring),
conv.input_to_slug,
conv.test_in((u'day', u'month', u'year')),
conv.not_none,
),
),
),
conv.function(lambda value: period(value['unit'], value['start'], value['size'])),
),
conv.pipe(
conv.test_isinstance((list, tuple)),
conv.test(lambda period_tuple: 2 <= len(period_tuple) <= 3, error = N_(u'Invalid period tuple')),
conv.function(lambda period_tuple: (tuple(period_tuple) + (None,))[:3]),
conv.struct(
(
# unit
conv.pipe(
conv.test_isinstance(basestring),
conv.input_to_slug,
conv.test_in((u'day', u'month', u'year')),
conv.not_none,
),
# start
conv.pipe(
json_or_python_to_instant_tuple,
conv.not_none,
),
# size
conv.pipe(
conv.test_isinstance((basestring, int)),
conv.anything_to_int,
conv.test_greater_or_equal(1),
),
),
),
conv.function(lambda value: period(*value)),
),
),
conv.struct(
Period((
# unit
conv.noop,
# start
conv.test_between(min_instant, max_instant),
# size
conv.noop,
)),
),
)
# Level-2 converters
json_or_python_to_period = make_json_or_python_to_period()
|
adrienpacifico/openfisca-core
|
openfisca_core/periods.py
|
Python
|
agpl-3.0
| 43,653
|
import numpy as np
from scipy import sparse
from ..affine import astransform, affine_transform
def difference_transform(X, order=1, sorted=False,
transform=False):
"""
Compute the divided difference matrix for X
after sorting X.
Parameters
----------
X: np.array, np.float, ndim=1
order: int
What order of difference should we compute?
sorted: bool
Is X sorted?
transform: bool
If True, return a linear_transform rather
than a sparse matrix.
Returns
-------
D: np.array, ndim=2, shape=(n-order, n)
Matrix of divided differences of sorted X.
"""
if not sorted:
X = np.sort(X)
X = np.asarray(X)
n = X.shape[0]
Dfinal = np.identity(n)
for j in range(1, order+1):
D = (-np.identity(n-j+1)+np.diag(np.ones(n-j),k=1))[:-1]
steps = X[j:]-X[:-j]
inv_steps = np.zeros(steps.shape)
inv_steps[steps != 0] = 1. / steps[steps != 0]
D = np.dot(np.diag(inv_steps), D)
Dfinal = np.dot(D, Dfinal)
if not transform:
return sparse.csr_matrix(Dfinal)
return astransform(Dfinal)
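# Minimal usage sketch (illustrative, not part of the original module): for
# order=1 on a sorted grid, each row of the returned matrix holds -1/step and
# 1/step, so that D.dot(X) gives the slopes between consecutive knots.
#
#     X = np.array([0., 1., 3., 6.])
#     difference_transform(X, order=1, sorted=True).toarray()
#     # [[-1.     1.     0.     0.   ]
#     #  [ 0.    -0.5    0.5    0.   ]
#     #  [ 0.     0.    -0.333  0.333]]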
class trend_filter(affine_transform):
def __init__(self, m, order=1, knots=None, sorted=False):
self.m = m
self.order = order
if knots is None:
knots = np.arange(m)
else:
knots = np.sort(knots)
self.knots = knots
self.steps = knots[1:] - knots[:-1]
self.linear_transform = difference_transform(knots, order=order, sorted=True,
transform=True)
self.affine_offset = None
self.input_shape = self.linear_transform.input_shape
self.output_shape = self.linear_transform.output_shape
def linear_map(self, x):
return self.linear_transform.linear_map(x)
def affine_map(self, x):
return self.linear_map(x)
def offset_map(self, x):
return x
def adjoint_map(self, x):
return self.linear_transform.adjoint_map(x)
class trend_filter_inverse(affine_transform):
def __init__(self, m, order=1, knots=None, sorted=False):
self.m = m
self.order = order
if order != 1:
raise ValueError('pinv only worked out for first order')
if knots is None:
knots = np.arange(m)
else:
knots = np.sort(knots)
self.knots = knots
self.steps = knots[1:] - knots[:-1]
dtransform = difference_transform(knots, order=order, sorted=True,
transform=True)
self.affine_offset = None
self.output_shape = dtransform.input_shape
self.input_shape = dtransform.output_shape
def linear_map(self, x):
if x.ndim == 1:
v = np.zeros(self.m)
v[1:] = np.cumsum(x * self.steps)
v -= v.mean()
return v
elif x.ndim == 2:
# assuming m is the first axis
v = np.zeros((self.m, x.shape[1]))
v[1:] = np.cumsum(x * self.steps[:,np.newaxis], axis=0)
v -= v.mean(0)
return v
def affine_map(self, x):
return self.linear_map(x)
def offset_map(self, x):
return x
def adjoint_map(self, x):
if x.ndim == 1:
x = x - x.mean(0)
C = np.cumsum(x[1:][::-1])[::-1]
return C * self.steps
if x.ndim == 2:
# assuming m is the first axis
x = x - x.mean(0)[np.newaxis,:]
# Mirror the 1-d branch: accumulate along the m axis (axis 0), not axis 1.
C = np.cumsum(x[1:][::-1], axis=0)[::-1]
return C * self.steps[:,np.newaxis]
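# Minimal usage sketch (illustrative; assumes the astransform wrapper exposes
# linear_map as elsewhere in regreg.affine):
#
#     tf = trend_filter(5, order=1)    # knots default to 0..4, unit steps
#     tf.linear_map(np.arange(5.))     # a perfectly linear signal...
#     # array([ 1.,  1.,  1.,  1.])    # ...maps to a constant slope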
|
klingebj/regreg
|
code/regreg/affine/fused_lasso.py
|
Python
|
bsd-3-clause
| 3,706
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function, absolute_import
import unittest
from whispy_lispy import lexer
from whispy_lispy import cst
from ..constructors import *
import whispy_lispy.exceptions
def like_tokens(obj):
"""Wraps all the objects in the nested list (or the single object) with
a cst.Token
"""
if not isinstance(obj, list):
return cst.Token(obj)
result = []
for elem in obj:
if not isinstance(elem, list):
result.append(cst.Token(elem))
else:
result.append(like_tokens(elem))
return result
class BaseLexerTestCase(unittest.TestCase):
def assertEqualTokens(self, second, first, msg=None):
"""Like assertEqual, but wraps all the values with a cst.Token before
comparing equality - because the lexer returns cst.Tokens and not
raw python values now
"""
return self.assertEqual(second, like_tokens(first), msg)
# For easily instantiating the nodes, and tokens
def n(value):
"""Return an ConcreteSyntaxNode """
return create_node_type(value, cst.ConcreteSyntaxNode)
def rn(value):
"""Return the root ConcreteSyntaxNode """
return create_node_type(value, cst.RootConcreteSyntaxnode)
def create_node_type(value, node_cls):
if isinstance(value, tuple):
return node_cls(value)
else:
return node_cls((value,))
t = cst.Token
d = cst.DecrementNesting()
i = cst.IncrementNesting()
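# With these shorthands, a flat token stream such as [i, t('a'), t(1), d]
# reads as the lexed form of the source text "(a 1)".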
class FlatLexerTestCase(BaseLexerTestCase):
def test_parses_empty_text(self):
self.assertEqual(lexer.get_flat_token_list(''), [])
def test_parses_known_types(self):
self.assertSequenceEqual(
lexer.get_flat_token_list(r'a 2 4.4 b #t #f "as\"df"'), [
t('a'),
t(2),
t(4.4),
t('b'),
t(True),
t(False),
t(r'"as\"df"')])
def test_parses_multiline_strings(self):
tokens = lexer.get_flat_token_list(r'''
"a
\"b
c"
''')
expected_value = '"a\n \\"b\n c"'
self.assertEqual(tokens, [t(expected_value)])
def test_parses_nested_known_types(self):
self.assertSequenceEqual(
lexer.get_flat_token_list(
'(a b) (#f d) (e f (g (h 1 2)))'),
[i, t('a'), t('b'), d, i, t(False), t('d'), d, i, t('e'),
t('f'), i, t('g'), i, t('h'), t(1), t(2), d, d, d]
)
def test_omits_newline(self):
self.assertSequenceEqual(
lexer.get_flat_token_list('\n\t \na\t\t\nb\n\n \n(1\n 2\n)'),
[t('a'), t('b'), i, t(1), t(2), d])
def test_parses_quote(self):
self.assertSequenceEqual(
lexer.get_flat_token_list("(def x '(a 1 2))"),
[i, t('def'), t('x'), t('\''), i, t('a'), t(1), t(2), d, d]
)
def test_accepts_non_matching_parentheses(self):
self.assertSequenceEqual(
lexer.get_flat_token_list('))a b ))('),
[d, d, t('a'), t('b'), d, d, i])
class ConcreteSyntaxTreeTestCase(unittest.TestCase):
def test_empty_token_list(self):
self.assertEqual(lexer.get_concrete_syntax_tree([]), rn(()))
def test_single_element(self):
self.assertEqual(
lexer.get_concrete_syntax_tree([t('_a')]),
rn(n('_a'))
)
def test_simple_atom(self):
self.assertEqual(
lexer.get_concrete_syntax_tree([i, t('a_'), t('b'), d]),
rn(n((n('a_'), n('b'))))
)
def test_2_top_level_nodes_and_2_level_nesting(self):
self.assertEqual(
# (def x (sum 1 2)) 4
lexer.get_concrete_syntax_tree(
[i, t('def'), t('x'), i, t('sum'), t(1), t(2), d, d, t(4)]
),
rn((n((n('def'), n('x'), n((n('sum'), n(1), n(2))))), n(4)))
)
def test_5_nesting_levels_and_2_outmost_nodes(self):
actual = lexer.get_concrete_syntax_tree(
# 4 (def z (sum 5 6 ((lambda (x) (sum 7 y)) 1)))
[t(4), i, t('def'), t('z'), i, t('sum'), t(5), t(6), i, i, t('lambda'), i, t('x'), d, i, t('sum'), t(7), t('y'), d, d, t(1), d, d, d] # noqa
)
expected = rn((
n(4),
n((
n('def'),
n('z'),
n((
n('sum'),
n(5),
n(6),
n((
n((
n('lambda'),
n(n('x')),
n((
n('sum'),
n(7),
n('y'))),
)),
n(1)
))))))))
self.assertEqual(actual, expected)
def test_blow_up_if_too_few_closing_parentheses(self):
self.assertRaises(
whispy_lispy.exceptions.WhispyLispySyntaxError,
lexer.get_concrete_syntax_tree,
[i]
)
def test_blow_up_on_too_few_opening_parentheses(self):
self.assertRaises(
whispy_lispy.exceptions.WhispyLispySyntaxError,
lexer.get_concrete_syntax_tree,
[d]
)
def test_syntax_error_when_parentheses_mismatch(self):
self.assertRaises(
whispy_lispy.exceptions.WhispyLispySyntaxError,
lexer.get_concrete_syntax_tree,
[d, t(0), i]
)
def test_produces_lists_with_literals_on_first_position(self):
self.assertEqual(
lexer.get_concrete_syntax_tree([i, t(1), d]),
cst.RootConcreteSyntaxnode((
cst.ConcreteSyntaxNode((
cst.ConcreteSyntaxNode((1,)),)),)))
def test_alphanumeric_names(self):
self.assertEqual(
lexer.get_flat_token_list('f1__a444_a'),
[cst.Token('f1__a444_a')]
)
def test_weird_character_combinations(self):
self.assertRaises(
whispy_lispy.exceptions.WhispyLispySyntaxError,
lexer.get_flat_token_list,
'f1__a. 444_a')
def test_not_so_weird_character_combination(self):
self.assertEqual(
lexer.get_flat_token_list('1.1 1.1 1.1 1.1'),
[cst.Token(1.1), cst.Token(1.1), cst.Token(1.1), cst.Token(1.1)]
)
# Created to fix a bug
def test_factorial_cst(self):
text = """(def
(fact n)
(cond
((= n 1) 1)
(
#t
( * n
(fact (sub n 1)) ) ) ) )"""
actual_cst = lexer.get_concrete_syntax_tree(lexer.get_flat_token_list(text))
expected_cst = c_r(
c_n(
c_n('def'),
c_n(
c_n('fact'),
c_n('n')),
c_n(
c_n('cond'),
c_n(
c_n(
c_n('='),
c_n('n'),
c_n(1)),
c_n(1)),
c_n(
c_n(True),
c_n(
c_n('*'),
c_n('n'),
c_n(
c_n('fact'),
c_n(
c_n('sub'),
c_n('n'),
c_n(1))))))))
self.assertEqual(actual_cst, expected_cst)
|
vladiibine/whispy_lispy
|
tests/test_lexer/__init__.py
|
Python
|
mit
| 7,832
|
#!/usr/bin/python
# Copyright (C) 2010, 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit tests for printing.py."""
import optparse
import StringIO
import time
import unittest
from webkitpy.common.host_mock import MockHost
from webkitpy.common.system import logtesting
from webkitpy.layout_tests import port
from webkitpy.layout_tests.controllers import manager
from webkitpy.layout_tests.models import result_summary
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
from webkitpy.layout_tests.models import test_results
from webkitpy.layout_tests.views import printing
def get_options(args):
print_options = printing.print_options()
option_parser = optparse.OptionParser(option_list=print_options)
return option_parser.parse_args(args)
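# e.g. (illustrative): options, args = get_options(['--print', 'everything'])
# yields options.print_options == 'everything'.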
class TestUtilityFunctions(unittest.TestCase):
def test_print_options(self):
options, args = get_options([])
self.assertTrue(options is not None)
def test_parse_print_options(self):
def test_switches(args, expected_switches_str, verbose=False):
options, args = get_options(args)
if expected_switches_str:
expected_switches = set(expected_switches_str.split(','))
else:
expected_switches = set()
switches = printing.parse_print_options(options.print_options,
verbose)
self.assertEqual(expected_switches, switches)
# test that we default to the default set of switches
test_switches([], printing.PRINT_DEFAULT)
# test that verbose defaults to everything
test_switches([], printing.PRINT_EVERYTHING, verbose=True)
# test that --print default does what it's supposed to
test_switches(['--print', 'default'], printing.PRINT_DEFAULT)
# test that --print nothing does what it's supposed to
test_switches(['--print', 'nothing'], None)
# test that --print everything does what it's supposed to
test_switches(['--print', 'everything'], printing.PRINT_EVERYTHING)
# this tests that '--print X' overrides '--verbose'
test_switches(['--print', 'actual'], 'actual', verbose=True)
class Testprinter(unittest.TestCase):
def assertEmpty(self, stream):
self.assertFalse(stream.getvalue())
def assertNotEmpty(self, stream):
self.assertTrue(stream.getvalue())
def assertWritten(self, stream, contents):
self.assertEquals(stream.buflist, contents)
def reset(self, stream):
stream.buflist = []
stream.buf = ''
def get_printer(self, args=None, tty=False):
args = args or []
printing_options = printing.print_options()
option_parser = optparse.OptionParser(option_list=printing_options)
options, args = option_parser.parse_args(args)
host = MockHost()
self._port = host.port_factory.get('test', options)
nproc = 2
regular_output = StringIO.StringIO()
regular_output.isatty = lambda: tty
buildbot_output = StringIO.StringIO()
printer = printing.Printer(self._port, options, regular_output, buildbot_output)
return printer, regular_output, buildbot_output
def get_result(self, test_name, result_type=test_expectations.PASS, run_time=0):
failures = []
if result_type == test_expectations.TIMEOUT:
failures = [test_failures.FailureTimeout()]
elif result_type == test_expectations.CRASH:
failures = [test_failures.FailureCrash()]
return test_results.TestResult(test_name, failures=failures, test_run_time=run_time)
def get_result_summary(self, test_names, expectations_str):
expectations = test_expectations.TestExpectations(
self._port, test_names, expectations_str,
self._port.test_configuration(),
is_lint_mode=False)
rs = result_summary.ResultSummary(expectations, test_names)
return test_names, rs, expectations
def test_help_printer(self):
# Here and below we'll call the "regular" printer err and the
# buildbot printer out; this corresponds to how things run on the
# bots with stderr and stdout.
printer, err, out = self.get_printer()
# This routine should print something to stdout. testing what it is
# is kind of pointless.
printer.help_printing()
self.assertNotEmpty(err)
self.assertEmpty(out)
def do_switch_tests(self, method_name, switch, to_buildbot,
message='hello', exp_err=None, exp_bot=None):
def do_helper(method_name, switch, message, exp_err, exp_bot):
printer, err, bot = self.get_printer(['--print', switch], tty=True)
getattr(printer, method_name)(message)
self.assertEqual(err.buflist, exp_err)
self.assertEqual(bot.buflist, exp_bot)
if to_buildbot:
if exp_err is None:
exp_err = []
if exp_bot is None:
exp_bot = [message + "\n"]
else:
if exp_err is None:
exp_err = [message + "\n"]
if exp_bot is None:
exp_bot = []
do_helper(method_name, 'nothing', 'hello', [], [])
do_helper(method_name, switch, 'hello', exp_err, exp_bot)
do_helper(method_name, 'everything', 'hello', exp_err, exp_bot)
def test_configure_and_cleanup(self):
# This test verifies that calling cleanup repeatedly and deleting
# the object is safe.
printer, err, out = self.get_printer(['--print', 'everything'])
printer.cleanup()
printer.cleanup()
printer = None
def test_print_actual(self):
# Actual results need to be logged to the buildbot's stream.
self.do_switch_tests('print_actual', 'actual', to_buildbot=True)
def test_print_actual_buildbot(self):
# FIXME: Test that the format of the actual results matches what the
# buildbot is expecting.
pass
def test_print_config(self):
self.do_switch_tests('print_config', 'config', to_buildbot=False)
def test_print_expected(self):
self.do_switch_tests('print_expected', 'expected', to_buildbot=False)
def test_print_timing(self):
self.do_switch_tests('print_timing', 'timing', to_buildbot=False)
def test_print_update(self):
# Note that there shouldn't be a carriage return here; updates()
# are meant to be overwritten.
self.do_switch_tests('print_update', 'updates', to_buildbot=False,
message='hello', exp_err=['hello'])
def test_print_one_line_summary(self):
printer, err, out = self.get_printer(['--print', 'nothing'])
printer.print_one_line_summary(1, 1, 0)
self.assertEmpty(err)
printer, err, out = self.get_printer(['--print', 'one-line-summary'])
printer.print_one_line_summary(1, 1, 0)
self.assertWritten(err, ["All 1 tests ran as expected.\n", "\n"])
printer, err, out = self.get_printer(['--print', 'everything'])
printer.print_one_line_summary(1, 1, 0)
self.assertWritten(err, ["All 1 tests ran as expected.\n", "\n"])
printer, err, out = self.get_printer(['--print', 'everything'])
printer.print_one_line_summary(2, 1, 1)
self.assertWritten(err, ["1 test ran as expected, 1 didn't:\n", "\n"])
printer, err, out = self.get_printer(['--print', 'everything'])
printer.print_one_line_summary(3, 2, 1)
self.assertWritten(err, ["2 tests ran as expected, 1 didn't:\n", "\n"])
printer, err, out = self.get_printer(['--print', 'everything'])
printer.print_one_line_summary(3, 2, 0)
self.assertWritten(err, ['\n', "2 tests ran as expected (1 didn't run).\n", '\n'])
def test_print_test_result(self):
# Note here that we don't use meaningful exp_str and got_str values;
# the actual contents of the string are treated opaquely by
# print_test_result() when tracing, and usually we don't want
# to test what exactly is printed, just that something
# was printed (or that nothing was printed).
#
# FIXME: this is actually some goofy layering; it would be nice
# we could refactor it so that the args weren't redundant. Maybe
# the TestResult should contain what was expected, and the
# strings could be derived from the TestResult?
printer, err, out = self.get_printer(['--print', 'nothing'])
result = self.get_result('passes/image.html')
printer.print_test_result(result, expected=False, exp_str='',
got_str='')
self.assertEmpty(err)
printer, err, out = self.get_printer(['--print', 'unexpected'])
printer.print_test_result(result, expected=True, exp_str='',
got_str='')
self.assertEmpty(err)
printer.print_test_result(result, expected=False, exp_str='',
got_str='')
self.assertWritten(err, [' passes/image.html -> unexpected pass\n'])
printer, err, out = self.get_printer(['--print', 'everything'])
printer.print_test_result(result, expected=True, exp_str='',
got_str='')
self.assertEmpty(err)
printer.print_test_result(result, expected=False, exp_str='',
got_str='')
self.assertWritten(err, [' passes/image.html -> unexpected pass\n'])
printer, err, out = self.get_printer(['--print', 'nothing'])
printer.print_test_result(result, expected=False, exp_str='',
got_str='')
self.assertEmpty(err)
printer, err, out = self.get_printer(['--print',
'trace-unexpected'])
printer.print_test_result(result, expected=True, exp_str='',
got_str='')
self.assertEmpty(err)
printer, err, out = self.get_printer(['--print',
'trace-unexpected'])
printer.print_test_result(result, expected=False, exp_str='',
got_str='')
self.assertNotEmpty(err)
printer, err, out = self.get_printer(['--print',
'trace-unexpected'])
result = self.get_result("passes/text.html")
printer.print_test_result(result, expected=False, exp_str='',
got_str='')
self.assertNotEmpty(err)
printer, err, out = self.get_printer(['--print',
'trace-unexpected'])
result = self.get_result("passes/text.html")
printer.print_test_result(result, expected=False, exp_str='',
got_str='')
self.assertNotEmpty(err)
printer, err, out = self.get_printer(['--print', 'trace-everything'])
result = self.get_result('passes/image.html')
printer.print_test_result(result, expected=True, exp_str='',
got_str='')
result = self.get_result('failures/expected/missing_text.html')
printer.print_test_result(result, expected=True, exp_str='',
got_str='')
result = self.get_result('failures/expected/missing_check.html')
printer.print_test_result(result, expected=True, exp_str='',
got_str='')
result = self.get_result('failures/expected/missing_image.html')
printer.print_test_result(result, expected=True, exp_str='',
got_str='')
self.assertNotEmpty(err)
printer, err, out = self.get_printer(['--print', 'trace-everything'])
result = self.get_result('passes/image.html')
printer.print_test_result(result, expected=False, exp_str='',
got_str='')
def test_print_progress(self):
expectations = ''
printer, err, out = self.get_printer(['--print', 'nothing'])
tests = ['passes/text.html', 'failures/expected/timeout.html',
'failures/expected/crash.html']
paths, rs, exp = self.get_result_summary(tests, expectations)
# First, test that we print nothing when we shouldn't print anything.
printer.print_progress(rs, False, paths)
self.assertEmpty(out)
self.assertEmpty(err)
printer.print_progress(rs, True, paths)
self.assertEmpty(out)
self.assertEmpty(err)
# Now test that we do print things.
printer, err, out = self.get_printer(['--print', 'one-line-progress'])
printer.print_progress(rs, False, paths)
self.assertEmpty(out)
self.assertNotEmpty(err)
printer, err, out = self.get_printer(['--print', 'one-line-progress'])
printer.print_progress(rs, True, paths)
self.assertEmpty(out)
self.assertNotEmpty(err)
printer, err, out = self.get_printer(['--print', 'one-line-progress'])
rs.remaining = 0
printer.print_progress(rs, False, paths)
self.assertEmpty(out)
self.assertNotEmpty(err)
printer.print_progress(rs, True, paths)
self.assertEmpty(out)
self.assertNotEmpty(err)
def test_write_nothing(self):
printer, err, out = self.get_printer(['--print', 'nothing'])
printer.write("foo")
self.assertEmpty(err)
def test_write_misc(self):
printer, err, out = self.get_printer(['--print', 'misc'])
printer.write("foo")
self.assertNotEmpty(err)
printer, err, out = self.get_printer(['--print', 'misc'])
printer.write("foo", "config")
self.assertEmpty(err)
def test_write_everything(self):
printer, err, out = self.get_printer(['--print', 'everything'])
printer.write("foo")
self.assertNotEmpty(err)
printer, err, out = self.get_printer(['--print', 'everything'])
printer.write("foo", "config")
self.assertNotEmpty(err)
def test_write_verbose(self):
printer, err, out = self.get_printer(['--verbose'])
printer.write("foo")
self.assertTrue("foo" in err.buflist[0])
self.assertEmpty(out)
def test_print_unexpected_results(self):
# This routine is the only one that prints stuff that the bots
# care about.
#
# FIXME: there's some weird layering going on here. It seems
# like we shouldn't be both using an expectations string and
# having to specify whether or not the result was expected.
# This whole set of tests should probably be rewritten.
#
# FIXME: Plus, the fact that we're having to call into
# run_webkit_tests is clearly a layering inversion.
def get_unexpected_results(expected, passing, flaky):
"""Return an unexpected results summary matching the input description.
There are a lot of different combinations of test results that
can be tested; this routine produces various combinations based
on the values of the input flags.
Args:
expected: whether the tests ran as expected
passing: whether the tests should all pass
flaky: whether the tests should be flaky (if False, they
produce the same results on both runs; if True, they
all pass on the second run).
"""
paths, rs, exp = self.get_result_summary(tests, expectations)
if expected:
rs.add(self.get_result('passes/text.html', test_expectations.PASS), expected)
rs.add(self.get_result('failures/expected/timeout.html', test_expectations.TIMEOUT), expected)
rs.add(self.get_result('failures/expected/crash.html', test_expectations.CRASH), expected)
elif passing:
rs.add(self.get_result('passes/text.html'), expected)
rs.add(self.get_result('failures/expected/timeout.html'), expected)
rs.add(self.get_result('failures/expected/crash.html'), expected)
else:
rs.add(self.get_result('passes/text.html', test_expectations.TIMEOUT), expected)
rs.add(self.get_result('failures/expected/timeout.html', test_expectations.CRASH), expected)
rs.add(self.get_result('failures/expected/crash.html', test_expectations.TIMEOUT), expected)
retry = rs
if flaky:
paths, retry, exp = self.get_result_summary(tests, expectations)
retry.add(self.get_result('passes/text.html'), True)
retry.add(self.get_result('failures/expected/timeout.html'), True)
retry.add(self.get_result('failures/expected/crash.html'), True)
unexpected_results = manager.summarize_results(self._port, exp, rs, retry, test_timings={}, only_unexpected=True, interrupted=False)
return unexpected_results
tests = ['passes/text.html', 'failures/expected/timeout.html', 'failures/expected/crash.html']
expectations = ''
printer, err, out = self.get_printer(['--print', 'nothing'])
ur = get_unexpected_results(expected=False, passing=False, flaky=False)
printer.print_unexpected_results(ur)
self.assertEmpty(err)
self.assertEmpty(out)
printer, err, out = self.get_printer(['--print', 'unexpected-results'])
# test everything running as expected
ur = get_unexpected_results(expected=True, passing=False, flaky=False)
printer.print_unexpected_results(ur)
self.assertEmpty(err)
self.assertEmpty(out)
# test failures
printer, err, out = self.get_printer(['--print', 'unexpected-results'])
ur = get_unexpected_results(expected=False, passing=False, flaky=False)
printer.print_unexpected_results(ur)
self.assertEmpty(err)
self.assertNotEmpty(out)
# test unexpected flaky
printer, err, out = self.get_printer(['--print', 'unexpected-results'])
ur = get_unexpected_results(expected=False, passing=False, flaky=True)
printer.print_unexpected_results(ur)
self.assertEmpty(err)
self.assertNotEmpty(out)
printer, err, out = self.get_printer(['--print', 'everything'])
ur = get_unexpected_results(expected=False, passing=False, flaky=False)
printer.print_unexpected_results(ur)
self.assertEmpty(err)
self.assertNotEmpty(out)
expectations = """
BUGX : failures/expected/crash.html = CRASH
BUGX : failures/expected/timeout.html = TIMEOUT
"""
printer, err, out = self.get_printer(['--print', 'unexpected-results'])
ur = get_unexpected_results(expected=False, passing=False, flaky=False)
printer.print_unexpected_results(ur)
self.assertEmpty(err)
self.assertNotEmpty(out)
printer, err, out = self.get_printer(['--print', 'unexpected-results'])
ur = get_unexpected_results(expected=False, passing=True, flaky=False)
printer.print_unexpected_results(ur)
self.assertEmpty(err)
self.assertNotEmpty(out)
# Test handling of --verbose as well.
printer, err, out = self.get_printer(['--verbose'])
ur = get_unexpected_results(expected=False, passing=False, flaky=False)
printer.print_unexpected_results(ur)
# FIXME: debug output from the port and scm objects may or may not go
# to stderr, so there's no point in testing its contents here.
self.assertNotEmpty(out)
def test_print_unexpected_results_buildbot(self):
# FIXME: Test that print_unexpected_results() produces the printer the
# buildbot is expecting.
pass
if __name__ == '__main__':
unittest.main()
|
cs-au-dk/Artemis
|
WebKit/Tools/Scripts/webkitpy/layout_tests/views/printing_unittest.py
|
Python
|
gpl-3.0
| 21,671
|
#!/usr/bin/env python
# Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility for generating experimental API tokens
usage: generate_token.py [-h] [--key-file KEY_FILE]
[--expire-days EXPIRE_DAYS |
--expire-timestamp EXPIRE_TIMESTAMP]
origin trial_name
Run "generate_token.py -h" for more help on usage.
"""
import argparse
import base64
from datetime import datetime
import json
import re
import os
import struct
import sys
import time
import urlparse
script_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(script_dir, 'third_party', 'ed25519'))
import ed25519
# Matches a valid DNS name label (alphanumeric plus hyphens, except at the ends,
# no longer than 63 ASCII characters)
DNS_LABEL_REGEX = re.compile(r"^(?!-)[a-z\d-]{1,63}(?<!-)$", re.IGNORECASE)
# This script generates Version 2 tokens.
VERSION = "\x02"
# Default key file, relative to script_dir.
DEFAULT_KEY_FILE = 'eftest.key'
def HostnameFromArg(arg):
"""Determines whether a string represents a valid hostname.
Returns the canonical hostname if its argument is valid, or None otherwise.
"""
if not arg or len(arg) > 255:
return None
if arg[-1] == ".":
arg = arg[:-1]
if "." not in arg and arg != "localhost":
return None
if all(DNS_LABEL_REGEX.match(label) for label in arg.split(".")):
return arg.lower()
def OriginFromArg(arg):
"""Constructs the origin for the token from a command line argument.
Returns None if this is not possible (neither a valid hostname nor a
valid origin URL was provided.)
"""
# Does it look like a hostname?
hostname = HostnameFromArg(arg)
if hostname:
return "https://" + hostname + ":443"
# If not, try to construct an origin URL from the argument
origin = urlparse.urlparse(arg)
if not origin or not origin.scheme or not origin.netloc:
raise argparse.ArgumentTypeError("%s is not a hostname or a URL" % arg)
# HTTPS or HTTP only
if origin.scheme not in ('https','http'):
raise argparse.ArgumentTypeError("%s does not use a recognized URL scheme" %
arg)
# Add default port if it is not specified
try:
port = origin.port
except ValueError:
raise argparse.ArgumentTypeError("%s is not a hostname or a URL" % arg)
if not port:
port = {"https": 443, "http": 80}[origin.scheme]
# Strip any extra components and return the origin URL:
return "{0}://{1}:{2}".format(origin.scheme, origin.hostname, port)
def ExpiryFromArgs(args):
if args.expire_timestamp:
return int(args.expire_timestamp)
return (int(time.time()) + (int(args.expire_days) * 86400))
def GenerateTokenData(origin, api_name, expiry):
return json.dumps({"origin": origin,
"feature": api_name,
"expiry": expiry}).encode('utf-8')
def GenerateDataToSign(version, data):
return version + struct.pack(">I",len(data)) + data
def Sign(private_key, data):
return ed25519.signature(data, private_key[:32], private_key[32:])
def FormatToken(version, signature, data):
return base64.b64encode(version + signature +
struct.pack(">I",len(data)) + data)
def main():
default_key_file_absolute = os.path.join(script_dir, DEFAULT_KEY_FILE)
parser = argparse.ArgumentParser(
description="Generate tokens for enabling experimental APIs")
parser.add_argument("origin",
help="Origin for which to enable the API. This can be "
"either a hostname (default scheme HTTPS, default "
"port 443) or a URL.",
type=OriginFromArg)
parser.add_argument("trial_name",
help="Feature to enable. The current list of "
"experimental feature trials can be found in "
"RuntimeFeatures.in")
parser.add_argument("--key-file",
help="Ed25519 private key file to sign the token with",
default=default_key_file_absolute)
expiry_group = parser.add_mutually_exclusive_group()
expiry_group.add_argument("--expire-days",
help="Days from now when the token should exipire",
type=int,
default=42)
expiry_group.add_argument("--expire-timestamp",
help="Exact time (seconds since 1970-01-01 "
"00:00:00 UTC) when the token should exipire",
type=int)
args = parser.parse_args()
expiry = ExpiryFromArgs(args)
key_file = open(os.path.expanduser(args.key_file), mode="rb")
private_key = key_file.read(64)
# Validate that the key file read was a proper Ed25519 key -- running the
# publickey method on the first half of the key should return the second
# half.
if (len(private_key) < 64 or
ed25519.publickey(private_key[:32]) != private_key[32:]):
print("Unable to use the specified private key file.")
sys.exit(1)
token_data = GenerateTokenData(args.origin, args.trial_name, expiry)
data_to_sign = GenerateDataToSign(VERSION, token_data)
signature = Sign(private_key, data_to_sign)
# Verify that the signature is correct before printing it.
try:
ed25519.checkvalid(signature, data_to_sign, private_key[32:])
except Exception, exc:
print "There was an error generating the signature."
print "(The original error was: %s)" % exc
sys.exit(1)
# Output the token details
print "Token details:"
print " Origin: %s" % args.origin
print " Feature: %s" % args.trial_name
print " Expiry: %d (%s UTC)" % (expiry, datetime.utcfromtimestamp(expiry))
print
# Output the properly-formatted token.
print FormatToken(VERSION, signature, token_data)
if __name__ == "__main__":
main()
|
danakj/chromium
|
tools/origin_trials/generate_token.py
|
Python
|
bsd-3-clause
| 6,023
|
from itertools import chain
import os
from six import string_types
def flatten_path_dict(path_dict, base_prefix="", delimiter=os.sep):
"""
Convert a directory tree dict into a list of leaf file paths.
For example:
paths = {'ohsu':
{'test': 'simple',
'pasat': [
'A-31',
'B-32',
{'example': ['C-20']}
]}}
flatten_path_dict(paths, base_prefix='/fs/storage/laptops/import')
# => ['/fs/storage/laptops/import/ohsu/test/simple',
# '/fs/storage/laptops/import/ohsu/pasat/A-31',
# '/fs/storage/laptops/import/ohsu/pasat/B-32',
# '/fs/storage/laptops/import/ohsu/pasat/example/C-20']
"""
# NOTE: On reflection, this is not well-recursed; working towards a string
# base case would have been more elegant and possibly more robust.
output = []
for key, val in path_dict.items():
new_prefix = base_prefix + delimiter + key
if isinstance(val, dict):
# The value is a subdirectory -> recurse
output.extend(flatten_path_dict(val, new_prefix, delimiter))
elif isinstance(val, list):
# List will contain either strings ready for concatenation...
output.extend(
[new_prefix + delimiter + item
for item in val if not isinstance(item, dict)])
# ...or more dict subdirs to recurse into
output.extend( # chain.from_iterable flattens the resulting list
chain.from_iterable(
[flatten_path_dict(item, new_prefix, delimiter)
for item in val if isinstance(item, dict)]))
elif isinstance(val, string_types):
output.append(new_prefix + delimiter + val)
return output
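# An alternative sketch with a string base case, as the NOTE above suggests
# (illustrative; not used by the importer, but produces the same output):
def flatten_path_dict_v2(node, base_prefix="", delimiter=os.sep):
    if isinstance(node, string_types):
        # Leaf: a single file name under the accumulated prefix.
        return [base_prefix + delimiter + node]
    if isinstance(node, list):
        # A list mixes leaf names and nested subdir dicts; recurse on each.
        return [path for item in node
                for path in flatten_path_dict_v2(item, base_prefix, delimiter)]
    # Otherwise a dict mapping subdirectory names to their contents.
    return [path for key, val in node.items()
            for path in flatten_path_dict_v2(val, base_prefix + delimiter + key,
                                             delimiter)]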
|
sibis-platform/ncanda-data-integration
|
scripts/import/laptops/config_utils.py
|
Python
|
bsd-3-clause
| 1,862
|
import praw
import pdb
import re
import os
import csv
import pdb
from bot_config import *
from IPython import embed
from nltk import *
from weight_response import WeightResponse
from subreddit_dictionary import SubredditDictionary
if not os.path.isfile('bot_config.py'):
print "You must create config file with your reddit username and password."
print "Please see config_skel.py"
exit(1)
user_agent = ("F4tB0t v0.1")
r = praw.Reddit(user_agent=user_agent)
r.login(REDDIT_USERNAME, REDDIT_PASS)
users = []
if os.path.isfile("users.csv"):
with open("users.csv", "rb") as f:
fieldnames = ['username', 'weight']
userreader = csv.reader(f, delimiter=',')
for row in userreader:
user = {}
user.username = row['username']
user.weight = row['weight']
users.append(user)
post = r.get_submission(POST_ENDPOINT)
def weightFromText(text):
regex = re.compile('(([0-9]+) ?([lpkLPK][\w]+))')
return regex.findall(text)
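# e.g. (illustrative): weightFromText("went from 220lbs down to 90 kg")
# -> [('220lbs', '220', 'lbs'), ('90 kg', '90', 'kg')]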
def main():
good_entries = 0
bad_entries = 0
weightlist = []
namelist = []
subs = []
for comment in post.comments[1:]:
name = comment.author
try:
weightResponse = WeightResponse(weightFromText(comment.body)[0])
weightResponse.author = name
good_entries = good_entries + 1
if weightResponse.is_this_reasonable():
weightlist.append(weightResponse.weight_in_pounds())
namelist.append(weightResponse.author)
newUser = {}
newUser['username'] = weightResponse.author
newUser['weight'] = weightResponse.weight_in_pounds()
users.append(newUser)
else:
pass
except Exception, e:
bad_entries = bad_entries + 1
for name in namelist:
try:
user = r.get_redditor(name)
subs.append(subredditsForUser(user))
except Exception, e:
pass
subrDict = SubredditDictionary(subs, weightlist)
subrDict.pretty_print()
def commentsForUser(user):
return user.get_comments()
def subredditForComment(comment):
return comment.subreddit.display_name
def subredditsForUser(user):
subreddits = []
for comment in commentsForUser(user):
subreddits.append(subredditForComment(comment))
return list(set(subreddits))
def writeData():
with open('users.csv', 'wb') as csvfile:
userWriter = csv.writer(csvfile, delimiter=',',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
for user in users:
userWriter.writerow([user['username'], user['weight']])
main()
writeData()
|
Pinwheeler/F4tB0t
|
f4tb0t.py
|
Python
|
gpl-2.0
| 2,500
|
''' Handlers for the Player List pages /players/{firstletter}
This only returns a small dataset on each player like Name, College, Draft Year'''
import pandas
import requests
import datetime
import os
from bs4 import BeautifulSoup
baseurl = "http://www.basketball-reference.com/players/{firstletter}/{playerid}.html"
players_url = "http://www.basketball-reference.com/players/{firstletter}/"
bref_date_transformation = lambda x: datetime.datetime.strptime(str(x), '%B %d, %Y').strftime(
'%Y-%m-%d') if pandas.notnull(x) else None
def validate_players_table(df):
if df.columns.tolist() == ['Player', 'From', 'To', 'Pos', 'Ht', 'Wt', 'Birth Date', 'College']:
return True
else:
return False
def player_id_creator(player_fullname):
''' Guess what player IDs should look like based on Basketball Reference's format
ID format seems to use:
{first five letters of the surname}
+ {first two letters of forename}
+ {counter}'''
if len(player_fullname.split(' ')) < 2:
return None
else:
return (player_fullname.split(' ')[1][:5] \
+ player_fullname.split(' ')[0][:2] \
+ '01').lower()
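# e.g. player_id_creator('Michael Jordan') -> 'jordami01'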
def check_player_ids_look_ok(df):
''' Check that a good proportion of the calculated player IDs match
what we'd expect them to be; Basketball Reference's player IDs
occasionally deviate from the pattern above.
If 50% of calculated player_ids match, it's probably ok.
'''
correctness_threshold = 0.5
check_vector = df.apply(lambda x: x['player_id'] == player_id_creator(x['Player']), axis=1)
return check_vector.value_counts().to_dict()[True] > len(check_vector) * correctness_threshold
def get_players_table(letter):
''' Return a BS Tag of all playes whose surname begins with letter '''
req = requests.get(players_url.format(firstletter=letter))
soup = BeautifulSoup(req.text, 'html.parser')
tables = soup.find_all('table')
table_lst = [x for x in tables if x.attrs.get('id') == 'players']
if len(table_lst) == 1:
return table_lst[0] # If only one table detected
else:
print('Error: Unable to find player list for letter: %s' % letter)
return None
def convert_HTML_players_table_to_df(t):
''' Returns a Pandas DataFrame from BS4 player table input from BREF'''
''' Create base table as seen on the site '''
df = pandas.read_html(str(t))[0]
assert validate_players_table(
df), 'Player table format unexpected. Either table download failed or BR has changed its format'
'''Enrichment past the basic table provided by Basketball Ref'''
'''1. Identify Hall of Famers and remove the asterisk from their Names!'''
df['hall_of_fame'] = df.Player.map(lambda x: '*' in x)
df.Player = df.Player.map(lambda x: x.replace('*', ''))
'''2. Grab Player IDs '''
player_id_series = []
for row in t.find_all('tr'):
if row.th.attrs.get('data-append-csv'):
player_id_series.append(row.th.attrs.get('data-append-csv'))
if len(player_id_series) == len(df):
df['player_id'] = player_id_series
if check_player_ids_look_ok(df):
return df
else:
print('Unable to parse Player IDs')
df.drop('player_id', axis=1, inplace=True)  # drop the bad column, not a row
return df
def get_players_dataframe(letter):
t = get_players_table(letter)
df = convert_HTML_players_table_to_df(t)
'''Rename BRef's columns to friendlier DB ones'''
df.rename(columns={'player_id': 'id'
, 'Player': 'player_name'
, 'From': 'year_from'
, 'To': 'year_to'
, 'Pos': 'position'
, 'Ht': 'height'
, 'Wt': 'weight'
, 'Birth Date': 'birth_date'
, 'College': 'college'
, 'hall_of_fame': 'hall_of_fame'}
, inplace=True)
df.birth_date = df.birth_date.map(bref_date_transformation)
return df.where(pandas.notnull, None)
|
mblanchard23/brefcrawler
|
get_players.py
|
Python
|
mit
| 3,878
|
# encoding: utf8
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [("podcasts", "0005_auto_20140610_1854")]
operations = [
migrations.AlterField(
model_name="episode",
name="outdated",
field=models.BooleanField(default=False, db_index=True),
),
migrations.AlterField(
model_name="podcast",
name="outdated",
field=models.BooleanField(default=False, db_index=True),
),
migrations.AlterField(
model_name="episode",
name="guid",
field=models.CharField(max_length=100, null=True),
),
]
|
gpodder/mygpo
|
mygpo/podcasts/migrations/0006_auto_20140614_0836.py
|
Python
|
agpl-3.0
| 701
|
"""
Project: athena-server
Author: Saj Arora
Description:
"""
from api.v1 import SageController, SageMethod, make_json_ok_response
def sage_account_me_function(self, account, **kwargs):
return account.to_dict()
def sage_account_update_function(self, key, resource, **kwargs):
return make_json_ok_response(dict(method="update"))
account_controller = {
'me': SageController(sage_account_me_function),
'': SageController(sage_account_me_function),
'<string:key>':[
SageController(sage_account_update_function, SageMethod.PUT)
]
}
|
aroraenterprise/projecteos
|
backend/api/v1/account_module/account_api.py
|
Python
|
mit
| 564
|
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 1 10:31:00 2015
@author: kienle
"""
import os
import numpy as np
import matplotlib.pyplot as plt
import pickle
from matplotlib import cm
from generalFunctions import *
from skimage import filters
import matplotlib as mpl
mpl.rcParams['pdf.fonttype'] = 42
path = 'X:\\Nicola\\160606_noiseTest_beads'
worms = ['C04']#,'C02']
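# Assumed layout of the piezo logs parsed by getPiezo below: free-form header
# lines up to a 'Time [ms]' column header, then tab-separated rows of
# time / piezo input / piezo output (the time column is rescaled by 1000).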
def getPiezo( path, worm, tRow ):
with open( os.path.join( path,worm,tRow.fName+'.txt' ), 'r') as f:
line = ''
while 'Time [ms]' not in line:
line = f.readline()
lines = f.readlines()
t = np.array( [ np.float(i.strip().split('\t')[0]) for i in lines ] ) * 1000
_in = np.array( [ np.float(i.strip().split('\t')[1]) for i in lines ] )
_out = np.array( [ np.float(i.strip().split('\t')[2]) for i in lines ] )
# plt.plot(t,_in)
# plt.plot(t,_out)
# plt.show()
return ( t, _in, _out )
def plotSingleWormData( path, worm, ax1 ):
timesDF = load_data_frame( path, worm + '_01times.pickle' )
gonadPosDF = load_data_frame( path, worm + '_02gonadPos.pickle' )
cellPosDF = load_data_frame( path, worm + '_04cellPos.pickle' )
cellOutDF = load_data_frame( path, worm + '_05cellOut.pickle' )
cellFluoDF = load_data_frame( path, worm + '_06cellFluo.pickle' )
for idx, tRow in timesDF.iterrows():
print(tRow.fName)
(t,_in,_out) = getPiezo( path, worm, tRow )
ax1.plot( t, _in, '-b', lw=2 )
ax1.plot( t, _out, '-g', lw=2 )
### setup figure for the timeseries
fig1 = plt.figure(figsize=(5.8,3.8))
ax1 = fig1.add_subplot(111)
fig1.subplots_adjust(left=0.15, right=.95, top=.95, bottom=0.15)
for tl in ax1.get_xticklabels():
tl.set_fontsize(18)
for tl in ax1.get_yticklabels():
tl.set_fontsize(18)
ax1.set_ylim((-17.2,0.5))
ax1.set_xlim((-5,500))
plotSingleWormData(path,worms[0],ax1)
plt.show()
|
nicolagritti/ACVU_scripts
|
source/check_piezo_movements.py
|
Python
|
gpl-3.0
| 1,793
|
"""
Module with code executed during Studio startup
"""
from django.conf import settings
# Force settings to run so that the python path is modified
settings.INSTALLED_APPS # pylint: disable=pointless-statement
from openedx.core.lib.django_startup import autostartup
import django
from monkey_patch import third_party_auth
import xmodule.x_module
import cms.lib.xblock.runtime
from openedx.core.djangoapps.theming.core import enable_comprehensive_theme
def run():
"""
Executed during django startup
"""
third_party_auth.patch()
# Comprehensive theming needs to be set up before django startup,
# because modifying django template paths after startup has no effect.
if settings.COMPREHENSIVE_THEME_DIR:
enable_comprehensive_theme(settings.COMPREHENSIVE_THEME_DIR)
django.setup()
autostartup()
add_mimetypes()
if settings.FEATURES.get('USE_CUSTOM_THEME', False):
enable_theme()
# In order to allow descriptors to use a handler url, we need to
# monkey-patch the x_module library.
# TODO: Remove this code when Runtimes are no longer created by modulestores
# https://openedx.atlassian.net/wiki/display/PLAT/Convert+from+Storage-centric+runtimes+to+Application-centric+runtimes
xmodule.x_module.descriptor_global_handler_url = cms.lib.xblock.runtime.handler_url
xmodule.x_module.descriptor_global_local_resource_url = cms.lib.xblock.runtime.local_resource_url
def add_mimetypes():
"""
Add extra mimetypes. Used in xblock_resource.
If you add a mimetype here, be sure to also add it in lms/startup.py.
"""
import mimetypes
mimetypes.add_type('application/vnd.ms-fontobject', '.eot')
mimetypes.add_type('application/x-font-opentype', '.otf')
mimetypes.add_type('application/x-font-ttf', '.ttf')
mimetypes.add_type('application/font-woff', '.woff')
def enable_theme():
"""
Enable the settings for a custom theme, whose files should be stored
in ENV_ROOT/themes/THEME_NAME (e.g., edx_all/themes/stanford).
At this moment this is actually just a fix for collectstatic,
(see https://openedx.atlassian.net/browse/TNL-726),
but can be improved with a full theming option also for Studio
in the future (see lms.startup)
"""
# Workaround for setting THEME_NAME to an empty
# string which is the default due to this ansible
# bug: https://github.com/ansible/ansible/issues/4812
if settings.THEME_NAME == "":
settings.THEME_NAME = None
return
assert settings.FEATURES['USE_CUSTOM_THEME']
settings.FAVICON_PATH = 'themes/{name}/images/favicon.ico'.format(
name=settings.THEME_NAME
)
# Calculate the location of the theme's files
theme_root = settings.ENV_ROOT / "themes" / settings.THEME_NAME
# Namespace the theme's static files to 'themes/<theme_name>' to
# avoid collisions with default edX static files
settings.STATICFILES_DIRS.append(
(u'themes/{}'.format(settings.THEME_NAME), theme_root / 'static')
)
|
franosincic/edx-platform
|
cms/startup.py
|
Python
|
agpl-3.0
| 3,052
|
'''
kinkin
'''
import urllib,urllib2,re,xbmc,xbmcplugin,xbmcgui,os,sys
import settings
import time,datetime
from datetime import date
from threading import Timer
from helpers import clean_file_name
import shutil
import glob
from threading import Thread
import cookielib
from t0mm0.common.net import Net
import requests
import urlresolver
from metahandler import metahandlers
metainfo = metahandlers.MetaData()
net = Net()
ADDON = settings.addon()
ENABLE_SUBS = settings.enable_subscriptions()
ENABLE_META = settings.enable_meta()
TV_PATH = settings.tv_directory()
AUTOPLAY = settings.autoplay()
FAV = settings.favourites_file()
SUB = settings.subscription_file()
cookie_jar = settings.cookie_jar()
addon_path = os.path.join(xbmc.translatePath('special://home/addons'), '')
fanart = xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'fanart.jpg'))
iconart = xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'icon.png'))
base_url = 'http://www.watch-tvseries.net/'
trans_table = ''.join( [chr(i) for i in range(128)] + [' '] * 128 )
def open_url(url):
req = urllib2.Request(url)
req.add_header('User-Agent','Mozilla/5.0 (Linux; <Android Version>; <Build Tag etc.>) AppleWebKit/<WebKit Rev>(KHTML, like Gecko) Chrome/<Chrome Rev> Safari/<WebKit Rev>')
response = urllib2.urlopen(req)
link=response.read()
response.close()
return link
def CATEGORIES(name):
addDir("All Shows", base_url + 'play/menulist',7,xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'art', 'alltvshows.jpg')), '','')
addDir("Top Shows", base_url,2,xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'art', 'hittvshows.jpg')), '','')
addDir("New Episodes", base_url,1,xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'art', 'newepisodes.jpg')), '','')
addDir("A-Z", 'url',8,xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'art', 'a-z.jpg')), '','')
addDir("My Favourites", 'url',12,xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'art', 'myfavourites.jpg')), '','')
if ENABLE_SUBS:
addDir("My Subscriptions", 'url',16,xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'art', 'mysubscriptions.jpg')), '','')
else:
addDir("[COLOR orange] My Subscriptions (ENABLE IN SETTINGS)[/COLOR]", 'url',16,xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'art', 'mysubscriptions.jpg')), '','')
addDir("Search", 'url',6,xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'art', 'search.jpg')), '','')
def a_to_z(url):
alphabet = ['#', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U','V', 'W', 'X', 'Y', 'Z']
for a in alphabet:
addDir(a, base_url + 'play/menulist',9,xbmc.translatePath(os.path.join('special://home/addons/plugin.video.tv4me', 'art', a.lower().replace('#','hash') + '.jpg')), '','menu')
def favourites():
if os.path.isfile(FAV):
s = read_from_file(FAV)
search_list = s.split('\n')
for list in search_list:
if list != '':
list1 = list.split('QQ')
title = list1[0]
title = title.replace('->-', ' & ')
url = list1[1]
thumb = list1[2]
if ENABLE_META:
infoLabels = get_meta(title,'tvshow',year=None,season=None,episode=None,imdb=None)
if infoLabels['title']=='':
title=title
else:
title=infoLabels['title']
if infoLabels['cover_url']=='':
iconimage=thumb
else:
iconimage=infoLabels['cover_url']
else:
infoLabels =None
iconimage=thumb
addDir(title, url,3,thumb, list,'sh',infoLabels=infoLabels)
def subscriptions():
if os.path.isfile(SUB):
s = read_from_file(SUB)
search_list = s.split('\n')
for list in search_list:
if list != '':
list1 = list.split('QQ')
title = list1[0]
title = title.replace('->-', ' & ')
url = list1[1]
thumb = list1[2]
addDir(title, url,3,thumb, list,'sh')
def search():
keyboard = xbmc.Keyboard('', 'Search TV Show', False)
keyboard.doModal()
if keyboard.isConfirmed():
query = keyboard.getText()
if len(query) > 0:
search_show(query)
def search_show(query):
header_dict = {}
header_dict['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; rv:24.0) Gecko/20100101 Firefox/24.0'
net.set_cookies(cookie_jar)
link = net.http_GET(base_url + 'play/menulist', headers=header_dict).content.encode("utf-8").rstrip()
match = re.compile("<li><a href='(.+?)'>(.+?)</a></li>").findall(link)
for url, title in match:
if query.lower() in title.lower():
if not 'http://www.watch-tvseries.net' in url:
url='http://www.watch-tvseries.net' + url
if ENABLE_META:
infoLabels = get_meta(title,'tvshow',year=None,season=None,episode=None,imdb=None)
if infoLabels['title']=='':
title=title
else:
title=infoLabels['title']
if infoLabels['cover_url']=='':
iconimage=iconart
else:
iconimage=infoLabels['cover_url']
else:
infoLabels =None
iconimage=iconart
list_data = "%sQQ%sQQ%s" % (title, url, iconimage)
addDir(title, url,3,iconimage, list_data,'sh',infoLabels=infoLabels)
setView('episodes', 'show')
def shows(url):
header_dict = {}
header_dict['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; rv:24.0) Gecko/20100101 Firefox/24.0'
net.set_cookies(cookie_jar)
link = net.http_GET(url, headers=header_dict).content.encode("utf-8").rstrip()
all_shows = regex_from_to(link,'Top TV Shows', '</div></div></div><div id=')
match = re.compile('<a href="(.+?)">(.+?)</a>').findall(all_shows)
for url, title in match:
if not 'http://www.watch-tvseries.net' in url:
url='http://www.watch-tvseries.net' + url
if ENABLE_META:
infoLabels = get_meta(title,'tvshow',year=None,season=None,episode=None,imdb=None)
if infoLabels['title']=='':
name=title
else:
name=infoLabels['title']
if infoLabels['cover_url']=='':
iconimage=iconart
else:
iconimage=infoLabels['cover_url']
else:
infoLabels =None
iconimage=iconart
list_data = "%sQQ%sQQ%s" % (title, url, iconimage)
addDir(title, url,3,iconimage, list_data,'sh',infoLabels=infoLabels)
setView('episodes', 'show')
def a_z_shows(name, url):
name = str(name)
header_dict = {}
header_dict['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; rv:24.0) Gecko/20100101 Firefox/24.0'
net.set_cookies(cookie_jar)
link = net.http_GET(url, headers=header_dict).content.encode("utf-8").rstrip()
match = re.compile("<li><a href='(.+?)'>(.+?)</a></li>").findall(link)
for url, title in match:
if not 'http://www.watch-tvseries.net' in url:
url='http://www.watch-tvseries.net' + url
tnum = title[:1].replace('9','#').replace('8','#').replace('7','#').replace('6','#').replace('5','#').replace('4','#').replace('3','#').replace('2','#').replace('1','#').replace('0','#')
if title[:1] == name or tnum == name:
if ENABLE_META:
infoLabels = get_meta(title,'tvshow',year=None,season=None,episode=None,imdb=None)
if infoLabels['title']=='':
title=title
else:
title=infoLabels['title']
if infoLabels['cover_url']=='':
iconimage=iconart
else:
iconimage=infoLabels['cover_url']
else:
infoLabels =None
iconimage=iconart
list_data = "%sQQ%sQQ%s" % (title, url, iconimage)
addDir(title, url,3,iconimage, list_data,'sh',infoLabels=infoLabels)
setView('episodes', 'show')
def latest_episodes(url):
header_dict = {}
header_dict['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; rv:24.0) Gecko/20100101 Firefox/24.0'
net.set_cookies(cookie_jar)
    link = net.http_GET(url, headers=header_dict).content.encode("utf-8").rstrip()
all_episodes = regex_from_to(link,'Latest Episodes', '</div> </div></div>')
episodes = re.compile('<a href="(.+?)">(.+?)data-original="(.+?)"(.+?)<a href="(.+?)">(.+?)</a> </div> <div class="midestv"> (.+?) </div> <div class="ddestv"> (.+?)</div> </div> </div>').findall(all_episodes)
for url,a,thumb,b,url2,title,desc,aired in episodes:
name = "%s - %s" % (title, aired)
addDirPlayable(name,url,5,thumb, "")
setView('episodes', 'episodes-view')
def grouped_shows(url):
header_dict = {}
header_dict['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; rv:24.0) Gecko/20100101 Firefox/24.0'
net.set_cookies(cookie_jar)
link = net.http_GET(url, headers=header_dict).content.encode("utf-8").rstrip()
match = re.compile("<li><a href='(.+?)'>(.+?)</a></li>").findall(link)
for url, title in match:
if not 'http://www.watch-tvseries.net' in url:
url='http://www.watch-tvseries.net' + url
if ENABLE_META:
infoLabels = get_meta(title,'tvshow',year=None,season=None,episode=None,imdb=None)
if infoLabels['title']=='':
name=title
else:
name=infoLabels['title']
if infoLabels['cover_url']=='':
iconimage=iconart
else:
iconimage=infoLabels['cover_url']
else:
infoLabels =None
iconimage=iconart
list_data = "%sQQ%sQQ%s" % (title, url, iconimage)
addDir(title, url,3,iconimage, list_data,'sh',infoLabels=infoLabels)
setView('episodes', 'show')
def tv_show(name, url, iconimage):
episodes = []
if 'tvseries.net' in url:
site = "ZZnetYY"
else:
site = "ZZmeYY"
net.set_cookies(cookie_jar)
#url='http://www.watch-tvseries.net' + url
link = net.http_GET(url).content.encode("utf-8").rstrip()
net.save_cookies(cookie_jar)
seasonlist = regex_get_all(link.replace('&', 'and'), '<div class="csseason', '</div> </div> <div class=')
for s in seasonlist:
sname = regex_from_to(s, '">', '</div>')
if sname.startswith('Season 0'):
sn = sname.replace('Season 0', '')
else:
sn = sname.replace('Season ', '')
eplist = regex_get_all(str(s), '<a', '</a>')
if ENABLE_META:
infoLabels=get_meta(name,'tvshow',year=None,season=sn,episode=None)
if infoLabels['title']=='':
name=name
else:
name=infoLabels['title']
if infoLabels['cover_url']=='':
iconimage=iconart
else:
iconimage=infoLabels['cover_url']
else:
infoLabels =None
iconimage=iconart
addDir(sname, 'url',4,iconimage, eplist,site + name,infoLabels=infoLabels)
setView('episodes', 'seasons-view')
def tv_show_episodes(name, list, iconimage, showname):
list = str(list)
site = regex_from_to(showname,'ZZ', 'YY')
splitshnm = showname.split('YY')
showname = splitshnm[1]
episodes = re.compile('<a title="(.+?)" href="(.+?)"> <div class="(.+?)data-original="(.+?)"(.+?)nseasnumep"> (.+?) <br(.+?)>(.+?) </div> </div> </a>').findall(list)
for epname, url, a, thumb, b, snum, c, epnum in episodes:
url = 'http://www.watch-tvseries.' + site + url
epnum = epnum.replace('episode ', 'E')
snum = snum.replace('season ', 'S')
sn = snum.replace('S0','')
if epnum.startswith('E0'):
en = epnum.replace('E0', '')
else:
en = epnum.replace('E', '')
name = "%s%s - %s" % (snum, epnum, clean_file_name(epname))
if ENABLE_META:
infoLabels=get_meta(showname,'episode',year=None,season=sn,episode=en)
if infoLabels['title']=='':
name = name
else:
name = "%s%s %s" % (snum, epnum, infoLabels['title'])
if infoLabels['cover_url']=='':
iconimage=thumb
else:
iconimage=infoLabels['cover_url']
else:
infoLabels =None
iconimage=iconart
if AUTOPLAY:
addDirPlayable(name,url,5,iconimage, showname,infoLabels=infoLabels)
else:
addDir(name,url,20,iconimage, showname, showname,infoLabels=infoLabels)
setView('episodes', 'episodes-view')
def play(name, url, iconimage, showname):
hosturl = url
site = regex_from_to(hosturl,'www.watch-tvseries.', '/series')
host = 'www.watch-tvseries.' + site
vidlinks = "found"
dp = xbmcgui.DialogProgress()
dp.create("Opening",showname + ' - ' + name)
dp.update(0)
header_dict = {}
header_dict['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
header_dict['Host'] = host
header_dict['Connection'] = 'keep-alive'
header_dict['Cache-Control'] = 'max-age=0'
header_dict['Referer'] = hosturl
header_dict['User-Agent'] = 'AppleWebKit/<WebKit Rev>'
link = net.http_GET(url, headers=header_dict).content.replace("'", '"')#.content.encode("utf-8").translate(trans_table)
net.save_cookies(cookie_jar)
url1=[]
if site == 'net':
key = re.compile('get[(]"http://www.watch-tvseries.net/"[+]updv[+]"(.+?)"').findall(link)
else:
key = re.compile('get[(]"http://www.watch-tvseries.me/"[+]updv[+]"(.+?)"').findall(link)
for url in key:
url1.append('http://www.watch-tvseries.%s/play/plvids%s' % (site, url))
match=re.compile('morurlvid[(]"(.+?)"').findall(link)
nItem=len(match)
count=0
for kl in match:
count+=1
url = 'http://www.watch-tvseries.net/play/mvideo_' + kl
response = requests.get(url, allow_redirects=False)
url1.append(response.headers['location'])
    nItem=len(url1)  # denominator for the progress dialog over the final link list
count=0
for u in url1:
u=urllib.unquote(u.replace('https://p.wplay.me/red.php?u=',''))
try:
title = regex_from_to(u, 'http://', '/')
except:
try:
title = regex_from_to(u, 'https://', '/')
except:
title = u
title = title.replace('embed.','').replace('api.','').replace('www.','')
count+=1
titlelist = str(count) + ' of ' + str(nItem) + ': ' + title
progress = float(count) / float(nItem) * 100
dp.update(int(progress), 'Adding link',"")
if dp.iscanceled():
return
if not 'watch-tvseries' in titlelist:
if AUTOPLAY:
try:
dp = xbmcgui.DialogProgress()
dp.create("TV4ME: Trying Links",titlelist)
play_videos(name,u,iconimage,showname)
return
except:
pass
else:
addDirPlayable(title,u,19,iconimage,name+'<>'+showname)
def play_videos(name, url, iconimage, showname):
if '<>' in showname:
name=showname.split('<>')[0]
showname=showname.split('<>')[1]
hosturl = url
header_dict = {}
header_dict['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
header_dict['Host'] = 'vk.com'
header_dict['Referer'] = str(hosturl)
header_dict['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; rv:24.0) Gecko/20100101 Firefox/24.0'
if 'plvids' in url:
linkvk = net.http_GET(url).content.encode("utf-8").rstrip()
if 'mail.ru' in linkvk:
url = regex_from_to(linkvk, 'src="', '"')
url = url.replace('.html','.json?ver=0.2.60').replace('embed/','')
max=0
link = requests.get(url).content
cookielink = requests.get(url)
setcookie = cookielink.headers['Set-Cookie']
match=re.compile('"key":"(.+?)","url":"(.+?)"').findall(link)
for q,url in match:
quality=int(q.replace('p',''))
if quality > max:
max=quality
playlink="%s|Cookie=%s" % (url,urllib.quote(setcookie))
elif 'http://vk.com/video_ext.php?oid' in linkvk:
url = regex_from_to(linkvk, 'src="', '"').replace('https://p.wplay.me/red.php?u=','').replace('&', '&') + '&hd=1'
net.set_cookies(cookie_jar)
link = net.http_GET(url, headers=header_dict).content.encode("utf-8").rstrip()
net.save_cookies(cookie_jar)
if 'url720":"' in link:
vidlinks = re.compile('url720":"(.+?)"').findall(link)
elif 'url480":"' in link:
vidlinks = re.compile('url480":"(.+?)"').findall(link)
elif 'url360":"' in link:
vidlinks = re.compile('url360":"(.+?)"').findall(link)
elif 'url240":"' in link:
vidlinks = re.compile('url240":"(.+?)"').findall(link)
else:
vidlinks = "removed"
for playlink in vidlinks:
playlink = playlink.replace('\/', '/')
elif 'http://www.youtube.com' in linkvk:
vidlink = regex_from_to(linkvk, 'src="http://www.youtube.com/embed/', 'wmode').replace('?', '')
vidlinks = "found"
playlink = ('plugin://plugin.video.youtube/?action=play_video&videoid=%s' % vidlink)
else:
if 'gorillavid.in' in url:
link = requests.get(url).text
playlink = regex_from_to(link, 'file: "', '"')
elif 'nowvideo' in url:
headers = {'Referer': hosturl, 'Host': 'embed.nowvideo.sx'}
link = requests.get(url, headers=headers).text
key = regex_from_to(link, 'var fkzd="', '"').replace('.', '%2E').replace('-', '%2D')
file = regex_from_to(link, 'flashvars.file="', '"')
linkurl = 'http://www.nowvideo.sx/api/player.api.php?cid=1&cid3=undefined&key=%s&user=undefined&file=%s&numOfErrors=0&pass=undefined&cid2=undefined' % (key, file)
link = open_url(linkurl)
playlink = regex_from_to(link, 'url=', '&title')
elif 'ishared' in url:
link = open_url(url).strip().replace('\n', '').replace('\t', '')
try:
playlink = regex_from_to(link, 'var zzzz = "', '"')
except:
findfile = regex_from_to(link, 'playlist:', 'type')
key = regex_from_to(findfile, 'file: ', ',')
playlink = regex_from_to(link, 'var ' + key + ' = "', '"')
elif 'vk.com' in url:
url = url.replace('https://p.wplay.me/red.php?u=','').replace('&', '&') + '&hd=1'
net.set_cookies(cookie_jar)
link = net.http_GET(url, headers=header_dict).content.encode("utf-8").rstrip()
net.save_cookies(cookie_jar)
if 'url720":"' in link:
vidlinks = re.compile('url720":"(.+?)"').findall(link)
elif 'url480":"' in link:
vidlinks = re.compile('url480":"(.+?)"').findall(link)
elif 'url360":"' in link:
vidlinks = re.compile('url360":"(.+?)"').findall(link)
elif 'url240":"' in link:
vidlinks = re.compile('url240":"(.+?)"').findall(link)
else:
vidlinks = "removed"
for playlink in vidlinks:
playlink = playlink.replace('\/', '/')
else:
validresolver = urlresolver.HostedMediaFile(url)
if validresolver:
playlink = urlresolver.resolve(url)
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
playlist.clear()
listitem = xbmcgui.ListItem(showname + ' ' + name, iconImage=iconimage, thumbnailImage=iconimage)
playlist.add(playlink,listitem)
xbmcPlayer = xbmc.Player()
handle = str(sys.argv[1])
if handle != "-1":
listitem.setProperty("IsPlayable", "true")
xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, listitem)
else:
xbmcPlayer.play(playlist)
def add_favourite(name, url, iconimage, dir, text):
list_data = iconimage.replace('hhhh', 'http:')
splitdata = list_data.split('QQ')
name = splitdata[0]
name = name.replace('->-', ' & ')
thumb = splitdata[2]
add_to_list(list_data, dir)
notification(name, "[COLOR lime]" + text + "[/COLOR]", '5000', thumb)
def remove_from_favourites(name, url, iconimage, dir, text):
list_data = iconimage.replace('hhhh', 'http:')
splitdata = list_data.split('QQ')
name = splitdata[0]
name = name.replace('->-', ' & ')
thumb = splitdata[2]
remove_from_list(list_data, dir)
notification(name, "[COLOR orange]" + text + "[/COLOR]", '5000', thumb)
def create_tv_show_strm_files(name, url, iconimage, ntf):
dialog = xbmcgui.Dialog()
n = name
u = url
l = iconimage
list_data = iconimage.replace('hhhh', 'http:')
splitdata = iconimage.split('QQ')
name = splitdata[0]
name = name.replace('->-', ' & ')
thumbmain = splitdata[2]
tv_show_path = create_directory(TV_PATH, name)
net.set_cookies(cookie_jar)
link = net.http_GET(url).content.encode("utf-8").rstrip()
net.save_cookies(cookie_jar)
seasonlist = regex_get_all(link.replace('&', 'and'), '<div class="csseason', '</div> </div> <div class=')
for s in seasonlist:
sname = regex_from_to(s, '">', '</div>')
if sname.startswith('Season 0'):
snum = sname.replace('Season 0', '')
else:
snum = sname.replace('Season ', '')
season_path = create_directory(tv_show_path, str(snum))
eplist = regex_get_all(str(s), '<a', '</a>')
for e in eplist:
episodes = re.compile('<a title="(.+?)" href="(.+?)"> <div class="(.+?)data-original="(.+?)"(.+?)nseasnumep"> (.+?) <br(.+?)>(.+?) </div> </div> </a>').findall(e)
for epname, url, a, thumb, b, snum, c, epnum in episodes:
url = 'http://www.watch-tvseries.net' + url
epnum = epnum.replace('episode ', 'E')
snum = snum.replace('season ', 'S')
sn = snum.replace('S0','')
if epnum.startswith('E0'):
en = epnum.replace('E0', '')
else:
en = epnum.replace('E', '')
ep = "%sx%s" % (sn, en)
display = "%s %s" % (ep, epname)
create_strm_file(display, url, "5", season_path, thumbmain, name)
if ntf == "true" and ENABLE_SUBS:
if dialog.yesno("Subscribe?", 'Do you want TV[COLOR lime]4[/COLOR]ME to automatically add new', '[COLOR gold]' + name + '[/COLOR]' + ' episodes when available?'):
add_favourite(n, u, l, SUB, "Added to Library/Subscribed")
else:
notification(name, "[COLOR lime]Added to Library[/COLOR]", '5000', thumb)
if xbmc.getCondVisibility('Library.IsScanningVideo') == False:
xbmc.executebuiltin('UpdateLibrary(video)')
def remove_tv_show_strm_files(name, url, iconimage, dir_path):
dialog = xbmcgui.Dialog()
splitname = iconimage.split('QQ')
rname = splitname[0]
rname = rname.replace('->-', ' & ')
try:
path = os.path.join(dir_path, str(rname))
shutil.rmtree(path)
remove_from_favourites(name, url, iconimage, SUB, "Removed from Library/Unsubscribed")
if xbmc.getCondVisibility('Library.IsScanningVideo') == False:
if dialog.yesno("Clean Library?", '', 'Do you want clean the library now?'):
xbmc.executebuiltin('CleanLibrary(video)')
except:
xbmc.log("[TV4ME] Was unable to remove TV show: %s" % (name))
def create_directory(dir_path, dir_name=None):
if dir_name:
dir_path = os.path.join(dir_path, dir_name)
dir_path = dir_path.strip()
if not os.path.exists(dir_path):
os.makedirs(dir_path)
return dir_path
def create_file(dir_path, file_name=None):
if file_name:
file_path = os.path.join(dir_path, file_name)
file_path = file_path.strip()
if not os.path.exists(file_path):
f = open(file_path, 'w')
f.write('')
f.close()
return file_path
def create_strm_file(name, url, mode, dir_path, iconimage, showname):
try:
strm_string = create_url(name, mode, url=url, iconimage=iconimage, showname=showname)
filename = clean_file_name("%s.strm" % name)
path = os.path.join(dir_path, filename)
if not os.path.exists(path):
stream_file = open(path, 'w')
stream_file.write(strm_string)
stream_file.close()
except:
xbmc.log("[TV4ME] Error while creating strm file for : " + name)
def create_url(name, mode, url, iconimage, showname):
name = urllib.quote(str(name))
data = urllib.quote(str(url))
iconimage = urllib.quote(str(iconimage))
showname = urllib.quote(str(showname))
mode = str(mode)
url = sys.argv[0] + '?name=%s&url=%s&mode=%s&iconimage=%s&showname=%s' % (name, data, mode, iconimage, showname)
return url
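# Illustrative output (hypothetical values): create_url('1x01 Pilot', '5',
# url='http://example/ep', iconimage='thumb.jpg', showname='Show') yields
# "<plugin-base>?name=1x01%20Pilot&url=http%3A//example/ep&mode=5&iconimage=thumb.jpg&showname=Show",
# which is what gets written into each .strm file.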
def get_subscriptions():
try:
if os.path.isfile(SUB):
s = read_from_file(SUB)
search_list = s.split('\n')
for list in search_list:
if list != '':
list1 = list.split('QQ')
title = list1[0]
url = list1[1]
thumb = list1[2]
create_tv_show_strm_files(title, url, list, "false")
except:
xbmc.log("[TV4ME] Failed to fetch subscription")
def regex_from_to(text, from_string, to_string, excluding=True):
if excluding:
r = re.search("(?i)" + from_string + "([\S\s]+?)" + to_string, text).group(1)
else:
r = re.search("(?i)(" + from_string + "[\S\s]+?" + to_string + ")", text).group(1)
return r
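# e.g. regex_from_to('<b>hit</b>', '<b>', '</b>') returns 'hit';
# with excluding=False the delimiters are kept: '<b>hit</b>'.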
def regex_get_all(text, start_with, end_with):
r = re.findall("(?i)(" + start_with + "[\S\s]+?" + end_with + ")", text)
return r
def strip_text(r, f, t, excluding=True):
r = re.search("(?i)" + f + "([\S\s]+?)" + t, r).group(1)
return r
def find_list(query, search_file):
try:
content = read_from_file(search_file)
lines = content.split('\n')
index = lines.index(query)
return index
except:
return -1
def add_to_list(list, file):
if find_list(list, file) >= 0:
return
if os.path.isfile(file):
content = read_from_file(file)
else:
content = ""
lines = content.split('\n')
s = '%s\n' % list
for line in lines:
if len(line) > 0:
s = s + line + '\n'
write_to_file(file, s)
xbmc.executebuiltin("Container.Refresh")
def remove_from_list(list, file):
index = find_list(list, file)
if index >= 0:
content = read_from_file(file)
lines = content.split('\n')
lines.pop(index)
s = ''
for line in lines:
if len(line) > 0:
s = s + line + '\n'
write_to_file(file, s)
xbmc.executebuiltin("Container.Refresh")
def write_to_file(path, content, append=False, silent=False):
try:
if append:
f = open(path, 'a')
else:
f = open(path, 'w')
f.write(content)
f.close()
return True
except:
if not silent:
print("Could not write to " + path)
return False
def read_from_file(path, silent=False):
try:
f = open(path, 'r')
r = f.read()
f.close()
return str(r)
except:
if not silent:
print("Could not read from " + path)
return None
def wait_dl_only(time_to_wait, title):
print 'Waiting ' + str(time_to_wait) + ' secs'
progress = xbmcgui.DialogProgress()
progress.create(title)
secs = 0
percent = 0
cancelled = False
while secs < time_to_wait:
secs = secs + 1
percent = int((100 * secs) / time_to_wait)
secs_left = str((time_to_wait - secs))
remaining_display = ' waiting ' + secs_left + ' seconds for download to start...'
progress.update(percent, remaining_display)
xbmc.sleep(1000)
if (progress.iscanceled()):
cancelled = True
break
if cancelled == True:
print 'wait cancelled'
return False
else:
print 'Done waiting'
return True
def notification(title, message, ms, nart):
xbmc.executebuiltin("XBMC.notification(" + title + "," + message + "," + ms + "," + nart + ")")
def get_meta(name,types=None,year=None,season=None,episode=None,imdb=None,episode_title=None):
if 'tvshow' in types:
meta = metainfo.get_meta('tvshow',name,'','','')
if 'episode' in types:
meta = metainfo.get_episode_meta(name, '', season, episode)
infoLabels = {'rating': meta['rating'],'genre': meta['genre'],'mpaa':"rated %s"%meta['mpaa'],'plot': meta['plot'],'title': meta['title'],'cover_url': meta['cover_url'],'fanart': meta['backdrop_url'],'Episode': meta['episode'],'Aired': meta['premiered']}
return infoLabels
def remove_list_duplicates(list_to_check):
temp_set = {}
map(temp_set.__setitem__, list_to_check, [])
return temp_set.keys()
def setView(content, viewType):
if content:
xbmcplugin.setContent(int(sys.argv[1]), content)
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
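# e.g. with sys.argv[2] == '?url=http%3A//example&mode=3', get_params()
# returns {'url': 'http%3A//example', 'mode': '3'} -- values stay
# URL-quoted, and the urllib.unquote_plus() calls below decode them.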
def addDir(name,url,mode,iconimage,list,description,infoLabels=None):
suffix = ""
suffix2 = ""
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+str(iconimage)+"&list="+str(list)+"&description="+str(description)
ok=True
contextMenuItems = []
if name == "My Subscriptions":
contextMenuItems.append(("[COLOR cyan]Refresh Subscriptions[/COLOR]",'XBMC.RunPlugin(%s?name=%s&url=%s&mode=17&list=%s)'%(sys.argv[0], name, url, str(list).replace('http:','hhhh'))))
if description == "sh":
if find_list(list, FAV) < 0:
suffix = ""
contextMenuItems.append(("[COLOR lime]Add to TV4ME Favourites[/COLOR]",'XBMC.RunPlugin(%s?name=%s&url=%s&mode=11&list=%s)'%(sys.argv[0], name, url, str(list).replace('http:','hhhh'))))
else:
suffix = ' [COLOR lime]+[/COLOR]'
contextMenuItems.append(("[COLOR orange]Remove from TV4ME Favourites[/COLOR]",'XBMC.RunPlugin(%s?name=%s&url=%s&mode=13&list=%s)'%(sys.argv[0], name, url, str(list).replace('http:','hhhh'))))
if find_list(list, SUB) < 0:
suffix2 = ""
contextMenuItems.append(("[COLOR lime]Add to XBMC Library/Subscribe[/COLOR]",'XBMC.RunPlugin(%s?name=%s&url=%s&mode=14&list=%s)'%(sys.argv[0], name, url, str(list).replace('http:','hhhh'))))
else:
suffix2 = ' [COLOR cyan][s][/COLOR]'
contextMenuItems.append(("[COLOR orange]Remove from XBMC Library[/COLOR]",'XBMC.RunPlugin(%s?name=%s&url=%s&mode=15&list=%s)'%(sys.argv[0], name, url, str(list).replace('http:','hhhh'))))
liz=xbmcgui.ListItem(name + suffix + suffix2, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels=infoLabels)
try:
liz.setProperty( "fanart_image", infoLabels['fanart'] )
except:
liz.setProperty('fanart_image', fanart )
liz.addContextMenuItems(contextMenuItems, replaceItems=False)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
def addDirPlayable(name,url,mode,iconimage,showname,infoLabels=None):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+urllib.quote_plus(iconimage)+"&showname="+urllib.quote_plus(showname)
ok=True
#contextMenuItems = []
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels=infoLabels)
try:
liz.setProperty( "fanart_image", infoLabels['fanart'] )
except:
liz.setProperty('fanart_image', fanart )
#contextMenuItems.append(("[COLOR red]Report an error[/COLOR]",'XBMC.RunPlugin(%s?name=%s&url=%s&mode=10&showname=%s)'%(sys.argv[0],name, url, showname)))
#liz.addContextMenuItems(contextMenuItems, replaceItems=False)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
return ok
params=get_params()
url=None
name=None
mode=None
iconimage=None
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
try:
mode=int(params["mode"])
except:
pass
try:
iconimage=urllib.unquote_plus(params["iconimage"])
except:
pass
try:
start=urllib.unquote_plus(params["start"])
except:
pass
try:
list=urllib.unquote_plus(params["list"])
except:
pass
try:
showname=urllib.unquote_plus(params["showname"])
except:
pass
try:
description=urllib.unquote_plus(params["description"])
except:
pass
if mode==None or url==None or len(url)<1:
CATEGORIES(name)
elif mode == 1:
latest_episodes(url)
elif mode==2:
shows(url)
elif mode==3:
tv_show(name, url, iconimage)
elif mode==4:
tv_show_episodes(name, list, iconimage, description)
elif mode==5:
play(name, url, iconimage.replace('hhhh', 'http:'), showname)
elif mode==6:
search()
elif mode==7:
grouped_shows(url)
elif mode == 8:
a_to_z(url)
elif mode == 9:
a_z_shows(name,url)
elif mode == 11:
add_favourite(name, url, list, FAV, "Added to Favourites")
elif mode == 12:
favourites()
elif mode == 13:
remove_from_favourites(name, url, list, FAV, "Removed from Favourites")
elif mode == 14:
create_tv_show_strm_files(name, url, list, "true")
elif mode == 15:
remove_tv_show_strm_files(name, url, list, TV_PATH)
elif mode == 16:
subscriptions()
elif mode == 17:
get_subscriptions()
elif mode == 18:
search_show(name)
elif mode==19:
play_videos(name, url, iconimage.replace('hhhh', 'http:'), showname)
elif mode==20:
play(name, url, iconimage.replace('hhhh', 'http:'), list)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
|
odicraig/kodi2odi
|
addons/plugin.video.tv4me/default.py
|
Python
|
gpl-3.0
| 37,025
|
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from scipy.stats import norm
import os
from astropy.io import fits
import sys
from sklearn.mixture import GMM
from pandas import DataFrame
import legacyanalysis.decals_sim_priors as priors
# Globals
xyrange=dict(x_star=[-0.5,2.2],\
y_star=[-0.3,2.],\
x_elg=[-0.5,2.2],\
y_elg=[-0.3,2.],\
x_lrg= [0, 2.5],\
y_lrg= [-2, 6])
####
def getbic(X, ncomp=[3]):
'''Simple function to compute the Bayesian information criterion.'''
bic = [GMM(n_components=nc, covariance_type="full").fit(X).bic(X) for nc in ncomp]
#for ii in range(len(ncomp)):
# print(ncomp[ii], bic[ii])
return bic
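# Typical use (sketch): scan several component counts and keep the minimum,
# e.g. bic = getbic(Xall, ncomp=list(range(1, 6))); best_n = np.argmin(bic) + 1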
def qa_plot_MoG(Xall,ncomp=2, src='STAR',nsamp=10000,outdir='.',extra=False,append=''):
'''Build a color-color plot. Show the data on the left-hand panel and random draws from
the MoGs on the right-hand panel.'''
if src == 'STAR':
mog_file = 'legacypipe/data/star_colors_mog.fits'
xrange = xyrange['x_%s' % src.lower()]
yrange = xyrange['y_%s' % src.lower()]
xlab='r - z'
ylab='g - r'
elif src == 'ELG':
mog_file = 'legacypipe/data/elg_colors_mog.fits'
xrange = xyrange['x_%s' % src.lower()]
yrange = xyrange['y_%s' % src.lower()]
xlab='r - z'
ylab='g - r'
elif src == 'LRG':
mog_file = 'legacypipe/data/lrg_colors_mog.fits'
xrange = xyrange['x_%s' % src.lower()]
yrange = xyrange['y_%s' % src.lower()]
xlab='r - z'
ylab='r - w1'
else: raise ValueError('src=%s not supported' % src)
# Build MoG
if ncomp is None:
mog = priors._GaussianMixtureModel.load(mog_file)
else:
from sklearn.mixture import GMM
mog = GMM(n_components=ncomp, covariance_type="full").fit(Xall)
samp = mog.sample(n_samples=nsamp)
#if extra:
    #    # Higher accuracy sampling, but more time consuming and negligible improvement
# samp= mog.sample_full_pdf(nsamp)
fig, ax = plt.subplots(1, 3, sharey=True,figsize=(12, 4))
ax[0].plot(Xall[:,0],Xall[:,1], 'o', c='b', markersize=3)
priors.add_MoG_curves(ax[1], mog.means_, mog.covars_, mog.weights_)
ax[2].plot(samp[:,0], samp[:,1], 'o', c='b', markersize=3)
# Add ts box
if src != 'STAR':
ts= priors.TSBox(src=src)
for i in [0,2]:
ts.add_ts_box(ax[i], xlim=xrange,ylim=yrange)
for i,title in zip(range(3),['Data','Gaussian Mixture','%d Draws' % nsamp]):
xlab1=ax[i].set_xlabel(xlab)
ax[i].set_xlim(xrange)
ax[i].set_ylim(yrange)
ti=ax[i].set_title(title)
ylab1=ax[0].set_ylabel(ylab)
fig.subplots_adjust(wspace=0) #, hspace=0.1)
for i in range(2):
priors.rm_last_ticklabel(ax[i])
name= os.path.join(outdir,'qa-mog-sample-%s%s.png' % (src,append))
print('Writing {}'.format(name))
plt.savefig(name, bbox_extra_artists=[xlab1,ylab1,ti], bbox_inches='tight',dpi=150)
plt.close()
def qa_plot_BIC(Xall,src='STAR',append=''):
'''Number componentes from Bayesian Information Criterion'''
ncomp = np.arange(1, 6)
bic = getbic(Xall, ncomp)
fig, ax = plt.subplots(1, 1, figsize=(8,5))
ax.plot(ncomp, bic, marker='s', ls='-')
ax.set_xlim((0, 10))
ax.set_xlabel('Number of Gaussian Components')
ax.set_ylabel('Bayesian Information Criterion')
if src == 'STAR':
plt.legend(labels=['%s g-r, r-z colors' % src])
elif src == 'ELG':
plt.legend(labels=['%s g-r, r-z colors' % src])
elif src == 'LRG':
plt.legend(labels=['%s r-w1, r-z colors' % src])
    else: raise ValueError('src=%s not supported' % src)
plt.tight_layout()
name='qa-mog-bic-%s%s.png' % (src,append)
print('Writing {}'.format(name))
plt.savefig(name)
plt.close()
def create_joinplot(df,xkey,ykey,xlab,ylab,xlim,ylim,color,src='ELG'):
import seaborn as sns
g = sns.JointGrid(x=xkey, y=ykey, data=df, xlim=xlim, ylim=ylim)
g = g.plot_joint(plt.scatter, color=color, edgecolor="white")
g = g.plot_marginals(sns.distplot, kde=False, color=color)
g = g.set_axis_labels(xlab,ylab)
def f_cut(junk1,junk2):
return 0
g = g.annotate(f_cut, template="Cut to r50 > {val:d}",\
stat="", loc="upper right", fontsize=12)
name='qa-priors-%s-%s-%s.png' % (xkey,ykey,src)
print('Writing {}'.format(name))
g = g.savefig(name)
def qa_plot_Priors(d=None,src='ELG'):
'''d -- dictionary of morphology params'''
import seaborn as sns
assert(d is not None)
# JoinGrid needs pandas DataFrame
df= DataFrame(d)
if src == 'ELG':
grrange = (-0.2, 2.0)
rzrange = (-0.4, 2.5)
    else: raise ValueError('src=%s not supported' % src)
col = sns.color_palette()
# Put each in sep plot window
color=col[0]
for xkey,ykey,xlab,ylab,xlim,ylim in zip(\
['rz','r50','ba'],['gr','n','n'],\
['r-z','Half-light radius (arcsec)','Axis ratio (b/a)'], ['g-r','Sersic n','Sersic n'],\
[rzrange,(0,1.5),(0,1)], [grrange,(0,4),(0,4)]):
create_joinplot(df,xkey,ykey,xlab,ylab,xlim,ylim,color,src=src)
if __name__ == "__main__":
# Stars
Xall= priors.star_data()
qa_plot_BIC(Xall,src='STAR')
qa_plot_MoG(Xall,ncomp=6, src='STAR')
# Save Model
mog = GMM(n_components=6, covariance_type="full").fit(Xall)
star_mogfile= 'legacypipe/data/star_colors_mog.fits'
if os.path.exists(star_mogfile):
        print('STAR MoG exists, not overwriting: %s' % star_mogfile)
else:
print('Writing {}'.format(star_mogfile))
# with 6 comp, 6th is noise, 1-5 are physical
priors._GaussianMixtureModel.save(mog, star_mogfile,index=np.arange(5))
qa_plot_MoG(Xall,ncomp=None, src='STAR',append='saved')
# ELGs
# FDR data
Xall,cuts= priors.elg_data_for_FDR()
priors.plot_FDR(Xall,cuts,src='ELG')
b= cuts['any_elg']
qa_plot_BIC(Xall[b,:], src='ELG',append='_FDR')
qa_plot_MoG(Xall[b,:],ncomp=6, src='ELG',append='_FDR') #,extra=True)
# Fit template spectra data
Xall,cuts, morph= priors.elg_data()
qa_plot_BIC(Xall, src='ELG',append='_synth')
qa_plot_MoG(Xall,ncomp=3, src='ELG',append='_synth') #,extra=True)
b= cuts['has_morph']
qa_plot_BIC(Xall[b,:], src='ELG',append='_synth+morph')
qa_plot_MoG(Xall[b,:],ncomp=4, src='ELG',append='_synth+morph') #,extra=True)
# only have priors for morph cut
qa_plot_Priors(d=morph,src='ELG')
# Save 3 component synth MoG
mog = GMM(n_components=3, covariance_type="full").fit(Xall)
elg_mogfile='legacypipe/data/elg_colors_mog.fits'
if os.path.exists(elg_mogfile):
        print('ELG MoG exists, not overwriting: %s' % elg_mogfile)
else:
print('Writing {}'.format(elg_mogfile))
priors._GaussianMixtureModel.save(mog, elg_mogfile)
qa_plot_MoG(Xall,ncomp=None, src='ELG',append='saved')
# LRGs
    Xall,cuts= priors.lrg_data_for_FDR()  # assuming this helper lives in priors, like elg_data_for_FDR above
priors.plot_FDR(Xall,cuts,src='LRG')
b= cuts['lrg']
qa_plot_BIC(Xall[b,:], src='LRG')
qa_plot_MoG(Xall[b,:], ncomp=2,src='LRG') #,extra=True)
# Save 2 comp model
b= cuts['lrg']
mog = GMM(n_components=2, covariance_type="full").fit(Xall[b,:])
lrg_mogfile= 'legacypipe/data/lrg_colors_mog.fits'
if os.path.exists(lrg_mogfile):
        print('LRG MoG exists, not overwriting: %s' % lrg_mogfile)
else:
print('Writing {}'.format(lrg_mogfile))
priors._GaussianMixtureModel.save(mog, lrg_mogfile)
qa_plot_MoG(Xall,ncomp=None, src='LRG',append='saved')
print('done')
|
legacysurvey/pipeline
|
py/obiwan/decals_sim_priors_plots.py
|
Python
|
gpl-2.0
| 7,697
|
import flask
import gzip
def generateResponse(token, data = None):
"""
Return a flask response with required headers for osu! client, token and gzip compressed data
token -- user token
data -- plain response body
return -- flask response
"""
    if data is None:
        data = b''  # tolerate the documented optional body
    resp = flask.Response(gzip.compress(data, 6))
resp.headers['cho-token'] = token
resp.headers['cho-protocol'] = '19'
resp.headers['Keep-Alive'] = 'timeout=5, max=100'
resp.headers['Connection'] = 'keep-alive'
resp.headers['Content-Type'] = 'text/html; charset=UTF-8'
resp.headers['Vary'] = 'Accept-Encoding'
resp.headers['Content-Encoding'] = 'gzip'
return resp
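# Minimal usage sketch (hypothetical token): generateResponse('abc123', b'\x00')
# -- note that `data` must be bytes, since gzip.compress() rejects str.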
def HTMLResponse():
"""Return HTML bancho meme response"""
html = "<html><head><title>UHINF?!</title><style type='text/css'>body{width:30%}</style></head><body><pre>"
html += "<marquee style='white-space:pre;'><br>"
html += " .. o .<br>"
html += " o.o o . o<br>"
html += " oo...<br>"
html += " __[]__<br>"
html += " phwr--> _\\:D/_/o_o_o_|__ <span style=\"font-family: 'Comic Sans MS'; font-size: 8pt;\">u wot m8</span><br>"
html += " \\\"\"\"\"\"\"\"\"\"\"\"\"\"\"/<br>"
html += " \\ . .. .. . /<br>"
html += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^<br>"
html += "</marquee></pre></body></html>"
return html
|
RlSEN/bannedcho
|
c.ppy.sh/responseHelper.py
|
Python
|
gpl-3.0
| 1,381
|
from __future__ import unicode_literals
import collections
from collections import OrderedDict
from django.utils.encoding import force_text
from rest_framework.compat import unicode_to_repr
from rest_framework.utils import json
class ReturnDict(OrderedDict):
"""
Return object from `serializer.data` for the `Serializer` class.
Includes a backlink to the serializer instance for renderers
to use if they need richer field information.
"""
def __init__(self, *args, **kwargs):
self.serializer = kwargs.pop('serializer')
super(ReturnDict, self).__init__(*args, **kwargs)
def copy(self):
return ReturnDict(self, serializer=self.serializer)
def __repr__(self):
return dict.__repr__(self)
def __reduce__(self):
# Pickling these objects will drop the .serializer backlink,
# but preserve the raw data.
return (dict, (dict(self),))
class ReturnList(list):
"""
Return object from `serializer.data` for the `SerializerList` class.
Includes a backlink to the serializer instance for renderers
to use if they need richer field information.
"""
def __init__(self, *args, **kwargs):
self.serializer = kwargs.pop('serializer')
super(ReturnList, self).__init__(*args, **kwargs)
def __repr__(self):
return list.__repr__(self)
def __reduce__(self):
# Pickling these objects will drop the .serializer backlink,
# but preserve the raw data.
return (list, (list(self),))
class BoundField(object):
"""
A field object that also includes `.value` and `.error` properties.
Returned when iterating over a serializer instance,
providing an API similar to Django forms and form fields.
"""
def __init__(self, field, value, errors, prefix=''):
self._field = field
self._prefix = prefix
self.value = value
self.errors = errors
self.name = prefix + self.field_name
def __getattr__(self, attr_name):
return getattr(self._field, attr_name)
@property
def _proxy_class(self):
return self._field.__class__
def __repr__(self):
return unicode_to_repr('<%s value=%s errors=%s>' % (
self.__class__.__name__, self.value, self.errors
))
def as_form_field(self):
value = '' if (self.value is None or self.value is False) else self.value
return self.__class__(self._field, value, self.errors, self._prefix)
class JSONBoundField(BoundField):
def as_form_field(self):
value = self.value
# When HTML form input is used and the input is not valid
# value will be a JSONString, rather than a JSON primitive.
if not getattr(value, 'is_json_string', False):
try:
value = json.dumps(self.value, sort_keys=True, indent=4)
except (TypeError, ValueError):
pass
return self.__class__(self._field, value, self.errors, self._prefix)
class NestedBoundField(BoundField):
"""
This `BoundField` additionally implements __iter__ and __getitem__
in order to support nested bound fields. This class is the type of
`BoundField` that is used for serializer fields.
"""
def __init__(self, field, value, errors, prefix=''):
        if value is None or value == '':
value = {}
super(NestedBoundField, self).__init__(field, value, errors, prefix)
def __iter__(self):
for field in self.fields.values():
yield self[field.field_name]
def __getitem__(self, key):
field = self.fields[key]
value = self.value.get(key) if self.value else None
error = self.errors.get(key) if isinstance(self.errors, dict) else None
if hasattr(field, 'fields'):
return NestedBoundField(field, value, error, prefix=self.name + '.')
return BoundField(field, value, error, prefix=self.name + '.')
def as_form_field(self):
values = {}
for key, value in self.value.items():
if isinstance(value, (list, dict)):
values[key] = value
else:
values[key] = '' if (value is None or value is False) else force_text(value)
return self.__class__(self._field, values, self.errors, self._prefix)
class BindingDict(collections.MutableMapping):
"""
This dict-like object is used to store fields on a serializer.
This ensures that whenever fields are added to the serializer we call
`field.bind()` so that the `field_name` and `parent` attributes
can be set correctly.
"""
def __init__(self, serializer):
self.serializer = serializer
self.fields = OrderedDict()
def __setitem__(self, key, field):
self.fields[key] = field
field.bind(field_name=key, parent=self.serializer)
def __getitem__(self, key):
return self.fields[key]
def __delitem__(self, key):
del self.fields[key]
def __iter__(self):
return iter(self.fields)
def __len__(self):
return len(self.fields)
def __repr__(self):
return dict.__repr__(self.fields)
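# Sketch of the binding behaviour described above (hypothetical field):
#   fields = BindingDict(serializer)
#   fields['name'] = CharField()  # __setitem__ calls bind(field_name='name', parent=serializer)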
|
interlegis/saap
|
config/rest_framework/utils/serializer_helpers.py
|
Python
|
gpl-3.0
| 5,187
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import io
import json
import os
from distutils.version import LooseVersion
from cms import __version__ as cms_version
from cms.toolbar.utils import get_toolbar_from_request
from django import template
from django.conf import settings
from django.core.cache import caches
from django.template.exceptions import TemplateDoesNotExist
from django.contrib.staticfiles import finders
from django.utils.safestring import mark_safe
from classytags.arguments import Argument
from classytags.core import Options, Tag
register = template.Library()
CMS_LT_3_4 = LooseVersion(cms_version) < LooseVersion('3.5')
class StrideRenderer(Tag):
"""
Render the serialized content of a placeholder field using the full cascade of plugins.
{% render_cascade "cascade-data.json" %}
Keyword arguments:
datafile -- Filename containing the cascade tree. Must be file locatable by Django's
static file finders.
"""
name = 'render_cascade'
options = Options(
Argument('datafile'),
)
def render_tag(self, context, datafile):
from sekizai.helpers import get_varname
from cmsplugin_cascade.strides import StrideContentRenderer
jsonfile = finders.find(datafile)
if not jsonfile:
raise IOError("Unable to find file: {}".format(datafile))
with io.open(jsonfile) as fp:
tree_data = json.load(fp)
content_renderer = StrideContentRenderer(context['request'])
with context.push(cms_content_renderer=content_renderer):
content = content_renderer.render_cascade(context, tree_data)
# some templates use Sekizai's templatetag `addtoblock` or `add_data`, which have to be re-added to the context
cache = caches['default']
if cache:
varname = get_varname()
SEKIZAI_CONTENT_HOLDER = cache.get_or_set(varname, context.get(varname))
if SEKIZAI_CONTENT_HOLDER:
for name in SEKIZAI_CONTENT_HOLDER:
context[varname][name] = SEKIZAI_CONTENT_HOLDER[name]
return content
register.tag('render_cascade', StrideRenderer)
class RenderPlugin(Tag):
name = 'render_plugin'
options = Options(
Argument('plugin')
)
def render_tag(self, context, plugin):
if not plugin:
return ''
if CMS_LT_3_4:
content_renderer = context['cms_content_renderer']
content = content_renderer.render_plugin(
instance=plugin,
context=context,
editable=content_renderer.user_is_on_edit_mode(),
)
else:
toolbar = get_toolbar_from_request(context['request'])
if 'cms_renderer' in context.dicts[1]:
content_renderer=context.dicts[1]['cms_renderer']
elif 'cms_content_renderer' in context:
content_renderer=context['cms_content_renderer']
else:
content_renderer = toolbar.content_renderer
content = content_renderer.render_plugin(
instance=plugin,
context=context,
editable=toolbar.edit_mode_active,
)
return content
register.tag('render_plugin', RenderPlugin)
@register.simple_tag
def sphinx_docs_include(path):
filename = os.path.join(settings.SPHINX_DOCS_ROOT, path)
if not os.path.exists(filename):
raise TemplateDoesNotExist("'{path}' does not exist".format(path=path))
with io.open(filename) as fh:
return mark_safe(fh.read())
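# Template usage sketch (hypothetical path):
#   {% sphinx_docs_include "reference/index.html" %}
# The path is resolved against settings.SPHINX_DOCS_ROOT and must exist.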
|
haricot/djangocms-bs4forcascade
|
cmsplugin_bs4forcascade/templatetags/cascade_tags.py
|
Python
|
mit
| 3,625
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import time
from odoo import api, fields, models
class ProductProduct(models.Model):
_inherit = "product.product"
date_from = fields.Date(compute='_compute_product_margin_fields_values', string='Margin Date From')
date_to = fields.Date(compute='_compute_product_margin_fields_values', string='Margin Date To')
invoice_state = fields.Selection(compute='_compute_product_margin_fields_values',
selection=[
('paid', 'Paid'),
('open_paid', 'Open and Paid'),
('draft_open_paid', 'Draft, Open and Paid')
], string='Invoice State', readonly=True)
sale_avg_price = fields.Float(compute='_compute_product_margin_fields_values', string='Avg. Sale Unit Price',
help="Avg. Price in Customer Invoices.")
purchase_avg_price = fields.Float(compute='_compute_product_margin_fields_values', string='Avg. Purchase Unit Price',
help="Avg. Price in Vendor Bills ")
sale_num_invoiced = fields.Float(compute='_compute_product_margin_fields_values', string='# Invoiced in Sale',
help="Sum of Quantity in Customer Invoices")
purchase_num_invoiced = fields.Float(compute='_compute_product_margin_fields_values', string='# Invoiced in Purchase',
help="Sum of Quantity in Vendor Bills")
sales_gap = fields.Float(compute='_compute_product_margin_fields_values', string='Sales Gap',
help="Expected Sale - Turn Over")
purchase_gap = fields.Float(compute='_compute_product_margin_fields_values', string='Purchase Gap',
help="Normal Cost - Total Cost")
turnover = fields.Float(compute='_compute_product_margin_fields_values', string='Turnover',
help="Sum of Multiplication of Invoice price and quantity of Customer Invoices")
total_cost = fields.Float(compute='_compute_product_margin_fields_values', string='Total Cost',
help="Sum of Multiplication of Invoice price and quantity of Vendor Bills ")
sale_expected = fields.Float(compute='_compute_product_margin_fields_values', string='Expected Sale',
help="Sum of Multiplication of Sale Catalog price and quantity of Customer Invoices")
normal_cost = fields.Float(compute='_compute_product_margin_fields_values', string='Normal Cost',
help="Sum of Multiplication of Cost price and quantity of Vendor Bills")
total_margin = fields.Float(compute='_compute_product_margin_fields_values', string='Total Margin',
help="Turnover - Standard price")
expected_margin = fields.Float(compute='_compute_product_margin_fields_values', string='Expected Margin',
help="Expected Sale - Normal Cost")
total_margin_rate = fields.Float(compute='_compute_product_margin_fields_values', string='Total Margin Rate(%)',
help="Total margin * 100 / Turnover")
expected_margin_rate = fields.Float(compute='_compute_product_margin_fields_values', string='Expected Margin (%)',
help="Expected margin * 100 / Expected Sale")
@api.model
def read_group(self, domain, fields, groupby, offset=0, limit=None, orderby=False, lazy=True):
"""
Inherit read_group to calculate the sum of the non-stored fields, as it is not automatically done anymore through the XML.
"""
res = super(ProductProduct, self).read_group(domain, fields, groupby, offset=offset, limit=limit, orderby=orderby, lazy=lazy)
        fields_list = ['turnover', 'sale_avg_price', 'purchase_avg_price', 'sale_num_invoiced', 'purchase_num_invoiced',
                       'sales_gap', 'purchase_gap', 'total_cost', 'sale_expected', 'normal_cost', 'total_margin',
                       'expected_margin', 'total_margin_rate', 'expected_margin_rate']
if any(x in fields for x in fields_list):
# Calculate first for every product in which line it needs to be applied
re_ind = 0
prod_re = {}
tot_products = self.browse([])
for re in res:
if re.get('__domain'):
products = self.search(re['__domain'])
tot_products |= products
for prod in products:
prod_re[prod.id] = re_ind
re_ind += 1
            res_val = tot_products._compute_product_margin_fields_values(field_names=[x for x in fields if x in fields_list])
for key in res_val:
for l in res_val[key]:
re = res[prod_re[key]]
if re.get(l):
re[l] += res_val[key][l]
else:
re[l] = res_val[key][l]
return res
def _compute_product_margin_fields_values(self, field_names=None):
res = {}
if field_names is None:
field_names = []
for val in self:
res[val.id] = {}
date_from = self.env.context.get('date_from', time.strftime('%Y-01-01'))
date_to = self.env.context.get('date_to', time.strftime('%Y-12-31'))
invoice_state = self.env.context.get('invoice_state', 'open_paid')
res[val.id]['date_from'] = date_from
res[val.id]['date_to'] = date_to
res[val.id]['invoice_state'] = invoice_state
states = ()
payment_states = ()
if invoice_state == 'paid':
states = ('posted',)
payment_states = ('paid',)
elif invoice_state == 'open_paid':
states = ('posted',)
payment_states = ('not_paid', 'paid')
elif invoice_state == 'draft_open_paid':
states = ('posted', 'draft')
payment_states = ('not_paid', 'paid')
company_id = self.env.company.id
#Cost price is calculated afterwards as it is a property
self.env['account.move.line'].flush(['price_unit', 'quantity', 'balance', 'product_id', 'display_type'])
self.env['account.move'].flush(['state', 'payment_state', 'move_type', 'invoice_date', 'company_id'])
self.env['product.template'].flush(['list_price'])
sqlstr = """
WITH currency_rate AS ({})
SELECT
SUM(l.price_unit / (CASE COALESCE(cr.rate, 0) WHEN 0 THEN 1.0 ELSE cr.rate END) * l.quantity) / NULLIF(SUM(l.quantity),0) AS avg_unit_price,
SUM(l.quantity * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)) AS num_qty,
SUM(ABS(l.balance) * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)) AS total,
SUM(l.quantity * pt.list_price * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)) AS sale_expected
FROM account_move_line l
LEFT JOIN account_move i ON (l.move_id = i.id)
LEFT JOIN product_product product ON (product.id=l.product_id)
LEFT JOIN product_template pt ON (pt.id = product.product_tmpl_id)
left join currency_rate cr on
(cr.currency_id = i.currency_id and
cr.company_id = i.company_id and
cr.date_start <= COALESCE(i.invoice_date, NOW()) and
(cr.date_end IS NULL OR cr.date_end > COALESCE(i.invoice_date, NOW())))
WHERE l.product_id = %s
AND i.state IN %s
AND i.payment_state IN %s
AND i.move_type IN %s
AND i.invoice_date BETWEEN %s AND %s
AND i.company_id = %s
AND l.display_type IS NULL
AND l.exclude_from_invoice_tab = false
""".format(self.env['res.currency']._select_companies_rates())
invoice_types = ('out_invoice', 'out_refund')
self.env.cr.execute(sqlstr, (val.id, states, payment_states, invoice_types, date_from, date_to, company_id))
result = self.env.cr.fetchall()[0]
res[val.id]['sale_avg_price'] = result[0] and result[0] or 0.0
res[val.id]['sale_num_invoiced'] = result[1] and result[1] or 0.0
res[val.id]['turnover'] = result[2] and result[2] or 0.0
res[val.id]['sale_expected'] = result[3] and result[3] or 0.0
res[val.id]['sales_gap'] = res[val.id]['sale_expected'] - res[val.id]['turnover']
invoice_types = ('in_invoice', 'in_refund')
self.env.cr.execute(sqlstr, (val.id, states, payment_states, invoice_types, date_from, date_to, company_id))
result = self.env.cr.fetchall()[0]
res[val.id]['purchase_avg_price'] = result[0] and result[0] or 0.0
res[val.id]['purchase_num_invoiced'] = result[1] and result[1] or 0.0
res[val.id]['total_cost'] = result[2] and result[2] or 0.0
res[val.id]['normal_cost'] = val.standard_price * res[val.id]['purchase_num_invoiced']
res[val.id]['purchase_gap'] = res[val.id]['normal_cost'] - res[val.id]['total_cost']
res[val.id]['total_margin'] = res[val.id]['turnover'] - res[val.id]['total_cost']
res[val.id]['expected_margin'] = res[val.id]['sale_expected'] - res[val.id]['normal_cost']
res[val.id]['total_margin_rate'] = res[val.id]['turnover'] and res[val.id]['total_margin'] * 100 / res[val.id]['turnover'] or 0.0
res[val.id]['expected_margin_rate'] = res[val.id]['sale_expected'] and res[val.id]['expected_margin'] * 100 / res[val.id]['sale_expected'] or 0.0
for k, v in res[val.id].items():
setattr(val, k, v)
return res
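# A minimal usage sketch (ids and context values are hypothetical, not part of this module):
#
#   products = env['product.product'].browse([42]).with_context(
#       date_from='2021-01-01', date_to='2021-12-31', invoice_state='open_paid')
#   margins = products._compute_product_margin_fields_values()
#   margins[42]['total_margin']   # turnover - total_cost over the selected period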
|
ygol/odoo
|
addons/product_margin/models/product_product.py
|
Python
|
agpl-3.0
| 9,711
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import openerp
import openerp.tools as tools
from openerp.osv import osv
from openerp.osv import fields
from openerp.tools.safe_eval import safe_eval as eval
from openerp import SUPERUSER_ID
class mail_group(osv.Model):
""" A mail_group is a collection of users sharing messages in a discussion
group. The group mechanics are based on the followers. """
_description = 'Discussion group'
_name = 'mail.group'
_mail_flat_thread = False
_inherit = ['mail.thread']
_inherits = {'mail.alias': 'alias_id'}
def _get_image(self, cr, uid, ids, name, args, context=None):
result = dict.fromkeys(ids, False)
for obj in self.browse(cr, uid, ids, context=context):
result[obj.id] = tools.image_get_resized_images(obj.image)
return result
def _set_image(self, cr, uid, id, name, value, args, context=None):
return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context)
_columns = {
'name': fields.char('Name', required=True, translate=True),
'description': fields.text('Description'),
'menu_id': fields.many2one('ir.ui.menu', string='Related Menu', required=True, ondelete="cascade"),
'public': fields.selection([('public', 'Public'), ('private', 'Private'), ('groups', 'Selected Group Only')], 'Privacy', required=True,
            help='This group is visible to non-members. \
Invisible groups can add members through the invite button.'),
'group_public_id': fields.many2one('res.groups', string='Authorized Group'),
'group_ids': fields.many2many('res.groups', rel='mail_group_res_group_rel',
id1='mail_group_id', id2='groups_id', string='Auto Subscription',
help="Members of those groups will automatically added as followers. "\
"Note that they will be able to manage their subscription manually "\
"if necessary."),
# image: all image fields are base64 encoded and PIL-supported
'image': fields.binary("Photo",
help="This field holds the image used as photo for the group, limited to 1024x1024px."),
'image_medium': fields.function(_get_image, fnct_inv=_set_image,
string="Medium-sized photo", type="binary", multi="_get_image",
store={
'mail.group': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
},
help="Medium-sized photo of the group. It is automatically "\
"resized as a 128x128px image, with aspect ratio preserved. "\
"Use this field in form views or some kanban views."),
'image_small': fields.function(_get_image, fnct_inv=_set_image,
string="Small-sized photo", type="binary", multi="_get_image",
store={
'mail.group': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
},
help="Small-sized photo of the group. It is automatically "\
"resized as a 64x64px image, with aspect ratio preserved. "\
"Use this field anywhere a small image is required."),
'alias_id': fields.many2one('mail.alias', 'Alias', ondelete="restrict", required=True,
help="The email address associated with this group. New emails received will automatically "
"create new topics."),
}
def _get_default_employee_group(self, cr, uid, context=None):
ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'group_user')
return ref and ref[1] or False
def _get_default_image(self, cr, uid, context=None):
image_path = openerp.modules.get_module_resource('mail', 'static/src/img', 'groupdefault.png')
return tools.image_resize_image_big(open(image_path, 'rb').read().encode('base64'))
_defaults = {
'public': 'groups',
'group_public_id': _get_default_employee_group,
'image': _get_default_image,
}
def _generate_header_description(self, cr, uid, group, context=None):
header = ''
if group.description:
header = '%s' % group.description
if group.alias_id and group.alias_name and group.alias_domain:
if header:
header = '%s<br/>' % header
return '%sGroup email gateway: %s@%s' % (header, group.alias_name, group.alias_domain)
return header
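    # Example output (hypothetical values): with description "R&D news", alias "rd"
    # and domain "example.com" this returns
    # 'R&D news<br/>Group email gateway: rd@example.com'.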
def _subscribe_users(self, cr, uid, ids, context=None):
for mail_group in self.browse(cr, uid, ids, context=context):
partner_ids = []
for group in mail_group.group_ids:
partner_ids += [user.partner_id.id for user in group.users]
self.message_subscribe(cr, uid, ids, partner_ids, context=context)
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
# get parent menu
menu_parent = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'mail', 'mail_group_root')
menu_parent = menu_parent and menu_parent[1] or False
# Create menu id
mobj = self.pool.get('ir.ui.menu')
menu_id = mobj.create(cr, SUPERUSER_ID, {'name': vals['name'], 'parent_id': menu_parent}, context=context)
vals['menu_id'] = menu_id
# Create group and alias
create_context = dict(context, alias_model_name=self._name, alias_parent_model_name=self._name, mail_create_nolog=True)
mail_group_id = super(mail_group, self).create(cr, uid, vals, context=create_context)
group = self.browse(cr, uid, mail_group_id, context=context)
self.pool.get('mail.alias').write(cr, uid, [group.alias_id.id], {"alias_force_thread_id": mail_group_id, 'alias_parent_thread_id': mail_group_id}, context)
group = self.browse(cr, uid, mail_group_id, context=context)
# Create client action for this group and link the menu to it
ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'mail', 'action_mail_group_feeds')
if ref:
search_ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'mail', 'view_message_search')
params = {
'search_view_id': search_ref and search_ref[1] or False,
'domain': [
('model', '=', 'mail.group'),
('res_id', '=', mail_group_id),
],
'context': {
'default_model': 'mail.group',
'default_res_id': mail_group_id,
},
'res_model': 'mail.message',
'thread_level': 1,
'header_description': self._generate_header_description(cr, uid, group, context=context),
'view_mailbox': True,
'compose_placeholder': 'Send a message to the group',
}
cobj = self.pool.get('ir.actions.client')
newref = cobj.copy(cr, SUPERUSER_ID, ref[1], default={'params': str(params), 'name': vals['name']}, context=context)
mobj.write(cr, SUPERUSER_ID, menu_id, {'action': 'ir.actions.client,' + str(newref), 'mail_group_id': mail_group_id}, context=context)
if vals.get('group_ids'):
self._subscribe_users(cr, uid, [mail_group_id], context=context)
return mail_group_id
def unlink(self, cr, uid, ids, context=None):
groups = self.browse(cr, uid, ids, context=context)
# Cascade-delete mail aliases as well, as they should not exist without the mail group.
mail_alias = self.pool.get('mail.alias')
alias_ids = [group.alias_id.id for group in groups if group.alias_id]
# Delete mail_group
res = super(mail_group, self).unlink(cr, uid, ids, context=context)
# Delete alias
mail_alias.unlink(cr, SUPERUSER_ID, alias_ids, context=context)
# Cascade-delete menu entries as well
self.pool.get('ir.ui.menu').unlink(cr, SUPERUSER_ID, [group.menu_id.id for group in groups if group.menu_id], context=context)
return res
def write(self, cr, uid, ids, vals, context=None):
result = super(mail_group, self).write(cr, uid, ids, vals, context=context)
if vals.get('group_ids'):
self._subscribe_users(cr, uid, ids, context=context)
# if description, name or alias is changed: update client action
if vals.get('description') or vals.get('name') or vals.get('alias_id') or vals.get('alias_name'):
cobj = self.pool.get('ir.actions.client')
            for group in self.browse(cr, uid, ids, context=context):
                action = group.menu_id.action
                new_params = action.params
                new_params['header_description'] = self._generate_header_description(cr, uid, group, context=context)
                cobj.write(cr, SUPERUSER_ID, [action.id], {'params': str(new_params)}, context=context)
# if name is changed: update menu
if vals.get('name'):
mobj = self.pool.get('ir.ui.menu')
mobj.write(cr, SUPERUSER_ID,
[group.menu_id.id for group in self.browse(cr, uid, ids, context=context)],
{'name': vals.get('name')}, context=context)
return result
def action_follow(self, cr, uid, ids, context=None):
""" Wrapper because message_subscribe_users take a user_ids=None
that receive the context without the wrapper. """
return self.message_subscribe_users(cr, uid, ids, context=context)
def action_unfollow(self, cr, uid, ids, context=None):
""" Wrapper because message_unsubscribe_users take a user_ids=None
that receive the context without the wrapper. """
return self.message_unsubscribe_users(cr, uid, ids, context=context)
def get_suggested_thread(self, cr, uid, removed_suggested_threads=None, context=None):
"""Show the suggestion of groups if display_groups_suggestions if the
user perference allows it."""
user = self.pool.get('res.users').browse(cr, uid, uid, context)
if not user.display_groups_suggestions:
return []
else:
return super(mail_group, self).get_suggested_thread(cr, uid, removed_suggested_threads, context)
def message_get_email_values(self, cr, uid, id, notif_mail=None, context=None):
res = super(mail_group, self).message_get_email_values(cr, uid, id, notif_mail=notif_mail, context=context)
group = self.browse(cr, uid, id, context=context)
try:
headers = eval(res.get('headers', '{}'))
except Exception:
headers = {}
headers['Precedence'] = 'list'
if group.alias_domain and group.alias_name:
headers['List-Id'] = '%s.%s' % (group.alias_name, group.alias_domain)
headers['List-Post'] = '<mailto:%s@%s>' % (group.alias_name, group.alias_domain)
res['headers'] = '%s' % headers
return res
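# A sketch of the extra notification headers this builds (alias name and domain are
# hypothetical): {'Precedence': 'list', 'List-Id': 'sales.example.com',
# 'List-Post': '<mailto:sales@example.com>'}, serialized back into res['headers'] as a string.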
|
a0c/odoo
|
addons/mail/mail_group.py
|
Python
|
agpl-3.0
| 11,974
|
import sys
from PyQt4 import QtGui, QtCore
from mainwindow import Ui_MainWindow
import ntpath
from socket import *
import os
import time
ntpath.basename("a/b/c")
# Stack Overflow : http://stackoverflow.com/questions/8384737/python-extract-file-name-from-path-no-matter-what-the-os-path-format
def path_leaf(path):
path = str(path)
head, tail = ntpath.split(path)
return tail or ntpath.basename(head)
# End Stack Overflow
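# e.g. path_leaf('a/b/c') == 'c', and path_leaf('a/b/c/') == 'c' (trailing slash handled).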
class StartWindow(Ui_MainWindow):
def __init__(self, window):
super(StartWindow, self).__init__()
self.setupUi(window)
self.closeButton.clicked.connect(self.close_application)
window.setWindowTitle('EzFile - v0.0')
self.sW = window
# Define the model for the QListView
self.model = QtGui.QStringListModel()
self.targetList = QtCore.QStringList()
self.model.setStringList(self.targetList)
self.listView.setModel(self.model)
# Add the selection changed signal
sm = self.listView.selectionModel()
sm.selectionChanged.connect(self.targetSet)
# Initialize the IconScene
self.iconScene = QtGui.QGraphicsScene()
pixmap = QtGui.QPixmap('error.png')
pixmap = pixmap.scaled(30, 30, QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)
self.iconScene.addPixmap(pixmap)
self.graphicsView.setStyleSheet("background: transparent")
self.graphicsView.setScene(self.iconScene)
self.graphicsView.show()
# Initializing selected file to nothing
self.selectedFile = None
# Initializing animationTimer
self.animationTimer = QtCore.QTimer()
# Connect browse button to the open file dialog
self.browseButton.clicked.connect(self.openFileDialog)
# Check if connected each time line edit changes
self.lineEdit.editingFinished.connect(self.connect)
# Set statusLabel
self.statusLabel.setText('')
# Initializing the send button to disabled
# self.sendButton.setEnabled(False)
def devButt():
self.send()
self.sendButton.clicked.connect(devButt)
self.dev()
def targetSet(self, a, b):
indexList = a.indexes()
for l in indexList:
data = l.data().toString()
self.lineEdit.setText(data)
def close_application(self):
sys.exit()
def addNewTarget(self, target):
self.targetList.append(target)
self.model.setStringList(self.targetList)
def changeIcon(self, icon):
self.animationTimer.stop()
if icon:
iFile = 'success.png'
else:
iFile = 'error.png'
pixmap = QtGui.QPixmap(iFile)
pixmap = pixmap.scaled(30, 30, QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)
self.iconScene.clear()
self.iconScene.addPixmap(pixmap)
def openFileDialog(self):
name = QtGui.QFileDialog.getOpenFileName(self.sW, 'Open File')
self.selectedFile = name
filename = path_leaf(name)
self.fileLabel.setText(filename)
self.statusLabel.setText('0%')
self.progressBar.setValue(0)
print name
def startIconProgressWheel(self):
pixmap = QtGui.QPixmap('loading.png')
pixmap = pixmap.scaled(30, 30, QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)
self.iconScene.clear()
item = QtGui.QGraphicsPixmapItem(pixmap)
item.setTransformOriginPoint(QtCore.QPoint(15, 15))
def makeAnimation():
item.setRotation(item.rotation() + 5)
self.animationTimer = QtCore.QTimer()
self.animationTimer.timeout.connect(makeAnimation)
self.animationTimer.start(25)
self.iconScene.addItem(item)
def connect(self):
if self.lineEdit.text() != '':
s = socket(AF_INET, SOCK_STREAM)
host = self.lineEdit.text()
port = 5612
try:
s.connect((host, port))
s.close()
self.changeIcon(True)
return True
            except Exception:  # treat any failure (refused, unreachable, timeout) as not connected
self.changeIcon(False)
return False
def send(self):
        if self.selectedFile is not None and self.connect():
self.sendButton.setEnabled(False)
s = socket(AF_INET, SOCK_STREAM)
host = self.lineEdit.text()
port = 5612
s.connect((host, port))
# Open the file to be sent
f = open(self.selectedFile, 'rb')
# Send the filename first
print path_leaf(self.selectedFile) + '\n'
s.send(path_leaf(self.selectedFile) + '\n')
# Wait for server A-ACK
looper = True
while looper:
                l = s.recv(4096)
                if not l:
                    break  # connection closed before the server acknowledged
                if l.split()[0] == 'A-ACK':
                    looper = False
# Read file and sent while there is data
totalSize = os.path.getsize(self.selectedFile)
            totalSent = 4096  # pre-count the first 4096-byte block read just below
speed = 0
l = f.read(4096)
# n is the block number
# will do speed calculations for each y'th block
n = 0
y = 100
t = time.time()
totalSentLastTime = 0
while l:
n += 1
s.send(l)
l = f.read(4096)
totalSent += 4096
# Speed Calculations
if n % y == 0:
speed = (totalSent - totalSentLastTime) / (time.time() - t)
totalSentLastTime = totalSent
t = time.time()
speed = round(speed * (1.0 / pow(10, 3)))
# Percent Calculations
percent = (100.0 / totalSize) * totalSent
if percent > 100:
percent = 100
self.progressBar.setValue(percent)
self.statusLabel.setText(str(round(percent)) + '% , ' + str(speed) + ' kb/s')
# So hacky, maybe make a thread?
QtGui.QApplication.processEvents()
s.close()
self.sendButton.setEnabled(True)
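    # Wire protocol implemented by send() above: transmit "<filename>\n", wait for an
    # 'A-ACK' reply from the receiver, then stream the file in 4096-byte blocks until
    # EOF and close the socket.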
def dev(self):
self.addNewTarget("127.0.0.1")
self.addNewTarget("192.168.0.1")
self.lineEdit.setText('localhost')
self.connect()
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
MainWindow = QtGui.QMainWindow()
ui = StartWindow(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
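# A minimal receiver sketch matching the protocol above (hypothetical -- the real
# server lives elsewhere in the project):
#
#   srv = socket(AF_INET, SOCK_STREAM)
#   srv.bind(('', 5612)); srv.listen(1)
#   conn, _ = srv.accept()
#   name = conn.recv(4096).split('\n')[0]
#   conn.send('A-ACK')
#   with open(name, 'wb') as f:
#       data = conn.recv(4096)
#       while data:
#           f.write(data)
#           data = conn.recv(4096)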
|
WheelBarrow2/EzFileSender
|
Old fiels/app_old.py
|
Python
|
mit
| 6,579
|
from __future__ import print_function
from __future__ import absolute_import
import numpy as np
from ..forest import Forest
def simple_forest():
""" generate a simple forest
"""
parents = np.array([2, 2, 4, 4, 4])
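    # parents[i] is the parent of node i: nodes 0,1 -> 2; nodes 2,3 -> 4; node 4 is
    # its own parent, i.e. the root.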
F = Forest(5, parents)
return F
def test_forest():
""" test creation of forest object
"""
F = simple_forest()
assert F.E == 8
assert F.cc().max() == 0
def test_forest_trivial():
""" test creation of forest object
"""
F = Forest(5)
assert F.E == 0
assert (F.cc() == np.arange(5)).all()
def test_children():
""" test that we obtain children
"""
sf = simple_forest()
ch = sf.get_children()
assert len(ch) == 5
assert ch[0] == []
assert ch[1] == []
assert ch[2] == [0, 1]
assert ch[3] == []
assert ch[4] == [2, 3]
def test_descendants():
""" test the get_descendants() method
"""
sf = simple_forest()
assert sf.get_descendants(0) == [0]
assert sf.get_descendants(1) == [1]
assert sf.get_descendants(2) == [0, 1, 2]
assert sf.get_descendants(4) == [0, 1, 2, 3, 4]
def test_root():
""" test the isroot() method
"""
root = simple_forest().isroot()
assert root[4] == True
assert root.sum() == 1
def test_merge_simple_branches():
""" test the merge_simple_branches() method
"""
f = Forest(5, np.array([2, 2, 4, 4, 4])).merge_simple_branches()
assert f.V == 5
f = Forest(5, np.array([1, 2, 4, 4, 4])).merge_simple_branches()
assert f.V == 3
def test_all_distances():
""" test the all_distances() methods
"""
f = simple_forest()
dg = f.all_distances()
print(dg)
assert dg[0, 3] == 3.
assert dg.max() == 3.
assert dg.min() == 0.
assert dg.shape == (5, 5)
dg = f.all_distances(1)
assert dg[3] == 3.
def test_depth():
""" test the depth_from_leaves() methods
"""
f = simple_forest()
depth = f.depth_from_leaves()
assert depth[0] == 0
assert depth[1] == 0
assert depth[3] == 0
assert depth[2] == 1
assert depth[4] == 2
def test_reorder():
""" test the reorder_from_leaves_to_roots() method
"""
f = simple_forest()
order = f.reorder_from_leaves_to_roots()
assert (f.depth_from_leaves() == np.array([0, 0, 0, 1, 2])).all()
assert (order == np.array([0, 1, 3, 2, 4])).all()
def test_leaves():
""" test the leaves_of_a_subtree() method
"""
f = simple_forest()
assert f.leaves_of_a_subtree([0, 1]) == True
assert f.leaves_of_a_subtree([0, 3]) == False
assert f.leaves_of_a_subtree([1, 3]) == False
assert f.leaves_of_a_subtree([0, 1, 3]) == True
assert f.leaves_of_a_subtree([1]) == True
def test_tree_depth():
    """ Test the tree_depth() method
    """
f = simple_forest()
assert f.tree_depth() == 3
def test_upward_and():
""" test the propagate_upward_and() method
"""
f = simple_forest()
assert(f.propagate_upward_and([0, 1, 0, 1, 0]) == [0, 1, 0, 1, 0]).all()
assert(f.propagate_upward_and([0, 1, 1, 1, 0]) == [0, 1, 0, 1, 0]).all()
assert(f.propagate_upward_and([0, 1, 1, 1, 1]) == [0, 1, 0, 1, 0]).all()
assert(f.propagate_upward_and([1, 1, 0, 1, 0]) == [1, 1, 1, 1, 1]).all()
def test_upward():
""" test the propagate_upward() method
"""
f = simple_forest()
assert(f.propagate_upward([0, 0, 1, 3, 1]) == [0, 0, 0, 3, 1]).all()
assert(f.propagate_upward([0, 0, 5, 0, 2]) == [0, 0, 0, 0, 0]).all()
if __name__ == "__main__":
import nose
nose.run(argv=['', __file__])
|
alexis-roche/nipy
|
nipy/algorithms/graph/tests/test_forest.py
|
Python
|
bsd-3-clause
| 3,578
|
"""
gts v0.01
genetic test sequencer
Copyright 2011 Brian Monkaba
This file is part of ga-bitbot.
ga-bitbot is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ga-bitbot is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with ga-bitbot. If not, see <http://www.gnu.org/licenses/>.
"""
import traceback
import xmlrpclib
import json
import gene_server_config
import time
import sys
import random
import subprocess
import __main__
import paths
from genetic import *
from load_config import *
random.seed(time.time())
if __name__ == "__main__":
__appversion__ = "0.01a"
print "Genetic Test Sequencer v%s"%__appversion__
# connect to the xml server
#
__server__ = gene_server_config.__server__
__port__ = str(gene_server_config.__port__)
#make sure the port number matches the server.
server = xmlrpclib.Server('http://' + __server__ + ":" + __port__)
multicall = xmlrpclib.MultiCall(server)
print "gts: connected to gene_server ",__server__,":",__port__
    #the variable values below are superseded by the configuration loaded from the
#configuration file global_config.json
#!!!!!!!! to change the values edit the json configuration file NOT the variables below !!!!!!!!
max_length = 60 * 24 * 60
load_throttle = 1 #go easy on cpu usage
    load_throttle_sleep_interval = 0.10 #seconds
calibrate = 1 #set to one to adjust the population size to maintain a one min test cycle
cycle_time = 60 * 1#time in seconds to test the entire population
min_cycle_time = 30
cycle_time_step = 2
pid_update_rate = 20 #reset watchdog after every n seconds
enable_flash_crash_protection = False
    flash_crash_protection_delay = 60 * 3 #three hours, assuming the delay is expressed in minutes
trusted_keys_path = "./config/trusted_keys/"
config_loaded = 0
#!!!!!!!!!!!!!!!!end of loaded config values!!!!!!!!
#define the module exit function
profile = False
def gts_exit(msg,pid=None):
global profile
if pid != None:
server.pid_msg(pid,msg)
server.pid_exit(pid)
if profile == True:
print "gts: profiler saving gts_call_graph.png to ./report/"
pycallgraph.make_dot_graph('./report/gts_call_graph.png')
print msg
sys.exit()
#load config
try:
__main__ = load_config_file_into_object('global_config.json',__main__)
except:
gts_exit("gts: error detected while loading the configuration. the application will now exit.")
else:
if config_loaded == False:
gts_exit("gts: configuration failed to load. the application will now exit.")
else:
print "gts: configuration loaded."
#internal variables
quartile_cycle = False
quartile = ''
bs = ''
verbose = False
run_once = False
get_config = False
get_default_config = False
score_only = False
profile = False
pid = None
g = genepool()
gd = "UNDEFINED"
if len(sys.argv) >= 3:
# Convert the two arguments from strings into numbers
quartile = sys.argv[1]
bs = sys.argv[2]
if len(sys.argv) > 3:
for i in range(3,len(sys.argv)):
if sys.argv[i] == 'v':
verbose = True
if sys.argv[i] == 'run_once':
#use with gal.py to auto reset (to address pypy memory leaks)
#exit after first local optima found
#or in the case of 'all' quartiles being tested, reset after once cycle through the quartiles
run_once = True
if sys.argv[i] == 'get_default_config':
#if set the default gene_def config will be loaded from the server
get_default_config = True
get_config = True
if sys.argv[i] == 'get_config':
#if set the gene_def config will be randomly loaded from the server
get_config = True
if sys.argv[i] == 'score_only':
#if set the gene_def config will be randomly loaded from the server
score_only = True
if sys.argv[i] == 'profile':
try:
import pycallgraph
except:
print "gts: pycallgraph module not installed. Profiling disabled."
else:
pycallgraph.start_trace()
profile = True
print "gts: running pycallgraph profiler"
if sys.argv[i] == 'pid':
#set the pid from the command line
try:
pid = sys.argv[i + 1]
except:
pass
if pid == None:
#if the pid is not set from the command line then
#use the genetic class object id
pid = g.id
#which quartile group to test
while not (quartile in ['1','2','3','4','all']):
print "Which quartile group to test? (1,2,3,4):"
quartile = raw_input()
if quartile != 'all':
quartile = int(quartile)
else:
quartile = 1
quartile_cycle = True
    #score-only clients resubmit every gene's score; otherwise only new highs are sent
    update_all_scores = score_only
#configure the gene pool
if get_config == True:
print "gts: Loading gene_def from the server."
while gd == "UNDEFINED" and get_config == True:
#get the gene def config list from the server
gdhl = json.loads(server.get_gene_def_hash_list())
if get_default_config == True:
gdh = json.loads(server.get_default_gene_def_hash())
gdhl = [gdh,gdh,gdh] #create a dummy list with the same (default) hash
if len(gdhl) < 2:
#the default config isn't defined
#if there are less then two genes registered then switch to the local config.
get_config = False
break
#pick one at random
gdh = random.choice(gdhl)
#get the gene_def
gd = server.get_gene_def(gdh)
#print gd
if gd != "UNDEFINED":
try:
gd = json.loads(gd)
#load the remote config
g = load_config_into_object(gd,g)
#only need to register the client with the existing gene_def hash
server.pid_register_client(pid,gdh)
print "gts: gene_def_hash:",gdh
print "gts: name",gd['name']
print "gts: description",gd['description']
print "gts: gene_def load complete."
except:
print "gts: gene_def load error:",gd
gd = "UNDEFINED"
get_config = False #force load local gen_def.json config
else:
time.sleep(5) #default config is undefined so just wait and try again....
#the script will remain in this loop until the default config is set
if get_config == False:
gd = load_config_from_file("gene_def.json")
g = load_config_into_object(gd,g)
#register the gene_def file and link to this client using the gene pool id as the PID (GUID)
f = open('./config/gene_def.json','r')
gdc = f.read()
f.close()
gdh = server.pid_register_gene_def(pid,gdc)
server.pid_register_client(pid,gdh)
#reset the process watchdog
server.pid_alive(pid)
#send a copy of the command line args
server.pid_msg(pid,str(sys.argv))
ff = None
if gd.has_key('fitness_script'):
#check for an updated signed package on the gene_server
#pypy probably wont have pycrypto installed - fall back to python in a subprocess to sync
        #fitness module names in the gene_def exclude the .py file extension
        #but signed packages use the extension. check for the extension, if none exists then add .py
print "gts: synchronizing signed code"
if len(gd['fitness_script'].split('.')) == 1:
sync_filename = gd['fitness_script'] + '.py'
subprocess.call(('python','cpsu.py','get',sync_filename,trusted_keys_path))
print "gts: loading the fitness module",gd['fitness_script']
ff = __import__(gd['fitness_script'])
else:
print "gts: no fitness module defined, loading default (bct)"
ff = __import__('bct')
te = ff.trade_engine()
#apply global configs
te.max_length = max_length
te.enable_flash_crash_protection = enable_flash_crash_protection
te.flash_crash_protection_delay = flash_crash_protection_delay
#load the gene_def fitness_config, if available
if gd.has_key('fitness_config'):
te = load_config_into_object(gd['fitness_config'],te)
te.score_only = True
print "gts: initializing the fitness function"
te.initialize()
#bootstrap the population with the winners available from the gene_pool server
while not(bs == 'y' or bs == 'n'):
print "Bootstrap from the gene_server? (y/n)"
bs = raw_input()
if bs == 'y':
bob_simulator = True
g.local_optima_trigger = 10
bootstrap_bobs = json.loads(server.get_bobs(quartile,pid))
bootstrap_all = json.loads(server.get_all(quartile,pid))
if (type(bootstrap_bobs) == type([])) and (type(bootstrap_all) == type([])):
g.seed()
if len(bootstrap_all) > 100:
g.pool = []
g.insert_genedict_list(bootstrap_bobs)
g.insert_genedict_list(bootstrap_all)
g.pool_size = len(g.pool)
if update_all_scores == True:
#reset the scores for retesting
g.reset_scores()
else:
#mate the genes before testing
g.next_gen()
else: #if no BOBS or high scores..seed with a new population
print "gts: no BOBs or high scores available...seeding new pool."
g.seed()
print "gts: Update all scores:",update_all_scores
print "gts: %s BOBs loaded"%len(bootstrap_bobs)
print "gts: %s high scores loaded"%len(bootstrap_all)
print "gts: Pool size: %s"%len(g.pool)
else:
bob_simulator = False
#update_all_scores = False
g.local_optima_trigger = 5
print "gts: Seeding the initial population"
g.seed()
#the counters are all incremented at the same time but are reset by different events:
test_count = 0 #used to reset the pool after so many loop cycles
total_count = 0 #used to calculate overall performance
loop_count = 0 # used to trigger pool size calibration and data reload
max_score = -100000
max_score_id = -1
max_gene = None
multicall_count = 0
start_time = time.time()
watchdog_reset_time = time.time()
server.pid_alive(pid)
print "gts: running the test sequencer"
while 1:
test_count += 1
total_count += 1
loop_count += 1
if load_throttle == 1:
time.sleep(load_throttle_sleep_interval)
if (time.time() - watchdog_reset_time) >= pid_update_rate: #total_count%pid_update_rate == 0:
#periodicaly reset the watchdog monitor
print "gts: resetting watchdog timer"
watchdog_reset_time = time.time()
server.pid_alive(pid)
if loop_count > g.pool_size:
if score_only: #quartile_cycle == True and bob_simulator == True:
#force a state jump to load the next quartile to retest the genes
#in this mode the only function of the client is to cycle through the quartiles to retest existing genes
g.local_optima_reached = True
#update_all_scores = False #on the first pass only, bob clients need to resubmit updated scores for every gene
loop_count = 0
#reset the watchdog monitor
#server.pid_alive(pid)
#benchmark the cycle speed
current_time = time.time()
elapsed_time = current_time - start_time
gps = total_count / elapsed_time
#pid_update_rate = int(gps * 40)
if calibrate == 1:
print "gts: recalibrating pool size..."
g.pool_size = int(gps * cycle_time)
cycle_time -= cycle_time_step
if cycle_time < min_cycle_time:
cycle_time = min_cycle_time
if g.pool_size > 10000:
g.pool_size = 10000
kss = (gps*te.input_data_length)/1000.0
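            #hypothetical example: 120 genes tested in 60s gives gps = 2 G/S; with
            #input_data_length = 5000 samples per test, kss = (2 * 5000) / 1000 = 10 KS/S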
performance_metrics = "gts: ","%.2f"%gps,"G/S; ","%.2f"%kss,"KS/S;"," Pool Size: ",g.pool_size," Total Processed: ",total_count
performance_metrics = " ".join(map(str,performance_metrics))
print performance_metrics
pmd = {'channel':'gts_metric','gps':gps,'kss':kss,'pool':g.pool_size,'total':total_count}
server.pid_msg(pid,json.dumps(pmd))
if g.local_optima_reached:
test_count = 0
#initialize fitness function (load updated data)
te.initialize()
if score_only: #quartile_cycle == True and bob_simulator == True:
#jump to the next quartile and skip the bob submission
update_all_scores = True
quartile += 1
if quartile > 4:
quartile = 1
if run_once:
print "gts: flushing xmlrpc multicall buffer."
multicall() #send any batched calls to the server
print "gts: run once done."
gts_exit("gts: run once done.",pid)
elif max_gene != None:
#debug
print "gts: ",max_gene
#end debug
print "gts: submit BOB for id:%s to server (%.2f)"%(str(max_gene['id']),max_gene['score'])
server.put_bob(json.dumps(max_gene),quartile,pid)
if quartile_cycle == True:
#if cycling is enabled then
#the client will cycle through the quartiles as local optimas are found
#jump to the next quartile
quartile += 1
if quartile > 4:
quartile = 1
if run_once:
gts_exit("gts: run once done.",pid)
else:
if max_score > -1000:
print "gts: **WARNING** MAX_GENE is gone.: ID",max_score_id
print "*"*80
print "gts: GENE DUMP:"
for ag in g.pool:
print ag['id'],ag['score']
print "*"*80
gts_exit("gts: HALTED.",pid)
max_gene = None #clear the max gene
max_score = -100000 #reset the high score
if quartile_cycle == False and run_once:
print "gts: flushing xmlrpc multicall buffer."
multicall() #send any batched calls to the server
print "gts: run once done."
gts_exit("gts: run once done.",pid)
if bob_simulator:
#update_all_scores = True #on the first pass only, bob clients need to resubmit updated scores for every gene
bootstrap_bobs = json.loads(server.get_bobs(quartile,pid))
bootstrap_all = json.loads(server.get_all(quartile,pid))
g.pool_size = len(g.pool)
if (type(bootstrap_bobs) == type([])) and (type(bootstrap_all) == type([])):
g.seed()
g.pool = []
g.insert_genedict_list(bootstrap_bobs)
g.insert_genedict_list(bootstrap_all)
if quartile_cycle == True:
#reset the scores for retesting
g.reset_scores()
else:
#mate the genes before testing
g.next_gen()
else: #if no BOBS or high scores..seed with a new population
#print "no BOBs or high scores available...seeding new pool."
g.seed()
else:
g.seed()
        if test_count > (g.pool_size * 10):
            test_count = 0
            print "gts: resetting scores to force retest of winners..."
max_score = 0 #knock the high score down to prevent blocking
#latest scoring data which may fall due to
#the latest price data
g.next_gen()
g.reset_scores()
#create/reset the trade engine
te.reset()
#get the next gene
ag = g.get_next()
#configure the trade engine
te = load_config_into_object({'set':ag},te)
#set the quartile to test
te.test_quartile(quartile)
#run the fitness function
try:
te.run()
except Exception, err:
#kill off any genes that crash the trade engine (div by 0 errors for instance)
print "gts: ***** GENE FAULT *****"
print Exception,err
print traceback.format_exc()
print "gts: ***** END GENE FAULT *****"
g.set_score(ag['id'],g.kill_score)
else:
#return the score to the gene pool
try:
score = te.score()
except Exception, err:
#kill off any genes that crash the trade engine (div by 0 errors for instance)
print "gts: ***** GENE SCORE FAULT *****"
print Exception,err
print traceback.format_exc()
print "gts: ***** END GENE SCORE FAULT *****"
g.set_score(ag['id'],g.kill_score)
else:
if verbose:
print "gts: ",ag['gene'],"\t".join(["%.5f"%max_score,"%.5f"%score,"%.3f"%g.prune_threshold])
g.set_score(ag['id'],score)
#g.set_message(ag['id'],"Balance: " + str(te.balance) +"; Wins: " + str(te.wins)+ "; Loss:" + str(te.loss) + "; Positions: " + str(len(te.positions)))
g.set_message(ag['id'],te.text_summary)
if score > 1000 and profile == True:
gts_exit("gts: profiling complete")
#if a new high score is found submit the gene to the server
if score > max_score and update_all_scores == False:
print "gts: submit high score for quartile:%s id:%s to server (%.5f)"%(str(quartile),str(ag['id']),score)
max_score = score
max_score_id = ag['id']
max_gene = ag.copy() #g.get_by_id(max_score_id)
if max_gene != None:
server.put(json.dumps(max_gene),quartile,pid)
else:
print "gts: MAX_GENE is None!!"
if update_all_scores == True:
print "gts: updating score for quartile:%s id:%s to server, multicall deffered (%.5f)"%(str(quartile),str(ag['id']),score)
agene = g.get_by_id(ag['id'])
if agene != None:
multicall_count += 1
multicall.mc_put(json.dumps(agene),quartile,pid)
if multicall_count > 40:
multicall_count = 0
print "gts: flushing xmlrpc multicall buffer."
multicall()
else:
print "gts: updating gene error: gene is missing!!"
|
Pascal66/ga-bitbot
|
gts.py
|
Python
|
gpl-3.0
| 20,392
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import time
from resource_management.core.logger import Logger
from resource_management.core import shell
from resource_management.libraries.functions.format import format
from resource_management.core.resources.system import File, Execute
from resource_management.core.resources.service import Service
from resource_management.core.exceptions import Fail
from resource_management.core.shell import as_user
from resource_management.libraries.functions.hive_check import check_thrift_port_sasl
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def hive_service(name, action='start', rolling_restart=False):
import params
if name == 'metastore':
if action == 'start' or action == 'stop':
Service(params.hive_metastore_win_service_name, action=action)
if name == 'hiveserver2':
if action == 'start' or action == 'stop':
Service(params.hive_server_win_service_name, action=action)
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def hive_service(name, action='start', rolling_restart=False):
import params
if name == 'metastore':
pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
cmd = format("{start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.log {pid_file} {hive_server_conf_dir} {hive_log_dir}")
elif name == 'hiveserver2':
pid_file = format("{hive_pid_dir}/{hive_pid}")
cmd = format("{start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.log {pid_file} {hive_server_conf_dir} {hive_log_dir}")
pid_expression = "`" + as_user(format("cat {pid_file}"), user=params.hive_user) + "`"
process_id_exists_command = format("ls {pid_file} >/dev/null 2>&1 && ps -p {pid_expression} >/dev/null 2>&1")
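  # The liveness check expands to roughly (paths and the su wrapper are illustrative):
  #   ls <pid_file> >/dev/null 2>&1 && ps -p `<cat pid_file as the hive user>` >/dev/null 2>&1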
if action == 'start':
if name == 'hiveserver2':
check_fs_root()
daemon_cmd = cmd
hadoop_home = params.hadoop_home
hive_bin = "hive"
# upgrading hiveserver2 (rolling_restart) means that there is an existing,
# de-registering hiveserver2; the pid will still exist, but the new
# hiveserver is spinning up on a new port, so the pid will be re-written
if rolling_restart:
process_id_exists_command = None
    if params.version:
hadoop_home = format("/usr/hdp/{version}/hadoop")
hive_bin = os.path.join(params.hive_bin, hive_bin)
Execute(daemon_cmd,
user = params.hive_user,
environment = { 'HADOOP_HOME': hadoop_home, 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_bin },
path = params.execute_path,
not_if = process_id_exists_command)
if params.hive_jdbc_driver == "com.mysql.jdbc.Driver" or \
params.hive_jdbc_driver == "org.postgresql.Driver" or \
params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
db_connection_check_command = format(
"{java64_home}/bin/java -cp {check_db_connection_jar}:{target} org.apache.ambari.server.DBConnectionVerification '{hive_jdbc_connection_url}' {hive_metastore_user_name} {hive_metastore_user_passwd!p} {hive_jdbc_driver}")
Execute(db_connection_check_command,
path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin', tries=5, try_sleep=10)
elif action == 'stop':
daemon_kill_cmd = format("{sudo} kill {pid_expression}")
daemon_hard_kill_cmd = format("{sudo} kill -9 {pid_expression}")
Execute(daemon_kill_cmd,
not_if = format("! ({process_id_exists_command})")
)
wait_time = 5
Execute(daemon_hard_kill_cmd,
not_if = format("! ({process_id_exists_command}) || ( sleep {wait_time} && ! ({process_id_exists_command}) )")
)
# check if stopped the process, else fail the task
Execute(format("! ({process_id_exists_command})"),
tries=20,
try_sleep=3,
)
File(pid_file,
action = "delete"
)
def check_fs_root():
import params
if not params.fs_root.startswith("hdfs://"):
Logger.info("Skipping fs root check as fs_root does not start with hdfs://")
return
metatool_cmd = format("hive --config {hive_server_conf_dir} --service metatool")
cmd = as_user(format("{metatool_cmd} -listFSRoot", env={'PATH': params.execute_path}), params.hive_user) \
+ format(" 2>/dev/null | grep hdfs:// | cut -f1,2,3 -d '/' | grep -v '{fs_root}' | head -1")
code, out = shell.call(cmd)
if code == 0 and out.strip() != "" and params.fs_root.strip() != out.strip():
out = out.strip()
cmd = format("{metatool_cmd} -updateLocation {fs_root} {out}")
Execute(cmd,
user=params.hive_user,
environment={'PATH': params.execute_path}
)
|
zouzhberk/ambaridemo
|
demo-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
|
Python
|
apache-2.0
| 5,505
|
### Copyright (C) 2007-2016 Peter Williams <pwil3058@gmail.com>
###
### This program is free software; you can redistribute it and/or modify
### it under the terms of the GNU General Public License as published by
### the Free Software Foundation; version 2 of the License only.
###
### This program is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with this program; if not, write to the Free Software
### Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import re
import os
import hashlib
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import Pango
from ...bab import CmdResult, CmdFailure
from ...bab import runext
from ...bab import options
from .. import patchlib
from ...gtx import dialogue
from ...gtx import gutils
from ...gtx import textview
from ... import wsm_icons
options.define("diff", "extdiff", options.Defn(str, None, _("The name of external application for viewing diffs")))
class FileAndRefreshActions:
def __init__(self):
self._action_group = Gtk.ActionGroup("diff_file_and_refresh")
self._action_group.add_actions(
[
("diff_save", Gtk.STOCK_SAVE, _('_Save'), None,
_('Save the diff to previously nominated file'), self._save_acb),
("diff_save_as", Gtk.STOCK_SAVE_AS, _('Save _as'), None,
_('Save the diff to a nominated file'), self._save_as_acb),
("diff_refresh", Gtk.STOCK_REFRESH, _('_Refresh'), None,
_('Refresh contents of the diff'), self._refresh_acb),
])
self._save_file = None
self.check_set_save_sensitive()
def check_save_sensitive(self):
return self._save_file is not None and os.path.exists(self._save_file)
def check_set_save_sensitive(self):
set_sensitive = self.check_save_sensitive()
self._action_group.get_action("diff_save").set_sensitive(set_sensitive)
def _save_acb(self, _action):
self._save_to_file()
def _save_as_acb(self, _action):
if self._save_file:
suggestion = self._save_file
else:
suggestion = os.getcwd()
self._save_file = dialogue.main_window.ask_file_path(_('Save as ...'), suggestion=suggestion, existing=False)
self._save_to_file()
def _save_to_file(self):
if not self._save_file:
return
try:
fobj = open(self._save_file, 'w')
except IOError as edata:
            dialogue.main_window.report_any_problems(CmdResult.error(stderr=edata.strerror))
self.check_set_save_sensitive()
return
text = self._get_text_to_save()
fobj.write(text)
fobj.close()
self.check_set_save_sensitive()
class TwsLineCountDisplay(Gtk.HBox):
STATES = [Gtk.StateType.NORMAL, Gtk.StateType.ACTIVE, Gtk.StateType.PRELIGHT, Gtk.StateType.INSENSITIVE]
LABEL = _("Added TWS lines:")
def __init__(self):
Gtk.HBox.__init__(self)
self.pack_start(Gtk.Label(self.LABEL), expand=False, fill=False, padding=0)
self._entry = Gtk.Entry()
self._entry.set_width_chars(1)
self._entry.set_text(str(0))
self._entry.set_editable(False)
self.pack_start(self._entry, expand=False, fill=False, padding=0)
self.show_all()
def set_value(self, val):
sval = str(val)
self._entry.set_width_chars(len(sval))
self._entry.set_text(sval)
if val:
for state in self.STATES:
self._entry.modify_base(state, Gdk.color_parse("#FF0000"))
else:
for state in self.STATES:
self._entry.modify_base(state, Gdk.color_parse("#00FF00"))
class DiffBuffer(textview.Buffer):
    TWS_CHECK_CRE = re.compile(r"^(\+.*\S)(\s+\n)$")
def __init__(self):
textview.Buffer.__init__(self)
self.index_tag = self.create_tag("INDEX", weight=Pango.Weight.BOLD, foreground="#0000AA", family="monospace")
self.sep_tag = self.create_tag("SEP", weight=Pango.Weight.BOLD, foreground="#0000AA", family="monospace")
self.minus_tag = self.create_tag("MINUS", foreground="#AA0000", family="monospace")
self.lab_tag = self.create_tag("LAB", foreground="#AA0000", family="monospace")
self.plus_tag = self.create_tag("PLUS", foreground="#006600", family="monospace")
self.added_tws_tag = self.create_tag("ADDED_TWS", background="#006600", family="monospace")
self.star_tag = self.create_tag("STAR", foreground="#006600", family="monospace")
self.rab_tag = self.create_tag("RAB", foreground="#006600", family="monospace")
self.change_tag = self.create_tag("CHANGED", foreground="#AA6600", family="monospace")
self.stats_tag = self.create_tag("STATS", foreground="#AA00AA", family="monospace")
self.func_tag = self.create_tag("FUNC", foreground="#00AAAA", family="monospace")
self.unchanged_tag = self.create_tag("UNCHANGED", foreground="black", family="monospace")
def _append_tagged_text(self, text, tag):
self.insert_with_tags(self.get_end_iter(), text, tag)
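    # _append_patch_line below tags each line by its first character ('+', '-', '@', ...)
    # and, for an added line that ends in trailing whitespace, returns the length of the
    # line's non-whitespace prefix (including the leading '+') so callers can locate the
    # TWS; for all other lines it returns 0.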
def _append_patch_line(self, line):
first_char = line[0]
if first_char == " ":
self._append_tagged_text(line, self.unchanged_tag)
elif first_char == "+":
match = self.TWS_CHECK_CRE.match(line)
if match:
self._append_tagged_text(match.group(1), self.plus_tag)
self._append_tagged_text(match.group(2), self.added_tws_tag)
return len(match.group(1))
else:
self._append_tagged_text(line, self.plus_tag)
elif first_char == "-":
self._append_tagged_text(line, self.minus_tag)
elif first_char == "!":
self._append_tagged_text(line, self.change_tag)
elif first_char == "@":
i = line.find("@@", 2)
if i == -1:
self._append_tagged_text(line, self.stats_tag)
else:
self._append_tagged_text(line[:i+2], self.stats_tag)
self._append_tagged_text(line[i+2:], self.func_tag)
elif first_char == "=":
self._append_tagged_text(line, self.sep_tag)
elif first_char == "*":
self._append_tagged_text(line, self.star_tag)
elif first_char == "<":
self._append_tagged_text(line, self.lab_tag)
elif first_char == ">":
self._append_tagged_text(line, self.rab_tag)
else:
self._append_tagged_text(line, self.index_tag)
return 0
class DiffView(textview.View):
BUFFER = DiffBuffer
def __init__(self, width_in_chars=81, aspect_ratio=0.33, fdesc=None):
textview.View.__init__(self, width_in_chars=width_in_chars, aspect_ratio=aspect_ratio, fdesc=fdesc)
self.set_editable(False)
self.set_cursor_visible(False)
class TextWidget(textview.Widget):
TEXT_VIEW = DiffView
def __init__(self, width_in_chars=81, aspect_ratio=0.33, fdesc=None):
textview.Widget.__init__(self, width_in_chars=width_in_chars, aspect_ratio=aspect_ratio, fdesc=fdesc)
self.tws_list = []
self.tws_index = 0
self._action_group = Gtk.ActionGroup("diff_text")
self._action_group.add_actions(
[
("diff_save", Gtk.STOCK_SAVE, _("_Save"), None,
_("Save the diff to previously nominated file"), self._save_acb),
("diff_save_as", Gtk.STOCK_SAVE_AS, _("Save _as"), None,
_("Save the diff to a nominated file"), self._save_as_acb),
("diff_refresh", Gtk.STOCK_REFRESH, _("_Refresh"), None,
_("Refresh contents of the diff"), self._refresh_acb),
("tws_nav_first", Gtk.STOCK_GOTO_TOP, _("_First"), None,
_("Scroll to first line with added trailing white space"),
self._tws_nav_first_acb),
("tws_nav_prev", Gtk.STOCK_GO_UP, _("_Prev"), None,
_("Scroll to previous line with added trailing white space"),
self._tws_nav_prev_acb),
("tws_nav_next", Gtk.STOCK_GO_DOWN, _("_Next"), None,
_("Scroll to next line with added trailing white space"),
self._tws_nav_next_acb),
("tws_nav_last", Gtk.STOCK_GOTO_BOTTOM, _("_Last"), None,
_("Scroll to last line with added trailing white space"),
self._tws_nav_last_acb),
])
self.tws_nav_buttonbox = gutils.ActionHButtonBox([self._action_group],
["tws_nav_first", "tws_nav_prev", "tws_nav_next", "tws_nav_last"])
self._tws_nav_buttons_packed = False
self._save_file = None
self.check_set_save_sensitive()
self.tws_display = TwsLineCountDisplay()
self._set_contents()
self.show_all()
@property
def bfr(self):
return self.view.get_buffer()
@property
def h_scrollbar(self):
return self._scrolled_window.get_hscrollbar()
@property
def v_scrollbar(self):
return self._scrolled_window.get_vscrollbar()
def get_scrollbar_values(self):
        return (self.h_scrollbar.get_value(), self.v_scrollbar.get_value())
def set_scrollbar_values(self, values):
self.h_scrollbar.set_value(values[0])
self.v_scrollbar.set_value(values[1])
def _get_diff_text_iter(self):
return []
def _set_contents(self):
def update_for_tws_change(new_count):
if self._tws_nav_buttons_packed and not new_count:
self.remove(self.tws_nav_buttonbox)
self.view.set_cursor_visible(False)
self._tws_nav_buttons_packed = False
elif not self._tws_nav_buttons_packed and new_count:
self.pack_start(self.tws_nav_buttonbox, expand=False, fill=True, padding=0)
self.view.set_cursor_visible(True)
self._tws_nav_buttons_packed = True
self.show_all()
old_count = len(self.tws_list)
self.bfr.begin_user_action()
self.bfr.set_text("")
self.tws_list = []
line_no = 0
for line in self._get_diff_text_iter():
offset = self.bfr._append_patch_line(line)
if offset:
self.tws_list.append((line_no, offset - 2))
line_no += 1
self.bfr.end_user_action()
new_count = len(self.tws_list)
self.tws_display.set_value(new_count)
if not (new_count == old_count):
update_for_tws_change(new_count)
def _save_to_file(self):
if not self._save_file:
return
try:
fobj = open(self._save_file, "w")
except IOError as edata:
            strerror = edata.strerror
dialogue.main_window.report_any_problems(CmdResult.error(stderr=strerror))
self.check_set_save_sensitive()
return
text = self.bfr.get_text(self.bfr.get_start_iter(), self.bfr.get_end_iter())
fobj.write(text)
fobj.close()
self.check_set_save_sensitive()
def _tws_index_iter(self):
pos = self.tws_list[self.tws_index]
model_iter = self.bfr.get_iter_at_line_offset(pos[0], pos[1])
self.bfr.place_cursor(model_iter)
return model_iter
def get_tws_first_iter(self):
self.tws_index = 0
return self._tws_index_iter()
def get_tws_prev_iter(self):
if self.tws_index:
self.tws_index -= 1
return self._tws_index_iter()
def get_tws_next_iter(self):
self.tws_index += 1
if self.tws_index >= len(self.tws_list):
self.tws_index = len(self.tws_list) - 1
return self._tws_index_iter()
def get_tws_last_iter(self):
self.tws_index = len(self.tws_list) - 1
return self._tws_index_iter()
def check_save_sensitive(self):
return self._save_file is not None and os.path.exists(self._save_file)
def check_set_save_sensitive(self):
set_sensitive = self.check_save_sensitive()
self._action_group.get_action("diff_save").set_sensitive(set_sensitive)
def _refresh_acb(self, _action):
self._set_contents()
def _save_acb(self, _action):
self._save_to_file()
def _save_as_acb(self, _action):
if self._save_file:
suggestion = self._save_file
else:
suggestion = os.getcwd()
self._save_file = dialogue.main_window.ask_file_path(_("Save as ..."), suggestion=suggestion, existing=False)
self._save_to_file()
def get_action_button_box(self, a_name_list):
return gutils.ActionHButtonBox([self._action_group], action_name_list=a_name_list)
def get_action_button_list(self, a_name_list):
return gutils.ActionButtonList([self._action_group], action_name_list=a_name_list)
def _tws_nav_first_acb(self, _action):
self.view.scroll_to_iter(self.get_tws_first_iter(), 0.01, True)
def _tws_nav_prev_acb(self, _action):
self.view.scroll_to_iter(self.get_tws_prev_iter(), 0.01, True)
def _tws_nav_next_acb(self, _action):
self.view.scroll_to_iter(self.get_tws_next_iter(), 0.01, True)
def _tws_nav_last_acb(self, _action):
self.view.scroll_to_iter(self.get_tws_last_iter(), 0.01, True)
def get_tws_nav_button_box(self):
a_name_list = ["tws_nav_first", "tws_nav_prev", "tws_nav_next", "tws_nav_last"]
        return self.get_action_button_box(a_name_list)
class DiffPlusDisplay(TextWidget):
def __init__(self, diffplus):
self.diffplus = diffplus
self._diff_digest = diffplus.get_hash_digest()
TextWidget.__init__(self)
self.tws_nav_buttonbox.pack_start(self.tws_display, expand=False, fill=True, padding=0)
self.tws_nav_buttonbox.reorder_child(self.tws_display, 0)
def _get_diff_text_iter(self):
return self.diffplus.iter_lines()
def update(self, diffplus):
digest = diffplus.get_hash_digest()
if digest != self._diff_digest:
sbars = self.get_scrollbar_values()
self.diffplus = diffplus
self._diff_digest = digest
self._set_contents()
self.set_scrollbar_values(sbars)
class DiffPlusNotebook(Gtk.Notebook):
class TWSDisplay(TwsLineCountDisplay):
LABEL = _("File(s) that add TWS: ")
def __init__(self, diff_pluses=None, digest=None, num_strip_levels=1):
Gtk.Notebook.__init__(self)
self.diff_pluses = [] if diff_pluses is None else diff_pluses
self.digest = self.calc_diff_pluses_digest(diff_pluses) if (digest is None and diff_pluses) else digest
self.num_strip_levels = num_strip_levels
self.tws_display = self.TWSDisplay()
self.tws_display.set_value(0)
self.set_scrollable(True)
self.popup_enable()
self.diff_displays = {}
self._populate_pages()
@staticmethod
def calc_diff_pluses_digest(diff_pluses):
h = hashlib.sha1()
for diff_plus in diff_pluses:
for line in diff_plus.iter_lines():
h.update(line.encode())
return h.digest()
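    # The digest lets set_diff_pluses() below skip rebuilding the notebook pages when
    # the diff content has not actually changed.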
@staticmethod
def _make_file_label(filepath, file_icon):
hbox = Gtk.HBox()
icon = file_icon
hbox.pack_start(Gtk.Image.new_from_stock(icon, Gtk.IconSize.MENU), expand=False, fill=True, padding=0)
label = Gtk.Label(label=filepath)
label.set_alignment(0, 0)
label.set_padding(4, 0)
hbox.pack_start(label, expand=True, fill=True, padding=0)
hbox.show_all()
return hbox
@staticmethod
def _file_icon_for_condition(condition):
if not condition:
return wsm_icons.STOCK_FILE_PROBLEM
return Gtk.STOCK_FILE
def _populate_pages(self):
num_tws_files = 0
for diffplus in self.diff_pluses:
filepath = diffplus.get_file_path(self.num_strip_levels)
if diffplus.report_trailing_whitespace():
file_icon = self._file_icon_for_condition(False)
num_tws_files += 1
else:
file_icon = self._file_icon_for_condition(True)
tab_label = self._make_file_label(filepath, file_icon)
menu_label = self._make_file_label(filepath, file_icon)
self.diff_displays[filepath] = DiffPlusDisplay(diffplus)
self.append_page_menu(self.diff_displays[filepath], tab_label, menu_label)
self.tws_display.set_value(num_tws_files)
def _update_pages(self):
existing = set([fpath for fpath in self.diff_displays])
num_tws_files = 0
for diffplus in self.diff_pluses:
filepath = diffplus.get_file_path(self.num_strip_levels)
if diffplus.report_trailing_whitespace():
file_icon = self._file_icon_for_condition(False)
num_tws_files += 1
else:
file_icon = self._file_icon_for_condition(True)
tab_label = self._make_file_label(filepath, file_icon)
menu_label = self._make_file_label(filepath, file_icon)
if filepath in existing:
self.diff_displays[filepath].update(diffplus)
self.set_tab_label(self.diff_displays[filepath], tab_label)
self.set_menu_label(self.diff_displays[filepath], menu_label)
existing.remove(filepath)
else:
self.diff_displays[filepath] = DiffPlusDisplay(diffplus)
self.append_page_menu(self.diff_displays[filepath], tab_label, menu_label)
for gone in existing:
gonedd = self.diff_displays.pop(gone)
pnum = self.page_num(gonedd)
self.remove_page(pnum)
self.tws_display.set_value(num_tws_files)
def set_diff_pluses(self, diff_pluses, digest=None):
if digest is None:
digest = self.calc_diff_pluses_digest(diff_pluses)
if digest != self.digest:
self.diff_pluses = diff_pluses
self.digest = digest
self._update_pages()
def __str__(self):
return "".join((str(diff_plus) for diff_plus in self.diff_pluses))
class DiffPlusesWidget(DiffPlusNotebook, FileAndRefreshActions):
A_NAME_LIST = ["diff_save", "diff_save_as", "diff_refresh"]
def __init__(self, num_strip_levels=1, **kwargs):
DiffPlusNotebook.__init__(self, diff_pluses=self._get_diff_pluses(), num_strip_levels=num_strip_levels)
FileAndRefreshActions.__init__(self)
self.diff_buttons = gutils.ActionButtonList([self._action_group], self.A_NAME_LIST)
def _get_diff_pluses(self):
assert False, _("_get_diff_pluses() must be defined in children")
def _refresh_acb(self, _action):
self.update()
def update(self):
diff_pluses = self._get_diff_pluses()
digest = self.calc_diff_pluses_digest(diff_pluses)
if digest != self.digest:
self.diff_pluses = diff_pluses
self.digest = digest
self._update_pages()
def _get_text_to_save(self):
return str(self)
    @property
    def window_title(self):
        return ""
class DiffTextsWidget(DiffPlusNotebook, FileAndRefreshActions):
A_NAME_LIST = ["diff_save", "diff_save_as", "diff_refresh"]
def __init__(self, num_strip_levels=1, **kwargs):
diff_text = self._get_diff_text()
digest = hashlib.sha1(diff_text.encode()).digest()
diff_pluses = patchlib.Patch.parse_text(diff_text).diff_pluses
DiffPlusNotebook.__init__(self, diff_pluses=diff_pluses, digest=digest, num_strip_levels=num_strip_levels)
FileAndRefreshActions.__init__(self)
self.diff_buttons = gutils.ActionButtonList([self._action_group], self.A_NAME_LIST)
def _get_diff_text(self):
assert False, _("_get_diff_text() must be defined in children")
def _refresh_acb(self, _action):
self.update()
def update(self):
diff_text = self._get_diff_text()
digest = hashlib.sha1(diff_text.encode()).digest()
if digest != self.digest:
self.diff_pluses = patchlib.Patch.parse_text(diff_text).diff_pluses
self.digest = digest
self._update_pages()
def _get_text_to_save(self):
return str(self)
def window_title(self):
return ""
class GenericDiffDialog(dialogue.ListenerDialog):
DIFFS_WIDGET = None
def __init__(self, parent=None, **kwargs):
flags = Gtk.DialogFlags.DESTROY_WITH_PARENT
dialogue.ListenerDialog.__init__(self, None, parent if parent else dialogue.main_window, flags, ())
dtw = self.DIFFS_WIDGET(**kwargs)
self.set_title(dtw.window_title())
self.vbox.pack_start(dtw, expand=True, fill=True, padding=0)
tws_display = dtw.tws_display
self.action_area.pack_end(tws_display, expand=False, fill=False, padding=0)
for button in dtw.diff_buttons.list:
self.action_area.pack_start(button, expand=True, fill=True, padding=0)
self.add_buttons(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)
self.connect("response", self._close_cb)
self.show_all()
def _close_cb(self, dialog, response_id):
dialog.destroy()
def launch_external_diff(file_a, file_b):
extdiff = options.get("diff", "extdiff")
if not extdiff:
return CmdResult.warning(_("No external diff viewer is defined.\n"))
try:
runext.run_cmd_in_bgnd([extdiff, file_a, file_b])
except OSError as edata:
return CmdResult.error(stderr=_("Error launching external viewer \"{0}\": {1}\n").format(extdiff, edata.strerror))
return CmdResult.ok()
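# Usage sketch (file paths are hypothetical): callers pass two on-disk files
# and inspect the returned CmdResult, e.g.
#   result = launch_external_diff("/tmp/before/foo.py", "/tmp/after/foo.py")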
|
pwil3058/pysm_wsm
|
patch_diff/gui/diff.py
|
Python
|
gpl-2.0
| 22,025
|
#!/usr/bin/python
# TODO: Use tracy (https://github.com/MerlijnWajer/tracy) to see if lwan
# performs certain system calls. This should speed up the mmap tests
# considerably and make it possible to perform more low-level tests.
import subprocess
import time
import unittest
import requests
import socket
import sys
import os
import re
LWAN_PATH = './build/lwan/lwan'
for arg in sys.argv[1:]:
if not arg.startswith('-') and os.path.exists(arg):
LWAN_PATH = arg
sys.argv.remove(arg)
print 'Using', LWAN_PATH, 'for lwan'
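# Invocation sketch: LWAN_PATH defaults to ./build/lwan/lwan; any non-flag
# argument that names an existing path overrides it, and remaining arguments
# are passed through to unittest.main(), e.g.
#   python tools/testsuite.py /path/to/lwan TestFileServing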
class LwanTest(unittest.TestCase):
def setUp(self):
for spawn_try in range(20):
self.lwan = subprocess.Popen([LWAN_PATH],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
for request_try in range(20):
try:
requests.get('http://127.0.0.1:8080/hello')
return
except requests.ConnectionError:
time.sleep(0.1)
time.sleep(0.1)
raise Exception('Timeout waiting for lwan')
def tearDown(self):
self.lwan.poll()
if self.lwan.returncode is not None:
self.assertEqual(self.lwan.returncode, 0)
else:
self.lwan.kill()
def assertHttpResponseValid(self, request, status_code, content_type):
self.assertEqual(request.status_code, status_code)
self.assertTrue('Content-Type' in request.headers)
self.assertEqual(request.headers['Content-Type'], content_type)
def assertResponse404(self, request):
self.assertHttpResponseValid(request, 404, 'text/html')
def assertResponseHtml(self, request, status_code=200):
self.assertHttpResponseValid(request, status_code, 'text/html')
def assertResponsePlain(self, request, status_code=200):
self.assertHttpResponseValid(request, status_code, 'text/plain')
class TestFileServing(LwanTest):
def test_mime_type_is_correct(self):
table = (
('/', 'text/html'),
('/icons/back.png', 'image/png'),
('/icons', 'text/plain'),
('/icons/', 'text/html'),
('/zero', 'application/octet-stream')
)
for path, expected_mime in table:
r = requests.head('http://127.0.0.1:8080%s' % path)
self.assertEqual(r.headers['content-type'], expected_mime)
def test_non_existent_file_yields_404(self):
r = requests.get('http://127.0.0.1:8080/icons/non-existent-file.png')
self.assertResponse404(r)
def test_dot_dot_slash_yields_404(self):
r = requests.get('http://127.0.0.1:8080/../../../../../../../../../etc/passwd')
self.assertResponse404(r)
def test_slash_slash_slash_does_not_matter_200(self):
r = requests.get('http://127.0.0.1:8080//////////icons/file.png')
self.assertHttpResponseValid(r, 200, 'image/png')
def test_slash_slash_slash_does_not_matter_404(self):
r = requests.get('http://127.0.0.1:8080//////////etc/passwd')
self.assertResponse404(r)
def test_head_request_small_file(self):
r = requests.head('http://127.0.0.1:8080/100.html',
headers={'Accept-Encoding': 'foobar'})
self.assertResponseHtml(r)
self.assertTrue('content-length' in r.headers)
self.assertEqual(r.headers['content-length'], '100')
self.assertEqual(r.text, '')
def test_head_request_larger_file(self):
r = requests.head('http://127.0.0.1:8080/zero',
headers={'Accept-Encoding': 'foobar'})
self.assertHttpResponseValid(r, 200, 'application/octet-stream')
self.assertTrue('content-length' in r.headers)
self.assertEqual(r.headers['content-length'], '32768')
self.assertEqual(r.text, '')
def test_uncompressed_small_file(self):
r = requests.get('http://127.0.0.1:8080/100.html',
headers={'Accept-Encoding': 'foobar'})
self.assertResponseHtml(r)
self.assertTrue('content-length' in r.headers)
self.assertEqual(r.headers['content-length'], '100')
self.assertEqual(r.text, 'X' * 100)
def test_get_root(self):
r = requests.get('http://127.0.0.1:8080/')
self.assertResponseHtml(r)
self.assertTrue('It works!' in r.text)
def test_compressed_small_file(self):
encodings = (
'deflate',
' deflate',
'foo,bar,deflate',
'foo, bar, deflate',
'deflote' # This should fail, but won't in our current implementation
)
for encoding in encodings:
r = requests.get('http://127.0.0.1:8080/100.html',
headers={'Accept-Encoding': encoding})
self.assertResponseHtml(r)
self.assertTrue('content-length' in r.headers)
self.assertLess(int(r.headers['content-length']), 100)
self.assertTrue('content-encoding' in r.headers)
self.assertEqual(r.headers['content-encoding'], 'deflate')
self.assertEqual(r.text, 'X' * 100)
def test_get_larger_file(self):
r = requests.get('http://127.0.0.1:8080/zero',
headers={'Accept-Encoding': 'foobar'})
self.assertHttpResponseValid(r, 200, 'application/octet-stream')
self.assertTrue('content-length' in r.headers)
self.assertEqual(r.headers['content-length'], '32768')
self.assertEqual(r.text, '\0' * 32768)
def test_directory_listing(self):
r = requests.get('http://127.0.0.1:8080/icons',
headers={'Accept-Encoding': 'foobar'})
self.assertResponseHtml(r)
self.assertTrue('<h1>Index of /icons</h1>' in r.text)
def assertHasImage(name):
imgtag = "<a href=\"/icons/%s.png\">%s.png</a>" % (name, name)
self.assertTrue(imgtag in r.text)
assertHasImage('back')
assertHasImage('file')
assertHasImage('folder')
self.assertTrue('</html>' in r.text)
def test_has_lwan_server_header(self):
r = requests.get('http://127.0.0.1:8080/100.html')
self.assertTrue('server' in r.headers)
self.assertEqual(r.headers['server'], 'lwan')
def test_directory_without_trailing_slash_redirects(self):
r = requests.get('http://127.0.0.1:8080/icons', allow_redirects=False)
self.assertResponsePlain(r, 301)
self.assertTrue('location' in r.headers)
self.assertEqual(r.headers['location'], '/icons/')
class SocketTest(LwanTest):
def connect(self, host='127.0.0.1', port=8080):
def _connect(host, port):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((host, port))
except socket.error:
return None
return sock
sock = _connect(host, port)
self.assertNotEqual(sock, None)
return sock
class TestMalformedRequests(SocketTest):
def assertHttpCode(self, sock, code):
contents = sock.recv(128)
self.assertRegexpMatches(contents, r'^HTTP/1\.[01] ' + str(code) + r' ')
def test_random_flood(self):
with open('/dev/urandom', 'rb') as urandom:
for step in range(10):
buffer = b''
while len(buffer) < 8192:
buffer += urandom.read(8192 - len(buffer))
sock = self.connect()
sock.send(buffer)
self.assertHttpCode(sock, 413)
def test_cat_sleeping_on_keyboard(self):
sock = self.connect()
sock.send('asldkfjg238045tgqwdcjv1li 2u4ftw dfjkb12345t\r\n\r\n')
self.assertHttpCode(sock, 405)
def test_no_http_version_fails(self):
sock = self.connect()
sock.send('GET /\r\n\r\n')
self.assertHttpCode(sock, 400)
def test_proxy_get_fails(self):
sock = self.connect()
sock.send('GET http://example.com HTTP/1.0\r\n\r\n')
self.assertHttpCode(sock, 400)
def test_get_not_http(self):
sock = self.connect()
sock.send('GET / FROG/1.0\r\n\r\n')
self.assertHttpCode(sock, 400)
def test_get_http_not_1_x(self):
sock = self.connect()
sock.send('GET / HTTP/2.0\r\n\r\n')
self.assertHttpCode(sock, 400)
def test_request_too_large(self):
r = requests.get('http://127.0.0.1:8080/' + 'X' * 10000)
self.assertResponseHtml(r, 413)
class TestHelloWorld(LwanTest):
def test_head_request_hello(self):
r = requests.head('http://127.0.0.1:8080/hello',
headers={'Accept-Encoding': 'foobar'})
self.assertResponsePlain(r)
self.assertTrue('content-length' in r.headers)
self.assertEqual(int(r.headers['content-length']), len('Hello, world!'))
self.assertEqual(r.text, '')
def test_has_custom_header(self):
r = requests.get('http://127.0.0.1:8080/hello')
self.assertTrue('x-the-answer-to-the-universal-question' in r.headers)
self.assertEqual(r.headers['x-the-answer-to-the-universal-question'], '42')
def test_no_param(self):
r = requests.get('http://127.0.0.1:8080/hello')
self.assertResponsePlain(r)
self.assertTrue('content-length' in r.headers)
self.assertEqual(int(r.headers['content-length']), len('Hello, world!'))
self.assertEqual(r.text, 'Hello, world!')
def test_with_param(self):
r = requests.get('http://127.0.0.1:8080/hello?name=testsuite')
self.assertResponsePlain(r)
self.assertTrue('content-length' in r.headers)
self.assertEqual(int(r.headers['content-length']),
len('Hello, testsuite!'))
self.assertEqual(r.text, 'Hello, testsuite!')
def test_with_param_and_fragment(self):
r = requests.get('http://127.0.0.1:8080/hello?name=testsuite#fragment')
self.assertResponsePlain(r)
self.assertTrue('content-length' in r.headers)
self.assertEqual(int(r.headers['content-length']),
len('Hello, testsuite!'))
self.assertEqual(r.text, 'Hello, testsuite!')
def test_post_request(self):
data = {
'answer': 'forty-two',
'foo': 'bar'
}
r = requests.post('http://127.0.0.1:8080/hello?dump_vars=1', data=data)
self.assertResponsePlain(r)
self.assertTrue('POST data' in r.text)
for k, v in data.items():
self.assertTrue('Key = "%s"; Value = "%s"\n' % (k, v) in r.text)
class TestCache(LwanTest):
def mmaps(self, f):
f = f + '\n'
return (l.endswith(f) for l in
file('/proc/%d/maps' % self.lwan.pid))
def count_mmaps(self, f):
return sum(self.mmaps(f))
def is_mmapped(self, f):
return any(self.mmaps(f))
def wait_munmap(self, f, timeout=20.0):
while self.is_mmapped(f) and timeout >= 0:
time.sleep(0.1)
timeout -= 0.1
def test_cache_munmaps_conn_close(self):
r = requests.get('http://127.0.0.1:8080/100.html')
self.assertTrue(self.is_mmapped('/100.html'))
self.wait_munmap('/100.html')
self.assertFalse(self.is_mmapped('/100.html'))
def test_cache_munmaps_conn_keep_alive(self):
s = requests.Session()
r = s.get('http://127.0.0.1:8080/100.html')
self.assertTrue(self.is_mmapped('/100.html'))
self.wait_munmap('/100.html')
self.assertFalse(self.is_mmapped('/100.html'))
def test_cache_does_not_mmap_large_files(self):
r = requests.get('http://127.0.0.1:8080/zero')
self.assertFalse(self.is_mmapped('/zero'))
def test_cache_mmaps_once_conn_keep_alive(self):
s = requests.Session()
for request in range(5):
r = s.get('http://127.0.0.1:8080/100.html')
self.assertEqual(self.count_mmaps('/100.html'), 1)
def test_cache_mmaps_once_conn_close(self):
for request in range(5):
requests.get('http://127.0.0.1:8080/100.html')
self.assertEqual(self.count_mmaps('/100.html'), 1)
def test_cache_mmaps_once_even_after_timeout(self):
for request in range(5):
requests.get('http://127.0.0.1:8080/100.html')
self.assertEqual(self.count_mmaps('/100.html'), 1)
time.sleep(10)
requests.get('http://127.0.0.1:8080/100.html')
self.assertEqual(self.count_mmaps('/100.html'), 1)
class TestPipelinedRequests(SocketTest):
def test_pipelined_requests(self):
response_separator = re.compile('\r\n\r\n')
names = ['name%04x' % x for x in range(16)]
reqs = '\r\n\r\n'.join('''GET /hello?name=%s HTTP/1.1\r
Host: localhost\r
Connection: keep-alive\r
Accept: text/plain,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7''' % name for name in names)
reqs += '\r\n\r\n'
sock = self.connect()
sock.send(reqs)
responses = ''
while len(response_separator.findall(responses)) != 16:
response = sock.recv(32)
if response:
responses += response
else:
break
for name in names:
s = 'Hello, %s!' % name
self.assertTrue(s in responses)
responses = responses.replace(s, '')
if __name__ == '__main__':
unittest.main()
|
schets/lwan
|
tools/testsuite.py
|
Python
|
gpl-2.0
| 12,353
|
import numpy
import sklearn.datasets
infile = "./covtype"
outfile1 = "./covtype_train_noise"
outfile2 = "./covtype_test_noise"
x0, y0 = sklearn.datasets.load_svmlight_file(infile)
x0 = x0.todense()
x1 = numpy.concatenate((x0, x0, x0, x0, x0), axis=0)
y1 = numpy.concatenate((y0, y0, y0, y0, y0), axis=0)
n, d = x1.shape
noise = numpy.random.normal(0, 0.02, (n, d))
x1 = x1 + noise
idx = numpy.random.permutation(n)
x = x1[idx, :]
y = y1[idx]
ntrain = int(numpy.floor(n * 0.8))
sklearn.datasets.dump_svmlight_file(x[0:ntrain, :], y[0:ntrain], outfile1, zero_based=False)
sklearn.datasets.dump_svmlight_file(x[ntrain:n, :], y[ntrain:n], outfile2, zero_based=False)
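# Sanity-check sketch (assumes the two files above were just written): reload
# the train split and confirm it holds the expected 80% of the rows.
x_check, y_check = sklearn.datasets.load_svmlight_file(outfile1)
assert x_check.shape[0] == ntrain
assert y_check.shape[0] == ntrain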
|
wangshusen/SparkGiant
|
data/RandNoise.py
|
Python
|
mit
| 664
|
#####################################################################################
#
# Copyright (C) Tavendo GmbH
#
# Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
# have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
import gc
from datetime import datetime
from twisted.internet import reactor
from twisted.internet.defer import DeferredList, inlineCallbacks, returnValue
from twisted.internet.task import LoopingCall
try:
# Manhole support needs a couple of packages optional for Crossbar.
# So we catch import errors and note those.
#
# twisted.conch.manhole_ssh will import even without, but we _need_ SSH
import Crypto # noqa
import pyasn1 # noqa
from twisted.cred import checkers, portal
from twisted.conch.manhole import ColoredManhole
from twisted.conch.manhole_ssh import ConchFactory, \
TerminalRealm, \
TerminalSession
except ImportError as e:
_HAS_MANHOLE = False
_MANHOLE_MISSING_REASON = str(e)
else:
_HAS_MANHOLE = True
_MANHOLE_MISSING_REASON = None
from autobahn.util import utcnow, utcstr, rtime
from autobahn.twisted.wamp import ApplicationSession
from autobahn.wamp.exception import ApplicationError
from autobahn.wamp.types import PublishOptions, RegisterOptions
from crossbar.common import checkconfig
from crossbar.twisted.endpoint import create_listening_port_from_config
from crossbar.common.processinfo import _HAS_PSUTIL
if _HAS_PSUTIL:
from crossbar.common.processinfo import ProcessInfo
# from crossbar.common.processinfo import SystemInfo
from crossbar._logging import make_logger
__all__ = ('NativeProcessSession',)
if _HAS_MANHOLE:
class ManholeService(object):
"""
Manhole service running inside a native processes (controller, router, container).
This class is for _internal_ use within NativeProcessSession.
"""
def __init__(self, config, who):
"""
Ctor.
:param config: The configuration the manhole service was started with.
:type config: dict
:param who: Who triggered creation of this service.
:type who: str
"""
self.config = config
self.who = who
self.status = 'starting'
self.created = datetime.utcnow()
self.started = None
self.port = None
def marshal(self):
"""
Marshal object information for use with WAMP calls/events.
:returns: dict -- The marshalled information.
"""
now = datetime.utcnow()
return {
'created': utcstr(self.created),
'status': self.status,
'started': utcstr(self.started) if self.started else None,
'uptime': (now - self.started).total_seconds() if self.started else None,
'config': self.config
}
class NativeProcessSession(ApplicationSession):
"""
A native Crossbar.io process (currently: controller, router or container).
"""
log = make_logger()
def onConnect(self, do_join=True):
"""
"""
if not hasattr(self, 'cbdir'):
self.cbdir = self.config.extra.cbdir
if not hasattr(self, '_uri_prefix'):
self._uri_prefix = 'crossbar.node.{}'.format(self.config.extra.node)
self.log.debug("Session connected to management router")
self._started = datetime.utcnow()
# see: BaseSession
self.include_traceback = False
self.debug_app = False
self._manhole_service = None
if _HAS_PSUTIL:
self._pinfo = ProcessInfo()
self._pinfo_monitor = None
self._pinfo_monitor_seq = 0
else:
self._pinfo = None
self._pinfo_monitor = None
self._pinfo_monitor_seq = None
self.log.info("Warning: process utilities not available")
if do_join:
self.join(self.config.realm)
@inlineCallbacks
def onJoin(self, details):
"""
Called when process has joined the node's management realm.
"""
procs = [
'start_manhole',
'stop_manhole',
'get_manhole',
'trigger_gc',
'utcnow',
'started',
'uptime',
'get_process_info',
'get_process_stats',
'set_process_stats_monitoring'
]
dl = []
for proc in procs:
uri = '{}.{}'.format(self._uri_prefix, proc)
self.log.debug("Registering procedure '{uri}'", uri=uri)
dl.append(self.register(getattr(self, proc), uri, options=RegisterOptions(details_arg='details')))
regs = yield DeferredList(dl)
self.log.debug("{cls} registered {len_reg} procedures",
cls=self.__class__.__name__, len_reg=len(regs))
def get_process_info(self, details=None):
"""
Get process information (open files, sockets, ...).
:returns: dict -- Dictionary with process information.
"""
self.log.debug("{cls}.get_process_info",
cls=self.__class__.__name__)
if self._pinfo:
return self._pinfo.get_info()
else:
emsg = "ERROR: could not retrieve process statistics - required packages not installed"
raise ApplicationError("crossbar.error.feature_unavailable", emsg)
def get_process_stats(self, details=None):
"""
Get process statistics (CPU, memory, I/O).
:returns: dict -- Dictionary with process statistics.
"""
self.log.debug("{cls}.get_process_stats", cls=self.__class__.__name__)
if self._pinfo:
return self._pinfo.get_stats()
else:
emsg = "ERROR: could not retrieve process statistics - required packages not installed"
raise ApplicationError("crossbar.error.feature_unavailable", emsg)
def set_process_stats_monitoring(self, interval, details=None):
"""
Enable/disable periodic publication of process statistics.
:param interval: The monitoring interval in seconds. Set to 0 to disable monitoring.
:type interval: float
"""
self.log.debug("{cls}.set_process_stats_monitoring(interval = {interval})",
cls=self.__class__.__name__, interval=interval)
if self._pinfo:
stats_monitor_set_topic = '{}.on_process_stats_monitoring_set'.format(self._uri_prefix)
# stop and remove any existing monitor
if self._pinfo_monitor:
self._pinfo_monitor.stop()
self._pinfo_monitor = None
self.publish(stats_monitor_set_topic, 0, options=PublishOptions(exclude=[details.caller]))
# possibly start a new monitor
if interval > 0:
stats_topic = '{}.on_process_stats'.format(self._uri_prefix)
def publish_stats():
stats = self._pinfo.get_stats()
self._pinfo_monitor_seq += 1
stats['seq'] = self._pinfo_monitor_seq
self.publish(stats_topic, stats)
self._pinfo_monitor = LoopingCall(publish_stats)
self._pinfo_monitor.start(interval)
self.publish(stats_monitor_set_topic, interval, options=PublishOptions(exclude=[details.caller]))
else:
emsg = "ERROR: cannot setup process statistics monitor - required packages not installed"
raise ApplicationError("crossbar.error.feature_unavailable", emsg)
def trigger_gc(self, details=None):
"""
Triggers a garbage collection.
:returns: float -- Time consumed for GC in ms.
"""
self.log.debug("{cls}.trigger_gc", cls=self.__class__.__name__)
started = rtime()
gc.collect()
return 1000. * (rtime() - started)
@inlineCallbacks
def start_manhole(self, config, details=None):
"""
Start a manhole (SSH) within this worker.
:param config: Manhole configuration.
:type config: obj
"""
self.log.debug("{cls}.start_manhole(config = {config})",
cls=self.__class__.__name__, config=config)
if not _HAS_MANHOLE:
emsg = "ERROR: could not start manhole - required packages are missing ({})".format(_MANHOLE_MISSING_REASON)
self.log.error(emsg)
raise ApplicationError("crossbar.error.feature_unavailable", emsg)
if self._manhole_service:
emsg = "ERROR: could not start manhole - already running (or starting)"
self.log.warn(emsg)
raise ApplicationError("crossbar.error.already_started", emsg)
try:
checkconfig.check_manhole(config)
except Exception as e:
emsg = "ERROR: could not start manhole - invalid configuration ({})".format(e)
self.log.error(emsg)
raise ApplicationError('crossbar.error.invalid_configuration', emsg)
# setup user authentication
#
checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
for user in config['users']:
checker.addUser(user['user'], user['password'])
# setup manhole namespace
#
namespace = {'session': self}
class PatchedTerminalSession(TerminalSession):
# get rid of
# exceptions.AttributeError: TerminalSession instance has no attribute 'windowChanged'
def windowChanged(self, winSize):
pass
rlm = TerminalRealm()
rlm.sessionFactory = PatchedTerminalSession # monkey patch
rlm.chainedProtocolFactory.protocolFactory = lambda _: ColoredManhole(namespace)
ptl = portal.Portal(rlm, [checker])
factory = ConchFactory(ptl)
factory.noisy = False
self._manhole_service = ManholeService(config, details.caller)
starting_topic = '{}.on_manhole_starting'.format(self._uri_prefix)
starting_info = self._manhole_service.marshal()
# the caller gets a progressive result ..
if details.progress:
details.progress(starting_info)
# .. while all others get an event
self.publish(starting_topic, starting_info, options=PublishOptions(exclude=[details.caller]))
try:
self._manhole_service.port = yield create_listening_port_from_config(config['endpoint'], factory, self.cbdir, reactor)
except Exception as e:
self._manhole_service = None
emsg = "ERROR: manhole service endpoint cannot listen - {}".format(e)
self.log.error(emsg)
raise ApplicationError("crossbar.error.cannot_listen", emsg)
# alright, manhole has started
self._manhole_service.started = datetime.utcnow()
self._manhole_service.status = 'started'
started_topic = '{}.on_manhole_started'.format(self._uri_prefix)
started_info = self._manhole_service.marshal()
self.publish(started_topic, started_info, options=PublishOptions(exclude=[details.caller]))
returnValue(started_info)
@inlineCallbacks
def stop_manhole(self, details=None):
"""
Stop Manhole.
"""
self.log.debug("{cls}.stop_manhole", cls=self.__class__.__name__)
if not _HAS_MANHOLE:
emsg = "ERROR: could not start manhole - required packages are missing ({})".format(_MANHOLE_MISSING_REASON)
self.log.error(emsg)
raise ApplicationError("crossbar.error.feature_unavailable", emsg)
if not self._manhole_service or self._manhole_service.status != 'started':
emsg = "ERROR: cannot stop manhole - not running (or already shutting down)"
raise ApplicationError("crossbar.error.not_started", emsg)
self._manhole_service.status = 'stopping'
stopping_topic = '{}.on_manhole_stopping'.format(self._uri_prefix)
stopping_info = None
# the caller gets a progressive result ..
if details.progress:
details.progress(stopping_info)
# .. while all others get an event
self.publish(stopping_topic, stopping_info, options=PublishOptions(exclude=[details.caller]))
try:
yield self._manhole_service.port.stopListening()
except Exception as e:
raise Exception("INTERNAL ERROR: don't know how to handle a failed called to stopListening() - {}".format(e))
self._manhole_service = None
stopped_topic = '{}.on_manhole_stopped'.format(self._uri_prefix)
stopped_info = None
self.publish(stopped_topic, stopped_info, options=PublishOptions(exclude=[details.caller]))
returnValue(stopped_info)
def get_manhole(self, details=None):
"""
Get current manhole service information.
:returns: dict -- A dict with service information or `None` if the service is not running.
"""
self.log.debug("{cls}.get_manhole", cls=self.__class__.__name__)
if not _HAS_MANHOLE:
emsg = "ERROR: could not start manhole - required packages are missing ({})".format(_MANHOLE_MISSING_REASON)
self.log.error(emsg)
raise ApplicationError("crossbar.error.feature_unavailable", emsg)
if not self._manhole_service:
return None
else:
return self._manhole_service.marshal()
def utcnow(self, details=None):
"""
Return current time as determined from within this process.
:returns str -- Current time in UTC ISO 8601 format.
"""
self.log.debug("{cls}.utcnow", cls=self.__class__.__name__)
return utcnow()
def started(self, details=None):
"""
Return start time of this process.
:returns str -- Start time in UTC ISO 8601 format.
"""
self.log.debug("{cls}.started", cls=self.__class__.__name__)
return utcstr(self._started)
def uptime(self, details=None):
"""
Uptime of this process.
:returns float -- Uptime in seconds.
"""
self.log.debug("{cls}.uptime", cls=self.__class__.__name__)
now = datetime.utcnow()
return (now - self._started).total_seconds()
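# Call sketch (the node name 'node1' is hypothetical): any session joined to
# the node's management realm can invoke the procedures registered in onJoin
# above, e.g.
#   yield session.call('crossbar.node.node1.get_process_stats')
#   yield session.call('crossbar.node.node1.set_process_stats_monitoring', 5.0)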
|
GoodgameStudios/crossbar
|
crossbar/common/process.py
|
Python
|
agpl-3.0
| 15,725
|
"""
This file contains all the classes used by has_access for error handling
"""
from django.utils.translation import ugettext as _
class AccessResponse(object):
"""Class that represents a response from a has_access permission check."""
def __init__(self, has_access, error_code=None, developer_message=None, user_message=None):
"""
Creates an AccessResponse object.
Arguments:
has_access (bool): if the user is granted access or not
error_code (String): optional - default is None. Unique identifier
for the specific type of error
developer_message (String): optional - default is None. Message
to show the developer
user_message (String): optional - default is None. Message to
show the user
"""
self.has_access = has_access
self.error_code = error_code
self.developer_message = developer_message
self.user_message = user_message
if has_access:
assert error_code is None
def __nonzero__(self):
"""
Overrides bool().
Allows for truth value testing of AccessResponse objects, so callers
who do not need the specific error information can check if access
is granted.
Returns:
bool: whether or not access is granted
"""
return self.has_access
def to_json(self):
"""
Creates a serializable JSON representation of an AccessResponse object.
Returns:
dict: JSON representation
"""
return {
"has_access": self.has_access,
"error_code": self.error_code,
"developer_message": self.developer_message,
"user_message": self.user_message
}
class AccessError(AccessResponse):
"""
Class that holds information about the error in the case of an access
denial in has_access. Contains the error code, user and developer
messages. Subclasses represent specific errors.
"""
def __init__(self, error_code, developer_message, user_message):
"""
Creates an AccessError object.
An AccessError object represents an AccessResponse where access is
denied (has_access is False).
Arguments:
error_code (String): unique identifier for the specific type of error
developer_message (String): message to show the developer
user_message (String): message to show the user
"""
super(AccessError, self).__init__(False, error_code, developer_message, user_message)
class StartDateError(AccessError):
"""
Access denied because the course has not started yet and the user
is not staff
"""
def __init__(self, start_date):
error_code = "course_not_started"
developer_message = "Course does not start until {}".format(start_date)
user_message = _("Course does not start until {}" # pylint: disable=translation-of-non-string
.format("{:%B %d, %Y}".format(start_date)))
super(StartDateError, self).__init__(error_code, developer_message, user_message)
class MilestoneError(AccessError):
"""
Access denied because the user has unfulfilled milestones
"""
def __init__(self):
error_code = "unfulfilled_milestones"
developer_message = "User has unfulfilled milestones"
user_message = _("You have unfulfilled milestones")
super(MilestoneError, self).__init__(error_code, developer_message, user_message)
class VisibilityError(AccessError):
"""
Access denied because the user does not have the correct role to view this
course.
"""
def __init__(self):
error_code = "not_visible_to_user"
developer_message = "Course is not visible to this user"
user_message = _("You do not have access to this course")
super(VisibilityError, self).__init__(error_code, developer_message, user_message)
class MobileAvailabilityError(AccessError):
"""
Access denied because the course is not available on mobile for the user
"""
def __init__(self):
error_code = "mobile_unavailable"
developer_message = "Course is not available on mobile for this user"
user_message = _("You do not have access to this course on a mobile device")
super(MobileAvailabilityError, self).__init__(error_code, developer_message, user_message)
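# Illustrative sketch (not part of edx-platform): a new denial reason follows
# the same pattern as the subclasses above - choose an error code and the two
# messages, then delegate to AccessError.
class EnrollmentRequiredError(AccessError):
"""
Access denied because the user is not enrolled in the course
"""
def __init__(self):
error_code = "not_enrolled"
developer_message = "User is not enrolled in this course"
user_message = _("You must be enrolled in this course to see its content")
super(EnrollmentRequiredError, self).__init__(error_code, developer_message, user_message)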
|
nagyistoce/edx-platform
|
lms/djangoapps/courseware/access_response.py
|
Python
|
agpl-3.0
| 4,494
|
from dowser.dowser import Root, ReferrerTree # NOQA
|
appknox/dowser-py3
|
dowser/__init__.py
|
Python
|
mit
| 53
|
import types
import inspect
def get_attrs(obj):
all_obj_attrs = [x for x in dir(obj) if not x.startswith('_')]
props = []
funcs = []
fields = []
for obj_attr in all_obj_attrs:
objs_to_check = list(obj.__class__.__mro__)
objs_to_check.insert(0, obj)
for obj_class in objs_to_check:
try:
attr = getattr(obj_class, obj_attr)
if isinstance(attr, property):
get_sig = str(inspect.signature(attr.fget))
if '->' in get_sig:
get_sig = ':' + get_sig.split('-> ')[1]
else:
get_sig = ''
if attr.fset is not None:
set_sig = inspect.signature(attr.fset)
if len(set_sig.parameters.keys()) > 0:
set_type = str(set_sig.parameters[list(set_sig.parameters.keys())[-1]])
else:
set_type = ''
if ':' in set_type:
set_type = ':' + set_type.split(':')[1]
else:
set_type = ''
props.append(obj_attr + ' - get{0}, set{1}'.format(get_sig, set_type))
else:
props.append(obj_attr + ' - get{0}'.format(get_sig))
break
elif isinstance(attr, types.FunctionType):
funcs.append(obj_attr + str(inspect.signature(attr)))
break
except Exception as err:
pass
else:
fields.append(obj_attr)
return sorted(props), sorted(funcs), sorted(fields)
def print_attrs(obj, recurse_to=None, indent=0):
if recurse_to is None:
recurse_to = set()
else:
recurse_to = set(recurse_to)
props, funcs, fields = get_attrs(obj)
def print2(x): return print((" " * indent) + x)
print(obj.__class__.__name__)
if len(props) > 0:
if len(funcs) + len(fields) == 0:
print2(" └ properties:")
pipe = ' '
else:
print2(" ├ properties:")
pipe = '│'
for index, prop in enumerate(props, start=1):
if prop in recurse_to:
if len(props) != index:
print((" " * indent) + ' {0} ├- {1} - '.format(pipe, prop), end='')
print_attrs(getattr(obj, prop), recurse_to=(recurse_to - {prop}), indent=indent + 7)
else:
print((" " * indent) + ' {0} └- {1} - '.format(pipe, prop), end='')
print_attrs(getattr(obj, prop), recurse_to=(recurse_to - {prop}), indent=indent + 7)
else:
if len(props) != index:
print2(' {0} ├- {1}'.format(pipe, prop))
else:
print2(' {0} └- {1}'.format(pipe, prop))
if len(funcs) > 0:
if len(fields) == 0:
print2(" └ methods:")
pipe = ' '
else:
print2(" ├ methods:")
pipe = '│'
for index, func in enumerate(funcs, start=1):
if len(funcs) != index:
print2(' {0} ├- {1}'.format(pipe, func))
else:
print2(' {0} └- {1}'.format(pipe, func))
if len(fields) > 0:
print2(" └ fields:")
for index, field in enumerate(fields, start=1):
if field in recurse_to:
if len(fields) != index:
print((" " * indent) + ' ├- {0} - '.format(field), end='')
print_attrs(getattr(obj, field), recurse_to=(recurse_to - {field}), indent=indent + 6)
else:
print((" " * indent) + ' └- {0} - '.format(field), end='')
print_attrs(getattr(obj, field), recurse_to=(recurse_to - {field}), indent=indent + 6)
else:
if len(fields) != index:
print2(' ├- {0}'.format(field))
else:
print2(' └- {0}'.format(field))
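if __name__ == '__main__':
# Minimal self-test sketch (not part of the original module): dump the
# attribute tree of a small demo class to exercise get_attrs/print_attrs.
class _Demo(object):
@property
def name(self) -> str:
return 'demo'
def rename(self, new_name: str):
pass
print_attrs(_Demo())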
|
openrazer/openrazer
|
pylib/openrazer/client/debug.py
|
Python
|
gpl-2.0
| 4,206
|
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../tools'))
import fasta
import genetics
import table
def main(argv):
codon = table.codon(argv[0])
strings = fasta.read_ordered(argv[1])
dna = strings[0]
introns = strings[1:]
for intron in introns:
dna = dna.replace(intron, '')
print genetics.encode_protein(genetics.dna_to_rna(dna), codon)
if __name__ == "__main__":
main(sys.argv[1:])
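# Invocation sketch (file names are hypothetical): the first argument is the
# codon table, the second the FASTA file with the DNA string and its introns:
#   python rosalind_splc.py codon_table.txt rosalind_splc.txt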
|
cowboysmall/rosalind
|
src/stronghold/rosalind_splc.py
|
Python
|
mit
| 466
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import copy
import sys
from pants.option.arg_splitter import GLOBAL_SCOPE, ArgSplitter
from pants.option.global_options import GlobalOptionsRegistrar
from pants.option.option_util import is_list_option
from pants.option.option_value_container import OptionValueContainer
from pants.option.parser_hierarchy import ParserHierarchy, enclosing_scope
from pants.option.scope import ScopeInfo
class Options(object):
"""The outward-facing API for interacting with options.
Supports option registration and fetching option values.
Examples:
The value in global scope of option '--foo-bar' (registered in global scope) will be selected
in the following order:
- The value of the --foo-bar flag in global scope.
- The value of the PANTS_GLOBAL_FOO_BAR environment variable.
- The value of the PANTS_FOO_BAR environment variable.
- The value of the foo_bar key in the [GLOBAL] section of pants.ini.
- The hard-coded value provided at registration time.
- None.
The value in scope 'compile.java' of option '--foo-bar' (registered in global scope) will be
selected in the following order:
- The value of the --foo-bar flag in scope 'compile.java'.
- The value of the --foo-bar flag in scope 'compile'.
- The value of the --foo-bar flag in global scope.
- The value of the PANTS_COMPILE_JAVA_FOO_BAR environment variable.
- The value of the PANTS_COMPILE_FOO_BAR environment variable.
- The value of the PANTS_GLOBAL_FOO_BAR environment variable.
- The value of the PANTS_FOO_BAR environment variable.
- The value of the foo_bar key in the [compile.java] section of pants.ini.
- The value of the foo_bar key in the [compile] section of pants.ini.
- The value of the foo_bar key in the [GLOBAL] section of pants.ini.
- The hard-coded value provided at registration time.
- None.
The value in scope 'compile.java' of option '--foo-bar' (registered in scope 'compile') will be
selected in the following order:
- The value of the --foo-bar flag in scope 'compile.java'.
- The value of the --foo-bar flag in scope 'compile'.
- The value of the PANTS_COMPILE_JAVA_FOO_BAR environment variable.
- The value of the PANTS_COMPILE_FOO_BAR environment variable.
- The value of the foo_bar key in the [compile.java] section of pants.ini.
- The value of the foo_bar key in the [compile] section of pants.ini.
- The value of the foo_bar key in the [GLOBAL] section of pants.ini
(because of automatic config file fallback to that section).
- The hard-coded value provided at registration time.
- None.
"""
class OptionTrackerRequiredError(Exception):
"""Options requires an OptionTracker instance."""
@classmethod
def complete_scopes(cls, scope_infos):
"""Expand a set of scopes to include all enclosing scopes.
E.g., if the set contains `foo.bar.baz`, ensure that it also contains `foo.bar` and `foo`.
"""
ret = {GlobalOptionsRegistrar.get_scope_info()}
for scope_info in scope_infos:
ret.add(scope_info)
original_scopes = {si.scope for si in scope_infos}
for scope_info in scope_infos:
scope = scope_info.scope
while scope != '':
if scope not in original_scopes:
ret.add(ScopeInfo(scope, ScopeInfo.INTERMEDIATE))
scope = enclosing_scope(scope)
return ret
@classmethod
def create(cls, env, config, known_scope_infos, args=None, bootstrap_option_values=None,
option_tracker=None,):
"""Create an Options instance.
:param env: a dict of environment variables.
:param :class:`pants.option.config.Config` config: data from a config file.
:param known_scope_infos: ScopeInfos for all scopes that may be encountered.
:param args: a list of cmd-line args; defaults to `sys.argv` if None is supplied.
:param bootstrap_option_values: An optional namespace containing the values of bootstrap
options. We can use these values when registering other options.
:param :class:`pants.option.option_tracker.OptionTracker` option_tracker: option tracker
instance to record how option values were assigned.
"""
# We need parsers for all the intermediate scopes, so inherited option values
# can propagate through them.
complete_known_scope_infos = cls.complete_scopes(known_scope_infos)
splitter = ArgSplitter(complete_known_scope_infos)
args = sys.argv if args is None else args
goals, scope_to_flags, target_specs, passthru, passthru_owner = splitter.split_args(args)
if not option_tracker:
raise cls.OptionTrackerRequiredError()
if bootstrap_option_values:
target_spec_files = bootstrap_option_values.target_spec_files
if target_spec_files:
for spec in target_spec_files:
with open(spec) as f:
target_specs.extend(filter(None, [line.strip() for line in f]))
help_request = splitter.help_request
parser_hierarchy = ParserHierarchy(env, config, complete_known_scope_infos, option_tracker)
values_by_scope = {} # Arg values, parsed per-scope on demand.
known_scope_to_info = {s.scope: s for s in complete_known_scope_infos}
return cls(goals, scope_to_flags, target_specs, passthru, passthru_owner, help_request,
parser_hierarchy, values_by_scope, bootstrap_option_values, known_scope_to_info,
option_tracker)
def __init__(self, goals, scope_to_flags, target_specs, passthru, passthru_owner, help_request,
parser_hierarchy, values_by_scope, bootstrap_option_values, known_scope_to_info,
option_tracker):
"""The low-level constructor for an Options instance.
Dependees should use `Options.create` instead.
"""
self._goals = goals
self._scope_to_flags = scope_to_flags
self._target_specs = target_specs
self._passthru = passthru
self._passthru_owner = passthru_owner
self._help_request = help_request
self._parser_hierarchy = parser_hierarchy
self._values_by_scope = values_by_scope
self._bootstrap_option_values = bootstrap_option_values
self._known_scope_to_info = known_scope_to_info
self._option_tracker = option_tracker
@property
def tracker(self):
return self._option_tracker
@property
def help_request(self):
"""
:API: public
"""
return self._help_request
@property
def target_specs(self):
"""The targets to operate on.
:API: public
"""
return self._target_specs
@property
def goals(self):
"""The requested goals, in the order specified on the cmd line.
:API: public
"""
return self._goals
@property
def known_scope_to_info(self):
return self._known_scope_to_info
@property
def scope_to_flags(self):
return self._scope_to_flags
def drop_flag_values(self):
"""Returns a copy of these options that ignores values specified via flags.
Any pre-cached option values are cleared and only option values that come from option defaults,
the config or the environment are used.
"""
# An empty scope_to_flags to force all values to come via the config -> env hierarchy alone
# and empty values in case we already cached some from flags.
no_flags = {}
no_values = {}
return Options(self._goals,
no_flags,
self._target_specs,
self._passthru,
self._passthru_owner,
self._help_request,
self._parser_hierarchy,
no_values,
self._bootstrap_option_values,
self._known_scope_to_info,
self._option_tracker)
def is_known_scope(self, scope):
"""Whether the given scope is known by this instance.
:API: public
"""
return scope in self._known_scope_to_info
def passthru_args_for_scope(self, scope):
# Passthru args "belong" to the last scope mentioned on the command-line.
# Note: If that last scope is a goal, we allow all tasks in that goal to access the passthru
# args. This is to allow the more intuitive
# pants run <target> -- <passthru args>
# instead of requiring
# pants run.py <target> -- <passthru args>.
#
# However note that in the case where multiple tasks run in the same goal, e.g.,
# pants test <target> -- <passthru args>
# Then, e.g., both junit and pytest will get the passthru args even though the user probably
# only intended them to go to one of them. If the wrong one is not a no-op then the error will
# be unpredictable. However this is not a common case, and can be circumvented with an
# explicit test.pytest or test.junit scope.
if (scope and self._passthru_owner and scope.startswith(self._passthru_owner) and
(len(scope) == len(self._passthru_owner) or scope[len(self._passthru_owner)] == '.')):
return self._passthru
else:
return []
def register(self, scope, *args, **kwargs):
"""Register an option in the given scope."""
self.get_parser(scope).register(*args, **kwargs)
def registration_function_for_optionable(self, optionable_class):
"""Returns a function for registering options on the given scope."""
# TODO(benjy): Make this an instance of a class that implements __call__, so we can
# docstring it, and so it's less weird than attaching properties to a function.
def register(*args, **kwargs):
kwargs['registering_class'] = optionable_class
self.register(optionable_class.options_scope, *args, **kwargs)
# Clients can access the bootstrap option values as register.bootstrap.
register.bootstrap = self.bootstrap_option_values()
# Clients can access the scope as register.scope.
register.scope = optionable_class.options_scope
return register
def get_parser(self, scope):
"""Returns the parser for the given scope, so code can register on it directly."""
return self._parser_hierarchy.get_parser_by_scope(scope)
def walk_parsers(self, callback):
self._parser_hierarchy.walk(callback)
def for_scope(self, scope):
"""Return the option values for the given scope.
Values are attributes of the returned object, e.g., options.foo.
Computed lazily per scope.
:API: public
"""
# Short-circuit, if already computed.
if scope in self._values_by_scope:
return self._values_by_scope[scope]
# First get enclosing scope's option values, if any.
if scope == GLOBAL_SCOPE:
values = OptionValueContainer()
else:
values = copy.copy(self.for_scope(enclosing_scope(scope)))
# Now add our values.
flags_in_scope = self._scope_to_flags.get(scope, [])
self._parser_hierarchy.get_parser_by_scope(scope).parse_args(flags_in_scope, values)
self._values_by_scope[scope] = values
for option in values:
self._option_tracker.record_option(scope=scope, option=option, value=values[option],
rank=values.get_rank(option))
return values
def get_fingerprintable_for_scope(self, scope):
"""Returns a list of fingerprintable (option type, option value) pairs for the given scope.
Fingerprintable options are options registered via a "fingerprint=True" kwarg.
:API: public
"""
pairs = []
# Note that we iterate over options registered at `scope` and at all enclosing scopes, since
# option-using code can read those values indirectly via its own OptionValueContainer, so
# they can affect that code's output.
registration_scope = scope
while registration_scope is not None:
parser = self._parser_hierarchy.get_parser_by_scope(registration_scope)
# Sort the arguments, so that the fingerprint is consistent.
for (_, kwargs) in sorted(parser.option_registrations_iter()):
if kwargs.get('recursive') and not kwargs.get('recursive_root'):
continue # We only need to fprint recursive options once.
if kwargs.get('fingerprint') is not True:
continue
# Note that we read the value from scope, even if the registration was on an enclosing
# scope, to get the right value for recursive options (and because this mirrors what
# option-using code does).
val = self.for_scope(scope)[kwargs['dest']]
# If we have a list then we delegate to the fingerprinting implementation of the members.
if is_list_option(kwargs):
val_type = kwargs.get('member_type', str)
else:
val_type = kwargs.get('type', str)
pairs.append((val_type, val))
registration_scope = (None if registration_scope == ''
else enclosing_scope(registration_scope))
return pairs
def __getitem__(self, scope):
# TODO(John Sirois): Mainly supports use of dict<str, dict<str, str>> for mock options in tests,
# Consider killing if tests consolidate on using TestOptions instead of the raw dicts.
return self.for_scope(scope)
def bootstrap_option_values(self):
"""Return the option values for bootstrap options.
General code can also access these values in the global scope. But option registration code
cannot, hence this special-casing of this small set of options.
"""
return self._bootstrap_option_values
def for_global_scope(self):
"""Return the option values for the global scope.
:API: public
"""
return self.for_scope(GLOBAL_SCOPE)
|
dbentley/pants
|
src/python/pants/option/options.py
|
Python
|
apache-2.0
| 13,830
|
# Copyright 2013 IBM Corp.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Authors:
# Erik Zaadi <erikz@il.ibm.com>
# Avishay Traeger <avishay@il.ibm.com>
import copy
import mock
from cinder import context
from cinder import exception
from cinder.i18n import _
from cinder.objects import fields
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers.ibm import xiv_ds8k
from cinder.volume import volume_types
FAKE = "fake"
FAKE2 = "fake2"
CANNOT_DELETE = "Can not delete"
TOO_BIG_VOLUME_SIZE = 12000
POOL_SIZE = 100
CONSISTGROUP_ID = 1
VOLUME = {'size': 16,
'name': FAKE,
'id': 1,
'status': 'available'}
VOLUME2 = {'size': 32,
'name': FAKE2,
'id': 2,
'status': 'available'}
CG_VOLUME = {'size': 16,
'name': FAKE,
'id': 3,
'consistencygroup_id': CONSISTGROUP_ID,
'status': 'available'}
MANAGED_FAKE = "managed_fake"
MANAGED_VOLUME = {'size': 16,
'name': MANAGED_FAKE,
'id': 2}
REPLICA_FAKE = "replicated_fake"
REPLICATED_VOLUME = {'size': 64,
'name': REPLICA_FAKE,
'id': 2}
REPLICATION_TARGETS = [{'target_device_id': 'fakedevice'}]
SECONDARY = 'fakedevice'
FAKE_FAILOVER_HOST = 'fakehost@fakebackend#fakepool'
FAKE_PROVIDER_LOCATION = 'fake_provider_location'
FAKE_DRIVER_DATA = 'fake_driver_data'
CONTEXT = {}
FAKESNAPSHOT = 'fakesnapshot'
SNAPSHOT = {'name': 'fakesnapshot',
'id': 3}
CONSISTGROUP = {'id': CONSISTGROUP_ID, }
CG_SNAPSHOT_ID = 1
CG_SNAPSHOT = {'id': CG_SNAPSHOT_ID,
'consistencygroup_id': CONSISTGROUP_ID}
CONNECTOR = {'initiator': "iqn.2012-07.org.fake:01:948f189c4695", }
class XIVDS8KFakeProxyDriver(object):
"""Fake IBM XIV and DS8K Proxy Driver."""
def __init__(self, xiv_ds8k_info, logger, expt,
driver=None, active_backend_id=None):
"""Initialize Proxy."""
self.xiv_ds8k_info = xiv_ds8k_info
self.logger = logger
self.exception = expt
self.xiv_ds8k_portal = self.xiv_ds8k_iqn = FAKE
self.volumes = {}
self.snapshots = {}
self.driver = driver
def setup(self, context):
if self.xiv_ds8k_info['xiv_ds8k_user'] != self.driver\
.configuration.san_login:
raise self.exception.NotAuthorized()
if self.xiv_ds8k_info['xiv_ds8k_address'] != self.driver\
.configuration.san_ip:
raise self.exception.HostNotFound(host='fake')
def create_volume(self, volume):
if volume['size'] > POOL_SIZE:
raise self.exception.VolumeBackendAPIException(data='blah')
self.volumes[volume['name']] = volume
def volume_exists(self, volume):
return self.volumes.get(volume['name'], None) is not None
def delete_volume(self, volume):
if self.volumes.get(volume['name'], None) is not None:
del self.volumes[volume['name']]
def manage_volume_get_size(self, volume, existing_ref):
if self.volumes.get(existing_ref['source-name'], None) is None:
raise self.exception.VolumeNotFound(volume_id=volume['id'])
return self.volumes[existing_ref['source-name']]['size']
def manage_volume(self, volume, existing_ref):
if self.volumes.get(existing_ref['source-name'], None) is None:
raise self.exception.VolumeNotFound(volume_id=volume['id'])
volume['size'] = MANAGED_VOLUME['size']
return {}
def unmanage_volume(self, volume):
pass
def initialize_connection(self, volume, connector):
if not self.volume_exists(volume):
raise self.exception.VolumeNotFound(volume_id=volume['id'])
lun_id = volume['id']
self.volumes[volume['name']]['attached'] = connector
return {'driver_volume_type': 'iscsi',
'data': {'target_discovered': True,
'target_portal': self.xiv_ds8k_portal,
'target_iqn': self.xiv_ds8k_iqn,
'target_lun': lun_id,
'volume_id': volume['id'],
'multipath': True,
'provider_location': "%s,1 %s %s" % (
self.xiv_ds8k_portal,
self.xiv_ds8k_iqn,
lun_id), },
}
def terminate_connection(self, volume, connector):
if not self.volume_exists(volume):
raise self.exception.VolumeNotFound(volume_id=volume['id'])
if not self.is_volume_attached(volume, connector):
raise self.exception.NotFound(_('Volume not found for '
'instance %(instance_id)s.')
% {'instance_id': 'fake'})
del self.volumes[volume['name']]['attached']
def is_volume_attached(self, volume, connector):
if not self.volume_exists(volume):
raise self.exception.VolumeNotFound(volume_id=volume['id'])
return (self.volumes[volume['name']].get('attached', None)
== connector)
def get_replication_status(self, context, volume):
if volume['replication_status'] == 'invalid_status_val':
raise exception.CinderException()
return {'replication_status': 'active'}
def retype(self, ctxt, volume, new_type, diff, host):
volume['easytier'] = new_type['extra_specs']['easytier']
return True, volume
def create_consistencygroup(self, ctxt, group):
volumes = [volume for k, volume in self.volumes.items()
if volume['consistencygroup_id'] == group['id']]
if volumes:
raise exception.CinderException(
message='The consistency group id of volume may be wrong.')
return {'status': fields.ConsistencyGroupStatus.AVAILABLE}
def delete_consistencygroup(self, ctxt, group, volumes):
for volume in self.volumes.values():
if (group.get('id', None)
== volume.get('consistencygroup_id', None)):
if volume['name'] == CANNOT_DELETE:
raise exception.VolumeBackendAPIException(
message='Volume can not be deleted')
else:
volume['status'] = 'deleted'
volumes.append(volume)
# Delete snapshots in consistency group
self.snapshots = {k: snap for k, snap in self.snapshots.items()
if not(snap.get('consistencygroup_id', None)
== group.get('id', None))}
# Delete volume in consistency group
self.volumes = {k: vol for k, vol in self.volumes.items()
if not(vol.get('consistencygroup_id', None)
== group.get('id', None))}
return {'status': 'deleted'}, volumes
def update_consistencygroup(
self, context, group,
add_volumes, remove_volumes):
model_update = {'status': fields.ConsistencyGroupStatus.AVAILABLE}
return model_update, None, None
def create_consistencygroup_from_src(
self, context, group, volumes, cgsnapshot, snapshots,
source_cg=None, source_vols=None):
return None, None
def create_cgsnapshot(self, ctxt, cgsnapshot, snapshots):
for volume in self.volumes.values():
if (cgsnapshot.get('consistencygroup_id', None)
== volume.get('consistencygroup_id', None)):
if volume['size'] > POOL_SIZE / 2:
raise self.exception.VolumeBackendAPIException(data='blah')
snapshot = copy.deepcopy(volume)
snapshot['name'] = CANNOT_DELETE \
if snapshot['name'] == CANNOT_DELETE \
else snapshot['name'] + 'Snapshot'
snapshot['status'] = 'available'
snapshot['cgsnapshot_id'] = cgsnapshot.get('id', None)
snapshot['consistencygroup_id'] = \
cgsnapshot.get('consistencygroup_id', None)
self.snapshots[snapshot['name']] = snapshot
snapshots.append(snapshot)
return {'status': 'available'}, snapshots
def delete_cgsnapshot(self, ctxt, cgsnapshot, snapshots):
updated_snapshots = []
for snapshot in snapshots:
if snapshot['name'] == CANNOT_DELETE:
raise exception.VolumeBackendAPIException(
message='Snapshot can not be deleted')
else:
snapshot['status'] = 'deleted'
updated_snapshots.append(snapshot)
# Delete snapshots in consistency group
self.snapshots = {k: snap for k, snap in self.snapshots.items()
if not(snap.get('consistencygroup_id', None)
== cgsnapshot.get('cgsnapshot_id', None))}
return {'status': 'deleted'}, updated_snapshots
def freeze_backend(self, context):
return True
def thaw_backend(self, context):
return True
def failover_host(self, context, volumes, secondary_id):
target_id = 'BLA'
volume_update_list = []
for volume in volumes:
status = 'failed-over'
if volume['replication_status'] == 'invalid_status_val':
status = 'error'
volume_update_list.append(
{'volume_id': volume['id'],
'updates': {'replication_status': status}})
return target_id, volume_update_list
class XIVDS8KVolumeDriverTest(test.TestCase):
"""Test IBM XIV and DS8K volume driver."""
def setUp(self):
"""Initialize IBM XIV and DS8K Driver."""
super(XIVDS8KVolumeDriverTest, self).setUp()
configuration = mock.Mock(conf.Configuration)
configuration.san_is_local = False
configuration.xiv_ds8k_proxy = \
'cinder.tests.unit.test_ibm_xiv_ds8k.XIVDS8KFakeProxyDriver'
configuration.xiv_ds8k_connection_type = 'iscsi'
configuration.xiv_chap = 'disabled'
configuration.san_ip = FAKE
configuration.management_ips = FAKE
configuration.san_login = FAKE
configuration.san_clustername = FAKE
configuration.san_password = FAKE
configuration.append_config_values(mock.ANY)
self.driver = xiv_ds8k.XIVDS8KDriver(
configuration=configuration)
def test_initialized_should_set_xiv_ds8k_info(self):
"""Test that the san flags are passed to the IBM proxy."""
self.assertEqual(
self.driver.xiv_ds8k_proxy.xiv_ds8k_info['xiv_ds8k_user'],
self.driver.configuration.san_login)
self.assertEqual(
self.driver.xiv_ds8k_proxy.xiv_ds8k_info['xiv_ds8k_pass'],
self.driver.configuration.san_password)
self.assertEqual(
self.driver.xiv_ds8k_proxy.xiv_ds8k_info['xiv_ds8k_address'],
self.driver.configuration.san_ip)
self.assertEqual(
self.driver.xiv_ds8k_proxy.xiv_ds8k_info['xiv_ds8k_vol_pool'],
self.driver.configuration.san_clustername)
def test_setup_should_fail_if_credentials_are_invalid(self):
"""Test that the xiv_ds8k_proxy validates credentials."""
self.driver.xiv_ds8k_proxy.xiv_ds8k_info['xiv_ds8k_user'] = 'invalid'
self.assertRaises(exception.NotAuthorized, self.driver.do_setup, None)
def test_setup_should_fail_if_connection_is_invalid(self):
"""Test that the xiv_ds8k_proxy validates connection."""
self.driver.xiv_ds8k_proxy.xiv_ds8k_info['xiv_ds8k_address'] = \
'invalid'
self.assertRaises(exception.HostNotFound, self.driver.do_setup, None)
def test_create_volume(self):
"""Test creating a volume."""
self.driver.do_setup(None)
self.driver.create_volume(VOLUME)
has_volume = self.driver.xiv_ds8k_proxy.volume_exists(VOLUME)
self.assertTrue(has_volume)
self.driver.delete_volume(VOLUME)
def test_volume_exists(self):
"""Test the volume exist method with a volume that doesn't exist."""
self.driver.do_setup(None)
self.assertFalse(
self.driver.xiv_ds8k_proxy.volume_exists({'name': FAKE})
)
def test_delete_volume(self):
"""Verify that a volume is deleted."""
self.driver.do_setup(None)
self.driver.create_volume(VOLUME)
self.driver.delete_volume(VOLUME)
has_volume = self.driver.xiv_ds8k_proxy.volume_exists(VOLUME)
self.assertFalse(has_volume)
def test_delete_volume_should_fail_for_not_existing_volume(self):
"""Verify that deleting a non-existing volume is OK."""
self.driver.do_setup(None)
self.driver.delete_volume(VOLUME)
def test_create_volume_should_fail_if_no_pool_space_left(self):
"""Verify that the xiv_ds8k_proxy validates volume pool space."""
self.driver.do_setup(None)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume,
{'name': FAKE,
'id': 1,
'size': TOO_BIG_VOLUME_SIZE})
def test_initialize_connection(self):
"""Test that inititialize connection attaches volume to host."""
self.driver.do_setup(None)
self.driver.create_volume(VOLUME)
self.driver.initialize_connection(VOLUME, CONNECTOR)
self.assertTrue(
self.driver.xiv_ds8k_proxy.is_volume_attached(VOLUME, CONNECTOR))
self.driver.terminate_connection(VOLUME, CONNECTOR)
self.driver.delete_volume(VOLUME)
def test_initialize_connection_should_fail_for_non_existing_volume(self):
"""Verify that initialize won't work for non-existing volume."""
self.driver.do_setup(None)
self.assertRaises(exception.VolumeNotFound,
self.driver.initialize_connection,
VOLUME,
CONNECTOR)
def test_terminate_connection(self):
"""Test terminating a connection."""
self.driver.do_setup(None)
self.driver.create_volume(VOLUME)
self.driver.initialize_connection(VOLUME, CONNECTOR)
self.driver.terminate_connection(VOLUME, CONNECTOR)
self.assertFalse(self.driver.xiv_ds8k_proxy.is_volume_attached(
VOLUME,
CONNECTOR))
self.driver.delete_volume(VOLUME)
def test_terminate_connection_should_fail_on_non_existing_volume(self):
"""Test that terminate won't work for non-existing volumes."""
self.driver.do_setup(None)
self.assertRaises(exception.VolumeNotFound,
self.driver.terminate_connection,
VOLUME,
CONNECTOR)
def test_manage_existing_get_size(self):
"""Test that manage_existing_get_size returns the expected size. """
self.driver.do_setup(None)
self.driver.create_volume(MANAGED_VOLUME)
existing_ref = {'source-name': MANAGED_VOLUME['name']}
return_size = self.driver.manage_existing_get_size(
VOLUME,
existing_ref)
self.assertEqual(return_size, MANAGED_VOLUME['size'])
        # cover both cases, whether the driver renames the volume or not
self.driver.delete_volume(VOLUME)
self.driver.delete_volume(MANAGED_VOLUME)
def test_manage_existing_get_size_should_fail_on_non_existing_volume(self):
"""Test that manage_existing_get_size fails on non existing volume. """
self.driver.do_setup(None)
# on purpose - do NOT create managed volume
existing_ref = {'source-name': MANAGED_VOLUME['name']}
self.assertRaises(exception.VolumeNotFound,
self.driver.manage_existing_get_size,
VOLUME,
existing_ref)
def test_manage_existing(self):
"""Test that manage_existing returns successfully. """
self.driver.do_setup(None)
self.driver.create_volume(MANAGED_VOLUME)
existing_ref = {'source-name': MANAGED_VOLUME['name']}
self.driver.manage_existing(VOLUME, existing_ref)
self.assertEqual(VOLUME['size'], MANAGED_VOLUME['size'])
        # cover both cases, whether the driver renames the volume or not
self.driver.delete_volume(VOLUME)
self.driver.delete_volume(MANAGED_VOLUME)
def test_manage_existing_should_fail_on_non_existing_volume(self):
"""Test that manage_existing fails on non existing volume. """
self.driver.do_setup(None)
# on purpose - do NOT create managed volume
existing_ref = {'source-name': MANAGED_VOLUME['name']}
self.assertRaises(exception.VolumeNotFound,
self.driver.manage_existing,
VOLUME,
existing_ref)
def test_get_replication_status(self):
"""Test that get_replication_status return successfully. """
self.driver.do_setup(None)
# assume the replicated volume is inactive
replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
replicated_volume['replication_status'] = 'inactive'
model_update = self.driver.get_replication_status(
CONTEXT,
replicated_volume
)
self.assertEqual(
model_update['replication_status'],
'active'
)
def test_get_replication_status_fail_on_exception(self):
"""Test that get_replication_status fails on exception"""
self.driver.do_setup(None)
replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
# on purpose - set invalid value to replication_status
# expect an exception.
replicated_volume['replication_status'] = 'invalid_status_val'
self.assertRaises(
exception.CinderException,
self.driver.get_replication_status,
CONTEXT,
replicated_volume
)
def test_retype(self):
"""Test that retype returns successfully."""
self.driver.do_setup(None)
# prepare parameters
ctxt = context.get_admin_context()
host = {
'host': 'foo',
'capabilities': {
'location_info': 'xiv_ds8k_fake_1',
'extent_size': '1024'
}
}
key_specs_old = {'easytier': False, 'warning': 2, 'autoexpand': True}
key_specs_new = {'easytier': True, 'warning': 5, 'autoexpand': False}
old_type_ref = volume_types.create(ctxt, 'old', key_specs_old)
new_type_ref = volume_types.create(ctxt, 'new', key_specs_new)
diff, equal = volume_types.volume_types_diff(
ctxt,
old_type_ref['id'],
new_type_ref['id'],
)
volume = copy.deepcopy(VOLUME)
old_type = volume_types.get_volume_type(ctxt, old_type_ref['id'])
volume['volume_type'] = old_type
volume['host'] = host
new_type = volume_types.get_volume_type(ctxt, new_type_ref['id'])
self.driver.create_volume(volume)
ret = self.driver.retype(ctxt, volume, new_type, diff, host)
self.assertTrue(ret)
self.assertTrue(volume['easytier'])
def test_retype_fail_on_exception(self):
"""Test that retype fails on exception."""
self.driver.do_setup(None)
# prepare parameters
ctxt = context.get_admin_context()
host = {
'host': 'foo',
'capabilities': {
'location_info': 'xiv_ds8k_fake_1',
'extent_size': '1024'
}
}
key_specs_old = {'easytier': False, 'warning': 2, 'autoexpand': True}
old_type_ref = volume_types.create(ctxt, 'old', key_specs_old)
new_type_ref = volume_types.create(ctxt, 'new')
diff, equal = volume_types.volume_types_diff(
ctxt,
old_type_ref['id'],
new_type_ref['id'],
)
volume = copy.deepcopy(VOLUME)
old_type = volume_types.get_volume_type(ctxt, old_type_ref['id'])
volume['volume_type'] = old_type
volume['host'] = host
new_type = volume_types.get_volume_type(ctxt, new_type_ref['id'])
self.driver.create_volume(volume)
self.assertRaises(
KeyError,
self.driver.retype,
ctxt, volume, new_type, diff, host
)
def test_create_consistencygroup(self):
"""Test that create_consistencygroup return successfully."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create consistency group
model_update = self.driver.create_consistencygroup(ctxt, CONSISTGROUP)
self.assertEqual(fields.ConsistencyGroupStatus.AVAILABLE,
model_update['status'],
"Consistency Group created failed")
def test_create_consistencygroup_fail_on_cg_not_empty(self):
"""Test create_consistencygroup with empty consistency group."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create volumes
# And add the volumes into the consistency group before creating cg
self.driver.create_volume(CG_VOLUME)
self.assertRaises(exception.CinderException,
self.driver.create_consistencygroup,
ctxt, CONSISTGROUP)
def test_delete_consistencygroup(self):
"""Test that delete_consistencygroup return successfully."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create consistency group
self.driver.create_consistencygroup(ctxt, CONSISTGROUP)
# Create volumes and add them to consistency group
self.driver.create_volume(CG_VOLUME)
# Delete consistency group
model_update, volumes = \
self.driver.delete_consistencygroup(
ctxt, CONSISTGROUP, [CG_VOLUME])
# Verify the result
self.assertEqual(fields.ConsistencyGroupStatus.DELETED,
model_update['status'],
                             'Consistency Group deletion failed')
for volume in volumes:
self.assertEqual('deleted',
volume['status'],
                             'Consistency Group volume deletion failed')
def test_delete_consistencygroup_fail_on_volume_not_delete(self):
"""Test delete_consistencygroup with volume delete failure."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create consistency group
self.driver.create_consistencygroup(ctxt, CONSISTGROUP)
# Set the volume not to be deleted
volume = copy.deepcopy(CG_VOLUME)
volume['name'] = CANNOT_DELETE
# Create volumes and add them to consistency group
self.driver.create_volume(volume)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_consistencygroup,
ctxt, CONSISTGROUP, [volume])
def test_create_cgsnapshot(self):
"""Test that create_cgsnapshot return successfully."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create consistency group
self.driver.create_consistencygroup(ctxt, CONSISTGROUP)
# Create volumes and add them to consistency group
self.driver.create_volume(VOLUME)
# Create consistency group snapshot
model_update, snapshots = \
self.driver.create_cgsnapshot(ctxt, CG_SNAPSHOT, [VOLUME])
# Verify the result
self.assertEqual('available',
model_update['status'],
                         'Consistency Group Snapshot creation failed')
for snap in snapshots:
self.assertEqual('available',
snap['status'])
# Clean the environment
self.driver.delete_cgsnapshot(ctxt, CG_SNAPSHOT, [VOLUME])
self.driver.delete_consistencygroup(ctxt, CONSISTGROUP, [VOLUME])
def test_create_cgsnapshot_fail_on_no_pool_space_left(self):
"""Test that create_cgsnapshot return fail when no pool space left."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create consistency group
self.driver.create_consistencygroup(ctxt, CONSISTGROUP)
# Set the volume size
volume = copy.deepcopy(CG_VOLUME)
        volume['size'] = POOL_SIZE // 2 + 1  # floor division keeps the size an int
# Create volumes and add them to consistency group
self.driver.create_volume(volume)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cgsnapshot,
ctxt, CG_SNAPSHOT, [volume])
# Clean the environment
self.driver.volumes = None
self.driver.delete_consistencygroup(ctxt, CONSISTGROUP, [volume])
def test_delete_cgsnapshot(self):
"""Test that delete_cgsnapshot return successfully."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create consistency group
self.driver.create_consistencygroup(ctxt, CONSISTGROUP)
# Create volumes and add them to consistency group
self.driver.create_volume(CG_VOLUME)
# Create consistency group snapshot
self.driver.create_cgsnapshot(ctxt, CG_SNAPSHOT, [CG_VOLUME])
# Delete consistency group snapshot
model_update, snapshots = \
self.driver.delete_cgsnapshot(ctxt, CG_SNAPSHOT, [CG_VOLUME])
# Verify the result
self.assertEqual('deleted',
model_update['status'],
                         'Consistency Group Snapshot deletion failed')
for snap in snapshots:
self.assertEqual('deleted',
snap['status'])
# Clean the environment
self.driver.delete_consistencygroup(ctxt, CONSISTGROUP, [CG_VOLUME])
def test_delete_cgsnapshot_fail_on_snapshot_not_delete(self):
"""Test delete_cgsnapshot when the snapshot cannot be deleted."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create consistency group
self.driver.create_consistencygroup(ctxt, CONSISTGROUP)
# Set the snapshot not to be deleted
volume = copy.deepcopy(CG_VOLUME)
volume['name'] = CANNOT_DELETE
# Create volumes and add them to consistency group
self.driver.create_volume(volume)
# Create consistency group snapshot
self.driver.create_cgsnapshot(ctxt, CG_SNAPSHOT, [volume])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_cgsnapshot,
ctxt, CG_SNAPSHOT, [volume])
def test_update_consistencygroup_without_volumes(self):
"""Test update_consistencygroup when there are no volumes specified."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Update consistency group
model_update, added, removed = self.driver.update_consistencygroup(
ctxt, CONSISTGROUP, [], [])
self.assertEqual(fields.ConsistencyGroupStatus.AVAILABLE,
model_update['status'],
"Consistency Group update failed")
self.assertIsNone(added,
"added volumes list is not empty")
self.assertIsNone(removed,
"removed volumes list is not empty")
def test_update_consistencygroup_with_volumes(self):
"""Test update_consistencygroup when there are volumes specified."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Update consistency group
model_update, added, removed = self.driver.update_consistencygroup(
ctxt, CONSISTGROUP, [VOLUME], [VOLUME2])
self.assertEqual(fields.ConsistencyGroupStatus.AVAILABLE,
model_update['status'],
"Consistency Group update failed")
self.assertIsNone(added,
"added volumes list is not empty")
self.assertIsNone(removed,
"removed volumes list is not empty")
def test_create_consistencygroup_from_src_without_volumes(self):
"""Test create_consistencygroup_from_src with no volumes specified."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create consistency group from source
model_update, volumes_model_update = (
self.driver.create_consistencygroup_from_src(
ctxt, CONSISTGROUP, [], CG_SNAPSHOT, []))
# model_update can be None or return available in status
if model_update:
self.assertEqual(fields.ConsistencyGroupStatus.AVAILABLE,
model_update['status'],
"Consistency Group create from source failed")
# volumes_model_update can be None or return available in status
if volumes_model_update:
self.assertFalse(volumes_model_update,
"volumes list is not empty")
def test_create_consistencygroup_from_src_with_volumes(self):
"""Test create_consistencygroup_from_src with volumes specified."""
self.driver.do_setup(None)
ctxt = context.get_admin_context()
# Create consistency group from source
model_update, volumes_model_update = (
self.driver.create_consistencygroup_from_src(
ctxt, CONSISTGROUP, [VOLUME], CG_SNAPSHOT, [SNAPSHOT]))
# model_update can be None or return available in status
if model_update:
self.assertEqual(fields.ConsistencyGroupStatus.AVAILABLE,
model_update['status'],
"Consistency Group create from source failed")
# volumes_model_update can be None or return available in status
if volumes_model_update:
self.assertEqual('available',
volumes_model_update['status'],
"volumes list status failed")
def test_freeze_backend(self):
"""Test that freeze_backend returns successful"""
self.driver.do_setup(None)
# not much we can test here...
self.assertTrue(self.driver.freeze_backend(CONTEXT))
def test_thaw_backend(self):
"""Test that thaw_backend returns successful"""
self.driver.do_setup(None)
# not much we can test here...
self.assertTrue(self.driver.thaw_backend(CONTEXT))
def test_failover_host(self):
"""Test that failover_host returns expected values"""
self.driver.do_setup(None)
replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
# assume the replication_status is active
replicated_volume['replication_status'] = 'active'
expected_target_id = 'BLA'
expected_volume_update_list = [
{'volume_id': REPLICATED_VOLUME['id'],
'updates': {'replication_status': 'failed-over'}}]
target_id, volume_update_list = self.driver.failover_host(
CONTEXT,
[replicated_volume],
SECONDARY
)
self.assertEqual(expected_target_id, target_id)
self.assertEqual(expected_volume_update_list, volume_update_list)
def test_failover_host_bad_state(self):
"""Test that failover_host returns with error"""
self.driver.do_setup(None)
replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
# assume the replication_status is active
replicated_volume['replication_status'] = 'invalid_status_val'
expected_target_id = 'BLA'
expected_volume_update_list = [
{'volume_id': REPLICATED_VOLUME['id'],
'updates': {'replication_status': 'error'}}]
target_id, volume_update_list = self.driver.failover_host(
CONTEXT,
[replicated_volume],
SECONDARY
)
self.assertEqual(expected_target_id, target_id)
self.assertEqual(expected_volume_update_list, volume_update_list)
|
bswartz/cinder
|
cinder/tests/unit/test_ibm_xiv_ds8k.py
|
Python
|
apache-2.0
| 33,327
|
# Copyright (c) 2015, Frappe and contributors
# For license information, please see license.txt
from frappe.model.document import Document
class ProgramCourse(Document):
pass
|
frappe/erpnext
|
erpnext/education/doctype/program_course/program_course.py
|
Python
|
gpl-3.0
| 180
|
# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE.
# Copyright (C) NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Cylc site and user configuration file spec."""
import os
from typing import List, Optional, Tuple, Any
from pkg_resources import parse_version
from cylc.flow import LOG
from cylc.flow import __version__ as CYLC_VERSION
from cylc.flow.exceptions import GlobalConfigError
from cylc.flow.hostuserutil import get_user_home
from cylc.flow.network.client_factory import CommsMeth
from cylc.flow.parsec.config import (
ConfigNode as Conf,
ParsecConfig,
)
from cylc.flow.parsec.exceptions import ParsecError
from cylc.flow.parsec.upgrade import upgrader
from cylc.flow.parsec.validate import (
CylcConfigValidator as VDR,
DurationFloat,
cylc_config_validate,
)
# Nested dict of spec items.
# Spec value is [value_type, default, allowed_2, allowed_3, ...]
# where:
# - value_type: value type (compulsory).
# - default: the default value (optional).
# - allowed_2, ...: the only other allowed values of this setting (optional).
# Standard executable search paths to pass to job submission subprocesses.
SYSPATH = [
'/bin',
'/usr/bin',
'/usr/local/bin',
'/sbin',
'/usr/sbin',
'/usr/local/sbin'
]
TIMEOUT_DESCR = "Previously, 'timeout' was a stall timeout."
REPLACES = 'This item was previously called '
MOVEDFROMJOB = '''
.. versionchanged:: 8.0.0
Moved from ``suite.rc[runtime][<namespace>]job``.
'''
PLATFORM_META_DESCR = '''
Metadata for this platform or platform group.
Allows writers of platform configurations to add information
    about platform usage. There are no preset items because
Cylc does not use any platform (or group) metadata internally.
Users can then see information about defined platforms using::
cylc config -i [platforms]
cylc config -i [platform groups]
.. seealso::
:ref:`AdminGuide.PlatformConfigs`
'''
# Event config descriptions shared between global and workflow config.
EVENTS_DESCR = {
'startup handlers': (
f'''
Handlers to run at scheduler startup.
.. versionchanged:: 8.0.0
{REPLACES}``startup handler``.
'''
),
'shutdown handlers': (
f'''
Handlers to run at scheduler shutdown.
.. versionchanged:: 8.0.0
{REPLACES}``shutdown handler``.
'''
),
'abort handlers': (
f'''
Handlers to run if the scheduler aborts.
.. versionchanged:: 8.0.0
{REPLACES}``aborted handler``.
'''
),
'workflow timeout': (
f'''
Workflow timeout interval. The timer starts counting down at scheduler
startup. It resets on workflow restart.
.. versionadded:: 8.0.0
{TIMEOUT_DESCR}
'''
),
'workflow timeout handlers': (
f'''
Handlers to run if the workflow timer times out.
.. versionadded:: 8.0.0
{TIMEOUT_DESCR}
'''
),
'abort on workflow timeout': (
f'''
Whether to abort if the workflow timer times out.
.. versionadded:: 8.0.0
{TIMEOUT_DESCR}
'''
),
'stall handlers': (
f'''
Handlers to run if the scheduler stalls.
.. versionchanged:: 8.0.0
{REPLACES}``stalled handler``.
'''
),
'stall timeout': (
f'''
The length of a timer which starts if the scheduler stalls.
A workflow will stall if there are no tasks ready to run and no
waiting external triggers, but the presence of incomplete
        tasks or unsatisfied prerequisites shows the workflow did not run to
completion. The stall timer turns off on any post-stall task activity.
It resets on restarting a stalled workflow.
.. versionadded:: 8.0.0
{TIMEOUT_DESCR}
'''
),
'stall timeout handlers': (
f'''
Handlers to run if the stall timer times out.
.. versionadded:: 8.0.0
{TIMEOUT_DESCR}
'''
),
'abort on stall timeout': (
f'''
Whether to abort if the stall timer times out.
.. versionadded:: 8.0.0
{TIMEOUT_DESCR}
'''
),
'inactivity timeout': (
f'''
Scheduler inactivity timeout interval. The timer resets when any
workflow activity occurs.
.. versionchanged:: 8.0.0
{REPLACES} ``inactivity``.
'''
),
'inactivity timeout handlers': (
f'''
Handlers to run if the inactivity timer times out.
.. versionchanged:: 8.0.0
{REPLACES}``inactivity handler``.
'''
),
'abort on inactivity timeout': (
f'''
Whether to abort if the inactivity timer times out.
.. versionchanged:: 8.0.0
{REPLACES}``abort on inactivity``.
'''
)
}
with Conf('global.cylc', desc='''
The global configuration which defines default Cylc Flow settings
for a user or site.
To view your global config, run:
$ cylc config
Cylc will attempt to load the global configuration (``global.cylc``) from a
hierarchy of locations, including the site directory (defaults to
``/etc/cylc/flow/``) and the user directory (``~/.cylc/flow/``). For
example at Cylc version 8.0.1, the hierarchy would be, in order of
ascending priority:
.. code-block:: sub
<site-conf-path>/flow/global.cylc
<site-conf-path>/flow/8/global.cylc
<site-conf-path>/flow/8.0/global.cylc
<site-conf-path>/flow/8.0.1/global.cylc
~/.cylc/flow/global.cylc
~/.cylc/flow/8/global.cylc
~/.cylc/flow/8.0/global.cylc
~/.cylc/flow/8.0.1/global.cylc
Where ``<site-conf-path>`` is ``/etc/cylc/flow/`` by default but can be
changed by :envvar:`CYLC_SITE_CONF_PATH`.
A setting in a file lower down in the list will override the same setting
from those higher up (but if a setting is present in a file higher up and
not in any files lower down, it will not be overridden).
The following environment variables can change the files which are loaded:
.. envvar:: CYLC_CONF_PATH
If set this bypasses the default site/user configuration hierarchy used
to load the Cylc Flow global configuration.
This should be set to a directory containing a :cylc:conf:`global.cylc`
file.
.. envvar:: CYLC_SITE_CONF_PATH
By default the site configuration is located in ``/etc/cylc/``. For
installations where this is not convenient, this path can be overridden
by setting ``CYLC_SITE_CONF_PATH`` to point at another location.
Configuration for different Cylc components should be in sub-directories
within this location.
For example to configure Cylc Flow you could do the following::
$CYLC_SITE_CONF_PATH/
`-- flow/
`-- global.cylc
.. note::
The ``global.cylc`` file can be templated using Jinja2 variables.
See :ref:`Jinja`.
.. versionchanged:: 8.0.0
Prior to Cylc 8, ``global.cylc`` was named ``global.rc``, but that name
is no longer supported.
''') as SPEC:
with Conf('scheduler', desc=f'''
Default values for entries in :cylc:conf:`flow.cylc[scheduler]`
section.
.. versionchanged:: 8.0.0
{REPLACES}``[cylc]``.
.. note::
:cylc:conf:`global.cylc[scheduler]` should not be confused with
:cylc:conf:`flow.cylc[scheduling]`.
'''):
Conf('UTC mode', VDR.V_BOOLEAN, False, desc='''
Default for :cylc:conf:`flow.cylc[scheduler]UTC mode`.
''')
Conf('process pool size', VDR.V_INTEGER, 4, desc='''
Maximum number of concurrent processes used to execute external job
            submission, event handlers, and job poll and kill commands.
.. versionchanged:: 8.0.0
Moved into the ``[scheduler]`` section from the top level.
.. seealso::
:ref:`Managing External Command Execution`.
''')
Conf('process pool timeout', VDR.V_INTERVAL, DurationFloat(600),
desc='''
After this interval Cylc will kill long running commands in the
process pool.
.. versionchanged:: 8.0.0
Moved into the ``[scheduler]`` section from the top level.
.. seealso::
:ref:`Managing External Command Execution`.
.. note::
The default is set quite high to avoid killing important
processes when the system is under load.
''')
Conf('auto restart delay', VDR.V_INTERVAL, desc=f'''
Maximum number of seconds the auto-restart mechanism will delay
before restarting workflows.
.. versionchanged:: 8.0.0
{REPLACES}``global.rc[suite servers]auto restart delay``.
When a host is set to automatically
shutdown/restart it waits a random period of time
between zero and ``auto restart delay`` seconds before
beginning the process. This is to prevent large numbers of
workflows from restarting simultaneously.
.. seealso::
:ref:`auto-stop-restart`
''')
with Conf('run hosts', desc=f'''
Configure workflow hosts and ports for starting workflows.
.. versionchanged:: 8.0.0
{REPLACES}``[suite servers]``.
Additionally configure host selection settings specifying how to
determine the most suitable run host at any given time from those
configured.
'''):
Conf('available', VDR.V_SPACELESS_STRING_LIST, desc=f'''
A list of workflow run hosts.
.. versionchanged:: 8.0.0
{REPLACES}``[suite servers]run hosts``.
Cylc will choose one of these hosts for a workflow to start on.
(Unless an explicit host is provided as an option to the
``cylc play --host=<myhost>`` command.)
''')
Conf('ports', VDR.V_INTEGER_LIST, list(range(43001, 43101)),
desc=f'''
A list of allowed ports for Cylc to use to run workflows.
.. versionchanged:: 8.0.0
{REPLACES}``[suite servers]run ports``
''')
Conf('condemned', VDR.V_ABSOLUTE_HOST_LIST, desc=f'''
These hosts will not be used to run jobs.
.. versionchanged:: 8.0.0
{REPLACES}``[suite servers]condemned hosts``.
If workflows are already running on
condemned hosts, Cylc will shut them down and
restart them on different hosts.
.. seealso::
:ref:`auto-stop-restart`
''')
Conf('ranking', VDR.V_STRING, desc=f'''
Rank and filter run hosts based on system information.
.. versionchanged:: 8.0.0
{REPLACES}``[suite servers][run host select]rank``.
Ranking can be used to provide load balancing to ensure no
single run host is overloaded. It also provides thresholds
beyond which Cylc will not attempt to start new schedulers on
a host.
.. _psutil: https://psutil.readthedocs.io/en/latest/
This should be a multiline string containing Python expressions
to rank and/or filter hosts. All `psutil`_ attributes are
available for use in these expressions.
.. rubric:: Ranking
Rankings are expressions which return numerical values.
The host which returns the lowest value is chosen. Examples:
.. code-block:: python
# rank hosts by cpu_percent
cpu_percent()
# rank hosts by 15min average of server load
getloadavg()[2]
# rank hosts by the number of cores
                   # (multiply by -1 because the lowest value is chosen)
-1 * cpu_count()
.. rubric:: Threshold
Thresholds are expressions which return boolean values.
If a host returns a ``False`` value that host will not be
selected. Examples:
.. code-block:: python
# filter out hosts with a CPU utilisation of 70% or above
cpu_percent() < 70
# filter out hosts with less than 1GB of RAM available
                   virtual_memory().available > 1000000000
# filter out hosts with less than 1GB of disk space
# available on the "/" mount
disk_usage('/').free > 1000000000
.. rubric:: Combining
Multiple rankings and thresholds can be combined in this
                section, e.g.:
.. code-block:: python
# filter hosts
cpu_percent() < 70
disk_usage('/').free > 1000000000
# rank hosts by CPU count
1 / cpu_count()
# if two hosts have the same CPU count
# then rank them by CPU usage
cpu_percent()
''')
with Conf('host self-identification', desc=f'''
How Cylc determines and shares the identity of the workflow host.
.. versionchanged:: 8.0.0
{REPLACES}``[suite host self-identification]``.
The workflow host's identity must be determined locally by cylc and
passed to running tasks (via ``$CYLC_WORKFLOW_HOST``) so that task
messages can target the right workflow on the right host.
'''):
# TODO
# Is it conceivable that different remote task hosts at the same
# site might see the workflow host differently? If so we'd need to
# be able to override the target in workflow configurations.
Conf(
'method', VDR.V_STRING, 'name',
options=['name', 'address', 'hardwired'],
desc=f'''
Determines how cylc finds the identity of the
workflow host.
.. versionchanged:: 8.0.0
{REPLACES}``[suite host self-identification]``.
Options:
name
(The default method) Self-identified host name.
Cylc asks the workflow host for its host name. This
should resolve on task hosts to the IP address of the
workflow host; if it doesn't, adjust network settings or
use one of the other methods.
address
Automatically determined IP address (requires *target*).
Cylc attempts to use a special external "target address"
to determine the IP address of the workflow host as
seen by remote task hosts.
hardwired
(only to be used as a last resort) Manually specified
host name or IP address (requires *host*) of the
workflow host.
''')
Conf('target', VDR.V_STRING, 'google.com', desc=f'''
Target for use by the *address* self-identification method.
            If your workflow host has access to the internet, a common
address such as ``google.com`` will do; otherwise choose a host
visible on your intranet.
.. versionchanged:: 8.0.0
{REPLACES}``[suite host self-identification]``.
''')
Conf('host', VDR.V_STRING, desc=f'''
The name or IP address of the workflow host used by the
*hardwired* self-identification method.
.. versionchanged:: 8.0.0
{REPLACES}``[suite host self-identification]``.
''')
with Conf('events', desc='''
Define site defaults for :cylc:conf:`flow.cylc[scheduler][events]`.
'''):
Conf('handlers', VDR.V_STRING_LIST)
Conf('handler events', VDR.V_STRING_LIST)
Conf('mail events', VDR.V_STRING_LIST)
for item, desc in EVENTS_DESCR.items():
if item.endswith("handlers"):
Conf(item, VDR.V_STRING_LIST, desc=desc)
elif item.startswith("abort on"):
default = (item == "abort on stall timeout")
Conf(item, VDR.V_BOOLEAN, default, desc=desc)
elif item.endswith("timeout"):
if item == "stall timeout":
def_intv: Optional['DurationFloat'] = (
DurationFloat(3600))
else:
def_intv = None
Conf(item, VDR.V_INTERVAL, def_intv, desc=desc)
with Conf('mail', desc=f'''
Options for email handling.
.. versionchanged:: 8.0.0
{REPLACES}``[cylc][events]mail <item>``.
'''):
Conf('from', VDR.V_STRING)
Conf('smtp', VDR.V_STRING)
Conf('to', VDR.V_STRING)
Conf('footer', VDR.V_STRING)
Conf(
'task event batch interval',
VDR.V_INTERVAL,
DurationFloat(300),
desc='''
Default for
:cylc:conf:`flow.cylc
[scheduler][mail]task event batch interval`
.. versionchanged:: 8.0.0
This item was previously
``[cylc]task event mail interval``
'''
)
with Conf('main loop', desc='''
Configuration of the Cylc Scheduler's main loop.
.. versionadded:: 8.0.0
'''):
Conf('plugins', VDR.V_STRING_LIST,
['health check', 'reset bad hosts'],
desc='''
Configure the default main loop plugins to use when
starting new workflows.
.. versionadded:: 8.0.0
''')
with Conf('<plugin name>', desc='''
Configure a main loop plugin.
''') as MainLoopPlugin:
Conf('interval', VDR.V_INTERVAL, desc='''
The interval with which this plugin is run.
.. versionadded:: 8.0.0
''')
with Conf('health check', meta=MainLoopPlugin, desc='''
Checks the integrity of the workflow run directory.
.. versionadded:: 8.0.0
'''):
Conf('interval', VDR.V_INTERVAL, DurationFloat(600), desc='''
The interval with which this plugin is run.
.. versionadded:: 8.0.0
''')
with Conf('reset bad hosts', meta=MainLoopPlugin, desc='''
Periodically clear the scheduler list of unreachable (bad)
hosts.
.. versionadded:: 8.0.0
'''):
Conf('interval', VDR.V_INTERVAL, DurationFloat(1800), desc='''
How often (in seconds) to run this plugin.
.. versionadded:: 8.0.0
''')
with Conf('logging', desc=f'''
Settings for the workflow event log.
The workflow event log, held under the workflow run directory, is
maintained as a rolling archive. Logs are rolled over (backed up
and started anew) when they reach a configurable limit size.
.. versionchanged:: 8.0.0
{REPLACES}``[suite logging]``.
'''):
Conf('rolling archive length', VDR.V_INTEGER, 5, desc='''
How many rolled logs to retain in the archive.
''')
Conf('maximum size in bytes', VDR.V_INTEGER, 1000000, desc='''
Workflow event logs are rolled over when they reach this
file size.
''')
with Conf('install', desc='''
Configure directories and files to be installed on remote hosts.
.. versionadded:: 8.0.0
'''):
Conf('max depth', VDR.V_INTEGER, default=4, desc='''
How many directory levels deep Cylc should look for installed
workflows in the :term:`cylc-run directory`.
This also sets the limit on how deep a :term:`workflow ID` can be
before ``cylc install`` will refuse to install it. For example,
if set to 4, ``cylc install one/two/three/four`` will fail,
because the resultant workflow ID would be
``one/two/three/four/run1``, which is 5 levels deep. (However,
``cylc install one/two/three/four --no-run-name`` would work.)
.. note::
                A high value may cause a slowdown of Cylc commands such as
``install``, ``scan`` and ``clean`` if there are many
:term:`run directories <run directory>` in the
cylc-run directory for Cylc to check, or if the filesystem
is slow (e.g. NFS).
.. versionadded:: 8.0.0
''')
Conf('source dirs', VDR.V_STRING_LIST, default=['~/cylc-src'], desc='''
List of paths that Cylc searches for workflows to install.
All workflow source directories in these locations will
also show up in the GUI, ready for installation.
.. note::
If workflow source directories of the same name exist in more
than one of these paths, only the first one will be picked up.
.. versionadded:: 8.0.0
''')
# Symlink Dirs
with Conf('symlink dirs', # noqa: SIM117 (keep same format)
desc="""
Configure alternate workflow run directory locations.
            Symlinks from the standard ``$HOME/cylc-run`` locations will be
created.
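            For example, a minimal sketch (the paths are invented) relocating
            the run directories created on the scheduler host:

            .. code-block:: cylc

               [install]
                   [[symlink dirs]]
                       [[[localhost]]]
                           run = /data/$USER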
.. versionadded:: 8.0.0
"""):
with Conf('<install target>'):
Conf('run', VDR.V_STRING, None, desc="""
Alternative location for the run dir.
If specified, the workflow run directory will
be created in ``<this-path>/cylc-run/<workflow-name>``
and a symbolic link will be created from
``$HOME/cylc-run/<workflow-name>``.
If not specified the workflow run directory will be created
in ``$HOME/cylc-run/<workflow-name>``.
All the workflow files and the ``.service`` directory get
installed into this directory.
.. versionadded:: 8.0.0
""")
Conf('log', VDR.V_STRING, None, desc="""
Alternative location for the log dir.
If specified the workflow log directory will be created in
``<this-path>/cylc-run/<workflow-name>/log`` and a
symbolic link will be created from
``$HOME/cylc-run/<workflow-name>/log``. If not specified
the workflow log directory will be created in
``$HOME/cylc-run/<workflow-name>/log``.
.. versionadded:: 8.0.0
""")
Conf('share', VDR.V_STRING, None, desc="""
Alternative location for the share dir.
If specified the workflow share directory will be
created in ``<this-path>/cylc-run/<workflow-name>/share``
and a symbolic link will be created from
                    ``$HOME/cylc-run/<workflow-name>/share``. If not specified
the workflow share directory will be created in
``$HOME/cylc-run/<workflow-name>/share``.
.. versionadded:: 8.0.0
""")
Conf('share/cycle', VDR.V_STRING, None, desc="""
Alternative directory for the share/cycle dir.
If specified the workflow share/cycle directory
will be created in
``<this-path>/cylc-run/<workflow-name>/share/cycle``
and a symbolic link will be created from
``$HOME/cylc-run/<workflow-name>/share/cycle``. If not
specified the workflow share/cycle directory will be
created in ``$HOME/cylc-run/<workflow-name>/share/cycle``.
.. versionadded:: 8.0.0
""")
Conf('work', VDR.V_STRING, None, desc="""
Alternative directory for the work dir.
If specified the workflow work directory will be created in
``<this-path>/cylc-run/<workflow-name>/work`` and a
symbolic link will be created from
``$HOME/cylc-run/<workflow-name>/work``. If not specified
the workflow work directory will be created in
``$HOME/cylc-run/<workflow-name>/work``.
.. versionadded:: 8.0.0
""")
with Conf('editors', desc='''
Choose your favourite text editor for editing workflow configurations.
'''):
Conf('terminal', VDR.V_STRING, desc='''
An in-terminal text editor to be used by the Cylc command line.
If unspecified Cylc will use the environment variable
``$EDITOR`` which is the preferred way to set your text editor.
.. Note::
You can set your ``$EDITOR`` in your shell profile file
                (e.g. ``~/.bashrc``)
If neither this or ``$EDITOR`` are specified then Cylc will
default to ``vi``.
Examples::
ed
emacs -nw
nano
vi
''')
Conf('gui', VDR.V_STRING, desc='''
A graphical text editor to be used by cylc.
If unspecified Cylc will use the environment variable
``$GEDITOR`` which is the preferred way to set your text editor.
.. Note::
You can set your ``$GEDITOR`` in your shell profile file
                (e.g. ``~/.bashrc``)
If neither this or ``$GEDITOR`` are specified then Cylc will
default to ``gvim -fg``.
Examples::
atom --wait
code --new-window --wait
emacs
gedit -s
gvim -fg
nedit
''')
with Conf('platforms', desc='''
Platforms allow you to define compute resources available at your
site.
.. versionadded:: 8.0.0
A platform consists of a group of one or more hosts which share a
file system and a job runner (batch system).
A platform must allow interaction with the same task job from *any*
of its hosts.
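    As an illustrative sketch (hostnames invented), a platform with two
    login nodes sharing a file system and a Slurm job runner might be
    defined as:

    .. code-block:: cylc

       [platforms]
           [[supercomputer]]
               hosts = login1, login2
               job runner = slurm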
'''):
with Conf('<platform name>', desc='''
Configuration defining a platform.
.. versionadded:: 8.0.0
Many of the items in platform definitions have been moved from
``flow.cylc[runtime][<namespace>][job]`` and
``flow.cylc[runtime][<namespace>][remote]``
Platform names can be regular expressions: If you have a set of
compute resources such as ``bigmachine1, bigmachine2`` or
            ``desktop0000, .., desktop9999`` you could define platforms with
names ``[[bigmachine[12]]]`` and ``[[desktop[0-9]{4}]]``.
.. note::
Each possible match to the definition regular expression is
considered a separate platform.
If you had a supercomputer with multiple login nodes this would
            be a single platform with multiple :cylc:conf:`hosts`.
.. seealso::
- :ref:`MajorChangesPlatforms` in the Cylc 8 migration guide.
- :ref:`AdminGuide.PlatformConfigs`, an administrator's guide to
platform configurations.
''') as Platform:
with Conf('meta', desc=PLATFORM_META_DESCR):
Conf('<custom metadata>', VDR.V_STRING, '', desc='''
Any user-defined metadata item.
''')
Conf('hosts', VDR.V_STRING_LIST, desc='''
A list of hosts from which the job host can be selected using
:cylc:conf:`[..][selection]method`.
.. versionadded:: 8.0.0
All hosts should share a file system.
''')
Conf('job runner', VDR.V_STRING, 'background', desc=f'''
The batch system/job submit method used to run jobs on the
platform.
.. versionchanged:: 8.0.0
{REPLACES}
``suite.rc[runtime][<namespace>][job]batch system``.
Examples:
* ``background``
* ``slurm``
* ``pbs``
''')
Conf('job runner command template', VDR.V_STRING, desc=f'''
Set the command used by the chosen job runner.
.. versionchanged:: 8.0.0
{REPLACES}``suite.rc[runtime][<namespace>][job]
batch system command template``.
The template's ``%(job)s`` will be
substituted by the job file path.
''')
Conf('shell', VDR.V_STRING, '/bin/bash', desc=f'''
{MOVEDFROMJOB}
''')
Conf('communication method',
VDR.V_STRING, 'zmq',
options=[meth.value for meth in CommsMeth], desc='''
The means by which task progress messages are reported back to
the running workflow.
Options:
zmq
Direct client-server TCP communication via network ports
poll
The workflow polls for task status (no task messaging)
ssh
Use non-interactive ssh for task communications
''')
# TODO ensure that it is possible to over-ride the following three
# settings in workflow config.
Conf('submission polling intervals', VDR.V_INTERVAL_LIST, desc=f'''
List of intervals at which to poll status of job submission.
{MOVEDFROMJOB}
Cylc can poll submitted jobs to catch problems that
prevent the submitted job from executing at all, such as
deletion from an external job runner queue. Routine
polling is done only for the polling ``task communication
method`` unless workflow-specific polling is configured in
the workflow configuration. A list of interval values can be
specified as for execution polling but a single value
is probably sufficient for job submission polling.
Example::
5*PT1M, 10*PT5M
''')
Conf('submission retry delays', VDR.V_INTERVAL_LIST, None, desc=f'''
{MOVEDFROMJOB}
''')
Conf('execution polling intervals', VDR.V_INTERVAL_LIST, desc=f'''
List of intervals at which to poll status of job execution.
{MOVEDFROMJOB}
Cylc can poll running jobs to catch problems that prevent task
messages from being sent back to the workflow, such as hard job
kills, network outages, or unplanned task host shutdown.
Routine polling is done only for the polling *task
communication method* (below) unless polling is
configured in the workflow configuration. A list of interval
values can be specified, with the last value used repeatedly
until the task is finished - this allows more frequent polling
near the beginning and end of the anticipated task run time.
Multipliers can be used as shorthand as in the example below.
Example::
5*PT1M, 10*PT5M
''')
Conf('execution time limit polling intervals',
VDR.V_INTERVAL_LIST, desc='''
List of intervals after execution time limit to poll jobs.
The intervals between polling after a task job (submitted to
the relevant job runner on the relevant host) exceeds its
execution time limit. The default setting is PT1M, PT2M, PT7M.
The accumulated times (in minutes) for these intervals will be
roughly 1, 1 + 2 = 3 and 1 + 2 + 7 = 10 after a task job
exceeds its execution time limit.
''')
Conf('ssh command',
VDR.V_STRING,
'ssh -oBatchMode=yes -oConnectTimeout=10',
desc='''
A communication command used to invoke commands on this
platform.
Not used on the workflow host unless you run local tasks
under another user account. The value is assumed to be ``ssh``
with some initial options or a command that implements a
similar interface to ``ssh``.
''')
Conf('use login shell', VDR.V_BOOLEAN, True, desc='''
Whether to use a login shell or not for remote command
invocation.
By default, Cylc runs remote SSH commands using a login shell:
.. code-block:: bash
ssh user@host 'bash --login cylc ...'
which will source the following files (in order):
* ``/etc/profile``
* ``~/.bash_profile``
* ``~/.bash_login``
* ``~/.profile``
.. _Bash man pages: https://linux.die.net/man/1/bash
For more information on login shells see the "Invocation"
section of the `Bash man pages`_.
For security reasons some institutions do not allow unattended
commands to start login shells, so you can turn off this
behaviour to get:
.. code-block:: bash
ssh user@host 'cylc ...'
which will use the default shell on the remote machine,
sourcing ``~/.bashrc`` (or ``~/.cshrc``) to set up the
environment.
''')
Conf('cylc path', VDR.V_STRING, desc='''
The path containing the ``cylc`` executable on a remote
platform.
.. versionchanged:: 8.0.0
Moved from ``suite.rc[runtime][<namespace>][job]
cylc executable``.
This may be necessary if the ``cylc`` executable is not in the
``$PATH`` for an ``ssh`` call.
Test whether this is the case by using
``ssh <host> command -v cylc``.
This path is used for remote invocations of the ``cylc``
command and is added to the ``$PATH`` in job scripts
for the configured platform.
.. note::
If :cylc:conf:`[..]use login shell = True` (the default)
then an alternative approach is to add ``cylc`` to the
``$PATH`` in the system or user Bash profile files
(e.g. ``~/.bash_profile``).
.. tip::
For multi-version installations this should point to the
Cylc wrapper script rather than the ``cylc`` executable
itself.
See :ref:`managing environments` for more information on
the wrapper script.
''')
Conf('global init-script', VDR.V_STRING, desc='''
A per-platform script which is run before other job scripts.
This should be used sparingly to perform any shell
configuration that cannot be performed via other means.
.. versionchanged:: 8.0.0
The ``global init-script`` now runs *before* any job
scripting which introduces caveats outlined below.
.. warning::
The ``global init-script`` has the following caveats,
as compared to the other task ``script-*`` items:
* The script is not covered by error trapping.
* The job environment is not available to this script.
* In debug mode this script will not be included in
xtrace output.
''')
Conf('copyable environment variables', VDR.V_STRING_LIST, '',
desc='''
A list containing the names of the environment variables to
be copied from the scheduler to a job.
''')
Conf('retrieve job logs', VDR.V_BOOLEAN, desc='''
Global default for
:cylc:conf:`flow.cylc[runtime][<namespace>][remote]
retrieve job logs`.
''')
Conf('retrieve job logs command', VDR.V_STRING, 'rsync -a',
desc='''
If ``rsync -a`` is unavailable or insufficient to retrieve job
logs from a remote platform, you can use this setting to
specify a suitable command.
''')
Conf('retrieve job logs max size', VDR.V_STRING, desc='''
Global default for
:cylc:conf:`flow.cylc[runtime][<namespace>][remote]
retrieve job logs max size` for this platform.
''')
Conf('retrieve job logs retry delays', VDR.V_INTERVAL_LIST,
desc='''
Global default for
:cylc:conf:`flow.cylc[runtime][<namespace>][remote]
retrieve job logs retry delays`
for this platform.
''')
Conf('tail command template',
VDR.V_STRING, 'tail -n +1 -F %(filename)s', desc='''
A command template (with ``%(filename)s`` substitution) to
                tail-follow job logs on this platform, used by ``cylc cat-log``.
                You are unlikely to need to override this.
''')
Conf('err tailer', VDR.V_STRING, desc='''
A command template (with ``%(job_id)s`` substitution) that can
                be used to tail-follow the stderr stream of a running job
                if the job runner does not use the normal log file location
                while the job is running. This setting overrides
:cylc:conf:`[..]tail command template`.
Examples::
# for PBS
qcat -f -e %(job_id)s
''')
Conf('out tailer', VDR.V_STRING, desc='''
A command template (with ``%(job_id)s`` substitution) that can
                be used to tail-follow the stdout stream of a running job
                if the job runner does not use the normal log file location
                while the job is running. This setting overrides
:cylc:conf:`[..]tail command template`.
Examples::
# for PBS
qcat -f -o %(job_id)s
''')
Conf('err viewer', VDR.V_STRING, desc='''
A command template (with ``%(job_id)s`` substitution) that can
                be used to view the stderr stream of a running job if the
                job runner does not use the normal log file location while
                the job is running.
Examples::
# for PBS
qcat -e %(job_id)s
''')
Conf('out viewer', VDR.V_STRING, desc='''
A command template (with ``%(job_id)s`` substitution) that can
                be used to view the stdout stream of a running job if the
                job runner does not use the normal log file location while
                the job is running.
Examples::
# for PBS
qcat -o %(job_id)s
''')
Conf('job name length maximum', VDR.V_INTEGER, desc='''
                The maximum job name length acceptable to a job runner on
a given host. Currently, this setting is only meaningful for
PBS jobs. For example, PBS 12 or older will fail a job submit
if the job name has more than 15 characters; whereas PBS 13
accepts up to 236 characters.
''')
Conf('install target', VDR.V_STRING, desc='''
This defaults to the platform name. This will be used as the
target for remote file installation.
For example, if Platform_A shares a file system with localhost:
.. code-block:: cylc
[platforms]
[[Platform_A]]
install target = localhost
''')
Conf('clean job submission environment', VDR.V_BOOLEAN, False,
desc='''
Job submission subprocesses inherit their parent environment by
                default. Remote jobs inherit the default non-interactive shell
environment for their platform. Jobs on the scheduler host
inherit the scheduler environment (unless their job runner
prevents this).
If, for example, the ``$PYTHON`` variable is different on the
scheduler and the remote host the same program may run in
different ways.
We recommend using a clean job submission environment for
consistent handling of local and remote jobs. However,
                this is not the default behaviour because it prevents
local task jobs from running, unless ``$PATH`` contains the
``cylc`` wrapper script.
Specific environment variables can be singled out to pass
through to the clean environment, if necessary.
A standard set of executable paths is passed through to clean
environments, and can be added to if necessary.
''')
Conf('job submission environment pass-through', VDR.V_STRING_LIST,
desc='''
List of environment variable names to pass through to
job submission subprocesses.
``$HOME`` is passed automatically.
You are unlikely to need this.
''')
Conf('job submission executable paths', VDR.V_STRING_LIST,
desc=f'''
Additional executable locations to pass to the job
submission subprocess beyond the standard locations
{", ".join(f"``{i}``" for i in SYSPATH)}.
You are unlikely to need this.
''')
Conf('max batch submit size', VDR.V_INTEGER, default=100, desc='''
Limits the maximum number of jobs that can be submitted at
once.
Where possible Cylc will batch together job submissions to
the same platform for efficiency. Submitting very large
numbers of jobs can cause problems with some submission
systems so for safety there is an upper limit on the number
of job submissions which can be batched together.
''')
with Conf('selection', desc='''
How to select platform from list of hosts.
.. versionadded:: 8.0.0
''') as Selection:
Conf('method', VDR.V_STRING, default='random',
options=['random', 'definition order'],
desc='''
Method for choosing the job host from the platform.
.. versionadded:: 8.0.0
.. rubric:: Available options
- ``random``: Choose randomly from the list of hosts.
This is suitable for a pool of identical hosts.
- ``definition order``: Take the first host in the list
unless that host was unreachable. In many cases
this is likely to cause load imbalances, but might
be appropriate if following the pattern
``hosts = main, backup, failsafe``.
''')
with Conf('localhost', meta=Platform, desc='''
A default platform defining settings for jobs to be run on the
same host as the workflow scheduler.
.. attention::
It is common practice to run the Cylc scheduler on a dedicated
host: In this case **"localhost" will refer to the host where
the scheduler is running and not the computer where you
ran "cylc play"**.
'''):
Conf('hosts', VDR.V_STRING_LIST, ['localhost'])
with Conf('selection', meta=Selection):
Conf('method', VDR.V_STRING, default='definition order')
# Platform Groups
with Conf('platform groups', desc='''
Platform groups allow you to group together platforms which would
all be suitable for a given job.
.. versionadded:: 8.0.0
When Cylc sets up a task job it will pick a platform from a group.
Cylc will then use the selected platform for all interactions with
that job.
For example, if you have a group of computers
without a shared file system, but otherwise identical called
``bigmachine01..02`` you might set up a platform group
``[[bigmachines]]platforms=bigmachine01, bigmachine02``.
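    Written out as a sketch:

    .. code-block:: cylc

       [platform groups]
           [[bigmachines]]
               platforms = bigmachine01, bigmachine02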
.. seealso::
- :ref:`MajorChangesPlatforms` in the Cylc 8 migration guide.
            - :ref:`AdminGuide.PlatformConfigs`, a guide to platform
configurations.
'''): # noqa: SIM117 (keep same format)
with Conf('<group>'):
with Conf('meta', desc=PLATFORM_META_DESCR):
Conf('<custom metadata>', VDR.V_STRING, '', desc='''
Any user-defined metadata item.
''')
Conf('platforms', VDR.V_STRING_LIST, desc='''
A list of platforms which can be selected if
:cylc:conf:`flow.cylc[runtime][<namespace>]platform` matches
the name of this platform group.
.. versionadded:: 8.0.0
''')
with Conf('selection'):
Conf(
'method', VDR.V_STRING, default='random',
options=['random', 'definition order'],
desc='''
Method for selecting platform from group.
.. versionadded:: 8.0.0
options:
- random: Suitable for an identical pool of platforms.
- definition order: Pick the first available platform
from the list.
'''
)
# task
with Conf('task events', desc='''
Global site/user defaults for
:cylc:conf:`flow.cylc[runtime][<namespace>][events]`.
'''):
Conf('execution timeout', VDR.V_INTERVAL)
Conf('handlers', VDR.V_STRING_LIST)
Conf('handler events', VDR.V_STRING_LIST)
Conf('handler retry delays', VDR.V_INTERVAL_LIST, None)
Conf('mail events', VDR.V_STRING_LIST)
Conf('submission timeout', VDR.V_INTERVAL)
def upg(cfg, descr):
"""Upgrader."""
u = upgrader(cfg, descr)
u.upgrade()
def get_version_hierarchy(version: str) -> List[str]:
"""Return list of versions whose global configs are compatible, in
ascending priority.
Args:
version: A PEP 440 compliant version tag.
Example:
>>> get_version_hierarchy('8.0.1a2.dev')
['', '8', '8.0', '8.0.1', '8.0.1a2', '8.0.1a2.dev']
"""
smart_ver: Any = parse_version(version)
# (No type anno. yet for Version in pkg_resources.extern.packaging.version)
base = [str(i) for i in smart_ver.release]
hierarchy = ['']
hierarchy += ['.'.join(base[:i]) for i in range(1, len(base) + 1)]
if smart_ver.pre: # alpha/beta (excluding dev) part of version
pre_ver = ''.join(str(i) for i in smart_ver.pre)
hierarchy.append(f'{hierarchy[-1]}{pre_ver}')
if version not in hierarchy: # catch-all
hierarchy.append(version)
return hierarchy
class GlobalConfig(ParsecConfig):
"""
Handle global (all workflows) site and user configuration for cylc.
User file values override site file values.
"""
_DEFAULT: Optional['GlobalConfig'] = None
CONF_BASENAME: str = "global.cylc"
DEFAULT_SITE_CONF_PATH: str = os.path.join(os.sep, 'etc', 'cylc')
USER_CONF_PATH: str = os.path.join(
os.getenv('HOME') or get_user_home(), '.cylc', 'flow'
)
VERSION_HIERARCHY: List[str] = get_version_hierarchy(CYLC_VERSION)
def __init__(self, *args, **kwargs) -> None:
site_conf_root = (
os.getenv('CYLC_SITE_CONF_PATH') or self.DEFAULT_SITE_CONF_PATH
)
self.conf_dir_hierarchy: List[Tuple[str, str]] = [
*[
(upgrader.SITE_CONFIG,
os.path.join(site_conf_root, 'flow', ver))
for ver in self.VERSION_HIERARCHY
],
*[
(upgrader.USER_CONFIG,
os.path.join(self.USER_CONF_PATH, ver))
for ver in self.VERSION_HIERARCHY
]
]
super().__init__(*args, **kwargs)
@classmethod
def get_inst(cls, cached=True):
"""Return a GlobalConfig instance.
Args:
cached (bool):
If cached create if necessary and return the singleton
instance, else return a new instance.
"""
if not cached:
# Return an up-to-date global config without affecting the
# singleton.
new_instance = cls(SPEC, upg, validator=cylc_config_validate)
new_instance.load()
return new_instance
elif not cls._DEFAULT:
cls._DEFAULT = cls(SPEC, upg, validator=cylc_config_validate)
cls._DEFAULT.load()
return cls._DEFAULT
def _load(self, fname, conf_type):
if os.access(fname, os.F_OK | os.R_OK):
self.loadcfg(fname, conf_type)
def load(self):
"""Load or reload configuration from files."""
self.sparse.clear()
self.dense.clear()
LOG.debug("Loading site/user config files")
conf_path_str = os.getenv("CYLC_CONF_PATH")
if conf_path_str:
# Explicit config file override.
fname = os.path.join(conf_path_str, self.CONF_BASENAME)
self._load(fname, upgrader.USER_CONFIG)
else:
# Use default locations.
for conf_type, conf_dir in self.conf_dir_hierarchy:
fname = os.path.join(conf_dir, self.CONF_BASENAME)
try:
self._load(fname, conf_type)
except ParsecError:
LOG.error(f'bad {conf_type} {fname}')
raise
self._set_default_editors()
self._no_platform_group_name_overlap()
def _set_default_editors(self):
# default to $[G]EDITOR unless an editor is defined in the config
# NOTE: use `or` to handle cases where an env var is set to ''
cfg = self.get()
if not cfg['editors']['terminal']:
cfg['editors']['terminal'] = os.environ.get('EDITOR') or 'vi'
if not cfg['editors']['gui']:
cfg['editors']['gui'] = os.environ.get('GEDITOR') or 'gvim -fg'
def _no_platform_group_name_overlap(self):
if (
'platforms' in self.sparse and
'platform groups' in self.sparse
):
names_in_platforms_and_groups = set(
self.sparse['platforms'].keys()).intersection(
set(self.sparse['platform groups'].keys()))
if names_in_platforms_and_groups:
msg = (
'Platforms and platform groups must not share names. '
'The following are in both sets:'
)
for name in names_in_platforms_and_groups:
msg += f'\n * {name}'
raise GlobalConfigError(msg)
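# Minimal usage sketch (an illustrative addition, not part of the original
# module): callers obtain the cached global configuration via get_inst() and
# read items with ParsecConfig.get(), which takes a list of keys; the item
# queried below is one defined in SPEC above.
if __name__ == '__main__':
    glbl_cfg = GlobalConfig.get_inst()  # cached singleton; loads config files
    print(glbl_cfg.get(['scheduler', 'UTC mode']))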
|
hjoliver/cylc
|
cylc/flow/cfgspec/globalcfg.py
|
Python
|
gpl-3.0
| 54,288
|
#
# =================================================================
# =================================================================
"""Table Creation/Deletion for Schema changes in the PowerVC 1.2.1 Release"""
from datetime import datetime
from sqlalchemy import MetaData, Boolean, Column, DateTime
from sqlalchemy import Integer, String, Text, Table
from sqlalchemy.sql import expression
# Import for the _() translation marker used in upgrade() below; the module
# path is assumed from the oslo-incubator era layout.
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
POWER_SPEC_COLUMNS = [
Column('dedicated_sharing_mode', String(31)),
Column('dedicated_proc', String(31)), Column('avail_priority', Integer),
Column('shared_weight', Integer), Column('rmc_state', String(31)),
Column('uncapped', Boolean), Column('operating_system', String(255)),
Column('cpu_topology', Text)
]
def upgrade(migration_engine):
""" Upgrades the Nova DB Tables to include those for PowerVC 1.2.1 """
metadata = MetaData(migration_engine)
metadata.reflect(migration_engine)
metadata.bind = migration_engine
try:
        # We need to add the additional columns to the PowerSpecs table
spec_table = Table('instance_power_specs', metadata, autoload=True)
for column in POWER_SPEC_COLUMNS:
            # Check first to see if the Column exists on the Table already
if spec_table._columns.get(column.name) is None:
spec_table.create_column(column)
        # Need to temporarily create server_vio2 table until Network DOM switch
try:
vios2_table = Table(
'server_vio2', metadata,
Column('created_at', DateTime), Column('updated_at', DateTime),
Column('deleted_at', DateTime), Column('deleted', Integer),
Column('pk_id', Integer, primary_key=True, nullable=False),
Column('lpar_id', Integer, nullable=False),
Column('lpar_name', String(length=255), nullable=False),
Column('state', String(length=31)),
Column('rmc_state', String(length=31)),
Column('cluster_provider_name', String(length=255)),
Column('compute_node_id', Integer, nullable=False))
vios2_table.create(checkfirst=True)
        # Ignore the exception on the table create, since it is temporary
        except Exception:
            pass
        # Next we need to try to add the default entries to the PowerSpecs table
inst_table = Table('instances', metadata, autoload=True)
inst_query = expression.select(
[inst_table.c.uuid]).where(inst_table.c.deleted == 0)
inst_uuids = migration_engine.execute(inst_query).fetchall()
        # Loop through each instance UUID, seeing if a PowerSpecs entry exists
for uuid in inst_uuids:
uuid = uuid[0]
spec_query = expression.select(
[spec_table.c.id]).where(spec_table.c.instance_uuid == uuid)
            # If the PowerSpec entry doesn't exist already, we need to create it
if migration_engine.execute(spec_query).fetchone() is None:
spec_table.insert().values(
created_at=datetime.utcnow(), instance_uuid=uuid, deleted=0
).execute()
except Exception as exc:
LOG.exception(_('Exception updating instance_power_specs: %s') % exc)
raise exc
def downgrade(migration_engine):
""" Downgrades the Nova DB Tables to remove those for PowerVC 1.2.1 """
pass
|
windskyer/k_nova
|
paxes_nova/db/sqlalchemy/migrate_repo/versions/002_ibm_powervc_v1r2m1.py
|
Python
|
apache-2.0
| 3,470
|
from scipy.ndimage import gaussian_filter1d
from scipy.optimize import curve_fit
import scipy.fft as fp
import numpy as np
import pickle
import matplotlib.pyplot as plt
import lib.plotting as plt2
def parse_probe_qubit_sts(freqs, S21):
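    # Lightly smooth |S21| and, for each row, take the frequency of the
    # transmission minimum (the resonance typically appears as a dip in |S21|).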
amps = np.abs(S21)
frequencies = freqs[gaussian_filter1d(amps, sigma=1).argmin(axis=-1)]
return frequencies
def parse_sps_sts(freqs, S21):
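    # Same approach with heavier smoothing, but here the feature of interest
    # is a peak, so take the frequency of the maximum instead.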
amps = np.abs(S21)
frequencies = freqs[gaussian_filter1d(amps, sigma=10).argmax(axis=-1)]
return frequencies
def qubit_fit_func(x, a, b, c):
return a * (x - b)**2 + c
def fit_probe_qubit_sts(filename, plot=True):
with open(filename, 'rb') as f:
data = pickle.load(f)
currents = data['bias, [A]']
freqs = data['Frequency [Hz]']
S21 = data['data']
frequencies = parse_probe_qubit_sts(freqs, S21)
popt, conv = curve_fit(qubit_fit_func, currents, frequencies,
p0=(-1e16, -2.5e-3, 5.15e9))
if plot:
xx, yy = np.meshgrid(currents, freqs)
plt2.plot_2D(xx, yy,
np.transpose(gaussian_filter1d(np.abs(S21), sigma=20)))
plt.figure()
plt.plot(currents, frequencies, 'o')
plt.plot(currents, qubit_fit_func(currents, *popt))
plt.margins(x=0)
plt.xlabel("Current, A")
plt.ylabel("Qubit if_freq, Hz")
plt.show()
return popt
def fit_sps_sts(filename, plot=True):
with open(filename, 'rb') as f:
data = pickle.load(f)
currents = data['bias, [A]']
freqs = data['Frequency [Hz]']
S21 = data['data']
frequencies = parse_sps_sts(freqs, S21)
popt, conv = curve_fit(qubit_fit_func, currents, frequencies,
p0=(-1e15, -5e-4, 5.15e9))
if plot:
xx, yy = np.meshgrid(currents, freqs)
plt2.plot_2D(xx, yy,
np.transpose(gaussian_filter1d(np.abs(S21), sigma=10)))
plt.figure()
plt.plot(currents, frequencies, 'o')
plt.plot(currents, qubit_fit_func(currents, *popt))
plt.margins(x=0)
plt.xlabel("Current, A")
plt.ylabel("Qubit if_freq, Hz")
plt.show()
return popt
def get_current(frequency, a, b, c):
current = b + np.sqrt((frequency - c) / a)
return current
def remove_outliers():
pass
def get_signal_amplitude(downconverted_trace):
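    # The zeroth FFT coefficient is the sum of all samples, so dividing by N
    # gives the mean (DC) amplitude of the downconverted trace.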
N = len(downconverted_trace)
return np.abs(fp.fft(downconverted_trace)[0] / N)
def get_noise(downconverted_trace):
return np.std(downconverted_trace)
def measure_snr(devices_dict):
    # turn on the microwave source output
devices_dict['mw'].set_output_state("ON")
    # reset the AWG before reconfiguring it
devices_dict['awg'].reset()
devices_dict['awg'].synchronize_channels(channelI, channelQ)
devices_dict['awg'].trigger_output_config(channel=channelI,
trig_length=100)
devices_dict['awg'].stop_AWG(channel=channelI)
devices_dict['iqawg'].set_parameters({"calibration": devices_dict['upconv_cal']})
devices_dict['iqawg'].output_IQ_waves_from_calibration(
amp_coeffs=(0.5, 0.5))
|
vdrhtc/Measurement-automation
|
scripts/photon_wave_mixing/helpers.py
|
Python
|
gpl-3.0
| 3,114
|
import os
from pcs import settings
from pcs.common import pcs_pycurl as pycurl
from pcs.common import reports
from pcs.common.reports.item import ReportItem
from pcs.common.node_communicator import (
CommunicatorLoggerInterface,
HostNotFound,
NodeTargetFactory,
)
from pcs.common.reports import (
ReportItemSeverity,
ReportProcessor,
)
from pcs.lib.errors import LibraryError
def _get_port(port):
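    """Return the given port, or the default pcsd port when port is None."""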
return port if port is not None else settings.pcsd_default_port
class LibCommunicatorLogger(CommunicatorLoggerInterface):
def __init__(self, logger, reporter: ReportProcessor):
self._logger = logger
self._reporter = reporter
def log_request_start(self, request):
msg = "Sending HTTP Request to: {url}"
if request.data:
msg += "\n--Debug Input Start--\n{data}\n--Debug Input End--"
self._logger.debug(msg.format(url=request.url, data=request.data))
self._reporter.report(
ReportItem.debug(
reports.messages.NodeCommunicationStarted(
request.url,
request.data,
)
)
)
def log_response(self, response):
if response.was_connected:
self._log_response_successful(response)
else:
self._log_response_failure(response)
self._log_debug(response)
def _log_response_successful(self, response):
url = response.request.url
msg = (
"Finished calling: {url}\nResponse Code: {code}"
+ "\n--Debug Response Start--\n{response}\n--Debug Response End--"
)
self._logger.debug(
msg.format(
url=url, code=response.response_code, response=response.data
)
)
self._reporter.report(
ReportItem.debug(
reports.messages.NodeCommunicationFinished(
url,
response.response_code,
response.data,
)
)
)
def _log_response_failure(self, response):
msg = "Unable to connect to {node} ({reason})"
self._logger.debug(
msg.format(
node=response.request.host_label, reason=response.error_msg
)
)
self._reporter.report(
ReportItem.debug(
reports.messages.NodeCommunicationNotConnected(
response.request.host_label,
response.error_msg,
)
)
)
if is_proxy_set(os.environ):
self._logger.warning("Proxy is set")
self._reporter.report(
ReportItem.warning(
reports.messages.NodeCommunicationProxyIsSet(
response.request.host_label,
response.request.dest.addr,
)
)
)
def _log_debug(self, response):
url = response.request.url
debug_data = response.debug
self._logger.debug(
(
"Communication debug info for calling: {url}\n"
"--Debug Communication Info Start--\n"
"{data}\n"
"--Debug Communication Info End--"
).format(url=url, data=debug_data)
)
self._reporter.report(
ReportItem.debug(
reports.messages.NodeCommunicationDebugInfo(url, debug_data)
)
)
def log_retry(self, response, previous_dest):
old_port = _get_port(previous_dest.port)
new_port = _get_port(response.request.dest.port)
msg = (
"Unable to connect to '{label}' via address '{old_addr}' and port "
"'{old_port}'. Retrying request '{req}' via address '{new_addr}' "
"and port '{new_port}'"
).format(
label=response.request.host_label,
old_addr=previous_dest.addr,
old_port=old_port,
new_addr=response.request.dest.addr,
new_port=new_port,
req=response.request.url,
)
self._logger.warning(msg)
self._reporter.report(
ReportItem.warning(
reports.messages.NodeCommunicationRetrying(
response.request.host_label,
previous_dest.addr,
old_port,
response.request.dest.addr,
new_port,
response.request.url,
)
)
)
def log_no_more_addresses(self, response):
msg = "No more addresses for node {label} to run '{req}'".format(
label=response.request.host_label,
req=response.request.url,
)
self._logger.warning(msg)
self._reporter.report(
ReportItem.warning(
reports.messages.NodeCommunicationNoMoreAddresses(
response.request.host_label,
response.request.url,
)
)
)
class NodeTargetLibFactory(NodeTargetFactory):
def __init__(self, known_hosts, report_processor: ReportProcessor):
super().__init__(known_hosts)
self._report_processor = report_processor
def get_target_list_with_reports(
self,
host_name_list,
skip_non_existing=False,
allow_skip=True,
report_none_host_found=True,
):
target_list = []
unknown_host_list = []
for host_name in host_name_list:
try:
target_list.append(self.get_target(host_name))
except HostNotFound:
unknown_host_list.append(host_name)
report_list = []
if unknown_host_list:
report_list.append(
ReportItem(
severity=reports.item.get_severity(
(
reports.codes.SKIP_OFFLINE_NODES
if allow_skip
else None
),
skip_non_existing,
),
message=reports.messages.HostNotFound(
sorted(unknown_host_list),
),
)
)
if not target_list and host_name_list and report_none_host_found:
# we want to create this report only if there was at least one
# required address specified
report_list.append(
ReportItem.error(reports.messages.NoneHostFound())
)
return report_list, target_list
def get_target_list(
self, host_name_list, skip_non_existing=False, allow_skip=True
):
report_list, target_list = self.get_target_list_with_reports(
host_name_list, skip_non_existing, allow_skip
)
if report_list:
if self._report_processor.report_list(report_list).has_errors:
raise LibraryError()
return target_list
def response_to_report_item(
response,
severity=ReportItemSeverity.ERROR,
forceable=None,
report_pcsd_too_old_on_404=False,
):
"""
Returns report item which corresponds to response if was not successful.
Otherwise returns None.
Response response -- response from which report item shoculd be created
ReportItemseverity severity -- severity of report item
string forceable -- force code
bool report_pcsd_too_old_on_404 -- if False, report unsupported command
"""
response_code = response.response_code
report_item = None
reason = None
if (
report_pcsd_too_old_on_404
and response.was_connected
and response_code == 404
):
return ReportItem.error(
reports.messages.PcsdVersionTooOld(response.request.host_label)
)
if response.was_connected:
if response_code == 400:
# old pcsd protocol: error messages are commonly passed in plain
# text in response body with HTTP code 400
# we need to be backward compatible with that
report_item = reports.messages.NodeCommunicationCommandUnsuccessful
reason = response.data.rstrip()
elif response_code == 401:
report_item = reports.messages.NodeCommunicationErrorNotAuthorized
reason = "HTTP error: {0}".format(response_code)
elif response_code == 403:
report_item = (
reports.messages.NodeCommunicationErrorPermissionDenied
)
reason = "HTTP error: {0}".format(response_code)
elif response_code == 404:
report_item = (
reports.messages.NodeCommunicationErrorUnsupportedCommand
)
reason = "HTTP error: {0}".format(response_code)
elif response_code >= 400:
report_item = reports.messages.NodeCommunicationError
reason = "HTTP error: {0}".format(response_code)
else:
if response.errno in [
pycurl.E_OPERATION_TIMEDOUT,
pycurl.E_OPERATION_TIMEOUTED,
]:
report_item = reports.messages.NodeCommunicationErrorTimedOut
reason = response.error_msg
else:
report_item = reports.messages.NodeCommunicationErrorUnableToConnect
reason = response.error_msg
if not report_item:
return None
return ReportItem(
severity=ReportItemSeverity(severity, forceable),
message=report_item(
response.request.host_label,
response.request.action,
reason,
),
)
def is_proxy_set(env_dict):
"""
Returns True whenever any of proxy environment variables (https_proxy,
HTTPS_PROXY, all_proxy, ALL_PROXY) are set in env_dict. False otherwise.
dict env_dict -- environment variables in dict
"""
proxy_list = ["https_proxy", "all_proxy"]
for var in proxy_list + [v.upper() for v in proxy_list]:
if env_dict.get(var, "") != "":
return True
return False
|
feist/pcs
|
pcs/lib/node_communication.py
|
Python
|
gpl-2.0
| 10,162
|
"""
# Majic
# Copyright (C) 2014 CEH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
from datetime import datetime
from hamcrest import *
from mock import *
from pylons import url, config
from webob.exc import HTTPInternalServerError
import json
from majic_web_service.controllers.run_property import RunPropertyController
from majic_web_service.services.run_property_service import RunPropertyService, ServiceException
from majic_web_service.tests import TestController
from majic_web_service.utils.constants import JSON_MODEL_RUNS, MODEL_RUN_STATUS_COMPLETED, MODEL_RUN_STATUS_PUBLISHED, \
MODEL_RUN_STATUS_PUBLIC
from majic_web_service.utils.general import convert_time_to_standard_string
class TestGetRunProperties(TestController):
def setUp(self):
self.clean_database()
def test_GIVEN_no_runs_WHEN_get_run_properties_THEN_return_empty_list(self):
response = self.app.get(
url(controller='run_property', action='list'),
expect_errors=False
)
assert_that(response.status_code, is_(200))
response = response.json_body
assert_that(response[JSON_MODEL_RUNS], empty(), "the model runs")
def test_GIVEN_database_has_one_row_WHEN_request_run_properties_THEN_return_list_with_item_in(self):
username = "username"
last_status_change = datetime(2015, 5, 3, 2, 1)
status = MODEL_RUN_STATUS_COMPLETED
model_id = self.add_model_run(username, last_status_change, status)
response = self.app.get(
url(controller='run_property', action='list'),
expect_errors=False
)
assert_that(response.status_code, is_(200))
response = response.json_body
assert_that(response[JSON_MODEL_RUNS], has_length(1), "model run count")
self.assert_model_run_json_is(
response[JSON_MODEL_RUNS][0],
model_id,
last_status_change,
username,
False,
False)
def test_GIVEN_database_has_different_workbench_username_from_username_WHEN_request_run_properties_THEN_workbench_username_returned(self):
workbench_username = "workbench"
majic_username = "majic"
last_status_change = datetime(2015, 5, 3, 2, 1)
status = MODEL_RUN_STATUS_COMPLETED
model_id = self.add_model_run(workbench_username, last_status_change, status, majic_username=majic_username)
response = self.app.get(
url(controller='run_property', action='list'),
expect_errors=False
)
assert_that(response.status_code, is_(200))
response = response.json_body
assert_that(response[JSON_MODEL_RUNS], has_length(1), "model run count")
self.assert_model_run_json_is(
response[JSON_MODEL_RUNS][0],
model_id,
last_status_change,
workbench_username,
False,
False)
def test_GIVEN_workbench_username_is_none_WHEN_request_run_properties_THEN_workbench_username_none_returned(self):
username = "username"
workbench_username = None
last_status_change = datetime(2015, 5, 3, 2, 1)
status = MODEL_RUN_STATUS_COMPLETED
model_id = self.add_model_run(workbench_username, last_status_change, status, majic_username=username)
response = self.app.get(
url(controller='run_property', action='list'),
expect_errors=False
)
assert_that(response.status_code, is_(200))
response = response.json_body
assert_that(response[JSON_MODEL_RUNS], has_length(1), "model run count")
self.assert_model_run_json_is(
response[JSON_MODEL_RUNS][0],
model_id,
last_status_change,
workbench_username,
False,
False)
def test_GIVEN_database_has_more_than_one_row_WHEN_request_run_properties_THEN_return_list_with_all_items_in(self):
usernames = ["username", "username2", "username3"]
last_status_changes = [datetime(2015, 5, 3, 2, 1), datetime(2014, 6, 4, 3, 2), datetime(2013, 6, 4, 3, 20)]
statuses = [MODEL_RUN_STATUS_COMPLETED, MODEL_RUN_STATUS_PUBLISHED, MODEL_RUN_STATUS_PUBLIC]
expected_is_published = [False, True, True]
expected_is_public = [False, False, True]
model_ids = []
for username, last_status_change, status in zip(usernames, last_status_changes, statuses):
model_id = self.add_model_run(username, last_status_change, status)
model_ids.append(model_id)
response = self.app.get(
url(controller='run_property', action='list'),
expect_errors=False
)
assert_that(response.status_code, is_(200))
response = response.json_body
assert_that(response[JSON_MODEL_RUNS], has_length(3), "model run count")
self.assert_model_run_json_is(
response[JSON_MODEL_RUNS][0],
model_ids[2],
last_status_changes[2],
usernames[2],
expected_is_published[2],
expected_is_public[2])
self.assert_model_run_json_is(
response[JSON_MODEL_RUNS][1],
model_ids[1],
last_status_changes[1],
usernames[1],
expected_is_published[1],
expected_is_public[1])
self.assert_model_run_json_is(
response[JSON_MODEL_RUNS][2],
model_ids[0],
last_status_changes[0],
usernames[0],
expected_is_published[0],
expected_is_public[0])
|
NERC-CEH/jules-jasmin
|
majic_web_service/majic_web_service/tests/functional/test_get_run_properties.py
|
Python
|
gpl-2.0
| 6,320
|
"""Support for UPnP/IGD Binary Sensors."""
from __future__ import annotations
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import UpnpBinarySensorEntityDescription, UpnpDataUpdateCoordinator, UpnpEntity
from .const import DOMAIN, LOGGER, WAN_STATUS
BINARYSENSOR_ENTITY_DESCRIPTIONS: tuple[UpnpBinarySensorEntityDescription, ...] = (
UpnpBinarySensorEntityDescription(
key=WAN_STATUS,
name="wan status",
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the UPnP/IGD sensors."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
LOGGER.debug("Adding binary sensor")
entities = [
UpnpStatusBinarySensor(
coordinator=coordinator,
entity_description=entity_description,
)
for entity_description in BINARYSENSOR_ENTITY_DESCRIPTIONS
if coordinator.data.get(entity_description.key) is not None
]
LOGGER.debug("Adding entities: %s", entities)
async_add_entities(entities)
class UpnpStatusBinarySensor(UpnpEntity, BinarySensorEntity):
"""Class for UPnP/IGD binary sensors."""
_attr_device_class = DEVICE_CLASS_CONNECTIVITY
def __init__(
self,
coordinator: UpnpDataUpdateCoordinator,
entity_description: UpnpBinarySensorEntityDescription,
) -> None:
"""Initialize the base sensor."""
super().__init__(coordinator=coordinator, entity_description=entity_description)
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.coordinator.data[self.entity_description.key] == "Connected"
|
lukas-hetzenecker/home-assistant
|
homeassistant/components/upnp/binary_sensor.py
|
Python
|
apache-2.0
| 1,960
|
'''Library / toolkit for creating command line programs with minimal effort.'''
# Copyright (c) 2013-2016, 2018 Benjamin Althues <benjamin@babab.nl>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import absolute_import
__docformat__ = 'restructuredtext'
__author__ = "Benjamin Althues"
__copyright__ = "Copyright (C) 2013-2016, 2018 Benjamin Althues"
__version_info__ = (0, 4, 0, 'final', 0)
__version__ = '0.4.0'
from pycommand.pycommand import (
CommandBase,
CommandExit,
OptionError,
run_and_exit,
)
__all__ = ('CommandBase', 'run_and_exit')
|
babab/pycommand
|
pycommand/__init__.py
|
Python
|
isc
| 1,254
|
from unittest import TestCase
from django.contrib.sites.models import Site
from django.http import HttpRequest
from mock import Mock, patch
import urls # NOQA
from cmsplugin_feedback.models import Message
from cmsplugin_feedback import signals
class EmailTest(TestCase):
def setUp(self):
self.msg = Message.objects.create(
name='Anton Egorov',
email='aeg@example.com',
text='Hello World!')
self.request = Mock(spec=HttpRequest)
def test_admin_url(self):
site = Mock(spec=Site, domain='mysite.com')
with patch.object(signals, 'get_current_site',
return_value=site) as get_site:
url = signals.get_admin_url(self.msg, self.request)
get_site.assert_called_once_with(self.request)
self.assertEqual(
url, 'http://{s}/admin/cmsplugin_feedback/'
'message/{id}/'.format(s=site.domain, id=self.msg.id))
def test_render_email(self):
url = 'http://example.com/admin/cmsplugin_feedback/message/1/'
with patch('cmsplugin_feedback.signals.get_admin_url',
return_value=url) as get_url:
text = signals.render_email(self.msg, self.request)
get_url.assert_called_once_with(self.msg, self.request)
self.assertIn(self.msg.name, text)
self.assertIn(self.msg.text, text)
self.assertIn(url, text)
|
satyrius/cmsplugin-feedback
|
tests/test_email.py
|
Python
|
mit
| 1,443
|
import atexit
import base64
import datetime
import importlib
import json
import logging
import logging.config
import os
import shutil
import subprocess
import traceback
import urllib.parse
import falcon
import engine.util as util
from engine.code_runner import CodeRunner, FilePushError, FilePullError, EngineExecutionError
from engine.docker_util import (
docker_init,
docker_file_push,
docker_file_pull,
create_docker_container,
remove_docker_container,
)
log_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "logging.ini")
logging.config.fileConfig(log_file_path, disable_existing_loggers=False)
logger = logging.getLogger(__name__)
cwd = os.path.dirname(os.path.abspath(__file__))
os.chdir(cwd)
class SubmitResource:
def __init__(self):
self.pid = os.getpid()
# self.container_image = "lovelace-image"
container_name = "lovelace-{:d}-{:s}".format(
self.pid, datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
)
# Start a container to use for all submissions
# TODO this container_name might not be unique!
self.container_id, self.container_name = create_docker_container(name=container_name)
logger.debug(
"Docker container id: {}; name: {}".format(self.container_id, self.container_name)
)
atexit.register(remove_docker_container, self.container_id)
def on_post(self, req, resp):
payload = req.media
code = payload["code"]
language = payload["language"]
if not code:
resp_dict = {"error": "No code provided!"}
resp.status = falcon.HTTP_400
resp.set_header("Access-Control-Allow-Origin", "*")
resp.body = json.dumps(resp_dict)
return
code_filename = write_code_to_file(code, language)
try:
# Fetch problem ID and load the correct problem module.
problem_name = payload["problem"].replace("-", "_")
problem_module = "problems.{:s}".format(problem_name)
logger.debug(
"Importing problem_name={:s} problem_module={:s}...".format(
problem_name, problem_module
)
)
problems = importlib.import_module("problems")
problem = importlib.import_module(problem_module)
except Exception:
explanation = (
"Could not import module {:s}. "
"Returning HTTP 400 Bad Request due to possibly invalid JSON.".format(
problem_module
)
)
add_error_to_response(
resp, explanation, traceback.format_exc(), falcon.HTTP_400, code_filename
)
return
function_name = problem.FUNCTION_NAME
problem_dir = problem_name
# Copy static resources into engine directory and push them into the Linux container.
static_resources = []
for resource_file_name in problem.STATIC_RESOURCES:
from_path = os.path.join(cwd, "..", "resources", problem_dir, resource_file_name)
to_path = os.path.join(cwd, resource_file_name)
logger.debug("Copying static resource from {:s} to {:s}".format(from_path, to_path))
try:
shutil.copyfile(from_path, to_path)
except Exception:
explanation = "Engine failed to copy a static resource. Returning falcon HTTP 500."
add_error_to_response(
resp, explanation, traceback.format_exc(), falcon.HTTP_500, code_filename
)
return
static_resources.append(to_path)
container_path = "/root/{:}".format(resource_file_name)
logger.debug(
"Pushing static resource to container {:}{:}".format(
self.container_id, container_path
)
)
_ = docker_file_push(self.container_id, from_path, container_path)
if not problem.STATIC_RESOURCES:
logger.debug("No static resources to push")
logger.info("Generating test cases...")
test_cases = []
try:
for i, test_type in enumerate(problem.TestCaseType):
for j in range(test_type.multiplicity):
logger.debug(
"Generating test case {:d}: {:s} ({:d}/{:d})...".format(
len(test_cases) + 1, str(test_type), j + 1, test_type.multiplicity
)
)
test_cases.append(problem.generate_test_case(test_type))
except Exception:
explanation = "Engine failed to generate a test case. Returning falcon HTTP 500."
add_error_to_response(
resp, explanation, traceback.format_exc(), falcon.HTTP_500, code_filename
)
return
# Copy over all the dynamic resources generated by the test cases.
dynamic_resources = []
for i, tc in enumerate(test_cases):
if "DYNAMIC_RESOURCES" in tc.input:
for dynamic_resource_filename in tc.input["DYNAMIC_RESOURCES"]:
resource_path = os.path.join(
cwd, "..", "resources", problem_dir, dynamic_resource_filename
)
destination_path = os.path.join(cwd, dynamic_resource_filename)
logger.debug(
"Copying test case resource from {:s} to {:s}...".format(
resource_path, destination_path
)
)
shutil.copyfile(resource_path, destination_path)
dynamic_resources.append(resource_path)
dynamic_resources.append(destination_path)
container_path = "/root/{:}".format(dynamic_resource_filename)
logger.debug(
"Pushing dynamic resource to container {:}{:}".format(
self.container_id, container_path
)
)
_ = docker_file_push(self.container_id, resource_path, container_path)
if not dynamic_resources:
logger.debug("No dynamic resources to push")
runner = CodeRunner(language)
input_tuples = [tc.input_tuple() for tc in test_cases]
output_tuples = [tc.output_tuple() for tc in test_cases]
try:
user_outputs, p_infos = runner.run(
self.container_name, code_filename, function_name, input_tuples, output_tuples
)
except (FilePushError, FilePullError):
explanation = "File could not be pushed to or pulled from docker container. Returning falcon HTTP 500."
add_error_to_response(
resp, explanation, traceback.format_exc(), falcon.HTTP_500, code_filename
)
return
except EngineExecutionError:
explanation = (
"Return code from executing user code in docker container is nonzero. "
"Returning falcon HTTP 400."
)
add_error_to_response(
resp, explanation, traceback.format_exc(), falcon.HTTP_400, code_filename
)
return
# Pull any user generated files.
files_pulled = False
for i, tc in enumerate(test_cases):
if "USER_GENERATED_FILES" in tc.output:
for user_generated_filename in tc.output["USER_GENERATED_FILES"]:
container_filepath = "/root/{:s}".format(user_generated_filename)
logger.debug(
"Pulling user generated file from container {:s}{:s}".format(
self.container_name, container_filepath
)
)
_ = docker_file_pull(
self.container_id, container_filepath, user_generated_filename
)
files_pulled = True
if not files_pulled:
logger.debug("No user generated files to pull")
n_cases = len(test_cases)
n_passes = 0 # Number of test cases passed.
        # List of dicts, each containing the details of a particular test case.
        test_case_details = []
# Verify that user outputs are all correct (i.e. check whether each test case passes or fails).
for input_tuple, user_output, p_info, tc in zip(
input_tuples, user_outputs, p_infos, test_cases
):
if isinstance(user_output, list):
# user_output is a list. This could be a multiple-return, or a legitimate list return.
                # Here we disambiguate depending on the output variables the problem requires
if len(problem.OUTPUT_VARS) == 1:
# Only one variable should be returned; Thus, this is a "list return"
user_output = (user_output,)
else:
# More than one variable should be returned, so this is a multiple return
user_output = tuple(user_output)
if user_output[0] is None:
logger.debug(
"Looks like user's function returned None: output={:}".format(user_output)
)
passed = False
expected_output = "Your function returned None. It shouldn't do that."
else:
try:
                    user_test_case = problem.ProblemTestCase(
                        None, problem.INPUT_VARS, input_tuple,
                        problem.OUTPUT_VARS, user_output)
                    passed, correct_test_case = \
                        problems.test_case.test_case_solution_correct(
                            tc, user_test_case, problem.ATOL, problem.RTOL)
expected_output = correct_test_case.output_tuple()
except Exception:
explanation = "Internal engine error during user test case verification. Returning falcon HTTP 500."
add_error_to_response(
resp, explanation, traceback.format_exc(), falcon.HTTP_500, code_filename
)
return
if passed:
n_passes += 1
test_case_details.append(
{
"testCaseType": tc.test_type.test_name,
"input": input_tuple,
"output": user_output,
"expected": expected_output,
"inputString": str(input_tuple),
"outputString": str(user_output),
"expectedString": str(expected_output),
"passed": passed,
"processInfo": p_info,
}
)
if "DYNAMIC_RESOURCES" in tc.input:
for dynamic_resource_path in dynamic_resources:
logger.debug("Deleting dynamic resource: {:s}".format(dynamic_resource_path))
util.delete_file(dynamic_resource_path)
logger.info("Passed %d/%d test cases.", n_passes, n_cases)
resp_dict = {
"success": True if n_passes == n_cases else False,
"numTestCases": n_cases,
"numTestCasesPassed": n_passes,
"testCaseDetails": test_case_details,
}
resp.status = falcon.HTTP_200
resp.set_header("Access-Control-Allow-Origin", "*")
resp.body = json.dumps(resp_dict)
util.delete_file(code_filename)
logger.debug("User code file deleted: {:s}".format(code_filename))
for file_path in static_resources:
logging.debug("Deleting static resource {:s}".format(file_path))
util.delete_file(file_path)
def parse_payload(http_request):
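    """Read and JSON-decode the request body, raising HTTP 400 on failure."""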
try:
raw_payload_data = http_request.stream.read().decode("utf-8")
except Exception as ex:
logger.error("Bad request, reason unknown. Returning 400.")
        raise falcon.HTTPError(falcon.HTTP_400, "Error", str(ex))
try:
json_payload = json.loads(raw_payload_data)
except ValueError:
logger.error("Received invalid JSON: {:}".format(raw_payload_data))
logger.error("Returning 400 error.")
raise falcon.HTTPError(falcon.HTTP_400, "Invalid JSON", "Could not decode request body.")
return json_payload
def write_code_to_file(code, language):
"""
Write code into a file with the appropriate file extension.
:param code: a base64 encoded string representing the user's submitted source code
:param language: the code's programming language
:return: the name of the file containing the user's code
"""
decoded_code = str(base64.b64decode(code), "utf-8")
extension = {"python": ".py", "javascript": ".js", "julia": ".jl", "c": ".c"}.get(language)
code_filename = util.write_str_to_file(decoded_code, extension)
logger.debug("User code saved in: {:s}".format(code_filename))
return code_filename
def add_error_to_response(resp, explanation, tb, falcon_http_error_code, code_filename):
"""
Modify the falcon HTTP response object with an error to be shown to the user. Also deletes the user's code as the
engine cannot run it.
:param resp: The falcon HTTP response object to be modified.
:param explanation: A human-friendly explanation of the error.
:param tb: Traceback string.
:param falcon_http_error_code: Falcon HTTP error code to return.
:param code_filename: Filepath to user code to be deleted.
:return: nothing
"""
logger.error(explanation)
logger.error(tb)
util.delete_file(code_filename)
# URL friendly traceback we can embed into a mailto: link.
url_friendly_tb = urllib.parse.quote(tb)
DISCOURSE_LINK = '<a href="https://discourse.projectlovelace.net/">https://discourse.projectlovelace.net/</a>'
EMAIL_LINK = (
'<a href="mailto:ada@mg.projectlovelace.net?&subject=Project Lovelace error report'
+ "&body={:}%0A%0A{:}".format(explanation, url_friendly_tb)
+ '">ada@mg.projectlovelace.net</a>'
)
NOTICE = (
"A stacktrace should appear below with more information about this error which might help\n"
"you debug your code. But if it's not your code then it might be our fault :( If this is a\n"
"website error and you have the time, we'd really appreciate it if you could report this\n"
"on Discourse (" + DISCOURSE_LINK + ") or via email (" + EMAIL_LINK + ").\n"
"All the information is embedded in the email link so all you have to do is press send.\n"
"Thanks so much!"
)
error_message = "{:s}\n\n{:s}\n\nError: {:}".format(explanation, NOTICE, tb)
resp_dict = {"error": error_message}
resp.status = falcon_http_error_code
resp.set_header("Access-Control-Allow-Origin", "*")
resp.body = json.dumps(resp_dict)
return
docker_init()
app = falcon.API()
app.add_route("/submit", SubmitResource())
app.add_error_handler(Exception, lambda ex, req, resp, params: logger.exception(ex))
|
project-lovelace/lovelace-engine
|
engine/api.py
|
Python
|
mit
| 15,244
|
# -*- coding: utf-8 -*-
# $URL$
# $Date$
# $Revision$
# See LICENSE.txt for licensing terms
import os
from urllib.parse import urljoin, urlparse
from xml.sax.saxutils import escape
import docutils.nodes
from rst2pdf.basenodehandler import NodeHandler
from rst2pdf.image import MyImage, missing
from rst2pdf.opt_imports import Paragraph
class FontHandler(NodeHandler):
def get_pre_post(self, client, node, replaceEnt):
return self.get_font_prefix(client, node, replaceEnt), '</font>'
def get_font_prefix(self, client, node, replaceEnt):
return client.styleToFont(self.fontstyle)
class HandleText(NodeHandler, docutils.nodes.Text):
def gather_elements(self, client, node, style):
return [Paragraph(client.gather_pdftext(node), style)]
def get_text(self, client, node, replaceEnt):
text = node.astext()
if replaceEnt:
text = escape(text)
return text
class HandleStrong(NodeHandler, docutils.nodes.strong):
pre = "<b>"
post = "</b>"
class HandleEmphasis(NodeHandler, docutils.nodes.emphasis):
pre = "<i>"
post = "</i>"
class HandleLiteral(NodeHandler, docutils.nodes.literal):
def get_pre_post(self, client, node, replaceEnt):
if node['classes']:
pre = client.styleToFont(node['classes'][0])
else:
pre = client.styleToFont('literal')
post = "</font>"
if not client.styles['literal'].hyphenation:
pre = '<nobr>' + pre
post += '</nobr>'
return pre, post
def get_text(self, client, node, replaceEnt):
        text = escape(node.astext())
text = text.replace(' ', ' ')
return text
class HandleSuper(NodeHandler, docutils.nodes.superscript):
pre = '<super>'
post = "</super>"
class HandleSub(NodeHandler, docutils.nodes.subscript):
pre = '<sub>'
post = "</sub>"
class HandleTitleReference(FontHandler, docutils.nodes.title_reference):
fontstyle = 'title_reference'
class HandleReference(NodeHandler, docutils.nodes.reference):
def get_pre_post(self, client, node, replaceEnt):
pre, post = '', ''
uri = node.get('refuri')
if uri:
# Issue 366: links to "#" make no sense in a PDF
if uri == "#":
return "", ""
            if uri.startswith('#'):
pass
elif client.baseurl: # Need to join the uri with the base url
uri = urljoin(client.baseurl, uri)
if urlparse(uri)[0] and client.inlinelinks:
# external inline reference
if uri in [node.astext(), "mailto:" + node.astext()]:
# No point on repeating it
post = ''
elif uri.startswith('http://') or uri.startswith('ftp://'):
post = ' (%s)' % uri
elif uri.startswith('mailto:'):
# No point on showing "mailto:"
post = ' (%s)' % uri[7:]
else:
# A plain old link
pre += '<a href="%s" color="%s">' % \
(uri, client.styles.linkColor)
post = '</a>' + post
else:
uri = node.get('refid')
if uri:
pre += '<a href="#%s" color="%s">' % \
(uri, client.styles.linkColor)
post = '</a>' + post
return pre, post
class HandleOptions(HandleText, docutils.nodes.option_string, docutils.nodes.option_argument):
pass
class HandleSysMessage(HandleText, docutils.nodes.system_message, docutils.nodes.problematic):
pre = '<font color="red">'
post = "</font>"
def gather_elements(self, client, node, style):
# FIXME show the error in the document, red, whatever
# log.warning("Problematic node %s", node.astext())
return []
class HandleGenerated(HandleText, docutils.nodes.generated):
pass
# def get_text(self, client, node, replaceEnt):
# if 'sectnum' in node['classes']:
# # This is the child of a title with a section number
# # Send the section number up to the title node
# node.parent['_sectnum'] = node.astext()
# return node.astext()
class HandleImage(NodeHandler, docutils.nodes.image):
def gather_elements(self, client, node, style):
# FIXME: handle alt
target = None
if isinstance(node.parent, docutils.nodes.reference):
target = node.parent.get('refuri', None)
st_name = 'image'
if node.get('classes'):
st_name = node.get('classes')[0]
style = client.styles[st_name]
uri = str(node.get("uri"))
if uri.split("://")[0].lower() not in ('http', 'ftp', 'https'):
imgname = os.path.join(client.basedir, uri)
else:
imgname = uri
try:
w, h, kind = MyImage.size_for_node(node, client=client)
except ValueError:
# Broken image, return arbitrary stuff
imgname = missing
w, h, kind = 100, 100, 'direct'
node.elements = [
MyImage(filename=imgname, height=h, width=w,
kind=kind, client=client, target=target)]
alignment = node.get('align', '').upper()
if not alignment:
# There is no JUSTIFY for flowables, of course, so 4:LEFT
            alignment = {
                0: 'LEFT',
                1: 'CENTER',
                2: 'RIGHT',
                4: 'LEFT',
            }[style.alignment]
if not alignment:
alignment = 'CENTER'
node.elements[0].image.hAlign = alignment
node.elements[0].spaceBefore = style.spaceBefore
node.elements[0].spaceAfter = style.spaceAfter
# Image flowables don't support valign (makes no sense for them?)
# elif alignment in ('TOP','MIDDLE','BOTTOM'):
# i.vAlign = alignment
return node.elements
def get_text(self, client, node, replaceEnt):
# First see if the image file exists, or else,
# use image-missing.png
imgname = os.path.join(client.basedir, str(node.get("uri")))
try:
w, h, kind = MyImage.size_for_node(node, client=client)
except ValueError:
# Broken image, return arbitrary stuff
imgname = missing
w, h, kind = 100, 100, 'direct'
alignment = node.get('align', 'CENTER').lower()
if alignment in ('top', 'middle', 'bottom'):
align = 'valign="%s"' % alignment
else:
align = ''
        # TODO: inline images don't support SVG, vectors and PDF,
        # which may be surprising. So, work on converting them
        # prior to passing to reportlab.
        # Try to rasterize using the backend
uri = MyImage.raster(imgname, client)
return '<img src="%s" width="%f" height="%f" %s/>' % \
(uri, w, h, align)
class HandleFootRef(NodeHandler, docutils.nodes.footnote_reference,
docutils.nodes.citation_reference):
def get_text(self, client, node, replaceEnt):
# TODO: when used in Sphinx, all footnotes are autonumbered
anchors = ''
for i in node.get('ids'):
if i not in client.targets:
anchors += '<a name="%s"/>' % i
client.targets.append(i)
return '%s<super><a href="%s" color="%s">%s</a></super>' % \
(anchors, '#' + node.get('refid', node.astext()),
client.styles.linkColor, node.astext())
class HandleTarget(NodeHandler, docutils.nodes.target):
def gather_elements(self, client, node, style):
if 'refid' in node:
client.pending_targets.append(node['refid'])
return client.gather_elements(node, style)
def get_text(self, client, node, replaceEnt):
text = client.gather_pdftext(node)
if replaceEnt:
text = escape(text)
return text
def get_pre_post(self, client, node, replaceEnt):
pre = ''
if node['ids'][0] not in client.targets:
pre = '<a name="%s"/>' % node['ids'][0]
client.targets.append(node['ids'][0])
return pre, ''
class HandleInline(NodeHandler, docutils.nodes.inline):
def get_pre_post(self, client, node, replaceEnt):
r = client.styleToTags(node['classes'][0])
if r:
return r
return '', ''
|
aquavitae/rst2pdf-py3-dev
|
rst2pdf/genpdftext.py
|
Python
|
mit
| 8,587
|
import matplotlib.pyplot as plt
import numpy as np
t = np.loadtxt( "../../res/time.dat" )
data = np.loadtxt("res/ppar_1d_0_0.dat")
a = data[:,4]
font_title = {'family': 'serif',
              'weight': 'normal',
              'size': 18}
font_label = {'weight': 'normal',
              'size': 16}
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
plt.plot( t, a, marker='.' )
ax.grid( True )
plt.xlabel(r"Time (fs)", fontdict=font_label)
plt.ylabel(r"Polarization (arb.)", fontdict=font_label)
plt.title(r"Polarization for different directions", fontdict=font_title)
plt.savefig("fig/ppar.svg")
plt.show()
|
timy/dm_spec
|
ana/pullerits_1d/plot_test.py
|
Python
|
mit
| 611
|
import os
import base64
from requests import Session, Request
from OpenSSL import crypto
url = 'http://ct.googleapis.com/aviator/ct/v1/get-roots'
s = Session()
r = Request('GET',
url)
prepped = r.prepare()
r = s.send(prepped)
if r.status_code == 200:
roots = r.json()
# RFC 6962 defines the certificate objects as base64 encoded certs.
# Importantly, these are not PEM formatted certs but base64 encoded
# ASN.1 (DER) encoded
for i in roots:
certs = roots[i]
for k in certs:
try:
certobj = crypto.load_certificate(crypto.FILETYPE_ASN1,base64.b64decode(k))
subject = certobj.get_subject()
print 'CN={},OU={},O={},L={},S={},C={}'.format(subject.commonName,
subject.organizationalUnitName,
subject.organizationName,
subject.localityName,
subject.stateOrProvinceName,
subject.countryName)
            except Exception:
print subject.get_components()
|
wgoulet/CTPyClient
|
fetchroots.py
|
Python
|
apache-2.0
| 1,184
|
"""Feed to Zinnia command module"""
import os
import sys
from urllib2 import urlopen
from datetime import datetime
from optparse import make_option
from django.conf import settings
from django.utils import timezone
from django.core.files import File
from django.utils.text import Truncator
from django.utils.html import strip_tags
from django.db.utils import IntegrityError
from django.utils.encoding import smart_str
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.template.defaultfilters import slugify
from django.core.management.base import CommandError
from django.core.management.base import LabelCommand
from django.core.files.temp import NamedTemporaryFile
from zinnia import __version__
from zinnia.models import Entry
from zinnia.models import Category
from zinnia.managers import PUBLISHED
from zinnia.signals import disconnect_zinnia_signals
class Command(LabelCommand):
"""Command object for importing a RSS or Atom
feed into Zinnia."""
help = 'Import a RSS or Atom feed into Zinnia.'
label = 'feed url'
args = 'url'
option_list = LabelCommand.option_list + (
make_option('--no-auto-excerpt', action='store_false',
dest='auto-excerpt', default=True,
help='Do NOT generate an excerpt if not present.'),
make_option('--no-enclosure', action='store_false',
dest='image-enclosure', default=True,
help='Do NOT save image enclosure if present.'),
make_option('--no-tags', action='store_false',
dest='tags', default=True,
help='Do NOT store categories as tags'),
make_option('--author', dest='author', default='',
help='All imported entries belong to specified author'),
)
SITE = Site.objects.get_current()
def __init__(self):
"""Init the Command and add custom styles"""
super(Command, self).__init__()
self.style.TITLE = self.style.SQL_FIELD
self.style.STEP = self.style.SQL_COLTYPE
self.style.ITEM = self.style.HTTP_INFO
disconnect_zinnia_signals()
def write_out(self, message, verbosity_level=1):
"""Convenient method for outputing"""
if self.verbosity and self.verbosity >= verbosity_level:
sys.stdout.write(smart_str(message))
sys.stdout.flush()
def handle_label(self, url, **options):
try:
import feedparser
except ImportError:
raise CommandError('You need to install the feedparser ' \
'module to run this command.')
self.tags = options.get('tags', True)
self.default_author = options.get('author')
self.verbosity = int(options.get('verbosity', 1))
self.auto_excerpt = options.get('auto-excerpt', True)
self.image_enclosure = options.get('image-enclosure', True)
if self.default_author:
try:
self.default_author = User.objects.get(
username=self.default_author)
except User.DoesNotExist:
raise CommandError('Invalid username for default author')
self.write_out(self.style.TITLE(
'Starting importation of %s to Zinnia %s:\n' % (url, __version__)))
feed = feedparser.parse(url)
self.import_entries(feed.entries)
def import_entries(self, feed_entries):
"""Import entries"""
for feed_entry in feed_entries:
self.write_out('> %s... ' % feed_entry.title)
            if feed_entry.get('published_parsed'):
creation_date = datetime(*feed_entry.published_parsed[:6])
if settings.USE_TZ:
creation_date = timezone.make_aware(
creation_date, timezone.utc)
else:
creation_date = timezone.now()
slug = slugify(feed_entry.title)[:255]
if Entry.objects.filter(creation_date__year=creation_date.year,
creation_date__month=creation_date.month,
creation_date__day=creation_date.day,
slug=slug):
self.write_out(self.style.NOTICE(
'SKIPPED (already imported)\n'))
continue
categories = self.import_categories(feed_entry)
entry_dict = {'title': feed_entry.title[:255],
'content': feed_entry.description,
'excerpt': feed_entry.get('summary'),
'status': PUBLISHED,
'creation_date': creation_date,
'start_publication': creation_date,
'last_update': timezone.now(),
'slug': slug}
if not entry_dict['excerpt'] and self.auto_excerpt:
                entry_dict['excerpt'] = Truncator(strip_tags(
                    feed_entry.description)).words(50)
if self.tags:
entry_dict['tags'] = self.import_tags(categories)
entry = Entry(**entry_dict)
entry.save()
entry.categories.add(*categories)
entry.sites.add(self.SITE)
if self.image_enclosure:
for enclosure in feed_entry.enclosures:
if 'image' in enclosure.get('type') \
and enclosure.get('href'):
img_tmp = NamedTemporaryFile(delete=True)
img_tmp.write(urlopen(enclosure['href']).read())
img_tmp.flush()
entry.image.save(os.path.basename(enclosure['href']),
File(img_tmp))
break
if self.default_author:
entry.authors.add(self.default_author)
elif feed_entry.get('author_detail'):
try:
user = User.objects.create_user(
slugify(feed_entry.author_detail.get('name')),
feed_entry.author_detail.get('email', ''))
except IntegrityError:
user = User.objects.get(
username=slugify(feed_entry.author_detail.get('name')))
entry.authors.add(user)
self.write_out(self.style.ITEM('OK\n'))
def import_categories(self, feed_entry):
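        """Get or create a zinnia Category for each category of the entry."""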
categories = []
for cat in feed_entry.get('tags', ''):
category, created = Category.objects.get_or_create(
slug=slugify(cat.term), defaults={'title': cat.term})
categories.append(category)
return categories
def import_tags(self, categories):
tags = []
for cat in categories:
if len(cat.title.split()) > 1:
tags.append('"%s"' % slugify(cat.title).replace('-', ' '))
else:
tags.append(slugify(cat.title).replace('-', ' '))
return ', '.join(tags)
|
westinedu/similarinterest
|
zinnia/management/commands/feed2zinnia.py
|
Python
|
bsd-3-clause
| 7,134
|
"""
Copyright (C) 2010 Stephen Georg
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
For Questions about this program please contact
Stephen Georg at srgeorg@gmail.com
A copy of the license should be included in the file LICENSE.txt
"""
from PyQt4 import QtGui
from PyQt4 import QtCore
import logging
from widgetContextEditor import WidgetContextEditor
from widgetContextList import WidgetContextList
class PageContexts(QtGui.QWidget):
# Signals
gotoContext = QtCore.pyqtSignal(int)
def __init__(self, parent, databaseCon):
logging.info("PageContexts->__init__(self, parent, databaseCon)")
QtGui.QWidget.__init__(self, parent)
self.databaseCon = databaseCon
self.settings = QtCore.QSettings("tracks-queue", "tracks-queue")
#latitudeLabel = QtGui.QLabel("Latitude:")
#layout = QtGui.QGridLayout(self)
#layout.addWidget(latitudeLabel, 0, 0)
# The main page layout
self.horizontalLayout = QtGui.QHBoxLayout(self)
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setMargin(0)
# Scroll area for lists
self.scrollArea = QtGui.QScrollArea()
self.scrollArea.setWidgetResizable(True)
# Layout for scroll area
widget = QtGui.QWidget()
self.scrollArea.setWidget(widget)
self.verticalLayout = QtGui.QVBoxLayout(widget)
self.horizontalLayout.addWidget(self.scrollArea)
# Add the context editor form
self.contextEditor = WidgetContextEditor(self.databaseCon)
self.contextEditor.setVisible(False)
self.horizontalLayout.addWidget(self.contextEditor)
self.contextEditor.contextModified.connect(self.refresh)
# Add the lists
# Active Contexts
self.activeContextsList = WidgetContextList(self.databaseCon, "Visible Contexts", None, True)
self.activeContextsList.editContext.connect(self.contextEditor.setCurrentContextID)
self.activeContextsList.gotoContext.connect(self.slotGotoContext)
self.verticalLayout.addWidget(self.activeContextsList)
# Hidden Contexts
self.hiddenContextsList = WidgetContextList(self.databaseCon, "Hidden Contexts", None, False)
self.hiddenContextsList.editContext.connect(self.contextEditor.setCurrentContextID)
self.hiddenContextsList.gotoContext.connect(self.slotGotoContext)
self.verticalLayout.addWidget(self.hiddenContextsList)
##Connect project save event to refresh lists
#self.projectEditor.projectModified.connect(self.refresh)
# Add a vertical spacer
spacerItem = QtGui.QSpacerItem(
1, 1, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.current_user_id = None
def refresh(self, userId=None):
logging.info("PageContexts->refresh()")
if userId:
self.current_user_id = userId
# Active Contexts
queryActive = "SELECT contexts.id, contexts.name, SUM(CASE WHEN \
todos.state IS 'active' THEN 1 ELSE 0 END), SUM(CASE\
WHEN todos.state = 'completed' THEN 1 ELSE 0 END) FROM \
contexts LEFT JOIN todos ON contexts.id=todos.context_id AND contexts.user_id=todos.user_id \
WHERE contexts.hide='f' and contexts.user_id=%s GROUP BY contexts.id ORDER BY contexts.name" % (self.current_user_id)
self.activeContextsList.setDBQuery(queryActive)
# Hidden Contexts
queryHidden = "SELECT contexts.id, contexts.name, SUM(CASE WHEN \
todos.state IS 'active' THEN 1 ELSE 0 END), SUM(CASE \
WHEN todos.state = 'completed' THEN 1 ELSE 0 END) FROM \
contexts LEFT JOIN todos ON contexts.id=todos.context_id AND contexts.user_id=todos.user_id\
WHERE contexts.hide='t' and contexts.user_id=%s GROUP BY contexts.id ORDER BY contexts.name" % (self.current_user_id)
self.hiddenContextsList.setDBQuery(queryHidden)
self.contextEditor.setCurrentUser(self.current_user_id)
def setFormVisible(self, visible):
logging.info("PageContexts->setFormVisible(self, visible)")
self.contextEditor.setVisible(visible)
self.contextEditor.setFocus()
def slotGotoContext(self, id):
logging.info("PageContexts->slotGotoContext(self, id)")
self.emit(QtCore.SIGNAL("gotoContext(int)"), id)
def moveExclusiveExpandUp(self):
logging.info("tracks->moveExclusiveExpandUp")
# shrink all lists but the expanded list
focuspos = None
posList = {}
for key in self.homeContexts.keys():
pos = self.homeContexts[key].pos().y()
posList[pos] = key
if self.homeContexts[key].isExpanded():
if (not focuspos) or (pos < focuspos):
focuspos = pos
self.homeContexts[key].setExpanded(False)
posKeys = posList.keys()
posKeys.sort(reverse=True)
done = False
for pos in posKeys:
if focuspos and pos<focuspos:
self.homeContexts[posList[pos]].setExpanded(True)
done = True
break
if done == False:
self.homeContexts[posList[posKeys[len(posKeys)-1]]].setExpanded(True)
def moveExclusiveExpandDown(self):
logging.info("PageContexts->moveExclusiveExpandDown(self)")
# shrink all lists but the expanded list
focuspos = None
posList = {}
for key in self.homeContexts.keys():
pos = self.homeContexts[key].pos().y()
posList[pos] = key
if self.homeContexts[key].isExpanded():
if (not focuspos) or (pos > focuspos):
focuspos = pos
self.homeContexts[key].setExpanded(False)
posKeys = posList.keys()
posKeys.sort()
done = False
for pos in posKeys:
if focuspos and pos>focuspos:
self.homeContexts[posList[pos]].setExpanded(True)
done = True
break
if done == False:
self.homeContexts[posList[posKeys[len(posKeys)-1]]].setExpanded(True)
def moveFocusUp(self):
# moves the keyboard focus up to the next expanded list
logging.info("PageContexts->moveFocusUp")
keyfocuspos = None
posList = {}
# find the list with keyboard focus if there is one
for key in self.homeContexts.keys():
pos = self.homeContexts[key].pos().y()
posList[pos] = key
if self.homeContexts[key].isAncestorOf(QtGui.QApplication.focusWidget()):
keyfocuspos = pos
# sort the lists by position
posKeys = posList.keys()
posKeys.sort(reverse=True)
done = False
# set keyboard focus on the next highest list that is expanded
for pos in posKeys:
if pos<keyfocuspos:
if self.homeContexts[posList[pos]].isExpanded():
self.homeContexts[posList[pos]].listWidget.setFocus()
else:
self.homeContexts[posList[pos]].toggleListButton.setFocus()
done = True
break
# If none were expanded set to highest list and expand
if done == False:
if self.homeContexts[posList[posKeys[0]]].isExpanded():
self.homeContexts[posList[posKeys[0]]].listWidget.setFocus()#setExpanded(True)
else:
self.homeContexts[posList[posKeys[0]]].toggleListButton.setFocus()
def moveFocusDown(self):
# moves the keyboard focus down to the next expanded list
logging.info("PageContexts->moveFocusDown")
keyfocuspos = None
posList = {}
# find the list with keyboard focus if there is one
for key in self.homeContexts.keys():
pos = self.homeContexts[key].pos().y()
posList[pos] = key
if self.homeContexts[key].isAncestorOf(QtGui.QApplication.focusWidget()):
keyfocuspos = pos
# sort the lists by position
posKeys = posList.keys()
posKeys.sort()
done = False
# set keyboard focus on the next lowest list that is expanded
for pos in posKeys:
if keyfocuspos and pos>keyfocuspos:
if self.homeContexts[posList[pos]].isExpanded():
self.homeContexts[posList[pos]].listWidget.setFocus()
else:
self.homeContexts[posList[pos]].toggleListButton.setFocus()
done = True
break
# If none were expanded set to lowest list
if done == False:
if self.homeContexts[posList[posKeys[0]]].isExpanded():
self.homeContexts[posList[posKeys[0]]].listWidget.setFocus()#setExpanded(True)
else:
self.homeContexts[posList[posKeys[0]]].toggleListButton.setFocus()
|
SteveG/tracks-queue
|
src/pagecontexts.py
|
Python
|
gpl-2.0
| 9,901
|
import sys
sys.path.insert(0, '..')
import os
import glob
import unittest
os.chdir(os.path.dirname(__file__))
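# Collect every Test*.py module in this directory, skipping this runner itself.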
tests_names = []
for test in glob.glob('Test*.py'):
if test.endswith('TestAll.py'):
continue
tests_names.append(test[:-3])
loader = unittest.defaultTestLoader
suites = loader.loadTestsFromNames(tests_names)
unittest.TextTestRunner(verbosity=2).run(suites)
|
mbudde/mail-indicator
|
tests/TestAll.py
|
Python
|
gpl-3.0
| 386
|
# -*- coding: utf-8 -*-
from dateutil import parser
import logging
import gzip
import os
from visir_slurper.settings import DATA_DIR
from scrapy.utils.serialize import ScrapyJSONEncoder
import json
logger = logging.getLogger(__name__)
_encoder = ScrapyJSONEncoder()
class VisirSlurperSaveArticleByDate(object):
def __init__(self):
pass
def process_item(self, item, spider):
date_parsed = parser.parse(item["date_published"])
directory = os.path.join(DATA_DIR,
date_parsed.strftime("%Y"),
date_parsed.strftime("%m"),
date_parsed.strftime("%d")
)
json_filename = os.path.join(directory, item["id"] + ".json")
txt_filename = os.path.join(directory, item["id"] + ".txt")
raw_filename = os.path.join(directory, item["id"] + "-html.gz")
if not os.path.exists(directory):
os.makedirs(directory)
with open(txt_filename, "w") as f:
f.write(item["article_text"].encode("utf-8"))
with gzip.open(raw_filename, "w") as f:
f.write(item["body"].encode("utf-8"))
# store the article_text
article_text = item["article_text"]
# store the body
body = item["body"]
        # pop article text and body from the item so they are not written to the JSON
item.pop("article_text", None)
item.pop("body", None)
with open(json_filename, "w") as f:
f.write(_encoder.encode(item))
item["article_text"] = article_text
item["body"] = body
logger.debug("Saved {}".format(item["headline"].encode("utf-8")))
return item
class VisirSlurperSaveAuthorArticles(object):
def process_item(self, item, spider):
author = item['author']
filename = author.replace(" ", "_") + ".csv"
filename = filename.lower()
directory = os.path.join(DATA_DIR,
"authors")
filename = os.path.join(directory, filename)
if not os.path.exists(directory):
os.makedirs(directory)
if os.path.exists(filename):
with open(filename, "ab") as f:
f.write(item["id"] + "\n")
else:
with open(filename, "wb") as f:
f.write(item["author"].encode("utf-8") + "\n")
f.write(item["id"] + "\n")
return item
def close_spider(self, spider):
# Uncomment to save a zipped archive of the data folder on exit
# import shutil
# import datetime
# zip_filename = os.path.join(DATA_DIR, str(datetime.datetime.now().strftime("%Y-%d-%m-%H-%M")))
# shutil.make_archive(zip_filename, "zip", DATA_DIR)
# logger.info("Zipped data to %s" % zip_filename)
pass
|
gogn-in/visir_slurper
|
visir_slurper/visir_slurper/pipelines.py
|
Python
|
mit
| 2,870
|
"""LomLobot configuration data."""
from beem.config import BotConfig
class LomLobotConfig(BotConfig):
"""Handle configuration data loading for LomLobot."""
def check_twitch(self):
"""Check the 'twitch' table in the TOML config."""
if not self.get("twitch"):
self.error("The twitch table is undefined")
self.require_table_fields("twitch", self.twitch,
["hostname", "port", "nick", "password",
"message_limit", "moderator_message_limit",
"message_timeout", "max_chat_idle",
"request_expire_time"])
if self.twitch.get("watch_username"):
self.twitch["max_watched_subscribers"] = 1
self.twitch["max_chat_idle"] = float("inf")
self.twitch["request_expire_time"] = float("inf")
return
self.require_table_fields("twitch", self.twitch,
["max_channels", "max_chat_idle",
"request_expire_time"])
def check_webtiles(self):
"""Check the 'webtiles' table in the TOML config."""
if not self.get("webtiles") or not self.webtiles.get("enabled"):
return
webtiles = self.webtiles
if not webtiles.get("servers"):
self.error("In webtiles table, servers table undefined")
for i, server in enumerate(webtiles["servers"]):
self.require_table_fields("entry {} of webtiles.servers".format(
i + 1), server, ["name", "url", "protocol_version"])
if server.get("username") and not server.get("password"):
self.error("In entry {} of webtiles.servers table, username "
"defined with no password field".format(i + 1))
if (self.twitch.get("watch_username")
and not webtiles.get("watch_username")):
self.error("The watch_username field is set in the twitch table, "
"but in the webtiles table watch_username is undefined")
if (webtiles.get("reminder_text")
and not webtiles.get("reminder_period")):
self.error("In webtiles table, reminder_text defined but "
"reminder_period undefined")
def load(self):
"""Read the main TOML configuration data from self.path and check that
the configuration is valid."""
super().load()
if not self.get("db_file"):
self.error("Field db_file undefined.")
self.check_twitch()
self.check_dcss()
self.check_webtiles()
|
gammafunk/lomlobot
|
lomlobot/config.py
|
Python
|
gpl-2.0
| 2,686
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-01-18 01:21
from __future__ import unicode_literals
import django.db.models.deletion
import jsonfield.fields
import morango.utils.uuids
from django.conf import settings
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
initial = True
dependencies = [("kolibriauth", "0007_auto_20171226_1125")]
operations = [
migrations.CreateModel(
name="Lesson",
fields=[
(
"id",
morango.utils.uuids.UUIDField(
editable=False, primary_key=True, serialize=False
),
),
(
"_morango_dirty_bit",
models.BooleanField(default=True, editable=False),
),
("_morango_source_id", models.CharField(editable=False, max_length=96)),
(
"_morango_partition",
models.CharField(editable=False, max_length=128),
),
("name", models.CharField(max_length=50)),
(
"description",
models.CharField(blank=True, default="", max_length=200),
),
("resources", jsonfield.fields.JSONField(blank=True, default=[])),
("is_active", models.BooleanField(default=False)),
("is_archived", models.BooleanField(default=False)),
(
"collection",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="lessons",
to="kolibriauth.Collection",
),
),
(
"created_by",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="lessons_created",
to="kolibriauth.FacilityUser",
),
),
(
"dataset",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="kolibriauth.FacilityDataset",
),
),
],
options={"abstract": False},
),
migrations.CreateModel(
name="LessonAssignment",
fields=[
(
"id",
morango.utils.uuids.UUIDField(
editable=False, primary_key=True, serialize=False
),
),
(
"_morango_dirty_bit",
models.BooleanField(default=True, editable=False),
),
("_morango_source_id", models.CharField(editable=False, max_length=96)),
(
"_morango_partition",
models.CharField(editable=False, max_length=128),
),
(
"assigned_by",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="assigned_lessons",
to="kolibriauth.FacilityUser",
),
),
(
"collection",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="assigned_lessons",
to="kolibriauth.Collection",
),
),
(
"dataset",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="kolibriauth.FacilityDataset",
),
),
(
"lesson",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="assigned_groups",
to="lessons.Lesson",
),
),
],
options={"abstract": False},
),
]
|
lyw07/kolibri
|
kolibri/core/lessons/migrations/0001_initial.py
|
Python
|
mit
| 4,431
|
#
# Test Cases for the ShareBackendClassic Class
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
#
from swat.tests import *
from pylons import config
from swat.controllers import share
from samba import param
import random
class MockShareParams(object):
def create_backend_object(self, params=None):
        config_file = config["backend.testfiles"] + config["backend.classic.testfile"]
        samba_lp = param.LoadParm()
        samba_lp.load(config_file)
if params is None:
params = {}
params["share_path"] = "/usr/local/path"
params["share_hosts_allow"] = "a,b,c,d"
return share.ShareBackendClassic(samba_lp, params)
class TestShareBackendClassic(TestController):
def setUp(self):
self.mock = MockShareParams()
def tearDown(self):
        backend = self.mock.create_backend_object()
        for share in backend.get_share_list():
            backend.delete(share.get_share_name())
def test_crud(self):
        self.test_backend = self.mock.create_backend_object()
name = "TestShare"
# Add a New Share With Name
self.__add(name)
self.__add_existing(name)
# Edit Some Parameters
self.__edit(name)
# Copy The Share
self.__copy(name)
self.__many_copies(name)
# Change the Share Name
new_name = "TestShareRename"
self.__rename(new_name, name)
# Restore the Share Name
self.__rename(name, new_name)
# Test Existance
self.__exists(name)
self.__exists_ghost(new_name)
# Test SambaShare
self.__get_share(name)
# Delete this Share
self.__many_deletes()
self.__delete(name)
def __exists(self, name):
self.assertEqual(self.test_backend.share_name_exists(name), True)
def __exists_ghost(self, name):
self.assertEqual(self.test_backend.share_name_exists(name), False)
def __get_share(self, name):
        tmp_backend = self.mock.create_backend_object()
share = tmp_backend.get_share_by_name(name)
self.assertNotEqual(share, None)
# Non Existing Attribute
self.assertEqual(share.get("does-not-exist"), None)
# Existing Attribute
self.assertEqual(share.get_share_name(), name)
def __add_existing(self, name):
is_new = True
self.assertEqual(self.test_backend.store(name, is_new), False)
def __add(self, name):
is_new = True
self.assertEqual(self.test_backend.store(name, is_new), True)
self.assertEqual(self.test_backend.share_name_exists(name), True)
# Must have the Name attribute
        tmp_backend = self.mock.create_backend_object()
share = tmp_backend.get_share_by_name(name)
self.assertNotEqual(share, None)
self.assertEqual(share.get_share_name(), name)
def __edit(self, name):
params = {}
params["share_path"] = "/usr/local/new-path"
params["share_hosts_allow"] = "these, params, are, new"
# Recreate the Backend Object with a new Parameters List
        tmp_backend = self.mock.create_backend_object(params)
is_new = False
self.assertEqual(tmp_backend.share_name_exists(name), True)
self.assertEqual(tmp_backend.store(name, is_new), True)
self.assertEqual(tmp_backend.share_name_exists(name), True)
# Check if parameters were well inserted
# @see fixme in ShareBackend.get_share_by_name()
        tmp_backend = self.mock.create_backend_object()
share = tmp_backend.get_share_by_name(name)
self.assertNotEqual(share, None)
self.assertEqual(share.get_share_name(), name)
self.assertEqual(share.get("path"), params["share_path"])
share_hosts_allow_list = ['these', 'params', 'are', 'new']
self.assertEqual(share.get("hosts allow"), share_hosts_allow_list)
def __rename(self, name, old_name):
is_new = False
self.assertEqual(self.test_backend.share_name_exists(old_name), True)
self.assertEqual(self.test_backend.store(name, is_new, old_name), True)
self.assertEqual(self.test_backend.share_name_exists(name), True)
def __copy(self, name):
self.assertEqual(self.test_backend.copy(name), True)
def __many_copies(self, name):
for i in range(1, 5):
self.__copy(name)
def __delete(self, name):
self.assertEqual(self.test_backend.share_name_exists(name), True)
self.assertEqual(self.test_backend.delete(name), True)
self.assertEqual(self.test_backend.share_name_exists(name), False)
def __many_deletes(self):
        for name in ['copy of copy of TestShare', 'copy of TestShare',
                     'copy of copy of copy of TestShare']:
            self.__delete(name)
|
rvelhote/GSoC-SWAT
|
swat/tests/test_sharebackendclassic.py
|
Python
|
gpl-3.0
| 5,698
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for training.moving_averages when using a DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import strategy_combinations
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import moving_averages
all_combinations = combinations.combine(
distribution=[
strategy_combinations.default_strategy,
strategy_combinations.one_device_strategy,
strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
],
mode=["graph"])
class AssignMovingAveragesTest(test.TestCase, parameterized.TestCase):
@combinations.generate(all_combinations)
def testReplicaModeWithoutZeroDebias(self, distribution):
replica_id = [0]
def replica_fn():
var = variables.Variable([10.0, 11.0])
val = constant_op.constant([1.0 + replica_id[0], 2.0 - replica_id[0]])
replica_id[0] += 1
decay = 0.25
assign = moving_averages.assign_moving_average(
var, val, decay, zero_debias=False)
return var, assign
with distribution.scope(), self.cached_session() as sess:
var, assign = distribution.extended.call_for_each_replica(replica_fn)
variables.global_variables_initializer().run()
self.assertAllClose([10.0, 11.0], var.eval())
sess.run(distribution.experimental_local_results(assign))
# Mean of val across calls to replica_fn().
average_val = [1.0 + 0.5 * (replica_id[0] - 1),
2.0 - 0.5 * (replica_id[0] - 1)]
val_weight = 1.0 - 0.25
self.assertAllClose(
[10.0 * 0.25 + average_val[0] * val_weight,
11.0 * 0.25 + average_val[1] * val_weight],
var.eval())
@combinations.generate(all_combinations)
def testReplicaMode(self, distribution):
replica_id = [0]
def replica_fn():
var = variables.Variable([0.0, 0.0])
val = constant_op.constant([1.0 + replica_id[0], 2.0 - replica_id[0]])
replica_id[0] += 1
decay = 0.25
assign = moving_averages.assign_moving_average(var, val, decay)
return var, assign.op
with distribution.scope(), self.cached_session() as sess:
var, assign_op = distribution.extended.call_for_each_replica(replica_fn)
variables.global_variables_initializer().run()
self.assertAllClose([0.0, 0.0], var.eval())
sess.run(distribution.experimental_local_results(assign_op))
# Mean of val across calls to replica_fn().
average_val = [1.0 + 0.5 * (replica_id[0] - 1),
2.0 - 0.5 * (replica_id[0] - 1)]
self.assertAllClose(average_val, var.eval())
@combinations.generate(all_combinations)
def testCrossDeviceWithoutZeroDebias(self, distribution):
with distribution.scope(), self.cached_session() as sess:
var = variables.Variable([10.0, 11.0])
val = constant_op.constant([1.0, 2.0])
decay = 0.25
# NOTE(josh11b): We currently generate an error if val is a PerReplica
# value.
assign = moving_averages.assign_moving_average(
var, val, decay, zero_debias=False)
variables.global_variables_initializer().run()
self.assertAllClose([10.0, 11.0], var.eval())
sess.run(assign)
average_val = [1.0, 2.0]
val_weight = 1.0 - 0.25
self.assertAllClose(
[10.0 * 0.25 + average_val[0] * val_weight,
11.0 * 0.25 + average_val[1] * val_weight],
var.eval())
# Also try assign.op.
sess.run(assign.op)
orig_weight = 0.25 * 0.25
val_weight = 1.0 - orig_weight
self.assertAllClose(
[10.0 * orig_weight + average_val[0] * val_weight,
11.0 * orig_weight + average_val[1] * val_weight],
var.eval())
@combinations.generate(all_combinations)
def testCrossDevice(self, distribution):
with distribution.scope(), self.cached_session() as sess:
var = variables.Variable([0.0, 0.0])
val = array_ops.placeholder(dtypes.float32)
decay = 0.25
# NOTE(josh11b): We currently generate an error if val is a PerReplica
# value.
assign = moving_averages.assign_moving_average(var, val, decay)
variables.global_variables_initializer().run()
self.assertAllClose([0.0, 0.0], var.eval())
sess.run(assign, feed_dict={val: [1.0, 2.0]})
self.assertAllClose([1.0, 2.0], var.eval())
# Also try assign.op.
sess.run(assign.op, feed_dict={val: [10.0, 0.0]})
self.assertAllClose(
[(1.0 * 0.25 + 10.0) / (1.0 * 0.25 + 1.0),
(2.0 * 0.25 + 0.0) / (1.0 * 0.25 + 1.0)],
var.eval())
@combinations.generate(all_combinations)
def testAssignVariable(self, distribution):
def replica_fn():
var = variables.Variable([10.0, 11.0])
      # Here we check the case where the input value is itself a Variable.
val = variables.Variable([1., 2.])
decay = 0.25
assign = moving_averages.assign_moving_average(
var, val, decay, zero_debias=False)
return var, assign
with distribution.scope(), self.cached_session() as sess:
var, assign = distribution.extended.call_for_each_replica(replica_fn)
variables.global_variables_initializer().run()
self.assertAllClose([10.0, 11.0], var.eval())
sess.run(distribution.experimental_local_results(assign))
self.assertAllClose(
[10 * 0.25 + 1. * (1 - 0.25), 11 * 0.25 + 2. * (1 - 0.25)],
var.eval())
if __name__ == "__main__":
test.main()
|
kevin-coder/tensorflow-fork
|
tensorflow/python/distribute/moving_averages_test.py
|
Python
|
apache-2.0
| 6,514
|
from cwpoliticl.extensions.base_parser import BaseParser
from cwpoliticl.items import CacheItem, WDPost
class DeccanchronicleParser(BaseParser):
detail_root_selector = '//*[@class="story-main"]'
page_selector_dict = {
"title": '{}/h1/span/text()'.format(detail_root_selector),
"image": '{}/*[@class="story-body"]/*[@class="cover"]/img/@src'.format(detail_root_selector),
"content": '{}/*[@class="story-body"]/*[@id="storyBody"]/p/text()'.format(detail_root_selector),
"tags": '{}/*[@class="story-body"]/*[@class="articleTags"]/a/text()'.format(detail_root_selector),
}
def __init__(self):
from cwpoliticl.scraped_websites import WebsiteTypes
self.url_from = WebsiteTypes.deccanchronicle.value
super(DeccanchronicleParser, self).__init__()
def parse_paginate(self, url, hxs, cache_db, history_db):
select_block = '//*[@class="col-sm-12 noPadding noMargin"]/*[@class="col-sm-12 SunChNewListing"]'
self._parse_block_for_pagination(url, hxs, cache_db, history_db, select_block)
def _parse_block_for_pagination(self, url, hxs, cache_db, history_db, select_block):
links = hxs.xpath(select_block).extract()
for idx, link in enumerate(links):
href_selector = '{}[{}]/*[@class="col-sm-4 ThumbImg"]/a/@href'.format(select_block, (idx + 1))
thumbnail_selector = '{}[{}]/*[@class="col-sm-4 ThumbImg"]/a/img/@data-src'.format(select_block, (idx + 1))
href = self.get_value_with_urljoin(hxs, href_selector, url)
            if history_db.check_history_exist(href):  # If the link already exists in the history database, ignore it.
continue
thumbnail_src = self.get_value_response(hxs, thumbnail_selector)
cache_db.save_cache(CacheItem.get_default(url=href, thumbnail_url=thumbnail_src, url_from=self.url_from))
def parse(self, url, hxs, wd_rpc, thumbnail_url, access_denied_cookie):
title = self.get_value_response(hxs, self.page_selector_dict['title'])
image_src = self.get_value_response(hxs, self.page_selector_dict['image'])
content = self.get_all_value_response(hxs, self.page_selector_dict['content'])
tags = hxs.xpath(self.page_selector_dict['tags']).extract()
item = WDPost.get_default(url, self.url_from, title, image_src, thumbnail_url, content, tags,
access_denied_cookie=access_denied_cookie)
post_id = wd_rpc.post_to_wd(item)
return item
|
trujunzhang/djzhang-targets
|
cwpoliticl/cwpoliticl/extensions/deccanchronicle_parser.py
|
Python
|
mit
| 2,532
|
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from selvbetjening.sadmin2 import menu
from selvbetjening.sadmin2.decorators import sadmin_prerequisites
from selvbetjening.sadmin2.forms import UserForm, PasswordForm
from selvbetjening.sadmin2.views.generic import generic_create_view
@sadmin_prerequisites
def user_change(request, user_pk):
user = get_object_or_404(get_user_model(), pk=user_pk)
context = {
'sadmin2_menu_main_active': 'userportal',
'sadmin2_breadcrumbs_active': 'user',
'sadmin2_menu_tab': menu.sadmin2_menu_tab_user,
'sadmin2_menu_tab_active': 'user',
'user': user
}
return generic_create_view(request,
UserForm,
reverse('sadmin2:user', kwargs={'user_pk': user.pk}),
message_success=_('User updated'),
context=context,
instance=user)
@sadmin_prerequisites
def user_password(request, user_pk):
user = get_object_or_404(get_user_model(), pk=user_pk)
context = {
'sadmin2_menu_main_active': 'userportal',
'sadmin2_breadcrumbs_active': 'user_password',
'sadmin2_menu_tab': menu.sadmin2_menu_tab_user,
'sadmin2_menu_tab_active': 'password',
'user': user
}
return generic_create_view(request,
PasswordForm,
redirect_success_url=reverse('sadmin2:user_password', kwargs={'user_pk': user.pk}),
message_success=_('Password updated'),
context=context,
instance=user)
|
animekita/selvbetjening
|
selvbetjening/sadmin2/views/user.py
|
Python
|
mit
| 1,851
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
NUMERALS = "Zv0w2x4y6z8AaBcCeDgEiFkGmHoIqJsKuL3M7NbOfPjQnRrS1T9UhVpW5XlYdt"
def rebase(num, numerals=NUMERALS):
base = len(numerals)
left_digits = num // base
if left_digits == 0:
return numerals[num % base]
else:
return rebase(left_digits, numerals) + numerals[num % base]
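# Worked example of the base-62 encoding above (values follow directly from
# NUMERALS, whose first two symbols are 'Z' and 'v'):
#   rebase(0)  -> 'Z'
#   rebase(1)  -> 'v'
#   rebase(62) -> 'vZ'   (62 // 62 == 1 -> 'v', 62 % 62 == 0 -> 'Z')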
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
        for page in orm.Page.objects.all():
            page.short_url_id = rebase(page.id)
            page.save()
def backwards(self, orm):
"Write your backwards methods here."
        for page in orm.Page.objects.all():
            page.short_url_id = ''
            page.save()
models = {
'api.page': {
'Meta': {'object_name': 'Page'},
'html': ('django.db.models.fields.TextField', [], {'max_length': '10000'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'original_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'short_url_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10', 'blank': 'True'})
}
}
complete_apps = ['api']
symmetrical = True
|
brianloveswords/webpagemaker
|
webpagemaker/api/migrations/0003_create_short_url_ids.py
|
Python
|
mpl-2.0
| 1,419
|
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = 'dev'
#
# Third app config
#
# Django debug toolbar
# INSTALLED_APPS.append('debug_toolbar')
# MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
# INTERNAL_IPS = ['127.0.0.1']
# Sorl Thumbnail
THUMBNAIL_DEBUG = DEBUG
|
mittya/duoclub
|
duoclub/duoclub/settings/dev.py
|
Python
|
mit
| 326
|
input = """
c num blocks = 1
c num vars = 100
c minblockids[0] = 1
c maxblockids[0] = 100
p cnf 100 415
-71 -62 -94 0
-33 -74 93 0
16 -58 -2 0
-65 -69 56 0
12 -46 -68 0
6 14 7 0
-39 -31 -87 0
-47 -71 75 0
11 -85 -8 0
-49 -64 63 0
-8 -1 15 0
-21 -60 -40 0
-68 -71 -85 0
-68 49 -55 0
51 17 -40 0
-51 -82 -59 0
-53 45 -41 0
85 -44 34 0
-12 46 -66 0
100 -19 -93 0
-75 -32 82 0
78 -31 70 0
26 -65 -74 0
-57 88 -51 0
18 94 48 0
-71 62 -83 0
9 -91 -36 0
15 -66 78 0
41 -71 42 0
99 -70 61 0
-15 59 39 0
24 -10 -99 0
2 -52 -93 0
-18 -42 74 0
40 46 58 0
-89 99 13 0
-2 20 22 0
70 29 -92 0
-52 -26 20 0
-96 86 -2 0
12 -73 -55 0
-58 89 16 0
-61 -78 -32 0
89 -47 17 0
71 16 67 0
-93 -22 -30 0
-91 -14 95 0
-53 -27 48 0
63 2 -22 0
46 -13 -99 0
-21 -98 -23 0
23 93 68 0
46 73 -11 0
62 -84 83 0
-71 -40 4 0
-59 -35 64 0
26 -35 -98 0
90 -78 66 0
-53 -50 79 0
64 33 62 0
64 2 39 0
32 -87 -19 0
-41 -12 13 0
21 -43 -98 0
66 96 -72 0
37 87 -47 0
65 22 61 0
90 -63 32 0
100 2 -52 0
-41 77 -5 0
100 -71 -47 0
-72 30 95 0
29 -25 94 0
17 -93 47 0
-23 -13 39 0
64 76 43 0
73 17 63 0
2 91 31 0
36 28 63 0
55 39 93 0
77 72 -80 0
-64 35 -28 0
41 -58 -72 0
-85 98 -99 0
-62 91 15 0
-71 -99 39 0
-12 -78 47 0
59 23 -50 0
49 69 99 0
87 -71 77 0
13 35 -43 0
-12 -49 50 0
-18 -62 -94 0
-60 1 57 0
9 -74 -91 0
-36 -90 -82 0
-2 31 10 0
-65 -90 -59 0
64 -40 27 0
10 84 57 0
71 -3 31 0
40 78 -86 0
-94 28 -37 0
61 100 -59 0
1 22 8 0
27 -76 94 0
-94 86 80 0
57 88 -12 0
36 41 86 0
-82 -62 -88 0
26 -27 -15 0
27 59 -98 0
38 69 14 0
-96 -13 8 0
-46 -97 74 0
81 -45 21 0
33 -88 17 0
35 5 -4 0
90 -83 89 0
65 36 13 0
25 -52 -65 0
-58 63 -10 0
85 -19 99 0
40 69 15 0
41 -91 39 0
71 -86 47 0
-19 30 -67 0
54 33 12 0
-32 24 -99 0
-40 -2 -58 0
-60 30 12 0
-46 85 -33 0
-88 74 22 0
42 46 -30 0
35 86 -77 0
-49 94 35 0
-91 81 -74 0
85 44 63 0
45 68 -20 0
-75 -40 -32 0
99 7 50 0
41 33 -19 0
22 -33 -70 0
17 84 -70 0
53 15 -5 0
41 49 -65 0
3 46 72 0
-40 2 78 0
21 59 -31 0
-2 69 62 0
68 61 49 0
12 91 21 0
72 55 50 0
55 -7 -40 0
-37 -77 -80 0
-50 39 71 0
14 51 -10 0
87 -78 69 0
-88 -84 45 0
-50 27 43 0
-24 -44 68 0
7 77 75 0
97 -63 -22 0
-65 -16 96 0
-71 -47 69 0
57 94 -4 0
59 -90 -96 0
-75 -90 93 0
-53 84 98 0
87 -78 -9 0
-15 -25 -23 0
19 34 1 0
-47 -45 43 0
69 -88 99 0
92 -84 -24 0
-89 27 -21 0
47 -81 -20 0
57 -46 27 0
20 -92 -69 0
-27 -11 78 0
80 -100 -69 0
88 -91 92 0
73 -9 67 0
45 13 99 0
-7 71 -55 0
-69 -82 7 0
92 83 -64 0
-99 71 -72 0
-47 100 -33 0
-42 100 67 0
-26 -9 4 0
65 -70 23 0
20 -85 -80 0
-13 64 -51 0
-20 78 -16 0
61 65 50 0
19 58 -24 0
-17 -33 -49 0
84 20 63 0
64 -69 -51 0
-12 -66 -14 0
68 -38 30 0
-21 -89 -74 0
69 -95 -60 0
8 21 -42 0
-77 -43 32 0
-92 -26 89 0
54 43 -10 0
-100 -57 36 0
-23 35 -97 0
-23 65 -84 0
57 46 5 0
37 90 -88 0
62 54 71 0
-21 -40 100 0
68 -87 -23 0
68 48 40 0
-32 34 -99 0
-24 36 -16 0
-67 59 -61 0
-82 -45 -55 0
-80 -82 -45 0
-60 -50 -5 0
-62 64 -58 0
-51 -68 57 0
9 -57 41 0
-60 84 59 0
-26 -55 22 0
70 -21 50 0
6 22 -81 0
27 31 -3 0
77 -9 -45 0
-37 29 97 0
28 98 -59 0
-40 64 72 0
66 42 34 0
41 58 83 0
-90 -86 -50 0
-72 -73 49 0
30 23 5 0
90 -24 16 0
-19 -23 98 0
22 44 1 0
33 89 87 0
-11 31 28 0
16 32 91 0
1 86 95 0
-81 -36 -7 0
-70 -92 44 0
39 -61 17 0
74 -21 41 0
-62 -30 -49 0
10 -27 57 0
80 34 57 0
-85 -49 74 0
16 -86 47 0
-42 27 -68 0
-44 15 -64 0
2 -35 -17 0
-24 -4 81 0
-94 33 -47 0
-49 9 -66 0
-49 -97 -4 0
-66 -48 -62 0
22 -65 -23 0
61 -94 -7 0
93 -30 -48 0
-25 39 1 0
26 99 -32 0
76 26 74 0
-15 31 -67 0
-19 -68 26 0
-55 -36 47 0
-20 -65 -64 0
-94 -55 77 0
67 -47 29 0
71 -21 85 0
24 -41 -69 0
-62 71 -18 0
-62 50 -33 0
-47 41 9 0
75 -51 30 0
81 88 -74 0
-96 44 -23 0
-60 -97 -23 0
-66 -24 -10 0
55 41 -76 0
-56 -88 91 0
-97 35 28 0
-41 -96 32 0
77 -28 99 0
5 -82 -24 0
-69 58 86 0
50 66 -11 0
-67 75 -4 0
90 -39 -69 0
6 -86 -69 0
-58 -97 55 0
67 11 -24 0
88 77 97 0
44 -32 -1 0
25 48 18 0
8 -52 18 0
-56 -90 -21 0
60 53 4 0
86 41 23 0
-57 -26 -97 0
40 49 -54 0
23 -31 84 0
-42 51 91 0
5 72 -47 0
-49 42 -64 0
-92 16 -95 0
-68 80 -97 0
-45 -60 -4 0
-92 -53 -86 0
92 -31 86 0
54 -88 -12 0
-34 -46 70 0
42 61 -14 0
56 -98 -52 0
-100 47 53 0
-68 22 -48 0
94 64 -15 0
40 13 -58 0
-71 37 -67 0
-81 18 -38 0
-9 -28 -62 0
51 62 54 0
23 14 63 0
17 100 -77 0
61 7 -60 0
58 -91 49 0
21 55 -32 0
-54 71 49 0
-54 2 -75 0
33 -87 59 0
-44 61 50 0
36 -15 65 0
17 20 -31 0
81 94 -55 0
-87 85 48 0
-64 -4 58 0
52 -11 -93 0
-26 -37 6 0
-97 -73 -48 0
-85 -54 -87 0
87 -75 68 0
-43 -22 -94 0
-79 -68 -62 0
-38 91 -89 0
-68 -78 -88 0
65 83 -56 0
50 -43 -17 0
51 60 -14 0
39 -40 17 0
55 32 -60 0
-94 -50 60 0
66 -5 -14 0
63 -29 -33 0
-40 3 23 0
-28 88 -99 0
53 33 48 0
30 -67 -14 0
25 22 26 0
83 59 -18 0
41 84 -37 0
-33 -54 -19 0
-92 -26 -30 0
-97 -35 -95 0
91 -48 86 0
22 76 29 0
-49 33 52 0
-38 16 81 0
-46 -89 1 0
61 -14 42 0
-18 -98 51 0
4 91 -55 0
-87 65 -17 0
97 88 -37 0
55 53 -60 0
100 -76 4 0
64 -78 -14 0
-63 -53 -62 0
-31 57 -100 0
-89 -93 72 0
63 38 -99 0
-67 -3 42 0
-82 83 32 0
43 -81 -2 0
6 -97 48 0
4 50 -9 0
-91 -65 -94 0
-20 74 -21 0
32 21 -86 0
-69 67 -61 0
1 87 -92 0
-98 -70 -10 0
14 -71 51 0
-60 26 77 0
42 -28 81 0
97 -41 78 0
-26 -10 -74 0
-58 -79 17 0
-30 -6 49 0
-64 23 22 0
-97 95 94 0
-55 -82 -83 0
28 82 55 0
-50 -87 -77 0
92 94 -26 0
-33 20 70 0
57 -21 -23 0
31 -43 74 0
-86 46 -32 0
"""
output = "UNSAT"
|
alviano/aspino
|
tests/sat/Models/c415.100.UNSAT.dimacs.test.py
|
Python
|
apache-2.0
| 5,236
|
"""
.. _ex-vector-mne-solution:
============================================
Plotting the full vector-valued MNE solution
============================================
The source space that is used for the inverse computation defines a set of
dipoles, distributed across the cortex. When visualizing a source estimate, it
is sometimes useful to show the dipole directions in addition to their
estimated magnitude. This can be accomplished by computing a
:class:`mne.VectorSourceEstimate` and plotting it with
:meth:`stc.plot <mne.VectorSourceEstimate.plot>`, which uses
:func:`~mne.viz.plot_vector_source_estimates` under the hood rather than
:func:`~mne.viz.plot_source_estimates`.
It can also be instructive to visualize the actual dipole/activation locations
in 3D space in a glass brain, as opposed to activations imposed on an inflated
surface (as typically done in :meth:`mne.SourceEstimate.plot`), as it allows
you to get a better sense of the underlying source geometry.
"""
# Author: Marijn van Vliet <w.m.vanvliet@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
import mne
from mne.datasets import sample
from mne.minimum_norm import read_inverse_operator, apply_inverse
print(__doc__)
data_path = sample.data_path()
subjects_dir = data_path + '/subjects'
# Read evoked data
fname_evoked = data_path + '/MEG/sample/sample_audvis-ave.fif'
evoked = mne.read_evokeds(fname_evoked, condition=0, baseline=(None, 0))
# Read inverse solution
fname_inv = data_path + '/MEG/sample/sample_audvis-meg-oct-6-meg-inv.fif'
inv = read_inverse_operator(fname_inv)
# Apply inverse solution, set pick_ori='vector' to obtain a
# :class:`mne.VectorSourceEstimate` object
snr = 3.0
lambda2 = 1.0 / snr ** 2
stc = apply_inverse(evoked, inv, lambda2, 'dSPM', pick_ori='vector')
# Use peak getter to move visualization to the time point of the peak magnitude
_, peak_time = stc.magnitude().get_peak(hemi='lh')
###############################################################################
# Plot the source estimate:
# sphinx_gallery_thumbnail_number = 2
brain = stc.plot(
initial_time=peak_time, hemi='lh', subjects_dir=subjects_dir)
# You can save a brain movie with:
# brain.save_movie(time_dilation=20, tmin=0.05, tmax=0.16, framerate=10,
# interpolation='linear', time_viewer=True)
###############################################################################
# Plot the activation in the direction of maximal power for this data:
stc_max, directions = stc.project('pca', src=inv['src'])
# These directions must by design be close to the normals because this
# inverse was computed with loose=0.2:
print('Absolute cosine similarity between source normals and directions: '
f'{np.abs(np.sum(directions * inv["source_nn"][2::3], axis=-1)).mean()}')
brain_max = stc_max.plot(
initial_time=peak_time, hemi='lh', subjects_dir=subjects_dir,
time_label='Max power')
brain_normal = stc.project('normal', inv['src'])[0].plot(
initial_time=peak_time, hemi='lh', subjects_dir=subjects_dir,
time_label='Normal')
###############################################################################
# You can also do this with a fixed-orientation inverse. It looks a lot like
# the result above because the ``loose=0.2`` orientation constraint keeps
# sources close to fixed orientation:
fname_inv_fixed = (
data_path + '/MEG/sample/sample_audvis-meg-oct-6-meg-fixed-inv.fif')
inv_fixed = read_inverse_operator(fname_inv_fixed)
stc_fixed = apply_inverse(
evoked, inv_fixed, lambda2, 'dSPM', pick_ori='vector')
brain_fixed = stc_fixed.plot(
initial_time=peak_time, hemi='lh', subjects_dir=subjects_dir)
|
mne-tools/mne-tools.github.io
|
0.21/_downloads/9de3654ac15b4882b43ef0142cacd42b/plot_vector_mne_solution.py
|
Python
|
bsd-3-clause
| 3,659
|
people = 30
cars = 40
buses = 15
if cars > people:
print "We should take the car."
elif cars < people:
print "We should not take the cars."
else:
print "We can not decide."
if buses > cars:
print "That's too many buses"
elif buses < cars:
print "May be we could take the buses."
else:
print "We can not decide."
if people > buses:
print "Alright, let's just take the buese. "
else:
print "Fine, let's stay home then."
|
auspbro/CodeSnippets
|
Python/LPTHW/ex30.py
|
Python
|
gpl-3.0
| 433
|
'''
common.py - this file is part of S3QL (http://s3ql.googlecode.com)
Copyright (C) 2008-2009 Nikolaus Rath <Nikolaus@rath.org>
This program can be distributed under the terms of the GNU GPLv3.
'''
from __future__ import division, print_function, absolute_import
from functools import wraps
from llfuse import ROOT_INODE
import cPickle as pickle
import hashlib
import logging
import os
import shutil
import stat
import sys
import tempfile
import threading
import time
import traceback
log = logging.getLogger('common')
def setup_logging(options):
root_logger = logging.getLogger()
if root_logger.handlers:
log.debug("Logging already initialized.")
return
stdout_handler = add_stdout_logging(options.quiet)
if hasattr(options, 'log') and options.log:
root_logger.addHandler(options.log)
debug_handler = options.log
else:
debug_handler = stdout_handler
setup_excepthook()
if options.debug:
root_logger.setLevel(logging.DEBUG)
debug_handler.setLevel(logging.NOTSET)
if 'all' not in options.debug:
# Adding the filter to the root logger has no effect.
debug_handler.addFilter(LoggerFilter(options.debug, logging.INFO))
logging.disable(logging.NOTSET)
else:
root_logger.setLevel(logging.INFO)
logging.disable(logging.DEBUG)
return stdout_handler
class LoggerFilter(object):
"""
For use with the logging module as a message filter.
This filter accepts all messages which have at least the specified
priority *or* come from a configured list of loggers.
"""
def __init__(self, acceptnames, acceptlevel):
"""Initializes a Filter object"""
self.acceptlevel = acceptlevel
self.acceptnames = [ x.lower() for x in acceptnames ]
def filter(self, record):
'''Determine if the log message should be printed'''
if record.levelno >= self.acceptlevel:
return True
if record.name.lower() in self.acceptnames:
return True
return False
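# Illustrative use (mirrors how setup_logging() wires it up above): accept
# records at INFO and above, plus any record from a logger named 'fuse',
# regardless of its level:
#
#     handler.addFilter(LoggerFilter(['fuse'], logging.INFO))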
def add_stdout_logging(quiet=False):
'''Add stdout logging handler to root logger'''
root_logger = logging.getLogger()
formatter = logging.Formatter('%(message)s')
handler = logging.StreamHandler()
handler.setFormatter(formatter)
if quiet:
handler.setLevel(logging.WARN)
else:
handler.setLevel(logging.INFO)
root_logger.addHandler(handler)
return handler
RETRY_TIMEOUT = 60 * 60 * 24
def retry(fn):
'''Decorator for retrying a method on some exceptions
If the decorated method raises an exception for which the instance's
`_retry_on(exc)` method is true, the decorated method is called again at
increasing intervals. If this persists for more than *timeout* seconds,
the most-recently caught exception is re-raised.
'''
@wraps(fn)
def wrapped(self, *a, **kw):
interval = 1/50
waited = 0
while True:
try:
return fn(self, *a, **kw)
except Exception as exc:
# Access to protected member ok
#pylint: disable=W0212
if not self._retry_on(exc):
raise
if waited > RETRY_TIMEOUT:
log.error('%s.%s(*): Timeout exceeded, re-raising %r exception',
self.__class__.__name__, fn.__name__, exc)
raise
log.debug('%s.%s(*): trying again after %r exception:',
self.__class__.__name__, fn.__name__, exc)
time.sleep(interval)
waited += interval
if interval < 20*60:
interval *= 2
# False positive
#pylint: disable=E1101
wrapped.__doc__ += '''
This method has been decorated and will automatically recall itself in
increasing intervals for up to s3ql.common.RETRY_TIMEOUT seconds if it raises an
exception for which the instance's `_retry_on` method returns True.
'''
return wrapped
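# Minimal usage sketch for @retry (hypothetical class, not part of this
# module): the decorated method is retried as long as the instance's
# _retry_on() accepts the raised exception.
#
#     class Backend(object):
#         def _retry_on(self, exc):
#             return isinstance(exc, IOError)
#
#         @retry
#         def fetch(self, key):
#             ...  # transient IOErrors here trigger a retry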
def get_seq_no(bucket):
'''Get current metadata sequence number'''
from .backends.common import NoSuchObject
seq_nos = list(bucket.list('s3ql_seq_no_'))
if not seq_nos:
# Maybe list result is outdated
seq_nos = [ 's3ql_seq_no_1' ]
if (seq_nos[0].endswith('.meta')
or seq_nos[0].endswith('.dat')):
raise QuietError('Old file system revision, please run `s3qladm upgrade` first.')
seq_nos = [ int(x[len('s3ql_seq_no_'):]) for x in seq_nos ]
seq_no = max(seq_nos)
# Make sure that object really exists
while ('s3ql_seq_no_%d' % seq_no) not in bucket:
seq_no -= 1
if seq_no == 0:
raise QuietError('No S3QL file system found in bucket.')
while ('s3ql_seq_no_%d' % seq_no) in bucket:
seq_no += 1
seq_no -= 1
# Delete old seq nos
for i in [ x for x in seq_nos if x < seq_no - 10 ]:
try:
del bucket['s3ql_seq_no_%d' % i]
except NoSuchObject:
pass # Key list may not be up to date
return seq_no
def cycle_metadata(bucket):
from .backends.common import NoSuchObject
for i in reversed(range(10)):
try:
bucket.copy("s3ql_metadata_bak_%d" % i, "s3ql_metadata_bak_%d" % (i + 1))
except NoSuchObject:
pass
bucket.copy("s3ql_metadata", "s3ql_metadata_bak_0")
def dump_metadata(ofh, conn):
pickler = pickle.Pickler(ofh, 2)
bufsize = 256
buf = range(bufsize)
tables_to_dump = [('objects', 'id'), ('blocks', 'id'),
('inode_blocks', 'inode, blockno'),
('inodes', 'id'), ('symlink_targets', 'inode'),
('names', 'id'), ('contents', 'parent_inode, name_id'),
('ext_attributes', 'inode, name')]
columns = dict()
for (table, _) in tables_to_dump:
columns[table] = list()
for row in conn.query('PRAGMA table_info(%s)' % table):
columns[table].append(row[1])
pickler.dump((tables_to_dump, columns))
for (table, order) in tables_to_dump:
log.info('Saving %s' % table)
pickler.clear_memo()
i = 0
for row in conn.query('SELECT %s FROM %s ORDER BY %s'
% (','.join(columns[table]), table, order)):
buf[i] = row
i += 1
if i == bufsize:
pickler.dump(buf)
pickler.clear_memo()
i = 0
if i != 0:
pickler.dump(buf[:i])
pickler.dump(None)
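# Stream layout written above and read back by restore_metadata():
#   1. a pickled (tables_to_dump, columns) header tuple
#   2. for each table, pickled row buffers (lists of up to `bufsize` rows),
#      terminated by a pickled None sentinel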
def restore_metadata(ifh, conn):
    # Unpickling is terribly slow if ifh is not a real file object.
if not hasattr(ifh, 'fileno'):
log.info('Caching tables...')
with tempfile.TemporaryFile() as tmp:
shutil.copyfileobj(ifh, tmp)
tmp.seek(0)
return restore_metadata(tmp, conn)
unpickler = pickle.Unpickler(ifh)
(to_dump, columns) = unpickler.load()
create_tables(conn)
for (table, _) in to_dump:
log.info('Loading %s', table)
col_str = ', '.join(columns[table])
val_str = ', '.join('?' for _ in columns[table])
sql_str = 'INSERT INTO %s (%s) VALUES(%s)' % (table, col_str, val_str)
while True:
buf = unpickler.load()
if not buf:
break
for row in buf:
conn.execute(sql_str, row)
conn.execute('ANALYZE')
class QuietError(Exception):
'''
QuietError is the base class for exceptions that should not result
in a stack trace being printed.
It is typically used for exceptions that are the result of the user
supplying invalid input data. The exception argument should be a
string containing sufficient information about the problem.
'''
def __init__(self, msg=''):
super(QuietError, self).__init__()
self.msg = msg
def __str__(self):
return self.msg
def setup_excepthook():
'''Modify sys.excepthook to log exceptions
Also makes sure that exceptions derived from `QuietException`
do not result in stacktraces.
'''
def excepthook(type_, val, tb):
root_logger = logging.getLogger()
if isinstance(val, QuietError):
root_logger.error(val.msg)
else:
root_logger.error('Uncaught top-level exception',
exc_info=(type_, val, tb))
sys.excepthook = excepthook
def inode_for_path(path, conn):
"""Return inode of directory entry at `path`
Raises `KeyError` if the path does not exist.
"""
from .database import NoSuchRowError
if not isinstance(path, bytes):
raise TypeError('path must be of type bytes')
# Remove leading and trailing /
path = path.lstrip(b"/").rstrip(b"/")
# Traverse
inode = ROOT_INODE
for el in path.split(b'/'):
try:
inode = conn.get_val("SELECT inode FROM contents_v WHERE name=? AND parent_inode=?",
(el, inode))
except NoSuchRowError:
raise KeyError('Path %s does not exist' % path)
return inode
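# e.g. inode_for_path(b'/lost+found', conn) returns the inode of /lost+found,
# or raises KeyError if no such directory entry exists.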
def get_path(id_, conn, name=None):
"""Return a full path for inode `id_`.
If `name` is specified, it is appended at the very end of the
path (useful if looking up the path for file name with parent
inode).
"""
if name is None:
path = list()
else:
if not isinstance(name, bytes):
raise TypeError('name must be of type bytes')
path = [ name ]
maxdepth = 255
while id_ != ROOT_INODE:
# This can be ambiguous if directories are hardlinked
(name2, id_) = conn.get_row("SELECT name, parent_inode FROM contents_v "
"WHERE inode=? LIMIT 1", (id_,))
path.append(name2)
maxdepth -= 1
if maxdepth == 0:
            raise RuntimeError('Failed to resolve name "%s" at inode %d to path'
                               % (name, id_))
path.append(b'')
path.reverse()
return b'/'.join(path)
def _escape(s):
'''Escape '/', '=' and '\0' in s'''
s = s.replace('=', '=3D')
s = s.replace('/', '=2F')
s = s.replace('\0', '=00')
return s
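# '=' is escaped first so the '=XX' markers introduced for '/' and '\0' are
# never themselves re-escaped. Example:
#   _escape('s3://bucket/prefix') -> 's3:=2F=2Fbucket=2Fprefix'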
def get_bucket_cachedir(storage_url, cachedir):
if not os.path.exists(cachedir):
os.mkdir(cachedir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
return os.path.join(cachedir, _escape(storage_url))
# Name and inode of the special s3ql control file
CTRL_NAME = b'.__s3ql__ctrl__'
CTRL_INODE = 2
class ExceptionStoringThread(threading.Thread):
def __init__(self):
super(ExceptionStoringThread, self).__init__()
self._exc_info = None
self._joined = False
def run_protected(self):
pass
def run(self):
try:
self.run_protected()
except:
# This creates a circular reference chain
self._exc_info = sys.exc_info()
def join_get_exc(self):
self._joined = True
self.join()
return self._exc_info
def join_and_raise(self):
'''Wait for the thread to finish, raise any occurred exceptions'''
self._joined = True
if self.is_alive():
self.join()
if self._exc_info is not None:
# Break reference chain
exc_info = self._exc_info
self._exc_info = None
raise EmbeddedException(exc_info, self.name)
def __del__(self):
if not self._joined:
raise RuntimeError("ExceptionStoringThread instance was destroyed "
"without calling join_and_raise()!")
class AsyncFn(ExceptionStoringThread):
def __init__(self, fn, *args, **kwargs):
super(AsyncFn, self).__init__()
self.target = fn
self.args = args
self.kwargs = kwargs
def run_protected(self):
self.target(*self.args, **self.kwargs)
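# Typical AsyncFn use (upload and block are placeholders): run a callable in
# a background thread, then surface any exception in the calling thread:
#
#     t = AsyncFn(upload, block)
#     t.start()
#     ...                  # do other work
#     t.join_and_raise()   # re-raises as EmbeddedException on failure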
class EmbeddedException(Exception):
'''Encapsulates an exception that happened in a different thread
'''
def __init__(self, exc_info, threadname):
super(EmbeddedException, self).__init__()
self.exc_info = exc_info
self.threadname = threadname
log.error('Thread %s terminated with exception:\n%s',
self.threadname, ''.join(traceback.format_exception(*self.exc_info)))
def __str__(self):
return ''.join(['caused by an exception in thread %s.\n' % self.threadname,
'Original/inner traceback (most recent call last): \n' ] +
traceback.format_exception(*self.exc_info))
def sha256_fh(fh):
fh.seek(0)
# Bogus error about hashlib not having a sha256 member
#pylint: disable=E1101
sha = hashlib.sha256()
while True:
buf = fh.read(128 * 1024)
if not buf:
break
sha.update(buf)
return sha.digest()
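# Example: hash a file in 128 KiB chunks without loading it fully into memory
# (path is any readable file):
#   with open(path, 'rb') as fh:
#       digest = sha256_fh(fh)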
def init_tables(conn):
# Insert root directory
timestamp = time.time() - time.timezone
conn.execute("INSERT INTO inodes (id,mode,uid,gid,mtime,atime,ctime,refcount) "
"VALUES (?,?,?,?,?,?,?,?)",
(ROOT_INODE, stat.S_IFDIR | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
| stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH,
os.getuid(), os.getgid(), timestamp, timestamp, timestamp, 1))
# Insert control inode, the actual values don't matter that much
conn.execute("INSERT INTO inodes (id,mode,uid,gid,mtime,atime,ctime,refcount) "
"VALUES (?,?,?,?,?,?,?,?)",
(CTRL_INODE, stat.S_IFIFO | stat.S_IRUSR | stat.S_IWUSR,
0, 0, timestamp, timestamp, timestamp, 42))
# Insert lost+found directory
inode = conn.rowid("INSERT INTO inodes (mode,uid,gid,mtime,atime,ctime,refcount) "
"VALUES (?,?,?,?,?,?,?)",
(stat.S_IFDIR | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR,
os.getuid(), os.getgid(), timestamp, timestamp, timestamp, 1))
name_id = conn.rowid('INSERT INTO names (name, refcount) VALUES(?,?)',
(b'lost+found', 1))
conn.execute("INSERT INTO contents (name_id, inode, parent_inode) VALUES(?,?,?)",
(name_id, inode, ROOT_INODE))
def create_tables(conn):
# Table of storage objects
# Refcount is included for performance reasons
conn.execute("""
CREATE TABLE objects (
id INTEGER PRIMARY KEY AUTOINCREMENT,
refcount INT NOT NULL,
compr_size INT
)""")
# Table of known data blocks
# Refcount is included for performance reasons
conn.execute("""
CREATE TABLE blocks (
id INTEGER PRIMARY KEY,
hash BLOB(16) UNIQUE,
refcount INT NOT NULL,
size INT NOT NULL,
obj_id INTEGER NOT NULL REFERENCES objects(id)
)""")
# Table with filesystem metadata
# The number of links `refcount` to an inode can in theory
# be determined from the `contents` table. However, managing
# this separately should be significantly faster (the information
# is required for every getattr!)
conn.execute("""
CREATE TABLE inodes (
-- id has to specified *exactly* as follows to become
-- an alias for the rowid.
id INTEGER PRIMARY KEY,
uid INT NOT NULL,
gid INT NOT NULL,
mode INT NOT NULL,
mtime REAL NOT NULL,
atime REAL NOT NULL,
ctime REAL NOT NULL,
refcount INT NOT NULL,
size INT NOT NULL DEFAULT 0,
rdev INT NOT NULL DEFAULT 0,
locked BOOLEAN NOT NULL DEFAULT 0,
-- id of first block (blockno == 0)
-- since most inodes have only one block, we can make the db 20%
-- smaller by not requiring a separate inode_blocks row for these
-- cases.
block_id INT REFERENCES blocks(id)
)""")
# Further Blocks used by inode (blockno >= 1)
conn.execute("""
CREATE TABLE inode_blocks (
inode INTEGER NOT NULL REFERENCES inodes(id),
blockno INT NOT NULL,
block_id INTEGER NOT NULL REFERENCES blocks(id),
PRIMARY KEY (inode, blockno)
)""")
# Symlinks
conn.execute("""
CREATE TABLE symlink_targets (
inode INTEGER PRIMARY KEY REFERENCES inodes(id),
target BLOB NOT NULL
)""")
# Names of file system objects
conn.execute("""
CREATE TABLE names (
id INTEGER PRIMARY KEY,
name BLOB NOT NULL,
refcount INT NOT NULL,
UNIQUE (name)
)""")
# Table of filesystem objects
# rowid is used by readdir() to restart at the correct position
conn.execute("""
CREATE TABLE contents (
rowid INTEGER PRIMARY KEY AUTOINCREMENT,
name_id INT NOT NULL REFERENCES names(id),
inode INT NOT NULL REFERENCES inodes(id),
parent_inode INT NOT NULL REFERENCES inodes(id),
UNIQUE (parent_inode, name_id)
)""")
# Extended attributes
conn.execute("""
CREATE TABLE ext_attributes (
inode INTEGER NOT NULL REFERENCES inodes(id),
name BLOB NOT NULL,
value BLOB NOT NULL,
PRIMARY KEY (inode, name)
)""")
    # Shortcuts
conn.execute("""
CREATE VIEW contents_v AS
SELECT * FROM contents JOIN names ON names.id = name_id
""")
conn.execute("""
CREATE VIEW inode_blocks_v AS
SELECT * FROM inode_blocks
UNION
SELECT id as inode, 0 as blockno, block_id FROM inodes WHERE block_id IS NOT NULL
""")
|
drewlu/ossql
|
src/s3ql/common.py
|
Python
|
gpl-3.0
| 18,254
|
"""Support for Plaato devices."""
from datetime import timedelta
import logging
from aiohttp import web
from pyplaato.models.airlock import PlaatoAirlock
from pyplaato.plaato import (
ATTR_ABV,
ATTR_BATCH_VOLUME,
ATTR_BPM,
ATTR_BUBBLES,
ATTR_CO2_VOLUME,
ATTR_DEVICE_ID,
ATTR_DEVICE_NAME,
ATTR_OG,
ATTR_SG,
ATTR_TEMP,
ATTR_TEMP_UNIT,
ATTR_VOLUME_UNIT,
Plaato,
PlaatoDeviceType,
)
import voluptuous as vol
from homeassistant.components import webhook
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_SCAN_INTERVAL,
CONF_TOKEN,
CONF_WEBHOOK_ID,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
VOLUME_GALLONS,
VOLUME_LITERS,
Platform,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import aiohttp_client
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import (
CONF_DEVICE_NAME,
CONF_DEVICE_TYPE,
CONF_USE_WEBHOOK,
COORDINATOR,
DEFAULT_SCAN_INTERVAL,
DEVICE,
DEVICE_ID,
DEVICE_NAME,
DEVICE_TYPE,
DOMAIN,
PLATFORMS,
SENSOR_DATA,
UNDO_UPDATE_LISTENER,
)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ["webhook"]
SENSOR_UPDATE = f"{DOMAIN}_sensor_update"
SENSOR_DATA_KEY = f"{DOMAIN}.{SENSOR}"
WEBHOOK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_NAME): cv.string,
vol.Required(ATTR_DEVICE_ID): cv.positive_int,
vol.Required(ATTR_TEMP_UNIT): vol.Any(TEMP_CELSIUS, TEMP_FAHRENHEIT),
vol.Required(ATTR_VOLUME_UNIT): vol.Any(VOLUME_LITERS, VOLUME_GALLONS),
vol.Required(ATTR_BPM): cv.positive_int,
vol.Required(ATTR_TEMP): vol.Coerce(float),
vol.Required(ATTR_SG): vol.Coerce(float),
vol.Required(ATTR_OG): vol.Coerce(float),
vol.Required(ATTR_ABV): vol.Coerce(float),
vol.Required(ATTR_CO2_VOLUME): vol.Coerce(float),
vol.Required(ATTR_BATCH_VOLUME): vol.Coerce(float),
vol.Required(ATTR_BUBBLES): cv.positive_int,
},
extra=vol.ALLOW_EXTRA,
)
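# Illustrative payload accepted by WEBHOOK_SCHEMA (values are made up; the
# JSON keys are whatever strings the pyplaato ATTR_* constants resolve to):
#   ATTR_DEVICE_NAME -> "fermenter"   ATTR_DEVICE_ID -> 1
#   ATTR_TEMP_UNIT -> TEMP_CELSIUS    ATTR_VOLUME_UNIT -> VOLUME_LITERS
#   ATTR_BPM -> 12   ATTR_TEMP -> 21.5   ATTR_SG -> 1.042   ATTR_OG -> 1.055
#   ATTR_ABV -> 1.7  ATTR_CO2_VOLUME -> 0.4   ATTR_BATCH_VOLUME -> 20.0
#   ATTR_BUBBLES -> 4021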
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Configure based on config entry."""
hass.data.setdefault(DOMAIN, {})
if entry.data[CONF_USE_WEBHOOK]:
async_setup_webhook(hass, entry)
else:
await async_setup_coordinator(hass, entry)
hass.config_entries.async_setup_platforms(
entry, [platform for platform in PLATFORMS if entry.options.get(platform, True)]
)
return True
@callback
def async_setup_webhook(hass: HomeAssistant, entry: ConfigEntry):
"""Init webhook based on config entry."""
webhook_id = entry.data[CONF_WEBHOOK_ID]
device_name = entry.data[CONF_DEVICE_NAME]
_set_entry_data(entry, hass)
webhook.async_register(
hass, DOMAIN, f"{DOMAIN}.{device_name}", webhook_id, handle_webhook
)
async def async_setup_coordinator(hass: HomeAssistant, entry: ConfigEntry):
"""Init auth token based on config entry."""
auth_token = entry.data[CONF_TOKEN]
device_type = entry.data[CONF_DEVICE_TYPE]
if entry.options.get(CONF_SCAN_INTERVAL):
update_interval = timedelta(minutes=entry.options[CONF_SCAN_INTERVAL])
else:
update_interval = timedelta(minutes=DEFAULT_SCAN_INTERVAL)
coordinator = PlaatoCoordinator(hass, auth_token, device_type, update_interval)
await coordinator.async_config_entry_first_refresh()
_set_entry_data(entry, hass, coordinator, auth_token)
for platform in PLATFORMS:
if entry.options.get(platform, True):
coordinator.platforms.append(platform)
def _set_entry_data(entry, hass, coordinator=None, device_id=None):
device = {
DEVICE_NAME: entry.data[CONF_DEVICE_NAME],
DEVICE_TYPE: entry.data[CONF_DEVICE_TYPE],
DEVICE_ID: device_id,
}
hass.data[DOMAIN][entry.entry_id] = {
COORDINATOR: coordinator,
DEVICE: device,
SENSOR_DATA: None,
UNDO_UPDATE_LISTENER: entry.add_update_listener(_async_update_listener),
}
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
use_webhook = entry.data[CONF_USE_WEBHOOK]
hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()
if use_webhook:
return await async_unload_webhook(hass, entry)
return await async_unload_coordinator(hass, entry)
async def async_unload_webhook(hass: HomeAssistant, entry: ConfigEntry):
"""Unload webhook based entry."""
if entry.data[CONF_WEBHOOK_ID] is not None:
webhook.async_unregister(hass, entry.data[CONF_WEBHOOK_ID])
return await async_unload_platforms(hass, entry, PLATFORMS)
async def async_unload_coordinator(hass: HomeAssistant, entry: ConfigEntry):
"""Unload auth token based entry."""
coordinator = hass.data[DOMAIN][entry.entry_id][COORDINATOR]
return await async_unload_platforms(hass, entry, coordinator.platforms)
async def async_unload_platforms(hass: HomeAssistant, entry: ConfigEntry, platforms):
"""Unload platforms."""
    unloaded = await hass.config_entries.async_unload_platforms(entry, platforms)
if unloaded:
hass.data[DOMAIN].pop(entry.entry_id)
return unloaded
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
async def handle_webhook(hass, webhook_id, request):
"""Handle incoming webhook from Plaato."""
try:
data = WEBHOOK_SCHEMA(await request.json())
except vol.MultipleInvalid as error:
_LOGGER.warning("An error occurred when parsing webhook data <%s>", error)
return
device_id = _device_id(data)
sensor_data = PlaatoAirlock.from_web_hook(data)
async_dispatcher_send(hass, SENSOR_UPDATE, *(device_id, sensor_data))
return web.Response(text=f"Saving status for {device_id}")
def _device_id(data):
"""Return name of device sensor."""
return f"{data.get(ATTR_DEVICE_NAME)}_{data.get(ATTR_DEVICE_ID)}"
class PlaatoCoordinator(DataUpdateCoordinator):
"""Class to manage fetching data from the API."""
def __init__(
self,
hass,
auth_token,
device_type: PlaatoDeviceType,
update_interval: timedelta,
):
"""Initialize."""
self.api = Plaato(auth_token=auth_token)
self.hass = hass
self.device_type = device_type
self.platforms: list[Platform] = []
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=update_interval,
)
async def _async_update_data(self):
"""Update data via library."""
data = await self.api.get_data(
session=aiohttp_client.async_get_clientsession(self.hass),
device_type=self.device_type,
)
return data
|
rohitranjan1991/home-assistant
|
homeassistant/components/plaato/__init__.py
|
Python
|
mit
| 7,162
|
"""Unit tests for cat2cohort."""
import unittest
from bingo import bingo
class TestBingoGenerator(unittest.TestCase):
"""Test methods from bingo."""
def test_bingo_generator_has_default_size(self):
bingo_generator = bingo.BingoGenerator()
expected = pow(bingo.DEFAULT_SIZE, 2)
        self.assertEqual(bingo_generator.size, expected)
def test_bingo_generator_has_given_size(self):
bingo_generator = bingo.BingoGenerator(4)
        self.assertEqual(bingo_generator.size, 16)
def test_select_words_should_have_the_right_size(self):
test_size = 2
bingo_generator = bingo.BingoGenerator(size=test_size)
seed_list = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']
bingo_generator.words = seed_list
selection = bingo_generator.select_words()
        self.assertEqual(len(selection), pow(test_size, 2))
def test_select_words_should_return_words_from_the_seed_list(self):
test_size = 2
bingo_generator = bingo.BingoGenerator(size=test_size)
seed_list = set(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'])
bingo_generator.words = seed_list
selection = set(bingo_generator.select_words())
        self.assertTrue(selection.issubset(seed_list))
def test_select_with_seed_too_small_raise_Exception(self):
with self.assertRaises(ValueError):
test_size = 2
bingo_generator = bingo.BingoGenerator(size=test_size)
seed_list = ['a', 'b', 'c']
bingo_generator.words = seed_list
bingo_generator.select_words()
|
Commonists/bingo
|
tests/test_bingo.py
|
Python
|
mit
| 1,598
|
import os
import sublime
from sublime_plugin import WindowCommand, TextCommand
from ..commands import *
from ...common import ui
from ..git_command import GitCommand
from ...common import util
class GsShowStatusCommand(WindowCommand, GitCommand):
"""
Open a status view for the active git repository.
"""
def run(self):
StatusInterface(repo_path=self.repo_path)
class StatusInterface(ui.Interface, GitCommand):
"""
Status dashboard.
"""
interface_type = "status"
read_only = True
syntax_file = "Packages/GitSavvy/syntax/status.sublime-syntax"
word_wrap = False
tab_size = 2
template = """\
BRANCH: {branch_status}
ROOT: {git_root}
HEAD: {head}
{< unstaged_files}
{< untracked_files}
{< staged_files}
{< merge_conflicts}
{< no_status_message}
{< stashes}
{< help}
"""
template_help = """
################### ###############
## SELECTED FILE ## ## ALL FILES ##
################### ###############
[o] open file [a] stage all unstaged files
[s] stage file [A] stage all unstaged and untracked files
[u] unstage file [U] unstage all staged files
[d] discard changes to file [D] discard all unstaged changes
[h] open file on remote
[M] launch external merge tool for conflict
[l] diff file inline [f] diff all files
[e] diff file [F] diff all cached files
############# #############
## ACTIONS ## ## STASHES ##
############# #############
[c] commit [t][a] apply stash
[C] commit, including unstaged [t][p] pop stash
[m] amend previous commit [t][s] show stash
[p] push current branch [t][c] create stash
[t][u] create stash including untracked files
[i] ignore file [t][g] create stash of staged changes only
[I] ignore pattern [t][d] discard stash
###########
## OTHER ##
###########
[r] refresh status
[?] toggle this help menu
[tab] transition to next dashboard
[SHIFT-tab] transition to previous dashboard
[.] move cursor to next file
[,] move cursor to previous file
-
"""
template_staged = """
STAGED:
{}
"""
template_unstaged = """
UNSTAGED:
{}
"""
template_untracked = """
UNTRACKED:
{}
"""
template_merge_conflicts = """
MERGE CONFLICTS:
{}
"""
template_stashes = """
STASHES:
{}
"""
def title(self):
return "STATUS: {}".format(os.path.basename(self.repo_path))
def pre_render(self):
(self.staged_entries,
self.unstaged_entries,
self.untracked_entries,
self.conflict_entries) = self.sort_status_entries(self.get_status())
def on_new_dashboard(self):
self.view.run_command("gs_status_navigate_file")
@ui.partial("branch_status")
def render_branch_status(self):
return self.get_branch_status(delim="\n ")
@ui.partial("git_root")
def render_git_root(self):
return self.short_repo_path
@ui.partial("head")
def render_head(self):
return self.get_latest_commit_msg_for_head()
@ui.partial("staged_files")
def render_staged_files(self):
if not self.staged_entries:
return ""
def get_path(file_status):
""" Display full file_status path, including path_alt if exists """
if file_status.path_alt:
return '{} -> {}'.format(file_status.path_alt, file_status.path)
return file_status.path
return self.template_staged.format("\n".join(
" {} {}".format("-" if f.index_status == "D" else " ", get_path(f))
for f in self.staged_entries
))
@ui.partial("unstaged_files")
def render_unstaged_files(self):
if not self.unstaged_entries:
return ""
return self.template_unstaged.format("\n".join(
" {} {}".format("-" if f.working_status == "D" else " ", f.path)
for f in self.unstaged_entries
))
@ui.partial("untracked_files")
def render_untracked_files(self):
if not self.untracked_entries:
return ""
return self.template_untracked.format(
"\n".join(" " + f.path for f in self.untracked_entries))
@ui.partial("merge_conflicts")
def render_merge_conflicts(self):
if not self.conflict_entries:
return ""
return self.template_merge_conflicts.format(
"\n".join(" " + f.path for f in self.conflict_entries))
@ui.partial("no_status_message")
def render_no_status_message(self):
return ("\n Your working directory is clean.\n"
if not (self.staged_entries or
self.unstaged_entries or
self.untracked_entries or
self.conflict_entries)
else "")
@ui.partial("stashes")
def render_stashes(self):
stash_list = self.get_stashes()
if not stash_list:
return ""
return self.template_stashes.format("\n".join(
" ({}) {}".format(stash.id, stash.description) for stash in stash_list))
@ui.partial("help")
def render_help(self):
help_hidden = self.view.settings().get("git_savvy.help_hidden")
if help_hidden:
return ""
else:
return self.template_help
ui.register_listeners(StatusInterface)
class GsStatusOpenFileCommand(TextCommand, GitCommand):
"""
    For every file that is selected or under a cursor, open that
    file in a new view.
"""
def run(self, edit):
lines = util.view.get_lines_from_regions(self.view, self.view.sel())
        file_paths = (line.strip() for line in lines if line[:4] == "    ")
abs_paths = (os.path.join(self.repo_path, file_path) for file_path in file_paths)
for path in abs_paths:
self.view.window().open_file(path)
class GsStatusDiffInlineCommand(TextCommand, GitCommand):
"""
For every file selected or under a cursor, open a new inline-diff view for
that file. If the file is staged, open the inline-diff in cached mode.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
non_cached_sections = (interface.get_view_regions("unstaged_files") +
interface.get_view_regions("merge_conflicts"))
non_cached_lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=non_cached_sections
)
non_cached_files = (
os.path.join(self.repo_path, line.strip())
for line in non_cached_lines
            if line[:4] == "    ")
cached_sections = interface.get_view_regions("staged_files")
cached_lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=cached_sections
)
cached_files = (
os.path.join(self.repo_path, line.strip())
for line in cached_lines
            if line[:4] == "    ")
sublime.set_timeout_async(
lambda: self.load_inline_diff_windows(non_cached_files, cached_files), 0)
def load_inline_diff_windows(self, non_cached_files, cached_files):
for fpath in non_cached_files:
syntax = util.file.get_syntax_for_file(fpath)
settings = {
"git_savvy.file_path": fpath,
"git_savvy.repo_path": self.repo_path,
"syntax": syntax
}
self.view.window().run_command("gs_inline_diff", {"settings": settings})
for fpath in cached_files:
syntax = util.file.get_syntax_for_file(fpath)
settings = {
"git_savvy.file_path": fpath,
"git_savvy.repo_path": self.repo_path,
"syntax": syntax
}
self.view.window().run_command("gs_inline_diff", {
"settings": settings,
"cached": True
})
class GsStatusDiffCommand(TextCommand, GitCommand):
"""
For every file selected or under a cursor, open a new diff view for
that file. If the file is staged, open the diff in cached mode.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
non_cached_sections = (interface.get_view_regions("unstaged_files") +
interface.get_view_regions("untracked_files") +
interface.get_view_regions("merge_conflicts"))
non_cached_lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=non_cached_sections
)
non_cached_files = (
os.path.join(self.repo_path, line.strip())
for line in non_cached_lines
            if line[:4] == "    "
)
cached_sections = interface.get_view_regions("staged_files")
cached_lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=cached_sections
)
cached_files = (
os.path.join(self.repo_path, line.strip())
for line in cached_lines
            if line[:4] == "    "
)
sublime.set_timeout_async(
lambda: self.load_diff_windows(non_cached_files, cached_files), 0)
def load_diff_windows(self, non_cached_files, cached_files):
for fpath in non_cached_files:
self.view.window().run_command("gs_diff", {
"file_path": fpath,
"current_file": True
})
for fpath in cached_files:
self.view.window().run_command("gs_diff", {
"file_path": fpath,
"in_cached_mode": True,
"current_file": True
})
class GsStatusStageFileCommand(TextCommand, GitCommand):
"""
For every file that is selected or under a cursor, if that file is
unstaged, stage it.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
valid_ranges = (interface.get_view_regions("unstaged_files") +
interface.get_view_regions("untracked_files") +
interface.get_view_regions("merge_conflicts"))
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=valid_ranges
)
# Remove the leading spaces and hyphen-character for deleted files.
file_paths = tuple(line[4:].strip() for line in lines if line)
if file_paths:
for fpath in file_paths:
self.stage_file(fpath, force=False)
sublime.status_message("Staged files successfully.")
util.view.refresh_gitsavvy(self.view)
class GsStatusUnstageFileCommand(TextCommand, GitCommand):
"""
For every file that is selected or under a cursor, if that file is
staged, unstage it.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
valid_ranges = interface.get_view_regions("staged_files")
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=valid_ranges
)
# Remove the leading spaces and hyphen-character for deleted files.
file_paths = tuple(line[4:].strip() for line in lines if line)
if file_paths:
for fpath in file_paths:
self.unstage_file(fpath)
sublime.status_message("Unstaged files successfully.")
util.view.refresh_gitsavvy(self.view)
class GsStatusDiscardChangesToFileCommand(TextCommand, GitCommand):
"""
For every file that is selected or under a cursor, if that file is
unstaged, reset the file to HEAD. If it is untracked, delete it.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
self.discard_untracked(interface)
self.discard_unstaged(interface)
util.view.refresh_gitsavvy(self.view)
sublime.status_message("Successfully discarded changes.")
def discard_untracked(self, interface):
valid_ranges = interface.get_view_regions("untracked_files")
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=valid_ranges
)
file_paths = tuple(line[4:].strip() for line in lines if line)
@util.actions.destructive(description="discard one or more untracked files")
def do_discard():
for fpath in file_paths:
self.discard_untracked_file(fpath)
if file_paths:
do_discard()
def discard_unstaged(self, interface):
valid_ranges = (interface.get_view_regions("unstaged_files") +
interface.get_view_regions("merge_conflicts"))
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=valid_ranges
)
file_paths = tuple(line[4:].strip() for line in lines if line)
@util.actions.destructive(description="discard one or more unstaged files")
def do_discard():
for fpath in file_paths:
self.checkout_file(fpath)
if file_paths:
do_discard()
class GsStatusOpenFileOnRemoteCommand(TextCommand, GitCommand):
"""
For every file that is selected or under a cursor, open a new browser
window to that file on GitHub.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
valid_ranges = (interface.get_view_regions("unstaged_files") +
interface.get_view_regions("merge_conflicts") +
interface.get_view_regions("staged_files"))
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=valid_ranges
)
file_paths = tuple(line[4:].strip() for line in lines if line)
self.view.run_command("gs_open_file_on_remote", {"fpath": list(file_paths)})
class GsStatusStageAllFilesCommand(TextCommand, GitCommand):
"""
Stage all unstaged files.
"""
def run(self, edit):
self.add_all_tracked_files()
util.view.refresh_gitsavvy(self.view)
class GsStatusStageAllFilesWithUntrackedCommand(TextCommand, GitCommand):
"""
Stage all unstaged files, including new files.
"""
def run(self, edit):
self.add_all_files()
util.view.refresh_gitsavvy(self.view)
class GsStatusUnstageAllFilesCommand(TextCommand, GitCommand):
"""
Unstage all staged changes.
"""
def run(self, edit):
self.unstage_all_files()
util.view.refresh_gitsavvy(self.view)
class GsStatusDiscardAllChangesCommand(TextCommand, GitCommand):
"""
Reset all unstaged files to HEAD.
"""
@util.actions.destructive(description="discard all unstaged changes, "
"and delete all untracked files")
def run(self, edit):
self.discard_all_unstaged()
util.view.refresh_gitsavvy(self.view)
class GsStatusCommitCommand(TextCommand, GitCommand):
"""
Open a commit window.
"""
def run(self, edit):
self.view.window().run_command("gs_commit", {"repo_path": self.repo_path})
class GsStatusCommitUnstagedCommand(TextCommand, GitCommand):
"""
Open a commit window. When the commit message is provided, stage all unstaged
changes and then do the commit.
"""
def run(self, edit):
self.view.window().run_command(
"gs_commit",
{"repo_path": self.repo_path, "include_unstaged": True}
)
class GsStatusAmendCommand(TextCommand, GitCommand):
"""
Open a commit window to amend the previous commit.
"""
def run(self, edit):
self.view.window().run_command(
"gs_commit",
{"repo_path": self.repo_path, "amend": True}
)
class GsStatusIgnoreFileCommand(TextCommand, GitCommand):
"""
For each file that is selected or under a cursor, add an
entry to the git root's `.gitignore` file.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
valid_ranges = (interface.get_view_regions("unstaged_files") +
interface.get_view_regions("untracked_files") +
interface.get_view_regions("merge_conflicts") +
interface.get_view_regions("staged_files"))
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=valid_ranges
)
file_paths = tuple(line[4:].strip() for line in lines if line)
if file_paths:
for fpath in file_paths:
self.add_ignore(os.path.join("/", fpath))
sublime.status_message("Successfully ignored files.")
util.view.refresh_gitsavvy(self.view)
class GsStatusIgnorePatternCommand(TextCommand, GitCommand):
"""
For the first file that is selected or under a cursor (other
selections/cursors will be ignored), prompt the user for
    a new pattern to add to `.gitignore`, prefilled with the filename.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
valid_ranges = (interface.get_view_regions("unstaged_files") +
interface.get_view_regions("untracked_files") +
interface.get_view_regions("merge_conflicts") +
interface.get_view_regions("staged_files"))
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=valid_ranges
)
file_paths = tuple(line[4:].strip() for line in lines if line)
if file_paths:
self.view.window().run_command("gs_ignore_pattern", {"pre_filled": file_paths[0]})
class GsStatusApplyStashCommand(TextCommand, GitCommand):
"""
Apply the selected stash. The user can only apply one stash at a time.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=interface.get_view_regions("stashes")
)
ids = tuple(line[line.find("(")+1:line.find(")")] for line in lines if line)
        if len(ids) > 1:
            sublime.status_message("You can only apply one stash at a time.")
            return
        if len(ids) == 0:
            # Happens if the command is invoked while none of the cursors
            # is on a stash.
            return
        self.apply_stash(ids[0])
util.view.refresh_gitsavvy(self.view)
class GsStatusPopStashCommand(TextCommand, GitCommand):
"""
Pop the selected stash. The user can only pop one stash at a time.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=interface.get_view_regions("stashes")
)
ids = tuple(line[line.find("(")+1:line.find(")")] for line in lines if line)
if len(ids) > 1:
sublime.status_message("You can only pop one stash at a time.")
return
if len(ids) == 0:
            # Happens if the command is invoked while none of the cursors
            # is on a stash.
return
self.pop_stash(ids[0])
util.view.refresh_gitsavvy(self.view)
class GsStatusShowStashCommand(TextCommand, GitCommand):
"""
For each selected stash, open a new window to display the diff
for that stash.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=interface.get_view_regions("stashes")
)
ids = tuple(line[line.find("(")+1:line.find(")")] for line in lines if line)
for stash_id in ids:
stash_name = "stash@{{{}}}".format(stash_id)
stash_text = self.git("stash", "show", "--no-color", "-p", stash_name)
stash_view = self.get_stash_view(stash_name)
stash_view.set_read_only(False)
stash_view.replace(edit, sublime.Region(0, 0), stash_text)
stash_view.set_read_only(True)
stash_view.sel().add(sublime.Region(0, 0))
def get_stash_view(self, title):
window = self.window if hasattr(self, "window") else self.view.window()
repo_path = self.repo_path
stash_view = util.view.get_scratch_view(self, "stash_" + title, read_only=True)
stash_view.set_name(title)
stash_view.set_syntax_file("Packages/GitSavvy/syntax/diff.sublime-syntax")
stash_view.settings().set("git_savvy.repo_path", repo_path)
window.focus_view(stash_view)
stash_view.sel().clear()
return stash_view
class GsStatusCreateStashCommand(TextCommand, GitCommand):
"""
Create a new stash from the user's unstaged changes.
"""
def run(self, edit):
self.view.window().show_input_panel("Description:", "", self.on_done, None, None)
def on_done(self, description):
self.create_stash(description)
util.view.refresh_gitsavvy(self.view)
class GsStatusCreateStashWithUntrackedCommand(TextCommand, GitCommand):
"""
Create a new stash from the user's unstaged changes, including
new files.
"""
def run(self, edit):
self.view.window().show_input_panel("Description:", "", self.on_done, None, None)
def on_done(self, description):
self.create_stash(description, include_untracked=True)
util.view.refresh_gitsavvy(self.view)
class GsStatusCreateStashOfIndexedCommand(TextCommand, GitCommand):
"""
Create a new stash from the user's staged changes.
"""
def run(self, edit):
self.view.window().show_input_panel("Description:", "", self.on_done, None, None)
def on_done(self, description):
# Create a temporary stash of everything, including staged files.
self.git("stash", "--keep-index")
# Stash only the indexed files, since they're the only thing left in the working directory.
self.create_stash(description)
# Clean out the working directory.
self.git("reset", "--hard")
try:
# Pop the original stash, taking us back to the original working state.
self.apply_stash(1)
# Get the diff from the originally staged files, and remove them from the working dir.
stash_text = self.git("stash", "show", "--no-color", "-p")
self.git("apply", "-R", stdin=stash_text)
# Delete the temporary stash.
self.drop_stash(1)
# Remove all changes from the staging area.
self.git("reset")
except Exception as e:
# Restore the original working state.
self.pop_stash(1)
raise e
class GsStatusDiscardStashCommand(TextCommand, GitCommand):
"""
Drop the selected stash. The user can only discard one stash
at a time.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=interface.get_view_regions("stashes")
)
ids = tuple(line[line.find("(")+1:line.find(")")] for line in lines if line)
if len(ids) > 1:
sublime.status_message("You can only drop one stash at a time.")
return
if len(ids) == 0:
            # Happens if the command is invoked while none of the cursors
            # is on a stash.
return
@util.actions.destructive(description="discard a stash")
def do_drop_stash(id):
self.drop_stash(id)
do_drop_stash(ids[0])
util.view.refresh_gitsavvy(self.view)
class GsStatusLaunchMergeToolCommand(TextCommand, GitCommand):
"""
Launch external merge tool for selected file.
"""
def run(self, edit):
interface = ui.get_interface(self.view.id())
valid_ranges = (interface.get_view_regions("unstaged_files") +
interface.get_view_regions("untracked_files") +
interface.get_view_regions("merge_conflicts") +
interface.get_view_regions("staged_files"))
lines = util.view.get_lines_from_regions(
self.view,
self.view.sel(),
valid_ranges=valid_ranges
)
file_paths = tuple(line[4:].strip() for line in lines if line)
if len(file_paths) > 1:
sublime.error_message("You can only launch merge tool for a single file at a time.")
return
sublime.set_timeout_async(lambda: self.launch_tool_for_file(file_paths[0]), 0)
class GsStatusNavigateFileCommand(GsNavigate):
"""
Move cursor to the next (or previous) selectable file in the dashboard.
"""
def get_available_regions(self):
file_regions = [file_region
for region in self.view.find_by_selector("meta.git-savvy.status.file")
for file_region in self.view.lines(region)]
stash_regions = [stash_region
for region in self.view.find_by_selector("meta.git-savvy.status.saved_stash")
for stash_region in self.view.lines(region)]
return file_regions + stash_regions
|
stoivo/GitSavvy
|
core/interfaces/status.py
|
Python
|
mit
| 26,371
|
# Copyright 2015 The go-python Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
## py2/py3 compat
from __future__ import print_function
import iface
### test docs
print("doc(iface): %r" % (iface.__doc__,))
print("t = iface.T()")
t = iface.T()
print("t.F()")
t.F()
print("iface.CallIface(t)")
iface.CallIface(t)
|
kellrott/gopy
|
_examples/iface/test.py
|
Python
|
bsd-3-clause
| 401
|
from dipy.reconst.cache import Cache
from dipy.core.sphere import Sphere
from numpy.testing import assert_, assert_equal, run_module_suite
class TestModel(Cache):
def __init__(self):
pass
def test_basic_cache():
t = TestModel()
s = Sphere(theta=[0], phi=[0])
assert_(t.cache_get("design_matrix", s) is None)
m = [[1, 0], [0, 1]]
t.cache_set("design_matrix", key=s, value=m)
assert_equal(t.cache_get("design_matrix", s), m)
t.cache_clear()
assert_(t.cache_get("design_matrix", s) is None)
if __name__ == "__main__":
run_module_suite()
|
StongeEtienne/dipy
|
dipy/reconst/tests/test_cache.py
|
Python
|
bsd-3-clause
| 593
|
"""This component provides basic support for Foscam IP cameras."""
import asyncio
from libpyfoscam import FoscamCamera
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, SUPPORT_STREAM, Camera
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.helpers import config_validation as cv, entity_platform
from .const import (
CONF_RTSP_PORT,
CONF_STREAM,
DOMAIN,
LOGGER,
SERVICE_PTZ,
SERVICE_PTZ_PRESET,
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required("ip"): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_NAME, default="Foscam Camera"): cv.string,
vol.Optional(CONF_PORT, default=88): cv.port,
vol.Optional(CONF_RTSP_PORT): cv.port,
}
)
DIR_UP = "up"
DIR_DOWN = "down"
DIR_LEFT = "left"
DIR_RIGHT = "right"
DIR_TOPLEFT = "top_left"
DIR_TOPRIGHT = "top_right"
DIR_BOTTOMLEFT = "bottom_left"
DIR_BOTTOMRIGHT = "bottom_right"
MOVEMENT_ATTRS = {
DIR_UP: "ptz_move_up",
DIR_DOWN: "ptz_move_down",
DIR_LEFT: "ptz_move_left",
DIR_RIGHT: "ptz_move_right",
DIR_TOPLEFT: "ptz_move_top_left",
DIR_TOPRIGHT: "ptz_move_top_right",
DIR_BOTTOMLEFT: "ptz_move_bottom_left",
DIR_BOTTOMRIGHT: "ptz_move_bottom_right",
}
DEFAULT_TRAVELTIME = 0.125
ATTR_MOVEMENT = "movement"
ATTR_TRAVELTIME = "travel_time"
ATTR_PRESET_NAME = "preset_name"
PTZ_GOTO_PRESET_COMMAND = "ptz_goto_preset"
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up a Foscam IP Camera."""
LOGGER.warning(
"Loading foscam via platform config is deprecated, it will be automatically imported; Please remove it afterwards"
)
config_new = {
CONF_NAME: config[CONF_NAME],
CONF_HOST: config["ip"],
CONF_PORT: config[CONF_PORT],
CONF_USERNAME: config[CONF_USERNAME],
CONF_PASSWORD: config[CONF_PASSWORD],
CONF_STREAM: "Main",
CONF_RTSP_PORT: config.get(CONF_RTSP_PORT, 554),
}
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config_new
)
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add a Foscam IP camera from a config entry."""
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_PTZ,
{
vol.Required(ATTR_MOVEMENT): vol.In(
[
DIR_UP,
DIR_DOWN,
DIR_LEFT,
DIR_RIGHT,
DIR_TOPLEFT,
DIR_TOPRIGHT,
DIR_BOTTOMLEFT,
DIR_BOTTOMRIGHT,
]
),
vol.Optional(ATTR_TRAVELTIME, default=DEFAULT_TRAVELTIME): cv.small_float,
},
"async_perform_ptz",
)
platform.async_register_entity_service(
SERVICE_PTZ_PRESET,
{
vol.Required(ATTR_PRESET_NAME): cv.string,
},
"async_perform_ptz_preset",
)
camera = FoscamCamera(
config_entry.data[CONF_HOST],
config_entry.data[CONF_PORT],
config_entry.data[CONF_USERNAME],
config_entry.data[CONF_PASSWORD],
verbose=False,
)
async_add_entities([HassFoscamCamera(camera, config_entry)])
class HassFoscamCamera(Camera):
"""An implementation of a Foscam IP camera."""
def __init__(self, camera, config_entry):
"""Initialize a Foscam camera."""
super().__init__()
self._foscam_session = camera
self._name = config_entry.title
self._username = config_entry.data[CONF_USERNAME]
self._password = config_entry.data[CONF_PASSWORD]
self._stream = config_entry.data[CONF_STREAM]
self._unique_id = config_entry.entry_id
self._rtsp_port = config_entry.data[CONF_RTSP_PORT]
self._motion_status = False
async def async_added_to_hass(self):
"""Handle entity addition to hass."""
# Get motion detection status
ret, response = await self.hass.async_add_executor_job(
self._foscam_session.get_motion_detect_config
)
if ret == -3:
LOGGER.info(
"Can't get motion detection status, camera %s configured with non-admin user",
self._name,
)
elif ret != 0:
LOGGER.error(
"Error getting motion detection status of %s: %s", self._name, ret
)
else:
self._motion_status = response == 1
@property
def unique_id(self):
"""Return the entity unique ID."""
return self._unique_id
def camera_image(self):
"""Return a still image response from the camera."""
# Send the request to snap a picture and return raw jpg data
# Handle exception if host is not reachable or url failed
result, response = self._foscam_session.snap_picture_2()
if result != 0:
return None
return response
@property
def supported_features(self):
"""Return supported features."""
if self._rtsp_port:
return SUPPORT_STREAM
return None
async def stream_source(self):
"""Return the stream source."""
if self._rtsp_port:
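            # Illustrative shape of the resulting URL (hypothetical values):
            #   rtsp://admin:secret@192.168.1.10:554/videoMain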
return f"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"
return None
@property
def motion_detection_enabled(self):
"""Camera Motion Detection Status."""
return self._motion_status
def enable_motion_detection(self):
"""Enable motion detection in camera."""
try:
ret = self._foscam_session.enable_motion_detection()
if ret != 0:
if ret == -3:
LOGGER.info(
"Can't set motion detection status, camera %s configured with non-admin user",
self._name,
)
return
self._motion_status = True
except TypeError:
LOGGER.debug(
"Failed enabling motion detection on '%s'. Is it supported by the device?",
self._name,
)
def disable_motion_detection(self):
"""Disable motion detection."""
try:
ret = self._foscam_session.disable_motion_detection()
if ret != 0:
if ret == -3:
LOGGER.info(
"Can't set motion detection status, camera %s configured with non-admin user",
self._name,
)
return
self._motion_status = False
except TypeError:
LOGGER.debug(
"Failed disabling motion detection on '%s'. Is it supported by the device?",
self._name,
)
async def async_perform_ptz(self, movement, travel_time):
"""Perform a PTZ action on the camera."""
LOGGER.debug("PTZ action '%s' on %s", movement, self._name)
movement_function = getattr(self._foscam_session, MOVEMENT_ATTRS[movement])
ret, _ = await self.hass.async_add_executor_job(movement_function)
if ret != 0:
LOGGER.error("Error moving %s '%s': %s", movement, self._name, ret)
return
await asyncio.sleep(travel_time)
ret, _ = await self.hass.async_add_executor_job(
self._foscam_session.ptz_stop_run
)
if ret != 0:
LOGGER.error("Error stopping movement on '%s': %s", self._name, ret)
return
async def async_perform_ptz_preset(self, preset_name):
"""Perform a PTZ preset action on the camera."""
LOGGER.debug("PTZ preset '%s' on %s", preset_name, self._name)
preset_function = getattr(self._foscam_session, PTZ_GOTO_PRESET_COMMAND)
ret, _ = await self.hass.async_add_executor_job(preset_function, preset_name)
if ret != 0:
LOGGER.error(
"Error moving to preset %s on '%s': %s", preset_name, self._name, ret
)
return
@property
def name(self):
"""Return the name of this camera."""
return self._name
|
w1ll1am23/home-assistant
|
homeassistant/components/foscam/camera.py
|
Python
|
apache-2.0
| 8,588
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import distutils.util
from cliff import command
from cliff import lister
from cliff import show
from gnocchiclient import exceptions
from gnocchiclient import utils
class CliResourceList(lister.Lister):
"""List resources"""
COLS = ('id', 'type',
'project_id', 'user_id',
'original_resource_id',
'started_at', 'ended_at',
'revision_start', 'revision_end')
def get_parser(self, prog_name, history=True):
parser = super(CliResourceList, self).get_parser(prog_name)
parser.add_argument("--details", action='store_true',
help="Show all attributes of generic resources"),
if history:
parser.add_argument("--history", action='store_true',
help="Show history of the resources"),
parser.add_argument("--limit", type=int, metavar="<LIMIT>",
help="Number of resources to return "
"(Default is server default)")
parser.add_argument("--marker", metavar="<MARKER>",
help="Last item of the previous listing. "
"Return the next results after this value")
parser.add_argument("--sort", action="append", metavar="<SORT>",
help="Sort of resource attribute "
"(example: user_id:desc-nullslast")
parser.add_argument("--type", "-t", dest="resource_type",
default="generic", help="Type of resource")
return parser
def _list2cols(self, resources):
"""Return a formatted list of resources."""
if not resources:
return self.COLS, []
cols = list(self.COLS)
for k in resources[0]:
if k not in cols:
cols.append(k)
if 'creator' in cols:
cols.remove('created_by_user_id')
cols.remove('created_by_project_id')
return utils.list2cols(cols, resources)
def take_action(self, parsed_args):
resources = utils.get_client(self).resource.list(
resource_type=parsed_args.resource_type,
**utils.get_pagination_options(parsed_args))
# Do not dump metrics because it makes the list way too long
for r in resources:
del r['metrics']
return self._list2cols(resources)
class CliResourceHistory(CliResourceList):
"""Show the history of a resource"""
def get_parser(self, prog_name):
parser = super(CliResourceHistory, self).get_parser(prog_name,
history=False)
parser.add_argument("resource_id",
help="ID of a resource")
return parser
def take_action(self, parsed_args):
resources = utils.get_client(self).resource.history(
resource_type=parsed_args.resource_type,
resource_id=parsed_args.resource_id,
**utils.get_pagination_options(parsed_args))
if parsed_args.formatter == 'table':
return self._list2cols(list(map(normalize_metrics, resources)))
return self._list2cols(resources)
class CliResourceSearch(CliResourceList):
"""Search resources with specified query rules"""
def get_parser(self, prog_name):
parser = super(CliResourceSearch, self).get_parser(prog_name)
utils.add_query_argument("query", parser)
return parser
def take_action(self, parsed_args):
resources = utils.get_client(self).resource.search(
resource_type=parsed_args.resource_type,
query=parsed_args.query,
**utils.get_pagination_options(parsed_args))
# Do not dump metrics because it makes the list way too long
for r in resources:
del r['metrics']
return self._list2cols(resources)
def normalize_metrics(res):
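    # e.g. res['metrics'] == {"cpu": "id-1", "mem": "id-2"} is flattened into
    # the multi-line string "cpu: id-1\nmem: id-2" (sorted by metric name).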
res['metrics'] = "\n".join(sorted(
["%s: %s" % (name, _id)
for name, _id in res['metrics'].items()]))
return res
class CliResourceShow(show.ShowOne):
"""Show a resource"""
def get_parser(self, prog_name):
parser = super(CliResourceShow, self).get_parser(prog_name)
parser.add_argument("--type", "-t", dest="resource_type",
default="generic", help="Type of resource")
parser.add_argument("resource_id",
help="ID of a resource")
return parser
def take_action(self, parsed_args):
res = utils.get_client(self).resource.get(
resource_type=parsed_args.resource_type,
resource_id=parsed_args.resource_id)
if parsed_args.formatter == 'table':
normalize_metrics(res)
return self.dict2columns(res)
class CliResourceCreate(show.ShowOne):
"""Create a resource"""
def get_parser(self, prog_name):
parser = super(CliResourceCreate, self).get_parser(prog_name)
parser.add_argument("--type", "-t", dest="resource_type",
default="generic", help="Type of resource")
parser.add_argument("resource_id",
help="ID of the resource")
parser.add_argument("-a", "--attribute", action='append',
default=[],
help=("name and value of an attribute "
"separated with a ':'"))
parser.add_argument("-m", "--add-metric", action='append',
default=[],
help="name:id of a metric to add"),
        parser.add_argument(
            "-n", "--create-metric", action='append', default=[],
            help="name:archive_policy_name of a metric to create")
return parser
def _resource_from_args(self, parsed_args, update=False):
# Get the resource type to set the correct type
rt_attrs = utils.get_client(self).resource_type.get(
name=parsed_args.resource_type)['attributes']
resource = {}
if not update:
resource['id'] = parsed_args.resource_id
if parsed_args.attribute:
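            # e.g. "-a display_name:myvm -a memory:2048" (illustrative names);
            # values are cast using the attribute types declared on the
            # resource type.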
for attr in parsed_args.attribute:
attr, __, value = attr.partition(":")
attr_type = rt_attrs.get(attr, {}).get('type')
if attr_type == "number":
value = float(value)
elif attr_type == "bool":
value = bool(distutils.util.strtobool(value))
resource[attr] = value
if (parsed_args.add_metric
or parsed_args.create_metric
or (update and parsed_args.delete_metric)):
if update:
r = utils.get_client(self).resource.get(
parsed_args.resource_type,
parsed_args.resource_id)
default = r['metrics']
for metric_name in parsed_args.delete_metric:
try:
del default[metric_name]
except KeyError:
raise exceptions.MetricNotFound(
message="Metric name %s not found" % metric_name)
else:
default = {}
resource['metrics'] = default
for metric in parsed_args.add_metric:
name, _, value = metric.partition(":")
resource['metrics'][name] = value
for metric in parsed_args.create_metric:
name, _, value = metric.partition(":")
if value is "":
resource['metrics'][name] = {}
else:
resource['metrics'][name] = {'archive_policy_name': value}
return resource
def take_action(self, parsed_args):
resource = self._resource_from_args(parsed_args)
res = utils.get_client(self).resource.create(
resource_type=parsed_args.resource_type, resource=resource)
if parsed_args.formatter == 'table':
normalize_metrics(res)
return self.dict2columns(res)
class CliResourceUpdate(CliResourceCreate):
"""Update a resource"""
def get_parser(self, prog_name):
parser = super(CliResourceUpdate, self).get_parser(prog_name)
parser.add_argument("-d", "--delete-metric", action='append',
default=[],
help="Name of a metric to delete"),
return parser
def take_action(self, parsed_args):
resource = self._resource_from_args(parsed_args, update=True)
res = utils.get_client(self).resource.update(
resource_type=parsed_args.resource_type,
resource_id=parsed_args.resource_id,
resource=resource)
if parsed_args.formatter == 'table':
normalize_metrics(res)
return self.dict2columns(res)
class CliResourceDelete(command.Command):
"""Delete a resource"""
def get_parser(self, prog_name):
parser = super(CliResourceDelete, self).get_parser(prog_name)
parser.add_argument("resource_id",
help="ID of the resource")
return parser
def take_action(self, parsed_args):
utils.get_client(self).resource.delete(parsed_args.resource_id)
class CliResourceBatchDelete(show.ShowOne):
"""Delete a batch of resources based on attribute values"""
def get_parser(self, prog_name):
parser = super(CliResourceBatchDelete, self).get_parser(prog_name)
parser.add_argument("--type", "-t", dest="resource_type",
default="generic", help="Type of resource")
utils.add_query_argument("query", parser)
return parser
def take_action(self, parsed_args):
res = utils.get_client(self).resource.batch_delete(
resource_type=parsed_args.resource_type,
query=parsed_args.query)
return self.dict2columns(res)
|
sileht/python-gnocchiclient
|
gnocchiclient/v1/resource_cli.py
|
Python
|
apache-2.0
| 10,589
|
"""SCons.Tool.wix
Tool-specific initialization for wix, the Windows Installer XML Tool.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/wix.py 3603 2008/10/10 05:46:45 scons"
import SCons.Builder
import SCons.Action
import os
import string
def generate(env):
"""Add Builders and construction variables for WiX to an Environment."""
if not exists(env):
return
env['WIXCANDLEFLAGS'] = ['-nologo']
env['WIXCANDLEINCLUDE'] = []
env['WIXCANDLECOM'] = '$WIXCANDLE $WIXCANDLEFLAGS -I $WIXCANDLEINCLUDE -o ${TARGET} ${SOURCE}'
env['WIXLIGHTFLAGS'].append( '-nologo' )
env['WIXLIGHTCOM'] = "$WIXLIGHT $WIXLIGHTFLAGS -out ${TARGET} ${SOURCES}"
env['WIXSRCSUFFIX'] = '.wxs'
env['WIXOBJPREFIX'] = ''
env['WIXOBJSUFFIX'] = '.wxiobj'
env['WIXMSIPREFIX'] = ''
env['WIXMSISUFFIX'] = '.msi'
object_builder = SCons.Builder.Builder(
action = '$WIXCANDLECOM',
prefix = '$WIXOBJPREFIX',
suffix = '$WIXOBJSUFFIX',
src_suffix = '$WIXSRCSUFFIX')
linker_builder = SCons.Builder.Builder(
action = '$WIXLIGHTCOM',
prefix = '$WIXMSIPREFIX',
suffix = '$WIXMSISUFFIX',
src_suffix = '$WIXOBJSUFFIX',
src_builder = object_builder)
env['BUILDERS']['WiX'] = linker_builder
def exists(env):
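    # Note: besides detecting candle.exe/light.exe, this seeds the WIXCANDLE,
    # WIXLIGHT and WIXLIGHTFLAGS construction variables that generate() uses.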
env['WIXCANDLE'] = 'candle.exe'
env['WIXLIGHT'] = 'light.exe'
env['WIXLIGHTFLAGS'] = []
    # Try to find the candle.exe and light.exe tools on the PATH and add
    # their install directory to the light libpath. Paths from
    # env['ENV']['PATH'] are searched first, then os.environ['PATH'].
wix_paths = string.split(env['ENV'].get('PATH', ''), os.pathsep)
wix_paths += string.split(os.environ['PATH'], os.pathsep)
for path in wix_paths:
if not path:
continue
# workaround for some weird python win32 bug.
if path[0] == '"' and path[-1:]=='"':
path = path[1:-1]
# normalize the path
path = os.path.normpath(path)
# search for the tools in the PATH environment variable
try:
if env['WIXCANDLE'] in os.listdir(path) and\
env['WIXLIGHT'] in os.listdir(path):
env.PrependENVPath('PATH', path)
                extra_files = [os.path.join(path, i) for i in [
'wixui.wixlib',
'WixUI_en-us.wxl']]
if (os.path.exists(extra_files[0]) and
os.path.exists(extra_files[1])):
env.Append(WIXLIGHTFLAGS=[
extra_files[0],
'-loc', extra_files[1]])
return 1
except OSError:
pass # ignore this, could be a stale PATH entry.
return None
|
taxilian/omaha
|
site_scons/site_tools/wix.py
|
Python
|
apache-2.0
| 4,114
|
#!/usr/bin/env python
# Copyright 2013 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import argparse
import datetime
import httplib
import imp
import json
import os
import re
import subprocess
import sys
import textwrap
import time
import urllib
import urllib2
from git_recipes import GitRecipesMixin
from git_recipes import GitFailedException
PERSISTFILE_BASENAME = "PERSISTFILE_BASENAME"
BRANCHNAME = "BRANCHNAME"
DOT_GIT_LOCATION = "DOT_GIT_LOCATION"
VERSION_FILE = "VERSION_FILE"
CHANGELOG_FILE = "CHANGELOG_FILE"
CHANGELOG_ENTRY_FILE = "CHANGELOG_ENTRY_FILE"
COMMITMSG_FILE = "COMMITMSG_FILE"
PATCH_FILE = "PATCH_FILE"
def TextToFile(text, file_name):
with open(file_name, "w") as f:
f.write(text)
def AppendToFile(text, file_name):
with open(file_name, "a") as f:
f.write(text)
def LinesInFile(file_name):
with open(file_name) as f:
for line in f:
yield line
def FileToText(file_name):
with open(file_name) as f:
return f.read()
def MSub(rexp, replacement, text):
return re.sub(rexp, replacement, text, flags=re.MULTILINE)
def Fill80(line):
# Replace tabs and remove surrounding space.
line = re.sub(r"\t", r" ", line.strip())
# Format with 8 characters indentation and line width 80.
  return textwrap.fill(line, width=80, initial_indent="        ",
                       subsequent_indent="        ")
def MakeComment(text):
return MSub(r"^( ?)", "#", text)
def StripComments(text):
# Use split not splitlines to keep terminal newlines.
return "\n".join(filter(lambda x: not x.startswith("#"), text.split("\n")))
def MakeChangeLogBody(commit_messages, auto_format=False):
result = ""
added_titles = set()
for (title, body, author) in commit_messages:
# TODO(machenbach): Better check for reverts. A revert should remove the
# original CL from the actual log entry.
title = title.strip()
if auto_format:
# Only add commits that set the LOG flag correctly.
log_exp = r"^[ \t]*LOG[ \t]*=[ \t]*(?:(?:Y(?:ES)?)|TRUE)"
if not re.search(log_exp, body, flags=re.I | re.M):
continue
# Never include reverts.
if title.startswith("Revert "):
continue
# Don't include duplicates.
if title in added_titles:
continue
# Add and format the commit's title and bug reference. Move dot to the end.
added_titles.add(title)
raw_title = re.sub(r"(\.|\?|!)$", "", title)
bug_reference = MakeChangeLogBugReference(body)
space = " " if bug_reference else ""
result += "%s\n" % Fill80("%s%s%s." % (raw_title, space, bug_reference))
# Append the commit's author for reference if not in auto-format mode.
if not auto_format:
result += "%s\n" % Fill80("(%s)" % author.strip())
result += "\n"
return result
def MakeChangeLogBugReference(body):
"""Grep for "BUG=xxxx" lines in the commit message and convert them to
"(issue xxxx)".
"""
crbugs = []
v8bugs = []
def AddIssues(text):
ref = re.match(r"^BUG[ \t]*=[ \t]*(.+)$", text.strip())
if not ref:
return
for bug in ref.group(1).split(","):
bug = bug.strip()
match = re.match(r"^v8:(\d+)$", bug)
if match: v8bugs.append(int(match.group(1)))
else:
match = re.match(r"^(?:chromium:)?(\d+)$", bug)
if match: crbugs.append(int(match.group(1)))
# Add issues to crbugs and v8bugs.
map(AddIssues, body.splitlines())
# Filter duplicates, sort, stringify.
crbugs = map(str, sorted(set(crbugs)))
v8bugs = map(str, sorted(set(v8bugs)))
bug_groups = []
def FormatIssues(prefix, bugs):
if len(bugs) > 0:
plural = "s" if len(bugs) > 1 else ""
bug_groups.append("%sissue%s %s" % (prefix, plural, ", ".join(bugs)))
FormatIssues("", v8bugs)
FormatIssues("Chromium ", crbugs)
if len(bug_groups) > 0:
return "(%s)" % ", ".join(bug_groups)
else:
return ""
# Some commands don't like the pipe, e.g. calling vi from within the script or
# from subscripts like git cl upload.
def Command(cmd, args="", prefix="", pipe=True):
# TODO(machenbach): Use timeout.
cmd_line = "%s %s %s" % (prefix, cmd, args)
print "Command: %s" % cmd_line
sys.stdout.flush()
try:
if pipe:
return subprocess.check_output(cmd_line, shell=True)
else:
return subprocess.check_call(cmd_line, shell=True)
except subprocess.CalledProcessError:
return None
finally:
sys.stdout.flush()
sys.stderr.flush()
# Wrapper for side effects.
class SideEffectHandler(object): # pragma: no cover
def Call(self, fun, *args, **kwargs):
return fun(*args, **kwargs)
def Command(self, cmd, args="", prefix="", pipe=True):
return Command(cmd, args, prefix, pipe)
def ReadLine(self):
return sys.stdin.readline().strip()
def ReadURL(self, url, params=None):
# pylint: disable=E1121
url_fh = urllib2.urlopen(url, params, 60)
try:
return url_fh.read()
finally:
url_fh.close()
def ReadClusterFuzzAPI(self, api_key, **params):
params["api_key"] = api_key.strip()
params = urllib.urlencode(params)
headers = {"Content-type": "application/x-www-form-urlencoded"}
conn = httplib.HTTPSConnection("backend-dot-cluster-fuzz.appspot.com")
conn.request("POST", "/_api/", params, headers)
response = conn.getresponse()
data = response.read()
try:
return json.loads(data)
except:
print data
print "ERROR: Could not read response. Is your key valid?"
raise
def Sleep(self, seconds):
time.sleep(seconds)
def GetDate(self):
return datetime.date.today().strftime("%Y-%m-%d")
DEFAULT_SIDE_EFFECT_HANDLER = SideEffectHandler()
class NoRetryException(Exception):
pass
class Step(GitRecipesMixin):
def __init__(self, text, requires, number, config, state, options, handler):
self._text = text
self._requires = requires
self._number = number
self._config = config
self._state = state
self._options = options
self._side_effect_handler = handler
assert self._number >= 0
assert self._config is not None
assert self._state is not None
assert self._side_effect_handler is not None
def __getitem__(self, key):
# Convenience method to allow direct [] access on step classes for
# manipulating the backed state dict.
return self._state[key]
def __setitem__(self, key, value):
# Convenience method to allow direct [] access on step classes for
# manipulating the backed state dict.
self._state[key] = value
def Config(self, key):
return self._config[key]
def Run(self):
# Restore state.
state_file = "%s-state.json" % self._config[PERSISTFILE_BASENAME]
if not self._state and os.path.exists(state_file):
self._state.update(json.loads(FileToText(state_file)))
# Skip step if requirement is not met.
if self._requires and not self._state.get(self._requires):
return
print ">>> Step %d: %s" % (self._number, self._text)
try:
return self.RunStep()
finally:
# Persist state.
TextToFile(json.dumps(self._state), state_file)
def RunStep(self): # pragma: no cover
raise NotImplementedError
def Retry(self, cb, retry_on=None, wait_plan=None):
""" Retry a function.
Params:
cb: The function to retry.
retry_on: A callback that takes the result of the function and returns
True if the function should be retried. A function throwing an
exception is always retried.
wait_plan: A list of waiting delays between retries in seconds. The
maximum number of retries is len(wait_plan).
"""
retry_on = retry_on or (lambda x: False)
wait_plan = list(wait_plan or [])
wait_plan.reverse()
while True:
got_exception = False
try:
result = cb()
except NoRetryException, e:
raise e
except Exception:
got_exception = True
if got_exception or retry_on(result):
if not wait_plan: # pragma: no cover
raise Exception("Retried too often. Giving up.")
wait_time = wait_plan.pop()
print "Waiting for %f seconds." % wait_time
self._side_effect_handler.Sleep(wait_time)
print "Retrying..."
else:
return result
def ReadLine(self, default=None):
# Don't prompt in forced mode.
if self._options.force_readline_defaults and default is not None:
print "%s (forced)" % default
return default
else:
return self._side_effect_handler.ReadLine()
def Git(self, args="", prefix="", pipe=True, retry_on=None):
cmd = lambda: self._side_effect_handler.Command("git", args, prefix, pipe)
result = self.Retry(cmd, retry_on, [5, 30])
if result is None:
raise GitFailedException("'git %s' failed." % args)
return result
def SVN(self, args="", prefix="", pipe=True, retry_on=None):
cmd = lambda: self._side_effect_handler.Command("svn", args, prefix, pipe)
return self.Retry(cmd, retry_on, [5, 30])
def Editor(self, args):
if self._options.requires_editor:
return self._side_effect_handler.Command(os.environ["EDITOR"], args,
pipe=False)
def ReadURL(self, url, params=None, retry_on=None, wait_plan=None):
wait_plan = wait_plan or [3, 60, 600]
cmd = lambda: self._side_effect_handler.ReadURL(url, params)
return self.Retry(cmd, retry_on, wait_plan)
def GetDate(self):
return self._side_effect_handler.GetDate()
def Die(self, msg=""):
if msg != "":
print "Error: %s" % msg
print "Exiting"
raise Exception(msg)
def DieNoManualMode(self, msg=""):
if not self._options.manual: # pragma: no cover
msg = msg or "Only available in manual mode."
self.Die(msg)
def Confirm(self, msg):
print "%s [Y/n] " % msg,
answer = self.ReadLine(default="Y")
return answer == "" or answer == "Y" or answer == "y"
def DeleteBranch(self, name):
for line in self.GitBranch().splitlines():
if re.match(r".*\s+%s$" % name, line):
msg = "Branch %s exists, do you want to delete it?" % name
if self.Confirm(msg):
self.GitDeleteBranch(name)
print "Branch %s deleted." % name
else:
msg = "Can't continue. Please delete branch %s and try again." % name
self.Die(msg)
def InitialEnvironmentChecks(self):
# Cancel if this is not a git checkout.
if not os.path.exists(self._config[DOT_GIT_LOCATION]): # pragma: no cover
self.Die("This is not a git checkout, this script won't work for you.")
# Cancel if EDITOR is unset or not executable.
if (self._options.requires_editor and (not os.environ.get("EDITOR") or
Command("which", os.environ["EDITOR"]) is None)): # pragma: no cover
self.Die("Please set your EDITOR environment variable, you'll need it.")
def CommonPrepare(self):
# Check for a clean workdir.
if not self.GitIsWorkdirClean(): # pragma: no cover
self.Die("Workspace is not clean. Please commit or undo your changes.")
# Persist current branch.
self["current_branch"] = self.GitCurrentBranch()
# Fetch unfetched revisions.
self.GitSVNFetch()
def PrepareBranch(self):
# Delete the branch that will be created later if it exists already.
self.DeleteBranch(self._config[BRANCHNAME])
def CommonCleanup(self):
self.GitCheckout(self["current_branch"])
if self._config[BRANCHNAME] != self["current_branch"]:
self.GitDeleteBranch(self._config[BRANCHNAME])
# Clean up all temporary files.
Command("rm", "-f %s*" % self._config[PERSISTFILE_BASENAME])
def ReadAndPersistVersion(self, prefix=""):
def ReadAndPersist(var_name, def_name):
match = re.match(r"^#define %s\s+(\d*)" % def_name, line)
if match:
value = match.group(1)
self["%s%s" % (prefix, var_name)] = value
for line in LinesInFile(self._config[VERSION_FILE]):
for (var_name, def_name) in [("major", "MAJOR_VERSION"),
("minor", "MINOR_VERSION"),
("build", "BUILD_NUMBER"),
("patch", "PATCH_LEVEL")]:
ReadAndPersist(var_name, def_name)
def WaitForLGTM(self):
print ("Please wait for an LGTM, then type \"LGTM<Return>\" to commit "
"your change. (If you need to iterate on the patch or double check "
"that it's sane, do so in another shell, but remember to not "
"change the headline of the uploaded CL.")
answer = ""
while answer != "LGTM":
print "> ",
answer = self.ReadLine(None if self._options.wait_for_lgtm else "LGTM")
if answer != "LGTM":
print "That was not 'LGTM'."
def WaitForResolvingConflicts(self, patch_file):
print("Applying the patch \"%s\" failed. Either type \"ABORT<Return>\", "
"or resolve the conflicts, stage *all* touched files with "
"'git add', and type \"RESOLVED<Return>\"")
self.DieNoManualMode()
answer = ""
while answer != "RESOLVED":
if answer == "ABORT":
self.Die("Applying the patch failed.")
if answer != "":
print "That was not 'RESOLVED' or 'ABORT'."
print "> ",
answer = self.ReadLine()
# Takes a file containing the patch to apply as first argument.
def ApplyPatch(self, patch_file, revert=False):
try:
self.GitApplyPatch(patch_file, revert)
except GitFailedException:
self.WaitForResolvingConflicts(patch_file)
def FindLastTrunkPush(self, parent_hash="", include_patches=False):
push_pattern = "^Version [[:digit:]]*\.[[:digit:]]*\.[[:digit:]]*"
if not include_patches:
      # Non-patched versions only have three numbers, followed by the
      # "(based on...)" comment.
push_pattern += " (based"
branch = "" if parent_hash else "svn/trunk"
return self.GitLog(n=1, format="%H", grep=push_pattern,
parent_hash=parent_hash, branch=branch)
class UploadStep(Step):
MESSAGE = "Upload for code review."
def RunStep(self):
if self._options.reviewer:
print "Using account %s for review." % self._options.reviewer
reviewer = self._options.reviewer
else:
print "Please enter the email address of a V8 reviewer for your patch: ",
self.DieNoManualMode("A reviewer must be specified in forced mode.")
reviewer = self.ReadLine()
self.GitUpload(reviewer, self._options.author, self._options.force_upload)
class DetermineV8Sheriff(Step):
MESSAGE = "Determine the V8 sheriff for code review."
def RunStep(self):
self["sheriff"] = None
if not self._options.sheriff: # pragma: no cover
return
try:
# The googlers mapping maps @google.com accounts to @chromium.org
# accounts.
googlers = imp.load_source('googlers_mapping',
self._options.googlers_mapping)
googlers = googlers.list_to_dict(googlers.get_list())
except: # pragma: no cover
print "Skip determining sheriff without googler mapping."
return
# The sheriff determined by the rotation on the waterfall has a
# @google.com account.
url = "https://chromium-build.appspot.com/p/chromium/sheriff_v8.js"
match = re.match(r"document\.write\('(\w+)'\)", self.ReadURL(url))
# If "channel is sheriff", we can't match an account.
if match:
g_name = match.group(1)
self["sheriff"] = googlers.get(g_name + "@google.com",
g_name + "@chromium.org")
self._options.reviewer = self["sheriff"]
print "Found active sheriff: %s" % self["sheriff"]
else:
print "No active sheriff found."
def MakeStep(step_class=Step, number=0, state=None, config=None,
options=None, side_effect_handler=DEFAULT_SIDE_EFFECT_HANDLER):
# Allow to pass in empty dictionaries.
state = state if state is not None else {}
config = config if config is not None else {}
try:
message = step_class.MESSAGE
except AttributeError:
message = step_class.__name__
try:
requires = step_class.REQUIRES
except AttributeError:
requires = None
return step_class(message, requires, number=number, config=config,
state=state, options=options,
handler=side_effect_handler)
class ScriptsBase(object):
# TODO(machenbach): Move static config here.
def __init__(self, config, side_effect_handler=DEFAULT_SIDE_EFFECT_HANDLER,
state=None):
self._config = config
self._side_effect_handler = side_effect_handler
self._state = state if state is not None else {}
def _Description(self):
return None
def _PrepareOptions(self, parser):
pass
def _ProcessOptions(self, options):
return True
def _Steps(self): # pragma: no cover
raise Exception("Not implemented.")
def MakeOptions(self, args=None):
parser = argparse.ArgumentParser(description=self._Description())
parser.add_argument("-a", "--author", default="",
help="The author email used for rietveld.")
parser.add_argument("-g", "--googlers-mapping",
help="Path to the script mapping google accounts.")
parser.add_argument("-r", "--reviewer", default="",
help="The account name to be used for reviews.")
parser.add_argument("--sheriff", default=False, action="store_true",
help=("Determine current sheriff to review CLs. On "
"success, this will overwrite the reviewer "
"option."))
parser.add_argument("-s", "--step",
help="Specify the step where to start work. Default: 0.",
default=0, type=int)
self._PrepareOptions(parser)
if args is None: # pragma: no cover
options = parser.parse_args()
else:
options = parser.parse_args(args)
# Process common options.
if options.step < 0: # pragma: no cover
print "Bad step number %d" % options.step
parser.print_help()
return None
if options.sheriff and not options.googlers_mapping: # pragma: no cover
print "To determine the current sheriff, requires the googler mapping"
parser.print_help()
return None
# Defaults for options, common to all scripts.
options.manual = getattr(options, "manual", True)
options.force = getattr(options, "force", False)
# Derived options.
options.requires_editor = not options.force
options.wait_for_lgtm = not options.force
options.force_readline_defaults = not options.manual
options.force_upload = not options.manual
# Process script specific options.
if not self._ProcessOptions(options):
parser.print_help()
return None
return options
def RunSteps(self, step_classes, args=None):
options = self.MakeOptions(args)
if not options:
return 1
state_file = "%s-state.json" % self._config[PERSISTFILE_BASENAME]
if options.step == 0 and os.path.exists(state_file):
os.remove(state_file)
steps = []
for (number, step_class) in enumerate(step_classes):
steps.append(MakeStep(step_class, number, self._state, self._config,
options, self._side_effect_handler))
for step in steps[options.step:]:
if step.Run():
return 1
return 0
def Run(self, args=None):
return self.RunSteps(self._Steps(), args)
|
nextsmsversion/macchina.io
|
platform/JS/V8/v8-3.28.4/tools/push-to-trunk/common_includes.py
|
Python
|
apache-2.0
| 21,066
|
"""
Example of usage of builder classes. A builder class is something which takes an interface as an input and can be used to automatically instantiate, configure and connect
various components to that interface.
Components like buffers, CDCs, parsers, interconnects, decoders and interface converters can be easily instantiated using such a class.
"""
|
Nic30/hwtLib
|
hwtLib/examples/builders/__init__.py
|
Python
|
mit
| 342
|
# -*- coding: utf-8 -*-
"""
Unit tests for the da.monitor module.
---
type:
python_module
validation_level:
v00_minimum
protection:
k00_public
copyright:
"Copyright 2016 High Integrity Artificial Intelligence Systems"
license:
"Licensed under the Apache License, Version 2.0 (the License);
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an AS IS BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."
...
"""
import datetime
import os
import textwrap
import pytest
# -----------------------------------------------------------------------------
@pytest.fixture
def cfg(tmpdir):
"""
Return a mock build configuration.
"""
return {
'build_id': 'TEST_BUILD_ID',
'cfg_name': 'TEST_CFG_NAME',
'safe_branch_name': 'TEST_SAFE_BRANCH_NAME',
'defined_baseline': {
'commit_summary': 'TEST_COMMIT_SUMMARY'
},
'paths': {
'rootpath_tmp': os.path.join(str(tmpdir), 'tmp'),
'dirpath_branch_log': os.path.join(str(tmpdir), 'log'),
'dirpath_isolated_src': os.path.join(str(tmpdir), 'iso'),
'dirpath_lwc_root': os.path.join(str(tmpdir), 'src')
},
'timestamp': {
'datetime_utc': datetime.datetime.utcnow()
}
}
# -----------------------------------------------------------------------------
@pytest.fixture
def build_unit():
"""
Return a mock build unit.
"""
return {
'relpath': 'TEST_RELPATH'
}
# -----------------------------------------------------------------------------
@pytest.fixture
def patch_progressbar(monkeypatch):
"""
    Monkeypatch click.progressbar so it can run without a real tty.
render_progress has a bug that manifests when we call it without a
real tty. As a quick fix we simply disable the method here...
"""
import click._termui_impl
monkeypatch.setattr(
        click._termui_impl.ProgressBar, 'render_progress', lambda self: None)
# =============================================================================
class SpecifyBuildMonitorReportProgress:
"""
Specify the da.monitor.BuildMonitor.report_progress() function.
"""
def it_reports_progress_to_console_and_to_file(
self, cfg, build_unit, capsys):
"""
The report_progress() method prints reports to console and to file.
"""
import da.monitor
mon = da.monitor.BuildMonitor(cfg)
# The BuildMonitor class constructor writes a header to the console.
filepath_report = os.path.join(
cfg['paths']['dirpath_branch_log'], 'index.html')
(out, err) = capsys.readouterr()
assert err == ''
assert out == textwrap.dedent("""\
Build id: TEST_BUILD_ID
Last commit: TEST_COMMIT_SUMMARY
Report: file://{filepath_report}
""".format(filepath_report = filepath_report))
# The BuildMonitor class constructor writes a header to the file.
assert os.path.isfile(filepath_report)
with open(filepath_report, 'rt') as file:
report_header = file.read()
assert report_header == textwrap.dedent(
"""\
<html>
<head>
</head>
<body>
""")
mon.report_progress(build_unit)
(out, err) = capsys.readouterr()
assert err == ''
assert out != ''
# =============================================================================
class SpecifyBuildMonitorReportNonconformity:
"""
Specify the da.monitor.BuildMonitor.report_nonconformity() function.
"""
def it_is_callable(self):
"""
The report_nonconformity() method is callable.
"""
import da.monitor
assert callable(da.monitor.BuildMonitor.report_nonconformity)
# =============================================================================
class SpecifyBuildMonitorNotifyBuildEnd:
"""
Specify the da.monitor.BuildMonitor.notify_build_end() function.
"""
def it_is_callable(self):
"""
The notify_build_end() method is callable.
"""
import da.monitor
assert callable(da.monitor.BuildMonitor.notify_build_end)
|
wtpayne/hiai
|
a3_src/h70_internal/da/monitor/spec/spec_monitor.py
|
Python
|
apache-2.0
| 4,825
|
#!/usr/bin/env python
#
# grass_lidar
#
# Author: Dan Clewley (dac@pml.ac.uk)
# Created on: 05 November 2014
# This file has been created by ARSF Data Analysis Node and
# is licensed under the GPL v3 Licence. A copy of this
# licence is available to download with this file.
"""
Functions for working with LiDAR data using GRASS
Available Functions:
* las_to_dsm - Create DSM from LAS file.
* las_to_dtm - Create last-returns DTM from LAS file.
* las_to_intensity - Create intensity image from LAS file.
* las_to_density - Create density image from LAS file.
* las_to_raster - Convert lidar data in LAS format to raster.
* ascii_to_raster - Convert lidar data in ASCII format to raster.
* las_to_vector - Import LAS file into GRASS as vector.
* ascii_to_vector - Import ASCII file into GRASS as vector.
"""
from __future__ import print_function # Import print function (so we can use Python 3 syntax with Python 2)
import os, sys
import shutil
import tempfile
# Import common files
from .. import dem_common
from .. import dem_utilities
from . import laspy_lidar
from . import lastools_lidar
from . import ascii_lidar
from .. import grass_library
from .. import dem_common_functions
# Import GRASS
sys.path.append(dem_common.GRASS_PYTHON_LIB_PATH)
try:
import grass.script as grass
except ImportError as err:
raise ImportError("Could not import grass library. "
"Try setting 'GRASS_PYTHON_LIB_PATH' environmental variable."
"\n{}".format(err))
def ascii_to_raster(in_ascii,out_raster=None,
remove_grassdb=True,
grassdb_path=None,
xyz_bounds=None,
val_field='z',
drop_class=None,
keep_class=None,
returns='all',
raster_statistic='mean',
projection=dem_common.DEFAULT_LIDAR_PROJECTION_GRASS,
bin_size=dem_common.DEFAULT_LIDAR_RES_METRES,
out_raster_type=dem_common.GDAL_OUTFILE_DATATYPE):
"""
Create raster from lidar data in ASCII format using GRASS.
Uses r.in.xyz function in GRASS. For more details see:
https://grass.osgeo.org/grass64/manuals/r.in.xyz.html
By default the pixel values are the mean 'val_field' of all points within a pixel.
Default is to use the elevation and create a DSM.
To create a DTM classify ground returns in LAS file and only export these
to the ASCII file.
Intensity images can be created by setting the value field to 'intensity'
If an existing grass db is provided will add DSM to this,
else will create one.
Default is to leave raster in GRASS database rather than exporting.
Arguments:
* in_ascii - Input ASCII file.
* out_raster - Output raster (set to None to leave in GRASS database).
* remove_grassdb - Remove GRASS database after processing is complete.
* grassdb_path - Input path to GRASS database, if not supplied will create one.
* val_field - Value field to use for raster, default is 'z' (elevation).
* drop_class - Class to drop from input lidar file (default = None, assume classes are dropped prior to input).
* keep_class - Class to keep from input lidar file (default = None).
* returns - Returns to keep from input lidar file. Options are 'all' (Default), 'first' and 'last'.
* raster_statistic - Statistic to use for points (default mean)
* projection - Projection of lidar data (e.g., UKBNG).
* bin_size - Resolution to use for output raster.
* out_raster_type - GDAL datatype for output raster (e.g., Float32).
Returns:
* out_raster path / out_raster name in GRASS database.
* path to GRASS database / None.
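Example (illustrative file names)::

   from arsf_dem import dem_lidar
   dem_lidar.grass_lidar.ascii_to_raster('in_lidar.txt', 'out_dsm.dem')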
"""
try:
dem_common.LIDAR_ASCII_ORDER[val_field]
except KeyError:
raise Exception('Could not find field "{}"'.format(val_field))
if out_raster is not None:
out_raster_name = os.path.basename(out_raster).replace("-","_")
out_raster_format = dem_utilities.get_gdal_type_from_path(out_raster)
else:
out_raster_name = os.path.basename(in_ascii).replace("-","_")
out_raster_name = os.path.splitext(out_raster_name)[0] + '.dem'
out_raster_format = dem_common.GDAL_OUTFILE_FORMAT
# Check if all returns are needed or only first / last
first_only = False
last_only = False
if returns.lower() == 'first':
first_only = True
elif returns.lower() == 'last':
last_only = True
# Create copy of ASCII file, if needed
if (drop_class is not None) or (keep_class is not None) or first_only or last_only:
tmp_ascii_fh, in_ascii_drop = tempfile.mkstemp(suffix='.txt', prefix='lidar_',dir=dem_common.TEMP_PATH)
grass_library.removeASCIIClass(in_ascii, in_ascii_drop,drop_class=drop_class, first_only=first_only, last_only=last_only)
else:
in_ascii_drop = in_ascii
# Get bounds from ASCII (if not passed in)
bounding_box = {}
if xyz_bounds is None or xyz_bounds[0][0] is None:
xyz_bounds = ascii_lidar.get_ascii_bounds(in_ascii_drop)
bounding_box['w'] = xyz_bounds[0][0]
bounding_box['e'] = xyz_bounds[0][1]
bounding_box['s'] = xyz_bounds[1][0]
bounding_box['n'] = xyz_bounds[1][1]
# If GRASS database has not been passed in
# need to create one and initialise
if grassdb_path is None:
grassdb_path = grass_library.grassDBsetup()
grass_library.setLocation(projection)
else:
location = projection
mapset = 'PERMANENT'
grass.setup.init(dem_common.GRASS_LIB_PATH,
grassdb_path,
location,
mapset)
# Set extent
grass_library.SetRegion(bounds=bounding_box,res=bin_size)
# Import lidar into GRASS and create DEM
print('Importing {} to GRASS'.format(in_ascii_drop))
grass.run_command('r.in.xyz',
input=in_ascii_drop,
output=out_raster_name,
method=raster_statistic,
fs=' ',
x=dem_common.LIDAR_ASCII_ORDER['x'],
y=dem_common.LIDAR_ASCII_ORDER['y'],
z=dem_common.LIDAR_ASCII_ORDER[val_field],
overwrite = True)
if not grass_library.checkFileExists(out_raster_name):
raise Exception('Could not create output raster')
if out_raster is not None:
print('Exporting')
grass.run_command('r.out.gdal',
format=out_raster_format,
type=out_raster_type,
input=out_raster_name,
output=out_raster,
nodata=dem_common.NODATA_VALUE,
overwrite=True,
flags='fc')
dem_utilities.remove_gdal_aux_file(out_raster)
if (drop_class is not None) or (keep_class is not None) or first_only or last_only:
os.close(tmp_ascii_fh)
os.remove(in_ascii_drop)
# Remove GRASS database if requested.
if remove_grassdb:
shutil.rmtree(grassdb_path)
return out_raster, None
else:
return out_raster_name, grassdb_path
def las_to_raster(in_las,out_raster=None,
remove_grassdb=True,
grassdb_path=None,
val_field='z',
drop_class=7,
keep_class=None,
las2txt_flags=None,
raster_statistic='mean',
projection=dem_common.DEFAULT_LIDAR_PROJECTION_GRASS,
bin_size=dem_common.DEFAULT_LIDAR_RES_METRES,
out_raster_type=dem_common.GDAL_OUTFILE_DATATYPE):
"""
Create a raster from lidar data in LAS format using GRASS.
The pixel values are the mean 'val_field' of all points within a pixel.
Default is to use the elevation ('z') and create a DSM.
To create a DTM classify ground returns in LAS file and select non-ground
classes to be dropped using 'drop_class'.
Intensity images can be created by setting the value field to 'intensity'
Currently a wrapper for ascii_to_raster which converts LAS to ASCII before
running.
In GRASS 7 native LAS support should be possible.
If an existing grass db is provided will add raster to this,
else will create one.
Default is to leave raster in GRASS database rather than exporting.
Arguments:
* in_las - Input LAS file.
* out_raster - Output raster (set to None to leave in GRASS database).
* remove_grassdb - Remove GRASS database after processing is complete.
* grassdb_path - Input path to GRASS database, if not supplied will create one.
* val_field - Value field to use for raster, default is 'z' (elevation).
* drop_class - Class / list of classes to drop when converting to ASCII (default = 7).
* keep_class - Class / list of classes to keep when converting to ASCII.
* las2txt_flags - Additional flags passed to las2txt when converting LAS to ASCII.
* raster_statistic - Statistic to use for points (default mean)
* projection - Projection of lidar data (e.g., UKBNG).
* bin_size - Resolution to use for output raster.
* out_raster_type - GDAL datatype for output raster (e.g., Float32).
Returns:
* out_raster path / out_raster name in GRASS database
* path to GRASS database / None"
"""
tmp_ascii_fh, ascii_file_tmp = tempfile.mkstemp(suffix='.txt', prefix='lidar_',dir=dem_common.TEMP_PATH)
if out_raster is not None:
out_raster_name = os.path.basename(out_raster).replace("-","_")
else:
out_raster_name = os.path.basename(in_las).replace("-","_")
out_raster_name = os.path.splitext(out_raster_name)[0] + '.dem'
# Try to get bounds of LAS file if laspy library is available
# Don't check if input is LAZ.
xyz_bounds = None
if laspy_lidar.HAVE_LASPY and os.path.splitext(in_las)[-1].lower() != '.laz':
try:
xyz_bounds = laspy_lidar.get_las_bounds(in_las,
from_header=True)
except Exception as err:
dem_common_functions.WARNING('Could not get bounds from LAS file ({}). Will try from ASCII'.format(err))
# Convert LAS to ASCII
print('Converting LAS file to ASCII')
lastools_lidar.convert_las_to_ascii(in_las,ascii_file_tmp,
drop_class=drop_class,
keep_class=keep_class,
flags=las2txt_flags)
# Create raster from ASCII
try:
out_raster_name, grassdb_path = ascii_to_raster(ascii_file_tmp,out_raster,
remove_grassdb=remove_grassdb,
grassdb_path=grassdb_path,
xyz_bounds=xyz_bounds,
val_field=val_field,
raster_statistic=raster_statistic,
projection=projection,
bin_size=bin_size,
out_raster_type=out_raster_type)
except Exception as err:
os.close(tmp_ascii_fh)
os.remove(ascii_file_tmp)
raise
# Remove ASCII file created
os.close(tmp_ascii_fh)
os.remove(ascii_file_tmp)
return out_raster_name, grassdb_path
def ascii_to_vector(in_ascii,
grassdb_path=None,
xyz_bounds=None,
drop_class=None,
keep_class=None,
returns='all',
projection=dem_common.DEFAULT_LIDAR_PROJECTION_GRASS):
"""
Imports ASCII to GRASS vector
Uses v.in.ascii function in GRASS. For more details see:
https://grass.osgeo.org/grass64/manuals/v.in.ascii.html
Arguments:
* in_ascii - Input ASCII file.
* grassdb_path - Input path to GRASS database, if not supplied will create one.
* drop_class - Class to drop from input lidar file (default = None, assume classes are dropped prior to input).
* keep_class - Class to keep from input lidar file (default = None).
* returns - Returns to keep from input lidar file. Options are 'all' (Default), 'first' and 'last'.
* projection - Projection of lidar data (e.g., UKBNG).
Returns:
* out_vector name in GRASS database.
* path to GRASS database
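Example (illustrative file name)::

   from arsf_dem import dem_lidar
   dem_lidar.grass_lidar.ascii_to_vector('in_lidar.txt')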
"""
out_vector_name = os.path.basename(in_ascii).replace("-","_")
out_vector_name = out_vector_name.replace(".","_")
out_vector_name = os.path.splitext(out_vector_name)[0]
# Check if all returns are needed or only first / last
first_only = False
last_only = False
if returns.lower() == 'first':
first_only = True
elif returns.lower() == 'last':
last_only = True
# Create copy of ASCII file, if needed
if (drop_class is not None) or (keep_class is not None) or first_only or last_only:
tmp_ascii_fh, in_ascii_drop = tempfile.mkstemp(suffix='.txt', prefix='lidar_',dir=dem_common.TEMP_PATH)
grass_library.removeASCIIClass(in_ascii, in_ascii_drop,drop_class=drop_class, first_only=first_only, last_only=last_only)
else:
in_ascii_drop = in_ascii
# Get bounds from ASCII (if not passed in)
bounding_box = {}
if xyz_bounds is None or xyz_bounds[0][0] is None:
xyz_bounds = ascii_lidar.get_ascii_bounds(in_ascii_drop)
bounding_box['w'] = xyz_bounds[0][0]
bounding_box['e'] = xyz_bounds[0][1]
bounding_box['s'] = xyz_bounds[1][0]
bounding_box['n'] = xyz_bounds[1][1]
# If GRASS database has not been passed in
# need to create one and initialise
if grassdb_path is None:
grassdb_path = grass_library.grassDBsetup()
grass_library.setLocation(projection)
else:
location = projection
mapset = 'PERMANENT'
grass.setup.init(dem_common.GRASS_LIB_PATH,
grassdb_path,
location,
mapset)
# Set extent
grass_library.SetRegion(bounds=bounding_box,res=dem_common.DEFAULT_LIDAR_RES_METRES)
# Import lidar into GRASS
print('Importing {} to GRASS'.format(in_ascii_drop))
grass.run_command('v.in.ascii',
input=in_ascii_drop,
output=out_vector_name,
fs=' ',
x=dem_common.LIDAR_ASCII_ORDER['x'],
y=dem_common.LIDAR_ASCII_ORDER['y'],
z=dem_common.LIDAR_ASCII_ORDER['z'],
cat=dem_common.LIDAR_ASCII_ORDER['returnnumber'],
flags='bt',
overwrite = True)
if (drop_class is not None) or (keep_class is not None) or first_only or last_only:
os.close(tmp_ascii_fh)
os.remove(in_ascii_drop)
return out_vector_name, grassdb_path
def las_to_vector(in_las,
grassdb_path=None,
drop_class=7,
keep_class=None,
las2txt_flags=None,
projection=dem_common.DEFAULT_LIDAR_PROJECTION_GRASS):
"""
Import LAS points to GRASS as vector data
Currently a wrapper for ascii_to_vector which converts LAS to ASCII before
running.
In GRASS 7 native LAS support should be possible.
If an existing grass db is provided will add raster to this,
else will create one.
Arguments:
* in_las - Input LAS file.
* grassdb_path - Input path to GRASS database, if not supplied will create one.
* drop_class - Class / list of classes to drop when converting to ASCII (default = 7).
* keep_class - Class / list of classes to keep when converting to ASCII.
* las2txt_flags - Additional flags passed to las2txt when converting LAS to ASCII.
* projection - Projection of lidar data (e.g., UKBNG).
Returns:
* out_vector name in GRASS database
* path to GRASS database
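Example (illustrative file name)::

   from arsf_dem import dem_lidar
   dem_lidar.grass_lidar.las_to_vector('in_las_file.las')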
"""
tmp_ascii_fh, ascii_file_tmp = tempfile.mkstemp(suffix='.txt',
prefix='lidar_',
dir=dem_common.TEMP_PATH)
out_vector_name = os.path.basename(in_las).replace("-","_")
out_vector_name = os.path.splitext(out_vector_name)[0]
out_vector_name = out_vector_name.replace(".","_")
# Try to get bounds of LAS file if laspy library is available
# Don't check if input is LAZ.
xyz_bounds = None
if laspy_lidar.HAVE_LASPY and os.path.splitext(in_las)[-1].lower() != '.laz':
try:
xyz_bounds = laspy_lidar.get_las_bounds(in_las,
from_header=True)
except Exception as err:
dem_common_functions.WARNING('Could not get bounds from LAS file ({}). Will try from ASCII'.format(err))
# Convert LAS to ASCII
print('Converting LAS file to ASCII')
lastools_lidar.convert_las_to_ascii(in_las,ascii_file_tmp,
drop_class=drop_class,
keep_class=keep_class,
flags=las2txt_flags)
# Import to GRASS
try:
out_vector_name, grassdb_path = ascii_to_vector(ascii_file_tmp,
grassdb_path=grassdb_path,
xyz_bounds=xyz_bounds,
projection=projection)
except Exception as err:
os.close(tmp_ascii_fh)
os.remove(ascii_file_tmp)
raise
# Remove ASCII file created
os.close(tmp_ascii_fh)
os.remove(ascii_file_tmp)
return out_vector_name, grassdb_path
def las_to_dsm(in_las,out_raster=None,
remove_grassdb=True,
grassdb_path=None,
projection=dem_common.DEFAULT_LIDAR_PROJECTION_GRASS,
bin_size=dem_common.DEFAULT_LIDAR_RES_METRES,
out_raster_type=dem_common.GDAL_OUTFILE_DATATYPE):
"""
Helper function to generate a Digital Surface Model (DSM) from a LAS file using
GRASS.
The DSM is created using only first returns.
Arguments:
* in_las - Input LAS file.
* out_raster - Output raster (set to None to leave in GRASS database).
* remove_grassdb - Remove GRASS database after processing is complete.
* grassdb_path - Input path to GRASS database, if not supplied will create one.
* projection - Projection of lidar data (e.g., UKBNG).
* bin_size - Resolution to use for output raster.
* out_raster_type - GDAL datatype for output raster (e.g., Float32)
Returns:
* out_raster path / out_raster name in GRASS database
* path to GRASS database / None"
Example::
from arsf_dem import dem_lidar
dem_lidar.grass_lidar.las_to_dsm('in_las_file.las','out_dsm.dem')
"""
out_raster_name, grassdb_path = las_to_raster(in_las,out_raster=out_raster,
remove_grassdb=remove_grassdb,
grassdb_path=grassdb_path,
val_field='z',
drop_class=7,
las2txt_flags='-first_only',
projection=projection,
bin_size=bin_size,
out_raster_type=out_raster_type)
return out_raster_name, grassdb_path
def las_to_dtm(in_las,out_raster=None,
remove_grassdb=True,
grassdb_path=None,
projection=dem_common.DEFAULT_LIDAR_PROJECTION_GRASS,
bin_size=dem_common.DEFAULT_LIDAR_RES_METRES,
out_raster_format=dem_common.GDAL_OUTFILE_FORMAT,
out_raster_type=dem_common.GDAL_OUTFILE_DATATYPE):
"""
Helper function to generate a Digital Terrain Model (DTM) from a LAS file using
GRASS.
The DTM is created using only last returns and is therefore not a true DTM.
To improve the quality of the DTM, classification of ground returns is required.
If a ground classified LAS file is available a better DTM can be created using
'las_to_raster' and setting 'keep_class=2'.
Arguments:
* in_las - Input LAS file.
* out_raster - Output raster (set to None to leave in GRASS database).
* remove_grassdb - Remove GRASS database after processing is complete.
* grassdb_path - Input path to GRASS database, if not supplied will create one.
* projection - Projection of lidar data (e.g., UKBNG).
* bin_size - Resolution to use for output raster.
* out_raster_type - GDAL datatype for output raster (e.g., Float32)
Returns:
* out_raster path / out_raster name in GRASS database
* path to GRASS database / None"
Example::
from arsf_dem import dem_lidar
dem_lidar.grass_lidar.las_to_dtm('in_las_file.las','out_dtm.dem')
"""
out_raster_name, grassdb_path = las_to_raster(in_las,out_raster=out_raster,
remove_grassdb=remove_grassdb,
grassdb_path=grassdb_path,
val_field='z',
drop_class=7,
las2txt_flags='-last_only',
projection=projection,
bin_size=bin_size,
out_raster_type=out_raster_type)
return out_raster_name, grassdb_path
def las_to_intensity(in_las,out_raster=None,
remove_grassdb=True,
grassdb_path=None,
projection=dem_common.DEFAULT_LIDAR_PROJECTION_GRASS,
bin_size=dem_common.DEFAULT_LIDAR_RES_METRES,
out_raster_type=dem_common.GDAL_OUTFILE_DATATYPE):
"""
Helper function to generate an intensity image from a LAS file using
GRASS.
Arguments:
* in_las - Input LAS file.
* out_raster - Output raster (set to None to leave in GRASS database).
* remove_grassdb - Remove GRASS database after processing is complete.
* grassdb_path - Input path to GRASS database, if not supplied will create one.
* projection - Projection of lidar data (e.g., UKBNG).
* bin_size - Resolution to use for output raster.
* out_raster_type - GDAL datatype for output raster (e.g., Float32)
Returns:
* out_raster path / out_raster name in GRASS database
* path to GRASS database / None"
Example::
from arsf_dem import dem_lidar
dem_lidar.grass_lidar.las_to_intensity('in_las_file.las','out_intensity.tif')
"""
# Get output raster format to check if it is JPEG
if out_raster is not None:
out_raster_format = dem_utilities.get_gdal_type_from_path(out_raster)
else:
out_raster_format = ''
# If JPEG output call export screenshot after to scale image
if out_raster is not None and out_raster_format == 'JPEG':
out_raster_name, grassdb_path = las_to_raster(in_las,out_raster=None,
remove_grassdb=False,
grassdb_path=grassdb_path,
val_field='intensity',
drop_class=7,
las2txt_flags='-last_only',
projection=projection,
bin_size=bin_size,
out_raster_type=out_raster_type)
out_raster_name, grassdb_path = dem_utilities.export_screenshot(out_raster_name,
out_raster,
import_to_grass=False,
projection=projection,
grassdb_path=grassdb_path,
remove_grassdb=remove_grassdb)
else:
out_raster_name, grassdb_path = las_to_raster(in_las,out_raster=out_raster,
remove_grassdb=remove_grassdb,
grassdb_path=grassdb_path,
val_field='intensity',
drop_class=7,
las2txt_flags='-last_only',
projection=projection,
bin_size=bin_size,
out_raster_type=out_raster_type)
return out_raster_name, grassdb_path
def las_to_density(in_las,out_raster=None,
remove_grassdb=True,
grassdb_path=None,
projection=dem_common.DEFAULT_LIDAR_PROJECTION_GRASS,
bin_size=1,
out_raster_type=dem_common.GDAL_OUTFILE_DATATYPE):
"""
Helper function to generate a map of point density from a LAS file using
GRASS.
Arguments:
* in_las - Input LAS file.
* out_raster - Output raster (set to None to leave in GRASS database).
* remove_grassdb - Remove GRASS database after processing is complete.
* grassdb_path - Input path to GRASS database, if not supplied will create one.
* projection - Projection of lidar data (e.g., UKBNG).
* bin_size - Resolution to use for output raster.
* out_raster_type - GDAL datatype for output raster (e.g., Float32)
Returns:
* out_raster path / out_raster name in GRASS database
* path to GRASS database / None"
Example::
from arsf_dem import dem_lidar
dem_lidar.grass_lidar.las_to_density('in_las_file.las','out_density.tif')
"""
out_raster_name, grassdb_path = las_to_raster(in_las,out_raster=out_raster,
remove_grassdb=remove_grassdb,
grassdb_path=grassdb_path,
val_field='z',
drop_class=7,
raster_statistic='n',
projection=projection,
bin_size=bin_size,
out_raster_type=out_raster_type)
return out_raster_name, grassdb_path
|
pmlrsg/arsf_dem_scripts
|
arsf_dem/dem_lidar/grass_lidar.py
|
Python
|
gpl-3.0
| 26,458
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 BhaaL
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Convert Gettext PO localization files to flat XML files.
See: http://docs.translatehouse.org/projects/translate-toolkit/en/latest/commands/flatxml2po.html
for examples and usage instructions.
"""
from translate.convert import convert
from translate.storage import flatxml, po
class po2flatxml:
"""Convert to a single PO file to a single XML file, optionally
applying modifications to a template file instead of creating
one from scratch based on input parameters.
"""
TargetStoreClass = flatxml.FlatXMLFile
TargetUnitClass = flatxml.FlatXMLUnit
def __init__(self, inputfile, outputfile, templatefile=None,
root="root", value="str", key="key", ns=None, indent=2):
"""Initialize the converter."""
self.inputfile = inputfile
self.outputfile = outputfile
self.templatefile = templatefile
self.value_name = value
self.key_name = key
self.namespace = ns
indent_chars = None
if indent > 0:
indent_chars = (" " * indent)
self.source_store = po.pofile(inputfile)
self.target_store = self.TargetStoreClass(templatefile,
root_name=root,
value_name=value,
key_name=key,
namespace=ns,
indent_chars=indent_chars)
def convert_unit(self, unit):
"""Convert a source format unit to a target format unit."""
target_unit = self.TargetUnitClass(source=None,
namespace=self.namespace,
element_name=self.value_name,
attribute_name=self.key_name)
target_unit.source = unit.source
if unit.istranslated() or not bool(unit.source):
target_unit.target = unit.target
else:
target_unit.target = unit.source
return target_unit
def convert_store(self):
"""Convert a single source file to a target format file."""
for unit in self.source_store.units:
key = unit.source
if not key:
continue
target_unit = self.target_store.findid(key)
if target_unit is None:
target_unit = self.convert_unit(unit)
self.target_store.addunit(target_unit)
else:
target_unit.target = unit.target
def run(self):
"""Run the converter."""
self.convert_store()
if self.target_store.isempty():
return 0
self.target_store.serialize(self.outputfile)
return 1
def run_converter(inputfile, outputfile, templatefile=None,
root="root", value="str", key="key", ns=None, indent=2):
"""Wrapper around the converter."""
return po2flatxml(inputfile, outputfile, templatefile,
root, value, key, ns, indent).run()
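# Illustrative use of run_converter (assumes open file objects, as the
# convert framework supplies; file names and element/attribute names are
# placeholders):
#
#     with open("strings.po", "rb") as inp, open("strings.xml", "wb") as out:
#         run_converter(inp, out, value="string", key="name")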
formats = {
("po"): ("xml", run_converter),
("po", "xml"): ("xml", run_converter),
}
def main(argv=None):
parser = convert.ConvertOptionParser(formats,
usetemplates=True,
description=__doc__)
parser.add_option("-r", "--root", action="store", dest="root",
default="root",
help='name of the XML root element (default: "root")')
parser.add_option("-v", "--value", action="store", dest="value",
default="str",
help='name of the XML value element (default: "str")')
parser.add_option("-k", "--key", action="store", dest="key",
default="key",
help='name of the XML key attribute (default: "key")')
parser.add_option("-n", "--namespace", action="store", dest="ns",
default=None,
help="XML namespace uri (default: None)")
parser.add_option("-w", "--indent", action="store", dest="indent",
type="int", default=2,
help="indent width in spaces, 0 for no indent (default: 2)")
parser.passthrough.append("root")
parser.passthrough.append("value")
parser.passthrough.append("key")
parser.passthrough.append("ns")
parser.passthrough.append("indent")
parser.run(argv)
if __name__ == "__main__":
main()
|
unho/translate
|
translate/convert/po2flatxml.py
|
Python
|
gpl-2.0
| 5,280
|