repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
waytai/odoo | addons/account_budget/report/analytic_account_budget_report.py | 360 | 7589 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
class analytic_account_budget_report(report_sxw.rml_parse):
    """RML parser for the "Analytic Account Budget" report.

    Exposes two helpers to the report template:
      * ``funct``       -- builds the per-budget detail rows for one
                           analytic account.
      * ``funct_total`` -- returns the grand totals accumulated by the
                           preceding ``funct`` call.
    """

    def __init__(self, cr, uid, name, context):
        super(analytic_account_budget_report, self).__init__(cr, uid, name, context=context)
        # Make the helpers (and the ``time`` module) callable from the
        # RML template.
        self.localcontext.update( {
            'funct': self.funct,
            'funct_total': self.funct_total,
            'time': time,
        })
        self.context = context

    def funct(self, object, form, ids=None, done=None, level=1):
        """Return the report rows for the analytic account ``object``.

        ``form`` must carry ``date_from``/``date_to`` (the wizard dates).
        Each row dict uses ``status`` 1 for a budget header row and 2 for
        a budget-line detail row.

        Side effect: (re)initialises the module-level ``tot`` dict that
        :meth:`funct_total` reads afterwards.
        """
        if ids is None:
            ids = {}
        if not ids:
            ids = self.ids
        if not done:
            done = {}
        # NOTE(review): module-level global shared with funct_total(); this
        # is not re-entrant if two reports render concurrently — confirm
        # whether the report engine serialises parser calls.
        global tot
        tot = {
            'theo':0.00,
            'pln':0.00,
            'prac':0.00,
            'perc':0.00
        }
        result = []
        accounts = self.pool.get('account.analytic.account').browse(self.cr, self.uid, [object.id], self.context.copy())
        c_b_lines_obj = self.pool.get('crossovered.budget.lines')
        obj_c_budget = self.pool.get('crossovered.budget')
        for account_id in accounts:
            res = {}
            b_line_ids = []
            for line in account_id.crossovered_budget_line:
                b_line_ids.append(line.id)
            # No budget lines on this account: nothing to report at all.
            if not b_line_ids:
                return []
            d_from = form['date_from']
            d_to = form['date_to']
            # Distinct budgets that have at least one line on this account.
            self.cr.execute('SELECT DISTINCT(crossovered_budget_id) FROM crossovered_budget_lines WHERE id =ANY(%s)',(b_line_ids,))
            budget_ids = self.cr.fetchall()
            # The wizard dates drive the theoretical/practical amount
            # computations done by crossovered.budget.lines below.
            context = {'wizard_date_from':d_from,'wizard_date_to':d_to}
            for i in range(0, len(budget_ids)):
                budget_name = obj_c_budget.browse(self.cr, self.uid, [budget_ids[i][0]])
                # Header row for the budget (status 1); its amount cells
                # are patched with the subtotals after the line loop below.
                res= {
                    'b_id':'-1',
                    'a_id':'-1',
                    'name':budget_name[0].name,
                    'status':1,
                    'theo':0.00,
                    'pln':0.00,
                    'prac':0.00,
                    'perc':0.00
                }
                result.append(res)
                line_ids = c_b_lines_obj.search(self.cr, self.uid, [('id', 'in', b_line_ids), ('crossovered_budget_id','=',budget_ids[i][0])])
                line_id = c_b_lines_obj.browse(self.cr, self.uid, line_ids)
                tot_theo = tot_pln = tot_prac = tot_perc = 0
                # General budgets that already received a detail row for
                # this crossovered budget (one detail row per general budget).
                done_budget = []
                for line in line_id:
                    if line.id in b_line_ids:
                        theo = pract = 0.00
                        theo = c_b_lines_obj._theo_amt(self.cr, self.uid, [line.id], context)[line.id]
                        pract = c_b_lines_obj._prac_amt(self.cr, self.uid, [line.id], context)[line.id]
                        if line.general_budget_id.id in done_budget:
                            # Merge this line's amounts into the existing
                            # detail row and the running subtotals.
                            for record in result:
                                if record['b_id'] == line.general_budget_id.id and record['a_id'] == line.analytic_account_id.id:
                                    record['theo'] += theo
                                    record['pln'] += line.planned_amount
                                    record['prac'] += pract
                                    record['perc'] += line.percentage
                                    tot_theo += theo
                                    tot_pln += line.planned_amount
                                    tot_prac += pract
                                    tot_perc += line.percentage
                        else:
                            # First line for this general budget: create a
                            # new detail row (status 2).
                            res1 = {
                                'b_id': line.general_budget_id.id,
                                'a_id': line.analytic_account_id.id,
                                'name': line.general_budget_id.name,
                                'status': 2,
                                'theo': theo,
                                'pln': line.planned_amount,
                                'prac': pract,
                                'perc': line.percentage
                            }
                            tot_theo += theo
                            tot_pln += line.planned_amount
                            tot_prac += pract
                            tot_perc += line.percentage
                            result.append(res1)
                            done_budget.append(line.general_budget_id.id)
                    else:
                        # Line belongs to another analytic account: still
                        # emit a zeroed detail row once per general budget.
                        if line.general_budget_id.id in done_budget:
                            continue
                        else:
                            res1={
                                'b_id': line.general_budget_id.id,
                                'a_id': line.analytic_account_id.id,
                                'name': line.general_budget_id.name,
                                'status': 2,
                                'theo': 0.00,
                                'pln': 0.00,
                                'prac': 0.00,
                                'perc': 0.00
                            }
                            result.append(res1)
                            done_budget.append(line.general_budget_id.id)
                # Achievement percentage; guard against division by zero.
                if tot_theo == 0.00:
                    tot_perc = 0.00
                else:
                    tot_perc = float(tot_prac / tot_theo) * 100
                # The header row sits len(done_budget) detail rows before
                # the end of ``result``; patch its amount cells in place.
                result[-(len(done_budget) +1)]['theo'] = tot_theo
                tot['theo'] +=tot_theo
                result[-(len(done_budget) +1)]['pln'] = tot_pln
                tot['pln'] +=tot_pln
                result[-(len(done_budget) +1)]['prac'] = tot_prac
                tot['prac'] +=tot_prac
                result[-(len(done_budget) +1)]['perc'] = tot_perc
                if tot['theo'] == 0.00:
                    tot['perc'] = 0.00
                else:
                    tot['perc'] = float(tot['prac'] / tot['theo']) * 100
        return result

    def funct_total(self, form):
        """Return a one-element list with the grand totals accumulated by
        the preceding :meth:`funct` call (read from the module-level
        ``tot`` dict)."""
        result = []
        res = {}
        res = {
            'tot_theo': tot['theo'],
            'tot_pln': tot['pln'],
            'tot_prac': tot['prac'],
            'tot_perc': tot['perc']
        }
        result.append(res)
        return result
class report_analyticaccountbudget(osv.AbstractModel):
    """Report registration glue: wires the QWeb template
    ``account_budget.report_analyticaccountbudget`` to the legacy RML
    parser above through the ``report.abstract_report`` mechanism."""
    _name = 'report.account_budget.report_analyticaccountbudget'
    _inherit = 'report.abstract_report'
    _template = 'account_budget.report_analyticaccountbudget'
    # Parser class whose localcontext the template renders with.
    _wrapped_report_class = analytic_account_budget_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
victorzhao/miniblink49 | third_party/WebKit/Tools/Scripts/webkitpy/tool/commands/rebaseline_unittest.py | 6 | 70107 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer
from webkitpy.common.checkout.scm.scm_mock import MockSCM
from webkitpy.common.host_mock import MockHost
from webkitpy.common.net.buildbot.buildbot_mock import MockBuilder
from webkitpy.common.net.layouttestresults import LayoutTestResults
from webkitpy.common.system.executive_mock import MockExecutive
from webkitpy.common.system.executive_mock import MockExecutive2
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.tool.commands.rebaseline import *
from webkitpy.tool.mocktool import MockTool, MockOptions
class _BaseTestCase(unittest.TestCase):
    """Shared fixture for the rebaseline command tests.

    Subclasses set ``command_constructor`` to the command class under
    test; :meth:`setUp` instantiates it and binds it to a MockTool.
    """
    MOCK_WEB_RESULT = 'MOCK Web result, convert 404 to None=True'
    WEB_PREFIX = 'http://example.com/f/builders/WebKit Mac10.7/results/layout-test-results'

    command_constructor = None

    def setUp(self):
        self.tool = MockTool()
        self.command = self.command_constructor()  # lint warns that command_constructor might not be set, but this is intentional; pylint: disable=E1102
        self.command.bind_to_tool(self.tool)
        self.lion_port = self.tool.port_factory.get_from_builder_name("WebKit Mac10.7")
        self.lion_expectations_path = self.lion_port.path_to_generic_test_expectations_file()
        self.tool.filesystem.write_text_file(self.tool.filesystem.join(self.lion_port.layout_tests_dir(), "VirtualTestSuites"),
                                             '[]')

        # FIXME: crbug.com/279494. We should override builders._exact_matches
        # here to point to a set of test ports and restore the value in
        # tearDown(), and that way the individual tests wouldn't have to worry
        # about it.

    def _expand(self, path):
        # Resolve *path* against the layout-tests directory unless it is
        # already absolute.
        if self.tool.filesystem.isabs(path):
            return path
        return self.tool.filesystem.join(self.lion_port.layout_tests_dir(), path)

    def _read(self, path):
        return self.tool.filesystem.read_text_file(self._expand(path))

    def _write(self, path, contents):
        self.tool.filesystem.write_text_file(self._expand(path), contents)

    def _zero_out_test_expectations(self):
        # Blank every expectations file of every port, then forget the
        # writes so tests only observe their own filesystem changes.
        for port_name in self.tool.port_factory.all_port_names():
            port = self.tool.port_factory.get(port_name)
            for path in port.expectations_files():
                self._write(path, '')
        self.tool.filesystem.written_files = {}

    def _setup_mock_builder_data(self):
        # Canned layout-test results: two userscripts tests failing with
        # IMAGE+TEXT, registered for both mock and real builder names.
        data = LayoutTestResults.results_from_string("""ADD_RESULTS({
    "tests": {
        "userscripts": {
            "first-test.html": {
                "expected": "PASS",
                "actual": "IMAGE+TEXT"
            },
            "second-test.html": {
                "expected": "FAIL",
                "actual": "IMAGE+TEXT"
            }
        }
    }
});""")
        # FIXME: crbug.com/279494 - we shouldn't be mixing mock and real builder names.
        for builder in ['MOCK builder', 'MOCK builder (Debug)', 'WebKit Mac10.7']:
            self.command._builder_data[builder] = data
class TestCopyExistingBaselinesInternal(_BaseTestCase):
    """Tests for the copy-existing-baselines-internal command.

    Each test temporarily replaces ``builders._exact_matches`` with a set
    of test ports (restored in ``finally``) and runs the command while
    capturing its JSON output.
    """
    command_constructor = CopyExistingBaselinesInternal

    # NOTE: the previous no-op setUp() override (which only called
    # super(...).setUp()) was removed; the base class setUp runs regardless.

    def test_copying_overwritten_baseline(self):
        self.tool.executive = MockExecutive2()

        # FIXME: crbug.com/279494. it's confusing that this is the test- port, and not the regular lion port. Really all of the tests should be using the test ports.
        port = self.tool.port_factory.get('test-mac-snowleopard')
        self._write(port._filesystem.join(port.layout_tests_dir(), 'platform/test-mac-snowleopard/failures/expected/image-expected.txt'), 'original snowleopard result')

        old_exact_matches = builders._exact_matches
        oc = OutputCapture()
        try:
            builders._exact_matches = {
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK SnowLeopard": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }
            options = MockOptions(builder="MOCK SnowLeopard", suffixes="txt", verbose=True, test="failures/expected/image.html", results_directory=None)

            oc.capture_output()
            self.command.execute(options, [], self.tool)
        finally:
            out, _, _ = oc.restore_output()
            builders._exact_matches = old_exact_matches

        # The snowleopard baseline must have been copied down to the
        # fallback leopard platform directory.
        self.assertMultiLineEqual(self._read(self.tool.filesystem.join(port.layout_tests_dir(), 'platform/test-mac-leopard/failures/expected/image-expected.txt')), 'original snowleopard result')
        self.assertMultiLineEqual(out, '{"add": [], "remove-lines": [], "delete": []}\n')

    def test_copying_overwritten_baseline_to_multiple_locations(self):
        self.tool.executive = MockExecutive2()

        # FIXME: crbug.com/279494. it's confusing that this is the test- port, and not the regular win port. Really all of the tests should be using the test ports.
        port = self.tool.port_factory.get('test-win-win7')
        self._write(port._filesystem.join(port.layout_tests_dir(), 'platform/test-win-win7/failures/expected/image-expected.txt'), 'original win7 result')

        old_exact_matches = builders._exact_matches
        oc = OutputCapture()
        try:
            builders._exact_matches = {
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK Linux": {"port_name": "test-linux-x86_64", "specifiers": set(["mock-specifier"])},
                "MOCK Win7": {"port_name": "test-win-win7", "specifiers": set(["mock-specifier"])},
            }
            options = MockOptions(builder="MOCK Win7", suffixes="txt", verbose=True, test="failures/expected/image.html", results_directory=None)

            oc.capture_output()
            self.command.execute(options, [], self.tool)
        finally:
            out, _, _ = oc.restore_output()
            builders._exact_matches = old_exact_matches

        # Copied to the linux fallback but not to mac-leopard.
        self.assertMultiLineEqual(self._read(self.tool.filesystem.join(port.layout_tests_dir(), 'platform/test-linux-x86_64/failures/expected/image-expected.txt')), 'original win7 result')
        self.assertFalse(self.tool.filesystem.exists(self.tool.filesystem.join(port.layout_tests_dir(), 'platform/mac-leopard/userscripts/another-test-expected.txt')))
        self.assertMultiLineEqual(out, '{"add": [], "remove-lines": [], "delete": []}\n')

    def test_no_copy_existing_baseline(self):
        self.tool.executive = MockExecutive2()

        # FIXME: it's confusing that this is the test- port, and not the regular win port. Really all of the tests should be using the test ports.
        port = self.tool.port_factory.get('test-win-win7')
        self._write(port._filesystem.join(port.layout_tests_dir(), 'platform/test-win-win7/failures/expected/image-expected.txt'), 'original win7 result')

        old_exact_matches = builders._exact_matches
        oc = OutputCapture()
        try:
            builders._exact_matches = {
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK Linux": {"port_name": "test-linux-x86_64", "specifiers": set(["mock-specifier"])},
                "MOCK Win7": {"port_name": "test-win-win7", "specifiers": set(["mock-specifier"])},
            }
            options = MockOptions(builder="MOCK Win7", suffixes="txt", verbose=True, test="failures/expected/image.html", results_directory=None)

            oc.capture_output()
            self.command.execute(options, [], self.tool)
        finally:
            out, _, _ = oc.restore_output()
            builders._exact_matches = old_exact_matches

        # Original win7 baseline is untouched; linux gets the copy.
        self.assertMultiLineEqual(self._read(self.tool.filesystem.join(port.layout_tests_dir(), 'platform/test-linux-x86_64/failures/expected/image-expected.txt')), 'original win7 result')
        self.assertMultiLineEqual(self._read(self.tool.filesystem.join(port.layout_tests_dir(), 'platform/test-win-win7/failures/expected/image-expected.txt')), 'original win7 result')
        self.assertFalse(self.tool.filesystem.exists(self.tool.filesystem.join(port.layout_tests_dir(), 'platform/mac-leopard/userscripts/another-test-expected.txt')))
        self.assertMultiLineEqual(out, '{"add": [], "remove-lines": [], "delete": []}\n')

    def test_no_copy_skipped_test(self):
        self.tool.executive = MockExecutive2()

        port = self.tool.port_factory.get('test-win-win7')
        fs = self.tool.filesystem
        self._write(fs.join(port.layout_tests_dir(), 'platform/test-win-win7/failures/expected/image-expected.txt'), 'original win7 result')
        expectations_path = fs.join(port.path_to_generic_test_expectations_file())
        self._write(expectations_path, (
            "[ Win ] failures/expected/image.html [ Failure ]\n"
            "[ Linux ] failures/expected/image.html [ Skip ]\n"))

        old_exact_matches = builders._exact_matches
        oc = OutputCapture()
        try:
            builders._exact_matches = {
                "MOCK Linux": {"port_name": "test-linux-x86_64", "specifiers": set(["mock-specifier"])},
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK Win7": {"port_name": "test-win-win7", "specifiers": set(["mock-specifier"])},
            }
            options = MockOptions(builder="MOCK Win7", suffixes="txt", verbose=True, test="failures/expected/image.html", results_directory=None)

            oc.capture_output()
            self.command.execute(options, [], self.tool)
        finally:
            out, _, _ = oc.restore_output()
            builders._exact_matches = old_exact_matches

        # The test is skipped on linux, so no baseline is copied there.
        self.assertFalse(fs.exists(fs.join(port.layout_tests_dir(), 'platform/test-linux-x86_64/failures/expected/image-expected.txt')))
        self.assertEqual(self._read(fs.join(port.layout_tests_dir(), 'platform/test-win-win7/failures/expected/image-expected.txt')),
                         'original win7 result')
class TestRebaselineTest(_BaseTestCase):
    """Tests for the rebaseline-test-internal command."""
    command_constructor = RebaselineTest  # AKA webkit-patch rebaseline-test-internal

    def setUp(self):
        super(TestRebaselineTest, self).setUp()
        # Default options shared by most tests; individual tests mutate
        # suffixes / results_directory as needed.
        self.options = MockOptions(builder="WebKit Mac10.7", test="userscripts/another-test.html", suffixes="txt", results_directory=None)

    def test_baseline_directory(self):
        command = self.command
        self.assertMultiLineEqual(command._baseline_directory("WebKit Mac10.7"), "/mock-checkout/third_party/WebKit/LayoutTests/platform/mac-lion")
        self.assertMultiLineEqual(command._baseline_directory("WebKit Mac10.6"), "/mock-checkout/third_party/WebKit/LayoutTests/platform/mac-snowleopard")

    def test_rebaseline_updates_expectations_file_noop(self):
        # Rebaselining a test that has no expectations lines must leave
        # the expectations file unchanged.
        self._zero_out_test_expectations()
        self._write(self.lion_expectations_path, """Bug(B) [ Mac Linux XP Debug ] fast/dom/Window/window-postmessage-clone-really-deep-array.html [ Pass ]
Bug(A) [ Debug ] : fast/css/large-list-of-rules-crash.html [ Failure ]
""")
        self._write("fast/dom/Window/window-postmessage-clone-really-deep-array.html", "Dummy test contents")
        self._write("fast/css/large-list-of-rules-crash.html", "Dummy test contents")
        self._write("userscripts/another-test.html", "Dummy test contents")

        self.options.suffixes = "png,wav,txt"
        self.command._rebaseline_test_and_update_expectations(self.options)

        # One fetch per requested suffix.
        self.assertItemsEqual(self.tool.web.urls_fetched,
            [self.WEB_PREFIX + '/userscripts/another-test-actual.png',
             self.WEB_PREFIX + '/userscripts/another-test-actual.wav',
             self.WEB_PREFIX + '/userscripts/another-test-actual.txt'])
        new_expectations = self._read(self.lion_expectations_path)
        self.assertMultiLineEqual(new_expectations, """Bug(B) [ Mac Linux XP Debug ] fast/dom/Window/window-postmessage-clone-really-deep-array.html [ Pass ]
Bug(A) [ Debug ] : fast/css/large-list-of-rules-crash.html [ Failure ]
""")

    def test_rebaseline_test(self):
        self.command._rebaseline_test("WebKit Linux", "userscripts/another-test.html", "txt", self.WEB_PREFIX)
        self.assertItemsEqual(self.tool.web.urls_fetched, [self.WEB_PREFIX + '/userscripts/another-test-actual.txt'])

    def test_rebaseline_test_with_results_directory(self):
        # With --results-directory the baseline is fetched from a local
        # file:// URL rather than the builder's web results.
        self._write("userscripts/another-test.html", "test data")
        self._write(self.lion_expectations_path, "Bug(x) [ Mac ] userscripts/another-test.html [ ImageOnlyFailure ]\nbug(z) [ Linux ] userscripts/another-test.html [ ImageOnlyFailure ]\n")
        self.options.results_directory = '/tmp'
        self.command._rebaseline_test_and_update_expectations(self.options)
        self.assertItemsEqual(self.tool.web.urls_fetched, ['file:///tmp/userscripts/another-test-actual.txt'])

    def test_rebaseline_reftest(self):
        # Reftests cannot be rebaselined; the command logs and records no
        # SCM changes.
        self._write("userscripts/another-test.html", "test data")
        self._write("userscripts/another-test-expected.html", "generic result")
        OutputCapture().assert_outputs(self, self.command._rebaseline_test_and_update_expectations, args=[self.options],
            expected_logs="Cannot rebaseline reftest: userscripts/another-test.html\n")
        self.assertDictEqual(self.command._scm_changes, {'add': [], 'remove-lines': [], "delete": []})

    def test_rebaseline_test_and_print_scm_changes(self):
        self.command._print_scm_changes = True
        self.command._scm_changes = {'add': [], 'delete': []}
        # Pretend the baseline is not yet tracked, so it shows up as an add.
        self.tool._scm.exists = lambda x: False

        self.command._rebaseline_test("WebKit Linux", "userscripts/another-test.html", "txt", None)

        self.assertDictEqual(self.command._scm_changes, {'add': ['/mock-checkout/third_party/WebKit/LayoutTests/platform/linux/userscripts/another-test-expected.txt'], 'delete': []})

    def test_rebaseline_test_internal_with_port_that_lacks_buildbot(self):
        self.tool.executive = MockExecutive2()

        # FIXME: it's confusing that this is the test- port, and not the regular win port. Really all of the tests should be using the test ports.
        port = self.tool.port_factory.get('test-win-win7')
        self._write(port._filesystem.join(port.layout_tests_dir(), 'platform/test-win-win7/failures/expected/image-expected.txt'), 'original win7 result')

        old_exact_matches = builders._exact_matches
        oc = OutputCapture()
        try:
            builders._exact_matches = {
                "MOCK XP": {"port_name": "test-win-xp"},
                "MOCK Win7": {"port_name": "test-win-win7"},
            }
            options = MockOptions(optimize=True, builder="MOCK Win7", suffixes="txt",
                                  verbose=True, test="failures/expected/image.html", results_directory=None)

            oc.capture_output()
            self.command.execute(options, [], self.tool)
        finally:
            out, _, _ = oc.restore_output()
            builders._exact_matches = old_exact_matches

        # The win7 baseline gets the fetched web result; the XP port,
        # which has no buildbot, gets nothing.
        self.assertMultiLineEqual(self._read(self.tool.filesystem.join(port.layout_tests_dir(), 'platform/test-win-win7/failures/expected/image-expected.txt')), 'MOCK Web result, convert 404 to None=True')
        self.assertFalse(self.tool.filesystem.exists(self.tool.filesystem.join(port.layout_tests_dir(), 'platform/test-win-xp/failures/expected/image-expected.txt')))
        self.assertMultiLineEqual(out, '{"add": [], "remove-lines": [{"test": "failures/expected/image.html", "builder": "MOCK Win7"}], "delete": []}\n')
class TestAbstractParallelRebaselineCommand(_BaseTestCase):
    """Covers builder filtering on the shared parallel-rebaseline base class."""
    command_constructor = AbstractParallelRebaselineCommand

    def test_builders_to_fetch_from(self):
        saved_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK XP": {"port_name": "test-win-xp"},
                "MOCK Win7": {"port_name": "test-win-win7"},
                "MOCK Win7 (dbg)(1)": {"port_name": "test-win-win7"},
                "MOCK Win7 (dbg)(2)": {"port_name": "test-win-win7"},
            }
            requested = ["MOCK XP", "MOCK Win7 (dbg)(1)", "MOCK Win7 (dbg)(2)", "MOCK Win7"]
            # Debug builders that share a port with a release builder are
            # dropped from the fetch list.
            self.assertEqual(self.command._builders_to_fetch_from(requested),
                             ["MOCK XP", "MOCK Win7"])
        finally:
            builders._exact_matches = saved_matches
class TestRebaselineJson(_BaseTestCase):
    """Tests for the rebaseline-json command's subprocess orchestration."""
    command_constructor = RebaselineJson

    def setUp(self):
        super(TestRebaselineJson, self).setUp()
        self.tool.executive = MockExecutive2()
        # Patch the builder table for the whole test; restored in tearDown.
        self.old_exact_matches = builders._exact_matches
        builders._exact_matches = {
            "MOCK builder": {"port_name": "test-mac-snowleopard"},
            "MOCK builder (Debug)": {"port_name": "test-mac-snowleopard"},
        }

    def tearDown(self):
        builders._exact_matches = self.old_exact_matches
        super(TestRebaselineJson, self).tearDown()

    def test_rebaseline_test_passes_on_all_builders(self):
        self._setup_mock_builder_data()

        # Override the builder data so the test now PASSes everywhere:
        # only the optimize step should run, with empty suffixes.
        def builder_data():
            self.command._builder_data['MOCK builder'] = LayoutTestResults.results_from_string("""ADD_RESULTS({
    "tests": {
        "userscripts": {
            "first-test.html": {
                "expected": "NEEDSREBASELINE",
                "actual": "PASS"
            }
        }
    }
});""")
            return self.command._builder_data

        self.command.builder_data = builder_data

        options = MockOptions(optimize=True, verbose=True, results_directory=None)

        self._write(self.lion_expectations_path, "Bug(x) userscripts/first-test.html [ ImageOnlyFailure ]\n")
        self._write("userscripts/first-test.html", "Dummy test contents")

        self.command._rebaseline(options, {"userscripts/first-test.html": {"MOCK builder": ["txt", "png"]}})

        # Note that we have one run_in_parallel() call followed by a run_command()
        self.assertEqual(self.tool.executive.calls,
                         [[['python', 'echo', 'optimize-baselines', '--no-modify-scm', '--suffixes', '', 'userscripts/first-test.html', '--verbose']]])

    def test_rebaseline_all(self):
        self._setup_mock_builder_data()

        options = MockOptions(optimize=True, verbose=True, results_directory=None)
        self._write("userscripts/first-test.html", "Dummy test contents")
        self.command._rebaseline(options, {"userscripts/first-test.html": {"MOCK builder": ["txt", "png"]}})

        # Note that we have one run_in_parallel() call followed by a run_command()
        self.assertEqual(self.tool.executive.calls,
                         [[['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/first-test.html', '--verbose']],
                          [['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/first-test.html', '--verbose']],
                          [['python', 'echo', 'optimize-baselines', '--no-modify-scm', '--suffixes', 'txt,png', 'userscripts/first-test.html', '--verbose']]])

    def test_rebaseline_debug(self):
        self._setup_mock_builder_data()

        options = MockOptions(optimize=True, verbose=True, results_directory=None)
        self._write("userscripts/first-test.html", "Dummy test contents")
        self.command._rebaseline(options, {"userscripts/first-test.html": {"MOCK builder (Debug)": ["txt", "png"]}})

        # Note that we have one run_in_parallel() call followed by a run_command()
        self.assertEqual(self.tool.executive.calls,
                         [[['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder (Debug)', '--test', 'userscripts/first-test.html', '--verbose']],
                          [['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder (Debug)', '--test', 'userscripts/first-test.html', '--verbose']],
                          [['python', 'echo', 'optimize-baselines', '--no-modify-scm', '--suffixes', 'txt,png', 'userscripts/first-test.html', '--verbose']]])

    def test_no_optimize(self):
        self._setup_mock_builder_data()

        options = MockOptions(optimize=False, verbose=True, results_directory=None)
        self._write("userscripts/first-test.html", "Dummy test contents")
        self.command._rebaseline(options, {"userscripts/first-test.html": {"MOCK builder (Debug)": ["txt", "png"]}})

        # Note that we have only one run_in_parallel() call
        self.assertEqual(self.tool.executive.calls,
                         [[['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder (Debug)', '--test', 'userscripts/first-test.html', '--verbose']],
                          [['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder (Debug)', '--test', 'userscripts/first-test.html', '--verbose']]])

    def test_results_directory(self):
        self._setup_mock_builder_data()

        options = MockOptions(optimize=False, verbose=True, results_directory='/tmp')
        self._write("userscripts/first-test.html", "Dummy test contents")
        self.command._rebaseline(options, {"userscripts/first-test.html": {"MOCK builder": ["txt", "png"]}})

        # Note that we have only one run_in_parallel() call
        self.assertEqual(self.tool.executive.calls,
                         [[['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/first-test.html', '--results-directory', '/tmp', '--verbose']],
                          [['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/first-test.html', '--results-directory', '/tmp', '--verbose']]])
class TestRebaselineJsonUpdatesExpectationsFiles(_BaseTestCase):
    """Tests that rebaseline-json rewrites TestExpectations correctly
    after subprocesses report removed lines."""
    command_constructor = RebaselineJson

    def setUp(self):
        super(TestRebaselineJsonUpdatesExpectationsFiles, self).setUp()
        self.tool.executive = MockExecutive2()

        # Fake subprocess output: every child command claims it removed
        # the Mac10.7 expectations line for userscripts/first-test.html.
        # The signature mirrors Executive.run_command's keyword API.
        def mock_run_command(args,
                             cwd=None,
                             input=None,
                             error_handler=None,
                             return_exit_code=False,
                             return_stderr=True,
                             decode_output=False,
                             env=None):
            return '{"add": [], "remove-lines": [{"test": "userscripts/first-test.html", "builder": "WebKit Mac10.7"}]}\n'
        self.tool.executive.run_command = mock_run_command

    def test_rebaseline_updates_expectations_file(self):
        options = MockOptions(optimize=False, verbose=True, results_directory=None)

        self._write(self.lion_expectations_path, "Bug(x) [ Mac ] userscripts/first-test.html [ ImageOnlyFailure ]\nbug(z) [ Linux ] userscripts/first-test.html [ ImageOnlyFailure ]\n")
        self._write("userscripts/first-test.html", "Dummy test contents")
        self._setup_mock_builder_data()

        self.command._rebaseline(options, {"userscripts/first-test.html": {"WebKit Mac10.7": ["txt", "png"]}})

        # "Mac" is expanded minus the rebaselined Mac10.7 (Lion) platform.
        new_expectations = self._read(self.lion_expectations_path)
        self.assertMultiLineEqual(new_expectations, "Bug(x) [ Mavericks MountainLion Retina SnowLeopard Yosemite ] userscripts/first-test.html [ ImageOnlyFailure ]\nbug(z) [ Linux ] userscripts/first-test.html [ ImageOnlyFailure ]\n")

    def test_rebaseline_updates_expectations_file_all_platforms(self):
        options = MockOptions(optimize=False, verbose=True, results_directory=None)

        self._write(self.lion_expectations_path, "Bug(x) userscripts/first-test.html [ ImageOnlyFailure ]\n")
        self._write("userscripts/first-test.html", "Dummy test contents")
        self._setup_mock_builder_data()

        self.command._rebaseline(options, {"userscripts/first-test.html": {"WebKit Mac10.7": ["txt", "png"]}})

        new_expectations = self._read(self.lion_expectations_path)
        self.assertMultiLineEqual(new_expectations, "Bug(x) [ Android Linux Mavericks MountainLion Retina SnowLeopard Win Yosemite ] userscripts/first-test.html [ ImageOnlyFailure ]\n")

    def test_rebaseline_handles_platform_skips(self):
        # This test is just like test_rebaseline_updates_expectations_file_all_platforms(),
        # except that if a particular port happens to SKIP a test in an overrides file,
        # we count that as passing, and do not think that we still need to rebaseline it.
        options = MockOptions(optimize=False, verbose=True, results_directory=None)

        self._write(self.lion_expectations_path, "Bug(x) userscripts/first-test.html [ ImageOnlyFailure ]\n")
        self._write("NeverFixTests", "Bug(y) [ Android ] userscripts [ Skip ]\n")
        self._write("userscripts/first-test.html", "Dummy test contents")
        self._setup_mock_builder_data()

        self.command._rebaseline(options, {"userscripts/first-test.html": {"WebKit Mac10.7": ["txt", "png"]}})

        new_expectations = self._read(self.lion_expectations_path)
        self.assertMultiLineEqual(new_expectations, "Bug(x) [ Linux Mavericks MountainLion Retina SnowLeopard Win Yosemite ] userscripts/first-test.html [ ImageOnlyFailure ]\n")

    def test_rebaseline_handles_skips_in_file(self):
        # This test is like test_Rebaseline_handles_platform_skips, except that the
        # Skip is in the same (generic) file rather than a platform file. In this case,
        # the Skip line should be left unmodified. Note that the first line is now
        # qualified as "[Linux Mac Win]"; if it was unqualified, it would conflict with
        # the second line.
        options = MockOptions(optimize=False, verbose=True, results_directory=None)

        self._write(self.lion_expectations_path,
            ("Bug(x) [ Linux Mac Win ] userscripts/first-test.html [ ImageOnlyFailure ]\n"
             "Bug(y) [ Android ] userscripts/first-test.html [ Skip ]\n"))
        self._write("userscripts/first-test.html", "Dummy test contents")
        self._setup_mock_builder_data()

        self.command._rebaseline(options, {"userscripts/first-test.html": {"WebKit Mac10.7": ["txt", "png"]}})

        new_expectations = self._read(self.lion_expectations_path)
        self.assertMultiLineEqual(
            new_expectations,
            ("Bug(x) [ Linux Mavericks MountainLion Retina SnowLeopard Win Yosemite ] userscripts/first-test.html [ ImageOnlyFailure ]\n"
             "Bug(y) [ Android ] userscripts/first-test.html [ Skip ]\n"))

    def test_rebaseline_handles_smoke_tests(self):
        # This test is just like test_rebaseline_handles_platform_skips, except that we check for
        # a test not being in the SmokeTests file, instead of using overrides files.
        # If a test is not part of the smoke tests, we count that as passing on ports that only
        # run smoke tests, and do not think that we still need to rebaseline it.
        options = MockOptions(optimize=False, verbose=True, results_directory=None)

        self._write(self.lion_expectations_path, "Bug(x) userscripts/first-test.html [ ImageOnlyFailure ]\n")
        self._write("SmokeTests", "fast/html/article-element.html")
        self._write("userscripts/first-test.html", "Dummy test contents")
        self._setup_mock_builder_data()

        self.command._rebaseline(options, {"userscripts/first-test.html": {"WebKit Mac10.7": ["txt", "png"]}})

        new_expectations = self._read(self.lion_expectations_path)
        self.assertMultiLineEqual(new_expectations, "Bug(x) [ Linux Mavericks MountainLion Retina SnowLeopard Win Yosemite ] userscripts/first-test.html [ ImageOnlyFailure ]\n")
class TestRebaseline(_BaseTestCase):
    """Tests for `webkit-patch rebaseline`.

    The command shares most of its machinery with RebaselineJson, so only
    the behavior that differs is exercised here.
    """
    command_constructor = Rebaseline  # AKA webkit-patch rebaseline

    def _interesting_calls(self):
        # Drop incidental tool invocations (qmake version probe, perl).
        return [call for call in self.tool.executive.calls
                if call != ['qmake', '-v'] and call[0] != 'perl']

    def test_rebaseline(self):
        self.command._builders_to_pull_from = lambda: [MockBuilder('MOCK builder')]
        self._write("userscripts/first-test.html", "test data")
        self._zero_out_test_expectations()
        self._setup_mock_builder_data()

        saved_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK builder": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
            }
            options = MockOptions(results_directory=False, optimize=False, builders=None, suffixes="txt,png", verbose=True)
            self.command.execute(options, ['userscripts/first-test.html'], self.tool)
        finally:
            builders._exact_matches = saved_exact_matches

        self.assertEqual(self._interesting_calls(),
            [[['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/first-test.html', '--verbose']],
             [['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/first-test.html', '--verbose']]])

    def test_rebaseline_directory(self):
        self.command._builders_to_pull_from = lambda: [MockBuilder('MOCK builder')]
        self._write("userscripts/first-test.html", "test data")
        self._write("userscripts/second-test.html", "test data")
        self._setup_mock_builder_data()

        saved_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK builder": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
            }
            options = MockOptions(results_directory=False, optimize=False, builders=None, suffixes="txt,png", verbose=True)
            # Passing a directory expands to every test underneath it.
            self.command.execute(options, ['userscripts'], self.tool)
        finally:
            builders._exact_matches = saved_exact_matches

        self.assertEqual(self._interesting_calls(),
            [[['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/first-test.html', '--verbose'],
              ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/second-test.html', '--verbose']],
             [['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/first-test.html', '--verbose'],
              ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'userscripts/second-test.html', '--verbose']]])
class MockLineRemovingExecutive(MockExecutive):
    """MockExecutive variant whose parallel runs report removable lines.

    For every `rebaseline-test-internal` invocation, the fake output claims
    that the expectation line for the given test/builder pair should be
    removed.  The individual calls issued during one batch are collapsed
    into a single grouped entry in self.calls.
    """

    def run_in_parallel(self, commands):
        assert len(commands)

        first_new_call = len(self.calls)
        outputs = []
        for args, working_dir in commands:
            stdout = self.run_command(args, cwd=working_dir)
            if 'rebaseline-test-internal' in args:
                # args[8] is the --test value, args[6] the --builder value.
                stdout = ('{"add": [], "remove-lines": [{"test": "%s", "builder": "%s"}], "delete": []}\n'
                          % (args[8], args[6]))
            outputs.append([0, stdout, ''])

        # Fold the calls made by this batch into one grouped entry so callers
        # can assert on batches rather than individual invocations.
        batch = self.calls[first_new_call:]
        self.calls = self.calls[:first_new_call]
        self.calls.append(batch)
        return outputs
class TestRebaselineExpectations(_BaseTestCase):
    """Tests for rebaselining every test marked [ Rebaseline ] in expectations."""
    command_constructor = RebaselineExpectations

    def setUp(self):
        super(TestRebaselineExpectations, self).setUp()
        self.options = MockOptions(optimize=False, builders=None, suffixes=['txt'], verbose=False, platform=None, results_directory=None)

    def _write_test_file(self, port, path, contents):
        # Write a layout test (or baseline) relative to the port's layout tests dir.
        abs_path = self.tool.filesystem.join(port.layout_tests_dir(), path)
        self.tool.filesystem.write_text_file(abs_path, contents)

    def _setup_test_port(self):
        test_port = self.tool.port_factory.get('test')
        original_get = self.tool.port_factory.get

        def get_test_port(port_name=None, options=None, **kwargs):
            if not port_name:
                return test_port
            return original_get(port_name, options, **kwargs)
        # Need to make sure all the ports grabbed use the test checkout path instead of the mock checkout path.
        # FIXME: crbug.com/279494 - we shouldn't be doing this.
        self.tool.port_factory.get = get_test_port
        return test_port

    def test_rebaseline_expectations(self):
        self._zero_out_test_expectations()
        self.tool.executive = MockExecutive2()

        def builder_data():
            # Both mock builders report the same failing results.
            self.command._builder_data['MOCK SnowLeopard'] = self.command._builder_data['MOCK Leopard'] = LayoutTestResults.results_from_string("""ADD_RESULTS({
"tests": {
"userscripts": {
"another-test.html": {
"expected": "PASS",
"actual": "PASS TEXT"
},
"images.svg": {
"expected": "FAIL",
"actual": "IMAGE+TEXT"
}
}
}
});""")
            return self.command._builder_data
        self.command.builder_data = builder_data

        self._write("userscripts/another-test.html", "Dummy test contents")
        self._write("userscripts/images.svg", "Dummy test contents")
        # not-actually-failing.html does not appear in the builder results above,
        # so it must be skipped — no calls for it are expected below.
        self.command._tests_to_rebaseline = lambda port: {
            'userscripts/another-test.html': set(['txt']),
            'userscripts/images.svg': set(['png']),
            'userscripts/not-actually-failing.html': set(['txt', 'png', 'wav']),
        }

        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK SnowLeopard": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }
            self.command.execute(self.options, [], self.tool)
        finally:
            builders._exact_matches = old_exact_matches

        # FIXME: change this to use the test- ports.
        # NOTE(review): `calls` is computed but never used — the assertion below
        # checks the unfiltered self.tool.executive.calls instead. Looks like
        # the assertion was meant to use `calls`; confirm before changing.
        calls = filter(lambda x: x != ['qmake', '-v'], self.tool.executive.calls)
        self.assertEqual(self.tool.executive.calls, [
            [
                ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt', '--builder', 'MOCK Leopard', '--test', 'userscripts/another-test.html'],
                ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt', '--builder', 'MOCK SnowLeopard', '--test', 'userscripts/another-test.html'],
                ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'png', '--builder', 'MOCK Leopard', '--test', 'userscripts/images.svg'],
                ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'png', '--builder', 'MOCK SnowLeopard', '--test', 'userscripts/images.svg'],
            ],
            [
                ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt', '--builder', 'MOCK Leopard', '--test', 'userscripts/another-test.html'],
                ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt', '--builder', 'MOCK SnowLeopard', '--test', 'userscripts/another-test.html'],
                ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'png', '--builder', 'MOCK Leopard', '--test', 'userscripts/images.svg'],
                ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'png', '--builder', 'MOCK SnowLeopard', '--test', 'userscripts/images.svg'],
            ],
        ])

    def test_rebaseline_expectations_noop(self):
        # With no tests marked Rebaseline, the command must not touch any files.
        self._zero_out_test_expectations()

        oc = OutputCapture()
        try:
            oc.capture_output()
            self.command.execute(self.options, [], self.tool)
        finally:
            _, _, logs = oc.restore_output()
            self.assertEqual(self.tool.filesystem.written_files, {})
            self.assertEqual(logs, 'Did not find any tests marked Rebaseline.\n')

    def disabled_test_overrides_are_included_correctly(self):
        # This tests that the any tests marked as REBASELINE in the overrides are found, but
        # that the overrides do not get written into the main file.
        self._zero_out_test_expectations()

        self._write(self.lion_expectations_path, '')
        self.lion_port.expectations_dict = lambda: {
            self.lion_expectations_path: '',
            'overrides': ('Bug(x) userscripts/another-test.html [ Failure Rebaseline ]\n'
                          'Bug(y) userscripts/test.html [ Crash ]\n')}
        self._write('/userscripts/another-test.html', '')

        self.assertDictEqual(self.command._tests_to_rebaseline(self.lion_port), {'userscripts/another-test.html': set(['png', 'txt', 'wav'])})
        self.assertEqual(self._read(self.lion_expectations_path), '')

    def test_rebaseline_without_other_expectations(self):
        # A bare [ Rebaseline ] line implies all default suffixes.
        self._write("userscripts/another-test.html", "Dummy test contents")
        self._write(self.lion_expectations_path, "Bug(x) userscripts/another-test.html [ Rebaseline ]\n")
        self.assertDictEqual(self.command._tests_to_rebaseline(self.lion_port), {'userscripts/another-test.html': ('png', 'wav', 'txt')})

    def test_rebaseline_test_passes_everywhere(self):
        # When a test unexpectedly passes on every builder, no rebaseline
        # commands run; instead the stale expectation line is rewritten to
        # cover only the remaining (non-mac) platforms.
        test_port = self._setup_test_port()

        old_builder_data = self.command.builder_data

        def builder_data():
            self.command._builder_data['MOCK Leopard'] = self.command._builder_data['MOCK SnowLeopard'] = LayoutTestResults.results_from_string("""ADD_RESULTS({
"tests": {
"fast": {
"dom": {
"prototype-taco.html": {
"expected": "FAIL",
"actual": "PASS",
"is_unexpected": true
}
}
}
}
});""")
            return self.command._builder_data
        self.command.builder_data = builder_data

        self.tool.filesystem.write_text_file(test_port.path_to_generic_test_expectations_file(), """
Bug(foo) fast/dom/prototype-taco.html [ Rebaseline ]
""")

        self._write_test_file(test_port, 'fast/dom/prototype-taco.html', "Dummy test contents")

        self.tool.executive = MockLineRemovingExecutive()

        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK SnowLeopard": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }
            self.command.execute(self.options, [], self.tool)
            self.assertEqual(self.tool.executive.calls, [])

            # The mac ports should both be removed since they're the only ones in builders._exact_matches.
            self.assertEqual(self.tool.filesystem.read_text_file(test_port.path_to_generic_test_expectations_file()), """
Bug(foo) [ Linux Win ] fast/dom/prototype-taco.html [ Rebaseline ]
""")
        finally:
            builders._exact_matches = old_exact_matches
finally:
builders._exact_matches = old_exact_matches
class _FakeOptimizer(BaselineOptimizer):
    """Stub optimizer reporting results only for text (*.txt) baselines."""

    def read_results_by_directory(self, baseline_name):
        # Non-text baselines have no recorded results.
        if not baseline_name.endswith('txt'):
            return {}
        return {'LayoutTests/passes/text.html': '123456'}
class TestOptimizeBaselines(_BaseTestCase):
    """Tests for the optimize-baselines command (duplicate-baseline removal)."""
    command_constructor = OptimizeBaselines

    def _write_test_file(self, port, path, contents):
        # Write a layout test (or baseline) relative to the port's layout tests dir.
        abs_path = self.tool.filesystem.join(port.layout_tests_dir(), path)
        self.tool.filesystem.write_text_file(abs_path, contents)

    def setUp(self):
        super(TestOptimizeBaselines, self).setUp()

        # FIXME: This is a hack to get the unittest and the BaselineOptimize to both use /mock-checkout
        # instead of one using /mock-checkout and one using /test-checkout.
        default_port = self.tool.port_factory.get()
        self.tool.port_factory.get = lambda port_name=None: default_port

    def test_modify_scm(self):
        # With no_modify_scm=False the redundant platform baseline is deleted
        # through SCM, so nothing appears in the JSON "delete" list.
        test_port = self.tool.port_factory.get('test')
        self._write_test_file(test_port, 'another/test.html', "Dummy test contents")
        # The platform-specific baseline duplicates the generic one, so it is redundant.
        self._write_test_file(test_port, 'platform/mac-snowleopard/another/test-expected.txt', "result A")
        self._write_test_file(test_port, 'another/test-expected.txt', "result A")

        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK Leopard Debug": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }
            OutputCapture().assert_outputs(self, self.command.execute, args=[
                MockOptions(suffixes='txt', no_modify_scm=False, platform='test-mac-snowleopard'),
                ['another/test.html'],
                self.tool,
            ], expected_stdout='{"add": [], "remove-lines": [], "delete": []}\n')
        finally:
            builders._exact_matches = old_exact_matches

        self.assertFalse(self.tool.filesystem.exists(self.tool.filesystem.join(test_port.layout_tests_dir(), 'platform/mac/another/test-expected.txt')))
        self.assertTrue(self.tool.filesystem.exists(self.tool.filesystem.join(test_port.layout_tests_dir(), 'another/test-expected.txt')))

    def test_no_modify_scm(self):
        # With no_modify_scm=True the deletion is reported in the JSON output
        # instead of being performed through SCM.
        test_port = self.tool.port_factory.get('test')
        self._write_test_file(test_port, 'another/test.html', "Dummy test contents")
        self._write_test_file(test_port, 'platform/mac-snowleopard/another/test-expected.txt', "result A")
        self._write_test_file(test_port, 'another/test-expected.txt', "result A")

        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK Leopard Debug": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }
            OutputCapture().assert_outputs(self, self.command.execute, args=[
                MockOptions(suffixes='txt', no_modify_scm=True, platform='test-mac-snowleopard'),
                ['another/test.html'],
                self.tool,
            ], expected_stdout='{"add": [], "remove-lines": [], "delete": ["/mock-checkout/third_party/WebKit/LayoutTests/platform/mac-snowleopard/another/test-expected.txt"]}\n')
        finally:
            builders._exact_matches = old_exact_matches

        self.assertFalse(self.tool.filesystem.exists(self.tool.filesystem.join(test_port.layout_tests_dir(), 'platform/mac/another/test-expected.txt')))
        self.assertTrue(self.tool.filesystem.exists(self.tool.filesystem.join(test_port.layout_tests_dir(), 'another/test-expected.txt')))

    def test_optimize_all_suffixes_by_default(self):
        # Both the txt and png platform baselines are redundant and should be
        # reported for deletion in one pass.
        test_port = self.tool.port_factory.get('test')
        self._write_test_file(test_port, 'another/test.html', "Dummy test contents")
        self._write_test_file(test_port, 'platform/mac-snowleopard/another/test-expected.txt', "result A")
        self._write_test_file(test_port, 'platform/mac-snowleopard/another/test-expected.png', "result A png")
        self._write_test_file(test_port, 'another/test-expected.txt', "result A")
        self._write_test_file(test_port, 'another/test-expected.png', "result A png")

        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK Leopard Debug": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }
            # NOTE(review): `oc` is created inside the try block; if the code
            # above it raised, the finally clause would hit a NameError on
            # oc.restore_output() — confirm whether this matters in practice.
            oc = OutputCapture()
            oc.capture_output()
            self.command.execute(MockOptions(suffixes='txt,wav,png', no_modify_scm=True, platform='test-mac-snowleopard'),
                                 ['another/test.html'],
                                 self.tool)
        finally:
            out, err, logs = oc.restore_output()
            builders._exact_matches = old_exact_matches

        self.assertEquals(out, '{"add": [], "remove-lines": [], "delete": ["/mock-checkout/third_party/WebKit/LayoutTests/platform/mac-snowleopard/another/test-expected.txt", "/mock-checkout/third_party/WebKit/LayoutTests/platform/mac-snowleopard/another/test-expected.png"]}\n')
        self.assertFalse(self.tool.filesystem.exists(self.tool.filesystem.join(test_port.layout_tests_dir(), 'platform/mac/another/test-expected.txt')))
        self.assertFalse(self.tool.filesystem.exists(self.tool.filesystem.join(test_port.layout_tests_dir(), 'platform/mac/another/test-expected.png')))
        self.assertTrue(self.tool.filesystem.exists(self.tool.filesystem.join(test_port.layout_tests_dir(), 'another/test-expected.txt')))
        self.assertTrue(self.tool.filesystem.exists(self.tool.filesystem.join(test_port.layout_tests_dir(), 'another/test-expected.png')))
class TestAnalyzeBaselines(_BaseTestCase):
    """Tests for the analyze-baselines command's printed report."""
    command_constructor = AnalyzeBaselines

    def setUp(self):
        super(TestAnalyzeBaselines, self).setUp()
        self.port = self.tool.port_factory.get('test')
        self.tool.port_factory.get = (lambda port_name=None, options=None: self.port)
        # Captured output lines; the command's _write is redirected here.
        self.lines = []
        self.command._optimizer_class = _FakeOptimizer
        self.command._write = (lambda msg: self.lines.append(msg))  # pylint bug warning about unnecessary lambda? pylint: disable=W0108

    def test_default(self):
        self.command.execute(MockOptions(suffixes='txt', missing=False, platform=None), ['passes/text.html'], self.tool)
        self.assertEqual(self.lines,
            ['passes/text-expected.txt:',
             '  (generic): 123456'])

    def test_missing_baselines(self):
        # With missing=True, suffixes without any baselines are reported too.
        self.command.execute(MockOptions(suffixes='png,txt', missing=True, platform=None), ['passes/text.html'], self.tool)
        self.assertEqual(self.lines,
            ['passes/text-expected.png: (no baselines found)',
             'passes/text-expected.txt:',
             '  (generic): 123456'])
class TestAutoRebaseline(_BaseTestCase):
    """Tests for the auto-rebaseline command.

    auto-rebaseline scans `git blame` of TestExpectations for lines marked
    [ NeedsRebaseline ], rebaselines those tests from bot results, optimizes
    the new baselines, and commits the result via `git cl`.
    """
    SVN_REMOTE_CMD = ['git', 'config', '--local', '--get-regexp', '^svn-remote\\.']
    command_constructor = AutoRebaseline

    def _write_test_file(self, port, path, contents):
        # Write a layout test (or baseline) relative to the port's layout tests dir.
        abs_path = self.tool.filesystem.join(port.layout_tests_dir(), path)
        self.tool.filesystem.write_text_file(abs_path, contents)

    def _setup_test_port(self):
        test_port = self.tool.port_factory.get('test')
        original_get = self.tool.port_factory.get

        def get_test_port(port_name=None, options=None, **kwargs):
            if not port_name:
                return test_port
            return original_get(port_name, options, **kwargs)
        # Need to make sure all the ports grabbed use the test checkout path instead of the mock checkout path.
        # FIXME: crbug.com/279494 - we shouldn't be doing this.
        self.tool.port_factory.get = get_test_port
        return test_port

    def setUp(self):
        super(TestAutoRebaseline, self).setUp()
        # Pretend every bot has cycled through revision 9000 so NeedsRebaseline
        # lines committed at or before that revision are eligible.
        self.command.latest_revision_processed_on_all_bots = lambda: 9000
        self.command.bot_revision_data = lambda: [{"builder": "Mock builder", "revision": "9000"}]

    def test_release_builders(self):
        # Debug and ASAN builders must be excluded from the release-builder list.
        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK Leopard Debug": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
                "MOCK Leopard ASAN": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }
            self.assertEqual(self.command._release_builders(), ['MOCK Leopard'])
        finally:
            builders._exact_matches = old_exact_matches

    def test_tests_to_rebaseline(self):
        def blame(path):
            # Fake blame output: only NeedsRebaseline lines whose commit
            # (624c3081c0) has cycled through the bots should be picked up;
            # 6469e754a1 is newer than min_revision and 0000000000 is an
            # uncommitted local change.
            return """
624c3081c0 path/to/TestExpectations (foobarbaz1@chromium.org 2013-06-14 20:18:46 +0000 11) crbug.com/24182 [ Debug ] path/to/norebaseline.html [ ImageOnlyFailure ]
624c3081c0 path/to/TestExpectations (foobarbaz1@chromium.org 2013-04-28 04:52:41 +0000 13) Bug(foo) path/to/rebaseline-without-bug-number.html [ NeedsRebaseline ]
624c3081c0 path/to/TestExpectations (foobarbaz1@chromium.org 2013-06-14 20:18:46 +0000 11) crbug.com/24182 [ Debug ] path/to/rebaseline-with-modifiers.html [ NeedsRebaseline ]
624c3081c0 path/to/TestExpectations (foobarbaz1@chromium.org 2013-04-28 04:52:41 +0000 12) crbug.com/24182 crbug.com/234 path/to/rebaseline-without-modifiers.html [ NeedsRebaseline ]
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-04-28 04:52:41 +0000 12) crbug.com/24182 path/to/rebaseline-new-revision.html [ NeedsRebaseline ]
624caaaaaa path/to/TestExpectations (foo@chromium.org 2013-04-28 04:52:41 +0000 12) crbug.com/24182 path/to/not-cycled-through-bots.html [ NeedsRebaseline ]
0000000000 path/to/TestExpectations (foo@chromium.org 2013-04-28 04:52:41 +0000 12) crbug.com/24182 path/to/locally-changed-lined.html [ NeedsRebaseline ]
"""
        self.tool.scm().blame = blame

        min_revision = 9000
        self.assertEqual(self.command.tests_to_rebaseline(self.tool, min_revision, print_revisions=False), (
            set(['path/to/rebaseline-without-bug-number.html', 'path/to/rebaseline-with-modifiers.html', 'path/to/rebaseline-without-modifiers.html']),
            5678,
            'foobarbaz1@chromium.org',
            set(['24182', '234']),
            True))

    def test_tests_to_rebaseline_over_limit(self):
        def blame(path):
            # Generate one more NeedsRebaseline line than the per-run cap.
            result = ""
            for i in range(0, self.command.MAX_LINES_TO_REBASELINE + 1):
                result += "624c3081c0 path/to/TestExpectations (foobarbaz1@chromium.org 2013-04-28 04:52:41 +0000 13) crbug.com/24182 path/to/rebaseline-%s.html [ NeedsRebaseline ]\n" % i
            return result
        self.tool.scm().blame = blame

        # Only the first MAX_LINES_TO_REBASELINE tests should be selected.
        expected_list_of_tests = []
        for i in range(0, self.command.MAX_LINES_TO_REBASELINE):
            expected_list_of_tests.append("path/to/rebaseline-%s.html" % i)

        min_revision = 9000
        self.assertEqual(self.command.tests_to_rebaseline(self.tool, min_revision, print_revisions=False), (
            set(expected_list_of_tests),
            5678,
            'foobarbaz1@chromium.org',
            set(['24182']),
            True))

    def test_commit_message(self):
        author = "foo@chromium.org"
        revision = 1234
        bugs = set()
        # Without bugs there is no BUG= line in the message.
        self.assertEqual(self.command.commit_message(author, revision, bugs),
            """Auto-rebaseline for r1234

http://src.chromium.org/viewvc/blink?view=revision&revision=1234

TBR=foo@chromium.org
""")

        bugs = set(["234", "345"])
        self.assertEqual(self.command.commit_message(author, revision, bugs),
            """Auto-rebaseline for r1234

http://src.chromium.org/viewvc/blink?view=revision&revision=1234

BUG=234,345
TBR=foo@chromium.org
""")

    def test_no_needs_rebaseline_lines(self):
        def blame(path):
            return """
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-06-14 20:18:46 +0000 11) crbug.com/24182 [ Debug ] path/to/norebaseline.html [ ImageOnlyFailure ]
"""
        self.tool.scm().blame = blame

        # No NeedsRebaseline lines -> the command must not run anything.
        self.command.execute(MockOptions(optimize=True, verbose=False, results_directory=False), [], self.tool)
        self.assertEqual(self.tool.executive.calls, [])

    def test_execute(self):
        def blame(path):
            return """
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-06-14 20:18:46 +0000 11) # Test NeedsRebaseline being in a comment doesn't bork parsing.
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-06-14 20:18:46 +0000 11) crbug.com/24182 [ Debug ] path/to/norebaseline.html [ ImageOnlyFailure ]
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-04-28 04:52:41 +0000 13) Bug(foo) fast/dom/prototype-taco.html [ NeedsRebaseline ]
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-06-14 20:18:46 +0000 11) crbug.com/24182 [ SnowLeopard ] fast/dom/prototype-strawberry.html [ NeedsRebaseline ]
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-04-28 04:52:41 +0000 12) crbug.com/24182 fast/dom/prototype-chocolate.html [ NeedsRebaseline ]
624caaaaaa path/to/TestExpectations (foo@chromium.org 2013-04-28 04:52:41 +0000 12) crbug.com/24182 path/to/not-cycled-through-bots.html [ NeedsRebaseline ]
0000000000 path/to/TestExpectations (foo@chromium.org 2013-04-28 04:52:41 +0000 12) crbug.com/24182 path/to/locally-changed-lined.html [ NeedsRebaseline ]
"""
        self.tool.scm().blame = blame

        test_port = self._setup_test_port()

        old_builder_data = self.command.builder_data

        def builder_data():
            old_builder_data()
            # have prototype-chocolate only fail on "MOCK Leopard".
            self.command._builder_data['MOCK SnowLeopard'] = LayoutTestResults.results_from_string("""ADD_RESULTS({
"tests": {
"fast": {
"dom": {
"prototype-taco.html": {
"expected": "PASS",
"actual": "PASS TEXT",
"is_unexpected": true
},
"prototype-chocolate.html": {
"expected": "FAIL",
"actual": "PASS"
},
"prototype-strawberry.html": {
"expected": "PASS",
"actual": "IMAGE PASS",
"is_unexpected": true
}
}
}
}
});""")
            return self.command._builder_data
        self.command.builder_data = builder_data

        self.tool.filesystem.write_text_file(test_port.path_to_generic_test_expectations_file(), """
crbug.com/24182 [ Debug ] path/to/norebaseline.html [ Rebaseline ]
Bug(foo) fast/dom/prototype-taco.html [ NeedsRebaseline ]
crbug.com/24182 [ SnowLeopard ] fast/dom/prototype-strawberry.html [ NeedsRebaseline ]
crbug.com/24182 fast/dom/prototype-chocolate.html [ NeedsRebaseline ]
crbug.com/24182 path/to/not-cycled-through-bots.html [ NeedsRebaseline ]
crbug.com/24182 path/to/locally-changed-lined.html [ NeedsRebaseline ]
""")

        self._write_test_file(test_port, 'fast/dom/prototype-taco.html', "Dummy test contents")
        self._write_test_file(test_port, 'fast/dom/prototype-strawberry.html', "Dummy test contents")
        self._write_test_file(test_port, 'fast/dom/prototype-chocolate.html', "Dummy test contents")

        self.tool.executive = MockLineRemovingExecutive(
            should_return_zero_when_run=set(self.SVN_REMOTE_CMD))

        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK SnowLeopard": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }

            # With a closed tree, nothing must happen.
            self.command.tree_status = lambda: 'closed'
            self.command.execute(MockOptions(optimize=True, verbose=False, results_directory=False), [], self.tool)
            self.assertEqual(self.tool.executive.calls, [])

            # With an open tree the full rebaseline/optimize/commit pipeline runs.
            self.command.tree_status = lambda: 'open'
            self.tool.executive.calls = []
            self.command.execute(MockOptions(optimize=True, verbose=False, results_directory=False), [], self.tool)

            self.assertEqual(self.tool.executive.calls, [
                self.SVN_REMOTE_CMD,
                ['git', 'rev-parse', '--symbolic-full-name', 'HEAD'],
                [
                    ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt,png', '--builder', 'MOCK Leopard', '--test', 'fast/dom/prototype-chocolate.html'],
                    ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'png', '--builder', 'MOCK SnowLeopard', '--test', 'fast/dom/prototype-strawberry.html'],
                    ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt', '--builder', 'MOCK Leopard', '--test', 'fast/dom/prototype-taco.html'],
                    ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt', '--builder', 'MOCK SnowLeopard', '--test', 'fast/dom/prototype-taco.html'],
                ],
                [
                    ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK Leopard', '--test', 'fast/dom/prototype-chocolate.html'],
                    ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'png', '--builder', 'MOCK SnowLeopard', '--test', 'fast/dom/prototype-strawberry.html'],
                    ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt', '--builder', 'MOCK Leopard', '--test', 'fast/dom/prototype-taco.html'],
                    ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt', '--builder', 'MOCK SnowLeopard', '--test', 'fast/dom/prototype-taco.html'],
                ],
                [
                    ['python', 'echo', 'optimize-baselines', '--no-modify-scm', '--suffixes', 'txt,png', 'fast/dom/prototype-chocolate.html'],
                    ['python', 'echo', 'optimize-baselines', '--no-modify-scm', '--suffixes', 'png', 'fast/dom/prototype-strawberry.html'],
                    ['python', 'echo', 'optimize-baselines', '--no-modify-scm', '--suffixes', 'txt', 'fast/dom/prototype-taco.html'],
                ],
                ['git', 'cl', 'upload', '-f'],
                ['git', 'pull'],
                ['git', 'cl', 'dcommit', '-f'],
                ['git', 'cl', 'set_close'],
            ])

            # The mac ports should both be removed since they're the only ones in builders._exact_matches.
            self.assertEqual(self.tool.filesystem.read_text_file(test_port.path_to_generic_test_expectations_file()), """
crbug.com/24182 [ Debug ] path/to/norebaseline.html [ Rebaseline ]
Bug(foo) [ Linux Win ] fast/dom/prototype-taco.html [ NeedsRebaseline ]
crbug.com/24182 [ Linux Win ] fast/dom/prototype-chocolate.html [ NeedsRebaseline ]
crbug.com/24182 path/to/not-cycled-through-bots.html [ NeedsRebaseline ]
crbug.com/24182 path/to/locally-changed-lined.html [ NeedsRebaseline ]
""")
        finally:
            builders._exact_matches = old_exact_matches

    def test_execute_git_cl_hangs(self):
        def blame(path):
            return """
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-04-28 04:52:41 +0000 13) Bug(foo) fast/dom/prototype-taco.html [ NeedsRebaseline ]
"""
        self.tool.scm().blame = blame

        test_port = self._setup_test_port()

        old_builder_data = self.command.builder_data

        def builder_data():
            old_builder_data()
            # have prototype-chocolate only fail on "MOCK Leopard".
            self.command._builder_data['MOCK SnowLeopard'] = LayoutTestResults.results_from_string("""ADD_RESULTS({
"tests": {
"fast": {
"dom": {
"prototype-taco.html": {
"expected": "PASS",
"actual": "PASS TEXT",
"is_unexpected": true
}
}
}
}
});""")
            return self.command._builder_data
        self.command.builder_data = builder_data

        self.tool.filesystem.write_text_file(test_port.path_to_generic_test_expectations_file(), """
Bug(foo) fast/dom/prototype-taco.html [ NeedsRebaseline ]
""")

        self._write_test_file(test_port, 'fast/dom/prototype-taco.html', "Dummy test contents")

        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK SnowLeopard": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }

            # Make the command give up on `git cl` immediately; the pipeline
            # must stop after the upload attempt (no pull/dcommit/set_close).
            self.command.SECONDS_BEFORE_GIVING_UP = 0
            self.command.tree_status = lambda: 'open'
            self.tool.executive = MockExecutive(
                should_return_zero_when_run=set(self.SVN_REMOTE_CMD))
            self.tool.executive.calls = []
            self.command.execute(MockOptions(optimize=True, verbose=False, results_directory=False), [], self.tool)

            self.assertEqual(self.tool.executive.calls, [
                self.SVN_REMOTE_CMD,
                ['git', 'rev-parse', '--symbolic-full-name', 'HEAD'],
                [
                    ['python', 'echo', 'copy-existing-baselines-internal', '--suffixes', 'txt', '--builder', 'MOCK SnowLeopard', '--test', 'fast/dom/prototype-taco.html'],
                ],
                [
                    ['python', 'echo', 'rebaseline-test-internal', '--suffixes', 'txt', '--builder', 'MOCK SnowLeopard', '--test', 'fast/dom/prototype-taco.html'],
                ],
                [['python', 'echo', 'optimize-baselines', '--no-modify-scm', '--suffixes', 'txt', 'fast/dom/prototype-taco.html']],
                ['git', 'cl', 'upload', '-f'],
            ])
        finally:
            builders._exact_matches = old_exact_matches

    def test_execute_test_passes_everywhere(self):
        def blame(path):
            return """
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-04-28 04:52:41 +0000 13) Bug(foo) fast/dom/prototype-taco.html [ NeedsRebaseline ]
"""
        self.tool.scm().blame = blame

        test_port = self._setup_test_port()

        old_builder_data = self.command.builder_data

        def builder_data():
            # Every builder reports an unexpected PASS, so there is nothing
            # to rebaseline — only the expectation line gets rewritten.
            self.command._builder_data['MOCK Leopard'] = self.command._builder_data['MOCK SnowLeopard'] = LayoutTestResults.results_from_string("""ADD_RESULTS({
"tests": {
"fast": {
"dom": {
"prototype-taco.html": {
"expected": "FAIL",
"actual": "PASS",
"is_unexpected": true
}
}
}
}
});""")
            return self.command._builder_data
        self.command.builder_data = builder_data

        self.tool.filesystem.write_text_file(test_port.path_to_generic_test_expectations_file(), """
Bug(foo) fast/dom/prototype-taco.html [ NeedsRebaseline ]
""")

        self._write_test_file(test_port, 'fast/dom/prototype-taco.html', "Dummy test contents")

        self.tool.executive = MockLineRemovingExecutive(
            should_return_zero_when_run=set(self.SVN_REMOTE_CMD))

        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK Leopard": {"port_name": "test-mac-leopard", "specifiers": set(["mock-specifier"])},
                "MOCK SnowLeopard": {"port_name": "test-mac-snowleopard", "specifiers": set(["mock-specifier"])},
            }
            self.command.tree_status = lambda: 'open'
            self.command.execute(MockOptions(optimize=True, verbose=False, results_directory=False), [], self.tool)
            self.assertEqual(self.tool.executive.calls, [
                self.SVN_REMOTE_CMD,
                ['git', 'rev-parse', '--symbolic-full-name', 'HEAD'],
                [['python', 'echo', 'optimize-baselines', '--no-modify-scm', '--suffixes', '', 'fast/dom/prototype-taco.html']],
                ['git', 'cl', 'upload', '-f'],
                ['git', 'pull'],
                ['git', 'cl', 'dcommit', '-f'],
                ['git', 'cl', 'set_close'],
            ])

            # The mac ports should both be removed since they're the only ones in builders._exact_matches.
            self.assertEqual(self.tool.filesystem.read_text_file(test_port.path_to_generic_test_expectations_file()), """
Bug(foo) [ Linux Win ] fast/dom/prototype-taco.html [ NeedsRebaseline ]
""")
        finally:
            builders._exact_matches = old_exact_matches

    def test_execute_setup_git_svn(self):
        def blame(path):
            return """
6469e754a1 path/to/TestExpectations (foobarbaz1@chromium.org 2013-04-28 04:52:41 +0000 13) Bug(foo) fast/dom/prototype-taco.html [ NeedsRebaseline ]
"""
        self.tool.scm().blame = blame

        test_port = self._setup_test_port()

        old_builder_data = self.command.builder_data

        def builder_data():
            self.command._builder_data['MOCK Win'] = LayoutTestResults.results_from_string("""ADD_RESULTS({
"tests": {
"fast": {
"dom": {
"prototype-taco.html": {
"expected": "FAIL",
"actual": "PASS",
"is_unexpected": true
}
}
}
}
});""")
            return self.command._builder_data
        self.command.builder_data = builder_data

        self.tool.filesystem.write_text_file(test_port.path_to_generic_test_expectations_file(), """
Bug(foo) fast/dom/prototype-taco.html [ NeedsRebaseline ]
""")

        self._write_test_file(test_port, 'fast/dom/prototype-taco.html', "Dummy test contents")

        # Default MockLineRemovingExecutive: the svn-remote probe returns
        # non-zero, so `git auto-svn` must run to set up git-svn first.
        self.tool.executive = MockLineRemovingExecutive()

        old_exact_matches = builders._exact_matches
        try:
            builders._exact_matches = {
                "MOCK Win": {"port_name": "test-win-win7", "specifiers": set(["mock-specifier"])},
            }
            self.command.tree_status = lambda: 'open'
            self.command.execute(MockOptions(optimize=True, verbose=False, results_directory=False), [], self.tool)
            self.assertEqual(self.tool.executive.calls, [
                self.SVN_REMOTE_CMD,
                ['git', 'auto-svn'],
                ['git', 'rev-parse', '--symbolic-full-name', 'HEAD'],
                [['python', 'echo', 'optimize-baselines', '--no-modify-scm', '--suffixes', '', 'fast/dom/prototype-taco.html']],
                ['git', 'cl', 'upload', '-f'],
                ['git', 'pull'],
                ['git', 'cl', 'dcommit', '-f'],
                ['git', 'cl', 'set_close'],
            ])

            self.assertEqual(self.tool.filesystem.read_text_file(test_port.path_to_generic_test_expectations_file()), """
Bug(foo) [ Linux Mac XP ] fast/dom/prototype-taco.html [ NeedsRebaseline ]
""")
        finally:
            builders._exact_matches = old_exact_matches
class TestRebaselineOMatic(_BaseTestCase):
    """Tests for the rebaseline-o-matic daemon command."""
    command_constructor = RebaselineOMatic

    def setUp(self):
        super(TestRebaselineOMatic, self).setUp()
        # Collects log messages the command would post to the server.
        self._logs = []

    def _mock_log_to_server(self, log=''):
        self._logs.append(log)

    def test_run_logged_command(self):
        self.command._verbose = False
        self.command._post_log_to_server = self._mock_log_to_server
        self.command._run_logged_command(['echo', 'foo'])
        self.assertEqual(self.tool.executive.calls, [['echo', 'foo']])
        self.assertEqual(self._logs, ['MOCK STDOUT'])

    def test_do_one_rebaseline(self):
        self.command._verbose = False
        self.command._post_log_to_server = self._mock_log_to_server

        oc = OutputCapture()
        oc.capture_output()
        self.command._do_one_rebaseline()
        out, _, _ = oc.restore_output()

        # Non-verbose mode: nothing printed, output only posted to the server.
        self.assertEqual(out, '')
        self.assertEqual(self.tool.executive.calls, [
            ['git', 'pull'],
            ['/mock-checkout/third_party/WebKit/Tools/Scripts/webkit-patch', 'auto-rebaseline'],
        ])
        self.assertEqual(self._logs, ['MOCK STDOUT'])

    def test_do_one_rebaseline_verbose(self):
        self.command._verbose = True
        self.command._post_log_to_server = self._mock_log_to_server

        oc = OutputCapture()
        oc.capture_output()
        self.command._do_one_rebaseline()
        out, _, _ = oc.restore_output()

        # Verbose mode: output is echoed to stdout and --verbose is forwarded.
        self.assertEqual(out, 'MOCK STDOUT\n')
        self.assertEqual(self.tool.executive.calls, [
            ['git', 'pull'],
            ['/mock-checkout/third_party/WebKit/Tools/Scripts/webkit-patch', 'auto-rebaseline', '--verbose'],
        ])
        self.assertEqual(self._logs, ['MOCK STDOUT'])
| gpl-3.0 |
redhat-openstack/ironic | ironic/tests/drivers/test_seamicro.py | 3 | 30134 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test class for Ironic SeaMicro driver."""
import uuid
import mock
from oslo_utils import uuidutils
from seamicroclient import client as seamicro_client
from seamicroclient import exceptions as seamicro_client_exception
from ironic.common import boot_devices
from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import states
from ironic.conductor import task_manager
from ironic.drivers.modules import console_utils
from ironic.drivers.modules import seamicro
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
# Canonical seamicro driver_info fixture shared by every test case below.
INFO_DICT = db_utils.get_test_seamicro_info()
class Fake_Server(object):
    """Minimal stand-in for a seamicroclient server object.

    Tracks only the ``active`` power flag and a single NIC entry; all
    configuration methods are accepted and are no-ops.
    """

    def __init__(self, active=False, *args, **kwargs):
        # Extra positional/keyword args are accepted and ignored to mirror
        # the real client object's constructor signature.
        self.active = active
        self.nic = {'0': {'untaggedVlan': ''}}

    def power_on(self):
        """Mark the server powered on."""
        self.active = True

    def power_off(self, force=False):
        """Mark the server powered off; ``force`` is accepted but ignored."""
        self.active = False

    def reset(self):
        """A reset leaves the server powered on."""
        self.active = True

    def set_untagged_vlan(self, vlan_id):
        """No-op stand-in for VLAN configuration."""
        return

    def attach_volume(self, volume_id):
        """No-op stand-in for volume attachment."""
        return

    def detach_volume(self):
        """No-op stand-in for volume detachment."""
        return

    def set_boot_order(self, boot_order):
        """No-op stand-in for boot-order configuration."""
        return

    def refresh(self, wait=0):
        """Return self unchanged; the real client re-fetches server state."""
        return self
class Fake_Volume(object):
    """Fake seamicroclient volume carrying only an ``id`` string.

    When no id is supplied, one is synthesized in the
    "<pool>/<container>/<uuid>" form the driver expects.
    """

    def __init__(self, id=None, *args, **kwargs):
        # Keep the parameter name ``id`` (shadowing the builtin) for
        # signature compatibility with the real client object.
        self.id = id if id is not None else "0/ironic-p6-6/%s" % uuid.uuid4()
class Fake_Pool(object):
    """Fake storage pool exposing only the ``freeSize`` attribute."""

    def __init__(self, freeSize=None, *args, **kwargs):
        # Extra args are accepted and ignored, mirroring the real client.
        self.freeSize = freeSize
class SeaMicroValidateParametersTestCase(db_base.DbTestCase):
    """Tests for seamicro._parse_driver_info input validation."""

    def _assert_missing_raises(self, key):
        # Dropping any single required driver_info key must raise
        # MissingParameterValue.
        info = dict(INFO_DICT)
        del info[key]
        node = obj_utils.get_test_node(self.context, driver_info=info)
        self.assertRaises(exception.MissingParameterValue,
                          seamicro._parse_driver_info,
                          node)

    def test__parse_driver_info_good(self):
        # A fully populated driver_info parses and exposes every field.
        node = obj_utils.get_test_node(
            self.context,
            driver='fake_seamicro',
            driver_info=INFO_DICT)
        info = seamicro._parse_driver_info(node)
        for field in ('api_endpoint', 'username', 'password',
                      'server_id', 'uuid'):
            self.assertIsNotNone(info.get(field))

    def test__parse_driver_info_missing_api_endpoint(self):
        self._assert_missing_raises('seamicro_api_endpoint')

    def test__parse_driver_info_missing_username(self):
        self._assert_missing_raises('seamicro_username')

    def test__parse_driver_info_missing_password(self):
        self._assert_missing_raises('seamicro_password')

    def test__parse_driver_info_missing_server_id(self):
        self._assert_missing_raises('seamicro_server_id')
@mock.patch('eventlet.greenthread.sleep', lambda n: None)
class SeaMicroPrivateMethodsTestCase(db_base.DbTestCase):
    """Unit tests for the module-private helpers in the seamicro driver.

    ``eventlet.greenthread.sleep`` is patched out at class level so the
    driver's power-state retry loops finish instantly.
    """
    def setUp(self):
        super(SeaMicroPrivateMethodsTestCase, self).setUp()
        n = {
            'driver': 'fake_seamicro',
            'driver_info': INFO_DICT
        }
        self.node = obj_utils.create_test_node(self.context, **n)
        self.Server = Fake_Server
        self.Volume = Fake_Volume
        self.Pool = Fake_Pool
        # Zero action timeout and only two retries keep the retry loops
        # short in the failure-path tests below.
        self.config(action_timeout=0, group='seamicro')
        self.config(max_retry=2, group='seamicro')
        self.info = seamicro._parse_driver_info(self.node)
    @mock.patch.object(seamicro_client, "Client", autospec=True)
    def test__get_client(self, mock_client):
        # The client must be constructed from the parsed driver_info fields.
        args = {'username': self.info['username'],
                'password': self.info['password'],
                'auth_url': self.info['api_endpoint']}
        seamicro._get_client(**self.info)
        mock_client.assert_called_once_with(self.info['api_version'], **args)
    @mock.patch.object(seamicro_client, "Client", autospec=True)
    def test__get_client_fail(self, mock_client):
        # An unsupported client API version surfaces as
        # InvalidParameterValue.
        args = {'username': self.info['username'],
                'password': self.info['password'],
                'auth_url': self.info['api_endpoint']}
        mock_client.side_effect = seamicro_client_exception.UnsupportedVersion
        self.assertRaises(exception.InvalidParameterValue,
                          seamicro._get_client,
                          **self.info)
        mock_client.assert_called_once_with(self.info['api_version'], **args)
    @mock.patch.object(seamicro, "_get_server", autospec=True)
    def test__get_power_status_on(self, mock_get_server):
        mock_get_server.return_value = self.Server(active=True)
        pstate = seamicro._get_power_status(self.node)
        self.assertEqual(states.POWER_ON, pstate)
    @mock.patch.object(seamicro, "_get_server", autospec=True)
    def test__get_power_status_off(self, mock_get_server):
        mock_get_server.return_value = self.Server(active=False)
        pstate = seamicro._get_power_status(self.node)
        self.assertEqual(states.POWER_OFF, pstate)
    @mock.patch.object(seamicro, "_get_server", autospec=True)
    def test__get_power_status_error(self, mock_get_server):
        # active=None is neither on nor off and must map to ERROR.
        mock_get_server.return_value = self.Server(active=None)
        pstate = seamicro._get_power_status(self.node)
        self.assertEqual(states.ERROR, pstate)
    @mock.patch.object(seamicro, "_get_server", autospec=True)
    def test__power_on_good(self, mock_get_server):
        mock_get_server.return_value = self.Server(active=False)
        pstate = seamicro._power_on(self.node)
        self.assertEqual(states.POWER_ON, pstate)
    @mock.patch.object(seamicro, "_get_server", autospec=True)
    def test__power_on_fail(self, mock_get_server):
        # A power_on that never flips 'active' exhausts the retries and
        # leaves the node in ERROR.
        def fake_power_on():
            return
        server = self.Server(active=False)
        server.power_on = fake_power_on
        mock_get_server.return_value = server
        pstate = seamicro._power_on(self.node)
        self.assertEqual(states.ERROR, pstate)
    @mock.patch.object(seamicro, "_get_server", autospec=True)
    def test__power_off_good(self, mock_get_server):
        mock_get_server.return_value = self.Server(active=True)
        pstate = seamicro._power_off(self.node)
        self.assertEqual(states.POWER_OFF, pstate)
    @mock.patch.object(seamicro, "_get_server", autospec=True)
    def test__power_off_fail(self, mock_get_server):
        def fake_power_off():
            return
        server = self.Server(active=True)
        server.power_off = fake_power_off
        mock_get_server.return_value = server
        pstate = seamicro._power_off(self.node)
        self.assertEqual(states.ERROR, pstate)
    @mock.patch.object(seamicro, "_get_server", autospec=True)
    def test__reboot_good(self, mock_get_server):
        mock_get_server.return_value = self.Server(active=True)
        pstate = seamicro._reboot(self.node)
        self.assertEqual(states.POWER_ON, pstate)
    @mock.patch.object(seamicro, "_get_server", autospec=True)
    def test__reboot_fail(self, mock_get_server):
        def fake_reboot():
            return
        server = self.Server(active=False)
        server.reset = fake_reboot
        mock_get_server.return_value = server
        pstate = seamicro._reboot(self.node)
        self.assertEqual(states.ERROR, pstate)
    @mock.patch.object(seamicro, "_get_volume", autospec=True)
    def test__validate_fail(self, mock_get_volume):
        # Volume id whose container is not "ironic-" prefixed is rejected
        # -- presumably how the driver recognizes its own volumes; see
        # seamicro._validate_volume.
        volume_id = "0/p6-6/vol1"
        volume = self.Volume()
        volume.id = volume_id
        mock_get_volume.return_value = volume
        self.assertRaises(exception.InvalidParameterValue,
                          seamicro._validate_volume, self.info, volume_id)
    @mock.patch.object(seamicro, "_get_volume", autospec=True)
    def test__validate_good(self, mock_get_volume):
        volume = self.Volume()
        mock_get_volume.return_value = volume
        valid = seamicro._validate_volume(self.info, volume.id)
        self.assertEqual(valid, True)
    @mock.patch.object(seamicro, "_get_pools", autospec=True)
    def test__create_volume_fail(self, mock_get_pools):
        # No storage pools available -> volume creation must fail.
        mock_get_pools.return_value = None
        self.assertRaises(exception.IronicException,
                          seamicro._create_volume,
                          self.info, 2)
    @mock.patch.object(seamicro, "_get_pools", autospec=True)
    @mock.patch.object(seamicro, "_get_client", autospec=True)
    def test__create_volume_good(self, mock_get_client, mock_get_pools):
        pools = [self.Pool(1), self.Pool(6), self.Pool(5)]
        mock_seamicro_volumes = mock.MagicMock(spec_set=['create'])
        mock_get_client.return_value = mock.MagicMock(
            volumes=mock_seamicro_volumes, spec_set=['volumes'])
        mock_get_pools.return_value = pools
        seamicro._create_volume(self.info, 2)
class SeaMicroPowerDriverTestCase(db_base.DbTestCase):
    """Tests for the seamicro power, vendor-passthru and management
    interfaces, exercised through the 'fake_seamicro' driver with the
    seamicroclient calls mocked out.
    """
    def setUp(self):
        super(SeaMicroPowerDriverTestCase, self).setUp()
        mgr_utils.mock_the_extension_manager(driver='fake_seamicro')
        self.driver = driver_factory.get_driver('fake_seamicro')
        self.node = obj_utils.create_test_node(self.context,
                                               driver='fake_seamicro',
                                               driver_info=INFO_DICT)
        # Patcher started/stopped manually by tests that need a fake server
        # without a per-test @mock.patch decorator.
        self.get_server_patcher = mock.patch.object(seamicro, '_get_server',
                                                    autospec=True)
        self.get_server_mock = None
        self.Server = Fake_Server
        self.Volume = Fake_Volume
        self.info = seamicro._parse_driver_info(self.node)
    def test_get_properties(self):
        expected = seamicro.COMMON_PROPERTIES
        with task_manager.acquire(self.context, self.node['uuid'],
                                  shared=True) as task:
            self.assertEqual(expected, task.driver.power.get_properties())
            # The console interface (and the driver as a whole) also
            # advertises the console-specific properties.
            expected = (list(seamicro.COMMON_PROPERTIES) +
                        list(seamicro.CONSOLE_PROPERTIES))
            console_properties = task.driver.console.get_properties().keys()
            self.assertEqual(sorted(expected), sorted(console_properties))
            self.assertEqual(sorted(expected),
                             sorted(task.driver.get_properties().keys()))
    def test_vendor_routes(self):
        expected = ['set_node_vlan_id', 'attach_volume']
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=True) as task:
            vendor_routes = task.driver.vendor.vendor_routes
            self.assertIsInstance(vendor_routes, dict)
            self.assertEqual(sorted(expected), sorted(vendor_routes))
    def test_driver_routes(self):
        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=True) as task:
            driver_routes = task.driver.vendor.driver_routes
            self.assertIsInstance(driver_routes, dict)
            self.assertEqual({}, driver_routes)
    @mock.patch.object(seamicro, '_parse_driver_info', autospec=True)
    def test_power_interface_validate_good(self, parse_drv_info_mock):
        with task_manager.acquire(self.context, self.node['uuid'],
                                  shared=True) as task:
            task.driver.power.validate(task)
        self.assertEqual(1, parse_drv_info_mock.call_count)
    @mock.patch.object(seamicro, '_parse_driver_info', autospec=True)
    def test_power_interface_validate_fails(self, parse_drv_info_mock):
        side_effect = iter([exception.InvalidParameterValue("Bad input")])
        parse_drv_info_mock.side_effect = side_effect
        with task_manager.acquire(self.context, self.node['uuid'],
                                  shared=True) as task:
            self.assertRaises(exception.InvalidParameterValue,
                              task.driver.power.validate, task)
        self.assertEqual(1, parse_drv_info_mock.call_count)
    @mock.patch.object(seamicro, '_reboot', autospec=True)
    def test_reboot(self, mock_reboot):
        mock_reboot.return_value = states.POWER_ON
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            task.driver.power.reboot(task)
            mock_reboot.assert_called_once_with(task.node)
    def test_set_power_state_bad_state(self):
        # Unknown power states must be rejected.
        self.get_server_mock = self.get_server_patcher.start()
        self.get_server_mock.return_value = self.Server()
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            self.assertRaises(exception.IronicException,
                              task.driver.power.set_power_state,
                              task, "BAD_PSTATE")
        self.get_server_patcher.stop()
    @mock.patch.object(seamicro, '_power_on', autospec=True)
    def test_set_power_state_on_good(self, mock_power_on):
        mock_power_on.return_value = states.POWER_ON
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            task.driver.power.set_power_state(task, states.POWER_ON)
            mock_power_on.assert_called_once_with(task.node)
    @mock.patch.object(seamicro, '_power_on', autospec=True)
    def test_set_power_state_on_fail(self, mock_power_on):
        # Helper reporting the opposite state -> PowerStateFailure.
        mock_power_on.return_value = states.POWER_OFF
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            self.assertRaises(exception.PowerStateFailure,
                              task.driver.power.set_power_state,
                              task, states.POWER_ON)
            mock_power_on.assert_called_once_with(task.node)
    @mock.patch.object(seamicro, '_power_off', autospec=True)
    def test_set_power_state_off_good(self, mock_power_off):
        mock_power_off.return_value = states.POWER_OFF
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            task.driver.power.set_power_state(task, states.POWER_OFF)
            mock_power_off.assert_called_once_with(task.node)
    @mock.patch.object(seamicro, '_power_off', autospec=True)
    def test_set_power_state_off_fail(self, mock_power_off):
        mock_power_off.return_value = states.POWER_ON
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            self.assertRaises(exception.PowerStateFailure,
                              task.driver.power.set_power_state,
                              task, states.POWER_OFF)
            mock_power_off.assert_called_once_with(task.node)
    @mock.patch.object(seamicro, '_parse_driver_info', autospec=True)
    def test_vendor_passthru_validate_good(self, mock_info):
        # Every advertised vendor route must validate the driver_info.
        with task_manager.acquire(self.context, self.node['uuid'],
                                  shared=True) as task:
            for method in task.driver.vendor.vendor_routes:
                task.driver.vendor.validate(task, **{'method': method})
            self.assertEqual(len(task.driver.vendor.vendor_routes),
                             mock_info.call_count)
    @mock.patch.object(seamicro, '_parse_driver_info', autospec=True)
    def test_vendor_passthru_validate_parse_driver_info_fail(self, mock_info):
        mock_info.side_effect = iter([exception.InvalidParameterValue("bad")])
        with task_manager.acquire(self.context, self.node['uuid'],
                                  shared=True) as task:
            method = list(task.driver.vendor.vendor_routes)[0]
            self.assertRaises(exception.InvalidParameterValue,
                              task.driver.vendor.validate,
                              task, **{'method': method})
            mock_info.assert_called_once_with(task.node)
    @mock.patch.object(seamicro, '_get_server', autospec=True)
    def test_set_node_vlan_id_good(self, mock_get_server):
        vlan_id = "12"
        mock_get_server.return_value = self.Server(active="true")
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            kwargs = {'vlan_id': vlan_id}
            task.driver.vendor.set_node_vlan_id(task, **kwargs)
        mock_get_server.assert_called_once_with(self.info)
    def test_set_node_vlan_id_no_input(self):
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            self.assertRaises(exception.InvalidParameterValue,
                              task.driver.vendor.set_node_vlan_id,
                              task, **{})
    @mock.patch.object(seamicro, '_get_server', autospec=True)
    def test_set_node_vlan_id_fail(self, mock_get_server):
        # Client errors are wrapped in IronicException.
        def fake_set_untagged_vlan(self, **kwargs):
            raise seamicro_client_exception.ClientException(500)
        vlan_id = "12"
        server = self.Server(active="true")
        server.set_untagged_vlan = fake_set_untagged_vlan
        mock_get_server.return_value = server
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            kwargs = {'vlan_id': vlan_id}
            self.assertRaises(exception.IronicException,
                              task.driver.vendor.set_node_vlan_id,
                              task, **kwargs)
        mock_get_server.assert_called_once_with(self.info)
    @mock.patch.object(seamicro, '_get_server', autospec=True)
    @mock.patch.object(seamicro, '_validate_volume', autospec=True)
    def test_attach_volume_with_volume_id_good(self, mock_validate_volume,
                                               mock_get_server):
        volume_id = '0/ironic-p6-1/vol1'
        mock_validate_volume.return_value = True
        mock_get_server.return_value = self.Server(active="true")
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            kwargs = {'volume_id': volume_id}
            task.driver.vendor.attach_volume(task, **kwargs)
        mock_get_server.assert_called_once_with(self.info)
    @mock.patch.object(seamicro, '_get_server', autospec=True)
    @mock.patch.object(seamicro, '_get_volume', autospec=True)
    def test_attach_volume_with_invalid_volume_id_fail(self,
                                                       mock_get_volume,
                                                       mock_get_server):
        # '0/p6-1/vol1' lacks the "ironic-" container prefix and must be
        # rejected by volume validation.
        volume_id = '0/p6-1/vol1'
        mock_get_volume.return_value = self.Volume(volume_id)
        mock_get_server.return_value = self.Server(active="true")
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            kwargs = {'volume_id': volume_id}
            self.assertRaises(exception.InvalidParameterValue,
                              task.driver.vendor.attach_volume,
                              task, **kwargs)
    @mock.patch.object(seamicro, '_get_server', autospec=True)
    @mock.patch.object(seamicro, '_validate_volume', autospec=True)
    def test_attach_volume_fail(self, mock_validate_volume,
                                mock_get_server):
        def fake_attach_volume(self, **kwargs):
            raise seamicro_client_exception.ClientException(500)
        volume_id = '0/p6-1/vol1'
        mock_validate_volume.return_value = True
        server = self.Server(active="true")
        server.attach_volume = fake_attach_volume
        mock_get_server.return_value = server
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            kwargs = {'volume_id': volume_id}
            self.assertRaises(exception.IronicException,
                              task.driver.vendor.attach_volume,
                              task, **kwargs)
        mock_get_server.assert_called_once_with(self.info)
    @mock.patch.object(seamicro, '_get_server', autospec=True)
    @mock.patch.object(seamicro, '_validate_volume', autospec=True)
    @mock.patch.object(seamicro, '_create_volume', autospec=True)
    def test_attach_volume_with_volume_size_good(self, mock_create_volume,
                                                 mock_validate_volume,
                                                 mock_get_server):
        # Passing volume_size (instead of volume_id) creates a new volume.
        volume_id = '0/ironic-p6-1/vol1'
        volume_size = 2
        mock_create_volume.return_value = volume_id
        mock_validate_volume.return_value = True
        mock_get_server.return_value = self.Server(active="true")
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            kwargs = {'volume_size': volume_size}
            task.driver.vendor.attach_volume(task, **kwargs)
        mock_get_server.assert_called_once_with(self.info)
        mock_create_volume.assert_called_once_with(self.info, volume_size)
    def test_attach_volume_with_no_input_fail(self):
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            self.assertRaises(exception.InvalidParameterValue,
                              task.driver.vendor.attach_volume, task,
                              **{})
    @mock.patch.object(seamicro, '_get_server', autospec=True)
    def test_set_boot_device_good(self, mock_get_server):
        boot_device = "disk"
        mock_get_server.return_value = self.Server(active="true")
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            task.driver.management.set_boot_device(task, boot_device)
        mock_get_server.assert_called_once_with(self.info)
    @mock.patch.object(seamicro, '_get_server', autospec=True)
    def test_set_boot_device_invalid_device_fail(self, mock_get_server):
        boot_device = "invalid_device"
        mock_get_server.return_value = self.Server(active="true")
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            self.assertRaises(exception.InvalidParameterValue,
                              task.driver.management.set_boot_device,
                              task, boot_device)
    @mock.patch.object(seamicro, '_get_server', autospec=True)
    def test_set_boot_device_fail(self, mock_get_server):
        def fake_set_boot_order(self, **kwargs):
            raise seamicro_client_exception.ClientException(500)
        boot_device = "pxe"
        server = self.Server(active="true")
        server.set_boot_order = fake_set_boot_order
        mock_get_server.return_value = server
        with task_manager.acquire(self.context, self.info['uuid'],
                                  shared=False) as task:
            self.assertRaises(exception.IronicException,
                              task.driver.management.set_boot_device,
                              task, boot_device)
        mock_get_server.assert_called_once_with(self.info)
    def test_management_interface_get_supported_boot_devices(self):
        with task_manager.acquire(self.context, self.node.uuid) as task:
            expected = [boot_devices.PXE, boot_devices.DISK]
            self.assertEqual(sorted(expected), sorted(task.driver.management.
                             get_supported_boot_devices(task)))
    def test_management_interface_get_boot_device(self):
        # The seamicro driver cannot report the current boot device.
        with task_manager.acquire(self.context, self.node.uuid) as task:
            expected = {'boot_device': None, 'persistent': None}
            self.assertEqual(expected,
                             task.driver.management.get_boot_device(task))
    def test_management_interface_validate_good(self):
        with task_manager.acquire(self.context, self.node.uuid) as task:
            task.driver.management.validate(task)
    def test_management_interface_validate_fail(self):
        # Missing SEAMICRO driver_info information
        node = obj_utils.create_test_node(self.context,
                                          uuid=uuidutils.generate_uuid(),
                                          driver='fake_seamicro')
        with task_manager.acquire(self.context, node.uuid) as task:
            self.assertRaises(exception.MissingParameterValue,
                              task.driver.management.validate, task)
class SeaMicroDriverTestCase(db_base.DbTestCase):
    """Tests for the seamicro shellinabox console interface."""
    def setUp(self):
        super(SeaMicroDriverTestCase, self).setUp()
        mgr_utils.mock_the_extension_manager(driver='fake_seamicro')
        self.driver = driver_factory.get_driver('fake_seamicro')
        self.node = obj_utils.create_test_node(self.context,
                                               driver='fake_seamicro',
                                               driver_info=INFO_DICT)
        self.get_server_patcher = mock.patch.object(seamicro, '_get_server',
                                                    autospec=True)
        self.get_server_mock = None
        self.Server = Fake_Server
        self.Volume = Fake_Volume
        self.info = seamicro._parse_driver_info(self.node)
    @mock.patch.object(console_utils, 'start_shellinabox_console',
                       autospec=True)
    def test_start_console(self, mock_exec):
        mock_exec.return_value = None
        with task_manager.acquire(self.context,
                                  self.node.uuid) as task:
            self.driver.console.start_console(task)
        # Console is started with the node uuid, configured port and the
        # (implementation-defined) console command.
        mock_exec.assert_called_once_with(self.info['uuid'],
                                          self.info['port'],
                                          mock.ANY)
    @mock.patch.object(console_utils, 'start_shellinabox_console',
                       autospec=True)
    def test_start_console_fail(self, mock_exec):
        mock_exec.side_effect = iter(
            [exception.ConsoleSubprocessFailed(error='error')])
        with task_manager.acquire(self.context,
                                  self.node.uuid) as task:
            self.assertRaises(exception.ConsoleSubprocessFailed,
                              self.driver.console.start_console,
                              task)
    @mock.patch.object(console_utils, 'stop_shellinabox_console',
                       autospec=True)
    def test_stop_console(self, mock_exec):
        mock_exec.return_value = None
        with task_manager.acquire(self.context,
                                  self.node.uuid) as task:
            self.driver.console.stop_console(task)
        mock_exec.assert_called_once_with(self.info['uuid'])
    @mock.patch.object(console_utils, 'stop_shellinabox_console',
                       autospec=True)
    def test_stop_console_fail(self, mock_stop):
        # Errors from the console utility propagate to the caller.
        mock_stop.side_effect = iter([exception.ConsoleError()])
        with task_manager.acquire(self.context,
                                  self.node.uuid) as task:
            self.assertRaises(exception.ConsoleError,
                              self.driver.console.stop_console,
                              task)
        mock_stop.assert_called_once_with(self.node.uuid)
    @mock.patch.object(console_utils, 'start_shellinabox_console',
                       autospec=True)
    def test_start_console_fail_nodir(self, mock_exec):
        mock_exec.side_effect = iter([exception.ConsoleError()])
        with task_manager.acquire(self.context,
                                  self.node.uuid) as task:
            self.assertRaises(exception.ConsoleError,
                              self.driver.console.start_console,
                              task)
        mock_exec.assert_called_once_with(self.node.uuid, mock.ANY, mock.ANY)
    @mock.patch.object(console_utils, 'get_shellinabox_console_url',
                       autospec=True)
    def test_get_console(self, mock_exec):
        url = 'http://localhost:4201'
        mock_exec.return_value = url
        expected = {'type': 'shellinabox', 'url': url}
        with task_manager.acquire(self.context,
                                  self.node.uuid) as task:
            console_info = self.driver.console.get_console(task)
            self.assertEqual(expected, console_info)
        mock_exec.assert_called_once_with(self.info['port'])
| apache-2.0 |
erjohnso/ansible | lib/ansible/modules/cloud/amazon/ec2_vpc_dhcp_option.py | 31 | 14953 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Module metadata consumed by Ansible tooling (ansible-doc, CI checks).
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'certified'}
DOCUMENTATION = """
---
module: ec2_vpc_dhcp_option
short_description: Manages DHCP Options, and can ensure the DHCP options for the given VPC match what's
requested
description:
- This module removes, or creates DHCP option sets, and can associate them to a VPC.
Optionally, a new DHCP Options set can be created that converges a VPC's existing
DHCP option set with values provided.
When dhcp_options_id is provided, the module will
1. remove (with state='absent')
2. ensure tags are applied (if state='present' and tags are provided
3. attach it to a VPC (if state='present' and a vpc_id is provided.
If any of the optional values are missing, they will either be treated
as a no-op (i.e., inherit what already exists for the VPC)
To remove existing options while inheriting, supply an empty value
(e.g. set ntp_servers to [] if you want to remove them from the VPC's options)
Most of the options should be self-explanatory.
author: "Joel Thompson (@joelthompson)"
version_added: 2.1
options:
domain_name:
description:
- The domain name to set in the DHCP option sets
required: false
default: None
dns_servers:
description:
- A list of hosts to set the DNS servers for the VPC to. (Should be a
list of IP addresses rather than host names.)
required: false
default: None
ntp_servers:
description:
- List of hosts to advertise as NTP servers for the VPC.
required: false
default: None
netbios_name_servers:
description:
- List of hosts to advertise as NetBIOS servers.
required: false
default: None
netbios_node_type:
description:
- NetBIOS node type to advertise in the DHCP options.
The AWS recommendation is to use 2 (when using netbios name services)
http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_DHCP_Options.html
required: false
default: None
vpc_id:
description:
- VPC ID to associate with the requested DHCP option set.
If no vpc id is provided, and no matching option set is found then a new
DHCP option set is created.
required: false
default: None
delete_old:
description:
- Whether to delete the old VPC DHCP option set when associating a new one.
This is primarily useful for debugging/development purposes when you
want to quickly roll back to the old option set. Note that this setting
will be ignored, and the old DHCP option set will be preserved, if it
is in use by any other VPC. (Otherwise, AWS will return an error.)
required: false
default: true
inherit_existing:
description:
- For any DHCP options not specified in these parameters, whether to
inherit them from the options set already applied to vpc_id, or to
reset them to be empty.
required: false
default: false
tags:
description:
- Tags to be applied to a VPC options set if a new one is created, or
if the resource_id is provided. (options must match)
required: False
default: None
aliases: [ 'resource_tags']
version_added: "2.1"
dhcp_options_id:
description:
- The resource_id of an existing DHCP options set.
If this is specified, then it will override other settings, except tags
(which will be updated to match)
required: False
default: None
version_added: "2.1"
state:
description:
- create/assign or remove the DHCP options.
If state is set to absent, then a DHCP options set matched either
by id, or tags and options will be removed if possible.
required: False
default: present
choices: [ 'absent', 'present' ]
version_added: "2.1"
extends_documentation_fragment: aws
requirements:
- boto
"""
RETURN = """
new_options:
description: The DHCP options created, associated or found
returned: when appropriate
type: dict
sample:
domain-name-servers:
- 10.0.0.1
- 10.0.1.1
netbois-name-servers:
- 10.0.0.1
- 10.0.1.1
netbios-node-type: 2
domain-name: "my.example.com"
dhcp_options_id:
description: The aws resource id of the primary DCHP options set created, found or removed
type: string
returned: when available
changed:
description: Whether the dhcp options were changed
type: bool
returned: always
"""
EXAMPLES = """
# Completely overrides the VPC DHCP options associated with VPC vpc-123456 and deletes any existing
# DHCP option set that may have been attached to that VPC.
- ec2_vpc_dhcp_option:
domain_name: "foo.example.com"
region: us-east-1
dns_servers:
- 10.0.0.1
- 10.0.1.1
ntp_servers:
- 10.0.0.2
- 10.0.1.2
netbios_name_servers:
- 10.0.0.1
- 10.0.1.1
netbios_node_type: 2
vpc_id: vpc-123456
delete_old: True
inherit_existing: False
# Ensure the DHCP option set for the VPC has 10.0.0.4 and 10.0.1.4 as the specified DNS servers, but
# keep any other existing settings. Also, keep the old DHCP option set around.
- ec2_vpc_dhcp_option:
region: us-east-1
dns_servers:
- "{{groups['dns-primary']}}"
- "{{groups['dns-secondary']}}"
vpc_id: vpc-123456
inherit_existing: True
delete_old: False
## Create a DHCP option set with 4.4.4.4 and 8.8.8.8 as the specified DNS servers, with tags
## but do not assign to a VPC
- ec2_vpc_dhcp_option:
region: us-east-1
dns_servers:
- 4.4.4.4
- 8.8.8.8
tags:
Name: google servers
Environment: Test
## Delete a DHCP options set that matches the tags and options specified
- ec2_vpc_dhcp_option:
region: us-east-1
dns_servers:
- 4.4.4.4
- 8.8.8.8
tags:
Name: google servers
Environment: Test
state: absent
## Associate a DHCP options set with a VPC by ID
- ec2_vpc_dhcp_option:
region: us-east-1
dhcp_options_id: dopt-12345678
vpc_id: vpc-123456
"""
import collections
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import HAS_BOTO, connect_to_aws, ec2_argument_spec, get_aws_connection_info
if HAS_BOTO:
import boto.vpc
import boto.ec2
from boto.exception import EC2ResponseError
def get_resource_tags(vpc_conn, resource_id):
    """Return the AWS tags on *resource_id* as a plain {name: value} dict."""
    tag_objects = vpc_conn.get_all_tags(filters={'resource-id': resource_id})
    return dict((tag.name, tag.value) for tag in tag_objects)
def ensure_tags(module, vpc_conn, resource_id, tags, add_only, check_mode):
    """Reconcile the tags on *resource_id* with the desired *tags* mapping.

    :param module: AnsibleModule, used only to fail with a useful message.
    :param vpc_conn: boto VPC connection.
    :param resource_id: id of the AWS resource whose tags are managed.
    :param tags: desired {name: value} mapping.
    :param add_only: when True, never delete tags that are absent from
        *tags*; only create/update.
    :param check_mode: passed through as boto's ``dry_run`` flag.
    :returns: dict with ``changed`` flag and the resulting ``tags``.
    """
    try:
        cur_tags = get_resource_tags(vpc_conn, resource_id)
        if tags == cur_tags:
            return {'changed': False, 'tags': cur_tags}
        to_delete = dict((k, cur_tags[k]) for k in cur_tags if k not in tags)
        if to_delete and not add_only:
            vpc_conn.delete_tags(resource_id, to_delete, dry_run=check_mode)
        # BUGFIX: also (re)create tags whose key exists but whose value
        # differs -- EC2 CreateTags overwrites the value for an existing
        # key. Previously only brand-new keys were created, so value
        # changes were silently skipped while 'changed' was reported True.
        to_add = dict((k, tags[k]) for k in tags
                      if k not in cur_tags or cur_tags[k] != tags[k])
        if to_add:
            vpc_conn.create_tags(resource_id, to_add, dry_run=check_mode)
        latest_tags = get_resource_tags(vpc_conn, resource_id)
        return {'changed': True, 'tags': latest_tags}
    except EC2ResponseError as e:
        module.fail_json(msg="Failed to modify tags: %s" % e.message, exception=traceback.format_exc())
def fetch_dhcp_options_for_vpc(vpc_conn, vpc_id):
    """Return the DHCP options set currently associated with *vpc_id*.

    Returns None when the VPC cannot be uniquely resolved, when it uses
    the AWS "default" options set, or when its referenced options set
    cannot be uniquely fetched.
    """
    matching_vpcs = vpc_conn.get_all_vpcs(vpc_ids=[vpc_id])
    if len(matching_vpcs) != 1:
        return None
    options_id = matching_vpcs[0].dhcp_options_id
    if options_id == "default":
        return None
    option_sets = vpc_conn.get_all_dhcp_options(dhcp_options_ids=[options_id])
    if len(option_sets) != 1:
        return None
    return option_sets[0]
def match_dhcp_options(vpc_conn, tags=None, options=None):
    """Find a DHCP options set that matches the given tags and/or options.

    Either filter may be omitted (falsy), in which case it matches
    everything. Returns a ``(found, options_set)`` tuple; ``(False, None)``
    when nothing matches.
    """
    for candidate in vpc_conn.get_all_dhcp_options():
        if tags and get_resource_tags(vpc_conn, candidate.id) != tags:
            continue
        if options and candidate.options != options:
            continue
        return (True, candidate)
    return (False, None)
def remove_dhcp_options_by_id(vpc_conn, dhcp_options_id):
    """Delete the DHCP options set unless some VPC still references it.

    Returns True when the set was deleted, False when it is still in use.
    """
    still_in_use = vpc_conn.get_all_vpcs(
        filters={'dhcpOptionsId': dhcp_options_id})
    if still_in_use:
        return False
    vpc_conn.delete_dhcp_options(dhcp_options_id)
    return True
def main():
    """Entry point for the Ansible module.

    Builds the requested DHCP options set (either looked up from an explicit
    dhcp_options_id, or assembled from the individual option parameters),
    reuses an existing matching set when possible, otherwise creates a new
    one, then associates it with the given VPC and optionally deletes the
    previously associated set.  Terminates via module.exit_json() /
    module.fail_json().
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        dhcp_options_id=dict(type='str', default=None),
        domain_name=dict(type='str', default=None),
        dns_servers=dict(type='list', default=None),
        ntp_servers=dict(type='list', default=None),
        netbios_name_servers=dict(type='list', default=None),
        netbios_node_type=dict(type='int', default=None),
        vpc_id=dict(type='str', default=None),
        delete_old=dict(type='bool', default=True),
        inherit_existing=dict(type='bool', default=False),
        tags=dict(type='dict', default=None, aliases=['resource_tags']),
        state=dict(type='str', default='present', choices=['present', 'absent'])
        )
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    params = module.params
    found = False
    changed = False
    # defaultdict(lambda: None) lets option keys be compared/read without
    # KeyError even when the caller never supplied them.
    new_options = collections.defaultdict(lambda: None)

    if not HAS_BOTO:
        module.fail_json(msg='boto is required for this module')

    region, ec2_url, boto_params = get_aws_connection_info(module)
    connection = connect_to_aws(boto.vpc, region, **boto_params)

    existing_options = None

    # First check if we were given a dhcp_options_id
    if not params['dhcp_options_id']:
        # No, so create new_options from the parameters
        if params['dns_servers'] is not None:
            new_options['domain-name-servers'] = params['dns_servers']
        if params['netbios_name_servers'] is not None:
            new_options['netbios-name-servers'] = params['netbios_name_servers']
        if params['ntp_servers'] is not None:
            new_options['ntp-servers'] = params['ntp_servers']
        if params['domain_name'] is not None:
            # needs to be a list for comparison with boto objects later
            new_options['domain-name'] = [ params['domain_name'] ]
        if params['netbios_node_type'] is not None:
            # needs to be a list for comparison with boto objects later
            new_options['netbios-node-type'] = [ str(params['netbios_node_type']) ]

        # If we were given a vpc_id then we need to look at the options on that
        if params['vpc_id']:
            existing_options = fetch_dhcp_options_for_vpc(connection, params['vpc_id'])
            # if we've been asked to inherit existing options, do that now
            if params['inherit_existing']:
                if existing_options:
                    for option in [ 'domain-name-servers', 'netbios-name-servers', 'ntp-servers', 'domain-name', 'netbios-node-type']:
                        # Only inherit when the caller did not set the option
                        # (None or ['']); an explicit empty list means "clear".
                        if existing_options.options.get(option) and new_options[option] != [] and (not new_options[option] or [''] == new_options[option]):
                            new_options[option] = existing_options.options.get(option)

            # Do the vpc's dhcp options already match what we're asked for? if so we are done
            if existing_options and new_options == existing_options.options:
                module.exit_json(changed=changed, new_options=new_options, dhcp_options_id=existing_options.id)

        # If no vpc_id was given, or the options don't match then look for an existing set using tags
        found, dhcp_option = match_dhcp_options(connection, params['tags'], new_options)

    # Now let's cover the case where there are existing options that we were told about by id
    # If a dhcp_options_id was supplied we don't look at options inside, just set tags (if given)
    else:
        supplied_options = connection.get_all_dhcp_options(filters={'dhcp-options-id':params['dhcp_options_id']})
        if len(supplied_options) != 1:
            if params['state'] != 'absent':
                module.fail_json(msg=" a dhcp_options_id was supplied, but does not exist")
        else:
            found = True
            dhcp_option = supplied_options[0]
            if params['state'] != 'absent' and params['tags']:
                ensure_tags(module, connection, dhcp_option.id, params['tags'], False, module.check_mode)

    # Now we have the dhcp options set, let's do the necessary

    # if we found options we were asked to remove then try to do so
    if params['state'] == 'absent':
        if not module.check_mode:
            if found:
                changed = remove_dhcp_options_by_id(connection, dhcp_option.id)
        module.exit_json(changed=changed, new_options={})

    # otherwise if we haven't found the required options we have something to do
    elif not module.check_mode and not found:

        # create some dhcp options if we weren't able to use existing ones
        if not found:

            # Convert netbios-node-type and domain-name back to strings
            if new_options['netbios-node-type']:
                new_options['netbios-node-type'] = new_options['netbios-node-type'][0]
            if new_options['domain-name']:
                new_options['domain-name'] = new_options['domain-name'][0]

            # create the new dhcp options set requested
            dhcp_option = connection.create_dhcp_options(
                new_options['domain-name'],
                new_options['domain-name-servers'],
                new_options['ntp-servers'],
                new_options['netbios-name-servers'],
                new_options['netbios-node-type'])
            changed = True
            if params['tags']:
                ensure_tags(module, connection, dhcp_option.id, params['tags'], False, module.check_mode)

    # If we were given a vpc_id, then attach the options we now have to that before we finish
    # NOTE(review): in check mode with state=present this branch is skipped, so
    # dhcp_option may be unbound at the final exit_json below -- TODO confirm
    # against the upstream module before relying on check-mode behaviour.
    if params['vpc_id'] and not module.check_mode:
        changed = True
        connection.associate_dhcp_options(dhcp_option.id, params['vpc_id'])
        # and remove old ones if that was requested
        if params['delete_old'] and existing_options:
            remove_dhcp_options_by_id(connection, existing_options.id)

    module.exit_json(changed=changed, new_options=new_options, dhcp_options_id=dhcp_option.id)
# Ansible executes modules as standalone scripts, so run main() directly.
if __name__ == "__main__":
    main()
| gpl-3.0 |
bwsblake/lettercounter | django-norel-env/lib/python2.7/site-packages/django/contrib/localflavor/gb/gb_regions.py | 199 | 3504 | """
Sources:
English regions: http://www.statistics.gov.uk/geography/downloads/31_10_01_REGION_names_and_codes_12_00.xls
Northern Ireland regions: http://en.wikipedia.org/wiki/List_of_Irish_counties_by_area
Welsh regions: http://en.wikipedia.org/wiki/Preserved_counties_of_Wales
Scottish regions: http://en.wikipedia.org/wiki/Regions_and_districts_of_Scotland
"""
from django.utils.translation import ugettext_lazy as _
# English regions as (value, localized label) choice pairs.
ENGLAND_REGION_CHOICES = (
    ("Bedfordshire", _("Bedfordshire")),
    ("Buckinghamshire", _("Buckinghamshire")),
    # Fixed: the label was a bare string missing the ugettext_lazy call,
    # so "Cambridgeshire" was the only untranslatable entry in the list.
    ("Cambridgeshire", _("Cambridgeshire")),
    ("Cheshire", _("Cheshire")),
    ("Cornwall and Isles of Scilly", _("Cornwall and Isles of Scilly")),
    ("Cumbria", _("Cumbria")),
    ("Derbyshire", _("Derbyshire")),
    ("Devon", _("Devon")),
    ("Dorset", _("Dorset")),
    ("Durham", _("Durham")),
    ("East Sussex", _("East Sussex")),
    ("Essex", _("Essex")),
    ("Gloucestershire", _("Gloucestershire")),
    ("Greater London", _("Greater London")),
    ("Greater Manchester", _("Greater Manchester")),
    ("Hampshire", _("Hampshire")),
    ("Hertfordshire", _("Hertfordshire")),
    ("Kent", _("Kent")),
    ("Lancashire", _("Lancashire")),
    ("Leicestershire", _("Leicestershire")),
    ("Lincolnshire", _("Lincolnshire")),
    ("Merseyside", _("Merseyside")),
    ("Norfolk", _("Norfolk")),
    ("North Yorkshire", _("North Yorkshire")),
    ("Northamptonshire", _("Northamptonshire")),
    ("Northumberland", _("Northumberland")),
    ("Nottinghamshire", _("Nottinghamshire")),
    ("Oxfordshire", _("Oxfordshire")),
    ("Shropshire", _("Shropshire")),
    ("Somerset", _("Somerset")),
    ("South Yorkshire", _("South Yorkshire")),
    ("Staffordshire", _("Staffordshire")),
    ("Suffolk", _("Suffolk")),
    ("Surrey", _("Surrey")),
    ("Tyne and Wear", _("Tyne and Wear")),
    ("Warwickshire", _("Warwickshire")),
    ("West Midlands", _("West Midlands")),
    ("West Sussex", _("West Sussex")),
    ("West Yorkshire", _("West Yorkshire")),
    ("Wiltshire", _("Wiltshire")),
    ("Worcestershire", _("Worcestershire")),
)
# Traditional counties of Northern Ireland (see module docstring for source).
NORTHERN_IRELAND_REGION_CHOICES = (
    ("County Antrim", _("County Antrim")),
    ("County Armagh", _("County Armagh")),
    ("County Down", _("County Down")),
    ("County Fermanagh", _("County Fermanagh")),
    ("County Londonderry", _("County Londonderry")),
    ("County Tyrone", _("County Tyrone")),
)

# Preserved counties of Wales.
WALES_REGION_CHOICES = (
    ("Clwyd", _("Clwyd")),
    ("Dyfed", _("Dyfed")),
    ("Gwent", _("Gwent")),
    ("Gwynedd", _("Gwynedd")),
    ("Mid Glamorgan", _("Mid Glamorgan")),
    ("Powys", _("Powys")),
    ("South Glamorgan", _("South Glamorgan")),
    ("West Glamorgan", _("West Glamorgan")),
)

# Regions of Scotland.
SCOTTISH_REGION_CHOICES = (
    ("Borders", _("Borders")),
    ("Central Scotland", _("Central Scotland")),
    ("Dumfries and Galloway", _("Dumfries and Galloway")),
    ("Fife", _("Fife")),
    ("Grampian", _("Grampian")),
    ("Highland", _("Highland")),
    ("Lothian", _("Lothian")),
    ("Orkney Islands", _("Orkney Islands")),
    ("Shetland Islands", _("Shetland Islands")),
    ("Strathclyde", _("Strathclyde")),
    ("Tayside", _("Tayside")),
    ("Western Isles", _("Western Isles")),
)

# The four constituent nations of Great Britain / the UK as used here.
GB_NATIONS_CHOICES = (
    ("England", _("England")),
    ("Northern Ireland", _("Northern Ireland")),
    ("Scotland", _("Scotland")),
    ("Wales", _("Wales")),
)

# Flat union of all regional choice lists, for fields spanning the whole of GB.
GB_REGION_CHOICES = ENGLAND_REGION_CHOICES + NORTHERN_IRELAND_REGION_CHOICES + WALES_REGION_CHOICES + SCOTTISH_REGION_CHOICES
| mit |
ZaraSeries/repo | script.module.urlresolver/lib/urlresolver/plugins/filepup.py | 8 | 2817 | """
urlresolver XBMC Addon
Copyright (C) 2015 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
from urlresolver import common
from urlresolver.resolver import UrlResolver, ResolverError # @UnusedImport
from lib import helpers
class FilePupResolver(UrlResolver):
    """Resolve filepup.net play/file page URLs to a direct stream URL."""
    name = "filepup"
    domains = ["filepup.net"]
    # Fixed: the domain dot is now escaped.  Previously "filepup.(?:net)" let
    # "." match any character, so look-alike hosts such as "filepupXnet"
    # would incorrectly match this resolver.
    pattern = '(?://|\.)(filepup\.(?:net))/(?:play|files)/([0-9a-zA-Z]+)'
    headers = {'User-Agent': common.RAND_UA}

    def __init__(self):
        self.net = common.Net()

    def get_media_url(self, host, media_id):
        """Fetch the play page and let the user pick among available qualities.

        Returns the chosen stream URL (with request headers appended), or
        None when no default source can be found in the page.
        """
        web_url = self.get_url(host, media_id)
        html = self.net.http_GET(web_url, headers=self.headers).content
        default_url = self.__get_def_source(html)
        if default_url:
            qualities = self.__get_qualities(html)
            def_quality = self.__get_default(html)
            sources = []
            for quality in qualities:
                if quality == def_quality:
                    sources.append((quality, default_url))
                else:
                    # Non-default qualities use the same URL with "-<quality>"
                    # inserted before the ".mp4" extension.
                    stream_url = default_url.replace('.mp4?', '-%s.mp4?' % (quality))
                    sources.append((quality, stream_url))
            # Sort best-first by numeric quality (e.g. "720p" -> 720); leave
            # the order untouched if a label is not of that form.
            try: sources.sort(key=lambda x: int(x[0][:-1]), reverse=True)
            except: pass
            return helpers.pick_source(sources)

    def __get_def_source(self, html):
        """Extract the default source URL from the jwplayer sources block, or ''."""
        default_url = ''
        match = re.search('sources\s*:\s*\[(.*?)\]', html, re.DOTALL)
        if match:
            match = re.search('src\s*:\s*"([^"]+)', match.group(1))
            if match:
                default_url = match.group(1) + helpers.append_headers(self.headers)
        return default_url

    def __get_default(self, html):
        """Return the page's declared default quality label, or ''."""
        match = re.search('defaultQuality\s*:\s*"([^"]+)', html)
        if match:
            return match.group(1)
        else:
            return ''

    def __get_qualities(self, html):
        """Return the list of quality labels advertised by the page."""
        qualities = []
        match = re.search('qualities\s*:\s*\[(.*?)\]', html)
        if match:
            qualities = re.findall('"([^"]+)"', match.group(1))
        return qualities

    def get_url(self, host, media_id):
        """Build the canonical play-page URL for *media_id*."""
        return 'http://www.filepup.net/play/%s' % (media_id)
| gpl-2.0 |
jelugbo/hebs_master | common/djangoapps/service_status/views.py | 188 | 1296 | """
Django Views for service status app
"""
import json
import time
from django.http import HttpResponse
from dogapi import dog_stats_api
from service_status import tasks
from djcelery import celery
from celery.exceptions import TimeoutError
def index(_):
    """
    An empty view

    Always returns an empty 200 response; serves as a trivial liveness probe.
    """
    return HttpResponse()
@dog_stats_api.timed('status.service.celery.status')
def celery_status(_):
    """
    A view that returns Celery stats

    Reports per-worker statistics from ``celery.control.inspect()`` as
    pretty-printed JSON.  ``stats()`` returns None when no workers respond,
    hence the ``or {}`` fallback.
    """
    stats = celery.control.inspect().stats() or {}
    # NOTE(review): ``mimetype`` is the pre-Django-1.7 keyword (``content_type``
    # in later versions) -- consistent with this codebase's Django version.
    return HttpResponse(json.dumps(stats, indent=4),
                        mimetype="application/json")
@dog_stats_api.timed('status.service.celery.ping')
def celery_ping(_):
    """
    A Simple view that checks if Celery can process a simple task

    Dispatches a trivial ``delayed_ping`` task and waits up to four seconds
    for its result, then reports success, the task id, the returned value
    and the elapsed wall-clock time as JSON.
    """
    start = time.time()
    result = tasks.delayed_ping.apply_async(('ping', 0.1))
    task_id = result.id

    # Wait until we get the result
    try:
        value = result.get(timeout=4.0)
        success = True
    except TimeoutError:
        # No worker answered in time: report failure instead of raising,
        # so the status endpoint itself never 500s on a slow/stopped worker.
        value = None
        success = False

    output = {
        'success': success,
        'task_id': task_id,
        'value': value,
        'time': time.time() - start,
    }

    return HttpResponse(json.dumps(output, indent=4),
                        mimetype="application/json")
| agpl-3.0 |
kmoocdev/edx-platform | cms/djangoapps/contentstore/management/commands/export_convert_format.py | 67 | 2362 | """
Script for converting a tar.gz file representing an exported course
to the archive format used by a different version of export.
Sample invocation: ./manage.py export_convert_format mycourse.tar.gz ~/newformat/
"""
import os
from path import path
from django.core.management.base import BaseCommand, CommandError
from tempfile import mkdtemp
import tarfile
import shutil
from extract_tar import safetar_extractall
from xmodule.modulestore.xml_exporter import convert_between_versions
class Command(BaseCommand):
    """
    Convert between export formats.

    Takes a tar.gz course export and a target directory, converts the export
    to the other archive format version, and writes a new
    ``<name>_version_<N>.tar.gz`` into the target directory.
    """
    help = 'Convert between versions 0 and 1 of the course export format'
    args = '<tar.gz archive file> <output path>'

    def handle(self, *args, **options):
        "Execute the command"
        if len(args) != 2:
            raise CommandError("export requires two arguments: <tar.gz file> <output path>")

        source_archive = args[0]
        output_path = args[1]

        # Create temp directories to extract the source and create the target archive.
        temp_source_dir = mkdtemp()
        temp_target_dir = mkdtemp()
        try:
            extract_source(source_archive, temp_source_dir)

            # convert_between_versions detects the input version and returns
            # the version it converted to.
            desired_version = convert_between_versions(temp_source_dir, temp_target_dir)

            # New zip up the target directory.
            parts = os.path.basename(source_archive).split('.')
            archive_name = path(output_path) / "{source_name}_version_{desired_version}.tar.gz".format(
                source_name=parts[0], desired_version=desired_version
            )
            # NOTE(review): the ``with open(...)`` only creates/truncates the
            # file; tarfile.open() below reopens the same path for writing.
            with open(archive_name, "w"):
                tar_file = tarfile.open(archive_name, mode='w:gz')
                try:
                    for item in os.listdir(temp_target_dir):
                        tar_file.add(path(temp_target_dir) / item, arcname=item)
                finally:
                    tar_file.close()

            print("Created archive {0}".format(archive_name))

        except ValueError as err:
            # convert_between_versions signals unsupported formats via ValueError.
            raise CommandError(err)
        finally:
            shutil.rmtree(temp_source_dir)
            shutil.rmtree(temp_target_dir)
def extract_source(source_archive, target):
    """Safely extract the tar archive at *source_archive* into directory *target*."""
    tar_file = tarfile.open(source_archive)
    try:
        # safetar_extractall guards against path-traversal entries.
        safetar_extractall(tar_file, target)
    finally:
        tar_file.close()
| agpl-3.0 |
myfreshcity/mystock | webapp/models/stock.py | 1 | 1472 | from webapp.services import db
from datetime import datetime
import urllib2,re
class Stock(db.Model):
    """ORM model for a listed stock, with a live quote lookup against Sina's HQ API."""
    __tablename__ = 'stock_basic'

    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(db.String(255))   # quote code used by the Sina endpoint (e.g. "sh600000") -- TODO confirm
    name = db.Column(db.String(255))
    flag = db.Column(db.String(5))     # "0" appears to mean active/listed (see find_by_code) -- verify
    industry = db.Column(db.String(255))
    area = db.Column(db.String(255))
    zgb = db.Column(db.Numeric)        # total share capital -- presumably; confirm units
    ltgb = db.Column(db.Numeric)       # tradable share capital -- presumably; confirm units
    launch_date = db.Column(db.Date)
    latest_report = db.Column(db.Date)
    holder_updated_time = db.Column(db.DateTime)
    trade_updated_time = db.Column(db.DateTime)
    finance_updated_time = db.Column(db.DateTime)
    desc = db.Column(db.String(500))
    grow_type = db.Column(db.String(3))

    def __init__(self, name, code):
        self.name = name
        self.code = code

    def __repr__(self):
        return '<Stock %r>' % self.id

    @classmethod
    def find_by_code(self, cd):
        """Return the first active (flag=0) stock with the given code, or None."""
        return Stock.query.filter_by(code=cd, flag=0).first()

    @property
    def current_price(self):
        """Latest traded price as a rounded float, or None if the lookup fails.

        Swallows all errors (network, parse, missing field) by design so
        template code can render a missing price gracefully.
        """
        try:
            data = self.query_trade_data()
            # Field 3 of Sina's quote CSV is the current price -- per observed
            # API format; confirm against the hq.sinajs.cn response layout.
            return round(float(data[3]), 2)
        except:
            return None

    def query_trade_data(self):
        """Fetch the raw comma-separated quote fields for this stock from Sina."""
        url = "http://hq.sinajs.cn/list=" + self.code
        req = urllib2.Request(url)
        res_data = urllib2.urlopen(req).read()
        # The response is 'var hq_str_...="field0,field1,...";' -- grab the
        # quoted payload.  NOTE(review): re.search returns None on an
        # unexpected body and .group would raise; callers rely on
        # current_price's blanket except to absorb that.
        match = re.search(r'".*"', res_data).group(0)
        trade_data = match.split(',')
        return trade_data
| mit |
v-iam/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_06_01/models/flow_log_information.py | 2 | 1856 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class FlowLogInformation(Model):
    """Information on the configuration of flow log.

    :param target_resource_id: The ID of the resource to configure for flow
     logging.
    :type target_resource_id: str
    :param storage_id: ID of the storage account which is used to store the
     flow log.
    :type storage_id: str
    :param enabled: Flag to enable/disable flow logging.
    :type enabled: bool
    :param retention_policy:
    :type retention_policy: :class:`RetentionPolicyParameters
     <azure.mgmt.network.v2017_06_01.models.RetentionPolicyParameters>`
    """

    # All three fields are mandatory in the REST payload; msrest validates
    # them before serialization.
    _validation = {
        'target_resource_id': {'required': True},
        'storage_id': {'required': True},
        'enabled': {'required': True},
    }

    # Maps Python attribute names to their wire (JSON) paths and types.
    _attribute_map = {
        'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
        'storage_id': {'key': 'properties.storageId', 'type': 'str'},
        'enabled': {'key': 'properties.enabled', 'type': 'bool'},
        'retention_policy': {'key': 'properties.retentionPolicy', 'type': 'RetentionPolicyParameters'},
    }

    def __init__(self, target_resource_id, storage_id, enabled, retention_policy=None):
        """Store the flow-log settings; retention_policy is optional."""
        self.target_resource_id = target_resource_id
        self.storage_id = storage_id
        self.enabled = enabled
        self.retention_policy = retention_policy
| mit |
andrejserafim/elasticsearch | dev-tools/prepare_release_candidate.py | 12 | 20105 | # Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
# Prepare a release
#
# 1. Update the Version.java to remove the snapshot bit
# 2. Remove the -SNAPSHOT suffix in all pom.xml files
#
# USAGE:
#
# python3 ./dev-tools/prepare-release.py
#
# Note: Ensure the script is run from the elasticsearch top level directory
#
import fnmatch
import argparse
from prepare_release_update_documentation import update_reference_docs
import subprocess
import tempfile
import re
import os
import shutil
from functools import partial
import sys
VERSION_FILE = 'core/src/main/java/org/elasticsearch/Version.java'
POM_FILE = 'pom.xml'
MAIL_TEMPLATE = """
Hi all
The new release candidate for %(version)s is now available, including the x-plugins and RPM/deb repos. This release is based on:
* Elasticsearch commit: %(hash)s - https://github.com/elastic/elasticsearch/commit/%(hash)s
* X-Plugins commit: FILL_IN_X-PLUGINS_HASH - https://github.com/elastic/x-plugins/commit/FILL_IN_X-PLUGINS_HASH
The packages may be downloaded from the following URLs:
* ZIP - http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/zip/elasticsearch/%(version)s/elasticsearch-%(version)s.zip
* tar.gz - http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/tar/elasticsearch/%(version)s/elasticsearch-%(version)s.tar.gz
* RPM - http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/rpm/elasticsearch/%(version)s/elasticsearch-%(version)s.rpm
* deb - http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/deb/elasticsearch/%(version)s/elasticsearch-%(version)s.deb
Plugins can be installed as follows:
bin/plugin -Des.plugins.staging=true install cloud-aws
The same goes for the x-plugins:
bin/plugin -Des.plugins.staging=true install license
bin/plugin -Des.plugins.staging=true install marvel-agent
bin/plugin -Des.plugins.staging=true install shield
bin/plugin -Des.plugins.staging=true install watcher
To install the deb from an APT repo:
APT line sources.list line:
deb http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/repos/%(package_repo_version)s/debian/ stable main
To install the RPM, create a YUM file like:
/etc/yum.repos.d/elasticsearch.repo
containing:
[elasticsearch-2.0]
name=Elasticsearch repository for packages
baseurl=http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/repos/%(package_repo_version)s/centos
gpgcheck=1
gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch
enabled=1
To smoke-test the release please run:
python3 -B ./dev-tools/smoke_test_rc.py --version %(version)s --hash %(hash)s --plugins license,shield,watcher
NOTE: this script requires JAVA_HOME to point to a Java 7 Runtime
"""
# console colors
COLOR_OK = '\033[92m'
COLOR_END = '\033[0m'
COLOR_FAIL = '\033[91m'
def run(command, env_vars=None):
if env_vars:
for key, value in env_vars.items():
os.putenv(key, value)
print('*** Running: %s%s%s' % (COLOR_OK, command, COLOR_END))
if os.system(command):
raise RuntimeError(' FAILED: %s' % (command))
def ensure_checkout_is_clean():
    """Abort (RuntimeError) unless the git working tree is pristine.

    Checks, in order: no modified tracked files, no untracked files, no
    unpulled upstream commits, and no unpushed local commits.
    """
    # Make sure no local mods:
    s = subprocess.check_output('git diff --shortstat', shell=True).decode('utf-8')
    if len(s) > 0:
        raise RuntimeError('git diff --shortstat is non-empty got:\n%s' % s)

    # Make sure no untracked files:
    s = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
    if 'Untracked files:' in s:
        if 'dev-tools/__pycache__/' in s:
            print('*** NOTE: invoke python with -B to prevent __pycache__ directories ***')
        raise RuntimeError('git status shows untracked files got:\n%s' % s)

    # Make sure we have all changes from origin:
    if 'is behind' in s:
        raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin" in this branch got:\n%s' % (s))

    # Make sure we no local unpushed changes (this is supposed to be a clean area):
    if 'is ahead' in s:
        raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout ", "git reset --hard origin/" in this branch got:\n%s' % (s))
def process_file(file_path, line_callback):
    """Apply *line_callback* to every line of *file_path*.

    The transformed content is written to a temp file; when any line was
    changed the original file is replaced by it and True is returned,
    otherwise the temp file is discarded and False is returned.
    """
    handle, tmp_path = tempfile.mkstemp()
    changed = False
    with open(tmp_path, 'w', encoding='utf-8') as out_file:
        with open(file_path, encoding='utf-8') as in_file:
            for original_line in in_file:
                replacement = line_callback(original_line)
                if replacement != original_line:
                    changed = True
                out_file.write(replacement)
    os.close(handle)
    if not changed:
        # nothing to do - just drop the temp copy
        os.remove(tmp_path)
        return False
    os.remove(file_path)
    shutil.move(tmp_path, file_path)
    return True
def remove_version_snapshot(version_file, release):
    """Flip the snapshot flag (true -> false) for *release* in Version.java.

    Raises RuntimeError when no matching version constant was rewritten.
    """
    # Version ids use underscores: 1.0.0.Beta1 -> 1_0_0_Beta1
    version_id = release.replace('.', '_').replace('-', '_')
    snapshot_decl = 'new Version(V_%s_ID, true' % (version_id)
    release_decl = 'new Version(V_%s_ID, false' % (version_id)
    rewritten = process_file(version_file,
                             lambda line: line.replace(snapshot_decl, release_decl))
    if not rewritten:
        raise RuntimeError('failed to remove snapshot version for %s' % (version_id))
def rename_local_meta_files(path):
    """Recursively rename maven-metadata-local.xml* files, dropping the '-local' part."""
    for dir_path, _, file_names in os.walk(path):
        for local_name in fnmatch.filter(file_names, 'maven-metadata-local.xml*'):
            src = os.path.join(dir_path, local_name)
            dst = os.path.join(dir_path, local_name.replace('-local', ''))
            os.rename(src, dst)
# Checks the POM for the release version.
# This method fails if the pom file has no SNAPSHOT version set ie.
# if the version is already on a release version we fail.
def find_release_version(pom_path='pom.xml'):
    """Return the release version string (e.g. "0.90.7") from a Maven POM.

    Scans *pom_path* (generalized from the previously hard-coded "pom.xml";
    the no-argument call is unchanged) for the first
    ``<version>X-SNAPSHOT</version>`` entry and returns ``X``.  Raises
    RuntimeError when the POM carries no SNAPSHOT version, i.e. the branch
    is already on a release version.
    """
    with open(pom_path, encoding='utf-8') as file:
        for line in file:
            match = re.search(r'<version>(.+)-SNAPSHOT</version>', line)
            if match:
                return match.group(1)
    raise RuntimeError('Could not find release version in branch')
def ensure_rpm_is_signed(rpm, gpg_key):
    """Verify that *rpm* carries a signature matching *gpg_key*.

    Shells out to ``rpm -v -K`` piped through a case-insensitive grep for
    the key id; any failure (unsigned package, wrong key, missing binary)
    aborts with RuntimeError.
    """
    check_cmd = 'rpm -v -K %s | grep -qi %s' % (rpm, gpg_key)
    try:
        subprocess.check_output(check_cmd, shell=True)
        return
    except:
        pass
    raise RuntimeError('Aborting. RPM does not seem to be signed, check with: rpm -v -K %s' % rpm)
def check_command_exists(name, cmd):
    """Run *cmd* in a shell and raise RuntimeError naming *name* on failure.

    Used to verify that an external binary is installed and on $PATH before
    the release process depends on it.
    """
    try:
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return
    except subprocess.CalledProcessError:
        pass
    raise RuntimeError('Could not run command %s - please make sure it is installed and in $PATH' % (name))
def run_and_print(text, run_function):
    """Print *text*, invoke *run_function*, and report a colorized OK / NOT OK.

    Returns True when the callable completed, False when it raised
    RuntimeError (failures are swallowed so every check can run).
    """
    print(text, end='')
    try:
        run_function()
    except RuntimeError:
        print(COLOR_FAIL + 'NOT OK' + COLOR_END)
        return False
    print(COLOR_OK + 'OK' + COLOR_END)
    return True
def check_env_var(text, env_var):
    """Print *text* and report whether environment variable *env_var* is set.

    Returns True when the variable exists, False otherwise.
    """
    print(text, end='')
    if env_var in os.environ:
        print(COLOR_OK + 'OK' + COLOR_END)
        return True
    print(COLOR_FAIL + 'NOT OK' + COLOR_END)
    return False
def check_environment_and_commandline_tools(check_only):
    """Verify the AWS env vars and all external binaries the release needs.

    Prints one OK/NOT OK line per check.  With check_only=True the process
    always exits after reporting; otherwise it exits only when some check
    failed.
    """
    checks = list()
    checks.append(check_env_var('Checking for AWS env configuration AWS_SECRET_KEY... ', 'AWS_SECRET_KEY'))
    checks.append(check_env_var('Checking for AWS env configuration AWS_ACCESS_KEY... ', 'AWS_ACCESS_KEY'))
    checks.append(run_and_print('Checking command: rpm... ', partial(check_command_exists, 'rpm', 'rpm --version')))
    checks.append(run_and_print('Checking command: dpkg... ', partial(check_command_exists, 'dpkg', 'dpkg --version')))
    checks.append(run_and_print('Checking command: gpg... ', partial(check_command_exists, 'gpg', 'gpg --version')))
    checks.append(run_and_print('Checking command: expect... ', partial(check_command_exists, 'expect', 'expect -v')))
    checks.append(run_and_print('Checking command: createrepo... ', partial(check_command_exists, 'createrepo', 'createrepo --version')))
    checks.append(run_and_print('Checking command: s3cmd... ', partial(check_command_exists, 's3cmd', 's3cmd --version')))
    checks.append(run_and_print('Checking command: deb-s3... ', partial(check_command_exists, 'deb-s3', 'deb-s3 -h')))
    checks.append(run_and_print('Checking command: rpm-s3... ', partial(check_command_exists, 'rpm-s3', 'rpm-s3 -h')))

    if check_only:
        sys.exit(0)

    if False in checks:
        print("Exiting due to failing checks")
        sys.exit(0)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Release')
parser.add_argument('--deploy-sonatype', dest='deploy_sonatype', action='store_true',
help='Installs and Deploys the release on a sonatype staging repository.')
parser.add_argument('--deploy-s3', dest='deploy_s3', action='store_true',
help='Pushes artifacts to the S3 staging area')
parser.add_argument('--deploy-s3-repos', dest='deploy_s3_repos', action='store_true',
help='Creates package repositories in S3 repo')
parser.add_argument('--no-install', dest='no_install', action='store_true',
help='Does not run "mvn install", expects this to be run already and reuses artifacts from local repo, only useful with --deploy-s3/--deploy-s3-repos, after sonatype deplomeny to ensure same artifacts')
parser.add_argument('--skip-doc-check', dest='skip_doc_check', action='store_false',
help='Skips any checks for pending documentation changes')
parser.add_argument('--skip-tests', dest='skip_tests', action='store_true',
help='Skips any test runs')
parser.add_argument('--gpg-key', dest='gpg_key', default="D88E42B4",
help='Allows you to specify a different gpg_key to be used instead of the default release key')
parser.add_argument('--bucket', '-b', dest='bucket', default="download.elasticsearch.org",
help='Allows you to specify a different s3 bucket to upload the artifacts to')
parser.add_argument('--quiet', dest='quiet', action='store_true',
help='Runs the script in quiet mode')
parser.add_argument('--check', dest='check', action='store_true',
help='Checks and reports for all requirements and then exits')
# by default, we only run mvn install and dont push anything repo
parser.set_defaults(deploy_sonatype=False)
parser.set_defaults(deploy_s3=False)
parser.set_defaults(deploy_s3_repos=False)
parser.set_defaults(no_install=False)
# other defaults
parser.set_defaults(skip_doc_check=False)
parser.set_defaults(quiet=False)
parser.set_defaults(skip_tests=False)
args = parser.parse_args()
skip_doc_check = args.skip_doc_check
gpg_key = args.gpg_key
bucket = args.bucket
deploy_sonatype = args.deploy_sonatype
deploy_s3 = args.deploy_s3
deploy_s3_repos = args.deploy_s3_repos
run_mvn_install = not args.no_install
skip_tests = args.skip_tests
check_environment_and_commandline_tools(args.check)
if not run_mvn_install and deploy_sonatype:
print('Using --no-install and --deploy-sonatype together does not work. Exiting')
sys.exit(-1)
print('*** Preparing a release candidate: ', end='')
print('deploy sonatype: %s%s%s' % (COLOR_OK if deploy_sonatype else COLOR_FAIL, 'yes' if deploy_sonatype else 'no', COLOR_END), end='')
print(', deploy s3: %s%s%s' % (COLOR_OK if deploy_s3 else COLOR_FAIL, 'yes' if deploy_s3 else 'no', COLOR_END), end='')
print(', deploy s3 repos: %s%s%s' % (COLOR_OK if deploy_s3_repos else COLOR_FAIL, 'yes' if deploy_s3_repos else 'no', COLOR_END), end='')
print('')
shortHash = subprocess.check_output('git log --pretty=format:"%h" -n 1', shell=True).decode('utf-8')
releaseDirectory = os.getenv('HOME') + '/elastic-releases'
release_version = find_release_version()
localRepo = '%s/elasticsearch-%s-%s' % (releaseDirectory, release_version, shortHash)
localRepoElasticsearch = localRepo + '/org/elasticsearch'
ensure_checkout_is_clean()
if not re.match('(\d+\.\d+)\.*',release_version):
raise RuntimeError('illegal release version format: %s' % (release_version))
package_repo_version = '%s.x' % re.match('(\d+)\.*', release_version).group(1)
print('*** Preparing release version: [%s]' % release_version)
if not skip_doc_check:
print('*** Check for pending documentation changes')
pending_files = update_reference_docs(release_version)
if pending_files:
raise RuntimeError('pending coming[%s] documentation changes found in %s' % (release_version, pending_files))
run('cd dev-tools && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
run('cd rest-api-spec && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
run('mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
remove_version_snapshot(VERSION_FILE, release_version)
print('*** Done removing snapshot version. DO NOT COMMIT THIS, WHEN CREATING A RELEASE CANDIDATE.')
if not os.path.exists(releaseDirectory):
os.mkdir(releaseDirectory)
if os.path.exists(localRepoElasticsearch) and run_mvn_install:
print('clean local repository %s' % localRepoElasticsearch)
shutil.rmtree(localRepoElasticsearch)
mvn_target = 'deploy' if deploy_sonatype else 'install'
tests = '-DskipTests' if skip_tests else '-Dskip.integ.tests=true'
install_command = 'mvn clean %s -Prelease %s -Dgpg.key="%s" -Dpackaging.rpm.rpmbuild=/usr/bin/rpmbuild -Drpm.sign=true -Dmaven.repo.local=%s -Dno.commit.pattern="\\bno(n|)commit\\b" -Dforbidden.test.signatures=""' % (mvn_target, tests, gpg_key, localRepo)
clean_repo_command = 'find %s -name _remote.repositories -exec rm {} \;' % (localRepoElasticsearch)
if not run_mvn_install:
print('')
print('*** By choosing --no-install we assume you ran the following commands successfully:')
print(' %s' % (install_command))
print(' 1. Remove all _remote.repositories: %s' % (clean_repo_command))
rename_metadata_files_command = 'for i in $(find %s -name "maven-metadata-local.xml*") ; do mv "$i" "${i/-local/}" ; done' % (localRepoElasticsearch)
print(' 2. Rename all maven metadata files: %s' % (rename_metadata_files_command))
else:
for cmd in [install_command, clean_repo_command]:
run(cmd)
rename_local_meta_files(localRepoElasticsearch)
rpm = '%s/distribution/rpm/elasticsearch/%s/elasticsearch-%s.rpm' % (localRepoElasticsearch, release_version, release_version)
print('Ensuring that RPM has been signed')
ensure_rpm_is_signed(rpm, gpg_key)
# repository push commands
s3cmd_sync_to_staging_bucket_cmd = 's3cmd sync -P %s s3://%s/elasticsearch/staging/%s-%s/org/' % (localRepoElasticsearch, bucket, release_version, shortHash)
s3_bucket_sync_to = '%s/elasticsearch/staging/%s-%s/repos/' % (bucket, release_version, shortHash)
s3cmd_sync_official_repo_cmd = 's3cmd sync s3://packages.elasticsearch.org/elasticsearch/%s s3://%s' % (package_repo_version, s3_bucket_sync_to)
debs3_prefix = 'elasticsearch/staging/%s-%s/repos/%s/debian' % (release_version, shortHash, package_repo_version)
debs3_upload_cmd = 'deb-s3 upload --preserve-versions %s/distribution/deb/elasticsearch/%s/elasticsearch-%s.deb -b %s --prefix %s --sign %s --arch amd64' % (localRepoElasticsearch, release_version, release_version, bucket, debs3_prefix, gpg_key)
debs3_list_cmd = 'deb-s3 list -b %s --prefix %s' % (bucket, debs3_prefix)
debs3_verify_cmd = 'deb-s3 verify -b %s --prefix %s' % (bucket, debs3_prefix)
rpms3_prefix = 'elasticsearch/staging/%s-%s/repos/%s/centos' % (release_version, shortHash, package_repo_version)
rpms3_upload_cmd = 'rpm-s3 -v -b %s -p %s --sign --visibility public-read -k 100 %s' % (bucket, rpms3_prefix, rpm)
if deploy_s3:
run(s3cmd_sync_to_staging_bucket_cmd)
else:
print('')
print('*** To push a release candidate to s3 run: ')
print(' 1. Sync %s into S3 bucket' % (localRepoElasticsearch))
print (' %s' % (s3cmd_sync_to_staging_bucket_cmd))
if deploy_s3_repos:
print('*** Syncing official package repository into staging s3 bucket')
run(s3cmd_sync_official_repo_cmd)
print('*** Uploading debian package (you will be prompted for the passphrase!)')
run(debs3_upload_cmd)
run(debs3_list_cmd)
run(debs3_verify_cmd)
print('*** Uploading rpm package (you will be prompted for the passphrase!)')
run(rpms3_upload_cmd)
else:
print('*** To create repositories on S3 run:')
print(' 1. Sync existing repo into staging: %s' % s3cmd_sync_official_repo_cmd)
print(' 2. Upload debian package (and sign it): %s' % debs3_upload_cmd)
print(' 3. List all debian packages: %s' % debs3_list_cmd)
print(' 4. Verify debian packages: %s' % debs3_verify_cmd)
print(' 5. Upload RPM: %s' % rpms3_upload_cmd)
print('')
print('NOTE: the above mvn command will promt you several times for the GPG passphrase of the key you specified you can alternatively pass it via -Dgpg.passphrase=yourPassPhrase')
print(' since RPM signing doesn\'t support gpg-agents the recommended way to set the password is to add a release profile to your settings.xml:')
print("""
<profiles>
<profile>
<id>release</id>
<properties>
<gpg.passphrase>YourPasswordGoesHere</gpg.passphrase>
</properties>
</profile>
</profiles>
""")
print('NOTE: Running s3cmd might require you to create a config file with your credentials, if the s3cmd does not support suppliying them via the command line!')
print('*** Once the release is deployed and published send out the following mail to dev@elastic.co:')
string_format_dict = {'version' : release_version, 'hash': shortHash, 'package_repo_version' : package_repo_version, 'bucket': bucket}
print(MAIL_TEMPLATE % string_format_dict)
print('')
print('You can verify that pushing to the staging repository pushed all the artifacts by running (log into sonatype to find out the correct id):')
print(' python -B dev-tools/validate-maven-repository.py %s https://oss.sonatype.org/service/local/repositories/orgelasticsearch-IDTOFINDOUT/content/org/elasticsearch ' %(localRepoElasticsearch))
print('')
print('To publish the release and the repo on S3 execute the following commands:')
print(' s3cmd cp --recursive s3://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/repos/%(package_repo_version)s/ s3://packages.elasticsearch.org/elasticsearch/%(package_repo_version)s' % string_format_dict)
print(' s3cmd cp --recursive s3://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/ s3://%(bucket)s/elasticsearch/release/org' % string_format_dict)
print('Now go ahead and tag the release:')
print(' git tag -a v%(version)s %(hash)s' % string_format_dict)
print(' git push origin v%(version)s' % string_format_dict )
| apache-2.0 |
tectronics/evennia | contrib/procpools/python_procpool_plugin.py | 4 | 4944 | """
Python ProcPool plugin
Evennia contribution - Griatch 2012
This is a plugin for the Evennia services. It will make the service
and run_async in python_procpool.py available to the system.
To activate, add the following line to your settings file:
SERVER_SERVICES_PLUGIN_MODULES.append("contrib.procpools.python_procpool_plugin")
Next reboot the server and the new service will be available.
If you want to adjust the defaults, copy this file to
game/gamesrc/conf/ and re-point
settings.SERVER_SERVICES_PLUGINS_MODULES to that file instead. This
is to avoid clashes with eventual upstream modifications to this file.
It is not recommended to use this with an SQLite3 database, at least
if you plan to do many out-of-process database writes. SQLite3 does
not work very well with a high frequency of off-process writes due to
file locking clashes. Test what works with your mileage.
"""
import os
import sys
from django.conf import settings
# Process Pool setup
# convenient flag to turn off process pool without changing settings
PROCPOOL_ENABLED = True
# relay process stdout to log (debug mode, very spammy)
PROCPOOL_DEBUG = False
# max/min size of the process pool. Will expand up to max limit on demand.
PROCPOOL_MIN_NPROC = 5
PROCPOOL_MAX_NPROC = 20
# maximum time (seconds) a process may idle before being pruned from
# pool (if pool bigger than minsize)
PROCPOOL_IDLETIME = 20
# after sending a command, this is the maximum time in seconds the process
# may run without returning. After this time the process will be killed. This
# can be seen as a fallback; the run_async method takes a keyword proc_timeout
# that will override this value on a per-case basis.
PROCPOOL_TIMEOUT = 10
# only change if the port clashes with something else on the system
PROCPOOL_PORT = 5001
# 0.0.0.0 means listening to all interfaces
PROCPOOL_INTERFACE = '127.0.0.1'
# user-id and group-id to run the processes as (for OS:es supporting this).
# If you plan to run unsafe code one could experiment with setting this
# to an unprivileged user.
PROCPOOL_UID = None
PROCPOOL_GID = None
# real path to a directory where all processes will be run. If
# not given, processes will be executed in game/.
PROCPOOL_DIRECTORY = None
# don't need to change normally
SERVICE_NAME = "PythonProcPool"
# plugin hook
def start_plugin_services(server):
"""
This will be called by the Evennia Server when starting up.
server - the main Evennia server application
"""
if not PROCPOOL_ENABLED:
return
# terminal output
print ' amp (Process Pool): %s' % PROCPOOL_PORT
from contrib.procpools.ampoule import main as ampoule_main
from contrib.procpools.ampoule import service as ampoule_service
from contrib.procpools.ampoule import pool as ampoule_pool
from contrib.procpools.ampoule.main import BOOTSTRAP as _BOOTSTRAP
from contrib.procpools.python_procpool import PythonProcPoolChild
# for some reason absolute paths don't work here, only relative ones.
apackages = ("twisted",
os.path.join(os.pardir, "contrib", "procpools", "ampoule"),
os.path.join(os.pardir, "ev"),
"settings")
aenv = {"DJANGO_SETTINGS_MODULE": "settings",
"DATABASE_NAME": settings.DATABASES.get("default", {}).get("NAME") or settings.DATABASE_NAME}
if PROCPOOL_DEBUG:
_BOOTSTRAP = _BOOTSTRAP % "log.startLogging(sys.stderr)"
else:
_BOOTSTRAP = _BOOTSTRAP % ""
procpool_starter = ampoule_main.ProcessStarter(packages=apackages,
env=aenv,
path=PROCPOOL_DIRECTORY,
uid=PROCPOOL_UID,
gid=PROCPOOL_GID,
bootstrap=_BOOTSTRAP,
childReactor=sys.platform == 'linux2' and "epoll" or "default")
procpool = ampoule_pool.ProcessPool(name=SERVICE_NAME,
min=PROCPOOL_MIN_NPROC,
max=PROCPOOL_MAX_NPROC,
recycleAfter=500,
timeout=PROCPOOL_TIMEOUT,
maxIdle=PROCPOOL_IDLETIME,
ampChild=PythonProcPoolChild,
starter=procpool_starter)
procpool_service = ampoule_service.AMPouleService(procpool,
PythonProcPoolChild,
PROCPOOL_PORT,
PROCPOOL_INTERFACE)
procpool_service.setName(SERVICE_NAME)
# add the new services to the server
server.services.addService(procpool_service)
| bsd-3-clause |
pombredanne/django-tenant-schemas | tenant_schemas/test/cases.py | 7 | 1445 | import django
from django.core.management import call_command
from django.db import connection
from django.test import TestCase
from tenant_schemas.utils import get_tenant_model
from tenant_schemas.utils import get_public_schema_name
class TenantTestCase(TestCase):
@classmethod
def setUpClass(cls):
cls.sync_shared()
tenant_domain = 'tenant.test.com'
cls.tenant = get_tenant_model()(domain_url=tenant_domain, schema_name='test')
cls.tenant.save(verbosity=0) # todo: is there any way to get the verbosity from the test command here?
connection.set_tenant(cls.tenant)
@classmethod
def tearDownClass(cls):
connection.set_schema_to_public()
cls.tenant.delete()
cursor = connection.cursor()
cursor.execute('DROP SCHEMA test CASCADE')
@classmethod
def sync_shared(cls):
if django.VERSION >= (1, 7, 0):
call_command('migrate_schemas',
schema_name=get_public_schema_name(),
interactive=False,
verbosity=0)
else:
call_command('sync_schemas',
schema_name=get_public_schema_name(),
tenant=False,
public=True,
interactive=False,
migrate_all=True,
verbosity=0,
)
| mit |
AndroidForWave/devil_kernel_samsung_wave | toolchain/share/gdb/python/gdb/command/prompt.py | 107 | 2135 | # Extended prompt.
# Copyright (C) 2011-2012 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""GDB command for working with extended prompts."""
import gdb
import gdb.prompt
class _ExtendedPrompt(gdb.Parameter):
"""Set the extended prompt.
Usage: set extended-prompt VALUE
Substitutions are applied to VALUE to compute the real prompt.
The currently defined substitutions are:
"""
# Add the prompt library's dynamically generated help to the
# __doc__ string.
__doc__ = __doc__ + gdb.prompt.prompt_help()
set_doc = "Set the extended prompt."
show_doc = "Show the extended prompt."
def __init__(self):
super(_ExtendedPrompt, self).__init__("extended-prompt",
gdb.COMMAND_SUPPORT,
gdb.PARAM_STRING_NOESCAPE)
self.value = ''
self.hook_set = False
def get_show_string (self, pvalue):
if self.value is not '':
return "The extended prompt is: " + self.value
else:
return "The extended prompt is not set."
def get_set_string (self):
if self.hook_set == False:
gdb.prompt_hook = self.before_prompt_hook
self.hook_set = True
return ""
def before_prompt_hook(self, current):
if self.value is not '':
newprompt = gdb.prompt.substitute_prompt(self.value)
return newprompt.replace('\\', '\\\\')
else:
return None
_ExtendedPrompt()
| gpl-2.0 |
bjzhang/xen_arm | tools/python/xen/util/ip.py | 51 | 3175 | import os
import re
import socket
import struct
import errno
##### Networking-related functions
def get_defaultroute():
fd = os.popen('/sbin/ip route list 2>/dev/null')
for line in fd.readlines():
m = re.search('^default via ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+) dev ([^ ]*)',
line)
if m:
return [m.group(1), m.group(2)]
return [None, None]
def get_current_ipaddr(dev='defaultroute'):
"""Get the primary IP address for the given network interface.
dev network interface (default: default route device)
returns interface address as a string
"""
if dev == 'defaultroute':
dev = get_defaultroute()[1]
if not dev:
return
fd = os.popen( '/sbin/ifconfig ' + dev + ' 2>/dev/null' )
for line in fd.readlines():
m = re.search( '^\s+inet addr:([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+).*',
line )
if m:
return m.group(1)
return None
def get_current_ipmask(dev='defaultroute'):
"""Get the primary IP netmask for a network interface.
dev network interface (default: default route device)
returns interface netmask as a string
"""
if dev == 'defaultroute':
dev = get_defaultroute()[1]
if not dev:
return
fd = os.popen( '/sbin/ifconfig ' + dev + ' 2>/dev/null' )
for line in fd.readlines():
m = re.search( '^.+Mask:([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+).*',
line )
if m:
return m.group(1)
return None
def get_current_ipgw(dev='defaultroute'):
"""Get the IP gateway for a network interface.
dev network interface (default: default route device)
returns gateway address as a string
"""
if dev == 'defaultroute':
return get_defaultroute()[0]
if not dev:
return
fd = os.popen( '/sbin/route -n' )
for line in fd.readlines():
m = re.search( '^\S+\s+([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)' +
'\s+\S+\s+\S*G.*' + dev + '.*', line )
if m:
return m.group(1)
return None
def inet_aton(addr):
"""Convert an IP addr in IPv4 dot notation into an int.
addr IP address as a string
returns integer
"""
b = socket.inet_aton(addr)
return struct.unpack('!I', b)[0]
def inet_ntoa(n):
"""Convert an int into an IP addr in IPv4 dot notation.
n IP address
returns string
"""
b = struct.pack('!I', n)
return socket.inet_ntoa(b)
def add_offset_to_ip(addr, offset):
"""Add a numerical offset to an IP addr in IPv4 dot notation.
addr IP address
offset offset to add
returns new address
"""
n = inet_aton(addr)
n += offset
return inet_ntoa(n)
def check_subnet( ip, network, netmask ):
"""Check if an IP address is in the subnet defined by
a network address and mask'.
ip IP adress
network network address
netmask network mask
returns 1 if it is in the subnet, 0 if not
"""
n_ip = inet_aton(ip)
n_net = inet_aton(network)
n_mask = inet_aton(netmask)
return (n_ip & n_mask) == (n_net & n_mask)
| gpl-2.0 |
sacnayak/ssnayak-explore | lib/jinja2/parser.py | 336 | 35442 | # -*- coding: utf-8 -*-
"""
jinja2.parser
~~~~~~~~~~~~~
Implements the template parser.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2 import nodes
from jinja2.exceptions import TemplateSyntaxError, TemplateAssertionError
from jinja2.lexer import describe_token, describe_token_expr
from jinja2._compat import imap
_statement_keywords = frozenset(['for', 'if', 'block', 'extends', 'print',
'macro', 'include', 'from', 'import',
'set'])
_compare_operators = frozenset(['eq', 'ne', 'lt', 'lteq', 'gt', 'gteq'])
class Parser(object):
"""This is the central parsing class Jinja2 uses. It's passed to
extensions and can be used to parse expressions or statements.
"""
def __init__(self, environment, source, name=None, filename=None,
state=None):
self.environment = environment
self.stream = environment._tokenize(source, name, filename, state)
self.name = name
self.filename = filename
self.closed = False
self.extensions = {}
for extension in environment.iter_extensions():
for tag in extension.tags:
self.extensions[tag] = extension.parse
self._last_identifier = 0
self._tag_stack = []
self._end_token_stack = []
def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
"""Convenience method that raises `exc` with the message, passed
line number or last line number as well as the current name and
filename.
"""
if lineno is None:
lineno = self.stream.current.lineno
raise exc(msg, lineno, self.name, self.filename)
def _fail_ut_eof(self, name, end_token_stack, lineno):
expected = []
for exprs in end_token_stack:
expected.extend(imap(describe_token_expr, exprs))
if end_token_stack:
currently_looking = ' or '.join(
"'%s'" % describe_token_expr(expr)
for expr in end_token_stack[-1])
else:
currently_looking = None
if name is None:
message = ['Unexpected end of template.']
else:
message = ['Encountered unknown tag \'%s\'.' % name]
if currently_looking:
if name is not None and name in expected:
message.append('You probably made a nesting mistake. Jinja '
'is expecting this tag, but currently looking '
'for %s.' % currently_looking)
else:
message.append('Jinja was looking for the following tags: '
'%s.' % currently_looking)
if self._tag_stack:
message.append('The innermost block that needs to be '
'closed is \'%s\'.' % self._tag_stack[-1])
self.fail(' '.join(message), lineno)
def fail_unknown_tag(self, name, lineno=None):
"""Called if the parser encounters an unknown tag. Tries to fail
with a human readable error message that could help to identify
the problem.
"""
return self._fail_ut_eof(name, self._end_token_stack, lineno)
def fail_eof(self, end_tokens=None, lineno=None):
"""Like fail_unknown_tag but for end of template situations."""
stack = list(self._end_token_stack)
if end_tokens is not None:
stack.append(end_tokens)
return self._fail_ut_eof(None, stack, lineno)
def is_tuple_end(self, extra_end_rules=None):
"""Are we at the end of a tuple?"""
if self.stream.current.type in ('variable_end', 'block_end', 'rparen'):
return True
elif extra_end_rules is not None:
return self.stream.current.test_any(extra_end_rules)
return False
def free_identifier(self, lineno=None):
"""Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
self._last_identifier += 1
rv = object.__new__(nodes.InternalName)
nodes.Node.__init__(rv, 'fi%d' % self._last_identifier, lineno=lineno)
return rv
def parse_statement(self):
"""Parse a single statement."""
token = self.stream.current
if token.type != 'name':
self.fail('tag name expected', token.lineno)
self._tag_stack.append(token.value)
pop_tag = True
try:
if token.value in _statement_keywords:
return getattr(self, 'parse_' + self.stream.current.value)()
if token.value == 'call':
return self.parse_call_block()
if token.value == 'filter':
return self.parse_filter_block()
ext = self.extensions.get(token.value)
if ext is not None:
return ext(self)
# did not work out, remove the token we pushed by accident
# from the stack so that the unknown tag fail function can
# produce a proper error message.
self._tag_stack.pop()
pop_tag = False
self.fail_unknown_tag(token.value, token.lineno)
finally:
if pop_tag:
self._tag_stack.pop()
def parse_statements(self, end_tokens, drop_needle=False):
"""Parse multiple statements into a list until one of the end tokens
is reached. This is used to parse the body of statements as it also
parses template data if appropriate. The parser checks first if the
current token is a colon and skips it if there is one. Then it checks
for the block end and parses until if one of the `end_tokens` is
reached. Per default the active token in the stream at the end of
the call is the matched end token. If this is not wanted `drop_needle`
can be set to `True` and the end token is removed.
"""
# the first token may be a colon for python compatibility
self.stream.skip_if('colon')
# in the future it would be possible to add whole code sections
# by adding some sort of end of statement token and parsing those here.
self.stream.expect('block_end')
result = self.subparse(end_tokens)
# we reached the end of the template too early, the subparser
# does not check for this, so we do that now
if self.stream.current.type == 'eof':
self.fail_eof(end_tokens)
if drop_needle:
next(self.stream)
return result
def parse_set(self):
"""Parse an assign statement."""
lineno = next(self.stream).lineno
target = self.parse_assign_target()
if self.stream.skip_if('assign'):
expr = self.parse_tuple()
return nodes.Assign(target, expr, lineno=lineno)
body = self.parse_statements(('name:endset',),
drop_needle=True)
return nodes.AssignBlock(target, body, lineno=lineno)
def parse_for(self):
"""Parse a for loop."""
lineno = self.stream.expect('name:for').lineno
target = self.parse_assign_target(extra_end_rules=('name:in',))
self.stream.expect('name:in')
iter = self.parse_tuple(with_condexpr=False,
extra_end_rules=('name:recursive',))
test = None
if self.stream.skip_if('name:if'):
test = self.parse_expression()
recursive = self.stream.skip_if('name:recursive')
body = self.parse_statements(('name:endfor', 'name:else'))
if next(self.stream).value == 'endfor':
else_ = []
else:
else_ = self.parse_statements(('name:endfor',), drop_needle=True)
return nodes.For(target, iter, body, else_, test,
recursive, lineno=lineno)
def parse_if(self):
"""Parse an if construct."""
node = result = nodes.If(lineno=self.stream.expect('name:if').lineno)
while 1:
node.test = self.parse_tuple(with_condexpr=False)
node.body = self.parse_statements(('name:elif', 'name:else',
'name:endif'))
token = next(self.stream)
if token.test('name:elif'):
new_node = nodes.If(lineno=self.stream.current.lineno)
node.else_ = [new_node]
node = new_node
continue
elif token.test('name:else'):
node.else_ = self.parse_statements(('name:endif',),
drop_needle=True)
else:
node.else_ = []
break
return result
def parse_block(self):
node = nodes.Block(lineno=next(self.stream).lineno)
node.name = self.stream.expect('name').value
node.scoped = self.stream.skip_if('name:scoped')
# common problem people encounter when switching from django
# to jinja. we do not support hyphens in block names, so let's
# raise a nicer error message in that case.
if self.stream.current.type == 'sub':
self.fail('Block names in Jinja have to be valid Python '
'identifiers and may not contain hyphens, use an '
'underscore instead.')
node.body = self.parse_statements(('name:endblock',), drop_needle=True)
self.stream.skip_if('name:' + node.name)
return node
def parse_extends(self):
node = nodes.Extends(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
return node
def parse_import_context(self, node, default):
if self.stream.current.test_any('name:with', 'name:without') and \
self.stream.look().test('name:context'):
node.with_context = next(self.stream).value == 'with'
self.stream.skip()
else:
node.with_context = default
return node
def parse_include(self):
node = nodes.Include(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
if self.stream.current.test('name:ignore') and \
self.stream.look().test('name:missing'):
node.ignore_missing = True
self.stream.skip(2)
else:
node.ignore_missing = False
return self.parse_import_context(node, True)
def parse_import(self):
node = nodes.Import(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
self.stream.expect('name:as')
node.target = self.parse_assign_target(name_only=True).name
return self.parse_import_context(node, False)
def parse_from(self):
node = nodes.FromImport(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
self.stream.expect('name:import')
node.names = []
def parse_context():
if self.stream.current.value in ('with', 'without') and \
self.stream.look().test('name:context'):
node.with_context = next(self.stream).value == 'with'
self.stream.skip()
return True
return False
while 1:
if node.names:
self.stream.expect('comma')
if self.stream.current.type == 'name':
if parse_context():
break
target = self.parse_assign_target(name_only=True)
if target.name.startswith('_'):
self.fail('names starting with an underline can not '
'be imported', target.lineno,
exc=TemplateAssertionError)
if self.stream.skip_if('name:as'):
alias = self.parse_assign_target(name_only=True)
node.names.append((target.name, alias.name))
else:
node.names.append(target.name)
if parse_context() or self.stream.current.type != 'comma':
break
else:
break
if not hasattr(node, 'with_context'):
node.with_context = False
self.stream.skip_if('comma')
return node
def parse_signature(self, node):
node.args = args = []
node.defaults = defaults = []
self.stream.expect('lparen')
while self.stream.current.type != 'rparen':
if args:
self.stream.expect('comma')
arg = self.parse_assign_target(name_only=True)
arg.set_ctx('param')
if self.stream.skip_if('assign'):
defaults.append(self.parse_expression())
elif defaults:
self.fail('non-default argument follows default argument')
args.append(arg)
self.stream.expect('rparen')
def parse_call_block(self):
node = nodes.CallBlock(lineno=next(self.stream).lineno)
if self.stream.current.type == 'lparen':
self.parse_signature(node)
else:
node.args = []
node.defaults = []
node.call = self.parse_expression()
if not isinstance(node.call, nodes.Call):
self.fail('expected call', node.lineno)
node.body = self.parse_statements(('name:endcall',), drop_needle=True)
return node
def parse_filter_block(self):
node = nodes.FilterBlock(lineno=next(self.stream).lineno)
node.filter = self.parse_filter(None, start_inline=True)
node.body = self.parse_statements(('name:endfilter',),
drop_needle=True)
return node
def parse_macro(self):
node = nodes.Macro(lineno=next(self.stream).lineno)
node.name = self.parse_assign_target(name_only=True).name
self.parse_signature(node)
node.body = self.parse_statements(('name:endmacro',),
drop_needle=True)
return node
def parse_print(self):
node = nodes.Output(lineno=next(self.stream).lineno)
node.nodes = []
while self.stream.current.type != 'block_end':
if node.nodes:
self.stream.expect('comma')
node.nodes.append(self.parse_expression())
return node
def parse_assign_target(self, with_tuple=True, name_only=False,
extra_end_rules=None):
"""Parse an assignment target. As Jinja2 allows assignments to
tuples, this function can parse all allowed assignment targets. Per
default assignments to tuples are parsed, that can be disable however
by setting `with_tuple` to `False`. If only assignments to names are
wanted `name_only` can be set to `True`. The `extra_end_rules`
parameter is forwarded to the tuple parsing function.
"""
if name_only:
token = self.stream.expect('name')
target = nodes.Name(token.value, 'store', lineno=token.lineno)
else:
if with_tuple:
target = self.parse_tuple(simplified=True,
extra_end_rules=extra_end_rules)
else:
target = self.parse_primary()
target.set_ctx('store')
if not target.can_assign():
self.fail('can\'t assign to %r' % target.__class__.
__name__.lower(), target.lineno)
return target
def parse_expression(self, with_condexpr=True):
"""Parse an expression. Per default all expressions are parsed, if
the optional `with_condexpr` parameter is set to `False` conditional
expressions are not parsed.
"""
if with_condexpr:
return self.parse_condexpr()
return self.parse_or()
def parse_condexpr(self):
lineno = self.stream.current.lineno
expr1 = self.parse_or()
while self.stream.skip_if('name:if'):
expr2 = self.parse_or()
if self.stream.skip_if('name:else'):
expr3 = self.parse_condexpr()
else:
expr3 = None
expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
lineno = self.stream.current.lineno
return expr1
def parse_or(self):
lineno = self.stream.current.lineno
left = self.parse_and()
while self.stream.skip_if('name:or'):
right = self.parse_and()
left = nodes.Or(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_and(self):
lineno = self.stream.current.lineno
left = self.parse_not()
while self.stream.skip_if('name:and'):
right = self.parse_not()
left = nodes.And(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_not(self):
if self.stream.current.test('name:not'):
lineno = next(self.stream).lineno
return nodes.Not(self.parse_not(), lineno=lineno)
return self.parse_compare()
def parse_compare(self):
lineno = self.stream.current.lineno
expr = self.parse_add()
ops = []
while 1:
token_type = self.stream.current.type
if token_type in _compare_operators:
next(self.stream)
ops.append(nodes.Operand(token_type, self.parse_add()))
elif self.stream.skip_if('name:in'):
ops.append(nodes.Operand('in', self.parse_add()))
elif (self.stream.current.test('name:not') and
self.stream.look().test('name:in')):
self.stream.skip(2)
ops.append(nodes.Operand('notin', self.parse_add()))
else:
break
lineno = self.stream.current.lineno
if not ops:
return expr
return nodes.Compare(expr, ops, lineno=lineno)
def parse_add(self):
lineno = self.stream.current.lineno
left = self.parse_sub()
while self.stream.current.type == 'add':
next(self.stream)
right = self.parse_sub()
left = nodes.Add(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_sub(self):
lineno = self.stream.current.lineno
left = self.parse_concat()
while self.stream.current.type == 'sub':
next(self.stream)
right = self.parse_concat()
left = nodes.Sub(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_concat(self):
lineno = self.stream.current.lineno
args = [self.parse_mul()]
while self.stream.current.type == 'tilde':
next(self.stream)
args.append(self.parse_mul())
if len(args) == 1:
return args[0]
return nodes.Concat(args, lineno=lineno)
def parse_mul(self):
lineno = self.stream.current.lineno
left = self.parse_div()
while self.stream.current.type == 'mul':
next(self.stream)
right = self.parse_div()
left = nodes.Mul(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_div(self):
lineno = self.stream.current.lineno
left = self.parse_floordiv()
while self.stream.current.type == 'div':
next(self.stream)
right = self.parse_floordiv()
left = nodes.Div(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_floordiv(self):
lineno = self.stream.current.lineno
left = self.parse_mod()
while self.stream.current.type == 'floordiv':
next(self.stream)
right = self.parse_mod()
left = nodes.FloorDiv(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_mod(self):
lineno = self.stream.current.lineno
left = self.parse_pow()
while self.stream.current.type == 'mod':
next(self.stream)
right = self.parse_pow()
left = nodes.Mod(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_pow(self):
lineno = self.stream.current.lineno
left = self.parse_unary()
while self.stream.current.type == 'pow':
next(self.stream)
right = self.parse_unary()
left = nodes.Pow(left, right, lineno=lineno)
lineno = self.stream.current.lineno
return left
def parse_unary(self, with_filter=True):
token_type = self.stream.current.type
lineno = self.stream.current.lineno
if token_type == 'sub':
next(self.stream)
node = nodes.Neg(self.parse_unary(False), lineno=lineno)
elif token_type == 'add':
next(self.stream)
node = nodes.Pos(self.parse_unary(False), lineno=lineno)
else:
node = self.parse_primary()
node = self.parse_postfix(node)
if with_filter:
node = self.parse_filter_expr(node)
return node
def parse_primary(self):
token = self.stream.current
if token.type == 'name':
if token.value in ('true', 'false', 'True', 'False'):
node = nodes.Const(token.value in ('true', 'True'),
lineno=token.lineno)
elif token.value in ('none', 'None'):
node = nodes.Const(None, lineno=token.lineno)
else:
node = nodes.Name(token.value, 'load', lineno=token.lineno)
next(self.stream)
elif token.type == 'string':
next(self.stream)
buf = [token.value]
lineno = token.lineno
while self.stream.current.type == 'string':
buf.append(self.stream.current.value)
next(self.stream)
node = nodes.Const(''.join(buf), lineno=lineno)
elif token.type in ('integer', 'float'):
next(self.stream)
node = nodes.Const(token.value, lineno=token.lineno)
elif token.type == 'lparen':
next(self.stream)
node = self.parse_tuple(explicit_parentheses=True)
self.stream.expect('rparen')
elif token.type == 'lbracket':
node = self.parse_list()
elif token.type == 'lbrace':
node = self.parse_dict()
else:
self.fail("unexpected '%s'" % describe_token(token), token.lineno)
return node
def parse_tuple(self, simplified=False, with_condexpr=True,
                    extra_end_rules=None, explicit_parentheses=False):
        """Works like `parse_expression` but if multiple expressions are
        delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
        This method could also return a regular expression instead of a tuple
        if no commas were found.

        The default parsing mode is a full tuple. If `simplified` is `True`
        only names and literals are parsed. The `with_condexpr` parameter is
        forwarded to :meth:`parse_expression`.

        Because tuples do not require delimiters and may end in a bogus comma
        an extra hint is needed that marks the end of a tuple. For example
        for loops support tuples between `for` and `in`. In that case the
        `extra_end_rules` is set to ``['name:in']``.

        `explicit_parentheses` is true if the parsing was triggered by an
        expression in parentheses. This is used to figure out if an empty
        tuple is a valid expression or not.
        """
        lineno = self.stream.current.lineno
        if simplified:
            parse = self.parse_primary
        elif with_condexpr:
            parse = self.parse_expression
        else:
            parse = lambda: self.parse_expression(with_condexpr=False)
        args = []
        is_tuple = False
        while 1:
            if args:
                self.stream.expect('comma')
            if self.is_tuple_end(extra_end_rules):
                break
            args.append(parse())
            if self.stream.current.type == 'comma':
                is_tuple = True
            else:
                break
            lineno = self.stream.current.lineno
        if not is_tuple:
            if args:
                return args[0]
            # if we don't have explicit parentheses, an empty tuple is
            # not a valid expression. This would mean nothing (literally
            # nothing) in the spot of an expression would be an empty
            # tuple.
            if not explicit_parentheses:
                self.fail('Expected an expression, got \'%s\'' %
                          describe_token(self.stream.current))
        return nodes.Tuple(args, 'load', lineno=lineno)
def parse_list(self):
        """Parse a ``[...]`` list literal; a trailing comma is allowed."""
        token = self.stream.expect('lbracket')
        items = []
        while self.stream.current.type != 'rbracket':
            if items:
                self.stream.expect('comma')
            if self.stream.current.type == 'rbracket':
                break
            items.append(self.parse_expression())
        self.stream.expect('rbracket')
        return nodes.List(items, lineno=token.lineno)
def parse_dict(self):
        """Parse a ``{key: value, ...}`` dict literal; trailing comma allowed."""
        token = self.stream.expect('lbrace')
        items = []
        while self.stream.current.type != 'rbrace':
            if items:
                self.stream.expect('comma')
            if self.stream.current.type == 'rbrace':
                break
            key = self.parse_expression()
            self.stream.expect('colon')
            value = self.parse_expression()
            items.append(nodes.Pair(key, value, lineno=key.lineno))
        self.stream.expect('rbrace')
        return nodes.Dict(items, lineno=token.lineno)
def parse_postfix(self, node):
        """Consume attribute access, subscripts and calls following *node*."""
        while 1:
            token_type = self.stream.current.type
            if token_type == 'dot' or token_type == 'lbracket':
                node = self.parse_subscript(node)
            # calls are valid both after postfix expressions (getattr
            # and getitem) as well as filters and tests
            elif token_type == 'lparen':
                node = self.parse_call(node)
            else:
                break
        return node
def parse_filter_expr(self, node):
        """Consume ``|filter`` and ``is test`` suffixes following *node*."""
        while 1:
            token_type = self.stream.current.type
            if token_type == 'pipe':
                node = self.parse_filter(node)
            elif token_type == 'name' and self.stream.current.value == 'is':
                node = self.parse_test(node)
            # calls are valid both after postfix expressions (getattr
            # and getitem) as well as filters and tests
            elif token_type == 'lparen':
                node = self.parse_call(node)
            else:
                break
        return node
def parse_subscript(self, node):
        """Parse an attribute (``.name``) or item (``[...]``) access on *node*.

        ``foo.bar`` becomes a ``Getattr`` node; ``foo.0`` (integer attribute)
        and ``foo[...]`` become ``Getitem`` nodes.  Multiple comma-separated
        bracket arguments form a tuple subscript.
        """
        token = next(self.stream)
        if token.type == 'dot':
            attr_token = self.stream.current
            next(self.stream)
            if attr_token.type == 'name':
                return nodes.Getattr(node, attr_token.value, 'load',
                                     lineno=token.lineno)
            elif attr_token.type != 'integer':
                self.fail('expected name or number', attr_token.lineno)
            arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
            return nodes.Getitem(node, arg, 'load', lineno=token.lineno)
        if token.type == 'lbracket':
            args = []
            while self.stream.current.type != 'rbracket':
                if args:
                    self.stream.expect('comma')
                args.append(self.parse_subscribed())
            self.stream.expect('rbracket')
            if len(args) == 1:
                arg = args[0]
            else:
                arg = nodes.Tuple(args, 'load', lineno=token.lineno)
            return nodes.Getitem(node, arg, 'load', lineno=token.lineno)
        # BUG FIX: the parser object has no ``lineno`` attribute, so the old
        # ``self.fail(..., self.lineno)`` crashed with AttributeError instead
        # of reporting a template syntax error.  Use the offending token's
        # line number (same fix as upstream Jinja2).
        self.fail('expected subscript expression', token.lineno)
def parse_subscribed(self):
        """Parse one bracket-subscript item: a plain expression or a slice.

        A slice is detected by the presence of ``:`` and is collected into
        ``[start, stop]`` or ``[start, stop, step]`` (``None`` for omitted
        parts) before being turned into a Slice node.
        """
        lineno = self.stream.current.lineno
        if self.stream.current.type == 'colon':
            next(self.stream)
            args = [None]
        else:
            node = self.parse_expression()
            if self.stream.current.type != 'colon':
                return node
            next(self.stream)
            args = [node]
        if self.stream.current.type == 'colon':
            args.append(None)
        elif self.stream.current.type not in ('rbracket', 'comma'):
            args.append(self.parse_expression())
        else:
            args.append(None)
        if self.stream.current.type == 'colon':
            next(self.stream)
            if self.stream.current.type not in ('rbracket', 'comma'):
                args.append(self.parse_expression())
            else:
                args.append(None)
        else:
            args.append(None)
        return nodes.Slice(lineno=lineno, *args)
def parse_call(self, node):
        """Parse a call argument list starting at ``(``.

        Returns the raw ``(args, kwargs, dyn_args, dyn_kwargs)`` tuple when
        *node* is None (used by filters/tests), otherwise a Call node.
        Positional args must precede keyword args, which must precede
        ``*args``/``**kwargs``; ``ensure`` enforces that ordering.
        """
        token = self.stream.expect('lparen')
        args = []
        kwargs = []
        dyn_args = dyn_kwargs = None
        require_comma = False
        def ensure(expr):
            # fail with a consistent message on argument-order violations
            if not expr:
                self.fail('invalid syntax for function call expression',
                          token.lineno)
        while self.stream.current.type != 'rparen':
            if require_comma:
                self.stream.expect('comma')
                # support for trailing comma
                if self.stream.current.type == 'rparen':
                    break
            if self.stream.current.type == 'mul':
                ensure(dyn_args is None and dyn_kwargs is None)
                next(self.stream)
                dyn_args = self.parse_expression()
            elif self.stream.current.type == 'pow':
                ensure(dyn_kwargs is None)
                next(self.stream)
                dyn_kwargs = self.parse_expression()
            else:
                ensure(dyn_args is None and dyn_kwargs is None)
                if self.stream.current.type == 'name' and \
                        self.stream.look().type == 'assign':
                    key = self.stream.current.value
                    self.stream.skip(2)
                    value = self.parse_expression()
                    kwargs.append(nodes.Keyword(key, value,
                                                lineno=value.lineno))
                else:
                    ensure(not kwargs)
                    args.append(self.parse_expression())
            require_comma = True
        self.stream.expect('rparen')
        if node is None:
            return args, kwargs, dyn_args, dyn_kwargs
        return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs,
                          lineno=token.lineno)
def parse_filter(self, node, start_inline=False):
        """Parse one or more ``|name[.sub][(args)]`` filter applications.

        With *start_inline* the first filter name is parsed without
        requiring a leading pipe (used by ``{% filter %}`` blocks).
        """
        while self.stream.current.type == 'pipe' or start_inline:
            if not start_inline:
                next(self.stream)
            token = self.stream.expect('name')
            name = token.value
            # dotted filter names, e.g. ``foo.bar``
            while self.stream.current.type == 'dot':
                next(self.stream)
                name += '.' + self.stream.expect('name').value
            if self.stream.current.type == 'lparen':
                args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
            else:
                args = []
                kwargs = []
                dyn_args = dyn_kwargs = None
            node = nodes.Filter(node, name, args, kwargs, dyn_args,
                                dyn_kwargs, lineno=token.lineno)
            start_inline = False
        return node
def parse_test(self, node):
        """Parse an ``is [not] testname [arg]`` expression applied to *node*.

        A single following primary-like token is treated as the test's
        argument unless it starts a chained boolean/conditional expression.
        """
        token = next(self.stream)
        if self.stream.current.test('name:not'):
            next(self.stream)
            negated = True
        else:
            negated = False
        name = self.stream.expect('name').value
        while self.stream.current.type == 'dot':
            next(self.stream)
            name += '.' + self.stream.expect('name').value
        dyn_args = dyn_kwargs = None
        kwargs = []
        if self.stream.current.type == 'lparen':
            args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
        elif (self.stream.current.type in ('name', 'string', 'integer',
                                           'float', 'lparen', 'lbracket',
                                           'lbrace') and not
              self.stream.current.test_any('name:else', 'name:or',
                                           'name:and')):
            if self.stream.current.test('name:is'):
                self.fail('You cannot chain multiple tests with is')
            args = [self.parse_expression()]
        else:
            args = []
        node = nodes.Test(node, name, args, kwargs, dyn_args,
                          dyn_kwargs, lineno=token.lineno)
        if negated:
            node = nodes.Not(node, lineno=token.lineno)
        return node
def subparse(self, end_tokens=None):
        """Parse template body nodes until the stream ends or one of
        *end_tokens* is seen at the start of a block.

        Raw template text and ``{{ ... }}`` expressions are buffered and
        flushed into Output nodes; ``{% ... %}`` blocks are dispatched to
        :meth:`parse_statement`.
        """
        body = []
        data_buffer = []
        add_data = data_buffer.append
        if end_tokens is not None:
            self._end_token_stack.append(end_tokens)
        def flush_data():
            # emit buffered text/expressions as a single Output node
            if data_buffer:
                lineno = data_buffer[0].lineno
                body.append(nodes.Output(data_buffer[:], lineno=lineno))
                del data_buffer[:]
        try:
            while self.stream:
                token = self.stream.current
                if token.type == 'data':
                    if token.value:
                        add_data(nodes.TemplateData(token.value,
                                                    lineno=token.lineno))
                    next(self.stream)
                elif token.type == 'variable_begin':
                    next(self.stream)
                    add_data(self.parse_tuple(with_condexpr=True))
                    self.stream.expect('variable_end')
                elif token.type == 'block_begin':
                    flush_data()
                    next(self.stream)
                    if end_tokens is not None and \
                       self.stream.current.test_any(*end_tokens):
                        return body
                    rv = self.parse_statement()
                    if isinstance(rv, list):
                        body.extend(rv)
                    else:
                        body.append(rv)
                    self.stream.expect('block_end')
                else:
                    raise AssertionError('internal parsing error')
            flush_data()
        finally:
            # always balance the end-token stack, even on parse errors
            if end_tokens is not None:
                self._end_token_stack.pop()
        return body
def parse(self):
        """Parse the whole template into a `Template` node and attach the
        parser's environment to it.
        """
        result = nodes.Template(self.subparse(), lineno=1)
        result.set_environment(self.environment)
        return result
| apache-2.0 |
galtys/odoo | addons/l10n_cr/__init__.py | 438 | 2045 | # -*- encoding: utf-8 -*-
##############################################################################
#
# __init__.py
# l10n_cr_account
# First author: Carlos Vásquez <carlos.vasquez@clearcorp.co.cr> (ClearCorp S.A.)
# Copyright (c) 2010-TODAY ClearCorp S.A. (http://clearcorp.co.cr). All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those of the
# authors and should not be interpreted as representing official policies, either expressed
# or implied, of ClearCorp S.A..
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
gilt/incubator-airflow | airflow/ti_deps/deps/prev_dagrun_dep.py | 38 | 3341 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
from airflow.utils.db import provide_session
from airflow.utils.state import State
class PrevDagrunDep(BaseTIDep):
    """
    Is the past dagrun in a state that allows this task instance to run, e.g. did this
    task instance's task in the previous dagrun complete if we are depending on past.
    """
    NAME = "Previous Dagrun State"
    # this dep may be skipped via dep_context flags
    IGNOREABLE = True
    # this dep is defined per-task (depends_on_past), not per-scheduler
    IS_TASK_DEP = True
    @provide_session
    def _get_dep_statuses(self, ti, session, dep_context):
        """Yield passing/failing dep statuses for *ti* based on the previous run.

        Passes immediately when depends_on_past is off (or explicitly
        ignored) or when *ti* is the first instance of its task; otherwise
        requires the previous task instance to have finished successfully
        (or been skipped) and, with wait_for_downstream, its downstream
        tasks to be done as well.
        """
        if dep_context.ignore_depends_on_past:
            yield self._passing_status(
                reason="The context specified that the state of past DAGs could be "
                "ignored.")
            return
        if not ti.task.depends_on_past:
            yield self._passing_status(
                reason="The task did not have depends_on_past set.")
            return
        # Don't depend on the previous task instance if we are the first task
        dag = ti.task.dag
        if dag.catchup:
            if dag.previous_schedule(ti.execution_date) is None:
                yield self._passing_status(
                    reason="This task does not have a schedule or is @once"
                )
                return
            if dag.previous_schedule(ti.execution_date) < ti.task.start_date:
                yield self._passing_status(
                    reason="This task instance was the first task instance for its task.")
                return
        else:
            # without catchup, "first instance" is judged by the dagrun chain
            dr = ti.get_dagrun()
            last_dagrun = dr.get_previous_dagrun() if dr else None
            if not last_dagrun:
                yield self._passing_status(
                    reason="This task instance was the first task instance for its task.")
                return
        previous_ti = ti.previous_ti
        if not previous_ti:
            yield self._failing_status(
                reason="depends_on_past is true for this task's DAG, but the previous "
                       "task instance has not run yet.")
            return
        if previous_ti.state not in {State.SKIPPED, State.SUCCESS}:
            # NOTE: deliberately no return here — the wait_for_downstream
            # check below may contribute an additional failing status.
            yield self._failing_status(
                reason="depends_on_past is true for this task, but the previous task "
                       "instance {0} is in the state '{1}' which is not a successful "
                       "state.".format(previous_ti, previous_ti.state))
        previous_ti.task = ti.task
        if (ti.task.wait_for_downstream and
                not previous_ti.are_dependents_done(session=session)):
            yield self._failing_status(
                reason="The tasks downstream of the previous task instance {0} haven't "
                       "completed.".format(previous_ti))
| apache-2.0 |
SlimRemix/android_external_chromium_org | tools/telemetry/telemetry/page/actions/swipe.py | 45 | 2997 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.page.actions import page_action
class SwipeAction(page_action.PageAction):
  """Synthetic touch swipe gesture on an element (default: document body).

  The swipe starts at (left_start_ratio, top_start_ratio) within the target
  element's bounding box and moves `distance` pixels in `direction` at
  `speed_in_pixels_per_second`.
  """

  def __init__(self, selector=None, text=None, element_function=None,
               left_start_ratio=0.5, top_start_ratio=0.5,
               direction='left', distance=100, speed_in_pixels_per_second=800):
    super(SwipeAction, self).__init__()
    if direction not in ['down', 'up', 'left', 'right']:
      # BUG FIX: the original referenced ``self.direction``, which does not
      # exist at this point (only ``self._direction`` is assigned, and only
      # later), so an invalid direction raised AttributeError instead of the
      # intended error.  Use the constructor argument directly.
      raise page_action.PageActionNotSupported(
          'Invalid swipe direction: %s' % direction)
    self._selector = selector
    self._text = text
    self._element_function = element_function
    self._left_start_ratio = left_start_ratio
    self._top_start_ratio = top_start_ratio
    self._direction = direction
    self._distance = distance
    self._speed = speed_in_pixels_per_second

  def WillRunAction(self, tab):
    """Load the gesture JS helpers into *tab* and verify browser support."""
    for js_file in ['gesture_common.js', 'swipe.js']:
      with open(os.path.join(os.path.dirname(__file__), js_file)) as f:
        js = f.read()
      tab.ExecuteJavaScript(js)
    # Fail if browser doesn't support synthetic swipe gestures.
    if not tab.EvaluateJavaScript('window.__SwipeAction_SupportedByBrowser()'):
      raise page_action.PageActionNotSupported(
          'Synthetic swipe not supported for this browser')
    if (page_action.GetGestureSourceTypeFromOptions(tab) ==
        'chrome.gpuBenchmarking.MOUSE_INPUT'):
      raise page_action.PageActionNotSupported(
          'Swipe page action does not support mouse input')
    if not page_action.IsGestureSourceTypeSupported(tab, 'touch'):
      raise page_action.PageActionNotSupported(
          'Touch input not supported for this browser')
    done_callback = 'function() { window.__swipeActionDone = true; }'
    tab.ExecuteJavaScript("""
        window.__swipeActionDone = false;
        window.__swipeAction = new __SwipeAction(%s);"""
        % (done_callback))

  def RunAction(self, tab):
    """Start the swipe and block until the in-page gesture reports done."""
    if (self._selector is None and self._text is None and
        self._element_function is None):
      self._element_function = 'document.body'
    code = '''
        function(element, info) {
          if (!element) {
            throw Error('Cannot find element: ' + info);
          }
          window.__swipeAction.start({
            element: element,
            left_start_ratio: %s,
            top_start_ratio: %s,
            direction: '%s',
            distance: %s,
            speed: %s
          });
        }''' % (self._left_start_ratio,
                self._top_start_ratio,
                self._direction,
                self._distance,
                self._speed)
    page_action.EvaluateCallbackWithElement(
        tab, code, selector=self._selector, text=self._text,
        element_function=self._element_function)
    tab.WaitForJavaScriptExpression('window.__swipeActionDone', 60)
| bsd-3-clause |
danielballan/scikit-xray | skbeam/io/gsas_file_reader.py | 12 | 8616 | # ######################################################################
# Original code: #
# @author: Robert B. Von Dreele and Brian Toby #
# General Structure Analysis System - II (GSAS-II) #
# https://subversion.xor.aps.anl.gov/trac/pyGSAS #
# Copyright 2010, UChicago Argonne, LLC, Operator of #
# Argonne National Laboratory All rights reserved. #
# #
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
"""
This is the module for reading files created in GSAS file formats
https://subversion.xor.aps.anl.gov/trac/pyGSAS
"""
from __future__ import absolute_import, division, print_function
import os
import numpy as np
def gsas_reader(file):
    """
    Read a GSAS powder diffraction data file (.gsas extension).

    Parameters
    ----------
    file: str
        GSAS powder data file

    Returns
    --------
    tth : ndarray
        twotheta values (degrees) shape (N, ) array
    intensity : ndarray
        intensity values shape (N, ) array
    err : ndarray
        error value of intensity shape(N, ) array

    Raises
    ------
    IOError
        if the file extension is not ``.gsas``
    ValueError
        if the file mode is not one of 'STD', 'ESD', 'FXYE'
    """
    if os.path.splitext(file)[1] != ".gsas":
        raise IOError("Provide a file with diffraction data saved in GSAS,"
                      " file extension has to be .gsas ")
    # find the file mode, could be 'std', 'esd', 'fxye'
    # (the mode is the 10th whitespace-separated token on the second line)
    with open(file, 'r') as fi:
        S = fi.readlines()[1]
    mode = S.split()[9]
    try:
        tth, intensity, err = _func_look_up[mode](file)
    except KeyError:
        raise ValueError("Provide a correct mode of the GSAS file, "
                         "file modes could be in 'STD', 'ESD', 'FXYE' ")
    return tth, intensity, err
def _get_fxye_data(file):
    """
    Parse a GSAS powder data file written in 'FXYE' mode.

    Each data line after the two header lines holds three columns:
    two theta, intensity and its estimated standard deviation.  The
    returned error array holds statistical weights (1/esd**2); entries
    with non-positive intensity or esd are mapped to zero.

    Parameters
    ----------
    file : str
        GSAS powder data file

    Returns
    -------
    list of ndarray
        [tth, intensity, err], each a shape (N,) array
    """
    two_theta = []
    counts = []
    weights = []
    with open(file, 'r') as fid:
        data_lines = fid.readlines()[2:]
    for line in data_lines:
        vals = line.split()
        two_theta.append(float(vals[0]))
        y_val = float(vals[1])
        esd = float(vals[2])
        counts.append(0.0 if y_val <= 0.0 else y_val)
        weights.append(1.0 / esd ** 2 if esd > 0.0 else 0.0)
    return [np.array(two_theta), np.array(counts), np.array(weights)]
def _get_esd_data(file):
    """
    Parse a GSAS powder data file written in 'ESD' mode.

    Parameters
    ----------
    file: str
        GSAS powder data file

    Return
    ------
    tth : ndarray
        twotheta values (degrees) shape (N, ) array
    intensity : ndarray
        intensity values shape (N, ) array
    err : ndarray
        error value of intensity (statistical weights, 1/esd**2)
        shape(N, ) array
    """
    tth = []
    intensity = []
    err = []
    with open(file, 'r') as fi:
        S = fi.readlines()[1:]
    # convert from centidegrees to degrees
    start = float(S[0].split()[5])/100.0
    step = float(S[0].split()[6])/100.0
    j = 0
    for line in S[1:]:
        # each 80-column record holds five fixed-width (intensity, esd)
        # pairs of 8 characters each
        for i in range(0, 80, 16):
            xi = start + step*j
            yi = _sfloat(line[i: i + 8])
            ei = _sfloat(line[i + 8: i + 16])
            tth.append(xi)
            if yi > 0.0:
                intensity.append(yi)
            else:
                intensity.append(0.0)
            if ei > 0.0:
                err.append(1.0/ei**2)
            else:
                err.append(0.0)
            j += 1
    return [np.array(tth), np.array(intensity), np.array(err)]
def _get_std_data(file):
    """
    Parse a GSAS powder data file written in 'STD' mode.

    Parameters
    ----------
    file: str
        GSAS powder data file

    Return
    ------
    tth : ndarray
        twotheta values (degrees) shape (N, ) array
    intensity : ndarray
        intensity values shape (N, ) array
    err : ndarray
        error value of intensity shape(N, ) array
    """
    tth = []
    intensity = []
    err = []
    with open(file, 'r') as fi:
        S = fi.readlines()[1:]
    # convert from centidegrees to degrees
    start = float(S[0].split()[5])/100.0
    step = float(S[0].split()[6])/100.0
    # number of data values(two theta or intensity)
    nch = float(S[0].split()[2])
    j = 0
    for line in S[1:]:
        # each 80-column record holds ten 8-character fields: a 2-char
        # integer count followed by a 6-char summed intensity value
        for i in range(0, 80, 8):
            xi = start + step*j
            ni = max(_sint(line[i: i + 2]), 1)
            yi = max(_sfloat(line[i + 2: i + 8]), 0.0)
            if yi:
                # vi is used as the variance estimate; err stores 1/vi
                vi = yi/ni
            else:
                yi = 0.0
                vi = 0.0
            if j < nch:
                tth.append(xi)
                if vi <= 0.:
                    intensity.append(0.)
                    err.append(0.)
                else:
                    intensity.append(yi)
                    err.append(1.0/vi)
            j += 1
    return [np.array(tth), np.array(intensity), np.array(err)]
# Dispatch table: maps the GSAS file mode (read from the file header by
# ``gsas_reader``) to the matching parser; modes are "STD", "ESD" or "FXYE".
_func_look_up = {'STD': _get_std_data, 'ESD': _get_esd_data,
                 'FXYE': _get_fxye_data}
def _sfloat(S):
    """
    Convert a string to a float, treating an all-blank string as zero.

    Parameter
    ---------
    S : str
        string to convert; an empty or all-blank string yields 0.0

    Returns
    -------
    float
        ``float(S)``, or 0.0 for a blank string
    """
    return float(S) if S.strip() else 0.0
def _sint(S):
    """
    Convert a string to an integer, treating an all-blank string as zero.

    Parameter
    ---------
    S : str
        string to convert; an empty or all-blank string yields 0

    Returns
    -------
    int
        ``int(S)``, or 0 for a blank string
    """
    return int(S) if S.strip() else 0
| bsd-3-clause |
opentracing/opentracing-python | opentracing/scope.py | 3 | 3274 | # Copyright (c) 2017-2019 The OpenTracing Authors.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from .span import Span
class Scope(object):
    """Represents the activation period of a :class:`Span`.

    A span may be unfinished yet not currently runnable from a
    CPU/scheduler point of view (e.g. the client side of an RPC blocked
    on IO).  A scope marks the interval during which a given
    :class:`Span` is scheduled and on the execution path.

    :param manager: the :class:`ScopeManager` that created this :class:`Scope`.
    :type manager: ScopeManager
    :param span: the :class:`Span` used for this :class:`Scope`.
    :type span: Span
    """

    def __init__(self, manager, span):
        """Bind *span* to the *manager* that activated it."""
        self._span = span
        self._manager = manager

    @property
    def span(self):
        """The :class:`Span` wrapped by this :class:`Scope`.

        :rtype: Span
        """
        return self._span

    @property
    def manager(self):
        """The :class:`ScopeManager` that created this :class:`Scope`.

        :rtype: ScopeManager
        """
        return self._manager

    def close(self):
        """End the active period for this :class:`Scope`.

        Implementations update :attr:`ScopeManager.active` accordingly.
        NOTE: calling this more than once on a single :class:`Scope`
        leads to undefined behavior.
        """
        pass

    def __enter__(self):
        """Allow a :class:`Scope` to be used as a Python context manager."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Close the scope when execution leaves the ``with`` block.

        Any exception raised inside the block is logged on the span,
        added as a tag, and :attr:`~operation.ext.tags.ERROR` is set to
        `True`, before the scope is closed.
        """
        # Record the (possibly absent) exception on the span first, then
        # deactivate.
        Span._on_error(self._span, exc_type, exc_val, exc_tb)
        self.close()
| apache-2.0 |
oberlin/django | django/contrib/gis/geos/prepared.py | 328 | 2445 | from .base import GEOSBase
from .error import GEOSException
from .libgeos import geos_version_info
from .prototypes import prepared as capi
class PreparedGeometry(GEOSBase):
    """
    A geometry that is prepared for performing certain operations.
    At the moment this includes the contains, covers, and intersects
    operations.
    """
    ptr_type = capi.PREPGEOM_PTR

    def __init__(self, geom):
        """Prepare *geom* (a GEOSGeometry) for repeated predicate tests."""
        # Keeping a reference to the original geometry object to prevent it
        # from being garbage collected which could then crash the prepared one
        # See #21662
        self._base_geom = geom
        from .geometry import GEOSGeometry
        if not isinstance(geom, GEOSGeometry):
            raise TypeError
        self.ptr = capi.geos_prepare(geom.ptr)

    def __del__(self):
        # ``capi`` can already be torn down during interpreter shutdown.
        if self._ptr and capi:
            capi.prepared_destroy(self._ptr)

    def _require_geos_3_3(self, operation):
        """
        Raise GEOSException unless the loaded GEOS library is >= 3.3.0.

        The predicates below were only added for prepared geometries in
        GEOS 3.3; *operation* names the predicate in the error message.
        """
        if geos_version_info()['version'] < '3.3.0':
            raise GEOSException(
                "%s on prepared geometries requires GEOS >= 3.3.0" % operation)

    def contains(self, other):
        return capi.prepared_contains(self.ptr, other.ptr)

    def contains_properly(self, other):
        return capi.prepared_contains_properly(self.ptr, other.ptr)

    def covers(self, other):
        return capi.prepared_covers(self.ptr, other.ptr)

    def intersects(self, other):
        return capi.prepared_intersects(self.ptr, other.ptr)

    # Added in GEOS 3.3 (version check factored into _require_geos_3_3):
    def crosses(self, other):
        self._require_geos_3_3('crosses')
        return capi.prepared_crosses(self.ptr, other.ptr)

    def disjoint(self, other):
        self._require_geos_3_3('disjoint')
        return capi.prepared_disjoint(self.ptr, other.ptr)

    def overlaps(self, other):
        self._require_geos_3_3('overlaps')
        return capi.prepared_overlaps(self.ptr, other.ptr)

    def touches(self, other):
        self._require_geos_3_3('touches')
        return capi.prepared_touches(self.ptr, other.ptr)

    def within(self, other):
        self._require_geos_3_3('within')
        return capi.prepared_within(self.ptr, other.ptr)
| bsd-3-clause |
xq262144/hue | desktop/core/ext-py/pysaml2-2.4.0/src/saml2/cache.py | 32 | 5624 | #!/usr/bin/env python
import shelve
from saml2.ident import code, decode
from saml2 import time_util, SAMLError
import logging
logger = logging.getLogger(__name__)
# The assumption is that any subject may consist of data
# gathered from several different sources, all with their own
# timeout time.
class ToOld(SAMLError):
    """Raised when cached subject information has passed its validity time."""
    pass
class CacheError(SAMLError):
    """Generic error raised by the assertion cache."""
    pass
class Cache(object):
def __init__(self, filename=None):
if filename:
self._db = shelve.open(filename, writeback=True)
self._sync = True
else:
self._db = {}
self._sync = False
def delete(self, name_id):
"""
:param name_id: The subject identifier, a NameID instance
"""
del self._db[code(name_id)]
if self._sync:
try:
self._db.sync()
except AttributeError:
pass
def get_identity(self, name_id, entities=None,
check_not_on_or_after=True):
""" Get all the identity information that has been received and
are still valid about the subject.
:param name_id: The subject identifier, a NameID instance
:param entities: The identifiers of the entities whoes assertions are
interesting. If the list is empty all entities are interesting.
:return: A 2-tuple consisting of the identity information (a
dictionary of attributes and values) and the list of entities
whoes information has timed out.
"""
if not entities:
try:
cni = code(name_id)
entities = self._db[cni].keys()
except KeyError:
return {}, []
res = {}
oldees = []
for entity_id in entities:
try:
info = self.get(name_id, entity_id, check_not_on_or_after)
except ToOld:
oldees.append(entity_id)
continue
if not info:
oldees.append(entity_id)
continue
for key, vals in info["ava"].items():
try:
tmp = set(res[key]).union(set(vals))
res[key] = list(tmp)
except KeyError:
res[key] = vals
return res, oldees
def get(self, name_id, entity_id, check_not_on_or_after=True):
""" Get session information about a subject gotten from a
specified IdP/AA.
:param name_id: The subject identifier, a NameID instance
:param entity_id: The identifier of the entity_id
:param check_not_on_or_after: if True it will check if this
subject is still valid or if it is too old. Otherwise it
will not check this. True by default.
:return: The session information
"""
cni = code(name_id)
(timestamp, info) = self._db[cni][entity_id]
if check_not_on_or_after and time_util.after(timestamp):
raise ToOld("past %s" % timestamp)
return info or None
def set(self, name_id, entity_id, info, not_on_or_after=0):
""" Stores session information in the cache. Assumes that the name_id
is unique within the context of the Service Provider.
:param name_id: The subject identifier, a NameID instance
:param entity_id: The identifier of the entity_id/receiver of an
assertion
:param info: The session info, the assertion is part of this
:param not_on_or_after: A time after which the assertion is not valid.
"""
cni = code(name_id)
if cni not in self._db:
self._db[cni] = {}
self._db[cni][entity_id] = (not_on_or_after, info)
if self._sync:
try:
self._db.sync()
except AttributeError:
pass
def reset(self, name_id, entity_id):
""" Scrap the assertions received from a IdP or an AA about a special
subject.
:param name_id: The subject identifier, a NameID instance
:param entity_id: The identifier of the entity_id of the assertion
:return:
"""
self.set(name_id, entity_id, {}, 0)
def entities(self, name_id):
""" Returns all the entities of assertions for a subject, disregarding
whether the assertion still is valid or not.
:param name_id: The subject identifier, a NameID instance
:return: A possibly empty list of entity identifiers
"""
cni = code(name_id)
return self._db[cni].keys()
def receivers(self, name_id):
""" Another name for entities() just to make it more logic in the IdP
scenario """
return self.entities(name_id)
def active(self, name_id, entity_id):
""" Returns the status of assertions from a specific entity_id.
:param name_id: The ID of the subject
:param entity_id: The entity ID of the entity_id of the assertion
:return: True or False depending on if the assertion is still
valid or not.
"""
try:
cni = code(name_id)
(timestamp, info) = self._db[cni][entity_id]
except KeyError:
return False
if not info:
return False
else:
return time_util.not_on_or_after(timestamp)
    def subjects(self):
        """ Return identifiers for all the subjects that are in the cache.
        :return: list of subject identifiers
        """
        # Keys are stored encoded (see code()/decode()), so decode on the
        # way out to hand back usable subject identifiers.
        return [decode(c) for c in self._db.keys()]
| apache-2.0 |
NavyaJayaram/MyRepository | YouTubeUsingAJS/lib/python2.7/site-packages/pip/commands/show.py | 344 | 2767 | import os
from pip.basecommand import Command
from pip.log import logger
from pip._vendor import pkg_resources
class ShowCommand(Command):
    """Show information about one or more installed packages."""
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'
    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        # -f/--files additionally lists every installed file per package.
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')
        self.parser.insert_option_group(0, self.cmd_opts)
    def run(self, options, args):
        # args holds the package names to look up; warn and bail without any.
        if not args:
            logger.warn('ERROR: Please provide a package name or names.')
            return
        query = args
        results = search_packages_info(query)
        print_results(results, options.files)
def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.

    :param query: iterable of package names; matching is case-insensitive
    :return: generator of dicts with name/version/location/requires and,
        when the metadata file exists, a 'files' path to installed-files.txt
    """
    # Map lower-cased project names to distributions once for O(1) lookups.
    installed_packages = dict(
        [(p.project_name.lower(), p) for p in pkg_resources.working_set])
    for name in query:
        normalized_name = name.lower()
        if normalized_name in installed_packages:
            dist = installed_packages[normalized_name]
            package = {
                'name': dist.project_name,
                'version': dist.version,
                'location': dist.location,
                'requires': [dep.project_name for dep in dist.requires()],
            }
            filelist = os.path.join(
                dist.location,
                dist.egg_name() + '.egg-info',
                'installed-files.txt')
            if os.path.isfile(filelist):
                package['files'] = filelist
            yield package
def print_results(distributions, list_all_files):
    """
    Print the information from installed distributions found.

    :param distributions: iterable of package dicts as produced by
        search_packages_info()
    :param list_all_files: when True, also print each installed file
    """
    for dist in distributions:
        logger.notify("---")
        logger.notify("Name: %s" % dist['name'])
        logger.notify("Version: %s" % dist['version'])
        logger.notify("Location: %s" % dist['location'])
        logger.notify("Requires: %s" % ', '.join(dist['requires']))
        if list_all_files:
            logger.notify("Files:")
            if 'files' in dist:
                # Use a context manager so the file handle is closed even if
                # logging raises; the original left the handle open.
                with open(dist['files']) as filelist:
                    for line in filelist:
                        logger.notify("  %s" % line.strip())
            else:
                logger.notify("Cannot locate installed-files.txt")
| mit |
fevxie/odoo | addons/account_payment/account_invoice.py | 382 | 2377 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tools.translate import _
from openerp.osv import osv
class Invoice(osv.osv):
    _inherit = 'account.invoice'

    # Forbid to cancel an invoice if the related move lines have already been
    # used in a payment order. The risk is that importing the payment line
    # in the bank statement will result in a crash cause no more move will
    # be found in the payment line
    def action_cancel(self, cr, uid, ids, context=None):
        """Refuse cancellation when any of the invoice's move lines is
        referenced by a payment order; otherwise defer to the standard
        behaviour.
        """
        payment_line_obj = self.pool.get('payment.line')
        for inv in self.browse(cr, uid, ids, context=context):
            pl_line_ids = []
            if inv.move_id and inv.move_id.line_id:
                inv_mv_lines = [x.id for x in inv.move_id.line_id]
                pl_line_ids = payment_line_obj.search(cr, uid, [('move_line_id', 'in', inv_mv_lines)], context=context)
            if pl_line_ids:
                pay_line = payment_line_obj.browse(cr, uid, pl_line_ids, context=context)
                payment_order_name = ','.join(line.order_id.reference for line in pay_line)
                # Interpolate *after* the translation lookup: formatting
                # inside _() made the lookup key contain the order names, so
                # no translation template could ever match.
                raise osv.except_osv(
                    _('Error!'),
                    _("You cannot cancel an invoice which has already been imported in a payment order. Remove it from the following payment order : %s.") % payment_order_name)
        return super(Invoice, self).action_cancel(cr, uid, ids, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hryamzik/ansible | lib/ansible/modules/cloud/vmware/vmware_guest_disk_facts.py | 10 | 6407 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_guest_disk_facts
short_description: Gather facts about disks of given virtual machine
description:
- This module can be used to gather facts about disks belonging to given virtual machine.
- All parameters and VMware object names are case sensitive.
version_added: 2.6
author:
- Abhijeet Kasurde (@Akasurde) <akasurde@redhat.com>
notes:
- Tested on vSphere 6.0 and 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
name:
description:
- Name of the virtual machine.
- This is required parameter, if parameter C(uuid) is not supplied.
uuid:
description:
- UUID of the instance to gather facts if known, this is VMware's unique identifier.
- This is required parameter, if parameter C(name) is not supplied.
folder:
description:
- Destination folder, absolute or relative path to find an existing guest.
- This is required parameter, only if multiple VMs are found with same name.
- The folder should include the datacenter. ESX's datacenter is ha-datacenter
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
- ' folder: vm/folder2'
- ' folder: folder2'
datacenter:
description:
- The datacenter name to which virtual machine belongs to.
required: True
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Gather disk facts from virtual machine using UUID
vmware_guest_disk_facts:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
datacenter: ha-datacenter
validate_certs: no
uuid: 421e4592-c069-924d-ce20-7e7533fab926
delegate_to: localhost
register: disk_facts
- name: Gather disk facts from virtual machine using name
vmware_guest_disk_facts:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
datacenter: ha-datacenter
validate_certs: no
name: VM_225
delegate_to: localhost
register: disk_facts
'''
RETURN = """
guest_disk_facts:
description: metadata about the virtual machine's disks
returned: always
type: dict
sample: {
"0": {
"backing_datastore": "datastore2",
"backing_disk_mode": "persistent",
"backing_eagerlyscrub": false,
"backing_filename": "[datastore2] VM_225/VM_225.vmdk",
"backing_thinprovisioned": false,
"backing_writethrough": false,
"capacity_in_bytes": 10485760,
"capacity_in_kb": 10240,
"controller_key": 1000,
"key": 2000,
"label": "Hard disk 1",
"summary": "10,240 KB",
"unit_number": 0
},
}
"""
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec
class PyVmomiHelper(PyVmomi):
    """Thin wrapper around PyVmomi with the disk-facts gathering logic."""
    def __init__(self, module):
        super(PyVmomiHelper, self).__init__(module)
    def gather_disk_facts(self, vm_obj):
        """
        Function to gather facts about VM's disks
        Args:
            vm_obj: Managed object of virtual machine
        Returns: A dict keyed by disk index, each value a dict of disk facts
        """
        disks_facts = dict()
        if vm_obj is None:
            return disks_facts
        disk_index = 0
        # Only VirtualDisk devices are of interest; other hardware devices
        # (NICs, controllers, ...) are skipped.
        for disk in vm_obj.config.hardware.device:
            if isinstance(disk, vim.vm.device.VirtualDisk):
                disks_facts[disk_index] = dict(
                    key=disk.key,
                    label=disk.deviceInfo.label,
                    summary=disk.deviceInfo.summary,
                    backing_filename=disk.backing.fileName,
                    backing_datastore=disk.backing.datastore.name,
                    backing_disk_mode=disk.backing.diskMode,
                    backing_writethrough=disk.backing.writeThrough,
                    backing_thinprovisioned=disk.backing.thinProvisioned,
                    # eagerlyScrub may be None on the API object; coerce to bool
                    backing_eagerlyscrub=bool(disk.backing.eagerlyScrub),
                    controller_key=disk.controllerKey,
                    unit_number=disk.unitNumber,
                    capacity_in_kb=disk.capacityInKB,
                    capacity_in_bytes=disk.capacityInBytes,
                )
                disk_index += 1
        return disks_facts
def main():
    """Module entry point: locate the VM and exit with its disk facts."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        name=dict(type='str'),
        uuid=dict(type='str'),
        folder=dict(type='str'),
        datacenter=dict(type='str', required=True),
    )
    # Either the VM name or its UUID must be supplied (see DOCUMENTATION).
    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=[['name', 'uuid']])
    if module.params['folder']:
        # FindByInventoryPath() does not require an absolute path
        # so we should leave the input folder path unmodified
        module.params['folder'] = module.params['folder'].rstrip('/')
    pyv = PyVmomiHelper(module)
    # Check if the VM exists before continuing
    vm = pyv.get_vm()
    if vm:
        # VM exists
        try:
            module.exit_json(guest_disk_facts=pyv.gather_disk_facts(vm))
        except Exception as exc:
            module.fail_json(msg="Failed to gather facts with exception : %s" % to_text(exc))
    else:
        # We unable to find the virtual machine user specified
        # Bail out
        module.fail_json(msg="Unable to gather disk facts for non-existing VM %s" % (module.params.get('uuid') or module.params.get('name')))
if __name__ == '__main__':
    main()
| gpl-3.0 |
kalvdans/scipy | scipy/_lib/tests/test__version.py | 107 | 2027 | from numpy.testing import assert_, run_module_suite, assert_raises
from scipy._lib._version import NumpyVersion
def test_main_versions():
    # Equality and strict ordering on plain X.Y.Z release strings.
    assert_(NumpyVersion('1.8.0') == '1.8.0')
    for ver in ['1.9.0', '2.0.0', '1.8.1']:
        assert_(NumpyVersion('1.8.0') < ver)
    for ver in ['1.7.0', '1.7.1', '0.9.9']:
        assert_(NumpyVersion('1.8.0') > ver)
def test_version_1_point_10():
    # regression test for gh-2998: two-digit minor versions must compare
    # numerically, not lexicographically.
    assert_(NumpyVersion('1.9.0') < '1.10.0')
    assert_(NumpyVersion('1.11.0') < '1.11.1')
    assert_(NumpyVersion('1.11.0') == '1.11.0')
    assert_(NumpyVersion('1.99.11') < '1.99.12')
def test_alpha_beta_rc():
    # Pre-releases order a < b < rc < final.
    assert_(NumpyVersion('1.8.0rc1') == '1.8.0rc1')
    for ver in ['1.8.0', '1.8.0rc2']:
        assert_(NumpyVersion('1.8.0rc1') < ver)
    for ver in ['1.8.0a2', '1.8.0b3', '1.7.2rc4']:
        assert_(NumpyVersion('1.8.0rc1') > ver)
    assert_(NumpyVersion('1.8.0b1') > '1.8.0a2')
def test_dev_version():
    # Old-style '.dev-<hash>' suffixes: dev < release; hashes are ignored.
    assert_(NumpyVersion('1.9.0.dev-Unknown') < '1.9.0')
    for ver in ['1.9.0', '1.9.0a1', '1.9.0b2', '1.9.0b2.dev-ffffffff']:
        assert_(NumpyVersion('1.9.0.dev-f16acvda') < ver)
    assert_(NumpyVersion('1.9.0.dev-f16acvda') == '1.9.0.dev-11111111')
def test_dev_a_b_rc_mixed():
    assert_(NumpyVersion('1.9.0a2.dev-f16acvda') == '1.9.0a2.dev-11111111')
    assert_(NumpyVersion('1.9.0a2.dev-6acvda54') < '1.9.0a2')
def test_dev0_version():
    # PEP 440 style '.dev0+<hash>' suffixes behave the same way.
    assert_(NumpyVersion('1.9.0.dev0+Unknown') < '1.9.0')
    for ver in ['1.9.0', '1.9.0a1', '1.9.0b2', '1.9.0b2.dev0+ffffffff']:
        assert_(NumpyVersion('1.9.0.dev0+f16acvda') < ver)
    assert_(NumpyVersion('1.9.0.dev0+f16acvda') == '1.9.0.dev0+11111111')
def test_dev0_a_b_rc_mixed():
    assert_(NumpyVersion('1.9.0a2.dev0+f16acvda') == '1.9.0a2.dev0+11111111')
    assert_(NumpyVersion('1.9.0a2.dev0+6acvda54') < '1.9.0a2')
def test_raises():
    # Malformed version strings must be rejected outright.
    for ver in ['1.9', '1,9.0', '1.7.x']:
        assert_raises(ValueError, NumpyVersion, ver)
if __name__ == "__main__":
    run_module_suite()
| bsd-3-clause |
neumerance/cloudloon2 | .venv/lib/python2.7/site-packages/novaclient/tests/test_shell.py | 3 | 7541 | import io
import prettytable
import re
import sys
from distutils.version import StrictVersion
import fixtures
import mock
from testtools import matchers
import novaclient.client
from novaclient import exceptions
import novaclient.shell
from novaclient.tests import utils
FAKE_ENV = {'OS_USERNAME': 'username',
'OS_PASSWORD': 'password',
'OS_TENANT_NAME': 'tenant_name',
'OS_AUTH_URL': 'http://no.where'}
FAKE_ENV2 = {'OS_USERNAME': 'username',
'OS_PASSWORD': 'password',
'OS_TENANT_ID': 'tenant_id',
'OS_AUTH_URL': 'http://no.where'}
class ShellTest(utils.TestCase):
    """Tests for the top-level `nova` CLI argument/env handling."""
    def make_env(self, exclude=None, fake_env=FAKE_ENV):
        # Install a copy of fake_env as os.environ, optionally dropping one
        # key so tests can simulate a missing credential.
        env = dict((k, v) for k, v in fake_env.items() if k != exclude)
        self.useFixture(fixtures.MonkeyPatch('os.environ', env))
    def setUp(self):
        super(ShellTest, self).setUp()
        # Stub out the real client so no network calls are made.
        self.useFixture(fixtures.MonkeyPatch(
            'novaclient.client.get_client_class',
            mock.MagicMock))
        self.nc_util = mock.patch('novaclient.utils.isunauthenticated').start()
        self.nc_util.return_value = False
    def shell(self, argstr, exitcodes=(0,)):
        """Run the nova shell with argstr, capturing and returning
        (stdout, stderr); SystemExit codes must be in exitcodes."""
        orig = sys.stdout
        orig_stderr = sys.stderr
        try:
            sys.stdout = io.BytesIO()
            sys.stderr = io.BytesIO()
            _shell = novaclient.shell.OpenStackComputeShell()
            _shell.main(argstr.split())
        except SystemExit:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            self.assertIn(exc_value.code, exitcodes)
        finally:
            stdout = sys.stdout.getvalue()
            sys.stdout.close()
            sys.stdout = orig
            stderr = sys.stderr.getvalue()
            sys.stderr.close()
            sys.stderr = orig_stderr
        return (stdout, stderr)
    def test_help_unknown_command(self):
        self.assertRaises(exceptions.CommandError, self.shell, 'help foofoo')
    def test_invalid_timeout(self):
        # Non-positive timeouts must be rejected by the argument parser.
        for f in [0, -1, -10]:
            cmd_text = '--timeout %s' % (f)
            stdout, stderr = self.shell(cmd_text, exitcodes=[0, 2])
            required = [
                'argument --timeout: %s must be greater than 0' % (f),
            ]
            for r in required:
                self.assertIn(r, stderr)
    def test_help(self):
        required = [
            '.*?^usage: ',
            '.*?^\s+root-password\s+Change the root password',
            '.*?^See "nova help COMMAND" for help on a specific command',
        ]
        stdout, stderr = self.shell('help')
        for r in required:
            self.assertThat((stdout + stderr),
                            matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE))
    def test_help_on_subcommand(self):
        required = [
            '.*?^usage: nova root-password',
            '.*?^Change the root password',
            '.*?^Positional arguments:',
        ]
        stdout, stderr = self.shell('help root-password')
        for r in required:
            self.assertThat((stdout + stderr),
                            matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE))
    def test_help_no_options(self):
        # Running with no arguments should print the same top-level help.
        required = [
            '.*?^usage: ',
            '.*?^\s+root-password\s+Change the root password',
            '.*?^See "nova help COMMAND" for help on a specific command',
        ]
        stdout, stderr = self.shell('')
        for r in required:
            self.assertThat((stdout + stderr),
                            matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE))
    def test_bash_completion(self):
        stdout, stderr = self.shell('bash-completion')
        # just check we have some output
        required = [
            '.*--matching',
            '.*--wrap',
            '.*help',
            '.*secgroup-delete-rule',
            '.*--priority']
        for r in required:
            self.assertThat((stdout + stderr),
                            matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE))
    def test_no_username(self):
        required = ('You must provide a username'
                    ' via either --os-username or env[OS_USERNAME]',)
        self.make_env(exclude='OS_USERNAME')
        try:
            self.shell('list')
        except exceptions.CommandError as message:
            self.assertEqual(required, message.args)
        else:
            self.fail('CommandError not raised')
    def test_no_tenant_name(self):
        required = ('You must provide a tenant name or tenant id'
                    ' via --os-tenant-name, --os-tenant-id,'
                    ' env[OS_TENANT_NAME] or env[OS_TENANT_ID]',)
        self.make_env(exclude='OS_TENANT_NAME')
        try:
            self.shell('list')
        except exceptions.CommandError as message:
            self.assertEqual(required, message.args)
        else:
            self.fail('CommandError not raised')
    def test_no_tenant_id(self):
        required = ('You must provide a tenant name or tenant id'
                    ' via --os-tenant-name, --os-tenant-id,'
                    ' env[OS_TENANT_NAME] or env[OS_TENANT_ID]',)
        self.make_env(exclude='OS_TENANT_ID', fake_env=FAKE_ENV2)
        try:
            self.shell('list')
        except exceptions.CommandError as message:
            self.assertEqual(required, message.args)
        else:
            self.fail('CommandError not raised')
    def test_no_auth_url(self):
        required = ('You must provide an auth url'
                    ' via either --os-auth-url or env[OS_AUTH_URL] or'
                    ' specify an auth_system which defines a default url'
                    ' with --os-auth-system or env[OS_AUTH_SYSTEM]',)
        self.make_env(exclude='OS_AUTH_URL')
        try:
            self.shell('list')
        except exceptions.CommandError as message:
            self.assertEqual(required, message.args)
        else:
            self.fail('CommandError not raised')
    @mock.patch('sys.stdin', side_effect=mock.MagicMock)
    @mock.patch('getpass.getpass', return_value='password')
    def test_password(self, mock_getpass, mock_stdin):
        # default output of empty tables differs depending between prettytable
        # versions
        if (hasattr(prettytable, '__version__') and
                StrictVersion(prettytable.__version__) < StrictVersion('0.7.2')):
            ex = '\n'
        else:
            ex = (
                '+----+------+--------+------------+-------------+----------+\n'
                '| ID | Name | Status | Task State | Power State | Networks |\n'
                '+----+------+--------+------------+-------------+----------+\n'
                '+----+------+--------+------------+-------------+----------+\n'
            )
        self.make_env(exclude='OS_PASSWORD')
        stdout, stderr = self.shell('list')
        self.assertEqual((stdout + stderr), ex)
    @mock.patch('sys.stdin', side_effect=mock.MagicMock)
    @mock.patch('getpass.getpass', side_effect=EOFError)
    def test_no_password(self, mock_getpass, mock_stdin):
        # EOF at the password prompt must surface as a CommandError.
        required = ('Expecting a password provided'
                    ' via either --os-password, env[OS_PASSWORD],'
                    ' or prompted response',)
        self.make_env(exclude='OS_PASSWORD')
        try:
            self.shell('list')
        except exceptions.CommandError as message:
            self.assertEqual(required, message.args)
        else:
            self.fail('CommandError not raised')
| apache-2.0 |
frishberg/django | django/contrib/sessions/backends/cache.py | 117 | 2766 | from django.conf import settings
from django.contrib.sessions.backends.base import (
CreateError, SessionBase, UpdateError,
)
from django.core.cache import caches
from django.utils.six.moves import range
KEY_PREFIX = "django.contrib.sessions.cache"
class SessionStore(SessionBase):
    """
    A cache-based session store.
    """
    cache_key_prefix = KEY_PREFIX
    def __init__(self, session_key=None):
        self._cache = caches[settings.SESSION_CACHE_ALIAS]
        super(SessionStore, self).__init__(session_key)
    @property
    def cache_key(self):
        # Full cache key for this session; creates a session key if needed.
        return self.cache_key_prefix + self._get_or_create_session_key()
    def load(self):
        """Return the session dict from the cache, or {} (and reset the
        session key) when missing or unreadable."""
        try:
            session_data = self._cache.get(self.cache_key)
        except Exception:
            # Some backends (e.g. memcache) raise an exception on invalid
            # cache keys. If this happens, reset the session. See #17810.
            session_data = None
        if session_data is not None:
            return session_data
        self._session_key = None
        return {}
    def create(self):
        """Generate a fresh session key and persist an empty session."""
        # Because a cache can fail silently (e.g. memcache), we don't know if
        # we are failing to create a new session because of a key collision or
        # because the cache is missing. So we try for a (large) number of times
        # and then raise an exception. That's the risk you shoulder if using
        # cache backing.
        for i in range(10000):
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                continue
            self.modified = True
            return
        raise RuntimeError(
            "Unable to create a new session key. "
            "It is likely that the cache is unavailable.")
    def save(self, must_create=False):
        """Write the session to the cache; with must_create, fail with
        CreateError if the key already exists (uses cache.add)."""
        if self.session_key is None:
            return self.create()
        if must_create:
            func = self._cache.add
        elif self._cache.get(self.cache_key) is not None:
            func = self._cache.set
        else:
            # Key vanished between load and save: let callers know.
            raise UpdateError
        result = func(self.cache_key,
                      self._get_session(no_load=must_create),
                      self.get_expiry_age())
        if must_create and not result:
            raise CreateError
    def exists(self, session_key):
        return session_key and (self.cache_key_prefix + session_key) in self._cache
    def delete(self, session_key=None):
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        self._cache.delete(self.cache_key_prefix + session_key)
    @classmethod
    def clear_expired(cls):
        # The cache backend expires entries itself; nothing to do here.
        pass
| bsd-3-clause |
loxdegio/GT_S7500_LoxKernel_trebon | GT-S7500_Kernel/tools/perf/scripts/python/failed-syscalls-by-pid.py | 11180 | 2058 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
# Optional filter: restrict the report to one comm name or one pid.
for_comm = None
for_pid = None
if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    # A numeric argument is treated as a pid, anything else as a comm name.
    try:
        for_pid = int(sys.argv[1])
    except:
        for_comm = sys.argv[1]
# comm -> pid -> syscall id -> errno -> count, auto-vivified.
syscalls = autodict()
# Called by perf when the script starts.
def trace_begin():
    print "Press control+C to stop and show the summary"
# Called by perf when tracing ends; emit the summary report.
def trace_end():
    print_error_totals()
# perf event handler: invoked for every raw_syscalls:sys_exit event.
# Only failed syscalls (negative return value) are counted.
def raw_syscalls__sys_exit(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, ret):
    # Skip events that don't match the optional comm/pid filter.
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid ):
        return
    if ret < 0:
        # First hit for a key raises TypeError from the autodict leaf;
        # initialise the counter in that case.
        try:
            syscalls[common_comm][common_pid][id][ret] += 1
        except TypeError:
            syscalls[common_comm][common_pid][id][ret] = 1
# Print the per-comm/per-pid/per-syscall error counts, most frequent first.
def print_error_totals():
    if for_comm is not None:
        print "\nsyscall errors for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall errors:\n\n",
    print "%-30s %10s\n" % ("comm [pid]", "count"),
    print "%-30s %10s\n" % ("------------------------------", \
        "----------"),
    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            for id in id_keys:
                print "  syscall: %-16s\n" % syscall_name(id),
                ret_keys = syscalls[comm][pid][id].keys()
                # Sort errno counts descending so the commonest error leads.
                for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
                    print "    err = %-20s  %10d\n" % (strerror(ret), val),
| gpl-2.0 |
emilybache/KataMedicineClash | SampleVisualization/test_patient.py | 1 | 2779 | from datetime import date, timedelta
from patient import Patient, Prescription
# to run these tests, use py.test (http://pytest.org/)
class TestPatient:
    """Tests for Patient.clash() and Patient.days_taking() (medicine-clash
    kata): clash() returns the set of dates on which all the named
    medications were being taken simultaneously."""
    def test_clash_with_no_prescriptions(self):
        patient = Patient(prescriptions=[])
        assert patient.clash([]) == set()
    def test_clash_with_one_irrelevant_prescription(self):
        # A prescription for a different medicine never clashes.
        patient = Patient(prescriptions=[Prescription("Paracetamol", dispense_date = date.today() - timedelta(days=2), days_supply=2)])
        assert patient.clash(["Aspirin"]) == set()
    def test_clash_with_one_prescription(self):
        patient = Patient(prescriptions=[Prescription("Paracetamol", dispense_date = date.today() - timedelta(days=2), days_supply=2)])
        assert patient.clash(["Paracetamol"]) == set([date.today() - timedelta(days=2), date.today() - timedelta(days=1)])
    def test_clash_with_two_different_prescriptions(self):
        patient = Patient(prescriptions=[Prescription("Paracetamol", dispense_date = date.today() - timedelta(days=2), days_supply=2),
                                         Prescription("Aspirin", dispense_date = date.today() - timedelta(days=2), days_supply=2)])
        assert patient.clash(["Paracetamol", "Aspirin"]) == set([date.today() - timedelta(days=2), date.today() - timedelta(days=1)])
    def test_clash_with_two_prescriptions_for_same_medication(self):
        # Overlapping supplies of the same medicine merge into one date range.
        patient = Patient(prescriptions=[Prescription("Paracetamol", dispense_date = date.today() - timedelta(days=2), days_supply=2),
                                         Prescription("Paracetamol", dispense_date = date.today() - timedelta(days=3), days_supply=2)])
        assert patient.clash(["Paracetamol"]) == set([date.today() - timedelta(days=3),
                                                      date.today() - timedelta(days=2),
                                                      date.today() - timedelta(days=1)])
    def test_days_taking_for_irrelevant_prescription(self):
        patient = Patient(prescriptions=[Prescription("Paracetamol", dispense_date = date.today() - timedelta(days=2), days_supply=2)])
        assert patient.days_taking("Aspirin") == set()
    def test_days_taking(self):
        patient = Patient(prescriptions=[Prescription("Paracetamol", dispense_date = date.today() - timedelta(days=2), days_supply=2),
                                         Prescription("Paracetamol", dispense_date = date.today() - timedelta(days=3), days_supply=2)])
        assert patient.days_taking("Paracetamol") == set([date.today() - timedelta(days=3),
                                                          date.today() - timedelta(days=2),
                                                          date.today() - timedelta(days=1)])
| mit |
kesuki/pysmali | pysmali.py | 1 | 7282 | #!/usr/bin/python
import os
import os.path
import xml.dom.minidom
var1 = '/home/mak/tmp/apktool/smali'
print var1+'/android'
manifest = '/home/mak/tmp/apktool/AndroidManifest.xml'
ShellHelper = '/home/mak/AndroidStudioProjects/StubShell/app/src/main/java/com/ceprei/stubshell/ShellHelper.java'
tranfile = '/home/mak/work/tools/pysmali/tranfile'
#packagename = ''
smalicode1 = '.method static constructor <clinit>()V\n'
smalicode2 = ' .locals 2\n'
#smalicode3 = ' const-string v0, ' + packagename + '\n\n'
smalicode3 = ' const-string v0, '
smalicode4 = ' const/16 v1, '
smalicode5 = ' invoke-static {v0, v1}, Lcom/ceprei/stubshell/ShellHelper;->StartShell(Ljava/lang/String;I)Z\n\n'
smalicode6 = ' return-void\n'
smalicode7 = '.end method\n\n'
class PyAddSmaliCode():
    """Patches decompiled smali sources (apktool output) to invoke a stub
    'shell' at class-init time, and records which classes were patched in
    `tranfile`. Python 2 code; paths are the module-level constants above.

    NOTE(review): heavily order-dependent list-index manipulation; the
    comments below describe observed behaviour only."""
    def __init__(self):
        """constructor"""
    def EditShellHelper(self, packagename):
        # Rewrite getPackageName() in the Java stub so it returns the target
        # APK's package name (packagename already includes quotes).
        fp = file(ShellHelper)
        lines = []
        for line in fp:
            lines.append(line)
        fp.close()
        for num in range(0, len(lines)):
            if (lines[num].find('public static String getPackageName()') != -1):
                lines[num] = '    public static String getPackageName() { return ' + packagename + '; }\n'
                #print lines[num]
                break
        s = ''.join(lines)
        fp = file(ShellHelper, 'w')
        fp.write(s)
        fp.close()
    def findPackageName(self):
        # Read the package attribute from AndroidManifest.xml and pass it,
        # wrapped in double quotes, to EditShellHelper().
        dom = xml.dom.minidom.parse(manifest)
        root = dom.documentElement
        #print root.nodeName
        package = root.getAttribute("package")
        packagename = '"' + package + '"'
        print packagename
        self.EditShellHelper(packagename)
        return packagename
    def EditSmaliCode(self, ospath, start, length, packagename):
        """Insert `length` StartShell() calls into the smali file at ospath,
        numbered from `start`; returns 0 on success, -1 on failure."""
        tranfilefp = file(tranfile)
        tranfilelines = []
        for tranfileline in tranfilefp:
            tranfilelines.append(tranfileline)
        tranfilefp.close()
        fp = file(ospath)
        lines = []
        for line in fp:
            lines.append(line)
        fp.close()
        #tranfilelines.append('%d' % start + '!' + '%d' % length + '!' + lines[0][14:])
        #tranfilefp.write('%d' % start + '!' + '%d' % length + '!' + lines[0][14:])
        tmplines = ''.join(lines)
        if tmplines.find('# direct methods') == -1:
            print "did not find method, exit!" + ospath
            return -1
        index = lines.index('# direct methods\n')
        # Case 1: the class already has a static constructor <clinit> --
        # splice the StartShell() calls into its body after .locals.
        if lines[index+1].find('.method static constructor <clinit>()V') != -1:
            index = index+1
            if (lines[index+1].find('\n') == 0):
                index = index + 1
            #print lines[index+1]
            #print lines[index+1].find('.locals')
            if (lines[index+1].find('.locals ') != -1):
                index = index + 1;
                #print int(lines[index][12:])
                # Need at least two registers for the injected code.
                if int(lines[index][12:]) < 2:
                    lines[index] = smalicode2
                if (lines[index+1].find('\n') == 0):
                    index = index + 1
                for num in range(0, length):
                    index = index + 1
                    lines.insert(index, smalicode3 + packagename + '\n\n')
                    index = index + 1
                    lines.insert(index, smalicode4 + hex(start + num) + '\n\n') #no padding 0
                    index = index + 1
                    lines.insert(index, smalicode5)
            else:
                print "EditSmaliCode false in " + ospath
                return -1
        # Case 2: no <clinit> yet -- synthesize a complete one.
        else:
            index = index + 1
            lines.insert(index, smalicode1)
            index = index + 1
            lines.insert(index, smalicode2 + '\n')
            for num in range(0, length):
                index = index + 1
                lines.insert(index, smalicode3 + packagename + '\n\n')
                index = index + 1
                lines.insert(index, smalicode4 + hex(start + num) + '\n\n') #no padding 0
                index = index + 1
                lines.insert(index, smalicode5)
            index = index + 1
            lines.insert(index, smalicode6)
            index = index + 1
            lines.insert(index, smalicode7)
        # Record each constructor of this class in the tranfile, keyed by the
        # running shell number; fail unless `length` methods are found.
        foundmethod = 0
        for num in range(index, len(lines)):
            if (lines[num].find('.method') != -1):
                if (lines[num].find('<init>') != -1):
                    tranfilelines.append('%d' % (start + foundmethod) + '!' + lines[0][lines[0].rindex(" ")+1:])
                    tranfilelines.append("<init>\n")
                    #tranfilefp.write("init\n")
                # else:
                #     if (lines[num].find('(') != -1):
                #         lindex = lines[num][:lines[num].find('(')].rindex(' ') + 1
                #         rindex = lines[num].find('(')
                #         tranfilelines.append('%d' % (start + foundmethod) + '!' + lines[0][lines[0].rindex(" ")+1:])
                #         tranfilelines.append(lines[num][lindex:rindex] + '\n')
                #         #tranfilefp.write(lines[num][lindex:rindex] + '\n')
                #     else:
                #         print "EditSmaliCode-findmethod false in " + ospath
                foundmethod = foundmethod + 1
                if foundmethod == length:
                    break;
        if foundmethod != length:
            print "did not find method, exit!" + ospath
            return -1
        s = ''.join(lines)
        fp = file(ospath, 'w')
        fp.write(s)
        fp.close()
        tranfiles = ''.join(tranfilelines)
        tranfilefp = file(tranfile, 'w')
        tranfilefp.write(tranfiles)
        tranfilefp.close()
        return 0
    def findpath(self, packagename):
        """Walk the smali tree, skipping android/ceprei/spongycastle dirs,
        and patch files larger than half the average file size."""
        size = 0L
        count = 0
        # First pass: compute the average file size of eligible smali files.
        for root, dirs, files in os.walk(var1):
            for name in files:
                if (root.find(var1+'/android') == -1) and (root.find('ceprei') == -1) and (root.find('spongycastle') == -1):
                    count = count + 1
                    size = size + os.path.getsize(os.path.join(root, name))
                    #print root + '/' + name
        perfile = size/count
        #print count
        #print size
        count = 0
        # Second pass: patch the larger-than-average files, numbering the
        # injected shells consecutively with `count`.
        for root, dirs, files in os.walk(var1):
            for name in files:
                if (root.find(var1+'/android') == -1) and (root.find('ceprei') == -1) and (root.find('spongycastle') == -1):
                    i = os.path.getsize(os.path.join(root, name))/perfile/2 - 1
                    #print i
                    if (i > 0):
                        if (i > 2):
                            i = 2
                        print root + '/' + name
                        #string = root + '/' + name
                        i = 1
                        result = self.EditSmaliCode(root + '/' + name, count, i, packagename)
                        print result
                        if (result == 0):
                            count = count + i
                            print count
        #print string
        #self.EditSmaliCode('/home/mak/tmp/apktool/smali/com/tencent/mm/sdk/platformtools/LBSManager.smali', 300, 5)
# Script entry point: resolve the package name, then patch the smali tree.
if __name__ == "__main__":
    t = PyAddSmaliCode()
    packagename = t.findPackageName()
    t.findpath(packagename)
| apache-2.0 |
camptocamp/ngo-addons-backport | addons/account/wizard/account_journal_select.py | 56 | 2093 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class account_journal_select(osv.osv_memory):
    """
    Account Journal Select
    """
    _name = "account.journal.select"
    _description = "Account Journal Select"
    def action_open_window(self, cr, uid, ids, context=None):
        """Open the journal-items action filtered on the journal/period of
        the selected account_journal_period (context['active_id'])."""
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')
        if context is None:
            context = {}
        result = mod_obj.get_object_reference(cr, uid, 'account', 'action_move_line_select')
        id = result and result[1] or False
        result = act_obj.read(cr, uid, [id])[0]
        cr.execute('select journal_id, period_id from account_journal_period where id=%s', (context['active_id'],))
        res = cr.fetchone()
        if res:
            # Restrict the action's domain/context to the selected
            # journal and period.
            journal_id, period_id = res
            result['domain'] = str([('journal_id', '=', journal_id), ('period_id', '=', period_id)])
            result['context'] = str({'journal_id': journal_id, 'period_id': period_id})
        return result
account_journal_select()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hcleon/zxingExtend | cpp/scons/scons-local-2.0.0.final.0/SCons/Tool/c++.py | 34 | 3380 | """SCons.Tool.c++
Tool-specific initialization for generic Posix C++ compilers.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/c++.py 5023 2010/06/14 22:05:46 scons"
import os.path
import SCons.Tool
import SCons.Defaults
import SCons.Util
# Candidate compiler executables probed by exists(), in preference order.
compilers = ['CC', 'c++']
# File suffixes recognised as C++ source files.
CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++', '.mm']
if SCons.Util.case_sensitive_suffixes('.c', '.C'):
    # On case-sensitive filesystems '.C' conventionally means C++, not C.
    CXXSuffixes.append('.C')
def iscplusplus(source):
    """Return 1 if any node in *source* is built from a C++ source file.

    *source* might be None for unusual cases like SConf, in which case
    the answer is 0.
    """
    if not source:
        return 0
    for node in source:
        if not node.sources:
            continue
        _, ext = os.path.splitext(str(node.sources[0]))
        if ext in CXXSuffixes:
            return 1
    return 0
def generate(env):
    """
    Add Builders and construction variables for generic POSIX C++ compilers
    to an Environment.
    """
    # NOTE(review): the original docstring said "Visual Age C++ compilers" —
    # apparently a copy/paste from the AIX tool.  This module configures the
    # plain 'c++' driver (see env['CXX'] below and the module docstring).
    import SCons.Tool
    import SCons.Tool.cc
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
    # Register C++ compile actions and object emitters for every recognised
    # C++ suffix, on both the static and shared object builders.
    for suffix in CXXSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.CXXAction)
        shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
        shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
    # Pull in the construction variables shared with the plain C tool.
    SCons.Tool.cc.add_common_cc_variables(env)
    env['CXX'] = 'c++'
    env['CXXFLAGS'] = SCons.Util.CLVar('')
    env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
    # Shared-object compilation defaults to the same compiler and flags.
    env['SHCXX'] = '$CXX'
    env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
    env['SHCXXCOM'] = '$SHCXX -o $TARGET -c $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'
    # Preprocessor-define and include-path command-line affixes.
    env['CPPDEFPREFIX'] = '-D'
    env['CPPDEFSUFFIX'] = ''
    env['INCPREFIX'] = '-I'
    env['INCSUFFIX'] = ''
    env['SHOBJSUFFIX'] = '.os'
    env['OBJSUFFIX'] = '.o'
    env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
    # Suffix used when C++ source files are generated (e.g. by lex/yacc).
    env['CXXFILESUFFIX'] = '.cc'
def exists(env):
    """Return a truthy value when one of the candidate C++ compilers
    (see `compilers`) can be detected on this system."""
    return env.Detect(compilers)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
moijes12/oh-mainline | vendor/packages/PyYaml/lib/yaml/tokens.py | 985 | 2573 |
class Token(object):
    """Base class for all scanner tokens.

    Every token records the start/end marks of the text span it was
    scanned from.  The repr shows every *other* attribute a subclass
    added, sorted by name, so marks never clutter debug output.
    """
    def __init__(self, start_mark, end_mark):
        self.start_mark = start_mark
        self.end_mark = end_mark
    def __repr__(self):
        shown = sorted(key for key in self.__dict__
                       if not key.endswith('_mark'))
        body = ', '.join('%s=%r' % (key, getattr(self, key))
                         for key in shown)
        return '%s(%s)' % (self.__class__.__name__, body)
#class BOMToken(Token):
#    id = '<byte order mark>'
# Each concrete token carries a class-level `id` string identifying the
# token kind; most structural tokens need nothing beyond the marks that
# the Token base class stores.
class DirectiveToken(Token):
    # A %-directive: carries the directive name and its value.
    id = '<directive>'
    def __init__(self, name, value, start_mark, end_mark):
        self.name = name
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
class DocumentStartToken(Token):
    id = '<document start>'
class DocumentEndToken(Token):
    id = '<document end>'
class StreamStartToken(Token):
    # Emitted at the very beginning of a stream; remembers the encoding.
    # Marks default to None since there may be no text scanned yet.
    id = '<stream start>'
    def __init__(self, start_mark=None, end_mark=None,
            encoding=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.encoding = encoding
class StreamEndToken(Token):
    id = '<stream end>'
# Block-style structural tokens.
class BlockSequenceStartToken(Token):
    id = '<block sequence start>'
class BlockMappingStartToken(Token):
    id = '<block mapping start>'
class BlockEndToken(Token):
    id = '<block end>'
# Flow-style structural tokens; their ids are the literal characters.
class FlowSequenceStartToken(Token):
    id = '['
class FlowMappingStartToken(Token):
    id = '{'
class FlowSequenceEndToken(Token):
    id = ']'
class FlowMappingEndToken(Token):
    id = '}'
class KeyToken(Token):
    id = '?'
class ValueToken(Token):
    id = ':'
class BlockEntryToken(Token):
    id = '-'
class FlowEntryToken(Token):
    id = ','
# The tokens below carry a scanned value in addition to their marks.
class AliasToken(Token):
    # An alias reference (*name).
    id = '<alias>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
class AnchorToken(Token):
    # An anchor definition (&name).
    id = '<anchor>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
class TagToken(Token):
    id = '<tag>'
    def __init__(self, value, start_mark, end_mark):
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
class ScalarToken(Token):
    # `plain` marks an unquoted scalar; `style` is the scalar style
    # indicator when one applies (presumably set by the scanner — see
    # the scanner module for the exact values; None otherwise).
    id = '<scalar>'
    def __init__(self, value, plain, start_mark, end_mark, style=None):
        self.value = value
        self.plain = plain
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style
| agpl-3.0 |
naresh21/synergetics-edx-platform | common/lib/xmodule/xmodule/partitions/partitions.py | 59 | 7569 | """Defines ``Group`` and ``UserPartition`` models for partitioning"""
from collections import namedtuple
from stevedore.extension import ExtensionManager
# We use ``id`` in this file as the IDs of our Groups and UserPartitions,
# which Pylint disapproves of.
# pylint: disable=redefined-builtin
# Exception hierarchy: every partition-related error derives from
# UserPartitionError, so callers can catch the whole family at once.
class UserPartitionError(Exception):
    """
    Base Exception for when an error was found regarding user partitions.
    """
    pass
class NoSuchUserPartitionError(UserPartitionError):
    """
    Exception to be raised when looking up a UserPartition by its ID fails.
    """
    pass
class NoSuchUserPartitionGroupError(UserPartitionError):
    """
    Exception to be raised when looking up a UserPartition Group by its ID fails.
    """
    pass
class Group(namedtuple("Group", "id name")):
    """
    A (numeric id, display name) pair describing one group of students.
    Ids are expected to be unique within the UserPartition that contains
    this group.
    """
    # Serialization format version; stored with course content so old
    # payloads can be recognised when deserializing.
    VERSION = 1
    def __new__(cls, id, name):
        # Coerce the id so Group("3", n) and Group(3, n) compare equal.
        return super(Group, cls).__new__(cls, int(id), name)
    def to_json(self):
        """Return a json-serializable dict of this group's properties."""
        serialized = {
            "id": self.id,
            "name": self.name,
            "version": Group.VERSION
        }
        return serialized
    @staticmethod
    def from_json(value):
        """Build a Group from its json-like dict form.

        Args:
            value: a dictionary with keys for the properties of the group.
        Raises TypeError when a required key is absent or the version is
        not the one this code writes.
        """
        if isinstance(value, Group):
            return value
        for required in ("id", "name", "version"):
            if required in value:
                continue
            raise TypeError("Group dict {0} missing value key '{1}'".format(
                value, required))
        if value["version"] != Group.VERSION:
            raise TypeError("Group dict {0} has unexpected version".format(
                value))
        return Group(value["id"], value["name"])
# The Stevedore extension point namespace for user partition scheme plugins.
# UserPartition.get_scheme() looks schemes up under this entry-point name.
USER_PARTITION_SCHEME_NAMESPACE = 'openedx.user_partition_scheme'
class UserPartition(namedtuple("UserPartition", "id name description groups scheme parameters active")):
    """A named way to partition users into groups, primarily intended for
    running experiments. It is expected that each user will be in at most one
    group in a partition.
    A Partition has an id, name, scheme, description, parameters, and a list
    of groups. The id is intended to be unique within the context where these
    are used. (e.g., for partitions of users within a course, the ids should
    be unique per-course). The scheme is used to assign users into groups.
    The parameters field is used to save extra parameters e.g., location of
    the block in case of VerificationPartitionScheme.
    Partitions can be marked as inactive by setting the "active" flag to False.
    Any group access rule referencing inactive partitions will be ignored
    when performing access checks.
    """
    # Current serialization format version (see from_json for upgrades).
    VERSION = 3
    # The collection of user partition scheme extensions (lazily created
    # ExtensionManager; shared by all partitions).
    scheme_extensions = None
    # The default scheme to be used when upgrading version 1 partitions.
    VERSION_1_SCHEME = "random"
    def __new__(cls, id, name, description, groups, scheme=None, parameters=None, active=True, scheme_id=VERSION_1_SCHEME):  # pylint: disable=line-too-long
        # Resolve the scheme from its id when no scheme object was given.
        if not scheme:
            scheme = UserPartition.get_scheme(scheme_id)
        if parameters is None:
            parameters = {}
        return super(UserPartition, cls).__new__(cls, int(id), name, description, groups, scheme, parameters, active)
    @staticmethod
    def get_scheme(name):
        """
        Returns the user partition scheme with the given name.

        Raises UserPartitionError when no plugin is registered under
        that name in the USER_PARTITION_SCHEME_NAMESPACE entry point.
        """
        # Note: we're creating the extension manager lazily to ensure that the Python path
        # has been correctly set up. Trying to create this statically will fail, unfortunately.
        if not UserPartition.scheme_extensions:
            UserPartition.scheme_extensions = ExtensionManager(namespace=USER_PARTITION_SCHEME_NAMESPACE)
        try:
            scheme = UserPartition.scheme_extensions[name].plugin
        except KeyError:
            raise UserPartitionError("Unrecognized scheme {0}".format(name))
        scheme.name = name
        return scheme
    def to_json(self):
        """
        'Serialize' to a json-serializable representation.
        Returns:
            a dictionary with keys for the properties of the partition.
        """
        return {
            "id": self.id,
            "name": self.name,
            "scheme": self.scheme.name,
            "description": self.description,
            "parameters": self.parameters,
            "groups": [g.to_json() for g in self.groups],
            "active": bool(self.active),
            "version": UserPartition.VERSION
        }
    @staticmethod
    def from_json(value):
        """
        Deserialize a UserPartition from a json-like representation.
        Args:
            value: a dictionary with keys for the properties of the partition.
        Raises TypeError if the value doesn't have the right keys.
        """
        if isinstance(value, UserPartition):
            return value
        for key in ("id", "name", "description", "version", "groups"):
            if key not in value:
                raise TypeError("UserPartition dict {0} missing value key '{1}'".format(value, key))
        if value["version"] == 1:
            # If no scheme was provided, set it to the default ('random')
            scheme_id = UserPartition.VERSION_1_SCHEME
        # Version changes should be backwards compatible in case the code
        # gets rolled back. If we see a version number greater than the current
        # version, we should try to read it rather than raising an exception.
        elif value["version"] >= 2:
            if "scheme" not in value:
                raise TypeError("UserPartition dict {0} missing value key 'scheme'".format(value))
            scheme_id = value["scheme"]
        else:
            raise TypeError("UserPartition dict {0} has unexpected version".format(value))
        # "parameters" and "active" were added after version 1; default
        # them for old payloads.
        parameters = value.get("parameters", {})
        active = value.get("active", True)
        groups = [Group.from_json(g) for g in value["groups"]]
        scheme = UserPartition.get_scheme(scheme_id)
        if not scheme:
            raise TypeError("UserPartition dict {0} has unrecognized scheme {1}".format(value, scheme_id))
        return UserPartition(
            value["id"],
            value["name"],
            value["description"],
            groups,
            scheme,
            parameters,
            active,
        )
    def get_group(self, group_id):
        """
        Returns the group with the specified id.
        Arguments:
            group_id (int): ID of the partition group.
        Raises:
            NoSuchUserPartitionGroupError: The specified group could not be found.
        """
        for group in self.groups:
            if group.id == group_id:
                return group
        raise NoSuchUserPartitionGroupError(
            "could not find a Group with ID [{}] in UserPartition [{}]".format(group_id, self.id)
        )
| agpl-3.0 |
sabi0/intellij-community | python/lib/Lib/wsgiref/simple_server.py | 104 | 4789 | """BaseHTTPServer that implements the Python WSGI protocol (PEP 333, rev 1.21)
This is both an example of how WSGI can be implemented, and a basis for running
simple web applications on a local machine, such as might be done when testing
or debugging an application. It has not been reviewed for security issues,
however, and we strongly recommend that you use a "real" web server for
production use.
For example usage, see the 'if __name__=="__main__"' block at the end of the
module. See also the BaseHTTPServer module docs for other API information.
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import urllib, sys
from wsgiref.handlers import SimpleHandler
__version__ = "0.1"
__all__ = ['WSGIServer', 'WSGIRequestHandler', 'demo_app', 'make_server']
server_version = "WSGIServer/" + __version__
sys_version = "Python/" + sys.version.split()[0]
software_version = server_version + ' ' + sys_version
class ServerHandler(SimpleHandler):
    """WSGI handler that logs each completed request on close."""
    server_software = software_version
    def close(self):
        # Log the request (numeric status + bytes sent); the finally makes
        # sure the underlying SimpleHandler is closed even if logging raises.
        try:
            self.request_handler.log_request(
                self.status.split(' ',1)[0], self.bytes_sent
            )
        finally:
            SimpleHandler.close(self)
class WSGIServer(HTTPServer):
    """BaseHTTPServer that implements the Python WSGI protocol"""
    # The WSGI application callable; set via set_app() before serving.
    application = None
    def server_bind(self):
        """Override server_bind to store the server name."""
        HTTPServer.server_bind(self)
        self.setup_environ()
    def setup_environ(self):
        """Build the per-server part of the WSGI environ dictionary.

        Request handlers copy this base dict and add per-request keys.
        """
        # Set up base environment
        env = self.base_environ = {}
        env['SERVER_NAME'] = self.server_name
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['SERVER_PORT'] = str(self.server_port)
        env['REMOTE_HOST']=''
        env['CONTENT_LENGTH']=''
        env['SCRIPT_NAME'] = ''
    def get_app(self):
        """Return the WSGI application callable currently installed."""
        return self.application
    def set_app(self,application):
        """Install the WSGI application callable to be served."""
        self.application = application
class WSGIRequestHandler(BaseHTTPRequestHandler):
    """Request handler that turns each HTTP request into a WSGI call."""
    server_version = "WSGIServer/" + __version__
    def get_environ(self):
        """Build the full WSGI environ for the current request, starting
        from the server's base_environ."""
        env = self.server.base_environ.copy()
        env['SERVER_PROTOCOL'] = self.request_version
        env['REQUEST_METHOD'] = self.command
        # Split the request path into path and query string.
        if '?' in self.path:
            path,query = self.path.split('?',1)
        else:
            path,query = self.path,''
        env['PATH_INFO'] = urllib.unquote(path)
        env['QUERY_STRING'] = query
        # Only report REMOTE_HOST when a reverse lookup produced a name
        # different from the raw address.
        host = self.address_string()
        if host != self.client_address[0]:
            env['REMOTE_HOST'] = host
        env['REMOTE_ADDR'] = self.client_address[0]
        # Python 2 mimetools message: typeheader is the raw Content-Type
        # header, type is the parsed/defaulted value.
        if self.headers.typeheader is None:
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader
        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        # Copy the remaining headers as HTTP_* variables.
        for h in self.headers.headers:
            k,v = h.split(':',1)
            k=k.replace('-','_').upper(); v=v.strip()
            if k in env:
                continue                    # skip content length, type,etc.
            if 'HTTP_'+k in env:
                env['HTTP_'+k] += ','+v     # comma-separate multiple headers
            else:
                env['HTTP_'+k] = v
        return env
    def get_stderr(self):
        """Stream used for wsgi.errors; defaults to the process stderr."""
        return sys.stderr
    def handle(self):
        """Handle a single HTTP request"""
        self.raw_requestline = self.rfile.readline()
        if not self.parse_request(): # An error code has been sent, just exit
            return
        handler = ServerHandler(
            self.rfile, self.wfile, self.get_stderr(), self.get_environ()
        )
        handler.request_handler = self      # backpointer for logging
        handler.run(self.server.get_app())
def demo_app(environ,start_response):
    """Tiny demo WSGI app: respond with 'Hello world!' followed by the
    environ dictionary, one sorted key = repr(value) pair per line."""
    from StringIO import StringIO
    stdout = StringIO()
    print >>stdout, "Hello world!"
    print >>stdout
    h = environ.items(); h.sort()
    for k,v in h:
        # Backticks are the Python 2 repr() shorthand.
        print >>stdout, k,'=',`v`
    start_response("200 OK", [('Content-Type','text/plain')])
    return [stdout.getvalue()]
def make_server(
    host, port, app, server_class=WSGIServer, handler_class=WSGIRequestHandler
):
    """Create a new WSGI server listening on `host` and `port` for `app`.

    The returned server is bound but not yet serving; call
    handle_request() or serve_forever() on it.
    """
    server = server_class((host, port), handler_class)
    server.set_app(app)
    return server
# Demo: serve demo_app on port 8000, point a browser at it, handle exactly
# one request, then exit.
if __name__ == '__main__':
    httpd = make_server('', 8000, demo_app)
    sa = httpd.socket.getsockname()
    print "Serving HTTP on", sa[0], "port", sa[1], "..."
    import webbrowser
    webbrowser.open('http://localhost:8000/xyz?abc')
    httpd.handle_request()  # serve one request, then exit
#
| apache-2.0 |
milogert/ddnsc | org.py | 1 | 20365 | import tkinter as tk
import tkinter.font as tkFont
import tkinter.ttk as ttk
import tkinter.messagebox as tkMessageBox
import pymysql.cursors
## Model Class ###############################################################
class Model:
    """
    Persistence and network layer for the DDNS client: wraps the MySQL
    `ddns__credentials` table and the HTTP requests that push IP updates
    to DNS providers.

    Right now the only options for variables in the url are:
    {domain}
    {subdomain}
    {ip}
    {extras}
    More will come in the future.
    """
    def __init__(self):
        """Open the database connection and reset the edited flag."""
        # Template used when emailing update failures (see _manageResponse).
        self._myEmail = {
            "text": "Error in update DDNS.\n\nError is: {}",
            "html": "Error in updating DDNS.\n\nError is: <pre>{}</pre>",
            "subject": "DDNS Update Error: {}",
            "from": "server@milogert.com",
            "pass": "servergothacked",
            "to": "milo@milogert.com"
        }
        # SECURITY NOTE(review): database and email credentials are
        # hard-coded in source; move them to a config file or environment.
        self.myDbConn = pymysql.connect(
            host='milogert.com',
            user='milo',
            passwd='locatetimefarduck',
            db='personal',
            charset='utf8mb4',
            cursorclass=pymysql.cursors.DictCursor
        )
        # Set the edited status as false. This is for reloading rows.
        self.myEdited = False
    def __del__(self):
        """Destructor: close the DB connection if it was ever opened."""
        # __del__ also runs when __init__ died part-way (e.g. connect()
        # raised), in which case myDbConn was never assigned.
        if getattr(self, 'myDbConn', None) is not None:
            self.myDbConn.close()
    def setEdited(self, theBool):
        """Method to tell the view if the model has been edited or not."""
        self.myEdited = theBool
    def getEdited(self):
        """Get the edited status of the model."""
        return self.myEdited
    def getRows(self):
        """Return every credential row as a list of dicts."""
        try:
            with self.myDbConn.cursor() as cursor:
                sql = "SELECT * FROM `ddns__credentials`"
                cursor.execute(sql)
                result = cursor.fetchall()
                return result
        except ValueError:
            # NOTE(review): pymysql raises its own exception classes, not
            # ValueError, so this handler is effectively dead code. TODO
            pass
    def getRow(self, theKey):
        """Return the row whose subdomain equals theKey, or None."""
        try:
            with self.myDbConn.cursor() as cursor:
                sql = "SELECT * FROM `ddns__credentials` where subdomain = %s"
                # Pass a real 1-tuple: `(theKey)` is just a parenthesised
                # scalar.
                cursor.execute(sql, (theKey,))
                result = cursor.fetchone()
                return result
        except ValueError:
            # NOTE(review): dead handler, see getRows(). TODO
            pass
    def addEntry(
        self,
        theSub,
        theDomain,
        theIp,
        theProvider,
        theUsername,
        thePassword,
        theApi,
        theExtras
    ):
        """Insert a new credential row and commit."""
        try:
            with self.myDbConn.cursor() as cursor:
                sql = """
                INSERT
                INTO `ddns__credentials`
                (`subdomain`, `domain`, `ip`, `provider`, `username`, `password`, `api`, `extras`)
                VALUES
                (%s, %s, %s, %s, %s, %s, %s, %s)
                """
                cursor.execute(sql, (
                    theSub,
                    theDomain,
                    theIp,
                    theProvider,
                    theUsername,
                    thePassword,
                    theApi,
                    theExtras
                ))
                self.myDbConn.commit()
        except ValueError:
            # NOTE(review): dead handler, see getRows(). TODO
            pass
    def updateEntry(
        self,
        theId,
        theSub,
        theDomain,
        theIp,
        theProvider,
        theUsername,
        thePassword,
        theApi,
        theExtras
    ):
        """Update the row with id `theId` with the given values and commit."""
        try:
            with self.myDbConn.cursor() as cursor:
                sql = """
                UPDATE `ddns__credentials`
                SET
                    `subdomain`=%s,
                    `domain`=%s,
                    `ip`=%s,
                    `provider`=%s,
                    `username`=%s,
                    `password`=%s,
                    `api`=%s,
                    `extras`=%s
                WHERE `id`=%s
                """
                cursor.execute(sql, (
                    theSub,
                    theDomain,
                    theIp,
                    theProvider,
                    theUsername,
                    thePassword,
                    theApi,
                    theExtras,
                    theId
                ))
                self.myDbConn.commit()
        except ValueError:
            # NOTE(review): dead handler, see getRows(). TODO
            pass
    def deleteEntries(self, theIdList):
        """Method to delete entries in the database."""
        try:
            with self.myDbConn.cursor() as cursor:
                sql = "DELETE FROM `ddns__credentials` WHERE id in (%s)"
                # Expand the single %s into one placeholder per id, so the
                # values themselves are still bound by the driver.
                s = ', '.join(list(map(lambda x: '%s', theIdList)))
                sql = sql % s
                print(sql)
                print(theIdList)
                print(sql % tuple(theIdList))
                cursor.execute(sql, (theIdList))
                self.myDbConn.commit()
        except ValueError:
            # NOTE(review): dead handler, see getRows(). TODO
            pass
    def setupUrl(self, theRows):
        """For each row whose stored IP differs from the current public
        IP, build the provider URL and push an update."""
        for aRow in theRows:
            # Only update if the current ip is different from the stored one.
            if aRow["ip"] != self._findIp():
                # Fill the {domain}/{subdomain}/{ip}/{extras} placeholders
                # from the row's own columns.
                aUrl = aRow["api"].format(**aRow)
                print("Setup url as: ", aUrl)
                # Update the ip with the url.  (Fixed: the original body
                # referenced undefined names `theUrl`, `updateIp`,
                # `manageResponse` and `findIp`.)
                aPage = self._updateIp(aUrl, aRow["username"], aRow["password"])
                # Do something with the response.
                self._manageResponse(aPage)
    @staticmethod
    def _updateIp(theUrl, theUser, thePass):
        """GET theUrl (with optional HTTP basic auth) and return the
        response body as text."""
        # Local imports keep the helper self-contained; the original used
        # the Python 2 urllib2/base64.encodestring API in a Python 3 file.
        import base64
        import urllib.request
        # Setup the request header.
        request = urllib.request.Request(theUrl)
        # Some providers reject requests without a User-Agent.
        userAgent = "Python-urllib/2.6"
        request.add_header("User-Agent", userAgent)
        # Username and password, if present.
        if theUser != "" and thePass != "":
            creds = '%s:%s' % (theUser, thePass)
            base64string = base64.encodebytes(creds.encode('utf-8')).decode('ascii').replace('\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)
        # Make the request and return the decoded page so the caller can
        # do substring checks on it.
        response = urllib.request.urlopen(request)
        return response.read().decode('utf-8', 'replace')
    @staticmethod
    def _manageResponse(theResp):
        """Classify the provider's response as success, failure or unknown."""
        good = ["good", "nochg"]
        bad = ["nohost", "badauth", "notfqdn", "badagent", "abuse", "911"]
        # Do stuff with the results.
        if any(s in theResp for s in good):
            # TODO: persist the new ip/response back to ddns__credentials
            # (the original carried commented-out UPDATE code here).
            print("We are good.")
            print(theResp)
        elif any(s in theResp for s in bad):
            # Substring test, matching the success branch: providers append
            # extra text (e.g. the IP) after the status code.
            # TODO: email the failure using the self._myEmail template (the
            # original carried commented-out smtplib code here).
            print("no good")
        else:
            # Log that this should never happen.
            print("What happened?")
    @staticmethod
    def _findIp():
        """Return this machine's public IP as reported by checkip.dyndns.org."""
        import re
        import urllib.request
        page = urllib.request.urlopen("http://checkip.dyndns.org").read().decode('utf-8', 'replace')
        return re.findall(r"\d{1,3}\.\d{1,3}\.\d{1,3}.\d{1,3}", page)[0]
##############################################################################
## MainWindow class ##########################################################
class MainWindow(tk.Toplevel):
    """Top-level window listing DDNS entries with Add/Edit/Delete actions.

    Reads all data through the module-level `myGlobalModel` (created in the
    __main__ block), not through an attribute of its own.
    """
    def __init__(self, master):
        """Build the menu bar, the entry table and the action buttons."""
        # These are the headers to us up top.
        self.headers = ('subdomain', 'domain', 'ip', 'provider', 'response')
        tk.Toplevel.__init__(self, master)
        # Closing this window tears down the (hidden) root as well.
        self.protocol('WM_DELETE_WINDOW', self.master.destroy)
        self.title("DDNS Client")
        # tk.Label(self, text='My Money').pack(side='left')
        # self.moneyCtrl = tk.Entry(self, width=8)
        # self.moneyCtrl.pack(side='left')
        # Menu bar.
        aMenuBar = tk.Menu(self)
        # File menu.
        aMenuFile = tk.Menu(aMenuBar, tearoff=0)
        aMenuBar.add_cascade(menu=aMenuFile, label='File')
        aMenuFile.add_command(label="Quit", command=master.destroy)
        # Edit menu.
        aMenuEdit = tk.Menu(aMenuBar, tearoff=0)
        aMenuBar.add_cascade(menu=aMenuEdit, label='Edit')
        aMenuEdit.add_command(label="Add...", command=self.startAdd)
        # Actually add the menu bar.
        self["menu"] = aMenuBar
        # Create the container to hold everything.
        container = ttk.Frame(self)
        container.pack(fill='both', expand=True, padx=10, pady=10)
        # create a treeview with dual scrollbars
        self.myTree = ttk.Treeview(container, columns=self.headers, show="headings")
        vsb = ttk.Scrollbar(container, orient="vertical", command=self.myTree.yview)
        hsb = ttk.Scrollbar(container, orient="horizontal", command=self.myTree.xview)
        self.myTree.configure(yscrollcommand=vsb.set, xscrollcommand=hsb.set)
        # self.myTree.bind('<1>', select_cmd)
        self.myTree.grid(column=0, row=0, rowspan=10, sticky='nsew')
        vsb.grid(column=1, row=0, rowspan=10, sticky='ns')
        hsb.grid(column=0, row=10, sticky='ew')
        # Action buttons down the right-hand side.
        aBtnAdd = ttk.Button(container)
        aBtnAdd.configure(text="Add", command=self.startAdd)
        aBtnAdd.grid(column=2, row=1, padx=25, pady=10)
        aBtnEdit = ttk.Button(container)
        aBtnEdit.configure(text="Edit", command=self.startEdit)
        aBtnEdit.grid(column=2, row=2, padx=25, pady=10)
        aBtnDelete = ttk.Button(container)
        aBtnDelete.configure(text="Delete", command=self.startDelete)
        aBtnDelete.grid(column=2, row=3, padx=25, pady=10)
        # Separator.
        aSep = ttk.Separator(container)
        aSep.grid(column=0, columnspan=3, row=11, padx=25, pady=25, sticky="ew")
        # NOTE(review): the Update button has no command wired up yet.
        aBtnUpdate = ttk.Button(container)
        aBtnUpdate.configure(text="Update")
        aBtnUpdate.grid(column=2, row=12, padx=25, pady=25)
        container.grid_columnconfigure(0, weight=1)
        container.grid_rowconfigure(0, weight=1)
        # Create a model to use.
        # self.myModel = Model()
        # Initialize the table, since everything is created.
        self._setRows()
    def _setRows(self):
        """Populate headers and rows of the tree from the database."""
        # Get the rows from the database.
        aRows = myGlobalModel.getRows()
        # Setup the headers.
        for col in self.headers:
            self.myTree.heading(
                col,
                text=col,
                command=lambda c=col: self._sortby(self.myTree, c, 0)
            )
            # adjust the column's width to the header string
            self.myTree.column(col, width=tkFont.Font().measure(col.title()))
        # Setup the actual content.
        for item in aRows:
            ins = self.normalizeData(item)
            self.myTree.insert('', 'end', values=ins)
            # adjust column's width if necessary to fit each value
            for ix, val in enumerate(ins):
                col_w = tkFont.Font().measure(val)
                if self.myTree.column(self.headers[ix], width=None) < col_w:
                    self.myTree.column(self.headers[ix], width=col_w)
        # Sort the rows by subdomain by default.
        self._sortby(self.myTree, "subdomain", False)
    def _sortby(self, tree, col, descending):
        """sort tree contents when a column header is clicked on"""
        # grab values to sort
        data = [(tree.set(child, col), child) \
            for child in tree.get_children('')]
        # if the data to be sorted is numeric change to float
        #data = change_numeric(data)
        # now sort the data in place
        data.sort(reverse=descending)
        for ix, item in enumerate(data):
            tree.move(item[1], '', ix)
        # switch the heading so it will sort in the opposite direction
        tree.heading(col, command=lambda col=col: self._sortby(tree, col, \
            int(not descending)))
    def _clearRows(self):
        """Clear all the rows from the table."""
        for i in self.myTree.get_children():
            self.myTree.delete(i)
    def normalizeData(self, data):
        """Project a row dict onto a list ordered like self.headers."""
        ret = []
        for item in self.headers:
            ret.append(data[item])
        return ret
    def startAdd(self):
        """Open the add dialog modally; refresh the table if it saved."""
        aAdd = AddEditWindow(self)
        # Make the window modal.
        aAdd.transient(self)
        aAdd.grab_set()
        self.wait_window(aAdd)
        if myGlobalModel.getEdited():
            # Update the table regardless.
            self._clearRows()
            self._setRows()
            # Set the edited status back.
            myGlobalModel.setEdited(False)
    def startEdit(self):
        """Open the edit dialog for the single selected row."""
        try:
            # Exactly one row must be selected.
            if len(self.myTree.selection()) > 1:
                raise IndexError
            item = self.myTree.selection()[0]
            # Column 0 is the subdomain, which getRow() keys on.
            aKey = self.myTree.item(item, "values")[0]
            aRow = myGlobalModel.getRow(aKey)
            aEdit = AddEditWindow(self, aRow, True)
            # Make the window modal.
            aEdit.transient(self)
            aEdit.grab_set()
            self.wait_window(aEdit)
            if myGlobalModel.getEdited():
                # Update the table regardless.
                self._clearRows()
                self._setRows()
                # Set the edited status back.
                myGlobalModel.setEdited(False)
        except IndexError:
            # Raised both for no selection and for multi-selection.
            tkMessageBox.showinfo("Select a Single Row", "Please select one single row.")
    def startDelete(self):
        """Confirm and delete every selected row, then refresh."""
        try:
            # Get the perinent data.
            items = self.myTree.selection()
            aIdList = []
            for i, item in enumerate(items):
                aKey = self.myTree.item(item, "values")[0]
                aRow = myGlobalModel.getRow(aKey)
                # Extract the id.
                aIdList.append(aRow["id"])
            # Call the dialog.
            if tkMessageBox.askyesno("Delete Entry(ies)", "Should we delete the selected entries?"):
                myGlobalModel.deleteEntries(aIdList)
                # Update the table if we pressed yes.
                self._clearRows()
                self._setRows()
        except IndexError:
            tkMessageBox.showinfo("Select a Single Row", "Please select one single row.")
##############################################################################
## MainWindow class ##########################################################
class AddEditWindow(tk.Toplevel):
    """Modal form for adding a new DDNS entry or editing an existing one.

    The Save button enables itself only once all mandatory fields are
    filled and the IP field matches a dotted-quad pattern.
    """
    def __init__(self, master, theRow=None, isEdit=False):
        """Build the form; pre-fill it from theRow when editing."""
        tk.Toplevel.__init__(self, master)
        if isEdit:
            self.title("Editing " + theRow["subdomain"])
        else:
            self.title("Adding a New Entry")
        # Disable resizing.
        self.resizable(False, False)
        # Set the size.
        # self.minsize()
        self.isEdit = isEdit
        container = ttk.Frame(self)
        container.pack(fill='both', expand=True)
        # Create string variables to hold the value and to trace.
        self.aStrSub = tk.StringVar(container)
        self.aStrDomain = tk.StringVar(container)
        self.aStrIp = tk.StringVar(container)
        self.aStrProvider = tk.StringVar(container)
        self.aStrUsername = tk.StringVar(container)
        self.aStrPassword = tk.StringVar(container)
        self.aStrApi = tk.StringVar(container)
        # Add a trace to each one, so every keystroke re-validates the
        # form.  (Subdomain is traced too although it is optional; Extras
        # has no StringVar and is never validated.)
        self.aStrSub.trace('w', self._filledOut)
        self.aStrDomain.trace('w', self._filledOut)
        self.aStrIp.trace('w', self._filledOut)
        self.aStrProvider.trace('w', self._filledOut)
        self.aStrUsername.trace('w', self._filledOut)
        self.aStrPassword.trace('w', self._filledOut)
        self.aStrApi.trace('w', self._filledOut)
        # Create all the widgets.
        aLblSub = tk.Label(container, text="Subdomain", anchor=tk.W, padx=10, pady=10)
        self.aEntSub = tk.Entry(container, textvariable=self.aStrSub)
        aLblDomain = tk.Label(container, text="Domain", anchor=tk.W, padx=10, pady=10)
        self.aEntDomain = tk.Entry(container, textvariable=self.aStrDomain)
        aLblIp = tk.Label(container, text="IP Address", anchor=tk.W, padx=10, pady=10)
        self.aEntIp = tk.Entry(container, textvariable=self.aStrIp)
        aLblProvider = tk.Label(container, text="Provider", anchor=tk.W, padx=10, pady=10)
        self.aEntProvider = tk.Entry(container, textvariable=self.aStrProvider)
        aLblUsername = tk.Label(container, text="Username", anchor=tk.W, padx=10, pady=10)
        self.aEntUsername = tk.Entry(container, textvariable=self.aStrUsername)
        aLblPassword = tk.Label(container, text="Password", anchor=tk.W, padx=10, pady=10)
        self.aEntPassword = tk.Entry(container, textvariable=self.aStrPassword)
        aLblApi = tk.Label(container, text="Api", anchor=tk.W, padx=10, pady=10)
        self.aEntApi = tk.Entry(container, textvariable=self.aStrApi)
        aLblExtras = tk.Label(container, text="Extras", anchor=tk.W, padx=10, pady=10)
        self.aEntExtras = tk.Entry(container)
        self.aBtnSave = ttk.Button(container)
        self.aBtnSave.configure(text="Save Entry", command=self.saveEntry)
        aSep = ttk.Separator(container)
        aBtnCancel = tk.Button(container, text="Cancel", command=self.destroy)
        # Load values if editing is enabled.
        if theRow is not None:
            print("Loading values to edit:", theRow)
            self.aEntSub.insert(0, theRow["subdomain"])
            self.aEntDomain.insert(0, theRow["domain"])
            self.aEntIp.insert(0, theRow["ip"])
            self.aEntProvider.insert(0, theRow["provider"])
            self.aEntUsername.insert(0, theRow["username"])
            self.aEntPassword.insert(0, theRow["password"])
            self.aEntApi.insert(0, theRow["api"])
            self.aEntExtras.insert(0, theRow["extras"])
            # Remember the row id so saveEntry() can UPDATE instead of INSERT.
            self.aId = theRow["id"]
        # Pack everything into the grid.
        aLblSub.grid(column=0, row=0)
        self.aEntSub.grid(column=1, row=0, padx=10, pady=10)
        aLblDomain.grid(column=0, row=1)
        self.aEntDomain.grid(column=1, row=1, padx=10, pady=10)
        aLblIp.grid(column=0, row=2)
        self.aEntIp.grid(column=1, row=2, padx=10, pady=10)
        aLblProvider.grid(column=0, row=3)
        self.aEntProvider.grid(column=1, row=3, padx=10, pady=10)
        aLblUsername.grid(column=0, row=4)
        self.aEntUsername.grid(column=1, row=4, padx=10, pady=10)
        aLblPassword.grid(column=0, row=5)
        self.aEntPassword.grid(column=1, row=5, padx=10, pady=10)
        aLblApi.grid(column=0, row=6)
        self.aEntApi.grid(column=1, row=6, padx=10, pady=10)
        aLblExtras.grid(column=0, row=7)
        self.aEntExtras.grid(column=1, row=7, padx=10, pady=10)
        aSep.grid(column=0, columnspan=2, row=8, padx=25, pady=25, sticky="ew")
        self.aBtnSave.grid(column=1, row=9, padx=10, pady=10)
        aBtnCancel.grid(column=2, row=9, padx=10, pady=10)
        # Finally, set the button.
        self._filledOut()
    def _filledOut(self, *args):
        """This is intended to check if all the proper fields are filled out."""
        # Get the status of all the fields.  Subdomain and Extras are
        # deliberately optional here.
        aVerify = (
            self.aEntDomain.get() and
            self.aEntIp.get() and
            self.aEntProvider.get() and
            self.aEntUsername.get() and
            self.aEntPassword.get() and
            self.aEntApi.get()
        )
        # Get the regex of the ip address field.
        # NOTE(review): imported on every keystroke — hoist to module level;
        # also the third dot in the pattern is unescaped, so it matches any
        # character there.
        import re
        r = re.compile("\d{1,3}\.\d{1,3}\.\d{1,3}.\d{1,3}$")
        aVerify = aVerify and r.match(self.aEntIp.get())
        # Set the button to enabled or disabled.
        if aVerify:
            self.aBtnSave.config(state="normal")
        else:
            self.aBtnSave.config(state="disable")
    def saveEntry(self):
        """Pass all the collected data to the model."""
        # Collect all the data.  (The `... if ... else ""` guards are
        # redundant — Entry.get() already returns "" when empty.)
        aSub = self.aEntSub.get() if self.aEntSub.get() else ""
        aDomain = self.aEntDomain.get()
        aIp = self.aEntIp.get()
        aProvider = self.aEntProvider.get()
        aUsername = self.aEntUsername.get()
        aPassword = self.aEntPassword.get()
        aApi = self.aEntApi.get()
        aExtras = self.aEntExtras.get() if self.aEntExtras.get() else ""
        # Save the data appropriately: UPDATE when editing, INSERT when adding.
        if(self.isEdit):
            myGlobalModel.updateEntry(
                self.aId,
                aSub,
                aDomain,
                aIp,
                aProvider,
                aUsername,
                aPassword,
                aApi,
                aExtras
            )
        else:
            myGlobalModel.addEntry(
                aSub,
                aDomain,
                aIp,
                aProvider,
                aUsername,
                aPassword,
                aApi,
                aExtras
            )
        # Tell the main window the table needs reloading, then close.
        myGlobalModel.setEdited(True)
        self.destroy()
##############################################################################
## Main method ###############################################################
if __name__ == '__main__':
    import argparse
    # Command-line interface.
    aParser = argparse.ArgumentParser(description="Update DDNS entries.")
    # NOTE(review): -v/--verbose and -q/--quiet are parsed but never read
    # in this block — presumably consumed elsewhere or not yet implemented.
    aParser.add_argument("-d", "--daemon", action="store_true")
    aParser.add_argument("-v", "--verbose", action="store_true")
    aParser.add_argument("-q", "--quiet", action="store_true")
    # Parse the arguments.
    aArgs = aParser.parse_args()
    # Daemon mode is a placeholder: it only prints a message and exits.
    if aArgs.daemon:
        print("ii Daemonize it.")
    else:
        root = tk.Tk()
        # Hide the default blank root window; the app manages its own windows.
        root.withdraw()
        # Module-level global: the dialog classes read myGlobalModel directly.
        myGlobalModel = Model()
        app = MainWindow(root)
        # Hand control to Tk's event loop.
        root.mainloop()
##############################################################################
| mit |
mmckinst/pykickstart | pykickstart/commands/network.py | 7 | 24769 | #
# Chris Lumens <clumens@redhat.com>
#
# Copyright 2005, 2006, 2007, 2008 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
from pykickstart.base import BaseData, KickstartCommand
from pykickstart.constants import BOOTPROTO_BOOTP, BOOTPROTO_DHCP, BOOTPROTO_IBFT, BOOTPROTO_QUERY, BOOTPROTO_STATIC
from pykickstart.options import KSOptionParser
from pykickstart.errors import KickstartValueError, formatErrorMsg
import warnings
from pykickstart.i18n import _
# Inclusive vlan id bounds accepted by validate_network_interface_name().
MIN_VLAN_ID = 0
MAX_VLAN_ID = 4095
class FC3_NetworkData(BaseData):
    """Data for a single kickstart ``network`` command (FC3 syntax)."""
    removedKeywords = BaseData.removedKeywords
    removedAttrs = BaseData.removedAttrs

    def __init__(self, *args, **kwargs):
        BaseData.__init__(self, *args, **kwargs)
        self.bootProto = kwargs.get("bootProto", BOOTPROTO_DHCP)
        self.dhcpclass = kwargs.get("dhcpclass", "")
        self.device = kwargs.get("device", "")
        self.essid = kwargs.get("essid", "")
        self.ethtool = kwargs.get("ethtool", "")
        self.gateway = kwargs.get("gateway", "")
        self.hostname = kwargs.get("hostname", "")
        self.ip = kwargs.get("ip", "")
        self.mtu = kwargs.get("mtu", "")
        self.nameserver = kwargs.get("nameserver", "")
        self.netmask = kwargs.get("netmask", "")
        self.nodns = kwargs.get("nodns", False)
        self.onboot = kwargs.get("onboot", True)
        self.wepkey = kwargs.get("wepkey", "")

    def __eq__(self, y):
        # Two network entries are equal when they name the same device.
        # Entries without a device never compare equal.  Fix: the original
        # returned "" (a non-bool) when self.device was empty; wrap in
        # bool() so __eq__ always returns a proper boolean.
        if not y:
            return False

        return bool(self.device) and self.device == y.device

    def __ne__(self, y):
        return not self == y

    def _getArgsAsStr(self):
        """Serialize the set attributes back into command-line options."""
        retval = ""

        if self.bootProto != "":
            retval += " --bootproto=%s" % self.bootProto
        if self.dhcpclass != "":
            retval += " --dhcpclass=%s" % self.dhcpclass
        if self.device != "":
            retval += " --device=%s" % self.device
        if self.essid != "":
            retval += " --essid=\"%s\"" % self.essid
        if self.ethtool != "":
            retval += " --ethtool=\"%s\"" % self.ethtool
        if self.gateway != "":
            retval += " --gateway=%s" % self.gateway
        if self.hostname != "":
            retval += " --hostname=%s" % self.hostname
        if self.ip != "":
            retval += " --ip=%s" % self.ip
        if self.mtu != "":
            retval += " --mtu=%s" % self.mtu
        if self.nameserver != "":
            retval += " --nameserver=%s" % self.nameserver
        if self.netmask != "":
            retval += " --netmask=%s" % self.netmask
        if self.nodns:
            retval += " --nodns"
        if not self.onboot:
            retval += " --onboot=off"
        if self.wepkey != "":
            retval += " --wepkey=%s" % self.wepkey

        return retval

    def __str__(self):
        retval = BaseData.__str__(self)
        retval += "network %s\n" % self._getArgsAsStr()
        return retval
class FC4_NetworkData(FC3_NetworkData):
    """FC4 network data: adds the --notksdevice flag."""
    removedKeywords = FC3_NetworkData.removedKeywords
    removedAttrs = FC3_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        FC3_NetworkData.__init__(self, *args, **kwargs)
        self.notksdevice = kwargs.get("notksdevice", False)

    def _getArgsAsStr(self):
        extra = " --notksdevice" if self.notksdevice else ""
        return FC3_NetworkData._getArgsAsStr(self) + extra
class FC6_NetworkData(FC4_NetworkData):
    """FC6 network data: adds the IPv4/IPv6 disabling flags."""
    removedKeywords = FC4_NetworkData.removedKeywords
    removedAttrs = FC4_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        FC4_NetworkData.__init__(self, *args, **kwargs)
        self.noipv4 = kwargs.get("noipv4", False)
        self.noipv6 = kwargs.get("noipv6", False)

    def _getArgsAsStr(self):
        pieces = [FC4_NetworkData._getArgsAsStr(self)]
        if self.noipv4:
            pieces.append(" --noipv4")
        if self.noipv6:
            pieces.append(" --noipv6")
        return "".join(pieces)
class F8_NetworkData(FC6_NetworkData):
    """F8 network data: adds the --ipv6 option."""
    removedKeywords = FC6_NetworkData.removedKeywords
    removedAttrs = FC6_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        FC6_NetworkData.__init__(self, *args, **kwargs)
        self.ipv6 = kwargs.get("ipv6", "")

    def _getArgsAsStr(self):
        extra = " --ipv6=%s" % self.ipv6 if self.ipv6 != "" else ""
        return FC6_NetworkData._getArgsAsStr(self) + extra
class F16_NetworkData(F8_NetworkData):
    """F16 network data: activation, default-route and WPA key options."""
    removedKeywords = F8_NetworkData.removedKeywords
    removedAttrs = F8_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        F8_NetworkData.__init__(self, *args, **kwargs)
        self.activate = kwargs.get("activate", False)
        self.nodefroute = kwargs.get("nodefroute", False)
        self.wpakey = kwargs.get("wpakey", "")

    def _getArgsAsStr(self):
        pieces = [F8_NetworkData._getArgsAsStr(self)]
        if self.activate:
            pieces.append(" --activate")
        if self.nodefroute:
            pieces.append(" --nodefroute")
        if self.wpakey != "":
            pieces.append(" --wpakey=%s" % self.wpakey)
        return "".join(pieces)
class F19_NetworkData(F16_NetworkData):
    """F19 network data: bonding, vlan id and IPv6 gateway options."""
    removedKeywords = F16_NetworkData.removedKeywords
    removedAttrs = F16_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        F16_NetworkData.__init__(self, *args, **kwargs)
        self.bondslaves = kwargs.get("bondslaves", "")
        self.bondopts = kwargs.get("bondopts", "")
        self.vlanid = kwargs.get("vlanid", "")
        self.ipv6gateway = kwargs.get("ipv6gateway", "")

    def _getArgsAsStr(self):
        pieces = [F16_NetworkData._getArgsAsStr(self)]
        if self.bondslaves != "":
            pieces.append(" --bondslaves=%s" % self.bondslaves)
        if self.bondopts != "":
            pieces.append(" --bondopts=%s" % self.bondopts)
        if self.vlanid:
            pieces.append(" --vlanid=%s" % self.vlanid)
        if self.ipv6gateway:
            pieces.append(" --ipv6gateway=%s" % self.ipv6gateway)
        return "".join(pieces)
class F20_NetworkData(F19_NetworkData):
    """F20 network data: team device slaves and team configuration."""
    removedKeywords = F19_NetworkData.removedKeywords
    removedAttrs = F19_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        F19_NetworkData.__init__(self, *args, **kwargs)
        # teamslaves: list of (device, json_config_string) pairs.
        self.teamslaves = kwargs.get("teamslaves", [])
        self.teamconfig = kwargs.get("teamconfig", "")

    def _getArgsAsStr(self):
        """Serialize, re-quoting each slave's JSON config in single quotes
        and escaping embedded double quotes for the shell-like syntax."""
        retval = F19_NetworkData._getArgsAsStr(self)

        # see the tests for format description
        if self.teamslaves:
            slavecfgs = []
            for slave, config in self.teamslaves:
                if config:
                    config = "'" + config + "'"
                slavecfgs.append(slave+config)
            slavecfgs = ",".join(slavecfgs).replace('"', r'\"')
            retval += ' --teamslaves="%s"' % slavecfgs
        if self.teamconfig:
            retval += ' --teamconfig="%s"' % self.teamconfig.replace('"', r'\"')
        return retval
class F21_NetworkData(F20_NetworkData):
    """F21 network data: adds the --interfacename option."""
    removedKeywords = F20_NetworkData.removedKeywords
    removedAttrs = F20_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        F20_NetworkData.__init__(self, *args, **kwargs)
        self.interfacename = kwargs.get("interfacename", "")

    def _getArgsAsStr(self):
        extra = " --interfacename=%s" % self.interfacename if self.interfacename else ""
        return F20_NetworkData._getArgsAsStr(self) + extra
class F22_NetworkData(F21_NetworkData):
    """F22 network data: bridge slaves and bridge options."""
    removedKeywords = F21_NetworkData.removedKeywords
    removedAttrs = F21_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        F21_NetworkData.__init__(self, *args, **kwargs)
        self.bridgeslaves = kwargs.get("bridgeslaves", "")
        self.bridgeopts = kwargs.get("bridgeopts", "")

    def _getArgsAsStr(self):
        pieces = [F21_NetworkData._getArgsAsStr(self)]
        if self.bridgeslaves != "":
            pieces.append(" --bridgeslaves=%s" % self.bridgeslaves)
        if self.bridgeopts != "":
            pieces.append(" --bridgeopts=%s" % self.bridgeopts)
        return "".join(pieces)
class RHEL4_NetworkData(FC3_NetworkData):
    """RHEL4 network data: adds --notksdevice (same extension as FC4)."""
    removedKeywords = FC3_NetworkData.removedKeywords
    removedAttrs = FC3_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        FC3_NetworkData.__init__(self, *args, **kwargs)
        self.notksdevice = kwargs.get("notksdevice", False)

    def _getArgsAsStr(self):
        extra = " --notksdevice" if self.notksdevice else ""
        return FC3_NetworkData._getArgsAsStr(self) + extra
class RHEL6_NetworkData(F8_NetworkData):
    """RHEL6 network data: activation, default-route, vlan and bonding."""
    removedKeywords = F8_NetworkData.removedKeywords
    removedAttrs = F8_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        F8_NetworkData.__init__(self, *args, **kwargs)
        self.activate = kwargs.get("activate", False)
        self.nodefroute = kwargs.get("nodefroute", False)
        self.vlanid = kwargs.get("vlanid", "")
        self.bondslaves = kwargs.get("bondslaves", "")
        self.bondopts = kwargs.get("bondopts", "")

    def _getArgsAsStr(self):
        pieces = [F8_NetworkData._getArgsAsStr(self)]
        if self.activate:
            pieces.append(" --activate")
        if self.nodefroute:
            pieces.append(" --nodefroute")
        if self.vlanid:
            pieces.append(" --vlanid=%s" % self.vlanid)
        if self.bondslaves:
            pieces.append(" --bondslaves=%s" % self.bondslaves)
        if self.bondopts:
            pieces.append(" --bondopts=%s" % self.bondopts)
        return "".join(pieces)
class RHEL7_NetworkData(F21_NetworkData):
    """RHEL7 network data: bridge slaves and options (mirrors F22 data)."""
    removedKeywords = F21_NetworkData.removedKeywords
    removedAttrs = F21_NetworkData.removedAttrs

    def __init__(self, *args, **kwargs):
        F21_NetworkData.__init__(self, *args, **kwargs)
        self.bridgeslaves = kwargs.get("bridgeslaves", "")
        self.bridgeopts = kwargs.get("bridgeopts", "")

    def _getArgsAsStr(self):
        pieces = [F21_NetworkData._getArgsAsStr(self)]
        if self.bridgeslaves != "":
            pieces.append(" --bridgeslaves=%s" % self.bridgeslaves)
        if self.bridgeopts != "":
            pieces.append(" --bridgeopts=%s" % self.bridgeopts)
        return "".join(pieces)
class FC3_Network(KickstartCommand):
    """Handler for the kickstart ``network`` command (FC3 syntax).

    Parsed devices accumulate in self.network, one NetworkData object
    per ``network`` line in the kickstart file.
    """
    removedKeywords = KickstartCommand.removedKeywords
    removedAttrs = KickstartCommand.removedAttrs

    def __init__(self, writePriority=0, *args, **kwargs):
        KickstartCommand.__init__(self, writePriority, *args, **kwargs)
        self.bootprotoList = [BOOTPROTO_DHCP, BOOTPROTO_BOOTP,
                              BOOTPROTO_STATIC]

        self.op = self._getParser()
        self.network = kwargs.get("network", [])

    def __str__(self):
        # Idiom fix: str(nic) + join instead of nic.__str__() and +=.
        retval = "".join(str(nic) for nic in self.network)

        if retval != "":
            return "# Network information\n" + retval
        else:
            return ""

    def _getParser(self):
        """Build the option parser for one ``network`` line."""
        op = KSOptionParser()
        op.add_option("--bootproto", dest="bootProto",
                      default=BOOTPROTO_DHCP,
                      choices=self.bootprotoList)
        op.add_option("--dhcpclass", dest="dhcpclass")
        op.add_option("--device", dest="device")
        op.add_option("--essid", dest="essid")
        op.add_option("--ethtool", dest="ethtool")
        op.add_option("--gateway", dest="gateway")
        op.add_option("--hostname", dest="hostname")
        op.add_option("--ip", dest="ip")
        op.add_option("--mtu", dest="mtu")
        op.add_option("--nameserver", dest="nameserver")
        op.add_option("--netmask", dest="netmask")
        op.add_option("--nodns", dest="nodns", action="store_true",
                      default=False)
        op.add_option("--onboot", dest="onboot", action="store",
                      type="ksboolean")
        op.add_option("--wepkey", dest="wepkey")
        return op

    def parse(self, args):
        """Parse one ``network`` line into a NetworkData object and return it."""
        (opts, _extra) = self.op.parse_args(args=args, lineno=self.lineno)
        nd = self.handler.NetworkData()
        self._setToObj(self.op, opts, nd)
        nd.lineno = self.lineno

        # Warn (don't fail) on duplicate device definitions.
        if nd in self.dataList():
            warnings.warn(_("A network device with the name %s has already been defined.") % nd.device)

        return nd

    def dataList(self):
        return self.network
class FC4_Network(FC3_Network):
    """FC4 network command: adds --notksdevice."""
    removedKeywords = FC3_Network.removedKeywords
    removedAttrs = FC3_Network.removedAttrs

    def _getParser(self):
        parser = FC3_Network._getParser(self)
        parser.add_option("--notksdevice", dest="notksdevice", action="store_true",
                          default=False)
        return parser
class FC6_Network(FC4_Network):
    """FC6 network command: adds --noipv4 and --noipv6."""
    removedKeywords = FC4_Network.removedKeywords
    removedAttrs = FC4_Network.removedAttrs

    def _getParser(self):
        parser = FC4_Network._getParser(self)
        parser.add_option("--noipv4", dest="noipv4", action="store_true",
                          default=False)
        parser.add_option("--noipv6", dest="noipv6", action="store_true",
                          default=False)
        return parser
class F8_Network(FC6_Network):
    """F8 network command: adds --ipv6."""
    removedKeywords = FC6_Network.removedKeywords
    removedAttrs = FC6_Network.removedAttrs

    def _getParser(self):
        parser = FC6_Network._getParser(self)
        parser.add_option("--ipv6", dest="ipv6")
        return parser
class F9_Network(F8_Network):
    """F9 network command: permits --bootproto=query."""
    removedKeywords = F8_Network.removedKeywords
    removedAttrs = F8_Network.removedAttrs

    def __init__(self, writePriority=0, *args, **kwargs):
        F8_Network.__init__(self, writePriority, *args, **kwargs)
        self.bootprotoList.append(BOOTPROTO_QUERY)

    def _getParser(self):
        # Re-register --bootproto so the extended choice list takes effect.
        parser = F8_Network._getParser(self)
        parser.add_option("--bootproto", dest="bootProto",
                          default=BOOTPROTO_DHCP,
                          choices=self.bootprotoList)
        return parser
class F16_Network(F9_Network):
    """F16 network command: ibft bootproto plus --activate/--nodefroute/--wpakey."""
    removedKeywords = F9_Network.removedKeywords
    removedAttrs = F9_Network.removedAttrs

    def __init__(self, writePriority=0, *args, **kwargs):
        F9_Network.__init__(self, writePriority, *args, **kwargs)
        self.bootprotoList.append(BOOTPROTO_IBFT)

    def _getParser(self):
        parser = F9_Network._getParser(self)
        parser.add_option("--activate", dest="activate", action="store_true",
                          default=False)
        parser.add_option("--nodefroute", dest="nodefroute", action="store_true",
                          default=False)
        parser.add_option("--wpakey", dest="wpakey", action="store", default="")
        return parser
class F18_Network(F16_Network):
    """F18 network command: exposes the first configured hostname."""

    @property
    def hostname(self):
        """Return the first non-empty hostname among the parsed devices, or None."""
        hostnames = (nd.hostname for nd in self.dataList() if nd.hostname)
        return next(hostnames, None)
class F19_Network(F18_Network):
    """F19 network command: bonding, vlan id and IPv6 gateway options."""

    def _getParser(self):
        parser = F18_Network._getParser(self)
        parser.add_option("--bondslaves", dest="bondslaves", action="store",
                          default="")
        parser.add_option("--bondopts", dest="bondopts", action="store",
                          default="")
        parser.add_option("--vlanid", dest="vlanid")
        parser.add_option("--ipv6gateway", dest="ipv6gateway", action="store",
                          default="")
        return parser
class F20_Network(F19_Network):
    """F20 network command: team device options (--teamslaves/--teamconfig)."""

    def _getParser(self):
        """Extend the F19 parser with the team options; --teamslaves uses a
        callback because its value embeds quoted JSON configs."""
        # see the tests for teamslaves option
        def teamslaves_cb(option, opt_str, value, parser):
            """Split the --teamslaves value into (device, config) pairs."""
            # value is of: "<DEV1>['<JSON_CONFIG1>'],<DEV2>['<JSON_CONFIG2>'],..."
            # for example: "eth1,eth2'{"prio": 100}',eth3"
            teamslaves = []

            if value:
                # Although slaves, having optional config, are separated by ","
                # first extract json configs because they can contain the ","
                parts = value.split("'")
                # parts == ['eth1,eth2', '{"prio": 100}', ',eth3']

                # ensure the list has even number of items for further zipping,
                # for odd number of items
                if len(parts) % 2 == 1:
                    # if the list ends with an empty string which must be a leftover
                    # from splitting string not ending with device eg
                    # "eth1,eth2'{"prio":100"}'"
                    if not parts[-1]:
                        # just remove it
                        parts = parts[:-1]
                    # if not (our example), add empty config for the last device
                    else:
                        parts.append('')
                        # parts == ['eth1,eth2', '{"prio": 100}', ',eth3', '']

                # zip devices with their configs
                it = iter(parts)
                for devs, cfg in zip(it,it):
                    # first loop:
                    # devs == "eth1,eth2", cfg == '{"prio": 100}'
                    devs = devs.strip(',').split(',')
                    # devs == ["eth1", "eth2"]
                    # initialize config of all devs but the last one to empty
                    for d in devs[:-1]:
                        teamslaves.append((d, ''))
                    # teamslaves == [("eth1", '')]
                    # and set config of the last device
                    teamslaves.append((devs[-1], cfg))
                    # teamslaves == [('eth1', ''), ('eth2', '{"prio": 100}']

            parser.values.teamslaves = teamslaves

        op = F19_Network._getParser(self)
        op.add_option("--teamslaves", dest="teamslaves", action="callback",
                      callback=teamslaves_cb, nargs=1, type="string")
        op.add_option("--teamconfig", dest="teamconfig", action="store",
                      default="")
        return op
class F21_Network(F20_Network):
    """F21 network command: adds --interfacename."""

    def _getParser(self):
        parser = F20_Network._getParser(self)
        parser.add_option("--interfacename", dest="interfacename", action="store",
                          default="")
        return parser
class F22_Network(F21_Network):
    """F22 network command: adds bridge options and validates them on parse."""

    def _getParser(self):
        op = F21_Network._getParser(self)
        op.add_option("--bridgeslaves", dest="bridgeslaves", action="store",
                      default="")
        op.add_option("--bridgeopts", dest="bridgeopts", action="store",
                      default="")
        return op

    def parse(self, args):
        # call the overridden command to do its job first
        retval = F21_Network.parse(self, args)

        # --bridgeopts only makes sense together with --bridgeslaves, and
        # must be a comma-separated list of key=value pairs.
        if retval.bridgeopts:
            if not retval.bridgeslaves:
                msg = formatErrorMsg(self.lineno, msg=_("Option --bridgeopts requires "\
                                        "--bridgeslaves to be specified"))
                raise KickstartValueError(msg)
            opts = retval.bridgeopts.split(",")
            for opt in opts:
                _key, _sep, value = opt.partition("=")
                if not value or "=" in value:
                    msg = formatErrorMsg(self.lineno, msg=_("Bad format of --bridgeopts, expecting key=value options separated by ','"))
                    raise KickstartValueError(msg)

        return retval
class RHEL4_Network(FC3_Network):
    """RHEL4 network command: adds --notksdevice (same extension as FC4)."""
    removedKeywords = FC3_Network.removedKeywords
    removedAttrs = FC3_Network.removedAttrs

    def _getParser(self):
        parser = FC3_Network._getParser(self)
        parser.add_option("--notksdevice", dest="notksdevice", action="store_true",
                          default=False)
        return parser
class RHEL5_Network(FC6_Network):
    """RHEL5 network command: permits --bootproto=query."""
    removedKeywords = FC6_Network.removedKeywords
    removedAttrs = FC6_Network.removedAttrs

    def __init__(self, writePriority=0, *args, **kwargs):
        FC6_Network.__init__(self, writePriority, *args, **kwargs)
        self.bootprotoList.append(BOOTPROTO_QUERY)

    def _getParser(self):
        # Re-register --bootproto so the extended choice list takes effect.
        parser = FC6_Network._getParser(self)
        parser.add_option("--bootproto", dest="bootProto",
                          default=BOOTPROTO_DHCP,
                          choices=self.bootprotoList)
        return parser
class RHEL6_Network(F9_Network):
    """RHEL6 network command: ibft bootproto plus activation, default-route,
    vlan and bonding options."""
    removedKeywords = F9_Network.removedKeywords
    removedAttrs = F9_Network.removedAttrs

    def __init__(self, writePriority=0, *args, **kwargs):
        F9_Network.__init__(self, writePriority, *args, **kwargs)
        self.bootprotoList.append(BOOTPROTO_IBFT)

    def _getParser(self):
        op = F9_Network._getParser(self)
        op.add_option("--activate", dest="activate", action="store_true",
                      default=False)
        op.add_option("--nodefroute", dest="nodefroute", action="store_true",
                      default=False)
        op.add_option("--vlanid", dest="vlanid")
        op.add_option("--bondslaves", dest="bondslaves")
        op.add_option("--bondopts", dest="bondopts")
        return op
def validate_network_interface_name(name):
    """Check if the given network interface name is valid, return an error message
       if an error is found or None if no errors are found

    :param str name: name to validate
    :returns: error message or None if no error is found
    :rtype: str or NoneType
    """
    # (for reference see the NetworkManager source code:
    #  NetworkManager/src/settings/plugins/ifcfg-rh/reader.c
    #  and the make_vlan_setting function)
    vlan_id = None

    # if it contains '.', vlan id should follow (eg 'ens7.171', 'mydev.171')
    (vlan, dot, id_candidate) = name.partition(".")
    if dot:
        # 'vlan' can't be followed by a '.'
        if vlan == "vlan":
            return _("When using the <prefix>.<vlan id> interface name notation, <prefix> can't be equal to 'vlan'.")
        try:
            vlan_id = int(id_candidate)
        except ValueError:
            return _("If network --interfacename contains a '.', valid vlan id should follow.")

    # if it starts with 'vlan', vlan id should follow ('vlan171')
    (empty, sep, id_candidate) = name.partition("vlan")
    if sep and empty == "":
        # if we checked only for empty == "", we would evaluate missing interface name as an error
        try:
            vlan_id = int(id_candidate)
        except ValueError:
            return _("If network --interfacename starts with 'vlan', valid vlan id should follow.")

    # check if the vlan id is in range
    if vlan_id is not None:
        if not(MIN_VLAN_ID <= vlan_id <= MAX_VLAN_ID):
            # Grammar fix: the message was missing "is" ("The vlan id out of...").
            return _("The vlan id is out of the %d-%d vlan id range.") % (MIN_VLAN_ID, MAX_VLAN_ID)

    # network interface name seems to be valid (no error found)
    return None
class RHEL7_Network(F21_Network):
    """RHEL7 network command: adds bridge options and validates both the
    interface name and the bridge options on parse."""

    def _getParser(self):
        op = F21_Network._getParser(self)
        op.add_option("--bridgeslaves", dest="bridgeslaves", action="store",
                      default="")
        op.add_option("--bridgeopts", dest="bridgeopts", action="store",
                      default="")
        return op

    def parse(self, args):
        # call the overridden command to do its job first
        retval = F21_Network.parse(self, args)

        # validate the network interface name
        error_message = validate_network_interface_name(retval.interfacename)
        # something is wrong with the interface name
        if error_message is not None:
            raise KickstartValueError(formatErrorMsg(self.lineno,msg=error_message))

        # --bridgeopts only makes sense with --bridgeslaves and must be a
        # comma-separated list of key=value pairs.
        if retval.bridgeopts:
            if not retval.bridgeslaves:
                msg = formatErrorMsg(self.lineno, msg=_("Option --bridgeopts requires "\
                                        "--bridgeslaves to be specified"))
                raise KickstartValueError(msg)
            opts = retval.bridgeopts.split(",")
            for opt in opts:
                _key, _sep, value = opt.partition("=")
                if not value or "=" in value:
                    msg = formatErrorMsg(self.lineno, msg=_("Bad format of --bridgeopts, expecting key=value options separated by ','"))
                    raise KickstartValueError(msg)

        return retval
| gpl-2.0 |
shashisp/junction | junction/proposals/migrations/0002_auto_20150105_2220.py | 9 | 2179 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
# Third Party Stuff
from django.db import migrations, models
class Migration(migrations.Migration):
    """Replace the explicit Conference<->ProposalSection/ProposalType through
    models with plain ManyToMany fields on ProposalSection and ProposalType."""

    dependencies = [
        ('conferences', '0001_initial'),
        ('proposals', '0001_initial'),
    ]

    operations = [
        # Tear down ConferenceProposalSection: drop its constraints and FKs,
        # then delete the model itself.
        migrations.AlterUniqueTogether(
            name='conferenceproposalsection',
            unique_together=None,
        ),
        migrations.RemoveField(
            model_name='conferenceproposalsection',
            name='conference',
        ),
        migrations.RemoveField(
            model_name='conferenceproposalsection',
            name='created_by',
        ),
        migrations.RemoveField(
            model_name='conferenceproposalsection',
            name='modified_by',
        ),
        migrations.RemoveField(
            model_name='conferenceproposalsection',
            name='proposal_section',
        ),
        migrations.DeleteModel(
            name='ConferenceProposalSection',
        ),
        # Same teardown for ConferenceProposalType.
        migrations.AlterUniqueTogether(
            name='conferenceproposaltype',
            unique_together=None,
        ),
        migrations.RemoveField(
            model_name='conferenceproposaltype',
            name='conference',
        ),
        migrations.RemoveField(
            model_name='conferenceproposaltype',
            name='created_by',
        ),
        migrations.RemoveField(
            model_name='conferenceproposaltype',
            name='modified_by',
        ),
        migrations.RemoveField(
            model_name='conferenceproposaltype',
            name='proposal_type',
        ),
        migrations.DeleteModel(
            name='ConferenceProposalType',
        ),
        # Replacement: direct M2M relations to Conference.
        migrations.AddField(
            model_name='proposalsection',
            name='conferences',
            field=models.ManyToManyField(to='conferences.Conference'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='proposaltype',
            name='conferences',
            field=models.ManyToManyField(to='conferences.Conference'),
            preserve_default=True,
        ),
    ]
| mit |
maartenq/ansible | test/units/modules/network/f5/test_bigip_selfip.py | 10 | 6267 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_selfip import ApiParameters
from library.modules.bigip_selfip import ModuleParameters
from library.modules.bigip_selfip import ModuleManager
from library.modules.bigip_selfip import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_selfip import ApiParameters
from ansible.modules.network.f5.bigip_selfip import ModuleParameters
from ansible.modules.network.f5.bigip_selfip import ModuleManager
from ansible.modules.network.f5.bigip_selfip import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Load a fixture file, caching the result by absolute path.

    Returns the parsed object when the file contains valid JSON,
    otherwise the raw file contents as a string.
    """
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as f:
        data = f.read()

    try:
        data = json.loads(data)
    except ValueError:
        # Not JSON — keep the raw text.  Narrowed from a bare
        # `except Exception` so real errors are no longer swallowed.
        pass

    fixture_data[path] = data
    return data
class TestParameters(unittest.TestCase):
    """Unit tests for the bigip_selfip parameter adapter classes."""

    def test_module_parameters(self):
        """Module-style input is normalized (route domain folded into the
        address, services sorted/defaulted, names made partition-absolute)."""
        args = dict(
            address='10.10.10.10',
            allow_service=[
                'tcp:80',
                'udp:53',
                'gre'
            ],
            name='net1',
            netmask='255.255.255.0',
            partition='Common',
            route_domain='1',
            state='present',
            traffic_group='traffic-group-local-only',
            vlan='net1'
        )
        p = ModuleParameters(params=args)
        assert p.address == '10.10.10.10%1/24'
        assert p.allow_service == ['gre:0', 'tcp:80', 'udp:53']
        assert p.name == 'net1'
        assert p.netmask == 24
        assert p.route_domain == 1
        assert p.traffic_group == '/Common/traffic-group-local-only'
        assert p.vlan == '/Common/net1'

    def test_module_invalid_service(self):
        """An unknown protocol name in allow_service raises F5ModuleError."""
        args = dict(
            allow_service=[
                'tcp:80',
                'udp:53',
                'grp'
            ]
        )
        p = ModuleParameters(params=args)
        with pytest.raises(F5ModuleError) as ex:
            assert p.allow_service == ['grp', 'tcp:80', 'udp:53']
        assert 'The provided protocol' in str(ex)

    def test_api_parameters(self):
        """API-style (camelCase) input maps onto the same normalized view."""
        args = dict(
            address='10.10.10.10%1/24',
            allowService=[
                'tcp:80',
                'udp:53',
                'gre'
            ],
            name='net1',
            state='present',
            trafficGroup='/Common/traffic-group-local-only',
            vlan='net1'
        )
        p = ApiParameters(params=args)
        assert p.address == '10.10.10.10%1/24'
        assert p.allow_service == ['gre', 'tcp:80', 'udp:53']
        assert p.name == 'net1'
        assert p.netmask == 24
        assert p.traffic_group == '/Common/traffic-group-local-only'
        assert p.vlan == '/Common/net1'
class TestManager(unittest.TestCase):
    """Unit tests for the bigip_selfip ModuleManager (device calls mocked)."""

    def setUp(self):
        self.spec = ArgumentSpec()

    def test_create_selfip(self, *args):
        """Creating a self IP that does not exist reports changed=True."""
        set_module_args(dict(
            address='10.10.10.10',
            allow_service=[
                'tcp:80',
                'udp:53',
                'gre'
            ],
            name='net1',
            netmask='255.255.255.0',
            partition='Common',
            route_domain='1',
            state='present',
            traffic_group='traffic-group-local-only',
            vlan='net1',
            password='password',
            server='localhost',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        mm = ModuleManager(module=module)

        # Override methods to force specific logic in the module to happen
        mm.exists = Mock(side_effect=[False, True])
        mm.create_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True

    def test_create_selfip_idempotent(self, *args):
        """Re-applying an existing, identical self IP reports changed=False."""
        set_module_args(dict(
            address='10.10.10.10',
            allow_service=[
                'tcp:80',
                'udp:53',
                'gre'
            ],
            name='net1',
            netmask='255.255.255.0',
            partition='Common',
            route_domain='1',
            state='present',
            traffic_group='traffic-group-local-only',
            vlan='net1',
            password='password',
            server='localhost',
            user='admin'
        ))

        # Current device state comes from a canned fixture.
        current = ApiParameters(params=load_fixture('load_tm_net_self.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        mm = ModuleManager(module=module)

        # Override methods to force specific logic in the module to happen
        mm.exists = Mock(side_effect=[True, True])
        mm.read_current_from_device = Mock(return_value=current)

        results = mm.exec_module()

        assert results['changed'] is False
| gpl-3.0 |
orangeholic/protobuf | python/google/protobuf/internal/enum_type_wrapper.py | 292 | 3541 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A simple wrapper around enum types to expose utility functions.
Instances are created as properties with the same name as the enum they wrap
on proto classes. For usage, see:
reflection_test.py
"""
__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
class EnumTypeWrapper(object):
  """A utility for finding the names of enum values."""

  DESCRIPTOR = None

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type

  def Name(self, number):
    """Returns a string containing the name of an enum value."""
    # EAFP: a single dict lookup instead of a membership test + lookup.
    try:
      return self._enum_type.values_by_number[number].name
    except KeyError:
      raise ValueError('Enum %s has no name defined for value %d' % (
          self._enum_type.name, number))

  def Value(self, name):
    """Returns the value corresponding to the given enum name."""
    try:
      return self._enum_type.values_by_name[name].number
    except KeyError:
      raise ValueError('Enum %s has no value defined for name %s' % (
          self._enum_type.name, name))

  def keys(self):
    """Return a list of the string names in the enum.

    These are returned in the order they were defined in the .proto file.
    """

    return [value_descriptor.name
            for value_descriptor in self._enum_type.values]

  def values(self):
    """Return a list of the integer values in the enum.

    These are returned in the order they were defined in the .proto file.
    """

    return [value_descriptor.number
            for value_descriptor in self._enum_type.values]

  def items(self):
    """Return a list of the (name, value) pairs of the enum.

    These are returned in the order they were defined in the .proto file.
    """
    return [(value_descriptor.name, value_descriptor.number)
            for value_descriptor in self._enum_type.values]
| bsd-3-clause |
Blue7Alien/sloopdoorbot | Door System V1/membersdoor.py | 1 | 5976 | import RPi.GPIO as GPIO
import time
from socket import *
import thread
from threading import Thread
import os
import datetime
import pygame
import paramiko
MAINDOORIP = '192.168.1.17' #Enter the IP address of the main door
DOORBELLPORT = 12346 #Port to send the doorbell command through
PORT = 12345 #Send in out info over this port
DOORBELL = "doorbell.mp3"
people = 0 #Number of people in the gym
sensor1 = 0 #flag for sensor 1
sensor2 = 0 #flag for sensor 2
SLEEPTIME = 0.2 #time to wait for the 2nd sensor to trip after the first one in seconds
BOUNCETIME = 300 #laser debounce time in ms
WAITTIME = 0.5 #Time to wait between people in seconds
BUFF = 1024
HOST = '' #Listen to all hosts
start = datetime.datetime.now()
############################################################################
GPIO.setmode(GPIO.BCM)
GPIO.setup(23, GPIO.IN, pull_up_down = GPIO.PUD_UP)
GPIO.setup(24, GPIO.IN, pull_up_down = GPIO.PUD_UP)
############################################################################
def playbell():
    """Play the doorbell mp3 and block until playback finishes.

    Intended to run in its own thread (see handler()); reads the
    module-level DOORBELL path.
    """
    pygame.mixer.init()
    pygame.mixer.music.load(DOORBELL)
    pygame.mixer.music.play()
    # Poll with a short sleep instead of a bare `continue` spin: the
    # original busy-wait pegged a CPU core on the Pi for the whole clip.
    while pygame.mixer.music.get_busy():
        time.sleep(0.05)
    return
############################################################################
def response(key):
    """Build the acknowledgement string echoed back to a client."""
    prefix = 'Server response: '
    return prefix + key
def handler(clientsock, addr):
    # Per-connection worker thread: echo every payload back to the client,
    # and ring the doorbell when the "ringbell" command arrives.
    global start
    while 1:
        data = clientsock.recv(BUFF)
        if not data: break  # peer closed the connection
        print repr(addr) + ' recv:' + repr(data)
        clientsock.send(response(data))
        print repr(addr) + ' sent:' + repr(response(data))
        if "close" == data.rstrip(): break  # type 'close' on client console to close connection from the server side
        if data == "ringbell":
            print "TIME TO RING THE BELL!"
            # Play asynchronously so this loop keeps servicing the socket.
            try:
                thread.start_new_thread(playbell, ())
            except:
                print "ERROR: Couldn't Start playbell Thread"
    clientsock.close()
    print addr, "- closed connection"  # log on console
    return
############################################################################
def sendcommand(string):
    # Open a short-lived TCP connection to the main door Pi (MAINDOORIP:PORT)
    # and send a single command string (e.g. "in" / "out").
    # SOCK_STREAM == a TCP socket
    sock = socket(AF_INET, SOCK_STREAM)
    #sock.setblocking(0) # optional non-blocking
    sock.connect((MAINDOORIP, PORT))
    print "sending data => [%s]" % (string)
    sock.send(string)
    #reply = sock.recv(16384) # limit reply to 16K
    #print "reply => \n [%s]" % (reply)
    sock.close()
    #return reply
    return
############################################################################
def sensor1function(channel):
    # GPIO edge-detect callback: flag that laser beam 1 (pin 23) was broken.
    global sensor1
    sensor1 = 1
def sensor2function(channel):
    # GPIO edge-detect callback: flag that laser beam 2 (pin 24) was broken.
    global sensor2
    sensor2 = 1
def lasers():
    # Digital turnstile: two break-beam sensors determine direction of
    # travel.  Beam 1 (pin 23) then beam 2 (pin 24) == entering; beam 2
    # then beam 1 == leaving.  Each event is forwarded to the main door Pi.
    global people
    global sensor1
    global sensor2
    GPIO.add_event_detect(23, GPIO.FALLING, callback=sensor1function, bouncetime=BOUNCETIME)
    GPIO.add_event_detect(24, GPIO.FALLING, callback=sensor2function, bouncetime=BOUNCETIME)
    print("Digital Turnstile started")
    while True:
        if sensor1 == 1:
            # Beam 1 tripped first: wait SLEEPTIME for beam 2 to confirm entry.
            time.sleep(SLEEPTIME)
            if sensor2 == 1:
                people += 1
                print "Someone came in the members door"
                print "People in the gym: " + str(people)
                sensor1 = 0
                sensor2 = 0
                try:
                    #sendcommand("in")
                    thread.start_new_thread(sendcommand, ('in',))
                except Exception,e:
                    print "ERROR: Couldn't send 'in' to main door"
                    #runscript() #try to start the script on the main door
                    print str(e)
                    with open('Door.log','a') as f:
                        now = time.strftime("%c")
                        temp = 'Couldnt send in to main door: %s %s\n' % (now, e)
                        f.write(temp)
                        f.close() # you can omit in most cases as the destructor will call if
                time.sleep(WAITTIME)
            # Clear flags so a lone beam-1 trip does not linger.
            sensor1 = 0
            sensor2 = 0
        elif sensor2 == 1:
            # Beam 2 tripped first: wait SLEEPTIME for beam 1 to confirm exit.
            time.sleep(SLEEPTIME)
            if sensor1 == 1:
                if people > 0:
                    people -= 1
                print "Someone went out the members door"
                print "People in the gym: " + str(people)
                sensor1 = 0
                sensor2 = 0
                try:
                    #sendcommand("out")
                    thread.start_new_thread(sendcommand, ('out',))
                except Exception,e:
                    print "ERROR: Couldn't send 'out' to main door"
                    #runscript() #try to start the script on the main door
                    print str(e)
                    with open('Door.log','a') as f:
                        now = time.strftime("%c")
                        temp = 'Couldnt send out to main door: %s %s\n' % (now, e)
                        f.write(temp)
                        f.close() # you can omit in most cases as the destructor will call if
                time.sleep(WAITTIME)
            sensor1 = 0
            sensor2 = 0
    GPIO.cleanup()  # NOTE(review): unreachable — the while True loop never exits
############################################################################
if __name__=='__main__':
    # Start the laser turnstile loop in a background thread; failures are
    # logged to Door.log so the doorbell server can still run.
    try:
        thread.start_new_thread(lasers, ())
    except Exception,e:
        print "ERROR: Couldn't Start Laser Thread"
        print str(e)
        with open('Door.log','a') as f:
            now = time.strftime("%c")
            temp = 'Couldnt Start Laser Thread: %s %s\n' % (now, e)
            f.write(temp)
            f.close() # you can omit in most cases as the destructor will call if
    # Doorbell command server: accept connections forever in the main thread.
    ADDR = (HOST, DOORBELLPORT)
    serversock = socket(AF_INET, SOCK_STREAM)
    serversock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
    serversock.bind(ADDR)
    serversock.listen(5)
    while 1:
        print 'waiting for connection... listening on port', DOORBELLPORT
        clientsock, addr = serversock.accept()
        print '...connected from:', addr
        # One handler thread per client connection.
        thread.start_new_thread(handler, (clientsock, addr))
############################################################################
| gpl-2.0 |
gioman/QGIS | python/plugins/processing/script/AddScriptFromFileAction.py | 6 | 3506 | # -*- coding: utf-8 -*-
"""
***************************************************************************
EditScriptAction.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'April 2014'
__copyright__ = '(C) 201, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtWidgets import QFileDialog, QMessageBox
from qgis.PyQt.QtCore import QFileInfo
from qgis.core import QgsApplication, QgsSettings
from processing.script.ScriptAlgorithm import ScriptAlgorithm
from processing.gui.ToolboxAction import ToolboxAction
from processing.script.WrongScriptException import WrongScriptException
from processing.script.ScriptUtils import ScriptUtils
pluginPath = os.path.split(os.path.dirname(__file__))[0]
class AddScriptFromFileAction(ToolboxAction):
    """Processing toolbox action that imports user-selected .py script
    files into the scripts folder and refreshes the script provider."""

    def __init__(self):
        self.name, self.i18n_name = self.trAction('Add script from file')
        self.group, self.i18n_group = self.trAction('Tools')

    def getIcon(self):
        # Icon shown next to the action in the Processing toolbox.
        return QgsApplication.getThemeIcon("/processingScript.svg")

    def execute(self):
        """Prompt for script files, copy each valid one into the scripts
        folder, then refresh the provider; warn about invalid files."""
        settings = QgsSettings()
        # Remember the last directory used across sessions.
        lastDir = settings.value('Processing/lastScriptsDir', '')
        filenames, selected_filter = QFileDialog.getOpenFileNames(self.toolbox,
                                                                  self.tr('Script files', 'AddScriptFromFileAction'), lastDir,
                                                                  self.tr('Script files (*.py *.PY)', 'AddScriptFromFileAction'))
        if filenames:
            validAlgs = 0
            wrongAlgs = []
            for filename in filenames:
                try:
                    settings.setValue('Processing/lastScriptsDir',
                                      QFileInfo(filename).absoluteDir().absolutePath())
                    # Parsing the file validates it before copying.
                    script = ScriptAlgorithm(filename)
                    destFilename = os.path.join(ScriptUtils.scriptsFolders()[0], os.path.basename(filename))
                    with open(destFilename, 'w') as f:
                        f.write(script.script)
                    validAlgs += 1
                except WrongScriptException:
                    wrongAlgs.append(os.path.basename(filename))
            if validAlgs:
                QgsApplication.processingRegistry().providerById('script').refreshAlgorithms()
            if wrongAlgs:
                QMessageBox.warning(self.toolbox,
                                    self.tr('Error reading scripts', 'AddScriptFromFileAction'),
                                    self.tr('The following files do not contain a valid script:\n-', 'AddScriptFromFileAction') +
                                    "\n-".join(wrongAlgs))
| gpl-2.0 |
ameily/paramiko | paramiko/kex_gex.py | 7 | 10302 | # Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Variant on `KexGroup1 <paramiko.kex_group1.KexGroup1>` where the prime "p" and
generator "g" are provided by the server. A bit more work is required on the
client side, and a **lot** more on the server side.
"""
import os
from hashlib import sha1, sha256
from paramiko import util
from paramiko.common import DEBUG
from paramiko.message import Message
from paramiko.py3compat import byte_chr, byte_ord, byte_mask
from paramiko.ssh_exception import SSHException
(
_MSG_KEXDH_GEX_REQUEST_OLD,
_MSG_KEXDH_GEX_GROUP,
_MSG_KEXDH_GEX_INIT,
_MSG_KEXDH_GEX_REPLY,
_MSG_KEXDH_GEX_REQUEST,
) = range(30, 35)
(
c_MSG_KEXDH_GEX_REQUEST_OLD,
c_MSG_KEXDH_GEX_GROUP,
c_MSG_KEXDH_GEX_INIT,
c_MSG_KEXDH_GEX_REPLY,
c_MSG_KEXDH_GEX_REQUEST,
) = [byte_chr(c) for c in range(30, 35)]
class KexGex(object):
    # RFC 4419 Diffie-Hellman group-exchange key exchange (SHA-1 flavor).
    # Unlike fixed-group kex, the server picks the modulus p and generator g
    # from its modulus pack based on the bit range the client requests.

    name = "diffie-hellman-group-exchange-sha1"
    min_bits = 1024
    max_bits = 8192
    preferred_bits = 2048
    hash_algo = sha1

    def __init__(self, transport):
        self.transport = transport
        # DH parameters and exchange values, filled in as the handshake
        # progresses: modulus p, generator g, secret x, public values e/f.
        self.p = None
        self.q = None
        self.g = None
        self.x = None
        self.e = None
        self.f = None
        # True when the peer used the legacy request that carries only a
        # preferred size (no min/max); affects the hash computation below.
        self.old_style = False

    def start_kex(self, _test_old_style=False):
        if self.transport.server_mode:
            # Server waits for the client's group request (either form).
            self.transport._expect_packet(
                _MSG_KEXDH_GEX_REQUEST, _MSG_KEXDH_GEX_REQUEST_OLD
            )
            return
        # request a bit range: we accept (min_bits) to (max_bits), but prefer
        # (preferred_bits). according to the spec, we shouldn't pull the
        # minimum up above 1024.
        m = Message()
        if _test_old_style:
            # only used for unit tests: we shouldn't ever send this
            m.add_byte(c_MSG_KEXDH_GEX_REQUEST_OLD)
            m.add_int(self.preferred_bits)
            self.old_style = True
        else:
            m.add_byte(c_MSG_KEXDH_GEX_REQUEST)
            m.add_int(self.min_bits)
            m.add_int(self.preferred_bits)
            m.add_int(self.max_bits)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_GEX_GROUP)

    def parse_next(self, ptype, m):
        # Dispatch an incoming kex packet to its handler.
        if ptype == _MSG_KEXDH_GEX_REQUEST:
            return self._parse_kexdh_gex_request(m)
        elif ptype == _MSG_KEXDH_GEX_GROUP:
            return self._parse_kexdh_gex_group(m)
        elif ptype == _MSG_KEXDH_GEX_INIT:
            return self._parse_kexdh_gex_init(m)
        elif ptype == _MSG_KEXDH_GEX_REPLY:
            return self._parse_kexdh_gex_reply(m)
        elif ptype == _MSG_KEXDH_GEX_REQUEST_OLD:
            return self._parse_kexdh_gex_request_old(m)
        msg = "KexGex {} asked to handle packet type {:d}"
        raise SSHException(msg.format(self.name, ptype))

    # ...internals...

    def _generate_x(self):
        # generate an "x" (1 < x < (p-1)/2).
        q = (self.p - 1) // 2
        qnorm = util.deflate_long(q, 0)
        qhbyte = byte_ord(qnorm[0])
        byte_count = len(qnorm)
        qmask = 0xff
        # Build a mask for the high byte so random candidates fall near q's
        # bit length (rejection sampling below enforces the exact range).
        while not (qhbyte & 0x80):
            qhbyte <<= 1
            qmask >>= 1
        while True:
            x_bytes = os.urandom(byte_count)
            x_bytes = byte_mask(x_bytes[0], qmask) + x_bytes[1:]
            x = util.inflate_long(x_bytes, 1)
            if (x > 1) and (x < q):
                break
        self.x = x

    def _parse_kexdh_gex_request(self, m):
        # Server side: client sent (min, preferred, max) bit sizes.
        minbits = m.get_int()
        preferredbits = m.get_int()
        maxbits = m.get_int()
        # smoosh the user's preferred size into our own limits
        if preferredbits > self.max_bits:
            preferredbits = self.max_bits
        if preferredbits < self.min_bits:
            preferredbits = self.min_bits
        # fix min/max if they're inconsistent. technically, we could just pout
        # and hang up, but there's no harm in giving them the benefit of the
        # doubt and just picking a bitsize for them.
        if minbits > preferredbits:
            minbits = preferredbits
        if maxbits < preferredbits:
            maxbits = preferredbits
        # now save a copy
        self.min_bits = minbits
        self.preferred_bits = preferredbits
        self.max_bits = maxbits
        # generate prime
        pack = self.transport._get_modulus_pack()
        if pack is None:
            raise SSHException("Can't do server-side gex with no modulus pack")
        self.transport._log(
            DEBUG,
            "Picking p ({} <= {} <= {} bits)".format(
                minbits, preferredbits, maxbits
            ),
        )
        self.g, self.p = pack.get_modulus(minbits, preferredbits, maxbits)
        m = Message()
        m.add_byte(c_MSG_KEXDH_GEX_GROUP)
        m.add_mpint(self.p)
        m.add_mpint(self.g)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_GEX_INIT)

    def _parse_kexdh_gex_request_old(self, m):
        # same as above, but without min_bits or max_bits (used by older
        # clients like putty)
        self.preferred_bits = m.get_int()
        # smoosh the user's preferred size into our own limits
        if self.preferred_bits > self.max_bits:
            self.preferred_bits = self.max_bits
        if self.preferred_bits < self.min_bits:
            self.preferred_bits = self.min_bits
        # generate prime
        pack = self.transport._get_modulus_pack()
        if pack is None:
            raise SSHException("Can't do server-side gex with no modulus pack")
        self.transport._log(
            DEBUG, "Picking p (~ {} bits)".format(self.preferred_bits)
        )
        self.g, self.p = pack.get_modulus(
            self.min_bits, self.preferred_bits, self.max_bits
        )
        m = Message()
        m.add_byte(c_MSG_KEXDH_GEX_GROUP)
        m.add_mpint(self.p)
        m.add_mpint(self.g)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_GEX_INIT)
        # Remember the legacy form so the exchange hash omits min/max.
        self.old_style = True

    def _parse_kexdh_gex_group(self, m):
        # Client side: server chose the group (p, g).
        self.p = m.get_mpint()
        self.g = m.get_mpint()
        # reject if p's bit length < 1024 or > 8192
        bitlen = util.bit_length(self.p)
        if (bitlen < 1024) or (bitlen > 8192):
            raise SSHException(
                "Server-generated gex p (don't ask) is out of range "
                "({} bits)".format(bitlen)
            )
        self.transport._log(DEBUG, "Got server p ({} bits)".format(bitlen))
        self._generate_x()
        # now compute e = g^x mod p
        self.e = pow(self.g, self.x, self.p)
        m = Message()
        m.add_byte(c_MSG_KEXDH_GEX_INIT)
        m.add_mpint(self.e)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_GEX_REPLY)

    def _parse_kexdh_gex_init(self, m):
        # Server side: client sent its public value e; compute the shared
        # secret, build the exchange hash, sign it, and reply.
        self.e = m.get_mpint()
        if (self.e < 1) or (self.e > self.p - 1):
            raise SSHException('Client kex "e" is out of range')
        self._generate_x()
        self.f = pow(self.g, self.x, self.p)
        K = pow(self.e, self.x, self.p)
        key = self.transport.get_server_key().asbytes()
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa
        hm = Message()
        hm.add(
            self.transport.remote_version,
            self.transport.local_version,
            self.transport.remote_kex_init,
            self.transport.local_kex_init,
            key,
        )
        # Legacy requests hashed only the preferred size (no min/max).
        if not self.old_style:
            hm.add_int(self.min_bits)
        hm.add_int(self.preferred_bits)
        if not self.old_style:
            hm.add_int(self.max_bits)
        hm.add_mpint(self.p)
        hm.add_mpint(self.g)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        H = self.hash_algo(hm.asbytes()).digest()
        self.transport._set_K_H(K, H)
        # sign it
        sig = self.transport.get_server_key().sign_ssh_data(H)
        # send reply
        m = Message()
        m.add_byte(c_MSG_KEXDH_GEX_REPLY)
        m.add_string(key)
        m.add_mpint(self.f)
        m.add_string(sig)
        self.transport._send_message(m)
        self.transport._activate_outbound()

    def _parse_kexdh_gex_reply(self, m):
        # Client side: server sent host key, f and signature; compute the
        # shared secret and verify the server's signature over H.
        host_key = m.get_string()
        self.f = m.get_mpint()
        sig = m.get_string()
        if (self.f < 1) or (self.f > self.p - 1):
            raise SSHException('Server kex "f" is out of range')
        K = pow(self.f, self.x, self.p)
        # okay, build up the hash H of
        # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa
        hm = Message()
        hm.add(
            self.transport.local_version,
            self.transport.remote_version,
            self.transport.local_kex_init,
            self.transport.remote_kex_init,
            host_key,
        )
        if not self.old_style:
            hm.add_int(self.min_bits)
        hm.add_int(self.preferred_bits)
        if not self.old_style:
            hm.add_int(self.max_bits)
        hm.add_mpint(self.p)
        hm.add_mpint(self.g)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest())
        self.transport._verify_key(host_key, sig)
        self.transport._activate_outbound()
class KexGexSHA256(KexGex):
    # Same group-exchange handshake, hashed with SHA-256 instead of SHA-1.
    name = "diffie-hellman-group-exchange-sha256"
    hash_algo = sha256
| lgpl-2.1 |
chirilo/mozillians | vendor-local/lib/python/celery/task/chords.py | 12 | 2174 | # -*- coding: utf-8 -*-
"""
celery.task.chords
~~~~~~~~~~~~~~~~~~
Chords (task set callbacks).
:copyright: (c) 2009 - 2012 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from .. import current_app
from ..result import AsyncResult, TaskSetResult
from ..utils import uuid
from .sets import TaskSet, subtask
@current_app.task(name="celery.chord_unlock", max_retries=None)
def _unlock_chord(setid, callback, interval=1, propagate=False,
                  max_retries=None, result=None):
    # Poll the taskset: once every subtask is ready, apply the chord body
    # with the joined results; otherwise retry after `interval` seconds.
    result = TaskSetResult(setid, map(AsyncResult, result))
    if result.ready():
        # Prefer the backend's native join when supported (fewer round trips).
        j = result.join_native if result.supports_native_join else result.join
        subtask(callback).delay(j(propagate=propagate))
    else:
        _unlock_chord.retry(countdown=interval, max_retries=max_retries)
class Chord(current_app.Task):
    """Task that applies a TaskSet and registers its callback body with the
    result backend so the body fires once all subtasks complete."""

    accept_magic_kwargs = False
    name = "celery.chord"

    def run(self, set, body, interval=1, max_retries=None,
            propagate=False, **kwargs):
        if not isinstance(set, TaskSet):
            set = TaskSet(set)
        r = []
        setid = uuid()
        # Pre-assign task ids and attach the chord body so the backend can
        # track completion of every subtask.
        for task in set.tasks:
            tid = uuid()
            task.options.update(task_id=tid, chord=body)
            r.append(current_app.AsyncResult(tid))
        self.backend.on_chord_apply(setid, body,
                                    interval=interval,
                                    max_retries=max_retries,
                                    propagate=propagate,
                                    result=r)
        return set.apply_async(taskset_id=setid)
class chord(object):
    """User-facing chord constructor: ``chord(tasks)(body)`` applies the
    tasks and schedules ``body`` with their joined results."""

    Chord = Chord

    def __init__(self, tasks, **options):
        self.tasks = tasks
        self.options = options

    def __call__(self, body, **options):
        # Fix the body's task id up front so we can return its AsyncResult.
        tid = body.options.setdefault("task_id", uuid())
        result = self.Chord.apply_async((list(self.tasks), body),
                                        self.options, **options)
        if self.Chord.app.conf.CELERY_ALWAYS_EAGER:
            # Eager mode runs everything inline; apply the body directly.
            return subtask(body).apply(args=(result.result.join(),))
        return body.type.AsyncResult(tid)
paramite/glance | glance/contrib/plugins/image_artifact/v1/image.py | 14 | 1620 | # Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common.artifacts import definitions
class ImageAsAnArtifact(definitions.ArtifactType):
    """Artifact-type definition mirroring a Glance image.

    Declarative schema: disk/container formats are required and immutable
    after creation (mutable=False).
    """
    __type_name__ = 'Image'
    __endpoint__ = 'images'

    # The image payload itself.
    file = definitions.BinaryObject(required=True)
    disk_format = definitions.String(allowed_values=['ami', 'ari', 'aki',
                                                     'vhd', 'vmdk', 'raw',
                                                     'qcow2', 'vdi', 'iso'],
                                     required=True,
                                     mutable=False)
    container_format = definitions.String(allowed_values=['ami', 'ari',
                                                          'aki', 'bare',
                                                          'ovf', 'ova'],
                                          required=True,
                                          mutable=False)
    # Minimum resources needed to boot the image (presumably GB for disk
    # and MB for RAM, matching Glance image conventions — confirm).
    min_disk = definitions.Integer(min_value=0, default=0)
    min_ram = definitions.Integer(min_value=0, default=0)
    virtual_size = definitions.Integer(min_value=0)
| apache-2.0 |
kerneltask/micropython | tests/basics/string_format.py | 15 | 1782 | # basic functionality test for {} format string
def test(fmt, *args):
print('{:8s}'.format(fmt) + '>' + fmt.format(*args) + '<')
# literal brace escaping and positional argument ordering
test("}}{{")
test("{}-{}", 1, [4, 5])
test("{0}-{1}", 1, [4, 5])
test("{1}-{0}", 1, [4, 5])
# conversion flags (!r / !s) and presentation types
test("{:x}", 1)
test("{!r}", 2)
test("{:x}", 0x10)
test("{!r}", "foo")
test("{!s}", "foo")
test("{0!r:>10s} {0!s:>10s}", "foo")
# integer presentation types with a minimum field width
test("{:4b}", 10)
test("{:4c}", 48)
test("{:4d}", 123)
test("{:4n}", 123)
test("{:4o}", 123)
test("{:4x}", 123)
test("{:4X}", 123)
test("{:4,d}", 12345678)
# alternate form (#) adds the base prefix
test("{:#4b}", 10)
test("{:#4o}", 123)
test("{:#4x}", 123)
test("{:#4X}", 123)
test("{:#4d}", 0)
test("{:#4b}", 0)
test("{:#4o}", 0)
test("{:#4x}", 0)
test("{:#4X}", 0)
# string alignment, fill and precision (truncation)
test("{:<6s}", "ab")
test("{:>6s}", "ab")
test("{:^6s}", "ab")
test("{:.1s}", "ab")
# fill/align combined with sign handling for ints
test("{: <6d}", 123)
test("{: <6d}", -123)
test("{:0<6d}", 123)
test("{:0<6d}", -123)
test("{:@<6d}", 123)
test("{:@<6d}", -123)
test("{:@< 6d}", 123)
test("{:@< 6d}", -123)
test("{:@<+6d}", 123)
test("{:@<+6d}", -123)
test("{:@<-6d}", 123)
test("{:@<-6d}", -123)
test("{:@>6d}", -123)
test("{:@<6d}", -123)
test("{:@=6d}", -123)
test("{:06d}", -123)
test("{:>20}", "foo")
test("{:^20}", "foo")
test("{:<20}", "foo")
# formatting bool as int
test('{:d}', False)
test('{:20}', False)
test('{:d}', True)
test('{:20}', True)
# nested format specifiers
print("{:{}}".format(123, '#>10'))
print("{:{}{}{}}".format(123, '#', '>', '10'))
print("{0:{1}{2}}".format(123, '#>', '10'))
print("{text:{align}{width}}".format(text="foo", align="<", width=20))
print("{text:{align}{width}}".format(text="foo", align="^", width=10))
print("{text:{align}{width}}".format(text="foo", align=">", width=30))
# keyword arguments, including ignored extras
print("{foo}/foo".format(foo="bar"))
print("{}".format(123, foo="bar"))
print("{}-{foo}".format(123, foo="bar"))
| mit |
josl/ThinkStats2 | code/chap07soln.py | 68 | 3259 | """This file contains code for use with "Think Stats",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function
import sys
import numpy as np
import math
import first
import thinkplot
import thinkstats2
"""This file contains a solution to an exercise in Think Stats:
Using data from the NSFG, make a scatter plot of birth weight
versus mother's age. Plot percentiles of birth weight
versus mother's age. Compute Pearson's and Spearman's correlations.
How would you characterize the relationship
between these variables?
My conclusions:
1) The scatterplot shows a weak relationship between the variables.
2) The correlations support this. Pearson's is around 0.07, Spearman's
is around 0.09. The difference between them suggests some influence
of outliers or a non-linear relationsip.
3) Plotting percentiles of weight versus age suggests that the
relationship is non-linear. Birth weight increases more quickly
in the range of mother's age from 15 to 25. After that, the effect
is weaker.
"""
def ScatterPlot(ages, weights, alpha=1.0):
    """Make a scatter plot and save it.

    ages: sequence of float
    weights: sequence of float
    alpha: float  (alpha < 1 reveals point density where markers overlap)
    """
    thinkplot.Scatter(ages, weights, alpha=alpha)
    # Axis limits chosen for mother's age / birth weight in the NSFG data.
    thinkplot.Config(xlabel='age (years)',
                     ylabel='weight (lbs)',
                     xlim=[10, 45],
                     ylim=[0, 15],
                     legend=False)
def HexBin(ages, weights, bins=None):
    """Make a hexbin plot and save it.

    ages: sequence of float
    weights: sequence of float
    bins: 'log' or None for linear  (color scale of the hex counts)
    """
    thinkplot.HexBin(ages, weights, bins=bins)
    thinkplot.Config(xlabel='age (years)',
                     ylabel='weight (lbs)',
                     legend=False)
def BinnedPercentiles(df):
    """Bin the data by age and plot percentiles of weight for each bin.

    df: DataFrame with `agepreg` and `totalwgt_lb` columns
    """
    bins = np.arange(10, 48, 3)
    indices = np.digitize(df.agepreg, bins)
    groups = df.groupby(indices)
    # Drop the first and last (sparsely populated, partial) bins so the
    # percentile curves are based on reasonably sized groups.
    ages = [group.agepreg.mean() for i, group in groups][1:-1]
    cdfs = [thinkstats2.Cdf(group.totalwgt_lb) for i, group in groups][1:-1]
    thinkplot.PrePlot(3)
    for percent in [75, 50, 25]:
        weights = [cdf.Percentile(percent) for cdf in cdfs]
        label = '%dth' % percent
        thinkplot.Plot(ages, weights, label=label)
    thinkplot.Save(root='chap07scatter3',
                   formats=['jpg'],
                   xlabel="mother's age (years)",
                   ylabel='birth weight (lbs)')
def main(script):
    """Generate the plots and print correlations for the exercise."""
    # Fixed seed so the resampled figures are reproducible.
    thinkstats2.RandomSeed(17)
    live, firsts, others = first.MakeFrames()
    # Keep only rows with both mother's age and birth weight recorded.
    live = live.dropna(subset=['agepreg', 'totalwgt_lb'])
    BinnedPercentiles(live)
    ages = live.agepreg
    weights = live.totalwgt_lb
    print('thinkstats2 Corr', thinkstats2.Corr(ages, weights))
    print('thinkstats2 SpearmanCorr',
          thinkstats2.SpearmanCorr(ages, weights))
    ScatterPlot(ages, weights, alpha=0.1)
    thinkplot.Save(root='chap07scatter1',
                   legend=False,
                   formats=['jpg'])


if __name__ == '__main__':
    main(*sys.argv)
| gpl-3.0 |
vmahuli/tempest | tempest/api/volume/admin/test_volumes_backup.py | 3 | 3279 | # Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.volume import base
from tempest.common.utils import data_utils
from tempest import config
from tempest.openstack.common import log as logging
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class VolumesBackupsTest(base.BaseVolumeV1AdminTest):
    """Admin integration test for the Cinder volume-backup workflow:
    create, get, list (detailed), restore and (via cleanup) delete."""

    _interface = "json"

    @classmethod
    @test.safe_setup
    def setUpClass(cls):
        super(VolumesBackupsTest, cls).setUpClass()
        # Skip the whole class when the deployment has backups disabled.
        if not CONF.volume_feature_enabled.backup:
            raise cls.skipException("Cinder backup feature disabled")
        cls.volumes_adm_client = cls.os_adm.volumes_client
        cls.backups_adm_client = cls.os_adm.backups_client
        cls.volume = cls.create_volume()

    @test.attr(type='smoke')
    def test_volume_backup_create_get_detailed_list_restore_delete(self):
        # Create backup
        backup_name = data_utils.rand_name('Backup')
        create_backup = self.backups_adm_client.create_backup
        resp, backup = create_backup(self.volume['id'],
                                     name=backup_name)
        self.assertEqual(202, resp.status)
        # Register deletion first so it runs even if later steps fail.
        self.addCleanup(self.backups_adm_client.delete_backup,
                        backup['id'])
        self.assertEqual(backup_name, backup['name'])
        # Wait for both the source volume and the backup to settle.
        self.volumes_adm_client.wait_for_volume_status(self.volume['id'],
                                                       'available')
        self.backups_adm_client.wait_for_backup_status(backup['id'],
                                                       'available')
        # Get a given backup
        resp, backup = self.backups_adm_client.get_backup(backup['id'])
        self.assertEqual(200, resp.status)
        self.assertEqual(backup_name, backup['name'])
        # Get all backups with detail
        resp, backups = self.backups_adm_client.list_backups_with_detail()
        self.assertEqual(200, resp.status)
        self.assertIn((backup['name'], backup['id']),
                      [(m['name'], m['id']) for m in backups])
        # Restore backup
        resp, restore = self.backups_adm_client.restore_backup(backup['id'])
        self.assertEqual(202, resp.status)
        # Delete backup
        self.addCleanup(self.volumes_adm_client.delete_volume,
                        restore['volume_id'])
        self.assertEqual(backup['id'], restore['backup_id'])
        self.backups_adm_client.wait_for_backup_status(backup['id'],
                                                       'available')
        self.volumes_adm_client.wait_for_volume_status(restore['volume_id'],
                                                       'available')
| apache-2.0 |
adazey/Muzez | libs/requests/packages/urllib3/packages/backports/makefile.py | 10 | 1514 | # -*- coding: utf-8 -*-
"""
backports.makefile
~~~~~~~~~~~~~~~~~~
Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
"""
import io
from socket import SocketIO
def backport_makefile(self, mode="r", buffering=None, encoding=None,
                      errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.

    Wraps the socket ``self`` in a file-like object, mirroring the stdlib
    semantics for mode validation, buffering selection and text wrapping.
    """
    if not set(mode) <= set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    is_writer = "w" in mode
    is_reader = "r" in mode or not is_writer
    assert is_reader or is_writer
    is_binary = "b" in mode
    # Raw mode keeps only the r/w flags, in that order.
    rawmode = ("r" if is_reader else "") + ("w" if is_writer else "")
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    # Resolve the buffering policy exactly like socket.makefile().
    if buffering is None:
        buffering = -1
    if buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        # Unbuffered access is only meaningful for binary streams.
        if not is_binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if is_reader and is_writer:
        buffer = io.BufferedRWPair(raw, raw, buffering)
    elif is_reader:
        buffer = io.BufferedReader(raw, buffering)
    else:
        assert is_writer
        buffer = io.BufferedWriter(raw, buffering)
    if is_binary:
        return buffer
    text = io.TextIOWrapper(buffer, encoding, errors, newline)
    text.mode = mode
    return text
| gpl-3.0 |
InfiniaPress/Passenger-Pigeon | assets/bower_components/bower_components/forge/bower_components/forge/tests/policyserver.py | 171 | 3551 | #!/usr/bin/env python
"""
Flash Socket Policy Server.
- Starts Flash socket policy file server.
- Defaults to port 843.
- NOTE: Most operating systems require administrative privileges to use
ports under 1024.
$ ./policyserver.py [options]
"""
"""
Also consider Adobe's solutions:
http://www.adobe.com/devnet/flashplayer/articles/socket_policy_files.html
"""
from multiprocessing import Process
from optparse import OptionParser
import SocketServer
import logging
# Set address reuse for all TCPServers
SocketServer.TCPServer.allow_reuse_address = True
# Static socket policy file string.
# NOTE: This format is very strict. Edit with care.
socket_policy_file = """\
<?xml version="1.0"?>\
<!DOCTYPE cross-domain-policy\
SYSTEM "http://www.adobe.com/xml/dtds/cross-domain-policy.dtd">\
<cross-domain-policy>\
<allow-access-from domain="*" to-ports="*"/>\
</cross-domain-policy>\0"""
class PolicyHandler(SocketServer.BaseRequestHandler):
    """
    The RequestHandler class for our server.

    Returns a policy file when requested.
    """

    def handle(self):
        """Send policy string if proper request string is received."""
        # get some data
        # TODO: make this more robust (while loop, etc)
        # Flash terminates its request with a NUL byte; strip it before
        # comparing.
        self.data = self.request.recv(1024).rstrip('\0')
        logging.debug("%s wrote:%s" % (self.client_address[0], repr(self.data)))
        # if policy file request, send the file.
        if self.data == "<policy-file-request/>":
            logging.info("Policy server request from %s." % (self.client_address[0]))
            self.request.send(socket_policy_file)
        else:
            logging.info("Policy server received junk from %s: \"%s\"" % \
                         (self.client_address[0], repr(self.data)))
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    # ThreadingMixIn spawns one handler thread per connection.

    def serve_forever(self):
        """Handle one request at a time until shutdown or keyboard interrupt."""
        try:
            SocketServer.BaseServer.serve_forever(self)
        except KeyboardInterrupt:
            # Swallow Ctrl-C so the caller can shut down cleanly.
            return
def main():
    """Run socket policy file servers."""
    usage = "Usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option("", "--host", dest="host", metavar="HOST",
                      default="localhost", help="bind to HOST")
    parser.add_option("-p", "--port", dest="port", metavar="PORT",
                      default=843, type="int", help="serve on PORT")
    parser.add_option("-d", "--debug", dest="debug", action="store_true",
                      default=False, help="debugging output")
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                      default=False, help="verbose output")
    (options, args) = parser.parse_args()
    # setup logging: -d beats -v, default is warnings only.
    if options.debug:
        lvl = logging.DEBUG
    elif options.verbose:
        lvl = logging.INFO
    else:
        lvl = logging.WARNING
    logging.basicConfig(level=lvl, format="%(levelname)-8s %(message)s")
    # log basic info
    logging.info("Flash Socket Policy Server. Use ctrl-c to exit.")
    # create policy server
    logging.info("Socket policy serving on %s:%d." % (options.host, options.port))
    policyd = ThreadedTCPServer((options.host, options.port), PolicyHandler)
    # start server in a child process; the parent just waits on it so a
    # KeyboardInterrupt in the parent can log a clean shutdown message.
    policy_p = Process(target=policyd.serve_forever)
    policy_p.start()
    while policy_p.is_alive():
        try:
            policy_p.join(1)
        except KeyboardInterrupt:
            logging.info("Stopping test server...")


if __name__ == "__main__":
    main()
| mit |
bq/web2board | res/common/Scons/sconsFiles/SCons/Platform/posix.py | 6 | 4209 | """SCons.Platform.posix
Platform-specific initialization for POSIX (Linux, UNIX, etc.) systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/posix.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
import errno
import os
import os.path
import subprocess
import sys
import select
import SCons.Util
from SCons.Platform import TempFileMunge
# Map the errno raised when exec of a command fails to the conventional
# shell exit status: ENOENT (2) -> 127 "command not found",
# EACCES (13) -> 126 "found but not executable".
exitvalmap = {
    2 : 127,
    13 : 126,
}
def escape(arg):
    """Return *arg* quoted for the POSIX shell: backslash-escape the
    characters that remain special inside double quotes, then wrap the
    result in double quotes."""
    escaped = arg.replace('\\', '\\\\')
    for ch in '"$()':
        escaped = escaped.replace(ch, '\\' + ch)
    return '"%s"' % escaped
def exec_subprocess(l, env):
    """Run the command list *l* with environment *env* and return its
    exit status (blocks until the child terminates)."""
    # subprocess.call() is Popen() + wait() in one step.
    return subprocess.call(l, env=env, close_fds=True)
def subprocess_spawn(sh, escape, cmd, args, env):
    """SCons SPAWN hook: run *args* through the shell *sh* and return the
    exit status.  *escape* and *cmd* are part of the SPAWN signature but
    unused here."""
    command_line = ' '.join(args)
    return exec_subprocess([sh, '-c', command_line], env)
def exec_popen3(l, env, stdout, stderr):
    """Run command list *l* with the child's stdout/stderr redirected to
    the given streams; return the exit status."""
    child = subprocess.Popen(
        l, env=env, close_fds=True, stdout=stdout, stderr=stderr)
    return child.wait()
def piped_env_spawn(sh, escape, cmd, args, env, stdout, stderr):
    """SCons PSPAWN hook: spawn *args* through shell *sh*.

    The command name and the command's stdout go to *stdout*; the
    command's stderr goes to *stderr*.  Returns the exit status.
    """
    command_line = ' '.join(args)
    return exec_popen3([sh, '-c', command_line], env, stdout, stderr)
def generate(env):
    """Populate *env* with the POSIX platform defaults: spawn hooks, shell
    settings and the file-name prefixes/suffixes used by the builders."""
    # Bearing in mind we have python 2.4 as a baseline, we can just do this:
    spawn = subprocess_spawn
    pspawn = piped_env_spawn
    # Note that this means that 'escape' is no longer used

    if 'ENV' not in env:
        env['ENV'] = {}
    # Minimal default search path for the tools invoked during the build.
    env['ENV']['PATH'] = '/usr/local/bin:/opt/bin:/bin:/usr/bin'

    # Object / program / library naming conventions on POSIX.
    env['OBJPREFIX'] = ''
    env['OBJSUFFIX'] = '.o'
    env['SHOBJPREFIX'] = '$OBJPREFIX'
    env['SHOBJSUFFIX'] = '$OBJSUFFIX'
    env['PROGPREFIX'] = ''
    env['PROGSUFFIX'] = ''
    env['LIBPREFIX'] = 'lib'
    env['LIBSUFFIX'] = '.a'
    env['SHLIBPREFIX'] = '$LIBPREFIX'
    env['SHLIBSUFFIX'] = '.so'
    env['LIBPREFIXES'] = [ '$LIBPREFIX' ]
    env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ]
    # Process-spawning hooks and shell configuration.
    env['PSPAWN'] = pspawn
    env['SPAWN'] = spawn
    env['SHELL'] = 'sh'
    env['ESCAPE'] = escape
    # Overlong command lines are routed through a temp file referenced
    # with an '@' prefix (response-file style).
    env['TEMPFILE'] = TempFileMunge
    env['TEMPFILEPREFIX'] = '@'
    #Based on LINUX: ARG_MAX=ARG_MAX=131072 - 3000 for environment expansion
    #Note: specific platforms might rise or lower this value
    env['MAXLINELENGTH'] = 128072

    # This platform supports RPATH specifications.
    env['__RPATH'] = '$_RPATH'

    # GDC is GCC family, but DMD and LDC have different options.
    # Must be able to have GCC and DMD work in the same build, so:
    env['__DRPATH'] = '$_DRPATH'
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| lgpl-3.0 |
0111001101111010/open-health-inspection-api | venv/lib/python2.7/site-packages/flask/testsuite/ext.py | 563 | 5156 | # -*- coding: utf-8 -*-
"""
flask.testsuite.ext
~~~~~~~~~~~~~~~~~~~
Tests the extension import thing.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
import unittest
try:
from imp import reload as reload_module
except ImportError:
reload_module = reload
from flask.testsuite import FlaskTestCase
from flask._compat import PY2
class ExtImportHookTestCase(FlaskTestCase):
    """Exercises the flask.ext import hook: importing flask.ext.<name>
    must resolve to flask_<name> (new style) or flaskext.<name> (old
    style), must not cache broken modules, and must not swallow the real
    ImportError raised inside an extension."""

    def setup(self):
        # we clear this out for various reasons.  The most important one is
        # that a real flaskext could be in there which would disable our
        # fake package.  Secondly we want to make sure that the flaskext
        # import hook does not break on reloading.
        for entry, value in list(sys.modules.items()):
            if (entry.startswith('flask.ext.') or
                    entry.startswith('flask_') or
                    entry.startswith('flaskext.') or
                    entry == 'flaskext') and value is not None:
                sys.modules.pop(entry, None)
        from flask import ext
        reload_module(ext)

        # reloading must not add more hooks
        import_hooks = 0
        for item in sys.meta_path:
            cls = type(item)
            if cls.__module__ == 'flask.exthook' and \
                    cls.__name__ == 'ExtensionImporter':
                import_hooks += 1
        self.assert_equal(import_hooks, 1)

    def teardown(self):
        # the hook must never leave dotted names behind in flask.ext
        from flask import ext
        for key in ext.__dict__:
            self.assert_not_in('.', key)

    # --- new-style (flask_<name>) extensions -----------------------------
    def test_flaskext_new_simple_import_normal(self):
        from flask.ext.newext_simple import ext_id
        self.assert_equal(ext_id, 'newext_simple')

    def test_flaskext_new_simple_import_module(self):
        from flask.ext import newext_simple
        self.assert_equal(newext_simple.ext_id, 'newext_simple')
        self.assert_equal(newext_simple.__name__, 'flask_newext_simple')

    def test_flaskext_new_package_import_normal(self):
        from flask.ext.newext_package import ext_id
        self.assert_equal(ext_id, 'newext_package')

    def test_flaskext_new_package_import_module(self):
        from flask.ext import newext_package
        self.assert_equal(newext_package.ext_id, 'newext_package')
        self.assert_equal(newext_package.__name__, 'flask_newext_package')

    def test_flaskext_new_package_import_submodule_function(self):
        from flask.ext.newext_package.submodule import test_function
        self.assert_equal(test_function(), 42)

    def test_flaskext_new_package_import_submodule(self):
        from flask.ext.newext_package import submodule
        self.assert_equal(submodule.__name__, 'flask_newext_package.submodule')
        self.assert_equal(submodule.test_function(), 42)

    # --- old-style (flaskext.<name>) extensions --------------------------
    def test_flaskext_old_simple_import_normal(self):
        from flask.ext.oldext_simple import ext_id
        self.assert_equal(ext_id, 'oldext_simple')

    def test_flaskext_old_simple_import_module(self):
        from flask.ext import oldext_simple
        self.assert_equal(oldext_simple.ext_id, 'oldext_simple')
        self.assert_equal(oldext_simple.__name__, 'flaskext.oldext_simple')

    def test_flaskext_old_package_import_normal(self):
        from flask.ext.oldext_package import ext_id
        self.assert_equal(ext_id, 'oldext_package')

    def test_flaskext_old_package_import_module(self):
        from flask.ext import oldext_package
        self.assert_equal(oldext_package.ext_id, 'oldext_package')
        self.assert_equal(oldext_package.__name__, 'flaskext.oldext_package')

    def test_flaskext_old_package_import_submodule(self):
        from flask.ext.oldext_package import submodule
        self.assert_equal(submodule.__name__, 'flaskext.oldext_package.submodule')
        self.assert_equal(submodule.test_function(), 42)

    def test_flaskext_old_package_import_submodule_function(self):
        from flask.ext.oldext_package.submodule import test_function
        self.assert_equal(test_function(), 42)

    # --- error behaviour -------------------------------------------------
    def test_flaskext_broken_package_no_module_caching(self):
        # A failing extension import must fail again on retry, not be
        # served from sys.modules.
        for x in range(2):
            with self.assert_raises(ImportError):
                import flask.ext.broken

    def test_no_error_swallowing(self):
        # The ImportError raised *inside* the extension must propagate
        # unchanged, with a traceback pointing into the extension.
        try:
            import flask.ext.broken
        except ImportError:
            exc_type, exc_value, tb = sys.exc_info()
            self.assert_true(exc_type is ImportError)
            if PY2:
                message = 'No module named missing_module'
            else:
                message = 'No module named \'missing_module\''
            self.assert_equal(str(exc_value), message)
            self.assert_true(tb.tb_frame.f_globals is globals())

            # reraise() adds a second frame so we need to skip that one too.
            # On PY3 we even have another one :(
            next = tb.tb_next.tb_next
            if not PY2:
                next = next.tb_next
            self.assert_in('flask_broken/__init__.py', next.tb_frame.f_code.co_filename)
def suite():
    """Build the unittest suite for this module."""
    # Avoid shadowing this function's own name with the local variable.
    result = unittest.TestSuite()
    result.addTest(unittest.makeSuite(ExtImportHookTestCase))
    return result
| gpl-2.0 |
iiegor/reaper | lib/aiml/DefaultSubs.py | 22 | 3587 | """This file contains the default (English) substitutions for the
PyAIML kernel. These substitutions may be overridden by using the
Kernel.loadSubs(filename) method. The filename specified should refer
to a Windows-style INI file with the following format:
# lines that start with '#' are comments
# The 'gender' section contains the substitutions performed by the
# <gender> AIML tag, which swaps masculine and feminine pronouns.
[gender]
he = she
she = he
# and so on...
# The 'person' section contains the substitutions performed by the
# <person> AIML tag, which swaps 1st and 2nd person pronouns.
[person]
I = you
you = I
# and so on...
# The 'person2' section contains the substitutions performed by
# the <person2> AIML tag, which swaps 1st and 3nd person pronouns.
[person2]
I = he
he = I
# and so on...
# the 'normal' section contains subtitutions run on every input
# string passed into Kernel.respond(). It's mainly used to
# correct common misspellings, and to convert contractions
# ("WHAT'S") into a format that will match an AIML pattern ("WHAT
# IS").
[normal]
what's = what is
"""
# Substitutions performed by the <gender> AIML tag: swap masculine and
# feminine third-person pronouns.
defaultGender = dict(
    # masculine -> feminine
    he="she", him="her", his="her", himself="herself",
    # feminine -> masculine
    she="he", her="him", hers="his", herself="himself",
)
# Substitutions performed by the <person> AIML tag: swap 1st and 3rd
# person pronouns (3rd person always maps back to 1st).
defaultPerson = dict(
    # 1st->3rd (masculine)
    I="he", me="him", my="his", mine="his", myself="himself",
    # 3rd->1st (masculine)
    he="I", him="me", his="my", himself="myself",
    # 3rd->1st (feminine)
    she="I", her="me", hers="mine", herself="myself",
)
# Substitutions performed by the <person2> AIML tag: swap 1st and 2nd
# person pronouns.
defaultPerson2 = dict(
    # 1st -> 2nd
    I="you", me="you", my="your", mine="yours", myself="yourself",
    # 2nd -> 1st
    you="me", your="my", yours="mine", yourself="myself",
)
# TODO: this list is far from complete
# Contraction/slang expansions applied to every input line (the 'normal'
# substitutions), so user text matches AIML patterns written in expanded
# form ("what's" -> "what is").
defaultNormal = {
    "wanna": "want to", "gonna": "going to",
    # first person
    "I'm": "I am", "I'd": "I would", "I'll": "I will", "I've": "I have",
    # second person
    "you'd": "you would", "you're": "you are", "you've": "you have", "you'll": "you will",
    # third person singular
    "he's": "he is", "he'd": "he would", "he'll": "he will",
    "she's": "she is", "she'd": "she would", "she'll": "she will",
    # plural
    "we're": "we are", "we'd": "we would", "we'll": "we will", "we've": "we have",
    "they're": "they are", "they'd": "they would", "they'll": "they will", "they've": "they have",
    "y'all": "you all",
    # negations
    "can't": "can not", "cannot": "can not", "couldn't": "could not",
    "wouldn't": "would not", "shouldn't": "should not",
    "isn't": "is not", "ain't": "is not", "don't": "do not", "aren't": "are not",
    "won't": "will not", "weren't": "were not", "wasn't": "was not",
    "didn't": "did not", "hasn't": "has not", "hadn't": "had not", "haven't": "have not",
    # wh-questions
    "where's": "where is", "where'd": "where did", "where'll": "where will",
    "who's": "who is", "who'd": "who did", "who'll": "who will",
    "what's": "what is", "what'd": "what did", "what'll": "what will",
    "when's": "when is", "when'd": "when did", "when'll": "when will",
    "why's": "why is", "why'd": "why did", "why'll": "why will",
    # impersonal
    "it's": "it is", "it'd": "it would", "it'll": "it will",
}
| mit |
shakamunyi/nova | nova/tests/unit/virt/hyperv/test_migrationops.py | 13 | 3177 | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.tests.unit import fake_instance
from nova.tests.unit.virt.hyperv import test_base
from nova.virt.hyperv import migrationops
from nova.virt.hyperv import vmutils
class MigrationOpsTestCase(test_base.HyperVBaseTestCase):
    """Unit tests for the Hyper-V MigrationOps class."""

    # Timeout/retry values handed through to power_off during migration.
    _FAKE_TIMEOUT = 10
    _FAKE_RETRY_INTERVAL = 5

    def setUp(self):
        super(MigrationOpsTestCase, self).setUp()
        self.context = 'fake-context'
        # Real MigrationOps instance with all collaborators mocked out.
        self._migrationops = migrationops.MigrationOps()
        self._migrationops._vmops = mock.MagicMock()
        self._migrationops._vmutils = mock.MagicMock()
        self._migrationops._pathutils = mock.Mock()

    def test_check_and_attach_config_drive_unknown_path(self):
        # When the config-drive path cannot be resolved, attaching must
        # raise HyperVException instead of proceeding silently.
        instance = fake_instance.fake_instance_obj(self.context,
            expected_attrs=['system_metadata'])
        instance.config_drive = 'True'
        self._migrationops._pathutils.lookup_configdrive_path.return_value = (
            None)
        self.assertRaises(vmutils.HyperVException,
                          self._migrationops._check_and_attach_config_drive,
                          instance,
                          mock.sentinel.FAKE_VM_GEN)

    @mock.patch.object(migrationops.MigrationOps, '_migrate_disk_files')
    @mock.patch.object(migrationops.MigrationOps, '_check_target_flavor')
    def test_migrate_disk_and_power_off(self, mock_check_flavor,
                                        mock_migrate_disk_files):
        # Happy path: flavor checked, VM powered off, disk files migrated
        # to the destination, and the source VM destroyed while keeping
        # its disks (destroy_disks=False).
        instance = fake_instance.fake_instance_obj(self.context)
        flavor = mock.MagicMock()
        network_info = mock.MagicMock()

        disk_files = [mock.MagicMock()]
        volume_drives = [mock.MagicMock()]

        mock_get_vm_st_path = self._migrationops._vmutils.get_vm_storage_paths
        mock_get_vm_st_path.return_value = (disk_files, volume_drives)

        self._migrationops.migrate_disk_and_power_off(
            self.context, instance, mock.sentinel.FAKE_DEST, flavor,
            network_info, None, self._FAKE_TIMEOUT, self._FAKE_RETRY_INTERVAL)

        mock_check_flavor.assert_called_once_with(instance, flavor)
        self._migrationops._vmops.power_off.assert_called_once_with(
            instance, self._FAKE_TIMEOUT, self._FAKE_RETRY_INTERVAL)
        mock_get_vm_st_path.assert_called_once_with(instance.name)
        mock_migrate_disk_files.assert_called_once_with(
            instance.name, disk_files, mock.sentinel.FAKE_DEST)
        self._migrationops._vmops.destroy.assert_called_once_with(
            instance, destroy_disks=False)
| apache-2.0 |
philenotfound/beagleboneblack-kernel | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
# Argument handling: an optional [comm] restricts the summary to
# syscalls issued by that command name.
usage = "perf script -s syscall-counts.py [comm]\n";

for_comm = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    for_comm = sys.argv[1]

# Per-syscall-id event counts (autodict comes from the perf Core helpers).
syscalls = autodict()
def trace_begin():
    # Called by perf before event processing starts.  (Python 2 script:
    # print statements are intentional.)
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf when event processing ends; emit the summary table.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    # perf callback for the raw_syscalls:sys_enter tracepoint.
    # Skip events from other commands when a [comm] filter was given.
    if for_comm is not None:
        if common_comm != for_comm:
            return
    try:
        syscalls[id] += 1
    except TypeError:
        # First hit for this id: autodict auto-vivifies a nested dict,
        # which cannot be incremented, so seed the counter instead.
        syscalls[id] = 1
def print_syscall_totals():
    # Emit a table of per-syscall event counts, most frequent first.
    # (Python 2: trailing commas on print suppress the extra newline,
    # and the sort key uses Py2-only tuple-unpacking lambdas.)
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events:\n\n",

    print "%-40s  %10s\n" % ("event", "count"),
    print "%-40s  %10s\n" % ("----------------------------------------", \
                                 "-----------"),

    for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
                                  reverse = True):
        print "%-40s  %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
divegeek/keyczar | cpp/src/tools/swtoolkit/site_scons/site_tools/target_debug.py | 36 | 2128 | #!/usr/bin/python2.4
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Build tool setup for debug environments.
This module is a SCons tool which setups environments for debugging.
It is used as follows:
debug_env = env.Clone(tools = ['target_debug'])
"""
def generate(env):
    # NOTE: SCons requires the use of this name, which fails gpylint.
    """SCons entry point for this tool."""
    # Mark the target platform as a debug build.
    env.SetBits('debug')
    env['TARGET_DEBUG'] = True
    # Fold any caller-supplied *_DEBUG settings on top of the defaults.
    debug_defines = ['_DEBUG'] + env.get('CPPDEFINES_DEBUG', [])
    env.Append(
        CPPDEFINES=debug_defines,
        CCFLAGS=env.get('CCFLAGS_DEBUG', []),
        LINKFLAGS=env.get('LINKFLAGS_DEBUG', []),
    )
| apache-2.0 |
monnand/myblog | blog/views.py | 1 | 14173 | from django.http import HttpResponse, HttpResponseRedirect
from django.http import HttpResponseForbidden
from django.http import HttpRequest
from django.http import Http404
from django.template.loader import render_to_string
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils.translation import activate
from django.utils.translation import get_language
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_exempt, csrf_protect
import django
from django.contrib.syndication.views import Feed
from django.contrib.sites.models import get_current_site
import datetime
import os
import os.path
import re
import json
from blog.models import Post, Author, BlogConfig, Tag, Reader, Comment
from blog.decode import decode_post, dump_html
from blog.forms import PostCommentForm
from captcha.CaptchasDotNet import CaptchasDotNet
class BlogFeed(Feed):
    """RSS feed of the 100 most recent posts, configured from the
    BlogConfig singleton."""

    def __call__(self, request, *args, **kwargs):
        # Force the RSS content type on the generated response.
        response = super(BlogFeed, self).__call__(request, *args, **kwargs)
        response['Content-Type'] = "application/rss+xml; charset=utf-8"
        return response

    def title(self):
        bc = BlogConfig.get()
        return bc.title

    def description(self):
        bc = BlogConfig.get()
        return bc.subtitle

    def link(self):
        bc = BlogConfig.get()
        return bc.link

    def items(self):
        # Cap the feed at the 100 newest posts.
        ret = Post.objects.all()[:100]
        return ret

    def item_title(self, item):
        return item.title

    def item_description(self, item):
        # Feed bodies carry the pre-rendered HTML, not the raw markup.
        return item.content_html

    def item_link(self, item):
        # Permalink layout: <site link>/p/<slug>/<language>
        bc = BlogConfig.get()
        url = os.path.join(bc.link, "p", item.slug, item.language)
        return url

    def item_author_name(self, item):
        return item.author.name

    def item_author_email(self, item):
        return item.author.email

    def item_pubdate(self, item):
        return item.created
@csrf_exempt
def set_config(request):
    """Update the global blog configuration from a signed POST request.

    The request must carry 'msg' (encrypted JSON), 'author' and 'key';
    only authors with can_set_config may update the configuration.
    Any subset of the known fields may be present in the message.
    Returns 403 for anything but an authorized POST.
    """
    if request.method == 'POST':
        msg = request.POST['msg']
        authorname = request.POST['author']
        key = request.POST['key']
        author = Author.objects.filter(name=authorname)
        if len(author) == 0:
            return HttpResponseForbidden("Failed\r\n")
        author = author[0]
        if not author.can_set_config:
            return HttpResponseForbidden("Failed\r\n")
        msg = decode_post(msg, author.decrypt_key, key)
        bc = BlogConfig.get()
        # Data-driven field copy replaces the former one-if-per-field
        # duplication (which also used Python-2-only dict.has_key()).
        str_fields = ('title', 'subtitle', 'captcha_name', 'captcha_secret',
                      'about', 'domain_name', 'link', 'license')
        int_fields = ('nr_posts_per_page', 'nr_poptags')
        for field in str_fields:
            if field in msg:
                setattr(bc, field, msg[field])
        for field in int_fields:
            if field in msg:
                setattr(bc, field, int(msg[field]))
        bc.save()
        return HttpResponse("Success\r\n")
    return HttpResponseForbidden("Not implemented\r\n")
@csrf_exempt
def add_author(request):
    """Register a new author from a signed POST request.

    Bootstrap rule: when no authors exist yet, the very first author is
    created from plain (unencrypted) JSON in 'msg' and is granted both
    can_add_user and can_set_config.  Afterwards, only an existing author
    with can_add_user may add others, and 'msg' must decrypt with the
    requesting author's key.
    """
    if request.method == 'POST':
        msg = request.POST['msg']
        authorname = request.POST['author']
        key = request.POST['key']
        author = Author.objects.filter(name=authorname)
        if len(author) == 0:
            nr_authors = Author.objects.count()
            # This is our first author. He should be able to add users
            if nr_authors == 0:
                msg = json.loads(msg)
                msg['can_add_user'] = True
                msg['can_set_config'] = True
            else:
                return HttpResponseForbidden("Failed\r\n")
        else:
            author = author[0]
            if author.can_add_user:
                msg = decode_post(msg, author.decrypt_key, key)
                # can_set_config defaults to False unless explicitly granted.
                if not msg.has_key('can_set_config'):
                    msg['can_set_config'] = False
            else:
                return HttpResponseForbidden("Failed\r\n")
        new_author = Author(name=msg['name'], decrypt_key=msg['decrypt_key'], \
                email=msg['email'], about=msg['about'], \
                can_add_user=msg['can_add_user'], \
                can_set_config=msg['can_set_config'])
        new_author.save()
        return HttpResponse("Success\r\n")
    return HttpResponseForbidden("Not implemented\r\n")
def get_post(msg, create=False):
    """Look up posts matching msg['slug'] (and msg['language'] if given).

    Returns the non-empty queryset of matches, or None when nothing
    matches and *create* is False.  With *create* True, a new Post is
    built and saved from msg (title, author, content, tags, ...) and
    returned as a one-element list.
    """
    slug = msg['slug']
    language = ""
    if msg.has_key('language'):
        language = msg['language']
    post = None
    if language:
        post = Post.objects.filter(slug=slug, language=language)
    else:
        post = Post.objects.filter(slug=slug)
    if len(post) > 0:
        return post
    if not create:
        return None
    title = msg['title']
    content = msg['content']
    content_format = msg['content_format']
    language = msg['language']
    # Render the stored markup to HTML once, at creation time.
    content_html = dump_html(content, content_format)
    now = datetime.datetime.now()
    allow_comment = True
    if msg.has_key('allow_comment'):
        allow_comment = bool(msg['allow_comment'])
    # uuid starts empty; post_blog() only routes posts with a non-empty
    # uuid through modify_post().
    post = Post(title=title, \
            author=msg['author'], \
            slug=slug, \
            created=now, \
            modified=now, \
            content_format=content_format, \
            content=content, \
            content_html=content_html, \
            view_count=0, \
            language=language, \
            uuid="", \
            allow_comment=allow_comment)
    post.save()
    if msg.has_key("tags"):
        # Attach requested tags, bumping each tag's reference count.
        for tag in msg['tags']:
            t = get_tag(tag)
            t.nr_refs += 1
            t.save()
            post.tags.add(t)
    return [post]
def get_tag(tag, create = True):
    """Return the Tag object for *tag*.

    Returns None when the tag does not exist and *create* is False.
    A newly created Tag is NOT saved; callers bump nr_refs and save.
    """
    try:
        ret = Tag.objects.get(tag=tag)
    except Tag.DoesNotExist:
        # Was a bare `except:` -- narrowed so that real failures
        # (database errors, MultipleObjectsReturned) propagate instead of
        # being silently replaced by a fresh, unsaved Tag.
        if not create:
            return None
        ret = Tag(tag=tag, nr_refs=0)
    return ret
def modify_post(msg, post):
    """Apply the fields in *msg* to an existing *post*.

    Updates title, content (re-rendering content_html), and the tag set;
    bumps post.modified when anything changed.  Returns the post without
    saving it -- the caller saves.
    """
    modified = False
    if post.title != msg['title']:
        post.title = msg['title']
        modified = True
    # Re-render the HTML whenever the content OR the markup format
    # changes.  (The previous code only re-rendered -- and only marked
    # the post modified -- when the format changed, so content-only
    # edits never reached content_html.)
    if post.content != msg['content'] or \
            post.content_format != msg['content_format']:
        post.content = msg['content']
        post.content_format = msg['content_format']
        post.content_html = dump_html(post.content, post.content_format)
        modified = True
    if 'tags' in msg:
        # Drop tags that are no longer requested, deleting a tag record
        # once its reference count hits zero.
        for etag in post.tags.all():
            if etag.tag not in msg['tags']:
                post.tags.remove(etag)
                etag.nr_refs -= 1
                if etag.nr_refs == 0:
                    etag.delete()
                else:
                    etag.save()
                modified = True
        # Attach newly requested tags (tracking additions so duplicate
        # entries in msg['tags'] are only added once).
        existing = set(etag.tag for etag in post.tags.all())
        for tag in msg['tags']:
            if tag in existing:
                continue
            t = get_tag(tag)
            t.nr_refs += 1
            t.save()
            post.tags.add(t)
            existing.add(tag)
            modified = True
    if modified:
        post.modified = datetime.datetime.now()
    return post
def is_unique_post_spec(msg):
    """Return True if *msg* carries the fields that uniquely identify a
    post: its slug and its language."""
    # dict.has_key() is Python-2-only (removed in Python 3); the `in`
    # operator is equivalent and works on both.
    return 'slug' in msg and 'language' in msg
def is_full_post_spec(msg):
    """Return True if *msg* has everything needed to create a post: the
    unique spec (slug + language) plus author, content and format."""
    if not is_unique_post_spec(msg):
        return False
    # `in` replaces Python-2-only dict.has_key().
    return 'author' in msg and 'content' in msg and 'content_format' in msg
@csrf_exempt
def post_blog(request):
    """Create or update a blog post from a signed POST request.

    'msg' must decrypt with the named author's key and contain the full
    post spec (slug, language, content, content_format).  get_post()
    creates the post if it does not exist; a stored post with a
    non-empty uuid is updated in place via modify_post().
    """
    if request.method == 'POST':
        if not request.POST.has_key('msg') or \
                not request.POST.has_key('author'):
            return HttpResponseForbidden("Failed\r\n")
        msg = request.POST['msg']
        authorname = request.POST['author']
        key = request.POST['key']
        author = Author.objects.filter(name=authorname)
        if len(author) == 0:
            return HttpResponseForbidden("Failed\r\n")
        author = author[0]
        msg = decode_post(msg, author.decrypt_key, key)
        if msg is None:
            # Decryption/authentication failed.
            return HttpResponseForbidden("Failed\r\n")
        if not is_full_post_spec(msg):
            return HttpResponseForbidden("Failed\r\n")
        msg['author'] = author
        post = get_post(msg, True)
        if len(post) <= 0:
            return HttpResponseForbidden("Failed\r\n")
        post = post[0]
        # A non-empty uuid marks an already-stored post: update it.
        if len(post.uuid) != 0:
            post = modify_post(msg, post)
            post.save()
        return HttpResponse("Success\r\n")
    return HttpResponseForbidden("Not implemented\r\n")
def render_to_resp(template, kv):
    """Render *template* with *kv* merged into the site-wide context
    (blog configuration plus the most popular tags)."""
    config = BlogConfig.get()
    top_tags = Tag.objects.all()[:config.nr_poptags]
    context = {'config': config, 'poptags': top_tags}
    context.update(kv)
    return render_to_response(template, context)
@csrf_exempt
def post_comment(request, postid):
    """Handle a comment form submission for post *postid*.

    Validates the captcha, creates or refreshes the Reader record, stores
    the Comment, then redirects back to the post page (or to the
    .../captchaerr variant when the captcha check fails).
    """
    if request.method == 'POST':
        post = Post.objects.filter(id=int(postid))
        if len(post) == 0:
            raise Http404
        post = post[0]
        form = PostCommentForm(request.POST)
        if form.is_valid():
            name = form.cleaned_data['name']
            url = form.cleaned_data['url']
            email = form.cleaned_data['email']
            content= form.cleaned_data['content']
            password = form.cleaned_data['password']
            # Hidden captcha challenge id, issued by respond_post().
            random = request.POST['random']
            now = datetime.datetime.now()
            reader = Reader.objects.filter(name=name)
            captcha = BlogConfig.get_captcha()
            if captcha is not None:
                if not captcha.validate(random):
                    return HttpResponseRedirect('/id/' + postid + "/captchaerr")
                if not captcha.verify(password):
                    return HttpResponseRedirect('/id/' + postid + "/captchaerr")
            if len(reader) == 0:
                reader = Reader(name=name, url=url, email=email)
                reader.save()
            else:
                # Known reader: refresh url/email if new values were given.
                reader = reader[0]
                if len(url) != 0:
                    if reader.url != url:
                        reader.url = url
                if len(email) != 0:
                    if reader.email != email:
                        reader.email = email
                reader.save()
            comment = Comment(reader=reader, \
                    post=post,\
                    content=content, \
                    created=now)
            comment.save()
        return HttpResponseRedirect('/id/' + postid)
    return HttpResponseForbidden("Not implemented\r\n")
def respond_post(post):
    """Render the single-post page: the post itself, its comments and a
    fresh comment form carrying a new captcha challenge."""
    comments = Comment.objects.filter(post__id=post.id)
    form = PostCommentForm()
    captcha = BlogConfig.get_captcha()
    random = captcha.random()
    # Smuggle the captcha challenge id through the form as a hidden
    # field; post_comment() validates it on submission.
    form.fields['random'] = django.forms.CharField(initial=random, \
            widget=django.forms.widgets.HiddenInput())
    nr_comments = len(comments)
    return render_to_resp('post.html', \
            {'post': post, 'commentform':form, 'comments':comments, \
            'captcha_img': captcha.image(), \
            'captcha_audio': captcha.audio_url(), \
            'errormsg': '', 'nr_comments':nr_comments})
def view_post_content(request, slug, lang='enUS'):
    """Display the single post identified by slug + language."""
    if request.method == 'POST':
        return HttpResponseForbidden("Not implemented\r\n")
    spec = {'slug': slug, 'language': lang}
    matches = get_post(spec)
    # 404 both when nothing matched and when the spec is ambiguous.
    if matches is None or len(matches) > 1:
        raise Http404
    return respond_post(matches[0])
def view_post_by_id(request, postid, err = ''):
    """Display a single post by primary key.

    *err* is accepted for URLconf compatibility (the .../captchaerr
    variant) but is not used here.
    """
    if request.method == 'POST':
        return HttpResponseForbidden("Not implemented\r\n")
    matches = Post.objects.filter(id=postid)
    if not matches:
        raise Http404
    return respond_post(matches[0])
def resp_posts_list(posts, page_nr = 1, url_before_pgn = "l", url_after_pgn = ""):
    """Render one page of *posts*.

    Args:
        posts: queryset of posts to paginate.
        page_nr: 1-based page number; values below 1 are clamped to 1.
        url_before_pgn / url_after_pgn: URL fragments placed around the
            page number when the template builds pagination links.
    """
    bc = BlogConfig.get()
    post_per_page = bc.nr_posts_per_page
    # Convert to a 0-based page index.
    page_nr = page_nr - 1
    if page_nr < 0:
        page_nr = 0
    start = page_nr * post_per_page
    end = start + post_per_page
    nr_posts = posts.count()
    # Ceiling division: one extra page for a partial last page.
    nr_pages = nr_posts/post_per_page
    if nr_posts % post_per_page:
        nr_pages += 1
    posts = posts[start:end]
    # Annotate each post with its comment count for the template.
    for p in posts:
        n = Comment.objects.filter(post__id = p.id).count()
        p.nr_comments = n
    return render_to_resp('postslist.html', {'posts': posts, \
            'pages':range(1, nr_pages + 1), 'url_before_pgn': url_before_pgn, \
            'url_after_pgn': url_after_pgn})
def view_posts_list(request, page_nr = 1, lang = 'all'):
    """List posts, optionally filtered on language.

    *lang* is a 4-character code such as 'enUS'; any other length
    (including the default 'all') disables the language filter.
    """
    if request.method == 'POST':
        return HttpResponseForbidden("Not implemented\r\n")
    posts = []
    if len(lang) != 4:
        posts = Post.objects.all()
        lang = 'all'
    else:
        posts = Post.objects.filter(language=lang)
    return resp_posts_list(posts, int(page_nr), "l", lang)
def view_author(request, authorname):
    """Display an author's profile page; 404 when unknown."""
    matches = Author.objects.filter(name=authorname)
    if not matches:
        raise Http404
    return render_to_resp('author.html', {'author': matches[0]})
def view_about(request):
    """Display the static 'about' page."""
    return render_to_resp('about.html', {})
def view_tag(request, tid, page_nr = 1):
    """List the posts carrying tag *tid*, paginated."""
    if request.method == 'POST':
        return HttpResponseForbidden("Not implemented\r\n")
    tagged_posts = Post.objects.filter(tags__id=int(tid))
    # A missing/empty page number from the URLconf means page 1.
    page = int(page_nr) if page_nr else 1
    return resp_posts_list(tagged_posts, page, "tag/" + str(tid), "")
| apache-2.0 |
JeroenZegers/Nabu-MSSS | nabu/postprocessing/reconstructors/reconstructor.py | 1 | 6611 | """@file reconstructor.py
contains the Reconstructor class"""
from abc import ABCMeta, abstractmethod
import os
import scipy.io.wavfile as wav
import numpy as np
class Reconstructor(object):
"""the general reconstructor class
a reconstructor is used to reconstruct the signals from the models output"""
__metaclass__ = ABCMeta
    def __init__(self, conf, evalconf, dataconf, rec_dir, task, optimal_frame_permutation=False):
        """Reconstructor constructor

        Args:
            conf: the reconstructor configuration as a dictionary
            evalconf: the evaluator configuration as a ConfigParser
            dataconf: the database configuration
            rec_dir: the directory where the reconstructions will be stored
            task: name of the task section in evalconf (used for a
                task-specific batch_size override)
            optimal_frame_permutation: whether opt_frame_perm() may pick
                the optimal speaker permutation per frame
        """

        self.conf = conf
        self.dataconf = dataconf
        # A task-specific batch size overrides the evaluator-wide one.
        if evalconf.has_option(task, 'batch_size'):
            self.batch_size = int(evalconf.get(task, 'batch_size'))
        else:
            self.batch_size = int(evalconf.get('evaluator', 'batch_size'))
        self.segment_lengths = evalconf.get('evaluator', 'segment_length').split(' ')
        self.optimal_frame_permutation = optimal_frame_permutation

        # number of speakers to reconstruct
        self.nrS = int(conf['nrs'])

        # optional axis permutation applied to every requested output.
        if 'transpose_order' in conf:
            self.transpose_order = map(int, conf['transpose_order'].split(' '))
        else:
            self.transpose_order = False

        # create the directory to write down the reconstructions
        self.rec_dir = rec_dir
        if not os.path.isdir(self.rec_dir):
            os.makedirs(self.rec_dir)
        # one sub-directory per estimated speaker: s1, s2, ...
        for spk in range(self.nrS):
            if not os.path.isdir(os.path.join(self.rec_dir, 's' + str(spk+1))):
                os.makedirs(os.path.join(self.rec_dir, 's' + str(spk+1)))

        # the use of the position variable only works because in the evaluator the
        # shuffle option in the data_queue is set to False!!
        self.pos = 0

        self.scp_file = os.path.join(self.rec_dir, 'pointers.scp')

        # whether to save output as numpy instead of wav file
        if 'save_as_numpy' in conf:
            self.save_as_numpy = conf['save_as_numpy'] in ['True', 'true']
        else:
            self.save_as_numpy = False

        # Whether the raw output should also be stored (besides the reconstructed audiosignal)
        self.store_output = conf['store_output'] == 'True'
        if self.store_output:
            self.output_dir = os.path.join(rec_dir, 'raw_output')
            if not os.path.isdir(self.output_dir):
                os.makedirs(self.output_dir)
def __call__(self, batch_outputs, batch_sequence_lengths):
""" reconstruct the signals and write the audio files
Args:
- batch_outputs: A dictionary containing the batch outputs of the network
- batch_sequence_lengths: A dictionary containing the sequence length for each utterance
"""
if self.transpose_order:
for output_name in self.requested_output_names:
batch_outputs[output_name] = np.transpose(batch_outputs[output_name], self.transpose_order)
for utt_ind in range(self.batch_size):
utt_output = dict()
for output_name in self.requested_output_names:
# anchor output for anchor_deepattractornet_softmax_reconstructor is special case
if output_name is 'anchors' and self.__class__.__name__ in ['AnchorDeepattractorSoftmaxReconstructor', 'WeightedAnchorDeepattractorSoftmaxReconstructor']:
utt_output[output_name] = batch_outputs[output_name]
elif output_name is 'anchors_scale' and self.__class__.__name__ in ['TimeAnchorScalarDeepattractorSoftmaxReconstructor']:
utt_output[output_name] = batch_outputs[output_name]
else:
utt_output[output_name] = \
batch_outputs[output_name][utt_ind][:batch_sequence_lengths[output_name][utt_ind], :]
# reconstruct the signals
reconstructed_signals, utt_info = self.reconstruct_signals(utt_output)
# make the audio files for the reconstructed signals
if self.save_as_numpy:
filename = os.path.join(self.rec_dir, utt_info['utt_name'] + '.npy')
np.save(filename, reconstructed_signals)
else:
self.write_audiofile(reconstructed_signals, utt_info)
# if requested store the raw output
if self.store_output:
for output_name in self.requested_output_names:
savename = output_name+'_'+utt_info['utt_name']
np.save(os.path.join(self.output_dir, savename), utt_output[output_name])
self.pos += 1
    def opt_frame_perm(self, batch_outputs, batch_targets, batch_sequence_lengths):
        """ reconstruct the signals, using the optimal speaker permutations on frame level using the targets, and write
        the audio files
        Args:
            - batch_outputs: A dictionary containing the batch outputs of the network
            - batch_targets: A dictionary containing the batch targets for the outputs
            - batch_sequence_lengths: A dictionary containing the sequence length for each utterance
        """
        for utt_ind in range(self.batch_size):
            utt_output = dict()
            for output_name in self.requested_output_names:
                # strip the zero padding for this utterance
                utt_output[output_name] = \
                    batch_outputs[output_name][utt_ind][:batch_sequence_lengths[output_name][utt_ind], :]
            # assuming only one requested target
            # NOTE(review): `output_name` below is the leftover value of the loop
            # variable above, so every target is sliced with the LAST output's
            # sequence length -- presumably all outputs share one length; confirm.
            target_keys = [key for key in batch_targets.keys() if 'target' in key]
            utt_target = {
                key: batch_targets[key][utt_ind][:batch_sequence_lengths[output_name][utt_ind], :]
                for key in target_keys}
            # reconstruct the signals
            reconstructed_signals, utt_info = self.reconstruct_signals_opt_frame_perm(utt_output, utt_target)
            # make the audio files for the reconstructed signals
            self.write_audiofile(reconstructed_signals, utt_info)
            # if requested store the raw output
            if self.store_output:
                for output_name in self.requested_output_names:
                    savename = output_name+'_'+utt_info['utt_name']
                    np.save(os.path.join(self.output_dir, savename), utt_output[output_name])
            self.pos += 1
    @abstractmethod
    def reconstruct_signals(self, output):
        """reconstruct the signals
        Args:
            output: the output of a single utterance of the neural network
        Returns:
            the reconstructed signals and a dictionary with info on the
            utterance (callers unpack `reconstructed_signals, utt_info`)"""
    def write_audiofile(self, reconstructed_signals, utt_info):
        """write the audiofiles for the reconstructions
        Args:
            reconstructed_signals: the reconstructed signals for a single mixture,
                one signal per speaker
            utt_info: some info on the utterance (uses 'utt_name' and 'rate')
        """
        # scp line format: "<utt_name> <file_spk1> <file_spk2> ..."
        write_str = utt_info['utt_name']
        for spk in range(self.nrS):
            # each speaker gets its own subdirectory s1, s2, ...
            rec_dir = os.path.join(self.rec_dir, 's' + str(spk+1))
            filename = os.path.join(rec_dir, utt_info['utt_name']+'.wav')
            signal = reconstructed_signals[spk]
            # downcast: presumably the wav writer does not accept float64 -- confirm
            if signal.dtype == np.float64:
                signal = np.float32(signal)
            wav.write(filename, utt_info['rate'], signal)
            write_str += ' ' + filename
        write_str += ' \n'
        self.scp_fid.write(write_str)
def open_scp_files(self, from_start=True):
if from_start:
file_mode = 'w'
else:
file_mode = 'a+'
self.scp_fid = open(self.scp_file, file_mode)
| mit |
doismellburning/edx-platform | openedx/core/djangoapps/user_api/course_tag/api.py | 174 | 1940 | """
A service-like user_info interface. Could be made into an http API later, but for now
just in-process. Exposes global and per-course key-value pairs for users.
Implementation note:
Stores global metadata using the UserPreference model, and per-course metadata using the
UserCourseTag model.
"""
from ..models import UserCourseTag
# Scopes
# (currently only allows per-course tags. Can be expanded to support
# global tags (e.g. using the existing UserPreferences table))
COURSE_SCOPE = 'course'
def get_course_tag(user, course_id, key):
    """
    Look up the user's course tag value for `key` within `course_id`.

    Args:
        user: the User object for the course tag
        course_id: course identifier (string)
        key: arbitrary (<=255 char string)

    Returns:
        string value, or None if there is no value saved
    """
    try:
        tag = UserCourseTag.objects.get(user=user, course_id=course_id, key=key)
    except UserCourseTag.DoesNotExist:
        return None
    return tag.value
def set_course_tag(user, course_id, key, value):
    """
    Save `value` as the user's course tag for `key` within `course_id`,
    overwriting any previous value.

    The intention is that the values are fairly short, as they will be included
    in all analytics events about this user.

    Args:
        user: the User object
        course_id: course identifier (string)
        key: arbitrary (<=255 char string)
        value: arbitrary string
    """
    # pylint: disable=fixme
    # TODO: There is a risk of IntegrityErrors being thrown here given
    # simultaneous calls from many processes. Handle by retrying after
    # a short delay?
    tag, _created = UserCourseTag.objects.get_or_create(
        user=user, course_id=course_id, key=key)
    tag.value = value
    tag.save()
| agpl-3.0 |
opensourcechipspark/platform_external_chromium_org | webkit/tools/layout_tests/PRESUBMIT.py | 87 | 2098 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""test_expectations.txt presubmit script.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
TEST_EXPECTATIONS_FILENAMES = ['test_expectations.txt', 'TestExpectations']
def LintTestFiles(input_api, output_api):
    """Run run_webkit_tests.py --lint-test-files and wrap any reported lint
    errors in a PresubmitError."""
    current_dir = str(input_api.PresubmitLocalPath())
    # Make 'webkit/tools/layout_tests' and the bundled python tools importable
    # by the child process.
    python_paths = [
        current_dir,
        input_api.os_path.join(current_dir, '..', '..', '..', 'tools', 'python')
    ]
    env = input_api.environ.copy()
    if env.get('PYTHONPATH'):
        python_paths.append(env['PYTHONPATH'])
    env['PYTHONPATH'] = input_api.os_path.pathsep.join(python_paths)
    cmd = [
        input_api.python_executable,
        input_api.os_path.join(current_dir, 'run_webkit_tests.py'),
        '--lint-test-files'
    ]
    proc = input_api.subprocess.Popen(
        cmd,
        cwd=current_dir,
        env=env,
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.STDOUT)
    stdout_data = proc.communicate()[0]
    # TODO(ukai): consolidate run_webkit_tests --lint-test-files reports.
    # Error lines either start with "Line:" or contain "ERROR Line:".
    errors = [line for line in stdout_data.splitlines()
              if input_api.re.match('^Line:', line) or
              input_api.re.search('ERROR Line:', line)]
    if errors:
        return [output_api.PresubmitError('Lint error\n%s' % '\n'.join(errors),
                                          long_text=stdout_data)]
    return []
def LintTestExpectations(input_api, output_api):
    """Lint the layout tests, but only when the change touches one of the
    test expectations files."""
    basenames = (input_api.os_path.basename(p) for p in input_api.LocalPaths())
    if any(name in TEST_EXPECTATIONS_FILENAMES for name in basenames):
        return LintTestFiles(input_api, output_api)
    return []
def CheckChangeOnUpload(input_api, output_api):
    """Presubmit hook run at upload time: lint test expectations."""
    return LintTestExpectations(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
    """Presubmit hook run at commit time: lint test expectations."""
    return LintTestExpectations(input_api, output_api)
| bsd-3-clause |
jolene-esposito/osf.io | scripts/migrate_github_oauth_settings.py | 55 | 7419 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script to migrate addongithubusersettings and create and attach addongithuboauthsettings.
Log:
Executed on production by SL on 2014-10-05 at 23:11 EST. 269 AddonGithubUserSettings records
were successfully migrated. 3 records with invalidated credentials were skipped.
Script was modified by @chennan47 to handle records with invalidated credentials by unsetting
the oauth_access_token, oauth_token_type, and github_user fields. Run on production by @sloria
on 2014-10-07 at 12:34 EST. 3 records with invalidated credentials were migrated.
"""
import sys
import mock
from nose.tools import *
import github3
from framework.mongo import database
from website.app import init_app
from tests.base import OsfTestCase
from website.addons.github.api import GitHub
from website.addons.github.model import AddonGitHubOauthSettings, AddonGitHubUserSettings
def do_migration(records, dry=True):
    """Migrate raw addongithubusersettings documents to reference new
    AddonGitHubOauthSettings records.

    Args:
        records: iterable of raw mongo documents for AddonGitHubUserSettings
        dry: when True, only count what would be migrated without writing

    Returns:
        tuple of (number of migrated records,
                  number of records with invalidated credentials handled)
    """
    count, inval_cred_handled = 0, 0
    for raw_user_settings in records:
        # False if missing, None if field exists
        access_token = raw_user_settings.get('oauth_access_token', False)
        token_type = raw_user_settings.get('oauth_token_type', False)
        github_user_name = raw_user_settings.get('github_user', False)
        if access_token and token_type and github_user_name:
            if not dry:
                gh = GitHub(access_token, token_type)
                try:
                    github_user = gh.user()
                except github3.models.GitHubError:
                    # Credentials no longer valid: unset the legacy oauth
                    # fields instead of migrating them, and skip this record.
                    AddonGitHubUserSettings._storage[0].store.update(
                        {'_id': raw_user_settings['_id']},
                        {
                            '$unset': {
                                "oauth_access_token" : True,
                                "oauth_token_type" : True,
                                "github_user" : True,
                            },
                        }
                    )
                    inval_cred_handled += 1
                    print('invalidated credentials handled record: {}'.format(raw_user_settings['_id']))
                    continue
                # Two saves: the first persists github_user_id (the key used
                # in the backref update below), the second the credentials.
                oauth_settings = AddonGitHubOauthSettings()
                oauth_settings.github_user_id = str(github_user.id)
                oauth_settings.save()
                oauth_settings.oauth_access_token = access_token
                oauth_settings.oauth_token_type = token_type
                oauth_settings.github_user_name = github_user_name
                oauth_settings.save()
                # Move the oauth fields off the user settings document and
                # point it at the new oauth settings record.
                AddonGitHubUserSettings._storage[0].store.update(
                    {'_id': raw_user_settings['_id']},
                    {
                        '$unset': {
                            'oauth_access_token': True,
                            'oauth_token_type': True,
                            'github_user': True,
                        },
                        '$set': {
                            'oauth_settings': oauth_settings.github_user_id,
                        }
                    }
                )
                # Register the backref from the oauth settings to the user
                # settings document.
                AddonGitHubOauthSettings._storage[0].store.update(
                    {'github_user_id': oauth_settings.github_user_id},
                    {
                        '$push': {
                            '__backrefs.accessed.addongithubusersettings.oauth_settings': raw_user_settings['_id'],
                        }
                    }
                )
                print('Finished migrating AddonGithubUserSettings record: {}'.format(raw_user_settings['_id']))
            count += 1
        # Old fields have not yet been unset
        elif None in set([access_token, token_type, github_user_name]):
            if not dry:
                AddonGitHubUserSettings._storage[0].store.update(
                    {'_id': raw_user_settings['_id']},
                    {
                        '$unset': {
                            'oauth_access_token': True,
                            'oauth_token_type': True,
                            'github_user': True,
                        },
                    }
                )
                print('Unset oauth_access_token and oauth_token_type: {0}'.format(raw_user_settings['_id']))
            count += 1
    return count, inval_cred_handled
def get_user_settings():
    # ... return the StoredObjects to migrate ...
    # Raw mongo cursor over every addongithubusersettings document.
    return database.addongithubusersettings.find()
def main():
    """Entry point: run the migration (dry-run unless 'dry' is absent from argv)."""
    init_app('website.settings', set_backends=True, routes=True)  # Sets the storage backends on all models
    user_settings = get_user_settings()
    # Passing the literal string 'dry' on the command line makes this a dry run.
    n_migrated, n_inval_cred_handled = do_migration(user_settings, dry='dry' in sys.argv)
    print("Total migrated records: {}".format(n_migrated))
    print("Total invalidated credentials handled records: {}".format(n_inval_cred_handled))
class TestMigrateGitHubOauthSettings(OsfTestCase):
    """Integration tests for the migration, run against the test mongo collection."""

    def setUp(self):
        super(TestMigrateGitHubOauthSettings, self).setUp()
        self.mongo_collection = database.addongithubusersettings
        # A raw (pre-migration) user settings document with the legacy oauth
        # fields still present.
        self.user_settings = {
            "__backrefs" : {
                "authorized" : {
                    "addongithubnodesettings" : {
                        "user_settings" : [
                            "678910",
                        ]
                    }
                }
            },
            "_id" : "123456",
            "_version" : 1,
            "deletedAddonGitHubUserSettings" : False,
            "github_user" : "testing user",
            "oauth_access_token" : "testing acess token",
            "oauth_state" : "no state",
            "oauth_token_type" : "testing token type",
            "owner" : "abcde"
        }
        self.mongo_collection.insert(self.user_settings)

    def test_get_user_settings(self):
        # The fixture document should be returned unchanged by get_user_settings.
        records = list(get_user_settings())
        assert_equal(1, len(records))
        assert_equal(
            records[0]['github_user'],
            self.user_settings['github_user']
        )
        assert_equal(
            records[0]['oauth_state'],
            self.user_settings['oauth_state']
        )
        assert_equal(
            records[0]['oauth_access_token'],
            self.user_settings['oauth_access_token']
        )
        assert_equal(
            records[0]['oauth_token_type'],
            self.user_settings['oauth_token_type']
        )

    @mock.patch('website.addons.github.api.GitHub.user')
    def test_do_migration(self, mock_github_user):
        # Stub out the GitHub API call so the migration sees valid credentials.
        user = mock.Mock()
        user.id = "testing user id"
        mock_github_user.return_value = user
        do_migration(get_user_settings())
        # After migration the user settings must reference a populated
        # AddonGitHubOauthSettings record.
        user_settings = AddonGitHubUserSettings.find()[0]
        assert_true(user_settings.oauth_settings)
        assert_true(user_settings.oauth_state)
        assert_equal(
            user_settings.oauth_settings.github_user_name,
            "testing user"
        )
        assert_equal(
            user_settings.oauth_settings.oauth_access_token,
            "testing acess token"
        )
        assert_equal(
            user_settings.oauth_settings.oauth_token_type,
            "testing token type"
        )
        assert_equal(
            user_settings.oauth_settings.github_user_id,
            "testing user id"
        )

    def tearDown(self):
        # Drop the fixture documents so tests stay independent.
        self.mongo_collection.remove()
| apache-2.0 |
tensorflow/models | research/object_detection/anchor_generators/grid_anchor_generator_test.py | 2 | 4519 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.grid_anchor_generator."""
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.anchor_generators import grid_anchor_generator
from object_detection.utils import test_case
class GridAnchorGeneratorTest(test_case.TestCase):
    """Tests for GridAnchorGenerator: single-anchor grids, 2x2 grids, and
    dynamically shaped feature maps."""

    def test_construct_single_anchor(self):
        """Builds a 1x1 anchor grid to test the size of the output boxes."""
        def graph_fn():
            scales = [0.5, 1.0, 2.0]
            aspect_ratios = [0.25, 1.0, 4.0]
            anchor_offset = [7, -3]
            anchor_generator = grid_anchor_generator.GridAnchorGenerator(
                scales, aspect_ratios, anchor_offset=anchor_offset)
            anchors_list = anchor_generator.generate(feature_map_shape_list=[(1, 1)])
            anchor_corners = anchors_list[0].get()
            return (anchor_corners,)
        # One row per scale/aspect-ratio combination (3 x 3 = 9 anchors).
        exp_anchor_corners = [[-121, -35, 135, 29], [-249, -67, 263, 61],
                              [-505, -131, 519, 125], [-57, -67, 71, 61],
                              [-121, -131, 135, 125], [-249, -259, 263, 253],
                              [-25, -131, 39, 125], [-57, -259, 71, 253],
                              [-121, -515, 135, 509]]
        anchor_corners_out = self.execute(graph_fn, [])
        self.assertAllClose(anchor_corners_out, exp_anchor_corners)

    def test_construct_anchor_grid(self):
        """Builds a 2x2 grid with static shapes and checks all 12 anchors."""
        def graph_fn():
            base_anchor_size = [10, 10]
            anchor_stride = [19, 19]
            anchor_offset = [0, 0]
            scales = [0.5, 1.0, 2.0]
            aspect_ratios = [1.0]
            anchor_generator = grid_anchor_generator.GridAnchorGenerator(
                scales,
                aspect_ratios,
                base_anchor_size=base_anchor_size,
                anchor_stride=anchor_stride,
                anchor_offset=anchor_offset)
            anchors_list = anchor_generator.generate(feature_map_shape_list=[(2, 2)])
            anchor_corners = anchors_list[0].get()
            return (anchor_corners,)
        # 2x2 grid cells x 3 scales = 12 anchors.
        exp_anchor_corners = [[-2.5, -2.5, 2.5, 2.5], [-5., -5., 5., 5.],
                              [-10., -10., 10., 10.], [-2.5, 16.5, 2.5, 21.5],
                              [-5., 14., 5, 24], [-10., 9., 10, 29],
                              [16.5, -2.5, 21.5, 2.5], [14., -5., 24, 5],
                              [9., -10., 29, 10], [16.5, 16.5, 21.5, 21.5],
                              [14., 14., 24, 24], [9., 9., 29, 29]]
        anchor_corners_out = self.execute(graph_fn, [])
        self.assertAllClose(anchor_corners_out, exp_anchor_corners)

    def test_construct_anchor_grid_with_dynamic_feature_map_shapes(self):
        """Same 2x2 grid, but the feature map shape is fed as runtime tensors."""
        def graph_fn(feature_map_height, feature_map_width):
            base_anchor_size = [10, 10]
            anchor_stride = [19, 19]
            anchor_offset = [0, 0]
            scales = [0.5, 1.0, 2.0]
            aspect_ratios = [1.0]
            anchor_generator = grid_anchor_generator.GridAnchorGenerator(
                scales,
                aspect_ratios,
                base_anchor_size=base_anchor_size,
                anchor_stride=anchor_stride,
                anchor_offset=anchor_offset)
            anchors_list = anchor_generator.generate(
                feature_map_shape_list=[(feature_map_height, feature_map_width)])
            anchor_corners = anchors_list[0].get()
            return (anchor_corners,)
        # Expected values match the static-shape test above.
        exp_anchor_corners = [[-2.5, -2.5, 2.5, 2.5], [-5., -5., 5., 5.],
                              [-10., -10., 10., 10.], [-2.5, 16.5, 2.5, 21.5],
                              [-5., 14., 5, 24], [-10., 9., 10, 29],
                              [16.5, -2.5, 21.5, 2.5], [14., -5., 24, 5],
                              [9., -10., 29, 10], [16.5, 16.5, 21.5, 21.5],
                              [14., 14., 24, 24], [9., 9., 29, 29]]
        anchor_corners_out = self.execute_cpu(graph_fn,
                                              [np.array(2, dtype=np.int32),
                                               np.array(2, dtype=np.int32)])
        self.assertAllClose(anchor_corners_out, exp_anchor_corners)
| apache-2.0 |
hainm/binder | binder/log.py | 1 | 6573 | import logging
import Queue
import time
from threading import Thread, current_thread, Lock
import zmq
from binder.binderd.client import BinderClient
from binder.settings import LogSettings
class LoggerClient(Thread):
    """Background thread that forwards log messages to the binderd log_writer.

    Messages are queued by the calling thread and drained by this thread's
    run loop, which keeps draining until the queue is empty even after the
    parent thread has died.
    """

    # Process-wide shared instance, created lazily by getInstance().
    _singleton = None

    @staticmethod
    def getInstance():
        """Return the shared, already-started LoggerClient."""
        if not LoggerClient._singleton:
            client = LoggerClient()
            client.start()
            LoggerClient._singleton = client
        return LoggerClient._singleton

    def __init__(self):
        super(LoggerClient, self).__init__()
        # Remember the creating thread so the run loop can stop once it dies.
        self.parent = current_thread()
        self._stopped = False
        self._queue = Queue.Queue()
        self._client = BinderClient("log_writer")

    def stop(self):
        self._client.close()
        self._stopped = True

    def _send_message(self):
        # Blocks until a message is available, then forwards it.
        msg = self._queue.get()
        self._client.send(msg)

    def run(self):
        while not self._stopped and self.parent.is_alive():
            self._send_message()
        # keep logging until the queue is empty, even after the parent has died
        while not self._queue.empty():
            self._send_message()

    def _send(self, msg):
        self._queue.put(msg)

    def debug(self, tag, msg, app=None):
        self._send({'type': 'log', 'level': logging.DEBUG, 'msg': msg, 'tag': tag, 'app': app})

    def info(self, tag, msg, app=None):
        self._send({'type': 'log', 'level': logging.INFO, 'msg': msg, 'tag': tag, 'app': app})

    def warn(self, tag, msg, app=None):
        self._send({'type': 'log', 'level': logging.WARNING, 'msg': msg, 'tag': tag, 'app': app})

    # BUG FIX: module-level warning_log() calls LoggerClient.warning(), which
    # did not exist and raised AttributeError; keep `warn` and alias it.
    warning = warn

    def error(self, tag, msg, app=None):
        self._send({'type': 'log', 'level': logging.ERROR, 'msg': msg, 'tag': tag, 'app': app})
def debug_log(tag, msg, app=None):
    """Log *msg* at DEBUG level through the shared LoggerClient."""
    LoggerClient.getInstance().debug(tag, msg, app)
def info_log(tag, msg, app=None):
    """Log *msg* at INFO level through the shared LoggerClient."""
    LoggerClient.getInstance().info(tag, msg, app)
def warning_log(tag, msg, app=None):
    """Log *msg* at WARNING level through the shared LoggerClient."""
    log = LoggerClient.getInstance()
    # BUG FIX: LoggerClient defines warn(), not warning(); the previous call
    # to log.warning(...) raised AttributeError.
    log.warn(tag, msg, app)
def error_log(tag, msg, app=None):
    """Log *msg* at ERROR level through the shared LoggerClient."""
    LoggerClient.getInstance().error(tag, msg, app)
def write_stream(tag, level_string, stream, app=None):
    """Drain *stream* line by line into the logger on a background thread.

    Args:
        tag: log tag for every forwarded line
        level_string: name of a LoggerClient logging method
            ('debug', 'info', 'warn' or 'error')
        stream: file-like object to read lines from
        app: optional app name attached to each message
    """
    def _process_stream(app, stream):
        log = LoggerClient.getInstance()
        # Reject level names that are not methods on LoggerClient.
        if level_string not in LoggerClient.__dict__:
            log.error("LoggerClient", "write_stream failing with unexpected level_string: {}".format(level_string))
            return
        method = log.__getattribute__(level_string)
        # iter(readline, '') stops at EOF (empty string).
        for line in iter(stream.readline, ''):
            method(tag, line, app=app)
    t = Thread(target=_process_stream, args=(app, stream))
    t.start()
class PubSubStreamer(Thread):
    """Singleton thread that subscribes to the log pub/sub socket and fans
    messages out to per-app callbacks."""

    class SubStreamReader(Thread):
        """Inner thread that reads the zmq SUB socket and buffers messages."""

        def __init__(self, buf):
            super(PubSubStreamer.SubStreamReader, self).__init__()
            self._stopped = False
            self._buf = buf

        def stop(self):
            self._stopped = True

        def run(self):
            context = zmq.Context()
            socket = context.socket(zmq.SUB)
            # Empty prefix: subscribe to every topic.
            socket.setsockopt(zmq.SUBSCRIBE, b'')
            socket.connect("{}:{}".format(LogSettings.PUBSUB_HOST, LogSettings.PUBSUB_PORT))
            while not self._stopped:
                try:
                    # Non-blocking receive so the stop flag is re-checked
                    # promptly; ZMQError (EAGAIN) means no message yet.
                    topic, msg = socket.recv_multipart(zmq.NOBLOCK)
                    # buffer the message
                    self._buf.put((topic, msg))
                except zmq.ZMQError:
                    continue

    # Process-wide shared instance, created lazily by get_instance().
    _singleton = None

    def __init__(self):
        super(PubSubStreamer, self).__init__()
        self._stopped = False
        self._queue = Queue.Queue()
        self._sub_reader = PubSubStreamer.SubStreamReader(self._queue)
        # Maps app name -> list of callbacks to invoke per message.
        self.callbacks = {}

    @staticmethod
    def get_instance():
        if not PubSubStreamer._singleton:
            PubSubStreamer._singleton = PubSubStreamer()
            PubSubStreamer._singleton.start()
        return PubSubStreamer._singleton

    def add_app_callback(self, app, cb):
        if app in self.callbacks:
            self.callbacks[app].append(cb)
        else:
            self.callbacks[app] = [cb]

    def stop(self):
        self._stopped = True
        self._sub_reader.stop()

    def remove_app_callback(self, app, cb):
        if app in self.callbacks:
            try:
                self.callbacks[app].remove(cb)
            except ValueError:
                # Callback was never registered (or already removed); ignore.
                pass

    def run(self):
        self._sub_reader.start()
        while not self._stopped:
            # The pub/sub topic is used as the app name for dispatch.
            app, msg = self._queue.get()
            if app in self.callbacks:
                for cb in self.callbacks[app]:
                    cb(msg)
class AppLogStreamer(Thread):
    """Streams one app's log lines to a callback: first replays history from
    the binderd log_reader, then follows live pub/sub messages that are
    strictly newer than the replayed history."""

    def __init__(self, app, start_time, callback):
        super(AppLogStreamer, self).__init__()
        self.daemon = True
        self._stopped = False
        self._app = app
        self._start_time = start_time
        self._cb = callback
        self._pubsub_cb = None
        # Ensure the shared subscriber thread is running before we stream.
        PubSubStreamer.get_instance()

    def stop(self):
        self._stopped = True
        if self._pubsub_cb:
            PubSubStreamer.get_instance().remove_app_callback(self._app, self._pubsub_cb)

    def run(self):
        buf = Queue.Queue()
        # Register the live subscription before the historical read so no
        # messages are lost in between; they are buffered until replay ends.
        def buffered_cb(msg):
            buf.put(msg)
        self._pubsub_cb = buffered_cb
        PubSubStreamer.get_instance().add_app_callback(self._app, self._pubsub_cb)
        lines = []
        bc = BinderClient("log_reader")
        rsp = bc.send({"type": "get", "app": self._app, "since": self._start_time})
        if rsp["type"] == "success":
            lines = rsp["msg"].split("\n")
        else:
            # NOTE(review): bc is not closed on this error return path -- confirm
            # whether BinderClient needs explicit cleanup here.
            error_log("LoggerClient", "read_stream failure for app {}: {}".format(self._app, rsp))
            return
        bc.close()
        # exhaust all lines from the get request
        last_time = None
        for line in lines:
            last_time = LogSettings.EXTRACT_TIME(line)
            self._cb(line)
        if last_time:
            last_time = time.strptime(last_time, LogSettings.TIME_FORMAT)
        # now start reading the subscriber output (starting strictly after last_time)
        while not self._stopped:
            try:
                # Short timeout so the stop flag is re-checked regularly.
                timeout = 0.05
                line = buf.get(timeout=timeout)
                line_time = time.strptime(LogSettings.EXTRACT_TIME(line), LogSettings.TIME_FORMAT)
                if not last_time or line_time > last_time:
                    self._cb(line)
            except Queue.Empty:
                continue
| apache-2.0 |
dudonwai/dudonsblog | Lib/encodings/cp500.py | 593 | 13377 | """ Python Character Mapping Codec cp500 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP500.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec: both directions delegate to the C-level charmap
    # helpers using the module-level tables built at import time.
    # (This file is generated by gencodec.py; keep edits to comments only.)
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # charmap encoding is context-free, so each chunk encodes independently;
    # [0] drops the consumed-length part of the (output, length) tuple.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Single-byte charmap: no state needed between chunks.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits encode() from Codec and stream handling from codecs.StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits decode() from Codec and stream handling from codecs.StreamReader.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry used to register the cp500 codec."""
    # The codec is stateless, so one shared instance serves both directions.
    codec = Codec()
    return codecs.CodecInfo(
        name='cp500',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x9c' # 0x04 -> CONTROL
u'\t' # 0x05 -> HORIZONTAL TABULATION
u'\x86' # 0x06 -> CONTROL
u'\x7f' # 0x07 -> DELETE
u'\x97' # 0x08 -> CONTROL
u'\x8d' # 0x09 -> CONTROL
u'\x8e' # 0x0A -> CONTROL
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x9d' # 0x14 -> CONTROL
u'\x85' # 0x15 -> CONTROL
u'\x08' # 0x16 -> BACKSPACE
u'\x87' # 0x17 -> CONTROL
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x92' # 0x1A -> CONTROL
u'\x8f' # 0x1B -> CONTROL
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u'\x80' # 0x20 -> CONTROL
u'\x81' # 0x21 -> CONTROL
u'\x82' # 0x22 -> CONTROL
u'\x83' # 0x23 -> CONTROL
u'\x84' # 0x24 -> CONTROL
u'\n' # 0x25 -> LINE FEED
u'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
u'\x1b' # 0x27 -> ESCAPE
u'\x88' # 0x28 -> CONTROL
u'\x89' # 0x29 -> CONTROL
u'\x8a' # 0x2A -> CONTROL
u'\x8b' # 0x2B -> CONTROL
u'\x8c' # 0x2C -> CONTROL
u'\x05' # 0x2D -> ENQUIRY
u'\x06' # 0x2E -> ACKNOWLEDGE
u'\x07' # 0x2F -> BELL
u'\x90' # 0x30 -> CONTROL
u'\x91' # 0x31 -> CONTROL
u'\x16' # 0x32 -> SYNCHRONOUS IDLE
u'\x93' # 0x33 -> CONTROL
u'\x94' # 0x34 -> CONTROL
u'\x95' # 0x35 -> CONTROL
u'\x96' # 0x36 -> CONTROL
u'\x04' # 0x37 -> END OF TRANSMISSION
u'\x98' # 0x38 -> CONTROL
u'\x99' # 0x39 -> CONTROL
u'\x9a' # 0x3A -> CONTROL
u'\x9b' # 0x3B -> CONTROL
u'\x14' # 0x3C -> DEVICE CONTROL FOUR
u'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
u'\x9e' # 0x3E -> CONTROL
u'\x1a' # 0x3F -> SUBSTITUTE
u' ' # 0x40 -> SPACE
u'\xa0' # 0x41 -> NO-BREAK SPACE
u'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE
u'[' # 0x4A -> LEFT SQUARE BRACKET
u'.' # 0x4B -> FULL STOP
u'<' # 0x4C -> LESS-THAN SIGN
u'(' # 0x4D -> LEFT PARENTHESIS
u'+' # 0x4E -> PLUS SIGN
u'!' # 0x4F -> EXCLAMATION MARK
u'&' # 0x50 -> AMPERSAND
u'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE
u'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE
u'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN)
u']' # 0x5A -> RIGHT SQUARE BRACKET
u'$' # 0x5B -> DOLLAR SIGN
u'*' # 0x5C -> ASTERISK
u')' # 0x5D -> RIGHT PARENTHESIS
u';' # 0x5E -> SEMICOLON
u'^' # 0x5F -> CIRCUMFLEX ACCENT
u'-' # 0x60 -> HYPHEN-MINUS
u'/' # 0x61 -> SOLIDUS
u'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xa6' # 0x6A -> BROKEN BAR
u',' # 0x6B -> COMMA
u'%' # 0x6C -> PERCENT SIGN
u'_' # 0x6D -> LOW LINE
u'>' # 0x6E -> GREATER-THAN SIGN
u'?' # 0x6F -> QUESTION MARK
u'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE
u'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE
u'`' # 0x79 -> GRAVE ACCENT
u':' # 0x7A -> COLON
u'#' # 0x7B -> NUMBER SIGN
u'@' # 0x7C -> COMMERCIAL AT
u"'" # 0x7D -> APOSTROPHE
u'=' # 0x7E -> EQUALS SIGN
u'"' # 0x7F -> QUOTATION MARK
u'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE
u'a' # 0x81 -> LATIN SMALL LETTER A
u'b' # 0x82 -> LATIN SMALL LETTER B
u'c' # 0x83 -> LATIN SMALL LETTER C
u'd' # 0x84 -> LATIN SMALL LETTER D
u'e' # 0x85 -> LATIN SMALL LETTER E
u'f' # 0x86 -> LATIN SMALL LETTER F
u'g' # 0x87 -> LATIN SMALL LETTER G
u'h' # 0x88 -> LATIN SMALL LETTER H
u'i' # 0x89 -> LATIN SMALL LETTER I
u'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC)
u'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE
u'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC)
u'\xb1' # 0x8F -> PLUS-MINUS SIGN
u'\xb0' # 0x90 -> DEGREE SIGN
u'j' # 0x91 -> LATIN SMALL LETTER J
u'k' # 0x92 -> LATIN SMALL LETTER K
u'l' # 0x93 -> LATIN SMALL LETTER L
u'm' # 0x94 -> LATIN SMALL LETTER M
u'n' # 0x95 -> LATIN SMALL LETTER N
u'o' # 0x96 -> LATIN SMALL LETTER O
u'p' # 0x97 -> LATIN SMALL LETTER P
u'q' # 0x98 -> LATIN SMALL LETTER Q
u'r' # 0x99 -> LATIN SMALL LETTER R
u'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR
u'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR
u'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE
u'\xb8' # 0x9D -> CEDILLA
u'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE
u'\xa4' # 0x9F -> CURRENCY SIGN
u'\xb5' # 0xA0 -> MICRO SIGN
u'~' # 0xA1 -> TILDE
u's' # 0xA2 -> LATIN SMALL LETTER S
u't' # 0xA3 -> LATIN SMALL LETTER T
u'u' # 0xA4 -> LATIN SMALL LETTER U
u'v' # 0xA5 -> LATIN SMALL LETTER V
u'w' # 0xA6 -> LATIN SMALL LETTER W
u'x' # 0xA7 -> LATIN SMALL LETTER X
u'y' # 0xA8 -> LATIN SMALL LETTER Y
u'z' # 0xA9 -> LATIN SMALL LETTER Z
u'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK
u'\xbf' # 0xAB -> INVERTED QUESTION MARK
u'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC)
u'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC)
u'\xae' # 0xAF -> REGISTERED SIGN
u'\xa2' # 0xB0 -> CENT SIGN
u'\xa3' # 0xB1 -> POUND SIGN
u'\xa5' # 0xB2 -> YEN SIGN
u'\xb7' # 0xB3 -> MIDDLE DOT
u'\xa9' # 0xB4 -> COPYRIGHT SIGN
u'\xa7' # 0xB5 -> SECTION SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
u'\xac' # 0xBA -> NOT SIGN
u'|' # 0xBB -> VERTICAL LINE
u'\xaf' # 0xBC -> MACRON
u'\xa8' # 0xBD -> DIAERESIS
u'\xb4' # 0xBE -> ACUTE ACCENT
u'\xd7' # 0xBF -> MULTIPLICATION SIGN
u'{' # 0xC0 -> LEFT CURLY BRACKET
u'A' # 0xC1 -> LATIN CAPITAL LETTER A
u'B' # 0xC2 -> LATIN CAPITAL LETTER B
u'C' # 0xC3 -> LATIN CAPITAL LETTER C
u'D' # 0xC4 -> LATIN CAPITAL LETTER D
u'E' # 0xC5 -> LATIN CAPITAL LETTER E
u'F' # 0xC6 -> LATIN CAPITAL LETTER F
u'G' # 0xC7 -> LATIN CAPITAL LETTER G
u'H' # 0xC8 -> LATIN CAPITAL LETTER H
u'I' # 0xC9 -> LATIN CAPITAL LETTER I
u'\xad' # 0xCA -> SOFT HYPHEN
u'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE
u'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE
u'}' # 0xD0 -> RIGHT CURLY BRACKET
u'J' # 0xD1 -> LATIN CAPITAL LETTER J
u'K' # 0xD2 -> LATIN CAPITAL LETTER K
u'L' # 0xD3 -> LATIN CAPITAL LETTER L
u'M' # 0xD4 -> LATIN CAPITAL LETTER M
u'N' # 0xD5 -> LATIN CAPITAL LETTER N
u'O' # 0xD6 -> LATIN CAPITAL LETTER O
u'P' # 0xD7 -> LATIN CAPITAL LETTER P
u'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
u'R' # 0xD9 -> LATIN CAPITAL LETTER R
u'\xb9' # 0xDA -> SUPERSCRIPT ONE
u'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE
u'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\\' # 0xE0 -> REVERSE SOLIDUS
u'\xf7' # 0xE1 -> DIVISION SIGN
u'S' # 0xE2 -> LATIN CAPITAL LETTER S
u'T' # 0xE3 -> LATIN CAPITAL LETTER T
u'U' # 0xE4 -> LATIN CAPITAL LETTER U
u'V' # 0xE5 -> LATIN CAPITAL LETTER V
u'W' # 0xE6 -> LATIN CAPITAL LETTER W
u'X' # 0xE7 -> LATIN CAPITAL LETTER X
u'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
u'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
u'\xb2' # 0xEA -> SUPERSCRIPT TWO
u'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE
u'0' # 0xF0 -> DIGIT ZERO
u'1' # 0xF1 -> DIGIT ONE
u'2' # 0xF2 -> DIGIT TWO
u'3' # 0xF3 -> DIGIT THREE
u'4' # 0xF4 -> DIGIT FOUR
u'5' # 0xF5 -> DIGIT FIVE
u'6' # 0xF6 -> DIGIT SIX
u'7' # 0xF7 -> DIGIT SEVEN
u'8' # 0xF8 -> DIGIT EIGHT
u'9' # 0xF9 -> DIGIT NINE
u'\xb3' # 0xFA -> SUPERSCRIPT THREE
u'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE
u'\x9f' # 0xFF -> CONTROL
)
### Encoding table
# Inverse mapping derived from decoding_table, built once at import time
# for use by codecs.charmap_encode.
encoding_table=codecs.charmap_build(decoding_table)
| mit |
ckuethe/gnuradio | gr-analog/python/analog/wfm_rcv_pll.py | 58 | 9802 | #
# Copyright 2005,2006,2012-2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
from gnuradio import blocks
from gnuradio import filter
from fm_emph import fm_deemph
import math
try:
from gnuradio import analog
except ImportError:
import analog_swig as analog
class wfm_rcv_pll(gr.hier_block2):
    """
    Hierarchical block for demodulating a broadcast FM stereo signal.

    The input is the downconverted complex baseband signal (gr_complex).
    The output is two streams of the demodulated audio (float):
    output 0 = Left, output 1 = Right.
    """

    def __init__(self, demod_rate, audio_decimation):
        """
        Args:
            demod_rate: input sample rate of complex baseband input. (float)
            audio_decimation: how much to decimate demod_rate to get to audio. (integer)
        """
        gr.hier_block2.__init__(self, "wfm_rcv_pll",
                                gr.io_signature(1, 1, gr.sizeof_gr_complex),  # Input signature
                                gr.io_signature(2, 2, gr.sizeof_float))       # Output signature

        audio_rate = demod_rate / audio_decimation

        # We assign to self so that outsiders can grab the demodulator
        # if they need to.  E.g., to plot its output.
        #
        # input: complex; output: float (instantaneous frequency)
        loop_bw = 2*math.pi/100.0
        max_freq = 2.0*math.pi*90e3/demod_rate
        self.fm_demod = analog.pll_freqdet_cf(loop_bw, max_freq, -max_freq)

        # input: float; output: float
        self.deemph_Left = fm_deemph(audio_rate)
        self.deemph_Right = fm_deemph(audio_rate)

        # compute FIR filter taps for the L+R audio low-pass filter
        width_of_transition_band = audio_rate / 32
        audio_coeffs = filter.firdes.low_pass(1.0,          # gain
                                              demod_rate,   # sampling rate
                                              15000,        # passband cutoff (Hz)
                                              width_of_transition_band,
                                              filter.firdes.WIN_HAMMING)
        # input: float; output: float
        self.audio_filter = filter.fir_filter_fff(audio_decimation, audio_coeffs)

        # Pick off the stereo carrier/2 with this filter. It is attenuated
        # 10 dB so apply 10 dB gain.  We pick off the negative frequency half
        # because we want to baseband by it!
        ## NOTE THIS WAS HACKED TO OFFSET INSERTION LOSS DUE TO DEEMPHASIS
        stereo_carrier_filter_coeffs = \
            filter.firdes.complex_band_pass(10.0,
                                            demod_rate,
                                            -19020,
                                            -18980,
                                            width_of_transition_band,
                                            filter.firdes.WIN_HAMMING)

        # Pick off the double-sideband suppressed-carrier Left-Right audio.
        # It is attenuated 10 dB so apply 10 dB gain.
        stereo_dsbsc_filter_coeffs = \
            filter.firdes.complex_band_pass(20.0,
                                            demod_rate,
                                            38000 - 15000/2,
                                            38000 + 15000/2,
                                            width_of_transition_band,
                                            filter.firdes.WIN_HAMMING)

        # construct overlap-add filter system from coefficients for stereo carrier
        self.stereo_carrier_filter = \
            filter.fir_filter_fcc(audio_decimation, stereo_carrier_filter_coeffs)

        # carrier is twice the picked-off carrier, so arrange to do a complex multiply
        self.stereo_carrier_generator = blocks.multiply_cc()

        # Pick off the RDS signal (57 kHz +/- 1.5 kHz)
        stereo_rds_filter_coeffs = \
            filter.firdes.complex_band_pass(30.0,
                                            demod_rate,
                                            57000 - 1500,
                                            57000 + 1500,
                                            width_of_transition_band,
                                            filter.firdes.WIN_HAMMING)

        # construct overlap-add filter system from coefficients for RDS
        self.rds_signal_filter = \
            filter.fir_filter_fcc(audio_decimation, stereo_rds_filter_coeffs)

        self.rds_carrier_generator = blocks.multiply_cc()
        self.rds_signal_generator = blocks.multiply_cc()
        # BUG FIX: this was previously "self_rds_signal_processor" -- a typo'd
        # local variable instead of an instance attribute -- which made the
        # RDS sink unreachable from outside the block.  Exposing it as an
        # attribute lets callers substitute a real RDS processor later.
        self.rds_signal_processor = blocks.null_sink(gr.sizeof_gr_complex)

        # PLL to recover the 19 kHz pilot tone (picked off at negative
        # frequency, hence the negated limits).
        loop_bw = 2*math.pi/100.0
        max_freq = -2.0*math.pi*18990/audio_rate
        min_freq = -2.0*math.pi*19010/audio_rate
        self.stereo_carrier_pll_recovery = \
            analog.pll_refout_cc(loop_bw, max_freq, min_freq)
        # self.stereo_carrier_pll_recovery.squelch_enable(False)
        #     pll_refout does not have squelch yet, so disabled for now

        # set up mixer (multiplier) to get the L-R signal at baseband
        self.stereo_basebander = blocks.multiply_cc()
        # pick off the real component of the basebanded L-R signal.
        # The imaginary part SHOULD be zero.
        self.LmR_real = blocks.complex_to_real()
        self.Make_Left = blocks.add_ff()
        self.Make_Right = blocks.sub_ff()

        self.stereo_dsbsc_filter = \
            filter.fir_filter_fcc(audio_decimation, stereo_dsbsc_filter_coeffs)

        # send the real signal to the complex filter to pick off the carrier
        # and then to one side of a multiplier
        self.connect(self, self.fm_demod, self.stereo_carrier_filter,
                     self.stereo_carrier_pll_recovery, (self.stereo_carrier_generator, 0))
        # send the already-filtered carrier to the other side of the multiplier
        self.connect(self.stereo_carrier_pll_recovery, (self.stereo_carrier_generator, 1))
        # the resulting signal from this multiplier is the carrier with
        # correct phase but at -38000 Hz.

        # send the new carrier to one side of the mixer (multiplier)
        self.connect(self.stereo_carrier_generator, (self.stereo_basebander, 0))
        # send the demodulated signal to the DSBSC pick-off filter; the complex
        # DSBSC signal at +38000 Hz goes to the other side of the mixer
        self.connect(self.fm_demod, self.stereo_dsbsc_filter, (self.stereo_basebander, 1))
        # the result is BASEBANDED DSBSC with phase zero!

        # Pick off the real part (imaginary is theoretically zero) and send
        # it to one side of a summer
        self.connect(self.stereo_basebander, self.LmR_real, (self.Make_Left, 0))
        # take the same real part of the DSBSC baseband signal and send it to
        # the negative side of a subtracter
        self.connect(self.LmR_real, (self.Make_Right, 1))

        # Make the RDS carrier by taking the squared pilot tone and
        # multiplying by the pilot tone
        self.connect(self.stereo_basebander, (self.rds_carrier_generator, 0))
        self.connect(self.stereo_carrier_pll_recovery, (self.rds_carrier_generator, 1))
        # take signal, filter off RDS, send into mixer channel 0
        self.connect(self.fm_demod, self.rds_signal_filter, (self.rds_signal_generator, 0))
        # take rds_carrier_generator output and send into mixer channel 1
        self.connect(self.rds_carrier_generator, (self.rds_signal_generator, 1))
        # send the basebanded RDS signal into the "processor", which for now
        # is a null sink
        self.connect(self.rds_signal_generator, self.rds_signal_processor)

        # pick off the L+R audio and send it to the PLUS side of the adder
        self.connect(self.fm_demod, self.audio_filter, (self.Make_Left, 1))
        # take the picked-off L+R audio and send it to the PLUS side of the subtractor
        self.connect(self.audio_filter, (self.Make_Right, 0))
        # Make_Left gets (L+R) + (L-R) and results in 2*L
        # Make_Right gets (L+R) - (L-R) and results in 2*R
        self.connect(self.Make_Left, self.deemph_Left, (self, 0))
        self.connect(self.Make_Right, self.deemph_Right, (self, 1))
        # NOTE: mono support will require a variable number of outputs in
        # hier_block2s.  See ticket:174 in Trac database.
| gpl-3.0 |
alexschiller/osf.io | api_tests/nodes/views/test_node_relationship_institutions.py | 7 | 18319 | from nose.tools import * # flake8: noqa
from tests.base import ApiTestCase
from osf_tests.factories import InstitutionFactory, AuthUserFactory, NodeFactory
from api.base.settings.defaults import API_BASE
from website.util import permissions
class TestNodeRelationshipInstitutions(ApiTestCase):
    """API tests for /nodes/<id>/relationships/institutions/.

    Exercises GET/POST/PUT/PATCH/DELETE semantics of the node-institutions
    relationship endpoint for three permission levels: an admin (the node
    creator), a read-write contributor, and a read-only contributor, each
    affiliated with a distinct institution.
    """

    def setUp(self):
        super(TestNodeRelationshipInstitutions, self).setUp()

        # Admin user affiliated with both institutions.
        self.institution2 = InstitutionFactory()
        self.institution1 = InstitutionFactory()

        self.user = AuthUserFactory()
        self.user.affiliated_institutions.add(self.institution1)
        self.user.affiliated_institutions.add(self.institution2)
        self.user.save()

        # Read-write contributor with its own institution.
        self.read_write_contributor = AuthUserFactory()
        self.read_write_contributor_institution = InstitutionFactory()
        self.read_write_contributor.affiliated_institutions.add(self.read_write_contributor_institution)
        self.read_write_contributor.save()

        # Read-only contributor with its own institution.
        self.read_only_contributor = AuthUserFactory()
        self.read_only_contributor_institution = InstitutionFactory()
        self.read_only_contributor.affiliated_institutions.add(self.read_only_contributor_institution)
        self.read_only_contributor.save()

        # Node created by the admin; other users added as contributors.
        self.node = NodeFactory(creator=self.user)
        self.node.add_contributor(self.read_write_contributor, permissions=[permissions.WRITE])
        self.node.add_contributor(self.read_only_contributor, permissions=[permissions.READ])
        self.node.save()

        self.node_institutions_url = '/{0}nodes/{1}/relationships/institutions/'.format(API_BASE, self.node._id)

    def create_payload(self, *institution_ids):
        """Build a JSON-API relationship payload for the given institution ids."""
        data = []
        for id_ in institution_ids:
            data.append({'type': 'institutions', 'id': id_})
        return {'data': data}

    def test_node_with_no_permissions(self):
        # Non-contributor (even though institution-affiliated) gets 403.
        user = AuthUserFactory()
        user.affiliated_institutions.add(self.institution1)
        user.save()
        # NOTE(review): a list is passed where create_payload expects
        # *institution_ids, so the payload id is a list -- the 403 still
        # short-circuits before validation.  TODO confirm / drop brackets.
        res = self.app.put_json_api(
            self.node_institutions_url,
            self.create_payload([self.institution1._id]),
            auth=user.auth,
            expect_errors=True,
        )
        assert_equal(res.status_code, 403)

    def test_user_with_no_institution(self):
        # Creator without any institution affiliation cannot add one.
        user = AuthUserFactory()
        node = NodeFactory(creator=user)
        res = self.app.put_json_api(
            '/{0}nodes/{1}/relationships/institutions/'.format(API_BASE, node._id),
            self.create_payload(self.institution1._id),
            expect_errors=True,
            auth=user.auth
        )
        assert_equal(res.status_code, 403)

    def test_get_public_node(self):
        # Public nodes expose the (empty) relationship without auth.
        self.node.is_public = True
        self.node.save()

        res = self.app.get(
            self.node_institutions_url
        )

        assert_equal(res.status_code, 200)
        assert_equal(res.json['data'], [])

    def test_institution_does_not_exist(self):
        res = self.app.put_json_api(
            self.node_institutions_url,
            self.create_payload('not_an_id'),
            expect_errors=True,
            auth=self.user.auth
        )

        assert_equal(res.status_code, 404)

    def test_wrong_type(self):
        # JSON-API type mismatch yields 409 Conflict.
        res = self.app.put_json_api(
            self.node_institutions_url,
            {'data': [{'type': 'not_institution', 'id': self.institution1._id}]},
            expect_errors=True,
            auth=self.user.auth
        )

        assert_equal(res.status_code, 409)

    def test_user_with_institution_and_permissions(self):
        # POST by an affiliated admin adds both institutions (201).
        assert_not_in(self.institution1, self.node.affiliated_institutions.all())
        assert_not_in(self.institution2, self.node.affiliated_institutions.all())

        res = self.app.post_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id, self.institution2._id),
            auth=self.user.auth
        )

        assert_equal(res.status_code, 201)
        data = res.json['data']
        ret_institutions = [inst['id'] for inst in data]

        assert_in(self.institution1._id, ret_institutions)
        assert_in(self.institution2._id, ret_institutions)

        self.node.reload()
        assert_in(self.institution1, self.node.affiliated_institutions.all())
        assert_in(self.institution2, self.node.affiliated_institutions.all())

    def test_user_with_institution_and_permissions_through_patch(self):
        # PUT (full replacement) by an affiliated admin succeeds (200).
        assert_not_in(self.institution1, self.node.affiliated_institutions.all())
        assert_not_in(self.institution2, self.node.affiliated_institutions.all())

        res = self.app.put_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id, self.institution2._id),
            auth=self.user.auth
        )

        assert_equal(res.status_code, 200)
        data = res.json['data']
        ret_institutions = [inst['id'] for inst in data]

        assert_in(self.institution1._id, ret_institutions)
        assert_in(self.institution2._id, ret_institutions)

        self.node.reload()
        assert_in(self.institution1, self.node.affiliated_institutions.all())
        assert_in(self.institution2, self.node.affiliated_institutions.all())

    def test_remove_institutions_with_no_permissions(self):
        # Unauthenticated PUT is rejected with 401.
        res = self.app.put_json_api(
            self.node_institutions_url,
            self.create_payload(),
            expect_errors=True
        )
        assert_equal(res.status_code, 401)

    def test_remove_institutions_with_affiliated_user(self):
        # PUT with an empty data list clears all affiliations.
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        assert_in(self.institution1, self.node.affiliated_institutions.all())

        res = self.app.put_json_api(
            self.node_institutions_url,
            {'data': []},
            auth=self.user.auth
        )

        assert_equal(res.status_code, 200)
        self.node.reload()
        assert_equal(self.node.affiliated_institutions.count(), 0)

    def test_using_post_making_no_changes_returns_204(self):
        # POSTing an already-present institution is a no-op (204).
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        assert_in(self.institution1, self.node.affiliated_institutions.all())

        res = self.app.post_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id),
            auth=self.user.auth
        )

        assert_equal(res.status_code, 204)
        self.node.reload()
        assert_in(self.institution1, self.node.affiliated_institutions.all())

    def test_put_not_admin_but_affiliated(self):
        # A non-admin contributor affiliated with the institution may add it.
        user = AuthUserFactory()
        user.affiliated_institutions.add(self.institution1)
        user.save()
        self.node.add_contributor(user)
        self.node.save()

        res = self.app.put_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id),
            auth=user.auth
        )

        self.node.reload()
        assert_equal(res.status_code, 200)
        assert_in(self.institution1, self.node.affiliated_institutions.all())

    def test_retrieve_private_node_no_auth(self):
        res = self.app.get(self.node_institutions_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    def test_add_through_patch_one_inst_to_node_with_inst(self):
        # PATCH including both old and new institutions keeps both.
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        assert_in(self.institution1, self.node.affiliated_institutions.all())
        assert_not_in(self.institution2, self.node.affiliated_institutions.all())

        res = self.app.patch_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id, self.institution2._id),
            auth=self.user.auth
        )

        assert_equal(res.status_code, 200)
        self.node.reload()
        assert_in(self.institution1, self.node.affiliated_institutions.all())
        assert_in(self.institution2, self.node.affiliated_institutions.all())

    def test_add_through_patch_one_inst_while_removing_other(self):
        # PATCH with only the new institution replaces the old one.
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        assert_in(self.institution1, self.node.affiliated_institutions.all())
        assert_not_in(self.institution2, self.node.affiliated_institutions.all())

        res = self.app.patch_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution2._id),
            auth=self.user.auth
        )

        assert_equal(res.status_code, 200)
        self.node.reload()
        assert_not_in(self.institution1, self.node.affiliated_institutions.all())
        assert_in(self.institution2, self.node.affiliated_institutions.all())

    def test_add_one_inst_with_post_to_node_with_inst(self):
        # POST is additive: existing affiliation is preserved (201).
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        assert_in(self.institution1, self.node.affiliated_institutions.all())
        assert_not_in(self.institution2, self.node.affiliated_institutions.all())

        res = self.app.post_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution2._id),
            auth=self.user.auth
        )

        assert_equal(res.status_code, 201)
        self.node.reload()
        assert_in(self.institution1, self.node.affiliated_institutions.all())
        assert_in(self.institution2, self.node.affiliated_institutions.all())

    def test_delete_nothing(self):
        # DELETE with an empty payload is a harmless no-op (204).
        res = self.app.delete_json_api(
            self.node_institutions_url,
            self.create_payload(),
            auth=self.user.auth
        )
        assert_equal(res.status_code, 204)

    def test_delete_existing_inst(self):
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        assert_in(self.institution1, self.node.affiliated_institutions.all())

        res = self.app.delete_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id),
            auth=self.user.auth
        )

        assert_equal(res.status_code, 204)
        self.node.reload()
        assert_not_in(self.institution1, self.node.affiliated_institutions.all())

    def test_delete_not_affiliated_and_affiliated_insts(self):
        # Deleting a mix of affiliated and non-affiliated ids still 204s.
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        assert_in(self.institution1, self.node.affiliated_institutions.all())
        assert_not_in(self.institution2, self.node.affiliated_institutions.all())

        res = self.app.delete_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id, self.institution2._id),
            auth=self.user.auth,
        )

        assert_equal(res.status_code, 204)
        self.node.reload()
        assert_not_in(self.institution1, self.node.affiliated_institutions.all())
        assert_not_in(self.institution2, self.node.affiliated_institutions.all())

    def test_delete_user_is_admin(self):
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        res = self.app.delete_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id),
            auth=self.user.auth
        )
        assert_equal(res.status_code, 204)

    def test_delete_user_is_read_write(self):
        # Read-write contributor affiliated with the institution may delete.
        user = AuthUserFactory()
        user.affiliated_institutions.add(self.institution1)
        user.save()
        self.node.add_contributor(user)
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()

        res = self.app.delete_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id),
            auth=user.auth
        )

        assert_equal(res.status_code, 204)

    def test_delete_user_is_read_only(self):
        # Read-only contributor cannot delete, even if affiliated (403).
        user = AuthUserFactory()
        user.affiliated_institutions.add(self.institution1)
        user.save()
        self.node.add_contributor(user, permissions=[permissions.READ])
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()

        res = self.app.delete_json_api(
            self.node_institutions_url,
            self.create_payload(self.institution1._id),
            auth=user.auth,
            expect_errors=True
        )

        assert_equal(res.status_code, 403)

    def test_delete_user_is_admin_but_not_affiliated_with_inst(self):
        # Admins may remove an institution they are not affiliated with.
        user = AuthUserFactory()
        node = NodeFactory(creator=user)
        node.affiliated_institutions.add(self.institution1)
        node.save()
        assert_in(self.institution1, node.affiliated_institutions.all())

        res = self.app.delete_json_api(
            '/{0}nodes/{1}/relationships/institutions/'.format(API_BASE, node._id),
            self.create_payload(self.institution1._id),
            auth=user.auth,
        )

        assert_equal(res.status_code, 204)
        node.reload()
        assert_not_in(self.institution1, node.affiliated_institutions.all())

    def test_admin_can_add_affiliated_institution(self):
        payload = {
            'data': [{
                'type': 'institutions',
                'id': self.institution1._id
            }]
        }
        res = self.app.post_json_api(self.node_institutions_url, payload, auth=self.user.auth)
        self.node.reload()
        assert_equal(res.status_code, 201)
        assert_in(self.institution1, self.node.affiliated_institutions.all())

    def test_admin_can_remove_admin_affiliated_institution(self):
        self.node.affiliated_institutions.add(self.institution1)
        payload = {
            'data': [{
                'type': 'institutions',
                'id': self.institution1._id
            }]
        }
        res = self.app.delete_json_api(self.node_institutions_url, payload, auth=self.user.auth)
        self.node.reload()
        assert_equal(res.status_code, 204)
        assert_not_in(self.institution1, self.node.affiliated_institutions.all())

    def test_admin_can_remove_read_write_contributor_affiliated_institution(self):
        # Admin may remove an institution added via another contributor.
        self.node.affiliated_institutions.add(self.read_write_contributor_institution)
        self.node.save()
        payload = {
            'data': [{
                'type': 'institutions',
                'id': self.read_write_contributor_institution._id
            }]
        }
        res = self.app.delete_json_api(self.node_institutions_url, payload, auth=self.user.auth)
        self.node.reload()
        assert_equal(res.status_code, 204)
        assert_not_in(self.read_write_contributor_institution, self.node.affiliated_institutions.all())

    def test_read_write_contributor_can_add_affiliated_institution(self):
        payload = {
            'data': [{
                'type': 'institutions',
                'id': self.read_write_contributor_institution._id
            }]
        }
        res = self.app.post_json_api(self.node_institutions_url, payload, auth=self.read_write_contributor.auth)
        self.node.reload()
        assert_equal(res.status_code, 201)
        assert_in(self.read_write_contributor_institution, self.node.affiliated_institutions.all())

    def test_read_write_contributor_can_remove_affiliated_institution(self):
        self.node.affiliated_institutions.add(self.read_write_contributor_institution)
        self.node.save()
        payload = {
            'data': [{
                'type': 'institutions',
                'id': self.read_write_contributor_institution._id
            }]
        }
        res = self.app.delete_json_api(self.node_institutions_url, payload, auth=self.read_write_contributor.auth)
        self.node.reload()
        assert_equal(res.status_code, 204)
        assert_not_in(self.read_write_contributor_institution, self.node.affiliated_institutions.all())

    def test_read_write_contributor_cannot_remove_admin_affiliated_institution(self):
        # Removal requires affiliation with the institution being removed.
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        payload = {
            'data': [{
                'type': 'institutions',
                'id': self.institution1._id
            }]
        }
        res = self.app.delete_json_api(self.node_institutions_url, payload, auth=self.read_write_contributor.auth, expect_errors=True)
        self.node.reload()
        assert_equal(res.status_code, 403)
        assert_in(self.institution1, self.node.affiliated_institutions.all())

    def test_read_only_contributor_cannot_remove_admin_affiliated_institution(self):
        self.node.affiliated_institutions.add(self.institution1)
        self.node.save()
        payload = {
            'data': [{
                'type': 'institutions',
                'id': self.institution1._id
            }]
        }
        res = self.app.delete_json_api(self.node_institutions_url, payload, auth=self.read_only_contributor.auth, expect_errors=True)
        self.node.reload()
        assert_equal(res.status_code, 403)
        assert_in(self.institution1, self.node.affiliated_institutions.all())

    def test_read_only_contributor_cannot_add_affiliated_institution(self):
        payload = {
            'data': [{
                'type': 'institutions',
                'id': self.read_only_contributor_institution._id
            }]
        }
        res = self.app.post_json_api(self.node_institutions_url, payload, auth=self.read_only_contributor.auth, expect_errors=True)
        self.node.reload()
        assert_equal(res.status_code, 403)
        assert_not_in(self.read_write_contributor_institution, self.node.affiliated_institutions.all())

    def test_read_only_contributor_cannot_remove_affiliated_institution(self):
        self.node.affiliated_institutions.add(self.read_only_contributor_institution)
        self.node.save()
        payload = {
            'data': [{
                'type': 'institutions',
                'id': self.read_only_contributor_institution._id
            }]
        }
        res = self.app.delete_json_api(self.node_institutions_url, payload, auth=self.read_only_contributor.auth, expect_errors=True)
        self.node.reload()
        assert_equal(res.status_code, 403)
        assert_in(self.read_only_contributor_institution, self.node.affiliated_institutions.all())
| apache-2.0 |
akhilaananthram/nupic | nupic/regions/ImageSensorFilters/FillBackground.py | 17 | 3191 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
## @file
"""
from PIL import (Image,
ImageChops)
from nupic.regions.ImageSensorFilters.BaseFilter import BaseFilter
from nupic.image import (createMask, isSimpleBBox)
class FillBackground(BaseFilter):
    """
    Fill in the background (around the mask or around the bounding box).
    """

    def __init__(self, value=None, threshold=10, maskScale=1.0, blurRadius=0.0):
        """
        @param value -- If None, the background is filled in with the background
          color.  Otherwise, it is filled with value.  If value is a list, then
          this filter returns multiple images, one per value.
        @param threshold -- Pixel threshold used when a mask must be computed.
        @param maskScale -- Scale factor applied to the computed mask.
        @param blurRadius -- Blur radius applied to the computed mask.
        """
        BaseFilter.__init__(self)
        # Normalize 'value' to a list so process() can always iterate.
        self._values = value if hasattr(value, '__len__') else [value]
        self._threshold = threshold
        self._maskScale = maskScale
        self._blurRadius = blurRadius

    def getOutputCount(self):
        """
        Return the number of images returned by each call to process() --
        one per configured fill value.
        """
        return len(self._values)

    def process(self, image):
        """
        @param image -- The image to process.

        Returns a single image, or a list containing one or more images.
        """
        BaseFilter.process(self, image)

        # Use the image's alpha channel as the mask; if there is no alpha
        # channel or the mask is just a simple bounding box, compute a real
        # mask around the foreground instead.
        alpha = image.split()[-1]
        mask = alpha
        if image.mode[-1] != 'A' or isSimpleBBox(mask):
            mask = createMask(image, threshold=self._threshold, fillHoles=True,
                              backgroundColor=self.background,
                              blurRadius=self._blurRadius,
                              maskScale=self._maskScale)

        # Composite the foreground over a constant backdrop, once per value.
        outputs = []
        for fillValue in self._values:
            fill = self.background if fillValue is None else fillValue
            backdrop = ImageChops.constant(image, fill)
            composed = Image.composite(image.split()[0], backdrop, mask)
            composed.putalpha(alpha)
            outputs.append(composed)

        return outputs[0] if len(outputs) == 1 else outputs
| agpl-3.0 |
shanglt/youtube-dl | youtube_dl/extractor/testurl.py | 160 | 2162 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import ExtractorError
class TestURLIE(InfoExtractor):
    """ Allows addressing of the test cases as test:yout.*be_1 """

    IE_DESC = False  # Do not list
    _VALID_URL = r'test(?:url)?:(?P<id>(?P<extractor>.+?)(?:_(?P<num>[0-9]+))?)$'

    def _real_extract(self, url):
        from ..extractor import gen_extractors

        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        extractor_id = mobj.group('extractor')

        # Match the requested name against every registered extractor,
        # case-insensitively, treating the name as a regex.
        name_re = re.compile(extractor_id, flags=re.IGNORECASE)
        candidates = [ie for ie in gen_extractors() if name_re.search(ie.IE_NAME)]

        if not candidates:
            raise ExtractorError(
                'No extractors matching %r found' % extractor_id,
                expected=True)

        if len(candidates) == 1:
            extractor = candidates[0]
        else:
            # Ambiguous: accept only an exact (case-insensitive) name match.
            exact = [ie for ie in candidates
                     if ie.IE_NAME.lower() == extractor_id.lower()]
            if exact:
                extractor = exact[0]
            else:
                raise ExtractorError(
                    ('Found multiple matching extractors: %s' %
                     ' '.join(ie.IE_NAME for ie in candidates)),
                    expected=True)

        # Optional _<num> suffix selects a test case; default is the first.
        num_str = mobj.group('num')
        num = int(num_str) if num_str else 0

        testcases = []
        single = getattr(extractor, '_TEST', None)
        if single:
            testcases.append(single)
        testcases.extend(getattr(extractor, '_TESTS', []))

        if num >= len(testcases):
            raise ExtractorError(
                ('Test case %d not found, got only %d tests' %
                 (num, len(testcases))),
                expected=True)
        tc = testcases[num]

        self.to_screen('Test URL: %s' % tc['url'])

        return {
            '_type': 'url',
            'url': tc['url'],
            'id': video_id,
        }
| unlicense |
mibexsoftware/alfred-stash-workflow | workflow/src/lib/requests/packages/chardet/universaldetector.py | 1776 | 6840 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
import codecs
from .latin1prober import Latin1Prober # windows-1252
from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
from .escprober import EscCharSetProber # ISO-2122, etc.
import re
# A prober must report at least this confidence for its guess to be used.
MINIMUM_THRESHOLD = 0.20

# Input-state machine values for UniversalDetector._mInputState:
ePureAscii = 0  # only 7-bit ASCII bytes seen so far
eEscAscii = 1   # ASCII plus escape sequences (ISO-2022 / HZ style)
eHighbyte = 2   # at least one byte with the high bit set seen
class UniversalDetector:
    """Drives a set of charset probers over a byte stream.

    Usage: call feed() with successive chunks of bytes, then close() to
    finalize.  The best guess is available in ``self.result``, a dict with
    'encoding' (str or None) and 'confidence' (float) keys.
    """

    def __init__(self):
        # Matches any byte with the high bit set (i.e. non-ASCII).
        self._highBitDetector = re.compile(b'[\x80-\xFF]')
        # Matches ESC or "~{", the openers used by escape-based encodings
        # such as ISO-2022-* and HZ.
        self._escDetector = re.compile(b'(\033|~{)')
        self._mEscCharSetProber = None
        self._mCharSetProbers = []
        self.reset()

    def reset(self):
        """Reset all state so the detector can be reused on a new stream."""
        self.result = {'encoding': None, 'confidence': 0.0}
        self.done = False
        self._mStart = True
        self._mGotData = False
        self._mInputState = ePureAscii
        self._mLastChar = b''
        if self._mEscCharSetProber:
            self._mEscCharSetProber.reset()
        for prober in self._mCharSetProbers:
            prober.reset()

    def feed(self, aBuf):
        """Feed a chunk of bytes.

        May set self.done early when a BOM or a prober gives a definitive
        answer; subsequent calls then return immediately.
        """
        if self.done:
            return

        aLen = len(aBuf)
        if not aLen:
            return

        if not self._mGotData:
            # If the data starts with BOM, we know it is UTF
            if aBuf[:3] == codecs.BOM_UTF8:
                # EF BB BF  UTF-8 with BOM
                self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
            elif aBuf[:4] == codecs.BOM_UTF32_LE:
                # FF FE 00 00  UTF-32, little-endian BOM
                self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
            elif aBuf[:4] == codecs.BOM_UTF32_BE:
                # 00 00 FE FF  UTF-32, big-endian BOM
                self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
            elif aBuf[:4] == b'\xFE\xFF\x00\x00':
                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
                self.result = {
                    'encoding': "X-ISO-10646-UCS-4-3412",
                    'confidence': 1.0
                }
            elif aBuf[:4] == b'\x00\x00\xFF\xFE':
                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
                self.result = {
                    'encoding': "X-ISO-10646-UCS-4-2143",
                    'confidence': 1.0
                }
            elif aBuf[:2] == codecs.BOM_LE:
                # FF FE  UTF-16, little endian BOM
                self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
            elif aBuf[:2] == codecs.BOM_BE:
                # FE FF  UTF-16, big endian BOM
                self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}

        self._mGotData = True
        if self.result['encoding'] and (self.result['confidence'] > 0.0):
            self.done = True
            return

        # Promote the input state when evidence of non-pure-ASCII appears.
        # The previous chunk's last byte is prepended so an escape sequence
        # split across chunk boundaries is still detected.
        if self._mInputState == ePureAscii:
            if self._highBitDetector.search(aBuf):
                self._mInputState = eHighbyte
            elif ((self._mInputState == ePureAscii) and
                    self._escDetector.search(self._mLastChar + aBuf)):
                self._mInputState = eEscAscii

        self._mLastChar = aBuf[-1:]

        if self._mInputState == eEscAscii:
            # Escape-based encodings: a single grouped prober handles them.
            if not self._mEscCharSetProber:
                self._mEscCharSetProber = EscCharSetProber()
            if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
                self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
                               'confidence': self._mEscCharSetProber.get_confidence()}
                self.done = True
        elif self._mInputState == eHighbyte:
            # High-byte data: run multi-byte, single-byte and Latin-1 probers
            # in parallel; the first definitive hit wins.
            if not self._mCharSetProbers:
                self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
                                         Latin1Prober()]
            for prober in self._mCharSetProbers:
                if prober.feed(aBuf) == constants.eFoundIt:
                    self.result = {'encoding': prober.get_charset_name(),
                                   'confidence': prober.get_confidence()}
                    self.done = True
                    break

    def close(self):
        """Finalize detection and return self.result.

        If no prober reached MINIMUM_THRESHOLD, result stays
        {'encoding': None, 'confidence': 0.0} (and close() returns None
        on the pure-fallthrough path, matching historical behavior).
        """
        if self.done:
            return
        if not self._mGotData:
            if constants._debug:
                sys.stderr.write('no data received!\n')
            return
        self.done = True

        if self._mInputState == ePureAscii:
            # Nothing but 7-bit bytes: plain ASCII with full confidence.
            self.result = {'encoding': 'ascii', 'confidence': 1.0}
            return self.result

        if self._mInputState == eHighbyte:
            # Pick the single most confident prober, if it clears the bar.
            proberConfidence = None
            maxProberConfidence = 0.0
            maxProber = None
            for prober in self._mCharSetProbers:
                if not prober:
                    continue
                proberConfidence = prober.get_confidence()
                if proberConfidence > maxProberConfidence:
                    maxProberConfidence = proberConfidence
                    maxProber = prober
            if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
                self.result = {'encoding': maxProber.get_charset_name(),
                               'confidence': maxProber.get_confidence()}
                return self.result

        if constants._debug:
            sys.stderr.write('no probers hit minimum threshhold\n')
            for prober in self._mCharSetProbers[0].mProbers:
                if not prober:
                    continue
                sys.stderr.write('%s confidence = %s\n' %
                                 (prober.get_charset_name(),
                                  prober.get_confidence()))
| mit |
soapy/soapy | soapy/pyqtgraph/graphicsItems/ViewBox/axisCtrlTemplate_pyqt5.py | 38 | 5579 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './pyqtgraph/graphicsItems/ViewBox/axisCtrlTemplate.ui'
#
# Created: Wed Mar 26 15:09:28 2014
# by: PyQt5 UI code generator 5.0.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    """Auto-generated (PyQt5 pyuic) UI for the ViewBox axis-control panel.

    Generated from axisCtrlTemplate.ui -- do not hand-edit the widget
    construction; regenerate from the .ui file instead (see the file
    header warning).
    """
    def setupUi(self, Form):
        """Build the widget tree and grid layout onto *Form*."""
        Form.setObjectName("Form")
        Form.resize(186, 154)
        Form.setMaximumSize(QtCore.QSize(200, 16777215))
        self.gridLayout = QtWidgets.QGridLayout(Form)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setSpacing(0)
        self.gridLayout.setObjectName("gridLayout")
        self.label = QtWidgets.QLabel(Form)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 7, 0, 1, 2)
        self.linkCombo = QtWidgets.QComboBox(Form)
        self.linkCombo.setSizeAdjustPolicy(QtWidgets.QComboBox.AdjustToContents)
        self.linkCombo.setObjectName("linkCombo")
        self.gridLayout.addWidget(self.linkCombo, 7, 2, 1, 2)
        self.autoPercentSpin = QtWidgets.QSpinBox(Form)
        self.autoPercentSpin.setEnabled(True)
        self.autoPercentSpin.setMinimum(1)
        self.autoPercentSpin.setMaximum(100)
        self.autoPercentSpin.setSingleStep(1)
        self.autoPercentSpin.setProperty("value", 100)
        self.autoPercentSpin.setObjectName("autoPercentSpin")
        self.gridLayout.addWidget(self.autoPercentSpin, 2, 2, 1, 2)
        self.autoRadio = QtWidgets.QRadioButton(Form)
        self.autoRadio.setChecked(True)
        self.autoRadio.setObjectName("autoRadio")
        self.gridLayout.addWidget(self.autoRadio, 2, 0, 1, 2)
        self.manualRadio = QtWidgets.QRadioButton(Form)
        self.manualRadio.setObjectName("manualRadio")
        self.gridLayout.addWidget(self.manualRadio, 1, 0, 1, 2)
        self.minText = QtWidgets.QLineEdit(Form)
        self.minText.setObjectName("minText")
        self.gridLayout.addWidget(self.minText, 1, 2, 1, 1)
        self.maxText = QtWidgets.QLineEdit(Form)
        self.maxText.setObjectName("maxText")
        self.gridLayout.addWidget(self.maxText, 1, 3, 1, 1)
        self.invertCheck = QtWidgets.QCheckBox(Form)
        self.invertCheck.setObjectName("invertCheck")
        self.gridLayout.addWidget(self.invertCheck, 5, 0, 1, 4)
        self.mouseCheck = QtWidgets.QCheckBox(Form)
        self.mouseCheck.setChecked(True)
        self.mouseCheck.setObjectName("mouseCheck")
        self.gridLayout.addWidget(self.mouseCheck, 6, 0, 1, 4)
        self.visibleOnlyCheck = QtWidgets.QCheckBox(Form)
        self.visibleOnlyCheck.setObjectName("visibleOnlyCheck")
        self.gridLayout.addWidget(self.visibleOnlyCheck, 3, 2, 1, 2)
        self.autoPanCheck = QtWidgets.QCheckBox(Form)
        self.autoPanCheck.setObjectName("autoPanCheck")
        self.gridLayout.addWidget(self.autoPanCheck, 4, 2, 1, 2)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Set all user-visible strings and tooltips (translation hook)."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Form"))
        self.label.setText(_translate("Form", "Link Axis:"))
        self.linkCombo.setToolTip(_translate("Form", "<html><head/><body><p>Links this axis with another view. When linked, both views will display the same data range.</p></body></html>"))
        self.autoPercentSpin.setToolTip(_translate("Form", "<html><head/><body><p>Percent of data to be visible when auto-scaling. It may be useful to decrease this value for data with spiky noise.</p></body></html>"))
        self.autoPercentSpin.setSuffix(_translate("Form", "%"))
        self.autoRadio.setToolTip(_translate("Form", "<html><head/><body><p>Automatically resize this axis whenever the displayed data is changed.</p></body></html>"))
        self.autoRadio.setText(_translate("Form", "Auto"))
        self.manualRadio.setToolTip(_translate("Form", "<html><head/><body><p>Set the range for this axis manually. This disables automatic scaling. </p></body></html>"))
        self.manualRadio.setText(_translate("Form", "Manual"))
        self.minText.setToolTip(_translate("Form", "<html><head/><body><p>Minimum value to display for this axis.</p></body></html>"))
        self.minText.setText(_translate("Form", "0"))
        self.maxText.setToolTip(_translate("Form", "<html><head/><body><p>Maximum value to display for this axis.</p></body></html>"))
        self.maxText.setText(_translate("Form", "0"))
        self.invertCheck.setToolTip(_translate("Form", "<html><head/><body><p>Inverts the display of this axis. (+y points downward instead of upward)</p></body></html>"))
        self.invertCheck.setText(_translate("Form", "Invert Axis"))
        self.mouseCheck.setToolTip(_translate("Form", "<html><head/><body><p>Enables mouse interaction (panning, scaling) for this axis.</p></body></html>"))
        self.mouseCheck.setText(_translate("Form", "Mouse Enabled"))
        self.visibleOnlyCheck.setToolTip(_translate("Form", "<html><head/><body><p>When checked, the axis will only auto-scale to data that is visible along the orthogonal axis.</p></body></html>"))
        self.visibleOnlyCheck.setText(_translate("Form", "Visible Data Only"))
        self.autoPanCheck.setToolTip(_translate("Form", "<html><head/><body><p>When checked, the axis will automatically pan to center on the current data, but the scale along this axis will not change.</p></body></html>"))
        self.autoPanCheck.setText(_translate("Form", "Auto Pan Only"))
| gpl-3.0 |
cseed/hail | hail/python/test/hailtop/hailctl/dataproc/test_cli.py | 2 | 1186 | from unittest.mock import Mock
import pytest
from hailtop.hailctl.dataproc import cli
from hailtop.hailctl.dataproc import list_clusters
def test_required_gcloud_version_met(monkeypatch):
    """`hailctl dataproc list` proceeds when gcloud is exactly the minimum version."""
    fake_version = Mock(return_value=cli.MINIMUM_REQUIRED_GCLOUD_VERSION)
    monkeypatch.setattr("hailtop.hailctl.dataproc.gcloud.get_version", fake_version)
    list_main = Mock()
    monkeypatch.setattr(list_clusters, "main", list_main)

    cli.main(["list"])

    assert list_main.called
def test_required_gcloud_version_unmet(monkeypatch, capsys):
    """cli.main exits with a version error when gcloud is too old, and never dispatches."""
    monkeypatch.setattr("hailtop.hailctl.dataproc.gcloud.get_version",
                        Mock(return_value=(200, 0, 0)))
    list_main = Mock()
    monkeypatch.setattr(list_clusters, "main", list_main)

    with pytest.raises(SystemExit):
        cli.main(["list"])

    captured = capsys.readouterr()
    assert "hailctl dataproc requires Google Cloud SDK (gcloud) version" in captured.err
    assert not list_main.called
def test_unable_to_determine_version(monkeypatch):
    """A failing version probe is non-fatal: the command still runs."""
    failing_probe = Mock(side_effect=ValueError)
    monkeypatch.setattr("hailtop.hailctl.dataproc.gcloud.get_version", failing_probe)
    list_main = Mock()
    monkeypatch.setattr(list_clusters, "main", list_main)

    cli.main(["list"])

    assert list_main.called
| mit |
bboozzoo/mender-backend-cli | mender/client/__init__.py | 1 | 3091 | # The MIT License (MIT)
#
# Copyright (c) 2016 Maciej Borzecki
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
import requests
import requests.auth
from requests import Session as ApiClient
# Base paths for the two Mender backend API surfaces.
API_URL = '/api/management/v1/'        # management (operator-facing) API
API_DEVICES_URL = '/api/devices/v1/'   # device-facing API
def add_url_path(base, path):
    """Join *base* and *path* with exactly one '/' between them."""
    prefix = base if base.endswith('/') else base + '/'
    suffix = path[1:] if path.startswith('/') else path
    return prefix + suffix
def service_path(service):
    """Return *service* prefixed with the management-API base path."""
    return add_url_path(API_URL, service)
def admissions_url(host, path=''):
    """Build the device-admission endpoint URL on *host*, optionally extended by *path*."""
    url = add_url_path(host, service_path('/admission/devices'))
    return add_url_path(url, path) if path else url
def authentication_url(host, path=''):
    """Build the device-authentication endpoint URL on *host*, optionally extended by *path*."""
    url = add_url_path(host, service_path('/devauth/'))
    return add_url_path(url, path) if path else url
def deployments_url(host, path=''):
    """Build the deployments endpoint URL on *host*, optionally extended by *path*."""
    url = add_url_path(host, service_path('/deployments/deployments'))
    return add_url_path(url, path) if path else url
def artifacts_url(host, path=''):
    """Build the artifacts endpoint URL on *host*, optionally extended by *path*."""
    url = add_url_path(host, service_path('/deployments/artifacts'))
    return add_url_path(url, path) if path else url
def inventory_url(host, path=''):
    """Build the inventory endpoint URL on *host*, optionally extended by *path*."""
    url = add_url_path(host, service_path('/inventory'))
    return add_url_path(url, path) if path else url
def device_url(host, path=''):
    """Build a device-facing API URL on *host*, optionally extended by *path*."""
    url = add_url_path(host, API_DEVICES_URL)
    return add_url_path(url, path) if path else url
def user_url(host, path=''):
    """Build the user-administration endpoint URL on *host*, optionally extended by *path*."""
    url = add_url_path(host, service_path('/useradm'))
    return add_url_path(url, path) if path else url
class ClientError(requests.exceptions.RequestException):
    """Wrapper for client errors raised while talking to the backend."""
    pass
class ClientNotAuthorizedError(ClientError):
    """Client not authorized (request rejected by the backend's auth layer)."""
    pass
class JWTAuth(requests.auth.AuthBase):
    """requests auth hook that sends the device's JWT as a Bearer token."""

    def __init__(self, token):
        # token: the JWT string issued by the device-auth service
        self.token = token

    def __call__(self, r):
        header_value = 'Bearer {}'.format(self.token)
        r.headers['Authorization'] = header_value
        return r
| mit |
okroener/autokey | src/lib/common.py | 47 | 2600 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Chris Dekter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os.path, dbus.service
# --- file-system locations -------------------------------------------------
CONFIG_DIR = os.path.expanduser("~/.config/autokey")
LOCK_FILE = CONFIG_DIR + "/autokey.pid"
LOG_FILE = CONFIG_DIR + "/autokey.log"
MAX_LOG_SIZE = 5 * 1024 * 1024 # 5 megabytes
MAX_LOG_COUNT = 3  # number of rotated log files kept
LOG_FORMAT = "%(asctime)s %(levelname)s - %(name)s - %(message)s"
# --- application identity and project URLs ---------------------------------
APP_NAME = "autokey"
CATALOG = ""
VERSION = "0.90.4"
HOMEPAGE = "http://autokey.googlecode.com/"
BUG_EMAIL = "cdekter@gmail.com"
FAQ_URL = "http://code.google.com/p/autokey/wiki/FAQ"
API_URL = "http://autokey.googlecode.com/svn/trunk/doc/scripting/index.html"
HELP_URL = "http://code.google.com/p/autokey/w/list"
DONATE_URL = "https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=L333CPRZ6J8JC"
BUG_URL = "http://code.google.com/p/autokey/issues/entry"
# --- icon theme names ------------------------------------------------------
ICON_FILE = "autokey"
ICON_FILE_NOTIFICATION = "autokey-status"
ICON_FILE_NOTIFICATION_DARK = "autokey-status-dark"
ICON_FILE_NOTIFICATION_ERROR = "autokey-status-error"
# Presumably selects the Qt front-end when True; False in this build.
# TODO(review): confirm against the UI entry points.
USING_QT = False
class AppService(dbus.service.Object):
    """D-Bus service object exposing AutoKey actions on org.autokey.Service."""

    def __init__(self, app):
        # register this object at /AppService on the user's session bus
        busName = dbus.service.BusName('org.autokey.Service', bus=dbus.SessionBus())
        dbus.service.Object.__init__(self, busName, "/AppService")
        self.app = app

    @dbus.service.method(dbus_interface='org.autokey.Service', in_signature='', out_signature='')
    def show_configure(self):
        """Open the AutoKey configuration window."""
        self.app.show_configure()

    @dbus.service.method(dbus_interface='org.autokey.Service', in_signature='s', out_signature='')
    def run_script(self, name):
        """Run the script called *name*."""
        self.app.service.run_script(name)

    @dbus.service.method(dbus_interface='org.autokey.Service', in_signature='s', out_signature='')
    def run_phrase(self, name):
        """Trigger the phrase called *name*."""
        self.app.service.run_phrase(name)

    @dbus.service.method(dbus_interface='org.autokey.Service', in_signature='s', out_signature='')
    def run_folder(self, name):
        """Trigger the folder called *name*."""
        self.app.service.run_folder(name)
| gpl-3.0 |
gangadharkadam/saloon_frappe_install | frappe/desk/query_report.py | 20 | 8628 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import os, json
from frappe import _
from frappe.modules import scrub, get_module_path
from frappe.utils import flt, cint, get_html_format
from frappe.translate import send_translations
import frappe.desk.reportview
from frappe.permissions import get_role_permissions
def get_report_doc(report_name):
	"""Load the Report doc for *report_name*, enforcing access rights.

	Throws frappe.PermissionError when the user lacks read access to the
	report or "report" permission on its reference doctype, and throws
	when the report is disabled.
	"""
	doc = frappe.get_doc("Report", report_name)
	if not doc.has_permission("read"):
		frappe.throw(_("You don't have access to Report: {0}").format(report_name), frappe.PermissionError)
	if not frappe.has_permission(doc.ref_doctype, "report"):
		frappe.throw(_("You don't have permission to get a report on: {0}").format(doc.ref_doctype),
			frappe.PermissionError)
	if doc.disabled:
		frappe.throw(_("Report {0} is disabled").format(report_name))
	return doc
@frappe.whitelist()
def get_script(report_name):
	"""Return the client-side script and print-format HTML for a report.

	Looks for <module>/report/<scrubbed>/<scrubbed>.js and .html on disk;
	falls back to the Report doc's stored javascript, then to an empty
	stub. Also pushes report translations for non-English sessions.
	"""
	report = get_report_doc(report_name)
	module = report.module or frappe.db.get_value("DocType", report.ref_doctype, "module")
	module_path = get_module_path(module)
	report_folder = os.path.join(module_path, "report", scrub(report.name))
	script_path = os.path.join(report_folder, scrub(report.name) + ".js")
	print_path = os.path.join(report_folder, scrub(report.name) + ".html")
	script = None
	if os.path.exists(script_path):
		with open(script_path, "r") as f:
			script = f.read()
	html_format = get_html_format(print_path)
	if not script and report.javascript:
		script = report.javascript
	if not script:
		# empty stub so the client still gets a valid report object
		script = "frappe.query_reports['%s']={}" % report_name
	# load translations
	if frappe.lang != "en":
		send_translations(frappe.get_lang_dict("report", report_name))
	return {
		"script": script,
		"html_format": html_format
	}
@frappe.whitelist()
def run(report_name, filters=()):
	"""Execute a report and return {"result": rows, "columns": columns}.

	Query Reports run their stored SELECT statement; standard script
	reports dispatch to the report module's execute(). User permissions
	and an optional total row are then applied to the result.
	"""
	report = get_report_doc(report_name)
	# filters may arrive as a JSON string from the client
	if filters and isinstance(filters, basestring):
		filters = json.loads(filters)
	if not frappe.has_permission(report.ref_doctype, "report"):
		frappe.msgprint(_("Must have report permission to access this report."),
			raise_exception=True)
	columns, result = [], []
	if report.report_type=="Query Report":
		if not report.query:
			frappe.msgprint(_("Must specify a Query to run"), raise_exception=True)
		# only read-only queries are allowed
		if not report.query.lower().startswith("select"):
			frappe.msgprint(_("Query must be a SELECT"), raise_exception=True)
		result = [list(t) for t in frappe.db.sql(report.query, filters)]
		columns = [c[0] for c in frappe.db.get_description()]
	else:
		module = report.module or frappe.db.get_value("DocType", report.ref_doctype, "module")
		if report.is_standard=="Yes":
			method_name = get_report_module_dotted_path(module, report.name) + ".execute"
			columns, result = frappe.get_attr(method_name)(frappe._dict(filters))
	if report.apply_user_permissions and result:
		result = get_filtered_data(report.ref_doctype, columns, result)
	if cint(report.add_total_row) and result:
		result = add_total_row(result, columns)
	return {
		"result": result,
		"columns": columns
	}
def get_report_module_dotted_path(module, report_name):
    """Return the dotted import path of a standard report's python module."""
    app = frappe.local.module_app[scrub(module)]
    parts = [app, scrub(module), "report", scrub(report_name), scrub(report_name)]
    return ".".join(parts)
def add_total_row(result, columns):
	"""Append a totals row to *result* and return it.

	Numeric columns (Currency/Int/Float/Percent) are summed; Percent
	columns are then averaged over the number of rows. The first column
	gets the "Total" label unless it is itself numeric.
	"""
	total_row = [""]*len(columns)
	has_percent = []
	for row in result:
		for i, col in enumerate(columns):
			# a column spec is either a "Label:Fieldtype/Options" string or a dict
			fieldtype = None
			if isinstance(col, basestring):
				col = col.split(":")
				if len(col) > 1:
					fieldtype = col[1]
			else:
				fieldtype = col.get("fieldtype")
			if fieldtype in ["Currency", "Int", "Float", "Percent"] and flt(row[i]):
				total_row[i] = flt(total_row[i]) + flt(row[i])
			if fieldtype == "Percent" and i not in has_percent:
				has_percent.append(i)
	# percentages are averaged, not summed
	for i in has_percent:
		total_row[i] = total_row[i] / len(result)
	first_col_fieldtype = None
	if isinstance(columns[0], basestring):
		first_col = columns[0].split(":")
		if len(first_col) > 1:
			first_col_fieldtype = first_col[1].split("/")[0]
	else:
		first_col_fieldtype = columns[0].get("fieldtype")
	if first_col_fieldtype not in ["Currency", "Int", "Float", "Percent"]:
		if first_col_fieldtype == "Link":
			# quoted so the client does not treat it as a link value
			total_row[0] = "'" + _("Total") + "'"
		else:
			total_row[0] = _("Total")
	result.append(total_row)
	return result
def get_filtered_data(ref_doctype, columns, data):
	"""Drop rows the session user may not see, based on user-permission
	match filters and document sharing on *ref_doctype*."""
	result = []
	linked_doctypes = get_linked_doctypes(columns, data)
	match_filters_per_doctype = get_user_match_filters(linked_doctypes, ref_doctype)
	shared = frappe.share.get_shared(ref_doctype)
	columns_dict = get_columns_dict(columns)
	role_permissions = get_role_permissions(frappe.get_meta(ref_doctype))
	if_owner = role_permissions.get("if_owner", {}).get("report")
	if match_filters_per_doctype:
		for row in data:
			# Why linked_doctypes.get(ref_doctype)? because if column is empty, linked_doctypes[ref_doctype] is removed
			if linked_doctypes.get(ref_doctype) and shared and row[linked_doctypes[ref_doctype]] in shared:
				# document explicitly shared with the user
				result.append(row)
			elif has_match(row, linked_doctypes, match_filters_per_doctype, ref_doctype, if_owner, columns_dict):
				result.append(row)
	else:
		# no user-permission filters apply: everything is visible
		result = list(data)
	return result
def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner, columns_dict):
	"""Returns True if after evaluating permissions for each linked doctype
	- There is an owner match for the ref_doctype
	- `and` There is a user permission match for all linked doctypes

	Returns True if the row is empty

	Note:
	Each doctype could have multiple conflicting user permission doctypes.
	Hence even if one of the sets allows a match, it is true.
	This behavior is equivalent to the trickling of user permissions of linked doctypes to the ref doctype.
	"""
	resultant_match = True
	if not row:
		# allow empty rows :)
		return resultant_match
	for doctype, filter_list in doctype_match_filters.items():
		matched_for_doctype = False
		if doctype==ref_doctype and if_owner:
			# "if_owner" report permission: the row's owner column must be the session user
			idx = linked_doctypes.get("User")
			if (idx is not None
				and row[idx]==frappe.session.user
				and columns_dict[idx]==columns_dict.get("owner")):
				# owner match is true
				matched_for_doctype = True
		if not matched_for_doctype:
			for match_filters in filter_list:
				match = True
				for dt, idx in linked_doctypes.items():
					# case handled above
					if dt=="User" and columns_dict[idx]==columns_dict.get("owner"):
						continue
					if dt in match_filters and row[idx] not in match_filters[dt]:
						match = False
						break
				# each doctype could have multiple conflicting user permission doctypes, hence using OR
				# so that even if one of the sets allows a match, it is true
				matched_for_doctype = matched_for_doctype or match
				if matched_for_doctype:
					break
		# each doctype's user permissions should match the row! hence using AND
		resultant_match = resultant_match and matched_for_doctype
		if not resultant_match:
			break
	return resultant_match
def get_linked_doctypes(columns, data):
	"""Map each Link-column's target doctype to the position (column index
	for string columns, fieldname for dict columns) where its value sits."""
	linked_doctypes = {}
	columns_dict = get_columns_dict(columns)
	for idx, col in enumerate(columns):
		df = columns_dict[idx]
		if df.get("fieldtype")=="Link":
			if isinstance(col, basestring):
				linked_doctypes[df["options"]] = idx
			else:
				# dict
				linked_doctypes[df["options"]] = df["fieldname"]
	# remove doctype if column is empty
	# NOTE: .items() returns a list under Python 2 (this file uses
	# basestring), so deleting while iterating is safe here.
	for doctype, key in linked_doctypes.items():
		if not any(d[key] for d in data if d):
			del linked_doctypes[doctype]
	return linked_doctypes
def get_columns_dict(columns):
	"""Returns a dict with column docfield values as dict
	The keys for the dict are both idx and fieldname,
	so either index or fieldname can be used to search for a column's docfield properties
	"""
	columns_dict = {}
	for idx, col in enumerate(columns):
		col_dict = {}
		# string: "Label:Fieldtype/Options" form
		if isinstance(col, basestring):
			col = col.split(":")
			if len(col) > 1:
				if "/" in col[1]:
					col_dict["fieldtype"], col_dict["options"] = col[1].split("/")
				else:
					col_dict["fieldtype"] = col[1]
			col_dict["fieldname"] = frappe.scrub(col[0])
		# dict: already docfield-shaped
		else:
			col_dict.update(col)
			if "fieldname" not in col_dict:
				col_dict["fieldname"] = frappe.scrub(col_dict["label"])
		# index the same docfield under both the position and the fieldname
		columns_dict[idx] = col_dict
		columns_dict[col_dict["fieldname"]] = col_dict
	return columns_dict
def get_user_match_filters(doctypes, ref_doctype):
    """Collect non-empty user-permission match conditions per linked doctype."""
    conditions_by_doctype = dict(
        (dt, frappe.desk.reportview.build_match_conditions(dt, False)) for dt in doctypes)
    return dict((dt, conds) for dt, conds in conditions_by_doctype.items() if conds)
| mit |
SANBI-SA/tools-sanbi-uwc | tools/build_ctb_explorer/build_ctb_explorer.py | 1 | 4075 | #!/usr/bin/env python
from __future__ import print_function
import argparse
import datetime
import glob
import shutil
import os
# try:
# from urllib.parse import urlparse
# except ImportError:
# from urlparse import urlparse
import logging
log = logging.getLogger(__name__)
def copy_output_file_to_dataset(dir_name, input_dir, dt_type=None):
    """
    Copy the dataset files into the new ctb_explorer dataset directory.
    :param dir_name: the target output directory for the ctb_explorer dataset
    :param input_dir: path to the input .dat file; its sibling "<name>_files"
                      directory holds the actual payload
    :param dt_type: the type of input dataset (neo4jdb, jbrowser - default to None)
    :return: True when a copy was attempted, False when dt_type is not given
    """
    # "<name>.dat" next to the input file maps to a "<name>_files" directory
    dt_loc = input_dir.rpartition('/')[2].replace(".dat", "_files")
    if dt_type:
        if dt_type == "neo4jdb":
            src_files = glob.glob(os.path.dirname(input_dir) + '/{}/{}'.format(dt_loc, dt_type) + "/*")
        else:
            src_files = glob.glob(os.path.dirname(input_dir) + '/{}'.format(dt_loc) + "/*")
    else:
        return False

    for file_name in src_files:
        if os.path.isfile(file_name):
            try:
                shutil.copy2(file_name, dir_name)
            except shutil.Error as e:
                log.debug('Error: %s' % e)
            # eg. source or destination doesn't exist
            except IOError as e:
                log.debug('Error: %s' % e.strerror)
        elif os.path.isdir(file_name):
            # Copy the tree to <dir_name>/<basename> directly.  The previous
            # implementation os.chdir()'d into dir_name and never restored the
            # working directory, mutating process-global state and breaking any
            # later relative-path operations; it also split the basename on '/'
            # by hand, which os.path.basename does portably.
            try:
                shutil.copytree(file_name, os.path.join(dir_name, os.path.basename(file_name)))
            except shutil.Error as e:
                log.debug('Error: %s' % e)
            # eg. source or destination doesn't exist
            except IOError as e:
                log.debug('Error: %s' % e.strerror)
    return True
class BuildCtbExplorerRunner(object):
    """Copies the neo4j and jbrowser datasets into a combat-tb explorer layout."""

    def __init__(self, args=None):
        """Remember the parsed CLI arguments and the four dataset paths."""
        self.args = args
        self.output_neo4jdb = args.output_neo4jdb
        self.output_jbrowser = args.output_jbrowser
        self.input_neo4jdb = args.input_neo4jdb
        self.input_jbrowser = args.input_jbrowser

    def build_ctb_explorer(self):
        """Populate both output directories; return False when either copy fails.

        :rtype: boolean
        """
        # note the short-circuit: the jbrowser copy only runs if the
        # neo4jdb copy reported success
        copied = (copy_output_file_to_dataset(self.output_neo4jdb, self.input_neo4jdb, dt_type="neo4jdb")
                  and copy_output_file_to_dataset(self.output_jbrowser, self.input_jbrowser, dt_type="jbrowser"))
        if not copied:
            return False
        """Copy the jbrowser input data file to the outputdir @TODO: investigate altenatives"""
        try:
            shutil.copy2(self.input_jbrowser, os.path.join(self.output_jbrowser, 'index.html'))
        except shutil.Error as e:
            log.debug('Error: %s' % e)
        # eg. source or destination doesn't exist
        except IOError as e:
            log.debug('Error: %s' % e.strerror)
        print("CTB Report run time: %s" % str(datetime.date.today()))
        print("Neo4jDB - Input: %s" % str(self.args.input_neo4jdb))
        print("JBrowser - Input: %s" % str(self.args.input_jbrowser))
        return True
def main():
    """CLI entry point: parse arguments, create output dirs, run the build.

    Exits with status 1 when the build fails.
    """
    parser = argparse.ArgumentParser(description="Tool used to build a combat-tb explorer dataset")
    parser.add_argument('--output_neo4jdb')
    parser.add_argument('--output_jbrowser')
    parser.add_argument('--input_neo4jdb')
    parser.add_argument('--input_jbrowser')
    args = parser.parse_args()

    ctb_explorer_runner = BuildCtbExplorerRunner(args)

    # make the output directories (neo4j and jbrowser)
    for out_dir in (args.output_neo4jdb, args.output_jbrowser):
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)

    status = ctb_explorer_runner.build_ctb_explorer()
    # build_ctb_explorer() returns True/False, never None, so the original
    # `status is None` check could never trigger and failures were silently
    # ignored.  Treat any falsy status as an error.
    if not status:
        exit(1)
| gpl-3.0 |
dagnarf/sgh-i717-dagkernel | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct

# Read packed little-endian u32 records from stdin and print them as
# space-separated "index=value" pairs (index in hex), suitable for the
# cxacru sysfs adsl_config attribute.
record_index = 0
while True:
    raw = sys.stdin.read(4)
    if len(raw) == 0:
        break
    elif len(raw) != 4:
        # trailing partial record: the input file is malformed
        sys.stdout.write("\n")
        sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(raw)))
        sys.exit(1)

    if record_index > 0:
        sys.stdout.write(" ")
    sys.stdout.write("{0:x}={1}".format(record_index, struct.unpack("<I", raw)[0]))
    record_index += 1
sys.stdout.write("\n")
| gpl-2.0 |
jtg-gg/blink | Tools/Scripts/webkitpy/thirdparty/coverage/__init__.py | 64 | 3417 | """Code coverage measurement for Python.
Ned Batchelder
http://nedbatchelder.com/code/coverage
"""
__version__ = "3.5.1" # see detailed history in CHANGES.txt
__url__ = "http://nedbatchelder.com/code/coverage"
# max() of the version string is its lexicographically largest character;
# an alphabetic one (e.g. "3.5.1b2") marks a pre-release build.
if max(__version__).isalpha():
    # For pre-releases, use a version-specific URL.
    __url__ += "/" + __version__
from coverage.control import coverage, process_startup
from coverage.data import CoverageData
from coverage.cmdline import main, CoverageScript
from coverage.misc import CoverageException
# Module-level functions. The original API to this module was based on
# functions defined directly in the module, with a singleton of the coverage()
# class. That design hampered programmability, so the current api uses
# explicitly-created coverage objects. But for backward compatibility, here we
# define the top-level functions to create the singleton when they are first
# called.
# Singleton object for use with module-level functions. The singleton is
# created as needed when one of the module-level functions is called.
_the_coverage = None
def _singleton_method(name):
"""Return a function to the `name` method on a singleton `coverage` object.
The singleton object is created the first time one of these functions is
called.
"""
def wrapper(*args, **kwargs):
"""Singleton wrapper around a coverage method."""
global _the_coverage
if not _the_coverage:
_the_coverage = coverage(auto_data=True)
return getattr(_the_coverage, name)(*args, **kwargs)
return wrapper
# Define the module-level functions.
# Each name proxies the same-named method on the lazily-created singleton
# coverage object (see _singleton_method).
use_cache = _singleton_method('use_cache')
start = _singleton_method('start')
stop = _singleton_method('stop')
erase = _singleton_method('erase')
exclude = _singleton_method('exclude')
analysis = _singleton_method('analysis')
analysis2 = _singleton_method('analysis2')
report = _singleton_method('report')
annotate = _singleton_method('annotate')
# COPYRIGHT AND LICENSE
#
# Copyright 2001 Gareth Rees. All rights reserved.
# Copyright 2004-2010 Ned Batchelder. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
| bsd-3-clause |
Pallokala/ansible-modules-core | packaging/os/rhn_channel.py | 197 | 5204 | #!/usr/bin/python
# (c) Vincent Van de Kussen
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: rhn_channel
short_description: Adds or removes Red Hat software channels
description:
- Adds or removes Red Hat software channels
version_added: "1.1"
author: "Vincent Van der Kussen (@vincentvdk)"
notes:
- this module fetches the system id from RHN.
requirements:
- none
options:
name:
description:
- name of the software channel
required: true
default: null
sysname:
description:
- name of the system as it is known in RHN/Satellite
required: true
default: null
state:
description:
- whether the channel should be present or not
required: false
default: present
url:
description:
- The full url to the RHN/Satellite api
required: true
user:
description:
- RHN/Satellite user
required: true
password:
description:
- "the user's password"
required: true
'''
EXAMPLES = '''
- rhn_channel: name=rhel-x86_64-server-v2vwin-6 sysname=server01 url=https://rhn.redhat.com/rpc/api user=rhnuser password=guessme
'''
import xmlrpclib
from operator import itemgetter
import re
# ------------------------------------------------------- #
def get_systemid(client, session, sysname):
    """Look up the RHN/Satellite numeric system id for *sysname*.

    Returns None when no registered system carries that profile name.
    """
    for system in client.system.listUserSystems(session):
        if system.get('name') == sysname:
            return int(system.get('id'))
# ------------------------------------------------------- #
# unused:
#
#def get_localsystemid():
# f = open("/etc/sysconfig/rhn/systemid", "r")
# content = f.read()
# loc_id = re.search(r'\b(ID-)(\d{10})' ,content)
# return loc_id.group(2)
# ------------------------------------------------------- #
def subscribe_channels(channelname, client, session, sysname, sys_id):
    """Add *channelname* to the system's current child channels."""
    new_channels = base_channels(client, session, sys_id) + [channelname]
    return client.system.setChildChannels(session, sys_id, new_channels)
# ------------------------------------------------------- #
def unsubscribe_channels(channelname, client, session, sysname, sys_id):
    """Remove the first occurrence of *channelname* from the child channels."""
    remaining = base_channels(client, session, sys_id)
    # raises ValueError (as list.remove would) if the channel is absent
    del remaining[remaining.index(channelname)]
    return client.system.setChildChannels(session, sys_id, remaining)
# ------------------------------------------------------- #
def base_channels(client, session, sys_id):
    """Return the labels of all channels the system is subscribed to.

    Older and newer API versions disagree on the key name, so fall back
    from 'label' to 'channel_label'.
    """
    channel_data = client.channel.software.listSystemChannels(session, sys_id)
    try:
        return [entry['label'] for entry in channel_data]
    except KeyError:
        return [entry['channel_label'] for entry in channel_data]
# ------------------------------------------------------- #
def main():
    """Ansible entry point: subscribe or unsubscribe a system to an RHN channel."""
    module = AnsibleModule(
        argument_spec = dict(
            state = dict(default='present', choices=['present', 'absent']),
            name = dict(required=True),
            sysname = dict(required=True),
            url = dict(required=True),
            user = dict(required=True),
            password = dict(required=True, aliases=['pwd']),
        )
        # supports_check_mode=True
    )
    state = module.params['state']
    channelname = module.params['name']
    systname = module.params['sysname']
    saturl = module.params['url']
    user = module.params['user']
    password = module.params['password']
    #initialize connection
    client = xmlrpclib.Server(saturl, verbose=0)
    session = client.auth.login(user, password)
    # get systemid
    sys_id = get_systemid(client, session, systname)
    # get channels for system
    chans = base_channels(client, session, sys_id)
    if state == 'present':
        if channelname in chans:
            module.exit_json(changed=False, msg="Channel %s already exists" % channelname)
        else:
            subscribe_channels(channelname, client, session, systname, sys_id)
            module.exit_json(changed=True, msg="Channel %s added" % channelname)
    if state == 'absent':
        if not channelname in chans:
            module.exit_json(changed=False, msg="Not subscribed to channel %s." % channelname)
        else:
            unsubscribe_channels(channelname, client, session, systname, sys_id)
            module.exit_json(changed=True, msg="Channel %s removed" % channelname)
    # NOTE(review): module.exit_json conventionally terminates the process,
    # so this logout is presumably never reached -- confirm intended.
    client.auth.logout(session)
client.auth.logout(session)
# import module snippets
from ansible.module_utils.basic import *  # noqa: F403 -- boilerplate Ansible replaces at build time
# Ansible modules are executed as standalone scripts, so main() is invoked
# unconditionally (no ``if __name__ == '__main__'`` guard by old convention).
main()
| gpl-3.0 |
guozhangwang/kafka | tests/kafkatest/tests/client/compression_test.py | 3 | 4480 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.mark import matrix
from ducktape.utils.util import wait_until
from ducktape.mark.resource import cluster
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.services.kafka import KafkaService, quorum
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int_with_prefix
class CompressionTest(ProduceConsumeValidateTest):
    """
    These tests validate produce / consume for compressed topics.
    """

    # One producer per entry; "none" exercises the uncompressed path alongside
    # every supported codec.
    COMPRESSION_TYPES = ["snappy", "gzip", "lz4", "zstd", "none"]

    def __init__(self, test_context):
        """:type test_context: ducktape.tests.test.TestContext"""
        super(CompressionTest, self).__init__(test_context=test_context)

        self.topic = "test_topic"
        # ZooKeeper is only started when the test runs in ZK quorum mode.
        self.zk = ZookeeperService(test_context, num_nodes=1) if quorum.for_test(test_context) == quorum.zk else None
        self.kafka = KafkaService(test_context, num_nodes=1, zk=self.zk, topics={self.topic: {
            "partitions": 10,
            "replication-factor": 1}})
        self.num_partitions = 10
        self.timeout_sec = 60
        self.producer_throughput = 1000
        # One producer per compression type so each codec is exercised in parallel.
        self.num_producers = len(self.COMPRESSION_TYPES)
        self.messages_per_producer = 1000
        self.num_consumers = 1

    def setUp(self):
        if self.zk:
            self.zk.start()

    def min_cluster_size(self):
        # Override this since we're adding services outside of the constructor
        return super(CompressionTest, self).min_cluster_size() + self.num_producers + self.num_consumers

    @cluster(num_nodes=8)
    @matrix(compression_types=[COMPRESSION_TYPES], metadata_quorum=quorum.all_non_upgrade)
    def test_compressed_topic(self, compression_types, metadata_quorum=quorum.zk):
        """Test produce => consume => validate for compressed topics
        Setup: 1 zk, 1 kafka node, 1 topic with partitions=10, replication-factor=1

        compression_types parameter gives a list of compression types (or no compression if
        "none"). Each producer in a VerifiableProducer group (num_producers = number of compression
        types) will use a compression type from the list based on producer's index in the group.

        - Produce messages in the background
        - Consume messages in the background
        - Stop producing, and finish consuming
        - Validate that every acked message was consumed
        """
        self.kafka.security_protocol = "PLAINTEXT"
        self.kafka.interbroker_security_protocol = self.kafka.security_protocol
        self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka,
                                           self.topic, throughput=self.producer_throughput,
                                           message_validator=is_int_with_prefix,
                                           compression_types=compression_types)
        self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic,
                                        consumer_timeout_ms=60000, message_validator=is_int_with_prefix)
        self.kafka.start()
        self.run_produce_consume_validate(lambda: wait_until(
            lambda: self.producer.each_produced_at_least(self.messages_per_producer) == True,
            timeout_sec=120, backoff_sec=1,
            err_msg="Producer did not produce all messages in reasonable amount of time"))
| apache-2.0 |
ar7z1/ansible | test/units/modules/network/f5/test_bigip_remote_role.py | 9 | 3358 | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_remote_role import ApiParameters
from library.modules.bigip_remote_role import ModuleParameters
from library.modules.bigip_remote_role import ModuleManager
from library.modules.bigip_remote_role import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_remote_role import ApiParameters
from ansible.modules.network.f5.bigip_remote_role import ModuleParameters
from ansible.modules.network.f5.bigip_remote_role import ModuleManager
from ansible.modules.network.f5.bigip_remote_role import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Read a fixture file, memoized in ``fixture_data``.

    JSON content is parsed; anything else is cached as raw text.
    """
    path = os.path.join(fixture_path, name)
    if path not in fixture_data:
        with open(path) as handle:
            raw = handle.read()
        try:
            fixture_data[path] = json.loads(raw)
        except Exception:
            # Deliberate best-effort: non-JSON fixtures stay as plain strings.
            fixture_data[path] = raw
    return fixture_data[path]
class TestParameters(unittest.TestCase):
    """Unit tests for the parameter adapter classes of bigip_remote_role."""

    def test_module_parameters(self):
        # 'none' on the Ansible side maps to 'disable' on the BIG-IP API side.
        params = ModuleParameters(params={'terminal_access': 'none'})
        assert params.terminal_access == 'disable'

    def test_api_parameters(self):
        fixture = load_fixture('load_auth_remote_role_role_info_1.json')
        params = ApiParameters(params=fixture)
        assert params.terminal_access == 'disable'
class TestManager(unittest.TestCase):

    def setUp(self):
        self.spec = ArgumentSpec()

    # NOTE(review): name says "remote_syslog" but this file tests
    # bigip_remote_role -- looks copy/pasted; confirm the intended name.
    def test_create_remote_syslog(self, *args):
        set_module_args(dict(
            name='foo',
            line_order=1000,
            attribute_string='bar',
            server='localhost',
            password='password',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        # exists() is consulted before and after creation: absent, then present.
        mm.exists = Mock(side_effect=[False, True])
        mm.create_on_device = Mock(return_value=True)

        results = mm.exec_module()
        assert results['changed'] is True
| gpl-3.0 |
not-raspberry/mirakuru | tests/executors/test_http_executor.py | 2 | 3131 | """HTTP Executor tests."""
import sys
import socket
from functools import partial
import pytest
from mirakuru import HTTPExecutor
from mirakuru import TimeoutExpired, AlreadyRunning
from mirakuru.compat import HTTPConnection, OK, http_server_cmd
from tests import test_server_path
HOST = "127.0.0.1"
PORT = 7987

# Rebinds the ``http_server_cmd`` imported from mirakuru.compat so it serves
# on our fixed test port.
http_server_cmd = '{} {}'.format(http_server_cmd, PORT)
http_slow_cmd = '{python} {srv} {host}:{port}' \
    .format(python=sys.executable, srv=test_server_path, host=HOST, port=PORT)

# Factory for an executor wrapping the deliberately slow-starting test server.
slow_server_executor = partial(
    HTTPExecutor,
    http_slow_cmd,
    'http://{0}:{1}/'.format(HOST, PORT),
)
def connect_to_server():
    """Common test to check if can connect to server."""
    conn = HTTPConnection(HOST, PORT)
    conn.request('GET', '/')
    # BUG FIX: compare the status with ``==``; ``is OK`` was an identity
    # comparison against the int 200 that only worked thanks to CPython's
    # small-integer caching.
    assert conn.getresponse().status == OK
    conn.close()
def test_executor_starts_and_waits():
    """Test if process awaits for HEAD request to be completed."""
    # Delay the server so start() must actually wait for it to come up.
    command = 'bash -c "sleep 3 && {0}"'.format(http_server_cmd)

    executor = HTTPExecutor(
        command,
        'http://{0}:{1}/'.format(HOST, PORT),
        timeout=20
    )
    executor.start()
    # start() only returns once the URL answered, so the server is reachable.
    assert executor.running() is True
    connect_to_server()

    executor.stop()

    # check proper __str__ and __repr__ rendering:
    assert 'HTTPExecutor' in repr(executor)
    assert command in str(executor)
def test_shell_started_server_stops():
    """Test if executor terminates properly executor with shell=True."""
    executor = HTTPExecutor(
        http_server_cmd,
        'http://{0}:{1}/'.format(HOST, PORT),
        timeout=20,
        shell=True
    )

    # Nothing is listening before the executor starts.
    with pytest.raises(socket.error):
        connect_to_server()

    # The context manager starts the server on entry and stops it on exit.
    with executor:
        assert executor.running() is True
        connect_to_server()

    assert executor.running() is False

    # After exit the port must be closed again (shell child was reaped too).
    with pytest.raises(socket.error):
        connect_to_server()
def test_slow_server_starting():
    """Executor must block in start() until a slow server begins answering.

    Mirrors e.g. Gunicorn: the process is up immediately but needs time to
    spawn its worker processes before it can serve requests.
    """
    slow = slow_server_executor()
    slow.start()
    assert slow.running() is True

    connect_to_server()
    slow.stop()
def test_slow_server_timed_out():
    """A too-small timeout must surface as TimeoutExpired and stop the process."""
    slow = slow_server_executor(timeout=1)

    with pytest.raises(TimeoutExpired) as excinfo:
        slow.start()

    assert slow.running() is False
    assert 'timed out after' in str(excinfo)
def test_fail_if_other_executor_running():
    """Test raising AlreadyRunning exception when port is blocked."""
    # Two executors configured for the same host:port.
    executor = HTTPExecutor(
        http_server_cmd, 'http://{0}:{1}/'.format(HOST, PORT),
    )
    executor2 = HTTPExecutor(
        http_server_cmd, 'http://{0}:{1}/'.format(HOST, PORT),
    )

    with executor:
        assert executor.running() is True

        # Starting the second executor while the port is taken must fail,
        # both via start() and via the context-manager protocol.
        with pytest.raises(AlreadyRunning):
            executor2.start()

        with pytest.raises(AlreadyRunning) as exc:
            with executor2:
                pass
        assert 'seems to be already running' in str(exc)
| lgpl-3.0 |
mxia/engine | build/android/pylib/utils/parallelizer.py | 51 | 7129 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Wrapper that allows method execution in parallel.
This class wraps a list of objects of the same type, emulates their
interface, and executes any functions called on the objects in parallel
in ReraiserThreads.
This means that, given a list of objects:
class Foo:
def __init__(self):
self.baz = Baz()
def bar(self, my_param):
// do something
list_of_foos = [Foo(1), Foo(2), Foo(3)]
we can take a sequential operation on that list of objects:
for f in list_of_foos:
f.bar('Hello')
and run it in parallel across all of the objects:
Parallelizer(list_of_foos).bar('Hello')
It can also handle (non-method) attributes of objects, so that this:
for f in list_of_foos:
f.baz.myBazMethod()
can be run in parallel with:
Parallelizer(list_of_foos).baz.myBazMethod()
Because it emulates the interface of the wrapped objects, a Parallelizer
can be passed to a method or function that takes objects of that type:
def DoesSomethingWithFoo(the_foo):
the_foo.bar('Hello')
the_foo.bar('world')
the_foo.baz.myBazMethod
DoesSomethingWithFoo(Parallelizer(list_of_foos))
Note that this class spins up a thread for each object. Using this class
to parallelize operations that are already fast will incur a net performance
penalty.
"""
# pylint: disable=protected-access
from pylib.utils import reraiser_thread
from pylib.utils import watchdog_timer
_DEFAULT_TIMEOUT = 30
_DEFAULT_RETRIES = 3
class Parallelizer(object):
  """Allows parallel execution of method calls across a group of objects."""

  def __init__(self, objs):
    # An empty group would silently turn every parallel call into a no-op.
    assert (objs is not None and len(objs) > 0), (
        "Passed empty list to 'Parallelizer'")
    self._orig_objs = objs
    self._objs = objs

  def __getattr__(self, name):
    """Emulate getting the |name| attribute of |self|.

    Args:
      name: The name of the attribute to retrieve.
    Returns:
      A Parallelizer emulating the |name| attribute of |self|.
    """
    # Resolve any pending asynchronous results before fanning out again.
    self.pGet(None)

    r = type(self)(self._orig_objs)
    r._objs = [getattr(o, name) for o in self._objs]
    return r

  def __getitem__(self, index):
    """Emulate getting the value of |self| at |index|.

    Returns:
      A Parallelizer emulating the value of |self| at |index|.
    """
    self.pGet(None)

    r = type(self)(self._orig_objs)
    r._objs = [o[index] for o in self._objs]
    return r

  def __call__(self, *args, **kwargs):
    """Emulate calling |self| with |args| and |kwargs|.

    Note that this call is asynchronous. Call pFinish on the return value to
    block until the call finishes.

    Returns:
      A Parallelizer wrapping the ReraiserThreadGroup running the call in
      parallel.
    Raises:
      AttributeError if the wrapped objects aren't callable.
    """
    self.pGet(None)

    if not self._objs:
      raise AttributeError('Nothing to call.')
    for o in self._objs:
      if not callable(o):
        raise AttributeError("'%s' is not callable" % o.__name__)

    r = type(self)(self._orig_objs)
    # One thread per wrapped callable; thread names pair the original object
    # with the method for easier debugging.
    r._objs = reraiser_thread.ReraiserThreadGroup(
        [reraiser_thread.ReraiserThread(
            o, args=args, kwargs=kwargs,
            name='%s.%s' % (str(d), o.__name__))
         for d, o in zip(self._orig_objs, self._objs)])
    r._objs.StartAll()  # pylint: disable=W0212
    return r

  def pFinish(self, timeout):
    """Finish any outstanding asynchronous operations.

    Args:
      timeout: The maximum number of seconds to wait for an individual
               result to return, or None to wait forever.
    Returns:
      self, now emulating the return values.
    """
    self._assertNoShadow('pFinish')
    # _objs is a thread group only while a call is in flight; otherwise this
    # is a no-op.
    if isinstance(self._objs, reraiser_thread.ReraiserThreadGroup):
      self._objs.JoinAll()
      self._objs = self._objs.GetAllReturnValues(
          watchdog_timer.WatchdogTimer(timeout))
    return self

  def pGet(self, timeout):
    """Get the current wrapped objects.

    Args:
      timeout: Same as |pFinish|.
    Returns:
      A list of the results, in order of the provided devices.
    Raises:
      Any exception raised by any of the called functions.
    """
    self._assertNoShadow('pGet')
    self.pFinish(timeout)
    return self._objs

  def pMap(self, f, *args, **kwargs):
    """Map a function across the current wrapped objects in parallel.

    This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.

    Note that this call is asynchronous. Call pFinish on the return value to
    block until the call finishes.

    Args:
      f: The function to call.
      args: The positional args to pass to f.
      kwargs: The keyword args to pass to f.
    Returns:
      A Parallelizer wrapping the ReraiserThreadGroup running the map in
      parallel.
    """
    self._assertNoShadow('pMap')
    r = type(self)(self._orig_objs)
    r._objs = reraiser_thread.ReraiserThreadGroup(
        [reraiser_thread.ReraiserThread(
            f, args=tuple([o] + list(args)), kwargs=kwargs,
            name='%s(%s)' % (f.__name__, d))
         for d, o in zip(self._orig_objs, self._objs)])
    r._objs.StartAll()  # pylint: disable=W0212
    return r

  def _assertNoShadow(self, attr_name):
    """Ensures that |attr_name| isn't shadowing part of the wrapped objects.

    If the wrapped objects _do_ have an |attr_name| attribute, it will be
    inaccessible to clients.

    Args:
      attr_name: The attribute to check.
    Raises:
      AssertionError if the wrapped objects have an attribute named 'attr_name'
      or '_assertNoShadow'.
    """
    if isinstance(self._objs, reraiser_thread.ReraiserThreadGroup):
      assert not hasattr(self._objs, '_assertNoShadow')
      assert not hasattr(self._objs, attr_name)
    else:
      assert not any(hasattr(o, '_assertNoShadow') for o in self._objs)
      assert not any(hasattr(o, attr_name) for o in self._objs)
class SyncParallelizer(Parallelizer):
  """A Parallelizer that blocks on function calls."""

  #override
  def __call__(self, *args, **kwargs):
    """Emulate calling |self| with |args| and |kwargs|.

    Note that this call is synchronous.

    Returns:
      A Parallelizer emulating the value returned from calling |self| with
      |args| and |kwargs|.
    Raises:
      AttributeError if the wrapped objects aren't callable.
    """
    r = super(SyncParallelizer, self).__call__(*args, **kwargs)
    # pFinish(None) waits without a timeout, making the call synchronous.
    r.pFinish(None)
    return r

  #override
  def pMap(self, f, *args, **kwargs):
    """Map a function across the current wrapped objects in parallel.

    This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.

    Note that this call is synchronous.

    Args:
      f: The function to call.
      args: The positional args to pass to f.
      kwargs: The keyword args to pass to f.
    Returns:
      A Parallelizer wrapping the ReraiserThreadGroup running the map in
      parallel.
    """
    r = super(SyncParallelizer, self).pMap(f, *args, **kwargs)
    r.pFinish(None)
    return r
| bsd-3-clause |
kenglishhi/gae-django-sandbox | django/contrib/gis/geos/prototypes/topology.py | 311 | 2226 | """
This module houses the GEOS ctypes prototype functions for the
topological operations on geometries.
"""
__all__ = ['geos_boundary', 'geos_buffer', 'geos_centroid', 'geos_convexhull',
'geos_difference', 'geos_envelope', 'geos_intersection',
'geos_linemerge', 'geos_pointonsurface', 'geos_preservesimplify',
'geos_simplify', 'geos_symdifference', 'geos_union', 'geos_relate']
from ctypes import c_char_p, c_double, c_int
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOS_PREPARE
from django.contrib.gis.geos.prototypes.errcheck import check_geom, check_string
from django.contrib.gis.geos.prototypes.geom import geos_char_p
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
def topology(func, *args):
    "For GEOS unary topology functions."
    # Every topology routine takes a geometry pointer first, then any extra
    # ctypes argument types, and yields a geometry pointer.
    func.argtypes = [GEOM_PTR] + list(args)
    func.restype = GEOM_PTR
    func.errcheck = check_geom
    return func
### Topology Routines ###
geos_boundary = topology(GEOSFunc('GEOSBoundary'))
geos_buffer = topology(GEOSFunc('GEOSBuffer'), c_double, c_int)
geos_centroid = topology(GEOSFunc('GEOSGetCentroid'))
geos_convexhull = topology(GEOSFunc('GEOSConvexHull'))
geos_difference = topology(GEOSFunc('GEOSDifference'), GEOM_PTR)
geos_envelope = topology(GEOSFunc('GEOSEnvelope'))
geos_intersection = topology(GEOSFunc('GEOSIntersection'), GEOM_PTR)
geos_linemerge = topology(GEOSFunc('GEOSLineMerge'))
geos_pointonsurface = topology(GEOSFunc('GEOSPointOnSurface'))
geos_preservesimplify = topology(GEOSFunc('GEOSTopologyPreserveSimplify'), c_double)
geos_simplify = topology(GEOSFunc('GEOSSimplify'), c_double)
geos_symdifference = topology(GEOSFunc('GEOSSymDifference'), GEOM_PTR)
geos_union = topology(GEOSFunc('GEOSUnion'), GEOM_PTR)
# GEOSRelate returns a string, not a geometry.
geos_relate = GEOSFunc('GEOSRelate')
geos_relate.argtypes = [GEOM_PTR, GEOM_PTR]
geos_relate.restype = geos_char_p
geos_relate.errcheck = check_string
# Routines only in GEOS 3.1+
if GEOS_PREPARE:
geos_cascaded_union = GEOSFunc('GEOSUnionCascaded')
geos_cascaded_union.argtypes = [GEOM_PTR]
geos_cascaded_union.restype = GEOM_PTR
__all__.append('geos_cascaded_union')
| apache-2.0 |
roadmapper/ansible | lib/ansible/plugins/action/net_system.py | 648 | 1057 | # (c) 2017, Ansible Inc,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action.net_base import ActionModule as _ActionModule
class ActionModule(_ActionModule):
    """Thin wrapper that defers all work to the shared net_base action plugin."""

    def run(self, tmp=None, task_vars=None):
        outcome = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect; kept in the signature for API compatibility
        return outcome
| gpl-3.0 |
SpaceKatt/CSPLN | apps/scaffolding/mac/web2py/web2py.app/Contents/Resources/gluon/tests/test_storage.py | 10 | 4537 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Unit tests for storage.py """
import unittest
from fix_path import fix_sys_path
fix_sys_path(__file__)
from storage import Storage, StorageList, List
from http import HTTP
import pickle
class TestStorage(unittest.TestCase):
    """ Tests storage.Storage """

    def test_attribute(self):
        """ Tests Storage attribute handling """
        s = Storage(a=1)
        self.assertEqual(s.a, 1)
        self.assertEqual(s['a'], 1)
        # Missing attributes read as None instead of raising.
        self.assertEqual(s.b, None)
        s.b = 2
        self.assertEqual(s.a, 1)
        self.assertEqual(s['a'], 1)
        self.assertEqual(s.b, 2)
        self.assertEqual(s['b'], 2)
        s['c'] = 3
        self.assertEqual(s.c, 3)
        self.assertEqual(s['c'], 3)
        # Attribute and item access must resolve to the very same object.
        s.d = list()
        self.assertTrue(s.d is s['d'])

    def test_store_none(self):
        """ Test Storage store-None handling
            s.key = None deletes an item
            s['key'] = None sets the item to None
        """
        s = Storage(a=1)
        self.assertTrue('a' in s)
        self.assertFalse('b' in s)
        s.a = None
        # self.assertFalse('a' in s) # how about this?

        s.a = 1
        self.assertTrue('a' in s)
        s['a'] = None
        self.assertTrue('a' in s)
        self.assertTrue(s.a is None)

    def test_item(self):
        """ Tests Storage item handling """
        s = Storage()
        self.assertEqual(s.d, None)
        self.assertEqual(s['d'], None)
        #self.assertRaises(KeyError, lambda x: s[x], 'd') # old Storage
        s.a = 1
        s['a'] = None
        # BUG FIX: use assertEqual -- assertEquals is a deprecated alias and
        # the rest of this module consistently uses assertEqual.
        self.assertEqual(s.a, None)
        self.assertEqual(s['a'], None)
        self.assertTrue('a' in s)

    def test_pickling(self):
        """ Test storage pickling """
        s = Storage(a=1)
        sd = pickle.dumps(s, pickle.HIGHEST_PROTOCOL)
        news = pickle.loads(sd)
        self.assertEqual(news.a, 1)

    def test_getlist(self):
        # usually used with request.vars
        a = Storage()
        a.x = 'abc'
        a.y = ['abc', 'def']
        self.assertEqual(a.getlist('x'), ['abc'])
        self.assertEqual(a.getlist('y'), ['abc', 'def'])
        self.assertEqual(a.getlist('z'), [])

    def test_getfirst(self):
        # usually with request.vars
        a = Storage()
        a.x = 'abc'
        a.y = ['abc', 'def']
        self.assertEqual(a.getfirst('x'), 'abc')
        self.assertEqual(a.getfirst('y'), 'abc')
        self.assertEqual(a.getfirst('z'), None)

    def test_getlast(self):
        # usually with request.vars
        a = Storage()
        a.x = 'abc'
        a.y = ['abc', 'def']
        self.assertEqual(a.getlast('x'), 'abc')
        self.assertEqual(a.getlast('y'), 'def')
        self.assertEqual(a.getlast('z'), None)
class TestStorageList(unittest.TestCase):
    """Tests for storage.StorageList, whose missing attributes default to lists."""

    def test_attribute(self):
        store = StorageList(a=1)
        # Existing keys behave like ordinary Storage attributes/items.
        self.assertEqual(store.a, 1)
        self.assertEqual(store['a'], 1)
        # A missing attribute materialises as an empty, mutable list.
        self.assertEqual(store.b, [])
        store.b.append(1)
        self.assertEqual(store.b, [1])
class TestList(unittest.TestCase):
    """ Tests Storage.List (fast-check for request.args()) """

    def test_listcall(self):
        a = List((1, 2, 3))
        self.assertEqual(a(1), 2)
        self.assertEqual(a(-1), 3)
        # Out-of-range access returns None rather than raising IndexError.
        self.assertEqual(a(-5), None)
        self.assertEqual(a(-5, default='x'), 'x')
        self.assertEqual(a(-3, cast=str), '1')
        a.append('1234')
        self.assertEqual(a(3), '1234')
        self.assertEqual(a(3, cast=int), 1234)
        a.append('x')
        # An uncastable value raises HTTP (request validation failure).
        self.assertRaises(HTTP, a, 4, cast=int)
        b = List()
        # the default is always returned when specified
        self.assertEqual(b(0, cast=int, default=None), None)
        self.assertEqual(b(0, cast=int, default=None, otherwise='teste'), None)
        self.assertEqual(b(0, cast=int, default='a', otherwise='teste'), 'a')
        # when there is no value and `otherwise` is specified, it is called
        self.assertEqual(b(0, otherwise=lambda: 'something'), 'something')
        self.assertEqual(b(0, cast=int, otherwise=lambda: 'something'),
                         'something')
        # unless a default is specified
        self.assertEqual(b(0, default=0, otherwise=lambda: 'something'), 0)

    def test_listgetitem(self):
        '''Maintains list behaviour.'''
        a = List((1, 2, 3))
        self.assertEqual(a[0], 1)
        self.assertEqual(a[::-1], [3, 2, 1])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
kyoshino/bedrock | bedrock/firefox/templatetags/helpers.py | 2 | 9753 | from django.conf import settings
import jinja2
from django.template.loader import render_to_string
from django_jinja import library
from bedrock.firefox.firefox_details import firefox_desktop, firefox_android, firefox_ios
from bedrock.base.urlresolvers import reverse
from lib.l10n_utils import get_locale
def desktop_builds(channel, builds=None, locale=None, force_direct=False,
                   force_full_installer=False, force_funnelcake=False,
                   funnelcake_id=False, locale_in_transition=False, classified=False):
    """Append one download entry per desktop platform for *channel* to *builds*.

    Each entry is a dict with 'os', 'os_pretty', 'download_link' and
    'download_link_direct' keys. Falls back to en-US when the requested
    locale has no builds on the channel.
    """
    builds = builds or []

    l_version = firefox_desktop.latest_builds(locale, channel)

    # Developer Edition is now based on the Beta channel, so the build list
    # should be generated from the Beta locales.
    if channel == 'alpha':
        l_version = firefox_desktop.latest_builds(locale, 'beta')

    if l_version:
        version, platforms = l_version
    else:
        # No builds for this locale on this channel: fall back to en-US.
        # (`platforms` from the unpack is unused; iteration below asks
        # firefox_desktop.platforms() directly.)
        locale = 'en-US'
        version, platforms = firefox_desktop.latest_builds('en-US', channel)

    for plat_os, plat_os_pretty in firefox_desktop.platforms(channel, classified):
        os_pretty = plat_os_pretty

        # Firefox Nightly: The Windows stub installer is now universal,
        # automatically detecting a 32-bit and 64-bit desktop, so the
        # win64-specific entry can be skipped.
        if channel == 'nightly':
            if plat_os == 'win':
                continue
            if plat_os == 'win64':
                plat_os = 'win'
                os_pretty = 'Windows 32/64-bit'

        # And generate all the info
        download_link = firefox_desktop.get_download_url(
            channel, version, plat_os, locale,
            force_direct=force_direct,
            force_full_installer=force_full_installer,
            force_funnelcake=force_funnelcake,
            funnelcake_id=funnelcake_id,
            locale_in_transition=locale_in_transition,
        )

        # If download_link_direct is False the data-direct-link attr
        # will not be output, and the JS won't attempt the IE popup.
        if force_direct:
            # no need to run get_download_url again with the same args
            download_link_direct = False
        else:
            download_link_direct = firefox_desktop.get_download_url(
                channel, version, plat_os, locale,
                force_direct=True,
                force_full_installer=force_full_installer,
                force_funnelcake=force_funnelcake,
                funnelcake_id=funnelcake_id,
            )
            # Identical links mean there is no separate direct URL to offer.
            if download_link_direct == download_link:
                download_link_direct = False

        builds.append({'os': plat_os,
                       'os_pretty': os_pretty,
                       'download_link': download_link,
                       'download_link_direct': download_link_direct})

    return builds
def android_builds(channel, builds=None):
    """Append the Firefox for Android download entry for *channel* to *builds*."""
    builds = builds or []
    builds.append({
        'os': 'android',
        'os_pretty': 'Android',
        'download_link': firefox_android.get_download_url(channel.lower()),
    })
    return builds
def ios_builds(channel, builds=None):
    """Append the Firefox for iOS download entry for *channel* to *builds*."""
    builds = builds or []
    builds.append({
        'os': 'ios',
        'os_pretty': 'iOS',
        'download_link': firefox_ios.get_download_url(channel),
    })
    return builds
@library.global_function
@jinja2.contextfunction
def download_firefox(ctx, channel='release', platform='all',
                     dom_id=None, locale=None, force_direct=False,
                     force_full_installer=False, force_funnelcake=False,
                     alt_copy=None, button_color='button-green',
                     locale_in_transition=False, download_location=None):
    """ Output a "download firefox" button.

    :param ctx: context from calling template.
    :param channel: name of channel: 'release', 'beta', 'alpha', or 'nightly'.
    :param platform: Target platform: 'desktop', 'android', 'ios', or 'all'.
    :param dom_id: Use this string as the id attr on the element.
    :param locale: The locale of the download. Default to locale of request.
    :param force_direct: Force the download URL to be direct.
    :param force_full_installer: Force the installer download to not be
            the stub installer (for aurora).
    :param force_funnelcake: Force the download version for en-US Windows to be
            'latest', which bouncer will translate to the funnelcake build.
    :param alt_copy: Specifies alternate copy to use for download buttons.
    :param button_color: Color of download button. Default to 'button-green'.
    :param locale_in_transition: Include the page locale in transitional download link.
    :param download_location: Specify the location of download button for
            GA reporting: 'primary cta', 'nav', 'sub nav', or 'other'.
    """
    show_desktop = platform in ['all', 'desktop']
    show_android = platform in ['all', 'android']
    show_ios = platform in ['all', 'ios']
    alt_channel = '' if channel == 'release' else channel
    locale = locale or get_locale(ctx['request'])
    funnelcake_id = ctx.get('funnelcake_id', False)

    dom_id = dom_id or 'download-button-%s-%s' % (
        'desktop' if platform == 'all' else platform, channel)

    # Gather data about the build for each platform
    builds = []
    if show_desktop:
        version = firefox_desktop.latest_version(channel)
        builds = desktop_builds(channel, builds, locale, force_direct,
                                force_full_installer, force_funnelcake,
                                funnelcake_id, locale_in_transition)
    if show_android:
        version = firefox_android.latest_version(channel)
        builds = android_builds(channel, builds)
    if show_ios:
        version = firefox_ios.latest_version(channel)
        # BUG FIX: use the ios_builds() helper (mirroring android above) instead
        # of an inline entry that called firefox_ios.get_download_url() with no
        # arguments and therefore ignored the requested channel.
        # NOTE(review): this changes the iOS link for non-release channels --
        # confirm that is the intended behavior.
        builds = ios_builds(channel, builds)

    # Get the native name for current locale
    langs = firefox_desktop.languages
    locale_name = langs[locale]['native'] if locale in langs else locale

    data = {
        'locale_name': locale_name,
        'version': version,
        'product': 'firefox-%s' % platform,
        'builds': builds,
        'id': dom_id,
        'channel': alt_channel,
        'show_desktop': show_desktop,
        'show_android': show_android,
        'show_ios': show_ios,
        'alt_copy': alt_copy,
        'button_color': button_color,
        'download_location': download_location
    }

    html = render_to_string('firefox/includes/download-button.html', data,
                            request=ctx['request'])
    return jinja2.Markup(html)
@library.global_function
@jinja2.contextfunction
def download_firefox_desktop_list(ctx, channel='release', dom_id=None, locale=None,
                                  force_full_installer=False):
    """
    Return a HTML list of platform download links for Firefox desktop

    :param channel: name of channel: 'release', 'beta', 'alpha' or 'nightly'.
    :param dom_id: Use this string as the id attr on the element.
    :param locale: The locale of the download. Default to locale of request.
    :param force_full_installer: Force the installer download to not be
            the stub installer (for aurora).
    """
    dom_id = dom_id or 'download-platform-list-%s' % (channel)
    locale = locale or get_locale(ctx['request'])

    # Make sure funnelcake_id is not passed as builds are often Windows only.
    # (force_direct=True, classified=True -- see desktop_builds signature.)
    builds = desktop_builds(channel, None, locale, True, force_full_installer,
                            False, False, False, True)

    recommended_builds = []
    traditional_builds = []

    for plat in builds:
        # Add 32-bit label for Windows and Linux builds.
        if channel != 'nightly':
            if plat['os'] == 'win':
                plat['os_pretty'] = 'Windows 32-bit'
            if plat['os'] == 'linux':
                plat['os_pretty'] = 'Linux 32-bit'

        # Nightly's universal Windows stub is always listed as recommended.
        if (plat['os'] in firefox_desktop.platform_classification['recommended'] or
                channel == 'nightly' and plat['os'] == 'win'):
            recommended_builds.append(plat)
        else:
            traditional_builds.append(plat)

    data = {
        'id': dom_id,
        'builds': {
            'recommended': recommended_builds,
            'traditional': traditional_builds,
        },
    }

    html = render_to_string('firefox/includes/download-list.html', data,
                            request=ctx['request'])
    return jinja2.Markup(html)
@library.global_function
def firefox_url(platform, page, channel=None):
    """
    Return a product-related URL like /firefox/all/ or /mobile/beta/notes/.

    Examples
    ========

    In Template
    -----------

        {{ firefox_url('desktop', 'all', 'organizations') }}
        {{ firefox_url('desktop', 'sysreq', channel) }}
        {{ firefox_url('android', 'notes') }}
    """
    kwargs = {}

    # Map public channel names onto the names used by the URL patterns.
    if channel == 'release':
        channel = None
    elif channel == 'alpha':
        if platform == 'desktop':
            channel = 'developer'
        elif platform == 'android':
            channel = 'aurora'
    elif channel == 'esr':
        channel = 'organizations'

    if channel:
        kwargs['channel'] = channel
    if platform != 'desktop':
        kwargs['platform'] = platform

    # Firefox for Android and iOS have the system requirements page on SUMO
    if platform in ('android', 'ios') and page == 'sysreq':
        return settings.FIREFOX_MOBILE_SYSREQ_URL

    return reverse('firefox.%s' % page, kwargs=kwargs)
| mpl-2.0 |
Markus-Goetz/CDS-Invenio-Authorlist | modules/bibedit/lib/bibedit_templates.py | 3 | 19977 | ## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# pylint: disable=C0103
"""BibEdit Templates."""
__revision__ = "$Id$"
from invenio.config import CFG_SITE_URL
from invenio.messages import gettext_set_language
class Template:
"""BibEdit Templates Class."""
    def __init__(self):
        """Initialize the template class (no per-instance state is kept)."""
        pass
def menu(self):
"""Create the menu."""
recordmenu = '<div class="bibEditMenuSectionHeader">\n' \
' %(imgCompressMenuSection)sRecord\n' \
' %(imgNewRecord)s\n' \
' %(imgCloneRecord)s\n' \
' %(imgTemplateRecord)s\n' \
' </div>\n' \
' <table>\n' \
' <col width="28px">\n' \
' <col width="40px">\n' \
' <col width="40px">\n' \
' <col width="28px">\n' \
' <tr>\n' \
' <td colspan="2">\n' \
' <form onsubmit="return false;">\n' \
' %(txtSearchPattern)s\n' \
' </form>\n' \
' <td colspan="2">%(sctSearchType)s</td>\n' \
' </tr>\n' \
' <tr>\n' \
' <td colspan="4">%(btnSearch)s</td>\n' \
' </tr>\n' \
' <tr id="rowRecordBrowser" style="display: none">\n' \
' <td>%(btnPrev)s</td>\n' \
' <td colspan="2" id="cellRecordNo"\n' \
' style="text-align: center">1/1</td>\n' \
' <td>%(btnNext)s</td>\n' \
' </tr>\n' \
' <tr>\n' \
' <td colspan="2">%(btnSubmit)s</td>\n' \
' <td colspan="2">%(btnCancel)s</td>\n' \
' </tr>\n' \
' <tr>\n' \
' <td id="tickets" colspan="4"><!--filled by bibedit_menu.js--></td>\n' \
' </tr>\n' \
' <tr class="bibEditMenuMore">\n' \
' <td>%(imgDeleteRecord)s</td>\n' \
' <td colspan="3">%(btnDeleteRecord)s</td>\n' \
' </tr>\n' \
' <tr class="bibEditmenuMore">\n' \
' <td>Switch to:</td>\n' \
' <td colspan="3">%(btnSwitchReadOnly)s</td>\n' \
' </tr>' \
' </table>' % {
'imgCompressMenuSection': img('/img/bullet_toggle_minus.png',
'bibEditImgCompressMenuSection', id='ImgRecordMenu'),
'imgNewRecord': img('/img/table.png', 'bibEditImgCtrlEnabled',
id='imgNewRecord', title='New record'), \
'imgCloneRecord': img('/img/table_multiple.png',
'bibEditImgCtrlDisabled', id='imgCloneRecord',
title='Clone record'), \
'imgTemplateRecord': img('/img/page_edit.png',
'bibEditImgCtrlEnabled', id='imgTemplateRecord',
title='Manage templates'), \
'txtSearchPattern': inp('text', id='txtSearchPattern'), \
'sctSearchType': '<select id="sctSearchType">\n' \
' <option value="recID">Rec ID</option>\n' \
' <option value="reportnumber">Rep No</option>\n' \
' <option value="anywhere">Anywhere</option>\n' \
' </select>',
'btnSearch': button('button', 'Search', 'bibEditBtnBold',
id='btnSearch'),
'btnPrev': button('button', '<', id='btnPrev', disabled='disabled'),
'btnNext': button('button', '>', id='btnNext', disabled='disabled'),
'btnSubmit': button('button', 'Submit', 'bibEditBtnBold',
id='btnSubmit', disabled='disabled'),
'btnCancel': button('button', 'Cancel', id='btnCancel',
disabled='disabled'),
'imgDeleteRecord': img('/img/table_delete.png'),
'btnDeleteRecord': button('button', 'Delete',
id='btnDeleteRecord', disabled='disabled'),
'btnSwitchReadOnly' : button('button', 'Read-only',
id='btnSwitchReadOnly')
}
fieldmenu = '<div class="bibEditMenuSectionHeader">\n' \
' %(imgCompressMenuSection)sFields\n' \
' </div>\n' \
' <table class="bibEditMenuMore">\n' \
' <col width="28px">\n' \
' <col>\n' \
' <tr>\n' \
' <td>%(imgAddField)s</td>\n' \
' <td>%(btnAddField)s</td>\n' \
' </tr>\n' \
' <tr>\n' \
' <td>%(imgDeleteSelected)s</td>\n' \
' <td>%(btnDeleteSelected)s</td>\n' \
' </tr>\n' \
' </table>' % {
'imgCompressMenuSection': img('/img/bullet_toggle_minus.png',
'bibEditImgCompressMenuSection', id='ImgFieldMenu'),
'imgAddField': img('/img/table_row_insert.png'),
'btnAddField': button('button', 'Add', id='btnAddField',
disabled='disabled'),
'imgDeleteSelected': img('/img/table_row_delete.png'),
'btnDeleteSelected': button('button', 'Delete selected',
id='btnDeleteSelected', disabled='disabled')}
viewmenu = '<div class="bibEditMenuSectionHeader">\n' \
' %(imgCompressMenuSection)sView\n' \
' </div>\n' \
' <table>\n' \
' <col width="68px">\n' \
' <col width="68px">\n' \
' <tr class="bibEditMenuMore">\n' \
' <td>%(btnTagMARC)s</td>\n' \
' <td>%(btnTagNames)s</td>\n' \
' </tr>\n' \
' </table>' % {
'imgCompressMenuSection': img('/img/bullet_toggle_minus.png',
'bibEditImgCompressMenuSection', id='ImgViewMenu'),
'btnTagMARC': button('button', 'MARC', id='btnMARCTags',
disabled='disabled'),
'btnTagNames': button('button', 'Human', id='btnHumanTags',
disabled='disabled')
}
historymenu = '<div class="bibEditMenuSectionHeader">\n' \
' %(imgCompressMenuSection)sHistory\n' \
' </div>\n' \
' <div class="bibEditRevHistoryMenuSection">\n' \
' <table>\n' \
' <col width="136px">\n' \
' <tr class="bibEditMenuMore">\n' \
' <td id="bibEditRevisionsHistory"></td>'\
' </tr>\n' \
' </table>\n' \
' </div>\n'% {
'imgCompressMenuSection': img('/img/bullet_toggle_minus.png',
'bibEditImgCompressMenuSection', id='ImgHistoryMenu')
}
undoredosection = '<div class="bibEditMenuSectionHeader">\n' \
' %(imgCompressMenuSection)sUndo/Redo\n' \
' </div>\n<table>' \
' <tr class="bibEditMenuMore"><td>' \
' <div class="bibEditURMenuSection">\n' \
' <div class="bibEditURDetailsSection" id="bibEditURUndoListLayer">\n' \
' <div class="bibEditURButtonLayer"><button id="btnUndo"><</button></div>\n' \
' <div id="undoOperationVisualisationField" class="bibEditHiddenElement bibEditURPreviewBox">\n' \
' <div id="undoOperationVisualisationFieldContent"></div>\n' \
' </div>\n' \
' </div>' \
' <div class="bibEditURDetailsSection" id="bibEditURRedoListLayer">\n' \
' <div class="bibEditURButtonLayer"><button id="btnRedo">></button></div>' \
' <div id="redoOperationVisualisationField" class="bibEditHiddenElement bibEditURPreviewBox">\n' \
' <div id="redoOperationVisualisationFieldContent"></div>' \
' </div>\n' \
' </div>\n' \
' </div></td></tr></table>\n' % { \
'imgCompressMenuSection': img('/img/bullet_toggle_minus.png',
'bibEditImgCompressMenuSection', id='ImgUndoRedoMenu') }
statusarea = '<table>\n' \
' <tr>\n' \
' <td id="cellIndicator">%(imgIndicator)s</td>\n' \
' <td id="cellStatus">%(lblChecking)s</td>\n' \
' </table>' % {
'imgIndicator': img('/img/indicator.gif'),
'lblChecking': 'Checking status' + '...'
}
holdingpenpanel = '<div class="bibEditMenuSectionHeader">\n' \
' %(imgCompressMenuSection)sHolding Pen\n' \
'<table class="bibEditMenuMore">\n<tr><td>' \
' <div id="bibEditHoldingPenToolbar"> ' \
' <div id="bibeditHPChanges"></div>' \
' </div> </td></tr></table>' \
' </div>\n' % \
{ 'imgCompressMenuSection': img('/img/bullet_toggle_minus.png',
'bibEditImgCompressMenuSection', id='ImgHoldingPenMenu') }
bibcirculationpanel = \
' <div class="bibEditMenuSection" ' \
' id="bibEditBibCircConnection">\n' \
'<div class="bibEditMenuSectionHeader">\n' \
' %(imgCompressMenuSection)sPhysical Copies\n' \
' <table class="bibEditMenuMore">\n<tr><td ' \
' class="bibEditBibCircPanel">' \
' Number of copies: ' \
' <div id="bibEditBibCirculationCopies">0</div><br/>' \
' <button id="bibEditBibCirculationBtn">' \
'Edit physical copies</button>' \
' </td></tr></table></div></div>' \
% {
'imgCompressMenuSection': img('/img/bullet_toggle_minus.png',
'bibEditImgCompressMenuSection', id='ImgBibCirculationMenu')
}
lnkSpecialChar = link('Special symbols', href='#', id='lnkSpecSymbols')
lnkhelp = img('/img/help.png', '', style='vertical-align: bottom') + \
link('Help', href='#', onclick='window.open(' \
'\'%s/help/admin/bibedit-admin-guide#2\', \'\', \'width=640,' \
'height=600,left=150,top=150,resizable=yes,scrollbars=yes\');' \
'return false;' % CFG_SITE_URL)
return ' %(page_style)s\n' \
' <div id="bibEditMenu">\n' \
' <div class="bibEditMenuSection">\n' \
' %(recordmenu)s\n' \
' </div>\n' \
' <div class="bibEditMenuSection">\n' \
' %(fieldmenu)s\n' \
' </div>\n' \
' <div class="bibEditMenuSection">\n' \
' %(viewmenu)s\n' \
' </div>\n' \
' <div class="bibEditMenuSection">\n' \
' %(holdingpenpanel)s\n'\
' </div>'\
' <div class="bibEditMenuSection">\n' \
' %(undoredosection)s\n' \
' </div>\n' \
' <div class="bibEditMenuSection">\n' \
' %(historymenu)s\n' \
' </div>\n' \
' %(circulationmenu)s\n' \
' <div id="bibEditMenuSection">\n' \
' %(statusarea)s\n' \
' </div>\n' \
' <div class="bibEditMenuSection" align="right">\n' \
' %(lnkSpecialChar)s %(lnkhelp)s\n' \
' </div>\n' \
' </div>\n' % {
'page_style': page_style(),
'recordmenu': recordmenu,
'viewmenu': viewmenu,
'fieldmenu': fieldmenu,
'statusarea': statusarea,
'lnkhelp': lnkhelp,
'lnkSpecialChar': lnkSpecialChar,
'holdingpenpanel': holdingpenpanel,
'historymenu': historymenu,
'undoredosection': undoredosection,
'circulationmenu': bibcirculationpanel
}
def history_comparebox(self, ln, revdate, revdate_cmp, comparison):
""" Display the bibedit history comparison box. """
_ = gettext_set_language(ln)
title = '<b>%(comp)s</b><br />%(rev)s %(revdate)s<br />%(rev)s %(revdate_cmp)s' % {
'comp': _('Comparison of:'),
'rev': _('Revision'),
'revdate': revdate,
'revdate_cmp': revdate_cmp}
return '''
<div class="bibEditHistCompare">
<p>%s</p>
<p>
%s
</p>
</div>''' % (title, comparison)
def clean_value(self, value, format):
""" This function clean value for HTML interface and inverse. """
if format != "html":
value = value.replace('"', '"')
value = value.replace('<', '<')
value = value.replace('>', '>')
else:
value = value.replace('"', '"')
value = value.replace('<', '<')
value = value.replace('>', '>')
return value
def img(src, _class='', **kargs):
    """Build an HTML <img> tag from a source path, CSS class and attributes."""
    attrs = 'src="%s" ' % src
    if _class:
        attrs += 'class="%s" ' % _class
    # Remaining keyword arguments become plain attribute="value" pairs.
    for name in kargs:
        attrs += '%s="%s" ' % (name, kargs[name])
    return '<img %s/>' % attrs
def inp(_type, _class='', **kargs):
    """Build an HTML <input> tag of the given type with extra attributes."""
    attrs = 'type="%s" ' % _type
    if _class:
        attrs += 'class="%s" ' % _class
    # Remaining keyword arguments become plain attribute="value" pairs.
    for name in kargs:
        attrs += '%s="%s" ' % (name, kargs[name])
    return '<input %s/>' % attrs
def button(_type, value, _class='', **kargs):
    """Build an HTML <button> tag wrapping *value* as its label."""
    attrs = 'type="%s" ' % _type
    if _class:
        attrs += 'class="%s" ' % _class
    # Remaining keyword arguments become plain attribute="value" pairs.
    for name in kargs:
        attrs += '%s="%s" ' % (name, kargs[name])
    return '<button %s>%s</button>' % (attrs, value)
def link(value, _class='', **kargs):
    """Build an HTML <a> tag around *value*; href etc. arrive as kwargs."""
    attrs = ''
    if _class:
        attrs = 'class="%s" ' % _class
    # Remaining keyword arguments become plain attribute="value" pairs.
    for name in kargs:
        attrs += '%s="%s" ' % (name, kargs[name])
    return '<a %s>%s</a>' % (attrs, value)
def page_style():
    """Return an inline ``<style>`` block used by the BibEdit pages.

    The whole stylesheet is emitted inline so the editor page does not
    depend on an external CSS file being deployed alongside it.
    """
    style = """<style type="text/css">"""
    style += """
  .pagefooter {
    position: fixed;
    bottom: 0px;
    height: 0px;
    margin-top: 0px;
  }
  .pagebodystripemiddle {
    position:absolute;
    margin: 0px;
    padding: 0px;
    height: 100%;
  }
  .headline_div {
    position: absolute;
    background: #ffffff;
    padding-left : 175px;
    width : 820px;
    height: 45px;
    text-indent: -15px;
  }
  #bibEditContent {
    position: absolute;
    margin-top: 45px;
    margin-left: 175px;
    overflow: auto;
    width: 820px;
    height: 80%;
    z-index: -1;
  }
  #bibEditMenu {
    width: 135px;
    position: absolute;
    top: 0px;
    padding-left: 10px;
    font-size: 0.8em;
    left: 0px;
  }
  #bibEditMenu .bibEditMenuSection {
    margin-bottom: 10px;
  }
  #bibEditMenu .bibEditMenuSectionHeader {
    font-weight: bold;
  }
  #bibEditTable {
    background-color: rgb(255, 255, 255);
    border: 1px solid #A1A1A1;
    border-collapse: collapse;
    width: 100%;
    table-layout:fixed;
  }
  #bibEditTable td {
    max-width: 500px;
    overflow:hidden;
    word-wrap: break-word;
  }
  #bibEditTable td textarea{
    max-width: 675px;
    min-height: 65px;
    max-height: 600px;
  }
  #bibEditMenu .bibEditMenuSection table {
    width: 100%;
  }
  #bibEditMenu form {
    margin: 0px;
  }
  #bibEditMenu .bibEditImgExpandMenuSection,
  #bibEditMenu .bibEditImgCompressMenuSection {
    margin: 0px;
    text-align: left;
    vertical-align: bottom;
  }
  #bibEditMenu a, #bibEditMenu button, #bibEditMenu input,
  #bibEditMenu select {
    font-size: 0.8em;
  }
  #bibEditMenu a, #bibEditMenu button, #bibEditMenu img,
  #bibEditMenu input, #bibEditMenu select {
    margin: 1px;
  }
  #bibEditMenu button, #bibEditMenu input, #bibEditMenu select {
    width: 100%;
  }
  #bibEditMenu .bibEditImgCtrlEnabled {
    cursor: pointer;
    opacity: 1.0;
    vertical-align: bottom;
  }
  #bibEditMenu .bibEditImgCtrlDisabled {
    cursor: default;
    opacity: 0.4;
    vertical-align: bottom;
  }
  .revisionLine {
    text-align: right;
    padding-left: 125px;
  }
  .navtrailboxbody {
    width: 700px;
  }
  .headline {
    display: none;
  }
  #topToolbarRight {
    position: absolute;
    padding-left: 790px;
    height: 48px;
  }
  #top_toolbar_hr {
    padding-top: 37px;
  }
  """
    style += "</style>"
    return style
| gpl-2.0 |
jblackburne/scikit-learn | examples/decomposition/plot_pca_vs_fa_model_selection.py | 70 | 4523 | """
===============================================================
Model selection with Probabilistic PCA and Factor Analysis (FA)
===============================================================
Probabilistic PCA and Factor Analysis are probabilistic models.
The consequence is that the likelihood of new data can be used
for model selection and covariance estimation.
Here we compare PCA and FA with cross-validation on low rank data corrupted
with homoscedastic noise (noise variance
is the same for each feature) or heteroscedastic noise (noise variance
is the different for each feature). In a second step we compare the model
likelihood to the likelihoods obtained from shrinkage covariance estimators.
One can observe that with homoscedastic noise both FA and PCA succeed
in recovering the size of the low rank subspace. The likelihood with PCA
is higher than FA in this case. However PCA fails and overestimates
the rank when heteroscedastic noise is present. Under appropriate
circumstances the low rank models are more likely than shrinkage models.
The automatic estimation from
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604
by Thomas P. Minka is also compared.
"""
# Authors: Alexandre Gramfort
# Denis A. Engemann
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from scipy import linalg
from sklearn.decomposition import PCA, FactorAnalysis
from sklearn.covariance import ShrunkCovariance, LedoitWolf
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import GridSearchCV
print(__doc__)
###############################################################################
# Create the data
n_samples, n_features, rank = 1000, 50, 10
sigma = 1.
rng = np.random.RandomState(42)
U, _, _ = linalg.svd(rng.randn(n_features, n_features))
X = np.dot(rng.randn(n_samples, rank), U[:, :rank].T)
# Adding homoscedastic noise
X_homo = X + sigma * rng.randn(n_samples, n_features)
# Adding heteroscedastic noise
sigmas = sigma * rng.rand(n_features) + sigma / 2.
X_hetero = X + rng.randn(n_samples, n_features) * sigmas
###############################################################################
# Fit the models
n_components = np.arange(0, n_features, 5) # options for n_components
def compute_scores(X):
    """Cross-validated mean scores of PCA and FA over the n_components grid."""
    pca_model = PCA(svd_solver='full')
    fa_model = FactorAnalysis()
    pca_scores = []
    fa_scores = []
    for n in n_components:
        # Reuse the same estimator objects, only changing the rank.
        pca_model.n_components = n
        fa_model.n_components = n
        pca_scores.append(np.mean(cross_val_score(pca_model, X)))
        fa_scores.append(np.mean(cross_val_score(fa_model, X)))
    return pca_scores, fa_scores
def shrunk_cov_score(X):
    """Mean CV score of the best-shrinkage ShrunkCovariance estimator."""
    shrinkage_grid = {'shrinkage': np.logspace(-2, 0, 30)}
    search = GridSearchCV(ShrunkCovariance(), shrinkage_grid)
    best_model = search.fit(X).best_estimator_
    return np.mean(cross_val_score(best_model, X))
def lw_score(X):
    """Mean cross-validated score of a Ledoit-Wolf covariance estimator."""
    return np.mean(cross_val_score(LedoitWolf(), X))
# Run the model-selection comparison on both noise regimes and plot the
# cross-validated scores together with the covariance-estimator baselines.
for X, title in [(X_homo, 'Homoscedastic Noise'),
                 (X_hetero, 'Heteroscedastic Noise')]:
    pca_scores, fa_scores = compute_scores(X)
    n_components_pca = n_components[np.argmax(pca_scores)]
    n_components_fa = n_components[np.argmax(fa_scores)]

    # Minka's automatic dimensionality choice for PCA.
    pca = PCA(svd_solver='full', n_components='mle')
    pca.fit(X)
    n_components_pca_mle = pca.n_components_

    print("best n_components by PCA CV = %d" % n_components_pca)
    print("best n_components by FactorAnalysis CV = %d" % n_components_fa)
    print("best n_components by PCA MLE = %d" % n_components_pca_mle)

    plt.figure()
    plt.plot(n_components, pca_scores, 'b', label='PCA scores')
    plt.plot(n_components, fa_scores, 'r', label='FA scores')
    plt.axvline(rank, color='g', label='TRUTH: %d' % rank, linestyle='-')
    plt.axvline(n_components_pca, color='b',
                label='PCA CV: %d' % n_components_pca, linestyle='--')
    plt.axvline(n_components_fa, color='r',
                label='FactorAnalysis CV: %d' % n_components_fa,
                linestyle='--')
    plt.axvline(n_components_pca_mle, color='k',
                label='PCA MLE: %d' % n_components_pca_mle, linestyle='--')

    # compare with other covariance estimators
    plt.axhline(shrunk_cov_score(X), color='violet',
                label='Shrunk Covariance MLE', linestyle='-.')
    # BUG FIX: the label was 'LedoitWolf MLE' % n_components_pca_mle --
    # applying % to a string with no conversion specifier raises TypeError.
    plt.axhline(lw_score(X), color='orange',
                label='LedoitWolf MLE', linestyle='-.')

    plt.xlabel('nb of components')
    plt.ylabel('CV scores')
    plt.legend(loc='lower right')
    plt.title(title)

plt.show()
| bsd-3-clause |
yuchangfu/pythonfun | packages/werkzeug/testsuite/multipart/collect.py | 248 | 1584 | #!/usr/bin/env python
"""
Hacky helper application to collect form data.
"""
from werkzeug.serving import run_simple
from werkzeug.wrappers import Request, Response
def copy_stream(request):
    """Dump the raw request body to disk and make it re-readable.

    The body is copied to ``request-<timestamp>/request.txt`` and the open
    file object replaces ``environ['wsgi.input']`` so later form parsing
    reads from the on-disk copy.  The folder name is stored on
    ``request.stat_folder`` for the other handlers.
    """
    from os import mkdir
    from time import time
    folder = 'request-%d' % time()
    mkdir(folder)
    environ = request.environ
    # NOTE: the file is intentionally left open -- it becomes the new
    # wsgi.input stream for the remainder of the request handling.
    f = open(folder + '/request.txt', 'wb+')
    f.write(environ['wsgi.input'].read(int(environ['CONTENT_LENGTH'])))
    f.flush()
    f.seek(0)
    environ['wsgi.input'] = f
    request.stat_folder = folder
def stats(request):
    """Persist the two uploaded files and the text field, then acknowledge.

    Uses the folder created by :func:`copy_stream` (available as
    ``request.stat_folder``) and returns a plain 'Done.' response.
    """
    copy_stream(request)
    f1 = request.files['file1']
    f2 = request.files['file2']
    text = request.form['text']
    f1.save(request.stat_folder + '/file1.bin')
    f2.save(request.stat_folder + '/file2.bin')
    # BUG FIX: the original wrote encoded bytes to a text-mode handle
    # (TypeError on Python 3) and never closed the file; open in binary
    # mode inside a context manager instead.
    with open(request.stat_folder + '/text.txt', 'wb') as text_file:
        text_file.write(text.encode('utf-8'))
    return Response('Done.')
def upload_file(request):
    """Render the static upload form: two file inputs plus a textarea."""
    return Response('''
    <h1>Upload File</h1>
    <form action="" method="post" enctype="multipart/form-data">
     <input type="file" name="file1"><br>
     <input type="file" name="file2"><br>
     <textarea name="text"></textarea><br>
     <input type="submit" value="Send">
    </form>
    ''', mimetype='text/html')
def application(environ, start_response):
    """WSGI entry point: show the form on GET, collect the data on POST.

    Note: the second parameter had been garbled to ``start_responseonse``
    by an earlier text conversion; WSGI servers pass it positionally, so
    restoring the conventional name is safe.
    """
    request = Request(environ)
    if request.method == 'POST':
        response = stats(request)
    else:
        response = upload_file(request)
    return response(environ, start_response)
if __name__ == '__main__':
    # Serve the collector locally; the debugger helps inspect bad uploads.
    run_simple('localhost', 5000, application, use_debugger=True)
| gpl-3.0 |
szecsi/Gears | GearsPy/Project/Components/Warp/Clamp.py | 1 | 1310 | import Gears as gears
from .. import *
from .Base import *
class Clamp(Base) :
    """Warp component that clamps stimulus coordinates to a rectangle.

    Registers a GLSL helper function on the stimulus pass that clamps its
    2-D input between per-axis minima and maxima (in micrometers).
    """
    def applyWithArgs(
            self,
            spass,
            functionName,
            *,
            minima : 'Horizontal and vertical minima [(um,um)].'
                = ('field', 'field'),
            maxima : 'Horizontal and vertical maxima [(um,um)].'
                = ('field', 'field')
            ) :
        # The string 'field' is a sentinel meaning "extend to the stimulus
        # field edge"; resolve it against the sequence's field size so the
        # rectangle is centred on the origin.
        sequence = spass.getSequence()
        hmin = minima[0]
        vmin = minima[1]
        if hmin == 'field' :
            hmin = -sequence.field_width_um * 0.5
        if vmin == 'field' :
            vmin = - sequence.field_height_um * 0.5
        minima = (hmin, vmin)
        hmax = maxima[0]
        vmax = maxima[1]
        if hmax == 'field' :
            hmax = sequence.field_width_um * 0.5
        if vmax == 'field' :
            vmax = sequence.field_height_um * 0.5
        maxima = (hmax, vmax)
        # Publish the bounds as shader uniforms and register the GLSL
        # clamp function under the requested name.
        spass.setShaderVector( name = functionName+'_minima', x= minima[0], y = minima[1] )
        spass.setShaderVector( name = functionName+'_maxima', x= maxima[0], y = maxima[1] )
        spass.setShaderFunction( name = functionName, src = self.glslEsc( '''
            vec2 @<X>@(vec2 x){return clamp(x, `minima, `maxima);}
        ''').format( X=functionName ) )
| gpl-2.0 |
garwynn/L900_LJC_Kernel | tools/perf/python/twatch.py | 3213 | 1338 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
    """Open a perf event on every CPU/thread and print task events forever.

    Python 2 script: sets up an event selector sampling every event
    (sample_period=1), mmaps the ring buffers and polls them in an
    endless loop, printing one line per event.
    """
    cpus = perf.cpu_map()
    threads = perf.thread_map()
    # NOTE(review): SAMPLE_TID appears twice in the mask below; perhaps
    # SAMPLE_TIME was intended -- confirm against the perf python binding.
    evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
                       wakeup_events = 1, sample_period = 1,
                       sample_id_all = 1,
                       sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
    evsel.open(cpus = cpus, threads = threads);
    evlist = perf.evlist(cpus, threads)
    evlist.add(evsel)
    evlist.mmap()
    while True:
        # Block until at least one ring buffer has data, then drain all CPUs.
        evlist.poll(timeout = -1)
        for cpu in cpus:
            event = evlist.read_on_cpu(cpu)
            if not event:
                continue
            print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
                                                    event.sample_pid,
                                                    event.sample_tid),
            print event
if __name__ == '__main__':
    # Run the watcher until interrupted.
    main()
| gpl-2.0 |
MattsFleaMarket/python-for-android | python-modules/twisted/twisted/runner/procmontap.py | 49 | 2325 | # -*- test-case-name: twisted.runner.test.test_procmontap -*-
# Copyright (c) 2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Support for creating a service which runs a process monitor.
"""
from twisted.python import usage
from twisted.runner.procmon import ProcessMonitor
class Options(usage.Options):
    """
    Define the options accepted by the I{twistd procmon} plugin.
    """
    synopsis = "[procmon options] commandline"
    # Each entry follows the usage.Options convention:
    # [long name, short name, default, help text, value coercion].
    optParameters = [["threshold", "t", 1, "How long a process has to live "
                      "before the death is considered instant, in seconds.",
                      float],
                     ["killtime", "k", 5, "How long a process being killed "
                      "has to get its affairs in order before it gets killed "
                      "with an unmaskable signal.",
                      float],
                     ["minrestartdelay", "m", 1, "The minimum time (in "
                      "seconds) to wait before attempting to restart a "
                      "process", float],
                     ["maxrestartdelay", "M", 3600, "The maximum time (in "
                      "seconds) to wait before attempting to restart a "
                      "process", float]]
    # No boolean flags and no zsh completion actions are defined.
    optFlags = []
    zsh_actions = {}
    longdesc = """\
procmon runs processes, monitors their progress, and restarts them when they
die.
procmon will not attempt to restart a process that appears to die instantly;
with each "instant" death (less than 1 second, by default), it will delay
approximately twice as long before restarting it. A successful run will reset
the counter.
Eg twistd procmon sleep 10"""
    def parseArgs(self, *args):
        """
        Grab the command line that is going to be started and monitored
        """
        self['args'] = args
    def postOptions(self):
        """
        Check for dependencies.
        """
        if len(self["args"]) < 1:
            raise usage.UsageError("Please specify a process commandline")
def makeService(config):
    """Create a ProcessMonitor service configured from the parsed options."""
    monitor = ProcessMonitor()
    monitor.threshold = config["threshold"]
    monitor.killTime = config["killtime"]
    monitor.minRestartDelay = config["minrestartdelay"]
    monitor.maxRestartDelay = config["maxrestartdelay"]
    # The display name of the process is its full command line.
    monitor.addProcess(" ".join(config["args"]), config["args"])
    return monitor
| apache-2.0 |
huanpc/IoT-1 | gui/controller/.venv/lib/python3.5/site-packages/django/db/models/fields/reverse_related.py | 106 | 11166 | """
"Rel objects" for related fields.
"Rel objects" (for lack of a better name) carry information about the relation
modeled by a related field and provide some utility functions. They're stored
in the ``remote_field`` attribute of the field.
They also act as reverse fields for the purposes of the Meta API because
they're the closest concept currently available.
"""
from __future__ import unicode_literals
import warnings
from django.core import exceptions
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import smart_text
from django.utils.functional import cached_property
from . import BLANK_CHOICE_DASH
class ForeignObjectRel(object):
    """
    Used by ForeignObject to store information about the relation.
    ``_meta.get_fields()`` returns this class to provide access to the field
    flags for the reverse relation.
    """
    # Field flags (seen from the reverse side of the relation).
    auto_created = True
    concrete = False
    editable = False
    is_relation = True
    # Reverse relations are always nullable (Django can't enforce that a
    # foreign key on the related model points to this model).
    null = True
    def __init__(self, field, to, related_name=None, related_query_name=None,
                 limit_choices_to=None, parent_link=False, on_delete=None):
        self.field = field
        self.model = to
        self.related_name = related_name
        self.related_query_name = related_query_name
        self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
        self.parent_link = parent_link
        self.on_delete = on_delete
        # Overridden by subclasses (e.g. ManyToManyRel / OneToOneRel).
        self.symmetrical = False
        self.multiple = True
    # Some of the following cached_properties can't be initialized in
    # __init__ as the field doesn't have its model yet. Calling these methods
    # before field.contribute_to_class() has been called will result in
    # AttributeError
    @property
    def to(self):
        # Deprecated alias kept for backwards compatibility.
        warnings.warn(
            "Usage of ForeignObjectRel.to attribute has been deprecated. "
            "Use the model attribute instead.",
            RemovedInDjango20Warning, 2)
        return self.model
    @cached_property
    def hidden(self):
        return self.is_hidden()
    @cached_property
    def name(self):
        return self.field.related_query_name()
    @property
    def remote_field(self):
        return self.field
    @property
    def target_field(self):
        """
        When filtering against this relation, returns the field on the remote
        model against which the filtering should happen.
        """
        target_fields = self.get_path_info()[-1].target_fields
        if len(target_fields) > 1:
            raise exceptions.FieldError("Can't use target_field for multicolumn relations.")
        return target_fields[0]
    @cached_property
    def related_model(self):
        if not self.field.model:
            raise AttributeError(
                "This property can't be accessed before self.field.contribute_to_class has been called.")
        return self.field.model
    @cached_property
    def many_to_many(self):
        return self.field.many_to_many
    # The cardinality flags are mirrored from the forward field: what is
    # one-to-many on the field is many-to-one from the reverse side.
    @cached_property
    def many_to_one(self):
        return self.field.one_to_many
    @cached_property
    def one_to_many(self):
        return self.field.many_to_one
    @cached_property
    def one_to_one(self):
        return self.field.one_to_one
    def get_prep_lookup(self, lookup_name, value):
        # Delegate lookup preparation to the forward field.
        return self.field.get_prep_lookup(lookup_name, value)
    def get_lookup(self, lookup_name):
        return self.field.get_lookup(lookup_name)
    def get_internal_type(self):
        return self.field.get_internal_type()
    @property
    def db_type(self):
        return self.field.db_type
    def __repr__(self):
        return '<%s: %s.%s>' % (
            type(self).__name__,
            self.related_model._meta.app_label,
            self.related_model._meta.model_name,
        )
    def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH,
                    limit_to_currently_related=False):
        """
        Return choices with a default blank choices included, for use as
        SelectField choices for this field.
        Analog of django.db.models.fields.Field.get_choices(), provided
        initially for utilization by RelatedFieldListFilter.
        """
        first_choice = blank_choice if include_blank else []
        queryset = self.related_model._default_manager.all()
        if limit_to_currently_related:
            queryset = queryset.complex_filter(
                {'%s__isnull' % self.related_model._meta.model_name: False}
            )
        lst = [(x._get_pk_val(), smart_text(x)) for x in queryset]
        return first_choice + lst
    def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
        # Defer to the actual field definition for db prep
        return self.field.get_db_prep_lookup(lookup_type, value, connection=connection, prepared=prepared)
    def is_hidden(self):
        "Should the related object be hidden?"
        # A related_name ending in '+' is the convention for "no reverse
        # accessor".
        return bool(self.related_name) and self.related_name[-1] == '+'
    def get_joining_columns(self):
        return self.field.get_reverse_joining_columns()
    def get_extra_restriction(self, where_class, alias, related_alias):
        # Note the swapped alias order: the restriction is built from the
        # reverse direction.
        return self.field.get_extra_restriction(where_class, related_alias, alias)
    def set_field_name(self):
        """
        Set the related field's name, this is not available until later stages
        of app loading, so set_field_name is called from
        set_attributes_from_rel()
        """
        # By default foreign object doesn't relate to any remote field (for
        # example custom multicolumn joins currently have no remote field).
        self.field_name = None
    def get_accessor_name(self, model=None):
        # This method encapsulates the logic that decides what name to give an
        # accessor descriptor that retrieves related many-to-one or
        # many-to-many objects. It uses the lower-cased object_name + "_set",
        # but this can be overridden with the "related_name" option.
        # Due to backwards compatibility ModelForms need to be able to provide
        # an alternate model. See BaseInlineFormSet.get_default_prefix().
        opts = model._meta if model else self.related_model._meta
        model = model or self.related_model
        if self.multiple:
            # If this is a symmetrical m2m relation on self, there is no reverse accessor.
            if self.symmetrical and model == self.model:
                return None
        if self.related_name:
            return self.related_name
        if opts.default_related_name:
            return opts.default_related_name % {
                'model_name': opts.model_name.lower(),
                'app_label': opts.app_label.lower(),
            }
        return opts.model_name + ('_set' if self.multiple else '')
    def get_cache_name(self):
        return "_%s_cache" % self.get_accessor_name()
    def get_path_info(self):
        return self.field.get_reverse_path_info()
class ManyToOneRel(ForeignObjectRel):
    """
    Used by the ForeignKey field to store information about the relation.
    ``_meta.get_fields()`` returns this class to provide access to the field
    flags for the reverse relation.
    Note: Because we somewhat abuse the Rel objects by using them as reverse
    fields we get the funny situation where
    ``ManyToOneRel.many_to_one == False`` and
    ``ManyToOneRel.one_to_many == True``. This is unfortunate but the actual
    ManyToOneRel class is a private API and there is work underway to turn
    reverse relations into actual fields.
    """
    def __init__(self, field, to, field_name, related_name=None, related_query_name=None,
                 limit_choices_to=None, parent_link=False, on_delete=None):
        super(ManyToOneRel, self).__init__(
            field, to,
            related_name=related_name,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
            parent_link=parent_link,
            on_delete=on_delete,
        )
        # Name of the field on the 'to' model this relation points at.
        self.field_name = field_name
    def __getstate__(self):
        state = self.__dict__.copy()
        # Drop the cached ``related_model`` (a cached_property on the base
        # class) so it is recomputed after unpickling.
        state.pop('related_model', None)
        return state
    def get_related_field(self):
        """
        Return the Field in the 'to' object to which this relationship is tied.
        """
        field = self.model._meta.get_field(self.field_name)
        if not field.concrete:
            raise exceptions.FieldDoesNotExist("No related field named '%s'" %
                                               self.field_name)
        return field
    def set_field_name(self):
        # Default to the primary key of the related model when unset.
        self.field_name = self.field_name or self.model._meta.pk.name
class OneToOneRel(ManyToOneRel):
    """
    Used by OneToOneField to store information about the relation.
    ``_meta.get_fields()`` returns this class to provide access to the field
    flags for the reverse relation.
    """
    def __init__(self, field, to, field_name, related_name=None, related_query_name=None,
                 limit_choices_to=None, parent_link=False, on_delete=None):
        super(OneToOneRel, self).__init__(
            field, to, field_name,
            related_name=related_name,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
            parent_link=parent_link,
            on_delete=on_delete,
        )
        # A one-to-one reverse relation yields a single object, not a set.
        self.multiple = False
class ManyToManyRel(ForeignObjectRel):
    """
    Used by ManyToManyField to store information about the relation.
    ``_meta.get_fields()`` returns this class to provide access to the field
    flags for the reverse relation.
    """
    def __init__(self, field, to, related_name=None, related_query_name=None,
                 limit_choices_to=None, symmetrical=True, through=None, through_fields=None,
                 db_constraint=True):
        super(ManyToManyRel, self).__init__(
            field, to,
            related_name=related_name,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
        )
        # An explicit intermediate model and db_constraint=False are
        # mutually exclusive.
        if through and not db_constraint:
            raise ValueError("Can't supply a through model and db_constraint=False")
        self.through = through
        if through_fields and not through:
            raise ValueError("Cannot specify through_fields without a through model")
        self.through_fields = through_fields
        self.symmetrical = symmetrical
        self.db_constraint = db_constraint
    def get_related_field(self):
        """
        Return the field in the 'to' object to which this relationship is tied.
        Provided for symmetry with ManyToOneRel.
        """
        opts = self.through._meta
        if self.through_fields:
            field = opts.get_field(self.through_fields[0])
        else:
            # Fall back to the first FK on the through model that points at
            # this model.
            for field in opts.fields:
                rel = getattr(field, 'remote_field', None)
                if rel and rel.model == self.model:
                    break
        return field.foreign_related_fields[0]
| mit |
rew4332/tensorflow | tensorflow/models/image/mnist/convolutional.py | 1 | 14738 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Simple, end-to-end, LeNet-5-like convolutional MNIST model example.
This should achieve a test error of 0.7%. Please keep this model as simple and
linear as possible, it is meant as a tutorial for simple convolutional models.
Run with --self_test on the command line to execute a short self-test.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import os
import sys
import time
import numpy
from six.moves import urllib
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.python import pywrap_tensorflow
SOURCE_URL = 'http://yann.lecun.com/exdb/mnist/'
WORK_DIRECTORY = 'data'
IMAGE_SIZE = 28  # MNIST images are 28x28 pixels.
NUM_CHANNELS = 1  # Grayscale input.
PIXEL_DEPTH = 255  # Maximum raw pixel value, used to rescale to [-0.5, 0.5].
NUM_LABELS = 10  # Digit classes 0-9.
VALIDATION_SIZE = 5000  # Size of the validation set.
SEED = 66478  # Set to None for random seed.
BATCH_SIZE = 64
NUM_EPOCHS = 10
EVAL_BATCH_SIZE = 64
EVAL_FREQUENCY = 100  # Number of steps between evaluations.


# Command-line flags; FLAGS is the parsed flag namespace used below.
tf.app.flags.DEFINE_boolean("self_test", False, "True if running a self test.")
tf.app.flags.DEFINE_boolean('use_fp16', False,
                            "Use half floats instead of full floats if True.")
FLAGS = tf.app.flags.FLAGS
def data_type():
  """Return the type of the activations, weights, and placeholder variables."""
  # Half precision is opt-in via the --use_fp16 flag.
  return tf.float16 if FLAGS.use_fp16 else tf.float32
def maybe_download(filename):
  """Download the data from Yann's website, unless it's already here.

  Returns the local path of the (possibly freshly downloaded) file.
  """
  if not tf.gfile.Exists(WORK_DIRECTORY):
    tf.gfile.MakeDirs(WORK_DIRECTORY)
  local_path = os.path.join(WORK_DIRECTORY, filename)
  if not tf.gfile.Exists(local_path):
    local_path, _ = urllib.request.urlretrieve(SOURCE_URL + filename, local_path)
    # NOTE(review): GFile.Size() is the old capitalized pre-1.0 API --
    # confirm it exists in the TF build this fork targets.
    with tf.gfile.GFile(local_path) as f:
      size = f.Size()
    print('Successfully downloaded', filename, size, 'bytes.')
  return local_path
def extract_data(filename, num_images):
  """Extract the images into a 4D tensor [image index, y, x, channels].

  Values are rescaled from [0, 255] down to [-0.5, 0.5].
  """
  print('Extracting', filename)
  with gzip.open(filename) as stream:
    stream.read(16)  # skip the IDX header (magic, count, rows, cols)
    raw = stream.read(IMAGE_SIZE * IMAGE_SIZE * num_images * NUM_CHANNELS)
  pixels = numpy.frombuffer(raw, dtype=numpy.uint8).astype(numpy.float32)
  # Center on zero and scale to unit range.
  pixels = (pixels - (PIXEL_DEPTH / 2.0)) / PIXEL_DEPTH
  return pixels.reshape(num_images, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS)
def extract_labels(filename, num_images):
  """Extract the labels into a vector of int64 label IDs."""
  print('Extracting', filename)
  with gzip.open(filename) as stream:
    stream.read(8)  # skip the IDX header (magic number, item count)
    raw = stream.read(1 * num_images)
  return numpy.frombuffer(raw, dtype=numpy.uint8).astype(numpy.int64)
def fake_data(num_images):
  """Generate a fake dataset that matches the dimensions of MNIST."""
  # Alternate labels 0/1 and fill each image with a constant derived from
  # its label, so the two classes are trivially separable in self-test mode.
  data = numpy.ndarray(
      shape=(num_images, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS),
      dtype=numpy.float32)
  labels = numpy.zeros(shape=(num_images,), dtype=numpy.int64)
  for index in xrange(num_images):
    parity = index % 2
    data[index, :, :, 0] = parity - 0.5
    labels[index] = parity
  return data, labels
def error_rate(predictions, labels):
  """Return the error rate based on dense predictions and sparse labels."""
  correct = numpy.sum(numpy.argmax(predictions, 1) == labels)
  return 100.0 - (100.0 * correct / predictions.shape[0])
def main(argv=None):  # pylint: disable=unused-argument
  """Download MNIST, build the LeNet-5-like graph, train it, and report
  per-phase timing collected by this fork's instrumentation."""
  print("\ntimeStart:")
  #print(pywrap_tensorflow.timer_use.op.getAdd())
  #print("\n")
  # NOTE(review): `pywrap_tensorflow.timer_use` is custom instrumentation in
  # this TensorFlow fork, not stock TF -- confirm it exists in your build.
  mainStartStamp=time.time()
  if FLAGS.self_test:
    print('Running self-test.')
    train_data, train_labels = fake_data(256)
    validation_data, validation_labels = fake_data(EVAL_BATCH_SIZE)
    test_data, test_labels = fake_data(EVAL_BATCH_SIZE)
    num_epochs = 1
  else:
    # Get the data.
    train_data_filename = maybe_download('train-images-idx3-ubyte.gz')
    train_labels_filename = maybe_download('train-labels-idx1-ubyte.gz')
    test_data_filename = maybe_download('t10k-images-idx3-ubyte.gz')
    test_labels_filename = maybe_download('t10k-labels-idx1-ubyte.gz')

    # Extract it into numpy arrays.
    train_data = extract_data(train_data_filename, 60000)
    train_labels = extract_labels(train_labels_filename, 60000)
    test_data = extract_data(test_data_filename, 10000)
    test_labels = extract_labels(test_labels_filename, 10000)

    # Generate a validation set by carving the first VALIDATION_SIZE
    # examples off the training set.
    validation_data = train_data[:VALIDATION_SIZE, ...]
    validation_labels = train_labels[:VALIDATION_SIZE]
    train_data = train_data[VALIDATION_SIZE:, ...]
    train_labels = train_labels[VALIDATION_SIZE:]
    num_epochs = NUM_EPOCHS
  train_size = train_labels.shape[0]

  # This is where training samples and labels are fed to the graph.
  # These placeholder nodes will be fed a batch of training data at each
  # training step using the {feed_dict} argument to the Run() call below.
  train_data_node = tf.placeholder(
      data_type(),
      shape=(BATCH_SIZE, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS))
  train_labels_node = tf.placeholder(tf.int64, shape=(BATCH_SIZE,))
  eval_data = tf.placeholder(
      data_type(),
      shape=(EVAL_BATCH_SIZE, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS))

  # The variables below hold all the trainable weights. They are passed an
  # initial value which will be assigned when we call:
  # {tf.initialize_all_variables().run()}
  conv1_weights = tf.Variable(
      tf.truncated_normal([5, 5, NUM_CHANNELS, 32],  # 5x5 filter, depth 32.
                          stddev=0.1,
                          seed=SEED, dtype=data_type()))
  conv1_biases = tf.Variable(tf.zeros([32], dtype=data_type()))
  conv2_weights = tf.Variable(tf.truncated_normal(
      [5, 5, 32, 64], stddev=0.1,
      seed=SEED, dtype=data_type()))
  conv2_biases = tf.Variable(tf.constant(0.1, shape=[64], dtype=data_type()))
  fc1_weights = tf.Variable(  # fully connected, depth 512.
      tf.truncated_normal([IMAGE_SIZE // 4 * IMAGE_SIZE // 4 * 64, 512],
                          stddev=0.1,
                          seed=SEED,
                          dtype=data_type()))
  fc1_biases = tf.Variable(tf.constant(0.1, shape=[512], dtype=data_type()))
  fc2_weights = tf.Variable(tf.truncated_normal([512, NUM_LABELS],
                                                stddev=0.1,
                                                seed=SEED,
                                                dtype=data_type()))
  fc2_biases = tf.Variable(tf.constant(
      0.1, shape=[NUM_LABELS], dtype=data_type()))

  # We will replicate the model structure for the training subgraph, as well
  # as the evaluation subgraphs, while sharing the trainable parameters.
  def model(data, train=False):
    """The Model definition."""
    # 2D convolution, with 'SAME' padding (i.e. the output feature map has
    # the same size as the input). Note that {strides} is a 4D array whose
    # shape matches the data layout: [image index, y, x, depth].
    conv = tf.nn.conv2d(data,
                        conv1_weights,
                        strides=[1, 1, 1, 1],
                        padding='SAME')
    # Bias and rectified linear non-linearity.
    relu = tf.nn.relu(tf.nn.bias_add(conv, conv1_biases))
    # Max pooling. The kernel size spec {ksize} also follows the layout of
    # the data. Here we have a pooling window of 2, and a stride of 2.
    pool = tf.nn.max_pool(relu,
                          ksize=[1, 2, 2, 1],
                          strides=[1, 2, 2, 1],
                          padding='SAME')
    conv = tf.nn.conv2d(pool,
                        conv2_weights,
                        strides=[1, 1, 1, 1],
                        padding='SAME')
    relu = tf.nn.relu(tf.nn.bias_add(conv, conv2_biases))
    pool = tf.nn.max_pool(relu,
                          ksize=[1, 2, 2, 1],
                          strides=[1, 2, 2, 1],
                          padding='SAME')
    # Reshape the feature map cuboid into a 2D matrix to feed it to the
    # fully connected layers.
    pool_shape = pool.get_shape().as_list()
    reshape = tf.reshape(
        pool,
        [pool_shape[0], pool_shape[1] * pool_shape[2] * pool_shape[3]])
    # Fully connected layer. Note that the '+' operation automatically
    # broadcasts the biases.
    hidden = tf.nn.relu(tf.matmul(reshape, fc1_weights) + fc1_biases)
    # Add a 50% dropout during training only. Dropout also scales
    # activations such that no rescaling is needed at evaluation time.
    if train:
      hidden = tf.nn.dropout(hidden, 0.5, seed=SEED)
    return tf.matmul(hidden, fc2_weights) + fc2_biases

  # Training computation: logits + cross-entropy loss.
  logits = model(train_data_node, True)
  loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
      logits, train_labels_node))

  # L2 regularization for the fully connected parameters.
  regularizers = (tf.nn.l2_loss(fc1_weights) + tf.nn.l2_loss(fc1_biases) +
                  tf.nn.l2_loss(fc2_weights) + tf.nn.l2_loss(fc2_biases))
  # Add the regularization term to the loss.
  loss += 5e-4 * regularizers

  # Optimizer: set up a variable that's incremented once per batch and
  # controls the learning rate decay.
  batch = tf.Variable(0, dtype=data_type())
  # Decay once per epoch, using an exponential schedule starting at 0.01.
  learning_rate = tf.train.exponential_decay(
      0.01,                # Base learning rate.
      batch * BATCH_SIZE,  # Current index into the dataset.
      train_size,          # Decay step.
      0.95,                # Decay rate.
      staircase=True)
  # Use simple momentum for the optimization.
  optimizer = tf.train.MomentumOptimizer(learning_rate,
                                         0.9).minimize(loss,
                                                       global_step=batch)

  # Predictions for the current training minibatch.
  train_prediction = tf.nn.softmax(logits)

  # Predictions for the test and validation, which we'll compute less often.
  eval_prediction = tf.nn.softmax(model(eval_data))

  # Small utility function to evaluate a dataset by feeding batches of data to
  # {eval_data} and pulling the results from {eval_predictions}.
  # Saves memory and enables this to run on smaller GPUs.
  def eval_in_batches(data, sess):
    """Get all predictions for a dataset by running it in small batches."""
    size = data.shape[0]
    if size < EVAL_BATCH_SIZE:
      raise ValueError("batch size for evals larger than dataset: %d" % size)
    predictions = numpy.ndarray(shape=(size, NUM_LABELS), dtype=numpy.float32)
    for begin in xrange(0, size, EVAL_BATCH_SIZE):
      end = begin + EVAL_BATCH_SIZE
      if end <= size:
        predictions[begin:end, :] = sess.run(
            eval_prediction,
            feed_dict={eval_data: data[begin:end, ...]})
      else:
        # Final ragged batch: rerun on the last EVAL_BATCH_SIZE examples and
        # keep only the tail that was not yet filled in.
        batch_predictions = sess.run(
            eval_prediction,
            feed_dict={eval_data: data[-EVAL_BATCH_SIZE:, ...]})
        predictions[begin:, :] = batch_predictions[begin - size:, :]
    return predictions

  # Create a local session to run the training.
  start_time = time.time()
  with tf.Session() as sess:
    # Run all the initializers to prepare the trainable parameters.
    tf.initialize_all_variables().run()
    print('Initialized!')
    print("\nInitGPU time:"+str(pywrap_tensorflow.timer_use.getInitGPU())+"sec\n")
    # Loop through training steps.
    for step in xrange(int(num_epochs * train_size) // BATCH_SIZE):
      # Compute the offset of the current minibatch in the data.
      # Note that we could use better randomization across epochs.
      offset = (step * BATCH_SIZE) % (train_size - BATCH_SIZE)
      batch_data = train_data[offset:(offset + BATCH_SIZE), ...]
      batch_labels = train_labels[offset:(offset + BATCH_SIZE)]
      # This dictionary maps the batch data (as a numpy array) to the
      # node in the graph it should be fed to.
      feed_dict = {train_data_node: batch_data,
                   train_labels_node: batch_labels}
      # Run the graph and fetch some of the nodes.
      _, l, lr, predictions = sess.run(
          [optimizer, loss, learning_rate, train_prediction],
          feed_dict=feed_dict)
      if step % EVAL_FREQUENCY == 0:
        elapsed_time = time.time() - start_time
        start_time = time.time()
        print('Step %d (epoch %.2f), %.1f ms' %
              (step, float(step) * BATCH_SIZE / train_size,
               1000 * elapsed_time / EVAL_FREQUENCY))
        print('Minibatch loss: %.3f, learning rate: %.6f' % (l, lr))
        print('Minibatch error: %.1f%%' % error_rate(predictions, batch_labels))
        print('Validation error: %.1f%%' % error_rate(
            eval_in_batches(validation_data, sess), validation_labels))
        #print("Cumulative H2D time:"+str(pywrap_tensorflow.timer_use.getMemH2D())+"sec")
        #print("Cumulative D2H time:"+str(pywrap_tensorflow.timer_use.getMemD2H())+"sec\n")
        sys.stdout.flush()
    # Finally print the result!
    test_error = error_rate(eval_in_batches(test_data, sess), test_labels)
    print('Test error: %.1f%%' % test_error)
    if FLAGS.self_test:
      print('test_error', test_error)
      assert test_error == 0.0, 'expected 0.0 test_error, got %.2f' % (
          test_error,)
  # Per-phase timing summary from the fork's instrumentation (see NOTE above).
  mainStopStamp=time.time()
  main_elapse=mainStopStamp-mainStartStamp
  print("\nTotal H2D time:"+str(pywrap_tensorflow.timer_use.getMemH2D())+"sec\n")
  print("Total D2H time:"+str(pywrap_tensorflow.timer_use.getMemD2H())+"sec\n")
  print("Total getOrCreateExecutor time:"+str(pywrap_tensorflow.timer_use.getCreateExe())+"sec\n")
  print("Total sendInput time:"+str(pywrap_tensorflow.timer_use.getSendInput())+"sec\n")
  print("Total receiveOuput time:"+str(pywrap_tensorflow.timer_use.getRecvOutput())+"sec\n")
  print("Total RunAsync time:"+str(pywrap_tensorflow.timer_use.getRunAsync())+"sec\n")
  print("Total run time:"+str(main_elapse)+"sec\n")
if __name__ == '__main__':
  # tf.app.run() parses the flags defined above and then invokes main().
  tf.app.run()
| apache-2.0 |
googleapis/python-dataproc | google/cloud/dataproc_v1/services/job_controller/transports/base.py | 1 | 12099 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources
import google.auth # type: ignore
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.dataproc_v1.types import jobs
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
# Resolve the client-info version from the installed distribution metadata;
# fall back to a default ClientInfo when the package is not installed as a
# distribution (e.g. running from a source checkout).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-dataproc",).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

# Detect the installed google-auth version so scope keyword arguments can be
# adapted to the API it supports (used by _get_scopes_kwargs below).
try:
    # google.auth.__version__ was added in 1.26.0
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    try:  # try pkg_resources if it is available
        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
        _GOOGLE_AUTH_VERSION = None
class JobControllerTransport(abc.ABC):
    """Abstract transport class for JobController."""

    # OAuth scopes requested when none are supplied by the caller.
    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    DEFAULT_HOST: str = "dataproc.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: ga_credentials.Credentials = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )

        # If the credentials is service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

    # TODO(busunkim): This method is in the base transport
    # to avoid duplicating code across the transport classes. These functions
    # should be deleted once the minimum required versions of google-auth is increased.

    # TODO: Remove this function once google-auth >= 1.25.0 is required
    @classmethod
    def _get_scopes_kwargs(
        cls, host: str, scopes: Optional[Sequence[str]]
    ) -> Dict[str, Optional[Sequence[str]]]:
        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""

        scopes_kwargs = {}

        if _GOOGLE_AUTH_VERSION and (
            packaging.version.parse(_GOOGLE_AUTH_VERSION)
            >= packaging.version.parse("1.25.0")
        ):
            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
        else:
            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}

        return scopes_kwargs

    def _prep_wrapped_messages(self, client_info):
        """Wrap each RPC method with its default retry and timeout policy."""
        # Precompute the wrapped methods.
        self._wrapped_methods = {
            self.submit_job: gapic_v1.method.wrap_method(
                self.submit_job,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=900.0,
                ),
                default_timeout=900.0,
                client_info=client_info,
            ),
            self.submit_job_as_operation: gapic_v1.method.wrap_method(
                self.submit_job_as_operation,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=900.0,
                ),
                default_timeout=900.0,
                client_info=client_info,
            ),
            self.get_job: gapic_v1.method.wrap_method(
                self.get_job,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=900.0,
                ),
                default_timeout=900.0,
                client_info=client_info,
            ),
            self.list_jobs: gapic_v1.method.wrap_method(
                self.list_jobs,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=900.0,
                ),
                default_timeout=900.0,
                client_info=client_info,
            ),
            self.update_job: gapic_v1.method.wrap_method(
                self.update_job,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=900.0,
                ),
                default_timeout=900.0,
                client_info=client_info,
            ),
            self.cancel_job: gapic_v1.method.wrap_method(
                self.cancel_job,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=900.0,
                ),
                default_timeout=900.0,
                client_info=client_info,
            ),
            self.delete_job: gapic_v1.method.wrap_method(
                self.delete_job,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=900.0,
                ),
                default_timeout=900.0,
                client_info=client_info,
            ),
        }

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    @property
    def submit_job(
        self,
    ) -> Callable[[jobs.SubmitJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]:
        """Return the callable for the SubmitJob RPC (implemented by subclasses)."""
        raise NotImplementedError()

    @property
    def submit_job_as_operation(
        self,
    ) -> Callable[
        [jobs.SubmitJobRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        """Return the callable for the SubmitJobAsOperation RPC (implemented by subclasses)."""
        raise NotImplementedError()

    @property
    def get_job(
        self,
    ) -> Callable[[jobs.GetJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]:
        """Return the callable for the GetJob RPC (implemented by subclasses)."""
        raise NotImplementedError()

    @property
    def list_jobs(
        self,
    ) -> Callable[
        [jobs.ListJobsRequest],
        Union[jobs.ListJobsResponse, Awaitable[jobs.ListJobsResponse]],
    ]:
        """Return the callable for the ListJobs RPC (implemented by subclasses)."""
        raise NotImplementedError()

    @property
    def update_job(
        self,
    ) -> Callable[[jobs.UpdateJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]:
        """Return the callable for the UpdateJob RPC (implemented by subclasses)."""
        raise NotImplementedError()

    @property
    def cancel_job(
        self,
    ) -> Callable[[jobs.CancelJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]:
        """Return the callable for the CancelJob RPC (implemented by subclasses)."""
        raise NotImplementedError()

    @property
    def delete_job(
        self,
    ) -> Callable[
        [jobs.DeleteJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]
    ]:
        """Return the callable for the DeleteJob RPC (implemented by subclasses)."""
        raise NotImplementedError()
# Public surface of this module.
__all__ = ("JobControllerTransport",)
| apache-2.0 |
DefyVentures/edx-platform | lms/djangoapps/certificates/migrations/0005_auto__add_field_generatedcertificate_name.py | 188 | 7270 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the ``name`` column to
    ``certificates_generatedcertificate`` (reversibly)."""

    def forwards(self, orm):

        # Adding field 'GeneratedCertificate.name'
        db.add_column('certificates_generatedcertificate', 'name', self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True), keep_default=False)

    def backwards(self, orm):

        # Deleting field 'GeneratedCertificate.name'
        db.delete_column('certificates_generatedcertificate', 'name')

    # Frozen ORM snapshot generated by South at the time this migration was
    # created. Do not edit by hand -- it must reflect the historical model
    # state, not the current models.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
            'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
            'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
            'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'certificates.generatedcertificate': {
            'Meta': {'object_name': 'GeneratedCertificate'},
            'certificate_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'download_url': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
            'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'grade': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True'}),
            'graded_certificate_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
            'graded_download_url': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['certificates']
| agpl-3.0 |
cctaylor/googleads-python-lib | examples/dfp/v201505/activity_service/create_activities.py | 3 | 2299 | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new activities.
To determine which activities groups exist, run get_all_activities.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: ActivityService.createActivities
"""
__author__ = ('Vincent Tsao',
'Joseph DiLallo')
import uuid
# Import appropriate modules from the client library.
from googleads import dfp
# Set the ID of the activity group this activity is associated with.
# Run get_all_activities.py (or inspect the DFP UI) to find valid group IDs.
ACTIVITY_GROUP_ID = 'INSERT_ACTIVITY_GROUP_ID_HERE'
def main(client, activity_group_id):
  """Create one DAILY_VISITS and one CUSTOM activity in the given group."""
  # Look up the DFP service that manages activities.
  service = client.GetService('ActivityService', version='v201505')

  def _new_activity(activity_type):
    # A fresh UUID per activity keeps names unique across repeated runs.
    return {
        'name': 'Activity #%s' % uuid.uuid4(),
        'activityGroupId': activity_group_id,
        'type': activity_type
    }

  # Create both activities on the server in a single API call.
  created = service.createActivities(
      [_new_activity('DAILY_VISITS'), _new_activity('CUSTOM')])

  # Display results.
  for activity in created:
    print ('An activity with ID \'%s\', name \'%s\', and type \'%s\' was '
           'created.' % (activity['id'], activity['name'], activity['type']))
if __name__ == '__main__':
  # Initialize client object. LoadFromStorage() pulls credentials and
  # settings from the "googleads.yaml" file in the user's home directory.
  dfp_client = dfp.DfpClient.LoadFromStorage()
  main(dfp_client, ACTIVITY_GROUP_ID)
| apache-2.0 |
fhaoquan/kbengine | kbe/res/scripts/common/Lib/asyncio/queues.py | 63 | 9019 | """Queues"""
__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'JoinableQueue',
'QueueFull', 'QueueEmpty']
import collections
import heapq
from . import events
from . import futures
from . import locks
from .tasks import coroutine
class QueueEmpty(Exception):
    """Raised by Queue.get(block=0)/get_nowait() when the queue is empty."""
class QueueFull(Exception):
    """Raised by Queue.put(block=0)/put_nowait() when the queue is full."""
class Queue:
    """A queue, useful for coordinating producer and consumer coroutines.
    If maxsize is less than or equal to zero, the queue size is infinite. If it
    is an integer greater than 0, then "yield from put()" will block when the
    queue reaches maxsize, until an item is removed by get().
    Unlike the standard library Queue, you can reliably know this Queue's size
    with qsize(), since your single-threaded asyncio application won't be
    interrupted between calling qsize() and doing an operation on the Queue.
    """
    def __init__(self, maxsize=0, *, loop=None):
        # Use the current default event loop unless one is supplied.
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        self._maxsize = maxsize
        # Futures.
        self._getters = collections.deque()
        # Pairs of (item, Future).
        self._putters = collections.deque()
        self._init(maxsize)
    # _init/_get/_put are the ordering-policy hooks overridden by subclasses
    # (PriorityQueue, LifoQueue); the base policy is FIFO on a deque.
    def _init(self, maxsize):
        self._queue = collections.deque()
    def _get(self):
        return self._queue.popleft()
    def _put(self, item):
        self._queue.append(item)
    def __repr__(self):
        return '<{} at {:#x} {}>'.format(
            type(self).__name__, id(self), self._format())
    def __str__(self):
        return '<{} {}>'.format(type(self).__name__, self._format())
    def _format(self):
        # Build the state summary shared by __repr__ and __str__.
        result = 'maxsize={!r}'.format(self._maxsize)
        if getattr(self, '_queue', None):
            result += ' _queue={!r}'.format(list(self._queue))
        if self._getters:
            result += ' _getters[{}]'.format(len(self._getters))
        if self._putters:
            result += ' _putters[{}]'.format(len(self._putters))
        return result
    def _consume_done_getters(self):
        # Delete waiters at the head of the get() queue who've timed out.
        while self._getters and self._getters[0].done():
            self._getters.popleft()
    def _consume_done_putters(self):
        # Delete waiters at the head of the put() queue who've timed out.
        while self._putters and self._putters[0][1].done():
            self._putters.popleft()
    def qsize(self):
        """Number of items in the queue."""
        return len(self._queue)
    @property
    def maxsize(self):
        """Number of items allowed in the queue."""
        return self._maxsize
    def empty(self):
        """Return True if the queue is empty, False otherwise."""
        return not self._queue
    def full(self):
        """Return True if there are maxsize items in the queue.
        Note: if the Queue was initialized with maxsize=0 (the default),
        then full() is never True.
        """
        if self._maxsize <= 0:
            return False
        else:
            return self.qsize() >= self._maxsize
    @coroutine
    def put(self, item):
        """Put an item into the queue.
        If you yield from put(), wait until a free slot is available
        before adding item.
        """
        self._consume_done_getters()
        if self._getters:
            # A getter is waiting, so the queue must be empty: hand the item
            # over directly rather than parking it in the queue.
            assert not self._queue, (
                'queue non-empty, why are getters waiting?')
            getter = self._getters.popleft()
            # Use _put and _get instead of passing item straight to getter, in
            # case a subclass has logic that must run (e.g. JoinableQueue).
            self._put(item)
            getter.set_result(self._get())
        elif self._maxsize > 0 and self._maxsize <= self.qsize():
            # Queue is full: park (item, waiter) until a getter frees a slot.
            waiter = futures.Future(loop=self._loop)
            self._putters.append((item, waiter))
            yield from waiter
        else:
            self._put(item)
    def put_nowait(self, item):
        """Put an item into the queue without blocking.
        If no free slot is immediately available, raise QueueFull.
        """
        self._consume_done_getters()
        if self._getters:
            # Same direct-handoff path as put(), minus the blocking branch.
            assert not self._queue, (
                'queue non-empty, why are getters waiting?')
            getter = self._getters.popleft()
            # Use _put and _get instead of passing item straight to getter, in
            # case a subclass has logic that must run (e.g. JoinableQueue).
            self._put(item)
            getter.set_result(self._get())
        elif self._maxsize > 0 and self._maxsize <= self.qsize():
            raise QueueFull
        else:
            self._put(item)
    @coroutine
    def get(self):
        """Remove and return an item from the queue.
        If you yield from get(), wait until an item is available.
        """
        self._consume_done_putters()
        if self._putters:
            assert self.full(), 'queue not full, why are putters waiting?'
            item, putter = self._putters.popleft()
            self._put(item)
            # When a getter runs and frees up a slot so this putter can
            # run, we need to defer the put for a tick to ensure that
            # getters and putters alternate perfectly. See
            # ChannelTest.test_wait.
            self._loop.call_soon(putter._set_result_unless_cancelled, None)
            return self._get()
        elif self.qsize():
            return self._get()
        else:
            # Nothing available: park a Future and wait for a put() to
            # complete it with the delivered item.
            waiter = futures.Future(loop=self._loop)
            self._getters.append(waiter)
            return (yield from waiter)
    def get_nowait(self):
        """Remove and return an item from the queue.
        Return an item if one is immediately available, else raise QueueEmpty.
        """
        self._consume_done_putters()
        if self._putters:
            assert self.full(), 'queue not full, why are putters waiting?'
            item, putter = self._putters.popleft()
            self._put(item)
            # Wake putter on next tick.
            putter.set_result(None)
            return self._get()
        elif self.qsize():
            return self._get()
        else:
            raise QueueEmpty
class PriorityQueue(Queue):
    """A subclass of Queue; retrieves entries in priority order (lowest first).
    Entries are typically tuples of the form: (priority number, data).
    """
    def _init(self, maxsize):
        # Backing store is a heap-ordered list rather than the base deque.
        self._queue = []
    def _put(self, item, heappush=heapq.heappush):
        # heappush is bound as a default argument to avoid the module
        # attribute lookup on every call.
        heappush(self._queue, item)
    def _get(self, heappop=heapq.heappop):
        # Pops the smallest entry while preserving the heap invariant.
        return heappop(self._queue)
class LifoQueue(Queue):
    """Queue variant with LIFO (stack) ordering: the last item put is the
    first one retrieved."""
    def _init(self, maxsize):
        # A plain list serves as the stack; maxsize is enforced by Queue.
        self._queue = list()
    def _put(self, item):
        # Push onto the top of the stack.
        self._queue.append(item)
    def _get(self):
        # Pop the most recently pushed item.
        top = self._queue.pop()
        return top
class JoinableQueue(Queue):
    """A subclass of Queue with task_done() and join() methods."""
    def __init__(self, maxsize=0, *, loop=None):
        super().__init__(maxsize=maxsize, loop=loop)
        # Count of items put() but not yet acknowledged via task_done().
        self._unfinished_tasks = 0
        # Event that is set exactly when _unfinished_tasks == 0; join() waits
        # on it.
        self._finished = locks.Event(loop=self._loop)
        self._finished.set()
    def _format(self):
        # Extend the base repr summary with the outstanding-task count.
        result = Queue._format(self)
        if self._unfinished_tasks:
            result += ' tasks={}'.format(self._unfinished_tasks)
        return result
    def _put(self, item):
        # Every enqueued item adds one unfinished task and blocks join().
        super()._put(item)
        self._unfinished_tasks += 1
        self._finished.clear()
    def task_done(self):
        """Indicate that a formerly enqueued task is complete.
        Used by queue consumers. For each get() used to fetch a task,
        a subsequent call to task_done() tells the queue that the processing
        on the task is complete.
        If a join() is currently blocking, it will resume when all items have
        been processed (meaning that a task_done() call was received for every
        item that had been put() into the queue).
        Raises ValueError if called more times than there were items placed in
        the queue.
        """
        if self._unfinished_tasks <= 0:
            raise ValueError('task_done() called too many times')
        self._unfinished_tasks -= 1
        if self._unfinished_tasks == 0:
            self._finished.set()
    @coroutine
    def join(self):
        """Block until all items in the queue have been gotten and processed.
        The count of unfinished tasks goes up whenever an item is added to the
        queue. The count goes down whenever a consumer thread calls task_done()
        to indicate that the item was retrieved and all work on it is complete.
        When the count of unfinished tasks drops to zero, join() unblocks.
        """
        if self._unfinished_tasks > 0:
            yield from self._finished.wait()
| lgpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.