repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
ZachRiegel/scriptbin | pypyjs/modules/encodings/iso2022_jp.py | 816 | 1053 | #
# iso2022_jp.py: Python Unicode Codec for ISO2022_JP
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
# Obtain the C-implemented multibyte codec object for ISO-2022-JP; all the
# classes below delegate their real work to it.
codec = _codecs_iso2022.getcodec('iso2022_jp')
class Codec(codecs.Codec):
    # Stateless one-shot encode/decode entry points, taken directly from
    # the C codec object (they already have the (input, errors) signature
    # that codecs.Codec expects).
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    # The multibyte incremental machinery locates the concrete codec
    # through this class attribute.
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    # The multibyte incremental machinery locates the concrete codec
    # through this class attribute.
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    # Stream-oriented reader; the mbc base class uses this attribute.
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    # Stream-oriented writer; the mbc base class uses this attribute.
    codec = codec
def getregentry():
    """Build the CodecInfo record used to register this codec with Python.

    A single stateless Codec instance supplies the one-shot encode/decode
    callables; the incremental and stream classes are passed as-is.
    """
    stateless = Codec()
    return codecs.CodecInfo(
        name='iso2022_jp',
        encode=stateless.encode,
        decode=stateless.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| gpl-3.0 |
WizardVan/VanFrame | Dependencies/protobuf/gtest/scripts/gen_gtest_pred_impl.py | 2538 | 21986 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""gen_gtest_pred_impl.py v0.1
Generates the implementation of Google Test predicate assertions and
accompanying tests.
Usage:
gen_gtest_pred_impl.py MAX_ARITY
where MAX_ARITY is a positive integer.
The command generates the implementation of up-to MAX_ARITY-ary
predicate assertions, and writes it to file gtest_pred_impl.h in the
directory where the script is. It also generates the accompanying
unit test in file gtest_pred_impl_unittest.cc.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import sys
import time
# Where this script is.
SCRIPT_DIR = os.path.dirname(sys.argv[0])
# Where to store the generated header.
HEADER = os.path.join(SCRIPT_DIR, '../include/gtest/gtest_pred_impl.h')
# Where to store the generated unit test.
UNIT_TEST = os.path.join(SCRIPT_DIR, '../test/gtest_pred_impl_unittest.cc')
def HeaderPreamble(n):
  """Returns the preamble for the header file.

  Args:
    n: the maximum arity of the predicate macros to be generated.
  """

  # A map that defines the values used in the preamble template.
  # NOTE(review): 'year' is not referenced by the template below —
  # confirm whether it is intentionally kept for symmetry with other
  # templates.
  DEFS = {
      'today' : time.strftime('%m/%d/%Y'),
      'year' : time.strftime('%Y'),
      'command' : '%s %s' % (os.path.basename(sys.argv[0]), n),
      'n' : n
      }

  # The template is emitted verbatim (after %-interpolation); the doubled
  # backslashes below become single backslash line-continuations in the
  # generated C++ macros.
  return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'. DO NOT EDIT BY HAND!
//
// Implements a family of generic predicate assertion macros.
#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
// Makes sure this header is not included before gtest.h.
#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
# error Do not include gtest_pred_impl.h directly. Include gtest.h instead.
#endif // GTEST_INCLUDE_GTEST_GTEST_H_
// This header implements a family of generic predicate assertion
// macros:
//
// ASSERT_PRED_FORMAT1(pred_format, v1)
// ASSERT_PRED_FORMAT2(pred_format, v1, v2)
// ...
//
// where pred_format is a function or functor that takes n (in the
// case of ASSERT_PRED_FORMATn) values and their source expression
// text, and returns a testing::AssertionResult. See the definition
// of ASSERT_EQ in gtest.h for an example.
//
// If you don't care about formatting, you can use the more
// restrictive version:
//
// ASSERT_PRED1(pred, v1)
// ASSERT_PRED2(pred, v1, v2)
// ...
//
// where pred is an n-ary function or functor that returns bool,
// and the values v1, v2, ..., must support the << operator for
// streaming to std::ostream.
//
// We also define the EXPECT_* variations.
//
// For now we only support predicates whose arity is at most %(n)s.
// Please email googletestframework@googlegroups.com if you need
// support for higher arities.
// GTEST_ASSERT_ is the basic statement to which all of the assertions
// in this file reduce. Don't use this in your code.
#define GTEST_ASSERT_(expression, on_failure) \\
GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\
if (const ::testing::AssertionResult gtest_ar = (expression)) \\
; \\
else \\
on_failure(gtest_ar.failure_message())
""" % DEFS)
def Arity(n):
  """Returns the English name of the given arity.

  Negative arities have no name and yield None; arities above three
  fall back to the generic 'N-ary' form.
  """
  if n < 0:
    return None
  names = ('nullary', 'unary', 'binary', 'ternary')
  if n < len(names):
    return names[n]
  return '%s-ary' % n
def Title(word):
  """Returns the given word in title case.  The difference between
  this and string's title() method is that Title('4-ary') is '4-ary'
  while '4-ary'.title() is '4-Ary'.

  Unlike the naive word[0] form, this returns '' for the empty string
  instead of raising IndexError.
  """
  # word[:1] is safe on the empty string, whereas word[0] would raise.
  return word[:1].upper() + word[1:]
def OneTo(n):
  """Returns the list [1, 2, 3, ..., n] (empty for n < 1)."""
  # Wrap in list() so the docstring holds on Python 3 too, where range()
  # is a lazy sequence rather than a list.  Callers that merely iterate
  # are unaffected; list equality/concatenation now also works.
  return list(range(1, n + 1))
def Iter(n, format, sep=''):
  """Given a positive integer n, a format string that contains 0 or
  more '%s' format specs, and optionally a separator string, returns
  the join of n strings, each formatted with the format string on an
  iterator ranged from 1 to n.

  Example:
    Iter(3, 'v%s', sep=', ') returns 'v1, v2, v3'.
  """
  # Each '%s' spec in the format receives the same index, so count the
  # specs once and replicate the index tuple to match.
  spec_count = format.count('%s')
  pieces = []
  for index in range(1, n + 1):
    pieces.append(format % ((index,) * spec_count))
  return sep.join(pieces)
def ImplementationForArity(n):
  """Returns the implementation of n-ary predicate assertions."""

  # A map that defines the values used in the implementation template.
  DEFS = {
      'n' : str(n),
      'vs' : Iter(n, 'v%s', sep=', '),
      'vts' : Iter(n, '#v%s', sep=', '),
      'arity' : Arity(n),
      'Arity' : Title(Arity(n))
      }

  # First section: the AssertPredNHelper function template, assembled
  # piecewise so each of the n parameters gets its own line.
  impl = """
// Helper function for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
// this in your code.
template <typename Pred""" % DEFS

  impl += Iter(n, """,
typename T%s""")

  impl += """>
AssertionResult AssertPred%(n)sHelper(const char* pred_text""" % DEFS

  impl += Iter(n, """,
const char* e%s""")

  impl += """,
Pred pred"""

  impl += Iter(n, """,
const T%s& v%s""")

  impl += """) {
if (pred(%(vs)s)) return AssertionSuccess();
""" % DEFS

  # Failure message: predicate text, argument expressions, then each
  # argument's evaluated value on its own line.
  impl += ' return AssertionFailure() << pred_text << "("'

  impl += Iter(n, """
<< e%s""", sep=' << ", "')

  impl += ' << ") evaluates to false, where"'

  impl += Iter(n, """
<< "\\n" << e%s << " evaluates to " << v%s""")

  # Second section: the internal GTEST_PRED* macros and the public
  # EXPECT_/ASSERT_ macro family built on top of them.
  impl += """;
}
// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
// Don't use this in your code.
#define GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, on_failure)\\
GTEST_ASSERT_(pred_format(%(vts)s, %(vs)s), \\
on_failure)
// Internal macro for implementing {EXPECT|ASSERT}_PRED%(n)s. Don't use
// this in your code.
#define GTEST_PRED%(n)s_(pred, %(vs)s, on_failure)\\
GTEST_ASSERT_(::testing::AssertPred%(n)sHelper(#pred""" % DEFS

  impl += Iter(n, """, \\
#v%s""")

  impl += """, \\
pred"""

  impl += Iter(n, """, \\
v%s""")

  impl += """), on_failure)
// %(Arity)s predicate assertion macros.
#define EXPECT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define EXPECT_PRED%(n)s(pred, %(vs)s) \\
GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define ASSERT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_FATAL_FAILURE_)
#define ASSERT_PRED%(n)s(pred, %(vs)s) \\
GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_FATAL_FAILURE_)
""" % DEFS

  return impl
def HeaderPostamble():
  """Returns the postamble for the header file."""
  # Closes the include guard opened in HeaderPreamble().
  return """
#endif  // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
"""
def GenerateFile(path, content):
  """Given a file path and a content string, overwrites it with the
  given content.

  Args:
    path: path of the file to (re)write.
    content: the new file content, written verbatim (no trailing
        newline is appended).
  """
  print('Updating file %s . . .' % path)
  # 'with' + open() replaces the Python-2-only file() constructor and the
  # 'print >>f' statement, and guarantees the handle is closed even if
  # the write raises.  Works on both Python 2 and Python 3.
  with open(path, 'w') as f:
    f.write(content)
  print('File %s has been updated.' % path)
def GenerateHeader(n):
  """Given the maximum arity n, updates the header file that implements
  the predicate assertions."""
  # Assemble the header from its three sections — preamble, one
  # implementation per arity, postamble — then write it out in one go.
  sections = [HeaderPreamble(n)]
  for arity in OneTo(n):
    sections.append(ImplementationForArity(arity))
  sections.append(HeaderPostamble())
  GenerateFile(HEADER, ''.join(sections))
def UnitTestPreamble():
  """Returns the preamble for the unit test file."""

  # A map that defines the values used in the preamble template.
  # NOTE(review): 'command' reads sys.argv[1] directly, so this function
  # assumes the script was invoked with the arity argument; 'year' is not
  # referenced by the template below.
  DEFS = {
      'today' : time.strftime('%m/%d/%Y'),
      'year' : time.strftime('%Y'),
      'command' : '%s %s' % (os.path.basename(sys.argv[0]), sys.argv[1]),
      }

  return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'. DO NOT EDIT BY HAND!
// Regression test for gtest_pred_impl.h
//
// This file is generated by a script and quite long. If you intend to
// learn how Google Test works by reading its unit tests, read
// gtest_unittest.cc instead.
//
// This is intended as a regression test for the Google Test predicate
// assertions. We compile it as part of the gtest_unittest target
// only to keep the implementation tidy and compact, as it is quite
// involved to set up the stage for testing Google Test using Google
// Test itself.
//
// Currently, gtest_unittest takes ~11 seconds to run in the testing
// daemon. In the future, if it grows too large and needs much more
// time to finish, we should consider separating this file into a
// stand-alone regression test.
#include <iostream>
#include "gtest/gtest.h"
#include "gtest/gtest-spi.h"
// A user-defined data type.
struct Bool {
explicit Bool(int val) : value(val != 0) {}
bool operator>(int n) const { return value > Bool(n).value; }
Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }
bool operator==(const Bool& rhs) const { return value == rhs.value; }
bool value;
};
// Enables Bool to be used in assertions.
std::ostream& operator<<(std::ostream& os, const Bool& x) {
return os << (x.value ? "true" : "false");
}
""" % DEFS)
def TestsForArity(n):
  """Returns the tests for n-ary predicate assertions."""

  # A map that defines the values used in the template for the tests.
  DEFS = {
      'n' : n,
      'es' : Iter(n, 'e%s', sep=', '),
      'vs' : Iter(n, 'v%s', sep=', '),
      'vts' : Iter(n, '#v%s', sep=', '),
      'tvs' : Iter(n, 'T%s v%s', sep=', '),
      'int_vs' : Iter(n, 'int v%s', sep=', '),
      'Bool_vs' : Iter(n, 'Bool v%s', sep=', '),
      'types' : Iter(n, 'typename T%s', sep=', '),
      'v_sum' : Iter(n, 'v%s', sep=' + '),
      'arity' : Arity(n),
      'Arity' : Title(Arity(n)),
      }

  # Sample predicate functions used by the generated tests.
  tests = (
"""// Sample functions/functors for testing %(arity)s predicate assertions.
// A %(arity)s predicate function.
template <%(types)s>
bool PredFunction%(n)s(%(tvs)s) {
return %(v_sum)s > 0;
}
// The following two functions are needed to circumvent a bug in
// gcc 2.95.3, which sometimes has problem with the above template
// function.
bool PredFunction%(n)sInt(%(int_vs)s) {
return %(v_sum)s > 0;
}
bool PredFunction%(n)sBool(%(Bool_vs)s) {
return %(v_sum)s > 0;
}
""" % DEFS)

  # Sample predicate functor.
  tests += """
// A %(arity)s predicate functor.
struct PredFunctor%(n)s {
template <%(types)s>
bool operator()(""" % DEFS

  tests += Iter(n, 'const T%s& v%s', sep=""",
""")

  tests += """) {
return %(v_sum)s > 0;
}
};
""" % DEFS

  # Sample predicate-formatter function.
  tests += """
// A %(arity)s predicate-formatter function.
template <%(types)s>
testing::AssertionResult PredFormatFunction%(n)s(""" % DEFS

  tests += Iter(n, 'const char* e%s', sep=""",
""")

  tests += Iter(n, """,
const T%s& v%s""")

  tests += """) {
if (PredFunction%(n)s(%(vs)s))
return testing::AssertionSuccess();
return testing::AssertionFailure()
<< """ % DEFS

  tests += Iter(n, 'e%s', sep=' << " + " << ')

  tests += """
<< " is expected to be positive, but evaluates to "
<< %(v_sum)s << ".";
}
""" % DEFS

  # Sample predicate-formatter functor, delegating to the function above.
  tests += """
// A %(arity)s predicate-formatter functor.
struct PredFormatFunctor%(n)s {
template <%(types)s>
testing::AssertionResult operator()(""" % DEFS

  tests += Iter(n, 'const char* e%s', sep=""",
""")

  tests += Iter(n, """,
const T%s& v%s""")

  tests += """) const {
return PredFormatFunction%(n)s(%(es)s, %(vs)s);
}
};
""" % DEFS

  # Shared test fixture: counts how many times each argument was
  # evaluated and whether the test body ran to completion.
  # NOTE(review): "unexpactedly" below is a typo in the generated C++
  # message; it is a runtime string, so it is left untouched here.
  tests += """
// Tests for {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
class Predicate%(n)sTest : public testing::Test {
protected:
virtual void SetUp() {
expected_to_finish_ = true;
finished_ = false;""" % DEFS

  tests += """
""" + Iter(n, 'n%s_ = ') + """0;
}
"""

  tests += """
virtual void TearDown() {
// Verifies that each of the predicate's arguments was evaluated
// exactly once."""

  tests += ''.join(["""
EXPECT_EQ(1, n%s_) <<
"The predicate assertion didn't evaluate argument %s "
"exactly once.";""" % (i, i + 1) for i in OneTo(n)])

  tests += """
// Verifies that the control flow in the test function is expected.
if (expected_to_finish_ && !finished_) {
FAIL() << "The predicate assertion unexpactedly aborted the test.";
} else if (!expected_to_finish_ && finished_) {
FAIL() << "The failed predicate assertion didn't abort the test "
"as expected.";
}
}
// true iff the test function is expected to run to finish.
static bool expected_to_finish_;
// true iff the test function did run to finish.
static bool finished_;
""" % DEFS

  tests += Iter(n, """
static int n%s_;""")

  tests += """
};
bool Predicate%(n)sTest::expected_to_finish_;
bool Predicate%(n)sTest::finished_;
""" % DEFS

  # The '%%' keeps %(n)s literal through Iter's own %-interpolation so
  # the final '% DEFS' can substitute it.
  tests += Iter(n, """int Predicate%%(n)sTest::n%s_;
""") % DEFS

  tests += """
typedef Predicate%(n)sTest EXPECT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest EXPECT_PRED%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED%(n)sTest;
""" % DEFS

  def GenTest(use_format, use_assert, expect_failure,
              use_functor, use_user_type):
    """Returns the test for a predicate assertion macro.

    Args:
      use_format: true iff the assertion is a *_PRED_FORMAT*.
      use_assert: true iff the assertion is a ASSERT_*.
      expect_failure: true iff the assertion is expected to fail.
      use_functor: true iff the first argument of the assertion is
          a functor (as opposed to a function)
      use_user_type: true iff the predicate functor/function takes
          argument(s) of a user-defined type.

    Example:
      GenTest(1, 0, 0, 1, 0) returns a test that tests the behavior
      of a successful EXPECT_PRED_FORMATn() that takes a functor
      whose arguments have built-in types."""

    if use_assert:
      assrt = 'ASSERT'  # 'assert' is reserved, so we cannot use
                        # that identifier here.
    else:
      assrt = 'EXPECT'

    assertion = assrt + '_PRED'

    if use_format:
      pred_format = 'PredFormat'
      assertion += '_FORMAT'
    else:
      pred_format = 'Pred'

    assertion += '%(n)s' % DEFS

    if use_functor:
      pred_format_type = 'functor'
      pred_format += 'Functor%(n)s()'
    else:
      pred_format_type = 'function'
      pred_format += 'Function%(n)s'
      if not use_format:
        # Plain predicates must pick the gcc-2.95.3 workaround overload
        # matching the argument type.
        if use_user_type:
          pred_format += 'Bool'
        else:
          pred_format += 'Int'

    test_name = pred_format_type.title()

    if use_user_type:
      arg_type = 'user-defined type (Bool)'
      test_name += 'OnUserType'
      if expect_failure:
        arg = 'Bool(n%s_++)'
      else:
        arg = 'Bool(++n%s_)'
    else:
      arg_type = 'built-in type (int)'
      test_name += 'OnBuiltInType'
      if expect_failure:
        arg = 'n%s_++'
      else:
        arg = '++n%s_'

    if expect_failure:
      successful_or_failed = 'failed'
      expected_or_not = 'expected.'
      test_name += 'Failure'
    else:
      successful_or_failed = 'successful'
      expected_or_not = 'UNEXPECTED!'
      test_name += 'Success'

    # A map that defines the values used in the test template.
    defs = DEFS.copy()
    defs.update({
        'assert' : assrt,
        'assertion' : assertion,
        'test_name' : test_name,
        'pf_type' : pred_format_type,
        'pf' : pred_format,
        'arg_type' : arg_type,
        'arg' : arg,
        'successful' : successful_or_failed,
        'expected' : expected_or_not,
        })

    test = """
// Tests a %(successful)s %(assertion)s where the
// predicate-formatter is a %(pf_type)s on a %(arg_type)s.
TEST_F(%(assertion)sTest, %(test_name)s) {""" % defs

    indent = (len(assertion) + 3)*' '
    extra_indent = ''

    if expect_failure:
      extra_indent = ' '
      if use_assert:
        test += """
expected_to_finish_ = false;
EXPECT_FATAL_FAILURE({ // NOLINT"""
      else:
        test += """
EXPECT_NONFATAL_FAILURE({ // NOLINT"""

    test += '\n' + extra_indent + """ %(assertion)s(%(pf)s""" % defs

    # 'arg' still contains a %s spec, interpolated per-argument by Iter;
    # this second pass resolves any remaining %(...)s keys.
    test = test % defs
    test += Iter(n, ',\n' + indent + extra_indent + '%(arg)s' % defs)
    test += ');\n' + extra_indent + ' finished_ = true;\n'

    if expect_failure:
      test += ' }, "");\n'

    test += '}\n'
    return test

  # Generates tests for all 2**5 = 32 combinations.
  tests += ''.join([GenTest(use_format, use_assert, expect_failure,
                            use_functor, use_user_type)
                    for use_format in [0, 1]
                    for use_assert in [0, 1]
                    for expect_failure in [0, 1]
                    for use_functor in [0, 1]
                    for use_user_type in [0, 1]
                    ])

  return tests
def UnitTestPostamble():
  """Returns the postamble for the tests (currently nothing)."""
  # The generated unit test needs no trailer; kept for symmetry with
  # HeaderPostamble().
  return ""
def GenerateUnitTest(n):
  """Returns the tests for up-to n-ary predicate assertions."""
  # Assemble the unit-test source from its sections — preamble, one test
  # section per arity, postamble — then write it out in one go.
  sections = [UnitTestPreamble()]
  for arity in OneTo(n):
    sections.append(TestsForArity(arity))
  sections.append(UnitTestPostamble())
  GenerateFile(UNIT_TEST, ''.join(sections))
def _Main():
  """The entry point of the script.  Generates the header file and its
  unit test."""
  if len(sys.argv) != 2:
    # Function-call form of print works under both Python 2 and 3,
    # unlike the bare print statement.
    print(__doc__)
    print('Author: ' + __author__)
    sys.exit(1)

  n = int(sys.argv[1])
  GenerateHeader(n)
  GenerateUnitTest(n)


if __name__ == '__main__':
  _Main()
| apache-2.0 |
kaplun/invenio | modules/bibsched/lib/tasklets/bst_weblinkback_updater.py | 24 | 2836 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011, 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.bibtask import write_message
from invenio.weblinkback_config import CFG_WEBLINKBACK_TYPE
from invenio.weblinkback import update_linkbacks, \
delete_linkbacks_on_blacklist, \
send_pending_linkbacks_notification
def bst_weblinkback_updater(mode):
    """
    Update linkbacks
    @param mode: 1 delete rejected, broken and pending linkbacks whose URLs is on blacklist
                 2 update page titles of new linkbacks
                 3 update page titles of old linkbacks
                 4 update manually set page titles
                 5 detect and disable broken linkbacks
                 6 send notification email for all pending linkbacks
    @type mode: int
    """
    # Dispatch table: mode -> (log description, zero-argument action).
    # The description is interpolated into the exact "Starting to ..." /
    # "Completed to ..." messages the original if/elif chain emitted.
    dispatch = {
        1: ("delete rejected and pending linkbacks URLs on blacklist",
            delete_linkbacks_on_blacklist),
        2: ("update the page titles of new linkbacks",
            lambda: update_linkbacks(1)),
        3: ("update the page titles of old linkbacks",
            lambda: update_linkbacks(2)),
        4: ("update manually set page titles",
            lambda: update_linkbacks(3)),
        5: ("detect and disable broken linkbacks",
            lambda: update_linkbacks(4)),
        6: ("send notification email",
            lambda: send_pending_linkbacks_notification(
                CFG_WEBLINKBACK_TYPE['TRACKBACK'])),
    }
    entry = dispatch.get(int(mode))
    # Unknown modes are silently ignored, matching the original chain.
    if entry is not None:
        description, action = entry
        write_message("Starting to %s" % description)
        action()
        write_message("Completed to %s" % description)
| gpl-2.0 |
pannal/Subliminal.bundle | Contents/Libraries/Shared/chardet/jisfreq.py | 342 | 25777 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials include literature and computer technology
#
# Japanese frequency table, applied to both S-JIS and EUC-JP
# They are sorted in order.
# 128 --> 0.77094
# 256 --> 0.85710
# 512 --> 0.92635
# 1024 --> 0.97130
# 2048 --> 0.99431
#
# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191
#
# Typical Distribution Ratio, 25% of IDR
JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0  # 25% of the ideal ratio (12.58) computed above
# Number of frequency-ranked entries used from the char-to-frequency-order
# table (JIS_CHAR_TO_FREQ_ORDER) below.
JIS_TABLE_SIZE = 4368
JIS_CHAR_TO_FREQ_ORDER = (
40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496
5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512
4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528
5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544
5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560
5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576
5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592
5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608
5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624
5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640
5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656
5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672
3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688
5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704
5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720
5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736
5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752
5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768
5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784
5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800
5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816
5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832
5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848
5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864
5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880
5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912
5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928
5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944
5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960
5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976
5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992
5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488
1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176
861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544
2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800
1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872
1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952
1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192
1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512
)
| mit |
MPBA/pyphysio | pyphysio/estimators/Estimators.py | 2 | 15168 | # coding=utf-8
from __future__ import division
import numpy as _np
from ..BaseEstimator import Estimator as _Estimator
from ..Signal import UnevenlySignal as _UnevenlySignal, EvenlySignal as _EvenlySignal
from ..filters.Filters import IIRFilter as _IIRFilter, DeConvolutionalFilter as _DeConvolutionalFilter, \
ConvolutionalFilter as _ConvolutionalFilter
from ..tools.Tools import SignalRange as _SignalRange, PeakDetection as _PeakDetection, Minima as _Minima, \
PeakSelection as _PeakSelection, Diff as _Diff
__author__ = 'AleB'
# IBI ESTIMATION
class BeatFromBP(_Estimator):
    """
    Identify the beats in a Blood Pulse (BP) signal and compute the IBIs.
    Optimized to identify the percussion peak.
    Optional parameters
    -------------------
    bpm_max : int, (1, 400], default=120
        Maximal expected heart rate (in beats per minute)
    win_pre : float, (0, 1], default=0.25
        Portion (in seconds) to consider before the candidate beat position where to look for the beat
    win_post : float, (0, 1], default=0.05
        Portion (in seconds) to consider after the candidate beat position where to look for the beat
    Returns
    -------
    ibi : UnevenlySignal
        Inter beat interval values at percussion peaks
    Notes
    -----
    Please cite:
    Bizzego, Andrea, and Cesare Furlanello. "DBD-RCO: Derivative Based Detection And Reverse Combinatorial Optimization To Improve Heart Beat Detection For Wearable Devices." bioRxiv (2017): 118943.
    """
    def __init__(self, bpm_max=120, win_pre=.25, win_post=.05):
        # An out-of-range bpm_max only warns (does not raise), keeping the API permissive.
        if not 10 < bpm_max < 400:
            self.warn("Parameter bpm_max out of reasonable range (10, 400)")
        assert 0 < win_pre <= 1, "Window pre peak value should be in (0 and 1]"
        assert 0 < win_post <= 1, "Window post peak value should be in (0 and 1]"
        _Estimator.__init__(self, bpm_max=bpm_max, win_pre=win_pre, win_post=win_post)
    @classmethod
    def algorithm(cls, signal, params):
        fsamp = signal.get_sampling_freq()
        bpm_max = params["bpm_max"]
        # Convert the search windows from seconds to samples.
        win_pre = params["win_pre"] * fsamp
        win_post = params["win_post"] * fsamp
        fmax = bpm_max / 60
        # Minimum time (seconds) between two detected beats.
        refractory = 1 / fmax
        # STAGE 1 - EXTRACT BEAT POSITION SIGNAL
        # filtering
        signal_f = _IIRFilter(fp=1.2 * fmax, fs=3 * fmax, ftype='ellip')(signal)
        # find range for the adaptive peak detection
        delta = 0.5 * _SignalRange(win_len=1.5 / fmax, win_step=1 / fmax)(signal_f)
        #adjust for delta values equal to 0
        # (replace zero thresholds with the smallest positive one so peak
        # detection never gets a degenerate threshold)
        idx_delta_zeros = _np.where(delta==0)[0]
        idx_delta_nozeros = _np.where(delta>0)[0]
        delta[idx_delta_zeros] = _np.min(delta[idx_delta_nozeros])
        # detection of candidate peaks (minima and peak values are discarded)
        maxp, minp, ignored, ignored = _PeakDetection(delta=delta, refractory=refractory, start_max=True)(signal_f)
        # Drop a spurious peak at the very first sample.
        # NOTE(review): raises IndexError when no peak is found at all -- confirm
        # whether empty input should be handled upstream.
        if maxp[0] == 0:
            maxp = maxp[1:]
        # STAGE 2 - IDENTIFY PEAKS using the signal derivative
        # compute the signal derivative
        dxdt = _Diff()(signal)
        true_peaks = []
        # for each candidate peak find the correct peak
        for idx_beat in maxp:
            start_ = int(idx_beat - win_pre)
            if start_ < 0:
                start_ = 0
            stop_ = int(idx_beat + win_post)
            # NOTE(review): stop_ = -1 makes the slices below exclude the last
            # sample (dxdt[start_:-1]); possibly intended as "until the end" -- confirm.
            if stop_ > len(dxdt):
                stop_ = -1
            # select portion of derivative where to search
            obs = dxdt[start_:stop_]
            peak_obs = _np.argmax(obs)
            true_obs = dxdt[start_ + peak_obs: stop_]
            # find the 'first minimum' (zero) the derivative (peak)
            idx_mins, mins = _Minima(win_len=0.1, win_step=0.025, method='windowing')(abs(true_obs))
            if len(idx_mins) >= 1:
                peak = idx_mins[0]
                # +1 offsets the detected index; presumably compensates the
                # derivative shift -- TODO confirm.
                true_peaks.append(start_ + peak_obs + peak + 1)
            else:
                cls.warn('Peak not found; idx_beat: ' + str(idx_beat))
                pass
        # STAGE 3 - FINALIZE computing IBI
        ibi_values = _np.diff(true_peaks) / fsamp
        # Duplicate the first interval so the series has one value per peak.
        ibi_values = _np.r_[ibi_values[0], ibi_values]
        idx_ibi = _np.array(true_peaks)
        ibi = _UnevenlySignal(values=ibi_values,
                              sampling_freq=fsamp,
                              start_time=signal.get_start_time(),
                              signal_type='IBI',
                              x_values=idx_ibi,
                              x_type='indices',
                              duration=signal.get_duration())
        return ibi
class BeatFromECG(_Estimator):
    """
    Detect heart beats in an ECG signal and derive the inter-beat intervals (IBI).

    The detector looks for local maxima that are followed by values lower than
    a ``delta`` threshold; when ``delta`` is 0 an adaptive threshold is
    estimated from the local signal range, scaled by ``k``.

    Optional parameters
    -------------------
    bpm_max : int, (1, 400], default=120
        Maximal expected heart rate (in beats per minute)
    delta : float, >=0, default=0
        Peak-detection threshold; 0 means "estimate adaptively from the signal"
    k : float, (0,1), default=0.7
        Fraction of the local signal range used as adaptive threshold

    Returns
    -------
    ibi : UnevenlySignal
        Inter beat interval values at the detected beats
    """
    def __init__(self, bpm_max=120, delta=0, k=0.7):
        if not (10 < bpm_max < 400):
            self.warn("Parameter bpm_max out of reasonable range (10, 400)")
        assert delta >= 0, "Delta value should be positive (or equal to 0 if automatically computed)"
        assert 0 < k < 1, "K coefficient must be in the range (0,1)"
        _Estimator.__init__(self, bpm_max=bpm_max, delta=delta, k=k)

    @classmethod
    def algorithm(cls, signal, params):
        """Locate the beats in ``signal`` and return the resulting IBI series."""
        bpm_max = params["bpm_max"]
        threshold = params["delta"]
        k = params["k"]
        fmax = bpm_max / 60

        if threshold == 0:
            # Adaptive threshold: fraction k of the local signal range.
            threshold = k * _SignalRange(win_len=2 / fmax, win_step=0.5 / fmax, smooth=False)(signal)
            # Replace zero thresholds with the smallest positive one.
            zero_idx = _np.where(threshold==0)[0]
            positive_idx = _np.where(threshold>0)[0]
            threshold[zero_idx] = _np.min(threshold[positive_idx])

        # Only the maxima positions are used; minima and values are discarded.
        peaks_idx, _minima_idx, _peaks_val, _minima_val = \
            _PeakDetection(delta=threshold, refractory=1 / fmax, start_max=True)(signal)
        # Drop a spurious peak detected at the very first sample.
        if peaks_idx[0] == 0:
            peaks_idx = peaks_idx[1:]

        fsamp = signal.get_sampling_freq()
        intervals = _np.diff(peaks_idx) / fsamp
        # Duplicate the first interval so the series has one value per peak.
        intervals = _np.r_[intervals[0], intervals]

        return _UnevenlySignal(values=intervals,
                               sampling_freq=fsamp,
                               start_time=signal.get_start_time(),
                               signal_type='IBI',
                               x_values=_np.array(peaks_idx),
                               x_type='indices',
                               duration=signal.get_duration())
# PHASIC ESTIMATION
class DriverEstim(_Estimator):
    """
    Estimates the driver of an EDA signal according to (see Notes)
    The estimation uses a deconvolution using a Bateman function as Impulsive Response Function.
    The version of the Bateman function here adopted is:
    :math:`b = e^{-t/T1} - e^{-t/T2}`
    Optional parameters
    -------------------
    t1 : float, >0, default = 0.75
        Value of the T1 parameter of the bateman function
    t2 : float, >0, default = 2
        Value of the T2 parameter of the bateman function
    Returns
    -------
    driver : EvenlySignal
        The EDA driver function
    Notes
    -----
    Please cite:
    """
    #TODO: add citation
    def __init__(self, t1=.75, t2=2):
        assert t1 > 0, "t1 value has to be positive"
        assert t2 > 0, "t2 value has to be positive"
        _Estimator.__init__(self, t1=t1, t2=t2)
    @classmethod
    def algorithm(cls, signal, params):
        t1 = params['t1']
        t2 = params['t2']
        fsamp = signal.get_sampling_freq()
        bateman = DriverEstim._gen_bateman(fsamp, [t1, t2])
        idx_max_bat = _np.argmax(bateman)
        # Prepare the input signal to avoid starting/ending peaks in the driver
        # Each half of the Bateman curve is rescaled to [0, boundary sample]
        # and padded onto the corresponding end of the signal.
        bateman_first_half = bateman[0:idx_max_bat + 1]
        bateman_first_half = signal[0] * (bateman_first_half - _np.min(bateman_first_half)) / (
            _np.max(bateman_first_half) - _np.min(bateman_first_half))
        bateman_second_half = bateman[idx_max_bat:]
        bateman_second_half = signal[-1] * (bateman_second_half - _np.min(bateman_second_half)) / (
            _np.max(bateman_second_half) - _np.min(bateman_second_half))
        signal_in = _np.r_[bateman_first_half, signal.get_values(), bateman_second_half]
        signal_in = _EvenlySignal(signal_in, fsamp)
        # deconvolution
        driver = _DeConvolutionalFilter(irf=bateman, normalize=True, deconv_method='fft')(signal_in)
        # Trim the padding added above so the driver aligns with the input.
        driver = driver[idx_max_bat + 1: idx_max_bat + len(signal)]
        # gaussian smoothing (window of 8 * max(0.2 s, one sample))
        driver = _ConvolutionalFilter(irftype='gauss', win_len=_np.max([0.2, 1 / fsamp]) * 8, normalize=True)(driver)
        driver = _EvenlySignal(driver, sampling_freq=fsamp, start_time=signal.get_start_time(),signal_type="dEDA")
        return driver
    @staticmethod
    def _gen_bateman(fsamp, par_bat):
        """
        Generates the bateman function:
        :math:`b = e^{-t/T1} - e^{-t/T2}`
        Parameters
        ----------
        fsamp : float
            Sampling frequency
        par_bat: list (T1, T2)
            Parameters of the bateman function
        Returns
        -------
        bateman : array
            The bateman function
        """
        idx_T1 = par_bat[0] * fsamp
        idx_T2 = par_bat[1] * fsamp
        # Support of 10 * T2 samples; long enough for the tail to vanish.
        len_bat = idx_T2 * 10
        idx_bat = _np.arange(len_bat)
        # NOTE(review): exponents use T2 and T1 swapped with respect to the
        # docstring formula above — confirm which ordering is intended.
        bateman = _np.exp(-idx_bat / idx_T2) - _np.exp(-idx_bat / idx_T1)
        # normalize
        bateman = fsamp * bateman / _np.sum(bateman)
        return bateman
class PhasicEstim(_Estimator):
    """
    Estimates the phasic and tonic components of a EDA driver function.
    It uses a detection algorithm based on the derivative of the driver.
    Parameters:
    -----------
    delta : float, >0
        Minimum amplitude of the peaks in the driver
    Optional parameters
    -------------------
    grid_size : float, >0, default = 1
        Sampling size of the interpolation grid
    win_pre : float, >0, default = 2
        Duration (in seconds) of interval before the peak where to search the start of the peak
    win_post : float, >0, default = 2
        Duration (in seconds) of interval after the peak where to search the end of the peak
    Returns:
    --------
    phasic : EvenlySignal
        The phasic component
    tonic : EvenlySignal
        The tonic component
    driver_no_peak : EvenlySignal
        The "de-peaked" driver signal used to generate the interpolation grid
    Notes
    -----
    Please cite:
    """
    #TODO: add citation
    def __init__(self, delta, grid_size=1, win_pre=2, win_post=2):
        assert delta > 0, "Delta value has to be positive"
        assert grid_size > 0, "Step of the interpolation grid has to be positive"
        assert win_pre > 0, "Window pre peak value has to be positive"
        assert win_post > 0, "Window post peak value has to be positive"
        _Estimator.__init__(self, delta=delta, grid_size=grid_size, win_pre=win_pre, win_post=win_post)
    @classmethod
    def algorithm(cls, signal, params):
        delta = params["delta"]
        grid_size = params["grid_size"]
        win_pre = params['win_pre']
        win_post = params['win_post']
        fsamp = signal.get_sampling_freq()
        # find peaks in the driver
        idx_max, idx_min, val_max, val_min = _PeakDetection(delta=delta, refractory=1, start_max=True)(signal)
        # identify start and stop of the peak
        idx_pre, idx_post = _PeakSelection(indices=idx_max, win_pre=win_pre, win_post=win_post)(signal)
        # Linear interpolation to substitute the peaks
        driver_no_peak = _np.copy(signal)
        for I in range(len(idx_pre)):
            i_st = idx_pre[I]
            i_sp = idx_post[I]
            # Peaks whose start/stop could not be found are left untouched.
            if not _np.isnan(i_st) and not _np.isnan(i_sp):
                idx_base = _np.arange(i_sp - i_st)
                coeff = (signal[i_sp] - signal[i_st]) / len(idx_base)
                driver_base = idx_base * coeff + signal[i_st]
                driver_no_peak[i_st:i_sp] = driver_base
        # generate the grid for the interpolation
        # The last sample is always appended so the grid covers the signal.
        idx_grid = _np.arange(0, len(driver_no_peak) - 1, grid_size * fsamp)
        idx_grid = _np.r_[idx_grid, len(driver_no_peak) - 1]
        driver_grid = _UnevenlySignal(driver_no_peak[idx_grid], sampling_freq = fsamp, start_time= signal.get_start_time(), signal_type="dEDA",
                                      x_values=idx_grid, x_type='indices', duration=signal.get_duration())
        # Tonic = cubic interpolation through the de-peaked grid;
        # phasic = residual of the driver over the tonic.
        tonic = driver_grid.to_evenly(kind='cubic')
        phasic = signal - tonic
        return phasic, tonic, driver_no_peak
class Energy(_Estimator):
    """
    Estimate the local energy of the signal, by windowing
    Parameters
    ----------
    win_len : float, >0
        Length of the window in seconds
    win_step : float, >0
        Shift of the window to start the next window
    Optional parameters
    -------------------
    smooth : boolean, default = True
        Whether to convolve the result with a gaussian window
    Returns
    -------
    energy : numpy.array
        Local energy
    """
    def __init__(self, win_len, win_step, smooth=True):
        assert win_len > 0, "Window length has to be positive"
        assert win_step > 0, "Window step has to be positive"
        _Estimator.__init__(self, win_len=win_len, win_step=win_step, smooth=smooth)
    @classmethod
    def algorithm(cls, signal, params):
        win_len = params['win_len']
        win_step = params['win_step']
        smooth = params['smooth']
        fsamp = signal.get_sampling_freq()
        # Window length/step converted from seconds to sample counts.
        # NOTE(review): these are floats when win_len * fsamp is not integral;
        # presumably segment_idx handles that — confirm.
        idx_len = win_len * fsamp
        idx_step = win_step * fsamp
        windows = _np.arange(0, len(signal) - idx_len + 1, idx_step)
        # Two extra slots hold duplicated boundary values (filled below).
        energy = _np.empty(len(windows) + 2)
        for i in range(1, len(windows) + 1):
            start = windows[i - 1]
            portion_curr = signal.segment_idx(start, start + idx_len)
            # Mean squared value of the window, ignoring NaNs.
            energy[i] = _np.nanmean(_np.power(portion_curr, 2))
        energy[0] = energy[1]
        energy[-1] = energy[-2]
        # Anchor each energy value at its window center, plus both endpoints.
        idx_interp = _np.r_[0, windows + round(idx_len / 2), len(signal)-1]
        energy_out = _UnevenlySignal(energy, signal.get_sampling_freq(), start_time = signal.get_start_time(), x_values=idx_interp,
                                     x_type='indices', duration=signal.get_duration()).to_evenly('linear')
        if smooth:
            energy_out = _ConvolutionalFilter(irftype='gauss', win_len=2, normalize=True)(energy_out)
        return energy_out
| gpl-3.0 |
Intel-tensorflow/tensorflow | tensorflow/python/saved_model/revived_types.py | 6 | 8394 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Handles types registrations for tf.saved_model.load."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import versions_pb2
from tensorflow.core.protobuf import saved_object_graph_pb2
from tensorflow.python.util.tf_export import tf_export
@tf_export("__internal__.saved_model.load.VersionedTypeRegistration", v1=[])
class VersionedTypeRegistration(object):
  """Describes one version of a revived-object wrapper type."""

  def __init__(self, object_factory, version, min_producer_version,
               min_consumer_version, bad_consumers=None, setter=setattr):
    """Identify a revived type version.

    Args:
      object_factory: Callable mapping a SavedUserObject proto to a trackable
        object. Dependencies are attached afterwards through `setter`.
      version: Integer producer version of this wrapper type. Incompatible
        changes to a wrapper should add a new registration with a higher
        `version`; the newest version is used when saving, and loading picks
        the highest compatible version among registrations sharing an
        identifier.
      min_producer_version: Lowest producer version whose protos this
        registration is allowed to load.
      min_consumer_version: Registrations whose `version` is below this value
        must not load protos saved by this one; such protos are instead
        revived as generic trackable objects. Note that this (like
        `bad_consumers`) is a blunt tool that generally breaks forward
        compatibility — prefer preserving compatibility with previous wrapper
        versions. It is, however, useful for making previously-released buggy
        wrappers degrade gracefully rather than raise.
      bad_consumers: Consumer versions that are incompatible, in addition to
        every version below `min_consumer_version`.
      setter: Callable with the same signature as `setattr`, used to attach
        dependencies to generated objects.
    """
    self.setter = setter
    self.identifier = None  # Stamped later by register_revived_type().
    self.version = version
    self._factory = object_factory
    self._consumer_floor = min_consumer_version
    self._producer_floor = min_producer_version
    self._bad_consumers = [] if bad_consumers is None else bad_consumers

  def to_proto(self):
    """Create a SavedUserObject proto for this wrapper version."""
    # Wrappers currently persist their state purely through dependencies, so
    # the SavedUserObject does not reference the object being saved.
    # TODO(allenl): Add a wrapper which uses its own proto.
    return saved_object_graph_pb2.SavedUserObject(
        identifier=self.identifier,
        version=versions_pb2.VersionDef(
            producer=self.version,
            min_consumer=self._consumer_floor,
            bad_consumers=self._bad_consumers))

  def from_proto(self, proto):
    """Recreate a trackable object from a SavedUserObject proto."""
    return self._factory(proto)

  def should_load(self, proto):
    """Return True when this registration may load the given proto."""
    version_info = proto.version
    return (proto.identifier == self.identifier
            and self.version >= version_info.min_consumer
            and version_info.producer >= self._producer_floor
            and self.version not in version_info.bad_consumers)
# string identifier -> (predicate, [VersionedTypeRegistration])
_REVIVED_TYPE_REGISTRY = {}
# Identifiers in registration order; serialize() checks predicates in this
# order, so earlier registrations take precedence.
_TYPE_IDENTIFIERS = []
@tf_export("__internal__.saved_model.load.register_revived_type", v1=[])
def register_revived_type(identifier, predicate, versions):
  """Register a class of revivable objects under `identifier`.

  Args:
    identifier: Unique string naming this class of objects.
    predicate: Callable taking a trackable object and returning True when the
      registrations in `versions` may be used to save/restore that object.
    versions: List of `VersionedTypeRegistration` objects.
  """
  if not versions:
    raise AssertionError("Need at least one version of a registered type.")
  # Keep the newest version first: saving always uses the highest version,
  # and loading scans for the highest compatible one (respecting the minimum
  # consumer version and the bad-consumer list).
  versions.sort(key=lambda reg: reg.version, reverse=True)
  seen_versions = set()
  for registration in versions:
    # Stamp the identifier so registrations can generate protos.
    registration.identifier = identifier
    if registration.version in seen_versions:
      raise AssertionError(
          "Got multiple registrations with version {} for type {}".format(
              registration.version, identifier))
    seen_versions.add(registration.version)
  # TODO(kathywu): Remove the "optimizer" special case here after the Keras
  # repo optimizer registration has been submitted.
  if identifier in _REVIVED_TYPE_REGISTRY and identifier != "optimizer":
    raise AssertionError(
        "Duplicate registrations for type {}".format(identifier))
  _REVIVED_TYPE_REGISTRY[identifier] = (predicate, versions)
  _TYPE_IDENTIFIERS.append(identifier)
def serialize(obj):
  """Create a SavedUserObject from a trackable object."""
  for type_identifier in _TYPE_IDENTIFIERS:
    matches, registrations = _REVIVED_TYPE_REGISTRY[type_identifier]
    if not matches(obj):
      continue
    # Registrations are sorted newest-first; serialize with the newest.
    return registrations[0].to_proto()
  return None
def deserialize(proto):
  """Create a trackable object from a SavedUserObject proto.

  Args:
    proto: A SavedUserObject to deserialize.

  Returns:
    A `(trackable, assignment_fn)` pair where `assignment_fn` has the same
    signature as `setattr` and should be used to attach dependencies to
    `trackable` as they become available, or None when no compatible
    registration exists for the proto.
  """
  lookup = _REVIVED_TYPE_REGISTRY.get(proto.identifier, (None, None))
  registrations = lookup[1]
  if registrations is None:
    return None
  for registration in registrations:
    if registration.should_load(proto):
      return (registration.from_proto(proto), registration.setter)
  return None
@tf_export("__internal__.saved_model.load.registered_identifiers", v1=[])
def registered_identifiers():
  """Return all the current registered revived object identifiers.
  Returns:
    A set of strings.
  """
  # NOTE: this is the registry's live key view, not a copy; it reflects
  # registrations made after this call.
  return _REVIVED_TYPE_REGISTRY.keys()
@tf_export("__internal__.saved_model.load.get_setter", v1=[])
def get_setter(proto):
  """Look up the dependency-setter function for a SavedUserObject proto.

  See VersionedTypeRegistration for info about the setter function.

  Args:
    proto: SavedUserObject proto
  Returns:
    The registered setter callable, or None when no registration matches.
  """
  _, registrations = _REVIVED_TYPE_REGISTRY.get(
      proto.identifier, (None, None))
  for registration in registrations or ():
    if registration.should_load(proto):
      return registration.setter
  return None
| apache-2.0 |
rlustin/letsencrypt | letsencrypt-compatibility-test/letsencrypt_compatibility_test/configurators/apache/common.py | 26 | 10466 | """Provides a common base for Apache proxies"""
import re
import os
import subprocess
import mock
import zope.interface
from letsencrypt import configuration
from letsencrypt import errors as le_errors
from letsencrypt_apache import configurator
from letsencrypt_compatibility_test import errors
from letsencrypt_compatibility_test import interfaces
from letsencrypt_compatibility_test import util
from letsencrypt_compatibility_test.configurators import common as configurators_common
# Matches e.g. "Apache/2.4.7" in `apachectl -v` output; group 1 is the version.
APACHE_VERSION_REGEX = re.compile(r"Apache/([0-9\.]*)", re.IGNORECASE)
# Command prefixes that must run inside the Apache docker image, not locally.
APACHE_COMMANDS = ["apachectl", "a2enmod", "a2dismod"]
class Proxy(configurators_common.Proxy):
    # pylint: disable=too-many-instance-attributes
    """A common base for Apache test configurators"""
    zope.interface.implements(interfaces.IConfiguratorProxy)
    def __init__(self, args):
        """Initializes the plugin with the given command line args"""
        super(Proxy, self).__init__(args)
        self.le_config.apache_le_vhost_ext = "-le-ssl.conf"
        self._setup_mock()
        # Per-configuration state; populated by load_config().
        self.modules = self.server_root = self.test_conf = self.version = None
        self._apache_configurator = self._all_names = self._test_names = None
    def _setup_mock(self):
        """Replaces specific modules with mock.MagicMock"""
        # Route all subprocess usage through this proxy so Apache commands
        # can be redirected into the running docker image (see check_call
        # and popen below).
        mock_subprocess = mock.MagicMock()
        mock_subprocess.check_call = self.check_call
        mock_subprocess.Popen = self.popen
        mock.patch(
            "letsencrypt_apache.configurator.subprocess",
            mock_subprocess).start()
        mock.patch(
            "letsencrypt_apache.parser.subprocess",
            mock_subprocess).start()
        mock.patch(
            "letsencrypt.le_util.subprocess",
            mock_subprocess).start()
        mock.patch(
            "letsencrypt_apache.configurator.le_util.exe_exists",
            _is_apache_command).start()
        # Interactive vhost selection is impossible under test; make it fail
        # loudly instead of blocking on user input.
        patch = mock.patch(
            "letsencrypt_apache.configurator.display_ops.select_vhost")
        mock_display = patch.start()
        mock_display.side_effect = le_errors.PluginError(
            "Unable to determine vhost")
    def check_call(self, command, *args, **kwargs):
        """If command is an Apache command, command is executed in the
        running docker image. Otherwise, subprocess.check_call is used.
        """
        if _is_apache_command(command):
            command = _modify_command(command)
            return super(Proxy, self).check_call(command, *args, **kwargs)
        else:
            return subprocess.check_call(command, *args, **kwargs)
    def popen(self, command, *args, **kwargs):
        """If command is an Apache command, command is executed in the
        running docker image. Otherwise, subprocess.Popen is used.
        """
        if _is_apache_command(command):
            command = _modify_command(command)
            return super(Proxy, self).popen(command, *args, **kwargs)
        else:
            return subprocess.Popen(command, *args, **kwargs)
    def __getattr__(self, name):
        """Wraps the Apache Configurator methods"""
        # Only called for attributes not found normally; delegates to the
        # wrapped ApacheConfigurator so this proxy quacks like one.
        method = getattr(self._apache_configurator, name, None)
        if callable(method):
            return method
        else:
            raise AttributeError()
    def load_config(self):
        """Loads the next configuration for the plugin to test"""
        # Stop the previously loaded Apache instance, if any.
        if hasattr(self.le_config, "apache_init_script"):
            try:
                self.check_call([self.le_config.apache_init_script, "stop"])
            except errors.Error:
                raise errors.Error(
                    "Failed to stop previous apache config from running")
        config = super(Proxy, self).load_config()
        self.modules = _get_modules(config)
        self.version = _get_version(config)
        self._all_names, self._test_names = _get_names(config)
        server_root = _get_server_root(config)
        # The "config_file" file holds the main config path relative to the
        # server root.
        with open(os.path.join(config, "config_file")) as f:
            config_file = os.path.join(server_root, f.readline().rstrip())
        self.test_conf = _create_test_conf(server_root, config_file)
        self.preprocess_config(server_root)
        self._prepare_configurator(server_root, config_file)
        # Start Apache once to verify the config is loadable before testing.
        try:
            self.check_call("apachectl -d {0} -f {1} -k start".format(
                server_root, config_file))
        except errors.Error:
            raise errors.Error(
                "Apache failed to load {0} before tests started".format(
                    config))
        return config
    def preprocess_config(self, server_root):
        # pylint: disable=anomalous-backslash-in-string, no-self-use
        """Prepares the configuration for use in the Docker"""
        # Rewrite every file under server_root in place so paths, certs and
        # modules match the docker image layout.
        find = subprocess.Popen(
            ["find", server_root, "-type", "f"],
            stdout=subprocess.PIPE)
        subprocess.check_call([
            "xargs", "sed", "-e", "s/DocumentRoot.*/DocumentRoot "
            "\/usr\/local\/apache2\/htdocs/I",
            "-e", "s/SSLPassPhraseDialog.*/SSLPassPhraseDialog builtin/I",
            "-e", "s/TypesConfig.*/TypesConfig "
            "\/usr\/local\/apache2\/conf\/mime.types/I",
            "-e", "s/LoadModule/#LoadModule/I",
            "-e", "s/SSLCertificateFile.*/SSLCertificateFile "
            "\/usr\/local\/apache2\/conf\/empty_cert.pem/I",
            "-e", "s/SSLCertificateKeyFile.*/SSLCertificateKeyFile "
            "\/usr\/local\/apache2\/conf\/rsa1024_key2.pem/I",
            "-i"], stdin=find.stdout)
    def _prepare_configurator(self, server_root, config_file):
        """Prepares the Apache plugin for testing"""
        self.le_config.apache_server_root = server_root
        self.le_config.apache_ctl = "apachectl -d {0} -f {1}".format(
            server_root, config_file)
        self.le_config.apache_enmod = "a2enmod.sh {0}".format(server_root)
        self.le_config.apache_dismod = "a2dismod.sh {0}".format(server_root)
        self.le_config.apache_init_script = self.le_config.apache_ctl + " -k"
        self._apache_configurator = configurator.ApacheConfigurator(
            config=configuration.NamespaceConfig(self.le_config),
            name="apache")
        self._apache_configurator.prepare()
    def cleanup_from_tests(self):
        """Performs any necessary cleanup from running plugin tests"""
        super(Proxy, self).cleanup_from_tests()
        # Undo every mock.patch started in _setup_mock().
        mock.patch.stopall()
    def get_all_names_answer(self):
        """Returns the set of domain names that the plugin should find"""
        if self._all_names:
            return self._all_names
        else:
            raise errors.Error("No configuration file loaded")
    def get_testable_domain_names(self):
        """Returns the set of domain names that can be tested against"""
        if self._test_names:
            return self._test_names
        else:
            # Fallback used before any config has been loaded.
            return {"example.com"}
    def deploy_cert(self, domain, cert_path, key_path, chain_path=None):
        """Installs cert"""
        # Certs/keys must live somewhere the docker image can see them.
        cert_path, key_path, chain_path = self.copy_certs_and_keys(
            cert_path, key_path, chain_path)
        self._apache_configurator.deploy_cert(
            domain, cert_path, key_path, chain_path)
def _is_apache_command(command):
    """Returns true if command invokes one of the Apache tools."""
    program = command[0] if isinstance(command, list) else command
    return any(program.startswith(name) for name in APACHE_COMMANDS)
def _modify_command(command):
"""Modifies command so configtest works inside the docker image"""
if isinstance(command, list):
for i in xrange(len(command)):
if command[i] == "configtest":
command[i] = "-t"
else:
command = command.replace("configtest", "-t")
return command
def _create_test_conf(server_root, apache_config):
    """Creates an empty test config file and includes it from apache_config."""
    test_conf = os.path.join(server_root, "test.conf")
    # Create (or truncate) the file so Apache's Include directive resolves.
    with open(test_conf, "w"):
        pass
    subprocess.check_call(
        ["sed", "-i", "1iInclude test.conf", apache_config])
    return test_conf
def _get_server_root(config):
"""Returns the server root directory in config"""
subdirs = [
name for name in os.listdir(config)
if os.path.isdir(os.path.join(config, name))]
if len(subdirs) != 1:
errors.Error("Malformed configuration directiory {0}".format(config))
return os.path.join(config, subdirs[0].rstrip())
def _get_names(config):
    """Returns all and testable domain names in config.

    Reads the "vhosts" file: indented lines describe entries of a specific
    vhost (aliases and port-80 names), non-indented lines describe the
    vhosts themselves. Returns (all_names, non_ip_names).
    """
    all_names = set()
    non_ip_names = set()
    with open(os.path.join(config, "vhosts")) as f:
        for line in f:
            # If parsing a specific vhost
            if line[0].isspace():
                words = line.split()
                if words[0] == "alias":
                    all_names.add(words[1])
                    non_ip_names.add(words[1])
                # If for port 80 and not IP vhost
                elif words[1] == "80" and not util.IP_REGEX.match(words[3]):
                    all_names.add(words[3])
                    non_ip_names.add(words[3])
            elif "NameVirtualHost" not in line:
                words = line.split()
                # NOTE(review): due to operator precedence this reads as
                # endswith("*") OR (endswith("80") AND not-IP AND has-dot),
                # so "*" vhosts bypass the IP/dot filters — confirm whether
                # parentheses were intended around the first two tests.
                if (words[0].endswith("*") or words[0].endswith("80") and
                        not util.IP_REGEX.match(words[1]) and
                        words[1].find(".") != -1):
                    all_names.add(words[1])
    return all_names, non_ip_names
def _get_modules(config):
"""Returns the list of modules found in module_list"""
modules = []
with open(os.path.join(config, "modules")) as f:
for line in f:
# Modules list is indented, everything else is headers/footers
if line[0].isspace():
words = line.split()
# Modules redundantly end in "_module" which we can discard
modules.append(words[0][:-7])
return modules
def _get_version(config):
    """Return version of Apache Server.

    Version is returned as tuple. (ie. 2.4.7 = (2, 4, 7)). Code taken from
    the Apache plugin.
    """
    with open(os.path.join(config, "version")) as version_file:
        # The version string is expected on the first line only.
        matches = APACHE_VERSION_REGEX.findall(version_file.readline())
        if len(matches) != 1:
            raise errors.Error("Unable to find Apache version")
        return tuple(int(part) for part in matches[0].split("."))
| apache-2.0 |
cr/fxos-certsuite | web-platform-tests/tests/tools/pywebsocket/src/example/echo_noext_wsh.py | 465 | 2404 | # Copyright 2013, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Receiving this text message makes the echo loop close the connection.
_GOODBYE_MESSAGE = u'Goodbye'
def web_socket_do_extra_handshake(request):
    """Reject every extension the client requested.

    pywebsocket parses the Sec-WebSocket-Extensions header into
    request.ws_requested_extensions and builds the corresponding extension
    processors before calling this hook (and never reads the header again).
    Emptying the processor list makes the handshake accept no extensions.
    """
    request.ws_extension_processors = list()
def web_socket_transfer_data(request):
    """Echo. Same as echo_wsh.py."""
    while True:
        line = request.ws_stream.receive_message()
        if line is None:
            # Client closed the connection.
            return
        if isinstance(line, unicode):
            # Text frame: echo back as text; stop after the goodbye message.
            request.ws_stream.send_message(line, binary=False)
            if line == _GOODBYE_MESSAGE:
                return
        else:
            # Binary frame: echo back as binary.
            request.ws_stream.send_message(line, binary=True)
# vi:sts=4 sw=4 et
| mpl-2.0 |
ramaro/pomares | nectar/proto.py | 1 | 5923 | from msgpack import packb, unpackb
from collections import namedtuple
from zlib import compress, decompress
from struct import pack, unpack
import asyncio
from nectar.utils import logger
from concurrent.futures import ThreadPoolExecutor
# Wire message types. Each is a namedtuple serialized as
# (type code, field values...) — see pack_proto/unpack_proto.
Ack = namedtuple('Ack', ('value',))
PubKeyReply = namedtuple('PubKeyReply', ('key',))
# Fixed: previously written as ('tree') — a plain string, which namedtuple
# happens to accept as a single field name; make it a real tuple for
# consistency with the other message types.
PlantTreeRequest = namedtuple('PlantTreeRequest', ('tree',))
IOReadChunkRequest = namedtuple('IOReadChunkRequest',
                                ('filename', 'offset', 'nbytes'))
# message name -> (wire type code, message class)
msg_dict = {'Ack': (1, Ack),
            'PubKeyReply': (2, PubKeyReply),
            'PlantTreeRequest': (3, PlantTreeRequest),
            'IOReadChunkRequest': (4, IOReadChunkRequest),
            }
# Reverse lookup: wire type code -> message class.
msg_dict_rev = {num: cls for num, cls in msg_dict.values()}
class PomaresHandler():
    """Per-connection state plus length-prefixed framing for outgoing data."""
    def __init__(self, transport):
        self.transport = transport
        self.handshaked = False
        self.io_transport = None
    def send_data(self, payload):
        """Prefix payload with its little-endian uint32 size and send it."""
        size = len(payload)
        framed = pack('<I{:d}s'.format(size), size, payload)
        logger.debug('sending payload ({} bytes): {}'.format(size, framed))
        self.transport.write(framed)
class PomaresAdminHandler():
    """Handler for admin connections; sends newline-terminated text lines."""
    def __init__(self, transport):
        self.transport = transport
        self.index_writer = None  # Committed on connection_lost, if set.
    def send_data(self, payload):
        """Encode payload as one text line and write it to the transport."""
        # str.encode() already returns bytes; the previous bytes(...) wrapper
        # made a redundant copy.
        self.transport.write('{}\n'.format(payload).encode())
class PomaresAdminProtocol(asyncio.Protocol):
    """Line-oriented admin protocol: newline-delimited text messages."""
    def __init__(self, payload=None):
        # Optional payload to send as soon as the connection is established.
        self.payload = payload
    def connection_made(self, transport):
        logger.debug('admin connection made')
        self.handler = PomaresAdminHandler(transport)
        self.data_buffer = bytearray()
        self.data_buffer_size = 0
        if self.payload:
            self.handler.send_data(self.payload)
            self.payload = None
    def data_received(self, data):
        logger.debug('received admin data: {}'.format(data))
        # connection is made
        self.data_buffer.extend(data)
        lines = self.data_buffer.splitlines(keepends=True)
        # Bug fix: reset the buffer before routing. Previously, when a chunk
        # ended exactly on a newline, the already-routed lines were left in
        # data_buffer and re-processed on the next data_received call.
        self.data_buffer = bytearray()
        for line in lines:
            if line.endswith(b'\n'):
                self.route(self.handler, line[:-1])
            else:
                # Keep the trailing partial line until more data arrives.
                self.data_buffer = bytearray(line)
    def route(self, handler, msg):
        logger.debug('got admin message: {}'.format(msg))
    def connection_lost(self, exc):
        logger.debug('admin lost connection')
        # commit index writer here
        if self.handler.index_writer:
            self.handler.index_writer.commit()
            logger.debug('(admin handler) committed data in index_writer {}'.
                         format(id(self.handler.index_writer)))
class PomaresProtocol(asyncio.Protocol):
    """Framing protocol: each message is prefixed by a little-endian uint32
    size header (see PomaresHandler.send_data)."""
    def __init__(self, payload=None, handler_type=PomaresHandler):
        self.payload = payload
        self.header_size = 4
        self.handler_type = handler_type
    def connection_made(self, transport):
        self.handler = self.handler_type(transport)
        self.data_buffer = bytearray()
        self.data_buffer_size = 0
        self.msg_size = 0  # Size of the message being assembled; 0 = unknown.
        logger.debug('connection made')
        if self.payload:
            self.handler.send_data(self.payload)
            self.payload = None
    def data_received(self, data):
        logger.debug('received data: {}'.format(data))
        # connection is made
        self.data_buffer.extend(data)
        self.data_buffer_size += len(data)
        # Route every complete message currently buffered. Bug fixes versus
        # the previous implementation: (1) the message body is taken from the
        # accumulated buffer, not from `data` (the latest chunk only), so
        # fragmented messages are no longer corrupted; (2) bytes following a
        # completed message are kept for the next message instead of being
        # discarded.
        while True:
            if (not self.msg_size) and (self.data_buffer_size >= self.header_size):
                self.msg_size = self.encoded_size(self.data_buffer)
                logger.debug('set msg_size to {}'.format(self.msg_size))
            logger.debug('data_buffer_size: {}'.format(self.data_buffer_size))
            logger.debug('msg_size: {}'.format(self.msg_size))
            if (not self.msg_size) or (self.data_buffer_size - self.header_size) < self.msg_size:
                break
            msg_end = self.header_size + self.msg_size
            logger.debug('got a complete msg, call route')
            self.route(self.handler, bytes(self.data_buffer[self.header_size:msg_end]))
            # reset for next msg
            logger.debug('## RESET ##')
            self.msg_size = 0
            self.data_buffer = bytearray(self.data_buffer[msg_end:])
            self.data_buffer_size = len(self.data_buffer)
    def connection_lost(self, exc):
        logger.debug('lost connection')
    def encoded_size(self, data):
        "return size based on header_size (in bytes)"
        return unpack('<I', data[:self.header_size])[0]
    def route(self, handler, msg):
        logger.debug('got message: {}'.format(msg))
class PomaresIOHandler(PomaresHandler):
    # IO connections never handshake and carry no nested IO transport, so the
    # attributes set by the base class are removed entirely.
    def __init__(self, transport):
        super().__init__(transport)
        del self.handshaked
        del self.io_transport
class PomaresIOProtocol(PomaresProtocol):
    # Protocol variant for IO (chunked file transfer) connections; keeps a
    # thread pool for offloading blocking disk work.
    def __init__(self, payload=None, workers=4):
        super().__init__(payload, handler_type=PomaresIOHandler)
        self.executor = ThreadPoolExecutor(max_workers=workers)
    def connection_made(self, transport):
        super().connection_made(transport)
    def route(self, handler, msg):
        logger.debug('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> got message: {}'.format(msg))
def pack_proto(msg):
    """Flatten a message namedtuple into (type code, field values...)."""
    type_code = msg_dict[type(msg).__name__][0]
    return (type_code,) + tuple(msg)
def unpack_proto(msg):
    """Rebuild a message namedtuple from (type code, field values...)."""
    message_cls = msg_dict_rev[msg[0]]
    return message_cls(*msg[1:])
def encode(msg):
    # Message namedtuple -> msgpack bytes.
    return packb(pack_proto(msg))
def decode(msg_buff):
    # msgpack bytes -> message namedtuple.
    return unpack_proto(unpackb(msg_buff))
def compress_buff(buff):
    # zlib-compress a bytes buffer.
    return compress(buff)
def decompress_buff(buff):
    # Inverse of compress_buff.
    return decompress(buff)
# Protocol-level error types.
class EncodeError(Exception):
    pass
class DecodeError(Exception):
    pass
class SetValuesRequestError(Exception):
    pass
class BadHandshake(Exception):
    pass
| mit |
Sankluj/PyWidget | src/test.py | 1 | 2188 | import pyglet
from dialog import Dialog
from button import Button
from vbox import VBox
from hbox import HBox
from slider import Slider
from checkbox import Checkbox
from label import Label
from radiobutton import Radiobutton
# Demo scene: build a resizable window hosting a dialog full of widgets.
window = pyglet.window.Window(resizable=True)

# Three labels used as the selectable entries of the radio-button group.
label1 = Label(text='<font face="Helvetica,Arial" size="2" color=white>First Label</font>',
               x=50, y=50)
label2 = Label(text='<font face="Helvetica,Arial" size="2" color=white>second Label</font>',
               x=50, y=50)
label3 = Label(text='<font face="Helvetica,Arial" size="2" color=white>third Label</font>',
               x=50, y=50)
rbutton = Radiobutton(x=50, y=50, height=90, width=100, elements=[label1, label2, label3])

# Push buttons with HTML-formatted captions.
button1 = Button(text='<font face="Helvetica,Arial" size="2" color="white">Click me 1</font>')
button2 = Button(text='<font face="Helvetica,Arial" size="2" color=white>Click me 2</font>')
button3 = Button(text='<font face="Helvetica,Arial" size="2" color=white>Click me 3</font>')
button4 = Button(text='<font face="Helvetica,Arial" size="2" color=white>Click me 4</font>')
checkbox = Checkbox()
label = Label(text='<font face="Helvetica,Arial" size="2" color=white>Some text</font>')
slider = Slider(x=50, y=50)

# Arrange everything in nested boxes and wrap them in a titled dialog.
vbox = VBox(elements=[
    slider,
    HBox(elements=[button2, button3]),
    button1,
    rbutton,
    HBox(elements=[checkbox, label, button4])])
dialog = Dialog(title='My Dialog', x=100, y=100, content=vbox, width=300, height=160)

# Route window events (mouse/keyboard) to the dialog's handlers.
window.push_handlers(dialog)
@window.event
def on_draw():
    # Clear the frame, then let the dialog render its widget tree.
    window.clear()
    dialog.on_draw()
# Demo event handlers: each widget event just prints a line.  Note the
# repeated names (on_button_press, on_value_change): each @<widget>.event
# decorator presumably registers the function at definition time, so the
# later definitions shadowing the earlier module-level names should not
# unregister the earlier handlers -- TODO confirm against PyWidget's
# event dispatch.
@rbutton.event
def on_Radiobutton_press(radiobutton):
    print 'change'


@slider.event
def on_value_change(slider):
    print 'Value change : ', round(slider.value, 2)


@button1.event
def on_button_press(button):
    print 'Button 1'


@button2.event
def on_button_press(button):
    print 'Button 2'


@button3.event
def on_button_press(button):
    print 'Button 3'


@button4.event
def on_button_press(button):
    print 'Button 4'


@checkbox.event
def on_value_change(checkbox):
    print 'Checkbox : ', checkbox.checked


# Enter the pyglet main loop (blocks until the window is closed).
pyglet.app.run()
grantCelley/Todo | main.py | 1 | 1347 | import sys
def saveData(data):
    """Write every entry in *data* to list.txt, one entry per line.

    Args:
        data: list of strings (the to-do items).  An empty list produces
            an empty file, matching the original behaviour.
    """
    # '\n'.join puts exactly one separator between entries.  The original
    # hand-rolled loop compared items with `is` against data[0], which
    # would silently drop a newline whenever the first item *object*
    # reappeared later in the list, and it shadowed the builtin `str`.
    with open("list.txt", mode="w", encoding="utf-8") as my_file:
        my_file.write("\n".join(data))
def readData():
    """Return the saved to-do items from list.txt as a list of strings.

    Returns:
        The file's contents split on newlines, or an empty list when the
        file does not exist.  (The original placed ``return []`` after the
        ``with`` block's return, making it unreachable: a missing file
        raised FileNotFoundError instead of returning the intended [].)
    """
    try:
        with open("list.txt", mode="r", encoding="utf-8") as my_file:
            # Note: an empty file yields [""], same as the original split.
            return my_file.read().split("\n")
    except FileNotFoundError:
        return []
def main():
    """Run the interactive checklist loop.

    Repeatedly shows the saved items, then accepts:
      * a number -- mark that item completed (remove it),
      * 'a'      -- add a new item,
      * 'q'      -- quit.
    The list is re-read from and saved back to list.txt on every pass.
    """
    print("Hello welcome to the checklist program\n")
    running = True  # cleared when the user quits
    while running:
        print("\n\n")
        # Re-read each pass so the display always matches the file.
        items = readData()
        # Display each item with its index so the user can pick a number.
        for index in range(len(items)):
            print(index, ': ', items[index])
        print("press the number to say you compleated the action")
        print("press a to add a new item")
        print('press q to quit')
        answer = input()
        # BUG FIX: the original compared strings with `is`, which only
        # works by accident of CPython small-string interning; `==` is
        # the correct equality test.
        if answer == 'q':
            running = False
        elif answer == 'a':
            print("What would you like to add to your to do list?")
            items.append(input())
        else:
            try:
                index = int(answer)
                # `del` removes exactly the chosen position; the original
                # list.remove(list[i]) deleted the *first* equal entry and
                # so removed the wrong one when duplicates existed.
                del items[index]
            except (ValueError, IndexError) as error:
                print("what you entered did not work")
                print(error)
        # Persist after every command.
        saveData(items)


if __name__ == '__main__':
    # Guarded so importing this module does not start the interactive loop.
    main()
| gpl-3.0 |
hejuna/bite-project | tools/bugs/server/appengine/kind.py | 17 | 1205 | # Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define the kinds of data returned from the bugs tool."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
from common.util import class_attr
class Kind(object):
  """Define kind keywords for the bugs tool."""

  # Each constant is the wire identifier for one result type returned
  # by the bugs tool ('bugs#<type>' naming scheme).
  BUG = 'bugs#bug'
  ID = 'bugs#id'
  URL_BUG_MAP = 'bugs#url-bug-map'
  URLS = 'bugs#urls'
_ALL_KINDS = class_attr.GetPODAttrsValue(Kind)
def IsValid(value):
  """Tell whether the given value names a known bugs-tool kind.

  Args:
    value: The candidate kind to test. (string)

  Returns:
    True when value matches one of the Kind constants, else False. (boolean)
  """
  known_kinds = _ALL_KINDS
  return value in known_kinds
| apache-2.0 |
maniteja123/scipy | scipy/optimize/tests/test__numdiff.py | 38 | 17617 | from __future__ import division
import math
from itertools import product
import numpy as np
from numpy.testing import (assert_raises, assert_allclose, assert_equal,
assert_, run_module_suite)
from scipy.sparse import csr_matrix, csc_matrix, lil_matrix
from scipy.optimize._numdiff import (
_adjust_scheme_to_bounds, approx_derivative, check_derivative,
group_columns)
def test_group_columns():
    """Column grouping must be format-independent and repeatable."""
    structure = [
        [1, 1, 0, 0, 0, 0],
        [1, 1, 1, 0, 0, 0],
        [0, 1, 1, 1, 0, 0],
        [0, 0, 1, 1, 1, 0],
        [0, 0, 0, 1, 1, 1],
        [0, 0, 0, 0, 1, 1],
        [0, 0, 0, 0, 0, 0]
    ]
    # (column order, expected group assignment) pairs.
    cases = [
        (np.arange(6), np.array([0, 1, 2, 0, 1, 2])),
        ([1, 2, 4, 3, 5, 0], np.array([2, 0, 1, 2, 0, 1])),
    ]
    for convert in [np.asarray, csr_matrix, csc_matrix, lil_matrix]:
        A = convert(structure)
        for order, expected in cases:
            assert_equal(group_columns(A, order), expected)
        # Calling twice with the default order must give identical groups.
        assert_equal(group_columns(A), group_columns(A))
class TestAdjustSchemeToBounds(object):
    """Tests of _adjust_scheme_to_bounds (step adjustment near bounds)."""

    def test_no_bounds(self):
        # With infinite bounds the requested steps must pass through
        # unchanged for both 1- and 2-sided schemes.
        x0 = np.zeros(3)
        h = np.ones(3) * 1e-2
        inf_lower = np.empty_like(x0)
        inf_upper = np.empty_like(x0)
        inf_lower.fill(-np.inf)
        inf_upper.fill(np.inf)

        h_adjusted, one_sided = _adjust_scheme_to_bounds(
            x0, h, 1, '1-sided', inf_lower, inf_upper)
        assert_allclose(h_adjusted, h)
        assert_(np.all(one_sided))

        h_adjusted, one_sided = _adjust_scheme_to_bounds(
            x0, h, 2, '1-sided', inf_lower, inf_upper)
        assert_allclose(h_adjusted, h)
        assert_(np.all(one_sided))

        h_adjusted, one_sided = _adjust_scheme_to_bounds(
            x0, h, 1, '2-sided', inf_lower, inf_upper)
        assert_allclose(h_adjusted, h)
        assert_(np.all(~one_sided))

        h_adjusted, one_sided = _adjust_scheme_to_bounds(
            x0, h, 2, '2-sided', inf_lower, inf_upper)
        assert_allclose(h_adjusted, h)
        assert_(np.all(~one_sided))

    def test_with_bound(self):
        # x0 close enough to a bound that some steps must flip sign.
        x0 = np.array([0.0, 0.85, -0.85])
        lb = -np.ones(3)
        ub = np.ones(3)
        h = np.array([1, 1, -1]) * 1e-1

        h_adjusted, _ = _adjust_scheme_to_bounds(x0, h, 1, '1-sided', lb, ub)
        assert_allclose(h_adjusted, h)

        h_adjusted, _ = _adjust_scheme_to_bounds(x0, h, 2, '1-sided', lb, ub)
        assert_allclose(h_adjusted, np.array([1, -1, 1]) * 1e-1)

        h_adjusted, one_sided = _adjust_scheme_to_bounds(
            x0, h, 1, '2-sided', lb, ub)
        assert_allclose(h_adjusted, np.abs(h))
        assert_(np.all(~one_sided))

        h_adjusted, one_sided = _adjust_scheme_to_bounds(
            x0, h, 2, '2-sided', lb, ub)
        assert_allclose(h_adjusted, np.array([1, -1, 1]) * 1e-1)
        assert_equal(one_sided, np.array([False, True, True]))

    def test_tight_bounds(self):
        # Interval too narrow for the requested steps: they must shrink
        # and/or switch to one-sided differences.
        lb = np.array([-0.03, -0.03])
        ub = np.array([0.05, 0.05])
        x0 = np.array([0.0, 0.03])
        h = np.array([-0.1, -0.1])

        h_adjusted, _ = _adjust_scheme_to_bounds(x0, h, 1, '1-sided', lb, ub)
        assert_allclose(h_adjusted, np.array([0.05, -0.06]))

        h_adjusted, _ = _adjust_scheme_to_bounds(x0, h, 2, '1-sided', lb, ub)
        assert_allclose(h_adjusted, np.array([0.025, -0.03]))

        h_adjusted, one_sided = _adjust_scheme_to_bounds(
            x0, h, 1, '2-sided', lb, ub)
        assert_allclose(h_adjusted, np.array([0.03, -0.03]))
        assert_equal(one_sided, np.array([False, True]))

        h_adjusted, one_sided = _adjust_scheme_to_bounds(
            x0, h, 2, '2-sided', lb, ub)
        assert_allclose(h_adjusted, np.array([0.015, -0.015]))
        assert_equal(one_sided, np.array([False, True]))
class TestApproxDerivativesDense(object):
    """Tests of approx_derivative with dense difference schemes.

    The fun_* methods are test functions and the matching jac_* methods
    their analytical Jacobians.  Each test compares '2-point', '3-point'
    (the default) and complex-step ('cs') estimates against the analytical
    values, with tolerances loosened for the lower-order schemes.
    """

    def fun_scalar_scalar(self, x):
        return np.sinh(x)

    def jac_scalar_scalar(self, x):
        return np.cosh(x)

    def fun_scalar_vector(self, x):
        return np.array([x[0]**2, np.tan(x[0]), np.exp(x[0])])

    def jac_scalar_vector(self, x):
        return np.array(
            [2 * x[0], np.cos(x[0]) ** -2, np.exp(x[0])]).reshape(-1, 1)

    def fun_vector_scalar(self, x):
        return np.sin(x[0] * x[1]) * np.log(x[0])

    def wrong_dimensions_fun(self, x):
        # Deliberately returns a 2-D result for scalar input; used below to
        # verify approx_derivative's dimension error handling.
        return np.array([x**2, np.tan(x), np.exp(x)])

    def jac_vector_scalar(self, x):
        return np.array([
            x[1] * np.cos(x[0] * x[1]) * np.log(x[0]) +
            np.sin(x[0] * x[1]) / x[0],
            x[0] * np.cos(x[0] * x[1]) * np.log(x[0])
        ])

    def fun_vector_vector(self, x):
        return np.array([
            x[0] * np.sin(x[1]),
            x[1] * np.cos(x[0]),
            x[0] ** 3 * x[1] ** -0.5
        ])

    def jac_vector_vector(self, x):
        return np.array([
            [np.sin(x[1]), x[0] * np.cos(x[1])],
            [-x[1] * np.sin(x[0]), np.cos(x[0])],
            [3 * x[0] ** 2 * x[1] ** -0.5, -0.5 * x[0] ** 3 * x[1] ** -1.5]
        ])

    def fun_parametrized(self, x, c0, c1=1.0):
        return np.array([np.exp(c0 * x[0]), np.exp(c1 * x[1])])

    def jac_parametrized(self, x, c0, c1=0.1):
        return np.array([
            [c0 * np.exp(c0 * x[0]), 0],
            [0, c1 * np.exp(c1 * x[1])]
        ])

    def fun_with_nan(self, x):
        # Finite only in a tiny neighbourhood of 0; used to verify that
        # step adjustment keeps all evaluations inside the bounds.
        return x if np.abs(x) <= 1e-8 else np.nan

    def jac_with_nan(self, x):
        return 1.0 if np.abs(x) <= 1e-8 else np.nan

    def fun_zero_jacobian(self, x):
        return np.array([x[0] * x[1], np.cos(x[0] * x[1])])

    def jac_zero_jacobian(self, x):
        return np.array([
            [x[1], x[0]],
            [-x[1] * np.sin(x[0] * x[1]), -x[0] * np.sin(x[0] * x[1])]
        ])

    def fun_non_numpy(self, x):
        return math.exp(x)

    def jac_non_numpy(self, x):
        return math.exp(x)

    def test_scalar_scalar(self):
        x0 = 1.0
        jac_diff_2 = approx_derivative(self.fun_scalar_scalar, x0,
                                       method='2-point')
        jac_diff_3 = approx_derivative(self.fun_scalar_scalar, x0)
        jac_diff_4 = approx_derivative(self.fun_scalar_scalar, x0,
                                       method='cs')
        jac_true = self.jac_scalar_scalar(x0)
        assert_allclose(jac_diff_2, jac_true, rtol=1e-6)
        assert_allclose(jac_diff_3, jac_true, rtol=1e-9)
        assert_allclose(jac_diff_4, jac_true, rtol=1e-12)

    def test_scalar_vector(self):
        x0 = 0.5
        jac_diff_2 = approx_derivative(self.fun_scalar_vector, x0,
                                       method='2-point')
        jac_diff_3 = approx_derivative(self.fun_scalar_vector, x0)
        jac_diff_4 = approx_derivative(self.fun_scalar_vector, x0,
                                       method='cs')
        jac_true = self.jac_scalar_vector(np.atleast_1d(x0))
        assert_allclose(jac_diff_2, jac_true, rtol=1e-6)
        assert_allclose(jac_diff_3, jac_true, rtol=1e-9)
        assert_allclose(jac_diff_4, jac_true, rtol=1e-12)

    def test_vector_scalar(self):
        x0 = np.array([100.0, -0.5])
        jac_diff_2 = approx_derivative(self.fun_vector_scalar, x0,
                                       method='2-point')
        jac_diff_3 = approx_derivative(self.fun_vector_scalar, x0)
        jac_diff_4 = approx_derivative(self.fun_vector_scalar, x0,
                                       method='cs')
        jac_true = self.jac_vector_scalar(x0)
        assert_allclose(jac_diff_2, jac_true, rtol=1e-6)
        assert_allclose(jac_diff_3, jac_true, rtol=1e-7)
        assert_allclose(jac_diff_4, jac_true, rtol=1e-12)

    def test_vector_vector(self):
        x0 = np.array([-100.0, 0.2])
        jac_diff_2 = approx_derivative(self.fun_vector_vector, x0,
                                       method='2-point')
        jac_diff_3 = approx_derivative(self.fun_vector_vector, x0)
        jac_diff_4 = approx_derivative(self.fun_vector_vector, x0,
                                       method='cs')
        jac_true = self.jac_vector_vector(x0)
        assert_allclose(jac_diff_2, jac_true, rtol=1e-5)
        assert_allclose(jac_diff_3, jac_true, rtol=1e-6)
        assert_allclose(jac_diff_4, jac_true, rtol=1e-12)

    def test_wrong_dimensions(self):
        x0 = 1.0
        assert_raises(RuntimeError, approx_derivative,
                      self.wrong_dimensions_fun, x0)
        f0 = self.wrong_dimensions_fun(np.atleast_1d(x0))
        assert_raises(ValueError, approx_derivative,
                      self.wrong_dimensions_fun, x0, f0=f0)

    def test_custom_rel_step(self):
        x0 = np.array([-0.1, 0.1])
        jac_diff_2 = approx_derivative(self.fun_vector_vector, x0,
                                       method='2-point', rel_step=1e-4)
        jac_diff_3 = approx_derivative(self.fun_vector_vector, x0,
                                       rel_step=1e-4)
        jac_true = self.jac_vector_vector(x0)
        # Coarser custom step => looser tolerances than the defaults.
        assert_allclose(jac_diff_2, jac_true, rtol=1e-2)
        assert_allclose(jac_diff_3, jac_true, rtol=1e-4)

    def test_options(self):
        # Exercise f0, args, kwargs, per-variable rel_step and bounds
        # together.
        x0 = np.array([1.0, 1.0])
        c0 = -1.0
        c1 = 1.0
        lb = 0.0
        ub = 2.0
        f0 = self.fun_parametrized(x0, c0, c1=c1)
        rel_step = np.array([-1e-6, 1e-7])
        jac_true = self.jac_parametrized(x0, c0, c1)
        jac_diff_2 = approx_derivative(
            self.fun_parametrized, x0, method='2-point', rel_step=rel_step,
            f0=f0, args=(c0,), kwargs=dict(c1=c1), bounds=(lb, ub))
        jac_diff_3 = approx_derivative(
            self.fun_parametrized, x0, rel_step=rel_step,
            f0=f0, args=(c0,), kwargs=dict(c1=c1), bounds=(lb, ub))
        assert_allclose(jac_diff_2, jac_true, rtol=1e-6)
        assert_allclose(jac_diff_3, jac_true, rtol=1e-9)

    def test_with_bounds_2_point(self):
        lb = -np.ones(2)
        ub = np.ones(2)

        # x0 outside the bounds must be rejected.
        x0 = np.array([-2.0, 0.2])
        assert_raises(ValueError, approx_derivative,
                      self.fun_vector_vector, x0, bounds=(lb, ub))

        x0 = np.array([-1.0, 1.0])
        jac_diff = approx_derivative(self.fun_vector_vector, x0,
                                     method='2-point', bounds=(lb, ub))
        jac_true = self.jac_vector_vector(x0)
        assert_allclose(jac_diff, jac_true, rtol=1e-6)

    def test_with_bounds_3_point(self):
        lb = np.array([1.0, 1.0])
        ub = np.array([2.0, 2.0])

        x0 = np.array([1.0, 2.0])
        jac_true = self.jac_vector_vector(x0)

        jac_diff = approx_derivative(self.fun_vector_vector, x0)
        assert_allclose(jac_diff, jac_true, rtol=1e-9)

        jac_diff = approx_derivative(self.fun_vector_vector, x0,
                                     bounds=(lb, np.inf))
        assert_allclose(jac_diff, jac_true, rtol=1e-9)

        jac_diff = approx_derivative(self.fun_vector_vector, x0,
                                     bounds=(-np.inf, ub))
        assert_allclose(jac_diff, jac_true, rtol=1e-9)

        jac_diff = approx_derivative(self.fun_vector_vector, x0,
                                     bounds=(lb, ub))
        assert_allclose(jac_diff, jac_true, rtol=1e-9)

    def test_tight_bounds(self):
        x0 = np.array([10.0, 10.0])
        lb = x0 - 3e-9
        ub = x0 + 2e-9
        jac_true = self.jac_vector_vector(x0)
        jac_diff = approx_derivative(
            self.fun_vector_vector, x0, method='2-point', bounds=(lb, ub))
        assert_allclose(jac_diff, jac_true, rtol=1e-6)
        jac_diff = approx_derivative(
            self.fun_vector_vector, x0, method='2-point',
            rel_step=1e-6, bounds=(lb, ub))
        assert_allclose(jac_diff, jac_true, rtol=1e-6)
        jac_diff = approx_derivative(
            self.fun_vector_vector, x0, bounds=(lb, ub))
        assert_allclose(jac_diff, jac_true, rtol=1e-6)
        jac_diff = approx_derivative(
            self.fun_vector_vector, x0, rel_step=1e-6, bounds=(lb, ub))
        assert_allclose(jac_true, jac_diff, rtol=1e-6)

    def test_bound_switches(self):
        # fun_with_nan is NaN outside [lb, ub]; a finite result therefore
        # proves every evaluation point stayed inside the bounds.
        lb = -1e-8
        ub = 1e-8
        x0 = 0.0
        jac_true = self.jac_with_nan(x0)
        jac_diff_2 = approx_derivative(
            self.fun_with_nan, x0, method='2-point', rel_step=1e-6,
            bounds=(lb, ub))
        jac_diff_3 = approx_derivative(
            self.fun_with_nan, x0, rel_step=1e-6, bounds=(lb, ub))
        assert_allclose(jac_diff_2, jac_true, rtol=1e-6)
        assert_allclose(jac_diff_3, jac_true, rtol=1e-9)

        x0 = 1e-8
        jac_true = self.jac_with_nan(x0)
        jac_diff_2 = approx_derivative(
            self.fun_with_nan, x0, method='2-point', rel_step=1e-6,
            bounds=(lb, ub))
        jac_diff_3 = approx_derivative(
            self.fun_with_nan, x0, rel_step=1e-6, bounds=(lb, ub))
        assert_allclose(jac_diff_2, jac_true, rtol=1e-6)
        assert_allclose(jac_diff_3, jac_true, rtol=1e-9)

    def test_non_numpy(self):
        x0 = 1.0
        jac_true = self.jac_non_numpy(x0)
        jac_diff_2 = approx_derivative(self.jac_non_numpy, x0,
                                       method='2-point')
        jac_diff_3 = approx_derivative(self.jac_non_numpy, x0)
        assert_allclose(jac_diff_2, jac_true, rtol=1e-6)
        assert_allclose(jac_diff_3, jac_true, rtol=1e-8)

        # math.exp cannot handle complex arguments, hence this raises
        assert_raises(TypeError, approx_derivative, self.jac_non_numpy, x0,
                      **dict(method='cs'))

    def test_check_derivative(self):
        # NOTE(review): each pair of check_derivative calls below is
        # textually identical; the second call looks like it was meant to
        # vary an argument -- confirm against upstream history.
        x0 = np.array([-10.0, 10])
        accuracy = check_derivative(self.fun_vector_vector,
                                    self.jac_vector_vector, x0)
        assert_(accuracy < 1e-9)
        accuracy = check_derivative(self.fun_vector_vector,
                                    self.jac_vector_vector, x0)
        assert_(accuracy < 1e-6)

        x0 = np.array([0.0, 0.0])
        accuracy = check_derivative(self.fun_zero_jacobian,
                                    self.jac_zero_jacobian, x0)
        assert_(accuracy == 0)
        accuracy = check_derivative(self.fun_zero_jacobian,
                                    self.jac_zero_jacobian, x0)
        assert_(accuracy == 0)
class TestApproxDerivativeSparse(object):
    """Tests of approx_derivative with a sparse (banded) Jacobian."""

    # Example from Numerical Optimization 2nd edition, p. 198.
    def __init__(self):
        # NOTE(review): nose-era test class -- fixture state is built in
        # __init__ rather than a setup method.
        np.random.seed(0)
        self.n = 50
        self.lb = -0.1 * (1 + np.arange(self.n))
        self.ub = 0.1 * (1 + np.arange(self.n))
        self.x0 = np.empty(self.n)
        # Alternate points sitting just inside the lower/upper bounds.
        self.x0[::2] = (1 - 1e-7) * self.lb[::2]
        self.x0[1::2] = (1 - 1e-7) * self.ub[1::2]

        self.J_true = self.jac(self.x0)

    def fun(self, x):
        e = x[1:]**3 - x[:-1]**2
        return np.hstack((0, 3 * e)) + np.hstack((2 * e, 0))

    def jac(self, x):
        # Analytical tridiagonal Jacobian of fun().
        n = x.size
        J = np.zeros((n, n))
        J[0, 0] = -4 * x[0]
        J[0, 1] = 6 * x[1]**2
        for i in range(1, n - 1):
            J[i, i - 1] = -6 * x[i-1]
            J[i, i] = 9 * x[i]**2 - 4 * x[i]
            J[i, i + 1] = 6 * x[i+1]**2
        J[-1, -1] = 9 * x[-1]**2
        J[-1, -2] = -6 * x[-2]

        return J

    def structure(self, n):
        # 0/1 sparsity pattern matching jac() above.
        A = np.zeros((n, n), dtype=int)
        A[0, 0] = 1
        A[0, 1] = 1
        for i in range(1, n - 1):
            A[i, i - 1: i + 2] = 1
        A[-1, -1] = 1
        A[-1, -2] = 1

        return A

    def test_all(self):
        A = self.structure(self.n)
        order = np.arange(self.n)
        groups_1 = group_columns(A, order)
        np.random.shuffle(order)
        groups_2 = group_columns(A, order)

        for method, groups, l, u in product(
                ['2-point', '3-point', 'cs'], [groups_1, groups_2],
                [-np.inf, self.lb], [np.inf, self.ub]):
            J = approx_derivative(self.fun, self.x0, method=method,
                                  bounds=(l, u), sparsity=(A, groups))
            assert_(isinstance(J, csr_matrix))
            assert_allclose(J.toarray(), self.J_true, rtol=1e-6)

            rel_step = 1e-8 * np.ones_like(self.x0)
            rel_step[::2] *= -1
            J = approx_derivative(self.fun, self.x0, method=method,
                                  rel_step=rel_step, sparsity=(A, groups))
            assert_allclose(J.toarray(), self.J_true, rtol=1e-5)

    def test_no_precomputed_groups(self):
        A = self.structure(self.n)
        J = approx_derivative(self.fun, self.x0, sparsity=A)
        assert_allclose(J.toarray(), self.J_true, rtol=1e-6)

    def test_equivalence(self):
        # A fully dense "sparsity" pattern must reproduce the dense result.
        structure = np.ones((self.n, self.n), dtype=int)
        groups = np.arange(self.n)
        for method in ['2-point', '3-point', 'cs']:
            J_dense = approx_derivative(self.fun, self.x0, method=method)
            J_sparse = approx_derivative(
                self.fun, self.x0, sparsity=(structure, groups), method=method)
            assert_equal(J_dense, J_sparse.toarray())

    def test_check_derivative(self):
        def jac(x):
            return csr_matrix(self.jac(x))

        accuracy = check_derivative(self.fun, jac, self.x0,
                                    bounds=(self.lb, self.ub))
        assert_(accuracy < 1e-9)

        # NOTE(review): identical repeat of the call above -- possibly
        # intended to vary an argument; confirm against upstream.
        accuracy = check_derivative(self.fun, jac, self.x0,
                                    bounds=(self.lb, self.ub))
        assert_(accuracy < 1e-9)
if __name__ == '__main__':
    # Allow running this test module directly (nose-style runner).
    run_module_suite()
| bsd-3-clause |
cah0211/namebench | libnamebench/data_sources.py | 173 | 15580 | #!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides data sources to use for benchmarking."""
import ConfigParser
import glob
import os
import os.path
import random
import re
import subprocess
import sys
import time
# relative
import addr_util
import selectors
import util
# Pick the most accurate timer for a platform. Stolen from timeit.py:
if sys.platform[:3] == 'win':
DEFAULT_TIMER = time.clock
else:
DEFAULT_TIMER = time.time
GLOBAL_DATA_CACHE = {}
DEFAULT_CONFIG_PATH = 'config/data_sources.cfg'
MAX_NON_UNIQUE_RECORD_COUNT = 500000
MAX_FILE_MTIME_AGE_DAYS = 45
MIN_FILE_SIZE = 10000
MIN_RECOMMENDED_RECORD_COUNT = 200
MAX_FQDN_SYNTHESIZE_PERCENT = 4
class DataSources(object):
"""A collection of methods related to available hostname data sources."""
def __init__(self, config_path=DEFAULT_CONFIG_PATH, status_callback=None):
self.source_cache = GLOBAL_DATA_CACHE
self.source_config = {}
self.status_callback = status_callback
self._LoadConfigFromPath(config_path)
def msg(self, msg, **kwargs):
if self.status_callback:
self.status_callback(msg, **kwargs)
else:
print '- %s' % msg
def _LoadConfigFromPath(self, path):
"""Load a configuration file describing data sources that may be available."""
conf_file = util.FindDataFile(path)
config = ConfigParser.ConfigParser()
config.read(conf_file)
for section in config.sections():
if section not in self.source_config:
self.source_config[section] = {
'name': None,
'search_paths': set(),
'full_hostnames': True,
# Store whether or not this data source contains personal data
'synthetic': False,
'include_duplicates': False,
'max_mtime_days': MAX_FILE_MTIME_AGE_DAYS
}
for (key, value) in config.items(section):
if key == 'name':
self.source_config[section]['name'] = value
elif key == 'full_hostnames' and int(value) == 0:
self.source_config[section]['full_hostnames'] = False
elif key == 'max_mtime_days':
self.source_config[section]['max_mtime_days'] = int(value)
elif key == 'include_duplicates':
self.source_config[section]['include_duplicates'] = bool(value)
elif key == 'synthetic':
self.source_config[section]['synthetic'] = bool(value)
else:
self.source_config[section]['search_paths'].add(value)
def ListSourceTypes(self):
"""Get a list of all data sources we know about."""
return sorted(self.source_config.keys())
def ListSourcesWithDetails(self):
"""Get a list of all data sources found with total counts.
Returns:
List of tuples in form of (short_name, full_name, full_hosts, # of entries)
"""
for source in self.ListSourceTypes():
max_mtime = self.source_config[source]['max_mtime_days']
self._GetHostsFromSource(source, min_file_size=MIN_FILE_SIZE,
max_mtime_age_days=max_mtime)
details = []
for source in self.source_cache:
details.append((source,
self.source_config[source]['name'],
self.source_config[source]['synthetic'],
len(self.source_cache[source])))
return sorted(details, key=lambda x: (x[2], x[3] * -1))
def ListSourceTitles(self):
"""Return a list of sources in title + count format."""
titles = []
seen_synthetic = False
seen_organic = False
for (unused_type, name, is_synthetic, count) in self.ListSourcesWithDetails():
if not is_synthetic:
seen_organic = True
if is_synthetic and seen_organic and not seen_synthetic:
titles.append('-' * 36)
seen_synthetic = True
titles.append('%s (%s)' % (name, count))
return titles
def ConvertSourceTitleToType(self, detail):
"""Convert a detail name to a source type."""
for source_type in self.source_config:
if detail.startswith(self.source_config[source_type]['name']):
return source_type
def GetBestSourceDetails(self):
return self.ListSourcesWithDetails()[0]
def GetNameForSource(self, source):
if source in self.source_config:
return self.source_config[source]['name']
else:
# Most likely a custom file path
return source
def GetCachedRecordCountForSource(self, source):
return len(self.source_cache[source])
def _CreateRecordsFromHostEntries(self, entries, include_duplicates=False):
"""Create records from hosts, removing duplicate entries and IP's.
Args:
entries: A list of test-data entries.
include_duplicates: Whether or not to filter duplicates (optional: False)
Returns:
A tuple of (filtered records, full_host_names (Boolean)
Raises:
ValueError: If no records could be grokked from the input.
"""
last_entry = None
records = []
full_host_count = 0
for entry in entries:
if entry == last_entry and not include_duplicates:
continue
else:
last_entry = entry
if ' ' in entry:
(record_type, host) = entry.split(' ')
else:
record_type = 'A'
host = entry
if not addr_util.IP_RE.match(host) and not addr_util.INTERNAL_RE.search(host):
if not host.endswith('.'):
host += '.'
records.append((record_type, host))
if addr_util.FQDN_RE.match(host):
full_host_count += 1
if not records:
raise ValueError('No records could be created from: %s' % entries)
# Now that we've read everything, are we dealing with domains or full hostnames?
full_host_percent = full_host_count / float(len(records)) * 100
if full_host_percent < MAX_FQDN_SYNTHESIZE_PERCENT:
full_host_names = True
else:
full_host_names = False
return (records, full_host_names)
def GetTestsFromSource(self, source, count=50, select_mode=None):
"""Parse records from source, and return tuples to use for testing.
Args:
source: A source name (str) that has been configured.
count: Number of tests to generate from the source (int)
select_mode: automatic, weighted, random, chunk (str)
Returns:
A list of record tuples in the form of (req_type, hostname)
Raises:
ValueError: If no usable records are found from the data source.
This is tricky because we support 3 types of input data:
- List of domains
- List of hosts
- List of record_type + hosts
"""
records = []
if source in self.source_config:
include_duplicates = self.source_config[source].get('include_duplicates', False)
else:
include_duplicates = False
records = self._GetHostsFromSource(source)
if not records:
raise ValueError('Unable to generate records from %s (nothing found)' % source)
self.msg('Generating tests from %s (%s records, selecting %s %s)'
% (self.GetNameForSource(source), len(records), count, select_mode))
(records, are_records_fqdn) = self._CreateRecordsFromHostEntries(records,
include_duplicates=include_duplicates)
# First try to resolve whether to use weighted or random.
if select_mode in ('weighted', 'automatic', None):
# If we are in include_duplicates mode (cachemiss, cachehit, etc.), we have different rules.
if include_duplicates:
if count > len(records):
select_mode = 'random'
else:
select_mode = 'chunk'
elif len(records) != len(set(records)):
if select_mode == 'weighted':
self.msg('%s data contains duplicates, switching select_mode to random' % source)
select_mode = 'random'
else:
select_mode = 'weighted'
self.msg('Selecting %s out of %s sanitized records (%s mode).' %
(count, len(records), select_mode))
if select_mode == 'weighted':
records = selectors.WeightedDistribution(records, count)
elif select_mode == 'chunk':
records = selectors.ChunkSelect(records, count)
elif select_mode == 'random':
records = selectors.RandomSelect(records, count, include_duplicates=include_duplicates)
else:
raise ValueError('No such final selection mode: %s' % select_mode)
# For custom filenames
if source not in self.source_config:
self.source_config[source] = {'synthetic': True}
if are_records_fqdn:
self.source_config[source]['full_hostnames'] = False
self.msg('%s input appears to be predominantly domain names. Synthesizing FQDNs' % source)
synthesized = []
for (req_type, hostname) in records:
if not addr_util.FQDN_RE.match(hostname):
hostname = self._GenerateRandomHostname(hostname)
synthesized.append((req_type, hostname))
return synthesized
else:
return records
def _GenerateRandomHostname(self, domain):
"""Generate a random hostname f or a given domain."""
oracle = random.randint(0, 100)
if oracle < 70:
return 'www.%s' % domain
elif oracle < 95:
return domain
elif oracle < 98:
return 'static.%s' % domain
else:
return 'cache-%s.%s' % (random.randint(0, 10), domain)
def _GetHostsFromSource(self, source, min_file_size=None, max_mtime_age_days=None):
"""Get data for a particular source. This needs to be fast.
Args:
source: A configured source type (str)
min_file_size: What the minimum allowable file size is for this source (int)
max_mtime_age_days: Maximum days old the file can be for this source (int)
Returns:
list of hostnames gathered from data source.
The results of this function are cached by source type.
"""
if source in self.source_cache:
return self.source_cache[source]
filename = self._FindBestFileForSource(source, min_file_size=min_file_size,
max_mtime_age_days=max_mtime_age_days)
if not filename:
return None
size_mb = os.path.getsize(filename) / 1024.0 / 1024.0
# Minimize our output
if not self.source_config[source]['synthetic']:
self.msg('Reading %s: %s (%0.1fMB)' % (self.GetNameForSource(source), filename, size_mb))
start_clock = DEFAULT_TIMER()
if filename.endswith('.pcap') or filename.endswith('.tcp'):
hosts = self._ExtractHostsFromPcapFile(filename)
else:
hosts = self._ExtractHostsFromHistoryFile(filename)
if not hosts:
hosts = self._ReadDataFile(filename)
duration = DEFAULT_TIMER() - start_clock
if duration > 5:
self.msg('%s data took %1.1fs to read!' % (self.GetNameForSource(source), duration))
self.source_cache[source] = hosts
return hosts
def _ExtractHostsFromHistoryFile(self, path):
"""Get a list of sanitized records from a history file containing URLs."""
# This regexp is fairly general (no ip filtering), since we need speed more
# than precision at this stage.
parse_re = re.compile('https*://([\-\w]+\.[\-\w\.]+)')
return parse_re.findall(open(path, 'rb').read())
def _ExtractHostsFromPcapFile(self, path):
"""Get a list of requests out of a pcap file - requires tcpdump."""
self.msg('Extracting requests from pcap file using tcpdump')
cmd = 'tcpdump -r %s -n port 53' % path
pipe = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout
parse_re = re.compile(' ([A-Z]+)\? ([\-\w\.]+)')
requests = []
for line in pipe:
if '?' not in line:
continue
match = parse_re.search(line)
if match:
requests.append(' '.join(match.groups()))
return requests
def _ReadDataFile(self, path):
"""Read a line-based datafile."""
records = []
domains_re = re.compile('^\w[\w\.-]+[a-zA-Z]$')
requests_re = re.compile('^[A-Z]{1,4} \w[\w\.-]+\.$')
for line in open(path):
if domains_re.match(line) or requests_re.match(line):
records.append(line.rstrip())
return records
def _GetSourceSearchPaths(self, source):
"""Get a list of possible search paths (globs) for a given source."""
# This is likely a custom file path
if source not in self.source_config:
return [source]
search_paths = []
environment_re = re.compile('%(\w+)%')
# First get through resolving environment variables
for path in self.source_config[source]['search_paths']:
env_vars = set(environment_re.findall(path))
if env_vars:
for variable in env_vars:
env_var = os.getenv(variable, False)
if env_var:
path = path.replace('%%%s%%' % variable, env_var)
else:
path = None
# If everything is good, replace all '/' chars with the os path variable.
if path:
path = path.replace('/', os.sep)
search_paths.append(path)
# This moment of weirdness brought to you by Windows XP(tm). If we find
# a Local or Roaming keyword in path, add the other forms to the search
# path.
if sys.platform[:3] == 'win':
keywords = ['Local', 'Roaming']
for keyword in keywords:
if keyword in path:
replacement = keywords[keywords.index(keyword)-1]
search_paths.append(path.replace('\\%s' % keyword, '\\%s' % replacement))
search_paths.append(path.replace('\\%s' % keyword, ''))
return search_paths
def _FindBestFileForSource(self, source, min_file_size=None, max_mtime_age_days=None):
"""Find the best file (newest over X size) to use for a given source type.
Args:
source: source type
min_file_size: What the minimum allowable file size is for this source (int)
max_mtime_age_days: Maximum days old the file can be for this source (int)
Returns:
A file path.
"""
found = []
for path in self._GetSourceSearchPaths(source):
if not os.path.isabs(path):
path = util.FindDataFile(path)
for filename in glob.glob(path):
if min_file_size and os.path.getsize(filename) < min_file_size:
self.msg('Skipping %s (only %sb)' % (filename, os.path.getsize(filename)))
else:
try:
fp = open(filename, 'rb')
fp.close()
found.append(filename)
except IOError:
self.msg('Skipping %s (could not open)' % filename)
if found:
newest = sorted(found, key=os.path.getmtime)[-1]
age_days = (time.time() - os.path.getmtime(newest)) / 86400
if max_mtime_age_days and age_days > max_mtime_age_days:
pass
# self.msg('Skipping %s (%2.0fd old)' % (newest, age_days))
else:
return newest
else:
return None
if __name__ == '__main__':
parser = DataSources()
print parser.ListSourceTypes()
print parser.ListSourcesWithDetails()
best = parser.ListSourcesWithDetails()[0][0]
print len(parser.GetRecordsFromSource(best))
| apache-2.0 |
LearnEra/LearnEraPlaftform | common/test/acceptance/tests/studio/base_studio_test.py | 18 | 3318 | from ...pages.studio.auto_auth import AutoAuthPage
from ...fixtures.course import CourseFixture
from ..helpers import UniqueCourseTest
from ...pages.studio.overview import CourseOutlinePage
from ...pages.studio.utils import verify_ordering
class StudioCourseTest(UniqueCourseTest):
    """
    Base class for all Studio course tests.
    """

    def setUp(self, is_staff=False):
        """
        Install a course with no content using a fixture.

        Subclasses can add content by overriding populate_course_fixture,
        which runs before the fixture is installed.
        """
        super(StudioCourseTest, self).setUp()
        self.course_fixture = CourseFixture(
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
            self.course_info['display_name']
        )
        self.populate_course_fixture(self.course_fixture)
        self.course_fixture.install()
        self.user = self.course_fixture.user
        self.log_in(self.user, is_staff)

    def populate_course_fixture(self, course_fixture):
        """
        Populate the children of the test course fixture.

        Intentionally a no-op hook; subclasses override to add content.
        """
        pass

    def log_in(self, user, is_staff=False):
        """
        Log in as the user that created the course. The user will be given instructor access
        to the course and enrolled in it. By default the user will not have staff access unless
        is_staff is passed as True.
        """
        self.auth_page = AutoAuthPage(
            self.browser,
            staff=is_staff,
            username=user.get('username'),
            email=user.get('email'),
            password=user.get('password')
        )
        self.auth_page.visit()
class ContainerBase(StudioCourseTest):
    """
    Base class for tests that operate on the container page.
    """

    def setUp(self):
        """
        Install the course (via the superclass) and build a page object
        for its outline.
        """
        super(ContainerBase, self).setUp()
        info = self.course_info
        self.outline = CourseOutlinePage(
            self.browser,
            info['org'],
            info['number'],
            info['run'],
        )

    def go_to_nested_container_page(self):
        """
        Navigate from the unit page into its first nested container.
        """
        unit_page = self.go_to_unit_page()
        # Index 0 is the unit page itself; index 1 is the first child xblock.
        return unit_page.xblocks[1].go_to_container()

    def go_to_unit_page(self, section_name='Test Section', subsection_name='Test Subsection', unit_name='Test Unit'):
        """
        Open the unit page reached through the given section/subsection.
        """
        self.outline.visit()
        subsection = self.outline.section(section_name).subsection(subsection_name)
        return subsection.toggle_expand().unit(unit_name).go_to()

    def do_action_and_verify(self, action, expected_ordering):
        """
        Perform ``action`` on the nested container, verify the resulting
        ordering, then reload and verify again to prove persistence.
        """
        container = self.go_to_nested_container_page()
        action(container)
        verify_ordering(self, container, expected_ordering)

        # Reload the page to confirm the change was persisted server-side.
        container = self.go_to_nested_container_page()
        verify_ordering(self, container, expected_ordering)
| agpl-3.0 |
meteorcloudy/tensorflow | tensorflow/contrib/nearest_neighbor/python/ops/nearest_neighbor_ops.py | 79 | 2852 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Wrappers for nearest neighbor operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.util import loader
from tensorflow.python.framework import ops
from tensorflow.python.platform import resource_loader
_nearest_neighbor_ops = loader.load_op_library(
resource_loader.get_path_to_datafile("_nearest_neighbor_ops.so"))
def hyperplane_lsh_probes(point_hyperplane_product,
                          num_tables,
                          num_hyperplanes_per_table,
                          num_probes,
                          name=None):
  """Computes the probing sequence for the hyperplane LSH hash.

  Supports multiprobing: `num_probes` may exceed `num_tables`, in which
  case the same table can be probed more than once. The first `num_tables`
  probes are always the primary hashes, one per table.

  Args:
    point_hyperplane_product: matrix of (unquantized) inner products between
      the hyperplanes and the points to hash; shape is `batch_size` x
      `num_tables * num_hyperplanes_per_table`, one batch element per row.
    num_tables: number of tables to compute probes for.
    num_hyperplanes_per_table: number of hyperplanes per table.
    num_probes: requested number of probes per table.
    name: optional name prefix for the returned tensors.

  Returns:
    probes: matrix of probes, `batch_size` x `num_probes`.
    table_ids: matrix of table ids, `batch_size` x `num_probes`.
  """
  # Thin wrapper: all of the work happens in the native op.
  return _nearest_neighbor_ops.hyperplane_lsh_probes(
      point_hyperplane_product,
      num_tables,
      num_hyperplanes_per_table,
      num_probes,
      name=name)
ops.NotDifferentiable("HyperplaneLSHProbes")
| apache-2.0 |
GiladE/birde | venv/lib/python2.7/site-packages/gunicorn/workers/base.py | 25 | 7331 | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from datetime import datetime
import os
import signal
import sys
import time
from random import randint
from gunicorn import util
from gunicorn.workers.workertmp import WorkerTmp
from gunicorn.reloader import Reloader
from gunicorn.http.errors import (
InvalidHeader, InvalidHeaderName, InvalidRequestLine, InvalidRequestMethod,
InvalidHTTPVersion, LimitRequestLine, LimitRequestHeaders,
)
from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest
from gunicorn.http.wsgi import default_environ, Response
from gunicorn.six import MAXSIZE
class Worker(object):
    """Base class for worker processes; subclasses must implement run()."""

    # All signals this worker resets/handles; see init_signals().
    SIGNALS = [getattr(signal, "SIG%s" % x)
               for x in "ABRT HUP QUIT INT TERM USR1 USR2 WINCH CHLD".split()]

    # Class-level placeholder; replaced per-process with a real os.pipe()
    # in init_process() ("for waking ourselves up").
    PIPE = []

    def __init__(self, age, ppid, sockets, app, timeout, cfg, log):
        """\
        This is called pre-fork so it shouldn't do anything to the
        current process. If there's a need to make process wide
        changes you'll want to do that in ``self.init_process()``.
        """
        self.age = age
        self.ppid = ppid
        self.sockets = sockets
        self.app = app
        self.timeout = timeout
        self.cfg = cfg
        self.booted = False
        self.aborted = False
        self.reloader = None
        self.nr = 0
        # Random jitter is added to max_requests; the expression falls back
        # to MAXSIZE (effectively unlimited) when both terms are zero.
        jitter = randint(0, cfg.max_requests_jitter)
        self.max_requests = cfg.max_requests + jitter or MAXSIZE
        self.alive = True
        self.log = log
        self.tmp = WorkerTmp(cfg)

    def __str__(self):
        return "<Worker %s>" % self.pid

    @property
    def pid(self):
        # Resolved lazily so it is correct after fork.
        return os.getpid()

    def notify(self):
        """\
        Your worker subclass must arrange to have this method called
        once every ``self.timeout`` seconds. If you fail in accomplishing
        this task, the master process will murder your workers.
        """
        self.tmp.notify()

    def run(self):
        """\
        This is the mainloop of a worker process. You should override
        this method in a subclass to provide the intended behaviour
        for your particular evil schemes.
        """
        raise NotImplementedError()

    def init_process(self):
        """\
        If you override this method in a subclass, the last statement
        in the function should be to call this method with
        super(MyWorkerClass, self).init_process() so that the ``run()``
        loop is initiated.
        """
        # start the reloader: on file change, log and send SIGQUIT to self.
        if self.cfg.reload:
            def changed(fname):
                self.log.info("Worker reloading: %s modified", fname)
                os.kill(self.pid, signal.SIGQUIT)
            self.reloader = Reloader(callback=changed).start()

        # set environment' variables from the configuration
        if self.cfg.env:
            for k, v in self.cfg.env.items():
                os.environ[k] = v

        util.set_owner_process(self.cfg.uid, self.cfg.gid)

        # Reseed the random number generator (post-fork, so workers diverge)
        util.seed()

        # For waking ourselves up: instance-level pipe shadows the class attr.
        self.PIPE = os.pipe()
        for p in self.PIPE:
            util.set_non_blocking(p)
            util.close_on_exec(p)

        # Prevent fd inheritance by exec'd children
        [util.close_on_exec(s) for s in self.sockets]
        util.close_on_exec(self.tmp.fileno())

        self.log.close_on_exec()

        self.init_signals()

        self.wsgi = self.app.wsgi()

        self.cfg.post_worker_init(self)

        # Enter main run loop
        self.booted = True
        self.run()

    def init_signals(self):
        # reset signaling: restore defaults for everything in SIGNALS first
        [signal.signal(s, signal.SIG_DFL) for s in self.SIGNALS]
        # init new signaling
        signal.signal(signal.SIGQUIT, self.handle_quit)
        signal.signal(signal.SIGTERM, self.handle_exit)
        signal.signal(signal.SIGINT, self.handle_quit)
        signal.signal(signal.SIGWINCH, self.handle_winch)
        signal.signal(signal.SIGUSR1, self.handle_usr1)
        signal.signal(signal.SIGABRT, self.handle_abort)

        # Don't let SIGTERM and SIGUSR1 disturb active requests
        # by interrupting system calls
        if hasattr(signal, 'siginterrupt'):  # python >= 2.6
            signal.siginterrupt(signal.SIGTERM, False)
            signal.siginterrupt(signal.SIGUSR1, False)

    def handle_usr1(self, sig, frame):
        # USR1: reopen log files (log rotation support).
        self.log.reopen_files()

    def handle_exit(self, sig, frame):
        # TERM: graceful stop -- just flag the loop to finish.
        self.alive = False

    def handle_quit(self, sig, frame):
        # QUIT/INT: stop, fire the worker_int callback, then exit promptly.
        self.alive = False
        # worker_int callback
        self.cfg.worker_int(self)
        time.sleep(0.1)
        sys.exit(0)

    def handle_abort(self, sig, frame):
        # ABRT: immediate abort with non-zero exit status.
        self.alive = False
        self.cfg.worker_abort(self)
        sys.exit(1)

    def handle_error(self, req, client, addr, exc):
        # NOTE(review): request_start is captured here, at the top of the
        # error handler, so request_time below measures only error handling
        # (not the request itself) -- confirm this is intentional.
        request_start = datetime.now()
        addr = addr or ('', -1)  # unix socket case
        # Known parse/protocol errors map to 400 (or 403 for forbidden
        # proxy requests); anything else is a 500.
        if isinstance(exc, (InvalidRequestLine, InvalidRequestMethod,
                InvalidHTTPVersion, InvalidHeader, InvalidHeaderName,
                LimitRequestLine, LimitRequestHeaders,
                InvalidProxyLine, ForbiddenProxyRequest)):

            status_int = 400
            reason = "Bad Request"

            if isinstance(exc, InvalidRequestLine):
                mesg = "Invalid Request Line '%s'" % str(exc)
            elif isinstance(exc, InvalidRequestMethod):
                mesg = "Invalid Method '%s'" % str(exc)
            elif isinstance(exc, InvalidHTTPVersion):
                mesg = "Invalid HTTP Version '%s'" % str(exc)
            elif isinstance(exc, (InvalidHeaderName, InvalidHeader,)):
                mesg = "%s" % str(exc)
                if not req and hasattr(exc, "req"):
                    req = exc.req  # for access log
            elif isinstance(exc, LimitRequestLine):
                mesg = "%s" % str(exc)
            elif isinstance(exc, LimitRequestHeaders):
                mesg = "Error parsing headers: '%s'" % str(exc)
            elif isinstance(exc, InvalidProxyLine):
                mesg = "'%s'" % str(exc)
            elif isinstance(exc, ForbiddenProxyRequest):
                reason = "Forbidden"
                mesg = "Request forbidden"
                status_int = 403

            msg = "Invalid request from ip={ip}: {error}"
            self.log.debug(msg.format(ip=addr[0], error=str(exc)))
        else:
            self.log.exception("Error handling request")

            status_int = 500
            reason = "Internal Server Error"
            mesg = ""

        # Emit an access-log entry for the failed request when we have one.
        if req is not None:
            request_time = datetime.now() - request_start
            environ = default_environ(req, client, self.cfg)
            environ['REMOTE_ADDR'] = addr[0]
            environ['REMOTE_PORT'] = str(addr[1])
            resp = Response(req, client, self.cfg)
            resp.status = "%s %s" % (status_int, reason)
            resp.response_length = len(mesg)
            self.log.access(resp, req, environ, request_time)

        try:
            util.write_error(client, status_int, reason, mesg)
        except:
            # Deliberately swallow everything: the client socket may
            # already be unusable at this point.
            self.log.debug("Failed to send error message.")

    def handle_winch(self, sig, fname):
        # Ignore SIGWINCH in worker. Fixes a crash on OpenBSD.
        return
| mit |
nihilus/processstalker | Python Utilities/ps_idc_gen.py | 2 | 2755 | #!/usr/bin/python
#!c:\python\python.exe
#
# Process Stalker - Python Utility
# Copyright (C) 2005 Pedram Amini <pamini@idefense.com,pedram.amini@gmail.com>
#
# This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this program; if not, write to the Free
# Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# notes:
#
# this utility will parse a GML file enumerating the following information:
#
# - function names
# - comments
#
# an IDC script will be generated that can be used to import changes made from within the graph editor back to the
# IDA database. example usage:
#
# ps_idc_gen.py *.gml > import_changes.idc
#
import sys
sys.path.append(sys.argv[0].rsplit('/', 1)[0] + "/ps_api")
from gml import *
from psx import *
if len(sys.argv) < 2:
sys.stderr.write("usage: ps_idc_gen <file 1> [file 2] [...] > import_changes.idc")
sys.exit(1)
# create the IDC script header.
print "// Process Stalker - IDC Generator"
print "#include <idc.idc>"
print "static main("
print "{"
# step through the input files.
for input_file in sys.argv[1:]:
graph_parser = gml_graph()
try:
graph_parser.parse_file(input_file)
except psx, x:
sys.stderr.write(x.__str__())
sys.exit(1)
# step through each node in the graph.
for i in xrange(graph_parser.num_nodes()):
node = graph_parser.get_node(i)
label = node.get_label_stripped()
#
# extract information from this node label (HTML stripped).
#
# if this node is a function, extract the function name.
if node.get_block() == node.get_function():
function_name = node.get_name()
if not function_name.startswith("sub_"):
" MakeName(0x%s, \"%s\");" % (node.get_function(), function_name)
# extract comments.
for line in label.split("\n"):
if line.count(";"):
matches = re.search("^([a-fA-f0-9]+).*;\s*(.*)", line)
if matches:
(addr, comment) = matches.groups()
print" MakeComm(0x%s, \"%s\");" % (addr, comment)
print "}" | gpl-2.0 |
KASHIHARAAkira/LM60Pi | lm60.py | 1 | 1378 | '''
Copyright 2017 Akira Kashihara
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
#!usr/bin/env python
from MCP3008Pi import mcp3008
class LM60:
    """TI LM60 analog temperature sensor read through an MCP3008 ADC."""

    def __init__(self, bus, device, ch):
        # ADC handle on the given SPI bus/device, plus the channel the
        # LM60 output pin is wired to.
        self.mcp = mcp3008.MCP3008(bus, device)
        self.channel = ch

    def read(self):
        """Return the current temperature reading in degrees Celsius."""
        # Inverse of the LM60 transfer function used here:
        # Vout = 0.00625 V/degC * T + 0.424 V.
        voltage = self.mcp.read(self.channel)
        return (voltage - 0.424) / 0.00625

    def end(self):
        """Release the underlying ADC/SPI device."""
        self.mcp.close()
| mit |
bsmrstu-warriors/Moytri--The-Drone-Aider | Lib/encodings/uu_codec.py | 383 | 3738 | """ Python 'uu_codec' Codec - UU content transfer encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Written by Marc-Andre Lemburg (mal@lemburg.com). Some details were
adapted from uu.py which was written by Lance Ellinghouse and
modified by Jack Jansen and Fredrik Lundh.
"""
import codecs, binascii
### Codec APIs
def uu_encode(input,errors='strict',filename='<data>',mode=0666):
    """ Encodes the object input and returns a tuple (output
        object, length consumed).

        errors defines the error handling to apply. It defaults to
        'strict' handling which is the only currently supported
        error handling for this codec.

        filename and mode (octal permission bits) are embedded in the
        generated "begin" line.
    """
    assert errors == 'strict'
    from cStringIO import StringIO
    from binascii import b2a_uu
    # using str() because of cStringIO's Unicode undesired Unicode behavior.
    infile = StringIO(str(input))
    outfile = StringIO()
    read = infile.read
    write = outfile.write

    # Encode: "begin <mode> <filename>" header, then the body.
    # mode is masked to the permission bits only.
    write('begin %o %s\n' % (mode & 0777, filename))
    # b2a_uu encodes at most 45 bytes per output line, hence chunks of 45.
    chunk = read(45)
    while chunk:
        write(b2a_uu(chunk))
        chunk = read(45)
    # Standard uu trailer: a grave/backtick-equivalent pad line and "end".
    write(' \nend\n')

    return (outfile.getvalue(), len(input))
def uu_decode(input,errors='strict'):
    """ Decodes the object input and returns a tuple (output
        object, length consumed).

        input must be an object which provides the bf_getreadbuf
        buffer slot. Python strings, buffer objects and memory
        mapped files are examples of objects providing this slot.

        errors defines the error handling to apply. It defaults to
        'strict' handling which is the only currently supported
        error handling for this codec.

        Note: filename and file mode information in the input data is
        ignored.
    """
    assert errors == 'strict'
    from cStringIO import StringIO
    from binascii import a2b_uu
    infile = StringIO(str(input))
    outfile = StringIO()
    readline = infile.readline
    write = outfile.write

    # Find start of encoded data: skip everything before the "begin" line.
    while 1:
        s = readline()
        if not s:
            raise ValueError, 'Missing "begin" line in input data'
        if s[:5] == 'begin':
            break

    # Decode line by line until the "end" marker (or EOF, which is an error).
    while 1:
        s = readline()
        if not s or \
           s == 'end\n':
            break
        try:
            data = a2b_uu(s)
        except binascii.Error, v:
            # Workaround for broken uuencoders by /Fredrik Lundh
            # The first byte encodes the payload length; recompute how many
            # input characters that length actually needs and re-decode
            # only that prefix.
            nbytes = (((ord(s[0])-32) & 63) * 4 + 5) / 3
            data = a2b_uu(s[:nbytes])
            #sys.stderr.write("Warning: %s\n" % str(v))
        write(data)
    if not s:
        raise ValueError, 'Truncated input data'

    return (outfile.getvalue(), len(input))
class Codec(codecs.Codec):
    # Stateless codec interface; delegates to the module-level helpers.
    def encode(self,input,errors='strict'):
        return uu_encode(input,errors)
    def decode(self,input,errors='strict'):
        return uu_decode(input,errors)

class IncrementalEncoder(codecs.IncrementalEncoder):
    # uu is not truly incremental: each call re-encodes the whole input.
    def encode(self, input, final=False):
        return uu_encode(input, self.errors)[0]

class IncrementalDecoder(codecs.IncrementalDecoder):
    # Symmetric to IncrementalEncoder: whole-input decode per call.
    def decode(self, input, final=False):
        return uu_decode(input, self.errors)[0]

class StreamWriter(Codec,codecs.StreamWriter):
    pass

class StreamReader(Codec,codecs.StreamReader):
    pass

### encodings module API

def getregentry():
    # Registry entry consumed by the codecs machinery via encodings.search_function.
    return codecs.CodecInfo(
        name='uu',
        encode=uu_encode,
        decode=uu_decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
RockySteveJobs/python-for-android | python-build/python-libs/gdata/build/lib/gdata/tlslite/utils/Python_RSAKey.py | 239 | 7707 | """Pure-Python RSA implementation."""
from cryptomath import *
import xmltools
from ASN1Parser import ASN1Parser
from RSAKey import *
class Python_RSAKey(RSAKey):
    """Pure-Python RSA key (public and/or private) with CRT and blinding."""

    def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
        # n/e must be supplied together (both or neither).
        if (n and not e) or (e and not n):
            raise AssertionError()
        self.n = n
        self.e = e
        self.d = d
        self.p = p
        self.q = q
        self.dP = dP
        self.dQ = dQ
        self.qInv = qInv
        # Blinding state, initialized lazily in _rawPrivateKeyOp.
        self.blinder = 0
        self.unblinder = 0

    def hasPrivateKey(self):
        # Presence of the private exponent marks a private key.
        return self.d != 0

    def hash(self):
        # Hash of the canonical XML public-key serialization.
        s = self.writeXMLPublicKey('\t\t')
        return hashAndBase64(s.strip())

    def _rawPrivateKeyOp(self, m):
        #Create blinding values, on the first pass:
        if not self.blinder:
            self.unblinder = getRandomNumber(2, self.n)
            self.blinder = powMod(invMod(self.unblinder, self.n), self.e,
                                  self.n)

        #Blind the input
        m = (m * self.blinder) % self.n

        #Perform the RSA operation
        c = self._rawPrivateKeyOpHelper(m)

        #Unblind the output
        c = (c * self.unblinder) % self.n

        #Update blinding values (squared each use so they stay consistent)
        self.blinder = (self.blinder * self.blinder) % self.n
        self.unblinder = (self.unblinder * self.unblinder) % self.n

        #Return the output
        return c

    def _rawPrivateKeyOpHelper(self, m):
        #Non-CRT version
        #c = powMod(m, self.d, self.n)

        #CRT version (~3x faster): Garner recombination of the two
        #half-size exponentiations mod p and mod q.
        s1 = powMod(m, self.dP, self.p)
        s2 = powMod(m, self.dQ, self.q)
        h = ((s1 - s2) * self.qInv) % self.p
        c = s2 + self.q * h
        return c

    def _rawPublicKeyOp(self, c):
        m = powMod(c, self.e, self.n)
        return m

    def acceptsPassword(self): return False

    def write(self, indent=''):
        # Serialize as <privateKey>/<publicKey> XML; private CRT parameters
        # are only emitted when a private exponent is present.
        if self.d:
            s = indent+'<privateKey xmlns="http://trevp.net/rsa">\n'
        else:
            s = indent+'<publicKey xmlns="http://trevp.net/rsa">\n'
        s += indent+'\t<n>%s</n>\n' % numberToBase64(self.n)
        s += indent+'\t<e>%s</e>\n' % numberToBase64(self.e)
        if self.d:
            s += indent+'\t<d>%s</d>\n' % numberToBase64(self.d)
            s += indent+'\t<p>%s</p>\n' % numberToBase64(self.p)
            s += indent+'\t<q>%s</q>\n' % numberToBase64(self.q)
            s += indent+'\t<dP>%s</dP>\n' % numberToBase64(self.dP)
            s += indent+'\t<dQ>%s</dQ>\n' % numberToBase64(self.dQ)
            s += indent+'\t<qInv>%s</qInv>\n' % numberToBase64(self.qInv)
            s += indent+'</privateKey>'
        else:
            s += indent+'</publicKey>'
        #Only add \n if part of a larger structure
        if indent != '':
            s += '\n'
        return s

    def writeXMLPublicKey(self, indent=''):
        # Public-only copy so private parameters are never serialized here.
        return Python_RSAKey(self.n, self.e).write(indent)

    def generate(bits):
        # Generate a fresh key with two bits/2 primes (Python 2 int division).
        key = Python_RSAKey()
        p = getRandomPrime(bits/2, False)
        q = getRandomPrime(bits/2, False)
        t = lcm(p-1, q-1)
        key.n = p * q
        key.e = 3L #Needed to be long, for Java
        key.d = invMod(key.e, t)
        key.p = p
        key.q = q
        key.dP = key.d % (p-1)
        key.dQ = key.d % (q-1)
        key.qInv = invMod(q, p)
        return key
    generate = staticmethod(generate)

    def parsePEM(s, passwordCallback=None):
        """Parse a string containing a <privateKey> or <publicKey>, or
        PEM-encoded key."""
        start = s.find("-----BEGIN PRIVATE KEY-----")
        if start != -1:
            end = s.find("-----END PRIVATE KEY-----")
            if end == -1:
                raise SyntaxError("Missing PEM Postfix")
            # NOTE(review): the literal below has a stray space before the
            # trailing dashes, so len() is one larger than the real prefix;
            # one extra character (normally the newline after the header) is
            # skipped. Confirm base64ToBytes tolerates this on all inputs.
            s = s[start+len("-----BEGIN PRIVATE KEY -----") : end]
            bytes = base64ToBytes(s)
            return Python_RSAKey._parsePKCS8(bytes)
        else:
            start = s.find("-----BEGIN RSA PRIVATE KEY-----")
            if start != -1:
                end = s.find("-----END RSA PRIVATE KEY-----")
                if end == -1:
                    raise SyntaxError("Missing PEM Postfix")
                # NOTE(review): same stray-space quirk as above.
                s = s[start+len("-----BEGIN RSA PRIVATE KEY -----") : end]
                bytes = base64ToBytes(s)
                return Python_RSAKey._parseSSLeay(bytes)
        raise SyntaxError("Missing PEM Prefix")
    parsePEM = staticmethod(parsePEM)

    def parseXML(s):
        element = xmltools.parseAndStripWhitespace(s)
        return Python_RSAKey._parseXML(element)
    parseXML = staticmethod(parseXML)

    def _parsePKCS8(bytes):
        p = ASN1Parser(bytes)

        version = p.getChild(0).value[0]
        if version != 0:
            raise SyntaxError("Unrecognized PKCS8 version")

        # OID 1.2.840.113549.1.1.1 (rsaEncryption) with NULL parameters,
        # compared in its DER-encoded byte form.
        rsaOID = p.getChild(1).value
        if list(rsaOID) != [6, 9, 42, 134, 72, 134, 247, 13, 1, 1, 1, 5, 0]:
            raise SyntaxError("Unrecognized AlgorithmIdentifier")

        #Get the privateKey
        privateKeyP = p.getChild(2)

        #Adjust for OCTET STRING encapsulation
        privateKeyP = ASN1Parser(privateKeyP.value)

        return Python_RSAKey._parseASN1PrivateKey(privateKeyP)
    _parsePKCS8 = staticmethod(_parsePKCS8)

    def _parseSSLeay(bytes):
        # SSLeay/PKCS#1 format is the bare RSAPrivateKey structure.
        privateKeyP = ASN1Parser(bytes)
        return Python_RSAKey._parseASN1PrivateKey(privateKeyP)
    _parseSSLeay = staticmethod(_parseSSLeay)

    def _parseASN1PrivateKey(privateKeyP):
        # RSAPrivateKey ::= SEQUENCE { version, n, e, d, p, q, dP, dQ, qInv }
        version = privateKeyP.getChild(0).value[0]
        if version != 0:
            raise SyntaxError("Unrecognized RSAPrivateKey version")
        n = bytesToNumber(privateKeyP.getChild(1).value)
        e = bytesToNumber(privateKeyP.getChild(2).value)
        d = bytesToNumber(privateKeyP.getChild(3).value)
        p = bytesToNumber(privateKeyP.getChild(4).value)
        q = bytesToNumber(privateKeyP.getChild(5).value)
        dP = bytesToNumber(privateKeyP.getChild(6).value)
        dQ = bytesToNumber(privateKeyP.getChild(7).value)
        qInv = bytesToNumber(privateKeyP.getChild(8).value)
        return Python_RSAKey(n, e, d, p, q, dP, dQ, qInv)
    _parseASN1PrivateKey = staticmethod(_parseASN1PrivateKey)

    def _parseXML(element):
        # Accept either <privateKey> or <publicKey> as the root element.
        try:
            xmltools.checkName(element, "privateKey")
        except SyntaxError:
            xmltools.checkName(element, "publicKey")

        #Parse attributes
        xmltools.getReqAttribute(element, "xmlns", "http://trevp.net/rsa\Z")
        xmltools.checkNoMoreAttributes(element)

        #Parse public values (<n> and <e>)
        n = base64ToNumber(xmltools.getText(xmltools.getChild(element, 0, "n"), xmltools.base64RegEx))
        e = base64ToNumber(xmltools.getText(xmltools.getChild(element, 1, "e"), xmltools.base64RegEx))
        d = 0
        p = 0
        q = 0
        dP = 0
        dQ = 0
        qInv = 0

        #Parse private values, if present
        if element.childNodes.length>=3:
            d = base64ToNumber(xmltools.getText(xmltools.getChild(element, 2, "d"), xmltools.base64RegEx))
            p = base64ToNumber(xmltools.getText(xmltools.getChild(element, 3, "p"), xmltools.base64RegEx))
            q = base64ToNumber(xmltools.getText(xmltools.getChild(element, 4, "q"), xmltools.base64RegEx))
            dP = base64ToNumber(xmltools.getText(xmltools.getChild(element, 5, "dP"), xmltools.base64RegEx))
            dQ = base64ToNumber(xmltools.getText(xmltools.getChild(element, 6, "dQ"), xmltools.base64RegEx))
            qInv = base64ToNumber(xmltools.getText(xmltools.getLastChild(element, 7, "qInv"), xmltools.base64RegEx))
        return Python_RSAKey(n, e, d, p, q, dP, dQ, qInv)
    _parseXML = staticmethod(_parseXML)
| apache-2.0 |
felliott/osf.io | framework/auth/signing.py | 6 | 2195 | # encoding: utf-8
import hmac
import json
import time
import base64
import collections
from website import settings
# Written by @jmcarp originally
def order_recursive(data):
    """Recursively sort the keys of *data* and all nested dictionaries.

    Produces a canonical ordering so that JSON serializations of equal
    payloads are byte-for-byte identical. Lists keep their order but their
    elements are canonicalized; any other value is returned unchanged.
    """
    if isinstance(data, dict):
        return collections.OrderedDict(
            (key, order_recursive(data[key])) for key in sorted(data)
        )
    if isinstance(data, list):
        return [order_recursive(item) for item in data]
    return data


def serialize_payload(payload):
    """Canonicalize *payload* and return it as base64-encoded JSON bytes."""
    canonical = order_recursive(payload)
    return base64.b64encode(json.dumps(canonical).encode())


def unserialize_payload(message):
    """Inverse of serialize_payload: decode base64 JSON and re-canonicalize."""
    decoded = json.loads(base64.b64decode(message))
    return order_recursive(decoded)
class Signer(object):
    """Signs and verifies messages/payloads with HMAC.

    ``secret`` is a text secret (UTF-8 encoded to key the HMAC) and
    ``digest`` is a hash constructor such as ``hashlib.sha256``.
    """

    def __init__(self, secret, digest):
        # digest must be a callable hash constructor, not a hash name.
        assert callable(digest)
        self.secret = secret
        self.digest = digest

    def sign_message(self, message):
        """Return the hex-encoded HMAC of raw ``message`` bytes."""
        return hmac.new(
            key=self.secret.encode(),
            digestmod=self.digest,
            msg=message,
        ).hexdigest()

    def sign_payload(self, payload):
        """Canonically serialize ``payload``; return (message, signature)."""
        message = serialize_payload(payload)
        signature = self.sign_message(message)
        return message.decode(), signature

    def verify_message(self, signature, message):
        """Check ``signature`` against ``message`` in constant time.

        SECURITY FIX: this previously compared with ``==``, which
        short-circuits on the first differing character and leaks timing
        information about how much of a forged MAC is correct.
        ``hmac.compare_digest`` is the documented constant-time remedy.
        """
        expected = self.sign_message(message)
        return hmac.compare_digest(expected, signature)

    def verify_payload(self, signature, payload):
        """Constant-time signature check for a structured payload."""
        _, expected = self.sign_payload(payload)
        return hmac.compare_digest(expected, signature)
def sign_data(signer, data, ttl=100):
    """Sign ``data`` together with an expiry timestamp.

    The signed payload carries a 'time' key set to now + ``ttl`` seconds;
    keys in ``data`` take precedence over it (dict.update semantics, as in
    the original). Returns {'payload': <str>, 'signature': <hex str>}.
    """
    target = dict(time=int(time.time() + ttl))
    target.update(data)
    payload, signature = signer.sign_payload(target)
    # sign_payload already decodes, but tolerate a bytes payload anyway.
    if isinstance(payload, bytes):
        payload = payload.decode()
    return {
        'payload': payload,
        'signature': signature,
    }


default_signer = Signer(settings.DEFAULT_HMAC_SECRET, settings.DEFAULT_HMAC_ALGORITHM)
| apache-2.0 |
fmacias64/MoodJournalAmerica | download_scripts/daily_downloader.py | 2 | 13952 | """
This script allows for parsing readable online PDF documents that are otherwise encrypted and annoying
@authored malam,habdulkafi 30 June 2014
"""
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfpage import PDFPage
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.pdfdevice import PDFDevice
from pdfminer.layout import LAParams
from pdfminer.converter import TextConverter
from StringIO import StringIO
import logging
import logging.handlers
import Queue
import re
import socket
import threading
import time
import warnings
import MySQLdb
import requests
import pickle
import us
import datetime
import json
import sentiment # qac module
# Matches "(cid:NNN)" placeholder tokens, presumably for stripping pdfminer
# artifacts from extracted text -- TODO confirm where it is applied.
regexcid = re.compile('\(cid\:\d+\)')

# File-backed logger: DEBUG and above goes to daily_news.log.
logger = logging.getLogger('Stream_Logger')
formatter = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')
# Create a handler to write low-priority messages to a file.
handler = logging.FileHandler(filename='daily_news.log')
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)

# NOTE(review): the filename here is an empty string -- almost certainly a
# redacted/placeholder path for a JSON credentials file. As written,
# open('') raises IOError at import time; restore the real path.
with open('') as f:
    p = json.load(f)

# Module-level MySQL connection/cursor; credentials come from the JSON
# config under the 'mysql' key.
conn = MySQLdb.connect(host=p['mysql']['host'],
                       user=p['mysql']['user'],
                       passwd=p['mysql']['passwd'],
                       db=p['mysql']['db'],
                       charset='utf8')
c = conn.cursor()
# Suppress MySQLdb warning output globally.
warnings.filterwarnings('ignore', category=MySQLdb.Warning)
# TO DO: JSON file?
#### Dictionaries for Geo / date mapping #####
statesdict = {'WA': ['COL', 'DN', 'TH', 'NT', 'PDN', 'ST', 'SR', 'TCH', 'YHR'], 'DE': ['DSN', 'NJ'], 'DC': ['WP', 'WT'], 'WI': ['BNR', 'BDDC', 'CH', 'DT', 'GBP', 'HTR', 'JG', 'LCT', 'LT', 'MNH', 'MJS', 'ON', 'PDR', 'PC', 'TR', 'SP', 'SPJ', 'WDH', 'WSJ'], 'WV': ['CDM', 'DP', 'HD', 'TJ', 'PNS', 'TWV'], 'HI': ['GI', 'SA'], 'FL': ['CS', 'CCC', 'DLA', 'FTU', 'FT', 'GS', 'HT', 'JCF', 'LCR', 'TL', 'NDN', 'NP', 'NFDN', 'OS', 'PBP', 'PNJ', 'VBPJ', 'SAR', 'SLNT', 'SB', 'SN', 'SS', 'TD', 'TIMES', 'TT', 'TB', 'VDS'], 'WY': ['CST', 'JHD', 'LB', 'WTE'], 'NH': ['COL', 'CM', 'ET', 'PH', 'TT'], 'NJ': ['APP', 'BCT', 'CN', 'CP', 'DJ', 'DR', 'HN', 'HNT', 'JJ', 'NJH', 'PAC', 'SJT', 'SL', 'TTT', 'TT'], 'NM': ['ADN', 'AJ', 'CCA', 'DT', 'DH', 'LCSN', 'RDR', 'SFNM', 'SCSN'], 'TX': ['AAS', 'BH', 'CCCT', 'DS', 'DMN', 'DRC', 'TE', 'EPT', 'FWST', 'GCDN', 'HC', 'DH', 'LNJ', 'LDN', 'TM', 'SAEN', 'TDT', 'TG', 'VA'], 'LA': ['TA', 'AP', 'DA', 'DW', 'NOA', 'NS', 'TT', 'TP', 'TTT'], 'NC': ['ACT', 'CO', 'DA', 'DC', 'DD', 'DR', 'FO', 'GG', 'HS', 'HDR', 'HPE', 'NO', 'NR', 'NH', 'NT', 'RMT', 'SH', 'TS', 'STAR', 'SRL', 'WSJ'], 'ND': ['BT', 'TF', 'GFH'], 'NE': ['BDS', 'CT', 'FT', 'LJS', 'OWH'], 'TN': ['CTFP', 'CA', 'DT', 'JS', 'JCP', 'LC', 'KNS', 'TT'], 'NY': ['AMNY', 'BDN', 'BN', 'TC', 'DCO', 'DF', 'DG', 'DM', 'DN', 'TD', 'EDLP', 'ET', 'HAM', 'IJ', 'JN', 'MT', 'MET', 'NYP', 'NYT', 'ND', 'OJ', 'PS', 'TPS', 'PJ', 'PSB', 'TR', 'RS', 'RDC', 'TS', 'SG', 'THR', 'TU', 'WDT'], 'PA': ['AM', 'BCT', 'BCCT', 'CDT', 'CPO', 'CV', 'DA', 'DLN', 'DCDT', 'ETN', 'GT', 'HES', 'TI', 'IJ', 'LB', 'LDN', 'MER', 'MET', 'MC', 'NI', 'PN', 'PDN', 'PI', 'PPG', 'PTR', 'PR', 'RE', 'TR', 'TS', 'SS', 'TH', 'TT', 'TD', 'GTR', 'WSG', 'YDR', 'YD'], 'CA': ['AD', 'AJ', 'BC', 'CCT', 'DB', 'DN', 'TDN', 'DP', 'DS', 'ER', 'ETS', 'FB', 'MCH', 'IVDB', 'LO', 'LNS', 'LR', 'LAR', 'LAT', 'MANT', 'MIJ', 'MSS', 'MB', 'NVR', 'OT', 'OCR', 'PSN', 'PRP', 'PE', 'PT', 'TR', 'RBDN', 'RDF', 'REP', 'SB', 'SC', 'SDUT', 'SFC', 'SFE', 'SGVT', 'SJMN', 'SMDJ', 'SCS', 
'SMT', 'TH', 'TT', 'TAR', 'TU', 'VTD', 'WRP', 'WDN'], 'NV': ['SUN', 'RGJ'], 'VA': ['CSE', 'DNR', 'DPRESS', 'DP', 'DRB', 'FLS', 'NA', 'NL', 'NV', 'NVD', 'RTD', 'VP', 'WS'], 'CO': ['AT', 'CCDR', 'CD', 'DC', 'DS', 'DP', 'FCC', 'TG', 'PI', 'GT', 'LTC', 'LRH', 'SPT', 'VD'], 'AK': ['ADN', 'FDNM', 'JE'], 'AL': ['AS', 'DS', 'DD', 'DE', 'EL', 'GT', 'MA', 'OAN', 'TD', 'TJ', 'TN'], 'AR': ['ADG', 'BB', 'HDT', 'SR'], 'VT': ['BFP', 'RH', 'TA'], 'IL': ['BND', 'CST', 'CT', 'DCN', 'DC', 'DH', 'DHR', 'TD', 'HOY', 'JG', 'JS', 'KCC', 'LISLE', 'NG', 'NH', 'TP', 'RE', 'RM', 'RIA', 'SI'], 'GA': ['AH', 'AJC', 'AC', 'BN', 'GDN', 'LE', 'MDJ', 'NC', 'RC', 'RNT', 'SMN', 'TT', 'GT', 'TG'], 'IN': ['ET', 'ECP', 'HT', 'IS', 'JC', 'JG', 'KT', 'NAT', 'PI', 'PHA', 'PDC', 'RR', 'SBT', 'SP', 'TT', 'TS', 'VSC'], 'IA': ['CCP', 'DR', 'TG', 'HE', 'PC', 'MCGG', 'MJ', 'QCT', 'SCJ', 'TH', 'TR', 'WC'], 'OK': ['NT', 'DOK', 'PDJ', 'TDP', 'TW', 'WEDN'], 'AZ': ['SUN', 'AI', 'AR', 'DC', 'KDM'], 'ID': ['BCDB', 'IPT', 'IS', 'TN'], 'CT': ['TA', 'CP', 'TD', 'GT', 'HC', 'TH', 'MP', 'NHR', 'NT', 'NB', 'RC'], 'ME': ['BDN', 'KJ', 'MS', 'PPH', 'SJ'], 'MD': ['TS', 'CCT', 'DT', 'FNP', 'SD'], 'MA': ['BG', 'BH', 'CCT', 'TE', 'HN', 'MET', 'MWDN', 'MDN', 'PL', 'SE', 'ST', 'TS', 'SC', 'TDG', 'TG'], 'OH': ['ABJ', 'AM', 'TB', 'CT', 'CE', 'CH', 'CD', 'TC', 'CN', 'DDN', 'TI', 'JN', 'MT', 'MG', 'TMJ', 'NH', 'CPD', 'RC', 'REP', 'SR', 'SNS', 'TR', 'TV'], 'UT': ['DH', 'DN', 'HJ', 'SLT', 'TS'], 'MO': ['FS', 'JG', 'KCS', 'LS', 'NL', 'RDN', 'SJNP', 'SLPD'], 'MN': ['BP', 'BD', 'DNT', 'FP', 'SCT', 'ST', 'WCT', 'WDN'], 'MI': ['BCE', 'DFP', 'DN', 'GRP', 'HS', 'JCP', 'KG', 'LSJ', 'MD', 'MNA', 'MEN', 'MS', 'MC', 'OP', 'PIO', 'TH', 'TCRE'], 'RI': ['NDN', 'PJ'], 'KS': ['DU', 'HN', 'LJW', 'OH', 'SJ', 'TCJ', 'WE'], 'MT': ['DIL', 'GFT', 'IR', 'MIS'], 'MS': ['CL', 'NMDJ', 'SH'], 'SC': ['AS', 'BG', 'GN', 'IJ', 'IP', 'TI', 'MN', 'PC', 'TS', 'TD'], 'KY': ['AM', 'CJ', 'TG', 'KE', 'LHL', 'TM', 'MI', 'NE'], 'OR': ['CGT', 'DT', 'EO', 'HN', 'MT', 'TO', 'RG', 
'SJ'], 'SD': ['AN', 'SFAL', 'RCJ', 'YDP']}
cities = ['AL_AS', 'AL_DS', 'AL_DD', 'AL_DE', 'AL_EL', 'AL_GT', 'AL_MA', 'AL_OAN', 'AL_TD', 'AL_TJ', 'AL_TN', 'AK_ADN', 'AK_FDNM', 'AK_JE', 'AZ_SUN', 'AZ_AI', 'AZ_AR', 'AZ_DC', 'AZ_KDM', 'AR_ADG', 'AR_BB', 'AR_HDT', 'AR_SR', 'CA_AD', 'CA_AJ', 'CA_BC', 'CA_CCT', 'CA_DB', 'CA_DN', 'CA_TDN', 'CA_DP', 'CA_DS', 'CA_ER', 'CA_ETS', 'CA_FB', 'CA_MCH', 'CA_IVDB', 'CA_LO', 'CA_LNS', 'CA_LR', 'CA_LAR', 'CA_LAT', 'CA_MANT', 'CA_MIJ', 'CA_MSS', 'CA_MB', 'CA_NVR', 'CA_OT', 'CA_OCR', 'CA_PSN', 'CA_PRP', 'CA_PE', 'CA_PT', 'CA_TR', 'CA_RBDN', 'CA_RDF', 'CA_REP', 'CA_SB', 'CA_SC', 'CA_SDUT', 'CA_SFC', 'CA_SFE', 'CA_SGVT', 'CA_SJMN', 'CA_SMDJ', 'CA_SCS', 'CA_SMT', 'CA_TH', 'CA_TT', 'CA_TAR', 'CA_TU', 'CA_VTD', 'CA_WRP', 'CA_WDN', 'CO_AT', 'CO_CCDR', 'CO_CD', 'CO_DC', 'CO_DS', 'CO_DP', 'CO_FCC', 'CO_TG', 'CO_PI', 'CO_GT', 'CO_LTC', 'CO_LRH', 'CO_SPT', 'CO_VD', 'CT_TA', 'CT_CP', 'CT_TD', 'CT_GT', 'CT_HC', 'CT_TH', 'CT_MP', 'CT_NHR', 'CT_NT', 'CT_NB', 'CT_RC', 'DE_DSN', 'DE_NJ', 'DC_WP', 'DC_WT', 'FL_CS', 'FL_CCC', 'FL_DLA', 'FL_FTU', 'FL_FT', 'FL_GS', 'FL_HT', 'FL_JCF', 'FL_LCR', 'FL_TL', 'FL_NDN', 'FL_NP', 'FL_NFDN', 'FL_OS', 'FL_PBP', 'FL_PNJ', 'FL_VBPJ', 'FL_SAR', 'FL_SLNT', 'FL_SB', 'FL_SN', 'FL_SS', 'FL_TD', 'FL_TIMES', 'FL_TT', 'FL_TB', 'FL_VDS', 'GA_AH', 'GA_AJC', 'GA_AC', 'GA_BN', 'GA_GDN', 'GA_LE', 'GA_MDJ', 'GA_NC', 'GA_RC', 'GA_RNT', 'GA_SMN', 'GA_TT', 'GA_GT', 'GA_TG', 'HI_GI', 'HI_SA', 'ID_BCDB', 'ID_IPT', 'ID_IS', 'ID_TN', 'IL_BND', 'IL_CST', 'IL_CT', 'IL_DCN', 'IL_DC', 'IL_DH', 'IL_DHR', 'IL_TD', 'IL_HOY', 'IL_JG', 'IL_JS', 'IL_KCC', 'IL_LISLE', 'IL_NG', 'IL_NH', 'IL_TP', 'IL_RE', 'IL_RM', 'IL_RIA', 'IL_SI', 'IN_ET', 'IN_ECP', 'IN_HT', 'IN_IS', 'IN_JC', 'IN_JG', 'IN_KT', 'IN_NAT', 'IN_PI', 'IN_PHA', 'IN_PDC', 'IN_RR', 'IN_SBT', 'IN_SP', 'IN_TT', 'IN_TS', 'IN_VSC', 'IA_CCP', 'IA_DR', 'IA_TG', 'IA_HE', 'IA_PC', 'IA_MCGG', 'IA_MJ', 'IA_QCT', 'IA_SCJ', 'IA_TH', 'IA_TR', 'IA_WC', 'KS_DU', 'KS_HN', 'KS_LJW', 'KS_OH', 'KS_SJ', 'KS_TCJ', 'KS_WE', 'KY_AM', 'KY_CJ', 'KY_TG', 
'KY_KE', 'KY_LHL', 'KY_TM', 'KY_MI', 'KY_NE', 'LA_TA', 'LA_AP', 'LA_DA', 'LA_DW', 'LA_NOA', 'LA_NS', 'LA_TT', 'LA_TP', 'LA_TTT', 'ME_BDN', 'ME_KJ', 'ME_MS', 'ME_PPH', 'ME_SJ', 'MD_TS', 'MD_CCT', 'MD_DT', 'MD_FNP', 'MD_SD', 'MA_BG', 'MA_BH', 'MA_CCT', 'MA_TE', 'MA_HN', 'MA_MET', 'MA_MWDN', 'MA_MDN', 'MA_PL', 'MA_SE', 'MA_ST', 'MA_TS', 'MA_SC', 'MA_TDG', 'MA_TG', 'MI_BCE', 'MI_DFP', 'MI_DN', 'MI_GRP', 'MI_HS', 'MI_JCP', 'MI_KG', 'MI_LSJ', 'MI_MD', 'MI_MNA', 'MI_MEN', 'MI_MS', 'MI_MC', 'MI_OP', 'MI_PIO', 'MI_TH', 'MI_TCRE', 'MN_BP', 'MN_BD', 'MN_DNT', 'MN_FP', 'MN_SCT', 'MN_ST', 'MN_WCT', 'MN_WDN', 'MS_CL', 'MS_NMDJ', 'MS_SH', 'MO_FS', 'MO_JG', 'MO_KCS', 'MO_LS', 'MO_NL', 'MO_RDN', 'MO_SJNP', 'MO_SLPD', 'MT_DIL', 'MT_GFT', 'MT_IR', 'MT_MIS', 'NE_BDS', 'NE_CT', 'NE_FT', 'NE_LJS', 'NE_OWH', 'NV_SUN', 'NV_RGJ', 'NH_COL', 'NH_CM', 'NH_ET', 'NH_PH', 'NH_TT', 'NJ_APP', 'NJ_BCT', 'NJ_CN', 'NJ_CP', 'NJ_DJ', 'NJ_DR', 'NJ_HN', 'NJ_HNT', 'NJ_JJ', 'NJ_NJH', 'NJ_PAC', 'NJ_SJT', 'NJ_SL', 'NJ_TTT', 'NJ_TT', 'NM_ADN', 'NM_AJ', 'NM_CCA', 'NM_DT', 'NM_DH', 'NM_LCSN', 'NM_RDR', 'NM_SFNM', 'NM_SCSN', 'NY_AMNY', 'NY_BDN', 'NY_BN', 'NY_TC', 'NY_DCO', 'NY_DF', 'NY_DG', 'NY_DM', 'NY_DN', 'NY_TD', 'NY_EDLP', 'NY_ET', 'NY_HAM', 'NY_IJ', 'NY_JN', 'NY_MT', 'NY_MET', 'NY_NYP', 'NY_NYT', 'NY_ND', 'NY_OJ', 'NY_PS', 'NY_TPS', 'NY_PJ', 'NY_PSB', 'NY_TR', 'NY_RS', 'NY_RDC', 'NY_TS', 'NY_SG', 'NY_THR', 'NY_TU', 'NY_WDT', 'NC_ACT', 'NC_CO', 'NC_DA', 'NC_DC', 'NC_DD', 'NC_DR', 'NC_FO', 'NC_GG', 'NC_HS', 'NC_HDR', 'NC_HPE', 'NC_NO', 'NC_NR', 'NC_NH', 'NC_NT', 'NC_RMT', 'NC_SH', 'NC_TS', 'NC_STAR', 'NC_SRL', 'NC_WSJ', 'ND_BT', 'ND_TF', 'ND_GFH', 'OH_ABJ', 'OH_AM', 'OH_TB', 'OH_CT', 'OH_CE', 'OH_CH', 'OH_CD', 'OH_TC', 'OH_CN', 'OH_DDN', 'OH_TI', 'OH_JN', 'OH_MT', 'OH_MG', 'OH_TMJ', 'OH_NH', 'OH_CPD', 'OH_RC', 'OH_REP', 'OH_SR', 'OH_SNS', 'OH_TR', 'OH_TV', 'OK_NT', 'OK_DOK', 'OK_PDJ', 'OK_TDP', 'OK_TW', 'OK_WEDN', 'OR_CGT', 'OR_DT', 'OR_EO', 'OR_HN', 'OR_MT', 'OR_TO', 'OR_RG', 'OR_SJ', 'PA_AM', 'PA_BCT', 
'PA_BCCT', 'PA_CDT', 'PA_CPO', 'PA_CV', 'PA_DA', 'PA_DLN', 'PA_DCDT', 'PA_ETN', 'PA_GT', 'PA_HES', 'PA_TI', 'PA_IJ', 'PA_LB', 'PA_LDN', 'PA_MER', 'PA_MET', 'PA_MC', 'PA_NI', 'PA_PN', 'PA_PDN', 'PA_PI', 'PA_PPG', 'PA_PTR', 'PA_PR', 'PA_RE', 'PA_TR', 'PA_TS', 'PA_SS', 'PA_TH', 'PA_TT', 'PA_TD', 'PA_GTR', 'PA_WSG', 'PA_YDR', 'PA_YD', 'RI_NDN', 'RI_PJ', 'SC_AS', 'SC_BG', 'SC_GN', 'SC_IJ', 'SC_IP', 'SC_TI', 'SC_MN', 'SC_PC', 'SC_TS', 'SC_TD', 'SD_AN', 'SD_SFAL', 'SD_RCJ', 'SD_YDP', 'TN_CTFP', 'TN_CA', 'TN_DT', 'TN_JS', 'TN_JCP', 'TN_LC', 'TN_KNS', 'TN_TT', 'TX_AAS', 'TX_BH', 'TX_CCCT', 'TX_DS', 'TX_DMN', 'TX_DRC', 'TX_TE', 'TX_EPT', 'TX_FWST', 'TX_GCDN', 'TX_HC', 'TX_DH', 'TX_LNJ', 'TX_LDN', 'TX_TM', 'TX_SAEN', 'TX_TDT', 'TX_TG', 'TX_VA', 'UT_DH', 'UT_DN', 'UT_HJ', 'UT_SLT', 'UT_TS', 'VT_BFP', 'VT_RH', 'VT_TA', 'VA_CSE', 'VA_DNR', 'VA_DPRESS', 'VA_DP', 'VA_DRB', 'VA_FLS', 'VA_NA', 'VA_NL', 'VA_NV', 'VA_NVD', 'VA_RTD', 'VA_VP', 'VA_WS', 'WA_COL', 'WA_DN', 'WA_TH', 'WA_NT', 'WA_PDN', 'WA_ST', 'WA_SR', 'WA_TCH', 'WA_YHR', 'WV_CDM', 'WV_DP', 'WV_HD', 'WV_TJ', 'WV_PNS', 'WV_TWV', 'WI_BNR', 'WI_BDDC', 'WI_CH', 'WI_DT', 'WI_GBP', 'WI_HTR', 'WI_JG', 'WI_LCT', 'WI_LT', 'WI_MNH', 'WI_MJS', 'WI_ON', 'WI_PDR', 'WI_PC', 'WI_TR', 'WI_SP', 'WI_SPJ', 'WI_WDH', 'WI_WSJ', 'WY_CST', 'WY_JHD', 'WY_LB', 'WY_WTE']
day_to_month = {'01':'January','02':'February','03':'March','04':'April','05':'May','06':'June','07':'July','08':'August','09':'September','10':'October','11':'November','12':'December'}
fipsdict = {'Northeast':{'New England':['09','23','25','33','44','50'],'Middle Atlantic':['34','36','42']},'Midwest':{'East North Central':['18','17','26','39','55'],'West North Central':['19','20','27','29','31','38','46']},'South':{'South Atlantic':['10','11','12','13','24','37','45','51','54'],'East South Central':['01','21','28','47'],'West South Central':['05','22','40','48']},'West':{'Mountain':['04','08','16','35','30','49','32','56'],'Pacific':['02','06','15','41','53']}}
# Map FIPS state codes to two-letter abbreviations (python-us package).
fipsabbr = us.states.mapping('fips','abbr')
# Today's date; todaydaynum is the day-of-month with any leading zero
# stripped -- it is interpolated into the day-numbered pdfNN directory
# of the Newseum media server URL built in on_download().
todaydate = datetime.date.today()
todaydaynum = todaydate.strftime('%d')
if todaydaynum[0] == '0':
    todaydaynum = todaydaynum[1]
def dwn_pdf_txt(url):
    """Download the PDF at *url* and return its extracted text.

    The HTTP response body is wrapped in a StringIO so pdfminer can
    treat it as a seekable file; every page is run through a
    TextConverter and the accumulated text is returned as one string.
    """
    r = requests.get(url)
    memory_file = StringIO(r.content)
    # Create a PDF parser object associated with the StringIO object
    parser = PDFParser(memory_file)
    # Create a PDF document object that stores the document structure
    document = PDFDocument(parser)
    # Define parameters to the PDF device object
    rsrcmgr = PDFResourceManager()
    retstr = StringIO()
    laparams = LAParams()
    codec = 'utf-8'
    # Create a PDF device object
    device = TextConverter(rsrcmgr, retstr, codec = codec, laparams = laparams)
    # Create a PDF interpreter object
    interpreter = PDFPageInterpreter(rsrcmgr, device)
    try:
        # Process each page contained in the document
        for page in PDFPage.create_pages(document):
            interpreter.process_page(page)
        parsed_document = retstr.getvalue()
    finally:
        # BUGFIX: the converter device and its output buffer were never
        # closed, leaking pdfminer resources on every call (this function
        # is invoked once per newspaper per day by on_download()).
        device.close()
        retstr.close()
    return parsed_document
# Cache of cleaned front-page texts keyed by '<STATE>_<EXT>' paper id;
# pickled to the file 'docstringdict' at the end of on_download().
docstringdict = {}
def on_download():
    """Fetch today's front-page PDF for every paper in statesdict.

    Each PDF is downloaded from the Newseum media server, converted to
    text, cleaned, sentiment-scored, and inserted into the
    daily_download table via cursor ``c``; the cleaned text is also
    cached in docstringdict, which is pickled to disk when done.
    """
    for state in statesdict.keys():
        for statext in statesdict[state]:
            try:
                url = "http://webmedia.newseum.org/newseum-multimedia/dfp/pdf" + todaydaynum + "/" + state + '_' + statext + ".pdf"
                to_up = dwn_pdf_txt("http://webmedia.newseum.org/newseum-multimedia/dfp/pdf" + todaydaynum + "/" + state + '_' + statext + ".pdf")
                # regexcid is defined elsewhere in this file; presumably it
                # strips pdfminer '(cid:NN)' artifacts -- verify at definition.
                to_up = re.sub(regexcid,'',to_up)
                docstringdict[state + '_' + statext] = to_up
                datestamp = todaydate.strftime('%Y-%m-%d')
                uniqid = idmaker(todaydate.strftime('%m'),todaydate.strftime("%d"),state + '_' + statext)
                bag_of_words = sentiment.preprocess(to_up)
                p, n = sentiment.score(bag_of_words,
                                       sentiment.positive_words,
                                       sentiment.negative_words)
                q = u'INSERT INTO daily_download (daily_id,text,url,state,timestamp,ext,pos,neg) VALUES (%s,%s,%s,%s,%s,%s,%s,%s);'
                c.execute(q, (uniqid,to_up,url,state,datestamp,statext,p,n))
            except:
                # NOTE(review): bare except silently skips ANY failure
                # (download, parse, or DB insert); only the paper id is
                # printed, so the cause of a miss is lost.
                print state + '_' + statext
    pickle.dump(docstringdict,open('docstringdict','wb'))
def idmaker(month, day, fullstname):
    """Build a daily record id of the form '<ST>-<EXT>-MMDD'.

    month and day may be ints or strings; each is zero-padded to two
    characters. Underscores in fullstname become hyphens.
    """
    mm = str(month).zfill(2)
    dd = str(day).zfill(2)
    return '%s-%s%s' % (fullstname.replace('_', '-'), mm, dd)
on_download() | bsd-3-clause |
iproduct/course-social-robotics | 11-dnn-keras/venv/Lib/site-packages/pygments/lexers/elm.py | 2 | 3003 | # -*- coding: utf-8 -*-
"""
pygments.lexers.elm
~~~~~~~~~~~~~~~~~~~
Lexer for the Elm programming language.
:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, include
from pygments.token import Comment, Keyword, Name, Number, Punctuation, String, Text
__all__ = ['ElmLexer']
class ElmLexer(RegexLexer):
    """
    For `Elm <http://elm-lang.org/>`_ source code.
    .. versionadded:: 2.1
    """
    name = 'Elm'
    aliases = ['elm']
    filenames = ['*.elm']
    mimetypes = ['text/x-elm']
    # Value/function identifiers: lowercase or underscore first character,
    # primes allowed.
    validName = r'[a-z_][a-zA-Z0-9_\']*'
    # The program entry point gets keyword highlighting.
    specialName = r'^main '
    # Built-in operators, longest-match first so e.g. '|>' wins over '|'.
    builtinOps = (
        '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==',
        '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/',
        '..', '.', '->', '-', '++', '+', '*', '&&', '%',
    )
    reservedWords = words((
        'alias', 'as', 'case', 'else', 'if', 'import', 'in',
        'let', 'module', 'of', 'port', 'then', 'type', 'where',
        ), suffix=r'\b')
    tokens = {
        'root': [
            # Comments
            (r'\{-', Comment.Multiline, 'comment'),
            (r'--.*', Comment.Single),
            # Whitespace
            (r'\s+', Text),
            # Strings
            (r'"', String, 'doublequote'),
            # Modules
            (r'^\s*module\s*', Keyword.Namespace, 'imports'),
            # Imports
            (r'^\s*import\s*', Keyword.Namespace, 'imports'),
            # Shaders
            (r'\[glsl\|.*', Name.Entity, 'shader'),
            # Keywords
            (reservedWords, Keyword.Reserved),
            # Types
            (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
            # Main
            (specialName, Keyword.Reserved),
            # Prefix Operators
            (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
            # Infix Operators
            (words(builtinOps), Name.Function),
            # Numbers
            include('numbers'),
            # Variable Names
            (validName, Name.Variable),
            # Parens
            (r'[,()\[\]{}]', Punctuation),
        ],
        # Block comments may nest; each '{-' pushes this state again.
        'comment': [
            (r'-(?!\})', Comment.Multiline),
            (r'\{-', Comment.Multiline, 'comment'),
            (r'[^-}]', Comment.Multiline),
            (r'-\}', Comment.Multiline, '#pop'),
        ],
        'doublequote': [
            (r'\\u[0-9a-fA-F]{4}', String.Escape),
            (r'\\[nrfvb\\"]', String.Escape),
            (r'[^"]', String),
            (r'"', String, '#pop'),
        ],
        # Dotted module path after 'module'/'import'.
        'imports': [
            (r'\w+(\.\w+)*', Name.Class, '#pop'),
        ],
        'numbers': [
            (r'_?\d+\.(?=\d+)', Number.Float),
            (r'_?\d+', Number.Integer),
        ],
        # Inline GLSL blocks: [glsl| ... |]
        'shader': [
            (r'\|(?!\])', Name.Entity),
            (r'\|\]', Name.Entity, '#pop'),
            (r'.*\n', Name.Entity),
        ],
    }
| gpl-2.0 |
alexeyum/scikit-learn | examples/semi_supervised/plot_label_propagation_digits.py | 55 | 2723 | """
===================================================
Label Propagation digits: Demonstrating performance
===================================================
This example demonstrates the power of semisupervised learning by
training a Label Spreading model to classify handwritten digits
with sets of very few labels.
The handwritten digit dataset has 1797 total points. The model will
be trained using all points, but only 30 will be labeled. Results
in the form of a confusion matrix and a series of metrics over each
class will be very good.
At the end, the top 10 most uncertain predictions will be shown.
"""
print(__doc__)
# Authors: Clay Woolam <clay@woolam.org>
# License: BSD
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from sklearn import datasets
from sklearn.semi_supervised import label_propagation
from sklearn.metrics import confusion_matrix, classification_report
# Use a fixed-seed shuffle of the digits data, then keep the first 330 samples.
digits = datasets.load_digits()
rng = np.random.RandomState(0)
indices = np.arange(len(digits.data))
rng.shuffle(indices)
X = digits.data[indices[:330]]
y = digits.target[indices[:330]]
images = digits.images[indices[:330]]
n_total_samples = len(y)
n_labeled_points = 30
indices = np.arange(n_total_samples)
unlabeled_set = indices[n_labeled_points:]
# Mark everything beyond the first n_labeled_points as unlabeled (-1),
# which is the convention the semi-supervised estimators expect.
y_train = np.copy(y)
y_train[unlabeled_set] = -1
###############################################################################
# Learn with LabelSpreading
lp_model = label_propagation.LabelSpreading(gamma=0.25, max_iter=5)
lp_model.fit(X, y_train)
predicted_labels = lp_model.transduction_[unlabeled_set]
true_labels = y[unlabeled_set]
cm = confusion_matrix(true_labels, predicted_labels, labels=lp_model.classes_)
print("Label Spreading model: %d labeled & %d unlabeled points (%d total)" %
      (n_labeled_points, n_total_samples - n_labeled_points, n_total_samples))
print(classification_report(true_labels, predicted_labels))
print("Confusion matrix")
print(cm)
# calculate uncertainty values for each transduced distribution
pred_entropies = stats.distributions.entropy(lp_model.label_distributions_.T)
# pick the top 10 most uncertain labels
uncertainty_index = np.argsort(pred_entropies)[-10:]
###############################################################################
# plot the ten most uncertain digits with predicted vs. true labels
f = plt.figure(figsize=(7, 5))
for index, image_index in enumerate(uncertainty_index):
    image = images[image_index]
    sub = f.add_subplot(2, 5, index + 1)
    sub.imshow(image, cmap=plt.cm.gray_r)
    plt.xticks([])
    plt.yticks([])
    sub.set_title('predict: %i\ntrue: %i' % (
        lp_model.transduction_[image_index], y[image_index]))
f.suptitle('Learning with small amount of labeled data')
plt.show()
| bsd-3-clause |
HadiOfBBG/pegasusrises | gdata/geo/__init__.py | 249 | 6006 | # -*-*- encoding: utf-8 -*-*-
#
# This is gdata.photos.geo, implementing geological positioning in gdata structures
#
# $Id: __init__.py 81 2007-10-03 14:41:42Z havard.gulldahl $
#
# Copyright 2007 Håvard Gulldahl
# Portions copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Picasa Web Albums uses the georss and gml namespaces for
elements defined in the GeoRSS and Geography Markup Language specifications.
Specifically, Picasa Web Albums uses the following elements:
georss:where
gml:Point
gml:pos
http://code.google.com/apis/picasaweb/reference.html#georss_reference
Picasa Web Albums also accepts geographic-location data in two other formats:
W3C format and plain-GeoRSS (without GML) format.
"""
#
#Over the wire, the Picasa Web Albums only accepts and sends the
#elements mentioned above, but this module will let you seamlessly convert
#between the different formats (TODO 2007-10-18 hg)
__author__ = u'havard@gulldahl.no'# (Håvard Gulldahl)' #BUG: api chokes on non-ascii chars in __author__
__license__ = 'Apache License v2'
import atom
import gdata
GEO_NAMESPACE = 'http://www.w3.org/2003/01/geo/wgs84_pos#'
GML_NAMESPACE = 'http://www.opengis.net/gml'
GEORSS_NAMESPACE = 'http://www.georss.org/georss'
class GeoBaseElement(atom.AtomBase):
  """Base class for geo elements.

  To add new elements, you only need to add the element tag name to self._tag
  and the namespace to self._namespace.
  """
  _tag = ''
  _namespace = GML_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  def __init__(self, name=None, extension_elements=None,
      extension_attributes=None, text=None):
    # name: optional element name; text: the element's text content.
    self.name = name
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}
class Pos(GeoBaseElement):
  """(string) Specifies a latitude and longitude, separated by a space,
  e.g. `35.669998 139.770004'"""
  _tag = 'pos'
def PosFromString(xml_string):
  # Parse a <gml:pos> XML string into a Pos instance.
  return atom.CreateClassFromXMLString(Pos, xml_string)
class Point(GeoBaseElement):
  """(container) Specifies a particular geographical point, by means of
  a <gml:pos> element."""
  _tag = 'Point'
  _children = atom.AtomBase._children.copy()
  _children['{%s}pos' % GML_NAMESPACE] = ('pos', Pos)
  def __init__(self, pos=None, extension_elements=None, extension_attributes=None, text=None):
    GeoBaseElement.__init__(self, extension_elements=extension_elements,
                            extension_attributes=extension_attributes,
                            text=text)
    # Default to an empty <gml:pos> child so .pos is always navigable.
    if pos is None:
      pos = Pos()
    self.pos=pos
def PointFromString(xml_string):
  # Parse a <gml:Point> XML string into a Point instance.
  return atom.CreateClassFromXMLString(Point, xml_string)
class Where(GeoBaseElement):
  """(container) Specifies a geographical location or region.

  A container element, containing a single <gml:Point> element.
  (Not to be confused with <gd:where>.)

  Note that the (only) child attribute, .Point, is title-cased.
  This reflects the names of elements in the xml stream
  (principle of least surprise).

  As a convenience, you can get a tuple of (lat, lon) with Where.location(),
  and set the same data with Where.set_location( (lat, lon) ).
  Similarly, there are methods to set and get only latitude and longitude.
  """
  _tag = 'where'
  _namespace = GEORSS_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _children['{%s}Point' % GML_NAMESPACE] = ('Point', Point)
  def __init__(self, point=None, extension_elements=None, extension_attributes=None, text=None):
    GeoBaseElement.__init__(self, extension_elements=extension_elements,
                            extension_attributes=extension_attributes,
                            text=text)
    if point is None:
      point = Point()
    self.Point=point
  def location(self):
    "(float, float) Return Where.Point.pos.text as a (lat,lon) tuple"
    try:
      return tuple([float(z) for z in self.Point.pos.text.split(' ')])
    except AttributeError:
      # No position has been set yet (Point.pos.text is None).
      return tuple()
  def set_location(self, latlon):
    """(bool) Set Where.Point.pos.text from a (lat,lon) tuple.

    Arguments:
    latlon: A tuple of
      lat (float): The latitude in degrees, from -90.0 to 90.0
      lon (float): The longitude in degrees, from -180.0 to 180.0

    Returns True on success.
    """
    assert(isinstance(latlon[0], float))
    assert(isinstance(latlon[1], float))
    try:
      self.Point.pos.text = "%s %s" % (latlon[0], latlon[1])
      return True
    except AttributeError:
      return False
  def latitude(self):
    "(float) Get the latitude value of the geo-tag. See also .location()"
    lat, lon = self.location()
    return lat
  def longitude(self):
    "(float) Get the longitude value of the geo-tag. See also .location()"
    lat, lon = self.location()
    return lon
  longtitude = longitude
  def set_latitude(self, lat):
    """(bool) Set the latitude value of the geo-tag, keeping the longitude.

    Args:
    lat (float): The new latitude value

    See also .set_location()
    """
    _lat, lon = self.location()
    # BUGFIX: set_location() takes a single (lat, lon) tuple; the original
    # passed two positional arguments, raising TypeError at runtime.
    return self.set_location((lat, lon))
  def set_longitude(self, lon):
    """(bool) Set the longitude value of the geo-tag, keeping the latitude.

    Args:
    lon (float): The new longitude value

    See also .set_location()
    """
    lat, _lon = self.location()
    # BUGFIX: as in set_latitude() -- pass a single tuple to set_location().
    return self.set_location((lat, lon))
  set_longtitude = set_longitude
def WhereFromString(xml_string):
  # Parse a <georss:where> XML string into a Where instance.
  return atom.CreateClassFromXMLString(Where, xml_string)
| apache-2.0 |
Taapat/enigma2-openpli-fulan | lib/python/Tools/Notifications.py | 12 | 1988 |
# Queued notifications waiting to be shown: (fnc, screen, args, kwargs, id).
notifications = [ ]
# Callbacks invoked whenever a notification is queued.
notificationAdded = [ ]
# notifications which are currently on screen (and might be closed by similar notifications)
current_notifications = [ ]
def __AddNotification(fnc, screen, id, *args, **kwargs):
	# Queue a notification and fire the notificationAdded callbacks.
	# The repr checks pick out specific screen classes by name.
	if ".MessageBox'>" in `screen`:
		kwargs["simple"] = True
	if ".Standby'>" in `screen`:
		removeCIdialog()
	notifications.append((fnc, screen, args, kwargs, id))
	for x in notificationAdded:
		x()
def AddNotification(screen, *args, **kwargs):
	"""Queue a notification with no callback and no id."""
	AddNotificationWithCallback(None, screen, *args, **kwargs)
def AddNotificationWithCallback(fnc, screen, *args, **kwargs):
	"""Queue a notification; fnc is called when the screen closes."""
	__AddNotification(fnc, screen, None, *args, **kwargs)
def AddNotificationParentalControl(fnc, screen, *args, **kwargs):
	"""Queue a parental-control notification, replacing any pending one."""
	RemovePopup("Parental control")
	__AddNotification(fnc, screen, "Parental control", *args, **kwargs)
def AddNotificationWithID(id, screen, *args, **kwargs):
	"""Queue a notification identified by id so it can be removed later."""
	__AddNotification(None, screen, id, *args, **kwargs)
# we don't support notifications with callback and ID as this
# would require manually calling the callback on cancelled popups.
def RemovePopup(id):
	"""Remove a queued notification and close any on-screen one with this id."""
	# remove similar notifications from the pending queue
	print "RemovePopup, id =", id
	for x in notifications:
		if x[4] and x[4] == id:
			print "(found in notifications)"
			notifications.remove(x)
	# close matching notifications that are already displayed
	for x in current_notifications:
		if x[0] == id:
			print "(found in current notifications)"
			x[1].close()
from Screens.MessageBox import MessageBox
def AddPopup(text, type, timeout, id = None):
	"""Queue a MessageBox popup; an existing popup with the same id is replaced."""
	if id is not None:
		RemovePopup(id)
	print "AddPopup, id =", id
	AddNotificationWithID(id, MessageBox, text = text, type = type, timeout = timeout, close_on_any_key = True)
def removeCIdialog():
	"""Force-close any open CI (common interface) dialogs after a timer wakeup."""
	import NavigationInstance
	if NavigationInstance.instance and NavigationInstance.instance.wasTimerWakeup():
		import Screens.Ci
		for slot in Screens.Ci.CiHandler.dlgs:
			if hasattr(Screens.Ci.CiHandler.dlgs[slot], "forceExit"):
				Screens.Ci.CiHandler.dlgs[slot].tag = "WAIT"
				Screens.Ci.CiHandler.dlgs[slot].forceExit()
| gpl-2.0 |
tempbottle/python-driver | tests/integration/cqlengine/query/test_datetime_queries.py | 8 | 2565 | # Copyright 2015 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime, timedelta
from uuid import uuid4
from cassandra.cqlengine.functions import get_total_seconds
from tests.integration.cqlengine.base import BaseCassEngTestCase
from cassandra.cqlengine.management import sync_table
from cassandra.cqlengine.management import drop_table
from cassandra.cqlengine.models import Model, ModelException
from cassandra.cqlengine import columns
from cassandra.cqlengine import query
class DateTimeQueryTestModel(Model):
    """Test model with a composite key of (user, day) for datetime queries."""
    user = columns.Integer(primary_key=True)    # partition key
    day = columns.DateTime(primary_key=True)    # clustering key
    data = columns.Text()
class TestDateTimeQueries(BaseCassEngTestCase):
    """Integration tests for DateTime-keyed range queries and precision."""
    @classmethod
    def setUpClass(cls):
        super(TestDateTimeQueries, cls).setUpClass()
        sync_table(DateTimeQueryTestModel)
        cls.base_date = datetime.now() - timedelta(days=10)
        # 7 users x 10 consecutive days of rows starting at base_date.
        for x in range(7):
            for y in range(10):
                DateTimeQueryTestModel.create(
                    user=x,
                    day=(cls.base_date+timedelta(days=y)),
                    data=str(uuid4())
                )
    @classmethod
    def tearDownClass(cls):
        super(TestDateTimeQueries, cls).tearDownClass()
        drop_table(DateTimeQueryTestModel)
    def test_range_query(self):
        """ Tests that loading from a range of dates works properly """
        # Truncate base_date to midnight so the range covers whole days.
        start = datetime(*self.base_date.timetuple()[:3])
        end = start + timedelta(days=3)
        results = DateTimeQueryTestModel.filter(user=0, day__gte=start, day__lt=end)
        assert len(results) == 3
    def test_datetime_precision(self):
        """ Tests that millisecond resolution is preserved when saving datetime objects """
        now = datetime.now()
        pk = 1000
        obj = DateTimeQueryTestModel.create(user=pk, day=now, data='energy cheese')
        load = DateTimeQueryTestModel.get(user=pk)
        # Round-tripped timestamp must match to ~2 decimal places of seconds.
        self.assertAlmostEqual(get_total_seconds(now - load.day), 0, 2)
        obj.delete()
| apache-2.0 |
ljgabc/lfs | usr/lib/python2.7/test/test_asynchat.py | 93 | 9203 | # test asynchat
import asyncore, asynchat, socket, time
import unittest
import sys
from test import test_support
try:
import threading
except ImportError:
threading = None
HOST = test_support.HOST
SERVER_QUIT = 'QUIT\n'
if threading:
    class echo_server(threading.Thread):
        """Thread that accepts one connection, collects data until
        SERVER_QUIT is seen, then echoes everything back in chunks."""
        # parameter to determine the number of bytes passed back to the
        # client each send
        chunk_size = 1
        def __init__(self, event):
            threading.Thread.__init__(self)
            self.event = event
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.port = test_support.bind_port(self.sock)
            # This will be set if the client wants us to wait before echoing data
            # back.
            self.start_resend_event = None
        def run(self):
            self.sock.listen(1)
            # signal the main thread that we are ready to accept
            self.event.set()
            conn, client = self.sock.accept()
            self.buffer = ""
            # collect data until quit message is seen
            while SERVER_QUIT not in self.buffer:
                data = conn.recv(1)
                if not data:
                    break
                self.buffer = self.buffer + data
            # remove the SERVER_QUIT message
            self.buffer = self.buffer.replace(SERVER_QUIT, '')
            if self.start_resend_event:
                self.start_resend_event.wait()
            # re-send entire set of collected data
            try:
                # this may fail on some tests, such as test_close_when_done, since
                # the client closes the channel when it's done sending
                while self.buffer:
                    n = conn.send(self.buffer[:self.chunk_size])
                    time.sleep(0.001)
                    self.buffer = self.buffer[n:]
            except:
                pass
            conn.close()
            self.sock.close()
class echo_client(asynchat.async_chat):
    """async_chat client that records each terminator-delimited chunk
    it receives in self.contents."""
    def __init__(self, terminator, server_port):
        asynchat.async_chat.__init__(self)
        self.contents = []
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connect((HOST, server_port))
        self.set_terminator(terminator)
        self.buffer = ''
    def handle_connect(self):
        pass
    if sys.platform == 'darwin':
        # select.poll returns a select.POLLHUP at the end of the tests
        # on darwin, so just ignore it
        def handle_expt(self):
            pass
    def collect_incoming_data(self, data):
        # accumulate until found_terminator fires
        self.buffer += data
    def found_terminator(self):
        self.contents.append(self.buffer)
        self.buffer = ""
def start_echo_server():
    """Start an echo_server thread and wait until it is ready to accept.

    Returns (server, event); the event is cleared for possible reuse.
    """
    event = threading.Event()
    s = echo_server(event)
    s.start()
    event.wait()
    event.clear()
    time.sleep(0.01) # Give server time to start accepting.
    return s, event
@unittest.skipUnless(threading, 'Threading required for this test.')
class TestAsynchat(unittest.TestCase):
    """End-to-end tests of async_chat against a threaded echo server.

    usepoll selects select()- vs poll()-based asyncore loops; the
    TestAsynchat_WithPoll subclass reruns everything with poll.
    """
    usepoll = False
    def setUp (self):
        self._threads = test_support.threading_setup()
    def tearDown (self):
        test_support.threading_cleanup(*self._threads)
    def line_terminator_check(self, term, server_chunk):
        # Push three messages ending in `term` and check the client splits
        # the echoed stream on that terminator, whatever the server's
        # per-send chunk size is.
        event = threading.Event()
        s = echo_server(event)
        s.chunk_size = server_chunk
        s.start()
        event.wait()
        event.clear()
        time.sleep(0.01) # Give server time to start accepting.
        c = echo_client(term, s.port)
        c.push("hello ")
        c.push("world%s" % term)
        c.push("I'm not dead yet!%s" % term)
        c.push(SERVER_QUIT)
        asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
        s.join()
        self.assertEqual(c.contents, ["hello world", "I'm not dead yet!"])
    # the line terminator tests below check receiving variously-sized
    # chunks back from the server in order to exercise all branches of
    # async_chat.handle_read
    def test_line_terminator1(self):
        # test one-character terminator
        for l in (1,2,3):
            self.line_terminator_check('\n', l)
    def test_line_terminator2(self):
        # test two-character terminator
        for l in (1,2,3):
            self.line_terminator_check('\r\n', l)
    def test_line_terminator3(self):
        # test three-character terminator
        for l in (1,2,3):
            self.line_terminator_check('qqq', l)
    def numeric_terminator_check(self, termlen):
        # Try reading a fixed number of bytes
        s, event = start_echo_server()
        c = echo_client(termlen, s.port)
        data = "hello world, I'm not dead yet!\n"
        c.push(data)
        c.push(SERVER_QUIT)
        asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
        s.join()
        self.assertEqual(c.contents, [data[:termlen]])
    def test_numeric_terminator1(self):
        # check that ints & longs both work (since type is
        # explicitly checked in async_chat.handle_read)
        self.numeric_terminator_check(1)
        self.numeric_terminator_check(1L)
    def test_numeric_terminator2(self):
        self.numeric_terminator_check(6L)
    def test_none_terminator(self):
        # With terminator None, data accumulates in the buffer and
        # found_terminator is never called.
        s, event = start_echo_server()
        c = echo_client(None, s.port)
        data = "hello world, I'm not dead yet!\n"
        c.push(data)
        c.push(SERVER_QUIT)
        asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
        s.join()
        self.assertEqual(c.contents, [])
        self.assertEqual(c.buffer, data)
    def test_simple_producer(self):
        s, event = start_echo_server()
        c = echo_client('\n', s.port)
        data = "hello world\nI'm not dead yet!\n"
        p = asynchat.simple_producer(data+SERVER_QUIT, buffer_size=8)
        c.push_with_producer(p)
        asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
        s.join()
        self.assertEqual(c.contents, ["hello world", "I'm not dead yet!"])
    def test_string_producer(self):
        s, event = start_echo_server()
        c = echo_client('\n', s.port)
        data = "hello world\nI'm not dead yet!\n"
        c.push_with_producer(data+SERVER_QUIT)
        asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
        s.join()
        self.assertEqual(c.contents, ["hello world", "I'm not dead yet!"])
    def test_empty_line(self):
        # checks that empty lines are handled correctly
        s, event = start_echo_server()
        c = echo_client('\n', s.port)
        c.push("hello world\n\nI'm not dead yet!\n")
        c.push(SERVER_QUIT)
        asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
        s.join()
        self.assertEqual(c.contents, ["hello world", "", "I'm not dead yet!"])
    def test_close_when_done(self):
        s, event = start_echo_server()
        s.start_resend_event = threading.Event()
        c = echo_client('\n', s.port)
        c.push("hello world\nI'm not dead yet!\n")
        c.push(SERVER_QUIT)
        c.close_when_done()
        asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01)
        # Only allow the server to start echoing data back to the client after
        # the client has closed its connection. This prevents a race condition
        # where the server echoes all of its data before we can check that it
        # got any down below.
        s.start_resend_event.set()
        s.join()
        self.assertEqual(c.contents, [])
        # the server might have been able to send a byte or two back, but this
        # at least checks that it received something and didn't just fail
        # (which could still result in the client not having received anything)
        self.assertTrue(len(s.buffer) > 0)
class TestAsynchat_WithPoll(TestAsynchat):
    """Rerun all TestAsynchat tests using the poll()-based asyncore loop."""
    usepoll = True
class TestHelperFunctions(unittest.TestCase):
    """Unit tests for asynchat module-level helpers."""
    def test_find_prefix_at_end(self):
        # length of the longest terminator prefix found at the end of haystack
        self.assertEqual(asynchat.find_prefix_at_end("qwerty\r", "\r\n"), 1)
        self.assertEqual(asynchat.find_prefix_at_end("qwertydkjf", "\r\n"), 0)
class TestFifo(unittest.TestCase):
    """Unit tests for asynchat.fifo; pop() returns (success_flag, item)."""
    def test_basic(self):
        f = asynchat.fifo()
        f.push(7)
        f.push('a')
        self.assertEqual(len(f), 2)
        self.assertEqual(f.first(), 7)
        self.assertEqual(f.pop(), (1, 7))
        self.assertEqual(len(f), 1)
        self.assertEqual(f.first(), 'a')
        self.assertEqual(f.is_empty(), False)
        self.assertEqual(f.pop(), (1, 'a'))
        self.assertEqual(len(f), 0)
        self.assertEqual(f.is_empty(), True)
        # popping an empty fifo yields (0, None)
        self.assertEqual(f.pop(), (0, None))
    def test_given_list(self):
        # a fifo can be seeded with an initial list
        f = asynchat.fifo(['x', 17, 3])
        self.assertEqual(len(f), 3)
        self.assertEqual(f.pop(), (1, 'x'))
        self.assertEqual(f.pop(), (1, 17))
        self.assertEqual(f.pop(), (1, 3))
        self.assertEqual(f.pop(), (0, None))
def test_main(verbose=None):
    """Run all test classes in this module via the regrtest helper."""
    test_support.run_unittest(TestAsynchat, TestAsynchat_WithPoll,
                              TestHelperFunctions, TestFifo)
if __name__ == "__main__":
    test_main(verbose=True)
| gpl-2.0 |
s0undt3ch/Deluge | deluge/ui/console/commands/status.py | 8 | 5301 | #
# status.py
#
# Copyright (C) 2011 Nick Lanham <nick@afternight.org>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
from optparse import make_option
from twisted.internet import defer
from deluge.ui.console.main import BaseCommand
from deluge.ui.client import client
import deluge.common
import deluge.component as component
class Command(BaseCommand):
    """Shows a various status information from the daemon."""
    option_list = BaseCommand.option_list + (
        make_option('-r', '--raw', action='store_true', default=False, dest='raw',
                    help='Don\'t format upload/download rates in KiB/s (useful for scripts that want to do their own parsing)'),
        make_option('-n', '--no-torrents', action='store_false', default=True, dest='show_torrents',
                    help='Don\'t show torrent status (this will make the command a bit faster)'),
    )
    usage = "Usage: status [-r] [-n]"
    def handle(self, *args, **options):
        """Fire off the async status queries and print once all have returned.

        Three pieces of information are gathered concurrently (session
        status, connection count, and -- unless -n was given -- torrent
        states); each callback stores its result and prints when the
        last piece arrives.
        """
        self.console = component.get("ConsoleUI")
        self.status = None
        self.connections = None
        if options["show_torrents"]:
            self.torrents = None
        else:
            # sentinel: torrent status was not requested
            self.torrents = -2
        self.raw = options["raw"]
        def on_session_status(status):
            self.status = status
            self._print_if_ready()
        def on_num_connections(conns):
            self.connections = conns
            self._print_if_ready()
        def on_torrents_status(status):
            self.torrents = status
            self._print_if_ready()
        def on_torrents_status_fail(reason):
            # sentinel: torrent status query failed
            self.torrents = -1
            self._print_if_ready()
        deferreds = []
        ds = client.core.get_session_status(["payload_upload_rate","payload_download_rate","dht_nodes"])
        ds.addCallback(on_session_status)
        deferreds.append(ds)
        dc = client.core.get_num_connections()
        dc.addCallback(on_num_connections)
        deferreds.append(dc)
        if options["show_torrents"]:
            dt = client.core.get_torrents_status({}, ["state"])
            dt.addCallback(on_torrents_status)
            dt.addErrback(on_torrents_status_fail)
            deferreds.append(dt)
        return defer.DeferredList(deferreds)
    def _print_if_ready(self):
        # Print once every requested piece of information has arrived.
        # (Previously this check was duplicated inline in all four callbacks,
        # using the `!= None` anti-idiom.)
        if self.status is not None and self.connections is not None and self.torrents is not None:
            self.print_status()
    def print_status(self):
        """Write the collected status information to the console."""
        self.console.set_batch_write(True)
        if self.raw:
            self.console.write("{!info!}Total upload: %f"%self.status["payload_upload_rate"])
            self.console.write("{!info!}Total download: %f"%self.status["payload_download_rate"])
        else:
            self.console.write("{!info!}Total upload: %s"%deluge.common.fspeed(self.status["payload_upload_rate"]))
            self.console.write("{!info!}Total download: %s"%deluge.common.fspeed(self.status["payload_download_rate"]))
        self.console.write("{!info!}DHT Nodes: %i"%self.status["dht_nodes"])
        self.console.write("{!info!}Total connections: %i"%self.connections)
        if self.torrents == -1:
            self.console.write("{!error!}Error getting torrent info")
        elif self.torrents != -2:
            self.console.write("{!info!}Total torrents: %i"%len(self.torrents))
            states = ["Downloading","Seeding","Paused","Checking","Error","Queued"]
            # tally how many torrents are in each state
            state_counts = {}
            for state in states:
                state_counts[state] = 0
            for t in self.torrents:
                s = self.torrents[t]
                state_counts[s["state"]] += 1
            for state in states:
                self.console.write("{!info!} %s: %i"%(state,state_counts[state]))
        self.console.set_batch_write(False)
| gpl-3.0 |
foobert/ansible-modules-core | cloud/rackspace/rax_files.py | 157 | 11980 | #!/usr/bin/python
# (c) 2013, Paul Durivage <paul.durivage@rackspace.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_files
short_description: Manipulate Rackspace Cloud Files Containers
description:
- Manipulate Rackspace Cloud Files Containers
version_added: "1.5"
options:
clear_meta:
description:
- Optionally clear existing metadata when applying metadata to existing containers.
Selecting this option is only appropriate when setting type=meta
choices:
- "yes"
- "no"
default: "no"
container:
description:
- The container to use for container or metadata operations.
required: true
meta:
description:
- A hash of items to set as metadata values on a container
private:
description:
- Used to set a container as private, removing it from the CDN. B(Warning!)
Private containers, if previously made public, can have live objects
available until the TTL on cached objects expires
public:
description:
- Used to set a container as public, available via the Cloud Files CDN
region:
description:
- Region to create an instance in
default: DFW
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
ttl:
description:
- In seconds, set a container-wide TTL for all objects cached on CDN edge nodes.
Setting a TTL is only appropriate for containers that are public
type:
description:
- Type of object to do work on, i.e. metadata object or a container object
choices:
- file
- meta
default: file
web_error:
description:
- Sets an object to be presented as the HTTP error page when accessed by the CDN URL
web_index:
description:
- Sets an object to be presented as the HTTP index page when accessed by the CDN URL
author: "Paul Durivage (@angstwad)"
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
- name: "Test Cloud Files Containers"
hosts: local
gather_facts: no
tasks:
- name: "List all containers"
rax_files: state=list
- name: "Create container called 'mycontainer'"
rax_files: container=mycontainer
- name: "Create container 'mycontainer2' with metadata"
rax_files:
container: mycontainer2
meta:
key: value
file_for: someuser@example.com
- name: "Set a container's web index page"
rax_files: container=mycontainer web_index=index.html
- name: "Set a container's web error page"
rax_files: container=mycontainer web_error=error.html
- name: "Make container public"
rax_files: container=mycontainer public=yes
- name: "Make container public with a 24 hour TTL"
rax_files: container=mycontainer public=yes ttl=86400
- name: "Make container private"
rax_files: container=mycontainer private=yes
- name: "Test Cloud Files Containers Metadata Storage"
hosts: local
gather_facts: no
tasks:
- name: "Get mycontainer2 metadata"
rax_files:
container: mycontainer2
type: meta
- name: "Set mycontainer2 metadata"
rax_files:
container: mycontainer2
type: meta
meta:
uploaded_by: someuser@example.com
- name: "Remove mycontainer2 metadata"
rax_files:
container: "mycontainer2"
type: meta
state: absent
meta:
key: ""
file_for: ""
'''
# pyrax is an optional dependency; record availability so main() can emit a
# helpful error instead of an ImportError traceback.
try:
    import pyrax
    HAS_PYRAX = True
except ImportError, e:
    HAS_PYRAX = False

# Accumulator for the JSON result returned via module.exit_json().
EXIT_DICT = dict(success=True)
# Cloud Files prefixes every container metadata header with this string.
META_PREFIX = 'x-container-meta-'
def _get_container(module, cf, container):
    """Return the named Cloud Files container, failing the module if it
    does not exist."""
    try:
        return cf.get_container(container)
    except pyrax.exc.NoSuchContainer, e:
        module.fail_json(msg=e.message)
def _fetch_meta(module, container):
    """Copy the container's metadata into EXIT_DICT['meta'], stripping the
    'x-container-meta-' header prefix from each key."""
    EXIT_DICT['meta'] = dict()
    try:
        for k, v in container.get_metadata().items():
            # Keep only the user-visible part of the metadata key.
            split_key = k.split(META_PREFIX)[-1]
            EXIT_DICT['meta'][split_key] = v
    except Exception, e:
        module.fail_json(msg=e.message)
def meta(cf, module, container_, state, meta_, clear_meta):
c = _get_container(module, cf, container_)
if meta_ and state == 'present':
try:
meta_set = c.set_metadata(meta_, clear=clear_meta)
except Exception, e:
module.fail_json(msg=e.message)
elif meta_ and state == 'absent':
remove_results = []
for k, v in meta_.items():
c.remove_metadata_key(k)
remove_results.append(k)
EXIT_DICT['deleted_meta_keys'] = remove_results
elif state == 'absent':
remove_results = []
for k, v in c.get_metadata().items():
c.remove_metadata_key(k)
remove_results.append(k)
EXIT_DICT['deleted_meta_keys'] = remove_results
_fetch_meta(module, c)
_locals = locals().keys()
EXIT_DICT['container'] = c.name
if 'meta_set' in _locals or 'remove_results' in _locals:
EXIT_DICT['changed'] = True
module.exit_json(**EXIT_DICT)
def container(cf, module, container_, state, meta_, clear_meta, ttl, public,
              private, web_index, web_error):
    """Create, delete, list or reconfigure a Cloud Files container, then
    exit the module with the accumulated EXIT_DICT result.

    Attribute changes (metadata, TTL, public/private, web index/error pages)
    are only applied to containers that already exist; a container created
    in this run is reported as created and left otherwise untouched.
    """
    if public and private:
        module.fail_json(msg='container cannot be simultaneously '
                             'set to public and private')

    if state == 'absent' and (meta_ or clear_meta or public or private or web_index or web_error):
        module.fail_json(msg='state cannot be omitted when setting/removing '
                             'attributes on a container')

    if state == 'list':
        # We don't care if attributes are specified, let's list containers
        EXIT_DICT['containers'] = cf.list_containers()
        module.exit_json(**EXIT_DICT)

    try:
        c = cf.get_container(container_)
    except pyrax.exc.NoSuchContainer, e:
        # Make the container if state=present, otherwise bomb out
        if state == 'present':
            try:
                c = cf.create_container(container_)
            except Exception, e:
                module.fail_json(msg=e.message)
            else:
                EXIT_DICT['changed'] = True
                EXIT_DICT['created'] = True
        else:
            module.fail_json(msg=e.message)
    else:
        # Successfully grabbed a container object
        # Delete if state is absent
        if state == 'absent':
            try:
                cont_deleted = c.delete()
            except Exception, e:
                module.fail_json(msg=e.message)
            else:
                EXIT_DICT['deleted'] = True

        if meta_:
            try:
                meta_set = c.set_metadata(meta_, clear=clear_meta)
            except Exception, e:
                module.fail_json(msg=e.message)
            finally:
                # Always report the (possibly updated) metadata back.
                _fetch_meta(module, c)

        if ttl:
            try:
                c.cdn_ttl = ttl
            except Exception, e:
                module.fail_json(msg=e.message)
            else:
                EXIT_DICT['ttl'] = c.cdn_ttl

        if public:
            try:
                cont_public = c.make_public()
            except Exception, e:
                module.fail_json(msg=e.message)
            else:
                # Expose every CDN endpoint variant for the now-public container.
                EXIT_DICT['container_urls'] = dict(url=c.cdn_uri,
                                                  ssl_url=c.cdn_ssl_uri,
                                                  streaming_url=c.cdn_streaming_uri,
                                                  ios_uri=c.cdn_ios_uri)

        if private:
            try:
                cont_private = c.make_private()
            except Exception, e:
                module.fail_json(msg=e.message)
            else:
                EXIT_DICT['set_private'] = True

        if web_index:
            try:
                cont_web_index = c.set_web_index_page(web_index)
            except Exception, e:
                module.fail_json(msg=e.message)
            else:
                EXIT_DICT['set_index'] = True
            finally:
                _fetch_meta(module, c)

        if web_error:
            try:
                cont_err_index = c.set_web_error_page(web_error)
            except Exception, e:
                module.fail_json(msg=e.message)
            else:
                EXIT_DICT['set_error'] = True
            finally:
                _fetch_meta(module, c)

    EXIT_DICT['container'] = c.name
    EXIT_DICT['objs_in_container'] = c.object_count
    EXIT_DICT['total_bytes'] = c.total_bytes

    # Any of these locals existing means the corresponding operation ran
    # successfully, i.e. the container was changed in some way.
    _locals = locals().keys()
    if ('cont_deleted' in _locals
            or 'meta_set' in _locals
            or 'cont_public' in _locals
            or 'cont_private' in _locals
            or 'cont_web_index' in _locals
            or 'cont_err_index' in _locals):
        EXIT_DICT['changed'] = True

    module.exit_json(**EXIT_DICT)
def cloudfiles(module, container_, state, meta_, clear_meta, typ, ttl, public,
               private, web_index, web_error):
    """Dispatch the request to either container or metadata handling.

    Fails the module early when the pyrax Cloud Files client could not be
    instantiated (typically a bad or mis-capitalized region name).
    """
    client = pyrax.cloudfiles
    if client is None:
        module.fail_json(msg='Failed to instantiate client. This '
                             'typically indicates an invalid region or an '
                             'incorrectly capitalized region name.')

    if typ != "container":
        # Metadata-only operations.
        meta(client, module, container_, state, meta_, clear_meta)
    else:
        container(client, module, container_, state, meta_, clear_meta, ttl,
                  public, private, web_index, web_error)
def main():
    """Module entry point: parse arguments, validate them and dispatch to
    cloudfiles()."""
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            container=dict(),
            state=dict(choices=['present', 'absent', 'list'],
                       default='present'),
            meta=dict(type='dict', default=dict()),
            clear_meta=dict(default=False, type='bool'),
            type=dict(choices=['container', 'meta'], default='container'),
            ttl=dict(type='int'),
            public=dict(default=False, type='bool'),
            private=dict(default=False, type='bool'),
            web_index=dict(),
            web_error=dict()
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together()
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    container_ = module.params.get('container')
    state = module.params.get('state')
    meta_ = module.params.get('meta')
    clear_meta = module.params.get('clear_meta')
    typ = module.params.get('type')
    ttl = module.params.get('ttl')
    public = module.params.get('public')
    private = module.params.get('private')
    web_index = module.params.get('web_index')
    web_error = module.params.get('web_error')

    # A container name is mandatory except for the 'list' pseudo-state.
    if state in ['present', 'absent'] and not container_:
        module.fail_json(msg='please specify a container name')
    # clear_meta only makes sense for metadata operations.
    if clear_meta and not typ == 'meta':
        module.fail_json(msg='clear_meta can only be used when setting '
                             'metadata')

    setup_rax_module(module, pyrax)
    cloudfiles(module, container_, state, meta_, clear_meta, typ, ttl, public,
               private, web_index, web_error)
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
main()
| gpl-3.0 |
gfxprim/gfxprim | pylib/gfxprim/grabbers/__init__.py | 1 | 1047 | """
Module for GFXprim grabbers.
"""
# Import the SWIG wrapper
from . import c_grabbers
from ..utils import extend
def extend_grabber(_Grabber):
    """
    Extends _grabber class with convenience methods.
    Called once on module initialization.
    """
    # Each method below is attached onto the SWIG-generated class via the
    # @extend decorator and simply forwards to the C wrapper function.
    @extend(_Grabber)
    def start(self):
        "Starts frame capture."
        c_grabbers.gp_grabber_start(self)

    @extend(_Grabber)
    def stop(self):
        "Stops frame capture."
        c_grabbers.gp_grabber_stop(self)

    @extend(_Grabber)
    def poll(self):
        "Polls for a frame."
        return c_grabbers.gp_grabber_poll(self)

# Pull gp_grabber out of the SWIG wrapper under a Pythonic name.
Grabber = c_grabbers.gp_grabber
def _init(module):
    """One-shot module initializer: extends the Grabber class and re-exports
    selected SWIG members into this module's namespace (with the gp_ prefix
    stripped)."""
    # Extend gp_grabber with convenience methods
    extend_grabber(Grabber)

    # Imports from the SWIG module
    import re
    def strip_gp(s):
        # gp_foo -> foo
        return re.sub('^gp_', '', s)

    # Import some members from the SWIG module, excluding SWIG machinery
    # and private names.
    from ..utils import import_members
    import_members(c_grabbers, module, sub=strip_gp,
                   exclude=[
                       '^gfxprim$',
                       '^\w+_swigregister$',
                       '^_\w+$'])

# Run the initializer against this module's namespace, then remove it so it
# does not leak into the public API.
_init(locals())
del _init
| lgpl-2.1 |
Korkki/django | tests/template_tests/filter_tests/test_ljust.py | 521 | 1081 | from django.template.defaultfilters import ljust
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class LjustTests(SimpleTestCase):
    """Template-level tests for the |ljust filter, covering autoescaping of
    unsafe vs. mark_safe() input."""

    @setup({'ljust01': '{% autoescape off %}.{{ a|ljust:"5" }}. .{{ b|ljust:"5" }}.{% endautoescape %}'})
    def test_ljust01(self):
        # With autoescape off, both safe and unsafe strings pass through raw.
        output = self.engine.render_to_string('ljust01', {"a": "a&b", "b": mark_safe("a&b")})
        self.assertEqual(output, ".a&b  . .a&b  .")

    @setup({'ljust02': '.{{ a|ljust:"5" }}. .{{ b|ljust:"5" }}.'})
    def test_ljust02(self):
        # With autoescape on, only the mark_safe() value escapes escaping.
        output = self.engine.render_to_string('ljust02', {"a": "a&b", "b": mark_safe("a&b")})
        self.assertEqual(output, ".a&amp;b  . .a&b  .")
class FunctionTests(SimpleTestCase):
    """Direct unit tests for the ljust() filter function."""

    def test_ljust(self):
        self.assertEqual(ljust('test', 10), 'test      ')
        self.assertEqual(ljust('test', 3), 'test')

    def test_less_than_string_length(self):
        # A width smaller than the string leaves it unchanged.
        self.assertEqual(ljust('test', 3), 'test')

    def test_non_string_input(self):
        # Non-string values are coerced to str before padding.
        self.assertEqual(ljust(123, 4), '123 ')
| bsd-3-clause |
torufuru/OFPatchPanel | ryu/contrib/ncclient/operations/flowmon.py | 82 | 1213 | # Copyright 2h009 Shikhar Bhushan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'Power-control operations'
from ncclient.xml_ import *
from rpc import RPC
PC_URN = "urn:liberouter:params:xml:ns:netconf:power-control:1.0"
class PoweroffMachine(RPC):
    "*poweroff-machine* RPC (flowmon)"

    # "params" (plural) matches both the namespace URN above and the
    # capability spelling used by RebootMachine below; the previous
    # singular "param" spelling could never match an advertised capability.
    DEPENDS = ["urn:liberouter:params:netconf:capability:power-control:1.0"]

    def request(self):
        """Build and send the <poweroff-machine> RPC."""
        return self._request(new_ele(qualify("poweroff-machine", PC_URN)))
class RebootMachine(RPC):
    "*reboot-machine* RPC (flowmon)"

    # Capability the device must advertise for this RPC to be usable.
    DEPENDS = ["urn:liberouter:params:netconf:capability:power-control:1.0"]

    def request(self):
        """Build and send the <reboot-machine> RPC."""
        return self._request(new_ele(qualify("reboot-machine", PC_URN)))
| apache-2.0 |
wayneicn/crazyflie-clients-python | lib/cfclient/ui/tabs/FlightTab.py | 12 | 18725 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
The flight control tab shows telemetry data and flight settings.
"""
__author__ = 'Bitcraze AB'
__all__ = ['FlightTab']
import sys
import logging
logger = logging.getLogger(__name__)
from time import time
from PyQt4 import QtCore, QtGui, uic
from PyQt4.QtCore import Qt, pyqtSlot, pyqtSignal, QThread, SIGNAL
from PyQt4.QtGui import QMessageBox
from cflib.crazyflie import Crazyflie
from cfclient.ui.widgets.ai import AttitudeIndicator
from cfclient.utils.guiconfig import GuiConfig
from cflib.crazyflie.log import Log, LogVariable, LogConfig
from cfclient.ui.tab import Tab
# Load the Qt Designer .ui file for this tab; index [0] is the generated
# form class used as a mixin by FlightTab.
flight_tab_class = uic.loadUiType(sys.path[0] +
                                  "/cfclient/ui/tabs/flightTab.ui")[0]

# Maximum raw thrust value accepted by the Crazyflie (16-bit range).
MAX_THRUST = 65365.0
class FlightTab(Tab, flight_tab_class):
    """Flight-control tab: displays telemetry (attitude, thrust, motors,
    barometer) and exposes flight settings (trim, limits, flight modes).

    All Crazyflie/input-device callbacks arrive on non-GUI threads, so each
    is bridged onto the Qt main thread through a dedicated pyqtSignal.
    """
    uiSetupReadySignal = pyqtSignal()

    # Signals bridging log/input callbacks into the Qt event loop.
    _motor_data_signal = pyqtSignal(int, object, object)
    _imu_data_signal = pyqtSignal(int, object, object)
    _althold_data_signal = pyqtSignal(int, object, object)
    _baro_data_signal = pyqtSignal(int, object, object)
    _input_updated_signal = pyqtSignal(float, float, float, float)
    _rp_trim_updated_signal = pyqtSignal(float, float)
    _emergency_stop_updated_signal = pyqtSignal(bool)
    _log_error_signal = pyqtSignal(object, str)

    #UI_DATA_UPDATE_FPS = 10

    connectionFinishedSignal = pyqtSignal(str)
    disconnectedSignal = pyqtSignal(str)

    def __init__(self, tabWidget, helper, *args):
        super(FlightTab, self).__init__(*args)
        self.setupUi(self)

        self.tabName = "Flight Control"
        self.menuName = "Flight Control"

        self.tabWidget = tabWidget
        self.helper = helper

        self.disconnectedSignal.connect(self.disconnected)
        self.connectionFinishedSignal.connect(self.connected)
        # Incoming signals
        self.helper.cf.connected.add_callback(
            self.connectionFinishedSignal.emit)
        self.helper.cf.disconnected.add_callback(self.disconnectedSignal.emit)

        self._input_updated_signal.connect(self.updateInputControl)
        self.helper.inputDeviceReader.input_updated.add_callback(
            self._input_updated_signal.emit)
        self._rp_trim_updated_signal.connect(self.calUpdateFromInput)
        self.helper.inputDeviceReader.rp_trim_updated.add_callback(
            self._rp_trim_updated_signal.emit)
        self._emergency_stop_updated_signal.connect(self.updateEmergencyStop)
        self.helper.inputDeviceReader.emergency_stop_updated.add_callback(
            self._emergency_stop_updated_signal.emit)
        self.helper.inputDeviceReader.althold_updated.add_callback(
            lambda enabled: self.helper.cf.param.set_value("flightmode.althold", enabled))

        self._imu_data_signal.connect(self._imu_data_received)
        self._baro_data_signal.connect(self._baro_data_received)
        self._althold_data_signal.connect(self._althold_data_received)
        self._motor_data_signal.connect(self._motor_data_received)

        self._log_error_signal.connect(self._logging_error)

        # Connect UI signals that are in this tab
        self.flightModeCombo.currentIndexChanged.connect(self.flightmodeChange)
        self.minThrust.valueChanged.connect(self.minMaxThrustChanged)
        self.maxThrust.valueChanged.connect(self.minMaxThrustChanged)
        self.thrustLoweringSlewRateLimit.valueChanged.connect(
            self.thrustLoweringSlewRateLimitChanged)
        self.slewEnableLimit.valueChanged.connect(
            self.thrustLoweringSlewRateLimitChanged)
        self.targetCalRoll.valueChanged.connect(self._trim_roll_changed)
        self.targetCalPitch.valueChanged.connect(self._trim_pitch_changed)
        self.maxAngle.valueChanged.connect(self.maxAngleChanged)
        self.maxYawRate.valueChanged.connect(self.maxYawRateChanged)
        self.uiSetupReadySignal.connect(self.uiSetupReady)
        self.clientXModeCheckbox.toggled.connect(self.changeXmode)
        self.isInCrazyFlightmode = False
        self.uiSetupReady()

        self.clientXModeCheckbox.setChecked(GuiConfig().get("client_side_xmode"))

        # Keep the X-mode / rate-PID / alt-hold UI in sync with the
        # corresponding firmware parameters (both directions).
        self.crazyflieXModeCheckbox.clicked.connect(
            lambda enabled:
            self.helper.cf.param.set_value("flightmode.x",
                                           str(enabled)))
        self.helper.cf.param.add_update_callback(
            group="flightmode", name="xmode",
            cb=( lambda name, checked:
                self.crazyflieXModeCheckbox.setChecked(eval(checked))))
        self.ratePidRadioButton.clicked.connect(
            lambda enabled:
            self.helper.cf.param.set_value("flightmode.ratepid",
                                           str(enabled)))
        self.angularPidRadioButton.clicked.connect(
            lambda enabled:
            self.helper.cf.param.set_value("flightmode.ratepid",
                                           str(not enabled)))
        self.helper.cf.param.add_update_callback(
            group="flightmode", name="ratepid",
            cb=(lambda name, checked:
                self.ratePidRadioButton.setChecked(eval(checked))))
        self.helper.cf.param.add_update_callback(
            group="flightmode", name="althold",
            cb=(lambda name, enabled:
                self.helper.inputDeviceReader.setAltHold(eval(enabled))))

        self.helper.cf.param.add_update_callback(
            group="imu_sensors",
            cb=self._set_available_sensors)

        self.logBaro = None
        self.logAltHold = None

        self.ai = AttitudeIndicator()
        self.verticalLayout_4.addWidget(self.ai)
        self.splitter.setSizes([1000,1])

        self.targetCalPitch.setValue(GuiConfig().get("trim_pitch"))
        self.targetCalRoll.setValue(GuiConfig().get("trim_roll"))

    def thrustToPercentage(self, thrust):
        """Convert a raw thrust value (0..MAX_THRUST) to a percentage."""
        return ((thrust / MAX_THRUST) * 100.0)

    def uiSetupReady(self):
        """Restore the last-used flight mode from the configuration."""
        flightComboIndex = self.flightModeCombo.findText(
            GuiConfig().get("flightmode"), Qt.MatchFixedString)
        if (flightComboIndex < 0):
            self.flightModeCombo.setCurrentIndex(0)
            self.flightModeCombo.currentIndexChanged.emit(0)
        else:
            self.flightModeCombo.setCurrentIndex(flightComboIndex)
            self.flightModeCombo.currentIndexChanged.emit(flightComboIndex)

    def _logging_error(self, log_conf, msg):
        """Show a dialog when a log configuration fails to start."""
        QMessageBox.about(self, "Log error", "Error when starting log config"
                          " [%s]: %s" % (log_conf.name, msg))

    def _motor_data_received(self, timestamp, data, logconf):
        """Update the four motor gauges with new log data."""
        if self.isVisible():
            self.actualM1.setValue(data["motor.m1"])
            self.actualM2.setValue(data["motor.m2"])
            self.actualM3.setValue(data["motor.m3"])
            self.actualM4.setValue(data["motor.m4"])

    def _baro_data_received(self, timestamp, data, logconf):
        """Update the altitude-above-sea-level readout and indicator."""
        if self.isVisible():
            self.actualASL.setText(("%.2f" % data["baro.aslLong"]))
            self.ai.setBaro(data["baro.aslLong"])

    def _althold_data_received(self, timestamp, data, logconf):
        """Show the altitude-hold target; a target <= 0 means hold is off."""
        if self.isVisible():
            target = data["altHold.target"]
            if target>0:
                if not self.targetASL.isEnabled():
                    self.targetASL.setEnabled(True)
                self.targetASL.setText(("%.2f" % target))
                self.ai.setHover(target)
            elif self.targetASL.isEnabled():
                self.targetASL.setEnabled(False)
                self.targetASL.setText("Not set")
                self.ai.setHover(0)

    def _imu_data_received(self, timestamp, data, logconf):
        """Update attitude readouts, thrust percentage and the AI widget."""
        if self.isVisible():
            self.actualRoll.setText(("%.2f" % data["stabilizer.roll"]))
            self.actualPitch.setText(("%.2f" % data["stabilizer.pitch"]))
            self.actualYaw.setText(("%.2f" % data["stabilizer.yaw"]))
            self.actualThrust.setText("%.2f%%" %
                                      self.thrustToPercentage(
                                          data["stabilizer.thrust"]))

            self.ai.setRollPitch(-data["stabilizer.roll"],
                                 data["stabilizer.pitch"])

    def connected(self, linkURI):
        """Set up stabilizer and motor log configurations on connect."""
        # IMU & THRUST
        lg = LogConfig("Stabalizer", GuiConfig().get("ui_update_period"))
        lg.add_variable("stabilizer.roll", "float")
        lg.add_variable("stabilizer.pitch", "float")
        lg.add_variable("stabilizer.yaw", "float")
        lg.add_variable("stabilizer.thrust", "uint16_t")

        self.helper.cf.log.add_config(lg)
        if (lg.valid):
            lg.data_received_cb.add_callback(self._imu_data_signal.emit)
            lg.error_cb.add_callback(self._log_error_signal.emit)
            lg.start()
        else:
            logger.warning("Could not setup logconfiguration after "
                           "connection!")

        # MOTOR
        lg = LogConfig("Motors", GuiConfig().get("ui_update_period"))
        lg.add_variable("motor.m1")
        lg.add_variable("motor.m2")
        lg.add_variable("motor.m3")
        lg.add_variable("motor.m4")

        self.helper.cf.log.add_config(lg)
        if lg.valid:
            lg.data_received_cb.add_callback(self._motor_data_signal.emit)
            lg.error_cb.add_callback(self._log_error_signal.emit)
            lg.start()
        else:
            logger.warning("Could not setup logconfiguration after "
                           "connection!")

    def _set_available_sensors(self, name, available):
        """React to the imu_sensors parameter group: enable alt-hold UI and
        start baro/alt-hold logging when the magnetometer board is present.

        NOTE(review): `available` arrives as a string and is eval()'d to a
        bool — assumes the firmware only ever sends 'True'/'False'.
        """
        logger.info("[%s]: %s", name, available)
        available = eval(available)

        if ("HMC5883L" in name):
            if (not available):
                self.actualASL.setText("N/A")
                self.actualASL.setEnabled(False)
            else:
                self.actualASL.setEnabled(True)
                self.helper.inputDeviceReader.setAltHoldAvailable(available)
                if (not self.logBaro and not self.logAltHold):
                    # The sensor is available, set up the logging
                    self.logBaro = LogConfig("Baro", 200)
                    self.logBaro.add_variable("baro.aslLong", "float")

                    self.helper.cf.log.add_config(self.logBaro)
                    if self.logBaro.valid:
                        self.logBaro.data_received_cb.add_callback(
                            self._baro_data_signal.emit)
                        self.logBaro.error_cb.add_callback(
                            self._log_error_signal.emit)
                        self.logBaro.start()
                    else:
                        logger.warning("Could not setup logconfiguration after "
                                       "connection!")
                    self.logAltHold = LogConfig("AltHold", 200)
                    self.logAltHold.add_variable("altHold.target", "float")

                    self.helper.cf.log.add_config(self.logAltHold)
                    if self.logAltHold.valid:
                        self.logAltHold.data_received_cb.add_callback(
                            self._althold_data_signal.emit)
                        self.logAltHold.error_cb.add_callback(
                            self._log_error_signal.emit)
                        self.logAltHold.start()
                    else:
                        logger.warning("Could not setup logconfiguration after "
                                       "connection!")

    def disconnected(self, linkURI):
        """Reset all telemetry readouts when the link goes down."""
        self.ai.setRollPitch(0, 0)
        self.actualM1.setValue(0)
        self.actualM2.setValue(0)
        self.actualM3.setValue(0)
        self.actualM4.setValue(0)
        self.actualRoll.setText("")
        self.actualPitch.setText("")
        self.actualYaw.setText("")
        self.actualThrust.setText("")
        self.actualASL.setText("")
        self.targetASL.setText("Not Set")
        self.targetASL.setEnabled(False)
        self.actualASL.setEnabled(False)
        self.logBaro = None
        self.logAltHold = None

    def minMaxThrustChanged(self):
        """Propagate thrust limits to the input reader; persist in advanced mode."""
        self.helper.inputDeviceReader.set_thrust_limits(
            self.minThrust.value(), self.maxThrust.value())
        if (self.isInCrazyFlightmode == True):
            GuiConfig().set("min_thrust", self.minThrust.value())
            GuiConfig().set("max_thrust", self.maxThrust.value())

    def thrustLoweringSlewRateLimitChanged(self):
        """Propagate thrust slew-rate limiting; persist in advanced mode."""
        self.helper.inputDeviceReader.set_thrust_slew_limiting(
            self.thrustLoweringSlewRateLimit.value(),
            self.slewEnableLimit.value())
        if (self.isInCrazyFlightmode == True):
            GuiConfig().set("slew_limit", self.slewEnableLimit.value())
            GuiConfig().set("slew_rate", self.thrustLoweringSlewRateLimit.value())

    def maxYawRateChanged(self):
        """Propagate the yaw-rate limit; persist in advanced mode."""
        logger.debug("MaxYawrate changed to %d", self.maxYawRate.value())
        self.helper.inputDeviceReader.set_yaw_limit(self.maxYawRate.value())
        if (self.isInCrazyFlightmode == True):
            GuiConfig().set("max_yaw", self.maxYawRate.value())

    def maxAngleChanged(self):
        """Propagate the roll/pitch angle limit; persist in advanced mode."""
        logger.debug("MaxAngle changed to %d", self.maxAngle.value())
        self.helper.inputDeviceReader.set_rp_limit(self.maxAngle.value())
        if (self.isInCrazyFlightmode == True):
            GuiConfig().set("max_rp", self.maxAngle.value())

    def _trim_pitch_changed(self, value):
        """Apply and persist the pitch trim value."""
        logger.debug("Pitch trim updated to [%f]" % value)
        self.helper.inputDeviceReader.set_trim_pitch(value)
        GuiConfig().set("trim_pitch", value)

    def _trim_roll_changed(self, value):
        """Apply and persist the roll trim value."""
        logger.debug("Roll trim updated to [%f]" % value)
        self.helper.inputDeviceReader.set_trim_roll(value)
        GuiConfig().set("trim_roll", value)

    def calUpdateFromInput(self, rollCal, pitchCal):
        """Reflect trim values changed from the input device in the UI."""
        logger.debug("Trim changed on joystick: roll=%.2f, pitch=%.2f",
                     rollCal, pitchCal)
        self.targetCalRoll.setValue(rollCal)
        self.targetCalPitch.setValue(pitchCal)

    def updateInputControl(self, roll, pitch, yaw, thrust):
        """Show the current setpoints coming from the input device."""
        self.targetRoll.setText(("%0.2f" % roll))
        self.targetPitch.setText(("%0.2f" % pitch))
        self.targetYaw.setText(("%0.2f" % yaw))
        self.targetThrust.setText(("%0.2f %%" %
                                   self.thrustToPercentage(thrust)))
        self.thrustProgress.setValue(thrust)

    def setMotorLabelsEnabled(self, enabled):
        """Enable/disable the four motor labels as a group."""
        self.M1label.setEnabled(enabled)
        self.M2label.setEnabled(enabled)
        self.M3label.setEnabled(enabled)
        self.M4label.setEnabled(enabled)

    def emergencyStopStringWithText(self, text):
        """Return the given text wrapped in the red emergency-stop markup."""
        return ("<html><head/><body><p>"
                "<span style='font-weight:600; color:#7b0005;'>{}</span>"
                "</p></body></html>".format(text))

    def updateEmergencyStop(self, emergencyStop):
        """Reflect the kill-switch state in the motor labels and banner."""
        if emergencyStop:
            self.setMotorLabelsEnabled(False)
            self.emergency_stop_label.setText(
                self.emergencyStopStringWithText("Kill switch active"))
        else:
            self.setMotorLabelsEnabled(True)
            self.emergency_stop_label.setText("")

    def flightmodeChange(self, item):
        """Switch between Normal (item 0) and Advanced flight mode profiles,
        loading the profile's limits and locking/unlocking the controls."""
        GuiConfig().set("flightmode", self.flightModeCombo.itemText(item))
        logger.info("Changed flightmode to %s",
                    self.flightModeCombo.itemText(item))
        self.isInCrazyFlightmode = False
        if (item == 0): # Normal
            self.maxAngle.setValue(GuiConfig().get("normal_max_rp"))
            self.maxThrust.setValue(GuiConfig().get("normal_max_thrust"))
            self.minThrust.setValue(GuiConfig().get("normal_min_thrust"))
            self.slewEnableLimit.setValue(GuiConfig().get("normal_slew_limit"))
            self.thrustLoweringSlewRateLimit.setValue(
                GuiConfig().get("normal_slew_rate"))
            self.maxYawRate.setValue(GuiConfig().get("normal_max_yaw"))
        if (item == 1): # Advanced
            self.maxAngle.setValue(GuiConfig().get("max_rp"))
            self.maxThrust.setValue(GuiConfig().get("max_thrust"))
            self.minThrust.setValue(GuiConfig().get("min_thrust"))
            self.slewEnableLimit.setValue(GuiConfig().get("slew_limit"))
            self.thrustLoweringSlewRateLimit.setValue(
                GuiConfig().get("slew_rate"))
            self.maxYawRate.setValue(GuiConfig().get("max_yaw"))
            self.isInCrazyFlightmode = True

        if (item == 0):
            newState = False
        else:
            newState = True
        self.maxThrust.setEnabled(newState)
        self.maxAngle.setEnabled(newState)
        self.minThrust.setEnabled(newState)
        self.thrustLoweringSlewRateLimit.setEnabled(newState)
        self.slewEnableLimit.setEnabled(newState)
        self.maxYawRate.setEnabled(newState)

    @pyqtSlot(bool)
    def changeXmode(self, checked):
        """Toggle client-side X-mode mixing and persist the choice."""
        self.helper.cf.commander.set_client_xmode(checked)
        GuiConfig().set("client_side_xmode", checked)
        logger.info("Clientside X-mode enabled: %s", checked)
| gpl-2.0 |
andhit-r/account-financial-tools | account_constraints/model/account_move.py | 36 | 1613 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author Joel Grand-Guillaume. Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, exceptions, _
class AccountMove(models.Model):
    _inherit = "account.move"

    @api.constrains('journal_id', 'period_id', 'date')
    def _check_fiscal_year(self):
        """Constraint: when the journal requires it (allow_date_fy), the
        move's date must fall inside the fiscal year of its period.

        Raises an openerp Warning on violation.
        """
        for move in self:
            if move.journal_id.allow_date_fy:
                date_start = move.period_id.fiscalyear_id.date_start
                date_stop = move.period_id.fiscalyear_id.date_stop
                # Dates are ISO-formatted strings, so lexicographic
                # comparison matches chronological order.
                if not date_start <= move.date <= date_stop:
                    raise exceptions.Warning(
                        _('You cannot create entries with date not in the '
                          'fiscal year of the chosen period'))
        return True
| agpl-3.0 |
lzppp/mylearning | ryu/services/protocols/bgp/operator/commands/show/count.py | 52 | 1840 | import logging
from ryu.services.protocols.bgp.operator.command import Command
from ryu.services.protocols.bgp.operator.command import CommandsResponse
from ryu.services.protocols.bgp.operator.command import STATUS_ERROR
from ryu.services.protocols.bgp.operator.command import STATUS_OK
from ryu.services.protocols.bgp.operator.commands.responses import \
WrongParamResp
LOG = logging.getLogger('bgpspeaker.operator.commands.show.count')
class Count(Command):
    """'count' CLI command: shows the BGP route count for one VRF, with an
    'all' subcommand covering every VRF."""
    help_msg = 'show counters'
    param_help_msg = '<vpn-name> <route-family>{ipv4, ipv6}'
    command = 'count'
    cli_resp_line_template = 'BGP route count for VPN {0} is {1}\n'

    def __init__(self, *args, **kwargs):
        super(Count, self).__init__(*args, **kwargs)
        self.subcommands = {
            'all': self.All
        }

    def action(self, params):
        """Count routes for one VRF; the route family defaults to ipv4."""
        if len(params) < 1:
            return CommandsResponse(STATUS_ERROR, 'Not enough params')
        else:
            vrf_name = params[0]

        if len(params) == 2:
            vrf_rf = params[1]
        else:
            vrf_rf = 'ipv4'

        # Imported here to avoid a circular import with internal_api.
        from ryu.services.protocols.bgp.operator.internal_api import \
            WrongParamError
        try:
            return CommandsResponse(
                STATUS_OK,
                self.api.count_single_vrf_routes(vrf_name, vrf_rf)
            )
        except WrongParamError as e:
            return WrongParamResp(e)

    class All(Command):
        """'count all' subcommand: route counts across every VRF."""
        help_msg = 'shows number of routes for all VRFs'
        command = 'all'
        cli_resp_line_template = 'BGP route count for VPN {0} is {1}\n'

        def action(self, params):
            # This subcommand takes no parameters.
            if len(params) > 0:
                return WrongParamResp()
            return CommandsResponse(STATUS_OK, self.api.count_all_vrf_routes())
| apache-2.0 |
bop/rango | lib/python2.7/site-packages/django/contrib/localflavor/kw/forms.py | 110 | 2016 | """
Kuwait-specific Form helpers
"""
from __future__ import unicode_literals
import re
from datetime import date
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field
from django.utils.translation import gettext as _
id_re = re.compile(r'^(?P<initial>\d{1})(?P<yy>\d\d)(?P<mm>\d\d)(?P<dd>\d\d)(?P<mid>\d{4})(?P<checksum>\d{1})')
class KWCivilIDNumberField(Field):
    """
    A form field validating Kuwaiti Civil ID numbers.

    A Civil ID is 12 digits long, with digits two through seven encoding
    the holder's birthdate.  A value is accepted when:

        * it consists of exactly 12 digits,
        * the embedded birthdate is a valid calendar date, and
        * the checksum digit matches the value computed from the first
          eleven digits.
    """
    default_error_messages = {
        'invalid': _('Enter a valid Kuwaiti Civil ID number'),
    }

    def has_valid_checksum(self, value):
        # Weighted sum of the first eleven digits, reduced modulo 11;
        # the check digit is 11 minus that remainder.
        weights = (2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2)
        total = sum(int(digit) * weight
                    for digit, weight in zip(value, weights))
        check_digit = 11 - (total % 11)
        return check_digit == int(value[11])

    def clean(self, value):
        super(KWCivilIDNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''

        if not re.match(r'^\d{12}$', value):
            raise ValidationError(self.error_messages['invalid'])

        match = id_re.match(value)
        if not match:
            raise ValidationError(self.error_messages['invalid'])

        gd = match.groupdict()
        try:
            # Constructing the date validates the embedded birthdate.
            date(int(gd['yy']), int(gd['mm']), int(gd['dd']))
        except ValueError:
            raise ValidationError(self.error_messages['invalid'])

        if not self.has_valid_checksum(value):
            raise ValidationError(self.error_messages['invalid'])
        return value
| gpl-2.0 |
gauribhoite/personfinder | env/site-packages/django/db/backends/dummy/base.py | 94 | 2567 | """
Dummy database backend for Django.
Django uses this if the database ENGINE setting is empty (None or empty string).
Each of these API functions, except connection.close(), raises
ImproperlyConfigured.
"""
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.base.client import BaseDatabaseClient
from django.db.backends.base.creation import BaseDatabaseCreation
from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.backends.base.introspection import BaseDatabaseIntrospection
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.backends.base.validation import BaseDatabaseValidation
def complain(*args, **kwargs):
    """Placeholder API: always fails, since the dummy backend cannot work."""
    raise ImproperlyConfigured(
        "settings.DATABASES is improperly configured. "
        "Please supply the ENGINE value. Check "
        "settings documentation for more details.")
def ignore(*args, **kwargs):
    """No-op placeholder for operations the dummy backend silently accepts."""
    return None
class DatabaseError(Exception):
    """Generic database-layer exception for the dummy backend."""
class IntegrityError(DatabaseError):
    """Integrity-constraint violation; subclass of DatabaseError."""
class DatabaseOperations(BaseDatabaseOperations):
    # Any attempt to quote an identifier raises ImproperlyConfigured
    # (via the module-level complain helper).
    quote_name = complain
class DatabaseClient(BaseDatabaseClient):
    # There is no shell for a non-existent database.
    runshell = complain
class DatabaseCreation(BaseDatabaseCreation):
    # Test-database creation/destruction are silent no-ops so the test
    # runner can proceed without a configured database.
    create_test_db = ignore
    destroy_test_db = ignore
class DatabaseIntrospection(BaseDatabaseIntrospection):
    # All introspection entry points fail loudly with ImproperlyConfigured.
    get_table_list = complain
    get_table_description = complain
    get_relations = complain
    get_indexes = complain
    get_key_columns = complain
class DatabaseWrapper(BaseDatabaseWrapper):
    """Connection wrapper whose data-access operations all fail loudly."""

    operators = {}
    # Override the base class implementations with null
    # implementations. Anything that tries to actually
    # do something raises complain; anything that tries
    # to rollback or undo something is silently ignored.
    _cursor = complain
    ensure_connection = complain
    _commit = complain
    _rollback = ignore
    _close = ignore
    _savepoint = ignore
    _savepoint_commit = complain
    _savepoint_rollback = ignore
    _set_autocommit = complain

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)

        # Wire up the component objects expected on every backend wrapper.
        self.features = BaseDatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = BaseDatabaseValidation(self)

    def is_usable(self):
        # There is no real connection, so the wrapper is always "usable".
        return True
| apache-2.0 |
toenuff/treadmill | tests/websocket/api/endpoint_test.py | 1 | 2612 | """
Unit test for endpoint websocket API.
"""
import unittest
# Disable W0611: Unused import
import tests.treadmill_test_deps # pylint: disable=W0611
from treadmill.websocket.api import endpoint
class WSEndpointAPITest(unittest.TestCase):
    """Tests for endpoint websocket API."""

    def test_subscribe(self):
        """Test subscription registration."""
        api = endpoint.EndpointAPI()
        # An exact app name becomes a bar#*:proto:endpoint pattern.
        self.assertEquals(
            [('/endpoints/foo', 'bar#*:tcp:http')],
            api.subscribe({'filter': 'foo.bar',
                           'proto': 'tcp',
                           'endpoint': 'http'})
        )

        # Wildcard app, proto and endpoint all collapse to '*'.
        self.assertEquals(
            [('/endpoints/foo', '*#*:*:*')],
            api.subscribe({'filter': 'foo.*',
                           'proto': '*',
                           'endpoint': '*'})
        )

        # None for proto/endpoint behaves the same as an explicit '*'.
        self.assertEquals(
            [('/endpoints/foo', '*#*:*:*')],
            api.subscribe({'filter': 'foo.*',
                           'proto': None,
                           'endpoint': None})
        )

    def test_on_event(self):
        """Tests payload generation."""
        api = endpoint.EndpointAPI()
        # 'sow' is True only when the event type argument is None.
        self.assertEquals(
            {'endpoint': 'http',
             'name': 'foo.bar#1234',
             'proto': 'tcp',
             'topic': '/endpoints',
             'host': 'xxx',
             'sow': True,
             'port': '1234'},
            api.on_event(
                '/endpoints/foo/bar#1234:tcp:http',
                None,
                'xxx:1234'
            )
        )

        self.assertEquals(
            {'endpoint': 'http',
             'name': 'foo.bar#1234',
             'proto': 'tcp',
             'topic': '/endpoints',
             'host': 'xxx',
             'sow': False,
             'port': '1234'},
            api.on_event(
                '/endpoints/foo/bar#1234:tcp:http',
                'm',
                'xxx:1234'
            )
        )

        # A delete event ('d') carries no payload, so host/port are None.
        self.assertEquals(
            {'endpoint': 'http',
             'name': 'foo.bar#1234',
             'proto': 'tcp',
             'topic': '/endpoints',
             'host': None,
             'sow': False,
             'port': None},
            api.on_event(
                '/endpoints/foo/bar#1234:tcp:http',
                'd',
                None
            )
        )

        # Ignore create event.
        self.assertIsNone(
            api.on_event(
                '/endpoints/foo/bar#1234:tcp:http',
                'c',
                'xxx:1234'
            )
        )
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
evelkey/vahun | vahun/Autoencoder_FFNN.py | 1 | 1714 | import tensorflow as tf
import numpy as np
from vahun.autoencoder import Autoencoder
class Autoencoder_FFNN(Autoencoder):
    """Fully connected feed-forward autoencoder (TensorFlow v1 graph API).

    Relies on attributes initialized by the Autoencoder base class:
    ``n_input``, ``layerlist`` (per-layer widths), ``layernum``,
    ``encode_index``, ``nonlinearity``, ``optimizer_function`` and
    ``xavier_init`` -- TODO confirm against vahun.autoencoder.Autoencoder.
    """

    def create_graph(self):
        # Build the weight tensors, wire up the layers, then define the
        # loss and the training op.
        network_weights = self._initialize_weights()
        self.weights = network_weights
        self._create_layers()
        # cost: 0.5 * sum of squared differences between the
        # reconstruction and the target placeholder self.y
        self.cost = 0.5*tf.reduce_sum(tf.pow(tf.subtract(self.reconstruction, self.y), 2.0))
        self.optimizer = self.optimizer_function.minimize(self.cost)

    def _initialize_weights(self):
        """Create Xavier-initialized weight and random-normal bias variables
        for every layer in ``layerlist``; returns them keyed as w1/b1..wN/bN."""
        all_weights = dict()
        all_weights['w1']=tf.Variable(self.xavier_init(self.n_input, self.layerlist[0]))
        all_weights['b1'] = tf.Variable(tf.random_normal([self.layerlist[0]], dtype=tf.float32))
        for i in range(1,self.layernum):
            all_weights['w'+str(i+1)]=tf.Variable(self.xavier_init(self.layerlist[i-1], self.layerlist[i]))
            all_weights['b'+str(i+1)] = tf.Variable(tf.random_normal([self.layerlist[i]], dtype=tf.float32))
        return all_weights

    def _create_layers(self):
        """Wire input -> hidden layers -> linear output reconstruction.

        All hidden layers apply ``self.nonlinearity``; the final layer is
        affine only.  ``self.encoded`` exposes the bottleneck activation.
        """
        self.x = tf.placeholder(tf.float32, [None, self.n_input])
        self.y = tf.placeholder(tf.float32, [None, self.n_input])
        layer=self.nonlinearity(tf.add(tf.matmul(self.x, self.weights['w1']), self.weights['b1']))
        self.encoded=layer
        for i in range(1,self.layernum-1):
            # NOTE(review): when i == encode_index, self.encoded is bound
            # to the activation *before* layer i is applied (i.e. the
            # output of layer i-1) -- confirm this off-by-one is intended.
            if i==self.encode_index:
                self.encoded=layer
            layer=self.nonlinearity(tf.add(tf.matmul(layer, self.weights['w'+str(i+1)]), self.weights['b'+str(i+1)]))
        # Final reconstruction layer is linear (no nonlinearity).
        self.reconstruction=tf.add(tf.matmul(layer, self.weights['w'+str(self.layernum)]), self.weights['b'+str(self.layernum)])
| apache-2.0 |
jswope00/GAI | cms/envs/dev_with_worker.py | 148 | 1153 | """
This config file follows the dev enviroment, but adds the
requirement of a celery worker running in the background to process
celery tasks.
The worker can be executed using:
django_admin.py celery worker
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
from dev import *
################################# CELERY ######################################

# Requires a separate celery worker
CELERY_ALWAYS_EAGER = False

# Use django db as the broker and result store
BROKER_URL = 'django://'
INSTALLED_APPS += ('djcelery.transport', )
CELERY_RESULT_BACKEND = 'database'
# Polling interval for the django-kombu database transport
# (presumably seconds -- confirm against djkombu docs).
DJKOMBU_POLLING_INTERVAL = 1.0

# Disable transaction management because we are using a worker. Views
# that request a task and wait for the result will deadlock otherwise.
MIDDLEWARE_CLASSES = tuple(
    c for c in MIDDLEWARE_CLASSES
    if c != 'django.middleware.transaction.TransactionMiddleware')

# Note: other alternatives for disabling transactions don't work in 1.4
# https://code.djangoproject.com/ticket/2304
# https://code.djangoproject.com/ticket/16039
| agpl-3.0 |
annegabrielle/secure_adhoc_network_ns-3 | ns3_source_code/ns-3.10/bindings/python/apidefs/gcc-ILP32/ns3_module_udp_client_server.py | 8 | 11996 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
def register_types(module):
    """Register the udp-client-server classes and nested namespaces.

    NOTE(review): this file appears to be machine-generated by pybindgen;
    prefer regenerating over hand-editing -- confirm with the build setup.
    """
    root_module = module.get_root()

    ## packet-loss-counter.h: ns3::PacketLossCounter [class]
    module.add_class('PacketLossCounter')
    ## udp-client.h: ns3::UdpClient [class]
    module.add_class('UdpClient', parent=root_module['ns3::Application'])
    ## udp-server.h: ns3::UdpServer [class]
    module.add_class('UdpServer', parent=root_module['ns3::Application'])
    ## udp-trace-client.h: ns3::UdpTraceClient [class]
    module.add_class('UdpTraceClient', parent=root_module['ns3::Application'])

    ## Register a nested module for the namespace Config
    nested_module = module.add_cpp_namespace('Config')
    register_types_ns3_Config(nested_module)

    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)

    ## Register a nested module for the namespace addressUtils
    nested_module = module.add_cpp_namespace('addressUtils')
    register_types_ns3_addressUtils(nested_module)

    ## Register a nested module for the namespace aodv
    nested_module = module.add_cpp_namespace('aodv')
    register_types_ns3_aodv(nested_module)

    ## Register a nested module for the namespace dot11s
    nested_module = module.add_cpp_namespace('dot11s')
    register_types_ns3_dot11s(nested_module)

    ## Register a nested module for the namespace dsdv
    nested_module = module.add_cpp_namespace('dsdv')
    register_types_ns3_dsdv(nested_module)

    ## Register a nested module for the namespace flame
    nested_module = module.add_cpp_namespace('flame')
    register_types_ns3_flame(nested_module)

    ## Register a nested module for the namespace internal
    nested_module = module.add_cpp_namespace('internal')
    register_types_ns3_internal(nested_module)

    ## Register a nested module for the namespace olsr
    nested_module = module.add_cpp_namespace('olsr')
    register_types_ns3_olsr(nested_module)
## Type-registration hooks for the nested C++ namespaces.  This module adds
## no types of its own to any of them, so each hook only fetches the root
## module and returns.
def register_types_ns3_Config(module):
    root_module = module.get_root()

def register_types_ns3_FatalImpl(module):
    root_module = module.get_root()

def register_types_ns3_addressUtils(module):
    root_module = module.get_root()

def register_types_ns3_aodv(module):
    root_module = module.get_root()

def register_types_ns3_dot11s(module):
    root_module = module.get_root()

def register_types_ns3_dsdv(module):
    root_module = module.get_root()

def register_types_ns3_flame(module):
    root_module = module.get_root()

def register_types_ns3_internal(module):
    root_module = module.get_root()

def register_types_ns3_olsr(module):
    root_module = module.get_root()
def register_methods(root_module):
    """Register constructors/methods for each class added by register_types."""
    register_Ns3PacketLossCounter_methods(root_module, root_module['ns3::PacketLossCounter'])
    register_Ns3UdpClient_methods(root_module, root_module['ns3::UdpClient'])
    register_Ns3UdpServer_methods(root_module, root_module['ns3::UdpServer'])
    register_Ns3UdpTraceClient_methods(root_module, root_module['ns3::UdpTraceClient'])
    return
def register_Ns3PacketLossCounter_methods(root_module, cls):
    """Bind ns3::PacketLossCounter constructors and member functions."""
    ## packet-loss-counter.h: ns3::PacketLossCounter::PacketLossCounter(ns3::PacketLossCounter const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PacketLossCounter const &', 'arg0')])
    ## packet-loss-counter.h: ns3::PacketLossCounter::PacketLossCounter(uint8_t bitmapSize) [constructor]
    cls.add_constructor([param('uint8_t', 'bitmapSize')])
    ## packet-loss-counter.h: uint16_t ns3::PacketLossCounter::GetBitMapSize() const [member function]
    cls.add_method('GetBitMapSize',
                   'uint16_t',
                   [],
                   is_const=True)
    ## packet-loss-counter.h: uint32_t ns3::PacketLossCounter::GetLost() const [member function]
    cls.add_method('GetLost',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet-loss-counter.h: void ns3::PacketLossCounter::NotifyReceived(uint32_t seq) [member function]
    cls.add_method('NotifyReceived',
                   'void',
                   [param('uint32_t', 'seq')])
    ## packet-loss-counter.h: void ns3::PacketLossCounter::SetBitMapSize(uint16_t size) [member function]
    cls.add_method('SetBitMapSize',
                   'void',
                   [param('uint16_t', 'size')])
    return
def register_Ns3UdpClient_methods(root_module, cls):
    """Bind ns3::UdpClient constructors and member functions."""
    ## udp-client.h: ns3::UdpClient::UdpClient(ns3::UdpClient const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::UdpClient const &', 'arg0')])
    ## udp-client.h: ns3::UdpClient::UdpClient() [constructor]
    cls.add_constructor([])
    ## udp-client.h: static ns3::TypeId ns3::UdpClient::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## udp-client.h: void ns3::UdpClient::SetRemote(ns3::Ipv4Address ip, uint16_t port) [member function]
    cls.add_method('SetRemote',
                   'void',
                   [param('ns3::Ipv4Address', 'ip'), param('uint16_t', 'port')])
    ## udp-client.h: void ns3::UdpClient::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## udp-client.h: void ns3::UdpClient::StartApplication() [member function]
    cls.add_method('StartApplication',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    ## udp-client.h: void ns3::UdpClient::StopApplication() [member function]
    cls.add_method('StopApplication',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    return
def register_Ns3UdpServer_methods(root_module, cls):
    """Bind ns3::UdpServer constructors and member functions."""
    ## udp-server.h: ns3::UdpServer::UdpServer(ns3::UdpServer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::UdpServer const &', 'arg0')])
    ## udp-server.h: ns3::UdpServer::UdpServer() [constructor]
    cls.add_constructor([])
    ## udp-server.h: uint32_t ns3::UdpServer::GetLost() const [member function]
    cls.add_method('GetLost',
                   'uint32_t',
                   [],
                   is_const=True)
    ## udp-server.h: uint16_t ns3::UdpServer::GetPacketWindowSize() const [member function]
    cls.add_method('GetPacketWindowSize',
                   'uint16_t',
                   [],
                   is_const=True)
    ## udp-server.h: uint32_t ns3::UdpServer::GetReceived() const [member function]
    cls.add_method('GetReceived',
                   'uint32_t',
                   [],
                   is_const=True)
    ## udp-server.h: static ns3::TypeId ns3::UdpServer::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## udp-server.h: void ns3::UdpServer::SetPacketWindowSize(uint16_t size) [member function]
    cls.add_method('SetPacketWindowSize',
                   'void',
                   [param('uint16_t', 'size')])
    ## udp-server.h: void ns3::UdpServer::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## udp-server.h: void ns3::UdpServer::StartApplication() [member function]
    cls.add_method('StartApplication',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    ## udp-server.h: void ns3::UdpServer::StopApplication() [member function]
    cls.add_method('StopApplication',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    return
def register_Ns3UdpTraceClient_methods(root_module, cls):
    """Bind ns3::UdpTraceClient constructors and member functions."""
    ## udp-trace-client.h: ns3::UdpTraceClient::UdpTraceClient(ns3::UdpTraceClient const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::UdpTraceClient const &', 'arg0')])
    ## udp-trace-client.h: ns3::UdpTraceClient::UdpTraceClient() [constructor]
    cls.add_constructor([])
    ## udp-trace-client.h: ns3::UdpTraceClient::UdpTraceClient(ns3::Ipv4Address ip, uint16_t port, char * traceFile) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address', 'ip'), param('uint16_t', 'port'), param('char *', 'traceFile')])
    ## udp-trace-client.h: uint16_t ns3::UdpTraceClient::GetMaxPacketSize() [member function]
    cls.add_method('GetMaxPacketSize',
                   'uint16_t',
                   [])
    ## udp-trace-client.h: static ns3::TypeId ns3::UdpTraceClient::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## udp-trace-client.h: void ns3::UdpTraceClient::SetMaxPacketSize(uint16_t maxPacketSize) [member function]
    cls.add_method('SetMaxPacketSize',
                   'void',
                   [param('uint16_t', 'maxPacketSize')])
    ## udp-trace-client.h: void ns3::UdpTraceClient::SetRemote(ns3::Ipv4Address ip, uint16_t port) [member function]
    cls.add_method('SetRemote',
                   'void',
                   [param('ns3::Ipv4Address', 'ip'), param('uint16_t', 'port')])
    ## udp-trace-client.h: void ns3::UdpTraceClient::SetTraceFile(std::string filename) [member function]
    cls.add_method('SetTraceFile',
                   'void',
                   [param('std::string', 'filename')])
    ## udp-trace-client.h: void ns3::UdpTraceClient::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## udp-trace-client.h: void ns3::UdpTraceClient::StartApplication() [member function]
    cls.add_method('StartApplication',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    ## udp-trace-client.h: void ns3::UdpTraceClient::StopApplication() [member function]
    cls.add_method('StopApplication',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    return
def register_functions(root_module):
    """Dispatch free-function registration for every nested namespace."""
    module = root_module
    register_functions_ns3_Config(module.get_submodule('Config'), root_module)
    register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
    register_functions_ns3_addressUtils(module.get_submodule('addressUtils'), root_module)
    register_functions_ns3_aodv(module.get_submodule('aodv'), root_module)
    register_functions_ns3_dot11s(module.get_submodule('dot11s'), root_module)
    register_functions_ns3_dsdv(module.get_submodule('dsdv'), root_module)
    register_functions_ns3_flame(module.get_submodule('flame'), root_module)
    register_functions_ns3_internal(module.get_submodule('internal'), root_module)
    register_functions_ns3_olsr(module.get_submodule('olsr'), root_module)
    return
## Function-registration hooks for the nested namespaces.  None of them
## defines free functions in this module, so every hook is an empty stub.
def register_functions_ns3_Config(module, root_module):
    return

def register_functions_ns3_FatalImpl(module, root_module):
    return

def register_functions_ns3_addressUtils(module, root_module):
    return

def register_functions_ns3_aodv(module, root_module):
    return

def register_functions_ns3_dot11s(module, root_module):
    return

def register_functions_ns3_dsdv(module, root_module):
    return

def register_functions_ns3_flame(module, root_module):
    return

def register_functions_ns3_internal(module, root_module):
    return

def register_functions_ns3_olsr(module, root_module):
    return
| gpl-2.0 |
goodwordalchemy/thinkstats_notes_and_exercises | code/chap08ex.py | 1 | 3809 | """This file contains code used in "Think Stats",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function
import thinkstats2
import thinkplot
import math
import random
import numpy as np
def MeanError(estimates, actual):
    """Computes the mean error of a sequence of estimates.

    estimates: sequence of numbers
    actual: actual value

    returns: float mean error
    """
    deviations = [guess - actual for guess in estimates]
    return np.mean(deviations)
def RMSE(estimates, actual):
    """Computes the root mean squared error of a sequence of estimates.

    estimates: sequence of numbers
    actual: actual value

    returns: float RMSE
    """
    squared_errors = [(guess - actual) ** 2 for guess in estimates]
    return math.sqrt(np.mean(squared_errors))
def Estimate1(n=7, m=1000):
    """Evaluates RMSE of sample mean and median as estimators.

    n: sample size
    m: number of iterations
    """
    mu = 0
    sigma = 1

    means = []
    medians = []
    for _ in range(m):
        # Draw one sample of n standard normals per iteration.
        sample = [random.gauss(mu, sigma) for _ in range(n)]
        means.append(np.mean(sample))
        medians.append(np.median(sample))

    print('Experiment 1: m = %d' % m)
    print('rmse xbar', RMSE(means, mu))
    print('\tmean error xbar', MeanError(means, mu))
    print('rmse median', RMSE(medians, mu))
    print('\tmean error median', MeanError(medians, mu))
def Estimate2(n=7, m=1000):
    """Evaluates S and Sn-1 as estimators of sample variance.

    n: sample size
    m: number of iterations
    """
    mu = 0
    sigma = 1

    biased_estimates = []
    unbiased_estimates = []
    for _ in range(m):
        sample = [random.gauss(mu, sigma) for _ in range(n)]
        # ddof=0 gives the biased MLE; ddof=1 the unbiased estimator.
        biased_estimates.append(np.var(sample))
        unbiased_estimates.append(np.var(sample, ddof=1))

    print('Experiment 2')
    print('mean error biased', MeanError(biased_estimates, sigma**2))
    print('mean error unbiased', MeanError(unbiased_estimates, sigma**2))
def Estimate3(n=7, m=1000):
    """Evaluates L and Lm as estimators of the exponential parameter.

    n: sample size
    m: number of iterations
    """
    lam = 2

    means = []
    medians = []
    for _ in range(m):
        sample = np.random.exponential(1.0/lam, n)
        # L = 1 / sample mean; Lm = ln(2) / sample median.
        means.append(1 / np.mean(sample))
        medians.append(math.log(2) / np.median(sample))

    print('Experiment 3')
    print('rmse L', RMSE(means, lam))
    print('rmse Lm', RMSE(medians, lam))
    print('mean error L', MeanError(means, lam))
    print('mean error Lm', MeanError(medians, lam))
def SimulateSample(mu=90, sigma=7.5, n=9, m=1000):
    """Plots the sampling distribution of the sample mean.

    mu: hypothetical population mean
    sigma: hypothetical population standard deviation
    n: sample size
    m: number of iterations
    """
    def VertLine(x, y=1):
        # Light gray vertical line at x, used to mark the CI bounds.
        thinkplot.Plot([x, x], [0, y], color='0.8', linewidth=3)

    means = []
    for _ in range(m):
        xs = np.random.normal(mu, sigma, n)
        xbar = np.mean(xs)
        means.append(xbar)

    # RMSE of the sample means around mu estimates the standard error.
    stderr = RMSE(means, mu)
    print('standard error', stderr)

    cdf = thinkstats2.Cdf(means)
    # 90% CI: 5th and 95th percentiles of the sampling distribution.
    ci = cdf.Percentile(5), cdf.Percentile(95)
    print('confidence interval', ci)
    VertLine(ci[0])
    VertLine(ci[1])

    # plot the CDF
    thinkplot.Cdf(cdf)
    thinkplot.Save(root='estimation1',
                   xlabel='sample mean',
                   ylabel='CDF',
                   title='Sampling distribution')
def main():
    """Run experiment 1 at increasing iteration counts, with a fixed seed."""
    thinkstats2.RandomSeed(17)

    for m in (10, 100, 1000, 10000):
        Estimate1(m=m)
if __name__ == '__main__':
main()
| gpl-3.0 |
legrosbuffle/or-tools | examples/python/map.py | 5 | 3176 | # Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Map coloring problem in Google CP Solver.
From Pascal Van Hentenryck 'The OPL Optimization Programming Language',
page 7, 42.
Compare with the following models:
* Comet: http://www.hakank.org/comet/map.co
* Tailor/Essence': http://hakank.org/tailor/map_coloring.eprime
* SICStus: http://hakank.org/sicstus/map_coloring.pl
* ECLiPSe: http://hakank.org/eclipse/map.ecl
* Gecode: http://hakank.org/gecode/map.cpp
* MiniZinc: http://hakank.org/minizinc/map.mzn
* Zinc: http://hakank.org/minizinc/map.zinc
This model was created by Hakan Kjellerstrand (hakank@bonetmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
from __future__ import print_function
from ortools.constraint_solver import pywrapcp
def main():
    """Build and solve the 6-country map-coloring model, printing all solutions."""
    # Create the solver.
    solver = pywrapcp.Solver("Map coloring")

    #
    # data
    #
    Belgium = 0
    Denmark = 1
    France = 2
    Germany = 3
    Netherlands = 4
    Luxembourg = 5

    n = 6
    max_num_colors = 4

    # declare variables: one color (1..max_num_colors) per country
    color = [solver.IntVar(1, max_num_colors, "x%i" % i) for i in range(n)]

    #
    # constraints: adjacent countries must differ in color
    #
    solver.Add(color[Belgium] == 1)  # Symmetry breaking
    solver.Add(color[France] != color[Belgium])
    solver.Add(color[France] != color[Luxembourg])
    solver.Add(color[France] != color[Germany])
    solver.Add(color[Luxembourg] != color[Germany])
    solver.Add(color[Luxembourg] != color[Belgium])
    solver.Add(color[Belgium] != color[Netherlands])
    solver.Add(color[Belgium] != color[Germany])
    solver.Add(color[Germany] != color[Netherlands])
    solver.Add(color[Germany] != color[Denmark])

    #
    # solution and search
    #
    solution = solver.Assignment()
    solution.Add([color[i] for i in range(n)])

    # Collect every solution found during the search.
    collector = solver.AllSolutionCollector(solution)
    # collector = solver.FirstSolutionCollector(solution)
    # search_log = solver.SearchLog(100, x[0])
    solver.Solve(solver.Phase([color[i] for i in range(n)],
                              solver.INT_VAR_SIMPLE,
                              solver.ASSIGN_MIN_VALUE),
                 [collector])
    num_solutions = collector.SolutionCount()
    print("num_solutions: ", num_solutions)
    if num_solutions > 0:
        for s in range(num_solutions):
            colorval = [collector.Value(s, color[i]) for i in range(n)]
            print("color:", colorval)
            print()
        print("num_solutions:", num_solutions)
        print("failures:", solver.Failures())
        print("branches:", solver.Branches())
        print("WallTime:", solver.WallTime())
    else:
        print("No solutions found")
main()
| apache-2.0 |
alex/sqlalchemy | test/ext/test_associationproxy.py | 2 | 50005 | from sqlalchemy.testing import eq_, assert_raises
import copy
import pickle
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.orm.collections import collection, attribute_mapped_collection
from sqlalchemy.ext.associationproxy import *
from sqlalchemy.ext.associationproxy import _AssociationList
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing.util import gc_collect
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
class DictCollection(dict):
    """Dict-based collection keyed by each member's ``foo`` attribute."""

    @collection.appender
    def append(self, child):
        self[child.foo] = child

    @collection.remover
    def remove(self, child):
        del self[child.foo]
class SetCollection(set):
    """Plain ``set`` subclass used as a custom collection class."""
class ListCollection(list):
    """Plain ``list`` subclass used as a custom collection class."""
class ObjectCollection(object):
    """Minimal duck-typed collection backed by an internal list."""

    def __init__(self):
        self.values = []

    @collection.appender
    def append(self, member):
        self.values.append(member)

    @collection.remover
    def remove(self, member):
        self.values.remove(member)

    def __iter__(self):
        return iter(self.values)
class _CollectionOperations(fixtures.TestBase):
    """Shared fixture: maps Parent/Child with an association proxy over
    ``self.collection_class`` (set by subclasses) and exercises list-style
    proxy operations."""

    def setup(self):
        collection_class = self.collection_class

        metadata = MetaData(testing.db)

        parents_table = Table('Parent', metadata,
                              Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
                              Column('name', String(128)))
        children_table = Table('Children', metadata,
                               Column('id', Integer, primary_key=True,
                                      test_needs_autoincrement=True),
                               Column('parent_id', Integer,
                                      ForeignKey('Parent.id')),
                               Column('foo', String(128)),
                               Column('name', String(128)))

        class Parent(object):
            # Proxy exposes each Child's .name through Parent._children.
            children = association_proxy('_children', 'name')

            def __init__(self, name):
                self.name = name

        class Child(object):
            # Dict-based collections construct children as Child(foo, name);
            # sequence collections use Child(name) only.
            if collection_class and issubclass(collection_class, dict):
                def __init__(self, foo, name):
                    self.foo = foo
                    self.name = name
            else:
                def __init__(self, name):
                    self.name = name

        mapper(Parent, parents_table, properties={
            '_children': relationship(Child, lazy='joined',
                                      collection_class=collection_class)})
        mapper(Child, children_table)

        metadata.create_all()

        self.metadata = metadata
        self.session = create_session()
        self.Parent, self.Child = Parent, Child

    def teardown(self):
        self.metadata.drop_all()

    def roundtrip(self, obj):
        # Flush to the database, clear the session and reload the object,
        # so assertions run against persisted state.
        if obj not in self.session:
            self.session.add(obj)
        self.session.flush()
        id, type_ = obj.id, type(obj)
        self.session.expunge_all()
        return self.session.query(type_).get(id)

    def _test_sequence_ops(self):
        Parent, Child = self.Parent, self.Child

        p1 = Parent('P1')

        self.assert_(not p1._children)
        self.assert_(not p1.children)

        # Appending to the underlying collection is visible through the proxy.
        ch = Child('regular')
        p1._children.append(ch)

        self.assert_(ch in p1._children)
        self.assert_(len(p1._children) == 1)

        self.assert_(p1.children)
        self.assert_(len(p1.children) == 1)
        self.assert_(ch not in p1.children)
        self.assert_('regular' in p1.children)

        # Appending to the proxy creates a Child behind the scenes.
        p1.children.append('proxied')

        self.assert_('proxied' in p1.children)
        self.assert_('proxied' not in p1._children)
        self.assert_(len(p1.children) == 2)
        self.assert_(len(p1._children) == 2)

        self.assert_(p1._children[0].name == 'regular')
        self.assert_(p1._children[1].name == 'proxied')

        del p1._children[1]

        self.assert_(len(p1._children) == 1)
        self.assert_(len(p1.children) == 1)
        self.assert_(p1._children[0] == ch)

        del p1.children[0]

        self.assert_(len(p1._children) == 0)
        self.assert_(len(p1.children) == 0)

        p1.children = ['a', 'b', 'c']
        self.assert_(len(p1._children) == 3)
        self.assert_(len(p1.children) == 3)

        del ch
        p1 = self.roundtrip(p1)

        self.assert_(len(p1._children) == 3)
        self.assert_(len(p1.children) == 3)

        popped = p1.children.pop()
        self.assert_(len(p1.children) == 2)
        self.assert_(popped not in p1.children)
        p1 = self.roundtrip(p1)
        self.assert_(len(p1.children) == 2)
        self.assert_(popped not in p1.children)

        # In-place mutation through the proxy keeps the same Child row.
        p1.children[1] = 'changed-in-place'
        self.assert_(p1.children[1] == 'changed-in-place')
        inplace_id = p1._children[1].id
        p1 = self.roundtrip(p1)
        self.assert_(p1.children[1] == 'changed-in-place')
        assert p1._children[1].id == inplace_id

        p1.children.append('changed-in-place')
        self.assert_(p1.children.count('changed-in-place') == 2)

        p1.children.remove('changed-in-place')
        self.assert_(p1.children.count('changed-in-place') == 1)

        p1 = self.roundtrip(p1)
        self.assert_(p1.children.count('changed-in-place') == 1)

        p1._children = []
        self.assert_(len(p1.children) == 0)

        # Slice assignment, extended slices, and augmented ops on the proxy.
        after = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j']
        p1.children = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j']
        self.assert_(len(p1.children) == 10)
        self.assert_([c.name for c in p1._children] == after)

        p1.children[2:6] = ['x'] * 4
        after = ['a', 'b', 'x', 'x', 'x', 'x', 'g', 'h', 'i', 'j']
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        p1.children[2:6] = ['y']
        after = ['a', 'b', 'y', 'g', 'h', 'i', 'j']
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        p1.children[2:3] = ['z'] * 4
        after = ['a', 'b', 'z', 'z', 'z', 'z', 'g', 'h', 'i', 'j']
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        p1.children[2::2] = ['O'] * 4
        after = ['a', 'b', 'O', 'z', 'O', 'z', 'O', 'h', 'O', 'j']
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        # The proxy list is unhashable, like a plain list.
        assert_raises(TypeError, set, [p1.children])

        p1.children *= 0
        after = []
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        p1.children += ['a', 'b']
        after = ['a', 'b']
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        p1.children += ['c']
        after = ['a', 'b', 'c']
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        p1.children *= 1
        after = ['a', 'b', 'c']
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        p1.children *= 2
        after = ['a', 'b', 'c', 'a', 'b', 'c']
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        p1.children = ['a']
        after = ['a']
        self.assert_(p1.children == after)
        self.assert_([c.name for c in p1._children] == after)

        # Non-mutating operators produce plain lists.
        self.assert_((p1.children * 2) == ['a', 'a'])
        self.assert_((2 * p1.children) == ['a', 'a'])
        self.assert_((p1.children * 0) == [])
        self.assert_((0 * p1.children) == [])

        self.assert_((p1.children + ['b']) == ['a', 'b'])
        self.assert_((['b'] + p1.children) == ['b', 'a'])

        try:
            p1.children + 123
            assert False
        except TypeError:
            assert True
class DefaultTest(_CollectionOperations):
    """Runs the sequence-operation suite with no explicit collection class."""

    def __init__(self, *args, **kw):
        super(DefaultTest, self).__init__(*args, **kw)
        self.collection_class = None

    def test_sequence_ops(self):
        self._test_sequence_ops()
class ListTest(_CollectionOperations):
    """Runs the sequence-operation suite with a plain ``list`` collection."""

    def __init__(self, *args, **kw):
        super(ListTest, self).__init__(*args, **kw)
        self.collection_class = list

    def test_sequence_ops(self):
        self._test_sequence_ops()
class CustomListTest(ListTest):
    """Same as ListTest; placeholder until a true custom list is testable."""

    def __init__(self, *args, **kw):
        super(CustomListTest, self).__init__(*args, **kw)
        self.collection_class = list
# No-can-do until ticket #213
class DictTest(_CollectionOperations):
    """Placeholder base for dict-backed collections; no tests of its own."""
    pass
class CustomDictTest(DictTest):
    """Exercises the association proxy over a custom dict collection."""

    def __init__(self, *args, **kw):
        # Fixed: was super(DictTest, self), which skips this class's own
        # place in the MRO; use the defining class for consistency with
        # CustomListTest and safety should DictTest ever gain an __init__.
        super(CustomDictTest, self).__init__(*args, **kw)
        self.collection_class = DictCollection

    def test_mapping_ops(self):
        Parent, Child = self.Parent, self.Child

        p1 = Parent('P1')

        self.assert_(not p1._children)
        self.assert_(not p1.children)

        # Appending to the raw collection keys the child by its .foo.
        ch = Child('a', 'regular')
        p1._children.append(ch)

        self.assert_(ch in list(p1._children.values()))
        self.assert_(len(p1._children) == 1)

        self.assert_(p1.children)
        self.assert_(len(p1.children) == 1)
        self.assert_(ch not in p1.children)
        self.assert_('a' in p1.children)
        self.assert_(p1.children['a'] == 'regular')
        self.assert_(p1._children['a'] == ch)

        # Assigning through the proxy creates a Child behind the scenes.
        p1.children['b'] = 'proxied'

        self.assert_('proxied' in list(p1.children.values()))
        self.assert_('b' in p1.children)
        self.assert_('proxied' not in p1._children)
        self.assert_(len(p1.children) == 2)
        self.assert_(len(p1._children) == 2)

        self.assert_(p1._children['a'].name == 'regular')
        self.assert_(p1._children['b'].name == 'proxied')

        del p1._children['b']

        self.assert_(len(p1._children) == 1)
        self.assert_(len(p1.children) == 1)
        self.assert_(p1._children['a'] == ch)

        del p1.children['a']

        self.assert_(len(p1._children) == 0)
        self.assert_(len(p1.children) == 0)

        p1.children = {'d': 'v d', 'e': 'v e', 'f': 'v f'}
        self.assert_(len(p1._children) == 3)
        self.assert_(len(p1.children) == 3)
        self.assert_(set(p1.children) == set(['d', 'e', 'f']))

        del ch
        p1 = self.roundtrip(p1)
        self.assert_(len(p1._children) == 3)
        self.assert_(len(p1.children) == 3)

        # In-place mutation through the proxy keeps the same Child row.
        p1.children['e'] = 'changed-in-place'
        self.assert_(p1.children['e'] == 'changed-in-place')
        inplace_id = p1._children['e'].id
        p1 = self.roundtrip(p1)
        self.assert_(p1.children['e'] == 'changed-in-place')
        self.assert_(p1._children['e'].id == inplace_id)

        p1._children = {}
        self.assert_(len(p1.children) == 0)

        # Replacing the dict collection with a non-dict must be rejected.
        try:
            p1._children = []
            self.assert_(False)
        except TypeError:
            self.assert_(True)

        try:
            p1._children = None
            self.assert_(False)
        except TypeError:
            self.assert_(True)

        assert_raises(TypeError, set, [p1.children])
class SetTest(_CollectionOperations):
    """Association-proxy behavior over a ``set``-based relationship
    collection: element ops, set comparisons, and the full mutation
    matrix (method calls and augmented-assignment operators)."""

    def __init__(self, *args, **kw):
        super(SetTest, self).__init__(*args, **kw)
        self.collection_class = set

    def test_set_operations(self):
        Parent, Child = self.Parent, self.Child
        p1 = Parent('P1')
        # Both the raw collection and the proxy start empty.
        self.assert_(not p1._children)
        self.assert_(not p1.children)
        ch1 = Child('regular')
        p1._children.add(ch1)
        self.assert_(ch1 in p1._children)
        self.assert_(len(p1._children) == 1)
        self.assert_(p1.children)
        self.assert_(len(p1.children) == 1)
        # The proxy contains names, not Child objects.
        self.assert_(ch1 not in p1.children)
        self.assert_('regular' in p1.children)
        # Adding through the proxy creates a Child behind the scenes.
        p1.children.add('proxied')
        self.assert_('proxied' in p1.children)
        self.assert_('proxied' not in p1._children)
        self.assert_(len(p1.children) == 2)
        self.assert_(len(p1._children) == 2)
        self.assert_(set([o.name for o in p1._children]) ==
                     set(['regular', 'proxied']))
        ch2 = None
        for o in p1._children:
            if o.name == 'proxied':
                ch2 = o
                break
        # Removal through either side is reflected on the other.
        p1._children.remove(ch2)
        self.assert_(len(p1._children) == 1)
        self.assert_(len(p1.children) == 1)
        self.assert_(p1._children == set([ch1]))
        p1.children.remove('regular')
        self.assert_(len(p1._children) == 0)
        self.assert_(len(p1.children) == 0)
        # Bulk replacement (accepts any iterable of values).
        p1.children = ['a', 'b', 'c']
        self.assert_(len(p1._children) == 3)
        self.assert_(len(p1.children) == 3)
        del ch1
        p1 = self.roundtrip(p1)
        self.assert_(len(p1._children) == 3)
        self.assert_(len(p1.children) == 3)
        self.assert_('a' in p1.children)
        self.assert_('b' in p1.children)
        self.assert_('d' not in p1.children)
        self.assert_(p1.children == set(['a', 'b', 'c']))
        # remove() of a missing element raises; discard() does not.
        assert_raises(
            KeyError,
            p1.children.remove, "d"
        )
        self.assert_(len(p1.children) == 3)
        p1.children.discard('d')
        self.assert_(len(p1.children) == 3)
        p1 = self.roundtrip(p1)
        self.assert_(len(p1.children) == 3)
        popped = p1.children.pop()
        self.assert_(len(p1.children) == 2)
        self.assert_(popped not in p1.children)
        p1 = self.roundtrip(p1)
        self.assert_(len(p1.children) == 2)
        self.assert_(popped not in p1.children)
        p1.children = ['a', 'b', 'c']
        p1 = self.roundtrip(p1)
        self.assert_(p1.children == set(['a', 'b', 'c']))
        p1.children.discard('b')
        p1 = self.roundtrip(p1)
        self.assert_(p1.children == set(['a', 'c']))
        p1.children.remove('a')
        p1 = self.roundtrip(p1)
        self.assert_(p1.children == set(['c']))
        p1._children = set()
        self.assert_(len(p1.children) == 0)
        # Assigning a non-set to the raw collection must fail.
        try:
            p1._children = []
            self.assert_(False)
        except TypeError:
            self.assert_(True)
        try:
            p1._children = None
            self.assert_(False)
        except TypeError:
            self.assert_(True)
        # The proxy itself is unhashable, so it cannot be a set member.
        assert_raises(TypeError, set, [p1.children])

    def test_set_comparisons(self):
        # Proxy set-algebra and comparison operators must agree with a
        # plain ``set`` holding the same values.
        Parent, Child = self.Parent, self.Child
        p1 = Parent('P1')
        p1.children = ['a', 'b', 'c']
        control = set(['a', 'b', 'c'])
        for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']),
                      set(['a']), set(['a', 'b']),
                      set(['c', 'd']), set(['e', 'f', 'g']),
                      set()):
            eq_(p1.children.union(other),
                control.union(other))
            eq_(p1.children.difference(other),
                control.difference(other))
            eq_((p1.children - other),
                (control - other))
            eq_(p1.children.intersection(other),
                control.intersection(other))
            eq_(p1.children.symmetric_difference(other),
                control.symmetric_difference(other))
            eq_(p1.children.issubset(other),
                control.issubset(other))
            eq_(p1.children.issuperset(other),
                control.issuperset(other))
            self.assert_((p1.children == other) == (control == other))
            self.assert_((p1.children != other) == (control != other))
            self.assert_((p1.children < other) == (control < other))
            self.assert_((p1.children <= other) == (control <= other))
            self.assert_((p1.children > other) == (control > other))
            self.assert_((p1.children >= other) == (control >= other))

    def test_set_mutation(self):
        # Every mutating set method / operator must behave like ``set``'s,
        # both before and after a DB roundtrip.
        Parent, Child = self.Parent, self.Child
        # mutations
        for op in ('update', 'intersection_update',
                   'difference_update', 'symmetric_difference_update'):
            for base in (['a', 'b', 'c'], []):
                for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']),
                              set(['a']), set(['a', 'b']),
                              set(['c', 'd']), set(['e', 'f', 'g']),
                              set()):
                    p = Parent('p')
                    p.children = base[:]
                    control = set(base[:])
                    getattr(p.children, op)(other)
                    getattr(control, op)(other)
                    try:
                        self.assert_(p.children == control)
                    except:
                        print('Test %s.%s(%s):' % (set(base), op, other))
                        print('want', repr(control))
                        print('got', repr(p.children))
                        raise
                    p = self.roundtrip(p)
                    try:
                        self.assert_(p.children == control)
                    except:
                        print('Test %s.%s(%s):' % (base, op, other))
                        print('want', repr(control))
                        print('got', repr(p.children))
                        raise
        # in-place mutations
        for op in ('|=', '-=', '&=', '^='):
            for base in (['a', 'b', 'c'], []):
                for other in (set(['a', 'b', 'c']), set(['a', 'b', 'c', 'd']),
                              set(['a']), set(['a', 'b']),
                              set(['c', 'd']), set(['e', 'f', 'g']),
                              frozenset(['e', 'f', 'g']),
                              set()):
                    p = Parent('p')
                    p.children = base[:]
                    control = set(base[:])
                    # exec is used because the operator is data-driven;
                    # the augmented ops mutate the objects in place, so the
                    # effect is visible outside the exec'd namespace.
                    exec("p.children %s other" % op)
                    exec("control %s other" % op)
                    try:
                        self.assert_(p.children == control)
                    except:
                        print('Test %s %s %s:' % (set(base), op, other))
                        print('want', repr(control))
                        print('got', repr(p.children))
                        raise
                    p = self.roundtrip(p)
                    try:
                        self.assert_(p.children == control)
                    except:
                        print('Test %s %s %s:' % (base, op, other))
                        print('want', repr(control))
                        print('got', repr(p.children))
                        raise
class CustomSetTest(SetTest):
    """Re-run the full SetTest suite against the custom SetCollection
    collection class instead of the builtin ``set``."""

    def __init__(self, *args, **kw):
        SetTest.__init__(self, *args, **kw)
        self.collection_class = SetCollection
class CustomObjectTest(_CollectionOperations):
    """Exercise a completely custom (non list/set/dict) collection class."""

    def __init__(self, *args, **kw):
        _CollectionOperations.__init__(self, *args, **kw)
        self.collection_class = ObjectCollection

    def test_basic(self):
        Parent, Child = self.Parent, self.Child
        parent = Parent('p1')
        self.assert_(len(list(parent.children)) == 0)
        parent.children.append('child')
        self.assert_(len(list(parent.children)) == 1)
        parent = self.roundtrip(parent)
        self.assert_(len(list(parent.children)) == 1)
        # We didn't provide an alternate _AssociationList implementation
        # for our ObjectCollection, so indexing through the proxy fails.
        assert_raises(
            TypeError,
            parent.children.__getitem__, 1
        )
class ProxyFactoryTest(ListTest):
    """Re-run the list suite with a user-supplied ``proxy_factory`` /
    ``proxy_bulk_set`` pair instead of the default proxy class."""

    def setup(self):
        metadata = MetaData(testing.db)
        parents_table = Table('Parent', metadata,
                              Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
                              Column('name', String(128)))
        children_table = Table('Children', metadata,
                               Column('id', Integer, primary_key=True,
                                      test_needs_autoincrement=True),
                               Column('parent_id', Integer,
                                      ForeignKey('Parent.id')),
                               Column('foo', String(128)),
                               Column('name', String(128)))

        class CustomProxy(_AssociationList):
            # Minimal subclass: reuse the parent's default getter/setter
            # rather than supplying custom accessors.
            def __init__(
                self,
                lazy_collection,
                creator,
                value_attr,
                parent,
            ):
                getter, setter = parent._default_getset(lazy_collection)
                _AssociationList.__init__(
                    self,
                    lazy_collection,
                    creator,
                    getter,
                    setter,
                    parent,
                )

        class Parent(object):
            children = association_proxy('_children', 'name',
                                         proxy_factory=CustomProxy,
                                         proxy_bulk_set=CustomProxy.extend
                                         )

            def __init__(self, name):
                self.name = name

        class Child(object):
            def __init__(self, name):
                self.name = name

        mapper(Parent, parents_table, properties={
            '_children': relationship(Child, lazy='joined',
                                      collection_class=list)})
        mapper(Child, children_table)
        metadata.create_all()
        self.metadata = metadata
        self.session = create_session()
        self.Parent, self.Child = Parent, Child

    def test_sequence_ops(self):
        # Same shared exercise as ListTest, now through CustomProxy.
        self._test_sequence_ops()
class ScalarTest(fixtures.TestBase):
    """Scalar (uselist=False) association proxies: attribute get/set,
    creator functions, roundtrips, and missing-child behavior."""
    # NOTE(review): this test creates tables but never drops them (no
    # teardown and ``metadata`` stays local) — confirm whether cleanup is
    # handled elsewhere by the test harness.

    def test_scalar_proxy(self):
        metadata = MetaData(testing.db)
        parents_table = Table('Parent', metadata,
                              Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
                              Column('name', String(128)))
        children_table = Table('Children', metadata,
                               Column('id', Integer, primary_key=True,
                                      test_needs_autoincrement=True),
                               Column('parent_id', Integer,
                                      ForeignKey('Parent.id')),
                               Column('foo', String(128)),
                               Column('bar', String(128)),
                               Column('baz', String(128)))

        class Parent(object):
            # 'foo' has no creator; 'bar'/'baz' build a Child on demand.
            foo = association_proxy('child', 'foo')
            bar = association_proxy('child', 'bar',
                                    creator=lambda v: Child(bar=v))
            baz = association_proxy('child', 'baz',
                                    creator=lambda v: Child(baz=v))

            def __init__(self, name):
                self.name = name

        class Child(object):
            def __init__(self, **kw):
                for attr in kw:
                    setattr(self, attr, kw[attr])

        mapper(Parent, parents_table, properties={
            'child': relationship(Child, lazy='joined',
                                  backref='parent', uselist=False)})
        mapper(Child, children_table)
        metadata.create_all()
        session = create_session()

        def roundtrip(obj):
            # Flush, expunge, and reload by primary key.
            if obj not in session:
                session.add(obj)
            session.flush()
            id, type_ = obj.id, type(obj)
            session.expunge_all()
            return session.query(type_).get(id)

        p = Parent('p')
        # No child
        assert_raises(
            AttributeError,
            getattr, p, "foo"
        )
        p.child = Child(foo='a', bar='b', baz='c')
        self.assert_(p.foo == 'a')
        self.assert_(p.bar == 'b')
        self.assert_(p.baz == 'c')
        # Setting one proxied attribute leaves the siblings untouched.
        p.bar = 'x'
        self.assert_(p.foo == 'a')
        self.assert_(p.bar == 'x')
        self.assert_(p.baz == 'c')
        p = roundtrip(p)
        self.assert_(p.foo == 'a')
        self.assert_(p.bar == 'x')
        self.assert_(p.baz == 'c')
        p.child = None
        # No child again
        assert_raises(
            AttributeError,
            getattr, p, "foo"
        )
        # Bogus creator for this scalar type
        assert_raises(
            TypeError,
            setattr, p, "foo", "zzz"
        )
        # A proxy with a creator builds the Child implicitly on set.
        p.bar = 'yyy'
        self.assert_(p.foo is None)
        self.assert_(p.bar == 'yyy')
        self.assert_(p.baz is None)
        del p.child
        p = roundtrip(p)
        self.assert_(p.child is None)
        p.baz = 'xxx'
        self.assert_(p.foo is None)
        self.assert_(p.bar is None)
        self.assert_(p.baz == 'xxx')
        p = roundtrip(p)
        self.assert_(p.foo is None)
        self.assert_(p.bar is None)
        self.assert_(p.baz == 'xxx')
        # Ensure an immediate __set__ works.
        p2 = Parent('p2')
        p2.bar = 'quux'
class LazyLoadTest(fixtures.TestBase):
    """Verify the association proxy does not change the relationship's
    loading strategy: lazy stays lazy, eager stays eager.

    Each test maps Parent itself, so the strategy under test is chosen
    per-test; only Child is mapped in setup.
    """

    def setup(self):
        metadata = MetaData(testing.db)
        parents_table = Table('Parent', metadata,
                              Column('id', Integer, primary_key=True,
                                     test_needs_autoincrement=True),
                              Column('name', String(128)))
        children_table = Table('Children', metadata,
                               Column('id', Integer, primary_key=True,
                                      test_needs_autoincrement=True),
                               Column('parent_id', Integer,
                                      ForeignKey('Parent.id')),
                               Column('foo', String(128)),
                               Column('name', String(128)))

        class Parent(object):
            children = association_proxy('_children', 'name')

            def __init__(self, name):
                self.name = name

        class Child(object):
            def __init__(self, name):
                self.name = name

        mapper(Child, children_table)
        metadata.create_all()
        self.metadata = metadata
        self.session = create_session()
        self.Parent, self.Child = Parent, Child
        self.table = parents_table

    def teardown(self):
        self.metadata.drop_all()

    def roundtrip(self, obj):
        # Flush, expunge, and reload by primary key.
        self.session.add(obj)
        self.session.flush()
        id, type_ = obj.id, type(obj)
        self.session.expunge_all()
        return self.session.query(type_).get(id)

    def test_lazy_list(self):
        Parent, Child = self.Parent, self.Child
        mapper(Parent, self.table, properties={
            '_children': relationship(Child, lazy='select',
                                      collection_class=list)})
        p = Parent('p')
        p.children = ['a', 'b', 'c']
        p = self.roundtrip(p)
        # Is there a better way to ensure that the association_proxy
        # didn't convert a lazy load to an eager load? This does work though.
        self.assert_('_children' not in p.__dict__)
        self.assert_(len(p._children) == 3)
        self.assert_('_children' in p.__dict__)

    def test_eager_list(self):
        Parent, Child = self.Parent, self.Child
        mapper(Parent, self.table, properties={
            '_children': relationship(Child, lazy='joined',
                                      collection_class=list)})
        p = Parent('p')
        p.children = ['a', 'b', 'c']
        p = self.roundtrip(p)
        # Eager load: the collection is already populated after the query.
        self.assert_('_children' in p.__dict__)
        self.assert_(len(p._children) == 3)

    def test_lazy_scalar(self):
        Parent, Child = self.Parent, self.Child
        mapper(Parent, self.table, properties={
            '_children': relationship(Child, lazy='select', uselist=False)})
        p = Parent('p')
        p.children = 'value'
        p = self.roundtrip(p)
        self.assert_('_children' not in p.__dict__)
        self.assert_(p._children is not None)

    def test_eager_scalar(self):
        Parent, Child = self.Parent, self.Child
        mapper(Parent, self.table, properties={
            '_children': relationship(Child, lazy='joined', uselist=False)})
        p = Parent('p')
        p.children = 'value'
        p = self.roundtrip(p)
        self.assert_('_children' in p.__dict__)
        self.assert_(p._children is not None)
class Parent(object):
    """Minimal parent record; ReconstitutionTest.setup attaches a ``kids``
    association proxy to this class and maps it per-test."""

    def __init__(self, name):
        self.name = name
class Child(object):
    """Minimal child record holding only a name; mapped in
    ReconstitutionTest's list/set pickle and copy tests."""

    def __init__(self, name):
        self.name = name
class KVChild(object):
    """Child carrying a key/value pair, used with a mapped_collection
    keyed by ``name`` in ReconstitutionTest.test_pickle_dict."""

    def __init__(self, name, value):
        self.name = name
        self.value = value
class ReconstitutionTest(fixtures.TestBase):
    """Association proxies must survive object reconstitution: garbage
    collection of the parent, ``copy.copy``, and pickling of list-, set-,
    and dict-based collections."""

    def setup(self):
        metadata = MetaData(testing.db)
        parents = Table('parents', metadata, Column('id', Integer,
                        primary_key=True,
                        test_needs_autoincrement=True), Column('name',
                        String(30)))
        children = Table('children', metadata, Column('id', Integer,
                         primary_key=True,
                         test_needs_autoincrement=True),
                         Column('parent_id', Integer,
                         ForeignKey('parents.id')), Column('name',
                         String(30)))
        metadata.create_all()
        parents.insert().execute(name='p1')
        self.metadata = metadata
        self.parents = parents
        self.children = children
        # The proxy is attached to the module-level Parent class here so
        # every test starts from a freshly configured attribute.
        Parent.kids = association_proxy('children', 'name')

    def teardown(self):
        self.metadata.drop_all()
        clear_mappers()

    def test_weak_identity_map(self):
        mapper(Parent, self.parents,
               properties=dict(children=relationship(Child)))
        mapper(Child, self.children)
        session = create_session(weak_identity_map=True)

        def add_child(parent_name, child_name):
            parent = \
                session.query(Parent).filter_by(name=parent_name).one()
            parent.kids.append(child_name)

        add_child('p1', 'c1')
        # Force the weakly-referenced parent out of the identity map;
        # the proxy must still work on the re-fetched instance.
        gc_collect()
        add_child('p1', 'c2')
        session.flush()
        p = session.query(Parent).filter_by(name='p1').one()
        assert set(p.kids) == set(['c1', 'c2']), p.kids

    def test_copy(self):
        mapper(Parent, self.parents,
               properties=dict(children=relationship(Child)))
        mapper(Child, self.children)
        p = Parent('p1')
        p.kids.extend(['c1', 'c2'])
        p_copy = copy.copy(p)
        del p
        gc_collect()
        # Bug fix: the assertion message previously read ``p.kids``, but
        # ``p`` was deleted above — a failure would have raised NameError
        # instead of showing the collection.
        assert set(p_copy.kids) == set(['c1', 'c2']), p_copy.kids

    def test_pickle_list(self):
        mapper(Parent, self.parents,
               properties=dict(children=relationship(Child)))
        mapper(Child, self.children)
        p = Parent('p1')
        p.kids.extend(['c1', 'c2'])
        r1 = pickle.loads(pickle.dumps(p))
        assert r1.kids == ['c1', 'c2']
        # can't do this without parent having a cycle
        #r2 = pickle.loads(pickle.dumps(p.kids))
        #assert r2 == ['c1', 'c2']

    def test_pickle_set(self):
        mapper(Parent, self.parents,
               properties=dict(children=relationship(Child,
                               collection_class=set)))
        mapper(Child, self.children)
        p = Parent('p1')
        p.kids.update(['c1', 'c2'])
        r1 = pickle.loads(pickle.dumps(p))
        assert r1.kids == set(['c1', 'c2'])
        # can't do this without parent having a cycle
        #r2 = pickle.loads(pickle.dumps(p.kids))
        #assert r2 == set(['c1', 'c2'])

    def test_pickle_dict(self):
        mapper(Parent, self.parents,
               properties=dict(children=relationship(KVChild,
                               collection_class=
                               collections.mapped_collection(PickleKeyFunc('name')))))
        mapper(KVChild, self.children)
        p = Parent('p1')
        p.kids.update({'c1': 'v1', 'c2': 'v2'})
        # The proxy targets KVChild.name, so the values seen through the
        # proxy are the keys themselves, not 'v1'/'v2'.
        assert p.kids == {'c1': 'c1', 'c2': 'c2'}
        r1 = pickle.loads(pickle.dumps(p))
        assert r1.kids == {'c1': 'c1', 'c2': 'c2'}
        # can't do this without parent having a cycle
        #r2 = pickle.loads(pickle.dumps(p.kids))
        #assert r2 == {'c1': 'c1', 'c2': 'c2'}
class PickleKeyFunc(object):
    """Picklable callable returning a named attribute of its argument.

    Lambdas cannot be pickled, so mapped-collection key functions that
    must survive pickling are expressed as instances of this class.
    """

    def __init__(self, name):
        self.name = name

    def __call__(self, obj):
        attr = self.name
        return getattr(obj, attr)
class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL):
    """SQL-level comparator behavior of association proxies.

    Each ``test_filter_*`` asserts that a query written against a proxy
    attribute returns the same rows as the equivalent query written
    against the underlying relationship (via ``_equivalent``).  Test-name
    suffixes encode the cardinalities: ``ul`` = uselist (collection),
    ``nul`` = non-uselist (scalar reference).
    """
    __dialect__ = 'default'
    run_inserts = 'once'
    run_deletes = None
    run_setup_mappers = 'once'
    run_setup_classes = 'once'

    @classmethod
    def define_tables(cls, metadata):
        Table('userkeywords', metadata,
              Column('keyword_id', Integer, ForeignKey('keywords.id'), primary_key=True),
              Column('user_id', Integer, ForeignKey('users.id'))
              )
        Table('users', metadata,
              Column('id', Integer,
                     primary_key=True, test_needs_autoincrement=True),
              Column('name', String(64)),
              Column('singular_id', Integer, ForeignKey('singular.id'))
              )
        Table('keywords', metadata,
              Column('id', Integer,
                     primary_key=True, test_needs_autoincrement=True),
              Column('keyword', String(64)),
              Column('singular_id', Integer, ForeignKey('singular.id'))
              )
        Table('singular', metadata,
              Column('id', Integer,
                     primary_key=True, test_needs_autoincrement=True),
              Column('value', String(50))
              )

    @classmethod
    def setup_classes(cls):
        class User(cls.Comparable):
            def __init__(self, name):
                self.name = name

            # o2m -> m2o
            # uselist -> nonuselist
            keywords = association_proxy('user_keywords', 'keyword',
                                         creator=lambda k: UserKeyword(keyword=k))

            # m2o -> o2m
            # nonuselist -> uselist
            singular_keywords = association_proxy('singular', 'keywords')

            # m2o -> scalar
            # nonuselist
            singular_value = association_proxy('singular', 'value')

        class Keyword(cls.Comparable):
            def __init__(self, keyword):
                self.keyword = keyword

            # o2o -> m2o
            # nonuselist -> nonuselist
            user = association_proxy('user_keyword', 'user')

        class UserKeyword(cls.Comparable):
            def __init__(self, user=None, keyword=None):
                self.user = user
                self.keyword = keyword

        class Singular(cls.Comparable):
            def __init__(self, value=None):
                self.value = value

    @classmethod
    def setup_mappers(cls):
        users, Keyword, UserKeyword, singular, \
            userkeywords, User, keywords, Singular = (cls.tables.users,
                cls.classes.Keyword,
                cls.classes.UserKeyword,
                cls.tables.singular,
                cls.tables.userkeywords,
                cls.classes.User,
                cls.tables.keywords,
                cls.classes.Singular)

        mapper(User, users, properties={
            'singular': relationship(Singular)
        })
        mapper(Keyword, keywords, properties={
            'user_keyword': relationship(UserKeyword, uselist=False)
        })
        mapper(UserKeyword, userkeywords, properties={
            'user': relationship(User, backref='user_keywords'),
            'keyword': relationship(Keyword)
        })
        mapper(Singular, singular, properties={
            'keywords': relationship(Keyword)
        })

    @classmethod
    def insert_data(cls):
        UserKeyword, User, Keyword, Singular = (cls.classes.UserKeyword,
                                                cls.classes.User,
                                                cls.classes.Keyword,
                                                cls.classes.Singular)
        session = sessionmaker()()
        words = (
            'quick', 'brown',
            'fox', 'jumped', 'over',
            'the', 'lazy',
        )
        for ii in range(16):
            user = User('user%d' % ii)
            # Even users get a Singular; only every fourth has a value.
            if ii % 2 == 0:
                user.singular = Singular(value=("singular%d" % ii)
                                         if ii % 4 == 0 else None)
            session.add(user)
            for jj in words[(ii % len(words)):((ii + 3) % len(words))]:
                k = Keyword(jj)
                user.keywords.append(k)
                # NOTE(review): ``ii % 3`` is an int and never equals None,
                # so this branch is dead and no singular keywords are ever
                # inserted here.  It may have been meant as ``== 0``, but
                # that would dereference ``user.singular`` for odd ii,
                # where it is unset — confirm intent before changing.
                if ii % 3 == None:
                    user.singular.keywords.append(k)
        orphan = Keyword('orphan')
        orphan.user_keyword = UserKeyword(keyword=orphan, user=None)
        session.add(orphan)
        keyword_with_nothing = Keyword('kwnothing')
        session.add(keyword_with_nothing)
        session.commit()
        cls.u = user
        cls.kw = user.keywords[0]
        cls.session = session

    def _equivalent(self, q_proxy, q_direct):
        """Assert a proxy-based query returns the same rows as the
        equivalent relationship-based query."""
        eq_(q_proxy.all(), q_direct.all())

    def test_filter_any_kwarg_ul_nul(self):
        UserKeyword, User = self.classes.UserKeyword, self.classes.User
        self._equivalent(self.session.query(User).
                         filter(User.keywords.any(keyword='jumped'
                                )),
                         self.session.query(User).filter(
                             User.user_keywords.any(
                                 UserKeyword.keyword.has(keyword='jumped'
                                 ))))

    def test_filter_has_kwarg_nul_nul(self):
        UserKeyword, Keyword = self.classes.UserKeyword, self.classes.Keyword
        self._equivalent(self.session.query(Keyword).
                         filter(Keyword.user.has(name='user2'
                                )),
                         self.session.query(Keyword).
                         filter(Keyword.user_keyword.has(
                             UserKeyword.user.has(name='user2'
                             ))))

    def test_filter_has_kwarg_nul_ul(self):
        User, Singular = self.classes.User, self.classes.Singular
        self._equivalent(
            self.session.query(User).\
                filter(User.singular_keywords.any(keyword='jumped')),
            self.session.query(User).\
                filter(
                    User.singular.has(
                        Singular.keywords.any(keyword='jumped')
                    )
                )
        )

    def test_filter_any_criterion_ul_nul(self):
        UserKeyword, User, Keyword = (self.classes.UserKeyword,
                                      self.classes.User,
                                      self.classes.Keyword)
        self._equivalent(self.session.query(User).
                         filter(User.keywords.any(Keyword.keyword
                                == 'jumped')),
                         self.session.query(User).
                         filter(User.user_keywords.any(
                             UserKeyword.keyword.has(Keyword.keyword
                             == 'jumped'))))

    def test_filter_has_criterion_nul_nul(self):
        UserKeyword, User, Keyword = (self.classes.UserKeyword,
                                      self.classes.User,
                                      self.classes.Keyword)
        self._equivalent(self.session.query(Keyword).
                         filter(Keyword.user.has(User.name == 'user2')),
                         self.session.query(Keyword).
                         filter(Keyword.user_keyword.has(
                             UserKeyword.user.has(User.name == 'user2'))))

    def test_filter_any_criterion_nul_ul(self):
        User, Keyword, Singular = (self.classes.User,
                                   self.classes.Keyword,
                                   self.classes.Singular)
        self._equivalent(
            self.session.query(User).
                filter(User.singular_keywords.any(
                    Keyword.keyword == 'jumped')),
            self.session.query(User).
                filter(
                    User.singular.has(
                        Singular.keywords.any(Keyword.keyword == 'jumped')
                    )
                )
        )

    def test_filter_contains_ul_nul(self):
        User = self.classes.User
        self._equivalent(self.session.query(User).
                         filter(User.keywords.contains(self.kw)),
                         self.session.query(User).
                         filter(User.user_keywords.any(keyword=self.kw)))

    def test_filter_contains_nul_ul(self):
        User, Singular = self.classes.User, self.classes.Singular
        self._equivalent(
            self.session.query(User).filter(
                User.singular_keywords.contains(self.kw)
            ),
            self.session.query(User).filter(
                User.singular.has(
                    Singular.keywords.contains(self.kw)
                )
            ),
        )

    def test_filter_eq_nul_nul(self):
        Keyword = self.classes.Keyword
        self._equivalent(self.session.query(Keyword).filter(Keyword.user
                         == self.u),
                         self.session.query(Keyword).
                         filter(Keyword.user_keyword.has(user=self.u)))

    def test_filter_ne_nul_nul(self):
        Keyword = self.classes.Keyword
        self._equivalent(self.session.query(Keyword).filter(Keyword.user != self.u),
                         self.session.query(Keyword).
                         filter(
                             Keyword.user_keyword.has(Keyword.user != self.u)
                         )
                         )

    def test_filter_eq_null_nul_nul(self):
        UserKeyword, Keyword = self.classes.UserKeyword, self.classes.Keyword
        # ``proxy == None`` must match both "no association row" and
        # "association row with NULL target".
        self._equivalent(
            self.session.query(Keyword).filter(Keyword.user == None),
            self.session.query(Keyword).
            filter(
                or_(
                    Keyword.user_keyword.has(UserKeyword.user == None),
                    Keyword.user_keyword == None
                )
            )
        )

    def test_filter_ne_null_nul_nul(self):
        UserKeyword, Keyword = self.classes.UserKeyword, self.classes.Keyword
        self._equivalent(
            self.session.query(Keyword).filter(Keyword.user != None),
            self.session.query(Keyword).
            filter(
                Keyword.user_keyword.has(UserKeyword.user != None),
            )
        )

    def test_filter_eq_None_nul(self):
        User = self.classes.User
        Singular = self.classes.Singular
        self._equivalent(
            self.session.query(User).filter(User.singular_value == None),
            self.session.query(User).filter(
                or_(
                    User.singular.has(Singular.value == None),
                    User.singular == None
                )
            )
        )

    def test_filter_ne_value_nul(self):
        User = self.classes.User
        Singular = self.classes.Singular
        self._equivalent(
            self.session.query(User).filter(User.singular_value != "singular4"),
            self.session.query(User).filter(
                User.singular.has(Singular.value != "singular4"),
            )
        )

    def test_filter_eq_value_nul(self):
        User = self.classes.User
        Singular = self.classes.Singular
        self._equivalent(
            self.session.query(User).filter(User.singular_value == "singular4"),
            self.session.query(User).filter(
                User.singular.has(Singular.value == "singular4"),
            )
        )

    def test_filter_ne_None_nul(self):
        User = self.classes.User
        Singular = self.classes.Singular
        self._equivalent(
            self.session.query(User).filter(User.singular_value != None),
            self.session.query(User).filter(
                User.singular.has(Singular.value != None),
            )
        )

    def test_has_nul(self):
        # a special case where we provide an empty has() on a
        # non-object-targeted association proxy.
        User = self.classes.User
        self.classes.Singular  # no-op lookup; retained from original
        self._equivalent(
            self.session.query(User).filter(User.singular_value.has()),
            self.session.query(User).filter(
                User.singular.has(),
            )
        )

    def test_nothas_nul(self):
        # a special case where we provide an empty has() on a
        # non-object-targeted association proxy.
        User = self.classes.User
        self.classes.Singular  # no-op lookup; retained from original
        self._equivalent(
            self.session.query(User).filter(~User.singular_value.has()),
            self.session.query(User).filter(
                ~User.singular.has(),
            )
        )

    def test_has_criterion_nul(self):
        # but we don't allow that with any criterion...
        User = self.classes.User
        self.classes.Singular  # no-op lookup; retained from original
        assert_raises_message(
            exc.ArgumentError,
            "Non-empty has\(\) not allowed",
            User.singular_value.has,
            User.singular_value == "singular4"
        )

    def test_has_kwargs_nul(self):
        # ... or kwargs
        User = self.classes.User
        self.classes.Singular  # no-op lookup; retained from original
        assert_raises_message(
            exc.ArgumentError,
            "Non-empty has\(\) not allowed",
            User.singular_value.has, singular_value="singular4"
        )

    def test_filter_scalar_contains_fails_nul_nul(self):
        Keyword = self.classes.Keyword
        assert_raises(exc.InvalidRequestError,
                      lambda: Keyword.user.contains(self.u))

    def test_filter_scalar_any_fails_nul_nul(self):
        Keyword = self.classes.Keyword
        assert_raises(exc.InvalidRequestError,
                      lambda: Keyword.user.any(name='user2'))

    def test_filter_collection_has_fails_ul_nul(self):
        User = self.classes.User
        assert_raises(exc.InvalidRequestError,
                      lambda: User.keywords.has(keyword='quick'))

    def test_filter_collection_eq_fails_ul_nul(self):
        User = self.classes.User
        assert_raises(exc.InvalidRequestError,
                      lambda: User.keywords == self.kw)

    def test_filter_collection_ne_fails_ul_nul(self):
        User = self.classes.User
        assert_raises(exc.InvalidRequestError,
                      lambda: User.keywords != self.kw)

    def test_join_separate_attr(self):
        User = self.classes.User
        self.assert_compile(
            self.session.query(User).join(
                User.keywords.local_attr,
                User.keywords.remote_attr),
            "SELECT users.id AS users_id, users.name AS users_name, "
            "users.singular_id AS users_singular_id "
            "FROM users JOIN userkeywords ON users.id = "
            "userkeywords.user_id JOIN keywords ON keywords.id = "
            "userkeywords.keyword_id"
        )

    def test_join_single_attr(self):
        User = self.classes.User
        self.assert_compile(
            self.session.query(User).join(
                *User.keywords.attr),
            "SELECT users.id AS users_id, users.name AS users_name, "
            "users.singular_id AS users_singular_id "
            "FROM users JOIN userkeywords ON users.id = "
            "userkeywords.user_id JOIN keywords ON keywords.id = "
            "userkeywords.keyword_id"
        )
class DictOfTupleUpdateTest(fixtures.TestBase):
    """``dict.update`` semantics of a dict-based association proxy when
    the keys are tuples (which look like (key, value) pairs)."""

    def setup(self):
        class B(object):
            def __init__(self, key, elem):
                self.key = key
                self.elem = elem

        class A(object):
            # For dict proxies, the creator receives (key, value);
            # B's __init__ matches that signature directly.
            elements = association_proxy("orig", "elem", creator=B)

        m = MetaData()
        a = Table('a', m, Column('id', Integer, primary_key=True))
        b = Table('b', m, Column('id', Integer, primary_key=True),
                  Column('aid', Integer, ForeignKey('a.id')))
        mapper(A, a, properties={
            'orig': relationship(B, collection_class=attribute_mapped_collection('key'))
        })
        mapper(B, b)
        self.A = A
        self.B = B

    def test_update_one_elem_dict(self):
        a1 = self.A()
        a1.elements.update({("B", 3): 'elem2'})
        eq_(a1.elements, {("B", 3): 'elem2'})

    def test_update_multi_elem_dict(self):
        a1 = self.A()
        a1.elements.update({("B", 3): 'elem2', ("C", 4): "elem3"})
        eq_(a1.elements, {("B", 3): 'elem2', ("C", 4): "elem3"})

    def test_update_one_elem_list(self):
        a1 = self.A()
        a1.elements.update([(("B", 3), 'elem2')])
        eq_(a1.elements, {("B", 3): 'elem2'})

    def test_update_multi_elem_list(self):
        a1 = self.A()
        a1.elements.update([(("B", 3), 'elem2'), (("C", 4), "elem3")])
        eq_(a1.elements, {("B", 3): 'elem2', ("C", 4): "elem3"})

    def test_update_one_elem_varg(self):
        # A bare 2-tuple positional arg is ambiguous and must be rejected.
        a1 = self.A()
        assert_raises_message(
            ValueError,
            "dictionary update sequence requires "
            "2-element tuples",
            a1.elements.update, (("B", 3), 'elem2')
        )

    def test_update_multi_elem_varg(self):
        # Multiple positional args mirror dict.update's one-arg limit.
        a1 = self.A()
        assert_raises_message(
            TypeError,
            "update expected at most 1 arguments, got 2",
            a1.elements.update,
            (("B", 3), 'elem2'), (("C", 4), "elem3")
        )
| mit |
lidiamcfreitas/FenixScheduleMaker | ScheduleMaker/brython/www/tests/test_suite.py | 4 | 6547 | # numbers
assert 2+2==4
assert (50-5*6)/4 == 5.0
assert 8/4*2 == 4.0
assert 8/5 == 1.6
assert 7//3 == 2
assert 7//-3 == -3
assert 4-2-2 == 0
width=20
height=5*9
assert width*height == 900
x = 6
x += 7+8
assert x == 21
x=y=z=0
assert x==0
assert y==0
assert z==0
# hex, octal, binary literals
a=0xaf
assert a==175
a=0Xaf
assert a==175
a=0o754
assert a==492
a=0O754
assert a==492
a=0b10100110
assert a==166
a=0B10100110
assert a==166
# bitwise operators
assert ~3 == -4
x=3
assert ~x == -4
assert ~1 & ~10 | 8 == -4
assert 2<<16 == 131072
assert 131072 >> 16 == 2
# __neg__
assert -x == -3
y = 2.1
assert -y == -2.1
#not sure how to convert this to assert (raise)?
try:
print(n)
print("Failed.. n should be undefined, but n:", n)
except:
pass
assert 3 * 3.75 / 1.5 == 7.5
assert 7.0 / 2 == 3.5
# strings
assert 'spam eggs' == "spam eggs"
assert 'doesn\'t' == "doesn't"
assert '"Yes," he said.' == "\"Yes,\" he said."
assert '"Isn\'t," she said.' == "\"Isn't,\" she said."
hello = "This is a rather long string containing\n\
several lines of text just as you would do in C.\n\
Note that whitespace at the beginning of the line is\
significant."
#print(hello)
assert len(hello) == 158
hello="""\
Usage: thingy [OPTIONS]
-h Display this usage message
-H hostname Hostname to connect to
"""
assert len(hello) == 136
hello1="""This is a rather long string containing
several lines of text just as you would do in C.
Note that whitespace at the beginning of the line is
significant."""
assert len(hello1) == 159
hello = r"This is a rather long string containing\n\
several lines of text much as you would do in C."
#print(hello)
assert len(hello) == 91
# string concatenation, repetition, and implicit literal joining
word = 'Help' + 'A'
assert word == 'HelpA'
assert word*5 == "HelpAHelpAHelpAHelpAHelpA"
assert 'str' 'ing' == 'string'
assert 'str'.strip() + 'ing' == 'string'
assert ' str '.strip() + 'ing' == 'string'
# string methods
x='fooss'
assert x.replace('o', 'X', 20) == 'fXXss'
assert 'GhFF'.lower() == 'ghff'
assert x.lstrip('of') == 'ss'
x='aZjhkhZyuy'
assert x.find('Z') == 1
assert x.rfind('Z') == 6
assert x.rindex('Z') == 6
# rindex of a missing substring must raise ValueError
try:
    x.rindex('K')
    print("Failed.. Should have raised ValueError, instead returned %s" % x.rindex('K'))
except ValueError:
    pass
assert x.split('h') == ['aZj', 'k', 'Zyuy']
assert x.split('h',1) == ['aZj', 'khZyuy']
assert x.split('h', 2) == ['aZj', 'k', 'Zyuy']
assert x.rsplit('h') == ['aZj', 'k', 'Zyuy']
assert x.rsplit('h', 1) == ['aZjhk', 'Zyuy']
assert x.rsplit('y', 2) == ['aZjhkhZ', 'u', '']
assert x.startswith('aZ')
assert x.strip('auy') == 'ZjhkhZ'
assert x.upper() == 'AZJHKHZYUY'
# list examples: slicing, slice assignment, and in-place growth
a=['spam','eggs',100,1234]
assert a[:2] + ['bacon', 2*2] == ['spam', 'eggs', 'bacon', 4]
assert 3*a[:3]+['Boo!'] == ['spam', 'eggs', 100, 'spam', 'eggs', 100, 'spam', 'eggs', 100, 'Boo!']
assert a[:] == ['spam', 'eggs', 100, 1234]
a[2]=a[2]+23
assert a == ['spam', 'eggs', 123, 1234]
a[0:2]=[1,12]
assert a == [1, 12, 123, 1234]
a[0:2]=[]
assert a == [123, 1234]
a[1:1]=['bletch','xyzzy']
assert a == [123, 'bletch', 'xyzzy', 1234]
# inserting a list into itself at the front duplicates it
a[:0]=a
assert a == [123, 'bletch', 'xyzzy', 1234, 123, 'bletch', 'xyzzy', 1234]
a[:]=[]
assert a == []
assert a == []
a.extend('ab')
assert a == ['a', 'b']
a.extend([1,2,33])
assert a == ['a', 'b', 1, 2, 33]
# lambda: default arguments and lambdas stored in containers
g = lambda x,y=99: 2*x+y
assert g(10,6)==26
assert g(10)==119
x = [lambda x:x*2,lambda y:y*3]
assert x[0](5)==10
assert x[1](10)==30
# inline (single-line) functions and classes
def foo(x):return 2*x
assert foo(3)==6
class foo(list):pass
class bar(foo):pass
assert str(bar())=="[]"
i=10
while i>0:i-=1
if not True:print('true!')
else:pass
# bin/oct/hex formatting, including negatives
assert bin(12) == '0b1100'
assert oct(12) == '0o14'
assert hex(12) == '0xc'
assert bin(-12) == '-0b1100'
assert oct(-12) == '-0o14'
assert hex(-12) == '-0xc'
# bytes
b = b'12345'
assert len(b) == 5
# enumerate
enum_obj = enumerate('abcdefghij')
enum_first = next(enum_obj)
assert isinstance(enum_first, tuple)
assert enum_first[0] == 0
enum_obj = enumerate(['first', 'second'], start=1)
enum_first = next(enum_obj)
assert enum_first[0] == 1
# filter
test_list = [0, -1, 1, 2, -2]
true_values = list(filter(None, test_list))
assert true_values == [-1, 1, 2, -2]
negative_values = list(filter(lambda x: x<0, test_list))
assert negative_values == [-1, -2]
# dir: includes own methods and inherited class attributes
class FooParent():
    const = 0
class Foo(FooParent):
    def do_something(self):
        pass
foo = Foo()
foo_contents = dir(foo)
assert 'do_something' in foo_contents
assert 'const' in foo_contents
# non-ASCII variable names
donnée = 10
машина = 9
ήλιος = 4
assert donnée + машина + ήλιος == 23
# Korean
def 안녕하세요():
    return "hello"
assert 안녕하세요()=="hello"
# functions and methods: identity/equality of bound and unbound methods
class foo:
    def method(self, x):
        return(x)
assert foo().method(5)==5
a = foo.method
assert foo.method == foo.method
x = foo()
assert x.method==x.method
def m1(self,x):
    return 2*x
# rebinding the class attribute changes what instances see
foo.method = m1
b = foo.method
assert a != b
assert foo().method(5)==10
y = foo()
# bound methods of distinct instances compare unequal
assert x.method != y.method
def f():
    pass
def g():
    pass
assert f != g
# use of "global" in functions
a = 9
def f():
    global a
    # the comprehension reads the global before it is reassigned below
    res = [x for x in range(a)]
    a = 8
    return res
assert f()==[0,1,2,3,4,5,6,7,8]
assert a==8
# nested function scopes: the closure captures method and arg
def f(method, arg):
    def cb(ev):
        return method(ev, arg)
    return cb
def g(*z):
    return z
a = f(g,5)
b = f(g,11)
assert a(8) == (8, 5)
assert b(13) == (13, 11)
# nonlocal and global: g sees the module-level x, h sees f's local x
x = 0
def f():
    x = 1
    res = []
    def g():
        global x
        return x
    res.append(g())
    def h():
        nonlocal x
        return x
    res.append(h())
    return res
assert f()==[0, 1]
def P():
b=1
def Q():
nonlocal b
b+=1
return b
return Q()
assert P()==2
# use imported names
from a import *
res = []
for i in range(10):
res.append(i)
assert res == ['a', 'b', 'c']
# __setattr__ defined in a class
class A:
def __init__(self, x):
self.x = x
def __setattr__(self, k, v):
object.__setattr__(self, k, 2*v)
a = A(4)
assert a.x == 8
# nested scopes
def f():
x = 1
def g():
assert x == 1
def h():
assert x == 1
return x+1
return h()
return g()
assert f()==2
# check that name "constructor" is valid
constructor = 0
# exception attributes
try:
'a'+2
except TypeError as exc:
assert exc.args[0] == "Can't convert int to str implicitely"
print('passed all tests...') | bsd-2-clause |
ncoghlan/dnf | tests/test_comps.py | 2 | 10840 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from tests import support
from tests.support import mock
import dnf.comps
import dnf.exceptions
import dnf.persistor
import dnf.util
import libcomps
import operator
TRANSLATION=u"""Tato skupina zahrnuje nejmenší možnou množinu balíčků. Je vhodná například na instalace malých routerů nebo firewallů."""
class LangsTest(support.TestCase):
@mock.patch('locale.getlocale', return_value=('cs_CZ', 'UTF-8'))
def test_get(self, _unused):
langs = dnf.comps._Langs().get()
self.assertEqual(langs, ['cs_CZ.UTF-8', 'cs_CZ', 'cs.UTF-8', 'cs', 'C'])
class CompsTest(support.TestCase):
def setUp(self):
comps = dnf.comps.Comps()
comps.add_from_xml_filename(support.COMPS_PATH)
self.comps = comps
def test_by_pattern(self):
comps = self.comps
self.assertLength(comps.groups_by_pattern('Base'), 1)
self.assertLength(comps.groups_by_pattern('*'), support.TOTAL_GROUPS)
self.assertLength(comps.groups_by_pattern('Solid*'), 1)
group = dnf.util.first(comps.groups_by_pattern('Base'))
self.assertIsInstance(group, dnf.comps.Group)
def test_environments(self):
env = self.comps.environments[0]
self.assertEqual(env.name_by_lang['cs'], u'Prostředí Sugar')
self.assertEqual(env.desc_by_lang['de'],
u'Eine Software-Spielwiese zum Lernen des Lernens.')
self.assertCountEqual((id_.name for id_ in env.group_ids),
('somerset', 'Peppers'))
self.assertEqual(2, len(env.mandatory_groups))
self.assertTrue(all(isinstance(grp, dnf.comps.Group)
for grp in env.mandatory_groups))
self.assertCountEqual((id_.default for id_ in env.group_ids),
(True, False))
self.assertCountEqual((id_.name for id_ in env.option_ids),
('base',))
self.assertEqual(1, len(env.optional_groups))
self.assertTrue(all(isinstance(grp, dnf.comps.Group)
for grp in env.optional_groups))
self.assertTrue(all(isinstance(grp, dnf.comps.Group)
for grp in env.groups_iter()))
def test_groups(self):
g = self.comps.group_by_pattern('base')
self.assertTrue(g.visible)
g = self.comps.group_by_pattern('somerset')
self.assertFalse(g.visible)
def test_group_packages(self):
g = self.comps.group_by_pattern('base')
self.assertCountEqual(map(operator.attrgetter('name'), g.packages_iter()),
('tour', 'pepper'))
def test_iteration(self):
comps = self.comps
self.assertEqual([g.name for g in comps.groups_iter()],
['Base', 'Solid Ground', "Pepper's"])
self.assertEqual([c.name for c in comps.categories_iter()],
['Base System'])
g = dnf.util.first(comps.groups_iter())
self.assertEqual(g.desc_by_lang['cs'], TRANSLATION)
def test_group_display_order(self):
self.assertEqual([g.name for g in self.comps.groups],
["Pepper's", 'Base', 'Solid Ground'])
def test_packages(self):
comps = self.comps
group = dnf.util.first(comps.groups_iter())
self.assertSequenceEqual([pkg.name for pkg in group.packages],
(u'pepper', u'tour'))
self.assertSequenceEqual([pkg.name for pkg in group.mandatory_packages],
(u'pepper', u'tour'))
def test_size(self):
comps = self.comps
self.assertLength(comps, 5)
self.assertLength(comps.groups, support.TOTAL_GROUPS)
self.assertLength(comps.categories, 1)
self.assertLength(comps.environments, 1)
@mock.patch('locale.getlocale', return_value=('cs_CZ', 'UTF-8'))
def test_ui_name(self, _unused):
comps = self.comps
group = dnf.util.first(comps.groups_by_pattern('base'))
self.assertEqual(group.ui_name, u'Kritická cesta (Základ)')
@mock.patch('locale.getlocale', return_value=('cs_CZ', 'UTF-8'))
def test_ui_desc(self, _unused):
comps = self.comps
env = dnf.util.first(comps.environments_by_pattern('sugar-*'))
self.assertEqual(env.ui_description, u'Software pro výuku o vyučování.')
class PackageTest(support.TestCase):
def test_instance(self):
lc_pkg = libcomps.Package('weather', libcomps.PACKAGE_TYPE_OPTIONAL)
pkg = dnf.comps.Package(lc_pkg)
self.assertEqual(pkg.name, 'weather')
self.assertEqual(pkg.option_type, dnf.comps.OPTIONAL)
class TestTransactionBunch(support.TestCase):
def test_adding(self):
t1 = dnf.comps.TransactionBunch()
t1.install = {'right'}
t1.upgrade = {'tour'}
t1.remove = {'pepper'}
t2 = dnf.comps.TransactionBunch()
t2.install = {'pepper'}
t2.upgrade = {'right'}
t1 += t2
self.assertCountEqual(t1.install, ('right', 'pepper'))
self.assertCountEqual(t1.upgrade, ('tour', 'right'))
self.assertEmpty(t1.remove)
class SolverTestMixin(object):
def setUp(self):
comps = dnf.comps.Comps()
comps.add_from_xml_filename(support.COMPS_PATH)
self.comps = comps
self.persistor = support.MockGroupPersistor()
self.solver = dnf.comps.Solver(self.persistor, support.REASONS.get)
class SolverGroupTest(SolverTestMixin, support.TestCase):
def test_install(self):
grp = self.comps.group_by_pattern('base')
trans = self.solver.group_install(grp, dnf.comps.MANDATORY, ['right'])
self.assertLength(trans.install, 2)
p_grp = self.persistor.group('base')
self.assertCountEqual(p_grp.full_list, ['pepper', 'tour'])
self.assertCountEqual(p_grp.pkg_exclude, ['right'])
self.assertEqual(p_grp.pkg_types, dnf.comps.MANDATORY)
def test_install_opt(self):
grp = self.comps.group_by_pattern('somerset')
types = dnf.comps.DEFAULT | dnf.comps.OPTIONAL
trans = self.solver.group_install(grp, types, [])
self.assertLength(trans.install, 0)
self.assertLength(trans.install_opt, 1)
def test_removable_pkg(self):
p_grp1 = self.persistor.group('base')
p_grp2 = self.persistor.group('tune')
p_grp1.full_list.extend(('pepper', 'tour', 'right'))
p_grp2.full_list.append('tour')
self.assertTrue(self.solver._removable_pkg('pepper'))
# right's reason is "dep"
self.assertFalse(self.solver._removable_pkg('right'))
# tour appears in more than one group
self.assertFalse(self.solver._removable_pkg('tour'))
def test_remove(self):
# setup of the "current state"
p_grp = self.persistor.group('base')
p_grp.pkg_types = dnf.comps.MANDATORY
p_grp.full_list.extend(('pepper', 'tour'))
p_grp2 = self.persistor.group('tune')
p_grp2.full_list.append('pepper')
grp = self.comps.group_by_pattern('base')
trans = self.solver.group_remove(grp)
self.assertFalse(p_grp.installed)
self.assertCountEqual(trans.remove, ('tour',))
def test_upgrade(self):
# setup of the "current state"
p_grp = self.persistor.group('base')
p_grp.pkg_types = dnf.comps.MANDATORY
p_grp.full_list.extend(('pepper', 'handerson'))
grp = self.comps.group_by_pattern('base')
trans = self.solver.group_upgrade(grp)
self.assertCountEqual(trans.install, ('tour',))
self.assertCountEqual(trans.remove, ('handerson',))
self.assertCountEqual(trans.upgrade, ('pepper',))
self.assertCountEqual(p_grp.full_list, ('tour', 'pepper'))
class SolverEnvironmentTest(SolverTestMixin, support.TestCase):
def _install(self, env):
return self.solver.environment_install(env, dnf.comps.MANDATORY,
('lotus',))
def test_install(self):
env = self.comps.environment_by_pattern('sugar-desktop-environment')
trans = self._install(env)
self.assertCountEqual(trans.install, ('pepper', 'trampoline', 'hole',
'lotus'))
sugar = self.persistor.environment('sugar-desktop-environment')
self.assertCountEqual(sugar.full_list, ('Peppers', 'somerset'))
somerset = self.persistor.group('somerset')
self.assertTrue(somerset.installed)
self.assertEqual(somerset.pkg_types, dnf.comps.MANDATORY)
self.assertCountEqual(somerset.pkg_exclude, ('lotus',))
base = self.persistor.group('somerset')
self.assertTrue(base.installed)
def test_remove(self):
env = self.comps.environment_by_pattern('sugar-desktop-environment')
self._install(env)
trans = self.solver.environment_remove(env)
p_env = self.persistor.environment('sugar-desktop-environment')
self.assertCountEqual(trans.remove, ('pepper', 'trampoline', 'hole'))
self.assertFalse(p_env.grp_types)
self.assertFalse(p_env.pkg_types)
def test_upgrade(self):
"""Upgrade environment, the one group it knows is no longer installed."""
p_env = self.persistor.environment('sugar-desktop-environment')
p_env.full_list.extend(['somerset'])
p_env.grp_types = dnf.comps.ALL_TYPES
p_env.pkg_types = dnf.comps.ALL_TYPES
env = self.comps.environment_by_pattern('sugar-desktop-environment')
trans = self.solver.environment_upgrade(env)
self.assertCountEqual(trans.install, ('hole', 'lotus'))
self.assertEmpty(trans.upgrade)
| gpl-2.0 |
mnach/suds-py3k | suds/umx/attrlist.py | 210 | 2811 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Provides filtered attribute list classes.
"""
from suds import *
from suds.umx import *
from suds.sax import Namespace
class AttrList:
"""
A filtered attribute list.
Items are included during iteration if they are in either the (xs) or
(xml) namespaces.
@ivar raw: The I{raw} attribute list.
@type raw: list
"""
def __init__(self, attributes):
"""
@param attributes: A list of attributes
@type attributes: list
"""
self.raw = attributes
def real(self):
"""
Get list of I{real} attributes which exclude xs and xml attributes.
@return: A list of I{real} attributes.
@rtype: I{generator}
"""
for a in self.raw:
if self.skip(a): continue
yield a
def rlen(self):
"""
Get the number of I{real} attributes which exclude xs and xml attributes.
@return: A count of I{real} attributes.
@rtype: L{int}
"""
n = 0
for a in self.real():
n += 1
return n
def lang(self):
"""
Get list of I{filtered} attributes which exclude xs.
@return: A list of I{filtered} attributes.
@rtype: I{generator}
"""
for a in self.raw:
if a.qname() == 'xml:lang':
return a.value
return None
def skip(self, attr):
"""
Get whether to skip (filter-out) the specified attribute.
@param attr: An attribute.
@type attr: I{Attribute}
@return: True if should be skipped.
@rtype: bool
"""
ns = attr.namespace()
skip = (
Namespace.xmlns[1],
'http://schemas.xmlsoap.org/soap/encoding/',
'http://schemas.xmlsoap.org/soap/envelope/',
'http://www.w3.org/2003/05/soap-envelope',
)
return ( Namespace.xs(ns) or ns[1] in skip )
| lgpl-3.0 |
perezg/infoxchange | BASE/lib/python2.7/site-packages/django/contrib/gis/utils/ogrinfo.py | 236 | 1982 | """
This module includes some utility functions for inspecting the layout
of a GDAL data source -- the functionality is analogous to the output
produced by the `ogrinfo` utility.
"""
from django.contrib.gis.gdal import DataSource
from django.contrib.gis.gdal.geometries import GEO_CLASSES
def ogrinfo(data_source, num_features=10):
"""
Walks the available layers in the supplied `data_source`, displaying
the fields for the first `num_features` features.
"""
# Checking the parameters.
if isinstance(data_source, str):
data_source = DataSource(data_source)
elif isinstance(data_source, DataSource):
pass
else:
raise Exception('Data source parameter must be a string or a DataSource object.')
for i, layer in enumerate(data_source):
print("data source : %s" % data_source.name)
print("==== layer %s" % i)
print(" shape type: %s" % GEO_CLASSES[layer.geom_type.num].__name__)
print(" # features: %s" % len(layer))
print(" srs: %s" % layer.srs)
extent_tup = layer.extent.tuple
print(" extent: %s - %s" % (extent_tup[0:2], extent_tup[2:4]))
print("Displaying the first %s features ====" % num_features)
width = max(*map(len,layer.fields))
fmt = " %%%ss: %%s" % width
for j, feature in enumerate(layer[:num_features]):
print("=== Feature %s" % j)
for fld_name in layer.fields:
type_name = feature[fld_name].type_name
output = fmt % (fld_name, type_name)
val = feature.get(fld_name)
if val:
if isinstance(val, str):
val_fmt = ' ("%s")'
else:
val_fmt = ' (%s)'
output += val_fmt % val
else:
output += ' (None)'
print(output)
# For backwards compatibility.
sample = ogrinfo
| apache-2.0 |
hoosteeno/mozillians | vendor-local/lib/python/south/management/commands/datamigration.py | 92 | 5038 | """
Data migration creation command
"""
from __future__ import print_function
import sys
import os
import re
from optparse import make_option
try:
set
except NameError:
from sets import Set as set
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.db import models
from django.conf import settings
from south.migration import Migrations
from south.exceptions import NoMigrations
from south.creator import freezer
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--freeze', action='append', dest='freeze_list', type='string',
help='Freeze the specified app(s). Provide an app name with each; use the option multiple times for multiple apps'),
make_option('--stdout', action='store_true', dest='stdout', default=False,
help='Print the migration to stdout instead of writing it to a file.'),
)
help = "Creates a new template data migration for the given app"
usage_str = "Usage: ./manage.py datamigration appname migrationname [--stdout] [--freeze appname]"
def handle(self, app=None, name="", freeze_list=None, stdout=False, verbosity=1, **options):
verbosity = int(verbosity)
# Any supposed lists that are None become empty lists
freeze_list = freeze_list or []
# --stdout means name = -
if stdout:
name = "-"
# Only allow valid names
if re.search('[^_\w]', name) and name != "-":
self.error("Migration names should contain only alphanumeric characters and underscores.")
# If not name, there's an error
if not name:
self.error("You must provide a name for this migration.\n" + self.usage_str)
if not app:
self.error("You must provide an app to create a migration for.\n" + self.usage_str)
# Ensure that verbosity is not a string (Python 3)
try:
verbosity = int(verbosity)
except ValueError:
self.error("Verbosity must be an number.\n" + self.usage_str)
# Get the Migrations for this app (creating the migrations dir if needed)
migrations = Migrations(app, force_creation=True, verbose_creation=verbosity > 0)
# See what filename is next in line. We assume they use numbers.
new_filename = migrations.next_filename(name)
# Work out which apps to freeze
apps_to_freeze = self.calc_frozen_apps(migrations, freeze_list)
# So, what's in this file, then?
file_contents = self.get_migration_template() % {
"frozen_models": freezer.freeze_apps_to_string(apps_to_freeze),
"complete_apps": apps_to_freeze and "complete_apps = [%s]" % (", ".join(map(repr, apps_to_freeze))) or ""
}
# - is a special name which means 'print to stdout'
if name == "-":
print(file_contents)
# Write the migration file if the name isn't -
else:
fp = open(os.path.join(migrations.migrations_dir(), new_filename), "w")
fp.write(file_contents)
fp.close()
print("Created %s." % new_filename, file=sys.stderr)
def calc_frozen_apps(self, migrations, freeze_list):
"""
Works out, from the current app, settings, and the command line options,
which apps should be frozen.
"""
apps_to_freeze = []
for to_freeze in freeze_list:
if "." in to_freeze:
self.error("You cannot freeze %r; you must provide an app label, like 'auth' or 'books'." % to_freeze)
# Make sure it's a real app
if not models.get_app(to_freeze):
self.error("You cannot freeze %r; it's not an installed app." % to_freeze)
# OK, it's fine
apps_to_freeze.append(to_freeze)
if getattr(settings, 'SOUTH_AUTO_FREEZE_APP', True):
apps_to_freeze.append(migrations.app_label())
return apps_to_freeze
def error(self, message, code=1):
"""
Prints the error, and exits with the given code.
"""
print(message, file=sys.stderr)
sys.exit(code)
def get_migration_template(self):
return MIGRATION_TEMPLATE
MIGRATION_TEMPLATE = """# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Don't use "from appname.models import ModelName".
# Use orm.ModelName to refer to models in this application,
# and orm['appname.ModelName'] for models in other applications.
def backwards(self, orm):
"Write your backwards methods here."
models = %(frozen_models)s
%(complete_apps)s
symmetrical = True
"""
| bsd-3-clause |
ael-code/pydirl | test/test_zipped.py | 1 | 1924 | import unittest
import shutil
import tempfile
from zipfile import ZipFile
from pydirl.app import create_app
from . import populate_directory
class PydirlZippedTestCase(unittest.TestCase):
def setUp(self):
self.root = tempfile.mkdtemp(prefix='pydirl_test_')
populate_directory(self.root)
def tearDown(self):
shutil.rmtree(self.root)
def get_zipped_root(self, regex=None):
app = create_app({'ROOT': self.root, 'EXCLUDE': regex})
tc = app.test_client()
rsp = tc.get('/?download=True')
self.assertTrue(rsp.is_streamed)
# store the result byte stream in a temporary file
tempF = tempfile.TemporaryFile()
tempF.write(rsp.get_data())
return tempF
def test_zipped(self):
zippedRoot = self.get_zipped_root()
expected_entries = ['2/2-1/2-1.txt', '1/1-1/1-1.txt']
zf = ZipFile(zippedRoot)
self.assertTrue(zf.testzip() is None)
for elem in expected_entries:
self.assertTrue(elem in zf.namelist())
self.assertEqual(len(expected_entries), len(zf.namelist()))
zf.close()
def test_zipped_excluded_file(self):
zippedRoot = self.get_zipped_root('1-1.txt')
expected_entries = ['2/2-1/2-1.txt']
zf = ZipFile(zippedRoot)
self.assertTrue(zf.testzip() is None)
for elem in expected_entries:
self.assertTrue(elem in zf.namelist())
self.assertEqual(len(expected_entries), len(zf.namelist()))
zf.close()
def test_zipped_excluded_directory(self):
zippedRoot = self.get_zipped_root('1/')
expected_entries = ['2/2-1/2-1.txt']
zf = ZipFile(zippedRoot)
self.assertTrue(zf.testzip() is None)
for elem in expected_entries:
self.assertTrue(elem in zf.namelist())
self.assertEqual(len(expected_entries), len(zf.namelist()))
zf.close()
| gpl-3.0 |
farhaanbukhsh/sympy | sympy/physics/optics/gaussopt.py | 66 | 20059 | # -*- encoding: utf-8 -*-
"""
Gaussian optics.
The module implements:
- Ray transfer matrices for geometrical and gaussian optics.
See RayTransferMatrix, GeometricRay and BeamParameter
- Conjugation relations for geometrical and gaussian optics.
See geometric_conj*, gauss_conj and conjugate_gauss_beams
The conventions for the distances are as follows:
focal distance
positive for convergent lenses
object distance
positive for real objects
image distance
positive for real images
"""
from __future__ import print_function, division
__all__ = [
'RayTransferMatrix',
'FreeSpace',
'FlatRefraction',
'CurvedRefraction',
'FlatMirror',
'CurvedMirror',
'ThinLens',
'GeometricRay',
'BeamParameter',
'waist2rayleigh',
'rayleigh2waist',
'geometric_conj_ab',
'geometric_conj_af',
'geometric_conj_bf',
'gaussian_conj',
'conjugate_gauss_beams',
]
from sympy import (atan2, Expr, I, im, Matrix, oo, pi, re, sqrt, sympify,
together)
from sympy.utilities.misc import filldedent
###
# A, B, C, D matrices
###
class RayTransferMatrix(Matrix):
"""
Base class for a Ray Transfer Matrix.
It should be used if there isn't already a more specific subclass mentioned
in See Also.
Parameters
==========
parameters : A, B, C and D or 2x2 matrix (Matrix(2, 2, [A, B, C, D]))
Examples
========
>>> from sympy.physics.optics import RayTransferMatrix, ThinLens
>>> from sympy import Symbol, Matrix
>>> mat = RayTransferMatrix(1, 2, 3, 4)
>>> mat
Matrix([
[1, 2],
[3, 4]])
>>> RayTransferMatrix(Matrix([[1, 2], [3, 4]]))
Matrix([
[1, 2],
[3, 4]])
>>> mat.A
1
>>> f = Symbol('f')
>>> lens = ThinLens(f)
>>> lens
Matrix([
[ 1, 0],
[-1/f, 1]])
>>> lens.C
-1/f
See Also
========
GeometricRay, BeamParameter,
FreeSpace, FlatRefraction, CurvedRefraction,
FlatMirror, CurvedMirror, ThinLens
References
==========
.. [1] http://en.wikipedia.org/wiki/Ray_transfer_matrix_analysis
"""
def __new__(cls, *args):
if len(args) == 4:
temp = ((args[0], args[1]), (args[2], args[3]))
elif len(args) == 1 \
and isinstance(args[0], Matrix) \
and args[0].shape == (2, 2):
temp = args[0]
else:
raise ValueError(filldedent('''
Expecting 2x2 Matrix or the 4 elements of
the Matrix but got %s''' % str(args)))
return Matrix.__new__(cls, temp)
def __mul__(self, other):
if isinstance(other, RayTransferMatrix):
return RayTransferMatrix(Matrix.__mul__(self, other))
elif isinstance(other, GeometricRay):
return GeometricRay(Matrix.__mul__(self, other))
elif isinstance(other, BeamParameter):
temp = self*Matrix(((other.q,), (1,)))
q = (temp[0]/temp[1]).expand(complex=True)
return BeamParameter(other.wavelen,
together(re(q)),
z_r=together(im(q)))
else:
return Matrix.__mul__(self, other)
@property
def A(self):
"""
The A parameter of the Matrix.
Examples
========
>>> from sympy.physics.optics import RayTransferMatrix
>>> mat = RayTransferMatrix(1, 2, 3, 4)
>>> mat.A
1
"""
return self[0, 0]
@property
def B(self):
"""
The B parameter of the Matrix.
Examples
========
>>> from sympy.physics.optics import RayTransferMatrix
>>> mat = RayTransferMatrix(1, 2, 3, 4)
>>> mat.B
2
"""
return self[0, 1]
@property
def C(self):
"""
The C parameter of the Matrix.
Examples
========
>>> from sympy.physics.optics import RayTransferMatrix
>>> mat = RayTransferMatrix(1, 2, 3, 4)
>>> mat.C
3
"""
return self[1, 0]
@property
def D(self):
"""
The D parameter of the Matrix.
Examples
========
>>> from sympy.physics.optics import RayTransferMatrix
>>> mat = RayTransferMatrix(1, 2, 3, 4)
>>> mat.D
4
"""
return self[1, 1]
class FreeSpace(RayTransferMatrix):
"""
Ray Transfer Matrix for free space.
Parameters
==========
distance
See Also
========
RayTransferMatrix
Examples
========
>>> from sympy.physics.optics import FreeSpace
>>> from sympy import symbols
>>> d = symbols('d')
>>> FreeSpace(d)
Matrix([
[1, d],
[0, 1]])
"""
def __new__(cls, d):
return RayTransferMatrix.__new__(cls, 1, d, 0, 1)
class FlatRefraction(RayTransferMatrix):
"""
Ray Transfer Matrix for refraction.
Parameters
==========
n1 : refractive index of one medium
n2 : refractive index of other medium
See Also
========
RayTransferMatrix
Examples
========
>>> from sympy.physics.optics import FlatRefraction
>>> from sympy import symbols
>>> n1, n2 = symbols('n1 n2')
>>> FlatRefraction(n1, n2)
Matrix([
[1, 0],
[0, n1/n2]])
"""
def __new__(cls, n1, n2):
n1, n2 = map(sympify, (n1, n2))
return RayTransferMatrix.__new__(cls, 1, 0, 0, n1/n2)
class CurvedRefraction(RayTransferMatrix):
"""
Ray Transfer Matrix for refraction on curved interface.
Parameters
==========
R : radius of curvature (positive for concave)
n1 : refractive index of one medium
n2 : refractive index of other medium
See Also
========
RayTransferMatrix
Examples
========
>>> from sympy.physics.optics import CurvedRefraction
>>> from sympy import symbols
>>> R, n1, n2 = symbols('R n1 n2')
>>> CurvedRefraction(R, n1, n2)
Matrix([
[ 1, 0],
[(n1 - n2)/(R*n2), n1/n2]])
"""
def __new__(cls, R, n1, n2):
R, n1, n2 = map(sympify, (R, n1, n2))
return RayTransferMatrix.__new__(cls, 1, 0, (n1 - n2)/R/n2, n1/n2)
class FlatMirror(RayTransferMatrix):
"""
Ray Transfer Matrix for reflection.
See Also
========
RayTransferMatrix
Examples
========
>>> from sympy.physics.optics import FlatMirror
>>> FlatMirror()
Matrix([
[1, 0],
[0, 1]])
"""
def __new__(cls):
return RayTransferMatrix.__new__(cls, 1, 0, 0, 1)
class CurvedMirror(RayTransferMatrix):
"""
Ray Transfer Matrix for reflection from curved surface.
Parameters
==========
R : radius of curvature (positive for concave)
See Also
========
RayTransferMatrix
Examples
========
>>> from sympy.physics.optics import CurvedMirror
>>> from sympy import symbols
>>> R = symbols('R')
>>> CurvedMirror(R)
Matrix([
[ 1, 0],
[-2/R, 1]])
"""
def __new__(cls, R):
R = sympify(R)
return RayTransferMatrix.__new__(cls, 1, 0, -2/R, 1)
class ThinLens(RayTransferMatrix):
"""
Ray Transfer Matrix for a thin lens.
Parameters
==========
f : the focal distance
See Also
========
RayTransferMatrix
Examples
========
>>> from sympy.physics.optics import ThinLens
>>> from sympy import symbols
>>> f = symbols('f')
>>> ThinLens(f)
Matrix([
[ 1, 0],
[-1/f, 1]])
"""
def __new__(cls, f):
f = sympify(f)
return RayTransferMatrix.__new__(cls, 1, 0, -1/f, 1)
###
# Representation for geometric ray
###
class GeometricRay(Matrix):
"""
Representation for a geometric ray in the Ray Transfer Matrix formalism.
Parameters
==========
h : height, and
angle : angle, or
matrix : a 2x1 matrix (Matrix(2, 1, [height, angle]))
Examples
========
>>> from sympy.physics.optics import GeometricRay, FreeSpace
>>> from sympy import symbols, Matrix
>>> d, h, angle = symbols('d, h, angle')
>>> GeometricRay(h, angle)
Matrix([
[ h],
[angle]])
>>> FreeSpace(d)*GeometricRay(h, angle)
Matrix([
[angle*d + h],
[ angle]])
>>> GeometricRay( Matrix( ((h,), (angle,)) ) )
Matrix([
[ h],
[angle]])
See Also
========
RayTransferMatrix
"""
def __new__(cls, *args):
if len(args) == 1 and isinstance(args[0], Matrix) \
and args[0].shape == (2, 1):
temp = args[0]
elif len(args) == 2:
temp = ((args[0],), (args[1],))
else:
raise ValueError(filldedent('''
Expecting 2x1 Matrix or the 2 elements of
the Matrix but got %s''' % str(args)))
return Matrix.__new__(cls, temp)
@property
def height(self):
"""
The distance from the optical axis.
Examples
========
>>> from sympy.physics.optics import GeometricRay
>>> from sympy import symbols
>>> h, angle = symbols('h, angle')
>>> gRay = GeometricRay(h, angle)
>>> gRay.height
h
"""
return self[0]
@property
def angle(self):
"""
The angle with the optical axis.
Examples
========
>>> from sympy.physics.optics import GeometricRay
>>> from sympy import symbols
>>> h, angle = symbols('h, angle')
>>> gRay = GeometricRay(h, angle)
>>> gRay.angle
angle
"""
return self[1]
###
# Representation for gauss beam
###
class BeamParameter(Expr):
"""
Representation for a gaussian ray in the Ray Transfer Matrix formalism.
Parameters
==========
wavelen : the wavelength,
z : the distance to waist, and
w : the waist, or
z_r : the rayleigh range
Examples
========
>>> from sympy.physics.optics import BeamParameter
>>> p = BeamParameter(530e-9, 1, w=1e-3)
>>> p.q
1 + 1.88679245283019*I*pi
>>> p.q.n()
1.0 + 5.92753330865999*I
>>> p.w_0.n()
0.00100000000000000
>>> p.z_r.n()
5.92753330865999
>>> from sympy.physics.optics import FreeSpace
>>> fs = FreeSpace(10)
>>> p1 = fs*p
>>> p.w.n()
0.00101413072159615
>>> p1.w.n()
0.00210803120913829
See Also
========
RayTransferMatrix
References
==========
.. [1] http://en.wikipedia.org/wiki/Complex_beam_parameter
.. [2] http://en.wikipedia.org/wiki/Gaussian_beam
"""
#TODO A class Complex may be implemented. The BeamParameter may
# subclass it. See:
# https://groups.google.com/d/topic/sympy/7XkU07NRBEs/discussion
__slots__ = ['z', 'z_r', 'wavelen']
def __new__(cls, wavelen, z, **kwargs):
wavelen, z = map(sympify, (wavelen, z))
inst = Expr.__new__(cls, wavelen, z)
inst.wavelen = wavelen
inst.z = z
if len(kwargs) != 1:
raise ValueError('Constructor expects exactly one named argument.')
elif 'z_r' in kwargs:
inst.z_r = sympify(kwargs['z_r'])
elif 'w' in kwargs:
inst.z_r = waist2rayleigh(sympify(kwargs['w']), wavelen)
else:
raise ValueError('The constructor needs named argument w or z_r')
return inst
@property
def q(self):
"""
The complex parameter representing the beam.
Examples
========
>>> from sympy.physics.optics import BeamParameter
>>> p = BeamParameter(530e-9, 1, w=1e-3)
>>> p.q
1 + 1.88679245283019*I*pi
"""
return self.z + I*self.z_r
@property
def radius(self):
"""
The radius of curvature of the phase front.
Examples
========
>>> from sympy.physics.optics import BeamParameter
>>> p = BeamParameter(530e-9, 1, w=1e-3)
>>> p.radius
1 + 3.55998576005696*pi**2
"""
return self.z*(1 + (self.z_r/self.z)**2)
@property
def w(self):
"""
The beam radius at `1/e^2` intensity.
See Also
========
w_0 : the minimal radius of beam
Examples
========
>>> from sympy.physics.optics import BeamParameter
>>> p = BeamParameter(530e-9, 1, w=1e-3)
>>> p.w
0.001*sqrt(0.2809/pi**2 + 1)
"""
return self.w_0*sqrt(1 + (self.z/self.z_r)**2)
@property
def w_0(self):
    """
    The beam waist (minimal radius).

    See Also
    ========
    w : the beam radius at `1/e^2` intensity

    Examples
    ========
    >>> from sympy.physics.optics import BeamParameter
    >>> p = BeamParameter(530e-9, 1, w=1e-3)
    >>> p.w_0
    0.00100000000000000
    """
    # Inverse of waist2rayleigh: w_0 = sqrt(z_r * lambda / pi).
    return sqrt(self.z_r/pi*self.wavelen)
@property
def divergence(self):
    """
    Half of the total angular spread.

    Examples
    ========
    >>> from sympy.physics.optics import BeamParameter
    >>> p = BeamParameter(530e-9, 1, w=1e-3)
    >>> p.divergence
    0.00053/pi
    """
    # Far-field half-angle: theta = lambda / (pi * w_0).
    return self.wavelen/pi/self.w_0
@property
def gouy(self):
    """
    The Gouy phase.

    Examples
    ========
    >>> from sympy.physics.optics import BeamParameter
    >>> p = BeamParameter(530e-9, 1, w=1e-3)
    >>> p.gouy
    atan(0.53/pi)
    """
    # atan2 keeps the correct quadrant for negative z.
    return atan2(self.z, self.z_r)
@property
def waist_approximation_limit(self):
    """
    The minimal waist for which the gauss beam approximation is valid.

    The gauss beam is a solution to the paraxial equation. For curvatures
    that are too great it is not a valid approximation.

    Examples
    ========
    >>> from sympy.physics.optics import BeamParameter
    >>> p = BeamParameter(530e-9, 1, w=1e-3)
    >>> p.waist_approximation_limit
    1.06e-6/pi
    """
    # Below ~2*lambda/pi the paraxial approximation breaks down.
    return 2*self.wavelen/pi
###
# Utilities
###
def waist2rayleigh(w, wavelen):
    """
    Calculate the rayleigh range from the waist of a gaussian beam.

    See Also
    ========
    rayleigh2waist, BeamParameter

    Examples
    ========
    >>> from sympy.physics.optics import waist2rayleigh
    >>> from sympy import symbols
    >>> w, wavelen = symbols('w wavelen')
    >>> waist2rayleigh(w, wavelen)
    pi*w**2/wavelen
    """
    w, wavelen = map(sympify, (w, wavelen))
    # z_r = pi * w_0**2 / lambda
    return w**2*pi/wavelen
def rayleigh2waist(z_r, wavelen):
    """Calculate the waist from the rayleigh range of a gaussian beam.

    See Also
    ========
    waist2rayleigh, BeamParameter

    Examples
    ========
    >>> from sympy.physics.optics import rayleigh2waist
    >>> from sympy import symbols
    >>> z_r, wavelen = symbols('z_r wavelen')
    >>> rayleigh2waist(z_r, wavelen)
    sqrt(wavelen*z_r)/sqrt(pi)
    """
    z_r, wavelen = map(sympify, (z_r, wavelen))
    # w_0 = sqrt(z_r * lambda / pi), the inverse of waist2rayleigh.
    return sqrt(z_r/pi*wavelen)
def geometric_conj_ab(a, b):
    """
    Conjugation relation for geometrical beams under paraxial conditions.

    Takes the distances to the optical element and returns the needed
    focal distance.

    See Also
    ========
    geometric_conj_af, geometric_conj_bf

    Examples
    ========
    >>> from sympy.physics.optics import geometric_conj_ab
    >>> from sympy import symbols
    >>> a, b = symbols('a b')
    >>> geometric_conj_ab(a, b)
    a*b/(a + b)
    """
    a, b = map(sympify, (a, b))
    if abs(a) == oo or abs(b) == oo:
        # One distance at infinity degenerates the thin-lens equation:
        # the focal distance equals the finite distance.
        return a if abs(b) == oo else b
    else:
        # Thin-lens equation 1/f = 1/a + 1/b solved for f.
        return a*b/(a + b)
def geometric_conj_af(a, f):
    """
    Conjugation relation for geometrical beams under paraxial conditions.

    Takes the object distance (for geometric_conj_af) or the image distance
    (for geometric_conj_bf) to the optical element and the focal distance.
    Then it returns the other distance needed for conjugation.

    See Also
    ========
    geometric_conj_ab

    Examples
    ========
    >>> from sympy.physics.optics.gaussopt import geometric_conj_af, geometric_conj_bf
    >>> from sympy import symbols
    >>> a, b, f = symbols('a b f')
    >>> geometric_conj_af(a, f)
    a*f/(a - f)
    >>> geometric_conj_bf(b, f)
    b*f/(b - f)
    """
    a, f = map(sympify, (a, f))
    # Reuse the two-distance relation with a sign flip on f.
    return -geometric_conj_ab(a, -f)

# The relation is symmetric in object/image distance, so the same
# function serves both directions.
geometric_conj_bf = geometric_conj_af
def gaussian_conj(s_in, z_r_in, f):
    """
    Conjugation relation for gaussian beams.

    Parameters
    ==========
    s_in : the distance to optical element from the waist
    z_r_in : the rayleigh range of the incident beam
    f : the focal length of the optical element

    Returns
    =======
    a tuple containing (s_out, z_r_out, m)
    s_out : the distance between the new waist and the optical element
    z_r_out : the rayleigh range of the emergent beam
    m : the ratio between the new and the old waists

    Examples
    ========
    >>> from sympy.physics.optics import gaussian_conj
    >>> from sympy import symbols
    >>> s_in, z_r_in, f = symbols('s_in z_r_in f')
    >>> gaussian_conj(s_in, z_r_in, f)[0]
    1/(-1/(s_in + z_r_in**2/(-f + s_in)) + 1/f)
    >>> gaussian_conj(s_in, z_r_in, f)[1]
    z_r_in/(1 - s_in**2/f**2 + z_r_in**2/f**2)
    >>> gaussian_conj(s_in, z_r_in, f)[2]
    1/sqrt(1 - s_in**2/f**2 + z_r_in**2/f**2)
    """
    s_in, z_r_in, f = map(sympify, (s_in, z_r_in, f))
    # Gaussian imaging equation solved for the image-side waist distance.
    s_out = 1 / ( -1/(s_in + z_r_in**2/(s_in - f)) + 1/f )
    # Waist magnification and transformed Rayleigh range.
    m = 1/sqrt((1 - (s_in/f)**2) + (z_r_in/f)**2)
    z_r_out = z_r_in / ((1 - (s_in/f)**2) + (z_r_in/f)**2)
    return (s_out, z_r_out, m)
def conjugate_gauss_beams(wavelen, waist_in, waist_out, **kwargs):
    """
    Find the optical setup conjugating the object/image waists.

    Parameters
    ==========
    wavelen : the wavelength of the beam
    waist_in and waist_out : the waists to be conjugated
    f : the focal distance of the element used in the conjugation

    Returns
    =======
    a tuple containing (s_in, s_out, f)
    s_in : the distance before the optical element
    s_out : the distance after the optical element
    f : the focal distance of the optical element

    Examples
    ========
    >>> from sympy.physics.optics import conjugate_gauss_beams
    >>> from sympy import symbols, factor
    >>> l, w_i, w_o, f = symbols('l w_i w_o f')
    >>> conjugate_gauss_beams(l, w_i, w_o, f=f)[0]
    f*(-sqrt(w_i**2/w_o**2 - pi**2*w_i**4/(f**2*l**2)) + 1)
    >>> factor(conjugate_gauss_beams(l, w_i, w_o, f=f)[1])
    f*w_o**2*(w_i**2/w_o**2 - sqrt(w_i**2/w_o**2 -
              pi**2*w_i**4/(f**2*l**2)))/w_i**2
    >>> conjugate_gauss_beams(l, w_i, w_o, f=f)[2]
    f
    """
    #TODO add the other possible arguments
    wavelen, waist_in, waist_out = map(sympify, (wavelen, waist_in, waist_out))
    # Required waist magnification and the incident Rayleigh range.
    m = waist_out / waist_in
    z = waist2rayleigh(waist_in, wavelen)
    if len(kwargs) != 1:
        raise ValueError("The function expects only one named argument")
    elif 'dist' in kwargs:
        raise NotImplementedError(filldedent('''
            Currently only focal length is supported as a parameter'''))
    elif 'f' in kwargs:
        f = sympify(kwargs['f'])
        s_in = f * (1 - sqrt(1/m**2 - z**2/f**2))
        # The image-side distance follows from the gaussian conjugation.
        s_out = gaussian_conj(s_in, z, f)[0]
    elif 's_in' in kwargs:
        raise NotImplementedError(filldedent('''
            Currently only focal length is supported as a parameter'''))
    else:
        raise ValueError(filldedent('''
            The functions expects the focal length as a named argument'''))
    return (s_in, s_out, f)
#TODO
#def plot_beam():
# """Plot the beam radius as it propagates in space."""
# pass
#TODO
#def plot_beam_conjugation():
# """
# Plot the intersection of two beams.
#
# Represents the conjugation relation.
#
# See Also
# ========
#
# conjugate_gauss_beams
# """
# pass
| bsd-3-clause |
chand3040/cloud_that | common/test/acceptance/pages/lms/course_nav.py | 96 | 7770 | """
Course navigation page object
"""
import re
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise
class CourseNavPage(PageObject):
    """
    Navigate sections and sequences in the courseware.
    """

    url = None

    def is_browser_on_page(self):
        return self.q(css='div.course-index').present

    @property
    def sections(self):
        """
        Return a dictionary representation of sections and subsections.

        Example:

            {
                'Introduction': ['Course Overview'],
                'Week 1': ['Lesson 1', 'Lesson 2', 'Homework']
                'Final Exam': ['Final Exam']
            }

        You can use these titles in `go_to_section` to navigate to the section.
        """
        # Dict to store the result
        nav_dict = dict()

        section_titles = self._section_titles()

        # Get the subsection titles for each chapter
        for sec_index, sec_title in enumerate(section_titles):
            # Add one to convert list index (starts at 0) to CSS index (starts at 1)
            subsections = self._subsection_titles(sec_index + 1)

            # Fix: the original guard tested `len(section_titles) < 1`, which
            # can never be true inside this loop (it iterates section_titles).
            # The intent was to warn when a *section* has no subsections.
            if not subsections:
                self.warning("Could not find subsections for '{0}'".format(sec_title))
            else:
                nav_dict[sec_title] = subsections

        return nav_dict

    @property
    def sequence_items(self):
        """
        Return a list of sequence items on the page.
        Sequence items are one level below subsections in the course nav.

        Example return value:
            ['Chemical Bonds Video', 'Practice Problems', 'Homework']
        """
        seq_css = 'ol#sequence-list>li>a>p'
        return self.q(css=seq_css).map(self._clean_seq_titles).results

    def go_to_section(self, section_title, subsection_title):
        """
        Go to the section in the courseware.
        Every section must have at least one subsection, so specify
        both the section and subsection title.

        Example:
            go_to_section("Week 1", "Lesson 1")
        """
        # For test stability, disable JQuery animations (opening / closing menus)
        self.browser.execute_script("jQuery.fx.off = true;")

        # Get the section by index
        try:
            sec_index = self._section_titles().index(section_title)
        except ValueError:
            self.warning("Could not find section '{0}'".format(section_title))
            return

        # Click the section to ensure it's open (no harm in clicking twice if it's already open)
        # Add one to convert from list index to CSS index
        section_css = 'nav>div.chapter:nth-of-type({0})>h3>a'.format(sec_index + 1)
        self.q(css=section_css).first.click()

        # Get the subsection by index
        try:
            subsec_index = self._subsection_titles(sec_index + 1).index(subsection_title)
        except ValueError:
            msg = "Could not find subsection '{0}' in section '{1}'".format(subsection_title, section_title)
            self.warning(msg)
            return

        # Convert list indices (start at zero) to CSS indices (start at 1)
        subsection_css = "nav>div.chapter:nth-of-type({0})>ul>li:nth-of-type({1})>a".format(
            sec_index + 1, subsec_index + 1
        )

        # Click the subsection and ensure that the page finishes reloading
        self.q(css=subsection_css).first.click()
        self._on_section_promise(section_title, subsection_title).fulfill()

    def go_to_sequential(self, sequential_title):
        """
        Within a section/subsection, navigate to the sequential with `sequential_title`.
        """
        # Get the index of the item in the sequence
        all_items = self.sequence_items

        try:
            seq_index = all_items.index(sequential_title)
        except ValueError:
            msg = "Could not find sequential '{0}'. Available sequentials: [{1}]".format(
                sequential_title, ", ".join(all_items)
            )
            self.warning(msg)
        else:
            # Click on the sequence item at the correct index
            # Convert the list index (starts at 0) to a CSS index (starts at 1)
            seq_css = "ol#sequence-list>li:nth-of-type({0})>a".format(seq_index + 1)
            self.q(css=seq_css).first.click()

    def _section_titles(self):
        """
        Return a list of all section titles on the page.
        """
        chapter_css = 'nav > div.chapter > h3 > a'
        return self.q(css=chapter_css).map(lambda el: el.text.strip()).results

    def _subsection_titles(self, section_index):
        """
        Return a list of all subsection titles on the page
        for the section at index `section_index` (starts at 1).
        """
        # Retrieve the subsection title for the section
        # Add one to the list index to get the CSS index, which starts at one
        subsection_css = 'nav>div.chapter:nth-of-type({0})>ul>li>a>p:nth-of-type(1)'.format(section_index)

        # If the element is visible, we can get its text directly
        # Otherwise, we need to get the HTML
        # It *would* make sense to always get the HTML, but unfortunately
        # the open tab has some child <span> tags that we don't want.
        return self.q(
            css=subsection_css
        ).map(
            lambda el: el.text.strip().split('\n')[0] if el.is_displayed() else el.get_attribute('innerHTML').strip()
        ).results

    def _on_section_promise(self, section_title, subsection_title):
        """
        Return a `Promise` that is fulfilled when the user is on
        the correct section and subsection.
        """
        desc = "currently at section '{0}' and subsection '{1}'".format(section_title, subsection_title)
        return EmptyPromise(
            lambda: self._is_on_section(section_title, subsection_title), desc
        )

    def _is_on_section(self, section_title, subsection_title):
        """
        Return a boolean indicating whether the user is on the section and subsection
        with the specified titles.

        This assumes that the currently expanded section is the one we're on
        That's true right after we click the section/subsection, but not true in general
        (the user could go to a section, then expand another tab).
        """
        current_section_list = self.q(css='nav>div.chapter.is-open>h3>a').text
        current_subsection_list = self.q(css='nav>div.chapter.is-open li.active>a>p').text

        if len(current_section_list) == 0:
            self.warning("Could not find the current section")
            return False

        elif len(current_subsection_list) == 0:
            self.warning("Could not find current subsection")
            return False

        else:
            return (
                current_section_list[0].strip() == section_title and
                current_subsection_list[0].strip().split('\n')[0] == subsection_title
            )

    # Regular expression to remove HTML span tags from a string
    REMOVE_SPAN_TAG_RE = re.compile(r'<span.+/span>')

    def _clean_seq_titles(self, element):
        """
        Clean HTML of sequence titles, stripping out span tags and returning the first line.
        """
        return self.REMOVE_SPAN_TAG_RE.sub('', element.get_attribute('innerHTML')).strip().split('\n')[0]

    def go_to_sequential_position(self, sequential_position):
        """
        Within a section/subsection navigate to the sequential position specified by `sequential_position`.

        Arguments:
            sequential_position (int): position in sequential bar
        """
        sequential_position_css = '#tab_{0}'.format(sequential_position - 1)
        self.q(css=sequential_position_css).first.click()
| agpl-3.0 |
softappeal/yass | py3/yass.py | 1 | 24242 | import inspect
from collections import OrderedDict
from io import StringIO
from struct import Struct
from enum import Enum
from typing import cast, Any, Dict, List, TypeVar, Generic, Optional, Callable, Set
def abstract(abstractClass):
    """
    Class decorator marking abstractClass as non-instantiable.

    Instantiating the decorated class itself raises RuntimeError, while
    subclasses construct normally through the original __new__.
    """
    originalNew = abstractClass.__new__

    def guardedNew(subClass, *args, **kwargs):
        if subClass is abstractClass:
            raise RuntimeError(f"can't instantiate abstract {abstractClass}")
        # object.__new__ is a builtin accepting only the class argument;
        # a user-defined __new__ receives the constructor arguments too.
        if inspect.isbuiltin(originalNew):
            return originalNew(subClass)
        return originalNew(subClass, *args, **kwargs)

    abstractClass.__new__ = guardedNew
    return abstractClass
# Big-endian (network byte order) fixed-width layouts shared by Writer/Reader.
DOUBLE_STRUCT = Struct('>d')
INT_STRUCT = Struct('>i')
class Writer:
    """
    Serializes primitive values to a byte sink.

    The sink is any callable accepting a bytes object. Integers use the
    VarInt/ZigZag wire encodings; fixed-width int/double use the
    big-endian layouts INT_STRUCT / DOUBLE_STRUCT.
    """

    def __init__(self, writeBytes: Callable[[bytes], None]) -> None:
        self.writeBytes = writeBytes

    def writeByte(self, value: int) -> None:
        # Emit a single raw byte.
        self.writeBytes(bytes((value,)))

    def writeVarInt(self, value: int) -> None:
        # 7 payload bits per byte, least significant group first; the high
        # bit marks continuation. Negative values are written in their
        # 32-bit two's-complement form (always 5 bytes).
        while (value & 0x_ffff_ff80) != 0:
            self.writeByte((value & 0x_7f) | 0x_80)
            if value < 0:
                value += 0x_1_0000_0000
            value >>= 7
        self.writeByte(value)

    def writeZigZagInt(self, value: int) -> None:
        # ZigZag maps small negative numbers onto small unsigned ones.
        self.writeVarInt((value << 1) ^ (value >> 31))

    def writeInt(self, value: int) -> None:
        # Fixed 4-byte big-endian signed int.
        self.writeBytes(INT_STRUCT.pack(value))

    def writeDouble(self, value: float) -> None:
        # Fixed 8-byte big-endian IEEE-754 double.
        self.writeBytes(DOUBLE_STRUCT.pack(value))
class Reader:
    """
    Deserializes primitive values from a byte source.

    The source is any callable taking a length and returning that many
    bytes (mirror of Writer).
    """

    def __init__(self, readBytes: Callable[[int], bytes]) -> None:
        self.readBytes = readBytes

    def readByte(self) -> int:
        (byte,) = self.readBytes(1)
        return byte

    def readVarInt(self) -> int:
        # Accumulate 7 bits per byte until a byte without the continuation
        # bit arrives; at most 5 bytes (32 bits) are accepted.
        value = 0
        for shift in range(0, 32, 7):
            byte = self.readByte()
            value |= (byte & 0x_7f) << shift
            if (byte & 0x_80) == 0:
                # Reinterpret the 32-bit pattern as a signed integer.
                return value - 0x_1_0000_0000 if value > 0x_7fff_ffff else value
        raise RuntimeError("malformed VarInt input")

    def readZigZagInt(self) -> int:
        signed = self.readVarInt()
        unsigned = signed + 0x_1_0000_0000 if signed < 0 else signed
        return (unsigned >> 1) ^ -(unsigned & 1)

    def readInt(self) -> int:
        # Fixed 4-byte big-endian signed int.
        return INT_STRUCT.unpack(self.readBytes(4))[0]

    def readDouble(self) -> float:
        # Fixed 8-byte big-endian IEEE-754 double.
        return DOUBLE_STRUCT.unpack(self.readBytes(8))[0]
@abstract
class TypeHandler:
    """Reads/writes values of one serializable type (one wire type id)."""
    def read(self, input: 'Input') -> Optional[Any]:
        raise NotImplementedError()
    def write(self, value: Any, output: 'Output') -> None:
        raise NotImplementedError()
    def writeWithId(self, id: int, value: Any, output: 'Output') -> None:
        # Default framing: type id first, then the value; overridden by
        # ClassTypeHandler to support graph back-references.
        output.writer.writeVarInt(id)
        self.write(value, output)
class TypeDesc:
    """Pairs a wire type id with the TypeHandler that encodes/decodes it."""
    def __init__(self, id: int, handler: TypeHandler) -> None:
        self.id = id
        self.handler = handler
    def write(self, value: Optional[Any], output: 'Output') -> None:
        self.handler.writeWithId(self.id, value, output)
# Attribute name under which serializable classes carry their TypeDesc.
TYPE_DESC = "TYPE_DESC"

def typeDesc(value: Any) -> TypeDesc:
    """Return the TypeDesc attached to value's class (AttributeError if absent)."""
    return getattr(value, TYPE_DESC)
class Input:
    """Decoding context: byte reader, id->handler table, back-reference list."""
    def __init__(self, reader: Reader, id2typeHandler: Dict[int, TypeHandler]) -> None:
        self.reader = reader
        self.id2typeHandler = id2typeHandler
        # Lazily created list of already-decoded graph objects; consumed by
        # ReferenceTypeHandler, populated by ClassTypeHandler.
        self.referenceableObjects: Optional[List[Any]] = None
    def read(self) -> Optional[Any]:
        # Dispatch on the leading VarInt type id.
        return self.id2typeHandler[self.reader.readVarInt()].read(self)
class Output:
    """
    Encoding context: wraps a Writer and tracks object->reference ids for
    graph (referenceable) serialization.
    """
    def __init__(self, writer: Writer) -> None:
        self.writer = writer
        # Lazily created by ClassTypeHandler.writeWithId.
        self.object2reference: Optional[Dict[Any, int]] = None
    def write(self, value: Optional[Any]) -> None:
        """
        Serialize value with a leading type id, dispatching on its runtime
        type. Note: plain ints have no built-in descriptor here; integer
        contract types must carry their own TYPE_DESC.
        """
        if value is None:
            NULL_DESC.write(None, self)
        elif isinstance(value, list):
            LIST_DESC.write(value, self)
        elif isinstance(value, bool):
            BOOLEAN_DESC.write(value, self)
        elif isinstance(value, float):
            DOUBLE_DESC.write(value, self)
        elif isinstance(value, str):
            STRING_DESC.write(value, self)
        elif isinstance(value, bytes):
            BYTES_DESC.write(value, self)
        else:
            try:
                td = typeDesc(value)
            except AttributeError:
                # Was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; typeDesc() only raises
                # AttributeError when the class lacks a TYPE_DESC.
                raise RuntimeError(f"missing type {value.__class__} in serializer")
            td.write(value, self)
class NullTypeHandler(TypeHandler):
    """The null value: nothing follows the type id on the wire."""
    def read(self, input: Input) -> None:
        return None
    def write(self, value: None, output: Output) -> None:
        pass
class ReferenceTypeHandler(TypeHandler):
    """Back-reference to an object already decoded/encoded in this graph."""
    def read(self, input: Input) -> Any:
        return cast(List[Any], input.referenceableObjects)[input.reader.readVarInt()]
    def write(self, value: int, output: Output) -> None:
        # value is the index assigned earlier by ClassTypeHandler.writeWithId.
        output.writer.writeVarInt(value)
class ListTypeHandler(TypeHandler):
    """Length-prefixed list of arbitrarily typed elements."""
    def read(self, input: Input) -> List[Any]:
        return [input.read() for dummy in range(input.reader.readVarInt())]
    def write(self, value: List[Any], output: Output) -> None:
        output.writer.writeVarInt(len(value))
        for e in value:
            output.write(e)
@abstract
class BaseTypeHandler(TypeHandler):
    """Handler for primitives that only need the raw Reader/Writer."""
    def readBase(self, reader: Reader) -> Any:
        raise NotImplementedError()
    def read(self, input: Input) -> Any:
        return self.readBase(input.reader)
    def writeBase(self, value: Any, writer: Writer) -> None:
        raise NotImplementedError()
    def write(self, value: Any, output: Output) -> None:
        self.writeBase(value, output.writer)
class BooleanTypeHandler(BaseTypeHandler):
    """bool as a single byte: 0 = False, anything else = True."""
    def readBase(self, reader: Reader) -> bool:
        return reader.readByte() != 0
    def writeBase(self, value: bool, writer: Writer) -> None:
        writer.writeByte(1 if value else 0)
class DoubleTypeHandler(BaseTypeHandler):
    """float as an 8-byte big-endian IEEE-754 double."""
    def readBase(self, reader: Reader) -> float:
        return reader.readDouble()
    def writeBase(self, value: float, writer: Writer) -> None:
        writer.writeDouble(value)
class StringTypeHandler(BaseTypeHandler):
    """str as a VarInt byte length followed by UTF-8 (the encode default)."""
    def readBase(self, reader: Reader) -> str:
        return reader.readBytes(reader.readVarInt()).decode()
    def writeBase(self, value: str, writer: Writer) -> None:
        b = value.encode()
        writer.writeVarInt(len(b))
        writer.writeBytes(b)
class BytesTypeHandler(BaseTypeHandler):
    """bytes as a VarInt length prefix followed by the raw bytes."""
    def readBase(self, reader: Reader) -> bytes:
        return reader.readBytes(reader.readVarInt())
    def writeBase(self, value: bytes, writer: Writer) -> None:
        writer.writeVarInt(len(value))
        writer.writeBytes(value)
# Built-in type descriptors. Ids 0..6 are reserved by the protocol;
# FIRST_DESC_ID is the first id available to contract (user) types.
NULL_DESC = TypeDesc(0, NullTypeHandler())
REFERENCE_DESC = TypeDesc(1, ReferenceTypeHandler())
LIST_DESC = TypeDesc(2, ListTypeHandler())
BOOLEAN_DESC = TypeDesc(3, BooleanTypeHandler())
DOUBLE_DESC = TypeDesc(4, DoubleTypeHandler())
STRING_DESC = TypeDesc(5, StringTypeHandler())
BYTES_DESC = TypeDesc(6, BytesTypeHandler())
FIRST_DESC_ID = 7
class EnumTypeHandler(BaseTypeHandler):
    """Enum member encoded as the VarInt of its .value."""
    def __init__(self, type: Any) -> None:
        self.type = type
    def readBase(self, reader: Reader) -> Any:
        return self.type(reader.readVarInt())
    def writeBase(self, value: Any, writer: Writer) -> None:
        writer.writeVarInt(value.value)
def enumDesc(id: int, type: Any) -> None:
    """Attach a TypeDesc with the given wire id to an Enum class."""
    setattr(type, TYPE_DESC, TypeDesc(id, EnumTypeHandler(type)))
class FieldHandler:
    """Reads/writes one attribute of a serializable class instance."""
    def __init__(self, field: str, typeHandler: Optional[TypeHandler]) -> None:
        self.field = field
        # None means dynamically typed: the value is written with its own
        # type id and read via Input.read dispatch.
        self.typeHandler = typeHandler
    def read(self, object: Any, input: Input) -> None:
        setattr(object, self.field, input.read() if self.typeHandler is None else self.typeHandler.read(input))
    def write(self, id: int, object: Any, output: Output) -> None:
        value = getattr(object, self.field)
        # None-valued fields are skipped entirely; absence encodes null.
        if value is not None:
            output.writer.writeVarInt(id)
            if self.typeHandler is None:
                output.write(value)
            else:
                self.typeHandler.write(value, output)
class FieldDesc:
    """
    Pairs a field id with its FieldHandler.

    typeInfo may be None (dynamically typed field), a TypeDesc, or a class
    carrying a TYPE_DESC attribute.
    """
    def __init__(self, id: int, field: str, typeInfo: Optional[Any]) -> None:
        self.id = id
        self.handler = cast(FieldHandler, None)
        if typeInfo is None:
            self.handler = FieldHandler(field, None)
        elif isinstance(typeInfo, TypeDesc):
            self.handler = FieldHandler(field, typeInfo.handler)
        else:
            self.handler = FieldHandler(field, typeDesc(typeInfo).handler)
class ClassTypeHandler(TypeHandler):
    """
    Handler for contract classes: fields serialized as (VarInt id, value)
    pairs terminated by FIELD_END. Referenceable classes additionally take
    part in object-graph back-referencing.
    """
    FIELD_END = 0  # sentinel field id terminating the field list
    def __init__(self, type: Any, referenceable: bool) -> None:
        self.type = type
        self.referenceable = referenceable
        # Field order matters on the wire, hence the ordered mapping.
        self.id2fieldHandler: Dict[int, FieldHandler] = OrderedDict()
    def addField(self, id: int, handler: FieldHandler) -> None:
        self.id2fieldHandler[id] = handler
    def read(self, input: Input) -> Any:
        # (annotation fixed: this returns the freshly built instance,
        # the original `-> None` was wrong)
        object = self.type()
        if self.referenceable:
            if input.referenceableObjects is None:
                input.referenceableObjects = []
            # Register BEFORE reading fields so cyclic references resolve.
            input.referenceableObjects.append(object)
        while True:
            id = input.reader.readVarInt()
            if id == ClassTypeHandler.FIELD_END:
                return object
            self.id2fieldHandler[id].read(object, input)
    def write(self, value: Any, output: Output) -> None:
        for id, handler in self.id2fieldHandler.items():
            handler.write(id, value, output)
        output.writer.writeVarInt(ClassTypeHandler.FIELD_END)
    def writeWithId(self, id: int, value: Any, output: Output) -> None:
        if self.referenceable:
            if output.object2reference is None:
                output.object2reference = {}
            object2reference = output.object2reference
            reference = object2reference.get(value)
            if reference is not None:
                # Seen before: emit a back-reference instead of the object.
                REFERENCE_DESC.write(reference, output)
                return
            object2reference[value] = len(object2reference)
        TypeHandler.writeWithId(self, id, value, output)
def fieldDescs(type: Any, fieldDescs: List[FieldDesc]) -> None:
    """Register the given field descriptors on type's ClassTypeHandler."""
    handler = cast(ClassTypeHandler, typeDesc(type).handler)
    for fieldDesc in fieldDescs:
        handler.addField(fieldDesc.id, fieldDesc.handler)
def classDesc(id: int, type: Any, referenceable: bool) -> None:
    """Attach a TypeDesc with the given wire id to a contract class."""
    setattr(type, TYPE_DESC, TypeDesc(id, ClassTypeHandler(type, referenceable)))
@abstract
class Serializer:
    """Top-level read/write interface over raw Reader/Writer streams."""
    def read(self, reader: Reader) -> Optional[Any]:
        raise NotImplementedError()
    def write(self, value: Any, writer: Writer) -> None:
        raise NotImplementedError()
class FastSerializer(Serializer):
    """Serializer combining the built-in descriptors with contract types."""
    def __init__(self, typeInfos: List[Any]) -> None:
        # typeInfos entries are either TypeDesc instances or classes that
        # carry a TYPE_DESC attribute (see classDesc/enumDesc).
        self.id2typeHandler: Dict[int, TypeHandler] = {}
        for typeInfo in [NULL_DESC, REFERENCE_DESC, LIST_DESC, BOOLEAN_DESC, DOUBLE_DESC, STRING_DESC, BYTES_DESC] + typeInfos:
            td = typeInfo if isinstance(typeInfo, TypeDesc) else typeDesc(typeInfo)
            self.id2typeHandler[td.id] = td.handler
    def read(self, reader: Reader) -> Optional[Any]:
        return Input(reader, self.id2typeHandler).read()
    def write(self, value: Optional[Any], writer: Writer) -> None:
        Output(writer).write(value)
@abstract
class Message:
    """Marker base class for everything exchanged over the transport."""
    pass
class Request(Message):
    """Client -> server call: target service, method and argument list."""
    def __init__(self, serviceId: int, methodId: int, arguments: List[Optional[Any]]) -> None:
        self.serviceId = serviceId
        self.methodId = methodId
        self.arguments = arguments
@abstract
class Reply(Message):
    """Server -> client result; process() yields the value or raises."""
    def process(self) -> Optional[Any]:
        raise NotImplementedError()
class ValueReply(Reply):
    """Successful invocation: process() returns the wrapped value."""
    def __init__(self, value: Optional[Any]) -> None:
        Reply.__init__(self)
        self.value = value
    def process(self) -> Optional[Any]:
        return self.value
class ExceptionReply(Reply):
    """Failed invocation: process() re-raises the transported exception."""
    def __init__(self, exception: Exception) -> None:
        Reply.__init__(self)
        self.exception = exception
    def process(self) -> None:
        raise self.exception
class MethodMapping:
    """
    Describes one remotely callable method: its numeric wire id, its
    Python name and whether invocations are fire-and-forget (oneWay).
    """
    def __init__(self, id: int, method: str, oneWay: bool) -> None:
        self.id, self.method, self.oneWay = id, method, oneWay
class MethodMapper:
    """Bidirectional id <-> method-name lookup for one contract interface."""
    def __init__(self, mappings: List[MethodMapping]) -> None:
        self.id2mapping: Dict[int, MethodMapping] = {}
        self.method2Mapping: Dict[str, MethodMapping] = {}
        for mapping in mappings:
            self.id2mapping[mapping.id] = mapping
            self.method2Mapping[mapping.method] = mapping
    def mapId(self, id: int) -> MethodMapping:
        # NOTE(review): returns None for unknown ids (the cast hides this);
        # callers such as Service.invoke explicitly check for None.
        return cast(MethodMapping, self.id2mapping.get(id))
    def mapMethod(self, method: str) -> MethodMapping:
        # NOTE(review): likewise returns None for unknown method names.
        return cast(MethodMapping, self.method2Mapping.get(method))
# Attribute name under which contract interfaces carry their MethodMapper.
MAPPER = "MAPPER"

def methodMapper(type: Any, mappings: List[MethodMapping]) -> None:
    """Attach a MethodMapper for the given mappings to a contract interface."""
    setattr(type, MAPPER, MethodMapper(mappings))
# An Invocation performs the actual call; an Interceptor wraps it and may
# inspect or replace arguments and result on both client and server side.
Invocation = Callable[[], Optional[Any]]
Interceptor = Callable[
    [MethodMapping, List[Optional[Any]], Invocation],  # mapping, arguments, invocation
    Optional[Any]
]

def directInterceptor(mapping: MethodMapping, arguments: List[Optional[Any]], invocation: Invocation) -> Optional[Any]:
    """No-op interceptor: just performs the invocation."""
    return invocation()
class Service:
    """Server-side binding of a contract id to an implementation object."""
    def __init__(self, contractId: 'ContractId[Any]', implementation: Any, interceptor: Interceptor) -> None:
        self.id = contractId.id
        self.implementation = implementation
        self.interceptor = interceptor
        self.mapper = contractId.mapper
    def invoke(self, request: Request) -> Reply:
        """Dispatch request to the implementation, wrapping the outcome."""
        mapping = self.mapper.mapId(request.methodId)
        if mapping is None:
            raise RuntimeError(f"no methodId {request.methodId} found for serviceId {request.serviceId}")
        try:
            return ValueReply(self.interceptor(
                mapping,
                request.arguments,
                lambda: getattr(self.implementation, mapping.method)(*request.arguments)
            ))
        except Exception as e:
            # Implementation errors travel back to the caller as a reply,
            # to be re-raised by ExceptionReply.process().
            return ExceptionReply(e)
class Server:
    """Routes incoming requests to the registered services by service id."""
    def __init__(self, services: List[Service]) -> None:
        self.id2service: Dict[int, Service] = {}
        for service in services:
            id = service.id
            if id in self.id2service:
                raise RuntimeError(f"serviceId {id} already added")
            self.id2service[id] = service
    def invoke(self, request: Request) -> Reply:
        service = self.id2service.get(request.serviceId)
        if service is None:
            raise RuntimeError(f"no serviceId {request.serviceId} found")
        return service.invoke(request)
C = TypeVar('C')  # the contract (interface) type

class ContractId(Generic[C]):
    """Identifies a contract interface by numeric id; factory for services."""
    def __init__(self, contract: Any, id: int) -> None:
        # contract must have been prepared with methodMapper().
        self.mapper: MethodMapper = getattr(contract, MAPPER)
        self.id = id
    def service(self, implementation: C, interceptor: Interceptor = directInterceptor) -> Service:
        return Service(self, implementation, interceptor)
@abstract
class Client:
    """
    Client endpoint: builds dynamic proxies whose method calls are routed
    through invoke() (which subclasses implement over a transport).
    """
    def proxy(self, contractId: ContractId[C], interceptor: Interceptor = directInterceptor) -> C:
        client = self
        class Proxy:
            def __getattr__(self, method: str):
                # Resolve the attribute name to a wire method mapping.
                mapping = contractId.mapper.mapMethod(method)
                if mapping is None:
                    raise RuntimeError(f"no method '{method}' found for serviceId {contractId.id}")
                class Method:
                    def __call__(self, *arguments):
                        args = list(arguments)
                        # The interceptor sees (and may modify) args before
                        # the request is sent.
                        return interceptor(
                            mapping,
                            args,
                            lambda: client.invoke(Request(contractId.id, mapping.id, args)).process()
                        )
                return Method()
        # The cast pretends the duck-typed proxy implements the contract.
        return cast(C, Proxy())
    def invoke(self, request: Request) -> Reply:
        raise NotImplementedError()
class MessageSerializer(Serializer):
    """
    Frames Request/ValueReply/ExceptionReply messages: one kind byte, then
    the kind-specific payload serialized with the contract serializer.
    """
    # Wire tags for the three message kinds.
    REQUEST = 0
    VALUE_REPLY = 1
    EXCEPTION_REPLY = 2
    def __init__(self, contractSerializer: Serializer) -> None:
        self.contractSerializer = contractSerializer
    def read(self, reader: Reader) -> Message:
        # NOTE(review): `type` shadows the builtin; kept for byte-identity.
        type = reader.readByte()
        if type == MessageSerializer.REQUEST:
            return Request(reader.readZigZagInt(), reader.readZigZagInt(), cast(List, self.contractSerializer.read(reader)))
        elif type == MessageSerializer.VALUE_REPLY:
            return ValueReply(self.contractSerializer.read(reader))
        else:
            # Any other tag is treated as an exception reply.
            return ExceptionReply(cast(Exception, self.contractSerializer.read(reader)))
    def write(self, message: Message, writer: Writer) -> None:
        if isinstance(message, Request):
            writer.writeByte(MessageSerializer.REQUEST)
            writer.writeZigZagInt(message.serviceId)
            writer.writeZigZagInt(message.methodId)
            self.contractSerializer.write(message.arguments, writer)
        elif isinstance(message, ValueReply):
            writer.writeByte(MessageSerializer.VALUE_REPLY)
            self.contractSerializer.write(message.value, writer)
        else:
            writer.writeByte(MessageSerializer.EXCEPTION_REPLY)
            self.contractSerializer.write(cast(ExceptionReply, message).exception, writer)
class SimpleTransportSetup:
    """Pairs a message serializer with the server handling one path."""
    def __init__(self, messageSerializer: Serializer, server: Server) -> None:
        self.messageSerializer = messageSerializer
        self.server = server
class SimplePathResolver:
    """Maps transport paths to their SimpleTransportSetup."""
    def __init__(self, pathMappings: Dict[Any, SimpleTransportSetup]) -> None:
        self.pathMappings = pathMappings
    def resolvePath(self, path: Any) -> SimpleTransportSetup:
        setup = self.pathMappings.get(path)
        if setup is None:
            raise RuntimeError(f"no mapping for path '{path}'")
        return setup
class PathSerializer(Serializer):
    """Serializes the transport path as a fixed 4-byte big-endian int."""
    DEFAULT = 0  # path used when only a single endpoint is exposed
    def read(self, reader: Reader) -> int:
        return reader.readInt()
    def write(self, value: int, writer: Writer) -> None:
        writer.writeInt(value)
@abstract
class Stream:
    """Bidirectional byte stream abstraction used by the transports."""
    def writeBytes(self, value: bytes) -> None:
        raise NotImplementedError()
    def writeDone(self) -> None:
        # Optional hook: flush/half-close after the message is written.
        pass
    def readBytes(self, length: int) -> bytes:
        raise NotImplementedError()
    def readDone(self) -> None:
        # Optional hook invoked after the message has been fully read.
        pass
class ServerTransport:
    """Server side of a request/reply exchange over a Stream."""
    def __init__(self, pathSerializer: Serializer, pathResolver: SimplePathResolver) -> None:
        self.pathSerializer = pathSerializer
        self.pathResolver = pathResolver
    def invoke(self, stream: Stream) -> None:
        """Read path + request from stream, dispatch, write the reply."""
        reader = Reader(stream.readBytes)
        # The leading path selects serializer and server for this exchange.
        setup = self.pathResolver.resolvePath(self.pathSerializer.read(reader))
        request = cast(Request, setup.messageSerializer.read(reader))
        stream.readDone()
        setup.messageSerializer.write(setup.server.invoke(request), Writer(stream.writeBytes))
        stream.writeDone()
def defaultServerTransport(contractSerializer: Serializer, server: Server) -> ServerTransport:
    """Build a single-endpoint ServerTransport using the default path."""
    return ServerTransport(
        PathSerializer(),
        SimplePathResolver({
            PathSerializer.DEFAULT: SimpleTransportSetup(MessageSerializer(contractSerializer), server)
        })
    )
class ClientTransport:
    """Client side of a request/reply exchange over a Stream."""
    def __init__(self, pathSerializer: Serializer, path: Any, messageSerializer: Serializer) -> None:
        self.pathSerializer = pathSerializer
        self.path = path
        self.messageSerializer = messageSerializer
    def invoke(self, request: Request, stream: Stream) -> Reply:
        """Write path + request to stream, then read back the reply."""
        writer = Writer(stream.writeBytes)
        self.pathSerializer.write(self.path, writer)
        self.messageSerializer.write(request, writer)
        stream.writeDone()
        reply = cast(Reply, self.messageSerializer.read(Reader(stream.readBytes)))
        stream.readDone()
        return reply
def defaultClientTransport(contractSerializer: Serializer) -> ClientTransport:
    """Build a ClientTransport speaking to the default server path."""
    return ClientTransport(PathSerializer(), PathSerializer.DEFAULT, MessageSerializer(contractSerializer))
class Dumper:
def __init__(self, compact: bool, referenceables: bool, concreteValueClasses: Set[Any] = set()) -> None:
"""
:param compact: one-liner or multiple lines
:param referenceables: True: dumps graphs (objects are marked with #); False: dumps trees
:param concreteValueClasses: only allowed if (referenceables); these objects should not reference others; do not print # for these classes
"""
if (not referenceables) and (len(concreteValueClasses) != 0):
raise RuntimeError("concreteValueClasses only allowed if (referenceables)")
self.compact = compact
self.referenceables = referenceables
self.concreteValueClasses = concreteValueClasses
def dumpValueClass(self, value: Any, write: Callable[[str], None]) -> bool:
"""
:return True: if we dumped value; False: use default implementation
Could dump a value class (these should not reference other objects). Should be an one-liner.
This implementation does nothing and returns False.
"""
return False
def isConcreteValueClass(self, value: Any) -> bool:
return value.__class__ in self.concreteValueClasses
def dump(self, value: Optional[Any], write: Callable[[str], None]) -> None:
alreadyDumped: Optional[Dict[Any, int]] = {} if self.referenceables else None
tabs = 0
def dumpValue(value: Optional[Any]) -> None:
nonlocal tabs
if value is None:
write("null")
elif isinstance(value, str):
write('"' + value + '"')
elif isinstance(value, (bool, float, bytes)):
write(str(value))
elif isinstance(value, Enum):
write(value.name)
elif isinstance(value, list):
if self.compact:
write("[ ")
for element in value:
dumpValue(element)
write(" ")
write("]")
else:
write("[\n")
tabs += 1
for element in value:
write(tabs * " ")
dumpValue(element)
write("\n")
tabs -= 1
write(tabs * " " + "]")
else:
referenceables = self.referenceables and (not self.isConcreteValueClass(value))
if referenceables:
index = cast(int, cast(Dict[Any, int], alreadyDumped).get(value))
if index is not None:
write("#" + str(index))
return
index = len(alreadyDumped)
alreadyDumped[value] = index
if not self.dumpValueClass(value, write):
if self.compact:
write(value.__class__.__name__ + "( ")
for name, value in sorted(value.__dict__.items(), key=lambda item: item[0]):
if value is not None:
write(name + "=")
dumpValue(value)
write(" ")
write(")")
else:
write(value.__class__.__name__ + "(\n")
tabs += 1
for name, value in sorted(value.__dict__.items(), key=lambda item: item[0]):
if value is not None:
write(tabs * " " + name + " = ")
dumpValue(value)
write("\n")
tabs -= 1
write(tabs * " " + ")")
if referenceables:
write("#" + str(index))
dumpValue(value)
def toString(self, value: Optional[Any]) -> str:
    """Render *value* with self.dump and return the accumulated text."""
    buffer = StringIO()
    self.dump(value, buffer.write)
    return buffer.getvalue()
| bsd-3-clause |
jcftang/ansible | contrib/inventory/fleet.py | 56 | 3069 | #!/usr/bin/env python
"""
fleetctl base external inventory script. Automatically finds the IPs of the booted coreos instances and
returns it under the host group 'coreos'
"""
# Copyright (C) 2014 Andrew Rothstein <andrew.rothstein at gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Thanks to the vagrant.py inventory script for giving me the basic structure
# of this.
#
import sys
import subprocess
import re
import string
from optparse import OptionParser
try:
import json
except:
import simplejson as json
# Options
#------------------------------
# Ansible inventory contract: invoked with exactly one of --list / --host.
parser = OptionParser(usage="%prog [options] --list | --host <machine>")
parser.add_option('--list', default=False, dest="list", action="store_true",
                  help="Produce a JSON consumable grouping of servers in your fleet")
parser.add_option('--host', default=None, dest="host",
                  help="Generate additional host specific details for given host for Ansible")
(options, args) = parser.parse_args()

#
# helper functions
#
def get_ssh_config():
    """Return the ssh/Ansible configuration dict for every running machine."""
    return [get_a_ssh_config(box) for box in list_running_boxes()]
#list all the running instances in the fleet
def list_running_boxes():
boxes = []
for line in subprocess.check_output(["fleetctl", "list-machines"]).split('\n'):
matcher = re.search("[^\s]+[\s]+([^\s]+).+", line)
if matcher and matcher.group(1) != "IP":
boxes.append(matcher.group(1))
return boxes
def get_a_ssh_config(box_name):
    """Build the Ansible host variables for a single CoreOS machine."""
    return {
        'Host': box_name,
        'ansible_ssh_user': 'core',
        'ansible_python_interpreter': '/opt/bin/python',
    }
# List out servers that vagrant has running
#------------------------------
if options.list:
    ssh_config = get_ssh_config()
    hosts = {'coreos': []}

    for data in ssh_config:
        hosts['coreos'].append(data['Host'])

    print(json.dumps(hosts))
    # FIX: success must exit 0 — Ansible treats a nonzero status as failure.
    sys.exit(0)

# Get out the host details
#------------------------------
elif options.host:
    result = {}
    ssh_config = get_ssh_config()

    # FIX: filter() is a lazy iterator on Python 3; materialize the matches
    # so len() and indexing work.
    details = [x for x in ssh_config if x['Host'] == options.host]

    if len(details) > 0:
        #pass through the port, in case it's non standard.
        result = details[0]

    print(json.dumps(result))
    sys.exit(0)

# Print out help
#------------------------------
else:
    parser.print_help()
    sys.exit(1)
| gpl-3.0 |
natsheh/sensim | utils/__init__.py | 1 | 1318 | # -*- coding: utf-8 -*-
#
# This file is part of sensim
"""Helpers for sentence semantic similarity model.
.. Author:: Hussein AL-NATSHEH <hussein.al-natsheh@ish-lyon.cnrs.fr>
"""
"""Helper functions."""
from .util import get_text
from .util import group_by_sentence
from .util import to_numeric
from .util import sts_score
from .util import de_contraction
from .wordvec import word2glove
from .transformers import FuncTransformer
from .transformers import Shaper
from .load_data import read_tsv
from .load_data import df_2_dset
from .load_data import load_dataset
from .combiners import PairCosine
from .combiners import SmallerOtherParing
from .combiners import RefGroupPairCosine
from .combiners import GetMatches
from .combiners import SolveDuplicate
from .combiners import AvgPOSCombiner
from .combiners import NumCombiner
# Public API of this package; keep in sync with the imports above.
__all__ = ("get_text",
           "group_by_sentence",
           "to_numeric",
           "sts_score",
           "de_contraction",
           "word2glove",
           "FuncTransformer",
           "Shaper",
           "read_tsv",
           "df_2_dset",
           "load_dataset",
           "PairCosine",
           "SmallerOtherParing",
           "RefGroupPairCosine",
           "GetMatches",
           "SolveDuplicate",
           "AvgPOSCombiner",
           "NumCombiner")
| bsd-3-clause |
ospalh/libanki3 | libanki3/sound.py | 1 | 9284 | # -*- coding: utf-8 -*-
# Copyright: Damien Elmes <anki@ichi2.net>
# Copyright © 2014 Roland Sieker <ospalh@gmail.com>
#
# License: GNU AGPL, version 3 or later;
# http://www.gnu.org/licenses/agpl.html
import atexit
import os
import random
import re
import subprocess
import sys
import threading
import time
from .hooks import addHook
from .lang import _
from .utils import isMac, isWin, tmpdir
# Shared utils
##########################################################################

# Matches Anki's inline sound tags; group 1 is the referenced filename.
_soundReg = "\[sound:(.*?)\]"

def playFromText(text):
    """Queue playback of every [sound:...] tag found in *text*."""
    for fname in re.findall(_soundReg, text):
        play(fname)

def stripSounds(text):
    """Return *text* with every [sound:...] tag removed."""
    return re.sub(_soundReg, "", text)

def hasSound(text):
    """True if *text* contains at least one [sound:...] tag."""
    return re.search(_soundReg, text) is not None
##########################################################################

# Working filenames for the recorder: raw capture and mp3 target.
processingSrc = "rec.wav"
processingDst = "rec.mp3"
processingChain = []
recFiles = []

# Post-recording pipeline: encode the raw wav to mp3 with lame.
# NOTE(review): the empty processingChain above is immediately overwritten
# here — the first assignment looks dead; confirm nothing imports between.
processingChain = [
    ["lame", "rec.wav", processingDst, "--noreplaygain", "--quiet"], ]
# don't show box on windows
if isWin:
    si = subprocess.STARTUPINFO()
    try:
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    # FIX: was a bare except; only the missing attribute is expected here.
    except AttributeError:
        # python2.7+ kept the flag on the private _subprocess module
        si.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW
else:
    si = None

if isMac:
    # make sure lame, which is installed in /usr/local/bin, is in the path
    os.environ['PATH'] += ":" + "/usr/local/bin"
    dir = os.path.dirname(os.path.abspath(__file__))
    dir = os.path.abspath(dir + "/../../../..")
    os.environ['PATH'] += ":" + dir + "/audio"
def retryWait(proc):
    """wait() on *proc*, retrying while OS X interrupts the system call."""
    while True:
        try:
            return proc.wait()
        except OSError:
            # interrupted system call; retry
            pass
# Mplayer settings
##########################################################################

# Base command line for the mplayer backend, per platform.
if isWin:
    mplayerCmd = ["mplayer.exe", "-ao", "win32"]
    # NOTE: 'dir' shadows the builtin; kept as-is for compatibility.
    dir = os.path.dirname(os.path.abspath(sys.argv[0]))
    os.environ['PATH'] += ";" + dir
    os.environ['PATH'] += ";" + dir + "\\..\\win\\top" # for testing
else:
    mplayerCmd = ["mplayer"]
mplayerCmd += ["-really-quiet", "-noautosub"]
# Mplayer in slave mode
##########################################################################

# Shared state between the GUI thread and the MplayerMonitor daemon:
# pending file queue, the monitor thread, a wake-up event and a flag
# asking the monitor to drop the current playback.
mplayerQueue = []
mplayerManager = None
mplayerReader = None
mplayerEvt = threading.Event()
mplayerClear = False
class MplayerMonitor(threading.Thread):
    """Daemon thread driving a single mplayer process in slave mode.

    Communicates with the rest of the module through the module-level
    mplayerQueue / mplayerEvt / mplayerClear globals: the GUI thread
    appends paths and sets the event; this thread feeds them to mplayer.
    """

    def run(self):
        global mplayerClear
        self.mplayer = None
        self.deadPlayers = []
        while 1:
            mplayerEvt.wait()
            mplayerEvt.clear()
            # clearing queue?
            if mplayerClear and self.mplayer:
                try:
                    self.mplayer.stdin.write("stop\n")
                except Exception:
                    # mplayer quit by user (likely video)
                    self.deadPlayers.append(self.mplayer)
                    self.mplayer = None
            # loop through files to play
            while mplayerQueue:
                # ensure started
                if not self.mplayer:
                    self.startProcess()
                # pop a file
                try:
                    item = mplayerQueue.pop(0)
                except IndexError:
                    # queue was cleared by main thread
                    continue
                if mplayerClear:
                    mplayerClear = False
                    extra = ""
                else:
                    extra = " 1"
                cmd = 'loadfile "%s"%s\n' % (item, extra)
                try:
                    self.mplayer.stdin.write(cmd)
                except Exception:
                    # mplayer has quit and needs restarting
                    self.deadPlayers.append(self.mplayer)
                    self.mplayer = None
                    self.startProcess()
                    self.mplayer.stdin.write(cmd)
                # if we feed mplayer too fast it loses files
                time.sleep(1)
            # wait() on finished processes. we don't want to block on the
            # wait, so we keep trying each time we're reactivated
            def clean(pl):
                if pl.poll() is not None:
                    pl.wait()
                    return False
                else:
                    return True
            self.deadPlayers = [pl for pl in self.deadPlayers if clean(pl)]

    def kill(self):
        """Ask the slave process to quit; actual reaping happens in run()."""
        if not self.mplayer:
            return
        try:
            self.mplayer.stdin.write("quit\n")
            self.deadPlayers.append(self.mplayer)
        except Exception:
            pass
        self.mplayer = None

    def startProcess(self):
        """Spawn mplayer in slave mode; raise a friendly error if missing."""
        try:
            cmd = mplayerCmd + ["-slave", "-idle"]
            # FIX: file() does not exist on Python 3 — use open().
            devnull = open(os.devnull, "w")
            # FIX: universal_newlines makes stdin a text pipe so the str
            # commands written above work on Python 3.
            self.mplayer = subprocess.Popen(
                cmd, startupinfo=si, stdin=subprocess.PIPE,
                stdout=devnull, stderr=devnull,
                universal_newlines=True)
        except OSError:
            mplayerEvt.clear()
            raise Exception("Did you install mplayer?")
def queueMplayer(path):
    """Append *path* to the mplayer queue and wake the monitor thread."""
    ensureMplayerThreads()
    if isWin and os.path.exists(path):
        # mplayer on windows doesn't like the encoding, so we create a
        # temporary file instead. oddly, foreign characters in the dirname
        # don't seem to matter.
        dir_ = tmpdir()
        name = os.path.join(dir_, "audio%s%s" % (
            random.randrange(0, 1000000), os.path.splitext(path)[1]))
        f = open(name, "wb")
        f.write(open(path, "rb").read())
        f.close()
        # it wants unix paths, too!
        path = name.replace("\\", "/")
        path = path.encode(sys.getfilesystemencoding())
    else:
        path = path.encode("utf-8")
    # NOTE(review): encoding the path to bytes looks like a Python 2
    # leftover — the slave command later interpolates it into a str.
    # Confirm against MplayerMonitor.run before changing.
    mplayerQueue.append(path)
    mplayerEvt.set()
def clearMplayerQueue():
    """Drop all queued files and ask the monitor to stop current playback."""
    global mplayerClear, mplayerQueue
    mplayerClear = True
    mplayerQueue = []
    mplayerEvt.set()
def ensureMplayerThreads():
    """Lazily create the single MplayerMonitor daemon and its exit hooks."""
    global mplayerManager
    if not mplayerManager:
        mplayerManager = MplayerMonitor()
        mplayerManager.daemon = True
        mplayerManager.start()
        # ensure the tmpdir() exit handler is registered first so it runs
        # after the mplayer exit
        tmpdir()
        # clean up mplayer on exit
        atexit.register(stopMplayer)
def stopMplayer(*args):
    """atexit / profile-unload hook: terminate the slave mplayer process."""
    if not mplayerManager:
        return
    mplayerManager.kill()

addHook("unloadProfile", stopMplayer)
# PyAudio recording
##########################################################################
try:
import pyaudio
import wave
PYAU_FORMAT = pyaudio.paInt16
PYAU_CHANNELS = 1
PYAU_INPUT_INDEX = None
except:
pass
class _Recorder(object):
    """Base recorder: runs the post-processing chain on the raw capture."""

    def postprocess(self, encode=True):
        """Run every command in processingChain.

        When *encode* is False the lame step is skipped and the raw wav is
        kept. Raises Exception when a step fails to launch or exits nonzero.
        """
        self.encode = encode
        for c in processingChain:
            if not self.encode and c[0] == 'lame':
                continue
            try:
                ret = retryWait(subprocess.Popen(c, startupinfo=si))
            # FIX: was a bare except; treat any launch failure like a
            # non-zero exit status, without swallowing KeyboardInterrupt.
            except Exception:
                ret = True
            if ret:
                raise Exception(_("Error running %s") % " ".join(c))
class PyAudioThreadedRecorder(threading.Thread):
    """Record from the default input device into processingSrc until
    self.finish is set by another thread."""

    def __init__(self):
        threading.Thread.__init__(self)
        # Set to True from the outside to end the recording loop.
        self.finish = False

    def run(self):
        chunk = 1024
        try:
            p = pyaudio.PyAudio()
        except NameError:
            raise Exception(
                "Pyaudio not installed (recording not supported on OSX10.3)")
        rate = int(p.get_default_input_device_info()['defaultSampleRate'])
        stream = p.open(format=PYAU_FORMAT,
                        channels=PYAU_CHANNELS,
                        rate=rate,
                        input=True,
                        input_device_index=PYAU_INPUT_INDEX,
                        frames_per_buffer=chunk)
        all = []
        while not self.finish:
            try:
                data = stream.read(chunk)
            except IOError as e:
                # NOTE(review): e[1] is Python 2 exception indexing; on
                # Python 3 this itself raises TypeError — confirm and port.
                if e[1] == pyaudio.paInputOverflowed:
                    # input overflow is expected; drop the lost chunk
                    data = None
                else:
                    raise
            if data:
                all.append(data)
        stream.close()
        p.terminate()
        # NOTE(review): stream.read returns bytes; ''.join only works for
        # str chunks — presumably another Python 2 leftover; verify.
        data = ''.join(all)
        wf = wave.open(processingSrc, 'wb')
        wf.setnchannels(PYAU_CHANNELS)
        wf.setsampwidth(p.get_sample_size(PYAU_FORMAT))
        wf.setframerate(rate)
        wf.writeframes(data)
        wf.close()
class PyAudioRecorder(_Recorder):
    """Recorder capturing via PyAudio, optionally encoding to mp3."""

    def __init__(self):
        # Remove stale artifacts left over from a previous recording.
        for t in recFiles + [processingSrc, processingDst]:
            try:
                os.unlink(t)
            except OSError:
                pass
        self.encode = False

    def start(self):
        """Begin capturing in a background thread."""
        self.thread = PyAudioThreadedRecorder()
        self.thread.start()

    def stop(self):
        """Signal the capture thread to finish and wait for it."""
        self.thread.finish = True
        self.thread.join()

    def file(self):
        """Return the finished recording's filename.

        A unique mp3 name when encoding was requested, otherwise the
        raw wav working file.
        """
        if self.encode:
            tgt = "rec%d.mp3" % time.time()
            os.rename(processingDst, tgt)
            return tgt
        else:
            return processingSrc
# Audio interface
##########################################################################

# Indirection points so a different backend can be swapped in at runtime.
_player = queueMplayer
_queueEraser = clearMplayerQueue

def play(path):
    """Queue *path* for playback with the configured backend."""
    _player(path)

def clearAudioQueue():
    """Stop current playback and drop all queued files."""
    _queueEraser()

Recorder = PyAudioRecorder
| agpl-3.0 |
rgeleta/odoo | addons/account/wizard/account_invoice_state.py | 340 | 2875 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class account_invoice_confirm(osv.osv_memory):
    """
    This wizard will confirm the all the selected draft invoices
    """
    _name = "account.invoice.confirm"
    _description = "Confirm the selected invoices"

    def invoice_confirm(self, cr, uid, ids, context=None):
        # Validate every invoice selected in the list view; only invoices
        # still in draft / pro-forma may be confirmed.
        if context is None:
            context = {}
        active_ids = context.get('active_ids', []) or []
        proxy = self.pool['account.invoice']
        for record in proxy.browse(cr, uid, active_ids, context=context):
            if record.state not in ('draft', 'proforma', 'proforma2'):
                raise osv.except_osv(_('Warning!'), _("Selected invoice(s) cannot be confirmed as they are not in 'Draft' or 'Pro-Forma' state."))
            # Fire the workflow signal that opens (validates) the invoice.
            record.signal_workflow('invoice_open')
        return {'type': 'ir.actions.act_window_close'}
class account_invoice_cancel(osv.osv_memory):
    """
    This wizard will cancel the all the selected invoices.
    If in the journal, the option allow cancelling entry is not selected then it will give warning message.
    """
    _name = "account.invoice.cancel"
    _description = "Cancel the Selected Invoices"

    def invoice_cancel(self, cr, uid, ids, context=None):
        # Cancel every selected invoice; invoices already cancelled or paid
        # must not be cancelled again.
        if context is None:
            context = {}
        proxy = self.pool['account.invoice']
        active_ids = context.get('active_ids', []) or []
        for record in proxy.browse(cr, uid, active_ids, context=context):
            if record.state in ('cancel','paid'):
                raise osv.except_osv(_('Warning!'), _("Selected invoice(s) cannot be cancelled as they are already in 'Cancelled' or 'Done' state."))
            # Fire the workflow signal that cancels the invoice.
            record.signal_workflow('invoice_cancel')
        return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
BadSingleton/pyside2 | tests/signals/disconnect_test.py | 3 | 1550 | import unittest
from PySide2.QtCore import *
from testbinding import TestObject
class Foo(QObject):
    # Plain argument-less signal used to exercise connect/disconnect.
    bar = Signal()
class TestDisconnect(unittest.TestCase):
    """Regression tests for disconnecting PySide2 signals."""

    def theSlot1(self):
        self.called1 = True

    def theSlot2(self):
        self.called2 = True

    def testIt(self):
        # Both slots must fire on emit; after disconnect() neither may fire.
        self.called1 = False
        self.called2 = False
        f = Foo()
        f.bar.connect(self.theSlot1)
        f.bar.connect(self.theSlot2)
        f.bar.emit()
        self.assertTrue(self.called1)
        self.assertTrue(self.called2)

        self.called1 = False
        self.called2 = False
        f.bar.disconnect()
        f.bar.emit()
        self.assertFalse(self.called1)
        self.assertFalse(self.called2)

    def testDuringCallback(self):
        """ Test to see if the C++ object for a connection is accessed after the
        method returns.  This causes a segfault if the memory that was used by the
        C++ object has been reused. """
        self.called = False
        obj = TestObject(0)
        def callback():
            obj.signalWithDefaultValue.disconnect(callback)

            # Connect more callbacks to try to overwrite memory
            for i in range(1000):
                obj.signalWithDefaultValue.connect(lambda: None)

            self.called = True

            # A non-None return value is needed
            return True
        obj.signalWithDefaultValue.connect(callback)
        obj.signalWithDefaultValue.emit()
        # FIX: assert_ is a deprecated alias (removed in Python 3.12).
        self.assertTrue(self.called)
# Allow running the suite directly as a script.
if __name__ == '__main__':
    unittest.main()
| lgpl-2.1 |
WilsonRimberg/Final-Project | ggame/headlessdeps.py | 228 | 9672 | def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('PIL'):
from PIL import Image
class _body(object):
def __init__(self):
self.events = {}
def appendChild(self, obj):
self.child = obj
def bind(self, evt, action):
self.events[evt] = action
print("Binding {} to {}".format(evt, action))
class _document(object):
def __init__(self):
self.body = _body()
class _window(object):
def __init__(self):
self.document = _document()
self.animatex = 0
def open(self, s1, s2):
return self
def requestAnimationFrame(self, target):
if self.animatex < 10:
self.animatex += 1
target('dummy')
print("Animation frame")
class _Container(object):
def __init__(self):
self.things = []
def destroy(self):
del self.things
def addChild(self, obj):
self.things.append(obj)
def removeChild(self, obj):
self.things.remove(obj)
class _Renderer(object):
def __init__(self, x, y, argsdict):
self.x = x
self.y = y
self.argsdict = argsdict
self.view = 'view'
print("Rendering created with {}x{} area".format(x, y))
def render(self, stage):
pass
class _GFX(object):
def __init__(self):
self.Container = _Container
self.autoDetectRenderer = _Renderer
# Module-level singletons standing in for the browser-provided globals.
window = _window()
GFX = _GFX()
#document = object()
def JSConstructor(cls):
    """Headless stand-in: a JS 'constructor' is just the Python class."""
    return cls

def JSObject(obj):
    """Headless stand-in: pass the wrapped object straight through."""
    return obj
class _GFX_Rectangle(object):
    """Axis-aligned rectangle: x/y origin plus width/height extents."""

    def __init__(self, x, y, w, h):
        self.x = x
        self.y = y
        self.width = w
        self.height = h

GFX_Rectangle = _GFX_Rectangle
class _Texture(object):
    """Image-backed texture; without a filename it is an empty 0x0 texture."""

    def __init__(self, img='', crossdomain=False):
        self.name = img
        self.crossdomain = crossdomain
        if img == '':
            # Empty texture: no backing image, zero dimensions.
            self.img = None
            self.basewidth = 0
            self.baseheight = 0
            self.width = 0
            self.height = 0
        else:
            self.img = Image.open(img)
            self.basewidth = self.img.width
            self.baseheight = self.img.height
            self.width = self.basewidth
            self.height = self.baseheight
            print("Texture from image {}, {}x{} pixels".format(img, self.basewidth, self.baseheight))
        self.baserect = _GFX_Rectangle(0, 0, self.basewidth, self.baseheight)
        self.framerect = self.baserect

    @classmethod
    def fromTexture(cls, texture, frame):
        # Alternate constructor: share *texture*'s image restricted to *frame*.
        inst = cls()
        inst.img = texture.img
        inst.name = texture.name
        inst.basewidth = texture.basewidth
        inst.baseheight = texture.baseheight
        inst.baserect = texture.baserect
        inst.framerect = frame
        inst.width = frame.width
        inst.height = frame.height
        print("Texture from base texture {}, {}x{} subframe {}x{}".format(inst.name, inst.basewidth, inst.baseheight, inst.framerect.width, inst.framerect.height))
        return inst

    def destroy(self):
        # Best-effort cleanup: empty textures have no PIL image to close.
        try:
            self.img.close()
            print("Destroying an image")
        except:
            print("Destroying a non-image")

GFX_Texture = _Texture.fromTexture
GFX_Texture_fromImage = _Texture
class vector(object):
    """Minimal 2-D vector supporting v.x / v.y and index access v[0] / v[1]."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __getitem__(self, key):
        """Return x for index 0, y for index 1; raise KeyError otherwise."""
        if key == 0:
            return self.x
        elif key == 1:
            return self.y
        else:
            raise KeyError

    def __setitem__(self, key, value):
        """Assign x for index 0, y for index 1; raise KeyError otherwise.

        FIX: the method was misspelled ``__setitem`` (missing trailing
        underscores), so item assignment (v[0] = n) raised TypeError.
        """
        if key == 0:
            self.x = value
        elif key == 1:
            self.y = value
        else:
            raise KeyError
class GFX_Sprite(object):
    """Headless sprite: tracks its texture, geometry and transform state."""

    def __init__(self, texture):
        self.texture = texture
        self.visible = True
        self.pos = vector(0, 0)
        self.anch = vector(0, 0)
        self.scal = vector(1.0, 1.0)
        self.width = texture.width
        self.height = texture.height
        self.rotation = 0.0

    @property
    def position(self):
        """Sprite position as a vector (mutable in place)."""
        return self.pos

    @position.setter
    def position(self, value):
        self.pos.x, self.pos.y = value[0], value[1]

    @property
    def anchor(self):
        """Rotation/placement anchor as a vector."""
        return self.anch

    @anchor.setter
    def anchor(self, value):
        self.anch.x, self.anch.y = value[0], value[1]

    @property
    def scale(self):
        """Per-axis scale factors as a vector."""
        return self.scal

    @scale.setter
    def scale(self, value):
        self.scal.x, self.scal.y = value[0], value[1]

    def destroy(self):
        """Nothing to release in the headless backend."""
        pass
class _GFX_Graphics(object):
    """Headless vector-graphics surface.

    Mirrors the subset of the browser Graphics API used by ggame: every
    draw call records its parameters, updates width/height, and returns
    self so calls can be chained.
    """

    def __init__(self):
        self.clear()

    def clear(self):
        """Reset all recorded drawing state."""
        self.cleared = True
        self.visible = True
        self.lwidth = None
        self.color = None
        self.alpha = None
        self.fillcolor = None
        self.fillalpha = None
        self.x = None
        self.y = None
        self.rwidth = None
        self.rheight = None
        self.radius = None
        self.ehw = None
        self.ehh = None
        self.xto = None
        self.yto = None
        self.jpath = None
        self.width = None
        self.height = None
        self.position = vector(0,0)

    def destroy(self):
        self.clear()

    def clone(self):
        """Return a new graphics object carrying a copy of the state."""
        clone = type(self)()
        clone.cleared = self.cleared
        clone.visible = self.visible
        clone.lwidth = self.lwidth
        clone.color = self.color
        clone.alpha = self.alpha
        clone.fillalpha = self.fillalpha
        clone.fillcolor = self.fillcolor
        clone.x = self.x
        clone.y = self.y
        clone.rwidth = self.rwidth
        clone.rheight = self.rheight
        clone.radius = self.radius
        clone.ehw = self.ehw
        clone.ehh = self.ehh
        clone.xto = self.xto
        clone.yto = self.yto
        clone.jpath = self.jpath
        clone.width = self.width
        clone.height = self.height
        clone.position = self.position
        return clone

    def lineStyle(self, width, color, alpha):
        # FIX: the line width was stored into self.width, clobbering the
        # drawing dimensions; clear()/clone() track it as self.lwidth,
        # which was otherwise never assigned.
        self.lwidth = width
        self.color = color
        self.alpha = alpha

    def beginFill(self, color, alpha):
        self.fillcolor = color
        self.fillalpha = alpha

    def drawRect(self, x, y, w, h):
        self.x = x
        self.y = y
        self.position = vector(x,y)
        self.rwidth = w
        self.rheight = h
        self.width = w
        self.height = h
        self.cleared = False
        print("Rectangle {}x{} at {},{}".format(w,h,x,y))
        return self

    def drawCircle(self, x, y, radius):
        self.x = x
        self.y = y
        self.position = vector(x,y)
        self.radius = radius
        self.cleared = False
        self.width = radius*2
        self.height = radius*2
        print("Circle, radius {} at {},{}".format(radius,x,y))
        return self

    def drawEllipse(self, x, y, hw, hh):
        self.x = x
        self.y = y
        self.position = vector(x,y)
        self.ehw = hw
        self.ehh = hh
        self.width = hw*2
        self.height = hh*2
        self.cleared = False
        print("Ellipse, {}x{} at {},{}".format(hw,hh,x,y))
        return self

    def drawPolygon(self, jpath):
        # jpath is a flat [x0, y0, x1, y1, ...] coordinate list.
        self.jpath = jpath
        self.cleared = False
        self.position = vector(jpath[0],jpath[1])
        x = []
        y = []
        for i in range(0,len(jpath)-1,2):
            x.append(jpath[i])
            y.append(jpath[i+1])
        self.width = max(x)-min(x)
        self.height = max(y)-min(y)
        print("Polygon")
        return self

    def moveTo(self, x, y):
        self.x = x
        self.y = y
        self.position = vector(x,y)
        return self

    def lineTo(self, x, y):
        self.xto = x
        self.yto = y
        self.width = abs(x)
        self.height = abs(y)
        self.cleared = False
        print("Line from {},{} to {},{}".format(self.x, self.y, x, y))
        return self
class _GFX_Text(object):
    """Headless text object: keeps the string, its style and a fake size."""

    def __init__(self, text, styledict):
        self.text = text
        self.styledict = styledict
        self.alpha = None
        self.visible = None
        self.width = 99
        self.height = 99
        self.position = vector(0,0)
        print("Text: {} in {}".format(text, styledict['font']))

    def clone(self):
        """Return a fresh text object with the same string and style."""
        return type(self)(self.text, self.styledict)

    def destroy(self):
        self.text = ''

GFX_Text = _GFX_Text
# Single shared graphics object, mirroring the browser backend's behaviour.
_globalGraphics = _GFX_Graphics()
GFX_Graphics = _globalGraphics

def GFX_DetectRenderer():
    """No-op in the headless backend; the real backend probes for WebGL."""
    pass
class _SND_all(object):
def __init__(self):
pass
def stop(self):
print("Stopping all sounds")
class _SND(object):
def __init__(self):
self.all = _SND_all
SND = _SND()
class _SND_Sound(object):
def __init__(self, url):
self.url = url
print("Creating sound object {}".format(url))
def load(self):
pass
def play(self):
print("Playing sound object {}".format(self.url))
SND_Sound = _SND_Sound
class GFX_Window(object):
    """Headless window: couples a browser-window stub with a render stage."""

    def __init__(self, width, height, onclose):
        self._w = window.open("", "")
        # Zero dimensions fall back to a 100x100 window.
        self.width = width if width != 0 else 100
        self.height = height if height != 0 else 100
        self._stage = JSConstructor(GFX.Container)()
        self._renderer = GFX.autoDetectRenderer(width, height, {'transparent':True})
        self._w.document.body.appendChild(self._renderer.view)
        self._w.onunload = onclose

    def bind(self, evtspec, callback):
        # Forward event bindings to the (stubbed) document body.
        self._w.document.body.bind(evtspec, callback)

    def add(self, obj):
        self._stage.addChild(obj)

    def remove(self, obj):
        self._stage.removeChild(obj)

    def animate(self, stepcallback):
        # Render once, then schedule the next frame via the window stub.
        self._renderer.render(self._stage)
        self._w.requestAnimationFrame(stepcallback)

    def destroy(self):
        # Stop all audio before tearing down the display list.
        SND.all().stop()
        self._stage.destroy()
| mit |
vainotuisk/icecreamratings | ENV/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py | 2040 | 8935 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as for regular dictionaries.

    # The internal self.__map dictionary maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # Each link is stored as a list of length three: [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary. Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            # First initialization: create the sentinel and the key map.
            self.__root = root = [] # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the linked
        # list, and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Walk forward from the sentinel, yielding each link's key.
        root = self.__root
        curr = root[1]
        while curr is not root:
            yield curr[2]
            curr = curr[1]

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Walk backward from the sentinel, yielding each link's key.
        root = self.__root
        curr = root[0]
        while curr is not root:
            yield curr[2]
            curr = curr[0]

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        try:
            # Break the link cycles so garbage collection is immediate.
            for node in self.__map.itervalues():
                del node[:]
            root = self.__root
            root[:] = [root, root, None]
            self.__map.clear()
        except AttributeError:
            pass
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            # Unlink the node just before the sentinel (most recent key).
            link = root[0]
            link_prev = link[0]
            link_prev[1] = root
            root[0] = link_prev
        else:
            # Unlink the node just after the sentinel (oldest key).
            link = root[1]
            link_next = link[1]
            root[1] = link_next
            link_next[0] = root
        key = link[2]
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) items in od'
        for k in self:
            yield (k, self[k])

    def update(*args, **kwds):
        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.

        If E is a dict instance, does:           for k in E: od[k] = E[k]
        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
        In either case, this is followed by:     for k, v in F.items(): od[k] = v

        '''
        # 'self' is taken positionally so a key named 'self' can appear in kwds.
        if len(args) > 2:
            raise TypeError('update() takes at most 2 positional '
                            'arguments (%d given)' % (len(args),))
        elif not args:
            raise TypeError('update() takes at least 1 argument (0 given)')
        self = args[0]
        # Make progressively weaker assumptions about "other"
        other = ()
        if len(args) == 2:
            other = args[1]
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, 'keys'):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    __update = update  # let subclasses override update without breaking __init__

    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        # Guard against infinite recursion on self-referencing dicts.
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).

        '''
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other

    # -- the following methods are only used in Python 2.7 --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)
| bsd-3-clause |
kennedyshead/home-assistant | homeassistant/components/buienradar/camera.py | 1 | 7518 | """Provide animated GIF loops of Buienradar imagery."""
from __future__ import annotations
import asyncio
from datetime import datetime, timedelta
import logging
import aiohttp
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, Camera
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import dt as dt_util
from .const import (
CONF_COUNTRY,
CONF_DELTA,
CONF_DIMENSION,
DEFAULT_COUNTRY,
DEFAULT_DELTA,
DEFAULT_DIMENSION,
)
_LOGGER = logging.getLogger(__name__)

# Maximum range according to docs
DIM_RANGE = vol.All(vol.Coerce(int), vol.Range(min=120, max=700))

# Multiple choice for available Radar Map URL
SUPPORTED_COUNTRY_CODES = ["NL", "BE"]

# Legacy YAML schema; config-entry setup reads the same keys via .const.
PLATFORM_SCHEMA = vol.All(
    PLATFORM_SCHEMA.extend(
        {
            vol.Optional(CONF_DIMENSION, default=512): DIM_RANGE,
            vol.Optional(CONF_DELTA, default=600.0): cv.positive_float,
            vol.Optional(CONF_NAME, default="Buienradar loop"): cv.string,
            vol.Optional(CONF_COUNTRY, default="NL"): vol.All(
                vol.Coerce(str), vol.In(SUPPORTED_COUNTRY_CODES)
            ),
        }
    )
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up buienradar camera platform."""
    # YAML setup only warns; entities are created through config entries.
    _LOGGER.warning(
        "Platform configuration is deprecated, will be removed in a future release"
    )
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up buienradar radar-loop camera component."""
    data = entry.data
    options = entry.options

    def _option(key, fallback):
        # Entry options take precedence over the original config data.
        return options.get(key, data.get(key, fallback))

    camera = BuienradarCam(
        data.get(CONF_LATITUDE, hass.config.latitude),
        data.get(CONF_LONGITUDE, hass.config.longitude),
        _option(CONF_DELTA, DEFAULT_DELTA),
        _option(CONF_COUNTRY, DEFAULT_COUNTRY),
    )
    async_add_entities([camera])
class BuienradarCam(Camera):
    """
    A camera component producing animated buienradar radar-imagery GIFs.
    Rain radar imagery camera based on image URL taken from [0].
    [0]: https://www.buienradar.nl/overbuienradar/gratis-weerdata
    """
    def __init__(
        self, latitude: float, longitude: float, delta: float, country: str
    ) -> None:
        """
        Initialize the component.
        This constructor must be run in the event loop.
        """
        super().__init__()
        self._name = "Buienradar"
        # dimension (x and y) of returned radar image
        self._dimension = DEFAULT_DIMENSION
        # time a cached image stays valid for
        self._delta = delta
        # country location
        self._country = country
        # Condition that guards the loading indicator.
        #
        # Ensures that only one reader can cause an http request at the same
        # time, and that all readers are notified after this request completes.
        #
        # invariant: this condition is private to and owned by this instance.
        self._condition = asyncio.Condition()
        # Raw GIF bytes of the most recently fetched radar loop
        # (None until the first successful fetch).
        self._last_image: bytes | None = None
        # value of the last seen last modified header
        self._last_modified: str | None = None
        # loading status
        self._loading = False
        # deadline for image refresh - self.delta after last successful load
        self._deadline: datetime | None = None
        # Latitude/longitude only make the entity id unique; the fetched
        # radar image itself is country-wide, not location-specific.
        self._unique_id = f"{latitude:2.6f}{longitude:2.6f}"
    @property
    def name(self) -> str:
        """Return the component name."""
        return self._name
    def __needs_refresh(self) -> bool:
        # Refresh is needed until the first successful fetch (no image or
        # deadline yet) and again once the cached image is past its deadline.
        if not (self._delta and self._deadline and self._last_image):
            return True
        return dt_util.utcnow() > self._deadline
    async def __retrieve_radar_image(self) -> bool:
        """Retrieve new radar image and return whether this succeeded."""
        session = async_get_clientsession(self.hass)
        url = (
            f"https://api.buienradar.nl/image/1.0/RadarMap{self._country}"
            f"?w={self._dimension}&h={self._dimension}"
        )
        # Conditional request: ask the server to send a body only when the
        # image changed since the copy we already cached.
        if self._last_modified:
            headers = {"If-Modified-Since": self._last_modified}
        else:
            headers = {}
        try:
            async with session.get(url, timeout=5, headers=headers) as res:
                res.raise_for_status()
                # 304 Not Modified: our cached image is still current.
                if res.status == 304:
                    _LOGGER.debug("HTTP 304 - success")
                    return True
                last_modified = res.headers.get("Last-Modified")
                if last_modified:
                    self._last_modified = last_modified
                self._last_image = await res.read()
                _LOGGER.debug("HTTP 200 - Last-Modified: %s", last_modified)
                return True
        except (asyncio.TimeoutError, aiohttp.ClientError) as err:
            # Best-effort fetch: log and report failure; the caller keeps
            # serving the stale image (if any).
            _LOGGER.error("Failed to fetch image, %s", type(err))
            return False
    async def async_camera_image(self) -> bytes | None:
        """
        Return a still image response from the camera.
        Uses asyncio conditions to make sure only one task enters the critical
        section at the same time. Otherwise, two http requests would start
        when two tabs with Home Assistant are open.
        The condition is entered in two sections because otherwise the lock
        would be held while doing the http request.
        A boolean (_loading) is used to indicate the loading status instead of
        _last_image since that is initialized to None.
        For reference:
        * :func:`asyncio.Condition.wait` releases the lock and acquires it
        again before continuing.
        * :func:`asyncio.Condition.notify_all` requires the lock to be held.
        """
        if not self.__needs_refresh():
            return self._last_image
        # get lock, check iff loading, await notification if loading
        async with self._condition:
            # can not be tested - mocked http response returns immediately
            if self._loading:
                _LOGGER.debug("already loading - waiting for notification")
                await self._condition.wait()
                return self._last_image
            # Set loading status **while holding lock**, makes other tasks wait
            self._loading = True
        try:
            now = dt_util.utcnow()
            was_updated = await self.__retrieve_radar_image()
            # was updated? Set new deadline relative to now before loading
            if was_updated:
                self._deadline = now + timedelta(seconds=self._delta)
            return self._last_image
        finally:
            # get lock, unset loading status, notify all waiting tasks
            async with self._condition:
                self._loading = False
                self._condition.notify_all()
    @property
    def unique_id(self):
        """Return the unique id."""
        return self._unique_id
    @property
    def entity_registry_enabled_default(self) -> bool:
        """Disable entity by default."""
        return False
| apache-2.0 |
NickDaly/GemRB-MultipleConfigs | gemrb/GUIScripts/ie_slots.py | 11 | 1707 | # -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2006 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ie_slots.py - definitions of slottypes
# !!! NOTE: Keep this file synchronized with gemrb/plugins/Core/Inventory.h !!!
# --- Slot bit flags (powers of two; SLOT_ANY / SLOT_ALL below are the
# --- composed masks built from them) --------------------------------------
SLOT_HELM = 1
SLOT_ARMOUR = 2
SLOT_SHIELD = 4
SLOT_GLOVE = 8
SLOT_RING = 16
SLOT_AMULET = 32
SLOT_BELT = 64
SLOT_BOOT = 128
SLOT_WEAPON = 256
SLOT_QUIVER = 512
SLOT_CLOAK = 1024
SLOT_ITEM = 2048 #quick item
SLOT_SCROLL = 4096
SLOT_BAG = 8192
SLOT_POTION = 16384
SLOT_INVENTORY = 32768
SLOT_ANY = 32767 #any except inventory
SLOT_ALL = 65535 #all including inventory
# --- Slot types (plain enumeration 0..7, not combinable flags) ------------
TYPE_NORMAL = 0 #inventory
TYPE_ARMOR = 1 #normal armor
TYPE_FIST = 2 #fist weapon
TYPE_MAGIC = 3 #magic weapon
TYPE_WEAPON = 4 #normal weapon
TYPE_QUIVER = 5 #projectile slots
TYPE_OFFHAND = 6 #offhand (shield/weapon)
TYPE_HELMET = 7 #critical hit protection
# End of file ie_slots.py
| gpl-2.0 |
selboo/pyotp | test.py | 1 | 4768 | from __future__ import with_statement
import datetime
import os
import sys
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'src'))
import pyotp
class HOTPExampleValuesFromTheRFC(unittest.TestCase):
    """Exercise HOTP against the published RFC test vectors."""

    # The RFC test secret "12345678901234567890", Base32 encoded.
    RFC_SECRET = 'GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ'

    def testMatchTheRFC(self):
        hotp = pyotp.HOTP(self.RFC_SECRET)
        expected_otps = (755224, 287082, 359152, 969429, 338314,
                         254676, 287922, 162583, 399871, 520489)
        for counter, otp in enumerate(expected_otps):
            self.assertEqual(hotp.at(counter), otp)

    def testVerifyAnOTPAndNowAllowReuse(self):
        hotp = pyotp.HOTP(self.RFC_SECRET)
        # The correct value at the matching counter is accepted...
        self.assertTrue(hotp.verify(520489, 9))
        # ...but a replay (int or string form) at a later counter is not.
        self.assertFalse(hotp.verify(520489, 10))
        self.assertFalse(hotp.verify("520489", 10))

    def testProvisioningURI(self):
        hotp = pyotp.HOTP('wrn3pqx5uqxqvnqr')
        cases = (
            (dict(),
             'otpauth://hotp/mark@percival?secret=wrn3pqx5uqxqvnqr&counter=0'),
            (dict(initial_count=12),
             'otpauth://hotp/mark@percival?secret=wrn3pqx5uqxqvnqr&counter=12'),
            (dict(issuer_name='FooCorp!'),
             'otpauth://hotp/FooCorp%21:mark@percival?secret=wrn3pqx5uqxqvnqr&counter=0&issuer=FooCorp%21'),
        )
        for kwargs, expected_uri in cases:
            self.assertEqual(
                hotp.provisioning_uri('mark@percival', **kwargs), expected_uri)
class TOTPExampleValuesFromTheRFC(unittest.TestCase):
    """Exercise TOTP against RFC vectors and Google Authenticator output."""

    RFC_SECRET = 'GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ'
    GOOGLE_SECRET = 'wrn3pqx5uqxqvnqr'

    def testMatchTheRFC(self):
        totp = pyotp.TOTP(self.RFC_SECRET)
        for timestamp, otp in ((1111111111, 50471),
                               (1234567890, 5924),
                               (2000000000, 279037)):
            self.assertEqual(totp.at(timestamp), otp)

    def testMatchTheGoogleAuthenticatorOutput(self):
        totp = pyotp.TOTP(self.GOOGLE_SECRET)
        with Timecop(1297553958):
            self.assertEqual(totp.now(), 102705)

    def testValidateATimeBasedOTP(self):
        totp = pyotp.TOTP(self.GOOGLE_SECRET)
        with Timecop(1297553958):
            # Both the int and the string form validate inside the window.
            self.assertTrue(totp.verify(102705))
            self.assertTrue(totp.verify("102705"))
        with Timecop(1297553958 + 30):
            # One period later the same OTP must be rejected.
            self.assertFalse(totp.verify(102705))

    def testProvisioningURI(self):
        totp = pyotp.TOTP(self.GOOGLE_SECRET)
        cases = (
            (dict(),
             'otpauth://totp/mark@percival?secret=wrn3pqx5uqxqvnqr'),
            (dict(issuer_name='FooCorp!'),
             'otpauth://totp/FooCorp%21:mark@percival?secret=wrn3pqx5uqxqvnqr&issuer=FooCorp%21'),
        )
        for kwargs, expected_uri in cases:
            self.assertEqual(
                totp.provisioning_uri('mark@percival', **kwargs), expected_uri)
class StringComparisonTest(unittest.TestCase):
    """Exercise the constant-time string comparison helper."""

    def testComparisons(self):
        equal_pairs = (
            ("", ""), (u"", u""),
            ("a", "a"), (u"a", u"a"), (u"a", u"a"),
            ("a" * 1000, "a" * 1000), (u"a" * 1000, u"a" * 1000),
        )
        for left, right in equal_pairs:
            self.assertTrue(pyotp.utils.strings_equal(left, right))
        unequal_pairs = (
            ("", "a"), (u"", u"a"),
            ("a", ""), (u"a", u""),
            ("a" * 999 + "b", "a" * 1000),
            (u"a" * 999 + u"b", u"a" * 1000),
        )
        for left, right in unequal_pairs:
            self.assertFalse(pyotp.utils.strings_equal(left, right))
class Timecop(object):
    """
    Minimal clone of timecop.rb: a context manager that patches
    ``datetime.datetime`` so that ``now()`` reports a fixed moment.
    """

    def __init__(self, freeze_timestamp):
        # POSIX timestamp reported by the patched now().
        self.freeze_timestamp = freeze_timestamp

    def __enter__(self):
        # Remember the genuine class so __exit__ can restore it.
        self.real_datetime = datetime.datetime
        datetime.datetime = self.frozen_datetime()

    def __exit__(self, type, value, traceback):
        datetime.datetime = self.real_datetime

    def frozen_datetime(self):
        """Build a datetime subclass whose ``now()`` is pinned to our timestamp."""
        owner = self

        class FrozenDateTime(datetime.datetime):
            @classmethod
            def now(cls):
                # Read the timestamp through the owning Timecop at call time,
                # so later changes to freeze_timestamp are reflected.
                return cls.fromtimestamp(owner.freeze_timestamp)

        return FrozenDateTime
# Allow running this test module directly (``python test.py``).
if __name__ == '__main__':
    unittest.main()
| mit |
eatbyte/depot_tools | third_party/boto/roboto/param.py | 91 | 4533 | # Copyright (c) 2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import os
class Converter(object):
    """Convert raw command-line string values into typed Python values.

    Each ``convert_<ptype>`` classmethod handles one parameter type;
    ``convert`` dispatches on ``param.ptype`` and falls back to string
    handling for unknown types.
    """
    @classmethod
    def convert_string(cls, param, value):
        # TODO: could do length validation, etc. here
        # NOTE(review): ``basestring`` exists only on Python 2; on Python 3
        # this line raises NameError (which the bare except in convert()
        # turns into a different failure). Confirm target Python version.
        if not isinstance(value, basestring):
            raise ValueError
        return value
    @classmethod
    def convert_integer(cls, param, value):
        # TODO: could do range checking here
        return int(value)
    @classmethod
    def convert_boolean(cls, param, value):
        """
        For command line arguments, just the presence
        of the option means True so just return True
        """
        return True
    @classmethod
    def convert_file(cls, param, value):
        # Accept the value only if it names an existing regular file.
        if os.path.isfile(value):
            return value
        raise ValueError
    @classmethod
    def convert_dir(cls, param, value):
        # Accept the value only if it names an existing directory.
        if os.path.isdir(value):
            return value
        raise ValueError
    @classmethod
    def convert(cls, param, value):
        # Dispatch to convert_<ptype> when such a method exists,
        # otherwise treat the value as a plain string.
        try:
            if hasattr(cls, 'convert_'+param.ptype):
                mthd = getattr(cls, 'convert_'+param.ptype)
            else:
                mthd = cls.convert_string
            return mthd(param, value)
        except:
            # NOTE(review): bare ``except`` hides the real error, and
            # ``ValidationException`` is neither defined nor imported in this
            # module, so a failed conversion actually raises NameError here.
            raise ValidationException(param, '')
class Param(object):
    """Describes one command-line/request parameter and how to render it
    for optparse/getopt option parsing.
    """

    def __init__(self, name=None, ptype='string', optional=True,
                 short_name=None, long_name=None, doc='',
                 metavar=None, cardinality=1, default=None,
                 choices=None, encoder=None, request_param=True):
        self.name = name
        self.ptype = ptype
        self.optional = optional
        self.short_name = short_name
        self.long_name = long_name
        self.doc = doc
        self.metavar = metavar
        self.cardinality = cardinality
        self.default = default
        self.choices = choices
        self.encoder = encoder
        self.request_param = request_param

    @property
    def optparse_long_name(self):
        """Long option in optparse form (``--name``), or None when unset."""
        return '--%s' % self.long_name if self.long_name else None

    @property
    def synopsis_long_name(self):
        """Long option as shown in a usage synopsis, or None when unset."""
        return '--%s' % self.long_name if self.long_name else None

    @property
    def getopt_long_name(self):
        """Long option in getopt form; ``=`` suffix marks a required value."""
        if not self.long_name:
            return None
        # Boolean flags take no value, so no '=' suffix.
        suffix = '' if self.ptype == 'boolean' else '='
        return '%s%s' % (self.long_name, suffix)

    @property
    def optparse_short_name(self):
        """Short option in optparse form (``-x``), or None when unset."""
        return '-%s' % self.short_name if self.short_name else None

    @property
    def synopsis_short_name(self):
        """Short option as shown in a usage synopsis, or None when unset."""
        return '-%s' % self.short_name if self.short_name else None

    @property
    def getopt_short_name(self):
        """Short option in getopt form; ``:`` suffix marks a required value."""
        if not self.short_name:
            return None
        # Boolean flags take no value, so no ':' suffix.
        suffix = '' if self.ptype == 'boolean' else ':'
        return '%s%s' % (self.short_name, suffix)

    def convert(self, value):
        """
        Convert a string value as received in the command line
        tools and convert to the appropriate type of value.
        Raise a ValidationError if the value can't be converted.

        :type value: str
        :param value: The value to convert.  This should always
                      be a string.
        """
        return Converter.convert(self, value)
| bsd-3-clause |
murrown/cyder | vendor-local/src/django-tastypie/tastypie/utils/mime.py | 27 | 1763 | import mimeparse
def determine_format(request, serializer, default_format='application/json'):
    """
    Tries to "smartly" determine which output format is desired.
    First attempts to find a ``format`` override from the request and supplies
    that if found.
    If no request format was demanded, it falls back to ``mimeparse`` and the
    ``Accepts`` header, allowing specification that way.
    If still no format is found, returns the ``default_format`` (which defaults
    to ``application/json`` if not provided).
    """
    # First, check if they forced the format.
    if request.GET.get('format'):
        if request.GET['format'] in serializer.formats:
            return serializer.get_mime_for_format(request.GET['format'])
    # If callback parameter is present, use JSONP.
    # ``dict.has_key`` was removed in Python 3; the ``in`` operator is
    # equivalent and works on both Python 2 and Python 3 (and QueryDicts).
    if 'callback' in request.GET:
        return serializer.get_mime_for_format('jsonp')
    # Try to fallback on the Accepts header.
    if request.META.get('HTTP_ACCEPT', '*/*') != '*/*':
        formats = list(serializer.supported_formats) or []
        # Reverse the list, because mimeparse is weird like that. See also
        # https://github.com/toastdriven/django-tastypie/issues#issue/12 for
        # more information.
        formats.reverse()
        best_format = mimeparse.best_match(formats, request.META['HTTP_ACCEPT'])
        if best_format:
            return best_format
    # No valid 'Accept' header/formats. Sane default.
    return default_format
def build_content_type(format, encoding='utf-8'):
    """
    Appends character encoding to the provided format if not already present.
    """
    already_has_charset = 'charset' in format
    return format if already_has_charset else \
        '{0}; charset={1}'.format(format, encoding)
| bsd-3-clause |
Lilykos/invenio | invenio/modules/workflows/tasks/simplified_data_tasks.py | 21 | 1341 | # -*- coding: utf-8 -*-
# This file is part of Invenio.
# Copyright (C) 2013, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Basic simplified data test functions - NOT FOR XML."""
from functools import wraps
def task_a(a):
    """Return a workflow task that adds ``a`` to ``obj.data``."""
    @wraps(task_a)
    def _task_a(obj, eng):
        message = "executing task a " + str(a)
        eng.log.info(message)
        obj.data += a
    return _task_a
def task_b(obj, eng):
    """Halt the engine when ``obj.data`` is below the threshold of 20."""
    eng.log.info("executing task b")
    if obj.data >= 20:
        return
    eng.log.info("data < 20")
    obj.add_task_result("task_b", {'a': 12, 'b': 13, 'c': 14})
    eng.halt("Value of filed: data in object is too small.")
| gpl-2.0 |
ghbenjamin/TestingGui | TestingGui/RunDialog.py | 1 | 2433 |
import wx
from RunManager import RunManager
import StringMap
class RunDialog ( wx.Dialog ):
    """Dialog listing the selected tests with Run/Stop/Cancel controls."""
    def __init__( self, parent, tests ):
        """Create the dialog and populate the list with *tests* (an iterable
        of test-name strings)."""
        wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.Size( 400, 300 ), style = wx.DEFAULT_DIALOG_STYLE )
        self.initUI()
        # Wire the buttons created in initUI to their handlers.
        self.Bind( wx.EVT_BUTTON, self.onPressRun, self.m_runButton )
        self.Bind( wx.EVT_BUTTON, self.onPressStop, self.m_stopButton )
        self.m_selection = tests
        self._populateListControl( tests )
    def __del__( self ):
        # No explicit cleanup required.
        pass
    def _populateListControl(self, selection):
        """Insert one list row per test name, preserving iteration order."""
        index = 0
        for test in selection:
            self.m_listCtrl.InsertStringItem( index=index, label=test )
            index += 1
    def onPressRun(self, event):
        # TODO: Run handler is currently a no-op.
        pass
    def onPressStop(self, event):
        # TODO: Stop handler is currently a no-op.
        pass
    def initUI(self):
        """Build the static layout: prompt text on top, then the test list on
        the left and a vertical button column on the right."""
        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
        m_mainSizer = wx.BoxSizer( wx.VERTICAL )
        # Prompt text across the top of the dialog.
        self.m_dialogText = wx.StaticText( self, wx.ID_ANY, StringMap.Dialogs.RUN_DLG_TEXT, wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_dialogText.Wrap( -1 )
        m_mainSizer.Add( self.m_dialogText, 0, wx.ALIGN_CENTER_HORIZONTAL|wx.ALL, 10 )
        m_contentSizer = wx.BoxSizer( wx.HORIZONTAL )
        # Left side: the list of tests inside a borderless static box.
        m_listContainer = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, wx.EmptyString ), wx.VERTICAL )
        self.m_listCtrl = wx.ListCtrl( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LC_ICON )
        m_listContainer.Add( self.m_listCtrl, 1, wx.ALL|wx.EXPAND, 5 )
        m_contentSizer.Add( m_listContainer, 1, wx.EXPAND|wx.TOP|wx.BOTTOM|wx.LEFT, 5 )
        # Right side: Run/Stop on top, spacer, then Cancel at the bottom.
        m_buttonSizer = wx.BoxSizer( wx.VERTICAL )
        self.m_runButton = wx.Button( self, wx.ID_ANY, u"Run", wx.DefaultPosition, wx.DefaultSize, 0 )
        m_buttonSizer.Add( self.m_runButton, 0, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        self.m_stopButton = wx.Button( self, wx.ID_ANY, u"Stop", wx.DefaultPosition, wx.DefaultSize, 0 )
        m_buttonSizer.Add( self.m_stopButton, 0, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        # Stretchable spacer pushes Cancel towards the bottom of the column.
        m_buttonSizer.AddSpacer( ( 0, 40), 1, wx.EXPAND, 5 )
        self.m_cancelButton = wx.Button( self, wx.ID_CANCEL, u"Cancel", wx.DefaultPosition, wx.DefaultSize, 0 )
        m_buttonSizer.Add( self.m_cancelButton, 0, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        m_contentSizer.Add( m_buttonSizer, 0, wx.ALIGN_CENTER_VERTICAL, 5 )
        m_mainSizer.Add( m_contentSizer, 1, wx.EXPAND, 5 )
        self.SetSizer( m_mainSizer )
        self.Layout()
        self.Centre( wx.BOTH )
| mit |
russomi/slask-app | vendor.py | 142 | 2812 | #
# Copyright 2014 Jon Wayne Parrott, [proppy], Michael R. Bernstein
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Notes:
# - Imported from https://github.com/jonparrott/Darth-Vendor/.
# - Added license header.
# - Renamed `darth.vendor` to `vendor.add` to match upcoming SDK interface.
# - Renamed `position` param to `index` to match upcoming SDK interface.
# - Removed funny arworks docstring.
import site
import os.path
import sys
def add(folder, index=1):
    """
    Adds the given folder to the python path. Supports namespaced packages.
    By default, packages in the given folder take precedence over site-packages
    and any previous path manipulations.

    Args:
        folder: Path to the folder containing packages; relative paths are
            resolved against this file's directory.
        index: Where in ``sys.path`` to insert the vendor packages. By default
            this is set to 1. It is inadvisable to set it to 0 as it will
            override any modules in the current working directory.
    """
    # Check if the path contains a virtualenv.
    site_dir = os.path.join(folder, 'lib', 'python' + sys.version[:3], 'site-packages')
    if os.path.exists(site_dir):
        folder = site_dir
    # Otherwise it's just a normal path, make it absolute.
    else:
        folder = os.path.join(os.path.dirname(__file__), folder)
    # Use site.addsitedir() because it appropriately reads .pth
    # files for namespaced packages. Unfortunately, there's not an
    # option to choose where addsitedir() puts its paths in sys.path
    # so we have to do a little bit of magic to make it play along.
    # We're going to grab the current sys.path and split it at ``index``:
    # the head keeps its priority, while everything from ``index`` onwards
    # is re-appended after the vendor paths.
    # (Bug fix: ``index`` was previously accepted but ignored — the split
    # position was hard-coded to 1.)
    sys.path, remainder = sys.path[:index], sys.path[index:]
    # Now we call addsitedir which will append our vendor directories
    # to sys.path (which was truncated by the last step.)
    site.addsitedir(folder)
    # Finally, we'll add the paths we removed back.
    # The final product is something like this:
    # ['.', '/vendor-folder', '/site-packages/x', '/site-packages/y']
    sys.path.extend(remainder)
| apache-2.0 |
jdgwartney/meter-plugin-sdk-python | meterplugin/exec_proc.py | 2 | 1802 | # Copyright 2014 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen,PIPE
import shlex
import logging
class ExecProc:
    """Run a shell-style command line and capture its standard output."""

    def __init__(self):
        # Command line to execute; must be set before calling execute().
        self._command = None
        # When True, log the command, the raw output and hex dumps of it.
        self._debug = False

    @property
    def debug(self):
        """Whether verbose diagnostic logging is enabled."""
        return self._debug

    @debug.setter
    def debug(self, debug):
        self._debug = debug

    @property
    def command(self):
        """The command line to execute, as a single string."""
        return self._command

    @command.setter
    def command(self, command):
        # Only plain strings are accepted; they are tokenized by shlex later.
        if type(command) != str:
            raise ValueError
        self._command = command

    def execute(self):
        """Run the configured command and return its stdout as text.

        Raises:
            ValueError: if no command has been configured.
        """
        if self.command is None:
            raise ValueError
        args = shlex.split(self.command)
        if self.debug:
            logging.info("command=\"{0}\"".format(args))
        # universal_newlines=True makes communicate() return text on both
        # Python 2 and 3.  (Bug fix: without it the output is bytes on
        # Python 3 and the str-based post-processing below raised TypeError.)
        p = Popen(args, stdout=PIPE, universal_newlines=True)
        o, e = p.communicate()
        if self.debug:
            # Portable hex dump; str.encode('hex') existed only on Python 2.
            logging.info("before: " + ':'.join(format(ord(c), '02x') for c in o))
        # Remove carriage returns from output
        o = o.replace('\r', '')
        if self.debug:
            logging.info("after: " + ':'.join(format(ord(c), '02x') for c in o))
        if self.debug:
            logging.info("output=\"%s\"", o)
            logging.info(':'.join(format(ord(c), '02x') for c in o))
        return o
| apache-2.0 |
vibhorag/scikit-learn | sklearn/cross_decomposition/pls_.py | 187 | 28507 | """
The :mod:`sklearn.pls` module implements Partial Least Squares (PLS).
"""
# Author: Edouard Duchesnay <edouard.duchesnay@cea.fr>
# License: BSD 3 clause
from ..base import BaseEstimator, RegressorMixin, TransformerMixin
from ..utils import check_array, check_consistent_length
from ..externals import six
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy import linalg
from ..utils import arpack
from ..utils.validation import check_is_fitted
__all__ = ['PLSCanonical', 'PLSRegression', 'PLSSVD']
def _nipals_twoblocks_inner_loop(X, Y, mode="A", max_iter=500, tol=1e-06,
                                 norm_y_weights=False):
    """Inner loop of the iterative NIPALS algorithm.
    Provides an alternative to the svd(X'Y); returns the first left and right
    singular vectors of X'Y. See PLS for the meaning of the parameters. It is
    similar to the Power method for determining the eigenvectors and
    eigenvalues of a X'Y.

    Returns
    -------
    x_weights : ndarray, shape (p, 1)
    y_weights : ndarray, shape (q, 1)
    ite : int
        Number of iterations performed.
    """
    # Initialize the Y score with the first column of Y.
    y_score = Y[:, [0]]
    x_weights_old = 0
    ite = 1
    # Pseudo-inverses are computed lazily and only once (mode B only).
    X_pinv = Y_pinv = None
    # Machine epsilon guards the normalizations below against division by zero.
    eps = np.finfo(X.dtype).eps
    # Inner loop of the Wold algo.
    while True:
        # 1.1 Update u: the X weights
        if mode == "B":
            if X_pinv is None:
                X_pinv = linalg.pinv(X) # compute once pinv(X)
            x_weights = np.dot(X_pinv, y_score)
        else: # mode A
            # Mode A regress each X column on y_score
            x_weights = np.dot(X.T, y_score) / np.dot(y_score.T, y_score)
        # 1.2 Normalize u
        x_weights /= np.sqrt(np.dot(x_weights.T, x_weights)) + eps
        # 1.3 Update x_score: the X latent scores
        x_score = np.dot(X, x_weights)
        # 2.1 Update y_weights
        if mode == "B":
            if Y_pinv is None:
                Y_pinv = linalg.pinv(Y) # compute once pinv(Y)
            y_weights = np.dot(Y_pinv, x_score)
        else:
            # Mode A regress each Y column on x_score
            y_weights = np.dot(Y.T, x_score) / np.dot(x_score.T, x_score)
        # 2.2 Normalize y_weights
        if norm_y_weights:
            y_weights /= np.sqrt(np.dot(y_weights.T, y_weights)) + eps
        # 2.3 Update y_score: the Y latent scores
        y_score = np.dot(Y, y_weights) / (np.dot(y_weights.T, y_weights) + eps)
        # y_score = np.dot(Y, y_weights) / np.dot(y_score.T, y_score) ## BUG
        # Convergence test: stop when x_weights changed less than tol between
        # iterations; a single Y column requires no iteration at all.
        x_weights_diff = x_weights - x_weights_old
        if np.dot(x_weights_diff.T, x_weights_diff) < tol or Y.shape[1] == 1:
            break
        if ite == max_iter:
            warnings.warn('Maximum number of iterations reached')
            break
        x_weights_old = x_weights
        ite += 1
    return x_weights, y_weights, ite
def _svd_cross_product(X, Y):
C = np.dot(X.T, Y)
U, s, Vh = linalg.svd(C, full_matrices=False)
u = U[:, [0]]
v = Vh.T[:, [0]]
return u, v
def _center_scale_xy(X, Y, scale=True):
""" Center X, Y and scale if the scale parameter==True
Returns
-------
X, Y, x_mean, y_mean, x_std, y_std
"""
# center
x_mean = X.mean(axis=0)
X -= x_mean
y_mean = Y.mean(axis=0)
Y -= y_mean
# scale
if scale:
x_std = X.std(axis=0, ddof=1)
x_std[x_std == 0.0] = 1.0
X /= x_std
y_std = Y.std(axis=0, ddof=1)
y_std[y_std == 0.0] = 1.0
Y /= y_std
else:
x_std = np.ones(X.shape[1])
y_std = np.ones(Y.shape[1])
return X, Y, x_mean, y_mean, x_std, y_std
class _PLS(six.with_metaclass(ABCMeta), BaseEstimator, TransformerMixin,
RegressorMixin):
"""Partial Least Squares (PLS)
This class implements the generic PLS algorithm, constructors' parameters
allow to obtain a specific implementation such as:
- PLS2 regression, i.e., PLS 2 blocks, mode A, with asymmetric deflation
and unnormalized y weights such as defined by [Tenenhaus 1998] p. 132.
With univariate response it implements PLS1.
- PLS canonical, i.e., PLS 2 blocks, mode A, with symmetric deflation and
normalized y weights such as defined by [Tenenhaus 1998] (p. 132) and
[Wegelin et al. 2000]. This parametrization implements the original Wold
algorithm.
We use the terminology defined by [Wegelin et al. 2000].
This implementation uses the PLS Wold 2 blocks algorithm based on two
nested loops:
(i) The outer loop iterate over components.
(ii) The inner loop estimates the weights vectors. This can be done
with two algo. (a) the inner loop of the original NIPALS algo. or (b) a
SVD on residuals cross-covariance matrices.
n_components : int, number of components to keep. (default 2).
scale : boolean, scale data? (default True)
deflation_mode : str, "canonical" or "regression". See notes.
mode : "A" classical PLS and "B" CCA. See notes.
norm_y_weights: boolean, normalize Y weights to one? (default False)
algorithm : string, "nipals" or "svd"
The algorithm used to estimate the weights. It will be called
n_components times, i.e. once for each iteration of the outer loop.
max_iter : an integer, the maximum number of iterations (default 500)
of the NIPALS inner loop (used only if algorithm="nipals")
tol : non-negative real, default 1e-06
The tolerance used in the iterative algorithm.
copy : boolean, default True
Whether the deflation should be done on a copy. Let the default
value to True unless you don't care about side effects.
Attributes
----------
x_weights_ : array, [p, n_components]
X block weights vectors.
y_weights_ : array, [q, n_components]
Y block weights vectors.
x_loadings_ : array, [p, n_components]
X block loadings vectors.
y_loadings_ : array, [q, n_components]
Y block loadings vectors.
x_scores_ : array, [n_samples, n_components]
X scores.
y_scores_ : array, [n_samples, n_components]
Y scores.
x_rotations_ : array, [p, n_components]
X block to latents rotations.
y_rotations_ : array, [q, n_components]
Y block to latents rotations.
coef_: array, [p, q]
The coefficients of the linear model: ``Y = X coef_ + Err``
n_iter_ : array-like
Number of iterations of the NIPALS inner loop for each
component. Not useful if the algorithm given is "svd".
References
----------
Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
emphasis on the two-block case. Technical Report 371, Department of
Statistics, University of Washington, Seattle, 2000.
In French but still a reference:
Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
Editions Technic.
See also
--------
PLSCanonical
PLSRegression
CCA
PLS_SVD
"""
    @abstractmethod
    def __init__(self, n_components=2, scale=True, deflation_mode="regression",
                 mode="A", algorithm="nipals", norm_y_weights=False,
                 max_iter=500, tol=1e-06, copy=True):
        """Store the PLS hyper-parameters for the concrete subclasses."""
        # Parameters are stored verbatim; validation happens in fit().
        self.n_components = n_components
        self.deflation_mode = deflation_mode
        self.mode = mode
        self.norm_y_weights = norm_y_weights
        self.scale = scale
        self.algorithm = algorithm
        self.max_iter = max_iter
        self.tol = tol
        self.copy = copy
def fit(self, X, Y):
    """Fit model to data.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
        Training vectors, where n_samples is the number of samples and
        n_features is the number of predictors.
    Y : array-like of response, shape = [n_samples, n_targets]
        Target vectors, where n_samples is the number of samples and
        n_targets is the number of response variables.

    Returns
    -------
    self : object
        The fitted estimator.
    """
    # copy since this will contains the residuals (deflated) matrices
    check_consistent_length(X, Y)
    X = check_array(X, dtype=np.float64, copy=self.copy)
    Y = check_array(Y, dtype=np.float64, copy=self.copy, ensure_2d=False)
    if Y.ndim == 1:
        Y = Y.reshape(-1, 1)
    n = X.shape[0]
    p = X.shape[1]
    q = Y.shape[1]
    # Validate the configuration before doing any numerical work.
    if self.n_components < 1 or self.n_components > p:
        raise ValueError('Invalid number of components: %d' %
                         self.n_components)
    if self.algorithm not in ("svd", "nipals"):
        raise ValueError("Got algorithm %s when only 'svd' "
                         "and 'nipals' are known" % self.algorithm)
    if self.algorithm == "svd" and self.mode == "B":
        raise ValueError('Incompatible configuration: mode B is not '
                         'implemented with svd algorithm')
    if self.deflation_mode not in ["canonical", "regression"]:
        raise ValueError('The deflation mode is unknown')
    # Scale (in place)
    X, Y, self.x_mean_, self.y_mean_, self.x_std_, self.y_std_\
        = _center_scale_xy(X, Y, self.scale)
    # Residuals (deflated) matrices; these alias X/Y and are deflated
    # in place, one rank-one approximation per extracted component.
    Xk = X
    Yk = Y
    # Results matrices
    self.x_scores_ = np.zeros((n, self.n_components))
    self.y_scores_ = np.zeros((n, self.n_components))
    self.x_weights_ = np.zeros((p, self.n_components))
    self.y_weights_ = np.zeros((q, self.n_components))
    self.x_loadings_ = np.zeros((p, self.n_components))
    self.y_loadings_ = np.zeros((q, self.n_components))
    self.n_iter_ = []
    # NIPALS algo: outer loop, over components
    for k in range(self.n_components):
        if np.all(np.dot(Yk.T, Yk) < np.finfo(np.double).eps):
            # Yk constant
            warnings.warn('Y residual constant at iteration %s' % k)
            break
        # 1) weights estimation (inner loop)
        # -----------------------------------
        if self.algorithm == "nipals":
            x_weights, y_weights, n_iter_ = \
                _nipals_twoblocks_inner_loop(
                    X=Xk, Y=Yk, mode=self.mode, max_iter=self.max_iter,
                    tol=self.tol, norm_y_weights=self.norm_y_weights)
            self.n_iter_.append(n_iter_)
        elif self.algorithm == "svd":
            x_weights, y_weights = _svd_cross_product(X=Xk, Y=Yk)
        # compute scores
        x_scores = np.dot(Xk, x_weights)
        if self.norm_y_weights:
            y_ss = 1
        else:
            y_ss = np.dot(y_weights.T, y_weights)
        y_scores = np.dot(Yk, y_weights) / y_ss
        # test for null variance
        if np.dot(x_scores.T, x_scores) < np.finfo(np.double).eps:
            warnings.warn('X scores are null at iteration %s' % k)
            break
        # 2) Deflation (in place)
        # ----------------------
        # Possible memory footprint reduction may done here: in order to
        # avoid the allocation of a data chunk for the rank-one
        # approximations matrix which is then subtracted to Xk, we suggest
        # to perform a column-wise deflation.
        #
        # - regress Xk's on x_score
        x_loadings = np.dot(Xk.T, x_scores) / np.dot(x_scores.T, x_scores)
        # - subtract rank-one approximations to obtain remainder matrix
        Xk -= np.dot(x_scores, x_loadings.T)
        if self.deflation_mode == "canonical":
            # - regress Yk's on y_score, then subtract rank-one approx.
            y_loadings = (np.dot(Yk.T, y_scores)
                          / np.dot(y_scores.T, y_scores))
            Yk -= np.dot(y_scores, y_loadings.T)
        if self.deflation_mode == "regression":
            # - regress Yk's on x_score, then subtract rank-one approx.
            y_loadings = (np.dot(Yk.T, x_scores)
                          / np.dot(x_scores.T, x_scores))
            Yk -= np.dot(x_scores, y_loadings.T)
        # 3) Store weights, scores and loadings # Notation:
        self.x_scores_[:, k] = x_scores.ravel()  # T
        self.y_scores_[:, k] = y_scores.ravel()  # U
        self.x_weights_[:, k] = x_weights.ravel()  # W
        self.y_weights_[:, k] = y_weights.ravel()  # C
        self.x_loadings_[:, k] = x_loadings.ravel()  # P
        self.y_loadings_[:, k] = y_loadings.ravel()  # Q
    # Such that: X = TP' + Err and Y = UQ' + Err
    # 4) rotations from input space to transformed space (scores)
    # T = X W(P'W)^-1 = XW* (W* : p x k matrix)
    # U = Y C(Q'C)^-1 = YC* (W* : q x k matrix)
    self.x_rotations_ = np.dot(
        self.x_weights_,
        linalg.pinv(np.dot(self.x_loadings_.T, self.x_weights_)))
    if Y.shape[1] > 1:
        self.y_rotations_ = np.dot(
            self.y_weights_,
            linalg.pinv(np.dot(self.y_loadings_.T, self.y_weights_)))
    else:
        self.y_rotations_ = np.ones(1)
    if True or self.deflation_mode == "regression":
        # FIXME what's with the if?
        # NOTE(review): the condition is always True, so coef_ is also
        # computed in "canonical" mode -- confirm intent before changing.
        # Estimate regression coefficient
        # Regress Y on T
        # Y = TQ' + Err,
        # Then express in function of X
        # Y = X W(P'W)^-1Q' + Err = XB + Err
        # => B = W*Q' (p x q)
        self.coef_ = np.dot(self.x_rotations_, self.y_loadings_.T)
        self.coef_ = (1. / self.x_std_.reshape((p, 1)) * self.coef_ *
                      self.y_std_)
    return self
def transform(self, X, Y=None, copy=True):
    """Project data onto the latent components learned by fit.

    Parameters
    ----------
    X : array-like of predictors, shape = [n_samples, p]
        Samples to transform.
    Y : array-like of response, shape = [n_samples, q], optional
        Responses to transform alongside X.
    copy : boolean, default True
        Whether to copy X and Y, or perform in-place normalization.

    Returns
    -------
    x_scores if Y is not given, (x_scores, y_scores) otherwise.
    """
    check_is_fitted(self, 'x_mean_')
    X = check_array(X, copy=copy)
    # Center and scale with the training statistics, then rotate.
    X -= self.x_mean_
    X /= self.x_std_
    x_scores = np.dot(X, self.x_rotations_)
    if Y is None:
        return x_scores
    # Apply the same treatment to the responses.
    Y = check_array(Y, ensure_2d=False, copy=copy)
    if Y.ndim == 1:
        Y = Y.reshape(-1, 1)
    Y -= self.y_mean_
    Y /= self.y_std_
    y_scores = np.dot(Y, self.y_rotations_)
    return x_scores, y_scores
def predict(self, X, copy=True):
    """Predict targets for X using the fitted linear model.

    Parameters
    ----------
    X : array-like of predictors, shape = [n_samples, p]
        Samples to predict for.
    copy : boolean, default True
        Whether to copy X, or perform in-place normalization.

    Notes
    -----
    This call requires the estimation of a p x q matrix, which may
    be an issue in high dimensional space.
    """
    check_is_fitted(self, 'x_mean_')
    X = check_array(X, copy=copy)
    # Normalize with the training statistics (in place when copy=False),
    # apply the coefficient matrix, then restore the Y offset.
    X -= self.x_mean_
    X /= self.x_std_
    return np.dot(X, self.coef_) + self.y_mean_
def fit_transform(self, X, y=None, **fit_params):
    """Learn and apply the dimension reduction on the train data.

    Parameters
    ----------
    X : array-like of predictors, shape = [n_samples, p]
        Training vectors, where n_samples is the number of samples and
        p is the number of predictors.
    y : array-like of response, shape = [n_samples, q], optional
        Training vectors, where n_samples is the number of samples and
        q is the number of response variables.

    Returns
    -------
    x_scores if y is not given, (x_scores, y_scores) otherwise.
    """
    # Bug fix: the previous implementation called
    # check_is_fitted(self, 'x_mean_') *before* fitting, so fit_transform
    # always raised NotFittedError on a fresh estimator.  fit() must run
    # first; transform() performs its own fitted-state check.
    return self.fit(X, y, **fit_params).transform(X, y)
class PLSRegression(_PLS):
    """PLS regression

    PLSRegression implements the PLS 2 blocks regression known as PLS2 or PLS1
    in case of one dimensional response.
    This class inherits from _PLS with mode="A", deflation_mode="regression",
    norm_y_weights=False and algorithm="nipals".

    Read more in the :ref:`User Guide <cross_decomposition>`.

    Parameters
    ----------
    n_components : int, (default 2)
        Number of components to keep.
    scale : boolean, (default True)
        whether to scale the data
    max_iter : an integer, (default 500)
        the maximum number of iterations of the NIPALS inner loop (used
        only if algorithm="nipals")
    tol : non-negative real
        Tolerance used in the iterative algorithm, default 1e-06.
    copy : boolean, default True
        Whether the deflation should be done on a copy. Let the default
        value to True unless you don't care about side effects.

    Attributes
    ----------
    x_weights_ : array, [p, n_components]
        X block weights vectors.
    y_weights_ : array, [q, n_components]
        Y block weights vectors.
    x_loadings_ : array, [p, n_components]
        X block loadings vectors.
    y_loadings_ : array, [q, n_components]
        Y block loadings vectors.
    x_scores_ : array, [n_samples, n_components]
        X scores.
    y_scores_ : array, [n_samples, n_components]
        Y scores.
    x_rotations_ : array, [p, n_components]
        X block to latents rotations.
    y_rotations_ : array, [q, n_components]
        Y block to latents rotations.
    coef_ : array, [p, q]
        The coefficients of the linear model: ``Y = X coef_ + Err``
    n_iter_ : array-like
        Number of iterations of the NIPALS inner loop for each
        component.

    Notes
    -----
    For each component k, find weights u, v that optimizes:
    ``max corr(Xk u, Yk v) * var(Xk u) var(Yk u)``, such that ``|u| = 1``

    Note that it maximizes both the correlations between the scores and the
    intra-block variances.

    The residual matrix of X (Xk+1) block is obtained by the deflation on
    the current X score: x_score.

    The residual matrix of Y (Yk+1) block is obtained by deflation on the
    current X score. This performs the PLS regression known as PLS2. This
    mode is prediction oriented.

    This implementation provides the same results that 3 PLS packages
    provided in the R language (R-project):

    - "mixOmics" with function pls(X, Y, mode = "regression")
    - "plspm " with function plsreg2(X, Y)
    - "pls" with function oscorespls.fit(X, Y)

    Examples
    --------
    >>> from sklearn.cross_decomposition import PLSRegression
    >>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [2.,5.,4.]]
    >>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
    >>> pls2 = PLSRegression(n_components=2)
    >>> pls2.fit(X, Y)
    ... # doctest: +NORMALIZE_WHITESPACE
    PLSRegression(copy=True, max_iter=500, n_components=2, scale=True,
            tol=1e-06)
    >>> Y_pred = pls2.predict(X)

    References
    ----------
    Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
    emphasis on the two-block case. Technical Report 371, Department of
    Statistics, University of Washington, Seattle, 2000.

    In French but still a reference:
    Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
    Editions Technic.
    """

    def __init__(self, n_components=2, scale=True,
                 max_iter=500, tol=1e-06, copy=True):
        # Regression flavour of _PLS: one-sided ("regression") deflation of
        # Y on the X scores, un-normalized y weights, NIPALS inner loop.
        _PLS.__init__(self, n_components=n_components, scale=scale,
                      deflation_mode="regression", mode="A",
                      norm_y_weights=False, max_iter=max_iter, tol=tol,
                      copy=copy)
class PLSCanonical(_PLS):
    """PLSCanonical implements the 2 blocks canonical PLS of the original Wold
    algorithm [Tenenhaus 1998] p.204, referred as PLS-C2A in [Wegelin 2000].

    This class inherits from PLS with mode="A" and deflation_mode="canonical",
    norm_y_weights=True and algorithm="nipals", but svd should provide similar
    results up to numerical errors.

    Read more in the :ref:`User Guide <cross_decomposition>`.

    Parameters
    ----------
    scale : boolean, scale data? (default True)
    algorithm : string, "nipals" or "svd"
        The algorithm used to estimate the weights. It will be called
        n_components times, i.e. once for each iteration of the outer loop.
    max_iter : an integer, (default 500)
        the maximum number of iterations of the NIPALS inner loop (used
        only if algorithm="nipals")
    tol : non-negative real, default 1e-06
        the tolerance used in the iterative algorithm
    copy : boolean, default True
        Whether the deflation should be done on a copy. Let the default
        value to True unless you don't care about side effects.
    n_components : int, number of components to keep. (default 2).

    Attributes
    ----------
    x_weights_ : array, shape = [p, n_components]
        X block weights vectors.
    y_weights_ : array, shape = [q, n_components]
        Y block weights vectors.
    x_loadings_ : array, shape = [p, n_components]
        X block loadings vectors.
    y_loadings_ : array, shape = [q, n_components]
        Y block loadings vectors.
    x_scores_ : array, shape = [n_samples, n_components]
        X scores.
    y_scores_ : array, shape = [n_samples, n_components]
        Y scores.
    x_rotations_ : array, shape = [p, n_components]
        X block to latents rotations.
    y_rotations_ : array, shape = [q, n_components]
        Y block to latents rotations.
    n_iter_ : array-like
        Number of iterations of the NIPALS inner loop for each
        component. Not useful if the algorithm provided is "svd".

    Notes
    -----
    For each component k, find weights u, v that optimize::

        max corr(Xk u, Yk v) * var(Xk u) var(Yk u), such that ``|u| = |v| = 1``

    Note that it maximizes both the correlations between the scores and the
    intra-block variances.

    The residual matrix of X (Xk+1) block is obtained by the deflation on the
    current X score: x_score.

    The residual matrix of Y (Yk+1) block is obtained by deflation on the
    current Y score. This performs a canonical symmetric version of the PLS
    regression. But slightly different than the CCA. This is mostly used
    for modeling.

    This implementation provides the same results that the "plspm" package
    provided in the R language (R-project), using the function plsca(X, Y).
    Results are equal or collinear with the function
    ``pls(..., mode = "canonical")`` of the "mixOmics" package. The difference
    relies in the fact that mixOmics implementation does not exactly implement
    the Wold algorithm since it does not normalize y_weights to one.

    Examples
    --------
    >>> from sklearn.cross_decomposition import PLSCanonical
    >>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [2.,5.,4.]]
    >>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
    >>> plsca = PLSCanonical(n_components=2)
    >>> plsca.fit(X, Y)
    ... # doctest: +NORMALIZE_WHITESPACE
    PLSCanonical(algorithm='nipals', copy=True, max_iter=500, n_components=2,
                 scale=True, tol=1e-06)
    >>> X_c, Y_c = plsca.transform(X, Y)

    References
    ----------
    Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
    emphasis on the two-block case. Technical Report 371, Department of
    Statistics, University of Washington, Seattle, 2000.

    Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
    Editions Technic.

    See also
    --------
    CCA
    PLSSVD
    """

    def __init__(self, n_components=2, scale=True, algorithm="nipals",
                 max_iter=500, tol=1e-06, copy=True):
        # Canonical (symmetric) flavour of _PLS: Y is deflated on its own
        # scores and the y weights are normalized to unit length.
        _PLS.__init__(self, n_components=n_components, scale=scale,
                      deflation_mode="canonical", mode="A",
                      norm_y_weights=True, algorithm=algorithm,
                      max_iter=max_iter, tol=tol, copy=copy)
class PLSSVD(BaseEstimator, TransformerMixin):
    """Partial Least Square SVD

    Simply perform a svd on the crosscovariance matrix: X'Y
    There are no iterative deflation here.

    Read more in the :ref:`User Guide <cross_decomposition>`.

    Parameters
    ----------
    n_components : int, default 2
        Number of components to keep.
    scale : boolean, default True
        Whether to scale X and Y.
    copy : boolean, default True
        Whether to copy X and Y, or perform in-place computations.

    Attributes
    ----------
    x_weights_ : array, [p, n_components]
        X block weights vectors.
    y_weights_ : array, [q, n_components]
        Y block weights vectors.
    x_scores_ : array, [n_samples, n_components]
        X scores.
    y_scores_ : array, [n_samples, n_components]
        Y scores.

    See also
    --------
    PLSCanonical
    CCA
    """

    def __init__(self, n_components=2, scale=True, copy=True):
        self.n_components = n_components
        self.scale = scale
        self.copy = copy

    def fit(self, X, Y):
        """Fit the model from the cross-covariance matrix X'Y."""
        # copy since this will contains the centered data
        check_consistent_length(X, Y)
        X = check_array(X, dtype=np.float64, copy=self.copy)
        Y = check_array(Y, dtype=np.float64, copy=self.copy, ensure_2d=False)
        if Y.ndim == 1:
            Y = Y.reshape(-1, 1)
        if self.n_components > max(Y.shape[1], X.shape[1]):
            raise ValueError("Invalid number of components n_components=%d"
                             " with X of shape %s and Y of shape %s."
                             % (self.n_components, str(X.shape), str(Y.shape)))
        # Scale (in place)
        X, Y, self.x_mean_, self.y_mean_, self.x_std_, self.y_std_ =\
            _center_scale_xy(X, Y, self.scale)
        # svd(X'Y)
        C = np.dot(X.T, Y)
        # The arpack svds solver only works if the number of extracted
        # components is smaller than rank(X) - 1. Hence, if we want to extract
        # all the components (C.shape[1]), we have to use another one. Else,
        # let's use arpacks to compute only the interesting components.
        if self.n_components >= np.min(C.shape):
            U, s, V = linalg.svd(C, full_matrices=False)
        else:
            U, s, V = arpack.svds(C, k=self.n_components)
        V = V.T
        self.x_scores_ = np.dot(X, U)
        self.y_scores_ = np.dot(Y, V)
        self.x_weights_ = U
        self.y_weights_ = V
        return self

    def transform(self, X, Y=None):
        """Apply the dimension reduction learned on the train data."""
        check_is_fitted(self, 'x_mean_')
        X = check_array(X, dtype=np.float64)
        Xr = (X - self.x_mean_) / self.x_std_
        x_scores = np.dot(Xr, self.x_weights_)
        if Y is not None:
            # Bug fix: validate/convert Y like _PLS.transform does, so that
            # plain Python lists are accepted and Y.ndim is always defined.
            Y = check_array(Y, ensure_2d=False)
            if Y.ndim == 1:
                Y = Y.reshape(-1, 1)
            Yr = (Y - self.y_mean_) / self.y_std_
            y_scores = np.dot(Yr, self.y_weights_)
            return x_scores, y_scores
        return x_scores

    def fit_transform(self, X, y=None, **fit_params):
        """Learn and apply the dimension reduction on the train data.

        Parameters
        ----------
        X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples is the number of samples and
            p is the number of predictors.
        y : array-like of response, shape = [n_samples, q], optional
            Training vectors, where n_samples is the number of samples and
            q is the number of response variables.

        Returns
        -------
        x_scores if y is not given, (x_scores, y_scores) otherwise.
        """
        return self.fit(X, y, **fit_params).transform(X, y)
| bsd-3-clause |
class EventListener:
    """Aggregate several callbacks and dispatch one event to all of them."""

    def __init__(self, events=None):
        # Bug fix: the previous default `events=[]` was a shared mutable
        # default argument, so every no-arg instance appended into the SAME
        # list.  Use the None sentinel and create a fresh list per instance.
        self._events = events if events is not None else []

    def append(self, event):
        """Register an additional callback."""
        self._events.append(event)

    def fire(self, e):
        """Invoke every registered callback with the event *e*, in order."""
        for callback in self._events:
            callback(e)
class IndexedDB:
    """Pythonic facade over the browser's window.indexedDB API (Brython)."""

    def __init__(self):
        # Fail early on browsers without IndexedDB support.  (The previous
        # version had an unreachable `return` after this raise.)
        if not __BRYTHON__.has_indexedDB:
            raise NotImplementedError("Your browser doesn't support indexedDB")
        self._indexedDB = __BRYTHON__.indexedDB()
        self._db = None        # set asynchronously by _onsuccess
        self._version = None

    def _onsuccess(self, event):
        # Store the opened database handle once the async open completes.
        self._db = event.target.result

    def open(self, name, onsuccess, version=1.0, onerror=None,
             onupgradeneeded=None):
        """Asynchronously open (or create) the database *name*.

        *onsuccess* always runs after the internal handler has stored the
        database handle; *onerror* and *onupgradeneeded* are optional.
        """
        self._version = version
        _result = self._indexedDB.open(name, version)
        _success = EventListener([self._onsuccess, onsuccess])
        _result.onsuccess = _success.fire
        _result.onupgradeneeded = onupgradeneeded
        # Bug fix: the guard below was commented out, so a caller-supplied
        # onerror was always shadowed by the default logging handler.
        if onerror is None:
            def onerror(e):
                print("onerror: %s:%s" % (e.type, e.target.result))

        def onblocked(e):
            print("blocked: %s:%s" % (e.type, e.result))
        _result.onerror = onerror
        _result.onblocked = onblocked

    def transaction(self, entities, mode='read'):
        """Start a transaction over *entities* and wrap it in Transaction."""
        return Transaction(self._db.transaction(entities, mode))
class Transaction:
    """Thin wrapper around a native IDBTransaction object."""

    def __init__(self, transaction):
        # Keep a handle on the underlying browser transaction.
        self._transaction = transaction

    def objectStore(self, name):
        """Open the named store and wrap it in the ObjectStore proxy."""
        native_store = self._transaction.objectStore(name)
        return ObjectStore(native_store)
class ObjectStore:
    """Proxy over a native IDBObjectStore.

    All operations are asynchronous: results and errors are delivered
    through onsuccess/onerror callbacks attached to the browser request
    objects returned by the native API.
    """

    def __init__(self, objectStore):
        self._objectStore=objectStore
        # Accumulates cursor values from the last query(); read via fetchall().
        self._data=[]

    def clear(self, onsuccess=None, onerror=None):
        """Asynchronously delete every record in the store."""
        _result=self._objectStore.clear()
        if onsuccess is not None:
            _result.onsuccess=onsuccess
        if onerror is not None:
            _result.onerror=onerror

    def _helper(self, func, object, onsuccess=None, onerror=None):
        # Shared pattern: issue the native request, then wire the optional
        # callbacks onto the returned request object.
        _result=func(object)
        if onsuccess is not None:
            _result.onsuccess=onsuccess
        if onerror is not None:
            _result.onerror=onerror

    def put(self, obj, key=None, onsuccess=None, onerror=None):
        """Insert or update *obj*, optionally under an explicit *key*."""
        _r = self._objectStore.put(obj, key)
        _r.onsuccess = onsuccess
        _r.onerror = onerror

    def add(self, obj, key, onsuccess=None, onerror=None):
        """Insert *obj* under *key*; the native add fails if *key* exists."""
        _r = self._objectStore.add(obj, key)
        _r.onsuccess = onsuccess
        _r.onerror = onerror
        #self._helper(self._objectStore.add, object, onsuccess, onerror)

    def delete(self, index, onsuccess=None, onerror=None):
        """Remove the record stored under *index*."""
        self._helper(self._objectStore.delete, index, onsuccess, onerror)

    def query(self, *args):
        """Walk a cursor over the store.

        Values accumulate into self._data as the browser fires the success
        events, i.e. results arrive asynchronously after this call returns.
        """
        self._data=[]
        def onsuccess(event):
            cursor=event.target.result
            if cursor is not None:
                self._data.append(cursor.value)
                getattr(cursor,"continue")() # cursor.continue() is illegal
        self._objectStore.openCursor(args).onsuccess=onsuccess

    def fetchall(self):
        """Yield the (single) list of values collected by the last query()."""
        yield self._data

    def get(self, key, onsuccess=None, onerror=None):
        """Asynchronously fetch the record stored under *key*."""
        self._helper(self._objectStore.get, key, onsuccess, onerror)
| gpl-3.0 |
LodewijkSikkel/paparazzi | sw/ground_segment/python/dashboard/radiowatchframe.py | 2 | 2295 | import wx
import sys
import os
import time
import threading
import math
import pynotify
import pygame.mixer
sys.path.append(os.getenv("PAPARAZZI_HOME") + "/sw/ext/pprzlink/lib/v1.0/python")
from ivy_msg_interface import IvyMessagesInterface
WIDTH = 150            # initial frame width, pixels
HEIGHT = 40            # initial frame height, pixels
UPDATE_INTERVAL = 250  # GUI refresh period, milliseconds
class RadioWatchFrame(wx.Frame):
    """Small wx frame that watches the Ivy bus and alerts on RC-link loss."""

    def message_recv(self, ac_id, msg):
        # Ivy callback -- note this runs on the Ivy thread, not the wx
        # main loop; GUI work is deferred via wx.CallLater.
        if msg.name == "ROTORCRAFT_STATUS":
            self.rc_status = int(msg['rc_status'])
            # Only queue a new alert when the sound channel is idle, so the
            # warning does not stack while the link stays down.
            if self.rc_status != 0 and not self.alertChannel.get_busy():
                self.warn_timer = wx.CallLater(5, self.rclink_alert)
            # else:
            #     self.notification.close()

    def gui_update(self):
        # NOTE(review): rc_status starts at -1, so until the first status
        # message arrives this shows "REALLY LOST" (index -1) -- confirm
        # that is the intended start-up display.
        self.rc_statusText.SetLabel(["OK", "LOST", "REALLY LOST"][self.rc_status])
        self.update_timer.Restart(UPDATE_INTERVAL)

    def rclink_alert(self):
        # Play the alert sound and raise a desktop notification.
        # NOTE(review): time.sleep(5) blocks the calling thread for the
        # duration of the alert -- verify this cannot stall the GUI loop.
        self.alertChannel.queue(self.alertSound)
        self.notification.show()
        time.sleep(5)

    def setFont(self, control):
        # Enlarge the control's font by 40% for readability.
        font = control.GetFont()
        size = font.GetPointSize()
        font.SetPointSize(size * 1.4)
        control.SetFont(font)

    def __init__(self):
        wx.Frame.__init__(self, id=-1, parent=None, name=u'RCWatchFrame',
                          size=wx.Size(WIDTH, HEIGHT), title=u'RC Status')
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        self.rc_statusText = wx.StaticText(self, -1, "UNKWN")
        # Audio alert setup (pygame mixer, single dedicated channel).
        pygame.mixer.init()
        self.alertSound = pygame.mixer.Sound("crossing.wav")
        self.alertChannel = pygame.mixer.Channel(False)
        self.setFont(self.rc_statusText)
        # Desktop notification shown alongside the audio alert.
        self.notification = pynotify.Notification("RC Link Warning!",
                                                  "RC Link status not OK!",
                                                  "dialog-warning")
        self.rc_status = -1  # unknown until the first ROTORCRAFT_STATUS
        pynotify.init("RC Status")
        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer.Add(self.rc_statusText, 1, wx.EXPAND)
        self.SetSizer(sizer)
        sizer.Layout()
        # Subscribe to the Ivy bus and start the periodic GUI refresh.
        self.interface = IvyMessagesInterface("radiowatchframe")
        self.interface.subscribe(self.message_recv)
        self.update_timer = wx.CallLater(UPDATE_INTERVAL, self.gui_update)

    def OnClose(self, event):
        # Shut down the Ivy interface before destroying the frame.
        self.interface.shutdown()
        self.Destroy()
| gpl-2.0 |
yongtang/tensorflow | tensorflow/python/kernel_tests/sparse_conditional_accumulator_test.py | 14 | 27252 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.platform import test
def _indexedslice(x, noshape=False):
    """Convert a dense array into an ops.IndexedSlices test fixture.

    Rows (slices along the leading dimension) that are entirely zero are
    omitted from the result.  If *noshape* is True the dense_shape field
    is left unset.
    """
    x = np.array(x)
    dense_shape = x.shape
    ndim = len(dense_shape)
    # Bug fix: keep a row iff ANY of its entries is non-zero.  The previous
    # row-sum test dropped rows whose entries cancel, e.g. [1, -1].
    indices = np.where(np.any(x != 0, axis=tuple(range(1, ndim))))[0]
    values = x[indices]
    if noshape:
        dense_shape = None
    return ops.IndexedSlices(
        indices=indices.tolist(), values=values, dense_shape=dense_shape)
class IndexedSlicesConditionalAccumulatorTest(test.TestCase):
def _assertEqual_indexedslices(self, expected_tensor, result):
self.assertAllEqual(expected_tensor.indices, result.indices)
self.assertAllEqual(expected_tensor.values, result.values)
if (result.dense_shape is not None and
expected_tensor.dense_shape is not None):
self.assertAllEqual(expected_tensor.dense_shape, result.dense_shape)
def _assertEqual_nparray(self, expected_array, result, sess):
expected_tensor = _indexedslice(expected_array)
self._assertEqual_indexedslices(expected_tensor, result)
def testConstructor(self):
with ops.Graph().as_default():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q")
self.assertTrue(isinstance(q.accumulator_ref, ops.Tensor))
self.assertProtoEquals(
"""
name:'Q' op:'SparseConditionalAccumulator'
attr { key: 'dtype' value { type: DT_FLOAT } }
attr { key: 'shape' value { shape { unknown_rank: true} } }
attr { key: 'container' value { s: '' } }
attr { key: 'shared_name' value { s: '' } }
attr { key: 'reduction_type' value {s: 'MEAN'} }
""", q.accumulator_ref.op.node_def)
def testConstructorWithInvalidArg(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError):
data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", reduction_type="Invalid")
def testConstructorWithShape(self):
with ops.Graph().as_default():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32,
name="Q",
shape=tensor_shape.TensorShape([1, 5, 2, 8]))
self.assertTrue(isinstance(q.accumulator_ref, ops.Tensor))
self.assertProtoEquals(
"""
name:'Q' op:'SparseConditionalAccumulator'
attr { key: 'dtype' value { type: DT_FLOAT } }
attr { key: 'shape' value { shape { dim {size: 1 }
dim {size: 5 }
dim {size: 2 }
dim {size: 8 }
} } }
attr { key: 'container' value { s: '' } }
attr { key: 'shared_name' value { s: '' } }
attr { key: 'reduction_type' value {s: 'MEAN'} }
""", q.accumulator_ref.op.node_def)
@test_util.run_deprecated_v1
def testAccumulatorSizeEmpty(self):
with self.cached_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q")
self.assertEqual(q.num_accumulated().eval(), 0)
@test_util.run_deprecated_v1
def testAccumulatorSetGlobalStep(self):
with self.cached_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([1]))
set_global_step_op = q.set_global_step(1)
set_global_step_op.run()
@test_util.run_deprecated_v1
def testAccumulatorApplyGradFloat32(self):
with self.cached_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
accum_op = q.apply_indexed_slices_grad(
ops.IndexedSlices(
indices=[0, 2],
values=np.array([[0, 0, 1], [3, 0, 4]]).astype(np.float32)))
accum_op.run()
self.assertEqual(q.num_accumulated().eval(), 1)
@test_util.run_deprecated_v1
def testDtypes(self):
with self.cached_session() as sess:
dtypes = [dtypes_lib.float16, dtypes_lib.float32, dtypes_lib.float64]
for i in range(len(dtypes)):
dtype = dtypes[i]
q = data_flow_ops.SparseConditionalAccumulator(
dtype, shape=tensor_shape.TensorShape([3, 3, 3]))
elems = np.arange(2)
sum_elems = np.zeros([3, 3, 3]).astype(dtype.as_numpy_dtype)
for e in elems:
mat_to_add = np.zeros([3, 3, 3]).astype(dtype.as_numpy_dtype)
mat_to_add[i, i, i] = e + 1
sum_elems += mat_to_add
t = _indexedslice(mat_to_add)
q.apply_indexed_slices_grad(t).run()
result = self.evaluate(q.take_indexed_slices_grad(1))
self._assertEqual_nparray(sum_elems / len(elems), result, sess)
@test_util.run_deprecated_v1
def testAccumulatorMultipleAccumulators(self):
with self.cached_session() as sess:
q_f32_0 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
q_f32_1 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
q_f16_0 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float16, name="Q", shape=tensor_shape.TensorShape([2, 2]))
q_f16_1 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float16, name="Q", shape=tensor_shape.TensorShape([2, 2]))
accums = [q_f16_0, q_f16_1, q_f32_0, q_f32_1]
elems = [[[1, 0], [0, 0]], [[0, 1], [0, 0]], [[0, 0], [1, 0]], [[0, 0],
[0, 1]]]
expected_tensors = []
for i in range(len(accums)):
tensor_to_add = np.array(elems[i]).astype(accums[i]
.dtype.as_numpy_dtype)
expected_tensor = _indexedslice(tensor_to_add)
expected_tensors.append(expected_tensor)
st = _indexedslice(tensor_to_add)
accums[i].apply_indexed_slices_grad(st).run()
for i in range(len(accums)):
result = sess.run(accums[i].take_indexed_slices_grad(1))
self._assertEqual_indexedslices(expected_tensors[i], result)
@test_util.run_deprecated_v1
def testAccumulatorTakeGradMean(self):
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=())
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1], values=np.array([[1, 0], [0, 2]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices)
accum_op.run()
accum_op = q.apply_grad([0, 2],
np.array([[0, 1], [3, 0]]).astype(np.float32),
[3, 2])
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = self.evaluate(takeg_t)
self.assertAllEqual([0, 1, 2], val.indices)
self.assertAllEqual([[0.5, 0.5], [0, 2], [3, 0]], val.values)
self.assertAllEqual([-1, 2], val.dense_shape)
@test_util.run_deprecated_v1
def testAccumulatorTakeGradSum(self):
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=(), reduction_type="SUM")
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1], values=np.array([[1, 0], [0, 2]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices)
accum_op.run()
accum_op = q.apply_grad([0, 2],
np.array([[0, 1], [3, 0]]).astype(np.float32),
[3, 2])
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = self.evaluate(takeg_t)
self.assertAllEqual([0, 1, 2], val.indices)
self.assertAllEqual([[1, 1], [0, 2], [3, 0]], val.values)
self.assertAllEqual([-1, 2], val.dense_shape)
@test_util.run_deprecated_v1
def testAccumulatorTakeGradInvalidReductionType(self):
with self.assertRaises(ValueError):
data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=(), reduction_type="Invalid")
@test_util.run_deprecated_v1
def testAccumulatorRepeatedTakeGrad(self):
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=())
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1], values=np.array([[1, 0], [0, 2]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices, local_step=0)
accum_op.run()
accum_op = q.apply_grad(
[0, 2],
np.array([[0, 1], [3, 0]]).astype(np.float32), [3, 2],
local_step=0)
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = self.evaluate(takeg_t)
self.assertAllEqual(val.indices, [0, 1, 2])
self.assertAllEqual(val.values, [[0.5, 0.5], [0, 2], [3, 0]])
self.assertAllEqual(val.dense_shape, [-1, 2])
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1],
values=np.array([[10, 0], [0, 20]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices, local_step=1)
accum_op.run()
accum_op = q.apply_grad(
[0, 2],
np.array([[0, 10], [30, 0]]).astype(np.float32), [3, 2],
local_step=1)
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = self.evaluate(takeg_t)
self.assertAllEqual(val.indices, [0, 1, 2])
self.assertAllEqual(val.values, [[5, 5], [0, 20], [30, 0]])
self.assertAllEqual(val.dense_shape, [-1, 2])
@test_util.run_v1_only("b/120545219")
def testParallelApplyGradMean(self):
# We need each thread to keep its own device stack or the device scopes
# won't be properly nested.
ops.get_default_graph().switch_to_thread_local()
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
accum_ops = []
for x in elems:
x = _indexedslice(np.array([[x, 0], [0, x]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(x, local_step=0))
takeg_t = q.take_indexed_slices_grad(1)
def apply_indexed_slices_grad(accum_op):
self.evaluate(accum_op)
threads = [
self.checkedThread(
target=apply_indexed_slices_grad, args=(o,)) for o in accum_ops
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
val = self.evaluate(takeg_t)
expected_val = sum(elems) / len(elems)
self._assertEqual_nparray(
np.array([[expected_val, 0], [0, expected_val]]).astype(np.float32),
val, sess)
@test_util.run_v1_only("b/120545219")
def testParallelApplyGradSum(self):
# We need each thread to keep its own device stack or the device scopes
# won't be properly nested.
ops.get_default_graph().switch_to_thread_local()
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32,
name="Q",
shape=tensor_shape.TensorShape([2, 2]),
reduction_type="SUM")
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
accum_ops = []
for x in elems:
x = _indexedslice(np.array([[x, 0], [0, x]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(x, local_step=0))
takeg_t = q.take_indexed_slices_grad(1)
def apply_indexed_slices_grad(accum_op):
self.evaluate(accum_op)
threads = [
self.checkedThread(target=apply_indexed_slices_grad, args=(o,))
for o in accum_ops
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
val = self.evaluate(takeg_t)
expected_val = 550.0
self._assertEqual_nparray(
np.array([[expected_val, 0], [0, expected_val]]).astype(np.float32),
val, sess)
@test_util.run_v1_only("b/120545219")
def testParallelTakeGrad(self):
# We need each thread to keep its own device stack or the device scopes
# won't be properly nested.
ops.get_default_graph().switch_to_thread_local()
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
elems = [e + 1 for e in range(10)]
accum_ops = []
for e in elems:
v = _indexedslice(np.array([[0, 0], [e, 0]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(v, local_step=e - 1))
takeg_t = q.take_indexed_slices_grad(1)
results = []
def apply_indexed_slices_grad():
for accum_op in accum_ops:
time.sleep(1.0)
self.evaluate(accum_op)
apply_indexed_slices_grad_thread = self.checkedThread(
target=apply_indexed_slices_grad)
def take_grad():
t = self.evaluate(takeg_t)
results.append(t)
threads = [self.checkedThread(target=take_grad) for _ in range(10)]
for thread in threads:
thread.start()
apply_indexed_slices_grad_thread.start()
for thread in threads:
thread.join()
apply_indexed_slices_grad_thread.join()
for i in range(len(accum_ops)):
self._assertEqual_nparray(
np.array([[0, 0], [elems[i], 0]]), results[i], sess)
@test_util.run_v1_only("b/120545219")
def testAccumulatorApplyAndBlockingTake(self):
# We need each thread to keep its own device stack or the device scopes
# won't be properly nested.
ops.get_default_graph().switch_to_thread_local()
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
elems = [10.0, 20.0, 30.0]
elems_ave = sum(elems) / len(elems)
accum_ops = []
for x in elems:
x = _indexedslice(np.array([[0, x], [0, 0]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(x, local_step=0))
takeg_t = q.take_indexed_slices_grad(3)
results = []
def apply_indexed_slices_grad():
for accum_op in accum_ops:
self.evaluate(accum_op)
def take_grad():
results.append(self.evaluate(takeg_t))
accum_thread = self.checkedThread(target=apply_indexed_slices_grad)
takeg_thread = self.checkedThread(target=take_grad)
accum_thread.start()
takeg_thread.start()
accum_thread.join()
takeg_thread.join()
self._assertEqual_nparray([[0, elems_ave], [0, 0]], results[0], sess)
def _blocking_takeg(self, sess, takeg_op):
with self.assertRaisesOpError("was cancelled"):
self.evaluate(takeg_op)
@test_util.run_v1_only("b/120545219")
def testAccumulatorCancel(self):
# We need each thread to keep its own device stack or the device scopes
# won't be properly nested.
ops.get_default_graph().switch_to_thread_local()
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32,
name="Q",
shape=tensor_shape.TensorShape([1, 2, 3]))
takeg_t = q.take_indexed_slices_grad(1)
takeg_thread = self.checkedThread(
self._blocking_takeg, args=(sess, takeg_t))
takeg_thread.start()
time.sleep(1.0)
sess.close() # Will cancel blocked operation
takeg_thread.join()
@test_util.run_v1_only("b/120545219")
def testNonVectorIndices(self):
with self.cached_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Input indices should be vector but received shape:"):
q.apply_grad(
grad_indices=[[0, 1], [1, 0]],
grad_values=np.array([1, 2]).astype(np.float32)).run()
@test_util.run_v1_only("b/120545219")
def testZeroDimensionValues(self):
with self.cached_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Values cannot be 0-dimensional."):
q.apply_grad(
grad_indices=[0], grad_values=np.array(1).astype(np.float32)).run()
@test_util.run_v1_only("b/120545219")
def testWrongNonEmptyInputValues(self):
with self.cached_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
" non-empty input values, got "):
q.apply_grad(
grad_indices=[0, 1],
grad_values=np.array([[0, 1, 1]]).astype(np.float32)).run()
@test_util.run_v1_only("b/120545219")
def testDynamicNonVectorIndices(self):
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
x_indices = array_ops.placeholder(dtypes_lib.int64)
x_values = array_ops.placeholder(dtypes_lib.float32)
accum_op = q.apply_grad(grad_indices=x_indices, grad_values=x_values)
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Input indices should be vector but received shape:"):
sess.run(accum_op,
feed_dict={
x_indices: [[0, 1], [1, 0]],
x_values: np.array([1, 2]).astype(np.float32)
})
@test_util.run_v1_only("b/120545219")
def testDynamicWrongNonEmptyInputValues(self):
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
x_indices = array_ops.placeholder(dtypes_lib.int64)
x_values = array_ops.placeholder(dtypes_lib.float32)
accum_op = q.apply_grad(grad_indices=x_indices, grad_values=x_values)
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
" non-empty input values, got "):
sess.run(accum_op,
feed_dict={
x_indices: [0, 1],
x_values: np.array([[0, 1, 1]]).astype(np.float32)
})
@test_util.run_v1_only("b/120545219")
def testEmptyShapeApply(self):
with self.cached_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([]))
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Input indices should be vector"):
q.apply_grad(grad_indices=0, grad_values=[1.0], grad_shape=[]).run()
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Input indices should be vector"):
q.apply_grad(grad_indices=0, grad_values=[1.0]).run()
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Values cannot be 0-dimensional."):
q.apply_grad(grad_indices=[0], grad_values=1.0, grad_shape=[]).run()
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Values cannot be 0-dimensional."):
q.apply_grad(grad_indices=[0], grad_values=1.0).run()
# The right way to apply a scalar
q.apply_grad(grad_indices=[0], grad_values=[1.0], grad_shape=[]).run()
q.apply_grad(grad_indices=[0], grad_values=[1.0]).run()
@test_util.run_v1_only("b/120545219")
def testValidateShape(self):
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=[2, 2, None])
# Provided shape has wrong rank
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected shape rank at least 3, got 2"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array([[1, 2]]).astype(np.float32),
grad_shape=[2, 2]).run()
# Provided shape has wrong dim
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected shape dim 1 to be 2, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array([[[1, 2], [3, 4], [5, 6]]]).astype(np.float32),
grad_shape=[2, 3, 2]).run()
# Indices exceeded accumulator's shape's limits
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Shape mismatch: index of slice 0 exceeded limits of shape;"
" index is 3 exceeded 2"):
q.apply_grad(
grad_indices=[3],
grad_values=np.array([[[1, 2], [3, 4]]]).astype(np.float32)).run()
# Values' rank does not match shape
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values rank at least 3, got 2"):
q.apply_grad(
grad_indices=[0, 1],
grad_values=np.array([[1, 2], [3, 4]]).astype(np.float32)).run()
# Values' dim does not match shape
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values dim 1 to be 2, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[1, 2], [3, 4], [5, 6]]]).astype(np.float32)).run()
# First successful gradient creates additional constraints
# Shape will be additionally be constrained to [None,2,2,2] hereafter.
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]).astype(np.float32)).run()
# Values' rank does not match accumulated gradient
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values rank 4, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array([[[1, 2], [3, 4]]]).astype(np.float32)).run()
# Values' dim does not match accumulated gradient
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values dim 3 to be 2, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]).astype(
np.float32)).run()
# After take grad, constraints on accumulated gradient are removed
self.evaluate(q.take_grad(1))
# First successful gradient imposes new constraints.
# Hereafter, shape will additionally constrained to [None,2,2,3]
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]).astype(
np.float32),
local_step=1).run()
with self.assertRaisesRegex(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values dim 3 to be 3, got 2"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]).astype(np.float32),
local_step=1).run()
@test_util.run_deprecated_v1
def testReturnShape(self):
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=[2, None])
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]).astype(np.float32)).run()
val = self.evaluate(q.take_indexed_slices_grad(1))
self.assertAllEqual(val.dense_shape, [2, 2, 2, 2])
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=[None, 2])
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]).astype(
np.float32)).run()
val = self.evaluate(q.take_indexed_slices_grad(1))
self.assertAllEqual(val.dense_shape, [-1, 2, 2, 3])
@test_util.run_deprecated_v1
def testApplyGradtInt32IndicesAndShape(self):
with self.cached_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
accum_op = q.apply_grad(
grad_indices=constant_op.constant(
[0, 2], dtype=dtypes_lib.int32),
grad_values=constant_op.constant(
[[0, 0, 1], [3, 0, 4]], dtype=dtypes_lib.float32),
grad_shape=constant_op.constant(
[3, 3], dtype=dtypes_lib.int32))
accum_op.run()
accum_op = q.apply_indexed_slices_grad(
ops.IndexedSlices(
indices=constant_op.constant(
[0, 2], dtype=dtypes_lib.int32),
values=constant_op.constant(
[[0, 0, 1], [3, 0, 4]], dtype=dtypes_lib.float32),
dense_shape=constant_op.constant(
[3, 3], dtype=dtypes_lib.int32)))
accum_op.run()
self.assertEqual(q.num_accumulated().eval(), 2)
val = self.evaluate(q.take_indexed_slices_grad(1))
self.assertAllEqual(val.indices, [0, 2])
self.assertAllEqual(val.values, [[0, 0, 1], [3, 0, 4]])
self.assertAllEqual(val.dense_shape, [3, 3])
if __name__ == "__main__":
test.main()
| apache-2.0 |
snailwalker/v2ex | html5lib/filters/inject_meta_charset.py | 101 | 2665 | import _base
class Filter(_base.Filter):
def __init__(self, source, encoding):
_base.Filter.__init__(self, source)
self.encoding = encoding
def __iter__(self):
state = "pre_head"
meta_found = (self.encoding is None)
pending = []
for token in _base.Filter.__iter__(self):
type = token["type"]
if type == "StartTag":
if token["name"].lower() == u"head":
state = "in_head"
elif type == "EmptyTag":
if token["name"].lower() == u"meta":
# replace charset with actual encoding
has_http_equiv_content_type = False
for (namespace,name),value in token["data"].iteritems():
if namespace != None:
continue
elif name.lower() == u'charset':
token["data"][(namespace,name)] = self.encoding
meta_found = True
break
elif name == u'http-equiv' and value.lower() == u'content-type':
has_http_equiv_content_type = True
else:
if has_http_equiv_content_type and (None, u"content") in token["data"]:
token["data"][(None, u"content")] = u'text/html; charset=%s' % self.encoding
meta_found = True
elif token["name"].lower() == u"head" and not meta_found:
# insert meta into empty head
yield {"type": "StartTag", "name": u"head",
"data": token["data"]}
yield {"type": "EmptyTag", "name": u"meta",
"data": {(None, u"charset"): self.encoding}}
yield {"type": "EndTag", "name": u"head"}
meta_found = True
continue
elif type == "EndTag":
if token["name"].lower() == u"head" and pending:
# insert meta into head (if necessary) and flush pending queue
yield pending.pop(0)
if not meta_found:
yield {"type": "EmptyTag", "name": u"meta",
"data": {(None, u"charset"): self.encoding}}
while pending:
yield pending.pop(0)
meta_found = True
state = "post_head"
if state == "in_head":
pending.append(token)
else:
yield token
| bsd-3-clause |
TeamEOS/external_chromium_org | chrome/common/extensions/docs/server2/blob_reference_store.py | 146 | 1119 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from appengine_wrappers import db
from appengine_wrappers import BlobReferenceProperty
BLOB_REFERENCE_BLOBSTORE = 'BlobReferenceBlobstore'
class _Model(db.Model):
key_ = db.StringProperty()
value = BlobReferenceProperty()
class BlobReferenceStore(object):
"""A wrapper around the datastore API that can store blob keys.
"""
def _Query(self, namespace, key):
return _Model.gql('WHERE key_ = :1', self._MakeKey(namespace, key)).get()
def _MakeKey(self, namespace, key):
return '.'.join((namespace, key))
def Set(self, namespace, key, value):
_Model(key_=self._MakeKey(namespace, key), value=value).put()
def Get(self, namespace, key):
result = self._Query(namespace, key)
if not result:
return None
return result.value
def Delete(self, namespace, key):
result = self._Query(namespace, key)
if not result:
return None
blob_key = result.value
result.delete()
return blob_key
| bsd-3-clause |
DMLoy/ECommerceBasic | lib/python2.7/site-packages/pip/log.py | 143 | 6371 | """Logging
"""
import sys
import logging
from pip import backwardcompat
class Logger(object):
"""
Logging object for use in command-line script. Allows ranges of
levels, to avoid some redundancy of displayed information.
"""
VERBOSE_DEBUG = logging.DEBUG - 1
DEBUG = logging.DEBUG
INFO = logging.INFO
NOTIFY = (logging.INFO + logging.WARN) / 2
WARN = WARNING = logging.WARN
ERROR = logging.ERROR
FATAL = logging.FATAL
LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
def __init__(self):
self.consumers = []
self.indent = 0
self.explicit_levels = False
self.in_progress = None
self.in_progress_hanging = False
def debug(self, msg, *args, **kw):
self.log(self.DEBUG, msg, *args, **kw)
def info(self, msg, *args, **kw):
self.log(self.INFO, msg, *args, **kw)
def notify(self, msg, *args, **kw):
self.log(self.NOTIFY, msg, *args, **kw)
def warn(self, msg, *args, **kw):
self.log(self.WARN, msg, *args, **kw)
def error(self, msg, *args, **kw):
self.log(self.WARN, msg, *args, **kw)
def fatal(self, msg, *args, **kw):
self.log(self.FATAL, msg, *args, **kw)
def log(self, level, msg, *args, **kw):
if args:
if kw:
raise TypeError(
"You may give positional or keyword arguments, not both")
args = args or kw
rendered = None
for consumer_level, consumer in self.consumers:
if self.level_matches(level, consumer_level):
if (self.in_progress_hanging
and consumer in (sys.stdout, sys.stderr)):
self.in_progress_hanging = False
sys.stdout.write('\n')
sys.stdout.flush()
if rendered is None:
if args:
rendered = msg % args
else:
rendered = msg
rendered = ' ' * self.indent + rendered
if self.explicit_levels:
## FIXME: should this be a name, not a level number?
rendered = '%02i %s' % (level, rendered)
if hasattr(consumer, 'write'):
rendered += '\n'
backwardcompat.fwrite(consumer, rendered)
else:
consumer(rendered)
def _show_progress(self):
"""Should we display download progress?"""
return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty())
def start_progress(self, msg):
assert not self.in_progress, (
"Tried to start_progress(%r) while in_progress %r"
% (msg, self.in_progress))
if self._show_progress():
sys.stdout.write(' ' * self.indent + msg)
sys.stdout.flush()
self.in_progress_hanging = True
else:
self.in_progress_hanging = False
self.in_progress = msg
self.last_message = None
def end_progress(self, msg='done.'):
assert self.in_progress, (
"Tried to end_progress without start_progress")
if self._show_progress():
if not self.in_progress_hanging:
# Some message has been printed out since start_progress
sys.stdout.write('...' + self.in_progress + msg + '\n')
sys.stdout.flush()
else:
# These erase any messages shown with show_progress (besides .'s)
logger.show_progress('')
logger.show_progress('')
sys.stdout.write(msg + '\n')
sys.stdout.flush()
self.in_progress = None
self.in_progress_hanging = False
def show_progress(self, message=None):
"""If we are in a progress scope, and no log messages have been
shown, write out another '.'"""
if self.in_progress_hanging:
if message is None:
sys.stdout.write('.')
sys.stdout.flush()
else:
if self.last_message:
padding = ' ' * max(0, len(self.last_message) - len(message))
else:
padding = ''
sys.stdout.write('\r%s%s%s%s' %
(' ' * self.indent, self.in_progress, message, padding))
sys.stdout.flush()
self.last_message = message
def stdout_level_matches(self, level):
"""Returns true if a message at this level will go to stdout"""
return self.level_matches(level, self._stdout_level())
def _stdout_level(self):
"""Returns the level that stdout runs at"""
for level, consumer in self.consumers:
if consumer is sys.stdout:
return level
return self.FATAL
def level_matches(self, level, consumer_level):
"""
>>> l = Logger()
>>> l.level_matches(3, 4)
False
>>> l.level_matches(3, 2)
True
>>> l.level_matches(slice(None, 3), 3)
False
>>> l.level_matches(slice(None, 3), 2)
True
>>> l.level_matches(slice(1, 3), 1)
True
>>> l.level_matches(slice(2, 3), 1)
False
"""
if isinstance(level, slice):
start, stop = level.start, level.stop
if start is not None and start > consumer_level:
return False
if stop is not None or stop <= consumer_level:
return False
return True
else:
return level >= consumer_level
@classmethod
def level_for_integer(cls, level):
levels = cls.LEVELS
if level < 0:
return levels[0]
if level >= len(levels):
return levels[-1]
return levels[level]
def move_stdout_to_stderr(self):
to_remove = []
to_add = []
for consumer_level, consumer in self.consumers:
if consumer == sys.stdout:
to_remove.append((consumer_level, consumer))
to_add.append((consumer_level, sys.stderr))
for item in to_remove:
self.consumers.remove(item)
self.consumers.extend(to_add)
logger = Logger()
| mit |
hainn8x/pybombs | RPCPybombs.py | 9 | 3556 | #!/usr/bin/env python2
#
# Copyright 2013 Tim O'Shea
#
# This file is part of PyBOMBS
#
# PyBOMBS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# PyBOMBS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyBOMBS; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import Ice,sys,traceback
Ice.loadSlice("RPCPybombs.ice")
import RPCPybombs;
from mod_pybombs import *;
class ManagerI(RPCPybombs.Manager):
def __init__(self,name):
RPCPybombs.Manager.__init__(self)
print "rpcpybombs manager init (%s)!"%(name)
recipe_loader.load_all()
print "recipes loaded..."
def install(self, pkglist,current=None):
print "install: %s"%(pkglist);
for p in pkglist:
install(p,True);
return 0;
def remove(self,pkglist,current=None):
remove(pkglist);
return 0;
def rnd(self,pkglist,current=None):
remove_nd(pkglist);
return 0;
def update(self,pkglist,current=None):
update(pkglist);
return 0;
def list(self,filterlist,current=None):
cats = get_catpkgs();
catss = cats.keys();
catss.sort();
pl = [];
for cat in catss:
cats[cat].sort();
for pkg in cats[cat]:
if ((len(filterlist)==0) or any(pkg in s for s in filterlist)):
pi = RPCPybombs.PkgInfo()
pi.name = pkg;
pi.category = cat;
pi.version = "";
pi.source = "";
pi.installed = global_recipes[pkg].satisfy();
pi.satisfier = global_recipes[pkg].satisfier;
if(pi.satisfier == "inventory"):
pi.source = inv.try_get_prop(pkg,"source");
pi.version = inv.try_get_prop(pkg,"version");
if(not pi.installed):
state = "";
if(inv.has(pkg)):
state = inv.state(pkg);
pi.satisfier = "inventory"
pl.append(pi);
return pl;
class Server(Ice.Application):
def run(self, args):
properties = self.communicator().getProperties()
# TODO: only set these if they are not provided from passed config
#self.communicator().getProperties().setProperty("RPCPybombs.Endpoints","tcp -p 8999");
self.communicator().getProperties().setProperty("RPCPybombs.Endpoints","tcp");
self.communicator().getProperties().setProperty("Identity","RPCPybombs");
print self.communicator().getProperties();
adapter = self.communicator().createObjectAdapter("RPCPybombs")
id = self.communicator().stringToIdentity(properties.getProperty("Identity"))
prx = adapter.add(ManagerI(properties.getProperty("Ice.ServerId")), id)
print "adapter: %s"%(prx);
adapter.activate()
self.communicator().waitForShutdown()
return 0
app = Server()
sys.exit(app.main(sys.argv))
| gpl-3.0 |
Bismarrck/tensorflow | tensorflow/python/kernel_tests/record_input_test.py | 6 | 6155 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for record_input_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.framework import test_util
from tensorflow.python.framework.errors_impl import NotFoundError
from tensorflow.python.lib.io import tf_record
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class RecordInputOpTest(test.TestCase):
def generateTestData(self,
prefix,
n,
m,
compression_type=tf_record.TFRecordCompressionType.NONE):
options = tf_record.TFRecordOptions(compression_type)
for i in range(n):
f = os.path.join(self.get_temp_dir(), prefix + "." + str(i))
w = tf_record.TFRecordWriter(f, options=options)
for j in range(m):
w.write("{0:0{width}}".format(i * m + j, width=10).encode("utf-8"))
w.close()
def testRecordInputSimple(self):
with self.cached_session() as sess:
self.generateTestData("basic", 1, 1)
yield_op = data_flow_ops.RecordInput(
file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
parallelism=1,
buffer_size=1,
batch_size=1,
name="record_input").get_yield_op()
self.assertEqual(self.evaluate(yield_op), b"0000000000")
def testRecordInputSimpleGzip(self):
with self.cached_session() as sess:
self.generateTestData(
"basic",
1,
1,
compression_type=tf_record.TFRecordCompressionType.GZIP)
yield_op = data_flow_ops.RecordInput(
file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
parallelism=1,
buffer_size=1,
batch_size=1,
name="record_input",
compression_type=tf_record.TFRecordCompressionType.GZIP).get_yield_op(
)
self.assertEqual(self.evaluate(yield_op), b"0000000000")
def testRecordInputSimpleZlib(self):
with self.cached_session() as sess:
self.generateTestData(
"basic",
1,
1,
compression_type=tf_record.TFRecordCompressionType.ZLIB)
yield_op = data_flow_ops.RecordInput(
file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
parallelism=1,
buffer_size=1,
batch_size=1,
name="record_input",
compression_type=tf_record.TFRecordCompressionType.ZLIB).get_yield_op(
)
self.assertEqual(self.evaluate(yield_op), b"0000000000")
@test_util.run_deprecated_v1
def testRecordInputEpochs(self):
files = 100
records_per_file = 100
batches = 2
with self.cached_session() as sess:
self.generateTestData("basic", files, records_per_file)
records = data_flow_ops.RecordInput(
file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
parallelism=2,
buffer_size=2000,
batch_size=1,
shift_ratio=0.33,
seed=10,
name="record_input",
batches=batches)
yield_op = records.get_yield_op()
# cycle over 3 epochs and make sure we never duplicate
for _ in range(3):
epoch_set = set()
for _ in range(int(files * records_per_file / batches)):
op_list = self.evaluate(yield_op)
self.assertTrue(len(op_list) is batches)
for r in op_list:
self.assertTrue(r[0] not in epoch_set)
epoch_set.add(r[0])
def testDoesNotDeadlock(self):
# Iterate multiple times to cause deadlock if there is a chance it can occur
for _ in range(30):
with self.cached_session() as sess:
self.generateTestData("basic", 1, 1)
records = data_flow_ops.RecordInput(
file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
parallelism=1,
buffer_size=100,
batch_size=1,
name="record_input")
yield_op = records.get_yield_op()
for _ in range(50):
self.evaluate(yield_op)
@test_util.run_deprecated_v1
def testEmptyGlob(self):
with self.cached_session() as sess:
record_input = data_flow_ops.RecordInput(file_pattern="foo")
yield_op = record_input.get_yield_op()
self.evaluate(variables.global_variables_initializer())
with self.assertRaises(NotFoundError):
self.evaluate(yield_op)
@test_util.run_deprecated_v1
def testBufferTooSmall(self):
files = 10
records_per_file = 10
batches = 2
with self.cached_session() as sess:
self.generateTestData("basic", files, records_per_file)
records = data_flow_ops.RecordInput(
file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
parallelism=2,
buffer_size=2000,
batch_size=1,
shift_ratio=0.33,
seed=10,
name="record_input",
batches=batches)
yield_op = records.get_yield_op()
# cycle over 3 epochs and make sure we never duplicate
for _ in range(3):
epoch_set = set()
for _ in range(int(files * records_per_file / batches)):
op_list = self.evaluate(yield_op)
self.assertTrue(len(op_list) is batches)
for r in op_list:
self.assertTrue(r[0] not in epoch_set)
epoch_set.add(r[0])
if __name__ == "__main__":
test.main()
| apache-2.0 |
ftrain/django-ftrain | dotcom/models.py | 1 | 5083 | from django.db import models
from django.conf import settings
from datetime import datetime
import time, random
# Adapted from the custom XSLT Ftrain.com, written from 1998-2005.
# This is not promoted as the right, or even a good, way to program a
# CMS--it's a distillation of custom code in Perl, PHP, and XSLT/Java,
# over ten years, seeking to find a compromise between the arbitrary
# nature of RDF/triples where everything is metadata capable of
# interconnection and the convenience of web-based editing in a
# relational model, where some data is privileged as the "real" data
# for the convenience of all.
# And the author is in many ways still an amateur. This data model is
# more organized around his needs; it's assumed that the system will
# convert the data to something useful for the reader; in this case by
# feeding it into SOLR and making it easily searchable.
#-------------------------------------------------------------------------------
# Arbs
#-------------------------------------------------------------------------------
class Arb(models.Model):
"""
An arb is an ARBitrary publishing unit for a website--dates, some
text, a parent relationship. And a title, although that's
metadata. It's so often there that w hen it's not that's the
exception that proves the rule. Plus it leads to consistency in
the admin interface to have a title.
"""
created = models.DateTimeField(default=datetime.now)
text = models.TextField(blank=True)
title = models.CharField(max_length=255)
def __unicode__(self):
return self.title + " (Arb)"
class Item(Arb):
"""
An item is the root form (an article or post or link--a thing that
is itself). We don't say what _type_ of thing an item is; we count
on the object to do that for us.
"""
publish = models.DateTimeField(default=datetime.now)
slug = models.SlugField()
description = models.TextField(blank=True)
parent = models.ForeignKey("Arb",related_name="arb_parent",blank=True,null=True)
def __unicode__(self):
return self.title + ' (Item)'
class Post(Item):
"""
A Post is an Item that can be either live or not, and can reside
on the front page or not. And has a subtitle.
"""
live = models.BooleanField(default=True)
front_page = models.BooleanField(default=True)
subtitle = models.CharField(max_length=255,blank=True)
def __unicode__(self):
return self.title + ' (Post)'
# In addition to Post we may want to eventually have essays, jokes,
# slideshows, photos, etc. They can all have large and arbitrary
# data structures, or be apps unto themselves.
class Link(Arb):
"""
A link is an arb of convenience--useful for a quick one-off
sideblog. It's assumed that data will be tugged out of the code
rather than put in via tags, which can get too complicated too
quickly.
It's halfway between an Item and a Thing.
"""
# The link can get parsed into the text; otherwise it will cover the entire section.
href = models.URLField(blank=True, max_length=300)
# We may cache RSS and Twitter feeds; this way we can compare to
# old feed data to make sure we don't duplicate items.
identifier = models.CharField(max_length=255)
def __unicode__(self):
return "Link " + self.text
#-------------------------------------------------------------------------------
# Things
#-------------------------------------------------------------------------------
class Thing(Arb):
    """
    A Thing is an arb that represents something. Like a tag, or a
    place. A Thing, cast as a Place, might be "China." An Item called
    "China" would be about China; a Thing called "China" would be an
    identifier to represent China.
    We care about Authors, and Places, for instance.
    """
    slug = models.SlugField()
    # We may want to have a different version of the title for sorting
    title_sort = models.CharField(max_length=255, blank=True)
    description = models.TextField(blank=True)
    def __unicode__(self):
        # ' (Thing)' with a leading space, matching the ' (Item)' /
        # ' (Post)' convention used by the sibling classes above.
        return self.title + ' (Thing)'
class Person(Thing):
    """
    People, usually people who write things
    """
    def __unicode__(self):
        # ' (Person)' with a leading space, matching the ' (Item)' /
        # ' (Post)' convention used by the sibling classes above.
        return self.title + ' (Person)'
#-------------------------------------------------------------------------------
# Relationships
#-------------------------------------------------------------------------------
class Relationship(models.Model):
    """
    We use relationships to relate Arbs, most particularly Things and Items
    """
    # NOTE(review): ``title`` is not a field declared on Relationship, so
    # unicode() on a bare Relationship raises AttributeError. Both
    # subclasses below override __unicode__, so this only bites when a
    # Relationship is used directly -- confirm intent.
    def __unicode__(self):
        return "Relationship: " + self.title
class By(Relationship):
    """Authorship relationship: links an Item to the Person who wrote it."""
    item = models.ForeignKey("Item")
    author = models.ForeignKey("Person")
    def __unicode__(self):
        return self.item.title + " BY " + self.author.title
class SeeAlso(Relationship):
    """Cross-reference relationship pointing from one Arb to another."""
    from_arb = models.ForeignKey("Arb",related_name="from")
    to_arb = models.ForeignKey("Arb",related_name="to")
    def __unicode__(self):
        # The original concatenated a str with a model instance, which
        # raises TypeError. Use the related Arb's title (the subclasses
        # above all rely on ``title`` being defined on Arb).
        return "See Also: " + self.to_arb.title
| bsd-3-clause |
lexqt/django-pipeline | tests/tests/test_glob.py | 6 | 3671 | from __future__ import unicode_literals
import os
import shutil
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage
from django.test import TestCase
from pipeline import glob
def local_path(path):
    """Resolve *path* relative to the directory containing this module."""
    here = os.path.dirname(__file__)
    return os.path.join(here, path)
class GlobTest(TestCase):
    """Exercise pipeline's storage-aware ``glob`` against a scratch
    directory tree created inside a ``FileSystemStorage`` location."""
    def normpath(self, *parts):
        """Join *parts* and normalise separators for the host OS."""
        return os.path.normpath(os.path.join(*parts))
    def mktemp(self, *parts):
        """Create an empty file (and any missing parent dirs) in the test storage."""
        filename = self.normpath(*parts)
        base, file = os.path.split(filename)
        base = os.path.join(self.storage.location, base)
        if not os.path.exists(base):
            os.makedirs(base)
        self.storage.save(filename, ContentFile(""))
    def assertSequenceEqual(self, l1, l2):
        # Deliberately order-insensitive: glob results come back in
        # filesystem order, which is not stable across platforms.
        self.assertEqual(set(l1), set(l2))
    def setUp(self):
        # Swap the module-level staticfiles_storage for a throwaway
        # FileSystemStorage and populate it with a small fixture tree.
        self.storage = FileSystemStorage(local_path('glob_dir'))
        self.old_storage = glob.staticfiles_storage
        glob.staticfiles_storage = self.storage
        self.mktemp('a', 'D')
        self.mktemp('aab', 'F')
        self.mktemp('aaa', 'zzzF')
        self.mktemp('ZZZ')
        self.mktemp('a', 'bcd', 'EF')
        self.mktemp('a', 'bcd', 'efg', 'ha')
    def glob(self, *parts):
        """Run pipeline's glob over a pattern built from *parts*."""
        if len(parts) == 1:
            pattern = parts[0]
        else:
            pattern = os.path.join(*parts)
        return glob.glob(pattern)
    def tearDown(self):
        # Delete the scratch tree and restore the module-level storage.
        shutil.rmtree(self.storage.location)
        glob.staticfiles_storage = self.old_storage
    def test_glob_literal(self):
        """Patterns without wildcards match only the literal path."""
        self.assertSequenceEqual(self.glob('a'), [self.normpath('a')])
        self.assertSequenceEqual(self.glob('a', 'D'), [self.normpath('a', 'D')])
        self.assertSequenceEqual(self.glob('aab'), [self.normpath('aab')])
    def test_glob_one_directory(self):
        """Wildcards match within a single path component."""
        self.assertSequenceEqual(
            self.glob('a*'), map(self.normpath, ['a', 'aab', 'aaa']))
        self.assertSequenceEqual(
            self.glob('*a'), map(self.normpath, ['a', 'aaa']))
        self.assertSequenceEqual(
            self.glob('aa?'), map(self.normpath, ['aaa', 'aab']))
        self.assertSequenceEqual(
            self.glob('aa[ab]'), map(self.normpath, ['aaa', 'aab']))
        self.assertSequenceEqual(self.glob('*q'), [])
    def test_glob_nested_directory(self):
        """Wildcards inside a nested directory; result depends on FS case rules."""
        if os.path.normcase("abCD") == "abCD":
            # case-sensitive filesystem
            self.assertSequenceEqual(
                self.glob('a', 'bcd', 'E*'), [self.normpath('a', 'bcd', 'EF')])
        else:
            # case insensitive filesystem
            self.assertSequenceEqual(self.glob('a', 'bcd', 'E*'), [
                self.normpath('a', 'bcd', 'EF'),
                self.normpath('a', 'bcd', 'efg')
            ])
        self.assertSequenceEqual(
            self.glob('a', 'bcd', '*g'), [self.normpath('a', 'bcd', 'efg')])
    def test_glob_directory_names(self):
        """Wildcards may appear in intermediate path components."""
        self.assertSequenceEqual(
            self.glob('*', 'D'), [self.normpath('a', 'D')])
        self.assertSequenceEqual(self.glob('*', '*a'), [])
        self.assertSequenceEqual(
            self.glob('a', '*', '*', '*a'),
            [self.normpath('a', 'bcd', 'efg', 'ha')])
        self.assertSequenceEqual(
            self.glob('?a?', '*F'),
            map(self.normpath, [os.path.join('aaa', 'zzzF'),
                                os.path.join('aab', 'F')]))
    def test_glob_directory_with_trailing_slash(self):
        # We are verifying that when there is wildcard pattern which
        # ends with os.sep doesn't blow up.
        paths = glob.glob('*' + os.sep)
        self.assertEqual(len(paths), 4)
        self.assertTrue(all([os.sep in path for path in paths]))
| mit |
rowinggolfer/openmolar2 | src/lib_openmolar/admin/qt4/classes/known_server_widget.py | 1 | 7038 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
## ##
## Copyright 2010-2012, Neil Wallace <neil@openmolar.com> ##
## ##
## This program is free software: you can redistribute it and/or modify ##
## it under the terms of the GNU General Public License as published by ##
## the Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program. If not, see <http://www.gnu.org/licenses/>. ##
## ##
###############################################################################
from PyQt4 import QtCore, QtGui
from lib_openmolar.common.qt4.dialogs import BaseDialog
from lib_openmolar.admin.qt4.classes.browser import Browser
class KnownServerWidget(QtGui.QFrame):
    '''
    loads the 230 servers @ startup.
    shows status of each.
    (most users will never use more than one 230 server)
    '''
    # ProxyClient instances, kept in the same order as the rows of
    # list_widget so a row index maps directly onto a client (see refresh).
    _servers = []
    shortcut_clicked = QtCore.pyqtSignal(object)
    '''
    emits a signal containing a tuple
    (proxyclient, shortcut)
    '''
    server_changed = QtCore.pyqtSignal(object)
    '''
    emits a signal when the server number changes
    '''
    def __init__(self, parent=None):
        QtGui.QFrame.__init__(self, parent)
        self.list_widget = QtGui.QListWidget()
        self.browser = Browser()
        label = QtGui.QLabel(
            _("The following OM Servers are configured for use."))
        label.setWordWrap(True)
        r_button = QtGui.QPushButton(_("Refresh"))
        r_button.setToolTip(
            _("Poll all configured OMServers for status and refresh the page"))
        h_button = QtGui.QPushButton(_("View Source"))
        h_button.setToolTip(_("view the html of shown page"))
        # left pane: label, server list and the two buttons
        left_frame = QtGui.QFrame()
        left_layout = QtGui.QVBoxLayout(left_frame)
        left_layout.setMargin(0)
        left_layout.addWidget(label)
        left_layout.addWidget(self.list_widget)
        left_layout.addWidget(r_button)
        left_layout.addWidget(h_button)
        # splitter: server list on the left, html status page on the right
        splitter = QtGui.QSplitter(self)
        splitter.addWidget(left_frame)
        splitter.addWidget(self.browser)
        splitter.setSizes([80,320])
        splitter.setObjectName("KnownServerWidgetSplitter")
        layout = QtGui.QVBoxLayout(self)
        layout.addWidget(splitter)
        r_button.clicked.connect(self.call_refresh)
        h_button.clicked.connect(self.view_html)
        self.list_widget.currentRowChanged.connect(self._server_chosen)
        self.browser.shortcut_clicked.connect(self.browser_shortcut_clicked)
    def sizeHint(self):
        """Preferred initial size for the widget."""
        return QtCore.QSize(400,400)
    def clear(self):
        """Forget all known servers and empty the list widget."""
        self._servers = []
        self.list_widget.clear()
    def refresh(self):
        '''
        update the status of all the clients
        '''
        for i in range(self.list_widget.count()):
            proxy_client = self._servers[i]
            item = self.list_widget.item(i)
            item_text = proxy_client.brief_name
            # icon + suffix reflect the client's connection state
            if proxy_client.is_connected:
                icon = QtGui.QIcon(":icons/openmolar-server.png")
            else:
                icon = QtGui.QIcon(":icons/database.png")
                item_text += u" (%s)"% _("NOT CONNECTED")
            item.setIcon(icon)
            item.setText(item_text)
            item.setToolTip(proxy_client.name)
    def add_proxy_client(self, proxy_client):
        '''
        add a :doc:`ProxyClient`
        '''
        self._servers.append(proxy_client)
        item_text = proxy_client.brief_name
        item = QtGui.QListWidgetItem(item_text, self.list_widget)
        # auto-select the first client added so the browser shows something
        if self.list_widget.currentItem() is None:
            self.list_widget.setCurrentRow(0)
        self.refresh()
    def set_html(self, html):
        '''
        update the html on the embedded browser
        '''
        self.browser.setHtml(html)
    def view_html(self):
        '''
        view the displayed html in plain text
        '''
        html = self.browser.page().currentFrame().toHtml()
        text_browser = QtGui.QTextEdit()
        text_browser.setReadOnly(True)
        text_browser.setFont(QtGui.QFont("courier", 10))
        text_browser.setLineWrapMode(QtGui.QTextEdit.NoWrap)
        text_browser.setPlainText(html)
        dl = BaseDialog(self, remove_stretch=True)
        dl.setWindowTitle("html source view")
        dl.insertWidget(text_browser)
        dl.setMinimumWidth(600)
        dl.cancel_but.hide()
        dl.set_accept_button_text(_("Ok"))
        dl.enableApply()
        dl.exec_()
    def _server_chosen(self, row):
        '''
        private function called by a gui interaction
        '''
        self.refresh()
        self.set_html("please wait....")
        try:
            pm = self._servers[row]
            self.set_html(pm.html)
        except IndexError:
            # row is -1 (no selection) or a stale index
            self.browser.setHtml("<h1>No proxy server chosen</h1>")
        self.server_changed.emit(row)
    def call_refresh(self):
        '''
        function called when the refresh button is clicked.
        '''
        self.refresh()
        row = self.list_widget.currentRow()
        self.server_changed.emit(row)
    @property
    def current_client(self):
        '''
        the active :doc:`ProxyClient`
        '''
        return self._servers[self.list_widget.currentRow()]
    def browser_shortcut_clicked(self, shortcut):
        '''
        pass on the signal from the browser, adding information
        '''
        self.shortcut_clicked.emit(shortcut)
        self.refresh()
def _test():
    """Manual smoke test: show the widget populated with two stub clients."""
    class duck_client(object):
        # duck-typed stand-in for ProxyClient: provides just the
        # attributes KnownServerWidget reads (brief_name, name, html,
        # is_connected)
        def __init__(self, i):
            self.brief_name = "item %d"% i
            self.name = "test tool tip for client %d"% i
            self.html = "<h1>Client %d Works!</h1>"% i
            self.is_connected = False
    import gettext
    # installs _() as a builtin, which the widget uses for labels
    gettext.install("")
    app = QtGui.QApplication([])
    ksw = KnownServerWidget()
    ksw.add_proxy_client(duck_client(1))
    ksw.add_proxy_client(duck_client(2))
    mw = QtGui.QMainWindow()
    mw.setCentralWidget(ksw)
    mw.show()
    app.exec_()
if __name__ == "__main__":
    _test()
| gpl-3.0 |
sanjuro/RCJK | vendor/django/utils/module_loading.py | 112 | 2190 | import imp
import os
import sys
def module_has_submodule(package, module_name):
    """Return True if *package* contains a submodule named *module_name*,
    without importing it. Uses the legacy PEP 302 finder protocol."""
    name = ".".join([package.__name__, module_name])
    # Cheapest check first: already imported.
    if name in sys.modules:
        return True
    # Ask any meta-path importers before touching the filesystem.
    for finder in sys.meta_path:
        if finder.find_module(name):
            return True
    for entry in package.__path__: # No __path__, then not a package.
        try:
            # Try the cached finder.
            finder = sys.path_importer_cache[entry]
            if finder is None:
                # Implicit import machinery should be used.
                try:
                    file_, _, _ = imp.find_module(module_name, [entry])
                    if file_:
                        file_.close()
                    return True
                except ImportError:
                    continue
            # Else see if the finder knows of a loader.
            elif finder.find_module(name):
                return True
            else:
                continue
        except KeyError:
            # No cached finder, so try and make one.
            for hook in sys.path_hooks:
                try:
                    finder = hook(entry)
                    # XXX Could cache in sys.path_importer_cache
                    if finder.find_module(name):
                        return True
                    else:
                        # Once a finder is found, stop the search.
                        break
                except ImportError:
                    # Continue the search for a finder.
                    continue
            else:
                # No finder found.
                # Try the implicit import machinery if searching a directory.
                if os.path.isdir(entry):
                    try:
                        file_, _, _ = imp.find_module(module_name, [entry])
                        if file_:
                            file_.close()
                        return True
                    except ImportError:
                        pass
                # XXX Could insert None or NullImporter
    else:
        # Exhausted the search, so the module cannot be found.
        return False
| apache-2.0 |
theheros/kbengine | demo/res/scripts/common/KBEDebug.py | 2 | 1307 | # -*- coding: utf-8 -*-
import sys
def getClassName(frame):
	"""Best-effort lookup of the class owning the method running in *frame*.

	Walks the MRO of ``frame.f_locals['self']`` looking for the class whose
	dict holds the code object currently executing. Returns ``'ClassName.'``
	on success and ``''`` when the class cannot be determined. Never raises.
	"""
	try:
		# the instance the method is bound to
		# (raises KeyError for plain functions -- handled below)
		cls = frame.f_locals['self'].__class__
		try:
			# new-style classes expose their MRO directly
			mro = cls.__mro__
		except AttributeError:
			# old-style class: breadth-first walk of the bases
			stack = [cls]
			mro = []
			while stack:
				curr = stack.pop(0)
				mro.append(curr)
				stack += curr.__bases__
		# name of the function currently executing in the frame
		funcName = frame.f_code.co_name
		for c in mro:
			try:
				method = c.__dict__[funcName]
				# '__code__' on py3, 'func_code' on py2. (The original
				# referenced an undefined name 'f' and only checked the
				# py2-only 'func_code', so it always returned ''.)
				code = getattr(method, '__code__', None) or \
					getattr(method, 'func_code', None)
				if code is frame.f_code:
					return c.__name__ + '.'
			except KeyError:
				pass
	except Exception:
		# keep the original contract: give up quietly on any failure
		pass
	return ""
def printMsg(keyword, args, isPrintPath):
	"""Print every entry of *args* on its own line.

	``keyword`` and ``isPrintPath`` are currently unused: the caller-info
	prefix they once controlled is kept disabled below.
	"""
	f = sys._getframe(2)  # the caller's caller; kept for the disabled prefix
	# print file name and line number -- disabled:
	#if isPrintPath:
	#	print f.f_code.co_filename + "(" + str(f.f_lineno) + ") :"
	#print "%s:%s%s: " % ( keyword, getClassName( f ), f.f_code.co_name ),
	for message in args:
		print(message)
def TRACE_MSG(*args):
	"""Trace-level output; delegates to printMsg (which prints each arg)."""
	printMsg("Trace:", args, False)
def DEBUG_MSG(*args):
	"""Debug-level output; delegates to printMsg (which prints each arg)."""
	printMsg("Debug:", args, True)
def INFO_MSG(*args):
	"""Info-level output; delegates to printMsg (which prints each arg)."""
	printMsg("Info:", args, False)
def WARNING_MSG(*args):
	"""Warning-level output; delegates to printMsg (which prints each arg)."""
	printMsg("Warning:", args, True)
def ERROR_MSG(*args):
	"""Error-level output; delegates to printMsg (which prints each arg)."""
	printMsg("Error:", args, True)
def HOOK_MSG(*args) :
s = "HOOK_MSG: "
| lgpl-3.0 |
rwth-ti/gr-ofdm | python/ofdm/qa_channel_estimator_02.py | 1 | 1295 | #!/usr/bin/env python
#
# Copyright 2014 Institute for Theoretical Information Technology,
# RWTH Aachen University
# www.ti.rwth-aachen.de
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import ofdm_swig as ofdm
class qa_channel_estimator_02 (gr_unittest.TestCase):
    """GNU Radio QA harness for the ofdm channel_estimator_02 block."""
    def setUp (self):
        # fresh flowgraph for every test
        self.tb = gr.top_block ()
    def tearDown (self):
        self.tb = None
    def test_001_t (self):
        """Placeholder: runs an empty flowgraph; no assertions yet."""
        # set up fg
        self.tb.run ()
        # check data
if __name__ == '__main__':
    gr_unittest.run(qa_channel_estimator_02, "qa_channel_estimator_02.xml")
dmillington/ansible-modules-core | network/nxos/nxos_acl.py | 19 | 26908 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: nxos_acl
version_added: "2.2"
short_description: Manages access list entries for ACLs.
description:
- Manages access list entries for ACLs.
extends_documentation_fragment: nxos
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- C(state=absent) removes the ACE if it exists.
- C(state=delete_acl) deleted the ACL if it exists.
- For idempotency, use port numbers for the src/dest port
params like I(src_port1) and names for the well defined protocols
for the I(proto) param.
- Although this module is idempotent in that if the ace as presented in
the task is identical to the one on the switch, no changes will be made.
If there is any difference, what is in Ansible will be pushed (configured
options will be overridden). This is to improve security, but at the
same time remember an ACE is removed, then re-added, so if there is a
change, the new ACE will be exactly what parameters you are sending to
the module.
options:
seq:
description:
- Sequence number of the entry (ACE).
required: false
default: null
name:
description:
- Case sensitive name of the access list (ACL).
required: true
action:
description:
- Action of the ACE.
required: false
default: null
choices: ['permit', 'deny', 'remark']
remark:
description:
- If action is set to remark, this is the description.
required: false
default: null
proto:
description:
- Port number or protocol (as supported by the switch).
required: false
default: null
src:
description:
- Source ip and mask using IP/MASK notation and
supports keyword 'any'.
required: false
default: null
src_port_op:
description:
- Source port operands such as eq, neq, gt, lt, range.
required: false
default: null
choices: ['any', 'eq', 'gt', 'lt', 'neq', 'range']
src_port1:
description:
- Port/protocol and also first (lower) port when using range
operand.
required: false
default: null
src_port2:
description:
- Second (end) port when using range operand.
required: false
default: null
dest:
description:
- Destination ip and mask using IP/MASK notation and supports the
keyword 'any'.
required: false
default: null
dest_port_op:
description:
- Destination port operands such as eq, neq, gt, lt, range.
required: false
default: null
choices: ['any', 'eq', 'gt', 'lt', 'neq', 'range']
dest_port1:
description:
- Port/protocol and also first (lower) port when using range
operand.
required: false
default: null
dest_port2:
description:
- Second (end) port when using range operand.
required: false
default: null
log:
description:
- Log matches against this entry.
required: false
default: null
choices: ['enable']
urg:
description:
- Match on the URG bit.
required: false
default: null
choices: ['enable']
ack:
description:
- Match on the ACK bit.
required: false
default: null
choices: ['enable']
psh:
description:
- Match on the PSH bit.
required: false
default: null
choices: ['enable']
rst:
description:
- Match on the RST bit.
required: false
default: null
choices: ['enable']
syn:
description:
- Match on the SYN bit.
required: false
default: null
choices: ['enable']
fin:
description:
- Match on the FIN bit.
required: false
default: null
choices: ['enable']
established:
description:
- Match established connections.
required: false
default: null
choices: ['enable']
fragments:
description:
- Check non-initial fragments.
required: false
default: null
choices: ['enable']
time-range:
description:
- Name of time-range to apply.
required: false
default: null
precedence:
description:
- Match packets with given precedence.
required: false
default: null
choices: ['critical', 'flash', 'flash-override', 'immediate',
'internet', 'network', 'priority', 'routine']
dscp:
description:
- Match packets with given dscp value.
required: false
default: null
choices: ['af11', 'af12', 'af13', 'af21', 'af22', 'af23','af31','af32',
'af33', 'af41', 'af42', 'af43', 'cs1', 'cs2', 'cs3', 'cs4',
'cs5', 'cs6', 'cs7', 'default', 'ef']
state:
description:
- Specify desired state of the resource.
required: false
default: present
choices: ['present','absent','delete_acl']
'''
EXAMPLES = '''
# configure ACL ANSIBLE
- nxos_acl:
name: ANSIBLE
seq: 10
action: permit
proto: tcp
src: 1.1.1.1/24
dest: any
state: present
provider: "{{ nxos_provider }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module.
returned: always
type: dict
sample: {"action": "permit", "dest": "any", "name": "ANSIBLE",
"proto": "tcp", "seq": "10", "src": "1.1.1.1/24"}
existing:
description: k/v pairs of existing ACL entries.
returned: always
type: dict
sample: {}
end_state:
description: k/v pairs of ACL entries after module execution.
returned: always
type: dict
sample: {"action": "permit", "dest": "any", "name": "ANSIBLE",
"proto": "tcp", "seq": "10", "src": "1.1.1.1/24"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["ip access-list ANSIBLE", "10 permit tcp 1.1.1.1/24 any"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import collections
import json
# COMMON CODE FOR MIGRATION
import re
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
from ansible.module_utils.shell import ShellError
try:
from ansible.module_utils.nxos import get_module
except ImportError:
from ansible.module_utils.nxos import NetworkModule
def to_list(val):
    """Normalise *val* to a list.

    Lists and tuples are copied into a new list, ``None`` becomes the
    empty list, and any other single value is wrapped in a list.
    """
    if isinstance(val, (list, tuple)):
        return list(val)
    if val is None:
        return []
    return [val]
class CustomNetworkConfig(NetworkConfig):
    """NetworkConfig extended with hierarchical section helpers used by
    the nxos modules."""
    def expand_section(self, configobj, S=None):
        """Return *configobj* plus all of its descendants, depth-first."""
        if S is None:
            S = list()
        S.append(configobj)
        for child in configobj.children:
            if child in S:
                continue
            self.expand_section(child, S)
        return S
    def get_object(self, path):
        """Find the ConfigLine whose text and full ancestry match *path*."""
        for item in self.items:
            if item.text == path[-1]:
                parents = [p.text for p in item.parents]
                if parents == path[:-1]:
                    return item
    def to_block(self, section):
        """Join the raw text of a list of ConfigLine objects."""
        return '\n'.join([item.raw for item in section])
    def get_section(self, path):
        """Return the config section at *path* as text, or [] when absent."""
        try:
            section = self.get_section_objects(path)
            return self.to_block(section)
        except ValueError:
            return list()
    def get_section_objects(self, path):
        """Return the ConfigLine objects under *path*.

        Raises ValueError when the path is not present in the config.
        """
        if not isinstance(path, list):
            path = [path]
        obj = self.get_object(path)
        if not obj:
            raise ValueError('path does not exist in config')
        return self.expand_section(obj)
    def add(self, lines, parents=None):
        """Add one or more lines of configuration, creating any missing
        parent sections and indenting children appropriately.
        """
        ancestors = list()
        offset = 0
        obj = None
        ## global config command
        if not parents:
            for line in to_list(lines):
                item = ConfigLine(line)
                item.raw = line
                if item not in self.items:
                    self.items.append(item)
        else:
            for index, p in enumerate(parents):
                try:
                    i = index + 1
                    obj = self.get_section_objects(parents[:i])[0]
                    ancestors.append(obj)
                except ValueError:
                    # add parent to config
                    offset = index * self.indent
                    obj = ConfigLine(p)
                    obj.raw = p.rjust(len(p) + offset)
                    if ancestors:
                        obj.parents = list(ancestors)
                        ancestors[-1].children.append(obj)
                    self.items.append(obj)
                    ancestors.append(obj)
            # add child objects
            for line in to_list(lines):
                # check if child already exists
                for child in ancestors[-1].children:
                    if child.text == line:
                        break
                else:
                    offset = len(parents) * self.indent
                    item = ConfigLine(line)
                    item.raw = line.rjust(len(line) + offset)
                    item.parents = ancestors
                    ancestors[-1].children.append(item)
                    self.items.append(item)
def get_network_module(**kwargs):
    """Build a network module, preferring the legacy ``get_module`` factory.

    ``get_module`` only exists on older Ansible releases (see the
    try/except import at the top of the file); when that import failed,
    referencing it raises NameError and we fall back to ``NetworkModule``.
    """
    try:
        return get_module(**kwargs)
    except NameError:
        return NetworkModule(**kwargs)
def get_config(module, include_defaults=False):
    """Return the device configuration wrapped in a CustomNetworkConfig.

    NOTE(review): the ``include_defaults`` parameter is ignored; the value
    actually used comes from ``module.params['include_defaults']``.
    """
    config = module.params['config']
    if not config:
        try:
            config = module.get_config()
        except AttributeError:
            # newer module API: config lives on module.config
            defaults = module.params['include_defaults']
            config = module.config.get_config(include_defaults=defaults)
    return CustomNetworkConfig(indent=2, contents=config)
def load_config(module, candidate):
    """Diff *candidate* against the running config and push the delta.

    Returns a result dict with ``changed`` and, when commands were sent,
    ``updates``. Honours check mode and the ``save`` module parameter.
    """
    config = get_config(module)
    commands = candidate.difference(config)
    commands = [str(c).strip() for c in commands]
    save_config = module.params['save']
    result = dict(changed=False)
    if commands:
        if not module.check_mode:
            try:
                module.configure(commands)
            except AttributeError:
                # newer module API spells it module.config(...)
                module.config(commands)
            if save_config:
                try:
                    module.config.save_config()
                except AttributeError:
                    module.execute(['copy running-config startup-config'])
        result['changed'] = True
        result['updates'] = commands
    return result
# END OF COMMON CODE
def get_cli_body_ssh(command, response, module):
    """Normalise cli/ssh output into the nxapi-style body list.

    These modules were written for NX-API, and not every command supports
    '| json' over cli/ssh. Heuristics: an XML string means the command was
    valid but the resource does not exist yet (empty body); a '^' marker
    means the command itself was invalid (return the raw response);
    otherwise the output is parsed as JSON, and an unparsable response
    aborts the module via fail_json.
    """
    output = response[0]
    if 'xml' in output:
        body = []
    elif '^' in output:
        body = response
    else:
        try:
            body = [json.loads(output)]
        except ValueError:
            module.fail_json(msg='Command does not support JSON output',
                             command=command)
    return body
def execute_show(cmds, module, command_type=None):
    """Run show commands through whichever transport API the module has.

    Tries the legacy ``module.execute`` first; on AttributeError falls back
    to the newer ``module.cli`` interface, translating the nxapi
    command_type into the cli output format. Shell errors abort the module
    via fail_json.
    """
    command_type_map = {
        'cli_show': 'json',
        'cli_show_ascii': 'text'
    }
    try:
        if command_type:
            response = module.execute(cmds, command_type=command_type)
        else:
            response = module.execute(cmds)
    except ShellError:
        clie = get_exception()
        module.fail_json(msg='Error sending {0}'.format(cmds),
                         error=str(clie))
    except AttributeError:
        # module.execute is gone on newer Ansible: drive module.cli instead
        try:
            if command_type:
                command_type = command_type_map.get(command_type)
                module.cli.add_commands(cmds, output=command_type)
                response = module.cli.run_commands()
            else:
                module.cli.add_commands(cmds, raw=True)
                response = module.cli.run_commands()
        except ShellError:
            clie = get_exception()
            module.fail_json(msg='Error sending {0}'.format(cmds),
                             error=str(clie))
    return response
def execute_show_command(command, module, command_type='cli_show'):
    """Run a single show command and return the parsed body list.

    For the cli transport '| json' is appended so ssh output can be parsed
    like nxapi output; for nxapi the command_type is passed straight through.
    NOTE(review): ``body`` is unbound if transport is neither value --
    presumably argument validation elsewhere guarantees one of the two.
    """
    if module.params['transport'] == 'cli':
        command += ' | json'
        cmds = [command]
        response = execute_show(cmds, module)
        body = get_cli_body_ssh(command, response, module)
    elif module.params['transport'] == 'nxapi':
        cmds = [command]
        body = execute_show(cmds, module, command_type=command_type)
    return body
def get_acl(module, acl_name, seq_number):
    """Return ``(matching_ace, all_aces, seq_numbers)`` for *acl_name*.

    ``matching_ace`` is the parsed entry whose sequence number equals
    *seq_number* (or {} when absent), ``all_aces`` is every parsed entry
    of the ACL, and ``seq_numbers`` lists all sequence numbers as strings.
    An ACL with no entries yields ``({}, [{'acl': 'no_entries'}], [])``.
    """
    command = 'show ip access-list'
    new_acl = []
    saveme = {}
    seqs = []
    acl_body = {}
    body = execute_show_command(command, module)[0]
    all_acl_body = body['TABLE_ip_ipv6_mac']['ROW_ip_ipv6_mac']
    for acl in all_acl_body:
        if acl.get('acl_name') == acl_name:
            acl_body = acl
    try:
        acl_entries = acl_body['TABLE_seqno']['ROW_seqno']
        acl_name = acl_body.get('acl_name')
    except KeyError: # could be raised if no ACEs are configured for an ACL
        return saveme, [{'acl': 'no_entries'}], seqs
    if isinstance(acl_entries, dict):
        # a single ACE comes back as a dict rather than a one-element list
        acl_entries = [acl_entries]
    for each in acl_entries:
        temp = collections.OrderedDict()
        keep = {}
        temp['name'] = acl_name
        temp['seq'] = str(each.get('seqno'))
        temp['options'] = {}
        remark = each.get('remark')
        if remark:
            temp['remark'] = remark
            temp['action'] = 'remark'
        else:
            temp['action'] = each.get('permitdeny')
        temp['proto'] = each.get('proto', each.get('proto_str', each.get('ip')))
        temp['src'] = each.get('src_any', each.get('src_ip_prefix'))
        temp['src_port_op'] = each.get('src_port_op')
        temp['src_port1'] = each.get('src_port1_num')
        temp['src_port2'] = each.get('src_port2_num')
        temp['dest'] = each.get('dest_any', each.get('dest_ip_prefix'))
        temp['dest_port_op'] = each.get('dest_port_op')
        temp['dest_port1'] = each.get('dest_port1_num')
        temp['dest_port2'] = each.get('dest_port2_num')
        options = collections.OrderedDict()
        options['log'] = each.get('log')
        options['urg'] = each.get('urg')
        options['ack'] = each.get('ack')
        options['psh'] = each.get('psh')
        options['rst'] = each.get('rst')
        options['syn'] = each.get('syn')
        options['fin'] = each.get('fin')
        options['established'] = each.get('established')
        options['dscp'] = each.get('dscp_str')
        options['precedence'] = each.get('precedence_str')
        options['fragments'] = each.get('fragments')
        options['time_range'] = each.get('timerange')
        options_no_null = {}
        # items() instead of the py2-only iteritems(): identical iteration
        # on py2, and required for py3 compatibility
        for key, value in options.items():
            if value is not None:
                options_no_null[key] = value
        keep['options'] = options_no_null
        for key, value in temp.items():
            if value:
                keep[key] = value
        # ensure options is always in the dict
        if keep.get('options', 'DNE') == 'DNE':
            keep['options'] = {}
        if keep.get('seq') == seq_number:
            saveme = dict(keep)
        seqs.append(str(keep.get('seq')))
        new_acl.append(keep)
    return saveme, new_acl, seqs
def _acl_operand(operand, srcp1, sprcp2):
sub_entry = ' ' + operand
if operand == 'range':
sub_entry += ' ' + srcp1 + ' ' + sprcp2
else:
sub_entry += ' ' + srcp1
return sub_entry
def config_core_acl(proposed):
    """Build the core ACE command string from the *proposed* params dict.

    Remark entries render as '<seq> remark <text>'; normal entries render
    as '<seq> <action> <proto> <src>[ op ports] <dest>[ op ports]'.
    """
    seq = proposed.get('seq')
    remark = proposed.get('remark')
    if remark:
        # remark entries carry only the sequence number and the comment
        return seq + ' remark ' + remark

    def operand_clause(op, port1, port2):
        # mirrors _acl_operand: 'range' takes two ports, others take one
        if op == 'range':
            return ' ' + op + ' ' + port1 + ' ' + port2
        return ' ' + op + ' ' + port1

    ace = ' '.join([seq, proposed.get('action'), proposed.get('proto'),
                    proposed.get('src')])
    if proposed.get('src_port_op'):
        ace += operand_clause(proposed.get('src_port_op'),
                              proposed.get('src_port1'),
                              proposed.get('src_port2'))
    ace += ' ' + proposed.get('dest')
    if proposed.get('dest_port_op'):
        ace += operand_clause(proposed.get('dest_port_op'),
                              proposed.get('dest_port1'),
                              proposed.get('dest_port2'))
    return ace
def config_acl_options(options):
    """Render the trailing option keywords of an ACE command.

    Flag options (urg/ack/log/...) are emitted bare when their value is
    'enable'; value options (dscp/precedence/time-range) are emitted as
    'keyword value'. NOTE: mutates *options* by renaming the 'time_range'
    key to the CLI spelling 'time-range'.
    """
    ENABLE_ONLY = ['psh', 'urg', 'log', 'ack', 'syn',
                   'established', 'rst', 'fin', 'fragments']
    OTHER = ['dscp', 'precedence', 'time-range']
    # packet-length is the only option not currently supported
    if options.get('time_range'):
        # the module param uses an underscore; the CLI keyword a hyphen
        options['time-range'] = options.get('time_range')
        options.pop('time_range')
    command = ''
    # items() instead of the py2-only iteritems(): identical iteration on
    # py2, and required for py3 compatibility
    for option, value in options.items():
        if option in ENABLE_ONLY:
            if value == 'enable':
                command += ' ' + option
        elif option in OTHER:
            command += ' ' + option + ' ' + value
    if command:
        command = command.strip()
    return command
def flatten_list(command_lists):
    """Flatten one level of nesting: list entries are spliced in,
    scalar entries are kept as-is."""
    flattened = []
    for entry in command_lists:
        flattened.extend(entry if isinstance(entry, list) else [entry])
    return flattened
def execute_config_command(commands, module):
    """Push configuration commands, handling both module transport APIs.

    Uses the legacy ``module.configure`` when present; otherwise prepends
    'configure' and drives the newer ``module.cli`` interface (note this
    mutates *commands* in place). Shell errors abort via fail_json.
    """
    try:
        module.configure(commands)
    except ShellError:
        clie = get_exception()
        module.fail_json(msg='Error sending CLI commands',
                         error=str(clie), commands=commands)
    except AttributeError:
        # module.configure is gone on newer Ansible: use module.cli
        try:
            commands.insert(0, 'configure')
            module.cli.add_commands(commands, output='config')
            module.cli.run_commands()
        except ShellError:
            clie = get_exception()
            module.fail_json(msg='Error sending CLI commands',
                             error=str(clie), commands=commands)
def main():
    """Entry point for the nxos ACL Ansible module.

    Builds the argument spec, validates parameter combinations for the
    requested ``state``, diffs the proposed ACE against what already exists
    on the device, and pushes the resulting CLI commands (honoring check
    mode).  Exits via ``module.exit_json``/``module.fail_json``.
    """
    argument_spec = dict(
        seq=dict(required=False, type='str'),
        name=dict(required=True, type='str'),
        action=dict(required=False, choices=['remark', 'permit', 'deny']),
        remark=dict(required=False, type='str'),
        proto=dict(required=False, type='str'),
        src=dict(required=False, type='str'),
        src_port_op=dict(required=False),
        src_port1=dict(required=False, type='str'),
        src_port2=dict(required=False, type='str'),
        dest=dict(required=False, type='str'),
        dest_port_op=dict(required=False),
        dest_port1=dict(required=False, type='str'),
        dest_port2=dict(required=False, type='str'),
        log=dict(required=False, choices=['enable']),
        urg=dict(required=False, choices=['enable']),
        ack=dict(required=False, choices=['enable']),
        psh=dict(required=False, choices=['enable']),
        rst=dict(required=False, choices=['enable']),
        syn=dict(required=False, choices=['enable']),
        fragments=dict(required=False, choices=['enable']),
        fin=dict(required=False, choices=['enable']),
        established=dict(required=False, choices=['enable']),
        time_range=dict(required=False),
        precedence=dict(required=False, choices=['critical', 'flash',
                                                 'flash-override',
                                                 'immediate', 'internet',
                                                 'network', 'priority',
                                                 'routine']),
        dscp=dict(required=False, choices=['af11', 'af12', 'af13', 'af21',
                                           'af22', 'af23', 'af31', 'af32',
                                           'af33', 'af41', 'af42', 'af43',
                                           'cs1', 'cs2', 'cs3', 'cs4',
                                           'cs5', 'cs6', 'cs7', 'default',
                                           'ef']),
        state=dict(choices=['absent', 'present', 'delete_acl'],
                   default='present'),
        protocol=dict(choices=['http', 'https'], default='http'),
        host=dict(required=True),
        username=dict(type='str'),
        password=dict(no_log=True, type='str'),
        include_defaults=dict(default=False),
        config=dict(),
        save=dict(type='bool', default=False)
    )
    module = get_network_module(argument_spec=argument_spec,
                                supports_check_mode=True)

    state = module.params['state']
    action = module.params['action']
    remark = module.params['remark']
    dscp = module.params['dscp']
    precedence = module.params['precedence']
    # NOTE: previously this assignment was duplicated; read each param once.
    seq = module.params['seq']
    name = module.params['name']

    # A remark entry carries no proto/src/dest information, so the remark
    # text itself is mandatory.  (Message fixed: it used to read
    # "when state is action".)
    if action == 'remark' and not remark:
        module.fail_json(msg='when action is remark, remark param is also '
                             'required')

    REQUIRED = ['seq', 'name', 'action', 'proto', 'src', 'dest']
    ABSENT = ['name', 'seq']
    if state == 'present':
        # Remark entries only need action/remark/seq; everything else needs
        # the full 5-tuple.
        if action and remark and seq:
            pass
        else:
            for each in REQUIRED:
                if module.params[each] is None:
                    module.fail_json(msg="req'd params when state is present:",
                                     params=REQUIRED)
    elif state == 'absent':
        for each in ABSENT:
            if module.params[each] is None:
                module.fail_json(msg='require params when state is absent',
                                 params=ABSENT)
    elif state == 'delete_acl':
        if module.params['name'] is None:
            module.fail_json(msg="param name req'd when state is delete_acl")

    # dscp and precedence are mutually exclusive classifiers.
    if dscp and precedence:
        module.fail_json(msg='only one of the params dscp/precedence '
                             'are allowed')

    OPTIONS_NAMES = ['log', 'urg', 'ack', 'psh', 'rst', 'syn', 'fin',
                     'established', 'dscp', 'precedence', 'fragments',
                     'time_range']

    CORE = ['seq', 'name', 'action', 'proto', 'src', 'src_port_op',
            'src_port1', 'src_port2', 'dest', 'dest_port_op',
            'dest_port1', 'dest_port2', 'remark']

    proposed_core = dict((param, value) for (param, value) in
                         module.params.iteritems()
                         if param in CORE and value is not None)

    proposed_options = dict((param, value) for (param, value) in
                            module.params.iteritems()
                            if param in OPTIONS_NAMES and value is not None)
    proposed = {}
    proposed.update(proposed_core)
    proposed.update(proposed_options)

    existing_options = {}

    # getting existing existing_core=dict, acl=list, seq=list
    existing_core, acl, seqs = get_acl(module, name, seq)
    if existing_core:
        existing_options = existing_core.get('options')
        existing_core.pop('options')

    end_state = acl

    commands = []
    changed = False
    delta_core = {}
    delta_options = {}

    # Remarks are opaque text, so diffing core/options only makes sense for
    # non-remark entries.
    if not existing_core.get('remark'):
        delta_core = dict(
            set(proposed_core.iteritems()).difference(
                existing_core.iteritems())
        )
        delta_options = dict(
            set(proposed_options.iteritems()).difference(
                existing_options.iteritems())
        )

    if state == 'present':
        if delta_core or delta_options:
            if existing_core:  # if the ace exists already, remove it first
                commands.append(['no {0}'.format(seq)])
            if delta_options:
                myacl_str = config_core_acl(proposed_core)
                myacl_str += ' ' + config_acl_options(proposed_options)
            else:
                myacl_str = config_core_acl(proposed_core)
            command = [myacl_str]
            commands.append(command)
    elif state == 'absent':
        if existing_core:
            commands.append(['no {0}'.format(seq)])
    elif state == 'delete_acl':
        if acl[0].get('acl') != 'no_entries':
            commands.append(['no ip access-list {0}'.format(name)])

    results = {}
    cmds = []
    if commands:
        preface = []
        if state in ['present', 'absent']:
            # ACE-level commands must run inside the ACL config context.
            preface = ['ip access-list {0}'.format(name)]
            commands.insert(0, preface)

        cmds = flatten_list(commands)
        if module.check_mode:
            module.exit_json(changed=True, commands=cmds)
        else:
            execute_config_command(cmds, module)
            changed = True
            new_existing_core, end_state, seqs = get_acl(module, name, seq)
            if 'configure' in cmds:
                cmds.pop(0)

    results['proposed'] = proposed
    results['existing'] = existing_core
    results['changed'] = changed
    results['updates'] = cmds
    results['end_state'] = end_state

    module.exit_json(**results)


if __name__ == '__main__':
    main()
| gpl-3.0 |
chronicwaffle/PokemonGo-DesktopMap | app/pylibs/osx64/Cryptodome/SelfTest/Hash/test_SHA1.py | 2 | 2289 | # -*- coding: utf-8 -*-
#
# SelfTest/Hash/SHA1.py: Self-test for the SHA-1 hash function
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Cryptodome.Hash.SHA"""
from Cryptodome.Util.py3compat import *
# Test vectors from various sources
# This is a list of (expected_result, input[, description]) tuples.
# Each entry is (expected_hexdigest, input[, description]).
test_data = [
    # FIPS PUB 180-2, A.1 - "One-Block Message"
    ('a9993e364706816aba3e25717850c26c9cd0d89d', 'abc'),

    # FIPS PUB 180-2, A.2 - "Multi-Block Message"
    ('84983e441c3bd26ebaae4aa1f95129e5e54670f1',
        'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'),

    # FIPS PUB 180-2, A.3 - "Long Message"
    # NOTE(review): disabled upstream — presumably because hashing a
    # 10**6-byte input slows the self-test suite; confirm before re-enabling.
    # ('34aa973cd4c4daa4f61eeb2bdbad27316534016f',
    #     'a' * 10**6,
    #      '"a" * 10**6'),

    # RFC 3174: Section 7.3, "TEST4" (multiple of 512 bits)
    ('dea356a2cddd90c7a7ecedc5ebb563934f460452',
        '01234567' * 80,
        '"01234567" * 80'),
]
def get_tests(config={}):
    """Return the list of SHA-1 self-test cases.

    ``config`` is accepted for API symmetry with the other self-test modules
    and is unused here.
    NOTE(review): the mutable default is never mutated, so it is harmless;
    kept as-is for signature compatibility.
    """
    from Cryptodome.Hash import SHA1
    from common import make_hash_tests
    return make_hash_tests(SHA1, "SHA1", test_data,
                           digest_size=20,
                           oid="1.3.14.3.2.26")
if __name__ == '__main__':
    import unittest

    def suite():
        """Build the SHA-1 self-test suite for unittest.main's defaultTest lookup."""
        return unittest.TestSuite(get_tests())

    # PEP 8 (E731): use a named def instead of an assigned lambda; the name
    # 'suite' is still resolvable by unittest.main(defaultTest='suite').
    unittest.main(defaultTest='suite')

# vim:set ts=4 sw=4 sts=4 expandtab:
| mit |
jeremiedecock/snippets | python/pyserial/read.py | 1 | 2181 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jeremie Decock (http://www.jdhp.org)
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import argparse
import serial
import time
def main():
    """Open a serial port and continuously echo any bytes received on it."""

    # PARSE OPTIONS
    parser = argparse.ArgumentParser(description='A pyserial snippet.')
    parser.add_argument("--baudrate", "-b", help="The baudrate speed (e.g. 9600)", metavar="INTEGER", type=int, default=9600)
    parser.add_argument("--timeout", "-t", help="The timeout value for the connection", metavar="FLOAT", type=float, default=0.1)
    parser.add_argument("--port", "-p", help="The serial device to connect with (e.g. '/dev/ttyUSB0' for Unix users)", metavar="STRING", default="/dev/ttyUSB0")
    args = parser.parse_args()

    # CONNECT TO THE SERIAL PORT (8 data bits, no parity, one stop bit)
    connection = serial.Serial(port=args.port,
                               baudrate=args.baudrate,
                               timeout=args.timeout,
                               bytesize=serial.EIGHTBITS,
                               parity=serial.PARITY_NONE,
                               stopbits=serial.STOPBITS_ONE)
    connection.flushInput()

    # READ DATA forever, polling every 10 ms
    while True:
        time.sleep(0.01)
        pending = connection.inWaiting()
        data = connection.read(pending)
        if data:
            print(data)


if __name__ == '__main__':
    main()
| mit |
evandromr/python_scitools | plotperiodogram.py | 1 | 1814 | #!/bin/env python
import numpy as np
import scipy.signal as ss
import astropy.io.fits as fits
import matplotlib.pyplot as plt
# Prompt for the FITS light-curve file and the time resolution of the data.
inpt = str(raw_input("Nome do Arquivo: "))
lc = fits.open(inpt)
bin = float(raw_input("bin size (or camera resolution): "))
# NOTE(review): ``bin`` shadows the builtin of the same name; left unchanged
# here, but renaming (e.g. ``bin_size``) would be safer.

# Converting to native float64 arrays is necessary for the scipy.signal
# routines (the FITS columns may not be in a compatible byte order).
rate = np.array(lc[1].data["RATE"], dtype='float64')
time = np.array(lc[1].data["TIME"], dtype='float64')
time -= time.min()

# Exclude NaN and non-positive samples -------------------------
print ''
print 'Excluding nan and negative values...'
print ''

exclude = []
for i in xrange(len(rate)):
    if rate[i] > 0:
        pass
    else:
        # Collects indices of non-positive samples; NaN comparisons are
        # False, so NaNs land here too.
        exclude.append(i)

exclude = np.array(exclude)
nrate = np.delete(rate, exclude)
ntime = np.delete(time, exclude)
# --------------------------------------------

# normalize count rate to zero mean
nrate -= nrate.mean()

# maximum frequency, limited by the time resolution (fs = 1/bin)
freqmax = 1.0/bin

# The periodogram itself
f, p = ss.periodogram(nrate, fs=freqmax)#, nfft=1500)

print 'TIME =', max(time)

# Plot lightcurve on top panel
#plt.subplot(2, 1, 1)
#plt.plot(ntime, nrate, 'bo-')
#plt.xlabel('Time [s]', fontsize=12)
#plt.ylabel('Normalized Count Rate [counts/s]', fontsize=12)

# Plot powerspectrum on bottom panel
#plt.subplot(2, 1, 2)
#plt.plot(f, p, 'b.-', label='f = {0:.3e}'.format(f[np.argmax(p)]))
#plt.xlabel('Frequency [Hz]', fontsize=12)
#plt.ylabel('Power', fontsize=12)
#plt.legend(loc='best')

# show plot
#plt.show()

# Label the peak with its period (1/frequency of the max-power bin).
#plt.plot(f, p)
plt.plot(f, p, linestyle='steps', label='T$_{{peak}}$ = {0:.3f} s'.format(1.0/f[np.argmax(p)]))
plt.xlabel('Frequency (Hz)')
plt.ylabel('Power')
plt.xlim(min(f), max(f))
plt.legend(loc='best', frameon=False)
plt.savefig("periodogram.pdf", orientation='landscape', papertype='a4',
            format='pdf', bbox_inches='tight')
plt.show()
Paczesiowa/youtube-dl | youtube_dl/extractor/anysex.py | 224 | 2085 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
)
class AnySexIE(InfoExtractor):
    """youtube-dl information extractor for anysex.com video pages."""

    # Pages are identified purely by a numeric video id.
    _VALID_URL = r'https?://(?:www\.)?anysex\.com/(?P<id>\d+)'
    _TEST = {
        'url': 'http://anysex.com/156592/',
        'md5': '023e9fbb7f7987f5529a394c34ad3d3d',
        'info_dict': {
            'id': '156592',
            'ext': 'mp4',
            'title': 'Busty and sexy blondie in her bikini strips for you',
            'description': 'md5:de9e418178e2931c10b62966474e1383',
            'categories': ['Erotic'],
            'duration': 270,
            'age_limit': 18,
        }
    }

    def _real_extract(self, url):
        """Scrape the watch page and return the standard info dict."""
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')

        webpage = self._download_webpage(url, video_id)

        # The direct media URL is embedded in an inline JS assignment.
        video_url = self._html_search_regex(r"video_url\s*:\s*'([^']+)'", webpage, 'video URL')
        title = self._html_search_regex(r'<title>(.*?)</title>', webpage, 'title')
        # Optional metadata: all of these are fatal=False, so a layout change
        # degrades to None instead of failing the extraction.
        description = self._html_search_regex(
            r'<div class="description"[^>]*>([^<]+)</div>', webpage, 'description', fatal=False)
        thumbnail = self._html_search_regex(
            r'preview_url\s*:\s*\'(.*?)\'', webpage, 'thumbnail', fatal=False)

        categories = re.findall(
            r'<a href="http://anysex\.com/categories/[^"]+" title="[^"]*">([^<]+)</a>', webpage)

        duration = parse_duration(self._search_regex(
            r'<b>Duration:</b> (?:<q itemprop="duration">)?(\d+:\d+)', webpage, 'duration', fatal=False))
        view_count = int_or_none(self._html_search_regex(
            r'<b>Views:</b> (\d+)', webpage, 'view count', fatal=False))

        return {
            'id': video_id,
            'url': video_url,
            'ext': 'mp4',
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'categories': categories,
            'duration': duration,
            'view_count': view_count,
            'age_limit': 18,
        }
| unlicense |
sudheesh001/RFID-DBSync | venv/lib/python2.7/site-packages/setuptools/tests/server.py | 452 | 2651 | """Basic http server for tests to simulate PyPI or custom indexes
"""
import sys
import time
import threading
from setuptools.compat import BaseHTTPRequestHandler
from setuptools.compat import (urllib2, URLError, HTTPServer,
SimpleHTTPRequestHandler)
class IndexServer(HTTPServer):
    """Basic single-threaded http server simulating a package index

    You can use this server in unittest like this::
        s = IndexServer()
        s.start()
        index_url = s.base_url() + 'mytestindex'
        # do some test requests to the index
        # The index files should be located in setuptools/tests/indexes
        s.stop()
    """
    def __init__(self, server_address=('', 0),
                 RequestHandlerClass=SimpleHTTPRequestHandler):
        # Port 0 lets the OS pick a free port; see base_url()/server_port.
        HTTPServer.__init__(self, server_address, RequestHandlerClass)
        self._run = True  # loop flag checked by serve(), cleared by stop()

    def serve(self):
        # Handle one request at a time until stop() clears the flag.
        while self._run:
            self.handle_request()

    def start(self):
        # Run the serve loop in a background thread so the test can proceed.
        self.thread = threading.Thread(target=self.serve)
        self.thread.start()

    def stop(self):
        "Stop the server"

        # Let the server finish the last request and wait for a new one.
        time.sleep(0.1)

        # self.shutdown is not supported on python < 2.6, so just
        # set _run to false, and make a request, causing it to
        # terminate.
        self._run = False
        url = 'http://127.0.0.1:%(server_port)s/' % vars(self)
        try:
            if sys.version_info >= (2, 6):
                urllib2.urlopen(url, timeout=5)
            else:
                urllib2.urlopen(url)
        except URLError:
            # ignore any errors; all that's important is the request
            pass
        self.thread.join()
        self.socket.close()

    def base_url(self):
        # URL prefix pointing at the on-disk test index fixtures.
        port = self.server_port
        return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
class RequestRecorder(BaseHTTPRequestHandler):
    """Handler that records each GET request on the owning server object."""
    def do_GET(self):
        # Lazily create the server's ``requests`` list and append this
        # handler instance so tests can inspect what was requested.
        requests = vars(self.server).setdefault('requests', [])
        requests.append(self)
        self.send_response(200, 'OK')
class MockServer(HTTPServer, threading.Thread):
    """
    A simple HTTP Server that records the requests made to it.
    """
    def __init__(self, server_address=('', 0),
                 RequestHandlerClass=RequestRecorder):
        # Initialize both bases explicitly; the object is simultaneously a
        # server and the thread that runs it.
        HTTPServer.__init__(self, server_address, RequestHandlerClass)
        threading.Thread.__init__(self)
        self.setDaemon(True)  # don't block interpreter exit on this thread
        self.requests = []    # appended to by RequestRecorder.do_GET

    def run(self):
        # threading.Thread entry point.
        self.serve_forever()

    def url(self):
        return 'http://localhost:%(server_port)s/' % vars(self)
    url = property(url)  # pre-decorator property definition style
| gpl-2.0 |
mateor/pants | src/python/pants/build_graph/intermediate_target_factory.py | 5 | 2572 | # coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from hashlib import sha1
import six
from pants.base.exceptions import TargetDefinitionException
from pants.build_graph.address import Address
from pants.util.meta import AbstractClass
def hash_target(address, suffix):
  """Return a stable sha1 hex digest of ``address`` concatenated with ``suffix``.

  Used to build a deterministic, collision-resistant component of synthetic
  intermediate target names.

  :param address: Target address (text or bytes).
  :param suffix: Disambiguating suffix (text or bytes).
  :returns: 40-character hex digest string.
  """
  hasher = sha1()
  for part in (address, suffix):
    # hashlib requires bytes; encode text as UTF-8 so non-ASCII addresses
    # hash instead of raising (update(str) fails outright on Python 3).
    if not isinstance(part, bytes):
      part = part.encode('utf-8')
    hasher.update(part)
  return hasher.hexdigest()
class IntermediateTargetFactoryBase(AbstractClass):
  """Convenience factory which constructs an intermediate target with the appropriate attributes."""

  # Registry of (target_name, rel_path) pairs already materialized, shared
  # across all factory instances so each synthetic target is created once.
  _targets = set()

  class ExpectedAddressError(TargetDefinitionException):
    """Thrown if an object that is not an address is used as the dependency spec."""

  @classmethod
  def reset(cls):
    # Clear the shared registry (e.g. between runs/tests).
    cls._targets.clear()

  def __init__(self, parse_context):
    # parse_context supplies rel_path and create_object for the BUILD file
    # currently being parsed.
    self._parse_context = parse_context

  @property
  def extra_target_arguments(self):
    """Extra keyword arguments to pass to the target constructor."""
    return {}

  def _create_intermediate_target(self, address, suffix):
    """
    Create (at most once) a synthetic target depending on ``address`` and
    return a spec pointing at it.

    :param string address: A target address.
    :param string suffix: A string used as a suffix of the intermediate target name.
    :returns: The address of a synthetic intermediary target.
    """
    if not isinstance(address, six.string_types):
      raise self.ExpectedAddressError("Expected string address argument, got type {type}"
                                      .format(type=type(address)))

    address = Address.parse(address, self._parse_context.rel_path)
    # NB(gmalmquist): Ideally there should be a way to indicate that these targets are synthetic
    # and shouldn't show up in `./pants list` etc, because we really don't want people to write
    # handwritten dependencies on them. For now just give them names containing "-unstable-" as a
    # hint.
    hash_str = hash_target(str(address), suffix)
    name = '{name}-unstable-{suffix}-{index}'.format(
      name=address.target_name,
      suffix=suffix.replace(' ', '.'),
      index=hash_str,
    )
    if (name, self._parse_context.rel_path) not in self._targets:
      self._parse_context.create_object(
        'target',
        name=name,
        dependencies=[address.spec],
        **self.extra_target_arguments
      )
      self._targets.add((name, self._parse_context.rel_path))
    return ':{}'.format(name)
| apache-2.0 |
mcardillo55/django | django/contrib/syndication/views.py | 192 | 8680 | from __future__ import unicode_literals
from calendar import timegm
from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.template import TemplateDoesNotExist, loader
from django.utils import feedgenerator, six
from django.utils.encoding import force_text, iri_to_uri, smart_text
from django.utils.html import escape
from django.utils.http import http_date
from django.utils.timezone import get_default_timezone, is_naive, make_aware
def add_domain(domain, url, secure=False):
    """Return *url* as an absolute URL, anchored on *domain* when relative.

    Already-absolute http/https/mailto URLs pass through untouched; a
    network-path reference ("//host/path") just gains a protocol.
    """
    scheme = 'https' if secure else 'http'
    if url.startswith('//'):
        # Support network-path reference (see #16753) - RSS requires a protocol
        return '%s:%s' % (scheme, url)
    if url.startswith(('http://', 'https://', 'mailto:')):
        return url
    return iri_to_uri('%s://%s%s' % (scheme, domain, url))
class FeedDoesNotExist(ObjectDoesNotExist):
    """Raised when the object a feed is based on cannot be found."""
    pass
class Feed(object):
    """Base class for syndication feed views.

    Subclasses describe a feed by defining attributes or methods (``title``,
    ``link``, ``items``, ``item_title``, ...); both forms are resolved
    uniformly through ``__get_dynamic_attr``.  Instances are callable and
    act as Django views.
    """
    feed_type = feedgenerator.DefaultFeed
    title_template = None
    description_template = None

    def __call__(self, request, *args, **kwargs):
        # View entry point: resolve the feed's backing object, build the
        # feed, and serialize it into the HTTP response.
        try:
            obj = self.get_object(request, *args, **kwargs)
        except ObjectDoesNotExist:
            raise Http404('Feed object does not exist.')
        feedgen = self.get_feed(obj, request)
        response = HttpResponse(content_type=feedgen.content_type)
        if hasattr(self, 'item_pubdate') or hasattr(self, 'item_updateddate'):
            # if item_pubdate or item_updateddate is defined for the feed, set
            # header so as ConditionalGetMiddleware is able to send 304 NOT MODIFIED
            response['Last-Modified'] = http_date(
                timegm(feedgen.latest_post_date().utctimetuple()))
        feedgen.write(response, 'utf-8')
        return response

    def item_title(self, item):
        # Titles should be double escaped by default (see #6533)
        return escape(force_text(item))

    def item_description(self, item):
        return force_text(item)

    def item_link(self, item):
        # Default: delegate to the item's get_absolute_url().
        try:
            return item.get_absolute_url()
        except AttributeError:
            raise ImproperlyConfigured(
                'Give your %s class a get_absolute_url() method, or define an '
                'item_link() method in your Feed class.' % item.__class__.__name__
            )

    def __get_dynamic_attr(self, attname, obj, default=None):
        # Resolve ``attname`` on self as either a plain value or a callable
        # taking zero or one argument (besides ``self``); missing attributes
        # fall back to ``default``.
        try:
            attr = getattr(self, attname)
        except AttributeError:
            return default
        if callable(attr):
            # Check co_argcount rather than try/excepting the function and
            # catching the TypeError, because something inside the function
            # may raise the TypeError. This technique is more accurate.
            try:
                code = six.get_function_code(attr)
            except AttributeError:
                code = six.get_function_code(attr.__call__)
            if code.co_argcount == 2:       # one argument is 'self'
                return attr(obj)
            else:
                return attr()
        return attr

    def feed_extra_kwargs(self, obj):
        """
        Returns an extra keyword arguments dictionary that is used when
        initializing the feed generator.
        """
        return {}

    def item_extra_kwargs(self, item):
        """
        Returns an extra keyword arguments dictionary that is used with
        the `add_item` call of the feed generator.
        """
        return {}

    def get_object(self, request, *args, **kwargs):
        # Hook for subclasses whose feed depends on a URL-selected object.
        return None

    def get_context_data(self, **kwargs):
        """
        Returns a dictionary to use as extra context if either
        ``self.description_template`` or ``self.item_template`` are used.

        Default implementation preserves the old behavior
        of using {'obj': item, 'site': current_site} as the context.
        """
        return {'obj': kwargs.get('item'), 'site': kwargs.get('site')}

    def get_feed(self, obj, request):
        """
        Returns a feedgenerator.DefaultFeed object, fully populated, for
        this feed. Raises FeedDoesNotExist for invalid parameters.
        """
        current_site = get_current_site(request)

        link = self.__get_dynamic_attr('link', obj)
        link = add_domain(current_site.domain, link, request.is_secure())

        # Feed-level metadata, all resolved dynamically from the subclass.
        feed = self.feed_type(
            title=self.__get_dynamic_attr('title', obj),
            subtitle=self.__get_dynamic_attr('subtitle', obj),
            link=link,
            description=self.__get_dynamic_attr('description', obj),
            language=settings.LANGUAGE_CODE,
            feed_url=add_domain(
                current_site.domain,
                self.__get_dynamic_attr('feed_url', obj) or request.path,
                request.is_secure(),
            ),
            author_name=self.__get_dynamic_attr('author_name', obj),
            author_link=self.__get_dynamic_attr('author_link', obj),
            author_email=self.__get_dynamic_attr('author_email', obj),
            categories=self.__get_dynamic_attr('categories', obj),
            feed_copyright=self.__get_dynamic_attr('feed_copyright', obj),
            feed_guid=self.__get_dynamic_attr('feed_guid', obj),
            ttl=self.__get_dynamic_attr('ttl', obj),
            **self.feed_extra_kwargs(obj)
        )

        # Optional templates for per-item title/description; a missing
        # template silently falls back to the item_* attributes below.
        title_tmp = None
        if self.title_template is not None:
            try:
                title_tmp = loader.get_template(self.title_template)
            except TemplateDoesNotExist:
                pass

        description_tmp = None
        if self.description_template is not None:
            try:
                description_tmp = loader.get_template(self.description_template)
            except TemplateDoesNotExist:
                pass

        for item in self.__get_dynamic_attr('items', obj):
            context = self.get_context_data(item=item, site=current_site,
                                            obj=obj, request=request)
            if title_tmp is not None:
                title = title_tmp.render(context, request)
            else:
                title = self.__get_dynamic_attr('item_title', item)
            if description_tmp is not None:
                description = description_tmp.render(context, request)
            else:
                description = self.__get_dynamic_attr('item_description', item)
            link = add_domain(
                current_site.domain,
                self.__get_dynamic_attr('item_link', item),
                request.is_secure(),
            )
            # Enclosure (e.g. podcast media) is only built when a URL exists.
            enc = None
            enc_url = self.__get_dynamic_attr('item_enclosure_url', item)
            if enc_url:
                enc = feedgenerator.Enclosure(
                    url=smart_text(enc_url),
                    length=smart_text(self.__get_dynamic_attr('item_enclosure_length', item)),
                    mime_type=smart_text(self.__get_dynamic_attr('item_enclosure_mime_type', item))
                )
            author_name = self.__get_dynamic_attr('item_author_name', item)
            if author_name is not None:
                author_email = self.__get_dynamic_attr('item_author_email', item)
                author_link = self.__get_dynamic_attr('item_author_link', item)
            else:
                author_email = author_link = None

            tz = get_default_timezone()

            # Naive datetimes are localized to the default timezone so the
            # serialized feed has consistent offsets.
            pubdate = self.__get_dynamic_attr('item_pubdate', item)
            if pubdate and is_naive(pubdate):
                pubdate = make_aware(pubdate, tz)

            updateddate = self.__get_dynamic_attr('item_updateddate', item)
            if updateddate and is_naive(updateddate):
                updateddate = make_aware(updateddate, tz)

            feed.add_item(
                title=title,
                link=link,
                description=description,
                unique_id=self.__get_dynamic_attr('item_guid', item, link),
                unique_id_is_permalink=self.__get_dynamic_attr(
                    'item_guid_is_permalink', item),
                enclosure=enc,
                pubdate=pubdate,
                updateddate=updateddate,
                author_name=author_name,
                author_email=author_email,
                author_link=author_link,
                categories=self.__get_dynamic_attr('item_categories', item),
                item_copyright=self.__get_dynamic_attr('item_copyright', item),
                **self.item_extra_kwargs(item)
            )
        return feed
| bsd-3-clause |
hrayr-artunyan/shuup | shuup_tests/core/test_settings.py | 2 | 1185 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from django.apps import apps
from django.test import override_settings
from shuup.core import MissingSettingException
@pytest.mark.parametrize("setting_key, value, should_raise", [
    ("PARLER_DEFAULT_LANGUAGE_CODE", None, True),
    ("PARLER_DEFAULT_LANGUAGE_CODE", False, True),
    ("PARLER_DEFAULT_LANGUAGE_CODE", "", True),
    ("PARLER_DEFAULT_LANGUAGE_CODE", "en", False),
    ("PARLER_LANGUAGES", None, True),
    ("PARLER_LANGUAGES", False, True),
    ("PARLER_LANGUAGES", "", True),
    ("PARLER_LANGUAGES", {}, True),
    ("PARLER_LANGUAGES", {None: [1, 2]}, False),
    ("PARLER_LANGUAGES", {"en": [1, 2]}, False),
])
@pytest.mark.django_db
def test_parler_language_code(setting_key, value, should_raise):
    """App config readiness must reject missing/empty Parler settings and
    accept valid ones."""
    kwargs = {setting_key: value}
    with override_settings(**kwargs):
        if should_raise:
            with pytest.raises(MissingSettingException):
                apps.get_app_config('shuup').ready()
        else:
            # Bug fix: the valid (should_raise=False) cases previously never
            # called ready(), so regressions for good settings were invisible.
            # A raise here fails the test naturally.
            apps.get_app_config('shuup').ready()
| agpl-3.0 |
cc13ny/Allin | lintcode/234-000-Palindrome-Linked-List/PalindromeLinkedList.py | 5 | 1381 | # Definition for singly-linked list.
class ListNode:
    """A node in a singly-linked list."""

    def __init__(self, x):
        # New nodes start detached; callers link them via ``next``.
        self.next = None
        self.val = x
class Solution:
    # @param head, a ListNode
    # @return a boolean
    def isPalindrome(self, head):
        """Return True if the linked list starting at ``head`` reads the
        same forwards and backwards.

        Fixes over the previous version: removes leftover debug ``print``
        statements and no longer destroys the caller's list (the old code
        reversed the first half in place).  O(n) time, O(n) extra space.
        """
        values = []
        node = head
        while node:
            values.append(node.val)
            node = node.next
        # A sequence is a palindrome iff it equals its own reverse;
        # the empty list (head is None) is trivially a palindrome.
        return values == values[::-1]
ovnicraft/edx-platform | common/djangoapps/util/model_utils.py | 37 | 6903 | """
Utilities for django models.
"""
import unicodedata
import re
from eventtracking import tracker
from django.conf import settings
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from django_countries.fields import Country
# The setting name used for events when "settings" (account settings, preferences, profile information) change.
USER_SETTINGS_CHANGED_EVENT_NAME = u'edx.user.settings.changed'
def get_changed_fields_dict(instance, model_class):
    """
    Helper method for tracking field changes on a model.

    Given a model instance and class, return a dict whose keys are that
    instance's fields which differ from the last saved ones and whose values
    are the old values of those fields.  Related fields are not considered.

    Args:
        instance (Model instance): the model instance with changes that are
            being tracked
        model_class (Model class): the class of the model instance we are
            tracking

    Returns:
        dict: a mapping of field names to current database values of those
            fields, or an empty dict if the model is new
    """
    # NOTE(review): this performs one DB read per call; assumes instance.pk
    # is set for existing rows — confirm against callers.
    try:
        old_model = model_class.objects.get(pk=instance.pk)
    except model_class.DoesNotExist:
        # Object is new, so fields haven't technically changed. We'll return
        # an empty dict as a default value.
        return {}
    else:
        # We want to compare all of the scalar fields on the model, but none of
        # the relations.
        field_names = [f.name for f in model_class._meta.get_fields() if not f.is_relation]  # pylint: disable=protected-access
        changed_fields = {
            field_name: getattr(old_model, field_name) for field_name in field_names
            if getattr(old_model, field_name) != getattr(instance, field_name)
        }

        return changed_fields
def emit_field_changed_events(instance, user, db_table, excluded_fields=None, hidden_fields=None):
    """Emits a settings changed event for each field that has changed.

    Note that this function expects that a `_changed_fields` dict has been set
    as an attribute on `instance` (see `get_changed_fields_dict`.

    Args:
        instance (Model instance): the model instance that is being saved
        user (User): the user that this instance is associated with
        db_table (str): the name of the table that we're modifying
        excluded_fields (list): a list of field names for which events should
            not be emitted
        hidden_fields (list): a list of field names specifying fields whose
            values should not be included in the event (None will be used
            instead)

    Returns:
        None
    """
    def clean_field(field_name, value):
        """
        Prepare a field to be emitted in a JSON serializable format. If
        `field_name` is a hidden field, return None.
        """
        if field_name in hidden_fields:
            return None
        # Country is not JSON serializable. Return the country code.
        if isinstance(value, Country):
            if value.code:
                return value.code
            else:
                return None
        return value

    excluded_fields = excluded_fields or []
    hidden_fields = hidden_fields or []
    changed_fields = getattr(instance, '_changed_fields', {})
    for field_name in changed_fields:
        if field_name not in excluded_fields:
            # changed_fields holds the OLD values; the instance holds the NEW.
            old_value = clean_field(field_name, changed_fields[field_name])
            new_value = clean_field(field_name, getattr(instance, field_name))
            emit_setting_changed_event(user, db_table, field_name, old_value, new_value)
    # Remove the now inaccurate _changed_fields attribute.
    if hasattr(instance, '_changed_fields'):
        del instance._changed_fields
def truncate_fields(old_value, new_value):
    """
    Truncates old_value and new_value for analytics event emission if necessary.

    Args:
        old_value(obj): the value before the change
        new_value(obj): the new value being saved

    Returns:
        a dictionary with the following fields:
            'old': the truncated old value
            'new': the truncated new value
            'truncated': the list of fields that have been truncated
    """
    # Compute the maximum value length so that two copies can fit into the maximum event size
    # in addition to all the other fields recorded.
    # NOTE(review): integer division under Python 2; a float under Python 3 —
    # confirm intended behavior if this code is ever ported.
    max_value_length = settings.TRACK_MAX_EVENT / 4

    serialized_old_value, old_was_truncated = _get_truncated_setting_value(old_value, max_length=max_value_length)
    serialized_new_value, new_was_truncated = _get_truncated_setting_value(new_value, max_length=max_value_length)
    truncated_values = []
    if old_was_truncated:
        truncated_values.append("old")
    if new_was_truncated:
        truncated_values.append("new")

    return {'old': serialized_old_value, 'new': serialized_new_value, 'truncated': truncated_values}
def emit_setting_changed_event(user, db_table, setting_name, old_value, new_value):
    """Emits an event for a change in a setting.

    Args:
        user (User): the user that this setting is associated with.
        db_table (str): the name of the table that we're modifying.
        setting_name (str): the name of the setting being changed.
        old_value (object): the value before the change.
        new_value (object): the new value being saved.

    Returns:
        None
    """
    # Start from the truncated old/new values and fold in the identifying
    # metadata before emitting the tracking event.
    truncated_fields = truncate_fields(old_value, new_value)

    truncated_fields['setting'] = setting_name
    truncated_fields['user_id'] = user.id
    truncated_fields['table'] = db_table

    tracker.emit(
        USER_SETTINGS_CHANGED_EVENT_NAME,
        truncated_fields
    )
def _get_truncated_setting_value(value, max_length=None):
    """
    Returns the truncated form of a setting value.

    Returns:
        truncated_value (object): the possibly truncated version of the value.
        was_truncated (bool): returns true if the serialized value was truncated.
    """
    needs_truncation = (
        isinstance(value, basestring)
        and max_length is not None
        and len(value) > max_length
    )
    if needs_truncation:
        return value[:max_length], True
    return value, False
# Taken from Django 1.8 source code because it's not supported in 1.4
def slugify(value):
    """Converts value into a string suitable for readable URLs.

    Converts to ASCII. Converts spaces to hyphens. Removes characters that
    aren't alphanumerics, underscores, or hyphens. Converts to lowercase.
    Also strips leading and trailing whitespace.

    Args:
        value (string): String to slugify.
    """
    value = force_unicode(value)
    # Decompose accented characters and drop anything outside ASCII.
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
    # Strip disallowed characters, trim, and lowercase...
    value = re.sub(r'[^\w\s-]', '', value).strip().lower()
    # ...then collapse runs of whitespace/hyphens into a single hyphen.
    return mark_safe(re.sub(r'[-\s]+', '-', value))
| agpl-3.0 |
alexcuellar/odoo | openerp/http.py | 19 | 61695 | # -*- coding: utf-8 -*-
#----------------------------------------------------------
# OpenERP HTTP layer
#----------------------------------------------------------
import ast
import collections
import contextlib
import datetime
import errno
import functools
import getpass
import inspect
import logging
import mimetypes
import os
import pprint
import random
import re
import sys
import tempfile
import threading
import time
import traceback
import urlparse
import warnings
from zlib import adler32
import babel.core
import psycopg2
import simplejson
import werkzeug.contrib.sessions
import werkzeug.datastructures
import werkzeug.exceptions
import werkzeug.local
import werkzeug.routing
import werkzeug.wrappers
import werkzeug.wsgi
from werkzeug.wsgi import wrap_file
try:
import psutil
except ImportError:
psutil = None
import openerp
from openerp import SUPERUSER_ID
from openerp.service.server import memory_info
from openerp.service import security, model as service_model
from openerp.tools.func import lazy_property
from openerp.tools import ustr
_logger = logging.getLogger(__name__)
# dedicated loggers so RPC request/response tracing can be toggled
# independently of the module logger
rpc_request = logging.getLogger(__name__ + '.rpc.request')
rpc_response = logging.getLogger(__name__ + '.rpc.response')
# 1 week cache for statics as advised by Google Page Speed
STATIC_CACHE = 60 * 60 * 24 * 7
#----------------------------------------------------------
# RequestHandler
#----------------------------------------------------------
# Thread local global request object
_request_stack = werkzeug.local.LocalStack()
request = _request_stack()
"""
A global proxy that always redirect to the current request object.
"""
def replace_request_password(args):
    # The password travels as the 3rd positional argument of an RPC call;
    # mask it so the logs can be forwarded for diagnostics/debugging
    # without leaking credentials.
    masked = list(args)
    if len(masked) > 2:
        masked[2] = '*'
    return tuple(masked)
# don't trigger debugger for those exceptions, they carry user-facing warnings
# and indications, they're not necessarily indicative of anything being
# *broken*
# (this tuple is used with isinstance() checks in dispatch_rpc and
# WebRequest._handle_exception below)
NO_POSTMORTEM = (openerp.osv.orm.except_orm,
                 openerp.exceptions.AccessError,
                 openerp.exceptions.AccessDenied,
                 openerp.exceptions.Warning,
                 openerp.exceptions.RedirectWarning)
def dispatch_rpc(service_name, method, params):
    """ Handle a RPC call.
    This is pure Python code, the actual marshalling (from/to XML-RPC) is done
    in a upper layer.
    """
    try:
        rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
        rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
        if rpc_request_flag or rpc_response_flag:
            # timing/memory tracking is only paid for when debug logging is
            # enabled on the rpc loggers; memory needs optional psutil
            start_time = time.time()
            start_rss, start_vms = 0, 0
            if psutil:
                start_rss, start_vms = memory_info(psutil.Process(os.getpid()))
            if rpc_request and rpc_response_flag:
                openerp.netsvc.log(rpc_request, logging.DEBUG, '%s.%s' % (service_name, method), replace_request_password(params))
        threading.current_thread().uid = None
        threading.current_thread().dbname = None
        # route the call to the dispatcher of the requested service
        if service_name == 'common':
            dispatch = openerp.service.common.dispatch
        elif service_name == 'db':
            dispatch = openerp.service.db.dispatch
        elif service_name == 'object':
            dispatch = openerp.service.model.dispatch
        elif service_name == 'report':
            dispatch = openerp.service.report.dispatch
        else:
            dispatch = openerp.service.wsgi_server.rpc_handlers.get(service_name)
        result = dispatch(method, params)
        if rpc_request_flag or rpc_response_flag:
            end_time = time.time()
            end_rss, end_vms = 0, 0
            if psutil:
                end_rss, end_vms = memory_info(psutil.Process(os.getpid()))
            logline = '%s.%s time:%.3fs mem: %sk -> %sk (diff: %sk)' % (service_name, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
            if rpc_response_flag:
                openerp.netsvc.log(rpc_response, logging.DEBUG, logline, result)
            else:
                openerp.netsvc.log(rpc_request, logging.DEBUG, logline, replace_request_password(params), depth=1)
        return result
    except NO_POSTMORTEM:
        # user-facing warnings/indications: re-raise without post-mortem
        raise
    except openerp.exceptions.DeferredException, e:
        _logger.exception(openerp.tools.exception_to_unicode(e))
        openerp.tools.debugger.post_mortem(openerp.tools.config, e.traceback)
        raise
    except Exception, e:
        _logger.exception(openerp.tools.exception_to_unicode(e))
        openerp.tools.debugger.post_mortem(openerp.tools.config, sys.exc_info())
        raise
def local_redirect(path, query=None, keep_hash=False, forward_debug=True, code=303):
    """Redirect to a local *path*, optionally carrying over the current
    request's debug flag and (via redirect_with_hash) the URL fragment."""
    if not query:
        query = {}
    # propagate the debug mode of the current request into the target URL
    if request and request.debug:
        query['debug'] = '' if forward_debug else None
    url = path
    if query:
        url = '%s?%s' % (path, werkzeug.url_encode(query))
    if keep_hash:
        return redirect_with_hash(url, code)
    return werkzeug.utils.redirect(url, code)
def redirect_with_hash(url, code=303):
    # Most IE and Safari versions decided not to preserve location.hash upon
    # redirect. And even if IE10 pretends to support it, it still fails
    # inexplicably in case of multiple redirects (and we do have some).
    # See extensive test page at http://greenbytes.de/tech/tc/httpredirects/
    # Firefox handles it properly, so use a real HTTP redirect there.
    if request.httprequest.user_agent.browser in ('firefox',):
        return werkzeug.utils.redirect(url, code)
    if urlparse.urlparse(url, scheme='http').scheme not in ('http', 'https'):
        url = 'http://' + url
    # escape quote/angle-bracket so the URL cannot break out of the script
    sanitized = url.replace("'", "%27").replace("<", "%3C")
    return "<html><head><script>window.location = '%s' + location.hash;</script></head></html>" % sanitized
class WebRequest(object):
    """ Parent class for all Odoo Web request types, mostly deals with
    initialization and setup of the request object (the dispatching itself has
    to be handled by the subclasses)
    :param httprequest: a wrapped werkzeug Request object
    :type httprequest: :class:`werkzeug.wrappers.BaseRequest`
    .. attribute:: httprequest
        the original :class:`werkzeug.wrappers.Request` object provided to the
        request
    .. attribute:: params
        :class:`~collections.Mapping` of request parameters, not generally
        useful as they're provided directly to the handler method as keyword
        arguments
    """
    def __init__(self, httprequest):
        self.httprequest = httprequest
        self.httpresponse = None
        self.httpsession = httprequest.session
        self.disable_db = False
        self.uid = None
        # endpoint/arguments/auth_method are filled in later by set_handler()
        self.endpoint = None
        self.endpoint_arguments = None
        self.auth_method = None
        # database cursor, opened lazily by the `cr` property
        self._cr = None
        # prevents transaction commit, use when you catch an exception during handling
        self._failed = None
        # set db/uid trackers - they're cleaned up at the WSGI
        # dispatching phase in openerp.service.wsgi_server.application
        if self.db:
            threading.current_thread().dbname = self.db
        if self.session.uid:
            threading.current_thread().uid = self.session.uid
    @lazy_property
    def env(self):
        """
        The :class:`~openerp.api.Environment` bound to current request.
        Raises a :class:`RuntimeError` if the current requests is not bound
        to a database.
        """
        if not self.db:
            raise RuntimeError('request not bound to a database')
        return openerp.api.Environment(self.cr, self.uid, self.context)
    @lazy_property
    def context(self):
        """
        :class:`~collections.Mapping` of context values for the current
        request
        """
        return dict(self.session.context)
    @lazy_property
    def lang(self):
        # normalize the session language before exposing it (see
        # OpenERPSession._fix_lang)
        self.session._fix_lang(self.context)
        return self.context["lang"]
    @lazy_property
    def session(self):
        """
        a :class:`OpenERPSession` holding the HTTP session data for the
        current http session
        """
        return self.httprequest.session
    @property
    def cr(self):
        """
        :class:`~openerp.sql_db.Cursor` initialized for the current method
        call.
        Accessing the cursor when the current request uses the ``none``
        authentication will raise an exception.
        """
        # can not be a lazy_property because manual rollback in _call_function
        # if already set (?)
        if not self.db:
            raise RuntimeError('request not bound to a database')
        if not self._cr:
            self._cr = self.registry.cursor()
        return self._cr
    def __enter__(self):
        # make this request the current one, exposed through the
        # module-level `request` proxy
        _request_stack.push(self)
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        _request_stack.pop()
        if self._cr:
            # commit only when the handler finished without error and
            # _handle_exception did not flag the request as failed
            if exc_type is None and not self._failed:
                self._cr.commit()
            self._cr.close()
        # just to be sure no one tries to re-use the request
        self.disable_db = True
        self.uid = None
    def set_handler(self, endpoint, arguments, auth):
        # is this needed ?
        arguments = dict((k, v) for k, v in arguments.iteritems()
                         if not k.startswith("_ignored_"))
        self.endpoint_arguments = arguments
        self.endpoint = endpoint
        self.auth_method = auth
    def _handle_exception(self, exception):
        """Called within an except block to allow converting exceptions
        to abitrary responses. Anything returned (except None) will
        be used as response."""
        self._failed = exception # prevent tx commit
        if not isinstance(exception, NO_POSTMORTEM) \
                and not isinstance(exception, werkzeug.exceptions.HTTPException):
            openerp.tools.debugger.post_mortem(
                openerp.tools.config, sys.exc_info())
        raise
    def _call_function(self, *args, **kwargs):
        # Invoke the routed endpoint, merging in the routing arguments and
        # retrying on database concurrency errors via service_model.check.
        request = self
        if self.endpoint.routing['type'] != self._request_type:
            msg = "%s, %s: Function declared as capable of handling request of type '%s' but called with a request of type '%s'"
            params = (self.endpoint.original, self.httprequest.path, self.endpoint.routing['type'], self._request_type)
            _logger.error(msg, *params)
            raise werkzeug.exceptions.BadRequest(msg % params)
        if self.endpoint_arguments:
            kwargs.update(self.endpoint_arguments)
        # Backward for 7.0
        if self.endpoint.first_arg_is_req:
            args = (request,) + args
        # Correct exception handling and concurency retry
        @service_model.check
        def checked_call(___dbname, *a, **kw):
            # The decorator can call us more than once if there is an database error. In this
            # case, the request cursor is unusable. Rollback transaction to create a new one.
            if self._cr:
                self._cr.rollback()
                self.env.clear()
            return self.endpoint(*a, **kw)
        if self.db:
            return checked_call(self.db, *args, **kwargs)
        return self.endpoint(*args, **kwargs)
    @property
    def debug(self):
        """ Indicates whether the current request is in "debug" mode
        """
        return 'debug' in self.httprequest.args
    @contextlib.contextmanager
    def registry_cr(self):
        warnings.warn('please use request.registry and request.cr directly', DeprecationWarning)
        yield (self.registry, self.cr)
    @lazy_property
    def session_id(self):
        """
        opaque identifier for the :class:`OpenERPSession` instance of
        the current request
        .. deprecated:: 8.0
            Use the ``sid`` attribute on :attr:`.session`
        """
        return self.session.sid
    @property
    def registry(self):
        """
        The registry to the database linked to this request. Can be ``None``
        if the current request uses the ``none`` authentication.
        .. deprecated:: 8.0
            use :attr:`.env`
        """
        return openerp.modules.registry.RegistryManager.get(self.db) if self.db else None
    @property
    def db(self):
        """
        The database linked to this request. Can be ``None``
        if the current request uses the ``none`` authentication.
        """
        return self.session.db if not self.disable_db else None
    @lazy_property
    def httpsession(self):
        """ HTTP session data
        .. deprecated:: 8.0
            Use :attr:`.session` instead.
        """
        return self.session
def route(route=None, **kw):
    """
    Decorator marking the decorated method as being a handler for
    requests. The method must be part of a subclass of ``Controller``.
    :param route: string or array. The route part that will determine which
                  http requests will match the decorated method. Can be a
                  single string or an array of strings. See werkzeug's routing
                  documentation for the format of route expression (
                  http://werkzeug.pocoo.org/docs/routing/ ).
    :param type: The type of request, can be ``'http'`` or ``'json'``.
    :param auth: The type of authentication method, can on of the following:
                 * ``user``: The user must be authenticated and the current request
                   will perform using the rights of the user.
                 * ``public``: The user may or may not be authenticated. If she isn't,
                   the current request will perform using the shared Public user.
                 * ``none``: The method is always active, even if there is no
                   database. Mainly used by the framework and authentication
                   modules. There request code will not have any facilities to access
                   the database nor have any configuration indicating the current
                   database nor the current user.
    :param methods: A sequence of http methods this route applies to. If not
                    specified, all methods are allowed.
    :param cors: The Access-Control-Allow-Origin cors directive value.
    """
    routing = kw.copy()
    assert not 'type' in routing or routing['type'] in ("http", "json")
    def decorator(f):
        if route:
            if isinstance(route, list):
                routes = route
            else:
                routes = [route]
            routing['routes'] = routes
        @functools.wraps(f)
        def response_wrap(*args, **kw):
            # coerce the handler's return value into a proper Response:
            # strings become HTML responses, werkzeug exceptions/responses
            # are converted; json handlers pass through untouched
            response = f(*args, **kw)
            if isinstance(response, Response) or f.routing_type == 'json':
                return response
            if isinstance(response, basestring):
                return Response(response)
            if isinstance(response, werkzeug.exceptions.HTTPException):
                response = response.get_response(request.httprequest.environ)
            if isinstance(response, werkzeug.wrappers.BaseResponse):
                response = Response.force_type(response)
                response.set_default()
                return response
            _logger.warn("<function %s.%s> returns an invalid response type for an http request" % (f.__module__, f.__name__))
            return response
        # the routing metadata is read later by ControllerType/routing_map
        response_wrap.routing = routing
        response_wrap.original_func = f
        return response_wrap
    return decorator
class JsonRequest(WebRequest):
    """ Request handler for `JSON-RPC 2
    <http://www.jsonrpc.org/specification>`_ over HTTP
    * ``method`` is ignored
    * ``params`` must be a JSON object (not an array) and is passed as keyword
      arguments to the handler method
    * the handler method's result is returned as JSON-RPC ``result`` and
      wrapped in the `JSON-RPC Response
      <http://www.jsonrpc.org/specification#response_object>`_
    Sucessful request::
      --> {"jsonrpc": "2.0",
           "method": "call",
           "params": {"context": {},
                      "arg1": "val1" },
           "id": null}
      <-- {"jsonrpc": "2.0",
           "result": { "res1": "val1" },
           "id": null}
    Request producing a error::
      --> {"jsonrpc": "2.0",
           "method": "call",
           "params": {"context": {},
                      "arg1": "val1" },
           "id": null}
      <-- {"jsonrpc": "2.0",
           "error": {"code": 1,
                     "message": "End user error message.",
                     "data": {"code": "codestring",
                              "debug": "traceback" } },
           "id": null}
    """
    _request_type = "json"
    def __init__(self, *args):
        super(JsonRequest, self).__init__(*args)
        self.jsonp_handler = None
        args = self.httprequest.args
        jsonp = args.get('jsonp')
        self.jsonp = jsonp
        request = None
        request_id = args.get('id')
        if jsonp and self.httprequest.method == 'POST':
            # jsonp 2 steps step1 POST: save call
            def handler():
                self.session['jsonp_request_%s' % (request_id,)] = self.httprequest.form['r']
                self.session.modified = True
                headers=[('Content-Type', 'text/plain; charset=utf-8')]
                r = werkzeug.wrappers.Response(request_id, headers=headers)
                return r
            self.jsonp_handler = handler
            return
        elif jsonp and args.get('r'):
            # jsonp method GET
            request = args.get('r')
        elif jsonp and request_id:
            # jsonp 2 steps step2 GET: run and return result
            request = self.session.pop('jsonp_request_%s' % (request_id,), '{}')
        else:
            # regular jsonrpc2
            request = self.httprequest.stream.read()
        # Read POST content or POST Form Data named "request"
        try:
            self.jsonrequest = simplejson.loads(request)
        except simplejson.JSONDecodeError:
            msg = 'Invalid JSON data: %r' % (request,)
            _logger.error('%s: %s', self.httprequest.path, msg)
            raise werkzeug.exceptions.BadRequest(msg)
        self.params = dict(self.jsonrequest.get("params", {}))
        self.context = self.params.pop('context', dict(self.session.context))
    def _json_response(self, result=None, error=None):
        # Build the JSON-RPC 2.0 envelope, echoing back the request id.
        response = {
            'jsonrpc': '2.0',
            'id': self.jsonrequest.get('id')
        }
        if error is not None:
            response['error'] = error
        if result is not None:
            response['result'] = result
        if self.jsonp:
            # If we use jsonp, that's mean we are called from another host
            # Some browser (IE and Safari) do no allow third party cookies
            # We need then to manage http sessions manually.
            response['session_id'] = self.session_id
            mime = 'application/javascript'
            body = "%s(%s);" % (self.jsonp, simplejson.dumps(response),)
        else:
            mime = 'application/json'
            body = simplejson.dumps(response)
        return Response(
            body, headers=[('Content-Type', mime),
                           ('Content-Length', len(body))])
    def _handle_exception(self, exception):
        """Called within an except block to allow converting exceptions
        to arbitrary responses. Anything returned (except None) will
        be used as response."""
        try:
            return super(JsonRequest, self)._handle_exception(exception)
        except Exception:
            if not isinstance(exception, (openerp.exceptions.Warning, SessionExpiredException)):
                _logger.exception("Exception during JSON request handling.")
            error = {
                'code': 200,
                'message': "Odoo Server Error",
                'data': serialize_exception(exception)
            }
            if isinstance(exception, AuthenticationError):
                error['code'] = 100
                error['message'] = "Odoo Session Invalid"
            if isinstance(exception, SessionExpiredException):
                error['code'] = 100
                error['message'] = "Odoo Session Expired"
            return self._json_response(error=error)
    def dispatch(self):
        if self.jsonp_handler:
            return self.jsonp_handler()
        try:
            # like dispatch_rpc() at module level: only collect timing and
            # (with psutil) memory figures when debug logging is enabled
            rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
            rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
            if rpc_request_flag or rpc_response_flag:
                endpoint = self.endpoint.method.__name__
                model = self.params.get('model')
                method = self.params.get('method')
                args = self.params.get('args', [])
                start_time = time.time()
                _, start_vms = 0, 0
                if psutil:
                    _, start_vms = memory_info(psutil.Process(os.getpid()))
                if rpc_request and rpc_response_flag:
                    rpc_request.debug('%s: %s %s, %s',
                                      endpoint, model, method, pprint.pformat(args))
            result = self._call_function(**self.params)
            if rpc_request_flag or rpc_response_flag:
                end_time = time.time()
                _, end_vms = 0, 0
                if psutil:
                    _, end_vms = memory_info(psutil.Process(os.getpid()))
                logline = '%s: %s %s: time:%.3fs mem: %sk -> %sk (diff: %sk)' % (
                    endpoint, model, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
                if rpc_response_flag:
                    rpc_response.debug('%s, %s', logline, pprint.pformat(result))
                else:
                    rpc_request.debug(logline)
            return self._json_response(result)
        except Exception, e:
            return self._handle_exception(e)
def serialize_exception(e):
    """Convert an exception into a JSON-serializable dict (used as the
    ``data`` payload of JSON-RPC error responses)."""
    exc_type = type(e)
    qualified_name = exc_type.__name__
    if exc_type.__module__:
        qualified_name = "%s.%s" % (exc_type.__module__, qualified_name)
    serialized = {
        "name": qualified_name,
        "debug": traceback.format_exc(),
        "message": ustr(e),
        "arguments": to_jsonable(e.args),
    }
    # tag well-known OpenERP exception classes so the client can pick an
    # appropriate presentation; keep this isinstance chain order as-is
    if isinstance(e, openerp.osv.osv.except_osv):
        serialized["exception_type"] = "except_osv"
    elif isinstance(e, openerp.exceptions.Warning):
        serialized["exception_type"] = "warning"
    elif isinstance(e, openerp.exceptions.AccessError):
        serialized["exception_type"] = "access_error"
    elif isinstance(e, openerp.exceptions.AccessDenied):
        serialized["exception_type"] = "access_denied"
    return serialized
def to_jsonable(o):
    """Recursively convert *o* to JSON-friendly primitives; containers are
    rebuilt, anything unknown is stringified with ustr()."""
    # primitives (and None) pass through unchanged
    if isinstance(o, str) or isinstance(o, unicode) or isinstance(o, int) or isinstance(o, long) \
            or isinstance(o, bool) or o is None or isinstance(o, float):
        return o
    if isinstance(o, (list, tuple)):
        return [to_jsonable(item) for item in o]
    if isinstance(o, dict):
        converted = {}
        for key, val in o.items():
            converted[u"%s" % key] = to_jsonable(val)
        return converted
    return ustr(o)
def jsonrequest(f):
    """
    .. deprecated:: 8.0
        Use the :func:`~openerp.http.route` decorator instead.
    """
    # v7-style registration: route on the function name (empty for index)
    base = "" if f.__name__ == "index" else f.__name__.lstrip('/')
    return route([base, base + "/<path:_ignored_path>"], type="json", auth="user", combine=True)(f)
class HttpRequest(WebRequest):
    """ Handler for the ``http`` request type.
    matched routing parameters, query string parameters, form_ parameters
    and files are passed to the handler method as keyword arguments.
    In case of name conflict, routing parameters have priority.
    The handler method's result can be:
    * a falsy value, in which case the HTTP response will be an
      `HTTP 204`_ (No Content)
    * a werkzeug Response object, which is returned as-is
    * a ``str`` or ``unicode``, will be wrapped in a Response object and
      interpreted as HTML
    .. _form: http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4.2
    .. _HTTP 204: http://tools.ietf.org/html/rfc7231#section-6.3.5
    """
    _request_type = "http"
    def __init__(self, *args):
        super(HttpRequest, self).__init__(*args)
        # merge query string, form data and uploaded files into one mapping
        params = self.httprequest.args.to_dict()
        params.update(self.httprequest.form.to_dict())
        params.update(self.httprequest.files.to_dict())
        params.pop('session_id', None)
        self.params = params
    def _handle_exception(self, exception):
        """Called within an except block to allow converting exceptions
        to abitrary responses. Anything returned (except None) will
        be used as response."""
        try:
            return super(HttpRequest, self)._handle_exception(exception)
        except SessionExpiredException:
            # send the user to the login page, preserving the original URL
            if not request.params.get('noredirect'):
                query = werkzeug.urls.url_encode({
                    'redirect': request.httprequest.url,
                })
                return werkzeug.utils.redirect('/web/login?%s' % query)
        except werkzeug.exceptions.HTTPException, e:
            return e
    def dispatch(self):
        # CORS preflight: answer OPTIONS on cors-enabled routes without
        # invoking the handler
        if request.httprequest.method == 'OPTIONS' and request.endpoint and request.endpoint.routing.get('cors'):
            headers = {
                'Access-Control-Max-Age': 60 * 60 * 24,
                'Access-Control-Allow-Headers': 'Origin, X-Requested-With, Content-Type, Accept, X-Debug-Mode'
            }
            return Response(status=200, headers=headers)
        r = self._call_function(**self.params)
        if not r:
            r = Response(status=204)  # no content
        return r
    def make_response(self, data, headers=None, cookies=None):
        """ Helper for non-HTML responses, or HTML responses with custom
        response headers or cookies.
        While handlers can just return the HTML markup of a page they want to
        send as a string if non-HTML data is returned they need to create a
        complete response object, or the returned data will not be correctly
        interpreted by the clients.
        :param basestring data: response body
        :param headers: HTTP headers to set on the response
        :type headers: ``[(name, value)]``
        :param collections.Mapping cookies: cookies to set on the client
        """
        response = Response(data, headers=headers)
        if cookies:
            for k, v in cookies.iteritems():
                response.set_cookie(k, v)
        return response
    def render(self, template, qcontext=None, lazy=True, **kw):
        """ Lazy render of a QWeb template.
        The actual rendering of the given template will occur at then end of
        the dispatching. Meanwhile, the template and/or qcontext can be
        altered or even replaced by a static response.
        :param basestring template: template to render
        :param dict qcontext: Rendering context to use
        :param bool lazy: whether the template rendering should be deferred
                          until the last possible moment
        :param kw: forwarded to werkzeug's Response object
        """
        response = Response(template=template, qcontext=qcontext, **kw)
        if not lazy:
            return response.render()
        return response
    def not_found(self, description=None):
        """ Shortcut for a `HTTP 404
        <http://tools.ietf.org/html/rfc7231#section-6.5.4>`_ (Not Found)
        response
        """
        return werkzeug.exceptions.NotFound(description)
def httprequest(f):
    """
    .. deprecated:: 8.0
        Use the :func:`~openerp.http.route` decorator instead.
    """
    # v7-style registration: route on the function name (empty for index)
    base = "" if f.__name__ == "index" else f.__name__.lstrip('/')
    return route([base, base + "/<path:_ignored_path>"], type="http", auth="user", combine=True)(f)
#----------------------------------------------------------
# Controller and route registration
#----------------------------------------------------------
# registries filled at addon import time (outside this module's view):
# NOTE(review): addons_module/addons_manifest appear to map addon names to
# their module object / manifest data — confirm against the loader code
addons_module = {}
addons_manifest = {}
# module name -> list of (fully qualified class name, class), filled by
# ControllerType below
controllers_per_module = collections.defaultdict(list)
class ControllerType(type):
    """Metaclass of Controller: flags 7.0-style handlers taking the request
    as first argument, propagates the routing type across overrides and
    registers controller classes per addon module."""
    def __init__(cls, name, bases, attrs):
        super(ControllerType, cls).__init__(name, bases, attrs)
        # flag old-style methods with req as first argument
        for k, v in attrs.items():
            if inspect.isfunction(v) and hasattr(v, 'original_func'):
                # Set routing type on original functions
                routing_type = v.routing.get('type')
                parent = [claz for claz in bases if isinstance(claz, ControllerType) and hasattr(claz, k)]
                parent_routing_type = getattr(parent[0], k).original_func.routing_type if parent else routing_type or 'http'
                # an override may not change the type declared by the parent
                if routing_type is not None and routing_type is not parent_routing_type:
                    routing_type = parent_routing_type
                    _logger.warn("Subclass re-defines <function %s.%s.%s> with different type than original."
                                 " Will use original type: %r" % (cls.__module__, cls.__name__, k, parent_routing_type))
                v.original_func.routing_type = routing_type or parent_routing_type
                spec = inspect.getargspec(v.original_func)
                first_arg = spec.args[1] if len(spec.args) >= 2 else None
                if first_arg in ["req", "request"]:
                    v._first_arg_is_req = True
        # store the controller in the controllers list
        name_class = ("%s.%s" % (cls.__module__, cls.__name__), cls)
        class_path = name_class[0].split(".")
        if not class_path[:2] == ["openerp", "addons"]:
            module = ""
        else:
            # we want to know all modules that have controllers
            module = class_path[2]
        # but we only store controllers directly inheriting from Controller
        if not "Controller" in globals() or not Controller in bases:
            return
        controllers_per_module[module].append(name_class)
class Controller(object):
    """Base class of all HTTP controllers; the ControllerType metaclass
    records each class inheriting directly from Controller into
    controllers_per_module."""
    __metaclass__ = ControllerType
class EndPoint(object):
    """Couples a routed handler with its merged routing metadata.
    Calling the EndPoint forwards straight to the wrapped handler."""
    def __init__(self, method, routing):
        self.method = method
        self.routing = routing
        self.arguments = {}
        # reach through the @route wrapper to the user-defined function
        self.original = getattr(method, 'original_func', method)
    @property
    def first_arg_is_req(self):
        # Backward compatibility for 7.0-style handlers that received the
        # request as their first positional argument.
        return getattr(self.method, '_first_arg_is_req', False)
    def __call__(self, *args, **kw):
        return self.method(*args, **kw)
def routing_map(modules, nodb_only, converters=None):
    """Build the werkzeug routing map for the controllers of *modules*.
    :param modules: iterable of addon module names to expose
    :param nodb_only: when True, only register routes whose auth is
        ``"none"`` (usable without a database)
    :param converters: optional custom werkzeug URL converters
    :returns: a :class:`werkzeug.routing.Map`
    """
    routing_map = werkzeug.routing.Map(strict_slashes=False, converters=converters)
    def get_subclasses(klass):
        # deepest subclasses of klass defined by one of the selected
        # modules; falls back to klass itself when none qualifies
        def valid(c):
            return c.__module__.startswith('openerp.addons.') and c.__module__.split(".")[2] in modules
        subclasses = klass.__subclasses__()
        result = []
        for subclass in subclasses:
            if valid(subclass):
                result.extend(get_subclasses(subclass))
        if not result and valid(klass):
            result = [klass]
        return result
    uniq = lambda it: collections.OrderedDict((id(x), x) for x in it).values()
    for module in modules:
        if module not in controllers_per_module:
            continue
        for _, cls in controllers_per_module[module]:
            # merge all installed extensions of the controller into a
            # synthetic subclass so overridden handlers win
            subclasses = uniq(c for c in get_subclasses(cls) if c is not cls)
            if subclasses:
                name = "%s (extended by %s)" % (cls.__name__, ', '.join(sub.__name__ for sub in subclasses))
                cls = type(name, tuple(reversed(subclasses)), {})
            o = cls()
            members = inspect.getmembers(o, inspect.ismethod)
            for _, mv in members:
                if hasattr(mv, 'routing'):
                    routing = dict(type='http', auth='user', methods=None, routes=None)
                    methods_done = list()
                    # update routing attributes from subclasses(auth, methods...)
                    for claz in reversed(mv.im_class.mro()):
                        fn = getattr(claz, mv.func_name, None)
                        if fn and hasattr(fn, 'routing') and fn not in methods_done:
                            methods_done.append(fn)
                            routing.update(fn.routing)
                    if not nodb_only or routing['auth'] == "none":
                        assert routing['routes'], "Method %r has not route defined" % mv
                        endpoint = EndPoint(mv, routing)
                        for url in routing['routes']:
                            if routing.get("combine", False):
                                # deprecated v7 declaration
                                url = o._cp_path.rstrip('/') + '/' + url.lstrip('/')
                                if url.endswith("/") and len(url) > 1:
                                    url = url[: -1]
                            # forward werkzeug Rule options present in routing
                            xtra_keys = 'defaults subdomain build_only strict_slashes redirect_to alias host'.split()
                            kw = {k: routing[k] for k in xtra_keys if k in routing}
                            routing_map.add(werkzeug.routing.Rule(url, endpoint=endpoint, methods=routing['methods'], **kw))
    return routing_map
#----------------------------------------------------------
# HTTP Sessions
#----------------------------------------------------------
class AuthenticationError(Exception):
    """Raised when RPC credentials are rejected (see
    OpenERPSession.assert_valid)."""
    pass
class SessionExpiredException(Exception):
    """Raised when a request carries a session that is no longer valid
    (no db/uid, or the security check failed); http handlers convert it
    into a redirect to the login page."""
    pass
class Service(object):
    """
    .. deprecated:: 8.0
        Use :func:`dispatch_rpc` instead.
    Lazy RPC proxy: accessing any attribute yields a callable that
    forwards to ``dispatch_rpc(service_name, <attribute>, args)``.
    """
    def __init__(self, session, service_name):
        self.session = session
        self.service_name = service_name
    def __getattr__(self, method):
        def proxy_method(*args):
            return dispatch_rpc(self.service_name, method, args)
        return proxy_method
class Model(object):
    """
    .. deprecated:: 8.0
        Use the registry and cursor in :data:`request` instead.
    """
    def __init__(self, session, model):
        self.session = session
        self.model = model
        self.proxy = self.session.proxy('object')
    def __getattr__(self, method):
        # any attribute access becomes a bound call on the model; the
        # session must still be valid
        self.session.assert_valid()
        def proxy(*args, **kw):
            # Can't provide any retro-compatibility for this case, so we check it and raise an Exception
            # to tell the programmer to adapt his code
            if not request.db or not request.uid or self.session.db != request.db \
                    or self.session.uid != request.uid:
                raise Exception("Trying to use Model with badly configured database or user.")
            if method.startswith('_'):
                raise Exception("Access denied")
            mod = request.registry[self.model]
            meth = getattr(mod, method)
            # make sure to instantiate an environment
            cr = request.env.cr
            result = meth(cr, request.uid, *args, **kw)
            # reorder read
            if method == "read":
                # re-sort read() results to match the order of the
                # requested ids (args[0])
                if isinstance(result, list) and len(result) > 0 and "id" in result[0]:
                    index = {}
                    for r in result:
                        index[r['id']] = r
                    result = [index[x] for x in args[0] if x in index]
            return result
        return proxy
class OpenERPSession(werkzeug.contrib.sessions.Session):
def __init__(self, *args, **kwargs):
self.inited = False
self.modified = False
self.rotate = False
super(OpenERPSession, self).__init__(*args, **kwargs)
self.inited = True
self._default_values()
self.modified = False
def __getattr__(self, attr):
return self.get(attr, None)
def __setattr__(self, k, v):
if getattr(self, "inited", False):
try:
object.__getattribute__(self, k)
except:
return self.__setitem__(k, v)
object.__setattr__(self, k, v)
def authenticate(self, db, login=None, password=None, uid=None):
"""
Authenticate the current user with the given db, login and
password. If successful, store the authentication parameters in the
current session and request.
:param uid: If not None, that user id will be used instead the login
to authenticate the user.
"""
if uid is None:
wsgienv = request.httprequest.environ
env = dict(
base_location=request.httprequest.url_root.rstrip('/'),
HTTP_HOST=wsgienv['HTTP_HOST'],
REMOTE_ADDR=wsgienv['REMOTE_ADDR'],
)
uid = dispatch_rpc('common', 'authenticate', [db, login, password, env])
else:
security.check(db, uid, password)
self.db = db
self.uid = uid
self.login = login
self.password = password
request.uid = uid
request.disable_db = False
if uid: self.get_context()
return uid
def check_security(self):
"""
Check the current authentication parameters to know if those are still
valid. This method should be called at each request. If the
authentication fails, a :exc:`SessionExpiredException` is raised.
"""
if not self.db or not self.uid:
raise SessionExpiredException("Session expired")
security.check(self.db, self.uid, self.password)
def logout(self, keep_db=False):
for k in self.keys():
if not (keep_db and k == 'db'):
del self[k]
self._default_values()
self.rotate = True
def _default_values(self):
self.setdefault("db", None)
self.setdefault("uid", None)
self.setdefault("login", None)
self.setdefault("password", None)
self.setdefault("context", {})
def get_context(self):
"""
Re-initializes the current user's session context (based on his
preferences) by calling res.users.get_context() with the old context.
:returns: the new context
"""
assert self.uid, "The user needs to be logged-in to initialize his context"
self.context = request.registry.get('res.users').context_get(request.cr, request.uid) or {}
self.context['uid'] = self.uid
self._fix_lang(self.context)
return self.context
def _fix_lang(self, context):
""" OpenERP provides languages which may not make sense and/or may not
be understood by the web client's libraries.
Fix those here.
:param dict context: context to fix
"""
lang = context.get('lang')
# inane OpenERP locale
if lang == 'ar_AR':
lang = 'ar'
# lang to lang_REGION (datejs only handles lang_REGION, no bare langs)
if lang in babel.core.LOCALE_ALIASES:
lang = babel.core.LOCALE_ALIASES[lang]
context['lang'] = lang or 'en_US'
# Deprecated to be removed in 9
"""
Damn properties for retro-compatibility. All of that is deprecated,
all of that.
"""
@property
def _db(self):
return self.db
@_db.setter
def _db(self, value):
self.db = value
@property
def _uid(self):
return self.uid
@_uid.setter
def _uid(self, value):
self.uid = value
@property
def _login(self):
return self.login
@_login.setter
def _login(self, value):
self.login = value
@property
def _password(self):
return self.password
@_password.setter
def _password(self, value):
self.password = value
def send(self, service_name, method, *args):
"""
.. deprecated:: 8.0
Use :func:`dispatch_rpc` instead.
"""
return dispatch_rpc(service_name, method, args)
def proxy(self, service):
"""
.. deprecated:: 8.0
Use :func:`dispatch_rpc` instead.
"""
return Service(self, service)
def assert_valid(self, force=False):
"""
.. deprecated:: 8.0
Use :meth:`check_security` instead.
Ensures this session is valid (logged into the openerp server)
"""
if self.uid and not force:
return
# TODO use authenticate instead of login
self.uid = self.proxy("common").login(self.db, self.login, self.password)
if not self.uid:
raise AuthenticationError("Authentication failure")
def ensure_valid(self):
"""
.. deprecated:: 8.0
Use :meth:`check_security` instead.
"""
if self.uid:
try:
self.assert_valid(True)
except Exception:
self.uid = None
def execute(self, model, func, *l, **d):
"""
.. deprecated:: 8.0
Use the registry and cursor in :data:`request` instead.
"""
model = self.model(model)
r = getattr(model, func)(*l, **d)
return r
def exec_workflow(self, model, id, signal):
    """ Send workflow ``signal`` to record ``id`` of ``model`` through
    the ``object`` RPC service.

    .. deprecated:: 8.0
        Use the registry and cursor in :data:`request` instead.
    """
    self.assert_valid()
    object_service = self.proxy('object')
    return object_service.exec_workflow(
        self.db, self.uid, self.password, model, signal, id)
def model(self, model):
    """ Get an RPC proxy for the object ``model``, bound to this session.

    .. deprecated:: 8.0
        Use the registry and cursor in :data:`request` instead.

    :param model: an OpenERP model name
    :type model: str
    :rtype: a model object
    :raise SessionExpiredException: when no database is bound to the session
    """
    if self.db:
        return Model(self, model)
    raise SessionExpiredException("Session expired")
def save_action(self, action):
    """ Store ``action`` in the session and return an integer key under
    which :meth:`get_action` can retrieve it later.

    :param action: the action to save in the session
    :type action: anything
    :return: a key identifying the saved action
    :rtype: integer
    """
    saved_actions = self.setdefault(
        'saved_actions', {"next": 1, "actions": {}})
    actions = saved_actions["actions"]
    # at most 10 stored actions: evict the oldest (smallest key) first
    if len(actions) >= 10:
        del actions[min(actions)]
    key = saved_actions["next"]
    actions[key] = action
    saved_actions["next"] = key + 1
    self.modified = True
    return key
def get_action(self, key):
    """ Fetch back an action previously stored with :meth:`save_action`.

    May return ``None`` when the action is unknown or was evicted from
    the session store; callers should handle that case.

    :param key: the key returned by save_action()
    :type key: integer
    :return: the saved action or None
    :rtype: anything
    """
    actions = self.get('saved_actions', {}).get("actions", {})
    return actions.get(key)
def session_gc(session_store):
    """ Probabilistic garbage collection of stored sessions: roughly one
    call in a thousand sweeps the store's directory and unlinks session
    files untouched for more than a week.
    """
    if random.random() >= 0.001:
        return
    # we keep sessions one week
    cutoff = time.time() - 60 * 60 * 24 * 7
    for fname in os.listdir(session_store.path):
        fullpath = os.path.join(session_store.path, fname)
        try:
            if os.path.getmtime(fullpath) < cutoff:
                os.unlink(fullpath)
        except OSError:
            # file vanished or is unreadable; another worker may already
            # have collected it -- best effort, skip it
            pass
#----------------------------------------------------------
# WSGI Layer
#----------------------------------------------------------
# Add potentially missing (older ubuntu) font mime types, so the static
# file middleware serves web fonts with a correct Content-Type.
mimetypes.add_type('application/font-woff', '.woff')
mimetypes.add_type('application/vnd.ms-fontobject', '.eot')
mimetypes.add_type('application/x-font-ttf', '.ttf')
class Response(werkzeug.wrappers.Response):
    """ Response object passed through controller route chain.

    In addition to the :class:`werkzeug.wrappers.Response` parameters, this
    class's constructor can take the following additional parameters
    for QWeb Lazy Rendering.

    :param basestring template: template to render
    :param dict qcontext: Rendering context to use
    :param int uid: User id to use for the ir.ui.view render call,
                    ``None`` to use the request's user (the default)

    these attributes are available as parameters on the Response object and
    can be altered at any time before rendering

    Also exposes all the attributes and methods of
    :class:`werkzeug.wrappers.Response`.
    """
    default_mimetype = 'text/html'

    def __init__(self, *args, **kw):
        # pop the qweb-specific kwargs before werkzeug's constructor,
        # which would reject unknown keyword arguments
        template = kw.pop('template', None)
        qcontext = kw.pop('qcontext', None)
        uid = kw.pop('uid', None)
        super(Response, self).__init__(*args, **kw)
        self.set_default(template, qcontext, uid)

    def set_default(self, template=None, qcontext=None, uid=None):
        # (re)initialize the lazy-rendering state; also emits CORS headers
        # when the matched endpoint declared a 'cors' routing option
        self.template = template
        self.qcontext = qcontext or dict()
        self.uid = uid
        # Support for Cross-Origin Resource Sharing
        if request.endpoint and 'cors' in request.endpoint.routing:
            self.headers.set('Access-Control-Allow-Origin', request.endpoint.routing['cors'])
            methods = 'GET, POST'
            if request.endpoint.routing['type'] == 'json':
                # JSON-RPC endpoints only ever accept POST
                methods = 'POST'
            elif request.endpoint.routing.get('methods'):
                methods = ', '.join(request.endpoint.routing['methods'])
            self.headers.set('Access-Control-Allow-Methods', methods)

    @property
    def is_qweb(self):
        # True while the response still carries an unrendered template
        return self.template is not None

    def render(self):
        """ Renders the Response's template, returns the result
        """
        view_obj = request.registry["ir.ui.view"]
        # explicit uid wins, then the request's user, then the superuser
        uid = self.uid or request.uid or openerp.SUPERUSER_ID
        return view_obj.render(
            request.cr, uid, self.template, self.qcontext,
            context=request.context)

    def flatten(self):
        """ Forces the rendering of the response's template, sets the result
        as response body and unsets :attr:`.template`
        """
        self.response.append(self.render())
        self.template = None
class DisableCacheMiddleware(object):
    """ WSGI middleware that strips caching headers from responses.

    ``Last-Modified`` is always removed; when the referer's query string
    contains a ``debug`` flag, all cache-validation headers are removed
    and ``Cache-Control: no-cache`` is forced, so developers always get
    fresh assets.
    """
    def __init__(self, app):
        # the wrapped WSGI application
        self.app = app

    def __call__(self, environ, start_response):
        def start_wrapped(status, headers, exc_info=None):
            # PEP 3333: start_response may be called with a third
            # ``exc_info`` argument (e.g. when the app handles an error
            # after headers were sent); the previous signature rejected
            # it with a TypeError. Accept and forward it.
            referer = environ.get('HTTP_REFERER', '')
            parsed = urlparse.urlparse(referer)
            debug = parsed.query.count('debug') >= 1

            new_headers = []
            unwanted_keys = ['Last-Modified']
            if debug:
                new_headers = [('Cache-Control', 'no-cache')]
                unwanted_keys += ['Expires', 'Etag', 'Cache-Control']

            for k, v in headers:
                if k not in unwanted_keys:
                    new_headers.append((k, v))

            # PEP 3333 also requires start_response to return the
            # ``write(body_data)`` callable; the old code dropped it.
            if exc_info is not None:
                return start_response(status, new_headers, exc_info)
            return start_response(status, new_headers)
        return self.app(environ, start_wrapped)
class Root(object):
    """Root WSGI application for the OpenERP Web Client.
    """
    def __init__(self):
        # addons are loaded lazily, on the first incoming request
        # (see __call__)
        self._loaded = False

    @lazy_property
    def session_store(self):
        # Setup http sessions
        path = openerp.tools.config.session_dir
        _logger.debug('HTTP sessions stored in: %s', path)
        return werkzeug.contrib.sessions.FilesystemSessionStore(path, session_class=OpenERPSession)

    @lazy_property
    def nodb_routing_map(self):
        # routing map used when no database is selected: only routes from
        # server-wide modules are available
        _logger.info("Generating nondb routing")
        return routing_map([''] + openerp.conf.server_wide_modules, True)

    def __call__(self, environ, start_response):
        """ Handle a WSGI request
        """
        if not self._loaded:
            self._loaded = True
            self.load_addons()
        return self.dispatch(environ, start_response)

    def load_addons(self):
        """ Load all addons from addons path containing static files and
        controllers and configure them.  """
        # TODO should we move this to ir.http so that only configured modules are served ?
        statics = {}

        for addons_path in openerp.modules.module.ad_paths:
            for module in sorted(os.listdir(str(addons_path))):
                if module not in addons_module:
                    manifest_path = os.path.join(addons_path, module, '__openerp__.py')
                    path_static = os.path.join(addons_path, module, 'static')
                    # only modules with both a manifest and a static/
                    # directory are registered here
                    if os.path.isfile(manifest_path) and os.path.isdir(path_static):
                        manifest = ast.literal_eval(open(manifest_path).read())
                        if not manifest.get('installable', True):
                            continue
                        manifest['addons_path'] = addons_path
                        _logger.debug("Loading %s", module)
                        if 'openerp.addons' in sys.modules:
                            m = __import__('openerp.addons.' + module)
                        else:
                            m = None
                        addons_module[module] = m
                        addons_manifest[module] = manifest
                        statics['/%s/static' % module] = path_static

        if statics:
            _logger.info("HTTP Configuring static files")
        # wrap the dispatcher: static files first, then cache disabling
        app = werkzeug.wsgi.SharedDataMiddleware(self.dispatch, statics, cache_timeout=STATIC_CACHE)
        self.dispatch = DisableCacheMiddleware(app)

    def setup_session(self, httprequest):
        # recover or create session
        session_gc(self.session_store)

        # session id can come (in priority order) from a GET parameter,
        # a custom header, or the regular cookie; only the cookie counts
        # as a non-explicit session (see get_response)
        sid = httprequest.args.get('session_id')
        explicit_session = True
        if not sid:
            sid = httprequest.headers.get("X-Openerp-Session-Id")
        if not sid:
            sid = httprequest.cookies.get('session_id')
            explicit_session = False
        if sid is None:
            httprequest.session = self.session_store.new()
        else:
            httprequest.session = self.session_store.get(sid)
        return explicit_session

    def setup_db(self, httprequest):
        db = httprequest.session.db
        # Check if session.db is legit
        if db:
            if db not in db_filter([db], httprequest=httprequest):
                _logger.warn("Logged into database '%s', but dbfilter "
                             "rejects it; logging session out.", db)
                httprequest.session.logout()
                db = None

        if not db:
            httprequest.session.db = db_monodb(httprequest)

    def setup_lang(self, httprequest):
        # derive a lang_REGION code from the Accept-Language header once
        # per session; fall back to en_US on unparseable locales
        if "lang" not in httprequest.session.context:
            alang = httprequest.accept_languages.best or "en-US"
            try:
                code, territory, _, _ = babel.core.parse_locale(alang, sep='-')
                if territory:
                    lang = '%s_%s' % (code, territory)
                else:
                    lang = babel.core.LOCALE_ALIASES[code]
            except (ValueError, KeyError):
                lang = 'en_US'
            httprequest.session.context["lang"] = lang

    def get_request(self, httprequest):
        # deduce type of request
        if httprequest.args.get('jsonp'):
            return JsonRequest(httprequest)
        if httprequest.mimetype in ("application/json", "application/json-rpc"):
            return JsonRequest(httprequest)
        else:
            return HttpRequest(httprequest)

    def get_response(self, httprequest, result, explicit_session):
        # render pending QWeb templates; with a database available the
        # exception handler may turn the error into a proper page
        if isinstance(result, Response) and result.is_qweb:
            try:
                result.flatten()
            except(Exception), e:
                if request.db:
                    result = request.registry['ir.http']._handle_exception(e)
                else:
                    raise

        if isinstance(result, basestring):
            response = Response(result, mimetype='text/html')
        else:
            response = result

        if httprequest.session.should_save:
            if httprequest.session.rotate:
                # rotation: issue a brand-new session id (e.g. on login)
                self.session_store.delete(httprequest.session)
                httprequest.session.sid = self.session_store.generate_key()
                httprequest.session.modified = True
            self.session_store.save(httprequest.session)
        # We must not set the cookie if the session id was specified using a http header or a GET parameter.
        # There are two reasons to this:
        # - When using one of those two means we consider that we are overriding the cookie, which means creating a new
        #   session on top of an already existing session and we don't want to create a mess with the 'normal' session
        #   (the one using the cookie). That is a special feature of the Session Javascript class.
        # - It could allow session fixation attacks.
        if not explicit_session and hasattr(response, 'set_cookie'):
            response.set_cookie('session_id', httprequest.session.sid, max_age=90 * 24 * 60 * 60)

        return response

    def dispatch(self, environ, start_response):
        """
        Performs the actual WSGI dispatching for the application.
        """
        try:
            httprequest = werkzeug.wrappers.Request(environ)
            httprequest.app = self

            explicit_session = self.setup_session(httprequest)
            self.setup_db(httprequest)
            self.setup_lang(httprequest)

            request = self.get_request(httprequest)

            def _dispatch_nodb():
                # fallback dispatch using the database-less routing map
                try:
                    func, arguments = self.nodb_routing_map.bind_to_environ(request.httprequest.environ).match()
                except werkzeug.exceptions.HTTPException, e:
                    return request._handle_exception(e)
                request.set_handler(func, arguments, "none")
                result = request.dispatch()
                return result

            with request:
                db = request.session.db
                if db:
                    openerp.modules.registry.RegistryManager.check_registry_signaling(db)
                    try:
                        with openerp.tools.mute_logger('openerp.sql_db'):
                            ir_http = request.registry['ir.http']
                    except (AttributeError, psycopg2.OperationalError):
                        # psycopg2 error or attribute error while constructing
                        # the registry. That means the database probably does
                        # not exists anymore or the code doesnt match the db.
                        # Log the user out and fall back to nodb
                        request.session.logout()
                        result = _dispatch_nodb()
                    else:
                        result = ir_http._dispatch()
                    openerp.modules.registry.RegistryManager.signal_caches_change(db)
                else:
                    result = _dispatch_nodb()

                response = self.get_response(httprequest, result, explicit_session)
            return response(environ, start_response)

        except werkzeug.exceptions.HTTPException, e:
            # let werkzeug's HTTP exceptions render themselves
            return e(environ, start_response)

    def get_db_router(self, db):
        # database-bound requests use the registry's routing map,
        # everything else falls back to the server-wide one
        if not db:
            return self.nodb_routing_map
        return request.registry['ir.http'].routing_map()
def db_list(force=False, httprequest=None):
    """ Return the server's database list (``db.list`` RPC service),
    filtered through :func:`db_filter`.

    :param bool force: forwarded to the ``db.list`` service
    :param httprequest: request whose host filters the list
                        (defaults to the current request)
    """
    dbs = dispatch_rpc("db", "list", [force])
    return db_filter(dbs, httprequest=httprequest)
def db_filter(dbs, httprequest=None):
    """ Filter ``dbs`` down to the names matching the configured
    ``dbfilter`` regex, after substituting ``%h`` with the request's
    host name and ``%d`` with its first domain component (skipping a
    leading ``www``).
    """
    httprequest = httprequest or request.httprequest
    host = httprequest.environ.get('HTTP_HOST', '').split(':')[0]
    domain, _, rest = host.partition('.')
    if domain == "www" and rest:
        domain = rest.partition('.')[0]
    pattern = openerp.tools.config['dbfilter'] \
        .replace('%h', host).replace('%d', domain)
    return [name for name in dbs if re.match(pattern, name)]
def db_monodb(httprequest=None):
    """
    Magic function to find the current database.

    Implementation details:

    * Magic
    * More magic

    Returns ``None`` if the magic is not magic enough.
    """
    httprequest = httprequest or request.httprequest
    candidates = db_list(True, httprequest)

    # prefer the database already bound to the session, when still allowed
    session_db = httprequest.session.db
    if session_db in candidates:
        return session_db

    # otherwise a single unambiguous candidate wins; give up beyond that
    return candidates[0] if len(candidates) == 1 else None
def send_file(filepath_or_fp, mimetype=None, as_attachment=False, filename=None, mtime=None,
              add_etags=True, cache_timeout=STATIC_CACHE, conditional=True):
    """This is a modified version of Flask's send_file()

    Sends the contents of a file to the client. This will use the
    most efficient method available and configured. By default it will
    try to use the WSGI server's file_wrapper support.

    By default it will try to guess the mimetype for you, but you can
    also explicitly provide one. For extra security you probably want
    to send certain files as attachment (HTML for instance). The mimetype
    guessing requires a `filename` or an `attachment_filename` to be
    provided.

    Please never pass filenames to this function from user sources without
    checking them first.

    :param filepath_or_fp: the filename of the file to send.
                           Alternatively a file object might be provided
                           in which case `X-Sendfile` might not work and
                           fall back to the traditional method. Make sure
                           that the file pointer is positioned at the start
                           of data to send before calling :func:`send_file`.
    :param mimetype: the mimetype of the file if provided, otherwise
                     auto detection happens.
    :param as_attachment: set to `True` if you want to send this file with
                          a ``Content-Disposition: attachment`` header.
    :param filename: the filename for the attachment if it differs from the file's filename or
                     if using file object without 'name' attribute (eg: E-tags with StringIO).
    :param mtime: last modification time to use for contitional response.
    :param add_etags: set to `False` to disable attaching of etags.
    :param conditional: set to `False` to disable conditional responses.
    :param cache_timeout: the timeout in seconds for the headers.
    """
    # a path was given: open it ourselves and derive filename/mtime;
    # otherwise treat the argument as an already-open file object
    if isinstance(filepath_or_fp, (str, unicode)):
        if not filename:
            filename = os.path.basename(filepath_or_fp)
        file = open(filepath_or_fp, 'rb')
        if not mtime:
            mtime = os.path.getmtime(filepath_or_fp)
    else:
        file = filepath_or_fp
        if not filename:
            filename = getattr(file, 'name', None)

    # measure the stream's size by seeking to its end, then rewind
    file.seek(0, 2)
    size = file.tell()
    file.seek(0)

    if mimetype is None and filename:
        mimetype = mimetypes.guess_type(filename)[0]
    if mimetype is None:
        mimetype = 'application/octet-stream'

    headers = werkzeug.datastructures.Headers()
    if as_attachment:
        if filename is None:
            raise TypeError('filename unavailable, required for sending as attachment')
        headers.add('Content-Disposition', 'attachment', filename=filename)
        headers['Content-Length'] = size

    # hand the open file to the WSGI server's file wrapper when available
    data = wrap_file(request.httprequest.environ, file)
    rv = Response(data, mimetype=mimetype, headers=headers,
                  direct_passthrough=True)

    # mtime given as a server-formatted datetime string: parse it,
    # dropping microseconds; unparseable values are simply ignored
    if isinstance(mtime, str):
        try:
            server_format = openerp.tools.misc.DEFAULT_SERVER_DATETIME_FORMAT
            mtime = datetime.datetime.strptime(mtime.split('.')[0], server_format)
        except Exception:
            mtime = None

    if mtime is not None:
        rv.last_modified = mtime

    rv.cache_control.public = True
    if cache_timeout:
        rv.cache_control.max_age = cache_timeout
        rv.expires = int(time.time() + cache_timeout)

    if add_etags and filename and mtime:
        # etag derived from mtime, size and an adler32 of the filename
        rv.set_etag('odoo-%s-%s-%s' % (
            mtime,
            size,
            adler32(
                filename.encode('utf-8') if isinstance(filename, unicode)
                else filename
            ) & 0xffffffff
        ))

    if conditional:
        rv = rv.make_conditional(request.httprequest)
        # make sure we don't send x-sendfile for servers that
        # ignore the 304 status code for x-sendfile.
        if rv.status_code == 304:
            rv.headers.pop('x-sendfile', None)
    return rv
#----------------------------------------------------------
# RPC controller
#----------------------------------------------------------
class CommonController(Controller):
    """ Database-agnostic JSON-RPC entry points (auth="none"). """

    @route('/jsonrpc', type='json', auth="none")
    def jsonrpc(self, service, method, args):
        """ Method used by client APIs to contact OpenERP. """
        return dispatch_rpc(service, method, args)

    @route('/gen_session_id', type='json', auth="none")
    def gen_session_id(self):
        # hand out a fresh, unbound session id from the global store
        nsession = root.session_store.new()
        return nsession.sid
# register main wsgi handler: the module-level Root singleton is the
# entry point the OpenERP server mounts for all HTTP traffic
root = Root()
openerp.service.wsgi_server.register_wsgi_handler(root)

# vim:et:ts=4:sw=4:
| agpl-3.0 |
BizzCloud/PosBox | addons/website_certification/__init__.py | 385 | 1030 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import certification
import controllers
| agpl-3.0 |
Xion/taipan | taipan/testing/asserts.py | 1 | 7587 | """
Additional asserts to supplement the :class:`TestCase` class.
"""
from taipan._compat import imap
from taipan.collections import is_countable, is_iterable
from taipan.lang import ABSENT
from taipan.strings import BaseString, is_string
__all__ = ['AssertsMixin']
class AssertsMixin(object):
    """Mixin providing additional assert methods.

    Intended to be mixed into a :class:`unittest.TestCase` subclass: the
    helpers rely on ``fail``, ``assertEqual``, ``assertIsInstance`` and
    ``_formatMessage`` being provided by the test case.
    """

    def assertZero(self, argument, msg=None):
        """Assert that ``argument`` is equal to zero."""
        if msg is None:
            msg = "%r is not equal to zero" % (argument,)
        self.assertEqual(0, argument, msg=msg)

    def assertEmpty(self, argument, msg=None):
        """Assert that ``argument`` is an empty collection."""
        if is_countable(argument):
            nonempty = len(argument) > 0
        else:
            if not is_iterable(argument):
                self.__fail(msg, "%r is not an iterable" % (argument,))
            # non-countable iterable: probing a single element is enough
            # to decide emptiness (and consumes at most one item)
            nonempty = False
            for _ in argument:
                nonempty = True
                break
        if nonempty:
            self.__fail(msg, "%r is not empty" % (argument,))

    def assertNotEmpty(self, argument, msg=None):
        """Assert that ``argument`` is not an empty collection."""
        if is_countable(argument):
            empty = len(argument) == 0
        else:
            if not is_iterable(argument):
                self.__fail(msg, "%r is not an iterable" % (argument,))
            empty = True
            for _ in argument:
                empty = False
                break
        if empty:
            self.__fail(msg, "%r is empty" % (argument,))

    def assertStartsWith(self, prefix, string, msg=None):
        """Assert that ``string`` starts with given ``prefix``."""
        # NOTE(review): the validator below accepts any iterable of
        # strings, but str.startswith itself only takes a single string
        # or a tuple -- a list prefix would raise TypeError; confirm
        # whether that is intended upstream
        self.__fail_unless_strings(prefix)
        self.assertIsInstance(string, BaseString)
        if not string.startswith(prefix):
            self.__fail(msg, "%r does not start with %r" % (string, prefix))

    def assertEndsWith(self, suffix, string, msg=None):
        """Assert that ``string`` ends with given ``suffix``."""
        self.__fail_unless_strings(suffix)
        self.assertIsInstance(string, BaseString)
        if not string.endswith(suffix):
            self.__fail(msg, "%r does not end with %r" % (string, suffix))

    def assertIsSubclass(self, class_, superclass, msg=None):
        """Assert that ``class_`` inherits from given ``superclass``.

        .. versionadded:: 0.0.3
        """
        # both arguments must actually be classes before issubclass
        # is applicable
        for c in (class_, superclass):
            if not isinstance(c, type):
                self.fail("%r is not a class" % (c,))
        if not issubclass(class_, superclass):
            self.__fail(
                msg, "%r is not a subclass of %r" % (class_, superclass))

    def assertHasAttr(self, attr, obj, msg=None):
        """Assert that ``obj``\ ect has given ``attr``\ ibute."""
        self.assertIsInstance(attr, BaseString)
        if not attr:
            self.fail("attribute name is empty")
        if not hasattr(obj, attr):
            self.__fail(msg, "%r does not have attribute %r" % (obj, attr))

    def assertThat(self, predicate, argument=ABSENT, msg=None):
        """Assert that a ``predicate`` applies to given ``argument``.

        Example::

            self.assertThat(is_pair, some_tuple)
        """
        if not callable(predicate):
            self.fail("%r is not a callable predicate" % (predicate,))

        # ABSENT (rather than None) marks the no-argument form, so that
        # None itself remains a legitimate predicate argument
        satisfied = predicate() if argument is ABSENT else predicate(argument)

        if not satisfied:
            argument_part = ("" if argument is ABSENT
                             else " for %r" % (argument,))
            self.__fail(msg, "predicate not satisfied%s" % (argument_part,))

    def assertAll(self, arg, iterable=ABSENT, msg=None):
        """Assert that all elements of an iterable are truthy
        or satisfy given predicate.

        :param arg: Predicate, or iterable of elements to check for truthiness
        :param iterable: Iterable of predicate arguments
                         (if predicate was given)

        Examples::

            # check if all elements satisfy a predicate
            self.assertAll(is_valid, iterable)

            # check if all elements are already truthy
            self.assertAll(iterable_of_maybe_truthies)
        """
        if callable(arg):
            self.__fail_unless_iterable(iterable)
            predicate = arg
            for i, elem in enumerate(iterable):
                if not predicate(elem):
                    self.__fail(
                        msg, "predicate not satisfied for element #%d: %r" % (
                            i, elem))
        else:
            self.__fail_unless_iterable(arg)

            # shift arguments to the left: in this form ``arg`` is the
            # iterable and the second positional slot carries ``msg``
            if msg is None and iterable is not ABSENT:
                msg = iterable
            iterable = arg

            for i, elem in enumerate(iterable):
                if not elem:
                    self.__fail(msg, "falsy element #%d: %r" % (i, elem))

    def assertAny(self, arg, iterable=ABSENT, msg=None):
        """Assert that at least one element of an iterable is truthy
        or satisfies given predicate.

        :param arg: Predicate, or iterable of elements to check for truthiness
        :param iterable: Iterable of predicate arguments
                         (if predicate was given)

        Examples::

            # check if any element satisfies a predicate
            self.assertAny(is_valid, iterable)

            # check if any element is already truthy
            self.assertAny(iterable_of_maybe_truthies)
        """
        if callable(arg):
            self.__fail_unless_iterable(iterable)
            if not any(imap(arg, iterable)):
                self.__fail(msg, "predicate not satisfied for any element")
        else:
            self.__fail_unless_iterable(arg)

            # shift arguments to the left (same convention as assertAll)
            if msg is None and iterable is not ABSENT:
                msg = iterable

            if not any(arg):
                self.__fail(msg, "no truthy elements found")

    def assertNoop(self, function, argument, msg=None):
        """Assert that ``function`` returns given ``argument`` verbatim
        when applied to it.

        Example::

            self.assertNoop(str.upper, "WAT")
        """
        if not callable(function):
            self.fail("%r is not a callable" % (function,))

        result = function(argument)
        # equality (not identity) is what "verbatim" means here
        if result != argument:
            self.__fail(
                msg, "result %r of function %r differs from argument %r" % (
                    result, function, argument))

    def assertResultsEqual(self, func1, func2, msg=None):
        """Assert that both functions evaluate to the same result."""
        self.__fail_unless_callable(func1)
        self.__fail_unless_callable(func2)
        self.assertEqual(func1(), func2(), msg=msg)

    # Utility functions

    def __fail(self, custom_msg, standard_msg):
        # combine the user-supplied and standard messages through
        # TestCase._formatMessage before failing
        self.fail(self._formatMessage(custom_msg, standard_msg))

    def __fail_unless_iterable(self, arg):
        if not is_iterable(arg):
            self.fail("%r is not an iterable" % (arg,))

    def __fail_unless_callable(self, arg):
        if not callable(arg):
            self.fail("%r is not a callable" % (arg,))

    def __fail_unless_strings(self, arg):
        """Fail the test unless argument is a string or iterable thereof."""
        if not is_string(arg):
            if not (is_iterable(arg) and all(imap(is_string, arg))):
                self.fail("%r is not a string or iterable of strings" % (arg,))
| bsd-2-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.