| repo_name stringlengths 5-100 | path stringlengths 4-294 | copies stringclasses 990 values | size stringlengths 4-7 | content stringlengths 666-1M | license stringclasses 15 values |
|---|---|---|---|---|---|
kevinr/750book-web | 750book-web-env/lib/python2.7/site-packages/django/contrib/auth/handlers/modpython.py | 436 | 1899 | from mod_python import apache
import os
def authenhandler(req, **kwargs):
"""
Authentication handler that checks against Django's auth database.
"""
# mod_python fakes the environ, and thus doesn't process SetEnv. This fixes
# that so that the following import works
os.environ.update(req.subprocess_env)
# apache 2.2 requires a call to req.get_basic_auth_pw() before
# req.user and friends are available.
req.get_basic_auth_pw()
# check for PythonOptions
_str_to_bool = lambda s: s.lower() in ('1', 'true', 'on', 'yes')
options = req.get_options()
permission_name = options.get('DjangoPermissionName', None)
staff_only = _str_to_bool(options.get('DjangoRequireStaffStatus', "on"))
superuser_only = _str_to_bool(options.get('DjangoRequireSuperuserStatus', "off"))
settings_module = options.get('DJANGO_SETTINGS_MODULE', None)
if settings_module:
os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
from django.contrib.auth.models import User
from django import db
db.reset_queries()
# check that the username is valid
kwargs = {'username': req.user, 'is_active': True}
if staff_only:
kwargs['is_staff'] = True
if superuser_only:
kwargs['is_superuser'] = True
try:
try:
user = User.objects.get(**kwargs)
except User.DoesNotExist:
return apache.HTTP_UNAUTHORIZED
# check the password and any permission given
if user.check_password(req.get_basic_auth_pw()):
if permission_name:
if user.has_perm(permission_name):
return apache.OK
else:
return apache.HTTP_UNAUTHORIZED
else:
return apache.OK
else:
return apache.HTTP_UNAUTHORIZED
finally:
db.connection.close()
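# A hedged sketch of the Apache configuration this handler consumes; the
# location, settings module, and option values below are illustrative
# assumptions, not values taken from this file:
#
# <Location "/private/">
#     AuthType Basic
#     AuthName "Restricted"
#     Require valid-user
#     SetEnv DJANGO_SETTINGS_MODULE mysite.settings
#     PythonAuthenHandler django.contrib.auth.handlers.modpython
#     PythonOption DjangoRequireStaffStatus on
#     PythonOption DjangoPermissionName myapp.can_view
# </Location>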
| mit |
kool79/intellij-community | python/lib/Lib/distutils/text_file.py | 85 | 15152 | """text_file
provides the TextFile class, which gives an interface to text files
that (optionally) takes care of stripping comments, ignoring blank
lines, and joining lines with backslashes."""
__revision__ = "$Id: text_file.py 29687 2002-11-14 02:25:42Z akuchling $"
from types import *
import sys, os, string
class TextFile:
"""Provides a file-like object that takes care of all the things you
commonly want to do when processing a text file that has some
line-by-line syntax: strip comments (as long as "#" is your
comment character), skip blank lines, join adjacent lines by
escaping the newline (ie. backslash at end of line), strip
leading and/or trailing whitespace. All of these are optional
and independently controllable.
Provides a 'warn()' method so you can generate warning messages that
report physical line number, even if the logical line in question
spans multiple physical lines. Also provides 'unreadline()' for
implementing line-at-a-time lookahead.
Constructor is called as:
TextFile (filename=None, file=None, **options)
It bombs (RuntimeError) if both 'filename' and 'file' are None;
'filename' should be a string, and 'file' a file object (or
something that provides 'readline()' and 'close()' methods). It is
recommended that you supply at least 'filename', so that TextFile
can include it in warning messages. If 'file' is not supplied,
TextFile creates its own using the 'open()' builtin.
The options are all boolean, and affect the value returned by
'readline()':
strip_comments [default: true]
strip from "#" to end-of-line, as well as any whitespace
leading up to the "#" -- unless it is escaped by a backslash
lstrip_ws [default: false]
strip leading whitespace from each line before returning it
rstrip_ws [default: true]
strip trailing whitespace (including line terminator!) from
each line before returning it
skip_blanks [default: true]
skip lines that are empty *after* stripping comments and
whitespace. (If both lstrip_ws and rstrip_ws are false,
then some lines may consist of solely whitespace: these will
*not* be skipped, even if 'skip_blanks' is true.)
join_lines [default: false]
if a backslash is the last non-newline character on a line
after stripping comments and whitespace, join the following line
to it to form one "logical line"; if N consecutive lines end
with a backslash, then N+1 physical lines will be joined to
form one logical line.
collapse_join [default: false]
strip leading whitespace from lines that are joined to their
predecessor; only matters if (join_lines and not lstrip_ws)
Note that since 'rstrip_ws' can strip the trailing newline, the
semantics of 'readline()' must differ from those of the builtin file
object's 'readline()' method! In particular, 'readline()' returns
None for end-of-file: an empty string might just be a blank line (or
an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
not."""
default_options = { 'strip_comments': 1,
'skip_blanks': 1,
'lstrip_ws': 0,
'rstrip_ws': 1,
'join_lines': 0,
'collapse_join': 0,
}
def __init__ (self, filename=None, file=None, **options):
"""Construct a new TextFile object. At least one of 'filename'
(a string) and 'file' (a file-like object) must be supplied.
The keyword argument options are described above and affect
the values returned by 'readline()'."""
if filename is None and file is None:
raise RuntimeError, \
"you must supply either or both of 'filename' and 'file'"
# set values for all options -- either from client option hash
# or fallback to default_options
for opt in self.default_options.keys():
if options.has_key (opt):
setattr (self, opt, options[opt])
else:
setattr (self, opt, self.default_options[opt])
# sanity check client option hash
for opt in options.keys():
if not self.default_options.has_key (opt):
raise KeyError, "invalid TextFile option '%s'" % opt
if file is None:
self.open (filename)
else:
self.filename = filename
self.file = file
self.current_line = 0 # assuming that file is at BOF!
# 'linebuf' is a stack of lines that will be emptied before we
# actually read from the file; it's only populated by an
# 'unreadline()' operation
self.linebuf = []
def open (self, filename):
"""Open a new file named 'filename'. This overrides both the
'filename' and 'file' arguments to the constructor."""
self.filename = filename
self.file = open (self.filename, 'r')
self.current_line = 0
def close (self):
"""Close the current file and forget everything we know about it
(filename, current line number)."""
self.file.close ()
self.file = None
self.filename = None
self.current_line = None
def gen_error (self, msg, line=None):
outmsg = []
if line is None:
line = self.current_line
outmsg.append(self.filename + ", ")
if type (line) in (ListType, TupleType):
outmsg.append("lines %d-%d: " % tuple (line))
else:
outmsg.append("line %d: " % line)
outmsg.append(str(msg))
return string.join(outmsg, "")
def error (self, msg, line=None):
raise ValueError, "error: " + self.gen_error(msg, line)
def warn (self, msg, line=None):
"""Print (to stderr) a warning message tied to the current logical
line in the current file. If the current logical line in the
file spans multiple physical lines, the warning refers to the
whole range, eg. "lines 3-5". If 'line' is supplied, it overrides
the current line number; it may be a list or tuple to indicate a
range of physical lines, or an integer for a single physical
line."""
sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")
def readline (self):
"""Read and return a single logical line from the current file (or
from an internal buffer if lines have previously been "unread"
with 'unreadline()'). If the 'join_lines' option is true, this
may involve reading multiple physical lines concatenated into a
single string. Updates the current line number, so calling
'warn()' after 'readline()' emits a warning about the physical
line(s) just read. Returns None on end-of-file, since the empty
string can occur if 'rstrip_ws' is true but 'skip_blanks' is
not."""
# If any "unread" lines waiting in 'linebuf', return the top
# one. (We don't actually buffer read-ahead data -- lines only
# get put in 'linebuf' if the client explicitly does an
# 'unreadline()'.
if self.linebuf:
line = self.linebuf[-1]
del self.linebuf[-1]
return line
buildup_line = ''
while 1:
# read the line, make it None if EOF
line = self.file.readline()
if line == '': line = None
if self.strip_comments and line:
# Look for the first "#" in the line. If none, never
# mind. If we find one and it's the first character, or
# is not preceded by "\", then it starts a comment --
# strip the comment, strip whitespace before it, and
# carry on. Otherwise, it's just an escaped "#", so
# unescape it (and any other escaped "#"'s that might be
# lurking in there) and otherwise leave the line alone.
pos = string.find (line, "#")
if pos == -1: # no "#" -- no comments
pass
# It's definitely a comment -- either "#" is the first
# character, or it's elsewhere and unescaped.
elif pos == 0 or line[pos-1] != "\\":
# Have to preserve the trailing newline, because it's
# the job of a later step (rstrip_ws) to remove it --
# and if rstrip_ws is false, we'd better preserve it!
# (NB. this means that if the final line is all comment
# and has no trailing newline, we will think that it's
# EOF; I think that's OK.)
eol = (line[-1] == '\n') and '\n' or ''
line = line[0:pos] + eol
# If all that's left is whitespace, then skip line
# *now*, before we try to join it to 'buildup_line' --
# that way constructs like
# hello \\
# # comment that should be ignored
# there
# result in "hello there".
if string.strip(line) == "":
continue
else: # it's an escaped "#"
line = string.replace (line, "\\#", "#")
# did previous line end with a backslash? then accumulate
if self.join_lines and buildup_line:
# oops: end of file
if line is None:
self.warn ("continuation line immediately precedes "
"end-of-file")
return buildup_line
if self.collapse_join:
line = string.lstrip (line)
line = buildup_line + line
# careful: pay attention to line number when incrementing it
if type (self.current_line) is ListType:
self.current_line[1] = self.current_line[1] + 1
else:
self.current_line = [self.current_line,
self.current_line+1]
# just an ordinary line, read it as usual
else:
if line is None: # eof
return None
# still have to be careful about incrementing the line number!
if type (self.current_line) is ListType:
self.current_line = self.current_line[1] + 1
else:
self.current_line = self.current_line + 1
# strip whitespace however the client wants (leading and
# trailing, or one or the other, or neither)
if self.lstrip_ws and self.rstrip_ws:
line = string.strip (line)
elif self.lstrip_ws:
line = string.lstrip (line)
elif self.rstrip_ws:
line = string.rstrip (line)
# blank line (whether we rstrip'ed or not)? skip to next line
# if appropriate
if (line == '' or line == '\n') and self.skip_blanks:
continue
if self.join_lines:
if line[-1] == '\\':
buildup_line = line[:-1]
continue
if line[-2:] == '\\\n':
buildup_line = line[0:-2] + '\n'
continue
# well, I guess there's some actual content there: return it
return line
# readline ()
def readlines (self):
"""Read and return the list of all logical lines remaining in the
current file."""
lines = []
while 1:
line = self.readline()
if line is None:
return lines
lines.append (line)
def unreadline (self, line):
"""Push 'line' (a string) onto an internal buffer that will be
checked by future 'readline()' calls. Handy for implementing
a parser with line-at-a-time lookahead."""
self.linebuf.append (line)
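# A hedged sketch of typical standalone use; the file name and the
# 'process' callable are illustrative assumptions. The self-test under
# __main__ below exercises the options more systematically.
#
# tf = TextFile('setup.cfg', strip_comments=1, skip_blanks=1,
#               join_lines=1, rstrip_ws=1)
# while 1:
#     logical_line = tf.readline()
#     if logical_line is None:    # None (not '') signals end-of-file
#         break
#     process(logical_line)
# tf.close()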
if __name__ == "__main__":
test_data = """# test file
line 3 \\
# intervening comment
continues on next line
"""
# result 1: no fancy options
result1 = map (lambda x: x + "\n", string.split (test_data, "\n")[0:-1])
# result 2: just strip comments
result2 = ["\n",
"line 3 \\\n",
" continues on next line\n"]
# result 3: just strip blank lines
result3 = ["# test file\n",
"line 3 \\\n",
"# intervening comment\n",
" continues on next line\n"]
# result 4: default, strip comments, blank lines, and trailing whitespace
result4 = ["line 3 \\",
" continues on next line"]
# result 5: strip comments and blanks, plus join lines (but don't
# "collapse" joined lines
result5 = ["line 3 continues on next line"]
# result 6: strip comments and blanks, plus join lines (and
# "collapse" joined lines
result6 = ["line 3 continues on next line"]
def test_input (count, description, file, expected_result):
result = file.readlines ()
# result = string.join (result, '')
if result == expected_result:
print "ok %d (%s)" % (count, description)
else:
print "not ok %d (%s):" % (count, description)
print "** expected:"
print expected_result
print "** received:"
print result
filename = "test.txt"
out_file = open (filename, "w")
out_file.write (test_data)
out_file.close ()
in_file = TextFile (filename, strip_comments=0, skip_blanks=0,
lstrip_ws=0, rstrip_ws=0)
test_input (1, "no processing", in_file, result1)
in_file = TextFile (filename, strip_comments=1, skip_blanks=0,
lstrip_ws=0, rstrip_ws=0)
test_input (2, "strip comments", in_file, result2)
in_file = TextFile (filename, strip_comments=0, skip_blanks=1,
lstrip_ws=0, rstrip_ws=0)
test_input (3, "strip blanks", in_file, result3)
in_file = TextFile (filename)
test_input (4, "default processing", in_file, result4)
in_file = TextFile (filename, strip_comments=1, skip_blanks=1,
join_lines=1, rstrip_ws=1)
test_input (5, "join lines without collapsing", in_file, result5)
in_file = TextFile (filename, strip_comments=1, skip_blanks=1,
join_lines=1, rstrip_ws=1, collapse_join=1)
test_input (6, "join lines with collapsing", in_file, result6)
os.remove (filename)
| apache-2.0 |
agabert/zeus | stages/keystone_services/fabfile.py | 1 | 2703 |
import os
from zeus.config import ConfigManager
from zeus.common import FabricManager
from zeus.common import PasswordManager
from fabric.api import parallel, roles, run, env
metadata = ConfigManager(os.environ["CONFIGFILE"])
passwords = PasswordManager(os.environ["PASSWORDCACHE"]).passwords
FabricManager.setup(metadata.roles_ports)
#
# http://docs.openstack.org/mitaka/install-guide-ubuntu/keystone-services.html
#
@parallel
@roles('openstack_keystone')
def keystone_services():
this = env.host_string.split(":")[0]
run("""
export OS_TOKEN=%s
CONTROLLER="%s"
ADMIN_PASS="%s"
DEMO_PASS="%s"
export OS_URL=http://$CONTROLLER:35357/v3
export OS_IDENTITY_API_VERSION=3
openstack service list | grep 'keystone' | grep 'identity' || openstack service create --name keystone --description "OpenStack Identity" identity
openstack endpoint list | grep 'RegionOne' | grep 'keystone' | grep 'identity' | grep 'public' || openstack endpoint create --region RegionOne identity public http://$CONTROLLER:5000/v3
openstack endpoint list | grep 'RegionOne' | grep 'keystone' | grep 'identity' | grep 'internal' || openstack endpoint create --region RegionOne identity internal http://$CONTROLLER:5000/v3
openstack endpoint list | grep 'RegionOne' | grep 'keystone' | grep 'identity' | grep 'admin' || openstack endpoint create --region RegionOne identity admin http://$CONTROLLER:35357/v3
openstack domain list | grep 'default' || openstack domain create --description "Default Domain" default
openstack project list | grep admin || openstack project create --domain default --description "Admin Project" admin
openstack project list | grep service || openstack project create --domain default --description "Service Project" service
openstack project list | grep demo || openstack project create --domain default --description "Demo Project" demo
openstack user list --domain default | grep admin || openstack user create --domain default --password "${ADMIN_PASS}" admin
openstack user list --domain default | grep admin && openstack user set --password "${ADMIN_PASS}" admin
openstack role list | grep admin || openstack role create admin
openstack role add --project admin --user admin admin
openstack user list --domain default | grep demo || openstack user create --domain default --password "${DEMO_PASS}" demo
openstack user list --domain default | grep demo && openstack user set --password "${DEMO_PASS}" demo
openstack role list | grep user || openstack role create user
openstack role add --project demo --user demo user
""" % (
passwords["ADMIN_TOKEN"],
metadata.servers[this]['ip'],
passwords["ADMIN_PASS"],
passwords["DEMO_PASS"]))
| apache-2.0 |
sankha93/selenium | py/selenium/webdriver/support/select.py | 13 | 9249 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, UnexpectedTagNameException
class Select:
def __init__(self, webelement):
"""
Constructor. A check is made that the given element is, indeed, a SELECT tag. If it is not,
then an UnexpectedTagNameException is thrown.
:Args:
- webelement - element SELECT element to wrap
Example:
from selenium.webdriver.support.ui import Select \n
Select(driver.find_element_by_tag_name("select")).select_by_index(2)
"""
if webelement.tag_name.lower() != "select":
raise UnexpectedTagNameException(
"Select only works on <select> elements, not on <%s>" %
webelement.tag_name)
self._el = webelement
multi = self._el.get_attribute("multiple")
self.is_multiple = multi and multi != "false"
@property
def options(self):
"""Returns a list of all options belonging to this select tag"""
return self._el.find_elements(By.TAG_NAME, 'option')
@property
def all_selected_options(self):
"""Returns a list of all selected options belonging to this select tag"""
ret = []
for opt in self.options:
if opt.is_selected():
ret.append(opt)
return ret
@property
def first_selected_option(self):
"""The first selected option in this select tag (or the currently selected option in a
normal select)"""
for opt in self.options:
if opt.is_selected():
return opt
raise NoSuchElementException("No options are selected")
def select_by_value(self, value):
"""Select all options that have a value matching the argument. That is, when given "foo" this
would select an option like:
<option value="foo">Bar</option>
:Args:
- value - The value to match against
throws NoSuchElementException If there is no option with specified value in SELECT
"""
css = "option[value =%s]" % self._escapeString(value)
opts = self._el.find_elements(By.CSS_SELECTOR, css)
matched = False
for opt in opts:
self._setSelected(opt)
if not self.is_multiple:
return
matched = True
if not matched:
raise NoSuchElementException("Cannot locate option with value: %s" % value)
def select_by_index(self, index):
"""Select the option at the given index. This is done by examing the "index" attribute of an
element, and not merely by counting.
:Args:
- index - The option at this index will be selected
throws NoSuchElementException If there is no option with specified index in SELECT
"""
match = str(index)
for opt in self.options:
if opt.get_attribute("index") == match:
self._setSelected(opt)
return
raise NoSuchElementException("Could not locate element with index %d" % index)
def select_by_visible_text(self, text):
"""Select all options that display text matching the argument. That is, when given "Bar" this
would select an option like:
<option value="foo">Bar</option>
:Args:
- text - The visible text to match against
throws NoSuchElementException If there is no option with specified text in SELECT
"""
xpath = ".//option[normalize-space(.) = %s]" % self._escapeString(text)
opts = self._el.find_elements(By.XPATH, xpath)
matched = False
for opt in opts:
self._setSelected(opt)
if not self.is_multiple:
return
matched = True
if len(opts) == 0 and " " in text:
subStringWithoutSpace = self._get_longest_token(text)
if subStringWithoutSpace == "":
candidates = self.options
else:
xpath = ".//option[contains(.,%s)]" % self._escapeString(subStringWithoutSpace)
candidates = self._el.find_elements(By.XPATH, xpath)
for candidate in candidates:
if text == candidate.text:
self._setSelected(candidate)
if not self.is_multiple:
return
matched = True
if not matched:
raise NoSuchElementException("Could not locate element with visible text: %s" % text)
def deselect_all(self):
"""Clear all selected entries. This is only valid when the SELECT supports multiple selections.
throws NotImplementedError If the SELECT does not support multiple selections
"""
if not self.is_multiple:
raise NotImplementedError("You may only deselect all options of a multi-select")
for opt in self.options:
self._unsetSelected(opt)
def deselect_by_value(self, value):
"""Deselect all options that have a value matching the argument. That is, when given "foo" this
would deselect an option like:
<option value="foo">Bar</option>
:Args:
- value - The value to match against
throws NoSuchElementException If there is no option with specified value in SELECT
"""
if not self.is_multiple:
raise NotImplementedError("You may only deselect options of a multi-select")
matched = False
css = "option[value = %s]" % self._escapeString(value)
opts = self._el.find_elements(By.CSS_SELECTOR, css)
for opt in opts:
self._unsetSelected(opt)
matched = True
if not matched:
raise NoSuchElementException("Could not locate element with value: %s" % value)
def deselect_by_index(self, index):
"""Deselect the option at the given index. This is done by examing the "index" attribute of an
element, and not merely by counting.
:Args:
- index - The option at this index will be deselected
throws NoSuchElementException If there is no option with specified index in SELECT
"""
if not self.is_multiple:
raise NotImplementedError("You may only deselect options of a multi-select")
for opt in self.options:
if opt.get_attribute("index") == str(index):
self._unsetSelected(opt)
return
raise NoSuchElementException("Could not locate element with index %d" % index)
def deselect_by_visible_text(self, text):
"""Deselect all options that display text matching the argument. That is, when given "Bar" this
would deselect an option like:
<option value="foo">Bar</option>
:Args:
- text - The visible text to match against
"""
if not self.is_multiple:
raise NotImplementedError("You may only deselect options of a multi-select")
matched = False
xpath = ".//option[normalize-space(.) = %s]" % self._escapeString(text)
opts = self._el.find_elements(By.XPATH, xpath)
for opt in opts:
self._unsetSelected(opt)
matched = True
if not matched:
raise NoSuchElementException("Could not locate element with visible text: %s" % text)
def _setSelected(self, option):
if not option.is_selected():
option.click()
def _unsetSelected(self, option):
if option.is_selected():
option.click()
def _escapeString(self, value):
if '"' in value and "'" in value:
substrings = value.split("\"")
result = ["concat("]
for substring in substrings:
result.append("\"%s\"" % substring)
result.append(", '\"', ")
result = result[0:-1]
if value.endswith('"'):
result.append(", '\"'")
return "".join(result) + ")"
if '"' in value:
return "'%s'" % value
return "\"%s\"" % value
def _get_longest_token(self, value):
items = value.split(" ")
longest = ""
for item in items:
if len(item) > len(longest):
longest = item
return longest
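# A hedged usage sketch of the Select wrapper above; the driver choice,
# URL, and option text are illustrative assumptions.
if __name__ == '__main__':
    from selenium import webdriver

    driver = webdriver.Firefox()
    driver.get("http://example.com/form")
    select = Select(driver.find_element_by_tag_name("select"))
    select.select_by_visible_text("Bar")  # matches <option value="foo">Bar</option>
    if select.is_multiple:
        select.deselect_all()             # only valid on multi-selects
    driver.quit()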
| apache-2.0 |
kevinchen3315/gyp-git | test/variables/filelist/gyptest-filelist.py | 102 | 1583 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Test variable expansion of '<|(list.txt ...)' syntax commands.
"""
import os
import sys
import TestGyp
test = TestGyp.TestGyp(format='gypd')
expect = test.read('filelist.gyp.stdout')
if sys.platform == 'win32':
expect = expect.replace('/', r'\\').replace('\r\n', '\n')
test.run_gyp('src/filelist.gyp',
'--debug', 'variables',
stdout=expect, ignore_line_numbers=True)
# Verify the filelist.gypd against the checked-in expected contents.
#
# Normally, we should canonicalize line endings in the expected
# contents file setting the Subversion svn:eol-style to native,
# but that would still fail if multiple systems are sharing a single
# workspace on a network-mounted file system. Consequently, we
# massage the Windows line endings ('\r\n') in the output to the
# checked-in UNIX endings ('\n').
contents = test.read('src/filelist.gypd').replace(
'\r', '').replace('\\\\', '/')
expect = test.read('filelist.gypd.golden').replace('\r', '')
if not test.match(contents, expect):
print "Unexpected contents of `src/filelist.gypd'"
test.diff(expect, contents, 'src/filelist.gypd ')
test.fail_test()
contents = test.read('src/names.txt')
expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
if not test.match(contents, expect):
print "Unexpected contents of `src/names.txt'"
test.diff(expect, contents, 'src/names.txt ')
test.fail_test()
test.pass_test()
| bsd-3-clause |
DarkEnergyScienceCollaboration/SLCosmo | python/desc/slcosmo/TDC2.py | 1 | 6769 | import numpy as np
import scipy.misc
c = 3e5 #km/s
class TDC2ensemble(object):
"""
In TDC2, we expect time delays to be inferred by the Good Teams and
submitted as ensembles of posterior samples, in plain text tables,
one time delay per column (AB, AC, AD) for quads, and just (AB) for
doubles. The headers of these files will contain the same Fermat
potential information that was provided in the data file, ie an
'observed' FP difference and its uncertainty for each image pair,
plus an overall 'Q' factor that enables the conversion between time
delay, FP, and time delay distance.
This class is a data structure, for storing all the information
provided in a TDC2 inferred time delay sample file.
It could be nice if we moved to using pandas dataframes, so that we
can refer to the time delays as eg dt['AB'], and the corresponding
FP differences as DeltaFP['AB'] +/- DeltaFP_err['AB'].
Use cases:
1. Get samples and header information from a file
2. Enable SLCosmo factory method to create mock TDC2 ensembles from scratch
3. Write mock samples and header information to a file
"""
def __init__(self):
self.source = None
self.Nsamples = None
self.dt_obs = []
return
@staticmethod
def read_in_from(tdc2samplefile):
"""
Read in both the posterior sample time delays and the Fermat potential header information, and store it for re-use.
Parameters:
-----------
tdc2samplefile : string
Name of the file to read from.
Returns:
--------
TDC2ensemble object
A TDC2ensemble object filled with the read in data.
Notes:
------
The samples are stored in a 2D numpy array with one row for each
posterior sample, and one column for each time delay. Doubles will only have
one time delay ('AB'), while quads will have at least three
('AB', 'AC', 'AD').
Possible failure modes:
1. File does not exist
2. File has no samples in it
3. File has no header in it
4. Samples are not 2D numpy array
5. Array has wrong number of columns (time delays - should be 1 or 3, and equal to Ndt)
"""
my_object = TDC2ensemble()
my_object.source = tdc2samplefile
my_object._read_header()
my_object.dt_obs = np.loadtxt(my_object.source)
if len(my_object.dt_obs.shape) == 1:
my_object.Nim = 2
else:
my_object.Nim = 4
my_object.Nsamples = len(my_object.dt_obs)
return my_object
def _read_header(self):
self.DeltaFP_obs = []
self.DeltaFP_err = []
with open(self.source) as input_:
for line in input_:
if line.startswith('# Q'):
self.Q = float(line.strip().split(':')[1])
if line.startswith('# Delta'):
key, value = line.strip()[1:].split(':')
if key.find('err') != -1:
self.DeltaFP_err.append(float(value))
else:
self.DeltaFP_obs.append(float(value))
self.DeltaFP_obs = np.array(self.DeltaFP_obs)
self.DeltaFP_err = np.array(self.DeltaFP_err)
def write_out_to(self, tdc2samplefile):
"""
Write out both the posterior sample time delays and the Fermat
potential header information in a plain text file.
Parameters:
-----------
tdc2samplefile : string
The name of the file to be written to.
Notes:
------
Possible failure modes:
1. Samples array has no samples in it, even if Nsamples is not None
2. File is not actually written
"""
if self.Nsamples is None:
print("No samples to write out, skipping.")
else:
# First write out the header, over-writing the file:
self.form_header()
np.savetxt(tdc2samplefile, self.dt_obs,
header=self.header, comments='# ')
return
def log_likelihood(self, H0, fast=True):
"""
Compute the likelihood of the proposed Hubble constant H0 given
the Fermat potential difference data, marginalizing over the
time delay PDF provided (approximately) in the samples.
Parameters:
-----------
H0 : float
The Hubble constant under evaluation.
fast : Boolean, optional
Just in case you want to do the calculation
without vectorisation.
Returns:
--------
logL : float
The value of the log likelihood.
See Also:
---------
SLCosmo.compute_the_joint_log_likelihood
Notes:
------
Don't choose `fast=False`.
"""
if fast:
x = self.DeltaFP_obs - (c * self.dt_obs * H0 / self.Q)
chisq = (x/self.DeltaFP_err)**2.0
logL = -0.5 * chisq \
- np.log(np.sqrt(2*np.pi) * self.DeltaFP_err)
else:
logL = np.array([])
Ns = 0
for i in range(self.Nsamples):
for j in range(self.Nim - 1):
Ns += 1
x = self.DeltaFP_obs[j] - \
(c * self.dt_obs[i,j] * H0 / self.Q)
chisq = (x/self.DeltaFP_err[j])**2.0
logL_el = -0.5 * chisq \
- np.log(np.sqrt(2*np.pi) * self.DeltaFP_err[j])
logL = np.append(logL,logL_el)
return scipy.misc.logsumexp(logL) - np.log(len(np.ravel(logL)))
def form_header(self):
self.header = \
"Time Delay Challenge 2 Posterior Sample Time Delays\n\
\n\
Notes:\n\
* Time delays should be given in days. Positive dt_AB means that light\n\
curve A leads (not lags) light curve B.\n\
* Q is the H0-free time delay distance, a function of zd, zs and\n\
cosmology. Q has units of km / s: D_dt = Q / H0\n\
* Fermat potential differences DeltaFP are given in units of\n\
day km / s / Mpc, such that the predicted time delay is\n\
dt = (Q / (c * H0)) * DeltaFP, in days. c = 3.00e5 km/s\n\
\n\
Q: "+str(self.Q)+"\n"
names = ['AB', 'AC', 'AD']
for k in range(self.Nim - 1):
self.header = self.header + \
"DeltaFP_"+names[k]+": "+str(self.DeltaFP_obs[k])+"\n" + \
"DeltaFP_"+names[k]+"_err: "+str(self.DeltaFP_err[k])+"\n"
self.header = self.header + "\n"
for k in range(self.Nim - 1):
self.header = self.header + \
" dt_"+names[k]
return
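# A hedged usage sketch covering use cases 1 and 3 from the class docstring;
# the file names and trial H0 value are illustrative assumptions.
if __name__ == '__main__':
    ensemble = TDC2ensemble.read_in_from('lens0_time_delays.txt')
    print(ensemble.log_likelihood(72.0))  # log-likelihood of H0 = 72 km/s/Mpc
    ensemble.write_out_to('lens0_time_delays_copy.txt')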
| bsd-3-clause |
jimberlage/servo | tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_cookies.py | 6 | 2086 | import sys
import unittest
import pytest
wptserve = pytest.importorskip("wptserve")
from .base import TestUsingServer
class TestResponseSetCookie(TestUsingServer):
def test_name_value(self):
@wptserve.handlers.handler
def handler(request, response):
response.set_cookie("name", "value")
return "Test"
route = ("GET", "/test/name_value", handler)
self.server.router.register(*route)
resp = self.request(route[1])
self.assertEqual(resp.info()["Set-Cookie"], "name=value; Path=/")
def test_unset(self):
@wptserve.handlers.handler
def handler(request, response):
response.set_cookie("name", "value")
response.unset_cookie("name")
return "Test"
route = ("GET", "/test/unset", handler)
self.server.router.register(*route)
resp = self.request(route[1])
self.assertTrue("Set-Cookie" not in resp.info())
def test_delete(self):
@wptserve.handlers.handler
def handler(request, response):
response.delete_cookie("name")
return "Test"
route = ("GET", "/test/delete", handler)
self.server.router.register(*route)
resp = self.request(route[1])
parts = dict(item.split("=") for
item in resp.info()["Set-Cookie"].split("; ") if item)
self.assertEqual(parts["name"], "")
self.assertEqual(parts["Path"], "/")
#Should also check that expires is in the past
class TestRequestCookies(TestUsingServer):
@pytest.mark.xfail(sys.version_info >= (3,), reason="wptserve only works on Py2")
def test_set_cookie(self):
@wptserve.handlers.handler
def handler(request, response):
return request.cookies["name"].value
route = ("GET", "/test/set_cookie", handler)
self.server.router.register(*route)
resp = self.request(route[1], headers={"Cookie": "name=value"})
self.assertEqual(resp.read(), b"value")
if __name__ == '__main__':
unittest.main()
| mpl-2.0 |
Titulacion-Sistemas/PythonTitulacion-EV | Lib/site-packages/django/views/decorators/csrf.py | 228 | 2201 | from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.utils.decorators import decorator_from_middleware, available_attrs
from functools import wraps
csrf_protect = decorator_from_middleware(CsrfViewMiddleware)
csrf_protect.__name__ = "csrf_protect"
csrf_protect.__doc__ = """
This decorator adds CSRF protection in exactly the same way as
CsrfViewMiddleware, but it can be used on a per view basis. Using both, or
using the decorator multiple times, is harmless and efficient.
"""
class _EnsureCsrfToken(CsrfViewMiddleware):
# We need this to behave just like the CsrfViewMiddleware, but not reject
# requests or log warnings.
def _reject(self, request, reason):
return None
requires_csrf_token = decorator_from_middleware(_EnsureCsrfToken)
requires_csrf_token.__name__ = 'requires_csrf_token'
requires_csrf_token.__doc__ = """
Use this decorator on views that need a correct csrf_token available to
RequestContext, but without the CSRF protection that csrf_protect
enforces.
"""
class _EnsureCsrfCookie(CsrfViewMiddleware):
def _reject(self, request, reason):
return None
def process_view(self, request, callback, callback_args, callback_kwargs):
retval = super(_EnsureCsrfCookie, self).process_view(request, callback, callback_args, callback_kwargs)
# Forces process_response to send the cookie
get_token(request)
return retval
ensure_csrf_cookie = decorator_from_middleware(_EnsureCsrfCookie)
ensure_csrf_cookie.__name__ = 'ensure_csrf_cookie'
ensure_csrf_cookie.__doc__ = """
Use this decorator to ensure that a view sets a CSRF cookie, whether or not it
uses the csrf_token template tag, or the CsrfViewMiddleware is used.
"""
def csrf_exempt(view_func):
"""
Marks a view function as being exempt from the CSRF view protection.
"""
# We could just do view_func.csrf_exempt = True, but decorators
# are nicer if they don't have side-effects, so we return a new
# function.
def wrapped_view(*args, **kwargs):
return view_func(*args, **kwargs)
wrapped_view.csrf_exempt = True
return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view)
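# A hedged sketch of applying these decorators to ordinary views; the view
# names and bodies are illustrative assumptions.
#
# from django.http import HttpResponse
# from django.views.decorators.csrf import csrf_protect, csrf_exempt
#
# @csrf_protect
# def submit(request):
#     return HttpResponse('ok')  # POSTs without a valid CSRF token get a 403
#
# @csrf_exempt
# def webhook(request):
#     return HttpResponse('ok')  # CSRF checks are skipped for this view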
| mit |
rvalyi/l10n-brazil | l10n_br_zip_correios/models/webservice_client.py | 1 | 3178 | # -*- coding: utf-8 -*-
# Address from Brazilian Localization ZIP by Correios to Odoo
# Copyright (C) 2015 KMEE (http://www.kmee.com.br)
# @author Michell Stuttgart <michell.stuttgart@kmee.com.br>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
import logging
from openerp.exceptions import Warning as UserError
from openerp.tools.translate import _
try:
from suds import WebFault
from suds.client import Client, TransportError
except ImportError:
raise UserError(_(u'Error!'), _(u"The Suds library is not installed!"))
_logger = logging.getLogger(__name__)
class WebServiceClient(object):
def get_address(self, zip_code):
if not zip_code:
return
zip_str = zip_code.replace('-', '')
if len(zip_str) == 8:
if not self.env['l10n_br.zip'].search([('zip', '=', zip_str)]):
# SigepWeb webservice url
url_prod = 'https://apps.correios.com.br/SigepMasterJPA' \
'/AtendeClienteService/AtendeCliente?wsdl'
try:
# Connect Brazil Correios webservice
res = Client(url_prod).service.consultaCEP(zip_str)
# Search Brazil id
country_ids = self.env['res.country'].search(
[('code', '=', 'BR')])
# Search state with state_code and country id
state_ids = self.env['res.country.state'].search([
('code', '=', str(res.uf)),
('country_id.id', 'in', country_ids.ids)])
# city name
city_name = str(res.cidade.encode('utf8'))
# search city with name and state
city_ids = self.env['l10n_br_base.city'].search([
('name', '=', city_name),
('state_id.id', 'in', state_ids.ids)])
values = {
'zip': zip_str,
'street': str(
res.end.encode('utf8')) if res.end else '',
'district': str(
res.bairro.encode('utf8')) if res.bairro
else '',
'street_type': str(
res.complemento.encode('utf8')) if res.complemento
else '',
'l10n_br_city_id': city_ids.ids[
0] if city_ids else False,
'state_id': state_ids.ids[0] if state_ids else False,
'country_id': country_ids.ids[
0] if country_ids else False,
}
# Create zip object
self.env['l10n_br.zip'].create(values)
except TransportError as e:
_logger.error(e.message, exc_info=True)
raise UserError(_('Error!'), e.message)
except WebFault as e:
_logger.error(e.message, exc_info=True)
raise UserError(_('Error!'), e.message)
| agpl-3.0 |
kashif/scikit-learn | sklearn/learning_curve.py | 30 | 14601 | """Utilities to evaluate models with respect to a variable
"""
# Author: Alexander Fabisch <afabisch@informatik.uni-bremen.de>
#
# License: BSD 3 clause
import warnings
import numpy as np
from .base import is_classifier, clone
from .cross_validation import check_cv
from .externals.joblib import Parallel, delayed
from .cross_validation import _safe_split, _score, _fit_and_score
from .metrics.scorer import check_scoring
from .utils import indexable
from .utils.fixes import astype
warnings.warn("This module has been deprecated in favor of the "
"model_selection module into which all the functions are moved."
" This module will be removed in 0.20",
DeprecationWarning)
__all__ = ['learning_curve', 'validation_curve']
def learning_curve(estimator, X, y, train_sizes=np.linspace(0.1, 1.0, 5),
cv=None, scoring=None, exploit_incremental_learning=False,
n_jobs=1, pre_dispatch="all", verbose=0):
"""Learning curve.
Determines cross-validated training and test scores for different training
set sizes.
A cross-validation generator splits the whole dataset k times in training
and test data. Subsets of the training set with varying sizes will be used
to train the estimator and a score for each training subset size and the
test set will be computed. Afterwards, the scores will be averaged over
all k runs for each training subset size.
Read more in the :ref:`User Guide <learning_curves>`.
Parameters
----------
estimator : object type that implements the "fit" and "predict" methods
An object of that type which is cloned for each validation.
X : array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape (n_samples) or (n_samples, n_features), optional
Target relative to X for classification or regression;
None for unsupervised learning.
train_sizes : array-like, shape (n_ticks,), dtype float or int
Relative or absolute numbers of training examples that will be used to
generate the learning curve. If the dtype is float, it is regarded as a
fraction of the maximum size of the training set (that is determined
by the selected validation method), i.e. it has to be within (0, 1].
Otherwise it is interpreted as absolute sizes of the training sets.
Note that for classification the number of samples usually have to
be big enough to contain at least one sample from each class.
(default: np.linspace(0.1, 1.0, 5))
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- An object to be used as a cross-validation generator.
- An iterable yielding train/test splits.
For integer/None inputs, if the estimator is a classifier and ``y`` is
either binary or multiclass, :class:`StratifiedKFold` used. In all
other cases, :class:`KFold` is used.
Refer :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
scoring : string, callable or None, optional, default: None
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
exploit_incremental_learning : boolean, optional, default: False
If the estimator supports incremental learning, this will be
used to speed up fitting for different training set sizes.
n_jobs : integer, optional
Number of jobs to run in parallel (default 1).
pre_dispatch : integer or string, optional
Number of predispatched jobs for parallel execution (default is
all). The option can reduce the allocated memory. The string can
be an expression like '2*n_jobs'.
verbose : integer, optional
Controls the verbosity: the higher, the more messages.
Returns
-------
train_sizes_abs : array, shape = (n_unique_ticks,), dtype int
Numbers of training examples that has been used to generate the
learning curve. Note that the number of ticks might be less
than n_ticks because duplicate entries will be removed.
train_scores : array, shape (n_ticks, n_cv_folds)
Scores on training sets.
test_scores : array, shape (n_ticks, n_cv_folds)
Scores on test set.
Notes
-----
See :ref:`examples/model_selection/plot_learning_curve.py
<example_model_selection_plot_learning_curve.py>`
"""
if exploit_incremental_learning and not hasattr(estimator, "partial_fit"):
raise ValueError("An estimator must support the partial_fit interface "
"to exploit incremental learning")
X, y = indexable(X, y)
# Make a list since we will be iterating multiple times over the folds
cv = list(check_cv(cv, X, y, classifier=is_classifier(estimator)))
scorer = check_scoring(estimator, scoring=scoring)
# HACK as long as boolean indices are allowed in cv generators
if cv[0][0].dtype == bool:
new_cv = []
for i in range(len(cv)):
new_cv.append((np.nonzero(cv[i][0])[0], np.nonzero(cv[i][1])[0]))
cv = new_cv
n_max_training_samples = len(cv[0][0])
# Because the lengths of folds can be significantly different, it is
# not guaranteed that we use all of the available training data when we
# use the first 'n_max_training_samples' samples.
train_sizes_abs = _translate_train_sizes(train_sizes,
n_max_training_samples)
n_unique_ticks = train_sizes_abs.shape[0]
if verbose > 0:
print("[learning_curve] Training set sizes: " + str(train_sizes_abs))
parallel = Parallel(n_jobs=n_jobs, pre_dispatch=pre_dispatch,
verbose=verbose)
if exploit_incremental_learning:
classes = np.unique(y) if is_classifier(estimator) else None
out = parallel(delayed(_incremental_fit_estimator)(
clone(estimator), X, y, classes, train, test, train_sizes_abs,
scorer, verbose) for train, test in cv)
else:
out = parallel(delayed(_fit_and_score)(
clone(estimator), X, y, scorer, train[:n_train_samples], test,
verbose, parameters=None, fit_params=None, return_train_score=True)
for train, test in cv for n_train_samples in train_sizes_abs)
out = np.array(out)[:, :2]
n_cv_folds = out.shape[0] // n_unique_ticks
out = out.reshape(n_cv_folds, n_unique_ticks, 2)
out = np.asarray(out).transpose((2, 1, 0))
return train_sizes_abs, out[0], out[1]
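# A hedged usage sketch of learning_curve; the dataset, estimator, and fold
# count are illustrative assumptions.
#
# from sklearn.datasets import load_digits
# from sklearn.svm import SVC
# digits = load_digits()
# sizes, train_scores, test_scores = learning_curve(
#     SVC(kernel='linear'), digits.data, digits.target,
#     train_sizes=np.linspace(0.1, 1.0, 5), cv=5)
# mean_test = test_scores.mean(axis=1)  # one averaged score per training size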
def _translate_train_sizes(train_sizes, n_max_training_samples):
"""Determine absolute sizes of training subsets and validate 'train_sizes'.
Examples:
_translate_train_sizes([0.5, 1.0], 10) -> [5, 10]
_translate_train_sizes([5, 10], 10) -> [5, 10]
Parameters
----------
train_sizes : array-like, shape (n_ticks,), dtype float or int
Numbers of training examples that will be used to generate the
learning curve. If the dtype is float, it is regarded as a
fraction of 'n_max_training_samples', i.e. it has to be within (0, 1].
n_max_training_samples : int
Maximum number of training samples (upper bound of 'train_sizes').
Returns
-------
train_sizes_abs : array, shape (n_unique_ticks,), dtype int
Numbers of training examples that will be used to generate the
learning curve. Note that the number of ticks might be less
than n_ticks because duplicate entries will be removed.
"""
train_sizes_abs = np.asarray(train_sizes)
n_ticks = train_sizes_abs.shape[0]
n_min_required_samples = np.min(train_sizes_abs)
n_max_required_samples = np.max(train_sizes_abs)
if np.issubdtype(train_sizes_abs.dtype, np.float):
if n_min_required_samples <= 0.0 or n_max_required_samples > 1.0:
raise ValueError("train_sizes has been interpreted as fractions "
"of the maximum number of training samples and "
"must be within (0, 1], but is within [%f, %f]."
% (n_min_required_samples,
n_max_required_samples))
train_sizes_abs = astype(train_sizes_abs * n_max_training_samples,
dtype=np.int, copy=False)
train_sizes_abs = np.clip(train_sizes_abs, 1,
n_max_training_samples)
else:
if (n_min_required_samples <= 0 or
n_max_required_samples > n_max_training_samples):
raise ValueError("train_sizes has been interpreted as absolute "
"numbers of training samples and must be within "
"(0, %d], but is within [%d, %d]."
% (n_max_training_samples,
n_min_required_samples,
n_max_required_samples))
train_sizes_abs = np.unique(train_sizes_abs)
if n_ticks > train_sizes_abs.shape[0]:
warnings.warn("Removed duplicate entries from 'train_sizes'. Number "
"of ticks will be less than than the size of "
"'train_sizes' %d instead of %d)."
% (train_sizes_abs.shape[0], n_ticks), RuntimeWarning)
return train_sizes_abs
def _incremental_fit_estimator(estimator, X, y, classes, train, test,
train_sizes, scorer, verbose):
"""Train estimator on training subsets incrementally and compute scores."""
train_scores, test_scores = [], []
partitions = zip(train_sizes, np.split(train, train_sizes)[:-1])
for n_train_samples, partial_train in partitions:
train_subset = train[:n_train_samples]
X_train, y_train = _safe_split(estimator, X, y, train_subset)
X_partial_train, y_partial_train = _safe_split(estimator, X, y,
partial_train)
X_test, y_test = _safe_split(estimator, X, y, test, train_subset)
if y_partial_train is None:
estimator.partial_fit(X_partial_train, classes=classes)
else:
estimator.partial_fit(X_partial_train, y_partial_train,
classes=classes)
train_scores.append(_score(estimator, X_train, y_train, scorer))
test_scores.append(_score(estimator, X_test, y_test, scorer))
return np.array((train_scores, test_scores)).T
def validation_curve(estimator, X, y, param_name, param_range, cv=None,
scoring=None, n_jobs=1, pre_dispatch="all", verbose=0):
"""Validation curve.
Determine training and test scores for varying parameter values.
Compute scores for an estimator with different values of a specified
parameter. This is similar to grid search with one parameter. However, this
will also compute training scores and is merely a utility for plotting the
results.
Read more in the :ref:`User Guide <validation_curve>`.
Parameters
----------
estimator : object type that implements the "fit" and "predict" methods
An object of that type which is cloned for each validation.
X : array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape (n_samples) or (n_samples, n_features), optional
Target relative to X for classification or regression;
None for unsupervised learning.
param_name : string
Name of the parameter that will be varied.
param_range : array-like, shape (n_values,)
The values of the parameter that will be evaluated.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- An object to be used as a cross-validation generator.
- An iterable yielding train/test splits.
For integer/None inputs, if the estimator is a classifier and ``y`` is
either binary or multiclass, :class:`StratifiedKFold` used. In all
other cases, :class:`KFold` is used.
Refer :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
scoring : string, callable or None, optional, default: None
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
n_jobs : integer, optional
Number of jobs to run in parallel (default 1).
pre_dispatch : integer or string, optional
Number of predispatched jobs for parallel execution (default is
all). The option can reduce the allocated memory. The string can
be an expression like '2*n_jobs'.
verbose : integer, optional
Controls the verbosity: the higher, the more messages.
Returns
-------
train_scores : array, shape (n_ticks, n_cv_folds)
Scores on training sets.
test_scores : array, shape (n_ticks, n_cv_folds)
Scores on test set.
Notes
-----
See
:ref:`examples/model_selection/plot_validation_curve.py
<example_model_selection_plot_validation_curve.py>`
"""
X, y = indexable(X, y)
cv = check_cv(cv, X, y, classifier=is_classifier(estimator))
scorer = check_scoring(estimator, scoring=scoring)
parallel = Parallel(n_jobs=n_jobs, pre_dispatch=pre_dispatch,
verbose=verbose)
out = parallel(delayed(_fit_and_score)(
estimator, X, y, scorer, train, test, verbose,
parameters={param_name: v}, fit_params=None, return_train_score=True)
for train, test in cv for v in param_range)
out = np.asarray(out)[:, :2]
n_params = len(param_range)
n_cv_folds = out.shape[0] // n_params
out = out.reshape(n_cv_folds, n_params, 2).transpose((2, 1, 0))
return out[0], out[1]
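# A hedged usage sketch of validation_curve, reusing the assumptions of the
# learning_curve sketch above (digits dataset, SVC estimator).
#
# param_range = np.logspace(-6, -1, 5)
# train_scores, test_scores = validation_curve(
#     SVC(), digits.data, digits.target,
#     param_name='gamma', param_range=param_range, cv=5)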
| bsd-3-clause |
pybrain/pybrain | pybrain/rl/environments/mazes/tasks/cheesemaze.py | 25 | 1074 | __author__ = 'Tom Schaul, tom@idsia.ch'
from scipy import zeros, array
from .maze import MazeTask
class CheeseMaze(MazeTask):
"""
#######
# #
# # # #
# #*# #
#######
"""
observations = 7
discount = 0.95
topology = array([[1] * 7,
[1, 0, 1, 0, 1, 0, 1],
[1, 0, 1, 0, 1, 0, 1],
[1, 0, 0, 0, 0, 0, 1],
[1] * 7])
goal = (1, 3)
def getObservation(self):
""" observations are encoded in a 1-n encoding of possible wall combinations. """
res = zeros(7)
obs = self.env.getSensors()
if self.env.perseus == self.env.goal:
res[6] = 1
elif sum(obs) == 3:
res[0] = 1
elif sum(obs) == 1:
res[5] = 1
elif obs[0] == obs[1]:
res[1] = 1
elif obs[0] == obs[3]:
res[2] = 1
elif obs[0] == obs[2]:
if obs[0] == 1:
res[3] = 1
else:
res[4] = 1
return res
| bsd-3-clause |
ArunMichaelDsouza/sass-foundations | node_modules/grunt-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/generator/xcode.py | 426 | 56534 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import filecmp
import gyp.common
import gyp.xcodeproj_file
import gyp.xcode_ninja
import errno
import os
import sys
import posixpath
import re
import shutil
import subprocess
import tempfile
# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific. The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files. The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific. INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'
# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_SUFFIX': '.dylib',
# INTERMEDIATE_DIR is a place for targets to build up intermediate products.
# It is specific to each build environment. It is only guaranteed to exist
# and be constant within the context of a project, corresponding to a single
# input file. Some build environments may allow their intermediate directory
# to be shared on a wider scale, but this is not guaranteed.
'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
'OS': 'mac',
'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
'CONFIGURATION_NAME': '$(CONFIGURATION)',
}
# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
# 'mac_framework_dirs', input already handles _dirs endings.
]
# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
'ios_app_extension',
'ios_watch_app',
'ios_watchkit_extension',
'mac_bundle',
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
'mac_xctest_bundle',
'xcode_create_dependents_test_runner',
]
# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
]
# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([
'$(SDKROOT)/usr/lib',
'$(SDKROOT)/usr/local/lib',
])
def CreateXCConfigurationList(configuration_names):
xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
if len(configuration_names) == 0:
configuration_names = ['Default']
for configuration_name in configuration_names:
xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
'name': configuration_name})
xccl.AppendProperty('buildConfigurations', xcbc)
xccl.SetProperty('defaultConfigurationName', configuration_names[0])
return xccl
class XcodeProject(object):
def __init__(self, gyp_path, path, build_file_dict):
self.gyp_path = gyp_path
self.path = path
self.project = gyp.xcodeproj_file.PBXProject(path=path)
projectDirPath = gyp.common.RelativePath(
os.path.dirname(os.path.abspath(self.gyp_path)),
os.path.dirname(path) or '.')
self.project.SetProperty('projectDirPath', projectDirPath)
self.project_file = \
gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
self.build_file_dict = build_file_dict
# TODO(mark): add destructor that cleans up self.path if created_dir is
# True and things didn't complete successfully. Or do something even
# better with "try"?
self.created_dir = False
try:
os.makedirs(self.path)
self.created_dir = True
except OSError, e:
if e.errno != errno.EEXIST:
raise
def Finalize1(self, xcode_targets, serialize_all_tests):
# Collect a list of all of the build configuration names used by the
# various targets in the file. It is very heavily advised to keep each
# target in an entire project (even across multiple project files) using
# the same set of configuration names.
configurations = []
for xct in self.project.GetProperty('targets'):
xccl = xct.GetProperty('buildConfigurationList')
xcbcs = xccl.GetProperty('buildConfigurations')
for xcbc in xcbcs:
name = xcbc.GetProperty('name')
if name not in configurations:
configurations.append(name)
# Replace the XCConfigurationList attached to the PBXProject object with
# a new one specifying all of the configuration names used by the various
# targets.
try:
xccl = CreateXCConfigurationList(configurations)
self.project.SetProperty('buildConfigurationList', xccl)
except:
sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
raise
# The need for this setting is explained above where _intermediate_var is
# defined. The comments below about wanting to avoid project-wide build
# settings apply here too, but this needs to be set on a project-wide basis
# so that files relative to the _intermediate_var setting can be displayed
# properly in the Xcode UI.
#
# Note that for configuration-relative files such as anything relative to
# _intermediate_var, for the purposes of UI tree view display, Xcode will
# only resolve the configuration name once, when the project file is
# opened. If the active build configuration is changed, the project file
# must be closed and reopened if it is desired for the tree view to update.
# This is filed as Apple radar 6588391.
xccl.SetBuildSetting(_intermediate_var,
'$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
xccl.SetBuildSetting(_shared_intermediate_var,
'$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
# Set user-specified project-wide build settings and config files. This
# is intended to be used very sparingly. Really, almost everything should
# go into target-specific build settings sections. The project-wide
# settings are only intended to be used in cases where Xcode attempts to
# resolve variable references in a project context as opposed to a target
# context, such as when resolving sourceTree references while building up
# the tree view for UI display.
# Any values set globally are applied to all configurations, then any
# per-configuration values are applied.
for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
xccl.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in self.build_file_dict:
config_ref = self.project.AddOrGetFileInRootGroup(
self.build_file_dict['xcode_config_file'])
xccl.SetBaseConfiguration(config_ref)
build_file_configurations = self.build_file_dict.get('configurations', {})
if build_file_configurations:
for config_name in configurations:
build_file_configuration_named = \
build_file_configurations.get(config_name, {})
if build_file_configuration_named:
xcc = xccl.ConfigurationNamed(config_name)
for xck, xcv in build_file_configuration_named.get('xcode_settings',
{}).iteritems():
xcc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in build_file_configuration_named:
config_ref = self.project.AddOrGetFileInRootGroup(
build_file_configurations[config_name]['xcode_config_file'])
xcc.SetBaseConfiguration(config_ref)
# Sort the targets based on how they appeared in the input.
# TODO(mark): Like a lot of other things here, this assumes internal
# knowledge of PBXProject - in this case, of its "targets" property.
# ordinary_targets are ordinary targets that are already in the project
# file. run_test_targets are the targets that run unittests and should be
# used for the Run All Tests target. support_targets are the action/rule
# targets used by GYP file targets, just kept for the assert check.
ordinary_targets = []
run_test_targets = []
support_targets = []
# targets is the full list of targets in the project.
targets = []
# Does the project define its own "all" target?
has_custom_all = False
# targets_for_all is the list of ordinary_targets that should be listed
# in this project's "All" target. It includes each non_runtest_target
# that does not have suppress_wildcard set.
targets_for_all = []
for target in self.build_file_dict['targets']:
target_name = target['target_name']
toolset = target['toolset']
qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
toolset)
xcode_target = xcode_targets[qualified_target]
# Make sure that the target being added to the sorted list is already in
# the unsorted list.
assert xcode_target in self.project._properties['targets']
targets.append(xcode_target)
ordinary_targets.append(xcode_target)
if xcode_target.support_target:
support_targets.append(xcode_target.support_target)
targets.append(xcode_target.support_target)
if not int(target.get('suppress_wildcard', False)):
targets_for_all.append(xcode_target)
if target_name.lower() == 'all':
has_custom_all = True
# If this target has a 'run_as' attribute, add its target to the
# targets, and add it to the test targets.
if target.get('run_as'):
# Make a target to run something. It should have one
# dependency, the parent xcode target.
xccl = CreateXCConfigurationList(configurations)
run_target = gyp.xcodeproj_file.PBXAggregateTarget({
'name': 'Run ' + target_name,
'productName': xcode_target.GetProperty('productName'),
'buildConfigurationList': xccl,
},
parent=self.project)
run_target.AddDependency(xcode_target)
command = target['run_as']
script = ''
if command.get('working_directory'):
script = script + 'cd "%s"\n' % \
gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
command.get('working_directory'))
if command.get('environment'):
script = script + "\n".join(
['export %s="%s"' %
(key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
for (key, val) in command.get('environment').iteritems()]) + "\n"
# Some tests end up using sockets, files on disk, etc. and can get
# confused if more than one test runs at a time. The generator
# flag 'xcode_serialize_all_test_runs' controls the forcing of all
# tests to run serially. It defaults to True. To get serial runs, this
# little bit of Python does the same thing as the Linux flock utility:
# it makes sure only one test runs at a time.
command_prefix = ''
if serialize_all_tests:
command_prefix = \
"""python -c "import fcntl, subprocess, sys
file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
sys.exit(subprocess.call(sys.argv[1:]))" """
# If we were unable to exec for some reason, exit with an error. Also
# fix up variable references to use shell syntax instead of Xcode
# syntax.
script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
gyp.common.EncodePOSIXShellList(command.get('action')))
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'shellScript': script,
'showEnvVarsInLog': 0,
})
run_target.AppendProperty('buildPhases', ssbp)
# Add the run target to the project file.
targets.append(run_target)
run_test_targets.append(run_target)
xcode_target.test_runner = run_target
# Make sure that the list of targets being replaced is the same length as
# the one replacing it, but allow for the added test runner targets.
assert len(self.project._properties['targets']) == \
len(ordinary_targets) + len(support_targets)
self.project._properties['targets'] = targets
# Get rid of unnecessary levels of depth in groups like the Source group.
self.project.RootGroupsTakeOverOnlyChildren(True)
# Sort the groups nicely. Do this after sorting the targets, because the
# Products group is sorted based on the order of the targets.
self.project.SortGroups()
# Create an "All" target if there's more than one target in this project
# file and the project didn't define its own "All" target. Put a generated
# "All" target first so that people opening up the project for the first
# time will build everything by default.
if len(targets_for_all) > 1 and not has_custom_all:
xccl = CreateXCConfigurationList(configurations)
all_target = gyp.xcodeproj_file.PBXAggregateTarget(
{
'buildConfigurationList': xccl,
'name': 'All',
},
parent=self.project)
for target in targets_for_all:
all_target.AddDependency(target)
# TODO(mark): This is evil because it relies on internal knowledge of
# PBXProject._properties. It's important to get the "All" target first,
# though.
self.project._properties['targets'].insert(0, all_target)
# The same, but for run_test_targets.
if len(run_test_targets) > 1:
xccl = CreateXCConfigurationList(configurations)
run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
{
'buildConfigurationList': xccl,
'name': 'Run All Tests',
},
parent=self.project)
for run_test_target in run_test_targets:
run_all_tests_target.AddDependency(run_test_target)
# Insert after the "All" target, which must exist if there is more than
# one run_test_target.
self.project._properties['targets'].insert(1, run_all_tests_target)
def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
# Finalize2 needs to happen in a separate step because the process of
# updating references to other projects depends on the ordering of targets
# within remote project files. Finalize1 is responsible for sorting duty,
# and once all project files are sorted, Finalize2 can come in and update
# these references.
# To support making a "test runner" target that will run all the tests
# that are direct dependents of any given target, we look for
# xcode_create_dependents_test_runner being set on an Aggregate target,
# and generate a second target that will run the test runners found under
# the marked target.
for bf_tgt in self.build_file_dict['targets']:
if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
tgt_name = bf_tgt['target_name']
toolset = bf_tgt['toolset']
qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
tgt_name, toolset)
xcode_target = xcode_targets[qualified_target]
if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
# Collect all the run test targets.
all_run_tests = []
pbxtds = xcode_target.GetProperty('dependencies')
for pbxtd in pbxtds:
pbxcip = pbxtd.GetProperty('targetProxy')
dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
if hasattr(dependency_xct, 'test_runner'):
all_run_tests.append(dependency_xct.test_runner)
# Directly depend on all the runners as they depend on the target
# that builds them.
if len(all_run_tests) > 0:
run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
'name': 'Run %s Tests' % tgt_name,
'productName': tgt_name,
},
parent=self.project)
for run_test_target in all_run_tests:
run_all_target.AddDependency(run_test_target)
# Insert the test runner after the related target.
idx = self.project._properties['targets'].index(xcode_target)
self.project._properties['targets'].insert(idx + 1, run_all_target)
# Update all references to other projects, to make sure that the lists of
# remote products are complete. Otherwise, Xcode will fill them in when
# it opens the project file, which will result in unnecessary diffs.
# TODO(mark): This is evil because it relies on internal knowledge of
# PBXProject._other_pbxprojects.
for other_pbxproject in self.project._other_pbxprojects.keys():
self.project.AddOrGetProjectReference(other_pbxproject)
self.project.SortRemoteProductReferences()
# Give everything an ID.
self.project_file.ComputeIDs()
# Make sure that no two objects in the project file have the same ID. If
# multiple objects wind up with the same ID, upon loading the file, Xcode
# will only recognize one object (the last one in the file?) and the
# results are unpredictable.
self.project_file.EnsureNoIDCollisions()
def Write(self):
# Write the project file to a temporary location first. Xcode watches for
# changes to the project file and presents a UI sheet offering to reload
# the project when it does change. However, in some cases, especially when
# multiple projects are open or when Xcode is busy, things don't work so
# seamlessly. Sometimes, Xcode is able to detect that a project file has
# changed but can't unload it because something else is referencing it.
# To mitigate this problem, and to avoid even having Xcode present the UI
# sheet when an open project is rewritten for inconsequential changes, the
# project file is written to a temporary file in the xcodeproj directory
# first. The new temporary file is then compared to the existing project
# file, if any. If they differ, the new file replaces the old; otherwise,
# the new project file is simply deleted. Xcode properly detects a file
# being renamed over an open project file as a change and so it remains
# able to present the "project file changed" sheet under this system.
# Writing to a temporary file first also avoids the possible problem of
# Xcode rereading an incomplete project file.
(output_fd, new_pbxproj_path) = \
tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
dir=self.path)
try:
output_file = os.fdopen(output_fd, 'wb')
self.project_file.Print(output_file)
output_file.close()
pbxproj_path = os.path.join(self.path, 'project.pbxproj')
same = False
try:
same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if same:
# The new file is identical to the old one, just get rid of the new
# one.
os.unlink(new_pbxproj_path)
else:
# The new file is different from the old one, or there is no old one.
# Rename the new file to the permanent name.
#
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
# file that can only be read by the owner, regardless of the umask.
# There's no reason to not respect the umask here, which means that
# an extra hoop is required to fetch it and reset the new file's mode.
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
umask = os.umask(077)
os.umask(umask)
os.chmod(new_pbxproj_path, 0666 & ~umask)
os.rename(new_pbxproj_path, pbxproj_path)
except Exception:
# Don't leave turds behind. In fact, if this code was responsible for
# creating the xcodeproj directory, get rid of that too.
os.unlink(new_pbxproj_path)
if self.created_dir:
shutil.rmtree(self.path, True)
raise
def AddSourceToTarget(source, type, pbxp, xct):
# TODO(mark): Perhaps source_extensions and library_extensions can be made a
# little bit fancier.
source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
# .o is conceptually more of a "source" than a "library," but Xcode thinks
# of "sources" as things to compile and "libraries" (or "frameworks") as
# things to link with. Adding an object file to an Xcode target's frameworks
# phase works properly.
library_extensions = ['a', 'dylib', 'framework', 'o']
basename = posixpath.basename(source)
(root, ext) = posixpath.splitext(basename)
if ext:
ext = ext[1:].lower()
if ext in source_extensions and type != 'none':
xct.SourcesPhase().AddFile(source)
elif ext in library_extensions and type != 'none':
xct.FrameworksPhase().AddFile(source)
else:
# Files that aren't added to a sources or frameworks build phase can still
# go into the project file, just not as part of a build phase.
pbxp.AddOrGetFileInRootGroup(source)
def AddResourceToTarget(resource, pbxp, xct):
# TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
# where it's used.
xct.ResourcesPhase().AddFile(resource)
def AddHeaderToTarget(header, pbxp, xct, is_public):
# TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
# where it's used.
settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
xct.HeadersPhase().AddFile(header, settings)
_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
"""Expands Xcode-style $(VARIABLES) in string per the expansions dict.
In some rare cases, it is appropriate to expand Xcode variables when a
project file is generated. For any substring $(VAR) in string, if VAR is a
key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
Any $(VAR) substring in string for which VAR is not a key in the expansions
dict will remain in the returned string.
"""
matches = _xcode_variable_re.findall(string)
if matches is None:
return string
matches.reverse()
for match in matches:
(to_replace, variable) = match
if variable not in expansions:
continue
replacement = expansions[variable]
string = re.sub(re.escape(to_replace), replacement, string)
return string
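# For example, ExpandXcodeVariables('$(INPUT_FILE_BASE).cc',
# {'INPUT_FILE_BASE': 'two'}) returns 'two.cc', while
# ExpandXcodeVariables('$(UNKNOWN).cc', {}) returns '$(UNKNOWN).cc'
# unchanged, because UNKNOWN is not a key in the expansions dict.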
_xcode_define_re = re.compile(r'([\\\"\' ])')
def EscapeXcodeDefine(s):
"""We must escape the defines that we give to XCode so that it knows not to
split on spaces and to respect backslash and quote literals. However, we
must not quote the define, or Xcode will incorrectly intepret variables
especially $(inherited)."""
return re.sub(_xcode_define_re, r'\\\1', s)
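# For example, EscapeXcodeDefine('NAME="two words"') returns
# 'NAME=\"two\ words\"', so Xcode sees a single define token with the
# quotes and the space preserved as literals.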
def PerformBuild(data, configurations, params):
options = params['options']
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
for config in configurations:
arguments = ['xcodebuild', '-project', xcodeproj_path]
arguments += ['-configuration', config]
print "Building [%s]: %s" % (config, arguments)
subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params):
# Optionally configure each spec to use ninja as the external builder.
ninja_wrapper = params.get('flavor') == 'ninja'
if ninja_wrapper:
(target_list, target_dicts, data) = \
gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
options = params['options']
generator_flags = params.get('generator_flags', {})
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
serialize_all_tests = \
generator_flags.get('xcode_serialize_all_test_runs', True)
skip_excluded_files = \
not generator_flags.get('xcode_list_excluded_files', True)
xcode_projects = {}
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
xcode_projects[build_file] = xcp
pbxp = xcp.project
if parallel_builds:
pbxp.SetProperty('attributes',
{'BuildIndependentTargetsInParallel': 'YES'})
# Add gyp/gypi files to project
if not generator_flags.get('standalone'):
main_group = pbxp.GetProperty('mainGroup')
build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
main_group.AppendChild(build_group)
for included_file in build_file_dict['included_files']:
build_group.AddOrGetFileByPath(included_file, False)
xcode_targets = {}
xcode_target_to_target_dict = {}
for qualified_target in target_list:
[build_file, target_name, toolset] = \
gyp.common.ParseQualifiedTarget(qualified_target)
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise Exception(
'Multiple toolsets not supported in xcode build (target %s)' %
qualified_target)
configuration_names = [spec['default_configuration']]
for configuration_name in sorted(spec['configurations'].keys()):
if configuration_name not in configuration_names:
configuration_names.append(configuration_name)
xcp = xcode_projects[build_file]
pbxp = xcp.project
# Set up the configurations for the target according to the list of names
# supplied.
xccl = CreateXCConfigurationList(configuration_names)
# Create an XCTarget subclass object for the target. The type with
# "+bundle" appended will be used if the target has "mac_bundle" set.
# loadable_modules not in a mac_bundle are mapped to
# com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
# to create a single-file mh_bundle.
_types = {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.googlecode.gyp.xcode.bundle',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
'shared_library+bundle': 'com.apple.product-type.framework',
'executable+extension+bundle': 'com.apple.product-type.app-extension',
'executable+watch+extension+bundle':
'com.apple.product-type.watchkit-extension',
'executable+watch+bundle': 'com.apple.product-type.application.watchapp',
}
target_properties = {
'buildConfigurationList': xccl,
'name': target_name,
}
type = spec['type']
is_xctest = int(spec.get('mac_xctest_bundle', 0))
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
is_app_extension = int(spec.get('ios_app_extension', 0))
is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
is_watch_app = int(spec.get('ios_watch_app', 0))
if type != 'none':
type_bundle_key = type
if is_xctest:
type_bundle_key += '+xctest'
assert type == 'loadable_module', (
'mac_xctest_bundle targets must have type loadable_module '
'(target %s)' % target_name)
elif is_app_extension:
assert is_bundle, ('ios_app_extension flag requires mac_bundle '
'(target %s)' % target_name)
type_bundle_key += '+extension+bundle'
elif is_watchkit_extension:
assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
'(target %s)' % target_name)
type_bundle_key += '+watch+extension+bundle'
elif is_watch_app:
assert is_bundle, ('ios_watch_app flag requires mac_bundle '
'(target %s)' % target_name)
type_bundle_key += '+watch+bundle'
elif is_bundle:
type_bundle_key += '+bundle'
xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
try:
target_properties['productType'] = _types[type_bundle_key]
except KeyError, e:
gyp.common.ExceptionAppend(e, "-- unknown product type while "
"writing target %s" % target_name)
raise
else:
xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
assert not is_xctest, (
'mac_xctest_bundle targets cannot have type none (target "%s")' %
target_name)
target_product_name = spec.get('product_name')
if target_product_name is not None:
target_properties['productName'] = target_product_name
xct = xctarget_type(target_properties, parent=pbxp,
force_outdir=spec.get('product_dir'),
force_prefix=spec.get('product_prefix'),
force_extension=spec.get('product_extension'))
pbxp.AppendProperty('targets', xct)
xcode_targets[qualified_target] = xct
xcode_target_to_target_dict[xct] = spec
spec_actions = spec.get('actions', [])
spec_rules = spec.get('rules', [])
# Xcode has some "issues" with checking dependencies for the "Compile
# sources" step with any source files/headers generated by actions/rules.
# To work around this, if a target is building anything directly (not
# type "none"), then a second target is used to run the GYP actions/rules
# and is made a dependency of this target. This way the work is done
# before the dependency checks for what should be recompiled.
support_xct = None
# The Xcode "issues" don't affect xcode-ninja builds, since the dependency
# logic all happens in ninja. Don't bother creating the extra targets in
# that case.
if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
support_xccl = CreateXCConfigurationList(configuration_names)
support_target_suffix = generator_flags.get(
'support_target_suffix', ' Support')
support_target_properties = {
'buildConfigurationList': support_xccl,
'name': target_name + support_target_suffix,
}
if target_product_name:
support_target_properties['productName'] = \
target_product_name + ' Support'
support_xct = \
gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
parent=pbxp)
pbxp.AppendProperty('targets', support_xct)
xct.AddDependency(support_xct)
# Hang the support target off the main target so it can be tested/found
# by the generator during Finalize.
xct.support_target = support_xct
prebuild_index = 0
# Add custom shell script phases for "actions" sections.
for action in spec_actions:
# There's no need to write anything into the script to ensure that the
# output directories already exist, because Xcode will look at the
# declared outputs and automatically ensure that they exist for us.
# Do we have a message to print when this action runs?
message = action.get('message')
if message:
message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
else:
message = ''
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(action['action'])
# Convert Xcode-type variable references to sh-compatible environment
# variable references.
message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
action_string)
script = ''
# Include the optional message
if message_sh:
script += message_sh + '\n'
# Be sure the script runs in exec, and that if exec fails, the script
# exits signalling an error.
script += 'exec ' + action_string_sh + '\nexit 1\n'
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': action['inputs'],
'name': 'Action "' + action['action_name'] + '"',
'outputPaths': action['outputs'],
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# TODO(mark): Should verify that at most one of these is specified.
if int(action.get('process_outputs_as_sources', False)):
for output in action['outputs']:
AddSourceToTarget(output, type, pbxp, xct)
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
for output in action['outputs']:
AddResourceToTarget(output, pbxp, xct)
# tgt_mac_bundle_resources holds the list of bundle resources so
# the rule processing can check against it.
if is_bundle:
tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
else:
tgt_mac_bundle_resources = []
# Add custom shell script phases driving "make" for "rules" sections.
#
# Xcode's built-in rule support is almost powerful enough to use directly,
# but there are a few significant deficiencies that render them unusable.
# There are workarounds for some of its inadequacies, but in aggregate,
# the workarounds added complexity to the generator, and some workarounds
# actually require input files to be crafted more carefully than I'd like.
# Consequently, until Xcode rules are made more capable, "rules" input
# sections will be handled in Xcode output by shell script build phases
# performed prior to the compilation phase.
#
# The following problems with Xcode rules were found. The numbers are
# Apple radar IDs. I hope that these shortcomings are addressed; I really
# liked having the rules handled directly in Xcode during the period that
# I was prototyping this.
#
# 6588600 Xcode compiles custom script rule outputs too soon, compilation
# fails. This occurs when rule outputs from distinct inputs are
# interdependent. The only workaround is to put rules and their
# inputs in a separate target from the one that compiles the rule
# outputs. This requires input file cooperation and it means that
# process_outputs_as_sources is unusable.
# 6584932 Need to declare that custom rule outputs should be excluded from
# compilation. A possible workaround is to lie to Xcode about a
# rule's output, giving it a dummy file it doesn't know how to
# compile. The rule action script would need to touch the dummy.
# 6584839 I need a way to declare additional inputs to a custom rule.
# A possible workaround is a shell script phase prior to
# compilation that touches a rule's primary input files if any
# would-be additional inputs are newer than the output. Modifying
# the source tree - even just modification times - feels dirty.
# 6564240 Xcode "custom script" build rules always dump all environment
# variables. This is a low-priority problem and is not a
# show-stopper.
rules_by_ext = {}
for rule in spec_rules:
rules_by_ext[rule['extension']] = rule
# First, some definitions:
#
# A "rule source" is a file that was listed in a target's "sources"
# list and will have a rule applied to it on the basis of matching the
# rule's "extensions" attribute. Rule sources are direct inputs to
# rules.
#
# Rule definitions may specify additional inputs in their "inputs"
# attribute. These additional inputs are used for dependency tracking
# purposes.
#
# A "concrete output" is a rule output with input-dependent variables
# resolved. For example, given a rule with:
# 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
# if the target's "sources" list contained "one.ext" and "two.ext",
# the "concrete output" for rule input "two.ext" would be "two.cc". If
# a rule specifies multiple outputs, each input file that the rule is
# applied to will have the same number of concrete outputs.
#
# If any concrete outputs are outdated or missing relative to their
# corresponding rule_source or to any specified additional input, the
# rule action must be performed to generate the concrete outputs.
# concrete_outputs_by_rule_source will have an item at the same index
# as the rule['rule_sources'] that it corresponds to. Each item is a
# list of all of the concrete outputs for the rule_source.
concrete_outputs_by_rule_source = []
# concrete_outputs_all is a flat list of all concrete outputs that this
# rule is able to produce, given the known set of input files
# (rule_sources) that apply to it.
concrete_outputs_all = []
# messages & actions are keyed by the same indices as rule['rule_sources']
# and concrete_outputs_by_rule_source. They contain the message and
# action to perform after resolving input-dependent variables. The
# message is optional, in which case None is stored for each rule source.
messages = []
actions = []
for rule_source in rule.get('rule_sources', []):
rule_source_dirname, rule_source_basename = \
posixpath.split(rule_source)
(rule_source_root, rule_source_ext) = \
posixpath.splitext(rule_source_basename)
# These are the same variable names that Xcode uses for its own native
# rule support. Because Xcode's rule engine is not being used, they
# need to be expanded as they are written to the makefile.
rule_input_dict = {
'INPUT_FILE_BASE': rule_source_root,
'INPUT_FILE_SUFFIX': rule_source_ext,
'INPUT_FILE_NAME': rule_source_basename,
'INPUT_FILE_PATH': rule_source,
'INPUT_FILE_DIRNAME': rule_source_dirname,
}
concrete_outputs_for_this_rule_source = []
for output in rule.get('outputs', []):
# Fortunately, Xcode and make both use $(VAR) format for their
# variables, so the expansion is the only transformation necessary.
# Any remaining $(VAR)-type variables in the string can be given
# directly to make, which will pick up the correct settings from
# what Xcode puts into the environment.
concrete_output = ExpandXcodeVariables(output, rule_input_dict)
concrete_outputs_for_this_rule_source.append(concrete_output)
# Add all concrete outputs to the project.
pbxp.AddOrGetFileInRootGroup(concrete_output)
concrete_outputs_by_rule_source.append( \
concrete_outputs_for_this_rule_source)
concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
# TODO(mark): Should verify that at most one of these is specified.
if int(rule.get('process_outputs_as_sources', False)):
for output in concrete_outputs_for_this_rule_source:
AddSourceToTarget(output, type, pbxp, xct)
# If the file came from the mac_bundle_resources list or if the rule
# is marked to process outputs as bundle resources, do so.
was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
if was_mac_bundle_resource or \
int(rule.get('process_outputs_as_mac_bundle_resources', False)):
for output in concrete_outputs_for_this_rule_source:
AddResourceToTarget(output, pbxp, xct)
# Do we have a message to print when this rule runs?
message = rule.get('message')
if message:
message = gyp.common.EncodePOSIXShellArgument(message)
message = ExpandXcodeVariables(message, rule_input_dict)
messages.append(message)
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(rule['action'])
action = ExpandXcodeVariables(action_string, rule_input_dict)
actions.append(action)
if len(concrete_outputs_all) > 0:
# TODO(mark): There's a possibility for collision here. Consider
# target "t" rule "A_r" and target "t_A" rule "r".
makefile_name = '%s.make' % re.sub(
'[^a-zA-Z0-9_]', '_', '%s_%s' % (target_name, rule['rule_name']))
makefile_path = os.path.join(xcode_projects[build_file].path,
makefile_name)
# TODO(mark): try/close? Write to a temporary file and swap it only
# if it's got changes?
makefile = open(makefile_path, 'wb')
# make will build the first target in the makefile by default. By
# convention, it's called "all". List all (or at least one)
# concrete output for each rule source as a prerequisite of the "all"
# target.
makefile.write('all: \\\n')
for concrete_output_index in \
xrange(0, len(concrete_outputs_by_rule_source)):
# Only list the first (index [0]) concrete output of each input
# in the "all" target. Otherwise, a parallel make (-j > 1) would
# attempt to process each input multiple times simultaneously.
# Otherwise, "all" could just contain the entire list of
# concrete_outputs_all.
concrete_output = \
concrete_outputs_by_rule_source[concrete_output_index][0]
if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (concrete_output, eol))
for (rule_source, concrete_outputs, message, action) in \
zip(rule['rule_sources'], concrete_outputs_by_rule_source,
messages, actions):
makefile.write('\n')
# Add a rule that declares it can build each concrete output of a
# rule source. Collect the names of the directories that are
# required.
concrete_output_dirs = []
for concrete_output_index in xrange(0, len(concrete_outputs)):
concrete_output = concrete_outputs[concrete_output_index]
if concrete_output_index == 0:
bol = ''
else:
bol = ' '
makefile.write('%s%s \\\n' % (bol, concrete_output))
concrete_output_dir = posixpath.dirname(concrete_output)
if (concrete_output_dir and
concrete_output_dir not in concrete_output_dirs):
concrete_output_dirs.append(concrete_output_dir)
makefile.write(' : \\\n')
# The prerequisites for this rule are the rule source itself and
# the set of additional rule inputs, if any.
prerequisites = [rule_source]
prerequisites.extend(rule.get('inputs', []))
for prerequisite_index in xrange(0, len(prerequisites)):
prerequisite = prerequisites[prerequisite_index]
if prerequisite_index == len(prerequisites) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (prerequisite, eol))
# Make sure that output directories exist before executing the rule
# action.
if len(concrete_output_dirs) > 0:
makefile.write('\t@mkdir -p "%s"\n' %
'" "'.join(concrete_output_dirs))
# The rule message and action have already had the necessary variable
# substitutions performed.
if message:
# Mark it with note: so Xcode picks it up in build output.
makefile.write('\t@echo note: %s\n' % message)
makefile.write('\t%s\n' % action)
makefile.close()
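# As an illustrative sketch (assuming rule sources one.ext and two.ext
# and an output pattern of $(INPUT_FILE_BASE).cc), the makefile written
# above comes out roughly as:
#
#   all: \
#     one.cc \
#     two.cc
#
#   one.cc \
#       : \
#       one.ext
#   <tab>@mkdir -p "<output dir>"
#   <tab><expanded rule action for one.ext>
#
# with an equivalent stanza for two.ext.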
# It might be nice to ensure that needed output directories exist
# here rather than in each target in the Makefile, but that wouldn't
# work if there ever was a concrete output that had an input-dependent
# variable anywhere other than in the leaf position.
# Don't declare any inputPaths or outputPaths. If they're present,
# Xcode will provide a slight optimization by only running the script
# phase if any output is missing or outdated relative to any input.
# Unfortunately, it will also assume that all outputs are touched by
# the script, and if the outputs serve as files in a compilation
# phase, they will be unconditionally rebuilt. Since make might not
# rebuild everything that could be declared here as an output, this
# extra compilation activity is unnecessary. With inputPaths and
# outputPaths not supplied, make will always be called, but it knows
# enough to not do anything when everything is up-to-date.
# To help speed things up, pass -j COUNT to make so it does some work
# in parallel. Don't use ncpus because Xcode will build ncpus targets
# in parallel and if each target happens to have a rules step, there
# would be ncpus^2 jobs going at once. On a machine with two quad-core
# Xeons, a build can quickly run out of processes based on
# scheduling/other tasks, and randomly failing builds are no good.
script = \
"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
if [ "${JOB_COUNT}" -gt 4 ]; then
JOB_COUNT=4
fi
exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'name': 'Rule "' + rule['rule_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# Extra rule inputs also go into the project file. Concrete outputs were
# already added when they were computed.
groups = ['inputs', 'inputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for group in groups:
for item in rule.get(group, []):
pbxp.AddOrGetFileInRootGroup(item)
# Add "sources".
for source in spec.get('sources', []):
(source_root, source_extension) = posixpath.splitext(source)
if source_extension[1:] not in rules_by_ext:
# AddSourceToTarget will add the file to a root group if it's not
# already there.
AddSourceToTarget(source, type, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(source)
# Add "mac_bundle_resources" and "mac_framework_private_headers" if
# it's a bundle of any type.
if is_bundle:
for resource in tgt_mac_bundle_resources:
(resource_root, resource_extension) = posixpath.splitext(resource)
if resource_extension[1:] not in rules_by_ext:
AddResourceToTarget(resource, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(resource)
for header in spec.get('mac_framework_private_headers', []):
AddHeaderToTarget(header, pbxp, xct, False)
# Add "mac_framework_headers". These can be valid for both frameworks
# and static libraries.
if is_bundle or type == 'static_library':
for header in spec.get('mac_framework_headers', []):
AddHeaderToTarget(header, pbxp, xct, True)
# Add "copies".
pbxcp_dict = {}
for copy_group in spec.get('copies', []):
dest = copy_group['destination']
if dest[0] not in ('/', '$'):
# Relative paths are relative to $(SRCROOT).
dest = '$(SRCROOT)/' + dest
code_sign = int(copy_group.get('xcode_code_sign', 0))
settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]
# Coalesce multiple "copies" sections in the same target with the same
# "destination" property into the same PBXCopyFilesBuildPhase, otherwise
# they'll wind up with ID collisions.
pbxcp = pbxcp_dict.get(dest, None)
if pbxcp is None:
pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
'name': 'Copy to ' + copy_group['destination']
},
parent=xct)
pbxcp.SetDestination(dest)
# TODO(mark): The usual comment about this knowing too much about
# gyp.xcodeproj_file internals applies.
xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
pbxcp_dict[dest] = pbxcp
for file in copy_group['files']:
pbxcp.AddFile(file, settings)
# Excluded files can also go into the project file.
if not skip_excluded_files:
for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
'mac_framework_private_headers']:
excluded_key = key + '_excluded'
for item in spec.get(excluded_key, []):
pbxp.AddOrGetFileInRootGroup(item)
# So can "inputs" and "outputs" sections of "actions" groups.
groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for action in spec.get('actions', []):
for group in groups:
for item in action.get(group, []):
# Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
# sources.
if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
pbxp.AddOrGetFileInRootGroup(item)
for postbuild in spec.get('postbuilds', []):
action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
script = 'exec ' + action_string_sh + '\nexit 1\n'
# Make the postbuild step depend on the output of ld or ar from this
# target. Apparently putting the script step after the link step isn't
# sufficient to ensure proper ordering in all cases. With an input
# declared but no outputs, the script step should run every time, as
# desired.
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
xct.AppendProperty('buildPhases', ssbp)
# Add dependencies before libraries, because adding a dependency may imply
# adding a library. It's preferable to keep dependencies listed first
# during a link phase so that they can override symbols that would
# otherwise be provided by libraries, which will usually include system
# libraries. On some systems, ld is finicky and even requires the
# libraries to be ordered in such a way that unresolved symbols in
# earlier-listed libraries may only be resolved by later-listed libraries.
# The Mac linker doesn't work that way, but other platforms do, and so
# their linker invocations need to be constructed in this way. There's
# no compelling reason for Xcode's linker invocations to differ.
if 'dependencies' in spec:
for dependency in spec['dependencies']:
xct.AddDependency(xcode_targets[dependency])
# The support project also gets the dependencies (in case they are
# needed for the actions/rules to work).
if support_xct:
support_xct.AddDependency(xcode_targets[dependency])
if 'libraries' in spec:
for library in spec['libraries']:
xct.FrameworksPhase().AddFile(library)
# Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
# I wish Xcode handled this automatically.
library_dir = posixpath.dirname(library)
if library_dir not in xcode_standard_library_dirs and (
not xct.HasBuildSetting(_library_search_paths_var) or
library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
xct.AppendBuildSetting(_library_search_paths_var, library_dir)
for configuration_name in configuration_names:
configuration = spec['configurations'][configuration_name]
xcbc = xct.ConfigurationNamed(configuration_name)
for include_dir in configuration.get('mac_framework_dirs', []):
xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
for include_dir in configuration.get('include_dirs', []):
xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
for library_dir in configuration.get('library_dirs', []):
if library_dir not in xcode_standard_library_dirs and (
not xcbc.HasBuildSetting(_library_search_paths_var) or
library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
if 'defines' in configuration:
for define in configuration['defines']:
set_define = EscapeXcodeDefine(define)
xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
if 'xcode_settings' in configuration:
for xck, xcv in configuration['xcode_settings'].iteritems():
xcbc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in configuration:
config_ref = pbxp.AddOrGetFileInRootGroup(
configuration['xcode_config_file'])
xcbc.SetBaseConfiguration(config_ref)
build_files = []
for build_file, build_file_dict in data.iteritems():
if build_file.endswith('.gyp'):
build_files.append(build_file)
for build_file in build_files:
xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
for build_file in build_files:
xcode_projects[build_file].Finalize2(xcode_targets,
xcode_target_to_target_dict)
for build_file in build_files:
xcode_projects[build_file].Write()
| mit |
lexus24/w16b_test | static/Brython3.1.3-20150514-095342/Lib/socket.py | 730 | 14913 | # Wrapper module for _socket, providing some additional facilities
# implemented in Python.
"""\
This module provides socket operations and some related functions.
On Unix, it supports IP (Internet Protocol) and Unix domain sockets.
On other systems, it only supports IP. Functions specific for a
socket are available as methods of the socket object.
Functions:
socket() -- create a new socket object
socketpair() -- create a pair of new socket objects [*]
fromfd() -- create a socket object from an open file descriptor [*]
fromshare() -- create a socket object from data received from socket.share() [*]
gethostname() -- return the current hostname
gethostbyname() -- map a hostname to its IP number
gethostbyaddr() -- map an IP number or hostname to DNS info
getservbyname() -- map a service name and a protocol name to a port number
getprotobyname() -- map a protocol name (e.g. 'tcp') to a number
ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order
htons(), htonl() -- convert 16, 32 bit int from host to network byte order
inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format
inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)
socket.getdefaulttimeout() -- get the default timeout value
socket.setdefaulttimeout() -- set the default timeout value
create_connection() -- connects to an address, with an optional timeout and
optional source address.
[*] not available on all platforms!
Special objects:
SocketType -- type object for socket objects
error -- exception raised for I/O errors
has_ipv6 -- boolean value indicating if IPv6 is supported
Integer constants:
AF_INET, AF_UNIX -- socket domains (first argument to socket() call)
SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)
Many other constants may be defined; these may be used in calls to
the setsockopt() and getsockopt() methods.
"""
import _socket
from _socket import *
import os, sys, io
try:
import errno
except ImportError:
errno = None
EBADF = getattr(errno, 'EBADF', 9)
EAGAIN = getattr(errno, 'EAGAIN', 11)
EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11)
__all__ = ["getfqdn", "create_connection"]
__all__.extend(os._get_exports_list(_socket))
_realsocket = socket
# WSA error codes
if sys.platform.lower().startswith("win"):
errorTab = {}
errorTab[10004] = "The operation was interrupted."
errorTab[10009] = "A bad file handle was passed."
errorTab[10013] = "Permission denied."
errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT
errorTab[10022] = "An invalid operation was attempted."
errorTab[10035] = "The socket operation would block"
errorTab[10036] = "A blocking operation is already in progress."
errorTab[10048] = "The network address is in use."
errorTab[10054] = "The connection has been reset."
errorTab[10058] = "The network has been shut down."
errorTab[10060] = "The operation timed out."
errorTab[10061] = "Connection refused."
errorTab[10063] = "The name is too long."
errorTab[10064] = "The host is down."
errorTab[10065] = "The host is unreachable."
__all__.append("errorTab")
class socket(_socket.socket):
"""A subclass of _socket.socket adding the makefile() method."""
__slots__ = ["__weakref__", "_io_refs", "_closed"]
def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None):
_socket.socket.__init__(self, family, type, proto, fileno)
self._io_refs = 0
self._closed = False
def __enter__(self):
return self
def __exit__(self, *args):
if not self._closed:
self.close()
def __repr__(self):
"""Wrap __repr__() to reveal the real class name."""
s = _socket.socket.__repr__(self)
if s.startswith("<socket object"):
s = "<%s.%s%s%s" % (self.__class__.__module__,
self.__class__.__name__,
getattr(self, '_closed', False) and " [closed] " or "",
s[7:])
return s
def __getstate__(self):
raise TypeError("Cannot serialize socket object")
def dup(self):
"""dup() -> socket object
Return a new socket object connected to the same system resource.
"""
fd = dup(self.fileno())
sock = self.__class__(self.family, self.type, self.proto, fileno=fd)
sock.settimeout(self.gettimeout())
return sock
def accept(self):
"""accept() -> (socket object, address info)
Wait for an incoming connection. Return a new socket
representing the connection, and the address of the client.
For IP sockets, the address info is a pair (hostaddr, port).
"""
fd, addr = self._accept()
sock = socket(self.family, self.type, self.proto, fileno=fd)
# Issue #7995: if no default timeout is set and the listening
# socket had a (non-zero) timeout, force the new socket in blocking
# mode to override platform-specific socket flags inheritance.
if getdefaulttimeout() is None and self.gettimeout():
sock.setblocking(True)
return sock, addr
def makefile(self, mode="r", buffering=None, *,
encoding=None, errors=None, newline=None):
"""makefile(...) -> an I/O stream connected to the socket
The arguments are as for io.open() after the filename,
except the only mode characters supported are 'r', 'w' and 'b'.
The semantics are similar too. (XXX refactor to share code?)
"""
for c in mode:
if c not in {"r", "w", "b"}:
raise ValueError("invalid mode %r (only r, w, b allowed)")
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = SocketIO(self, rawmode)
self._io_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text
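# A short usage sketch: wrap a connected socket in a buffered binary
# stream for line-oriented reads:
#
#   f = sock.makefile('rb')
#   line = f.readline()
#   f.close()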
def _decref_socketios(self):
if self._io_refs > 0:
self._io_refs -= 1
if self._closed:
self.close()
def _real_close(self, _ss=_socket.socket):
# This function should not reference any globals. See issue #808164.
_ss.close(self)
def close(self):
# This function should not reference any globals. See issue #808164.
self._closed = True
if self._io_refs <= 0:
self._real_close()
def detach(self):
"""detach() -> file descriptor
Close the socket object without closing the underlying file descriptor.
The object cannot be used after this call, but the file descriptor
can be reused for other purposes. The file descriptor is returned.
"""
self._closed = True
return super().detach()
def fromfd(fd, family, type, proto=0):
""" fromfd(fd, family, type[, proto]) -> socket object
Create a socket object from a duplicate of the given file
descriptor. The remaining arguments are the same as for socket().
"""
nfd = dup(fd)
return socket(family, type, proto, nfd)
if hasattr(_socket.socket, "share"):
def fromshare(info):
""" fromshare(info) -> socket object
Create a socket object from the bytes object returned by
socket.share(pid).
"""
return socket(0, 0, 0, info)
if hasattr(_socket, "socketpair"):
def socketpair(family=None, type=SOCK_STREAM, proto=0):
"""socketpair([family[, type[, proto]]]) -> (socket object, socket object)
Create a pair of socket objects from the sockets returned by the platform
socketpair() function.
The arguments are the same as for socket() except the default family is
AF_UNIX if defined on the platform; otherwise, the default is AF_INET.
"""
if family is None:
try:
family = AF_UNIX
except NameError:
family = AF_INET
a, b = _socket.socketpair(family, type, proto)
a = socket(family, type, proto, a.detach())
b = socket(family, type, proto, b.detach())
return a, b
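# A minimal usage sketch: the two returned sockets are connected to each
# other, so bytes written on one end are readable on the other:
#
#   a, b = socketpair()
#   a.sendall(b'ping')
#   assert b.recv(4) == b'ping'
#   a.close()
#   b.close()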
_blocking_errnos = { EAGAIN, EWOULDBLOCK }
class SocketIO(io.RawIOBase):
"""Raw I/O implementation for stream sockets.
This class supports the makefile() method on sockets. It provides
the raw I/O interface on top of a socket object.
"""
# One might wonder why not let FileIO do the job instead. There are two
# main reasons why FileIO is not adapted:
# - it wouldn't work under Windows (where you can't use read() and
# write() on a socket handle)
# - it wouldn't work with socket timeouts (FileIO would ignore the
# timeout and consider the socket non-blocking)
# XXX More docs
def __init__(self, sock, mode):
if mode not in ("r", "w", "rw", "rb", "wb", "rwb"):
raise ValueError("invalid mode: %r" % mode)
io.RawIOBase.__init__(self)
self._sock = sock
if "b" not in mode:
mode += "b"
self._mode = mode
self._reading = "r" in mode
self._writing = "w" in mode
self._timeout_occurred = False
def readinto(self, b):
"""Read up to len(b) bytes into the writable buffer *b* and return
the number of bytes read. If the socket is non-blocking and no bytes
are available, None is returned.
If *b* is non-empty, a 0 return value indicates that the connection
was shut down at the other end.
"""
self._checkClosed()
self._checkReadable()
if self._timeout_occurred:
raise IOError("cannot read from timed out object")
while True:
try:
return self._sock.recv_into(b)
except timeout:
self._timeout_occurred = True
raise
except InterruptedError:
continue
except error as e:
if e.args[0] in _blocking_errnos:
return None
raise
def write(self, b):
"""Write the given bytes or bytearray object *b* to the socket
and return the number of bytes written. This can be less than
len(b) if not all data could be written. If the socket is
non-blocking and no bytes could be written, None is returned.
"""
self._checkClosed()
self._checkWritable()
try:
return self._sock.send(b)
except error as e:
# XXX what about EINTR?
if e.args[0] in _blocking_errnos:
return None
raise
def readable(self):
"""True if the SocketIO is open for reading.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return self._reading
def writable(self):
"""True if the SocketIO is open for writing.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return self._writing
def seekable(self):
"""True if the SocketIO is open for seeking.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return super().seekable()
def fileno(self):
"""Return the file descriptor of the underlying socket.
"""
self._checkClosed()
return self._sock.fileno()
@property
def name(self):
if not self.closed:
return self.fileno()
else:
return -1
@property
def mode(self):
return self._mode
def close(self):
"""Close the SocketIO object. This doesn't close the underlying
socket, except if all references to it have disappeared.
"""
if self.closed:
return
io.RawIOBase.close(self)
self._sock._decref_socketios()
self._sock = None
def getfqdn(name=''):
"""Get fully qualified domain name from name.
An empty argument is interpreted as meaning the local host.
First the hostname returned by gethostbyaddr() is checked, then
possibly existing aliases. In case no FQDN is available, hostname
from gethostname() is returned.
"""
name = name.strip()
if not name or name == '0.0.0.0':
name = gethostname()
try:
hostname, aliases, ipaddrs = gethostbyaddr(name)
except error:
pass
else:
aliases.insert(0, hostname)
for name in aliases:
if '.' in name:
break
else:
name = hostname
return name
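# Illustration (hypothetical names): getfqdn('mail') returns something
# like 'mail.example.com' when gethostbyaddr() yields a dotted name
# among the hostname and aliases; with no dotted name available, the
# bare hostname is returned as-is.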
_GLOBAL_DEFAULT_TIMEOUT = object()
def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
source_address=None):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
A host of '' or port 0 tells the OS to use the default.
"""
host, port = address
err = None
for res in getaddrinfo(host, port, 0, SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket(af, socktype, proto)
if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
sock.connect(sa)
return sock
except error as _:
err = _
if sock is not None:
sock.close()
if err is not None:
raise err
else:
raise error("getaddrinfo returns an empty list")
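# A minimal usage sketch (host and port are illustrative):
#
#   sock = create_connection(('www.example.com', 80), timeout=5)
#   sock.sendall(b'HEAD / HTTP/1.0\r\nHost: www.example.com\r\n\r\n')
#   reply = sock.recv(4096)
#   sock.close()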
| agpl-3.0 |
ng110/pyqtgraph | examples/ScatterPlotSpeedTestTemplate_pyside.py | 28 | 1963 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './examples/ScatterPlotSpeedTestTemplate.ui'
#
# Created: Fri Sep 21 15:39:09 2012
# by: pyside-uic 0.2.13 running on PySide 1.1.0
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(400, 300)
self.gridLayout = QtGui.QGridLayout(Form)
self.gridLayout.setObjectName("gridLayout")
self.sizeSpin = QtGui.QSpinBox(Form)
self.sizeSpin.setProperty("value", 10)
self.sizeSpin.setObjectName("sizeSpin")
self.gridLayout.addWidget(self.sizeSpin, 1, 1, 1, 1)
self.pixelModeCheck = QtGui.QCheckBox(Form)
self.pixelModeCheck.setObjectName("pixelModeCheck")
self.gridLayout.addWidget(self.pixelModeCheck, 1, 3, 1, 1)
self.label = QtGui.QLabel(Form)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 1, 0, 1, 1)
self.plot = PlotWidget(Form)
self.plot.setObjectName("plot")
self.gridLayout.addWidget(self.plot, 0, 0, 1, 4)
self.randCheck = QtGui.QCheckBox(Form)
self.randCheck.setObjectName("randCheck")
self.gridLayout.addWidget(self.randCheck, 1, 2, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.pixelModeCheck.setText(QtGui.QApplication.translate("Form", "pixel mode", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Form", "Size", None, QtGui.QApplication.UnicodeUTF8))
self.randCheck.setText(QtGui.QApplication.translate("Form", "Randomize", None, QtGui.QApplication.UnicodeUTF8))
from pyqtgraph import PlotWidget
| mit |
ulope/django | django/contrib/gis/geoip/libgeoip.py | 106 | 1094 | import os
from ctypes import CDLL
from ctypes.util import find_library
from django.conf import settings
# Creating the settings dictionary with any settings, if needed.
GEOIP_SETTINGS = dict((key, getattr(settings, key))
for key in ('GEOIP_PATH', 'GEOIP_LIBRARY_PATH', 'GEOIP_COUNTRY', 'GEOIP_CITY')
if hasattr(settings, key))
lib_path = GEOIP_SETTINGS.get('GEOIP_LIBRARY_PATH', None)
# The shared library for the GeoIP C API. May be downloaded
# from http://www.maxmind.com/download/geoip/api/c/
if lib_path:
lib_name = None
else:
# TODO: Is this really the library name for Windows?
lib_name = 'GeoIP'
# Getting the path to the GeoIP library.
if lib_name:
lib_path = find_library(lib_name)
if lib_path is None:
raise RuntimeError('Could not find the GeoIP library (tried "%s"). '
'Try setting GEOIP_LIBRARY_PATH in your settings.' % lib_name)
lgeoip = CDLL(lib_path)
# Getting the C `free` for the platform.
if os.name == 'nt':
libc = CDLL('msvcrt')
else:
libc = CDLL(None)
free = libc.free
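# Usage sketch (assumptions: the loaded library exports the standard GeoIP C
# API symbols GeoIP_open and GeoIP_country_code_by_name, and GEOIP_PATH points
# at a country database):
#   from ctypes import c_char_p, c_void_p
#   lgeoip.GeoIP_open.restype = c_void_p
#   lgeoip.GeoIP_country_code_by_name.restype = c_char_p
#   lgeoip.GeoIP_country_code_by_name.argtypes = [c_void_p, c_char_p]
#   gi = lgeoip.GeoIP_open(GEOIP_SETTINGS['GEOIP_PATH'], 0)  # 0 == GEOIP_STANDARD
#   print(lgeoip.GeoIP_country_code_by_name(gi, 'example.com'))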
| bsd-3-clause |
ThunderGemios10/The-Super-Duper-Script-Editor-2 | script_map.py | 1 | 109334 | ################################################################################
### Copyright © 2012-2013 BlackDragonHunt
###
### This file is part of the Super Duper Script Editor.
###
### The Super Duper Script Editor is free software: you can redistribute it
### and/or modify it under the terms of the GNU General Public License as
### published by the Free Software Foundation, either version 3 of the License,
### or (at your option) any later version.
###
### The Super Duper Script Editor is distributed in the hope that it will be
### useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with the Super Duper Script Editor.
### If not, see <http://www.gnu.org/licenses/>.
################################################################################
EVERYDAY_LIFE = "(n)everyday life"
INVESTIGATION = "neveryday life"
CLASS_TRIAL = "Class Trial"
POST_TRIAL = "Post-Trial"
GENERIC_TEXT = "Generic Text"
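# Illustrative helper (an assumption, not part of the original module):
# SCRIPT_MAP below is a recursive tree whose nodes are either filename strings
# or (label, children) tuples; this sketch flattens it into a plain file list.
def iter_script_files(nodes):
  for node in nodes:
    if isinstance(node, tuple):
      # (label, children) tuple: descend into the child list.
      for filename in iter_script_files(node[1]):
        yield filename
    else:
      # Leaf node: a .lin/.dat/.pak script filename.
      yield node
# Example: all_files = list(iter_script_files(SCRIPT_MAP))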
SCRIPT_MAP = [
(
"Main Script",
[
(
"Prologue",
[
"e00_001_180.lin",
"e00_001_001.lin",
"e00_002_001.lin",
"e00_003_019.lin",
"e00_003_001.lin",
"e00_003_002.lin",
"e00_003_003.lin",
"e00_003_004.lin",
"e00_003_100.lin",
"e00_003_101.lin",
"e00_003_102.lin",
"e00_003_142.lin",
"e00_999_000.lin",
"e00_999_023.lin",
"e00_005_001.lin",
"e00_006_001.lin",
"e00_006_100.lin",
"e00_006_142.lin",
"e00_007_142.lin",
"e00_007_143.lin",
(
GENERIC_TEXT,
[
"e00_000_001.lin",
"e00_000_002.lin",
"e00_000_003.lin",
"e00_000_004.lin",
"e00_000_100.lin",
"e00_000_101.lin",
"e00_000_102.lin",
"e00_000_142.lin",
"e00_000_143.lin",
"e00_000_144.lin",
"e00_000_145.lin",
"e00_000_180.lin",
"e00_999_001.lin",
"e00_999_002.lin",
"e00_999_003.lin",
"e00_999_004.lin",
"e00_999_008.lin",
"e00_999_010.lin",
"e00_999_011.lin",
"e00_999_013.lin",
"e00_999_014.lin",
"e00_999_015.lin",
"e00_999_016.lin",
"e00_999_017.lin",
"e00_999_018.lin",
"e00_999_019.lin",
"e00_999_020.lin",
"e00_999_021.lin",
"e00_999_022.lin",
"e00_999_024.lin",
"script_pak_e00_001.lin",
"script_pak_e00_002.lin",
"script_pak_e00_003.lin",
"script_pak_e00_004.lin",
"script_pak_e00_006.lin",
"script_pak_e00_007.lin",
"script_pak_e00_008.lin",
"script_pak_e00_011.lin",
"script_pak_e00_012.lin",
"script_pak_e00_013.lin",
"script_pak_e00_014.lin",
"script_pak_e00_015.lin",
"script_pak_e00_016.lin",
"script_pak_e00_017.lin",
"script_pak_e00_018.lin",
"script_pak_e00_019.lin",
"script_pak_e00_020.lin",
"script_pak_e00_021.lin",
"script_pak_e00_022.lin",
"script_pak_e00_023.lin",
"script_pak_e00_024.lin",
"script_pak_e00_025.lin",
"script_pak_e00_026.lin",
"script_pak_e00_027.lin",
"script_pak_e00_028.lin",
"script_pak_e00_029.lin",
"script_pak_e00_030.lin",
"script_pak_e00_031.lin",
"script_pak_e00_032.lin",
"script_pak_e00_033.lin",
"script_pak_e00_034.lin",
"script_pak_e00_035.lin",
]
),
],
),
(
"Chapter 1",
[
(
GENERIC_TEXT,
[
"e01_000_001.lin",
"e01_000_002.lin",
"e01_000_003.lin",
"e01_000_004.lin",
"e01_000_005.lin",
"e01_000_006.lin",
"e01_000_007.lin",
"e01_000_008.lin",
"e01_000_009.lin",
"e01_000_011.lin",
"e01_000_100.lin",
"e01_000_101.lin",
"e01_000_102.lin",
"e01_000_104.lin",
"e01_000_143.lin",
]
),
(
EVERYDAY_LIFE,
[
"e01_001_100.lin",
"e01_001_101.lin",
"e01_001_102.lin",
"e01_001_104.lin",
"e08_100_000.lin",
"e01_002_104.lin",
"e01_002_100.lin",
"e01_002_101.lin",
"e01_003_102.lin",
"e01_004_102.lin",
"e01_004_101.lin",
"e01_004_100.lin",
"e01_005_143.lin",
"e01_006_104.lin",
"e01_006_001.lin",
(
"Free Time 00",
[
"e08_001_000.lin",
"e08_002_000.lin",
"e08_003_000.lin",
"e08_004_000.lin",
"e08_005_000.lin",
"e08_006_000.lin",
"e08_007_000.lin",
"e08_008_000.lin",
"e08_009_000.lin",
"e08_010_000.lin",
"e08_011_000.lin",
"e08_012_000.lin",
"e08_013_000.lin",
"e08_014_000.lin",
"e08_015_000.lin",
]
),
(
"Free Time 01",
[
"e08_001_001.lin",
"e08_002_001.lin",
"e08_003_001.lin",
"e08_004_001.lin",
"e08_005_001.lin",
"e08_006_001.lin",
"e08_008_001.lin",
"e08_009_001.lin",
"e08_010_001.lin",
"e08_011_001.lin",
"e08_012_001.lin",
"e08_013_001.lin",
"e08_014_001.lin",
"e08_015_001.lin",
]
),
"e01_007_104.lin",
(
"Free Time 02",
[
"e08_001_002.lin",
"e08_002_002.lin",
"e08_003_002.lin",
"e08_004_002.lin",
"e08_005_002.lin",
"e08_006_002.lin",
"e08_008_002.lin",
"e08_009_002.lin",
"e08_010_002.lin",
"e08_011_002.lin",
"e08_012_002.lin",
"e08_013_002.lin",
"e08_014_002.lin",
"e08_015_002.lin",
]
),
(
GENERIC_TEXT,
[
"e01_007_109.lin",
"e01_007_116.lin",
"e01_007_118.lin",
"e01_007_119.lin",
]
),
"e01_008_104.lin",
"e01_008_140.lin",
"e01_009_143.lin",
"e01_009_104.lin",
"e08_100_001.lin",
"e01_010_104.lin",
"e01_010_100.lin",
"e01_010_101.lin",
"e01_011_102.lin",
"e01_012_104.lin",
"e01_012_102.lin",
(
"Free Time 03",
[
"e08_002_003.lin",
"e08_003_003.lin",
"e08_004_003.lin",
"e08_005_003.lin",
"e08_006_003.lin",
"e08_008_003.lin",
"e08_009_003.lin",
"e08_010_003.lin",
"e08_011_003.lin",
"e08_012_003.lin",
"e08_013_003.lin",
"e08_014_003.lin",
"e08_015_003.lin",
]
),
(
GENERIC_TEXT,
[
"e01_012_106.lin",
"e01_012_108.lin",
"e01_012_114.lin",
"e01_012_115.lin",
]
),
"e01_013_104.lin",
(
"Free Time 04",
[
"e08_002_004.lin",
"e08_003_004.lin",
"e08_004_004.lin",
"e08_005_004.lin",
"e08_006_004.lin",
"e08_008_004.lin",
"e08_009_004.lin",
"e08_010_004.lin",
"e08_011_004.lin",
"e08_012_004.lin",
"e08_013_004.lin",
"e08_014_004.lin",
"e08_015_004.lin",
]
),
(
GENERIC_TEXT,
[
"e01_013_107.lin",
"e01_013_110.lin",
"e01_013_113.lin",
]
),
"e01_014_104.lin",
"e01_014_100.lin",
"e01_015_005.lin",
"e01_015_008.lin",
"e01_015_006.lin",
"e01_016_005.lin",
"e01_016_008.lin",
"e01_016_006.lin",
"e01_017_005.lin",
"e01_017_006.lin",
"e01_017_008.lin",
"e01_017_011.lin",
"e01_018_100.lin",
"e01_018_006.lin",
]
),
(
INVESTIGATION,
[
"e01_100_007.lin",
"e01_100_005.lin",
"e01_100_008.lin",
"e01_100_009.lin",
"e01_100_011.lin",
"e01_100_100.lin",
"e01_100_108.lin",
"e01_100_001.lin",
"e01_100_003.lin",
(
GENERIC_TEXT,
[
"e01_100_002.lin",
"e01_100_004.lin",
"e01_100_019.lin",
"e01_100_101.lin",
"e01_100_102.lin",
"e01_100_143.lin",
]
),
"e01_101_140.lin",
"e01_102_144.lin",
"e01_102_200.lin",
]
),
(
CLASS_TRIAL,
[
"e01_201_000.lin",
"e01_250_000.lin",
"nonstop_01_001.dat",
"e01_202_000.lin",
"e01_203_000.lin",
"e01_251_000.lin",
"nonstop_01_002.dat",
"e01_204_000.lin",
"e01_205_000.lin",
"e01_255_000.lin",
"hanron_01_001.dat",
"e01_206_000.lin",
"e01_207_000.lin",
"e01_252_000.lin",
"nonstop_01_003.dat",
"e01_208_000.lin",
"e01_209_000.lin",
"nonstop_01_004.dat",
"e01_210_000.lin",
"e01_211_000.lin",
"nonstop_01_005.dat",
"e01_212_000.lin",
"e01_213_000.lin",
"hanron_01_002.dat",
"e01_214_000.lin",
"e01_215_000.lin",
"e01_253_000.lin",
"nonstop_01_006.dat",
"e01_216_000.lin",
"e01_217_000.lin",
"e01_218_000.lin",
"e01_219_000.lin",
"e01_256_000.lin",
"e01_220_000.lin",
"e01_221_000.lin",
"e01_254_000.lin",
"nonstop_01_007.dat",
"e01_222_000.lin",
"e01_223_000.lin",
"nonstop_01_008.dat",
"e01_224_000.lin",
"e01_225_000.lin",
"hanron_01_003.dat",
"e01_226_000.lin",
"e01_227_000.lin",
"nonstop_01_009.dat",
"e01_228_000.lin",
"e01_229_000.lin",
"e01_230_000.lin",
"e01_231_000.lin",
"e01_232_000.lin",
"e01_257_000.lin",
"dr2_mtb2_s01.pak",
"e01_233_000.lin",
"e01_259_000.lin",
"e01_234_000.lin",
"e01_235_000.lin",
"e01_300_200.lin",
"e01_301_200.lin",
"e01_296_000.lin",
"e01_297_000.lin",
"e01_299_000.lin",
]
),
(
POST_TRIAL,
[
"e01_301_100.lin",
"e01_301_143.lin",
]
),
(
GENERIC_TEXT,
[
"e01_999_001.lin",
"e01_999_002.lin",
"e01_999_008.lin",
"e01_999_010.lin",
"e01_999_013.lin",
"e01_999_014.lin",
"e01_999_015.lin",
"e01_999_016.lin",
"e01_999_017.lin",
"e01_999_018.lin",
"e01_999_019.lin",
"e01_999_020.lin",
"e01_999_021.lin",
"e01_999_022.lin",
"e01_999_023.lin",
"e01_999_024.lin",
"e01_999_025.lin",
"e01_999_026.lin",
"e01_999_027.lin",
"e01_999_028.lin",
"e01_999_031.lin",
"e01_999_035.lin",
"e01_999_036.lin",
"e01_999_037.lin",
"e01_999_038.lin",
"e01_999_039.lin",
"e01_999_041.lin",
"e01_999_042.lin",
"e01_999_046.lin",
"e01_999_048.lin",
"e01_999_049.lin",
"e01_999_050.lin",
"e01_999_051.lin",
"e01_999_052.lin",
"e01_999_053.lin",
"e01_999_054.lin",
"e01_999_055.lin",
"e01_999_056.lin",
"e01_999_057.lin",
"e01_999_058.lin",
"e01_999_059.lin",
"e01_999_060.lin",
"e01_999_061.lin",
"e01_999_062.lin",
"e01_999_063.lin",
"e01_999_064.lin",
"e01_999_065.lin",
"e01_999_066.lin",
"e01_999_067.lin",
"e01_999_068.lin",
"e01_999_070.lin",
"e01_999_072.lin",
"e01_999_073.lin",
"e01_999_074.lin",
"e01_999_077.lin",
"e01_999_078.lin",
"e01_999_079.lin",
"e01_999_081.lin",
"e01_999_082.lin",
"e01_999_083.lin",
"e01_999_084.lin",
"e01_999_085.lin",
"e01_999_086.lin",
"e01_999_088.lin",
"e01_999_099.lin",
"script_pak_e01_001.lin",
"script_pak_e01_002.lin",
"script_pak_e01_003.lin",
"script_pak_e01_004.lin",
"script_pak_e01_005.lin",
"script_pak_e01_006.lin",
"script_pak_e01_007.lin",
"script_pak_e01_008.lin",
"script_pak_e01_009.lin",
"script_pak_e01_010.lin",
"script_pak_e01_012.lin",
"script_pak_e01_013.lin",
"script_pak_e01_014.lin",
"script_pak_e01_015.lin",
"script_pak_e01_017.lin",
"script_pak_e01_018.lin",
"script_pak_e01_019.lin",
"script_pak_e01_020.lin",
"script_pak_e01_021.lin",
"script_pak_e01_022.lin",
"script_pak_e01_023.lin",
"script_pak_e01_024.lin",
"script_pak_e01_025.lin",
"script_pak_e01_026.lin",
"script_pak_e01_027.lin",
"script_pak_e01_028.lin",
"script_pak_e01_029.lin",
"script_pak_e01_030.lin",
"script_pak_e01_031.lin",
"script_pak_e01_032.lin",
"script_pak_e01_033.lin",
"script_pak_e01_034.lin",
"script_pak_e01_035.lin",
"script_pak_e01_036.lin",
"script_pak_e01_037.lin",
"script_pak_e01_038.lin",
"script_pak_e01_039.lin",
"script_pak_e01_040.lin",
"script_pak_e01_041.lin",
"script_pak_e01_042.lin",
"script_pak_e01_043.lin",
"script_pak_e01_044.lin",
"script_pak_e01_045.lin",
"script_pak_e01_046.lin",
"script_pak_e01_047.lin",
"script_pak_e01_048.lin",
"script_pak_e01_049.lin",
"script_pak_e01_050.lin",
"script_pak_e01_051.lin",
"script_pak_e01_052.lin",
"script_pak_e01_053.lin",
"script_pak_e01_054.lin",
"script_pak_e01_055.lin",
"script_pak_e01_056.lin",
"script_pak_e01_057.lin",
"script_pak_e01_058.lin",
"script_pak_e01_059.lin",
"script_pak_e01_060.lin",
"script_pak_e01_061.lin",
"script_pak_e01_062.lin",
"script_pak_e01_063.lin",
"script_pak_e01_064.lin",
"script_pak_e01_065.lin",
"script_pak_e01_066.lin",
"script_pak_e01_067.lin",
"script_pak_e01_068.lin",
"script_pak_e01_069.lin",
"script_pak_e01_070.lin",
"script_pak_e01_071.lin",
"script_pak_e01_072.lin",
"script_pak_e01_073.lin",
"script_pak_e01_074.lin",
"script_pak_e01_075.lin",
"script_pak_e01_076.lin",
"script_pak_e01_077.lin",
"script_pak_e01_078.lin",
"script_pak_e01_079.lin",
"script_pak_e01_080.lin",
"script_pak_e01_082.lin",
"script_pak_e01_087.lin",
]
),
]
),
(
"Chapter 2",
[
(
GENERIC_TEXT,
[
"e02_000_001.lin",
"e02_000_002.lin",
"e02_000_003.lin",
"e02_000_004.lin",
"e02_000_005.lin",
"e02_000_006.lin",
"e02_000_008.lin",
"e02_000_009.lin",
"e02_000_011.lin",
"e02_000_019.lin",
"e02_000_021.lin",
"e02_000_022.lin",
"e02_000_023.lin",
"e02_000_024.lin",
"e02_000_026.lin",
"e02_000_028.lin",
"e02_000_030.lin",
"e02_000_100.lin",
"e02_000_101.lin",
"e02_000_102.lin",
"e02_000_104.lin",
"e02_000_143.lin",
"e02_000_144.lin",
"e02_000_145.lin",
]
),
(
EVERYDAY_LIFE,
[
"e02_001_104.lin",
"e02_001_102.lin",
"e02_001_141.lin",
"e02_002_020.lin",
"e02_002_021.lin",
"e02_002_022.lin",
"e02_002_023.lin",
"e02_002_024.lin",
"e02_002_025.lin",
"e02_002_026.lin",
"e02_002_030.lin",
"e02_999_007.lin",
"e02_003_030.lin",
"e02_003_104.lin",
"e08_100_002.lin",
"e02_004_104.lin",
"e02_004_100.lin",
"e02_004_101.lin",
"e02_005_102.lin",
"e02_006_104.lin",
(
"Free Time 05",
[
"e08_003_005.lin",
"e08_004_005.lin",
"e08_006_005.lin",
"e08_008_005.lin",
"e08_009_005.lin",
"e08_010_005.lin",
"e08_011_005.lin",
"e08_012_005.lin",
"e08_013_006.lin",
"e08_014_005.lin",
"e08_015_005.lin",
]
),
"e02_007_104.lin",
(
"Free Time 06",
[
"e08_003_006.lin",
"e08_004_006.lin",
"e08_006_006.lin",
"e08_008_006.lin",
"e08_009_006.lin",
"e08_010_006.lin",
"e08_011_006.lin",
"e08_012_006.lin",
"e08_013_005.lin",
"e08_014_006.lin",
"e08_015_006.lin",
]
),
"e02_008_104.lin",
"e02_008_100.lin",
"e02_009_143.lin",
"e02_009_104.lin",
"e08_100_003.lin",
"e02_010_104.lin",
"e02_010_100.lin",
"e02_010_101.lin",
"e02_010_102.lin",
"e02_010_005.lin",
"e02_011_006.lin",
"e02_012_104.lin",
(
GENERIC_TEXT,
[
"e02_012_107.lin",
"e02_012_110.lin",
"e02_012_118.lin",
"e02_013_114.lin",
]
),
(
"Free Time 07",
[
"e08_003_007.lin",
"e08_004_007.lin",
"e08_006_007.lin",
"e08_008_022.lin",
"e08_009_007.lin",
"e08_010_007.lin",
"e08_011_007.lin",
"e08_012_007.lin",
"e08_013_007.lin",
"e08_014_007.lin",
"e08_015_007.lin",
]
),
"e02_013_104.lin",
(
"Free Time 08",
[
"e08_003_008.lin",
"e08_004_008.lin",
"e08_006_008.lin",
"e08_008_007.lin",
"e08_009_008.lin",
"e08_010_008.lin",
"e08_011_008.lin",
"e08_012_008.lin",
"e08_013_008.lin",
"e08_014_008.lin",
"e08_015_008.lin",
]
),
"e02_014_104.lin",
"e02_014_100.lin",
"e02_014_101.lin",
"e02_014_102.lin",
"e02_014_019.lin",
"e02_015_143.lin",
(
"Twilight Syndrome",
[
"e02_150_000.lin",
"e02_150_252.lin",
"e02_150_250.lin",
"e02_150_251.lin",
"e02_151_252.lin",
]
),
"e02_017_143.lin",
"e08_100_004.lin",
"e02_018_104.lin",
"e02_018_100.lin",
"e02_018_101.lin",
"e02_019_102.lin",
"e02_020_104.lin",
(
GENERIC_TEXT,
[
"e02_020_112.lin",
"e02_020_113.lin",
"e02_020_114.lin",
]
),
(
"Free Time 09",
[
"e08_003_009.lin",
"e08_006_009.lin",
"e08_008_008.lin",
"e08_009_009.lin",
"e08_010_009.lin",
]
),
"e02_021_104.lin",
"e02_021_003.lin",
"e02_021_019.lin",
"e02_021_141.lin",
"e02_021_020.lin",
"e02_021_022.lin",
"e02_022_023.lin",
"e02_022_022.lin",
"e02_023_025.lin",
]
),
(
INVESTIGATION,
[
"e02_100_026.lin",
"e02_100_028.lin",
"e02_100_029.lin",
"e02_999_013.lin",
"e02_101_143.lin",
"e02_102_143.lin",
(
"Twilight Syndrome",
[
"e02_160_000.lin",
"e02_160_252.lin",
"e02_160_251.lin",
"e02_160_253.lin",
"e02_160_250.lin",
"e02_161_254.lin",
"e02_162_255.lin",
"e02_163_256.lin",
"e02_162_000.lin",
"e02_163_000.lin",
"e02_170_000.lin",
"e02_171_000.lin",
"e02_170_256.lin",
"e02_171_256.lin",
]
),
"e02_103_143.lin",
"e02_104_141.lin",
"e02_104_143.lin",
"e02_104_019.lin",
"e02_105_002.lin",
"e02_999_015.lin",
"e02_105_019.lin",
"e02_105_100.lin",
"e02_106_116.lin",
"e02_106_100.lin",
(
GENERIC_TEXT,
[
"e02_105_101.lin",
"e02_105_102.lin",
"e02_106_002.lin",
"e02_106_021.lin",
"e02_106_022.lin",
"e02_106_143.lin",
]
),
"e02_106_025.lin",
"e02_107_026.lin",
"e02_107_028.lin",
"e02_107_029.lin",
"e02_999_017.lin",
"e02_109_144.lin",
"e02_109_201.lin",
]
),
(
CLASS_TRIAL,
[
"e02_201_000.lin",
"e02_202_000.lin",
"nonstop_02_001.dat",
"e02_203_000.lin",
"e02_204_000.lin",
"e02_250_000.lin",
"nonstop_02_002.dat",
"e02_205_000.lin",
"e02_206_000.lin",
"nonstop_02_003.dat",
"e02_207_000.lin",
"e02_208_000.lin",
"nonstop_02_004.dat",
"e02_209_000.lin",
"e02_210_000.lin",
"e02_212_000.lin",
"nonstop_02_005.dat",
"e02_213_000.lin",
"e02_214_000.lin",
"nonstop_02_006.dat",
"e02_215_000.lin",
"e02_216_000.lin",
"hanron_02_001.dat",
"e02_217_000.lin",
"e02_218_000.lin",
"e02_219_000.lin",
"e02_220_000.lin",
"e02_221_000.lin",
"e02_252_000.lin",
"ldive_s01.pak",
"e02_222_000.lin",
"e02_251_000.lin",
"nonstop_02_007.dat",
"e02_223_000.lin",
"e02_224_000.lin",
"hanron_02_002.dat",
"e02_225_000.lin",
"e02_226_000.lin",
"e02_227_000.lin",
"e02_228_000.lin",
"e02_229_000.lin",
"e02_253_000.lin",
"dr2_mtb2_s02.pak",
"e02_230_000.lin",
"e02_231_000.lin",
"e02_232_000.lin",
"e02_233_000.lin",
"e02_300_201.lin",
"e02_301_201.lin",
"e02_296_000.lin",
"e02_297_000.lin",
"e02_299_000.lin",
]
),
(
POST_TRIAL,
[
"e02_301_143.lin",
]
),
(
GENERIC_TEXT,
[
"e02_999_001.lin",
"e02_999_002.lin",
"e02_999_003.lin",
"e02_999_004.lin",
"e02_999_005.lin",
"e02_999_006.lin",
"e02_999_008.lin",
"e02_999_009.lin",
"e02_999_010.lin",
"e02_999_011.lin",
"e02_999_012.lin",
"e02_999_014.lin",
"e02_999_016.lin",
"e02_999_018.lin",
"e02_999_019.lin",
"e02_999_020.lin",
"e02_999_021.lin",
"e02_999_022.lin",
"e02_999_023.lin",
"e02_999_099.lin",
"e02_999_100.lin",
"e02_999_101.lin",
"e02_999_102.lin",
"e02_999_103.lin",
"e02_999_106.lin",
"e02_999_108.lin",
"e02_999_110.lin",
"e02_999_111.lin",
"e02_999_112.lin",
"e02_999_113.lin",
"e02_999_114.lin",
"e02_999_115.lin",
"e02_999_116.lin",
"e02_999_117.lin",
"e02_999_118.lin",
"e02_999_119.lin",
"e02_999_120.lin",
"e02_999_121.lin",
"e02_999_122.lin",
"e02_999_123.lin",
"e02_999_124.lin",
"e02_999_125.lin",
"e02_999_126.lin",
"e02_999_127.lin",
"e02_999_128.lin",
"e02_999_129.lin",
"e02_999_130.lin",
"e02_999_131.lin",
"e02_999_132.lin",
"e02_999_133.lin",
"e02_999_134.lin",
"e02_999_135.lin",
"e02_999_136.lin",
"e02_999_137.lin",
"e02_999_138.lin",
"e02_999_139.lin",
"e02_999_141.lin",
"e02_999_142.lin",
"e02_999_143.lin",
"script_pak_e02_001.lin",
"script_pak_e02_002.lin",
"script_pak_e02_003.lin",
"script_pak_e02_004.lin",
"script_pak_e02_005.lin",
"script_pak_e02_006.lin",
"script_pak_e02_007.lin",
"script_pak_e02_008.lin",
"script_pak_e02_009.lin",
"script_pak_e02_010.lin",
"script_pak_e02_012.lin",
"script_pak_e02_013.lin",
"script_pak_e02_014.lin",
"script_pak_e02_016.lin",
"script_pak_e02_017.lin",
"script_pak_e02_019.lin",
"script_pak_e02_020.lin",
"script_pak_e02_021.lin",
"script_pak_e02_022.lin",
"script_pak_e02_023.lin",
"script_pak_e02_026.lin",
"script_pak_e02_027.lin",
"script_pak_e02_028.lin",
"script_pak_e02_029.lin",
"script_pak_e02_030.lin",
"script_pak_e02_031.lin",
"script_pak_e02_032.lin",
"script_pak_e02_033.lin",
"script_pak_e02_034.lin",
"script_pak_e02_035.lin",
"script_pak_e02_036.lin",
"script_pak_e02_037.lin",
"script_pak_e02_038.lin",
"script_pak_e02_039.lin",
"script_pak_e02_040.lin",
"script_pak_e02_041.lin",
"script_pak_e02_042.lin",
"script_pak_e02_043.lin",
"script_pak_e02_044.lin",
"script_pak_e02_045.lin",
"script_pak_e02_046.lin",
"script_pak_e02_047.lin",
"script_pak_e02_048.lin",
"script_pak_e02_049.lin",
"script_pak_e02_050.lin",
"script_pak_e02_051.lin",
"script_pak_e02_057.lin",
"script_pak_e02_058.lin",
"script_pak_e02_059.lin",
"script_pak_e02_060.lin",
"script_pak_e02_061.lin",
"script_pak_e02_062.lin",
"script_pak_e02_063.lin",
"script_pak_e02_064.lin",
"script_pak_e02_065.lin",
"script_pak_e02_066.lin",
"script_pak_e02_067.lin",
"script_pak_e02_068.lin",
"script_pak_e02_069.lin",
"script_pak_e02_070.lin",
"script_pak_e02_071.lin",
"script_pak_e02_072.lin",
"script_pak_e02_073.lin",
"script_pak_e02_074.lin",
"script_pak_e02_075.lin",
"script_pak_e02_076.lin",
"script_pak_e02_077.lin",
"script_pak_e02_078.lin",
"script_pak_e02_079.lin",
"script_pak_e02_080.lin",
"script_pak_e02_081.lin",
"script_pak_e02_082.lin",
"script_pak_e02_083.lin",
"script_pak_e02_084.lin",
"script_pak_e02_085.lin",
"script_pak_e02_086.lin",
"script_pak_e02_087.lin",
"script_pak_e02_088.lin",
"script_pak_e02_089.lin",
"script_pak_e02_090.lin",
"script_pak_e02_091.lin",
"script_pak_e02_092.lin",
"script_pak_e02_093.lin",
"script_pak_e02_095.lin",
"script_pak_e02_096.lin",
"script_pak_e02_097.lin",
]
),
]
),
(
"Chapter 3",
[
(
GENERIC_TEXT,
[
"e03_000_001.lin",
"e03_000_002.lin",
"e03_000_003.lin",
"e03_000_004.lin",
"e03_000_021.lin",
"e03_000_022.lin",
"e03_000_023.lin",
"e03_000_024.lin",
"e03_000_026.lin",
"e03_000_030.lin",
"e03_000_041.lin",
"e03_000_042.lin",
"e03_000_043.lin",
"e03_000_044.lin",
"e03_000_046.lin",
"e03_000_047.lin",
"e03_000_053.lin",
"e03_000_055.lin",
"e03_000_056.lin",
"e03_000_059.lin",
"e03_000_060.lin",
"e03_000_061.lin",
"e03_000_062.lin",
"e03_000_063.lin",
"e03_000_064.lin",
"e03_000_100.lin",
"e03_000_101.lin",
"e03_000_102.lin",
"e03_000_104.lin",
"e03_000_140.lin",
"e03_000_143.lin",
"e03_000_144.lin",
]
),
(
EVERYDAY_LIFE,
[
"e03_001_100.lin",
"e03_001_101.lin",
"e03_001_102.lin",
"e03_001_104.lin",
"e03_002_102.lin",
"e03_003_019.lin",
"e03_003_140.lin",
"e03_004_040.lin",
"e03_004_041.lin",
"e03_004_042.lin",
"e03_004_044.lin",
"e03_004_046.lin",
"e03_004_047.lin",
"e03_004_053.lin",
"e03_004_055.lin",
"e03_005_055.lin",
"e03_005_056.lin",
"e03_005_059.lin",
"e03_006_100.lin",
"e03_007_100.lin",
"e03_007_101.lin",
"e03_007_104.lin",
"e03_008_102.lin",
"e03_009_047.lin",
"e03_009_104.lin",
"e03_010_104.lin",
"e03_010_115.lin",
"e03_010_117.lin",
"e03_011_001.lin",
"e03_011_003.lin",
"e03_011_100.lin",
"e03_011_101.lin",
"e03_011_104.lin",
"e03_012_102.lin",
"e03_013_104.lin",
"e03_013_106.lin",
"e03_013_113.lin",
"e03_013_117.lin",
"e03_014_104.lin",
"e03_014_107.lin",
"e03_014_110.lin",
"e03_015_046.lin",
"e03_015_104.lin",
"e03_016_019.lin",
"e03_016_040.lin",
"e03_016_047.lin",
"e03_016_048.lin",
"e03_017_001.lin",
"e03_018_100.lin",
"e03_018_101.lin",
"e03_018_104.lin",
"e03_019_102.lin",
"e03_020_055.lin",
"e03_021_056.lin",
"e03_021_060.lin",
"e03_021_061.lin",
"e03_021_062.lin",
"e03_021_063.lin",
"e03_022_055.lin",
"e03_022_104.lin",
"e03_023_055.lin",
"e03_023_056.lin",
"e03_023_060.lin",
"e03_023_062.lin",
"e03_023_104.lin",
"e03_024_055.lin",
"e03_024_056.lin",
"e03_024_060.lin",
"e03_024_061.lin",
"e03_024_062.lin",
"e03_025_055.lin",
"e03_025_056.lin",
"e03_025_060.lin",
"e03_025_061.lin",
"e03_025_062.lin",
"e03_025_104.lin",
"e03_026_055.lin",
"e03_026_056.lin",
"e03_026_104.lin",
"e03_027_055.lin",
"e03_027_056.lin",
"e03_027_061.lin",
"e03_028_040.lin",
"e03_028_046.lin",
"e03_029_040.lin",
"e03_029_044.lin",
"e03_029_049.lin",
"e03_030_046.lin",
(
"Free Time 10",
[
# "e08_001_003.lin",
# "e08_003_010.lin",
# "e08_004_009.lin",
# "e08_006_010.lin",
# "e08_007_001.lin",
# "e08_008_009.lin",
# "e08_009_010.lin",
# "e08_010_010.lin",
# "e08_011_009.lin",
# "e08_012_009.lin",
# "e08_013_009.lin",
# "e08_014_009.lin",
# "e08_015_009.lin",
]
),
]
),
(
INVESTIGATION,
[
"e03_100_042.lin",
"e03_100_043.lin",
"e03_100_044.lin",
"e03_100_045.lin",
"e03_100_050.lin",
"e03_100_051.lin",
"e03_100_054.lin",
"e03_100_055.lin",
"e03_100_056.lin",
"e03_100_064.lin",
"e03_102_144.lin",
"e03_102_202.lin",
]
),
(
CLASS_TRIAL,
[
"e03_201_000.lin",
"e03_202_000.lin",
"e03_202_001.lin",
"e03_203_000.lin",
"e03_204_000.lin",
"e03_204_001.lin",
"e03_205_000.lin",
"e03_206_000.lin",
"e03_207_000.lin",
"e03_208_000.lin",
"e03_209_000.lin",
"e03_210_000.lin",
"e03_210_001.lin",
"e03_211_000.lin",
"e03_212_000.lin",
"e03_212_001.lin",
"e03_213_000.lin",
"e03_214_000.lin",
"e03_214_001.lin",
"e03_215_000.lin",
"e03_216_000.lin",
"e03_217_000.lin",
"e03_218_000.lin",
"e03_218_001.lin",
"e03_219_000.lin",
"e03_220_000.lin",
"e03_220_001.lin",
"e03_221_000.lin",
"e03_222_000.lin",
"e03_223_000.lin",
"e03_224_000.lin",
"e03_225_000.lin",
"e03_226_000.lin",
"e03_227_000.lin",
"e03_228_000.lin",
"e03_229_000.lin",
"e03_230_000.lin",
"e03_230_001.lin",
"e03_231_000.lin",
"e03_232_000.lin",
"e03_233_000.lin",
"e03_233_001.lin",
"e03_234_000.lin",
"nonstop_03_008.dat",
"e03_235_000.lin",
"e03_236_000.lin",
"e03_237_000.lin",
"e03_238_000.lin",
"e03_239_000.lin",
"e03_240_000.lin",
"e03_250_000.lin",
"e03_296_000.lin",
"e03_297_000.lin",
"e03_299_000.lin",
]
),
(
POST_TRIAL,
[
"e03_300_202.lin",
"e03_301_001.lin",
"e03_301_143.lin",
"e03_301_144.lin",
"e03_301_202.lin",
]
),
(
GENERIC_TEXT,
[
"e03_999_001.lin",
"e03_999_002.lin",
"e03_999_008.lin",
"e03_999_009.lin",
"e03_999_010.lin",
"e03_999_013.lin",
"e03_999_014.lin",
"e03_999_015.lin",
"e03_999_016.lin",
"e03_999_017.lin",
"e03_999_018.lin",
"e03_999_019.lin",
"e03_999_020.lin",
"e03_999_021.lin",
"e03_999_022.lin",
"e03_999_023.lin",
"e03_999_024.lin",
"e03_999_025.lin",
"e03_999_026.lin",
"e03_999_027.lin",
"e03_999_028.lin",
"e03_999_031.lin",
"e03_999_041.lin",
"e03_999_042.lin",
"e03_999_050.lin",
"e03_999_051.lin",
"e03_999_052.lin",
"e03_999_053.lin",
"e03_999_054.lin",
"e03_999_056.lin",
"e03_999_057.lin",
"e03_999_058.lin",
"e03_999_061.lin",
"e03_999_062.lin",
"e03_999_063.lin",
"e03_999_064.lin",
"e03_999_065.lin",
"e03_999_066.lin",
"e03_999_067.lin",
"e03_999_068.lin",
"e03_999_076.lin",
"e03_999_078.lin",
"e03_999_079.lin",
"e03_999_080.lin",
"e03_999_081.lin",
"e03_999_082.lin",
"e03_999_083.lin",
"e03_999_084.lin",
"e03_999_085.lin",
"e03_999_086.lin",
"e03_999_088.lin",
"e03_999_091.lin",
"e03_999_092.lin",
"e03_999_095.lin",
"e03_999_096.lin",
"e03_999_097.lin",
"e03_999_099.lin",
"e03_999_106.lin",
"e03_999_107.lin",
"e03_999_108.lin",
"e03_999_111.lin",
"e03_999_115.lin",
"e03_999_119.lin",
"e03_999_120.lin",
"e03_999_121.lin",
"e03_999_122.lin",
"e03_999_124.lin",
"e03_999_127.lin",
"e03_999_128.lin",
"e03_999_129.lin",
"e03_999_130.lin",
"e03_999_131.lin",
"e03_999_132.lin",
"e03_999_133.lin",
"e03_999_134.lin",
"e03_999_135.lin",
"e03_999_136.lin",
"e03_999_137.lin",
"e03_999_138.lin",
"e03_999_139.lin",
"e03_999_140.lin",
"e03_999_141.lin",
"e03_999_142.lin",
"e03_999_143.lin",
"e03_999_151.lin",
"e03_999_152.lin",
"e03_999_153.lin",
"e03_999_154.lin",
"script_pak_e03_001.lin",
"script_pak_e03_002.lin",
"script_pak_e03_003.lin",
"script_pak_e03_004.lin",
"script_pak_e03_007.lin",
"script_pak_e03_008.lin",
"script_pak_e03_009.lin",
"script_pak_e03_010.lin",
"script_pak_e03_012.lin",
"script_pak_e03_013.lin",
"script_pak_e03_015.lin",
"script_pak_e03_016.lin",
"script_pak_e03_017.lin",
"script_pak_e03_018.lin",
"script_pak_e03_019.lin",
"script_pak_e03_020.lin",
"script_pak_e03_021.lin",
"script_pak_e03_022.lin",
"script_pak_e03_023.lin",
"script_pak_e03_025.lin",
"script_pak_e03_026.lin",
"script_pak_e03_027.lin",
"script_pak_e03_028.lin",
"script_pak_e03_029.lin",
"script_pak_e03_030.lin",
"script_pak_e03_031.lin",
"script_pak_e03_032.lin",
"script_pak_e03_033.lin",
"script_pak_e03_034.lin",
"script_pak_e03_035.lin",
"script_pak_e03_037.lin",
"script_pak_e03_038.lin",
"script_pak_e03_039.lin",
"script_pak_e03_040.lin",
"script_pak_e03_041.lin",
"script_pak_e03_042.lin",
"script_pak_e03_043.lin",
"script_pak_e03_044.lin",
"script_pak_e03_045.lin",
"script_pak_e03_046.lin",
"script_pak_e03_047.lin",
"script_pak_e03_048.lin",
"script_pak_e03_049.lin",
"script_pak_e03_050.lin",
"script_pak_e03_051.lin",
"script_pak_e03_052.lin",
"script_pak_e03_053.lin",
"script_pak_e03_054.lin",
"script_pak_e03_055.lin",
"script_pak_e03_056.lin",
"script_pak_e03_057.lin",
"script_pak_e03_058.lin",
"script_pak_e03_059.lin",
"script_pak_e03_060.lin",
"script_pak_e03_061.lin",
"script_pak_e03_062.lin",
"script_pak_e03_063.lin",
"script_pak_e03_064.lin",
"script_pak_e03_065.lin",
"script_pak_e03_066.lin",
"script_pak_e03_067.lin",
"script_pak_e03_068.lin",
"script_pak_e03_069.lin",
"script_pak_e03_070.lin",
"script_pak_e03_071.lin",
"script_pak_e03_072.lin",
"script_pak_e03_073.lin",
"script_pak_e03_074.lin",
"script_pak_e03_075.lin",
"script_pak_e03_076.lin",
"script_pak_e03_077.lin",
"script_pak_e03_078.lin",
"script_pak_e03_079.lin",
"script_pak_e03_080.lin",
"script_pak_e03_081.lin",
"script_pak_e03_082.lin",
"script_pak_e03_083.lin",
"script_pak_e03_084.lin",
"script_pak_e03_085.lin",
"script_pak_e03_086.lin",
"script_pak_e03_087.lin",
"script_pak_e03_088.lin",
"script_pak_e03_089.lin",
"script_pak_e03_090.lin",
"script_pak_e03_091.lin",
"script_pak_e03_092.lin",
"script_pak_e03_093.lin",
"script_pak_e03_094.lin",
"script_pak_e03_095.lin",
"script_pak_e03_096.lin",
"script_pak_e03_097.lin",
"script_pak_e03_098.lin",
"script_pak_e03_099.lin",
"script_pak_e03_100.lin",
"script_pak_e03_101.lin",
"script_pak_e03_102.lin",
"script_pak_e03_103.lin",
"script_pak_e03_104.lin",
"script_pak_e03_105.lin",
"script_pak_e03_106.lin",
"script_pak_e03_107.lin",
"script_pak_e03_108.lin",
"script_pak_e03_109.lin",
"script_pak_e03_110.lin",
"script_pak_e03_111.lin",
"script_pak_e03_112.lin",
"script_pak_e03_113.lin",
"script_pak_e03_114.lin",
"script_pak_e03_115.lin",
"script_pak_e03_116.lin",
"script_pak_e03_117.lin",
"script_pak_e03_118.lin",
"script_pak_e03_119.lin",
"script_pak_e03_120.lin",
"script_pak_e03_121.lin",
"script_pak_e03_122.lin",
"script_pak_e03_130.lin",
"script_pak_e03_131.lin",
"script_pak_e03_132.lin",
"script_pak_e03_133.lin",
]
),
]
),
(
"Chapter 4",
[
(
GENERIC_TEXT,
[
"e04_000_001.lin",
"e04_000_002.lin",
"e04_000_003.lin",
"e04_000_004.lin",
"e04_000_071.lin",
"e04_000_072.lin",
"e04_000_073.lin",
"e04_000_074.lin",
"e04_000_075.lin",
"e04_000_076.lin",
"e04_000_079.lin",
"e04_000_080.lin",
"e04_000_081.lin",
"e04_000_082.lin",
"e04_000_083.lin",
"e04_000_084.lin",
"e04_000_086.lin",
"e04_000_087.lin",
"e04_000_091.lin",
"e04_000_097.lin",
"e04_000_100.lin",
"e04_000_101.lin",
"e04_000_102.lin",
"e04_000_104.lin",
"e04_000_130.lin",
"e04_000_132.lin",
"e04_000_133.lin",
"e04_000_135.lin",
]
),
(
EVERYDAY_LIFE,
[
"e04_001_001.lin",
"e04_002_100.lin",
"e04_002_102.lin",
"e04_002_141.lin",
"e04_003_070.lin",
"e04_003_130.lin",
"e04_003_132.lin",
"e04_003_133.lin",
"e04_003_135.lin",
"e04_004_132.lin",
"e04_004_135.lin",
"e04_005_071.lin",
"e04_005_072.lin",
"e04_005_074.lin",
"e04_005_087.lin",
"e04_006_071.lin",
"e04_006_079.lin",
"e04_007_091.lin",
"e04_008_071.lin",
"e04_008_079.lin",
"e04_008_087.lin",
"e04_009_079.lin",
"e04_009_084.lin",
"e04_010_079.lin",
"e04_010_084.lin",
"e04_013_079.lin",
"e04_013_084.lin",
"e04_014_079.lin",
"e04_014_084.lin",
"e04_015_091.lin",
"e04_016_079.lin",
"e04_018_071.lin",
"e04_018_079.lin",
"e04_018_084.lin",
"e04_019_079.lin",
"e04_019_084.lin",
(
"Free Time",
[
]
),
]
),
(
INVESTIGATION,
[
"e04_100_079.lin",
"e04_100_091.lin",
"e04_100_093.lin",
"e04_101_071.lin",
"e04_103_096.lin",
"e04_104_096.lin",
"e04_104_097.lin",
"e04_105_079.lin",
"e04_106_091.lin",
"e04_107_079.lin",
"e04_107_091.lin",
"e04_107_095.lin",
"e04_108_071.lin",
"e04_108_088.lin",
"e04_108_089.lin",
"e04_109_090.lin",
"e04_109_203.lin",
]
),
(
CLASS_TRIAL,
[
"e04_201_000.lin",
"e04_202_000.lin",
"e04_202_001.lin",
"e04_203_000.lin",
"e04_204_000.lin",
"e04_204_001.lin",
"e04_205_000.lin",
"e04_206_000.lin",
"e04_206_001.lin",
"e04_207_000.lin",
"e04_208_000.lin",
"e04_208_001.lin",
"e04_209_000.lin",
"e04_210_000.lin",
"e04_211_000.lin",
"e04_212_000.lin",
"e04_213_000.lin",
"e04_213_001.lin",
"e04_214_000.lin",
"e04_215_000.lin",
"e04_216_000.lin",
"e04_217_000.lin",
"e04_217_001.lin",
"e04_218_000.lin",
"e04_219_000.lin",
"e04_220_000.lin",
"e04_221_000.lin",
"e04_222_000.lin",
"e04_223_000.lin",
"e04_223_001.lin",
"e04_224_000.lin",
"e04_225_000.lin",
"e04_225_001.lin",
"e04_226_000.lin",
"e04_227_000.lin",
"e04_227_001.lin",
"e04_228_000.lin",
"e04_229_000.lin",
"e04_229_001.lin",
"e04_230_000.lin",
"e04_231_000.lin",
"e04_232_000.lin",
"e04_233_000.lin",
"e04_233_001.lin",
"e04_234_000.lin",
"e04_235_000.lin",
"e04_236_000.lin",
"e04_237_000.lin",
"e04_238_000.lin",
"e04_239_000.lin",
"e04_240_000.lin",
"e04_241_000.lin",
"e04_241_001.lin",
"e04_242_000.lin",
"e04_243_000.lin",
"e04_244_000.lin",
"e04_245_000.lin",
"e04_245_001.lin",
"e04_246_000.lin",
"e04_247_000.lin",
"e04_248_000.lin",
"e04_249_000.lin",
"e04_250_000.lin",
"e04_296_000.lin",
"e04_297_000.lin",
"e04_299_000.lin",
]
),
(
POST_TRIAL,
[
"e04_300_203.lin",
"e04_301_105.lin",
"e04_301_143.lin",
"e04_301_150.lin",
"e04_301_203.lin",
]
),
(
GENERIC_TEXT,
[
"e04_999_001.lin",
"e04_999_002.lin",
"e04_999_003.lin",
"e04_999_004.lin",
"e04_999_005.lin",
"e04_999_006.lin",
"e04_999_007.lin",
"e04_999_008.lin",
"e04_999_009.lin",
"e04_999_015.lin",
"e04_999_016.lin",
"e04_999_017.lin",
"e04_999_018.lin",
"e04_999_019.lin",
"e04_999_099.lin",
"e04_999_100.lin",
"e04_999_101.lin",
"e04_999_102.lin",
"e04_999_103.lin",
"e04_999_106.lin",
"e04_999_108.lin",
"e04_999_111.lin",
"e04_999_112.lin",
"e04_999_113.lin",
"e04_999_114.lin",
"e04_999_116.lin",
"e04_999_117.lin",
"e04_999_118.lin",
"e04_999_119.lin",
"e04_999_136.lin",
"e04_999_137.lin",
"e04_999_138.lin",
"e04_999_139.lin",
"e04_999_140.lin",
"e04_999_141.lin",
"e04_999_142.lin",
"e04_999_143.lin",
"e04_999_144.lin",
"e04_999_145.lin",
"e04_999_146.lin",
"e04_999_147.lin",
"e04_999_148.lin",
"e04_999_149.lin",
"e04_999_150.lin",
"e04_999_151.lin",
"e04_999_152.lin",
"e04_999_153.lin",
"e04_999_154.lin",
"e04_999_155.lin",
"e04_999_156.lin",
"e04_999_157.lin",
"e04_999_158.lin",
"e04_999_159.lin",
"e04_999_160.lin",
"e04_999_161.lin",
"e04_999_162.lin",
"e04_999_163.lin",
"e04_999_164.lin",
"e04_999_165.lin",
"e04_999_166.lin",
"script_pak_e04_001.lin",
"script_pak_e04_002.lin",
"script_pak_e04_003.lin",
"script_pak_e04_004.lin",
"script_pak_e04_007.lin",
"script_pak_e04_008.lin",
"script_pak_e04_009.lin",
"script_pak_e04_010.lin",
"script_pak_e04_011.lin",
"script_pak_e04_012.lin",
"script_pak_e04_013.lin",
"script_pak_e04_014.lin",
"script_pak_e04_015.lin",
"script_pak_e04_016.lin",
"script_pak_e04_017.lin",
"script_pak_e04_018.lin",
"script_pak_e04_019.lin",
"script_pak_e04_020.lin",
"script_pak_e04_021.lin",
"script_pak_e04_023.lin",
"script_pak_e04_024.lin",
"script_pak_e04_025.lin",
"script_pak_e04_026.lin",
"script_pak_e04_027.lin",
"script_pak_e04_028.lin",
"script_pak_e04_029.lin",
"script_pak_e04_030.lin",
"script_pak_e04_031.lin",
"script_pak_e04_034.lin",
"script_pak_e04_035.lin",
"script_pak_e04_036.lin",
"script_pak_e04_037.lin",
"script_pak_e04_038.lin",
"script_pak_e04_039.lin",
"script_pak_e04_040.lin",
"script_pak_e04_041.lin",
"script_pak_e04_042.lin",
"script_pak_e04_048.lin",
"script_pak_e04_049.lin",
"script_pak_e04_050.lin",
"script_pak_e04_051.lin",
"script_pak_e04_052.lin",
"script_pak_e04_053.lin",
"script_pak_e04_054.lin",
"script_pak_e04_055.lin",
"script_pak_e04_056.lin",
"script_pak_e04_057.lin",
"script_pak_e04_058.lin",
"script_pak_e04_059.lin",
"script_pak_e04_060.lin",
"script_pak_e04_061.lin",
"script_pak_e04_062.lin",
"script_pak_e04_063.lin",
"script_pak_e04_064.lin",
"script_pak_e04_065.lin",
"script_pak_e04_066.lin",
"script_pak_e04_067.lin",
"script_pak_e04_068.lin",
"script_pak_e04_069.lin",
"script_pak_e04_070.lin",
"script_pak_e04_071.lin",
"script_pak_e04_072.lin",
"script_pak_e04_073.lin",
"script_pak_e04_074.lin",
"script_pak_e04_075.lin",
"script_pak_e04_076.lin",
"script_pak_e04_077.lin",
"script_pak_e04_078.lin",
"script_pak_e04_079.lin",
"script_pak_e04_080.lin",
"script_pak_e04_081.lin",
"script_pak_e04_082.lin",
"script_pak_e04_083.lin",
"script_pak_e04_084.lin",
"script_pak_e04_085.lin",
"script_pak_e04_086.lin",
"script_pak_e04_087.lin",
"script_pak_e04_088.lin",
"script_pak_e04_089.lin",
"script_pak_e04_090.lin",
"script_pak_e04_091.lin",
"script_pak_e04_092.lin",
"script_pak_e04_093.lin",
"script_pak_e04_094.lin",
"script_pak_e04_095.lin",
"script_pak_e04_096.lin",
"script_pak_e04_097.lin",
"script_pak_e04_098.lin",
]
),
]
),
(
"Chapter 5",
[
(
GENERIC_TEXT,
[
"e05_000_001.lin",
"e05_000_002.lin",
"e05_000_003.lin",
"e05_000_004.lin",
"e05_000_021.lin",
"e05_000_022.lin",
"e05_000_023.lin",
"e05_000_024.lin",
"e05_000_026.lin",
"e05_000_030.lin",
"e05_000_041.lin",
"e05_000_042.lin",
"e05_000_044.lin",
"e05_000_046.lin",
"e05_000_047.lin",
"e05_000_053.lin",
"e05_000_055.lin",
"e05_000_063.lin",
"e05_000_064.lin",
"e05_000_100.lin",
"e05_000_101.lin",
"e05_000_102.lin",
"e05_000_104.lin",
"e05_000_105.lin",
"e05_000_130.lin",
"e05_000_131.lin",
"e05_000_132.lin",
"e05_000_133.lin",
"e05_000_134.lin",
"e05_000_135.lin",
"e05_000_143.lin",
"e05_000_144.lin",
"e05_000_161.lin",
"e05_000_162.lin",
"e05_000_163.lin",
"e05_000_164.lin",
"e05_000_165.lin",
"e05_000_166.lin",
"e05_000_167.lin",
"e05_000_168.lin",
"e05_000_170.lin",
"e05_000_171.lin",
]
),
(
EVERYDAY_LIFE,
[
"e05_001_102.lin",
"e05_002_160.lin",
"e05_002_161.lin",
"e05_002_162.lin",
"e05_002_163.lin",
"e05_002_166.lin",
"e05_002_170.lin",
"e05_002_171.lin",
"e05_003_160.lin",
"e05_003_171.lin",
"e05_004_100.lin",
"e05_004_101.lin",
"e05_004_104.lin",
"e05_005_101.lin",
"e05_005_104.lin",
"e05_006_102.lin",
"e05_007_019.lin",
"e05_007_020.lin",
"e05_007_100.lin",
"e05_007_101.lin",
"e05_007_102.lin",
"e05_007_140.lin",
"e05_008_030.lin",
"e05_009_104.lin",
"e05_010_104.lin",
"e05_010_111.lin",
"e05_010_112.lin",
"e05_011_100.lin",
"e05_011_104.lin",
"e05_012_101.lin",
"e05_013_100.lin",
"e05_013_101.lin",
"e05_013_104.lin",
"e05_014_030.lin",
"e05_014_070.lin",
"e05_014_100.lin",
"e05_014_102.lin",
"e05_014_133.lin",
"e05_015_100.lin",
"e05_015_104.lin",
"e05_015_130.lin",
"e05_015_131.lin",
"e05_016_100.lin",
"e05_016_101.lin",
"e05_016_104.lin",
"e05_017_102.lin",
"e05_018_070.lin",
"e05_018_130.lin",
"e05_018_132.lin",
"e05_018_133.lin",
"e05_018_134.lin",
"e05_018_135.lin",
"e05_019_040.lin",
"e05_019_070.lin",
"e05_019_140.lin",
"e05_019_143.lin",
"e05_019_144.lin",
"e05_019_160.lin",
"e05_019_161.lin",
"e05_020_162.lin",
"e05_020_164.lin",
"e05_021_162.lin",
"e05_021_164.lin",
"e05_021_165.lin",
"e05_022_162.lin",
"e05_022_172.lin",
"e05_023_162.lin",
"e05_023_167.lin",
(
"Free Time",
[
]
),
]
),
(
INVESTIGATION,
[
"e05_100_167.lin",
"e05_100_168.lin",
"e05_101_041.lin",
"e05_101_100.lin",
"e05_101_131.lin",
"e05_101_161.lin",
"e05_101_164.lin",
"e05_101_167.lin",
"e05_102_105.lin",
"e05_104_144.lin",
"e05_104_204.lin",
]
),
(
CLASS_TRIAL,
[
"e05_201_000.lin",
"e05_202_000.lin",
"e05_202_001.lin",
"e05_203_000.lin",
"e05_204_000.lin",
"e05_204_001.lin",
"e05_205_000.lin",
"e05_206_000.lin",
"e05_206_001.lin",
"e05_207_000.lin",
"e05_208_000.lin",
"e05_209_000.lin",
"e05_210_000.lin",
"e05_211_000.lin",
"e05_212_000.lin",
"e05_212_001.lin",
"e05_213_000.lin",
"e05_214_000.lin",
"e05_214_001.lin",
"e05_215_000.lin",
"e05_216_000.lin",
"e05_216_001.lin",
"e05_217_000.lin",
"e05_218_000.lin",
"e05_219_000.lin",
"e05_220_000.lin",
"e05_221_000.lin",
"e05_222_000.lin",
"e05_223_000.lin",
"e05_224_000.lin",
"e05_225_000.lin",
"e05_226_000.lin",
"e05_226_001.lin",
"e05_227_000.lin",
"e05_228_000.lin",
"e05_229_000.lin",
"e05_230_000.lin",
"e05_231_000.lin",
"e05_232_000.lin",
"e05_233_000.lin",
"e05_234_000.lin",
"e05_235_000.lin",
"e05_236_000.lin",
"e05_237_000.lin",
"e05_238_000.lin",
"e05_239_000.lin",
"e05_240_000.lin",
"e05_240_001.lin",
"e05_241_000.lin",
"e05_242_000.lin",
"e05_242_001.lin",
"e05_243_000.lin",
"e05_296_000.lin",
"e05_297_000.lin",
"e05_299_000.lin",
]
),
(
POST_TRIAL,
[
"e05_300_204.lin",
"e05_301_104.lin",
"e05_301_204.lin",
"e05_302_104.lin",
"e05_303_104.lin",
"e05_304_100.lin",
"e05_304_101.lin",
"e05_304_104.lin",
"e05_305_100.lin",
"e05_305_101.lin",
"e05_305_102.lin",
"e05_307_030.lin",
"e05_307_211.lin",
]
),
(
GENERIC_TEXT,
[
"e05_999_001.lin",
"e05_999_002.lin",
"e05_999_003.lin",
"e05_999_004.lin",
"e05_999_005.lin",
"e05_999_006.lin",
"e05_999_007.lin",
"e05_999_008.lin",
"e05_999_009.lin",
"e05_999_010.lin",
"e05_999_011.lin",
"e05_999_012.lin",
"e05_999_013.lin",
"e05_999_014.lin",
"e05_999_015.lin",
"e05_999_016.lin",
"e05_999_099.lin",
"e05_999_100.lin",
"e05_999_101.lin",
"e05_999_102.lin",
"e05_999_103.lin",
"e05_999_105.lin",
"e05_999_106.lin",
"e05_999_107.lin",
"e05_999_108.lin",
"e05_999_110.lin",
"e05_999_111.lin",
"e05_999_112.lin",
"e05_999_113.lin",
"e05_999_114.lin",
"e05_999_115.lin",
"e05_999_116.lin",
"e05_999_117.lin",
"e05_999_118.lin",
"e05_999_119.lin",
"e05_999_120.lin",
"e05_999_121.lin",
"e05_999_122.lin",
"e05_999_123.lin",
"e05_999_124.lin",
"e05_999_125.lin",
"e05_999_126.lin",
"e05_999_136.lin",
"e05_999_137.lin",
"e05_999_138.lin",
"e05_999_170.lin",
"e05_999_171.lin",
"e05_999_172.lin",
"e05_999_173.lin",
"e05_999_174.lin",
"e05_999_175.lin",
"e05_999_176.lin",
"e05_999_177.lin",
"e05_999_178.lin",
"e05_999_179.lin",
"e05_999_180.lin",
"e05_999_181.lin",
"e05_999_182.lin",
"e05_999_183.lin",
"e05_999_184.lin",
"e05_999_185.lin",
"e05_999_186.lin",
"e05_999_187.lin",
"e05_999_188.lin",
"e05_999_189.lin",
"e05_999_190.lin",
"e05_999_191.lin",
"e05_999_192.lin",
"e05_999_193.lin",
"e05_999_194.lin",
"e05_999_195.lin",
"e05_999_196.lin",
"e05_999_197.lin",
"e05_999_198.lin",
"e05_999_199.lin",
"e05_999_200.lin",
"e05_999_201.lin",
"e05_999_202.lin",
"e05_999_203.lin",
"e05_999_204.lin",
"e05_999_205.lin",
"e05_999_206.lin",
"e05_999_207.lin",
"e05_999_208.lin",
"e05_999_209.lin",
"e05_999_210.lin",
"e05_999_211.lin",
"e05_999_212.lin",
"e05_999_213.lin",
"e05_999_214.lin",
"e05_999_215.lin",
"e05_999_216.lin",
"e05_999_217.lin",
"e05_999_218.lin",
"e05_999_219.lin",
"e05_999_220.lin",
"e05_999_222.lin",
"script_pak_e05_001.lin",
"script_pak_e05_002.lin",
"script_pak_e05_003.lin",
"script_pak_e05_004.lin",
"script_pak_e05_007.lin",
"script_pak_e05_008.lin",
"script_pak_e05_009.lin",
"script_pak_e05_010.lin",
"script_pak_e05_012.lin",
"script_pak_e05_013.lin",
"script_pak_e05_015.lin",
"script_pak_e05_016.lin",
"script_pak_e05_017.lin",
"script_pak_e05_018.lin",
"script_pak_e05_019.lin",
"script_pak_e05_020.lin",
"script_pak_e05_021.lin",
"script_pak_e05_023.lin",
"script_pak_e05_024.lin",
"script_pak_e05_026.lin",
"script_pak_e05_027.lin",
"script_pak_e05_028.lin",
"script_pak_e05_029.lin",
"script_pak_e05_030.lin",
"script_pak_e05_031.lin",
"script_pak_e05_032.lin",
"script_pak_e05_033.lin",
"script_pak_e05_034.lin",
"script_pak_e05_035.lin",
"script_pak_e05_036.lin",
"script_pak_e05_039.lin",
"script_pak_e05_040.lin",
"script_pak_e05_042.lin",
"script_pak_e05_043.lin",
"script_pak_e05_044.lin",
"script_pak_e05_045.lin",
"script_pak_e05_046.lin",
"script_pak_e05_047.lin",
"script_pak_e05_048.lin",
"script_pak_e05_049.lin",
"script_pak_e05_050.lin",
"script_pak_e05_051.lin",
"script_pak_e05_052.lin",
"script_pak_e05_053.lin",
"script_pak_e05_054.lin",
"script_pak_e05_055.lin",
"script_pak_e05_056.lin",
"script_pak_e05_057.lin",
"script_pak_e05_058.lin",
"script_pak_e05_059.lin",
"script_pak_e05_060.lin",
"script_pak_e05_061.lin",
"script_pak_e05_062.lin",
"script_pak_e05_063.lin",
"script_pak_e05_064.lin",
"script_pak_e05_065.lin",
"script_pak_e05_066.lin",
"script_pak_e05_067.lin",
"script_pak_e05_073.lin",
"script_pak_e05_074.lin",
"script_pak_e05_075.lin",
"script_pak_e05_076.lin",
"script_pak_e05_077.lin",
"script_pak_e05_078.lin",
"script_pak_e05_079.lin",
"script_pak_e05_080.lin",
"script_pak_e05_081.lin",
"script_pak_e05_082.lin",
"script_pak_e05_083.lin",
"script_pak_e05_084.lin",
"script_pak_e05_085.lin",
"script_pak_e05_086.lin",
"script_pak_e05_087.lin",
"script_pak_e05_088.lin",
"script_pak_e05_089.lin",
"script_pak_e05_090.lin",
"script_pak_e05_091.lin",
"script_pak_e05_092.lin",
"script_pak_e05_093.lin",
"script_pak_e05_094.lin",
"script_pak_e05_095.lin",
"script_pak_e05_096.lin",
"script_pak_e05_097.lin",
"script_pak_e05_098.lin",
"script_pak_e05_099.lin",
"script_pak_e05_100.lin",
"script_pak_e05_101.lin",
"script_pak_e05_102.lin",
"script_pak_e05_103.lin",
"script_pak_e05_104.lin",
"script_pak_e05_105.lin",
"script_pak_e05_106.lin",
"script_pak_e05_107.lin",
"script_pak_e05_108.lin",
"script_pak_e05_109.lin",
"script_pak_e05_110.lin",
"script_pak_e05_111.lin",
"script_pak_e05_112.lin",
"script_pak_e05_113.lin",
"script_pak_e05_114.lin",
"script_pak_e05_115.lin",
"script_pak_e05_116.lin",
"script_pak_e05_117.lin",
"script_pak_e05_118.lin",
"script_pak_e05_119.lin",
"script_pak_e05_120.lin",
"script_pak_e05_121.lin",
"script_pak_e05_122.lin",
"script_pak_e05_123.lin",
"script_pak_e05_124.lin",
"script_pak_e05_125.lin",
"script_pak_e05_126.lin",
"script_pak_e05_127.lin",
"script_pak_e05_128.lin",
"script_pak_e05_129.lin",
"script_pak_e05_130.lin",
"script_pak_e05_131.lin",
"script_pak_e05_132.lin",
"script_pak_e05_133.lin",
"script_pak_e05_134.lin",
"script_pak_e05_135.lin",
"script_pak_e05_136.lin",
"script_pak_e05_137.lin",
"script_pak_e05_138.lin",
"script_pak_e05_139.lin",
"script_pak_e05_140.lin",
"script_pak_e05_141.lin",
"script_pak_e05_142.lin",
"script_pak_e05_143.lin",
"script_pak_e05_144.lin",
"script_pak_e05_145.lin",
"script_pak_e05_146.lin",
"script_pak_e05_147.lin",
"script_pak_e05_148.lin",
"script_pak_e05_149.lin",
"script_pak_e05_150.lin",
"script_pak_e05_151.lin",
"script_pak_e05_152.lin",
"script_pak_e05_154.lin",
]
),
]
),
(
"Chapter 0",
[
"e06_001_001.lin",
]
),
(
"Chapter 6",
[
(
INVESTIGATION,
[
"e06_100_260.lin",
"e06_100_265.lin",
"e06_101_267.lin",
"e06_102_264.lin",
"e06_102_273.lin",
"e06_102_274.lin",
"e06_102_275.lin",
"e06_102_276.lin",
"e06_103_263.lin",
"e06_103_271.lin",
"e06_103_272.lin",
"e06_104_262.lin",
"e06_104_269.lin",
"e06_105_261.lin",
"e06_105_268.lin",
"e06_105_270.lin",
"e06_106_260.lin",
"e06_106_265.lin",
"e06_106_266.lin",
"e06_106_267.lin",
"e06_107_260.lin",
"e06_108_205.lin",
]
),
(
CLASS_TRIAL,
[
"e06_201_000.lin",
"e06_202_000.lin",
"e06_203_000.lin",
"e06_204_000.lin",
"e06_204_001.lin",
"e06_205_000.lin",
"e06_207_000.lin",
"e06_208_000.lin",
"e06_208_001.lin",
"e06_209_000.lin",
"e06_210_000.lin",
"e06_211_000.lin",
"e06_212_000.lin",
"e06_213_000.lin",
"e06_214_000.lin",
"e06_214_001.lin",
"e06_215_000.lin",
"e06_216_000.lin",
"e06_216_001.lin",
"e06_217_000.lin",
"e06_218_000.lin",
"e06_219_000.lin",
"e06_221_000.lin",
"e06_222_000.lin",
"e06_223_000.lin",
"e06_224_000.lin",
"e06_225_000.lin",
"e06_226_000.lin",
"e06_227_000.lin",
"e06_228_000.lin",
"e06_229_000.lin",
"e06_229_001.lin",
"e06_230_000.lin",
"e06_231_000.lin",
"e06_231_001.lin",
"e06_232_000.lin",
"e06_234_000.lin",
"e06_234_001.lin",
"e06_236_000.lin",
"e06_237_000.lin",
"e06_237_001.lin",
"e06_238_000.lin",
"e06_240_000.lin",
"e06_240_001.lin",
"e06_241_000.lin",
"e06_242_000.lin",
"e06_242_001.lin",
"e06_244_000.lin",
"e06_290_000.lin",
"e06_295_000.lin",
"e06_296_000.lin",
"e06_297_000.lin",
"e06_298_000.lin",
"e06_299_000.lin",
]
),
(
GENERIC_TEXT,
[
"e06_999_100.lin",
"e06_999_101.lin",
"e06_999_102.lin",
"e06_999_103.lin",
"e06_999_104.lin",
"script_pak_e06_001.lin",
"script_pak_e06_002.lin",
"script_pak_e06_003.lin",
"script_pak_e06_004.lin",
"script_pak_e06_005.lin",
]
),
]
),
(
"Epilogue",
[
"e07_001_001.lin",
"script_pak_e07_002.lin",
]
),
]
),
(
"Monokuma Theatre",
[
"e08_100_000.lin",
"e08_100_001.lin",
"e08_100_002.lin",
"e08_100_003.lin",
"e08_100_004.lin",
"e08_100_005.lin",
"e08_100_006.lin",
"e08_100_007.lin",
"e08_100_008.lin",
"e08_100_009.lin",
"e08_100_010.lin",
"e08_100_011.lin",
"e08_100_012.lin",
"e08_100_013.lin",
"e08_100_014.lin",
"e08_100_015.lin",
"e08_100_016.lin",
"e08_100_017.lin",
]
),
(
"Free Time",
[
(
"Nagito Komaeda",
[
"e08_001_001.lin",
"e08_001_002.lin",
"e08_001_003.lin",
"e08_001_004.lin",
"e08_001_005.lin",
"e08_001_006.lin",
"e08_001_007.lin",
"e08_001_008.lin",
"e08_001_009.lin",
"e08_001_010.lin",
"e08_001_011.lin",
"e08_001_012.lin",
"e08_001_013.lin",
"e08_001_050.lin",
(
"Present Reactions",
[
"e08_001_000.lin",
"script_pak_e08_000.lin",
]
),
(
"Special Events",
[
"e08_001_030.lin",
"e08_001_031.lin",
"e08_001_032.lin",
"e08_001_033.lin",
"e08_001_034.lin",
]
),
]
),
(
"Byakuya Togami",
[
"e08_002_001.lin",
"e08_002_002.lin",
"e08_002_003.lin",
"e08_002_004.lin",
"e08_002_020.lin",
"e08_002_050.lin",
(
"Present Reactions",
[
"e08_002_000.lin",
]
),
(
"Special Events",
[
"e08_002_030.lin",
"e08_002_031.lin",
"e08_002_032.lin",
"e08_002_033.lin",
"e08_002_034.lin",
]
),
]
),
(
"Gundam Tanaka",
[
"e08_003_001.lin",
"e08_003_002.lin",
"e08_003_003.lin",
"e08_003_004.lin",
"e08_003_005.lin",
"e08_003_006.lin",
"e08_003_007.lin",
"e08_003_008.lin",
"e08_003_009.lin",
"e08_003_010.lin",
"e08_003_011.lin",
"e08_003_012.lin",
"e08_003_013.lin",
"e08_003_014.lin",
"e08_003_015.lin",
"e08_003_016.lin",
"e08_003_017.lin",
"e08_003_018.lin",
"e08_003_050.lin",
(
"Present Reactions",
[
"e08_003_000.lin",
]
),
(
"Special Events",
[
"e08_003_030.lin",
"e08_003_031.lin",
"e08_003_032.lin",
"e08_003_033.lin",
"e08_003_034.lin",
]
),
]
),
(
"Kazuichi Souda",
[
"e08_004_001.lin",
"e08_004_002.lin",
"e08_004_003.lin",
"e08_004_004.lin",
"e08_004_005.lin",
"e08_004_006.lin",
"e08_004_007.lin",
"e08_004_008.lin",
"e08_004_009.lin",
"e08_004_010.lin",
"e08_004_011.lin",
"e08_004_012.lin",
"e08_004_013.lin",
"e08_004_014.lin",
"e08_004_015.lin",
"e08_004_016.lin",
"e08_004_017.lin",
"e08_004_018.lin",
"e08_004_019.lin",
"e08_004_020.lin",
"e08_004_021.lin",
"e08_004_050.lin",
(
"Present Reactions",
[
"e08_004_000.lin",
]
),
(
"Special Events",
[
"e08_004_030.lin",
"e08_004_031.lin",
"e08_004_032.lin",
"e08_004_033.lin",
"e08_004_034.lin",
]
),
]
),
(
"Teruteru Hanamura",
[
"e08_005_001.lin",
"e08_005_002.lin",
"e08_005_003.lin",
"e08_005_004.lin",
"e08_005_050.lin",
(
"Present Reactions",
[
"e08_005_000.lin",
]
),
(
"Special Events",
[
"e08_005_030.lin",
"e08_005_031.lin",
"e08_005_032.lin",
"e08_005_033.lin",
"e08_005_034.lin",
]
),
]
),
(
"Nekomaru Nidai",
[
"e08_006_001.lin",
"e08_006_002.lin",
"e08_006_003.lin",
"e08_006_004.lin",
"e08_006_005.lin",
"e08_006_006.lin",
"e08_006_007.lin",
"e08_006_008.lin",
"e08_006_009.lin",
"e08_006_010.lin",
"e08_006_011.lin",
"e08_006_012.lin",
"e08_006_013.lin",
"e08_006_014.lin",
"e08_006_015.lin",
"e08_006_016.lin",
"e08_006_017.lin",
"e08_006_018.lin",
"e08_006_050.lin",
(
"Present Reactions",
[
"e08_006_000.lin",
"e08_006_100.lin",
]
),
(
"Special Events",
[
"e08_006_030.lin",
"e08_006_031.lin",
"e08_006_032.lin",
"e08_006_033.lin",
"e08_006_034.lin",
"e08_006_040.lin",
"e08_006_041.lin",
"e08_006_042.lin",
"e08_006_043.lin",
"e08_006_044.lin",
]
),
]
),
(
"Fuyuhiko Kuzuryuu",
[
"e08_007_001.lin",
"e08_007_002.lin",
"e08_007_003.lin",
"e08_007_004.lin",
"e08_007_005.lin",
"e08_007_006.lin",
"e08_007_007.lin",
"e08_007_008.lin",
"e08_007_009.lin",
"e08_007_010.lin",
"e08_007_011.lin",
"e08_007_012.lin",
"e08_007_013.lin",
"e08_007_014.lin",
"e08_007_015.lin",
"e08_007_016.lin",
"e08_007_017.lin",
"e08_007_018.lin",
"e08_007_020.lin",
"e08_007_050.lin",
(
"Present Reactions",
[
"e08_007_000.lin",
]
),
(
"Special Events",
[
"e08_007_030.lin",
"e08_007_031.lin",
"e08_007_032.lin",
"e08_007_033.lin",
"e08_007_034.lin",
]
),
]
),
(
"Akane Owari",
[
"e08_008_001.lin",
"e08_008_002.lin",
"e08_008_003.lin",
"e08_008_004.lin",
"e08_008_005.lin",
"e08_008_006.lin",
"e08_008_007.lin",
"e08_008_008.lin",
"e08_008_009.lin",
"e08_008_010.lin",
"e08_008_011.lin",
"e08_008_012.lin",
"e08_008_013.lin",
"e08_008_014.lin",
"e08_008_015.lin",
"e08_008_016.lin",
"e08_008_017.lin",
"e08_008_018.lin",
"e08_008_019.lin",
"e08_008_020.lin",
"e08_008_021.lin",
"e08_008_022.lin",
"e08_008_050.lin",
(
"Present Reactions",
[
"e08_008_000.lin",
]
),
(
"Special Events",
[
"e08_008_030.lin",
"e08_008_031.lin",
"e08_008_032.lin",
"e08_008_033.lin",
"e08_008_034.lin",
]
),
]
),
(
"Chiaki Nanami",
[
"e08_009_001.lin",
"e08_009_002.lin",
"e08_009_003.lin",
"e08_009_004.lin",
"e08_009_005.lin",
"e08_009_006.lin",
"e08_009_007.lin",
"e08_009_008.lin",
"e08_009_009.lin",
"e08_009_010.lin",
"e08_009_011.lin",
"e08_009_012.lin",
"e08_009_013.lin",
"e08_009_014.lin",
"e08_009_015.lin",
"e08_009_016.lin",
"e08_009_017.lin",
"e08_009_018.lin",
"e08_009_019.lin",
"e08_009_020.lin",
"e08_009_021.lin",
"e08_009_022.lin",
"e08_009_050.lin",
(
"Present Reactions",
[
"e08_009_000.lin",
]
),
(
"Special Events",
[
"e08_009_030.lin",
"e08_009_031.lin",
"e08_009_032.lin",
"e08_009_033.lin",
"e08_009_034.lin",
]
),
]
),
(
"Sonia Nevermind",
[
"e08_010_001.lin",
"e08_010_002.lin",
"e08_010_003.lin",
"e08_010_004.lin",
"e08_010_005.lin",
"e08_010_006.lin",
"e08_010_007.lin",
"e08_010_008.lin",
"e08_010_009.lin",
"e08_010_010.lin",
"e08_010_011.lin",
"e08_010_012.lin",
"e08_010_013.lin",
"e08_010_014.lin",
"e08_010_015.lin",
"e08_010_016.lin",
"e08_010_017.lin",
"e08_010_018.lin",
"e08_010_019.lin",
"e08_010_020.lin",
"e08_010_021.lin",
"e08_010_050.lin",
(
"Present Reactions",
[
"e08_010_000.lin",
]
),
(
"Special Events",
[
"e08_010_030.lin",
"e08_010_031.lin",
"e08_010_032.lin",
"e08_010_033.lin",
"e08_010_034.lin",
]
),
]
),
(
"Hiyoko Saionji",
[
"e08_011_001.lin",
"e08_011_002.lin",
"e08_011_003.lin",
"e08_011_004.lin",
"e08_011_005.lin",
"e08_011_006.lin",
"e08_011_007.lin",
"e08_011_008.lin",
"e08_011_009.lin",
"e08_011_010.lin",
"e08_011_011.lin",
"e08_011_012.lin",
"e08_011_050.lin",
(
"Present Reactions",
[
"e08_011_000.lin",
]
),
(
"Special Events",
[
"e08_011_030.lin",
"e08_011_031.lin",
"e08_011_032.lin",
"e08_011_033.lin",
"e08_011_034.lin",
]
),
]
),
(
"Mahiru Koizumi",
[
"e08_012_001.lin",
"e08_012_002.lin",
"e08_012_003.lin",
"e08_012_004.lin",
"e08_012_005.lin",
"e08_012_006.lin",
"e08_012_007.lin",
"e08_012_008.lin",
"e08_012_050.lin",
(
"Present Reactions",
[
"e08_012_000.lin",
]
),
(
"Special Events",
[
"e08_012_030.lin",
"e08_012_031.lin",
"e08_012_032.lin",
"e08_012_033.lin",
"e08_012_034.lin",
]
),
]
),
(
"Mikan Tsumiki",
[
"e08_013_001.lin",
"e08_013_002.lin",
"e08_013_003.lin",
"e08_013_004.lin",
"e08_013_005.lin",
"e08_013_006.lin",
"e08_013_007.lin",
"e08_013_008.lin",
"e08_013_009.lin",
"e08_013_010.lin",
"e08_013_011.lin",
"e08_013_012.lin",
"e08_013_050.lin",
(
"Present Reactions",
[
"e08_013_000.lin",
]
),
(
"Special Events",
[
"e08_013_030.lin",
"e08_013_031.lin",
"e08_013_032.lin",
"e08_013_033.lin",
"e08_013_034.lin",
]
),
]
),
(
"Ibuki Mioda",
[
"e08_014_001.lin",
"e08_014_002.lin",
"e08_014_003.lin",
"e08_014_004.lin",
"e08_014_005.lin",
"e08_014_006.lin",
"e08_014_007.lin",
"e08_014_008.lin",
"e08_014_009.lin",
"e08_014_010.lin",
"e08_014_011.lin",
"e08_014_012.lin",
"e08_014_050.lin",
(
"Present Reactions",
[
"e08_014_000.lin",
]
),
(
"Special Events",
[
"e08_014_030.lin",
"e08_014_031.lin",
"e08_014_032.lin",
"e08_014_033.lin",
"e08_014_034.lin",
]
),
]
),
(
"Peko Pekoyama",
[
"e08_015_001.lin",
"e08_015_002.lin",
"e08_015_003.lin",
"e08_015_004.lin",
"e08_015_005.lin",
"e08_015_006.lin",
"e08_015_007.lin",
"e08_015_008.lin",
"e08_015_050.lin",
(
"Present Reactions",
[
"e08_015_000.lin",
]
),
(
"Special Events",
[
"e08_015_030.lin",
"e08_015_031.lin",
"e08_015_032.lin",
"e08_015_033.lin",
"e08_015_034.lin",
]
),
]
),
]
),
(
"Dangan Island",
[
"e09_100_001.lin",
"e09_090_100.lin",
"e09_200_000.lin",
"e09_201_000.lin",
"e09_201_001.lin",
"e09_201_002.lin",
"e09_201_003.lin",
"e09_300_001.lin",
"e09_500_001.lin",
"e09_500_002.lin",
"e09_500_003.lin",
"e09_600_000.lin",
"e09_601_000.lin",
"e09_602_000.lin",
"e09_602_100.lin",
"e09_800_000.lin",
"e09_800_001.lin",
"e09_900_000.lin",
"e09_900_001.lin",
"e09_900_002.lin",
"e09_900_003.lin",
"e09_900_004.lin",
"e09_900_005.lin",
"e09_900_006.lin",
"e09_900_007.lin",
"e09_900_008.lin",
"e09_900_009.lin",
"e09_900_010.lin",
"e09_900_011.lin",
"e09_900_012.lin",
"e09_900_013.lin",
"e09_900_014.lin",
"e09_900_015.lin",
"e09_900_016.lin",
"e09_900_100.lin",
(
"Nagito Komaeda",
[
"e09_701_000.lin",
"e09_701_001.lin",
"e09_701_002.lin",
"e09_701_003.lin",
"e09_701_004.lin",
"e09_701_005.lin",
"e09_701_100.lin",
"kokoro_09_001.dat",
(
"Date Reactions",
[
"e09_701_200.lin",
]
),
]
),
(
"Byakuya Togami",
[
"e09_702_000.lin",
"e09_702_001.lin",
"e09_702_002.lin",
"e09_702_003.lin",
"e09_702_004.lin",
"e09_702_005.lin",
"e09_702_100.lin",
"kokoro_09_002.dat",
(
"Date Reactions",
[
"e09_702_200.lin",
]
),
]
),
(
"Gundam Tanaka",
[
"e09_703_000.lin",
"e09_703_001.lin",
"e09_703_002.lin",
"e09_703_003.lin",
"e09_703_004.lin",
"e09_703_005.lin",
"e09_703_100.lin",
"kokoro_09_003.dat",
(
"Date Reactions",
[
"e09_703_200.lin",
]
),
]
),
(
"Kazuichi Souda",
[
"e09_704_000.lin",
"e09_704_001.lin",
"e09_704_002.lin",
"e09_704_003.lin",
"e09_704_004.lin",
"e09_704_005.lin",
"e09_704_100.lin",
"kokoro_09_004.dat",
(
"Date Reactions",
[
"e09_704_200.lin",
]
),
]
),
(
"Teruteru Hanamura",
[
"e09_705_000.lin",
"e09_705_001.lin",
"e09_705_002.lin",
"e09_705_003.lin",
"e09_705_004.lin",
"e09_705_005.lin",
"e09_705_100.lin",
"kokoro_09_005.dat",
(
"Date Reactions",
[
"e09_705_200.lin",
]
),
]
),
(
"Nekomaru Nidai",
[
"e09_706_000.lin",
"e09_706_001.lin",
"e09_706_002.lin",
"e09_706_003.lin",
"e09_706_004.lin",
"e09_706_005.lin",
"e09_706_100.lin",
"kokoro_09_006.dat",
(
"Date Reactions",
[
"e09_706_200.lin",
]
),
]
),
(
"Fuyuhiko Kuzuryuu",
[
"e09_707_000.lin",
"e09_707_001.lin",
"e09_707_002.lin",
"e09_707_003.lin",
"e09_707_004.lin",
"e09_707_005.lin",
"e09_707_100.lin",
"kokoro_09_007.dat",
(
"Date Reactions",
[
"e09_707_200.lin",
]
),
]
),
(
"Akane Owari",
[
"e09_708_000.lin",
"e09_708_001.lin",
"e09_708_002.lin",
"e09_708_003.lin",
"e09_708_004.lin",
"e09_708_005.lin",
"e09_708_100.lin",
"kokoro_09_008.dat",
(
"Date Reactions",
[
"e09_708_200.lin",
]
),
]
),
(
"Chiaki Nanami",
[
"e09_709_000.lin",
"e09_709_001.lin",
"e09_709_002.lin",
"e09_709_003.lin",
"e09_709_004.lin",
"e09_709_005.lin",
"e09_709_100.lin",
"kokoro_09_009.dat",
(
"Date Reactions",
[
"e09_709_200.lin",
]
),
]
),
(
"Sonia Nevermind",
[
"e09_710_000.lin",
"e09_710_001.lin",
"e09_710_002.lin",
"e09_710_003.lin",
"e09_710_004.lin",
"e09_710_005.lin",
"e09_710_100.lin",
"kokoro_09_010.dat",
(
"Date Reactions",
[
"e09_710_200.lin",
]
),
]
),
(
"Hiyoko Saionji",
[
"e09_711_000.lin",
"e09_711_001.lin",
"e09_711_002.lin",
"e09_711_003.lin",
"e09_711_004.lin",
"e09_711_005.lin",
"e09_711_100.lin",
"kokoro_09_011.dat",
(
"Date Reactions",
[
"e09_711_200.lin",
]
),
]
),
(
"Mahiru Koizumi",
[
"e09_712_000.lin",
"e09_712_001.lin",
"e09_712_002.lin",
"e09_712_003.lin",
"e09_712_004.lin",
"e09_712_005.lin",
"e09_712_100.lin",
"kokoro_09_012.dat",
(
"Date Reactions",
[
"e09_712_200.lin",
]
),
]
),
(
"Mikan Tsumiki",
[
"e09_713_000.lin",
"e09_713_001.lin",
"e09_713_002.lin",
"e09_713_003.lin",
"e09_713_004.lin",
"e09_713_005.lin",
"e09_713_100.lin",
"kokoro_09_013.dat",
(
"Date Reactions",
[
"e09_713_200.lin",
]
),
]
),
(
"Ibuki Mioda",
[
"e09_714_000.lin",
"e09_714_001.lin",
"e09_714_002.lin",
"e09_714_003.lin",
"e09_714_004.lin",
"e09_714_005.lin",
"e09_714_100.lin",
"kokoro_09_014.dat",
(
"Date Reactions",
[
"e09_714_200.lin",
]
),
]
),
(
"Peko Pekoyama",
[
"e09_715_000.lin",
"e09_715_001.lin",
"e09_715_002.lin",
"e09_715_003.lin",
"e09_715_004.lin",
"e09_715_005.lin",
"e09_715_100.lin",
"kokoro_09_015.dat",
(
"Date Reactions",
[
"e09_715_200.lin",
]
),
]
),
(
GENERIC_TEXT,
[
"script_pak_e09_001.lin",
"script_pak_e09_003.lin",
"script_pak_e09_006.lin",
"script_pak_e09_008.lin",
"script_pak_e09_009.lin",
"script_pak_e09_010.lin",
"script_pak_e09_011.lin",
"script_pak_e09_012.lin",
]
),
]
),
(
"Omake VN",
[
"novel_000.lin",
"novel_001.lin",
"novel_002.lin",
"novel_003.lin",
"novel_004.lin",
"novel_005.lin",
"novel_006.lin",
"novel_007.lin",
"novel_008.lin",
"novel_009.lin",
"novel_010.lin",
"novel_011.lin",
"novel_012.lin",
"novel_013.lin",
"novel_014.lin",
"novel_015.lin",
"novel_016.lin",
"novel_017.lin",
"novel_018.lin",
"novel_019.lin",
"novel_020.lin",
"novel_021.lin",
"novel_022.lin",
"novel_023.lin",
"novel_024.lin",
"novel_025.lin",
"novel_026.lin",
"novel_027.lin",
"novel_028.lin",
"novel_029.lin",
"novel_030.lin",
"novel_031.lin",
"novel_032.lin",
"novel_033.lin",
"novel_034.lin",
(
"script_pak_novel",
[
"script_pak_novel_000.lin",
"script_pak_novel_001.lin",
"script_pak_novel_002.lin",
"script_pak_novel_003.lin",
"script_pak_novel_004.lin",
"script_pak_novel_005.lin",
"script_pak_novel_006.lin",
"script_pak_novel_007.lin",
"script_pak_novel_008.lin",
"script_pak_novel_009.lin",
"script_pak_novel_010.lin",
"script_pak_novel_011.lin",
"script_pak_novel_012.lin",
"script_pak_novel_013.lin",
"script_pak_novel_014.lin",
"script_pak_novel_015.lin",
"script_pak_novel_016.lin",
"script_pak_novel_017.lin",
"script_pak_novel_018.lin",
"script_pak_novel_019.lin",
"script_pak_novel_020.lin",
"script_pak_novel_021.lin",
"script_pak_novel_022.lin",
"script_pak_novel_023.lin",
"script_pak_novel_024.lin",
"script_pak_novel_025.lin",
"script_pak_novel_026.lin",
"script_pak_novel_027.lin",
"script_pak_novel_028.lin",
"script_pak_novel_029.lin",
"script_pak_novel_030.lin",
"script_pak_novel_031.lin",
"script_pak_novel_032.lin",
"script_pak_novel_033.lin",
"script_pak_novel_034.lin",
]
),
]
),
(
"Minigames",
[
(
"PTA",
[
"dr2_mtb2_s01.pak",
"dr2_mtb2_s02.pak",
"dr2_mtb2_s03.pak",
"dr2_mtb2_s04.pak",
"dr2_mtb2_s05.pak",
"dr2_mtb2_s06.pak",
"dr2_mtb2_s07.pak",
(
"Unused",
[
"dr2_mtb2_s08.pak",
"dr2_mtb2_s09.pak",
"dr2_mtb2_s10.pak",
"hs_mtb_s01.pak",
"hs_mtb_s02.pak",
"hs_mtb_s03.pak",
"hs_mtb_s04.pak",
"hs_mtb_s05.pak",
"hs_mtb_s06.pak",
"hs_mtb_s07.pak",
"hs_mtb_s08.pak",
"hs_mtb_s09.pak",
"hs_mtb_s10.pak",
"hs_mtb_s11.pak",
"hs_mtb_s21.pak",
"hs_mtb_s22.pak",
"hs_mtb_s23.pak",
"hs_mtb_s24.pak",
"hs_mtb_s25.pak",
"hs_mtb_s26.pak",
"hs_mtb_s27.pak",
"hs_mtb_s28.pak",
"hs_mtb_s29.pak",
"hs_mtb_s30.pak",
"hs_mtb_s31.pak",
"hs_mtb_s32.pak",
"hs_mtb_s33.pak",
"hs_mtb_s34.pak",
"hs_mtb_s35.pak",
"hs_mtb_s36.pak",
"hs_mtb_s37.pak",
"hs_mtb_s38.pak",
"hs_mtb_s39.pak",
"hs_mtb_s40.pak",
]
),
]
),
(
"Epiphany Anagram",
[
"anagram2_level00.dat",
"anagram2_level01.dat",
"anagram2_level02.dat",
"anagram2_level03.dat",
"anagram2_level04.dat",
"anagram2_level05.dat",
"anagram2_level06.dat",
"anagram2_level07.dat",
"anagram2_level08.dat",
"anagram2_level09.dat",
"anagram2_level10.dat",
"anagram2_level11.dat",
]
),
(
"Logical Dive",
[
"ldive_s00.pak",
"ldive_s01.pak",
"ldive_s02.pak",
"ldive_s03.pak",
"ldive_s04.pak",
"ldive_s05.pak",
"ldive_s06.pak",
"ldive_s07.pak",
"ldive_s08.pak",
"ldive_s09.pak",
"ldive_s10.pak",
"ldive_s11.pak",
]
),
]
),
(
"Menu Text",
[
"00_System.pak",
"01_Contents.pak",
"02_BgmName.pak",
"03_EventName.pak",
"04_ItemName.pak",
"05_ItemDescription.pak",
"06_KotodamaName.pak",
"07_KotodamaDesc1.pak",
"08_KotodamaDesc2.pak",
"09_KotodamaDesc3.pak",
"10_Special.pak",
"11_Report.pak",
"12_SkillName.pak",
"13_SkillDesc.pak",
"14_Rule.pak",
"15_OperateMode.pak",
"16_OperateDesc.pak",
"17_FloorName.pak",
"18_MapName.pak",
"19_HandBookMenu.pak",
"20_HandBookDesc.pak",
"21_Briefing.pak",
"22_Profile.pak",
"23_Anagram.pak",
"24_MovieName.pak",
"25_SkillDescHB.pak",
"26_Menu.pak",
"27_ArtWorkName.pak",
"28_Credit.pak",
"29_OperateModeR.pak",
"30_OperateDescR.pak",
"31_OperateSysR.pak",
"32_CharaName.pak",
"33_MapNameEnglish.pak",
"34_BgmNameEnglish.pak",
"35_Option.pak",
"36_Pet.pak",
"37_SkillExchange.pak",
"38_SurvivalMenu.pak",
"39_UsamiFlower.pak",
"40_SurvivalItem.pak",
"41_SurvivalItemDesc.pak",
"42_SurvivalMaterial.pak",
"43_SurvivalMaterialDesc.pak",
"44_EquipName.pak",
"45_EquipDesc.pak",
"46_StageTitle.pak",
"47_SurvivalTitle.pak",
"48_TwilightText.pak",
"49_Novel.pak",
"bin_help_font_l.pak",
"bin_pb_font_l.pak",
"bin_progress_font_l.pak",
"bin_special_font_l.pak",
"bin_sv_font_l.pak",
]
),
(
"Map Object Labels",
[
"MAP_000.pak",
"MAP_001.pak",
"MAP_002.pak",
"MAP_003.pak",
"MAP_004.pak",
"MAP_005.pak",
"MAP_006.pak",
"MAP_007.pak",
"MAP_008.pak",
"MAP_009.pak",
"MAP_010.pak",
"MAP_011.pak",
"MAP_019.pak",
"MAP_020.pak",
"MAP_021.pak",
"MAP_022.pak",
"MAP_023.pak",
"MAP_024.pak",
"MAP_025.pak",
"MAP_026.pak",
"MAP_028.pak",
"MAP_029.pak",
"MAP_030.pak",
"MAP_031.pak",
"MAP_040.pak",
"MAP_041.pak",
"MAP_042.pak",
"MAP_043.pak",
"MAP_044.pak",
"MAP_045.pak",
"MAP_046.pak",
"MAP_047.pak",
"MAP_048.pak",
"MAP_053.pak",
"MAP_055.pak",
"MAP_056.pak",
"MAP_059.pak",
"MAP_060.pak",
"MAP_061.pak",
"MAP_062.pak",
"MAP_063.pak",
"MAP_064.pak",
"MAP_070.pak",
"MAP_071.pak",
"MAP_072.pak",
"MAP_073.pak",
"MAP_074.pak",
"MAP_075.pak",
"MAP_076.pak",
"MAP_079.pak",
"MAP_080.pak",
"MAP_081.pak",
"MAP_082.pak",
"MAP_083.pak",
"MAP_084.pak",
"MAP_086.pak",
"MAP_087.pak",
"MAP_089.pak",
"MAP_090.pak",
"MAP_091.pak",
"MAP_093.pak",
"MAP_097.pak",
"MAP_100.pak",
"MAP_101.pak",
"MAP_102.pak",
"MAP_103.pak",
"MAP_104.pak",
"MAP_105.pak",
"MAP_106.pak",
"MAP_107.pak",
"MAP_108.pak",
"MAP_109.pak",
"MAP_110.pak",
"MAP_111.pak",
"MAP_112.pak",
"MAP_113.pak",
"MAP_114.pak",
"MAP_115.pak",
"MAP_116.pak",
"MAP_117.pak",
"MAP_118.pak",
"MAP_119.pak",
"MAP_120.pak",
"MAP_130.pak",
"MAP_131.pak",
"MAP_132.pak",
"MAP_133.pak",
"MAP_134.pak",
"MAP_135.pak",
"MAP_140.pak",
"MAP_141.pak",
"MAP_142.pak",
"MAP_143.pak",
"MAP_144.pak",
"MAP_145.pak",
"MAP_160.pak",
"MAP_161.pak",
"MAP_162.pak",
"MAP_163.pak",
"MAP_164.pak",
"MAP_165.pak",
"MAP_166.pak",
"MAP_167.pak",
"MAP_168.pak",
"MAP_170.pak",
"MAP_171.pak",
"MAP_259.pak",
"MAP_260.pak",
"MAP_261.pak",
"MAP_262.pak",
"MAP_263.pak",
"MAP_264.pak",
"MAP_265.pak",
"MAP_266.pak",
"MAP_268.pak",
"MAP_269.pak",
"MAP_270.pak",
"MAP_271.pak",
"MAP_272.pak",
"MAP_273.pak",
"MAP_274.pak",
"MAP_275.pak",
"MAP_276.pak",
]
),
(
"Misc.",
[
"event.pak",
"voice.pak",
"e08_150_000.lin",
"e08_151_000.lin",
"e08_152_000.lin",
"e09_000_001.lin",
"e09_090_001.lin",
"e09_100_096.lin",
"e09_100_250.lin",
"e09_100_251.lin",
]
),
]
### EOF ###
| gpl-3.0 |
patrickcurl/ztruck | dj/lib/python2.7/site-packages/pip/_vendor/requests/compat.py | 1039 | 1469 | # -*- coding: utf-8 -*-
"""
pythoncompat
"""
from .packages import chardet
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except (ImportError, SyntaxError):
# simplejson does not support Python 3.2, it throws a SyntaxError
# because of u'...' Unicode literals.
import json
# ---------
# Specifics
# ---------
if is_py2:
from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list
import cookielib
from Cookie import Morsel
from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
from collections import OrderedDict
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
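# Illustrative sketch (not part of the original file): downstream requests
# code leans on the aliases above so a single code path handles text on both
# Python majors. `_demo_compat` is a hypothetical name.
def _demo_compat(value):
    # `bytes` is the native byte string on either interpreter; `str` is
    # always the text type (unicode on Python 2).
    if isinstance(value, bytes):
        value = value.decode('utf-8')
    # urlencode resolves to urllib.urlencode (py2) or urllib.parse.urlencode (py3).
    return urlencode({'q': value})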
| apache-2.0 |
Nick-OpusVL/odoo | addons/l10n_lu/__openerp__.py | 197 | 2804 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2011 Thamini S.à.R.L (<http://www.thamini.com>)
# Copyright (C) 2011 ADN Consultants S.à.R.L (<http://www.adn-luxembourg.com>)
# Copyright (C) 2012-today OpenERP SA (<http://openerp.com>)
# Copyright (C) 2014 ACSONE SA/NV (<http://acsone.eu>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Luxembourg - Accounting',
'version': '1.0',
'category': 'Localization/Account Charts',
'description': """
This is the base module to manage the accounting chart for Luxembourg.
======================================================================
* the Luxembourg Official Chart of Accounts (law of June 2009 + 2015 chart and Taxes),
* the Tax Code Chart for Luxembourg
* the main taxes used in Luxembourg
* default fiscal position for local, intracom, extracom
Notes:
* the 2015 chart of taxes is implemented to a large extent,
see the first sheet of tax.xls for details of coverage
* to update the chart of tax template, update tax.xls and run tax2csv.py
""",
'author': 'OpenERP SA, ADN, ACSONE SA/NV',
'website': 'https://www.odoo.com',
'depends': ['account', 'base_vat', 'base_iban'],
'data': [
# basic accounting data
'account_financial_report.xml',
'account_financial_report_abr.xml',
'account.account.type-2011.csv',
'account.account.template-2011.csv',
'account.tax.code.template-2015.csv',
'account_chart_template.xml',
'account.tax.template-2015.csv',
'account.fiscal.position.template-2011.csv',
'account.fiscal.position.tax.template-2015.csv',
# configuration wizard, views, reports...
'l10n_lu_wizard.xml',
],
'test': [],
'demo': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
adalke/rdkit | rdkit/Chem/AtomPairs/UnitTestDescriptors.py | 4 | 2805 | # $Id$
#
# Copyright (C) 2007 greg Landrum
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
from __future__ import print_function
import unittest,os,gzip
from rdkit.six.moves import cPickle #@UnresolvedImport #pylint: disable=F0401
from rdkit import Chem
from rdkit import RDConfig
from rdkit.Chem.AtomPairs import Pairs,Torsions,Utils
class TestCase(unittest.TestCase):
def setUp(self):
self.testDataPath=os.path.join(RDConfig.RDCodeDir,'Chem','AtomPairs','test_data')
inF = gzip.open(os.path.join(self.testDataPath,'mols1000.pkl.gz'),'rb')
self.mols=cPickle.load(inF, encoding='bytes')
def testPairsRegression(self):
inF = gzip.open(os.path.join(self.testDataPath,'mols1000.aps.pkl.gz'),'rb')
atomPairs = cPickle.load(inF, encoding='bytes')
for i,m in enumerate(self.mols):
ap = Pairs.GetAtomPairFingerprint(m)
#if ap!=atomPairs[i]:
# print Chem.MolToSmiles(m)
# pd=ap.GetNonzeroElements()
# rd=atomPairs[i].GetNonzeroElements()
# for k,v in pd.iteritems():
# if rd.has_key(k):
# if rd[k]!=v: print '>>>1',k,v,rd[k]
# else:
# print '>>>2',k,v
# for k,v in rd.iteritems():
# if pd.has_key(k):
# if pd[k]!=v: print '>>>3',k,v,pd[k]
# else:
# print '>>>4',k,v
self.assertTrue(ap==atomPairs[i])
self.assertTrue(ap!=atomPairs[i-1])
def testTorsionsRegression(self):
inF = gzip.open(os.path.join(self.testDataPath,'mols1000.tts.pkl.gz'),'rb')
torsions = cPickle.load(inF, encoding='bytes')
for i,m in enumerate(self.mols):
tt = Torsions.GetTopologicalTorsionFingerprintAsIntVect(m)
if tt!=torsions[i]:
print(Chem.MolToSmiles(m))
pd=tt.GetNonzeroElements()
rd=torsions[i].GetNonzeroElements()
for k,v in pd.items():
if k in rd:
if rd[k]!=v: print('>>>1',k,v,rd[k])
else:
print('>>>2',k,v)
for k,v in rd.items():
if k in pd:
if pd[k]!=v: print('>>>3',k,v,pd[k])
else:
print('>>>4',k,v)
self.assertTrue(tt==torsions[i])
self.assertTrue(tt!=torsions[i-1])
def testGithub334(self):
m1 = Chem.MolFromSmiles('N#C')
self.assertEqual(Utils.NumPiElectrons(m1.GetAtomWithIdx(0)),2)
self.assertEqual(Utils.NumPiElectrons(m1.GetAtomWithIdx(1)),2)
m1 = Chem.MolFromSmiles('N#[CH]')
self.assertEqual(Utils.NumPiElectrons(m1.GetAtomWithIdx(0)),2)
self.assertEqual(Utils.NumPiElectrons(m1.GetAtomWithIdx(1)),2)
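# Hypothetical helper (not in the original test file), a cleaned-up version of
# the commented-out debugging code above: report which nonzero bits differ
# between two sparse int fingerprints.
def _diff_sparse_vects(fp1, fp2):
    d1 = fp1.GetNonzeroElements()
    d2 = fp2.GetNonzeroElements()
    diffs = {}
    for k in set(d1) | set(d2):
        if d1.get(k, 0) != d2.get(k, 0):
            diffs[k] = (d1.get(k, 0), d2.get(k, 0))
    return diffs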
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
aronsky/home-assistant | homeassistant/components/alarm_control_panel/canary.py | 7 | 2978 | """
Support for Canary alarm.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/alarm_control_panel.canary/
"""
import logging
from homeassistant.components.alarm_control_panel import AlarmControlPanel
from homeassistant.components.canary import DATA_CANARY
from homeassistant.const import STATE_ALARM_DISARMED, STATE_ALARM_ARMED_AWAY, \
STATE_ALARM_ARMED_NIGHT, STATE_ALARM_ARMED_HOME
DEPENDENCIES = ['canary']
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Canary alarms."""
data = hass.data[DATA_CANARY]
devices = []
for location in data.locations:
devices.append(CanaryAlarm(data, location.location_id))
add_entities(devices, True)
class CanaryAlarm(AlarmControlPanel):
"""Representation of a Canary alarm control panel."""
def __init__(self, data, location_id):
"""Initialize a Canary security camera."""
self._data = data
self._location_id = location_id
@property
def name(self):
"""Return the name of the alarm."""
location = self._data.get_location(self._location_id)
return location.name
@property
def state(self):
"""Return the state of the device."""
from canary.api import LOCATION_MODE_AWAY, LOCATION_MODE_HOME, \
LOCATION_MODE_NIGHT
location = self._data.get_location(self._location_id)
if location.is_private:
return STATE_ALARM_DISARMED
mode = location.mode
if mode.name == LOCATION_MODE_AWAY:
return STATE_ALARM_ARMED_AWAY
if mode.name == LOCATION_MODE_HOME:
return STATE_ALARM_ARMED_HOME
if mode.name == LOCATION_MODE_NIGHT:
return STATE_ALARM_ARMED_NIGHT
return None
@property
def device_state_attributes(self):
"""Return the state attributes."""
location = self._data.get_location(self._location_id)
return {
'private': location.is_private
}
def alarm_disarm(self, code=None):
"""Send disarm command."""
location = self._data.get_location(self._location_id)
self._data.set_location_mode(self._location_id, location.mode.name,
True)
def alarm_arm_home(self, code=None):
"""Send arm home command."""
from canary.api import LOCATION_MODE_HOME
self._data.set_location_mode(self._location_id, LOCATION_MODE_HOME)
def alarm_arm_away(self, code=None):
"""Send arm away command."""
from canary.api import LOCATION_MODE_AWAY
self._data.set_location_mode(self._location_id, LOCATION_MODE_AWAY)
def alarm_arm_night(self, code=None):
"""Send arm night command."""
from canary.api import LOCATION_MODE_NIGHT
self._data.set_location_mode(self._location_id, LOCATION_MODE_NIGHT)
| apache-2.0 |
alexbruy/QGIS | python/ext-libs/jinja2/utils.py | 598 | 16165 | # -*- coding: utf-8 -*-
"""
jinja2.utils
~~~~~~~~~~~~
Utility functions.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
import errno
from collections import deque
from jinja2._compat import text_type, string_types, implements_iterator, \
allocate_lock, url_quote
_word_split_re = re.compile(r'(\s+)')
_punctuation_re = re.compile(
'^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % (
'|'.join(map(re.escape, ('(', '<', '&lt;'))),
'|'.join(map(re.escape, ('.', ',', ')', '>', '\n', '&gt;')))
)
)
_simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$')
_striptags_re = re.compile(r'(<!--.*?-->|<[^>]*>)')
_entity_re = re.compile(r'&([^;]+);')
_letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
_digits = '0123456789'
# special singleton representing missing values for the runtime
missing = type('MissingType', (), {'__repr__': lambda x: 'missing'})()
# internal code
internal_code = set()
concat = u''.join
def contextfunction(f):
"""This decorator can be used to mark a function or method context callable.
A context callable is passed the active :class:`Context` as first argument when
called from the template. This is useful if a function wants to get access
to the context or functions provided on the context object. For example
a function that returns a sorted list of template variables the current
template exports could look like this::
@contextfunction
def get_exported_names(context):
return sorted(context.exported_vars)
"""
f.contextfunction = True
return f
def evalcontextfunction(f):
"""This decorator can be used to mark a function or method as an eval
context callable. This is similar to the :func:`contextfunction`
but instead of passing the context, an evaluation context object is
passed. For more information about the eval context, see
:ref:`eval-context`.
.. versionadded:: 2.4
"""
f.evalcontextfunction = True
return f
def environmentfunction(f):
"""This decorator can be used to mark a function or method as environment
callable. This decorator works exactly like the :func:`contextfunction`
decorator just that the first argument is the active :class:`Environment`
and not context.
"""
f.environmentfunction = True
return f
def internalcode(f):
"""Marks the function as internally used"""
internal_code.add(f.__code__)
return f
def is_undefined(obj):
"""Check if the object passed is undefined. This does nothing more than
performing an instance check against :class:`Undefined` but looks nicer.
This can be used for custom filters or tests that want to react to
undefined variables. For example a custom default filter can look like
this::
def default(var, default=''):
if is_undefined(var):
return default
return var
"""
from jinja2.runtime import Undefined
return isinstance(obj, Undefined)
def consume(iterable):
"""Consumes an iterable without doing anything with it."""
for event in iterable:
pass
def clear_caches():
"""Jinja2 keeps internal caches for environments and lexers. These are
used so that Jinja2 doesn't have to recreate environments and lexers all
the time. Normally you don't have to care about that but if you are
measuring memory consumption you may want to clean the caches.
"""
from jinja2.environment import _spontaneous_environments
from jinja2.lexer import _lexer_cache
_spontaneous_environments.clear()
_lexer_cache.clear()
def import_string(import_name, silent=False):
"""Imports an object based on a string. This is useful if you want to
use import paths as endpoints or something similar. An import path can
be specified either in dotted notation (``xml.sax.saxutils.escape``)
or with a colon as object delimiter (``xml.sax.saxutils:escape``).
If the `silent` is True the return value will be `None` if the import
fails.
:return: imported object
"""
try:
if ':' in import_name:
module, obj = import_name.split(':', 1)
elif '.' in import_name:
items = import_name.split('.')
module = '.'.join(items[:-1])
obj = items[-1]
else:
return __import__(import_name)
return getattr(__import__(module, None, None, [obj]), obj)
except (ImportError, AttributeError):
if not silent:
raise
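# Illustrative usage (not part of the original module): dotted and
# colon-delimited paths resolve to the same attribute.
#
#     import_string('xml.sax.saxutils.escape')
#     import_string('xml.sax.saxutils:escape')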
def open_if_exists(filename, mode='rb'):
"""Returns a file descriptor for the filename if that file exists,
otherwise `None`.
"""
try:
return open(filename, mode)
except IOError as e:
if e.errno not in (errno.ENOENT, errno.EISDIR):
raise
def object_type_repr(obj):
"""Returns the name of the object's type. For some recognized
singletons the name of the object is returned instead. (For
example for `None` and `Ellipsis`).
"""
if obj is None:
return 'None'
elif obj is Ellipsis:
return 'Ellipsis'
# __builtin__ in 2.x, builtins in 3.x
if obj.__class__.__module__ in ('__builtin__', 'builtins'):
name = obj.__class__.__name__
else:
name = obj.__class__.__module__ + '.' + obj.__class__.__name__
return '%s object' % name
def pformat(obj, verbose=False):
"""Prettyprint an object. Either use the `pretty` library or the
builtin `pprint`.
"""
try:
from pretty import pretty
return pretty(obj, verbose=verbose)
except ImportError:
from pprint import pformat
return pformat(obj)
def urlize(text, trim_url_limit=None, nofollow=False):
"""Converts any URLs in text into clickable links. Works on http://,
https:// and www. links. Links can have trailing punctuation (periods,
commas, close-parens) and leading punctuation (opening parens) and
it'll still do the right thing.
If trim_url_limit is not None, the URLs in link text will be limited
to trim_url_limit characters.
If nofollow is True, the URLs in link text will get a rel="nofollow"
attribute.
"""
trim_url = lambda x, limit=trim_url_limit: limit is not None \
and (x[:limit] + (len(x) >=limit and '...'
or '')) or x
words = _word_split_re.split(text_type(escape(text)))
nofollow_attr = nofollow and ' rel="nofollow"' or ''
for i, word in enumerate(words):
match = _punctuation_re.match(word)
if match:
lead, middle, trail = match.groups()
if middle.startswith('www.') or (
'@' not in middle and
not middle.startswith('http://') and
not middle.startswith('https://') and
len(middle) > 0 and
middle[0] in _letters + _digits and (
middle.endswith('.org') or
middle.endswith('.net') or
middle.endswith('.com')
)):
middle = '<a href="http://%s"%s>%s</a>' % (middle,
nofollow_attr, trim_url(middle))
if middle.startswith('http://') or \
middle.startswith('https://'):
middle = '<a href="%s"%s>%s</a>' % (middle,
nofollow_attr, trim_url(middle))
if '@' in middle and not middle.startswith('www.') and \
not ':' in middle and _simple_email_re.match(middle):
middle = '<a href="mailto:%s">%s</a>' % (middle, middle)
if lead + middle + trail != word:
words[i] = lead + middle + trail
return u''.join(words)
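# Illustrative only (not part of the original module): bare links become
# anchors while surrounding punctuation survives, e.g.
#     urlize('see www.example.com.')
# returns u'see <a href="http://www.example.com">www.example.com</a>.'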
def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
"""Generate some lorem impsum for the template."""
from jinja2.constants import LOREM_IPSUM_WORDS
from random import choice, randrange
words = LOREM_IPSUM_WORDS.split()
result = []
for _ in range(n):
next_capitalized = True
last_comma = last_fullstop = 0
word = None
last = None
p = []
# each paragraph consists of 20 to 100 words.
for idx, _ in enumerate(range(randrange(min, max))):
while True:
word = choice(words)
if word != last:
last = word
break
if next_capitalized:
word = word.capitalize()
next_capitalized = False
# add commas
if idx - randrange(3, 8) > last_comma:
last_comma = idx
last_fullstop += 2
word += ','
# add end of sentences
if idx - randrange(10, 20) > last_fullstop:
last_comma = last_fullstop = idx
word += '.'
next_capitalized = True
p.append(word)
# ensure that the paragraph ends with a dot.
p = u' '.join(p)
if p.endswith(','):
p = p[:-1] + '.'
elif not p.endswith('.'):
p += '.'
result.append(p)
if not html:
return u'\n\n'.join(result)
return Markup(u'\n'.join(u'<p>%s</p>' % escape(x) for x in result))
def unicode_urlencode(obj, charset='utf-8'):
"""URL escapes a single bytestring or unicode string with the
given charset if applicable to URL safe quoting under all rules
that need to be considered under all supported Python versions.
If non strings are provided they are converted to their unicode
representation first.
"""
if not isinstance(obj, string_types):
obj = text_type(obj)
if isinstance(obj, text_type):
obj = obj.encode(charset)
return text_type(url_quote(obj))
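# Illustrative only: non-ASCII text is encoded to the given charset before
# quoting, so unicode_urlencode(u'f\xfc\xfc') returns u'f%C3%BC%C3%BC'.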
class LRUCache(object):
"""A simple LRU Cache implementation."""
# this is fast for small capacities (something below 1000) but doesn't
# scale. But as long as it's only used as storage for templates this
# won't do any harm.
def __init__(self, capacity):
self.capacity = capacity
self._mapping = {}
self._queue = deque()
self._postinit()
def _postinit(self):
# alias all queue methods for faster lookup
self._popleft = self._queue.popleft
self._pop = self._queue.pop
self._remove = self._queue.remove
self._wlock = allocate_lock()
self._append = self._queue.append
def __getstate__(self):
return {
'capacity': self.capacity,
'_mapping': self._mapping,
'_queue': self._queue
}
def __setstate__(self, d):
self.__dict__.update(d)
self._postinit()
def __getnewargs__(self):
return (self.capacity,)
def copy(self):
"""Return a shallow copy of the instance."""
rv = self.__class__(self.capacity)
rv._mapping.update(self._mapping)
rv._queue = deque(self._queue)
return rv
def get(self, key, default=None):
"""Return an item from the cache dict or `default`"""
try:
return self[key]
except KeyError:
return default
def setdefault(self, key, default=None):
"""Set `default` if the key is not in the cache otherwise
leave unchanged. Return the value of this key.
"""
self._wlock.acquire()
try:
try:
return self[key]
except KeyError:
self[key] = default
return default
finally:
self._wlock.release()
def clear(self):
"""Clear the cache."""
self._wlock.acquire()
try:
self._mapping.clear()
self._queue.clear()
finally:
self._wlock.release()
def __contains__(self, key):
"""Check if a key exists in this cache."""
return key in self._mapping
def __len__(self):
"""Return the current size of the cache."""
return len(self._mapping)
def __repr__(self):
return '<%s %r>' % (
self.__class__.__name__,
self._mapping
)
def __getitem__(self, key):
"""Get an item from the cache. Moves the item up so that it has the
highest priority then.
Raise a `KeyError` if it does not exist.
"""
self._wlock.acquire()
try:
rv = self._mapping[key]
if self._queue[-1] != key:
try:
self._remove(key)
except ValueError:
# if something removed the key from the container
# when we read, ignore the ValueError that we would
# get otherwise.
pass
self._append(key)
return rv
finally:
self._wlock.release()
def __setitem__(self, key, value):
"""Sets the value for an item. Moves the item up so that it
has the highest priority then.
"""
self._wlock.acquire()
try:
if key in self._mapping:
self._remove(key)
elif len(self._mapping) == self.capacity:
del self._mapping[self._popleft()]
self._append(key)
self._mapping[key] = value
finally:
self._wlock.release()
def __delitem__(self, key):
"""Remove an item from the cache dict.
Raise a `KeyError` if it does not exist.
"""
self._wlock.acquire()
try:
del self._mapping[key]
try:
self._remove(key)
except ValueError:
# __getitem__ is not locked, it might happen
pass
finally:
self._wlock.release()
def items(self):
"""Return a list of items."""
result = [(key, self._mapping[key]) for key in list(self._queue)]
result.reverse()
return result
def iteritems(self):
"""Iterate over all items."""
return iter(self.items())
def values(self):
"""Return a list of all values."""
return [x[1] for x in self.items()]
def itervalue(self):
"""Iterate over all values."""
return iter(self.values())
def keys(self):
"""Return a list of all keys ordered by most recent usage."""
return list(self)
def iterkeys(self):
"""Iterate over all keys in the cache dict, ordered by
the most recent usage.
"""
return reversed(tuple(self._queue))
__iter__ = iterkeys
def __reversed__(self):
"""Iterate over the values in the cache dict, oldest items
coming first.
"""
return iter(tuple(self._queue))
__copy__ = copy
# register the LRU cache as mutable mapping if possible
try:
from collections import MutableMapping
MutableMapping.register(LRUCache)
except ImportError:
pass
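def _demo_lru_cache():
    # Hypothetical walkthrough (not part of the original module): the least
    # recently *used* key is evicted first once capacity is reached.
    cache = LRUCache(2)
    cache['a'] = 1
    cache['b'] = 2
    cache['a']        # touching 'a' leaves 'b' as the oldest entry
    cache['c'] = 3    # at capacity: 'b' is evicted
    return 'b' not in cache and cache.keys() == ['c', 'a']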
@implements_iterator
class Cycler(object):
"""A cycle helper for templates."""
def __init__(self, *items):
if not items:
raise RuntimeError('at least one item has to be provided')
self.items = items
self.reset()
def reset(self):
"""Resets the cycle."""
self.pos = 0
@property
def current(self):
"""Returns the current item."""
return self.items[self.pos]
def __next__(self):
"""Goes one item ahead and returns it."""
rv = self.current
self.pos = (self.pos + 1) % len(self.items)
return rv
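def _demo_cycler():
    # Hypothetical usage (not in the original module): a Cycler loops
    # endlessly over its items, like the template `loop.cycle` helper.
    row_classes = Cycler('odd', 'even')
    return (next(row_classes), next(row_classes), next(row_classes)) == \
        ('odd', 'even', 'odd')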
class Joiner(object):
"""A joining helper for templates."""
def __init__(self, sep=u', '):
self.sep = sep
self.used = False
def __call__(self):
if not self.used:
self.used = True
return u''
return self.sep
# Imported here because that's where it was in the past
from markupsafe import Markup, escape, soft_unicode
| gpl-2.0 |
louyihua/edx-platform | common/lib/xmodule/xmodule/modulestore/xml.py | 17 | 42843 | import hashlib
import itertools
import json
import logging
import os
import re
import sys
import glob
from collections import defaultdict
from cStringIO import StringIO
from fs.osfs import OSFS
from importlib import import_module
from lxml import etree
from path import Path as path
from contextlib import contextmanager
from lazy import lazy
from xmodule.error_module import ErrorDescriptor
from xmodule.errortracker import make_error_tracker, exc_info_to_str
from xmodule.mako_module import MakoDescriptorSystem
from xmodule.x_module import (
XMLParsingSystem, policy_key,
OpaqueKeyReader, AsideKeyGenerator, DEPRECATION_VSCOMPAT_EVENT
)
from xmodule.modulestore.xml_exporter import DEFAULT_CONTENT_FIELDS
from xmodule.modulestore import ModuleStoreEnum, ModuleStoreReadBase, LIBRARY_ROOT, COURSE_ROOT
from xmodule.tabs import CourseTabList
from opaque_keys.edx.locations import SlashSeparatedCourseKey, Location
from opaque_keys.edx.locator import CourseLocator, LibraryLocator, BlockUsageLocator
from xblock.field_data import DictFieldData
from xblock.runtime import DictKeyValueStore
from xblock.fields import ScopeIds
import dogstats_wrapper as dog_stats_api
from .exceptions import ItemNotFoundError
from .inheritance import compute_inherited_metadata, inheriting_field_data, InheritanceKeyValueStore
edx_xml_parser = etree.XMLParser(dtd_validation=False, load_dtd=False,
remove_comments=True, remove_blank_text=True)
etree.set_default_parser(edx_xml_parser)
log = logging.getLogger(__name__)
# VS[compat]
# TODO (cpennington): Remove this once all fall 2012 courses have been imported
# into the cms from xml
def clean_out_mako_templating(xml_string):
orig_xml = xml_string
xml_string = xml_string.replace('%include', 'include')
xml_string = re.sub(r"(?m)^\s*%.*$", '', xml_string)
if orig_xml != xml_string:
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=["location:xml_clean_out_mako_templating"]
)
return xml_string
class ImportSystem(XMLParsingSystem, MakoDescriptorSystem):
def __init__(self, xmlstore, course_id, course_dir,
error_tracker,
load_error_modules=True, target_course_id=None, **kwargs):
"""
A class that handles loading from xml. Does some munging to ensure that
all elements have unique slugs.
xmlstore: the XMLModuleStore to store the loaded modules in
"""
self.unnamed = defaultdict(int) # category -> num of new url_names for that category
self.used_names = defaultdict(set) # category -> set of used url_names
# Adding the course_id as passed in for later reference rather than
# having to recombine the org/course/url_name
self.course_id = course_id
self.load_error_modules = load_error_modules
self.modulestore = xmlstore
def process_xml(xml):
"""Takes an xml string, and returns a XBlock created from
that xml.
"""
def make_name_unique(xml_data):
"""
Make sure that the url_name of xml_data is unique. If a previously loaded
unnamed descriptor stole this element's url_name, create a new one.
Removes 'slug' attribute if present, and adds or overwrites the 'url_name' attribute.
"""
# VS[compat]. Take this out once course conversion is done (perhaps leave the uniqueness check)
# tags that really need unique names--they store (or should store) state.
need_uniq_names = ('problem', 'sequential', 'video', 'course', 'chapter',
'videosequence', 'poll_question', 'vertical')
attr = xml_data.attrib
tag = xml_data.tag
id = lambda x: x
# Things to try to get a name, in order (key, cleaning function, remove key after reading?)
lookups = [('url_name', id, False),
('slug', id, True),
('name', Location.clean, False),
('display_name', Location.clean, False)]
url_name = None
for key, clean, remove in lookups:
if key in attr:
url_name = clean(attr[key])
if remove:
del attr[key]
break
def looks_like_fallback(url_name):
"""Does this look like something that came from fallback_name()?"""
return (url_name is not None
and url_name.startswith(tag)
and re.search('[0-9a-fA-F]{12}$', url_name))
def fallback_name(orig_name=None):
"""Return the fallback name for this module. This is a function instead of a variable
because we want it to be lazy."""
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=(
"location:import_system_fallback_name",
u"name:{}".format(orig_name),
)
)
if looks_like_fallback(orig_name):
# We're about to re-hash, in case something changed, so get rid of the tag_ and hash
orig_name = orig_name[len(tag) + 1:-12]
# append the hash of the content--the first 12 bytes should be plenty.
orig_name = "_" + orig_name if orig_name not in (None, "") else ""
xml_bytes = xml.encode('utf8')
return tag + orig_name + "_" + hashlib.sha1(xml_bytes).hexdigest()[:12]
# Fallback if there was nothing we could use:
if url_name is None or url_name == "":
url_name = fallback_name()
# Don't log a warning--we don't need this in the log. Do
# put it in the error tracker--content folks need to see it.
if tag in need_uniq_names:
error_tracker(u"PROBLEM: no name of any kind specified for {tag}. Student "
u"state will not be properly tracked for this module. Problem xml:"
u" '{xml}...'".format(tag=tag, xml=xml[:100]))
else:
# TODO (vshnayder): We may want to enable this once course repos are cleaned up.
# (or we may want to give up on the requirement for non-state-relevant issues...)
# error_tracker("WARNING: no name specified for module. xml='{0}...'".format(xml[:100]))
pass
# Make sure everything is unique
if url_name in self.used_names[tag]:
# Always complain about modules that store state. If it
# doesn't store state, don't complain about things that are
# hashed.
if tag in need_uniq_names:
msg = (u"Non-unique url_name in xml. This may break state tracking for content."
u" url_name={0}. Content={1}".format(url_name, xml[:100]))
error_tracker("PROBLEM: " + msg)
log.warning(msg)
# Just set name to fallback_name--if there are multiple things with the same fallback name,
# they are actually identical, so it's fragile, but not immediately broken.
# TODO (vshnayder): if the tag is a pointer tag, this will
# break the content because we won't have the right link.
# That's also a legitimate attempt to reuse the same content
# from multiple places. Once we actually allow that, we'll
# need to update this to complain about non-unique names for
# definitions, but allow multiple uses.
url_name = fallback_name(url_name)
self.used_names[tag].add(url_name)
xml_data.set('url_name', url_name)
try:
# VS[compat]
# TODO (cpennington): Remove this once all fall 2012 courses
# have been imported into the cms from xml
xml = clean_out_mako_templating(xml)
xml_data = etree.fromstring(xml)
make_name_unique(xml_data)
descriptor = self.xblock_from_node(
xml_data,
None, # parent_id
id_manager,
)
except Exception as err: # pylint: disable=broad-except
if not self.load_error_modules:
raise
# Didn't load properly. Fall back on loading as an error
# descriptor. This should never error due to formatting.
msg = "Error loading from xml. %s"
log.warning(
msg,
unicode(err)[:200],
# Normally, we don't want lots of exception traces in our logs from common
# content problems. But if you're debugging the xml loading code itself,
# uncomment the next line.
# exc_info=True
)
msg = msg % (unicode(err)[:200])
self.error_tracker(msg)
err_msg = msg + "\n" + exc_info_to_str(sys.exc_info())
descriptor = ErrorDescriptor.from_xml(
xml,
self,
id_manager,
err_msg
)
descriptor.data_dir = course_dir
if descriptor.scope_ids.usage_id in xmlstore.modules[course_id]:
# keep the parent pointer if any but allow everything else to overwrite
other_copy = xmlstore.modules[course_id][descriptor.scope_ids.usage_id]
descriptor.parent = other_copy.parent
if descriptor != other_copy:
log.warning("%s has more than one definition", descriptor.scope_ids.usage_id)
xmlstore.modules[course_id][descriptor.scope_ids.usage_id] = descriptor
if descriptor.has_children:
for child in descriptor.get_children():
# parent is alphabetically least
if child.parent is None or child.parent > descriptor.scope_ids.usage_id:
child.parent = descriptor.location
child.save()
# After setting up the descriptor, save any changes that we have
# made to attributes on the descriptor to the underlying KeyValueStore.
descriptor.save()
return descriptor
render_template = lambda template, context: u''
# TODO (vshnayder): we are somewhat architecturally confused in the loading code:
# load_item should actually be get_instance, because it expects the course-specific
# policy to be loaded. For now, just add the course_id here...
def load_item(usage_key, for_parent=None):
"""Return the XBlock for the specified location"""
return xmlstore.get_item(usage_key, for_parent=for_parent)
resources_fs = OSFS(xmlstore.data_dir / course_dir)
id_manager = CourseImportLocationManager(course_id, target_course_id)
super(ImportSystem, self).__init__(
load_item=load_item,
resources_fs=resources_fs,
render_template=render_template,
error_tracker=error_tracker,
process_xml=process_xml,
id_generator=id_manager,
id_reader=id_manager,
**kwargs
)
# id_generator is ignored, because each ImportSystem is already local to
# a course, and has its own id_generator already in place
def add_node_as_child(self, block, node, id_generator):
child_block = self.process_xml(etree.tostring(node))
block.children.append(child_block.scope_ids.usage_id)
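# Illustrative sketch (not part of the original module): for unnamed modules,
# fallback_name() above derives a deterministic url_name by appending the
# first 12 hex digits of the SHA-1 of the element's xml. A standalone
# equivalent for the no-orig_name case:
def _example_fallback_name(tag, xml):
    return tag + "_" + hashlib.sha1(xml.encode('utf8')).hexdigest()[:12]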
class CourseLocationManager(OpaqueKeyReader, AsideKeyGenerator):
"""
IdGenerator for Location-based definition ids and usage ids
based within a course
"""
def __init__(self, course_id):
super(CourseLocationManager, self).__init__()
self.course_id = course_id
self.autogen_ids = itertools.count(0)
def create_usage(self, def_id):
return def_id
def create_definition(self, block_type, slug=None):
assert block_type is not None
if slug is None:
slug = 'autogen_{}_{}'.format(block_type, self.autogen_ids.next())
return self.course_id.make_usage_key(block_type, slug)
def get_definition_id(self, usage_id):
"""Retrieve the definition that a usage is derived from.
Args:
usage_id: The id of the usage to query
Returns:
The `definition_id` the usage is derived from
"""
return usage_id
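# Hypothetical usage (not in the original module): each manager hands out
# sequential autogen slugs within its course, so two unnamed blocks of the
# same type never collide.
#
#     manager = CourseLocationManager(course_key)
#     manager.create_definition('problem')  # usage key with slug 'autogen_problem_0'
#     manager.create_definition('problem')  # usage key with slug 'autogen_problem_1'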
class CourseImportLocationManager(CourseLocationManager):
"""
IdGenerator for Location-based definition ids and usage ids
based within a course, for use during course import.
In addition to the functionality provided by CourseLocationManager,
this class also contains the target_course_id for the course import
process.
Note: This is a temporary solution to work around the fact that
the from_xml method is passed the source course_id instead of the
target course_id in the import process. For a more ideal solution,
see https://openedx.atlassian.net/browse/MA-417 as a pending TODO.
"""
def __init__(self, course_id, target_course_id):
super(CourseImportLocationManager, self).__init__(course_id=course_id)
self.target_course_id = target_course_id
class XMLModuleStore(ModuleStoreReadBase):
"""
An XML backed ModuleStore
"""
parent_xml = COURSE_ROOT
def __init__(
self, data_dir, default_class=None, source_dirs=None, course_ids=None,
load_error_modules=True, i18n_service=None, fs_service=None, user_service=None,
signal_handler=None, target_course_id=None, **kwargs # pylint: disable=unused-argument
):
"""
Initialize an XMLModuleStore from data_dir
Args:
data_dir (str): path to data directory containing the course directories
default_class (str): dot-separated string defining the default descriptor
class to use if none is specified in entry_points
source_dirs or course_ids (list of str): If specified, the list of source_dirs or course_ids to load.
Otherwise, load all courses. Note, providing both restricts loading to courses matching both filters.
"""
super(XMLModuleStore, self).__init__(**kwargs)
self.data_dir = path(data_dir)
self.modules = defaultdict(dict) # course_id -> dict(location -> XBlock)
self.courses = {} # course_dir -> XBlock for the course
self.errored_courses = {} # course_dir -> errorlog, for dirs that failed to load
if course_ids is not None:
course_ids = [SlashSeparatedCourseKey.from_deprecated_string(course_id) for course_id in course_ids]
self.load_error_modules = load_error_modules
if default_class is None:
self.default_class = None
else:
module_path, _, class_name = default_class.rpartition('.')
class_ = getattr(import_module(module_path), class_name)
self.default_class = class_
# All field data will be stored in an inheriting field data.
self.field_data = inheriting_field_data(kvs=DictKeyValueStore())
self.i18n_service = i18n_service
self.fs_service = fs_service
self.user_service = user_service
# If we are specifically asked for missing courses, that should
# be an error. If we are asked for "all" courses, find the ones
# that have a course.xml. We sort the dirs in alpha order so we always
# read things in the same order (OS differences in load order have
# bitten us in the past.)
if source_dirs is None:
source_dirs = sorted([d for d in os.listdir(self.data_dir) if
os.path.exists(self.data_dir / d / self.parent_xml)])
for course_dir in source_dirs:
self.try_load_course(course_dir, course_ids, target_course_id)
def try_load_course(self, course_dir, course_ids=None, target_course_id=None):
'''
Load a course, keeping track of errors as we go along. If course_ids is not None,
then reject the course unless its id is in course_ids.
'''
# Special-case code here, since we don't have a location for the
# course before it loads.
# So, make a tracker to track load-time errors, then put in the right
# place after the course loads and we have its location
errorlog = make_error_tracker()
course_descriptor = None
try:
course_descriptor = self.load_course(course_dir, course_ids, errorlog.tracker, target_course_id)
except Exception as exc: # pylint: disable=broad-except
msg = "ERROR: Failed to load courselike '{0}': {1}".format(
course_dir.encode("utf-8"), unicode(exc)
)
log.exception(msg)
errorlog.tracker(msg)
self.errored_courses[course_dir] = errorlog
if course_descriptor is None:
pass
elif isinstance(course_descriptor, ErrorDescriptor):
# Didn't load course. Instead, save the errors elsewhere.
self.errored_courses[course_dir] = errorlog
else:
self.courses[course_dir] = course_descriptor
course_descriptor.parent = None
course_id = self.id_from_descriptor(course_descriptor)
self._course_errors[course_id] = errorlog
def __unicode__(self):
'''
String representation - for debugging
'''
return '<%s data_dir=%r, %d courselikes, %d modules>' % (
self.__class__.__name__, self.data_dir, len(self.courses), len(self.modules)
)
@staticmethod
def id_from_descriptor(descriptor):
"""
Grab the course ID from the descriptor
"""
return descriptor.id
def load_policy(self, policy_path, tracker):
"""
Attempt to read a course policy from policy_path. If the file
exists, but is invalid, log an error and return {}.
If the policy loads correctly, returns the deserialized version.
"""
if not os.path.exists(policy_path):
return {}
try:
with open(policy_path) as f:
return json.load(f)
except (IOError, ValueError) as err:
msg = "ERROR: loading courselike policy from {0}".format(policy_path)
tracker(msg)
log.warning(msg + " " + str(err))
return {}
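# Illustrative only (hypothetical content): a policy file maps
# "<category>/<url_name>" keys, as produced by policy_key(), to field
# overrides for the matching block, e.g.
#
#     {
#         "course/2012_Fall": {"start": "2012-09-05T12:00"},
#         "chapter/Week_1": {"visible_to_staff_only": true}
#     }
#
# load_policy returns such a dict, or {} when the file is absent or invalid.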
def load_course(self, course_dir, course_ids, tracker, target_course_id=None):
"""
Load a course into this module store
course_path: Course directory name
returns a CourseDescriptor for the course
"""
log.debug('========> Starting courselike import from %s', course_dir)
with open(self.data_dir / course_dir / self.parent_xml) as course_file:
# VS[compat]
# TODO (cpennington): Remove this once all fall 2012 courses have
# been imported into the cms from xml
course_file = StringIO(clean_out_mako_templating(course_file.read()))
course_data = etree.parse(course_file, parser=edx_xml_parser).getroot()
org = course_data.get('org')
if org is None:
msg = ("No 'org' attribute set for courselike in {dir}. "
"Using default 'edx'".format(dir=course_dir))
log.warning(msg)
tracker(msg)
org = 'edx'
# Parent XML should be something like 'library.xml' or 'course.xml'
courselike_label = self.parent_xml.split('.')[0]
course = course_data.get(courselike_label)
if course is None:
msg = (
"No '{courselike_label}' attribute set for course in {dir}."
" Using default '{default}'".format(
courselike_label=courselike_label,
dir=course_dir,
default=course_dir
)
)
log.warning(msg)
tracker(msg)
course = course_dir
url_name = course_data.get('url_name', course_data.get('slug'))
if url_name:
policy_dir = self.data_dir / course_dir / 'policies' / url_name
policy_path = policy_dir / 'policy.json'
policy = self.load_policy(policy_path, tracker)
# VS[compat]: remove once courses use the policy dirs.
if policy == {}:
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=(
"location:xml_load_course_policy_dir",
u"course:{}".format(course),
)
)
old_policy_path = self.data_dir / course_dir / 'policies' / '{0}.json'.format(url_name)
policy = self.load_policy(old_policy_path, tracker)
else:
policy = {}
# VS[compat] : 'name' is deprecated, but support it for now...
if course_data.get('name'):
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=(
"location:xml_load_course_course_data_name",
u"course:{}".format(course_data.get('course')),
u"org:{}".format(course_data.get('org')),
u"name:{}".format(course_data.get('name')),
)
)
url_name = Location.clean(course_data.get('name'))
tracker("'name' is deprecated for module xml. Please use "
"display_name and url_name.")
else:
url_name = None
course_id = self.get_id(org, course, url_name)
if course_ids is not None and course_id not in course_ids:
return None
def get_policy(usage_id):
"""
Return the policy dictionary to be applied to the specified XBlock usage
"""
return policy.get(policy_key(usage_id), {})
services = {}
if self.i18n_service:
services['i18n'] = self.i18n_service
if self.fs_service:
services['fs'] = self.fs_service
if self.user_service:
services['user'] = self.user_service
system = ImportSystem(
xmlstore=self,
course_id=course_id,
course_dir=course_dir,
error_tracker=tracker,
load_error_modules=self.load_error_modules,
get_policy=get_policy,
mixins=self.xblock_mixins,
default_class=self.default_class,
select=self.xblock_select,
field_data=self.field_data,
services=services,
target_course_id=target_course_id,
)
course_descriptor = system.process_xml(etree.tostring(course_data, encoding='unicode'))
# If we fail to load the course, then skip the rest of the loading steps
if isinstance(course_descriptor, ErrorDescriptor):
return course_descriptor
self.content_importers(system, course_descriptor, course_dir, url_name)
log.debug('========> Done with courselike import from %s', course_dir)
return course_descriptor
def content_importers(self, system, course_descriptor, course_dir, url_name):
"""
Load all extra non-course content, and calculate metadata inheritance.
"""
# NOTE: The descriptors end up loading somewhat bottom up, which
# breaks metadata inheritance via get_children(). Instead
# (actually, in addition to, for now), we do a final inheritance pass
# after we have the course descriptor.
compute_inherited_metadata(course_descriptor)
# now import all pieces of course_info which is expected to be stored
# in <content_dir>/info or <content_dir>/info/<url_name>
self.load_extra_content(
system, course_descriptor, 'course_info',
self.data_dir / course_dir / 'info',
course_dir, url_name
)
# now import all static tabs which are expected to be stored in
# in <content_dir>/tabs or <content_dir>/tabs/<url_name>
self.load_extra_content(
system, course_descriptor, 'static_tab',
self.data_dir / course_dir / 'tabs',
course_dir, url_name
)
self.load_extra_content(
system, course_descriptor, 'custom_tag_template',
self.data_dir / course_dir / 'custom_tags',
course_dir, url_name
)
self.load_extra_content(
system, course_descriptor, 'about',
self.data_dir / course_dir / 'about',
course_dir, url_name
)
@staticmethod
def get_id(org, course, url_name):
"""
Validate and return an ID for a course if given org, course, and url_name.
"""
if not url_name:
raise ValueError("Can't load a course without a 'url_name' "
"(or 'name') set. Set url_name.")
# Have to use SlashSeparatedCourseKey here because it makes sure the same format is
# always used, preventing duplicate keys.
return SlashSeparatedCourseKey(org, course, url_name)
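# Illustrative only: get_id('edX', 'toy', '2012_Fall') yields a
# SlashSeparatedCourseKey whose deprecated string form is "edX/toy/2012_Fall".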
def load_extra_content(self, system, course_descriptor, category, base_dir, course_dir, url_name):
self._load_extra_content(system, course_descriptor, category, base_dir, course_dir)
# then look in a override folder based on the course run
if os.path.isdir(base_dir / url_name):
self._load_extra_content(system, course_descriptor, category, base_dir / url_name, course_dir)
def _import_field_content(self, course_descriptor, category, file_path):
"""
Import field data content for field other than 'data' or 'metadata' form json file and
return field data content as dictionary
"""
slug, location, data_content = None, None, None
try:
# try to read json file
# file_path format: {dirname}.{field_name}.json
dirname, field, file_suffix = file_path.split('/')[-1].split('.')
if file_suffix == 'json' and field not in DEFAULT_CONTENT_FIELDS:
slug = os.path.splitext(os.path.basename(dirname))[0]
location = course_descriptor.scope_ids.usage_id.replace(category=category, name=slug)
with open(file_path) as field_content_file:
field_data = json.load(field_content_file)
data_content = {field: field_data}
except (IOError, ValueError):
# ignore this exception
# only new exported courses which use content fields other than 'metadata' and 'data'
# will have this file '{dirname}.{field_name}.json'
data_content = None
return slug, location, data_content
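# Illustrative only (hypothetical file name): "overview.video.json" splits
# into dirname "overview", field "video" and suffix "json", producing
# slug "overview" and data_content {"video": <parsed json>}.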
def _load_extra_content(self, system, course_descriptor, category, content_path, course_dir):
"""
Import fields data content from files
"""
for filepath in glob.glob(content_path / '*'):
if not os.path.isfile(filepath):
continue
if filepath.endswith('~'): # skip *~ files
continue
with open(filepath) as f:
try:
if filepath.find('.json') != -1:
# json file with json data content
slug, loc, data_content = self._import_field_content(course_descriptor, category, filepath)
if data_content is None:
continue
else:
try:
# get and update data field in xblock runtime
module = system.load_item(loc)
for key, value in data_content.iteritems():
setattr(module, key, value)
module.save()
except ItemNotFoundError:
module = None
data_content['location'] = loc
data_content['category'] = category
else:
slug = os.path.splitext(os.path.basename(filepath))[0]
loc = course_descriptor.scope_ids.usage_id.replace(category=category, name=slug)
# html file with html data content
html = f.read().decode('utf-8')
try:
module = system.load_item(loc)
module.data = html
module.save()
except ItemNotFoundError:
module = None
data_content = {'data': html, 'location': loc, 'category': category}
if module is None:
module = system.construct_xblock(
category,
# We're loading a descriptor, so student_id is meaningless
# We also don't have separate notions of definition and usage ids yet,
# so we use the location for both
ScopeIds(None, category, loc, loc),
DictFieldData(data_content),
)
# VS[compat]:
# Hack because we need to pull in the 'display_name' for static tabs (because we need to edit them)
# from the course policy
if category == "static_tab":
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=(
"location:xml_load_extra_content_static_tab",
u"course_dir:{}".format(course_dir),
)
)
tab = CourseTabList.get_tab_by_slug(tab_list=course_descriptor.tabs, url_slug=slug)
if tab:
module.display_name = tab.name
module.data_dir = course_dir
module.save()
self.modules[course_descriptor.id][module.scope_ids.usage_id] = module
except Exception as exc: # pylint: disable=broad-except
logging.exception("Failed to load %s. Skipping... \
Exception: %s", filepath, unicode(exc))
system.error_tracker("ERROR: " + unicode(exc))
def has_item(self, usage_key):
"""
Returns True if location exists in this ModuleStore.
"""
return usage_key in self.modules[usage_key.course_key]
def get_item(self, usage_key, depth=0, **kwargs):
"""
Returns an XBlock instance for the item for this UsageKey.
If any segment of the location is None except revision, raises
xmodule.modulestore.exceptions.InsufficientSpecificationError
If no object is found at that location, raises
xmodule.modulestore.exceptions.ItemNotFoundError
usage_key: a UsageKey that matches the module we are looking for.
"""
try:
return self.modules[usage_key.course_key][usage_key]
except KeyError:
raise ItemNotFoundError(usage_key)
def get_items(self, course_id, settings=None, content=None, revision=None, qualifiers=None, **kwargs):
"""
Returns:
list of XModuleDescriptor instances for the matching items within the course with
the given course_id
NOTE: don't use this to look for courses
as the course_id is required. Use get_courses.
Args:
course_id (CourseKey): the course identifier
settings (dict): fields to look for which have settings scope. Follows same syntax
and rules as qualifiers below
content (dict): fields to look for which have content scope. Follows same syntax and
rules as qualifiers below.
qualifiers (dict): what to look for within the course.
Common qualifiers are ``category`` or any field name. If the target field is a list,
then it searches for the given value in the list, not list equivalence.
For substring matching, pass a regex object.
For this modulestore, ``name`` is another commonly provided key (Location based stores)
(but not revision!)
For this modulestore,
you can search dates by providing either a datetime for == (probably
useless) or a tuple (">"|"<" datetime) for after or before, etc.
"""
if revision == ModuleStoreEnum.RevisionOption.draft_only:
return []
items = []
qualifiers = qualifiers.copy() if qualifiers else {} # copy the qualifiers (destructively manipulated here)
category = qualifiers.pop('category', None)
name = qualifiers.pop('name', None)
def _block_matches_all(mod_loc, module):
if category and mod_loc.category != category:
return False
if name:
if isinstance(name, list):
# Support for passing a list as the name qualifier
if mod_loc.name not in name:
return False
elif mod_loc.name != name:
return False
return all(
self._block_matches(module, fields or {})
for fields in [settings, content, qualifiers]
)
for mod_loc, module in self.modules[course_id].iteritems():
if _block_matches_all(mod_loc, module):
items.append(module)
return items
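# Illustrative usage (not in the original module; field names hypothetical):
#
#     store.get_items(course_key, qualifiers={'category': 'problem'})
#     store.get_items(course_key, settings={'graded': True})
#
# Both calls scan self.modules[course_id] and return the matching XBlocks.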
def make_course_key(self, org, course, run):
"""
Return a valid :class:`~opaque_keys.edx.locator.CourseLocator` for this modulestore
that matches the supplied `org`, `course`, and `run`.
This key may represent a course that doesn't exist in this modulestore.
"""
return CourseLocator(org, course, run, deprecated=True)
def make_course_usage_key(self, course_key):
"""
Return a valid :class:`~opaque_keys.edx.keys.UsageKey` for this modulestore
that matches the supplied course_key.
"""
return BlockUsageLocator(course_key, 'course', course_key.run)
def get_courses(self, **kwargs):
"""
Returns a list of course descriptors. If there were errors on loading,
some of these may be ErrorDescriptors instead.
"""
return self.courses.values()
def get_course_summaries(self, **kwargs):
"""
        Returns `self.get_courses()`. Used to list courses for the global staff user.
"""
return self.get_courses(**kwargs)
def get_errored_courses(self):
"""
Return a dictionary of course_dir -> [(msg, exception_str)], for each
course_dir where course loading failed.
"""
return dict((k, self.errored_courses[k].errors) for k in self.errored_courses)
def get_orphans(self, course_key, **kwargs):
"""
Get all of the xblocks in the given course which have no parents and are not of types which are
usually orphaned. NOTE: may include xblocks which still have references via xblocks which don't
use children to point to their dependents.
"""
# here just to quell the abstractmethod. someone could write the impl if needed
raise NotImplementedError
def get_parent_location(self, location, **kwargs):
'''Find the location that is the parent of this location in this
course. Needed for path_to_location().
'''
block = self.get_item(location, 0)
return block.parent
def get_modulestore_type(self, course_key=None):
"""
Returns an enumeration-like type reflecting the type of this modulestore, per ModuleStoreEnum.Type
Args:
course_key: just for signature compatibility
"""
        return ModuleStoreEnum.Type.xml
def get_courses_for_wiki(self, wiki_slug, **kwargs):
"""
Return the list of courses which use this wiki_slug
:param wiki_slug: the course wiki root slug
:return: list of course locations
"""
courses = self.get_courses()
return [course.location.course_key for course in courses if course.wiki_slug == wiki_slug]
def heartbeat(self):
"""
        Ensure that every known course is loaded and ready to go. Really, just return, because
        if this gets called, __init__ has finished, which means the courses are loaded.
        Returns a simple status dict.
"""
return {'xml': True}
@contextmanager
def branch_setting(self, branch_setting, course_id=None): # pylint: disable=unused-argument
"""
A context manager for temporarily setting the branch value for the store to the given branch_setting.
"""
if branch_setting != ModuleStoreEnum.Branch.published_only:
raise ValueError(u"Cannot set branch setting to {} on a ReadOnly store".format(branch_setting))
yield
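    # Illustrative use of branch_setting, kept as comments; `store` here is an
    # assumed instance of this class. Any branch other than published_only
    # raises ValueError on this read-only store:
    #
    #     with store.branch_setting(ModuleStoreEnum.Branch.published_only):
    #         courses = store.get_courses()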
def _find_course_asset(self, asset_key):
"""
For now this is not implemented, but others should feel free to implement using the asset.json
which export produces.
"""
log.warning("_find_course_asset request of XML modulestore - not implemented.")
return (None, None)
def find_asset_metadata(self, asset_key, **kwargs):
"""
For now this is not implemented, but others should feel free to implement using the asset.json
which export produces.
"""
log.warning("find_asset_metadata request of XML modulestore - not implemented.")
return None
def get_all_asset_metadata(self, course_key, asset_type, start=0, maxresults=-1, sort=None, **kwargs):
"""
For now this is not implemented, but others should feel free to implement using the asset.json
which export produces.
"""
log.warning("get_all_asset_metadata request of XML modulestore - not implemented.")
return []
def fill_in_run(self, course_key):
"""
A no-op.
Added to simplify tests which use the XML-store directly.
"""
return course_key
class LibraryXMLModuleStore(XMLModuleStore):
"""
A modulestore for importing Libraries from XML.
"""
parent_xml = LIBRARY_ROOT
@staticmethod
def get_id(org, library, url_name):
"""
Create a LibraryLocator given an org and library. url_name is ignored, but left in
for compatibility with the parent signature.
"""
return LibraryLocator(org=org, library=library)
@staticmethod
def patch_descriptor_kvs(library_descriptor):
"""
Metadata inheritance can be done purely through XBlocks, but in the import phase
a root block with an InheritanceKeyValueStore is assumed to be at the top of the hierarchy.
This should change in the future, but as XBlocks don't have this KVS, we have to patch it
here manually.
"""
init_dict = {key: getattr(library_descriptor, key) for key in library_descriptor.fields.keys()}
        # if set, invalidate '_unwrapped_field_data' so that it is reset
        # the next time it is accessed
lazy.invalidate(library_descriptor, '_unwrapped_field_data')
# pylint: disable=protected-access
library_descriptor._field_data = inheriting_field_data(InheritanceKeyValueStore(init_dict))
def content_importers(self, system, course_descriptor, course_dir, url_name):
"""
Handle Metadata inheritance for Libraries.
"""
self.patch_descriptor_kvs(course_descriptor)
compute_inherited_metadata(course_descriptor)
def get_library(self, library_id, depth=0, **kwargs): # pylint: disable=unused-argument
"""
Get a library from this modulestore or return None if it does not exist.
"""
assert isinstance(library_id, LibraryLocator)
for library in self.get_courses(**kwargs):
if library.location.library_key == library_id:
return library
return None
@staticmethod
def id_from_descriptor(descriptor):
"""
Get the Library Key from the Library descriptor.
"""
return descriptor.location.library_key
def get_orphans(self, course_key, **kwargs):
"""
Get all of the xblocks in the given course which have no parents and are not of types which are
usually orphaned. NOTE: may include xblocks which still have references via xblocks which don't
use children to point to their dependents.
"""
# here just to quell the abstractmethod. someone could write the impl if needed
raise NotImplementedError
| agpl-3.0 |
vipul-sharma20/oh-mainline | vendor/packages/Django/django/core/validators.py | 107 | 7201 | from __future__ import unicode_literals
import re
try:
from urllib.parse import urlsplit, urlunsplit
except ImportError: # Python 2
from urlparse import urlsplit, urlunsplit
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_text
from django.utils.ipv6 import is_valid_ipv6_address
from django.utils import six
# These values, if given to validate(), will trigger the self.required check.
EMPTY_VALUES = (None, '', [], (), {})
class RegexValidator(object):
regex = ''
message = _('Enter a valid value.')
code = 'invalid'
def __init__(self, regex=None, message=None, code=None):
if regex is not None:
self.regex = regex
if message is not None:
self.message = message
if code is not None:
self.code = code
# Compile the regex if it was not passed pre-compiled.
if isinstance(self.regex, six.string_types):
self.regex = re.compile(self.regex)
def __call__(self, value):
"""
Validates that the input matches the regular expression.
"""
if not self.regex.search(force_text(value)):
raise ValidationError(self.message, code=self.code)
class URLValidator(RegexValidator):
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def __call__(self, value):
try:
super(URLValidator, self).__call__(value)
except ValidationError as e:
# Trivial case failed. Try for possible IDN domain
if value:
value = force_text(value)
scheme, netloc, path, query, fragment = urlsplit(value)
try:
netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE
except UnicodeError: # invalid domain part
raise e
url = urlunsplit((scheme, netloc, path, query, fragment))
super(URLValidator, self).__call__(url)
else:
raise
else:
url = value
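# A short sketch of the IDN fallback implemented above, kept as comments; the
# Unicode hostname below is an illustrative assumption:
#
#     validate = URLValidator()
#     validate('http://example.com')        # matches the regex directly
#     validate(u'http://\u4f8b\u3048.jp')   # regex fails, the netloc is
#                                           # re-encoded with 'idna', re-checked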
def validate_integer(value):
try:
int(value)
except (ValueError, TypeError):
raise ValidationError('')
class EmailValidator(RegexValidator):
def __call__(self, value):
try:
super(EmailValidator, self).__call__(value)
except ValidationError as e:
# Trivial case failed. Try for possible IDN domain-part
if value and '@' in value:
parts = value.split('@')
try:
parts[-1] = parts[-1].encode('idna').decode('ascii')
except UnicodeError:
raise e
super(EmailValidator, self).__call__('@'.join(parts))
else:
raise
email_re = re.compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
# quoted-string, see also http://tools.ietf.org/html/rfc2822#section-3.2.5
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"'
r')@((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)$)' # domain
r'|\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$', re.IGNORECASE) # literal form, ipv4 address (SMTP 4.1.3)
validate_email = EmailValidator(email_re, _('Enter a valid email address.'), 'invalid')
slug_re = re.compile(r'^[-a-zA-Z0-9_]+$')
validate_slug = RegexValidator(slug_re, _("Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."), 'invalid')
ipv4_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$')
validate_ipv4_address = RegexValidator(ipv4_re, _('Enter a valid IPv4 address.'), 'invalid')
def validate_ipv6_address(value):
if not is_valid_ipv6_address(value):
raise ValidationError(_('Enter a valid IPv6 address.'), code='invalid')
def validate_ipv46_address(value):
try:
validate_ipv4_address(value)
except ValidationError:
try:
validate_ipv6_address(value)
except ValidationError:
raise ValidationError(_('Enter a valid IPv4 or IPv6 address.'), code='invalid')
ip_address_validator_map = {
'both': ([validate_ipv46_address], _('Enter a valid IPv4 or IPv6 address.')),
'ipv4': ([validate_ipv4_address], _('Enter a valid IPv4 address.')),
'ipv6': ([validate_ipv6_address], _('Enter a valid IPv6 address.')),
}
def ip_address_validators(protocol, unpack_ipv4):
"""
    Depending on the given parameters, returns the appropriate validators for
    the GenericIPAddressField.
    This code lives here because it is exactly the same for the model and the form field.
"""
if protocol != 'both' and unpack_ipv4:
raise ValueError(
"You can only use `unpack_ipv4` if `protocol` is set to 'both'")
try:
return ip_address_validator_map[protocol.lower()]
except KeyError:
raise ValueError("The protocol '%s' is unknown. Supported: %s"
% (protocol, list(ip_address_validator_map)))
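# A minimal sketch of how ip_address_validators is consumed (for example by a
# model field), kept as comments; the values below are assumptions:
#
#     validators, message = ip_address_validators('both', unpack_ipv4=False)
#     for validator in validators:
#         validator('::1')    # valid IPv6, so no ValidationError is raised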
comma_separated_int_list_re = re.compile('^[\d,]+$')
validate_comma_separated_integer_list = RegexValidator(comma_separated_int_list_re, _('Enter only digits separated by commas.'), 'invalid')
class BaseValidator(object):
compare = lambda self, a, b: a is not b
clean = lambda self, x: x
message = _('Ensure this value is %(limit_value)s (it is %(show_value)s).')
code = 'limit_value'
def __init__(self, limit_value):
self.limit_value = limit_value
def __call__(self, value):
cleaned = self.clean(value)
params = {'limit_value': self.limit_value, 'show_value': cleaned}
if self.compare(cleaned, self.limit_value):
raise ValidationError(
self.message % params,
code=self.code,
params=params,
)
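# BaseValidator subclasses only need to override `compare` (and optionally
# `clean`, `message` and `code`), as the built-ins below show. A hypothetical
# custom validator, kept as a comment, might look like:
#
#     class MultipleOfValidator(BaseValidator):
#         compare = lambda self, a, b: a % b != 0   # raise when not a multiple
#         message = _('Ensure this value is a multiple of %(limit_value)s.')
#         code = 'multiple_of'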
class MaxValueValidator(BaseValidator):
compare = lambda self, a, b: a > b
message = _('Ensure this value is less than or equal to %(limit_value)s.')
code = 'max_value'
class MinValueValidator(BaseValidator):
compare = lambda self, a, b: a < b
message = _('Ensure this value is greater than or equal to %(limit_value)s.')
code = 'min_value'
class MinLengthValidator(BaseValidator):
compare = lambda self, a, b: a < b
clean = lambda self, x: len(x)
message = _('Ensure this value has at least %(limit_value)d characters (it has %(show_value)d).')
code = 'min_length'
class MaxLengthValidator(BaseValidator):
compare = lambda self, a, b: a > b
clean = lambda self, x: len(x)
message = _('Ensure this value has at most %(limit_value)d characters (it has %(show_value)d).')
code = 'max_length'
| agpl-3.0 |
shwinpiocess/fire | fire/settings.py | 1 | 5636 | """
Django settings for fire project.
Generated by 'django-admin startproject' using Django 1.8.14.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'c#f=i+o*$v@f=wczah3-&^i%y-fwt09u_k)!9-e#)yjq3)c3-z'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djcelery',
'job',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'job.middleware.LoginRequiredMiddleware',
)
ROOT_URLCONF = 'fire.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'fire.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'fire',
'USER': 'fire',
'PASSWORD': '',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
#'format': '%(asctime)s %(levelname)s %(pathname)s %(lineno)d %(funcName)s %(message)s'
'format': '%(asctime)s %(levelname)s %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'rotating_file': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(BASE_DIR, 'fire.log'),
'maxBytes': 5242880,
'backupCount': 5,
'formatter': 'verbose'
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
}
},
'loggers': {
'django': {
'handlers': ['null'],
'propagate': True,
'level': 'INFO',
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'job.views': {
'handlers': ['rotating_file'],
'level': 'DEBUG',
},
'job.models': {
'handlers': ['rotating_file'],
'level': 'DEBUG',
},
'job.tasks': {
'handlers': ['rotating_file'],
'level': 'DEBUG',
},
'job.socket': {
'handlers': ['rotating_file'],
'level': 'DEBUG',
},
'job.utils': {
'handlers': ['rotating_file'],
'level': 'DEBUG',
}
}
}
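# With the LOGGING dict above, application code picks up its handlers purely by
# logger name; a minimal sketch, kept as comments (the logger name is one of
# those configured above):
#
#     import logging
#     logger = logging.getLogger('job.views')
#     logger.debug('written to fire.log through the rotating_file handler')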
###############################################################################
# Celery Settings
###############################################################################
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://localhost'
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TRACE_STARTED = True
CELERY_TASK_TIME_LIMIT = None
#CELERY_TASK_SOFT_TIME_LIMIT = None
#CELERYBEAT_SCHEDULER = 'celery.beat.PersistentScheduler'
#CELERYBEAT_MAX_LOOP_INTERVAL = 60
#CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
#CELERYBEAT_SCHEDULE = {
# 'phoenix_scheduler': {
# 'task': 'crane.tasks.crane_periodic_scheduler',
# 'schedule': timedelta(seconds=30)
# }
#}
#
#SCHEDULE_METADATA_LOCATION = os.path.join(BASE_DIR, '.phoenix_cycle')
JOBOUTPUT_ROOT = os.path.join(BASE_DIR, 'job_output')
PROJECTS_ROOT = os.path.join(BASE_DIR, 'projects')
INVENTORY_URL = 'http://127.0.0.1:8888/inventories/'
| apache-2.0 |
Dhivyap/ansible | lib/ansible/module_utils/facts/virtual/hpux.py | 199 | 2486 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
from ansible.module_utils.facts.virtual.base import Virtual, VirtualCollector
class HPUXVirtual(Virtual):
"""
This is a HP-UX specific subclass of Virtual. It defines
- virtualization_type
- virtualization_role
"""
platform = 'HP-UX'
def get_virtual_facts(self):
virtual_facts = {}
if os.path.exists('/usr/sbin/vecheck'):
rc, out, err = self.module.run_command("/usr/sbin/vecheck")
if rc == 0:
virtual_facts['virtualization_type'] = 'guest'
virtual_facts['virtualization_role'] = 'HP vPar'
if os.path.exists('/opt/hpvm/bin/hpvminfo'):
rc, out, err = self.module.run_command("/opt/hpvm/bin/hpvminfo")
if rc == 0 and re.match('.*Running.*HPVM vPar.*', out):
virtual_facts['virtualization_type'] = 'guest'
virtual_facts['virtualization_role'] = 'HPVM vPar'
elif rc == 0 and re.match('.*Running.*HPVM guest.*', out):
virtual_facts['virtualization_type'] = 'guest'
virtual_facts['virtualization_role'] = 'HPVM IVM'
elif rc == 0 and re.match('.*Running.*HPVM host.*', out):
virtual_facts['virtualization_type'] = 'host'
virtual_facts['virtualization_role'] = 'HPVM'
if os.path.exists('/usr/sbin/parstatus'):
rc, out, err = self.module.run_command("/usr/sbin/parstatus")
if rc == 0:
virtual_facts['virtualization_type'] = 'guest'
virtual_facts['virtualization_role'] = 'HP nPar'
return virtual_facts
class HPUXVirtualCollector(VirtualCollector):
_fact_class = HPUXVirtual
_platform = 'HP-UX'
| gpl-3.0 |
Orochimarufan/youtube-dl | youtube_dl/extractor/byutv.py | 14 | 4172 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
merge_dicts,
parse_duration,
url_or_none,
)
class BYUtvIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?byutv\.org/(?:watch|player)/(?!event/)(?P<id>[0-9a-f-]+)(?:/(?P<display_id>[^/?#&]+))?'
_TESTS = [{
# ooyalaVOD
'url': 'http://www.byutv.org/watch/6587b9a3-89d2-42a6-a7f7-fd2f81840a7d/studio-c-season-5-episode-5',
'info_dict': {
'id': 'ZvanRocTpW-G5_yZFeltTAMv6jxOU9KH',
'display_id': 'studio-c-season-5-episode-5',
'ext': 'mp4',
'title': 'Season 5 Episode 5',
'description': 'md5:1d31dc18ef4f075b28f6a65937d22c65',
'thumbnail': r're:^https?://.*',
'duration': 1486.486,
},
'params': {
'skip_download': True,
},
'add_ie': ['Ooyala'],
}, {
# dvr
'url': 'https://www.byutv.org/player/8f1dab9b-b243-47c8-b525-3e2d021a3451/byu-softball-pacific-vs-byu-41219---game-2',
'info_dict': {
'id': '8f1dab9b-b243-47c8-b525-3e2d021a3451',
'display_id': 'byu-softball-pacific-vs-byu-41219---game-2',
'ext': 'mp4',
'title': 'Pacific vs. BYU (4/12/19)',
'description': 'md5:1ac7b57cb9a78015910a4834790ce1f3',
'duration': 11645,
},
'params': {
'skip_download': True
},
}, {
'url': 'http://www.byutv.org/watch/6587b9a3-89d2-42a6-a7f7-fd2f81840a7d',
'only_matching': True,
}, {
'url': 'https://www.byutv.org/player/27741493-dc83-40b0-8420-e7ae38a2ae98/byu-football-toledo-vs-byu-93016?listid=4fe0fee5-0d3c-4a29-b725-e4948627f472&listindex=0&q=toledo',
'only_matching': True,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id') or video_id
video = self._download_json(
'https://api.byutv.org/api3/catalog/getvideosforcontent',
display_id, query={
'contentid': video_id,
'channel': 'byutv',
'x-byutv-context': 'web$US',
}, headers={
'x-byutv-context': 'web$US',
'x-byutv-platformkey': 'xsaaw9c7y5',
})
ep = video.get('ooyalaVOD')
if ep:
return {
'_type': 'url_transparent',
'ie_key': 'Ooyala',
'url': 'ooyala:%s' % ep['providerId'],
'id': video_id,
'display_id': display_id,
'title': ep.get('title'),
'description': ep.get('description'),
'thumbnail': ep.get('imageThumbnail'),
}
info = {}
formats = []
for format_id, ep in video.items():
if not isinstance(ep, dict):
continue
video_url = url_or_none(ep.get('videoUrl'))
if not video_url:
continue
ext = determine_ext(video_url)
if ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
video_url, video_id, 'mp4', entry_protocol='m3u8_native',
m3u8_id='hls', fatal=False))
elif ext == 'mpd':
formats.extend(self._extract_mpd_formats(
video_url, video_id, mpd_id='dash', fatal=False))
else:
formats.append({
'url': video_url,
'format_id': format_id,
})
            info = merge_dicts(info, {
'title': ep.get('title'),
'description': ep.get('description'),
'thumbnail': ep.get('imageThumbnail'),
'duration': parse_duration(ep.get('length')),
})
self._sort_formats(formats)
return merge_dicts(info, {
'id': video_id,
'display_id': display_id,
'title': display_id,
'formats': formats,
})
| unlicense |
openstack/ironic | ironic/tests/unit/drivers/modules/ilo/test_inspect.py | 1 | 26121 | # Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test class for Management Interface used by iLO modules."""
from unittest import mock
from ironic.common import exception
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.conductor import utils as conductor_utils
from ironic.drivers.modules.ilo import common as ilo_common
from ironic.drivers.modules.ilo import inspect as ilo_inspect
from ironic.drivers.modules.ilo import power as ilo_power
from ironic.drivers.modules import inspect_utils
from ironic.tests.unit.drivers.modules.ilo import test_common
class IloInspectTestCase(test_common.BaseIloTest):
def test_get_properties(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
properties = ilo_common.REQUIRED_PROPERTIES.copy()
properties.update(ilo_common.SNMP_PROPERTIES)
properties.update(ilo_common.SNMP_OPTIONAL_PROPERTIES)
self.assertEqual(properties,
task.driver.inspect.get_properties())
@mock.patch.object(ilo_common, 'parse_driver_info', spec_set=True,
autospec=True)
def test_validate(self, driver_info_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.inspect.validate(task)
driver_info_mock.assert_called_once_with(task.node)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(inspect_utils, 'create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_essential_ok(self, get_ilo_object_mock,
power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
capabilities = {}
result = {'properties': properties, 'macs': macs}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.inspect.inspect_hardware(task)
self.assertEqual(properties, task.node.properties)
power_mock.assert_called_once_with(mock.ANY, task)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task, macs)
@mock.patch.object(ilo_inspect.LOG, 'warning',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(inspect_utils, 'create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_essential_ok_local_gb_zero(self, get_ilo_object_mock,
power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock,
log_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': 0,
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
capabilities = {}
result = {'properties': properties, 'macs': macs}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
properties = task.node.properties
properties['local_gb'] = 10
task.node.properties = properties
task.node.save()
expected_properties = {'memory_mb': '512', 'local_gb': 10,
'cpus': '1', 'cpu_arch': 'x86_64'}
task.driver.inspect.inspect_hardware(task)
self.assertEqual(expected_properties, task.node.properties)
power_mock.assert_called_once_with(mock.ANY, task)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
self.assertTrue(log_mock.called)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task, macs)
@mock.patch.object(ilo_inspect.LOG, 'warning',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(inspect_utils, 'create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_ok_gen8(self, get_ilo_object_mock,
power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock,
log_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': 10,
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
capabilities = {'server_model': 'Gen8'}
result = {'properties': properties, 'macs': macs}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
expected_properties = {'memory_mb': '512', 'local_gb': 10,
'cpus': '1', 'cpu_arch': 'x86_64',
'capabilities': 'server_model:Gen8'}
task.driver.inspect.inspect_hardware(task)
self.assertEqual(expected_properties, task.node.properties)
power_mock.assert_called_once_with(mock.ANY, task)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
self.assertTrue(log_mock.called)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task, macs)
@mock.patch.object(ilo_inspect.LOG, 'warning',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_security_parameters',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(inspect_utils, 'create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_ok_gen10(self, get_ilo_object_mock,
power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock,
get_security_params_mock,
log_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': 10,
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'NIC.LOM.1.1': 'aa:aa:aa:aa:aa:aa'}
capabilities = {'server_model': 'Gen10'}
security_params = (
{'security_parameters': {'Password Complexity': 'ok'}})
result = {'properties': properties, 'macs': macs}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
get_security_params_mock.return_value = security_params
power_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
expected_properties = {
'memory_mb': '512', 'local_gb': 10, 'cpus': '1',
'cpu_arch': 'x86_64', 'capabilities': 'server_model:Gen10',
'security_parameters': {'Password Complexity': 'ok'}}
task.driver.inspect.inspect_hardware(task)
self.assertEqual(expected_properties, task.node.properties)
power_mock.assert_called_once_with(mock.ANY, task)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
self.assertFalse(log_mock.called)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task, macs)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(inspect_utils, 'create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(conductor_utils, 'node_power_action', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_essential_ok_power_off(self, get_ilo_object_mock,
power_mock,
set_power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
capabilities = {}
result = {'properties': properties, 'macs': macs}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_OFF
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.inspect.inspect_hardware(task)
self.assertEqual(properties, task.node.properties)
power_mock.assert_called_once_with(mock.ANY, task)
set_power_mock.assert_any_call(task, states.POWER_ON)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task, macs)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(inspect_utils, 'create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_essential_capabilities_ok(self, get_ilo_object_mock,
power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
capability_str = 'sriov_enabled:true'
capabilities = {'sriov_enabled': 'true'}
result = {'properties': properties, 'macs': macs}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.inspect.inspect_hardware(task)
expected_properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64',
'capabilities': capability_str}
self.assertEqual(expected_properties, task.node.properties)
power_mock.assert_called_once_with(mock.ANY, task)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task, macs)
@mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
autospec=True)
@mock.patch.object(inspect_utils, 'create_ports_if_not_exist',
spec_set=True, autospec=True)
@mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
autospec=True)
@mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
autospec=True)
def test_inspect_essential_capabilities_exist_ok(self, get_ilo_object_mock,
power_mock,
get_essential_mock,
create_port_mock,
get_capabilities_mock):
ilo_object_mock = get_ilo_object_mock.return_value
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64',
'somekey': 'somevalue'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
result = {'properties': properties, 'macs': macs}
capabilities = {'sriov_enabled': 'true'}
get_essential_mock.return_value = result
get_capabilities_mock.return_value = capabilities
power_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.node.properties = {'capabilities': 'boot_mode:uefi'}
expected_capabilities = ('sriov_enabled:true,'
'boot_mode:uefi')
set1 = set(expected_capabilities.split(','))
task.driver.inspect.inspect_hardware(task)
end_capabilities = task.node.properties['capabilities']
set2 = set(end_capabilities.split(','))
self.assertEqual(set1, set2)
expected_properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64',
'capabilities': end_capabilities}
power_mock.assert_called_once_with(mock.ANY, task)
self.assertEqual(task.node.properties, expected_properties)
get_essential_mock.assert_called_once_with(task.node,
ilo_object_mock)
get_capabilities_mock.assert_called_once_with(task.node,
ilo_object_mock)
create_port_mock.assert_called_once_with(task, macs)
class TestInspectPrivateMethods(test_common.BaseIloTest):
def test__get_essential_properties_ok(self):
ilo_mock = mock.MagicMock(spec=['get_essential_properties'])
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
actual_result = ilo_inspect._get_essential_properties(self.node,
ilo_mock)
self.assertEqual(result, actual_result)
def test__get_essential_properties_fail(self):
ilo_mock = mock.MagicMock(
spec=['get_additional_capabilities', 'get_essential_properties'])
# Missing key: cpu_arch
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
result = self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties,
self.node,
ilo_mock)
self.assertEqual(
str(result),
("Failed to inspect hardware. Reason: Server didn't return the "
"key(s): cpu_arch"))
def test__get_essential_properties_fail_invalid_format(self):
ilo_mock = mock.MagicMock(
spec=['get_additional_capabilities', 'get_essential_properties'])
# Not a dict
properties = ['memory_mb', '512', 'local_gb', '10',
'cpus', '1']
macs = ['aa:aa:aa:aa:aa:aa', 'bb:bb:bb:bb:bb:bb']
capabilities = ''
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
ilo_mock.get_additional_capabilities.return_value = capabilities
self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties,
self.node, ilo_mock)
def test__get_essential_properties_fail_mac_invalid_format(self):
ilo_mock = mock.MagicMock(spec=['get_essential_properties'])
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
# Not a dict
macs = 'aa:aa:aa:aa:aa:aa'
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties,
self.node, ilo_mock)
def test__get_essential_properties_hardware_port_empty(self):
ilo_mock = mock.MagicMock(
spec=['get_additional_capabilities', 'get_essential_properties'])
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
# Not a dictionary
macs = None
result = {'properties': properties, 'macs': macs}
capabilities = ''
ilo_mock.get_essential_properties.return_value = result
ilo_mock.get_additional_capabilities.return_value = capabilities
self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties,
self.node, ilo_mock)
def test__get_essential_properties_hardware_port_not_dict(self):
ilo_mock = mock.MagicMock(spec=['get_essential_properties'])
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1', 'cpu_arch': 'x86_64'}
# Not a dict
macs = 'aa:bb:cc:dd:ee:ff'
result = {'properties': properties, 'macs': macs}
ilo_mock.get_essential_properties.return_value = result
result = self.assertRaises(
exception.HardwareInspectionFailure,
ilo_inspect._get_essential_properties, self.node, ilo_mock)
@mock.patch.object(utils, 'get_updated_capabilities', spec_set=True,
autospec=True)
def test__get_capabilities_ok(self, capability_mock):
ilo_mock = mock.MagicMock(spec=['get_server_capabilities'])
capabilities = {'ilo_firmware_version': 'xyz'}
ilo_mock.get_server_capabilities.return_value = capabilities
cap = ilo_inspect._get_capabilities(self.node, ilo_mock)
self.assertEqual(cap, capabilities)
def test__validate_ok(self):
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '2', 'cpu_arch': 'x86_arch'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa'}
data = {'properties': properties, 'macs': macs}
valid_keys = ilo_inspect.IloInspect.ESSENTIAL_PROPERTIES
ilo_inspect._validate(self.node, data)
self.assertEqual(sorted(set(properties)), sorted(valid_keys))
def test__validate_essential_keys_fail_missing_key(self):
properties = {'memory_mb': '512', 'local_gb': '10',
'cpus': '1'}
macs = {'Port 1': 'aa:aa:aa:aa:aa:aa'}
data = {'properties': properties, 'macs': macs}
self.assertRaises(exception.HardwareInspectionFailure,
ilo_inspect._validate, self.node, data)
def test___create_supported_capabilities_dict(self):
capabilities = {}
expected = {}
for key in ilo_inspect.CAPABILITIES_KEYS:
capabilities.update({key: 'true'})
expected.update({key: 'true'})
capabilities.update({'unknown_property': 'true'})
cap = ilo_inspect._create_supported_capabilities_dict(capabilities)
self.assertEqual(expected, cap)
def test___create_supported_capabilities_dict_excluded_capability(self):
capabilities = {}
expected = {}
for key in ilo_inspect.CAPABILITIES_KEYS - {'has_ssd'}:
capabilities.update({key: 'true'})
expected.update({key: 'true'})
cap = ilo_inspect._create_supported_capabilities_dict(capabilities)
self.assertEqual(expected, cap)
def test___create_supported_capabilities_dict_gpu_capabilities(self):
capabilities = {'gpu_Nvidia_count': 1, 'gpu_Nvidia_Tesla_M10_count': 1,
'gpu_Nvidia_Tesla_M10': True}
expected = {}
expected.update(capabilities)
for key in ilo_inspect.CAPABILITIES_KEYS:
capabilities.update({key: 'true'})
expected.update({key: 'true'})
capabilities.update({'unknown_property': 'true'})
cap = ilo_inspect._create_supported_capabilities_dict(capabilities)
self.assertEqual(expected, cap)
| apache-2.0 |
yelizariev/tkobr-addons | tko_account_contract_report_template/report/contract_report.py | 3 | 4477 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# ThinkOpen Solutions Brasil
# Copyright (C) Thinkopen Solutions <http://www.tkobr.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv, orm
from openerp.report import report_sxw
from openerp.exceptions import Warning
from openerp import _
import re
import logging
_logger = logging.getLogger(__name__)
class tko_contract_report(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(tko_contract_report, self).__init__(cr, uid, name, context=context)
active_ids = context['active_ids']
self.pool.get('account.analytic.account').check_fields(cr, uid, active_ids, context=context)
self.localcontext.update({
'time': time,
'compute_template_variables':self.compute_template_variables,
})
def compute_template_variables(self, object, text):
pattern = re.compile('\$\((.*?)\)s')
matches = pattern.findall(str(text.encode('utf-8')))
while len(matches):
value = ''
type = ''
if len(matches):
for match in matches:
value = object
block = match.split(',')
for field in block[0].split('.'):
try:
type = value._fields[field].type
value = value[field]
except Exception, err:
value = ('<font color="red"><strong>[ERROR: Field %s doesn\'t exist in %s]<strong></font>') % (err, value)
_logger.error(("Field %s doesn't exist in %s") % (err, value))
if value:
if type != 'binary':
text = text.replace('$(' + match + ')s', str(unicode(value).encode('utf-8')).decode('utf-8'))
else:
width, height = '', ''
try:
if block[1]:
width = ' width="%spx"' % block[1]
if block[2]:
height = ' height="%spx"' % block[2]
text = text.replace('$(' + match + ')s' , '<img src="data:image/jpeg;base64,' + str(value) + '"%s%s/>' % (width, height))
except Exception, err:
value = _(u'<font color="red"><strong>[ERROR: Wrong image size indication in "%s". Examples: "(partner_id.image,160,160)" or "(partner_id.image,,160)" or "(partner_id.image,160,)" or "(partner_id.image,,)"]<strong></font>' % match)
_logger.error(_(u'Wrong image size indication in "$(%s)s". Examples: $(partner_id.image,160,160)s or $(partner_id.image,,160)s or $(partner_id.image,160,)s or $(partner_id.image,,)s' % match))
text = text.replace('$(' + match + ')s' , str(value))
if not value:
text = text.replace('$(' + match + ')s', '')
matches = pattern.findall(str(text.encode('utf-8')))
return text
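# compute_template_variables() resolves placeholders of the form $(field.path)s
# against the record; the field names below are illustrative assumptions, in
# line with the examples in the error messages above:
#
#     $(partner_id.name)s            -> replaced by the partner's name
#     $(partner_id.image,160,160)s   -> replaced by a 160x160 <img> tag
#     $(partner_id.image,,)s         -> replaced by an <img> tag with no size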
class report_tko_contract_report(osv.AbstractModel):
_name = 'report.tko_account_contract_report_template.tko_contract_report'
_inherit = 'report.abstract_report'
_template = 'tko_account_contract_report_template.tko_contract_report'
_wrapped_report_class = tko_contract_report
| agpl-3.0 |
chepazzo/ansible-modules-extras | clustering/consul_kv.py | 66 | 8661 | #!/usr/bin/python
#
# (c) 2015, Steve Gargan <steve.gargan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: consul_kv
short_description: Manipulate entries in the key/value store of a consul cluster.
description:
- Allows the addition, modification and deletion of key/value entries in a
consul cluster via the agent. The entire contents of the record, including
the indices, flags and session are returned as 'value'.
  - If the key represents a prefix, operations can apply recursively to every
    entry under it. Note that when a value is removed, the existing value, if
    any, is returned as part of the results.
- "See http://www.consul.io/docs/agent/http.html#kv for more details."
requirements:
- "python >= 2.6"
- python-consul
- requests
version_added: "2.0"
author: "Steve Gargan (@sgargan)"
options:
state:
description:
- the action to take with the supplied key and value. If the state is
        'present', the key contents will be set to the value supplied, and
        'changed' will be set to true only if the value was different from the
current contents. The state 'absent' will remove the key/value pair,
again 'changed' will be set to true only if the key actually existed
prior to the removal. An attempt can be made to obtain or free the
lock associated with a key/value pair with the states 'acquire' or
        'release' respectively. A valid session must be supplied to make the
        attempt; 'changed' will be true if the attempt is successful, false
otherwise.
required: false
choices: ['present', 'absent', 'acquire', 'release']
default: present
key:
description:
- the key at which the value should be stored.
required: true
value:
description:
      - the value to be associated with the given key; required if state
        is 'present'
required: true
recurse:
description:
- if the key represents a prefix, each entry with the prefix can be
retrieved by setting this to true.
required: false
default: false
session:
description:
- the session that should be used to acquire or release a lock
associated with a key/value pair
required: false
default: None
token:
description:
      - the token key identifying an ACL rule set that controls access to
the key value pair
required: false
default: None
cas:
description:
- used when acquiring a lock with a session. If the cas is 0, then
Consul will only put the key if it does not already exist. If the
cas value is non-zero, then the key is only set if the index matches
the ModifyIndex of that key.
required: false
default: None
flags:
description:
- opaque integer value that can be passed when setting a value.
required: false
default: None
host:
description:
- host of the consul agent defaults to localhost
required: false
default: localhost
port:
description:
- the port on which the consul agent is running
required: false
default: 8500
"""
EXAMPLES = '''
- name: add or update the value associated with a key in the key/value store
consul_kv:
key: somekey
value: somevalue
- name: remove a key from the store
consul_kv:
key: somekey
state: absent
- name: add a node to an arbitrary group via consul inventory (see consul.ini)
consul_kv:
key: ansible/groups/dc1/somenode
value: 'top_secret'
'''
import sys
try:
import json
except ImportError:
import simplejson as json
try:
import consul
from requests.exceptions import ConnectionError
python_consul_installed = True
except ImportError, e:
python_consul_installed = False
from requests.exceptions import ConnectionError
def execute(module):
state = module.params.get('state')
if state == 'acquire' or state == 'release':
lock(module, state)
if state == 'present':
add_value(module)
else:
remove_value(module)
def lock(module, state):
    # obtain a client before touching the KV API
    consul_api = get_consul_api(module)
    session = module.params.get('session')
    key = module.params.get('key')
    value = module.params.get('value')
    if not session:
        module.fail_json(
            msg='%s of lock for %s requested but no session supplied' %
            (state, key))
    if state == 'acquire':
        successful = consul_api.kv.put(key, value,
                                       cas=module.params.get('cas'),
                                       acquire=session,
                                       flags=module.params.get('flags'))
    else:
        successful = consul_api.kv.put(key, value,
                                       cas=module.params.get('cas'),
                                       release=session,
                                       flags=module.params.get('flags'))
    # fetch the current index so it can be reported back
    index, _ = consul_api.kv.get(key)
    module.exit_json(changed=successful,
                     index=index,
                     key=key)
def add_value(module):
consul_api = get_consul_api(module)
key = module.params.get('key')
value = module.params.get('value')
index, existing = consul_api.kv.get(key)
changed = not existing or (existing and existing['Value'] != value)
if changed and not module.check_mode:
changed = consul_api.kv.put(key, value,
cas=module.params.get('cas'),
flags=module.params.get('flags'))
if module.params.get('retrieve'):
index, stored = consul_api.kv.get(key)
module.exit_json(changed=changed,
index=index,
key=key,
data=stored)
def remove_value(module):
    ''' Remove the value associated with the given key. If the recurse parameter
    is set, any key prefixed with the given key will be removed. '''
consul_api = get_consul_api(module)
key = module.params.get('key')
value = module.params.get('value')
index, existing = consul_api.kv.get(
key, recurse=module.params.get('recurse'))
    changed = existing is not None
if changed and not module.check_mode:
consul_api.kv.delete(key, module.params.get('recurse'))
module.exit_json(changed=changed,
index=index,
key=key,
data=existing)
def get_consul_api(module, token=None):
return consul.Consul(host=module.params.get('host'),
port=module.params.get('port'),
token=module.params.get('token'))
def test_dependencies(module):
if not python_consul_installed:
module.fail_json(msg="python-consul required for this module. "\
"see http://python-consul.readthedocs.org/en/latest/#installation")
def main():
argument_spec = dict(
cas=dict(required=False),
flags=dict(required=False),
key=dict(required=True),
host=dict(default='localhost'),
port=dict(default=8500, type='int'),
recurse=dict(required=False, type='bool'),
retrieve=dict(required=False, default=True),
        state=dict(default='present', choices=['present', 'absent', 'acquire', 'release']),
token=dict(required=False, default='anonymous'),
value=dict(required=False)
)
module = AnsibleModule(argument_spec, supports_check_mode=False)
test_dependencies(module)
try:
execute(module)
except ConnectionError, e:
module.fail_json(msg='Could not connect to consul agent at %s:%s, error was %s' % (
module.params.get('host'), module.params.get('port'), str(e)))
except Exception, e:
module.fail_json(msg=str(e))
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
alfa-addon/addon | plugin.video.alfa/channels/supergoku.py | 1 | 32185 | # -*- coding: utf-8 -*-
import sys
import re
import datetime
from bs4 import BeautifulSoup
from core.tmdb import Tmdb
from core import httptools, scrapertools, servertools, tmdb, jsontools
from core.scrapertools import unescape
from core.item import Item, InfoLabels
from platformcode import config, logger, platformtools
from channelselector import get_thumb
from lib import strptime_fix
host = 'https://supergoku.com'
IDIOMAS = {'VOSE': 'VOSE', 'LAT': 'Latino'}
list_language = list(IDIOMAS.keys())
def mainlist(item):
logger.info()
itemlist = []
itemlist.append(
Item(
action = "newest",
channel = item.channel,
fanart = item.fanart,
title = "Nuevos capítulos",
thumbnail = get_thumb("new episodes", auto=True),
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "recomended",
title = "Animes recomendados",
thumbnail = get_thumb("recomended", auto=True),
url = host
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "popular",
title = "Animes populares",
thumbnail = get_thumb("favorites", auto=True),
url = host
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "more_watched",
title = "Animes mas vistos",
thumbnail = get_thumb("more watched", auto=True),
url = host + '/tvshows/'
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "",
title = "Animes",
thumbnail = get_thumb("anime", auto=True),
url = host + '/categoria/anime/'
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "",
title = "Películas",
thumbnail = get_thumb("movies", auto=True),
url = host + '/categoria/pelicula/'
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "",
title = "OVAs",
thumbnail = get_thumb("anime", auto=True),
url = host + '/categoria/ova/'
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "",
title = "ONAs",
thumbnail = get_thumb("anime", auto=True),
url = host + '/categoria/ona/'
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "",
title = "Cortos",
thumbnail = get_thumb("anime", auto=True),
url = host + '/categoria/corto/'
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "",
title = "Especiales",
thumbnail = get_thumb("anime", auto=True),
url = host + '/categoria/especial/'
)
)
itemlist.append(
Item(
action = "filter_by_selection",
channel = item.channel,
fanart = item.fanart,
param = "genres",
title = "Géneros",
thumbnail = get_thumb("genres", auto=True),
url = host + '/tvshows/'
)
)
itemlist.append(
Item(
action = "filter_by_selection",
channel = item.channel,
fanart = item.fanart,
param = "airtime",
title = "Filtrar por año/estado",
thumbnail = get_thumb("year", auto=True),
url = host + '/tvshows/'
)
)
itemlist.append(
Item(
action = "list_all",
channel = item.channel,
fanart = item.fanart,
param = "allanimes",
title = "Todos los animes",
thumbnail = get_thumb("all", auto=True),
url = host + '/tvshows/'
)
)
itemlist.append(
Item(
action = "search",
channel = item.channel,
fanart = item.fanart,
title = "Buscar",
thumbnail = get_thumb("search", auto=True),
url = host + '/?s='
)
)
return itemlist
def create_soup(url, post=None, headers=None):
logger.info()
data = httptools.downloadpage(url, post=post, headers=headers).data
soup = BeautifulSoup(data, "html5lib", from_encoding="utf-8")
return soup
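# A minimal sketch of how create_soup is used throughout this channel, kept as
# comments; the URL and tag names below are assumptions:
#
#     soup = create_soup(host + '/tvshows/')
#     for article in soup.find_all('article'):
#         title = article.a['title']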
def newest(item):
item.param = "newepisodes"
item.url = host
return list_all(item)
def filter_by_selection(item):
logger.info()
itemlist = []
    soup = create_soup(item.url)
if item.param == "genres":
section = soup.find('ul', class_ = 'genres falsescroll')
elif item.param == "airtime":
section = soup.find('ul', class_ = 'releases falsescroll')
for article in section.children:
itemlist.append(
Item(
action = 'list_all',
channel = item.channel,
param = '',
title = str(article.a.string),
url = str(article.a['href'])
)
)
return itemlist
def labeler(item):
logger.info()
oldtitle = ''
if item.contentSerieName:
oldtitle = item.contentSerieName
else:
oldtitle = item.contentTitle
# Excepción(es) por algunas cosas que TMDB suele retornar erróneamente.
# Estas en particular, las retorna mal en muchos de los canales que se busca cuando no hay año correcto
year_exceptions = {'(?i)Yuru Camp': '2018', '(?i)One Piece': '1999', '(?i)Shingeki no Kyojin': '2013', '(?i)Higurashi no Naku Koro ni': '2020'}
title_exceptions = {'(?i)Bem': 'Bem: Become Human'}
title = item.infoLabels['title']
for title_exc, title_replace in title_exceptions.items():
if scrapertools.find_single_match(title, title_exc):
if item.contentTitle:
item.contentTitle = title_replace
if item.contentSerieName:
item.contentSerieName = title_replace
for title_exc, year_replace in year_exceptions.items():
if scrapertools.find_single_match(title, title_exc):
item.infoLabels['year'] = year_replace
tmdb.set_infoLabels(item, seekTmdb = True)
if not item.infoLabels['tmdb_id']:
oldcontentType = item.contentType
year = item.infoLabels['year']
        #---Try it as a series but without the year---#
item.contentTitle = ''
item.contentType = 'tv'
item.contentSerieName = oldtitle
item.infoLabels['year'] = ''
tmdb.set_infoLabels(item, seekTmdb = True)
if not item.infoLabels['tmdb_id']:
            #---Try it as a movie instead of a series (with year)---#
item.contentSerieName = ''
item.contentTitle = oldtitle
item.contentType = 'movie'
item.infoLabels['year'] = year
item.infoLabels['filtro'] = scrapertools.find_single_match(item.fanart, '(?is)/[^/]+\.(?:jpg|png)')
tmdb.set_infoLabels(item, seekTmdb = True)
if not item.infoLabels['tmdb_id']:
special_rubbish = ['(?is)(:.+?)']
                #---If it still does not work, try some special cases---#
item.contentType = oldcontentType
if oldcontentType == 'tv':
item.contentSerieName = oldtitle
item.contentTitle = ''
else:
item.contentSerieName = ''
item.contentTitle = oldtitle
if item.contentSerieName:
for rubbish in special_rubbish:
item.contentSerieName = re.sub(rubbish, '', oldtitle)
tmdb.set_infoLabels(item, seekTmdb = True)
if item.infoLabels['tmdb_id']:
break
else:
                            #---With the special title, try it as a movie instead of a series---#
item.contentSerieName = ''
item.contentTitle = oldtitle
item.contentType = 'movie'
tmdb.set_infoLabels(item, seekTmdb = True)
if not item.infoLabels['tmdb_id']:
                                #---With the special title, try it as a series but without the year---#
item.contentSerieName = oldtitle
item.contentTitle = ''
item.contentType = oldcontentType
item.infoLabels['year'] = ''
tmdb.set_infoLabels(item, seekTmdb = True)
else:
for rubbish in special_rubbish:
                        item.contentTitle = re.sub(rubbish, '', oldtitle)
tmdb.set_infoLabels(item, seekTmdb = True)
if item.infoLabels['tmdb_id']:
break
else:
                            #---With the special title, try it as a series instead of a movie---#
item.contentSerieName = oldtitle
item.contentTitle = ''
item.contentType = 'tv'
tmdb.set_infoLabels(item, seekTmdb = True)
if not item.infoLabels['tmdb_id']:
                                #---With the special title, try it as a movie but without the year---#
item.contentSerieName = ''
item.contentTitle = oldtitle
item.contentType = oldcontentType
item.infoLabels['year'] = ''
tmdb.set_infoLabels(item, seekTmdb = True)
if not item.infoLabels['tmdb_id']:
item.contentType = oldcontentType
if item.contentType == 'tv':
item.contentSerieName = oldtitle
else:
item.contentTitle = oldtitle
return item
def set_infoLabels_async(itemlist):
import threading
threads_num = config.get_setting("tmdb_threads", default=20)
semaforo = threading.Semaphore(threads_num)
lock = threading.Lock()
r_list = list()
i = 0
l_hilo = list()
def sub_thread(_item, _i):
semaforo.acquire()
ret = labeler(_item)
semaforo.release()
r_list.append((_i, _item, ret))
for item in itemlist:
t = threading.Thread(target = sub_thread, args = (item, i))
t.start()
i += 1
l_hilo.append(t)
    # wait for all threads to finish
for x in l_hilo:
x.join()
    # Sort the result list by call order to keep the same order as itemlist
r_list.sort(key=lambda i: i[0])
    # Rebuild the list and return only the results of the individual calls
return [ii[2] for ii in r_list]
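# Illustrative usage of the helper above: annotate a scraped itemlist with
# TMDB metadata in parallel, bounded by the configured thread count, e.g.
#   itemlist = set_infoLabels_async(itemlist)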
def process_title(old_title, getWithTags = False, get_contentTitle = False, get_lang = False):
logger.info()
stupid_little_things = {'(?is)[\–]+':'', '(?is)[\/]+':'', '(?is)([\(]).+?([\)])':'', '(?is)\s+\s':' ', '\(':'', '\)':''}
trash = {'(?is)ova[s]?':'[OVA]', '(?is)(?:\(|\))':'', '(?is)Pelicula':'[Película]',
'(?is)(Audio latino|latino)':'LAT', '(?is)Sub Español':'VOSE',
'(?is)Fandub':'[Fandub]', '(?is)Mini anime':'', '(?is)(Especiales|Especial)':'[Especiales]',
'(?i)\d\w\w Season':''}
# title_rubbish = ['(?is)(\s?(?:19|20)\d\d)', '(?is)\s[0-9].+?\s.*?(?:Season)?']
for pattern, replacement in stupid_little_things.items():
old_title = re.sub(pattern, replacement, old_title)
old_title = old_title.strip()
contentTitle = old_title
title = old_title
langs = []
for pattern, key in list(trash.items()):
if scrapertools.find_single_match(contentTitle, pattern):
if key in IDIOMAS:
langs.append(key)
title = re.sub(pattern, '[{}]'.format(IDIOMAS[key]), contentTitle)
else:
title = re.sub(pattern, '[{}]'.format(key), contentTitle)
            contentTitle = re.sub(pattern, '', contentTitle)
contentTitle = contentTitle.strip()
title = title.strip()
if getWithTags and get_contentTitle and get_lang:
return title, contentTitle, langs
elif getWithTags and get_contentTitle:
return title, contentTitle
    elif getWithTags and get_lang:
return title, langs
elif getWithTags:
return title
else:
return contentTitle
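# Rough sketch of what the flags above are meant to return (input and output
# values are illustrative, not taken from the site):
#   process_title('Naruto (TV) Sub Español', getWithTags=True,
#                 get_contentTitle=True, get_lang=True)
#   -> roughly ('Naruto [VOSE]', 'Naruto', ['VOSE'])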
def get_next_page(data):
pattern = '<span class=.current.+?a href=["|\'](.+?)["|\'] class="inactive"'
match = scrapertools.find_single_match(data, pattern)
if match != '':
return match
else:
return False
def list_all(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
    soup = BeautifulSoup(data, "html5lib", from_encoding="utf-8")
    matches = []
genericvalues = {'recomended': True, 'more_watched': True,
'popular': True, 'search': True,
'newepisodes': False, 'allanimes': False,
'': False}
    #==================Phase 1: Pattern detection==================#
    # Grab the specific section (avoids regex conflicts)           #
    # Check which part of the function was called                  #
    # so the correct pattern is used (or we fall back to generic)  #
    # Patterns are reused where possible                           #
    # ===== Patterns for what's new (new episodes) =====
if item.param == 'newepisodes':
section = soup.find('div', class_='animation-2 items')
    elif genericvalues[item.param]:
        if item.param in ('recomended', 'more_watched'):
            if item.param == 'recomended':  # == Patterns for recommended ==
                section = soup.find('div', id='slider-tvshows')
            elif item.param == 'more_watched':  # == Patterns for most watched ==
                section = soup.find('div', class_='items featured')
        elif item.param == 'popular':  # == Patterns for popular ==
            section = soup.find('div', class_='items featured')
        elif item.param == 'search':  # == Patterns for search results ==
            section = soup.find('div', class_='search-page')
elif item.param == 'allanimes':
section = soup.find('div', id='archive-content')
else:
section = soup.find('div', class_='items')
articles = section.find_all('article')
for article in articles:
match = []
if item.param == 'newepisodes':
thumb = article.find('img', class_='lazyload')['data-src']
url = article.find('a')['href']
epnum = scrapertools.find_single_match(article.find('div', class_='epiposter').text, '\d+$')
title = article.find('div', class_='data').text
match = [thumb, url, epnum, title]
        elif genericvalues[item.param]:
thumb = article.find('img', class_='lazyload')['data-src']
fanart = scrapertools.find_single_match(article.find('noscript'), 'src="([^"]+)')
            if item.param in ('recomended', 'more_watched', 'popular'):
                url = article.find('a')['href']
                title = article.find('div', class_='data').find('h3').text
            elif item.param == 'search':  # == Patterns for search results ==
url = article.find('div', class_='title').find('a')['href']
title = article.find('div', class_='title').text
match = [thumb, fanart, url, title]
        else:  # == Generic pattern for common pages ==
thumb = scrapertools.find_single_match(article.find('noscript').text, 'src=["\'](.+?)[\'"]')
contentType = article.find('div', class_='CategoriaEnPoster').text
status = article.find('div', class_='estadoposter').text
url = article.find('div', class_='data').find('a')['href']
title = article.find('div', class_='data').find('h3').text
airdate = ''
year = ''
plot = article.find('div', class_='texto').text
genres = article.find('div', class_='genres')
if article.find("div", class_="data"):
if article.find("div", class_="data").find("span"):
airdate = article.find("div", class_="data").find("span").text.strip()
match = [thumb, contentType, status, url, title, airdate, year, plot, genres]
matches.append(match)
    #==============Phase 2: Value assignment==============#
    # Since each section exposes a different amount of    #
    # information, a different for-loop is needed per case#
logger.info("item.param: "+str(item.param))
    # >>>> Loop for new episodes (goes straight to findvideos) <<<< #
if item.param == "newepisodes":
for scpthumb, scpurl, scpepnum, scptitle in matches:
conType = ''
infoLabels = {}
title, contentTitle, langs = process_title(scptitle.strip(), getWithTags = True, get_contentTitle = True, get_lang = True)
            if scpepnum != '':
infoLabels['episode'] = int(scpepnum)
conType = 'tvshow'
else:
conType = 'movie'
            # -----It almost never returns a season, but in the rare case it does----- #
scpseason = scrapertools.find_single_match(scpurl, 'season.(\d+)')
            if scpseason != '':
                infoLabels['season'] = scpseason
            else:
                infoLabels['season'] = None
itemlist.append(
Item(
action = "findvideos",
channel = item.channel,
contentSerieName = contentTitle,
contentTitle = contentTitle,
contentType = conType,
infoLabels = infoLabels,
language = langs,
title = title,
thumbnail = scpthumb,
url = scpurl
)
)
    # >>>> Loop for similar sections (they yield 4 variables in the same order) <<<< #
elif genericvalues[item.param]:
for scpthumb, scpfanart, scpurl, scptitle in matches:
title, contentTitle, langs = process_title(scptitle.strip(), getWithTags = True, get_contentTitle = True, get_lang = True)
itemlist.append(
Item(
action = "seasons",
channel = item.channel,
contentSerieName = contentTitle,
contentTitle = contentTitle,
contentType = 'tvshow',
language = langs,
title = title,
thumbnail = scpthumb,
url = scpurl
)
)
    # >>>> Loop for generic sections (almost any page outside the main one) <<<< #
else:
for scpthumb, scpcontentType, scpstatus, scpurl, scptitle, scpairdate, scpyear, scpplot, scpgenres in matches:
tagged_title, title, langs = process_title(scptitle.strip(), getWithTags = True, get_contentTitle = True, get_lang = True)
infoLabels = {"status": scpstatus.strip().title()}
if scpairdate:
date = datetime.datetime.strptime(scpairdate, "%b. %d, %Y")
infoLabels['year'] = date.strftime("%Y")
if scpgenres:
genmatch = scpgenres.find_all('a')
if len(genmatch) > 0:
genre = ", ".join([x.text.strip() for x in genmatch])
infoLabels['genre'] = genre.strip()
new_item = Item(
action = "seasons",
channel = item.channel,
infoLabels = infoLabels,
language = langs,
param = item.param,
plot = scpplot,
title = tagged_title,
thumbnail = scpthumb,
url = scpurl
)
if scpcontentType == 'pelicula' or 'pelicula' in item.url:
new_item.contentType = 'movie'
new_item.contentTitle = title
if "date" in locals():
infoLabels['release_date'] = date.strftime("%Y/%m/%d")
else:
new_item.contentType = 'tv'
new_item.contentSerieName = title
if "date" in locals():
infoLabels['first_air_date'] = date.strftime("%Y/%m/%d")
infoLabels['premiered'] = infoLabels['first_air_date']
itemlist.append(new_item)
    #================================Phase 3: Value correction================================#
    #----------Fix cases where it is a movie instead of a series, or odd titles---------------#
    #---Fix the title according to TMDB and clean it depending on the content (series or movie)---#
# set_infoLabels_async(itemlist)
for i in itemlist:
        #---Strip episode numbers and useless whitespace---#
if i.contentType == 'movie':
i.contentTitle = i.infoLabels['title']
i.contentSerieName = ''
else:
i.contentSerieName = i.infoLabels['title']
i.contentTitle = ''
if i.infoLabels['episode']:
pretext = ''
            if i.infoLabels['season']:
pretext += 'S' + str(i.infoLabels['season'])
pretext += 'E' + str(i.infoLabels['episode'])
i.title = pretext + ': ' + i.title
# tmdb.set_infoLabels_itemlist(itemlist, force_no_year=True)
    #======================Phase 4: Pager assignment (when applicable)======================#
    #---If another page is found, append a pager item (only for listings with pages)---#
if not genericvalues[item.param]:
nextpage = get_next_page(data)
if nextpage:
itemlist.append(
Item(
action = 'list_all',
channel = item.channel,
param = item.param,
title = '[COLOR=yellow]Siguiente página >[/COLOR]',
url = nextpage
)
)
return itemlist
def seasons(item, add_to_videolibrary = False):
logger.info()
itemlist = []
soup = create_soup(item.url)
section = soup.find('div', id='seasons')
for article in section.children:
if not article.find('li', class_="none"):
contentType = item.contentType
infoLabels = item.infoLabels
title = item.title
seasontitle = str(article.find('span', class_='title').contents[0])
if not infoLabels['last_air_date'] and not infoLabels['premiered']:
date = article.find('span', class_='title').i.text
date = datetime.datetime.strptime(date, "%b. %d, %Y")
infoLabels['last_air_date'] = date.strftime("%Y/%m/%d")
infoLabels['premiered'] = infoLabels['last_air_date']
if not infoLabels['plot']:
plot = str(soup.find('div', id='info').find('div', class_='wp-content').p.contents[0])
if plot:
infoLabels['plot'] = plot
            # --- If we look up the season number and it is a movie, the site returns the string 'PELI' instead of a number --- #
if 'PELI' in seasontitle:
contentType = 'movie'
else:
if 'Especial' in item.title:
seasonnum = '0'
else:
seasonnum = scrapertools.find_single_match(seasontitle, '(?is)\s(\d+)')
if seasonnum:
contentType = 'tvshow'
infoLabels['season'] = int(seasonnum)
if int(seasonnum) == 0:
title = 'Especiales de ' + item.contentSerieName
else:
title = 'Temporada ' + str(seasonnum)
else:
contentType = 'movie'
itemlist.append(
item.clone(
action = 'episodesxseason',
contentType = contentType,
episode_data = str(article),
infoLabels = infoLabels,
title = title
)
)
tmdb.set_infoLabels_itemlist(itemlist, seekTmdb = True)
    itemlist.reverse()  # The site lists the last episode first, so the list is reversed
if len(itemlist) == 1 and not add_to_videolibrary:
itemlist = episodesxseason(itemlist[0], add_to_videolibrary)
if len(itemlist) > 0 and config.get_videolibrary_support() and not itemlist[0].contentType == 'movie' and not add_to_videolibrary:
itemlist.append(
Item(
action = "add_serie_to_library",
channel = item.channel,
contentSerieName = item.contentSerieName,
extra = "episodios",
title = '[COLOR yellow]{}[/COLOR]'.format(config.get_localized_string(70092)),
url = item.url
)
)
return itemlist
def episodios(item):
logger.info()
itemlist = []
if not item.contentType == 'movie':
seasons_list = seasons(item, True)
for season in seasons_list:
itemlist.extend(episodesxseason(season, True))
else:
itemlist.extend(findvideos(item, True))
return itemlist
def episodesxseason(item, add_to_videolibrary = False):
logger.info()
itemlist = []
if item.episode_data or item.param == 'pager':
soup = BeautifulSoup(item.episode_data, "html5lib", from_encoding="utf-8")
else:
soup = create_soup(item.url)
soup = soup.find('div', id='episodes')
seasons = soup.find_all('div', class_='se-c')
for season in seasons:
seasonnum = scrapertools.find_single_match(str(season.find('span', class_='title').contents[0]), '(?is)\s(\d+)')
if seasonnum:
if item.infoLabels['season'] == int(seasonnum):
soup = season
episodes = soup.find('ul', class_='episodios')
remainingitems = None
if len(episodes.contents) > 30 and not add_to_videolibrary:
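        # Keep only the first 30 episodes on this page; everything past that is
        # detached into a separate <ul> that feeds the pager item added below.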
remainingitems = BeautifulSoup('<ul class="episodios"></ul>', "html5lib", from_encoding="utf-8")
remainingcount = int(len(episodes.contents) - 30)
        for _ in range(remainingcount):
            remainingitems.find('ul', class_='episodios').append(episodes.li.extract())
for episode in episodes.children:
contentType = item.contentType
infoLabels = item.infoLabels
infoLabels['title'] = ''
        epname = str(episode.find('div', class_='episodiotitle').a.string)
        epnum = scrapertools.find_single_match(epname, '(?is)(\d+)')
        title = epname
if not contentType == 'movie':
            if 'MOVIE' in epname:
contentType = 'movie'
elif epnum:
infoLabels['episode'] = int(epnum)
itemlist.append(
item.clone(
action = 'findvideos',
contentType = contentType,
infoLabels = infoLabels,
title = title,
thumbnail = str(episode.find('img', class_='lazyload')['data-src']),
url = str(episode.find(class_='episodiotitle').a['href'])
)
)
itemlist.reverse()
tmdb.set_infoLabels(itemlist, seekTmdb = True)
for i in itemlist:
if i.infoLabels['episode'] and i.infoLabels['title']:
ss_and_ep = scrapertools.get_season_and_episode('{}x{}'.format(str(i.infoLabels['season']), str(i.infoLabels['episode'])))
i.title = '{}: {}'.format(ss_and_ep, i.infoLabels['title'])
elif i.infoLabels['episode']:
ss_and_ep = scrapertools.get_season_and_episode('{}x{}'.format(str(i.infoLabels['season']), str(i.infoLabels['episode'])))
i.title = '{}: {}'.format(ss_and_ep, i.title)
if remainingitems:
itemlist.append(
item.clone(
action = 'episodesxseason',
episode_data = str(remainingitems),
param = 'pager',
title = '[COLOR=yellow]Siguiente página >[/COLOR]'
)
)
if len(itemlist) == 1 and (not add_to_videolibrary or item.contentType == 'movie'):
return findvideos(itemlist[0], add_to_videolibrary)
else:
return itemlist
def findvideos(item, add_to_videolibrary = False):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
base_url = '{}/wp-json/dooplayer/v1/post/'.format(host)
postnum = scrapertools.find_single_match(data, '(?is)data-post=.(\d+).*?')
srcsection = scrapertools.find_single_match(data, '(?is)playeroptionsul.+?</ul>')
srccount = scrapertools.find_multiple_matches(srcsection, '(?is)<li .+?data-nume=["|\'](.+?)["|\']')
urls = ''
    for source in srccount:
        composed_url = '{}{}?type=tv&source={}'.format(base_url, postnum, source)
response = jsontools.load(httptools.downloadpage(composed_url).data)
if not response['embed_url'].startswith('http'):
response['embed_url'] = 'https:{}'.format(response['embed_url'])
urls = '{}{}\n'.format(urls, response['embed_url'])
    itemlist.extend(servertools.find_video_items(item=item, data=urls))
itemlist = servertools.get_servers_itemlist(itemlist, None, True)
for it in itemlist:
it.title = '[{}] {}'.format(it.server.title(), it.contentTitle)
if len(itemlist) > 0 and config.get_videolibrary_support() and not add_to_videolibrary \
and item.contentTitle and item.contentType == 'movie':
itemlist.append(
Item(
action = "add_pelicula_to_library",
channel = item.channel,
contentTitle = item.contentTitle,
extra = "episodios",
title = '[COLOR yellow]{}[/COLOR]'.format(config.get_localized_string(70092)),
url = item.url
)
)
return itemlist
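# For reference, the player endpoint composed above ends up looking roughly
# like the following (post id and source index are illustrative):
#   <host>/wp-json/dooplayer/v1/post/12345?type=tv&source=1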
def search(item, text):
logger.info()
itemlist = []
if text != '':
try:
text = scrapertools.slugify(text)
text = text.replace('-', '+')
item.url += text
item.param = "search"
return list_all(item)
except:
for line in sys.exc_info():
logger.error("%s" % line)
return itemlist | gpl-3.0 |
MobinRanjbar/hue | desktop/core/ext-py/django-axes-1.4.0/examples/example/settings.py | 25 | 7878 | # Django settings for example project.
import os
PROJECT_DIR = lambda base : os.path.abspath(os.path.join(os.path.dirname(__file__), base).replace('\\','/'))
gettext = lambda s: s
DEBUG = False
DEBUG_TOOLBAR = False
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': PROJECT_DIR('../db/example.db'), # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
#LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = PROJECT_DIR(os.path.join('..', 'media'))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = PROJECT_DIR(os.path.join('..', 'static'))
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
PROJECT_DIR(os.path.join('..', 'media', 'static')),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '6sf18c*w971i8a-m^1coasrmur2k6+q5_kyn*)s@(*_dk5q3&r'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
'axes.middleware.FailedLoginMiddleware'
)
ROOT_URLCONF = 'urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'wsgi.application'
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.request"
)
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
PROJECT_DIR('templates')
)
INSTALLED_APPS = (
# Django core and contrib apps
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.sitemaps',
'axes',
# Test app
'foo',
)
LOGIN_REDIRECT_URL = '/admin'
# ******************** django-axes settings *********************
# Max number of login attempts within the ``AXES_COOLOFF_TIME``
AXES_LOGIN_FAILURE_LIMIT = 3
from datetime import timedelta
AXES_COOLOFF_TIME = timedelta(seconds=200)
# ******************** /django-axes settings *********************
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s [%(pathname)s:%(lineno)s] %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
},
'django_log': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': PROJECT_DIR("../logs/django.log"),
'maxBytes': 1048576,
'backupCount': 99,
'formatter': 'verbose',
},
'axes_log': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': PROJECT_DIR("../logs/axes.log"),
'maxBytes': 1048576,
'backupCount': 99,
'formatter': 'verbose',
},
},
'loggers': {
'django': {
'handlers': ['django_log'],
'level': 'ERROR',
'propagate': True,
},
'axes': {
'handlers': ['console', 'axes_log'],
'level': 'DEBUG',
'propagate': True,
},
},
}
# Do not put any settings below this line
try:
from local_settings import *
except ImportError:
pass
if DEBUG and DEBUG_TOOLBAR:
# debug_toolbar
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
INSTALLED_APPS += (
'debug_toolbar',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
}
| apache-2.0 |
quattor/aquilon | lib/aquilon/aqdb/model/network_device.py | 1 | 2044 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014,2017,2018 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Network Devices """
from datetime import datetime
from sqlalchemy import Column, ForeignKey, DateTime
from sqlalchemy.orm import validates
from aquilon.exceptions_ import ArgumentError
from aquilon.aqdb.model import HardwareEntity, Building
from aquilon.aqdb.column_types import Enum
from aquilon.config import Config
SWITCH_TYPES = ('tor', 'bor', 'agg', 'misc')
_TN = 'network_device'
_config = Config()
class NetworkDevice(HardwareEntity):
__tablename__ = _TN
_class_label = 'Switch'
__mapper_args__ = {'polymorphic_identity': _TN}
hardware_entity_id = Column(ForeignKey(HardwareEntity.id,
ondelete='CASCADE'),
primary_key=True)
switch_type = Column(Enum(16, SWITCH_TYPES), nullable=False)
last_poll = Column(DateTime, nullable=False, default=datetime.now)
@classmethod
def check_type(cls, type):
if type is not None and type not in SWITCH_TYPES:
raise ArgumentError("Unknown switch type '%s'." % type)
    @validates('location')
    def validates_location(self, key, value):
if isinstance(value, Building) and value.netdev_rack:
raise ArgumentError("This building is restricted to use racks as location: "
"--rack must be specified when adding new network devices.")
return value
| apache-2.0 |
numenta/nupic | src/nupic/regions/sdr_classifier_region.py | 6 | 18282 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2016, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This file implements the SDR Classifier region. See the comments in the class
definition of SDRClassifierRegion for a description.
"""
import warnings
from nupic.bindings.regions.PyRegion import PyRegion
from nupic.algorithms.sdr_classifier_factory import SDRClassifierFactory
from nupic.support.configuration import Configuration
try:
import capnp
except ImportError:
capnp = None
if capnp:
from nupic.regions.SDRClassifierRegion_capnp import SDRClassifierRegionProto
class SDRClassifierRegion(PyRegion):
"""
SDRClassifierRegion implements a SDR classifier that accepts a binary
input from the level below (the "activationPattern") and information from the
sensor and encoders (the "classification") describing the input to the system
at that time step.
The SDR classifier maps input patterns to class labels. There are as many
output units as the number of class labels or buckets (in the case of scalar
encoders). The output is a probabilistic distribution over all class labels.
During inference, the output is calculated by first doing a weighted summation
of all the inputs, and then perform a softmax nonlinear function to get
the predicted distribution of class labels
During learning, the connection weights between input units and output units
are adjusted to maximize the likelihood of the model
The caller can choose to tell the region that the classifications for
iteration N+K should be aligned with the activationPattern for iteration N.
This results in the classifier producing predictions for K steps in advance.
Any number of different K's can be specified, allowing the classifier to learn
and infer multi-step predictions for a number of steps in advance.
  :param steps: (string) comma-separated list of prediction steps, default="1"
:param alpha: (float) default=0.001
:param verbosity: (int) How verbose to log, default=0
:param implementation: (string) default=None
:param maxCategoryCount: (int) default=None
"""
@classmethod
def getSpec(cls):
"""
Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.getSpec`.
"""
ns = dict(
description=SDRClassifierRegion.__doc__,
singleNodeOnly=True,
inputs=dict(
actValueIn=dict(
description="Actual value of the field to predict. Only taken "
"into account if the input has no category field.",
dataType="Real32",
count=0,
required=False,
regionLevel=False,
isDefaultInput=False,
requireSplitterMap=False),
bucketIdxIn=dict(
description="Active index of the encoder bucket for the "
"actual value of the field to predict. Only taken "
"into account if the input has no category field.",
dataType="UInt64",
count=0,
required=False,
regionLevel=False,
isDefaultInput=False,
requireSplitterMap=False),
categoryIn=dict(
description='Vector of categories of the input sample',
dataType='Real32',
count=0,
required=True,
regionLevel=True,
isDefaultInput=False,
requireSplitterMap=False),
bottomUpIn=dict(
description='Belief values over children\'s groups',
dataType='Real32',
count=0,
required=True,
regionLevel=False,
isDefaultInput=True,
requireSplitterMap=False),
predictedActiveCells=dict(
description="The cells that are active and predicted",
dataType='Real32',
count=0,
required=True,
regionLevel=True,
isDefaultInput=False,
requireSplitterMap=False),
sequenceIdIn=dict(
description="Sequence ID",
dataType='UInt64',
count=1,
required=False,
regionLevel=True,
isDefaultInput=False,
requireSplitterMap=False),
),
outputs=dict(
categoriesOut=dict(
description='Classification results',
dataType='Real32',
count=0,
regionLevel=True,
isDefaultOutput=False,
requireSplitterMap=False),
actualValues=dict(
description='Classification results',
dataType='Real32',
count=0,
regionLevel=True,
isDefaultOutput=False,
requireSplitterMap=False),
probabilities=dict(
description='Classification results',
dataType='Real32',
count=0,
regionLevel=True,
isDefaultOutput=False,
requireSplitterMap=False),
),
parameters=dict(
learningMode=dict(
description='Boolean (0/1) indicating whether or not a region '
'is in learning mode.',
dataType='UInt32',
count=1,
constraints='bool',
defaultValue=1,
accessMode='ReadWrite'),
inferenceMode=dict(
description='Boolean (0/1) indicating whether or not a region '
'is in inference mode.',
dataType='UInt32',
count=1,
constraints='bool',
defaultValue=0,
accessMode='ReadWrite'),
maxCategoryCount=dict(
description='The maximal number of categories the '
'classifier will distinguish between.',
dataType='UInt32',
required=True,
count=1,
constraints='',
# arbitrarily large value
defaultValue=2000,
accessMode='Create'),
steps=dict(
description='Comma separated list of the desired steps of '
'prediction that the classifier should learn',
dataType="Byte",
count=0,
constraints='',
defaultValue='0',
accessMode='Create'),
alpha=dict(
description='The alpha is the learning rate of the classifier.'
'lower alpha results in longer term memory and slower '
'learning',
dataType="Real32",
count=1,
constraints='',
defaultValue=0.001,
accessMode='Create'),
implementation=dict(
description='The classifier implementation to use.',
accessMode='ReadWrite',
dataType='Byte',
count=0,
constraints='enum: py, cpp'),
verbosity=dict(
description='An integer that controls the verbosity level, '
'0 means no verbose output, increasing integers '
'provide more verbosity.',
dataType='UInt32',
count=1,
constraints='',
defaultValue=0,
accessMode='ReadWrite'),
),
commands=dict()
)
return ns
def __init__(self,
steps='1',
alpha=0.001,
verbosity=0,
implementation=None,
maxCategoryCount=None
):
# Set default implementation
if implementation is None:
implementation = Configuration.get(
'nupic.opf.sdrClassifier.implementation')
self.implementation = implementation
# Convert the steps designation to a list
self.steps = steps
self.stepsList = [int(i) for i in steps.split(",")]
self.alpha = alpha
self.verbosity = verbosity
# Initialize internal structures
self._sdrClassifier = None
self.learningMode = True
self.inferenceMode = False
self.maxCategoryCount = maxCategoryCount
self.recordNum = 0
# Flag to know if the compute() function is ever called. This is to
# prevent backward compatibilities issues with the customCompute() method
# being called at the same time as the compute() method. Only compute()
# should be called via network.run(). This flag will be removed once we
# get to cleaning up the htm_prediction_model.py file.
self._computeFlag = False
def initialize(self):
"""
Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.initialize`.
Is called once by NuPIC before the first call to compute().
Initializes self._sdrClassifier if it is not already initialized.
"""
if self._sdrClassifier is None:
self._sdrClassifier = SDRClassifierFactory.create(
steps=self.stepsList,
alpha=self.alpha,
verbosity=self.verbosity,
implementation=self.implementation,
)
def getAlgorithmInstance(self):
"""
    :returns: the underlying SDR classifier instance created by
        :class:`~nupic.algorithms.sdr_classifier_factory.SDRClassifierFactory`
"""
return self._sdrClassifier
def getParameter(self, name, index=-1):
"""
Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.getParameter`.
"""
# If any spec parameter name is the same as an attribute, this call
# will get it automatically, e.g. self.learningMode
return PyRegion.getParameter(self, name, index)
def setParameter(self, name, index, value):
"""
Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.setParameter`.
"""
if name == "learningMode":
self.learningMode = bool(int(value))
elif name == "inferenceMode":
self.inferenceMode = bool(int(value))
else:
return PyRegion.setParameter(self, name, index, value)
@staticmethod
def getSchema():
"""
:returns: the pycapnp proto type that the class uses for serialization.
"""
return SDRClassifierRegionProto
def writeToProto(self, proto):
"""
Write state to proto object.
:param proto: SDRClassifierRegionProto capnproto object
"""
proto.implementation = self.implementation
proto.steps = self.steps
proto.alpha = self.alpha
proto.verbosity = self.verbosity
proto.maxCategoryCount = self.maxCategoryCount
proto.learningMode = self.learningMode
proto.inferenceMode = self.inferenceMode
proto.recordNum = self.recordNum
self._sdrClassifier.write(proto.sdrClassifier)
@classmethod
def readFromProto(cls, proto):
"""
Read state from proto object.
:param proto: SDRClassifierRegionProto capnproto object
"""
instance = cls()
instance.implementation = proto.implementation
instance.steps = proto.steps
instance.stepsList = [int(i) for i in proto.steps.split(",")]
instance.alpha = proto.alpha
instance.verbosity = proto.verbosity
instance.maxCategoryCount = proto.maxCategoryCount
instance._sdrClassifier = SDRClassifierFactory.read(proto)
instance.learningMode = proto.learningMode
instance.inferenceMode = proto.inferenceMode
instance.recordNum = proto.recordNum
return instance
def compute(self, inputs, outputs):
"""
Process one input sample.
This method is called by the runtime engine.
:param inputs: (dict) mapping region input names to numpy.array values
:param outputs: (dict) mapping region output names to numpy.arrays that
should be populated with output values by this method
"""
# This flag helps to prevent double-computation, in case the deprecated
# customCompute() method is being called in addition to compute() called
# when network.run() is called
self._computeFlag = True
patternNZ = inputs["bottomUpIn"].nonzero()[0]
if self.learningMode:
# An input can potentially belong to multiple categories.
# If a category value is < 0, it means that the input does not belong to
# that category.
categories = [category for category in inputs["categoryIn"]
if category >= 0]
if len(categories) > 0:
# Allow to train on multiple input categories.
bucketIdxList = []
actValueList = []
for category in categories:
bucketIdxList.append(int(category))
if "actValueIn" not in inputs:
actValueList.append(int(category))
else:
actValueList.append(float(inputs["actValueIn"]))
classificationIn = {"bucketIdx": bucketIdxList,
"actValue": actValueList}
else:
# If the input does not belong to a category, i.e. len(categories) == 0,
# then look for bucketIdx and actValueIn.
if "bucketIdxIn" not in inputs:
raise KeyError("Network link missing: bucketIdxOut -> bucketIdxIn")
if "actValueIn" not in inputs:
raise KeyError("Network link missing: actValueOut -> actValueIn")
classificationIn = {"bucketIdx": int(inputs["bucketIdxIn"]),
"actValue": float(inputs["actValueIn"])}
else:
# Use Dummy classification input, because this param is required even for
# inference mode. Because learning is off, the classifier is not learning
# this dummy input. Inference only here.
classificationIn = {"actValue": 0, "bucketIdx": 0}
# Perform inference if self.inferenceMode is True
# Train classifier if self.learningMode is True
clResults = self._sdrClassifier.compute(recordNum=self.recordNum,
patternNZ=patternNZ,
classification=classificationIn,
learn=self.learningMode,
infer=self.inferenceMode)
# fill outputs with clResults
if clResults is not None and len(clResults) > 0:
outputs['actualValues'][:len(clResults["actualValues"])] = \
clResults["actualValues"]
for step in self.stepsList:
stepIndex = self.stepsList.index(step)
categoryOut = clResults["actualValues"][clResults[step].argmax()]
outputs['categoriesOut'][stepIndex] = categoryOut
# Flatten the rest of the output. For example:
# Original dict {1 : [0.1, 0.3, 0.2, 0.7]
# 4 : [0.2, 0.4, 0.3, 0.5]}
# becomes: [0.1, 0.3, 0.2, 0.7, 0.2, 0.4, 0.3, 0.5]
stepProbabilities = clResults[step]
for categoryIndex in xrange(self.maxCategoryCount):
flatIndex = categoryIndex + stepIndex * self.maxCategoryCount
if categoryIndex < len(stepProbabilities):
outputs['probabilities'][flatIndex] = \
stepProbabilities[categoryIndex]
else:
outputs['probabilities'][flatIndex] = 0.0
self.recordNum += 1
def customCompute(self, recordNum, patternNZ, classification):
"""
Just return the inference value from one input sample. The actual
learning happens in compute() -- if, and only if learning is enabled --
which is called when you run the network.
.. warning:: This method is deprecated and exists only to maintain backward
compatibility. This method is deprecated, and will be removed. Use
:meth:`nupic.engine.Network.run` instead, which will call
:meth:`~nupic.regions.sdr_classifier_region.compute`.
:param recordNum: (int) Record number of the input sample.
:param patternNZ: (list) of the active indices from the output below
:param classification: (dict) of the classification information:
* ``bucketIdx``: index of the encoder bucket
* ``actValue``: actual value going into the encoder
:returns: (dict) containing inference results, one entry for each step in
``self.steps``. The key is the number of steps, the value is an
array containing the relative likelihood for each ``bucketIdx``
starting from 0.
For example:
::
{'actualValues': [0.0, 1.0, 2.0, 3.0]
1 : [0.1, 0.3, 0.2, 0.7]
4 : [0.2, 0.4, 0.3, 0.5]}
"""
# If the compute flag has not been initialized (for example if we
# restored a model from an old checkpoint) initialize it to False.
if not hasattr(self, "_computeFlag"):
self._computeFlag = False
if self._computeFlag:
# Will raise an exception if the deprecated method customCompute() is
# being used at the same time as the compute function.
warnings.simplefilter('error', DeprecationWarning)
warnings.warn("The customCompute() method should not be "
"called at the same time as the compute() "
"method. The compute() method is called "
"whenever network.run() is called.",
DeprecationWarning)
return self._sdrClassifier.compute(recordNum,
patternNZ,
classification,
self.learningMode,
self.inferenceMode)
def getOutputElementCount(self, outputName):
"""
Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.getOutputElementCount`.
"""
if outputName == "categoriesOut":
return len(self.stepsList)
elif outputName == "probabilities":
return len(self.stepsList) * self.maxCategoryCount
elif outputName == "actualValues":
return self.maxCategoryCount
else:
raise ValueError("Unknown output {}.".format(outputName))
| agpl-3.0 |
trotterdylan/grumpy | lib/os/path.py | 5 | 2409 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""""Utilities for manipulating and inspecting OS paths."""
from '__go__/os' import Stat
from '__go__/path/filepath' import Abs, Base, Clean, Dir as dirname, IsAbs as isabs, Join, Split # pylint: disable=g-multiple-import,unused-import
def abspath(path):
result, err = Abs(path)
if err:
raise OSError(err.Error())
if isinstance(path, unicode):
# Grumpy compiler encoded the string into utf-8, so the result can be
# decoded using utf-8.
return unicode(result, 'utf-8')
return result
def basename(path):
return '' if path.endswith('/') else Base(path)
def exists(path):
_, err = Stat(path)
return err is None
def isdir(path):
info, err = Stat(path)
if info and err is None:
return info.Mode().IsDir()
return False
def isfile(path):
info, err = Stat(path)
if info and err is None:
return info.Mode().IsRegular()
return False
# NOTE(compatibility): This method uses Go's filepath.Join() method which
# implicitly normalizes the resulting path (pruning extra /, .., etc.) The usual
# CPython behavior is to leave all the cruft. This deviation is reasonable
# because a) result paths will point to the same files and b) one cannot assume
# much about the results of join anyway since it's platform dependent.
def join(*paths):
if not paths:
raise TypeError('join() takes at least 1 argument (0 given)')
parts = []
for p in paths:
if isabs(p):
parts = [p]
else:
parts.append(p)
result = Join(*parts)
if result and not paths[-1]:
result += '/'
return result
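# Illustrative consequences of the Go Join() semantics noted above (expected
# results; not verified against CPython):
#   join('a', 'b', '..', 'c')  # -> 'a/c' (CPython would keep 'a/b/../c')
#   join('a', '/b', 'c')       # -> '/b/c' (absolute part resets the result)
#   join('a', '')              # -> 'a/' (trailing empty part keeps the slash)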
def normpath(path):
result = Clean(path)
if isinstance(path, unicode):
return unicode(result, 'utf-8')
return result
def split(path):
head, tail = Split(path)
if len(head) > 1 and head[-1] == '/':
head = head[:-1]
return (head, tail)
| apache-2.0 |
ioana-delaney/spark | python/pyspark/util.py | 2 | 2303 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import inspect
from py4j.protocol import Py4JJavaError
__all__ = []
def _exception_message(excp):
"""Return the message from an exception as either a str or unicode object. Supports both
Python 2 and Python 3.
>>> msg = "Exception message"
>>> excp = Exception(msg)
>>> msg == _exception_message(excp)
True
>>> msg = u"unicöde"
>>> excp = Exception(msg)
>>> msg == _exception_message(excp)
True
"""
if isinstance(excp, Py4JJavaError):
# 'Py4JJavaError' doesn't contain the stack trace available on the Java side in 'message'
# attribute in Python 2. We should call 'str' function on this exception in general but
# 'Py4JJavaError' has an issue about addressing non-ascii strings. So, here we work
# around by the direct call, '__str__()'. Please see SPARK-23517.
return excp.__str__()
if hasattr(excp, "message"):
return excp.message
return str(excp)
def _get_argspec(f):
"""
Get argspec of a function. Supports both Python 2 and Python 3.
"""
# `getargspec` is deprecated since python3.0 (incompatible with function annotations).
# See SPARK-23569.
if sys.version_info[0] < 3:
argspec = inspect.getargspec(f)
else:
argspec = inspect.getfullargspec(f)
return argspec
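# Example (illustrative): for `f = lambda a, b=1: a`, both code paths yield an
# argspec whose `.args` is ['a', 'b'] and whose `.defaults` is (1,).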
if __name__ == "__main__":
import doctest
(failure_count, test_count) = doctest.testmod()
if failure_count:
sys.exit(-1)
| apache-2.0 |
reiven/pungabot | modules/module_salute.py | 1 | 1049 | import datetime
import random
def handle_userJoined(bot, user, channel):
"""Someone Joined, lets salute him"""
if random.randrange(10) < 8:
return
tarde = datetime.time(13, 00)
noche = datetime.time(20, 00)
manha = datetime.time(07, 00)
ahora = datetime.datetime.now().time()
# custom ahora, for debug
#ahora = datetime.time(07,00)
if ahora > noche or ahora < manha:
hi = random.choice(['wenas noches', 'ah we.. ', 'nas noches',
'welcome back', 'nas noches', 'guten abend', 'uh']
)
return bot.say(channel, '%s, %s' % (getNick(user),hi))
elif ahora > tarde:
hi = random.choice(['guten tag', 'estas son horas de llegar?',
'wenas tardes', 'doctor!', 'alo', 'apa!']
)
return bot.say(channel, '%s, %s' % (getNick(user),hi))
else:
hi = random.choice(['morning', 'wen dia', 'madrugando eh?',
'que temprano!!!', 'guten morgen', 'si! q tal'])
return bot.say(channel, '%s, %s' % (getNick(user),hi))
| gpl-3.0 |
sirex/manopozicija.lt | scripts/runtests.py | 2 | 3211 | #!/usr/bin/env python3
"""
Run project tests.
This script is mostly useful for running tests in a single file.
"""
import sys
import argparse
import subprocess
import pathlib
def get_cover_package(path):
if ':' in path:
path = path[:path.index(':')]
base = pathlib.Path(__file__).parents[1].resolve()
path = pathlib.Path(path).resolve()
path = path.relative_to(base)
if len(path.parts) > 1:
return '.'.join(path.parts[:2])
else:
return path.parts[0]
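# Example (illustrative): a test path like 'manopozicija/frontpage/tests.py'
# resolves to the cover package 'manopozicija.frontpage', while a top-level
# 'scripts' path resolves to just 'scripts'.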
def get_paths(paths):
if paths:
for path in paths:
if ':' in path:
path = path[:path.index(':')]
yield path
else:
yield 'manopozicija'
def is_coverage_enabled(args):
if args.nocoverage or args.profile:
return False
else:
return True
def run_tests(args):
if args.fast:
settings = 'manopozicija.settings.fasttesting'
else:
settings = 'manopozicija.settings.testing'
cmd = [
'bin/django', 'test',
'--settings=%s' % settings,
'--nocapture',
'--nologcapture',
'--all-modules',
'--with-doctest',
'--doctest-tests',
'--noinput',
] + args.paths
if args.profile:
cmd = [
'bin/kernprof',
'--line-by-line',
'--builtin',
'--outfile=/dev/null',
'--view',
] + cmd
elif is_coverage_enabled(args):
coverage_modules = list(set(map(get_cover_package, args.paths)))
subprocess.call(['bin/coverage', 'erase'])
cmd = [
'bin/coverage', 'run',
'--source=%s' % ','.join(coverage_modules),
] + cmd
return subprocess.call(cmd)
def run_flake8(args):
cmd = [
'bin/flake8',
'--exclude=migrations',
'--ignore=E501,E241',
] + list(get_paths(args.paths))
return subprocess.call(cmd)
def run_pylint(args):
cmd = [
'bin/pylint',
'--msg-template="%s"' % (
'{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}',
)
] + list(get_paths(args.paths))
return subprocess.call(cmd)
def run_coverage_report(args):
# Also see .coveragerc
return subprocess.call(['bin/coverage', 'report', '--show-missing'])
def main(args=None):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('paths', nargs='+', help='paths to test files')
parser.add_argument(
'--fast', action='store_true', default=False,
help='run tests with manopozicija.settings.fasttests settings',
)
parser.add_argument(
'--profile', action='store_true', default=False,
help='run tests with line profiler',
)
parser.add_argument(
'--nocoverage', action='store_true', default=False,
help='run tests without test coverage report',
)
args = parser.parse_args(args)
retcode = run_tests(args)
if retcode == 0:
retcode = run_flake8(args)
if retcode == 0:
retcode = run_pylint(args)
if retcode == 0 and is_coverage_enabled(args):
run_coverage_report(args)
sys.exit(retcode)
if __name__ == '__main__':
main()
| agpl-3.0 |
tranhuucuong91/autoscaling | docs/learning-by-doing/week10-restful-api/cuongnb14/scaling/models.py | 1 | 4240 | """Declare all classes mapping the autoscaling database tables in MariaDB.
author: cuongnb14@gmail.com
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship, backref
Base = declarative_base()
class App(Base):
"""mapping with table apps"""
__tablename__ = 'apps'
Id = Column(Integer, primary_key=True)
app_uuid = Column(String)
name = Column(String)
min_instances = Column(Integer)
max_instances = Column(Integer)
enabled = Column(Integer)
locked = Column(Integer)
next_time = Column(Integer)
#policies = relationship("Policie", order_by="Policie.Id", backref="app")
#crons = relationship("Cron", order_by="Cron.Id", backref="app")
def __repr__(self):
return '{"Id": '+str(self.Id)+', "app_uuid": "'+str(self.app_uuid)+'","name": "'+str(self.name)+'","min_instances": '+str(self.min_instances)+',"max_instances": '+str(self.max_instances)+',"enabled": '+str(self.enabled)+',"locked": '+str(self.locked)+',"next_time": '+str(self.next_time)+'}'
class Policie(Base):
"""mapping with table policies"""
__tablename__ = 'policies'
Id = Column(Integer, primary_key=True)
app_uuid = Column(String, ForeignKey('apps.Id'))
metric_type = Column(Integer)
upper_threshold = Column(Integer)
lower_threshold = Column(Integer)
instances_out = Column(Integer)
instances_in = Column(Integer)
cooldown_period = Column(Integer)
measurement_period = Column(Integer)
deleted = Column(Integer)
#app = relationship("App", backref=backref('policies', order_by=Id))
def __repr__(self):
return '{"Id": '+str(self.Id)+', "app_uuid": "'+str(self.app_uuid)+'","metric_type": '+str(self.metric_type)+',"upper_threshold": '+str(self.upper_threshold)+',"lower_threshold": '+str(self.lower_threshold)+',"instances_out": '+str(self.instances_out)+',"instances_in": '+str(self.instances_in)+',"cooldown_period": '+str(self.cooldown_period)+', "measurement_period": '+str(self.measurement_period)+', "deleted": '+str(self.deleted)+'}'
class Cron(Base):
"""mapping with table crons"""
__tablename__ = 'crons'
Id = Column(Integer, primary_key=True)
app_uuid = Column(String, ForeignKey('apps.Id'))
cron_uuid = Column(String)
min_instances = Column(Integer)
max_instances = Column(Integer)
cron_string = Column(String)
deleted = Column(Integer)
#app = relationship("App", backref=backref('crons', order_by=Id))
def __repr__(self):
return '{"Id": '+str(self.Id)+', "app_uuid": "'+str(self.app_uuid)+'","cron_uuid": '+str(self.cron_uuid)+',"min_instances": '+str(self.min_instances)+',"max_instances": '+str(self.max_instances)+',"cron_string": '+str(self.cron_string)+',"deleted": '+str(self.deleted)+'}'
engine = create_engine("mysql://autoscaling:autoscaling@123@172.17.42.1:3306/autoscaling", encoding='utf-8', echo=True)
Session = sessionmaker(bind=engine)
session = Session()
def get_by_id(table_class, id):
"""Return record of table_class have id is $id
@param Base table_class Class of table. Ex: models.Apps
@param int id id of record
"""
result = session.query(table_class).filter_by(Id=id).first()
return result
def get_all(table_class):
"""Return all record of table_class
@param Base table_class Class of table. Ex: models.Apps
"""
result = session.query(table_class)
return result
def get_app_of_appname(app_name):
app = session.query(App).filter_by(name=app_name).first()
return app
def get_policies_of_appuuid(app_uuid):
"""Return all policies of app_uuid
@param string app_uuid
"""
policies = session.query(Policie).filter_by(app_uuid=app_uuid)
return policies
def get_policies_of_appname(app_name):
"""Return all policies of app_name
@param string app_name
"""
app = session.query(App).filter_by(name=app_name).first()
policies = session.query(Policie).filter_by(app_uuid=app.app_uuid)
return policies
def to_json(result):
"""Return json string of result query
@param Base result result query
@return string string json
"""
list_record = []
for record in result:
list_record.append(str(record))
json_record = "["+",".join(list_record)+"]"
return json_record
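# Illustrative usage (assumes the MariaDB instance configured above is
# reachable and seeded; the app name is hypothetical):
#   policies = get_policies_of_appname('my-app')
#   print(to_json(policies))   # -> '[{"Id": 1, "app_uuid": "...", ...}]'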
| apache-2.0 |
scylladb/scylla-cluster-tests | performance_regression_alternator_test.py | 1 | 14260 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright (c) 2020 ScyllaDB
import contextlib
from performance_regression_test import PerformanceRegressionTest
from sdcm.sct_events.group_common_events import ignore_operation_errors, ignore_alternator_client_errors
from sdcm.utils import alternator
class PerformanceRegressionAlternatorTest(PerformanceRegressionTest):
def __init__(self, *args):
super().__init__(*args)
        # downgrade YCSB client errors and timeouts to warnings for all the tests in this class
self.stack = contextlib.ExitStack()
self.stack.enter_context(ignore_alternator_client_errors())
self.stack.enter_context(ignore_operation_errors())
def _workload(self, stress_cmd, stress_num, test_name=None, sub_type=None, keyspace_num=1, prefix='', debug_message='', # pylint: disable=too-many-arguments,arguments-differ
save_stats=True, is_alternator=True):
if not is_alternator:
stress_cmd = stress_cmd.replace('dynamodb', 'cassandra-cql')
if debug_message:
self.log.debug(debug_message)
if save_stats:
self.create_test_stats(test_name=test_name, sub_type=sub_type,
doc_id_with_timestamp=True, append_sub_test_to_name=False)
stress_queue = self.run_stress_thread(stress_cmd=stress_cmd, stress_num=stress_num, keyspace_num=keyspace_num,
prefix=prefix, stats_aggregate_cmds=False)
self.get_stress_results(queue=stress_queue, store_results=True)
if save_stats:
self.update_test_details(scylla_conf=True, alternator=is_alternator)
def create_alternator_table(self, schema, alternator_write_isolation):
node = self.db_cluster.nodes[0]
# drop tables
table_name = alternator.consts.TABLE_NAME
if self.alternator.is_table_exists(node=node, table_name=table_name):
self.alternator.delete_table(node=node, table_name=table_name)
# create new tables
self.log.info("Going to create alternator tables")
self.alternator.create_table(node=node, schema=schema, alternator_write_isolation=alternator_write_isolation)
self.run_fstrim_on_all_db_nodes()
self.wait_no_compactions_running()
def create_cql_ks_and_table(self, field_number):
node = self.db_cluster.nodes[0]
with self.db_cluster.cql_connection_patient(node) as session:
session.execute(
"""create keyspace ycsb WITH REPLICATION = {'class' : 'SimpleStrategy', 'replication_factor': 3 };""")
fields = ', '.join([f"field{i} varchar" for i in range(field_number)])
session.execute(f"""CREATE TABLE ycsb.usertable (
y_id varchar primary key,
{fields});""")
def preload_data(self):
        # if the test requires pre-populating data
prepare_write_cmd = self.params.get('prepare_write_cmd')
if prepare_write_cmd:
# create new document in ES with doc_id = test_id + timestamp
            # to allow results to be saved correctly for later comparison
self.create_test_stats(sub_type='write-prepare', doc_id_with_timestamp=True)
stress_queue = list()
params = {'prefix': 'preload-'}
for stress_type in ['dynamodb', 'cassandra-cql']:
# Check if the prepare_cmd is a list of commands
if not isinstance(prepare_write_cmd, str) and len(prepare_write_cmd) > 1:
# Check if it should be round_robin across loaders
if self.params.get('round_robin'):
self.log.debug('Populating data using round_robin')
params.update({'stress_num': 1, 'round_robin': True})
for stress_cmd in prepare_write_cmd:
params.update({'stress_cmd': stress_cmd.replace('dynamodb', stress_type)})
# Run all stress commands
params.update(dict(stats_aggregate_cmds=False))
self.log.debug('RUNNING stress cmd: {}'.format(stress_cmd.replace('dynamodb', stress_type)))
stress_queue.append(self.run_stress_thread(**params))
# One stress cmd command
else:
stress_queue.append(self.run_stress_thread(stress_cmd=prepare_write_cmd.replace('dynamodb', stress_type), stress_num=1,
prefix='preload-', stats_aggregate_cmds=False))
for stress in stress_queue:
self.get_stress_results(queue=stress, store_results=False)
self.update_test_details()
else:
self.log.warning("No prepare command defined in YAML!")
def test_write(self):
"""
Test steps:
1. Run a write workload with cql
2. Run a write workload without lwt
3. Run a write workload with lwt enabled
"""
# run a write workload
base_cmd_w = self.params.get('stress_cmd_w')
stress_multiplier = self.params.get('stress_multiplier')
self.create_cql_ks_and_table(field_number=10)
self._workload(sub_type='cql', stress_cmd=base_cmd_w,
stress_num=stress_multiplier, keyspace_num=1, is_alternator=False)
schema = self.params.get("dynamodb_primarykey_type")
# run a workload without lwt as baseline
self.create_alternator_table(
schema=schema, alternator_write_isolation=alternator.enums.WriteIsolation.FORBID_RMW)
self._workload(sub_type='without-lwt', stress_cmd=base_cmd_w, stress_num=stress_multiplier, keyspace_num=1)
# run a workload with lwt
self.create_alternator_table(
schema=schema, alternator_write_isolation=alternator.enums.WriteIsolation.ALWAYS_USE_LWT)
self._workload(sub_type='with-lwt', stress_cmd=base_cmd_w, stress_num=stress_multiplier, keyspace_num=1)
self.check_regression_with_baseline('cql')
def test_read(self):
"""
Test steps:
1. Run a write workload as a preparation
2. Run a read workload with cql
3. Run a read workload without lwt
4. Run a read workload with lwt enabled
"""
node = self.db_cluster.nodes[0]
base_cmd_r = self.params.get('stress_cmd_r')
stress_multiplier = self.params.get('stress_multiplier')
self.run_fstrim_on_all_db_nodes()
# run a prepare write workload
self.create_cql_ks_and_table(field_number=10)
self.create_alternator_table(schema=self.params.get("dynamodb_primarykey_type"),
alternator_write_isolation=alternator.enums.WriteIsolation.FORBID_RMW)
self.preload_data()
self._workload(sub_type='cql', stress_cmd=base_cmd_r,
stress_num=stress_multiplier, keyspace_num=1, is_alternator=False)
# run a workload without lwt as baseline
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.FORBID_RMW)
self._workload(sub_type='without-lwt', stress_cmd=base_cmd_r, stress_num=stress_multiplier, keyspace_num=1)
# run a workload with lwt
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.ALWAYS_USE_LWT)
self._workload(sub_type='with-lwt', stress_cmd=base_cmd_r, stress_num=stress_multiplier, keyspace_num=1)
self.check_regression_with_baseline('cql')
def test_mixed(self):
"""
Test steps:
1. Run a write workload as a preparation
2. Run a mixed workload with cql
3. Run a mixed workload without lwt
4. Run a mixed workload with lwt
"""
node = self.db_cluster.nodes[0]
base_cmd_m = self.params.get('stress_cmd_m')
stress_multiplier = self.params.get('stress_multiplier')
self.run_fstrim_on_all_db_nodes()
self.create_cql_ks_and_table(field_number=10)
self.create_alternator_table(schema=self.params.get("dynamodb_primarykey_type"),
alternator_write_isolation=alternator.enums.WriteIsolation.FORBID_RMW)
# run a write workload as a preparation
self.preload_data()
self._workload(sub_type='cql', stress_cmd=base_cmd_m,
stress_num=stress_multiplier, keyspace_num=1, is_alternator=False)
# run a mixed workload
# run a workload without lwt as baseline
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.FORBID_RMW)
self._workload(sub_type='without-lwt', stress_cmd=base_cmd_m, stress_num=stress_multiplier, keyspace_num=1)
# run a workload with lwt
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.ALWAYS_USE_LWT)
self._workload(sub_type='with-lwt', stress_cmd=base_cmd_m, stress_num=stress_multiplier, keyspace_num=1)
self.check_regression_with_baseline('cql')
def test_latency(self):
"""
Test steps:
        1. Prepare the cluster with data (reach a steady state of compactions and ~10x the capacity of RAM).
           With round_robin and a list of stress_cmd entries, the data loads several times faster.
2. Run READ workload with cql.
3. Run READ workload without lwt.
4. Run READ workload with lwt.
5. Run WRITE workload with cql.
6. Run WRITE workload without lwt.
7. Run WRITE workload with lwt.
8. Run MIXED workload with cql.
9. Run MIXED workload without lwt.
10. Run MIXED workload with lwt.
"""
node = self.db_cluster.nodes[0]
self.create_alternator_table(schema=self.params.get("dynamodb_primarykey_type"),
alternator_write_isolation=alternator.enums.WriteIsolation.FORBID_RMW)
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.FORBID_RMW)
self.create_cql_ks_and_table(field_number=10)
self.run_fstrim_on_all_db_nodes()
self.preload_data()
base_cmd_w = self.params.get('stress_cmd_w')
base_cmd_r = self.params.get('stress_cmd_r')
base_cmd_m = self.params.get('stress_cmd_m')
stress_multiplier = 2
self.wait_no_compactions_running(n=120)
self.run_fstrim_on_all_db_nodes()
self._workload(
test_name=self.id() + '_read', sub_type='cql', stress_cmd=base_cmd_r, stress_num=stress_multiplier,
keyspace_num=1, is_alternator=False)
# run a workload without lwt as baseline
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.FORBID_RMW)
self._workload(
test_name=self.id() + '_read', sub_type='without-lwt', stress_cmd=base_cmd_r, stress_num=stress_multiplier,
keyspace_num=1)
self.wait_no_compactions_running()
# run a workload with lwt
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.ALWAYS_USE_LWT)
self._workload(
test_name=self.id() + '_read', sub_type='with-lwt', stress_cmd=base_cmd_r, stress_num=stress_multiplier,
keyspace_num=1)
self.check_regression_with_baseline('cql')
stress_multiplier = 1
self.run_fstrim_on_all_db_nodes()
self.wait_no_compactions_running()
self._workload(
test_name=self.id() + '_write', sub_type='cql', stress_cmd=base_cmd_w + " -target 10000",
stress_num=stress_multiplier, keyspace_num=1, is_alternator=False)
self.wait_no_compactions_running()
# run a workload without lwt as baseline
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.FORBID_RMW)
self._workload(
test_name=self.id() + '_write', sub_type='without-lwt', stress_cmd=base_cmd_w + " -target 10000",
stress_num=stress_multiplier, keyspace_num=1)
self.wait_no_compactions_running(n=120)
# run a workload with lwt
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.ALWAYS_USE_LWT)
self._workload(
test_name=self.id() + '_write', sub_type='with-lwt', stress_cmd=base_cmd_w + " -target 3000",
stress_num=stress_multiplier, keyspace_num=1)
self.check_regression_with_baseline('cql')
stress_multiplier = 1
self.wait_no_compactions_running(n=120)
self.run_fstrim_on_all_db_nodes()
self._workload(
test_name=self.id() + '_mixed', sub_type='cql', stress_cmd=base_cmd_m + " -target 10000",
stress_num=stress_multiplier, keyspace_num=1, is_alternator=False)
self.wait_no_compactions_running()
# run a workload without lwt as baseline
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.FORBID_RMW)
self._workload(test_name=self.id() + '_mixed', sub_type='without-lwt',
stress_cmd=base_cmd_m + " -target 10000", stress_num=stress_multiplier, keyspace_num=1)
self.wait_no_compactions_running()
# run a workload with lwt
self.alternator.set_write_isolation(node=node, isolation=alternator.enums.WriteIsolation.ALWAYS_USE_LWT)
self._workload(test_name=self.id() + '_mixed', sub_type='with-lwt',
stress_cmd=base_cmd_m + " -target 5000", stress_num=stress_multiplier, keyspace_num=1)
self.check_regression_with_baseline('cql')
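# Editor's note: a hedged sketch (not from the original file) of the driver
# swap that _workload() and preload_data() above rely on -- the same YCSB
# command string is reused for the CQL baseline by replacing the driver name.
# The command below is an illustrative placeholder, not a value from any config.
def _example_driver_swap():
    stress_cmd = 'ycsb run dynamodb -P workloads/workloada'
    cql_cmd = stress_cmd.replace('dynamodb', 'cassandra-cql')
    assert cql_cmd == 'ycsb run cassandra-cql -P workloads/workloada'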
| agpl-3.0 |
lepistone/odoo | addons/hw_scale/controllers/main.py | 49 | 6932 | # -*- coding: utf-8 -*-
import logging
import os
import time
from os import listdir
from os.path import join
from threading import Thread, Lock
from select import select
from Queue import Queue, Empty
import openerp
import openerp.addons.hw_proxy.controllers.main as hw_proxy
from openerp import http
from openerp.http import request
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
try:
import serial
except ImportError:
_logger.error('OpenERP module hw_scale depends on the pyserial python module')
serial = None
class Scale(Thread):
def __init__(self):
Thread.__init__(self)
self.lock = Lock()
self.scalelock = Lock()
self.status = {'status':'connecting', 'messages':[]}
self.input_dir = '/dev/serial/by-id/'
self.weight = 0
self.weight_info = 'ok'
self.device = None
def lockedstart(self):
with self.lock:
if not self.isAlive():
self.daemon = True
self.start()
def set_status(self, status, message = None):
if status == self.status['status']:
            if message is not None and message != self.status['messages'][-1]:
self.status['messages'].append(message)
else:
self.status['status'] = status
if message:
self.status['messages'] = [message]
else:
self.status['messages'] = []
if status == 'error' and message:
_logger.error('Scale Error: '+message)
elif status == 'disconnected' and message:
_logger.warning('Disconnected Scale: '+message)
def get_device(self):
try:
devices = [ device for device in listdir(self.input_dir)]
scales = [ device for device in devices if ('mettler' in device.lower()) or ('toledo' in device.lower()) ]
if len(scales) > 0:
                _logger.debug('Scale device found: %s', join(self.input_dir, scales[0]))
self.set_status('connected','Connected to '+scales[0])
return serial.Serial(join(self.input_dir,scales[0]),
baudrate = 9600,
bytesize = serial.SEVENBITS,
stopbits = serial.STOPBITS_ONE,
parity = serial.PARITY_EVEN,
#xonxoff = serial.XON,
timeout = 0.01,
writeTimeout= 0.01)
else:
self.set_status('disconnected','Scale Not Found')
return None
except Exception as e:
self.set_status('error',str(e))
return None
def get_weight(self):
self.lockedstart()
return self.weight
def get_weight_info(self):
self.lockedstart()
return self.weight_info
def get_status(self):
self.lockedstart()
return self.status
def read_weight(self):
with self.scalelock:
if self.device:
try:
self.device.write('W')
time.sleep(0.1)
answer = []
while True:
char = self.device.read(1)
if not char:
break
else:
answer.append(char)
if '?' in answer:
stat = ord(answer[answer.index('?')+1])
if stat == 0:
self.weight_info = 'ok'
else:
self.weight_info = []
if stat & 1 :
self.weight_info.append('moving')
if stat & 1 << 1:
self.weight_info.append('over_capacity')
if stat & 1 << 2:
self.weight_info.append('negative')
self.weight = 0.0
if stat & 1 << 3:
self.weight_info.append('outside_zero_capture_range')
if stat & 1 << 4:
self.weight_info.append('center_of_zero')
if stat & 1 << 5:
self.weight_info.append('net_weight')
else:
answer = answer[1:-1]
if 'N' in answer:
answer = answer[0:-1]
try:
self.weight = float(''.join(answer))
except ValueError as v:
                            self.set_status('error', 'No data received, please power-cycle the scale')
self.device = None
except Exception as e:
self.set_status('error',str(e))
self.device = None
def set_zero(self):
with self.scalelock:
if self.device:
try:
self.device.write('Z')
except Exception as e:
self.set_status('error',str(e))
self.device = None
def set_tare(self):
with self.scalelock:
if self.device:
try:
self.device.write('T')
except Exception as e:
self.set_status('error',str(e))
self.device = None
def clear_tare(self):
with self.scalelock:
if self.device:
try:
self.device.write('C')
except Exception as e:
self.set_status('error',str(e))
self.device = None
def run(self):
self.device = None
while True:
if self.device:
self.read_weight()
time.sleep(0.05)
else:
with self.scalelock:
self.device = self.get_device()
if not self.device:
time.sleep(5)
s = Scale()
hw_proxy.drivers['scale'] = s
class ScaleDriver(hw_proxy.Proxy):
@http.route('/hw_proxy/scale_read/', type='json', auth='none', cors='*')
def scale_read(self):
return {'weight':s.get_weight(), 'unit':'kg', 'info':s.get_weight_info()}
@http.route('/hw_proxy/scale_zero/', type='json', auth='none', cors='*')
def scale_zero(self):
s.set_zero()
return True
@http.route('/hw_proxy/scale_tare/', type='json', auth='none', cors='*')
def scale_tare(self):
s.set_tare()
return True
@http.route('/hw_proxy/scale_clear_tare/', type='json', auth='none', cors='*')
def scale_clear_tare(self):
s.clear_tare()
return True
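# Editor's note: a hedged, self-contained sketch of the status-byte decoding
# performed inside Scale.read_weight() above. The reply string is a made-up
# example frame; the bit meanings are taken from the checks in read_weight().
def _example_decode_status(answer):
    chars = list(answer)
    if '?' not in chars:
        return ['ok']
    stat = ord(chars[chars.index('?') + 1])      # byte after '?' carries flags
    flags = []
    if stat & 1:
        flags.append('moving')                   # bit 0: weight not yet stable
    if stat & 1 << 1:
        flags.append('over_capacity')            # bit 1: load above capacity
    if stat & 1 << 2:
        flags.append('negative')                 # bit 2: weight below zero
    return flags or ['ok']
# e.g. _example_decode_status('\x02?\x01\x03') == ['moving']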
| agpl-3.0 |
Belxjander/Kirito | Python-3.5.0-Amiga/Lib/encodings/utf_8_sig.py | 266 | 4133 | """ Python 'utf-8-sig' Codec
This works like UTF-8 with the following changes:
* On encoding/writing a UTF-8 encoded BOM will be prepended/written as the
first three bytes.
* On decoding/reading if the first three bytes are a UTF-8 encoded BOM, these
bytes will be skipped.
"""
import codecs
### Codec APIs
def encode(input, errors='strict'):
return (codecs.BOM_UTF8 + codecs.utf_8_encode(input, errors)[0],
len(input))
def decode(input, errors='strict'):
prefix = 0
if input[:3] == codecs.BOM_UTF8:
input = input[3:]
prefix = 3
(output, consumed) = codecs.utf_8_decode(input, errors, True)
return (output, consumed+prefix)
class IncrementalEncoder(codecs.IncrementalEncoder):
def __init__(self, errors='strict'):
codecs.IncrementalEncoder.__init__(self, errors)
self.first = 1
def encode(self, input, final=False):
if self.first:
self.first = 0
return codecs.BOM_UTF8 + \
codecs.utf_8_encode(input, self.errors)[0]
else:
return codecs.utf_8_encode(input, self.errors)[0]
def reset(self):
codecs.IncrementalEncoder.reset(self)
self.first = 1
def getstate(self):
return self.first
def setstate(self, state):
self.first = state
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
def __init__(self, errors='strict'):
codecs.BufferedIncrementalDecoder.__init__(self, errors)
self.first = 1
def _buffer_decode(self, input, errors, final):
if self.first:
if len(input) < 3:
if codecs.BOM_UTF8.startswith(input):
# not enough data to decide if this really is a BOM
# => try again on the next call
return ("", 0)
else:
self.first = 0
else:
self.first = 0
if input[:3] == codecs.BOM_UTF8:
(output, consumed) = \
codecs.utf_8_decode(input[3:], errors, final)
return (output, consumed+3)
return codecs.utf_8_decode(input, errors, final)
def reset(self):
codecs.BufferedIncrementalDecoder.reset(self)
self.first = 1
def getstate(self):
state = codecs.BufferedIncrementalDecoder.getstate(self)
# state[1] must be 0 here, as it isn't passed along to the caller
return (state[0], self.first)
def setstate(self, state):
# state[1] will be ignored by BufferedIncrementalDecoder.setstate()
codecs.BufferedIncrementalDecoder.setstate(self, state)
self.first = state[1]
class StreamWriter(codecs.StreamWriter):
def reset(self):
codecs.StreamWriter.reset(self)
try:
del self.encode
except AttributeError:
pass
def encode(self, input, errors='strict'):
self.encode = codecs.utf_8_encode
return encode(input, errors)
class StreamReader(codecs.StreamReader):
def reset(self):
codecs.StreamReader.reset(self)
try:
del self.decode
except AttributeError:
pass
def decode(self, input, errors='strict'):
if len(input) < 3:
if codecs.BOM_UTF8.startswith(input):
# not enough data to decide if this is a BOM
# => try again on the next call
return ("", 0)
elif input[:3] == codecs.BOM_UTF8:
self.decode = codecs.utf_8_decode
(output, consumed) = codecs.utf_8_decode(input[3:],errors)
return (output, consumed+3)
# (else) no BOM present
self.decode = codecs.utf_8_decode
return codecs.utf_8_decode(input, errors)
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='utf-8-sig',
encode=encode,
decode=decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
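# Editor's note: a small BOM round-trip check using the module-level encode()
# and decode() defined above (a hedged illustration, not part of the codec).
def _example_bom_roundtrip():
    data, _ = encode('hi')
    assert data.startswith(codecs.BOM_UTF8)          # BOM prepended on encode
    text, consumed = decode(data)
    assert text == 'hi' and consumed == len(data)    # BOM skipped on decode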
| gpl-3.0 |
smn/onadata | onadata/apps/logger/migrations/0028_update_is_deleted_field.py | 13 | 8711 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
no_dry_run = True
def forwards(self, orm):
# update values
db.execute("UPDATE odk_logger_instance SET is_deleted=%s WHERE deleted_at IS NOT NULL", [True])
def backwards(self, orm):
pass
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'odk_logger.attachment': {
'Meta': {'object_name': 'Attachment'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['odk_logger.Instance']"}),
'media_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'mimetype': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20', 'blank': 'True'})
},
'odk_logger.instance': {
'Meta': {'object_name': 'Instance'},
'date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'submitted_via_web'", 'max_length': '20'}),
'survey_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['odk_logger.SurveyType']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['auth.User']"}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '249'}),
'xform': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['odk_logger.XForm']"}),
'xml': ('django.db.models.fields.TextField', [], {})
},
'odk_logger.instancehistory': {
'Meta': {'object_name': 'InstanceHistory'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '249'}),
'xform_instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'submission_history'", 'to': "orm['odk_logger.Instance']"}),
'xml': ('django.db.models.fields.TextField', [], {})
},
'odk_logger.surveytype': {
'Meta': {'object_name': 'SurveyType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'odk_logger.xform': {
'Meta': {'ordering': "('id_string',)", 'unique_together': "(('user', 'id_string'), ('user', 'sms_id_string'))", 'object_name': 'XForm'},
'allows_sms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'bamboo_dataset': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '60'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'null': 'True'}),
'downloadable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'has_start_time': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'id_string': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'is_crowd_form': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'json': ('django.db.models.fields.TextField', [], {'default': "u''"}),
'shared': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shared_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'sms_id_string': ('django.db.models.fields.SlugField', [], {'default': "''", 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'xforms'", 'null': 'True', 'to': "orm['auth.User']"}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32'}),
'xls': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'}),
'xml': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['logger']
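# Editor's note: a hedged sketch of the forwards() step expressed against the
# frozen ORM instead of raw SQL. The 'odk_logger.Instance' label matches the
# model keys frozen above, but whether South resolves it here is an assumption
# -- presumably why the original migration uses db.execute() directly.
def _forwards_orm_sketch(orm):
    orm['odk_logger.Instance'].objects.filter(
        deleted_at__isnull=False).update(is_deleted=True)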
| bsd-2-clause |
orbitfp7/nova | nova/tests/unit/volume/encryptors/test_cryptsetup.py | 38 | 3321 | # Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import array
import mock
from nova.keymgr import key
from nova.tests.unit.volume.encryptors import test_base
from nova.volume.encryptors import cryptsetup
def fake__get_key(context):
raw = array.array('B', ('0' * 64).decode('hex')).tolist()
symmetric_key = key.SymmetricKey('AES', raw)
return symmetric_key
class CryptsetupEncryptorTestCase(test_base.VolumeEncryptorTestCase):
def _create(self, connection_info):
return cryptsetup.CryptsetupEncryptor(connection_info)
def setUp(self):
super(CryptsetupEncryptorTestCase, self).setUp()
self.dev_path = self.connection_info['data']['device_path']
self.dev_name = self.dev_path.split('/')[-1]
self.symlink_path = self.dev_path
@mock.patch('nova.utils.execute')
def test__open_volume(self, mock_execute):
self.encryptor._open_volume("passphrase")
mock_execute.assert_has_calls([
mock.call('cryptsetup', 'create', '--key-file=-', self.dev_name,
self.dev_path, process_input='passphrase',
run_as_root=True, check_exit_code=True),
])
self.assertEqual(1, mock_execute.call_count)
@mock.patch('nova.utils.execute')
def test_attach_volume(self, mock_execute):
self.encryptor._get_key = mock.MagicMock()
self.encryptor._get_key.return_value = fake__get_key(None)
self.encryptor.attach_volume(None)
mock_execute.assert_has_calls([
mock.call('cryptsetup', 'create', '--key-file=-', self.dev_name,
self.dev_path, process_input='0' * 32,
run_as_root=True, check_exit_code=True),
mock.call('ln', '--symbolic', '--force',
'/dev/mapper/%s' % self.dev_name, self.symlink_path,
run_as_root=True, check_exit_code=True),
])
self.assertEqual(2, mock_execute.call_count)
@mock.patch('nova.utils.execute')
def test__close_volume(self, mock_execute):
self.encryptor.detach_volume()
mock_execute.assert_has_calls([
mock.call('cryptsetup', 'remove', self.dev_name,
run_as_root=True, check_exit_code=True),
])
self.assertEqual(1, mock_execute.call_count)
@mock.patch('nova.utils.execute')
def test_detach_volume(self, mock_execute):
self.encryptor.detach_volume()
mock_execute.assert_has_calls([
mock.call('cryptsetup', 'remove', self.dev_name,
run_as_root=True, check_exit_code=True),
])
self.assertEqual(1, mock_execute.call_count)
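# Editor's note: a hedged check of what fake__get_key() above produces -- the
# Python 2 '...'.decode('hex') idiom turns 64 hex digits into 32 raw key bytes.
def _example_key_material():
    raw = ('0' * 64).decode('hex')        # 64 hex chars -> 32 zero bytes
    assert len(raw) == 32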
| apache-2.0 |
RicardoJohann/frappe | frappe/model/dynamic_links.py | 1 | 2131 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
# select doctypes that are accessed by the user (not read_only) first, so that the
# the validation message shows the user-facing doctype first.
# For example Journal Entry should be validated before GL Entry (which is an internal doctype)
dynamic_link_queries = [
"""select `tabDocField`.parent,
`tabDocType`.read_only, `tabDocType`.in_create,
`tabDocField`.fieldname, `tabDocField`.options
from `tabDocField`, `tabDocType`
where `tabDocField`.fieldtype='Dynamic Link' and
`tabDocType`.`name`=`tabDocField`.parent
order by `tabDocType`.read_only, `tabDocType`.in_create""",
"""select `tabCustom Field`.dt as parent,
`tabDocType`.read_only, `tabDocType`.in_create,
`tabCustom Field`.fieldname, `tabCustom Field`.options
from `tabCustom Field`, `tabDocType`
where `tabCustom Field`.fieldtype='Dynamic Link' and
`tabDocType`.`name`=`tabCustom Field`.dt
order by `tabDocType`.read_only, `tabDocType`.in_create""",
]
def get_dynamic_link_map(for_delete=False):
'''Build a map of all dynamically linked tables. For example,
if Note is dynamically linked to ToDo, the function will return
`{"Note": ["ToDo"], "Sales Invoice": ["Journal Entry Detail"]}`
Note: Will not map single doctypes
'''
	if getattr(frappe.local, 'dynamic_link_map', None) is None or frappe.flags.in_test:
# Build from scratch
dynamic_link_map = {}
for df in get_dynamic_links():
meta = frappe.get_meta(df.parent)
if meta.issingle:
# always check in Single DocTypes
dynamic_link_map.setdefault(meta.name, []).append(df)
else:
links = frappe.db.sql_list("""select distinct {options} from `tab{parent}`""".format(**df))
for doctype in links:
dynamic_link_map.setdefault(doctype, []).append(df)
frappe.local.dynamic_link_map = dynamic_link_map
return frappe.local.dynamic_link_map
def get_dynamic_links():
'''Return list of dynamic link fields as DocField.
Uses cache if possible'''
df = []
for query in dynamic_link_queries:
df += frappe.db.sql(query, as_dict=True)
return df
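# Editor's note: a hedged usage sketch, not part of the original module. "ToDo"
# comes from the docstring example above, not from live data; each returned
# entry carries the linking doctype, fieldname and options of a Dynamic Link.
def _example_linked_doctypes():
	link_map = get_dynamic_link_map()
	for df in link_map.get('ToDo', []):
		print("%s.%s -> %s" % (df.parent, df.fieldname, df.options))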
| mit |
NolanBecker/aima-python | tests/test_utils.py | 26 | 5959 | import pytest
from utils import * # noqa
def test_removeall_list():
assert removeall(4, []) == []
assert removeall(4, [1, 2, 3, 4]) == [1, 2, 3]
assert removeall(4, [4, 1, 4, 2, 3, 4, 4]) == [1, 2, 3]
def test_removeall_string():
assert removeall('s', '') == ''
assert removeall('s', 'This is a test. Was a test.') == 'Thi i a tet. Wa a tet.'
def test_unique():
assert unique([1, 2, 3, 2, 1]) == [1, 2, 3]
assert unique([1, 5, 6, 7, 6, 5]) == [1, 5, 6, 7]
def test_product():
assert product([1, 2, 3, 4]) == 24
assert product(list(range(1, 11))) == 3628800
def test_first():
assert first('word') == 'w'
assert first('') is None
assert first('', 'empty') == 'empty'
assert first(range(10)) == 0
assert first(x for x in range(10) if x > 3) == 4
assert first(x for x in range(10) if x > 100) is None
def test_is_in():
e = []
assert is_in(e, [1, e, 3]) is True
assert is_in(e, [1, [], 3]) is False
def test_argminmax():
assert argmin([-2, 1], key=abs) == 1
assert argmax([-2, 1], key=abs) == -2
assert argmax(['one', 'to', 'three'], key=len) == 'three'
def test_histogram():
assert histogram([1, 2, 4, 2, 4, 5, 7, 9, 2, 1]) == [(1, 2), (2, 3),
(4, 2), (5, 1),
(7, 1), (9, 1)]
assert histogram([1, 2, 4, 2, 4, 5, 7, 9, 2, 1], 0, lambda x: x*x) == [(1, 2), (4, 3),
(16, 2), (25, 1),
(49, 1), (81, 1)]
assert histogram([1, 2, 4, 2, 4, 5, 7, 9, 2, 1], 1) == [(2, 3), (4, 2),
(1, 2), (9, 1),
(7, 1), (5, 1)]
def test_dotproduct():
assert dotproduct([1, 2, 3], [1000, 100, 10]) == 1230
def test_element_wise_product():
assert element_wise_product([1, 2, 5], [7, 10, 0]) == [7, 20, 0]
assert element_wise_product([1, 6, 3, 0], [9, 12, 0, 0]) == [9, 72, 0, 0]
def test_matrix_multiplication():
assert matrix_multiplication([[1, 2, 3],
[2, 3, 4]],
[[3, 4],
[1, 2],
[1, 0]]) == [[8, 8], [13, 14]]
assert matrix_multiplication([[1, 2, 3],
[2, 3, 4]],
[[3, 4, 8, 1],
[1, 2, 5, 0],
[1, 0, 0, 3]],
[[1, 2],
[3, 4],
[5, 6],
[1, 2]]) == [[132, 176], [224, 296]]
def test_vector_to_diagonal():
assert vector_to_diagonal([1, 2, 3]) == [[1, 0, 0], [0, 2, 0], [0, 0, 3]]
assert vector_to_diagonal([0, 3, 6]) == [[0, 0, 0], [0, 3, 0], [0, 0, 6]]
def test_vector_add():
assert vector_add((0, 1), (8, 9)) == (8, 10)
def test_scalar_vector_product():
assert scalar_vector_product(2, [1, 2, 3]) == [2, 4, 6]
def test_scalar_matrix_product():
assert rounder(scalar_matrix_product(-5, [[1, 2], [3, 4], [0, 6]])) == [[-5, -10], [-15, -20], [0, -30]]
assert rounder(scalar_matrix_product(0.2, [[1, 2], [2, 3]])) == [[0.2, 0.4], [0.4, 0.6]]
def test_inverse_matrix():
assert rounder(inverse_matrix([[1, 0], [0, 1]])) == [[1, 0], [0, 1]]
assert rounder(inverse_matrix([[2, 1], [4, 3]])) == [[1.5, -0.5], [-2.0, 1.0]]
assert rounder(inverse_matrix([[4, 7], [2, 6]])) == [[0.6, -0.7], [-0.2, 0.4]]
def test_rounder():
assert rounder(5.3330000300330) == 5.3330
assert rounder(10.234566) == 10.2346
assert rounder([1.234566, 0.555555, 6.010101]) == [1.2346, 0.5556, 6.0101]
assert rounder([[1.234566, 0.555555, 6.010101],
[10.505050, 12.121212, 6.030303]]) == [[1.2346, 0.5556, 6.0101],
[10.5051, 12.1212, 6.0303]]
def test_num_or_str():
assert num_or_str('42') == 42
assert num_or_str(' 42x ') == '42x'
def test_normalize():
assert normalize([1, 2, 1]) == [0.25, 0.5, 0.25]
def test_clip():
assert [clip(x, 0, 1) for x in [-1, 0.5, 10]] == [0, 0.5, 1]
def test_sigmoid():
assert isclose(0.5, sigmoid(0))
assert isclose(0.7310585786300049, sigmoid(1))
assert isclose(0.2689414213699951, sigmoid(-1))
def test_step():
assert step(1) == step(0.5) == 1
assert step(0) == 1
assert step(-1) == step(-0.5) == 0
def test_Expr():
A, B, C = symbols('A, B, C')
assert symbols('A, B, C') == (Symbol('A'), Symbol('B'), Symbol('C'))
assert A.op == repr(A) == 'A'
assert arity(A) == 0 and A.args == ()
b = Expr('+', A, 1)
assert arity(b) == 2 and b.op == '+' and b.args == (A, 1)
u = Expr('-', b)
assert arity(u) == 1 and u.op == '-' and u.args == (b,)
assert (b ** u) == (b ** u)
assert (b ** u) != (u ** b)
assert A + b * C ** 2 == A + (b * (C ** 2))
ex = C + 1 / (A % 1)
assert list(subexpressions(ex)) == [(C + (1 / (A % 1))), C, (1 / (A % 1)), 1, (A % 1), A, 1]
assert A in subexpressions(ex)
assert B not in subexpressions(ex)
def test_expr():
P, Q, x, y, z, GP = symbols('P, Q, x, y, z, GP')
assert (expr(y + 2 * x)
== expr('y + 2 * x')
== Expr('+', y, Expr('*', 2, x)))
assert expr('P & Q ==> P') == Expr('==>', P & Q, P)
assert expr('P & Q <=> Q & P') == Expr('<=>', (P & Q), (Q & P))
assert expr('P(x) | P(y) & Q(z)') == (P(x) | (P(y) & Q(z)))
# x is grandparent of z if x is parent of y and y is parent of z:
assert (expr('GP(x, z) <== P(x, y) & P(y, z)')
== Expr('<==', GP(x, z), P(x, y) & P(y, z)))
if __name__ == '__main__':
pytest.main()
| mit |
prashanthr/wakatime | wakatime/packages/pytz/tests/test_tzinfo.py | 11 | 28136 | # -*- coding: ascii -*-
import sys, os, os.path
import unittest, doctest
try:
import cPickle as pickle
except ImportError:
import pickle
from datetime import datetime, time, timedelta, tzinfo
import warnings
if __name__ == '__main__':
# Only munge path if invoked as a script. Testrunners should have setup
# the paths already
sys.path.insert(0, os.path.abspath(os.path.join(os.pardir, os.pardir)))
import pytz
from pytz import reference
from pytz.tzfile import _byte_string
from pytz.tzinfo import DstTzInfo, StaticTzInfo
# I test for expected version to ensure the correct version of pytz is
# actually being tested.
EXPECTED_VERSION='2013d'
fmt = '%Y-%m-%d %H:%M:%S %Z%z'
NOTIME = timedelta(0)
# GMT is a tzinfo.StaticTzInfo--the class we primarily want to test--while
# UTC is reference implementation. They both have the same timezone meaning.
UTC = pytz.timezone('UTC')
GMT = pytz.timezone('GMT')
assert isinstance(GMT, StaticTzInfo), 'GMT is no longer a StaticTzInfo'
def prettydt(dt):
"""datetime as a string using a known format.
We don't use strftime as it doesn't handle years earlier than 1900
per http://bugs.python.org/issue1777412
"""
if dt.utcoffset() >= timedelta(0):
offset = '+%s' % (dt.utcoffset(),)
else:
offset = '-%s' % (-1 * dt.utcoffset(),)
return '%04d-%02d-%02d %02d:%02d:%02d %s %s' % (
dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.tzname(), offset)
try:
unicode
except NameError:
# Python 3.x doesn't have unicode(), making writing code
# for Python 2.3 and Python 3.x a pain.
unicode = str
class BasicTest(unittest.TestCase):
def testVersion(self):
# Ensuring the correct version of pytz has been loaded
self.assertEqual(EXPECTED_VERSION, pytz.__version__,
'Incorrect pytz version loaded. Import path is stuffed '
'or this test needs updating. (Wanted %s, got %s)'
% (EXPECTED_VERSION, pytz.__version__)
)
def testGMT(self):
now = datetime.now(tz=GMT)
self.assertTrue(now.utcoffset() == NOTIME)
self.assertTrue(now.dst() == NOTIME)
self.assertTrue(now.timetuple() == now.utctimetuple())
self.assertTrue(now==now.replace(tzinfo=UTC))
def testReferenceUTC(self):
now = datetime.now(tz=UTC)
self.assertTrue(now.utcoffset() == NOTIME)
self.assertTrue(now.dst() == NOTIME)
self.assertTrue(now.timetuple() == now.utctimetuple())
def testUnknownOffsets(self):
# This tzinfo behavior is required to make
# datetime.time.{utcoffset, dst, tzname} work as documented.
dst_tz = pytz.timezone('US/Eastern')
# This information is not known when we don't have a date,
# so return None per API.
self.assertTrue(dst_tz.utcoffset(None) is None)
self.assertTrue(dst_tz.dst(None) is None)
# We don't know the abbreviation, but this is still a valid
# tzname per the Python documentation.
self.assertEqual(dst_tz.tzname(None), 'US/Eastern')
def clearCache(self):
pytz._tzinfo_cache.clear()
def testUnicodeTimezone(self):
# We need to ensure that cold lookups work for both Unicode
# and traditional strings, and that the desired singleton is
# returned.
self.clearCache()
eastern = pytz.timezone(unicode('US/Eastern'))
self.assertTrue(eastern is pytz.timezone('US/Eastern'))
self.clearCache()
eastern = pytz.timezone('US/Eastern')
self.assertTrue(eastern is pytz.timezone(unicode('US/Eastern')))
class PicklingTest(unittest.TestCase):
def _roundtrip_tzinfo(self, tz):
p = pickle.dumps(tz)
unpickled_tz = pickle.loads(p)
self.assertTrue(tz is unpickled_tz, '%s did not roundtrip' % tz.zone)
def _roundtrip_datetime(self, dt):
# Ensure that the tzinfo attached to a datetime instance
# is identical to the one returned. This is important for
# DST timezones, as some state is stored in the tzinfo.
tz = dt.tzinfo
p = pickle.dumps(dt)
unpickled_dt = pickle.loads(p)
unpickled_tz = unpickled_dt.tzinfo
self.assertTrue(tz is unpickled_tz, '%s did not roundtrip' % tz.zone)
def testDst(self):
tz = pytz.timezone('Europe/Amsterdam')
dt = datetime(2004, 2, 1, 0, 0, 0)
for localized_tz in tz._tzinfos.values():
self._roundtrip_tzinfo(localized_tz)
self._roundtrip_datetime(dt.replace(tzinfo=localized_tz))
def testRoundtrip(self):
dt = datetime(2004, 2, 1, 0, 0, 0)
for zone in pytz.all_timezones:
tz = pytz.timezone(zone)
self._roundtrip_tzinfo(tz)
def testDatabaseFixes(self):
# Hack the pickle to make it refer to a timezone abbreviation
# that does not match anything. The unpickler should be able
# to repair this case
tz = pytz.timezone('Australia/Melbourne')
p = pickle.dumps(tz)
tzname = tz._tzname
hacked_p = p.replace(_byte_string(tzname), _byte_string('???'))
self.assertNotEqual(p, hacked_p)
unpickled_tz = pickle.loads(hacked_p)
self.assertTrue(tz is unpickled_tz)
# Simulate a database correction. In this case, the incorrect
# data will continue to be used.
p = pickle.dumps(tz)
new_utcoffset = tz._utcoffset.seconds + 42
# Python 3 introduced a new pickle protocol where numbers are stored in
# hexadecimal representation. Here we extract the pickle
# representation of the number for the current Python version.
old_pickle_pattern = pickle.dumps(tz._utcoffset.seconds)[3:-1]
new_pickle_pattern = pickle.dumps(new_utcoffset)[3:-1]
hacked_p = p.replace(old_pickle_pattern, new_pickle_pattern)
self.assertNotEqual(p, hacked_p)
unpickled_tz = pickle.loads(hacked_p)
self.assertEqual(unpickled_tz._utcoffset.seconds, new_utcoffset)
self.assertTrue(tz is not unpickled_tz)
def testOldPickles(self):
# Ensure that applications serializing pytz instances as pickles
# have no troubles upgrading to a new pytz release. These pickles
# where created with pytz2006j
east1 = pickle.loads(_byte_string(
"cpytz\n_p\np1\n(S'US/Eastern'\np2\nI-18000\n"
"I0\nS'EST'\np3\ntRp4\n."
))
east2 = pytz.timezone('US/Eastern')
self.assertTrue(east1 is east2)
# Confirm changes in name munging between 2006j and 2007c cause
# no problems.
pap1 = pickle.loads(_byte_string(
"cpytz\n_p\np1\n(S'America/Port_minus_au_minus_Prince'"
"\np2\nI-17340\nI0\nS'PPMT'\np3\ntRp4\n."))
pap2 = pytz.timezone('America/Port-au-Prince')
self.assertTrue(pap1 is pap2)
gmt1 = pickle.loads(_byte_string(
"cpytz\n_p\np1\n(S'Etc/GMT_plus_10'\np2\ntRp3\n."))
gmt2 = pytz.timezone('Etc/GMT+10')
self.assertTrue(gmt1 is gmt2)
class USEasternDSTStartTestCase(unittest.TestCase):
tzinfo = pytz.timezone('US/Eastern')
# 24 hours before DST changeover
transition_time = datetime(2002, 4, 7, 7, 0, 0, tzinfo=UTC)
# Increase for 'flexible' DST transitions due to 1 minute granularity
# of Python's datetime library
instant = timedelta(seconds=1)
# before transition
before = {
'tzname': 'EST',
'utcoffset': timedelta(hours = -5),
'dst': timedelta(hours = 0),
}
# after transition
after = {
'tzname': 'EDT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
def _test_tzname(self, utc_dt, wanted):
tzname = wanted['tzname']
dt = utc_dt.astimezone(self.tzinfo)
self.assertEqual(dt.tzname(), tzname,
'Expected %s as tzname for %s. Got %s' % (
tzname, str(utc_dt), dt.tzname()
)
)
def _test_utcoffset(self, utc_dt, wanted):
utcoffset = wanted['utcoffset']
dt = utc_dt.astimezone(self.tzinfo)
self.assertEqual(
dt.utcoffset(), wanted['utcoffset'],
'Expected %s as utcoffset for %s. Got %s' % (
utcoffset, utc_dt, dt.utcoffset()
)
)
def _test_dst(self, utc_dt, wanted):
dst = wanted['dst']
dt = utc_dt.astimezone(self.tzinfo)
self.assertEqual(dt.dst(),dst,
'Expected %s as dst for %s. Got %s' % (
dst, utc_dt, dt.dst()
)
)
def test_arithmetic(self):
utc_dt = self.transition_time
for days in range(-420, 720, 20):
delta = timedelta(days=days)
# Make sure we can get back where we started
dt = utc_dt.astimezone(self.tzinfo)
dt2 = dt + delta
dt2 = dt2 - delta
self.assertEqual(dt, dt2)
# Make sure arithmetic crossing DST boundaries ends
# up in the correct timezone after normalization
utc_plus_delta = (utc_dt + delta).astimezone(self.tzinfo)
local_plus_delta = self.tzinfo.normalize(dt + delta)
self.assertEqual(
prettydt(utc_plus_delta),
prettydt(local_plus_delta),
'Incorrect result for delta==%d days. Wanted %r. Got %r'%(
days,
prettydt(utc_plus_delta),
prettydt(local_plus_delta),
)
)
def _test_all(self, utc_dt, wanted):
self._test_utcoffset(utc_dt, wanted)
self._test_tzname(utc_dt, wanted)
self._test_dst(utc_dt, wanted)
def testDayBefore(self):
self._test_all(
self.transition_time - timedelta(days=1), self.before
)
def testTwoHoursBefore(self):
self._test_all(
self.transition_time - timedelta(hours=2), self.before
)
def testHourBefore(self):
self._test_all(
self.transition_time - timedelta(hours=1), self.before
)
def testInstantBefore(self):
self._test_all(
self.transition_time - self.instant, self.before
)
def testTransition(self):
self._test_all(
self.transition_time, self.after
)
def testInstantAfter(self):
self._test_all(
self.transition_time + self.instant, self.after
)
def testHourAfter(self):
self._test_all(
self.transition_time + timedelta(hours=1), self.after
)
def testTwoHoursAfter(self):
self._test_all(
            self.transition_time + timedelta(hours=2), self.after
)
def testDayAfter(self):
self._test_all(
self.transition_time + timedelta(days=1), self.after
)
class USEasternDSTEndTestCase(USEasternDSTStartTestCase):
tzinfo = pytz.timezone('US/Eastern')
transition_time = datetime(2002, 10, 27, 6, 0, 0, tzinfo=UTC)
before = {
'tzname': 'EDT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
after = {
'tzname': 'EST',
'utcoffset': timedelta(hours = -5),
'dst': timedelta(hours = 0),
}
class USEasternEPTStartTestCase(USEasternDSTStartTestCase):
transition_time = datetime(1945, 8, 14, 23, 0, 0, tzinfo=UTC)
before = {
'tzname': 'EWT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
after = {
'tzname': 'EPT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
class USEasternEPTEndTestCase(USEasternDSTStartTestCase):
transition_time = datetime(1945, 9, 30, 6, 0, 0, tzinfo=UTC)
before = {
'tzname': 'EPT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
after = {
'tzname': 'EST',
'utcoffset': timedelta(hours = -5),
'dst': timedelta(hours = 0),
}
class WarsawWMTEndTestCase(USEasternDSTStartTestCase):
# In 1915, Warsaw changed from Warsaw to Central European time.
# This involved the clocks being set backwards, causing a end-of-DST
# like situation without DST being involved.
tzinfo = pytz.timezone('Europe/Warsaw')
transition_time = datetime(1915, 8, 4, 22, 36, 0, tzinfo=UTC)
before = {
'tzname': 'WMT',
'utcoffset': timedelta(hours=1, minutes=24),
'dst': timedelta(0),
}
after = {
'tzname': 'CET',
'utcoffset': timedelta(hours=1),
'dst': timedelta(0),
}
class VilniusWMTEndTestCase(USEasternDSTStartTestCase):
# At the end of 1916, Vilnius changed timezones putting its clock
# forward by 11 minutes 35 seconds. Neither timezone was in DST mode.
tzinfo = pytz.timezone('Europe/Vilnius')
instant = timedelta(seconds=31)
transition_time = datetime(1916, 12, 31, 22, 36, 00, tzinfo=UTC)
before = {
'tzname': 'WMT',
'utcoffset': timedelta(hours=1, minutes=24),
'dst': timedelta(0),
}
after = {
'tzname': 'KMT',
'utcoffset': timedelta(hours=1, minutes=36), # Really 1:35:36
'dst': timedelta(0),
}
class VilniusCESTStartTestCase(USEasternDSTStartTestCase):
# In 1941, Vilnius changed from MSG to CEST, switching to summer
# time while simultaneously reducing its UTC offset by two hours,
# causing the clocks to go backwards for this summer time
# switchover.
tzinfo = pytz.timezone('Europe/Vilnius')
transition_time = datetime(1941, 6, 23, 21, 00, 00, tzinfo=UTC)
before = {
'tzname': 'MSK',
'utcoffset': timedelta(hours=3),
'dst': timedelta(0),
}
after = {
'tzname': 'CEST',
'utcoffset': timedelta(hours=2),
'dst': timedelta(hours=1),
}
class LondonHistoryStartTestCase(USEasternDSTStartTestCase):
# The first known timezone transition in London was in 1847 when
# clocks where synchronized to GMT. However, we currently only
# understand v1 format tzfile(5) files which does handle years
# this far in the past, so our earliest known transition is in
# 1916.
tzinfo = pytz.timezone('Europe/London')
# transition_time = datetime(1847, 12, 1, 1, 15, 00, tzinfo=UTC)
# before = {
# 'tzname': 'LMT',
# 'utcoffset': timedelta(minutes=-75),
# 'dst': timedelta(0),
# }
# after = {
# 'tzname': 'GMT',
# 'utcoffset': timedelta(0),
# 'dst': timedelta(0),
# }
transition_time = datetime(1916, 5, 21, 2, 00, 00, tzinfo=UTC)
before = {
'tzname': 'GMT',
'utcoffset': timedelta(0),
'dst': timedelta(0),
}
after = {
'tzname': 'BST',
'utcoffset': timedelta(hours=1),
'dst': timedelta(hours=1),
}
class LondonHistoryEndTestCase(USEasternDSTStartTestCase):
# Timezone switchovers are projected into the future, even
# though no official statements exist or could be believed even
# if they did exist. We currently only check the last known
# transition in 2037, as we are still using v1 format tzfile(5)
# files.
tzinfo = pytz.timezone('Europe/London')
# transition_time = datetime(2499, 10, 25, 1, 0, 0, tzinfo=UTC)
transition_time = datetime(2037, 10, 25, 1, 0, 0, tzinfo=UTC)
before = {
'tzname': 'BST',
'utcoffset': timedelta(hours=1),
'dst': timedelta(hours=1),
}
after = {
'tzname': 'GMT',
'utcoffset': timedelta(0),
'dst': timedelta(0),
}
class NoumeaHistoryStartTestCase(USEasternDSTStartTestCase):
# Noumea adopted a whole hour offset in 1912. Previously
# it was 11 hours, 5 minutes and 48 seconds off UTC. However,
# due to limitations of the Python datetime library, we need
# to round that to 11 hours 6 minutes.
tzinfo = pytz.timezone('Pacific/Noumea')
transition_time = datetime(1912, 1, 12, 12, 54, 12, tzinfo=UTC)
before = {
'tzname': 'LMT',
'utcoffset': timedelta(hours=11, minutes=6),
'dst': timedelta(0),
}
after = {
'tzname': 'NCT',
'utcoffset': timedelta(hours=11),
'dst': timedelta(0),
}
class NoumeaDSTEndTestCase(USEasternDSTStartTestCase):
# Noumea dropped DST in 1997.
tzinfo = pytz.timezone('Pacific/Noumea')
transition_time = datetime(1997, 3, 1, 15, 00, 00, tzinfo=UTC)
before = {
'tzname': 'NCST',
'utcoffset': timedelta(hours=12),
'dst': timedelta(hours=1),
}
after = {
'tzname': 'NCT',
'utcoffset': timedelta(hours=11),
'dst': timedelta(0),
}
class NoumeaNoMoreDSTTestCase(NoumeaDSTEndTestCase):
    # Noumea dropped DST in 1997. Here we test that it stops occurring.
transition_time = (
NoumeaDSTEndTestCase.transition_time + timedelta(days=365*10))
before = NoumeaDSTEndTestCase.after
after = NoumeaDSTEndTestCase.after
class TahitiTestCase(USEasternDSTStartTestCase):
# Tahiti has had a single transition in its history.
tzinfo = pytz.timezone('Pacific/Tahiti')
transition_time = datetime(1912, 10, 1, 9, 58, 16, tzinfo=UTC)
before = {
'tzname': 'LMT',
'utcoffset': timedelta(hours=-9, minutes=-58),
'dst': timedelta(0),
}
after = {
'tzname': 'TAHT',
'utcoffset': timedelta(hours=-10),
'dst': timedelta(0),
}
class SamoaInternationalDateLineChange(USEasternDSTStartTestCase):
# At the end of 2011, Samoa will switch from being east of the
# international dateline to the west. There will be no Dec 30th
# 2011 and it will switch from UTC-10 to UTC+14.
tzinfo = pytz.timezone('Pacific/Apia')
transition_time = datetime(2011, 12, 30, 10, 0, 0, tzinfo=UTC)
before = {
'tzname': 'WSDT',
'utcoffset': timedelta(hours=-10),
'dst': timedelta(hours=1),
}
after = {
'tzname': 'WSDT',
'utcoffset': timedelta(hours=14),
'dst': timedelta(hours=1),
}
class ReferenceUSEasternDSTStartTestCase(USEasternDSTStartTestCase):
tzinfo = reference.Eastern
def test_arithmetic(self):
# Reference implementation cannot handle this
pass
class ReferenceUSEasternDSTEndTestCase(USEasternDSTEndTestCase):
tzinfo = reference.Eastern
def testHourBefore(self):
# Python's datetime library has a bug, where the hour before
# a daylight savings transition is one hour out. For example,
# at the end of US/Eastern daylight savings time, 01:00 EST
# occurs twice (once at 05:00 UTC and once at 06:00 UTC),
# whereas the first should actually be 01:00 EDT.
# Note that this bug is by design - by accepting this ambiguity
        # for one hour per year, an is_dst flag on datetime.time
# became unnecessary.
self._test_all(
self.transition_time - timedelta(hours=1), self.after
)
def testInstantBefore(self):
self._test_all(
self.transition_time - timedelta(seconds=1), self.after
)
def test_arithmetic(self):
# Reference implementation cannot handle this
pass
class LocalTestCase(unittest.TestCase):
def testLocalize(self):
loc_tz = pytz.timezone('Europe/Amsterdam')
loc_time = loc_tz.localize(datetime(1930, 5, 10, 0, 0, 0))
# Actually +00:19:32, but Python datetime rounds this
self.assertEqual(loc_time.strftime('%Z%z'), 'AMT+0020')
loc_time = loc_tz.localize(datetime(1930, 5, 20, 0, 0, 0))
# Actually +00:19:32, but Python datetime rounds this
self.assertEqual(loc_time.strftime('%Z%z'), 'NST+0120')
loc_time = loc_tz.localize(datetime(1940, 5, 10, 0, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'NET+0020')
loc_time = loc_tz.localize(datetime(1940, 5, 20, 0, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'CEST+0200')
loc_time = loc_tz.localize(datetime(2004, 2, 1, 0, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'CET+0100')
loc_time = loc_tz.localize(datetime(2004, 4, 1, 0, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'CEST+0200')
tz = pytz.timezone('Europe/Amsterdam')
loc_time = loc_tz.localize(datetime(1943, 3, 29, 1, 59, 59))
self.assertEqual(loc_time.strftime('%Z%z'), 'CET+0100')
# Switch to US
loc_tz = pytz.timezone('US/Eastern')
# End of DST ambiguity check
loc_time = loc_tz.localize(datetime(1918, 10, 27, 1, 59, 59), is_dst=1)
self.assertEqual(loc_time.strftime('%Z%z'), 'EDT-0400')
loc_time = loc_tz.localize(datetime(1918, 10, 27, 1, 59, 59), is_dst=0)
self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')
self.assertRaises(pytz.AmbiguousTimeError,
loc_tz.localize, datetime(1918, 10, 27, 1, 59, 59), is_dst=None
)
# Start of DST non-existent times
loc_time = loc_tz.localize(datetime(1918, 3, 31, 2, 0, 0), is_dst=0)
self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')
loc_time = loc_tz.localize(datetime(1918, 3, 31, 2, 0, 0), is_dst=1)
self.assertEqual(loc_time.strftime('%Z%z'), 'EDT-0400')
self.assertRaises(pytz.NonExistentTimeError,
loc_tz.localize, datetime(1918, 3, 31, 2, 0, 0), is_dst=None
)
# Weird changes - war time and peace time both is_dst==True
loc_time = loc_tz.localize(datetime(1942, 2, 9, 3, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'EWT-0400')
loc_time = loc_tz.localize(datetime(1945, 8, 14, 19, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'EPT-0400')
loc_time = loc_tz.localize(datetime(1945, 9, 30, 1, 0, 0), is_dst=1)
self.assertEqual(loc_time.strftime('%Z%z'), 'EPT-0400')
loc_time = loc_tz.localize(datetime(1945, 9, 30, 1, 0, 0), is_dst=0)
self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')
def testNormalize(self):
tz = pytz.timezone('US/Eastern')
dt = datetime(2004, 4, 4, 7, 0, 0, tzinfo=UTC).astimezone(tz)
dt2 = dt - timedelta(minutes=10)
self.assertEqual(
dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
'2004-04-04 02:50:00 EDT-0400'
)
dt2 = tz.normalize(dt2)
self.assertEqual(
dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
'2004-04-04 01:50:00 EST-0500'
)
def testPartialMinuteOffsets(self):
# utcoffset in Amsterdam was not a whole minute until 1937
# However, we fudge this by rounding them, as the Python
# datetime library
tz = pytz.timezone('Europe/Amsterdam')
utc_dt = datetime(1914, 1, 1, 13, 40, 28, tzinfo=UTC) # correct
utc_dt = utc_dt.replace(second=0) # But we need to fudge it
loc_dt = utc_dt.astimezone(tz)
self.assertEqual(
loc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
'1914-01-01 14:00:00 AMT+0020'
)
# And get back...
utc_dt = loc_dt.astimezone(UTC)
self.assertEqual(
utc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
'1914-01-01 13:40:00 UTC+0000'
)
def no_testCreateLocaltime(self):
# It would be nice if this worked, but it doesn't.
tz = pytz.timezone('Europe/Amsterdam')
dt = datetime(2004, 10, 31, 2, 0, 0, tzinfo=tz)
self.assertEqual(
dt.strftime(fmt),
'2004-10-31 02:00:00 CET+0100'
)
class CommonTimezonesTestCase(unittest.TestCase):
def test_bratislava(self):
# Bratislava is the default timezone for Slovakia, but our
# heuristics where not adding it to common_timezones. Ideally,
# common_timezones should be populated from zone.tab at runtime,
# but I'm hesitant to pay the startup cost as loading the list
# on demand whilst remaining backwards compatible seems
# difficult.
self.assertTrue('Europe/Bratislava' in pytz.common_timezones)
self.assertTrue('Europe/Bratislava' in pytz.common_timezones_set)
def test_us_eastern(self):
self.assertTrue('US/Eastern' in pytz.common_timezones)
self.assertTrue('US/Eastern' in pytz.common_timezones_set)
def test_belfast(self):
# Belfast uses London time.
self.assertTrue('Europe/Belfast' in pytz.all_timezones_set)
self.assertFalse('Europe/Belfast' in pytz.common_timezones)
self.assertFalse('Europe/Belfast' in pytz.common_timezones_set)
class BaseTzInfoTestCase:
'''Ensure UTC, StaticTzInfo and DstTzInfo work consistently.
These tests are run for each type of tzinfo.
'''
tz = None # override
tz_class = None # override
def test_expectedclass(self):
self.assertTrue(isinstance(self.tz, self.tz_class))
def test_fromutc(self):
# naive datetime.
dt1 = datetime(2011, 10, 31)
# localized datetime, same timezone.
dt2 = self.tz.localize(dt1)
# Both should give the same results. Note that the standard
# Python tzinfo.fromutc() only supports the second.
for dt in [dt1, dt2]:
loc_dt = self.tz.fromutc(dt)
loc_dt2 = pytz.utc.localize(dt1).astimezone(self.tz)
self.assertEqual(loc_dt, loc_dt2)
# localized datetime, different timezone.
new_tz = pytz.timezone('Europe/Paris')
self.assertTrue(self.tz is not new_tz)
dt3 = new_tz.localize(dt1)
self.assertRaises(ValueError, self.tz.fromutc, dt3)
def test_normalize(self):
other_tz = pytz.timezone('Europe/Paris')
self.assertTrue(self.tz is not other_tz)
dt = datetime(2012, 3, 26, 12, 0)
other_dt = other_tz.localize(dt)
local_dt = self.tz.normalize(other_dt)
self.assertTrue(local_dt.tzinfo is not other_dt.tzinfo)
self.assertNotEqual(
local_dt.replace(tzinfo=None), other_dt.replace(tzinfo=None))
def test_astimezone(self):
other_tz = pytz.timezone('Europe/Paris')
self.assertTrue(self.tz is not other_tz)
dt = datetime(2012, 3, 26, 12, 0)
other_dt = other_tz.localize(dt)
local_dt = other_dt.astimezone(self.tz)
self.assertTrue(local_dt.tzinfo is not other_dt.tzinfo)
self.assertNotEqual(
local_dt.replace(tzinfo=None), other_dt.replace(tzinfo=None))
class OptimizedUTCTestCase(unittest.TestCase, BaseTzInfoTestCase):
tz = pytz.utc
tz_class = tz.__class__
class LegacyUTCTestCase(unittest.TestCase, BaseTzInfoTestCase):
# Deprecated timezone, but useful for comparison tests.
tz = pytz.timezone('Etc/UTC')
tz_class = StaticTzInfo
class StaticTzInfoTestCase(unittest.TestCase, BaseTzInfoTestCase):
tz = pytz.timezone('GMT')
tz_class = StaticTzInfo
class DstTzInfoTestCase(unittest.TestCase, BaseTzInfoTestCase):
tz = pytz.timezone('Australia/Melbourne')
tz_class = DstTzInfo
def test_suite():
suite = unittest.TestSuite()
suite.addTest(doctest.DocTestSuite('pytz'))
suite.addTest(doctest.DocTestSuite('pytz.tzinfo'))
import test_tzinfo
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_tzinfo))
return suite
if __name__ == '__main__':
warnings.simplefilter("error") # Warnings should be fatal in tests.
unittest.main(defaultTest='test_suite')
| bsd-3-clause |
surajx/shadowsocks | tests/test.py | 1016 | 5029 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import sys
import os
import signal
import select
import time
import argparse
from subprocess import Popen, PIPE
python = ['python']
default_url = 'http://localhost/'
parser = argparse.ArgumentParser(description='test Shadowsocks')
parser.add_argument('-c', '--client-conf', type=str, default=None)
parser.add_argument('-s', '--server-conf', type=str, default=None)
parser.add_argument('-a', '--client-args', type=str, default=None)
parser.add_argument('-b', '--server-args', type=str, default=None)
parser.add_argument('--with-coverage', action='store_true', default=None)
parser.add_argument('--should-fail', action='store_true', default=None)
parser.add_argument('--tcp-only', action='store_true', default=None)
parser.add_argument('--url', type=str, default=default_url)
parser.add_argument('--dns', type=str, default='8.8.8.8')
config = parser.parse_args()
if config.with_coverage:
python = ['coverage', 'run', '-p', '-a']
client_args = python + ['shadowsocks/local.py', '-v']
server_args = python + ['shadowsocks/server.py', '-v']
if config.client_conf:
client_args.extend(['-c', config.client_conf])
if config.server_conf:
server_args.extend(['-c', config.server_conf])
else:
server_args.extend(['-c', config.client_conf])
if config.client_args:
client_args.extend(config.client_args.split())
if config.server_args:
server_args.extend(config.server_args.split())
else:
server_args.extend(config.client_args.split())
if config.url == default_url:
server_args.extend(['--forbidden-ip', ''])
p1 = Popen(server_args, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
p2 = Popen(client_args, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
p3 = None
p4 = None
p3_fin = False
p4_fin = False
# 1 shadowsocks started
# 2 curl started
# 3 curl finished
# 4 dig started
# 5 dig finished
stage = 1
try:
local_ready = False
server_ready = False
fdset = [p1.stdout, p2.stdout, p1.stderr, p2.stderr]
while True:
r, w, e = select.select(fdset, [], fdset)
if e:
break
for fd in r:
line = fd.readline()
if not line:
if stage == 2 and fd == p3.stdout:
stage = 3
if stage == 4 and fd == p4.stdout:
stage = 5
if bytes != str:
line = str(line, 'utf8')
sys.stderr.write(line)
if line.find('starting local') >= 0:
local_ready = True
if line.find('starting server') >= 0:
server_ready = True
if stage == 1:
time.sleep(2)
p3 = Popen(['curl', config.url, '-v', '-L',
'--socks5-hostname', '127.0.0.1:1081',
'-m', '15', '--connect-timeout', '10'],
stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
if p3 is not None:
fdset.append(p3.stdout)
fdset.append(p3.stderr)
stage = 2
else:
sys.exit(1)
if stage == 3 and p3 is not None:
fdset.remove(p3.stdout)
fdset.remove(p3.stderr)
r = p3.wait()
if config.should_fail:
if r == 0:
sys.exit(1)
else:
if r != 0:
sys.exit(1)
if config.tcp_only:
break
p4 = Popen(['socksify', 'dig', '@%s' % config.dns,
'www.google.com'],
stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
if p4 is not None:
fdset.append(p4.stdout)
fdset.append(p4.stderr)
stage = 4
else:
sys.exit(1)
if stage == 5:
r = p4.wait()
if config.should_fail:
if r == 0:
sys.exit(1)
print('test passed (expecting failure)')
else:
if r != 0:
sys.exit(1)
print('test passed')
break
finally:
for p in [p1, p2]:
try:
os.kill(p.pid, signal.SIGINT)
os.waitpid(p.pid, 0)
except OSError:
pass
| apache-2.0 |
MadRocker/experimental-2.6.29-MadRocker | arch/ia64/scripts/unwcheck.py | 916 | 1718 | #!/usr/bin/env python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
if len(sys.argv) != 2:
print "Usage: %s FILE" % sys.argv[0]
sys.exit(2)
readelf = os.getenv("READELF", "readelf")
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
if slots != rlen_sum:
global num_errors
num_errors += 1
if not func: func = "[%#x-%#x]" % (start, end)
print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
return
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
m = start_pattern.match(line)
if m:
check_func(func, slots, rlen_sum)
func = m.group(1)
start = long(m.group(2), 16)
end = long(m.group(3), 16)
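        # each 16-byte IA-64 instruction bundle encodes three slots,
        # hence the 3/16 scaling below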
slots = 3 * (end - start) / 16
rlen_sum = 0L
num_funcs += 1
else:
m = rlen_pattern.match(line)
if m:
rlen_sum += long(m.group(1))
check_func(func, slots, rlen_sum)
if num_errors == 0:
print "No errors detected in %u functions." % num_funcs
else:
if num_errors > 1:
err="errors"
else:
err="error"
print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
sys.exit(1)
| gpl-2.0 |
ParanoidAndroid/android_kernel_grouper | tools/perf/scripts/python/sctop.py | 11180 | 1924 | # system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
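    # syscalls is an autodict: looking up an unseen id yields an empty
    # sub-dict, so the first increment raises TypeError and we fall back
    # to initialising the count to 1.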
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
| gpl-2.0 |
dcroc16/skunk_works | google_appengine/lib/django-1.4/django/contrib/messages/api.py | 321 | 2952 | from django.contrib.messages import constants
from django.contrib.messages.storage import default_storage
__all__ = (
'add_message', 'get_messages',
'get_level', 'set_level',
'debug', 'info', 'success', 'warning', 'error',
)
class MessageFailure(Exception):
pass
def add_message(request, level, message, extra_tags='', fail_silently=False):
"""
Attempts to add a message to the request using the 'messages' app.
"""
if hasattr(request, '_messages'):
return request._messages.add(level, message, extra_tags)
if not fail_silently:
raise MessageFailure('You cannot add messages without installing '
'django.contrib.messages.middleware.MessageMiddleware')
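# Illustrative call sites (hedged sketch; ``request`` is assumed to be an
# HttpRequest that has passed through MessageMiddleware):
#
#   from django.contrib import messages
#   messages.add_message(request, messages.INFO, 'Profile updated.')
#   messages.info(request, 'Profile updated.')   # shortcut defined below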
def get_messages(request):
"""
Returns the message storage on the request if it exists, otherwise returns
an empty list.
"""
if hasattr(request, '_messages'):
return request._messages
else:
return []
def get_level(request):
"""
Returns the minimum level of messages to be recorded.
The default level is the ``MESSAGE_LEVEL`` setting. If this is not found,
the ``INFO`` level is used.
"""
if hasattr(request, '_messages'):
storage = request._messages
else:
storage = default_storage(request)
return storage.level
def set_level(request, level):
"""
Sets the minimum level of messages to be recorded, returning ``True`` if
the level was recorded successfully.
If set to ``None``, the default level will be used (see the ``get_level``
method).
"""
if not hasattr(request, '_messages'):
return False
request._messages.level = level
return True
def debug(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``DEBUG`` level.
"""
add_message(request, constants.DEBUG, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def info(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``INFO`` level.
"""
add_message(request, constants.INFO, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def success(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``SUCCESS`` level.
"""
add_message(request, constants.SUCCESS, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def warning(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``WARNING`` level.
"""
add_message(request, constants.WARNING, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def error(request, message, extra_tags='', fail_silently=False):
"""
Adds a message with the ``ERROR`` level.
"""
add_message(request, constants.ERROR, message, extra_tags=extra_tags,
fail_silently=fail_silently)
| mit |
mixisbad/qos | apps/qos/mininet-add-queues_fairness.py | 1 | 1833 | #! /usr/bin/python
# coding: utf-8
'''
Add queues to Mininet using ovs-vsctl and ovs-ofctl
@Author Ryan Wallner
'''
import os
import sys
import time
import subprocess
def find_all(a_str, sub_str):
start = 0
b_starts = []
while True:
start = a_str.find(sub_str, start)
if start == -1: return b_starts
#print start
b_starts.append(start)
start += 1
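# Illustrative behaviour (assumed inputs, not taken from a live ovs-vsctl
# dump). Matches can overlap, because the search resumes one character on:
#
#   find_all("aaaa", "aa")                       # -> [0, 1, 2]
#   find_all("Bridge s1\nBridge s2", "Bridge")   # -> [0, 10]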
if os.getuid() != 0:
print "Root permissions required"
exit()
cmd = "ovs-vsctl show"
p = os.popen(cmd).read()
#print p
brdgs = find_all(p, "Bridge")
print brdgs
switches = []
for bn in brdgs:
sw = p[(bn+8):(bn+10)]
switches.append(sw)
ports = find_all(p,"Port")
print ports
prts = []
for idx in ports:
    prt = p[(idx+6):(idx+13)]
    if '"' not in prt:
        print prt
        prts.append(prt)
config_strings = {}
for i in range(len(switches)):
    qos_cfg = ""
    sw = switches[i]
    for n in range(len(prts)):
        #verify correct order
        if switches[i] in prts[n]:
            #print switches[i]
            #print prts[n]
            port_name = prts[n]
            qos_cfg = qos_cfg + " -- set port %s qos=@defaultqos" % port_name
    config_strings[sw] = qos_cfg
#build queues per sw
print config_strings
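# For each switch, build a single ovs-vsctl call that attaches a linux-htb
# QoS capped at 3 Mbit/s with three queues: q0 (best effort up to 3 Mbit/s),
# q1 (pinned to 1 Mbit/s) and q2 (pinned to 4 Mbit/s).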
for sw in switches:
queuecmd = "sudo ovs-vsctl %s -- --id=@defaultqos " % config_strings[sw]
queuecmd = queuecmd + "create qos type=linux-htb other-config:max-rate=3000000 queues=0=@q0,1=@q1,2=@q2 -- "
queuecmd = queuecmd + "--id=@q0 create queue other-config:max-rate=3000000 -- "
queuecmd = queuecmd + "--id=@q1 create queue other-config:max-rate=1000000 other-config:min-rate=1000000 -- "
queuecmd = queuecmd + "--id=@q2 create queue other-config:max-rate=4000000 other-config:min-rate=4000000"
print queuecmd
q_res = os.popen(queuecmd).read()
print q_res
| apache-2.0 |
vikas1885/test1 | cms/djangoapps/contentstore/features/signup.py | 111 | 2821 | # pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from lettuce import world, step
from nose.tools import assert_true, assert_false # pylint: disable=no-name-in-module
@step('I fill in the registration form$')
def i_fill_in_the_registration_form(step):
def fill_in_reg_form():
register_form = world.css_find('form#register_form')
register_form.find_by_name('email').fill('robot+studio@edx.org')
register_form.find_by_name('password').fill('test')
register_form.find_by_name('username').fill('robot-studio')
register_form.find_by_name('name').fill('Robot Studio')
register_form.find_by_name('terms_of_service').click()
world.retry_on_exception(fill_in_reg_form)
@step('I press the Create My Account button on the registration form$')
def i_press_the_button_on_the_registration_form(step):
submit_css = 'form#register_form button#submit'
world.css_click(submit_css)
@step('I should see an email verification prompt')
def i_should_see_an_email_verification_prompt(step):
world.css_has_text('h1.page-header', u'Studio Home')
world.css_has_text('div.msg h3.title', u'We need to verify your email address')
@step(u'I fill in and submit the signin form$')
def i_fill_in_the_signin_form(step):
def fill_login_form():
login_form = world.browser.find_by_css('form#login_form')
login_form.find_by_name('email').fill('robot+studio@edx.org')
login_form.find_by_name('password').fill('test')
login_form.find_by_name('submit').click()
world.retry_on_exception(fill_login_form)
@step(u'I should( not)? see a login error message$')
def i_should_see_a_login_error(step, should_not_see):
if should_not_see:
# the login error may be absent or invisible. Check absence first,
# because css_visible will throw an exception if the element is not present
if world.is_css_present('div#login_error'):
assert_false(world.css_visible('div#login_error'))
else:
assert_true(world.css_visible('div#login_error'))
@step(u'I fill in and submit the signin form incorrectly$')
def i_goof_in_the_signin_form(step):
def fill_login_form():
login_form = world.browser.find_by_css('form#login_form')
login_form.find_by_name('email').fill('robot+studio@edx.org')
login_form.find_by_name('password').fill('oops')
login_form.find_by_name('submit').click()
world.retry_on_exception(fill_login_form)
@step(u'I edit the password field$')
def i_edit_the_password_field(step):
password_css = 'form#login_form input#password'
world.css_fill(password_css, 'test')
@step(u'I submit the signin form$')
def i_submit_the_signin_form(step):
submit_css = 'form#login_form button#submit'
world.css_click(submit_css)
| agpl-3.0 |
czgu/metaHack | env/lib/python2.7/site-packages/django/core/files/temp.py | 56 | 2879 | """
The temp module provides a NamedTemporaryFile that can be reopened in the same
process on any platform. Most platforms use the standard Python
tempfile.NamedTemporaryFile class, but Windows users are given a custom class.
This is needed because the Python implementation of NamedTemporaryFile uses the
O_TEMPORARY flag under Windows, which prevents the file from being reopened
if the same flag is not provided [1][2]. Note that this does not address the
more general issue of opening a file for writing and reading in multiple
processes in a manner that works across platforms.
Also note that the custom version of NamedTemporaryFile does not support the
full range of keyword arguments available in Python 2.6+ and 3.0+.
1: https://mail.python.org/pipermail/python-list/2005-December/336958.html
2: http://bugs.python.org/issue14243
"""
import os
import tempfile
from django.core.files.utils import FileProxyMixin
__all__ = ('NamedTemporaryFile', 'gettempdir',)
if os.name == 'nt':
class TemporaryFile(FileProxyMixin):
"""
Temporary file object constructor that supports reopening of the
temporary file in Windows.
Note that __init__() does not support the 'delete' keyword argument in
Python 2.6+, or the 'delete', 'buffering', 'encoding', or 'newline'
keyword arguments in Python 3.0+.
"""
def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='',
dir=None):
fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir)
self.name = name
self.file = os.fdopen(fd, mode, bufsize)
self.close_called = False
# Because close can be called during shutdown
# we need to cache os.unlink and access it
# as self.unlink only
unlink = os.unlink
def close(self):
if not self.close_called:
self.close_called = True
try:
self.file.close()
except (OSError, IOError):
pass
try:
self.unlink(self.name)
except (OSError):
pass
@property
def closed(self):
"""
This attribute needs to be accessible in certain situations,
because this class is supposed to mock the API of the class
tempfile.NamedTemporaryFile in the Python standard library.
"""
return self.file.closed
def __del__(self):
self.close()
def __enter__(self):
self.file.__enter__()
return self
def __exit__(self, exc, value, tb):
self.file.__exit__(exc, value, tb)
NamedTemporaryFile = TemporaryFile
else:
NamedTemporaryFile = tempfile.NamedTemporaryFile
gettempdir = tempfile.gettempdir
| apache-2.0 |
danvk/oldnyc | ocr/group_by_letters.py | 3 | 1608 | #!/usr/bin/env python
'''Use the transcriptions of backing images to group letters.
The output of this is ocr/images/by-letter/[a-zA-Z0-9,.]/*.png
'''
import csv
import errno
import re
import shutil
import sys
import os
# From http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python
def mkdir_p(path):
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        if exc.errno != errno.EEXIST:
            raise
def escape_letter(char):
    # The OS X file system is case insensitive.
    # This makes sure that 'a' and 'A' get mapped to different directories, and
    # that the file name is valid (i.e. doesn't have a slash in it).
    # str.replace() does not treat its argument as a regex, so use re.sub()
    # to actually strip the unsafe characters.
    safe_char = re.sub(r'[^a-zA-Z0-9.,"\'\[\]\(\)]', '', char)
    return str(ord(char)) + safe_char
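# Illustrative values (assuming the re.sub() fix above):
#   escape_letter('a')  -> '97a'
#   escape_letter('A')  -> '65A'   (distinct dirs despite a case-insensitive FS)
#   escape_letter('/')  -> '47'    (the unsafe character is stripped)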
for row in csv.DictReader(open('ocr/transcribe/output.csv')):
photo_id = row['photo_id']
num_cols = row['num_cols']
num_rows = row['num_rows']
transcription = row['transcription']
for j, line in enumerate(transcription.split('\n')):
for i, char in enumerate(line):
if char == '\r' or char == '\n' or char == ' ': continue
img = 'ocr/large-images/letters/%s-%02d-%02d.png' % (photo_id, j, i)
if not os.path.exists(img):
sys.stderr.write('Missing %s\n' % img)
continue
dest_dir = 'ocr/large-images/by-letter/%s' % escape_letter(char)
mkdir_p(dest_dir)
shutil.copy2(img, dest_dir)
# ocr/large-images/700078bu.jpg,700078f,40.8,67.6,0.8909002235013688,622,3448.959228515625,2453,3217.71240234375
| apache-2.0 |
pombredanne/0install | zeroinstall/gtkui/pygtkcompat.py | 3 | 15982 | # -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
#
# Copyright (C) 2011-2012 Johan Dahlin <johan@gnome.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
"""
PyGTK compatibility layer.
This modules goes a little bit longer to maintain PyGTK compatibility than
the normal overrides system.
It is recommended to not depend on this layer, but only use it as an
intermediate step when porting your application to PyGI.
Compatibility might never be 100%, but the aim is to make it possible to run
a well behaved PyGTK application mostly unmodified on top of PyGI.
"""
import sys
import warnings
try:
# Python 3
from collections import UserList
from imp import reload
UserList # pyflakes
except ImportError:
# Python 2 ships that in a different module
from UserList import UserList
UserList # pyflakes
import gi
from gi.repository import GObject
def _install_enums(module, dest=None, strip=''):
if dest is None:
dest = module
modname = dest.__name__.rsplit('.', 1)[1].upper()
for attr in dir(module):
try:
obj = getattr(module, attr, None)
except:
continue
try:
if issubclass(obj, GObject.GEnum):
for value, enum in obj.__enum_values__.items():
name = enum.value_name
name = name.replace(modname + '_', '')
if strip and name.startswith(strip):
name = name[len(strip):]
setattr(dest, name, enum)
except TypeError:
continue
try:
if issubclass(obj, GObject.GFlags):
for value, flag in obj.__flags_values__.items():
name = flag.value_names[-1].replace(modname + '_', '')
setattr(dest, name, flag)
except TypeError:
continue
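# Illustrative effect: after _install_enums(Gtk), the PyGTK-style constant
# Gtk.RESPONSE_OK is available alongside the GI-style Gtk.ResponseType.OK,
# because the GEnum value name 'GTK_RESPONSE_OK' has its 'GTK_' prefix
# stripped before being set on the destination module.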
def enable():
# gobject
from gi.repository import GLib
sys.modules['glib'] = GLib
# gobject
from gi.repository import GObject
sys.modules['gobject'] = GObject
#from gi._gobject import propertyhelper
#sys.modules['gobject.propertyhelper'] = propertyhelper
# gio
from gi.repository import Gio
sys.modules['gio'] = Gio
_unset = object()
def enable_gtk(version='2.0'):
# set the default encoding like PyGTK
reload(sys)
if sys.version_info < (3, 0):
sys.setdefaultencoding('utf-8')
# atk
gi.require_version('Atk', '1.0')
from gi.repository import Atk
sys.modules['atk'] = Atk
_install_enums(Atk)
# pango
gi.require_version('Pango', '1.0')
from gi.repository import Pango
sys.modules['pango'] = Pango
_install_enums(Pango)
# pangocairo
gi.require_version('PangoCairo', '1.0')
from gi.repository import PangoCairo
sys.modules['pangocairo'] = PangoCairo
# gdk
gi.require_version('Gdk', version)
gi.require_version('GdkPixbuf', '2.0')
from gi.repository import Gdk
from gi.repository import GdkPixbuf
sys.modules['gtk.gdk'] = Gdk
_install_enums(Gdk)
_install_enums(GdkPixbuf, dest=Gdk)
Gdk._2BUTTON_PRESS = 5
Gdk.BUTTON_PRESS = 4
Gdk.screen_get_default = Gdk.Screen.get_default
Gdk.Pixbuf = GdkPixbuf.Pixbuf
Gdk.pixbuf_new_from_file = GdkPixbuf.Pixbuf.new_from_file
Gdk.PixbufLoader = GdkPixbuf.PixbufLoader.new_with_type
orig_get_formats = GdkPixbuf.Pixbuf.get_formats
def get_formats():
formats = orig_get_formats()
result = []
def make_dict(format_):
result = {}
result['description'] = format_.get_description()
result['name'] = format_.get_name()
result['mime_types'] = format_.get_mime_types()
result['extensions'] = format_.get_extensions()
return result
for format_ in formats:
result.append(make_dict(format_))
return result
Gdk.pixbuf_get_formats = get_formats
orig_get_frame_extents = Gdk.Window.get_frame_extents
def get_frame_extents(window):
try:
try:
rect = Gdk.Rectangle(0, 0, 0, 0)
except TypeError:
rect = Gdk.Rectangle()
orig_get_frame_extents(window, rect)
except TypeError:
rect = orig_get_frame_extents(window)
return rect
Gdk.Window.get_frame_extents = get_frame_extents
orig_get_origin = Gdk.Window.get_origin
def get_origin(self):
return orig_get_origin(self)[1:]
Gdk.Window.get_origin = get_origin
Gdk.screen_width = Gdk.Screen.width
Gdk.screen_height = Gdk.Screen.height
# gtk
gi.require_version('Gtk', version)
from gi.repository import Gtk
sys.modules['gtk'] = Gtk
Gtk.gdk = Gdk
Gtk.pygtk_version = (2, 99, 0)
Gtk.gtk_version = (Gtk.MAJOR_VERSION,
Gtk.MINOR_VERSION,
Gtk.MICRO_VERSION)
_install_enums(Gtk)
# Action
def set_tool_item_type(menuaction, gtype):
warnings.warn('set_tool_item_type() is not supported',
DeprecationWarning, stacklevel=2)
Gtk.Action.set_tool_item_type = classmethod(set_tool_item_type)
# Alignment
orig_Alignment = Gtk.Alignment
class Alignment(orig_Alignment):
def __init__(self, xalign=0.0, yalign=0.0, xscale=0.0, yscale=0.0):
orig_Alignment.__init__(self)
self.props.xalign = xalign
self.props.yalign = yalign
self.props.xscale = xscale
self.props.yscale = yscale
Gtk.Alignment = Alignment
# Box
orig_pack_end = Gtk.Box.pack_end
def pack_end(self, child, expand=True, fill=True, padding=0):
orig_pack_end(self, child, expand, fill, padding)
Gtk.Box.pack_end = pack_end
orig_pack_start = Gtk.Box.pack_start
def pack_start(self, child, expand=True, fill=True, padding=0):
orig_pack_start(self, child, expand, fill, padding)
Gtk.Box.pack_start = pack_start
# TreeViewColumn
orig_tree_view_column_pack_end = Gtk.TreeViewColumn.pack_end
def tree_view_column_pack_end(self, cell, expand=True):
orig_tree_view_column_pack_end(self, cell, expand)
Gtk.TreeViewColumn.pack_end = tree_view_column_pack_end
orig_tree_view_column_pack_start = Gtk.TreeViewColumn.pack_start
def tree_view_column_pack_start(self, cell, expand=True):
orig_tree_view_column_pack_start(self, cell, expand)
Gtk.TreeViewColumn.pack_start = tree_view_column_pack_start
# TreeView
    def insert_column_with_attributes(view, position, title, cell, *args, **kwargs):
        # Not supported by the compatibility layer; installed as a no-op stub
        # so PyGTK code that calls it does not raise AttributeError.
        pass
Gtk.TreeView.insert_column_with_attributes = insert_column_with_attributes
# CellLayout
orig_cell_pack_end = Gtk.CellLayout.pack_end
def cell_pack_end(self, cell, expand=True):
orig_cell_pack_end(self, cell, expand)
Gtk.CellLayout.pack_end = cell_pack_end
orig_cell_pack_start = Gtk.CellLayout.pack_start
def cell_pack_start(self, cell, expand=True):
orig_cell_pack_start(self, cell, expand)
Gtk.CellLayout.pack_start = cell_pack_start
orig_set_cell_data_func = Gtk.CellLayout.set_cell_data_func
def set_cell_data_func(self, cell, func, user_data=_unset):
def callback(*args):
if args[-1] == _unset:
args = args[:-1]
return func(*args)
orig_set_cell_data_func(self, cell, callback, user_data)
Gtk.CellLayout.set_cell_data_func = set_cell_data_func
# CellRenderer
class GenericCellRenderer(Gtk.CellRenderer):
pass
Gtk.GenericCellRenderer = GenericCellRenderer
# ComboBox
orig_combo_row_separator_func = Gtk.ComboBox.set_row_separator_func
def combo_row_separator_func(self, func, user_data=_unset):
def callback(*args):
if args[-1] == _unset:
args = args[:-1]
return func(*args)
orig_combo_row_separator_func(self, callback, user_data)
Gtk.ComboBox.set_row_separator_func = combo_row_separator_func
# ComboBoxEntry
class ComboBoxEntry(Gtk.ComboBox):
def __init__(self, **kwds):
Gtk.ComboBox.__init__(self, has_entry=True, **kwds)
def set_text_column(self, text_column):
self.set_entry_text_column(text_column)
def get_text_column(self):
return self.get_entry_text_column()
Gtk.ComboBoxEntry = ComboBoxEntry
def combo_box_entry_new():
return Gtk.ComboBoxEntry()
Gtk.combo_box_entry_new = combo_box_entry_new
def combo_box_entry_new_with_model(model):
return Gtk.ComboBoxEntry(model=model)
Gtk.combo_box_entry_new_with_model = combo_box_entry_new_with_model
# Container
def install_child_property(container, flag, pspec):
warnings.warn('install_child_property() is not supported',
DeprecationWarning, stacklevel=2)
Gtk.Container.install_child_property = classmethod(install_child_property)
def new_text():
combo = Gtk.ComboBox()
model = Gtk.ListStore(str)
combo.set_model(model)
combo.set_entry_text_column(0)
return combo
Gtk.combo_box_new_text = new_text
def append_text(self, text):
model = self.get_model()
model.append([text])
Gtk.ComboBox.append_text = append_text
Gtk.expander_new_with_mnemonic = Gtk.Expander.new_with_mnemonic
Gtk.icon_theme_get_default = Gtk.IconTheme.get_default
Gtk.image_new_from_pixbuf = Gtk.Image.new_from_pixbuf
Gtk.image_new_from_stock = Gtk.Image.new_from_stock
Gtk.image_new_from_animation = Gtk.Image.new_from_animation
Gtk.image_new_from_icon_set = Gtk.Image.new_from_icon_set
Gtk.image_new_from_file = Gtk.Image.new_from_file
Gtk.settings_get_default = Gtk.Settings.get_default
Gtk.window_set_default_icon = Gtk.Window.set_default_icon
def clipboard_get(selection = "CLIPBOARD"):
if selection == "CLIPBOARD":
return Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
else:
return Gtk.Clipboard.get(Gdk.SELECTION_PRIMARY)
Gtk.clipboard_get = clipboard_get
#AccelGroup
Gtk.AccelGroup.connect_group = Gtk.AccelGroup.connect
#StatusIcon
Gtk.status_icon_position_menu = Gtk.StatusIcon.position_menu
Gtk.StatusIcon.set_tooltip = Gtk.StatusIcon.set_tooltip_text
# Scale
orig_HScale = Gtk.HScale
orig_VScale = Gtk.VScale
class HScale(orig_HScale):
def __init__(self, adjustment=None):
orig_HScale.__init__(self, adjustment=adjustment)
Gtk.HScale = HScale
class VScale(orig_VScale):
def __init__(self, adjustment=None):
orig_VScale.__init__(self, adjustment=adjustment)
Gtk.VScale = VScale
Gtk.stock_add = lambda items: None
# Widget
Gtk.widget_get_default_direction = Gtk.Widget.get_default_direction
orig_size_request = Gtk.Widget.size_request
def size_request(widget):
class SizeRequest(UserList):
def __init__(self, req):
self.height = req.height
self.width = req.width
UserList.__init__(self, [self.width, self.height])
return SizeRequest(orig_size_request(widget))
Gtk.Widget.size_request = size_request
Gtk.Widget.hide_all = Gtk.Widget.hide
def widget_set_flags(widget, flags):
assert flags == Gtk.CAN_DEFAULT, flags
widget.set_can_default(True)
Gtk.Widget.set_flags = widget_set_flags
class BaseGetter(object):
def __init__(self, context):
self.context = context
def __getitem__(self, state):
color = self.context.get_background_color(state)
return Gdk.Color(red=int(color.red * 65535),
green=int(color.green * 65535),
blue=int(color.blue * 65535))
class Styles(object):
def __init__(self, widget):
context = widget.get_style_context()
self.base = BaseGetter(context)
self.black = Gdk.Color(red=0, green=0, blue=0)
class StyleDescriptor(object):
def __get__(self, instance, class_):
return Styles(instance)
Gtk.Widget.style = StyleDescriptor()
# gtk.unixprint
class UnixPrint(object):
pass
unixprint = UnixPrint()
sys.modules['gtkunixprint'] = unixprint
# gtk.keysyms
class Keysyms(object):
pass
keysyms = Keysyms()
sys.modules['gtk.keysyms'] = keysyms
Gtk.keysyms = keysyms
for name in dir(Gdk):
if name.startswith('KEY_'):
target = name[4:]
if target[0] in '0123456789':
target = '_' + target
value = getattr(Gdk, name)
setattr(keysyms, target, value)
Gtk.TreePath.__len__ = lambda path: path.get_depth()
# TreeStore
Gtk.TreeStore.get_iter_root = Gtk.TreeStore.get_iter_first
def enable_vte():
gi.require_version('Vte', '0.0')
from gi.repository import Vte
sys.modules['vte'] = Vte
def enable_poppler():
gi.require_version('Poppler', '0.18')
from gi.repository import Poppler
sys.modules['poppler'] = Poppler
Poppler.pypoppler_version = (1, 0, 0)
def enable_webkit(version='1.0'):
gi.require_version('WebKit', version)
from gi.repository import WebKit
sys.modules['webkit'] = WebKit
WebKit.WebView.get_web_inspector = WebKit.WebView.get_inspector
def enable_gudev():
gi.require_version('GUdev', '1.0')
from gi.repository import GUdev
sys.modules['gudev'] = GUdev
def enable_gst():
gi.require_version('Gst', '0.10')
from gi.repository import Gst
sys.modules['gst'] = Gst
_install_enums(Gst)
Gst.registry_get_default = Gst.Registry.get_default
Gst.element_register = Gst.Element.register
Gst.element_factory_make = Gst.ElementFactory.make
Gst.caps_new_any = Gst.Caps.new_any
Gst.get_pygst_version = lambda: (0, 10, 19)
Gst.get_gst_version = lambda: (0, 10, 40)
from gi.repository import GstInterfaces
sys.modules['gst.interfaces'] = GstInterfaces
_install_enums(GstInterfaces)
from gi.repository import GstAudio
sys.modules['gst.audio'] = GstAudio
_install_enums(GstAudio)
from gi.repository import GstVideo
sys.modules['gst.video'] = GstVideo
_install_enums(GstVideo)
from gi.repository import GstBase
sys.modules['gst.base'] = GstBase
_install_enums(GstBase)
Gst.BaseTransform = GstBase.BaseTransform
Gst.BaseSink = GstBase.BaseSink
from gi.repository import GstController
sys.modules['gst.controller'] = GstController
_install_enums(GstController, dest=Gst)
from gi.repository import GstPbutils
sys.modules['gst.pbutils'] = GstPbutils
_install_enums(GstPbutils)
def enable_goocanvas():
gi.require_version('GooCanvas', '2.0')
from gi.repository import GooCanvas
sys.modules['goocanvas'] = GooCanvas
_install_enums(GooCanvas, strip='GOO_CANVAS_')
GooCanvas.ItemSimple = GooCanvas.CanvasItemSimple
GooCanvas.Item = GooCanvas.CanvasItem
GooCanvas.Image = GooCanvas.CanvasImage
GooCanvas.Group = GooCanvas.CanvasGroup
GooCanvas.Rect = GooCanvas.CanvasRect
| lgpl-2.1 |
nkrinner/nova | nova/tests/api/openstack/compute/contrib/test_evacuate.py | 16 | 10998 | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo.config import cfg
import webob
from nova.compute import api as compute_api
from nova.compute import vm_states
from nova import context
from nova import exception
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')
def fake_compute_api(*args, **kwargs):
return True
def fake_compute_api_get(self, context, instance_id):
# BAD_UUID is something that does not exist
if instance_id == 'BAD_UUID':
raise exception.InstanceNotFound(instance_id=instance_id)
else:
return {
'id': 1,
'uuid': instance_id,
'vm_state': vm_states.ACTIVE,
'task_state': None, 'host': 'host1'
}
def fake_service_get_by_compute_host(self, context, host):
if host == 'bad-host':
raise exception.ComputeHostNotFound(host=host)
else:
return {
'host_name': host,
'service': 'compute',
'zone': 'nova'
}
class EvacuateTest(test.NoDBTestCase):
_methods = ('resize', 'evacuate')
def setUp(self):
super(EvacuateTest, self).setUp()
self.stubs.Set(compute_api.API, 'get', fake_compute_api_get)
self.stubs.Set(compute_api.HostAPI, 'service_get_by_compute_host',
fake_service_get_by_compute_host)
self.UUID = uuid.uuid4()
for _method in self._methods:
self.stubs.Set(compute_api.API, _method, fake_compute_api)
def _get_admin_context(self, user_id='fake', project_id='fake'):
ctxt = context.get_admin_context()
ctxt.user_id = user_id
ctxt.project_id = project_id
return ctxt
def test_evacuate_with_valid_instance(self):
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
req = webob.Request.blank('/v2/fake/servers/%s/action' % self.UUID)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'my-host',
'onSharedStorage': 'false',
'adminPass': 'MyNewPass'
}
})
req.content_type = 'application/json'
res = req.get_response(app)
self.assertEqual(res.status_int, 200)
def test_evacuate_with_underscore_in_hostname(self):
ctxt = context.get_admin_context()
ctxt.user_id = 'fake'
ctxt.project_id = 'fake'
ctxt.is_admin = True
app = fakes.wsgi_app(fake_auth_context=ctxt)
req = webob.Request.blank('/v2/fake/servers/%s/action' % self.UUID)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
# NOTE: The hostname grammar in RFC952 does not allow for
# underscores in hostnames. However, we should test that it
# is supported because it sometimes occurs in real systems.
'host': 'underscore_hostname',
'onSharedStorage': 'false',
'adminPass': 'MyNewPass'
}
})
req.content_type = 'application/json'
res = req.get_response(app)
self.assertEqual(res.status_int, 200)
def test_evacuate_with_invalid_instance(self):
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
req = webob.Request.blank('/v2/fake/servers/%s/action' % 'BAD_UUID')
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'my-host',
'onSharedStorage': 'false',
'adminPass': 'MyNewPass'
}
})
req.content_type = 'application/json'
res = req.get_response(app)
self.assertEqual(res.status_int, 404)
def test_evacuate_with_active_service(self):
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
req = webob.Request.blank('/v2/fake/servers/%s/action' % self.UUID)
req.method = 'POST'
req.content_type = 'application/json'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'my-host',
'onSharedStorage': 'false',
'adminPass': 'MyNewPass'
}
})
def fake_evacuate(*args, **kwargs):
raise exception.ComputeServiceInUse("Service still in use")
self.stubs.Set(compute_api.API, 'evacuate', fake_evacuate)
res = req.get_response(app)
self.assertEqual(res.status_int, 400)
def test_evacuate_instance_with_no_target(self):
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
req = webob.Request.blank('/v2/fake/servers/%s/action' % self.UUID)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'onSharedStorage': 'False',
'adminPass': 'MyNewPass'
}
})
req.content_type = 'application/json'
res = req.get_response(app)
self.assertEqual(res.status_int, 400)
def test_evacuate_instance_without_on_shared_storage(self):
ctxt = context.get_admin_context()
ctxt.user_id = 'fake'
ctxt.project_id = 'fake'
ctxt.is_admin = True
app = fakes.wsgi_app(fake_auth_context=ctxt)
req = webob.Request.blank('/v2/fake/servers/%s/action' % self.UUID)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'my-host',
'adminPass': 'MyNewPass'
}
})
req.content_type = 'application/json'
res = req.get_response(app)
self.assertEqual(res.status_int, 400)
def test_evacuate_instance_with_bad_target(self):
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
req = webob.Request.blank('/v2/fake/servers/%s/action' % self.UUID)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'bad-host',
'onSharedStorage': 'false',
'adminPass': 'MyNewPass'
}
})
req.content_type = 'application/json'
res = req.get_response(app)
self.assertEqual(res.status_int, 404)
def test_evacuate_instance_with_target(self):
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
uuid1 = self.UUID
req = webob.Request.blank('/v2/fake/servers/%s/action' % uuid1)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'my-host',
'onSharedStorage': 'false',
'adminPass': 'MyNewPass'
}
})
req.content_type = 'application/json'
def fake_update(inst, context, instance,
task_state, expected_task_state):
return None
self.stubs.Set(compute_api.API, 'update', fake_update)
resp = req.get_response(app)
self.assertEqual(resp.status_int, 200)
resp_json = jsonutils.loads(resp.body)
self.assertEqual("MyNewPass", resp_json['adminPass'])
def test_evacuate_shared_and_pass(self):
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
uuid1 = self.UUID
req = webob.Request.blank('/v2/fake/servers/%s/action' % uuid1)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'my-host',
'onSharedStorage': 'True',
'adminPass': 'MyNewPass'
}
})
req.content_type = 'application/json'
def fake_update(inst, context, instance,
task_state, expected_task_state):
return None
self.stubs.Set(compute_api.API, 'update', fake_update)
res = req.get_response(app)
self.assertEqual(res.status_int, 400)
def test_evacuate_not_shared_pass_generated(self):
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
uuid1 = self.UUID
req = webob.Request.blank('/v2/fake/servers/%s/action' % uuid1)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'my-host',
'onSharedStorage': 'False',
}
})
req.content_type = 'application/json'
def fake_update(inst, context, instance,
task_state, expected_task_state):
return None
self.stubs.Set(compute_api.API, 'update', fake_update)
resp = req.get_response(app)
self.assertEqual(resp.status_int, 200)
resp_json = jsonutils.loads(resp.body)
self.assertEqual(CONF.password_length, len(resp_json['adminPass']))
def test_evacuate_shared(self):
ctxt = self._get_admin_context()
app = fakes.wsgi_app(fake_auth_context=ctxt)
uuid1 = self.UUID
req = webob.Request.blank('/v2/fake/servers/%s/action' % uuid1)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'my-host',
'onSharedStorage': 'True',
}
})
req.content_type = 'application/json'
def fake_update(inst, context, instance,
task_state, expected_task_state):
return None
self.stubs.Set(compute_api.API, 'update', fake_update)
res = req.get_response(app)
self.assertEqual(res.status_int, 200)
def test_not_admin(self):
ctxt = context.RequestContext('fake', 'fake', is_admin=False)
app = fakes.wsgi_app(fake_auth_context=ctxt)
uuid1 = self.UUID
req = webob.Request.blank('/v2/fake/servers/%s/action' % uuid1)
req.method = 'POST'
req.body = jsonutils.dumps({
'evacuate': {
'host': 'my-host',
'onSharedStorage': 'True',
}
})
req.content_type = 'application/json'
res = req.get_response(app)
self.assertEqual(res.status_int, 403)
| apache-2.0 |
TakayukiSakai/tensorflow | tensorflow/python/tools/strip_unused_test.py | 8 | 3204 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests the graph freezing tool."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from tensorflow.python.framework import test_util
from tensorflow.python.tools import strip_unused
class FreezeGraphTest(test_util.TensorFlowTestCase):
def testFreezeGraph(self):
input_graph_name = "input_graph.pb"
output_graph_name = "output_graph.pb"
    # We'll create an input graph that has a single constant containing 1.0,
    # subtracts 3.0 from it, and then multiplies the result by 2.
with tf.Graph().as_default():
constant_node = tf.constant(1.0, name="constant_node")
wanted_input_node = tf.sub(constant_node, 3.0, name="wanted_input_node")
output_node = tf.mul(wanted_input_node, 2.0, name="output_node")
tf.add(output_node, 2.0, name="later_node")
sess = tf.Session()
output = sess.run(output_node)
self.assertNear(-4.0, output, 0.00001)
tf.train.write_graph(sess.graph.as_graph_def(), self.get_temp_dir(),
input_graph_name)
# We save out the graph to disk, and then call the const conversion
# routine.
input_graph_path = os.path.join(self.get_temp_dir(), input_graph_name)
input_binary = False
input_node_names = "wanted_input_node"
output_node_names = "output_node"
output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
strip_unused.strip_unused(input_graph_path, input_binary, output_graph_path,
input_node_names, output_node_names,
tf.float32.as_datatype_enum)
    # Now we make sure the stripped graph contains only the expected nodes,
    # and that it still produces the expected result.
with tf.Graph().as_default():
output_graph_def = tf.GraphDef()
with open(output_graph_path, "rb") as f:
output_graph_def.ParseFromString(f.read())
_ = tf.import_graph_def(output_graph_def, name="")
self.assertEqual(3, len(output_graph_def.node))
for node in output_graph_def.node:
self.assertNotEqual("Add", node.op)
self.assertNotEqual("Sub", node.op)
with tf.Session() as sess:
input_node = sess.graph.get_tensor_by_name("wanted_input_node:0")
output_node = sess.graph.get_tensor_by_name("output_node:0")
output = sess.run(output_node, feed_dict={input_node: [10.0]})
self.assertNear(20.0, output, 0.00001)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
ar45/django | tests/flatpages_tests/test_forms.py | 155 | 4568 | from __future__ import unicode_literals
from django.conf import settings
from django.contrib.flatpages.forms import FlatpageForm
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import Site
from django.test import TestCase, modify_settings, override_settings
from django.utils import translation
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.flatpages', ]})
@override_settings(SITE_ID=1)
class FlatpageAdminFormTests(TestCase):
@classmethod
def setUpTestData(cls):
# don't use the manager because we want to ensure the site exists
# with pk=1, regardless of whether or not it already exists.
cls.site1 = Site(pk=1, domain='example.com', name='example.com')
cls.site1.save()
def setUp(self):
# Site fields cache needs to be cleared after flatpages is added to
# INSTALLED_APPS
Site._meta._expire_cache()
self.form_data = {
'title': "A test page",
'content': "This is a test",
'sites': [settings.SITE_ID],
}
def test_flatpage_admin_form_url_validation(self):
"The flatpage admin form correctly validates urls"
self.assertTrue(FlatpageForm(data=dict(url='/new_flatpage/', **self.form_data)).is_valid())
self.assertTrue(FlatpageForm(data=dict(url='/some.special~chars/', **self.form_data)).is_valid())
self.assertTrue(FlatpageForm(data=dict(url='/some.very_special~chars-here/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a space/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a % char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a ! char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a & char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a ? char/', **self.form_data)).is_valid())
def test_flatpage_requires_leading_slash(self):
form = FlatpageForm(data=dict(url='no_leading_slash/', **self.form_data))
with translation.override('en'):
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['url'], ["URL is missing a leading slash."])
@override_settings(APPEND_SLASH=True,
MIDDLEWARE_CLASSES=['django.middleware.common.CommonMiddleware'])
def test_flatpage_requires_trailing_slash_with_append_slash(self):
form = FlatpageForm(data=dict(url='/no_trailing_slash', **self.form_data))
with translation.override('en'):
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['url'], ["URL is missing a trailing slash."])
@override_settings(APPEND_SLASH=False,
MIDDLEWARE_CLASSES=['django.middleware.common.CommonMiddleware'])
def test_flatpage_doesnt_requires_trailing_slash_without_append_slash(self):
form = FlatpageForm(data=dict(url='/no_trailing_slash', **self.form_data))
self.assertTrue(form.is_valid())
def test_flatpage_admin_form_url_uniqueness_validation(self):
"The flatpage admin form correctly enforces url uniqueness among flatpages of the same site"
data = dict(url='/myflatpage1/', **self.form_data)
FlatpageForm(data=data).save()
f = FlatpageForm(data=data)
with translation.override('en'):
self.assertFalse(f.is_valid())
self.assertEqual(
f.errors,
{'__all__': ['Flatpage with url /myflatpage1/ already exists for site example.com']})
def test_flatpage_admin_form_edit(self):
"""
Existing flatpages can be edited in the admin form without triggering
the url-uniqueness validation.
"""
existing = FlatPage.objects.create(
url="/myflatpage1/", title="Some page", content="The content")
existing.sites.add(settings.SITE_ID)
data = dict(url='/myflatpage1/', **self.form_data)
f = FlatpageForm(data=data, instance=existing)
self.assertTrue(f.is_valid(), f.errors)
updated = f.save()
self.assertEqual(updated.title, "A test page")
def test_flatpage_nosites(self):
data = dict(url='/myflatpage1/', **self.form_data)
data.update({'sites': ''})
f = FlatpageForm(data=data)
self.assertFalse(f.is_valid())
self.assertEqual(
f.errors,
{'sites': [translation.ugettext('This field is required.')]})
| bsd-3-clause |
kuriositeetti/wamp-tikki | venv/lib/python2.7/site-packages/pip/cmdoptions.py | 117 | 11475 | """
shared options and groups
The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parse's general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.
"""
from __future__ import absolute_import
import copy
from optparse import OptionGroup, SUPPRESS_HELP, Option
from pip.index import PyPI
from pip.locations import CA_BUNDLE_PATH, USER_CACHE_DIR, src_prefix
def make_option_group(group, parser):
"""
Return an OptionGroup object
group -- assumed to be dict with 'name' and 'options' keys
parser -- an optparse Parser
"""
option_group = OptionGroup(parser, group['name'])
for option in group['options']:
option_group.add_option(option.make())
return option_group
class OptionMaker(object):
"""Class that stores the args/kwargs that would be used to make an Option,
for making them later, and uses deepcopy's to reset state."""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def make(self):
args_copy = copy.deepcopy(self.args)
kwargs_copy = copy.deepcopy(self.kwargs)
return Option(*args_copy, **kwargs_copy)
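# Illustrative consumer code (hedged sketch; ``parser`` is assumed to be an
# optparse-compatible parser such as pip's own ConfigOptionParser, and
# index_group is the group dict defined at the bottom of this module):
#
#   opt_group = make_option_group(index_group, parser)
#   parser.add_option_group(opt_group)
#   parser.add_option(no_input.make())   # standalone options are built per parse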
###########
# options #
###########
help_ = OptionMaker(
'-h', '--help',
dest='help',
action='help',
help='Show help.')
isolated_mode = OptionMaker(
"--isolated",
dest="isolated_mode",
action="store_true",
default=False,
help=(
"Run pip in an isolated mode, ignoring environment variables and user "
"configuration."
),
)
require_virtualenv = OptionMaker(
# Run only if inside a virtualenv, bail if not.
'--require-virtualenv', '--require-venv',
dest='require_venv',
action='store_true',
default=False,
help=SUPPRESS_HELP)
verbose = OptionMaker(
'-v', '--verbose',
dest='verbose',
action='count',
default=0,
help='Give more output. Option is additive, and can be used up to 3 times.'
)
version = OptionMaker(
'-V', '--version',
dest='version',
action='store_true',
help='Show version and exit.')
quiet = OptionMaker(
'-q', '--quiet',
dest='quiet',
action='count',
default=0,
help='Give less output.')
log = OptionMaker(
"--log", "--log-file", "--local-log",
dest="log",
metavar="path",
help="Path to a verbose appending log."
)
log_explicit_levels = OptionMaker(
    # Writes the log levels explicitly to the log
'--log-explicit-levels',
dest='log_explicit_levels',
action='store_true',
default=False,
help=SUPPRESS_HELP)
no_input = OptionMaker(
# Don't ask for input
'--no-input',
dest='no_input',
action='store_true',
default=False,
help=SUPPRESS_HELP)
proxy = OptionMaker(
'--proxy',
dest='proxy',
type='str',
default='',
help="Specify a proxy in the form [user:passwd@]proxy.server:port.")
retries = OptionMaker(
'--retries',
dest='retries',
type='int',
default=5,
help="Maximum number of retries each connection should attempt "
"(default %default times).")
timeout = OptionMaker(
'--timeout', '--default-timeout',
metavar='sec',
dest='timeout',
type='float',
default=15,
help='Set the socket timeout (default %default seconds).')
default_vcs = OptionMaker(
# The default version control system for editables, e.g. 'svn'
'--default-vcs',
dest='default_vcs',
type='str',
default='',
help=SUPPRESS_HELP)
skip_requirements_regex = OptionMaker(
# A regex to be used to skip requirements
'--skip-requirements-regex',
dest='skip_requirements_regex',
type='str',
default='',
help=SUPPRESS_HELP)
exists_action = OptionMaker(
# Option when path already exist
'--exists-action',
dest='exists_action',
type='choice',
choices=['s', 'i', 'w', 'b'],
default=[],
action='append',
metavar='action',
help="Default action when a path already exists: "
"(s)witch, (i)gnore, (w)ipe, (b)ackup.")
cert = OptionMaker(
'--cert',
dest='cert',
type='str',
default=CA_BUNDLE_PATH,
metavar='path',
help="Path to alternate CA bundle.")
client_cert = OptionMaker(
'--client-cert',
dest='client_cert',
type='str',
default=None,
metavar='path',
help="Path to SSL client certificate, a single file containing the "
"private key and the certificate in PEM format.")
index_url = OptionMaker(
'-i', '--index-url', '--pypi-url',
dest='index_url',
metavar='URL',
default=PyPI.simple_url,
help='Base URL of Python Package Index (default %default).')
extra_index_url = OptionMaker(
'--extra-index-url',
dest='extra_index_urls',
metavar='URL',
action='append',
default=[],
help='Extra URLs of package indexes to use in addition to --index-url.')
no_index = OptionMaker(
'--no-index',
dest='no_index',
action='store_true',
default=False,
help='Ignore package index (only looking at --find-links URLs instead).')
find_links = OptionMaker(
'-f', '--find-links',
dest='find_links',
action='append',
default=[],
metavar='url',
help="If a url or path to an html file, then parse for links to archives. "
"If a local path or file:// url that's a directory, then look for "
"archives in the directory listing.")
# TODO: Remove after 6.0
use_mirrors = OptionMaker(
'-M', '--use-mirrors',
dest='use_mirrors',
action='store_true',
default=False,
help=SUPPRESS_HELP)
# TODO: Remove after 6.0
mirrors = OptionMaker(
'--mirrors',
dest='mirrors',
metavar='URL',
action='append',
default=[],
help=SUPPRESS_HELP)
allow_external = OptionMaker(
"--allow-external",
dest="allow_external",
action="append",
default=[],
metavar="PACKAGE",
help="Allow the installation of a package even if it is externally hosted",
)
allow_all_external = OptionMaker(
"--allow-all-external",
dest="allow_all_external",
action="store_true",
default=False,
help="Allow the installation of all packages that are externally hosted",
)
trusted_host = OptionMaker(
"--trusted-host",
dest="trusted_hosts",
action="append",
metavar="HOSTNAME",
default=[],
help="Mark this host as trusted, even though it does not have valid or "
"any HTTPS.",
)
# Remove after 7.0
no_allow_external = OptionMaker(
"--no-allow-external",
dest="allow_all_external",
action="store_false",
default=False,
help=SUPPRESS_HELP,
)
# Remove --allow-insecure after 7.0
allow_unsafe = OptionMaker(
"--allow-unverified", "--allow-insecure",
dest="allow_unverified",
action="append",
default=[],
metavar="PACKAGE",
help="Allow the installation of a package even if it is hosted "
"in an insecure and unverifiable way",
)
# Remove after 7.0
no_allow_unsafe = OptionMaker(
"--no-allow-insecure",
dest="allow_all_insecure",
action="store_false",
default=False,
help=SUPPRESS_HELP
)
# Remove after 1.5
process_dependency_links = OptionMaker(
"--process-dependency-links",
dest="process_dependency_links",
action="store_true",
default=False,
help="Enable the processing of dependency links.",
)
requirements = OptionMaker(
'-r', '--requirement',
dest='requirements',
action='append',
default=[],
metavar='file',
help='Install from the given requirements file. '
'This option can be used multiple times.')
editable = OptionMaker(
'-e', '--editable',
dest='editables',
action='append',
default=[],
metavar='path/url',
help=('Install a project in editable mode (i.e. setuptools '
'"develop mode") from a local project path or a VCS url.'),
)
src = OptionMaker(
'--src', '--source', '--source-dir', '--source-directory',
dest='src_dir',
metavar='dir',
default=src_prefix,
help='Directory to check out editable projects into. '
'The default in a virtualenv is "<venv path>/src". '
'The default for global installs is "<current dir>/src".'
)
use_wheel = OptionMaker(
'--use-wheel',
dest='use_wheel',
action='store_true',
help=SUPPRESS_HELP,
)
no_use_wheel = OptionMaker(
'--no-use-wheel',
dest='use_wheel',
action='store_false',
default=True,
    help=('Do not find and prefer wheel archives when searching indexes and '
'find-links locations.'),
)
cache_dir = OptionMaker(
"--cache-dir",
dest="cache_dir",
default=USER_CACHE_DIR,
metavar="dir",
help="Store the cache data in <dir>."
)
no_cache = OptionMaker(
"--no-cache-dir",
dest="cache_dir",
action="store_false",
help="Disable the cache.",
)
download_cache = OptionMaker(
'--download-cache',
dest='download_cache',
default=None,
help=SUPPRESS_HELP)
no_deps = OptionMaker(
'--no-deps', '--no-dependencies',
dest='ignore_dependencies',
action='store_true',
default=False,
help="Don't install package dependencies.")
build_dir = OptionMaker(
'-b', '--build', '--build-dir', '--build-directory',
dest='build_dir',
metavar='dir',
help='Directory to unpack packages into and build in.'
)
install_options = OptionMaker(
'--install-option',
dest='install_options',
action='append',
metavar='options',
help="Extra arguments to be supplied to the setup.py install "
"command (use like --install-option=\"--install-scripts=/usr/local/"
"bin\"). Use multiple --install-option options to pass multiple "
"options to setup.py install. If you are using an option with a "
"directory path, be sure to use absolute path.")
global_options = OptionMaker(
'--global-option',
dest='global_options',
action='append',
metavar='options',
help="Extra global options to be supplied to the setup.py "
"call before the install command.")
no_clean = OptionMaker(
'--no-clean',
action='store_true',
default=False,
help="Don't clean up build directories.")
disable_pip_version_check = OptionMaker(
"--disable-pip-version-check",
dest="disable_pip_version_check",
action="store_true",
default=False,
help="Don't periodically check PyPI to determine whether a new version "
"of pip is available for download. Implied with --no-index.")
##########
# groups #
##########
general_group = {
'name': 'General Options',
'options': [
help_,
isolated_mode,
require_virtualenv,
verbose,
version,
quiet,
log,
log_explicit_levels,
no_input,
proxy,
retries,
timeout,
default_vcs,
skip_requirements_regex,
exists_action,
trusted_host,
cert,
client_cert,
cache_dir,
no_cache,
disable_pip_version_check,
]
}
index_group = {
'name': 'Package Index Options',
'options': [
index_url,
extra_index_url,
no_index,
find_links,
use_mirrors,
mirrors,
allow_external,
allow_all_external,
no_allow_external,
allow_unsafe,
no_allow_unsafe,
process_dependency_links,
]
}
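# Usage sketch (assumption: OptionMaker yields optparse-compatible option
# factories, as the call pattern above suggests). A hypothetical helper could
# turn one of the group dicts above into an optparse OptionGroup:
#
#     from optparse import OptionGroup
#
#     def make_option_group(group, parser):
#         option_group = OptionGroup(parser, group['name'])
#         for option in group['options']:
#             option_group.add_option(option())
#         return option_group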
| mit |
potatolondon/django-nonrel-1-4 | django/contrib/localflavor/it/util.py | 436 | 1807 | from django.utils.encoding import smart_str, smart_unicode
def ssn_check_digit(value):
"Calculate Italian social security number check digit."
ssn_even_chars = {
'0': 0, '1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8,
'9': 9, 'A': 0, 'B': 1, 'C': 2, 'D': 3, 'E': 4, 'F': 5, 'G': 6, 'H': 7,
'I': 8, 'J': 9, 'K': 10, 'L': 11, 'M': 12, 'N': 13, 'O': 14, 'P': 15,
'Q': 16, 'R': 17, 'S': 18, 'T': 19, 'U': 20, 'V': 21, 'W': 22, 'X': 23,
'Y': 24, 'Z': 25
}
ssn_odd_chars = {
'0': 1, '1': 0, '2': 5, '3': 7, '4': 9, '5': 13, '6': 15, '7': 17, '8':
19, '9': 21, 'A': 1, 'B': 0, 'C': 5, 'D': 7, 'E': 9, 'F': 13, 'G': 15,
'H': 17, 'I': 19, 'J': 21, 'K': 2, 'L': 4, 'M': 18, 'N': 20, 'O': 11,
'P': 3, 'Q': 6, 'R': 8, 'S': 12, 'T': 14, 'U': 16, 'V': 10, 'W': 22,
'X': 25, 'Y': 24, 'Z': 23
}
# Chars from 'A' to 'Z'
ssn_check_digits = [chr(x) for x in range(65, 91)]
ssn = value.upper()
total = 0
for i in range(0, 15):
try:
if i % 2 == 0:
total += ssn_odd_chars[ssn[i]]
else:
total += ssn_even_chars[ssn[i]]
except KeyError:
msg = "Character '%(char)s' is not allowed." % {'char': ssn[i]}
raise ValueError(msg)
return ssn_check_digits[total % 26]
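# Usage sketch (illustrative helper, not part of the original module):
# validate a full 16-character fiscal code by recomputing the check digit
# from its first 15 characters.
def is_valid_ssn(code):
    """Return True if the 16-char fiscal code's check digit is consistent."""
    code = code.upper()
    if len(code) != 16:
        return False
    try:
        return ssn_check_digit(code[:15]) == code[15]
    except ValueError:
        return False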
def vat_number_check_digit(vat_number):
"Calculate Italian VAT number check digit."
normalized_vat_number = smart_str(vat_number).zfill(10)
total = 0
for i in range(0, 10, 2):
total += int(normalized_vat_number[i])
for i in range(1, 11, 2):
        quotient, remainder = divmod(int(normalized_vat_number[i]) * 2, 10)
total += quotient + remainder
return smart_unicode((10 - total % 10) % 10)
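# Usage sketch (illustrative helper, not part of the original module): an
# 11-digit VAT number is internally consistent when its final digit matches
# the check digit computed from the first ten.
def is_valid_vat(vat):
    """Return True if an 11-digit VAT number's check digit is consistent."""
    vat = smart_str(vat).zfill(11)
    return smart_unicode(vat[-1]) == vat_number_check_digit(vat[:10])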
| bsd-3-clause |
mpetyx/pyrif | 3rdPartyLibraries/FuXi-master/test/suite.py | 1 | 12559 | #!/usr/bin/env python
'''
Created on Sep 16, 2010
@author: onewnan
Run all FuXi tests.
Note the discovery mechanisms require sources on the python path.
If a coverage tool is installed this module can help generate coverage
statistics, e.g.,
coverage erase
coverage run --branch --source="FuXi" suite.py --variants --unittest2
coverage report
'''
import doctest
import unittest
import additionalDLPTests
import imp
# import FuXi
import os
import rdflib
import sys
import test_builtin_ordering
import test_network_reset
import test_superproperty_entailment
import testExistentialInHead
import testOWL
import testReteAction
import testSkolemization
import traceback
import types
from unittest import TestResult
import logging
log = logging.getLogger(__name__)
FILES_TO_IGNORE = ('suite.py', 'CommandLine.py')
VISUAL_SEPARATOR = " ===================================="
modsWithLoadErrors = []
def moduleIterator(root):
'''
Successively return all modules in all packages beneath root.
The parameter 'root' may be either a module or a module's path name.
'''
# OTHER_EXTENSIONS = ('.pyo', '.pyc')
stack = []
if isinstance(root, types.ModuleType):
packageRoute = root.__path__
file = False
name = root.__name__
else: # string
try:
name = root
file, packageRoute, description = imp.find_module(name)
package = imp.load_module(root, file, packageRoute, description)
except ImportError, err:
print('ImportError:', err)
return
if file:
            file.close()
            raise ImportError('Not a package: %r' % name)
else:
stack.append((name, packageRoute, package))
while stack:
name, packageRoute, package = stack.pop()
packagePath = package.__path__
for entry in os.listdir(packageRoute):
if not entry:
continue
if entry in FILES_TO_IGNORE:
pass
elif entry.endswith('.py'):
modName = entry[:-3]
try:
file, pathName, description = imp.find_module(
modName, packagePath)
qualName = name + '.' + modName
mod = imp.load_module(
qualName, file, pathName, description)
yield (entry, mod)
except:
fullPath = packageRoute + os.sep + modName
modsWithLoadErrors.append(modName)
print("ERROR -- exception loading " + fullPath)
if traceback:
traceback.print_exc()
finally:
file.close()
elif entry.find('.') != -1:
pass
else:
newRoute = packageRoute + os.sep + entry
file, newPath, description = imp.find_module(
entry, packagePath)
#mod = imp.load_module("__main__", file, pathName, description)
qualName = name + "." + entry
mod = imp.load_module(qualName, file, newPath, description)
stack.append((qualName, newRoute, mod))
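# Minimal usage sketch (assumes the 'FuXi' package is importable from the
# python path, as the module docstring requires):
#
#     for entry, mod in moduleIterator('FuXi'):
#         print(entry, mod.__name__)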
def runSuite(title, suite, summary):
'''
Run the indicated test suite, accumulating statistics and the title in the
summary.
The summary is a list of strings.
'''
splash(title)
sys.argv = [""]
results = TestResult()
suite.run(results)
summary.append(("Summary of %s " % (title)) + VISUAL_SEPARATOR)
summary.append("* Ran %i tests with %i failures and %i errors." % (
results.testsRun,
results.failures.__len__(),
results.errors.__len__()))
if results.failures.__len__():
summary.append("* Failed tests were:")
for test, trace in results.failures:
print("FAIL: ", test, "(unit test)")
print(trace)
summary.append(repr(test))
if results.errors.__len__():
summary.append("* Tests in error were:")
for test, trace in results.errors:
print("ERROR: ", test, "(unit test)")
print(trace)
summary.append(repr(test))
return summary
def extractEmbeddedSuite(root):
'''
Use unittest2 to extract a suite of embedded unit tests from root.
The parameter 'root' can be either a module or the path name of a module.
'''
if isinstance(root, types.ModuleType):
packageRoute = root.__path__[0]
file = False
packageName = root.__name__
else:
packageName = root
file, packageRoute, description = imp.find_module(packageName)
top_level_dir = os.path.abspath(packageRoute + "/..")
loader = unittest2.TestLoader()
suite = loader.discover(
packageRoute, top_level_dir=top_level_dir, pattern="*.py")
return suite
def runEmbeddedTests(root, summary, usingUnittest2):
'''
For each module nested under root, run its unit- and doc-tests.
The root parameter may be a module or a module name.
'''
if isinstance(root, types.ModuleType):
packageRoute = root.__path__[0]
file = False
packageName = root.__name__
else:
packageName = root
file, packageRoute, description = imp.find_module(packageName)
if not usingUnittest2:
if options.verbose:
print("Running test functions rather than directly running unit tests.")
print("For better test results, install unittest2 and execute with flag --unittest2.")
print("Please disregard warnings of the form 'running xx test function ... \\n*** DocTestRunner.merge: yy in both testers; summing outcomes.)")
print("Refer instead to the testmod results for these tests.")
else:
if options.verbose:
print("Running unit tests directly rather than invoking test functions.")
embeddedSuite = extractEmbeddedSuite(root)
title = "Embedded " + packageName + " Unit Tests "
runSuite(title, embeddedSuite, summary)
sys.argv = [""]
# modsWithLoadErrors = []
modsWithDoctestFailures = []
modsWithDoctestErrors = []
modsWithTestFunctionFailures = []
totalDoctests = 0
totalDoctestFailures = 0
totalTestFunctionsRun = 0
for entry, mod in moduleIterator(packageName):
if not entry in FILES_TO_IGNORE:
if not usingUnittest2 and "test" in mod.__dict__:
try:
totalTestFunctionsRun += 1
if options.verbose:
print("running %s's test function" % (entry))
mod.__dict__["test"]()
except:
modsWithTestFunctionFailures.append(mod)
print("ERROR--exception running unittest " + entry)
if traceback:
traceback.print_exc()
sys.__stderr__.flush()
sys.__stdout__.flush()
tests = 0
try:
if options.verbose:
print("running %s using doctest.testmod" % (entry))
failures, tests = doctest.testmod(mod)
except:
modsWithDoctestErrors.append(mod)
print("ERROR--exception running doctest for", entry)
traceback.print_exc()
sys.__stderr__.flush()
if tests > 0:
totalDoctests += tests
totalDoctestFailures += failures
if failures > 0:
modsWithDoctestFailures.append(entry)
if usingUnittest2:
title = "Summary of " + packageName + " Doctests"
else:
title = "Summary of Embedded " + packageName + " Tests "
summary.append(title + VISUAL_SEPARATOR)
# summary.append("* %i mods with load errors:" % (modsWithLoadErrors.__len__()))
# summary.append(modsWithLoadErrors)
summary.append("* %i mods with doctest failures:" % (modsWithDoctestFailures.__len__()) )
summary.append(modsWithDoctestFailures)
summary.append("* %i mods with doctest errors: " % (modsWithDoctestErrors.__len__()))
summary.append(modsWithDoctestErrors)
summary.append("* Total doctests run %i: " % (totalDoctests))
if not usingUnittest2:
summary.append("* Total attempted test functions: %i" % (totalTestFunctionsRun))
summary.append("* %i mods with test function failures: " % (modsWithTestFunctionFailures.__len__()))
summary.append(modsWithTestFunctionFailures)
return summary
def suite():
'''
Return a TestSuite containing all tests from the test directory.
'''
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(additionalDLPTests.AdditionalDescriptionLogicTests,'test'))
suite.addTest(unittest.makeSuite(test_builtin_ordering.URIRefStringStartsWith,'test'))
suite.addTest(unittest.makeSuite(test_network_reset.NetworkReset,'test'))
suite.addTest(unittest.makeSuite(test_superproperty_entailment.test_superproperty_entailment,'test'))
suite.addTest(unittest.makeSuite(testExistentialInHead.ExistentialInHeadTest,'test'))
suite.addTest(unittest.makeSuite(testReteAction.ReteActionTest,'test'))
suite.addTest(unittest.makeSuite(testSkolemization.UnionSkolemizedTest,'test'))
return suite
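# Usage sketch: the aggregated suite can also be run directly with the stock
# unittest runner, outside the runSuite() bookkeeping used in __main__ below:
#
#     unittest.TextTestRunner(verbosity=2).run(suite())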
def splash(txt):
'''
Print a separator line indicating what part of the suite is running.
'''
print("\nRunning " + txt + VISUAL_SEPARATOR)
if __name__ == "__main__":
from optparse import OptionParser
op = OptionParser('usage: %prog [options]')
op.add_option('--variants',
action='store_true',
default=False,
help='Whether to run testOwl three ways or just bottom up ')
op.add_option('--unittest2',
action='store_true',
default=False,
help='Whether to use unittest2 discovery. ')
op.add_option('--rdftests',
action='store_true',
default=False,
help="Whether to run rdflib's (in addition to FuXi's) embedded tests. ")
op.add_option('--verbose',
action='store_true',
default=False,
help="Whether to include informational messages besides summaries. ")
(options, facts) = op.parse_args()
summary = []
flagCount = 0
if options.variants:
flagCount = 1
flags = "--variants"
else:
flagCount = 0
flags = ""
usingUnittest2 = False
if options.unittest2:
try:
import unittest2
usingUnittest2 = True
            flagCount += 1
flags = flags + " --unittest2"
except:
print("Unittest2 libraries not found, --unittest2 flag ignored.")
pass
rdftests = False
if options.rdftests:
rdfpath = rdflib.__path__[0]
if os.path.isdir(rdfpath):
rdftests = True
            flagCount += 1
flags = flags + " --rdftests"
else:
print("--rdftests flag is ignored since", rdfpath, "is not a directory.",)
if options.verbose:
print("Running suite.py with %i flags set: %s" % (flagCount, flags))
if usingUnittest2 and options.verbose:
print("Using unittest2 libraries. CAUTION: these may not be compatible with your debugger. See")
print("\thttp://pydev.blogspot.com/2007/06/why-cant-pydev-debugger-work-with.html")
testOWLoptions = testOWL.defaultOptions()
splash("testOWL with " + testOWLoptions.strategy)
testOWL.runTests(testOWLoptions)
if options.variants:
testOWLoptions.strategy = "sld"
splash("testOWL with " + testOWLoptions.strategy)
testOWL.runTests(testOWLoptions)
testOWLoptions.strategy = "bfp"
splash("testOWL with " + testOWLoptions.strategy)
testOWL.runTests(testOWLoptions)
sys.__stderr__.flush()
# Caution: for some reason external unit tests run properly only if run before embedded tests.
#runStandaloneUnitTests()
runSuite("FuXi External Unit Tests (other than testOWL)", suite(), summary)
runEmbeddedTests("FuXi",summary,usingUnittest2)
if rdftests:
runEmbeddedTests(rdflib,summary,usingUnittest2)
for line in summary:
print(line)
print("\nNote summary statistics are not available for the testOWL runs.")
| mit |
intgr/django | tests/auth_tests/test_decorators.py | 68 | 4129 | from django.conf import settings
from django.contrib.auth import models
from django.contrib.auth.decorators import login_required, permission_required
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from .test_views import AuthViewsTestCase
@override_settings(ROOT_URLCONF='auth_tests.urls')
class LoginRequiredTestCase(AuthViewsTestCase):
"""
Tests the login_required decorators
"""
def testCallable(self):
"""
login_required is assignable to callable objects.
"""
class CallableView:
def __call__(self, *args, **kwargs):
pass
login_required(CallableView())
def testView(self):
"""
login_required is assignable to normal views.
"""
def normal_view(request):
pass
login_required(normal_view)
def testLoginRequired(self, view_url='/login_required/', login_url=None):
"""
login_required works on a simple view wrapped in a login_required
decorator.
"""
if login_url is None:
login_url = settings.LOGIN_URL
response = self.client.get(view_url)
self.assertEqual(response.status_code, 302)
self.assertIn(login_url, response.url)
self.login()
response = self.client.get(view_url)
self.assertEqual(response.status_code, 200)
def testLoginRequiredNextUrl(self):
"""
login_required works on a simple view wrapped in a login_required
decorator with a login_url set.
"""
self.testLoginRequired(view_url='/login_required_login_url/', login_url='/somewhere/')
class PermissionsRequiredDecoratorTest(TestCase):
"""
Tests for the permission_required decorator
"""
def setUp(self):
self.user = models.User.objects.create(username='joe', password='qwerty')
self.factory = RequestFactory()
# Add permissions auth.add_customuser and auth.change_customuser
perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
self.user.user_permissions.add(*perms)
def test_many_permissions_pass(self):
@permission_required(['auth_tests.add_customuser', 'auth_tests.change_customuser'])
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
resp = a_view(request)
self.assertEqual(resp.status_code, 200)
def test_many_permissions_in_set_pass(self):
@permission_required({'auth_tests.add_customuser', 'auth_tests.change_customuser'})
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
resp = a_view(request)
self.assertEqual(resp.status_code, 200)
def test_single_permission_pass(self):
@permission_required('auth_tests.add_customuser')
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
resp = a_view(request)
self.assertEqual(resp.status_code, 200)
def test_permissioned_denied_redirect(self):
@permission_required(['auth_tests.add_customuser', 'auth_tests.change_customuser', 'nonexistent-permission'])
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
resp = a_view(request)
self.assertEqual(resp.status_code, 302)
def test_permissioned_denied_exception_raised(self):
@permission_required([
'auth_tests.add_customuser', 'auth_tests.change_customuser', 'nonexistent-permission'
], raise_exception=True)
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
with self.assertRaises(PermissionDenied):
a_view(request)
| bsd-3-clause |
myang321/django | tests/template_tests/syntax_tests/test_filter_tag.py | 521 | 1795 | from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import setup
class FilterTagTests(SimpleTestCase):
@setup({'filter01': '{% filter upper %}{% endfilter %}'})
def test_filter01(self):
output = self.engine.render_to_string('filter01')
self.assertEqual(output, '')
@setup({'filter02': '{% filter upper %}django{% endfilter %}'})
def test_filter02(self):
output = self.engine.render_to_string('filter02')
self.assertEqual(output, 'DJANGO')
@setup({'filter03': '{% filter upper|lower %}django{% endfilter %}'})
def test_filter03(self):
output = self.engine.render_to_string('filter03')
self.assertEqual(output, 'django')
@setup({'filter04': '{% filter cut:remove %}djangospam{% endfilter %}'})
def test_filter04(self):
output = self.engine.render_to_string('filter04', {'remove': 'spam'})
self.assertEqual(output, 'django')
@setup({'filter05': '{% filter safe %}fail{% endfilter %}'})
def test_filter05(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('filter05')
@setup({'filter05bis': '{% filter upper|safe %}fail{% endfilter %}'})
def test_filter05bis(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('filter05bis')
@setup({'filter06': '{% filter escape %}fail{% endfilter %}'})
def test_filter06(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('filter06')
@setup({'filter06bis': '{% filter upper|escape %}fail{% endfilter %}'})
def test_filter06bis(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('filter06bis')
| bsd-3-clause |
armersong/zato | code/zato-web-admin/src/zato/admin/web/views/outgoing/sql.py | 6 | 5214 | # -*- coding: utf-8 -*-
"""
Copyright (C) 2010 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
from traceback import format_exc
# Django
from django.http import HttpResponse, HttpResponseServerError
from django.template.response import TemplateResponse
# anyjson
from anyjson import dumps
# Zato
from zato.admin.settings import engine_friendly_name
from zato.admin.web.views import change_password as _change_password
from zato.admin.web.forms import ChangePasswordForm
from zato.admin.web.forms.outgoing.sql import CreateForm, EditForm
from zato.admin.web.views import Delete as _Delete, method_allowed
from zato.common.odb.model import SQLConnectionPool
logger = logging.getLogger(__name__)
def _get_edit_create_message(params, prefix=''):
""" Creates a base dictionary which can be used by both 'edit' and 'create' actions.
"""
return {
'id': params.get('id'),
'cluster_id': params['cluster_id'],
'name': params[prefix + 'name'],
'is_active': bool(params.get(prefix + 'is_active')),
'engine': params[prefix + 'engine'],
'host': params[prefix + 'host'],
'port': params[prefix + 'port'],
'db_name': params[prefix + 'db_name'],
'username': params[prefix + 'username'],
'pool_size': params[prefix + 'pool_size'],
'extra': params.get(prefix + 'extra'),
}
def _edit_create_response(verb, id, name, engine, cluster_id):
""" A common function for producing return data for create and edit actions.
"""
return_data = {'id': id,
'message': 'Successfully {0} the outgoing SQL connection [{1}]'.format(verb, name.encode('utf-8')),
'engine_text': engine_friendly_name[engine],
'cluster_id': cluster_id,
}
return HttpResponse(dumps(return_data), mimetype='application/javascript')
@method_allowed('GET')
def index(req):
""" Lists all the SQL connections.
"""
items = []
create_form = CreateForm()
edit_form = EditForm(prefix='edit')
change_password_form = ChangePasswordForm()
if req.zato.cluster_id and req.method == 'GET':
for item in req.zato.client.invoke('zato.outgoing.sql.get-list', {'cluster_id': req.zato.cluster_id}):
_item = SQLConnectionPool()
for name in('id', 'name', 'is_active', 'engine', 'host', 'port', 'db_name', 'username', 'pool_size'):
value = getattr(item, name)
setattr(_item, name, value)
_item.extra = item.extra or ''
_item.engine_text = engine_friendly_name[_item.engine]
items.append(_item)
return_data = {'zato_clusters':req.zato.clusters,
'cluster_id':req.zato.cluster_id,
'choose_cluster_form':req.zato.choose_cluster_form,
'items':items,
'create_form':create_form,
'edit_form':edit_form,
'change_password_form': change_password_form
}
return TemplateResponse(req, 'zato/outgoing/sql.html', return_data)
@method_allowed('POST')
def create(req):
""" Creates a new SQL connection.
"""
try:
request = _get_edit_create_message(req.POST)
engine = request['engine']
response = req.zato.client.invoke('zato.outgoing.sql.create', request)
return _edit_create_response('created', response.data.id, req.POST['name'], engine, req.zato.cluster.id)
except Exception, e:
msg = 'Could not create an outgoing SQL connection, e:[{e}]'.format(e=format_exc(e))
logger.error(msg)
return HttpResponseServerError(msg)
@method_allowed('POST')
def edit(req):
""" Updates an SQL connection.
"""
try:
request = _get_edit_create_message(req.POST, 'edit-')
engine = request['engine']
req.zato.client.invoke('zato.outgoing.sql.edit', request)
return _edit_create_response('updated', req.POST['id'], req.POST['edit-name'], engine, req.zato.cluster.id)
except Exception, e:
msg = 'Could not update the outgoing SQL connection, e:[{e}]'.format(e=format_exc(e))
logger.error(msg)
return HttpResponseServerError(msg)
class Delete(_Delete):
url_name = 'out-sql-delete'
error_message = 'Could not delete the SQL connection'
service_name = 'zato.outgoing.sql.delete'
@method_allowed('POST')
def ping(req, cluster_id, id):
""" Pings a database and returns the time it took, in milliseconds.
"""
try:
response = req.zato.client.invoke('zato.outgoing.sql.ping', {'id':id})
if response.ok:
return TemplateResponse(req, 'zato/outgoing/sql-ping-ok.html',
{'response_time':'%.3f' % float(response.data.response_time)})
else:
return HttpResponseServerError(response.details)
except Exception, e:
msg = 'Could not ping the outgoing SQL connection, e:[{}]'.format(format_exc(e))
logger.error(msg)
return HttpResponseServerError(msg)
@method_allowed('POST')
def change_password(req):
return _change_password(req, 'zato.outgoing.sql.change-password')
| gpl-3.0 |
kyvinh/home-assistant | tests/util/test_yaml.py | 12 | 15282 | """Test Home Assistant yaml loader."""
import io
import os
import unittest
from unittest.mock import patch
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import yaml
from homeassistant.config import YAML_CONFIG_FILE, load_yaml_config_file
from tests.common import get_test_config_dir, patch_yaml_files
class TestYaml(unittest.TestCase):
"""Test util.yaml loader."""
# pylint: disable=no-self-use, invalid-name
def test_simple_list(self):
"""Test simple list."""
conf = "config:\n - simple\n - list"
with io.StringIO(conf) as file:
doc = yaml.yaml.safe_load(file)
assert doc['config'] == ["simple", "list"]
def test_simple_dict(self):
"""Test simple dict."""
conf = "key: value"
with io.StringIO(conf) as file:
doc = yaml.yaml.safe_load(file)
assert doc['key'] == 'value'
def test_duplicate_key(self):
"""Test duplicate dict keys."""
files = {YAML_CONFIG_FILE: 'key: thing1\nkey: thing2'}
with self.assertRaises(HomeAssistantError):
with patch_yaml_files(files):
load_yaml_config_file(YAML_CONFIG_FILE)
def test_unhashable_key(self):
"""Test an unhasable key."""
files = {YAML_CONFIG_FILE: 'message:\n {{ states.state }}'}
with self.assertRaises(HomeAssistantError), \
patch_yaml_files(files):
load_yaml_config_file(YAML_CONFIG_FILE)
def test_no_key(self):
"""Test item without an key."""
files = {YAML_CONFIG_FILE: 'a: a\nnokeyhere'}
with self.assertRaises(HomeAssistantError), \
patch_yaml_files(files):
yaml.load_yaml(YAML_CONFIG_FILE)
    def test_environment_variable(self):
        """Test config file with an environment variable."""
os.environ["PASSWORD"] = "secret_password"
conf = "password: !env_var PASSWORD"
with io.StringIO(conf) as file:
doc = yaml.yaml.safe_load(file)
assert doc['password'] == "secret_password"
del os.environ["PASSWORD"]
    def test_invalid_environment_variable(self):
        """Test config file with no environment variable set."""
conf = "password: !env_var PASSWORD"
with self.assertRaises(HomeAssistantError):
with io.StringIO(conf) as file:
yaml.yaml.safe_load(file)
def test_include_yaml(self):
"""Test include yaml."""
with patch_yaml_files({'test.yaml': 'value'}):
conf = 'key: !include test.yaml'
with io.StringIO(conf) as file:
doc = yaml.yaml.safe_load(file)
assert doc["key"] == "value"
with patch_yaml_files({'test.yaml': None}):
conf = 'key: !include test.yaml'
with io.StringIO(conf) as file:
doc = yaml.yaml.safe_load(file)
assert doc["key"] == {}
@patch('homeassistant.util.yaml.os.walk')
def test_include_dir_list(self, mock_walk):
"""Test include dir list yaml."""
mock_walk.return_value = [
['/tmp', [], ['one.yaml', 'two.yaml']],
]
with patch_yaml_files({
'/tmp/one.yaml': 'one',
'/tmp/two.yaml': 'two',
}):
conf = "key: !include_dir_list /tmp"
with io.StringIO(conf) as file:
doc = yaml.yaml.safe_load(file)
assert sorted(doc["key"]) == sorted(["one", "two"])
@patch('homeassistant.util.yaml.os.walk')
def test_include_dir_list_recursive(self, mock_walk):
"""Test include dir recursive list yaml."""
mock_walk.return_value = [
['/tmp', ['tmp2', '.ignore', 'ignore'], ['zero.yaml']],
['/tmp/tmp2', [], ['one.yaml', 'two.yaml']],
['/tmp/ignore', [], ['.ignore.yaml']]
]
with patch_yaml_files({
'/tmp/zero.yaml': 'zero',
'/tmp/tmp2/one.yaml': 'one',
'/tmp/tmp2/two.yaml': 'two'
}):
conf = "key: !include_dir_list /tmp"
with io.StringIO(conf) as file:
assert '.ignore' in mock_walk.return_value[0][1], \
"Expecting .ignore in here"
doc = yaml.yaml.safe_load(file)
assert 'tmp2' in mock_walk.return_value[0][1]
assert '.ignore' not in mock_walk.return_value[0][1]
assert sorted(doc["key"]) == sorted(["zero", "one", "two"])
@patch('homeassistant.util.yaml.os.walk')
def test_include_dir_named(self, mock_walk):
"""Test include dir named yaml."""
mock_walk.return_value = [
['/tmp', [], ['first.yaml', 'second.yaml']]
]
with patch_yaml_files({
'/tmp/first.yaml': 'one',
'/tmp/second.yaml': 'two'
}):
conf = "key: !include_dir_named /tmp"
correct = {'first': 'one', 'second': 'two'}
with io.StringIO(conf) as file:
doc = yaml.yaml.safe_load(file)
assert doc["key"] == correct
@patch('homeassistant.util.yaml.os.walk')
def test_include_dir_named_recursive(self, mock_walk):
"""Test include dir named yaml."""
mock_walk.return_value = [
['/tmp', ['tmp2', '.ignore', 'ignore'], ['first.yaml']],
['/tmp/tmp2', [], ['second.yaml', 'third.yaml']],
['/tmp/ignore', [], ['.ignore.yaml']]
]
with patch_yaml_files({
'/tmp/first.yaml': 'one',
'/tmp/tmp2/second.yaml': 'two',
'/tmp/tmp2/third.yaml': 'three'
}):
conf = "key: !include_dir_named /tmp"
correct = {'first': 'one', 'second': 'two', 'third': 'three'}
with io.StringIO(conf) as file:
assert '.ignore' in mock_walk.return_value[0][1], \
"Expecting .ignore in here"
doc = yaml.yaml.safe_load(file)
assert 'tmp2' in mock_walk.return_value[0][1]
assert '.ignore' not in mock_walk.return_value[0][1]
assert doc["key"] == correct
@patch('homeassistant.util.yaml.os.walk')
def test_include_dir_merge_list(self, mock_walk):
"""Test include dir merge list yaml."""
mock_walk.return_value = [['/tmp', [], ['first.yaml', 'second.yaml']]]
with patch_yaml_files({
'/tmp/first.yaml': '- one',
'/tmp/second.yaml': '- two\n- three'
}):
conf = "key: !include_dir_merge_list /tmp"
with io.StringIO(conf) as file:
doc = yaml.yaml.safe_load(file)
assert sorted(doc["key"]) == sorted(["one", "two", "three"])
@patch('homeassistant.util.yaml.os.walk')
def test_include_dir_merge_list_recursive(self, mock_walk):
"""Test include dir merge list yaml."""
mock_walk.return_value = [
['/tmp', ['tmp2', '.ignore', 'ignore'], ['first.yaml']],
['/tmp/tmp2', [], ['second.yaml', 'third.yaml']],
['/tmp/ignore', [], ['.ignore.yaml']]
]
with patch_yaml_files({
'/tmp/first.yaml': '- one',
'/tmp/tmp2/second.yaml': '- two',
'/tmp/tmp2/third.yaml': '- three\n- four'
}):
conf = "key: !include_dir_merge_list /tmp"
with io.StringIO(conf) as file:
assert '.ignore' in mock_walk.return_value[0][1], \
"Expecting .ignore in here"
doc = yaml.yaml.safe_load(file)
assert 'tmp2' in mock_walk.return_value[0][1]
assert '.ignore' not in mock_walk.return_value[0][1]
assert sorted(doc["key"]) == sorted(["one", "two",
"three", "four"])
@patch('homeassistant.util.yaml.os.walk')
def test_include_dir_merge_named(self, mock_walk):
"""Test include dir merge named yaml."""
mock_walk.return_value = [['/tmp', [], ['first.yaml', 'second.yaml']]]
files = {
'/tmp/first.yaml': 'key1: one',
'/tmp/second.yaml': 'key2: two\nkey3: three',
}
with patch_yaml_files(files):
conf = "key: !include_dir_merge_named /tmp"
with io.StringIO(conf) as file:
doc = yaml.yaml.safe_load(file)
assert doc["key"] == {
"key1": "one",
"key2": "two",
"key3": "three"
}
@patch('homeassistant.util.yaml.os.walk')
def test_include_dir_merge_named_recursive(self, mock_walk):
"""Test include dir merge named yaml."""
mock_walk.return_value = [
['/tmp', ['tmp2', '.ignore', 'ignore'], ['first.yaml']],
['/tmp/tmp2', [], ['second.yaml', 'third.yaml']],
['/tmp/ignore', [], ['.ignore.yaml']]
]
with patch_yaml_files({
'/tmp/first.yaml': 'key1: one',
'/tmp/tmp2/second.yaml': 'key2: two',
'/tmp/tmp2/third.yaml': 'key3: three\nkey4: four'
}):
conf = "key: !include_dir_merge_named /tmp"
with io.StringIO(conf) as file:
assert '.ignore' in mock_walk.return_value[0][1], \
"Expecting .ignore in here"
doc = yaml.yaml.safe_load(file)
assert 'tmp2' in mock_walk.return_value[0][1]
assert '.ignore' not in mock_walk.return_value[0][1]
assert doc["key"] == {
"key1": "one",
"key2": "two",
"key3": "three",
"key4": "four"
}
@patch('homeassistant.util.yaml.open', create=True)
def test_load_yaml_encoding_error(self, mock_open):
"""Test raising a UnicodeDecodeError."""
mock_open.side_effect = UnicodeDecodeError('', b'', 1, 0, '')
self.assertRaises(HomeAssistantError, yaml.load_yaml, 'test')
def test_dump(self):
"""The that the dump method returns empty None values."""
assert yaml.dump({'a': None, 'b': 'b'}) == 'a:\nb: b\n'
FILES = {}
def load_yaml(fname, string):
"""Write a string to file and return the parsed yaml."""
FILES[fname] = string
with patch_yaml_files(FILES):
return load_yaml_config_file(fname)
class FakeKeyring():
"""Fake a keyring class."""
def __init__(self, secrets_dict):
"""Store keyring dictionary."""
self._secrets = secrets_dict
# pylint: disable=protected-access
def get_password(self, domain, name):
"""Retrieve password."""
assert domain == yaml._SECRET_NAMESPACE
return self._secrets.get(name)
class TestSecrets(unittest.TestCase):
"""Test the secrets parameter in the yaml utility."""
# pylint: disable=protected-access,invalid-name
def setUp(self):
"""Create & load secrets file."""
config_dir = get_test_config_dir()
yaml.clear_secret_cache()
self._yaml_path = os.path.join(config_dir, YAML_CONFIG_FILE)
self._secret_path = os.path.join(config_dir, yaml._SECRET_YAML)
self._sub_folder_path = os.path.join(config_dir, 'subFolder')
self._unrelated_path = os.path.join(config_dir, 'unrelated')
load_yaml(self._secret_path,
'http_pw: pwhttp\n'
'comp1_un: un1\n'
'comp1_pw: pw1\n'
'stale_pw: not_used\n'
'logger: debug\n')
self._yaml = load_yaml(self._yaml_path,
'http:\n'
' api_password: !secret http_pw\n'
'component:\n'
' username: !secret comp1_un\n'
' password: !secret comp1_pw\n'
'')
def tearDown(self):
"""Clean up secrets."""
yaml.clear_secret_cache()
FILES.clear()
def test_secrets_from_yaml(self):
"""Did secrets load ok."""
expected = {'api_password': 'pwhttp'}
self.assertEqual(expected, self._yaml['http'])
expected = {
'username': 'un1',
'password': 'pw1'}
self.assertEqual(expected, self._yaml['component'])
def test_secrets_from_parent_folder(self):
"""Test loading secrets from parent foler."""
expected = {'api_password': 'pwhttp'}
self._yaml = load_yaml(os.path.join(self._sub_folder_path, 'sub.yaml'),
'http:\n'
' api_password: !secret http_pw\n'
'component:\n'
' username: !secret comp1_un\n'
' password: !secret comp1_pw\n'
'')
self.assertEqual(expected, self._yaml['http'])
def test_secret_overrides_parent(self):
"""Test loading current directory secret overrides the parent."""
expected = {'api_password': 'override'}
load_yaml(os.path.join(self._sub_folder_path, yaml._SECRET_YAML),
'http_pw: override')
self._yaml = load_yaml(os.path.join(self._sub_folder_path, 'sub.yaml'),
'http:\n'
' api_password: !secret http_pw\n'
'component:\n'
' username: !secret comp1_un\n'
' password: !secret comp1_pw\n'
'')
self.assertEqual(expected, self._yaml['http'])
def test_secrets_from_unrelated_fails(self):
"""Test loading secrets from unrelated folder fails."""
load_yaml(os.path.join(self._unrelated_path, yaml._SECRET_YAML),
'test: failure')
with self.assertRaises(HomeAssistantError):
load_yaml(os.path.join(self._sub_folder_path, 'sub.yaml'),
'http:\n'
' api_password: !secret test')
def test_secrets_keyring(self):
"""Test keyring fallback & get_password."""
        yaml.keyring = None  # Ensure it's not there
yaml_str = 'http:\n api_password: !secret http_pw_keyring'
with self.assertRaises(yaml.HomeAssistantError):
load_yaml(self._yaml_path, yaml_str)
yaml.keyring = FakeKeyring({'http_pw_keyring': 'yeah'})
_yaml = load_yaml(self._yaml_path, yaml_str)
self.assertEqual({'http': {'api_password': 'yeah'}}, _yaml)
def test_secrets_logger_removed(self):
"""Ensure logger: debug was removed."""
with self.assertRaises(yaml.HomeAssistantError):
load_yaml(self._yaml_path, 'api_password: !secret logger')
@patch('homeassistant.util.yaml._LOGGER.error')
def test_bad_logger_value(self, mock_error):
"""Ensure logger: debug was removed."""
yaml.clear_secret_cache()
load_yaml(self._secret_path, 'logger: info\npw: abc')
load_yaml(self._yaml_path, 'api_password: !secret pw')
assert mock_error.call_count == 1, \
"Expected an error about logger: value"
| apache-2.0 |
easyw/kicad-3d-models-in-freecad | cadquery/FCAD_script_generator/phoenix_contact/main_generator.py | 2 | 14974 | # -*- coding: utf8 -*-
#!/usr/bin/python
#
# This was originaly derived from a cadquery script for generating PDIP models in X3D format
# from https://bitbucket.org/hyOzd/freecad-macros
# author hyOzd
#
# Adapted by easyw for step and vrlm export
# See https://github.com/easyw/kicad-3d-models-in-freecad
## requirements
## cadquery FreeCAD plugin
## https://github.com/jmwright/cadquery-freecad-module
## to run the script just do: freecad scriptName modelName
## e.g. FreeCAD main_generator.py all
## the script will generate STEP and VRML parametric models
## to be used with kicad StepUp script
#* These are FreeCAD & cadquery tools *
#* to export generated models in STEP & VRML format. *
#* *
#* cadquery script for generating Phoenix Contact models in STEP AP214 *
#* Copyright (c) 2016 *
#* Rene Poeschl https://github.com/poeschlr *
#* All trademarks within this guide belong to their legitimate owners. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU General Public License (GPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., *
#* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA *
#* *
#****************************************************************************
__title__ = "make 3D models of phoenix contact connectors (MSTB and MC series)."
__author__ = "scripts: maurice and hyOzd; models: poeschlr"
__Comment__ = '''make 3D models of phoenix contact types MSTB and MC.'''
___ver___ = "1.2 03/12/2017"
import sys, os
import traceback
import datetime
from datetime import datetime
sys.path.append("../_tools")
import exportPartToVRML as expVRML
import shaderColors
import re, fnmatch
import yaml
save_memory = True  # reduce memory consumption across all generation runs
check_Model = True
stop_on_first_error = True
check_log_file = 'check-log.md'
global_3dpath = '../_3Dmodels/'
# Licence information of the generated models.
#################################################################################################
STR_licAuthor = "Rene Poeschl"
STR_licEmail = "poeschlr@gmail.com"
STR_licOrgSys = ""
STR_licPreProc = ""
LIST_license = ["",]
#################################################################################################
body_color_key = "green body"
body_color = shaderColors.named_colors[body_color_key].getDiffuseInt()
pins_color_key = "metal grey pins"
pins_color = shaderColors.named_colors[pins_color_key].getDiffuseInt()
insert_color_key = "gold pins"
insert_color = shaderColors.named_colors[insert_color_key].getDiffuseInt()
screw_color_key = "metal grey pins"
screw_color = shaderColors.named_colors[screw_color_key].getDiffuseInt()
if FreeCAD.GuiUp:
from PySide import QtCore, QtGui
#checking requirements
#######################################################################
try:
# Gui.SendMsgToActiveView("Run")
# from Gui.Command import *
Gui.activateWorkbench("CadQueryWorkbench")
import cadquery as cq
from Helpers import show
# CadQuery Gui
except Exception as e: # catch *all* exceptions
print(e)
msg = "missing CadQuery 0.3.0 or later Module!\r\n\r\n"
msg += "https://github.com/jmwright/cadquery-freecad-module/wiki\n"
if QtGui is not None:
reply = QtGui.QMessageBox.information(None,"Info ...",msg)
#######################################################################
from Gui.Command import *
# Import cad_tools
#sys.path.append("../")
from cqToolsExceptions import *
import cq_cad_tools
# Reload tools
reload(cq_cad_tools)
# Explicitly load all needed functions
from cq_cad_tools import GetListOfObjects, restore_Main_Tools,\
exportSTEP, close_CQ_Example, saveFCdoc, z_RotateObject, multiFuseObjs_wColors,\
runGeometryCheck
try:
close_CQ_Example(App, Gui)
except:
FreeCAD.Console.PrintMessage("can't close example.")
from math import sqrt
from collections import namedtuple
#import FreeCAD, Draft, FreeCADGui
import ImportGui
sys.path.append("cq_models")
import conn_phoenix_mstb as MSTB
import conn_phoenix_mc as MC
import add_license as L
if LIST_license[0]=="":
LIST_license=L.LIST_int_license
LIST_license.append("")
def export_one_part(modul, variant, configuration, log, with_plug=False):
    if variant not in modul.all_params:
        FreeCAD.Console.PrintMessage("Parameters for %s don't exist in 'M.all_params', skipping." % variant)
return
LIST_license[0] = "Copyright (C) "+datetime.now().strftime("%Y")+", " + STR_licAuthor
params = modul.all_params[variant]
series_params = modul.seriesParams
series = series_params.series_name
subseries, connector_style = params.series_name.split('-')
pitch_mpn = '-{:g}'.format(params.pin_pitch)
if series[0] == 'MSTB':
pitch_mpn = ''
if params.pin_pitch == 5.08:
pitch_mpn = '-5,08'
elif params.pin_pitch == 7.62:
pitch_mpn = '-7,62'
lib_name = configuration['lib_name_format_str'].format(series=series[0], style=series[1], pitch=params.pin_pitch)
mpn = configuration['mpn_format_string'].format(subseries=subseries, style = connector_style,
rating=series[1], num_pins=params.num_pins, pitch=pitch_mpn)
FileName = configuration['fp_name_format_string'].format(man = configuration['manufacturer'],
series = series[0], mpn = mpn, num_rows = 1,
num_pins = params.num_pins, pitch = params.pin_pitch,
orientation = configuration['orientation_str'][1] if params.angled else configuration['orientation_str'][0],
flanged = configuration['flanged_str'][1] if params.flanged else configuration['flanged_str'][0],
mount_hole = configuration['mount_hole_str'][1] if params.mount_hole else configuration['mount_hole_str'][0])
destination_dir=global_3dpath+lib_name
if with_plug:
destination_dir += "__with_plug"
destination_dir+=".3dshapes"
ModelName = variant
ModelName = ModelName.replace(".","_")
Newdoc = FreeCAD.newDocument(ModelName)
App.setActiveDocument(ModelName)
App.ActiveDocument=App.getDocument(ModelName)
Gui.ActiveDocument=Gui.getDocument(ModelName)
#App.setActiveDocument(ModelName)
#Gui.ActiveDocument=Gui.getDocument(ModelName)
(pins, body, insert, mount_screw, plug, plug_screws) = modul.generate_part(variant, with_plug)
color_attr = body_color + (0,)
show(body, color_attr)
color_attr = pins_color + (0,)
show(pins, color_attr)
if insert is not None:
color_attr = insert_color + (0,)
show(insert, color_attr)
if mount_screw is not None:
color_attr = screw_color + (0,)
show(mount_screw, color_attr)
if plug is not None:
color_attr = body_color + (0,)
show(plug, color_attr)
color_attr = screw_color + (0,)
show(plug_screws, color_attr)
doc = FreeCAD.ActiveDocument
doc.Label=ModelName
objs=FreeCAD.ActiveDocument.Objects
FreeCAD.Console.PrintMessage(objs)
i=0
objs[i].Label = ModelName + "__body"
i+=1
objs[i].Label = ModelName + "__pins"
i+=1
if insert is not None:
objs[i].Label = ModelName + "__thread_insert"
i+=1
if mount_screw is not None:
objs[i].Label = ModelName + "__mount_screw"
i+=1
if plug is not None:
objs[i].Label = ModelName + "__plug"
i+=1
objs[i].Label = ModelName + "__plug_screws"
restore_Main_Tools()
out_dir=destination_dir
if not os.path.exists(out_dir):
os.makedirs(out_dir)
used_color_keys = [body_color_key, pins_color_key]
export_file_name=destination_dir+os.sep+FileName+'.wrl'
export_objects = []
i=0
export_objects.append(expVRML.exportObject(freecad_object = objs[i],
shape_color=body_color_key,
face_colors=None))
i+=1
export_objects.append(expVRML.exportObject(freecad_object = objs[i],
shape_color=pins_color_key,
face_colors=None))
i+=1
if insert is not None:
export_objects.append(expVRML.exportObject(freecad_object = objs[i],
shape_color=insert_color_key,
face_colors=None))
used_color_keys.append(insert_color_key)
i+=1
if mount_screw is not None:
export_objects.append(expVRML.exportObject(freecad_object = objs[i],
shape_color=screw_color_key,
face_colors=None))
used_color_keys.append(screw_color_key)
i+=1
if plug is not None:
export_objects.append(expVRML.exportObject(freecad_object = objs[i],
shape_color=body_color_key,
face_colors=None))
i+=1
export_objects.append(expVRML.exportObject(freecad_object = objs[i],
shape_color=screw_color_key,
face_colors=None))
scale=1/2.54
colored_meshes = expVRML.getColoredMesh(Gui, export_objects , scale)
expVRML.writeVRMLFile(colored_meshes, export_file_name, used_color_keys, LIST_license)
fusion = multiFuseObjs_wColors(FreeCAD, FreeCADGui,
ModelName, objs, keepOriginals=True)
exportSTEP(doc,FileName,out_dir,fusion)
step_path = '{dir:s}/{name:s}.step'.format(dir=out_dir, name=FileName)
L.addLicenseToStep(out_dir, '{:s}.step'.format(FileName), LIST_license,\
STR_licAuthor, STR_licEmail, STR_licOrgSys, STR_licPreProc)
FreeCAD.activeDocument().recompute()
# FreeCADGui.activateWorkbench("PartWorkbench")
if save_memory == False and check_Model==False:
Gui.SendMsgToActiveView("ViewFit")
Gui.activeDocument().activeView().viewAxometric()
# Save the doc in Native FC format
saveFCdoc(App, Gui, doc, FileName, out_dir)
if save_memory == True or check_Model==True:
doc=FreeCAD.ActiveDocument
FreeCAD.closeDocument(doc.Name)
if check_Model==True:
runGeometryCheck(App, Gui, step_path,
log, ModelName, save_memory=save_memory)
class argparse():
def __init__(self):
self.config = 'config_phoenix_KLCv3.0.yaml'
self.model_filter = '*'
self.series = ['mc','mstb']
self.with_plug = False
def parse_args(self, args):
for arg in args:
if '=' in arg:
self.parseValueArg(*arg.split('='))
else:
self.argSwitchArg(arg)
def parseValueArg(self, name, value):
if name == 'config':
self.config = value
elif name == 'model_filter':
self.model_filter = value
elif name == 'series':
self.series = value.split(',')
def argSwitchArg(self, name):
if name == '?':
self.print_usage()
exit()
elif name == 'with_plug':
self.with_plug = True
elif name == 'disable_check':
global check_Model
check_Model = False
elif name == 'disable_Memory_reduction':
global save_memory
save_memory = False
elif name == 'error_tolerant':
global stop_on_first_error
stop_on_first_error = False
def print_usage(self):
print("Generater script for phoenix contact 3d models.")
print('usage: FreeCAD main_generator.py [optional arguments]')
print('optional arguments:')
print('\tconfig=[config file]: default:config_phoenix_KLCv3.0.yaml')
print('\tmodel_filter=[filter using linux file filter syntax]')
print('\tseries=[series name],[series name],...')
print('switches:')
print('\twith_plug')
print('\tdisable_check')
print('\tdisable_Memory_reduction')
print('\terror_tolerant\n')
def __str__(self):
return 'config:{:s}, filter:{:s}, series:{:s}, with_plug:{:d}'.format(
self.config, self.model_filter, str(self.series), self.with_plug)
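# Hypothetical invocations accepted by the parser above (value arguments use
# 'name=value', switches are bare words; the filter pattern is illustrative):
#
#     freecad main_generator.py series=mc model_filter=MC* with_plug
#     freecad main_generator.py config=config_phoenix_KLCv3.0.yaml error_tolerant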
def exportSeries(series_params, log):
for variant in series_params.all_params.keys():
if model_filter_regobj.match(variant):
#FreeCAD.Console.PrintMessage('\r\n'+variant+'\r\n')
try:
export_one_part(series_params, variant, configuration, log, with_plug)
except GeometryError as e:
e.print_errors(stop_on_first_error)
if stop_on_first_error:
return -1
except FreeCADVersionError as e:
FreeCAD.Console.PrintError(e)
return -1
return 0
if __name__ == "__main__" or __name__ == "main_generator":
FreeCAD.Console.PrintMessage('\r\nRunning...\r\n')
series_to_build = []
modelfilter = ""
with_plug = False
args = argparse()
args.parse_args(sys.argv)
with open(args.config, 'r') as config_stream:
try:
configuration = yaml.load(config_stream)
except yaml.YAMLError as exc:
print(exc)
series_to_build = map(str.lower, args.series)
print(args)
modelfilter = args.model_filter
series = []
if 'mc' in series_to_build:
series += [MC]
if 'mstb' in series_to_build:
series += [MSTB]
model_filter_regobj=re.compile(fnmatch.translate(modelfilter))
print("########################################")
print(args.model_filter)
with open(check_log_file, 'w') as log:
        log.write('# Check report for Phoenix Contact 3d model generation\n')
for typ in series:
try:
if exportSeries(typ, log) != 0:
break
except Exception as exeption:
traceback.print_exc()
break
FreeCAD.Console.PrintMessage('\r\nDone\r\n')
| gpl-2.0 |
alrifqi/django | tests/template_tests/filter_tests/test_floatformat.py | 345 | 4480 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from decimal import Decimal, localcontext
from unittest import expectedFailure
from django.template.defaultfilters import floatformat
from django.test import SimpleTestCase
from django.utils import six
from django.utils.safestring import mark_safe
from ..utils import setup
class FloatformatTests(SimpleTestCase):
@setup({'floatformat01':
'{% autoescape off %}{{ a|floatformat }} {{ b|floatformat }}{% endautoescape %}'})
def test_floatformat01(self):
output = self.engine.render_to_string('floatformat01', {"a": "1.42", "b": mark_safe("1.42")})
self.assertEqual(output, "1.4 1.4")
@setup({'floatformat02': '{{ a|floatformat }} {{ b|floatformat }}'})
def test_floatformat02(self):
output = self.engine.render_to_string('floatformat02', {"a": "1.42", "b": mark_safe("1.42")})
self.assertEqual(output, "1.4 1.4")
class FunctionTests(SimpleTestCase):
def test_inputs(self):
self.assertEqual(floatformat(7.7), '7.7')
self.assertEqual(floatformat(7.0), '7')
self.assertEqual(floatformat(0.7), '0.7')
self.assertEqual(floatformat(0.07), '0.1')
self.assertEqual(floatformat(0.007), '0.0')
self.assertEqual(floatformat(0.0), '0')
self.assertEqual(floatformat(7.7, 3), '7.700')
self.assertEqual(floatformat(6.000000, 3), '6.000')
self.assertEqual(floatformat(6.200000, 3), '6.200')
self.assertEqual(floatformat(6.200000, -3), '6.200')
self.assertEqual(floatformat(13.1031, -3), '13.103')
self.assertEqual(floatformat(11.1197, -2), '11.12')
self.assertEqual(floatformat(11.0000, -2), '11')
self.assertEqual(floatformat(11.000001, -2), '11.00')
self.assertEqual(floatformat(8.2798, 3), '8.280')
self.assertEqual(floatformat(5555.555, 2), '5555.56')
self.assertEqual(floatformat(001.3000, 2), '1.30')
self.assertEqual(floatformat(0.12345, 2), '0.12')
self.assertEqual(floatformat(Decimal('555.555'), 2), '555.56')
self.assertEqual(floatformat(Decimal('09.000')), '9')
self.assertEqual(floatformat('foo'), '')
self.assertEqual(floatformat(13.1031, 'bar'), '13.1031')
self.assertEqual(floatformat(18.125, 2), '18.13')
self.assertEqual(floatformat('foo', 'bar'), '')
self.assertEqual(floatformat('¿Cómo esta usted?'), '')
self.assertEqual(floatformat(None), '')
def test_zero_values(self):
"""
Check that we're not converting to scientific notation.
"""
self.assertEqual(floatformat(0, 6), '0.000000')
self.assertEqual(floatformat(0, 7), '0.0000000')
self.assertEqual(floatformat(0, 10), '0.0000000000')
self.assertEqual(floatformat(0.000000000000000000015, 20),
'0.00000000000000000002')
def test_infinity(self):
pos_inf = float(1e30000)
self.assertEqual(floatformat(pos_inf), six.text_type(pos_inf))
neg_inf = float(-1e30000)
self.assertEqual(floatformat(neg_inf), six.text_type(neg_inf))
nan = pos_inf / pos_inf
self.assertEqual(floatformat(nan), six.text_type(nan))
def test_float_dunder_method(self):
class FloatWrapper(object):
def __init__(self, value):
self.value = value
def __float__(self):
return self.value
self.assertEqual(floatformat(FloatWrapper(11.000001), -2), '11.00')
def test_low_decimal_precision(self):
"""
#15789
"""
with localcontext() as ctx:
ctx.prec = 2
self.assertEqual(floatformat(1.2345, 2), '1.23')
self.assertEqual(floatformat(15.2042, -3), '15.204')
self.assertEqual(floatformat(1.2345, '2'), '1.23')
self.assertEqual(floatformat(15.2042, '-3'), '15.204')
self.assertEqual(floatformat(Decimal('1.2345'), 2), '1.23')
self.assertEqual(floatformat(Decimal('15.2042'), -3), '15.204')
def test_many_zeroes(self):
self.assertEqual(floatformat(1.00000000000000015, 16), '1.0000000000000002')
if six.PY2:
# The above test fails because of Python 2's float handling. Floats
# with many zeroes after the decimal point should be passed in as
# another type such as unicode or Decimal.
test_many_zeroes = expectedFailure(test_many_zeroes)
| bsd-3-clause |
0jpq0/kbengine | kbe/src/lib/python/Lib/test/test_compare.py | 169 | 1666 | import unittest
from test import support
class Empty:
def __repr__(self):
return '<Empty>'
class Cmp:
def __init__(self,arg):
self.arg = arg
def __repr__(self):
return '<Cmp %s>' % self.arg
def __eq__(self, other):
return self.arg == other
class Anything:
def __eq__(self, other):
return True
def __ne__(self, other):
return False
class ComparisonTest(unittest.TestCase):
set1 = [2, 2.0, 2, 2+0j, Cmp(2.0)]
set2 = [[1], (3,), None, Empty()]
candidates = set1 + set2
def test_comparisons(self):
for a in self.candidates:
for b in self.candidates:
if ((a in self.set1) and (b in self.set1)) or a is b:
self.assertEqual(a, b)
else:
self.assertNotEqual(a, b)
def test_id_comparisons(self):
# Ensure default comparison compares id() of args
L = []
for i in range(10):
L.insert(len(L)//2, Empty())
for a in L:
for b in L:
self.assertEqual(a == b, id(a) == id(b),
'a=%r, b=%r' % (a, b))
def test_ne_defaults_to_not_eq(self):
a = Cmp(1)
b = Cmp(1)
self.assertTrue(a == b)
self.assertFalse(a != b)
def test_issue_1393(self):
x = lambda: None
self.assertEqual(x, Anything())
self.assertEqual(Anything(), x)
y = object()
self.assertEqual(y, Anything())
self.assertEqual(Anything(), y)
def test_main():
support.run_unittest(ComparisonTest)
if __name__ == '__main__':
test_main()
| lgpl-3.0 |
mdworks2016/work_development | Python/20_Third_Certification/venv/lib/python3.7/site-packages/chardet/gb2312prober.py | 289 | 1754 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import GB2312DistributionAnalysis
from .mbcssm import GB2312_SM_MODEL
class GB2312Prober(MultiByteCharSetProber):
def __init__(self):
super(GB2312Prober, self).__init__()
self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
self.distribution_analyzer = GB2312DistributionAnalysis()
self.reset()
@property
def charset_name(self):
return "GB2312"
@property
def language(self):
return "Chinese"
| apache-2.0 |
zqzhang/crosswalk-test-suite | webapi/tct-csp-w3c-tests/csp-py/csp_frame-src_none_blocked_int-manual.py | 30 | 2502 | def main(request, response):
import simplejson as json
f = file('config.json')
source = f.read()
s = json.JSONDecoder().decode(source)
url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0])
response.headers.set("Content-Security-Policy", "frame-src 'none'")
response.headers.set("X-Content-Security-Policy", "frame-src 'none'")
response.headers.set("X-WebKit-CSP", "frame-src 'none'")
return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Intel Corporation.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of works must retain the original copyright notice, this list
of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this work without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors:
Hao, Yunfei <yunfeix.hao@intel.com>
-->
<html>
<head>
<title>CSP Test: csp_frame-src_none_blocked_int</title>
<link rel="author" title="Intel" href="http://www.intel.com"/>
<link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#frame-src"/>
<meta name="flags" content=""/>
<meta name="assert" content="frame-src 'none'"/>
<meta charset="utf-8"/>
</head>
<body>
<p>Test passes if there is <strong>no red</strong>.</p>
<iframe src="support/red-100x100.png"/>
</body>
</html> """
| bsd-3-clause |
mluke93/osf.io | scripts/embargo_registrations.py | 15 | 4636 | """Run nightly, this script will activate any pending embargoes that have
elapsed the pending approval time and make public any registrations whose
embargo end dates have passed.
"""
import logging
import datetime
from modularodm import Q
from framework.celery_tasks import app as celery_app
from framework.transactions.context import TokuTransaction
from website.app import init_app
from website import models, settings
from website.project.model import NodeLog
from scripts import utils as scripts_utils
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def main(dry_run=True):
pending_embargoes = models.Embargo.find(Q('state', 'eq', models.Embargo.UNAPPROVED))
for embargo in pending_embargoes:
if should_be_embargoed(embargo):
if dry_run:
logger.warn('Dry run mode')
parent_registration = models.Node.find_one(Q('embargo', 'eq', embargo))
logger.warn(
'Embargo {0} approved. Activating embargo for registration {1}'
.format(embargo._id, parent_registration._id)
)
if not dry_run:
if parent_registration.is_deleted:
# Clean up any registration failures during archiving
embargo.forcibly_reject()
embargo.save()
continue
with TokuTransaction():
try:
embargo.state = models.Embargo.APPROVED
parent_registration.registered_from.add_log(
action=NodeLog.EMBARGO_APPROVED,
params={
'node': parent_registration.registered_from_id,
'registration': parent_registration._id,
'embargo_id': embargo._id,
},
auth=None,
)
embargo.save()
except Exception as err:
logger.error(
'Unexpected error raised when activating embargo for '
'registration {}. Continuing...'.format(parent_registration))
logger.exception(err)
active_embargoes = models.Embargo.find(Q('state', 'eq', models.Embargo.APPROVED))
for embargo in active_embargoes:
if embargo.end_date < datetime.datetime.utcnow():
if dry_run:
logger.warn('Dry run mode')
parent_registration = models.Node.find_one(Q('embargo', 'eq', embargo))
logger.warn(
'Embargo {0} complete. Making registration {1} public'
.format(embargo._id, parent_registration._id)
)
if not dry_run:
if parent_registration.is_deleted:
# Clean up any registration failures during archiving
embargo.forcibly_reject()
embargo.save()
continue
with TokuTransaction():
try:
embargo.state = models.Embargo.COMPLETED
for node in parent_registration.node_and_primary_descendants():
node.set_privacy('public', auth=None, save=True)
parent_registration.registered_from.add_log(
action=NodeLog.EMBARGO_COMPLETED,
params={
'node': parent_registration.registered_from_id,
'registration': parent_registration._id,
'embargo_id': embargo._id,
},
auth=None,
)
embargo.save()
except Exception as err:
logger.error(
'Unexpected error raised when completing embargo for '
'registration {}. Continuing...'.format(parent_registration))
logger.exception(err)
def should_be_embargoed(embargo):
"""Returns true if embargo was initiated more than 48 hours prior."""
return (datetime.datetime.utcnow() - embargo.initiation_date) >= settings.EMBARGO_PENDING_TIME
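# Illustrative check (hypothetical values; assumes EMBARGO_PENDING_TIME is a
# datetime.timedelta such as timedelta(hours=48)):
#
#   embargo.initiation_date = datetime.datetime.utcnow() - datetime.timedelta(hours=49)
#   should_be_embargoed(embargo)   # -> True once the pending window has elapsed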
@celery_app.task(name='scripts.embargo_registrations')
def run_main(dry_run=True):
init_app(routes=False)
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
main(dry_run=dry_run)
| apache-2.0 |
Jnosh/swift | utils/android/adb/commands.py | 32 | 6053 | # adb/commands.py - Run executables on an Android device -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Push executables to an Android device and run them, capturing their output
# and exit code.
#
# ----------------------------------------------------------------------------
from __future__ import print_function
import subprocess
import tempfile
import uuid
# A temporary directory on the Android device.
DEVICE_TEMP_DIR = '/data/local/tmp'
def shell(args):
"""
Execute 'adb shell' with the given arguments.
Raise an exception if 'adb shell' returns a non-zero exit code.
Note that this only occurs if communication with the connected device
fails, not if the command run on the device fails.
"""
return subprocess.check_output(['adb', 'shell'] + args)
def rmdir(path):
"""Remove all files in the device directory at `path`."""
shell(['rm', '-rf', '{}/*'.format(path)])
def push(local_path, device_path):
"""Move the file at the given local path to the path on the device."""
return subprocess.check_output(['adb', 'push', local_path, device_path],
stderr=subprocess.STDOUT).strip()
def reboot():
"""Reboot the connected Android device, waiting for it to return online."""
subprocess.check_call(['adb', 'reboot'])
subprocess.check_call(['adb', 'wait-for-device'])
def _create_executable_on_device(device_path, contents):
_, tmp = tempfile.mkstemp()
with open(tmp, 'w') as f:
f.write(contents)
push(tmp, device_path)
shell(['chmod', '755', device_path])
def execute_on_device(executable_path, executable_arguments):
"""
Run an executable on an Android device.
Push an executable at the given 'executable_path' to an Android device,
then execute that executable on the device, passing any additional
'executable_arguments'. Return 0 if the executable succeeded when run on
device, and 1 otherwise.
This function is not as simple as calling 'adb shell', for two reasons:
1. 'adb shell' can only take input up to a certain length, so it fails for
long executable names or when a large number of arguments are passed to
the executable. This function attempts to limit the size of any string
passed to 'adb shell'.
2. 'adb shell' ignores the exit code of any command it runs. This function
therefore uses its own mechanisms to determine whether the executable
had a successful exit code when run on device.
"""
# We'll be running the executable in a temporary directory in
# /data/local/tmp. `adb shell` has trouble with commands that
# exceed a certain length, so to err on the safe side we only
# use the first 10 characters of the UUID.
uuid_dir = '{}/{}'.format(DEVICE_TEMP_DIR, str(uuid.uuid4())[:10])
shell(['mkdir', '-p', uuid_dir])
# `adb` can only handle commands under a certain length. No matter what the
# original executable's name, on device we call it `__executable`.
executable = '{}/__executable'.format(uuid_dir)
push(executable_path, executable)
# When running the executable on the device, we need to pass it the same
# arguments, as well as specify the correct LD_LIBRARY_PATH. Save these
# to a file we can easily call multiple times.
executable_with_args = '{}/__executable_with_args'.format(uuid_dir)
_create_executable_on_device(
executable_with_args,
'LD_LIBRARY_PATH={uuid_dir}:{tmp_dir} '
'{executable} {executable_arguments}'.format(
uuid_dir=uuid_dir,
tmp_dir=DEVICE_TEMP_DIR,
executable=executable,
executable_arguments=' '.join(executable_arguments)))
# Write the output from the test executable to a file named '__stdout', and
# if the test executable succeeds, write 'SUCCEEDED' to a file
# named '__succeeded'. We do this because `adb shell` does not report
# the exit code of the command it executes on the device, so instead we
# check the '__succeeded' file for our string.
executable_stdout = '{}/__stdout'.format(uuid_dir)
succeeded_token = 'SUCCEEDED'
executable_succeeded = '{}/__succeeded'.format(uuid_dir)
executable_piped = '{}/__executable_piped'.format(uuid_dir)
_create_executable_on_device(
executable_piped,
'{executable_with_args} > {executable_stdout} && '
'echo "{succeeded_token}" > {executable_succeeded}'.format(
executable_with_args=executable_with_args,
executable_stdout=executable_stdout,
succeeded_token=succeeded_token,
executable_succeeded=executable_succeeded))
# We've pushed everything we need to the device.
# Now execute the wrapper script.
shell([executable_piped])
# Grab the results of running the executable on device.
stdout = shell(['cat', executable_stdout])
exitcode = shell(['cat', executable_succeeded])
if not exitcode.startswith(succeeded_token):
debug_command = '$ adb shell {}'.format(executable_with_args)
print('Executable exited with a non-zero code on the Android device.\n'
'Device stdout:\n'
'{stdout}\n'
'To debug, run:\n'
'{debug_command}\n'.format(
stdout=stdout,
debug_command=debug_command))
# Exit early so that the output isn't passed to FileCheck, nor are any
# temporary directories removed; this allows the user to re-run
# the executable on the device.
return 1
print(stdout)
shell(['rm', '-rf', uuid_dir])
return 0
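# Illustrative usage sketch (the binary path and arguments are hypothetical):
#
#   if __name__ == '__main__':
#       import sys
#       sys.exit(execute_on_device('/tmp/hello-swift', ['--verbose']))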
| apache-2.0 |
xxsergzzxx/python-for-android | python3-alpha/python3-src/Lib/sqlite3/test/regression.py | 46 | 10666 | #-*- coding: ISO-8859-1 -*-
# pysqlite2/test/regression.py: pysqlite regression tests
#
# Copyright (C) 2006-2010 Gerhard Häring <gh@ghaering.de>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import datetime
import unittest
import sqlite3 as sqlite
class RegressionTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
def tearDown(self):
self.con.close()
def CheckPragmaUserVersion(self):
# This used to crash pysqlite because this pragma command returns NULL for the column name
cur = self.con.cursor()
cur.execute("pragma user_version")
def CheckPragmaSchemaVersion(self):
# This still crashed pysqlite <= 2.2.1
con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_COLNAMES)
try:
cur = self.con.cursor()
cur.execute("pragma schema_version")
finally:
cur.close()
con.close()
def CheckStatementReset(self):
# pysqlite 2.1.0 to 2.2.0 have the problem that not all statements are
# reset before a rollback, but only those that are still in the
# statement cache. The others are not accessible from the connection object.
con = sqlite.connect(":memory:", cached_statements=5)
cursors = [con.cursor() for x in range(5)]
cursors[0].execute("create table test(x)")
for i in range(10):
cursors[0].executemany("insert into test(x) values (?)", [(x,) for x in range(10)])
for i in range(5):
cursors[i].execute(" " * i + "select x from test")
con.rollback()
def CheckColumnNameWithSpaces(self):
cur = self.con.cursor()
cur.execute('select 1 as "foo bar [datetime]"')
self.assertEqual(cur.description[0][0], "foo bar")
cur.execute('select 1 as "foo baz"')
self.assertEqual(cur.description[0][0], "foo baz")
def CheckStatementFinalizationOnCloseDb(self):
# pysqlite versions <= 2.3.3 only finalized statements in the statement
# cache when closing the database. Statements that were still
# referenced in cursors weren't closed and could provoke
# "OperationalError: Unable to close due to unfinalised statements".
con = sqlite.connect(":memory:")
cursors = []
# default statement cache size is 100
for i in range(105):
cur = con.cursor()
cursors.append(cur)
cur.execute("select 1 x union select " + str(i))
con.close()
def CheckOnConflictRollback(self):
if sqlite.sqlite_version_info < (3, 2, 2):
return
con = sqlite.connect(":memory:")
con.execute("create table foo(x, unique(x) on conflict rollback)")
con.execute("insert into foo(x) values (1)")
try:
con.execute("insert into foo(x) values (1)")
except sqlite.DatabaseError:
pass
con.execute("insert into foo(x) values (2)")
try:
con.commit()
except sqlite.OperationalError:
self.fail("pysqlite knew nothing about the implicit ROLLBACK")
def CheckWorkaroundForBuggySqliteTransferBindings(self):
"""
pysqlite would crash with older SQLite versions unless
a workaround is implemented.
"""
self.con.execute("create table foo(bar)")
self.con.execute("drop table foo")
self.con.execute("create table foo(bar)")
def CheckEmptyStatement(self):
"""
pysqlite used to segfault with SQLite versions 3.5.x. These return NULL
for "no-operation" statements
"""
self.con.execute("")
def CheckTypeMapUsage(self):
"""
pysqlite until 2.4.1 did not rebuild the row_cast_map when recompiling
a statement. This test exhibits the problem.
"""
SELECT = "select * from foo"
con = sqlite.connect(":memory:",detect_types=sqlite.PARSE_DECLTYPES)
con.execute("create table foo(bar timestamp)")
con.execute("insert into foo(bar) values (?)", (datetime.datetime.now(),))
con.execute(SELECT)
con.execute("drop table foo")
con.execute("create table foo(bar integer)")
con.execute("insert into foo(bar) values (5)")
con.execute(SELECT)
def CheckErrorMsgDecodeError(self):
# When porting the module to Python 3.0, the error message about
# decoding errors disappeared. This verifies they're back again.
failure = None
try:
self.con.execute("select 'xxx' || ? || 'yyy' colname",
(bytes(bytearray([250])),)).fetchone()
failure = "should have raised an OperationalError with detailed description"
except sqlite.OperationalError as e:
msg = e.args[0]
if not msg.startswith("Could not decode to UTF-8 column 'colname' with text 'xxx"):
failure = "OperationalError did not have expected description text"
if failure:
self.fail(failure)
def CheckRegisterAdapter(self):
"""
See issue 3312.
"""
self.assertRaises(TypeError, sqlite.register_adapter, {}, None)
def CheckSetIsolationLevel(self):
"""
See issue 3312.
"""
con = sqlite.connect(":memory:")
setattr(con, "isolation_level", "\xe9")
def CheckCursorConstructorCallCheck(self):
"""
Verifies that cursor methods check whether base class __init__ was called.
"""
class Cursor(sqlite.Cursor):
def __init__(self, con):
pass
con = sqlite.connect(":memory:")
cur = Cursor(con)
try:
cur.execute("select 4+5").fetchall()
self.fail("should have raised ProgrammingError")
except sqlite.ProgrammingError:
pass
except:
self.fail("should have raised ProgrammingError")
def CheckStrSubclass(self):
"""
The Python 3.0 port of the module didn't cope with values of subclasses of str.
"""
class MyStr(str): pass
self.con.execute("select ?", (MyStr("abc"),))
def CheckConnectionConstructorCallCheck(self):
"""
Verifies that connection methods check whether base class __init__ was called.
"""
class Connection(sqlite.Connection):
def __init__(self, name):
pass
con = Connection(":memory:")
try:
cur = con.cursor()
self.fail("should have raised ProgrammingError")
except sqlite.ProgrammingError:
pass
except:
self.fail("should have raised ProgrammingError")
def CheckCursorRegistration(self):
"""
Verifies that subclassed cursor classes are correctly registered with
the connection object, too. (fetch-across-rollback problem)
"""
class Connection(sqlite.Connection):
def cursor(self):
return Cursor(self)
class Cursor(sqlite.Cursor):
def __init__(self, con):
sqlite.Cursor.__init__(self, con)
con = Connection(":memory:")
cur = con.cursor()
cur.execute("create table foo(x)")
cur.executemany("insert into foo(x) values (?)", [(3,), (4,), (5,)])
cur.execute("select x from foo")
con.rollback()
try:
cur.fetchall()
self.fail("should have raised InterfaceError")
except sqlite.InterfaceError:
pass
except:
self.fail("should have raised InterfaceError")
def CheckAutoCommit(self):
"""
Verifies that creating a connection in autocommit mode works.
2.5.3 introduced a regression so that these could no longer
be created.
"""
con = sqlite.connect(":memory:", isolation_level=None)
def CheckPragmaAutocommit(self):
"""
Verifies that running a PRAGMA statement that does an autocommit does
work. This did not work in 2.5.3/2.5.4.
"""
cur = self.con.cursor()
cur.execute("create table foo(bar)")
cur.execute("insert into foo(bar) values (5)")
cur.execute("pragma page_size")
row = cur.fetchone()
def CheckSetDict(self):
"""
See http://bugs.python.org/issue7478
It was possible to successfully register callbacks that could not be
hashed. Return codes of PyDict_SetItem were not checked properly.
"""
class NotHashable:
def __call__(self, *args, **kw):
pass
def __hash__(self):
raise TypeError()
var = NotHashable()
self.assertRaises(TypeError, self.con.create_function, var)
self.assertRaises(TypeError, self.con.create_aggregate, var)
self.assertRaises(TypeError, self.con.set_authorizer, var)
self.assertRaises(TypeError, self.con.set_progress_handler, var)
def CheckConnectionCall(self):
"""
Call a connection with a non-string SQL request: check error handling
of the statement constructor.
"""
self.assertRaises(sqlite.Warning, self.con, 1)
def CheckCollation(self):
def collation_cb(a, b):
return 1
self.assertRaises(sqlite.ProgrammingError, self.con.create_collation,
# Lone surrogate cannot be encoded to the default encoding (utf8)
"\uDC80", collation_cb)
def suite():
regression_suite = unittest.makeSuite(RegressionTests, "Check")
return unittest.TestSuite((regression_suite,))
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
| apache-2.0 |
anandpdoshi/frappe | frappe/tests/test_password.py | 8 | 3274 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.utils.password import update_password, check_password
class TestPassword(unittest.TestCase):
def setUp(self):
frappe.delete_doc('Email Account', 'Test Email Account Password')
frappe.delete_doc('Email Account', 'Test Email Account Password-new')
def test_encrypted_password(self):
doc = self.make_email_account()
new_password = 'test-password'
doc.password = new_password
doc.save()
self.assertEquals(doc.password, '*'*len(new_password))
auth_password = frappe.db.sql('''select `password` from `__Auth`
where doctype=%(doctype)s and name=%(name)s and fieldname="password"''', doc.as_dict())[0][0]
# encrypted
self.assertTrue(auth_password != new_password)
# decrypted
self.assertEquals(doc.get_password(), new_password)
return doc, new_password
def make_email_account(self, name='Test Email Account Password'):
if not frappe.db.exists('Email Account', name):
return frappe.get_doc({
'doctype': 'Email Account',
'email_account_name': name,
'append_to': 'Communication',
'smtp_server': 'test.example.com',
'pop3_server': 'pop.test.example.com',
'email_id': 'test@example.com',
'password': 'password',
}).insert()
else:
return frappe.get_doc('Email Account', name)
def test_hashed_password(self, user='test@example.com'):
old_password = 'testpassword'
new_password = 'testpassword-new'
update_password(user, new_password)
auth = frappe.db.sql('''select `password`, `salt` from `__Auth`
where doctype='User' and name=%s and fieldname="password"''', user, as_dict=True)[0]
self.assertTrue(auth.password != new_password)
self.assertTrue(auth.salt)
# stored password = password(plain_text_password + salt)
self.assertEquals(frappe.db.sql('select password(concat(%s, %s))', (new_password, auth.salt))[0][0], auth.password)
self.assertTrue(check_password(user, new_password))
# revert back to old
update_password(user, old_password)
self.assertTrue(check_password(user, old_password))
# shouldn't work with old password
self.assertRaises(frappe.AuthenticationError, check_password, user, new_password)
def test_password_on_rename_user(self):
password = 'test-rename-password'
doc = self.make_email_account()
doc.password = password
doc.save()
old_name = doc.name
new_name = old_name + '-new'
frappe.rename_doc(doc.doctype, old_name, new_name)
new_doc = frappe.get_doc(doc.doctype, new_name)
self.assertEquals(new_doc.get_password(), password)
self.assertTrue(not frappe.db.sql('''select `password` from `__Auth`
where doctype=%s and name=%s and fieldname="password"''', (doc.doctype, doc.name)))
frappe.rename_doc(doc.doctype, new_name, old_name)
self.assertTrue(frappe.db.sql('''select `password` from `__Auth`
where doctype=%s and name=%s and fieldname="password"''', (doc.doctype, doc.name)))
def test_password_on_delete(self):
doc = self.make_email_account()
doc.delete()
self.assertTrue(not frappe.db.sql('''select `password` from `__Auth`
where doctype=%s and name=%s and fieldname="password"''', (doc.doctype, doc.name)))
| mit |
jjkoletar/panda3d | direct/src/particles/ParticleEffect.py | 3 | 7612 |
from pandac.PandaModules import *
import Particles
import ForceGroup
from direct.directnotify import DirectNotifyGlobal
class ParticleEffect(NodePath):
notify = DirectNotifyGlobal.directNotify.newCategory('ParticleEffect')
pid = 1
def __init__(self, name=None, particles=None):
if name == None:
name = 'particle-effect-%d' % ParticleEffect.pid
ParticleEffect.pid += 1
NodePath.__init__(self, name)
# Record particle effect name
self.name = name
# Enabled flag
self.fEnabled = 0
# Dictionary of particles and forceGroups
self.particlesDict = {}
self.forceGroupDict = {}
# The effect's particle system
if particles != None:
self.addParticles(particles)
self.renderParent = None
def cleanup(self):
self.removeNode()
self.disable()
if self.__isValid():
for f in self.forceGroupDict.values():
f.cleanup()
for p in self.particlesDict.values():
p.cleanup()
del self.forceGroupDict
del self.particlesDict
del self.renderParent
def getName(self):
# override NodePath.getName()
return self.name
def reset(self):
self.removeAllForces()
self.removeAllParticles()
self.forceGroupDict = {}
self.particlesDict = {}
def start(self, parent=None, renderParent=None):
assert self.notify.debug('start() - name: %s' % self.name)
self.renderParent = renderParent
self.enable()
if parent != None:
self.reparentTo(parent)
def enable(self):
# band-aid added for client crash - grw
if self.__isValid():
if self.renderParent:
for p in self.particlesDict.values():
p.setRenderParent(self.renderParent.node())
for f in self.forceGroupDict.values():
f.enable()
for p in self.particlesDict.values():
p.enable()
self.fEnabled = 1
def disable(self):
self.detachNode()
# band-aid added for client crash - grw
if self.__isValid():
for p in self.particlesDict.values():
p.setRenderParent(p.node)
for f in self.forceGroupDict.values():
f.disable()
for p in self.particlesDict.values():
p.disable()
self.fEnabled = 0
def isEnabled(self):
"""
Note: this may be misleading if enable() and disable() are not used
"""
return self.fEnabled
def addForceGroup(self, forceGroup):
forceGroup.nodePath.reparentTo(self)
forceGroup.particleEffect = self
self.forceGroupDict[forceGroup.getName()] = forceGroup
# Associate the force group with all particles
for i in range(len(forceGroup)):
self.addForce(forceGroup[i])
def addForce(self, force):
for p in self.particlesDict.values():
p.addForce(force)
def removeForceGroup(self, forceGroup):
# Remove forces from all particles
for i in range(len(forceGroup)):
self.removeForce(forceGroup[i])
forceGroup.nodePath.removeNode()
forceGroup.particleEffect = None
self.forceGroupDict.pop(forceGroup.getName(), None)
def removeForce(self, force):
for p in self.particlesDict.values():
p.removeForce(force)
def removeAllForces(self):
for fg in self.forceGroupDict.values():
self.removeForceGroup(fg)
def addParticles(self, particles):
particles.nodePath.reparentTo(self)
self.particlesDict[particles.getName()] = particles
# Associate all forces in all force groups with the particles
for fg in self.forceGroupDict.values():
for i in range(len(fg)):
particles.addForce(fg[i])
def removeParticles(self, particles):
if particles == None:
self.notify.warning('removeParticles() - particles == None!')
return
particles.nodePath.detachNode()
self.particlesDict.pop(particles.getName(), None)
# Remove all forces from the particles
for fg in self.forceGroupDict.values():
for f in fg:
particles.removeForce(f)
def removeAllParticles(self):
for p in self.particlesDict.values():
self.removeParticles(p)
def getParticlesList(self):
return self.particlesDict.values()
def getParticlesNamed(self, name):
return self.particlesDict.get(name, None)
def getParticlesDict(self):
return self.particlesDict
def getForceGroupList(self):
return self.forceGroupDict.values()
def getForceGroupNamed(self, name):
return self.forceGroupDict.get(name, None)
def getForceGroupDict(self):
return self.forceGroupDict
def saveConfig(self, filename):
filename = Filename(filename)
f = open(filename.toOsSpecific(), 'wb')
# Add a blank line
f.write('\n')
# Make sure we start with a clean slate
f.write('self.reset()\n')
pos = self.getPos()
hpr = self.getHpr()
scale = self.getScale()
f.write('self.setPos(%0.3f, %0.3f, %0.3f)\n' %
(pos[0], pos[1], pos[2]))
f.write('self.setHpr(%0.3f, %0.3f, %0.3f)\n' %
(hpr[0], hpr[1], hpr[2]))
f.write('self.setScale(%0.3f, %0.3f, %0.3f)\n' %
(scale[0], scale[1], scale[2]))
# Save all the particles to file
num = 0
for p in self.particlesDict.values():
target = 'p%d' % num
num = num + 1
f.write(target + ' = Particles.Particles(\'%s\')\n' % p.getName())
p.printParams(f, target)
f.write('self.addParticles(%s)\n' % target)
# Save all the forces to file
num = 0
for fg in self.forceGroupDict.values():
target = 'f%d' % num
num = num + 1
f.write(target + ' = ForceGroup.ForceGroup(\'%s\')\n' % \
fg.getName())
fg.printParams(f, target)
f.write('self.addForceGroup(%s)\n' % target)
# Close the file
f.close()
def loadConfig(self, filename):
data = vfs.readFile(filename, 1)
data = data.replace('\r', '')
try:
exec(data)
except:
self.notify.warning('loadConfig: failed to load particle file: '+ repr(filename))
raise
def accelerate(self,time,stepCount = 1,stepTime=0.0):
for particles in self.getParticlesList():
particles.accelerate(time,stepCount,stepTime)
def clearToInitial(self):
for particles in self.getParticlesList():
particles.clearToInitial()
def softStop(self):
for particles in self.getParticlesList():
particles.softStop()
def softStart(self):
if self.__isValid():
for particles in self.getParticlesList():
particles.softStart()
else:
# Not asserting here since we want to crash live clients for more expedient bugfix
# (Sorry, live clients)
self.notify.error('Trying to start effect(%s) after cleanup.' % (self.getName(),))
def __isValid(self):
return hasattr(self, 'forceGroupDict') and \
hasattr(self, 'particlesDict')
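# Illustrative usage sketch (assumes a running ShowBase app, so that `render`
# exists; 'smoke.ptf' is a hypothetical config produced by saveConfig()):
#
#   effect = ParticleEffect()
#   effect.loadConfig('smoke.ptf')
#   effect.start(parent=render)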
| bsd-3-clause |
bryceliu/ansible | lib/ansible/plugins/callback/skippy.py | 94 | 6549 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
'''
This is the default callback interface, which simply prints messages
to stdout when new callback events are received.
'''
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'skippy'
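# Typically selected as the stdout callback via ansible.cfg (illustrative):
#
#   [defaults]
#   stdout_callback = skippy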
def v2_runner_on_failed(self, result, ignore_errors=False):
if 'exception' in result._result:
if self._display.verbosity < 3:
# extract just the actual error message from the exception text
error = result._result['exception'].strip().split('\n')[-1]
msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error
else:
msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception']
self._display.display(msg, color='red')
# finally, remove the exception from the result so it's not shown every time
del result._result['exception']
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red')
if result._task.ignore_errors:
self._display.display("...ignoring", color='cyan')
def v2_runner_on_ok(self, result):
if result._task.action == 'include':
msg = 'included: %s for %s' % (result._task.args.get('_raw_params'), result._host.name)
color = 'cyan'
elif result._result.get('changed', False):
msg = "changed: [%s]" % result._host.get_name()
color = 'yellow'
else:
msg = "ok: [%s]" % result._host.get_name()
color = 'green'
if result._task.loop and 'results' in result._result:
self._process_items(result)
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result and result._task.action != 'include':
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=color)
self._handle_warnings(result._result)
def v2_runner_on_unreachable(self, result):
self._display.display("fatal: [%s]: UNREACHABLE! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red')
def v2_playbook_on_no_hosts_matched(self):
self._display.display("skipping: no hosts matched", color='cyan')
def v2_playbook_on_no_hosts_remaining(self):
self._display.banner("NO MORE HOSTS LEFT")
def v2_playbook_on_task_start(self, task, is_conditional):
self._display.banner("TASK [%s]" % task.get_name().strip())
def v2_playbook_on_cleanup_task_start(self, task):
self._display.banner("CLEANUP TASK [%s]" % task.get_name().strip())
def v2_playbook_on_handler_task_start(self, task):
self._display.banner("RUNNING HANDLER [%s]" % task.get_name().strip())
def v2_playbook_on_play_start(self, play):
name = play.get_name().strip()
if not name:
msg = "PLAY"
else:
msg = "PLAY [%s]" % name
self._display.banner(msg)
def v2_on_file_diff(self, result):
if 'diff' in result._result and result._result['diff']:
self._display.display(self._get_diff(result._result['diff']))
def v2_playbook_item_on_ok(self, result):
if result._task.action == 'include':
msg = 'included: %s for %s' % (result._task.args.get('_raw_params'), result._host.name)
color = 'cyan'
elif result._result.get('changed', False):
msg = "changed: [%s]" % result._host.get_name()
color = 'yellow'
else:
msg = "ok: [%s]" % result._host.get_name()
color = 'green'
msg += " => (item=%s)" % result._result['item']
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result and result._task.action != 'include':
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=color)
def v2_playbook_item_on_failed(self, result):
if 'exception' in result._result:
if self._display.verbosity < 3:
# extract just the actual error message from the exception text
error = result._result['exception'].strip().split('\n')[-1]
msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error
else:
msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception']
self._display.display(msg, color='red')
# finally, remove the exception from the result so it's not shown every time
del result._result['exception']
self._display.display("failed: [%s] => (item=%s) => %s" % (result._host.get_name(), result._result['item'], self._dump_results(result._result)), color='red')
self._handle_warnings(result._result)
def v2_playbook_item_on_skipped(self, result):
msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), result._result['item'])
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color='cyan')
| gpl-3.0 |
thetreerat/django-swingtime | swingtime/conf/swingtime_settings.py | 3 | 1436 | import datetime
# A "strftime" string for formatting start and end time selectors in forms
TIMESLOT_TIME_FORMAT = '%I:%M %p'
# Used for creating start and end time form selectors as well as time slot grids.
# Value should be a datetime.timedelta value representing the incremental
# differences between temporal options.
TIMESLOT_INTERVAL = datetime.timedelta(minutes=15)
# A datetime.time value indicating the starting time for time slot grids and
# form selectors.
TIMESLOT_START_TIME = datetime.time(9)
# A datetime.timedelta value indicating the offset value from
# TIMESLOT_START_TIME for creating time slot grids and form selectors. The
# reason for using a time delta is that it is possible to span dates. For
# instance, one could have a starting time of 3pm (15:00) and wish to
# indicate an ending value of
# 1:30am (01:30), in which case a value of datetime.timedelta(hours=10.5)
# could be specified to indicate that the 1:30 represents the following date's
# time and not the current date.
TIMESLOT_END_TIME_DURATION = datetime.timedelta(hours=+8)
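# Illustrative sketch (not part of swingtime itself): with the settings above,
# the selectable slots run 09:00 AM, 09:15 AM, ... up to
# TIMESLOT_START_TIME + TIMESLOT_END_TIME_DURATION:
#
#   start = datetime.datetime.combine(datetime.date.today(), TIMESLOT_START_TIME)
#   end = start + TIMESLOT_END_TIME_DURATION
#   slots = []
#   while start <= end:
#       slots.append(start.time().strftime(TIMESLOT_TIME_FORMAT))
#       start += TIMESLOT_INTERVAL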
# Indicates a minimum value for the number of grid columns to be shown in the time
# slot table.
TIMESLOT_MIN_COLUMNS = 4
# Indicates the default length in time for a new occurrence, specified by using
# a datetime.timedelta object
DEFAULT_OCCURRENCE_DURATION = datetime.timedelta(hours=+1)
# If not None, passed to the calendar module's setfirstweekday function.
CALENDAR_FIRST_WEEKDAY = 6
| mit |
Daniex/horizon | openstack_dashboard/utils/filters.py | 70 | 1058 | # Copyright 2012 NEC Corporation All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
def get_int_or_uuid(value):
"""Check if a value is valid as UUID or an integer.
This method is mainly used to convert floating IP id to the
appropriate type. For floating IP id, integer is used in Nova's
original implementation, but UUID is used in Neutron based one.
"""
try:
uuid.UUID(value)
return value
except (ValueError, AttributeError):
return int(value)
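# Illustrative behaviour (values are hypothetical):
#
#   get_int_or_uuid('42')                                    # -> 42
#   get_int_or_uuid('6ba7b810-9dad-11d1-80b4-00c04fd430c8')  # -> same string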
| apache-2.0 |
jdramani/servo | tests/wpt/css-tests/tools/wptserve/wptserve/constants.py | 326 | 4616 | import utils
content_types = utils.invert_dict({"text/html": ["htm", "html"],
"application/json": ["json"],
"application/xhtml+xml": ["xht", "xhtm", "xhtml"],
"application/xml": ["xml"],
"application/x-xpinstall": ["xpi"],
"text/javascript": ["js"],
"text/css": ["css"],
"text/plain": ["txt", "md"],
"image/svg+xml": ["svg"],
"image/gif": ["gif"],
"image/jpeg": ["jpg", "jpeg"],
"image/png": ["png"],
"image/bmp": ["bmp"],
"text/event-stream": ["event_stream"],
"text/cache-manifest": ["manifest"],
"video/mp4": ["mp4", "m4v"],
"audio/mp4": ["m4a"],
"audio/mpeg": ["mp3"],
"video/webm": ["webm"],
"audio/webm": ["weba"],
"video/ogg": ["ogg", "ogv"],
"audio/ogg": ["oga"],
"audio/x-wav": ["wav"],
"text/vtt": ["vtt"],})
response_codes = {
100: ('Continue', 'Request received, please continue'),
101: ('Switching Protocols',
'Switching to new protocol; obey Upgrade header'),
200: ('OK', 'Request fulfilled, document follows'),
201: ('Created', 'Document created, URL follows'),
202: ('Accepted',
'Request accepted, processing continues off-line'),
203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
204: ('No Content', 'Request fulfilled, nothing follows'),
205: ('Reset Content', 'Clear input form for further input.'),
206: ('Partial Content', 'Partial content follows.'),
300: ('Multiple Choices',
'Object has several resources -- see URI list'),
301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
302: ('Found', 'Object moved temporarily -- see URI list'),
303: ('See Other', 'Object moved -- see Method and URL list'),
304: ('Not Modified',
'Document has not changed since given time'),
305: ('Use Proxy',
'You must use proxy specified in Location to access this '
'resource.'),
307: ('Temporary Redirect',
'Object moved temporarily -- see URI list'),
400: ('Bad Request',
'Bad request syntax or unsupported method'),
401: ('Unauthorized',
'No permission -- see authorization schemes'),
402: ('Payment Required',
'No payment -- see charging schemes'),
403: ('Forbidden',
'Request forbidden -- authorization will not help'),
404: ('Not Found', 'Nothing matches the given URI'),
405: ('Method Not Allowed',
'Specified method is invalid for this resource.'),
406: ('Not Acceptable', 'URI not available in preferred format.'),
407: ('Proxy Authentication Required', 'You must authenticate with '
'this proxy before proceeding.'),
408: ('Request Timeout', 'Request timed out; try again later.'),
409: ('Conflict', 'Request conflict.'),
410: ('Gone',
'URI no longer exists and has been permanently removed.'),
411: ('Length Required', 'Client must specify Content-Length.'),
412: ('Precondition Failed', 'Precondition in headers is false.'),
413: ('Request Entity Too Large', 'Entity is too large.'),
414: ('Request-URI Too Long', 'URI is too long.'),
415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
416: ('Requested Range Not Satisfiable',
'Cannot satisfy request range.'),
417: ('Expectation Failed',
'Expect condition could not be satisfied.'),
500: ('Internal Server Error', 'Server got itself in trouble'),
501: ('Not Implemented',
'Server does not support this operation'),
502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
503: ('Service Unavailable',
'The server cannot process the request due to a high load'),
504: ('Gateway Timeout',
'The gateway server did not receive a timely response'),
505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
}
| mpl-2.0 |
Cactuslegs/audacity-of-nope | lib-src/lv2/lv2/plugins/eg02-midigate.lv2/waflib/Configure.py | 147 | 9872 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,shlex,sys,time
from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors
try:
from urllib import request
except ImportError:
from urllib import urlopen
else:
urlopen=request.urlopen
BREAK='break'
CONTINUE='continue'
WAF_CONFIG_LOG='config.log'
autoconfig=False
conf_template='''# project %(app)s configured on %(now)s by
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s
#'''
def download_check(node):
pass
def download_tool(tool,force=False,ctx=None):
for x in Utils.to_list(Context.remote_repo):
for sub in Utils.to_list(Context.remote_locs):
url='/'.join((x,sub,tool+'.py'))
try:
web=urlopen(url)
try:
if web.getcode()!=200:
continue
except AttributeError:
pass
except Exception:
continue
else:
tmp=ctx.root.make_node(os.sep.join((Context.waf_dir,'waflib','extras',tool+'.py')))
tmp.write(web.read(),'wb')
Logs.warn('Downloaded %s from %s'%(tool,url))
download_check(tmp)
try:
module=Context.load_tool(tool)
except Exception:
Logs.warn('The tool %s from %s is unusable'%(tool,url))
try:
tmp.delete()
except Exception:
pass
continue
return module
raise Errors.WafError('Could not load the Waf tool')
class ConfigurationContext(Context.Context):
'''configures the project'''
cmd='configure'
error_handlers=[]
def __init__(self,**kw):
super(ConfigurationContext,self).__init__(**kw)
self.environ=dict(os.environ)
self.all_envs={}
self.top_dir=None
self.out_dir=None
self.tools=[]
self.hash=0
self.files=[]
self.tool_cache=[]
self.setenv('')
def setenv(self,name,env=None):
if name not in self.all_envs or env:
if not env:
env=ConfigSet.ConfigSet()
self.prepare_env(env)
else:
env=env.derive()
self.all_envs[name]=env
self.variant=name
def get_env(self):
return self.all_envs[self.variant]
def set_env(self,val):
self.all_envs[self.variant]=val
env=property(get_env,set_env)
def init_dirs(self):
top=self.top_dir
if not top:
top=Options.options.top
if not top:
top=getattr(Context.g_module,Context.TOP,None)
if not top:
top=self.path.abspath()
top=os.path.abspath(top)
self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top)
assert(self.srcnode)
out=self.out_dir
if not out:
out=Options.options.out
if not out:
out=getattr(Context.g_module,Context.OUT,None)
if not out:
out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','')
self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out)
self.bldnode.mkdir()
if not os.path.isdir(self.bldnode.abspath()):
self.fatal('Could not create the build directory %s'%self.bldnode.abspath())
def execute(self):
self.init_dirs()
self.cachedir=self.bldnode.make_node(Build.CACHE_DIR)
self.cachedir.mkdir()
path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG)
self.logger=Logs.make_logger(path,'cfg')
app=getattr(Context.g_module,'APPNAME','')
if app:
ver=getattr(Context.g_module,'VERSION','')
if ver:
app="%s (%s)"%(app,ver)
now=time.ctime()
pyver=sys.hexversion
systype=sys.platform
args=" ".join(sys.argv)
wafver=Context.WAFVERSION
abi=Context.ABI
self.to_log(conf_template%vars())
self.msg('Setting top to',self.srcnode.abspath())
self.msg('Setting out to',self.bldnode.abspath())
if id(self.srcnode)==id(self.bldnode):
Logs.warn('Setting top == out (remember to use "update_outputs")')
elif id(self.path)!=id(self.srcnode):
if self.srcnode.is_child_of(self.path):
Logs.warn('Are you certain that you do not want to set top="." ?')
super(ConfigurationContext,self).execute()
self.store()
Context.top_dir=self.srcnode.abspath()
Context.out_dir=self.bldnode.abspath()
env=ConfigSet.ConfigSet()
env['argv']=sys.argv
env['options']=Options.options.__dict__
env.run_dir=Context.run_dir
env.top_dir=Context.top_dir
env.out_dir=Context.out_dir
env['hash']=self.hash
env['files']=self.files
env['environ']=dict(self.environ)
if not self.env.NO_LOCK_IN_RUN:
env.store(Context.run_dir+os.sep+Options.lockfile)
if not self.env.NO_LOCK_IN_TOP:
env.store(Context.top_dir+os.sep+Options.lockfile)
if not self.env.NO_LOCK_IN_OUT:
env.store(Context.out_dir+os.sep+Options.lockfile)
def prepare_env(self,env):
if not env.PREFIX:
if Options.options.prefix or Utils.is_win32:
env.PREFIX=os.path.abspath(os.path.expanduser(Options.options.prefix))
else:
env.PREFIX=''
if not env.BINDIR:
env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env)
if not env.LIBDIR:
env.LIBDIR=Utils.subst_vars('${PREFIX}/lib',env)
def store(self):
n=self.cachedir.make_node('build.config.py')
n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools))
if not self.all_envs:
self.fatal('nothing to store in the configuration context!')
for key in self.all_envs:
tmpenv=self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX))
def load(self,input,tooldir=None,funs=None,download=True):
tools=Utils.to_list(input)
if tooldir:tooldir=Utils.to_list(tooldir)
for tool in tools:
mag=(tool,id(self.env),funs)
if mag in self.tool_cache:
self.to_log('(tool %s is already loaded, skipping)'%tool)
continue
self.tool_cache.append(mag)
module=None
try:
module=Context.load_tool(tool,tooldir)
except ImportError ,e:
if Options.options.download:
module=download_tool(tool,ctx=self)
if not module:
self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
else:
self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e))
except Exception ,e:
self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
self.to_log(Utils.ex_stack())
raise
if funs is not None:
self.eval_rules(funs)
else:
func=getattr(module,'configure',None)
if func:
if type(func)is type(Utils.readf):func(self)
else:self.eval_rules(func)
self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
def post_recurse(self,node):
super(ConfigurationContext,self).post_recurse(node)
self.hash=hash((self.hash,node.read('rb')))
self.files.append(node.abspath())
def eval_rules(self,rules):
self.rules=Utils.to_list(rules)
for x in self.rules:
f=getattr(self,x)
if not f:self.fatal("No such method '%s'."%x)
try:
f()
except Exception ,e:
ret=self.err_handler(x,e)
if ret==BREAK:
break
elif ret==CONTINUE:
continue
else:
raise
def err_handler(self,fun,error):
pass
def conf(f):
def fun(*k,**kw):
mandatory=True
if'mandatory'in kw:
mandatory=kw['mandatory']
del kw['mandatory']
try:
return f(*k,**kw)
except Errors.ConfigurationError:
if mandatory:
raise
setattr(ConfigurationContext,f.__name__,fun)
setattr(Build.BuildContext,f.__name__,fun)
return f
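# Illustrative use of the @conf decorator (the helper name is hypothetical):
# the wrapped function becomes a method on both ConfigurationContext and
# BuildContext, and passing mandatory=False downgrades a ConfigurationError
# to a silent pass:
#
#   @conf
#   def check_something(self):
#       self.msg('Checking something', 'ok')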
@conf
def add_os_flags(self,var,dest=None):
try:self.env.append_value(dest or var,shlex.split(self.environ[var]))
except KeyError:pass
@conf
def cmd_to_list(self,cmd):
if isinstance(cmd,str)and cmd.find(' '):
try:
os.stat(cmd)
except OSError:
return shlex.split(cmd)
else:
return[cmd]
return cmd
@conf
def check_waf_version(self,mini='1.6.99',maxi='1.8.0'):
self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)))
ver=Context.HEXVERSION
if Utils.num2ver(mini)>ver:
self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver))
if Utils.num2ver(maxi)<ver:
self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver))
self.end_msg('ok')
@conf
def find_file(self,filename,path_list=[]):
for n in Utils.to_list(filename):
for d in Utils.to_list(path_list):
p=os.path.join(d,n)
if os.path.exists(p):
return p
self.fatal('Could not find %r'%filename)
@conf
def find_program(self,filename,**kw):
exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py')
environ=kw.get('environ',os.environ)
ret=''
filename=Utils.to_list(filename)
var=kw.get('var','')
if not var:
var=filename[0].upper()
if self.env[var]:
ret=self.env[var]
elif var in environ:
ret=environ[var]
path_list=kw.get('path_list','')
if not ret:
if path_list:
path_list=Utils.to_list(path_list)
else:
path_list=environ.get('PATH','').split(os.pathsep)
if not isinstance(filename,list):
filename=[filename]
for a in exts.split(','):
if ret:
break
for b in filename:
if ret:
break
for c in path_list:
if ret:
break
x=os.path.expanduser(os.path.join(c,b+a))
if os.path.isfile(x):
ret=x
if not ret and Utils.winreg:
ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename)
if not ret and Utils.winreg:
ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename)
self.msg('Checking for program '+','.join(filename),ret or False)
self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret))
if not ret:
self.fatal(kw.get('errmsg','')or'Could not find the program %s'%','.join(filename))
if var:
self.env[var]=ret
return ret
@conf
def find_perl_program(self,filename,path_list=[],var=None,environ=None,exts=''):
try:
app=self.find_program(filename,path_list=path_list,var=var,environ=environ,exts=exts)
except Exception:
self.find_program('perl',var='PERL')
app=self.find_file(filename,os.environ['PATH'].split(os.pathsep))
if not app:
raise
if var:
self.env[var]=Utils.to_list(self.env['PERL'])+[app]
self.msg('Checking for %r'%filename,app)
| gpl-2.0 |
Kazade/NeHe-Website | google_appengine/lib/django-1.3/tests/regressiontests/utils/feedgenerator.py | 51 | 2526 | import datetime
from django.utils import feedgenerator, tzinfo, unittest
class FeedgeneratorTest(unittest.TestCase):
"""
Tests for the low-level syndication feed framework.
"""
def test_get_tag_uri(self):
"""
Test get_tag_uri() correctly generates TagURIs.
"""
self.assertEqual(
feedgenerator.get_tag_uri('http://example.org/foo/bar#headline', datetime.date(2004, 10, 25)),
u'tag:example.org,2004-10-25:/foo/bar/headline')
def test_get_tag_uri_with_port(self):
"""
Test that get_tag_uri() correctly generates TagURIs from URLs with port
numbers.
"""
self.assertEqual(
feedgenerator.get_tag_uri('http://www.example.org:8000/2008/11/14/django#headline', datetime.datetime(2008, 11, 14, 13, 37, 0)),
u'tag:www.example.org,2008-11-14:/2008/11/14/django/headline')
def test_rfc2822_date(self):
"""
Test rfc2822_date() correctly formats datetime objects.
"""
self.assertEqual(
feedgenerator.rfc2822_date(datetime.datetime(2008, 11, 14, 13, 37, 0)),
"Fri, 14 Nov 2008 13:37:00 -0000"
)
def test_rfc2822_date_with_timezone(self):
"""
Test rfc2822_date() correctly formats datetime objects with tzinfo.
"""
self.assertEqual(
feedgenerator.rfc2822_date(datetime.datetime(2008, 11, 14, 13, 37, 0, tzinfo=tzinfo.FixedOffset(datetime.timedelta(minutes=60)))),
"Fri, 14 Nov 2008 13:37:00 +0100"
)
def test_rfc3339_date(self):
"""
Test rfc3339_date() correctly formats datetime objects.
"""
self.assertEqual(
feedgenerator.rfc3339_date(datetime.datetime(2008, 11, 14, 13, 37, 0)),
"2008-11-14T13:37:00Z"
)
def test_rfc3339_date_with_timezone(self):
"""
Test rfc3339_date() correctly formats datetime objects with tzinfo.
"""
self.assertEqual(
feedgenerator.rfc3339_date(datetime.datetime(2008, 11, 14, 13, 37, 0, tzinfo=tzinfo.FixedOffset(datetime.timedelta(minutes=120)))),
"2008-11-14T13:37:00+02:00"
)
def test_atom1_mime_type(self):
"""
Test to make sure Atom MIME type has UTF8 Charset parameter set
"""
atom_feed = feedgenerator.Atom1Feed("title", "link", "description")
self.assertEqual(
atom_feed.mime_type, "application/atom+xml; charset=utf8"
)
| bsd-3-clause |
srjoglekar246/sympy | sympy/mpmath/tests/test_summation.py | 38 | 1865 | from sympy.mpmath import *
def test_sumem():
mp.dps = 15
assert sumem(lambda k: 1/k**2.5, [50, 100]).ae(0.0012524505324784962)
assert sumem(lambda k: k**4 + 3*k + 1, [10, 100]).ae(2050333103)
def test_nsum():
mp.dps = 15
assert nsum(lambda x: x**2, [1, 3]) == 14
assert nsum(lambda k: 1/factorial(k), [0, inf]).ae(e)
assert nsum(lambda k: (-1)**(k+1) / k, [1, inf]).ae(log(2))
assert nsum(lambda k: (-1)**(k+1) / k**2, [1, inf]).ae(pi**2 / 12)
assert nsum(lambda k: (-1)**k / log(k), [2, inf]).ae(0.9242998972229388)
assert nsum(lambda k: 1/k**2, [1, inf]).ae(pi**2 / 6)
assert nsum(lambda k: 2**k/fac(k), [0, inf]).ae(exp(2))
assert nsum(lambda k: 1/k**2, [4, inf], method='e').ae(0.2838229557371153)
def test_nprod():
mp.dps = 15
assert nprod(lambda k: exp(1/k**2), [1,inf], method='r').ae(exp(pi**2/6))
assert nprod(lambda x: x**2, [1, 3]) == 36
def test_fsum():
mp.dps = 15
assert fsum([]) == 0
assert fsum([-4]) == -4
assert fsum([2,3]) == 5
assert fsum([1e-100,1]) == 1
assert fsum([1,1e-100]) == 1
assert fsum([1e100,1]) == 1e100
assert fsum([1,1e100]) == 1e100
assert fsum([1e-100,0]) == 1e-100
assert fsum([1e-100,1e100,1e-100]) == 1e100
assert fsum([2,1+1j,1]) == 4+1j
assert fsum([2,inf,3]) == inf
assert fsum([2,-1], absolute=1) == 3
assert fsum([2,-1], squared=1) == 5
assert fsum([1,1+j], squared=1) == 1+2j
assert fsum([1,3+4j], absolute=1) == 6
assert fsum([1,2+3j], absolute=1, squared=1) == 14
assert isnan(fsum([inf,-inf]))
assert fsum([inf,-inf], absolute=1) == inf
assert fsum([inf,-inf], squared=1) == inf
assert fsum([inf,-inf], absolute=1, squared=1) == inf
assert iv.fsum([1,mpi(2,3)]) == mpi(3,4)
def test_fprod():
mp.dps = 15
assert fprod([]) == 1
assert fprod([2,3]) == 6
| bsd-3-clause |
Brainiarc7/typhon-vx | mysrc/crack2.py | 6 | 3337 | #!/usr/bin/env python
import socket, string, re, os, sys, ConfigParser, time
HOST = '127.0.0.1'
PORT = 6666
config = ConfigParser.ConfigParser()
config.read(os.getenv('HOME') + '/.omgsm/config')
gsmpath = config.get('Main', 'GSMPATH')
gsmkrakenhost = config.get('Main', 'GSMKRAKENHOST')
gsmkrakenport = config.getint('Main', 'GSMKRAKENPORT')
# foundkey = A5/1 secret state; bitpos = #iteration with this state
# framecount = # of burst on which we cracked the key
# frame2 = other burst we use to verify that key is correct
# bursts_pos = position of possible cracked burst in "bursts" array
def verify(foundkey, bitpos, framecount, bursts_pos):
key=''
# verify only previous or next burst (each frame have 4 bursts => %4)
if(bursts_pos%4 == 3):
frame2=bursts[bursts_pos-1];
else:
frame2=bursts[bursts_pos+1];
# print ("%s %s %s %s %s %s %s"%(gsmpath + '/kraken/Utilities/find_kc', foundkey, bitpos, framecount, frame2[0], frame2[1],fflags))
f=os.popen("%s %s %s %s %s %s %s"%(gsmpath + '/kraken/Utilities/find_kc', foundkey, bitpos, framecount, frame2[0], frame2[1],fflags))
res=f.read()
m=re.search('.*: (..) (..) (..) (..) (..) (..) (..) (..) ... MATCHED', res)
f.close()
if (m):
key=(m.group(1)+m.group(2)+m.group(3)+m.group(4)+m.group(5)+m.group(6)+m.group(7)+m.group(8)).upper()
return key
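# crackall() below speaks a simple line protocol with the kraken server, as
# the regexes show: it sends "crack <burst>" requests and matches three reply
# shapes -- "Cracking #<job> <burst>" (job accepted), "Found <state> @ <pos>
# #<job>" (candidate A5/1 state to verify), and "crack #<job> took ..."
# (job finished, freeing a slot for the next burst).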
def crackall(bursts):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((gsmkrakenhost, gsmkrakenport))
print "connect"
file = sock.makefile("rb")
jobnums={}
pos=0
for i in range(parallel):
sock.send("crack %s\n"%bursts[pos][1])
pos=pos+1
if (pos == len(bursts)):
break
# time.sleep(4)
while (1):
s=file.readline().strip()
#print s
if(len(s)==0):
print 'Unexpected connection close'
sock.close()
sys.exit(1)
m=re.search('crack #(.*) took',s)
if(m):
del jobnums[m.group(1)]
if (pos != len(bursts)):
sock.send("crack %s\n"%bursts[pos][1])
pos=pos+1
m=re.search('Found (.*) @ (.*) #(.*) ',s)
if(m):
key = verify(m.group(1),m.group(2), jobnums[m.group(3)][0], jobnums[m.group(3)][1])
if (key != ''):
sock.close()
print sys.argv[1]+"\t"+key
sys.exit(0);
m=re.search('Cracking #(.*) (.*)',s)
if(m):
for i in range(len(bursts)):
if (m.group(2) == bursts[i][1]): # frame number
jobnums[m.group(1)]=[]
jobnums[m.group(1)].append(bursts[i][0])
jobnums[m.group(1)].append(i) # pos in bursts (for veriify)
#print "Matched "+s
if (len(jobnums) == 0):
sock.close()
return ''
cflags="" # convert flags
fflags="" # find_kc flags
if len(sys.argv) < 2:
print("Usage: "+sys.argv[0]+" bursts_file")
sys.exit(1)
if len(sys.argv) >= 3:
cflags=" -p "+sys.argv[2] # known plaintext
if len(sys.argv) >= 4:
fflags="u" # uplink
cflags=cflags+" -u"
#print(gsmpath+"/bin/gsm_convert -f %s %s 2>/dev/null |grep ^S"%(sys.argv[1],cflags))
f=os.popen(gsmpath+"/bin/gsm_convert -f %s %s 2>/dev/null |grep ^S"%(sys.argv[1],cflags))
lines=[]
for line in f.readlines():
m=re.search('^S.* .* (.*): (.*)', line)
lines.append([m.group(1), m.group(2)])
f.close()
# file does not exits, or something is wrong
if not lines:
print "Cannot_read_data\t0000000000000000"
sys.exit(42)
parallel=16
bursts=[]
for line in lines:
bursts=bursts+[line]
crackall(bursts)
print sys.argv[1]+"\t0000000000000000"
| gpl-3.0 |
Unow/edx-platform | lms/djangoapps/django_comment_client/migrations/0001_initial.py | 188 | 8982 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Role'
db.create_table('django_comment_client_role', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=30)),
('course_id', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=255, blank=True)),
))
db.send_create_signal('django_comment_client', ['Role'])
# Adding M2M table for field users on 'Role'
db.create_table('django_comment_client_role_users', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('role', models.ForeignKey(orm['django_comment_client.role'], null=False)),
('user', models.ForeignKey(orm['auth.user'], null=False))
))
db.create_unique('django_comment_client_role_users', ['role_id', 'user_id'])
# Adding model 'Permission'
db.create_table('django_comment_client_permission', (
('name', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
))
db.send_create_signal('django_comment_client', ['Permission'])
# Adding M2M table for field roles on 'Permission'
db.create_table('django_comment_client_permission_roles', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('permission', models.ForeignKey(orm['django_comment_client.permission'], null=False)),
('role', models.ForeignKey(orm['django_comment_client.role'], null=False))
))
db.create_unique('django_comment_client_permission_roles', ['permission_id', 'role_id'])
def backwards(self, orm):
# Deleting model 'Role'
db.delete_table('django_comment_client_role')
# Removing M2M table for field users on 'Role'
db.delete_table('django_comment_client_role_users')
# Deleting model 'Permission'
db.delete_table('django_comment_client_permission')
# Removing M2M table for field roles on 'Permission'
db.delete_table('django_comment_client_permission_roles')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'django_comment_client.permission': {
'Meta': {'object_name': 'Permission'},
'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
'roles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'permissions'", 'symmetrical': 'False', 'to': "orm['django_comment_client.Role']"})
},
'django_comment_client.role': {
'Meta': {'object_name': 'Role'},
'course_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'roles'", 'symmetrical': 'False', 'to': "orm['auth.User']"})
}
}
complete_apps = ['django_comment_client']
| agpl-3.0 |
blueboxgroup/cinder | cinder/db/sqlalchemy/migrate_repo/versions/006_snapshots_add_provider_location.py | 42 | 1216 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
snapshots.update().values(provider_location=None).execute()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
snapshots.drop_column(provider_location)
| apache-2.0 |
birkin/channels_exploration_project | email_app/consumers.py | 1 | 2470 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
For django channels controller (akin to views.py).
Activated by config/routing.py
"""
import logging, pprint, time
# from django.contrib.sites.models import Site
from django.core.mail import EmailMessage
from django.utils import timezone
from email_app.models import Invitation
# logger = logging.getLogger('email')
log = logging.getLogger(__name__)
log.debug( 'consumers.py loaded' )
def send_invite( message_obj ):
log.debug( 'starting send_invite()' )
time.sleep( 1 )
try:
log.debug( 'message_obj, `{}`'.format(message_obj) )
log.debug( 'message_obj.__dict__, `{}`'.format(pprint.pformat(message_obj.__dict__)) )
notification_dct = message_obj.content
log.debug( 'notification_dct, `{}`'.format(notification_dct) )
invite_key = notification_dct['key']
log.debug( 'invite_key, `{}`'.format(invite_key) )
invite = Invitation.objects.get( key=invite_key )
except Exception as e:
log.error( 'e, ```{}```'.format(unicode(repr(e))) )
log.error("Invitation to send not found")
return
subject = "You've been invited!"
body = "Go to https://%s/invites/accept/%s/ to join!" % (
'foo',  # placeholder host; the commented-out variant below uses the Sites framework
invite.key,
)
try:
message = EmailMessage(
subject=subject,
body=body,
from_email="from_email",
to=[invite.email,],
)
message.send()
invite.sent = timezone.now()
invite.save()
except:
log.exception('Problem sending invite %s' % (invite.id))
# def send_invite(message):
# log.debug( 'starting send_invite()' )
# try:
# invite = Invitation.objects.get(
# id=message.content.get('id'),
# )
# except Invitation.DoesNotExist:
# log.error("Invitation to send not found")
# return
# subject = "You've been invited!"
# body = "Go to https://%s/invites/accept/%s/ to join!" % (
# Site.objects.get_current().domain,
# invite.key,
# )
# try:
# message = EmailMessage(
# subject=subject,
# body=body,
# from_email="from_email",
# to=[invite.email,],
# )
# message.send()
# invite.sent = timezone.now()
# invite.save()
# except:
# log.exception('Problem sending invite %s' % (invite.id))
| mit |
PulsePod/evepod | lib/python2.7/site-packages/newrelic-2.12.0.10/newrelic/hooks/framework_webpy.py | 4 | 1524 | import sys
import newrelic.packages.six as six
import newrelic.api.transaction
import newrelic.api.function_trace
import newrelic.api.in_function
import newrelic.api.out_function
import newrelic.api.pre_function
from newrelic.api.object_wrapper import callable_name
from newrelic.api.web_transaction import WSGIApplicationWrapper
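# Installed below as an "in" wrapper around web.py's application._delegate();
# it names the current New Relic transaction after the handler web.py is
# about to dispatch to (args[1], either a dotted-path string or a callable)
# and passes the call through unchanged.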
def transaction_name_delegate(*args, **kwargs):
transaction = newrelic.api.transaction.current_transaction()
if transaction:
if isinstance(args[1], six.string_types):
f = args[1]
else:
f = callable_name(args[1])
transaction.set_transaction_name(f)
return (args, kwargs)
def wrap_handle_exception(self):
transaction = newrelic.api.transaction.current_transaction()
if transaction:
transaction.record_exception(*sys.exc_info())
def template_name(render_obj, name):
return name
def instrument(module):
if module.__name__ == 'web.application':
newrelic.api.out_function.wrap_out_function(
module, 'application.wsgifunc', WSGIApplicationWrapper)
newrelic.api.in_function.wrap_in_function(
module, 'application._delegate', transaction_name_delegate)
newrelic.api.pre_function.wrap_pre_function(
module, 'application.internalerror', wrap_handle_exception)
elif module.__name__ == 'web.template':
newrelic.api.function_trace.wrap_function_trace(
module, 'render.__getattr__', template_name, 'Template/Render')
| apache-2.0 |
TeamExodus/external_chromium_org | chrome/test/chromedriver/server/server.py | 121 | 2131 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import atexit
import os
import socket
import subprocess
import time
import urllib2
class Server(object):
"""A running ChromeDriver server."""
def __init__(self, exe_path, log_path=None):
"""Starts the ChromeDriver server and waits for it to be ready.
Args:
exe_path: path to the ChromeDriver executable
log_path: path to the log file
Raises:
RuntimeError if ChromeDriver fails to start
"""
if not os.path.exists(exe_path):
raise RuntimeError('ChromeDriver exe not found at: ' + exe_path)
port = self._FindOpenPort()
chromedriver_args = [exe_path, '--port=%d' % port]
if log_path:
chromedriver_args.extend(['--verbose', '--log-path=%s' % log_path])
self._process = subprocess.Popen(chromedriver_args)
self._url = 'http://127.0.0.1:%d' % port
if self._process is None:
raise RuntimeError('ChromeDriver server cannot be started')
max_time = time.time() + 10
while not self.IsRunning():
if time.time() > max_time:
self._process.terminate()
raise RuntimeError('ChromeDriver server did not start')
time.sleep(0.1)
atexit.register(self.Kill)
def _FindOpenPort(self):
for port in range(9500, 10000):
try:
socket.create_connection(('127.0.0.1', port), 0.2).close()
except socket.error:
return port
raise RuntimeError('Cannot find open port to launch ChromeDriver')
def GetUrl(self):
return self._url
def IsRunning(self):
"""Returns whether the server is up and running."""
try:
urllib2.urlopen(self.GetUrl() + '/status')
return True
except urllib2.URLError:
return False
def Kill(self):
"""Kills the ChromeDriver server, if it is running."""
if self._process is None:
return
try:
urllib2.urlopen(self.GetUrl() + '/shutdown', timeout=10).close()
except:
self._process.terminate()
self._process.wait()
self._process = None
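# Minimal usage sketch (the chromedriver path and log path are illustrative
# assumptions, not part of this module):
#
#   server = Server('/path/to/chromedriver', log_path='/tmp/chromedriver.log')
#   print server.GetUrl()  # e.g. http://127.0.0.1:9500
#   server.Kill()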
| bsd-3-clause |
cgstudiomap/cgstudiomap | main/eggs/Shapely-1.5.13-py2.7.egg/shapely/geometry/polygon.py | 1 | 16292 | """Polygons and their linear ring components
"""
import sys
if sys.version_info[0] < 3:
range = xrange
from ctypes import c_double, c_void_p, cast, POINTER
from ctypes import ArgumentError
import weakref
from shapely.algorithms.cga import signed_area
from shapely.coords import required
from shapely.geos import lgeos
from shapely.geometry.base import BaseGeometry, geos_geom_from_py
from shapely.geometry.linestring import LineString, LineStringAdapter
from shapely.geometry.proxy import PolygonProxy
__all__ = ['Polygon', 'asPolygon', 'LinearRing', 'asLinearRing']
class LinearRing(LineString):
"""
A closed one-dimensional feature comprising one or more line segments
A LinearRing that crosses itself or touches itself at a single point is
invalid and operations on it may fail.
"""
def __init__(self, coordinates=None):
"""
Parameters
----------
coordinates : sequence
A sequence of (x, y [,z]) numeric coordinate pairs or triples
Rings are implicitly closed. There is no need to specify a final
coordinate pair identical to the first.
Example
-------
Construct a square ring.
>>> ring = LinearRing( ((0, 0), (0, 1), (1 ,1 ), (1 , 0)) )
>>> ring.is_closed
True
>>> ring.length
4.0
"""
BaseGeometry.__init__(self)
if coordinates is not None:
self._set_coords(coordinates)
@property
def __geo_interface__(self):
return {
'type': 'LinearRing',
'coordinates': tuple(self.coords)
}
# Coordinate access
_get_coords = BaseGeometry._get_coords
def _set_coords(self, coordinates):
self.empty()
self._geom, self._ndim = geos_linearring_from_py(coordinates)
coords = property(_get_coords, _set_coords)
@property
def is_ccw(self):
"""True is the ring is oriented counter clock-wise"""
return bool(self.impl['is_ccw'](self))
@property
def is_simple(self):
"""True if the geometry is simple, meaning that any self-intersections
are only at boundary points, else False"""
return LineString(self).is_simple
class LinearRingAdapter(LineStringAdapter):
__p__ = None
def __init__(self, context):
self.context = context
self.factory = geos_linearring_from_py
@property
def __geo_interface__(self):
return {
'type': 'LinearRing',
'coordinates': tuple(self.coords)
}
coords = property(BaseGeometry._get_coords)
def asLinearRing(context):
"""Adapt an object to the LinearRing interface"""
return LinearRingAdapter(context)
class InteriorRingSequence(object):
_factory = None
_geom = None
__p__ = None
_ndim = None
_index = 0
_length = 0
__rings__ = None
_gtag = None
def __init__(self, parent):
self.__p__ = parent
self._geom = parent._geom
self._ndim = parent._ndim
def __iter__(self):
self._index = 0
self._length = self.__len__()
return self
def __next__(self):
if self._index < self._length:
ring = self._get_ring(self._index)
self._index += 1
return ring
else:
raise StopIteration
if sys.version_info[0] < 3:
next = __next__
def __len__(self):
return lgeos.GEOSGetNumInteriorRings(self._geom)
def __getitem__(self, key):
m = self.__len__()
if isinstance(key, int):
if key + m < 0 or key >= m:
raise IndexError("index out of range")
if key < 0:
i = m + key
else:
i = key
return self._get_ring(i)
elif isinstance(key, slice):
res = []
start, stop, stride = key.indices(m)
for i in range(start, stop, stride):
res.append(self._get_ring(i))
return res
else:
raise TypeError("key must be an index or slice")
@property
def _longest(self):
# Length of the longest interior ring's coordinate sequence.
maxlen = 0
for g in iter(self):
l = len(g.coords)
if l > maxlen:
maxlen = l
return maxlen
def gtag(self):
return hash(repr(self.__p__))
def _get_ring(self, i):
gtag = self.gtag()
if gtag != self._gtag:
self.__rings__ = {}
if i not in self.__rings__:
g = lgeos.GEOSGetInteriorRingN(self._geom, i)
ring = LinearRing()
ring._geom = g
ring.__p__ = self
ring._other_owned = True
ring._ndim = self._ndim
self.__rings__[i] = weakref.ref(ring)
return self.__rings__[i]()
class Polygon(BaseGeometry):
"""
A two-dimensional figure bounded by a linear ring
A polygon has a non-zero area. It may have one or more negative-space
"holes" which are also bounded by linear rings. If any rings cross each
other, the feature is invalid and operations on it may fail.
Attributes
----------
exterior : LinearRing
The ring which bounds the positive space of the polygon.
interiors : sequence
A sequence of rings which bound all existing holes.
"""
_exterior = None
_interiors = []
_ndim = 2
def __init__(self, shell=None, holes=None):
"""
Parameters
----------
shell : sequence
A sequence of (x, y [,z]) numeric coordinate pairs or triples
holes : sequence
A sequence of objects which satisfy the same requirements as the
shell parameters above
Example
-------
Create a square polygon with no holes
>>> coords = ((0., 0.), (0., 1.), (1., 1.), (1., 0.), (0., 0.))
>>> polygon = Polygon(coords)
>>> polygon.area
1.0
"""
BaseGeometry.__init__(self)
if shell is not None:
self._geom, self._ndim = geos_polygon_from_py(shell, holes)
@property
def exterior(self):
if self.is_empty:
return None
elif self._exterior is None or self._exterior() is None:
g = lgeos.GEOSGetExteriorRing(self._geom)
ring = LinearRing()
ring._geom = g
ring.__p__ = self
ring._other_owned = True
ring._ndim = self._ndim
self._exterior = weakref.ref(ring)
return self._exterior()
@property
def interiors(self):
if self.is_empty:
return []
return InteriorRingSequence(self)
def __eq__(self, other):
if not isinstance(other, Polygon):
return False
my_coords = [
tuple(self.exterior.coords),
[tuple(interior.coords) for interior in self.interiors]
]
other_coords = [
tuple(other.exterior.coords),
[tuple(interior.coords) for interior in other.interiors]
]
return my_coords == other_coords
def __ne__(self, other):
return not self.__eq__(other)
__hash__ = None
@property
def ctypes(self):
if not self._ctypes_data:
self._ctypes_data = self.exterior.ctypes
return self._ctypes_data
@property
def __array_interface__(self):
raise NotImplementedError(
"A polygon does not itself provide the array interface. Its rings do.")
def _get_coords(self):
raise NotImplementedError(
"Component rings have coordinate sequences, but the polygon does not")
def _set_coords(self, ob):
raise NotImplementedError(
"Component rings have coordinate sequences, but the polygon does not")
@property
def coords(self):
raise NotImplementedError(
"Component rings have coordinate sequences, but the polygon does not")
@property
def __geo_interface__(self):
coords = [tuple(self.exterior.coords)]
for hole in self.interiors:
coords.append(tuple(hole.coords))
return {
'type': 'Polygon',
'coordinates': tuple(coords)
}
def svg(self, scale_factor=1., fill_color=None):
"""Returns SVG path element for the Polygon geometry.
Parameters
==========
scale_factor : float
Multiplication factor for the SVG stroke-width. Default is 1.
fill_color : str, optional
Hex string for fill color. Default is to use "#66cc99" if
geometry is valid, and "#ff3333" if invalid.
"""
if self.is_empty:
return '<g />'
if fill_color is None:
fill_color = "#66cc99" if self.is_valid else "#ff3333"
exterior_coords = [
["{0},{1}".format(*c) for c in self.exterior.coords]]
interior_coords = [
["{0},{1}".format(*c) for c in interior.coords]
for interior in self.interiors]
path = " ".join([
"M {0} L {1} z".format(coords[0], " L ".join(coords[1:]))
for coords in exterior_coords + interior_coords])
return (
'<path fill-rule="evenodd" fill="{2}" stroke="#555555" '
'stroke-width="{0}" opacity="0.6" d="{1}" />'
).format(2. * scale_factor, path, fill_color)
class PolygonAdapter(PolygonProxy, Polygon):
def __init__(self, shell, holes=None):
self.shell = shell
self.holes = holes
self.context = (shell, holes)
self.factory = geos_polygon_from_py
@property
def _ndim(self):
try:
# From array protocol
array = self.shell.__array_interface__
n = array['shape'][1]
assert n == 2 or n == 3
return n
except AttributeError:
# Fall back on list
return len(self.shell[0])
def asPolygon(shell, holes=None):
"""Adapt objects to the Polygon interface"""
return PolygonAdapter(shell, holes)
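# Returns a copy of ``polygon`` with ring orientations fixed per ``sign``:
# with the default sign=1.0 the exterior ring winds counter-clockwise and
# any holes wind clockwise; sign=-1.0 reverses both conventions.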
def orient(polygon, sign=1.0):
s = float(sign)
rings = []
ring = polygon.exterior
if signed_area(ring)/s >= 0.0:
rings.append(ring)
else:
rings.append(list(ring.coords)[::-1])
for ring in polygon.interiors:
if signed_area(ring)/s <= 0.0:
rings.append(ring)
else:
rings.append(list(ring.coords)[::-1])
return Polygon(rings[0], rings[1:])
def geos_linearring_from_py(ob, update_geom=None, update_ndim=0):
# If a LinearRing is passed in, clone it and return
# If a LineString is passed in, clone the coord seq and return a LinearRing
if isinstance(ob, LineString):
if type(ob) == LinearRing:
return geos_geom_from_py(ob)
else:
if ob.is_closed and len(ob.coords) >= 4:
return geos_geom_from_py(ob, lgeos.GEOSGeom_createLinearRing)
# If numpy is present, we use numpy.require to ensure that we have a
# C-contiguous array that owns its data. View data will be copied.
ob = required(ob)
try:
# From array protocol
array = ob.__array_interface__
assert len(array['shape']) == 2
m = array['shape'][0]
n = array['shape'][1]
if m < 3:
raise ValueError(
"A LinearRing must have at least 3 coordinate tuples")
assert n == 2 or n == 3
# Make pointer to the coordinate array
if isinstance(array['data'], tuple):
# numpy tuple (addr, read-only)
cp = cast(array['data'][0], POINTER(c_double))
else:
cp = array['data']
# Add closing coordinates to sequence?
if cp[0] != cp[m*n-n] or cp[1] != cp[m*n-n+1]:
M = m + 1
else:
M = m
# Create a coordinate sequence
if update_geom is not None:
cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
if n != update_ndim:
raise ValueError(
"Wrong coordinate dimensions; this geometry has dimensions: %d" \
% update_ndim)
else:
cs = lgeos.GEOSCoordSeq_create(M, n)
# add to coordinate sequence
for i in range(m):
# Because of a bug in the GEOS C API,
# always set X before Y
lgeos.GEOSCoordSeq_setX(cs, i, cp[n*i])
lgeos.GEOSCoordSeq_setY(cs, i, cp[n*i+1])
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, i, cp[n*i+2])
# Add closing coordinates to sequence?
if M > m:
# Because of a bug in the GEOS C API,
# always set X before Y
lgeos.GEOSCoordSeq_setX(cs, M-1, cp[0])
lgeos.GEOSCoordSeq_setY(cs, M-1, cp[1])
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, M-1, cp[2])
except AttributeError:
# Fall back on list
try:
m = len(ob)
except TypeError: # Iterators, e.g. Python 3 zip
ob = list(ob)
m = len(ob)
n = len(ob[0])
if m < 3:
raise ValueError(
"A LinearRing must have at least 3 coordinate tuples")
assert (n == 2 or n == 3)
# Add closing coordinates if not provided
if m == 3 or ob[0][0] != ob[-1][0] or ob[0][1] != ob[-1][1]:
M = m + 1
else:
M = m
# Create a coordinate sequence
if update_geom is not None:
cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
if n != update_ndim:
raise ValueError(
"Wrong coordinate dimensions; this geometry has dimensions: %d" \
% update_ndim)
else:
cs = lgeos.GEOSCoordSeq_create(M, n)
# add to coordinate sequence
for i in range(m):
coords = ob[i]
# Because of a bug in the GEOS C API,
# always set X before Y
lgeos.GEOSCoordSeq_setX(cs, i, coords[0])
lgeos.GEOSCoordSeq_setY(cs, i, coords[1])
if n == 3:
try:
lgeos.GEOSCoordSeq_setZ(cs, i, coords[2])
except IndexError:
raise ValueError("Inconsistent coordinate dimensionality")
# Add closing coordinates to sequence?
if M > m:
coords = ob[0]
# Because of a bug in the GEOS C API,
# always set X before Y
lgeos.GEOSCoordSeq_setX(cs, M-1, coords[0])
lgeos.GEOSCoordSeq_setY(cs, M-1, coords[1])
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, M-1, coords[2])
if update_geom is not None:
return None
else:
return lgeos.GEOSGeom_createLinearRing(cs), n
def update_linearring_from_py(geom, ob):
geos_linearring_from_py(ob, geom._geom, geom._ndim)
def geos_polygon_from_py(shell, holes=None):
if isinstance(shell, Polygon):
return geos_geom_from_py(shell)
if shell is not None:
geos_shell, ndim = geos_linearring_from_py(shell)
if holes is not None and len(holes) > 0:
ob = holes
L = len(ob)
exemplar = ob[0]
try:
N = len(exemplar[0])
except TypeError:
N = exemplar._ndim
if L < 1:
raise ValueError("number of holes must be non-zero")
if N not in (2, 3):
raise ValueError("insufficient coordinate dimension")
# Array of pointers to ring geometries
geos_holes = (c_void_p * L)()
# add to coordinate sequence
for l in range(L):
geom, ndim = geos_linearring_from_py(ob[l])
geos_holes[l] = cast(geom, c_void_p)
else:
geos_holes = POINTER(c_void_p)()
L = 0
return (
lgeos.GEOSGeom_createPolygon(
c_void_p(geos_shell),
geos_holes,
L
),
ndim
)
# Test runner
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
| agpl-3.0 |
alexus37/AugmentedRealityChess | pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/numpy/polynomial/legendre.py | 51 | 56252 | """
Legendre Series (:mod:`numpy.polynomial.legendre`)
===================================================
.. currentmodule:: numpy.polynomial.legendre
This module provides a number of objects (mostly functions) useful for
dealing with Legendre series, including a `Legendre` class that
encapsulates the usual arithmetic operations. (General information
on how this module represents and works with such polynomials is in the
docstring for its "parent" sub-package, `numpy.polynomial`).
Constants
---------
.. autosummary::
:toctree: generated/
legdomain Legendre series default domain, [-1,1].
legzero Legendre series that evaluates identically to 0.
legone Legendre series that evaluates identically to 1.
legx Legendre series for the identity map, ``f(x) = x``.
Arithmetic
----------
.. autosummary::
:toctree: generated/
legmulx multiply a Legendre series in P_i(x) by x.
legadd add two Legendre series.
legsub subtract one Legendre series from another.
legmul multiply two Legendre series.
legdiv divide one Legendre series by another.
legpow raise a Legendre series to a positive integer power
legval evaluate a Legendre series at given points.
legval2d evaluate a 2D Legendre series at given points.
legval3d evaluate a 3D Legendre series at given points.
leggrid2d evaluate a 2D Legendre series on a Cartesian product.
leggrid3d evaluate a 3D Legendre series on a Cartesian product.
Calculus
--------
.. autosummary::
:toctree: generated/
legder differentiate a Legendre series.
legint integrate a Legendre series.
Misc Functions
--------------
.. autosummary::
:toctree: generated/
legfromroots create a Legendre series with specified roots.
legroots find the roots of a Legendre series.
legvander Vandermonde-like matrix for Legendre polynomials.
legvander2d Vandermonde-like matrix for 2D power series.
legvander3d Vandermonde-like matrix for 3D power series.
leggauss Gauss-Legendre quadrature, points and weights.
legweight Legendre weight function.
legcompanion symmetrized companion matrix in Legendre form.
legfit least-squares fit returning a Legendre series.
legtrim trim leading coefficients from a Legendre series.
legline Legendre series representing given straight line.
leg2poly convert a Legendre series to a polynomial.
poly2leg convert a polynomial to a Legendre series.
Classes
-------
Legendre A Legendre series class.
See also
--------
numpy.polynomial.polynomial
numpy.polynomial.chebyshev
numpy.polynomial.laguerre
numpy.polynomial.hermite
numpy.polynomial.hermite_e
"""
from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
import numpy.linalg as la
from . import polyutils as pu
from ._polybase import ABCPolyBase
__all__ = [
'legzero', 'legone', 'legx', 'legdomain', 'legline', 'legadd',
'legsub', 'legmulx', 'legmul', 'legdiv', 'legpow', 'legval', 'legder',
'legint', 'leg2poly', 'poly2leg', 'legfromroots', 'legvander',
'legfit', 'legtrim', 'legroots', 'Legendre', 'legval2d', 'legval3d',
'leggrid2d', 'leggrid3d', 'legvander2d', 'legvander3d', 'legcompanion',
'leggauss', 'legweight']
legtrim = pu.trimcoef
def poly2leg(pol):
"""
Convert a polynomial to a Legendre series.
Convert an array representing the coefficients of a polynomial (relative
to the "standard" basis) ordered from lowest degree to highest, to an
array of the coefficients of the equivalent Legendre series, ordered
from lowest to highest degree.
Parameters
----------
pol : array_like
1-D array containing the polynomial coefficients
Returns
-------
c : ndarray
1-D array containing the coefficients of the equivalent Legendre
series.
See Also
--------
leg2poly
Notes
-----
The easy way to do conversions between polynomial basis sets
is to use the convert method of a class instance.
Examples
--------
>>> from numpy import polynomial as P
>>> p = P.Polynomial(np.arange(4))
>>> p
Polynomial([ 0., 1., 2., 3.], [-1., 1.])
>>> c = P.Legendre(P.poly2leg(p.coef))
>>> c
Legendre([ 1. , 3.25, 1. , 0.75], [-1., 1.])
"""
[pol] = pu.as_series([pol])
deg = len(pol) - 1
res = 0
for i in range(deg, -1, -1):
res = legadd(legmulx(res), pol[i])
return res
def leg2poly(c):
"""
Convert a Legendre series to a polynomial.
Convert an array representing the coefficients of a Legendre series,
ordered from lowest degree to highest, to an array of the coefficients
of the equivalent polynomial (relative to the "standard" basis) ordered
from lowest to highest degree.
Parameters
----------
c : array_like
1-D array containing the Legendre series coefficients, ordered
from lowest order term to highest.
Returns
-------
pol : ndarray
1-D array containing the coefficients of the equivalent polynomial
(relative to the "standard" basis) ordered from lowest order term
to highest.
See Also
--------
poly2leg
Notes
-----
The easy way to do conversions between polynomial basis sets
is to use the convert method of a class instance.
Examples
--------
>>> c = P.Legendre(range(4))
>>> c
Legendre([ 0., 1., 2., 3.], [-1., 1.])
>>> p = c.convert(kind=P.Polynomial)
>>> p
Polynomial([-1. , -3.5, 3. , 7.5], [-1., 1.])
>>> P.leg2poly(range(4))
array([-1. , -3.5, 3. , 7.5])
"""
from .polynomial import polyadd, polysub, polymulx
[c] = pu.as_series([c])
n = len(c)
if n < 3:
return c
else:
c0 = c[-2]
c1 = c[-1]
# i is the current degree of c1
for i in range(n - 1, 1, -1):
tmp = c0
c0 = polysub(c[i - 2], (c1*(i - 1))/i)
c1 = polyadd(tmp, (polymulx(c1)*(2*i - 1))/i)
return polyadd(c0, polymulx(c1))
#
# These are constant arrays are of integer type so as to be compatible
# with the widest range of other types, such as Decimal.
#
# Legendre
legdomain = np.array([-1, 1])
# Legendre coefficients representing zero.
legzero = np.array([0])
# Legendre coefficients representing one.
legone = np.array([1])
# Legendre coefficients representing the identity x.
legx = np.array([0, 1])
def legline(off, scl):
"""
Legendre series whose graph is a straight line.
Parameters
----------
off, scl : scalars
The specified line is given by ``off + scl*x``.
Returns
-------
y : ndarray
This module's representation of the Legendre series for
``off + scl*x``.
See Also
--------
polyline, chebline
Examples
--------
>>> import numpy.polynomial.legendre as L
>>> L.legline(3,2)
array([3, 2])
>>> L.legval(-3, L.legline(3,2)) # should be -3
-3.0
"""
if scl != 0:
return np.array([off, scl])
else:
return np.array([off])
def legfromroots(roots):
"""
Generate a Legendre series with given roots.
The function returns the coefficients of the polynomial
.. math:: p(x) = (x - r_0) * (x - r_1) * ... * (x - r_n),
in Legendre form, where the `r_n` are the roots specified in `roots`.
If a zero has multiplicity n, then it must appear in `roots` n times.
For instance, if 2 is a root of multiplicity three and 3 is a root of
multiplicity 2, then `roots` looks something like [2, 2, 2, 3, 3]. The
roots can appear in any order.
If the returned coefficients are `c`, then
.. math:: p(x) = c_0 + c_1 * L_1(x) + ... + c_n * L_n(x)
The coefficient of the last term is not generally 1 for monic
polynomials in Legendre form.
Parameters
----------
roots : array_like
Sequence containing the roots.
Returns
-------
out : ndarray
1-D array of coefficients. If all roots are real then `out` is a
real array, if some of the roots are complex, then `out` is complex
even if all the coefficients in the result are real (see Examples
below).
See Also
--------
polyfromroots, chebfromroots, lagfromroots, hermfromroots,
hermefromroots.
Examples
--------
>>> import numpy.polynomial.legendre as L
>>> L.legfromroots((-1,0,1)) # x^3 - x relative to the standard basis
array([ 0. , -0.4, 0. , 0.4])
>>> j = complex(0,1)
>>> L.legfromroots((-j,j)) # x^2 + 1 relative to the standard basis
array([ 1.33333333+0.j, 0.00000000+0.j, 0.66666667+0.j])
"""
if len(roots) == 0:
return np.ones(1)
else:
[roots] = pu.as_series([roots], trim=False)
roots.sort()
p = [legline(-r, 1) for r in roots]
n = len(p)
while n > 1:
m, r = divmod(n, 2)
tmp = [legmul(p[i], p[i+m]) for i in range(m)]
if r:
tmp[0] = legmul(tmp[0], p[-1])
p = tmp
n = m
return p[0]
def legadd(c1, c2):
"""
Add one Legendre series to another.
Returns the sum of two Legendre series `c1` + `c2`. The arguments
are sequences of coefficients ordered from lowest order term to
highest, i.e., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Array representing the Legendre series of their sum.
See Also
--------
legsub, legmul, legdiv, legpow
Notes
-----
Unlike multiplication, division, etc., the sum of two Legendre series
is a Legendre series (without having to "reproject" the result onto
the basis set) so addition, just like that of "standard" polynomials,
is simply "component-wise."
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2,1)
>>> L.legadd(c1,c2)
array([ 4., 4., 4.])
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c1[:c2.size] += c2
ret = c1
else:
c2[:c1.size] += c1
ret = c2
return pu.trimseq(ret)
def legsub(c1, c2):
"""
Subtract one Legendre series from another.
Returns the difference of two Legendre series `c1` - `c2`. The
sequences of coefficients are from lowest order term to highest, i.e.,
[1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Of Legendre series coefficients representing their difference.
See Also
--------
legadd, legmul, legdiv, legpow
Notes
-----
Unlike multiplication, division, etc., the difference of two Legendre
series is a Legendre series (without having to "reproject" the result
onto the basis set) so subtraction, just like that of "standard"
polynomials, is simply "component-wise."
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2,1)
>>> L.legsub(c1,c2)
array([-2., 0., 2.])
>>> L.legsub(c2,c1) # -L.legsub(c1,c2)
array([ 2., 0., -2.])
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c1[:c2.size] -= c2
ret = c1
else:
c2 = -c2
c2[:c1.size] += c1
ret = c2
return pu.trimseq(ret)
def legmulx(c):
"""Multiply a Legendre series by x.
Multiply the Legendre series `c` by x, where x is the independent
variable.
Parameters
----------
c : array_like
1-D array of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Array representing the result of the multiplication.
Notes
-----
The multiplication uses the recursion relationship for Legendre
polynomials in the form
.. math::
xP_i(x) = ((i + 1)*P_{i + 1}(x) + i*P_{i - 1}(x))/(2i + 1)
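Examples
--------
A hand-checked sketch (values follow from the recursion above; the
array formatting below is illustrative):
>>> from numpy.polynomial import legendre as L
>>> L.legmulx([1, 2, 3])
array([ 0.66666667, 2.2 , 1.33333333, 1.8 ])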
"""
# c is a trimmed copy
[c] = pu.as_series([c])
# The zero series needs special treatment
if len(c) == 1 and c[0] == 0:
return c
prd = np.empty(len(c) + 1, dtype=c.dtype)
prd[0] = c[0]*0
prd[1] = c[0]
for i in range(1, len(c)):
j = i + 1
k = i - 1
s = i + j
prd[j] = (c[i]*j)/s
prd[k] += (c[i]*i)/s
return prd
def legmul(c1, c2):
"""
Multiply one Legendre series by another.
Returns the product of two Legendre series `c1` * `c2`. The arguments
are sequences of coefficients, from lowest order "term" to highest,
e.g., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Of Legendre series coefficients representing their product.
See Also
--------
legadd, legsub, legdiv, legpow
Notes
-----
In general, the (polynomial) product of two C-series results in terms
that are not in the Legendre polynomial basis set. Thus, to express
the product as a Legendre series, it is necessary to "reproject" the
product onto said basis set, which may produce "unintuitive" (but
correct) results; see Examples section below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2)
>>> L.legmul(c1,c2) # multiplication requires "reprojection"
array([ 4.33333333, 10.4 , 11.66666667, 3.6 ])
"""
# s1, s2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c = c2
xs = c1
else:
c = c1
xs = c2
if len(c) == 1:
c0 = c[0]*xs
c1 = 0
elif len(c) == 2:
c0 = c[0]*xs
c1 = c[1]*xs
else:
nd = len(c)
c0 = c[-2]*xs
c1 = c[-1]*xs
for i in range(3, len(c) + 1):
tmp = c0
nd = nd - 1
c0 = legsub(c[-i]*xs, (c1*(nd - 1))/nd)
c1 = legadd(tmp, (legmulx(c1)*(2*nd - 1))/nd)
return legadd(c0, legmulx(c1))
def legdiv(c1, c2):
"""
Divide one Legendre series by another.
Returns the quotient-with-remainder of two Legendre series
`c1` / `c2`. The arguments are sequences of coefficients from lowest
order "term" to highest, e.g., [1,2,3] represents the series
``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
quo, rem : ndarrays
Of Legendre series coefficients representing the quotient and
remainder.
See Also
--------
legadd, legsub, legmul, legpow
Notes
-----
In general, the (polynomial) division of one Legendre series by another
results in quotient and remainder terms that are not in the Legendre
polynomial basis set. Thus, to express these results as a Legendre
series, it is necessary to "reproject" the results onto the Legendre
basis set, which may produce "unintuitive" (but correct) results; see
Examples section below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2,1)
>>> L.legdiv(c1,c2) # quotient "intuitive," remainder not
(array([ 3.]), array([-8., -4.]))
>>> c2 = (0,1,2,3)
>>> L.legdiv(c2,c1) # neither "intuitive"
(array([-0.07407407, 1.66666667]), array([-1.03703704, -2.51851852]))
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if c2[-1] == 0:
raise ZeroDivisionError()
lc1 = len(c1)
lc2 = len(c2)
if lc1 < lc2:
return c1[:1]*0, c1
elif lc2 == 1:
return c1/c2[-1], c1[:1]*0
else:
quo = np.empty(lc1 - lc2 + 1, dtype=c1.dtype)
rem = c1
for i in range(lc1 - lc2, - 1, -1):
p = legmul([0]*i + [1], c2)
q = rem[-1]/p[-1]
rem = rem[:-1] - q*p[:-1]
quo[i] = q
return quo, pu.trimseq(rem)
def legpow(c, pow, maxpower=16):
"""Raise a Legendre series to a power.
Returns the Legendre series `c` raised to the power `pow`. The
argument `c` is a sequence of coefficients ordered from low to high,
i.e., [1,2,3] is the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c : array_like
1-D array of Legendre series coefficients ordered from low to
high.
pow : integer
Power to which the series will be raised
maxpower : integer, optional
Maximum power allowed. This is mainly to limit growth of the series
to unmanageable size. Default is 16
Returns
-------
coef : ndarray
Legendre series of power.
See Also
--------
legadd, legsub, legmul, legdiv
Examples
--------
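A hand-checked sketch: squaring L_0 + 2*L_1 gives 1 + 4x + 4x**2, which
is (7/3)*L_0 + 4*L_1 + (8/3)*L_2 in the Legendre basis (the array
formatting below is illustrative):
>>> from numpy.polynomial import legendre as L
>>> L.legpow([1, 2], 2)
array([ 2.33333333, 4. , 2.66666667])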
"""
# c is a trimmed copy
[c] = pu.as_series([c])
power = int(pow)
if power != pow or power < 0:
raise ValueError("Power must be a non-negative integer.")
elif maxpower is not None and power > maxpower:
raise ValueError("Power is too large")
elif power == 0:
return np.array([1], dtype=c.dtype)
elif power == 1:
return c
else:
# This can be made more efficient by using powers of two
# in the usual way.
prd = c
for i in range(2, power + 1):
prd = legmul(prd, c)
return prd
def legder(c, m=1, scl=1, axis=0):
"""
Differentiate a Legendre series.
Returns the Legendre series coefficients `c` differentiated `m` times
along `axis`. At each iteration the result is multiplied by `scl` (the
scaling factor is for use in a linear change of variable). The argument
`c` is an array of coefficients from low to high degree along each
axis, e.g., [1,2,3] represents the series ``1*L_0 + 2*L_1 + 3*L_2``
while [[1,2],[1,2]] represents ``1*L_0(x)*L_0(y) + 1*L_1(x)*L_0(y) +
2*L_0(x)*L_1(y) + 2*L_1(x)*L_1(y)`` if axis=0 is ``x`` and axis=1 is
``y``.
Parameters
----------
c : array_like
Array of Legendre series coefficients. If c is multidimensional the
different axis correspond to different variables with the degree in
each axis given by the corresponding index.
m : int, optional
Number of derivatives taken, must be non-negative. (Default: 1)
scl : scalar, optional
Each differentiation is multiplied by `scl`. The end result is
multiplication by ``scl**m``. This is for use in a linear change of
variable. (Default: 1)
axis : int, optional
Axis over which the derivative is taken. (Default: 0).
.. versionadded:: 1.7.0
Returns
-------
der : ndarray
Legendre series of the derivative.
See Also
--------
legint
Notes
-----
In general, the result of differentiating a Legendre series does not
resemble the same operation on a power series. Thus the result of this
function may be "unintuitive," albeit correct; see Examples section
below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c = (1,2,3,4)
>>> L.legder(c)
array([ 6., 9., 20.])
>>> L.legder(c, 3)
array([ 60.])
>>> L.legder(c, scl=-1)
array([ -6., -9., -20.])
>>> L.legder(c, 2,-1)
array([ 9., 60.])
"""
c = np.array(c, ndmin=1, copy=1)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
cnt, iaxis = [int(t) for t in [m, axis]]
if cnt != m:
raise ValueError("The order of derivation must be integer")
if cnt < 0:
raise ValueError("The order of derivation must be non-negative")
if iaxis != axis:
raise ValueError("The axis must be integer")
if not -c.ndim <= iaxis < c.ndim:
raise ValueError("The axis is out of range")
if iaxis < 0:
iaxis += c.ndim
if cnt == 0:
return c
c = np.rollaxis(c, iaxis)
n = len(c)
if cnt >= n:
c = c[:1]*0
else:
for i in range(cnt):
n = n - 1
c *= scl
der = np.empty((n,) + c.shape[1:], dtype=c.dtype)
for j in range(n, 2, -1):
der[j - 1] = (2*j - 1)*c[j]
c[j - 2] += c[j]
if n > 1:
der[1] = 3*c[2]
der[0] = c[1]
c = der
c = np.rollaxis(c, 0, iaxis + 1)
return c
def legint(c, m=1, k=[], lbnd=0, scl=1, axis=0):
"""
Integrate a Legendre series.
Returns the Legendre series coefficients `c` integrated `m` times from
`lbnd` along `axis`. At each iteration the resulting series is
**multiplied** by `scl` and an integration constant, `k`, is added.
The scaling factor is for use in a linear change of variable. ("Buyer
beware": note that, depending on what one is doing, one may want `scl`
to be the reciprocal of what one might expect; for more information,
see the Notes section below.) The argument `c` is an array of
coefficients from low to high degree along each axis, e.g., [1,2,3]
represents the series ``L_0 + 2*L_1 + 3*L_2`` while [[1,2],[1,2]]
represents ``1*L_0(x)*L_0(y) + 1*L_1(x)*L_0(y) + 2*L_0(x)*L_1(y) +
2*L_1(x)*L_1(y)`` if axis=0 is ``x`` and axis=1 is ``y``.
Parameters
----------
c : array_like
Array of Legendre series coefficients. If c is multidimensional the
different axis correspond to different variables with the degree in
each axis given by the corresponding index.
m : int, optional
Order of integration, must be positive. (Default: 1)
k : {[], list, scalar}, optional
Integration constant(s). The value of the first integral at
``lbnd`` is the first value in the list, the value of the second
integral at ``lbnd`` is the second value, etc. If ``k == []`` (the
default), all constants are set to zero. If ``m == 1``, a single
scalar can be given instead of a list.
lbnd : scalar, optional
The lower bound of the integral. (Default: 0)
scl : scalar, optional
Following each integration the result is *multiplied* by `scl`
before the integration constant is added. (Default: 1)
axis : int, optional
Axis over which the integral is taken. (Default: 0).
.. versionadded:: 1.7.0
Returns
-------
S : ndarray
Legendre series coefficient array of the integral.
Raises
------
ValueError
If ``m < 0``, ``len(k) > m``, ``np.isscalar(lbnd) == False``, or
``np.isscalar(scl) == False``.
See Also
--------
legder
Notes
-----
Note that the result of each integration is *multiplied* by `scl`.
Why is this important to note? Say one is making a linear change of
variable :math:`u = ax + b` in an integral relative to `x`. Then
:math:`dx = du/a`, so one will need to set `scl` equal to
:math:`1/a` - perhaps not what one would have first thought.
Also note that, in general, the result of integrating a C-series needs
to be "reprojected" onto the C-series basis set. Thus, typically,
the result of this function is "unintuitive," albeit correct; see
Examples section below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c = (1,2,3)
>>> L.legint(c)
array([ 0.33333333, 0.4 , 0.66666667, 0.6 ])
>>> L.legint(c, 3)
array([ 1.66666667e-02, -1.78571429e-02, 4.76190476e-02,
-1.73472348e-18, 1.90476190e-02, 9.52380952e-03])
>>> L.legint(c, k=3)
array([ 3.33333333, 0.4 , 0.66666667, 0.6 ])
>>> L.legint(c, lbnd=-2)
array([ 7.33333333, 0.4 , 0.66666667, 0.6 ])
>>> L.legint(c, scl=2)
array([ 0.66666667, 0.8 , 1.33333333, 1.2 ])
"""
c = np.array(c, ndmin=1, copy=1)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
if not np.iterable(k):
k = [k]
cnt, iaxis = [int(t) for t in [m, axis]]
if cnt != m:
raise ValueError("The order of integration must be integer")
if cnt < 0:
raise ValueError("The order of integration must be non-negative")
if len(k) > cnt:
raise ValueError("Too many integration constants")
if iaxis != axis:
raise ValueError("The axis must be integer")
if not -c.ndim <= iaxis < c.ndim:
raise ValueError("The axis is out of range")
if iaxis < 0:
iaxis += c.ndim
if cnt == 0:
return c
c = np.rollaxis(c, iaxis)
k = list(k) + [0]*(cnt - len(k))
for i in range(cnt):
n = len(c)
c *= scl
if n == 1 and np.all(c[0] == 0):
c[0] += k[i]
else:
tmp = np.empty((n + 1,) + c.shape[1:], dtype=c.dtype)
tmp[0] = c[0]*0
tmp[1] = c[0]
if n > 1:
tmp[2] = c[1]/3
for j in range(2, n):
t = c[j]/(2*j + 1)
tmp[j + 1] = t
tmp[j - 1] -= t
tmp[0] += k[i] - legval(lbnd, tmp)
c = tmp
c = np.rollaxis(c, 0, iaxis + 1)
return c
def legval(x, c, tensor=True):
"""
Evaluate a Legendre series at points x.
If `c` is of length `n + 1`, this function returns the value:
.. math:: p(x) = c_0 * L_0(x) + c_1 * L_1(x) + ... + c_n * L_n(x)
The parameter `x` is converted to an array only if it is a tuple or a
list, otherwise it is treated as a scalar. In either case, either `x`
or its elements must support multiplication and addition both with
themselves and with the elements of `c`.
If `c` is a 1-D array, then `p(x)` will have the same shape as `x`. If
`c` is multidimensional, then the shape of the result depends on the
value of `tensor`. If `tensor` is true the shape will be c.shape[1:] +
x.shape. If `tensor` is false the shape will be c.shape[1:]. Note that
scalars have shape (,).
Trailing zeros in the coefficients will be used in the evaluation, so
they should be avoided if efficiency is a concern.
Parameters
----------
x : array_like, compatible object
If `x` is a list or tuple, it is converted to an ndarray, otherwise
it is left unchanged and treated as a scalar. In either case, `x`
or its elements must support addition and multiplication with
themselves and with the elements of `c`.
c : array_like
Array of coefficients ordered so that the coefficients for terms of
degree n are contained in c[n]. If `c` is multidimensional the
remaining indices enumerate multiple polynomials. In the two
dimensional case the coefficients may be thought of as stored in
the columns of `c`.
tensor : boolean, optional
If True, the shape of the coefficient array is extended with ones
on the right, one for each dimension of `x`. Scalars have dimension 0
for this action. The result is that every column of coefficients in
`c` is evaluated for every element of `x`. If False, `x` is broadcast
over the columns of `c` for the evaluation. This keyword is useful
when `c` is multidimensional. The default value is True.
.. versionadded:: 1.7.0
Returns
-------
values : ndarray, algebra_like
The shape of the return value is described above.
See Also
--------
legval2d, leggrid2d, legval3d, leggrid3d
Notes
-----
The evaluation uses Clenshaw recursion, aka synthetic division.
Examples
--------
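A hand-checked sketch: with c = (1, 2, 3) the series is
L_0(x) + 2*L_1(x) + 3*L_2(x) = -0.5 + 2x + 4.5x**2 (the array
formatting below is illustrative):
>>> from numpy.polynomial.legendre import legval
>>> legval(1, [1, 2, 3])
6.0
>>> legval([[1, 2], [3, 4]], [1, 2, 3])
array([[ 6. , 21.5],
[ 46. , 79.5]])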
"""
c = np.array(c, ndmin=1, copy=0)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
if isinstance(x, (tuple, list)):
x = np.asarray(x)
if isinstance(x, np.ndarray) and tensor:
c = c.reshape(c.shape + (1,)*x.ndim)
if len(c) == 1:
c0 = c[0]
c1 = 0
elif len(c) == 2:
c0 = c[0]
c1 = c[1]
else:
nd = len(c)
c0 = c[-2]
c1 = c[-1]
for i in range(3, len(c) + 1):
tmp = c0
nd = nd - 1
c0 = c[-i] - (c1*(nd - 1))/nd
c1 = tmp + (c1*x*(2*nd - 1))/nd
return c0 + c1*x
def legval2d(x, y, c):
"""
Evaluate a 2-D Legendre series at points (x, y).
This function returns the values:
.. math:: p(x,y) = \\sum_{i,j} c_{i,j} * L_i(x) * L_j(y)
The parameters `x` and `y` are converted to arrays only if they are
tuples or a lists, otherwise they are treated as a scalars and they
must have the same shape after conversion. In either case, either `x`
and `y` or their elements must support multiplication and addition both
with themselves and with the elements of `c`.
If `c` is a 1-D array a one is implicitly appended to its shape to make
it 2-D. The shape of the result will be c.shape[2:] + x.shape.
Parameters
----------
x, y : array_like, compatible objects
The two dimensional series is evaluated at the points `(x, y)`,
where `x` and `y` must have the same shape. If `x` or `y` is a list
or tuple, it is first converted to an ndarray, otherwise it is left
unchanged and if it isn't an ndarray it is treated as a scalar.
c : array_like
Array of coefficients ordered so that the coefficient of the term
of multi-degree i,j is contained in ``c[i,j]``. If `c` has
dimension greater than two the remaining indices enumerate multiple
sets of coefficients.
Returns
-------
values : ndarray, compatible object
The values of the two dimensional Legendre series at points formed
from pairs of corresponding values from `x` and `y`.
See Also
--------
legval, leggrid2d, legval3d, leggrid3d
Notes
-----
.. versionadded::1.7.0
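Examples
--------
A hand-checked sketch: with c = [[1, 2], [3, 4]] the series is
1 + 2*L_1(y) + 3*L_1(x) + 4*L_1(x)*L_1(y) = 1 + 2y + 3x + 4xy:
>>> from numpy.polynomial import legendre as L
>>> L.legval2d(1, 1, [[1, 2], [3, 4]])
10.0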
"""
try:
x, y = np.array((x, y), copy=0)
except:
raise ValueError('x, y are incompatible')
c = legval(x, c)
c = legval(y, c, tensor=False)
return c
def leggrid2d(x, y, c):
"""
Evaluate a 2-D Legendre series on the Cartesian product of x and y.
This function returns the values:
.. math:: p(a,b) = \sum_{i,j} c_{i,j} * L_i(a) * L_j(b)
where the points `(a, b)` consist of all pairs formed by taking
`a` from `x` and `b` from `y`. The resulting points form a grid with
`x` in the first dimension and `y` in the second.
The parameters `x` and `y` are converted to arrays only if they are
tuples or a lists, otherwise they are treated as a scalars. In either
case, either `x` and `y` or their elements must support multiplication
and addition both with themselves and with the elements of `c`.
If `c` has fewer than two dimensions, ones are implicitly appended to
its shape to make it 2-D. The shape of the result will be c.shape[2:] +
x.shape + y.shape.
Parameters
----------
x, y : array_like, compatible objects
The two dimensional series is evaluated at the points in the
Cartesian product of `x` and `y`. If `x` or `y` is a list or
tuple, it is first converted to an ndarray, otherwise it is left
unchanged and, if it isn't an ndarray, it is treated as a scalar.
c : array_like
Array of coefficients ordered so that the coefficient of the term of
multi-degree i,j is contained in `c[i,j]`. If `c` has dimension
greater than two the remaining indices enumerate multiple sets of
coefficients.
Returns
-------
values : ndarray, compatible object
The values of the two dimensional Chebyshev series at points in the
Cartesian product of `x` and `y`.
See Also
--------
legval, legval2d, legval3d, leggrid3d
Notes
-----
.. versionadded::1.7.0
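Examples
--------
A hand-checked sketch: the series 1 + 2y + 3x + 4xy (coefficients
c = [[1, 2], [3, 4]]) evaluated on the grid built from x = [0, 1] and
y = [0, 1]; the array formatting below is illustrative:
>>> from numpy.polynomial import legendre as L
>>> L.leggrid2d([0, 1], [0, 1], [[1, 2], [3, 4]])
array([[ 1., 3.],
[ 4., 10.]])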
"""
c = legval(x, c)
c = legval(y, c)
return c
def legval3d(x, y, z, c):
"""
Evaluate a 3-D Legendre series at points (x, y, z).
This function returns the values:
.. math:: p(x,y,z) = \\sum_{i,j,k} c_{i,j,k} * L_i(x) * L_j(y) * L_k(z)
The parameters `x`, `y`, and `z` are converted to arrays only if
they are tuples or a lists, otherwise they are treated as a scalars and
they must have the same shape after conversion. In either case, either
`x`, `y`, and `z` or their elements must support multiplication and
addition both with themselves and with the elements of `c`.
If `c` has fewer than 3 dimensions, ones are implicitly appended to its
shape to make it 3-D. The shape of the result will be c.shape[3:] +
x.shape.
Parameters
----------
x, y, z : array_like, compatible object
The three dimensional series is evaluated at the points
`(x, y, z)`, where `x`, `y`, and `z` must have the same shape. If
any of `x`, `y`, or `z` is a list or tuple, it is first converted
to an ndarray, otherwise it is left unchanged and if it isn't an
ndarray it is treated as a scalar.
c : array_like
Array of coefficients ordered so that the coefficient of the term of
multi-degree i,j,k is contained in ``c[i,j,k]``. If `c` has dimension
greater than 3 the remaining indices enumerate multiple sets of
coefficients.
Returns
-------
values : ndarray, compatible object
The values of the multidimensional polynomial on points formed with
triples of corresponding values from `x`, `y`, and `z`.
See Also
--------
legval, legval2d, leggrid2d, leggrid3d
Notes
-----
.. versionadded::1.7.0
"""
try:
x, y, z = np.array((x, y, z), copy=0)
except:
raise ValueError('x, y, z are incompatible')
c = legval(x, c)
c = legval(y, c, tensor=False)
c = legval(z, c, tensor=False)
return c
def leggrid3d(x, y, z, c):
"""
Evaluate a 3-D Legendre series on the Cartesian product of x, y, and z.
This function returns the values:
.. math:: p(a,b,c) = \\sum_{i,j,k} c_{i,j,k} * L_i(a) * L_j(b) * L_k(c)
where the points `(a, b, c)` consist of all triples formed by taking
`a` from `x`, `b` from `y`, and `c` from `z`. The resulting points form
a grid with `x` in the first dimension, `y` in the second, and `z` in
the third.
The parameters `x`, `y`, and `z` are converted to arrays only if they
are tuples or a lists, otherwise they are treated as a scalars. In
either case, either `x`, `y`, and `z` or their elements must support
multiplication and addition both with themselves and with the elements
of `c`.
If `c` has fewer than three dimensions, ones are implicitly appended to
its shape to make it 3-D. The shape of the result will be c.shape[3:] +
x.shape + y.shape + z.shape.
Parameters
----------
x, y, z : array_like, compatible objects
The three dimensional series is evaluated at the points in the
Cartesian product of `x`, `y`, and `z`. If `x`,`y`, or `z` is a
list or tuple, it is first converted to an ndarray, otherwise it is
left unchanged and, if it isn't an ndarray, it is treated as a
scalar.
    c : array_like
        Array of coefficients ordered so that the coefficient of the term of
        multi-degree i,j,k is contained in ``c[i,j,k]``. If `c` has dimension
        greater than three the remaining indices enumerate multiple sets of
        coefficients.
    Returns
    -------
    values : ndarray, compatible object
        The values of the three dimensional Legendre series at points in the
        Cartesian product of `x`, `y`, and `z`.
See Also
--------
legval, legval2d, leggrid2d, legval3d
Notes
-----
    .. versionadded:: 1.7.0
"""
c = legval(x, c)
c = legval(y, c)
c = legval(z, c)
return c
def legvander(x, deg):
"""Pseudo-Vandermonde matrix of given degree.
Returns the pseudo-Vandermonde matrix of degree `deg` and sample points
`x`. The pseudo-Vandermonde matrix is defined by
.. math:: V[..., i] = L_i(x)
where `0 <= i <= deg`. The leading indices of `V` index the elements of
`x` and the last index is the degree of the Legendre polynomial.
If `c` is a 1-D array of coefficients of length `n + 1` and `V` is the
array ``V = legvander(x, n)``, then ``np.dot(V, c)`` and
``legval(x, c)`` are the same up to roundoff. This equivalence is
useful both for least squares fitting and for the evaluation of a large
number of Legendre series of the same degree and sample points.
Parameters
----------
x : array_like
Array of points. The dtype is converted to float64 or complex128
depending on whether any of the elements are complex. If `x` is
scalar it is converted to a 1-D array.
deg : int
Degree of the resulting matrix.
Returns
-------
vander : ndarray
The pseudo-Vandermonde matrix. The shape of the returned matrix is
        ``x.shape + (deg + 1,)``, where the last index is the degree of the
corresponding Legendre polynomial. The dtype will be the same as
the converted `x`.
"""
ideg = int(deg)
if ideg != deg:
raise ValueError("deg must be integer")
if ideg < 0:
raise ValueError("deg must be non-negative")
x = np.array(x, copy=0, ndmin=1) + 0.0
dims = (ideg + 1,) + x.shape
dtyp = x.dtype
v = np.empty(dims, dtype=dtyp)
# Use forward recursion to generate the entries. This is not as accurate
# as reverse recursion in this application but it is more efficient.
v[0] = x*0 + 1
if ideg > 0:
v[1] = x
for i in range(2, ideg + 1):
v[i] = (v[i-1]*x*(2*i - 1) - v[i-2]*(i - 1))/i
return np.rollaxis(v, 0, v.ndim)
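# Illustrative check (added for exposition, not part of the original file):
# as the docstring states, np.dot(legvander(x, n), c) agrees with
# legval(x, c) up to roundoff.
#
# >>> x = np.array([-1.0, 0.0, 1.0])
# >>> c = np.array([1.0, 2.0, 3.0])
# >>> np.allclose(np.dot(legvander(x, 2), c), legval(x, c))
# True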
def legvander2d(x, y, deg):
"""Pseudo-Vandermonde matrix of given degrees.
Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
points `(x, y)`. The pseudo-Vandermonde matrix is defined by
.. math:: V[..., deg[1]*i + j] = L_i(x) * L_j(y),
where `0 <= i <= deg[0]` and `0 <= j <= deg[1]`. The leading indices of
`V` index the points `(x, y)` and the last index encodes the degrees of
the Legendre polynomials.
If ``V = legvander2d(x, y, [xdeg, ydeg])``, then the columns of `V`
correspond to the elements of a 2-D coefficient array `c` of shape
(xdeg + 1, ydeg + 1) in the order
.. math:: c_{00}, c_{01}, c_{02} ... , c_{10}, c_{11}, c_{12} ...
and ``np.dot(V, c.flat)`` and ``legval2d(x, y, c)`` will be the same
up to roundoff. This equivalence is useful both for least squares
fitting and for the evaluation of a large number of 2-D Legendre
series of the same degrees and sample points.
Parameters
----------
x, y : array_like
Arrays of point coordinates, all of the same shape. The dtypes
will be converted to either float64 or complex128 depending on
whether any of the elements are complex. Scalars are converted to
1-D arrays.
deg : list of ints
List of maximum degrees of the form [x_deg, y_deg].
Returns
-------
vander2d : ndarray
The shape of the returned matrix is ``x.shape + (order,)``, where
        :math:`order = (deg[0]+1)*(deg[1]+1)`. The dtype will be the same
as the converted `x` and `y`.
See Also
--------
    legvander, legvander3d, legval2d, legval3d
Notes
-----
    .. versionadded:: 1.7.0
"""
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy = ideg
x, y = np.array((x, y), copy=0) + 0.0
vx = legvander(x, degx)
vy = legvander(y, degy)
v = vx[..., None]*vy[..., None,:]
return v.reshape(v.shape[:-2] + (-1,))
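# Illustrative check (added for exposition, not part of the original file):
# the flattened column order matches ``c.flat``, so ``V.dot(c.ravel())``
# reproduces legval2d(x, y, c) up to roundoff.
#
# >>> x = y = np.array([-0.5, 0.5])
# >>> c = np.arange(6.0).reshape(2, 3)
# >>> V = legvander2d(x, y, [1, 2])
# >>> np.allclose(V.dot(c.ravel()), legval2d(x, y, c))
# True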
def legvander3d(x, y, z, deg):
"""Pseudo-Vandermonde matrix of given degrees.
Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
points `(x, y, z)`. If `l, m, n` are the given degrees in `x, y, z`,
    then the pseudo-Vandermonde matrix is defined by
.. math:: V[..., (m+1)(n+1)i + (n+1)j + k] = L_i(x)*L_j(y)*L_k(z),
    where `0 <= i <= l`, `0 <= j <= m`, and `0 <= k <= n`. The leading
indices of `V` index the points `(x, y, z)` and the last index encodes
the degrees of the Legendre polynomials.
If ``V = legvander3d(x, y, z, [xdeg, ydeg, zdeg])``, then the columns
of `V` correspond to the elements of a 3-D coefficient array `c` of
shape (xdeg + 1, ydeg + 1, zdeg + 1) in the order
.. math:: c_{000}, c_{001}, c_{002},... , c_{010}, c_{011}, c_{012},...
and ``np.dot(V, c.flat)`` and ``legval3d(x, y, z, c)`` will be the
same up to roundoff. This equivalence is useful both for least squares
fitting and for the evaluation of a large number of 3-D Legendre
series of the same degrees and sample points.
Parameters
----------
x, y, z : array_like
Arrays of point coordinates, all of the same shape. The dtypes will
be converted to either float64 or complex128 depending on whether
any of the elements are complex. Scalars are converted to 1-D
arrays.
deg : list of ints
List of maximum degrees of the form [x_deg, y_deg, z_deg].
Returns
-------
vander3d : ndarray
The shape of the returned matrix is ``x.shape + (order,)``, where
        :math:`order = (deg[0]+1)*(deg[1]+1)*(deg[2]+1)`. The dtype will
be the same as the converted `x`, `y`, and `z`.
See Also
--------
    legvander, legvander2d, legval2d, legval3d
Notes
-----
    .. versionadded:: 1.7.0
"""
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy, degz = ideg
x, y, z = np.array((x, y, z), copy=0) + 0.0
vx = legvander(x, degx)
vy = legvander(y, degy)
vz = legvander(z, degz)
v = vx[..., None, None]*vy[..., None,:, None]*vz[..., None, None,:]
return v.reshape(v.shape[:-3] + (-1,))
def legfit(x, y, deg, rcond=None, full=False, w=None):
"""
Least squares fit of Legendre series to data.
Return the coefficients of a Legendre series of degree `deg` that is the
least squares fit to the data values `y` given at points `x`. If `y` is
1-D the returned coefficients will also be 1-D. If `y` is 2-D multiple
fits are done, one for each column of `y`, and the resulting
coefficients are stored in the corresponding columns of a 2-D return.
The fitted polynomial(s) are in the form
.. math:: p(x) = c_0 + c_1 * L_1(x) + ... + c_n * L_n(x),
where `n` is `deg`.
Parameters
----------
x : array_like, shape (M,)
x-coordinates of the M sample points ``(x[i], y[i])``.
y : array_like, shape (M,) or (M, K)
y-coordinates of the sample points. Several data sets of sample
points sharing the same x-coordinates can be fitted at once by
passing in a 2D-array that contains one dataset per column.
deg : int
Degree of the fitting polynomial
rcond : float, optional
Relative condition number of the fit. Singular values smaller than
this relative to the largest singular value will be ignored. The
default value is len(x)*eps, where eps is the relative precision of
the float type, about 2e-16 in most cases.
full : bool, optional
Switch determining nature of return value. When it is False (the
default) just the coefficients are returned, when True diagnostic
information from the singular value decomposition is also returned.
w : array_like, shape (`M`,), optional
Weights. If not None, the contribution of each point
``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the
weights are chosen so that the errors of the products ``w[i]*y[i]``
all have the same variance. The default value is None.
.. versionadded:: 1.5.0
Returns
-------
    coef : ndarray, shape (deg + 1,) or (deg + 1, K)
Legendre coefficients ordered from low to high. If `y` was 2-D,
the coefficients for the data in column k of `y` are in column
`k`.
[residuals, rank, singular_values, rcond] : list
These values are only returned if `full` = True
resid -- sum of squared residuals of the least squares fit
rank -- the numerical rank of the scaled Vandermonde matrix
sv -- singular values of the scaled Vandermonde matrix
rcond -- value of `rcond`.
For more details, see `linalg.lstsq`.
Warns
-----
RankWarning
The rank of the coefficient matrix in the least-squares fit is
deficient. The warning is only raised if `full` = False. The
warnings can be turned off by
>>> import warnings
>>> warnings.simplefilter('ignore', RankWarning)
See Also
--------
chebfit, polyfit, lagfit, hermfit, hermefit
legval : Evaluates a Legendre series.
legvander : Vandermonde matrix of Legendre series.
legweight : Legendre weight function (= 1).
linalg.lstsq : Computes a least-squares fit from the matrix.
scipy.interpolate.UnivariateSpline : Computes spline fits.
Notes
-----
The solution is the coefficients of the Legendre series `p` that
minimizes the sum of the weighted squared errors
.. math:: E = \\sum_j w_j^2 * |y_j - p(x_j)|^2,
where :math:`w_j` are the weights. This problem is solved by setting up
as the (typically) overdetermined matrix equation
.. math:: V(x) * c = w * y,
where `V` is the weighted pseudo Vandermonde matrix of `x`, `c` are the
coefficients to be solved for, `w` are the weights, and `y` are the
observed values. This equation is then solved using the singular value
decomposition of `V`.
If some of the singular values of `V` are so small that they are
neglected, then a `RankWarning` will be issued. This means that the
coefficient values may be poorly determined. Using a lower order fit
will usually get rid of the warning. The `rcond` parameter can also be
set to a value smaller than its default, but the resulting fit may be
spurious and have large contributions from roundoff error.
Fits using Legendre series are usually better conditioned than fits
using power series, but much can depend on the distribution of the
sample points and the smoothness of the data. If the quality of the fit
is inadequate splines may be a good alternative.
References
----------
.. [1] Wikipedia, "Curve fitting",
http://en.wikipedia.org/wiki/Curve_fitting
Examples
--------
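    (Illustrative sketch added for exposition; the printed values below are
    approximate and their exact formatting may vary with the NumPy version.)
    >>> x = np.linspace(-1, 1, 51)
    >>> y = legval(x, [1., 2., 3.])  # sample an exact degree-2 series
    >>> np.round(legfit(x, y, 2), 4)
    array([ 1.,  2.,  3.])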
"""
order = int(deg) + 1
x = np.asarray(x) + 0.0
y = np.asarray(y) + 0.0
# check arguments.
if deg < 0:
raise ValueError("expected deg >= 0")
if x.ndim != 1:
raise TypeError("expected 1D vector for x")
if x.size == 0:
raise TypeError("expected non-empty vector for x")
if y.ndim < 1 or y.ndim > 2:
raise TypeError("expected 1D or 2D array for y")
if len(x) != len(y):
raise TypeError("expected x and y to have same length")
# set up the least squares matrices in transposed form
lhs = legvander(x, deg).T
rhs = y.T
if w is not None:
w = np.asarray(w) + 0.0
if w.ndim != 1:
raise TypeError("expected 1D vector for w")
if len(x) != len(w):
raise TypeError("expected x and w to have same length")
# apply weights. Don't use inplace operations as they
# can cause problems with NA.
lhs = lhs * w
rhs = rhs * w
# set rcond
if rcond is None:
rcond = len(x)*np.finfo(x.dtype).eps
# Determine the norms of the design matrix columns.
if issubclass(lhs.dtype.type, np.complexfloating):
scl = np.sqrt((np.square(lhs.real) + np.square(lhs.imag)).sum(1))
else:
scl = np.sqrt(np.square(lhs).sum(1))
scl[scl == 0] = 1
# Solve the least squares problem.
c, resids, rank, s = la.lstsq(lhs.T/scl, rhs.T, rcond)
c = (c.T/scl).T
# warn on rank reduction
if rank != order and not full:
msg = "The fit may be poorly conditioned"
warnings.warn(msg, pu.RankWarning)
if full:
return c, [resids, rank, s, rcond]
else:
return c
def legcompanion(c):
"""Return the scaled companion matrix of c.
The basis polynomials are scaled so that the companion matrix is
    symmetric when `c` is a Legendre basis polynomial. This provides
better eigenvalue estimates than the unscaled case and for basis
polynomials the eigenvalues are guaranteed to be real if
`numpy.linalg.eigvalsh` is used to obtain them.
Parameters
----------
c : array_like
1-D array of Legendre series coefficients ordered from low to high
degree.
Returns
-------
mat : ndarray
Scaled companion matrix of dimensions (deg, deg).
Notes
-----
    .. versionadded:: 1.7.0
"""
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) < 2:
raise ValueError('Series must have maximum degree of at least 1.')
if len(c) == 2:
return np.array([[-c[0]/c[1]]])
n = len(c) - 1
mat = np.zeros((n, n), dtype=c.dtype)
scl = 1./np.sqrt(2*np.arange(n) + 1)
top = mat.reshape(-1)[1::n+1]
bot = mat.reshape(-1)[n::n+1]
top[...] = np.arange(1, n)*scl[:n-1]*scl[1:n]
bot[...] = top
mat[:, -1] -= (c[:-1]/c[-1])*(scl/scl[-1])*(n/(2*n - 1))
return mat
def legroots(c):
"""
Compute the roots of a Legendre series.
Return the roots (a.k.a. "zeros") of the polynomial
.. math:: p(x) = \\sum_i c[i] * L_i(x).
Parameters
----------
c : 1-D array_like
1-D array of coefficients.
Returns
-------
out : ndarray
Array of the roots of the series. If all the roots are real,
then `out` is also real, otherwise it is complex.
See Also
--------
polyroots, chebroots, lagroots, hermroots, hermeroots
Notes
-----
    The root estimates are obtained as the eigenvalues of the companion
    matrix. Roots far from the origin of the complex plane may have large
    errors due to the numerical instability of the series for such values.
Roots with multiplicity greater than 1 will also show larger errors as
the value of the series near such points is relatively insensitive to
errors in the roots. Isolated roots near the origin can be improved by
a few iterations of Newton's method.
The Legendre series basis polynomials aren't powers of ``x`` so the
results of this function may seem unintuitive.
Examples
--------
>>> import numpy.polynomial.legendre as leg
>>> leg.legroots((1, 2, 3, 4)) # 4L_3 + 3L_2 + 2L_1 + 1L_0, all real roots
array([-0.85099543, -0.11407192, 0.51506735])
"""
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) < 2:
return np.array([], dtype=c.dtype)
if len(c) == 2:
return np.array([-c[0]/c[1]])
m = legcompanion(c)
r = la.eigvals(m)
r.sort()
return r
def leggauss(deg):
"""
Gauss-Legendre quadrature.
Computes the sample points and weights for Gauss-Legendre quadrature.
These sample points and weights will correctly integrate polynomials of
degree :math:`2*deg - 1` or less over the interval :math:`[-1, 1]` with
the weight function :math:`f(x) = 1`.
Parameters
----------
deg : int
Number of sample points and weights. It must be >= 1.
Returns
-------
x : ndarray
1-D ndarray containing the sample points.
y : ndarray
1-D ndarray containing the weights.
Notes
-----
    .. versionadded:: 1.7.0
    The results have only been tested up to degree 100; higher degrees may
    be problematic. The weights are determined by using the fact that
.. math:: w_k = c / (L'_n(x_k) * L_{n-1}(x_k))
where :math:`c` is a constant independent of :math:`k` and :math:`x_k`
is the k'th root of :math:`L_n`, and then scaling the results to get
the right value when integrating 1.
"""
ideg = int(deg)
    if ideg != deg or ideg < 1:
        raise ValueError("deg must be a positive integer")
# first approximation of roots. We use the fact that the companion
# matrix is symmetric in this case in order to obtain better zeros.
c = np.array([0]*deg + [1])
m = legcompanion(c)
x = la.eigvals(m)
x.sort()
# improve roots by one application of Newton
dy = legval(x, c)
df = legval(x, legder(c))
x -= dy/df
# compute the weights. We scale the factor to avoid possible numerical
# overflow.
fm = legval(x, c[1:])
fm /= np.abs(fm).max()
df /= np.abs(df).max()
w = 1/(fm * df)
# for Legendre we can also symmetrize
w = (w + w[::-1])/2
x = (x - x[::-1])/2
# scale w to get the right value
w *= 2. / w.sum()
return x, w
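# Illustrative usage sketch (added for exposition, not part of the original
# file): a 3-point rule integrates polynomials up to degree 5 exactly over
# [-1, 1], e.g. the integral of x**2 is 2/3.
#
# >>> x, w = leggauss(3)
# >>> np.sum(w * x**2)  # approximately 0.6666...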
def legweight(x):
"""
Weight function of the Legendre polynomials.
The weight function is :math:`1` and the interval of integration is
:math:`[-1, 1]`. The Legendre polynomials are orthogonal, but not
normalized, with respect to this weight function.
Parameters
----------
x : array_like
Values at which the weight function will be computed.
Returns
-------
w : ndarray
The weight function at `x`.
Notes
-----
    .. versionadded:: 1.7.0
"""
w = x*0.0 + 1.0
return w
#
# Legendre series class
#
class Legendre(ABCPolyBase):
"""A Legendre series class.
The Legendre class provides the standard Python numerical methods
'+', '-', '*', '//', '%', 'divmod', '**', and '()' as well as the
attributes and methods listed in the `ABCPolyBase` documentation.
Parameters
----------
coef : array_like
Legendre coefficients in order of increasing degree, i.e.,
``(1, 2, 3)`` gives ``1*P_0(x) + 2*P_1(x) + 3*P_2(x)``.
domain : (2,) array_like, optional
Domain to use. The interval ``[domain[0], domain[1]]`` is mapped
to the interval ``[window[0], window[1]]`` by shifting and scaling.
The default value is [-1, 1].
window : (2,) array_like, optional
Window, see `domain` for its use. The default value is [-1, 1].
.. versionadded:: 1.6.0
"""
# Virtual Functions
_add = staticmethod(legadd)
_sub = staticmethod(legsub)
_mul = staticmethod(legmul)
_div = staticmethod(legdiv)
_pow = staticmethod(legpow)
_val = staticmethod(legval)
_int = staticmethod(legint)
_der = staticmethod(legder)
_fit = staticmethod(legfit)
_line = staticmethod(legline)
_roots = staticmethod(legroots)
_fromroots = staticmethod(legfromroots)
# Virtual properties
nickname = 'leg'
domain = np.array(legdomain)
window = np.array(legdomain)
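# Illustrative usage sketch (added for exposition, not part of the original
# file): the class delegates to the module functions above, so fitting and
# evaluation go through the standard ABCPolyBase interface.
#
# >>> xs = np.linspace(-1, 1, 20)
# >>> p = Legendre.fit(xs, xs**2, 2)
# >>> np.round(p(0.5), 4)  # approximately 0.25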
| mit |
mahak/ansible | test/units/module_utils/basic/test_selinux.py | 18 | 9842 | # -*- coding: utf-8 -*-
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2016 Toshio Kuratomi <tkuratomi@ansible.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import errno
import json
import pytest
from ...compat.mock import mock_open, patch
from ansible.module_utils import basic
from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils.six.moves import builtins
@pytest.fixture
def no_args_module_exec():
with patch.object(basic, '_ANSIBLE_ARGS', b'{"ANSIBLE_MODULE_ARGS": {}}'):
yield # we're patching the global module object, so nothing to yield
def no_args_module(selinux_enabled=None, selinux_mls_enabled=None):
am = basic.AnsibleModule(argument_spec={})
# just dirty-patch the wrappers on the object instance since it's short-lived
if isinstance(selinux_enabled, bool):
patch.object(am, 'selinux_enabled', return_value=selinux_enabled).start()
if isinstance(selinux_mls_enabled, bool):
patch.object(am, 'selinux_mls_enabled', return_value=selinux_mls_enabled).start()
return am
# test AnsibleModule selinux wrapper methods
@pytest.mark.usefixtures('no_args_module_exec')
class TestSELinuxMU:
def test_selinux_enabled(self):
# test selinux unavailable
# selinux unavailable, should return false
with patch.object(basic, 'HAVE_SELINUX', False):
assert no_args_module().selinux_enabled() is False
# test selinux present/not-enabled
disabled_mod = no_args_module()
with patch('ansible.module_utils.compat.selinux.is_selinux_enabled', return_value=0):
assert disabled_mod.selinux_enabled() is False
# ensure value is cached (same answer after unpatching)
assert disabled_mod.selinux_enabled() is False
# and present / enabled
enabled_mod = no_args_module()
with patch('ansible.module_utils.compat.selinux.is_selinux_enabled', return_value=1):
assert enabled_mod.selinux_enabled() is True
# ensure value is cached (same answer after unpatching)
assert enabled_mod.selinux_enabled() is True
def test_selinux_mls_enabled(self):
# selinux unavailable, should return false
with patch.object(basic, 'HAVE_SELINUX', False):
assert no_args_module().selinux_mls_enabled() is False
# selinux disabled, should return false
with patch('ansible.module_utils.compat.selinux.is_selinux_mls_enabled', return_value=0):
assert no_args_module(selinux_enabled=False).selinux_mls_enabled() is False
# selinux enabled, should pass through the value of is_selinux_mls_enabled
with patch('ansible.module_utils.compat.selinux.is_selinux_mls_enabled', return_value=1):
assert no_args_module(selinux_enabled=True).selinux_mls_enabled() is True
def test_selinux_initial_context(self):
# selinux missing/disabled/enabled sans MLS is 3-element None
assert no_args_module(selinux_enabled=False, selinux_mls_enabled=False).selinux_initial_context() == [None, None, None]
assert no_args_module(selinux_enabled=True, selinux_mls_enabled=False).selinux_initial_context() == [None, None, None]
# selinux enabled with MLS is 4-element None
assert no_args_module(selinux_enabled=True, selinux_mls_enabled=True).selinux_initial_context() == [None, None, None, None]
def test_selinux_default_context(self):
# selinux unavailable
with patch.object(basic, 'HAVE_SELINUX', False):
assert no_args_module().selinux_default_context(path='/foo/bar') == [None, None, None]
am = no_args_module(selinux_enabled=True, selinux_mls_enabled=True)
# matchpathcon success
with patch('ansible.module_utils.compat.selinux.matchpathcon', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
assert am.selinux_default_context(path='/foo/bar') == ['unconfined_u', 'object_r', 'default_t', 's0']
# matchpathcon fail (return initial context value)
with patch('ansible.module_utils.compat.selinux.matchpathcon', return_value=[-1, '']):
assert am.selinux_default_context(path='/foo/bar') == [None, None, None, None]
# matchpathcon OSError
with patch('ansible.module_utils.compat.selinux.matchpathcon', side_effect=OSError):
assert am.selinux_default_context(path='/foo/bar') == [None, None, None, None]
def test_selinux_context(self):
# selinux unavailable
with patch.object(basic, 'HAVE_SELINUX', False):
assert no_args_module().selinux_context(path='/foo/bar') == [None, None, None]
am = no_args_module(selinux_enabled=True, selinux_mls_enabled=True)
# lgetfilecon_raw passthru
with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
assert am.selinux_context(path='/foo/bar') == ['unconfined_u', 'object_r', 'default_t', 's0']
# lgetfilecon_raw returned a failure
with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', return_value=[-1, '']):
assert am.selinux_context(path='/foo/bar') == [None, None, None, None]
# lgetfilecon_raw OSError (should bomb the module)
with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', side_effect=OSError(errno.ENOENT, 'NotFound')):
with pytest.raises(SystemExit):
am.selinux_context(path='/foo/bar')
with patch('ansible.module_utils.compat.selinux.lgetfilecon_raw', side_effect=OSError()):
with pytest.raises(SystemExit):
am.selinux_context(path='/foo/bar')
def test_is_special_selinux_path(self):
args = to_bytes(json.dumps(dict(ANSIBLE_MODULE_ARGS={'_ansible_selinux_special_fs': "nfs,nfsd,foos",
'_ansible_remote_tmp': "/tmp",
'_ansible_keep_remote_files': False})))
with patch.object(basic, '_ANSIBLE_ARGS', args):
am = basic.AnsibleModule(
argument_spec=dict(),
)
def _mock_find_mount_point(path):
if path.startswith('/some/path'):
return '/some/path'
elif path.startswith('/weird/random/fstype'):
return '/weird/random/fstype'
return '/'
am.find_mount_point = _mock_find_mount_point
am.selinux_context = lambda path: ['foo_u', 'foo_r', 'foo_t', 's0']
m = mock_open()
m.side_effect = OSError
with patch.object(builtins, 'open', m, create=True):
assert am.is_special_selinux_path('/some/path/that/should/be/nfs') == (False, None)
mount_data = [
'/dev/disk1 / ext4 rw,seclabel,relatime,data=ordered 0 0\n',
'10.1.1.1:/path/to/nfs /some/path nfs ro 0 0\n',
'whatever /weird/random/fstype foos rw 0 0\n',
]
# mock_open has a broken readlines() implementation apparently...
# this should work by default but doesn't, so we fix it
m = mock_open(read_data=''.join(mount_data))
m.return_value.readlines.return_value = mount_data
with patch.object(builtins, 'open', m, create=True):
assert am.is_special_selinux_path('/some/random/path') == (False, None)
assert am.is_special_selinux_path('/some/path/that/should/be/nfs') == (True, ['foo_u', 'foo_r', 'foo_t', 's0'])
assert am.is_special_selinux_path('/weird/random/fstype/path') == (True, ['foo_u', 'foo_r', 'foo_t', 's0'])
def test_set_context_if_different(self):
am = no_args_module(selinux_enabled=False)
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True) is True
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is False
am = no_args_module(selinux_enabled=True, selinux_mls_enabled=True)
am.selinux_context = lambda path: ['bar_u', 'bar_r', None, None]
am.is_special_selinux_path = lambda path: (False, None)
with patch('ansible.module_utils.compat.selinux.lsetfilecon', return_value=0) as m:
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is True
m.assert_called_with('/path/to/file', 'foo_u:foo_r:foo_t:s0')
m.reset_mock()
am.check_mode = True
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is True
assert not m.called
am.check_mode = False
with patch('ansible.module_utils.compat.selinux.lsetfilecon', return_value=1):
with pytest.raises(SystemExit):
am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
with patch('ansible.module_utils.compat.selinux.lsetfilecon', side_effect=OSError):
with pytest.raises(SystemExit):
am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
am.is_special_selinux_path = lambda path: (True, ['sp_u', 'sp_r', 'sp_t', 's0'])
with patch('ansible.module_utils.compat.selinux.lsetfilecon', return_value=0) as m:
assert am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False) is True
m.assert_called_with('/path/to/file', 'sp_u:sp_r:sp_t:s0')
| gpl-3.0 |
jymannob/Sick-Beard | lib/html5lib/treewalkers/_base.py | 658 | 6907 | from __future__ import absolute_import, division, unicode_literals
from six import text_type, string_types
import gettext
_ = gettext.gettext
from xml.dom import Node
DOCUMENT = Node.DOCUMENT_NODE
DOCTYPE = Node.DOCUMENT_TYPE_NODE
TEXT = Node.TEXT_NODE
ELEMENT = Node.ELEMENT_NODE
COMMENT = Node.COMMENT_NODE
ENTITY = Node.ENTITY_NODE
UNKNOWN = "<#UNKNOWN#>"
from ..constants import voidElements, spaceCharacters
spaceCharacters = "".join(spaceCharacters)
def to_text(s, blank_if_none=True):
"""Wrapper around six.text_type to convert None to empty string"""
if s is None:
if blank_if_none:
return ""
else:
return None
elif isinstance(s, text_type):
return s
else:
return text_type(s)
def is_text_or_none(string):
"""Wrapper around isinstance(string_types) or is None"""
return string is None or isinstance(string, string_types)
class TreeWalker(object):
def __init__(self, tree):
self.tree = tree
def __iter__(self):
raise NotImplementedError
def error(self, msg):
return {"type": "SerializeError", "data": msg}
def emptyTag(self, namespace, name, attrs, hasChildren=False):
assert namespace is None or isinstance(namespace, string_types), type(namespace)
assert isinstance(name, string_types), type(name)
assert all((namespace is None or isinstance(namespace, string_types)) and
isinstance(name, string_types) and
isinstance(value, string_types)
for (namespace, name), value in attrs.items())
yield {"type": "EmptyTag", "name": to_text(name, False),
"namespace": to_text(namespace),
"data": attrs}
if hasChildren:
yield self.error(_("Void element has children"))
def startTag(self, namespace, name, attrs):
assert namespace is None or isinstance(namespace, string_types), type(namespace)
assert isinstance(name, string_types), type(name)
assert all((namespace is None or isinstance(namespace, string_types)) and
isinstance(name, string_types) and
isinstance(value, string_types)
for (namespace, name), value in attrs.items())
return {"type": "StartTag",
"name": text_type(name),
"namespace": to_text(namespace),
"data": dict(((to_text(namespace, False), to_text(name)),
to_text(value, False))
for (namespace, name), value in attrs.items())}
def endTag(self, namespace, name):
assert namespace is None or isinstance(namespace, string_types), type(namespace)
        assert isinstance(name, string_types), type(name)
return {"type": "EndTag",
"name": to_text(name, False),
"namespace": to_text(namespace),
"data": {}}
def text(self, data):
assert isinstance(data, string_types), type(data)
data = to_text(data)
middle = data.lstrip(spaceCharacters)
left = data[:len(data) - len(middle)]
if left:
yield {"type": "SpaceCharacters", "data": left}
data = middle
middle = data.rstrip(spaceCharacters)
right = data[len(middle):]
if middle:
yield {"type": "Characters", "data": middle}
if right:
yield {"type": "SpaceCharacters", "data": right}
def comment(self, data):
assert isinstance(data, string_types), type(data)
return {"type": "Comment", "data": text_type(data)}
def doctype(self, name, publicId=None, systemId=None, correct=True):
assert is_text_or_none(name), type(name)
assert is_text_or_none(publicId), type(publicId)
assert is_text_or_none(systemId), type(systemId)
return {"type": "Doctype",
"name": to_text(name),
"publicId": to_text(publicId),
"systemId": to_text(systemId),
"correct": to_text(correct)}
def entity(self, name):
assert isinstance(name, string_types), type(name)
return {"type": "Entity", "name": text_type(name)}
def unknown(self, nodeType):
return self.error(_("Unknown node type: ") + nodeType)
class NonRecursiveTreeWalker(TreeWalker):
def getNodeDetails(self, node):
raise NotImplementedError
def getFirstChild(self, node):
raise NotImplementedError
def getNextSibling(self, node):
raise NotImplementedError
def getParentNode(self, node):
raise NotImplementedError
def __iter__(self):
currentNode = self.tree
while currentNode is not None:
details = self.getNodeDetails(currentNode)
type, details = details[0], details[1:]
hasChildren = False
if type == DOCTYPE:
yield self.doctype(*details)
elif type == TEXT:
for token in self.text(*details):
yield token
elif type == ELEMENT:
namespace, name, attributes, hasChildren = details
if name in voidElements:
for token in self.emptyTag(namespace, name, attributes,
hasChildren):
yield token
hasChildren = False
else:
yield self.startTag(namespace, name, attributes)
elif type == COMMENT:
yield self.comment(details[0])
elif type == ENTITY:
yield self.entity(details[0])
elif type == DOCUMENT:
hasChildren = True
else:
yield self.unknown(details[0])
if hasChildren:
firstChild = self.getFirstChild(currentNode)
else:
firstChild = None
if firstChild is not None:
currentNode = firstChild
else:
while currentNode is not None:
details = self.getNodeDetails(currentNode)
type, details = details[0], details[1:]
if type == ELEMENT:
namespace, name, attributes, hasChildren = details
if name not in voidElements:
yield self.endTag(namespace, name)
if self.tree is currentNode:
currentNode = None
break
nextSibling = self.getNextSibling(currentNode)
if nextSibling is not None:
currentNode = nextSibling
break
else:
currentNode = self.getParentNode(currentNode)
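# Minimal illustrative subclass (added for exposition, not part of the
# original file). The toy node type below is an assumption of this sketch,
# not an html5lib API; it exists only to show how the four navigation hooks
# drive the non-recursive iteration above.

class _ToyNode(object):
    def __init__(self, name, children=()):
        self.name = name
        self.children = list(children)
        self.parent = None
        for child in self.children:
            child.parent = self

class ToyTreeWalker(NonRecursiveTreeWalker):
    def getNodeDetails(self, node):
        # every toy node is an element without a namespace or attributes
        return ELEMENT, None, node.name, {}, bool(node.children)

    def getFirstChild(self, node):
        return node.children[0] if node.children else None

    def getNextSibling(self, node):
        if node.parent is None:
            return None
        siblings = node.parent.children
        i = siblings.index(node)
        return siblings[i + 1] if i + 1 < len(siblings) else None

    def getParentNode(self, node):
        return node.parent

# Walking <div><span></span></div> yields start and end tokens in document
# order:
#
# >>> tree = _ToyNode('div', [_ToyNode('span')])
# >>> [token['type'] for token in ToyTreeWalker(tree)]
# ['StartTag', 'StartTag', 'EndTag', 'EndTag']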
| gpl-3.0 |
dohoangkhiem/ansible | contrib/inventory/openvz.py | 79 | 2692 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# openvz.py
#
# Copyright 2014 jordonr <jordon@beamsyn.net>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Inspired by libvirt_lxc.py inventory script
# https://github.com/ansible/ansible/blob/e5ef0eca03cbb6c8950c06dc50d0ca22aa8902f4/plugins/inventory/libvirt_lxc.py
#
# Groups are determined by the description field of openvz guests
# multiple groups can be separated by commas: webserver,dbserver
from subprocess import Popen,PIPE
import sys
import json
#List openvz hosts
vzhosts = ['vzhost1','vzhost2','vzhost3']
#Add openvz hosts to the inventory and Add "_meta" trick
inventory = {'vzhosts': {'hosts': vzhosts}, '_meta': {'hostvars': {}}}
#default group, when description not defined
default_group = ['vzguest']
def get_guests():
#Loop through vzhosts
for h in vzhosts:
#SSH to vzhost and get the list of guests in json
pipe = Popen(['ssh', h,'vzlist','-j'], stdout=PIPE, universal_newlines=True)
#Load Json info of guests
json_data = json.loads(pipe.stdout.read())
#loop through guests
for j in json_data:
#Add information to host vars
inventory['_meta']['hostvars'][j['hostname']] = {'ctid': j['ctid'], 'veid': j['veid'], 'vpsid': j['vpsid'], 'private_path': j['private'], 'root_path': j['root'], 'ip': j['ip']}
#determine group from guest description
if j['description'] is not None:
groups = j['description'].split(",")
else:
groups = default_group
#add guest to inventory
for g in groups:
if g not in inventory:
inventory[g] = {'hosts': []}
inventory[g]['hosts'].append(j['hostname'])
return inventory
if len(sys.argv) == 2 and sys.argv[1] == '--list':
inv_json = get_guests()
print(json.dumps(inv_json, sort_keys=True))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
print(json.dumps({}))
else:
print("Need an argument, either --list or --host <host>")
| gpl-3.0 |
ambientsound/rsync | web/core/models.py | 1 | 2129 | from __future__ import unicode_literals
from django.db import models
from django.conf import settings
import django.db.models.signals
import django.dispatch.dispatcher
import web.core
import re
import os
import uuid
class File(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
slug = models.CharField(max_length=64, default=web.core.random_slug_default_length, editable=False, unique=True)
author = models.ForeignKey(settings.AUTH_USER_MODEL)
file = models.FileField(upload_to=web.core.get_file_path)
created = models.DateTimeField(auto_now_add=True)
expiry = models.DateTimeField(default=web.core.default_expiry, blank=True)
def uri(self):
"""
@returns The URI of this file, which can be used to retrieve it using a redirect.
"""
filename = re.sub(r'^%s' % settings.UPLOAD_BASE_DIR, '', self.file.name).lstrip('/')
return os.path.join(settings.FILES_DIR, filename)
def url(self):
"""
@returns The short URL of this file, including the protocol and domain.
"""
return os.path.join(settings.SITE_URL, self.slug)
def delete_file(self):
base_path = os.path.dirname(self.file.name)
if os.path.exists(self.file.name):
os.unlink(self.file.name)
if os.path.exists(base_path):
os.rmdir(base_path)
def __unicode__(self):
return self.file.name
class Option(models.Model):
"""
Option is a key/value store that stores serialized options on specific users.
"""
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
author = models.ForeignKey(settings.AUTH_USER_MODEL)
key = models.CharField(max_length=128)
value = models.CharField(max_length=4096, null=True, blank=True)
class Meta:
unique_together = ('key', 'author')
def __unicode__(self):
return '%s=%s' % (self.key, self.value)
@django.dispatch.dispatcher.receiver(django.db.models.signals.post_delete, sender=File)
def file_delete(sender, instance, **kwargs):
instance.delete_file()
| bsd-3-clause |
erinspace/osf.io | addons/gitlab/tests/utils.py | 15 | 4852 | import mock
from addons.gitlab.api import GitLabClient
from addons.base.tests.base import OAuthAddonTestCaseMixin, AddonTestCase
from addons.gitlab.models import GitLabProvider
from addons.gitlab.tests.factories import GitLabAccountFactory
class GitLabAddonTestCase(OAuthAddonTestCaseMixin, AddonTestCase):
ADDON_SHORT_NAME = 'gitlab'
ExternalAccountFactory = GitLabAccountFactory
Provider = GitLabProvider
def set_node_settings(self, settings):
super(GitLabAddonTestCase, self).set_node_settings(settings)
settings.repo = 'osfgitlabtest'
settings.user = 'osfio'
def create_mock_gitlab(user='osfio', private=False):
"""Factory for mock GitLab objects.
Example: ::
>>> gitlab = create_mock_gitlab(user='osfio')
>>> gitlab.branches(user='osfio', repo='hello-world')
>>> [{u'commit': {u'sha': u'e22d92d5d90bb8f9695e9a5e2e2311a5c1997230',
... u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/e22d92d5d90bb8f9695e9a5e2e2311a5c1997230'},
... u'name': u'dev'},
... {u'commit': {u'sha': u'444a74d0d90a4aea744dacb31a14f87b5c30759c',
... u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/444a74d0d90a4aea744dacb31a14f87b5c30759c'},
... u'name': u'master'},
... {u'commit': {u'sha': u'c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6',
... u'url': u'https://api.gitlab.com/repos/osfio/mock-repo/commits/c6eaaf6708561c3d4439c0c8dd99c2e33525b1e6'},
... u'name': u'no-bundle'}]
:param str user: GitLab username.
:param bool private: Whether repo is private.
:return: An autospecced GitLab Mock object
"""
gitlab_mock = mock.create_autospec(GitLabClient)
gitlab_mock.repo = mock.Mock(**{
u'approvals_before_merge': 0,
u'archived': False,
u'avatar_url': None,
u'builds_enabled': True,
u'container_registry_enabled': True,
u'created_at': u'2017-07-05T16:40:26.428Z',
u'creator_id': 1444024,
u'default_branch': u'master',
u'description': u'For testing',
u'forks_count': 0,
u'http_url_to_repo': u'https://gitlab.com/{}/mock-repo.git'.format(user),
u'id': 3643758,
u'issues_enabled': True,
u'last_activity_at': u'2017-07-05T16:40:26.428Z',
u'lfs_enabled': True,
u'merge_requests_enabled': True,
u'name': u'mock-repo',
u'name_with_namespace': u'{} / mock-repo'.format(user),
u'namespace': {u'full_path': u'{}'.format(user),
u'id': 1748448,
u'kind': u'user',
u'name': u'{}'.format(user),
u'path': u'{}'.format(user)},
u'only_allow_merge_if_all_discussions_are_resolved': False,
u'only_allow_merge_if_build_succeeds': False,
u'open_issues_count': 0,
u'owner': {u'avatar_url': u'https://secure.gravatar.com/avatar/a7fa245b01a35ad586d8e2fa5bd7be5f?s=80&d=identicon',
u'id': 1444024,
u'name': u'{}'.format(user),
u'state': u'active',
u'username': u'{}'.format(user),
u'web_url': u'https://gitlab.com/{}'.format(user)},
u'path': u'mock-repo',
u'path_with_namespace': u'{}/mock-repo'.format(user),
u'permissions': {u'group_access': None,
u'project_access': {u'access_level': 40, u'notification_level': 3}},
u'public': False,
u'public_builds': True,
u'request_access_enabled': False,
u'shared_runners_enabled': True,
u'shared_with_groups': [],
u'snippets_enabled': True,
u'ssh_url_to_repo': u'git@gitlab.com:{}/mock-repo.git'.format(user),
u'star_count': 0,
u'tag_list': [],
u'visibility_level': 0,
u'web_url': u'https://gitlab.com/{}/mock-repo'.format(user),
u'wiki_enabled': True
})
branch = mock.Mock(**{
u'commit': {u'author_email': u'{}@gmail.com'.format(user),
                   u'author_name': u'{}'.format(user),
u'authored_date': u'2017-07-05T16:43:04.000+00:00',
u'committed_date': u'2017-07-05T16:43:04.000+00:00',
u'committer_email': u'{}@gmail.com'.format(user),
u'committer_name': u'{}'.format(user),
u'created_at': u'2017-07-05T16:43:04.000+00:00',
u'id': u'f064566f133ddfad636ceec72c5937cc0044c371',
u'message': u'Add readme.md',
u'parent_ids': [],
u'short_id': u'f064566f',
u'title': u'Add readme.md'},
u'developers_can_merge': False,
u'developers_can_push': False,
u'merged': False,
u'protected': True
})
# Hack because 'name' is a reserved keyword in a Mock object
type(branch).name = 'master'
gitlab_mock.branches.return_value = [branch]
return gitlab_mock
| apache-2.0 |
zhlooking/flask-microservices-users | plato/test/test_user_model.py | 1 | 1853 | from sqlalchemy.exc import IntegrityError
from plato import db
from plato.api.models import User
from plato.test.base import BaseTestCase
from plato.test.utils import add_user
class TestUserModel(BaseTestCase):
def test_user_model(self):
user = add_user('foo', 'foo@bar.com', 'test_pwd')
self.assertTrue(user.id)
self.assertEqual('foo', user.username)
self.assertEqual('foo@bar.com', user.email)
self.assertTrue(user.active)
self.assertTrue(user.created_at)
self.assertTrue(user.password)
        self.assertEqual(user.admin, False)
def test_add_user_duplicate_username(self):
add_user('foo', 'foo@bar.com', 'test_pwd')
duplicate_user = User('foo', 'foo_1@bar.com', 'test_pwd')
db.session.add(duplicate_user)
self.assertRaises(IntegrityError, db.session.commit)
def test_add_user_duplicate_email(self):
add_user('foo', 'foo@bar.com', 'test_pwd')
duplicate_user = User('foo_1', 'foo@bar.com', 'test_pwd')
db.session.add(duplicate_user)
self.assertRaises(IntegrityError, db.session.commit)
def test_passwords_are_random(self):
user_foo = add_user('foo', 'foo@bar.com', 'test_pwd')
user_bar = add_user('bar', 'bar@bar.com', 'test_pwd')
self.assertNotEqual(user_foo.password, user_bar.password)
def test_encode_auth_token(self):
user = add_user('test@test.com', 'test@test.com', 'test')
auth_token = user.encode_auth_token(user.id)
self.assertTrue(isinstance(auth_token, bytes))
def test_decode_auth_token(self):
user = add_user('test@test.com', 'test@test.com', 'test')
auth_token = user.encode_auth_token(user.id)
self.assertTrue(isinstance(auth_token, bytes))
self.assertTrue(User.decode_auth_token(auth_token), user.id)
| mit |
AvadootNachankar/gstudio | gnowsys-ndf/gnowsys_ndf/ndf/urls/forum.py | 11 | 1473 | from django.conf.urls import patterns, url
urlpatterns = patterns('gnowsys_ndf.ndf.views.forum',
url(r'^[/]$', 'forum', name='forum'),
# url(r'^/(?P<node_id>[\w-]+)$', 'forum', name='forum'),
url(r'^/create/$', 'create_forum', name='create_forum'),
# url(r'^/show/(?P<forum_id>[\w-]+)$', 'display_forum', name='show'),
url(r'^/edit_forum/(?P<forum_id>[\w-]+)$', 'edit_forum', name='edit_forum'),
url(r'^/edit_thread/(?P<forum_id>[\w-]+)/(?P<thread_id>[\w-]+)$', 'edit_thread', name='edit_thread'),
url(r'^/delete/(?P<node_id>[\w-]+)$', 'delete_forum', name='forum_delete'),
url(r'^/delete/thread/(?P<forum_id>[\w-]+)/(?P<node_id>[\w-]+)$', 'delete_thread', name='thread_delete'),
url(r'^/delete/reply/(?P<forum_id>[\w-]+)/(?P<thread_id>[\w-]+)/(?P<node_id>[\w-]+)$', 'delete_reply', name='reply_delete'),
url(r'^/(?P<forum_id>[\w-]+)$', 'display_forum', name='show'),
url(r'^/(?P<forum_id>[\w-]+)/thread/create/$', 'create_thread', name='create_thread'),###
url(r'^/thread/(?P<thread_id>[\w-]+)$', 'display_thread', name='thread'),
# url(r'^/(?P<forum_id>[\w-]+)/(?P<thread_id>[\w-]+)$', 'display_thread', name='thread'),
url(r'^/add_node/$','add_node',name="add_node"),
)
| agpl-3.0 |
crmccreary/openerp_server | openerp/addons/l10n_br/account.py | 8 | 8592 | # -*- encoding: utf-8 -*-
#################################################################################
# #
# Copyright (C) 2009 Renato Lima - Akretion #
# #
#This program is free software: you can redistribute it and/or modify #
#it under the terms of the GNU Affero General Public License as published by #
#the Free Software Foundation, either version 3 of the License, or #
#(at your option) any later version. #
# #
#This program is distributed in the hope that it will be useful, #
#but WITHOUT ANY WARRANTY; without even the implied warranty of #
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#GNU General Public License for more details. #
# #
#You should have received a copy of the GNU General Public License #
#along with this program. If not, see <http://www.gnu.org/licenses/>. #
#################################################################################
import time
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from operator import itemgetter
import netsvc
import pooler
from osv import fields, osv
import decimal_precision as dp
from tools.misc import currency
from tools.translate import _
from tools import config
class account_tax_code_template(osv.osv):
_inherit = 'account.tax.code.template'
_columns = {
'domain':fields.char('Domain', size=32, help="This field is only used if you develop your own module allowing developers to create specific taxes in a custom domain."),
        'tax_discount': fields.boolean('Discount this Tax in Price', help="Mark it for (ICMS, PIS, etc.)."),
}
account_tax_code_template()
class account_tax_code(osv.osv):
_inherit = 'account.tax.code'
_columns = {
'domain':fields.char('Domain', size=32, help="This field is only used if you develop your own module allowing developers to create specific taxes in a custom domain."),
        'tax_discount': fields.boolean('Discount this Tax in Price', help="Mark it for (ICMS, PIS, etc.)."),
}
account_tax_code()
class account_tax_template(osv.osv):
_inherit = 'account.tax.template'
def get_precision_tax():
def change_digit_tax(cr):
res = pooler.get_pool(cr.dbname).get('decimal.precision').precision_get(cr, 1, 'Account')
return (16, res+2)
return change_digit_tax
_columns = {
        'tax_discount': fields.boolean('Discount this Tax in Price', help="Mark it for (ICMS, PIS, etc.)."),
        'base_reduction': fields.float('Reduction', required=True, digits_compute=get_precision_tax(), help="For taxes of type percentage, enter % ratio between 0-1."),
        'amount_mva': fields.float('MVA Percent', required=True, digits_compute=get_precision_tax(), help="For taxes of type percentage, enter % ratio between 0-1."),
'type': fields.selection( [('percent','Percentage'), ('fixed','Fixed Amount'), ('none','None'), ('code','Python Code'), ('balance','Balance'), ('quantity','Quantity')], 'Tax Type', required=True,
help="The computation method for the tax amount."),
}
_defaults = {
'base_reduction': 0,
'amount_mva': 0,
}
def onchange_tax_code_id(self, cr, uid, ids, tax_code_id, context=None):
result = {'value': {}}
if not tax_code_id:
return result
obj_tax_code = self.pool.get('account.tax.code.template').browse(cr, uid, tax_code_id)
if obj_tax_code:
result['value']['tax_discount'] = obj_tax_code.tax_discount
result['value']['domain'] = obj_tax_code.domain
return result
account_tax_template()
class account_tax(osv.osv):
_inherit = 'account.tax'
def get_precision_tax():
def change_digit_tax(cr):
res = pooler.get_pool(cr.dbname).get('decimal.precision').precision_get(cr, 1, 'Account')
return (16, res+2)
return change_digit_tax
_columns = {
        'tax_discount': fields.boolean('Discount this Tax in Price', help="Mark it for (ICMS, PIS, etc.)."),
        'base_reduction': fields.float('Reduction', required=True, digits_compute=get_precision_tax(), help="A decimal percentage between 0-1."),
        'amount_mva': fields.float('MVA Percent', required=True, digits_compute=get_precision_tax(), help="A decimal percentage between 0-1."),
'type': fields.selection( [('percent','Percentage'), ('fixed','Fixed Amount'), ('none','None'), ('code','Python Code'), ('balance','Balance'), ('quantity','Quantity')], 'Tax Type', required=True,
help="The computation method for the tax amount."),
}
_defaults = {
'base_reduction': 0,
'amount_mva': 0,
}
def onchange_tax_code_id(self, cr, uid, ids, tax_code_id, context=None):
result = {'value': {}}
if not tax_code_id:
return result
obj_tax_code = self.pool.get('account.tax.code').browse(cr, uid, tax_code_id)
if obj_tax_code:
result['value']['tax_discount'] = obj_tax_code.tax_discount
result['value']['domain'] = obj_tax_code.domain
return result
account_tax()
class wizard_multi_charts_accounts(osv.osv_memory):
_inherit = 'wizard.multi.charts.accounts'
def execute(self, cr, uid, ids, context=None):
super(wizard_multi_charts_accounts, self).execute(cr, uid, ids, context)
obj_multi = self.browse(cr, uid, ids[0])
obj_acc_tax = self.pool.get('account.tax')
obj_acc_tax_tmp = self.pool.get('account.tax.template')
obj_acc_cst = self.pool.get('l10n_br_account.cst')
obj_acc_cst_tmp = self.pool.get('l10n_br_account.cst.template')
obj_tax_code = self.pool.get('account.tax.code')
obj_tax_code_tmp = self.pool.get('account.tax.code.template')
# Creating Account
obj_acc_root = obj_multi.chart_template_id.account_root_id
tax_code_root_id = obj_multi.chart_template_id.tax_code_root_id.id
company_id = obj_multi.company_id.id
children_tax_code_template = self.pool.get('account.tax.code.template').search(cr, uid, [('parent_id','child_of',[tax_code_root_id])], order='id')
children_tax_code_template.sort()
for tax_code_template in self.pool.get('account.tax.code.template').browse(cr, uid, children_tax_code_template, context=context):
tax_code_id = self.pool.get('account.tax.code').search(cr, uid, [('code','=',tax_code_template.code),('company_id','=',company_id)])
if tax_code_id:
obj_tax_code.write(cr, uid, tax_code_id, {'domain': tax_code_template.domain,'tax_discount': tax_code_template.tax_discount})
cst_tmp_ids = self.pool.get('l10n_br_account.cst.template').search(cr, uid, [('tax_code_template_id','=',tax_code_template.id)], order='id')
for cst_tmp in self.pool.get('l10n_br_account.cst.template').browse(cr, uid, cst_tmp_ids, context=context):
obj_acc_cst.create(cr, uid, {
'code': cst_tmp.code,
'name': cst_tmp.name,
'tax_code_id': tax_code_id[0],
})
tax_ids = self.pool.get('account.tax').search(cr, uid, [('company_id','=',company_id)])
for tax in self.pool.get('account.tax').browse(cr, uid, tax_ids, context=context):
if tax.tax_code_id:
obj_acc_tax.write(cr, uid, tax.id, {'domain': tax.tax_code_id.domain,'tax_discount': tax.tax_code_id.tax_discount})
wizard_multi_charts_accounts()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mzweilin/HashTag-Understanding | twitter-1.12.1/twitter/twitter_globals.py | 6 | 1024 | '''
This module is automatically generated using `update.py`
.. data:: POST_ACTIONS
List of twitter method names that require the use of POST
'''
POST_ACTIONS = [
# Status Methods
'update', 'retweet', 'update_with_media',
# Direct Message Methods
'new',
# Account Methods
'update_profile_image', 'update_delivery_device', 'update_profile',
'update_profile_background_image', 'update_profile_colors',
'update_location', 'end_session', 'settings',
'update_profile_banner', 'remove_profile_banner',
# Notification Methods
'leave', 'follow',
# Status Methods, Block Methods, Direct Message Methods,
# Friendship Methods, Favorite Methods
'destroy', 'destroy_all',
# Block Methods, Friendship Methods, Favorite Methods
'create', 'create_all',
# Users Methods
'lookup', 'report_spam',
# Streaming Methods
'filter', 'user', 'site',
# OAuth Methods
'token', 'access_token',
'request_token', 'invalidate_token',
]
| apache-2.0 |
odoousers2014/odoo_addons-2 | clv_person/clv_annotation/clv_annotation.py | 2 | 2115 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp import models, fields
class clv_person(models.Model):
_inherit = 'clv_person'
annotation_ids = fields.Many2many('clv_annotation',
'clv_person_annotation_rel',
'person_id',
'annotation_id',
'Annotations')
class clv_annotation(models.Model):
_inherit = 'clv_annotation'
person_ids = fields.Many2many('clv_person',
'clv_person_annotation_rel',
'annotation_id',
'person_id',
'Persons')
| agpl-3.0 |